file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
main.rs | #![feature(proc_macro)]
#![no_std]
extern crate cortex_m;
extern crate cortex_m_rtfm as rtfm;
extern crate stm32f30x_hal as hal;
extern crate ls010b7dh01;
extern crate rn4870;
extern crate embedded_graphics as graphics;
extern crate panic_abort;
extern crate nb;
mod display;
mod ble;
use cortex_m::asm;
use cortex_m::peripheral::syst::SystClkSource;
use rtfm::{app, Threshold};
use hal::prelude::*;
use hal::timer;
use hal::timer::Timer;
use hal::spi::Spi;
use hal::serial;
use hal::serial::Serial;
use hal::delay::Delay;
use hal::gpio::{gpiob, gpioc, Input, Output, PullUp, PushPull, AF7};
use ls010b7dh01::Ls010b7dh01;
use graphics::prelude::*;
use graphics::primitives::{Circle, Line, Rect};
use graphics::fonts::{Font, Font6x8};
use graphics::transform::Transform;
use graphics::image::Image1BPP;
app! {
device: hal::stm32f30x,
resources: {
static TOGGLE: bool = false;
static TIME: u8 = 0;
static STATE: State = State::Time;
static EXTI: hal::stm32f30x::EXTI;
static RESET_BLE: bool = true;
static REDRAW: bool = true;
static DRAW_BUFFER: [u8; 16] = [32; 16];
static BUFFER_POS: u8 = 0;
// Late Resources
static EXTCOMIN: display::Extcomin;
static DISPLAY: display::Display;
static BLE: ble::Ble;
},
tasks: {
TIM7: {
path: tick,
resources: [TOGGLE, EXTCOMIN, DISPLAY],
},
SYS_TICK: {
path: sys_tick,
resources: [TOGGLE, EXTCOMIN, DISPLAY,
TIME, BLE, RESET_BLE, STATE, REDRAW,
DRAW_BUFFER],
},
USART1_EXTI25: {
path: ble_message,
resources: [BLE, DRAW_BUFFER, BUFFER_POS],
},
EXTI9_5: {
enabled: true,
priority: 1,
path: exti9_5,
resources: [STATE, EXTI],
},
EXTI15_10: {
path: exti15_10,
resources: [STATE, EXTI],
},
},
}
pub enum State {
Ble,
Time,
Face,
}
fn init(mut p: init::Peripherals, _r: init::Resources) -> init::LateResources {
let mut rcc = p.device.RCC.constrain();
let mut flash = p.device.FLASH.constrain();
let mut gpioa = p.device.GPIOA.split(&mut rcc.ahb);
let mut gpiob = p.device.GPIOB.split(&mut rcc.ahb);
let mut gpioc = p.device.GPIOC.split(&mut rcc.ahb);
// Enable the syscfg
rcc.apb2.enr().modify(|_, w| w.syscfgen().enabled());
rcc.apb2.rstr().modify(|_, w| w.syscfgrst().set_bit());
rcc.apb2.rstr().modify(|_, w| w.syscfgrst().clear_bit());
// Enable systick
p.core.SYST.set_clock_source(SystClkSource::Core);
p.core.SYST.set_reload(16_000_000);
p.core.SYST.enable_interrupt();
p.core.SYST.enable_counter();
// Set up our clocks & timer & delay
let clocks = rcc.cfgr.freeze(&mut flash.acr);
let mut timer = Timer::tim7(p.device.TIM7, 1.hz(), clocks, &mut rcc.apb1);
//timer.listen(timer::Event::TimeOut);
let mut delay = Delay::new(p.core.SYST, clocks);
// Set up our GPIO pins
let disp_en = gpiob.pb2.into_push_pull_output(
&mut gpiob.moder,
&mut gpiob.otyper,
);
let extcomin = gpiob.pb1.into_push_pull_output(
&mut gpiob.moder,
&mut gpiob.otyper,
);
let cs = gpiob.pb0.into_push_pull_output(
&mut gpiob.moder,
&mut gpiob.otyper,
);
let mut v5_en = gpioa.pa3.into_push_pull_output(
&mut gpioa.moder,
&mut gpioa.otyper,
);
let reset_ble = gpiob.pb5.into_push_pull_output(
&mut gpiob.moder,
&mut gpiob.otyper,
);
let sck = gpioa.pa5.into_af5(&mut gpioa.moder, &mut gpioa.afrl);
let miso = gpioa.pa6.into_af5(&mut gpioa.moder, &mut gpioa.afrl);
let mosi = gpioa.pa7.into_af5(&mut gpioa.moder, &mut gpioa.afrl);
let tx = gpiob.pb6.into_af7(&mut gpiob.moder, &mut gpiob.afrl);
let rx = gpiob.pb7.into_af7(&mut gpiob.moder, &mut gpiob.afrl);
let button_1 = gpiob.pb8.into_pull_up_input(
&mut gpiob.moder,
&mut gpiob.pupdr,
);
let button_2 = gpiob.pb9.into_pull_up_input(
&mut gpiob.moder,
&mut gpiob.pupdr,
);
let button_3 = gpioc.pc13.into_pull_up_input(
&mut gpioc.moder,
&mut gpioc.pupdr,
);
// Set up our display
let mode = ls010b7dh01::MODE;
let spi = Spi::spi1(
p.device.SPI1,
(sck, miso, mosi),
mode,
1.mhz(),
clocks,
&mut rcc.apb2,
);
let mut display = Ls010b7dh01::new(spi, cs, disp_en);
// Set up our BLE
let mut serial = Serial::usart1(
p.device.USART1,
(tx, rx),
115_200.bps(),
clocks,
&mut rcc.apb2,
);
serial.listen(serial::Event::Rxne); // TODO: Serial interrupts?
let mut ble = rn4870::Rn4870::new(serial, reset_ble);
// Set the default values
v5_en.set_high();
display.enable();
// Set up syscfg to link GPIO to EXTI
p.device.SYSCFG.exticr3.modify(|_, w| unsafe {
w.bits(0x11)
/* This does not work
w.exti8().bits(0b001) // Port b
.exti9().bits(0b001) // Port b
*/
});
p.device.SYSCFG.exticr4.modify(|_, w| unsafe {
w.exti13().bits(0b010) // Port c
});
p.device.EXTI.imr1.modify(|_, w| {
w.mr8().set_bit().mr9().set_bit().mr13().set_bit()
});
p.device.EXTI.ftsr1.modify(|_, w| {
w.tr8().set_bit().tr9().set_bit().tr13().set_bit()
});
init::LateResources {
DISPLAY: display,
EXTCOMIN: extcomin,
BLE: ble,
EXTI: p.device.EXTI,
}
}
fn idle() -> ! {
loop {
rtfm::wfi();
}
}
fn ble_message(_t: &mut Threshold, mut r: USART1_EXTI25::Resources) |
fn exti9_5(_t: &mut Threshold, mut r: EXTI9_5::Resources) {
if r.EXTI.pr1.read().pr8().bit_is_set() {
r.EXTI.pr1.modify(|_, w| w.pr8().set_bit());
*r.STATE = State::Ble;
}
if r.EXTI.pr1.read().pr9().bit_is_set() {
r.EXTI.pr1.modify(|_, w| w.pr9().set_bit());
*r.STATE = State::Time;
}
}
fn exti15_10(_t: &mut Threshold, mut r: EXTI15_10::Resources) {
if r.EXTI.pr1.read().pr13().bit_is_set() {
r.EXTI.pr1.modify(|_, w| w.pr13().set_bit());
*r.STATE = State::Face;
}
}
fn tick(_t: &mut Threshold, mut r: TIM7::Resources) {
}
fn sys_tick(_t: &mut Threshold, mut r: SYS_TICK::Resources) {
let toggle = *r.TOGGLE;
let extcomin = &mut *r.EXTCOMIN;
if *r.RESET_BLE {
r.BLE.hard_reset_on();
*r.RESET_BLE = false;
} else {
r.BLE.hard_reset_off();
}
match *r.STATE {
State::Ble => {
r.DISPLAY.clear();
//let s = String::from_utf8_lossy(&*r.DRAW_BUFFER);
unsafe {
let s = &*(&*r.DRAW_BUFFER as *const [u8] as *const str);
r.DISPLAY.draw(Font6x8::render_str(s).translate((5, 50)).into_iter());
r.DISPLAY.flush_buffer();
}
}
State::Time => {
*r.REDRAW = true;
draw_time(&mut *r.DISPLAY, *r.TIME);
*r.TIME += 1;
if *r.TIME == 60 {
*r.TIME = 0;
}
}
State::Face => {
if *r.REDRAW {
draw_face(&mut *r.DISPLAY);
*r.REDRAW = false;
}
}
}
// Toggle extcomin manually
if toggle {
(*extcomin).set_high();
} else {
(*extcomin).set_low();
}
*r.TOGGLE = !toggle;
}
fn draw_face(mut display: &mut display::Display) {
display.clear();
let bpp = Image1BPP::new(include_bytes!("../data/face_1bpp_neg.raw"), 120, 120)
.translate((0, 0));
display.draw(bpp.into_iter());
display.flush_buffer();
}
fn draw_time(mut display: &mut display::Display, time: u8) {
display.clear();
/*
let values = [
(125, 65), (124, 71), (123, 77), (122, 83), (119, 89),
(116, 94), (113, 100), (109, 105), (105, 109), (100, 113),
(95, 116), (89, 119), (83, 122), (77, 123), (71, 124),
(65, 125), (59, 124), (53, 123), (47, 122), (41, 119),
(36, 116), (30, 113), (25, 109), (21, 105), (17, 100),
(14, 95), (11, 89), (8, 83), (7, 77), (6, 71),
(5, 65), (6, 59), (7, 53), (8, 47), (11, 41),
(14, 36), (17, 30), (21, 25), (25, 21), (30, 17),
(35, 14), (41, 11), (47, 8), (53, 7), (59, 6),
(65, 5), (71, 6), (77, 7), (83, 8), (89, 11),
(94, 14), (100, 17), (105, 21), (109, 25), (113, 30),
(116, 35), (119, 41), (122, 47), (123, 53), (124, 59),
];
*/
let values =[(109, 64), (108, 68), (108, 73), (106, 77), (105, 82), (102, 86), (100, 90), (97, 94), (94, 97), (90, 100), (86, 102), (82, 105), (77, 106), (73, 108), (68, 108), (64, 109), (60, 108), (55, 108), (51, 106), (46, 105), (42, 102), (38, 100), (34, 97), (31, 94), (28, 90), (26, 86), (23, 82), (22, 77), (20, 73), (20, 68), (19, 64), (20, 60), (20, 55), (22, 51), (23, 46), (26, 42), (28, 38), (31, 34), (34, 31), (38, 28), (42, 26), (46, 23), (51, 22), (55, 20), (60, 20), (64, 19), (68, 20), (73, 20), (77, 22), (82, 23), (86, 26), (90, 28), (94, 31), (97, 34), (100, 38), (102, 42), (105, 46), (106, 51), (108, 55), (108, 60)];
let digits = [(116, 60), (108, 87), (88, 107), (61, 115), (34, 107), (14, 87), (6, 60), (14, 33), (34, 13), (61, 5), (88, 13), (108, 33)];
display.draw(Font6x8::render_str("3").translate(digits[0]).into_iter());
display.draw(Font6x8::render_str("4").translate(digits[1]).into_iter());
display.draw(Font6x8::render_str("5").translate(digits[2]).into_iter());
display.draw(Font6x8::render_str("6").translate(digits[3]).into_iter());
display.draw(Font6x8::render_str("7").translate(digits[4]).into_iter());
display.draw(Font6x8::render_str("8").translate(digits[5]).into_iter());
display.draw(Font6x8::render_str("9").translate(digits[6]).into_iter());
display.draw(Font6x8::render_str("10").translate(digits[7]).into_iter());
display.draw(Font6x8::render_str("11").translate(digits[8]).into_iter());
display.draw(Font6x8::render_str("12").translate(digits[9]).into_iter());
display.draw(Font6x8::render_str("1").translate(digits[10]).into_iter());
display.draw(Font6x8::render_str("2").translate(digits[11]).into_iter());
display.draw(Line::new((65, 65), values[time as usize], 1).into_iter());
display.flush_buffer();
}
fn draw_buffer(buffer: &[u8]) {
}
| {
let res = r.BLE.read_raw();
match res {
Ok(n) => {
if n < 32 {
return
}
(*r.DRAW_BUFFER)[*r.BUFFER_POS as usize] = n;
*r.BUFFER_POS += 1;
if *r.BUFFER_POS == 16 {
*r.BUFFER_POS = 0;
}
}
Err(nb::Error::Other(_)) => {
r.BLE.handle_error(|uart| { uart.clear_overflow_error(); } );
}
Err(nb::Error::WouldBlock) => {}
}
} | identifier_body |
main.rs | #![feature(proc_macro)]
#![no_std]
extern crate cortex_m;
extern crate cortex_m_rtfm as rtfm;
extern crate stm32f30x_hal as hal;
extern crate ls010b7dh01;
extern crate rn4870;
extern crate embedded_graphics as graphics;
extern crate panic_abort;
extern crate nb;
mod display;
mod ble;
use cortex_m::asm;
use cortex_m::peripheral::syst::SystClkSource;
use rtfm::{app, Threshold};
use hal::prelude::*;
use hal::timer;
use hal::timer::Timer;
use hal::spi::Spi;
use hal::serial;
use hal::serial::Serial;
use hal::delay::Delay;
use hal::gpio::{gpiob, gpioc, Input, Output, PullUp, PushPull, AF7};
use ls010b7dh01::Ls010b7dh01;
use graphics::prelude::*;
use graphics::primitives::{Circle, Line, Rect};
use graphics::fonts::{Font, Font6x8};
use graphics::transform::Transform;
use graphics::image::Image1BPP;
app! {
device: hal::stm32f30x,
resources: {
static TOGGLE: bool = false;
static TIME: u8 = 0;
static STATE: State = State::Time;
static EXTI: hal::stm32f30x::EXTI;
static RESET_BLE: bool = true;
static REDRAW: bool = true;
static DRAW_BUFFER: [u8; 16] = [32; 16];
static BUFFER_POS: u8 = 0;
// Late Resources
static EXTCOMIN: display::Extcomin;
static DISPLAY: display::Display;
static BLE: ble::Ble;
},
tasks: {
TIM7: {
path: tick,
resources: [TOGGLE, EXTCOMIN, DISPLAY],
},
SYS_TICK: {
path: sys_tick,
resources: [TOGGLE, EXTCOMIN, DISPLAY,
TIME, BLE, RESET_BLE, STATE, REDRAW,
DRAW_BUFFER],
},
USART1_EXTI25: {
path: ble_message,
resources: [BLE, DRAW_BUFFER, BUFFER_POS],
},
EXTI9_5: {
enabled: true,
priority: 1,
path: exti9_5,
resources: [STATE, EXTI],
},
EXTI15_10: {
path: exti15_10,
resources: [STATE, EXTI],
},
},
}
pub enum State {
Ble,
Time,
Face,
}
fn init(mut p: init::Peripherals, _r: init::Resources) -> init::LateResources {
let mut rcc = p.device.RCC.constrain();
let mut flash = p.device.FLASH.constrain();
let mut gpioa = p.device.GPIOA.split(&mut rcc.ahb);
let mut gpiob = p.device.GPIOB.split(&mut rcc.ahb);
let mut gpioc = p.device.GPIOC.split(&mut rcc.ahb);
// Enable the syscfg
rcc.apb2.enr().modify(|_, w| w.syscfgen().enabled());
rcc.apb2.rstr().modify(|_, w| w.syscfgrst().set_bit());
rcc.apb2.rstr().modify(|_, w| w.syscfgrst().clear_bit());
// Enable systick
p.core.SYST.set_clock_source(SystClkSource::Core);
p.core.SYST.set_reload(16_000_000);
p.core.SYST.enable_interrupt();
p.core.SYST.enable_counter();
// Set up our clocks & timer & delay
let clocks = rcc.cfgr.freeze(&mut flash.acr);
let mut timer = Timer::tim7(p.device.TIM7, 1.hz(), clocks, &mut rcc.apb1);
//timer.listen(timer::Event::TimeOut);
let mut delay = Delay::new(p.core.SYST, clocks);
// Set up our GPIO pins
let disp_en = gpiob.pb2.into_push_pull_output(
&mut gpiob.moder,
&mut gpiob.otyper,
);
let extcomin = gpiob.pb1.into_push_pull_output(
&mut gpiob.moder,
&mut gpiob.otyper,
);
let cs = gpiob.pb0.into_push_pull_output(
&mut gpiob.moder,
&mut gpiob.otyper,
);
let mut v5_en = gpioa.pa3.into_push_pull_output(
&mut gpioa.moder,
&mut gpioa.otyper,
);
let reset_ble = gpiob.pb5.into_push_pull_output(
&mut gpiob.moder,
&mut gpiob.otyper,
);
let sck = gpioa.pa5.into_af5(&mut gpioa.moder, &mut gpioa.afrl);
let miso = gpioa.pa6.into_af5(&mut gpioa.moder, &mut gpioa.afrl);
let mosi = gpioa.pa7.into_af5(&mut gpioa.moder, &mut gpioa.afrl);
let tx = gpiob.pb6.into_af7(&mut gpiob.moder, &mut gpiob.afrl);
let rx = gpiob.pb7.into_af7(&mut gpiob.moder, &mut gpiob.afrl);
let button_1 = gpiob.pb8.into_pull_up_input(
&mut gpiob.moder,
&mut gpiob.pupdr,
);
let button_2 = gpiob.pb9.into_pull_up_input(
&mut gpiob.moder,
&mut gpiob.pupdr,
);
let button_3 = gpioc.pc13.into_pull_up_input(
&mut gpioc.moder,
&mut gpioc.pupdr,
);
// Set up our display
let mode = ls010b7dh01::MODE;
let spi = Spi::spi1(
p.device.SPI1,
(sck, miso, mosi),
mode,
1.mhz(),
clocks,
&mut rcc.apb2,
);
let mut display = Ls010b7dh01::new(spi, cs, disp_en);
// Set up our BLE
let mut serial = Serial::usart1(
p.device.USART1,
(tx, rx),
115_200.bps(),
clocks,
&mut rcc.apb2,
);
serial.listen(serial::Event::Rxne); // TODO: Serial interrupts?
let mut ble = rn4870::Rn4870::new(serial, reset_ble);
// Set the default values
v5_en.set_high();
display.enable();
// Set up syscfg to link GPIO to EXTI
p.device.SYSCFG.exticr3.modify(|_, w| unsafe {
w.bits(0x11)
/* This does not work
w.exti8().bits(0b001) // Port b
.exti9().bits(0b001) // Port b
*/
});
p.device.SYSCFG.exticr4.modify(|_, w| unsafe {
w.exti13().bits(0b010) // Port c
});
p.device.EXTI.imr1.modify(|_, w| {
w.mr8().set_bit().mr9().set_bit().mr13().set_bit()
});
p.device.EXTI.ftsr1.modify(|_, w| {
w.tr8().set_bit().tr9().set_bit().tr13().set_bit()
});
init::LateResources {
DISPLAY: display,
EXTCOMIN: extcomin,
BLE: ble,
EXTI: p.device.EXTI,
}
}
fn idle() -> ! {
loop {
rtfm::wfi();
}
}
fn ble_message(_t: &mut Threshold, mut r: USART1_EXTI25::Resources) {
let res = r.BLE.read_raw();
match res {
Ok(n) => {
if n < 32 {
return
}
(*r.DRAW_BUFFER)[*r.BUFFER_POS as usize] = n;
*r.BUFFER_POS += 1;
if *r.BUFFER_POS == 16 |
}
Err(nb::Error::Other(_)) => {
r.BLE.handle_error(|uart| { uart.clear_overflow_error(); } );
}
Err(nb::Error::WouldBlock) => {}
}
}
fn exti9_5(_t: &mut Threshold, mut r: EXTI9_5::Resources) {
if r.EXTI.pr1.read().pr8().bit_is_set() {
r.EXTI.pr1.modify(|_, w| w.pr8().set_bit());
*r.STATE = State::Ble;
}
if r.EXTI.pr1.read().pr9().bit_is_set() {
r.EXTI.pr1.modify(|_, w| w.pr9().set_bit());
*r.STATE = State::Time;
}
}
fn exti15_10(_t: &mut Threshold, mut r: EXTI15_10::Resources) {
if r.EXTI.pr1.read().pr13().bit_is_set() {
r.EXTI.pr1.modify(|_, w| w.pr13().set_bit());
*r.STATE = State::Face;
}
}
fn tick(_t: &mut Threshold, mut r: TIM7::Resources) {
}
fn sys_tick(_t: &mut Threshold, mut r: SYS_TICK::Resources) {
let toggle = *r.TOGGLE;
let extcomin = &mut *r.EXTCOMIN;
if *r.RESET_BLE {
r.BLE.hard_reset_on();
*r.RESET_BLE = false;
} else {
r.BLE.hard_reset_off();
}
match *r.STATE {
State::Ble => {
r.DISPLAY.clear();
//let s = String::from_utf8_lossy(&*r.DRAW_BUFFER);
unsafe {
let s = &*(&*r.DRAW_BUFFER as *const [u8] as *const str);
r.DISPLAY.draw(Font6x8::render_str(s).translate((5, 50)).into_iter());
r.DISPLAY.flush_buffer();
}
}
State::Time => {
*r.REDRAW = true;
draw_time(&mut *r.DISPLAY, *r.TIME);
*r.TIME += 1;
if *r.TIME == 60 {
*r.TIME = 0;
}
}
State::Face => {
if *r.REDRAW {
draw_face(&mut *r.DISPLAY);
*r.REDRAW = false;
}
}
}
// Toggle extcomin manually
if toggle {
(*extcomin).set_high();
} else {
(*extcomin).set_low();
}
*r.TOGGLE = !toggle;
}
fn draw_face(mut display: &mut display::Display) {
display.clear();
let bpp = Image1BPP::new(include_bytes!("../data/face_1bpp_neg.raw"), 120, 120)
.translate((0, 0));
display.draw(bpp.into_iter());
display.flush_buffer();
}
fn draw_time(mut display: &mut display::Display, time: u8) {
display.clear();
/*
let values = [
(125, 65), (124, 71), (123, 77), (122, 83), (119, 89),
(116, 94), (113, 100), (109, 105), (105, 109), (100, 113),
(95, 116), (89, 119), (83, 122), (77, 123), (71, 124),
(65, 125), (59, 124), (53, 123), (47, 122), (41, 119),
(36, 116), (30, 113), (25, 109), (21, 105), (17, 100),
(14, 95), (11, 89), (8, 83), (7, 77), (6, 71),
(5, 65), (6, 59), (7, 53), (8, 47), (11, 41),
(14, 36), (17, 30), (21, 25), (25, 21), (30, 17),
(35, 14), (41, 11), (47, 8), (53, 7), (59, 6),
(65, 5), (71, 6), (77, 7), (83, 8), (89, 11),
(94, 14), (100, 17), (105, 21), (109, 25), (113, 30),
(116, 35), (119, 41), (122, 47), (123, 53), (124, 59),
];
*/
let values =[(109, 64), (108, 68), (108, 73), (106, 77), (105, 82), (102, 86), (100, 90), (97, 94), (94, 97), (90, 100), (86, 102), (82, 105), (77, 106), (73, 108), (68, 108), (64, 109), (60, 108), (55, 108), (51, 106), (46, 105), (42, 102), (38, 100), (34, 97), (31, 94), (28, 90), (26, 86), (23, 82), (22, 77), (20, 73), (20, 68), (19, 64), (20, 60), (20, 55), (22, 51), (23, 46), (26, 42), (28, 38), (31, 34), (34, 31), (38, 28), (42, 26), (46, 23), (51, 22), (55, 20), (60, 20), (64, 19), (68, 20), (73, 20), (77, 22), (82, 23), (86, 26), (90, 28), (94, 31), (97, 34), (100, 38), (102, 42), (105, 46), (106, 51), (108, 55), (108, 60)];
let digits = [(116, 60), (108, 87), (88, 107), (61, 115), (34, 107), (14, 87), (6, 60), (14, 33), (34, 13), (61, 5), (88, 13), (108, 33)];
display.draw(Font6x8::render_str("3").translate(digits[0]).into_iter());
display.draw(Font6x8::render_str("4").translate(digits[1]).into_iter());
display.draw(Font6x8::render_str("5").translate(digits[2]).into_iter());
display.draw(Font6x8::render_str("6").translate(digits[3]).into_iter());
display.draw(Font6x8::render_str("7").translate(digits[4]).into_iter());
display.draw(Font6x8::render_str("8").translate(digits[5]).into_iter());
display.draw(Font6x8::render_str("9").translate(digits[6]).into_iter());
display.draw(Font6x8::render_str("10").translate(digits[7]).into_iter());
display.draw(Font6x8::render_str("11").translate(digits[8]).into_iter());
display.draw(Font6x8::render_str("12").translate(digits[9]).into_iter());
display.draw(Font6x8::render_str("1").translate(digits[10]).into_iter());
display.draw(Font6x8::render_str("2").translate(digits[11]).into_iter());
display.draw(Line::new((65, 65), values[time as usize], 1).into_iter());
display.flush_buffer();
}
fn draw_buffer(buffer: &[u8]) {
}
| {
*r.BUFFER_POS = 0;
} | conditional_block |
main.rs | #![feature(proc_macro)]
#![no_std]
extern crate cortex_m;
extern crate cortex_m_rtfm as rtfm;
extern crate stm32f30x_hal as hal;
extern crate ls010b7dh01;
extern crate rn4870;
extern crate embedded_graphics as graphics;
extern crate panic_abort;
extern crate nb;
mod display;
mod ble;
use cortex_m::asm;
use cortex_m::peripheral::syst::SystClkSource;
use rtfm::{app, Threshold};
use hal::prelude::*;
use hal::timer;
use hal::timer::Timer;
use hal::spi::Spi;
use hal::serial;
use hal::serial::Serial;
use hal::delay::Delay;
use hal::gpio::{gpiob, gpioc, Input, Output, PullUp, PushPull, AF7};
use ls010b7dh01::Ls010b7dh01;
use graphics::prelude::*;
use graphics::primitives::{Circle, Line, Rect};
use graphics::fonts::{Font, Font6x8};
use graphics::transform::Transform;
use graphics::image::Image1BPP;
app! {
device: hal::stm32f30x,
resources: {
static TOGGLE: bool = false;
static TIME: u8 = 0;
static STATE: State = State::Time;
static EXTI: hal::stm32f30x::EXTI;
static RESET_BLE: bool = true;
static REDRAW: bool = true;
static DRAW_BUFFER: [u8; 16] = [32; 16];
static BUFFER_POS: u8 = 0;
// Late Resources
static EXTCOMIN: display::Extcomin;
static DISPLAY: display::Display;
static BLE: ble::Ble;
},
tasks: {
TIM7: {
path: tick,
resources: [TOGGLE, EXTCOMIN, DISPLAY],
},
SYS_TICK: {
path: sys_tick,
resources: [TOGGLE, EXTCOMIN, DISPLAY,
TIME, BLE, RESET_BLE, STATE, REDRAW,
DRAW_BUFFER],
},
USART1_EXTI25: {
path: ble_message,
resources: [BLE, DRAW_BUFFER, BUFFER_POS],
},
EXTI9_5: {
enabled: true,
priority: 1,
path: exti9_5,
resources: [STATE, EXTI],
},
EXTI15_10: {
path: exti15_10,
resources: [STATE, EXTI],
},
},
}
pub enum State {
Ble,
Time,
Face,
}
fn init(mut p: init::Peripherals, _r: init::Resources) -> init::LateResources {
let mut rcc = p.device.RCC.constrain();
let mut flash = p.device.FLASH.constrain();
let mut gpioa = p.device.GPIOA.split(&mut rcc.ahb);
let mut gpiob = p.device.GPIOB.split(&mut rcc.ahb);
let mut gpioc = p.device.GPIOC.split(&mut rcc.ahb);
// Enable the syscfg
rcc.apb2.enr().modify(|_, w| w.syscfgen().enabled());
rcc.apb2.rstr().modify(|_, w| w.syscfgrst().set_bit());
rcc.apb2.rstr().modify(|_, w| w.syscfgrst().clear_bit());
// Enable systick
p.core.SYST.set_clock_source(SystClkSource::Core);
p.core.SYST.set_reload(16_000_000);
p.core.SYST.enable_interrupt();
p.core.SYST.enable_counter();
// Set up our clocks & timer & delay
let clocks = rcc.cfgr.freeze(&mut flash.acr);
let mut timer = Timer::tim7(p.device.TIM7, 1.hz(), clocks, &mut rcc.apb1);
//timer.listen(timer::Event::TimeOut);
let mut delay = Delay::new(p.core.SYST, clocks);
// Set up our GPIO pins
let disp_en = gpiob.pb2.into_push_pull_output(
&mut gpiob.moder,
&mut gpiob.otyper,
);
let extcomin = gpiob.pb1.into_push_pull_output(
&mut gpiob.moder,
&mut gpiob.otyper,
);
let cs = gpiob.pb0.into_push_pull_output(
&mut gpiob.moder,
&mut gpiob.otyper,
);
let mut v5_en = gpioa.pa3.into_push_pull_output(
&mut gpioa.moder,
&mut gpioa.otyper,
);
let reset_ble = gpiob.pb5.into_push_pull_output(
&mut gpiob.moder,
&mut gpiob.otyper,
);
let sck = gpioa.pa5.into_af5(&mut gpioa.moder, &mut gpioa.afrl);
let miso = gpioa.pa6.into_af5(&mut gpioa.moder, &mut gpioa.afrl);
let mosi = gpioa.pa7.into_af5(&mut gpioa.moder, &mut gpioa.afrl);
let tx = gpiob.pb6.into_af7(&mut gpiob.moder, &mut gpiob.afrl);
let rx = gpiob.pb7.into_af7(&mut gpiob.moder, &mut gpiob.afrl);
let button_1 = gpiob.pb8.into_pull_up_input(
&mut gpiob.moder,
&mut gpiob.pupdr,
);
let button_2 = gpiob.pb9.into_pull_up_input(
&mut gpiob.moder,
&mut gpiob.pupdr,
);
let button_3 = gpioc.pc13.into_pull_up_input(
&mut gpioc.moder,
&mut gpioc.pupdr,
);
// Set up our display
let mode = ls010b7dh01::MODE;
let spi = Spi::spi1(
p.device.SPI1,
(sck, miso, mosi),
mode,
1.mhz(),
clocks,
&mut rcc.apb2,
);
let mut display = Ls010b7dh01::new(spi, cs, disp_en);
// Set up our BLE
let mut serial = Serial::usart1(
p.device.USART1,
(tx, rx),
115_200.bps(),
clocks,
&mut rcc.apb2,
);
serial.listen(serial::Event::Rxne); // TODO: Serial interrupts?
let mut ble = rn4870::Rn4870::new(serial, reset_ble);
// Set the default values
v5_en.set_high();
display.enable();
// Set up syscfg to link GPIO to EXTI
p.device.SYSCFG.exticr3.modify(|_, w| unsafe {
w.bits(0x11)
/* This does not work
w.exti8().bits(0b001) // Port b
.exti9().bits(0b001) // Port b
*/
});
p.device.SYSCFG.exticr4.modify(|_, w| unsafe {
w.exti13().bits(0b010) // Port c
});
p.device.EXTI.imr1.modify(|_, w| {
w.mr8().set_bit().mr9().set_bit().mr13().set_bit()
});
p.device.EXTI.ftsr1.modify(|_, w| {
w.tr8().set_bit().tr9().set_bit().tr13().set_bit()
});
init::LateResources {
DISPLAY: display,
EXTCOMIN: extcomin,
BLE: ble,
EXTI: p.device.EXTI,
}
}
fn idle() -> ! {
loop {
rtfm::wfi();
}
}
fn ble_message(_t: &mut Threshold, mut r: USART1_EXTI25::Resources) {
let res = r.BLE.read_raw();
match res {
Ok(n) => {
if n < 32 {
return
}
(*r.DRAW_BUFFER)[*r.BUFFER_POS as usize] = n;
*r.BUFFER_POS += 1;
if *r.BUFFER_POS == 16 {
*r.BUFFER_POS = 0;
}
}
Err(nb::Error::Other(_)) => {
r.BLE.handle_error(|uart| { uart.clear_overflow_error(); } );
}
Err(nb::Error::WouldBlock) => {}
}
}
fn exti9_5(_t: &mut Threshold, mut r: EXTI9_5::Resources) {
if r.EXTI.pr1.read().pr8().bit_is_set() {
r.EXTI.pr1.modify(|_, w| w.pr8().set_bit());
*r.STATE = State::Ble;
}
if r.EXTI.pr1.read().pr9().bit_is_set() {
r.EXTI.pr1.modify(|_, w| w.pr9().set_bit());
*r.STATE = State::Time;
}
}
fn exti15_10(_t: &mut Threshold, mut r: EXTI15_10::Resources) {
if r.EXTI.pr1.read().pr13().bit_is_set() {
r.EXTI.pr1.modify(|_, w| w.pr13().set_bit());
*r.STATE = State::Face;
}
}
fn | (_t: &mut Threshold, mut r: TIM7::Resources) {
}
fn sys_tick(_t: &mut Threshold, mut r: SYS_TICK::Resources) {
let toggle = *r.TOGGLE;
let extcomin = &mut *r.EXTCOMIN;
if *r.RESET_BLE {
r.BLE.hard_reset_on();
*r.RESET_BLE = false;
} else {
r.BLE.hard_reset_off();
}
match *r.STATE {
State::Ble => {
r.DISPLAY.clear();
//let s = String::from_utf8_lossy(&*r.DRAW_BUFFER);
unsafe {
let s = &*(&*r.DRAW_BUFFER as *const [u8] as *const str);
r.DISPLAY.draw(Font6x8::render_str(s).translate((5, 50)).into_iter());
r.DISPLAY.flush_buffer();
}
}
State::Time => {
*r.REDRAW = true;
draw_time(&mut *r.DISPLAY, *r.TIME);
*r.TIME += 1;
if *r.TIME == 60 {
*r.TIME = 0;
}
}
State::Face => {
if *r.REDRAW {
draw_face(&mut *r.DISPLAY);
*r.REDRAW = false;
}
}
}
// Toggle extcomin manually
if toggle {
(*extcomin).set_high();
} else {
(*extcomin).set_low();
}
*r.TOGGLE = !toggle;
}
fn draw_face(mut display: &mut display::Display) {
display.clear();
let bpp = Image1BPP::new(include_bytes!("../data/face_1bpp_neg.raw"), 120, 120)
.translate((0, 0));
display.draw(bpp.into_iter());
display.flush_buffer();
}
fn draw_time(mut display: &mut display::Display, time: u8) {
display.clear();
/*
let values = [
(125, 65), (124, 71), (123, 77), (122, 83), (119, 89),
(116, 94), (113, 100), (109, 105), (105, 109), (100, 113),
(95, 116), (89, 119), (83, 122), (77, 123), (71, 124),
(65, 125), (59, 124), (53, 123), (47, 122), (41, 119),
(36, 116), (30, 113), (25, 109), (21, 105), (17, 100),
(14, 95), (11, 89), (8, 83), (7, 77), (6, 71),
(5, 65), (6, 59), (7, 53), (8, 47), (11, 41),
(14, 36), (17, 30), (21, 25), (25, 21), (30, 17),
(35, 14), (41, 11), (47, 8), (53, 7), (59, 6),
(65, 5), (71, 6), (77, 7), (83, 8), (89, 11),
(94, 14), (100, 17), (105, 21), (109, 25), (113, 30),
(116, 35), (119, 41), (122, 47), (123, 53), (124, 59),
];
*/
let values =[(109, 64), (108, 68), (108, 73), (106, 77), (105, 82), (102, 86), (100, 90), (97, 94), (94, 97), (90, 100), (86, 102), (82, 105), (77, 106), (73, 108), (68, 108), (64, 109), (60, 108), (55, 108), (51, 106), (46, 105), (42, 102), (38, 100), (34, 97), (31, 94), (28, 90), (26, 86), (23, 82), (22, 77), (20, 73), (20, 68), (19, 64), (20, 60), (20, 55), (22, 51), (23, 46), (26, 42), (28, 38), (31, 34), (34, 31), (38, 28), (42, 26), (46, 23), (51, 22), (55, 20), (60, 20), (64, 19), (68, 20), (73, 20), (77, 22), (82, 23), (86, 26), (90, 28), (94, 31), (97, 34), (100, 38), (102, 42), (105, 46), (106, 51), (108, 55), (108, 60)];
let digits = [(116, 60), (108, 87), (88, 107), (61, 115), (34, 107), (14, 87), (6, 60), (14, 33), (34, 13), (61, 5), (88, 13), (108, 33)];
display.draw(Font6x8::render_str("3").translate(digits[0]).into_iter());
display.draw(Font6x8::render_str("4").translate(digits[1]).into_iter());
display.draw(Font6x8::render_str("5").translate(digits[2]).into_iter());
display.draw(Font6x8::render_str("6").translate(digits[3]).into_iter());
display.draw(Font6x8::render_str("7").translate(digits[4]).into_iter());
display.draw(Font6x8::render_str("8").translate(digits[5]).into_iter());
display.draw(Font6x8::render_str("9").translate(digits[6]).into_iter());
display.draw(Font6x8::render_str("10").translate(digits[7]).into_iter());
display.draw(Font6x8::render_str("11").translate(digits[8]).into_iter());
display.draw(Font6x8::render_str("12").translate(digits[9]).into_iter());
display.draw(Font6x8::render_str("1").translate(digits[10]).into_iter());
display.draw(Font6x8::render_str("2").translate(digits[11]).into_iter());
display.draw(Line::new((65, 65), values[time as usize], 1).into_iter());
display.flush_buffer();
}
fn draw_buffer(buffer: &[u8]) {
}
| tick | identifier_name |
model.py | import torch
from torch import nn
from torch.distributions.categorical import Categorical
from torch.nn import functional as F
import math
import torch
from resnet import ResNetEncoder
class Policy(nn.Module):
def __init__(self, env_config, num_players=2, max_entities=10):
super(Policy, self).__init__()
# State Parser
self.parse_state = ParseState(env_config, 10)
self.MAX_ENTITIES = 10
# Map Encoder
self.map = MapEmbedding(128)
# Entity Encoder
self.entity = EntityEmbedding(128, env_config['size'], 1)
# Scalar Encoder
self.scalar_encoder = nn.Linear(num_players, 128)
# transformer
# self.max_entities = 10
self.action_map = [None, "NORTH", "EAST", "SOUTH", "WEST", "CONVERT", "SPAWN"]
self.SHIP_TYPE = 2
self.SHIPYARD_TYPE = 1
num_actions = len(self.action_map)
self.transformer = nn.TransformerEncoder(nn.TransformerEncoderLayer(d_model=128, nhead=8, dim_feedforward=100), 2, norm=None)
self.policy = nn.Sequential(
nn.Linear(128, 128),
nn.ReLU(),
nn.Linear(128, num_actions)
)
self.value = nn.Sequential(
nn.Linear(128 * self.MAX_ENTITIES, 400),
nn.ReLU(),
nn.Linear(400, 100),
torch.nn.ReLU(),
nn.Linear(100, 1)
)
self.softmax = nn.Softmax(-1)
def device(self):
return next(self.parameters()).device
def | (self, state, mask = False):
# Scalar encoding
state = self.parse_state(state)
scalar = state['scalar'].to(self.device())
scalar_encoding = F.relu(self.scalar_encoder(scalar)).unsqueeze(1)
# Spatial Encoding
game_map = state['map'].to(self.device())
map_encoding = self.map(game_map).unsqueeze(1)
# Entity Encoding
entity_typ = state['entity_typ'].to(self.device())
entity_pos = state['entity_pos'].to(self.device())
entity_scalar = state['entity_scalar'].to(self.device())
entity_encodings = self.entity(entity_typ, entity_pos, entity_scalar)
embeddings = map_encoding + entity_encodings + scalar_encoding
set_embedding = self.transformer(embeddings)
out = self.policy(set_embedding)
if mask == True:
lens = []
for eid in state['entity_id']:
n_entities = len(eid)
lens.append(torch.tensor([1] * n_entities + [0] * (self.MAX_ENTITIES - n_entities)))
m = torch.stack(lens).to(self.device())
return self.softmax(out), m
return self.softmax(out)
def action(self, states):
if not isinstance(states, list):
states = [states]
t_states = self.parse_state(states)
out = self.forward(states)
actions_iter = []
raw_actions_iter = []
for i, state in enumerate(states):
raw_actions = Categorical(probs=out[i]).sample()
actions = {}
n_entities = len(t_states['entity_id'][i])
# TODO: Migrate this code to env helper
for e, eid in enumerate(t_states['entity_id'][i]):
act = self.action_map[raw_actions[e]]
typ = t_states['entity_typ'][i][e]
if typ == self.SHIP_TYPE and act == "SPAWN":
act = None
elif typ == self.SHIPYARD_TYPE and (act != "SPAWN" and act != None):
act = None
elif typ == 0:
continue
if act == "SPAWN":
if n_entities < self.MAX_ENTITIES:
n_entities += 1
else:
act = None
if act is not None:
actions[eid] = act
actions_iter.append(actions)
raw_actions_iter.append(raw_actions)
return actions_iter, raw_actions_iter
class ParseState(object):
"""Rescale the image in a sample to a given size.
Args:
output_size (tuple or int): Desired output size. If tuple, output is
matched to output_size. If int, smaller of image edges is matched
to output_size keeping aspect ratio the same.
"""
def __init__(self, config, max_entities):
self.map_size = config['size']
self.max_halite = config['maxCellHalite']
self.starting_halite = config['startingHalite']
self.max_entities = max_entities
def __call__(self, states):
if not isinstance(states, list):
states = [states]
spat_map_iter = []
entity_typ_iter = []
entity_pos_iter = []
entity_id_iter = []
entity_scalar_iter = []
scalar_iter = []
for s in states:
step = s['step']
halite = torch.tensor(s['halite']).float()
halite = halite.reshape(self.map_size, self.map_size, 1) / self.max_halite
obstruction = torch.zeros(self.map_size**2).float()
me = s['players'][s['player']]
my_halite, my_shipyards, my_ships = tuple(me)
scalar = torch.zeros(len(s['players']))
scalar[0] = my_halite
entity_typ = []
entity_pos = []
entity_scalar = []
entity_id = []
for shipyard_id, shipyard_pos in my_shipyards.items():
obstruction[shipyard_pos] = 1.0
x = int(shipyard_pos % self.map_size)
y = int(shipyard_pos / self.map_size)
entity_typ.append(1)
entity_pos.append([x,y])
entity_scalar.append([0])
entity_id.append(shipyard_id)
for ship_id, ship_pos in my_ships.items():
obstruction[ship_pos[0]] = 1.0
x = int(ship_pos[0] % self.map_size)
y = int(ship_pos[0] / self.map_size)
entity_typ.append(2)
entity_pos.append([x,y])
entity_scalar.append([ship_pos[1]])
entity_id.append(ship_id)
opponents = s['players']
scalar_loc = 1
for i, opponent in enumerate(opponents):
if i != s['player']:
opp_halite, opp_shipyards, opp_ships = tuple(opponent)
scalar[scalar_loc] = opp_halite
for shipyard_pos in opp_shipyards.values():
obstruction[shipyard_pos] = 1.0
for ship_pos in opp_ships.values():
obstruction[ship_pos[0]] = 1.0
scalar_loc += 1
obstruction = obstruction.reshape(self.map_size, self.map_size, 1)
spat_map = torch.cat((halite, obstruction), 2).unsqueeze(0).permute(0,3,1,2)
n_entities = len(entity_id)
diff = self.max_entities - n_entities
entity_typ = F.pad(torch.tensor(entity_typ).long().unsqueeze(0), (0, diff), "constant", 0)
entity_pos = F.pad(torch.tensor(entity_pos).long().unsqueeze(0), (0, 0, 0, diff), "constant", 0)
entity_scalar = F.pad(torch.tensor(entity_scalar).float().unsqueeze(0), (0, 0, 0, diff), "constant", 0)
scalar = scalar.unsqueeze(0) / self.starting_halite
spat_map_iter.append(spat_map)
entity_typ_iter.append(entity_typ)
entity_pos_iter.append(entity_pos)
entity_id_iter.append(entity_id)
entity_scalar_iter.append(entity_scalar)
scalar_iter.append(scalar)
return {
'map': torch.cat(spat_map_iter),
'entity_typ': torch.cat(entity_typ_iter),
'entity_pos': torch.cat(entity_pos_iter),
'entity_scalar': torch.cat(entity_scalar_iter),
'entity_id': entity_id_iter,
'scalar': torch.cat(scalar_iter)
}
class MapEmbedding(nn.Module):
def __init__(self, embed_size=256, depth=2, maps=2):
super(MapEmbedding, self).__init__()
blocks = []
c_b = 64
while c_b < embed_size:
blocks.append(c_b)
c_b *= 2
blocks.append(embed_size)
deepths = [depth] * len(blocks)
self.resnet = ResNetEncoder(in_channels=maps, blocks_sizes=blocks, deepths=deepths)
def forward(self, multi_layer_map):
return self.resnet(multi_layer_map)
class EntityEmbedding(nn.Module):
def __init__(self, d_model, map_size, n_scalars):
super(EntityEmbedding, self).__init__()
# self.lut = pre_trained.embeddings.word_embeddings
self.EntityType = nn.Embedding(2 + 1, d_model)
self.EntityPosition = PositionalEncoding2D(d_model, map_size, map_size)
self.fc = nn.Linear(n_scalars, d_model)
self.EntityType.weight.data.uniform_(-0.1, .1)
def forward(self, typ, pos, scalar):
return self.EntityType(typ) + self.EntityPosition(pos) + self.fc(scalar)
# Retrieved from pytorch website
class PositionalEncoding2D(nn.Module):
def __init__(self, d_model, height, width):
super(PositionalEncoding2D, self).__init__()
if d_model % 4 != 0:
raise Error()
pe = torch.zeros(d_model, height, width)
d_model = int(d_model / 2)
div_term = torch.exp(torch.arange(0., d_model, 2) * -(math.log(10000.0) / d_model))
pos_w = torch.arange(0., width).unsqueeze(1)
pos_h = torch.arange(0., height).unsqueeze(1)
pe[0:d_model:2, :, :] = torch.sin(pos_w * div_term).transpose(0, 1).unsqueeze(1).repeat(1, height, 1)
pe[1:d_model:2, :, :] = torch.cos(pos_w * div_term).transpose(0, 1).unsqueeze(1).repeat(1, height, 1)
pe[d_model::2, :, :] = torch.sin(pos_h * div_term).transpose(0, 1).unsqueeze(2).repeat(1, 1, width)
pe[d_model + 1::2, :, :] = torch.cos(pos_h * div_term).transpose(0, 1).unsqueeze(2).repeat(1, 1, width)
self.register_buffer('pe', pe)
def forward(self, pos):
# (*, 2)
pos = pos.transpose(0, -1)
return self.pe[:, pos[0], pos[1]].transpose(0, -1)
if __name__ == "__main__":
pe = PositionalEncoding2D(8, 10, 10)
pos = torch.tensor([[[0,0], [0,0], [9,9]],[[0,0], [0,0], [9,9]]])
print(pe(pos))
| forward | identifier_name |
model.py | import torch
from torch import nn
from torch.distributions.categorical import Categorical
from torch.nn import functional as F
import math
import torch
from resnet import ResNetEncoder
class Policy(nn.Module):
def __init__(self, env_config, num_players=2, max_entities=10):
super(Policy, self).__init__()
# State Parser
self.parse_state = ParseState(env_config, 10)
self.MAX_ENTITIES = 10
# Map Encoder
self.map = MapEmbedding(128)
# Entity Encoder
self.entity = EntityEmbedding(128, env_config['size'], 1)
# Scalar Encoder
self.scalar_encoder = nn.Linear(num_players, 128)
# transformer
# self.max_entities = 10
self.action_map = [None, "NORTH", "EAST", "SOUTH", "WEST", "CONVERT", "SPAWN"]
self.SHIP_TYPE = 2
self.SHIPYARD_TYPE = 1
num_actions = len(self.action_map)
self.transformer = nn.TransformerEncoder(nn.TransformerEncoderLayer(d_model=128, nhead=8, dim_feedforward=100), 2, norm=None)
self.policy = nn.Sequential(
nn.Linear(128, 128),
nn.ReLU(),
nn.Linear(128, num_actions)
)
self.value = nn.Sequential(
nn.Linear(128 * self.MAX_ENTITIES, 400),
nn.ReLU(),
nn.Linear(400, 100),
torch.nn.ReLU(),
nn.Linear(100, 1)
)
self.softmax = nn.Softmax(-1)
def device(self):
return next(self.parameters()).device
def forward(self, state, mask = False):
# Scalar encoding
state = self.parse_state(state)
scalar = state['scalar'].to(self.device())
scalar_encoding = F.relu(self.scalar_encoder(scalar)).unsqueeze(1)
# Spatial Encoding
game_map = state['map'].to(self.device())
map_encoding = self.map(game_map).unsqueeze(1)
# Entity Encoding
entity_typ = state['entity_typ'].to(self.device())
entity_pos = state['entity_pos'].to(self.device())
entity_scalar = state['entity_scalar'].to(self.device())
entity_encodings = self.entity(entity_typ, entity_pos, entity_scalar)
embeddings = map_encoding + entity_encodings + scalar_encoding
set_embedding = self.transformer(embeddings)
out = self.policy(set_embedding)
if mask == True:
lens = []
for eid in state['entity_id']:
n_entities = len(eid)
lens.append(torch.tensor([1] * n_entities + [0] * (self.MAX_ENTITIES - n_entities)))
m = torch.stack(lens).to(self.device())
return self.softmax(out), m
return self.softmax(out)
def action(self, states):
if not isinstance(states, list):
states = [states]
t_states = self.parse_state(states)
out = self.forward(states)
actions_iter = []
raw_actions_iter = []
for i, state in enumerate(states):
raw_actions = Categorical(probs=out[i]).sample()
actions = {}
n_entities = len(t_states['entity_id'][i])
# TODO: Migrate this code to env helper
for e, eid in enumerate(t_states['entity_id'][i]):
act = self.action_map[raw_actions[e]]
typ = t_states['entity_typ'][i][e]
if typ == self.SHIP_TYPE and act == "SPAWN":
act = None
elif typ == self.SHIPYARD_TYPE and (act != "SPAWN" and act != None):
act = None
elif typ == 0:
continue
if act == "SPAWN":
if n_entities < self.MAX_ENTITIES:
n_entities += 1
else:
act = None
if act is not None:
actions[eid] = act
actions_iter.append(actions)
raw_actions_iter.append(raw_actions)
return actions_iter, raw_actions_iter
class ParseState(object):
"""Rescale the image in a sample to a given size.
Args:
output_size (tuple or int): Desired output size. If tuple, output is
matched to output_size. If int, smaller of image edges is matched
to output_size keeping aspect ratio the same.
"""
def __init__(self, config, max_entities):
self.map_size = config['size']
self.max_halite = config['maxCellHalite']
self.starting_halite = config['startingHalite']
self.max_entities = max_entities
def __call__(self, states):
if not isinstance(states, list):
states = [states]
spat_map_iter = []
entity_typ_iter = []
entity_pos_iter = []
entity_id_iter = []
entity_scalar_iter = []
scalar_iter = []
for s in states:
step = s['step']
halite = torch.tensor(s['halite']).float()
halite = halite.reshape(self.map_size, self.map_size, 1) / self.max_halite
obstruction = torch.zeros(self.map_size**2).float()
me = s['players'][s['player']]
my_halite, my_shipyards, my_ships = tuple(me)
scalar = torch.zeros(len(s['players']))
scalar[0] = my_halite
entity_typ = []
entity_pos = []
entity_scalar = []
entity_id = []
for shipyard_id, shipyard_pos in my_shipyards.items():
obstruction[shipyard_pos] = 1.0
x = int(shipyard_pos % self.map_size)
y = int(shipyard_pos / self.map_size)
entity_typ.append(1)
entity_pos.append([x,y])
entity_scalar.append([0])
entity_id.append(shipyard_id)
for ship_id, ship_pos in my_ships.items():
obstruction[ship_pos[0]] = 1.0
x = int(ship_pos[0] % self.map_size)
y = int(ship_pos[0] / self.map_size)
entity_typ.append(2)
entity_pos.append([x,y])
entity_scalar.append([ship_pos[1]])
entity_id.append(ship_id)
opponents = s['players']
scalar_loc = 1
for i, opponent in enumerate(opponents):
if i != s['player']:
opp_halite, opp_shipyards, opp_ships = tuple(opponent)
scalar[scalar_loc] = opp_halite
for shipyard_pos in opp_shipyards.values():
obstruction[shipyard_pos] = 1.0
for ship_pos in opp_ships.values():
obstruction[ship_pos[0]] = 1.0
scalar_loc += 1
obstruction = obstruction.reshape(self.map_size, self.map_size, 1)
spat_map = torch.cat((halite, obstruction), 2).unsqueeze(0).permute(0,3,1,2)
n_entities = len(entity_id)
diff = self.max_entities - n_entities
entity_typ = F.pad(torch.tensor(entity_typ).long().unsqueeze(0), (0, diff), "constant", 0)
entity_pos = F.pad(torch.tensor(entity_pos).long().unsqueeze(0), (0, 0, 0, diff), "constant", 0)
entity_scalar = F.pad(torch.tensor(entity_scalar).float().unsqueeze(0), (0, 0, 0, diff), "constant", 0)
scalar = scalar.unsqueeze(0) / self.starting_halite
spat_map_iter.append(spat_map)
entity_typ_iter.append(entity_typ)
entity_pos_iter.append(entity_pos)
entity_id_iter.append(entity_id)
entity_scalar_iter.append(entity_scalar)
scalar_iter.append(scalar)
return {
'map': torch.cat(spat_map_iter),
'entity_typ': torch.cat(entity_typ_iter),
'entity_pos': torch.cat(entity_pos_iter),
'entity_scalar': torch.cat(entity_scalar_iter),
'entity_id': entity_id_iter,
'scalar': torch.cat(scalar_iter)
}
class MapEmbedding(nn.Module):
def __init__(self, embed_size=256, depth=2, maps=2):
super(MapEmbedding, self).__init__()
blocks = []
c_b = 64
while c_b < embed_size:
blocks.append(c_b)
c_b *= 2
blocks.append(embed_size)
deepths = [depth] * len(blocks)
self.resnet = ResNetEncoder(in_channels=maps, blocks_sizes=blocks, deepths=deepths)
def forward(self, multi_layer_map):
return self.resnet(multi_layer_map)
class EntityEmbedding(nn.Module):
def __init__(self, d_model, map_size, n_scalars):
super(EntityEmbedding, self).__init__()
# self.lut = pre_trained.embeddings.word_embeddings
self.EntityType = nn.Embedding(2 + 1, d_model)
self.EntityPosition = PositionalEncoding2D(d_model, map_size, map_size)
self.fc = nn.Linear(n_scalars, d_model)
self.EntityType.weight.data.uniform_(-0.1, .1)
def forward(self, typ, pos, scalar):
|
# Retrieved from pytorch website
class PositionalEncoding2D(nn.Module):
def __init__(self, d_model, height, width):
super(PositionalEncoding2D, self).__init__()
if d_model % 4 != 0:
raise Error()
pe = torch.zeros(d_model, height, width)
d_model = int(d_model / 2)
div_term = torch.exp(torch.arange(0., d_model, 2) * -(math.log(10000.0) / d_model))
pos_w = torch.arange(0., width).unsqueeze(1)
pos_h = torch.arange(0., height).unsqueeze(1)
pe[0:d_model:2, :, :] = torch.sin(pos_w * div_term).transpose(0, 1).unsqueeze(1).repeat(1, height, 1)
pe[1:d_model:2, :, :] = torch.cos(pos_w * div_term).transpose(0, 1).unsqueeze(1).repeat(1, height, 1)
pe[d_model::2, :, :] = torch.sin(pos_h * div_term).transpose(0, 1).unsqueeze(2).repeat(1, 1, width)
pe[d_model + 1::2, :, :] = torch.cos(pos_h * div_term).transpose(0, 1).unsqueeze(2).repeat(1, 1, width)
self.register_buffer('pe', pe)
def forward(self, pos):
# (*, 2)
pos = pos.transpose(0, -1)
return self.pe[:, pos[0], pos[1]].transpose(0, -1)
if __name__ == "__main__":
pe = PositionalEncoding2D(8, 10, 10)
pos = torch.tensor([[[0,0], [0,0], [9,9]],[[0,0], [0,0], [9,9]]])
print(pe(pos))
| return self.EntityType(typ) + self.EntityPosition(pos) + self.fc(scalar) | identifier_body |
model.py | import torch
from torch import nn
from torch.distributions.categorical import Categorical
from torch.nn import functional as F
import math
import torch
from resnet import ResNetEncoder
class Policy(nn.Module):
def __init__(self, env_config, num_players=2, max_entities=10):
super(Policy, self).__init__()
# State Parser
self.parse_state = ParseState(env_config, 10)
self.MAX_ENTITIES = 10
# Map Encoder
self.map = MapEmbedding(128)
# Entity Encoder
self.entity = EntityEmbedding(128, env_config['size'], 1)
# Scalar Encoder
self.scalar_encoder = nn.Linear(num_players, 128)
# transformer
# self.max_entities = 10
self.action_map = [None, "NORTH", "EAST", "SOUTH", "WEST", "CONVERT", "SPAWN"]
self.SHIP_TYPE = 2
self.SHIPYARD_TYPE = 1
num_actions = len(self.action_map)
self.transformer = nn.TransformerEncoder(nn.TransformerEncoderLayer(d_model=128, nhead=8, dim_feedforward=100), 2, norm=None)
self.policy = nn.Sequential(
nn.Linear(128, 128),
nn.ReLU(),
nn.Linear(128, num_actions)
)
self.value = nn.Sequential(
nn.Linear(128 * self.MAX_ENTITIES, 400),
nn.ReLU(),
nn.Linear(400, 100),
torch.nn.ReLU(),
nn.Linear(100, 1)
)
self.softmax = nn.Softmax(-1)
def device(self):
return next(self.parameters()).device
def forward(self, state, mask = False):
# Scalar encoding
state = self.parse_state(state)
scalar = state['scalar'].to(self.device())
scalar_encoding = F.relu(self.scalar_encoder(scalar)).unsqueeze(1)
# Spatial Encoding
game_map = state['map'].to(self.device())
map_encoding = self.map(game_map).unsqueeze(1)
# Entity Encoding
entity_typ = state['entity_typ'].to(self.device())
entity_pos = state['entity_pos'].to(self.device())
entity_scalar = state['entity_scalar'].to(self.device())
entity_encodings = self.entity(entity_typ, entity_pos, entity_scalar)
embeddings = map_encoding + entity_encodings + scalar_encoding
set_embedding = self.transformer(embeddings)
out = self.policy(set_embedding)
if mask == True:
lens = []
for eid in state['entity_id']:
n_entities = len(eid)
lens.append(torch.tensor([1] * n_entities + [0] * (self.MAX_ENTITIES - n_entities)))
m = torch.stack(lens).to(self.device())
return self.softmax(out), m
return self.softmax(out)
def action(self, states):
if not isinstance(states, list):
states = [states]
t_states = self.parse_state(states)
out = self.forward(states)
actions_iter = []
raw_actions_iter = []
for i, state in enumerate(states):
raw_actions = Categorical(probs=out[i]).sample()
actions = {}
n_entities = len(t_states['entity_id'][i])
# TODO: Migrate this code to env helper
for e, eid in enumerate(t_states['entity_id'][i]):
act = self.action_map[raw_actions[e]]
typ = t_states['entity_typ'][i][e]
if typ == self.SHIP_TYPE and act == "SPAWN":
act = None
elif typ == self.SHIPYARD_TYPE and (act != "SPAWN" and act != None):
act = None
elif typ == 0:
continue
if act == "SPAWN":
if n_entities < self.MAX_ENTITIES:
n_entities += 1
else:
act = None
if act is not None:
actions[eid] = act
actions_iter.append(actions)
raw_actions_iter.append(raw_actions)
return actions_iter, raw_actions_iter
class ParseState(object):
"""Rescale the image in a sample to a given size.
Args:
output_size (tuple or int): Desired output size. If tuple, output is
matched to output_size. If int, smaller of image edges is matched
to output_size keeping aspect ratio the same.
"""
def __init__(self, config, max_entities):
self.map_size = config['size']
self.max_halite = config['maxCellHalite']
self.starting_halite = config['startingHalite']
self.max_entities = max_entities
def __call__(self, states):
if not isinstance(states, list):
states = [states]
spat_map_iter = []
entity_typ_iter = []
entity_pos_iter = []
entity_id_iter = []
entity_scalar_iter = []
scalar_iter = []
for s in states:
step = s['step']
halite = torch.tensor(s['halite']).float()
halite = halite.reshape(self.map_size, self.map_size, 1) / self.max_halite
obstruction = torch.zeros(self.map_size**2).float()
me = s['players'][s['player']]
my_halite, my_shipyards, my_ships = tuple(me)
scalar = torch.zeros(len(s['players']))
scalar[0] = my_halite
entity_typ = []
entity_pos = []
entity_scalar = []
entity_id = []
for shipyard_id, shipyard_pos in my_shipyards.items():
obstruction[shipyard_pos] = 1.0
x = int(shipyard_pos % self.map_size)
y = int(shipyard_pos / self.map_size)
entity_typ.append(1)
entity_pos.append([x,y])
entity_scalar.append([0])
entity_id.append(shipyard_id)
for ship_id, ship_pos in my_ships.items():
obstruction[ship_pos[0]] = 1.0
x = int(ship_pos[0] % self.map_size)
y = int(ship_pos[0] / self.map_size)
entity_typ.append(2)
entity_pos.append([x,y])
entity_scalar.append([ship_pos[1]])
entity_id.append(ship_id)
opponents = s['players']
scalar_loc = 1
for i, opponent in enumerate(opponents):
if i != s['player']:
opp_halite, opp_shipyards, opp_ships = tuple(opponent)
scalar[scalar_loc] = opp_halite
for shipyard_pos in opp_shipyards.values():
obstruction[shipyard_pos] = 1.0
for ship_pos in opp_ships.values():
obstruction[ship_pos[0]] = 1.0
scalar_loc += 1
obstruction = obstruction.reshape(self.map_size, self.map_size, 1)
spat_map = torch.cat((halite, obstruction), 2).unsqueeze(0).permute(0,3,1,2)
n_entities = len(entity_id)
diff = self.max_entities - n_entities
entity_typ = F.pad(torch.tensor(entity_typ).long().unsqueeze(0), (0, diff), "constant", 0)
entity_pos = F.pad(torch.tensor(entity_pos).long().unsqueeze(0), (0, 0, 0, diff), "constant", 0) | entity_typ_iter.append(entity_typ)
entity_pos_iter.append(entity_pos)
entity_id_iter.append(entity_id)
entity_scalar_iter.append(entity_scalar)
scalar_iter.append(scalar)
return {
'map': torch.cat(spat_map_iter),
'entity_typ': torch.cat(entity_typ_iter),
'entity_pos': torch.cat(entity_pos_iter),
'entity_scalar': torch.cat(entity_scalar_iter),
'entity_id': entity_id_iter,
'scalar': torch.cat(scalar_iter)
}
class MapEmbedding(nn.Module):
def __init__(self, embed_size=256, depth=2, maps=2):
super(MapEmbedding, self).__init__()
blocks = []
c_b = 64
while c_b < embed_size:
blocks.append(c_b)
c_b *= 2
blocks.append(embed_size)
deepths = [depth] * len(blocks)
self.resnet = ResNetEncoder(in_channels=maps, blocks_sizes=blocks, deepths=deepths)
def forward(self, multi_layer_map):
return self.resnet(multi_layer_map)
class EntityEmbedding(nn.Module):
def __init__(self, d_model, map_size, n_scalars):
super(EntityEmbedding, self).__init__()
# self.lut = pre_trained.embeddings.word_embeddings
self.EntityType = nn.Embedding(2 + 1, d_model)
self.EntityPosition = PositionalEncoding2D(d_model, map_size, map_size)
self.fc = nn.Linear(n_scalars, d_model)
self.EntityType.weight.data.uniform_(-0.1, .1)
def forward(self, typ, pos, scalar):
return self.EntityType(typ) + self.EntityPosition(pos) + self.fc(scalar)
# Retrieved from pytorch website
class PositionalEncoding2D(nn.Module):
def __init__(self, d_model, height, width):
super(PositionalEncoding2D, self).__init__()
if d_model % 4 != 0:
raise Error()
pe = torch.zeros(d_model, height, width)
d_model = int(d_model / 2)
div_term = torch.exp(torch.arange(0., d_model, 2) * -(math.log(10000.0) / d_model))
pos_w = torch.arange(0., width).unsqueeze(1)
pos_h = torch.arange(0., height).unsqueeze(1)
pe[0:d_model:2, :, :] = torch.sin(pos_w * div_term).transpose(0, 1).unsqueeze(1).repeat(1, height, 1)
pe[1:d_model:2, :, :] = torch.cos(pos_w * div_term).transpose(0, 1).unsqueeze(1).repeat(1, height, 1)
pe[d_model::2, :, :] = torch.sin(pos_h * div_term).transpose(0, 1).unsqueeze(2).repeat(1, 1, width)
pe[d_model + 1::2, :, :] = torch.cos(pos_h * div_term).transpose(0, 1).unsqueeze(2).repeat(1, 1, width)
self.register_buffer('pe', pe)
def forward(self, pos):
# (*, 2)
pos = pos.transpose(0, -1)
return self.pe[:, pos[0], pos[1]].transpose(0, -1)
if __name__ == "__main__":
pe = PositionalEncoding2D(8, 10, 10)
pos = torch.tensor([[[0,0], [0,0], [9,9]],[[0,0], [0,0], [9,9]]])
print(pe(pos)) | entity_scalar = F.pad(torch.tensor(entity_scalar).float().unsqueeze(0), (0, 0, 0, diff), "constant", 0)
scalar = scalar.unsqueeze(0) / self.starting_halite
spat_map_iter.append(spat_map) | random_line_split |
model.py | import torch
from torch import nn
from torch.distributions.categorical import Categorical
from torch.nn import functional as F
import math
import torch
from resnet import ResNetEncoder
class Policy(nn.Module):
def __init__(self, env_config, num_players=2, max_entities=10):
super(Policy, self).__init__()
# State Parser
self.parse_state = ParseState(env_config, 10)
self.MAX_ENTITIES = 10
# Map Encoder
self.map = MapEmbedding(128)
# Entity Encoder
self.entity = EntityEmbedding(128, env_config['size'], 1)
# Scalar Encoder
self.scalar_encoder = nn.Linear(num_players, 128)
# transformer
# self.max_entities = 10
self.action_map = [None, "NORTH", "EAST", "SOUTH", "WEST", "CONVERT", "SPAWN"]
self.SHIP_TYPE = 2
self.SHIPYARD_TYPE = 1
num_actions = len(self.action_map)
self.transformer = nn.TransformerEncoder(nn.TransformerEncoderLayer(d_model=128, nhead=8, dim_feedforward=100), 2, norm=None)
self.policy = nn.Sequential(
nn.Linear(128, 128),
nn.ReLU(),
nn.Linear(128, num_actions)
)
self.value = nn.Sequential(
nn.Linear(128 * self.MAX_ENTITIES, 400),
nn.ReLU(),
nn.Linear(400, 100),
torch.nn.ReLU(),
nn.Linear(100, 1)
)
self.softmax = nn.Softmax(-1)
def device(self):
return next(self.parameters()).device
def forward(self, state, mask = False):
# Scalar encoding
state = self.parse_state(state)
scalar = state['scalar'].to(self.device())
scalar_encoding = F.relu(self.scalar_encoder(scalar)).unsqueeze(1)
# Spatial Encoding
game_map = state['map'].to(self.device())
map_encoding = self.map(game_map).unsqueeze(1)
# Entity Encoding
entity_typ = state['entity_typ'].to(self.device())
entity_pos = state['entity_pos'].to(self.device())
entity_scalar = state['entity_scalar'].to(self.device())
entity_encodings = self.entity(entity_typ, entity_pos, entity_scalar)
embeddings = map_encoding + entity_encodings + scalar_encoding
set_embedding = self.transformer(embeddings)
out = self.policy(set_embedding)
if mask == True:
lens = []
for eid in state['entity_id']:
n_entities = len(eid)
lens.append(torch.tensor([1] * n_entities + [0] * (self.MAX_ENTITIES - n_entities)))
m = torch.stack(lens).to(self.device())
return self.softmax(out), m
return self.softmax(out)
def action(self, states):
if not isinstance(states, list):
states = [states]
t_states = self.parse_state(states)
out = self.forward(states)
actions_iter = []
raw_actions_iter = []
for i, state in enumerate(states):
raw_actions = Categorical(probs=out[i]).sample()
actions = {}
n_entities = len(t_states['entity_id'][i])
# TODO: Migrate this code to env helper
for e, eid in enumerate(t_states['entity_id'][i]):
act = self.action_map[raw_actions[e]]
typ = t_states['entity_typ'][i][e]
if typ == self.SHIP_TYPE and act == "SPAWN":
act = None
elif typ == self.SHIPYARD_TYPE and (act != "SPAWN" and act != None):
act = None
elif typ == 0:
continue
if act == "SPAWN":
if n_entities < self.MAX_ENTITIES:
|
else:
act = None
if act is not None:
actions[eid] = act
actions_iter.append(actions)
raw_actions_iter.append(raw_actions)
return actions_iter, raw_actions_iter
class ParseState(object):
"""Rescale the image in a sample to a given size.
Args:
output_size (tuple or int): Desired output size. If tuple, output is
matched to output_size. If int, smaller of image edges is matched
to output_size keeping aspect ratio the same.
"""
def __init__(self, config, max_entities):
self.map_size = config['size']
self.max_halite = config['maxCellHalite']
self.starting_halite = config['startingHalite']
self.max_entities = max_entities
def __call__(self, states):
if not isinstance(states, list):
states = [states]
spat_map_iter = []
entity_typ_iter = []
entity_pos_iter = []
entity_id_iter = []
entity_scalar_iter = []
scalar_iter = []
for s in states:
step = s['step']
halite = torch.tensor(s['halite']).float()
halite = halite.reshape(self.map_size, self.map_size, 1) / self.max_halite
obstruction = torch.zeros(self.map_size**2).float()
me = s['players'][s['player']]
my_halite, my_shipyards, my_ships = tuple(me)
scalar = torch.zeros(len(s['players']))
scalar[0] = my_halite
entity_typ = []
entity_pos = []
entity_scalar = []
entity_id = []
for shipyard_id, shipyard_pos in my_shipyards.items():
obstruction[shipyard_pos] = 1.0
x = int(shipyard_pos % self.map_size)
y = int(shipyard_pos / self.map_size)
entity_typ.append(1)
entity_pos.append([x,y])
entity_scalar.append([0])
entity_id.append(shipyard_id)
for ship_id, ship_pos in my_ships.items():
obstruction[ship_pos[0]] = 1.0
x = int(ship_pos[0] % self.map_size)
y = int(ship_pos[0] / self.map_size)
entity_typ.append(2)
entity_pos.append([x,y])
entity_scalar.append([ship_pos[1]])
entity_id.append(ship_id)
opponents = s['players']
scalar_loc = 1
for i, opponent in enumerate(opponents):
if i != s['player']:
opp_halite, opp_shipyards, opp_ships = tuple(opponent)
scalar[scalar_loc] = opp_halite
for shipyard_pos in opp_shipyards.values():
obstruction[shipyard_pos] = 1.0
for ship_pos in opp_ships.values():
obstruction[ship_pos[0]] = 1.0
scalar_loc += 1
obstruction = obstruction.reshape(self.map_size, self.map_size, 1)
spat_map = torch.cat((halite, obstruction), 2).unsqueeze(0).permute(0,3,1,2)
n_entities = len(entity_id)
diff = self.max_entities - n_entities
entity_typ = F.pad(torch.tensor(entity_typ).long().unsqueeze(0), (0, diff), "constant", 0)
entity_pos = F.pad(torch.tensor(entity_pos).long().unsqueeze(0), (0, 0, 0, diff), "constant", 0)
entity_scalar = F.pad(torch.tensor(entity_scalar).float().unsqueeze(0), (0, 0, 0, diff), "constant", 0)
scalar = scalar.unsqueeze(0) / self.starting_halite
spat_map_iter.append(spat_map)
entity_typ_iter.append(entity_typ)
entity_pos_iter.append(entity_pos)
entity_id_iter.append(entity_id)
entity_scalar_iter.append(entity_scalar)
scalar_iter.append(scalar)
return {
'map': torch.cat(spat_map_iter),
'entity_typ': torch.cat(entity_typ_iter),
'entity_pos': torch.cat(entity_pos_iter),
'entity_scalar': torch.cat(entity_scalar_iter),
'entity_id': entity_id_iter,
'scalar': torch.cat(scalar_iter)
}
class MapEmbedding(nn.Module):
def __init__(self, embed_size=256, depth=2, maps=2):
super(MapEmbedding, self).__init__()
blocks = []
c_b = 64
while c_b < embed_size:
blocks.append(c_b)
c_b *= 2
blocks.append(embed_size)
deepths = [depth] * len(blocks)
self.resnet = ResNetEncoder(in_channels=maps, blocks_sizes=blocks, deepths=deepths)
def forward(self, multi_layer_map):
return self.resnet(multi_layer_map)
class EntityEmbedding(nn.Module):
def __init__(self, d_model, map_size, n_scalars):
super(EntityEmbedding, self).__init__()
# self.lut = pre_trained.embeddings.word_embeddings
self.EntityType = nn.Embedding(2 + 1, d_model)
self.EntityPosition = PositionalEncoding2D(d_model, map_size, map_size)
self.fc = nn.Linear(n_scalars, d_model)
self.EntityType.weight.data.uniform_(-0.1, .1)
def forward(self, typ, pos, scalar):
return self.EntityType(typ) + self.EntityPosition(pos) + self.fc(scalar)
# Retrieved from pytorch website
class PositionalEncoding2D(nn.Module):
def __init__(self, d_model, height, width):
super(PositionalEncoding2D, self).__init__()
if d_model % 4 != 0:
raise Error()
pe = torch.zeros(d_model, height, width)
d_model = int(d_model / 2)
div_term = torch.exp(torch.arange(0., d_model, 2) * -(math.log(10000.0) / d_model))
pos_w = torch.arange(0., width).unsqueeze(1)
pos_h = torch.arange(0., height).unsqueeze(1)
pe[0:d_model:2, :, :] = torch.sin(pos_w * div_term).transpose(0, 1).unsqueeze(1).repeat(1, height, 1)
pe[1:d_model:2, :, :] = torch.cos(pos_w * div_term).transpose(0, 1).unsqueeze(1).repeat(1, height, 1)
pe[d_model::2, :, :] = torch.sin(pos_h * div_term).transpose(0, 1).unsqueeze(2).repeat(1, 1, width)
pe[d_model + 1::2, :, :] = torch.cos(pos_h * div_term).transpose(0, 1).unsqueeze(2).repeat(1, 1, width)
self.register_buffer('pe', pe)
def forward(self, pos):
# (*, 2)
pos = pos.transpose(0, -1)
return self.pe[:, pos[0], pos[1]].transpose(0, -1)
if __name__ == "__main__":
pe = PositionalEncoding2D(8, 10, 10)
pos = torch.tensor([[[0,0], [0,0], [9,9]],[[0,0], [0,0], [9,9]]])
print(pe(pos))
| n_entities += 1 | conditional_block |
match.go | // Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package language
import (
"errors"
"strings"
"golang.org/x/text/internal/language"
)
// A MatchOption configures a Matcher.
type MatchOption func(*matcher)
// PreferSameScript will, in the absence of a match, result in the first
// preferred tag with the same script as a supported tag to match this supported
// tag. The default is currently true, but this may change in the future.
func PreferSameScript(preferSame bool) MatchOption {
return func(m *matcher) { m.preferSameScript = preferSame }
}
// TODO(v1.0.0): consider making Matcher a concrete type, instead of interface.
// There doesn't seem to be too much need for multiple types.
// Making it a concrete type allows MatchStrings to be a method, which will
// improve its discoverability.
// MatchStrings parses and matches the given strings until one of them matches
// the language in the Matcher. A string may be an Accept-Language header as
// handled by ParseAcceptLanguage. The default language is returned if no
// other language matched.
func MatchStrings(m Matcher, lang ...string) (tag Tag, index int) {
for _, accept := range lang {
desired, _, err := ParseAcceptLanguage(accept)
if err != nil {
continue
}
if tag, index, conf := m.Match(desired...); conf != No {
return tag, index
}
}
tag, index, _ = m.Match()
return
}
// Matcher is the interface that wraps the Match method.
//
// Match returns the best match for any of the given tags, along with
// a unique index associated with the returned tag and a confidence
// score.
type Matcher interface {
Match(t ...Tag) (tag Tag, index int, c Confidence)
}
// Comprehends reports the confidence score for a speaker of a given language
// to being able to comprehend the written form of an alternative language.
func Comprehends(speaker, alternative Tag) Confidence {
_, _, c := NewMatcher([]Tag{alternative}).Match(speaker)
return c
}
// NewMatcher returns a Matcher that matches an ordered list of preferred tags
// against a list of supported tags based on written intelligibility, closeness
// of dialect, equivalence of subtags and various other rules. It is initialized
// with the list of supported tags. The first element is used as the default
// value in case no match is found.
//
// Its Match method matches the first of the given Tags to reach a certain
// confidence threshold. The tags passed to Match should therefore be specified
// in order of preference. Extensions are ignored for matching.
//
// The index returned by the Match method corresponds to the index of the
// matched tag in t, but is augmented with the Unicode extension ('u')of the
// corresponding preferred tag. This allows user locale options to be passed
// transparently.
func NewMatcher(t []Tag, options ...MatchOption) Matcher {
return newMatcher(t, options)
}
// Match implements Matcher. It returns the best-matching supported tag for
// any of the desired tags in want, the index of that tag in the supported
// list, and the confidence of the match.
func (m *matcher) Match(want ...Tag) (t Tag, index int, c Confidence) {
	var tt language.Tag
	match, w, c := m.getBest(want...)
	if match != nil {
		tt, index = match.tag, match.index
	} else {
		// TODO: this should be an option
		tt = m.default_.tag
		if m.preferSameScript {
		outer:
			for _, w := range want {
				script, _ := w.Script()
				if script.scriptID == 0 {
					// Don't do anything if there is no script, such as with
					// private subtags.
					continue
				}
				for i, h := range m.supported {
					if script.scriptID == h.maxScript {
						// Fall back to the first supported tag whose
						// maximized script matches the desired script.
						tt, index = h.tag, i
						break outer
					}
				}
			}
		}
		// TODO: select first language tag based on script.
	}
	// Adopt the desired region when it is more specific than (contained in)
	// the matched one; otherwise record it as a -u-rg- region override.
	if w.RegionID != tt.RegionID && w.RegionID != 0 {
		if w.RegionID != 0 && tt.RegionID != 0 && tt.RegionID.Contains(w.RegionID) {
			tt.RegionID = w.RegionID
			tt.RemakeString()
		} else if r := w.RegionID.String(); len(r) == 2 {
			// TODO: also filter macro and deprecated.
			tt, _ = tt.SetTypeForKey("rg", strings.ToLower(r)+"zzzz")
		}
	}
	// Copy options from the user-provided tag into the result tag. This is hard
	// to do after the fact, so we do it here.
	// TODO: add in alternative variants to -u-va-.
	// TODO: add preferred region to -u-rg-.
	if e := w.Extensions(); len(e) > 0 {
		b := language.Builder{}
		b.SetTag(tt)
		for _, e := range e {
			b.AddExt(e)
		}
		tt = b.Make()
	}
	return makeTag(tt), index, c
}
// ErrMissingLikelyTagsData indicates no information was available
// to compute likely values of missing tags.
var ErrMissingLikelyTagsData = errors.New("missing likely tags data")
// func (t *Tag) setTagsFrom(id Tag) {
// t.LangID = id.LangID
// t.ScriptID = id.ScriptID
// t.RegionID = id.RegionID
// }
// Tag Matching
// CLDR defines an algorithm for finding the best match between two sets of language
// tags. The basic algorithm defines how to score a possible match and then find
// the match with the best score
// (see https://www.unicode.org/reports/tr35/#LanguageMatching).
// Using scoring has several disadvantages. The scoring obfuscates the importance of
// the various factors considered, making the algorithm harder to understand. Using
// scoring also requires the full score to be computed for each pair of tags.
//
// We will use a different algorithm which aims to have the following properties:
// - clarity on the precedence of the various selection factors, and
// - improved performance by allowing early termination of a comparison.
//
// Matching algorithm (overview)
// Input:
// - supported: a set of supported tags
// - default: the default tag to return in case there is no match
// - desired: list of desired tags, ordered by preference, starting with
// the most-preferred.
//
// Algorithm:
// 1) Set the best match to the lowest confidence level
// 2) For each tag in "desired":
// a) For each tag in "supported":
// 1) compute the match between the two tags.
// 2) if the match is better than the previous best match, replace it
// with the new match. (see next section)
// b) if the current best match is Exact and pin is true the result will be
// frozen to the language found thusfar, although better matches may
// still be found for the same language.
// 3) If the best match so far is below a certain threshold, return "default".
//
// Ranking:
// We use two phases to determine whether one pair of tags are a better match
// than another pair of tags. First, we determine a rough confidence level. If the
// levels are different, the one with the highest confidence wins.
// Second, if the rough confidence levels are identical, we use a set of tie-breaker
// rules.
//
// The confidence level of matching a pair of tags is determined by finding the
// lowest confidence level of any matches of the corresponding subtags (the
// result is deemed as good as its weakest link).
// We define the following levels:
// Exact - An exact match of a subtag, before adding likely subtags.
// MaxExact - An exact match of a subtag, after adding likely subtags.
// [See Note 2].
// High - High level of mutual intelligibility between different subtag
// variants.
// Low - Low level of mutual intelligibility between different subtag
// variants.
// No - No mutual intelligibility.
//
// The following levels can occur for each type of subtag:
// Base: Exact, MaxExact, High, Low, No
// Script: Exact, MaxExact [see Note 3], Low, No
// Region: Exact, MaxExact, High
// Variant: Exact, High
// Private: Exact, No
//
// Any result with a confidence level of Low or higher is deemed a possible match.
// Once a desired tag matches any of the supported tags with a level of MaxExact
// or higher, the next desired tag is not considered (see Step 2.b).
// Note that CLDR provides languageMatching data that defines close equivalence
// classes for base languages, scripts and regions.
//
// Tie-breaking
// If we get the same confidence level for two matches, we apply a sequence of
// tie-breaking rules. The first that succeeds defines the result. The rules are
// applied in the following order.
// 1) Original language was defined and was identical.
// 2) Original region was defined and was identical.
// 3) Distance between two maximized regions was the smallest.
// 4) Original script was defined and was identical.
// 5) Distance from want tag to have tag using the parent relation [see Note 5.]
// If there is still no winner after these rules are applied, the first match
// found wins.
//
// Notes:
// [2] In practice, as matching of Exact is done in a separate phase from
// matching the other levels, we reuse the Exact level to mean MaxExact in
// the second phase. As a consequence, we only need the levels defined by
// the Confidence type. The MaxExact confidence level is mapped to High in
// the public API.
// [3] We do not differentiate between maximized script values that were derived
// from suppressScript versus most likely tag data. We determined that in
// ranking the two, one ranks just after the other. Moreover, the two cannot
// occur concurrently. As a consequence, they are identical for practical
// purposes.
// [4] In case of deprecated, macro-equivalents and legacy mappings, we assign
// the MaxExact level to allow iw vs he to still be a closer match than
// en-AU vs en-US, for example.
// [5] In CLDR a locale inherits fields that are unspecified for this locale
// from its parent. Therefore, if a locale is a parent of another locale,
// it is a strong measure for closeness, especially when no other tie
// breaker rule applies. One could also argue it is inconsistent, for
// example, when pt-AO matches pt (which CLDR equates with pt-BR), even
// though its parent is pt-PT according to the inheritance rules.
//
// Implementation Details:
// There are several performance considerations worth pointing out. Most notably,
// we preprocess as much as possible (within reason) at the time of creation of a
// matcher. This includes:
// - creating a per-language map, which includes data for the raw base language
// and its canonicalized variant (if applicable),
// - expanding entries for the equivalence classes defined in CLDR's
// languageMatch data.
// The per-language map ensures that typically only a very small number of tags
// need to be considered. The pre-expansion of canonicalized subtags and
// equivalence classes reduces the amount of map lookups that need to be done at
// runtime.
// matcher keeps a set of supported language tags, indexed by language.
type matcher struct {
	// default_ is the tag returned when no desired tag matches well enough.
	default_ *haveTag
	// supported holds all supported tags in their original order.
	supported []*haveTag
	// index maps a base language to the candidate supported tags for it.
	index map[language.Language]*matchHeader
	// NOTE(review): passSettings is not referenced anywhere in this chunk —
	// confirm its purpose against the rest of the package.
	passSettings bool
	// preferSameScript enables the same-script fallback in Match when no
	// candidate reaches a usable confidence.
	preferSameScript bool
}
// matchHeader has the lists of tags for exact matches and matches based on
// maximized and canonicalized tags for a given language.
type matchHeader struct {
	// haveTags lists the candidate supported tags for this language.
	haveTags []*haveTag
	// original is true if any entry was added as an exact (non-derived)
	// match; equivalence-derived entries are only added to original headers.
	original bool
}
// haveTag holds a supported Tag and its maximized script and region. The maximized
// or canonicalized language is not stored as it is not needed during matching.
type haveTag struct {
	// tag is the supported tag as provided to the matcher.
	tag language.Tag
	// index of this tag in the original list of supported tags.
	index int
	// conf is the maximum confidence that can result from matching this haveTag.
	// When conf < Exact this means it was inserted after applying a CLDR equivalence rule.
	conf Confidence
	// Maximized region and script.
	maxRegion language.Region
	maxScript language.Script
	// altScript may be checked as an alternative match to maxScript. If altScript
	// matches, the confidence level for this match is Low. Theoretically there
	// could be multiple alternative scripts. This does not occur in practice.
	altScript language.Script
	// nextMax is the index of the next haveTag with the same maximized tags.
	nextMax uint16
}
func makeHaveTag(tag language.Tag, index int) (haveTag, language.Language) {
	// Only canonicalize and maximize when at least one subtag is set; a
	// fully-undetermined tag (und) is left untouched.
	max := tag
	hasSubtag := tag.LangID != 0 || tag.RegionID != 0 || tag.ScriptID != 0
	if hasSubtag {
		max, _ = canonicalize(All, max)
		max, _ = max.Maximize()
		max.RemakeString()
	}
	h := haveTag{
		tag:       tag,
		index:     index,
		conf:      Exact,
		maxRegion: max.RegionID,
		maxScript: max.ScriptID,
		altScript: altScript(max.LangID, max.ScriptID),
		nextMax:   0,
	}
	return h, max.LangID
}
// altScript returns an alternative script that may match the given script with
// a low confidence. At the moment, the langMatch data allows for at most one
// script to map to another and we rely on this to keep the code simple.
func | (l language.Language, s language.Script) language.Script {
for _, alt := range matchScript {
// TODO: also match cases where language is not the same.
if (language.Language(alt.wantLang) == l || language.Language(alt.haveLang) == l) &&
language.Script(alt.haveScript) == s {
return language.Script(alt.wantScript)
}
}
return 0
}
// addIfNew adds a haveTag to the list of tags only if it is a unique tag.
// Tags that have the same maximized values are linked by index.
// addIfNew adds a haveTag to the list of tags only if it is a unique tag.
// Tags that have the same maximized values are linked by index via nextMax.
func (h *matchHeader) addIfNew(n haveTag, exact bool) {
	h.original = h.original || exact
	// Don't add new exact matches.
	for _, v := range h.haveTags {
		if equalsRest(v.tag, n.tag) {
			return
		}
	}
	// Allow duplicate maximized tags, but create a linked list to allow quickly
	// comparing the equivalents and bail out.
	for i, v := range h.haveTags {
		if v.maxScript == n.maxScript &&
			v.maxRegion == n.maxRegion &&
			v.tag.VariantOrPrivateUseTags() == n.tag.VariantOrPrivateUseTags() {
			// Walk to the tail of the linked list and append the new entry,
			// which will live at index len(h.haveTags).
			for h.haveTags[i].nextMax != 0 {
				i = int(h.haveTags[i].nextMax)
			}
			h.haveTags[i].nextMax = uint16(len(h.haveTags))
			break
		}
	}
	h.haveTags = append(h.haveTags, &n)
}
// header returns the matchHeader for the given language. It creates one if
// it doesn't already exist.
func (m *matcher) header(l language.Language) *matchHeader {
	// Return the existing header for l, or lazily create and register one.
	if existing, ok := m.index[l]; ok && existing != nil {
		return existing
	}
	created := &matchHeader{}
	m.index[l] = created
	return created
}
// toConf maps a CLDR language-match distance onto the coarse Confidence scale.
func toConf(d uint8) Confidence {
	switch {
	case d <= 10:
		return High
	case d < 30:
		return Low
	default:
		return No
	}
}
// newMatcher builds an index for the given supported tags and returns it as
// a matcher. It also expands the index by considering various equivalence classes
// for a given tag.
func newMatcher(supported []Tag, options []MatchOption) *matcher {
	m := &matcher{
		index:            make(map[language.Language]*matchHeader),
		preferSameScript: true,
	}
	// Apply caller-supplied options (e.g. PreferSameScript).
	for _, o := range options {
		o(m)
	}
	// With no supported tags, the default is the zero haveTag (und).
	if len(supported) == 0 {
		m.default_ = &haveTag{}
		return m
	}
	// Add supported languages to the index. Add exact matches first to give
	// them precedence.
	for i, tag := range supported {
		tt := tag.tag()
		pair, _ := makeHaveTag(tt, i)
		m.header(tt.LangID).addIfNew(pair, true)
		m.supported = append(m.supported, &pair)
	}
	// The first supported tag doubles as the default result.
	m.default_ = m.header(supported[0].lang()).haveTags[0]
	// Keep these in two different loops to support the case that two equivalent
	// languages are distinguished, such as iw and he.
	for i, tag := range supported {
		tt := tag.tag()
		pair, max := makeHaveTag(tt, i)
		if max != tt.LangID {
			// Also index the tag under its maximized base language.
			m.header(max).addIfNew(pair, true)
		}
	}
	// update is used to add indexes in the map for equivalent languages.
	// update will only add entries to original indexes, thus not computing any
	// transitive relations.
	update := func(want, have uint16, conf Confidence) {
		if hh := m.index[language.Language(have)]; hh != nil {
			if !hh.original {
				return
			}
			hw := m.header(language.Language(want))
			for _, ht := range hh.haveTags {
				// Copy the entry, capping its confidence at conf.
				v := *ht
				if conf < v.conf {
					v.conf = conf
				}
				v.nextMax = 0 // this value needs to be recomputed
				if v.altScript != 0 {
					v.altScript = altScript(language.Language(want), v.maxScript)
				}
				hw.addIfNew(v, conf == Exact && hh.original)
			}
		}
	}
	// Add entries for languages with mutual intelligibility as defined by CLDR's
	// languageMatch data.
	for _, ml := range matchLang {
		update(ml.want, ml.have, toConf(ml.distance))
		if !ml.oneway {
			update(ml.have, ml.want, toConf(ml.distance))
		}
	}
	// Add entries for possible canonicalizations. This is an optimization to
	// ensure that only one map lookup needs to be done at runtime per desired tag.
	// First we match deprecated equivalents. If they are perfect equivalents
	// (their canonicalization simply substitutes a different language code, but
	// nothing else), the match confidence is Exact, otherwise it is High.
	for i, lm := range language.AliasMap {
		// If deprecated codes match and there is no fiddling with the script or
		// or region, we consider it an exact match.
		conf := Exact
		if language.AliasTypes[i] != language.Macro {
			if !isExactEquivalent(language.Language(lm.From)) {
				conf = High
			}
			update(lm.To, lm.From, conf)
		}
		update(lm.From, lm.To, conf)
	}
	return m
}
// getBest gets the best matching tag in m for any of the given tags, taking into
// account the order of preference of the given tags.
func (m *matcher) getBest(want ...Tag) (got *haveTag, orig language.Tag, c Confidence) {
	best := bestMatch{}
	for i, ww := range want {
		w := ww.tag()
		var max language.Tag
		// Check for exact match first.
		h := m.index[w.LangID]
		if w.LangID != 0 {
			if h == nil {
				continue
			}
			// Base language is defined.
			max, _ = canonicalize(Legacy|Deprecated|Macro, w)
			// A region that is added through canonicalization is stronger than
			// a maximized region: set it in the original (e.g. mo -> ro-MD).
			if w.RegionID != max.RegionID {
				w.RegionID = max.RegionID
			}
			// TODO: should we do the same for scripts?
			// See test case: en, sr, nl ; sh ; sr
			max, _ = max.Maximize()
		} else {
			// Base language is not defined.
			if h != nil {
				for i := range h.haveTags {
					have := h.haveTags[i]
					if equalsRest(have.tag, w) {
						return have, w, Exact
					}
				}
			}
			if w.ScriptID == 0 && w.RegionID == 0 {
				// We skip all tags matching und for approximate matching, including
				// private tags.
				continue
			}
			max, _ = w.Maximize()
			if h = m.index[max.LangID]; h == nil {
				continue
			}
		}
		// pin becomes false when a later desired tag shares this base
		// language: the user listed dialects explicitly, so the language
		// must not be frozen on the first strong match.
		pin := true
		for _, t := range want[i+1:] {
			if w.LangID == t.lang() {
				pin = false
				break
			}
		}
		// Check for match based on maximized tag.
		for i := range h.haveTags {
			have := h.haveTags[i]
			best.update(have, w, max.ScriptID, max.RegionID, pin)
			if best.conf == Exact {
				// Also visit entries linked via nextMax (same maximized
				// tags) before returning, so ties are resolved among them.
				for have.nextMax != 0 {
					have = h.haveTags[have.nextMax]
					best.update(have, w, max.ScriptID, max.RegionID, pin)
				}
				return best.have, best.want, best.conf
			}
		}
	}
	// No candidate reached a usable confidence: report No with the first
	// desired tag (if any) as the original.
	if best.conf <= No {
		if len(want) != 0 {
			return nil, want[0].tag(), No
		}
		return nil, language.Tag{}, No
	}
	return best.have, best.want, best.conf
}
// bestMatch accumulates the best match so far.
type bestMatch struct {
	// have is the best supported candidate found so far; nil if none.
	have *haveTag
	// want is the desired tag that produced the current best match.
	want language.Tag
	// conf is the confidence of the current best match.
	conf Confidence
	// pinnedRegion is the maximized region recorded with the current best match.
	pinnedRegion language.Region
	// pinLanguage, once set, restricts further updates to want's language.
	pinLanguage bool
	// sameRegionGroup records whether the best match's region fell in the
	// default region group (see regionGroupDist).
	sameRegionGroup bool
	// Cached results from applying tie-breaking rules.
	origLang     bool
	origReg      bool
	paradigmReg  bool
	regGroupDist uint8
	origScript   bool
}
// update updates the existing best match if the new pair is considered to be a
// better match. To determine if the given pair is a better match, it first
// computes the rough confidence level. If this surpasses the current match, it
// will replace it and update the tie-breaker rule cache. If there is a tie, it
// proceeds with applying a series of tie-breaker rules. If there is no
// conclusive winner after applying the tie-breaker rules, it leaves the current
// match as the preferred match.
//
// If pin is true and have and tag are a strong match, it will henceforth only
// consider matches for this language. This corresponds to the idea that most
// users have a strong preference for the first defined language. A user can
// still prefer a second language over a dialect of the preferred language by
// explicitly specifying dialects, e.g. "en, nl, en-GB". In this case pin should
// be false.
func (m *bestMatch) update(have *haveTag, tag language.Tag, maxScript language.Script, maxRegion language.Region, pin bool) {
	// Bail if the maximum attainable confidence is below that of the current best match.
	c := have.conf
	if c < m.conf {
		return
	}
	// Don't change the language once we already have found an exact match.
	if m.pinLanguage && tag.LangID != m.want.LangID {
		return
	}
	// Pin the region group if we are comparing tags for the same language.
	if tag.LangID == m.want.LangID && m.sameRegionGroup {
		_, sameGroup := regionGroupDist(m.pinnedRegion, have.maxRegion, have.maxScript, m.want.LangID)
		if !sameGroup {
			return
		}
	}
	if c == Exact && have.maxScript == maxScript {
		// If there is another language and then another entry of this language,
		// don't pin anything, otherwise pin the language.
		m.pinLanguage = pin
	}
	if equalsRest(have.tag, tag) {
		// All subtags other than the language agree: keep confidence c.
	} else if have.maxScript != maxScript {
		// There is usually very little comprehension between different scripts.
		// In a few cases there may still be Low comprehension. This possibility
		// is pre-computed and stored in have.altScript.
		if Low < m.conf || have.altScript != maxScript {
			return
		}
		c = Low
	} else if have.maxRegion != maxRegion {
		if High < c {
			// There is usually a small difference between languages across regions.
			c = High
		}
	}
	// We store the results of the computations of the tie-breaker rules along
	// with the best match. There is no need to do the checks once we determine
	// we have a winner, but we do still need to do the tie-breaker computations.
	// We use "beaten" to keep track if we still need to do the checks.
	beaten := false // true if the new pair defeats the current one.
	if c != m.conf {
		if c < m.conf {
			return
		}
		beaten = true
	}
	// Tie-breaker rules:
	// We prefer if the pre-maximized language was specified and identical.
	origLang := have.tag.LangID == tag.LangID && tag.LangID != 0
	if !beaten && m.origLang != origLang {
		if m.origLang {
			return
		}
		beaten = true
	}
	// We prefer if the pre-maximized region was specified and identical.
	origReg := have.tag.RegionID == tag.RegionID && tag.RegionID != 0
	if !beaten && m.origReg != origReg {
		if m.origReg {
			return
		}
		beaten = true
	}
	// Smaller region-group distance wins the tie.
	regGroupDist, sameGroup := regionGroupDist(have.maxRegion, maxRegion, maxScript, tag.LangID)
	if !beaten && m.regGroupDist != regGroupDist {
		if regGroupDist > m.regGroupDist {
			return
		}
		beaten = true
	}
	// A paradigm region for this language wins over a non-paradigm one.
	paradigmReg := isParadigmLocale(tag.LangID, have.maxRegion)
	if !beaten && m.paradigmReg != paradigmReg {
		if !paradigmReg {
			return
		}
		beaten = true
	}
	// Next we prefer if the pre-maximized script was specified and identical.
	origScript := have.tag.ScriptID == tag.ScriptID && tag.ScriptID != 0
	if !beaten && m.origScript != origScript {
		if m.origScript {
			return
		}
		beaten = true
	}
	// Update m to the newly found best match.
	if beaten {
		m.have = have
		m.want = tag
		m.conf = c
		m.pinnedRegion = maxRegion
		m.sameRegionGroup = sameGroup
		m.origLang = origLang
		m.origReg = origReg
		m.paradigmReg = paradigmReg
		m.origScript = origScript
		m.regGroupDist = regGroupDist
	}
}
// isParadigmLocale reports whether r is one of the two candidate regions
// listed for lang in the paradigmLocales table.
func isParadigmLocale(lang language.Language, r language.Region) bool {
	for _, e := range paradigmLocales {
		if language.Language(e[0]) != lang {
			continue
		}
		if r == language.Region(e[1]) || r == language.Region(e[2]) {
			return true
		}
	}
	return false
}
// regionGroupDist computes the distance between two regions based on their
// CLDR grouping.
func regionGroupDist(a, b language.Region, script language.Script, lang language.Language) (dist uint8, same bool) {
	const defaultDistance = 4
	// Each region maps to a bitmask of CLDR region groups; the shift reserves
	// bit 0 (NOTE(review): presumably the "no group" sentinel — confirm
	// against the regionToGroups table generator).
	aGroup := uint(regionToGroups[a]) << 1
	bGroup := uint(regionToGroups[b]) << 1
	for _, ri := range matchRegion {
		// Only rules for this language (and script, when the rule names one)
		// apply.
		if language.Language(ri.lang) == lang && (ri.script == 0 || language.Script(ri.script) == script) {
			group := uint(1 << (ri.group &^ 0x80))
			if 0x80&ri.group == 0 {
				if aGroup&bGroup&group != 0 { // Both regions are in the group.
					return ri.distance, ri.distance == defaultDistance
				}
			} else {
				// The high bit inverts the rule: it applies when neither
				// region is in the group.
				if (aGroup|bGroup)&group == 0 { // Both regions are not in the group.
					return ri.distance, ri.distance == defaultDistance
				}
			}
		}
	}
	return defaultDistance, true
}
// equalsRest compares everything except the language.
// equalsRest compares everything except the language.
func equalsRest(a, b language.Tag) bool {
	// TODO: don't include extensions in this comparison. To do this efficiently,
	// though, we should handle private tags separately.
	if a.ScriptID != b.ScriptID {
		return false
	}
	if a.RegionID != b.RegionID {
		return false
	}
	return a.VariantOrPrivateUseTags() == b.VariantOrPrivateUseTags()
}
// isExactEquivalent returns true if canonicalizing the language will not alter
// the script or region of a tag.
// isExactEquivalent returns true if canonicalizing the language will not alter
// the script or region of a tag.
func isExactEquivalent(l language.Language) bool {
	// notEquivalent is the precomputed exception list (populated in init).
	for i := range notEquivalent {
		if notEquivalent[i] == l {
			return false
		}
	}
	return true
}
// notEquivalent lists the languages for which canonicalization may alter the
// script or region of a tag; it is populated by the package init function.
var notEquivalent []language.Language
// init precomputes the notEquivalent exception list and fills in missing
// regions of the paradigmLocales table.
func init() {
	// Create a list of all languages for which canonicalization may alter the
	// script or region.
	for _, lm := range language.AliasMap {
		tag := language.Tag{LangID: language.Language(lm.From)}
		if tag, _ = canonicalize(All, tag); tag.ScriptID != 0 || tag.RegionID != 0 {
			notEquivalent = append(notEquivalent, language.Language(lm.From))
		}
	}
	// Maximize undefined regions of paradigm locales.
	for i, v := range paradigmLocales {
		t := language.Tag{LangID: language.Language(v[0])}
		max, _ := t.Maximize()
		if v[1] == 0 {
			// Fill in the most likely region when none was specified.
			paradigmLocales[i][1] = uint16(max.RegionID)
		}
		if v[2] == 0 {
			paradigmLocales[i][2] = uint16(max.RegionID)
		}
	}
}
| altScript | identifier_name |
match.go | // Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package language
import (
"errors"
"strings"
"golang.org/x/text/internal/language"
)
// A MatchOption configures a Matcher.
type MatchOption func(*matcher)
// PreferSameScript will, in the absence of a match, result in the first
// preferred tag with the same script as a supported tag to match this supported
// tag. The default is currently true, but this may change in the future.
func PreferSameScript(preferSame bool) MatchOption {
	// Capture the flag in a closure that flips the matcher's setting when
	// the option is applied.
	return func(m *matcher) {
		m.preferSameScript = preferSame
	}
}
// TODO(v1.0.0): consider making Matcher a concrete type, instead of interface.
// There doesn't seem to be too much need for multiple types.
// Making it a concrete type allows MatchStrings to be a method, which will
// improve its discoverability.
// MatchStrings parses and matches the given strings until one of them matches
// the language in the Matcher. A string may be an Accept-Language header as
// handled by ParseAcceptLanguage. The default language is returned if no
// other language matched.
func MatchStrings(m Matcher, lang ...string) (tag Tag, index int) {
	for _, accept := range lang {
		// Each entry may be a full Accept-Language header; entries that fail
		// to parse are skipped.
		desired, _, err := ParseAcceptLanguage(accept)
		if err != nil {
			continue
		}
		matched, idx, conf := m.Match(desired...)
		if conf != No {
			return matched, idx
		}
	}
	// Nothing matched: fall back to the matcher's default language.
	tag, index, _ = m.Match()
	return tag, index
}
// Matcher is the interface that wraps the Match method.
//
// Match returns the best match for any of the given tags, along with
// a unique index associated with the returned tag and a confidence
// score.
type Matcher interface {
	// Match returns the best-matching supported tag for any of t, its index
	// in the supported list, and the confidence of the match.
	Match(t ...Tag) (tag Tag, index int, c Confidence)
}
// Comprehends reports the confidence score for a speaker of a given language
// to being able to comprehend the written form of an alternative language.
func Comprehends(speaker, alternative Tag) Confidence {
	// A single-tag matcher built around the alternative language reveals how
	// well the speaker's tag matches it.
	matcher := NewMatcher([]Tag{alternative})
	_, _, conf := matcher.Match(speaker)
	return conf
}
// NewMatcher returns a Matcher that matches an ordered list of preferred tags
// against a list of supported tags based on written intelligibility, closeness
// of dialect, equivalence of subtags and various other rules. It is initialized
// with the list of supported tags. The first element is used as the default
// value in case no match is found.
//
// Its Match method matches the first of the given Tags to reach a certain
// confidence threshold. The tags passed to Match should therefore be specified
// in order of preference. Extensions are ignored for matching.
//
// The index returned by the Match method corresponds to the index of the
// matched tag in t, but is augmented with the Unicode extension ('u')of the
// corresponding preferred tag. This allows user locale options to be passed
// transparently.
func NewMatcher(t []Tag, options ...MatchOption) Matcher {
	// Delegate to the internal constructor, which builds the per-language
	// lookup index; the concrete *matcher satisfies the Matcher interface.
	m := newMatcher(t, options)
	return m
}
// Match implements Matcher. It returns the best-matching supported tag for
// any of the desired tags in want, the index of that tag in the supported
// list, and the confidence of the match.
func (m *matcher) Match(want ...Tag) (t Tag, index int, c Confidence) {
	var tt language.Tag
	match, w, c := m.getBest(want...)
	if match != nil {
		tt, index = match.tag, match.index
	} else {
		// TODO: this should be an option
		tt = m.default_.tag
		if m.preferSameScript {
		outer:
			for _, w := range want {
				script, _ := w.Script()
				if script.scriptID == 0 {
					// Don't do anything if there is no script, such as with
					// private subtags.
					continue
				}
				for i, h := range m.supported {
					if script.scriptID == h.maxScript {
						// Fall back to the first supported tag whose
						// maximized script matches the desired script.
						tt, index = h.tag, i
						break outer
					}
				}
			}
		}
		// TODO: select first language tag based on script.
	}
	// Adopt the desired region when it is more specific than (contained in)
	// the matched one; otherwise record it as a -u-rg- region override.
	if w.RegionID != tt.RegionID && w.RegionID != 0 {
		if w.RegionID != 0 && tt.RegionID != 0 && tt.RegionID.Contains(w.RegionID) {
			tt.RegionID = w.RegionID
			tt.RemakeString()
		} else if r := w.RegionID.String(); len(r) == 2 {
			// TODO: also filter macro and deprecated.
			tt, _ = tt.SetTypeForKey("rg", strings.ToLower(r)+"zzzz")
		}
	}
	// Copy options from the user-provided tag into the result tag. This is hard
	// to do after the fact, so we do it here.
	// TODO: add in alternative variants to -u-va-.
	// TODO: add preferred region to -u-rg-.
	if e := w.Extensions(); len(e) > 0 {
		b := language.Builder{}
		b.SetTag(tt)
		for _, e := range e {
			b.AddExt(e)
		}
		tt = b.Make()
	}
	return makeTag(tt), index, c
}
// ErrMissingLikelyTagsData indicates no information was available
// to compute likely values of missing tags.
var ErrMissingLikelyTagsData = errors.New("missing likely tags data")
// func (t *Tag) setTagsFrom(id Tag) {
// t.LangID = id.LangID
// t.ScriptID = id.ScriptID
// t.RegionID = id.RegionID
// }
// Tag Matching
// CLDR defines an algorithm for finding the best match between two sets of language
// tags. The basic algorithm defines how to score a possible match and then find
// the match with the best score
// (see https://www.unicode.org/reports/tr35/#LanguageMatching).
// Using scoring has several disadvantages. The scoring obfuscates the importance of
// the various factors considered, making the algorithm harder to understand. Using
// scoring also requires the full score to be computed for each pair of tags.
//
// We will use a different algorithm which aims to have the following properties:
// - clarity on the precedence of the various selection factors, and
// - improved performance by allowing early termination of a comparison.
//
// Matching algorithm (overview)
// Input:
// - supported: a set of supported tags
// - default: the default tag to return in case there is no match
// - desired: list of desired tags, ordered by preference, starting with
// the most-preferred.
//
// Algorithm:
// 1) Set the best match to the lowest confidence level
// 2) For each tag in "desired":
// a) For each tag in "supported":
// 1) compute the match between the two tags.
// 2) if the match is better than the previous best match, replace it
// with the new match. (see next section)
// b) if the current best match is Exact and pin is true the result will be
// frozen to the language found thusfar, although better matches may
// still be found for the same language.
// 3) If the best match so far is below a certain threshold, return "default".
//
// Ranking:
// We use two phases to determine whether one pair of tags are a better match
// than another pair of tags. First, we determine a rough confidence level. If the
// levels are different, the one with the highest confidence wins.
// Second, if the rough confidence levels are identical, we use a set of tie-breaker
// rules.
//
// The confidence level of matching a pair of tags is determined by finding the
// lowest confidence level of any matches of the corresponding subtags (the
// result is deemed as good as its weakest link).
// We define the following levels:
// Exact - An exact match of a subtag, before adding likely subtags.
// MaxExact - An exact match of a subtag, after adding likely subtags.
// [See Note 2].
// High - High level of mutual intelligibility between different subtag
// variants.
// Low - Low level of mutual intelligibility between different subtag
// variants.
// No - No mutual intelligibility.
//
// The following levels can occur for each type of subtag:
// Base: Exact, MaxExact, High, Low, No
// Script: Exact, MaxExact [see Note 3], Low, No
// Region: Exact, MaxExact, High
// Variant: Exact, High
// Private: Exact, No
//
// Any result with a confidence level of Low or higher is deemed a possible match.
// Once a desired tag matches any of the supported tags with a level of MaxExact
// or higher, the next desired tag is not considered (see Step 2.b).
// Note that CLDR provides languageMatching data that defines close equivalence
// classes for base languages, scripts and regions.
//
// Tie-breaking
// If we get the same confidence level for two matches, we apply a sequence of
// tie-breaking rules. The first that succeeds defines the result. The rules are
// applied in the following order.
// 1) Original language was defined and was identical.
// 2) Original region was defined and was identical.
// 3) Distance between two maximized regions was the smallest.
// 4) Original script was defined and was identical.
// 5) Distance from want tag to have tag using the parent relation [see Note 5.]
// If there is still no winner after these rules are applied, the first match
// found wins.
//
// Notes:
// [2] In practice, as matching of Exact is done in a separate phase from
// matching the other levels, we reuse the Exact level to mean MaxExact in
// the second phase. As a consequence, we only need the levels defined by
// the Confidence type. The MaxExact confidence level is mapped to High in
// the public API.
// [3] We do not differentiate between maximized script values that were derived
// from suppressScript versus most likely tag data. We determined that in
// ranking the two, one ranks just after the other. Moreover, the two cannot
// occur concurrently. As a consequence, they are identical for practical
// purposes.
// [4] In case of deprecated, macro-equivalents and legacy mappings, we assign
// the MaxExact level to allow iw vs he to still be a closer match than
// en-AU vs en-US, for example.
// [5] In CLDR a locale inherits fields that are unspecified for this locale
// from its parent. Therefore, if a locale is a parent of another locale,
// it is a strong measure for closeness, especially when no other tie
// breaker rule applies. One could also argue it is inconsistent, for
// example, when pt-AO matches pt (which CLDR equates with pt-BR), even
// though its parent is pt-PT according to the inheritance rules.
//
// Implementation Details:
// There are several performance considerations worth pointing out. Most notably,
// we preprocess as much as possible (within reason) at the time of creation of a
// matcher. This includes:
// - creating a per-language map, which includes data for the raw base language
// and its canonicalized variant (if applicable),
// - expanding entries for the equivalence classes defined in CLDR's
// languageMatch data.
// The per-language map ensures that typically only a very small number of tags
// need to be considered. The pre-expansion of canonicalized subtags and
// equivalence classes reduces the amount of map lookups that need to be done at
// runtime.
// matcher keeps a set of supported language tags, indexed by language.
type matcher struct {
	// default_ is the tag returned when no desired tag matches well enough.
	default_ *haveTag
	// supported holds all supported tags in their original order.
	supported []*haveTag
	// index maps a base language to the candidate supported tags for it.
	index map[language.Language]*matchHeader
	// NOTE(review): passSettings is not referenced anywhere in this chunk —
	// confirm its purpose against the rest of the package.
	passSettings bool
	// preferSameScript enables the same-script fallback in Match when no
	// candidate reaches a usable confidence.
	preferSameScript bool
}
// matchHeader has the lists of tags for exact matches and matches based on
// maximized and canonicalized tags for a given language.
type matchHeader struct {
	// haveTags lists the candidate supported tags for this language.
	haveTags []*haveTag
	// original is true if any entry was added as an exact (non-derived)
	// match; equivalence-derived entries are only added to original headers.
	original bool
}
// haveTag holds a supported Tag and its maximized script and region. The maximized
// or canonicalized language is not stored as it is not needed during matching.
type haveTag struct {
	// tag is the supported tag as provided to the matcher.
	tag language.Tag
	// index of this tag in the original list of supported tags.
	index int
	// conf is the maximum confidence that can result from matching this haveTag.
	// When conf < Exact this means it was inserted after applying a CLDR equivalence rule.
	conf Confidence
	// Maximized region and script.
	maxRegion language.Region
	maxScript language.Script
	// altScript may be checked as an alternative match to maxScript. If altScript
	// matches, the confidence level for this match is Low. Theoretically there
	// could be multiple alternative scripts. This does not occur in practice.
	altScript language.Script
	// nextMax is the index of the next haveTag with the same maximized tags.
	nextMax uint16
}
func makeHaveTag(tag language.Tag, index int) (haveTag, language.Language) {
max := tag
if tag.LangID != 0 || tag.RegionID != 0 || tag.ScriptID != 0 {
max, _ = canonicalize(All, max)
max, _ = max.Maximize()
max.RemakeString()
}
return haveTag{tag, index, Exact, max.RegionID, max.ScriptID, altScript(max.LangID, max.ScriptID), 0}, max.LangID
}
// altScript returns an alternative script that may match the given script with
// a low confidence. At the moment, the langMatch data allows for at most one
// script to map to another and we rely on this to keep the code simple.
func altScript(l language.Language, s language.Script) language.Script {
for _, alt := range matchScript {
// TODO: also match cases where language is not the same.
if (language.Language(alt.wantLang) == l || language.Language(alt.haveLang) == l) &&
language.Script(alt.haveScript) == s {
return language.Script(alt.wantScript)
}
}
return 0
}
// addIfNew adds a haveTag to the list of tags only if it is a unique tag.
// Tags that have the same maximized values are linked by index.
func (h *matchHeader) addIfNew(n haveTag, exact bool) {
h.original = h.original || exact
// Don't add new exact matches.
for _, v := range h.haveTags {
if equalsRest(v.tag, n.tag) {
return
}
}
// Allow duplicate maximized tags, but create a linked list to allow quickly
// comparing the equivalents and bail out.
for i, v := range h.haveTags {
if v.maxScript == n.maxScript &&
v.maxRegion == n.maxRegion &&
v.tag.VariantOrPrivateUseTags() == n.tag.VariantOrPrivateUseTags() {
for h.haveTags[i].nextMax != 0 {
i = int(h.haveTags[i].nextMax)
}
h.haveTags[i].nextMax = uint16(len(h.haveTags))
break
}
}
h.haveTags = append(h.haveTags, &n)
}
// header returns the matchHeader for the given language. It creates one if
// it doesn't already exist.
func (m *matcher) header(l language.Language) *matchHeader {
if h := m.index[l]; h != nil {
return h
}
h := &matchHeader{}
m.index[l] = h
return h
}
func toConf(d uint8) Confidence {
if d <= 10 {
return High
}
if d < 30 {
return Low
}
return No
}
// newMatcher builds an index for the given supported tags and returns it as
// a matcher. It also expands the index by considering various equivalence classes
// for a given tag.
func newMatcher(supported []Tag, options []MatchOption) *matcher {
m := &matcher{
index: make(map[language.Language]*matchHeader),
preferSameScript: true,
}
for _, o := range options {
o(m)
}
if len(supported) == 0 {
m.default_ = &haveTag{}
return m
}
// Add supported languages to the index. Add exact matches first to give
// them precedence.
for i, tag := range supported {
tt := tag.tag()
pair, _ := makeHaveTag(tt, i)
m.header(tt.LangID).addIfNew(pair, true)
m.supported = append(m.supported, &pair)
}
m.default_ = m.header(supported[0].lang()).haveTags[0]
// Keep these in two different loops to support the case that two equivalent
// languages are distinguished, such as iw and he.
for i, tag := range supported {
tt := tag.tag()
pair, max := makeHaveTag(tt, i)
if max != tt.LangID {
m.header(max).addIfNew(pair, true)
}
}
// update is used to add indexes in the map for equivalent languages.
// update will only add entries to original indexes, thus not computing any
// transitive relations.
update := func(want, have uint16, conf Confidence) {
if hh := m.index[language.Language(have)]; hh != nil {
if !hh.original {
return
}
hw := m.header(language.Language(want))
for _, ht := range hh.haveTags {
v := *ht
if conf < v.conf {
v.conf = conf
}
v.nextMax = 0 // this value needs to be recomputed
if v.altScript != 0 {
v.altScript = altScript(language.Language(want), v.maxScript)
}
hw.addIfNew(v, conf == Exact && hh.original)
}
}
}
// Add entries for languages with mutual intelligibility as defined by CLDR's
// languageMatch data.
for _, ml := range matchLang {
update(ml.want, ml.have, toConf(ml.distance))
if !ml.oneway {
update(ml.have, ml.want, toConf(ml.distance))
}
}
// Add entries for possible canonicalizations. This is an optimization to
// ensure that only one map lookup needs to be done at runtime per desired tag.
// First we match deprecated equivalents. If they are perfect equivalents
// (their canonicalization simply substitutes a different language code, but
// nothing else), the match confidence is Exact, otherwise it is High.
for i, lm := range language.AliasMap {
// If deprecated codes match and there is no fiddling with the script or
// or region, we consider it an exact match.
conf := Exact
if language.AliasTypes[i] != language.Macro {
if !isExactEquivalent(language.Language(lm.From)) {
conf = High
}
update(lm.To, lm.From, conf)
}
update(lm.From, lm.To, conf)
}
return m
}
// getBest gets the best matching tag in m for any of the given tags, taking into
// account the order of preference of the given tags.
func (m *matcher) getBest(want ...Tag) (got *haveTag, orig language.Tag, c Confidence) {
best := bestMatch{}
for i, ww := range want {
w := ww.tag()
var max language.Tag
// Check for exact match first.
h := m.index[w.LangID]
if w.LangID != 0 {
if h == nil {
continue
}
// Base language is defined.
max, _ = canonicalize(Legacy|Deprecated|Macro, w)
// A region that is added through canonicalization is stronger than
// a maximized region: set it in the original (e.g. mo -> ro-MD).
if w.RegionID != max.RegionID {
w.RegionID = max.RegionID
}
// TODO: should we do the same for scripts?
// See test case: en, sr, nl ; sh ; sr
max, _ = max.Maximize()
} else {
// Base language is not defined.
if h != nil {
for i := range h.haveTags {
have := h.haveTags[i]
if equalsRest(have.tag, w) {
return have, w, Exact
}
}
}
if w.ScriptID == 0 && w.RegionID == 0 {
// We skip all tags matching und for approximate matching, including
// private tags.
continue
}
max, _ = w.Maximize()
if h = m.index[max.LangID]; h == nil {
continue
}
}
pin := true
for _, t := range want[i+1:] {
if w.LangID == t.lang() {
pin = false
break
}
}
// Check for match based on maximized tag.
for i := range h.haveTags {
have := h.haveTags[i]
best.update(have, w, max.ScriptID, max.RegionID, pin)
if best.conf == Exact {
for have.nextMax != 0 {
have = h.haveTags[have.nextMax]
best.update(have, w, max.ScriptID, max.RegionID, pin)
}
return best.have, best.want, best.conf
}
}
}
if best.conf <= No {
if len(want) != 0 {
return nil, want[0].tag(), No
}
return nil, language.Tag{}, No
}
return best.have, best.want, best.conf
}
// bestMatch accumulates the best match so far.
type bestMatch struct {
have *haveTag
want language.Tag
conf Confidence
pinnedRegion language.Region
pinLanguage bool
sameRegionGroup bool
// Cached results from applying tie-breaking rules.
origLang bool
origReg bool
paradigmReg bool
regGroupDist uint8
origScript bool
}
// update updates the existing best match if the new pair is considered to be a
// better match. To determine if the given pair is a better match, it first
// computes the rough confidence level. If this surpasses the current match, it
// will replace it and update the tie-breaker rule cache. If there is a tie, it
// proceeds with applying a series of tie-breaker rules. If there is no
// conclusive winner after applying the tie-breaker rules, it leaves the current
// match as the preferred match.
//
// If pin is true and have and tag are a strong match, it will henceforth only
// consider matches for this language. This corresponds to the idea that most
// users have a strong preference for the first defined language. A user can
// still prefer a second language over a dialect of the preferred language by
// explicitly specifying dialects, e.g. "en, nl, en-GB". In this case pin should
// be false.
func (m *bestMatch) update(have *haveTag, tag language.Tag, maxScript language.Script, maxRegion language.Region, pin bool) |
func isParadigmLocale(lang language.Language, r language.Region) bool {
for _, e := range paradigmLocales {
if language.Language(e[0]) == lang && (r == language.Region(e[1]) || r == language.Region(e[2])) {
return true
}
}
return false
}
// regionGroupDist computes the distance between two regions based on their
// CLDR grouping.
func regionGroupDist(a, b language.Region, script language.Script, lang language.Language) (dist uint8, same bool) {
const defaultDistance = 4
aGroup := uint(regionToGroups[a]) << 1
bGroup := uint(regionToGroups[b]) << 1
for _, ri := range matchRegion {
if language.Language(ri.lang) == lang && (ri.script == 0 || language.Script(ri.script) == script) {
group := uint(1 << (ri.group &^ 0x80))
if 0x80&ri.group == 0 {
if aGroup&bGroup&group != 0 { // Both regions are in the group.
return ri.distance, ri.distance == defaultDistance
}
} else {
if (aGroup|bGroup)&group == 0 { // Both regions are not in the group.
return ri.distance, ri.distance == defaultDistance
}
}
}
}
return defaultDistance, true
}
// equalsRest compares everything except the language.
func equalsRest(a, b language.Tag) bool {
// TODO: don't include extensions in this comparison. To do this efficiently,
// though, we should handle private tags separately.
return a.ScriptID == b.ScriptID && a.RegionID == b.RegionID && a.VariantOrPrivateUseTags() == b.VariantOrPrivateUseTags()
}
// isExactEquivalent returns true if canonicalizing the language will not alter
// the script or region of a tag.
func isExactEquivalent(l language.Language) bool {
for _, o := range notEquivalent {
if o == l {
return false
}
}
return true
}
var notEquivalent []language.Language
func init() {
// Create a list of all languages for which canonicalization may alter the
// script or region.
for _, lm := range language.AliasMap {
tag := language.Tag{LangID: language.Language(lm.From)}
if tag, _ = canonicalize(All, tag); tag.ScriptID != 0 || tag.RegionID != 0 {
notEquivalent = append(notEquivalent, language.Language(lm.From))
}
}
// Maximize undefined regions of paradigm locales.
for i, v := range paradigmLocales {
t := language.Tag{LangID: language.Language(v[0])}
max, _ := t.Maximize()
if v[1] == 0 {
paradigmLocales[i][1] = uint16(max.RegionID)
}
if v[2] == 0 {
paradigmLocales[i][2] = uint16(max.RegionID)
}
}
}
| {
// Bail if the maximum attainable confidence is below that of the current best match.
c := have.conf
if c < m.conf {
return
}
// Don't change the language once we already have found an exact match.
if m.pinLanguage && tag.LangID != m.want.LangID {
return
}
// Pin the region group if we are comparing tags for the same language.
if tag.LangID == m.want.LangID && m.sameRegionGroup {
_, sameGroup := regionGroupDist(m.pinnedRegion, have.maxRegion, have.maxScript, m.want.LangID)
if !sameGroup {
return
}
}
if c == Exact && have.maxScript == maxScript {
// If there is another language and then another entry of this language,
// don't pin anything, otherwise pin the language.
m.pinLanguage = pin
}
if equalsRest(have.tag, tag) {
} else if have.maxScript != maxScript {
// There is usually very little comprehension between different scripts.
// In a few cases there may still be Low comprehension. This possibility
// is pre-computed and stored in have.altScript.
if Low < m.conf || have.altScript != maxScript {
return
}
c = Low
} else if have.maxRegion != maxRegion {
if High < c {
// There is usually a small difference between languages across regions.
c = High
}
}
// We store the results of the computations of the tie-breaker rules along
// with the best match. There is no need to do the checks once we determine
// we have a winner, but we do still need to do the tie-breaker computations.
// We use "beaten" to keep track if we still need to do the checks.
beaten := false // true if the new pair defeats the current one.
if c != m.conf {
if c < m.conf {
return
}
beaten = true
}
// Tie-breaker rules:
// We prefer if the pre-maximized language was specified and identical.
origLang := have.tag.LangID == tag.LangID && tag.LangID != 0
if !beaten && m.origLang != origLang {
if m.origLang {
return
}
beaten = true
}
// We prefer if the pre-maximized region was specified and identical.
origReg := have.tag.RegionID == tag.RegionID && tag.RegionID != 0
if !beaten && m.origReg != origReg {
if m.origReg {
return
}
beaten = true
}
regGroupDist, sameGroup := regionGroupDist(have.maxRegion, maxRegion, maxScript, tag.LangID)
if !beaten && m.regGroupDist != regGroupDist {
if regGroupDist > m.regGroupDist {
return
}
beaten = true
}
paradigmReg := isParadigmLocale(tag.LangID, have.maxRegion)
if !beaten && m.paradigmReg != paradigmReg {
if !paradigmReg {
return
}
beaten = true
}
// Next we prefer if the pre-maximized script was specified and identical.
origScript := have.tag.ScriptID == tag.ScriptID && tag.ScriptID != 0
if !beaten && m.origScript != origScript {
if m.origScript {
return
}
beaten = true
}
// Update m to the newly found best match.
if beaten {
m.have = have
m.want = tag
m.conf = c
m.pinnedRegion = maxRegion
m.sameRegionGroup = sameGroup
m.origLang = origLang
m.origReg = origReg
m.paradigmReg = paradigmReg
m.origScript = origScript
m.regGroupDist = regGroupDist
}
} | identifier_body |
match.go | // Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package language
import (
"errors"
"strings"
|
// A MatchOption configures a Matcher.
type MatchOption func(*matcher)
// PreferSameScript will, in the absence of a match, result in the first
// preferred tag with the same script as a supported tag to match this supported
// tag. The default is currently true, but this may change in the future.
func PreferSameScript(preferSame bool) MatchOption {
return func(m *matcher) { m.preferSameScript = preferSame }
}
// TODO(v1.0.0): consider making Matcher a concrete type, instead of interface.
// There doesn't seem to be too much need for multiple types.
// Making it a concrete type allows MatchStrings to be a method, which will
// improve its discoverability.
// MatchStrings parses and matches the given strings until one of them matches
// the language in the Matcher. A string may be an Accept-Language header as
// handled by ParseAcceptLanguage. The default language is returned if no
// other language matched.
func MatchStrings(m Matcher, lang ...string) (tag Tag, index int) {
for _, accept := range lang {
desired, _, err := ParseAcceptLanguage(accept)
if err != nil {
continue
}
if tag, index, conf := m.Match(desired...); conf != No {
return tag, index
}
}
tag, index, _ = m.Match()
return
}
// Matcher is the interface that wraps the Match method.
//
// Match returns the best match for any of the given tags, along with
// a unique index associated with the returned tag and a confidence
// score.
type Matcher interface {
Match(t ...Tag) (tag Tag, index int, c Confidence)
}
// Comprehends reports the confidence score for a speaker of a given language
// to being able to comprehend the written form of an alternative language.
func Comprehends(speaker, alternative Tag) Confidence {
_, _, c := NewMatcher([]Tag{alternative}).Match(speaker)
return c
}
// NewMatcher returns a Matcher that matches an ordered list of preferred tags
// against a list of supported tags based on written intelligibility, closeness
// of dialect, equivalence of subtags and various other rules. It is initialized
// with the list of supported tags. The first element is used as the default
// value in case no match is found.
//
// Its Match method matches the first of the given Tags to reach a certain
// confidence threshold. The tags passed to Match should therefore be specified
// in order of preference. Extensions are ignored for matching.
//
// The index returned by the Match method corresponds to the index of the
// matched tag in t, but is augmented with the Unicode extension ('u')of the
// corresponding preferred tag. This allows user locale options to be passed
// transparently.
func NewMatcher(t []Tag, options ...MatchOption) Matcher {
return newMatcher(t, options)
}
func (m *matcher) Match(want ...Tag) (t Tag, index int, c Confidence) {
var tt language.Tag
match, w, c := m.getBest(want...)
if match != nil {
tt, index = match.tag, match.index
} else {
// TODO: this should be an option
tt = m.default_.tag
if m.preferSameScript {
outer:
for _, w := range want {
script, _ := w.Script()
if script.scriptID == 0 {
// Don't do anything if there is no script, such as with
// private subtags.
continue
}
for i, h := range m.supported {
if script.scriptID == h.maxScript {
tt, index = h.tag, i
break outer
}
}
}
}
// TODO: select first language tag based on script.
}
if w.RegionID != tt.RegionID && w.RegionID != 0 {
if w.RegionID != 0 && tt.RegionID != 0 && tt.RegionID.Contains(w.RegionID) {
tt.RegionID = w.RegionID
tt.RemakeString()
} else if r := w.RegionID.String(); len(r) == 2 {
// TODO: also filter macro and deprecated.
tt, _ = tt.SetTypeForKey("rg", strings.ToLower(r)+"zzzz")
}
}
// Copy options from the user-provided tag into the result tag. This is hard
// to do after the fact, so we do it here.
// TODO: add in alternative variants to -u-va-.
// TODO: add preferred region to -u-rg-.
if e := w.Extensions(); len(e) > 0 {
b := language.Builder{}
b.SetTag(tt)
for _, e := range e {
b.AddExt(e)
}
tt = b.Make()
}
return makeTag(tt), index, c
}
// ErrMissingLikelyTagsData indicates no information was available
// to compute likely values of missing tags.
var ErrMissingLikelyTagsData = errors.New("missing likely tags data")
// func (t *Tag) setTagsFrom(id Tag) {
// t.LangID = id.LangID
// t.ScriptID = id.ScriptID
// t.RegionID = id.RegionID
// }
// Tag Matching
// CLDR defines an algorithm for finding the best match between two sets of language
// tags. The basic algorithm defines how to score a possible match and then find
// the match with the best score
// (see https://www.unicode.org/reports/tr35/#LanguageMatching).
// Using scoring has several disadvantages. The scoring obfuscates the importance of
// the various factors considered, making the algorithm harder to understand. Using
// scoring also requires the full score to be computed for each pair of tags.
//
// We will use a different algorithm which aims to have the following properties:
// - clarity on the precedence of the various selection factors, and
// - improved performance by allowing early termination of a comparison.
//
// Matching algorithm (overview)
// Input:
// - supported: a set of supported tags
// - default: the default tag to return in case there is no match
// - desired: list of desired tags, ordered by preference, starting with
// the most-preferred.
//
// Algorithm:
// 1) Set the best match to the lowest confidence level
// 2) For each tag in "desired":
// a) For each tag in "supported":
// 1) compute the match between the two tags.
// 2) if the match is better than the previous best match, replace it
// with the new match. (see next section)
// b) if the current best match is Exact and pin is true the result will be
// frozen to the language found thusfar, although better matches may
// still be found for the same language.
// 3) If the best match so far is below a certain threshold, return "default".
//
// Ranking:
// We use two phases to determine whether one pair of tags are a better match
// than another pair of tags. First, we determine a rough confidence level. If the
// levels are different, the one with the highest confidence wins.
// Second, if the rough confidence levels are identical, we use a set of tie-breaker
// rules.
//
// The confidence level of matching a pair of tags is determined by finding the
// lowest confidence level of any matches of the corresponding subtags (the
// result is deemed as good as its weakest link).
// We define the following levels:
// Exact - An exact match of a subtag, before adding likely subtags.
// MaxExact - An exact match of a subtag, after adding likely subtags.
// [See Note 2].
// High - High level of mutual intelligibility between different subtag
// variants.
// Low - Low level of mutual intelligibility between different subtag
// variants.
// No - No mutual intelligibility.
//
// The following levels can occur for each type of subtag:
// Base: Exact, MaxExact, High, Low, No
// Script: Exact, MaxExact [see Note 3], Low, No
// Region: Exact, MaxExact, High
// Variant: Exact, High
// Private: Exact, No
//
// Any result with a confidence level of Low or higher is deemed a possible match.
// Once a desired tag matches any of the supported tags with a level of MaxExact
// or higher, the next desired tag is not considered (see Step 2.b).
// Note that CLDR provides languageMatching data that defines close equivalence
// classes for base languages, scripts and regions.
//
// Tie-breaking
// If we get the same confidence level for two matches, we apply a sequence of
// tie-breaking rules. The first that succeeds defines the result. The rules are
// applied in the following order.
// 1) Original language was defined and was identical.
// 2) Original region was defined and was identical.
// 3) Distance between two maximized regions was the smallest.
// 4) Original script was defined and was identical.
// 5) Distance from want tag to have tag using the parent relation [see Note 5.]
// If there is still no winner after these rules are applied, the first match
// found wins.
//
// Notes:
// [2] In practice, as matching of Exact is done in a separate phase from
// matching the other levels, we reuse the Exact level to mean MaxExact in
// the second phase. As a consequence, we only need the levels defined by
// the Confidence type. The MaxExact confidence level is mapped to High in
// the public API.
// [3] We do not differentiate between maximized script values that were derived
// from suppressScript versus most likely tag data. We determined that in
// ranking the two, one ranks just after the other. Moreover, the two cannot
// occur concurrently. As a consequence, they are identical for practical
// purposes.
// [4] In case of deprecated, macro-equivalents and legacy mappings, we assign
// the MaxExact level to allow iw vs he to still be a closer match than
// en-AU vs en-US, for example.
// [5] In CLDR a locale inherits fields that are unspecified for this locale
// from its parent. Therefore, if a locale is a parent of another locale,
// it is a strong measure for closeness, especially when no other tie
// breaker rule applies. One could also argue it is inconsistent, for
// example, when pt-AO matches pt (which CLDR equates with pt-BR), even
// though its parent is pt-PT according to the inheritance rules.
//
// Implementation Details:
// There are several performance considerations worth pointing out. Most notably,
// we preprocess as much as possible (within reason) at the time of creation of a
// matcher. This includes:
// - creating a per-language map, which includes data for the raw base language
// and its canonicalized variant (if applicable),
// - expanding entries for the equivalence classes defined in CLDR's
// languageMatch data.
// The per-language map ensures that typically only a very small number of tags
// need to be considered. The pre-expansion of canonicalized subtags and
// equivalence classes reduces the amount of map lookups that need to be done at
// runtime.
// matcher keeps a set of supported language tags, indexed by language.
type matcher struct {
default_ *haveTag
supported []*haveTag
index map[language.Language]*matchHeader
passSettings bool
preferSameScript bool
}
// matchHeader has the lists of tags for exact matches and matches based on
// maximized and canonicalized tags for a given language.
type matchHeader struct {
haveTags []*haveTag
original bool
}
// haveTag holds a supported Tag and its maximized script and region. The maximized
// or canonicalized language is not stored as it is not needed during matching.
type haveTag struct {
tag language.Tag
// index of this tag in the original list of supported tags.
index int
// conf is the maximum confidence that can result from matching this haveTag.
// When conf < Exact this means it was inserted after applying a CLDR equivalence rule.
conf Confidence
// Maximized region and script.
maxRegion language.Region
maxScript language.Script
// altScript may be checked as an alternative match to maxScript. If altScript
// matches, the confidence level for this match is Low. Theoretically there
// could be multiple alternative scripts. This does not occur in practice.
altScript language.Script
// nextMax is the index of the next haveTag with the same maximized tags.
nextMax uint16
}
func makeHaveTag(tag language.Tag, index int) (haveTag, language.Language) {
max := tag
if tag.LangID != 0 || tag.RegionID != 0 || tag.ScriptID != 0 {
max, _ = canonicalize(All, max)
max, _ = max.Maximize()
max.RemakeString()
}
return haveTag{tag, index, Exact, max.RegionID, max.ScriptID, altScript(max.LangID, max.ScriptID), 0}, max.LangID
}
// altScript returns an alternative script that may match the given script with
// a low confidence. At the moment, the langMatch data allows for at most one
// script to map to another and we rely on this to keep the code simple.
func altScript(l language.Language, s language.Script) language.Script {
for _, alt := range matchScript {
// TODO: also match cases where language is not the same.
if (language.Language(alt.wantLang) == l || language.Language(alt.haveLang) == l) &&
language.Script(alt.haveScript) == s {
return language.Script(alt.wantScript)
}
}
return 0
}
// addIfNew adds a haveTag to the list of tags only if it is a unique tag.
// Tags that have the same maximized values are linked by index.
func (h *matchHeader) addIfNew(n haveTag, exact bool) {
h.original = h.original || exact
// Don't add new exact matches.
for _, v := range h.haveTags {
if equalsRest(v.tag, n.tag) {
return
}
}
// Allow duplicate maximized tags, but create a linked list to allow quickly
// comparing the equivalents and bail out.
for i, v := range h.haveTags {
if v.maxScript == n.maxScript &&
v.maxRegion == n.maxRegion &&
v.tag.VariantOrPrivateUseTags() == n.tag.VariantOrPrivateUseTags() {
for h.haveTags[i].nextMax != 0 {
i = int(h.haveTags[i].nextMax)
}
h.haveTags[i].nextMax = uint16(len(h.haveTags))
break
}
}
h.haveTags = append(h.haveTags, &n)
}
// header returns the matchHeader for the given language. It creates one if
// it doesn't already exist.
func (m *matcher) header(l language.Language) *matchHeader {
if h := m.index[l]; h != nil {
return h
}
h := &matchHeader{}
m.index[l] = h
return h
}
func toConf(d uint8) Confidence {
if d <= 10 {
return High
}
if d < 30 {
return Low
}
return No
}
// newMatcher builds an index for the given supported tags and returns it as
// a matcher. It also expands the index by considering various equivalence classes
// for a given tag.
func newMatcher(supported []Tag, options []MatchOption) *matcher {
m := &matcher{
index: make(map[language.Language]*matchHeader),
preferSameScript: true,
}
for _, o := range options {
o(m)
}
if len(supported) == 0 {
m.default_ = &haveTag{}
return m
}
// Add supported languages to the index. Add exact matches first to give
// them precedence.
for i, tag := range supported {
tt := tag.tag()
pair, _ := makeHaveTag(tt, i)
m.header(tt.LangID).addIfNew(pair, true)
m.supported = append(m.supported, &pair)
}
m.default_ = m.header(supported[0].lang()).haveTags[0]
// Keep these in two different loops to support the case that two equivalent
// languages are distinguished, such as iw and he.
for i, tag := range supported {
tt := tag.tag()
pair, max := makeHaveTag(tt, i)
if max != tt.LangID {
m.header(max).addIfNew(pair, true)
}
}
// update is used to add indexes in the map for equivalent languages.
// update will only add entries to original indexes, thus not computing any
// transitive relations.
update := func(want, have uint16, conf Confidence) {
if hh := m.index[language.Language(have)]; hh != nil {
if !hh.original {
return
}
hw := m.header(language.Language(want))
for _, ht := range hh.haveTags {
v := *ht
if conf < v.conf {
v.conf = conf
}
v.nextMax = 0 // this value needs to be recomputed
if v.altScript != 0 {
v.altScript = altScript(language.Language(want), v.maxScript)
}
hw.addIfNew(v, conf == Exact && hh.original)
}
}
}
// Add entries for languages with mutual intelligibility as defined by CLDR's
// languageMatch data.
for _, ml := range matchLang {
update(ml.want, ml.have, toConf(ml.distance))
if !ml.oneway {
update(ml.have, ml.want, toConf(ml.distance))
}
}
// Add entries for possible canonicalizations. This is an optimization to
// ensure that only one map lookup needs to be done at runtime per desired tag.
// First we match deprecated equivalents. If they are perfect equivalents
// (their canonicalization simply substitutes a different language code, but
// nothing else), the match confidence is Exact, otherwise it is High.
for i, lm := range language.AliasMap {
// If deprecated codes match and there is no fiddling with the script or
// or region, we consider it an exact match.
conf := Exact
if language.AliasTypes[i] != language.Macro {
if !isExactEquivalent(language.Language(lm.From)) {
conf = High
}
update(lm.To, lm.From, conf)
}
update(lm.From, lm.To, conf)
}
return m
}
// getBest gets the best matching tag in m for any of the given tags, taking into
// account the order of preference of the given tags.
func (m *matcher) getBest(want ...Tag) (got *haveTag, orig language.Tag, c Confidence) {
best := bestMatch{}
for i, ww := range want {
w := ww.tag()
var max language.Tag
// Check for exact match first.
h := m.index[w.LangID]
if w.LangID != 0 {
if h == nil {
continue
}
// Base language is defined.
max, _ = canonicalize(Legacy|Deprecated|Macro, w)
// A region that is added through canonicalization is stronger than
// a maximized region: set it in the original (e.g. mo -> ro-MD).
if w.RegionID != max.RegionID {
w.RegionID = max.RegionID
}
// TODO: should we do the same for scripts?
// See test case: en, sr, nl ; sh ; sr
max, _ = max.Maximize()
} else {
// Base language is not defined.
if h != nil {
for i := range h.haveTags {
have := h.haveTags[i]
if equalsRest(have.tag, w) {
return have, w, Exact
}
}
}
if w.ScriptID == 0 && w.RegionID == 0 {
// We skip all tags matching und for approximate matching, including
// private tags.
continue
}
max, _ = w.Maximize()
if h = m.index[max.LangID]; h == nil {
continue
}
}
pin := true
for _, t := range want[i+1:] {
if w.LangID == t.lang() {
pin = false
break
}
}
// Check for match based on maximized tag.
for i := range h.haveTags {
have := h.haveTags[i]
best.update(have, w, max.ScriptID, max.RegionID, pin)
if best.conf == Exact {
for have.nextMax != 0 {
have = h.haveTags[have.nextMax]
best.update(have, w, max.ScriptID, max.RegionID, pin)
}
return best.have, best.want, best.conf
}
}
}
if best.conf <= No {
if len(want) != 0 {
return nil, want[0].tag(), No
}
return nil, language.Tag{}, No
}
return best.have, best.want, best.conf
}
// bestMatch accumulates the best match so far.
type bestMatch struct {
have *haveTag
want language.Tag
conf Confidence
pinnedRegion language.Region
pinLanguage bool
sameRegionGroup bool
// Cached results from applying tie-breaking rules.
origLang bool
origReg bool
paradigmReg bool
regGroupDist uint8
origScript bool
}
// update updates the existing best match if the new pair is considered to be a
// better match. To determine if the given pair is a better match, it first
// computes the rough confidence level. If this surpasses the current match, it
// will replace it and update the tie-breaker rule cache. If there is a tie, it
// proceeds with applying a series of tie-breaker rules. If there is no
// conclusive winner after applying the tie-breaker rules, it leaves the current
// match as the preferred match.
//
// If pin is true and have and tag are a strong match, it will henceforth only
// consider matches for this language. This corresponds to the idea that most
// users have a strong preference for the first defined language. A user can
// still prefer a second language over a dialect of the preferred language by
// explicitly specifying dialects, e.g. "en, nl, en-GB". In this case pin should
// be false.
func (m *bestMatch) update(have *haveTag, tag language.Tag, maxScript language.Script, maxRegion language.Region, pin bool) {
// Bail if the maximum attainable confidence is below that of the current best match.
c := have.conf
if c < m.conf {
return
}
// Don't change the language once we already have found an exact match.
if m.pinLanguage && tag.LangID != m.want.LangID {
return
}
// Pin the region group if we are comparing tags for the same language.
if tag.LangID == m.want.LangID && m.sameRegionGroup {
_, sameGroup := regionGroupDist(m.pinnedRegion, have.maxRegion, have.maxScript, m.want.LangID)
if !sameGroup {
return
}
}
if c == Exact && have.maxScript == maxScript {
// If there is another language and then another entry of this language,
// don't pin anything, otherwise pin the language.
m.pinLanguage = pin
}
if equalsRest(have.tag, tag) {
} else if have.maxScript != maxScript {
// There is usually very little comprehension between different scripts.
// In a few cases there may still be Low comprehension. This possibility
// is pre-computed and stored in have.altScript.
if Low < m.conf || have.altScript != maxScript {
return
}
c = Low
} else if have.maxRegion != maxRegion {
if High < c {
// There is usually a small difference between languages across regions.
c = High
}
}
// We store the results of the computations of the tie-breaker rules along
// with the best match. There is no need to do the checks once we determine
// we have a winner, but we do still need to do the tie-breaker computations.
// We use "beaten" to keep track if we still need to do the checks.
beaten := false // true if the new pair defeats the current one.
if c != m.conf {
if c < m.conf {
return
}
beaten = true
}
// Tie-breaker rules:
// We prefer if the pre-maximized language was specified and identical.
origLang := have.tag.LangID == tag.LangID && tag.LangID != 0
if !beaten && m.origLang != origLang {
if m.origLang {
return
}
beaten = true
}
// We prefer if the pre-maximized region was specified and identical.
origReg := have.tag.RegionID == tag.RegionID && tag.RegionID != 0
if !beaten && m.origReg != origReg {
if m.origReg {
return
}
beaten = true
}
regGroupDist, sameGroup := regionGroupDist(have.maxRegion, maxRegion, maxScript, tag.LangID)
if !beaten && m.regGroupDist != regGroupDist {
if regGroupDist > m.regGroupDist {
return
}
beaten = true
}
paradigmReg := isParadigmLocale(tag.LangID, have.maxRegion)
if !beaten && m.paradigmReg != paradigmReg {
if !paradigmReg {
return
}
beaten = true
}
// Next we prefer if the pre-maximized script was specified and identical.
origScript := have.tag.ScriptID == tag.ScriptID && tag.ScriptID != 0
if !beaten && m.origScript != origScript {
if m.origScript {
return
}
beaten = true
}
// Update m to the newly found best match.
if beaten {
m.have = have
m.want = tag
m.conf = c
m.pinnedRegion = maxRegion
m.sameRegionGroup = sameGroup
m.origLang = origLang
m.origReg = origReg
m.paradigmReg = paradigmReg
m.origScript = origScript
m.regGroupDist = regGroupDist
}
}
func isParadigmLocale(lang language.Language, r language.Region) bool {
for _, e := range paradigmLocales {
if language.Language(e[0]) == lang && (r == language.Region(e[1]) || r == language.Region(e[2])) {
return true
}
}
return false
}
// regionGroupDist computes the distance between two regions based on their
// CLDR grouping.
func regionGroupDist(a, b language.Region, script language.Script, lang language.Language) (dist uint8, same bool) {
const defaultDistance = 4
aGroup := uint(regionToGroups[a]) << 1
bGroup := uint(regionToGroups[b]) << 1
for _, ri := range matchRegion {
if language.Language(ri.lang) == lang && (ri.script == 0 || language.Script(ri.script) == script) {
group := uint(1 << (ri.group &^ 0x80))
if 0x80&ri.group == 0 {
if aGroup&bGroup&group != 0 { // Both regions are in the group.
return ri.distance, ri.distance == defaultDistance
}
} else {
if (aGroup|bGroup)&group == 0 { // Both regions are not in the group.
return ri.distance, ri.distance == defaultDistance
}
}
}
}
return defaultDistance, true
}
// equalsRest compares everything except the language.
func equalsRest(a, b language.Tag) bool {
// TODO: don't include extensions in this comparison. To do this efficiently,
// though, we should handle private tags separately.
return a.ScriptID == b.ScriptID && a.RegionID == b.RegionID && a.VariantOrPrivateUseTags() == b.VariantOrPrivateUseTags()
}
// isExactEquivalent returns true if canonicalizing the language will not alter
// the script or region of a tag.
func isExactEquivalent(l language.Language) bool {
for _, o := range notEquivalent {
if o == l {
return false
}
}
return true
}
var notEquivalent []language.Language
func init() {
// Create a list of all languages for which canonicalization may alter the
// script or region.
for _, lm := range language.AliasMap {
tag := language.Tag{LangID: language.Language(lm.From)}
if tag, _ = canonicalize(All, tag); tag.ScriptID != 0 || tag.RegionID != 0 {
notEquivalent = append(notEquivalent, language.Language(lm.From))
}
}
// Maximize undefined regions of paradigm locales.
for i, v := range paradigmLocales {
t := language.Tag{LangID: language.Language(v[0])}
max, _ := t.Maximize()
if v[1] == 0 {
paradigmLocales[i][1] = uint16(max.RegionID)
}
if v[2] == 0 {
paradigmLocales[i][2] = uint16(max.RegionID)
}
}
} | "golang.org/x/text/internal/language"
) | random_line_split |
match.go | // Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package language
import (
"errors"
"strings"
"golang.org/x/text/internal/language"
)
// A MatchOption configures a Matcher.
type MatchOption func(*matcher)
// PreferSameScript will, in the absence of a match, result in the first
// preferred tag with the same script as a supported tag to match this supported
// tag. The default is currently true, but this may change in the future.
func PreferSameScript(preferSame bool) MatchOption {
return func(m *matcher) { m.preferSameScript = preferSame }
}
// TODO(v1.0.0): consider making Matcher a concrete type, instead of interface.
// There doesn't seem to be too much need for multiple types.
// Making it a concrete type allows MatchStrings to be a method, which will
// improve its discoverability.
// MatchStrings parses and matches the given strings until one of them matches
// the language in the Matcher. A string may be an Accept-Language header as
// handled by ParseAcceptLanguage. The default language is returned if no
// other language matched.
func MatchStrings(m Matcher, lang ...string) (tag Tag, index int) {
for _, accept := range lang {
desired, _, err := ParseAcceptLanguage(accept)
if err != nil {
continue
}
if tag, index, conf := m.Match(desired...); conf != No {
return tag, index
}
}
tag, index, _ = m.Match()
return
}
// Matcher is the interface that wraps the Match method.
//
// Match returns the best match for any of the given tags, along with
// a unique index associated with the returned tag and a confidence
// score.
type Matcher interface {
Match(t ...Tag) (tag Tag, index int, c Confidence)
}
// Comprehends reports the confidence score for a speaker of a given language
// to being able to comprehend the written form of an alternative language.
func Comprehends(speaker, alternative Tag) Confidence {
_, _, c := NewMatcher([]Tag{alternative}).Match(speaker)
return c
}
// NewMatcher returns a Matcher that matches an ordered list of preferred tags
// against a list of supported tags based on written intelligibility, closeness
// of dialect, equivalence of subtags and various other rules. It is initialized
// with the list of supported tags. The first element is used as the default
// value in case no match is found.
//
// Its Match method matches the first of the given Tags to reach a certain
// confidence threshold. The tags passed to Match should therefore be specified
// in order of preference. Extensions are ignored for matching.
//
// The index returned by the Match method corresponds to the index of the
// matched tag in t, but is augmented with the Unicode extension ('u')of the
// corresponding preferred tag. This allows user locale options to be passed
// transparently.
func NewMatcher(t []Tag, options ...MatchOption) Matcher {
return newMatcher(t, options)
}
func (m *matcher) Match(want ...Tag) (t Tag, index int, c Confidence) {
var tt language.Tag
match, w, c := m.getBest(want...)
if match != nil {
tt, index = match.tag, match.index
} else {
// TODO: this should be an option
tt = m.default_.tag
if m.preferSameScript {
outer:
for _, w := range want {
script, _ := w.Script()
if script.scriptID == 0 {
// Don't do anything if there is no script, such as with
// private subtags.
continue
}
for i, h := range m.supported {
if script.scriptID == h.maxScript {
tt, index = h.tag, i
break outer
}
}
}
}
// TODO: select first language tag based on script.
}
if w.RegionID != tt.RegionID && w.RegionID != 0 {
if w.RegionID != 0 && tt.RegionID != 0 && tt.RegionID.Contains(w.RegionID) {
tt.RegionID = w.RegionID
tt.RemakeString()
} else if r := w.RegionID.String(); len(r) == 2 {
// TODO: also filter macro and deprecated.
tt, _ = tt.SetTypeForKey("rg", strings.ToLower(r)+"zzzz")
}
}
// Copy options from the user-provided tag into the result tag. This is hard
// to do after the fact, so we do it here.
// TODO: add in alternative variants to -u-va-.
// TODO: add preferred region to -u-rg-.
if e := w.Extensions(); len(e) > 0 {
b := language.Builder{}
b.SetTag(tt)
for _, e := range e {
b.AddExt(e)
}
tt = b.Make()
}
return makeTag(tt), index, c
}
// ErrMissingLikelyTagsData indicates no information was available
// to compute likely values of missing tags.
var ErrMissingLikelyTagsData = errors.New("missing likely tags data")
// func (t *Tag) setTagsFrom(id Tag) {
// t.LangID = id.LangID
// t.ScriptID = id.ScriptID
// t.RegionID = id.RegionID
// }
// Tag Matching
// CLDR defines an algorithm for finding the best match between two sets of language
// tags. The basic algorithm defines how to score a possible match and then find
// the match with the best score
// (see https://www.unicode.org/reports/tr35/#LanguageMatching).
// Using scoring has several disadvantages. The scoring obfuscates the importance of
// the various factors considered, making the algorithm harder to understand. Using
// scoring also requires the full score to be computed for each pair of tags.
//
// We will use a different algorithm which aims to have the following properties:
// - clarity on the precedence of the various selection factors, and
// - improved performance by allowing early termination of a comparison.
//
// Matching algorithm (overview)
// Input:
// - supported: a set of supported tags
// - default: the default tag to return in case there is no match
// - desired: list of desired tags, ordered by preference, starting with
// the most-preferred.
//
// Algorithm:
// 1) Set the best match to the lowest confidence level
// 2) For each tag in "desired":
// a) For each tag in "supported":
// 1) compute the match between the two tags.
// 2) if the match is better than the previous best match, replace it
// with the new match. (see next section)
// b) if the current best match is Exact and pin is true the result will be
// frozen to the language found thusfar, although better matches may
// still be found for the same language.
// 3) If the best match so far is below a certain threshold, return "default".
//
// Ranking:
// We use two phases to determine whether one pair of tags are a better match
// than another pair of tags. First, we determine a rough confidence level. If the
// levels are different, the one with the highest confidence wins.
// Second, if the rough confidence levels are identical, we use a set of tie-breaker
// rules.
//
// The confidence level of matching a pair of tags is determined by finding the
// lowest confidence level of any matches of the corresponding subtags (the
// result is deemed as good as its weakest link).
// We define the following levels:
// Exact - An exact match of a subtag, before adding likely subtags.
// MaxExact - An exact match of a subtag, after adding likely subtags.
// [See Note 2].
// High - High level of mutual intelligibility between different subtag
// variants.
// Low - Low level of mutual intelligibility between different subtag
// variants.
// No - No mutual intelligibility.
//
// The following levels can occur for each type of subtag:
// Base: Exact, MaxExact, High, Low, No
// Script: Exact, MaxExact [see Note 3], Low, No
// Region: Exact, MaxExact, High
// Variant: Exact, High
// Private: Exact, No
//
// Any result with a confidence level of Low or higher is deemed a possible match.
// Once a desired tag matches any of the supported tags with a level of MaxExact
// or higher, the next desired tag is not considered (see Step 2.b).
// Note that CLDR provides languageMatching data that defines close equivalence
// classes for base languages, scripts and regions.
//
// Tie-breaking
// If we get the same confidence level for two matches, we apply a sequence of
// tie-breaking rules. The first that succeeds defines the result. The rules are
// applied in the following order.
// 1) Original language was defined and was identical.
// 2) Original region was defined and was identical.
// 3) Distance between two maximized regions was the smallest.
// 4) Original script was defined and was identical.
// 5) Distance from want tag to have tag using the parent relation [see Note 5.]
// If there is still no winner after these rules are applied, the first match
// found wins.
//
// Notes:
// [2] In practice, as matching of Exact is done in a separate phase from
// matching the other levels, we reuse the Exact level to mean MaxExact in
// the second phase. As a consequence, we only need the levels defined by
// the Confidence type. The MaxExact confidence level is mapped to High in
// the public API.
// [3] We do not differentiate between maximized script values that were derived
// from suppressScript versus most likely tag data. We determined that in
// ranking the two, one ranks just after the other. Moreover, the two cannot
// occur concurrently. As a consequence, they are identical for practical
// purposes.
// [4] In case of deprecated, macro-equivalents and legacy mappings, we assign
// the MaxExact level to allow iw vs he to still be a closer match than
// en-AU vs en-US, for example.
// [5] In CLDR a locale inherits fields that are unspecified for this locale
// from its parent. Therefore, if a locale is a parent of another locale,
// it is a strong measure for closeness, especially when no other tie
// breaker rule applies. One could also argue it is inconsistent, for
// example, when pt-AO matches pt (which CLDR equates with pt-BR), even
// though its parent is pt-PT according to the inheritance rules.
//
// Implementation Details:
// There are several performance considerations worth pointing out. Most notably,
// we preprocess as much as possible (within reason) at the time of creation of a
// matcher. This includes:
// - creating a per-language map, which includes data for the raw base language
// and its canonicalized variant (if applicable),
// - expanding entries for the equivalence classes defined in CLDR's
// languageMatch data.
// The per-language map ensures that typically only a very small number of tags
// need to be considered. The pre-expansion of canonicalized subtags and
// equivalence classes reduces the amount of map lookups that need to be done at
// runtime.
// matcher keeps a set of supported language tags, indexed by language.
type matcher struct {
default_ *haveTag
supported []*haveTag
index map[language.Language]*matchHeader
passSettings bool
preferSameScript bool
}
// matchHeader has the lists of tags for exact matches and matches based on
// maximized and canonicalized tags for a given language.
type matchHeader struct {
haveTags []*haveTag
original bool
}
// haveTag holds a supported Tag and its maximized script and region. The maximized
// or canonicalized language is not stored as it is not needed during matching.
type haveTag struct {
tag language.Tag
// index of this tag in the original list of supported tags.
index int
// conf is the maximum confidence that can result from matching this haveTag.
// When conf < Exact this means it was inserted after applying a CLDR equivalence rule.
conf Confidence
// Maximized region and script.
maxRegion language.Region
maxScript language.Script
// altScript may be checked as an alternative match to maxScript. If altScript
// matches, the confidence level for this match is Low. Theoretically there
// could be multiple alternative scripts. This does not occur in practice.
altScript language.Script
// nextMax is the index of the next haveTag with the same maximized tags.
nextMax uint16
}
func makeHaveTag(tag language.Tag, index int) (haveTag, language.Language) {
max := tag
if tag.LangID != 0 || tag.RegionID != 0 || tag.ScriptID != 0 {
max, _ = canonicalize(All, max)
max, _ = max.Maximize()
max.RemakeString()
}
return haveTag{tag, index, Exact, max.RegionID, max.ScriptID, altScript(max.LangID, max.ScriptID), 0}, max.LangID
}
// altScript returns an alternative script that may match the given script with
// a low confidence. At the moment, the langMatch data allows for at most one
// script to map to another and we rely on this to keep the code simple.
func altScript(l language.Language, s language.Script) language.Script {
for _, alt := range matchScript {
// TODO: also match cases where language is not the same.
if (language.Language(alt.wantLang) == l || language.Language(alt.haveLang) == l) &&
language.Script(alt.haveScript) == s {
return language.Script(alt.wantScript)
}
}
return 0
}
// addIfNew adds a haveTag to the list of tags only if it is a unique tag.
// Tags that have the same maximized values are linked by index.
func (h *matchHeader) addIfNew(n haveTag, exact bool) {
h.original = h.original || exact
// Don't add new exact matches.
for _, v := range h.haveTags {
if equalsRest(v.tag, n.tag) {
return
}
}
// Allow duplicate maximized tags, but create a linked list to allow quickly
// comparing the equivalents and bail out.
for i, v := range h.haveTags {
if v.maxScript == n.maxScript &&
v.maxRegion == n.maxRegion &&
v.tag.VariantOrPrivateUseTags() == n.tag.VariantOrPrivateUseTags() {
for h.haveTags[i].nextMax != 0 {
i = int(h.haveTags[i].nextMax)
}
h.haveTags[i].nextMax = uint16(len(h.haveTags))
break
}
}
h.haveTags = append(h.haveTags, &n)
}
// header returns the matchHeader for the given language. It creates one if
// it doesn't already exist.
func (m *matcher) header(l language.Language) *matchHeader {
if h := m.index[l]; h != nil {
return h
}
h := &matchHeader{}
m.index[l] = h
return h
}
func toConf(d uint8) Confidence {
if d <= 10 {
return High
}
if d < 30 {
return Low
}
return No
}
// newMatcher builds an index for the given supported tags and returns it as
// a matcher. It also expands the index by considering various equivalence classes
// for a given tag.
func newMatcher(supported []Tag, options []MatchOption) *matcher {
m := &matcher{
index: make(map[language.Language]*matchHeader),
preferSameScript: true,
}
for _, o := range options {
o(m)
}
if len(supported) == 0 {
m.default_ = &haveTag{}
return m
}
// Add supported languages to the index. Add exact matches first to give
// them precedence.
for i, tag := range supported {
tt := tag.tag()
pair, _ := makeHaveTag(tt, i)
m.header(tt.LangID).addIfNew(pair, true)
m.supported = append(m.supported, &pair)
}
m.default_ = m.header(supported[0].lang()).haveTags[0]
// Keep these in two different loops to support the case that two equivalent
// languages are distinguished, such as iw and he.
for i, tag := range supported {
tt := tag.tag()
pair, max := makeHaveTag(tt, i)
if max != tt.LangID {
m.header(max).addIfNew(pair, true)
}
}
// update is used to add indexes in the map for equivalent languages.
// update will only add entries to original indexes, thus not computing any
// transitive relations.
update := func(want, have uint16, conf Confidence) {
if hh := m.index[language.Language(have)]; hh != nil {
if !hh.original {
return
}
hw := m.header(language.Language(want))
for _, ht := range hh.haveTags {
v := *ht
if conf < v.conf {
v.conf = conf
}
v.nextMax = 0 // this value needs to be recomputed
if v.altScript != 0 {
v.altScript = altScript(language.Language(want), v.maxScript)
}
hw.addIfNew(v, conf == Exact && hh.original)
}
}
}
// Add entries for languages with mutual intelligibility as defined by CLDR's
// languageMatch data.
for _, ml := range matchLang {
update(ml.want, ml.have, toConf(ml.distance))
if !ml.oneway {
update(ml.have, ml.want, toConf(ml.distance))
}
}
// Add entries for possible canonicalizations. This is an optimization to
// ensure that only one map lookup needs to be done at runtime per desired tag.
// First we match deprecated equivalents. If they are perfect equivalents
// (their canonicalization simply substitutes a different language code, but
// nothing else), the match confidence is Exact, otherwise it is High.
for i, lm := range language.AliasMap {
// If deprecated codes match and there is no fiddling with the script or
// or region, we consider it an exact match.
conf := Exact
if language.AliasTypes[i] != language.Macro {
if !isExactEquivalent(language.Language(lm.From)) {
conf = High
}
update(lm.To, lm.From, conf)
}
update(lm.From, lm.To, conf)
}
return m
}
// getBest gets the best matching tag in m for any of the given tags, taking into
// account the order of preference of the given tags.
func (m *matcher) getBest(want ...Tag) (got *haveTag, orig language.Tag, c Confidence) {
best := bestMatch{}
for i, ww := range want {
w := ww.tag()
var max language.Tag
// Check for exact match first.
h := m.index[w.LangID]
if w.LangID != 0 {
if h == nil {
continue
}
// Base language is defined.
max, _ = canonicalize(Legacy|Deprecated|Macro, w)
// A region that is added through canonicalization is stronger than
// a maximized region: set it in the original (e.g. mo -> ro-MD).
if w.RegionID != max.RegionID {
w.RegionID = max.RegionID
}
// TODO: should we do the same for scripts?
// See test case: en, sr, nl ; sh ; sr
max, _ = max.Maximize()
} else {
// Base language is not defined.
if h != nil {
for i := range h.haveTags {
have := h.haveTags[i]
if equalsRest(have.tag, w) {
return have, w, Exact
}
}
}
if w.ScriptID == 0 && w.RegionID == 0 {
// We skip all tags matching und for approximate matching, including
// private tags.
continue
}
max, _ = w.Maximize()
if h = m.index[max.LangID]; h == nil {
continue
}
}
pin := true
for _, t := range want[i+1:] {
if w.LangID == t.lang() {
pin = false
break
}
}
// Check for match based on maximized tag.
for i := range h.haveTags {
have := h.haveTags[i]
best.update(have, w, max.ScriptID, max.RegionID, pin)
if best.conf == Exact {
for have.nextMax != 0 {
have = h.haveTags[have.nextMax]
best.update(have, w, max.ScriptID, max.RegionID, pin)
}
return best.have, best.want, best.conf
}
}
}
if best.conf <= No {
if len(want) != 0 {
return nil, want[0].tag(), No
}
return nil, language.Tag{}, No
}
return best.have, best.want, best.conf
}
// bestMatch accumulates the best match so far.
type bestMatch struct {
have *haveTag
want language.Tag
conf Confidence
pinnedRegion language.Region
pinLanguage bool
sameRegionGroup bool
// Cached results from applying tie-breaking rules.
origLang bool
origReg bool
paradigmReg bool
regGroupDist uint8
origScript bool
}
// update updates the existing best match if the new pair is considered to be a
// better match. To determine if the given pair is a better match, it first
// computes the rough confidence level. If this surpasses the current match, it
// will replace it and update the tie-breaker rule cache. If there is a tie, it
// proceeds with applying a series of tie-breaker rules. If there is no
// conclusive winner after applying the tie-breaker rules, it leaves the current
// match as the preferred match.
//
// If pin is true and have and tag are a strong match, it will henceforth only
// consider matches for this language. This corresponds to the idea that most
// users have a strong preference for the first defined language. A user can
// still prefer a second language over a dialect of the preferred language by
// explicitly specifying dialects, e.g. "en, nl, en-GB". In this case pin should
// be false.
func (m *bestMatch) update(have *haveTag, tag language.Tag, maxScript language.Script, maxRegion language.Region, pin bool) {
// Bail if the maximum attainable confidence is below that of the current best match.
c := have.conf
if c < m.conf {
return
}
// Don't change the language once we already have found an exact match.
if m.pinLanguage && tag.LangID != m.want.LangID {
return
}
// Pin the region group if we are comparing tags for the same language.
if tag.LangID == m.want.LangID && m.sameRegionGroup {
_, sameGroup := regionGroupDist(m.pinnedRegion, have.maxRegion, have.maxScript, m.want.LangID)
if !sameGroup {
return
}
}
if c == Exact && have.maxScript == maxScript {
// If there is another language and then another entry of this language,
// don't pin anything, otherwise pin the language.
m.pinLanguage = pin
}
if equalsRest(have.tag, tag) {
} else if have.maxScript != maxScript | else if have.maxRegion != maxRegion {
if High < c {
// There is usually a small difference between languages across regions.
c = High
}
}
// We store the results of the computations of the tie-breaker rules along
// with the best match. There is no need to do the checks once we determine
// we have a winner, but we do still need to do the tie-breaker computations.
// We use "beaten" to keep track if we still need to do the checks.
beaten := false // true if the new pair defeats the current one.
if c != m.conf {
if c < m.conf {
return
}
beaten = true
}
// Tie-breaker rules:
// We prefer if the pre-maximized language was specified and identical.
origLang := have.tag.LangID == tag.LangID && tag.LangID != 0
if !beaten && m.origLang != origLang {
if m.origLang {
return
}
beaten = true
}
// We prefer if the pre-maximized region was specified and identical.
origReg := have.tag.RegionID == tag.RegionID && tag.RegionID != 0
if !beaten && m.origReg != origReg {
if m.origReg {
return
}
beaten = true
}
regGroupDist, sameGroup := regionGroupDist(have.maxRegion, maxRegion, maxScript, tag.LangID)
if !beaten && m.regGroupDist != regGroupDist {
if regGroupDist > m.regGroupDist {
return
}
beaten = true
}
paradigmReg := isParadigmLocale(tag.LangID, have.maxRegion)
if !beaten && m.paradigmReg != paradigmReg {
if !paradigmReg {
return
}
beaten = true
}
// Next we prefer if the pre-maximized script was specified and identical.
origScript := have.tag.ScriptID == tag.ScriptID && tag.ScriptID != 0
if !beaten && m.origScript != origScript {
if m.origScript {
return
}
beaten = true
}
// Update m to the newly found best match.
if beaten {
m.have = have
m.want = tag
m.conf = c
m.pinnedRegion = maxRegion
m.sameRegionGroup = sameGroup
m.origLang = origLang
m.origReg = origReg
m.paradigmReg = paradigmReg
m.origScript = origScript
m.regGroupDist = regGroupDist
}
}
func isParadigmLocale(lang language.Language, r language.Region) bool {
for _, e := range paradigmLocales {
if language.Language(e[0]) == lang && (r == language.Region(e[1]) || r == language.Region(e[2])) {
return true
}
}
return false
}
// regionGroupDist computes the distance between two regions based on their
// CLDR grouping.
func regionGroupDist(a, b language.Region, script language.Script, lang language.Language) (dist uint8, same bool) {
const defaultDistance = 4
aGroup := uint(regionToGroups[a]) << 1
bGroup := uint(regionToGroups[b]) << 1
for _, ri := range matchRegion {
if language.Language(ri.lang) == lang && (ri.script == 0 || language.Script(ri.script) == script) {
group := uint(1 << (ri.group &^ 0x80))
if 0x80&ri.group == 0 {
if aGroup&bGroup&group != 0 { // Both regions are in the group.
return ri.distance, ri.distance == defaultDistance
}
} else {
if (aGroup|bGroup)&group == 0 { // Both regions are not in the group.
return ri.distance, ri.distance == defaultDistance
}
}
}
}
return defaultDistance, true
}
// equalsRest compares everything except the language.
func equalsRest(a, b language.Tag) bool {
// TODO: don't include extensions in this comparison. To do this efficiently,
// though, we should handle private tags separately.
return a.ScriptID == b.ScriptID && a.RegionID == b.RegionID && a.VariantOrPrivateUseTags() == b.VariantOrPrivateUseTags()
}
// isExactEquivalent returns true if canonicalizing the language will not alter
// the script or region of a tag.
func isExactEquivalent(l language.Language) bool {
for _, o := range notEquivalent {
if o == l {
return false
}
}
return true
}
var notEquivalent []language.Language
func init() {
// Create a list of all languages for which canonicalization may alter the
// script or region.
for _, lm := range language.AliasMap {
tag := language.Tag{LangID: language.Language(lm.From)}
if tag, _ = canonicalize(All, tag); tag.ScriptID != 0 || tag.RegionID != 0 {
notEquivalent = append(notEquivalent, language.Language(lm.From))
}
}
// Maximize undefined regions of paradigm locales.
for i, v := range paradigmLocales {
t := language.Tag{LangID: language.Language(v[0])}
max, _ := t.Maximize()
if v[1] == 0 {
paradigmLocales[i][1] = uint16(max.RegionID)
}
if v[2] == 0 {
paradigmLocales[i][2] = uint16(max.RegionID)
}
}
}
| {
// There is usually very little comprehension between different scripts.
// In a few cases there may still be Low comprehension. This possibility
// is pre-computed and stored in have.altScript.
if Low < m.conf || have.altScript != maxScript {
return
}
c = Low
} | conditional_block |
lane_detector.py | import cv2
import numpy as np
class LaneDetector():
''' Lane Detector - performs three key functions:
1a) detects lanes in given image using sliding window algorithm
1b) detects lanes around previously found lanes
2) calculates lane curvature
3) displays lane information
Uses code from Udacity lessons
'''
def __init__(self):
self.left_fit = None
self.right_fit = None
self.leftx = None
self.rightx = None
self.car_position = None
print('(init: LaneDetector)')
def | (self, img):
''' Apply polynomial fit to the given image, returning fit for left/right lanes
Called when one frame of image has previously found left_fit/right_fit.
This method attempts to find lane fits in the vicinity of previous fits
:param img -- input image with lane lines
:return left_fit, right_fit
'''
if self.left_fit is None or self.right_fit is None:
return self.sliding_window_fit(img)
# from the next frame of video (also called "binary_warped")
# It's now much easier to find line pixels!
nonzero = img.nonzero()
nonzeroy = np.array(nonzero[0])
nonzerox = np.array(nonzero[1])
margin = 100
left_lane_inds = ((nonzerox > (self.left_fit[0]*(nonzeroy**2) + self.left_fit[1]*nonzeroy + self.left_fit[2] - margin)) &
(nonzerox < (self.left_fit[0]*(nonzeroy**2) + self.left_fit[1]*nonzeroy + self.left_fit[2] + margin)))
right_lane_inds = ((nonzerox > (self.right_fit[0]*(nonzeroy**2) + self.right_fit[1]*nonzeroy + self.right_fit[2] - margin)) &
(nonzerox < (self.right_fit[0]*(nonzeroy**2) + self.right_fit[1]*nonzeroy + self.right_fit[2] + margin)))
# Again, extract left and right line pixel positions
self.leftx = nonzerox[left_lane_inds]
lefty = nonzeroy[left_lane_inds]
self.rightx = nonzerox[right_lane_inds]
righty = nonzeroy[right_lane_inds]
# Fit a second order polynomial to each
self.left_fit = np.polyfit(lefty, self.leftx, 2)
self.right_fit = np.polyfit(righty, self.rightx, 2)
return self.left_fit, self.right_fit
def sliding_window_fit(self, img):
''' Apply sliding windows search to the given image to find polynomial to find lane lines
Code based largely on Udacity lessons
:param img - given image
:return left_fit, right_fit - polynomials fitting the left/right lane lines
'''
y_half = int(img.shape[0]/2)
# take histogram of bottom half of img
histogram = np.sum(img[y_half:, :], axis=0)
# Create an output image to draw on and visualize the result
out_img = np.dstack((img, img, img))*255
# Find the peak of the left and right halves of the histogram
# These will be the starting point for the left and right lines
midpoint = np.int(histogram.shape[0]/2)
leftx_base = np.argmax(histogram[:midpoint])
rightx_base = np.argmax(histogram[midpoint:]) + midpoint
# Choose the number of sliding windows
nwindows = 9
# Set height of windows
window_height = np.int(img.shape[0]/nwindows)
# Identify the x and y positions of all nonzero pixels in the image
nonzero = img.nonzero()
nonzeroy = np.array(nonzero[0])
nonzerox = np.array(nonzero[1])
# Current positions to be updated for each window
leftx_current = leftx_base
rightx_current = rightx_base
# Set the width of the windows +/- margin
margin = 100
# Set minimum number of pixels found to recenter window
minpix = 50
# Create empty lists to receive left and right lane pixel indices
left_lane_inds = []
right_lane_inds = []
for window in range(nwindows):
# Identify window boundaries in x and y (and right and left)
win_y_low = img.shape[0] - (window+1) * window_height
win_y_high = img.shape[0] - window * window_height
win_xleft_low = leftx_current - margin
win_xleft_high = leftx_current + margin
win_xright_low = rightx_current - margin
win_xright_high = rightx_current + margin
# Draw the windows on the visualization image
cv2.rectangle(out_img,(win_xleft_low,win_y_low), (win_xleft_high,win_y_high), (0,255,0), 2)
cv2.rectangle(out_img,(win_xright_low,win_y_low),(win_xright_high,win_y_high),(0,255,0), 2)
# Identify the nonzero pixels in x and y within the window
good_left_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
(nonzerox >= win_xleft_low) & (nonzerox < win_xleft_high)).nonzero()[0]
good_right_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
(nonzerox >= win_xright_low) & (nonzerox < win_xright_high)).nonzero()[0]
# Append these indices to the lists
left_lane_inds.append(good_left_inds)
right_lane_inds.append(good_right_inds)
# If you found > minpix pixels, recenter next window on their mean position
if len(good_left_inds) > minpix:
leftx_current = np.int( np.mean(nonzerox[good_left_inds]) )
if len(good_right_inds) > minpix:
rightx_current = np.int( np.mean(nonzerox[good_right_inds]) )
# Concatenate the arrays of indices
left_lane_inds = np.concatenate(left_lane_inds)
right_lane_inds = np.concatenate(right_lane_inds)
# Extract left and right line pixel positions
self.leftx = nonzerox[left_lane_inds]
lefty = nonzeroy[left_lane_inds]
righty = nonzeroy[right_lane_inds]
self.rightx = nonzerox[right_lane_inds]
# Fit a second order polynomial to each
self.left_fit = np.polyfit(lefty, self.leftx, 2)
self.right_fit = np.polyfit(righty,self.rightx, 2)
return self.left_fit, self.right_fit
def find_lane_curvature(self, img):
''' Find lane curvature for the given img
:param img - the input image
:return lane curvature
'''
# Generate some fake data to represent lane-line pixels
ploty = np.linspace(0, 719, num=720) # to cover same y-range as image
quadratic_coeff = 3e-4 # arbitrary quadratic coefficient
# For each y position generate random x position within +/-50 pix
# of the line base position in each case (x=200 for left, and x=900 for right)
leftx = np.array([200 + (y**2)*quadratic_coeff + np.random.randint(-50, high=51)
for y in ploty])
rightx = np.array([900 + (y**2)*quadratic_coeff + np.random.randint(-50, high=51)
for y in ploty])
leftx = leftx[::-1] # Reverse to match top-to-bottom in y
rightx = rightx[::-1] # Reverse to match top-to-bottom in y
# Fit a second order polynomial to pixel positions in each fake lane line
# left_fit = np.polyfit(ploty, leftx, 2)
# left_fitx = left_fit[0]*ploty**2 + left_fit[1]*ploty + left_fit[2]
# right_fit = np.polyfit(ploty, rightx, 2)
# right_fitx = right_fit[0]*ploty**2 + right_fit[1]*ploty + right_fit[2]
# Define y-value where we want radius of curvature
# I'll choose the maximum y-value, corresponding to the bottom of the image
y_eval = np.max(ploty)
# left_curverad = ((1 + (2*left_fit[0]*y_eval + left_fit[1])**2)**1.5) / np.absolute(2*left_fit[0])
# right_curverad = ((1 + (2*right_fit[0]*y_eval + right_fit[1])**2)**1.5) / np.absolute(2*right_fit[0])
# print(left_curverad, right_curverad)
# Example values: 1926.74 1908.48
# Define conversions in x and y from pixels space to meters
ym_per_pix = 30/720 # meters per pixel in y dimension
xm_per_pix = 3.7/700 # meters per pixel in x dimension
# Fit new polynomials to x,y in world space
left_fit_cr = np.polyfit(ploty * ym_per_pix, leftx * xm_per_pix, 2)
right_fit_cr = np.polyfit(ploty * ym_per_pix, rightx * xm_per_pix, 2)
# Calculate the new radii of curvature
left_curverad = ((1 + (2 * left_fit_cr[0] * y_eval*ym_per_pix + \
left_fit_cr[1]) ** 2) ** 1.5) / np.absolute(2 * left_fit_cr[0])
right_curverad = ((1 + (2 * right_fit_cr[0] * y_eval * ym_per_pix + \
right_fit_cr[1]) ** 2) ** 1.5) / np.absolute(2 * right_fit_cr[0])
# Now our radius of curvature is in meters
# print(left_curverad, 'm', right_curverad, 'm')
# Example values: 632.1 m 626.2 m
lx = self.left_fit[0] * (img.shape[0] - 1)**2 + \
self.left_fit[1] * (img.shape[0] - 1) + \
self.left_fit[2]
rx = self.right_fit[0] * (img.shape[0] - 1)**2 + \
self.right_fit[1] * (img.shape[0] - 1) + \
self.right_fit[2]
# calc car's position in the lane w.r.to center
position = ((img.shape[1] / 2) - ((lx + rx)/2)) * xm_per_pix
# calc mean curvature
mean_curverad = (left_curverad + right_curverad) / 2
# save the car's position
self.car_position = position.round(2)
return mean_curverad
def draw_polygon(self, img, left_fit, right_fit, M_inverse):
''' Draw shaded polygon on the lane between left_fit and right_fit
:param img - undistorted image, on which to draw the lane polygon
:param left_fit - left lane values (x)
:param right_fit - right lane values (x)
:param M_inverse - matrix for inverse transform warping
:return - img - the modified image with polygon
'''
fity = np.linspace(0, img.shape[0] - 1, img.shape[0])
left_fitx = left_fit[0] * fity ** 2 + left_fit[1] * fity + left_fit[2]
right_fitx = right_fit[0] * fity ** 2 + right_fit[1] * fity + right_fit[2]
color_warp = np.zeros_like(img).astype(np.uint8)
# Recast the x and y points into usable format for cv2.fillPoly()
pts_left = np.array( [np.transpose(np.vstack([left_fitx, fity]))] )
pts_right = np.array( [np.flipud(np.transpose(np.vstack([right_fitx, fity])))] )
pts = np.hstack((pts_left, pts_right))
pts = np.array(pts, dtype=np.int32)
# Draw the lane onto the warped blank image
cv2.fillPoly(color_warp, np.int_([pts]), (0,255, 0))
# Warp the blank back to original image space using inverse perspective matrix (Minv)
newwarp = cv2.warpPerspective(color_warp, M_inverse, (img.shape[1], img.shape[0]))
# Combine the result with the original image
result = cv2.addWeighted(img, 1, newwarp, 0.3, 0)
return result
def display_dashboard(self, img, lane_curve):
''' Display a dashboard on the image, with info on
Lane curve (avg)
:param img - image with lane lines
:param lane_curve - the avg lane curvature
:param position
:return modified img
'''
COLOR_LIGHTBLUE = (172,227,239)
COLOR_GOLD = (255, 215, 0)
if self.car_position > 0:
msg = '{}m right of center'.format(self.car_position)
else:
msg = '{}m left of center'.format(np.abs(self.car_position))
cv2.putText(img, 'Lane curve radius: {}m'.format(lane_curve.round()),
(10,50), cv2.FONT_HERSHEY_SIMPLEX, 1,
color=COLOR_GOLD, thickness=2)
cv2.putText(img, 'Car is {}'.format(msg),
(10,80), cv2.FONT_HERSHEY_SIMPLEX, 1,
color=COLOR_GOLD, thickness=2)
cv2.rectangle(img, (5, 10), (480, 100), color=COLOR_GOLD, thickness=2)
return img
| window_fit | identifier_name |
lane_detector.py | import cv2
import numpy as np
class LaneDetector():
''' Lane Detector - performs three key functions:
1a) detects lanes in given image using sliding window algorithm
1b) detects lanes around previously found lanes
2) calculates lane curvature
3) displays lane information
Uses code from Udacity lessons
'''
def __init__(self):
self.left_fit = None
self.right_fit = None
self.leftx = None
self.rightx = None
self.car_position = None
print('(init: LaneDetector)')
def window_fit(self, img):
''' Apply polynomial fit to the given image, returning fit for left/right lanes
Called when one frame of image has previously found left_fit/right_fit.
This method attempts to find lane fits in the vicinity of previous fits
:param img -- input image with lane lines
:return left_fit, right_fit
'''
if self.left_fit is None or self.right_fit is None:
|
# from the next frame of video (also called "binary_warped")
# It's now much easier to find line pixels!
nonzero = img.nonzero()
nonzeroy = np.array(nonzero[0])
nonzerox = np.array(nonzero[1])
margin = 100
left_lane_inds = ((nonzerox > (self.left_fit[0]*(nonzeroy**2) + self.left_fit[1]*nonzeroy + self.left_fit[2] - margin)) &
(nonzerox < (self.left_fit[0]*(nonzeroy**2) + self.left_fit[1]*nonzeroy + self.left_fit[2] + margin)))
right_lane_inds = ((nonzerox > (self.right_fit[0]*(nonzeroy**2) + self.right_fit[1]*nonzeroy + self.right_fit[2] - margin)) &
(nonzerox < (self.right_fit[0]*(nonzeroy**2) + self.right_fit[1]*nonzeroy + self.right_fit[2] + margin)))
# Again, extract left and right line pixel positions
self.leftx = nonzerox[left_lane_inds]
lefty = nonzeroy[left_lane_inds]
self.rightx = nonzerox[right_lane_inds]
righty = nonzeroy[right_lane_inds]
# Fit a second order polynomial to each
self.left_fit = np.polyfit(lefty, self.leftx, 2)
self.right_fit = np.polyfit(righty, self.rightx, 2)
return self.left_fit, self.right_fit
def sliding_window_fit(self, img):
''' Apply sliding windows search to the given image to find polynomial to find lane lines
Code based largely on Udacity lessons
:param img - given image
:return left_fit, right_fit - polynomials fitting the left/right lane lines
'''
y_half = int(img.shape[0]/2)
# take histogram of bottom half of img
histogram = np.sum(img[y_half:, :], axis=0)
# Create an output image to draw on and visualize the result
out_img = np.dstack((img, img, img))*255
# Find the peak of the left and right halves of the histogram
# These will be the starting point for the left and right lines
midpoint = np.int(histogram.shape[0]/2)
leftx_base = np.argmax(histogram[:midpoint])
rightx_base = np.argmax(histogram[midpoint:]) + midpoint
# Choose the number of sliding windows
nwindows = 9
# Set height of windows
window_height = np.int(img.shape[0]/nwindows)
# Identify the x and y positions of all nonzero pixels in the image
nonzero = img.nonzero()
nonzeroy = np.array(nonzero[0])
nonzerox = np.array(nonzero[1])
# Current positions to be updated for each window
leftx_current = leftx_base
rightx_current = rightx_base
# Set the width of the windows +/- margin
margin = 100
# Set minimum number of pixels found to recenter window
minpix = 50
# Create empty lists to receive left and right lane pixel indices
left_lane_inds = []
right_lane_inds = []
for window in range(nwindows):
# Identify window boundaries in x and y (and right and left)
win_y_low = img.shape[0] - (window+1) * window_height
win_y_high = img.shape[0] - window * window_height
win_xleft_low = leftx_current - margin
win_xleft_high = leftx_current + margin
win_xright_low = rightx_current - margin
win_xright_high = rightx_current + margin
# Draw the windows on the visualization image
cv2.rectangle(out_img,(win_xleft_low,win_y_low), (win_xleft_high,win_y_high), (0,255,0), 2)
cv2.rectangle(out_img,(win_xright_low,win_y_low),(win_xright_high,win_y_high),(0,255,0), 2)
# Identify the nonzero pixels in x and y within the window
good_left_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
(nonzerox >= win_xleft_low) & (nonzerox < win_xleft_high)).nonzero()[0]
good_right_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
(nonzerox >= win_xright_low) & (nonzerox < win_xright_high)).nonzero()[0]
# Append these indices to the lists
left_lane_inds.append(good_left_inds)
right_lane_inds.append(good_right_inds)
# If you found > minpix pixels, recenter next window on their mean position
if len(good_left_inds) > minpix:
leftx_current = np.int( np.mean(nonzerox[good_left_inds]) )
if len(good_right_inds) > minpix:
rightx_current = np.int( np.mean(nonzerox[good_right_inds]) )
# Concatenate the arrays of indices
left_lane_inds = np.concatenate(left_lane_inds)
right_lane_inds = np.concatenate(right_lane_inds)
# Extract left and right line pixel positions
self.leftx = nonzerox[left_lane_inds]
lefty = nonzeroy[left_lane_inds]
righty = nonzeroy[right_lane_inds]
self.rightx = nonzerox[right_lane_inds]
# Fit a second order polynomial to each
self.left_fit = np.polyfit(lefty, self.leftx, 2)
self.right_fit = np.polyfit(righty,self.rightx, 2)
return self.left_fit, self.right_fit
def find_lane_curvature(self, img):
''' Find lane curvature for the given img
:param img - the input image
:return lane curvature
'''
# Generate some fake data to represent lane-line pixels
ploty = np.linspace(0, 719, num=720) # to cover same y-range as image
quadratic_coeff = 3e-4 # arbitrary quadratic coefficient
# For each y position generate random x position within +/-50 pix
# of the line base position in each case (x=200 for left, and x=900 for right)
leftx = np.array([200 + (y**2)*quadratic_coeff + np.random.randint(-50, high=51)
for y in ploty])
rightx = np.array([900 + (y**2)*quadratic_coeff + np.random.randint(-50, high=51)
for y in ploty])
leftx = leftx[::-1] # Reverse to match top-to-bottom in y
rightx = rightx[::-1] # Reverse to match top-to-bottom in y
# Fit a second order polynomial to pixel positions in each fake lane line
# left_fit = np.polyfit(ploty, leftx, 2)
# left_fitx = left_fit[0]*ploty**2 + left_fit[1]*ploty + left_fit[2]
# right_fit = np.polyfit(ploty, rightx, 2)
# right_fitx = right_fit[0]*ploty**2 + right_fit[1]*ploty + right_fit[2]
# Define y-value where we want radius of curvature
# I'll choose the maximum y-value, corresponding to the bottom of the image
y_eval = np.max(ploty)
# left_curverad = ((1 + (2*left_fit[0]*y_eval + left_fit[1])**2)**1.5) / np.absolute(2*left_fit[0])
# right_curverad = ((1 + (2*right_fit[0]*y_eval + right_fit[1])**2)**1.5) / np.absolute(2*right_fit[0])
# print(left_curverad, right_curverad)
# Example values: 1926.74 1908.48
# Define conversions in x and y from pixels space to meters
ym_per_pix = 30/720 # meters per pixel in y dimension
xm_per_pix = 3.7/700 # meters per pixel in x dimension
# Fit new polynomials to x,y in world space
left_fit_cr = np.polyfit(ploty * ym_per_pix, leftx * xm_per_pix, 2)
right_fit_cr = np.polyfit(ploty * ym_per_pix, rightx * xm_per_pix, 2)
# Calculate the new radii of curvature
left_curverad = ((1 + (2 * left_fit_cr[0] * y_eval*ym_per_pix + \
left_fit_cr[1]) ** 2) ** 1.5) / np.absolute(2 * left_fit_cr[0])
right_curverad = ((1 + (2 * right_fit_cr[0] * y_eval * ym_per_pix + \
right_fit_cr[1]) ** 2) ** 1.5) / np.absolute(2 * right_fit_cr[0])
# Now our radius of curvature is in meters
# print(left_curverad, 'm', right_curverad, 'm')
# Example values: 632.1 m 626.2 m
lx = self.left_fit[0] * (img.shape[0] - 1)**2 + \
self.left_fit[1] * (img.shape[0] - 1) + \
self.left_fit[2]
rx = self.right_fit[0] * (img.shape[0] - 1)**2 + \
self.right_fit[1] * (img.shape[0] - 1) + \
self.right_fit[2]
# calc car's position in the lane w.r.to center
position = ((img.shape[1] / 2) - ((lx + rx)/2)) * xm_per_pix
# calc mean curvature
mean_curverad = (left_curverad + right_curverad) / 2
# save the car's position
self.car_position = position.round(2)
return mean_curverad
def draw_polygon(self, img, left_fit, right_fit, M_inverse):
''' Draw shaded polygon on the lane between left_fit and right_fit
:param img - undistorted image, on which to draw the lane polygon
:param left_fit - left lane values (x)
:param right_fit - right lane values (x)
:param M_inverse - matrix for inverse transform warping
:return - img - the modified image with polygon
'''
fity = np.linspace(0, img.shape[0] - 1, img.shape[0])
left_fitx = left_fit[0] * fity ** 2 + left_fit[1] * fity + left_fit[2]
right_fitx = right_fit[0] * fity ** 2 + right_fit[1] * fity + right_fit[2]
color_warp = np.zeros_like(img).astype(np.uint8)
# Recast the x and y points into usable format for cv2.fillPoly()
pts_left = np.array( [np.transpose(np.vstack([left_fitx, fity]))] )
pts_right = np.array( [np.flipud(np.transpose(np.vstack([right_fitx, fity])))] )
pts = np.hstack((pts_left, pts_right))
pts = np.array(pts, dtype=np.int32)
# Draw the lane onto the warped blank image
cv2.fillPoly(color_warp, np.int_([pts]), (0,255, 0))
# Warp the blank back to original image space using inverse perspective matrix (Minv)
newwarp = cv2.warpPerspective(color_warp, M_inverse, (img.shape[1], img.shape[0]))
# Combine the result with the original image
result = cv2.addWeighted(img, 1, newwarp, 0.3, 0)
return result
def display_dashboard(self, img, lane_curve):
''' Display a dashboard on the image, with info on
Lane curve (avg)
:param img - image with lane lines
:param lane_curve - the avg lane curvature
:param position
:return modified img
'''
COLOR_LIGHTBLUE = (172,227,239)
COLOR_GOLD = (255, 215, 0)
if self.car_position > 0:
msg = '{}m right of center'.format(self.car_position)
else:
msg = '{}m left of center'.format(np.abs(self.car_position))
cv2.putText(img, 'Lane curve radius: {}m'.format(lane_curve.round()),
(10,50), cv2.FONT_HERSHEY_SIMPLEX, 1,
color=COLOR_GOLD, thickness=2)
cv2.putText(img, 'Car is {}'.format(msg),
(10,80), cv2.FONT_HERSHEY_SIMPLEX, 1,
color=COLOR_GOLD, thickness=2)
cv2.rectangle(img, (5, 10), (480, 100), color=COLOR_GOLD, thickness=2)
return img
| return self.sliding_window_fit(img) | conditional_block |
lane_detector.py | import cv2
import numpy as np
class LaneDetector():
''' Lane Detector - performs three key functions:
1a) detects lanes in given image using sliding window algorithm
1b) detects lanes around previously found lanes
2) calculates lane curvature
3) displays lane information
Uses code from Udacity lessons
'''
def __init__(self):
self.left_fit = None
self.right_fit = None
self.leftx = None
self.rightx = None
self.car_position = None
print('(init: LaneDetector)')
def window_fit(self, img):
|
def sliding_window_fit(self, img):
''' Apply sliding windows search to the given image to find polynomial to find lane lines
Code based largely on Udacity lessons
:param img - given image
:return left_fit, right_fit - polynomials fitting the left/right lane lines
'''
y_half = int(img.shape[0]/2)
# take histogram of bottom half of img
histogram = np.sum(img[y_half:, :], axis=0)
# Create an output image to draw on and visualize the result
out_img = np.dstack((img, img, img))*255
# Find the peak of the left and right halves of the histogram
# These will be the starting point for the left and right lines
midpoint = np.int(histogram.shape[0]/2)
leftx_base = np.argmax(histogram[:midpoint])
rightx_base = np.argmax(histogram[midpoint:]) + midpoint
# Choose the number of sliding windows
nwindows = 9
# Set height of windows
window_height = np.int(img.shape[0]/nwindows)
# Identify the x and y positions of all nonzero pixels in the image
nonzero = img.nonzero()
nonzeroy = np.array(nonzero[0])
nonzerox = np.array(nonzero[1])
# Current positions to be updated for each window
leftx_current = leftx_base
rightx_current = rightx_base
# Set the width of the windows +/- margin
margin = 100
# Set minimum number of pixels found to recenter window
minpix = 50
# Create empty lists to receive left and right lane pixel indices
left_lane_inds = []
right_lane_inds = []
for window in range(nwindows):
# Identify window boundaries in x and y (and right and left)
win_y_low = img.shape[0] - (window+1) * window_height
win_y_high = img.shape[0] - window * window_height
win_xleft_low = leftx_current - margin
win_xleft_high = leftx_current + margin
win_xright_low = rightx_current - margin
win_xright_high = rightx_current + margin
# Draw the windows on the visualization image
cv2.rectangle(out_img,(win_xleft_low,win_y_low), (win_xleft_high,win_y_high), (0,255,0), 2)
cv2.rectangle(out_img,(win_xright_low,win_y_low),(win_xright_high,win_y_high),(0,255,0), 2)
# Identify the nonzero pixels in x and y within the window
good_left_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
(nonzerox >= win_xleft_low) & (nonzerox < win_xleft_high)).nonzero()[0]
good_right_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
(nonzerox >= win_xright_low) & (nonzerox < win_xright_high)).nonzero()[0]
# Append these indices to the lists
left_lane_inds.append(good_left_inds)
right_lane_inds.append(good_right_inds)
# If you found > minpix pixels, recenter next window on their mean position
if len(good_left_inds) > minpix:
leftx_current = np.int( np.mean(nonzerox[good_left_inds]) )
if len(good_right_inds) > minpix:
rightx_current = np.int( np.mean(nonzerox[good_right_inds]) )
# Concatenate the arrays of indices
left_lane_inds = np.concatenate(left_lane_inds)
right_lane_inds = np.concatenate(right_lane_inds)
# Extract left and right line pixel positions
self.leftx = nonzerox[left_lane_inds]
lefty = nonzeroy[left_lane_inds]
righty = nonzeroy[right_lane_inds]
self.rightx = nonzerox[right_lane_inds]
# Fit a second order polynomial to each
self.left_fit = np.polyfit(lefty, self.leftx, 2)
self.right_fit = np.polyfit(righty,self.rightx, 2)
return self.left_fit, self.right_fit
def find_lane_curvature(self, img):
''' Find lane curvature for the given img
:param img - the input image
:return lane curvature
'''
# Generate some fake data to represent lane-line pixels
ploty = np.linspace(0, 719, num=720) # to cover same y-range as image
quadratic_coeff = 3e-4 # arbitrary quadratic coefficient
# For each y position generate random x position within +/-50 pix
# of the line base position in each case (x=200 for left, and x=900 for right)
leftx = np.array([200 + (y**2)*quadratic_coeff + np.random.randint(-50, high=51)
for y in ploty])
rightx = np.array([900 + (y**2)*quadratic_coeff + np.random.randint(-50, high=51)
for y in ploty])
leftx = leftx[::-1] # Reverse to match top-to-bottom in y
rightx = rightx[::-1] # Reverse to match top-to-bottom in y
# Fit a second order polynomial to pixel positions in each fake lane line
# left_fit = np.polyfit(ploty, leftx, 2)
# left_fitx = left_fit[0]*ploty**2 + left_fit[1]*ploty + left_fit[2]
# right_fit = np.polyfit(ploty, rightx, 2)
# right_fitx = right_fit[0]*ploty**2 + right_fit[1]*ploty + right_fit[2]
# Define y-value where we want radius of curvature
# I'll choose the maximum y-value, corresponding to the bottom of the image
y_eval = np.max(ploty)
# left_curverad = ((1 + (2*left_fit[0]*y_eval + left_fit[1])**2)**1.5) / np.absolute(2*left_fit[0])
# right_curverad = ((1 + (2*right_fit[0]*y_eval + right_fit[1])**2)**1.5) / np.absolute(2*right_fit[0])
# print(left_curverad, right_curverad)
# Example values: 1926.74 1908.48
# Define conversions in x and y from pixels space to meters
ym_per_pix = 30/720 # meters per pixel in y dimension
xm_per_pix = 3.7/700 # meters per pixel in x dimension
# Fit new polynomials to x,y in world space
left_fit_cr = np.polyfit(ploty * ym_per_pix, leftx * xm_per_pix, 2)
right_fit_cr = np.polyfit(ploty * ym_per_pix, rightx * xm_per_pix, 2)
# Calculate the new radii of curvature
left_curverad = ((1 + (2 * left_fit_cr[0] * y_eval*ym_per_pix + \
left_fit_cr[1]) ** 2) ** 1.5) / np.absolute(2 * left_fit_cr[0])
right_curverad = ((1 + (2 * right_fit_cr[0] * y_eval * ym_per_pix + \
right_fit_cr[1]) ** 2) ** 1.5) / np.absolute(2 * right_fit_cr[0])
# Now our radius of curvature is in meters
# print(left_curverad, 'm', right_curverad, 'm')
# Example values: 632.1 m 626.2 m
lx = self.left_fit[0] * (img.shape[0] - 1)**2 + \
self.left_fit[1] * (img.shape[0] - 1) + \
self.left_fit[2]
rx = self.right_fit[0] * (img.shape[0] - 1)**2 + \
self.right_fit[1] * (img.shape[0] - 1) + \
self.right_fit[2]
# calc car's position in the lane w.r.to center
position = ((img.shape[1] / 2) - ((lx + rx)/2)) * xm_per_pix
# calc mean curvature
mean_curverad = (left_curverad + right_curverad) / 2
# save the car's position
self.car_position = position.round(2)
return mean_curverad
def draw_polygon(self, img, left_fit, right_fit, M_inverse):
''' Draw shaded polygon on the lane between left_fit and right_fit
:param img - undistorted image, on which to draw the lane polygon
:param left_fit - left lane values (x)
:param right_fit - right lane values (x)
:param M_inverse - matrix for inverse transform warping
:return - img - the modified image with polygon
'''
fity = np.linspace(0, img.shape[0] - 1, img.shape[0])
left_fitx = left_fit[0] * fity ** 2 + left_fit[1] * fity + left_fit[2]
right_fitx = right_fit[0] * fity ** 2 + right_fit[1] * fity + right_fit[2]
color_warp = np.zeros_like(img).astype(np.uint8)
# Recast the x and y points into usable format for cv2.fillPoly()
pts_left = np.array( [np.transpose(np.vstack([left_fitx, fity]))] )
pts_right = np.array( [np.flipud(np.transpose(np.vstack([right_fitx, fity])))] )
pts = np.hstack((pts_left, pts_right))
pts = np.array(pts, dtype=np.int32)
# Draw the lane onto the warped blank image
cv2.fillPoly(color_warp, np.int_([pts]), (0,255, 0))
# Warp the blank back to original image space using inverse perspective matrix (Minv)
newwarp = cv2.warpPerspective(color_warp, M_inverse, (img.shape[1], img.shape[0]))
# Combine the result with the original image
result = cv2.addWeighted(img, 1, newwarp, 0.3, 0)
return result
def display_dashboard(self, img, lane_curve):
''' Display a dashboard on the image, with info on
Lane curve (avg)
:param img - image with lane lines
:param lane_curve - the avg lane curvature
:param position
:return modified img
'''
COLOR_LIGHTBLUE = (172,227,239)
COLOR_GOLD = (255, 215, 0)
if self.car_position > 0:
msg = '{}m right of center'.format(self.car_position)
else:
msg = '{}m left of center'.format(np.abs(self.car_position))
cv2.putText(img, 'Lane curve radius: {}m'.format(lane_curve.round()),
(10,50), cv2.FONT_HERSHEY_SIMPLEX, 1,
color=COLOR_GOLD, thickness=2)
cv2.putText(img, 'Car is {}'.format(msg),
(10,80), cv2.FONT_HERSHEY_SIMPLEX, 1,
color=COLOR_GOLD, thickness=2)
cv2.rectangle(img, (5, 10), (480, 100), color=COLOR_GOLD, thickness=2)
return img
| ''' Apply polynomial fit to the given image, returning fit for left/right lanes
Called when one frame of image has previously found left_fit/right_fit.
This method attempts to find lane fits in the vicinity of previous fits
:param img -- input image with lane lines
:return left_fit, right_fit
'''
if self.left_fit is None or self.right_fit is None:
return self.sliding_window_fit(img)
# from the next frame of video (also called "binary_warped")
# It's now much easier to find line pixels!
nonzero = img.nonzero()
nonzeroy = np.array(nonzero[0])
nonzerox = np.array(nonzero[1])
margin = 100
left_lane_inds = ((nonzerox > (self.left_fit[0]*(nonzeroy**2) + self.left_fit[1]*nonzeroy + self.left_fit[2] - margin)) &
(nonzerox < (self.left_fit[0]*(nonzeroy**2) + self.left_fit[1]*nonzeroy + self.left_fit[2] + margin)))
right_lane_inds = ((nonzerox > (self.right_fit[0]*(nonzeroy**2) + self.right_fit[1]*nonzeroy + self.right_fit[2] - margin)) &
(nonzerox < (self.right_fit[0]*(nonzeroy**2) + self.right_fit[1]*nonzeroy + self.right_fit[2] + margin)))
# Again, extract left and right line pixel positions
self.leftx = nonzerox[left_lane_inds]
lefty = nonzeroy[left_lane_inds]
self.rightx = nonzerox[right_lane_inds]
righty = nonzeroy[right_lane_inds]
# Fit a second order polynomial to each
self.left_fit = np.polyfit(lefty, self.leftx, 2)
self.right_fit = np.polyfit(righty, self.rightx, 2)
return self.left_fit, self.right_fit | identifier_body |
lane_detector.py | import cv2
import numpy as np
class LaneDetector():
''' Lane Detector - performs three key functions:
1a) detects lanes in given image using sliding window algorithm
1b) detects lanes around previously found lanes
2) calculates lane curvature
3) displays lane information
Uses code from Udacity lessons
'''
def __init__(self):
self.left_fit = None
self.right_fit = None
self.leftx = None
self.rightx = None
self.car_position = None
print('(init: LaneDetector)')
def window_fit(self, img):
''' Apply polynomial fit to the given image, returning fit for left/right lanes
Called when one frame of image has previously found left_fit/right_fit.
This method attempts to find lane fits in the vicinity of previous fits
:param img -- input image with lane lines
:return left_fit, right_fit
'''
if self.left_fit is None or self.right_fit is None:
return self.sliding_window_fit(img)
# from the next frame of video (also called "binary_warped")
# It's now much easier to find line pixels!
nonzero = img.nonzero()
nonzeroy = np.array(nonzero[0])
nonzerox = np.array(nonzero[1])
margin = 100
left_lane_inds = ((nonzerox > (self.left_fit[0]*(nonzeroy**2) + self.left_fit[1]*nonzeroy + self.left_fit[2] - margin)) &
(nonzerox < (self.left_fit[0]*(nonzeroy**2) + self.left_fit[1]*nonzeroy + self.left_fit[2] + margin)))
right_lane_inds = ((nonzerox > (self.right_fit[0]*(nonzeroy**2) + self.right_fit[1]*nonzeroy + self.right_fit[2] - margin)) &
(nonzerox < (self.right_fit[0]*(nonzeroy**2) + self.right_fit[1]*nonzeroy + self.right_fit[2] + margin)))
# Again, extract left and right line pixel positions
self.leftx = nonzerox[left_lane_inds]
lefty = nonzeroy[left_lane_inds]
self.rightx = nonzerox[right_lane_inds]
righty = nonzeroy[right_lane_inds]
# Fit a second order polynomial to each
self.left_fit = np.polyfit(lefty, self.leftx, 2)
self.right_fit = np.polyfit(righty, self.rightx, 2)
return self.left_fit, self.right_fit
def sliding_window_fit(self, img):
''' Apply sliding windows search to the given image to find polynomial to find lane lines
Code based largely on Udacity lessons
:param img - given image
:return left_fit, right_fit - polynomials fitting the left/right lane lines
'''
y_half = int(img.shape[0]/2)
# take histogram of bottom half of img
histogram = np.sum(img[y_half:, :], axis=0)
# Create an output image to draw on and visualize the result
out_img = np.dstack((img, img, img))*255
# Find the peak of the left and right halves of the histogram
# These will be the starting point for the left and right lines
midpoint = np.int(histogram.shape[0]/2)
leftx_base = np.argmax(histogram[:midpoint])
rightx_base = np.argmax(histogram[midpoint:]) + midpoint
# Choose the number of sliding windows
nwindows = 9
# Set height of windows
window_height = np.int(img.shape[0]/nwindows)
# Identify the x and y positions of all nonzero pixels in the image
nonzero = img.nonzero()
nonzeroy = np.array(nonzero[0])
nonzerox = np.array(nonzero[1])
# Current positions to be updated for each window
leftx_current = leftx_base
rightx_current = rightx_base
# Set the width of the windows +/- margin
margin = 100
# Set minimum number of pixels found to recenter window
minpix = 50
# Create empty lists to receive left and right lane pixel indices
left_lane_inds = []
right_lane_inds = []
for window in range(nwindows):
# Identify window boundaries in x and y (and right and left)
win_y_low = img.shape[0] - (window+1) * window_height
win_y_high = img.shape[0] - window * window_height
win_xleft_low = leftx_current - margin
win_xleft_high = leftx_current + margin
win_xright_low = rightx_current - margin
win_xright_high = rightx_current + margin
# Draw the windows on the visualization image
cv2.rectangle(out_img,(win_xleft_low,win_y_low), (win_xleft_high,win_y_high), (0,255,0), 2)
cv2.rectangle(out_img,(win_xright_low,win_y_low),(win_xright_high,win_y_high),(0,255,0), 2)
# Identify the nonzero pixels in x and y within the window
good_left_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
(nonzerox >= win_xleft_low) & (nonzerox < win_xleft_high)).nonzero()[0]
good_right_inds = ((nonzeroy >= win_y_low) & (nonzeroy < win_y_high) &
(nonzerox >= win_xright_low) & (nonzerox < win_xright_high)).nonzero()[0]
# Append these indices to the lists
left_lane_inds.append(good_left_inds)
right_lane_inds.append(good_right_inds)
# If you found > minpix pixels, recenter next window on their mean position
if len(good_left_inds) > minpix:
leftx_current = np.int( np.mean(nonzerox[good_left_inds]) )
if len(good_right_inds) > minpix:
rightx_current = np.int( np.mean(nonzerox[good_right_inds]) )
# Concatenate the arrays of indices
left_lane_inds = np.concatenate(left_lane_inds)
right_lane_inds = np.concatenate(right_lane_inds)
# Extract left and right line pixel positions
self.leftx = nonzerox[left_lane_inds]
lefty = nonzeroy[left_lane_inds]
righty = nonzeroy[right_lane_inds]
self.rightx = nonzerox[right_lane_inds]
# Fit a second order polynomial to each
self.left_fit = np.polyfit(lefty, self.leftx, 2)
self.right_fit = np.polyfit(righty,self.rightx, 2)
return self.left_fit, self.right_fit
def find_lane_curvature(self, img):
''' Find lane curvature for the given img
:param img - the input image
:return lane curvature
'''
# Generate some fake data to represent lane-line pixels
ploty = np.linspace(0, 719, num=720) # to cover same y-range as image
quadratic_coeff = 3e-4 # arbitrary quadratic coefficient
# For each y position generate random x position within +/-50 pix
# of the line base position in each case (x=200 for left, and x=900 for right)
leftx = np.array([200 + (y**2)*quadratic_coeff + np.random.randint(-50, high=51)
for y in ploty])
rightx = np.array([900 + (y**2)*quadratic_coeff + np.random.randint(-50, high=51)
for y in ploty])
leftx = leftx[::-1] # Reverse to match top-to-bottom in y
rightx = rightx[::-1] # Reverse to match top-to-bottom in y
# Fit a second order polynomial to pixel positions in each fake lane line
# left_fit = np.polyfit(ploty, leftx, 2)
# left_fitx = left_fit[0]*ploty**2 + left_fit[1]*ploty + left_fit[2]
# right_fit = np.polyfit(ploty, rightx, 2)
# right_fitx = right_fit[0]*ploty**2 + right_fit[1]*ploty + right_fit[2]
# Define y-value where we want radius of curvature
# I'll choose the maximum y-value, corresponding to the bottom of the image
y_eval = np.max(ploty)
# left_curverad = ((1 + (2*left_fit[0]*y_eval + left_fit[1])**2)**1.5) / np.absolute(2*left_fit[0])
# right_curverad = ((1 + (2*right_fit[0]*y_eval + right_fit[1])**2)**1.5) / np.absolute(2*right_fit[0])
# print(left_curverad, right_curverad)
# Example values: 1926.74 1908.48
# Define conversions in x and y from pixels space to meters
ym_per_pix = 30/720 # meters per pixel in y dimension
xm_per_pix = 3.7/700 # meters per pixel in x dimension
# Fit new polynomials to x,y in world space
left_fit_cr = np.polyfit(ploty * ym_per_pix, leftx * xm_per_pix, 2)
right_fit_cr = np.polyfit(ploty * ym_per_pix, rightx * xm_per_pix, 2)
# Calculate the new radii of curvature
left_curverad = ((1 + (2 * left_fit_cr[0] * y_eval*ym_per_pix + \
left_fit_cr[1]) ** 2) ** 1.5) / np.absolute(2 * left_fit_cr[0])
right_curverad = ((1 + (2 * right_fit_cr[0] * y_eval * ym_per_pix + \
right_fit_cr[1]) ** 2) ** 1.5) / np.absolute(2 * right_fit_cr[0])
# Now our radius of curvature is in meters
# print(left_curverad, 'm', right_curverad, 'm')
# Example values: 632.1 m 626.2 m
lx = self.left_fit[0] * (img.shape[0] - 1)**2 + \
self.left_fit[1] * (img.shape[0] - 1) + \
self.left_fit[2]
rx = self.right_fit[0] * (img.shape[0] - 1)**2 + \
self.right_fit[1] * (img.shape[0] - 1) + \
self.right_fit[2]
# calc car's position in the lane w.r.to center
position = ((img.shape[1] / 2) - ((lx + rx)/2)) * xm_per_pix
# calc mean curvature
mean_curverad = (left_curverad + right_curverad) / 2
# save the car's position
self.car_position = position.round(2)
return mean_curverad
| :param img - undistorted image, on which to draw the lane polygon
:param left_fit - left lane values (x)
:param right_fit - right lane values (x)
:param M_inverse - matrix for inverse transform warping
:return - img - the modified image with polygon
'''
fity = np.linspace(0, img.shape[0] - 1, img.shape[0])
left_fitx = left_fit[0] * fity ** 2 + left_fit[1] * fity + left_fit[2]
right_fitx = right_fit[0] * fity ** 2 + right_fit[1] * fity + right_fit[2]
color_warp = np.zeros_like(img).astype(np.uint8)
# Recast the x and y points into usable format for cv2.fillPoly()
pts_left = np.array( [np.transpose(np.vstack([left_fitx, fity]))] )
pts_right = np.array( [np.flipud(np.transpose(np.vstack([right_fitx, fity])))] )
pts = np.hstack((pts_left, pts_right))
pts = np.array(pts, dtype=np.int32)
# Draw the lane onto the warped blank image
cv2.fillPoly(color_warp, np.int_([pts]), (0,255, 0))
# Warp the blank back to original image space using inverse perspective matrix (Minv)
newwarp = cv2.warpPerspective(color_warp, M_inverse, (img.shape[1], img.shape[0]))
# Combine the result with the original image
result = cv2.addWeighted(img, 1, newwarp, 0.3, 0)
return result
def display_dashboard(self, img, lane_curve):
''' Display a dashboard on the image, with info on
Lane curve (avg)
:param img - image with lane lines
:param lane_curve - the avg lane curvature
:param position
:return modified img
'''
COLOR_LIGHTBLUE = (172,227,239)
COLOR_GOLD = (255, 215, 0)
if self.car_position > 0:
msg = '{}m right of center'.format(self.car_position)
else:
msg = '{}m left of center'.format(np.abs(self.car_position))
cv2.putText(img, 'Lane curve radius: {}m'.format(lane_curve.round()),
(10,50), cv2.FONT_HERSHEY_SIMPLEX, 1,
color=COLOR_GOLD, thickness=2)
cv2.putText(img, 'Car is {}'.format(msg),
(10,80), cv2.FONT_HERSHEY_SIMPLEX, 1,
color=COLOR_GOLD, thickness=2)
cv2.rectangle(img, (5, 10), (480, 100), color=COLOR_GOLD, thickness=2)
return img | def draw_polygon(self, img, left_fit, right_fit, M_inverse):
''' Draw shaded polygon on the lane between left_fit and right_fit | random_line_split |
quiz.js | // 선긋기*************************************************************************
// svg : 생성
function CESVG (target, type) {
var svgContainer = document.createElementNS('http://www.w3.org/2000/svg', type);
target.appendChild(svgContainer);
return svgContainer;
}
// 효과음 기본 설정
function efSound (src) {
var efAudio = new Audio;
var efPlay = function () {
efAudio.removeEventListener('loadeddata', efPlay);
efAudio.play();
};
efAudio.src = src;
efAudio.addEventListener('loadeddata', efPlay);
efAudio.load();
}
// 드래그&드랍 : 설정
function DragDrop (param) {
this.element = param.element;
this.parentElment = window;
this.createDragDrop(param);
}
// 드래그&드랍 : 위치 이동
DragDrop.prototype.createDragDrop = function (param) {
var dragObj = this,
left = param.left + param.width,
top = param.top + param.height,
answerLine = (param.quizName !== 'dragLine') ? null : CESVG(QS('.quiz_'+ param.quizNum +' .svgContainer'), 'path'),
startDrag = function (e) {
e.preventDefault();
var eventMaster = eventSelector('eventDown', e);
dragObj.element.style.zIndex = 3;
dragObj.offY = eventMaster.clientY - (dragObj.element.offsetTop * GameManager.event.zoomRate);
dragObj.offX = eventMaster.clientX - (dragObj.element.offsetLeft * GameManager.event.zoomRate);
dragObj.element.addEventListener(GameManager.event.eventSelector('eventMove'), drag, true);
dragObj.element.addEventListener(GameManager.event.eventSelector('eventOut'), endDrag, true);
},
drag = function (e) {
e.preventDefault();
var eventMaster = eventSelector('eventMove', e);
dragObj.newY = eventMaster.clientY - dragObj.offY;
dragObj.newX = eventMaster.clientX - dragObj.offX;
dragObj.element.style.left = (dragObj.newX / GameManager.event.zoomRate) + 'px';
dragObj.element.style.top = (dragObj.newY / GameManager.event.zoomRate) + 'px';
var newLeft = (dragObj.newX + param.width * GameManager.event.zoomRate) / GameManager.event.zoomRate;
var newTop = (dragObj.newY + param.height * GameManager.event.zoomRate) / GameManager.event.zoomRate;
if (answerLine !== null) {
answerLine.setAttribute('d', 'M '+ left +' '+ top + ' L '+ newLeft +' '+ newTop);
}
},
endDrag = function (e) {
e.preventDefault();
var eventMaster = eventSelector('eventUp', e);
dragObj.element.removeEventListener(GameManager.event.eventSelector('eventMove'), drag, true);
dragObj.element.style.zIndex = 3;
param.callBack(e, param);
}
dragObj.element.addEventListener(GameManager.event.eventSelector('eventDown'), startDrag, true);
dragObj.element.addEventListener(GameManager.event.eventSelector('eventUp'), endDrag, true);
};
// ********************************************************************************
// 퀴즈 : 기본 설정
var QUIZ = QUIZ || {};
QUIZ = (function(){
var quizObj = {
objCount: null,
totalCount: 0,
quizNameArray: [],
aniArray: [],
aniId: null,
init: function () {
var quiz = QSAll('.quiz'),
popupQuiz = QSAll('.popupPageContainer .quiz');
this.objCount = new Array(quiz.length);
for (var i = 0; i < quiz.length; i++) {
var quizNameArray = quiz[i].getAttribute('quiz'),
quizNum = i + 1;
this.objCount[i] = 0;
quiz[i].classList.add('quiz_' + quizNum);
quiz[i].setAttribute('idx', quizNum);
if (quizNameArray !== null) {
quizNameArray = (new String(quizNameArray).indexOf(',') > -1) ? quizNameArray.split(',') : [quizNameArray];
this.start(quizNum, quizNameArray);
}
else {
console.log('noQuiz');
}
}
},
start: function (quizNum, quizName) {
this.quizNameArray.push(quizName);
for (var i = 0; i < quizName.length; i++) this[quizName[i]]['init'](quizNum);
}
}
return quizObj;
})();
// 퀴즈 : dragLine 이벤트
QUIZ.dragLine = {
name: 'dragLine',
dragLineObj: null,
dropArea: null,
path: null,
objSize: {width: null, height: null},
dropSize: {width: null, height: null},
dropPosition: [],
objPosition: [],
// 초기 설정
init: function (quizNum) { console.log('>>>>>> dragLine');
var svgContainer = CESVG(QS('.quiz_'+ quizNum), 'svg');
svgContainer.setAttribute('class', 'svgContainer');
// $('.svgContainer').insertBefore($('.questionDotWrap'));
this.append(quizNum);
QUIZ.objCount[quizNum-1] += this.dragLineObj.length;
for (var i = 0; i < this.dragLineObj.length; i++) {
this.dragLineObj[i].style.cursor = 'pointer';
this.dragLineObj[i].setAttribute('value', i + 1);
new DragDrop({
quizNum: quizNum,
quizName: this.name,
element: this.dragLineObj[i],
top: this.objPosition[i].top,
left: this.objPosition[i].left,
width: this.objSize.width,
height: this.objSize.height,
callBack: function (e, param) {
var eventMaster = eventSelector('eventUp', e),
dropArea = QSAll('.quiz_'+ param.quizNum +' .lineDropArea'),
answerCount = 0;
if (eventMaster !== undefined && QUIZ.dragLine.dropCompare(param.quizNum, this, dropArea, eventMaster.clientX, eventMaster.clientY)) {
QUIZ.dragLine.setDragObjPosition(param.quizNum, this, param, true);
} else {
QUIZ.dragLine.setDragObjPosition(param.quizNum, this, param, false);
}
}
});
}
this.path = QSAll('.quiz_'+ quizNum +' .svgContainer > path');
for (var i = 0; i < this.path.length; i++) {
this.path[i].setAttribute('class', 'answerLine');
this.path[i].setAttribute('value', this.dragLineObj[i].getAttribute('value'));
}
},
// 각 요소 위치 저장
append: function (quizNum) {
var svgContainer = QS('.quiz_'+ quizNum + ' .svgContainer');
this.dragLineObj = QSAll('.quiz_'+ quizNum +' .dragLineObj');
this.dropArea = QSAll('.quiz_'+ quizNum +' .lineDropArea');
//this.objSize.width = QS('.quiz_'+ quizNum +' .dragLineObj').offsetWidth / 2;
//this.objSize.height = QS('.quiz_'+ quizNum +' .dragLineObj').offsetHeight / 2;
this.objSize.width = dlo[quizNum-1].width[0] / 2;
this.objSize.height = dlo[quizNum-1].height[0] / 2;
this.dropSize.width = QS('.quiz_'+ quizNum +' .lineDropArea').offsetWidth / 2;
this.dropSize.height = QS('.quiz_'+ quizNum +' .lineDropArea').offsetHeight / 2;
this.dropPosition = [];
this.objPosition = [];
for (var i = 0; i < this.dropArea.length; i++) {
this.dropPosition.push({top: this.dropArea[i].offsetTop, left: this.dropArea[i].offsetLeft});
}
for (var i = 0; i < this.dragLineObj.length; i++) {
this.objPosition.push({top: dlo[quizNum-1].top[i], left: dlo[quizNum-1].left[i]});
//this.objPosition.push({top: this.dragLineObj[i].offsetTop, left: this.dragLineObj[i].offsetLeft});
}
},
// 드랍 영역 체크
dropCompare: function (quizNum, dragObj, dropArea, x, y) {
var dragObjValue = dragObj.element !== undefined ? dragObj.element.getAttribute('value') : dragObj.getAttribute('value'),
allDap = false,
result;
//변수처리함 .
var dotLeft = dragObj.element.parentNode.parentNode.getElementsByClassName('dLeft');
var dotRight = dragObj.element.parentNode.parentNode.getElementsByClassName('dRight');
for (var i = 0; i < dropArea.length; i++) {
var dropValue = dropArea[i].getAttribute('value').indexOf(',') > -1 ? dropArea[i].getAttribute('value').split(',') : [dropArea[i].getAttribute('value')],
dropAreaCss = dropArea[i].getBoundingClientRect();
if (x === undefined && y === undefined) allDap = true;
var comparePosition = x >= dropAreaCss.left &&
x <= (dropAreaCss.left + dropAreaCss.width) &&
y >= dropAreaCss.top &&
y <= dropAreaCss.top + dropAreaCss.height;
if (comparePosition || allDap) {
for (var j = 0; j < dropValue.length; j++) {
if (dragObjValue == dropValue[j]) {
//var dLeft = QSAll('.dLeft');
//var dRight = QSAll('.dRight');
var dLeft = dotLeft;//신규추가
var dRight = dotRight;//신규추가
dLeft[dragObjValue-1].childNodes[0].style.backgroundColor = '#000';
dRight[dragObjValue-1].childNodes[0].style.backgroundColor = '#000';
result = true;
}
}
if (result === undefined) result = false;
}
}
return result;
},
setDragObjPosition: function (quizNum, dragObj, param, type) {
console.log('setDragObjPosition')
var obj = dragObj.element !== undefined ? dragObj.element : dragObj,
idx = obj.getAttribute('value') - 1,
top, left, targetPath, value, dropTop, dropLeft;
QUIZ.dragLine.append(quizNum);
this.path = QSAll('.quiz_' + quizNum +' .svgContainer > path');
for (var i = 0; i < this.path.length; i++) {
if (obj.getAttribute('value') == this.path[i].getAttribute('value')) {
targetPath = this.path[i];
}
}
value = targetPath.getAttribute('value');
for (var i = 0; i < this.dropArea.length; i++) {
if (obj.getAttribute('value') == this.dropArea[i].getAttribute('value')) {
dropTop = this.dropArea[i].offsetTop + this.dropSize.width;
dropLeft = this.dropArea[i].offsetLeft + this.dropSize.height;
}
}
obj.style.left = param.left + 'px';
obj.style.top = param.top + 'px';
left = param.left + param.width;
top = param.top + param.height;
if (type) {
obj.style.pointerEvents = 'none';
obj.classList.add(this.name + 'Complete');
targetPath.setAttribute('d', 'M '+ left +' '+ top + ' L '+ dropLeft +' '+ dropTop);
} else {
targetPath.setAttribute('d', 'M '+ 0 +' '+ 0 + ' L '+ 0 +' '+ 0);
}
//console.error(obj.parentNode.parentNode.parentNode.parentNode.getAttribute('data-qid'));
var dataQid = obj.parentNode.parentNode.parentNode.parentNode.getAttribute('data-qid');
DTCaliperSensor.fire({
correct: null, // 정답 여부입력 [true, false] 중에서 택일
itemObject: document.querySelector('[data-qid='+dataQid+']'), // 해당 문항 객체
value: '' // 실제 정답 데이터 입력 <correctResponse>에 입력된 값이랑 동일 | });
},
// 드래그&드랍 성공
COMPLETE: function (correct, quizNum) {
QUIZ.dragLine.append(quizNum);
this.path = QSAll('.quiz_' + quizNum +' .svgContainer > path');
if (correct) {
for (var i = 0, path, value, left1, top1, left2, top2; i < this.path.length; i++) {
path = this.path[i];
value = this.dropArea[i].getAttribute('value') - 1;
left1 = this.objPosition[i].left + this.objSize.width;
top1 = this.objPosition[i].top + this.objSize.height;
left2 = this.dropPosition[value].left + this.dropSize.width;
top2 = this.dropPosition[value].top + this.dropSize.height;
path.setAttribute('d', 'M '+ left1 +' '+ top1 + ' L '+ left2 +' '+ top2);
}
for (var i = 0; i < this.dragLineObj.length; i++) {
this.dragLineObj[i].style.pointerEvents = 'none';
this.dragLineObj[i].classList.add(this.name + 'Complete');
}
}
else {
for (var i = 0; i < this.path.length; i++) {
this.path[i].setAttribute('d', 'M '+ 0 +' '+ 0 + ' L '+ 0 +' '+ 0);
}
for (var i = 0; i < this.dragLineObj.length; i++) {
this.dragLineObj[i].style.pointerEvents = 'auto';
this.dragLineObj[i].classList.remove(this.name + 'Complete');
}
}
},
resetAll:function(obj){//전체 리셋 신규추가
var len = obj.find('.svgContainer').children().length;
for(i=0;i<len;i++){
obj.find('.svgContainer').children()[i].setAttribute('d', 'M '+ 0 +' '+ 0 + ' L '+ 0 +' '+ 0);
}
obj.find('.drawBox').each(function(){
$(this).removeClass('dragLineComplete');
//var style_txt = $(this).attr('style').replace('z-index: 3; pointer-events: none;','');
//$(this).attr('style',style_txt);
$(this).css({'z-index':'3','pointer-events':'initial'});
});
obj.find('.dLeft').each(function(){
$(this).children().attr('style','');
});
obj.find('.dRight').each(function(index){
$(this).children().attr('style','');
});
//obj.find('.hold').prop('style','');
},
reset:function(obj,idx){//개별리셋 신규추가
var len = obj.find('.svgContainer').children().length;
obj.find('.svgContainer').children()[idx].setAttribute('d', 'M '+ 0 +' '+ 0 + ' L '+ 0 +' '+ 0);
obj.find('.drawBox').each(function(index){
if(idx===index){
$(this).removeClass('dragLineComplete');
//var style_txt = $(this).attr('style').replace('z-index: 3; pointer-events: none;','');
//$(this).attr('style',style_txt);
$(this).css({'z-index':'3','pointer-events':'initial'});
}
});
obj.find('.dLeft').each(function(index){
if(idx===index){
$(this).children().attr('style','');
}
});
obj.find('.dRight').each(function(index){
if(idx===index){
$(this).children().attr('style','');
}
});
}
}
//console.error('test',QUIZ.dragLine.reset);
//QS('.dRight');
//전체 리셋버튼
$('.answer_cancel').on('click',function(){
var obj = $(this).parent();
QUIZ.dragLine.resetAll(obj);
});
//개별 리셋
$('.dot.dLeft').on('click',function(){
var idx = $(this).index();
var obj = $(this).parent().parent();
QUIZ.dragLine.reset(obj,idx);
}); | random_line_split | |
quiz.js |
// 선긋기*************************************************************************
// svg : 생성
function CESVG (target, type) {
var svgContainer = document.createElementNS('http://www.w3.org/2000/svg', type);
target.appendChild(svgContainer);
return svgContainer;
}
// 효과음 기본 설정
function efSound (src) {
var efAudio = new | on DragDrop (param) {
this.element = param.element;
this.parentElment = window;
this.createDragDrop(param);
}
// 드래그&드랍 : 위치 이동
DragDrop.prototype.createDragDrop = function (param) {
var dragObj = this,
left = param.left + param.width,
top = param.top + param.height,
answerLine = (param.quizName !== 'dragLine') ? null : CESVG(QS('.quiz_'+ param.quizNum +' .svgContainer'), 'path'),
startDrag = function (e) {
e.preventDefault();
var eventMaster = eventSelector('eventDown', e);
dragObj.element.style.zIndex = 3;
dragObj.offY = eventMaster.clientY - (dragObj.element.offsetTop * GameManager.event.zoomRate);
dragObj.offX = eventMaster.clientX - (dragObj.element.offsetLeft * GameManager.event.zoomRate);
dragObj.element.addEventListener(GameManager.event.eventSelector('eventMove'), drag, true);
dragObj.element.addEventListener(GameManager.event.eventSelector('eventOut'), endDrag, true);
},
drag = function (e) {
e.preventDefault();
var eventMaster = eventSelector('eventMove', e);
dragObj.newY = eventMaster.clientY - dragObj.offY;
dragObj.newX = eventMaster.clientX - dragObj.offX;
dragObj.element.style.left = (dragObj.newX / GameManager.event.zoomRate) + 'px';
dragObj.element.style.top = (dragObj.newY / GameManager.event.zoomRate) + 'px';
var newLeft = (dragObj.newX + param.width * GameManager.event.zoomRate) / GameManager.event.zoomRate;
var newTop = (dragObj.newY + param.height * GameManager.event.zoomRate) / GameManager.event.zoomRate;
if (answerLine !== null) {
answerLine.setAttribute('d', 'M '+ left +' '+ top + ' L '+ newLeft +' '+ newTop);
}
},
endDrag = function (e) {
e.preventDefault();
var eventMaster = eventSelector('eventUp', e);
dragObj.element.removeEventListener(GameManager.event.eventSelector('eventMove'), drag, true);
dragObj.element.style.zIndex = 3;
param.callBack(e, param);
}
dragObj.element.addEventListener(GameManager.event.eventSelector('eventDown'), startDrag, true);
dragObj.element.addEventListener(GameManager.event.eventSelector('eventUp'), endDrag, true);
};
// ********************************************************************************
// 퀴즈 : 기본 설정
var QUIZ = QUIZ || {};
QUIZ = (function(){
var quizObj = {
objCount: null,
totalCount: 0,
quizNameArray: [],
aniArray: [],
aniId: null,
init: function () {
var quiz = QSAll('.quiz'),
popupQuiz = QSAll('.popupPageContainer .quiz');
this.objCount = new Array(quiz.length);
for (var i = 0; i < quiz.length; i++) {
var quizNameArray = quiz[i].getAttribute('quiz'),
quizNum = i + 1;
this.objCount[i] = 0;
quiz[i].classList.add('quiz_' + quizNum);
quiz[i].setAttribute('idx', quizNum);
if (quizNameArray !== null) {
quizNameArray = (new String(quizNameArray).indexOf(',') > -1) ? quizNameArray.split(',') : [quizNameArray];
this.start(quizNum, quizNameArray);
}
else {
console.log('noQuiz');
}
}
},
start: function (quizNum, quizName) {
this.quizNameArray.push(quizName);
for (var i = 0; i < quizName.length; i++) this[quizName[i]]['init'](quizNum);
}
}
return quizObj;
})();
// 퀴즈 : dragLine 이벤트
QUIZ.dragLine = {
name: 'dragLine',
dragLineObj: null,
dropArea: null,
path: null,
objSize: {width: null, height: null},
dropSize: {width: null, height: null},
dropPosition: [],
objPosition: [],
// 초기 설정
init: function (quizNum) { console.log('>>>>>> dragLine');
var svgContainer = CESVG(QS('.quiz_'+ quizNum), 'svg');
svgContainer.setAttribute('class', 'svgContainer');
// $('.svgContainer').insertBefore($('.questionDotWrap'));
this.append(quizNum);
QUIZ.objCount[quizNum-1] += this.dragLineObj.length;
for (var i = 0; i < this.dragLineObj.length; i++) {
this.dragLineObj[i].style.cursor = 'pointer';
this.dragLineObj[i].setAttribute('value', i + 1);
new DragDrop({
quizNum: quizNum,
quizName: this.name,
element: this.dragLineObj[i],
top: this.objPosition[i].top,
left: this.objPosition[i].left,
width: this.objSize.width,
height: this.objSize.height,
callBack: function (e, param) {
var eventMaster = eventSelector('eventUp', e),
dropArea = QSAll('.quiz_'+ param.quizNum +' .lineDropArea'),
answerCount = 0;
if (eventMaster !== undefined && QUIZ.dragLine.dropCompare(param.quizNum, this, dropArea, eventMaster.clientX, eventMaster.clientY)) {
QUIZ.dragLine.setDragObjPosition(param.quizNum, this, param, true);
} else {
QUIZ.dragLine.setDragObjPosition(param.quizNum, this, param, false);
}
}
});
}
this.path = QSAll('.quiz_'+ quizNum +' .svgContainer > path');
for (var i = 0; i < this.path.length; i++) {
this.path[i].setAttribute('class', 'answerLine');
this.path[i].setAttribute('value', this.dragLineObj[i].getAttribute('value'));
}
},
// 각 요소 위치 저장
append: function (quizNum) {
var svgContainer = QS('.quiz_'+ quizNum + ' .svgContainer');
this.dragLineObj = QSAll('.quiz_'+ quizNum +' .dragLineObj');
this.dropArea = QSAll('.quiz_'+ quizNum +' .lineDropArea');
//this.objSize.width = QS('.quiz_'+ quizNum +' .dragLineObj').offsetWidth / 2;
//this.objSize.height = QS('.quiz_'+ quizNum +' .dragLineObj').offsetHeight / 2;
this.objSize.width = dlo[quizNum-1].width[0] / 2;
this.objSize.height = dlo[quizNum-1].height[0] / 2;
this.dropSize.width = QS('.quiz_'+ quizNum +' .lineDropArea').offsetWidth / 2;
this.dropSize.height = QS('.quiz_'+ quizNum +' .lineDropArea').offsetHeight / 2;
this.dropPosition = [];
this.objPosition = [];
for (var i = 0; i < this.dropArea.length; i++) {
this.dropPosition.push({top: this.dropArea[i].offsetTop, left: this.dropArea[i].offsetLeft});
}
for (var i = 0; i < this.dragLineObj.length; i++) {
this.objPosition.push({top: dlo[quizNum-1].top[i], left: dlo[quizNum-1].left[i]});
//this.objPosition.push({top: this.dragLineObj[i].offsetTop, left: this.dragLineObj[i].offsetLeft});
}
},
// 드랍 영역 체크
dropCompare: function (quizNum, dragObj, dropArea, x, y) {
var dragObjValue = dragObj.element !== undefined ? dragObj.element.getAttribute('value') : dragObj.getAttribute('value'),
allDap = false,
result;
//변수처리함 .
var dotLeft = dragObj.element.parentNode.parentNode.getElementsByClassName('dLeft');
var dotRight = dragObj.element.parentNode.parentNode.getElementsByClassName('dRight');
for (var i = 0; i < dropArea.length; i++) {
var dropValue = dropArea[i].getAttribute('value').indexOf(',') > -1 ? dropArea[i].getAttribute('value').split(',') : [dropArea[i].getAttribute('value')],
dropAreaCss = dropArea[i].getBoundingClientRect();
if (x === undefined && y === undefined) allDap = true;
var comparePosition = x >= dropAreaCss.left &&
x <= (dropAreaCss.left + dropAreaCss.width) &&
y >= dropAreaCss.top &&
y <= dropAreaCss.top + dropAreaCss.height;
if (comparePosition || allDap) {
for (var j = 0; j < dropValue.length; j++) {
if (dragObjValue == dropValue[j]) {
//var dLeft = QSAll('.dLeft');
//var dRight = QSAll('.dRight');
var dLeft = dotLeft;//신규추가
var dRight = dotRight;//신규추가
dLeft[dragObjValue-1].childNodes[0].style.backgroundColor = '#000';
dRight[dragObjValue-1].childNodes[0].style.backgroundColor = '#000';
result = true;
}
}
if (result === undefined) result = false;
}
}
return result;
},
setDragObjPosition: function (quizNum, dragObj, param, type) {
console.log('setDragObjPosition')
var obj = dragObj.element !== undefined ? dragObj.element : dragObj,
idx = obj.getAttribute('value') - 1,
top, left, targetPath, value, dropTop, dropLeft;
QUIZ.dragLine.append(quizNum);
this.path = QSAll('.quiz_' + quizNum +' .svgContainer > path');
for (var i = 0; i < this.path.length; i++) {
if (obj.getAttribute('value') == this.path[i].getAttribute('value')) {
targetPath = this.path[i];
}
}
value = targetPath.getAttribute('value');
for (var i = 0; i < this.dropArea.length; i++) {
if (obj.getAttribute('value') == this.dropArea[i].getAttribute('value')) {
dropTop = this.dropArea[i].offsetTop + this.dropSize.width;
dropLeft = this.dropArea[i].offsetLeft + this.dropSize.height;
}
}
obj.style.left = param.left + 'px';
obj.style.top = param.top + 'px';
left = param.left + param.width;
top = param.top + param.height;
if (type) {
obj.style.pointerEvents = 'none';
obj.classList.add(this.name + 'Complete');
targetPath.setAttribute('d', 'M '+ left +' '+ top + ' L '+ dropLeft +' '+ dropTop);
} else {
targetPath.setAttribute('d', 'M '+ 0 +' '+ 0 + ' L '+ 0 +' '+ 0);
}
//console.error(obj.parentNode.parentNode.parentNode.parentNode.getAttribute('data-qid'));
var dataQid = obj.parentNode.parentNode.parentNode.parentNode.getAttribute('data-qid');
DTCaliperSensor.fire({
correct: null, // 정답 여부입력 [true, false] 중에서 택일
itemObject: document.querySelector('[data-qid='+dataQid+']'), // 해당 문항 객체
value: '' // 실제 정답 데이터 입력 <correctResponse>에 입력된 값이랑 동일
});
},
// 드래그&드랍 성공
COMPLETE: function (correct, quizNum) {
QUIZ.dragLine.append(quizNum);
this.path = QSAll('.quiz_' + quizNum +' .svgContainer > path');
if (correct) {
for (var i = 0, path, value, left1, top1, left2, top2; i < this.path.length; i++) {
path = this.path[i];
value = this.dropArea[i].getAttribute('value') - 1;
left1 = this.objPosition[i].left + this.objSize.width;
top1 = this.objPosition[i].top + this.objSize.height;
left2 = this.dropPosition[value].left + this.dropSize.width;
top2 = this.dropPosition[value].top + this.dropSize.height;
path.setAttribute('d', 'M '+ left1 +' '+ top1 + ' L '+ left2 +' '+ top2);
}
for (var i = 0; i < this.dragLineObj.length; i++) {
this.dragLineObj[i].style.pointerEvents = 'none';
this.dragLineObj[i].classList.add(this.name + 'Complete');
}
}
else {
for (var i = 0; i < this.path.length; i++) {
this.path[i].setAttribute('d', 'M '+ 0 +' '+ 0 + ' L '+ 0 +' '+ 0);
}
for (var i = 0; i < this.dragLineObj.length; i++) {
this.dragLineObj[i].style.pointerEvents = 'auto';
this.dragLineObj[i].classList.remove(this.name + 'Complete');
}
}
},
resetAll:function(obj){//전체 리셋 신규추가
var len = obj.find('.svgContainer').children().length;
for(i=0;i<len;i++){
obj.find('.svgContainer').children()[i].setAttribute('d', 'M '+ 0 +' '+ 0 + ' L '+ 0 +' '+ 0);
}
obj.find('.drawBox').each(function(){
$(this).removeClass('dragLineComplete');
//var style_txt = $(this).attr('style').replace('z-index: 3; pointer-events: none;','');
//$(this).attr('style',style_txt);
$(this).css({'z-index':'3','pointer-events':'initial'});
});
obj.find('.dLeft').each(function(){
$(this).children().attr('style','');
});
obj.find('.dRight').each(function(index){
$(this).children().attr('style','');
});
//obj.find('.hold').prop('style','');
},
reset:function(obj,idx){//개별리셋 신규추가
var len = obj.find('.svgContainer').children().length;
obj.find('.svgContainer').children()[idx].setAttribute('d', 'M '+ 0 +' '+ 0 + ' L '+ 0 +' '+ 0);
obj.find('.drawBox').each(function(index){
if(idx===index){
$(this).removeClass('dragLineComplete');
//var style_txt = $(this).attr('style').replace('z-index: 3; pointer-events: none;','');
//$(this).attr('style',style_txt);
$(this).css({'z-index':'3','pointer-events':'initial'});
}
});
obj.find('.dLeft').each(function(index){
if(idx===index){
$(this).children().attr('style','');
}
});
obj.find('.dRight').each(function(index){
if(idx===index){
$(this).children().attr('style','');
}
});
}
}
//console.error('test',QUIZ.dragLine.reset);
//QS('.dRight');
//전체 리셋버튼
$('.answer_cancel').on('click',function(){
var obj = $(this).parent();
QUIZ.dragLine.resetAll(obj);
});
//개별 리셋
$('.dot.dLeft').on('click',function(){
var idx = $(this).index();
var obj = $(this).parent().parent();
QUIZ.dragLine.reset(obj,idx);
});
| Audio;
var efPlay = function () {
efAudio.removeEventListener('loadeddata', efPlay);
efAudio.play();
};
efAudio.src = src;
efAudio.addEventListener('loadeddata', efPlay);
efAudio.load();
}
// 드래그&드랍 : 설정
functi | identifier_body |
quiz.js |
// 선긋기*************************************************************************
// svg : 생성
function CESVG (tar | type) {
var svgContainer = document.createElementNS('http://www.w3.org/2000/svg', type);
target.appendChild(svgContainer);
return svgContainer;
}
// 효과음 기본 설정
function efSound (src) {
var efAudio = new Audio;
var efPlay = function () {
efAudio.removeEventListener('loadeddata', efPlay);
efAudio.play();
};
efAudio.src = src;
efAudio.addEventListener('loadeddata', efPlay);
efAudio.load();
}
// 드래그&드랍 : 설정
function DragDrop (param) {
this.element = param.element;
this.parentElment = window;
this.createDragDrop(param);
}
// 드래그&드랍 : 위치 이동
DragDrop.prototype.createDragDrop = function (param) {
var dragObj = this,
left = param.left + param.width,
top = param.top + param.height,
answerLine = (param.quizName !== 'dragLine') ? null : CESVG(QS('.quiz_'+ param.quizNum +' .svgContainer'), 'path'),
startDrag = function (e) {
e.preventDefault();
var eventMaster = eventSelector('eventDown', e);
dragObj.element.style.zIndex = 3;
dragObj.offY = eventMaster.clientY - (dragObj.element.offsetTop * GameManager.event.zoomRate);
dragObj.offX = eventMaster.clientX - (dragObj.element.offsetLeft * GameManager.event.zoomRate);
dragObj.element.addEventListener(GameManager.event.eventSelector('eventMove'), drag, true);
dragObj.element.addEventListener(GameManager.event.eventSelector('eventOut'), endDrag, true);
},
drag = function (e) {
e.preventDefault();
var eventMaster = eventSelector('eventMove', e);
dragObj.newY = eventMaster.clientY - dragObj.offY;
dragObj.newX = eventMaster.clientX - dragObj.offX;
dragObj.element.style.left = (dragObj.newX / GameManager.event.zoomRate) + 'px';
dragObj.element.style.top = (dragObj.newY / GameManager.event.zoomRate) + 'px';
var newLeft = (dragObj.newX + param.width * GameManager.event.zoomRate) / GameManager.event.zoomRate;
var newTop = (dragObj.newY + param.height * GameManager.event.zoomRate) / GameManager.event.zoomRate;
if (answerLine !== null) {
answerLine.setAttribute('d', 'M '+ left +' '+ top + ' L '+ newLeft +' '+ newTop);
}
},
endDrag = function (e) {
e.preventDefault();
var eventMaster = eventSelector('eventUp', e);
dragObj.element.removeEventListener(GameManager.event.eventSelector('eventMove'), drag, true);
dragObj.element.style.zIndex = 3;
param.callBack(e, param);
}
dragObj.element.addEventListener(GameManager.event.eventSelector('eventDown'), startDrag, true);
dragObj.element.addEventListener(GameManager.event.eventSelector('eventUp'), endDrag, true);
};
// ********************************************************************************
// 퀴즈 : 기본 설정
var QUIZ = QUIZ || {};
QUIZ = (function(){
var quizObj = {
objCount: null,
totalCount: 0,
quizNameArray: [],
aniArray: [],
aniId: null,
init: function () {
var quiz = QSAll('.quiz'),
popupQuiz = QSAll('.popupPageContainer .quiz');
this.objCount = new Array(quiz.length);
for (var i = 0; i < quiz.length; i++) {
var quizNameArray = quiz[i].getAttribute('quiz'),
quizNum = i + 1;
this.objCount[i] = 0;
quiz[i].classList.add('quiz_' + quizNum);
quiz[i].setAttribute('idx', quizNum);
if (quizNameArray !== null) {
quizNameArray = (new String(quizNameArray).indexOf(',') > -1) ? quizNameArray.split(',') : [quizNameArray];
this.start(quizNum, quizNameArray);
}
else {
console.log('noQuiz');
}
}
},
start: function (quizNum, quizName) {
this.quizNameArray.push(quizName);
for (var i = 0; i < quizName.length; i++) this[quizName[i]]['init'](quizNum);
}
}
return quizObj;
})();
// 퀴즈 : dragLine 이벤트
QUIZ.dragLine = {
name: 'dragLine',
dragLineObj: null,
dropArea: null,
path: null,
objSize: {width: null, height: null},
dropSize: {width: null, height: null},
dropPosition: [],
objPosition: [],
// 초기 설정
init: function (quizNum) { console.log('>>>>>> dragLine');
var svgContainer = CESVG(QS('.quiz_'+ quizNum), 'svg');
svgContainer.setAttribute('class', 'svgContainer');
// $('.svgContainer').insertBefore($('.questionDotWrap'));
this.append(quizNum);
QUIZ.objCount[quizNum-1] += this.dragLineObj.length;
for (var i = 0; i < this.dragLineObj.length; i++) {
this.dragLineObj[i].style.cursor = 'pointer';
this.dragLineObj[i].setAttribute('value', i + 1);
new DragDrop({
quizNum: quizNum,
quizName: this.name,
element: this.dragLineObj[i],
top: this.objPosition[i].top,
left: this.objPosition[i].left,
width: this.objSize.width,
height: this.objSize.height,
callBack: function (e, param) {
var eventMaster = eventSelector('eventUp', e),
dropArea = QSAll('.quiz_'+ param.quizNum +' .lineDropArea'),
answerCount = 0;
if (eventMaster !== undefined && QUIZ.dragLine.dropCompare(param.quizNum, this, dropArea, eventMaster.clientX, eventMaster.clientY)) {
QUIZ.dragLine.setDragObjPosition(param.quizNum, this, param, true);
} else {
QUIZ.dragLine.setDragObjPosition(param.quizNum, this, param, false);
}
}
});
}
this.path = QSAll('.quiz_'+ quizNum +' .svgContainer > path');
for (var i = 0; i < this.path.length; i++) {
this.path[i].setAttribute('class', 'answerLine');
this.path[i].setAttribute('value', this.dragLineObj[i].getAttribute('value'));
}
},
// 각 요소 위치 저장
append: function (quizNum) {
var svgContainer = QS('.quiz_'+ quizNum + ' .svgContainer');
this.dragLineObj = QSAll('.quiz_'+ quizNum +' .dragLineObj');
this.dropArea = QSAll('.quiz_'+ quizNum +' .lineDropArea');
//this.objSize.width = QS('.quiz_'+ quizNum +' .dragLineObj').offsetWidth / 2;
//this.objSize.height = QS('.quiz_'+ quizNum +' .dragLineObj').offsetHeight / 2;
this.objSize.width = dlo[quizNum-1].width[0] / 2;
this.objSize.height = dlo[quizNum-1].height[0] / 2;
this.dropSize.width = QS('.quiz_'+ quizNum +' .lineDropArea').offsetWidth / 2;
this.dropSize.height = QS('.quiz_'+ quizNum +' .lineDropArea').offsetHeight / 2;
this.dropPosition = [];
this.objPosition = [];
for (var i = 0; i < this.dropArea.length; i++) {
this.dropPosition.push({top: this.dropArea[i].offsetTop, left: this.dropArea[i].offsetLeft});
}
for (var i = 0; i < this.dragLineObj.length; i++) {
this.objPosition.push({top: dlo[quizNum-1].top[i], left: dlo[quizNum-1].left[i]});
//this.objPosition.push({top: this.dragLineObj[i].offsetTop, left: this.dragLineObj[i].offsetLeft});
}
},
// 드랍 영역 체크
dropCompare: function (quizNum, dragObj, dropArea, x, y) {
var dragObjValue = dragObj.element !== undefined ? dragObj.element.getAttribute('value') : dragObj.getAttribute('value'),
allDap = false,
result;
//변수처리함 .
var dotLeft = dragObj.element.parentNode.parentNode.getElementsByClassName('dLeft');
var dotRight = dragObj.element.parentNode.parentNode.getElementsByClassName('dRight');
for (var i = 0; i < dropArea.length; i++) {
var dropValue = dropArea[i].getAttribute('value').indexOf(',') > -1 ? dropArea[i].getAttribute('value').split(',') : [dropArea[i].getAttribute('value')],
dropAreaCss = dropArea[i].getBoundingClientRect();
if (x === undefined && y === undefined) allDap = true;
var comparePosition = x >= dropAreaCss.left &&
x <= (dropAreaCss.left + dropAreaCss.width) &&
y >= dropAreaCss.top &&
y <= dropAreaCss.top + dropAreaCss.height;
if (comparePosition || allDap) {
for (var j = 0; j < dropValue.length; j++) {
if (dragObjValue == dropValue[j]) {
//var dLeft = QSAll('.dLeft');
//var dRight = QSAll('.dRight');
var dLeft = dotLeft;//신규추가
var dRight = dotRight;//신규추가
dLeft[dragObjValue-1].childNodes[0].style.backgroundColor = '#000';
dRight[dragObjValue-1].childNodes[0].style.backgroundColor = '#000';
result = true;
}
}
if (result === undefined) result = false;
}
}
return result;
},
setDragObjPosition: function (quizNum, dragObj, param, type) {
console.log('setDragObjPosition')
var obj = dragObj.element !== undefined ? dragObj.element : dragObj,
idx = obj.getAttribute('value') - 1,
top, left, targetPath, value, dropTop, dropLeft;
QUIZ.dragLine.append(quizNum);
this.path = QSAll('.quiz_' + quizNum +' .svgContainer > path');
for (var i = 0; i < this.path.length; i++) {
if (obj.getAttribute('value') == this.path[i].getAttribute('value')) {
targetPath = this.path[i];
}
}
value = targetPath.getAttribute('value');
for (var i = 0; i < this.dropArea.length; i++) {
if (obj.getAttribute('value') == this.dropArea[i].getAttribute('value')) {
dropTop = this.dropArea[i].offsetTop + this.dropSize.width;
dropLeft = this.dropArea[i].offsetLeft + this.dropSize.height;
}
}
obj.style.left = param.left + 'px';
obj.style.top = param.top + 'px';
left = param.left + param.width;
top = param.top + param.height;
if (type) {
obj.style.pointerEvents = 'none';
obj.classList.add(this.name + 'Complete');
targetPath.setAttribute('d', 'M '+ left +' '+ top + ' L '+ dropLeft +' '+ dropTop);
} else {
targetPath.setAttribute('d', 'M '+ 0 +' '+ 0 + ' L '+ 0 +' '+ 0);
}
//console.error(obj.parentNode.parentNode.parentNode.parentNode.getAttribute('data-qid'));
var dataQid = obj.parentNode.parentNode.parentNode.parentNode.getAttribute('data-qid');
DTCaliperSensor.fire({
correct: null, // 정답 여부입력 [true, false] 중에서 택일
itemObject: document.querySelector('[data-qid='+dataQid+']'), // 해당 문항 객체
value: '' // 실제 정답 데이터 입력 <correctResponse>에 입력된 값이랑 동일
});
},
// 드래그&드랍 성공
COMPLETE: function (correct, quizNum) {
QUIZ.dragLine.append(quizNum);
this.path = QSAll('.quiz_' + quizNum +' .svgContainer > path');
if (correct) {
for (var i = 0, path, value, left1, top1, left2, top2; i < this.path.length; i++) {
path = this.path[i];
value = this.dropArea[i].getAttribute('value') - 1;
left1 = this.objPosition[i].left + this.objSize.width;
top1 = this.objPosition[i].top + this.objSize.height;
left2 = this.dropPosition[value].left + this.dropSize.width;
top2 = this.dropPosition[value].top + this.dropSize.height;
path.setAttribute('d', 'M '+ left1 +' '+ top1 + ' L '+ left2 +' '+ top2);
}
for (var i = 0; i < this.dragLineObj.length; i++) {
this.dragLineObj[i].style.pointerEvents = 'none';
this.dragLineObj[i].classList.add(this.name + 'Complete');
}
}
else {
for (var i = 0; i < this.path.length; i++) {
this.path[i].setAttribute('d', 'M '+ 0 +' '+ 0 + ' L '+ 0 +' '+ 0);
}
for (var i = 0; i < this.dragLineObj.length; i++) {
this.dragLineObj[i].style.pointerEvents = 'auto';
this.dragLineObj[i].classList.remove(this.name + 'Complete');
}
}
},
resetAll:function(obj){//전체 리셋 신규추가
var len = obj.find('.svgContainer').children().length;
for(i=0;i<len;i++){
obj.find('.svgContainer').children()[i].setAttribute('d', 'M '+ 0 +' '+ 0 + ' L '+ 0 +' '+ 0);
}
obj.find('.drawBox').each(function(){
$(this).removeClass('dragLineComplete');
//var style_txt = $(this).attr('style').replace('z-index: 3; pointer-events: none;','');
//$(this).attr('style',style_txt);
$(this).css({'z-index':'3','pointer-events':'initial'});
});
obj.find('.dLeft').each(function(){
$(this).children().attr('style','');
});
obj.find('.dRight').each(function(index){
$(this).children().attr('style','');
});
//obj.find('.hold').prop('style','');
},
reset:function(obj,idx){//개별리셋 신규추가
var len = obj.find('.svgContainer').children().length;
obj.find('.svgContainer').children()[idx].setAttribute('d', 'M '+ 0 +' '+ 0 + ' L '+ 0 +' '+ 0);
obj.find('.drawBox').each(function(index){
if(idx===index){
$(this).removeClass('dragLineComplete');
//var style_txt = $(this).attr('style').replace('z-index: 3; pointer-events: none;','');
//$(this).attr('style',style_txt);
$(this).css({'z-index':'3','pointer-events':'initial'});
}
});
obj.find('.dLeft').each(function(index){
if(idx===index){
$(this).children().attr('style','');
}
});
obj.find('.dRight').each(function(index){
if(idx===index){
$(this).children().attr('style','');
}
});
}
}
//console.error('test',QUIZ.dragLine.reset);
//QS('.dRight');
//전체 리셋버튼
$('.answer_cancel').on('click',function(){
var obj = $(this).parent();
QUIZ.dragLine.resetAll(obj);
});
//개별 리셋
$('.dot.dLeft').on('click',function(){
var idx = $(this).index();
var obj = $(this).parent().parent();
QUIZ.dragLine.reset(obj,idx);
});
| get, | identifier_name |
quiz.js |
// 선긋기*************************************************************************
// svg : 생성
function CESVG (target, type) {
var svgContainer = document.createElementNS('http://www.w3.org/2000/svg', type);
target.appendChild(svgContainer);
return svgContainer;
}
// 효과음 기본 설정
function efSound (src) {
var efAudio = new Audio;
var efPlay = function () {
efAudio.removeEventListener('loadeddata', efPlay);
efAudio.play();
};
efAudio.src = src;
efAudio.addEventListener('loadeddata', efPlay);
efAudio.load();
}
// 드래그&드랍 : 설정
function DragDrop (param) {
this.element = param.element;
this.parentElment = window;
this.createDragDrop(param);
}
// 드래그&드랍 : 위치 이동
DragDrop.prototype.createDragDrop = function (param) {
var dragObj = this,
left = param.left + param.width,
top = param.top + param.height,
answerLine = (param.quizName !== 'dragLine') ? null : CESVG(QS('.quiz_'+ param.quizNum +' .svgContainer'), 'path'),
startDrag = function (e) {
e.preventDefault();
var eventMaster = eventSelector('eventDown', e);
dragObj.element.style.zIndex = 3;
dragObj.offY = eventMaster.clientY - (dragObj.element.offsetTop * GameManager.event.zoomRate);
dragObj.offX = eventMaster.clientX - (dragObj.element.offsetLeft * GameManager.event.zoomRate);
dragObj.element.addEventListener(GameManager.event.eventSelector('eventMove'), drag, true);
dragObj.element.addEventListener(GameManager.event.eventSelector('eventOut'), endDrag, true);
},
drag = function (e) {
e.preventDefault();
var eventMaster = eventSelector('eventMove', e);
dragObj.newY = eventMaster.clientY - dragObj.offY;
dragObj.newX = eventMaster.clientX - dragObj.offX;
dragObj.element.style.left = (dragObj.newX / GameManager.event.zoomRate) + 'px';
dragObj.element.style.top = (dragObj.newY / GameManager.event.zoomRate) + 'px';
var newLeft = (dragObj.newX + param.width * GameManager.event.zoomRate) / GameManager.event.zoomRate;
var newTop = (dragObj.newY + param.height * GameManager.event.zoomRate) / GameManager.event.zoomRate;
if (answerLine !== null) {
answerLine.setAttribute('d', 'M '+ left +' '+ top + ' L '+ newLeft +' '+ newTop);
}
},
endDrag = function (e) {
e.preventDefault();
var eventMaster = eventSelector('eventUp', e);
dragObj.element.removeEventListener(GameManager.event.eventSelector('eventMove'), drag, true);
dragObj.element.style.zIndex = 3;
param.callBack(e, param);
}
dragObj.element.addEventListener(GameManager.event.eventSelector('eventDown'), startDrag, true);
dragObj.element.addEventListener(GameManager.event.eventSelector('eventUp'), endDrag, true);
};
// ********************************************************************************
// 퀴즈 : 기본 설정
var QUIZ = QUIZ || {};
QUIZ = (function(){
var quizObj = {
objCount: null,
totalCount: 0,
quizNameArray: [],
aniArray: [],
aniId: null,
init: function () {
var quiz = QSAll('.quiz'),
popupQuiz = QSAll('.popupPageContainer .quiz');
this.objCount = new Array(quiz.length);
for (var i = 0; i < quiz.length; i++) {
var quizNameArray = quiz[i].getAttribute('quiz'),
quizNum = i + 1;
this.objCount[i] = 0;
quiz[i].classList.add('quiz_' + quizNum);
quiz[i].setAttribute('idx', quizNum);
if (quizNameArray !== null) {
quizNameArray = (new String(quizNameArray).indexOf(',') > -1) | function (quizNum, quizName) {
this.quizNameArray.push(quizName);
for (var i = 0; i < quizName.length; i++) this[quizName[i]]['init'](quizNum);
}
}
return quizObj;
})();
// 퀴즈 : dragLine 이벤트
QUIZ.dragLine = {
name: 'dragLine',
dragLineObj: null,
dropArea: null,
path: null,
objSize: {width: null, height: null},
dropSize: {width: null, height: null},
dropPosition: [],
objPosition: [],
// 초기 설정
init: function (quizNum) { console.log('>>>>>> dragLine');
var svgContainer = CESVG(QS('.quiz_'+ quizNum), 'svg');
svgContainer.setAttribute('class', 'svgContainer');
// $('.svgContainer').insertBefore($('.questionDotWrap'));
this.append(quizNum);
QUIZ.objCount[quizNum-1] += this.dragLineObj.length;
for (var i = 0; i < this.dragLineObj.length; i++) {
this.dragLineObj[i].style.cursor = 'pointer';
this.dragLineObj[i].setAttribute('value', i + 1);
new DragDrop({
quizNum: quizNum,
quizName: this.name,
element: this.dragLineObj[i],
top: this.objPosition[i].top,
left: this.objPosition[i].left,
width: this.objSize.width,
height: this.objSize.height,
callBack: function (e, param) {
var eventMaster = eventSelector('eventUp', e),
dropArea = QSAll('.quiz_'+ param.quizNum +' .lineDropArea'),
answerCount = 0;
if (eventMaster !== undefined && QUIZ.dragLine.dropCompare(param.quizNum, this, dropArea, eventMaster.clientX, eventMaster.clientY)) {
QUIZ.dragLine.setDragObjPosition(param.quizNum, this, param, true);
} else {
QUIZ.dragLine.setDragObjPosition(param.quizNum, this, param, false);
}
}
});
}
this.path = QSAll('.quiz_'+ quizNum +' .svgContainer > path');
for (var i = 0; i < this.path.length; i++) {
this.path[i].setAttribute('class', 'answerLine');
this.path[i].setAttribute('value', this.dragLineObj[i].getAttribute('value'));
}
},
// 각 요소 위치 저장
append: function (quizNum) {
var svgContainer = QS('.quiz_'+ quizNum + ' .svgContainer');
this.dragLineObj = QSAll('.quiz_'+ quizNum +' .dragLineObj');
this.dropArea = QSAll('.quiz_'+ quizNum +' .lineDropArea');
//this.objSize.width = QS('.quiz_'+ quizNum +' .dragLineObj').offsetWidth / 2;
//this.objSize.height = QS('.quiz_'+ quizNum +' .dragLineObj').offsetHeight / 2;
this.objSize.width = dlo[quizNum-1].width[0] / 2;
this.objSize.height = dlo[quizNum-1].height[0] / 2;
this.dropSize.width = QS('.quiz_'+ quizNum +' .lineDropArea').offsetWidth / 2;
this.dropSize.height = QS('.quiz_'+ quizNum +' .lineDropArea').offsetHeight / 2;
this.dropPosition = [];
this.objPosition = [];
for (var i = 0; i < this.dropArea.length; i++) {
this.dropPosition.push({top: this.dropArea[i].offsetTop, left: this.dropArea[i].offsetLeft});
}
for (var i = 0; i < this.dragLineObj.length; i++) {
this.objPosition.push({top: dlo[quizNum-1].top[i], left: dlo[quizNum-1].left[i]});
//this.objPosition.push({top: this.dragLineObj[i].offsetTop, left: this.dragLineObj[i].offsetLeft});
}
},
// 드랍 영역 체크
dropCompare: function (quizNum, dragObj, dropArea, x, y) {
var dragObjValue = dragObj.element !== undefined ? dragObj.element.getAttribute('value') : dragObj.getAttribute('value'),
allDap = false,
result;
//변수처리함 .
var dotLeft = dragObj.element.parentNode.parentNode.getElementsByClassName('dLeft');
var dotRight = dragObj.element.parentNode.parentNode.getElementsByClassName('dRight');
for (var i = 0; i < dropArea.length; i++) {
var dropValue = dropArea[i].getAttribute('value').indexOf(',') > -1 ? dropArea[i].getAttribute('value').split(',') : [dropArea[i].getAttribute('value')],
dropAreaCss = dropArea[i].getBoundingClientRect();
if (x === undefined && y === undefined) allDap = true;
var comparePosition = x >= dropAreaCss.left &&
x <= (dropAreaCss.left + dropAreaCss.width) &&
y >= dropAreaCss.top &&
y <= dropAreaCss.top + dropAreaCss.height;
if (comparePosition || allDap) {
for (var j = 0; j < dropValue.length; j++) {
if (dragObjValue == dropValue[j]) {
//var dLeft = QSAll('.dLeft');
//var dRight = QSAll('.dRight');
var dLeft = dotLeft;//신규추가
var dRight = dotRight;//신규추가
dLeft[dragObjValue-1].childNodes[0].style.backgroundColor = '#000';
dRight[dragObjValue-1].childNodes[0].style.backgroundColor = '#000';
result = true;
}
}
if (result === undefined) result = false;
}
}
return result;
},
setDragObjPosition: function (quizNum, dragObj, param, type) {
console.log('setDragObjPosition')
var obj = dragObj.element !== undefined ? dragObj.element : dragObj,
idx = obj.getAttribute('value') - 1,
top, left, targetPath, value, dropTop, dropLeft;
QUIZ.dragLine.append(quizNum);
this.path = QSAll('.quiz_' + quizNum +' .svgContainer > path');
for (var i = 0; i < this.path.length; i++) {
if (obj.getAttribute('value') == this.path[i].getAttribute('value')) {
targetPath = this.path[i];
}
}
value = targetPath.getAttribute('value');
for (var i = 0; i < this.dropArea.length; i++) {
if (obj.getAttribute('value') == this.dropArea[i].getAttribute('value')) {
dropTop = this.dropArea[i].offsetTop + this.dropSize.width;
dropLeft = this.dropArea[i].offsetLeft + this.dropSize.height;
}
}
obj.style.left = param.left + 'px';
obj.style.top = param.top + 'px';
left = param.left + param.width;
top = param.top + param.height;
if (type) {
obj.style.pointerEvents = 'none';
obj.classList.add(this.name + 'Complete');
targetPath.setAttribute('d', 'M '+ left +' '+ top + ' L '+ dropLeft +' '+ dropTop);
} else {
targetPath.setAttribute('d', 'M '+ 0 +' '+ 0 + ' L '+ 0 +' '+ 0);
}
//console.error(obj.parentNode.parentNode.parentNode.parentNode.getAttribute('data-qid'));
var dataQid = obj.parentNode.parentNode.parentNode.parentNode.getAttribute('data-qid');
DTCaliperSensor.fire({
correct: null, // 정답 여부입력 [true, false] 중에서 택일
itemObject: document.querySelector('[data-qid='+dataQid+']'), // 해당 문항 객체
value: '' // 실제 정답 데이터 입력 <correctResponse>에 입력된 값이랑 동일
});
},
// 드래그&드랍 성공
COMPLETE: function (correct, quizNum) {
QUIZ.dragLine.append(quizNum);
this.path = QSAll('.quiz_' + quizNum +' .svgContainer > path');
if (correct) {
for (var i = 0, path, value, left1, top1, left2, top2; i < this.path.length; i++) {
path = this.path[i];
value = this.dropArea[i].getAttribute('value') - 1;
left1 = this.objPosition[i].left + this.objSize.width;
top1 = this.objPosition[i].top + this.objSize.height;
left2 = this.dropPosition[value].left + this.dropSize.width;
top2 = this.dropPosition[value].top + this.dropSize.height;
path.setAttribute('d', 'M '+ left1 +' '+ top1 + ' L '+ left2 +' '+ top2);
}
for (var i = 0; i < this.dragLineObj.length; i++) {
this.dragLineObj[i].style.pointerEvents = 'none';
this.dragLineObj[i].classList.add(this.name + 'Complete');
}
}
else {
for (var i = 0; i < this.path.length; i++) {
this.path[i].setAttribute('d', 'M '+ 0 +' '+ 0 + ' L '+ 0 +' '+ 0);
}
for (var i = 0; i < this.dragLineObj.length; i++) {
this.dragLineObj[i].style.pointerEvents = 'auto';
this.dragLineObj[i].classList.remove(this.name + 'Complete');
}
}
},
resetAll:function(obj){//전체 리셋 신규추가
var len = obj.find('.svgContainer').children().length;
for(i=0;i<len;i++){
obj.find('.svgContainer').children()[i].setAttribute('d', 'M '+ 0 +' '+ 0 + ' L '+ 0 +' '+ 0);
}
obj.find('.drawBox').each(function(){
$(this).removeClass('dragLineComplete');
//var style_txt = $(this).attr('style').replace('z-index: 3; pointer-events: none;','');
//$(this).attr('style',style_txt);
$(this).css({'z-index':'3','pointer-events':'initial'});
});
obj.find('.dLeft').each(function(){
$(this).children().attr('style','');
});
obj.find('.dRight').each(function(index){
$(this).children().attr('style','');
});
//obj.find('.hold').prop('style','');
},
reset:function(obj,idx){//개별리셋 신규추가
var len = obj.find('.svgContainer').children().length;
obj.find('.svgContainer').children()[idx].setAttribute('d', 'M '+ 0 +' '+ 0 + ' L '+ 0 +' '+ 0);
obj.find('.drawBox').each(function(index){
if(idx===index){
$(this).removeClass('dragLineComplete');
//var style_txt = $(this).attr('style').replace('z-index: 3; pointer-events: none;','');
//$(this).attr('style',style_txt);
$(this).css({'z-index':'3','pointer-events':'initial'});
}
});
obj.find('.dLeft').each(function(index){
if(idx===index){
$(this).children().attr('style','');
}
});
obj.find('.dRight').each(function(index){
if(idx===index){
$(this).children().attr('style','');
}
});
}
}
//console.error('test',QUIZ.dragLine.reset);
//QS('.dRight');
//전체 리셋버튼
$('.answer_cancel').on('click',function(){
var obj = $(this).parent();
QUIZ.dragLine.resetAll(obj);
});
//개별 리셋
$('.dot.dLeft').on('click',function(){
var idx = $(this).index();
var obj = $(this).parent().parent();
QUIZ.dragLine.reset(obj,idx);
});
| ? quizNameArray.split(',') : [quizNameArray];
this.start(quizNum, quizNameArray);
}
else {
console.log('noQuiz');
}
}
},
start: | conditional_block |
intraday.py | """
IntradayPriceManager connects to TradingView and stores indicators and price series in an
in memory dictionary self._alerts. These indicators are then published to slack periodically.
"""
import datetime
import json
import pandas as pd
import random
import re
import string
import time
import threading
import websocket
from utilfns.slack import send_alert
class IntradayPriceManager():
def __init__(self, debug=False):
self._alerts = {
"indicators": {},
"price": {}
} # In-memory dict of alerts to be sent out to slack
self._debug = debug
self._histbars = 300
self._indicators = []
self._slackchannel = "C01UACFTMTK" # TODO: Shift to config
self._slackfreq = 300 # Every 5 mins
self._state = {}
self._syms = [
"BINANCE:UNIUSD", "BINANCE:ETHUSD", "BINANCE:DOTUSD", "SGX:ES3",
"SGX:CLR"
]
self._t = None
self._timeframe = 240 # Default to 4 hours chart
self._ws_url = "wss://data.tradingview.com/socket.io/websocket"
def get(self, type: str, **kwargs):
"""
Type is either quote (live) or chart (historical + live)
Support kwargs:
syms: list of symbols, e.g. [BINANCE:ETHUSD]
indicators: list of indicators, e.g. [rsi]
timeframe: int of minutes of chart time frame, e.g. 240 -> 4 hours chart
histbars: int of number of historical data points, e.g. 300
"""
websocket.enableTrace(True)
ws = websocket.WebSocketApp(
self._ws_url,
on_open=lambda ws: self.on_open(ws, type, **kwargs),
on_close=self.on_close,
on_message=lambda ws, message: self.on_message(ws, message),
on_error=self.on_error)
ws.run_forever()
def send_slack(self):
"""
Periodic slack alerts - Indicators
"""
while True:
indicators = self._alerts.get("indicators")
if indicators:
res = pd.DataFrame(indicators).transpose().reset_index()
res.rename(columns={"index": "sym"}, inplace=True)
send_alert(self._slackchannel, [("Indicators", res)])
time.sleep(self._slackfreq)
def on_message(self, ws, message):
pattern = re.compile(r'~m~\d+~m~~h~\d+$')
if pattern.match(message):
ws.send(message)
else:
msg_body = re.compile(r'~m~\d+~m~')
messages = msg_body.split(message)
for msg in messages:
if msg:
parsed_msg = json.loads(msg)
params = parsed_msg.get("p")
if parsed_msg.get("m") == "timescale_update":
# timescale_update -> initial historical data
# TODO: handling of these data for plotting on UI
continue
if parsed_msg.get("m") == "du":
# du -> data update
sym = self._state.get(params[0]).get("sym")
now = datetime.datetime.now().strftime(
'%Y-%m-%d %H:%M:%S')
for k, v in params[1].items():
if v.get("st"):
# study
indicator = k.split("_")[0]
vals = v.get("st")[0].get("v")
val = vals[1]
val_dict = {"dtime": now, indicator: val}
# print({sym: val_dict})
if not self._alerts["indicators"].get(sym):
|
self._alerts["indicators"][sym][
indicator] = val
elif v.get("s"):
# series
vals = v.get("s")[0].get("v")
val_dict = dict(
zip([
"dtime", "open", "high", "low", "last",
"vol"
], vals))
val_dict["dtime"] = now
# print({sym: val_dict})
if not self._alerts["price"].get(sym):
self._alerts["price"][sym] = {}
self._alerts["price"][sym]["last"] = val_dict[
"last"]
@staticmethod
def on_error(ws, error):
print(error)
@staticmethod
def on_close(ws):
print("### closed ###")
def on_open(self, ws, type: str, **kwargs):
def run(*args, **kwargs):
# ~m~52~m~{"m":"quote_create_session","p":["qs_3bDnffZvz5ur"]}
# ~m~395~m~{"m":"quote_set_fields","p":["qs_3bDnffZvz5ur","ch","chp","lp"]}
# ~m~89~m~{"m":"quote_add_symbols","p":["qs_3bDnffZvz5ur","SP:SPX",{"flags":["force_permission"]}]}
# ~m~315~m~{"m":"quote_fast_symbols","p":["qs_3bDnffZvz5ur","SP:SPX","TVC:NDX","CBOE:VIX","TVC:DXY","SGX:ES3","NASDAQ:AAPL","NASDAQ:MSFT","NASDAQ:TSLA","TVC:USOIL","TVC:GOLD","TVC:SILVER","FX:AUDUSD","FX:EURUSD","FX:GBPUSD","FX:USDJPY","BITSTAMP:BTCUSD","BITSTAMP:ETHUSD","COINBASE:UNIUSD","BINANCE:DOGEUSD","BINANCE:DOTUSD"]}
syms = kwargs.get("syms") or self._syms
timeframe = f'{kwargs.get("timeframe") or self._timeframe}'
indicators = kwargs.get("indicators") or self._indicators
histbars = kwargs.get("histbars") or self._histbars
send = self._send
send(ws, "set_auth_token", ["unauthorized_user_token"])
# Quote session
if not args or (args and args[0] == "quote"):
session = self._gen_session() # Quote session ID
send(ws, "quote_create_session", [session])
send(ws, "quote_set_fields", [session, "lp", "volume"])
[ws.send(self._add_symbol(session, s)) for s in syms]
send(ws, "quote_fast_symbols", [session, *syms])
send(ws, "quote_hibernate_all", [session])
# Chart session - Prefer to use this over quote sessions since it has a historical series
else:
for i, sym in enumerate(syms):
# Each ticker warrants a separate chart session ID
c_session = self._gen_session(type="chart")
self._state[c_session] = {
"sym": sym,
"indicators": [],
"series": [],
"timeframe": timeframe
}
# Users are allowed to select specific tickers
send(ws, "chart_create_session", [c_session, ""])
send(ws, "switch_timezone", [c_session, "Asia/Singapore"])
send(ws, "resolve_symbol", [
c_session, f"symbol_{i}",
self._add_chart_symbol(sym)
])
# s (in resp) -> series
self._state[c_session].get("series").append(f"s_{i}")
send(ws, "create_series", [
c_session, f"s_{i}", f"s_{i}", f"symbol_{i}",
timeframe, histbars
])
for indicator in indicators:
# Users are allowed to select specific indicators
# st (in resp) -> study
self._state[c_session].get("indicators").append(
f"{indicator}_{i}")
send(ws, "create_study", [
c_session, f"{indicator}_{i}", f"{indicator}_{i}",
f"s_{i}", "Script@tv-scripting-101!",
self._indicator_mapper(indicator)
])
self._t = threading.Thread(target=run, args=(type, ), kwargs=kwargs)
self._t.setDaemon(True)
self._t.start()
def _send(self, ws, func, params):
""" Client sends msg to websockets server """
ws.send(self._create_msg(func, params))
def _indicator_mapper(self, indicator: str) -> dict:
""" Indicator params that are accepted by the tv server """
return {
"rsi": {
"text":
"1f0fkZ72S0de2geyaUhXXw==_xwY73vljRXeew69Rl27RumLDs6aJ9NLsTYN9Xrht254BTb8uSOgccpLDt/cdRWopwJPNZx40m19yEFwJFswkSi62X4guNJYpXe4A6S9iq2n+OXM6mqWeWzDbjTl0lYmEf1ujbg7i3FvUdV/zCSrqd+iwnvvZSV+O2acpfNLpUlDdB6PZX4Y9y8tlQLWA2PiF8CVJng7DF1LPeecWC4fv+lNg+s5OXU46AjIhc+TFu8DOwiuKjNh7wWz6EZ7gpQS3",
"pineId": "STD;RSI",
"pineVersion": "12.0",
"in_2": {
"v": "",
"f": True,
"t": "resolution"
},
"in_0": {
"v": 14,
"f": True,
"t": "integer"
},
"in_1": {
"v": "close",
"f": True,
"t": "source"
}
}
}.get(indicator.lower())
def _create_msg(self, func, params):
""" _create_msg("set_auth_token", "unauthorized_user_token") """
msg = self._prepend_header(json.dumps({"m": func, "p": params}))
if self._debug:
print("DEBUG:", msg)
return msg
def _gen_session(self, type="chart"):
# ~m~52~m~{"m":"quote_create_session","p":["qs_3bDnffZvz5ur"]}
session = ""
if type == "quote":
session = "qs_"
elif type == "chart":
session = "cs_"
else:
raise Exception("Invalid session type")
return session + "".join(random.choices(string.ascii_letters, k=12))
def _add_symbol(self, quote_session: str, sym: str):
""" Quote symbol: _add_symbol("3bDnffZvz5ur", "BINANCE:UNIUSD") """
return self._create_msg("quote_add_symbols", [quote_session, sym])
def _add_chart_symbol(self, sym: str):
""" Chart symbol - Only required for the first symbol """
return "=" + json.dumps({"symbol": sym})
def _prepend_header(self, msg):
return f'~m~{len(msg)}~m~{msg}'
if __name__ == "__main__":
ipm = IntradayPriceManager()
alerting_thread = threading.Thread(target=ipm.send_slack)
alerting_thread.start()
ipm.get(type="chart",
syms=[
"BINANCE:BTCUSD", "BINANCE:ETHUSD", "BINANCE:DOTUSD",
"BINANCE:UNIUSD", "BINANCE:SOLUSD"
],
indicators=["rsi"],
timeframe=240,
histbars=300)
| self._alerts["indicators"][sym] = {} | conditional_block |
intraday.py | """
IntradayPriceManager connects to TradingView and stores indicators and price series in an
in memory dictionary self._alerts. These indicators are then published to slack periodically.
"""
import datetime
import json
import pandas as pd
import random
import re
import string
import time
import threading
import websocket
from utilfns.slack import send_alert
class IntradayPriceManager():
def __init__(self, debug=False):
self._alerts = {
"indicators": {},
"price": {}
} # In-memory dict of alerts to be sent out to slack
self._debug = debug
self._histbars = 300
self._indicators = []
self._slackchannel = "C01UACFTMTK" # TODO: Shift to config
self._slackfreq = 300 # Every 5 mins
self._state = {}
self._syms = [
"BINANCE:UNIUSD", "BINANCE:ETHUSD", "BINANCE:DOTUSD", "SGX:ES3",
"SGX:CLR"
]
self._t = None
self._timeframe = 240 # Default to 4 hours chart
self._ws_url = "wss://data.tradingview.com/socket.io/websocket"
def get(self, type: str, **kwargs):
"""
Type is either quote (live) or chart (historical + live)
Support kwargs:
syms: list of symbols, e.g. [BINANCE:ETHUSD]
indicators: list of indicators, e.g. [rsi]
timeframe: int of minutes of chart time frame, e.g. 240 -> 4 hours chart
histbars: int of number of historical data points, e.g. 300
"""
websocket.enableTrace(True)
ws = websocket.WebSocketApp(
self._ws_url,
on_open=lambda ws: self.on_open(ws, type, **kwargs),
on_close=self.on_close,
on_message=lambda ws, message: self.on_message(ws, message),
on_error=self.on_error)
ws.run_forever()
def send_slack(self):
"""
Periodic slack alerts - Indicators
"""
while True:
indicators = self._alerts.get("indicators")
if indicators:
res = pd.DataFrame(indicators).transpose().reset_index()
res.rename(columns={"index": "sym"}, inplace=True)
send_alert(self._slackchannel, [("Indicators", res)])
time.sleep(self._slackfreq)
def on_message(self, ws, message):
pattern = re.compile(r'~m~\d+~m~~h~\d+$')
if pattern.match(message):
ws.send(message)
else:
msg_body = re.compile(r'~m~\d+~m~')
messages = msg_body.split(message)
for msg in messages:
if msg:
parsed_msg = json.loads(msg)
params = parsed_msg.get("p")
if parsed_msg.get("m") == "timescale_update":
# timescale_update -> initial historical data
# TODO: handling of these data for plotting on UI
continue
if parsed_msg.get("m") == "du":
# du -> data update
sym = self._state.get(params[0]).get("sym")
now = datetime.datetime.now().strftime(
'%Y-%m-%d %H:%M:%S')
for k, v in params[1].items():
if v.get("st"):
# study
indicator = k.split("_")[0]
vals = v.get("st")[0].get("v")
val = vals[1]
val_dict = {"dtime": now, indicator: val}
# print({sym: val_dict})
if not self._alerts["indicators"].get(sym):
self._alerts["indicators"][sym] = {}
self._alerts["indicators"][sym][
indicator] = val
elif v.get("s"):
# series
vals = v.get("s")[0].get("v")
val_dict = dict(
zip([
"dtime", "open", "high", "low", "last",
"vol"
], vals))
val_dict["dtime"] = now
# print({sym: val_dict})
if not self._alerts["price"].get(sym):
self._alerts["price"][sym] = {}
self._alerts["price"][sym]["last"] = val_dict[
"last"]
@staticmethod
def on_error(ws, error):
print(error)
@staticmethod
def on_close(ws):
print("### closed ###")
def on_open(self, ws, type: str, **kwargs):
def run(*args, **kwargs):
# ~m~52~m~{"m":"quote_create_session","p":["qs_3bDnffZvz5ur"]}
# ~m~395~m~{"m":"quote_set_fields","p":["qs_3bDnffZvz5ur","ch","chp","lp"]}
# ~m~89~m~{"m":"quote_add_symbols","p":["qs_3bDnffZvz5ur","SP:SPX",{"flags":["force_permission"]}]}
# ~m~315~m~{"m":"quote_fast_symbols","p":["qs_3bDnffZvz5ur","SP:SPX","TVC:NDX","CBOE:VIX","TVC:DXY","SGX:ES3","NASDAQ:AAPL","NASDAQ:MSFT","NASDAQ:TSLA","TVC:USOIL","TVC:GOLD","TVC:SILVER","FX:AUDUSD","FX:EURUSD","FX:GBPUSD","FX:USDJPY","BITSTAMP:BTCUSD","BITSTAMP:ETHUSD","COINBASE:UNIUSD","BINANCE:DOGEUSD","BINANCE:DOTUSD"]}
syms = kwargs.get("syms") or self._syms
timeframe = f'{kwargs.get("timeframe") or self._timeframe}'
indicators = kwargs.get("indicators") or self._indicators
histbars = kwargs.get("histbars") or self._histbars
send = self._send
send(ws, "set_auth_token", ["unauthorized_user_token"])
# Quote session
if not args or (args and args[0] == "quote"):
session = self._gen_session() # Quote session ID
send(ws, "quote_create_session", [session])
send(ws, "quote_set_fields", [session, "lp", "volume"])
[ws.send(self._add_symbol(session, s)) for s in syms]
send(ws, "quote_fast_symbols", [session, *syms])
send(ws, "quote_hibernate_all", [session])
# Chart session - Prefer to use this over quote sessions since it has a historical series
else:
for i, sym in enumerate(syms):
# Each ticker warrants a separate chart session ID
c_session = self._gen_session(type="chart")
self._state[c_session] = {
"sym": sym,
"indicators": [],
"series": [],
"timeframe": timeframe
}
# Users are allowed to select specific tickers | send(ws, "switch_timezone", [c_session, "Asia/Singapore"])
send(ws, "resolve_symbol", [
c_session, f"symbol_{i}",
self._add_chart_symbol(sym)
])
# s (in resp) -> series
self._state[c_session].get("series").append(f"s_{i}")
send(ws, "create_series", [
c_session, f"s_{i}", f"s_{i}", f"symbol_{i}",
timeframe, histbars
])
for indicator in indicators:
# Users are allowed to select specific indicators
# st (in resp) -> study
self._state[c_session].get("indicators").append(
f"{indicator}_{i}")
send(ws, "create_study", [
c_session, f"{indicator}_{i}", f"{indicator}_{i}",
f"s_{i}", "Script@tv-scripting-101!",
self._indicator_mapper(indicator)
])
self._t = threading.Thread(target=run, args=(type, ), kwargs=kwargs)
self._t.setDaemon(True)
self._t.start()
def _send(self, ws, func, params):
""" Client sends msg to websockets server """
ws.send(self._create_msg(func, params))
def _indicator_mapper(self, indicator: str) -> dict:
""" Indicator params that are accepted by the tv server """
return {
"rsi": {
"text":
"1f0fkZ72S0de2geyaUhXXw==_xwY73vljRXeew69Rl27RumLDs6aJ9NLsTYN9Xrht254BTb8uSOgccpLDt/cdRWopwJPNZx40m19yEFwJFswkSi62X4guNJYpXe4A6S9iq2n+OXM6mqWeWzDbjTl0lYmEf1ujbg7i3FvUdV/zCSrqd+iwnvvZSV+O2acpfNLpUlDdB6PZX4Y9y8tlQLWA2PiF8CVJng7DF1LPeecWC4fv+lNg+s5OXU46AjIhc+TFu8DOwiuKjNh7wWz6EZ7gpQS3",
"pineId": "STD;RSI",
"pineVersion": "12.0",
"in_2": {
"v": "",
"f": True,
"t": "resolution"
},
"in_0": {
"v": 14,
"f": True,
"t": "integer"
},
"in_1": {
"v": "close",
"f": True,
"t": "source"
}
}
}.get(indicator.lower())
def _create_msg(self, func, params):
""" _create_msg("set_auth_token", "unauthorized_user_token") """
msg = self._prepend_header(json.dumps({"m": func, "p": params}))
if self._debug:
print("DEBUG:", msg)
return msg
def _gen_session(self, type="chart"):
# ~m~52~m~{"m":"quote_create_session","p":["qs_3bDnffZvz5ur"]}
session = ""
if type == "quote":
session = "qs_"
elif type == "chart":
session = "cs_"
else:
raise Exception("Invalid session type")
return session + "".join(random.choices(string.ascii_letters, k=12))
def _add_symbol(self, quote_session: str, sym: str):
""" Quote symbol: _add_symbol("3bDnffZvz5ur", "BINANCE:UNIUSD") """
return self._create_msg("quote_add_symbols", [quote_session, sym])
def _add_chart_symbol(self, sym: str):
""" Chart symbol - Only required for the first symbol """
return "=" + json.dumps({"symbol": sym})
def _prepend_header(self, msg):
return f'~m~{len(msg)}~m~{msg}'
if __name__ == "__main__":
ipm = IntradayPriceManager()
alerting_thread = threading.Thread(target=ipm.send_slack)
alerting_thread.start()
ipm.get(type="chart",
syms=[
"BINANCE:BTCUSD", "BINANCE:ETHUSD", "BINANCE:DOTUSD",
"BINANCE:UNIUSD", "BINANCE:SOLUSD"
],
indicators=["rsi"],
timeframe=240,
histbars=300) | send(ws, "chart_create_session", [c_session, ""]) | random_line_split |
intraday.py | """
IntradayPriceManager connects to TradingView and stores indicators and price series in an
in memory dictionary self._alerts. These indicators are then published to slack periodically.
"""
import datetime
import json
import pandas as pd
import random
import re
import string
import time
import threading
import websocket
from utilfns.slack import send_alert
class IntradayPriceManager():
def __init__(self, debug=False):
self._alerts = {
"indicators": {},
"price": {}
} # In-memory dict of alerts to be sent out to slack
self._debug = debug
self._histbars = 300
self._indicators = []
self._slackchannel = "C01UACFTMTK" # TODO: Shift to config
self._slackfreq = 300 # Every 5 mins
self._state = {}
self._syms = [
"BINANCE:UNIUSD", "BINANCE:ETHUSD", "BINANCE:DOTUSD", "SGX:ES3",
"SGX:CLR"
]
self._t = None
self._timeframe = 240 # Default to 4 hours chart
self._ws_url = "wss://data.tradingview.com/socket.io/websocket"
def get(self, type: str, **kwargs):
"""
Type is either quote (live) or chart (historical + live)
Support kwargs:
syms: list of symbols, e.g. [BINANCE:ETHUSD]
indicators: list of indicators, e.g. [rsi]
timeframe: int of minutes of chart time frame, e.g. 240 -> 4 hours chart
histbars: int of number of historical data points, e.g. 300
"""
websocket.enableTrace(True)
ws = websocket.WebSocketApp(
self._ws_url,
on_open=lambda ws: self.on_open(ws, type, **kwargs),
on_close=self.on_close,
on_message=lambda ws, message: self.on_message(ws, message),
on_error=self.on_error)
ws.run_forever()
def send_slack(self):
"""
Periodic slack alerts - Indicators
"""
while True:
indicators = self._alerts.get("indicators")
if indicators:
res = pd.DataFrame(indicators).transpose().reset_index()
res.rename(columns={"index": "sym"}, inplace=True)
send_alert(self._slackchannel, [("Indicators", res)])
time.sleep(self._slackfreq)
def on_message(self, ws, message):
pattern = re.compile(r'~m~\d+~m~~h~\d+$')
if pattern.match(message):
ws.send(message)
else:
msg_body = re.compile(r'~m~\d+~m~')
messages = msg_body.split(message)
for msg in messages:
if msg:
parsed_msg = json.loads(msg)
params = parsed_msg.get("p")
if parsed_msg.get("m") == "timescale_update":
# timescale_update -> initial historical data
# TODO: handling of these data for plotting on UI
continue
if parsed_msg.get("m") == "du":
# du -> data update
sym = self._state.get(params[0]).get("sym")
now = datetime.datetime.now().strftime(
'%Y-%m-%d %H:%M:%S')
for k, v in params[1].items():
if v.get("st"):
# study
indicator = k.split("_")[0]
vals = v.get("st")[0].get("v")
val = vals[1]
val_dict = {"dtime": now, indicator: val}
# print({sym: val_dict})
if not self._alerts["indicators"].get(sym):
self._alerts["indicators"][sym] = {}
self._alerts["indicators"][sym][
indicator] = val
elif v.get("s"):
# series
vals = v.get("s")[0].get("v")
val_dict = dict(
zip([
"dtime", "open", "high", "low", "last",
"vol"
], vals))
val_dict["dtime"] = now
# print({sym: val_dict})
if not self._alerts["price"].get(sym):
self._alerts["price"][sym] = {}
self._alerts["price"][sym]["last"] = val_dict[
"last"]
@staticmethod
def on_error(ws, error):
print(error)
@staticmethod
def on_close(ws):
print("### closed ###")
def on_open(self, ws, type: str, **kwargs):
def | (*args, **kwargs):
# ~m~52~m~{"m":"quote_create_session","p":["qs_3bDnffZvz5ur"]}
# ~m~395~m~{"m":"quote_set_fields","p":["qs_3bDnffZvz5ur","ch","chp","lp"]}
# ~m~89~m~{"m":"quote_add_symbols","p":["qs_3bDnffZvz5ur","SP:SPX",{"flags":["force_permission"]}]}
# ~m~315~m~{"m":"quote_fast_symbols","p":["qs_3bDnffZvz5ur","SP:SPX","TVC:NDX","CBOE:VIX","TVC:DXY","SGX:ES3","NASDAQ:AAPL","NASDAQ:MSFT","NASDAQ:TSLA","TVC:USOIL","TVC:GOLD","TVC:SILVER","FX:AUDUSD","FX:EURUSD","FX:GBPUSD","FX:USDJPY","BITSTAMP:BTCUSD","BITSTAMP:ETHUSD","COINBASE:UNIUSD","BINANCE:DOGEUSD","BINANCE:DOTUSD"]}
syms = kwargs.get("syms") or self._syms
timeframe = f'{kwargs.get("timeframe") or self._timeframe}'
indicators = kwargs.get("indicators") or self._indicators
histbars = kwargs.get("histbars") or self._histbars
send = self._send
send(ws, "set_auth_token", ["unauthorized_user_token"])
# Quote session
if not args or (args and args[0] == "quote"):
session = self._gen_session() # Quote session ID
send(ws, "quote_create_session", [session])
send(ws, "quote_set_fields", [session, "lp", "volume"])
[ws.send(self._add_symbol(session, s)) for s in syms]
send(ws, "quote_fast_symbols", [session, *syms])
send(ws, "quote_hibernate_all", [session])
# Chart session - Prefer to use this over quote sessions since it has a historical series
else:
for i, sym in enumerate(syms):
# Each ticker warrants a separate chart session ID
c_session = self._gen_session(type="chart")
self._state[c_session] = {
"sym": sym,
"indicators": [],
"series": [],
"timeframe": timeframe
}
# Users are allowed to select specific tickers
send(ws, "chart_create_session", [c_session, ""])
send(ws, "switch_timezone", [c_session, "Asia/Singapore"])
send(ws, "resolve_symbol", [
c_session, f"symbol_{i}",
self._add_chart_symbol(sym)
])
# s (in resp) -> series
self._state[c_session].get("series").append(f"s_{i}")
send(ws, "create_series", [
c_session, f"s_{i}", f"s_{i}", f"symbol_{i}",
timeframe, histbars
])
for indicator in indicators:
# Users are allowed to select specific indicators
# st (in resp) -> study
self._state[c_session].get("indicators").append(
f"{indicator}_{i}")
send(ws, "create_study", [
c_session, f"{indicator}_{i}", f"{indicator}_{i}",
f"s_{i}", "Script@tv-scripting-101!",
self._indicator_mapper(indicator)
])
self._t = threading.Thread(target=run, args=(type, ), kwargs=kwargs)
self._t.setDaemon(True)
self._t.start()
def _send(self, ws, func, params):
""" Client sends msg to websockets server """
ws.send(self._create_msg(func, params))
def _indicator_mapper(self, indicator: str) -> dict:
""" Indicator params that are accepted by the tv server """
return {
"rsi": {
"text":
"1f0fkZ72S0de2geyaUhXXw==_xwY73vljRXeew69Rl27RumLDs6aJ9NLsTYN9Xrht254BTb8uSOgccpLDt/cdRWopwJPNZx40m19yEFwJFswkSi62X4guNJYpXe4A6S9iq2n+OXM6mqWeWzDbjTl0lYmEf1ujbg7i3FvUdV/zCSrqd+iwnvvZSV+O2acpfNLpUlDdB6PZX4Y9y8tlQLWA2PiF8CVJng7DF1LPeecWC4fv+lNg+s5OXU46AjIhc+TFu8DOwiuKjNh7wWz6EZ7gpQS3",
"pineId": "STD;RSI",
"pineVersion": "12.0",
"in_2": {
"v": "",
"f": True,
"t": "resolution"
},
"in_0": {
"v": 14,
"f": True,
"t": "integer"
},
"in_1": {
"v": "close",
"f": True,
"t": "source"
}
}
}.get(indicator.lower())
def _create_msg(self, func, params):
""" _create_msg("set_auth_token", "unauthorized_user_token") """
msg = self._prepend_header(json.dumps({"m": func, "p": params}))
if self._debug:
print("DEBUG:", msg)
return msg
def _gen_session(self, type="chart"):
# ~m~52~m~{"m":"quote_create_session","p":["qs_3bDnffZvz5ur"]}
session = ""
if type == "quote":
session = "qs_"
elif type == "chart":
session = "cs_"
else:
raise Exception("Invalid session type")
return session + "".join(random.choices(string.ascii_letters, k=12))
def _add_symbol(self, quote_session: str, sym: str):
""" Quote symbol: _add_symbol("3bDnffZvz5ur", "BINANCE:UNIUSD") """
return self._create_msg("quote_add_symbols", [quote_session, sym])
def _add_chart_symbol(self, sym: str):
""" Chart symbol - Only required for the first symbol """
return "=" + json.dumps({"symbol": sym})
def _prepend_header(self, msg):
return f'~m~{len(msg)}~m~{msg}'
if __name__ == "__main__":
ipm = IntradayPriceManager()
alerting_thread = threading.Thread(target=ipm.send_slack)
alerting_thread.start()
ipm.get(type="chart",
syms=[
"BINANCE:BTCUSD", "BINANCE:ETHUSD", "BINANCE:DOTUSD",
"BINANCE:UNIUSD", "BINANCE:SOLUSD"
],
indicators=["rsi"],
timeframe=240,
histbars=300)
| run | identifier_name |
intraday.py | """
IntradayPriceManager connects to TradingView and stores indicators and price series in an
in memory dictionary self._alerts. These indicators are then published to slack periodically.
"""
import datetime
import json
import pandas as pd
import random
import re
import string
import time
import threading
import websocket
from utilfns.slack import send_alert
class IntradayPriceManager():
def __init__(self, debug=False):
self._alerts = {
"indicators": {},
"price": {}
} # In-memory dict of alerts to be sent out to slack
self._debug = debug
self._histbars = 300
self._indicators = []
self._slackchannel = "C01UACFTMTK" # TODO: Shift to config
self._slackfreq = 300 # Every 5 mins
self._state = {}
self._syms = [
"BINANCE:UNIUSD", "BINANCE:ETHUSD", "BINANCE:DOTUSD", "SGX:ES3",
"SGX:CLR"
]
self._t = None
self._timeframe = 240 # Default to 4 hours chart
self._ws_url = "wss://data.tradingview.com/socket.io/websocket"
def get(self, type: str, **kwargs):
"""
Type is either quote (live) or chart (historical + live)
Support kwargs:
syms: list of symbols, e.g. [BINANCE:ETHUSD]
indicators: list of indicators, e.g. [rsi]
timeframe: int of minutes of chart time frame, e.g. 240 -> 4 hours chart
histbars: int of number of historical data points, e.g. 300
"""
websocket.enableTrace(True)
ws = websocket.WebSocketApp(
self._ws_url,
on_open=lambda ws: self.on_open(ws, type, **kwargs),
on_close=self.on_close,
on_message=lambda ws, message: self.on_message(ws, message),
on_error=self.on_error)
ws.run_forever()
def send_slack(self):
"""
Periodic slack alerts - Indicators
"""
while True:
indicators = self._alerts.get("indicators")
if indicators:
res = pd.DataFrame(indicators).transpose().reset_index()
res.rename(columns={"index": "sym"}, inplace=True)
send_alert(self._slackchannel, [("Indicators", res)])
time.sleep(self._slackfreq)
def on_message(self, ws, message):
pattern = re.compile(r'~m~\d+~m~~h~\d+$')
if pattern.match(message):
ws.send(message)
else:
msg_body = re.compile(r'~m~\d+~m~')
messages = msg_body.split(message)
for msg in messages:
if msg:
parsed_msg = json.loads(msg)
params = parsed_msg.get("p")
if parsed_msg.get("m") == "timescale_update":
# timescale_update -> initial historical data
# TODO: handling of these data for plotting on UI
continue
if parsed_msg.get("m") == "du":
# du -> data update
sym = self._state.get(params[0]).get("sym")
now = datetime.datetime.now().strftime(
'%Y-%m-%d %H:%M:%S')
for k, v in params[1].items():
if v.get("st"):
# study
indicator = k.split("_")[0]
vals = v.get("st")[0].get("v")
val = vals[1]
val_dict = {"dtime": now, indicator: val}
# print({sym: val_dict})
if not self._alerts["indicators"].get(sym):
self._alerts["indicators"][sym] = {}
self._alerts["indicators"][sym][
indicator] = val
elif v.get("s"):
# series
vals = v.get("s")[0].get("v")
val_dict = dict(
zip([
"dtime", "open", "high", "low", "last",
"vol"
], vals))
val_dict["dtime"] = now
# print({sym: val_dict})
if not self._alerts["price"].get(sym):
self._alerts["price"][sym] = {}
self._alerts["price"][sym]["last"] = val_dict[
"last"]
@staticmethod
def on_error(ws, error):
print(error)
@staticmethod
def on_close(ws):
print("### closed ###")
def on_open(self, ws, type: str, **kwargs):
def run(*args, **kwargs):
# ~m~52~m~{"m":"quote_create_session","p":["qs_3bDnffZvz5ur"]}
# ~m~395~m~{"m":"quote_set_fields","p":["qs_3bDnffZvz5ur","ch","chp","lp"]}
# ~m~89~m~{"m":"quote_add_symbols","p":["qs_3bDnffZvz5ur","SP:SPX",{"flags":["force_permission"]}]}
# ~m~315~m~{"m":"quote_fast_symbols","p":["qs_3bDnffZvz5ur","SP:SPX","TVC:NDX","CBOE:VIX","TVC:DXY","SGX:ES3","NASDAQ:AAPL","NASDAQ:MSFT","NASDAQ:TSLA","TVC:USOIL","TVC:GOLD","TVC:SILVER","FX:AUDUSD","FX:EURUSD","FX:GBPUSD","FX:USDJPY","BITSTAMP:BTCUSD","BITSTAMP:ETHUSD","COINBASE:UNIUSD","BINANCE:DOGEUSD","BINANCE:DOTUSD"]}
syms = kwargs.get("syms") or self._syms
timeframe = f'{kwargs.get("timeframe") or self._timeframe}'
indicators = kwargs.get("indicators") or self._indicators
histbars = kwargs.get("histbars") or self._histbars
send = self._send
send(ws, "set_auth_token", ["unauthorized_user_token"])
# Quote session
if not args or (args and args[0] == "quote"):
session = self._gen_session() # Quote session ID
send(ws, "quote_create_session", [session])
send(ws, "quote_set_fields", [session, "lp", "volume"])
[ws.send(self._add_symbol(session, s)) for s in syms]
send(ws, "quote_fast_symbols", [session, *syms])
send(ws, "quote_hibernate_all", [session])
# Chart session - Prefer to use this over quote sessions since it has a historical series
else:
for i, sym in enumerate(syms):
# Each ticker warrants a separate chart session ID
c_session = self._gen_session(type="chart")
self._state[c_session] = {
"sym": sym,
"indicators": [],
"series": [],
"timeframe": timeframe
}
# Users are allowed to select specific tickers
send(ws, "chart_create_session", [c_session, ""])
send(ws, "switch_timezone", [c_session, "Asia/Singapore"])
send(ws, "resolve_symbol", [
c_session, f"symbol_{i}",
self._add_chart_symbol(sym)
])
# s (in resp) -> series
self._state[c_session].get("series").append(f"s_{i}")
send(ws, "create_series", [
c_session, f"s_{i}", f"s_{i}", f"symbol_{i}",
timeframe, histbars
])
for indicator in indicators:
# Users are allowed to select specific indicators
# st (in resp) -> study
self._state[c_session].get("indicators").append(
f"{indicator}_{i}")
send(ws, "create_study", [
c_session, f"{indicator}_{i}", f"{indicator}_{i}",
f"s_{i}", "Script@tv-scripting-101!",
self._indicator_mapper(indicator)
])
self._t = threading.Thread(target=run, args=(type, ), kwargs=kwargs)
self._t.setDaemon(True)
self._t.start()
def _send(self, ws, func, params):
""" Client sends msg to websockets server """
ws.send(self._create_msg(func, params))
def _indicator_mapper(self, indicator: str) -> dict:
|
def _create_msg(self, func, params):
""" _create_msg("set_auth_token", "unauthorized_user_token") """
msg = self._prepend_header(json.dumps({"m": func, "p": params}))
if self._debug:
print("DEBUG:", msg)
return msg
def _gen_session(self, type="chart"):
# ~m~52~m~{"m":"quote_create_session","p":["qs_3bDnffZvz5ur"]}
session = ""
if type == "quote":
session = "qs_"
elif type == "chart":
session = "cs_"
else:
raise Exception("Invalid session type")
return session + "".join(random.choices(string.ascii_letters, k=12))
def _add_symbol(self, quote_session: str, sym: str):
""" Quote symbol: _add_symbol("3bDnffZvz5ur", "BINANCE:UNIUSD") """
return self._create_msg("quote_add_symbols", [quote_session, sym])
def _add_chart_symbol(self, sym: str):
""" Chart symbol - Only required for the first symbol """
return "=" + json.dumps({"symbol": sym})
def _prepend_header(self, msg):
return f'~m~{len(msg)}~m~{msg}'
if __name__ == "__main__":
ipm = IntradayPriceManager()
alerting_thread = threading.Thread(target=ipm.send_slack)
alerting_thread.start()
ipm.get(type="chart",
syms=[
"BINANCE:BTCUSD", "BINANCE:ETHUSD", "BINANCE:DOTUSD",
"BINANCE:UNIUSD", "BINANCE:SOLUSD"
],
indicators=["rsi"],
timeframe=240,
histbars=300)
| """ Indicator params that are accepted by the tv server """
return {
"rsi": {
"text":
"1f0fkZ72S0de2geyaUhXXw==_xwY73vljRXeew69Rl27RumLDs6aJ9NLsTYN9Xrht254BTb8uSOgccpLDt/cdRWopwJPNZx40m19yEFwJFswkSi62X4guNJYpXe4A6S9iq2n+OXM6mqWeWzDbjTl0lYmEf1ujbg7i3FvUdV/zCSrqd+iwnvvZSV+O2acpfNLpUlDdB6PZX4Y9y8tlQLWA2PiF8CVJng7DF1LPeecWC4fv+lNg+s5OXU46AjIhc+TFu8DOwiuKjNh7wWz6EZ7gpQS3",
"pineId": "STD;RSI",
"pineVersion": "12.0",
"in_2": {
"v": "",
"f": True,
"t": "resolution"
},
"in_0": {
"v": 14,
"f": True,
"t": "integer"
},
"in_1": {
"v": "close",
"f": True,
"t": "source"
}
}
}.get(indicator.lower()) | identifier_body |
conv2d_compact.rs | use super::{ConstraintSystem, Scalar, MemoryManager, Memory, TensorAddress, SCALAR_SIZE, BigScalar, RangeFull, Range, RangeFrom, RangeTo, Id, min, Functions, ActivationFunction};
use crate::scalar::power_of_two;
impl ConstraintSystem {
pub fn run_conv2d_compact<T: Scalar>(mem: &MemoryManager, param: &[u32], var_dict: &mut Memory<T>) {
let (mul_result, k_col, packed_size,bit_length,extracted) = (param[0], param[1], param[2], param[3], param[4]);
let (fout, row_out, col_packed) = (mem[mul_result].dim[0],mem[mul_result].dim[1],mem[mul_result].dim[2]);
let row_dim = mem[extracted].dim[2];
let offset = power_of_two::<T>(bit_length - 1);
let mut big_offset = T::zero();
for _ in 0..packed_size + k_col - 1 {
big_offset = (big_offset * T::from_i32(2) + T::one()) * offset;
}
let n_packed = packed_size + k_col - 1;
for layer_out in 0..fout {
//matching result
for r in 0..row_out {
for c in 0..col_packed {
let val = (var_dict[mem[mul_result].at_idx(&[layer_out, r, c]) as usize] + big_offset).to_bytes();
let mut ext = Vec::new();
ext.resize((packed_size + k_col - 1) as usize, T::zero());
for k in 0..(packed_size + k_col - 1) * bit_length {
ext[(k / bit_length) as usize] += T::from_i32(((val[(k/8) as usize] >> (k % 8)) & 1) as i32) * power_of_two(k % bit_length);
}
for k in 0..packed_size + k_col - 1 {
let idx = c * n_packed + k;
if idx >= row_dim {
break
}
var_dict[mem[extracted].at_idx(&[layer_out,r,idx]) as usize] = ext[k as usize] - offset;
}
}
}
}
}
pub fn conv2d_compact(&mut self, input: TensorAddress, output: TensorAddress, weight_rev: TensorAddress, bias: Option<(TensorAddress, u32)>, bit_length: u8, act: ActivationFunction) {
// packing weight
let dim = &self.mem[weight_rev].dim;
let (fout, fin, k_row, k_col) = (dim[0], dim[1], dim[2], dim[3]);
let packed_weight = self.mem.alloc(&[fout, fin, k_row]);
assert!(k_col * (bit_length as u32) <= SCALAR_SIZE);
self.packing_tensor(weight_rev, packed_weight, bit_length, k_col as u8,1, BigScalar::one(), true);
let (row, col) = (self.mem[input].dim[1], self.mem[input].dim[2]);
let packed_size = min((SCALAR_SIZE / (bit_length as u32)).checked_sub(k_col).unwrap(),col);
let col_packed = (col-1)/packed_size + 1;
let packed_layer = self.mem.alloc(&[fin, row, col_packed]);
// packing row of inputs
self.packing_tensor_by_dim(input,&[-1], packed_layer, bit_length, packed_size as u8,1,BigScalar::one(), true);
// splicing output by row
let mut mul_input = Vec::new();
for r in 0..row - k_row + 1 { | }
mul_input.push(mul_input_row);
}
//packing bias
let mut packed_bias: Vec<Vec<TensorAddress>> = Vec::with_capacity(fout as usize);
let mut bias_dim = 0;
let mut bias_scale = 0;
if let Some((b, scale)) = bias {
bias_dim = (col - k_col)/packed_size + 1;
bias_scale = scale;
for layer_out in 0..fout {
let mut packed_bias_row: Vec<TensorAddress> = Vec::with_capacity(((row - k_row)/scale + 1) as usize);
for r in 0..(row - k_row)/scale + 1 {
let packed_bias = self.mem.alloc(&[bias_dim]);
let bias_row = self.mem.save(self.mem[b].at_(&[layer_out, r]));
self.packing_tensor(bias_row, packed_bias, bit_length, packed_size as u8, scale,power_of_two(bit_length as u32 * (k_col - 1)), true);
packed_bias_row.push(packed_bias);
}
packed_bias.push(packed_bias_row);
}
}
let mul_result = self.mem.alloc(&[fout, row - k_row + 1, col_packed]);
for layer_out in 0..fout {
let packed_weight = self.mem.save(self.mem[packed_weight].at_(&[layer_out]));
for r in 0..row - k_row + 1 {
for c in 0..col_packed {
let cur_bias = if c < bias_dim {Some(self.mem[packed_bias[layer_out as usize][(r/bias_scale) as usize]].at_idx(&[c]))} else {None};
self.dot(mul_input[r as usize][c as usize], packed_weight, self.mem[mul_result].at_idx(&[layer_out, r, c]), cur_bias);
}
}
}
// sign extraction
let n_packed = packed_size + k_col - 1;
let extracted_length = (col_packed - 1) * n_packed + ((col-1) % packed_size) + k_col;
let extracted = self.mem.alloc(&[fout, row - k_row + 1, extracted_length]);
self.packing_tensor_by_dim(extracted,&[-1], mul_result, bit_length, n_packed as u8,1,BigScalar::one(), false);
let params = vec![mul_result, k_col, packed_size, bit_length as u32, extracted];
self.compute.push((params.into_boxed_slice(), Functions::ConvCompact));
fn split_tensor<const N:usize>(mem: &mut MemoryManager,tensor: TensorAddress, length: u32, pos: [u32; N]) -> [(Option<TensorAddress>, Option<TensorAddress>); N] {
let fully_packed = mem[tensor].dim[2]/length;
let remainder = mem[tensor].dim[2] % length;
// should not save this
let tmp=mem[tensor].partition(2, length);
let mut res: [(Option<TensorAddress>, Option<TensorAddress>); N] = [(None, None); N];
for i in 0..N - 1 {
if pos[i] == pos[i+1] {
res[i] = (None, None);
continue;
}
let n= fully_packed + if remainder >= pos[i+1] {1} else {0};
let full = if n > 0 {
Some(mem.save(tmp.at(&[RangeFull(), RangeFull(), RangeTo(..n), Range(pos[i]..pos[i+1])])))
} else {
None
};
let rem = if pos[i] < remainder && remainder < pos[i+1] {
Some(mem.save(tmp.at(&[RangeFull(), RangeFull(), Id(n), Range(pos[i]..remainder)])))
} else {
None
};
res[i] = (full, rem);
}
res
}
fn extract_sign_part(c: &mut ConstraintSystem, extracted: TensorAddress, bit_length: u8) {
let output = c.mem.alloc(&c.mem[extracted].dim.to_owned());
c.sign(extracted, output, bit_length - 1);
}
let reduced_extract = self.mem.save(self.mem[extracted].at(&[RangeFull(), RangeFull(), RangeTo(..extracted_length - k_col + 1)]));
if k_col != 1 {
let rem_extract = self.mem.save(self.mem[extracted].at(&[RangeFull(), RangeFull(), RangeFrom(extracted_length - k_col + 1..)]));
extract_sign_part(self, rem_extract, bit_length);
}
let [(output_full, output_full_rem), (output_part, output_part_rem), (_,_)]= split_tensor(&mut self.mem, output, packed_size, [0, packed_size-(k_col-1), packed_size]);
let [(ext_left, ext_left_rem), (ext_full, ext_full_rem), (ext_right,ext_right_rem), (_,_)]= split_tensor(&mut self.mem, reduced_extract, n_packed, [0, k_col-1, packed_size, n_packed]);
// extract the fully correct part
if let Some(e) = ext_full {
self.activation(e, output_full.unwrap(), bit_length - 1, act);
}
if let Some(e) = ext_full_rem {
self.activation(e, output_full_rem.unwrap(), bit_length - 1, act);
}
//extract left and right sign part
if let Some(e) = ext_left {
extract_sign_part(self,e, bit_length);
}
if let Some(e) = ext_left_rem {
extract_sign_part(self,e, bit_length);
}
if let Some(e) = ext_right {
extract_sign_part(self,e, bit_length);
}
assert_eq!(ext_right_rem, None);
if let Some(left_rem) = ext_left_rem {
if let Some(right) = ext_right {
let sum_res = self.mem.alloc(&[fout, row - k_row + 1, self.mem[right].dim[2] - 1, k_col - 1]);
let left = self.mem.save(self.mem[ext_left.unwrap()].at(&[RangeFull(), RangeFrom(1..)]));
self.sum_two(right, left, sum_res);
self.activation(sum_res, output_part.unwrap(), bit_length - 1, act);
let sum_res = self.mem.alloc(&[fout, row - k_row + 1, self.mem[left_rem].dim[2]]);
let right_rem = self.mem.save(self.mem[right].at(&[RangeFull(), Id(self.mem[right].dim[2] - 1), RangeTo(..self.mem[left_rem].dim[2])]));
self.sum_two(right_rem, left_rem, sum_res);
self.activation(sum_res, output_part_rem.unwrap(), bit_length - 1, act);
}
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::scalar::slice_to_scalar;
#[test]
fn conv2d_compact_test() {
let mut x = ConstraintSystem::new();
let input = x.mem.alloc(&[2,5,5]);
let weight = x.mem.alloc(&[2,2,3,3]);
let output = x.mem.alloc(&[2,3,3]);
let bias = x.mem.alloc(&[2,3,3]);
let weight_rev = x.mem.save(x.mem[weight].reverse(3));
x.conv2d_compact(input, output, weight_rev, Some((bias, 1)), 7, ActivationFunction::Sign);
let mut mem: Vec<BigScalar> = slice_to_scalar(&[1,0,1,-1,0,0,0,-2,4,-1,-4,0,3,-4,0,0,0,1,-1,1,-4,2,3,-1,0,-4,2,2,-3,-1,-1,1,2,-1,1,4,4,2,3,-3,0,3,-2,3,0,2,3,3,-2,2,4,3,3,-4,-4,-1,3,1,4,-2,-2,0,-2,4,-3,0,0,0,-2,0,0,0,0,3,4,-3,-4,-1,-1,-4,3,1,-2,0,0,0,-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-3,0,-3,0,1,-4,-1,2,0,0,-4,2,1,3,2,-3,4,-3]);
mem.resize(x.mem.n_var as usize, Scalar::zero());
x.compute(&mut mem);
assert_eq!(mem[87..87+18], slice_to_scalar(&[1,1,-1,-1,-1,1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,-1]));
x.sort_cons();
assert!(x.verify(&mem));
}
#[test]
fn conv2d_compact_test_small() {
let mut x = ConstraintSystem::new();
let input = x.mem.alloc(&[1,4,3]);
let weight = x.mem.alloc(&[1,1,3,3]);
let output = x.mem.alloc(&[1,2,1]);
let weight_rev = x.mem.save(x.mem[weight].reverse(3));
x.conv2d_compact(input, output, weight_rev, None, 5,ActivationFunction::Sign);
let mut mem = x.mem.new_memory::<BigScalar>();
x.load_memory(input, &mut mem, &slice_to_scalar(&[1,1,2, 1,2,1, 1,1,1, 1,2,1]));
x.load_memory(weight, &mut mem, &slice_to_scalar(&[1,1,-1, 1,-1,1, 1,1,1]));
x.compute(&mut mem);
assert_eq!(mem[x.mem[output].begin() as usize..x.mem[output].end() as usize], slice_to_scalar(&[1,1]));
x.sort_cons();
assert!(x.verify(&mem));
}
} | let mut mul_input_row = Vec::new();
for c in 0..col_packed {
mul_input_row.push(self.mem.save(self.mem[packed_layer].at(&[RangeFull(), Range(r..r + k_row), Id(c)]))); | random_line_split |
conv2d_compact.rs | use super::{ConstraintSystem, Scalar, MemoryManager, Memory, TensorAddress, SCALAR_SIZE, BigScalar, RangeFull, Range, RangeFrom, RangeTo, Id, min, Functions, ActivationFunction};
use crate::scalar::power_of_two;
impl ConstraintSystem {
pub fn run_conv2d_compact<T: Scalar>(mem: &MemoryManager, param: &[u32], var_dict: &mut Memory<T>) {
let (mul_result, k_col, packed_size,bit_length,extracted) = (param[0], param[1], param[2], param[3], param[4]);
let (fout, row_out, col_packed) = (mem[mul_result].dim[0],mem[mul_result].dim[1],mem[mul_result].dim[2]);
let row_dim = mem[extracted].dim[2];
let offset = power_of_two::<T>(bit_length - 1);
let mut big_offset = T::zero();
for _ in 0..packed_size + k_col - 1 {
big_offset = (big_offset * T::from_i32(2) + T::one()) * offset;
}
let n_packed = packed_size + k_col - 1;
for layer_out in 0..fout {
//matching result
for r in 0..row_out {
for c in 0..col_packed {
let val = (var_dict[mem[mul_result].at_idx(&[layer_out, r, c]) as usize] + big_offset).to_bytes();
let mut ext = Vec::new();
ext.resize((packed_size + k_col - 1) as usize, T::zero());
for k in 0..(packed_size + k_col - 1) * bit_length {
ext[(k / bit_length) as usize] += T::from_i32(((val[(k/8) as usize] >> (k % 8)) & 1) as i32) * power_of_two(k % bit_length);
}
for k in 0..packed_size + k_col - 1 {
let idx = c * n_packed + k;
if idx >= row_dim {
break
}
var_dict[mem[extracted].at_idx(&[layer_out,r,idx]) as usize] = ext[k as usize] - offset;
}
}
}
}
}
pub fn conv2d_compact(&mut self, input: TensorAddress, output: TensorAddress, weight_rev: TensorAddress, bias: Option<(TensorAddress, u32)>, bit_length: u8, act: ActivationFunction) |
}
#[cfg(test)]
mod tests {
use super::*;
use crate::scalar::slice_to_scalar;
#[test]
fn conv2d_compact_test() {
let mut x = ConstraintSystem::new();
let input = x.mem.alloc(&[2,5,5]);
let weight = x.mem.alloc(&[2,2,3,3]);
let output = x.mem.alloc(&[2,3,3]);
let bias = x.mem.alloc(&[2,3,3]);
let weight_rev = x.mem.save(x.mem[weight].reverse(3));
x.conv2d_compact(input, output, weight_rev, Some((bias, 1)), 7, ActivationFunction::Sign);
let mut mem: Vec<BigScalar> = slice_to_scalar(&[1,0,1,-1,0,0,0,-2,4,-1,-4,0,3,-4,0,0,0,1,-1,1,-4,2,3,-1,0,-4,2,2,-3,-1,-1,1,2,-1,1,4,4,2,3,-3,0,3,-2,3,0,2,3,3,-2,2,4,3,3,-4,-4,-1,3,1,4,-2,-2,0,-2,4,-3,0,0,0,-2,0,0,0,0,3,4,-3,-4,-1,-1,-4,3,1,-2,0,0,0,-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-3,0,-3,0,1,-4,-1,2,0,0,-4,2,1,3,2,-3,4,-3]);
mem.resize(x.mem.n_var as usize, Scalar::zero());
x.compute(&mut mem);
assert_eq!(mem[87..87+18], slice_to_scalar(&[1,1,-1,-1,-1,1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,-1]));
x.sort_cons();
assert!(x.verify(&mem));
}
#[test]
fn conv2d_compact_test_small() {
let mut x = ConstraintSystem::new();
let input = x.mem.alloc(&[1,4,3]);
let weight = x.mem.alloc(&[1,1,3,3]);
let output = x.mem.alloc(&[1,2,1]);
let weight_rev = x.mem.save(x.mem[weight].reverse(3));
x.conv2d_compact(input, output, weight_rev, None, 5,ActivationFunction::Sign);
let mut mem = x.mem.new_memory::<BigScalar>();
x.load_memory(input, &mut mem, &slice_to_scalar(&[1,1,2, 1,2,1, 1,1,1, 1,2,1]));
x.load_memory(weight, &mut mem, &slice_to_scalar(&[1,1,-1, 1,-1,1, 1,1,1]));
x.compute(&mut mem);
assert_eq!(mem[x.mem[output].begin() as usize..x.mem[output].end() as usize], slice_to_scalar(&[1,1]));
x.sort_cons();
assert!(x.verify(&mem));
}
} | {
// packing weight
let dim = &self.mem[weight_rev].dim;
let (fout, fin, k_row, k_col) = (dim[0], dim[1], dim[2], dim[3]);
let packed_weight = self.mem.alloc(&[fout, fin, k_row]);
assert!(k_col * (bit_length as u32) <= SCALAR_SIZE);
self.packing_tensor(weight_rev, packed_weight, bit_length, k_col as u8,1, BigScalar::one(), true);
let (row, col) = (self.mem[input].dim[1], self.mem[input].dim[2]);
let packed_size = min((SCALAR_SIZE / (bit_length as u32)).checked_sub(k_col).unwrap(),col);
let col_packed = (col-1)/packed_size + 1;
let packed_layer = self.mem.alloc(&[fin, row, col_packed]);
// packing row of inputs
self.packing_tensor_by_dim(input,&[-1], packed_layer, bit_length, packed_size as u8,1,BigScalar::one(), true);
// splicing output by row
let mut mul_input = Vec::new();
for r in 0..row - k_row + 1 {
let mut mul_input_row = Vec::new();
for c in 0..col_packed {
mul_input_row.push(self.mem.save(self.mem[packed_layer].at(&[RangeFull(), Range(r..r + k_row), Id(c)])));
}
mul_input.push(mul_input_row);
}
//packing bias
let mut packed_bias: Vec<Vec<TensorAddress>> = Vec::with_capacity(fout as usize);
let mut bias_dim = 0;
let mut bias_scale = 0;
if let Some((b, scale)) = bias {
bias_dim = (col - k_col)/packed_size + 1;
bias_scale = scale;
for layer_out in 0..fout {
let mut packed_bias_row: Vec<TensorAddress> = Vec::with_capacity(((row - k_row)/scale + 1) as usize);
for r in 0..(row - k_row)/scale + 1 {
let packed_bias = self.mem.alloc(&[bias_dim]);
let bias_row = self.mem.save(self.mem[b].at_(&[layer_out, r]));
self.packing_tensor(bias_row, packed_bias, bit_length, packed_size as u8, scale,power_of_two(bit_length as u32 * (k_col - 1)), true);
packed_bias_row.push(packed_bias);
}
packed_bias.push(packed_bias_row);
}
}
let mul_result = self.mem.alloc(&[fout, row - k_row + 1, col_packed]);
for layer_out in 0..fout {
let packed_weight = self.mem.save(self.mem[packed_weight].at_(&[layer_out]));
for r in 0..row - k_row + 1 {
for c in 0..col_packed {
let cur_bias = if c < bias_dim {Some(self.mem[packed_bias[layer_out as usize][(r/bias_scale) as usize]].at_idx(&[c]))} else {None};
self.dot(mul_input[r as usize][c as usize], packed_weight, self.mem[mul_result].at_idx(&[layer_out, r, c]), cur_bias);
}
}
}
// sign extraction
let n_packed = packed_size + k_col - 1;
let extracted_length = (col_packed - 1) * n_packed + ((col-1) % packed_size) + k_col;
let extracted = self.mem.alloc(&[fout, row - k_row + 1, extracted_length]);
self.packing_tensor_by_dim(extracted,&[-1], mul_result, bit_length, n_packed as u8,1,BigScalar::one(), false);
let params = vec![mul_result, k_col, packed_size, bit_length as u32, extracted];
self.compute.push((params.into_boxed_slice(), Functions::ConvCompact));
fn split_tensor<const N:usize>(mem: &mut MemoryManager,tensor: TensorAddress, length: u32, pos: [u32; N]) -> [(Option<TensorAddress>, Option<TensorAddress>); N] {
let fully_packed = mem[tensor].dim[2]/length;
let remainder = mem[tensor].dim[2] % length;
// should not save this
let tmp=mem[tensor].partition(2, length);
let mut res: [(Option<TensorAddress>, Option<TensorAddress>); N] = [(None, None); N];
for i in 0..N - 1 {
if pos[i] == pos[i+1] {
res[i] = (None, None);
continue;
}
let n= fully_packed + if remainder >= pos[i+1] {1} else {0};
let full = if n > 0 {
Some(mem.save(tmp.at(&[RangeFull(), RangeFull(), RangeTo(..n), Range(pos[i]..pos[i+1])])))
} else {
None
};
let rem = if pos[i] < remainder && remainder < pos[i+1] {
Some(mem.save(tmp.at(&[RangeFull(), RangeFull(), Id(n), Range(pos[i]..remainder)])))
} else {
None
};
res[i] = (full, rem);
}
res
}
fn extract_sign_part(c: &mut ConstraintSystem, extracted: TensorAddress, bit_length: u8) {
let output = c.mem.alloc(&c.mem[extracted].dim.to_owned());
c.sign(extracted, output, bit_length - 1);
}
let reduced_extract = self.mem.save(self.mem[extracted].at(&[RangeFull(), RangeFull(), RangeTo(..extracted_length - k_col + 1)]));
if k_col != 1 {
let rem_extract = self.mem.save(self.mem[extracted].at(&[RangeFull(), RangeFull(), RangeFrom(extracted_length - k_col + 1..)]));
extract_sign_part(self, rem_extract, bit_length);
}
let [(output_full, output_full_rem), (output_part, output_part_rem), (_,_)]= split_tensor(&mut self.mem, output, packed_size, [0, packed_size-(k_col-1), packed_size]);
let [(ext_left, ext_left_rem), (ext_full, ext_full_rem), (ext_right,ext_right_rem), (_,_)]= split_tensor(&mut self.mem, reduced_extract, n_packed, [0, k_col-1, packed_size, n_packed]);
// extract the fully correct part
if let Some(e) = ext_full {
self.activation(e, output_full.unwrap(), bit_length - 1, act);
}
if let Some(e) = ext_full_rem {
self.activation(e, output_full_rem.unwrap(), bit_length - 1, act);
}
//extract left and right sign part
if let Some(e) = ext_left {
extract_sign_part(self,e, bit_length);
}
if let Some(e) = ext_left_rem {
extract_sign_part(self,e, bit_length);
}
if let Some(e) = ext_right {
extract_sign_part(self,e, bit_length);
}
assert_eq!(ext_right_rem, None);
if let Some(left_rem) = ext_left_rem {
if let Some(right) = ext_right {
let sum_res = self.mem.alloc(&[fout, row - k_row + 1, self.mem[right].dim[2] - 1, k_col - 1]);
let left = self.mem.save(self.mem[ext_left.unwrap()].at(&[RangeFull(), RangeFrom(1..)]));
self.sum_two(right, left, sum_res);
self.activation(sum_res, output_part.unwrap(), bit_length - 1, act);
let sum_res = self.mem.alloc(&[fout, row - k_row + 1, self.mem[left_rem].dim[2]]);
let right_rem = self.mem.save(self.mem[right].at(&[RangeFull(), Id(self.mem[right].dim[2] - 1), RangeTo(..self.mem[left_rem].dim[2])]));
self.sum_two(right_rem, left_rem, sum_res);
self.activation(sum_res, output_part_rem.unwrap(), bit_length - 1, act);
}
}
} | identifier_body |
conv2d_compact.rs | use super::{ConstraintSystem, Scalar, MemoryManager, Memory, TensorAddress, SCALAR_SIZE, BigScalar, RangeFull, Range, RangeFrom, RangeTo, Id, min, Functions, ActivationFunction};
use crate::scalar::power_of_two;
impl ConstraintSystem {
pub fn run_conv2d_compact<T: Scalar>(mem: &MemoryManager, param: &[u32], var_dict: &mut Memory<T>) {
let (mul_result, k_col, packed_size,bit_length,extracted) = (param[0], param[1], param[2], param[3], param[4]);
let (fout, row_out, col_packed) = (mem[mul_result].dim[0],mem[mul_result].dim[1],mem[mul_result].dim[2]);
let row_dim = mem[extracted].dim[2];
let offset = power_of_two::<T>(bit_length - 1);
let mut big_offset = T::zero();
for _ in 0..packed_size + k_col - 1 {
big_offset = (big_offset * T::from_i32(2) + T::one()) * offset;
}
let n_packed = packed_size + k_col - 1;
for layer_out in 0..fout {
//matching result
for r in 0..row_out {
for c in 0..col_packed {
let val = (var_dict[mem[mul_result].at_idx(&[layer_out, r, c]) as usize] + big_offset).to_bytes();
let mut ext = Vec::new();
ext.resize((packed_size + k_col - 1) as usize, T::zero());
for k in 0..(packed_size + k_col - 1) * bit_length {
ext[(k / bit_length) as usize] += T::from_i32(((val[(k/8) as usize] >> (k % 8)) & 1) as i32) * power_of_two(k % bit_length);
}
for k in 0..packed_size + k_col - 1 {
let idx = c * n_packed + k;
if idx >= row_dim {
break
}
var_dict[mem[extracted].at_idx(&[layer_out,r,idx]) as usize] = ext[k as usize] - offset;
}
}
}
}
}
pub fn conv2d_compact(&mut self, input: TensorAddress, output: TensorAddress, weight_rev: TensorAddress, bias: Option<(TensorAddress, u32)>, bit_length: u8, act: ActivationFunction) {
// packing weight
let dim = &self.mem[weight_rev].dim;
let (fout, fin, k_row, k_col) = (dim[0], dim[1], dim[2], dim[3]);
let packed_weight = self.mem.alloc(&[fout, fin, k_row]);
assert!(k_col * (bit_length as u32) <= SCALAR_SIZE);
self.packing_tensor(weight_rev, packed_weight, bit_length, k_col as u8,1, BigScalar::one(), true);
let (row, col) = (self.mem[input].dim[1], self.mem[input].dim[2]);
let packed_size = min((SCALAR_SIZE / (bit_length as u32)).checked_sub(k_col).unwrap(),col);
let col_packed = (col-1)/packed_size + 1;
let packed_layer = self.mem.alloc(&[fin, row, col_packed]);
// packing row of inputs
self.packing_tensor_by_dim(input,&[-1], packed_layer, bit_length, packed_size as u8,1,BigScalar::one(), true);
// splicing output by row
let mut mul_input = Vec::new();
for r in 0..row - k_row + 1 {
let mut mul_input_row = Vec::new();
for c in 0..col_packed {
mul_input_row.push(self.mem.save(self.mem[packed_layer].at(&[RangeFull(), Range(r..r + k_row), Id(c)])));
}
mul_input.push(mul_input_row);
}
//packing bias
let mut packed_bias: Vec<Vec<TensorAddress>> = Vec::with_capacity(fout as usize);
let mut bias_dim = 0;
let mut bias_scale = 0;
if let Some((b, scale)) = bias {
bias_dim = (col - k_col)/packed_size + 1;
bias_scale = scale;
for layer_out in 0..fout {
let mut packed_bias_row: Vec<TensorAddress> = Vec::with_capacity(((row - k_row)/scale + 1) as usize);
for r in 0..(row - k_row)/scale + 1 {
let packed_bias = self.mem.alloc(&[bias_dim]);
let bias_row = self.mem.save(self.mem[b].at_(&[layer_out, r]));
self.packing_tensor(bias_row, packed_bias, bit_length, packed_size as u8, scale,power_of_two(bit_length as u32 * (k_col - 1)), true);
packed_bias_row.push(packed_bias);
}
packed_bias.push(packed_bias_row);
}
}
let mul_result = self.mem.alloc(&[fout, row - k_row + 1, col_packed]);
for layer_out in 0..fout {
let packed_weight = self.mem.save(self.mem[packed_weight].at_(&[layer_out]));
for r in 0..row - k_row + 1 {
for c in 0..col_packed {
let cur_bias = if c < bias_dim {Some(self.mem[packed_bias[layer_out as usize][(r/bias_scale) as usize]].at_idx(&[c]))} else {None};
self.dot(mul_input[r as usize][c as usize], packed_weight, self.mem[mul_result].at_idx(&[layer_out, r, c]), cur_bias);
}
}
}
// sign extraction
let n_packed = packed_size + k_col - 1;
let extracted_length = (col_packed - 1) * n_packed + ((col-1) % packed_size) + k_col;
let extracted = self.mem.alloc(&[fout, row - k_row + 1, extracted_length]);
self.packing_tensor_by_dim(extracted,&[-1], mul_result, bit_length, n_packed as u8,1,BigScalar::one(), false);
let params = vec![mul_result, k_col, packed_size, bit_length as u32, extracted];
self.compute.push((params.into_boxed_slice(), Functions::ConvCompact));
fn split_tensor<const N:usize>(mem: &mut MemoryManager,tensor: TensorAddress, length: u32, pos: [u32; N]) -> [(Option<TensorAddress>, Option<TensorAddress>); N] {
let fully_packed = mem[tensor].dim[2]/length;
let remainder = mem[tensor].dim[2] % length;
// should not save this
let tmp=mem[tensor].partition(2, length);
let mut res: [(Option<TensorAddress>, Option<TensorAddress>); N] = [(None, None); N];
for i in 0..N - 1 {
if pos[i] == pos[i+1] {
res[i] = (None, None);
continue;
}
let n= fully_packed + if remainder >= pos[i+1] {1} else {0};
let full = if n > 0 {
Some(mem.save(tmp.at(&[RangeFull(), RangeFull(), RangeTo(..n), Range(pos[i]..pos[i+1])])))
} else {
None
};
let rem = if pos[i] < remainder && remainder < pos[i+1] {
Some(mem.save(tmp.at(&[RangeFull(), RangeFull(), Id(n), Range(pos[i]..remainder)])))
} else {
None
};
res[i] = (full, rem);
}
res
}
fn extract_sign_part(c: &mut ConstraintSystem, extracted: TensorAddress, bit_length: u8) {
let output = c.mem.alloc(&c.mem[extracted].dim.to_owned());
c.sign(extracted, output, bit_length - 1);
}
let reduced_extract = self.mem.save(self.mem[extracted].at(&[RangeFull(), RangeFull(), RangeTo(..extracted_length - k_col + 1)]));
if k_col != 1 {
let rem_extract = self.mem.save(self.mem[extracted].at(&[RangeFull(), RangeFull(), RangeFrom(extracted_length - k_col + 1..)]));
extract_sign_part(self, rem_extract, bit_length);
}
let [(output_full, output_full_rem), (output_part, output_part_rem), (_,_)]= split_tensor(&mut self.mem, output, packed_size, [0, packed_size-(k_col-1), packed_size]);
let [(ext_left, ext_left_rem), (ext_full, ext_full_rem), (ext_right,ext_right_rem), (_,_)]= split_tensor(&mut self.mem, reduced_extract, n_packed, [0, k_col-1, packed_size, n_packed]);
// extract the fully correct part
if let Some(e) = ext_full {
self.activation(e, output_full.unwrap(), bit_length - 1, act);
}
if let Some(e) = ext_full_rem {
self.activation(e, output_full_rem.unwrap(), bit_length - 1, act);
}
//extract left and right sign part
if let Some(e) = ext_left {
extract_sign_part(self,e, bit_length);
}
if let Some(e) = ext_left_rem {
extract_sign_part(self,e, bit_length);
}
if let Some(e) = ext_right {
extract_sign_part(self,e, bit_length);
}
assert_eq!(ext_right_rem, None);
if let Some(left_rem) = ext_left_rem {
if let Some(right) = ext_right {
let sum_res = self.mem.alloc(&[fout, row - k_row + 1, self.mem[right].dim[2] - 1, k_col - 1]);
let left = self.mem.save(self.mem[ext_left.unwrap()].at(&[RangeFull(), RangeFrom(1..)]));
self.sum_two(right, left, sum_res);
self.activation(sum_res, output_part.unwrap(), bit_length - 1, act);
let sum_res = self.mem.alloc(&[fout, row - k_row + 1, self.mem[left_rem].dim[2]]);
let right_rem = self.mem.save(self.mem[right].at(&[RangeFull(), Id(self.mem[right].dim[2] - 1), RangeTo(..self.mem[left_rem].dim[2])]));
self.sum_two(right_rem, left_rem, sum_res);
self.activation(sum_res, output_part_rem.unwrap(), bit_length - 1, act);
}
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::scalar::slice_to_scalar;
#[test]
fn conv2d_compact_test() {
let mut x = ConstraintSystem::new();
let input = x.mem.alloc(&[2,5,5]);
let weight = x.mem.alloc(&[2,2,3,3]);
let output = x.mem.alloc(&[2,3,3]);
let bias = x.mem.alloc(&[2,3,3]);
let weight_rev = x.mem.save(x.mem[weight].reverse(3));
x.conv2d_compact(input, output, weight_rev, Some((bias, 1)), 7, ActivationFunction::Sign);
let mut mem: Vec<BigScalar> = slice_to_scalar(&[1,0,1,-1,0,0,0,-2,4,-1,-4,0,3,-4,0,0,0,1,-1,1,-4,2,3,-1,0,-4,2,2,-3,-1,-1,1,2,-1,1,4,4,2,3,-3,0,3,-2,3,0,2,3,3,-2,2,4,3,3,-4,-4,-1,3,1,4,-2,-2,0,-2,4,-3,0,0,0,-2,0,0,0,0,3,4,-3,-4,-1,-1,-4,3,1,-2,0,0,0,-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-3,0,-3,0,1,-4,-1,2,0,0,-4,2,1,3,2,-3,4,-3]);
mem.resize(x.mem.n_var as usize, Scalar::zero());
x.compute(&mut mem);
assert_eq!(mem[87..87+18], slice_to_scalar(&[1,1,-1,-1,-1,1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,-1]));
x.sort_cons();
assert!(x.verify(&mem));
}
#[test]
fn | () {
let mut x = ConstraintSystem::new();
let input = x.mem.alloc(&[1,4,3]);
let weight = x.mem.alloc(&[1,1,3,3]);
let output = x.mem.alloc(&[1,2,1]);
let weight_rev = x.mem.save(x.mem[weight].reverse(3));
x.conv2d_compact(input, output, weight_rev, None, 5,ActivationFunction::Sign);
let mut mem = x.mem.new_memory::<BigScalar>();
x.load_memory(input, &mut mem, &slice_to_scalar(&[1,1,2, 1,2,1, 1,1,1, 1,2,1]));
x.load_memory(weight, &mut mem, &slice_to_scalar(&[1,1,-1, 1,-1,1, 1,1,1]));
x.compute(&mut mem);
assert_eq!(mem[x.mem[output].begin() as usize..x.mem[output].end() as usize], slice_to_scalar(&[1,1]));
x.sort_cons();
assert!(x.verify(&mem));
}
} | conv2d_compact_test_small | identifier_name |
conv2d_compact.rs | use super::{ConstraintSystem, Scalar, MemoryManager, Memory, TensorAddress, SCALAR_SIZE, BigScalar, RangeFull, Range, RangeFrom, RangeTo, Id, min, Functions, ActivationFunction};
use crate::scalar::power_of_two;
impl ConstraintSystem {
pub fn run_conv2d_compact<T: Scalar>(mem: &MemoryManager, param: &[u32], var_dict: &mut Memory<T>) {
let (mul_result, k_col, packed_size,bit_length,extracted) = (param[0], param[1], param[2], param[3], param[4]);
let (fout, row_out, col_packed) = (mem[mul_result].dim[0],mem[mul_result].dim[1],mem[mul_result].dim[2]);
let row_dim = mem[extracted].dim[2];
let offset = power_of_two::<T>(bit_length - 1);
let mut big_offset = T::zero();
for _ in 0..packed_size + k_col - 1 {
big_offset = (big_offset * T::from_i32(2) + T::one()) * offset;
}
let n_packed = packed_size + k_col - 1;
for layer_out in 0..fout {
//matching result
for r in 0..row_out {
for c in 0..col_packed {
let val = (var_dict[mem[mul_result].at_idx(&[layer_out, r, c]) as usize] + big_offset).to_bytes();
let mut ext = Vec::new();
ext.resize((packed_size + k_col - 1) as usize, T::zero());
for k in 0..(packed_size + k_col - 1) * bit_length {
ext[(k / bit_length) as usize] += T::from_i32(((val[(k/8) as usize] >> (k % 8)) & 1) as i32) * power_of_two(k % bit_length);
}
for k in 0..packed_size + k_col - 1 {
let idx = c * n_packed + k;
if idx >= row_dim {
break
}
var_dict[mem[extracted].at_idx(&[layer_out,r,idx]) as usize] = ext[k as usize] - offset;
}
}
}
}
}
pub fn conv2d_compact(&mut self, input: TensorAddress, output: TensorAddress, weight_rev: TensorAddress, bias: Option<(TensorAddress, u32)>, bit_length: u8, act: ActivationFunction) {
// packing weight
let dim = &self.mem[weight_rev].dim;
let (fout, fin, k_row, k_col) = (dim[0], dim[1], dim[2], dim[3]);
let packed_weight = self.mem.alloc(&[fout, fin, k_row]);
assert!(k_col * (bit_length as u32) <= SCALAR_SIZE);
self.packing_tensor(weight_rev, packed_weight, bit_length, k_col as u8,1, BigScalar::one(), true);
let (row, col) = (self.mem[input].dim[1], self.mem[input].dim[2]);
let packed_size = min((SCALAR_SIZE / (bit_length as u32)).checked_sub(k_col).unwrap(),col);
let col_packed = (col-1)/packed_size + 1;
let packed_layer = self.mem.alloc(&[fin, row, col_packed]);
// packing row of inputs
self.packing_tensor_by_dim(input,&[-1], packed_layer, bit_length, packed_size as u8,1,BigScalar::one(), true);
// splicing output by row
let mut mul_input = Vec::new();
for r in 0..row - k_row + 1 {
let mut mul_input_row = Vec::new();
for c in 0..col_packed {
mul_input_row.push(self.mem.save(self.mem[packed_layer].at(&[RangeFull(), Range(r..r + k_row), Id(c)])));
}
mul_input.push(mul_input_row);
}
//packing bias
let mut packed_bias: Vec<Vec<TensorAddress>> = Vec::with_capacity(fout as usize);
let mut bias_dim = 0;
let mut bias_scale = 0;
if let Some((b, scale)) = bias {
bias_dim = (col - k_col)/packed_size + 1;
bias_scale = scale;
for layer_out in 0..fout {
let mut packed_bias_row: Vec<TensorAddress> = Vec::with_capacity(((row - k_row)/scale + 1) as usize);
for r in 0..(row - k_row)/scale + 1 {
let packed_bias = self.mem.alloc(&[bias_dim]);
let bias_row = self.mem.save(self.mem[b].at_(&[layer_out, r]));
self.packing_tensor(bias_row, packed_bias, bit_length, packed_size as u8, scale,power_of_two(bit_length as u32 * (k_col - 1)), true);
packed_bias_row.push(packed_bias);
}
packed_bias.push(packed_bias_row);
}
}
let mul_result = self.mem.alloc(&[fout, row - k_row + 1, col_packed]);
for layer_out in 0..fout {
let packed_weight = self.mem.save(self.mem[packed_weight].at_(&[layer_out]));
for r in 0..row - k_row + 1 {
for c in 0..col_packed {
let cur_bias = if c < bias_dim {Some(self.mem[packed_bias[layer_out as usize][(r/bias_scale) as usize]].at_idx(&[c]))} else {None};
self.dot(mul_input[r as usize][c as usize], packed_weight, self.mem[mul_result].at_idx(&[layer_out, r, c]), cur_bias);
}
}
}
// sign extraction
let n_packed = packed_size + k_col - 1;
let extracted_length = (col_packed - 1) * n_packed + ((col-1) % packed_size) + k_col;
let extracted = self.mem.alloc(&[fout, row - k_row + 1, extracted_length]);
self.packing_tensor_by_dim(extracted,&[-1], mul_result, bit_length, n_packed as u8,1,BigScalar::one(), false);
let params = vec![mul_result, k_col, packed_size, bit_length as u32, extracted];
self.compute.push((params.into_boxed_slice(), Functions::ConvCompact));
fn split_tensor<const N:usize>(mem: &mut MemoryManager,tensor: TensorAddress, length: u32, pos: [u32; N]) -> [(Option<TensorAddress>, Option<TensorAddress>); N] {
let fully_packed = mem[tensor].dim[2]/length;
let remainder = mem[tensor].dim[2] % length;
// should not save this
let tmp=mem[tensor].partition(2, length);
let mut res: [(Option<TensorAddress>, Option<TensorAddress>); N] = [(None, None); N];
for i in 0..N - 1 {
if pos[i] == pos[i+1] {
res[i] = (None, None);
continue;
}
let n= fully_packed + if remainder >= pos[i+1] {1} else {0};
let full = if n > 0 {
Some(mem.save(tmp.at(&[RangeFull(), RangeFull(), RangeTo(..n), Range(pos[i]..pos[i+1])])))
} else {
None
};
let rem = if pos[i] < remainder && remainder < pos[i+1] | else {
None
};
res[i] = (full, rem);
}
res
}
fn extract_sign_part(c: &mut ConstraintSystem, extracted: TensorAddress, bit_length: u8) {
let output = c.mem.alloc(&c.mem[extracted].dim.to_owned());
c.sign(extracted, output, bit_length - 1);
}
let reduced_extract = self.mem.save(self.mem[extracted].at(&[RangeFull(), RangeFull(), RangeTo(..extracted_length - k_col + 1)]));
if k_col != 1 {
let rem_extract = self.mem.save(self.mem[extracted].at(&[RangeFull(), RangeFull(), RangeFrom(extracted_length - k_col + 1..)]));
extract_sign_part(self, rem_extract, bit_length);
}
let [(output_full, output_full_rem), (output_part, output_part_rem), (_,_)]= split_tensor(&mut self.mem, output, packed_size, [0, packed_size-(k_col-1), packed_size]);
let [(ext_left, ext_left_rem), (ext_full, ext_full_rem), (ext_right,ext_right_rem), (_,_)]= split_tensor(&mut self.mem, reduced_extract, n_packed, [0, k_col-1, packed_size, n_packed]);
// extract the fully correct part
if let Some(e) = ext_full {
self.activation(e, output_full.unwrap(), bit_length - 1, act);
}
if let Some(e) = ext_full_rem {
self.activation(e, output_full_rem.unwrap(), bit_length - 1, act);
}
//extract left and right sign part
if let Some(e) = ext_left {
extract_sign_part(self,e, bit_length);
}
if let Some(e) = ext_left_rem {
extract_sign_part(self,e, bit_length);
}
if let Some(e) = ext_right {
extract_sign_part(self,e, bit_length);
}
assert_eq!(ext_right_rem, None);
if let Some(left_rem) = ext_left_rem {
if let Some(right) = ext_right {
let sum_res = self.mem.alloc(&[fout, row - k_row + 1, self.mem[right].dim[2] - 1, k_col - 1]);
let left = self.mem.save(self.mem[ext_left.unwrap()].at(&[RangeFull(), RangeFrom(1..)]));
self.sum_two(right, left, sum_res);
self.activation(sum_res, output_part.unwrap(), bit_length - 1, act);
let sum_res = self.mem.alloc(&[fout, row - k_row + 1, self.mem[left_rem].dim[2]]);
let right_rem = self.mem.save(self.mem[right].at(&[RangeFull(), Id(self.mem[right].dim[2] - 1), RangeTo(..self.mem[left_rem].dim[2])]));
self.sum_two(right_rem, left_rem, sum_res);
self.activation(sum_res, output_part_rem.unwrap(), bit_length - 1, act);
}
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::scalar::slice_to_scalar;
#[test]
fn conv2d_compact_test() {
let mut x = ConstraintSystem::new();
let input = x.mem.alloc(&[2,5,5]);
let weight = x.mem.alloc(&[2,2,3,3]);
let output = x.mem.alloc(&[2,3,3]);
let bias = x.mem.alloc(&[2,3,3]);
let weight_rev = x.mem.save(x.mem[weight].reverse(3));
x.conv2d_compact(input, output, weight_rev, Some((bias, 1)), 7, ActivationFunction::Sign);
let mut mem: Vec<BigScalar> = slice_to_scalar(&[1,0,1,-1,0,0,0,-2,4,-1,-4,0,3,-4,0,0,0,1,-1,1,-4,2,3,-1,0,-4,2,2,-3,-1,-1,1,2,-1,1,4,4,2,3,-3,0,3,-2,3,0,2,3,3,-2,2,4,3,3,-4,-4,-1,3,1,4,-2,-2,0,-2,4,-3,0,0,0,-2,0,0,0,0,3,4,-3,-4,-1,-1,-4,3,1,-2,0,0,0,-1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,-3,0,-3,0,1,-4,-1,2,0,0,-4,2,1,3,2,-3,4,-3]);
mem.resize(x.mem.n_var as usize, Scalar::zero());
x.compute(&mut mem);
assert_eq!(mem[87..87+18], slice_to_scalar(&[1,1,-1,-1,-1,1,1,-1,-1,-1,1,-1,-1,-1,1,-1,-1,-1]));
x.sort_cons();
assert!(x.verify(&mem));
}
#[test]
fn conv2d_compact_test_small() {
let mut x = ConstraintSystem::new();
let input = x.mem.alloc(&[1,4,3]);
let weight = x.mem.alloc(&[1,1,3,3]);
let output = x.mem.alloc(&[1,2,1]);
let weight_rev = x.mem.save(x.mem[weight].reverse(3));
x.conv2d_compact(input, output, weight_rev, None, 5,ActivationFunction::Sign);
let mut mem = x.mem.new_memory::<BigScalar>();
x.load_memory(input, &mut mem, &slice_to_scalar(&[1,1,2, 1,2,1, 1,1,1, 1,2,1]));
x.load_memory(weight, &mut mem, &slice_to_scalar(&[1,1,-1, 1,-1,1, 1,1,1]));
x.compute(&mut mem);
assert_eq!(mem[x.mem[output].begin() as usize..x.mem[output].end() as usize], slice_to_scalar(&[1,1]));
x.sort_cons();
assert!(x.verify(&mem));
}
} | {
Some(mem.save(tmp.at(&[RangeFull(), RangeFull(), Id(n), Range(pos[i]..remainder)])))
} | conditional_block |
director_test.go | //
// Copyright 2016 Gregory Trubetskoy. All Rights Reserved.
// | //
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package receiver
import (
"fmt"
"log"
"math"
"os"
"strings"
"sync"
"testing"
"time"
"github.com/hashicorp/memberlist"
"github.com/tgres/tgres/cluster"
"github.com/tgres/tgres/rrd"
"github.com/tgres/tgres/serde"
)
type fakeLogger struct {
last []byte
}
func (f *fakeLogger) Write(p []byte) (n int, err error) {
f.last = p
return len(p), nil
}
func Test_directorIncomingDPMessages(t *testing.T) {
defer func() {
// restore default output
log.SetOutput(os.Stderr)
}()
fl := &fakeLogger{}
log.SetOutput(fl)
rcv := make(chan *cluster.Msg)
dpCh := make(chan *IncomingDP)
count := 0
go func() {
for {
if _, ok := <-dpCh; !ok {
break
}
count++
}
}()
go directorIncomingDPMessages(rcv, dpCh)
// Sending a bogus message should not cause anything be written to dpCh
rcv <- &cluster.Msg{}
rcv <- &cluster.Msg{} // second send ensures the loop has gone full circle
if count > 0 {
t.Errorf("Malformed messages should not cause data points, count: %d", count)
}
if !strings.Contains(string(fl.last), "decoding FAILED") {
t.Errorf("Malformed messages should log 'decoding FAILED'")
}
// now we need a real message
dp := &IncomingDP{Name: "foo", TimeStamp: time.Unix(1000, 0), Value: 123}
m, _ := cluster.NewMsg(&cluster.Node{}, dp)
rcv <- m
rcv <- m
if count < 1 {
t.Errorf("At least 1 data point should have been sent to dpCh")
}
dp.Hops = 1000 // exceed maxhops (which in fakeCluster is 0?)
m, _ = cluster.NewMsg(&cluster.Node{}, dp)
rcv <- m // "clear" the loop
count = 0
rcv <- m
rcv <- m
if count > 0 {
t.Errorf("Hops exceeded should not cause data points, count: %d", count)
}
if !strings.Contains(string(fl.last), "max hops") {
t.Errorf("Hops exceeded messages should log 'max hops'")
}
// Closing the dpCh should cause the recover() to happen
// The test here is that it doesn't panic
close(dpCh)
dp.Hops = 0
m, _ = cluster.NewMsg(&cluster.Node{}, dp)
rcv <- m
// Closing the channel exists (not sure how to really test for that)
go directorIncomingDPMessages(rcv, dpCh)
close(rcv)
}
func Test_directorForwardDPToNode(t *testing.T) {
dp := &IncomingDP{Name: "foo", TimeStamp: time.Unix(1000, 0), Value: 123}
md := make([]byte, 20)
md[0] = 1 // Ready
node := &cluster.Node{Node: &memberlist.Node{Meta: md}}
snd := make(chan *cluster.Msg)
count := 0
go func() {
for {
if _, ok := <-snd; !ok {
break
}
count++
}
}()
// if hops is > 0, nothing happens
dp.Hops = 1
directorForwardDPToNode(dp, node, snd)
directorForwardDPToNode(dp, node, snd)
if count > 0 {
t.Errorf("directorForwardDPToNode: Data points with hops > 0 should not be forwarded")
}
// otherwise it should work
dp.Hops = 0
directorForwardDPToNode(dp, node, snd)
dp.Hops = 0 // because it just got incremented
directorForwardDPToNode(dp, node, snd)
if count < 1 {
t.Errorf("Data point not sent to channel?")
}
// mark node not Ready
md[0] = 0
dp.Hops = 0 // because it just got incremented
if err := directorForwardDPToNode(dp, node, snd); err == nil {
t.Errorf("not ready node should cause an error")
}
}
func Test_directorProcessOrForward(t *testing.T) {
saveFn := directorForwardDPToNode
forward, fwErr := 0, error(nil)
directorForwardDPToNode = func(dp *IncomingDP, node *cluster.Node, snd chan *cluster.Msg) error {
forward++
return fwErr
}
// dsc
db := &fakeSerde{}
df := &SimpleDSFinder{DftDSSPec}
sr := &fakeSr{}
dsf := &dsFlusher{db: db, sr: sr}
dsc := newDsCache(db, df, dsf)
// rds
foo := serde.Ident{"name": "foo"}
ds := serde.NewDbDataSource(0, foo, rrd.NewDataSource(*DftDSSPec))
rds := &cachedDs{DbDataSourcer: ds}
// cluster
clstr := &fakeCluster{}
md := make([]byte, 20)
md[0] = 1 // Ready
node := &cluster.Node{Node: &memberlist.Node{Meta: md, Name: "local"}}
clstr.nodesForDd = []*cluster.Node{node}
clstr.ln = node
// workerChs
workerChs := make([]chan *incomingDpWithDs, 1)
workerChs[0] = make(chan *incomingDpWithDs)
sent := 0
go func() {
for {
<-workerChs[0]
sent++
}
}()
// Test if we are LocalNode
directorProcessOrForward(dsc, rds, clstr, workerChs, nil, nil)
directorProcessOrForward(dsc, rds, clstr, workerChs, nil, nil)
if sent < 1 {
t.Errorf("directorProcessOrForward: Nothing sent to workerChs")
}
// Now test we are NOT LN, forward
remote := &cluster.Node{Node: &memberlist.Node{Meta: md, Name: "remote"}}
clstr.nodesForDd = []*cluster.Node{remote}
n := directorProcessOrForward(dsc, rds, clstr, workerChs, nil, nil)
if forward != 1 {
t.Errorf("directorProcessOrForward: directorForwardDPToNode not called")
}
if n != 1 {
t.Errorf("directorProcessOrForward: return value != 1")
}
fl := &fakeLogger{}
log.SetOutput(fl)
defer func() {
// restore default output
log.SetOutput(os.Stderr)
}()
fwErr = fmt.Errorf("some error")
n = directorProcessOrForward(dsc, rds, clstr, workerChs, nil, nil)
if n != 0 {
t.Errorf("directorProcessOrForward: return value != 0")
}
if !strings.Contains(string(fl.last), "some error") {
t.Errorf("directorProcessOrForward: directorForwardDPToNode not logged")
}
fwErr = nil
// make an rds with points
foo = serde.Ident{"name": "foo"}
ds = serde.NewDbDataSource(0, foo, rrd.NewDataSource(rrd.DSSpec{
Step: 10 * time.Second,
RRAs: []rrd.RRASpec{
rrd.RRASpec{Function: rrd.WMEAN,
Step: 10 * time.Second,
Span: 30 * time.Second,
Latest: time.Unix(1000, 0),
},
},
}))
ds.ProcessDataPoint(123, time.Unix(2000, 0))
ds.ProcessDataPoint(123, time.Unix(3000, 0))
rds = &cachedDs{DbDataSourcer: ds}
directorProcessOrForward(dsc, rds, clstr, workerChs, nil, nil)
if !strings.Contains(string(fl.last), "PointCount") {
t.Errorf("directorProcessOrForward: Missing the PointCount warning log")
}
if rds.PointCount() != 0 {
t.Errorf("directorProcessOrForward: ClearRRAs(true) not called")
}
// restore directorForwardDPToNode
directorForwardDPToNode = saveFn
}
func Test_directorProcessIncomingDP(t *testing.T) {
saveFn := directorProcessOrForward
dpofCalled := 0
directorProcessOrForward = func(dsc *dsCache, cds *cachedDs, clstr clusterer, workerChs workerChannels, dp *IncomingDP, snd chan *cluster.Msg) (forwarded int) {
dpofCalled++
return 0
}
fl := &fakeLogger{}
log.SetOutput(fl)
defer func() {
// restore default output
log.SetOutput(os.Stderr)
}()
// dp
dp := &IncomingDP{Name: "foo", TimeStamp: time.Unix(1000, 0), Value: 123}
// dsc
db := &fakeSerde{}
df := &SimpleDSFinder{DftDSSPec}
scr := &fakeSr{}
dsf := &dsFlusher{db: db, sr: scr}
dsc := newDsCache(db, df, dsf)
// cluster
clstr := &fakeCluster{cChange: make(chan bool)}
// workerChs
workerChs := make([]chan *incomingDpWithDs, 1)
workerChs[0] = make(chan *incomingDpWithDs)
sent := 0
go func() {
for {
<-workerChs[0]
sent++
}
}()
// NaN
dp.Value = math.NaN()
directorProcessIncomingDP(dp, scr, dsc, nil, nil, nil)
if scr.called != 1 {
t.Errorf("directorProcessIncomingDP: With a NaN, reportStatCount() should only be called once")
}
if dpofCalled > 0 {
t.Errorf("directorProcessIncomingDP: With a NaN, directorProcessOrForward should not be called")
}
// A value
dp.Value = 1234
scr.called, dpofCalled = 0, 0
directorProcessIncomingDP(dp, scr, dsc, workerChs, clstr, nil)
if scr.called != 2 {
t.Errorf("directorProcessIncomingDP: With a value, reportStatCount() should be called twice: %v", scr.called)
}
if dpofCalled != 1 {
t.Errorf("directorProcessIncomingDP: With a value, directorProcessOrForward should be called once: %v", dpofCalled)
}
// A blank name should cause a nil rds
dp.Name = ""
scr.called, dpofCalled = 0, 0
directorProcessIncomingDP(dp, scr, dsc, nil, nil, nil)
if scr.called != 1 {
t.Errorf("directorProcessIncomingDP: With a blank name, reportStatCount() should be called once")
}
if dpofCalled > 0 {
t.Errorf("directorProcessIncomingDP: With a blank name, directorProcessOrForward should not be called")
}
if !strings.Contains(string(fl.last), "No spec matched") {
t.Errorf("should log 'No spec matched'")
}
// fake a db error
dp.Name = "blah"
db.fakeErr = true
scr.called, dpofCalled = 0, 0
directorProcessIncomingDP(dp, scr, dsc, nil, nil, nil)
if scr.called != 1 {
t.Errorf("directorProcessIncomingDP: With a db error, reportStatCount() should be called once")
}
if dpofCalled > 0 {
t.Errorf("directorProcessIncomingDP: With a db error, directorProcessOrForward should not be called")
}
if !strings.Contains(string(fl.last), "error") {
t.Errorf("should log 'error'")
}
// nil cluster
dp.Value = 1234
db.fakeErr = false
scr.called = 0
directorProcessIncomingDP(dp, scr, dsc, workerChs, nil, nil)
if scr.called != 1 {
t.Errorf("directorProcessIncomingDP: With a value, reportStatCount() should be called once: %v", scr.called)
}
if dpofCalled != 0 {
t.Errorf("directorProcessIncomingDP: With a value and no cluster, directorProcessOrForward should not be called: %v", dpofCalled)
}
directorProcessOrForward = saveFn
}
func Test_the_director(t *testing.T) {
saveFn1 := directorIncomingDPMessages
saveFn2 := directorProcessIncomingDP
dimCalled := 0
directorIncomingDPMessages = func(rcv chan *cluster.Msg, dpCh chan *IncomingDP) { dimCalled++ }
dpidpCalled := 0
directorProcessIncomingDP = func(dp *IncomingDP, scr statReporter, dsc *dsCache, workerChs workerChannels, clstr clusterer, snd chan *cluster.Msg) {
dpidpCalled++
}
fl := &fakeLogger{}
log.SetOutput(fl)
defer func() {
// restore default output
log.SetOutput(os.Stderr)
}()
wc := &wrkCtl{wg: &sync.WaitGroup{}, startWg: &sync.WaitGroup{}, id: "FOO"}
clstr := &fakeCluster{cChange: make(chan bool)}
dpCh := make(chan *IncomingDP)
// dsc
db := &fakeSerde{}
df := &SimpleDSFinder{DftDSSPec}
sr := &fakeSr{}
dsf := &dsFlusher{db: db, sr: sr}
dsc := newDsCache(db, df, dsf)
wc.startWg.Add(1)
go director(wc, dpCh, clstr, sr, dsc, nil)
wc.startWg.Wait()
if clstr.nReady == 0 {
t.Errorf("director: Ready(true) not called on cluster")
}
if clstr.nReg == 0 {
t.Errorf("director: cluster.RegisterMsgType() not called")
}
// This sometimes can fail because we don't wait for that goroutine in this test...
time.Sleep(5 * time.Millisecond)
if dimCalled == 0 {
t.Errorf("director: directorIncomingDPMessages not started")
}
dp := &IncomingDP{Name: "foo", TimeStamp: time.Unix(1000, 0), Value: 123}
dpCh <- dp
dpCh <- dp
if dpidpCalled == 0 {
t.Errorf("director: directorProcessIncomingDP not called")
}
// Trigger a transition
clstr.cChange <- true
dpCh <- dp
if clstr.nTrans == 0 {
t.Errorf("director: on cluster change, Transition() not called")
}
// Transition with error
clstr.tErr = true
clstr.cChange <- true
dpCh <- dp
if !strings.Contains(string(fl.last), "some error") {
t.Errorf("director: on transition error, 'some error' missing from logs")
}
dpidpCalled = 0
close(dpCh)
time.Sleep(1 * time.Second) // so that nil dp goroutine panics/recovers
if dpidpCalled > 0 {
t.Errorf("director: directorProcessIncomingDP must not be called on channel close")
}
if !strings.Contains(string(fl.last), "shutting down") {
t.Errorf("director: on channel close, missing 'shutting down' log entry")
}
// overrun
dpCh = make(chan *IncomingDP, 5)
dpCh <- dp
dpCh <- dp
dpCh <- dp
dpCh <- dp
wc.startWg.Add(1)
go director(wc, dpCh, clstr, sr, dsc, nil)
wc.startWg.Wait()
time.Sleep(100 * time.Millisecond)
close(dpCh)
directorIncomingDPMessages = saveFn1
directorProcessIncomingDP = saveFn2
}
func Test_director_reportDirectorChannelFillPercent(t *testing.T) {
defer func() {
// restore default output
log.SetOutput(os.Stderr)
}()
fl := &fakeLogger{}
log.SetOutput(fl)
ch := make(chan *IncomingDP, 10)
sr := &fakeSr{}
for i := 0; i < 9; i++ {
ch <- &IncomingDP{}
}
queue := &dpQueue{}
queue.push(&IncomingDP{})
go reportDirectorChannelFillPercent(ch, queue, sr, time.Millisecond)
time.Sleep(50 * time.Millisecond)
if sr.called == 0 {
t.Errorf("reportDirectorChannelFillPercent: statReporter should have been called a bunch of times")
}
if !strings.Contains(string(fl.last), "WARNING") {
t.Errorf("reportDirectorChannelFillPercent: there should be a warning about director channel nearly full")
}
}
func Test_director_queue(t *testing.T) {
queue := &dpQueue{}
dp := &IncomingDP{}
queue.push(dp)
if queue.pop() != dp {
t.Errorf("queue: pop returned wrong dp")
}
if queue.size() != 0 {
t.Errorf("queue: should be empty")
}
queue.push(&IncomingDP{})
if queue.size() != 1 {
t.Errorf("queue: size != 1")
}
}
func Test_director_checkSetAside(t *testing.T) {
queue := &dpQueue{}
dp := &IncomingDP{}
dp2 := &IncomingDP{}
r := checkSetAside(dp, queue, true)
if r != nil {
t.Errorf("with skip, checkSetAside should return nil")
}
if queue.size() != 1 {
t.Errorf("checkSetAside: queue size != 1")
}
r = checkSetAside(dp2, queue, false)
if r != dp {
t.Errorf("checkSetAside returned wrong point")
}
r = checkSetAside(nil, queue, false)
if r != dp2 {
t.Errorf("checkSetAside returned wrong point")
}
if queue.size() != 0 {
t.Errorf("checkSetAside: queue size != 0")
}
r = checkSetAside(nil, queue, false)
if r != nil {
t.Errorf("with skip false and empty queue, checkSetAside should return our point: nil")
}
} | // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0 | random_line_split |
director_test.go | //
// Copyright 2016 Gregory Trubetskoy. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package receiver
import (
"fmt"
"log"
"math"
"os"
"strings"
"sync"
"testing"
"time"
"github.com/hashicorp/memberlist"
"github.com/tgres/tgres/cluster"
"github.com/tgres/tgres/rrd"
"github.com/tgres/tgres/serde"
)
type fakeLogger struct {
last []byte
}
func (f *fakeLogger) Write(p []byte) (n int, err error) {
f.last = p
return len(p), nil
}
func Test_directorIncomingDPMessages(t *testing.T) |
func Test_directorForwardDPToNode(t *testing.T) {
dp := &IncomingDP{Name: "foo", TimeStamp: time.Unix(1000, 0), Value: 123}
md := make([]byte, 20)
md[0] = 1 // Ready
node := &cluster.Node{Node: &memberlist.Node{Meta: md}}
snd := make(chan *cluster.Msg)
count := 0
go func() {
for {
if _, ok := <-snd; !ok {
break
}
count++
}
}()
// if hops is > 0, nothing happens
dp.Hops = 1
directorForwardDPToNode(dp, node, snd)
directorForwardDPToNode(dp, node, snd)
if count > 0 {
t.Errorf("directorForwardDPToNode: Data points with hops > 0 should not be forwarded")
}
// otherwise it should work
dp.Hops = 0
directorForwardDPToNode(dp, node, snd)
dp.Hops = 0 // because it just got incremented
directorForwardDPToNode(dp, node, snd)
if count < 1 {
t.Errorf("Data point not sent to channel?")
}
// mark node not Ready
md[0] = 0
dp.Hops = 0 // because it just got incremented
if err := directorForwardDPToNode(dp, node, snd); err == nil {
t.Errorf("not ready node should cause an error")
}
}
func Test_directorProcessOrForward(t *testing.T) {
saveFn := directorForwardDPToNode
forward, fwErr := 0, error(nil)
directorForwardDPToNode = func(dp *IncomingDP, node *cluster.Node, snd chan *cluster.Msg) error {
forward++
return fwErr
}
// dsc
db := &fakeSerde{}
df := &SimpleDSFinder{DftDSSPec}
sr := &fakeSr{}
dsf := &dsFlusher{db: db, sr: sr}
dsc := newDsCache(db, df, dsf)
// rds
foo := serde.Ident{"name": "foo"}
ds := serde.NewDbDataSource(0, foo, rrd.NewDataSource(*DftDSSPec))
rds := &cachedDs{DbDataSourcer: ds}
// cluster
clstr := &fakeCluster{}
md := make([]byte, 20)
md[0] = 1 // Ready
node := &cluster.Node{Node: &memberlist.Node{Meta: md, Name: "local"}}
clstr.nodesForDd = []*cluster.Node{node}
clstr.ln = node
// workerChs
workerChs := make([]chan *incomingDpWithDs, 1)
workerChs[0] = make(chan *incomingDpWithDs)
sent := 0
go func() {
for {
<-workerChs[0]
sent++
}
}()
// Test if we are LocalNode
directorProcessOrForward(dsc, rds, clstr, workerChs, nil, nil)
directorProcessOrForward(dsc, rds, clstr, workerChs, nil, nil)
if sent < 1 {
t.Errorf("directorProcessOrForward: Nothing sent to workerChs")
}
// Now test we are NOT LN, forward
remote := &cluster.Node{Node: &memberlist.Node{Meta: md, Name: "remote"}}
clstr.nodesForDd = []*cluster.Node{remote}
n := directorProcessOrForward(dsc, rds, clstr, workerChs, nil, nil)
if forward != 1 {
t.Errorf("directorProcessOrForward: directorForwardDPToNode not called")
}
if n != 1 {
t.Errorf("directorProcessOrForward: return value != 1")
}
fl := &fakeLogger{}
log.SetOutput(fl)
defer func() {
// restore default output
log.SetOutput(os.Stderr)
}()
fwErr = fmt.Errorf("some error")
n = directorProcessOrForward(dsc, rds, clstr, workerChs, nil, nil)
if n != 0 {
t.Errorf("directorProcessOrForward: return value != 0")
}
if !strings.Contains(string(fl.last), "some error") {
t.Errorf("directorProcessOrForward: directorForwardDPToNode not logged")
}
fwErr = nil
// make an rds with points
foo = serde.Ident{"name": "foo"}
ds = serde.NewDbDataSource(0, foo, rrd.NewDataSource(rrd.DSSpec{
Step: 10 * time.Second,
RRAs: []rrd.RRASpec{
rrd.RRASpec{Function: rrd.WMEAN,
Step: 10 * time.Second,
Span: 30 * time.Second,
Latest: time.Unix(1000, 0),
},
},
}))
ds.ProcessDataPoint(123, time.Unix(2000, 0))
ds.ProcessDataPoint(123, time.Unix(3000, 0))
rds = &cachedDs{DbDataSourcer: ds}
directorProcessOrForward(dsc, rds, clstr, workerChs, nil, nil)
if !strings.Contains(string(fl.last), "PointCount") {
t.Errorf("directorProcessOrForward: Missing the PointCount warning log")
}
if rds.PointCount() != 0 {
t.Errorf("directorProcessOrForward: ClearRRAs(true) not called")
}
// restore directorForwardDPToNode
directorForwardDPToNode = saveFn
}
func Test_directorProcessIncomingDP(t *testing.T) {
saveFn := directorProcessOrForward
dpofCalled := 0
directorProcessOrForward = func(dsc *dsCache, cds *cachedDs, clstr clusterer, workerChs workerChannels, dp *IncomingDP, snd chan *cluster.Msg) (forwarded int) {
dpofCalled++
return 0
}
fl := &fakeLogger{}
log.SetOutput(fl)
defer func() {
// restore default output
log.SetOutput(os.Stderr)
}()
// dp
dp := &IncomingDP{Name: "foo", TimeStamp: time.Unix(1000, 0), Value: 123}
// dsc
db := &fakeSerde{}
df := &SimpleDSFinder{DftDSSPec}
scr := &fakeSr{}
dsf := &dsFlusher{db: db, sr: scr}
dsc := newDsCache(db, df, dsf)
// cluster
clstr := &fakeCluster{cChange: make(chan bool)}
// workerChs
workerChs := make([]chan *incomingDpWithDs, 1)
workerChs[0] = make(chan *incomingDpWithDs)
sent := 0
go func() {
for {
<-workerChs[0]
sent++
}
}()
// NaN
dp.Value = math.NaN()
directorProcessIncomingDP(dp, scr, dsc, nil, nil, nil)
if scr.called != 1 {
t.Errorf("directorProcessIncomingDP: With a NaN, reportStatCount() should only be called once")
}
if dpofCalled > 0 {
t.Errorf("directorProcessIncomingDP: With a NaN, directorProcessOrForward should not be called")
}
// A value
dp.Value = 1234
scr.called, dpofCalled = 0, 0
directorProcessIncomingDP(dp, scr, dsc, workerChs, clstr, nil)
if scr.called != 2 {
t.Errorf("directorProcessIncomingDP: With a value, reportStatCount() should be called twice: %v", scr.called)
}
if dpofCalled != 1 {
t.Errorf("directorProcessIncomingDP: With a value, directorProcessOrForward should be called once: %v", dpofCalled)
}
// A blank name should cause a nil rds
dp.Name = ""
scr.called, dpofCalled = 0, 0
directorProcessIncomingDP(dp, scr, dsc, nil, nil, nil)
if scr.called != 1 {
t.Errorf("directorProcessIncomingDP: With a blank name, reportStatCount() should be called once")
}
if dpofCalled > 0 {
t.Errorf("directorProcessIncomingDP: With a blank name, directorProcessOrForward should not be called")
}
if !strings.Contains(string(fl.last), "No spec matched") {
t.Errorf("should log 'No spec matched'")
}
// fake a db error
dp.Name = "blah"
db.fakeErr = true
scr.called, dpofCalled = 0, 0
directorProcessIncomingDP(dp, scr, dsc, nil, nil, nil)
if scr.called != 1 {
t.Errorf("directorProcessIncomingDP: With a db error, reportStatCount() should be called once")
}
if dpofCalled > 0 {
t.Errorf("directorProcessIncomingDP: With a db error, directorProcessOrForward should not be called")
}
if !strings.Contains(string(fl.last), "error") {
t.Errorf("should log 'error'")
}
// nil cluster
dp.Value = 1234
db.fakeErr = false
scr.called = 0
directorProcessIncomingDP(dp, scr, dsc, workerChs, nil, nil)
if scr.called != 1 {
t.Errorf("directorProcessIncomingDP: With a value, reportStatCount() should be called once: %v", scr.called)
}
if dpofCalled != 0 {
t.Errorf("directorProcessIncomingDP: With a value and no cluster, directorProcessOrForward should not be called: %v", dpofCalled)
}
directorProcessOrForward = saveFn
}
func Test_the_director(t *testing.T) {
saveFn1 := directorIncomingDPMessages
saveFn2 := directorProcessIncomingDP
dimCalled := 0
directorIncomingDPMessages = func(rcv chan *cluster.Msg, dpCh chan *IncomingDP) { dimCalled++ }
dpidpCalled := 0
directorProcessIncomingDP = func(dp *IncomingDP, scr statReporter, dsc *dsCache, workerChs workerChannels, clstr clusterer, snd chan *cluster.Msg) {
dpidpCalled++
}
fl := &fakeLogger{}
log.SetOutput(fl)
defer func() {
// restore default output
log.SetOutput(os.Stderr)
}()
wc := &wrkCtl{wg: &sync.WaitGroup{}, startWg: &sync.WaitGroup{}, id: "FOO"}
clstr := &fakeCluster{cChange: make(chan bool)}
dpCh := make(chan *IncomingDP)
// dsc
db := &fakeSerde{}
df := &SimpleDSFinder{DftDSSPec}
sr := &fakeSr{}
dsf := &dsFlusher{db: db, sr: sr}
dsc := newDsCache(db, df, dsf)
wc.startWg.Add(1)
go director(wc, dpCh, clstr, sr, dsc, nil)
wc.startWg.Wait()
if clstr.nReady == 0 {
t.Errorf("director: Ready(true) not called on cluster")
}
if clstr.nReg == 0 {
t.Errorf("director: cluster.RegisterMsgType() not called")
}
// This sometimes can fail because we don't wait for that goroutine in this test...
time.Sleep(5 * time.Millisecond)
if dimCalled == 0 {
t.Errorf("director: directorIncomingDPMessages not started")
}
dp := &IncomingDP{Name: "foo", TimeStamp: time.Unix(1000, 0), Value: 123}
dpCh <- dp
dpCh <- dp
if dpidpCalled == 0 {
t.Errorf("director: directorProcessIncomingDP not called")
}
// Trigger a transition
clstr.cChange <- true
dpCh <- dp
if clstr.nTrans == 0 {
t.Errorf("director: on cluster change, Transition() not called")
}
// Transition with error
clstr.tErr = true
clstr.cChange <- true
dpCh <- dp
if !strings.Contains(string(fl.last), "some error") {
t.Errorf("director: on transition error, 'some error' missing from logs")
}
dpidpCalled = 0
close(dpCh)
time.Sleep(1 * time.Second) // so that nil dp goroutine panics/recovers
if dpidpCalled > 0 {
t.Errorf("director: directorProcessIncomingDP must not be called on channel close")
}
if !strings.Contains(string(fl.last), "shutting down") {
t.Errorf("director: on channel close, missing 'shutting down' log entry")
}
// overrun
dpCh = make(chan *IncomingDP, 5)
dpCh <- dp
dpCh <- dp
dpCh <- dp
dpCh <- dp
wc.startWg.Add(1)
go director(wc, dpCh, clstr, sr, dsc, nil)
wc.startWg.Wait()
time.Sleep(100 * time.Millisecond)
close(dpCh)
directorIncomingDPMessages = saveFn1
directorProcessIncomingDP = saveFn2
}
func Test_director_reportDirectorChannelFillPercent(t *testing.T) {
defer func() {
// restore default output
log.SetOutput(os.Stderr)
}()
fl := &fakeLogger{}
log.SetOutput(fl)
ch := make(chan *IncomingDP, 10)
sr := &fakeSr{}
for i := 0; i < 9; i++ {
ch <- &IncomingDP{}
}
queue := &dpQueue{}
queue.push(&IncomingDP{})
go reportDirectorChannelFillPercent(ch, queue, sr, time.Millisecond)
time.Sleep(50 * time.Millisecond)
if sr.called == 0 {
t.Errorf("reportDirectorChannelFillPercent: statReporter should have been called a bunch of times")
}
if !strings.Contains(string(fl.last), "WARNING") {
t.Errorf("reportDirectorChannelFillPercent: there should be a warning about director channel nearly full")
}
}
func Test_director_queue(t *testing.T) {
queue := &dpQueue{}
dp := &IncomingDP{}
queue.push(dp)
if queue.pop() != dp {
t.Errorf("queue: pop returned wrong dp")
}
if queue.size() != 0 {
t.Errorf("queue: should be empty")
}
queue.push(&IncomingDP{})
if queue.size() != 1 {
t.Errorf("queue: size != 1")
}
}
func Test_director_checkSetAside(t *testing.T) {
queue := &dpQueue{}
dp := &IncomingDP{}
dp2 := &IncomingDP{}
r := checkSetAside(dp, queue, true)
if r != nil {
t.Errorf("with skip, checkSetAside should return nil")
}
if queue.size() != 1 {
t.Errorf("checkSetAside: queue size != 1")
}
r = checkSetAside(dp2, queue, false)
if r != dp {
t.Errorf("checkSetAside returned wrong point")
}
r = checkSetAside(nil, queue, false)
if r != dp2 {
t.Errorf("checkSetAside returned wrong point")
}
if queue.size() != 0 {
t.Errorf("checkSetAside: queue size != 0")
}
r = checkSetAside(nil, queue, false)
if r != nil {
t.Errorf("with skip false and empty queue, checkSetAside should return our point: nil")
}
}
| {
defer func() {
// restore default output
log.SetOutput(os.Stderr)
}()
fl := &fakeLogger{}
log.SetOutput(fl)
rcv := make(chan *cluster.Msg)
dpCh := make(chan *IncomingDP)
count := 0
go func() {
for {
if _, ok := <-dpCh; !ok {
break
}
count++
}
}()
go directorIncomingDPMessages(rcv, dpCh)
// Sending a bogus message should not cause anything be written to dpCh
rcv <- &cluster.Msg{}
rcv <- &cluster.Msg{} // second send ensures the loop has gone full circle
if count > 0 {
t.Errorf("Malformed messages should not cause data points, count: %d", count)
}
if !strings.Contains(string(fl.last), "decoding FAILED") {
t.Errorf("Malformed messages should log 'decoding FAILED'")
}
// now we need a real message
dp := &IncomingDP{Name: "foo", TimeStamp: time.Unix(1000, 0), Value: 123}
m, _ := cluster.NewMsg(&cluster.Node{}, dp)
rcv <- m
rcv <- m
if count < 1 {
t.Errorf("At least 1 data point should have been sent to dpCh")
}
dp.Hops = 1000 // exceed maxhops (which in fakeCluster is 0?)
m, _ = cluster.NewMsg(&cluster.Node{}, dp)
rcv <- m // "clear" the loop
count = 0
rcv <- m
rcv <- m
if count > 0 {
t.Errorf("Hops exceeded should not cause data points, count: %d", count)
}
if !strings.Contains(string(fl.last), "max hops") {
t.Errorf("Hops exceeded messages should log 'max hops'")
}
// Closing the dpCh should cause the recover() to happen
// The test here is that it doesn't panic
close(dpCh)
dp.Hops = 0
m, _ = cluster.NewMsg(&cluster.Node{}, dp)
rcv <- m
// Closing the channel exists (not sure how to really test for that)
go directorIncomingDPMessages(rcv, dpCh)
close(rcv)
} | identifier_body |
director_test.go | //
// Copyright 2016 Gregory Trubetskoy. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package receiver
import (
"fmt"
"log"
"math"
"os"
"strings"
"sync"
"testing"
"time"
"github.com/hashicorp/memberlist"
"github.com/tgres/tgres/cluster"
"github.com/tgres/tgres/rrd"
"github.com/tgres/tgres/serde"
)
type fakeLogger struct {
last []byte
}
func (f *fakeLogger) Write(p []byte) (n int, err error) {
f.last = p
return len(p), nil
}
func Test_directorIncomingDPMessages(t *testing.T) {
defer func() {
// restore default output
log.SetOutput(os.Stderr)
}()
fl := &fakeLogger{}
log.SetOutput(fl)
rcv := make(chan *cluster.Msg)
dpCh := make(chan *IncomingDP)
count := 0
go func() {
for {
if _, ok := <-dpCh; !ok {
break
}
count++
}
}()
go directorIncomingDPMessages(rcv, dpCh)
// Sending a bogus message should not cause anything be written to dpCh
rcv <- &cluster.Msg{}
rcv <- &cluster.Msg{} // second send ensures the loop has gone full circle
if count > 0 {
t.Errorf("Malformed messages should not cause data points, count: %d", count)
}
if !strings.Contains(string(fl.last), "decoding FAILED") {
t.Errorf("Malformed messages should log 'decoding FAILED'")
}
// now we need a real message
dp := &IncomingDP{Name: "foo", TimeStamp: time.Unix(1000, 0), Value: 123}
m, _ := cluster.NewMsg(&cluster.Node{}, dp)
rcv <- m
rcv <- m
if count < 1 {
t.Errorf("At least 1 data point should have been sent to dpCh")
}
dp.Hops = 1000 // exceed maxhops (which in fakeCluster is 0?)
m, _ = cluster.NewMsg(&cluster.Node{}, dp)
rcv <- m // "clear" the loop
count = 0
rcv <- m
rcv <- m
if count > 0 {
t.Errorf("Hops exceeded should not cause data points, count: %d", count)
}
if !strings.Contains(string(fl.last), "max hops") {
t.Errorf("Hops exceeded messages should log 'max hops'")
}
// Closing the dpCh should cause the recover() to happen
// The test here is that it doesn't panic
close(dpCh)
dp.Hops = 0
m, _ = cluster.NewMsg(&cluster.Node{}, dp)
rcv <- m
// Closing the channel exists (not sure how to really test for that)
go directorIncomingDPMessages(rcv, dpCh)
close(rcv)
}
func Test_directorForwardDPToNode(t *testing.T) {
dp := &IncomingDP{Name: "foo", TimeStamp: time.Unix(1000, 0), Value: 123}
md := make([]byte, 20)
md[0] = 1 // Ready
node := &cluster.Node{Node: &memberlist.Node{Meta: md}}
snd := make(chan *cluster.Msg)
count := 0
go func() {
for {
if _, ok := <-snd; !ok {
break
}
count++
}
}()
// if hops is > 0, nothing happens
dp.Hops = 1
directorForwardDPToNode(dp, node, snd)
directorForwardDPToNode(dp, node, snd)
if count > 0 {
t.Errorf("directorForwardDPToNode: Data points with hops > 0 should not be forwarded")
}
// otherwise it should work
dp.Hops = 0
directorForwardDPToNode(dp, node, snd)
dp.Hops = 0 // because it just got incremented
directorForwardDPToNode(dp, node, snd)
if count < 1 {
t.Errorf("Data point not sent to channel?")
}
// mark node not Ready
md[0] = 0
dp.Hops = 0 // because it just got incremented
if err := directorForwardDPToNode(dp, node, snd); err == nil {
t.Errorf("not ready node should cause an error")
}
}
func Test_directorProcessOrForward(t *testing.T) {
saveFn := directorForwardDPToNode
forward, fwErr := 0, error(nil)
directorForwardDPToNode = func(dp *IncomingDP, node *cluster.Node, snd chan *cluster.Msg) error {
forward++
return fwErr
}
// dsc
db := &fakeSerde{}
df := &SimpleDSFinder{DftDSSPec}
sr := &fakeSr{}
dsf := &dsFlusher{db: db, sr: sr}
dsc := newDsCache(db, df, dsf)
// rds
foo := serde.Ident{"name": "foo"}
ds := serde.NewDbDataSource(0, foo, rrd.NewDataSource(*DftDSSPec))
rds := &cachedDs{DbDataSourcer: ds}
// cluster
clstr := &fakeCluster{}
md := make([]byte, 20)
md[0] = 1 // Ready
node := &cluster.Node{Node: &memberlist.Node{Meta: md, Name: "local"}}
clstr.nodesForDd = []*cluster.Node{node}
clstr.ln = node
// workerChs
workerChs := make([]chan *incomingDpWithDs, 1)
workerChs[0] = make(chan *incomingDpWithDs)
sent := 0
go func() {
for {
<-workerChs[0]
sent++
}
}()
// Test if we are LocalNode
directorProcessOrForward(dsc, rds, clstr, workerChs, nil, nil)
directorProcessOrForward(dsc, rds, clstr, workerChs, nil, nil)
if sent < 1 {
t.Errorf("directorProcessOrForward: Nothing sent to workerChs")
}
// Now test we are NOT LN, forward
remote := &cluster.Node{Node: &memberlist.Node{Meta: md, Name: "remote"}}
clstr.nodesForDd = []*cluster.Node{remote}
n := directorProcessOrForward(dsc, rds, clstr, workerChs, nil, nil)
if forward != 1 {
t.Errorf("directorProcessOrForward: directorForwardDPToNode not called")
}
if n != 1 {
t.Errorf("directorProcessOrForward: return value != 1")
}
fl := &fakeLogger{}
log.SetOutput(fl)
defer func() {
// restore default output
log.SetOutput(os.Stderr)
}()
fwErr = fmt.Errorf("some error")
n = directorProcessOrForward(dsc, rds, clstr, workerChs, nil, nil)
if n != 0 {
t.Errorf("directorProcessOrForward: return value != 0")
}
if !strings.Contains(string(fl.last), "some error") {
t.Errorf("directorProcessOrForward: directorForwardDPToNode not logged")
}
fwErr = nil
// make an rds with points
foo = serde.Ident{"name": "foo"}
ds = serde.NewDbDataSource(0, foo, rrd.NewDataSource(rrd.DSSpec{
Step: 10 * time.Second,
RRAs: []rrd.RRASpec{
rrd.RRASpec{Function: rrd.WMEAN,
Step: 10 * time.Second,
Span: 30 * time.Second,
Latest: time.Unix(1000, 0),
},
},
}))
ds.ProcessDataPoint(123, time.Unix(2000, 0))
ds.ProcessDataPoint(123, time.Unix(3000, 0))
rds = &cachedDs{DbDataSourcer: ds}
directorProcessOrForward(dsc, rds, clstr, workerChs, nil, nil)
if !strings.Contains(string(fl.last), "PointCount") {
t.Errorf("directorProcessOrForward: Missing the PointCount warning log")
}
if rds.PointCount() != 0 {
t.Errorf("directorProcessOrForward: ClearRRAs(true) not called")
}
// restore directorForwardDPToNode
directorForwardDPToNode = saveFn
}
func Test_directorProcessIncomingDP(t *testing.T) {
saveFn := directorProcessOrForward
dpofCalled := 0
directorProcessOrForward = func(dsc *dsCache, cds *cachedDs, clstr clusterer, workerChs workerChannels, dp *IncomingDP, snd chan *cluster.Msg) (forwarded int) {
dpofCalled++
return 0
}
fl := &fakeLogger{}
log.SetOutput(fl)
defer func() {
// restore default output
log.SetOutput(os.Stderr)
}()
// dp
dp := &IncomingDP{Name: "foo", TimeStamp: time.Unix(1000, 0), Value: 123}
// dsc
db := &fakeSerde{}
df := &SimpleDSFinder{DftDSSPec}
scr := &fakeSr{}
dsf := &dsFlusher{db: db, sr: scr}
dsc := newDsCache(db, df, dsf)
// cluster
clstr := &fakeCluster{cChange: make(chan bool)}
// workerChs
workerChs := make([]chan *incomingDpWithDs, 1)
workerChs[0] = make(chan *incomingDpWithDs)
sent := 0
go func() {
for {
<-workerChs[0]
sent++
}
}()
// NaN
dp.Value = math.NaN()
directorProcessIncomingDP(dp, scr, dsc, nil, nil, nil)
if scr.called != 1 {
t.Errorf("directorProcessIncomingDP: With a NaN, reportStatCount() should only be called once")
}
if dpofCalled > 0 {
t.Errorf("directorProcessIncomingDP: With a NaN, directorProcessOrForward should not be called")
}
// A value
dp.Value = 1234
scr.called, dpofCalled = 0, 0
directorProcessIncomingDP(dp, scr, dsc, workerChs, clstr, nil)
if scr.called != 2 {
t.Errorf("directorProcessIncomingDP: With a value, reportStatCount() should be called twice: %v", scr.called)
}
if dpofCalled != 1 {
t.Errorf("directorProcessIncomingDP: With a value, directorProcessOrForward should be called once: %v", dpofCalled)
}
// A blank name should cause a nil rds
dp.Name = ""
scr.called, dpofCalled = 0, 0
directorProcessIncomingDP(dp, scr, dsc, nil, nil, nil)
if scr.called != 1 {
t.Errorf("directorProcessIncomingDP: With a blank name, reportStatCount() should be called once")
}
if dpofCalled > 0 {
t.Errorf("directorProcessIncomingDP: With a blank name, directorProcessOrForward should not be called")
}
if !strings.Contains(string(fl.last), "No spec matched") {
t.Errorf("should log 'No spec matched'")
}
// fake a db error
dp.Name = "blah"
db.fakeErr = true
scr.called, dpofCalled = 0, 0
directorProcessIncomingDP(dp, scr, dsc, nil, nil, nil)
if scr.called != 1 {
t.Errorf("directorProcessIncomingDP: With a db error, reportStatCount() should be called once")
}
if dpofCalled > 0 {
t.Errorf("directorProcessIncomingDP: With a db error, directorProcessOrForward should not be called")
}
if !strings.Contains(string(fl.last), "error") {
t.Errorf("should log 'error'")
}
// nil cluster
dp.Value = 1234
db.fakeErr = false
scr.called = 0
directorProcessIncomingDP(dp, scr, dsc, workerChs, nil, nil)
if scr.called != 1 {
t.Errorf("directorProcessIncomingDP: With a value, reportStatCount() should be called once: %v", scr.called)
}
if dpofCalled != 0 {
t.Errorf("directorProcessIncomingDP: With a value and no cluster, directorProcessOrForward should not be called: %v", dpofCalled)
}
directorProcessOrForward = saveFn
}
func Test_the_director(t *testing.T) {
saveFn1 := directorIncomingDPMessages
saveFn2 := directorProcessIncomingDP
dimCalled := 0
directorIncomingDPMessages = func(rcv chan *cluster.Msg, dpCh chan *IncomingDP) { dimCalled++ }
dpidpCalled := 0
directorProcessIncomingDP = func(dp *IncomingDP, scr statReporter, dsc *dsCache, workerChs workerChannels, clstr clusterer, snd chan *cluster.Msg) {
dpidpCalled++
}
fl := &fakeLogger{}
log.SetOutput(fl)
defer func() {
// restore default output
log.SetOutput(os.Stderr)
}()
wc := &wrkCtl{wg: &sync.WaitGroup{}, startWg: &sync.WaitGroup{}, id: "FOO"}
clstr := &fakeCluster{cChange: make(chan bool)}
dpCh := make(chan *IncomingDP)
// dsc
db := &fakeSerde{}
df := &SimpleDSFinder{DftDSSPec}
sr := &fakeSr{}
dsf := &dsFlusher{db: db, sr: sr}
dsc := newDsCache(db, df, dsf)
wc.startWg.Add(1)
go director(wc, dpCh, clstr, sr, dsc, nil)
wc.startWg.Wait()
if clstr.nReady == 0 {
t.Errorf("director: Ready(true) not called on cluster")
}
if clstr.nReg == 0 {
t.Errorf("director: cluster.RegisterMsgType() not called")
}
// This sometimes can fail because we don't wait for that goroutine in this test...
time.Sleep(5 * time.Millisecond)
if dimCalled == 0 {
t.Errorf("director: directorIncomingDPMessages not started")
}
dp := &IncomingDP{Name: "foo", TimeStamp: time.Unix(1000, 0), Value: 123}
dpCh <- dp
dpCh <- dp
if dpidpCalled == 0 {
t.Errorf("director: directorProcessIncomingDP not called")
}
// Trigger a transition
clstr.cChange <- true
dpCh <- dp
if clstr.nTrans == 0 {
t.Errorf("director: on cluster change, Transition() not called")
}
// Transition with error
clstr.tErr = true
clstr.cChange <- true
dpCh <- dp
if !strings.Contains(string(fl.last), "some error") {
t.Errorf("director: on transition error, 'some error' missing from logs")
}
dpidpCalled = 0
close(dpCh)
time.Sleep(1 * time.Second) // so that nil dp goroutine panics/recovers
if dpidpCalled > 0 {
t.Errorf("director: directorProcessIncomingDP must not be called on channel close")
}
if !strings.Contains(string(fl.last), "shutting down") {
t.Errorf("director: on channel close, missing 'shutting down' log entry")
}
// overrun
dpCh = make(chan *IncomingDP, 5)
dpCh <- dp
dpCh <- dp
dpCh <- dp
dpCh <- dp
wc.startWg.Add(1)
go director(wc, dpCh, clstr, sr, dsc, nil)
wc.startWg.Wait()
time.Sleep(100 * time.Millisecond)
close(dpCh)
directorIncomingDPMessages = saveFn1
directorProcessIncomingDP = saveFn2
}
func Test_director_reportDirectorChannelFillPercent(t *testing.T) {
defer func() {
// restore default output
log.SetOutput(os.Stderr)
}()
fl := &fakeLogger{}
log.SetOutput(fl)
ch := make(chan *IncomingDP, 10)
sr := &fakeSr{}
for i := 0; i < 9; i++ {
ch <- &IncomingDP{}
}
queue := &dpQueue{}
queue.push(&IncomingDP{})
go reportDirectorChannelFillPercent(ch, queue, sr, time.Millisecond)
time.Sleep(50 * time.Millisecond)
if sr.called == 0 {
t.Errorf("reportDirectorChannelFillPercent: statReporter should have been called a bunch of times")
}
if !strings.Contains(string(fl.last), "WARNING") {
t.Errorf("reportDirectorChannelFillPercent: there should be a warning about director channel nearly full")
}
}
func Test_director_queue(t *testing.T) {
queue := &dpQueue{}
dp := &IncomingDP{}
queue.push(dp)
if queue.pop() != dp {
t.Errorf("queue: pop returned wrong dp")
}
if queue.size() != 0 {
t.Errorf("queue: should be empty")
}
queue.push(&IncomingDP{})
if queue.size() != 1 {
t.Errorf("queue: size != 1")
}
}
func | (t *testing.T) {
queue := &dpQueue{}
dp := &IncomingDP{}
dp2 := &IncomingDP{}
r := checkSetAside(dp, queue, true)
if r != nil {
t.Errorf("with skip, checkSetAside should return nil")
}
if queue.size() != 1 {
t.Errorf("checkSetAside: queue size != 1")
}
r = checkSetAside(dp2, queue, false)
if r != dp {
t.Errorf("checkSetAside returned wrong point")
}
r = checkSetAside(nil, queue, false)
if r != dp2 {
t.Errorf("checkSetAside returned wrong point")
}
if queue.size() != 0 {
t.Errorf("checkSetAside: queue size != 0")
}
r = checkSetAside(nil, queue, false)
if r != nil {
t.Errorf("with skip false and empty queue, checkSetAside should return our point: nil")
}
}
| Test_director_checkSetAside | identifier_name |
director_test.go | //
// Copyright 2016 Gregory Trubetskoy. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package receiver
import (
"fmt"
"log"
"math"
"os"
"strings"
"sync"
"testing"
"time"
"github.com/hashicorp/memberlist"
"github.com/tgres/tgres/cluster"
"github.com/tgres/tgres/rrd"
"github.com/tgres/tgres/serde"
)
type fakeLogger struct {
last []byte
}
func (f *fakeLogger) Write(p []byte) (n int, err error) {
f.last = p
return len(p), nil
}
func Test_directorIncomingDPMessages(t *testing.T) {
defer func() {
// restore default output
log.SetOutput(os.Stderr)
}()
fl := &fakeLogger{}
log.SetOutput(fl)
rcv := make(chan *cluster.Msg)
dpCh := make(chan *IncomingDP)
count := 0
go func() {
for {
if _, ok := <-dpCh; !ok {
break
}
count++
}
}()
go directorIncomingDPMessages(rcv, dpCh)
// Sending a bogus message should not cause anything be written to dpCh
rcv <- &cluster.Msg{}
rcv <- &cluster.Msg{} // second send ensures the loop has gone full circle
if count > 0 {
t.Errorf("Malformed messages should not cause data points, count: %d", count)
}
if !strings.Contains(string(fl.last), "decoding FAILED") {
t.Errorf("Malformed messages should log 'decoding FAILED'")
}
// now we need a real message
dp := &IncomingDP{Name: "foo", TimeStamp: time.Unix(1000, 0), Value: 123}
m, _ := cluster.NewMsg(&cluster.Node{}, dp)
rcv <- m
rcv <- m
if count < 1 {
t.Errorf("At least 1 data point should have been sent to dpCh")
}
dp.Hops = 1000 // exceed maxhops (which in fakeCluster is 0?)
m, _ = cluster.NewMsg(&cluster.Node{}, dp)
rcv <- m // "clear" the loop
count = 0
rcv <- m
rcv <- m
if count > 0 {
t.Errorf("Hops exceeded should not cause data points, count: %d", count)
}
if !strings.Contains(string(fl.last), "max hops") {
t.Errorf("Hops exceeded messages should log 'max hops'")
}
// Closing the dpCh should cause the recover() to happen
// The test here is that it doesn't panic
close(dpCh)
dp.Hops = 0
m, _ = cluster.NewMsg(&cluster.Node{}, dp)
rcv <- m
// Closing the channel exists (not sure how to really test for that)
go directorIncomingDPMessages(rcv, dpCh)
close(rcv)
}
func Test_directorForwardDPToNode(t *testing.T) {
dp := &IncomingDP{Name: "foo", TimeStamp: time.Unix(1000, 0), Value: 123}
md := make([]byte, 20)
md[0] = 1 // Ready
node := &cluster.Node{Node: &memberlist.Node{Meta: md}}
snd := make(chan *cluster.Msg)
count := 0
go func() {
for {
if _, ok := <-snd; !ok {
break
}
count++
}
}()
// if hops is > 0, nothing happens
dp.Hops = 1
directorForwardDPToNode(dp, node, snd)
directorForwardDPToNode(dp, node, snd)
if count > 0 {
t.Errorf("directorForwardDPToNode: Data points with hops > 0 should not be forwarded")
}
// otherwise it should work
dp.Hops = 0
directorForwardDPToNode(dp, node, snd)
dp.Hops = 0 // because it just got incremented
directorForwardDPToNode(dp, node, snd)
if count < 1 {
t.Errorf("Data point not sent to channel?")
}
// mark node not Ready
md[0] = 0
dp.Hops = 0 // because it just got incremented
if err := directorForwardDPToNode(dp, node, snd); err == nil {
t.Errorf("not ready node should cause an error")
}
}
func Test_directorProcessOrForward(t *testing.T) {
saveFn := directorForwardDPToNode
forward, fwErr := 0, error(nil)
directorForwardDPToNode = func(dp *IncomingDP, node *cluster.Node, snd chan *cluster.Msg) error {
forward++
return fwErr
}
// dsc
db := &fakeSerde{}
df := &SimpleDSFinder{DftDSSPec}
sr := &fakeSr{}
dsf := &dsFlusher{db: db, sr: sr}
dsc := newDsCache(db, df, dsf)
// rds
foo := serde.Ident{"name": "foo"}
ds := serde.NewDbDataSource(0, foo, rrd.NewDataSource(*DftDSSPec))
rds := &cachedDs{DbDataSourcer: ds}
// cluster
clstr := &fakeCluster{}
md := make([]byte, 20)
md[0] = 1 // Ready
node := &cluster.Node{Node: &memberlist.Node{Meta: md, Name: "local"}}
clstr.nodesForDd = []*cluster.Node{node}
clstr.ln = node
// workerChs
workerChs := make([]chan *incomingDpWithDs, 1)
workerChs[0] = make(chan *incomingDpWithDs)
sent := 0
go func() {
for {
<-workerChs[0]
sent++
}
}()
// Test if we are LocalNode
directorProcessOrForward(dsc, rds, clstr, workerChs, nil, nil)
directorProcessOrForward(dsc, rds, clstr, workerChs, nil, nil)
if sent < 1 {
t.Errorf("directorProcessOrForward: Nothing sent to workerChs")
}
// Now test we are NOT LN, forward
remote := &cluster.Node{Node: &memberlist.Node{Meta: md, Name: "remote"}}
clstr.nodesForDd = []*cluster.Node{remote}
n := directorProcessOrForward(dsc, rds, clstr, workerChs, nil, nil)
if forward != 1 {
t.Errorf("directorProcessOrForward: directorForwardDPToNode not called")
}
if n != 1 {
t.Errorf("directorProcessOrForward: return value != 1")
}
fl := &fakeLogger{}
log.SetOutput(fl)
defer func() {
// restore default output
log.SetOutput(os.Stderr)
}()
fwErr = fmt.Errorf("some error")
n = directorProcessOrForward(dsc, rds, clstr, workerChs, nil, nil)
if n != 0 {
t.Errorf("directorProcessOrForward: return value != 0")
}
if !strings.Contains(string(fl.last), "some error") {
t.Errorf("directorProcessOrForward: directorForwardDPToNode not logged")
}
fwErr = nil
// make an rds with points
foo = serde.Ident{"name": "foo"}
ds = serde.NewDbDataSource(0, foo, rrd.NewDataSource(rrd.DSSpec{
Step: 10 * time.Second,
RRAs: []rrd.RRASpec{
rrd.RRASpec{Function: rrd.WMEAN,
Step: 10 * time.Second,
Span: 30 * time.Second,
Latest: time.Unix(1000, 0),
},
},
}))
ds.ProcessDataPoint(123, time.Unix(2000, 0))
ds.ProcessDataPoint(123, time.Unix(3000, 0))
rds = &cachedDs{DbDataSourcer: ds}
directorProcessOrForward(dsc, rds, clstr, workerChs, nil, nil)
if !strings.Contains(string(fl.last), "PointCount") {
t.Errorf("directorProcessOrForward: Missing the PointCount warning log")
}
if rds.PointCount() != 0 {
t.Errorf("directorProcessOrForward: ClearRRAs(true) not called")
}
// restore directorForwardDPToNode
directorForwardDPToNode = saveFn
}
func Test_directorProcessIncomingDP(t *testing.T) {
saveFn := directorProcessOrForward
dpofCalled := 0
directorProcessOrForward = func(dsc *dsCache, cds *cachedDs, clstr clusterer, workerChs workerChannels, dp *IncomingDP, snd chan *cluster.Msg) (forwarded int) {
dpofCalled++
return 0
}
fl := &fakeLogger{}
log.SetOutput(fl)
defer func() {
// restore default output
log.SetOutput(os.Stderr)
}()
// dp
dp := &IncomingDP{Name: "foo", TimeStamp: time.Unix(1000, 0), Value: 123}
// dsc
db := &fakeSerde{}
df := &SimpleDSFinder{DftDSSPec}
scr := &fakeSr{}
dsf := &dsFlusher{db: db, sr: scr}
dsc := newDsCache(db, df, dsf)
// cluster
clstr := &fakeCluster{cChange: make(chan bool)}
// workerChs
workerChs := make([]chan *incomingDpWithDs, 1)
workerChs[0] = make(chan *incomingDpWithDs)
sent := 0
go func() {
for {
<-workerChs[0]
sent++
}
}()
// NaN
dp.Value = math.NaN()
directorProcessIncomingDP(dp, scr, dsc, nil, nil, nil)
if scr.called != 1 {
t.Errorf("directorProcessIncomingDP: With a NaN, reportStatCount() should only be called once")
}
if dpofCalled > 0 {
t.Errorf("directorProcessIncomingDP: With a NaN, directorProcessOrForward should not be called")
}
// A value
dp.Value = 1234
scr.called, dpofCalled = 0, 0
directorProcessIncomingDP(dp, scr, dsc, workerChs, clstr, nil)
if scr.called != 2 |
if dpofCalled != 1 {
t.Errorf("directorProcessIncomingDP: With a value, directorProcessOrForward should be called once: %v", dpofCalled)
}
// A blank name should cause a nil rds
dp.Name = ""
scr.called, dpofCalled = 0, 0
directorProcessIncomingDP(dp, scr, dsc, nil, nil, nil)
if scr.called != 1 {
t.Errorf("directorProcessIncomingDP: With a blank name, reportStatCount() should be called once")
}
if dpofCalled > 0 {
t.Errorf("directorProcessIncomingDP: With a blank name, directorProcessOrForward should not be called")
}
if !strings.Contains(string(fl.last), "No spec matched") {
t.Errorf("should log 'No spec matched'")
}
// fake a db error
dp.Name = "blah"
db.fakeErr = true
scr.called, dpofCalled = 0, 0
directorProcessIncomingDP(dp, scr, dsc, nil, nil, nil)
if scr.called != 1 {
t.Errorf("directorProcessIncomingDP: With a db error, reportStatCount() should be called once")
}
if dpofCalled > 0 {
t.Errorf("directorProcessIncomingDP: With a db error, directorProcessOrForward should not be called")
}
if !strings.Contains(string(fl.last), "error") {
t.Errorf("should log 'error'")
}
// nil cluster
dp.Value = 1234
db.fakeErr = false
scr.called = 0
directorProcessIncomingDP(dp, scr, dsc, workerChs, nil, nil)
if scr.called != 1 {
t.Errorf("directorProcessIncomingDP: With a value, reportStatCount() should be called once: %v", scr.called)
}
if dpofCalled != 0 {
t.Errorf("directorProcessIncomingDP: With a value and no cluster, directorProcessOrForward should not be called: %v", dpofCalled)
}
directorProcessOrForward = saveFn
}
func Test_the_director(t *testing.T) {
saveFn1 := directorIncomingDPMessages
saveFn2 := directorProcessIncomingDP
dimCalled := 0
directorIncomingDPMessages = func(rcv chan *cluster.Msg, dpCh chan *IncomingDP) { dimCalled++ }
dpidpCalled := 0
directorProcessIncomingDP = func(dp *IncomingDP, scr statReporter, dsc *dsCache, workerChs workerChannels, clstr clusterer, snd chan *cluster.Msg) {
dpidpCalled++
}
fl := &fakeLogger{}
log.SetOutput(fl)
defer func() {
// restore default output
log.SetOutput(os.Stderr)
}()
wc := &wrkCtl{wg: &sync.WaitGroup{}, startWg: &sync.WaitGroup{}, id: "FOO"}
clstr := &fakeCluster{cChange: make(chan bool)}
dpCh := make(chan *IncomingDP)
// dsc
db := &fakeSerde{}
df := &SimpleDSFinder{DftDSSPec}
sr := &fakeSr{}
dsf := &dsFlusher{db: db, sr: sr}
dsc := newDsCache(db, df, dsf)
wc.startWg.Add(1)
go director(wc, dpCh, clstr, sr, dsc, nil)
wc.startWg.Wait()
if clstr.nReady == 0 {
t.Errorf("director: Ready(true) not called on cluster")
}
if clstr.nReg == 0 {
t.Errorf("director: cluster.RegisterMsgType() not called")
}
// This sometimes can fail because we don't wait for that goroutine in this test...
time.Sleep(5 * time.Millisecond)
if dimCalled == 0 {
t.Errorf("director: directorIncomingDPMessages not started")
}
dp := &IncomingDP{Name: "foo", TimeStamp: time.Unix(1000, 0), Value: 123}
dpCh <- dp
dpCh <- dp
if dpidpCalled == 0 {
t.Errorf("director: directorProcessIncomingDP not called")
}
// Trigger a transition
clstr.cChange <- true
dpCh <- dp
if clstr.nTrans == 0 {
t.Errorf("director: on cluster change, Transition() not called")
}
// Transition with error
clstr.tErr = true
clstr.cChange <- true
dpCh <- dp
if !strings.Contains(string(fl.last), "some error") {
t.Errorf("director: on transition error, 'some error' missing from logs")
}
dpidpCalled = 0
close(dpCh)
time.Sleep(1 * time.Second) // so that nil dp goroutine panics/recovers
if dpidpCalled > 0 {
t.Errorf("director: directorProcessIncomingDP must not be called on channel close")
}
if !strings.Contains(string(fl.last), "shutting down") {
t.Errorf("director: on channel close, missing 'shutting down' log entry")
}
// overrun
dpCh = make(chan *IncomingDP, 5)
dpCh <- dp
dpCh <- dp
dpCh <- dp
dpCh <- dp
wc.startWg.Add(1)
go director(wc, dpCh, clstr, sr, dsc, nil)
wc.startWg.Wait()
time.Sleep(100 * time.Millisecond)
close(dpCh)
directorIncomingDPMessages = saveFn1
directorProcessIncomingDP = saveFn2
}
func Test_director_reportDirectorChannelFillPercent(t *testing.T) {
defer func() {
// restore default output
log.SetOutput(os.Stderr)
}()
fl := &fakeLogger{}
log.SetOutput(fl)
ch := make(chan *IncomingDP, 10)
sr := &fakeSr{}
for i := 0; i < 9; i++ {
ch <- &IncomingDP{}
}
queue := &dpQueue{}
queue.push(&IncomingDP{})
go reportDirectorChannelFillPercent(ch, queue, sr, time.Millisecond)
time.Sleep(50 * time.Millisecond)
if sr.called == 0 {
t.Errorf("reportDirectorChannelFillPercent: statReporter should have been called a bunch of times")
}
if !strings.Contains(string(fl.last), "WARNING") {
t.Errorf("reportDirectorChannelFillPercent: there should be a warning about director channel nearly full")
}
}
func Test_director_queue(t *testing.T) {
queue := &dpQueue{}
dp := &IncomingDP{}
queue.push(dp)
if queue.pop() != dp {
t.Errorf("queue: pop returned wrong dp")
}
if queue.size() != 0 {
t.Errorf("queue: should be empty")
}
queue.push(&IncomingDP{})
if queue.size() != 1 {
t.Errorf("queue: size != 1")
}
}
func Test_director_checkSetAside(t *testing.T) {
queue := &dpQueue{}
dp := &IncomingDP{}
dp2 := &IncomingDP{}
r := checkSetAside(dp, queue, true)
if r != nil {
t.Errorf("with skip, checkSetAside should return nil")
}
if queue.size() != 1 {
t.Errorf("checkSetAside: queue size != 1")
}
r = checkSetAside(dp2, queue, false)
if r != dp {
t.Errorf("checkSetAside returned wrong point")
}
r = checkSetAside(nil, queue, false)
if r != dp2 {
t.Errorf("checkSetAside returned wrong point")
}
if queue.size() != 0 {
t.Errorf("checkSetAside: queue size != 0")
}
r = checkSetAside(nil, queue, false)
if r != nil {
t.Errorf("with skip false and empty queue, checkSetAside should return our point: nil")
}
}
| {
t.Errorf("directorProcessIncomingDP: With a value, reportStatCount() should be called twice: %v", scr.called)
} | conditional_block |
search.rs | use super::context;
use super::Contrapositive;
use super::{Cuts, Db, Steps};
use crate::offset::{OLit, Offset, Sub};
use crate::subst::Ptr as SubPtr;
use crate::{Lit, Rewind};
use alloc::vec::Vec;
use core::{fmt::Display, hash::Hash, ops::Neg};
use log::debug;
pub struct Search<'t, P, C> {
task: Task<'t, P, C>,
ctx: Context<'t, P, C>,
promises: Vec<Promise<Task<'t, P, C>>>,
pub sub: Sub<'t, C>,
proof: Steps<'t, P, C>,
alternatives: Vec<(Alternative<'t, P, C>, Action<'t, P, C>)>,
inferences: usize,
literals: usize,
db: &'t Db<P, C, usize>,
opt: Opt,
}
#[derive(Clone)]
pub struct TaskIter<C: IntoIterator>(core::iter::Skip<C::IntoIter>);
impl<C: IntoIterator> TaskIter<C> {
pub fn new(cl: C) -> Self |
}
impl<C: IntoIterator> Iterator for TaskIter<C> {
type Item = <C::IntoIter as Iterator>::Item;
fn next(&mut self) -> Option<Self::Item> {
self.0.next()
}
}
pub type Task<'t, P, C> = TaskIter<super::clause::OClause<'t, Lit<P, C, usize>>>;
pub type Context<'t, P, C> = context::Context<Vec<OLit<'t, P, C>>>;
#[derive(Clone, Debug)]
pub enum Action<'t, P, C> {
Prove,
Reduce(OLit<'t, P, C>, Index),
Extend(OLit<'t, P, C>, Contras<'t, P, C>, Index),
}
impl<'t, P, C> Action<'t, P, C> {
pub fn max_children(&self) -> usize {
use Action::*;
match self {
Prove | Reduce(_, _) => 0,
Extend(_, cs, skip) => cs[*skip].rest.len(),
}
}
}
type Index = usize;
type Contras<'t, P, C> = &'t [Contrapositive<P, C, usize>];
struct Alternative<'t, P, C> {
task: Task<'t, P, C>,
// when we do *not* use cut, then we may need to backtrack to
// contexts that are larger than the current context,
// so we save the whole context here
ctx: Option<Context<'t, P, C>>,
// when we use cut, then we always backtrack to contexts that are
// prefixes of the current context, so in that case,
// storing just a pointer to the context suffices
ctx_ptr: context::Ptr,
promises: Option<Vec<Promise<Task<'t, P, C>>>>,
promises_len: usize,
sub: SubPtr,
proof_len: usize,
}
#[derive(Clone)]
struct Promise<T> {
task: T,
ctx_ptr: context::Ptr,
alt_len: usize,
}
pub struct Opt {
pub lim: usize,
pub cuts: Cuts,
}
impl<'t, P, C> Search<'t, P, C> {
pub fn new(task: Task<'t, P, C>, db: &'t Db<P, C, usize>, opt: Opt) -> Self {
Self {
task,
ctx: Context::default(),
promises: Vec::new(),
sub: Sub::default(),
proof: Steps::new(),
alternatives: Vec::new(),
inferences: 0,
literals: 0,
db,
opt,
}
}
}
type State<'t, P, C> = Result<Action<'t, P, C>, bool>;
impl<'t, P, C> Search<'t, P, C>
where
P: Clone + Display + Eq + Hash + Neg<Output = P>,
C: Clone + Display + Eq,
{
pub fn prove(&mut self) -> Option<&Steps<'t, P, C>> {
let mut action: Action<'t, P, C> = Action::Prove;
loop {
let result = match action {
Action::Prove => match self.task.clone().next() {
Some(lit) => self.chk(lit),
None => self.fulfill_promise(),
},
Action::Reduce(lit, skip) => self.red(lit, skip),
Action::Extend(lit, contras, skip) => self.ext(lit, contras, skip),
};
match result {
Ok(next) => action = next,
Err(true) => return Some(&self.proof),
Err(false) => return None,
}
}
}
pub fn inferences(&self) -> usize {
self.inferences
}
fn chk(&mut self, lit: OLit<'t, P, C>) -> State<'t, P, C> {
debug!("checks: {}", lit);
debug!("{} {}", self.literals, lit.head());
debug!("lemmas: {}", self.ctx.lemmas.len());
debug!("path: {}", self.ctx.path.len());
self.literals += 1;
let mut lits = self.task.clone();
let mut path = self.ctx.path.iter();
let mut lemmas = self.ctx.lemmas.iter();
if lits.any(|cl| path.any(|pl| pl.eq_mod(&self.sub, &cl))) {
debug!("regularity");
self.try_alternative()
} else if lemmas.any(|lem| lem.eq_mod(&self.sub, &lit)) {
debug!("lemma");
self.proof.push(Action::Prove);
// do not add lit to lemmas, unlike original leanCoP
// furthermore, do not try red/ext steps if we found a lemma,
// because it does not add anything to substitution
// note that Jens said that this might sometimes be counterproductive,
// because adding to the substitution is also beneficial to cut down search space
self.task.next();
Ok(Action::Prove)
} else {
Ok(Action::Reduce(lit, 0))
}
}
fn red(&mut self, lit: OLit<'t, P, C>, skip: usize) -> State<'t, P, C> {
debug!("reduce: {}", lit);
let alternative = Alternative::from(&*self);
for (pidx, pat) in self.ctx.path.iter().rev().enumerate().skip(skip) {
debug!("try reduce: {}", pat);
let sub_dom_len = self.sub.get_dom_len();
if pat.head() != &-lit.head().clone() {
continue;
}
if pat.args().unify(&mut self.sub, lit.args()) {
debug!("reduce succeeded");
self.proof.push(Action::Reduce(lit, pidx));
if !self.opt.cuts.reduction {
let action = Action::Reduce(lit, pidx + 1);
self.alternatives.push((alternative, action));
}
self.ctx.lemmas.push(lit);
self.task.next();
return Ok(Action::Prove);
} else {
self.sub.set_dom_len(sub_dom_len)
}
}
self.ext0(lit)
}
fn ext0(&mut self, lit: OLit<'t, P, C>) -> State<'t, P, C> {
debug!("extend: {}", lit);
let neg = -lit.head().clone();
match self.db.get(&neg) {
Some(entries) => self.ext(lit, entries, 0),
None => self.try_alternative(),
}
}
fn ext(&mut self, lit: OLit<'t, P, C>, cs: Contras<'t, P, C>, skip: usize) -> State<'t, P, C> {
let alt = Alternative::from(&*self);
let prm = Promise::from(&*self);
let sub = SubPtr::from(&self.sub);
for (eidx, entry) in cs.iter().enumerate().skip(skip) {
debug!(
"try extend {}{} (lit = {}, |path| = {})",
lit.head(),
entry,
lit,
self.ctx.path.len()
);
if self.ctx.path.len() >= self.opt.lim && entry.vars.is_some() {
debug!("path limit reached");
continue;
};
let eargs = Offset::new(sub.dom_max(), &entry.args);
if let Some(vars) = entry.vars {
// we have to add 1 here because the lowest variable is 0
self.sub.set_dom_max(sub.dom_max() + vars + 1)
};
debug!("unify {} ~? {}, sub = {}", eargs, lit.args(), self.sub);
if eargs.unify(&mut self.sub, lit.args()) {
debug!("unify succeeded with {}, sub = {}", entry.rest, self.sub);
self.inferences += 1;
// promise to fulfill the current task
// (if the promise is kept and cut is enabled,
// then all alternatives that came after will be discarded)
self.promises.push(prm);
self.proof.push(Action::Extend(lit, cs, eidx));
let action = Action::Extend(lit, cs, eidx + 1);
// register an alternative (that will be discarded
// if the above promise is kept and cut is enabled)
self.alternatives.push((alt, action));
self.task = Task::new(Offset::new(sub.dom_max(), &entry.rest));
self.ctx.path.push(lit);
return Ok(Action::Prove);
} else {
debug!("unify failed");
self.sub.rewind(&sub)
}
}
self.try_alternative()
}
fn fulfill_promise(&mut self) -> State<'t, P, C> {
debug!("fulfill promise ({} left)", self.promises.len());
let prm = self.promises.pop().ok_or(true)?;
self.task = prm.task;
self.ctx.rewind(prm.ctx_ptr);
if let Some(prev) = self.task.next() {
self.ctx.lemmas.push(prev)
};
if let Some(cut) = self.opt.cuts.extension {
use super::cuts::Cut::*;
let alt_len = match cut {
Exclusive => prm.alt_len + 1,
Inclusive => prm.alt_len,
};
debug!("cut {} alternatives", self.alternatives.len() - alt_len);
assert!(alt_len <= self.alternatives.len());
self.alternatives.truncate(alt_len);
}
Ok(Action::Prove)
}
fn try_alternative(&mut self) -> State<'t, P, C> {
debug!("try alternative ({} left)", self.alternatives.len());
self.alternatives.pop().ok_or(false).map(|(alt, action)| {
self.rewind(alt);
action
})
}
}
impl<'t, P, C> From<&Search<'t, P, C>> for Alternative<'t, P, C> {
fn from(st: &Search<'t, P, C>) -> Self {
Self {
task: st.task.clone(),
ctx: if st.opt.cuts.extension.is_none() {
Some(st.ctx.clone())
} else {
None
},
ctx_ptr: context::Ptr::from(&st.ctx),
promises: if st.opt.cuts.extension.is_none() {
Some(st.promises.clone())
} else {
None
},
promises_len: st.promises.len(),
sub: SubPtr::from(&st.sub),
proof_len: st.proof.len(),
}
}
}
impl<'t, P, C> From<&Search<'t, P, C>> for Promise<Task<'t, P, C>> {
fn from(st: &Search<'t, P, C>) -> Self {
Self {
task: st.task.clone(),
ctx_ptr: context::Ptr::from(&st.ctx),
alt_len: st.alternatives.len(),
}
}
}
impl<'t, P, C> Rewind<Alternative<'t, P, C>> for Search<'t, P, C> {
fn rewind(&mut self, alt: Alternative<'t, P, C>) {
self.task = alt.task;
if let Some(ctx) = alt.ctx {
self.ctx = ctx;
} else {
self.ctx.rewind(alt.ctx_ptr);
}
if let Some(promises) = alt.promises {
self.promises = promises;
} else {
assert!(self.promises.len() >= alt.promises_len);
self.promises.truncate(alt.promises_len);
}
self.sub.rewind(&alt.sub);
self.proof.truncate(alt.proof_len);
}
}
| {
Self(cl.into_iter().skip(0))
} | identifier_body |
search.rs | use super::context;
use super::Contrapositive;
use super::{Cuts, Db, Steps};
use crate::offset::{OLit, Offset, Sub};
use crate::subst::Ptr as SubPtr;
use crate::{Lit, Rewind};
use alloc::vec::Vec;
use core::{fmt::Display, hash::Hash, ops::Neg};
use log::debug;
pub struct Search<'t, P, C> {
task: Task<'t, P, C>,
ctx: Context<'t, P, C>,
promises: Vec<Promise<Task<'t, P, C>>>,
pub sub: Sub<'t, C>,
proof: Steps<'t, P, C>,
alternatives: Vec<(Alternative<'t, P, C>, Action<'t, P, C>)>,
inferences: usize,
literals: usize,
db: &'t Db<P, C, usize>,
opt: Opt,
}
#[derive(Clone)]
pub struct TaskIter<C: IntoIterator>(core::iter::Skip<C::IntoIter>);
impl<C: IntoIterator> TaskIter<C> {
pub fn new(cl: C) -> Self {
Self(cl.into_iter().skip(0))
}
}
impl<C: IntoIterator> Iterator for TaskIter<C> {
type Item = <C::IntoIter as Iterator>::Item;
fn next(&mut self) -> Option<Self::Item> {
self.0.next()
}
}
pub type Task<'t, P, C> = TaskIter<super::clause::OClause<'t, Lit<P, C, usize>>>;
pub type Context<'t, P, C> = context::Context<Vec<OLit<'t, P, C>>>;
#[derive(Clone, Debug)]
pub enum Action<'t, P, C> {
Prove,
Reduce(OLit<'t, P, C>, Index),
Extend(OLit<'t, P, C>, Contras<'t, P, C>, Index),
}
impl<'t, P, C> Action<'t, P, C> {
pub fn max_children(&self) -> usize {
use Action::*;
match self {
Prove | Reduce(_, _) => 0,
Extend(_, cs, skip) => cs[*skip].rest.len(),
}
}
}
type Index = usize;
type Contras<'t, P, C> = &'t [Contrapositive<P, C, usize>];
struct Alternative<'t, P, C> {
task: Task<'t, P, C>,
// when we do *not* use cut, then we may need to backtrack to
// contexts that are larger than the current context,
// so we save the whole context here
ctx: Option<Context<'t, P, C>>,
// when we use cut, then we always backtrack to contexts that are
// prefixes of the current context, so in that case,
// storing just a pointer to the context suffices
ctx_ptr: context::Ptr,
promises: Option<Vec<Promise<Task<'t, P, C>>>>,
promises_len: usize,
sub: SubPtr,
proof_len: usize,
}
#[derive(Clone)]
struct Promise<T> {
task: T,
ctx_ptr: context::Ptr,
alt_len: usize,
}
pub struct Opt {
pub lim: usize,
pub cuts: Cuts,
}
impl<'t, P, C> Search<'t, P, C> {
pub fn new(task: Task<'t, P, C>, db: &'t Db<P, C, usize>, opt: Opt) -> Self {
Self {
task,
ctx: Context::default(),
promises: Vec::new(),
sub: Sub::default(),
proof: Steps::new(),
alternatives: Vec::new(),
inferences: 0,
literals: 0,
db,
opt,
}
}
}
type State<'t, P, C> = Result<Action<'t, P, C>, bool>;
impl<'t, P, C> Search<'t, P, C>
where
P: Clone + Display + Eq + Hash + Neg<Output = P>,
C: Clone + Display + Eq,
{
pub fn prove(&mut self) -> Option<&Steps<'t, P, C>> {
let mut action: Action<'t, P, C> = Action::Prove;
loop {
let result = match action {
Action::Prove => match self.task.clone().next() {
Some(lit) => self.chk(lit),
None => self.fulfill_promise(),
},
Action::Reduce(lit, skip) => self.red(lit, skip),
Action::Extend(lit, contras, skip) => self.ext(lit, contras, skip),
};
match result {
Ok(next) => action = next,
Err(true) => return Some(&self.proof),
Err(false) => return None,
}
}
}
pub fn inferences(&self) -> usize {
self.inferences
}
fn chk(&mut self, lit: OLit<'t, P, C>) -> State<'t, P, C> {
debug!("checks: {}", lit);
debug!("{} {}", self.literals, lit.head());
debug!("lemmas: {}", self.ctx.lemmas.len());
debug!("path: {}", self.ctx.path.len());
self.literals += 1;
let mut lits = self.task.clone();
let mut path = self.ctx.path.iter();
let mut lemmas = self.ctx.lemmas.iter();
if lits.any(|cl| path.any(|pl| pl.eq_mod(&self.sub, &cl))) {
debug!("regularity");
self.try_alternative()
} else if lemmas.any(|lem| lem.eq_mod(&self.sub, &lit)) {
debug!("lemma");
self.proof.push(Action::Prove);
// do not add lit to lemmas, unlike original leanCoP
// furthermore, do not try red/ext steps if we found a lemma,
// because it does not add anything to substitution
// note that Jens said that this might sometimes be counterproductive,
// because adding to the substitution is also beneficial to cut down search space
self.task.next();
Ok(Action::Prove)
} else {
Ok(Action::Reduce(lit, 0))
}
}
fn red(&mut self, lit: OLit<'t, P, C>, skip: usize) -> State<'t, P, C> {
debug!("reduce: {}", lit);
let alternative = Alternative::from(&*self);
for (pidx, pat) in self.ctx.path.iter().rev().enumerate().skip(skip) {
debug!("try reduce: {}", pat);
let sub_dom_len = self.sub.get_dom_len();
if pat.head() != &-lit.head().clone() {
continue;
}
if pat.args().unify(&mut self.sub, lit.args()) {
debug!("reduce succeeded");
self.proof.push(Action::Reduce(lit, pidx));
if !self.opt.cuts.reduction {
let action = Action::Reduce(lit, pidx + 1);
self.alternatives.push((alternative, action));
}
self.ctx.lemmas.push(lit);
self.task.next();
return Ok(Action::Prove);
} else {
self.sub.set_dom_len(sub_dom_len)
}
}
self.ext0(lit)
}
fn ext0(&mut self, lit: OLit<'t, P, C>) -> State<'t, P, C> {
debug!("extend: {}", lit);
let neg = -lit.head().clone();
match self.db.get(&neg) {
Some(entries) => self.ext(lit, entries, 0),
None => self.try_alternative(),
}
}
fn ext(&mut self, lit: OLit<'t, P, C>, cs: Contras<'t, P, C>, skip: usize) -> State<'t, P, C> {
let alt = Alternative::from(&*self);
let prm = Promise::from(&*self);
let sub = SubPtr::from(&self.sub);
for (eidx, entry) in cs.iter().enumerate().skip(skip) {
debug!(
"try extend {}{} (lit = {}, |path| = {})",
lit.head(),
entry,
lit,
self.ctx.path.len()
);
if self.ctx.path.len() >= self.opt.lim && entry.vars.is_some() {
debug!("path limit reached");
continue;
};
let eargs = Offset::new(sub.dom_max(), &entry.args);
if let Some(vars) = entry.vars {
// we have to add 1 here because the lowest variable is 0
self.sub.set_dom_max(sub.dom_max() + vars + 1)
};
debug!("unify {} ~? {}, sub = {}", eargs, lit.args(), self.sub);
if eargs.unify(&mut self.sub, lit.args()) {
debug!("unify succeeded with {}, sub = {}", entry.rest, self.sub);
self.inferences += 1;
// promise to fulfill the current task
// (if the promise is kept and cut is enabled,
// then all alternatives that came after will be discarded)
self.promises.push(prm);
self.proof.push(Action::Extend(lit, cs, eidx));
let action = Action::Extend(lit, cs, eidx + 1);
// register an alternative (that will be discarded
// if the above promise is kept and cut is enabled)
self.alternatives.push((alt, action));
self.task = Task::new(Offset::new(sub.dom_max(), &entry.rest));
self.ctx.path.push(lit);
return Ok(Action::Prove);
} else {
debug!("unify failed");
self.sub.rewind(&sub)
}
}
self.try_alternative()
}
fn fulfill_promise(&mut self) -> State<'t, P, C> {
debug!("fulfill promise ({} left)", self.promises.len());
let prm = self.promises.pop().ok_or(true)?;
self.task = prm.task;
self.ctx.rewind(prm.ctx_ptr);
if let Some(prev) = self.task.next() {
self.ctx.lemmas.push(prev)
};
if let Some(cut) = self.opt.cuts.extension {
use super::cuts::Cut::*;
let alt_len = match cut {
Exclusive => prm.alt_len + 1,
Inclusive => prm.alt_len,
};
debug!("cut {} alternatives", self.alternatives.len() - alt_len);
assert!(alt_len <= self.alternatives.len());
self.alternatives.truncate(alt_len);
}
Ok(Action::Prove)
}
fn | (&mut self) -> State<'t, P, C> {
debug!("try alternative ({} left)", self.alternatives.len());
self.alternatives.pop().ok_or(false).map(|(alt, action)| {
self.rewind(alt);
action
})
}
}
impl<'t, P, C> From<&Search<'t, P, C>> for Alternative<'t, P, C> {
fn from(st: &Search<'t, P, C>) -> Self {
Self {
task: st.task.clone(),
ctx: if st.opt.cuts.extension.is_none() {
Some(st.ctx.clone())
} else {
None
},
ctx_ptr: context::Ptr::from(&st.ctx),
promises: if st.opt.cuts.extension.is_none() {
Some(st.promises.clone())
} else {
None
},
promises_len: st.promises.len(),
sub: SubPtr::from(&st.sub),
proof_len: st.proof.len(),
}
}
}
impl<'t, P, C> From<&Search<'t, P, C>> for Promise<Task<'t, P, C>> {
fn from(st: &Search<'t, P, C>) -> Self {
Self {
task: st.task.clone(),
ctx_ptr: context::Ptr::from(&st.ctx),
alt_len: st.alternatives.len(),
}
}
}
impl<'t, P, C> Rewind<Alternative<'t, P, C>> for Search<'t, P, C> {
fn rewind(&mut self, alt: Alternative<'t, P, C>) {
self.task = alt.task;
if let Some(ctx) = alt.ctx {
self.ctx = ctx;
} else {
self.ctx.rewind(alt.ctx_ptr);
}
if let Some(promises) = alt.promises {
self.promises = promises;
} else {
assert!(self.promises.len() >= alt.promises_len);
self.promises.truncate(alt.promises_len);
}
self.sub.rewind(&alt.sub);
self.proof.truncate(alt.proof_len);
}
}
| try_alternative | identifier_name |
search.rs | use super::context;
use super::Contrapositive;
use super::{Cuts, Db, Steps};
use crate::offset::{OLit, Offset, Sub};
use crate::subst::Ptr as SubPtr;
use crate::{Lit, Rewind};
use alloc::vec::Vec;
use core::{fmt::Display, hash::Hash, ops::Neg};
use log::debug;
pub struct Search<'t, P, C> {
task: Task<'t, P, C>,
ctx: Context<'t, P, C>,
promises: Vec<Promise<Task<'t, P, C>>>,
pub sub: Sub<'t, C>,
proof: Steps<'t, P, C>,
alternatives: Vec<(Alternative<'t, P, C>, Action<'t, P, C>)>,
inferences: usize,
literals: usize,
db: &'t Db<P, C, usize>,
opt: Opt,
}
#[derive(Clone)]
pub struct TaskIter<C: IntoIterator>(core::iter::Skip<C::IntoIter>);
impl<C: IntoIterator> TaskIter<C> {
pub fn new(cl: C) -> Self {
Self(cl.into_iter().skip(0))
}
}
impl<C: IntoIterator> Iterator for TaskIter<C> {
type Item = <C::IntoIter as Iterator>::Item;
fn next(&mut self) -> Option<Self::Item> {
self.0.next()
}
}
pub type Task<'t, P, C> = TaskIter<super::clause::OClause<'t, Lit<P, C, usize>>>;
pub type Context<'t, P, C> = context::Context<Vec<OLit<'t, P, C>>>;
#[derive(Clone, Debug)]
pub enum Action<'t, P, C> {
Prove,
Reduce(OLit<'t, P, C>, Index),
Extend(OLit<'t, P, C>, Contras<'t, P, C>, Index),
}
impl<'t, P, C> Action<'t, P, C> {
pub fn max_children(&self) -> usize {
use Action::*;
match self {
Prove | Reduce(_, _) => 0,
Extend(_, cs, skip) => cs[*skip].rest.len(),
}
}
}
type Index = usize;
type Contras<'t, P, C> = &'t [Contrapositive<P, C, usize>];
struct Alternative<'t, P, C> {
task: Task<'t, P, C>,
// when we do *not* use cut, then we may need to backtrack to
// contexts that are larger than the current context,
// so we save the whole context here
ctx: Option<Context<'t, P, C>>,
// when we use cut, then we always backtrack to contexts that are
// prefixes of the current context, so in that case,
// storing just a pointer to the context suffices
ctx_ptr: context::Ptr,
promises: Option<Vec<Promise<Task<'t, P, C>>>>,
promises_len: usize,
sub: SubPtr,
proof_len: usize,
}
#[derive(Clone)]
struct Promise<T> {
task: T,
ctx_ptr: context::Ptr,
alt_len: usize,
}
pub struct Opt {
pub lim: usize,
pub cuts: Cuts,
}
impl<'t, P, C> Search<'t, P, C> {
pub fn new(task: Task<'t, P, C>, db: &'t Db<P, C, usize>, opt: Opt) -> Self {
Self {
task,
ctx: Context::default(),
promises: Vec::new(),
sub: Sub::default(),
proof: Steps::new(),
alternatives: Vec::new(),
inferences: 0,
literals: 0,
db,
opt,
}
}
}
type State<'t, P, C> = Result<Action<'t, P, C>, bool>;
impl<'t, P, C> Search<'t, P, C>
where
P: Clone + Display + Eq + Hash + Neg<Output = P>,
C: Clone + Display + Eq,
{
pub fn prove(&mut self) -> Option<&Steps<'t, P, C>> {
let mut action: Action<'t, P, C> = Action::Prove;
loop {
let result = match action {
Action::Prove => match self.task.clone().next() {
Some(lit) => self.chk(lit),
None => self.fulfill_promise(),
},
Action::Reduce(lit, skip) => self.red(lit, skip),
Action::Extend(lit, contras, skip) => self.ext(lit, contras, skip),
};
match result {
Ok(next) => action = next,
Err(true) => return Some(&self.proof),
Err(false) => return None,
}
}
}
pub fn inferences(&self) -> usize {
self.inferences
}
fn chk(&mut self, lit: OLit<'t, P, C>) -> State<'t, P, C> {
debug!("checks: {}", lit);
debug!("{} {}", self.literals, lit.head());
debug!("lemmas: {}", self.ctx.lemmas.len());
debug!("path: {}", self.ctx.path.len());
self.literals += 1;
let mut lits = self.task.clone();
let mut path = self.ctx.path.iter();
let mut lemmas = self.ctx.lemmas.iter();
if lits.any(|cl| path.any(|pl| pl.eq_mod(&self.sub, &cl))) {
debug!("regularity");
self.try_alternative()
} else if lemmas.any(|lem| lem.eq_mod(&self.sub, &lit)) {
debug!("lemma");
self.proof.push(Action::Prove);
// do not add lit to lemmas, unlike original leanCoP
// furthermore, do not try red/ext steps if we found a lemma,
// because it does not add anything to substitution
// note that Jens said that this might sometimes be counterproductive,
// because adding to the substitution is also beneficial to cut down search space
self.task.next();
Ok(Action::Prove)
} else {
Ok(Action::Reduce(lit, 0))
}
}
fn red(&mut self, lit: OLit<'t, P, C>, skip: usize) -> State<'t, P, C> {
debug!("reduce: {}", lit);
let alternative = Alternative::from(&*self);
for (pidx, pat) in self.ctx.path.iter().rev().enumerate().skip(skip) {
debug!("try reduce: {}", pat);
let sub_dom_len = self.sub.get_dom_len();
if pat.head() != &-lit.head().clone() {
continue;
}
if pat.args().unify(&mut self.sub, lit.args()) {
debug!("reduce succeeded");
self.proof.push(Action::Reduce(lit, pidx));
if !self.opt.cuts.reduction {
let action = Action::Reduce(lit, pidx + 1);
self.alternatives.push((alternative, action));
}
self.ctx.lemmas.push(lit);
self.task.next();
return Ok(Action::Prove);
} else {
self.sub.set_dom_len(sub_dom_len)
}
}
self.ext0(lit)
}
fn ext0(&mut self, lit: OLit<'t, P, C>) -> State<'t, P, C> {
debug!("extend: {}", lit);
let neg = -lit.head().clone();
match self.db.get(&neg) {
Some(entries) => self.ext(lit, entries, 0),
None => self.try_alternative(),
}
}
fn ext(&mut self, lit: OLit<'t, P, C>, cs: Contras<'t, P, C>, skip: usize) -> State<'t, P, C> {
let alt = Alternative::from(&*self);
let prm = Promise::from(&*self);
let sub = SubPtr::from(&self.sub);
for (eidx, entry) in cs.iter().enumerate().skip(skip) {
debug!(
"try extend {}{} (lit = {}, |path| = {})",
lit.head(),
entry,
lit,
self.ctx.path.len()
);
if self.ctx.path.len() >= self.opt.lim && entry.vars.is_some() {
debug!("path limit reached");
continue;
};
let eargs = Offset::new(sub.dom_max(), &entry.args);
if let Some(vars) = entry.vars {
// we have to add 1 here because the lowest variable is 0
self.sub.set_dom_max(sub.dom_max() + vars + 1)
};
debug!("unify {} ~? {}, sub = {}", eargs, lit.args(), self.sub);
if eargs.unify(&mut self.sub, lit.args()) {
debug!("unify succeeded with {}, sub = {}", entry.rest, self.sub);
self.inferences += 1;
// promise to fulfill the current task
// (if the promise is kept and cut is enabled,
// then all alternatives that came after will be discarded)
self.promises.push(prm);
self.proof.push(Action::Extend(lit, cs, eidx));
let action = Action::Extend(lit, cs, eidx + 1);
// register an alternative (that will be discarded
// if the above promise is kept and cut is enabled)
self.alternatives.push((alt, action));
self.task = Task::new(Offset::new(sub.dom_max(), &entry.rest));
self.ctx.path.push(lit);
return Ok(Action::Prove);
} else {
debug!("unify failed");
self.sub.rewind(&sub)
}
}
self.try_alternative()
}
fn fulfill_promise(&mut self) -> State<'t, P, C> {
debug!("fulfill promise ({} left)", self.promises.len());
let prm = self.promises.pop().ok_or(true)?;
self.task = prm.task;
self.ctx.rewind(prm.ctx_ptr);
if let Some(prev) = self.task.next() {
self.ctx.lemmas.push(prev)
};
if let Some(cut) = self.opt.cuts.extension {
use super::cuts::Cut::*;
let alt_len = match cut {
Exclusive => prm.alt_len + 1,
Inclusive => prm.alt_len,
};
debug!("cut {} alternatives", self.alternatives.len() - alt_len);
assert!(alt_len <= self.alternatives.len());
self.alternatives.truncate(alt_len);
}
Ok(Action::Prove)
}
fn try_alternative(&mut self) -> State<'t, P, C> {
debug!("try alternative ({} left)", self.alternatives.len());
self.alternatives.pop().ok_or(false).map(|(alt, action)| {
self.rewind(alt);
action
})
}
}
impl<'t, P, C> From<&Search<'t, P, C>> for Alternative<'t, P, C> {
fn from(st: &Search<'t, P, C>) -> Self {
Self {
task: st.task.clone(),
ctx: if st.opt.cuts.extension.is_none() {
Some(st.ctx.clone())
} else {
None
},
ctx_ptr: context::Ptr::from(&st.ctx),
promises: if st.opt.cuts.extension.is_none() {
Some(st.promises.clone())
} else {
None
},
promises_len: st.promises.len(),
sub: SubPtr::from(&st.sub),
proof_len: st.proof.len(),
}
}
}
impl<'t, P, C> From<&Search<'t, P, C>> for Promise<Task<'t, P, C>> {
fn from(st: &Search<'t, P, C>) -> Self {
Self {
task: st.task.clone(),
ctx_ptr: context::Ptr::from(&st.ctx),
alt_len: st.alternatives.len(),
}
}
}
impl<'t, P, C> Rewind<Alternative<'t, P, C>> for Search<'t, P, C> {
fn rewind(&mut self, alt: Alternative<'t, P, C>) {
self.task = alt.task;
if let Some(ctx) = alt.ctx | else {
self.ctx.rewind(alt.ctx_ptr);
}
if let Some(promises) = alt.promises {
self.promises = promises;
} else {
assert!(self.promises.len() >= alt.promises_len);
self.promises.truncate(alt.promises_len);
}
self.sub.rewind(&alt.sub);
self.proof.truncate(alt.proof_len);
}
}
| {
self.ctx = ctx;
} | conditional_block |
search.rs | use super::context;
use super::Contrapositive;
use super::{Cuts, Db, Steps};
use crate::offset::{OLit, Offset, Sub};
use crate::subst::Ptr as SubPtr;
use crate::{Lit, Rewind};
use alloc::vec::Vec;
use core::{fmt::Display, hash::Hash, ops::Neg};
use log::debug;
pub struct Search<'t, P, C> {
task: Task<'t, P, C>,
ctx: Context<'t, P, C>,
promises: Vec<Promise<Task<'t, P, C>>>,
pub sub: Sub<'t, C>,
proof: Steps<'t, P, C>,
alternatives: Vec<(Alternative<'t, P, C>, Action<'t, P, C>)>,
inferences: usize,
literals: usize,
db: &'t Db<P, C, usize>,
opt: Opt,
}
#[derive(Clone)]
pub struct TaskIter<C: IntoIterator>(core::iter::Skip<C::IntoIter>);
impl<C: IntoIterator> TaskIter<C> {
pub fn new(cl: C) -> Self {
Self(cl.into_iter().skip(0))
}
}
impl<C: IntoIterator> Iterator for TaskIter<C> {
type Item = <C::IntoIter as Iterator>::Item;
fn next(&mut self) -> Option<Self::Item> {
self.0.next()
}
}
pub type Task<'t, P, C> = TaskIter<super::clause::OClause<'t, Lit<P, C, usize>>>;
pub type Context<'t, P, C> = context::Context<Vec<OLit<'t, P, C>>>;
#[derive(Clone, Debug)]
pub enum Action<'t, P, C> {
Prove,
Reduce(OLit<'t, P, C>, Index),
Extend(OLit<'t, P, C>, Contras<'t, P, C>, Index),
}
impl<'t, P, C> Action<'t, P, C> {
pub fn max_children(&self) -> usize {
use Action::*;
match self {
Prove | Reduce(_, _) => 0,
Extend(_, cs, skip) => cs[*skip].rest.len(),
}
}
}
type Index = usize;
type Contras<'t, P, C> = &'t [Contrapositive<P, C, usize>];
struct Alternative<'t, P, C> {
task: Task<'t, P, C>,
// when we do *not* use cut, then we may need to backtrack to
// contexts that are larger than the current context,
// so we save the whole context here
ctx: Option<Context<'t, P, C>>,
// when we use cut, then we always backtrack to contexts that are
// prefixes of the current context, so in that case,
// storing just a pointer to the context suffices
ctx_ptr: context::Ptr,
promises: Option<Vec<Promise<Task<'t, P, C>>>>,
promises_len: usize,
sub: SubPtr,
proof_len: usize,
}
#[derive(Clone)]
struct Promise<T> {
task: T,
ctx_ptr: context::Ptr,
alt_len: usize,
}
pub struct Opt {
pub lim: usize,
pub cuts: Cuts,
}
impl<'t, P, C> Search<'t, P, C> {
pub fn new(task: Task<'t, P, C>, db: &'t Db<P, C, usize>, opt: Opt) -> Self {
Self {
task,
ctx: Context::default(),
promises: Vec::new(),
sub: Sub::default(),
proof: Steps::new(),
alternatives: Vec::new(),
inferences: 0,
literals: 0,
db,
opt,
}
}
}
type State<'t, P, C> = Result<Action<'t, P, C>, bool>;
impl<'t, P, C> Search<'t, P, C>
where
P: Clone + Display + Eq + Hash + Neg<Output = P>,
C: Clone + Display + Eq,
{
pub fn prove(&mut self) -> Option<&Steps<'t, P, C>> {
let mut action: Action<'t, P, C> = Action::Prove;
loop {
let result = match action {
Action::Prove => match self.task.clone().next() {
Some(lit) => self.chk(lit),
None => self.fulfill_promise(),
},
Action::Reduce(lit, skip) => self.red(lit, skip),
Action::Extend(lit, contras, skip) => self.ext(lit, contras, skip),
};
match result {
Ok(next) => action = next,
Err(true) => return Some(&self.proof),
Err(false) => return None,
}
}
}
pub fn inferences(&self) -> usize {
self.inferences
}
fn chk(&mut self, lit: OLit<'t, P, C>) -> State<'t, P, C> {
debug!("checks: {}", lit);
debug!("{} {}", self.literals, lit.head());
debug!("lemmas: {}", self.ctx.lemmas.len());
debug!("path: {}", self.ctx.path.len());
self.literals += 1;
let mut lits = self.task.clone();
let mut path = self.ctx.path.iter();
let mut lemmas = self.ctx.lemmas.iter();
if lits.any(|cl| path.any(|pl| pl.eq_mod(&self.sub, &cl))) {
debug!("regularity");
self.try_alternative()
} else if lemmas.any(|lem| lem.eq_mod(&self.sub, &lit)) {
debug!("lemma");
self.proof.push(Action::Prove);
// do not add lit to lemmas, unlike original leanCoP
// furthermore, do not try red/ext steps if we found a lemma,
// because it does not add anything to substitution
// note that Jens said that this might sometimes be counterproductive,
// because adding to the substitution is also beneficial to cut down search space
self.task.next();
Ok(Action::Prove)
} else {
Ok(Action::Reduce(lit, 0))
}
}
fn red(&mut self, lit: OLit<'t, P, C>, skip: usize) -> State<'t, P, C> {
debug!("reduce: {}", lit);
let alternative = Alternative::from(&*self);
for (pidx, pat) in self.ctx.path.iter().rev().enumerate().skip(skip) {
debug!("try reduce: {}", pat);
let sub_dom_len = self.sub.get_dom_len();
if pat.head() != &-lit.head().clone() {
continue;
}
if pat.args().unify(&mut self.sub, lit.args()) {
debug!("reduce succeeded");
self.proof.push(Action::Reduce(lit, pidx));
if !self.opt.cuts.reduction {
let action = Action::Reduce(lit, pidx + 1);
self.alternatives.push((alternative, action));
}
self.ctx.lemmas.push(lit);
self.task.next();
return Ok(Action::Prove);
} else {
self.sub.set_dom_len(sub_dom_len)
}
}
self.ext0(lit)
}
fn ext0(&mut self, lit: OLit<'t, P, C>) -> State<'t, P, C> {
debug!("extend: {}", lit); | }
}
fn ext(&mut self, lit: OLit<'t, P, C>, cs: Contras<'t, P, C>, skip: usize) -> State<'t, P, C> {
let alt = Alternative::from(&*self);
let prm = Promise::from(&*self);
let sub = SubPtr::from(&self.sub);
for (eidx, entry) in cs.iter().enumerate().skip(skip) {
debug!(
"try extend {}{} (lit = {}, |path| = {})",
lit.head(),
entry,
lit,
self.ctx.path.len()
);
if self.ctx.path.len() >= self.opt.lim && entry.vars.is_some() {
debug!("path limit reached");
continue;
};
let eargs = Offset::new(sub.dom_max(), &entry.args);
if let Some(vars) = entry.vars {
// we have to add 1 here because the lowest variable is 0
self.sub.set_dom_max(sub.dom_max() + vars + 1)
};
debug!("unify {} ~? {}, sub = {}", eargs, lit.args(), self.sub);
if eargs.unify(&mut self.sub, lit.args()) {
debug!("unify succeeded with {}, sub = {}", entry.rest, self.sub);
self.inferences += 1;
// promise to fulfill the current task
// (if the promise is kept and cut is enabled,
// then all alternatives that came after will be discarded)
self.promises.push(prm);
self.proof.push(Action::Extend(lit, cs, eidx));
let action = Action::Extend(lit, cs, eidx + 1);
// register an alternative (that will be discarded
// if the above promise is kept and cut is enabled)
self.alternatives.push((alt, action));
self.task = Task::new(Offset::new(sub.dom_max(), &entry.rest));
self.ctx.path.push(lit);
return Ok(Action::Prove);
} else {
debug!("unify failed");
self.sub.rewind(&sub)
}
}
self.try_alternative()
}
fn fulfill_promise(&mut self) -> State<'t, P, C> {
debug!("fulfill promise ({} left)", self.promises.len());
let prm = self.promises.pop().ok_or(true)?;
self.task = prm.task;
self.ctx.rewind(prm.ctx_ptr);
if let Some(prev) = self.task.next() {
self.ctx.lemmas.push(prev)
};
if let Some(cut) = self.opt.cuts.extension {
use super::cuts::Cut::*;
let alt_len = match cut {
Exclusive => prm.alt_len + 1,
Inclusive => prm.alt_len,
};
debug!("cut {} alternatives", self.alternatives.len() - alt_len);
assert!(alt_len <= self.alternatives.len());
self.alternatives.truncate(alt_len);
}
Ok(Action::Prove)
}
fn try_alternative(&mut self) -> State<'t, P, C> {
debug!("try alternative ({} left)", self.alternatives.len());
self.alternatives.pop().ok_or(false).map(|(alt, action)| {
self.rewind(alt);
action
})
}
}
impl<'t, P, C> From<&Search<'t, P, C>> for Alternative<'t, P, C> {
fn from(st: &Search<'t, P, C>) -> Self {
Self {
task: st.task.clone(),
ctx: if st.opt.cuts.extension.is_none() {
Some(st.ctx.clone())
} else {
None
},
ctx_ptr: context::Ptr::from(&st.ctx),
promises: if st.opt.cuts.extension.is_none() {
Some(st.promises.clone())
} else {
None
},
promises_len: st.promises.len(),
sub: SubPtr::from(&st.sub),
proof_len: st.proof.len(),
}
}
}
impl<'t, P, C> From<&Search<'t, P, C>> for Promise<Task<'t, P, C>> {
fn from(st: &Search<'t, P, C>) -> Self {
Self {
task: st.task.clone(),
ctx_ptr: context::Ptr::from(&st.ctx),
alt_len: st.alternatives.len(),
}
}
}
impl<'t, P, C> Rewind<Alternative<'t, P, C>> for Search<'t, P, C> {
fn rewind(&mut self, alt: Alternative<'t, P, C>) {
self.task = alt.task;
if let Some(ctx) = alt.ctx {
self.ctx = ctx;
} else {
self.ctx.rewind(alt.ctx_ptr);
}
if let Some(promises) = alt.promises {
self.promises = promises;
} else {
assert!(self.promises.len() >= alt.promises_len);
self.promises.truncate(alt.promises_len);
}
self.sub.rewind(&alt.sub);
self.proof.truncate(alt.proof_len);
}
} | let neg = -lit.head().clone();
match self.db.get(&neg) {
Some(entries) => self.ext(lit, entries, 0),
None => self.try_alternative(), | random_line_split |
ESealedRadioactiveSourceDetail.js | //密封放射
var ESealedRadioactiveSourceDetail = (function() {
return {
init : function(){//初始化
// 区域 绑定删除事件 删除li的时候更新数据
$("#areaList1").click(function(e){
if(e.target.nodeName != "I"){//点击i的时候触发
return;
}
//获取i-> a-> li
$(e.target).parent().parent().remove();
//所在院区
$("#hospitalArea").html("");
$("#Area").val("");
//所在机房或区域 id
$("#areaId").val("");
});
// 人员 绑定删除事件 删除li的时候更新数据
$("#staffList1").click(function(e){
if(e.target.nodeName != "I"){//点击i的时候触发
return;
}
//获取i-> a-> li
$(e.target).parent().parent().remove();
//人员id 清空
$("#staffId").val("");
});
// 元素表 绑定删除事件 删除li的时候更新数据
$("#element").click(function(e){
if(e.target.nodeName != "I"){//点击i的时候触发
return;
}
//获取i-> a-> li
$(e.target).parent().parent().remove();
//元素名称id 清空
$("#nuclideId").val("");
$("#nuclideName").val("");
});
// 生产单位 绑定删除事件 删除li的时候更新数据
$("#nuclideManufacturersList1").click(function(e){
if(e.target.nodeName != "I"){//点击i的时候触发
return;
}
//获取i-> a-> li
$(e.target).parent().parent().remove();
//生产单位
$("#nuclideManufacturers").val("");
});
//根据编号生成类别
$("#sourceCode").change(function(){
var sourceCode = $(this).val();
if(!/^[A-Z\d]{2}[0-9N]{2}[A-Z0-9\d]{2}[\d]{5}[1-5N]$/.test(sourceCode)){
$("#sourceType").val("0");
$("#sourceType1").html("");
var sourceType = sourceCode.substr(sourceCode.length-1,1);
return;
}
//根据最后一个字符生成类别
var sourceType = sourceCode.substr(sourceCode.length-1,1);
if(sourceType == "N"){
$("#sourceType").val("0");
$("#sourceType1").html("");
}
if(sourceType == 1){
$("#sourceType").val("1");
$("#sourceType1").html("I");
}
if(sourceType == 2){
$("#sourceType").val("2");
$("#sourceType1").html("II");
}
if(sourceType == 3){
$("#sourceType").val("3");
$("#sourceType1").html("III");
}
if(sourceType == 4){
$("#sourceType").val("4");
$("#sourceType1").html("IV");
}
if(sourceType == 5){
$("#sourceType").val("5");
$("#sourceType1").html("V");
}
});
ESealedRadioactiveSourceDetail.initValidate();
},
initValidate : function(){
//提交
$("#ESealedRadioactiveSourceForm").find(".a-submit").click(function(){
if ($("#ESealedRadioactiveSourceForm").valid()) {
var data = {
id : $("#ESealedRadioactiveSourceForm").find("#id").val(),
nuclideId : $("#ESealedRadioactiveSourceForm").find("#nuclideId").val(),// 核素表id
staffId : $("#ESealedRadioactiveSourceForm").find("#staffId").val(),// 存储用户人的id
areaId : $("#ESealedRadioactiveSourceForm").find("#areaId").val(),//区域表的id
nuclideManufacturers : $("#ESealedRadioactiveSourceForm").find("#nuclideManufacturers").val(),//核素生产厂家id
activityExponent : $("#ESealedRadioactiveSourceForm").find("#activityExponent").val(),//活度指数
activity : $("#ESealedRadioactiveSourceForm").find("#activity").val(),//活度正数
measureTime : $("#ESealedRadioactiveSourceForm").find("#measureTime").val(),//测量日期
sourceCode : $("#ESealedRadioactiveSourceForm").find("#sourceCode").val(),//放射编号
purpose : $("#ESealedRadioactiveSourceForm").find("#purpose").val(),//用途
sourceType : $("#ESealedRadioactiveSourceForm").find("#sourceType").val(),// 放射源类别
activityType : $("#ESealedRadioactiveSourceForm").find("#activityType").val(),// 活动种类
nuclideName : $("#ESealedRadioactiveSourceForm").find("#nuclideName").val()//核素名称
};
$.post(home_url + "/EDevice/sealRadiate/submit",data,
function (response) {
if (response.code == 0){//提示 确认后跳转到查看
gridAddCallback("ESealedRadioactiveSourceTable");//刷新父页面table
location.href = home_url + "/EDevice/sealRadiate/lookOver?id=" + response.data;
}
}, 'json');
}
});
//验证编号一二位 生产单位代码
jQuery.validator.addMethod("verifySourceCodeUnit", function(value, element) {
var SourceCode = $.trim($("#ESealedRadioactiveSourceForm").find("#sourceCode").val());
if(SourceCode == ''){
return true;
}
var SourceCode = $.trim($("#ESealedRadioactiveSourceForm").find("#sourceCode").val());
var state = new Array(
"AR", "GR", "PL", "PT", "RU" ,"SI", "SK" ,"RO" ,"ZA", "SE" ,"ZZ", "TR" ,"GB",
"UA", "US" ,"UZ" ,"ES" ,"HU", "IN" ,"IE", "ID" ,"IS" ,"IL" ,"JP",
"NO","KZ","KG","KR","MX","NL","IT","AU","BE","BG","BR","BY","CA","CZ","DE","DK",
"EE","EG","FI","FR","HR"
);//国家名称代码
var str = SourceCode.substring(0,2);//获取1 2位编号
var b = false;//判断1 2位编号是否合格 true为不合格
if($.inArray(str, state) == -1){//不包含
b = true;
}
if(str == "NN" || !b || !isNaN(str)){
return true;
}
return false;
});
//验证编号三四位 出厂年份
jQuery.validator.addMethod("verifySourceCodeYear", function(value, element) {
var SourceCode = $.trim($("#ESealedRadioactiveSourceForm").find("#sourceCode").val());
if(SourceCode == ''){
return true;
}
var time = SourceCode.substring(2,4);//获取年份
if(/^[0-9N]{2}$/.test(time) ){//不是数字或NN
return true;
}
return false;
});
//验证编号五六位 核素代码
jQuery.validator.addMethod("verifySourceCodeNuclide", function(value, element) {
var SourceCode = $.trim($("#ESealedRadioactiveSourceForm").find("#sourceCode").val());
if(SourceCode == ''){
return true;
}
var str = SourceCode.substring(4,6);//获取5 6位编号()核素代码
if(str == "NN"){
return true;
}
var nuclide = new Array(//现在能用的核素 英文名
"H3","FE","C7","CO","NI","GE","SE","KR","CD","CS","PM",
"GD","IR","PO","RA","P8","AM","CM","CF"
);
if($.inArray(str, nuclide) == -1){//不包含
return false;
}
var nuclide2 = new Array(//现在能用的核素 中文名
"氢-3","铁-55","钴-57","钴-60","镍-63","锗-68","硒-75","氪-85","镉-109","铯-137","钷-147",
"钆-153","铱-192","钋-210","镭-226","钚-238","镅-241","锔-244","锎-252",
"磷-32","钼-99","钯-103","碘-125","碘-131","金-198"
);
//获取选取的核素名称
var element = $("#element span").html();
if($.inArray(element, nuclide2) == -1){//不包含
return false;
}
for(i = 0; i < nuclide.length; i++){//看元素是否对应 代码 和名字
if(element == nuclide2[i]){
if(str != nuclide[i]){//编号5 6位是否和核素英文代码相同
return false;
|
if( /^[A-Z0-9\d]{2}$/.test(str) ){
return true;
}
return false;
});
//验证编号是否正确
jQuery.validator.addMethod("verifySourceCode", function(value, element) {
var SourceCode = $.trim($("#ESealedRadioactiveSourceForm").find("#sourceCode").val());
if(SourceCode == ''){
return true;
}
if( /^[A-Z\d]{2}[0-9N]{2}[A-Z0-9\d]{2}[\d]{5}[1-5N]$/.test(SourceCode) ){
return true;
}
return false;
});
//所在活度正数不为空时指数 没有值时 指数默认为1
$("#activity").change(function(){
var str = $(this).val();
var activityExponent = $("#activityExponent").val();//指数
if(str != "" && activityExponent == ""){
$("#activityExponent").val("0")
}
if(str == ""){
$("#activityExponent").val("")
}
});
//当活度正数有值时候 指数不能为 空
$("#activityExponent").change(function(){
var activityExponent = $(this).val();
var str = $("#activity").val();
if(str != "" && activityExponent == ""){
$("#activityExponent").val("0")
}
});
$("#ESealedRadioactiveSourceForm").validate({
rules: {
nuclideId: {//核素名称
required: true
},
purpose : {//用途
required:true
},
areaId: {//场所
required: true
},
sourceCode : {//放射源编号
rangelength : [12,12],
verifySourceCodeUnit : true,
verifySourceCodeYear : true,
verifySourceCodeNuclide : true,
verifySourceCode : true
},
activityExponent : {//活度指数
digits:true,
range:[0,20]
},
activity : {//活度 正数
number : true,
verifyfigure : true
}
},
messages: {
sourceCode : {//放射源编号
rangelength :$.validator.format("<div class='tisyz'><i class='tisbefore'></i>请输入{0}位的放射源编号</div>"),
verifySourceCodeUnit : "<div class='tisyz'><i class='tisbefore'></i>一二位是生产单位代码</div>",
verifySourceCodeYear : "<div class='tisyz'><i class='tisbefore'></i>三四位是出厂年份后两位或NN</div>",
verifySourceCodeNuclide : "<div class='tisyz'><i class='tisbefore'></i>五六位是核素代码编号或NN</div>",
verifySourceCode : "<div class='tisyz'><i class='tisbefore'></i>放射编号后六位不正确</div>"
},
activityExponent : {
digits:"<div class='tisyz'><i class='tisbefore'></i>请输入0到20之间的整数</div>",
range:"<div class='tisyz'><i class='tisbefore'></i>请输入0到20之间的整数</div>"
},
activity : {
number : "<div class='tisyz'><i class='tisbefore'></i>请输入正数且4位有效数字</div>",
verifyfigure : "<div class='tisyz'><i class='tisbefore'></i>请输入正数且4位有效数字</div>"
}
}
});
}
}
})();
jQuery(document).ready(function() {
ESealedRadioactiveSourceDetail.init();//初始化
});
//核素生产厂家 单选
function getNuclideManufacturers(){
Company_Single(function(data){
$("#nuclideManufacturersList1").html("");//清空数据
//保存生产厂家的id
$("#nuclideManufacturers").val($(data).val());
$("#nuclideManufacturersList1").append(data);
$("#nuclideManufacturers").focus().blur()//光标切入移除
},5);//1.个人剂量监测单位;2.职业健康检查单位;3.评价与检测机构; 4.供货商;5.生产厂家;6.设计单位;7.施工单位;
}
//区域单选
function selectArea1() {
SelectRoom_Single(function(data) {
$("#areaList1").children().remove();//单选 删除里面多余的数据
$("#hospitalArea").html("");//清空 所在院区
//所在机房或区域 id
$("#areaId").val("");
if(data.id == undefined || data.id == ""){
return;
}
//本级区域
$("#areaList1").append(dataNodeSelected(data.id, data.name));
//生成所在院区
$("#hospitalArea").html(data.areaName);
//所在机房或区域 id
$("#areaId").val(data.id);
$("#areaId").focus().blur()//光标切入移除
});
}
//人员单选
function selectStaff1() {
SelectStaff_Single(function(data) {
$("#staffList1 li").remove();
$("#staffId").val("");
if(data.id == undefined || data.id == ""){
return;
}
$("#staffList1").append(dataNodeSelected(data.id, data.name));
//保存 人员id
$("#staffId").val(data.id);
$("#staffId").focus().blur()//光标切入移除
});
}
//元素周期表
function getElement(){
element(function(data){
$("#element").html("");
if( $(data).attr("name") == 'undefined' || $(data).attr("name") == undefined){//判断数据是否为null
$("#nuclideName").val("");//元素名称
$("#nuclideId").val("");//id
return;
}
$("#element").append(data);
//保存数据
$("#nuclideName").val($(data).attr("name"));//元素名称
$("#nuclideId").val($(data).val());//id
$("#nuclideId").focus().blur()//光标切入移除
});
}
//数组去重
Array.prototype.unique3 = function(){
var res = [];
var json = {};
for(var i = 0; i < this.length; i++){
if(!json[this[i]]){
res.push(this[i]);
json[this[i]] = 1;
}
}
return res;
} | }
break;
}
}
| conditional_block |
ESealedRadioactiveSourceDetail.js | //密封放射
var ESealedRadioactiveSourceDetail = (function() {
return {
init : function(){//初始化
// 区域 绑定删除事件 删除li的时候更新数据
$("#areaList1").click(function(e){
if(e.target.nodeName != "I"){//点击i的时候触发
return;
}
//获取i-> a-> li
$(e.target).parent().parent().remove();
//所在院区
$("#hospitalArea").html("");
$("#Area").val("");
//所在机房或区域 id
$("#areaId").val("");
});
// 人员 绑定删除事件 删除li的时候更新数据
$("#staffList1").click(function(e){
if(e.target.nodeName != "I"){//点击i的时候触发
return;
}
//获取i-> a-> li
$(e.target).parent().parent().remove();
//人员id 清空
$("#staffId").val("");
});
// 元素表 绑定删除事件 删除li的时候更新数据
$("#element").click(function(e){
if(e.target.nodeName != "I"){//点击i的时候触发
return;
}
//获取i-> a-> li
$(e.target).parent().parent().remove();
//元素名称id 清空
$("#nuclideId").val("");
$("#nuclideName").val("");
});
// 生产单位 绑定删除事件 删除li的时候更新数据
$("#nuclideManufacturersList1").click(function(e){
if(e.target.nodeName != "I"){//点击i的时候触发
return;
}
//获取i-> a-> li
$(e.target).parent().parent().remove();
//生产单位
$("#nuclideManufacturers").val("");
});
//根据编号生成类别
$("#sourceCode").change(function(){
var sourceCode = $(this).val();
if(!/^[A-Z\d]{2}[0-9N]{2}[A-Z0-9\d]{2}[\d]{5}[1-5N]$/.test(sourceCode)){
$("#sourceType").val("0");
$("#sourceType1").html("");
var sourceType = sourceCode.substr(sourceCode.length-1,1);
return;
}
//根据最后一个字符生成类别
var sourceType = sourceCode.substr(sourceCode.length-1,1);
if(sourceType == "N"){
$("#sourceType").val("0");
$("#sourceType1").html("");
}
if(sourceType == 1){
$("#sourceType").val("1");
$("#sourceType1").html("I");
}
if(sourceType == 2){
$("#sourceType").val("2");
$("#sourceType1").html("II");
}
if(sourceType == 3){
$("#sourceType").val("3");
$("#sourceType1").html("III");
}
if(sourceType == 4){
$("#sourceType").val("4");
$("#sourceType1").html("IV");
}
if(sourceType == 5){
$("#sourceType").val("5");
$("#sourceType1").html("V");
}
});
ESealedRadioactiveSourceDetail.initValidate();
},
initValidate : function(){
//提交
$("#ESealedRadioactiveSourceForm").find(".a-submit").click(function(){
if ($("#ESealedRadioactiveSourceForm").valid()) {
var data = {
id : $("#ESealedRadioactiveSourceForm").find("#id").val(),
nuclideId : $("#ESealedRadioactiveSourceForm").find("#nuclideId").val(),// 核素表id
staffId : $("#ESealedRadioactiveSourceForm").find("#staffId").val(),// 存储用户人的id
areaId : $("#ESealedRadioactiveSourceForm").find("#areaId").val(),//区域表的id
nuclideManufacturers : $("#ESealedRadioactiveSourceForm").find("#nuclideManufacturers").val(),//核素生产厂家id
activityExponent : $("#ESealedRadioactiveSourceForm").find("#activityExponent").val(),//活度指数
activity : $("#ESealedRadioactiveSourceForm").find("#activity").val(),//活度正数
measureTime : $("#ESealedRadioactiveSourceForm").find("#measureTime").val(),//测量日期
sourceCode : $("#ESealedRadioactiveSourceForm").find("#sourceCode").val(),//放射编号
purpose : $("#ESealedRadioactiveSourceForm").find("#purpose").val(),//用途
sourceType : $("#ESealedRadioactiveSourceForm").find("#sourceType").val(),// 放射源类别
activityType : $("#ESealedRadioactiveSourceForm").find("#activityType").val(),// 活动种类
nuclideName : $("#ESealedRadioactiveSourceForm").find("#nuclideName").val()//核素名称
};
$.post(home_url + "/EDevice/sealRadiate/submit",data,
function (response) {
if (response.code == 0){//提示 确认后跳转到查看
gridAddCallback("ESealedRadioactiveSourceTable");//刷新父页面table
location.href = home_url + "/EDevice/sealRadiate/lookOver?id=" + response.data;
}
}, 'json');
}
});
//验证编号一二位 生产单位代码
jQuery.validator.addMethod("verifySourceCodeUnit", function(value, element) {
var SourceCode = $.trim($("#ESealedRadioactiveSourceForm").find("#sourceCode").val());
if(SourceCode == ''){
return true;
}
var SourceCode = $.trim($("#ESealedRadioactiveSourceForm").find("#sourceCode").val());
var state = new Array(
"AR", "GR", "PL", "PT", "RU" ,"SI", "SK" ,"RO" ,"ZA", "SE" ,"ZZ", "TR" ,"GB",
"UA", "US" ,"UZ" ,"ES" ,"HU", "IN" ,"IE", "ID" ,"IS" ,"IL" ,"JP",
"NO","KZ","KG","KR","MX","NL","IT","AU","BE","BG","BR","BY","CA","CZ","DE","DK",
"EE","EG","FI","FR","HR"
);//国家名称代码
var str = SourceCode.substring(0,2);//获取1 2位编号
var b = false;//判断1 2位编号是否合格 true为不合格
if($.inArray(str, state) == -1){//不包含
b = true;
}
if(str == "NN" || !b || !isNaN(str)){
return true;
}
return false;
});
//验证编号三四位 出厂年份
jQuery.validator.addMethod("verifySourceCodeYear", function(value, element) {
var SourceCode = $.trim($("#ESealedRadioactiveSourceForm").find("#sourceCode").val());
if(SourceCode == ''){
return true;
}
var time = SourceCode.substring(2,4);//获取年份
if(/^[0-9N]{2}$/.test(time) ){//不是数字或NN
return true;
}
return false;
});
//验证编号五六位 核素代码
jQuery.validator.addMethod("verifySourceCodeNuclide", function(value, element) {
var SourceCode = $.trim($("#ESealedRadioactiveSourceForm").find("#sourceCode").val());
if(SourceCode == ''){
return true;
}
var str = SourceCode.substring(4,6);//获取5 6位编号()核素代码
if(str == "NN"){
return true;
}
var nuclide = new Array(//现在能用的核素 英文名
"H3","FE","C7","CO","NI","GE","SE","KR","CD","CS","PM",
"GD","IR","PO","RA","P8","AM","CM","CF"
);
if($.inArray(str, nuclide) == -1){//不包含
return false;
}
var nuclide2 = new Array(//现在能用的核素 中文名
"氢-3","铁-55","钴-57","钴-60","镍-63","锗-68","硒-75","氪-85","镉-109","铯-137","钷-147",
"钆-153","铱-192","钋-210","镭-226","钚-238","镅-241","锔-244","锎-252",
"磷-32","钼-99","钯-103","碘-125","碘-131","金-198"
);
//获取选取的核素名称
var element = $("#element span").html();
if($.inArray(element, nuclide2) == -1){//不包含
return false;
}
for(i = 0; i < nuclide.length; i++){//看元素是否对应 代码 和名字
if(element == nuclide2[i]){
if(str != nuclide[i]){//编号5 6位是否和核素英文代码相同
return false;
}
break;
}
}
if( /^[A-Z0-9\d]{2}$/.test(str) ){
return true;
}
return false;
});
//验证编号是否正确
jQuery.validator.addMethod("verifySourceCode", function(value, element) {
var SourceCode = $.trim($("#ESealedRadioactiveSourceForm").find("#sourceCode").val());
if(SourceCode == ''){
return true;
}
if( /^[A-Z\d]{2}[0-9N]{2}[A-Z0-9\d]{2}[\d]{5}[1-5N]$/.test(SourceCode) ){
return true;
}
return false;
});
//所在活度正数不为空时指数 没有值时 指数默认为1
$("#activity").change(function(){
var str = $(this).val();
var activityExponent = $("#activityExponent").val();//指数
if(str != "" && activityExponent == ""){
$("#activityExponent").val("0")
}
if(str == ""){
$("#activityExponent").val("")
}
});
//当活度正数有值时候 指数不能为 空
$("#activityExponent").change(function(){
var activityExponent = $(this).val();
var str = $("#activity").val();
if(str != "" && activityExponent == ""){
$("#activityExponent").val("0")
}
});
$("#ESealedRadioactiveSourceForm").validate({
rules: {
nuclideId: {//核素名称
required: true
},
purpose : {//用途
required:true
},
areaId: {//场所
required: true
},
sourceCode : {//放射源编号
rangelength : [12,12],
verifySourceCodeUnit : true,
verifySourceCodeYear : true,
verifySourceCodeNuclide : true,
verifySourceCode : true
},
activityExponent : {//活度指数
digits:true,
range:[0,20]
},
activity : {//活度 正数
number : true,
verifyfigure : true
}
},
messages: {
sourceCode : {//放射源编号
rangelength :$.validator.format("<div class='tisyz'><i class='tisbefore'></i>请输入{0}位的放射源编号</div>"),
verifySourceCodeUnit : "<div class='tisyz'><i class='tisbefore'></i>一二位是生产单位代码</div>",
verifySourceCodeYear : "<div class='tisyz'><i class='tisbefore'></i>三四位是出厂年份后两位或NN</div>",
verifySourceCodeNuclide : "<div class='tisyz'><i class='tisbefore'></i>五六位是核素代码编号或NN</div>",
verifySourceCode : "<div class='tisyz'><i class='tisbefore'></i>放射编号后六位不正确</div>"
},
activityExponent : {
digits:"<div class='tisyz'><i class='tisbefore'></i>请输入0到20之间的整数</div>",
range:"<div class='tisyz'><i class='tisbefore'></i>请输入0到20之间的整数</div>"
},
activity : {
number : "<div class='tisyz'><i class='tisbefore'></i>请输入正数且4位有效数字</div>",
verifyfigure : "<div class='tisyz'><i class='tisbefore'></i>请输入正数且4位有效数字</div>"
}
}
});
}
}
})();
jQuery(document).ready(function() {
ESealedRadioactiveSourceDetail.init();//初始化
});
//核素生产厂家 单选
function getNuclideManufacturers(){
Company_Single(function(data){
$("#nuclideManufacturersList1").html("");//清空数据
//保存生产厂家的id
$("#nuclideManufacturers").val($(data).val());
$("#nuclideManufacturersList1").append(data);
$("#nuclideManufacturers").focus().blur()//光标切入移除
},5);//1.个人剂量监测单位;2.职业健康检查单位;3.评价与检测机构; 4.供货商;5.生产厂家;6.设计单位;7.施工单位;
}
//区域单选
function selectArea1() {
SelectRoom_Single(function(data) {
$("#areaList1").children().remove();//单选 删除里面多余的数据
$("#hospitalArea").html("");//清空 所在院区
//所在机房或区域 id
$("#areaId").val("");
if(data.id == undefined || data.id == ""){
return;
}
//本级区域
$("#areaList1").append(dataNodeSelected(data.id, data.name));
//生成所在院区
$("#hospitalArea").html(data.areaName);
//所在机房或区域 id
$("#areaId").val(data.id);
$("#areaId").focus().blur()//光标切入移除
});
}
//人员单选
function selectStaff1() {
SelectStaff_Single(function(data) {
$("#staffList1 li").remove();
$("#staffId").val("");
if(data.id == undefined || data.id == ""){
return;
}
$("#staffList1").append(dataNodeSelected(data.id, data.name));
//保存 人员id
$("#staffId").val(data.id);
$("#staffId").focus().blur()//光标切入移除
});
}
//元素周期表
function getElement(){
element(function(data){
$("#element").html("");
if( $(data).attr("name") == 'undefined' || $(data).attr("name") == undefined){//判断数据是否为null
$("#nuclideName").val("");//元素名称
$("#nuclideId").val("");//id
return;
}
$("#element").append(data);
//保存数据
$("#nuclideName").val($(data).attr("name"));//元素名称
$("#nuclideId").val($(data).val());//id
$("#nuclideId").focus().blur()//光标切入移除
});
}
//数组去重
Array.prototype.unique3 = function(){
var res = [];
var json = {};
for(var i = 0; i < this.length; i++){
if(!json[this[i]]){
res.push(this[i]);
json[this[i]] = 1;
}
}
return res;
} | identifier_body | ||
ESealedRadioactiveSourceDetail.js | //密封放射
var ESealedRadioactiveSourceDetail = (function() {
return {
init : function(){//初始化
// 区域 绑定删除事件 删除li的时候更新数据
$("#areaList1").click(function(e){
if(e.target.nodeName != "I"){//点击i的时候触发
return;
}
//获取i-> a-> li
$(e.target).parent().parent().remove();
//所在院区
$("#hospitalArea").html("");
$("#Area").val("");
//所在机房或区域 id
$("#areaId").val("");
});
// 人员 绑定删除事件 删除li的时候更新数据
$("#staffList1").click(function(e){
if(e.target.nodeName != "I"){//点击i的时候触发
return;
}
//获取i-> a-> li
$(e.target).parent().parent().remove();
//人员id 清空
$("#staffId").val("");
});
// 元素表 绑定删除事件 删除li的时候更新数据
$("#element").click(function(e){
if(e.target.nodeName != "I"){//点击i的时候触发
return;
}
//获取i-> a-> li
$(e.target).parent().parent().remove();
//元素名称id 清空
$("#nuclideId").val("");
$("#nuclideName").val("");
});
// 生产单位 绑定删除事件 删除li的时候更新数据
$("#nuclideManufacturersList1").click(function(e){
if(e.target.nodeName != "I"){//点击i的时候触发
return;
}
//获取i-> a-> li
$(e.target).parent().parent().remove();
//生产单位
$("#nuclideManufacturers").val("");
});
//根据编号生成类别
$("#sourceCode").change(function(){
var sourceCode = $(this).val();
if(!/^[A-Z\d]{2}[0-9N]{2}[A-Z0-9\d]{2}[\d]{5}[1-5N]$/.test(sourceCode)){
$("#sourceType").val("0");
$("#sourceType1").html("");
var sourceType = sourceCode.substr(sourceCode.length-1,1);
return;
}
//根据最后一个字符生成类别
var sourceType = sourceCode.substr(sourceCode.length-1,1);
if(sourceType == "N"){
$("#sourceType").val("0");
$("#sourceType1").html("");
}
if(sourceType == 1){
$("#sourceType").val("1");
$("#sourceType1").html("I");
}
if(sourceType == 2){
$("#sourceType").val("2");
$("#sourceType1").html("II");
}
if(sourceType == 3){
$("#sourceType").val("3");
$("#sourceType1").html("III");
}
if(sourceType == 4){
$("#sourceType").val("4");
$("#sourceType1").html("IV");
}
if(sourceType == 5){
$("#sourceType").val("5");
$("#sourceType1").html("V");
}
});
ESealedRadioactiveSourceDetail.initValidate();
},
initValidate : function(){
//提交
$("#ESealedRadioactiveSourceForm").find(".a-submit").click(function(){
if ($("#ESealedRadioactiveSourceForm").valid()) {
var data = {
id : $("#ESealedRadioactiveSourceForm").find("#id").val(),
nuclideId : $("#ESealedRadioactiveSourceForm").find("#nuclideId").val(),// 核素表id
staffId : $("#ESealedRadioactiveSourceForm").find("#staffId").val(),// 存储用户人的id
areaId : $("#ESealedRadioactiveSourceForm").find("#areaId").val(),//区域表的id
nuclideManufacturers : $("#ESealedRadioactiveSourceForm").find("#nuclideManufacturers").val(),//核素生产厂家id
activityExponent : $("#ESealedRadioactiveSourceForm").find("#activityExponent").val(),//活度指数
activity : $("#ESealedRadioactiveSourceForm").find("#activity").val(),//活度正数
measureTime : $("#ESealedRadioactiveSourceForm").find("#measureTime").val(),//测量日期
sourceCode : $("#ESealedRadioactiveSourceForm").find("#sourceCode").val(),//放射编号
purpose : $("#ESealedRadioactiveSourceForm").find("#purpose").val(),//用途
sourceType : $("#ESealedRadioactiveSourceForm").find("#sourceType").val(),// 放射源类别
activityType : $("#ESealedRadioactiveSourceForm").find("#activityType").val(),// 活动种类
nuclideName : $("#ESealedRadioactiveSourceForm").find("#nuclideName").val()//核素名称
};
$.post(home_url + "/EDevice/sealRadiate/submit",data,
function (response) {
if (response.code == 0){//提示 确认后跳转到查看
gridAddCallback("ESealedRadioactiveSourceTable");//刷新父页面table
location.href = home_url + "/EDevice/sealRadiate/lookOver?id=" + response.data;
}
}, 'json');
}
});
//验证编号一二位 生产单位代码
jQuery.validator.addMethod("verifySourceCodeUnit", function(value, element) {
var SourceCode = $.trim($("#ESealedRadioactiveSourceForm").find("#sourceCode").val());
if(SourceCode == ''){
return true;
}
var SourceCode = $.trim($("#ESealedRadioactiveSourceForm").find("#sourceCode").val());
var state = new Array(
"AR", "GR", "PL", "PT", "RU" ,"SI", "SK" ,"RO" ,"ZA", "SE" ,"ZZ", "TR" ,"GB",
"UA", "US" ,"UZ" ,"ES" ,"HU", "IN" ,"IE", "ID" ,"IS" ,"IL" ,"JP",
"NO","KZ","KG","KR","MX","NL","IT","AU","BE","BG","BR","BY","CA","CZ","DE","DK",
"EE","EG","FI","FR","HR"
);//国家名称代码
var str = SourceCode.substring(0,2);//获取1 2位编号
var b = false;//判断1 2位编号是否合格 true为不合格
if($.inArray(str, state) == -1){//不包含
b = true;
}
if(str == "NN" || !b || !isNaN(str)){
return true;
}
return false;
});
//验证编号三四位 出厂年份
jQuery.validator.addMethod("verifySourceCodeYear", function(value, element) {
var SourceCode = $.trim($("#ESealedRadioactiveSourceForm").find("#sourceCode").val());
if(SourceCode == ''){
return true;
}
var time = SourceCode.substring(2,4);//获取年份
if(/^[0-9N]{2}$/.test(time) ){//不是数字或NN
return true;
}
return false;
});
//验证编号五六位 核素代码
jQuery.validator.addMethod("verifySourceCodeNuclide", function(value, element) {
var SourceCode = $.trim($("#ESealedRadioactiveSourceForm").find("#sourceCode").val());
if(SourceCode == ''){
return true;
}
var str = SourceCode.substring(4,6);//获取5 6位编号()核素代码
if(str == "NN"){
return true;
}
var nuclide = new Array(//现在能用的核素 英文名
"H3","FE","C7","CO","NI","GE","SE","KR","CD","CS","PM",
"GD","IR","PO","RA","P8","AM","CM","CF"
);
if($.inArray(str, nuclide) == -1){//不包含
return false;
}
var nuclide2 = new Array(//现在能用的核素 中文名
"氢-3","铁-55","钴-57","钴-60","镍-63","锗-68","硒-75","氪-85","镉-109","铯-137","钷-147",
"钆-153","铱-192","钋-210","镭-226","钚-238","镅-241","锔-244","锎-252",
"磷-32","钼-99","钯-103","碘-125","碘-131","金-198"
);
//获取选取的核素名称
var element = $("#element span").html();
if($.inArray(element, nuclide2) == -1){//不包含
return false;
}
for(i = 0; i < nuclide.length; i++){//看元素是否对应 代码 和名字
if(element == nuclide2[i]){
if(str != nuclide[i]){//编号5 6位是否和核素英文代码相同
return false;
}
break;
}
}
if( /^[A-Z0-9\d]{2}$/.test(str) ){
return true;
}
return false;
});
//验证编号是否正确
jQuery.validator.addMethod("verifySourceCode", function(value, element) {
var SourceCode = $.trim($("#ESealedRadioactiveSourceForm").find("#sourceCode").val());
if(SourceCode == ''){
return true;
}
if( /^[A-Z\d]{2}[0-9N]{2}[A-Z0-9\d]{2}[\d]{5}[1-5N]$/.test(SourceCode) ){
return true;
}
return false;
});
//所在活度正数不为空时指数 没有值时 指数默认为1
$("#activity").change(function(){
var str = $(this).val();
var activityExponent = $("#activityExponent").val();//指数
if(str != "" && activityExponent == ""){
$("#activityExponent").val("0")
}
if(str == ""){
$("#activityExponent").val("")
}
});
//当活度正数有值时候 指数不能为 空
$("#activityExponent").change(function(){
var activityExponent = $(this).val();
var str = $("#activity").val();
if(str != "" && activityExponent == ""){
$("#activityExponent").val("0")
}
});
$("#ESealedRadioactiveSourceForm").validate({
rules: {
nuclideId: {//核素名称
required: true
},
purpose : {//用途
required:true
},
areaId: {//场所
required: true
},
sourceCode : {//放射源编号
rangelength : [12,12],
verifySourceCodeUnit : true,
verifySourceCodeYear : true,
verifySourceCodeNuclide : true,
verifySourceCode : true
},
activityExponent : {//活度指数
digits:true,
range:[0,20]
},
activity : {//活度 正数
number : true,
verifyfigure : true
| },
messages: {
sourceCode : {//放射源编号
rangelength :$.validator.format("<div class='tisyz'><i class='tisbefore'></i>请输入{0}位的放射源编号</div>"),
verifySourceCodeUnit : "<div class='tisyz'><i class='tisbefore'></i>一二位是生产单位代码</div>",
verifySourceCodeYear : "<div class='tisyz'><i class='tisbefore'></i>三四位是出厂年份后两位或NN</div>",
verifySourceCodeNuclide : "<div class='tisyz'><i class='tisbefore'></i>五六位是核素代码编号或NN</div>",
verifySourceCode : "<div class='tisyz'><i class='tisbefore'></i>放射编号后六位不正确</div>"
},
activityExponent : {
digits:"<div class='tisyz'><i class='tisbefore'></i>请输入0到20之间的整数</div>",
range:"<div class='tisyz'><i class='tisbefore'></i>请输入0到20之间的整数</div>"
},
activity : {
number : "<div class='tisyz'><i class='tisbefore'></i>请输入正数且4位有效数字</div>",
verifyfigure : "<div class='tisyz'><i class='tisbefore'></i>请输入正数且4位有效数字</div>"
}
}
});
}
}
})();
jQuery(document).ready(function() {
ESealedRadioactiveSourceDetail.init();//初始化
});
//核素生产厂家 单选
function getNuclideManufacturers(){
Company_Single(function(data){
$("#nuclideManufacturersList1").html("");//清空数据
//保存生产厂家的id
$("#nuclideManufacturers").val($(data).val());
$("#nuclideManufacturersList1").append(data);
$("#nuclideManufacturers").focus().blur()//光标切入移除
},5);//1.个人剂量监测单位;2.职业健康检查单位;3.评价与检测机构; 4.供货商;5.生产厂家;6.设计单位;7.施工单位;
}
//区域单选
function selectArea1() {
SelectRoom_Single(function(data) {
$("#areaList1").children().remove();//单选 删除里面多余的数据
$("#hospitalArea").html("");//清空 所在院区
//所在机房或区域 id
$("#areaId").val("");
if(data.id == undefined || data.id == ""){
return;
}
//本级区域
$("#areaList1").append(dataNodeSelected(data.id, data.name));
//生成所在院区
$("#hospitalArea").html(data.areaName);
//所在机房或区域 id
$("#areaId").val(data.id);
$("#areaId").focus().blur()//光标切入移除
});
}
//人员单选
function selectStaff1() {
SelectStaff_Single(function(data) {
$("#staffList1 li").remove();
$("#staffId").val("");
if(data.id == undefined || data.id == ""){
return;
}
$("#staffList1").append(dataNodeSelected(data.id, data.name));
//保存 人员id
$("#staffId").val(data.id);
$("#staffId").focus().blur()//光标切入移除
});
}
//元素周期表
function getElement(){
element(function(data){
$("#element").html("");
if( $(data).attr("name") == 'undefined' || $(data).attr("name") == undefined){//判断数据是否为null
$("#nuclideName").val("");//元素名称
$("#nuclideId").val("");//id
return;
}
$("#element").append(data);
//保存数据
$("#nuclideName").val($(data).attr("name"));//元素名称
$("#nuclideId").val($(data).val());//id
$("#nuclideId").focus().blur()//光标切入移除
});
}
//数组去重
Array.prototype.unique3 = function(){
var res = [];
var json = {};
for(var i = 0; i < this.length; i++){
if(!json[this[i]]){
res.push(this[i]);
json[this[i]] = 1;
}
}
return res;
} | }
| random_line_split |
ESealedRadioactiveSourceDetail.js | //密封放射
var ESealedRadioactiveSourceDetail = (function() {
return {
init : function(){//初始化
// 区域 绑定删除事件 删除li的时候更新数据
$("#areaList1").click(function(e){
if(e.target.nodeName != "I"){//点击i的时候触发
return;
}
//获取i-> a-> li
$(e.target).parent().parent().remove();
//所在院区
$("#hospitalArea").html("");
$("#Area").val("");
//所在机房或区域 id
$("#areaId").val("");
});
// 人员 绑定删除事件 删除li的时候更新数据
$("#staffList1").click(function(e){
if(e.target.nodeName != "I"){//点击i的时候触发
return;
}
//获取i-> a-> li
$(e.target).parent().parent().remove();
//人员id 清空
$("#staffId").val("");
});
// 元素表 绑定删除事件 删除li的时候更新数据
$("#element").click(function(e){
if(e.target.nodeName != "I"){//点击i的时候触发
return;
}
//获取i-> a-> li
$(e.target).parent().parent().remove();
//元素名称id 清空
$("#nuclideId").val("");
$("#nuclideName").val("");
});
// 生产单位 绑定删除事件 删除li的时候更新数据
$("#nuclideManufacturersList1").click(function(e){
if(e.target.nodeName != "I"){//点击i的时候触发
return;
}
//获取i-> a-> li
$(e.target).parent().parent().remove();
//生产单位
$("#nuclideManufacturers").val("");
});
//根据编号生成类别
$("#sourceCode").change(function(){
var sourceCode = $(this).val();
if(!/^[A-Z\d]{2}[0-9N]{2}[A-Z0-9\d]{2}[\d]{5}[1-5N]$/.test(sourceCode)){
$("#sourceType").val("0");
$("#sourceType1").html("");
var sourceType = sourceCode.substr(sourceCode.length-1,1);
return;
}
//根据最后一个字符生成类别
var sourceType = sourceCode.substr(sourceCode.length-1,1);
if(sourceType == "N"){
$("#sourceType").val("0");
$("#sourceType1").html("");
}
if(sourceType == 1){
$("#sourceType").val("1");
$("#sourceType1").html("I");
}
if(sourceType == 2){
$("#sourceType").val("2");
$("#sourceType1").html("II");
}
if(sourceType == 3){
$("#sourceType").val("3");
$("#sourceType1").html("III");
}
if(sourceType == 4){
$("#sourceType").val("4");
$("#sourceType1").html("IV");
}
if(sourceType == 5){
$("#sourceType").val("5");
$("#sourceType1").html("V");
}
});
ESealedRadioactiveSourceDetail.initValidate();
},
initValidate : function(){
//提交
$("#ESealedRadioactiveSourceForm").find(".a-submit").click(function(){
if ($("#ESealedRadioactiveSourceForm").valid()) {
var data = {
id : $("#ESealedRadioactiveSourceForm").find("#id").val(),
nuclideId : $("#ESealedRadioactiveSourceForm").find("#nuclideId").val(),// 核素表id
staffId : $("#ESealedRadioactiveSourceForm").find("#staffId").val(),// 存储用户人的id
areaId : $("#ESealedRadioactiveSourceForm").find("#areaId").val(),//区域表的id
nuclideManufacturers : $("#ESealedRadioactiveSourceForm").find("#nuclideManufacturers").val(),//核素生产厂家id
activityExponent : $("#ESealedRadioactiveSourceForm").find("#activityExponent").val(),//活度指数
activity : $("#ESealedRadioactiveSourceForm").find("#activity").val(),//活度正数
measureTime : $("#ESealedRadioactiveSourceForm").find("#measureTime").val(),//测量日期
sourceCode : $("#ESealedRadioactiveSourceForm").find("#sourceCode").val(),//放射编号
purpose : $("#ESealedRadioactiveSourceForm").find("#purpose").val(),//用途
sourceType : $("#ESealedRadioactiveSourceForm").find("#sourceType").val(),// 放射源类别
activityType : $("#ESealedRadioactiveSourceForm").find("#activityType").val(),// 活动种类
nuclideName : $("#ESealedRadioactiveSourceForm").find("#nuclideName").val()//核素名称
};
$.post(home_url + "/EDevice/sealRadiate/submit",data,
function (response) {
if (response.code == 0){//提示 确认后跳转到查看
gridAddCallback("ESealedRadioactiveSourceTable");//刷新父页面table
location.href = home_url + "/EDevice/sealRadiate/lookOver?id=" + response.data;
}
}, 'json');
}
});
//验证编号一二位 生产单位代码
jQuery.validator.addMethod("verifySourceCodeUnit", function(value, element) {
var SourceCode = $.trim($("#ESealedRadioactiveSourceForm").find("#sourceCode").val());
if(SourceCode == ''){
return true;
}
var SourceCode = $.trim($("#ESealedRadioactiveSourceForm").find("#sourceCode").val());
var state = new Array(
"AR", "GR", "PL", "PT", "RU" ,"SI", "SK" ,"RO" ,"ZA", "SE" ,"ZZ", "TR" ,"GB",
"UA", "US" ,"UZ" ,"ES" ,"HU", "IN" ,"IE", "ID" ,"IS" ,"IL" ,"JP",
"NO","KZ","KG","KR","MX","NL","IT","AU","BE","BG","BR","BY","CA","CZ","DE","DK",
"EE","EG","FI","FR","HR"
);//国家名称代码
var str = SourceCode.substring(0,2);//获取1 2位编号
var b = false;//判断1 2位编号是否合格 true为不合格
if($.inArray(str, state) == -1){//不包含
b = true;
}
if(str == "NN" || !b || !isNaN(str)){
return true;
}
return false;
});
//验证编号三四位 出厂年份
jQuery.validator.addMethod("verifySourceCodeYear", function(value, element) {
var SourceCode = $.trim($("#ESealedRadioactiveSourceForm").find("#sourceCode").val());
if(SourceCode == ''){
return true;
}
var time = SourceCode.substring(2,4);//获取年份
if(/^[0-9N]{2}$/.test(time) ){//不是数字或NN
return true;
}
return false;
});
//验证编号五六位 核素代码
jQuery.validator.addMethod("verifySourceCodeNuclide", function(value, element) {
var SourceCode = $.trim($("#ESealedRadioactiveSourceForm").find("#sourceCode").val());
if(SourceCode == ''){
return true;
}
var str = SourceCode.substring(4,6);//获取5 6位编号()核素代码
if(str == "NN"){
return true;
}
var nuclide = new Array(//现在能用的核素 英文名
"H3","FE","C7","CO","NI","GE","SE","KR","CD","CS","PM",
"GD","IR","PO","RA","P8","AM","CM","CF"
);
if($.inArray(str, nuclide) == -1){//不包含
return false;
}
var nuclide2 = new Array(//现在能用的核素 中文名
"氢-3","铁-55","钴-57","钴-60","镍-63","锗-68","硒-75","氪-85","镉-109","铯-137","钷-147",
"钆-153","铱-192","钋-210","镭-226","钚-238","镅-241","锔-244","锎-252",
"磷-32","钼-99","钯-103","碘-125","碘-131","金-198"
);
//获取选取的核素名称
var element = $("#element span").html();
if($.inArray(element, nuclide2) == -1){//不包含
return false;
}
for(i = 0; i < nuclide.length; i++){//看元素是否对应 代码 和名字
if(element == nuclide2[i]){
if(str != nuclide[i]){//编号5 6位是否和核素英文代码相同
return false;
}
break;
}
}
if( /^[A-Z0-9\d]{2}$/.test(str) ){
return true;
}
return false;
});
//验证编号是否正确
jQuery.validator.addMethod("verifySourceCode", function(value, element) {
var SourceCode = $.trim($("#ESealedRadioactiveSourceForm").find("#sourceCode").val());
if(SourceCode == ''){
return true;
}
if( /^[A-Z\d]{2}[0-9N]{2}[A-Z0-9\d]{2}[\d]{5}[1-5N]$/.test(SourceCode) ){
return true;
}
return false;
});
//所在活度正数不为空时指数 没有值时 指数默认为1
$("#activity").change(function(){
var str = $(this).val();
var activityExponent = $("#activityExponent").val();//指数
if(str != "" && activityExponent == ""){
$("#activityExponent").val("0")
}
if(str == ""){
$("#activityExponent").val("")
}
});
//当活度正数有值时候 指数不能为 空
$("#activityExponent").change(function(){
var activityExponent = $(this).val();
var str = $("#activity").val();
if(str != "" && activityExponent == ""){
$("#activityExponent").val("0")
}
});
$("#ESealedRadioactiveSourceForm").validate({
rules: {
nuclideId: {//核素名称
required: true
},
purpose : {//用途
required:true
},
areaId: {//场所
required: true
},
sourceCode : {//放射源编号
rangelength : [12,12],
verifySourceCodeUnit : true,
verifySourceCodeYear : true,
verifySourceCodeNuclide : true,
verifySourceCode : true
},
activityExponent : {//活度指数
digits:true,
range:[0,20]
},
activity : {//活度 正数
number : true,
verifyfigure : true
}
},
messages: {
sourceCode : {//放射源编号
rangelength :$.validator.format("<div class='tisyz'><i class='tisbefore'></i>请输入{0}位的放射源编号</div>"),
verifySourceCodeUnit : "<div class='tisyz'><i class='tisbefore'></i>一二位是生产单位代码</div>",
verifySourceCodeYear : "<div class='tisyz'><i class='tisbefore'></i>三四位是出厂年份后两位或NN</div>",
verifySourceCodeNuclide : "<div class='tisyz'><i class='tisbefore'></i>五六位是核素代码编号或NN</div>",
verifySourceCode : "<div class='tisyz'><i class='tisbefore'></i>放射编号后六位不正确</div>"
},
activityExponent : {
digits:"<div class='tisyz'><i class='tisbefore'></i>请输入0到20之间的整数</div>",
range:"<div class='tisyz'><i class='tisbefore'></i>请输入0到20之间的整数</div>"
},
activity : {
number : "<div class='tisyz'><i class='tisbefore'></i>请输入正数且4位有效数字</div>",
verifyfigure : "<div class='tisyz'><i class='tisbefore'></i>请输入正数且4位有效数字</div>"
}
}
});
}
}
})();
jQuery(document).ready(function() {
ESealedRadioactiveSourceDetail.init();//初始化
});
//核素生产厂家 单选
function getNuclideManufacturers(){
Company_Single(function(data){
$("#nuclideManufacturersList1").html("");//清空数据
//保存生产厂家的id
$("#nuclideManufacturers").val($(data).val());
$("#nuclideManufacturersList1").append(data);
$("#nuclideManufacturers").focus().blur()//光标切入移除
},5);//1.个人剂量监测单位;2.职业健康检查单位;3.评价与检测机构; 4.供货商;5.生产厂家;6.设计单位;7.施工单位;
}
//区域单选
function selectArea1() {
SelectRoom_Single(function(data) {
$("#areaList1").children().remove();//单选 删除里面多余的数据
$("#hospitalArea").html("");//清空 所在院区
//所在机房或区域 id
$("#areaId").val("");
if(data.id == undefined || data.id == ""){
return;
}
//本级区域
$("#areaList1").append(dataNodeSelected(data.id, data.name));
//生成所在院区
$("#hospitalArea").html(data.areaName);
//所在机房或区域 id
$("#areaId").val(data.id);
$("#areaId").focus().blur()//光标切入移除
});
}
//人员单选
function selectStaff1() {
SelectStaff_Single(function(data) {
$("#staffList1 li").remove();
$("#staffId").val("");
if(data.id == undefined || data.id == ""){
return;
}
$("#staffList1").append(dataNodeSelected(data.id, data.name));
//保存 人员id
$("#staffId").val(data.id);
$("#staffId").focus().blur()//光标切入移除
});
}
//元素周期表
function getElement(){
element(function(data){
$("#element").html("");
if( $(data).attr("name") == 'undefined' || $(data).attr("name") == undefined){//判断数据是否为null
$("#nuclideName").val("");//元素名称
$("#nuclideId").val("");//id
return;
}
$("#element").append(data);
//保存数据
$("#nuclideName").val($(data).attr("name"));//元素名称
$("#nuclideId").val($(data).val());//id
$("#nuclideId").fo | )//光标切入移除
});
}
//数组去重
Array.prototype.unique3 = function(){
var res = [];
var json = {};
for(var i = 0; i < this.length; i++){
if(!json[this[i]]){
res.push(this[i]);
json[this[i]] = 1;
}
}
return res;
} | cus().blur( | identifier_name |
array.ts | import { Comparer, Predicate } from "./function"
import { Nat, checkNat, isNat } from "./math"
import { Option, exists, iff, optional } from "./option"
import { Seq } from "./seq"
/**
Empty immutable array.
Using this instead of a literal array `[]` to avoid allocating memory.
*/
export const empty: ReadonlyArray<never> = Object.freeze([])
/**
Replace each element with the result of calling `getNewValue`.
If `getNewValue` throws, the inputs will be left in a bad state.
(To mutate each element in place, just use a for-of loop.)
*/
export function mapMutate<T>(inputs: T[], getNewValue: (element: T, index: number) => T): void {
for (let index = 0; index < inputs.length; index++)
inputs[index] = getNewValue(inputs[index], index)
}
/**
Delete elements of an array not satisfying `predicate`.
If `predicate` throws, the array will be left in a bad state.
*/
export function filterMutate<T>(inputs: T[], predicate: Predicate<T>): void {
let writeIndex = 0
for (const input of inputs)
if (predicate(input)) {
inputs[writeIndex] = input
writeIndex++
}
inputs.length = writeIndex
}
/**
Replace elements with the result of [[tryGetOutput]] or delete them if that returns `undefined`.
If [[tryGetOutput] throws, the array will be left in a bad state.
*/
export function mapDefinedMutate<T>(inputs: T[], tryGetOutput: (input: T) => Option<T>): void {
let writeIndex = 0
for (const input of inputs) {
const output = tryGetOutput(input)
if (output !== undefined) {
inputs[writeIndex] = output
writeIndex++
}
}
inputs.length = writeIndex
}
/** Change the value at a single index in an array by applying a function to it. */
export function mutate<T>(inputs: T[], index: Nat, transform: (t: T) => T): void {
checkIndex(inputs, index)
inputs[index] = transform(inputs[index])
}
/**
Remove an element from an array and do not preserve the array's order.
Useful for arrays used to represent small sets.
Returns whether the value was successfully removed.
*/
export function removeUnordered<T>(inputs: T[], value: T, equal?: Comparer<T>): boolean {
for (let i = 0; i < inputs.length; i++)
if (exists(equal) ? equal(inputs[i], value) : inputs[i] === value) {
inputs[i] = last(inputs)!
inputs.length--
return true
}
return false
}
/**
Mutate [[inputs]] by combining them with each in [[other]].
If [[other]] is shorter than [[inputs]], this will reduce [[inputs]] in length.
If [[other]] is longer, the extra entries are ignored.
*/
export function zipMutate<T, U>(inputs: T[], other: Iterable<U>, zipper: (input: T, other: U) => T): void {
const iter = other[Symbol.iterator]()
for (let index = 0; index < inputs.length; index++) {
const { value, done } = iter.next()
if (done) {
inputs.length = index
break
}
inputs[index] = zipper(inputs[index], value)
}
}
/** Provides async utilities for an array. */
export function asyncArray<T>(inputs: T[]): AsyncArrayOps<T> {
return new AsyncArrayOps(inputs)
}
/**
Wrapper class for utilities that mutate arrays asynchronously.
For non-mutating utilities use [[AsyncSeq]].
*/
export class AsyncArrayOps<T> {
constructor(private inputs: T[]) {}
/** Asynchronous [[mapMutate]]. */
async map(getNewValue: (element: T, index: number) => Promise<T>): Promise<void> {
const { inputs } = this
for (let index = 0; index < inputs.length; index++)
inputs[index] = await getNewValue(inputs[index], index)
}
/** Asynchronous [[filterMutate]]. */
async filter(predicate: (element: T) => Promise<boolean>): Promise<void> {
const { inputs } = this
let writeIndex = 0
for (let readIndex = 0; readIndex < inputs.length; readIndex++)
if (await predicate(inputs[readIndex])) {
inputs[writeIndex] = inputs[readIndex]
writeIndex++
}
inputs.length = writeIndex
}
/** Asynchronous [[mapDefinedMutate]]. Performs `tryGetOutput` one element at a time. */
async mapDefined(tryGetOutput: (input: T) => Promise<Option<T>>): Promise<void> {
const { inputs } = this
let writeIndex = 0
for (let readIndex = 0; readIndex < inputs.length; readIndex++) {
const output = await tryGetOutput(inputs[readIndex])
if (output !== undefined) {
inputs[writeIndex] = output
writeIndex++
}
}
inputs.length = writeIndex
}
/** Asynchronous [[mutate]]. */
async mutate(index: Nat, transform: (t: T) => Promise<T>): Promise<void> {
const { inputs } = this
checkIndex(inputs, index)
inputs[index] = await transform(inputs[index])
}
}
/** Provides parallel utilities for an array. */
export function parallelArray<T>(inputs: T[], maxNumberOfThreads?: number): ParallelArrayOps<T> {
return new ParallelArrayOps(inputs, maxNumberOfThreads)
}
/**
Wrapper class for utilities that mutate arrays in parallel.
For non-mutating utilities use [[ParallelSeq]].
*/
export class ParallelArrayOps<T> {
/** Use [[parallelArray]] rather than calling this directly. */
constructor(readonly inputs: T[], readonly maxNumberOfThreads: number = Number.POSITIVE_INFINITY) {
if (maxNumberOfThreads !== Number.POSITIVE_INFINITY)
checkNat(maxNumberOfThreads)
}
/** Parallel [[mapMutate]]. */ |
let writeIndex = 0
let readIndex = 0
while (readIndex < maxNumberOfThreads && readIndex < inputs.length)
startOne()
while (readIndex < inputs.length) {
await awaitOne()
startOne()
}
while (writeIndex < inputs.length)
await awaitOne()
async function awaitOne(): Promise<void> {
inputs[writeIndex] = await (inputs as any as Array<Promise<T>>)[writeIndex]
writeIndex++
}
function startOne(): void {
(inputs as any as Array<Promise<T>>)[readIndex] = mapper(inputs[readIndex], readIndex)
readIndex++
}
}
/** Parallel [[filterMutate]]. */
filter(predicate: (element: T, index: number) => Promise<boolean>): Promise<void> {
return this.mapDefined(async (input, index) =>
optional(await predicate(input, index), () => input))
}
/** Parallel [[mapDefinedMutate]]. */
async mapDefined(tryGetOutput: (input: T, index: number) => Promise<Option<T>>): Promise<void> {
const { inputs, maxNumberOfThreads } = this
/** Next index to write a (defined) result to. */
let writeOutputIndex = 0
/** Next index to await a thread at. */
let readPromiseIndex = 0
/** Next index to read an input value from; the thread for that input will be written to the same index. */
let readValueIndex = 0
// Start initial threads.
while (readValueIndex < maxNumberOfThreads && readValueIndex < inputs.length)
startOne()
// Keep awaiting threads and starting new ones.
// Invariants: writeIndex <= readPromiseIndex, readPromiseIndex = readValueIndex - numberOfThreads
while (readValueIndex < inputs.length) {
await awaitOne()
startOne()
}
// Await remaining threads.
while (readPromiseIndex < inputs.length)
await awaitOne()
// Shorten array to new length.
inputs.length = writeOutputIndex
async function awaitOne(): Promise<void> {
const output = await (inputs as any as Array<Promise<Option<T>>>)[readPromiseIndex]
readPromiseIndex++
if (output !== undefined) {
inputs[writeOutputIndex] = output
writeOutputIndex++
}
}
function startOne(): void {
(inputs as any as Array<Promise<Option<T>>>)[readValueIndex] = tryGetOutput(inputs[readValueIndex], readValueIndex)
readValueIndex++
}
}
}
/**
Whether a number is an integer between 0 and array.length.
Does *not* check for whether there is a "hole" at the index.
*/
export function isValidIndex(inputs: Array<{}>, index: Nat): boolean {
return isNat(index) && index < inputs.length
}
/** Throws an error if [[index]] is not a valid index. */
export function checkIndex(inputs: Array<{}>, index: Nat): void {
if (!isValidIndex(inputs, index))
throw new Error(`Expected an array index < ${inputs.length}, got ${index}`)
}
/** Swap two values in an array. */
export function swap(inputs: Array<{}>, firstIndex: Nat, secondIndex: Nat): void {
checkIndex(inputs, firstIndex)
checkIndex(inputs, secondIndex)
const tmp = inputs[firstIndex]
inputs[firstIndex] = inputs[secondIndex]
inputs[secondIndex] = tmp
}
/** Initialize a new array by calling [[makeElement]] [[length]] times. */
export function initArray<T>(length: number, makeElement: (index: number) => T): T[] {
const arr = new Array(length)
for (let i = 0; i < length; i++)
arr[i] = makeElement(i)
return arr
}
/** Asynchronous [[initArray]]. */
export function initArrayAsync<T>(length: number, makeElement: (index: number) => Promise<T>): Promise<T[]> {
return Promise.all(initArray(length, makeElement))
}
/** Parallel [[initArray]]. */
export async function initArrayParallel<T>(numberOfThreads: number, length: number, makeElement: (index: number) => Promise<T>): Promise<T[]> {
const array = new Array(length)
await parallelArray(array, numberOfThreads).map((_, index) => makeElement(index))
return array
}
/**
[[Seq]] iterating over an array in reverse.
O(1) to create.
*/
export function reverse<T>(array: T[]): Seq<T> {
return new Seq(function*(): Iterator<T> {
for (let i = array.length - 1; i >= 0; i--)
yield array[i]
})
}
/** Immutable `Array.prototype.shift`. */
export function shift<T>(array: T[]): Option<[T, T[]]> {
return optional<[T, T[]]>(!isEmpty(array), () =>
[array[0], array.slice(1)])
}
/**
Every item but the first.
Identity for empty arrays.
*/
export function tail<T>(array: T[]): T[] {
return array.slice(1)
}
/** True iff an array has 0 length. */
export function isEmpty(array: any[]): boolean {
return array.length === 0
}
/**
Every item but the last.
Identity for empty arrays.
*/
export function rightTail<T>(array: T[]): T[] {
return array.slice(0, array.length - 1)
}
/** Immutable `Array.prototype.pop`. */
export function pop<T>(array: T[]): Option<[T[], T]> {
return iff<T, [T[], T]>(last(array), popped =>
[rightTail(array), popped])
}
/** Last element in the array. */
export function last<T>(array: T[]): Option<T> {
return array[array.length - 1]
} | async map(mapper: (element: T, index: number) => Promise<T>): Promise<void> {
const { inputs, maxNumberOfThreads } = this | random_line_split |
array.ts | import { Comparer, Predicate } from "./function"
import { Nat, checkNat, isNat } from "./math"
import { Option, exists, iff, optional } from "./option"
import { Seq } from "./seq"
/**
Empty immutable array.
Using this instead of a literal array `[]` to avoid allocating memory.
*/
export const empty: ReadonlyArray<never> = Object.freeze([])
/**
Replace each element with the result of calling `getNewValue`.
If `getNewValue` throws, the inputs will be left in a bad state.
(To mutate each element in place, just use a for-of loop.)
*/
export function mapMutate<T>(inputs: T[], getNewValue: (element: T, index: number) => T): void {
for (let index = 0; index < inputs.length; index++)
inputs[index] = getNewValue(inputs[index], index)
}
/**
Delete elements of an array not satisfying `predicate`.
If `predicate` throws, the array will be left in a bad state.
*/
export function filterMutate<T>(inputs: T[], predicate: Predicate<T>): void {
let writeIndex = 0
for (const input of inputs)
if (predicate(input)) {
inputs[writeIndex] = input
writeIndex++
}
inputs.length = writeIndex
}
/**
Replace elements with the result of [[tryGetOutput]] or delete them if that returns `undefined`.
If [[tryGetOutput] throws, the array will be left in a bad state.
*/
export function mapDefinedMutate<T>(inputs: T[], tryGetOutput: (input: T) => Option<T>): void {
let writeIndex = 0
for (const input of inputs) {
const output = tryGetOutput(input)
if (output !== undefined) {
inputs[writeIndex] = output
writeIndex++
}
}
inputs.length = writeIndex
}
/** Change the value at a single index in an array by applying a function to it. */
export function mutate<T>(inputs: T[], index: Nat, transform: (t: T) => T): void {
checkIndex(inputs, index)
inputs[index] = transform(inputs[index])
}
/**
Remove an element from an array and do not preserve the array's order.
Useful for arrays used to represent small sets.
Returns whether the value was successfully removed.
*/
export function removeUnordered<T>(inputs: T[], value: T, equal?: Comparer<T>): boolean {
for (let i = 0; i < inputs.length; i++)
if (exists(equal) ? equal(inputs[i], value) : inputs[i] === value) {
inputs[i] = last(inputs)!
inputs.length--
return true
}
return false
}
/**
Mutate [[inputs]] by combining them with each in [[other]].
If [[other]] is shorter than [[inputs]], this will reduce [[inputs]] in length.
If [[other]] is longer, the extra entries are ignored.
*/
export function zipMutate<T, U>(inputs: T[], other: Iterable<U>, zipper: (input: T, other: U) => T): void {
const iter = other[Symbol.iterator]()
for (let index = 0; index < inputs.length; index++) {
const { value, done } = iter.next()
if (done) {
inputs.length = index
break
}
inputs[index] = zipper(inputs[index], value)
}
}
/** Provides async utilities for an array. */
export function asyncArray<T>(inputs: T[]): AsyncArrayOps<T> {
return new AsyncArrayOps(inputs)
}
/**
Wrapper class for utilities that mutate arrays asynchronously.
For non-mutating utilities use [[AsyncSeq]].
*/
export class AsyncArrayOps<T> {
constructor(private inputs: T[]) {}
/** Asynchronous [[mapMutate]]. */
async map(getNewValue: (element: T, index: number) => Promise<T>): Promise<void> {
const { inputs } = this
for (let index = 0; index < inputs.length; index++)
inputs[index] = await getNewValue(inputs[index], index)
}
/** Asynchronous [[filterMutate]]. */
async filter(predicate: (element: T) => Promise<boolean>): Promise<void> {
const { inputs } = this
let writeIndex = 0
for (let readIndex = 0; readIndex < inputs.length; readIndex++)
if (await predicate(inputs[readIndex])) {
inputs[writeIndex] = inputs[readIndex]
writeIndex++
}
inputs.length = writeIndex
}
/** Asynchronous [[mapDefinedMutate]]. Performs `tryGetOutput` one element at a time. */
async mapDefined(tryGetOutput: (input: T) => Promise<Option<T>>): Promise<void> {
const { inputs } = this
let writeIndex = 0
for (let readIndex = 0; readIndex < inputs.length; readIndex++) {
const output = await tryGetOutput(inputs[readIndex])
if (output !== undefined) {
inputs[writeIndex] = output
writeIndex++
}
}
inputs.length = writeIndex
}
/** Asynchronous [[mutate]]. */
async mutate(index: Nat, transform: (t: T) => Promise<T>): Promise<void> {
const { inputs } = this
checkIndex(inputs, index)
inputs[index] = await transform(inputs[index])
}
}
/** Provides parallel utilities for an array. */
export function parallelArray<T>(inputs: T[], maxNumberOfThreads?: number): ParallelArrayOps<T> {
return new ParallelArrayOps(inputs, maxNumberOfThreads)
}
/**
Wrapper class for utilities that mutate arrays in parallel.
For non-mutating utilities use [[ParallelSeq]].
*/
export class ParallelArrayOps<T> {
/** Use [[parallelArray]] rather than calling this directly. */
| (readonly inputs: T[], readonly maxNumberOfThreads: number = Number.POSITIVE_INFINITY) {
if (maxNumberOfThreads !== Number.POSITIVE_INFINITY)
checkNat(maxNumberOfThreads)
}
/** Parallel [[mapMutate]]. */
async map(mapper: (element: T, index: number) => Promise<T>): Promise<void> {
const { inputs, maxNumberOfThreads } = this
let writeIndex = 0
let readIndex = 0
while (readIndex < maxNumberOfThreads && readIndex < inputs.length)
startOne()
while (readIndex < inputs.length) {
await awaitOne()
startOne()
}
while (writeIndex < inputs.length)
await awaitOne()
async function awaitOne(): Promise<void> {
inputs[writeIndex] = await (inputs as any as Array<Promise<T>>)[writeIndex]
writeIndex++
}
function startOne(): void {
(inputs as any as Array<Promise<T>>)[readIndex] = mapper(inputs[readIndex], readIndex)
readIndex++
}
}
/** Parallel [[filterMutate]]. */
filter(predicate: (element: T, index: number) => Promise<boolean>): Promise<void> {
return this.mapDefined(async (input, index) =>
optional(await predicate(input, index), () => input))
}
/** Parallel [[mapDefinedMutate]]. */
async mapDefined(tryGetOutput: (input: T, index: number) => Promise<Option<T>>): Promise<void> {
const { inputs, maxNumberOfThreads } = this
/** Next index to write a (defined) result to. */
let writeOutputIndex = 0
/** Next index to await a thread at. */
let readPromiseIndex = 0
/** Next index to read an input value from; the thread for that input will be written to the same index. */
let readValueIndex = 0
// Start initial threads.
while (readValueIndex < maxNumberOfThreads && readValueIndex < inputs.length)
startOne()
// Keep awaiting threads and starting new ones.
// Invariants: writeIndex <= readPromiseIndex, readPromiseIndex = readValueIndex - numberOfThreads
while (readValueIndex < inputs.length) {
await awaitOne()
startOne()
}
// Await remaining threads.
while (readPromiseIndex < inputs.length)
await awaitOne()
// Shorten array to new length.
inputs.length = writeOutputIndex
async function awaitOne(): Promise<void> {
const output = await (inputs as any as Array<Promise<Option<T>>>)[readPromiseIndex]
readPromiseIndex++
if (output !== undefined) {
inputs[writeOutputIndex] = output
writeOutputIndex++
}
}
function startOne(): void {
(inputs as any as Array<Promise<Option<T>>>)[readValueIndex] = tryGetOutput(inputs[readValueIndex], readValueIndex)
readValueIndex++
}
}
}
/**
Whether a number is an integer between 0 and array.length.
Does *not* check for whether there is a "hole" at the index.
*/
export function isValidIndex(inputs: Array<{}>, index: Nat): boolean {
return isNat(index) && index < inputs.length
}
/** Throws an error if [[index]] is not a valid index. */
export function checkIndex(inputs: Array<{}>, index: Nat): void {
if (!isValidIndex(inputs, index))
throw new Error(`Expected an array index < ${inputs.length}, got ${index}`)
}
/** Swap two values in an array. */
export function swap(inputs: Array<{}>, firstIndex: Nat, secondIndex: Nat): void {
checkIndex(inputs, firstIndex)
checkIndex(inputs, secondIndex)
const tmp = inputs[firstIndex]
inputs[firstIndex] = inputs[secondIndex]
inputs[secondIndex] = tmp
}
/** Initialize a new array by calling [[makeElement]] [[length]] times. */
export function initArray<T>(length: number, makeElement: (index: number) => T): T[] {
const arr = new Array(length)
for (let i = 0; i < length; i++)
arr[i] = makeElement(i)
return arr
}
/** Asynchronous [[initArray]]. */
export function initArrayAsync<T>(length: number, makeElement: (index: number) => Promise<T>): Promise<T[]> {
return Promise.all(initArray(length, makeElement))
}
/** Parallel [[initArray]]. */
export async function initArrayParallel<T>(numberOfThreads: number, length: number, makeElement: (index: number) => Promise<T>): Promise<T[]> {
const array = new Array(length)
await parallelArray(array, numberOfThreads).map((_, index) => makeElement(index))
return array
}
/**
[[Seq]] iterating over an array in reverse.
O(1) to create.
*/
export function reverse<T>(array: T[]): Seq<T> {
return new Seq(function*(): Iterator<T> {
for (let i = array.length - 1; i >= 0; i--)
yield array[i]
})
}
/** Immutable `Array.prototype.shift`. */
export function shift<T>(array: T[]): Option<[T, T[]]> {
return optional<[T, T[]]>(!isEmpty(array), () =>
[array[0], array.slice(1)])
}
/**
Every item but the first.
Identity for empty arrays.
*/
export function tail<T>(array: T[]): T[] {
return array.slice(1)
}
/** True iff an array has 0 length. */
export function isEmpty(array: any[]): boolean {
return array.length === 0
}
/**
Every item but the last.
Identity for empty arrays.
*/
export function rightTail<T>(array: T[]): T[] {
return array.slice(0, array.length - 1)
}
/** Immutable `Array.prototype.pop`. */
export function pop<T>(array: T[]): Option<[T[], T]> {
return iff<T, [T[], T]>(last(array), popped =>
[rightTail(array), popped])
}
/** Last element in the array. */
export function last<T>(array: T[]): Option<T> {
return array[array.length - 1]
}
| constructor | identifier_name |
array.ts | import { Comparer, Predicate } from "./function"
import { Nat, checkNat, isNat } from "./math"
import { Option, exists, iff, optional } from "./option"
import { Seq } from "./seq"
/**
Empty immutable array.
Using this instead of a literal array `[]` to avoid allocating memory.
*/
export const empty: ReadonlyArray<never> = Object.freeze([])
/**
Replace each element with the result of calling `getNewValue`.
If `getNewValue` throws, the inputs will be left in a bad state.
(To mutate each element in place, just use a for-of loop.)
*/
export function mapMutate<T>(inputs: T[], getNewValue: (element: T, index: number) => T): void {
for (let index = 0; index < inputs.length; index++)
inputs[index] = getNewValue(inputs[index], index)
}
/**
Delete elements of an array not satisfying `predicate`.
If `predicate` throws, the array will be left in a bad state.
*/
export function filterMutate<T>(inputs: T[], predicate: Predicate<T>): void {
let writeIndex = 0
for (const input of inputs)
if (predicate(input)) {
inputs[writeIndex] = input
writeIndex++
}
inputs.length = writeIndex
}
/**
Replace elements with the result of [[tryGetOutput]] or delete them if that returns `undefined`.
If [[tryGetOutput] throws, the array will be left in a bad state.
*/
export function mapDefinedMutate<T>(inputs: T[], tryGetOutput: (input: T) => Option<T>): void {
let writeIndex = 0
for (const input of inputs) {
const output = tryGetOutput(input)
if (output !== undefined) {
inputs[writeIndex] = output
writeIndex++
}
}
inputs.length = writeIndex
}
/** Change the value at a single index in an array by applying a function to it. */
export function mutate<T>(inputs: T[], index: Nat, transform: (t: T) => T): void {
checkIndex(inputs, index)
inputs[index] = transform(inputs[index])
}
/**
Remove an element from an array and do not preserve the array's order.
Useful for arrays used to represent small sets.
Returns whether the value was successfully removed.
*/
export function removeUnordered<T>(inputs: T[], value: T, equal?: Comparer<T>): boolean {
for (let i = 0; i < inputs.length; i++)
if (exists(equal) ? equal(inputs[i], value) : inputs[i] === value) {
inputs[i] = last(inputs)!
inputs.length--
return true
}
return false
}
/**
Mutate [[inputs]] by combining them with each in [[other]].
If [[other]] is shorter than [[inputs]], this will reduce [[inputs]] in length.
If [[other]] is longer, the extra entries are ignored.
*/
export function zipMutate<T, U>(inputs: T[], other: Iterable<U>, zipper: (input: T, other: U) => T): void {
const iter = other[Symbol.iterator]()
for (let index = 0; index < inputs.length; index++) {
const { value, done } = iter.next()
if (done) {
inputs.length = index
break
}
inputs[index] = zipper(inputs[index], value)
}
}
/** Provides async utilities for an array. */
export function asyncArray<T>(inputs: T[]): AsyncArrayOps<T> {
return new AsyncArrayOps(inputs)
}
/**
Wrapper class for utilities that mutate arrays asynchronously.
For non-mutating utilities use [[AsyncSeq]].
*/
export class AsyncArrayOps<T> {
constructor(private inputs: T[]) {}
/** Asynchronous [[mapMutate]]. */
async map(getNewValue: (element: T, index: number) => Promise<T>): Promise<void> {
const { inputs } = this
for (let index = 0; index < inputs.length; index++)
inputs[index] = await getNewValue(inputs[index], index)
}
/** Asynchronous [[filterMutate]]. */
async filter(predicate: (element: T) => Promise<boolean>): Promise<void> {
const { inputs } = this
let writeIndex = 0
for (let readIndex = 0; readIndex < inputs.length; readIndex++)
if (await predicate(inputs[readIndex])) {
inputs[writeIndex] = inputs[readIndex]
writeIndex++
}
inputs.length = writeIndex
}
/** Asynchronous [[mapDefinedMutate]]. Performs `tryGetOutput` one element at a time. */
async mapDefined(tryGetOutput: (input: T) => Promise<Option<T>>): Promise<void> {
const { inputs } = this
let writeIndex = 0
for (let readIndex = 0; readIndex < inputs.length; readIndex++) {
const output = await tryGetOutput(inputs[readIndex])
if (output !== undefined) {
inputs[writeIndex] = output
writeIndex++
}
}
inputs.length = writeIndex
}
/** Asynchronous [[mutate]]. */
async mutate(index: Nat, transform: (t: T) => Promise<T>): Promise<void> {
const { inputs } = this
checkIndex(inputs, index)
inputs[index] = await transform(inputs[index])
}
}
/** Provides parallel utilities for an array. */
export function parallelArray<T>(inputs: T[], maxNumberOfThreads?: number): ParallelArrayOps<T> {
return new ParallelArrayOps(inputs, maxNumberOfThreads)
}
/**
Wrapper class for utilities that mutate arrays in parallel.
For non-mutating utilities use [[ParallelSeq]].
*/
export class ParallelArrayOps<T> {
/** Use [[parallelArray]] rather than calling this directly. */
constructor(readonly inputs: T[], readonly maxNumberOfThreads: number = Number.POSITIVE_INFINITY) |
/** Parallel [[mapMutate]]. */
async map(mapper: (element: T, index: number) => Promise<T>): Promise<void> {
const { inputs, maxNumberOfThreads } = this
let writeIndex = 0
let readIndex = 0
while (readIndex < maxNumberOfThreads && readIndex < inputs.length)
startOne()
while (readIndex < inputs.length) {
await awaitOne()
startOne()
}
while (writeIndex < inputs.length)
await awaitOne()
async function awaitOne(): Promise<void> {
inputs[writeIndex] = await (inputs as any as Array<Promise<T>>)[writeIndex]
writeIndex++
}
function startOne(): void {
(inputs as any as Array<Promise<T>>)[readIndex] = mapper(inputs[readIndex], readIndex)
readIndex++
}
}
/** Parallel [[filterMutate]]. */
filter(predicate: (element: T, index: number) => Promise<boolean>): Promise<void> {
return this.mapDefined(async (input, index) =>
optional(await predicate(input, index), () => input))
}
/** Parallel [[mapDefinedMutate]]. */
async mapDefined(tryGetOutput: (input: T, index: number) => Promise<Option<T>>): Promise<void> {
const { inputs, maxNumberOfThreads } = this
/** Next index to write a (defined) result to. */
let writeOutputIndex = 0
/** Next index to await a thread at. */
let readPromiseIndex = 0
/** Next index to read an input value from; the thread for that input will be written to the same index. */
let readValueIndex = 0
// Start initial threads.
while (readValueIndex < maxNumberOfThreads && readValueIndex < inputs.length)
startOne()
// Keep awaiting threads and starting new ones.
// Invariants: writeIndex <= readPromiseIndex, readPromiseIndex = readValueIndex - numberOfThreads
while (readValueIndex < inputs.length) {
await awaitOne()
startOne()
}
// Await remaining threads.
while (readPromiseIndex < inputs.length)
await awaitOne()
// Shorten array to new length.
inputs.length = writeOutputIndex
async function awaitOne(): Promise<void> {
const output = await (inputs as any as Array<Promise<Option<T>>>)[readPromiseIndex]
readPromiseIndex++
if (output !== undefined) {
inputs[writeOutputIndex] = output
writeOutputIndex++
}
}
function startOne(): void {
(inputs as any as Array<Promise<Option<T>>>)[readValueIndex] = tryGetOutput(inputs[readValueIndex], readValueIndex)
readValueIndex++
}
}
}
/**
Whether a number is an integer between 0 and array.length.
Does *not* check for whether there is a "hole" at the index.
*/
export function isValidIndex(inputs: Array<{}>, index: Nat): boolean {
return isNat(index) && index < inputs.length
}
/** Throws an error if [[index]] is not a valid index. */
export function checkIndex(inputs: Array<{}>, index: Nat): void {
if (!isValidIndex(inputs, index))
throw new Error(`Expected an array index < ${inputs.length}, got ${index}`)
}
/** Swap two values in an array. */
export function swap(inputs: Array<{}>, firstIndex: Nat, secondIndex: Nat): void {
checkIndex(inputs, firstIndex)
checkIndex(inputs, secondIndex)
const tmp = inputs[firstIndex]
inputs[firstIndex] = inputs[secondIndex]
inputs[secondIndex] = tmp
}
/** Initialize a new array by calling [[makeElement]] [[length]] times. */
export function initArray<T>(length: number, makeElement: (index: number) => T): T[] {
const arr = new Array(length)
for (let i = 0; i < length; i++)
arr[i] = makeElement(i)
return arr
}
/** Asynchronous [[initArray]]. */
export function initArrayAsync<T>(length: number, makeElement: (index: number) => Promise<T>): Promise<T[]> {
return Promise.all(initArray(length, makeElement))
}
/** Parallel [[initArray]]. */
export async function initArrayParallel<T>(numberOfThreads: number, length: number, makeElement: (index: number) => Promise<T>): Promise<T[]> {
const array = new Array(length)
await parallelArray(array, numberOfThreads).map((_, index) => makeElement(index))
return array
}
/**
[[Seq]] iterating over an array in reverse.
O(1) to create.
*/
export function reverse<T>(array: T[]): Seq<T> {
return new Seq(function*(): Iterator<T> {
for (let i = array.length - 1; i >= 0; i--)
yield array[i]
})
}
/** Immutable `Array.prototype.shift`. */
export function shift<T>(array: T[]): Option<[T, T[]]> {
return optional<[T, T[]]>(!isEmpty(array), () =>
[array[0], array.slice(1)])
}
/**
Every item but the first.
Identity for empty arrays.
*/
export function tail<T>(array: T[]): T[] {
return array.slice(1)
}
/** True iff an array has 0 length. */
export function isEmpty(array: any[]): boolean {
return array.length === 0
}
/**
Every item but the last.
Identity for empty arrays.
*/
export function rightTail<T>(array: T[]): T[] {
return array.slice(0, array.length - 1)
}
/** Immutable `Array.prototype.pop`. */
export function pop<T>(array: T[]): Option<[T[], T]> {
return iff<T, [T[], T]>(last(array), popped =>
[rightTail(array), popped])
}
/** Last element in the array. */
export function last<T>(array: T[]): Option<T> {
return array[array.length - 1]
}
| {
if (maxNumberOfThreads !== Number.POSITIVE_INFINITY)
checkNat(maxNumberOfThreads)
} | identifier_body |
main.rs | #[macro_use]
extern crate log;
extern crate fern;
extern crate chrono;
extern crate libc;
mod configuration;
use configuration::ServerData;
use configuration::ClientData;
use std::sync::mpsc;
use std::thread;
use std::time;
use std::time::SystemTime;
use std::env;
/*==============================================================================
* Loggers
*------------------------------------------------------------------------------
*
*/
fn setup_terminal_logging() -> Result<(), fern::InitError> {
fern::Dispatch::new()
.format(|out, message, record| unsafe {
out.finish(format_args!(
"{}[{}] {}",
chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"),
libc::pthread_self(),
message
))
})
.level(log::LevelFilter::Info)
.chain(std::io::stdout())
.apply()?;
Ok(())
}
fn setup_file_logging() -> Result<(), fern::InitError> {
fern::Dispatch::new()
.format(|out, message, record| unsafe {
out.finish(format_args!(
"{}[{}] {}",
chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"),
libc::pthread_self(),
message
))
})
.level(log::LevelFilter::Info)
.chain(fern::log_file("output.log")?)
.apply()?;
Ok(())
}
/*==============================================================================
* Packet
*------------------------------------------------------------------------------
*
*/
struct Packet {
from: VirtualLink,
workload: u64,
origin: u32,
timestamp: SystemTime,
}
impl Packet {
fn from_iface(iface: &NetworkInterface, workload: u64, origin: u32) -> Packet {
Packet {
from: (*iface).get_virtual_link(),
workload: workload,
origin: origin,
timestamp: SystemTime::now(),
}
}
fn answer_me_at(tx: &mpsc::Sender<Packet>, workload: u64, origin: u32, timestamp: SystemTime) -> Packet {
Packet {
from: VirtualLink::linked_to(tx),
workload: workload,
origin: origin,
timestamp: timestamp,
}
}
}
/*==============================================================================
* VirtualLink
*------------------------------------------------------------------------------
*
*/
struct VirtualLink {
s: mpsc::Sender<Packet>
}
impl VirtualLink {
fn to_iface(interface: &NetworkInterface) -> VirtualLink {
VirtualLink {
s: (*interface).s.clone()
}
}
fn linked_to(tx: &mpsc::Sender<Packet>) -> VirtualLink {
VirtualLink {
s: (*tx).clone()
}
}
fn send_through(&self, packet: Packet) {
self.s.send(packet).unwrap()
}
}
/*==============================================================================
* Network Interface
*------------------------------------------------------------------------------
*
*/
struct NetworkInterface {
s: mpsc::Sender<Packet>,
r: mpsc::Receiver<Packet>
}
impl NetworkInterface {
fn new() -> NetworkInterface {
let (tx, rx) = mpsc::channel();
NetworkInterface {
s: tx,
r: rx
}
}
fn read(&self) -> Packet |
fn get_virtual_link(&self) -> VirtualLink {
VirtualLink::to_iface(self)
}
}
/*==============================================================================
* Host
*
*/
struct Host {
nic: NetworkInterface,
}
impl Host {
fn new() -> Host {
Host {
nic: NetworkInterface::new(),
}
}
fn get_virtual_link(&self) -> VirtualLink {
self.nic.get_virtual_link()
}
}
/*==============================================================================
* Stats
*
*/
struct Stats {
samples: u64,
total: u64,
}
impl Stats {
fn new() -> Stats {
Stats {
samples: 0,
total: 0,
}
}
fn update_stats(&mut self, new_sample_time: u64) {
self.samples += 1;
self.total += new_sample_time;
}
fn get_average(&self) -> f64 {
if self.samples == 0 {
return 0.0;
}
(self.total as f64) / (self.samples as f64)
}
}
/*==============================================================================
* Server
*
*/
struct Server {
id: u32,
host: Host,
processing_power: u64
}
impl Server {
fn new(id: u32, server_data: ServerData) -> Server {
Server {
id: id,
host: Host::new(),
processing_power: server_data.get_processing_power()
}
}
fn get_virtual_link(&self) -> VirtualLink {
self.host.get_virtual_link()
}
fn run(self) {
info!("[S{}] Ejecutando servidor {}", self.id, self.id);
let rx = self.host.nic.r;
let tx = self.host.nic.s;
for message in rx {
// Obtenemos la cantidad de cuadrantes a procesar.
let workload = message.workload;
info!("[S{}] Recibidas {} unidades de trabajo desde observatorio {}", self.id, workload, message.origin);
/*
* Procesamos los cuadrantes.
*
* El workload tiene unidades de trabajo. El poder de procesamiento
* tiene unidades de trabajo por segundo. El sleep time tiene unidades
* de milisegundos.
*
* Por ejemplo, un servidor recibe 5 unidades de trabajo desde el
* cliente. El servidor puede procesar dos unidades de trabajo por
* segundo. El hilo dormirá entonces 2500 milisegundos simulando
* el procesamiento de la carga. Para acelerar o relentizar
* la simulación, podemos ajustar el factor global de velocidad;
* por ejemplo, si el factor global es 2.0, en vez de dormir los 2500
* milisegundos dormiría 1250.
*
*/
let sleep_time = (1000*workload)/self.processing_power;
let sleep_time_scaled = ((sleep_time as f64)/GLOBAL_SPEED) as u64;
info!("[S{}] Tiempo estimado: {}ms (s: {}ms)", self.id, sleep_time, sleep_time_scaled);
thread::sleep(time::Duration::from_millis(sleep_time_scaled));
info!("[S{}] Procesamiento terminado; devolviendo ACK a observatorio {}", self.id, message.origin);
// Devolvemos el ACK.
let response = Packet::answer_me_at(&tx, 0, self.id, message.timestamp);
message.from.send_through(response);
}
}
}
/*==============================================================================
* Client
*
*/
struct Target {
virtual_link: VirtualLink,
weight: f64
}
struct Client {
id: u32,
host: Host,
distribution_scheme: Vec<Target>,
work_generation_rate: u64
}
impl Client {
fn new(id: u32, servers: &Vec<Server>, client_data: ClientData) -> Client {
let workshare: &Vec<f64> = client_data.get_workshare();
let mut distribution = Vec::new();
for i in 0..servers.len() {
distribution.push(Target {
virtual_link: servers[i].get_virtual_link(),
weight: workshare[i]
});
}
Client {
id: id,
host: Host::new(),
distribution_scheme: distribution,
work_generation_rate: client_data.get_work_generation_rate()
}
}
fn run(self) {
info!("[C{}] Ejecutando cliente {}", self.id, self.id);
/*
* Cada cierta cantidad de tiempo, el observatorio genera x cuadrantes.
* A partir de ahí itera por la lista de servidores distribuyendo los
* cuadrantes según los factores de distribución (e.g., si debe enviar
* una fracción p_k de los cuadrantes al servidor k, enviará p_k*x
* cuadrantes al servidor k).
*
* Habiendo enviado los mensajes, simplemente espera las respuestas.
* Suponiendo alternativamente que hay que seguir generando cuadrantes
* mientras se toman fotos, se pueden tener internamente dos threads,
* uno acumulando cuadrantes y otro tomando cuadrantes y distribuyendolos.
*
* Para medir el tiempo de respuesta del observatorio se puede ir
* calculando una media móvil, tomando el tiempo que tarda en responder
* cada servidor.
*/
let targets = &self.distribution_scheme;
let mut stats : Stats = Stats::new();
loop {
let x = self.work_generation_rate;
info!("[C{}] Generando {} unidades de trabajo", self.id, x);
// Distribuimos los x cuadrantes generados.
let mut sid = 0;
for target in targets {
sid += 1;
let workload = ((x as f64)*(target.weight)) as u64;
let packet = Packet::from_iface(&self.host.nic, workload, self.id);
info!("[C{}] Enviando {} unidades al servidor {}", self.id, workload, sid);
target.virtual_link.send_through(packet);
}
// Esperamos la respuesta de cada servidor.
info!("[C{}] Esperando respuestas", self.id);
for _d in targets {
let _response = self.host.nic.read();
// Cálculo de tiempo de respuesta
let response_time_duration = _response.timestamp.elapsed().unwrap();
let response_time_ms = response_time_duration.as_secs() + ((response_time_duration.subsec_millis() * 1000) as u64);
stats.update_stats(response_time_ms);
}
// Impresión de estadística hasta el momento
info!("[C{}] Promedio de respuesta parcial: {} ms", self.id, format!("{:.*}", 2, stats.get_average()));
info!("[C{}] Todos los servidores terminaron de procesar el bache", self.id);
let sleep_time = (3000.0/GLOBAL_SPEED) as u64;
thread::sleep(time::Duration::from_millis(sleep_time));
}
}
}
/*==============================================================================
* Main
*
*/
const GLOBAL_SPEED: f64 = 1.0;
fn main() {
let args : Vec<String> = env::args().collect();
if args.len() > 1 && args[1] == "--debug" {
setup_file_logging().expect("Couldn't set up logger");
} else {
setup_terminal_logging().expect("Couldn't set up logger");
}
/*
* Cargamos la configuración. La configuración es un archivo de texto con
* pares clave-valor. El objeto de configuración puede usarse como
*
* configuration.get("clave") // retorna el valor asociado a "clave".
*/
info!("[T0] Cargando configuración");
let mut configuration = configuration::Configuration::new();
configuration.load();
let mut threads = Vec::new();
let mut servers: Vec<Server> = Vec::new();
let mut clients: Vec<Client> = Vec::new();
info!("[T0] Inicializando servidores");
let server_data: Vec<ServerData> = configuration.get_server_dataset();
let mut server_count = 0;
for d in server_data {
server_count += 1;
servers.push(Server::new(server_count, d));
}
info!("[T0] Inicializando clientes");
let client_data: Vec<ClientData> = configuration.get_client_dataset();
let mut client_count = 0;
for c in client_data {
client_count += 1;
clients.push(Client::new(client_count, &servers, c));
}
info!("[T0] Lanzando hilos servidores");
for server in servers {
let th = thread::spawn(move || {
server.run();
});
threads.push(th);
}
info!("[T0] Lanzando hilos clientes");
for client in clients {
let th = thread::spawn(move || {
client.run();
});
threads.push(th);
}
info!("[T0] Esperando la finalización del programa");
for th in threads {
th.join().unwrap();
}
}
| {
self.r.recv().unwrap()
} | identifier_body |
main.rs | #[macro_use]
extern crate log;
extern crate fern;
extern crate chrono;
extern crate libc;
mod configuration;
use configuration::ServerData;
use configuration::ClientData;
use std::sync::mpsc;
use std::thread;
use std::time;
use std::time::SystemTime;
use std::env;
/*==============================================================================
* Loggers
*------------------------------------------------------------------------------
*
*/
fn setup_terminal_logging() -> Result<(), fern::InitError> {
fern::Dispatch::new()
.format(|out, message, record| unsafe {
out.finish(format_args!(
"{}[{}] {}",
chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"),
libc::pthread_self(),
message
))
})
.level(log::LevelFilter::Info)
.chain(std::io::stdout())
.apply()?;
Ok(())
}
fn setup_file_logging() -> Result<(), fern::InitError> {
fern::Dispatch::new()
.format(|out, message, record| unsafe {
out.finish(format_args!(
"{}[{}] {}",
chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"),
libc::pthread_self(),
message
))
})
.level(log::LevelFilter::Info)
.chain(fern::log_file("output.log")?)
.apply()?;
Ok(())
}
/*==============================================================================
* Packet
*------------------------------------------------------------------------------
*
*/
struct Packet {
from: VirtualLink,
workload: u64,
origin: u32,
timestamp: SystemTime,
}
impl Packet {
fn from_iface(iface: &NetworkInterface, workload: u64, origin: u32) -> Packet {
Packet {
from: (*iface).get_virtual_link(),
workload: workload,
origin: origin,
timestamp: SystemTime::now(),
}
}
fn answer_me_at(tx: &mpsc::Sender<Packet>, workload: u64, origin: u32, timestamp: SystemTime) -> Packet {
Packet {
from: VirtualLink::linked_to(tx),
workload: workload,
origin: origin,
timestamp: timestamp,
}
}
}
/*==============================================================================
* VirtualLink
*------------------------------------------------------------------------------
*
*/
struct VirtualLink {
s: mpsc::Sender<Packet>
}
impl VirtualLink {
fn to_iface(interface: &NetworkInterface) -> VirtualLink {
VirtualLink {
s: (*interface).s.clone()
}
}
fn linked_to(tx: &mpsc::Sender<Packet>) -> VirtualLink {
VirtualLink {
s: (*tx).clone()
}
}
fn send_through(&self, packet: Packet) {
self.s.send(packet).unwrap()
}
}
/*==============================================================================
* Network Interface
*------------------------------------------------------------------------------
*
*/
struct NetworkInterface {
s: mpsc::Sender<Packet>,
r: mpsc::Receiver<Packet>
}
impl NetworkInterface {
fn new() -> NetworkInterface {
let (tx, rx) = mpsc::channel();
NetworkInterface {
s: tx,
r: rx
}
}
fn read(&self) -> Packet {
self.r.recv().unwrap()
}
fn get_virtual_link(&self) -> VirtualLink {
VirtualLink::to_iface(self)
}
}
/*==============================================================================
* Host
*
*/
struct Host {
nic: NetworkInterface,
}
impl Host {
fn new() -> Host {
Host {
nic: NetworkInterface::new(),
}
}
fn get_virtual_link(&self) -> VirtualLink {
self.nic.get_virtual_link()
}
}
/*==============================================================================
* Stats
*
*/
struct Stats {
samples: u64,
total: u64,
}
impl Stats {
fn new() -> Stats {
Stats {
samples: 0,
total: 0,
}
}
fn update_stats(&mut self, new_sample_time: u64) {
self.samples += 1;
self.total += new_sample_time;
}
fn get_average(&self) -> f64 {
if self.samples == 0 |
(self.total as f64) / (self.samples as f64)
}
}
/*==============================================================================
* Server
*
*/
struct Server {
id: u32,
host: Host,
processing_power: u64
}
impl Server {
fn new(id: u32, server_data: ServerData) -> Server {
Server {
id: id,
host: Host::new(),
processing_power: server_data.get_processing_power()
}
}
fn get_virtual_link(&self) -> VirtualLink {
self.host.get_virtual_link()
}
fn run(self) {
info!("[S{}] Ejecutando servidor {}", self.id, self.id);
let rx = self.host.nic.r;
let tx = self.host.nic.s;
for message in rx {
// Obtenemos la cantidad de cuadrantes a procesar.
let workload = message.workload;
info!("[S{}] Recibidas {} unidades de trabajo desde observatorio {}", self.id, workload, message.origin);
/*
* Procesamos los cuadrantes.
*
* El workload tiene unidades de trabajo. El poder de procesamiento
* tiene unidades de trabajo por segundo. El sleep time tiene unidades
* de milisegundos.
*
* Por ejemplo, un servidor recibe 5 unidades de trabajo desde el
* cliente. El servidor puede procesar dos unidades de trabajo por
* segundo. El hilo dormirá entonces 2500 milisegundos simulando
* el procesamiento de la carga. Para acelerar o relentizar
* la simulación, podemos ajustar el factor global de velocidad;
* por ejemplo, si el factor global es 2.0, en vez de dormir los 2500
* milisegundos dormiría 1250.
*
*/
let sleep_time = (1000*workload)/self.processing_power;
let sleep_time_scaled = ((sleep_time as f64)/GLOBAL_SPEED) as u64;
info!("[S{}] Tiempo estimado: {}ms (s: {}ms)", self.id, sleep_time, sleep_time_scaled);
thread::sleep(time::Duration::from_millis(sleep_time_scaled));
info!("[S{}] Procesamiento terminado; devolviendo ACK a observatorio {}", self.id, message.origin);
// Devolvemos el ACK.
let response = Packet::answer_me_at(&tx, 0, self.id, message.timestamp);
message.from.send_through(response);
}
}
}
/*==============================================================================
* Client
*
*/
struct Target {
virtual_link: VirtualLink,
weight: f64
}
struct Client {
id: u32,
host: Host,
distribution_scheme: Vec<Target>,
work_generation_rate: u64
}
impl Client {
fn new(id: u32, servers: &Vec<Server>, client_data: ClientData) -> Client {
let workshare: &Vec<f64> = client_data.get_workshare();
let mut distribution = Vec::new();
for i in 0..servers.len() {
distribution.push(Target {
virtual_link: servers[i].get_virtual_link(),
weight: workshare[i]
});
}
Client {
id: id,
host: Host::new(),
distribution_scheme: distribution,
work_generation_rate: client_data.get_work_generation_rate()
}
}
fn run(self) {
info!("[C{}] Ejecutando cliente {}", self.id, self.id);
/*
* Cada cierta cantidad de tiempo, el observatorio genera x cuadrantes.
* A partir de ahí itera por la lista de servidores distribuyendo los
* cuadrantes según los factores de distribución (e.g., si debe enviar
* una fracción p_k de los cuadrantes al servidor k, enviará p_k*x
* cuadrantes al servidor k).
*
* Habiendo enviado los mensajes, simplemente espera las respuestas.
* Suponiendo alternativamente que hay que seguir generando cuadrantes
* mientras se toman fotos, se pueden tener internamente dos threads,
* uno acumulando cuadrantes y otro tomando cuadrantes y distribuyendolos.
*
* Para medir el tiempo de respuesta del observatorio se puede ir
* calculando una media móvil, tomando el tiempo que tarda en responder
* cada servidor.
*/
let targets = &self.distribution_scheme;
let mut stats : Stats = Stats::new();
loop {
let x = self.work_generation_rate;
info!("[C{}] Generando {} unidades de trabajo", self.id, x);
// Distribuimos los x cuadrantes generados.
let mut sid = 0;
for target in targets {
sid += 1;
let workload = ((x as f64)*(target.weight)) as u64;
let packet = Packet::from_iface(&self.host.nic, workload, self.id);
info!("[C{}] Enviando {} unidades al servidor {}", self.id, workload, sid);
target.virtual_link.send_through(packet);
}
// Esperamos la respuesta de cada servidor.
info!("[C{}] Esperando respuestas", self.id);
for _d in targets {
let _response = self.host.nic.read();
// Cálculo de tiempo de respuesta
let response_time_duration = _response.timestamp.elapsed().unwrap();
let response_time_ms = response_time_duration.as_secs() + ((response_time_duration.subsec_millis() * 1000) as u64);
stats.update_stats(response_time_ms);
}
// Impresión de estadística hasta el momento
info!("[C{}] Promedio de respuesta parcial: {} ms", self.id, format!("{:.*}", 2, stats.get_average()));
info!("[C{}] Todos los servidores terminaron de procesar el bache", self.id);
let sleep_time = (3000.0/GLOBAL_SPEED) as u64;
thread::sleep(time::Duration::from_millis(sleep_time));
}
}
}
/*==============================================================================
* Main
*
*/
const GLOBAL_SPEED: f64 = 1.0;
fn main() {
let args : Vec<String> = env::args().collect();
if args.len() > 1 && args[1] == "--debug" {
setup_file_logging().expect("Couldn't set up logger");
} else {
setup_terminal_logging().expect("Couldn't set up logger");
}
/*
* Cargamos la configuración. La configuración es un archivo de texto con
* pares clave-valor. El objeto de configuración puede usarse como
*
* configuration.get("clave") // retorna el valor asociado a "clave".
*/
info!("[T0] Cargando configuración");
let mut configuration = configuration::Configuration::new();
configuration.load();
let mut threads = Vec::new();
let mut servers: Vec<Server> = Vec::new();
let mut clients: Vec<Client> = Vec::new();
info!("[T0] Inicializando servidores");
let server_data: Vec<ServerData> = configuration.get_server_dataset();
let mut server_count = 0;
for d in server_data {
server_count += 1;
servers.push(Server::new(server_count, d));
}
info!("[T0] Inicializando clientes");
let client_data: Vec<ClientData> = configuration.get_client_dataset();
let mut client_count = 0;
for c in client_data {
client_count += 1;
clients.push(Client::new(client_count, &servers, c));
}
info!("[T0] Lanzando hilos servidores");
for server in servers {
let th = thread::spawn(move || {
server.run();
});
threads.push(th);
}
info!("[T0] Lanzando hilos clientes");
for client in clients {
let th = thread::spawn(move || {
client.run();
});
threads.push(th);
}
info!("[T0] Esperando la finalización del programa");
for th in threads {
th.join().unwrap();
}
}
| {
return 0.0;
} | conditional_block |
main.rs | #[macro_use]
extern crate log;
extern crate fern;
extern crate chrono;
extern crate libc;
mod configuration;
use configuration::ServerData;
use configuration::ClientData;
use std::sync::mpsc;
use std::thread;
use std::time;
use std::time::SystemTime;
use std::env;
/*==============================================================================
* Loggers
*------------------------------------------------------------------------------
*
*/
fn setup_terminal_logging() -> Result<(), fern::InitError> {
fern::Dispatch::new()
.format(|out, message, record| unsafe {
out.finish(format_args!(
"{}[{}] {}",
chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"),
libc::pthread_self(),
message
))
})
.level(log::LevelFilter::Info)
.chain(std::io::stdout())
.apply()?;
Ok(())
}
fn setup_file_logging() -> Result<(), fern::InitError> {
fern::Dispatch::new()
.format(|out, message, record| unsafe {
out.finish(format_args!(
"{}[{}] {}",
chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"),
libc::pthread_self(),
message
))
})
.level(log::LevelFilter::Info)
.chain(fern::log_file("output.log")?)
.apply()?;
Ok(())
}
/*==============================================================================
* Packet
*------------------------------------------------------------------------------
*
*/
struct Packet {
from: VirtualLink,
workload: u64,
origin: u32,
timestamp: SystemTime,
}
impl Packet {
fn from_iface(iface: &NetworkInterface, workload: u64, origin: u32) -> Packet {
Packet {
from: (*iface).get_virtual_link(),
workload: workload,
origin: origin,
timestamp: SystemTime::now(),
}
}
fn answer_me_at(tx: &mpsc::Sender<Packet>, workload: u64, origin: u32, timestamp: SystemTime) -> Packet {
Packet {
from: VirtualLink::linked_to(tx),
workload: workload,
origin: origin,
timestamp: timestamp,
}
}
}
/*==============================================================================
* VirtualLink
*------------------------------------------------------------------------------
*
*/
struct VirtualLink {
s: mpsc::Sender<Packet>
}
impl VirtualLink {
fn to_iface(interface: &NetworkInterface) -> VirtualLink {
VirtualLink {
s: (*interface).s.clone()
}
}
fn linked_to(tx: &mpsc::Sender<Packet>) -> VirtualLink {
VirtualLink {
s: (*tx).clone()
}
}
fn send_through(&self, packet: Packet) {
self.s.send(packet).unwrap()
}
}
/*==============================================================================
* Network Interface
*------------------------------------------------------------------------------
*
*/
struct NetworkInterface {
s: mpsc::Sender<Packet>,
r: mpsc::Receiver<Packet>
}
impl NetworkInterface {
fn new() -> NetworkInterface {
let (tx, rx) = mpsc::channel();
NetworkInterface {
s: tx,
r: rx
}
}
fn read(&self) -> Packet {
self.r.recv().unwrap()
}
fn get_virtual_link(&self) -> VirtualLink {
VirtualLink::to_iface(self)
}
}
/*==============================================================================
* Host
*
*/
struct Host {
nic: NetworkInterface,
}
impl Host {
fn new() -> Host {
Host {
nic: NetworkInterface::new(),
}
}
fn get_virtual_link(&self) -> VirtualLink {
self.nic.get_virtual_link()
}
}
/*==============================================================================
* Stats
*
*/
struct Stats {
samples: u64,
total: u64,
}
impl Stats {
fn new() -> Stats {
Stats {
samples: 0,
total: 0,
}
}
fn update_stats(&mut self, new_sample_time: u64) {
self.samples += 1;
self.total += new_sample_time;
}
fn get_average(&self) -> f64 {
if self.samples == 0 {
return 0.0;
}
(self.total as f64) / (self.samples as f64)
}
}
/*==============================================================================
* Server
*
*/
struct Server {
id: u32,
host: Host,
processing_power: u64
}
impl Server {
fn new(id: u32, server_data: ServerData) -> Server {
Server {
id: id,
host: Host::new(),
processing_power: server_data.get_processing_power()
}
}
fn get_virtual_link(&self) -> VirtualLink {
self.host.get_virtual_link()
}
fn run(self) {
info!("[S{}] Ejecutando servidor {}", self.id, self.id);
let rx = self.host.nic.r;
let tx = self.host.nic.s;
for message in rx {
// Obtenemos la cantidad de cuadrantes a procesar.
let workload = message.workload;
info!("[S{}] Recibidas {} unidades de trabajo desde observatorio {}", self.id, workload, message.origin);
/*
* Procesamos los cuadrantes.
*
* El workload tiene unidades de trabajo. El poder de procesamiento
* tiene unidades de trabajo por segundo. El sleep time tiene unidades
* de milisegundos.
*
* Por ejemplo, un servidor recibe 5 unidades de trabajo desde el
* cliente. El servidor puede procesar dos unidades de trabajo por
* segundo. El hilo dormirá entonces 2500 milisegundos simulando
* el procesamiento de la carga. Para acelerar o relentizar
* la simulación, podemos ajustar el factor global de velocidad;
* por ejemplo, si el factor global es 2.0, en vez de dormir los 2500
* milisegundos dormiría 1250.
*
*/
let sleep_time = (1000*workload)/self.processing_power;
let sleep_time_scaled = ((sleep_time as f64)/GLOBAL_SPEED) as u64;
info!("[S{}] Tiempo estimado: {}ms (s: {}ms)", self.id, sleep_time, sleep_time_scaled);
thread::sleep(time::Duration::from_millis(sleep_time_scaled));
info!("[S{}] Procesamiento terminado; devolviendo ACK a observatorio {}", self.id, message.origin);
// Devolvemos el ACK.
let response = Packet::answer_me_at(&tx, 0, self.id, message.timestamp);
message.from.send_through(response);
}
}
}
/*==============================================================================
* Client
*
*/
struct Target {
virtual_link: VirtualLink,
weight: f64
}
struct Client {
id: u32,
host: Host,
distribution_scheme: Vec<Target>,
work_generation_rate: u64
}
impl Client {
fn new(id: u32, servers: &Vec<Server>, client_data: ClientData) -> Client {
let workshare: &Vec<f64> = client_data.get_workshare();
let mut distribution = Vec::new();
for i in 0..servers.len() {
distribution.push(Target {
virtual_link: servers[i].get_virtual_link(),
weight: workshare[i]
});
}
Client {
id: id,
host: Host::new(),
distribution_scheme: distribution,
work_generation_rate: client_data.get_work_generation_rate()
}
}
fn run(self) {
info!("[C{}] Ejecutando cliente {}", self.id, self.id);
/*
* Cada cierta cantidad de tiempo, el observatorio genera x cuadrantes.
* A partir de ahí itera por la lista de servidores distribuyendo los
* cuadrantes según los factores de distribución (e.g., si debe enviar
* una fracción p_k de los cuadrantes al servidor k, enviará p_k*x
* cuadrantes al servidor k).
*
* Habiendo enviado los mensajes, simplemente espera las respuestas.
* Suponiendo alternativamente que hay que seguir generando cuadrantes
* mientras se toman fotos, se pueden tener internamente dos threads,
* uno acumulando cuadrantes y otro tomando cuadrantes y distribuyendolos.
*
* Para medir el tiempo de respuesta del observatorio se puede ir
* calculando una media móvil, tomando el tiempo que tarda en responder
* cada servidor.
*/
let targets = &self.distribution_scheme;
let mut stats : Stats = Stats::new();
loop {
let x = self.work_generation_rate;
info!("[C{}] Generando {} unidades de trabajo", self.id, x);
// Distribuimos los x cuadrantes generados.
let mut sid = 0;
for target in targets {
sid += 1;
let workload = ((x as f64)*(target.weight)) as u64;
let packet = Packet::from_iface(&self.host.nic, workload, self.id);
info!("[C{}] Enviando {} unidades al servidor {}", self.id, workload, sid);
target.virtual_link.send_through(packet);
}
// Esperamos la respuesta de cada servidor.
info!("[C{}] Esperando respuestas", self.id);
for _d in targets {
let _response = self.host.nic.read();
// Cálculo de tiempo de respuesta
let response_time_duration = _response.timestamp.elapsed().unwrap();
let response_time_ms = response_time_duration.as_secs() + ((response_time_duration.subsec_millis() * 1000) as u64);
stats.update_stats(response_time_ms);
}
// Impresión de estadística hasta el momento
info!("[C{}] Promedio de respuesta parcial: {} ms", self.id, format!("{:.*}", 2, stats.get_average()));
info!("[C{}] Todos los servidores terminaron de procesar el bache", self.id);
let sleep_time = (3000.0/GLOBAL_SPEED) as u64;
thread::sleep(time::Duration::from_millis(sleep_time));
}
}
}
/*==============================================================================
* Main
*
*/
const GLOBAL_SPEED: f64 = 1.0;
fn main() {
let args : Vec<String> = env::args().collect();
if args.len() > 1 && args[1] == "--debug" {
setup_file_logging().expect("Couldn't set up logger");
} else {
setup_terminal_logging().expect("Couldn't set up logger");
}
/*
* Cargamos la configuración. La configuración es un archivo de texto con
* pares clave-valor. El objeto de configuración puede usarse como
*
* configuration.get("clave") // retorna el valor asociado a "clave".
*/
info!("[T0] Cargando configuración");
let mut configuration = configuration::Configuration::new();
configuration.load(); |
let mut threads = Vec::new();
let mut servers: Vec<Server> = Vec::new();
let mut clients: Vec<Client> = Vec::new();
info!("[T0] Inicializando servidores");
let server_data: Vec<ServerData> = configuration.get_server_dataset();
let mut server_count = 0;
for d in server_data {
server_count += 1;
servers.push(Server::new(server_count, d));
}
info!("[T0] Inicializando clientes");
let client_data: Vec<ClientData> = configuration.get_client_dataset();
let mut client_count = 0;
for c in client_data {
client_count += 1;
clients.push(Client::new(client_count, &servers, c));
}
info!("[T0] Lanzando hilos servidores");
for server in servers {
let th = thread::spawn(move || {
server.run();
});
threads.push(th);
}
info!("[T0] Lanzando hilos clientes");
for client in clients {
let th = thread::spawn(move || {
client.run();
});
threads.push(th);
}
info!("[T0] Esperando la finalización del programa");
for th in threads {
th.join().unwrap();
}
} | random_line_split | |
main.rs | #[macro_use]
extern crate log;
extern crate fern;
extern crate chrono;
extern crate libc;
mod configuration;
use configuration::ServerData;
use configuration::ClientData;
use std::sync::mpsc;
use std::thread;
use std::time;
use std::time::SystemTime;
use std::env;
/*==============================================================================
* Loggers
*------------------------------------------------------------------------------
*
*/
fn setup_terminal_logging() -> Result<(), fern::InitError> {
fern::Dispatch::new()
.format(|out, message, record| unsafe {
out.finish(format_args!(
"{}[{}] {}",
chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"),
libc::pthread_self(),
message
))
})
.level(log::LevelFilter::Info)
.chain(std::io::stdout())
.apply()?;
Ok(())
}
fn setup_file_logging() -> Result<(), fern::InitError> {
fern::Dispatch::new()
.format(|out, message, record| unsafe {
out.finish(format_args!(
"{}[{}] {}",
chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"),
libc::pthread_self(),
message
))
})
.level(log::LevelFilter::Info)
.chain(fern::log_file("output.log")?)
.apply()?;
Ok(())
}
/*==============================================================================
* Packet
*------------------------------------------------------------------------------
*
*/
struct Packet {
from: VirtualLink,
workload: u64,
origin: u32,
timestamp: SystemTime,
}
impl Packet {
fn from_iface(iface: &NetworkInterface, workload: u64, origin: u32) -> Packet {
Packet {
from: (*iface).get_virtual_link(),
workload: workload,
origin: origin,
timestamp: SystemTime::now(),
}
}
fn answer_me_at(tx: &mpsc::Sender<Packet>, workload: u64, origin: u32, timestamp: SystemTime) -> Packet {
Packet {
from: VirtualLink::linked_to(tx),
workload: workload,
origin: origin,
timestamp: timestamp,
}
}
}
/*==============================================================================
* VirtualLink
*------------------------------------------------------------------------------
*
*/
struct VirtualLink {
s: mpsc::Sender<Packet>
}
impl VirtualLink {
fn to_iface(interface: &NetworkInterface) -> VirtualLink {
VirtualLink {
s: (*interface).s.clone()
}
}
fn linked_to(tx: &mpsc::Sender<Packet>) -> VirtualLink {
VirtualLink {
s: (*tx).clone()
}
}
fn send_through(&self, packet: Packet) {
self.s.send(packet).unwrap()
}
}
/*==============================================================================
* Network Interface
*------------------------------------------------------------------------------
*
*/
struct NetworkInterface {
s: mpsc::Sender<Packet>,
r: mpsc::Receiver<Packet>
}
impl NetworkInterface {
fn | () -> NetworkInterface {
let (tx, rx) = mpsc::channel();
NetworkInterface {
s: tx,
r: rx
}
}
fn read(&self) -> Packet {
self.r.recv().unwrap()
}
fn get_virtual_link(&self) -> VirtualLink {
VirtualLink::to_iface(self)
}
}
/*==============================================================================
* Host
*
*/
struct Host {
nic: NetworkInterface,
}
impl Host {
fn new() -> Host {
Host {
nic: NetworkInterface::new(),
}
}
fn get_virtual_link(&self) -> VirtualLink {
self.nic.get_virtual_link()
}
}
/*==============================================================================
* Stats
*
*/
struct Stats {
samples: u64,
total: u64,
}
impl Stats {
fn new() -> Stats {
Stats {
samples: 0,
total: 0,
}
}
fn update_stats(&mut self, new_sample_time: u64) {
self.samples += 1;
self.total += new_sample_time;
}
fn get_average(&self) -> f64 {
if self.samples == 0 {
return 0.0;
}
(self.total as f64) / (self.samples as f64)
}
}
/*==============================================================================
* Server
*
*/
struct Server {
id: u32,
host: Host,
processing_power: u64
}
impl Server {
fn new(id: u32, server_data: ServerData) -> Server {
Server {
id: id,
host: Host::new(),
processing_power: server_data.get_processing_power()
}
}
fn get_virtual_link(&self) -> VirtualLink {
self.host.get_virtual_link()
}
fn run(self) {
info!("[S{}] Ejecutando servidor {}", self.id, self.id);
let rx = self.host.nic.r;
let tx = self.host.nic.s;
for message in rx {
// Obtenemos la cantidad de cuadrantes a procesar.
let workload = message.workload;
info!("[S{}] Recibidas {} unidades de trabajo desde observatorio {}", self.id, workload, message.origin);
/*
* Procesamos los cuadrantes.
*
* El workload tiene unidades de trabajo. El poder de procesamiento
* tiene unidades de trabajo por segundo. El sleep time tiene unidades
* de milisegundos.
*
* Por ejemplo, un servidor recibe 5 unidades de trabajo desde el
* cliente. El servidor puede procesar dos unidades de trabajo por
* segundo. El hilo dormirá entonces 2500 milisegundos simulando
* el procesamiento de la carga. Para acelerar o relentizar
* la simulación, podemos ajustar el factor global de velocidad;
* por ejemplo, si el factor global es 2.0, en vez de dormir los 2500
* milisegundos dormiría 1250.
*
*/
let sleep_time = (1000*workload)/self.processing_power;
let sleep_time_scaled = ((sleep_time as f64)/GLOBAL_SPEED) as u64;
info!("[S{}] Tiempo estimado: {}ms (s: {}ms)", self.id, sleep_time, sleep_time_scaled);
thread::sleep(time::Duration::from_millis(sleep_time_scaled));
info!("[S{}] Procesamiento terminado; devolviendo ACK a observatorio {}", self.id, message.origin);
// Devolvemos el ACK.
let response = Packet::answer_me_at(&tx, 0, self.id, message.timestamp);
message.from.send_through(response);
}
}
}
/*==============================================================================
* Client
*
*/
struct Target {
virtual_link: VirtualLink,
weight: f64
}
struct Client {
id: u32,
host: Host,
distribution_scheme: Vec<Target>,
work_generation_rate: u64
}
impl Client {
fn new(id: u32, servers: &Vec<Server>, client_data: ClientData) -> Client {
let workshare: &Vec<f64> = client_data.get_workshare();
let mut distribution = Vec::new();
for i in 0..servers.len() {
distribution.push(Target {
virtual_link: servers[i].get_virtual_link(),
weight: workshare[i]
});
}
Client {
id: id,
host: Host::new(),
distribution_scheme: distribution,
work_generation_rate: client_data.get_work_generation_rate()
}
}
fn run(self) {
info!("[C{}] Ejecutando cliente {}", self.id, self.id);
/*
* Cada cierta cantidad de tiempo, el observatorio genera x cuadrantes.
* A partir de ahí itera por la lista de servidores distribuyendo los
* cuadrantes según los factores de distribución (e.g., si debe enviar
* una fracción p_k de los cuadrantes al servidor k, enviará p_k*x
* cuadrantes al servidor k).
*
* Habiendo enviado los mensajes, simplemente espera las respuestas.
* Suponiendo alternativamente que hay que seguir generando cuadrantes
* mientras se toman fotos, se pueden tener internamente dos threads,
* uno acumulando cuadrantes y otro tomando cuadrantes y distribuyendolos.
*
* Para medir el tiempo de respuesta del observatorio se puede ir
* calculando una media móvil, tomando el tiempo que tarda en responder
* cada servidor.
*/
let targets = &self.distribution_scheme;
let mut stats : Stats = Stats::new();
loop {
let x = self.work_generation_rate;
info!("[C{}] Generando {} unidades de trabajo", self.id, x);
// Distribuimos los x cuadrantes generados.
let mut sid = 0;
for target in targets {
sid += 1;
let workload = ((x as f64)*(target.weight)) as u64;
let packet = Packet::from_iface(&self.host.nic, workload, self.id);
info!("[C{}] Enviando {} unidades al servidor {}", self.id, workload, sid);
target.virtual_link.send_through(packet);
}
// Esperamos la respuesta de cada servidor.
info!("[C{}] Esperando respuestas", self.id);
for _d in targets {
let _response = self.host.nic.read();
// Cálculo de tiempo de respuesta
let response_time_duration = _response.timestamp.elapsed().unwrap();
let response_time_ms = response_time_duration.as_secs() + ((response_time_duration.subsec_millis() * 1000) as u64);
stats.update_stats(response_time_ms);
}
// Impresión de estadística hasta el momento
info!("[C{}] Promedio de respuesta parcial: {} ms", self.id, format!("{:.*}", 2, stats.get_average()));
info!("[C{}] Todos los servidores terminaron de procesar el bache", self.id);
let sleep_time = (3000.0/GLOBAL_SPEED) as u64;
thread::sleep(time::Duration::from_millis(sleep_time));
}
}
}
/*==============================================================================
* Main
*
*/
const GLOBAL_SPEED: f64 = 1.0;
fn main() {
let args : Vec<String> = env::args().collect();
if args.len() > 1 && args[1] == "--debug" {
setup_file_logging().expect("Couldn't set up logger");
} else {
setup_terminal_logging().expect("Couldn't set up logger");
}
/*
* Cargamos la configuración. La configuración es un archivo de texto con
* pares clave-valor. El objeto de configuración puede usarse como
*
* configuration.get("clave") // retorna el valor asociado a "clave".
*/
info!("[T0] Cargando configuración");
let mut configuration = configuration::Configuration::new();
configuration.load();
let mut threads = Vec::new();
let mut servers: Vec<Server> = Vec::new();
let mut clients: Vec<Client> = Vec::new();
info!("[T0] Inicializando servidores");
let server_data: Vec<ServerData> = configuration.get_server_dataset();
let mut server_count = 0;
for d in server_data {
server_count += 1;
servers.push(Server::new(server_count, d));
}
info!("[T0] Inicializando clientes");
let client_data: Vec<ClientData> = configuration.get_client_dataset();
let mut client_count = 0;
for c in client_data {
client_count += 1;
clients.push(Client::new(client_count, &servers, c));
}
info!("[T0] Lanzando hilos servidores");
for server in servers {
let th = thread::spawn(move || {
server.run();
});
threads.push(th);
}
info!("[T0] Lanzando hilos clientes");
for client in clients {
let th = thread::spawn(move || {
client.run();
});
threads.push(th);
}
info!("[T0] Esperando la finalización del programa");
for th in threads {
th.join().unwrap();
}
}
| new | identifier_name |
get_company_test.py | #!/usr/bin/env python
# encoding: utf-8
'''
#-------------------------------------------------------------------#
# CONFIDENTIAL --- CUSTOM STUDIOS #
#-------------------------------------------------------------------#
# #
# @Project Name : keyword #
# #
# @File Name : get_company1.py #
# #
# @Programmer : Adam #
# #
# @Start Date : 2020/8/10 0010 16:28 #
# #
# @Last Update : 2020/8/10 0010 16:28 #
# #
#-------------------------------------------------------------------#
# Classes: Use selenium to open a web page to access companies #
# with contact information #
#-------------------------------------------------------------------#
'''
from __future__ import absolute_import
from __future__ import with_statement
from queue import Queue
import sys
import re
import json
from lxml import etree
import collections
import requests
import random
import time
import datetime
import io
import pymysql
import codecs
import threading
from PIL import Image
from hashlib import md5
from selenium import webdriver
from selenium.webdriver import ChromeOptions
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.common.exceptions import (
TimeoutException,
NoSuchFrameException,
NoSuchWindowException,
NoSuchElementException,
)
sys.stdout = codecs.getwriter('utf-8')(sys.stdout.detach())
chrome_options = Options()
chrome_options.add_experimental_option("excludeSwitches", ['enable-automation'])
# chrome_driver = r'C:\Program Files (x86)\Google\Chrome\Application\chromedriver.exe'
information_list = []
# 储存公司信息的列表 /www/wwwroot/www.waiqidian.cn/api/customs/adjunct.json
with open('/www/wwwroot/www.waiqidian.cn/api/customs/comany.txt') as f:
txt = f.readlines()
# for item in txt:
# item = item.rstrip("\n")
# with open('adjunct.json') as f:
# con_dict = json.loads(f.read())
# # cookie_path = con_dict['cookie_filepath']
# cookie_path = 'w_cookies.txt'
# mysql_db = con_dict[item]['datebase']
# data_hs = con_dict[item]['hs']
# try:
# data_hs = data_hs.split(',')
# except Exception as e:
# print('无需分割' + e)
class Chaojiying_Client(object):
def __init__(self, username, password, soft_id):
self.username = username
password = password.encode('utf8')
self.password = md5(password).hexdigest()
self.soft_id = soft_id
self.base_params = {
'user': self.username,
'pass2': self.password,
'softid': self.soft_id,
}
self.headers = {
'Connection': 'Keep-Alive',
'User-Agent': 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0)',
}
def PostPic(self, im, codetype):
"""
im: 图片字节
codetype: 题目类型 参考 http://www.chaojiying.com/price.html
"""
params = {
'codetype': codetype,
}
params.update(self.base_params)
files = {'userfile': ('ccc.jpg', im)}
r = requests.post('http://upload.chaojiying.net/Upload/Processing.php', data=params, files=files,
headers=self.headers)
return r.json()
def ReportError(self, im_id):
"""
im_id:报错题目的图片ID
"""
params = {
'id': im_id,
}
params.update(self.base_params)
r = requests.post('http://upload.chaojiying.net/Upload/ReportError.php', data=params, headers=self.headers)
return r.json()
def readCookies(cookie_path):
"""
读取本地的cookie并进行遍历
:param cookie_path:
:return:
"""
with open(cookie_path, 'r') as f: # ,encoding='utf-8'
listCookies = json.loads(f.read())
cookie = [item["name"] + "=" + item["value"] for item in listCookies]
cookiestr = '; '.join(item for item in cookie)
return cookiestr
def getHTMLText(data, cookiestr):
"""
# requests请求获取到页面的html
# :param data:
# :param cookiestr:
# :return:
"""
pageNum = 1
while pageNum < 5:
url2 = 'https://www.52wmb.com/async/company?key={}&old_key={}&country=&country_id=0&type=0&sort' \
'=default&click_search=0&st=3&old_st=3&main=0&extend_search=false&fw%5B%5D=email&filterweight=email&_' \
'=1604476115171&page={}'.format(data['key'], data['key'], pageNum)
url1 = 'https://www.52wmb.com/async/company?key={}&old_key={' \
'}&country=&country_id=0&type=0&sort=default&click_search=0&st=2&old_st=2&main=0&extend_search=false' \
'&fw%5B%5D=email&filterweight=email&_=1603852119428&page={} '.format(data['key'], data['key'], pageNum)
url = 'https://www.52wmb.com/async/company?key={}&old_key={' \
'}&country=&country_id=0&type=0&sort=default&click_search=0&st=3&old_st=3&main=0&extend_search=false&fw' \
'%5B%5D=email&filterweight=email&_=1603434620022&page={}'.format(data['key'], data['key'], pageNum)
url3 = 'https://www.52wmb.com/async/company?country=*&key={}&type=0&click_search=1&fw%5B%5D=email&filterweight=email' \
'&sort=default&country_data=&search_type=3&is_label=1&st=3&page={}&_=1614564485810'.format(data['key'], pageNum)
headers = {
'cookie': cookiestr,
'user-agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.106 Safari/537.36',
'referer': 'https://www.52wmb.com/buy-3926909090?st=3'
}
try:
# html = requests.get(url=url2, headers=headers) #miao zhu shi 2021/3/1
html = requests.get(url=url3, headers=headers) # miao xin zeng 2021/3/1
time.sleep(random.randint(4, 8))
html.raise_for_status()
html.encoding = html.apparent_encoding
# print(html.url)
html = html.text
information(html, cookiestr)
except Exception as e:
print(e)
pageNum += 1
def information(html, cookiestr):
"""
# 获取公司的信息
# :param html:
# :return:
"""
companys_list = etree.HTML(html).xpath('//li[@class="ssList-li company-fun-li"]')
for i in companys_list[:]:
company_info = collections.OrderedDict()
try:
# number 公司id
c_id = i.xpath('@data-id')[0]
company_info['number'] = c_id
# 公司名称
company_info['name'] = i.xpath('div[1]/div[@class="ssContent"]/a/text()')[0]
# 地区
# company_info['trade_country'] = i.xpath('div[1]/div[@class="ssContent"]/p[@class="ss-Ctag"]/text()')[0]
# 货运次数
company_info['trade_number'] = i.xpath('div[1]/div[@class="ssContent"]/p[@class="ss-Ctag"]/text()')[1].replace(
'总货运', '')
# 更新时间
update_time = i.xpath('div[1]/div[@class="ssContent"]/div[@class="ss-Cjl"]/text()')[0].strip()[-10:]
company_info['update_time'] = update_time
# print("company_info------",company_info)
get_next_level(company_info, cookiestr)
except Exception as e:
print(e)
def get_next_level(company_info, cookiestr):
url = 'https://www.52wmb.com/async/contact'
# url = 'https://www.52wmb.com/buyer/35274878?SRID=Z8KWwphnwpZsag%3D%3D&key=crane&st=2'
data1 = collections.OrderedDict()
data1['company_id'] = company_info['number']
# headers = {
# 'Referer': 'https://www.52wmb.com/buyer/{}'.format(data1['company_id']),
# 'cookie': cookiestr,
# 'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.122 Safari/537.36'
# } miao zhu shi 2021/3/1
headers = {
"Accept": "*/*",
# "Accept-Encoding": "gzip,deflate,br",
# "Accept-Language": "zh-CN,zh;q=0.9",
# "Connection": "keep-alive",
# "Content-Length": "18",
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
"Cookie": "__root_domain_v=.52wmb.com; _qddaz=QD.oxnn0z.8hqf3r.kjru3sro; _QUY=wqXCh8KywpbCmMKVwplrwpzCk8KeZ8KbwpJnZnDCk2htaQ==; _DP=2; company_search_tips=1; _QP=1; promote=proname=auto; _qdda=3-1.1; _qddab=3-cx3zj9.klpw0b65; access_token=13609ab52b8b529a; 52BY_TOKEN=8ed9a124-7a31-11eb-a26e-00155d391e0d; _MNG=1; vip_expired_tips=none; _qddamta_2885855166=3-0",
"DNT": "1",
"Host": "www.52wmb.com",
"Origin": "https://www.52wmb.com",
"Referer": "https://www.52wmb.com/buyer/" + str(data1['company_id']),
"Sec-Fetch-Dest": "empty",
"Sec-Fetch-Mode": "cors",
"Sec-Fetch-Site": "same-origin",
"User-Agent": "Mozilla/5.0(Windows NT 10.0; Win64; x64)AppleWebKit/537.36(KHTML,like Gecko)Chrome/88.0.4324.182 Safari/537.36 Edg/88.0.705.81",
# "X-Requested-With": "XMLHttpRequest",
}
data = {
'company_id': data1['company_id']
}
try:
html = requests.post(url=url, data=data, headers=headers) # data=data,
time.sleep(random.randint(3, 5))
html.raise_for_status()
html.encoding = html.apparent_encoding
# print(html.url)
html = html.text
# print('8'*20,'\n',html)
parser_email(html, company_info)
except Exception as e:
return ''
def parser_email(html, company_info):
html = etree.HTML(html)
try:
tbod = html.xpath('//*[@id="contact-detail"]')
# print("tbod:ss",tbod)
# titlle = html.xpath('//*[@id="companies-detail"]/div/div/div/h1/text()')//*[@id="contact-detail"]/table/tbody
if len(tbod) >= 1:
pass
else:
automation()
except Exception as e:
pass
try:
html = html.xpath('//tbody')[0]
if len(html) >= 1:
print('有数据')
strt = int(time.time())
company_info['time'] = strt
company_info['tableid'] = 0
company_info['spider'] = 1
try:
# 数据来源
region = html.xpath('tr[4]/td[1]/text()')[1].strip()
except:
region = '-'
company_info['region'] = region
try:
# 联系电话
c_tel = html.xpath('tr[3]/td[1]/text()')[1].strip()
except:
c_tel = '-'
company_info['tel'] = c_tel
try:
# 联 系 人
lianxiren = html.xpath('tr[2]/td[1]/text()')[1].strip()
except:
lianxiren = '-'
company_info['lianxiren'] = lianxiren
try:
# 邮箱地址
c_email = html.xpath('tr[2]/td[2]/text()')[1].strip()
except:
c_email = '-'
company_info['company_email'] = c_email
try:
# 公司地址
c_adress = html.xpath('tr[3]/td[2]/text()')[1].strip()
except:
c_adress = '-'
company_info['company_address'] = c_adress
# try:
# # 数据来源
# c_source = html.xpath('tr[4]/td[1]/text()')[1].strip()
# except:
# c_source = '-'
# company_info['company_source'] = c_source
try:
# 官方网站
c_site = html.xpath('tr[4]/td[2]/a/text()')[0]
except:
c_site = '-'
company_info['company_site'] = c_site
except Exception as e:
print('没有数据' + e)
if len(company_info) > 10:
information_list.append(company_info)
print(json.dumps(company_info, ensure_ascii=False))
else:
print('数据不完整')
def Verification(companys_list):
"""
# 判断页面是否请求成功是否拿到信息
# :param companys_list:
# :return:
"""
all_dict = collections.OrderedDict()
if len(companys_list) > 0:
all_dict['code'] = 200
all_dict['msg'] = "sucess"
else:
all_dict['code'] = 404
all_dict['msg'] = "没有找到该信息,请更换关键词再试试"
test_dict = collections.OrderedDict()
test_dict['number'] = '1111111'
test_dict['name'] = 'name'
test_dict['update_time'] = time.strftime('%Y-%m-%d')
test_dict['other_trade'] = 'None'
# companys_list[-1] = test_dict
all_dict['data'] = companys_list
print(json.dumps(companys_list, ensure_ascii=False))
insert_company(companys_list)
# 插入数据库
if all_dict['code'] == 404:
Verification_code()
def insert_company(mysql_db):
"""
# 把信息插入数据库
# :param data:
# :return:
"""
strt = int(time.time())
try:
conn = pymysql.connect(host=mysql_db['host'],
user=mysql_db['user'],
password=mysql_db['password'],
database=mysql_db['user'],
charset='utf8')
cursor = conn.cursor()
print("数据库连接成功")
except Exception as e:
print(e)
return
if len(information_list) >= 0:
# print("准备插入数据库->",information_list)
# print("数据库信息->",mysql_db['host'],mysql_db['user'],mysql_db['password'],mysql_db['user'])
list1 = [tuple([i["update_time"]] +
[i['name']] +
[i["trade_number"]] +
[i["region"]] +
[i["company_email"]] +
[i["lianxiren"]] +
[i["tel"]] +
[i['company_address']] +
[i["company_site"]] +
[i["time"]] +
[i["tableid"]] +
[i["spider"]]
) for i in information_list]
for data in list1:
insert_sql = 'insert into wp_haiguan_data (day,company_name,number,country,email,lianxiren,phone,address,' \
'website,update_time,tableid,is_spider) values {};'.format(data)
try:
# print("插入数据库的sql语句",insert_sql)
cursor.execute(insert_sql)
conn.commit()
except Exception as e:
print(e)
cursor.close()
conn.close()
else:
print('没有数据')
def automation():
"""
解决验证码,验证码处出来截图并连接打码平台
:return:
"""
driver = webdriver.Chrome(options=chrome_options)
"""
处理selenium被检测这个问题
"""
driver.maximize_window()
driver.get('https://www.52wmb.com/buyer/35206685?SRID=acKVwpdnwpdrbA%3D%3D&key=mask&st=2')
with open('w_cookies.txt', 'r', encoding='utf8') as f:
listCookies = json.loads(f.read())
# 往br | 机页数
driver.find_element_by_xpath('//*[@id="company_list_input"]').send_keys(random.randint(5, 9))
# 点击跳转
driver.find_element_by_xpath('//*[@id="company_list_jump"]').click()
time.sleep(2)
try:
driver.switch_to.frame('mainFrame')
# 定位验证码图片位置
xi = driver.find_element_by_xpath('//*[@id="picture"]')
# 截取整个页面,为下面截取验证码用
driver.save_screenshot('page.png')
# 根据验证码定位元素来获取长宽高
left = xi.location['x']
top = xi.location['y']
right = xi.location['x'] + xi.size['width']
bottom = xi.location['y'] + xi.size['height']
# 打开整个页面截图
im = Image.open('page.png')
# 截取验证码图片
im = im.crop((left, top, right, bottom))
# 保存到本地
im.save('pages.png')
time.sleep(1)
# 连接打码平台
cjy = Chaojiying_Client
chaojiying = Chaojiying_Client('20200807', 'ht123456789',
'907025') # 用户中心>>软件ID 生成一个替换 96001
img = open('pages.png', 'rb').read() # 本地图片文件路径 来替后要加()换 a.jpg 有时WIN系统
msg = chaojiying.PostPic(img, 1902)
yzm = msg['pic_str']
# 输入平台传回来的验证码并输入
driver.find_element_by_id('picture_code').send_keys(yzm)
time.sleep(1)
driver.find_element_by_id('verifi_robot').click()
time.sleep(1)
# 退出当前iframe表单
driver.switch_to.default_content()
except Exception as e:
print(e)
driver.quit()
if __name__ == '__main__':
start = time.time()
for item in txt:
item = item.rstrip("\n")
# print(item,'ee')
with open('/www/wwwroot/www.waiqidian.cn/api/customs/adjunct.json') as f:
con_dict = json.loads(f.read())
cookie_path = con_dict['cookie_filepath']
# # cookie_path = 'w_cookies.txt'
mysql_db = con_dict[item]['datebase']
data_hs = con_dict[item]['hs']
try:
data_hs = data_hs.split(',')
except Exception as e:
print('无需分割' + e)
# print(data_hs)
for hs in data_hs:
data1 = collections.OrderedDict()
data1['key'] = hs
cookie = readCookies(cookie_path)
getHTMLText(data1, cookie)
insert_company(mysql_db)
list1 = [tuple([i["update_time"]] +
[i['name']] +
[i["trade_number"]] +
[i["region"]] +
[i["lianxiren"]] +
[i["tel"]] +
[i["company_address"]] +
[i['company_email']] +
[i["company_site"]] +
[i["time"]] +
[i["tableid"]] +
[i["spider"]]
) for i in information_list]
print("List1aaa:", list1)
print('时间', time.time() - start)
'''
{"day": '日期', "company_name": '公司名称', "number" :'货运次数', "email": '邮箱', "lianxiren": '联系人', "fax" :'电话', "address": '地址',
"website": '网址', "update_time": '添加时间'}
'''
| owser里添加cookies
for cookie in listCookies:
cookie_dict = {
'name': cookie.get('name'),
'value': cookie.get('value'),
"expires": '',
'path': '/',
'httpOnly': False,
'HostOnly': False,
'Secure': False
}
# 添加cookie进行免登陆
driver.add_cookie(cookie_dict)
# 刷新当前页面
driver.refresh()
time.sleep(2)
# 切换iframe
driver.switch_to.frame('mainFrame')
# 定位验证码图片位置
xi = driver.find_element_by_xpath('//*[@id="picture"]')
# 截取整个页面,为下面截取验证码用
driver.save_screenshot('page.png')
# 根据验证码定位元素来获取长宽高
left = xi.location['x']
top = xi.location['y']
right = xi.location['x'] + xi.size['width']
bottom = xi.location['y'] + xi.size['height']
# 打开整个页面截图
im = Image.open('page.png')
# 截取验证码图片
im = im.crop((left, top, right, bottom))
# 保存到本地
im.save('pages.png')
time.sleep(3)
# 连接打码平台
cjy = Chaojiying_Client
chaojiying = Chaojiying_Client('20200807', 'ht123456789',
'907025') # 用户中心>>软件ID 生成一个替换 96001
img = open('pages.png', 'rb').read() # 本地图片文件路径 来替后要加()换 a.jpg 有时WIN系统
msg = chaojiying.PostPic(img, 1902)
yzm = msg['pic_str']
# 输入平台传回来的验证码并输入
driver.find_element_by_id('picture_code').send_keys(yzm)
time.sleep(1)
driver.find_element_by_id('verifi_robot').click()
time.sleep(3)
# 退出当前iframe表单
driver.switch_to.default_content()
driver.refresh()
driver.quit()
def Verification_code():
driver = webdriver.Chrome(options=chrome_options)
driver.get('https://www.52wmb.com/buy-mask?st=2')
driver.maximize_window()
with open(r'w_cookies.txt', 'r+') as f:
list_cookies = json.loads(f.read())
for item in list_cookies:
cookie_dict = {
'name': item.get('name'),
'value': item.get('value'),
"expires": '',
'path': '/',
'httpOnly': False,
'HostOnly': False,
'Secure': False
}
driver.add_cookie(cookie_dict=cookie_dict)
driver.refresh()
# 清空跳转框数值
driver.find_element_by_xpath('//*[@id="company_list_input"]').clear()
# 填入随 | identifier_body |
get_company_test.py | #!/usr/bin/env python
# encoding: utf-8
'''
#-------------------------------------------------------------------#
# CONFIDENTIAL --- CUSTOM STUDIOS #
#-------------------------------------------------------------------#
# #
# @Project Name : keyword #
# #
# @File Name : get_company1.py #
# #
# @Programmer : Adam #
# #
# @Start Date : 2020/8/10 0010 16:28 #
# #
# @Last Update : 2020/8/10 0010 16:28 #
# #
#-------------------------------------------------------------------#
# Classes: Use selenium to open a web page to access companies #
# with contact information #
#-------------------------------------------------------------------#
'''
from __future__ import absolute_import
from __future__ import with_statement
from queue import Queue
import sys
import re
import json
from lxml import etree
import collections
import requests
import random
import time
import datetime
import io
import pymysql
import codecs
import threading
from PIL import Image
from hashlib import md5
from selenium import webdriver
from selenium.webdriver import ChromeOptions
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.common.exceptions import (
TimeoutException,
NoSuchFrameException,
NoSuchWindowException,
NoSuchElementException,
)
sys.stdout = codecs.getwriter('utf-8')(sys.stdout.detach())
chrome_options = Options()
chrome_options.add_experimental_option("excludeSwitches", ['enable-automation'])
# chrome_driver = r'C:\Program Files (x86)\Google\Chrome\Application\chromedriver.exe'
information_list = []
# 储存公司信息的列表 /www/wwwroot/www.waiqidian.cn/api/customs/adjunct.json
with open('/www/wwwroot/www.waiqidian.cn/api/customs/comany.txt') as f:
txt = f.readlines()
# for item in txt: | # item = item.rstrip("\n")
# with open('adjunct.json') as f:
# con_dict = json.loads(f.read())
# # cookie_path = con_dict['cookie_filepath']
# cookie_path = 'w_cookies.txt'
# mysql_db = con_dict[item]['datebase']
# data_hs = con_dict[item]['hs']
# try:
# data_hs = data_hs.split(',')
# except Exception as e:
# print('无需分割' + e)
class Chaojiying_Client(object):
def __init__(self, username, password, soft_id):
self.username = username
password = password.encode('utf8')
self.password = md5(password).hexdigest()
self.soft_id = soft_id
self.base_params = {
'user': self.username,
'pass2': self.password,
'softid': self.soft_id,
}
self.headers = {
'Connection': 'Keep-Alive',
'User-Agent': 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0)',
}
def PostPic(self, im, codetype):
"""
im: 图片字节
codetype: 题目类型 参考 http://www.chaojiying.com/price.html
"""
params = {
'codetype': codetype,
}
params.update(self.base_params)
files = {'userfile': ('ccc.jpg', im)}
r = requests.post('http://upload.chaojiying.net/Upload/Processing.php', data=params, files=files,
headers=self.headers)
return r.json()
def ReportError(self, im_id):
"""
im_id:报错题目的图片ID
"""
params = {
'id': im_id,
}
params.update(self.base_params)
r = requests.post('http://upload.chaojiying.net/Upload/ReportError.php', data=params, headers=self.headers)
return r.json()
def readCookies(cookie_path):
"""
读取本地的cookie并进行遍历
:param cookie_path:
:return:
"""
with open(cookie_path, 'r') as f: # ,encoding='utf-8'
listCookies = json.loads(f.read())
cookie = [item["name"] + "=" + item["value"] for item in listCookies]
cookiestr = '; '.join(item for item in cookie)
return cookiestr
def getHTMLText(data, cookiestr):
"""
# requests请求获取到页面的html
# :param data:
# :param cookiestr:
# :return:
"""
pageNum = 1
while pageNum < 5:
url2 = 'https://www.52wmb.com/async/company?key={}&old_key={}&country=&country_id=0&type=0&sort' \
'=default&click_search=0&st=3&old_st=3&main=0&extend_search=false&fw%5B%5D=email&filterweight=email&_' \
'=1604476115171&page={}'.format(data['key'], data['key'], pageNum)
url1 = 'https://www.52wmb.com/async/company?key={}&old_key={' \
'}&country=&country_id=0&type=0&sort=default&click_search=0&st=2&old_st=2&main=0&extend_search=false' \
'&fw%5B%5D=email&filterweight=email&_=1603852119428&page={} '.format(data['key'], data['key'], pageNum)
url = 'https://www.52wmb.com/async/company?key={}&old_key={' \
'}&country=&country_id=0&type=0&sort=default&click_search=0&st=3&old_st=3&main=0&extend_search=false&fw' \
'%5B%5D=email&filterweight=email&_=1603434620022&page={}'.format(data['key'], data['key'], pageNum)
url3 = 'https://www.52wmb.com/async/company?country=*&key={}&type=0&click_search=1&fw%5B%5D=email&filterweight=email' \
'&sort=default&country_data=&search_type=3&is_label=1&st=3&page={}&_=1614564485810'.format(data['key'], pageNum)
headers = {
'cookie': cookiestr,
'user-agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.106 Safari/537.36',
'referer': 'https://www.52wmb.com/buy-3926909090?st=3'
}
try:
# html = requests.get(url=url2, headers=headers) #miao zhu shi 2021/3/1
html = requests.get(url=url3, headers=headers) # miao xin zeng 2021/3/1
time.sleep(random.randint(4, 8))
html.raise_for_status()
html.encoding = html.apparent_encoding
# print(html.url)
html = html.text
information(html, cookiestr)
except Exception as e:
print(e)
pageNum += 1
def information(html, cookiestr):
"""
# 获取公司的信息
# :param html:
# :return:
"""
companys_list = etree.HTML(html).xpath('//li[@class="ssList-li company-fun-li"]')
for i in companys_list[:]:
company_info = collections.OrderedDict()
try:
# number 公司id
c_id = i.xpath('@data-id')[0]
company_info['number'] = c_id
# 公司名称
company_info['name'] = i.xpath('div[1]/div[@class="ssContent"]/a/text()')[0]
# 地区
# company_info['trade_country'] = i.xpath('div[1]/div[@class="ssContent"]/p[@class="ss-Ctag"]/text()')[0]
# 货运次数
company_info['trade_number'] = i.xpath('div[1]/div[@class="ssContent"]/p[@class="ss-Ctag"]/text()')[1].replace(
'总货运', '')
# 更新时间
update_time = i.xpath('div[1]/div[@class="ssContent"]/div[@class="ss-Cjl"]/text()')[0].strip()[-10:]
company_info['update_time'] = update_time
# print("company_info------",company_info)
get_next_level(company_info, cookiestr)
except Exception as e:
print(e)
def get_next_level(company_info, cookiestr):
url = 'https://www.52wmb.com/async/contact'
# url = 'https://www.52wmb.com/buyer/35274878?SRID=Z8KWwphnwpZsag%3D%3D&key=crane&st=2'
data1 = collections.OrderedDict()
data1['company_id'] = company_info['number']
# headers = {
# 'Referer': 'https://www.52wmb.com/buyer/{}'.format(data1['company_id']),
# 'cookie': cookiestr,
# 'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.122 Safari/537.36'
# } miao zhu shi 2021/3/1
headers = {
"Accept": "*/*",
# "Accept-Encoding": "gzip,deflate,br",
# "Accept-Language": "zh-CN,zh;q=0.9",
# "Connection": "keep-alive",
# "Content-Length": "18",
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
"Cookie": "__root_domain_v=.52wmb.com; _qddaz=QD.oxnn0z.8hqf3r.kjru3sro; _QUY=wqXCh8KywpbCmMKVwplrwpzCk8KeZ8KbwpJnZnDCk2htaQ==; _DP=2; company_search_tips=1; _QP=1; promote=proname=auto; _qdda=3-1.1; _qddab=3-cx3zj9.klpw0b65; access_token=13609ab52b8b529a; 52BY_TOKEN=8ed9a124-7a31-11eb-a26e-00155d391e0d; _MNG=1; vip_expired_tips=none; _qddamta_2885855166=3-0",
"DNT": "1",
"Host": "www.52wmb.com",
"Origin": "https://www.52wmb.com",
"Referer": "https://www.52wmb.com/buyer/" + str(data1['company_id']),
"Sec-Fetch-Dest": "empty",
"Sec-Fetch-Mode": "cors",
"Sec-Fetch-Site": "same-origin",
"User-Agent": "Mozilla/5.0(Windows NT 10.0; Win64; x64)AppleWebKit/537.36(KHTML,like Gecko)Chrome/88.0.4324.182 Safari/537.36 Edg/88.0.705.81",
# "X-Requested-With": "XMLHttpRequest",
}
data = {
'company_id': data1['company_id']
}
try:
html = requests.post(url=url, data=data, headers=headers) # data=data,
time.sleep(random.randint(3, 5))
html.raise_for_status()
html.encoding = html.apparent_encoding
# print(html.url)
html = html.text
# print('8'*20,'\n',html)
parser_email(html, company_info)
except Exception as e:
return ''
def parser_email(html, company_info):
html = etree.HTML(html)
try:
tbod = html.xpath('//*[@id="contact-detail"]')
# print("tbod:ss",tbod)
# titlle = html.xpath('//*[@id="companies-detail"]/div/div/div/h1/text()')//*[@id="contact-detail"]/table/tbody
if len(tbod) >= 1:
pass
else:
automation()
except Exception as e:
pass
try:
html = html.xpath('//tbody')[0]
if len(html) >= 1:
print('有数据')
strt = int(time.time())
company_info['time'] = strt
company_info['tableid'] = 0
company_info['spider'] = 1
try:
# 数据来源
region = html.xpath('tr[4]/td[1]/text()')[1].strip()
except:
region = '-'
company_info['region'] = region
try:
# 联系电话
c_tel = html.xpath('tr[3]/td[1]/text()')[1].strip()
except:
c_tel = '-'
company_info['tel'] = c_tel
try:
# 联 系 人
lianxiren = html.xpath('tr[2]/td[1]/text()')[1].strip()
except:
lianxiren = '-'
company_info['lianxiren'] = lianxiren
try:
# 邮箱地址
c_email = html.xpath('tr[2]/td[2]/text()')[1].strip()
except:
c_email = '-'
company_info['company_email'] = c_email
try:
# 公司地址
c_adress = html.xpath('tr[3]/td[2]/text()')[1].strip()
except:
c_adress = '-'
company_info['company_address'] = c_adress
# try:
# # 数据来源
# c_source = html.xpath('tr[4]/td[1]/text()')[1].strip()
# except:
# c_source = '-'
# company_info['company_source'] = c_source
try:
# 官方网站
c_site = html.xpath('tr[4]/td[2]/a/text()')[0]
except:
c_site = '-'
company_info['company_site'] = c_site
except Exception as e:
print('没有数据' + e)
if len(company_info) > 10:
information_list.append(company_info)
print(json.dumps(company_info, ensure_ascii=False))
else:
print('数据不完整')
def Verification(companys_list):
"""
# 判断页面是否请求成功是否拿到信息
# :param companys_list:
# :return:
"""
all_dict = collections.OrderedDict()
if len(companys_list) > 0:
all_dict['code'] = 200
all_dict['msg'] = "sucess"
else:
all_dict['code'] = 404
all_dict['msg'] = "没有找到该信息,请更换关键词再试试"
test_dict = collections.OrderedDict()
test_dict['number'] = '1111111'
test_dict['name'] = 'name'
test_dict['update_time'] = time.strftime('%Y-%m-%d')
test_dict['other_trade'] = 'None'
# companys_list[-1] = test_dict
all_dict['data'] = companys_list
print(json.dumps(companys_list, ensure_ascii=False))
insert_company(companys_list)
# 插入数据库
if all_dict['code'] == 404:
Verification_code()
def insert_company(mysql_db):
"""
# 把信息插入数据库
# :param data:
# :return:
"""
strt = int(time.time())
try:
conn = pymysql.connect(host=mysql_db['host'],
user=mysql_db['user'],
password=mysql_db['password'],
database=mysql_db['user'],
charset='utf8')
cursor = conn.cursor()
print("数据库连接成功")
except Exception as e:
print(e)
return
if len(information_list) >= 0:
# print("准备插入数据库->",information_list)
# print("数据库信息->",mysql_db['host'],mysql_db['user'],mysql_db['password'],mysql_db['user'])
list1 = [tuple([i["update_time"]] +
[i['name']] +
[i["trade_number"]] +
[i["region"]] +
[i["company_email"]] +
[i["lianxiren"]] +
[i["tel"]] +
[i['company_address']] +
[i["company_site"]] +
[i["time"]] +
[i["tableid"]] +
[i["spider"]]
) for i in information_list]
for data in list1:
insert_sql = 'insert into wp_haiguan_data (day,company_name,number,country,email,lianxiren,phone,address,' \
'website,update_time,tableid,is_spider) values {};'.format(data)
try:
# print("插入数据库的sql语句",insert_sql)
cursor.execute(insert_sql)
conn.commit()
except Exception as e:
print(e)
cursor.close()
conn.close()
else:
print('没有数据')
def automation():
"""
解决验证码,验证码处出来截图并连接打码平台
:return:
"""
driver = webdriver.Chrome(options=chrome_options)
"""
处理selenium被检测这个问题
"""
driver.maximize_window()
driver.get('https://www.52wmb.com/buyer/35206685?SRID=acKVwpdnwpdrbA%3D%3D&key=mask&st=2')
with open('w_cookies.txt', 'r', encoding='utf8') as f:
listCookies = json.loads(f.read())
# 往browser里添加cookies
for cookie in listCookies:
cookie_dict = {
'name': cookie.get('name'),
'value': cookie.get('value'),
"expires": '',
'path': '/',
'httpOnly': False,
'HostOnly': False,
'Secure': False
}
# 添加cookie进行免登陆
driver.add_cookie(cookie_dict)
# 刷新当前页面
driver.refresh()
time.sleep(2)
# 切换iframe
driver.switch_to.frame('mainFrame')
# 定位验证码图片位置
xi = driver.find_element_by_xpath('//*[@id="picture"]')
# 截取整个页面,为下面截取验证码用
driver.save_screenshot('page.png')
# 根据验证码定位元素来获取长宽高
left = xi.location['x']
top = xi.location['y']
right = xi.location['x'] + xi.size['width']
bottom = xi.location['y'] + xi.size['height']
# 打开整个页面截图
im = Image.open('page.png')
# 截取验证码图片
im = im.crop((left, top, right, bottom))
# 保存到本地
im.save('pages.png')
time.sleep(3)
# 连接打码平台
cjy = Chaojiying_Client
chaojiying = Chaojiying_Client('20200807', 'ht123456789',
'907025') # 用户中心>>软件ID 生成一个替换 96001
img = open('pages.png', 'rb').read() # 本地图片文件路径 来替后要加()换 a.jpg 有时WIN系统
msg = chaojiying.PostPic(img, 1902)
yzm = msg['pic_str']
# 输入平台传回来的验证码并输入
driver.find_element_by_id('picture_code').send_keys(yzm)
time.sleep(1)
driver.find_element_by_id('verifi_robot').click()
time.sleep(3)
# 退出当前iframe表单
driver.switch_to.default_content()
driver.refresh()
driver.quit()
def Verification_code():
driver = webdriver.Chrome(options=chrome_options)
driver.get('https://www.52wmb.com/buy-mask?st=2')
driver.maximize_window()
with open(r'w_cookies.txt', 'r+') as f:
list_cookies = json.loads(f.read())
for item in list_cookies:
cookie_dict = {
'name': item.get('name'),
'value': item.get('value'),
"expires": '',
'path': '/',
'httpOnly': False,
'HostOnly': False,
'Secure': False
}
driver.add_cookie(cookie_dict=cookie_dict)
driver.refresh()
# 清空跳转框数值
driver.find_element_by_xpath('//*[@id="company_list_input"]').clear()
# 填入随机页数
driver.find_element_by_xpath('//*[@id="company_list_input"]').send_keys(random.randint(5, 9))
# 点击跳转
driver.find_element_by_xpath('//*[@id="company_list_jump"]').click()
time.sleep(2)
try:
driver.switch_to.frame('mainFrame')
# 定位验证码图片位置
xi = driver.find_element_by_xpath('//*[@id="picture"]')
# 截取整个页面,为下面截取验证码用
driver.save_screenshot('page.png')
# 根据验证码定位元素来获取长宽高
left = xi.location['x']
top = xi.location['y']
right = xi.location['x'] + xi.size['width']
bottom = xi.location['y'] + xi.size['height']
# 打开整个页面截图
im = Image.open('page.png')
# 截取验证码图片
im = im.crop((left, top, right, bottom))
# 保存到本地
im.save('pages.png')
time.sleep(1)
# 连接打码平台
cjy = Chaojiying_Client
chaojiying = Chaojiying_Client('20200807', 'ht123456789',
'907025') # 用户中心>>软件ID 生成一个替换 96001
img = open('pages.png', 'rb').read() # 本地图片文件路径 来替后要加()换 a.jpg 有时WIN系统
msg = chaojiying.PostPic(img, 1902)
yzm = msg['pic_str']
# 输入平台传回来的验证码并输入
driver.find_element_by_id('picture_code').send_keys(yzm)
time.sleep(1)
driver.find_element_by_id('verifi_robot').click()
time.sleep(1)
# 退出当前iframe表单
driver.switch_to.default_content()
except Exception as e:
print(e)
driver.quit()
if __name__ == '__main__':
start = time.time()
for item in txt:
item = item.rstrip("\n")
# print(item,'ee')
with open('/www/wwwroot/www.waiqidian.cn/api/customs/adjunct.json') as f:
con_dict = json.loads(f.read())
cookie_path = con_dict['cookie_filepath']
# # cookie_path = 'w_cookies.txt'
mysql_db = con_dict[item]['datebase']
data_hs = con_dict[item]['hs']
try:
data_hs = data_hs.split(',')
except Exception as e:
print('无需分割' + e)
# print(data_hs)
for hs in data_hs:
data1 = collections.OrderedDict()
data1['key'] = hs
cookie = readCookies(cookie_path)
getHTMLText(data1, cookie)
insert_company(mysql_db)
list1 = [tuple([i["update_time"]] +
[i['name']] +
[i["trade_number"]] +
[i["region"]] +
[i["lianxiren"]] +
[i["tel"]] +
[i["company_address"]] +
[i['company_email']] +
[i["company_site"]] +
[i["time"]] +
[i["tableid"]] +
[i["spider"]]
) for i in information_list]
print("List1aaa:", list1)
print('时间', time.time() - start)
'''
{"day": '日期', "company_name": '公司名称', "number" :'货运次数', "email": '邮箱', "lianxiren": '联系人', "fax" :'电话', "address": '地址',
"website": '网址', "update_time": '添加时间'}
''' | random_line_split | |
get_company_test.py | #!/usr/bin/env python
# encoding: utf-8
'''
#-------------------------------------------------------------------#
# CONFIDENTIAL --- CUSTOM STUDIOS #
#-------------------------------------------------------------------#
# #
# @Project Name : keyword #
# #
# @File Name : get_company1.py #
# #
# @Programmer : Adam #
# #
# @Start Date : 2020/8/10 0010 16:28 #
# #
# @Last Update : 2020/8/10 0010 16:28 #
# #
#-------------------------------------------------------------------#
# Classes: Use selenium to open a web page to access companies #
# with contact information #
#-------------------------------------------------------------------#
'''
from __future__ import absolute_import
from __future__ import with_statement
from queue import Queue
import sys
import re
import json
from lxml import etree
import collections
import requests
import random
import time
import datetime
import io
import pymysql
import codecs
import threading
from PIL import Image
from hashlib import md5
from selenium import webdriver
from selenium.webdriver import ChromeOptions
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.common.exceptions import (
TimeoutException,
NoSuchFrameException,
NoSuchWindowException,
NoSuchElementException,
)
sys.stdout = codecs.getwriter('utf-8')(sys.stdout.detach())
chrome_options = Options()
chrome_options.add_experimental_option("excludeSwitches", ['enable-automation'])
# chrome_driver = r'C:\Program Files (x86)\Google\Chrome\Application\chromedriver.exe'
information_list = []
# 储存公司信息的列表 /www/wwwroot/www.waiqidian.cn/api/customs/adjunct.json
with open('/www/wwwroot/www.waiqidian.cn/api/customs/comany.txt') as f:
txt = f.readlines()
# for item in txt:
# item = item.rstrip("\n")
# with open('adjunct.json') as f:
# con_dict = json.loads(f.read())
# # cookie_path = con_dict['cookie_filepath']
# cookie_path = 'w_cookies.txt'
# mysql_db = con_dict[item]['datebase']
# data_hs = con_dict[item]['hs']
# try:
# data_hs = data_hs.split(',')
# except Exception as e:
# print('无需分割' + e)
class Chaojiying_Client(object):
def __init__(self, username, password, soft_id):
self.username = username
password = password.encode('utf8')
self.password = md5(password).hexdigest()
self.soft_id = soft_id
self.base_params = {
'user': self.username,
'pass2': self.password,
'softid': self.soft_id,
}
self.headers = {
'Connection': 'Keep-Alive',
'User-Agent': 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0)',
}
def PostPic(self, im, codetype):
"""
im: 图片字节
codetype: 题目类型 参考 http://www.chaojiying.com/price.html
"""
params = {
'codetype': codetype,
}
params.update(self.base_params)
files = {'userfile': ('ccc.jpg', im)}
r = requests.post('http://upload.chaojiying.net/Upload/Processing.php', data=params, files=files,
headers=self.headers)
return r.json()
def ReportError(self, im_id):
"""
im_id:报错题目的图片ID
"""
params = {
'id': im_id,
}
params.update(self.base_params)
r = requests.post('http://upload.chaojiying.net/Upload/ReportError.php', data=params, headers=self.headers)
return r.json()
def readCookies(cookie_path):
"""
读取本地的cookie并进行遍历
:param cookie_path:
:return:
"""
with open(cookie_path, 'r') as f: # ,encoding='utf-8'
listCookies = json.loads(f.read())
cookie = [item["name"] + "=" + item["value"] for item in listCookies]
cookiestr = '; '.join(item for item in cookie)
return cookiestr
def getHTMLText(data, cookiestr):
"""
# requests请求获取到页面的html
# :param data:
# :param cookiestr:
# :return:
"""
pageNum = 1
while pageNum < 5:
url2 = 'https://www.52wmb.com/async/company?key={}&old_key={}&country=&country_id=0&type=0&sort' \
'=default&click_search=0&st=3&old_st=3&main=0&extend_search=false&fw%5B%5D=email&filterweight=email&_' \
'=1604476115171&page={}'.format(data['key'], data['key'], pageNum)
url1 = 'https://www.52wmb.com/async/company?key={}&old_key={' \
'}&country=&country_id=0&type=0&sort=default&click_search=0&st=2&old_st=2&main=0&extend_search=false' \
'&fw%5B%5D=email&filterweight=email&_=1603852119428&page={} '.format(data['key'], data['key'], pageNum)
url = 'https://www.52wmb.com/async/company?key={}&old_key={' \
'}&country=&country_id=0&type=0&sort=default&click_search=0&st=3&old_st=3&main=0&extend_search=false&fw' \
'%5B%5D=email&filterweight=email&_=1603434620022&page={}'.format(data['key'], data['key'], pageNum)
url3 = 'https://www.52wmb.com/async/company?country=*&key={}&type=0&click_search=1&fw%5B%5D=email&filterweight=email' \
'&sort=default&country_data=&search_type=3&is_label=1&st=3&page={}&_=1614564485810'.format(data['key'], pageNum)
headers = {
'cookie': cookiestr,
'user-agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.106 Safari/537.36',
'referer': 'https://www.52wmb.com/buy-3926909090?st=3'
}
try:
# html = requests.get(url=url2, headers=headers) #miao zhu shi 2021/3/1
html = requests.get(url=url3, headers=headers) # miao xin zeng 2021/3/1
time.sleep(random.randint(4, 8))
html.raise_for_status()
html.encoding = html.apparent_encoding
# print(html.url)
html = html.text
information(html, cookiestr)
except Exception as e:
print(e)
pageNum += 1
def information(html, cookiestr):
"""
# 获取公司的信息
# :param html:
# :return:
"""
companys_list = etree.HTML(html).xpath('//li[@class="ssList-li company-fun-li"]')
for i in companys_list[:]:
company_info = collections.OrderedDict()
try:
# number 公司id
c_id = i.xpath('@data-id')[0]
company_info['number'] = c_id
# 公司名称
company_info['name'] = i.xpath('div[1]/div[@class="ssContent"]/a/text()')[0]
# 地区
# company_info['trade_country'] = i.xpath('div[1]/div[@class="ssContent"]/p[@class="ss-Ctag"]/text()')[0]
# 货运次数
company_info['trade_number'] = i.xpath('div[1]/div[@class="ssContent"]/p[@class="ss-Ctag"]/text()')[1].replace(
'总货运', '')
# 更新时间
update_time = i.xpath('div[1]/div[@class="ssContent"]/div[@class="ss-Cjl"]/text()')[0].strip()[-10:]
company_info['update_time'] = update_time
# print("company_info------",company_info)
get_next_level(company_info, cookiestr)
except Exception as e:
print(e)
def get_next_level(company_info, cookiestr):
url = 'https://www.52wmb.com/async/contact'
# url = 'https://www.52wmb.com/buyer/35274878?SRID=Z8KW | 3D&key=crane&st=2'
data1 = collections.OrderedDict()
data1['company_id'] = company_info['number']
# headers = {
# 'Referer': 'https://www.52wmb.com/buyer/{}'.format(data1['company_id']),
# 'cookie': cookiestr,
# 'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.122 Safari/537.36'
# } miao zhu shi 2021/3/1
headers = {
"Accept": "*/*",
# "Accept-Encoding": "gzip,deflate,br",
# "Accept-Language": "zh-CN,zh;q=0.9",
# "Connection": "keep-alive",
# "Content-Length": "18",
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
"Cookie": "__root_domain_v=.52wmb.com; _qddaz=QD.oxnn0z.8hqf3r.kjru3sro; _QUY=wqXCh8KywpbCmMKVwplrwpzCk8KeZ8KbwpJnZnDCk2htaQ==; _DP=2; company_search_tips=1; _QP=1; promote=proname=auto; _qdda=3-1.1; _qddab=3-cx3zj9.klpw0b65; access_token=13609ab52b8b529a; 52BY_TOKEN=8ed9a124-7a31-11eb-a26e-00155d391e0d; _MNG=1; vip_expired_tips=none; _qddamta_2885855166=3-0",
"DNT": "1",
"Host": "www.52wmb.com",
"Origin": "https://www.52wmb.com",
"Referer": "https://www.52wmb.com/buyer/" + str(data1['company_id']),
"Sec-Fetch-Dest": "empty",
"Sec-Fetch-Mode": "cors",
"Sec-Fetch-Site": "same-origin",
"User-Agent": "Mozilla/5.0(Windows NT 10.0; Win64; x64)AppleWebKit/537.36(KHTML,like Gecko)Chrome/88.0.4324.182 Safari/537.36 Edg/88.0.705.81",
# "X-Requested-With": "XMLHttpRequest",
}
data = {
'company_id': data1['company_id']
}
try:
html = requests.post(url=url, data=data, headers=headers) # data=data,
time.sleep(random.randint(3, 5))
html.raise_for_status()
html.encoding = html.apparent_encoding
# print(html.url)
html = html.text
# print('8'*20,'\n',html)
parser_email(html, company_info)
except Exception as e:
return ''
def parser_email(html, company_info):
html = etree.HTML(html)
try:
tbod = html.xpath('//*[@id="contact-detail"]')
# print("tbod:ss",tbod)
# titlle = html.xpath('//*[@id="companies-detail"]/div/div/div/h1/text()')//*[@id="contact-detail"]/table/tbody
if len(tbod) >= 1:
pass
else:
automation()
except Exception as e:
pass
try:
html = html.xpath('//tbody')[0]
if len(html) >= 1:
print('有数据')
strt = int(time.time())
company_info['time'] = strt
company_info['tableid'] = 0
company_info['spider'] = 1
try:
# 数据来源
region = html.xpath('tr[4]/td[1]/text()')[1].strip()
except:
region = '-'
company_info['region'] = region
try:
# 联系电话
c_tel = html.xpath('tr[3]/td[1]/text()')[1].strip()
except:
c_tel = '-'
company_info['tel'] = c_tel
try:
# 联 系 人
lianxiren = html.xpath('tr[2]/td[1]/text()')[1].strip()
except:
lianxiren = '-'
company_info['lianxiren'] = lianxiren
try:
# 邮箱地址
c_email = html.xpath('tr[2]/td[2]/text()')[1].strip()
except:
c_email = '-'
company_info['company_email'] = c_email
try:
# 公司地址
c_adress = html.xpath('tr[3]/td[2]/text()')[1].strip()
except:
c_adress = '-'
company_info['company_address'] = c_adress
# try:
# # 数据来源
# c_source = html.xpath('tr[4]/td[1]/text()')[1].strip()
# except:
# c_source = '-'
# company_info['company_source'] = c_source
try:
# 官方网站
c_site = html.xpath('tr[4]/td[2]/a/text()')[0]
except:
c_site = '-'
company_info['company_site'] = c_site
except Exception as e:
print('没有数据' + e)
if len(company_info) > 10:
information_list.append(company_info)
print(json.dumps(company_info, ensure_ascii=False))
else:
print('数据不完整')
def Verification(companys_list):
"""
# 判断页面是否请求成功是否拿到信息
# :param companys_list:
# :return:
"""
all_dict = collections.OrderedDict()
if len(companys_list) > 0:
all_dict['code'] = 200
all_dict['msg'] = "sucess"
else:
all_dict['code'] = 404
all_dict['msg'] = "没有找到该信息,请更换关键词再试试"
test_dict = collections.OrderedDict()
test_dict['number'] = '1111111'
test_dict['name'] = 'name'
test_dict['update_time'] = time.strftime('%Y-%m-%d')
test_dict['other_trade'] = 'None'
# companys_list[-1] = test_dict
all_dict['data'] = companys_list
print(json.dumps(companys_list, ensure_ascii=False))
insert_company(companys_list)
# 插入数据库
if all_dict['code'] == 404:
Verification_code()
def insert_company(mysql_db):
"""
# 把信息插入数据库
# :param data:
# :return:
"""
strt = int(time.time())
try:
conn = pymysql.connect(host=mysql_db['host'],
user=mysql_db['user'],
password=mysql_db['password'],
database=mysql_db['user'],
charset='utf8')
cursor = conn.cursor()
print("数据库连接成功")
except Exception as e:
print(e)
return
if len(information_list) >= 0:
# print("准备插入数据库->",information_list)
# print("数据库信息->",mysql_db['host'],mysql_db['user'],mysql_db['password'],mysql_db['user'])
list1 = [tuple([i["update_time"]] +
[i['name']] +
[i["trade_number"]] +
[i["region"]] +
[i["company_email"]] +
[i["lianxiren"]] +
[i["tel"]] +
[i['company_address']] +
[i["company_site"]] +
[i["time"]] +
[i["tableid"]] +
[i["spider"]]
) for i in information_list]
for data in list1:
insert_sql = 'insert into wp_haiguan_data (day,company_name,number,country,email,lianxiren,phone,address,' \
'website,update_time,tableid,is_spider) values {};'.format(data)
try:
# print("插入数据库的sql语句",insert_sql)
cursor.execute(insert_sql)
conn.commit()
except Exception as e:
print(e)
cursor.close()
conn.close()
else:
print('没有数据')
def automation():
"""
解决验证码,验证码处出来截图并连接打码平台
:return:
"""
driver = webdriver.Chrome(options=chrome_options)
"""
处理selenium被检测这个问题
"""
driver.maximize_window()
driver.get('https://www.52wmb.com/buyer/35206685?SRID=acKVwpdnwpdrbA%3D%3D&key=mask&st=2')
with open('w_cookies.txt', 'r', encoding='utf8') as f:
listCookies = json.loads(f.read())
# 往browser里添加cookies
for cookie in listCookies:
cookie_dict = {
'name': cookie.get('name'),
'value': cookie.get('value'),
"expires": '',
'path': '/',
'httpOnly': False,
'HostOnly': False,
'Secure': False
}
# 添加cookie进行免登陆
driver.add_cookie(cookie_dict)
# 刷新当前页面
driver.refresh()
time.sleep(2)
# 切换iframe
driver.switch_to.frame('mainFrame')
# 定位验证码图片位置
xi = driver.find_element_by_xpath('//*[@id="picture"]')
# 截取整个页面,为下面截取验证码用
driver.save_screenshot('page.png')
# 根据验证码定位元素来获取长宽高
left = xi.location['x']
top = xi.location['y']
right = xi.location['x'] + xi.size['width']
bottom = xi.location['y'] + xi.size['height']
# 打开整个页面截图
im = Image.open('page.png')
# 截取验证码图片
im = im.crop((left, top, right, bottom))
# 保存到本地
im.save('pages.png')
time.sleep(3)
# 连接打码平台
cjy = Chaojiying_Client
chaojiying = Chaojiying_Client('20200807', 'ht123456789',
'907025') # 用户中心>>软件ID 生成一个替换 96001
img = open('pages.png', 'rb').read() # 本地图片文件路径 来替后要加()换 a.jpg 有时WIN系统
msg = chaojiying.PostPic(img, 1902)
yzm = msg['pic_str']
# 输入平台传回来的验证码并输入
driver.find_element_by_id('picture_code').send_keys(yzm)
time.sleep(1)
driver.find_element_by_id('verifi_robot').click()
time.sleep(3)
# 退出当前iframe表单
driver.switch_to.default_content()
driver.refresh()
driver.quit()
def Verification_code():
driver = webdriver.Chrome(options=chrome_options)
driver.get('https://www.52wmb.com/buy-mask?st=2')
driver.maximize_window()
with open(r'w_cookies.txt', 'r+') as f:
list_cookies = json.loads(f.read())
for item in list_cookies:
cookie_dict = {
'name': item.get('name'),
'value': item.get('value'),
"expires": '',
'path': '/',
'httpOnly': False,
'HostOnly': False,
'Secure': False
}
driver.add_cookie(cookie_dict=cookie_dict)
driver.refresh()
# 清空跳转框数值
driver.find_element_by_xpath('//*[@id="company_list_input"]').clear()
# 填入随机页数
driver.find_element_by_xpath('//*[@id="company_list_input"]').send_keys(random.randint(5, 9))
# 点击跳转
driver.find_element_by_xpath('//*[@id="company_list_jump"]').click()
time.sleep(2)
try:
driver.switch_to.frame('mainFrame')
# 定位验证码图片位置
xi = driver.find_element_by_xpath('//*[@id="picture"]')
# 截取整个页面,为下面截取验证码用
driver.save_screenshot('page.png')
# 根据验证码定位元素来获取长宽高
left = xi.location['x']
top = xi.location['y']
right = xi.location['x'] + xi.size['width']
bottom = xi.location['y'] + xi.size['height']
# 打开整个页面截图
im = Image.open('page.png')
# 截取验证码图片
im = im.crop((left, top, right, bottom))
# 保存到本地
im.save('pages.png')
time.sleep(1)
# 连接打码平台
cjy = Chaojiying_Client
chaojiying = Chaojiying_Client('20200807', 'ht123456789',
'907025') # 用户中心>>软件ID 生成一个替换 96001
img = open('pages.png', 'rb').read() # 本地图片文件路径 来替后要加()换 a.jpg 有时WIN系统
msg = chaojiying.PostPic(img, 1902)
yzm = msg['pic_str']
# 输入平台传回来的验证码并输入
driver.find_element_by_id('picture_code').send_keys(yzm)
time.sleep(1)
driver.find_element_by_id('verifi_robot').click()
time.sleep(1)
# 退出当前iframe表单
driver.switch_to.default_content()
except Exception as e:
print(e)
driver.quit()
if __name__ == '__main__':
start = time.time()
for item in txt:
item = item.rstrip("\n")
# print(item,'ee')
with open('/www/wwwroot/www.waiqidian.cn/api/customs/adjunct.json') as f:
con_dict = json.loads(f.read())
cookie_path = con_dict['cookie_filepath']
# # cookie_path = 'w_cookies.txt'
mysql_db = con_dict[item]['datebase']
data_hs = con_dict[item]['hs']
try:
data_hs = data_hs.split(',')
except Exception as e:
print('无需分割' + e)
# print(data_hs)
for hs in data_hs:
data1 = collections.OrderedDict()
data1['key'] = hs
cookie = readCookies(cookie_path)
getHTMLText(data1, cookie)
insert_company(mysql_db)
list1 = [tuple([i["update_time"]] +
[i['name']] +
[i["trade_number"]] +
[i["region"]] +
[i["lianxiren"]] +
[i["tel"]] +
[i["company_address"]] +
[i['company_email']] +
[i["company_site"]] +
[i["time"]] +
[i["tableid"]] +
[i["spider"]]
) for i in information_list]
print("List1aaa:", list1)
print('时间', time.time() - start)
'''
{"day": '日期', "company_name": '公司名称', "number" :'货运次数', "email": '邮箱', "lianxiren": '联系人', "fax" :'电话', "address": '地址',
"website": '网址', "update_time": '添加时间'}
'''
| wphnwpZsag%3D% | identifier_name |
get_company_test.py | #!/usr/bin/env python
# encoding: utf-8
'''
#-------------------------------------------------------------------#
# CONFIDENTIAL --- CUSTOM STUDIOS #
#-------------------------------------------------------------------#
# #
# @Project Name : keyword #
# #
# @File Name : get_company1.py #
# #
# @Programmer : Adam #
# #
# @Start Date : 2020/8/10 0010 16:28 #
# #
# @Last Update : 2020/8/10 0010 16:28 #
# #
#-------------------------------------------------------------------#
# Classes: Use selenium to open a web page to access companies #
# with contact information #
#-------------------------------------------------------------------#
'''
from __future__ import absolute_import
from __future__ import with_statement
from queue import Queue
import sys
import re
import json
from lxml import etree
import collections
import requests
import random
import time
import datetime
import io
import pymysql
import codecs
import threading
from PIL import Image
from hashlib import md5
from selenium import webdriver
from selenium.webdriver import ChromeOptions
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.common.exceptions import (
TimeoutException,
NoSuchFrameException,
NoSuchWindowException,
NoSuchElementException,
)
sys.stdout = codecs.getwriter('utf-8')(sys.stdout.detach())
chrome_options = Options()
chrome_options.add_experimental_option("excludeSwitches", ['enable-automation'])
# chrome_driver = r'C:\Program Files (x86)\Google\Chrome\Application\chromedriver.exe'
information_list = []
# 储存公司信息的列表 /www/wwwroot/www.waiqidian.cn/api/customs/adjunct.json
with open('/www/wwwroot/www.waiqidian.cn/api/customs/comany.txt') as f:
txt = f.readlines()
# for item in txt:
# item = item.rstrip("\n")
# with open('adjunct.json') as f:
# con_dict = json.loads(f.read())
# # cookie_path = con_dict['cookie_filepath']
# cookie_path = 'w_cookies.txt'
# mysql_db = con_dict[item]['datebase']
# data_hs = con_dict[item]['hs']
# try:
# data_hs = data_hs.split(',')
# except Exception as e:
# print('无需分割' + e)
class Chaojiying_Client(object):
def __init__(self, username, password, soft_id):
self.username = username
password = password.encode('utf8')
self.password = md5(password).hexdigest()
self.soft_id = soft_id
self.base_params = {
'user': self.username,
'pass2': self.password,
'softid': self.soft_id,
}
self.headers = {
'Connection': 'Keep-Alive',
'User-Agent': 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0)',
}
def PostPic(self, im, codetype):
"""
im: 图片字节
codetype: 题目类型 参考 http://www.chaojiying.com/price.html
"""
params = {
'codetype': codetype,
}
params.update(self.base_params)
files = {'userfile': ('ccc.jpg', im)}
r = requests.post('http://upload.chaojiying.net/Upload/Processing.php', data=params, files=files,
headers=self.headers)
return r.json()
def ReportError(self, im_id):
"""
im_id:报错题目的图片ID
"""
params = {
'id': im_id,
}
params.update(self.base_params)
r = requests.post('http://upload.chaojiying.net/Upload/ReportError.php', data=params, headers=self.headers)
return r.json()
def readCookies(cookie_path):
"""
读取本地的cookie并进行遍历
:param cookie_path:
:return:
"""
with open(cookie_path, 'r') as f: # ,encoding='utf-8'
listCookies = json.loads(f.read())
cookie = [item["name"] + "=" + item["value"] for item in listCookies]
cookiestr = '; '.join(item for item in cookie)
return cookiestr
def getHTMLText(data, cookiestr):
"""
# requests请求获取到页面的html
# :param data:
# :param cookiestr:
# :return:
"""
pageNum = 1
while pageNum < 5:
url2 = 'https://www.52wmb.com/async/company?key={}&old_key={}&country=&country_id=0&type=0&sort' \
'=default&click_search=0&st=3&old_st=3&main=0&extend_search=false&fw%5B%5D=email&filterweight=email&_' \
'=1604476115171&page={}'.format(data['key'], data['key'], pageNum)
url1 = 'https://www.52wmb.com/async/company?key={}&old_key={' \
'}&country=&country_id=0&type=0&sort=default&click_search=0&st=2&old_st=2&main=0&extend_search=false' \
'&fw%5B%5D=email&filterweight=email&_=1603852119428&page={} '.format(data['key'], data['key'], pageNum)
url = 'https://www.52wmb.com/async/company?key={}&old_key={' \
'}&country=&country_id=0&type=0&sort=default&click_search=0&st=3&old_st=3&main=0&extend_search=false&fw' \
'%5B%5D=email&filterweight=email&_=1603434620022&page={}'.format(data['key'], data['key'], pageNum)
url3 = 'https://www.52wmb.com/async/company?country=*&key={}&type=0&click_search=1&fw%5B%5D=email&filterweight=email' \
'&sort=default&country_data=&search_type=3&is_label=1&st=3&page={}&_=1614564485810'.format(data['key'], pageNum)
headers = {
'cookie': cookiestr,
'user-agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.106 Safari/537.36',
'referer': 'https://www.52wmb.com/buy-3926909090?st=3'
}
try:
# html = requests.get(url=url2, headers=headers) #miao zhu shi 2021/3/1
html = requests.get(url=url3, headers=headers) # miao xin zeng 2021/3/1
time.sleep(random.randint(4, 8))
html.raise_for_status()
html.encoding = html.apparent_encoding
# print(html.url)
html = html.text
information(html, cookiestr)
except Exception as e:
print(e)
pageNum += 1
def information(html, cookiestr):
"""
# 获取公司的信息
# :param html:
# :return:
"""
companys_list = etree.HTML(html).xpath('//li[@class="ssList-li company-fun-li"]')
for i in companys_list[:]:
company_info = collections.OrderedDict()
try:
# number 公司id
c_id = i.xpath('@data-id')[0]
company_info['number'] = c_id
# 公司名称
company_info['name'] = i.xpath('div[1]/div[@class="ssContent"]/a/text()')[0]
# 地区
# company_info['trade_country'] = i.xpath('div[1]/div[@class="ssContent"]/p[@class="ss-Ctag"]/text()')[0]
# 货运次数
company_info['trade_number'] = i.xpath('div[1]/div[@class="ssContent"]/p[@class="ss-Ctag"]/text()')[1].replace(
'总货运', '')
# 更新时间
update_time = i.xpath('div[1]/div[@class="ssContent"]/div[@class="ss-Cjl"]/text()')[0].strip()[-10:]
company_info['update_time'] = update_time
# print("company_info------",company_info)
get_next_level(company_info, cookiestr)
except Exception as e:
print(e)
def get_next_level(company_info, cookiestr):
url = 'https://www.52wmb.com/async/contact'
# url = 'https://www.52wmb.com/buyer/35274878?SRID=Z8KWwphnwpZsag%3D%3D&key=crane&st=2'
data1 = collections.OrderedDict()
data1['company_id'] = company_info['number']
# headers = {
# 'Referer': 'https://www.52wmb.com/buyer/{}'.format(data1['company_id']),
# 'cookie': cookiestr,
# 'user-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.122 Safari/537.36'
# } miao zhu shi 2021/3/1
headers = {
"Accept": "*/*",
# "Accept-Encoding": "gzip,deflate,br",
# "Accept-Language": "zh-CN,zh;q=0.9",
# "Connection": "keep-alive",
# "Content-Length": "18",
"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
"Cookie": "__root_domain_v=.52wmb.com; _qddaz=QD.oxnn0z.8hqf3r.kjru3sro; _QUY=wqXCh8KywpbCmMKVwplrwpzCk8KeZ8KbwpJnZnDCk2htaQ==; _DP=2; company_search_tips=1; _QP=1; promote=proname=auto; _qdda=3-1.1; _qddab=3-cx3zj9.klpw0b65; access_token=13609ab52b8b529a; 52BY_TOKEN=8ed9a124-7a31-11eb-a26e-00155d391e0d; _MNG=1; vip_expired_tips=none; _qddamta_2885855166=3-0",
"DNT": "1",
"Host": "www.52wmb.com",
"Origin": "https://www.52wmb.com",
"Referer": "https://www.52wmb.com/buyer/" + str(data1['company_id']),
"Sec-Fetch-Dest": "empty",
"Sec-Fetch-Mode": "cors",
"Sec-Fetch-Site": "same-origin",
"User-Agent": "Mozilla/5.0(Windows NT 10.0; Win64; x64)AppleWebKit/537.36(KHTML,like Gecko)Chrome/88.0.4324.182 Safari/537.36 Edg/88.0.705.81",
# "X-Requested-With": "XMLHttpRequest",
}
data = {
'company_id': data1['company_id']
}
try:
html = requests.post(url=url, data=data, headers=headers) # data=data,
time.sleep(random.randint(3, 5))
html.raise_for_status()
html.encoding = html.apparent_encoding
# print(html.url)
html = html.text
# print('8'*20,'\n',html)
parser_email(html, company_info)
except Exception as e:
return ''
def parser_email(html, company_info):
html = etree.HTML(html)
try:
tbod = html.xpath('//*[@id="contact-detail"]')
# print("tbod:ss",tbod)
# titlle = html.xpath('//*[@id="companies-detail"]/div/div/div/h1/text()')//*[@id="contact-detail"]/table/tbody
if len(tbod) >= 1:
pass
else:
automation()
except Exception as e:
pass
try:
html = html.xpath('//tbody')[0]
if len( | ) >= 1:
print('有数据')
strt = int(time.time())
company_info['time'] = strt
company_info['tableid'] = 0
company_info['spider'] = 1
try:
# 数据来源
region = html.xpath('tr[4]/td[1]/text()')[1].strip()
except:
region = '-'
company_info['region'] = region
try:
# 联系电话
c_tel = html.xpath('tr[3]/td[1]/text()')[1].strip()
except:
c_tel = '-'
company_info['tel'] = c_tel
try:
# 联 系 人
lianxiren = html.xpath('tr[2]/td[1]/text()')[1].strip()
except:
lianxiren = '-'
company_info['lianxiren'] = lianxiren
try:
# 邮箱地址
c_email = html.xpath('tr[2]/td[2]/text()')[1].strip()
except:
c_email = '-'
company_info['company_email'] = c_email
try:
# 公司地址
c_adress = html.xpath('tr[3]/td[2]/text()')[1].strip()
except:
c_adress = '-'
company_info['company_address'] = c_adress
# try:
# # 数据来源
# c_source = html.xpath('tr[4]/td[1]/text()')[1].strip()
# except:
# c_source = '-'
# company_info['company_source'] = c_source
try:
# 官方网站
c_site = html.xpath('tr[4]/td[2]/a/text()')[0]
except:
c_site = '-'
company_info['company_site'] = c_site
except Exception as e:
print('没有数据' + e)
if len(company_info) > 10:
information_list.append(company_info)
print(json.dumps(company_info, ensure_ascii=False))
else:
print('数据不完整')
def Verification(companys_list):
"""
# 判断页面是否请求成功是否拿到信息
# :param companys_list:
# :return:
"""
all_dict = collections.OrderedDict()
if len(companys_list) > 0:
all_dict['code'] = 200
all_dict['msg'] = "sucess"
else:
all_dict['code'] = 404
all_dict['msg'] = "没有找到该信息,请更换关键词再试试"
test_dict = collections.OrderedDict()
test_dict['number'] = '1111111'
test_dict['name'] = 'name'
test_dict['update_time'] = time.strftime('%Y-%m-%d')
test_dict['other_trade'] = 'None'
# companys_list[-1] = test_dict
all_dict['data'] = companys_list
print(json.dumps(companys_list, ensure_ascii=False))
insert_company(companys_list)
# 插入数据库
if all_dict['code'] == 404:
Verification_code()
def insert_company(mysql_db):
"""
# 把信息插入数据库
# :param data:
# :return:
"""
strt = int(time.time())
try:
conn = pymysql.connect(host=mysql_db['host'],
user=mysql_db['user'],
password=mysql_db['password'],
database=mysql_db['user'],
charset='utf8')
cursor = conn.cursor()
print("数据库连接成功")
except Exception as e:
print(e)
return
if len(information_list) >= 0:
# print("准备插入数据库->",information_list)
# print("数据库信息->",mysql_db['host'],mysql_db['user'],mysql_db['password'],mysql_db['user'])
list1 = [tuple([i["update_time"]] +
[i['name']] +
[i["trade_number"]] +
[i["region"]] +
[i["company_email"]] +
[i["lianxiren"]] +
[i["tel"]] +
[i['company_address']] +
[i["company_site"]] +
[i["time"]] +
[i["tableid"]] +
[i["spider"]]
) for i in information_list]
for data in list1:
insert_sql = 'insert into wp_haiguan_data (day,company_name,number,country,email,lianxiren,phone,address,' \
'website,update_time,tableid,is_spider) values {};'.format(data)
try:
# print("插入数据库的sql语句",insert_sql)
cursor.execute(insert_sql)
conn.commit()
except Exception as e:
print(e)
cursor.close()
conn.close()
else:
print('没有数据')
def automation():
"""
解决验证码,验证码处出来截图并连接打码平台
:return:
"""
driver = webdriver.Chrome(options=chrome_options)
"""
处理selenium被检测这个问题
"""
driver.maximize_window()
driver.get('https://www.52wmb.com/buyer/35206685?SRID=acKVwpdnwpdrbA%3D%3D&key=mask&st=2')
with open('w_cookies.txt', 'r', encoding='utf8') as f:
listCookies = json.loads(f.read())
# 往browser里添加cookies
for cookie in listCookies:
cookie_dict = {
'name': cookie.get('name'),
'value': cookie.get('value'),
"expires": '',
'path': '/',
'httpOnly': False,
'HostOnly': False,
'Secure': False
}
# 添加cookie进行免登陆
driver.add_cookie(cookie_dict)
# 刷新当前页面
driver.refresh()
time.sleep(2)
# 切换iframe
driver.switch_to.frame('mainFrame')
# 定位验证码图片位置
xi = driver.find_element_by_xpath('//*[@id="picture"]')
# 截取整个页面,为下面截取验证码用
driver.save_screenshot('page.png')
# 根据验证码定位元素来获取长宽高
left = xi.location['x']
top = xi.location['y']
right = xi.location['x'] + xi.size['width']
bottom = xi.location['y'] + xi.size['height']
# 打开整个页面截图
im = Image.open('page.png')
# 截取验证码图片
im = im.crop((left, top, right, bottom))
# 保存到本地
im.save('pages.png')
time.sleep(3)
# 连接打码平台
cjy = Chaojiying_Client
chaojiying = Chaojiying_Client('20200807', 'ht123456789',
'907025') # 用户中心>>软件ID 生成一个替换 96001
img = open('pages.png', 'rb').read() # 本地图片文件路径 来替后要加()换 a.jpg 有时WIN系统
msg = chaojiying.PostPic(img, 1902)
yzm = msg['pic_str']
# 输入平台传回来的验证码并输入
driver.find_element_by_id('picture_code').send_keys(yzm)
time.sleep(1)
driver.find_element_by_id('verifi_robot').click()
time.sleep(3)
# 退出当前iframe表单
driver.switch_to.default_content()
driver.refresh()
driver.quit()
def Verification_code():
driver = webdriver.Chrome(options=chrome_options)
driver.get('https://www.52wmb.com/buy-mask?st=2')
driver.maximize_window()
with open(r'w_cookies.txt', 'r+') as f:
list_cookies = json.loads(f.read())
for item in list_cookies:
cookie_dict = {
'name': item.get('name'),
'value': item.get('value'),
"expires": '',
'path': '/',
'httpOnly': False,
'HostOnly': False,
'Secure': False
}
driver.add_cookie(cookie_dict=cookie_dict)
driver.refresh()
# 清空跳转框数值
driver.find_element_by_xpath('//*[@id="company_list_input"]').clear()
# 填入随机页数
driver.find_element_by_xpath('//*[@id="company_list_input"]').send_keys(random.randint(5, 9))
# 点击跳转
driver.find_element_by_xpath('//*[@id="company_list_jump"]').click()
time.sleep(2)
try:
driver.switch_to.frame('mainFrame')
# 定位验证码图片位置
xi = driver.find_element_by_xpath('//*[@id="picture"]')
# 截取整个页面,为下面截取验证码用
driver.save_screenshot('page.png')
# 根据验证码定位元素来获取长宽高
left = xi.location['x']
top = xi.location['y']
right = xi.location['x'] + xi.size['width']
bottom = xi.location['y'] + xi.size['height']
# 打开整个页面截图
im = Image.open('page.png')
# 截取验证码图片
im = im.crop((left, top, right, bottom))
# 保存到本地
im.save('pages.png')
time.sleep(1)
# 连接打码平台
cjy = Chaojiying_Client
chaojiying = Chaojiying_Client('20200807', 'ht123456789',
'907025') # 用户中心>>软件ID 生成一个替换 96001
img = open('pages.png', 'rb').read() # 本地图片文件路径 来替后要加()换 a.jpg 有时WIN系统
msg = chaojiying.PostPic(img, 1902)
yzm = msg['pic_str']
# 输入平台传回来的验证码并输入
driver.find_element_by_id('picture_code').send_keys(yzm)
time.sleep(1)
driver.find_element_by_id('verifi_robot').click()
time.sleep(1)
# 退出当前iframe表单
driver.switch_to.default_content()
except Exception as e:
print(e)
driver.quit()
if __name__ == '__main__':
start = time.time()
for item in txt:
item = item.rstrip("\n")
# print(item,'ee')
with open('/www/wwwroot/www.waiqidian.cn/api/customs/adjunct.json') as f:
con_dict = json.loads(f.read())
cookie_path = con_dict['cookie_filepath']
# # cookie_path = 'w_cookies.txt'
mysql_db = con_dict[item]['datebase']
data_hs = con_dict[item]['hs']
try:
data_hs = data_hs.split(',')
except Exception as e:
print('无需分割' + e)
# print(data_hs)
for hs in data_hs:
data1 = collections.OrderedDict()
data1['key'] = hs
cookie = readCookies(cookie_path)
getHTMLText(data1, cookie)
insert_company(mysql_db)
list1 = [tuple([i["update_time"]] +
[i['name']] +
[i["trade_number"]] +
[i["region"]] +
[i["lianxiren"]] +
[i["tel"]] +
[i["company_address"]] +
[i['company_email']] +
[i["company_site"]] +
[i["time"]] +
[i["tableid"]] +
[i["spider"]]
) for i in information_list]
print("List1aaa:", list1)
print('时间', time.time() - start)
'''
{"day": '日期', "company_name": '公司名称', "number" :'货运次数', "email": '邮箱', "lianxiren": '联系人', "fax" :'电话', "address": '地址',
"website": '网址', "update_time": '添加时间'}
'''
| html | conditional_block |
main.rs | use std::string::ToString;
fn main() |
fn vectors() {
let v: Vec<i32> = Vec::new();
let mut v = vec![1, 2, 3];
match v.binary_search(&16) {
Ok(pos) => v.insert(pos, 16),
Err(_) => v.push(16)
}
match v.binary_search(&12) {
Ok(pos) => v.insert(pos, 12),
Err(pos) => v.insert(pos, 12)
}
println!("Binary Search -> {:?}", v);
let mut v = Vec::new();
v.push(5);
v.push(6);
v.push(7);
v.push(8);
let v = vec![1, 2, 3, 4, 5];
let third: &i32 = &v[2];
println!("The third element is {}", third);
match v.get(2) {
Some(third) => println!("The third element is {}", third),
None => println!("There is no third element."),
}
// When the program has a valid reference, the borrow checker enforces the ownership and
// borrowing rules (covered in Chapter 4) to ensure this reference and any other references to
// the contents of the vector remain valid. Recall the rule that states you can’t have mutable
// and immutable references in the same scope. That rule applies in Listing 8-7, where we hold
// an immutable reference to the first element in a vector and try to add an element to the end,
// which won’t work.
let mut v = vec![1, 2, 3, 4, 5];
let first = &v[0];
v.push(6);
//Below line causes Compilation Error
//println!("The first element is: {}", first);
// This error is due to the way vectors work: adding a new element onto the end of the vector
// might require allocating new memory and copying the old elements to the new space, if there
// isn’t enough room to put all the elements next to each other where the vector currently is.
// In that case, the reference to the first element would be pointing to deallocated memory.
// The borrowing rules prevent programs from ending up in that situation.
let v = vec![100, 32, 57];
for i in &v {
println!("{}", i);
}
// To change the value that the mutable reference refers to, we have to use the dereference
// operator (*) to get to the value in i before we can use the += operator.
let mut v = vec![100, 32, 57];
for i in &mut v {
*i += 50;
}
for i in &v {
println!("{}", i);
}
enum SpreadsheetCell {
Int(i32),
Float(f64),
Text(String),
}
let row = vec![
SpreadsheetCell::Int(3),
SpreadsheetCell::Text(String::from("blue")),
SpreadsheetCell::Float(10.12),
];
}
fn strings() {
let mut s = String::new();
let m = String::from("sdfsdf");
let data = "initial contents";
let s = data.to_string();
// the method also works on a literal directly:
let s = "initial contents".to_string();
let hello = String::from("السلام عليكم");
let hello = String::from("Dobrý den");
let hello = String::from("Hello");
let hello = String::from("שָׁלוֹם");
let hello = String::from("नमस्ते");
let hello = String::from("こんにちは");
let hello = String::from("안녕하세요");
let hello = String::from("你好");
let hello = String::from("Olá");
let hello = String::from("Здравствуйте");
let hello = String::from("Hola");
let mut s1 = String::from("foo");
let s2 = "bar";
s1.push_str(s2);
println!("s2 is {}", s2);
let mut s = String::from("lo");
s.push('l');
use std::ops::Add;
let s1 = String::from("Hello, ");
let s2 = String::from("world!");
// The reason we’re able to use &s2 in the call to add is that the compiler can coerce the
// &String argument into a &str. When we call the add method, Rust uses a deref coercion, which
// here turns &s2 into &s2[..]. We’ll discuss deref coercion in more depth in Chapter 15.
// Because add does not take ownership of the s parameter, s2 will still be a valid String after
// this operation.
// looks like it will copy both strings and create a new one, this statement actually takes
// ownership of s1, appends a copy of the contents of s2, and then returns ownership of the
// result. In other words, it looks like it’s making a lot of copies but isn’t; the
// implementation is more efficient than copying.
//let s3 = s1.add(&s2);
let s3 = s1 + &s2;
println!("{}", s3);
let s1 = String::from("tic");
let s2 = String::from("tac");
let s3 = String::from("toe");
//let s = s1 + "-" + &s2 + "-" + &s3;
let s = format!("{}-{}-{}", s1, s2, s3);
println!("{}", s);
// The version of the code using format! is much easier to read and doesn’t take ownership of
// any of its parameters.
println!("{}", s1);
// A String is a wrapper over a Vec<u8>
let len = String::from("Hola").len();
// In this case, len will be 4, which means the vector storing the string “Hola” is 4 bytes long.
// Each of these letters takes 1 byte when encoded in UTF-8
println!("{}", len);
let len = String::from("Здравствуйте").len();
println!("{}", len);
// It takes 24 bytes to encode “Здравствуйте” in UTF-8, because each Unicode scalar value in that string
// takes 2 bytes of storage. Therefore, an index into the string’s bytes will not always
// correlate to a valid Unicode scalar value. To demonstrate, consider this invalid Rust code:
// let hello = "Здравствуйте";
// let answer = &hello[0];
// println!("{}", answer);
// error[E0277]: the type `str` cannot be indexed by `{integer}`
// Another point about UTF-8 is that there are actually three relevant ways to look at strings
// from Rust’s perspective: as bytes, scalar values, and grapheme clusters (the closest thing to
// what we would call letters).
// “नमस्ते”
// Bytes: [224, 164, 168, 224, 164, 174, 224, 164, 184, 224, 165, 141, 224, 164, 164, 224, 165, 135]
// Unicode scalar values (Rust's char type): ['न', 'म', 'स', '्', 'त', 'े']
// There are six char values here, but the fourth and sixth are not letters: they’re diacritics
// that don’t make sense on their own
// Grapheme clusters: ["न", "म", "स्", "ते"]
let namaste = "नमस्ते";
println!("{}", &namaste[0..12]);
let hello = "Здравствуйте";
let s = &hello[0..4];
println!("{}", s);
for c in "नमस्ते".chars() {
println!("{}", c);
}
for b in "नमस्ते".bytes() {
print!("{},", b);
}
// But be sure to remember that valid Unicode scalar values may be made up of more than 1 byte.
// Getting grapheme clusters from strings is complex, so this functionality is not provided by
// the standard library. Crates are available on crates.io if this is the functionality you need.
}
fn hashmaps() {
use std::collections::HashMap;
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.insert(String::from("Yellow"), 50);
// Note that we need to first use the HashMap from the collections portion of the standard
// library. Of our three common collections, this one is the least often used, so it’s not
// included in the features brought into scope automatically in the prelude.
// The type annotation HashMap<_, _> is needed here because it’s possible to collect into many
// different data structures and Rust doesn’t know which you want unless you specify. For the
// parameters for the key and value types, however, we use underscores, and Rust can infer the
// types that the hash map contains based on the types of the data in the vectors.
let teams = vec![String::from("Blue"), String::from("Yellow")];
let initial_scores = vec![10, 50];
println!("");
let scores: HashMap<_, _> = teams.iter().zip(initial_scores.iter()).collect();
for (k, v) in &scores {
println!("{},{}", k, v);
}
let score = scores.get(&String::from("Blue"));
match score {
Some(s) => println!("{}", s),
None => ()
}
// For types that implement the Copy trait, like i32, the values are copied into the hash map.
// For owned values like String, the values will be moved and the hash map will be the owner of
// those values
let field_name = String::from("Favorite color");
let field_value = String::from("Blue");
let mut map = HashMap::new();
map.insert(field_name, field_value);
//error[E0382]: borrow of moved value: `field_name`
//println!("{}", field_name);
// If we insert references to values into the hash map, the values won’t be moved into the hash
// map. The values that the references point to must be valid for at least as long as the hash
// map is valid.
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.insert(String::from("Yellow"), 50);
// Here, score will have the value that’s associated with the Blue team, and the result will be
// Some(&10). The result is wrapped in Some because get returns an Option<&V>
let team_name = String::from("Blue");
// get borrows key so its passed using &
let score = scores.get(&team_name);
match score {
Some(num) => println!("{}", num),
None => ()
}
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.insert(String::from("Yellow"), 50);
for (key, value) in &scores {
println!("{}: {}", key, value);
}
// Overwriting a Value
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.insert(String::from("Blue"), 25);
println!("{:?}", scores);
// Only Inserting a Value If the Key Has No Value
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.entry(String::from("Yellow")).or_insert(50);
scores.entry(String::from("Blue")).or_insert(50);
println!("{:?}", scores);
// Updating a Value Based on the Old Value
let text = "hello world wonderful world";
let mut map = HashMap::new();
for word in text.split_whitespace() {
let count = map.entry(word).or_insert(0);
*count += 1;
}
println!("{:?}", map);
// The or_insert method actually returns a mutable reference (&mut V) to the value for this key.
// Here we store that mutable reference in the count variable, so in order to assign to that
// value, we must first dereference count using the asterisk (*). The mutable reference goes out
// of scope at the end of the for loop, so all of these changes are safe and allowed by the
// borrowing rules.
// Hashing Functions
// By default, HashMap uses a “cryptographically strong”1 hashing function that can provide
// resistance to Denial of Service (DoS) attacks. This is not the fastest hashing algorithm
// available, but the trade-off for better security that comes with the drop in performance is
// worth it. If you profile your code and find that the default hash function is too slow for
// your purposes, you can switch to another function by specifying a different hasher. A hasher
// is a type that implements the BuildHasher trait. We’ll talk about traits and how to implement
// them in Chapter 10. You don’t necessarily have to implement your own hasher from scratch;
// crates.io has libraries shared by other Rust users that provide hashers implementing many
// common hashing algorithms.
} | {
vectors();
strings();
hashmaps();
} | identifier_body |
main.rs | use std::string::ToString;
fn main() {
vectors();
strings();
hashmaps();
}
fn vectors() {
let v: Vec<i32> = Vec::new();
let mut v = vec![1, 2, 3];
match v.binary_search(&16) {
Ok(pos) => v.insert(pos, 16),
Err(_) => v.push(16)
}
match v.binary_search(&12) {
Ok(pos) => v.insert(pos, 12),
Err(pos) => v.insert(pos, 12)
}
println!("Binary Search -> {:?}", v);
let mut v = Vec::new();
v.push(5);
v.push(6);
v.push(7);
v.push(8);
let v = vec![1, 2, 3, 4, 5];
let third: &i32 = &v[2];
println!("The third element is {}", third);
match v.get(2) {
Some(third) => println!("The third element is {}", third),
None => println!("There is no third element."),
}
// When the program has a valid reference, the borrow checker enforces the ownership and
// borrowing rules (covered in Chapter 4) to ensure this reference and any other references to
// the contents of the vector remain valid. Recall the rule that states you can’t have mutable
// and immutable references in the same scope. That rule applies in Listing 8-7, where we hold
// an immutable reference to the first element in a vector and try to add an element to the end,
// which won’t work.
let mut v = vec![1, 2, 3, 4, 5];
let first = &v[0];
v.push(6);
//Below line causes Compilation Error
//println!("The first element is: {}", first);
// This error is due to the way vectors work: adding a new element onto the end of the vector
// might require allocating new memory and copying the old elements to the new space, if there
// isn’t enough room to put all the elements next to each other where the vector currently is.
// In that case, the reference to the first element would be pointing to deallocated memory.
// The borrowing rules prevent programs from ending up in that situation.
let v = vec![100, 32, 57];
for i in &v {
println!("{}", i);
}
// To change the value that the mutable reference refers to, we have to use the dereference
// operator (*) to get to the value in i before we can use the += operator.
let mut v = vec![100, 32, 57];
for i in &mut v {
*i += 50;
}
for i in &v {
println!("{}", i);
}
enum SpreadsheetCell {
Int(i32),
Float(f64),
Text(String),
}
let row = vec![
SpreadsheetCell::Int(3),
SpreadsheetCell::Text(String::from("blue")),
SpreadsheetCell::Float(10.12),
];
}
fn strings() {
let mut s = String::new();
let m = String::from("sdfsdf");
let data = "initial contents";
let s = data.to_string();
// the method also works on a literal directly:
let s = "initial contents".to_string();
let hello = String::from("السلام عليكم"); | let hello = String::from("שָׁלוֹם");
let hello = String::from("नमस्ते");
let hello = String::from("こんにちは");
let hello = String::from("안녕하세요");
let hello = String::from("你好");
let hello = String::from("Olá");
let hello = String::from("Здравствуйте");
let hello = String::from("Hola");
let mut s1 = String::from("foo");
let s2 = "bar";
s1.push_str(s2);
println!("s2 is {}", s2);
let mut s = String::from("lo");
s.push('l');
use std::ops::Add;
let s1 = String::from("Hello, ");
let s2 = String::from("world!");
// The reason we’re able to use &s2 in the call to add is that the compiler can coerce the
// &String argument into a &str. When we call the add method, Rust uses a deref coercion, which
// here turns &s2 into &s2[..]. We’ll discuss deref coercion in more depth in Chapter 15.
// Because add does not take ownership of the s parameter, s2 will still be a valid String after
// this operation.
// looks like it will copy both strings and create a new one, this statement actually takes
// ownership of s1, appends a copy of the contents of s2, and then returns ownership of the
// result. In other words, it looks like it’s making a lot of copies but isn’t; the
// implementation is more efficient than copying.
//let s3 = s1.add(&s2);
let s3 = s1 + &s2;
println!("{}", s3);
let s1 = String::from("tic");
let s2 = String::from("tac");
let s3 = String::from("toe");
//let s = s1 + "-" + &s2 + "-" + &s3;
let s = format!("{}-{}-{}", s1, s2, s3);
println!("{}", s);
// The version of the code using format! is much easier to read and doesn’t take ownership of
// any of its parameters.
println!("{}", s1);
// A String is a wrapper over a Vec<u8>
let len = String::from("Hola").len();
// In this case, len will be 4, which means the vector storing the string “Hola” is 4 bytes long.
// Each of these letters takes 1 byte when encoded in UTF-8
println!("{}", len);
let len = String::from("Здравствуйте").len();
println!("{}", len);
// It takes 24 bytes to encode “Здравствуйте” in UTF-8, because each Unicode scalar value in that string
// takes 2 bytes of storage. Therefore, an index into the string’s bytes will not always
// correlate to a valid Unicode scalar value. To demonstrate, consider this invalid Rust code:
// let hello = "Здравствуйте";
// let answer = &hello[0];
// println!("{}", answer);
// error[E0277]: the type `str` cannot be indexed by `{integer}`
// Another point about UTF-8 is that there are actually three relevant ways to look at strings
// from Rust’s perspective: as bytes, scalar values, and grapheme clusters (the closest thing to
// what we would call letters).
// “नमस्ते”
// Bytes: [224, 164, 168, 224, 164, 174, 224, 164, 184, 224, 165, 141, 224, 164, 164, 224, 165, 135]
// Unicode scalar values (Rust's char type): ['न', 'म', 'स', '्', 'त', 'े']
// There are six char values here, but the fourth and sixth are not letters: they’re diacritics
// that don’t make sense on their own
// Grapheme clusters: ["न", "म", "स्", "ते"]
let namaste = "नमस्ते";
println!("{}", &namaste[0..12]);
let hello = "Здравствуйте";
let s = &hello[0..4];
println!("{}", s);
for c in "नमस्ते".chars() {
println!("{}", c);
}
for b in "नमस्ते".bytes() {
print!("{},", b);
}
// But be sure to remember that valid Unicode scalar values may be made up of more than 1 byte.
// Getting grapheme clusters from strings is complex, so this functionality is not provided by
// the standard library. Crates are available on crates.io if this is the functionality you need.
}
fn hashmaps() {
use std::collections::HashMap;
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.insert(String::from("Yellow"), 50);
// Note that we need to first use the HashMap from the collections portion of the standard
// library. Of our three common collections, this one is the least often used, so it’s not
// included in the features brought into scope automatically in the prelude.
// The type annotation HashMap<_, _> is needed here because it’s possible to collect into many
// different data structures and Rust doesn’t know which you want unless you specify. For the
// parameters for the key and value types, however, we use underscores, and Rust can infer the
// types that the hash map contains based on the types of the data in the vectors.
let teams = vec![String::from("Blue"), String::from("Yellow")];
let initial_scores = vec![10, 50];
println!("");
let scores: HashMap<_, _> = teams.iter().zip(initial_scores.iter()).collect();
for (k, v) in &scores {
println!("{},{}", k, v);
}
let score = scores.get(&String::from("Blue"));
match score {
Some(s) => println!("{}", s),
None => ()
}
// For types that implement the Copy trait, like i32, the values are copied into the hash map.
// For owned values like String, the values will be moved and the hash map will be the owner of
// those values
let field_name = String::from("Favorite color");
let field_value = String::from("Blue");
let mut map = HashMap::new();
map.insert(field_name, field_value);
//error[E0382]: borrow of moved value: `field_name`
//println!("{}", field_name);
// If we insert references to values into the hash map, the values won’t be moved into the hash
// map. The values that the references point to must be valid for at least as long as the hash
// map is valid.
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.insert(String::from("Yellow"), 50);
// Here, score will have the value that’s associated with the Blue team, and the result will be
// Some(&10). The result is wrapped in Some because get returns an Option<&V>
let team_name = String::from("Blue");
// get borrows key so its passed using &
let score = scores.get(&team_name);
match score {
Some(num) => println!("{}", num),
None => ()
}
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.insert(String::from("Yellow"), 50);
for (key, value) in &scores {
println!("{}: {}", key, value);
}
// Overwriting a Value
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.insert(String::from("Blue"), 25);
println!("{:?}", scores);
// Only Inserting a Value If the Key Has No Value
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.entry(String::from("Yellow")).or_insert(50);
scores.entry(String::from("Blue")).or_insert(50);
println!("{:?}", scores);
// Updating a Value Based on the Old Value
let text = "hello world wonderful world";
let mut map = HashMap::new();
for word in text.split_whitespace() {
let count = map.entry(word).or_insert(0);
*count += 1;
}
println!("{:?}", map);
// The or_insert method actually returns a mutable reference (&mut V) to the value for this key.
// Here we store that mutable reference in the count variable, so in order to assign to that
// value, we must first dereference count using the asterisk (*). The mutable reference goes out
// of scope at the end of the for loop, so all of these changes are safe and allowed by the
// borrowing rules.
// Hashing Functions
// By default, HashMap uses a “cryptographically strong”1 hashing function that can provide
// resistance to Denial of Service (DoS) attacks. This is not the fastest hashing algorithm
// available, but the trade-off for better security that comes with the drop in performance is
// worth it. If you profile your code and find that the default hash function is too slow for
// your purposes, you can switch to another function by specifying a different hasher. A hasher
// is a type that implements the BuildHasher trait. We’ll talk about traits and how to implement
// them in Chapter 10. You don’t necessarily have to implement your own hasher from scratch;
// crates.io has libraries shared by other Rust users that provide hashers implementing many
// common hashing algorithms.
} | let hello = String::from("Dobrý den");
let hello = String::from("Hello"); | random_line_split |
main.rs | use std::string::ToString;
fn main() {
vectors();
strings();
hashmaps();
}
fn vectors() {
let v: Vec<i32> = Vec::new();
let mut v = vec![1, 2, 3];
match v.binary_search(&16) {
Ok(pos) => v.insert(pos, 16),
Err(_) => v.push(16)
}
match v.binary_search(&12) {
Ok(pos) => v.insert(pos, 12),
Err(pos) => v.insert(pos, 12)
}
println!("Binary Search -> {:?}", v);
let mut v = Vec::new();
v.push(5);
v.push(6);
v.push(7);
v.push(8);
let v = vec![1, 2, 3, 4, 5];
let third: &i32 = &v[2];
println!("The third element is {}", third);
match v.get(2) {
Some(third) => println!("The third element is {}", third),
None => println!("There is no third element."),
}
// When the program has a valid reference, the borrow checker enforces the ownership and
// borrowing rules (covered in Chapter 4) to ensure this reference and any other references to
// the contents of the vector remain valid. Recall the rule that states you can’t have mutable
// and immutable references in the same scope. That rule applies in Listing 8-7, where we hold
// an immutable reference to the first element in a vector and try to add an element to the end,
// which won’t work.
let mut v = vec![1, 2, 3, 4, 5];
let first = &v[0];
v.push(6);
//Below line causes Compilation Error
//println!("The first element is: {}", first);
// This error is due to the way vectors work: adding a new element onto the end of the vector
// might require allocating new memory and copying the old elements to the new space, if there
// isn’t enough room to put all the elements next to each other where the vector currently is.
// In that case, the reference to the first element would be pointing to deallocated memory.
// The borrowing rules prevent programs from ending up in that situation.
let v = vec![100, 32, 57];
for i in &v {
println!("{}", i);
}
// To change the value that the mutable reference refers to, we have to use the dereference
// operator (*) to get to the value in i before we can use the += operator.
let mut v = vec![100, 32, 57];
for i in &mut v {
*i += 50;
}
for i in &v {
println!("{}", i);
}
enum SpreadsheetCell {
Int(i32),
Float(f64),
Text(String),
}
let row = vec![
SpreadsheetCell::Int(3),
SpreadsheetCell::Text(String::from("blue")),
SpreadsheetCell::Float(10.12),
];
}
fn string | let mut s = String::new();
let m = String::from("sdfsdf");
let data = "initial contents";
let s = data.to_string();
// the method also works on a literal directly:
let s = "initial contents".to_string();
let hello = String::from("السلام عليكم");
let hello = String::from("Dobrý den");
let hello = String::from("Hello");
let hello = String::from("שָׁלוֹם");
let hello = String::from("नमस्ते");
let hello = String::from("こんにちは");
let hello = String::from("안녕하세요");
let hello = String::from("你好");
let hello = String::from("Olá");
let hello = String::from("Здравствуйте");
let hello = String::from("Hola");
let mut s1 = String::from("foo");
let s2 = "bar";
s1.push_str(s2);
println!("s2 is {}", s2);
let mut s = String::from("lo");
s.push('l');
use std::ops::Add;
let s1 = String::from("Hello, ");
let s2 = String::from("world!");
// The reason we’re able to use &s2 in the call to add is that the compiler can coerce the
// &String argument into a &str. When we call the add method, Rust uses a deref coercion, which
// here turns &s2 into &s2[..]. We’ll discuss deref coercion in more depth in Chapter 15.
// Because add does not take ownership of the s parameter, s2 will still be a valid String after
// this operation.
// looks like it will copy both strings and create a new one, this statement actually takes
// ownership of s1, appends a copy of the contents of s2, and then returns ownership of the
// result. In other words, it looks like it’s making a lot of copies but isn’t; the
// implementation is more efficient than copying.
//let s3 = s1.add(&s2);
let s3 = s1 + &s2;
println!("{}", s3);
let s1 = String::from("tic");
let s2 = String::from("tac");
let s3 = String::from("toe");
//let s = s1 + "-" + &s2 + "-" + &s3;
let s = format!("{}-{}-{}", s1, s2, s3);
println!("{}", s);
// The version of the code using format! is much easier to read and doesn’t take ownership of
// any of its parameters.
println!("{}", s1);
// A String is a wrapper over a Vec<u8>
let len = String::from("Hola").len();
// In this case, len will be 4, which means the vector storing the string “Hola” is 4 bytes long.
// Each of these letters takes 1 byte when encoded in UTF-8
println!("{}", len);
let len = String::from("Здравствуйте").len();
println!("{}", len);
// It takes 24 bytes to encode “Здравствуйте” in UTF-8, because each Unicode scalar value in that string
// takes 2 bytes of storage. Therefore, an index into the string’s bytes will not always
// correlate to a valid Unicode scalar value. To demonstrate, consider this invalid Rust code:
// let hello = "Здравствуйте";
// let answer = &hello[0];
// println!("{}", answer);
// error[E0277]: the type `str` cannot be indexed by `{integer}`
// Another point about UTF-8 is that there are actually three relevant ways to look at strings
// from Rust’s perspective: as bytes, scalar values, and grapheme clusters (the closest thing to
// what we would call letters).
// “नमस्ते”
// Bytes: [224, 164, 168, 224, 164, 174, 224, 164, 184, 224, 165, 141, 224, 164, 164, 224, 165, 135]
// Unicode scalar values (Rust's char type): ['न', 'म', 'स', '्', 'त', 'े']
// There are six char values here, but the fourth and sixth are not letters: they’re diacritics
// that don’t make sense on their own
// Grapheme clusters: ["न", "म", "स्", "ते"]
let namaste = "नमस्ते";
println!("{}", &namaste[0..12]);
let hello = "Здравствуйте";
let s = &hello[0..4];
println!("{}", s);
for c in "नमस्ते".chars() {
println!("{}", c);
}
for b in "नमस्ते".bytes() {
print!("{},", b);
}
// But be sure to remember that valid Unicode scalar values may be made up of more than 1 byte.
// Getting grapheme clusters from strings is complex, so this functionality is not provided by
// the standard library. Crates are available on crates.io if this is the functionality you need.
}
fn hashmaps() {
use std::collections::HashMap;
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.insert(String::from("Yellow"), 50);
// Note that we need to first use the HashMap from the collections portion of the standard
// library. Of our three common collections, this one is the least often used, so it’s not
// included in the features brought into scope automatically in the prelude.
// The type annotation HashMap<_, _> is needed here because it’s possible to collect into many
// different data structures and Rust doesn’t know which you want unless you specify. For the
// parameters for the key and value types, however, we use underscores, and Rust can infer the
// types that the hash map contains based on the types of the data in the vectors.
let teams = vec![String::from("Blue"), String::from("Yellow")];
let initial_scores = vec![10, 50];
println!("");
let scores: HashMap<_, _> = teams.iter().zip(initial_scores.iter()).collect();
for (k, v) in &scores {
println!("{},{}", k, v);
}
let score = scores.get(&String::from("Blue"));
match score {
Some(s) => println!("{}", s),
None => ()
}
// For types that implement the Copy trait, like i32, the values are copied into the hash map.
// For owned values like String, the values will be moved and the hash map will be the owner of
// those values
let field_name = String::from("Favorite color");
let field_value = String::from("Blue");
let mut map = HashMap::new();
map.insert(field_name, field_value);
//error[E0382]: borrow of moved value: `field_name`
//println!("{}", field_name);
// If we insert references to values into the hash map, the values won’t be moved into the hash
// map. The values that the references point to must be valid for at least as long as the hash
// map is valid.
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.insert(String::from("Yellow"), 50);
// Here, score will have the value that’s associated with the Blue team, and the result will be
// Some(&10). The result is wrapped in Some because get returns an Option<&V>
let team_name = String::from("Blue");
// get borrows key so its passed using &
let score = scores.get(&team_name);
match score {
Some(num) => println!("{}", num),
None => ()
}
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.insert(String::from("Yellow"), 50);
for (key, value) in &scores {
println!("{}: {}", key, value);
}
// Overwriting a Value
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.insert(String::from("Blue"), 25);
println!("{:?}", scores);
// Only Inserting a Value If the Key Has No Value
let mut scores = HashMap::new();
scores.insert(String::from("Blue"), 10);
scores.entry(String::from("Yellow")).or_insert(50);
scores.entry(String::from("Blue")).or_insert(50);
println!("{:?}", scores);
// Updating a Value Based on the Old Value
let text = "hello world wonderful world";
let mut map = HashMap::new();
for word in text.split_whitespace() {
let count = map.entry(word).or_insert(0);
*count += 1;
}
println!("{:?}", map);
// The or_insert method actually returns a mutable reference (&mut V) to the value for this key.
// Here we store that mutable reference in the count variable, so in order to assign to that
// value, we must first dereference count using the asterisk (*). The mutable reference goes out
// of scope at the end of the for loop, so all of these changes are safe and allowed by the
// borrowing rules.
// Hashing Functions
// By default, HashMap uses a “cryptographically strong”1 hashing function that can provide
// resistance to Denial of Service (DoS) attacks. This is not the fastest hashing algorithm
// available, but the trade-off for better security that comes with the drop in performance is
// worth it. If you profile your code and find that the default hash function is too slow for
// your purposes, you can switch to another function by specifying a different hasher. A hasher
// is a type that implements the BuildHasher trait. We’ll talk about traits and how to implement
// them in Chapter 10. You don’t necessarily have to implement your own hasher from scratch;
// crates.io has libraries shared by other Rust users that provide hashers implementing many
// common hashing algorithms.
} | s() {
| identifier_name |
protocol.py | from twisted.internet.protocol import ClientFactory, Protocol, DatagramProtocol
from twisted.internet import reactor, task
import logging
import struct
from gbot.util import split_by
from gbot.models import Account
import time, json
class LocalUDPInfo(DatagramProtocol):
node_io_addr = ('0.0.0.0', 8124)
bots = []
def __init__(self):
print "UDPInfo start"
def send_json(self, obj): self.transport.write(json.dumps(obj), self.node_io_addr)
def datagramReceived(self, data, addr):
msg = json.loads(data)
action = msg.get("action")
print data
if action == "start":
for bot in self.bots:
for login in [bot.logins.get(id) for id, online in bot.online.items() if online]:
self.player_came(bot.name, login)
def message_received(self, room, by, body):
self.send_json({
"action": "message",
"room": room,
"by": by,
"body": body
})
def player_came(self, room, login):
self.send_json({
"action": "player_came",
"room": room,
"login": login
})
def player_left(self, room, login):
self.send_json({
"action": "player_left",
"room": room,
"login": login
})
#udp_info = LocalUDPInfo()
#reactor.listenUDP(8125, udp_info)
class GarenaRSUDPProtocol(DatagramProtocol):
def __init__(self, factory):
self.factory = factory
self.msg_seq = int(time.time()) # because of how large unsigned int is, it is ok to do this
self.msg_blob = "000000005c0000003f0000f800000040b40000000000000000000000ccff41007200690061006c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
self.msg_blob = self.msg_blob.decode('hex')
print "UDP start"
self.poll_messages()
self.hello_everybody_lc = task.LoopingCall(self.hello_everybody).start(30, False)
self.tries = {}
self.resenders = {}
self.received = []
def poll_messages(self):
self.factory.bot.messages.get().addCallback(self.send_message)
def hello_everybody(self):
# print "UDP hello => all"
for id in [id for id, online in self.factory.bot.online.items() if online]:
self.say_hello(id)
def say_hello(self, id):
addr = self.factory.bot.addr(id)
if addr:
hello_packet = struct.pack("< I I 8x", 2, self.factory.account.id)
self.transport.write(hello_packet, addr)
def datagramReceived(self, data, host_port):
host, port = host_port
packet_type = ord(data[0])
if packet_type == 2:
self.handle_hello(data)
if packet_type == 15:
pass
# this is {HELLO REPLY} packet, we don't really need it, so -> ignore
#print "UDP hello reply <= ", host_port
#
if packet_type == 51:
self.handle_message(data)
if packet_type == 57:
self.invalidate_resender(data)
def handle_message(self, data):
data = data[1:]
# print len(data)
# print data.encode('hex')
format = "< I I I 96x I"
unpacked = struct.unpack(format, data[:112])
seq, from_id, to_id, length = unpacked
msg = data[112:].decode('utf_16_le', 'ignore')
# print self.factory.account.login + " => " + msg
# player = self.tcp.players.get(from_id)
# me = self.tcp.players.get(to_id)
addr = self.factory.bot.addr(from_id)
# print addr
key = "%s#%s" % (from_id, seq)
if addr and not key in self.received:
self.received.append(key)
reactor.callLater(10, lambda: self.received.remove(key))
# print "{MESSAGE #%s from %s of length %s(bytes)}" % (seq, login, length)
# print "{MSG BODY => %s}" % msg
reply = struct.pack("< B I I 8x", 57, seq, self.factory.account.id)
self.transport.write(reply, addr)
reactor.callLater(0, self.factory.bot.message_received, from_id, msg, True)
# self.send_message(me, player, u"you said => " + msg)
def send_message(self, player_and_msg):
self.poll_messages()
to_player, msg = player_and_msg
addr = self.factory.bot.addr(to_player.id)
if addr:
self.msg_seq += 1
seq = self.msg_seq
from_id = self.factory.account.id
length = len(msg) * 2
header = struct.pack("< B I I I", 51, seq, from_id, to_player.id)
packet = header + self.msg_blob + struct.pack("< I", length) + msg.encode('utf_16_le', 'ignore')
#self.transport.write(packet, addr)
self.resenders[seq] = task.LoopingCall(self.resend_message, seq, packet, addr)
self.tries[seq] = 0
self.resenders[seq].start(0.4)
# print "{MESSAGE to %s}" % to_player.login
# print "{MSG BODY => %s}" % msg
def | (self, data):
seq = struct.unpack("<I", data[1:5])[0]
# print "remote => i got #%s" % seq
lc = self.resenders.get(seq)
if lc:
lc.stop()
del self.resenders[seq]
del self.tries[seq]
def resend_message(self, seq, packet, addr):
lc = self.resenders.get(seq)
if lc:
self.tries[seq] += 1
self.transport.write(packet, addr)
# print "sending #%s, tries: %s" % (seq, self.tries[seq])
if self.tries[seq] > 6:
lc.stop()
del self.resenders[seq]
del self.tries[seq]
def handle_hello(self, data):
id = struct.unpack("<I", data[4:8])[0]
addr = self.factory.bot.addr(id)
if addr:
reply = struct.pack("< I I 4x I", 15, self.factory.account.id, id)
self.transport.write(reply, addr)
class GarenaRSProtocol(Protocol):
def __init__(self):
self.buffer = ''
def write(self, data): self.transport.write(data)
def write_hex(self, data): self.write(data.decode('hex'))
def connectionMade(self):
self.log = logging.getLogger("GRSP[%s]" % self.factory.account.login)
self.log.info(u"connection made, sending auth packet")
self.write_hex(self.factory.packet)
self.log.info(u"issuing disconnect in 45 seconds if Garena did not respond with WELCOME")
self.timeout_deferred = reactor.callLater(45, self.timeout)
def timeout(self):
self.log.error(u"Garena did not send WELCOME packet in 45 seconds, dropping connection now")
self.transport.loseConnection()
def dataReceived(self, data):
self.buffer += data
self.decodeHeader()
def decodeHeader(self):
if len(self.buffer) >= 5:
header = struct.unpack("< I B", self.buffer[:5])
if len(self.buffer) >= header[0]+4:
packet = self.buffer[5:header[0]+4]
self.buffer = self.buffer[header[0]+4:]
if len(self.buffer) >= 5:
reactor.callLater(0, self.decodeHeader)
self.decodePacket(header[1], packet)
def decodePacket(self, packet_type, data):
if self.factory.write_only and packet_type != 48: return
getattr(self, 'handle_' + {
34: 'player_came',
35: 'player_left',
37: 'message',
44: 'userlist',
48: 'welcome'
}.get(packet_type, 'non_existing'), lambda data: None)(data)
def handle_non_existing(self, data):
self.log.info(u"??? -> %s", data.encode('hex'))
def handle_player_left(self, data):
id = struct.unpack("< I", data)[0]
self.factory.bot.player_left(id)
def handle_player_came(self, data):
format = "< I 15s 6x 1B 2x 4B 32x"
unpacked = struct.unpack(format, data)
id = unpacked[0]
login = unicode(unpacked[1].rstrip(chr(0)), 'ascii', 'ignore')
ip = "%s.%s.%s.%s" % unpacked[3:]
lvl = unpacked[2]
port = struct.unpack(">H", data[40:42])[0]
if not Account.get_or(pk = id):
self.factory.bot.player_came(id, login, ip, port, lvl)
else:
self.log.info(u"%s is bot's account -> do nothing", login)
#if hasattr(self.factory, 'udp_protocol'):
# self.factory.udp_protocol.say_hello(id)
def handle_userlist(self, data):
self.log.info(u"cancelling TIMEOUT")
self.factory.connection = self
timeout_deferred = getattr(self, 'timeout_deferred', None)
if timeout_deferred and timeout_deferred.active:
timeout_deferred.cancel()
del self.timeout_deferred
self.log.info(u"got userlist")
for user_data in [ud for ud in split_by(data[8:], 64) if len(ud) == 64]:
self.handle_player_came(user_data)
def handle_message(self, data):
id = struct.unpack("<I", data[4:8])[0]
message = unicode(data[12:], 'utf_16_le', 'ignore').strip()
reactor.callLater(0, self.factory.bot.message_received, id, message)
def handle_welcome(self, data):
self.log.info(u"got WELCOME")
self.log.info(u"cancelling TIMEOUT")
self.factory.connection = self
timeout_deferred = getattr(self, 'timeout_deferred', None)
if timeout_deferred and timeout_deferred.active:
try:
timeout_deferred.cancel()
except:
pass
del self.timeout_deferred
class GarenaRSFactory(ClientFactory):
protocol = GarenaRSProtocol
def __init__(self, bot, account, write_only = True,
send_kicks = False, send_anns = True, send_pvts = True):
self.bot = bot
self.account = account
self.write_only = write_only
self.connection = None
self.log = logging.getLogger("GRSF[%s:%s]" % (bot.name, account.login))
self.log.info(u"initialized")
self.packet = account.packet.replace("{roomid}",
struct.pack("< I", bot.room.id).encode('hex'))
# deferreds
if send_anns: self.bot.announces.get().addCallback(self.send_announce)
if send_pvts: self.bot.privates.get().addCallback(self.send_private)
if send_kicks: self.bot.kicks.get().addCallback(self.send_kick)
#only now enable udp for ospl.slave
#if account.port > 15000:
# self.udp_protocol = GarenaRSUDPProtocol(self)
# self.udp = reactor.listenUDP(account.port, self.udp_protocol, interface = '212.154.211.111')
#else:
# self.udp = None
self.connect()
def connect(self):
self.log.info(u"issuing roomserver connection")
reactor.connectTCP(self.bot.room.ip, 8687, self)
def reconnect(self):
self.log.info(u"issuing reconnect in 5 seconds")
self.connection = None
if not self.write_only:
self.log.info(u"lost connection on reading bot, moving ip_list to stale")
for id in self.bot.ip_list.keys():
reactor.callLater(0, self.bot.player_left, id)
reactor.callLater(5, self.connect)
def startedConnecting(self, connector):
self.log.info(u"started connecting")
def clientConnectionLost(self, connector, reason):
self.log.error(u"connection lost, reason: %s", reason)
self.reconnect()
def clientConnectionFailed(self, connector, reason):
self.log.error("uconnection failed, reason: %s", reason)
self.reconnect()
def send_kick(self, (player_id, reason)):
self.kick_deferred = self.bot.kicks.get().addCallback(self.send_kick)
if self.connection:
self.log.debug(u"doing kick => %s @ %s", player_id, reason)
format = "< I b I I I"
packet = struct.pack(format, len(reason) + 13, 40, self.account.id,
player_id, len(reason)) + reason.encode('ascii', 'ignore')
self.connection.write(packet)
# remove 15 min ban, that happens after player is kicked
player_login = self.bot.logins.get(player_id, u'').encode('ascii', 'ignore')
if player_login and False:
self.log.debug(u"removing 15min ban => %s", player_login)
packet = struct.pack("< I b I", len(player_login) + 10, 120, self.bot.room.id) + \
player_login + ("\0" * 5)
self.connection.write(packet)
else:
self.log.error(u"kick : no connection")
def send_private(self, (player_id, message)):
reactor.callLater(0.55, lambda: self.bot.privates.get().addCallback(self.send_private))
if self.connection:
format = "< I b I I"
packet = struct.pack(format, len(message) * 2 + 9, 127,
self.account.id,
player_id) + message.encode('utf_16_le', 'ignore')
self.connection.write(packet)
else:
self.log.error(u"pvt : no connection")
def send_announce(self, message):
reactor.callLater(1.1, lambda: self.bot.announces.get().addCallback(self.send_announce))
if self.connection:
self.log.debug(u"ANN -> %s", message)
format = "< I b I"
packet = struct.pack(format, len(message) * 2 + 5, 48,
self.bot.room.id) + message.encode('utf_16_le', 'ignore')
self.connection.write(packet)
else:
self.log.error(u"ann : no connection")
| invalidate_resender | identifier_name |
protocol.py | from twisted.internet.protocol import ClientFactory, Protocol, DatagramProtocol
from twisted.internet import reactor, task
import logging
import struct
from gbot.util import split_by
from gbot.models import Account
import time, json
class LocalUDPInfo(DatagramProtocol):
node_io_addr = ('0.0.0.0', 8124)
bots = []
def __init__(self):
print "UDPInfo start"
| action = msg.get("action")
print data
if action == "start":
for bot in self.bots:
for login in [bot.logins.get(id) for id, online in bot.online.items() if online]:
self.player_came(bot.name, login)
def message_received(self, room, by, body):
self.send_json({
"action": "message",
"room": room,
"by": by,
"body": body
})
def player_came(self, room, login):
self.send_json({
"action": "player_came",
"room": room,
"login": login
})
def player_left(self, room, login):
self.send_json({
"action": "player_left",
"room": room,
"login": login
})
#udp_info = LocalUDPInfo()
#reactor.listenUDP(8125, udp_info)
class GarenaRSUDPProtocol(DatagramProtocol):
def __init__(self, factory):
self.factory = factory
self.msg_seq = int(time.time()) # because of how large unsigned int is, it is ok to do this
self.msg_blob = "000000005c0000003f0000f800000040b40000000000000000000000ccff41007200690061006c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
self.msg_blob = self.msg_blob.decode('hex')
print "UDP start"
self.poll_messages()
self.hello_everybody_lc = task.LoopingCall(self.hello_everybody).start(30, False)
self.tries = {}
self.resenders = {}
self.received = []
def poll_messages(self):
self.factory.bot.messages.get().addCallback(self.send_message)
def hello_everybody(self):
# print "UDP hello => all"
for id in [id for id, online in self.factory.bot.online.items() if online]:
self.say_hello(id)
def say_hello(self, id):
addr = self.factory.bot.addr(id)
if addr:
hello_packet = struct.pack("< I I 8x", 2, self.factory.account.id)
self.transport.write(hello_packet, addr)
def datagramReceived(self, data, host_port):
host, port = host_port
packet_type = ord(data[0])
if packet_type == 2:
self.handle_hello(data)
if packet_type == 15:
pass
# this is {HELLO REPLY} packet, we don't really need it, so -> ignore
#print "UDP hello reply <= ", host_port
#
if packet_type == 51:
self.handle_message(data)
if packet_type == 57:
self.invalidate_resender(data)
def handle_message(self, data):
data = data[1:]
# print len(data)
# print data.encode('hex')
format = "< I I I 96x I"
unpacked = struct.unpack(format, data[:112])
seq, from_id, to_id, length = unpacked
msg = data[112:].decode('utf_16_le', 'ignore')
# print self.factory.account.login + " => " + msg
# player = self.tcp.players.get(from_id)
# me = self.tcp.players.get(to_id)
addr = self.factory.bot.addr(from_id)
# print addr
key = "%s#%s" % (from_id, seq)
if addr and not key in self.received:
self.received.append(key)
reactor.callLater(10, lambda: self.received.remove(key))
# print "{MESSAGE #%s from %s of length %s(bytes)}" % (seq, login, length)
# print "{MSG BODY => %s}" % msg
reply = struct.pack("< B I I 8x", 57, seq, self.factory.account.id)
self.transport.write(reply, addr)
reactor.callLater(0, self.factory.bot.message_received, from_id, msg, True)
# self.send_message(me, player, u"you said => " + msg)
def send_message(self, player_and_msg):
self.poll_messages()
to_player, msg = player_and_msg
addr = self.factory.bot.addr(to_player.id)
if addr:
self.msg_seq += 1
seq = self.msg_seq
from_id = self.factory.account.id
length = len(msg) * 2
header = struct.pack("< B I I I", 51, seq, from_id, to_player.id)
packet = header + self.msg_blob + struct.pack("< I", length) + msg.encode('utf_16_le', 'ignore')
#self.transport.write(packet, addr)
self.resenders[seq] = task.LoopingCall(self.resend_message, seq, packet, addr)
self.tries[seq] = 0
self.resenders[seq].start(0.4)
# print "{MESSAGE to %s}" % to_player.login
# print "{MSG BODY => %s}" % msg
def invalidate_resender(self, data):
seq = struct.unpack("<I", data[1:5])[0]
# print "remote => i got #%s" % seq
lc = self.resenders.get(seq)
if lc:
lc.stop()
del self.resenders[seq]
del self.tries[seq]
def resend_message(self, seq, packet, addr):
lc = self.resenders.get(seq)
if lc:
self.tries[seq] += 1
self.transport.write(packet, addr)
# print "sending #%s, tries: %s" % (seq, self.tries[seq])
if self.tries[seq] > 6:
lc.stop()
del self.resenders[seq]
del self.tries[seq]
def handle_hello(self, data):
id = struct.unpack("<I", data[4:8])[0]
addr = self.factory.bot.addr(id)
if addr:
reply = struct.pack("< I I 4x I", 15, self.factory.account.id, id)
self.transport.write(reply, addr)
class GarenaRSProtocol(Protocol):
def __init__(self):
self.buffer = ''
def write(self, data): self.transport.write(data)
def write_hex(self, data): self.write(data.decode('hex'))
def connectionMade(self):
self.log = logging.getLogger("GRSP[%s]" % self.factory.account.login)
self.log.info(u"connection made, sending auth packet")
self.write_hex(self.factory.packet)
self.log.info(u"issuing disconnect in 45 seconds if Garena did not respond with WELCOME")
self.timeout_deferred = reactor.callLater(45, self.timeout)
def timeout(self):
self.log.error(u"Garena did not send WELCOME packet in 45 seconds, dropping connection now")
self.transport.loseConnection()
def dataReceived(self, data):
self.buffer += data
self.decodeHeader()
def decodeHeader(self):
if len(self.buffer) >= 5:
header = struct.unpack("< I B", self.buffer[:5])
if len(self.buffer) >= header[0]+4:
packet = self.buffer[5:header[0]+4]
self.buffer = self.buffer[header[0]+4:]
if len(self.buffer) >= 5:
reactor.callLater(0, self.decodeHeader)
self.decodePacket(header[1], packet)
def decodePacket(self, packet_type, data):
if self.factory.write_only and packet_type != 48: return
getattr(self, 'handle_' + {
34: 'player_came',
35: 'player_left',
37: 'message',
44: 'userlist',
48: 'welcome'
}.get(packet_type, 'non_existing'), lambda data: None)(data)
def handle_non_existing(self, data):
self.log.info(u"??? -> %s", data.encode('hex'))
def handle_player_left(self, data):
id = struct.unpack("< I", data)[0]
self.factory.bot.player_left(id)
def handle_player_came(self, data):
format = "< I 15s 6x 1B 2x 4B 32x"
unpacked = struct.unpack(format, data)
id = unpacked[0]
login = unicode(unpacked[1].rstrip(chr(0)), 'ascii', 'ignore')
ip = "%s.%s.%s.%s" % unpacked[3:]
lvl = unpacked[2]
port = struct.unpack(">H", data[40:42])[0]
if not Account.get_or(pk = id):
self.factory.bot.player_came(id, login, ip, port, lvl)
else:
self.log.info(u"%s is bot's account -> do nothing", login)
#if hasattr(self.factory, 'udp_protocol'):
# self.factory.udp_protocol.say_hello(id)
def handle_userlist(self, data):
self.log.info(u"cancelling TIMEOUT")
self.factory.connection = self
timeout_deferred = getattr(self, 'timeout_deferred', None)
if timeout_deferred and timeout_deferred.active:
timeout_deferred.cancel()
del self.timeout_deferred
self.log.info(u"got userlist")
for user_data in [ud for ud in split_by(data[8:], 64) if len(ud) == 64]:
self.handle_player_came(user_data)
def handle_message(self, data):
id = struct.unpack("<I", data[4:8])[0]
message = unicode(data[12:], 'utf_16_le', 'ignore').strip()
reactor.callLater(0, self.factory.bot.message_received, id, message)
def handle_welcome(self, data):
self.log.info(u"got WELCOME")
self.log.info(u"cancelling TIMEOUT")
self.factory.connection = self
timeout_deferred = getattr(self, 'timeout_deferred', None)
if timeout_deferred and timeout_deferred.active:
try:
timeout_deferred.cancel()
except:
pass
del self.timeout_deferred
class GarenaRSFactory(ClientFactory):
protocol = GarenaRSProtocol
def __init__(self, bot, account, write_only = True,
send_kicks = False, send_anns = True, send_pvts = True):
self.bot = bot
self.account = account
self.write_only = write_only
self.connection = None
self.log = logging.getLogger("GRSF[%s:%s]" % (bot.name, account.login))
self.log.info(u"initialized")
self.packet = account.packet.replace("{roomid}",
struct.pack("< I", bot.room.id).encode('hex'))
# deferreds
if send_anns: self.bot.announces.get().addCallback(self.send_announce)
if send_pvts: self.bot.privates.get().addCallback(self.send_private)
if send_kicks: self.bot.kicks.get().addCallback(self.send_kick)
#only now enable udp for ospl.slave
#if account.port > 15000:
# self.udp_protocol = GarenaRSUDPProtocol(self)
# self.udp = reactor.listenUDP(account.port, self.udp_protocol, interface = '212.154.211.111')
#else:
# self.udp = None
self.connect()
def connect(self):
self.log.info(u"issuing roomserver connection")
reactor.connectTCP(self.bot.room.ip, 8687, self)
def reconnect(self):
self.log.info(u"issuing reconnect in 5 seconds")
self.connection = None
if not self.write_only:
self.log.info(u"lost connection on reading bot, moving ip_list to stale")
for id in self.bot.ip_list.keys():
reactor.callLater(0, self.bot.player_left, id)
reactor.callLater(5, self.connect)
def startedConnecting(self, connector):
self.log.info(u"started connecting")
def clientConnectionLost(self, connector, reason):
self.log.error(u"connection lost, reason: %s", reason)
self.reconnect()
def clientConnectionFailed(self, connector, reason):
self.log.error("uconnection failed, reason: %s", reason)
self.reconnect()
def send_kick(self, (player_id, reason)):
self.kick_deferred = self.bot.kicks.get().addCallback(self.send_kick)
if self.connection:
self.log.debug(u"doing kick => %s @ %s", player_id, reason)
format = "< I b I I I"
packet = struct.pack(format, len(reason) + 13, 40, self.account.id,
player_id, len(reason)) + reason.encode('ascii', 'ignore')
self.connection.write(packet)
# remove 15 min ban, that happens after player is kicked
player_login = self.bot.logins.get(player_id, u'').encode('ascii', 'ignore')
if player_login and False:
self.log.debug(u"removing 15min ban => %s", player_login)
packet = struct.pack("< I b I", len(player_login) + 10, 120, self.bot.room.id) + \
player_login + ("\0" * 5)
self.connection.write(packet)
else:
self.log.error(u"kick : no connection")
def send_private(self, (player_id, message)):
reactor.callLater(0.55, lambda: self.bot.privates.get().addCallback(self.send_private))
if self.connection:
format = "< I b I I"
packet = struct.pack(format, len(message) * 2 + 9, 127,
self.account.id,
player_id) + message.encode('utf_16_le', 'ignore')
self.connection.write(packet)
else:
self.log.error(u"pvt : no connection")
def send_announce(self, message):
reactor.callLater(1.1, lambda: self.bot.announces.get().addCallback(self.send_announce))
if self.connection:
self.log.debug(u"ANN -> %s", message)
format = "< I b I"
packet = struct.pack(format, len(message) * 2 + 5, 48,
self.bot.room.id) + message.encode('utf_16_le', 'ignore')
self.connection.write(packet)
else:
self.log.error(u"ann : no connection") | def send_json(self, obj): self.transport.write(json.dumps(obj), self.node_io_addr)
def datagramReceived(self, data, addr):
msg = json.loads(data)
| random_line_split |
protocol.py | from twisted.internet.protocol import ClientFactory, Protocol, DatagramProtocol
from twisted.internet import reactor, task
import logging
import struct
from gbot.util import split_by
from gbot.models import Account
import time, json
class LocalUDPInfo(DatagramProtocol):
node_io_addr = ('0.0.0.0', 8124)
bots = []
def __init__(self):
print "UDPInfo start"
def send_json(self, obj): self.transport.write(json.dumps(obj), self.node_io_addr)
def datagramReceived(self, data, addr):
msg = json.loads(data)
action = msg.get("action")
print data
if action == "start":
for bot in self.bots:
for login in [bot.logins.get(id) for id, online in bot.online.items() if online]:
self.player_came(bot.name, login)
def message_received(self, room, by, body):
self.send_json({
"action": "message",
"room": room,
"by": by,
"body": body
})
def player_came(self, room, login):
self.send_json({
"action": "player_came",
"room": room,
"login": login
})
def player_left(self, room, login):
self.send_json({
"action": "player_left",
"room": room,
"login": login
})
#udp_info = LocalUDPInfo()
#reactor.listenUDP(8125, udp_info)
class GarenaRSUDPProtocol(DatagramProtocol):
def __init__(self, factory):
self.factory = factory
self.msg_seq = int(time.time()) # because of how large unsigned int is, it is ok to do this
self.msg_blob = "000000005c0000003f0000f800000040b40000000000000000000000ccff41007200690061006c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
self.msg_blob = self.msg_blob.decode('hex')
print "UDP start"
self.poll_messages()
self.hello_everybody_lc = task.LoopingCall(self.hello_everybody).start(30, False)
self.tries = {}
self.resenders = {}
self.received = []
def poll_messages(self):
self.factory.bot.messages.get().addCallback(self.send_message)
def hello_everybody(self):
# print "UDP hello => all"
for id in [id for id, online in self.factory.bot.online.items() if online]:
self.say_hello(id)
def say_hello(self, id):
addr = self.factory.bot.addr(id)
if addr:
hello_packet = struct.pack("< I I 8x", 2, self.factory.account.id)
self.transport.write(hello_packet, addr)
def datagramReceived(self, data, host_port):
host, port = host_port
packet_type = ord(data[0])
if packet_type == 2:
self.handle_hello(data)
if packet_type == 15:
pass
# this is {HELLO REPLY} packet, we don't really need it, so -> ignore
#print "UDP hello reply <= ", host_port
#
if packet_type == 51:
self.handle_message(data)
if packet_type == 57:
self.invalidate_resender(data)
def handle_message(self, data):
data = data[1:]
# print len(data)
# print data.encode('hex')
format = "< I I I 96x I"
unpacked = struct.unpack(format, data[:112])
seq, from_id, to_id, length = unpacked
msg = data[112:].decode('utf_16_le', 'ignore')
# print self.factory.account.login + " => " + msg
# player = self.tcp.players.get(from_id)
# me = self.tcp.players.get(to_id)
addr = self.factory.bot.addr(from_id)
# print addr
key = "%s#%s" % (from_id, seq)
if addr and not key in self.received:
self.received.append(key)
reactor.callLater(10, lambda: self.received.remove(key))
# print "{MESSAGE #%s from %s of length %s(bytes)}" % (seq, login, length)
# print "{MSG BODY => %s}" % msg
reply = struct.pack("< B I I 8x", 57, seq, self.factory.account.id)
self.transport.write(reply, addr)
reactor.callLater(0, self.factory.bot.message_received, from_id, msg, True)
# self.send_message(me, player, u"you said => " + msg)
def send_message(self, player_and_msg):
self.poll_messages()
to_player, msg = player_and_msg
addr = self.factory.bot.addr(to_player.id)
if addr:
self.msg_seq += 1
seq = self.msg_seq
from_id = self.factory.account.id
length = len(msg) * 2
header = struct.pack("< B I I I", 51, seq, from_id, to_player.id)
packet = header + self.msg_blob + struct.pack("< I", length) + msg.encode('utf_16_le', 'ignore')
#self.transport.write(packet, addr)
self.resenders[seq] = task.LoopingCall(self.resend_message, seq, packet, addr)
self.tries[seq] = 0
self.resenders[seq].start(0.4)
# print "{MESSAGE to %s}" % to_player.login
# print "{MSG BODY => %s}" % msg
def invalidate_resender(self, data):
seq = struct.unpack("<I", data[1:5])[0]
# print "remote => i got #%s" % seq
lc = self.resenders.get(seq)
if lc:
lc.stop()
del self.resenders[seq]
del self.tries[seq]
def resend_message(self, seq, packet, addr):
lc = self.resenders.get(seq)
if lc:
self.tries[seq] += 1
self.transport.write(packet, addr)
# print "sending #%s, tries: %s" % (seq, self.tries[seq])
if self.tries[seq] > 6:
lc.stop()
del self.resenders[seq]
del self.tries[seq]
def handle_hello(self, data):
id = struct.unpack("<I", data[4:8])[0]
addr = self.factory.bot.addr(id)
if addr:
reply = struct.pack("< I I 4x I", 15, self.factory.account.id, id)
self.transport.write(reply, addr)
class GarenaRSProtocol(Protocol):
def __init__(self):
self.buffer = ''
def write(self, data): self.transport.write(data)
def write_hex(self, data): self.write(data.decode('hex'))
def connectionMade(self):
|
def timeout(self):
self.log.error(u"Garena did not send WELCOME packet in 45 seconds, dropping connection now")
self.transport.loseConnection()
def dataReceived(self, data):
self.buffer += data
self.decodeHeader()
def decodeHeader(self):
if len(self.buffer) >= 5:
header = struct.unpack("< I B", self.buffer[:5])
if len(self.buffer) >= header[0]+4:
packet = self.buffer[5:header[0]+4]
self.buffer = self.buffer[header[0]+4:]
if len(self.buffer) >= 5:
reactor.callLater(0, self.decodeHeader)
self.decodePacket(header[1], packet)
def decodePacket(self, packet_type, data):
if self.factory.write_only and packet_type != 48: return
getattr(self, 'handle_' + {
34: 'player_came',
35: 'player_left',
37: 'message',
44: 'userlist',
48: 'welcome'
}.get(packet_type, 'non_existing'), lambda data: None)(data)
def handle_non_existing(self, data):
self.log.info(u"??? -> %s", data.encode('hex'))
def handle_player_left(self, data):
id = struct.unpack("< I", data)[0]
self.factory.bot.player_left(id)
def handle_player_came(self, data):
format = "< I 15s 6x 1B 2x 4B 32x"
unpacked = struct.unpack(format, data)
id = unpacked[0]
login = unicode(unpacked[1].rstrip(chr(0)), 'ascii', 'ignore')
ip = "%s.%s.%s.%s" % unpacked[3:]
lvl = unpacked[2]
port = struct.unpack(">H", data[40:42])[0]
if not Account.get_or(pk = id):
self.factory.bot.player_came(id, login, ip, port, lvl)
else:
self.log.info(u"%s is bot's account -> do nothing", login)
#if hasattr(self.factory, 'udp_protocol'):
# self.factory.udp_protocol.say_hello(id)
def handle_userlist(self, data):
self.log.info(u"cancelling TIMEOUT")
self.factory.connection = self
timeout_deferred = getattr(self, 'timeout_deferred', None)
if timeout_deferred and timeout_deferred.active:
timeout_deferred.cancel()
del self.timeout_deferred
self.log.info(u"got userlist")
for user_data in [ud for ud in split_by(data[8:], 64) if len(ud) == 64]:
self.handle_player_came(user_data)
def handle_message(self, data):
id = struct.unpack("<I", data[4:8])[0]
message = unicode(data[12:], 'utf_16_le', 'ignore').strip()
reactor.callLater(0, self.factory.bot.message_received, id, message)
def handle_welcome(self, data):
self.log.info(u"got WELCOME")
self.log.info(u"cancelling TIMEOUT")
self.factory.connection = self
timeout_deferred = getattr(self, 'timeout_deferred', None)
if timeout_deferred and timeout_deferred.active:
try:
timeout_deferred.cancel()
except:
pass
del self.timeout_deferred
class GarenaRSFactory(ClientFactory):
protocol = GarenaRSProtocol
def __init__(self, bot, account, write_only = True,
send_kicks = False, send_anns = True, send_pvts = True):
self.bot = bot
self.account = account
self.write_only = write_only
self.connection = None
self.log = logging.getLogger("GRSF[%s:%s]" % (bot.name, account.login))
self.log.info(u"initialized")
self.packet = account.packet.replace("{roomid}",
struct.pack("< I", bot.room.id).encode('hex'))
# deferreds
if send_anns: self.bot.announces.get().addCallback(self.send_announce)
if send_pvts: self.bot.privates.get().addCallback(self.send_private)
if send_kicks: self.bot.kicks.get().addCallback(self.send_kick)
#only now enable udp for ospl.slave
#if account.port > 15000:
# self.udp_protocol = GarenaRSUDPProtocol(self)
# self.udp = reactor.listenUDP(account.port, self.udp_protocol, interface = '212.154.211.111')
#else:
# self.udp = None
self.connect()
def connect(self):
self.log.info(u"issuing roomserver connection")
reactor.connectTCP(self.bot.room.ip, 8687, self)
def reconnect(self):
self.log.info(u"issuing reconnect in 5 seconds")
self.connection = None
if not self.write_only:
self.log.info(u"lost connection on reading bot, moving ip_list to stale")
for id in self.bot.ip_list.keys():
reactor.callLater(0, self.bot.player_left, id)
reactor.callLater(5, self.connect)
def startedConnecting(self, connector):
self.log.info(u"started connecting")
def clientConnectionLost(self, connector, reason):
self.log.error(u"connection lost, reason: %s", reason)
self.reconnect()
def clientConnectionFailed(self, connector, reason):
self.log.error("uconnection failed, reason: %s", reason)
self.reconnect()
def send_kick(self, (player_id, reason)):
self.kick_deferred = self.bot.kicks.get().addCallback(self.send_kick)
if self.connection:
self.log.debug(u"doing kick => %s @ %s", player_id, reason)
format = "< I b I I I"
packet = struct.pack(format, len(reason) + 13, 40, self.account.id,
player_id, len(reason)) + reason.encode('ascii', 'ignore')
self.connection.write(packet)
# remove 15 min ban, that happens after player is kicked
player_login = self.bot.logins.get(player_id, u'').encode('ascii', 'ignore')
if player_login and False:
self.log.debug(u"removing 15min ban => %s", player_login)
packet = struct.pack("< I b I", len(player_login) + 10, 120, self.bot.room.id) + \
player_login + ("\0" * 5)
self.connection.write(packet)
else:
self.log.error(u"kick : no connection")
def send_private(self, (player_id, message)):
reactor.callLater(0.55, lambda: self.bot.privates.get().addCallback(self.send_private))
if self.connection:
format = "< I b I I"
packet = struct.pack(format, len(message) * 2 + 9, 127,
self.account.id,
player_id) + message.encode('utf_16_le', 'ignore')
self.connection.write(packet)
else:
self.log.error(u"pvt : no connection")
def send_announce(self, message):
reactor.callLater(1.1, lambda: self.bot.announces.get().addCallback(self.send_announce))
if self.connection:
self.log.debug(u"ANN -> %s", message)
format = "< I b I"
packet = struct.pack(format, len(message) * 2 + 5, 48,
self.bot.room.id) + message.encode('utf_16_le', 'ignore')
self.connection.write(packet)
else:
self.log.error(u"ann : no connection")
| self.log = logging.getLogger("GRSP[%s]" % self.factory.account.login)
self.log.info(u"connection made, sending auth packet")
self.write_hex(self.factory.packet)
self.log.info(u"issuing disconnect in 45 seconds if Garena did not respond with WELCOME")
self.timeout_deferred = reactor.callLater(45, self.timeout) | identifier_body |
protocol.py | from twisted.internet.protocol import ClientFactory, Protocol, DatagramProtocol
from twisted.internet import reactor, task
import logging
import struct
from gbot.util import split_by
from gbot.models import Account
import time, json
class LocalUDPInfo(DatagramProtocol):
node_io_addr = ('0.0.0.0', 8124)
bots = []
def __init__(self):
print "UDPInfo start"
def send_json(self, obj): self.transport.write(json.dumps(obj), self.node_io_addr)
def datagramReceived(self, data, addr):
msg = json.loads(data)
action = msg.get("action")
print data
if action == "start":
for bot in self.bots:
for login in [bot.logins.get(id) for id, online in bot.online.items() if online]:
self.player_came(bot.name, login)
def message_received(self, room, by, body):
self.send_json({
"action": "message",
"room": room,
"by": by,
"body": body
})
def player_came(self, room, login):
self.send_json({
"action": "player_came",
"room": room,
"login": login
})
def player_left(self, room, login):
self.send_json({
"action": "player_left",
"room": room,
"login": login
})
#udp_info = LocalUDPInfo()
#reactor.listenUDP(8125, udp_info)
class GarenaRSUDPProtocol(DatagramProtocol):
def __init__(self, factory):
self.factory = factory
self.msg_seq = int(time.time()) # because of how large unsigned int is, it is ok to do this
self.msg_blob = "000000005c0000003f0000f800000040b40000000000000000000000ccff41007200690061006c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
self.msg_blob = self.msg_blob.decode('hex')
print "UDP start"
self.poll_messages()
self.hello_everybody_lc = task.LoopingCall(self.hello_everybody).start(30, False)
self.tries = {}
self.resenders = {}
self.received = []
def poll_messages(self):
self.factory.bot.messages.get().addCallback(self.send_message)
def hello_everybody(self):
# print "UDP hello => all"
for id in [id for id, online in self.factory.bot.online.items() if online]:
self.say_hello(id)
def say_hello(self, id):
addr = self.factory.bot.addr(id)
if addr:
hello_packet = struct.pack("< I I 8x", 2, self.factory.account.id)
self.transport.write(hello_packet, addr)
def datagramReceived(self, data, host_port):
host, port = host_port
packet_type = ord(data[0])
if packet_type == 2:
self.handle_hello(data)
if packet_type == 15:
pass
# this is {HELLO REPLY} packet, we don't really need it, so -> ignore
#print "UDP hello reply <= ", host_port
#
if packet_type == 51:
self.handle_message(data)
if packet_type == 57:
self.invalidate_resender(data)
def handle_message(self, data):
data = data[1:]
# print len(data)
# print data.encode('hex')
format = "< I I I 96x I"
unpacked = struct.unpack(format, data[:112])
seq, from_id, to_id, length = unpacked
msg = data[112:].decode('utf_16_le', 'ignore')
# print self.factory.account.login + " => " + msg
# player = self.tcp.players.get(from_id)
# me = self.tcp.players.get(to_id)
addr = self.factory.bot.addr(from_id)
# print addr
key = "%s#%s" % (from_id, seq)
if addr and not key in self.received:
self.received.append(key)
reactor.callLater(10, lambda: self.received.remove(key))
# print "{MESSAGE #%s from %s of length %s(bytes)}" % (seq, login, length)
# print "{MSG BODY => %s}" % msg
reply = struct.pack("< B I I 8x", 57, seq, self.factory.account.id)
self.transport.write(reply, addr)
reactor.callLater(0, self.factory.bot.message_received, from_id, msg, True)
# self.send_message(me, player, u"you said => " + msg)
def send_message(self, player_and_msg):
self.poll_messages()
to_player, msg = player_and_msg
addr = self.factory.bot.addr(to_player.id)
if addr:
self.msg_seq += 1
seq = self.msg_seq
from_id = self.factory.account.id
length = len(msg) * 2
header = struct.pack("< B I I I", 51, seq, from_id, to_player.id)
packet = header + self.msg_blob + struct.pack("< I", length) + msg.encode('utf_16_le', 'ignore')
#self.transport.write(packet, addr)
self.resenders[seq] = task.LoopingCall(self.resend_message, seq, packet, addr)
self.tries[seq] = 0
self.resenders[seq].start(0.4)
# print "{MESSAGE to %s}" % to_player.login
# print "{MSG BODY => %s}" % msg
def invalidate_resender(self, data):
seq = struct.unpack("<I", data[1:5])[0]
# print "remote => i got #%s" % seq
lc = self.resenders.get(seq)
if lc:
lc.stop()
del self.resenders[seq]
del self.tries[seq]
def resend_message(self, seq, packet, addr):
lc = self.resenders.get(seq)
if lc:
|
def handle_hello(self, data):
id = struct.unpack("<I", data[4:8])[0]
addr = self.factory.bot.addr(id)
if addr:
reply = struct.pack("< I I 4x I", 15, self.factory.account.id, id)
self.transport.write(reply, addr)
class GarenaRSProtocol(Protocol):
def __init__(self):
self.buffer = ''
def write(self, data): self.transport.write(data)
def write_hex(self, data): self.write(data.decode('hex'))
def connectionMade(self):
self.log = logging.getLogger("GRSP[%s]" % self.factory.account.login)
self.log.info(u"connection made, sending auth packet")
self.write_hex(self.factory.packet)
self.log.info(u"issuing disconnect in 45 seconds if Garena did not respond with WELCOME")
self.timeout_deferred = reactor.callLater(45, self.timeout)
def timeout(self):
self.log.error(u"Garena did not send WELCOME packet in 45 seconds, dropping connection now")
self.transport.loseConnection()
def dataReceived(self, data):
self.buffer += data
self.decodeHeader()
def decodeHeader(self):
if len(self.buffer) >= 5:
header = struct.unpack("< I B", self.buffer[:5])
if len(self.buffer) >= header[0]+4:
packet = self.buffer[5:header[0]+4]
self.buffer = self.buffer[header[0]+4:]
if len(self.buffer) >= 5:
reactor.callLater(0, self.decodeHeader)
self.decodePacket(header[1], packet)
def decodePacket(self, packet_type, data):
if self.factory.write_only and packet_type != 48: return
getattr(self, 'handle_' + {
34: 'player_came',
35: 'player_left',
37: 'message',
44: 'userlist',
48: 'welcome'
}.get(packet_type, 'non_existing'), lambda data: None)(data)
def handle_non_existing(self, data):
self.log.info(u"??? -> %s", data.encode('hex'))
def handle_player_left(self, data):
id = struct.unpack("< I", data)[0]
self.factory.bot.player_left(id)
def handle_player_came(self, data):
format = "< I 15s 6x 1B 2x 4B 32x"
unpacked = struct.unpack(format, data)
id = unpacked[0]
login = unicode(unpacked[1].rstrip(chr(0)), 'ascii', 'ignore')
ip = "%s.%s.%s.%s" % unpacked[3:]
lvl = unpacked[2]
port = struct.unpack(">H", data[40:42])[0]
if not Account.get_or(pk = id):
self.factory.bot.player_came(id, login, ip, port, lvl)
else:
self.log.info(u"%s is bot's account -> do nothing", login)
#if hasattr(self.factory, 'udp_protocol'):
# self.factory.udp_protocol.say_hello(id)
def handle_userlist(self, data):
self.log.info(u"cancelling TIMEOUT")
self.factory.connection = self
timeout_deferred = getattr(self, 'timeout_deferred', None)
if timeout_deferred and timeout_deferred.active:
timeout_deferred.cancel()
del self.timeout_deferred
self.log.info(u"got userlist")
for user_data in [ud for ud in split_by(data[8:], 64) if len(ud) == 64]:
self.handle_player_came(user_data)
def handle_message(self, data):
id = struct.unpack("<I", data[4:8])[0]
message = unicode(data[12:], 'utf_16_le', 'ignore').strip()
reactor.callLater(0, self.factory.bot.message_received, id, message)
def handle_welcome(self, data):
self.log.info(u"got WELCOME")
self.log.info(u"cancelling TIMEOUT")
self.factory.connection = self
timeout_deferred = getattr(self, 'timeout_deferred', None)
if timeout_deferred and timeout_deferred.active:
try:
timeout_deferred.cancel()
except:
pass
del self.timeout_deferred
class GarenaRSFactory(ClientFactory):
protocol = GarenaRSProtocol
def __init__(self, bot, account, write_only = True,
send_kicks = False, send_anns = True, send_pvts = True):
self.bot = bot
self.account = account
self.write_only = write_only
self.connection = None
self.log = logging.getLogger("GRSF[%s:%s]" % (bot.name, account.login))
self.log.info(u"initialized")
self.packet = account.packet.replace("{roomid}",
struct.pack("< I", bot.room.id).encode('hex'))
# deferreds
if send_anns: self.bot.announces.get().addCallback(self.send_announce)
if send_pvts: self.bot.privates.get().addCallback(self.send_private)
if send_kicks: self.bot.kicks.get().addCallback(self.send_kick)
#only now enable udp for ospl.slave
#if account.port > 15000:
# self.udp_protocol = GarenaRSUDPProtocol(self)
# self.udp = reactor.listenUDP(account.port, self.udp_protocol, interface = '212.154.211.111')
#else:
# self.udp = None
self.connect()
def connect(self):
self.log.info(u"issuing roomserver connection")
reactor.connectTCP(self.bot.room.ip, 8687, self)
def reconnect(self):
self.log.info(u"issuing reconnect in 5 seconds")
self.connection = None
if not self.write_only:
self.log.info(u"lost connection on reading bot, moving ip_list to stale")
for id in self.bot.ip_list.keys():
reactor.callLater(0, self.bot.player_left, id)
reactor.callLater(5, self.connect)
def startedConnecting(self, connector):
self.log.info(u"started connecting")
def clientConnectionLost(self, connector, reason):
self.log.error(u"connection lost, reason: %s", reason)
self.reconnect()
def clientConnectionFailed(self, connector, reason):
self.log.error("uconnection failed, reason: %s", reason)
self.reconnect()
def send_kick(self, (player_id, reason)):
self.kick_deferred = self.bot.kicks.get().addCallback(self.send_kick)
if self.connection:
self.log.debug(u"doing kick => %s @ %s", player_id, reason)
format = "< I b I I I"
packet = struct.pack(format, len(reason) + 13, 40, self.account.id,
player_id, len(reason)) + reason.encode('ascii', 'ignore')
self.connection.write(packet)
# remove 15 min ban, that happens after player is kicked
player_login = self.bot.logins.get(player_id, u'').encode('ascii', 'ignore')
if player_login and False:
self.log.debug(u"removing 15min ban => %s", player_login)
packet = struct.pack("< I b I", len(player_login) + 10, 120, self.bot.room.id) + \
player_login + ("\0" * 5)
self.connection.write(packet)
else:
self.log.error(u"kick : no connection")
def send_private(self, (player_id, message)):
reactor.callLater(0.55, lambda: self.bot.privates.get().addCallback(self.send_private))
if self.connection:
format = "< I b I I"
packet = struct.pack(format, len(message) * 2 + 9, 127,
self.account.id,
player_id) + message.encode('utf_16_le', 'ignore')
self.connection.write(packet)
else:
self.log.error(u"pvt : no connection")
def send_announce(self, message):
reactor.callLater(1.1, lambda: self.bot.announces.get().addCallback(self.send_announce))
if self.connection:
self.log.debug(u"ANN -> %s", message)
format = "< I b I"
packet = struct.pack(format, len(message) * 2 + 5, 48,
self.bot.room.id) + message.encode('utf_16_le', 'ignore')
self.connection.write(packet)
else:
self.log.error(u"ann : no connection")
| self.tries[seq] += 1
self.transport.write(packet, addr)
# print "sending #%s, tries: %s" % (seq, self.tries[seq])
if self.tries[seq] > 6:
lc.stop()
del self.resenders[seq]
del self.tries[seq] | conditional_block |
WeightedSum.go | package interval
import (
"bitbucket.org/gofd/gofd/core"
"fmt"
"strings"
)
// WeightedSum represents the constraint C1*X1+C2*X2+...+Cn*Xn=Z
// Its propagate functions establish arc consistency (with bounds and arc
// algorithms). | // The basic idea of WeightedSum is to substitute the WeightedSum equation to
// many Ci*Xi=Hi, so that a Sum constraint results with H1+H2+...+Hn=Z. This
// Sum constraint is substituted as well with X+Y=Z equations (see Sum
// constraint for more information).
type WeightedSum struct {
vars []core.VarId
hvars []core.VarId //helper-variables
cs []int
resultVar core.VarId
outCh chan<- *core.ChangeEvent
inCh <-chan *core.ChangeEntry
varidToDomainMap map[core.VarId]*core.IvDomain
id core.PropId
store *core.Store
pseudoPropsXCY []*PseudoXmultCeqY
pseudoPropsXYZ []*PseudoXplusYeqZ
}
func (this *WeightedSum) Start() {
// initial check
evt := core.CreateChangeEvent()
this.ivweightSumInitialCheck(evt)
core.SendChangesToStore(evt, this)
for changeEntry := range this.inCh {
core.LogIncomingChange(this, this.store, changeEntry)
evt = core.CreateChangeEvent()
varidChanged := changeEntry.GetID()
changedDom := this.varidToDomainMap[varidChanged]
changedDom.Removes(changeEntry.GetValues())
this.ivweightedSumPropagate(varidChanged, evt)
core.SendChangesToStore(evt, this)
}
}
// propagate check for changes. First look for X*C=Y propagators, then
// X+Y=Z and finally for the final propagator X=Y. Collect changes
func (this *WeightedSum) ivweightedSumPropagate(varid core.VarId, evt *core.ChangeEvent) {
this.checkXmultCeqY(varid, evt)
this.ivsumPropagate(varid, evt)
}
func (this *WeightedSum) ivsumPropagate(varid core.VarId, evt *core.ChangeEvent) {
hvarXCY := make([]core.VarId, len(this.pseudoPropsXCY))
for i, hxcy := range this.pseudoPropsXCY {
hvarXCY[i] = hxcy.y
}
ivsumBoundsPropagate(varid, this.varidToDomainMap, this.pseudoPropsXYZ, evt)
ivsumArcPropagate(varid, this.varidToDomainMap, this.pseudoPropsXYZ, evt)
}
// initialCheck check for changes. First look for X*C=Y propagators, then
// X+Y=Z and finally for the final propagator X=Y. Collect changes
func (this *WeightedSum) ivweightSumInitialCheck(evt *core.ChangeEvent) {
this.checkXmultCeqY(-1, evt)
hvarXCY := make([]core.VarId, len(this.pseudoPropsXCY))
for i, hxcy := range this.pseudoPropsXCY {
hvarXCY[i] = hxcy.y
}
ivsumBoundsInitialCheck(this.varidToDomainMap, this.pseudoPropsXYZ, evt)
ivsumArcInitialCheck(this.varidToDomainMap, this.pseudoPropsXYZ, evt)
}
func (this *WeightedSum) checkXmultCeqY(varid core.VarId, evt *core.ChangeEvent) {
for i, prop := range this.pseudoPropsXCY {
xDom := this.varidToDomainMap[prop.x]
yDom := this.varidToDomainMap[prop.y]
c := this.cs[i]
if varid == -1 {
xDom := this.varidToDomainMap[prop.x]
yDom := this.varidToDomainMap[prop.y]
c := this.cs[i]
ivfirstInMultSecondOutBOUNDS(xDom, c, yDom, prop.y, evt)
ivsecondInMultFirstOutBOUNDS(yDom, c, xDom, prop.x, evt)
ivfirstInMultSecondOutARC(xDom, c, yDom, prop.y, evt)
ivsecondInMultFirstOutARC(yDom, c, xDom, prop.x, evt)
} else if prop.x == varid {
ivfirstInMultSecondOutBOUNDS(xDom, c, yDom, prop.y, evt)
ivfirstInMultSecondOutARC(xDom, c, yDom, prop.y, evt)
} else if prop.y == varid {
ivsecondInMultFirstOutBOUNDS(yDom, c, xDom, prop.x, evt)
ivsecondInMultFirstOutARC(yDom, c, xDom, prop.x, evt)
}
}
}
// Register generates auxiliary variables and makes pseudo structs
// and all vars will be registered at store and get domains and channels
func (this *WeightedSum) Register(store *core.Store) {
allvars := make([]core.VarId, len(this.vars)+len(this.hvars)+1)
i := 0
for _, v := range this.vars {
allvars[i] = v
i++
}
for _, v := range this.hvars {
allvars[i] = v
i++
}
allvars[i] = this.resultVar
var domains map[core.VarId]core.Domain
this.inCh, domains, this.outCh =
store.RegisterPropagatorMap(allvars, this.id)
this.varidToDomainMap = core.GetVaridToIntervalDomains(domains)
this.store = store
}
// SetID is used by the store to set the propagator's ID, don't use it
// yourself or bad things will happen
func (this *WeightedSum) SetID(propID core.PropId) {
this.id = propID
}
func (this *WeightedSum) GetID() core.PropId {
return this.id
}
func CreateWeightedSum(store *core.Store, resultVar core.VarId, cs []int,
intVars ...core.VarId) *WeightedSum {
if core.GetLogger().DoDebug() {
core.GetLogger().Dln("CreateWeightedSum-propagator")
}
prop := new(WeightedSum)
prop.vars = intVars
prop.resultVar = resultVar
prop.cs = cs
prop.pseudoPropsXCY = make([]*PseudoXmultCeqY, len(prop.vars))
prop.hvars = make([]core.VarId, 0)
for i, X := range prop.vars {
H := core.CreateAuxIntVarIvValues(store,
core.ScalarSlice(prop.cs[i], store.GetDomain(X).Values_asSlice()))
prop.pseudoPropsXCY[i] = CreatePseudoXmultCeqY(X, prop.cs[i], H)
prop.hvars = append(prop.hvars, H)
}
prop.pseudoPropsXYZ = make([]*PseudoXplusYeqZ, len(prop.pseudoPropsXCY)-1)
H := prop.pseudoPropsXCY[0].y
newHVars := make([]core.VarId, 0)
for i, p := range prop.pseudoPropsXCY[1 : len(prop.vars)-1] {
NewH := core.CreateAuxIntVarIvFromTo(store,
store.GetDomain(H).GetMin()+store.GetDomain(p.y).GetMin(),
store.GetDomain(H).GetMax()+store.GetDomain(p.y).GetMax())
prop.pseudoPropsXYZ[i] = CreatePseudoXplusYeqZ(H, p.y, NewH)
H = NewH
newHVars = append(newHVars, NewH)
}
X := prop.hvars[len(prop.hvars)-1]
prop.hvars = append(prop.hvars, newHVars...)
prop.pseudoPropsXYZ[len(prop.pseudoPropsXYZ)-1] = CreatePseudoXplusYeqZ(H, X, prop.resultVar)
return prop
}
func (this *WeightedSum) String() string {
vars_str := make([]string, len(this.vars))
for i, var_id := range this.vars {
vars_str[i] = fmt.Sprintf("%v*%s",
this.cs[i], this.store.GetName(var_id))
}
return fmt.Sprintf("PROP_%d %s = %s",
this.id, strings.Join(vars_str, "+"),
this.store.GetName(this.resultVar))
}
func (this *WeightedSum) Clone() core.Constraint {
prop := new(WeightedSum)
prop.vars = make([]core.VarId, len(this.vars))
for i, single_var := range this.vars {
prop.vars[i] = single_var
}
prop.resultVar = this.resultVar
prop.cs = make([]int, len(this.cs))
for i, c := range this.cs {
prop.cs[i] = c
}
prop.pseudoPropsXCY = make([]*PseudoXmultCeqY, len(this.pseudoPropsXCY))
for i, p := range this.pseudoPropsXCY {
prop.pseudoPropsXCY[i] = p.Clone()
}
prop.pseudoPropsXYZ = make([]*PseudoXplusYeqZ, len(this.pseudoPropsXYZ))
for i, p := range this.pseudoPropsXYZ {
prop.pseudoPropsXYZ[i] = p.Clone()
}
prop.hvars = make([]core.VarId, len(this.hvars))
for i, single_var := range this.hvars {
prop.hvars[i] = single_var
}
return prop
}
// INFO: the following two propagation-functions do not work on special
// interval-operations. Reason is: the multiplication is always worst case!
// example-CSP:
// X*C=Y
// X:[1,11]
// C:2
// Y:[1,20]
// RESULT:
// X:[1,10]
// Y:[2] [4] [6] ... [20]
// X*C=Y
// ivfirstInMultSecondOutARC collect changes, when first variable has changed
// e.g. X=Y/C, then X is first variable
func ivfirstInMultSecondOutBOUNDS(firstInDomain *core.IvDomain, c int,
secondOutDomain *core.IvDomain, secondOutVarId core.VarId,
evt *core.ChangeEvent) {
if firstInDomain.IsEmpty() {
return
}
removeParts := make([]*core.IvDomPart, 0)
minX := firstInDomain.GetMin() * c
maxX := firstInDomain.GetMax() * c
for i, yPart := range secondOutDomain.GetParts() {
removeParts = append(removeParts, yPart.DIFFERENCE_MIN_MAX(minX, maxX)...)
if yPart.GT(maxX) {
removeParts = append(removeParts, secondOutDomain.GetParts()[i:]...)
break
}
}
if len(removeParts) != 0 {
chEntry := core.CreateChangeEntryWithValues(secondOutVarId, core.CreateIvDomainDomParts(removeParts))
evt.AddChangeEntry(chEntry)
}
}
// ivsecondInMultFirstOutARC collect changes, when second variable has changed
// e.g. X*C=Y, then Y is second variable
func ivsecondInMultFirstOutBOUNDS(secondInDomain *core.IvDomain, c int,
firstOutDomain *core.IvDomain, firstOutVarId core.VarId,
evt *core.ChangeEvent) {
removeParts := make([]*core.IvDomPart, 0)
if secondInDomain.IsEmpty() {
return
}
minY := secondInDomain.GetMin() / c
maxY := secondInDomain.GetMax() / c
for i, xPart := range firstOutDomain.GetParts() {
removeParts = append(removeParts, xPart.DIFFERENCE_MIN_MAX(minY, maxY)...)
if xPart.GT(maxY) {
removeParts = append(removeParts, firstOutDomain.GetParts()[i:]...)
break
}
}
if len(removeParts) != 0 {
chEntry := core.CreateChangeEntryWithValues(firstOutVarId, core.CreateIvDomainDomParts(removeParts))
evt.AddChangeEntry(chEntry)
}
}
// X*C=Y
// ivfirstInMultSecondOutARC collect changes, when first variable has changed
// e.g. Y=X/C, then X is first variable
func ivfirstInMultSecondOutARC(firstInDomain *core.IvDomain, c int,
secondOutDomain *core.IvDomain, secondOutVarId core.VarId,
evt *core.ChangeEvent) {
if firstInDomain.IsEmpty() {
return
}
vals := make([]int, 0)
for _, y_val := range secondOutDomain.GetValues() {
if c == 0 || !(y_val%c == 0) {
if y_val != 0 {
vals = append(vals, y_val)
}
} else {
if !firstInDomain.Contains(y_val / c) {
vals = append(vals, y_val)
}
}
}
if len(vals) != 0 {
chEntry := core.CreateChangeEntryWithIntValues(secondOutVarId, vals)
evt.AddChangeEntry(chEntry)
}
}
// ivsecondInMultFirstOutARC collect changes, when second variable has changed
// e.g. X*C=Y, then Y is second variable
func ivsecondInMultFirstOutARC(secondInDomain *core.IvDomain, c int,
firstOutDomain *core.IvDomain, firstOutVarId core.VarId,
evt *core.ChangeEvent) {
if secondInDomain.IsEmpty() {
return
}
vals := make([]int, 0)
for _, x_val := range firstOutDomain.GetValues() {
if !secondInDomain.Contains(x_val * c) {
vals = append(vals, x_val)
}
}
if len(vals) != 0 {
chEntry := core.CreateChangeEntryWithIntValues(firstOutVarId, vals)
evt.AddChangeEntry(chEntry)
}
}
func (this *WeightedSum) GetAllVars() []core.VarId {
allvars := make([]core.VarId, len(this.vars)+len(this.hvars)+1)
i := 0
for _, v := range this.vars {
allvars[i] = v
i++
}
for _, v := range this.hvars {
allvars[i] = v
i++
}
allvars[i] = this.resultVar
return allvars
}
func (this *WeightedSum) GetVarIds() []core.VarId {
return this.GetAllVars()
}
func (this *WeightedSum) GetDomains() []core.Domain {
return core.ValuesOfMapVarIdToIvDomain(this.GetAllVars(), this.varidToDomainMap)
}
func (this *WeightedSum) GetInCh() <-chan *core.ChangeEntry {
return this.inCh
}
func (this *WeightedSum) GetOutCh() chan<- *core.ChangeEvent {
return this.outCh
}
// XplusYneqZ represents the propagator for the constraint X + Y == Z
type PseudoXmultCeqY struct {
x, y core.VarId
c int
}
func (this *PseudoXmultCeqY) Clone() *PseudoXmultCeqY {
prop := new(PseudoXmultCeqY)
prop.x, prop.c, prop.y = this.x, this.c, this.y
return prop
}
func CreatePseudoXmultCeqY(x core.VarId, c int, y core.VarId) *PseudoXmultCeqY {
prop := new(PseudoXmultCeqY)
prop.x, prop.c, prop.y = x, c, y
return prop
} | random_line_split | |
WeightedSum.go | package interval
import (
"bitbucket.org/gofd/gofd/core"
"fmt"
"strings"
)
// WeightedSum represents the constraint C1*X1+C2*X2+...+Cn*Xn=Z
// Its propagate functions establish arc consistency (with bounds and arc
// algorithms).
// The basic idea of WeightedSum is to substitute the WeightedSum equation to
// many Ci*Xi=Hi, so that a Sum constraint results with H1+H2+...+Hn=Z. This
// Sum constraint is substituted as well with X+Y=Z equations (see Sum
// constraint for more information).
type WeightedSum struct {
vars []core.VarId
hvars []core.VarId //helper-variables
cs []int
resultVar core.VarId
outCh chan<- *core.ChangeEvent
inCh <-chan *core.ChangeEntry
varidToDomainMap map[core.VarId]*core.IvDomain
id core.PropId
store *core.Store
pseudoPropsXCY []*PseudoXmultCeqY
pseudoPropsXYZ []*PseudoXplusYeqZ
}
func (this *WeightedSum) Start() {
// initial check
evt := core.CreateChangeEvent()
this.ivweightSumInitialCheck(evt)
core.SendChangesToStore(evt, this)
for changeEntry := range this.inCh {
core.LogIncomingChange(this, this.store, changeEntry)
evt = core.CreateChangeEvent()
varidChanged := changeEntry.GetID()
changedDom := this.varidToDomainMap[varidChanged]
changedDom.Removes(changeEntry.GetValues())
this.ivweightedSumPropagate(varidChanged, evt)
core.SendChangesToStore(evt, this)
}
}
// propagate check for changes. First look for X*C=Y propagators, then
// X+Y=Z and finally for the final propagator X=Y. Collect changes
func (this *WeightedSum) ivweightedSumPropagate(varid core.VarId, evt *core.ChangeEvent) {
this.checkXmultCeqY(varid, evt)
this.ivsumPropagate(varid, evt)
}
func (this *WeightedSum) ivsumPropagate(varid core.VarId, evt *core.ChangeEvent) {
hvarXCY := make([]core.VarId, len(this.pseudoPropsXCY))
for i, hxcy := range this.pseudoPropsXCY {
hvarXCY[i] = hxcy.y
}
ivsumBoundsPropagate(varid, this.varidToDomainMap, this.pseudoPropsXYZ, evt)
ivsumArcPropagate(varid, this.varidToDomainMap, this.pseudoPropsXYZ, evt)
}
// initialCheck check for changes. First look for X*C=Y propagators, then
// X+Y=Z and finally for the final propagator X=Y. Collect changes
func (this *WeightedSum) ivweightSumInitialCheck(evt *core.ChangeEvent) {
this.checkXmultCeqY(-1, evt)
hvarXCY := make([]core.VarId, len(this.pseudoPropsXCY))
for i, hxcy := range this.pseudoPropsXCY {
hvarXCY[i] = hxcy.y
}
ivsumBoundsInitialCheck(this.varidToDomainMap, this.pseudoPropsXYZ, evt)
ivsumArcInitialCheck(this.varidToDomainMap, this.pseudoPropsXYZ, evt)
}
func (this *WeightedSum) checkXmultCeqY(varid core.VarId, evt *core.ChangeEvent) {
for i, prop := range this.pseudoPropsXCY {
xDom := this.varidToDomainMap[prop.x]
yDom := this.varidToDomainMap[prop.y]
c := this.cs[i]
if varid == -1 {
xDom := this.varidToDomainMap[prop.x]
yDom := this.varidToDomainMap[prop.y]
c := this.cs[i]
ivfirstInMultSecondOutBOUNDS(xDom, c, yDom, prop.y, evt)
ivsecondInMultFirstOutBOUNDS(yDom, c, xDom, prop.x, evt)
ivfirstInMultSecondOutARC(xDom, c, yDom, prop.y, evt)
ivsecondInMultFirstOutARC(yDom, c, xDom, prop.x, evt)
} else if prop.x == varid {
ivfirstInMultSecondOutBOUNDS(xDom, c, yDom, prop.y, evt)
ivfirstInMultSecondOutARC(xDom, c, yDom, prop.y, evt)
} else if prop.y == varid {
ivsecondInMultFirstOutBOUNDS(yDom, c, xDom, prop.x, evt)
ivsecondInMultFirstOutARC(yDom, c, xDom, prop.x, evt)
}
}
}
// Register generates auxiliary variables and makes pseudo structs
// and all vars will be registered at store and get domains and channels
func (this *WeightedSum) Register(store *core.Store) {
allvars := make([]core.VarId, len(this.vars)+len(this.hvars)+1)
i := 0
for _, v := range this.vars {
allvars[i] = v
i++
}
for _, v := range this.hvars {
allvars[i] = v
i++
}
allvars[i] = this.resultVar
var domains map[core.VarId]core.Domain
this.inCh, domains, this.outCh =
store.RegisterPropagatorMap(allvars, this.id)
this.varidToDomainMap = core.GetVaridToIntervalDomains(domains)
this.store = store
}
// SetID is used by the store to set the propagator's ID, don't use it
// yourself or bad things will happen
func (this *WeightedSum) SetID(propID core.PropId) {
this.id = propID
}
func (this *WeightedSum) GetID() core.PropId {
return this.id
}
func CreateWeightedSum(store *core.Store, resultVar core.VarId, cs []int,
intVars ...core.VarId) *WeightedSum {
if core.GetLogger().DoDebug() {
core.GetLogger().Dln("CreateWeightedSum-propagator")
}
prop := new(WeightedSum)
prop.vars = intVars
prop.resultVar = resultVar
prop.cs = cs
prop.pseudoPropsXCY = make([]*PseudoXmultCeqY, len(prop.vars))
prop.hvars = make([]core.VarId, 0)
for i, X := range prop.vars {
H := core.CreateAuxIntVarIvValues(store,
core.ScalarSlice(prop.cs[i], store.GetDomain(X).Values_asSlice()))
prop.pseudoPropsXCY[i] = CreatePseudoXmultCeqY(X, prop.cs[i], H)
prop.hvars = append(prop.hvars, H)
}
prop.pseudoPropsXYZ = make([]*PseudoXplusYeqZ, len(prop.pseudoPropsXCY)-1)
H := prop.pseudoPropsXCY[0].y
newHVars := make([]core.VarId, 0)
for i, p := range prop.pseudoPropsXCY[1 : len(prop.vars)-1] {
NewH := core.CreateAuxIntVarIvFromTo(store,
store.GetDomain(H).GetMin()+store.GetDomain(p.y).GetMin(),
store.GetDomain(H).GetMax()+store.GetDomain(p.y).GetMax())
prop.pseudoPropsXYZ[i] = CreatePseudoXplusYeqZ(H, p.y, NewH)
H = NewH
newHVars = append(newHVars, NewH)
}
X := prop.hvars[len(prop.hvars)-1]
prop.hvars = append(prop.hvars, newHVars...)
prop.pseudoPropsXYZ[len(prop.pseudoPropsXYZ)-1] = CreatePseudoXplusYeqZ(H, X, prop.resultVar)
return prop
}
func (this *WeightedSum) String() string {
vars_str := make([]string, len(this.vars))
for i, var_id := range this.vars {
vars_str[i] = fmt.Sprintf("%v*%s",
this.cs[i], this.store.GetName(var_id))
}
return fmt.Sprintf("PROP_%d %s = %s",
this.id, strings.Join(vars_str, "+"),
this.store.GetName(this.resultVar))
}
func (this *WeightedSum) Clone() core.Constraint {
prop := new(WeightedSum)
prop.vars = make([]core.VarId, len(this.vars))
for i, single_var := range this.vars {
prop.vars[i] = single_var
}
prop.resultVar = this.resultVar
prop.cs = make([]int, len(this.cs))
for i, c := range this.cs {
prop.cs[i] = c
}
prop.pseudoPropsXCY = make([]*PseudoXmultCeqY, len(this.pseudoPropsXCY))
for i, p := range this.pseudoPropsXCY {
prop.pseudoPropsXCY[i] = p.Clone()
}
prop.pseudoPropsXYZ = make([]*PseudoXplusYeqZ, len(this.pseudoPropsXYZ))
for i, p := range this.pseudoPropsXYZ {
prop.pseudoPropsXYZ[i] = p.Clone()
}
prop.hvars = make([]core.VarId, len(this.hvars))
for i, single_var := range this.hvars {
prop.hvars[i] = single_var
}
return prop
}
// INFO: the following two propagation-functions do not work on special
// interval-operations. Reason is: the multiplication is always worst case!
// example-CSP:
// X*C=Y
// X:[1,11]
// C:2
// Y:[1,20]
// RESULT:
// X:[1,10]
// Y:[2] [4] [6] ... [20]
// X*C=Y
// ivfirstInMultSecondOutARC collect changes, when first variable has changed
// e.g. X=Y/C, then X is first variable
func ivfirstInMultSecondOutBOUNDS(firstInDomain *core.IvDomain, c int,
secondOutDomain *core.IvDomain, secondOutVarId core.VarId,
evt *core.ChangeEvent) {
if firstInDomain.IsEmpty() {
return
}
removeParts := make([]*core.IvDomPart, 0)
minX := firstInDomain.GetMin() * c
maxX := firstInDomain.GetMax() * c
for i, yPart := range secondOutDomain.GetParts() {
removeParts = append(removeParts, yPart.DIFFERENCE_MIN_MAX(minX, maxX)...)
if yPart.GT(maxX) {
removeParts = append(removeParts, secondOutDomain.GetParts()[i:]...)
break
}
}
if len(removeParts) != 0 {
chEntry := core.CreateChangeEntryWithValues(secondOutVarId, core.CreateIvDomainDomParts(removeParts))
evt.AddChangeEntry(chEntry)
}
}
// ivsecondInMultFirstOutARC collect changes, when second variable has changed
// e.g. X*C=Y, then Y is second variable
func ivsecondInMultFirstOutBOUNDS(secondInDomain *core.IvDomain, c int,
firstOutDomain *core.IvDomain, firstOutVarId core.VarId,
evt *core.ChangeEvent) {
removeParts := make([]*core.IvDomPart, 0)
if secondInDomain.IsEmpty() {
return
}
minY := secondInDomain.GetMin() / c
maxY := secondInDomain.GetMax() / c
for i, xPart := range firstOutDomain.GetParts() {
removeParts = append(removeParts, xPart.DIFFERENCE_MIN_MAX(minY, maxY)...)
if xPart.GT(maxY) {
removeParts = append(removeParts, firstOutDomain.GetParts()[i:]...)
break
}
}
if len(removeParts) != 0 {
chEntry := core.CreateChangeEntryWithValues(firstOutVarId, core.CreateIvDomainDomParts(removeParts))
evt.AddChangeEntry(chEntry)
}
}
// X*C=Y
// ivfirstInMultSecondOutARC collect changes, when first variable has changed
// e.g. Y=X/C, then X is first variable
func ivfirstInMultSecondOutARC(firstInDomain *core.IvDomain, c int,
secondOutDomain *core.IvDomain, secondOutVarId core.VarId,
evt *core.ChangeEvent) {
if firstInDomain.IsEmpty() {
return
}
vals := make([]int, 0)
for _, y_val := range secondOutDomain.GetValues() {
if c == 0 || !(y_val%c == 0) {
if y_val != 0 {
vals = append(vals, y_val)
}
} else {
if !firstInDomain.Contains(y_val / c) {
vals = append(vals, y_val)
}
}
}
if len(vals) != 0 {
chEntry := core.CreateChangeEntryWithIntValues(secondOutVarId, vals)
evt.AddChangeEntry(chEntry)
}
}
// ivsecondInMultFirstOutARC collect changes, when second variable has changed
// e.g. X*C=Y, then Y is second variable
func ivsecondInMultFirstOutARC(secondInDomain *core.IvDomain, c int,
firstOutDomain *core.IvDomain, firstOutVarId core.VarId,
evt *core.ChangeEvent) {
if secondInDomain.IsEmpty() {
return
}
vals := make([]int, 0)
for _, x_val := range firstOutDomain.GetValues() {
if !secondInDomain.Contains(x_val * c) {
vals = append(vals, x_val)
}
}
if len(vals) != 0 {
chEntry := core.CreateChangeEntryWithIntValues(firstOutVarId, vals)
evt.AddChangeEntry(chEntry)
}
}
func (this *WeightedSum) GetAllVars() []core.VarId {
allvars := make([]core.VarId, len(this.vars)+len(this.hvars)+1)
i := 0
for _, v := range this.vars {
allvars[i] = v
i++
}
for _, v := range this.hvars {
allvars[i] = v
i++
}
allvars[i] = this.resultVar
return allvars
}
func (this *WeightedSum) GetVarIds() []core.VarId {
return this.GetAllVars()
}
func (this *WeightedSum) GetDomains() []core.Domain {
return core.ValuesOfMapVarIdToIvDomain(this.GetAllVars(), this.varidToDomainMap)
}
func (this *WeightedSum) GetInCh() <-chan *core.ChangeEntry |
func (this *WeightedSum) GetOutCh() chan<- *core.ChangeEvent {
return this.outCh
}
// XplusYneqZ represents the propagator for the constraint X + Y == Z
type PseudoXmultCeqY struct {
x, y core.VarId
c int
}
func (this *PseudoXmultCeqY) Clone() *PseudoXmultCeqY {
prop := new(PseudoXmultCeqY)
prop.x, prop.c, prop.y = this.x, this.c, this.y
return prop
}
func CreatePseudoXmultCeqY(x core.VarId, c int, y core.VarId) *PseudoXmultCeqY {
prop := new(PseudoXmultCeqY)
prop.x, prop.c, prop.y = x, c, y
return prop
}
| {
return this.inCh
} | identifier_body |
WeightedSum.go | package interval
import (
"bitbucket.org/gofd/gofd/core"
"fmt"
"strings"
)
// WeightedSum represents the constraint C1*X1+C2*X2+...+Cn*Xn=Z
// Its propagate functions establish arc consistency (with bounds and arc
// algorithms).
// The basic idea of WeightedSum is to substitute the WeightedSum equation to
// many Ci*Xi=Hi, so that a Sum constraint results with H1+H2+...+Hn=Z. This
// Sum constraint is substituted as well with X+Y=Z equations (see Sum
// constraint for more information).
type WeightedSum struct {
vars []core.VarId
hvars []core.VarId //helper-variables
cs []int
resultVar core.VarId
outCh chan<- *core.ChangeEvent
inCh <-chan *core.ChangeEntry
varidToDomainMap map[core.VarId]*core.IvDomain
id core.PropId
store *core.Store
pseudoPropsXCY []*PseudoXmultCeqY
pseudoPropsXYZ []*PseudoXplusYeqZ
}
func (this *WeightedSum) Start() {
// initial check
evt := core.CreateChangeEvent()
this.ivweightSumInitialCheck(evt)
core.SendChangesToStore(evt, this)
for changeEntry := range this.inCh {
core.LogIncomingChange(this, this.store, changeEntry)
evt = core.CreateChangeEvent()
varidChanged := changeEntry.GetID()
changedDom := this.varidToDomainMap[varidChanged]
changedDom.Removes(changeEntry.GetValues())
this.ivweightedSumPropagate(varidChanged, evt)
core.SendChangesToStore(evt, this)
}
}
// propagate check for changes. First look for X*C=Y propagators, then
// X+Y=Z and finally for the final propagator X=Y. Collect changes
func (this *WeightedSum) ivweightedSumPropagate(varid core.VarId, evt *core.ChangeEvent) {
this.checkXmultCeqY(varid, evt)
this.ivsumPropagate(varid, evt)
}
func (this *WeightedSum) ivsumPropagate(varid core.VarId, evt *core.ChangeEvent) {
hvarXCY := make([]core.VarId, len(this.pseudoPropsXCY))
for i, hxcy := range this.pseudoPropsXCY {
hvarXCY[i] = hxcy.y
}
ivsumBoundsPropagate(varid, this.varidToDomainMap, this.pseudoPropsXYZ, evt)
ivsumArcPropagate(varid, this.varidToDomainMap, this.pseudoPropsXYZ, evt)
}
// initialCheck check for changes. First look for X*C=Y propagators, then
// X+Y=Z and finally for the final propagator X=Y. Collect changes
func (this *WeightedSum) ivweightSumInitialCheck(evt *core.ChangeEvent) {
this.checkXmultCeqY(-1, evt)
hvarXCY := make([]core.VarId, len(this.pseudoPropsXCY))
for i, hxcy := range this.pseudoPropsXCY {
hvarXCY[i] = hxcy.y
}
ivsumBoundsInitialCheck(this.varidToDomainMap, this.pseudoPropsXYZ, evt)
ivsumArcInitialCheck(this.varidToDomainMap, this.pseudoPropsXYZ, evt)
}
func (this *WeightedSum) checkXmultCeqY(varid core.VarId, evt *core.ChangeEvent) {
for i, prop := range this.pseudoPropsXCY {
xDom := this.varidToDomainMap[prop.x]
yDom := this.varidToDomainMap[prop.y]
c := this.cs[i]
if varid == -1 {
xDom := this.varidToDomainMap[prop.x]
yDom := this.varidToDomainMap[prop.y]
c := this.cs[i]
ivfirstInMultSecondOutBOUNDS(xDom, c, yDom, prop.y, evt)
ivsecondInMultFirstOutBOUNDS(yDom, c, xDom, prop.x, evt)
ivfirstInMultSecondOutARC(xDom, c, yDom, prop.y, evt)
ivsecondInMultFirstOutARC(yDom, c, xDom, prop.x, evt)
} else if prop.x == varid {
ivfirstInMultSecondOutBOUNDS(xDom, c, yDom, prop.y, evt)
ivfirstInMultSecondOutARC(xDom, c, yDom, prop.y, evt)
} else if prop.y == varid {
ivsecondInMultFirstOutBOUNDS(yDom, c, xDom, prop.x, evt)
ivsecondInMultFirstOutARC(yDom, c, xDom, prop.x, evt)
}
}
}
// Register generates auxiliary variables and makes pseudo structs
// and all vars will be registered at store and get domains and channels
func (this *WeightedSum) Register(store *core.Store) {
allvars := make([]core.VarId, len(this.vars)+len(this.hvars)+1)
i := 0
for _, v := range this.vars {
allvars[i] = v
i++
}
for _, v := range this.hvars {
allvars[i] = v
i++
}
allvars[i] = this.resultVar
var domains map[core.VarId]core.Domain
this.inCh, domains, this.outCh =
store.RegisterPropagatorMap(allvars, this.id)
this.varidToDomainMap = core.GetVaridToIntervalDomains(domains)
this.store = store
}
// SetID is used by the store to set the propagator's ID, don't use it
// yourself or bad things will happen
func (this *WeightedSum) SetID(propID core.PropId) {
this.id = propID
}
func (this *WeightedSum) GetID() core.PropId {
return this.id
}
func CreateWeightedSum(store *core.Store, resultVar core.VarId, cs []int,
intVars ...core.VarId) *WeightedSum {
if core.GetLogger().DoDebug() {
core.GetLogger().Dln("CreateWeightedSum-propagator")
}
prop := new(WeightedSum)
prop.vars = intVars
prop.resultVar = resultVar
prop.cs = cs
prop.pseudoPropsXCY = make([]*PseudoXmultCeqY, len(prop.vars))
prop.hvars = make([]core.VarId, 0)
for i, X := range prop.vars {
H := core.CreateAuxIntVarIvValues(store,
core.ScalarSlice(prop.cs[i], store.GetDomain(X).Values_asSlice()))
prop.pseudoPropsXCY[i] = CreatePseudoXmultCeqY(X, prop.cs[i], H)
prop.hvars = append(prop.hvars, H)
}
prop.pseudoPropsXYZ = make([]*PseudoXplusYeqZ, len(prop.pseudoPropsXCY)-1)
H := prop.pseudoPropsXCY[0].y
newHVars := make([]core.VarId, 0)
for i, p := range prop.pseudoPropsXCY[1 : len(prop.vars)-1] {
NewH := core.CreateAuxIntVarIvFromTo(store,
store.GetDomain(H).GetMin()+store.GetDomain(p.y).GetMin(),
store.GetDomain(H).GetMax()+store.GetDomain(p.y).GetMax())
prop.pseudoPropsXYZ[i] = CreatePseudoXplusYeqZ(H, p.y, NewH)
H = NewH
newHVars = append(newHVars, NewH)
}
X := prop.hvars[len(prop.hvars)-1]
prop.hvars = append(prop.hvars, newHVars...)
prop.pseudoPropsXYZ[len(prop.pseudoPropsXYZ)-1] = CreatePseudoXplusYeqZ(H, X, prop.resultVar)
return prop
}
func (this *WeightedSum) String() string {
vars_str := make([]string, len(this.vars))
for i, var_id := range this.vars {
vars_str[i] = fmt.Sprintf("%v*%s",
this.cs[i], this.store.GetName(var_id))
}
return fmt.Sprintf("PROP_%d %s = %s",
this.id, strings.Join(vars_str, "+"),
this.store.GetName(this.resultVar))
}
func (this *WeightedSum) Clone() core.Constraint {
prop := new(WeightedSum)
prop.vars = make([]core.VarId, len(this.vars))
for i, single_var := range this.vars {
prop.vars[i] = single_var
}
prop.resultVar = this.resultVar
prop.cs = make([]int, len(this.cs))
for i, c := range this.cs {
prop.cs[i] = c
}
prop.pseudoPropsXCY = make([]*PseudoXmultCeqY, len(this.pseudoPropsXCY))
for i, p := range this.pseudoPropsXCY {
prop.pseudoPropsXCY[i] = p.Clone()
}
prop.pseudoPropsXYZ = make([]*PseudoXplusYeqZ, len(this.pseudoPropsXYZ))
for i, p := range this.pseudoPropsXYZ {
prop.pseudoPropsXYZ[i] = p.Clone()
}
prop.hvars = make([]core.VarId, len(this.hvars))
for i, single_var := range this.hvars {
prop.hvars[i] = single_var
}
return prop
}
// INFO: the following two propagation-functions do not work on special
// interval-operations. Reason is: the multiplication is always worst case!
// example-CSP:
// X*C=Y
// X:[1,11]
// C:2
// Y:[1,20]
// RESULT:
// X:[1,10]
// Y:[2] [4] [6] ... [20]
// X*C=Y
// ivfirstInMultSecondOutARC collect changes, when first variable has changed
// e.g. X=Y/C, then X is first variable
func ivfirstInMultSecondOutBOUNDS(firstInDomain *core.IvDomain, c int,
secondOutDomain *core.IvDomain, secondOutVarId core.VarId,
evt *core.ChangeEvent) {
if firstInDomain.IsEmpty() {
return
}
removeParts := make([]*core.IvDomPart, 0)
minX := firstInDomain.GetMin() * c
maxX := firstInDomain.GetMax() * c
for i, yPart := range secondOutDomain.GetParts() {
removeParts = append(removeParts, yPart.DIFFERENCE_MIN_MAX(minX, maxX)...)
if yPart.GT(maxX) {
removeParts = append(removeParts, secondOutDomain.GetParts()[i:]...)
break
}
}
if len(removeParts) != 0 {
chEntry := core.CreateChangeEntryWithValues(secondOutVarId, core.CreateIvDomainDomParts(removeParts))
evt.AddChangeEntry(chEntry)
}
}
// ivsecondInMultFirstOutARC collect changes, when second variable has changed
// e.g. X*C=Y, then Y is second variable
func ivsecondInMultFirstOutBOUNDS(secondInDomain *core.IvDomain, c int,
firstOutDomain *core.IvDomain, firstOutVarId core.VarId,
evt *core.ChangeEvent) {
removeParts := make([]*core.IvDomPart, 0)
if secondInDomain.IsEmpty() {
return
}
minY := secondInDomain.GetMin() / c
maxY := secondInDomain.GetMax() / c
for i, xPart := range firstOutDomain.GetParts() |
if len(removeParts) != 0 {
chEntry := core.CreateChangeEntryWithValues(firstOutVarId, core.CreateIvDomainDomParts(removeParts))
evt.AddChangeEntry(chEntry)
}
}
// X*C=Y
// ivfirstInMultSecondOutARC collect changes, when first variable has changed
// e.g. Y=X/C, then X is first variable
func ivfirstInMultSecondOutARC(firstInDomain *core.IvDomain, c int,
secondOutDomain *core.IvDomain, secondOutVarId core.VarId,
evt *core.ChangeEvent) {
if firstInDomain.IsEmpty() {
return
}
vals := make([]int, 0)
for _, y_val := range secondOutDomain.GetValues() {
if c == 0 || !(y_val%c == 0) {
if y_val != 0 {
vals = append(vals, y_val)
}
} else {
if !firstInDomain.Contains(y_val / c) {
vals = append(vals, y_val)
}
}
}
if len(vals) != 0 {
chEntry := core.CreateChangeEntryWithIntValues(secondOutVarId, vals)
evt.AddChangeEntry(chEntry)
}
}
// ivsecondInMultFirstOutARC collect changes, when second variable has changed
// e.g. X*C=Y, then Y is second variable
func ivsecondInMultFirstOutARC(secondInDomain *core.IvDomain, c int,
firstOutDomain *core.IvDomain, firstOutVarId core.VarId,
evt *core.ChangeEvent) {
if secondInDomain.IsEmpty() {
return
}
vals := make([]int, 0)
for _, x_val := range firstOutDomain.GetValues() {
if !secondInDomain.Contains(x_val * c) {
vals = append(vals, x_val)
}
}
if len(vals) != 0 {
chEntry := core.CreateChangeEntryWithIntValues(firstOutVarId, vals)
evt.AddChangeEntry(chEntry)
}
}
func (this *WeightedSum) GetAllVars() []core.VarId {
allvars := make([]core.VarId, len(this.vars)+len(this.hvars)+1)
i := 0
for _, v := range this.vars {
allvars[i] = v
i++
}
for _, v := range this.hvars {
allvars[i] = v
i++
}
allvars[i] = this.resultVar
return allvars
}
func (this *WeightedSum) GetVarIds() []core.VarId {
return this.GetAllVars()
}
func (this *WeightedSum) GetDomains() []core.Domain {
return core.ValuesOfMapVarIdToIvDomain(this.GetAllVars(), this.varidToDomainMap)
}
func (this *WeightedSum) GetInCh() <-chan *core.ChangeEntry {
return this.inCh
}
func (this *WeightedSum) GetOutCh() chan<- *core.ChangeEvent {
return this.outCh
}
// XplusYneqZ represents the propagator for the constraint X + Y == Z
type PseudoXmultCeqY struct {
x, y core.VarId
c int
}
func (this *PseudoXmultCeqY) Clone() *PseudoXmultCeqY {
prop := new(PseudoXmultCeqY)
prop.x, prop.c, prop.y = this.x, this.c, this.y
return prop
}
func CreatePseudoXmultCeqY(x core.VarId, c int, y core.VarId) *PseudoXmultCeqY {
prop := new(PseudoXmultCeqY)
prop.x, prop.c, prop.y = x, c, y
return prop
}
| {
removeParts = append(removeParts, xPart.DIFFERENCE_MIN_MAX(minY, maxY)...)
if xPart.GT(maxY) {
removeParts = append(removeParts, firstOutDomain.GetParts()[i:]...)
break
}
} | conditional_block |
WeightedSum.go | package interval
import (
"bitbucket.org/gofd/gofd/core"
"fmt"
"strings"
)
// WeightedSum represents the constraint C1*X1+C2*X2+...+Cn*Xn=Z
// Its propagate functions establish arc consistency (with bounds and arc
// algorithms).
// The basic idea of WeightedSum is to substitute the WeightedSum equation to
// many Ci*Xi=Hi, so that a Sum constraint results with H1+H2+...+Hn=Z. This
// Sum constraint is substituted as well with X+Y=Z equations (see Sum
// constraint for more information).
type WeightedSum struct {
vars []core.VarId
hvars []core.VarId //helper-variables
cs []int
resultVar core.VarId
outCh chan<- *core.ChangeEvent
inCh <-chan *core.ChangeEntry
varidToDomainMap map[core.VarId]*core.IvDomain
id core.PropId
store *core.Store
pseudoPropsXCY []*PseudoXmultCeqY
pseudoPropsXYZ []*PseudoXplusYeqZ
}
func (this *WeightedSum) Start() {
// initial check
evt := core.CreateChangeEvent()
this.ivweightSumInitialCheck(evt)
core.SendChangesToStore(evt, this)
for changeEntry := range this.inCh {
core.LogIncomingChange(this, this.store, changeEntry)
evt = core.CreateChangeEvent()
varidChanged := changeEntry.GetID()
changedDom := this.varidToDomainMap[varidChanged]
changedDom.Removes(changeEntry.GetValues())
this.ivweightedSumPropagate(varidChanged, evt)
core.SendChangesToStore(evt, this)
}
}
// propagate check for changes. First look for X*C=Y propagators, then
// X+Y=Z and finally for the final propagator X=Y. Collect changes
func (this *WeightedSum) ivweightedSumPropagate(varid core.VarId, evt *core.ChangeEvent) {
this.checkXmultCeqY(varid, evt)
this.ivsumPropagate(varid, evt)
}
func (this *WeightedSum) ivsumPropagate(varid core.VarId, evt *core.ChangeEvent) {
hvarXCY := make([]core.VarId, len(this.pseudoPropsXCY))
for i, hxcy := range this.pseudoPropsXCY {
hvarXCY[i] = hxcy.y
}
ivsumBoundsPropagate(varid, this.varidToDomainMap, this.pseudoPropsXYZ, evt)
ivsumArcPropagate(varid, this.varidToDomainMap, this.pseudoPropsXYZ, evt)
}
// initialCheck check for changes. First look for X*C=Y propagators, then
// X+Y=Z and finally for the final propagator X=Y. Collect changes
func (this *WeightedSum) ivweightSumInitialCheck(evt *core.ChangeEvent) {
this.checkXmultCeqY(-1, evt)
hvarXCY := make([]core.VarId, len(this.pseudoPropsXCY))
for i, hxcy := range this.pseudoPropsXCY {
hvarXCY[i] = hxcy.y
}
ivsumBoundsInitialCheck(this.varidToDomainMap, this.pseudoPropsXYZ, evt)
ivsumArcInitialCheck(this.varidToDomainMap, this.pseudoPropsXYZ, evt)
}
func (this *WeightedSum) checkXmultCeqY(varid core.VarId, evt *core.ChangeEvent) {
for i, prop := range this.pseudoPropsXCY {
xDom := this.varidToDomainMap[prop.x]
yDom := this.varidToDomainMap[prop.y]
c := this.cs[i]
if varid == -1 {
xDom := this.varidToDomainMap[prop.x]
yDom := this.varidToDomainMap[prop.y]
c := this.cs[i]
ivfirstInMultSecondOutBOUNDS(xDom, c, yDom, prop.y, evt)
ivsecondInMultFirstOutBOUNDS(yDom, c, xDom, prop.x, evt)
ivfirstInMultSecondOutARC(xDom, c, yDom, prop.y, evt)
ivsecondInMultFirstOutARC(yDom, c, xDom, prop.x, evt)
} else if prop.x == varid {
ivfirstInMultSecondOutBOUNDS(xDom, c, yDom, prop.y, evt)
ivfirstInMultSecondOutARC(xDom, c, yDom, prop.y, evt)
} else if prop.y == varid {
ivsecondInMultFirstOutBOUNDS(yDom, c, xDom, prop.x, evt)
ivsecondInMultFirstOutARC(yDom, c, xDom, prop.x, evt)
}
}
}
// Register generates auxiliary variables and makes pseudo structs
// and all vars will be registered at store and get domains and channels
func (this *WeightedSum) Register(store *core.Store) {
allvars := make([]core.VarId, len(this.vars)+len(this.hvars)+1)
i := 0
for _, v := range this.vars {
allvars[i] = v
i++
}
for _, v := range this.hvars {
allvars[i] = v
i++
}
allvars[i] = this.resultVar
var domains map[core.VarId]core.Domain
this.inCh, domains, this.outCh =
store.RegisterPropagatorMap(allvars, this.id)
this.varidToDomainMap = core.GetVaridToIntervalDomains(domains)
this.store = store
}
// SetID is used by the store to set the propagator's ID, don't use it
// yourself or bad things will happen
func (this *WeightedSum) SetID(propID core.PropId) {
this.id = propID
}
func (this *WeightedSum) GetID() core.PropId {
return this.id
}
func CreateWeightedSum(store *core.Store, resultVar core.VarId, cs []int,
intVars ...core.VarId) *WeightedSum {
if core.GetLogger().DoDebug() {
core.GetLogger().Dln("CreateWeightedSum-propagator")
}
prop := new(WeightedSum)
prop.vars = intVars
prop.resultVar = resultVar
prop.cs = cs
prop.pseudoPropsXCY = make([]*PseudoXmultCeqY, len(prop.vars))
prop.hvars = make([]core.VarId, 0)
for i, X := range prop.vars {
H := core.CreateAuxIntVarIvValues(store,
core.ScalarSlice(prop.cs[i], store.GetDomain(X).Values_asSlice()))
prop.pseudoPropsXCY[i] = CreatePseudoXmultCeqY(X, prop.cs[i], H)
prop.hvars = append(prop.hvars, H)
}
prop.pseudoPropsXYZ = make([]*PseudoXplusYeqZ, len(prop.pseudoPropsXCY)-1)
H := prop.pseudoPropsXCY[0].y
newHVars := make([]core.VarId, 0)
for i, p := range prop.pseudoPropsXCY[1 : len(prop.vars)-1] {
NewH := core.CreateAuxIntVarIvFromTo(store,
store.GetDomain(H).GetMin()+store.GetDomain(p.y).GetMin(),
store.GetDomain(H).GetMax()+store.GetDomain(p.y).GetMax())
prop.pseudoPropsXYZ[i] = CreatePseudoXplusYeqZ(H, p.y, NewH)
H = NewH
newHVars = append(newHVars, NewH)
}
X := prop.hvars[len(prop.hvars)-1]
prop.hvars = append(prop.hvars, newHVars...)
prop.pseudoPropsXYZ[len(prop.pseudoPropsXYZ)-1] = CreatePseudoXplusYeqZ(H, X, prop.resultVar)
return prop
}
func (this *WeightedSum) String() string {
vars_str := make([]string, len(this.vars))
for i, var_id := range this.vars {
vars_str[i] = fmt.Sprintf("%v*%s",
this.cs[i], this.store.GetName(var_id))
}
return fmt.Sprintf("PROP_%d %s = %s",
this.id, strings.Join(vars_str, "+"),
this.store.GetName(this.resultVar))
}
func (this *WeightedSum) Clone() core.Constraint {
prop := new(WeightedSum)
prop.vars = make([]core.VarId, len(this.vars))
for i, single_var := range this.vars {
prop.vars[i] = single_var
}
prop.resultVar = this.resultVar
prop.cs = make([]int, len(this.cs))
for i, c := range this.cs {
prop.cs[i] = c
}
prop.pseudoPropsXCY = make([]*PseudoXmultCeqY, len(this.pseudoPropsXCY))
for i, p := range this.pseudoPropsXCY {
prop.pseudoPropsXCY[i] = p.Clone()
}
prop.pseudoPropsXYZ = make([]*PseudoXplusYeqZ, len(this.pseudoPropsXYZ))
for i, p := range this.pseudoPropsXYZ {
prop.pseudoPropsXYZ[i] = p.Clone()
}
prop.hvars = make([]core.VarId, len(this.hvars))
for i, single_var := range this.hvars {
prop.hvars[i] = single_var
}
return prop
}
// INFO: the following two propagation-functions do not work on special
// interval-operations. Reason is: the multiplication is always worst case!
// example-CSP:
// X*C=Y
// X:[1,11]
// C:2
// Y:[1,20]
// RESULT:
// X:[1,10]
// Y:[2] [4] [6] ... [20]
// X*C=Y
// ivfirstInMultSecondOutARC collect changes, when first variable has changed
// e.g. X=Y/C, then X is first variable
func ivfirstInMultSecondOutBOUNDS(firstInDomain *core.IvDomain, c int,
secondOutDomain *core.IvDomain, secondOutVarId core.VarId,
evt *core.ChangeEvent) {
if firstInDomain.IsEmpty() {
return
}
removeParts := make([]*core.IvDomPart, 0)
minX := firstInDomain.GetMin() * c
maxX := firstInDomain.GetMax() * c
for i, yPart := range secondOutDomain.GetParts() {
removeParts = append(removeParts, yPart.DIFFERENCE_MIN_MAX(minX, maxX)...)
if yPart.GT(maxX) {
removeParts = append(removeParts, secondOutDomain.GetParts()[i:]...)
break
}
}
if len(removeParts) != 0 {
chEntry := core.CreateChangeEntryWithValues(secondOutVarId, core.CreateIvDomainDomParts(removeParts))
evt.AddChangeEntry(chEntry)
}
}
// ivsecondInMultFirstOutARC collect changes, when second variable has changed
// e.g. X*C=Y, then Y is second variable
func ivsecondInMultFirstOutBOUNDS(secondInDomain *core.IvDomain, c int,
firstOutDomain *core.IvDomain, firstOutVarId core.VarId,
evt *core.ChangeEvent) {
removeParts := make([]*core.IvDomPart, 0)
if secondInDomain.IsEmpty() {
return
}
minY := secondInDomain.GetMin() / c
maxY := secondInDomain.GetMax() / c
for i, xPart := range firstOutDomain.GetParts() {
removeParts = append(removeParts, xPart.DIFFERENCE_MIN_MAX(minY, maxY)...)
if xPart.GT(maxY) {
removeParts = append(removeParts, firstOutDomain.GetParts()[i:]...)
break
}
}
if len(removeParts) != 0 {
chEntry := core.CreateChangeEntryWithValues(firstOutVarId, core.CreateIvDomainDomParts(removeParts))
evt.AddChangeEntry(chEntry)
}
}
// X*C=Y
// ivfirstInMultSecondOutARC collect changes, when first variable has changed
// e.g. Y=X/C, then X is first variable
func ivfirstInMultSecondOutARC(firstInDomain *core.IvDomain, c int,
secondOutDomain *core.IvDomain, secondOutVarId core.VarId,
evt *core.ChangeEvent) {
if firstInDomain.IsEmpty() {
return
}
vals := make([]int, 0)
for _, y_val := range secondOutDomain.GetValues() {
if c == 0 || !(y_val%c == 0) {
if y_val != 0 {
vals = append(vals, y_val)
}
} else {
if !firstInDomain.Contains(y_val / c) {
vals = append(vals, y_val)
}
}
}
if len(vals) != 0 {
chEntry := core.CreateChangeEntryWithIntValues(secondOutVarId, vals)
evt.AddChangeEntry(chEntry)
}
}
// ivsecondInMultFirstOutARC collect changes, when second variable has changed
// e.g. X*C=Y, then Y is second variable
func ivsecondInMultFirstOutARC(secondInDomain *core.IvDomain, c int,
firstOutDomain *core.IvDomain, firstOutVarId core.VarId,
evt *core.ChangeEvent) {
if secondInDomain.IsEmpty() {
return
}
vals := make([]int, 0)
for _, x_val := range firstOutDomain.GetValues() {
if !secondInDomain.Contains(x_val * c) {
vals = append(vals, x_val)
}
}
if len(vals) != 0 {
chEntry := core.CreateChangeEntryWithIntValues(firstOutVarId, vals)
evt.AddChangeEntry(chEntry)
}
}
func (this *WeightedSum) GetAllVars() []core.VarId {
allvars := make([]core.VarId, len(this.vars)+len(this.hvars)+1)
i := 0
for _, v := range this.vars {
allvars[i] = v
i++
}
for _, v := range this.hvars {
allvars[i] = v
i++
}
allvars[i] = this.resultVar
return allvars
}
func (this *WeightedSum) GetVarIds() []core.VarId {
return this.GetAllVars()
}
func (this *WeightedSum) | () []core.Domain {
return core.ValuesOfMapVarIdToIvDomain(this.GetAllVars(), this.varidToDomainMap)
}
func (this *WeightedSum) GetInCh() <-chan *core.ChangeEntry {
return this.inCh
}
func (this *WeightedSum) GetOutCh() chan<- *core.ChangeEvent {
return this.outCh
}
// XplusYneqZ represents the propagator for the constraint X + Y == Z
type PseudoXmultCeqY struct {
x, y core.VarId
c int
}
func (this *PseudoXmultCeqY) Clone() *PseudoXmultCeqY {
prop := new(PseudoXmultCeqY)
prop.x, prop.c, prop.y = this.x, this.c, this.y
return prop
}
func CreatePseudoXmultCeqY(x core.VarId, c int, y core.VarId) *PseudoXmultCeqY {
prop := new(PseudoXmultCeqY)
prop.x, prop.c, prop.y = x, c, y
return prop
}
| GetDomains | identifier_name |
1505.js | // --------------
// 2017.03.20 增加cookie 记录dxl= & dxlm = 值
var dxlUrlValue = $.isUrlPar("dxl")||"";
var dxlmUrlValue = $.isUrlPar("dxlm")||"";
// 记录各自 cookie
if(dxlUrlValue){
$.cookie("dxl", dxlUrlValue);
};
if(dxlmUrlValue){
$.cookie("dxlm", dxlmUrlValue);
};
//---------------
$(document).ready(function () {
//2017.03.20 读取cookie (dxl ==2? ==2删除 header 和 footer)------------
var dxlUrlValue2 = $.cookie("dxl");
if(dxlUrlValue2 =="2"){
$("header").remove();
| --------------
var CITY = $.cookie('city');
var urlTemp = dxlHttp.m + "index/jsonpnew/index?act=hunShaList&callback=?&"; //瀑布流接口
var groupSubmit = $(".group.submit"); //筛选 确认按钮
var loadWrap = $(".loadWrap"); //加载提示
var allSellerSpan = $(".allSeller .selectShow span"); //全部商区 span
var allStyleSpan = $(".allStyle .selectShow span"); //全部商户 span
var smartSortSpan = $(".smartSort .selectShow span"); //智能排序 span
var par = {
region: allSellerSpan.attr("url"), //商区
feature: allStyleSpan.attr("url"), //酒店商户类型
city: $.cookie("city") //智能排序
}
var minPriceInput = $(".priceRange .dSpanInput").eq(0).find("input"); //最低价格输入框
var maxPriceInput = $(".priceRange .dSpanInput").eq(1).find("input"); //最高价格输入框
var priceRangeSpan = $(".priceRange span"); //价格tag元素
var jsonPrice = {}; //价格tag json
//记录cookie
$.cookie("weddingListUrl", window.location.href, {
expires: 7,
domain: "daoxila." + s4Com,
path: "/"
});
//点击指定区域 input失焦
$("#sellerSort").touchClick(function (e) {
if (e.target.tagName !== "INPUT") {
$("input").blur();
}
})
//把页面内的数据清除。
$("#sellerList ul").html("").css("display", 'block');
//关闭广告条
$(".banner .close").touchClick(function (e) {
e.stopPropagation();
e.preventDefault();
$(".banner").slideUp(500);
})
//底部app下载调用
$.wFootAppDown();
//返回顶部
$.mTopCall();
//搜索
$.mSearch(function () {
$.dxlGaPageTracker("/VPTracker/WapHunShaSheYing/Search");
});
//动态取值 排序区域宽高
sortUlWidth()
$(window).on("resize", sortUlWidth);
//全部商户 婚宴/婚纱/婚庆业务切换
$(".selectTag").touchClick(function () {
$(".allStyle").find(".active").removeClass("active");
$(this).parent().addClass("active");
});
//点击显示筛选条目
$(".selectShow").touchClick(function (e) {
//添加虚拟url
var parentIndex = $(this).parent().index(); //父级的索引
var gaUrl = "";
switch (parentIndex) {
case 0:
gaUrl = "/VPTracker/WapList/ShangQu";
break;
case 1:
gaUrl = "/VPTracker/WapList/YeWu";
break;
case 2:
gaUrl = "/VPTracker/WapList/PaiXu";
break;
case 3:
gaUrl = "/VPTracker/WapList/ShaiXuan";
break;
}
$.dxlGaPageTracker(gaUrl);
$(this).next(".popup").toggleClass("show")
.parent().siblings().find(".popup").removeClass("show");
$(this).find("span").toggleClass("cur")
.parents('.sortNav').siblings().find("span").removeClass("cur");
if ($(this).next(".popup").hasClass("show")) {
$("#sellerList,footer").css("display", "none");
$.dxlGaPageTracker('/VPTracker/WapHunShaSheYing/' + $.cookie("city") + '/ShangHu-filter?filter=' + $(this).data("type"));
if ($(this).data("type") == "4") {
groupSubmit.show();
} else {
groupSubmit.hide();
}
} else {
groupSubmit.hide();
$("input").blur();
$("#sellerList,footer").css("display", "block");
};
});
//折扣和优惠
$(".defaultSort .discounts span").touchClick(function (e) {
$(this).toggleClass("selected");
});
//价格区间(元)
$(".defaultSort .priceRange span").touchClick(function (e) {
$(this).toggleClass("selected").siblings().removeClass("selected");
});
//将价格tag存入json
priceRangeSpan.each(function (i, n) {
if ($(this).hasClass("dSpanInput")) return;
var _thisText = $(this).text();
jsonPrice[i] = _thisText.match(/下|上/) ? (_thisText.indexOf("下") != -1 ? "-" + parseInt(_thisText) : parseInt(_thisText) + "-") : _thisText;
})
// 滑动获取数据
$.dxlInclud(["fall"], function () {
$.isUrlPar("q") ? par.q = $.isUrlPar("q") : "";
$("#sellerSort").data("help") ? par.helpTag = $("#sellerSort").data("help") : "";
var option = {
"setting": {
url: urlTemp + $.param(par), //获取数据网址
tpl: tpl,
dom: "ul", //竖列容器元素标签(可省略)
selector: "#sellerList", //瀑布流容器
preLoad: false, //无图片或无需预加载时设为false, 默认为true(可省略)
imgUrl: "path",
initNum: 15, //初始化获取数量
newNum: 15, //每次新获取数量
watchHeight: 100 //页面离底部多远拉取数据,单位px(可省略)
},
"haddle": {
"onLoading": onLoading,
"onLoadingOk": loadingOk,
"onComplete": onComplete,
"onNoData": onNoData
}
};
$.dxlWaterFall(option);
//input聚焦 失焦
$(".priceRange input").focus(function () {
$(this).parent().addClass("selected").siblings(".dSpanInput").addClass("selected");
groupSubmit.css({
position: "absolute",
top: $(".popup.show").height() - 20 + "px"
});
}).blur(function () {
if ($(this).val() == "") {
$(this).parent().removeClass("selected").siblings(".dSpanInput").removeClass("selected");
};
groupSubmit.css({
position: "fixed",
top: ""
});
});
//价格筛选后赋值输入框
priceRangeSpan.touchClick(function () {
if ($(this).hasClass("dSpanInput")) return;
var minPrice,
maxPrice;
priceStr = $(this).text();
if (!priceStr) return;
if (priceStr.indexOf("-") != -1) {
minPrice = parseInt(priceStr.substring(0, priceStr.indexOf("-")));
maxPrice = parseInt(priceStr.substr(priceStr.indexOf("-") + 1));
}
if (priceStr.indexOf("以下") != -1) {
maxPrice = parseInt(priceStr);
}
if (priceStr.indexOf("以上") != -1) {
minPrice = parseInt(priceStr);
}
minPriceInput.val(minPrice);
maxPriceInput.val(maxPrice);
priceRangeSpan.find("input").removeClass("cur");
})
//价格区间输入框,匹配价格tag
$(".priceRange input").on("input", function () {
var minPriceInputVal = parseInt(minPriceInput.val()) || "";
var maxPriceInputVal = parseInt(maxPriceInput.val()) || "";
var priceStr = minPriceInputVal + "-" + maxPriceInputVal;
$(this).parent().parent().siblings().find("span").removeClass("selected");
$.each(jsonPrice, function (i, v) {
v == priceStr && priceRangeSpan.eq(i).addClass("selected");
})
})
//提交
$(".defaultSort .submit").touchClick(function (e) {
var _price = priceJudge();
//价格为空或价格左小右大符合条件
if (!_price || (!_price.unfill && _price[1] < _price[0])) {
$.mAlert("价格区间格式不正确,请重新输入");
return false;
}
//符合条件:
if ($.dxlWaterFall && $.isFunction($.dxlWaterFall)) {
$("#sellerList ul").empty();
$.dxlWaterFall("emptyData");
var group0 = $(".defaultSort .group").eq(0).find("span"),
regionVal = $.trim(allSellerSpan.attr("url")), //商区
featureVal = $.trim(allStyleSpan.attr("url")); //商户
var obj = {
city: $.cookie("city"),
min_price: _price[0],
max_price: _price[1],
region: regionVal == "" ? "" : regionVal, //商区
feature: featureVal == "" ? "" : featureVal, //商户
sort: smartSortSpan.attr("url"), //智能排序
libao: $(group0[0]).hasClass("selected") ? 1 : 0,
youhui: $(group0[1]).hasClass("selected") ? 1 : 0,
};
option.setting.url = urlTemp + $.param(obj);
$(".dataEmpty").remove();
$.dxlWaterFall(option);
sortItemHide();
}
});
//全城商区
$(".allSeller .popup li").touchClick(function () {
var _thisText = $.trim($(this).text()),
_thisUrl = $.trim($(this).attr("url"));
$(".allSeller .selectShow span").html(_thisText + '<i class="sortIconI"></i>');
$(".allSeller .selectShow span").attr("url", _thisUrl);
$(this).addClass("selected").siblings().removeClass("selected");
searchAjax($(this), "region");
});
//全部商户
$(".allStyle .popup div:nth-child(2)").find("li").touchClick(function () {
var _thisText = $.trim($(this).text()),
_thisUrl = $.trim($(this).attr("url"));
$(".allStyle .selectShow span").html(_thisText + '<i class="sortIconI"></i>');
$(".allStyle .selectShow span").attr("url", _thisUrl);
$(this).addClass("selected").siblings().removeClass("selected");
searchAjax($(this), "feature");
});
//智能排序
$(".smartSort .popup li").touchClick(function () {
var _thisText = $.trim($(this).text()),
_thisUrl = $.trim($(this).attr("url"));
smartSortSpan.html(_thisText + '<i class="sortIconI"></i>');
smartSortSpan.attr("url", _thisUrl);
$(this).addClass("selected").siblings().removeClass("selected");
searchAjax($(this), "sort");
})
//全城商区/全部商户请求接口时传参
function searchAjax(elem, pa) {
if ($.dxlWaterFall && $.isFunction($.dxlWaterFall)) {
var liHuiDiv = $(".defaultSort .group").eq(0);
$("#sellerList ul").empty();
$.dxlWaterFall("emptyData");
par[pa] = elem.attr("url") != "" ? elem.attr("url") : "";
par.min_price = minPriceInput.val(); //最低价格
par.max_price = maxPriceInput.val(); //最高价格
par.libao = liHuiDiv.find("span:nth-child(1)").hasClass("selected") ? 1 : 0; //礼包
par.youhui = liHuiDiv.find("span:nth-child(2)").hasClass("selected") ? 1 : 0; //优惠
par.sort = smartSortSpan.attr("url") //智能排序
option.setting.url = urlTemp + $.param(par);
$(".dataEmpty").remove();
$.dxlWaterFall(option);
}
sortItemHide();
}
});
//动态赋值 banner高/弹层宽高
function sortUlWidth() {
$("#sellerSort .sortNav .popup").css({
"width": $(window).width(),
"height": $(window).height() - 86
});
$(".banner img").height($(window).width() * 50 / 320);
}
//关闭排序条件总区域
function sortItemHide() {
setTimeout(function () {
$("input").blur();
groupSubmit.hide().css("position", "fixed");
$("#sellerSort .sortNav .popup").removeClass("show");
$("#sellerSort .sortNav span").removeClass("cur");
$("#sellerList,footer").css("display", "block");
}, 300);
}
//筛选弹层 价格区间判断
function priceJudge() {
var price = [];
$(".priceRange input").each(function (i) {
if (is_money_withzero($(this).val())) {
price[i] = Number($(this).val());
} else if ($(this).val() == "") {
price.unfill = true;
} else {
price = false;
return false;
}
});
return price;
}
//瀑布流设置对象生成器
function tpl(data, obj) {
var html = '<li>' +
'<a href="' + dxlHttp.m + CITY + '/HunShaSheYing/' + data.url + '-Info" class="sellerListItem">' +
'<dl class="clearfix">' +
'<dt><img src="' + data.image + '" ></dt>' +
'<dd>' +
'<div class="title">' +
'<h3>' +
'<i>' + data.name + '</i>' +
(data.xixuntong_status == 1 ? (data.coupon_daodianli_id ? '<span class="gift"></span>' : "") +
/*(data.price_back ? '<span class="fan"></span>':"") +*/
(data.coupon_putong_id ? '<span class="sale"></span>' : "") : '') +
(data.fu_flag ? '<span class="fu"></span>' : '') +
'</h3>' +
'</div>' +
'<div class="returnCash">' +
'<p class="row1">' +
'<span class="big">¥';
if (data.price_min == data.price_max) { //如果最低价格 和 最高价格一样,取其中一个
html += '<i>' + data.price_min + '</i>/套';
} else {
html += '<i>' + data.price_min + '-' + data.price_max + '</i>/套';
}
html += '</span>' +
/*(data.xixuntong_status==1 && data.price_back ? '<span class="spetial">'+ data.price_back +'</span>' : '') + */
//(data.xixuntong_status==1 ? '<span class="spetial">最高返'+ data.price_back +'</span>' : '') +
'</p>' +
'<p class="row2">' +
'<span class="district">' + data.region + '</span>' +
'<span class="style">' + data.features + '</span>' +
'</p>' +
'</div>' +
'</dd>' +
'</dl>' +
'</a>' +
'</li>';
obj.append(html);
}
//加载数据中处理
function onLoading() {
loadWrap.show();
}
//本次拉取成功处理
function loadingOk() {
loadWrap.hide();
}
//加载数据完成
function onComplete() {
loadWrap.html("没有更多了哦").show();
}
//首次获取数据量为0时
function onNoData() {
loadWrap.hide();
var str = '<div class="dataEmpty"><h3>没有找到合适的商户</h3><p>换个条件再试试吧</p></div>';
$("#sellerList").append(str);
}
//判断金额是否为正整数和0==为正整数和0返回true,不为正整数和0返回false
function is_money_withzero(value) {
var reg = /^[1-9]\d*$|^0$/;
return reg.test(value);
};
}) | $("footer").remove();
};
// --------------------- | conditional_block |
1505.js | // --------------
// 2017.03.20 增加cookie 记录dxl= & dxlm = 值
var dxlUrlValue = $.isUrlPar("dxl")||"";
var dxlmUrlValue = $.isUrlPar("dxlm")||"";
// 记录各自 cookie
if(dxlUrlValue){
$.cookie("dxl", dxlUrlValue);
};
if(dxlmUrlValue){
$.cookie("dxlm", dxlmUrlValue);
};
//---------------
$(document).ready(function () {
//2017.03.20 读取cookie (dxl ==2? ==2删除 header 和 footer)------------
var dxlUrlValue2 = $.cookie("dxl");
if(dxlUrlValue2 =="2"){
$("header").remove();
$("footer").remove();
};
// -----------------------------------
var CITY = $.cookie('city');
var urlTemp = dxlHttp.m + "index/jsonpnew/index?act=hunShaList&callback=?&"; //瀑布流接口
var groupSubmit = $(".group.submit"); //筛选 确认按钮
var loadWrap = $(".loadWrap"); //加载提示
var allSellerSpan = $(".allSeller .selectShow span"); //全部商区 span
var allStyleSpan = $(".allStyle .selectShow span"); //全部商户 span
var smartSortSpan = $(".smartSort .selectShow span"); //智能排序 span
var par = {
region: allSellerSpan.attr("url"), //商区
feature: allStyleSpan.attr("url"), //酒店商户类型
city: $.cookie("city") //智能排序
}
var minPriceInput = $(".priceRange .dSpanInput").eq(0).find("input"); //最低价格输入框
var maxPriceInput = $(".priceRange .dSpanInput").eq(1).find("input"); //最高价格输入框
var priceRangeSpan = $(".priceRange span"); //价格tag元素
var jsonPrice = {}; //价格tag json
//记录cookie
$.cookie("weddingListUrl", window.location.href, {
expires: 7,
domain: "daoxila." + s4Com,
path: "/"
});
//点击指定区域 input失焦
$("#sellerSort").touchClick(function (e) {
if (e.target.tagName !== "INPUT") {
$("input").blur();
}
})
//把页面内的数据清除。
$("#sellerList ul").html("").css("display", 'block');
//关闭广告条
$(".banner .close").touchClick(function (e) {
e.stopPropagation();
e.preventDefault();
$(".banner").slideUp(500);
})
//底部app下载调用
$.wFootAppDown();
//返回顶部
$.mTopCall();
//搜索
$.mSearch(function () {
$.dxlGaPageTracker("/VPTracker/WapHunShaSheYing/Search");
});
//动态取值 排序区域宽高
sortUlWidth()
$(window).on("resize", sortUlWidth);
//全部商户 婚宴/婚纱/婚庆业务切换
$(".selectTag").touchClick(function () {
$(".allStyle").find(".active").removeClass("active");
$(this).parent().addClass("active");
});
//点击显示筛选条目
$(".selectShow").touchClick(function (e) {
//添加虚拟url
var parentIndex = $(this).parent().index(); //父级的索引
var gaUrl = "";
switch (parentIndex) {
case 0:
gaUrl = "/VPTracker/WapList/ShangQu";
break;
case 1:
gaUrl = "/VPTracker/WapList/YeWu";
break;
case 2:
gaUrl = "/VPTracker/WapList/PaiXu";
break;
case 3:
gaUrl = "/VPTracker/WapList/ShaiXuan";
break;
}
$.dxlGaPageTracker(gaUrl);
$(this).next(".popup").toggleClass("show")
.parent().siblings().find(".popup").removeClass("show");
$(this).find("span").toggleClass("cur")
.parents('.sortNav').siblings().find("span").removeClass("cur");
if ($(this).next(".popup").hasClass("show")) {
$("#sellerList,footer").css("display", "none");
$.dxlGaPageTracker('/VPTracker/WapHunShaSheYing/' + $.cookie("city") + '/ShangHu-filter?filter=' + $(this).data("type"));
if ($(this).data("type") == "4") {
groupSubmit.show();
} else {
groupSubmit.hide();
}
} else {
groupSubmit.hide();
$("input").blur();
$("#sellerList,footer").css("display", "block");
};
});
//折扣和优惠
$(".defaultSort .discounts span").touchClick(function (e) {
$(this).toggleClass("selected");
});
//价格区间(元)
$(".defaultSort .priceRange span").touchClick(function (e) {
$(this).toggleClass("selected").siblings().removeClass("selected");
});
//将价格tag存入json
priceRangeSpan.each(function (i, n) {
if ($(this).hasClass("dSpanInput")) return;
var _thisText = $(this).text();
jsonPrice[i] = _thisText.match(/下|上/) ? (_thisText.indexOf("下") != -1 ? "-" + parseInt(_thisText) : parseInt(_thisText) + "-") : _thisText;
})
// 滑动获取数据
$.dxlInclud(["fall"], function () {
$.isUrlPar("q") ? par.q = $.isUrlPar("q") : "";
$("#sellerSort").data("help") ? par.helpTag = $("#sellerSort").data("help") : "";
var option = {
"setting": {
url: urlTemp + $.param(par), //获取数据网址
tpl: tpl,
dom: "ul", //竖列容器元素标签(可省略)
selector: "#sellerList", //瀑布流容器
preLoad: false, //无图片或无需预加载时设为false, 默认为true(可省略)
imgUrl: "path",
initNum: 15, //初始化获取数量
newNum: 15, //每次新获取数量
watchHeight: 100 //页面离底部多远拉取数据,单位px(可省略)
},
"haddle": {
"onLoading": onLoading,
"onLoadingOk": loadingOk,
"onComplete": onComplete,
"onNoData": onNoData
}
};
$.dxlWaterFall(option);
//input聚焦 失焦
$(".priceRange input").focus(function () {
$(this).parent().addClass("selected").siblings(".dSpanInput").addClass("selected");
groupSubmit.css({
position: "absolute",
top: $(".popup.show").height() - 20 + "px"
});
}).blur(function () {
if ($(this).val() == "") {
$(this).parent().removeClass("selected").siblings(".dSpanInput").removeClass("selected");
};
groupSubmit.css({
position: "fixed",
top: ""
});
});
//价格筛选后赋值输入框
priceRangeSpan.touchClick(function () {
if ($(this).hasClass("dSpanInput")) return;
var minPrice,
maxPrice;
priceStr = $(this).text();
if (!priceStr) return;
if (priceStr.indexOf("-") != -1) {
minPrice = parseInt(priceStr.substring(0, priceStr.indexOf("-")));
maxPrice = parseInt(priceStr.substr(priceStr.indexOf("-") + 1));
}
if (priceStr.indexOf("以下") != -1) {
maxPrice = parseInt(priceStr);
}
if (priceStr.indexOf("以上") != -1) {
minPrice = parseInt(priceStr);
}
minPriceInput.val(minPrice);
maxPriceInput.val(maxPrice);
priceRangeSpan.find("input").removeClass("cur");
})
//价格区间输入框,匹配价格tag
$(".priceRange input").on("input", function () {
var minPriceInputVal = parseInt(minPriceInput.val()) || "";
var maxPriceInputVal = parseInt(maxPriceInput.val()) || "";
var priceStr = minPriceInputVal + "-" + maxPriceInputVal;
$(this).parent().parent().siblings().find("span").removeClass("selected");
$.each(jsonPrice, function (i, v) {
v == priceStr && priceRangeSpan.eq(i).addClass("selected");
})
})
//提交
$(".defaultSort .submit").touchClick(function (e) {
var _price = priceJudge();
//价格为空或价格左小右大符合条件
if (!_price || (!_price.unfill && _price[1] < _price[0])) {
$.mAlert("价格区间格式不正确,请重新输入");
return false;
}
//符合条件:
if ($.dxlWaterFall && $.isFunction($.dxlWaterFall)) {
$("#sellerList ul").empty();
$.dxlWaterFall("emptyData");
var group0 = $(".defaultSort .group").eq(0).find("span"),
regionVal = $.trim(allSellerSpan.attr("url")), //商区
featureVal = $.trim(allStyleSpan.attr("url")); //商户
var obj = {
city: $.cookie("city"),
min_price: _price[0],
max_price: _price[1],
region: regionVal == "" ? "" : regionVal, //商区
feature: featureVal == "" ? "" : featureVal, //商户
sort: smartSortSpan.attr("url"), //智能排序
libao: $(group0[0]).hasClass("selected") ? 1 : 0,
youhui: $(group0[1]).hasClass("selected") ? 1 : 0,
};
option.setting.url = urlTemp + $.param(obj);
$(".dataEmpty").remove();
$.dxlWaterFall(option);
sortItemHide();
}
});
//全城商区
$(".allSeller .popup li").touchClick(function () {
var _thisText = $.trim($(this).text()),
_thisUrl = $.trim($(this).attr("url"));
$(".allSeller .selectShow span").html(_thisText + '<i class="sortIconI"></i>');
$(".allSeller .selectShow span").attr("url", _thisUrl);
$(this).addClass("selected").siblings().removeClass("selected");
searchAjax($(this), "region");
});
//全部商户
$(".allStyle .popup div:nth-child(2)").find("li").touchClick(function () {
var _thisText = $.trim($(this).text()),
_thisUrl = $.trim($(this).attr("url"));
$(".allStyle .selectShow span").html(_thisText + '<i class="sortIconI"></i>');
$(".allStyle .selectShow span").attr("url", _thisUrl);
$(this).addClass("selected").siblings().removeClass("selected");
searchAjax($(this), "feature");
});
//智能排序
$(".smartSort .popup li").touchClick(function () {
var _thisText = $.trim($(this).text()),
_thisUrl = $.trim($(this).attr("url"));
smartSortSpan.html(_thisText + '<i class="sortIconI"></i>');
smartSortSpan.attr("url", _thisUrl);
$(this).addClass("selected").siblings().removeClass("selected");
searchAjax($(this), "sort");
})
//全城商区/全部商户请求接口时传参
function searchAjax(elem, pa) {
if ($.dxlWaterFall && $.isFunction($.dxlWaterFall)) {
var liHuiDiv = $(".defaultSort .group").eq(0);
$("#sellerList ul").empty();
$.dxlWaterFall("emptyData");
par[pa] = elem.attr("url") != "" ? elem.attr("url") : "";
par.min_price = minPriceInput.val(); //最低价格
par.max_price = maxPriceInput.val(); //最高价格
par.libao = liHuiDiv.find("span:nth-child(1)").hasClass("selected") ? 1 : 0; //礼包
par.youhui = liHuiDiv.find("span:nth-child(2)").hasClass("selected") ? 1 : 0; //优惠
par.sort = smartSortSpan.attr("url") //智能排序
option.setting.url = urlTemp + $.param(par);
$(".dataEmpty").remove();
$.dxlWaterFall(option);
}
sortItemHide();
}
});
//动态赋值 banner高/弹层宽高
function sortUlWidth() {
$("#sellerSort .sortNav .popup").css({
"width": $(window).width(),
"height": $(window).height() - 86
});
$(".banner img").height($(window).width() * 50 / 320);
}
//关闭排序条件总区域
function sortItemHide() {
setTimeout(function () {
$("input").blur();
groupSubmit.hide().css("position", "fixed");
$("#sellerSort .sortNav .popup").removeClass("show");
$("#sellerSort .sortNav span").removeClass("cur");
$("#sellerList,footer").css("display", "block");
}, 300);
}
//筛选弹层 价格区间判断
function priceJudge() {
var price = [];
$(".priceRange input").each(function (i) {
if (is_money_withzero($(this).val())) {
price[i] = Number($(this).val());
} else if ($(this).val() == "") {
price.unfill = true;
} else {
price = false;
return false;
}
});
return price;
}
//瀑布流设置对象生成器
function tpl(data, obj) {
var html = '<li>' +
'<a href="' + dxlHttp.m + CITY + '/HunShaSheYing/' + data.url + '-Info" class="sellerListItem">' +
'<dl class="clearfix">' +
'<dt><img src="' + data.image + '" ></dt>' +
'<dd>' +
'<div class="title">' +
'<h3>' +
'<i>' + data.name + '</i>' +
(data.xixuntong_status == 1 ? (data.coupon_daodianli_id ? '<span class="gift"></span>' : "") +
/*(data.price_back ? '<span class="fan"></span>':"") +*/
(data.coupon_putong_id ? '<span class="sale"></span>' : "") : '') +
(data.fu_flag ? '<span class="fu"></span>' : '') +
'</h3>' +
'</div>' +
'<div class="returnCash">' +
'<p class="row1">' +
'<span class="big">¥';
if (data.price_min == data.price_max) { //如果最低价格 和 最高价格一样,取其中一个
html += '<i>' + data.price_min + '</i>/套';
} else {
html += '<i>' + data.price_min + '-' + data.price_max + '</i>/套';
}
html += '</span>' +
/*(data.xixuntong_status==1 && data.price_back ? '<span class="spetial">'+ data.price_back +'</span>' : '') + */
//(data.xixuntong_status==1 ? '<span class="spetial">最高返'+ data.price_back +'</span>' : '') +
'</p>' +
'<p class="row2">' +
'<span class="district">' + data.region + '</span>' +
'<span class="style">' + data.features + '</span>' +
'</p>' +
'</div>' +
'</dd>' +
'</dl>' +
'</a>' +
'</li>';
obj.append(html);
}
//加载数据中处理
function onLoading() {
loadWrap.show();
}
//本次拉取成功处理
function loadingOk() {
loadWrap.hide();
}
//加载数据完成
function onComplete() {
loadWrap.html("没有更多了哦").show();
}
//首次获取数据量为0时
function onNoData() {
loadWrap.hide();
var str = '<div class="dataEmpty"><h3>没有找到合适的商户</h3><p>换个条件再试试吧</p></div>';
$("#sellerList").append(str);
}
//判断金额是否为正整数和0==为正整数和0返回true,不为正整数和0返回false
function is_money_withzero(value) {
var reg = /^[1-9]\d*$|^0$/;
return reg.test(value);
};
}) | identifier_body | ||
1505.js | // --------------
// 2017.03.20 增加cookie 记录dxl= & dxlm = 值
var dxlUrlValue = $.isUrlPar("dxl")||"";
var dxlmUrlValue = $.isUrlPar("dxlm")||"";
// 记录各自 cookie
if(dxlUrlValue){
$.cookie("dxl", dxlUrlValue);
};
if(dxlmUrlValue){
$.cookie("dxlm", dxlmUrlValue);
};
//---------------
$(document).ready(function () {
//2017.03.20 读取cookie (dxl ==2? ==2删除 header 和 footer)------------
var dxlUrlValue2 = $.cookie("dxl");
if(dxlUrlValue2 =="2"){
$("header").remove();
$("footer").remove();
};
// -----------------------------------
var CITY = $.cookie('city');
var urlTemp = dxlHttp.m + "index/jsonpnew/index?act=hunShaList&callback=?&"; //瀑布流接口
var groupSubmit = $(".group.submit"); //筛选 确认按钮
var loadWrap = $(".loadWrap"); //加载提示
var allSellerSpan = $(".allSeller .selectShow span"); //全部商区 span
var allStyleSpan = $(".allStyle .selectShow span"); //全部商户 span
var smartSortSpan = $(".smartSort .selectShow span"); //智能排序 span
var par = {
region: allSellerSpan.attr("url"), //商区
feature: allStyleSpan.attr("url"), //酒店商户类型
city: $.cookie("city") //智能排序
}
var minPriceInput = $(".priceRange .dSpanInput").eq(0).find("input"); //最低价格输入框
var maxPriceInput = $(".priceRange .dSpanInput").eq(1).find("input"); //最高价格输入框
var priceRangeSpan = $(".priceRange span"); //价格tag元素
var jsonPrice = {}; //价格tag json
//记录cookie
$.cookie("weddingListUrl", window.location.href, {
expires: 7,
domain: "daoxila." + s4Com,
path: "/"
});
//点击指定区域 input失焦
$("#sellerSort").touchClick(function (e) {
if (e.target.tagName !== "INPUT") {
$("input").blur();
}
})
//把页面内的数据清除。
$("#sellerList ul").html("").css("display", 'block');
//关闭广告条
$(".banner .close").touchClick(function (e) {
e.stopPropagation();
e.preventDefault();
$(".banner").slideUp(500);
})
//底部app下载调用
$.wFootAppDown();
//返回顶部
$.mTopCall();
//搜索
$.mSearch(function () {
$.dxlGaPageTracker("/VPTracker/WapHunShaSheYing/Search");
});
|
//动态取值 排序区域宽高
sortUlWidth()
$(window).on("resize", sortUlWidth);
//全部商户 婚宴/婚纱/婚庆业务切换
$(".selectTag").touchClick(function () {
$(".allStyle").find(".active").removeClass("active");
$(this).parent().addClass("active");
});
//点击显示筛选条目
$(".selectShow").touchClick(function (e) {
//添加虚拟url
var parentIndex = $(this).parent().index(); //父级的索引
var gaUrl = "";
switch (parentIndex) {
case 0:
gaUrl = "/VPTracker/WapList/ShangQu";
break;
case 1:
gaUrl = "/VPTracker/WapList/YeWu";
break;
case 2:
gaUrl = "/VPTracker/WapList/PaiXu";
break;
case 3:
gaUrl = "/VPTracker/WapList/ShaiXuan";
break;
}
$.dxlGaPageTracker(gaUrl);
$(this).next(".popup").toggleClass("show")
.parent().siblings().find(".popup").removeClass("show");
$(this).find("span").toggleClass("cur")
.parents('.sortNav').siblings().find("span").removeClass("cur");
if ($(this).next(".popup").hasClass("show")) {
$("#sellerList,footer").css("display", "none");
$.dxlGaPageTracker('/VPTracker/WapHunShaSheYing/' + $.cookie("city") + '/ShangHu-filter?filter=' + $(this).data("type"));
if ($(this).data("type") == "4") {
groupSubmit.show();
} else {
groupSubmit.hide();
}
} else {
groupSubmit.hide();
$("input").blur();
$("#sellerList,footer").css("display", "block");
};
});
//折扣和优惠
$(".defaultSort .discounts span").touchClick(function (e) {
$(this).toggleClass("selected");
});
//价格区间(元)
$(".defaultSort .priceRange span").touchClick(function (e) {
$(this).toggleClass("selected").siblings().removeClass("selected");
});
//将价格tag存入json
priceRangeSpan.each(function (i, n) {
if ($(this).hasClass("dSpanInput")) return;
var _thisText = $(this).text();
jsonPrice[i] = _thisText.match(/下|上/) ? (_thisText.indexOf("下") != -1 ? "-" + parseInt(_thisText) : parseInt(_thisText) + "-") : _thisText;
})
// 滑动获取数据
$.dxlInclud(["fall"], function () {
$.isUrlPar("q") ? par.q = $.isUrlPar("q") : "";
$("#sellerSort").data("help") ? par.helpTag = $("#sellerSort").data("help") : "";
var option = {
"setting": {
url: urlTemp + $.param(par), //获取数据网址
tpl: tpl,
dom: "ul", //竖列容器元素标签(可省略)
selector: "#sellerList", //瀑布流容器
preLoad: false, //无图片或无需预加载时设为false, 默认为true(可省略)
imgUrl: "path",
initNum: 15, //初始化获取数量
newNum: 15, //每次新获取数量
watchHeight: 100 //页面离底部多远拉取数据,单位px(可省略)
},
"haddle": {
"onLoading": onLoading,
"onLoadingOk": loadingOk,
"onComplete": onComplete,
"onNoData": onNoData
}
};
$.dxlWaterFall(option);
//input聚焦 失焦
$(".priceRange input").focus(function () {
$(this).parent().addClass("selected").siblings(".dSpanInput").addClass("selected");
groupSubmit.css({
position: "absolute",
top: $(".popup.show").height() - 20 + "px"
});
}).blur(function () {
if ($(this).val() == "") {
$(this).parent().removeClass("selected").siblings(".dSpanInput").removeClass("selected");
};
groupSubmit.css({
position: "fixed",
top: ""
});
});
//价格筛选后赋值输入框
priceRangeSpan.touchClick(function () {
if ($(this).hasClass("dSpanInput")) return;
var minPrice,
maxPrice;
priceStr = $(this).text();
if (!priceStr) return;
if (priceStr.indexOf("-") != -1) {
minPrice = parseInt(priceStr.substring(0, priceStr.indexOf("-")));
maxPrice = parseInt(priceStr.substr(priceStr.indexOf("-") + 1));
}
if (priceStr.indexOf("以下") != -1) {
maxPrice = parseInt(priceStr);
}
if (priceStr.indexOf("以上") != -1) {
minPrice = parseInt(priceStr);
}
minPriceInput.val(minPrice);
maxPriceInput.val(maxPrice);
priceRangeSpan.find("input").removeClass("cur");
})
//价格区间输入框,匹配价格tag
$(".priceRange input").on("input", function () {
var minPriceInputVal = parseInt(minPriceInput.val()) || "";
var maxPriceInputVal = parseInt(maxPriceInput.val()) || "";
var priceStr = minPriceInputVal + "-" + maxPriceInputVal;
$(this).parent().parent().siblings().find("span").removeClass("selected");
$.each(jsonPrice, function (i, v) {
v == priceStr && priceRangeSpan.eq(i).addClass("selected");
})
})
//提交
$(".defaultSort .submit").touchClick(function (e) {
var _price = priceJudge();
//价格为空或价格左小右大符合条件
if (!_price || (!_price.unfill && _price[1] < _price[0])) {
$.mAlert("价格区间格式不正确,请重新输入");
return false;
}
//符合条件:
if ($.dxlWaterFall && $.isFunction($.dxlWaterFall)) {
$("#sellerList ul").empty();
$.dxlWaterFall("emptyData");
var group0 = $(".defaultSort .group").eq(0).find("span"),
regionVal = $.trim(allSellerSpan.attr("url")), //商区
featureVal = $.trim(allStyleSpan.attr("url")); //商户
var obj = {
city: $.cookie("city"),
min_price: _price[0],
max_price: _price[1],
region: regionVal == "" ? "" : regionVal, //商区
feature: featureVal == "" ? "" : featureVal, //商户
sort: smartSortSpan.attr("url"), //智能排序
libao: $(group0[0]).hasClass("selected") ? 1 : 0,
youhui: $(group0[1]).hasClass("selected") ? 1 : 0,
};
option.setting.url = urlTemp + $.param(obj);
$(".dataEmpty").remove();
$.dxlWaterFall(option);
sortItemHide();
}
});
//全城商区
$(".allSeller .popup li").touchClick(function () {
var _thisText = $.trim($(this).text()),
_thisUrl = $.trim($(this).attr("url"));
$(".allSeller .selectShow span").html(_thisText + '<i class="sortIconI"></i>');
$(".allSeller .selectShow span").attr("url", _thisUrl);
$(this).addClass("selected").siblings().removeClass("selected");
searchAjax($(this), "region");
});
//全部商户
$(".allStyle .popup div:nth-child(2)").find("li").touchClick(function () {
var _thisText = $.trim($(this).text()),
_thisUrl = $.trim($(this).attr("url"));
$(".allStyle .selectShow span").html(_thisText + '<i class="sortIconI"></i>');
$(".allStyle .selectShow span").attr("url", _thisUrl);
$(this).addClass("selected").siblings().removeClass("selected");
searchAjax($(this), "feature");
});
//智能排序
$(".smartSort .popup li").touchClick(function () {
var _thisText = $.trim($(this).text()),
_thisUrl = $.trim($(this).attr("url"));
smartSortSpan.html(_thisText + '<i class="sortIconI"></i>');
smartSortSpan.attr("url", _thisUrl);
$(this).addClass("selected").siblings().removeClass("selected");
searchAjax($(this), "sort");
})
//全城商区/全部商户请求接口时传参
function searchAjax(elem, pa) {
if ($.dxlWaterFall && $.isFunction($.dxlWaterFall)) {
var liHuiDiv = $(".defaultSort .group").eq(0);
$("#sellerList ul").empty();
$.dxlWaterFall("emptyData");
par[pa] = elem.attr("url") != "" ? elem.attr("url") : "";
par.min_price = minPriceInput.val(); //最低价格
par.max_price = maxPriceInput.val(); //最高价格
par.libao = liHuiDiv.find("span:nth-child(1)").hasClass("selected") ? 1 : 0; //礼包
par.youhui = liHuiDiv.find("span:nth-child(2)").hasClass("selected") ? 1 : 0; //优惠
par.sort = smartSortSpan.attr("url") //智能排序
option.setting.url = urlTemp + $.param(par);
$(".dataEmpty").remove();
$.dxlWaterFall(option);
}
sortItemHide();
}
});
//动态赋值 banner高/弹层宽高
function sortUlWidth() {
$("#sellerSort .sortNav .popup").css({
"width": $(window).width(),
"height": $(window).height() - 86
});
$(".banner img").height($(window).width() * 50 / 320);
}
//关闭排序条件总区域
function sortItemHide() {
setTimeout(function () {
$("input").blur();
groupSubmit.hide().css("position", "fixed");
$("#sellerSort .sortNav .popup").removeClass("show");
$("#sellerSort .sortNav span").removeClass("cur");
$("#sellerList,footer").css("display", "block");
}, 300);
}
//筛选弹层 价格区间判断
function priceJudge() {
var price = [];
$(".priceRange input").each(function (i) {
if (is_money_withzero($(this).val())) {
price[i] = Number($(this).val());
} else if ($(this).val() == "") {
price.unfill = true;
} else {
price = false;
return false;
}
});
return price;
}
//瀑布流设置对象生成器
function tpl(data, obj) {
var html = '<li>' +
'<a href="' + dxlHttp.m + CITY + '/HunShaSheYing/' + data.url + '-Info" class="sellerListItem">' +
'<dl class="clearfix">' +
'<dt><img src="' + data.image + '" ></dt>' +
'<dd>' +
'<div class="title">' +
'<h3>' +
'<i>' + data.name + '</i>' +
(data.xixuntong_status == 1 ? (data.coupon_daodianli_id ? '<span class="gift"></span>' : "") +
/*(data.price_back ? '<span class="fan"></span>':"") +*/
(data.coupon_putong_id ? '<span class="sale"></span>' : "") : '') +
(data.fu_flag ? '<span class="fu"></span>' : '') +
'</h3>' +
'</div>' +
'<div class="returnCash">' +
'<p class="row1">' +
'<span class="big">¥';
if (data.price_min == data.price_max) { //如果最低价格 和 最高价格一样,取其中一个
html += '<i>' + data.price_min + '</i>/套';
} else {
html += '<i>' + data.price_min + '-' + data.price_max + '</i>/套';
}
html += '</span>' +
/*(data.xixuntong_status==1 && data.price_back ? '<span class="spetial">'+ data.price_back +'</span>' : '') + */
//(data.xixuntong_status==1 ? '<span class="spetial">最高返'+ data.price_back +'</span>' : '') +
'</p>' +
'<p class="row2">' +
'<span class="district">' + data.region + '</span>' +
'<span class="style">' + data.features + '</span>' +
'</p>' +
'</div>' +
'</dd>' +
'</dl>' +
'</a>' +
'</li>';
obj.append(html);
}
//加载数据中处理
function onLoading() {
loadWrap.show();
}
//本次拉取成功处理
function loadingOk() {
loadWrap.hide();
}
//加载数据完成
function onComplete() {
loadWrap.html("没有更多了哦").show();
}
//首次获取数据量为0时
function onNoData() {
loadWrap.hide();
var str = '<div class="dataEmpty"><h3>没有找到合适的商户</h3><p>换个条件再试试吧</p></div>';
$("#sellerList").append(str);
}
//判断金额是否为正整数和0==为正整数和0返回true,不为正整数和0返回false
function is_money_withzero(value) {
var reg = /^[1-9]\d*$|^0$/;
return reg.test(value);
};
}) | random_line_split | |
1505.js | // --------------
// 2017.03.20 增加cookie 记录dxl= & dxlm = 值
var dxlUrlValue = $.isUrlPar("dxl")||"";
var dxlmUrlValue = $.isUrlPar("dxlm")||"";
// 记录各自 cookie
if(dxlUrlValue){
$.cookie("dxl", dxlUrlValue);
};
if(dxlmUrlValue){
$.cookie("dxlm", dxlmUrlValue);
};
//---------------
$(document).ready(function () {
//2017.03.20 读取cookie (dxl ==2? ==2删除 header 和 footer)------------
var dxlUrlValue2 = $.cookie("dxl");
if(dxlUrlValue2 =="2"){
$("header").remove();
$("footer").remove();
};
// -----------------------------------
var CITY = $.cookie('city');
var urlTemp = dxlHttp.m + "index/jsonpnew/index?act=hunShaList&callback=?&"; //瀑布流接口
var groupSubmit = $(".group.submit"); //筛选 确认按钮
var loadWrap = $(".loadWrap"); //加载提示
var allSellerSpan = $(".allSeller .selectShow span"); //全部商区 span
var allStyleSpan = $(".allStyle .selectShow span"); //全部商户 span
var smartSortSpan = $(".smartSort .selectShow span"); //智能排序 span
var par = {
region: allSellerSpan.attr("url"), //商区
feature: allStyleSpan.attr("url"), //酒店商户类型
city: $.cookie("city") //智能排序
}
var minPriceInput = $(".priceRange .dSpanInput").eq(0).find("input"); //最低价格输入框
var maxPriceInput = $(".priceRange .dSpanInput").eq(1).find("input"); //最高价格输入框
var priceRangeSpan = $(".priceRange span"); //价格tag元素
var jsonPrice = {}; //价格tag json
//记录cookie
$.cookie("weddingListUrl", window.location.href, {
expires: 7,
domain: "daoxila." + s4Com,
path: "/"
});
//点击指定区域 input失焦
$("#sellerSort").touchClick(function (e) {
if (e.target.tagName !== "INPUT") {
$("input").blur();
}
})
//把页面内的数据清除。
$("#sellerList ul").html("").css("display", 'block');
//关闭广告条
$(".banner .close").touchClick(function (e) {
e.stopPropagation();
e.preventDefault();
$(".banner").slideUp(500);
})
//底部app下载调用
$.wFootAppDown();
//返回顶部
$.mTopCall();
//搜索
$.mSearch(function () {
$.dxlGaPageTracker("/VPTracker/WapHunShaSheYing/Search");
});
//动态取值 排序区域宽高
sortUlWidth()
$(window).on("resize", sortUlWidth);
//全部商户 婚宴/婚纱/婚庆业务切换
$(".selectTag").touchClick(function () {
$(".allStyle").find(".active").removeClass("active");
$(this).parent().addClass("active");
});
//点击显示筛选条目
$(".selectShow").touchClick(function (e) {
//添加虚拟url
var parentIndex = $(this).parent().index(); //父级的索引
var gaUrl = "";
switch (parentIndex) {
case 0:
gaUrl = "/VPTracker/WapList/ShangQu";
break;
case 1:
gaUrl = "/VPTracker/WapList/YeWu";
break;
case 2:
gaUrl = "/VPTracker/WapList/PaiXu";
break;
case 3:
gaUrl = "/VPTracker/WapList/ShaiXuan";
break;
}
$.dxlGaPageTracker(gaUrl);
$(this).next(".popup").toggleClass("show")
.parent().siblings().find(".popup").removeClass("show");
$(this).find("span").toggleClass("cur")
.parents('.sortNav').siblings().find("span").removeClass("cur");
if ($(this).next(".popup").hasClass("show")) {
$("#sellerList,footer").css("display", "none");
$.dxlGaPageTracker('/VPTracker/WapHunShaSheYing/' + $.cookie("city") + '/ShangHu-filter?filter=' + $(this).data("type"));
if ($(this).data("type") == "4") {
groupSubmit.show();
} else {
groupSubmit.hide();
}
} else {
groupSubmit.hide();
$("input").blur();
$("#sellerList,footer").css("display", "block");
};
});
//折扣和优惠
$(".defaultSort .discounts span").touchClick(function (e) {
$(this).toggleClass("selected");
});
//价格区间(元)
$(".defaultSort .priceRange span").touchClick(function (e) {
$(this).toggleClass("selected").siblings().removeClass("selected");
});
//将价格tag存入json
priceRangeSpan.each(function (i, n) {
if ($(this).hasClass("dSpanInput")) return;
var _thisText = $(this).text();
jsonPrice[i] = _thisText.match(/下|上/) ? (_thisText.indexOf("下") != -1 ? "-" + parseInt(_thisText) : parseInt(_thisText) + "-") : _thisText;
})
// 滑动获取数据
$.dxlInclud(["fall"], function () {
$.isUrlPar("q") ? par.q = $.isUrlPar("q") : "";
$("#sellerSort").data("help") ? par.helpTag = $("#sellerSort").data("help") : "";
var option = {
"setting": {
url: urlTemp + $.param(par), //获取数据网址
tpl: tpl,
dom: "ul", //竖列容器元素标签(可省略)
selector: "#sellerList", //瀑布流容器
preLoad: false, //无图片或无需预加载时设为false, 默认为true(可省略)
imgUrl: "path",
initNum: 15, //初始化获取数量
newNum: 15, //每次新获取数量
watchHeight: 100 //页面离底部多远拉取数据,单位px(可省略)
},
"haddle": {
"onLoading": onLoading,
"onLoadingOk": loadingOk,
"onComplete": onComplete,
"onNoData": onNoData
}
};
$.dxlWaterFall(option);
//input聚焦 失焦
$(".priceRange input").focus(function () {
$(this).parent().addClass("selected").siblings(".dSpanInput").addClass("selected");
groupSubmit.css({
position: "absolute",
top: $(".popup.show").height() - 20 + "px"
});
}).blur(function () {
if ($(this).val() == "") {
$(this).parent().removeClass("selected").siblings(".dSpanInput").removeClass("selected");
};
groupSubmit.css({
position: "fixed",
top: ""
});
});
//价格筛选后赋值输入框
priceRangeSpan.touchClick(function () {
if ($(this).hasClass("dSpanInput")) return;
var minPrice,
maxPrice;
priceStr = $(this).text();
if (!priceStr) return;
if (priceStr.indexOf("-") != -1) {
minPrice = parseInt(priceStr.substring(0, priceStr.indexOf("-")));
maxPrice = parseInt(priceStr.substr(priceStr.indexOf("-") + 1));
}
if (priceStr.indexOf("以下") != -1) {
maxPrice = parseInt(priceStr);
}
if (priceStr.indexOf("以上") != -1) {
minPrice = parseInt(priceStr);
}
minPriceInput.val(minPrice);
maxPriceInput.val(maxPrice);
priceRangeSpan.find("input").removeClass("cur");
})
//价格区间输入框,匹配价格tag
$(".priceRange input").on("input", function () {
var minPriceInputVal = parseInt(minPriceInput.val()) || "";
var maxPriceInputVal = parseInt(maxPriceInput.val()) || "";
var priceStr = minPriceInputVal + "-" + maxPriceInputVal;
$(this).parent().parent().siblings().find("span").removeClass("selected");
$.each(jsonPrice, function (i, v) {
v == priceStr && priceRangeSpan.eq(i).addClass("selected");
})
})
//提交
$(".defaultSort .submit").touchClick(function (e) {
var _price = priceJudge();
//价格为空或价格左小右大符合条件
if (!_price || (!_price.unfill && _price[1] < _price[0])) {
$.mAlert("价格区间格式不正确,请重新输入");
return false;
}
//符合条件:
if ($.dxlWaterFall && $.isFunction($.dxlWaterFall)) {
$("#sellerList ul").empty();
$.dxlWaterFall("emptyData");
var group0 = $(".defaultSort .group").eq(0).find("span"),
regionVal = $.trim(allSellerSpan.attr("url")), //商区
featureVal = $.trim(allStyleSpan.attr("url")); //商户
var obj = {
city: $.cookie("city"),
min_price: _price[0],
max_price: _price[1],
region: regionVal == "" ? "" : regionVal, //商区
feature: featureVal == "" ? "" : featureVal, //商户
sort: smartSortSpan.attr("url"), //智能排序
libao: $(group0[0]).hasClass("selected") ? 1 : 0,
youhui: $(group0[1]).hasClass("selected") ? 1 : 0,
};
option.setting.url = urlTemp + $.param(obj);
$(".dataEmpty").remove();
$.dxlWaterFall(option);
sortItemHide();
}
});
//全城商区
$(".allSeller .popup li").touchClick(function () {
var _thisText = $.trim($(this).text()),
_thisUrl = $.trim($(this).attr("url"));
$(".allSeller .selectShow span").html(_thisText + '<i class="sortIconI"></i>');
$(".allSeller .selectShow span").attr("url", _thisUrl);
$(this).addClass("selected").siblings().removeClass("selected");
searchAjax($(this), "region");
});
//全部商户
$(".allStyle .popup div:nth-child(2)").find("li").touchClick(function () {
var _thisText = $.trim($(this).text()),
_thisUrl = $.trim($(this).attr("url"));
$(".allStyle .selectShow span").html(_thisText + '<i class="sortIconI"></i>');
$(".allStyle .selectShow span").attr("url", _thisUrl);
$(this).addClass("selected").siblings().removeClass("selected");
searchAjax($(this), "feature");
});
//智能排序
$(".smartSort .popup li").touchClick(function () {
var _thisText = $.trim($(this).text()),
_thisUrl = $.trim($(this).attr("url"));
smartSortSpan.html(_thisText + '<i class="sortIconI"></i>');
smartSortSpan.attr("url", _thisUrl);
$(this).addClass("selected").siblings().removeClass("selected");
searchAjax($(this), "sort");
})
//全城商区/全部商户请求接口时传参
function searchAjax(elem, pa) {
if ($.dxlWaterFall && $.isFunction($.dxlWaterFall)) {
var liHuiDiv = $(".defaultSort .group").eq(0);
$("#sellerList ul").empty();
$.dxlWaterFall("emptyData");
par[pa] = elem.attr("url") != "" ? elem.attr("url") : "";
par.min_price = minPriceInput.val(); //最低价格
par.max_price = maxPriceInput.val(); //最高价格
par.libao = liHuiDiv.find("span:nth-child(1)").hasClass("selected") ? 1 : 0; //礼包
par.youhui = liHuiDiv.find("span:nth-child(2)").hasClass("selected") ? 1 : 0; //优惠
par.sort = smartSortSpan.attr("url") //智能排序
option.setting.url = urlTemp + $.param(par);
$(".dataEmpty").remove();
$.dxlWaterFall(option);
}
sortItemHide();
}
});
//动态赋值 banner高/弹层宽高
function sortUlWidth() {
$("#sellerSort .sortNav .popup").css({
"width": $(window).width(),
"height": $(window).height() - 86
});
$(".banner img").height($(window).width() * 50 / 320);
}
//关闭排序条件总区域
function sortItemHide() {
setTimeout(function () {
$("input").blur();
groupSubmit.hide().css("position", "fixed");
$("#sellerSort .sortNav .popup").removeClass("show");
$("#sellerSort .sortNav span").removeClass("cur");
$("#sellerList,footer").css("display", "block");
}, 300);
}
//筛选弹层 价格区间判断
function priceJudge() {
var price = [];
$(".priceRange input").each(function (i) {
if (is_money_withzero($(this).val())) {
price[i] = Number($(this).val());
} else if ($(this).val() == "") {
price.unfill = true;
} else {
price = false;
return false;
}
});
return price;
}
//瀑布流设置对象生成器
function tpl(data, obj) {
var html = '<li>' +
'<a href="' + dxlHttp.m + CITY + '/HunShaSheYing/' + data.url + '-Info" class="sellerListItem">' +
'<dl class="clearfix">' +
'<dt><img src="' + data.image + '" ></dt>' +
'<dd>' +
'<div class="title">' +
'<h3>' +
'<i>' + data.name + '</i>' +
(data.xixuntong_status == 1 ? (data.coupon_daodianli_id ? '<span class="gift"></span>' : "") +
/*(data.price_back ? '<span class="fan"></span>':"") + | ata.coupon_putong_id ? '<span class="sale"></span>' : "") : '') +
(data.fu_flag ? '<span class="fu"></span>' : '') +
'</h3>' +
'</div>' +
'<div class="returnCash">' +
'<p class="row1">' +
'<span class="big">¥';
if (data.price_min == data.price_max) { //如果最低价格 和 最高价格一样,取其中一个
html += '<i>' + data.price_min + '</i>/套';
} else {
html += '<i>' + data.price_min + '-' + data.price_max + '</i>/套';
}
html += '</span>' +
/*(data.xixuntong_status==1 && data.price_back ? '<span class="spetial">'+ data.price_back +'</span>' : '') + */
//(data.xixuntong_status==1 ? '<span class="spetial">最高返'+ data.price_back +'</span>' : '') +
'</p>' +
'<p class="row2">' +
'<span class="district">' + data.region + '</span>' +
'<span class="style">' + data.features + '</span>' +
'</p>' +
'</div>' +
'</dd>' +
'</dl>' +
'</a>' +
'</li>';
obj.append(html);
}
//加载数据中处理
function onLoading() {
loadWrap.show();
}
//本次拉取成功处理
function loadingOk() {
loadWrap.hide();
}
//加载数据完成
function onComplete() {
loadWrap.html("没有更多了哦").show();
}
//首次获取数据量为0时
function onNoData() {
loadWrap.hide();
var str = '<div class="dataEmpty"><h3>没有找到合适的商户</h3><p>换个条件再试试吧</p></div>';
$("#sellerList").append(str);
}
//判断金额是否为正整数和0==为正整数和0返回true,不为正整数和0返回false
function is_money_withzero(value) {
var reg = /^[1-9]\d*$|^0$/;
return reg.test(value);
};
}) | */
(d | identifier_name |
udpopnet_random.py | # generate random fully connected networks
# and evaluate their properties
import time
import copy
import random
import networkx as nx
import numpy as np
#import matplotlib.pyplot as plt
#plt.ion()
plotnet = 1 # 1 if plot network, 0 is no figure
outfile = open("randomnetoutput.txt", "wb")
outfile.write("Node"+"\t"+ "Edges"+"\t"+"Mcrit"+"\t"+"Var"+"\t"+"Eff"+"\t"+"Var/Edge"+"\t"+"MaxDiam"+"\t"+"Dend"+"\t"+"Terminal"+"\t"+"Even"+"\t"+"V/E"+"\t"+"Net"+"\n")
nodes = 10
edges = 15
npop=copy.deepcopy(nodes)
edgerange=1
noderange=1
if(noderange):
# range of nodes in the network
minnode = 2
maxnode = 20
#nodes = 8 # just have to change this
replicates = 100
w = 0.5 # heterozygote fitness relative to homozygotes
def linearness(net, npop):
# special case for a network of 2 populations
if(npop == 2):
return 1
else:
# determine the distance between all popualtion pairs
maxdiameter=0
for i in range(1, npop):
link = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
linkcopy = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
for k in range(1, npop):
for j in range(0, npop):
if (i != j):
if(link[j]==0):
if(net[i][j]==1):
linkcopy[j]=k
else:
for l in range(0, npop):
if (i != l):
if(link[l]==0):
if(net[j][l]==1):
linkcopy[l]=k
link = copy.deepcopy(linkcopy)
#print link
for j in range(0, npop):
if(link[j]>maxdiameter):
maxdiameter=link[j]
#print "diameter: ", maxdiameter
# alternative could return diameter by uncommenting below
#return maxdiameter
#print "linearness: ", (float(maxdiameter)-1)/(float(npop)-2)
return ((float(maxdiameter)-1)/(float(npop)-2))
def eff(net, npop):
# special case for a network of 2 populations
if(npop == 2):
return 1
else:
# determine the distance between all popualtion pairs
invert=[]
for i in range(1, npop):
link = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
linkcopy = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
for k in range(1, npop):
for j in range(0, npop):
if (i != j):
if(link[j]==0):
if(net[i][j]==1):
linkcopy[j]=k
else:
for l in range(0, npop):
if (i != l):
if(link[l]==0):
if(net[j][l]==1):
linkcopy[l]=k
link = copy.deepcopy(linkcopy)
for m in range(len(link)):
if link[m]!=0:
invert.append(1/float(link[m]))
return float(sum(invert))/(float(npop)*(npop-1))
# check if the network is fully interconnected
def fullconnectcheck(net):
connected = [0 for x in range(nodes)]
connected[0]=1
current=0
for k in range(0, nodes):
for j in range(0, nodes):
current = j
if(connected[j] == 1):
for i in range(0, nodes): # walks through and finds connections to first node
if(i != current):
if(net[current][i]==1):
connected[i]=1
connectcount=0
for j in range(0, nodes):
if(connected[j]==1):
connectcount=connectcount+1
if(connectcount == nodes):
# fully connected
return 1
else:
# not connected
return 0
#Function for calculating dendrisity value.
def dend(net):
#Makes a list of the number of edges each node in the network has.
edgesum=[]
for i in net:
edgesum.append(sum(i))
#print "The number of edges at each node: %s" % edgesum
#Makes a list of the number of nodes that have three edges.
threecount=0
for i in edgesum:
if i==3:
threecount=threecount+1
#print "The number of nodes with three edges: %i" % threecount
edges=[] #Place-holder for list of all edges.
bridges=[] #Place-holder for list of all bridge edges.
nontermnodebridges=[] #Place-holder list for non-terminal bridge edges.
nontermbnodes=[] #Place-holder list for non-terminal nodes connected tobridges.
#Finds bridges in network by breaking connections and then checking connectivity of network, then checks if nodes and edges are non-terminal.
for i in range(0,nodes):
for j in range(0,nodes):
netp=copy.deepcopy(net)
if i!=j and netp[i][j]==netp[j][i]==1:
edges.append(1)
netp[i][j]=netp[j][i]=0
check=fullconnectcheck(netp)
if check==0:
bridges.append(1)
if sum(net[i])>=2:
nontermnodebridges.append(str(i)+str(j))
nontermbnodes.append(i)
else:
bridges.append(0)
#Gives the total number of non-terminal bridge edges.
#nontermbridge=len(nontermbridge)/2
def unique(seq):
checked=[]
for e in seq:
if e not in checked:
checked.append(e)
return checked
nontermbnodes=unique(nontermbnodes)
#Makes a list of non-terminal edges and checks if they are connected to each other.
bridgeconnect=[]
threebridgeconnect=[]
for i in nontermbnodes:
for j in nontermbnodes:
if i>=0 and j>=0 and i!=j and j>i:
if net[i][j]==net[j][i]==1:
bridgeconnect.append(str(i)+str(j))
if sum(net[i])==3:
threebridgeconnect.append(i)
totedge=(sum(edgesum)/2)
#print "Then total number of edges in the matrix: %i" % totedge
totbridge=sum(bridges)/2
#print "The total number of bridges in the network: %i" % totbridge
#Sums the total number of non-terminal edges.
#nontbedge=float(len(unique(bridgeconnect)))
#Checks if the bridges involving non-terminal nodes are to another non-terminal node.
nontermbridgeconnect=0
for i in range(0,len(bridgeconnect)):
if bridgeconnect[i] in nontermnodebridges:
nontermbridgeconnect+=1
#print "The number of non-terminal bridge edges in the network: %i" % nontermbridgeconnect
Totnontbridgeflankthree=float(len(unique(threebridgeconnect)))
#print "The number of non-terminal nodes in the network that flank nodes with three edges: %i" % Totnontbridgeflankthree
if nontermbridgeconnect!=float(0):
dend = Totnontbridgeflankthree/float(len(nontermbnodes))
else:
dend="0"
return dend
def pnextcalc(w,m,freq,popnet,immi):
# calculate avereage allele frequency after migration
pave = [0 for x in range(nodes)]
for i in range(0, nodes):
for j in range(0, nodes):
if i == j:
# normalization step if m is too large
if(immi[i]*m <= 1):
# p = global freq[j]
pave[i]=pave[i]+(1-immi[i]*m) * freq[j]
else:
pave[i]=0
elif popnet[i][j] == 1:
if(immi[i]*m <= 1):
pave[i] = pave[i] + m * freq[j]
else:
pave[i] = pave[i] + m * freq[j] / immi[i]*m
# calculate average fitness
wbar = [0 for x in range(nodes)]
for i in range(0, nodes):
wbar[i] = pave[i]*pave[i] + 2*w*pave[i]*(1-pave[i]) + (1-pave[i])*(1-pave[i])
# update frequencies with selection
pnext = [0 for x in range(nodes)]
for i in range(0, nodes):
pnext[i] = (pave[i]*pave[i] + w*pave[i]*(1-pave[i]))/wbar[i]
# boundry at zero and one
for i in range(0, nodes):
if(pnext[i]>1):
pnext[i]=1
if(pnext[i]<0):
pnext[i]=0
# store new values
for i in range(0, nodes):
freq[i]=pnext[i]
return freq
def diff(freq, pnext):
# calculate change
diff=0
for i in range(0, nodes):
diff=diff+abs(pnext[i]-freq[i])
return diff
# calculate distance in allele frequency between popualtions
def dist(freq):
distance=0
for i in range(0, nodes):
for j in range(0, nodes):
distance=distance+abs(freq[i]-freq[j])
return distance
def | (net,npop):
# what fraction of the nodes are terminal nodes
# these only have one connection in the rows
term = 0
for i in range(0, npop):
rowsum = 0
for j in range(0, npop):
rowsum = rowsum + net[i][j]
if(rowsum == 1):
term = term + 1
#print rowsum
#print term
return float(term)/float(npop)
for r in range(0, replicates):
if(noderange):
nodes = random.randrange(minnode,maxnode+1)
if(edgerange):
minedge = nodes-1 # minimum # edges possible
maxedge = nodes*minedge/2 #maximum # edges possible
edgepick = random.randrange(minedge,maxedge+1)
#print edgepick
else:
edgepick = edges
# create networks until fully interconnected
while(1):
# initial empty network matrix
net = [[0 for x in range(nodes)] for x in range(nodes)]
# place random edges
countedge=0
while(1):
if(countedge == edgepick):
break
#for i in range(0, edgepick):
x = random.randrange(0,nodes)
y = random.randrange(0,nodes)
if(x != y):
if(net[x][y] == 0):
net[x][y]=1
net[y][x]=1
countedge=countedge+1
# evaluate if fully interconnected
if(fullconnectcheck(net) == 1):
break
#print net
if(plotnet):
DistMatrix =np.array(net)
G = nx.from_numpy_matrix(DistMatrix)
#pos=nx.graphviz_layout(G)
#nx.draw_graphviz(G,labels=None,node_alpha=0.1)
#plt.draw()
#plt.clf()
# evaluate stability
freq = [0 for x in range(nodes)]
inc=0.1
m=0.0
errorflag=0
immi = [0 for x in range(nodes)]
for i in range(0, nodes):
for j in range(0, nodes):
immi[i]=immi[i]+net[i][j]
digitplaces=3
digitplacecount=0
while(True):
digitplacecount=digitplacecount+1
distance=1
while(distance>0.0001):
difference=1
for z in range(0, nodes):
freq[z]=0+random.random()/50
for z in range(0, nodes/2):
freq[z]=1-random.random()/50
while(difference>0.0000000001):
prefreq=copy.deepcopy(freq)
freq=pnextcalc(w,m,freq,net,immi)
difference=diff(prefreq,freq)
#currenttime=time.time()
distance=dist(freq)
m=m+inc
m=m-2*inc
if(m == 0.0):
digitplaces=digitplaces+1
if(digitplacecount==digitplaces):
break
#print m
inc=inc/10
#print m
#print net
# calcualte variance in connectivity
aveconnect = edgepick/nodes
sumnodeconnect = [0 for x in range(nodes)]
for i in range(0, nodes):
for j in range(0, nodes):
if(net[i][j]):
sumnodeconnect[i] = sumnodeconnect[i]+1
varsum=0.0
for i in range(0, nodes):
varsum = varsum + (sumnodeconnect[i] - aveconnect)*(sumnodeconnect[i] - aveconnect)
variance = varsum / nodes
# end calcualte variance in connectivity
# calculate network diameter and average shortest path length?
# calculate dendrosity
#bigY = dend(net)
#print bigY
#print "nodes = ", nodes
#print "edges = ", edgepick
#print "variance = ", variance
#print "m* = ", m
nodeoutput = str(nodes)
edgeoutput = str(edgepick)
nodesperedge = str(float(nodes)/float(edgepick))
veoutput = str(nodesperedge)
moutput = str(m)
varoutput = str(variance)
varianceperedge = str(variance/float(edgepick))
varedgeoutput = str(varianceperedge)
lin=linearness(net,nodes)
linoutput=str(lin)
#Youtput = str(bigY)
dendo=dend(net)
dendout=str(dendo)
ter=terminalness(net,nodes)
teroutput = str(ter)
effo = eff(net,nodes)
effoutput = str(effo)
if(nodes%2 == 0):
eveness=1
else:
eveness=0
evenoutput = str(eveness)
netoutput = str(net)
outfile.write(nodeoutput+"\t"+edgeoutput+"\t"+moutput+"\t"+varoutput+"\t"+effoutput+"\t"+varedgeoutput+"\t"+linoutput+"\t"+dendout+"\t"+teroutput+"\t"+evenoutput+"\t"+veoutput+"\t"+netoutput+"\n")
print r, " ", nodes, " ", edgepick, " ", m
#pause until enter
##q = input("Press Enter to continue...")
exit()
| terminalness | identifier_name |
udpopnet_random.py | # generate random fully connected networks
# and evaluate their properties
import time
import copy
import random
import networkx as nx
import numpy as np
#import matplotlib.pyplot as plt
#plt.ion()
plotnet = 1 # 1 if plot network, 0 is no figure
outfile = open("randomnetoutput.txt", "wb")
outfile.write("Node"+"\t"+ "Edges"+"\t"+"Mcrit"+"\t"+"Var"+"\t"+"Eff"+"\t"+"Var/Edge"+"\t"+"MaxDiam"+"\t"+"Dend"+"\t"+"Terminal"+"\t"+"Even"+"\t"+"V/E"+"\t"+"Net"+"\n")
nodes = 10
edges = 15
npop=copy.deepcopy(nodes)
edgerange=1
noderange=1
if(noderange):
# range of nodes in the network
minnode = 2
maxnode = 20
#nodes = 8 # just have to change this
replicates = 100
w = 0.5 # heterozygote fitness relative to homozygotes
def linearness(net, npop):
# special case for a network of 2 populations
|
def eff(net, npop):
# special case for a network of 2 populations
if(npop == 2):
return 1
else:
# determine the distance between all popualtion pairs
invert=[]
for i in range(1, npop):
link = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
linkcopy = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
for k in range(1, npop):
for j in range(0, npop):
if (i != j):
if(link[j]==0):
if(net[i][j]==1):
linkcopy[j]=k
else:
for l in range(0, npop):
if (i != l):
if(link[l]==0):
if(net[j][l]==1):
linkcopy[l]=k
link = copy.deepcopy(linkcopy)
for m in range(len(link)):
if link[m]!=0:
invert.append(1/float(link[m]))
return float(sum(invert))/(float(npop)*(npop-1))
# check if the network is fully interconnected
def fullconnectcheck(net):
connected = [0 for x in range(nodes)]
connected[0]=1
current=0
for k in range(0, nodes):
for j in range(0, nodes):
current = j
if(connected[j] == 1):
for i in range(0, nodes): # walks through and finds connections to first node
if(i != current):
if(net[current][i]==1):
connected[i]=1
connectcount=0
for j in range(0, nodes):
if(connected[j]==1):
connectcount=connectcount+1
if(connectcount == nodes):
# fully connected
return 1
else:
# not connected
return 0
#Function for calculating dendrisity value.
def dend(net):
#Makes a list of the number of edges each node in the network has.
edgesum=[]
for i in net:
edgesum.append(sum(i))
#print "The number of edges at each node: %s" % edgesum
#Makes a list of the number of nodes that have three edges.
threecount=0
for i in edgesum:
if i==3:
threecount=threecount+1
#print "The number of nodes with three edges: %i" % threecount
edges=[] #Place-holder for list of all edges.
bridges=[] #Place-holder for list of all bridge edges.
nontermnodebridges=[] #Place-holder list for non-terminal bridge edges.
nontermbnodes=[] #Place-holder list for non-terminal nodes connected tobridges.
#Finds bridges in network by breaking connections and then checking connectivity of network, then checks if nodes and edges are non-terminal.
for i in range(0,nodes):
for j in range(0,nodes):
netp=copy.deepcopy(net)
if i!=j and netp[i][j]==netp[j][i]==1:
edges.append(1)
netp[i][j]=netp[j][i]=0
check=fullconnectcheck(netp)
if check==0:
bridges.append(1)
if sum(net[i])>=2:
nontermnodebridges.append(str(i)+str(j))
nontermbnodes.append(i)
else:
bridges.append(0)
#Gives the total number of non-terminal bridge edges.
#nontermbridge=len(nontermbridge)/2
def unique(seq):
checked=[]
for e in seq:
if e not in checked:
checked.append(e)
return checked
nontermbnodes=unique(nontermbnodes)
#Makes a list of non-terminal edges and checks if they are connected to each other.
bridgeconnect=[]
threebridgeconnect=[]
for i in nontermbnodes:
for j in nontermbnodes:
if i>=0 and j>=0 and i!=j and j>i:
if net[i][j]==net[j][i]==1:
bridgeconnect.append(str(i)+str(j))
if sum(net[i])==3:
threebridgeconnect.append(i)
totedge=(sum(edgesum)/2)
#print "Then total number of edges in the matrix: %i" % totedge
totbridge=sum(bridges)/2
#print "The total number of bridges in the network: %i" % totbridge
#Sums the total number of non-terminal edges.
#nontbedge=float(len(unique(bridgeconnect)))
#Checks if the bridges involving non-terminal nodes are to another non-terminal node.
nontermbridgeconnect=0
for i in range(0,len(bridgeconnect)):
if bridgeconnect[i] in nontermnodebridges:
nontermbridgeconnect+=1
#print "The number of non-terminal bridge edges in the network: %i" % nontermbridgeconnect
Totnontbridgeflankthree=float(len(unique(threebridgeconnect)))
#print "The number of non-terminal nodes in the network that flank nodes with three edges: %i" % Totnontbridgeflankthree
if nontermbridgeconnect!=float(0):
dend = Totnontbridgeflankthree/float(len(nontermbnodes))
else:
dend="0"
return dend
def pnextcalc(w,m,freq,popnet,immi):
# calculate avereage allele frequency after migration
pave = [0 for x in range(nodes)]
for i in range(0, nodes):
for j in range(0, nodes):
if i == j:
# normalization step if m is too large
if(immi[i]*m <= 1):
# p = global freq[j]
pave[i]=pave[i]+(1-immi[i]*m) * freq[j]
else:
pave[i]=0
elif popnet[i][j] == 1:
if(immi[i]*m <= 1):
pave[i] = pave[i] + m * freq[j]
else:
pave[i] = pave[i] + m * freq[j] / immi[i]*m
# calculate average fitness
wbar = [0 for x in range(nodes)]
for i in range(0, nodes):
wbar[i] = pave[i]*pave[i] + 2*w*pave[i]*(1-pave[i]) + (1-pave[i])*(1-pave[i])
# update frequencies with selection
pnext = [0 for x in range(nodes)]
for i in range(0, nodes):
pnext[i] = (pave[i]*pave[i] + w*pave[i]*(1-pave[i]))/wbar[i]
# boundry at zero and one
for i in range(0, nodes):
if(pnext[i]>1):
pnext[i]=1
if(pnext[i]<0):
pnext[i]=0
# store new values
for i in range(0, nodes):
freq[i]=pnext[i]
return freq
def diff(freq, pnext):
# calculate change
diff=0
for i in range(0, nodes):
diff=diff+abs(pnext[i]-freq[i])
return diff
# calculate distance in allele frequency between popualtions
def dist(freq):
distance=0
for i in range(0, nodes):
for j in range(0, nodes):
distance=distance+abs(freq[i]-freq[j])
return distance
def terminalness(net,npop):
# what fraction of the nodes are terminal nodes
# these only have one connection in the rows
term = 0
for i in range(0, npop):
rowsum = 0
for j in range(0, npop):
rowsum = rowsum + net[i][j]
if(rowsum == 1):
term = term + 1
#print rowsum
#print term
return float(term)/float(npop)
for r in range(0, replicates):
if(noderange):
nodes = random.randrange(minnode,maxnode+1)
if(edgerange):
minedge = nodes-1 # minimum # edges possible
maxedge = nodes*minedge/2 #maximum # edges possible
edgepick = random.randrange(minedge,maxedge+1)
#print edgepick
else:
edgepick = edges
# create networks until fully interconnected
while(1):
# initial empty network matrix
net = [[0 for x in range(nodes)] for x in range(nodes)]
# place random edges
countedge=0
while(1):
if(countedge == edgepick):
break
#for i in range(0, edgepick):
x = random.randrange(0,nodes)
y = random.randrange(0,nodes)
if(x != y):
if(net[x][y] == 0):
net[x][y]=1
net[y][x]=1
countedge=countedge+1
# evaluate if fully interconnected
if(fullconnectcheck(net) == 1):
break
#print net
if(plotnet):
DistMatrix =np.array(net)
G = nx.from_numpy_matrix(DistMatrix)
#pos=nx.graphviz_layout(G)
#nx.draw_graphviz(G,labels=None,node_alpha=0.1)
#plt.draw()
#plt.clf()
# evaluate stability
freq = [0 for x in range(nodes)]
inc=0.1
m=0.0
errorflag=0
immi = [0 for x in range(nodes)]
for i in range(0, nodes):
for j in range(0, nodes):
immi[i]=immi[i]+net[i][j]
digitplaces=3
digitplacecount=0
while(True):
digitplacecount=digitplacecount+1
distance=1
while(distance>0.0001):
difference=1
for z in range(0, nodes):
freq[z]=0+random.random()/50
for z in range(0, nodes/2):
freq[z]=1-random.random()/50
while(difference>0.0000000001):
prefreq=copy.deepcopy(freq)
freq=pnextcalc(w,m,freq,net,immi)
difference=diff(prefreq,freq)
#currenttime=time.time()
distance=dist(freq)
m=m+inc
m=m-2*inc
if(m == 0.0):
digitplaces=digitplaces+1
if(digitplacecount==digitplaces):
break
#print m
inc=inc/10
#print m
#print net
# calcualte variance in connectivity
aveconnect = edgepick/nodes
sumnodeconnect = [0 for x in range(nodes)]
for i in range(0, nodes):
for j in range(0, nodes):
if(net[i][j]):
sumnodeconnect[i] = sumnodeconnect[i]+1
varsum=0.0
for i in range(0, nodes):
varsum = varsum + (sumnodeconnect[i] - aveconnect)*(sumnodeconnect[i] - aveconnect)
variance = varsum / nodes
# end calcualte variance in connectivity
# calculate network diameter and average shortest path length?
# calculate dendrosity
#bigY = dend(net)
#print bigY
#print "nodes = ", nodes
#print "edges = ", edgepick
#print "variance = ", variance
#print "m* = ", m
nodeoutput = str(nodes)
edgeoutput = str(edgepick)
nodesperedge = str(float(nodes)/float(edgepick))
veoutput = str(nodesperedge)
moutput = str(m)
varoutput = str(variance)
varianceperedge = str(variance/float(edgepick))
varedgeoutput = str(varianceperedge)
lin=linearness(net,nodes)
linoutput=str(lin)
#Youtput = str(bigY)
dendo=dend(net)
dendout=str(dendo)
ter=terminalness(net,nodes)
teroutput = str(ter)
effo = eff(net,nodes)
effoutput = str(effo)
if(nodes%2 == 0):
eveness=1
else:
eveness=0
evenoutput = str(eveness)
netoutput = str(net)
outfile.write(nodeoutput+"\t"+edgeoutput+"\t"+moutput+"\t"+varoutput+"\t"+effoutput+"\t"+varedgeoutput+"\t"+linoutput+"\t"+dendout+"\t"+teroutput+"\t"+evenoutput+"\t"+veoutput+"\t"+netoutput+"\n")
print r, " ", nodes, " ", edgepick, " ", m
#pause until enter
##q = input("Press Enter to continue...")
exit()
| if(npop == 2):
return 1
else:
# determine the distance between all popualtion pairs
maxdiameter=0
for i in range(1, npop):
link = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
linkcopy = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
for k in range(1, npop):
for j in range(0, npop):
if (i != j):
if(link[j]==0):
if(net[i][j]==1):
linkcopy[j]=k
else:
for l in range(0, npop):
if (i != l):
if(link[l]==0):
if(net[j][l]==1):
linkcopy[l]=k
link = copy.deepcopy(linkcopy)
#print link
for j in range(0, npop):
if(link[j]>maxdiameter):
maxdiameter=link[j]
#print "diameter: ", maxdiameter
# alternative could return diameter by uncommenting below
#return maxdiameter
#print "linearness: ", (float(maxdiameter)-1)/(float(npop)-2)
return ((float(maxdiameter)-1)/(float(npop)-2)) | identifier_body |
udpopnet_random.py | # generate random fully connected networks
# and evaluate their properties
import time
import copy
import random
import networkx as nx
import numpy as np
#import matplotlib.pyplot as plt
#plt.ion()
plotnet = 1 # 1 if plot network, 0 is no figure
outfile = open("randomnetoutput.txt", "wb")
outfile.write("Node"+"\t"+ "Edges"+"\t"+"Mcrit"+"\t"+"Var"+"\t"+"Eff"+"\t"+"Var/Edge"+"\t"+"MaxDiam"+"\t"+"Dend"+"\t"+"Terminal"+"\t"+"Even"+"\t"+"V/E"+"\t"+"Net"+"\n")
nodes = 10
edges = 15
npop=copy.deepcopy(nodes)
edgerange=1
noderange=1
if(noderange):
# range of nodes in the network
minnode = 2
maxnode = 20
#nodes = 8 # just have to change this
replicates = 100
w = 0.5 # heterozygote fitness relative to homozygotes
def linearness(net, npop):
# special case for a network of 2 populations
if(npop == 2):
return 1
else:
# determine the distance between all popualtion pairs
maxdiameter=0
for i in range(1, npop):
link = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
linkcopy = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
for k in range(1, npop):
for j in range(0, npop):
if (i != j):
if(link[j]==0):
if(net[i][j]==1):
linkcopy[j]=k
else:
for l in range(0, npop):
if (i != l):
if(link[l]==0):
if(net[j][l]==1):
linkcopy[l]=k
link = copy.deepcopy(linkcopy)
#print link
for j in range(0, npop):
if(link[j]>maxdiameter):
maxdiameter=link[j]
#print "diameter: ", maxdiameter
# alternative could return diameter by uncommenting below
#return maxdiameter
#print "linearness: ", (float(maxdiameter)-1)/(float(npop)-2)
return ((float(maxdiameter)-1)/(float(npop)-2))
def eff(net, npop):
# special case for a network of 2 populations
if(npop == 2):
return 1
else:
# determine the distance between all popualtion pairs
invert=[]
for i in range(1, npop):
link = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
linkcopy = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
for k in range(1, npop):
for j in range(0, npop):
if (i != j):
if(link[j]==0):
if(net[i][j]==1):
linkcopy[j]=k
else:
for l in range(0, npop):
if (i != l):
if(link[l]==0):
if(net[j][l]==1):
linkcopy[l]=k
link = copy.deepcopy(linkcopy)
for m in range(len(link)):
if link[m]!=0:
invert.append(1/float(link[m]))
return float(sum(invert))/(float(npop)*(npop-1))
# check if the network is fully interconnected
def fullconnectcheck(net):
connected = [0 for x in range(nodes)]
connected[0]=1
current=0
for k in range(0, nodes):
for j in range(0, nodes):
current = j
if(connected[j] == 1):
for i in range(0, nodes): # walks through and finds connections to first node
if(i != current):
if(net[current][i]==1):
connected[i]=1
connectcount=0
for j in range(0, nodes):
if(connected[j]==1):
connectcount=connectcount+1
if(connectcount == nodes):
# fully connected
return 1
else:
# not connected
return 0
#Function for calculating dendrisity value.
def dend(net):
#Makes a list of the number of edges each node in the network has.
edgesum=[]
for i in net:
edgesum.append(sum(i))
#print "The number of edges at each node: %s" % edgesum
#Makes a list of the number of nodes that have three edges.
threecount=0
for i in edgesum:
if i==3:
threecount=threecount+1
#print "The number of nodes with three edges: %i" % threecount
edges=[] #Place-holder for list of all edges.
bridges=[] #Place-holder for list of all bridge edges.
nontermnodebridges=[] #Place-holder list for non-terminal bridge edges.
nontermbnodes=[] #Place-holder list for non-terminal nodes connected tobridges.
#Finds bridges in network by breaking connections and then checking connectivity of network, then checks if nodes and edges are non-terminal.
for i in range(0,nodes):
for j in range(0,nodes):
netp=copy.deepcopy(net)
if i!=j and netp[i][j]==netp[j][i]==1:
edges.append(1)
netp[i][j]=netp[j][i]=0
check=fullconnectcheck(netp)
if check==0:
bridges.append(1)
if sum(net[i])>=2:
nontermnodebridges.append(str(i)+str(j))
nontermbnodes.append(i)
else:
bridges.append(0)
#Gives the total number of non-terminal bridge edges.
#nontermbridge=len(nontermbridge)/2
def unique(seq):
checked=[]
for e in seq:
if e not in checked:
checked.append(e)
return checked
nontermbnodes=unique(nontermbnodes)
#Makes a list of non-terminal edges and checks if they are connected to each other.
bridgeconnect=[]
threebridgeconnect=[]
for i in nontermbnodes:
for j in nontermbnodes:
if i>=0 and j>=0 and i!=j and j>i:
if net[i][j]==net[j][i]==1:
bridgeconnect.append(str(i)+str(j))
if sum(net[i])==3:
threebridgeconnect.append(i)
totedge=(sum(edgesum)/2)
#print "Then total number of edges in the matrix: %i" % totedge
totbridge=sum(bridges)/2
#print "The total number of bridges in the network: %i" % totbridge
#Sums the total number of non-terminal edges.
#nontbedge=float(len(unique(bridgeconnect)))
#Checks if the bridges involving non-terminal nodes are to another non-terminal node.
nontermbridgeconnect=0
for i in range(0,len(bridgeconnect)):
if bridgeconnect[i] in nontermnodebridges:
nontermbridgeconnect+=1
#print "The number of non-terminal bridge edges in the network: %i" % nontermbridgeconnect
Totnontbridgeflankthree=float(len(unique(threebridgeconnect)))
#print "The number of non-terminal nodes in the network that flank nodes with three edges: %i" % Totnontbridgeflankthree
if nontermbridgeconnect!=float(0):
dend = Totnontbridgeflankthree/float(len(nontermbnodes))
else:
dend="0"
return dend
def pnextcalc(w,m,freq,popnet,immi):
# calculate avereage allele frequency after migration
pave = [0 for x in range(nodes)]
for i in range(0, nodes):
for j in range(0, nodes):
if i == j:
# normalization step if m is too large
if(immi[i]*m <= 1):
# p = global freq[j]
pave[i]=pave[i]+(1-immi[i]*m) * freq[j]
else:
pave[i]=0
elif popnet[i][j] == 1:
if(immi[i]*m <= 1):
pave[i] = pave[i] + m * freq[j]
else:
pave[i] = pave[i] + m * freq[j] / immi[i]*m
# calculate average fitness
wbar = [0 for x in range(nodes)]
for i in range(0, nodes):
wbar[i] = pave[i]*pave[i] + 2*w*pave[i]*(1-pave[i]) + (1-pave[i])*(1-pave[i])
# update frequencies with selection
pnext = [0 for x in range(nodes)]
for i in range(0, nodes):
pnext[i] = (pave[i]*pave[i] + w*pave[i]*(1-pave[i]))/wbar[i]
# boundry at zero and one
for i in range(0, nodes):
if(pnext[i]>1):
pnext[i]=1
if(pnext[i]<0):
pnext[i]=0
# store new values
for i in range(0, nodes):
freq[i]=pnext[i]
return freq
def diff(freq, pnext):
# calculate change
diff=0
for i in range(0, nodes):
diff=diff+abs(pnext[i]-freq[i]) | # calculate distance in allele frequency between popualtions
def dist(freq):
distance=0
for i in range(0, nodes):
for j in range(0, nodes):
distance=distance+abs(freq[i]-freq[j])
return distance
def terminalness(net,npop):
# what fraction of the nodes are terminal nodes
# these only have one connection in the rows
term = 0
for i in range(0, npop):
rowsum = 0
for j in range(0, npop):
rowsum = rowsum + net[i][j]
if(rowsum == 1):
term = term + 1
#print rowsum
#print term
return float(term)/float(npop)
for r in range(0, replicates):
if(noderange):
nodes = random.randrange(minnode,maxnode+1)
if(edgerange):
minedge = nodes-1 # minimum # edges possible
maxedge = nodes*minedge/2 #maximum # edges possible
edgepick = random.randrange(minedge,maxedge+1)
#print edgepick
else:
edgepick = edges
# create networks until fully interconnected
while(1):
# initial empty network matrix
net = [[0 for x in range(nodes)] for x in range(nodes)]
# place random edges
countedge=0
while(1):
if(countedge == edgepick):
break
#for i in range(0, edgepick):
x = random.randrange(0,nodes)
y = random.randrange(0,nodes)
if(x != y):
if(net[x][y] == 0):
net[x][y]=1
net[y][x]=1
countedge=countedge+1
# evaluate if fully interconnected
if(fullconnectcheck(net) == 1):
break
#print net
if(plotnet):
DistMatrix =np.array(net)
G = nx.from_numpy_matrix(DistMatrix)
#pos=nx.graphviz_layout(G)
#nx.draw_graphviz(G,labels=None,node_alpha=0.1)
#plt.draw()
#plt.clf()
# evaluate stability
freq = [0 for x in range(nodes)]
inc=0.1
m=0.0
errorflag=0
immi = [0 for x in range(nodes)]
for i in range(0, nodes):
for j in range(0, nodes):
immi[i]=immi[i]+net[i][j]
digitplaces=3
digitplacecount=0
while(True):
digitplacecount=digitplacecount+1
distance=1
while(distance>0.0001):
difference=1
for z in range(0, nodes):
freq[z]=0+random.random()/50
for z in range(0, nodes/2):
freq[z]=1-random.random()/50
while(difference>0.0000000001):
prefreq=copy.deepcopy(freq)
freq=pnextcalc(w,m,freq,net,immi)
difference=diff(prefreq,freq)
#currenttime=time.time()
distance=dist(freq)
m=m+inc
m=m-2*inc
if(m == 0.0):
digitplaces=digitplaces+1
if(digitplacecount==digitplaces):
break
#print m
inc=inc/10
#print m
#print net
# calcualte variance in connectivity
aveconnect = edgepick/nodes
sumnodeconnect = [0 for x in range(nodes)]
for i in range(0, nodes):
for j in range(0, nodes):
if(net[i][j]):
sumnodeconnect[i] = sumnodeconnect[i]+1
varsum=0.0
for i in range(0, nodes):
varsum = varsum + (sumnodeconnect[i] - aveconnect)*(sumnodeconnect[i] - aveconnect)
variance = varsum / nodes
# end calcualte variance in connectivity
# calculate network diameter and average shortest path length?
# calculate dendrosity
#bigY = dend(net)
#print bigY
#print "nodes = ", nodes
#print "edges = ", edgepick
#print "variance = ", variance
#print "m* = ", m
nodeoutput = str(nodes)
edgeoutput = str(edgepick)
nodesperedge = str(float(nodes)/float(edgepick))
veoutput = str(nodesperedge)
moutput = str(m)
varoutput = str(variance)
varianceperedge = str(variance/float(edgepick))
varedgeoutput = str(varianceperedge)
lin=linearness(net,nodes)
linoutput=str(lin)
#Youtput = str(bigY)
dendo=dend(net)
dendout=str(dendo)
ter=terminalness(net,nodes)
teroutput = str(ter)
effo = eff(net,nodes)
effoutput = str(effo)
if(nodes%2 == 0):
eveness=1
else:
eveness=0
evenoutput = str(eveness)
netoutput = str(net)
outfile.write(nodeoutput+"\t"+edgeoutput+"\t"+moutput+"\t"+varoutput+"\t"+effoutput+"\t"+varedgeoutput+"\t"+linoutput+"\t"+dendout+"\t"+teroutput+"\t"+evenoutput+"\t"+veoutput+"\t"+netoutput+"\n")
print r, " ", nodes, " ", edgepick, " ", m
#pause until enter
##q = input("Press Enter to continue...")
exit() | return diff
| random_line_split |
udpopnet_random.py | # generate random fully connected networks
# and evaluate their properties
import time
import copy
import random
import networkx as nx
import numpy as np
#import matplotlib.pyplot as plt
#plt.ion()
plotnet = 1 # 1 if plot network, 0 is no figure
outfile = open("randomnetoutput.txt", "wb")
outfile.write("Node"+"\t"+ "Edges"+"\t"+"Mcrit"+"\t"+"Var"+"\t"+"Eff"+"\t"+"Var/Edge"+"\t"+"MaxDiam"+"\t"+"Dend"+"\t"+"Terminal"+"\t"+"Even"+"\t"+"V/E"+"\t"+"Net"+"\n")
nodes = 10
edges = 15
npop=copy.deepcopy(nodes)
edgerange=1
noderange=1
if(noderange):
# range of nodes in the network
minnode = 2
maxnode = 20
#nodes = 8 # just have to change this
replicates = 100
w = 0.5 # heterozygote fitness relative to homozygotes
def linearness(net, npop):
# special case for a network of 2 populations
if(npop == 2):
return 1
else:
# determine the distance between all popualtion pairs
maxdiameter=0
for i in range(1, npop):
link = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
linkcopy = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
for k in range(1, npop):
for j in range(0, npop):
if (i != j):
if(link[j]==0):
if(net[i][j]==1):
linkcopy[j]=k
else:
for l in range(0, npop):
if (i != l):
if(link[l]==0):
if(net[j][l]==1):
linkcopy[l]=k
link = copy.deepcopy(linkcopy)
#print link
for j in range(0, npop):
if(link[j]>maxdiameter):
maxdiameter=link[j]
#print "diameter: ", maxdiameter
# alternative could return diameter by uncommenting below
#return maxdiameter
#print "linearness: ", (float(maxdiameter)-1)/(float(npop)-2)
return ((float(maxdiameter)-1)/(float(npop)-2))
def eff(net, npop):
# special case for a network of 2 populations
if(npop == 2):
return 1
else:
# determine the distance between all popualtion pairs
invert=[]
for i in range(1, npop):
link = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
linkcopy = [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0]
for k in range(1, npop):
for j in range(0, npop):
if (i != j):
if(link[j]==0):
if(net[i][j]==1):
linkcopy[j]=k
else:
for l in range(0, npop):
if (i != l):
if(link[l]==0):
if(net[j][l]==1):
linkcopy[l]=k
link = copy.deepcopy(linkcopy)
for m in range(len(link)):
if link[m]!=0:
invert.append(1/float(link[m]))
return float(sum(invert))/(float(npop)*(npop-1))
# check if the network is fully interconnected
def fullconnectcheck(net):
connected = [0 for x in range(nodes)]
connected[0]=1
current=0
for k in range(0, nodes):
for j in range(0, nodes):
current = j
if(connected[j] == 1):
for i in range(0, nodes): # walks through and finds connections to first node
if(i != current):
if(net[current][i]==1):
connected[i]=1
connectcount=0
for j in range(0, nodes):
if(connected[j]==1):
connectcount=connectcount+1
if(connectcount == nodes):
# fully connected
return 1
else:
# not connected
return 0
#Function for calculating dendrisity value.
def dend(net):
#Makes a list of the number of edges each node in the network has.
edgesum=[]
for i in net:
edgesum.append(sum(i))
#print "The number of edges at each node: %s" % edgesum
#Makes a list of the number of nodes that have three edges.
threecount=0
for i in edgesum:
if i==3:
threecount=threecount+1
#print "The number of nodes with three edges: %i" % threecount
edges=[] #Place-holder for list of all edges.
bridges=[] #Place-holder for list of all bridge edges.
nontermnodebridges=[] #Place-holder list for non-terminal bridge edges.
nontermbnodes=[] #Place-holder list for non-terminal nodes connected tobridges.
#Finds bridges in network by breaking connections and then checking connectivity of network, then checks if nodes and edges are non-terminal.
for i in range(0,nodes):
for j in range(0,nodes):
netp=copy.deepcopy(net)
if i!=j and netp[i][j]==netp[j][i]==1:
edges.append(1)
netp[i][j]=netp[j][i]=0
check=fullconnectcheck(netp)
if check==0:
bridges.append(1)
if sum(net[i])>=2:
nontermnodebridges.append(str(i)+str(j))
nontermbnodes.append(i)
else:
bridges.append(0)
#Gives the total number of non-terminal bridge edges.
#nontermbridge=len(nontermbridge)/2
def unique(seq):
checked=[]
for e in seq:
if e not in checked:
checked.append(e)
return checked
nontermbnodes=unique(nontermbnodes)
#Makes a list of non-terminal edges and checks if they are connected to each other.
bridgeconnect=[]
threebridgeconnect=[]
for i in nontermbnodes:
for j in nontermbnodes:
if i>=0 and j>=0 and i!=j and j>i:
|
totedge=(sum(edgesum)/2)
#print "Then total number of edges in the matrix: %i" % totedge
totbridge=sum(bridges)/2
#print "The total number of bridges in the network: %i" % totbridge
#Sums the total number of non-terminal edges.
#nontbedge=float(len(unique(bridgeconnect)))
#Checks if the bridges involving non-terminal nodes are to another non-terminal node.
nontermbridgeconnect=0
for i in range(0,len(bridgeconnect)):
if bridgeconnect[i] in nontermnodebridges:
nontermbridgeconnect+=1
#print "The number of non-terminal bridge edges in the network: %i" % nontermbridgeconnect
Totnontbridgeflankthree=float(len(unique(threebridgeconnect)))
#print "The number of non-terminal nodes in the network that flank nodes with three edges: %i" % Totnontbridgeflankthree
if nontermbridgeconnect!=float(0):
dend = Totnontbridgeflankthree/float(len(nontermbnodes))
else:
dend="0"
return dend
def pnextcalc(w,m,freq,popnet,immi):
# calculate avereage allele frequency after migration
pave = [0 for x in range(nodes)]
for i in range(0, nodes):
for j in range(0, nodes):
if i == j:
# normalization step if m is too large
if(immi[i]*m <= 1):
# p = global freq[j]
pave[i]=pave[i]+(1-immi[i]*m) * freq[j]
else:
pave[i]=0
elif popnet[i][j] == 1:
if(immi[i]*m <= 1):
pave[i] = pave[i] + m * freq[j]
else:
pave[i] = pave[i] + m * freq[j] / immi[i]*m
# calculate average fitness
wbar = [0 for x in range(nodes)]
for i in range(0, nodes):
wbar[i] = pave[i]*pave[i] + 2*w*pave[i]*(1-pave[i]) + (1-pave[i])*(1-pave[i])
# update frequencies with selection
pnext = [0 for x in range(nodes)]
for i in range(0, nodes):
pnext[i] = (pave[i]*pave[i] + w*pave[i]*(1-pave[i]))/wbar[i]
# boundry at zero and one
for i in range(0, nodes):
if(pnext[i]>1):
pnext[i]=1
if(pnext[i]<0):
pnext[i]=0
# store new values
for i in range(0, nodes):
freq[i]=pnext[i]
return freq
def diff(freq, pnext):
# calculate change
diff=0
for i in range(0, nodes):
diff=diff+abs(pnext[i]-freq[i])
return diff
# calculate distance in allele frequency between popualtions
def dist(freq):
distance=0
for i in range(0, nodes):
for j in range(0, nodes):
distance=distance+abs(freq[i]-freq[j])
return distance
def terminalness(net,npop):
# what fraction of the nodes are terminal nodes
# these only have one connection in the rows
term = 0
for i in range(0, npop):
rowsum = 0
for j in range(0, npop):
rowsum = rowsum + net[i][j]
if(rowsum == 1):
term = term + 1
#print rowsum
#print term
return float(term)/float(npop)
for r in range(0, replicates):
if(noderange):
nodes = random.randrange(minnode,maxnode+1)
if(edgerange):
minedge = nodes-1 # minimum # edges possible
maxedge = nodes*minedge/2 #maximum # edges possible
edgepick = random.randrange(minedge,maxedge+1)
#print edgepick
else:
edgepick = edges
# create networks until fully interconnected
while(1):
# initial empty network matrix
net = [[0 for x in range(nodes)] for x in range(nodes)]
# place random edges
countedge=0
while(1):
if(countedge == edgepick):
break
#for i in range(0, edgepick):
x = random.randrange(0,nodes)
y = random.randrange(0,nodes)
if(x != y):
if(net[x][y] == 0):
net[x][y]=1
net[y][x]=1
countedge=countedge+1
# evaluate if fully interconnected
if(fullconnectcheck(net) == 1):
break
#print net
if(plotnet):
DistMatrix =np.array(net)
G = nx.from_numpy_matrix(DistMatrix)
#pos=nx.graphviz_layout(G)
#nx.draw_graphviz(G,labels=None,node_alpha=0.1)
#plt.draw()
#plt.clf()
# evaluate stability
freq = [0 for x in range(nodes)]
inc=0.1
m=0.0
errorflag=0
immi = [0 for x in range(nodes)]
for i in range(0, nodes):
for j in range(0, nodes):
immi[i]=immi[i]+net[i][j]
digitplaces=3
digitplacecount=0
while(True):
digitplacecount=digitplacecount+1
distance=1
while(distance>0.0001):
difference=1
for z in range(0, nodes):
freq[z]=0+random.random()/50
for z in range(0, nodes/2):
freq[z]=1-random.random()/50
while(difference>0.0000000001):
prefreq=copy.deepcopy(freq)
freq=pnextcalc(w,m,freq,net,immi)
difference=diff(prefreq,freq)
#currenttime=time.time()
distance=dist(freq)
m=m+inc
m=m-2*inc
if(m == 0.0):
digitplaces=digitplaces+1
if(digitplacecount==digitplaces):
break
#print m
inc=inc/10
#print m
#print net
# calcualte variance in connectivity
aveconnect = edgepick/nodes
sumnodeconnect = [0 for x in range(nodes)]
for i in range(0, nodes):
for j in range(0, nodes):
if(net[i][j]):
sumnodeconnect[i] = sumnodeconnect[i]+1
varsum=0.0
for i in range(0, nodes):
varsum = varsum + (sumnodeconnect[i] - aveconnect)*(sumnodeconnect[i] - aveconnect)
variance = varsum / nodes
# end calcualte variance in connectivity
# calculate network diameter and average shortest path length?
# calculate dendrosity
#bigY = dend(net)
#print bigY
#print "nodes = ", nodes
#print "edges = ", edgepick
#print "variance = ", variance
#print "m* = ", m
nodeoutput = str(nodes)
edgeoutput = str(edgepick)
nodesperedge = str(float(nodes)/float(edgepick))
veoutput = str(nodesperedge)
moutput = str(m)
varoutput = str(variance)
varianceperedge = str(variance/float(edgepick))
varedgeoutput = str(varianceperedge)
lin=linearness(net,nodes)
linoutput=str(lin)
#Youtput = str(bigY)
dendo=dend(net)
dendout=str(dendo)
ter=terminalness(net,nodes)
teroutput = str(ter)
effo = eff(net,nodes)
effoutput = str(effo)
if(nodes%2 == 0):
eveness=1
else:
eveness=0
evenoutput = str(eveness)
netoutput = str(net)
outfile.write(nodeoutput+"\t"+edgeoutput+"\t"+moutput+"\t"+varoutput+"\t"+effoutput+"\t"+varedgeoutput+"\t"+linoutput+"\t"+dendout+"\t"+teroutput+"\t"+evenoutput+"\t"+veoutput+"\t"+netoutput+"\n")
print r, " ", nodes, " ", edgepick, " ", m
#pause until enter
##q = input("Press Enter to continue...")
exit()
| if net[i][j]==net[j][i]==1:
bridgeconnect.append(str(i)+str(j))
if sum(net[i])==3:
threebridgeconnect.append(i) | conditional_block |
transcribe.py | #!/usr/bin/python
import os, sys
# Pages begin as follows:
# <hr>
# <A name=2></a>1<br>
# Beginning at page 4, the second number is an increasing page number:
# <hr>
# <A name=4></a>1<br>
# ...
# <hr>
# <A name=5></a>2<br>
# This is the number printed in the top-right hand corner
# of the original PDF.
HR = "<hr>\n"
BEGIN = "***OOO***<br>\n"
# Lines are numbered:
# 14<br>
# THE COURT:<br>
# GOOD MORNING.<br>
# 15<br>
# ALL RESPOND:<br>
# GOOD MORNING.<br>
BR = "<br>\n"
PRINT_PRE = False
def parsePreamble(o, i):
p = 0
print "preamble", p
lastLineNo = None
pageAnchor = None
for line in i:
if line == BEGIN:
print >>o, '<p class="narration">***OOO***</p>'
return p, lastLineNo, pageAnchor
if line == HR:
p += 1
print "preamble", p
if PRINT_PRE:
print >>o, line
continue
if line.startswith('<A '):
pageAnchor = line
if PRINT_PRE:
print >>o, line
continue
if not line.endswith(BR):
print "SKIP", line,
continue
line = line[0:-len(BR)]
if line[0].isdigit():
try:
lastLineNo = int(line)
except ValueError:
if PRINT_PRE:
print >>o, line, BR,
else:
if PRINT_PRE:
print >>o, line, BR,
assert False
corrections = [
(' star war ', ' Star Wars '),
(' yzaca ', ' ycaza '),
]
icons = {
'Mr. Richardson': 'richardson.jpg',
'The Respondent': 'leif.png',
'The Witness': 'banks.jpg',
'A': 'banks.jpg',
'by mr. richardson:': 'richardson.jpg',
'by the court:': 'avatar.png',
'by the respondent:': 'leif.png',
}
examiner = None
class Dialogue:
def __init__(self, pageNo, lastLineNo, speaker):
self.pageNo = pageNo
self.lineNo = lastLineNo
speaker = speaker.lower().title()
self.speaker = speaker
self.lines = []
self.words = []
if speaker == 'Q':
self.icon = icons[examiner]
else:
self.icon = icons.get(speaker, 'avatar.png')
def addLine(self, line):
l = line.lower()
if l.startswith('by '):
global examiner
examiner = l
elif l == 'constantine evans,':
icons['The Witness'] = 'costi.png'
icons['A'] = 'costi.png'
for r in corrections:
l = l.replace(r[0], r[1])
self.lines.append(l)
self.words.extend(l.split())
def | (o, dialogue, pageNo, lastLineNo, speaker):
if dialogue:
printDialogue(o, dialogue)
dialogue = Dialogue(pageNo, lastLineNo, speaker)
return dialogue
def parsePageAnchor(anchor):
# e.g.,
# <A name=38></a>35<br>
# The first number is produced by the pdftohtml conversion;
# the second is the one printed in the top-right of
# the original PDF.
return int(anchor.split('>')[2].split('<')[0])
def parsePage(o, i, p, pageAnchor, dialogue, lastLineNo = None):
global examiner
print "page", p
prevLineNo = None
pageNo = parsePageAnchor(pageAnchor)
print >>o, '<a name=p%02d></a>' % (pageNo, )
for line in i:
# these are eaten by the outer loop
assert not line.startswith('<A ')
if line == HR:
break # end of page
# 16<br>
# THE COURT:<br>
# AS YOU CAN SEE, WE HAVE A BUSY<br>
# 17<br>
# CALENDAR THIS MORNING.<br>
# I HAVE A PRIORITY FOR MATTER<br>
# 18<br>
# NUMBER 13, WHICH IS CASTLE VERSUS STRAND.<br>
if not line.endswith(BR):
# HTML tags at begin/end
print "SKIP", line,
assert line[0] == '<'
continue
line = line[0:-len(BR)]
if line[0].isdigit():
try:
lastLineNo = int(line)
if prevLineNo and lastLineNo == prevLineNo+1:
if dialogue and dialogue.speaker == 'Narrator':
printDialogue(o, dialogue)
dialogue = None
else:
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker='Narrator')
prevLineNo = lastLineNo
except ValueError:
dialogue.addLine(line)
prevLineNo = None
elif line.endswith(':'):
if line.count(' ') < 2:
if dialogue is None or dialogue.speaker != 'Narrator':
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker=line[0:-1])
else:
if line != 'THE COURT:':
dialogue.addLine(line)
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker=line[0:-1])
prevLineNo = None
else:
if dialogue.speaker == 'Narrator':
dialogue.addLine(line)
printDialogue(o, dialogue)
dialogue = None
else:
dialogue.addLine(line)
prevLineNo = None
elif line in ('Q','A'):
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker=line)
elif line.startswith('(WHEREUPON,'):
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker='Narrator')
dialogue.addLine(line)
prevLineNo = None
else:
if not dialogue or (line == '///' and dialogue.speaker != 'Narrator'):
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker='Narrator')
dialogue.addLine(line)
prevLineNo = None
return dialogue
# from Pygments:
# <span class="lineno"> 6 </span><span class="p">{</span>
LINENO = '<span class="lineno">%2d </span>'
printLineNumbers = False # XXX: anchor
def printDialogue(o, dialogue):
if printLineNumbers:
print >>o, (LINENO % dialogue.lineNo),
print >>o, '<a name=p%02dl%02d></a>' % (dialogue.pageNo, dialogue.lineNo)
if dialogue.speaker == 'Narrator':
print >>o, '<p class="narration">'
else:
if dialogue.icon:
print >>o, ('<p class="spk"><img src="%s" width=48 height=48 align=bottom> %s</p><p class="dlg">' % (dialogue.icon, dialogue.speaker))
else:
print >>o, ('<p class="spk">%s</p><p class="dlg">' % dialogue.speaker)
if False:
for line in dialogue.lines:
print >>o, line,
else:
printWords(o, dialogue.words)
print >>o, '</p>'
return
properNames = set([
'god',
'castle', 'green', 'homeowners', 'association', "association's",
'leif', 'strand',
'dianne', 'patrizzi', "dianne's",
'randy', 'banks',
'kelly', 'richardson',
'constantine', 'evans',
'richard', 'ycaza',
'cathy', 'brown',
'i', "i'm", "i'll", "i've",
'mr.',
'association',
'treasurer', 'president', 'chairman',
'pasadena',
'south', 'el', 'molino', 'avenue',
'twitter',
'slack',
'wednesday', 'september', 'october',
])
from collections import deque
renter = deque()
renter.append('/baby/vm-2016-07-21.mov')
renter.append('/baby/vm-2016-07-22-1.mov')
renter.append('/baby/vm-2016-07-22-2.mov')
renter.append('/baby/vm-2016-08-04.mov')
links = {
'"slack."': '''"<a href="https://slack.com/">Slack</a>."''',
'newsletters': '<a href="/havisham/v2i1.pdf">newsletters</a>',
}
def printWords(o, words):
# "I was afraid of worms, Roxanne!"
newSentence = True
lastWord = None
for word in words:
if newSentence:
word = word.capitalize()
newSentence = False
if word in properNames or (word[-1] in '.,?!' and word[:-1] in properNames):
word = word.capitalize()
if 'pdro' in word:
# case number
word = word.upper()
if max(word.count('-'), word.count('.')) > 1:
# e.g., H.O.A. or B-A-N-K-S
# BUG: capitalizes 'MAN-TO-MAN', which is funny, so I left it in
word = word.upper()
if word[:6] == 'renter':
url = renter.popleft()
word = '<a href="%s">renter</a>%s' % (url, word[6:])
elif word in links:
word = links[word]
elif word == '///' and lastWord != '///':
print >>o, '<br>'
print >>o, word,
lastWord = word
if word == '///':
print >>o, '<br>'
if word[-1] in '.?!':
newSentence = True
return
def main():
i = open("input.html", "r")
o = open("index.html", "w");
print >>o, HEAD
print >>o, '<div class="transcript"><tt>'
p, lastLineNo, pageAnchor = parsePreamble(o, i)
dialogue = parsePage(o, i, p, pageAnchor, None, lastLineNo)
for line in i:
if line == '</BODY>\n':
break
assert line.startswith('<A '), line
pageAnchor = line
dialogue = parsePage(o, i, p, pageAnchor, dialogue)
p += 1
printDialogue(o, dialogue)
print >>o, '</tt></div>'
print >>o, TAIL
o.close()
i.close()
return
HEAD = """<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Dark Castle — The Court Hearing</title>
<link rel="stylesheet" media="screen" href="/style/219.css">
<link rel="stylesheet" media="screen" href="/style/transcript.css">
<link rel="alternate" type="application/rss+xml" title="RSS" href="http://www.csszengarden.com/zengarden.xml">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="author" content="Leif Strand">
<meta name="description" content="A part of The Nemesis Project">
<meta name="robots" content="all">
<script src="http://use.typekit.net/fzq7emy.js"></script>
<script>try{Typekit.load();}catch(e){}</script>
<!--[if lt IE 9]>
<script src="/scripts/html5shiv.js"></script>
<![endif]-->
</head>
<body>
<div class="page-wrapper">
<section class="intro">
<header role="banner">
<h1>Dark Castle</h1>
<h2>The Restraining Order</h2>
</header>
</section>
<div class="main supporting" role="main">
<div class="explanation" role="article">
<h3>The Court Hearing</h3>
<p>Thursday, October 18, 2018</p>
"""
TAIL = """
</div>
</div>
<div class="main supporting" role="main">
<div class="explanation" role="article">
<p><a href="PDF_T18T54_CASTLE_GREEN_VS_STRAND_10-18-18_FINAL_TRANSCRIPT.pdf">Original PDF document</a> translated to <a href="PDF_T18T54_CASTLE_GREEN_VS_STRAND_10-18-18_FINAL_TRANSCRIPT.html">HTML</a> using <a href="http://pdftohtml.sourceforge.net/"><tt>pdftohtml</tt></a> version 0.40.</p>
<p>Dark Castle presentation produced by <a href="https://github.com/castlegreen/hulk/blob/master/transcribe.py"><tt>transcribe.py</tt></a>.</p>
</div>
</div>
<div class="main supporting" role="main">
<div class="explanation" role="article">
<p class="runic"><img src="/images/nice-girl.jpg" width=212 height=154></p>
<p class="runic">⼭ ⽶ ⽥</p>
</div>
</div>
</div>
</body>
</html>
"""
main()
| switchDialogue | identifier_name |
transcribe.py | #!/usr/bin/python
import os, sys
# Pages begin as follows:
# <hr>
# <A name=2></a>1<br>
# Beginning at page 4, the second number is an increasing page number:
# <hr>
# <A name=4></a>1<br>
# ...
# <hr>
# <A name=5></a>2<br>
# This is the number printed in the top-right hand corner
# of the original PDF.
HR = "<hr>\n"
BEGIN = "***OOO***<br>\n"
# Lines are numbered:
# 14<br>
# THE COURT:<br>
# GOOD MORNING.<br>
# 15<br>
# ALL RESPOND:<br>
# GOOD MORNING.<br>
BR = "<br>\n"
PRINT_PRE = False
def parsePreamble(o, i):
p = 0
print "preamble", p
lastLineNo = None
pageAnchor = None
for line in i:
if line == BEGIN:
print >>o, '<p class="narration">***OOO***</p>'
return p, lastLineNo, pageAnchor
if line == HR:
p += 1
print "preamble", p
if PRINT_PRE:
print >>o, line
continue
if line.startswith('<A '):
|
if not line.endswith(BR):
print "SKIP", line,
continue
line = line[0:-len(BR)]
if line[0].isdigit():
try:
lastLineNo = int(line)
except ValueError:
if PRINT_PRE:
print >>o, line, BR,
else:
if PRINT_PRE:
print >>o, line, BR,
assert False
corrections = [
(' star war ', ' Star Wars '),
(' yzaca ', ' ycaza '),
]
icons = {
'Mr. Richardson': 'richardson.jpg',
'The Respondent': 'leif.png',
'The Witness': 'banks.jpg',
'A': 'banks.jpg',
'by mr. richardson:': 'richardson.jpg',
'by the court:': 'avatar.png',
'by the respondent:': 'leif.png',
}
examiner = None
class Dialogue:
def __init__(self, pageNo, lastLineNo, speaker):
self.pageNo = pageNo
self.lineNo = lastLineNo
speaker = speaker.lower().title()
self.speaker = speaker
self.lines = []
self.words = []
if speaker == 'Q':
self.icon = icons[examiner]
else:
self.icon = icons.get(speaker, 'avatar.png')
def addLine(self, line):
l = line.lower()
if l.startswith('by '):
global examiner
examiner = l
elif l == 'constantine evans,':
icons['The Witness'] = 'costi.png'
icons['A'] = 'costi.png'
for r in corrections:
l = l.replace(r[0], r[1])
self.lines.append(l)
self.words.extend(l.split())
def switchDialogue(o, dialogue, pageNo, lastLineNo, speaker):
if dialogue:
printDialogue(o, dialogue)
dialogue = Dialogue(pageNo, lastLineNo, speaker)
return dialogue
def parsePageAnchor(anchor):
# e.g.,
# <A name=38></a>35<br>
# The first number is produced by the pdftohtml conversion;
# the second is the one printed in the top-right of
# the original PDF.
return int(anchor.split('>')[2].split('<')[0])
def parsePage(o, i, p, pageAnchor, dialogue, lastLineNo = None):
global examiner
print "page", p
prevLineNo = None
pageNo = parsePageAnchor(pageAnchor)
print >>o, '<a name=p%02d></a>' % (pageNo, )
for line in i:
# these are eaten by the outer loop
assert not line.startswith('<A ')
if line == HR:
break # end of page
# 16<br>
# THE COURT:<br>
# AS YOU CAN SEE, WE HAVE A BUSY<br>
# 17<br>
# CALENDAR THIS MORNING.<br>
# I HAVE A PRIORITY FOR MATTER<br>
# 18<br>
# NUMBER 13, WHICH IS CASTLE VERSUS STRAND.<br>
if not line.endswith(BR):
# HTML tags at begin/end
print "SKIP", line,
assert line[0] == '<'
continue
line = line[0:-len(BR)]
if line[0].isdigit():
try:
lastLineNo = int(line)
if prevLineNo and lastLineNo == prevLineNo+1:
if dialogue and dialogue.speaker == 'Narrator':
printDialogue(o, dialogue)
dialogue = None
else:
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker='Narrator')
prevLineNo = lastLineNo
except ValueError:
dialogue.addLine(line)
prevLineNo = None
elif line.endswith(':'):
if line.count(' ') < 2:
if dialogue is None or dialogue.speaker != 'Narrator':
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker=line[0:-1])
else:
if line != 'THE COURT:':
dialogue.addLine(line)
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker=line[0:-1])
prevLineNo = None
else:
if dialogue.speaker == 'Narrator':
dialogue.addLine(line)
printDialogue(o, dialogue)
dialogue = None
else:
dialogue.addLine(line)
prevLineNo = None
elif line in ('Q','A'):
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker=line)
elif line.startswith('(WHEREUPON,'):
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker='Narrator')
dialogue.addLine(line)
prevLineNo = None
else:
if not dialogue or (line == '///' and dialogue.speaker != 'Narrator'):
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker='Narrator')
dialogue.addLine(line)
prevLineNo = None
return dialogue
# from Pygments:
# <span class="lineno"> 6 </span><span class="p">{</span>
LINENO = '<span class="lineno">%2d </span>'
printLineNumbers = False # XXX: anchor
def printDialogue(o, dialogue):
if printLineNumbers:
print >>o, (LINENO % dialogue.lineNo),
print >>o, '<a name=p%02dl%02d></a>' % (dialogue.pageNo, dialogue.lineNo)
if dialogue.speaker == 'Narrator':
print >>o, '<p class="narration">'
else:
if dialogue.icon:
print >>o, ('<p class="spk"><img src="%s" width=48 height=48 align=bottom> %s</p><p class="dlg">' % (dialogue.icon, dialogue.speaker))
else:
print >>o, ('<p class="spk">%s</p><p class="dlg">' % dialogue.speaker)
if False:
for line in dialogue.lines:
print >>o, line,
else:
printWords(o, dialogue.words)
print >>o, '</p>'
return
properNames = set([
'god',
'castle', 'green', 'homeowners', 'association', "association's",
'leif', 'strand',
'dianne', 'patrizzi', "dianne's",
'randy', 'banks',
'kelly', 'richardson',
'constantine', 'evans',
'richard', 'ycaza',
'cathy', 'brown',
'i', "i'm", "i'll", "i've",
'mr.',
'association',
'treasurer', 'president', 'chairman',
'pasadena',
'south', 'el', 'molino', 'avenue',
'twitter',
'slack',
'wednesday', 'september', 'october',
])
from collections import deque
renter = deque()
renter.append('/baby/vm-2016-07-21.mov')
renter.append('/baby/vm-2016-07-22-1.mov')
renter.append('/baby/vm-2016-07-22-2.mov')
renter.append('/baby/vm-2016-08-04.mov')
links = {
'"slack."': '''"<a href="https://slack.com/">Slack</a>."''',
'newsletters': '<a href="/havisham/v2i1.pdf">newsletters</a>',
}
def printWords(o, words):
# "I was afraid of worms, Roxanne!"
newSentence = True
lastWord = None
for word in words:
if newSentence:
word = word.capitalize()
newSentence = False
if word in properNames or (word[-1] in '.,?!' and word[:-1] in properNames):
word = word.capitalize()
if 'pdro' in word:
# case number
word = word.upper()
if max(word.count('-'), word.count('.')) > 1:
# e.g., H.O.A. or B-A-N-K-S
# BUG: capitalizes 'MAN-TO-MAN', which is funny, so I left it in
word = word.upper()
if word[:6] == 'renter':
url = renter.popleft()
word = '<a href="%s">renter</a>%s' % (url, word[6:])
elif word in links:
word = links[word]
elif word == '///' and lastWord != '///':
print >>o, '<br>'
print >>o, word,
lastWord = word
if word == '///':
print >>o, '<br>'
if word[-1] in '.?!':
newSentence = True
return
def main():
i = open("input.html", "r")
o = open("index.html", "w");
print >>o, HEAD
print >>o, '<div class="transcript"><tt>'
p, lastLineNo, pageAnchor = parsePreamble(o, i)
dialogue = parsePage(o, i, p, pageAnchor, None, lastLineNo)
for line in i:
if line == '</BODY>\n':
break
assert line.startswith('<A '), line
pageAnchor = line
dialogue = parsePage(o, i, p, pageAnchor, dialogue)
p += 1
printDialogue(o, dialogue)
print >>o, '</tt></div>'
print >>o, TAIL
o.close()
i.close()
return
HEAD = """<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Dark Castle — The Court Hearing</title>
<link rel="stylesheet" media="screen" href="/style/219.css">
<link rel="stylesheet" media="screen" href="/style/transcript.css">
<link rel="alternate" type="application/rss+xml" title="RSS" href="http://www.csszengarden.com/zengarden.xml">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="author" content="Leif Strand">
<meta name="description" content="A part of The Nemesis Project">
<meta name="robots" content="all">
<script src="http://use.typekit.net/fzq7emy.js"></script>
<script>try{Typekit.load();}catch(e){}</script>
<!--[if lt IE 9]>
<script src="/scripts/html5shiv.js"></script>
<![endif]-->
</head>
<body>
<div class="page-wrapper">
<section class="intro">
<header role="banner">
<h1>Dark Castle</h1>
<h2>The Restraining Order</h2>
</header>
</section>
<div class="main supporting" role="main">
<div class="explanation" role="article">
<h3>The Court Hearing</h3>
<p>Thursday, October 18, 2018</p>
"""
TAIL = """
</div>
</div>
<div class="main supporting" role="main">
<div class="explanation" role="article">
<p><a href="PDF_T18T54_CASTLE_GREEN_VS_STRAND_10-18-18_FINAL_TRANSCRIPT.pdf">Original PDF document</a> translated to <a href="PDF_T18T54_CASTLE_GREEN_VS_STRAND_10-18-18_FINAL_TRANSCRIPT.html">HTML</a> using <a href="http://pdftohtml.sourceforge.net/"><tt>pdftohtml</tt></a> version 0.40.</p>
<p>Dark Castle presentation produced by <a href="https://github.com/castlegreen/hulk/blob/master/transcribe.py"><tt>transcribe.py</tt></a>.</p>
</div>
</div>
<div class="main supporting" role="main">
<div class="explanation" role="article">
<p class="runic"><img src="/images/nice-girl.jpg" width=212 height=154></p>
<p class="runic">⼭ ⽶ ⽥</p>
</div>
</div>
</div>
</body>
</html>
"""
main()
| pageAnchor = line
if PRINT_PRE:
print >>o, line
continue | conditional_block |
transcribe.py | #!/usr/bin/python
import os, sys
# Pages begin as follows:
# <hr>
# <A name=2></a>1<br>
# Beginning at page 4, the second number is an increasing page number:
# <hr>
# <A name=4></a>1<br>
# ...
# <hr>
# <A name=5></a>2<br>
# This is the number printed in the top-right hand corner
# of the original PDF.
HR = "<hr>\n"
BEGIN = "***OOO***<br>\n"
# Lines are numbered:
# 14<br>
# THE COURT:<br>
# GOOD MORNING.<br>
# 15<br>
# ALL RESPOND:<br>
# GOOD MORNING.<br>
BR = "<br>\n"
PRINT_PRE = False
def parsePreamble(o, i):
p = 0
print "preamble", p
lastLineNo = None
pageAnchor = None
for line in i:
if line == BEGIN:
print >>o, '<p class="narration">***OOO***</p>'
return p, lastLineNo, pageAnchor
if line == HR:
p += 1
print "preamble", p
if PRINT_PRE:
print >>o, line
continue
if line.startswith('<A '):
pageAnchor = line
if PRINT_PRE:
print >>o, line
continue
if not line.endswith(BR):
print "SKIP", line,
continue
line = line[0:-len(BR)]
if line[0].isdigit():
try:
lastLineNo = int(line)
except ValueError:
if PRINT_PRE:
print >>o, line, BR,
else:
if PRINT_PRE:
print >>o, line, BR,
assert False
corrections = [
(' star war ', ' Star Wars '),
(' yzaca ', ' ycaza '),
]
icons = {
'Mr. Richardson': 'richardson.jpg',
'The Respondent': 'leif.png',
'The Witness': 'banks.jpg',
'A': 'banks.jpg',
'by mr. richardson:': 'richardson.jpg',
'by the court:': 'avatar.png',
'by the respondent:': 'leif.png',
}
examiner = None
class Dialogue:
def __init__(self, pageNo, lastLineNo, speaker):
self.pageNo = pageNo
self.lineNo = lastLineNo
speaker = speaker.lower().title()
self.speaker = speaker
self.lines = []
self.words = []
if speaker == 'Q':
self.icon = icons[examiner]
else:
self.icon = icons.get(speaker, 'avatar.png')
def addLine(self, line):
l = line.lower()
if l.startswith('by '):
global examiner
examiner = l
elif l == 'constantine evans,':
icons['The Witness'] = 'costi.png'
icons['A'] = 'costi.png'
for r in corrections:
l = l.replace(r[0], r[1])
self.lines.append(l)
self.words.extend(l.split())
def switchDialogue(o, dialogue, pageNo, lastLineNo, speaker):
if dialogue:
printDialogue(o, dialogue)
dialogue = Dialogue(pageNo, lastLineNo, speaker)
return dialogue
def parsePageAnchor(anchor):
# e.g.,
# <A name=38></a>35<br>
# The first number is produced by the pdftohtml conversion;
# the second is the one printed in the top-right of
# the original PDF.
|
def parsePage(o, i, p, pageAnchor, dialogue, lastLineNo = None):
global examiner
print "page", p
prevLineNo = None
pageNo = parsePageAnchor(pageAnchor)
print >>o, '<a name=p%02d></a>' % (pageNo, )
for line in i:
# these are eaten by the outer loop
assert not line.startswith('<A ')
if line == HR:
break # end of page
# 16<br>
# THE COURT:<br>
# AS YOU CAN SEE, WE HAVE A BUSY<br>
# 17<br>
# CALENDAR THIS MORNING.<br>
# I HAVE A PRIORITY FOR MATTER<br>
# 18<br>
# NUMBER 13, WHICH IS CASTLE VERSUS STRAND.<br>
if not line.endswith(BR):
# HTML tags at begin/end
print "SKIP", line,
assert line[0] == '<'
continue
line = line[0:-len(BR)]
if line[0].isdigit():
try:
lastLineNo = int(line)
if prevLineNo and lastLineNo == prevLineNo+1:
if dialogue and dialogue.speaker == 'Narrator':
printDialogue(o, dialogue)
dialogue = None
else:
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker='Narrator')
prevLineNo = lastLineNo
except ValueError:
dialogue.addLine(line)
prevLineNo = None
elif line.endswith(':'):
if line.count(' ') < 2:
if dialogue is None or dialogue.speaker != 'Narrator':
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker=line[0:-1])
else:
if line != 'THE COURT:':
dialogue.addLine(line)
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker=line[0:-1])
prevLineNo = None
else:
if dialogue.speaker == 'Narrator':
dialogue.addLine(line)
printDialogue(o, dialogue)
dialogue = None
else:
dialogue.addLine(line)
prevLineNo = None
elif line in ('Q','A'):
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker=line)
elif line.startswith('(WHEREUPON,'):
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker='Narrator')
dialogue.addLine(line)
prevLineNo = None
else:
if not dialogue or (line == '///' and dialogue.speaker != 'Narrator'):
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker='Narrator')
dialogue.addLine(line)
prevLineNo = None
return dialogue
# from Pygments:
# <span class="lineno"> 6 </span><span class="p">{</span>
LINENO = '<span class="lineno">%2d </span>'
printLineNumbers = False # XXX: anchor
def printDialogue(o, dialogue):
if printLineNumbers:
print >>o, (LINENO % dialogue.lineNo),
print >>o, '<a name=p%02dl%02d></a>' % (dialogue.pageNo, dialogue.lineNo)
if dialogue.speaker == 'Narrator':
print >>o, '<p class="narration">'
else:
if dialogue.icon:
print >>o, ('<p class="spk"><img src="%s" width=48 height=48 align=bottom> %s</p><p class="dlg">' % (dialogue.icon, dialogue.speaker))
else:
print >>o, ('<p class="spk">%s</p><p class="dlg">' % dialogue.speaker)
if False:
for line in dialogue.lines:
print >>o, line,
else:
printWords(o, dialogue.words)
print >>o, '</p>'
return
properNames = set([
'god',
'castle', 'green', 'homeowners', 'association', "association's",
'leif', 'strand',
'dianne', 'patrizzi', "dianne's",
'randy', 'banks',
'kelly', 'richardson',
'constantine', 'evans',
'richard', 'ycaza',
'cathy', 'brown',
'i', "i'm", "i'll", "i've",
'mr.',
'association',
'treasurer', 'president', 'chairman',
'pasadena',
'south', 'el', 'molino', 'avenue',
'twitter',
'slack',
'wednesday', 'september', 'october',
])
from collections import deque
renter = deque()
renter.append('/baby/vm-2016-07-21.mov')
renter.append('/baby/vm-2016-07-22-1.mov')
renter.append('/baby/vm-2016-07-22-2.mov')
renter.append('/baby/vm-2016-08-04.mov')
links = {
'"slack."': '''"<a href="https://slack.com/">Slack</a>."''',
'newsletters': '<a href="/havisham/v2i1.pdf">newsletters</a>',
}
def printWords(o, words):
# "I was afraid of worms, Roxanne!"
newSentence = True
lastWord = None
for word in words:
if newSentence:
word = word.capitalize()
newSentence = False
if word in properNames or (word[-1] in '.,?!' and word[:-1] in properNames):
word = word.capitalize()
if 'pdro' in word:
# case number
word = word.upper()
if max(word.count('-'), word.count('.')) > 1:
# e.g., H.O.A. or B-A-N-K-S
# BUG: capitalizes 'MAN-TO-MAN', which is funny, so I left it in
word = word.upper()
if word[:6] == 'renter':
url = renter.popleft()
word = '<a href="%s">renter</a>%s' % (url, word[6:])
elif word in links:
word = links[word]
elif word == '///' and lastWord != '///':
print >>o, '<br>'
print >>o, word,
lastWord = word
if word == '///':
print >>o, '<br>'
if word[-1] in '.?!':
newSentence = True
return
def main():
i = open("input.html", "r")
o = open("index.html", "w");
print >>o, HEAD
print >>o, '<div class="transcript"><tt>'
p, lastLineNo, pageAnchor = parsePreamble(o, i)
dialogue = parsePage(o, i, p, pageAnchor, None, lastLineNo)
for line in i:
if line == '</BODY>\n':
break
assert line.startswith('<A '), line
pageAnchor = line
dialogue = parsePage(o, i, p, pageAnchor, dialogue)
p += 1
printDialogue(o, dialogue)
print >>o, '</tt></div>'
print >>o, TAIL
o.close()
i.close()
return
HEAD = """<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Dark Castle — The Court Hearing</title>
<link rel="stylesheet" media="screen" href="/style/219.css">
<link rel="stylesheet" media="screen" href="/style/transcript.css">
<link rel="alternate" type="application/rss+xml" title="RSS" href="http://www.csszengarden.com/zengarden.xml">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="author" content="Leif Strand">
<meta name="description" content="A part of The Nemesis Project">
<meta name="robots" content="all">
<script src="http://use.typekit.net/fzq7emy.js"></script>
<script>try{Typekit.load();}catch(e){}</script>
<!--[if lt IE 9]>
<script src="/scripts/html5shiv.js"></script>
<![endif]-->
</head>
<body>
<div class="page-wrapper">
<section class="intro">
<header role="banner">
<h1>Dark Castle</h1>
<h2>The Restraining Order</h2>
</header>
</section>
<div class="main supporting" role="main">
<div class="explanation" role="article">
<h3>The Court Hearing</h3>
<p>Thursday, October 18, 2018</p>
"""
TAIL = """
</div>
</div>
<div class="main supporting" role="main">
<div class="explanation" role="article">
<p><a href="PDF_T18T54_CASTLE_GREEN_VS_STRAND_10-18-18_FINAL_TRANSCRIPT.pdf">Original PDF document</a> translated to <a href="PDF_T18T54_CASTLE_GREEN_VS_STRAND_10-18-18_FINAL_TRANSCRIPT.html">HTML</a> using <a href="http://pdftohtml.sourceforge.net/"><tt>pdftohtml</tt></a> version 0.40.</p>
<p>Dark Castle presentation produced by <a href="https://github.com/castlegreen/hulk/blob/master/transcribe.py"><tt>transcribe.py</tt></a>.</p>
</div>
</div>
<div class="main supporting" role="main">
<div class="explanation" role="article">
<p class="runic"><img src="/images/nice-girl.jpg" width=212 height=154></p>
<p class="runic">⼭ ⽶ ⽥</p>
</div>
</div>
</div>
</body>
</html>
"""
main()
| return int(anchor.split('>')[2].split('<')[0]) | identifier_body |
transcribe.py | #!/usr/bin/python
import os, sys
# Pages begin as follows:
# <hr>
# <A name=2></a>1<br>
# Beginning at page 4, the second number is an increasing page number:
# <hr>
# <A name=4></a>1<br>
# ...
# <hr>
# <A name=5></a>2<br>
# This is the number printed in the top-right hand corner
# of the original PDF.
HR = "<hr>\n"
BEGIN = "***OOO***<br>\n"
# Lines are numbered:
# 14<br>
# THE COURT:<br>
# GOOD MORNING.<br>
# 15<br>
# ALL RESPOND:<br>
# GOOD MORNING.<br>
BR = "<br>\n"
PRINT_PRE = False
def parsePreamble(o, i):
p = 0
print "preamble", p
lastLineNo = None
pageAnchor = None
for line in i:
if line == BEGIN:
print >>o, '<p class="narration">***OOO***</p>'
return p, lastLineNo, pageAnchor
if line == HR:
p += 1
print "preamble", p
if PRINT_PRE:
print >>o, line
continue
if line.startswith('<A '):
pageAnchor = line
if PRINT_PRE:
print >>o, line
continue
if not line.endswith(BR):
print "SKIP", line,
continue
line = line[0:-len(BR)]
if line[0].isdigit():
try:
lastLineNo = int(line)
except ValueError:
if PRINT_PRE:
print >>o, line, BR,
else:
if PRINT_PRE:
print >>o, line, BR,
assert False
corrections = [
(' star war ', ' Star Wars '),
(' yzaca ', ' ycaza '),
]
icons = {
'Mr. Richardson': 'richardson.jpg',
'The Respondent': 'leif.png',
'The Witness': 'banks.jpg',
'A': 'banks.jpg',
'by mr. richardson:': 'richardson.jpg',
'by the court:': 'avatar.png',
'by the respondent:': 'leif.png',
}
examiner = None
class Dialogue:
def __init__(self, pageNo, lastLineNo, speaker):
self.pageNo = pageNo
self.lineNo = lastLineNo
speaker = speaker.lower().title()
self.speaker = speaker
self.lines = []
self.words = []
if speaker == 'Q':
self.icon = icons[examiner]
else:
self.icon = icons.get(speaker, 'avatar.png')
def addLine(self, line):
l = line.lower()
if l.startswith('by '):
global examiner
examiner = l
elif l == 'constantine evans,':
icons['The Witness'] = 'costi.png'
icons['A'] = 'costi.png'
for r in corrections:
l = l.replace(r[0], r[1])
self.lines.append(l)
self.words.extend(l.split())
def switchDialogue(o, dialogue, pageNo, lastLineNo, speaker):
if dialogue:
printDialogue(o, dialogue)
dialogue = Dialogue(pageNo, lastLineNo, speaker)
return dialogue
def parsePageAnchor(anchor):
# e.g.,
# <A name=38></a>35<br>
# The first number is produced by the pdftohtml conversion;
# the second is the one printed in the top-right of
# the original PDF.
return int(anchor.split('>')[2].split('<')[0])
def parsePage(o, i, p, pageAnchor, dialogue, lastLineNo = None):
global examiner
print "page", p
prevLineNo = None
pageNo = parsePageAnchor(pageAnchor)
print >>o, '<a name=p%02d></a>' % (pageNo, )
for line in i:
# these are eaten by the outer loop
assert not line.startswith('<A ')
if line == HR:
break # end of page
# 16<br>
# THE COURT:<br>
# AS YOU CAN SEE, WE HAVE A BUSY<br>
# 17<br>
# CALENDAR THIS MORNING.<br>
# I HAVE A PRIORITY FOR MATTER<br>
# 18<br>
# NUMBER 13, WHICH IS CASTLE VERSUS STRAND.<br>
if not line.endswith(BR):
# HTML tags at begin/end
print "SKIP", line,
assert line[0] == '<'
continue
line = line[0:-len(BR)]
if line[0].isdigit():
try:
lastLineNo = int(line)
if prevLineNo and lastLineNo == prevLineNo+1:
if dialogue and dialogue.speaker == 'Narrator':
printDialogue(o, dialogue)
dialogue = None
else:
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker='Narrator')
prevLineNo = lastLineNo
except ValueError:
dialogue.addLine(line)
prevLineNo = None | if line.count(' ') < 2:
if dialogue is None or dialogue.speaker != 'Narrator':
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker=line[0:-1])
else:
if line != 'THE COURT:':
dialogue.addLine(line)
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker=line[0:-1])
prevLineNo = None
else:
if dialogue.speaker == 'Narrator':
dialogue.addLine(line)
printDialogue(o, dialogue)
dialogue = None
else:
dialogue.addLine(line)
prevLineNo = None
elif line in ('Q','A'):
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker=line)
elif line.startswith('(WHEREUPON,'):
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker='Narrator')
dialogue.addLine(line)
prevLineNo = None
else:
if not dialogue or (line == '///' and dialogue.speaker != 'Narrator'):
dialogue = switchDialogue(o, dialogue, pageNo, lastLineNo, speaker='Narrator')
dialogue.addLine(line)
prevLineNo = None
return dialogue
# from Pygments:
# <span class="lineno"> 6 </span><span class="p">{</span>
LINENO = '<span class="lineno">%2d </span>'
printLineNumbers = False # XXX: anchor
def printDialogue(o, dialogue):
if printLineNumbers:
print >>o, (LINENO % dialogue.lineNo),
print >>o, '<a name=p%02dl%02d></a>' % (dialogue.pageNo, dialogue.lineNo)
if dialogue.speaker == 'Narrator':
print >>o, '<p class="narration">'
else:
if dialogue.icon:
print >>o, ('<p class="spk"><img src="%s" width=48 height=48 align=bottom> %s</p><p class="dlg">' % (dialogue.icon, dialogue.speaker))
else:
print >>o, ('<p class="spk">%s</p><p class="dlg">' % dialogue.speaker)
if False:
for line in dialogue.lines:
print >>o, line,
else:
printWords(o, dialogue.words)
print >>o, '</p>'
return
properNames = set([
'god',
'castle', 'green', 'homeowners', 'association', "association's",
'leif', 'strand',
'dianne', 'patrizzi', "dianne's",
'randy', 'banks',
'kelly', 'richardson',
'constantine', 'evans',
'richard', 'ycaza',
'cathy', 'brown',
'i', "i'm", "i'll", "i've",
'mr.',
'association',
'treasurer', 'president', 'chairman',
'pasadena',
'south', 'el', 'molino', 'avenue',
'twitter',
'slack',
'wednesday', 'september', 'october',
])
from collections import deque
renter = deque()
renter.append('/baby/vm-2016-07-21.mov')
renter.append('/baby/vm-2016-07-22-1.mov')
renter.append('/baby/vm-2016-07-22-2.mov')
renter.append('/baby/vm-2016-08-04.mov')
links = {
'"slack."': '''"<a href="https://slack.com/">Slack</a>."''',
'newsletters': '<a href="/havisham/v2i1.pdf">newsletters</a>',
}
def printWords(o, words):
# "I was afraid of worms, Roxanne!"
newSentence = True
lastWord = None
for word in words:
if newSentence:
word = word.capitalize()
newSentence = False
if word in properNames or (word[-1] in '.,?!' and word[:-1] in properNames):
word = word.capitalize()
if 'pdro' in word:
# case number
word = word.upper()
if max(word.count('-'), word.count('.')) > 1:
# e.g., H.O.A. or B-A-N-K-S
# BUG: capitalizes 'MAN-TO-MAN', which is funny, so I left it in
word = word.upper()
if word[:6] == 'renter':
url = renter.popleft()
word = '<a href="%s">renter</a>%s' % (url, word[6:])
elif word in links:
word = links[word]
elif word == '///' and lastWord != '///':
print >>o, '<br>'
print >>o, word,
lastWord = word
if word == '///':
print >>o, '<br>'
if word[-1] in '.?!':
newSentence = True
return
def main():
i = open("input.html", "r")
o = open("index.html", "w");
print >>o, HEAD
print >>o, '<div class="transcript"><tt>'
p, lastLineNo, pageAnchor = parsePreamble(o, i)
dialogue = parsePage(o, i, p, pageAnchor, None, lastLineNo)
for line in i:
if line == '</BODY>\n':
break
assert line.startswith('<A '), line
pageAnchor = line
dialogue = parsePage(o, i, p, pageAnchor, dialogue)
p += 1
printDialogue(o, dialogue)
print >>o, '</tt></div>'
print >>o, TAIL
o.close()
i.close()
return
HEAD = """<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Dark Castle — The Court Hearing</title>
<link rel="stylesheet" media="screen" href="/style/219.css">
<link rel="stylesheet" media="screen" href="/style/transcript.css">
<link rel="alternate" type="application/rss+xml" title="RSS" href="http://www.csszengarden.com/zengarden.xml">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="author" content="Leif Strand">
<meta name="description" content="A part of The Nemesis Project">
<meta name="robots" content="all">
<script src="http://use.typekit.net/fzq7emy.js"></script>
<script>try{Typekit.load();}catch(e){}</script>
<!--[if lt IE 9]>
<script src="/scripts/html5shiv.js"></script>
<![endif]-->
</head>
<body>
<div class="page-wrapper">
<section class="intro">
<header role="banner">
<h1>Dark Castle</h1>
<h2>The Restraining Order</h2>
</header>
</section>
<div class="main supporting" role="main">
<div class="explanation" role="article">
<h3>The Court Hearing</h3>
<p>Thursday, October 18, 2018</p>
"""
TAIL = """
</div>
</div>
<div class="main supporting" role="main">
<div class="explanation" role="article">
<p><a href="PDF_T18T54_CASTLE_GREEN_VS_STRAND_10-18-18_FINAL_TRANSCRIPT.pdf">Original PDF document</a> translated to <a href="PDF_T18T54_CASTLE_GREEN_VS_STRAND_10-18-18_FINAL_TRANSCRIPT.html">HTML</a> using <a href="http://pdftohtml.sourceforge.net/"><tt>pdftohtml</tt></a> version 0.40.</p>
<p>Dark Castle presentation produced by <a href="https://github.com/castlegreen/hulk/blob/master/transcribe.py"><tt>transcribe.py</tt></a>.</p>
</div>
</div>
<div class="main supporting" role="main">
<div class="explanation" role="article">
<p class="runic"><img src="/images/nice-girl.jpg" width=212 height=154></p>
<p class="runic">⼭ ⽶ ⽥</p>
</div>
</div>
</div>
</body>
</html>
"""
main() |
elif line.endswith(':'): | random_line_split |
order.go | package controller
import (
"encoding/json"
"fmt"
"time"
daoConf "dao/conf"
daoSql "dao/sql"
. "global"
apiIndex "http/api"
"logic"
"util"
"github.com/labstack/echo"
)
type OrderController struct{}
// 注册路由
func (self OrderController) RegisterRoute(e *echo.Group) {
e.Get("/order/list", echo.HandlerFunc(self.MyOrderList))
e.Get("/order/detail", echo.HandlerFunc(self.Detail))
e.Post("/order/prepare", echo.HandlerFunc(self.PrepareOrder))
e.Post("/order/do_order", echo.HandlerFunc(self.DoOrder))
e.Post("/order/cancel_rder", echo.HandlerFunc(self.CancelOrder))
e.Post("/order/eval_order", echo.HandlerFunc(self.EvalOrder))
}
// 确认信息页 form-> goods_list:[{"goods_id":"3","selected":"1","goods_num":"2"}]
func (OrderController) PrepareOrder(ctx echo.Context) error {
uid := ctx.Get("uid").(uint64)
goodsList, err := getCartGoodsList(ctx)
if nil != err {
// log
return util.Fail(ctx, 10, err.Error())
}
// 收集 goodsId
goodsIdList := []uint64{}
for _, goodsInfo := range goodsList {
if 1 == goodsInfo.Selected {
if 0 < goodsInfo.GoodsNum {
goodsIdList = append(goodsIdList, goodsInfo.GoodsId)
}
}
}
// 获取 goodsId 信息
goodsException, goodsIdMap, _ := logic.GetCartInfo(goodsIdList)
// 验证并修正库存信息 如果只有3个,购买5个,会强制改为3个
goodsNoStorageException, _ := logic.VerifyGoodsNum(goodsIdMap, goodsList)
// 获取 goodsId 信息
shipTimeList := []string{"XX", "XX"}
// shipTimeList := logic.GetShipTime()
// 获取用户所有地址
myAddressList, err := daoSql.GetAddressListByUid(uid, false)
if nil != err && RecordEmpty != err {
// log
return util.Fail(ctx, 10, err.Error())
}
var myAddress *daoSql.Address
for idx, addressItem := range myAddressList {
// 默认取第一个
if 0 == idx {
myAddress = addressItem
}
// 取默认地址
if uint8(1) == addressItem.IsDefault {
myAddress = addressItem
}
}
if nil == myAddress {
myAddress = &daoSql.Address{}
} else {
myAddress.IsDefault = 1
}
// 读入配置信息
orderConf, err := daoConf.OrderConf()
if nil != err {
// log
return util.Fail(ctx, 10, err.Error())
}
// 生成预处理订单
orderMap := logic.OrderMap{
Address: myAddress,
GoodsIdMap: goodsIdMap,
GoodsList: goodsList,
}
orderInfo, orderGoodsList, err := logic.GenOrder(uid, orderMap)
if nil != err {
// log
}
// 过滤订单参数
orderInfo.Filter()
arrApiOrderGoods := make([]*apiIndex.OrderGoods, len(orderGoodsList))
for idx, item := range orderGoodsList {
arrApiOrderGoods[idx] = &apiIndex.OrderGoods{OrderGoods: item}
}
// 拼装接口数据
orderData := apiIndex.Order{
Address: (*apiIndex.AddressType)(myAddress),
ShipTimeList: shipTimeList,
OrderInfo: apiIndex.OrderInfo{
GoodsList: arrApiOrderGoods,
Order: &apiIndex.OrderBase{Order: orderInfo},
},
}
if 0 < len(goodsException) {
orderData.Alert = fmt.Sprintf(orderConf.Alert, goodsException)
} else if 0 < len(goodsNoStorageException) {
orderData.Alert = fmt.Sprintf(orderConf.StorageAlert, goodsNoStorageException)
} else {
orderData.Alert = ""
}
// 格式化地址列表
for _, addressItem := range myAddressList {
orderData.AddressList = append(orderData.AddressList, (*apiIndex.AddressType)(addressItem))
}
orderData.Format()
return util.Success(ctx, orderData) | }
// 确认信息页 form-> goods_list:[{"goods_id":"3","selected":"1","goods_num":"2"}]
func (OrderController) DoOrder(ctx echo.Context) error {
// 设置 redis key
uid := ctx.Get("uid").(uint64)
// 避免同一个订单重复提交
// data, _ := json.Marshal(ctx.Request())
// curOrderMd5 = fmt.Printf("%x", md5.Sum(data))
// preOrderMd5 := daoRedis.NewRedisClient().Key(daoRedis.KeyOrder, util.Itoa(uid)).GET("")
// if preOrder == preOrderMd5 {
// // log
// return util.Fail(ctx, 10, RepeatDoOrder)
// }
// daoRedis.NewRedisClient().SET("", curOrderMd5, 30)
// 获取购物车商品列表
goodsList, err := getCartGoodsList(ctx)
if nil != err {
// log
return util.Fail(ctx, 10, err.Error())
}
// 收集 goodsId
goodsIdList := []uint64{}
for _, goodsInfo := range goodsList {
if 1 == goodsInfo.Selected {
if 0 < goodsInfo.GoodsNum {
goodsIdList = append(goodsIdList, goodsInfo.GoodsId)
}
}
}
// 获取 商品详情
goodsException, goodsIdMap, _ := logic.GetCartInfo(goodsIdList)
// 验证并修正库存信息 如果只有3个,购买5个,会强制改为3个
goodsNoStorageException, _ := logic.VerifyGoodsNum(goodsIdMap, goodsList)
// 获取地址信息
address, err := fetchAddress(ctx)
if nil != err {
return util.Fail(ctx, 10, "地址信息无效")
}
// 读入配置信息
orderConf, err := daoConf.OrderConf()
if nil != err {
// log
return util.Fail(ctx, 10, err.Error())
}
if 0 < len(goodsException) {
return util.Fail(ctx, 10, fmt.Sprintf(orderConf.Alert, goodsException))
}
if 0 < len(goodsNoStorageException) {
return util.Fail(ctx, 10, fmt.Sprintf(orderConf.StorageAlert, goodsNoStorageException))
}
// 提交订单
orderMap := logic.OrderMap{
Address: address,
GoodsIdMap: goodsIdMap,
GoodsList: goodsList,
ExceptTime: time.Now().Unix(),
OrderMessage: ctx.FormValue("order_message"),
}
orderInfo, orderGoodsList, err := logic.SubmitOrder(uid, orderMap)
if nil != err {
// log
}
arrApiOrderGoods := make([]*apiIndex.OrderGoods, len(orderGoodsList))
for idx, item := range orderGoodsList {
arrApiOrderGoods[idx] = &apiIndex.OrderGoods{OrderGoods: item}
}
// 拼装接口数据
orderData := apiIndex.Order{
Address: (*apiIndex.AddressType)(address),
ShipTimeList: []string{},
OrderInfo: apiIndex.OrderInfo{
GoodsList: arrApiOrderGoods,
Order: &apiIndex.OrderBase{Order: orderInfo},
},
Cancel: genApiCancel(orderInfo),
}
orderData.Format()
return util.Success(ctx, orderData)
}
// 订单列表页
func (OrderController) MyOrderList(ctx echo.Context) error {
// 获取订单列表信息
// uid, base_id, rn
myOrderMapList, hasMore, err := logic.GetMyOrderList(10, 0, 20)
if nil != err {
return err
}
// 拼装接口数据
orderData := &apiIndex.OrderList{
HasMore: hasMore,
}
// orderData
for _, v := range myOrderMapList {
arrApiOrderGoods := make([]*apiIndex.OrderGoods, len(v["goodsList"].([]*daoSql.OrderGoods)))
for idx, item := range v["goodsList"].([]*daoSql.OrderGoods) {
arrApiOrderGoods[idx] = &apiIndex.OrderGoods{OrderGoods: item}
}
orderData.List = append(orderData.List, &apiIndex.Order{
Address: (*apiIndex.AddressType)(v["addressInfo"].(*daoSql.Address)),
OrderInfo: apiIndex.OrderInfo{
Order: &apiIndex.OrderBase{Order: v["order"].(*daoSql.Order)},
GoodsList: arrApiOrderGoods,
},
})
}
orderData.Format()
return util.Success(ctx, orderData)
}
// 订单列表页
func (OrderController) CancelOrder(ctx echo.Context) error {
// 获取订单列表信息
uid := uint64(10)
orderSn := ctx.FormValue("order_sn")
cancelFlag := util.Atoi(ctx.FormValue("cancel_flag"), 16, false).(uint16)
err := logic.CancelOrder(uid, orderSn, cancelFlag)
if nil != err {
return util.Fail(ctx, 10, err.Error())
}
return util.Success(ctx, nil)
}
// 订单列表页
func (OrderController) EvalOrder(ctx echo.Context) error {
// 获取订单列表信息
uid := uint64(10)
orderSn := ctx.FormValue("order_sn")
stars := util.Atoi(ctx.FormValue("stars"), 8, false).(uint8)
feedback := ctx.FormValue("feedback")
err := logic.EvalOrder(uid, orderSn, stars, feedback)
if nil != err {
return util.Fail(ctx, 10, err.Error())
}
return util.Success(ctx, nil)
}
// 订单列表页
func (OrderController) Detail(ctx echo.Context) error {
ordeSn := ctx.QueryParam("order_sn")
// 获取订单列表信息
// uid, orderSn
myOrderMap, err := logic.GetOrderDetail(10, ordeSn)
if nil != err {
return err
}
// 拼装接口数据
arrApiOrderGoods := make([]*apiIndex.OrderGoods, len(myOrderMap["goodsList"].([]*daoSql.OrderGoods)))
for idx, item := range myOrderMap["goodsList"].([]*daoSql.OrderGoods) {
arrApiOrderGoods[idx] = &apiIndex.OrderGoods{OrderGoods: item}
}
orderData := &apiIndex.Order{
Alert: "",
Address: (*apiIndex.AddressType)(myOrderMap["addressInfo"].(*daoSql.Address)),
OrderInfo: apiIndex.OrderInfo{
Order: &apiIndex.OrderBase{Order: myOrderMap["order"].(*daoSql.Order)},
GoodsList: arrApiOrderGoods,
},
Cancel: genApiCancel(myOrderMap["order"].(*daoSql.Order)),
}
orderData.Format()
return util.Render(ctx, "order/info", "订单详情", orderData)
}
// 生成订单列表页页的 html 不提供外部接口
func (OrderController) GenOrderListHtml(ctx echo.Context) error {
return util.Render(ctx, "order/list", "订单列表", map[string]interface{}{})
}
// 获取购物车中商品列表
func getCartGoodsList(ctx echo.Context) (goodsList []*logic.CartInfo, err error) {
goodsList = []*logic.CartInfo{}
goodsListStr := ctx.FormValue("goods_list")
goodsListMap := []map[string]interface{}{}
err = json.Unmarshal([]byte(goodsListStr), &goodsListMap)
for _, item := range goodsListMap {
tmpInfo := &logic.CartInfo{
GoodsId: util.MustNum(item["goods_id"], 64, false).(uint64),
Selected: util.MustNum(item["selected"], 8, false).(uint8),
GoodsNum: util.MustNum(item["goods_num"], 16, false).(uint16),
}
goodsList = append(goodsList, tmpInfo)
}
if err != nil {
// log
return
}
if 0 >= len(goodsList) {
return goodsList, CartEmpty
}
return
}
// 提交订单时新地址(address_id<=0) 会先插入地址表
func fetchAddress(ctx echo.Context) (*daoSql.Address, error) {
uid := ctx.Get("uid").(uint64)
// 获取提交订单时指定的地址
addressId := util.Atoi(ctx.FormValue("address_id"), 64, false).(uint64)
if 0 < addressId {
myAddressMap, err := daoSql.GetAddressListById([]uint64{addressId})
if nil != err {
// log
return &daoSql.Address{}, err
}
myAddress, ok := myAddressMap[addressId]
if !ok || uid != myAddress.MemberId {
// log
return &daoSql.Address{}, RecordEmpty
}
return myAddress, nil
}
// 插入新的地址信息
trueName := ctx.FormValue("true_name")
liveArea := ctx.FormValue("live_area")
address := ctx.FormValue("address")
mobile := ctx.FormValue("mobile")
// 显式提取地址信息
addressInfo := daoSql.UserAddressInfo{
TrueName: trueName,
LiveArea: liveArea,
Address: address,
Mobile: mobile,
}
myAddress, err := daoSql.SaveMyAddress(uid, &addressInfo)
if nil != err {
// log
return &daoSql.Address{}, err
}
return myAddress, err
}
// 根据订单
func genApiCancel(orderInfo *daoSql.Order) (cancel *apiIndex.Cancel) {
// 读入配置信息
envConf, _ := daoConf.EnvConf()
// 订单取消信息
cancelInfo := logic.GetCancelInfo(orderInfo)
cancel = &apiIndex.Cancel{
CanCancel: cancelInfo.CanCancel,
}
if !cancelInfo.CanCancel {
if 0 < len(envConf.ServiceTel) {
cancel.CancelTip.Tel = envConf.ServiceTel
} else {
cancel.CancelTip.Tel = cancelInfo.CancelTip.Tel
}
cancel.CancelTip.Tip = cancelInfo.CancelTip.Tip
} else {
cancelReasonList := []*apiIndex.CancelReasonType{}
for k, v := range cancelInfo.CancelReason {
tmp := &apiIndex.CancelReasonType{
Flag: util.Itoa(k),
Context: v,
}
cancelReasonList = append(cancelReasonList, tmp)
}
cancel.CancelReason = cancelReasonList
}
return cancel
} | random_line_split | |
order.go | package controller
import (
"encoding/json"
"fmt"
"time"
daoConf "dao/conf"
daoSql "dao/sql"
. "global"
apiIndex "http/api"
"logic"
"util"
"github.com/labstack/echo"
)
type OrderController struct{}
// 注册路由
func (self OrderController) RegisterRoute(e *echo.Group) {
e.Get("/order/list", echo.HandlerFunc(self.MyOrderList))
e.Get("/order/detail", echo.HandlerFunc(self.Detail))
e.Post("/order/prepare", echo.HandlerFunc(self.PrepareOrder))
e.Post("/order/do_order", echo.HandlerFunc(self.DoOrder))
e.Post("/order/cancel_rder", echo.HandlerFunc(self.CancelOrder))
e.Post("/order/eval_order", echo.HandlerFunc(self.EvalOrder))
}
// 确认信息页 form-> goods_list:[{"goods_id":"3","selected":"1","goods_num":"2"}]
func (OrderController) PrepareOrder(ctx echo.Context) error {
uid := ctx.Get("uid").(uint64)
goodsList, err := getCartGoodsList(ctx)
if nil != err {
// log
return util.Fail(ctx, 10, err.Error())
}
// 收集 goodsId
goodsIdList := []uint64{}
for _, goodsInfo := range goodsList {
if 1 == goodsInfo.Selected {
if 0 < goodsInfo.GoodsNum {
goodsIdList = append(goodsIdList, goodsInfo.GoodsId)
}
}
}
// 获取 goodsId 信息
goodsException, goodsIdMap, _ := logic.GetCartInfo(goodsIdList)
// 验证并修正库存信息 如果只有3个,购买5个,会强制改为3个
goodsNoStorageException, _ := logic.VerifyGoodsNum(goodsIdMap, goodsList)
// 获取 goodsId 信息
shipTimeList := []string{"XX", "XX"}
// shipTimeList := logic.GetShipTime()
// 获取用户所有地址
myAddressList, err := daoSql.GetAddressListByUid(uid, false)
if nil != err && RecordEmpty != err {
// log
return util.Fail(ctx, 10, err.Error())
}
var myAddress *daoSql.Address
for idx, addressItem := range myAddressList {
// 默认取第一个
if 0 == idx {
myAddress = addressItem
}
// 取默认地址
if uint8(1) == addressItem.IsDefault {
myAddress = addressItem
}
}
if nil == myAddress {
myAddress = &daoSql.Address{}
} else {
myAddress.IsDefault = 1
}
// 读入配置信息
orderConf, err := daoConf.OrderConf()
if nil != err {
// log
return util.Fail(ctx, 10, err.Error())
}
// 生成预处理订单
orderMap := logic.OrderMap{
Address: myAddress,
GoodsIdMap: goodsIdMap,
GoodsList: goodsList,
}
orderInfo, orderGoodsList, err := logic.GenOrder(uid, orderMap)
if nil != err {
// log
}
// 过滤订单参数
orderInfo.Filter()
arrApiOrderGoods := make([]*apiIndex.OrderGoods, len(orderGoodsList))
for idx, item := range orderGoodsList {
arrApiOrderGoods[idx] = &apiIndex.OrderGoods{OrderGoods: item}
}
// 拼装接口数据
orderData := apiIndex.Order{
Address: (*apiIndex.AddressType)(myAddress),
ShipTimeList: shipTimeList,
OrderInfo: apiIndex.OrderInfo{
GoodsList: arrApiOrderGoods,
Order: &apiIndex.OrderBase{Order: orderInfo},
},
}
if 0 < len(goodsException) {
orderData.Alert = fmt.Sprintf(orderConf.Alert, goodsException)
} else if 0 < len(goodsNoStorageException) {
orderData.Alert = fmt.Sprintf(orderConf.StorageAlert, goodsNoStorageException)
} else {
orderData.Alert = ""
}
// 格式化地址列表
for _, addressItem := range myAddressList {
orderData.AddressList = append(orderData.AddressList, (*apiIndex.AddressType)(addressItem))
}
orderData.Format()
return util.Success(ctx, orderData)
}
// 确认信息页 form-> goods_list:[{"goods_id":"3","selected":"1","goods_num":"2"}]
func (OrderController) DoOrder(ctx echo.Context) error {
// 设置 redis key
uid := ctx.Get("uid").(uint64)
// 避免同一个订单重复提交
// data, _ := json.Marshal(ctx.Request())
// curOrderMd5 = fmt.Printf("%x", md5.Sum(data))
// preOrderMd5 := daoRedis.NewRedisClient().Key(daoRedis.KeyOrder, util.Itoa(uid)).GET("")
// if preOrder == preOrderMd5 {
// // log
// return util.Fail(ctx, 10, RepeatDoOrder)
// }
// daoRedis.NewRedisClient().SET("", curOrderMd5, 30)
// 获取购物车商品列表
goodsList, err := getCartGoodsList(ctx)
if nil != err {
// log
return util.Fail(ctx, 10, err.Error())
}
// 收集 goodsId
goodsIdList := []uint64{}
for _, goodsInfo := range goodsList {
if 1 == goodsInfo.Selected {
if 0 < goodsInfo.GoodsNum {
goodsIdList = append(goodsIdList, goodsInfo.GoodsId)
}
}
}
// 获取 商品详情
goodsException, goodsIdMap, _ := logic.GetCartInfo(goodsIdList)
// 验证并修正库存信息 如果只有3个,购买5个,会强制改为3个
goodsNoStorageException, _ := logic.VerifyGoodsNum(goodsIdMap, goodsList)
// 获取地址信息
address, err := fetchAddress(ctx)
if nil != err {
return util.Fail(ctx, 10, "地址信息无效")
}
// 读入配置信息
orderConf, err := daoConf.OrderConf()
if nil != err {
// log
return util.Fail(ctx, 10, err.Error())
}
if 0 < len(goodsException) {
return util.Fail(ctx, 10, fmt.Sprintf(orderConf.Alert, goodsException))
}
if 0 < len(goodsNoStorageException) {
return util.Fail(ctx, 10, fmt.Sprintf(orderConf.StorageAlert, goodsNoStorageException))
}
// 提交订单
orderMap := logic.OrderMap{
Address: address,
GoodsIdMap: goodsIdMap,
GoodsList: goodsList,
ExceptTime: time.Now().Unix(),
OrderMessage: ctx.FormValue("order_message"),
}
orderInfo, orderGoodsList, err := logic.SubmitOrder(uid, orderMap)
if nil != err {
// log
}
arrApiOrderGoods := make([]*apiIndex.OrderGoods, len(orderGoodsList))
for idx, item := range orderGoodsList {
arrApiOrderGoods[idx] = &apiIndex.OrderGoods{OrderGoods: item}
}
// 拼装接口数据
orderData := apiIndex.Order{
Address: (*apiIndex.AddressType)(address),
ShipTimeList: []string{},
OrderInfo: apiIndex.OrderInfo{
GoodsList: arrApiOrderGoods,
Order: &apiIndex.OrderBase{Order: orderInfo},
},
Cancel: genApiCancel(orderInfo),
}
orderData.Format()
return util.Success(ctx, orderData)
}
// 订单列表页
func (OrderController) MyOrderList(ctx echo.Context) error {
// 获取订单列表信息
// uid, base_id, rn
myOrderMapList, hasMore, err := logic.GetMyOrderList(10, 0, 20)
if nil != err {
return err
}
// 拼装接口数据
orderData := &apiIndex.OrderList{
HasMore: hasMore,
}
// orderData
for _, v := range myOrderMapList {
arrApiOrderGoods := make([]*apiIndex.OrderGoods, len(v["goodsList"].([]*daoSql.OrderGoods)))
for idx, item := range v[ | er(ctx echo.Context) error {
// 获取订单列表信息
uid := uint64(10)
orderSn := ctx.FormValue("order_sn")
stars := util.Atoi(ctx.FormValue("stars"), 8, false).(uint8)
feedback := ctx.FormValue("feedback")
err := logic.EvalOrder(uid, orderSn, stars, feedback)
if nil != err {
return util.Fail(ctx, 10, err.Error())
}
return util.Success(ctx, nil)
}
// 订单列表页
func (OrderController) Detail(ctx echo.Context) error {
ordeSn := ctx.QueryParam("order_sn")
// 获取订单列表信息
// uid, orderSn
myOrderMap, err := logic.GetOrderDetail(10, ordeSn)
if nil != err {
return err
}
// 拼装接口数据
arrApiOrderGoods := make([]*apiIndex.OrderGoods, len(myOrderMap["goodsList"].([]*daoSql.OrderGoods)))
for idx, item := range myOrderMap["goodsList"].([]*daoSql.OrderGoods) {
arrApiOrderGoods[idx] = &apiIndex.OrderGoods{OrderGoods: item}
}
orderData := &apiIndex.Order{
Alert: "",
Address: (*apiIndex.AddressType)(myOrderMap["addressInfo"].(*daoSql.Address)),
OrderInfo: apiIndex.OrderInfo{
Order: &apiIndex.OrderBase{Order: myOrderMap["order"].(*daoSql.Order)},
GoodsList: arrApiOrderGoods,
},
Cancel: genApiCancel(myOrderMap["order"].(*daoSql.Order)),
}
orderData.Format()
return util.Render(ctx, "order/info", "订单详情", orderData)
}
// 生成订单列表页页的 html 不提供外部接口
func (OrderController) GenOrderListHtml(ctx echo.Context) error {
return util.Render(ctx, "order/list", "订单列表", map[string]interface{}{})
}
// 获取购物车中商品列表
func getCartGoodsList(ctx echo.Context) (goodsList []*logic.CartInfo, err error) {
goodsList = []*logic.CartInfo{}
goodsListStr := ctx.FormValue("goods_list")
goodsListMap := []map[string]interface{}{}
err = json.Unmarshal([]byte(goodsListStr), &goodsListMap)
for _, item := range goodsListMap {
tmpInfo := &logic.CartInfo{
GoodsId: util.MustNum(item["goods_id"], 64, false).(uint64),
Selected: util.MustNum(item["selected"], 8, false).(uint8),
GoodsNum: util.MustNum(item["goods_num"], 16, false).(uint16),
}
goodsList = append(goodsList, tmpInfo)
}
if err != nil {
// log
return
}
if 0 >= len(goodsList) {
return goodsList, CartEmpty
}
return
}
// 提交订单时新地址(address_id<=0) 会先插入地址表
func fetchAddress(ctx echo.Context) (*daoSql.Address, error) {
uid := ctx.Get("uid").(uint64)
// 获取提交订单时指定的地址
addressId := util.Atoi(ctx.FormValue("address_id"), 64, false).(uint64)
if 0 < addressId {
myAddressMap, err := daoSql.GetAddressListById([]uint64{addressId})
if nil != err {
// log
return &daoSql.Address{}, err
}
myAddress, ok := myAddressMap[addressId]
if !ok || uid != myAddress.MemberId {
// log
return &daoSql.Address{}, RecordEmpty
}
return myAddress, nil
}
// 插入新的地址信息
trueName := ctx.FormValue("true_name")
liveArea := ctx.FormValue("live_area")
address := ctx.FormValue("address")
mobile := ctx.FormValue("mobile")
// 显式提取地址信息
addressInfo := daoSql.UserAddressInfo{
TrueName: trueName,
LiveArea: liveArea,
Address: address,
Mobile: mobile,
}
myAddress, err := daoSql.SaveMyAddress(uid, &addressInfo)
if nil != err {
// log
return &daoSql.Address{}, err
}
return myAddress, err
}
// 根据订单
func genApiCancel(orderInfo *daoSql.Order) (cancel *apiIndex.Cancel) {
// 读入配置信息
envConf, _ := daoConf.EnvConf()
// 订单取消信息
cancelInfo := logic.GetCancelInfo(orderInfo)
cancel = &apiIndex.Cancel{
CanCancel: cancelInfo.CanCancel,
}
if !cancelInfo.CanCancel {
if 0 < len(envConf.ServiceTel) {
cancel.CancelTip.Tel = envConf.ServiceTel
} else {
cancel.CancelTip.Tel = cancelInfo.CancelTip.Tel
}
cancel.CancelTip.Tip = cancelInfo.CancelTip.Tip
} else {
cancelReasonList := []*apiIndex.CancelReasonType{}
for k, v := range cancelInfo.CancelReason {
tmp := &apiIndex.CancelReasonType{
Flag: util.Itoa(k),
Context: v,
}
cancelReasonList = append(cancelReasonList, tmp)
}
cancel.CancelReason = cancelReasonList
}
return cancel
}
| "goodsList"].([]*daoSql.OrderGoods) {
arrApiOrderGoods[idx] = &apiIndex.OrderGoods{OrderGoods: item}
}
orderData.List = append(orderData.List, &apiIndex.Order{
Address: (*apiIndex.AddressType)(v["addressInfo"].(*daoSql.Address)),
OrderInfo: apiIndex.OrderInfo{
Order: &apiIndex.OrderBase{Order: v["order"].(*daoSql.Order)},
GoodsList: arrApiOrderGoods,
},
})
}
orderData.Format()
return util.Success(ctx, orderData)
}
// 订单列表页
func (OrderController) CancelOrder(ctx echo.Context) error {
// 获取订单列表信息
uid := uint64(10)
orderSn := ctx.FormValue("order_sn")
cancelFlag := util.Atoi(ctx.FormValue("cancel_flag"), 16, false).(uint16)
err := logic.CancelOrder(uid, orderSn, cancelFlag)
if nil != err {
return util.Fail(ctx, 10, err.Error())
}
return util.Success(ctx, nil)
}
// 订单列表页
func (OrderController) EvalOrd | identifier_body |
order.go | package controller
import (
"encoding/json"
"fmt"
"time"
daoConf "dao/conf"
daoSql "dao/sql"
. "global"
apiIndex "http/api"
"logic"
"util"
"github.com/labstack/echo"
)
type OrderController struct{}
// 注册路由
func (self OrderController) RegisterRoute(e *echo.Group) {
e.Get("/order/list", echo.HandlerFunc(self.MyOrderList))
e.Get("/order/detail", echo.HandlerFunc(self.Detail))
e.Post("/order/prepare", echo.HandlerFunc(self.PrepareOrder))
e.Post("/order/do_order", echo.HandlerFunc(self.DoOrder))
e.Post("/order/cancel_rder", echo.HandlerFunc(self.CancelOrder))
e.Post("/order/eval_order", echo.HandlerFunc(self.EvalOrder))
}
// 确认信息页 form-> goods_list:[{"goods_id":"3","selected":"1","goods_num":"2"}]
func (OrderController) PrepareOrder(ctx echo.Context) error {
uid := ctx.Get("uid").(uint64)
goodsList, err := getCartGoodsList(ctx)
if nil != err {
// log
return util.Fail(ctx, 10, err.Error())
}
// 收集 goodsId
goodsIdList := []uint64{}
for _, goodsInfo := range goodsList {
if 1 == goodsInfo.Selected {
if 0 < goodsInfo.GoodsNum {
goodsIdList = append(goodsIdList, goodsInfo.GoodsId)
}
}
}
// 获取 goodsId 信息
goodsException, goodsIdMap, _ := logic.GetCartInfo(goodsIdList)
// 验证并修正库存信息 如果只有3个,购买5个,会强制改为3个
goodsNoStorageException, _ := logic.VerifyGoodsNum(goodsIdMap, goodsList)
// 获取 goodsId 信息
shipTimeList := []string{"XX", "XX"}
// shipTimeList := logic.GetShipTime()
// 获取用户所有地址
myAddressList, err := daoSql.GetAddressListByUid(uid, false)
if nil != err && RecordEmpty != err {
// log
return util.Fail(ctx, 10, err.Error())
}
var myAddress *daoSql.Address
for idx, addressItem := range myAddressList {
// 默认取第一个
if 0 == idx {
myAddress = addressItem
}
// 取默认地址
if uint8(1) == addressItem.IsDefault {
myAddress = addressItem
}
}
if nil == myAddress {
myAddress = &daoSql.Address{}
} else {
myAddress.IsDefault = 1
}
// 读入配置信息
orderConf, err := daoConf.OrderConf()
if nil != err {
// log
return util.Fail(ctx, 10, err.Error())
}
// 生成预处理订单
orderMap := logic.OrderMap{
Address: myAddress,
GoodsIdMap: goodsIdMap,
GoodsList: goodsList,
}
orderInfo, orderGoodsList, err := logic.GenOrder(uid, orderMap)
if nil != err {
// log
}
// 过滤订单参数
orderInfo.Filter()
arrApiOrderGoods := make([]*apiIndex.OrderGoods, len(orderGoodsList))
for idx, item := range orderGoodsList {
arrApiOrderGoods[idx] = &apiIndex.OrderGoods{OrderGoods: item}
}
// 拼装接口数据
orderData := apiIndex.Order{
Address: (*apiIndex.AddressType)(myAddress),
ShipTimeList: shipTimeList,
OrderInfo: apiIndex.OrderInfo{
GoodsList: arrApiOrderGoods,
Order: &apiIndex.OrderBase{Order: orderInfo},
},
}
if 0 < len(goodsException) {
orderData.Alert = fmt.Sprintf(orderConf.Alert, goodsException)
} else if 0 < len(goodsNoStorageException) {
orderData.Alert = fmt.Sprintf(orderConf.StorageAlert, goodsNoStorageException)
} else {
orderData.Alert = ""
}
// 格式化地址列表
for _, addressItem := range myAddressList {
orderData.AddressList = append(orderData.AddressList, (*apiIndex.AddressType)(addressItem))
}
orderData.Format()
return util.Success(ctx, orderData)
}
// 确认信息页 form-> goods_list:[{"goods_id":"3","selected":"1","goods_num":"2"}]
func (OrderController) DoOrder(ctx echo.Context) error {
// 设置 redis key
uid := ctx.Get("uid").(uint64)
// 避免同一个订单重复提交
// data, _ := json.Marshal(ctx.Request())
// curOrderMd5 = fmt.Printf("%x", md5.Sum(data))
// preOrderMd5 := daoRedis.NewRedisClient().Key(daoRedis.KeyOrder, util.Itoa(uid)).GET("")
// if preOrder == preOrderMd5 {
// // log
// return util.Fail(ctx, 10, RepeatDoOrder)
// }
// daoRedis.NewRedisClient().SET("", curOrderMd5, 30)
// 获取购物车商品列表
goodsList, err := getCartGoodsList(ctx)
if nil != err {
// log
return util.Fail(ctx, 10, err.Error())
}
// 收集 goodsId
goodsIdList := []uint64{}
for _, goodsInfo := range goodsList {
if 1 == goodsInfo.Selected {
if 0 < goodsInfo.GoodsNum {
goodsIdList = append(goodsIdList, goodsInfo.GoodsId)
}
}
}
// 获取 商品详情
goodsException, goodsIdMap, _ := logic.GetCartInfo(goodsIdList)
// 验证并修正库存信息 如果只有3个,购买5个,会强制改为3个
goodsNoStorageException, _ := logic.VerifyGoodsNum(goodsIdMap, goodsList)
// 获取地址信息
address, err := fetchAddress(ctx)
if nil != err {
return util.Fail(ctx, 10, "地址信息无效")
}
// 读入配置信息
orderConf, err := daoConf.OrderConf()
if nil != err {
// log
return util.Fail(ctx, 10, err.Error())
}
if 0 < len(goodsException) {
return util.Fail(ctx, 10, fmt.Sprintf(orderConf.Alert, goodsException))
}
if 0 < len(goodsNoStorageException) {
return util.Fail(ctx, 10, fmt.Sprintf(orderConf.StorageAlert, goodsNoStorageException))
}
// 提交订单
orderMap := logic.OrderMap{
Address: address,
GoodsIdMap: goodsIdMap,
GoodsList: goodsList,
ExceptTime: time.Now().Unix(),
OrderMessage: ctx.FormValue("order_message"),
}
orderInfo, orderGoodsList, err := logic.SubmitOrder(uid, orderMap)
if nil != err {
// log
}
arrApiOrderGoods := make([]*apiIndex.OrderGoods, len(orderGoodsList))
for idx, item := range orderGoodsList {
arrApiOrderGoods[idx] = &apiIndex.OrderGoods{OrderGoods: item}
}
// 拼装接口数据
orderData := apiIndex.Order{
Address: (*apiIndex.AddressType)(address),
ShipTimeList: []string{},
OrderInfo: apiIndex.OrderInfo{
GoodsList: arrApiOrderGoods,
Order: &apiIndex.OrderBase{Order: orderInfo},
},
Cancel: genApiCancel(orderInfo),
}
orderData.Format()
return util.Success(ctx, orderData)
}
// 订单列表页
func (OrderController) MyOrderList(ctx echo.Context) error {
// 获取订单列表信息
// uid, base_id, rn
myOrderMapList, hasMore, err := logic.GetMyOrderList(10, 0, 20)
if nil != err {
return err
}
// 拼装接口数据
orderData := &apiIndex.OrderList{
HasMore: hasMore,
}
// orderData
for _, v := range myOrderMapList {
arrApiOrderGoods := make([]*apiIndex.OrderGoods, len(v["goodsList"].([]*daoSql.OrderGoods)))
for idx, item := range v["goodsList"].([]*daoSql.OrderGoods) {
arrApiOrderGoods[idx] = &apiIndex.OrderGoods{OrderGoods: item}
}
orderData.List = append(orderData.List, &apiIndex.Order{
Address: (*apiIndex.AddressType)(v["addressInfo"].(*daoSql.Address)),
OrderInfo: apiIndex.OrderInfo{
Order: &apiIndex.OrderBase{Order: v["order"].(*daoSql.Order)},
GoodsList: arrApiOrderGoods,
},
})
}
orderData.Format()
return util.Success(ctx, orderData)
}
// 订单列表页
func (OrderController) CancelOrder(ctx echo.Context) error {
// 获取订单列表信息
uid := uint64(10)
orderSn := ctx.FormValue("order_sn")
cancelFlag := util.Atoi(ctx.FormValue("cancel_flag"), 16, false).(uint16)
err := logic.CancelOrder(uid, orderSn, cancelFlag)
if nil != err {
return util.Fail(ctx, 10, err.Error())
}
return util.Success(ctx, nil)
}
// 订单列表页
func (OrderController) EvalOrder(ctx echo.Context) error {
// 获取订单列表信息
uid := uint64(10)
orderSn := ctx.FormValue("order_sn")
stars := util.Atoi(ctx.FormValue("stars"), 8, false).(uint8)
feedback := ctx.FormValue("feedback")
err := logic.EvalOrder(uid, orderSn, stars, feedback)
if nil != err {
return util.Fail(ctx, 10, err.Error())
}
return util.Success(ctx, nil)
}
// 订单列表页
func (OrderController) Detail(ctx echo.Context) error {
ordeSn := ctx.QueryParam("order_sn")
// 获取订单列表信息
// uid, orderSn
myOrderMap, err := logic.GetOrderDetail(10, ordeSn)
if nil != err {
return err
}
// 拼装接口数据
arrApiOrderGoods := make([]*apiIndex.OrderGoods, len(myOrderMap["goodsList"].([]*daoSql.OrderGoods)))
for idx, item := range myOrderMap["goodsList"].([]*daoSql.OrderGoods) {
arrApiOrderGoods[idx] = &apiIndex.OrderGoods{OrderGoods: item}
}
orderData := &apiIndex.Order{
Alert: "",
Address: (*apiIndex.AddressType)(myOrderMap["addressInfo"].(*daoSql.Address)),
OrderInfo: apiIndex.OrderInfo{
Order: &apiIndex.OrderBase{Order: myOrderMap["order"].(*daoSql.Order)},
GoodsList: arrApiOrderGoods,
},
Cancel: genApiCancel(myOrderMap["order"].(*daoSql.Order)),
}
orderData.Format()
return util.Render(ctx, "order/info", "订单详情", orderData)
}
// 生成订单列表页页的 html 不提供外部接口
func (OrderController) GenOrderListHtml(ctx echo.Context) error {
return util.Render(ctx, "order/list", "订单列表", map[string]interface{}{})
}
// 获取购物车中商品列表
func getCartGoodsList(ctx echo.Context) (goodsList []*logic.CartInfo, err error) {
goodsList = []*logic.CartInfo{}
goodsListStr := ctx.FormValue("goods_list")
goodsListMap := []map[string]interface{}{}
err = json.Unmarshal([]byte(goodsListStr), &goodsListMap)
for _, item := range goodsListMap {
tmpInfo := &logic.CartInfo{
GoodsId: util.MustNum(item["goods_id"], 64, false).(uint64),
Selected: util.MustNum(item["selected"], 8, false).(uint8),
GoodsNum: util.MustNum(item["goods_num"], 16, false).(uint16),
}
goodsList = append(goodsList, tmpInfo)
}
if err != nil {
// log
return
}
if 0 >= len(goodsList) {
return goodsList, CartEmpty
}
return
}
// 提交订单时新地址(address_id<=0) 会先插入地址表
func fetchAddress(ctx echo.Context) (*daoSql.Address, error) {
uid := ctx.Get("uid").(uint64)
// 获取提交订单时指定的地址
addressId := util.Atoi(ctx.FormValue("address_id"), 64, false).(uint64)
if 0 < addressId {
myAddressMap, err := daoSql.GetAddressListById([]uint64{addressId})
if nil != err {
// log
return &daoSql.Address{}, err
}
myAddress, ok := myAddressMap[addressId]
if !ok || uid != myAddress.MemberId {
// log
return &daoSql.Address{}, RecordEmpty
}
return myAddress, nil
}
// 插入新的地址信息
trueName := ctx.FormValue("true_name")
liveArea := ctx.FormValue("live_area")
address := ctx.FormValue("address")
mobile := ctx.FormValue("mobile")
// 显式提取地址信息
addressInfo := daoSql.UserAddressInfo{
TrueName: trueName,
LiveArea: liveArea,
Address: address,
Mobile: mobile,
}
myAddress, err := daoSql.SaveMyAddress(uid, &addressInfo)
if nil != err {
// log
return &daoSql.Address{}, err
}
return myAddress, err
}
// 根据订单
func genApiCancel(orderInfo *daoSql.Order) (cancel *apiIndex.Cancel) {
// 读入配置信息
envConf, _ := daoConf.EnvConf()
// 订单取消信息
cancelInfo := logic.GetCancelInfo(orderInfo)
cancel = &apiIndex.Cancel{
CanCancel: cancelInfo.CanCancel,
}
if !cancelInfo.CanCancel {
if 0 < len(envConf.ServiceTel) {
cancel.CancelTip.Tel = envConf.ServiceTel
} else {
cancel.CancelTip.Tel = cancelInfo.CancelTip.Tel
}
cancel.CancelTip.Tip = cancelInfo.CancelTip.Tip
} else {
cancelReasonList := []*apiIndex.CancelReasonType{}
for k, v := range cancelInfo.CancelReason {
tmp := &apiIndex.CancelReasonType{
Flag: util.Itoa(k),
Context: v,
}
| sonList = append(cancelReasonList, tmp)
}
cancel.CancelReason = cancelReasonList
}
return cancel
}
| cancelRea | identifier_name |
order.go | package controller
import (
"encoding/json"
"fmt"
"time"
daoConf "dao/conf"
daoSql "dao/sql"
. "global"
apiIndex "http/api"
"logic"
"util"
"github.com/labstack/echo"
)
type OrderController struct{}
// 注册路由
func (self OrderController) RegisterRoute(e *echo.Group) {
e.Get("/order/list", echo.HandlerFunc(self.MyOrderList))
e.Get("/order/detail", echo.HandlerFunc(self.Detail))
e.Post("/order/prepare", echo.HandlerFunc(self.PrepareOrder))
e.Post("/order/do_order", echo.HandlerFunc(self.DoOrder))
e.Post("/order/cancel_rder", echo.HandlerFunc(self.CancelOrder))
e.Post("/order/eval_order", echo.HandlerFunc(self.EvalOrder))
}
// 确认信息页 form-> goods_list:[{"goods_id":"3","selected":"1","goods_num":"2"}]
func (OrderController) PrepareOrder(ctx echo.Context) error {
uid := ctx.Get("uid").(uint64)
goodsList, err := getCartGoodsList(ctx)
if nil != err {
// log
return util.Fail(ctx, 10, err.Error())
}
// 收集 goodsId
goodsIdList := []uint64{}
for _, goodsInfo := range goodsList {
if 1 == goodsInfo.Selected {
if 0 < goodsInfo.GoodsNum {
goodsIdList = append(goodsIdList, goodsInfo.GoodsId)
}
}
}
// 获取 goodsId 信息
goodsException, goodsIdMap, _ := logic.GetCartInfo(goodsIdList)
// 验证并修正库存信息 如果只有3个,购买5个,会强制改为3个
goodsNoStorageException, _ := logic.VerifyGoodsNum(goodsIdMap, goodsList)
// 获取 goodsId 信息
shipTimeList := []string{"XX", "XX"}
// shipTimeList := logic.GetShipTime()
// 获取用户所有地址
myAddressList, err := daoSql.GetAddressListByUid(uid, false)
if nil != err && RecordEmpty != err {
// log
return util.Fail(ctx, 10, err.Error())
}
var myAddress *daoSql.Address
for idx, addressItem := range myAddressList {
// 默认取第一个
if 0 == idx {
myAddress = addressItem
}
// 取默认地址
if uint8(1) == addressItem.IsDefault {
myAddress = addressItem
}
}
if nil == myAddress {
myAddress = &daoSql.Address{}
} else {
myAddress.IsDefault = 1
}
// 读入配置信息
orderConf, err := daoConf.OrderConf()
if nil != err {
// log
return util.Fail(ctx, 10, err.Error())
}
// 生成预处理订单
orderMap := logic.OrderMap{
Address: myAddress,
GoodsIdMap: goodsIdMap,
GoodsList: goodsList,
}
orderInfo, orderGoodsList, err := logic.GenOrder(uid, orderMap)
if nil != err {
// log
}
// 过滤订单参数
orderInfo.Filter()
arrApiOrderGoods := make([]*apiIndex.OrderGoods, len(orderGoodsList))
for idx, item := range orderGoodsList {
arrApiOrderGoods[idx] = &apiIndex.OrderGoods{OrderGoods: item}
}
// 拼装接口数据
orderData := apiIndex.Order{
Address: (*apiIndex.AddressType)(myAddress),
ShipTimeList: shipTimeList,
OrderInfo: apiIndex.OrderInfo{
GoodsList: arrApiOrderGoods,
Order: &apiIndex.OrderBase{Order: orderInfo},
},
}
if 0 < len(goodsException) {
orderData.Alert = fmt.Sprintf(orderConf.Alert, goodsException)
} else if 0 < len(goodsNoStorageException) {
orderData.Alert = fmt.Sprintf(orderConf.StorageAlert, goodsNoStorageException)
} else {
orderData.Alert = ""
}
// 格式化地址列表
for _, addressItem := range myAddressList {
orderData.AddressList = append(orderData.AddressList, (*apiIndex.AddressType)(addressItem))
}
orderData.Format()
return util.Success(ctx, orderData)
}
// 确认信息页 form-> goods_list:[{"goods_id":"3","selected":"1","goods_num":"2"}]
func (OrderController) DoOrder(ctx echo.Context) error {
// 设置 redis key
uid := ctx.Get("uid").(uint64)
// 避免同一个订单重复提交
// data, _ := json.Marshal(ctx.Request())
// curOrderMd5 = fmt.Printf("%x", md5.Sum(data))
// preOrderMd5 := daoRedis.NewRedisClient().Key(daoRedis.KeyOrder, util.Itoa(uid)).GET("")
// if preOrder == preOrderMd5 {
// // log
// return util.Fail(ctx, 10, RepeatDoOrder)
// }
// daoRedis.NewRedisClient().SET("", curOrderMd5, 30)
// 获取购物车商品列表
goodsList, err := getCartGoodsList(ctx)
if nil != err {
// log
return util.Fail(ctx, 10, err.Error())
}
// 收集 goodsId
goodsIdList := []uint64{}
for _, goodsInfo := range goodsList {
if 1 == goodsInfo.Selected {
if 0 < goodsInfo.GoodsNum {
goodsIdList = append(goodsIdList, goodsIn | goodsIdMap, _ := logic.GetCartInfo(goodsIdList)
// 验证并修正库存信息 如果只有3个,购买5个,会强制改为3个
goodsNoStorageException, _ := logic.VerifyGoodsNum(goodsIdMap, goodsList)
// 获取地址信息
address, err := fetchAddress(ctx)
if nil != err {
return util.Fail(ctx, 10, "地址信息无效")
}
// 读入配置信息
orderConf, err := daoConf.OrderConf()
if nil != err {
// log
return util.Fail(ctx, 10, err.Error())
}
if 0 < len(goodsException) {
return util.Fail(ctx, 10, fmt.Sprintf(orderConf.Alert, goodsException))
}
if 0 < len(goodsNoStorageException) {
return util.Fail(ctx, 10, fmt.Sprintf(orderConf.StorageAlert, goodsNoStorageException))
}
// 提交订单
orderMap := logic.OrderMap{
Address: address,
GoodsIdMap: goodsIdMap,
GoodsList: goodsList,
ExceptTime: time.Now().Unix(),
OrderMessage: ctx.FormValue("order_message"),
}
orderInfo, orderGoodsList, err := logic.SubmitOrder(uid, orderMap)
if nil != err {
// log
}
arrApiOrderGoods := make([]*apiIndex.OrderGoods, len(orderGoodsList))
for idx, item := range orderGoodsList {
arrApiOrderGoods[idx] = &apiIndex.OrderGoods{OrderGoods: item}
}
// 拼装接口数据
orderData := apiIndex.Order{
Address: (*apiIndex.AddressType)(address),
ShipTimeList: []string{},
OrderInfo: apiIndex.OrderInfo{
GoodsList: arrApiOrderGoods,
Order: &apiIndex.OrderBase{Order: orderInfo},
},
Cancel: genApiCancel(orderInfo),
}
orderData.Format()
return util.Success(ctx, orderData)
}
// 订单列表页
func (OrderController) MyOrderList(ctx echo.Context) error {
// 获取订单列表信息
// uid, base_id, rn
myOrderMapList, hasMore, err := logic.GetMyOrderList(10, 0, 20)
if nil != err {
return err
}
// 拼装接口数据
orderData := &apiIndex.OrderList{
HasMore: hasMore,
}
// orderData
for _, v := range myOrderMapList {
arrApiOrderGoods := make([]*apiIndex.OrderGoods, len(v["goodsList"].([]*daoSql.OrderGoods)))
for idx, item := range v["goodsList"].([]*daoSql.OrderGoods) {
arrApiOrderGoods[idx] = &apiIndex.OrderGoods{OrderGoods: item}
}
orderData.List = append(orderData.List, &apiIndex.Order{
Address: (*apiIndex.AddressType)(v["addressInfo"].(*daoSql.Address)),
OrderInfo: apiIndex.OrderInfo{
Order: &apiIndex.OrderBase{Order: v["order"].(*daoSql.Order)},
GoodsList: arrApiOrderGoods,
},
})
}
orderData.Format()
return util.Success(ctx, orderData)
}
// 订单列表页
func (OrderController) CancelOrder(ctx echo.Context) error {
// 获取订单列表信息
uid := uint64(10)
orderSn := ctx.FormValue("order_sn")
cancelFlag := util.Atoi(ctx.FormValue("cancel_flag"), 16, false).(uint16)
err := logic.CancelOrder(uid, orderSn, cancelFlag)
if nil != err {
return util.Fail(ctx, 10, err.Error())
}
return util.Success(ctx, nil)
}
// 订单列表页
func (OrderController) EvalOrder(ctx echo.Context) error {
// 获取订单列表信息
uid := uint64(10)
orderSn := ctx.FormValue("order_sn")
stars := util.Atoi(ctx.FormValue("stars"), 8, false).(uint8)
feedback := ctx.FormValue("feedback")
err := logic.EvalOrder(uid, orderSn, stars, feedback)
if nil != err {
return util.Fail(ctx, 10, err.Error())
}
return util.Success(ctx, nil)
}
// 订单列表页
func (OrderController) Detail(ctx echo.Context) error {
ordeSn := ctx.QueryParam("order_sn")
// 获取订单列表信息
// uid, orderSn
myOrderMap, err := logic.GetOrderDetail(10, ordeSn)
if nil != err {
return err
}
// 拼装接口数据
arrApiOrderGoods := make([]*apiIndex.OrderGoods, len(myOrderMap["goodsList"].([]*daoSql.OrderGoods)))
for idx, item := range myOrderMap["goodsList"].([]*daoSql.OrderGoods) {
arrApiOrderGoods[idx] = &apiIndex.OrderGoods{OrderGoods: item}
}
orderData := &apiIndex.Order{
Alert: "",
Address: (*apiIndex.AddressType)(myOrderMap["addressInfo"].(*daoSql.Address)),
OrderInfo: apiIndex.OrderInfo{
Order: &apiIndex.OrderBase{Order: myOrderMap["order"].(*daoSql.Order)},
GoodsList: arrApiOrderGoods,
},
Cancel: genApiCancel(myOrderMap["order"].(*daoSql.Order)),
}
orderData.Format()
return util.Render(ctx, "order/info", "订单详情", orderData)
}
// 生成订单列表页页的 html 不提供外部接口
func (OrderController) GenOrderListHtml(ctx echo.Context) error {
return util.Render(ctx, "order/list", "订单列表", map[string]interface{}{})
}
// 获取购物车中商品列表
func getCartGoodsList(ctx echo.Context) (goodsList []*logic.CartInfo, err error) {
goodsList = []*logic.CartInfo{}
goodsListStr := ctx.FormValue("goods_list")
goodsListMap := []map[string]interface{}{}
err = json.Unmarshal([]byte(goodsListStr), &goodsListMap)
for _, item := range goodsListMap {
tmpInfo := &logic.CartInfo{
GoodsId: util.MustNum(item["goods_id"], 64, false).(uint64),
Selected: util.MustNum(item["selected"], 8, false).(uint8),
GoodsNum: util.MustNum(item["goods_num"], 16, false).(uint16),
}
goodsList = append(goodsList, tmpInfo)
}
if err != nil {
// log
return
}
if 0 >= len(goodsList) {
return goodsList, CartEmpty
}
return
}
// 提交订单时新地址(address_id<=0) 会先插入地址表
func fetchAddress(ctx echo.Context) (*daoSql.Address, error) {
uid := ctx.Get("uid").(uint64)
// 获取提交订单时指定的地址
addressId := util.Atoi(ctx.FormValue("address_id"), 64, false).(uint64)
if 0 < addressId {
myAddressMap, err := daoSql.GetAddressListById([]uint64{addressId})
if nil != err {
// log
return &daoSql.Address{}, err
}
myAddress, ok := myAddressMap[addressId]
if !ok || uid != myAddress.MemberId {
// log
return &daoSql.Address{}, RecordEmpty
}
return myAddress, nil
}
// 插入新的地址信息
trueName := ctx.FormValue("true_name")
liveArea := ctx.FormValue("live_area")
address := ctx.FormValue("address")
mobile := ctx.FormValue("mobile")
// 显式提取地址信息
addressInfo := daoSql.UserAddressInfo{
TrueName: trueName,
LiveArea: liveArea,
Address: address,
Mobile: mobile,
}
myAddress, err := daoSql.SaveMyAddress(uid, &addressInfo)
if nil != err {
// log
return &daoSql.Address{}, err
}
return myAddress, err
}
// 根据订单
func genApiCancel(orderInfo *daoSql.Order) (cancel *apiIndex.Cancel) {
// 读入配置信息
envConf, _ := daoConf.EnvConf()
// 订单取消信息
cancelInfo := logic.GetCancelInfo(orderInfo)
cancel = &apiIndex.Cancel{
CanCancel: cancelInfo.CanCancel,
}
if !cancelInfo.CanCancel {
if 0 < len(envConf.ServiceTel) {
cancel.CancelTip.Tel = envConf.ServiceTel
} else {
cancel.CancelTip.Tel = cancelInfo.CancelTip.Tel
}
cancel.CancelTip.Tip = cancelInfo.CancelTip.Tip
} else {
cancelReasonList := []*apiIndex.CancelReasonType{}
for k, v := range cancelInfo.CancelReason {
tmp := &apiIndex.CancelReasonType{
Flag: util.Itoa(k),
Context: v,
}
cancelReasonList = append(cancelReasonList, tmp)
}
cancel.CancelReason = cancelReasonList
}
return cancel
}
| fo.GoodsId)
}
}
}
// 获取 商品详情
goodsException, | conditional_block |
symbol.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// Symbol factory
import { each, isArray, retrieve2 } from 'zrender/src/core/util';
import * as graphic from './graphic';
import BoundingRect from 'zrender/src/core/BoundingRect';
import { calculateTextPosition } from 'zrender/src/contain/text';
import { Dictionary } from 'zrender/src/core/types';
import { SymbolOptionMixin, ZRColor } from './types';
import { parsePercent } from './number';
export type ECSymbol = graphic.Path & {
__isEmptyBrush?: boolean
setColor: (color: ZRColor, innerColor?: ZRColor) => void
getColor: () => ZRColor
};
type SymbolCtor = { new(): ECSymbol };
type SymbolShapeMaker = (x: number, y: number, w: number, h: number, shape: Dictionary<any>) => void;
/**
* Triangle shape
* @inner
*/
const Triangle = graphic.Path.extend({
type: 'triangle',
shape: {
cx: 0,
cy: 0,
width: 0,
height: 0
},
buildPath: function (path, shape) {
const cx = shape.cx;
const cy = shape.cy;
const width = shape.width / 2;
const height = shape.height / 2;
path.moveTo(cx, cy - height);
path.lineTo(cx + width, cy + height);
path.lineTo(cx - width, cy + height);
path.closePath();
}
});
/**
* Diamond shape
* @inner
*/
const Diamond = graphic.Path.extend({
type: 'diamond',
shape: {
cx: 0,
cy: 0,
width: 0,
height: 0
},
buildPath: function (path, shape) {
const cx = shape.cx;
const cy = shape.cy;
const width = shape.width / 2;
const height = shape.height / 2;
path.moveTo(cx, cy - height);
path.lineTo(cx + width, cy);
path.lineTo(cx, cy + height);
path.lineTo(cx - width, cy);
path.closePath();
}
});
/**
* Pin shape
* @inner
*/
const Pin = graphic.Path.extend({
type: 'pin',
shape: {
// x, y on the cusp
x: 0,
y: 0,
width: 0,
height: 0
},
buildPath: function (path, shape) {
const x = shape.x;
const y = shape.y;
const w = shape.width / 5 * 3;
// Height must be larger than width
const h = Math.max(w, shape.height);
const r = w / 2;
// Dist on y with tangent point and circle center
const dy = r * r / (h - r);
const cy = y - h + r + dy;
const angle = Math.asin(dy / r);
// Dist on x with tangent point and circle center
const dx = Math.cos(angle) * r;
const tanX = Math.sin(angle);
const tanY = Math.cos(angle);
const cpLen = r * 0.6;
const cpLen2 = r * 0.7;
path.moveTo(x - dx, cy + dy);
path.arc(
x, cy, r,
Math.PI - angle,
Math.PI * 2 + angle
);
path.bezierCurveTo(
x + dx - tanX * cpLen, cy + dy + tanY * cpLen,
x, y - cpLen2,
x, y
);
path.bezierCurveTo(
x, y - cpLen2,
x - dx + tanX * cpLen, cy + dy + tanY * cpLen,
x - dx, cy + dy
);
path.closePath();
}
});
/**
* Arrow shape
* @inner
*/
const Arrow = graphic.Path.extend({
type: 'arrow',
shape: {
x: 0,
y: 0,
width: 0,
height: 0
},
buildPath: function (ctx, shape) {
const height = shape.height;
const width = shape.width;
const x = shape.x;
const y = shape.y;
const dx = width / 3 * 2;
ctx.moveTo(x, y);
ctx.lineTo(x + dx, y + height);
ctx.lineTo(x, y + height / 4 * 3);
ctx.lineTo(x - dx, y + height);
ctx.lineTo(x, y);
ctx.closePath();
}
});
/**
* Map of path constructors | rect: graphic.Rect as unknown as SymbolCtor,
roundRect: graphic.Rect as unknown as SymbolCtor,
square: graphic.Rect as unknown as SymbolCtor,
circle: graphic.Circle as unknown as SymbolCtor,
diamond: Diamond as unknown as SymbolCtor,
pin: Pin as unknown as SymbolCtor,
arrow: Arrow as unknown as SymbolCtor,
triangle: Triangle as unknown as SymbolCtor
};
const symbolShapeMakers: Dictionary<SymbolShapeMaker> = {
line: function (x, y, w, h, shape: graphic.Line['shape']) {
shape.x1 = x;
shape.y1 = y + h / 2;
shape.x2 = x + w;
shape.y2 = y + h / 2;
},
rect: function (x, y, w, h, shape: graphic.Rect['shape']) {
shape.x = x;
shape.y = y;
shape.width = w;
shape.height = h;
},
roundRect: function (x, y, w, h, shape: graphic.Rect['shape']) {
shape.x = x;
shape.y = y;
shape.width = w;
shape.height = h;
shape.r = Math.min(w, h) / 4;
},
square: function (x, y, w, h, shape: graphic.Rect['shape']) {
const size = Math.min(w, h);
shape.x = x;
shape.y = y;
shape.width = size;
shape.height = size;
},
circle: function (x, y, w, h, shape: graphic.Circle['shape']) {
// Put circle in the center of square
shape.cx = x + w / 2;
shape.cy = y + h / 2;
shape.r = Math.min(w, h) / 2;
},
diamond: function (x, y, w, h, shape: InstanceType<typeof Diamond>['shape']) {
shape.cx = x + w / 2;
shape.cy = y + h / 2;
shape.width = w;
shape.height = h;
},
pin: function (x, y, w, h, shape: InstanceType<typeof Pin>['shape']) {
shape.x = x + w / 2;
shape.y = y + h / 2;
shape.width = w;
shape.height = h;
},
arrow: function (x, y, w, h, shape: InstanceType<typeof Arrow>['shape']) {
shape.x = x + w / 2;
shape.y = y + h / 2;
shape.width = w;
shape.height = h;
},
triangle: function (x, y, w, h, shape: InstanceType<typeof Triangle>['shape']) {
shape.cx = x + w / 2;
shape.cy = y + h / 2;
shape.width = w;
shape.height = h;
}
};
export const symbolBuildProxies: Dictionary<ECSymbol> = {};
each(symbolCtors, function (Ctor, name) {
symbolBuildProxies[name] = new Ctor();
});
const SymbolClz = graphic.Path.extend({
type: 'symbol',
shape: {
symbolType: '',
x: 0,
y: 0,
width: 0,
height: 0
},
calculateTextPosition(out, config, rect) {
const res = calculateTextPosition(out, config, rect);
const shape = this.shape;
if (shape && shape.symbolType === 'pin' && config.position === 'inside') {
res.y = rect.y + rect.height * 0.4;
}
return res;
},
buildPath(ctx, shape, inBundle) {
let symbolType = shape.symbolType;
if (symbolType !== 'none') {
let proxySymbol = symbolBuildProxies[symbolType];
if (!proxySymbol) {
// Default rect
symbolType = 'rect';
proxySymbol = symbolBuildProxies[symbolType];
}
symbolShapeMakers[symbolType](
shape.x, shape.y, shape.width, shape.height, proxySymbol.shape
);
proxySymbol.buildPath(ctx, proxySymbol.shape, inBundle);
}
}
});
// Provide setColor helper method to avoid determine if set the fill or stroke outside
function symbolPathSetColor(this: ECSymbol, color: ZRColor, innerColor?: ZRColor) {
if (this.type !== 'image') {
const symbolStyle = this.style;
if (this.__isEmptyBrush) {
symbolStyle.stroke = color;
symbolStyle.fill = innerColor || '#fff';
// TODO Same width with lineStyle in LineView
symbolStyle.lineWidth = 2;
}
else if (this.shape.symbolType === 'line') {
symbolStyle.stroke = color;
}
else {
symbolStyle.fill = color;
}
this.markRedraw();
}
}
/**
* Create a symbol element with given symbol configuration: shape, x, y, width, height, color
*/
export function createSymbol(
symbolType: string,
x: number,
y: number,
w: number,
h: number,
color?: ZRColor,
// whether to keep the ratio of w/h,
keepAspect?: boolean
) {
// TODO Support image object, DynamicImage.
const isEmpty = symbolType.indexOf('empty') === 0;
if (isEmpty) {
symbolType = symbolType.substr(5, 1).toLowerCase() + symbolType.substr(6);
}
let symbolPath: ECSymbol | graphic.Image;
if (symbolType.indexOf('image://') === 0) {
symbolPath = graphic.makeImage(
symbolType.slice(8),
new BoundingRect(x, y, w, h),
keepAspect ? 'center' : 'cover'
);
}
else if (symbolType.indexOf('path://') === 0) {
symbolPath = graphic.makePath(
symbolType.slice(7),
{},
new BoundingRect(x, y, w, h),
keepAspect ? 'center' : 'cover'
) as unknown as ECSymbol;
}
else {
symbolPath = new SymbolClz({
shape: {
symbolType: symbolType,
x: x,
y: y,
width: w,
height: h
}
}) as unknown as ECSymbol;
}
(symbolPath as ECSymbol).__isEmptyBrush = isEmpty;
// TODO Should deprecate setColor
(symbolPath as ECSymbol).setColor = symbolPathSetColor;
if (color) {
(symbolPath as ECSymbol).setColor(color);
}
return symbolPath as ECSymbol;
}
export function normalizeSymbolSize(symbolSize: number | number[]): [number, number] {
if (!isArray(symbolSize)) {
symbolSize = [+symbolSize, +symbolSize];
}
return [symbolSize[0] || 0, symbolSize[1] || 0];
}
export function normalizeSymbolOffset(
symbolOffset: SymbolOptionMixin['symbolOffset'],
symbolSize: number[]
): [number, number] {
if (symbolOffset == null) {
return;
}
if (!isArray(symbolOffset)) {
symbolOffset = [symbolOffset, symbolOffset];
}
return [
parsePercent(symbolOffset[0], symbolSize[0]) || 0,
parsePercent(retrieve2(symbolOffset[1], symbolOffset[0]), symbolSize[1]) || 0
];
} | */
// TODO Use function to build symbol path.
const symbolCtors: Dictionary<SymbolCtor> = {
line: graphic.Line as unknown as SymbolCtor,
| random_line_split |
symbol.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// Symbol factory
import { each, isArray, retrieve2 } from 'zrender/src/core/util';
import * as graphic from './graphic';
import BoundingRect from 'zrender/src/core/BoundingRect';
import { calculateTextPosition } from 'zrender/src/contain/text';
import { Dictionary } from 'zrender/src/core/types';
import { SymbolOptionMixin, ZRColor } from './types';
import { parsePercent } from './number';
export type ECSymbol = graphic.Path & {
__isEmptyBrush?: boolean
setColor: (color: ZRColor, innerColor?: ZRColor) => void
getColor: () => ZRColor
};
type SymbolCtor = { new(): ECSymbol };
type SymbolShapeMaker = (x: number, y: number, w: number, h: number, shape: Dictionary<any>) => void;
/**
* Triangle shape
* @inner
*/
const Triangle = graphic.Path.extend({
type: 'triangle',
shape: {
cx: 0,
cy: 0,
width: 0,
height: 0
},
buildPath: function (path, shape) {
const cx = shape.cx;
const cy = shape.cy;
const width = shape.width / 2;
const height = shape.height / 2;
path.moveTo(cx, cy - height);
path.lineTo(cx + width, cy + height);
path.lineTo(cx - width, cy + height);
path.closePath();
}
});
/**
* Diamond shape
* @inner
*/
const Diamond = graphic.Path.extend({
type: 'diamond',
shape: {
cx: 0,
cy: 0,
width: 0,
height: 0
},
buildPath: function (path, shape) {
const cx = shape.cx;
const cy = shape.cy;
const width = shape.width / 2;
const height = shape.height / 2;
path.moveTo(cx, cy - height);
path.lineTo(cx + width, cy);
path.lineTo(cx, cy + height);
path.lineTo(cx - width, cy);
path.closePath();
}
});
/**
* Pin shape
* @inner
*/
const Pin = graphic.Path.extend({
type: 'pin',
shape: {
// x, y on the cusp
x: 0,
y: 0,
width: 0,
height: 0
},
buildPath: function (path, shape) {
const x = shape.x;
const y = shape.y;
const w = shape.width / 5 * 3;
// Height must be larger than width
const h = Math.max(w, shape.height);
const r = w / 2;
// Dist on y with tangent point and circle center
const dy = r * r / (h - r);
const cy = y - h + r + dy;
const angle = Math.asin(dy / r);
// Dist on x with tangent point and circle center
const dx = Math.cos(angle) * r;
const tanX = Math.sin(angle);
const tanY = Math.cos(angle);
const cpLen = r * 0.6;
const cpLen2 = r * 0.7;
path.moveTo(x - dx, cy + dy);
path.arc(
x, cy, r,
Math.PI - angle,
Math.PI * 2 + angle
);
path.bezierCurveTo(
x + dx - tanX * cpLen, cy + dy + tanY * cpLen,
x, y - cpLen2,
x, y
);
path.bezierCurveTo(
x, y - cpLen2,
x - dx + tanX * cpLen, cy + dy + tanY * cpLen,
x - dx, cy + dy
);
path.closePath();
}
});
/**
* Arrow shape
* @inner
*/
const Arrow = graphic.Path.extend({
type: 'arrow',
shape: {
x: 0,
y: 0,
width: 0,
height: 0
},
buildPath: function (ctx, shape) {
const height = shape.height;
const width = shape.width;
const x = shape.x;
const y = shape.y;
const dx = width / 3 * 2;
ctx.moveTo(x, y);
ctx.lineTo(x + dx, y + height);
ctx.lineTo(x, y + height / 4 * 3);
ctx.lineTo(x - dx, y + height);
ctx.lineTo(x, y);
ctx.closePath();
}
});
/**
* Map of path constructors
*/
// TODO Use function to build symbol path.
const symbolCtors: Dictionary<SymbolCtor> = {
line: graphic.Line as unknown as SymbolCtor,
rect: graphic.Rect as unknown as SymbolCtor,
roundRect: graphic.Rect as unknown as SymbolCtor,
square: graphic.Rect as unknown as SymbolCtor,
circle: graphic.Circle as unknown as SymbolCtor,
diamond: Diamond as unknown as SymbolCtor,
pin: Pin as unknown as SymbolCtor,
arrow: Arrow as unknown as SymbolCtor,
triangle: Triangle as unknown as SymbolCtor
};
const symbolShapeMakers: Dictionary<SymbolShapeMaker> = {
line: function (x, y, w, h, shape: graphic.Line['shape']) {
shape.x1 = x;
shape.y1 = y + h / 2;
shape.x2 = x + w;
shape.y2 = y + h / 2;
},
rect: function (x, y, w, h, shape: graphic.Rect['shape']) {
shape.x = x;
shape.y = y;
shape.width = w;
shape.height = h;
},
roundRect: function (x, y, w, h, shape: graphic.Rect['shape']) {
shape.x = x;
shape.y = y;
shape.width = w;
shape.height = h;
shape.r = Math.min(w, h) / 4;
},
square: function (x, y, w, h, shape: graphic.Rect['shape']) {
const size = Math.min(w, h);
shape.x = x;
shape.y = y;
shape.width = size;
shape.height = size;
},
circle: function (x, y, w, h, shape: graphic.Circle['shape']) {
// Put circle in the center of square
shape.cx = x + w / 2;
shape.cy = y + h / 2;
shape.r = Math.min(w, h) / 2;
},
diamond: function (x, y, w, h, shape: InstanceType<typeof Diamond>['shape']) {
shape.cx = x + w / 2;
shape.cy = y + h / 2;
shape.width = w;
shape.height = h;
},
pin: function (x, y, w, h, shape: InstanceType<typeof Pin>['shape']) {
shape.x = x + w / 2;
shape.y = y + h / 2;
shape.width = w;
shape.height = h;
},
arrow: function (x, y, w, h, shape: InstanceType<typeof Arrow>['shape']) {
shape.x = x + w / 2;
shape.y = y + h / 2;
shape.width = w;
shape.height = h;
},
triangle: function (x, y, w, h, shape: InstanceType<typeof Triangle>['shape']) {
shape.cx = x + w / 2;
shape.cy = y + h / 2;
shape.width = w;
shape.height = h;
}
};
export const symbolBuildProxies: Dictionary<ECSymbol> = {};
each(symbolCtors, function (Ctor, name) {
symbolBuildProxies[name] = new Ctor();
});
const SymbolClz = graphic.Path.extend({
type: 'symbol',
shape: {
symbolType: '',
x: 0,
y: 0,
width: 0,
height: 0
},
calculateTextPosition(out, config, rect) {
const res = calculateTextPosition(out, config, rect);
const shape = this.shape;
if (shape && shape.symbolType === 'pin' && config.position === 'inside') {
res.y = rect.y + rect.height * 0.4;
}
return res;
},
buildPath(ctx, shape, inBundle) {
let symbolType = shape.symbolType;
if (symbolType !== 'none') {
let proxySymbol = symbolBuildProxies[symbolType];
if (!proxySymbol) {
// Default rect
symbolType = 'rect';
proxySymbol = symbolBuildProxies[symbolType];
}
symbolShapeMakers[symbolType](
shape.x, shape.y, shape.width, shape.height, proxySymbol.shape
);
proxySymbol.buildPath(ctx, proxySymbol.shape, inBundle);
}
}
});
// Provide setColor helper method to avoid determine if set the fill or stroke outside
function symbolPathSetColor(this: ECSymbol, color: ZRColor, innerColor?: ZRColor) {
if (this.type !== 'image') {
const symbolStyle = this.style;
if (this.__isEmptyBrush) {
symbolStyle.stroke = color;
symbolStyle.fill = innerColor || '#fff';
// TODO Same width with lineStyle in LineView
symbolStyle.lineWidth = 2;
}
else if (this.shape.symbolType === 'line') {
symbolStyle.stroke = color;
}
else {
symbolStyle.fill = color;
}
this.markRedraw();
}
}
/**
* Create a symbol element with given symbol configuration: shape, x, y, width, height, color
*/
export function | (
symbolType: string,
x: number,
y: number,
w: number,
h: number,
color?: ZRColor,
// whether to keep the ratio of w/h,
keepAspect?: boolean
) {
// TODO Support image object, DynamicImage.
const isEmpty = symbolType.indexOf('empty') === 0;
if (isEmpty) {
symbolType = symbolType.substr(5, 1).toLowerCase() + symbolType.substr(6);
}
let symbolPath: ECSymbol | graphic.Image;
if (symbolType.indexOf('image://') === 0) {
symbolPath = graphic.makeImage(
symbolType.slice(8),
new BoundingRect(x, y, w, h),
keepAspect ? 'center' : 'cover'
);
}
else if (symbolType.indexOf('path://') === 0) {
symbolPath = graphic.makePath(
symbolType.slice(7),
{},
new BoundingRect(x, y, w, h),
keepAspect ? 'center' : 'cover'
) as unknown as ECSymbol;
}
else {
symbolPath = new SymbolClz({
shape: {
symbolType: symbolType,
x: x,
y: y,
width: w,
height: h
}
}) as unknown as ECSymbol;
}
(symbolPath as ECSymbol).__isEmptyBrush = isEmpty;
// TODO Should deprecate setColor
(symbolPath as ECSymbol).setColor = symbolPathSetColor;
if (color) {
(symbolPath as ECSymbol).setColor(color);
}
return symbolPath as ECSymbol;
}
export function normalizeSymbolSize(symbolSize: number | number[]): [number, number] {
if (!isArray(symbolSize)) {
symbolSize = [+symbolSize, +symbolSize];
}
return [symbolSize[0] || 0, symbolSize[1] || 0];
}
export function normalizeSymbolOffset(
symbolOffset: SymbolOptionMixin['symbolOffset'],
symbolSize: number[]
): [number, number] {
if (symbolOffset == null) {
return;
}
if (!isArray(symbolOffset)) {
symbolOffset = [symbolOffset, symbolOffset];
}
return [
parsePercent(symbolOffset[0], symbolSize[0]) || 0,
parsePercent(retrieve2(symbolOffset[1], symbolOffset[0]), symbolSize[1]) || 0
];
}
| createSymbol | identifier_name |
symbol.ts | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// Symbol factory
import { each, isArray, retrieve2 } from 'zrender/src/core/util';
import * as graphic from './graphic';
import BoundingRect from 'zrender/src/core/BoundingRect';
import { calculateTextPosition } from 'zrender/src/contain/text';
import { Dictionary } from 'zrender/src/core/types';
import { SymbolOptionMixin, ZRColor } from './types';
import { parsePercent } from './number';
export type ECSymbol = graphic.Path & {
__isEmptyBrush?: boolean
setColor: (color: ZRColor, innerColor?: ZRColor) => void
getColor: () => ZRColor
};
type SymbolCtor = { new(): ECSymbol };
type SymbolShapeMaker = (x: number, y: number, w: number, h: number, shape: Dictionary<any>) => void;
/**
* Triangle shape
* @inner
*/
const Triangle = graphic.Path.extend({
type: 'triangle',
shape: {
cx: 0,
cy: 0,
width: 0,
height: 0
},
buildPath: function (path, shape) {
const cx = shape.cx;
const cy = shape.cy;
const width = shape.width / 2;
const height = shape.height / 2;
path.moveTo(cx, cy - height);
path.lineTo(cx + width, cy + height);
path.lineTo(cx - width, cy + height);
path.closePath();
}
});
/**
* Diamond shape
* @inner
*/
const Diamond = graphic.Path.extend({
type: 'diamond',
shape: {
cx: 0,
cy: 0,
width: 0,
height: 0
},
buildPath: function (path, shape) {
const cx = shape.cx;
const cy = shape.cy;
const width = shape.width / 2;
const height = shape.height / 2;
path.moveTo(cx, cy - height);
path.lineTo(cx + width, cy);
path.lineTo(cx, cy + height);
path.lineTo(cx - width, cy);
path.closePath();
}
});
/**
* Pin shape
* @inner
*/
const Pin = graphic.Path.extend({
type: 'pin',
shape: {
// x, y on the cusp
x: 0,
y: 0,
width: 0,
height: 0
},
buildPath: function (path, shape) {
const x = shape.x;
const y = shape.y;
const w = shape.width / 5 * 3;
// Height must be larger than width
const h = Math.max(w, shape.height);
const r = w / 2;
// Dist on y with tangent point and circle center
const dy = r * r / (h - r);
const cy = y - h + r + dy;
const angle = Math.asin(dy / r);
// Dist on x with tangent point and circle center
const dx = Math.cos(angle) * r;
const tanX = Math.sin(angle);
const tanY = Math.cos(angle);
const cpLen = r * 0.6;
const cpLen2 = r * 0.7;
path.moveTo(x - dx, cy + dy);
path.arc(
x, cy, r,
Math.PI - angle,
Math.PI * 2 + angle
);
path.bezierCurveTo(
x + dx - tanX * cpLen, cy + dy + tanY * cpLen,
x, y - cpLen2,
x, y
);
path.bezierCurveTo(
x, y - cpLen2,
x - dx + tanX * cpLen, cy + dy + tanY * cpLen,
x - dx, cy + dy
);
path.closePath();
}
});
/**
* Arrow shape
* @inner
*/
const Arrow = graphic.Path.extend({
type: 'arrow',
shape: {
x: 0,
y: 0,
width: 0,
height: 0
},
buildPath: function (ctx, shape) {
const height = shape.height;
const width = shape.width;
const x = shape.x;
const y = shape.y;
const dx = width / 3 * 2;
ctx.moveTo(x, y);
ctx.lineTo(x + dx, y + height);
ctx.lineTo(x, y + height / 4 * 3);
ctx.lineTo(x - dx, y + height);
ctx.lineTo(x, y);
ctx.closePath();
}
});
/**
* Map of path constructors
*/
// TODO Use function to build symbol path.
const symbolCtors: Dictionary<SymbolCtor> = {
line: graphic.Line as unknown as SymbolCtor,
rect: graphic.Rect as unknown as SymbolCtor,
roundRect: graphic.Rect as unknown as SymbolCtor,
square: graphic.Rect as unknown as SymbolCtor,
circle: graphic.Circle as unknown as SymbolCtor,
diamond: Diamond as unknown as SymbolCtor,
pin: Pin as unknown as SymbolCtor,
arrow: Arrow as unknown as SymbolCtor,
triangle: Triangle as unknown as SymbolCtor
};
const symbolShapeMakers: Dictionary<SymbolShapeMaker> = {
line: function (x, y, w, h, shape: graphic.Line['shape']) {
shape.x1 = x;
shape.y1 = y + h / 2;
shape.x2 = x + w;
shape.y2 = y + h / 2;
},
rect: function (x, y, w, h, shape: graphic.Rect['shape']) {
shape.x = x;
shape.y = y;
shape.width = w;
shape.height = h;
},
roundRect: function (x, y, w, h, shape: graphic.Rect['shape']) {
shape.x = x;
shape.y = y;
shape.width = w;
shape.height = h;
shape.r = Math.min(w, h) / 4;
},
square: function (x, y, w, h, shape: graphic.Rect['shape']) {
const size = Math.min(w, h);
shape.x = x;
shape.y = y;
shape.width = size;
shape.height = size;
},
circle: function (x, y, w, h, shape: graphic.Circle['shape']) {
// Put circle in the center of square
shape.cx = x + w / 2;
shape.cy = y + h / 2;
shape.r = Math.min(w, h) / 2;
},
diamond: function (x, y, w, h, shape: InstanceType<typeof Diamond>['shape']) {
shape.cx = x + w / 2;
shape.cy = y + h / 2;
shape.width = w;
shape.height = h;
},
pin: function (x, y, w, h, shape: InstanceType<typeof Pin>['shape']) {
shape.x = x + w / 2;
shape.y = y + h / 2;
shape.width = w;
shape.height = h;
},
arrow: function (x, y, w, h, shape: InstanceType<typeof Arrow>['shape']) {
shape.x = x + w / 2;
shape.y = y + h / 2;
shape.width = w;
shape.height = h;
},
triangle: function (x, y, w, h, shape: InstanceType<typeof Triangle>['shape']) {
shape.cx = x + w / 2;
shape.cy = y + h / 2;
shape.width = w;
shape.height = h;
}
};
export const symbolBuildProxies: Dictionary<ECSymbol> = {};
each(symbolCtors, function (Ctor, name) {
symbolBuildProxies[name] = new Ctor();
});
const SymbolClz = graphic.Path.extend({
type: 'symbol',
shape: {
symbolType: '',
x: 0,
y: 0,
width: 0,
height: 0
},
calculateTextPosition(out, config, rect) {
const res = calculateTextPosition(out, config, rect);
const shape = this.shape;
if (shape && shape.symbolType === 'pin' && config.position === 'inside') {
res.y = rect.y + rect.height * 0.4;
}
return res;
},
buildPath(ctx, shape, inBundle) |
});
// Provide setColor helper method to avoid determine if set the fill or stroke outside
function symbolPathSetColor(this: ECSymbol, color: ZRColor, innerColor?: ZRColor) {
if (this.type !== 'image') {
const symbolStyle = this.style;
if (this.__isEmptyBrush) {
symbolStyle.stroke = color;
symbolStyle.fill = innerColor || '#fff';
// TODO Same width with lineStyle in LineView
symbolStyle.lineWidth = 2;
}
else if (this.shape.symbolType === 'line') {
symbolStyle.stroke = color;
}
else {
symbolStyle.fill = color;
}
this.markRedraw();
}
}
/**
* Create a symbol element with given symbol configuration: shape, x, y, width, height, color
*/
export function createSymbol(
symbolType: string,
x: number,
y: number,
w: number,
h: number,
color?: ZRColor,
// whether to keep the ratio of w/h,
keepAspect?: boolean
) {
// TODO Support image object, DynamicImage.
const isEmpty = symbolType.indexOf('empty') === 0;
if (isEmpty) {
symbolType = symbolType.substr(5, 1).toLowerCase() + symbolType.substr(6);
}
let symbolPath: ECSymbol | graphic.Image;
if (symbolType.indexOf('image://') === 0) {
symbolPath = graphic.makeImage(
symbolType.slice(8),
new BoundingRect(x, y, w, h),
keepAspect ? 'center' : 'cover'
);
}
else if (symbolType.indexOf('path://') === 0) {
symbolPath = graphic.makePath(
symbolType.slice(7),
{},
new BoundingRect(x, y, w, h),
keepAspect ? 'center' : 'cover'
) as unknown as ECSymbol;
}
else {
symbolPath = new SymbolClz({
shape: {
symbolType: symbolType,
x: x,
y: y,
width: w,
height: h
}
}) as unknown as ECSymbol;
}
(symbolPath as ECSymbol).__isEmptyBrush = isEmpty;
// TODO Should deprecate setColor
(symbolPath as ECSymbol).setColor = symbolPathSetColor;
if (color) {
(symbolPath as ECSymbol).setColor(color);
}
return symbolPath as ECSymbol;
}
export function normalizeSymbolSize(symbolSize: number | number[]): [number, number] {
if (!isArray(symbolSize)) {
symbolSize = [+symbolSize, +symbolSize];
}
return [symbolSize[0] || 0, symbolSize[1] || 0];
}
export function normalizeSymbolOffset(
symbolOffset: SymbolOptionMixin['symbolOffset'],
symbolSize: number[]
): [number, number] {
if (symbolOffset == null) {
return;
}
if (!isArray(symbolOffset)) {
symbolOffset = [symbolOffset, symbolOffset];
}
return [
parsePercent(symbolOffset[0], symbolSize[0]) || 0,
parsePercent(retrieve2(symbolOffset[1], symbolOffset[0]), symbolSize[1]) || 0
];
}
| {
let symbolType = shape.symbolType;
if (symbolType !== 'none') {
let proxySymbol = symbolBuildProxies[symbolType];
if (!proxySymbol) {
// Default rect
symbolType = 'rect';
proxySymbol = symbolBuildProxies[symbolType];
}
symbolShapeMakers[symbolType](
shape.x, shape.y, shape.width, shape.height, proxySymbol.shape
);
proxySymbol.buildPath(ctx, proxySymbol.shape, inBundle);
}
} | identifier_body |
main.rs | use std::fmt;
use std::collections::{VecDeque, HashSet};
use intcode::{Word, util::{parse_stdin_program, GameDisplay}, Program, Registers, ExecutionState};
fn main() {
let mut robot = Robot::new(parse_stdin_program());
let mut gd: GameDisplay<Tile> = GameDisplay::default();
gd.insert(robot.position(), Tile::Empty);
let mut oxygen_at = None;
{
let mut work = VecDeque::new();
work.push_back(*robot.position());
while let Some(root) = work.pop_front() {
// perhaps robot.travel(&root, &gd)?
while root != *robot.position() {
match path_to(&gd, robot.position(), &root) {
Some(directions) => {
for d in directions {
let prev = *robot.position();
let (moved_to, _) = robot.try_move(d);
assert_ne!(prev, moved_to);
}
},
None => panic!("Cannot get from {:?} to {:?}", robot.position(), root),
}
}
let unexplored = Direction::all()
.map(|d| (d, root.step_in_direction(&d)))
.filter_map(|(d, p)| match gd.get(&p) {
Some(Tile::Unexplored) | None => Some((d, p)),
Some(_) => None,
})
.collect::<Vec<_>>();
for (d, target) in unexplored {
let (ended_up, tile) = robot.try_move(d);
if tile == Tile::Oxygen {
assert!(oxygen_at.is_none());
oxygen_at = Some(ended_up);
}
if target == ended_up {
gd.insert(&target, tile);
// push to the same side as we are popping will decrease the amount of running
// around on the map so maybe depth first?
work.push_front(target);
let (back_at, _) = robot.try_move(d.reverse());
assert_eq!(back_at, root);
} else {
gd.insert(&target, tile);
}
}
}
}
println!("oxygen at: {:?}", oxygen_at);
println!("robot moves: {}", robot.moves);
println!("stage1: {}", path_to(&gd, &( 0, 0), oxygen_at.as_ref().unwrap()).unwrap().len());
{
// stage2 is probably just a dfs from the oxygen, mark the coordinates and ... push all new
// marked ones to the queue?
let mut frontier = VecDeque::new();
let mut oxygen = HashSet::new();
oxygen.insert(oxygen_at.unwrap());
frontier.push_back((oxygen_at.unwrap(), 0));
let mut prev_time = 0;
let mut minutes = 0;
while let Some((p1, time)) = frontier.pop_front() {
oxygen.insert(p1);
if prev_time != time {
assert!(prev_time < time, "{} should be less than {}", prev_time, time);
prev_time = time;
minutes += 1;
println!("{:>3} minutes ... {} slots oxygenated", minutes, oxygen.len());
}
let unoxinated = Direction::all()
.map(|d| p1.step_in_direction(&d))
.filter_map(|p| match gd.get(&p) {
Some(Tile::Empty) => Some(p),
Some(_) | None => None,
})
.filter(|p| !oxygen.contains(&p))
.collect::<Vec<_>>();
for p2 in unoxinated {
frontier.push_back((p2, time + 1));
}
}
println!("stage2: {}", minutes);
}
}
/// Wasteful dijkstra ... could share the hashmaps across queries maybe?
fn | (gd: &GameDisplay<Tile>, pos: &(Word, Word), target: &(Word, Word)) -> Option<Vec<Direction>> {
//println!("path_to: {:?} to {:?}", pos, target);
use std::collections::{HashMap, BinaryHeap};
use std::collections::hash_map::Entry;
use std::cmp;
let mut ret = Vec::new();
let mut work = BinaryHeap::new();
let mut dist = HashMap::new();
let mut prev = HashMap::new();
work.push(cmp::Reverse((0, *pos)));
while let Some(cmp::Reverse((steps_here, p))) = work.pop() {
//println!("path_to: popped {:?}", (p, steps_here));
if p == *target {
//println!("path_to: found target {:?}", p);
let mut backwards = p;
ret.push(p);
while backwards != *pos {
let previous = prev.remove(&backwards).unwrap();
ret.push(previous);
backwards = previous;
}
ret.reverse();
let dirs = ret.windows(2)
.map(|slice| {
let a = slice[0];
let b = slice[1];
let d = (b.0 - a.0, b.1 - a.1);
match d {
( 0,-1) => Direction::Down,
( 0, 1) => Direction::Up,
(-1, 0) => Direction::Left,
( 1, 0) => Direction::Right,
x => unreachable!("cannot have this {:?} between {:?} and {:?}", x, a, b),
}
}).collect();
return Some(dirs);
}
match dist.entry(p) {
Entry::Vacant(vcnt) => {
vcnt.insert(steps_here);
},
Entry::Occupied(mut o) => {
if *o.get() >= steps_here {
*o.get_mut() = steps_here;
} else {
println!("already visited {:?} with lower dist {} than {} from {:?}", p, o.get(), steps_here, prev[&p]);
continue;
}
}
}
for (p2, dir) in adjacent(gd, &p) {
let alt = steps_here + 1;
if alt < *dist.get(&p2).unwrap_or(&usize::max_value()) {
//println!(" {:?} --{:?}--> {:?}", p, dir, p2);
dist.insert(p2, alt);
prev.insert(p2, p);
work.push(cmp::Reverse((alt, p2)));
}
}
}
None
}
#[test]
fn test_path_to() {
use Direction::*;
let mut gd: GameDisplay<Tile> = GameDisplay::default();
gd.insert(&(-1, 0), Tile::Wall);
gd.insert(&(-1,-1), Tile::Wall);
gd.insert(&( 0,-1), Tile::Wall);
gd.insert(&( 2, 0), Tile::Wall);
gd.insert(&( 2,-1), Tile::Wall);
gd.insert(&( 0, 0), Tile::Empty); // right
gd.insert(&( 1, 0), Tile::Empty); // down
gd.insert(&( 1, 1), Tile::Empty); // down
gd.insert(&( 1, 2), Tile::Empty); // down
gd.insert(&( 1, 3), Tile::Empty); // down
gd.insert(&( 1, 4), Tile::Empty); // down
gd.insert(&( 2, 4), Tile::Empty); // down
gd.insert(&( 3, 4), Tile::Empty); // down
gd.insert(&( 4, 4), Tile::Empty); // down
gd.insert(&( 4, 3), Tile::Empty); // down
gd.insert(&( 4, 2), Tile::Empty); // down
gd.insert(&( 4, 1), Tile::Empty); // down
gd.insert(&( 1,-1), Tile::Empty); // down
gd.insert(&( 1,-2), Tile::Empty); // down
gd.insert(&( 2,-2), Tile::Empty); // right
gd.insert(&( 3,-2), Tile::Empty); // right
gd.insert(&( 3,-1), Tile::Empty);
gd.insert(&( 3, 0), Tile::Empty);
gd.insert(&( 4, 0), Tile::Empty);
println!("{}", gd);
assert_eq!(vec![Right, Down, Down, Right, Right, Up, Up, Right], path_to(&gd, &( 0, 0), &( 4, 0)).unwrap());
}
fn adjacent<'a>(gd: &'a GameDisplay<Tile>, pos: &'a (Word, Word)) -> impl Iterator<Item = ((Word, Word), Direction)> + 'a {
Direction::all()
.into_iter()
.map(move |d| (pos.step_in_direction(&d), d))
.filter_map(move |(p2, d)| gd.get(&p2).map(|t| (p2, d, t)))
//.inspect(|x| println!(" c: {:?}", x))
.filter_map(|(p2, d, t)| match t {
&Tile::Empty | &Tile::Robot | &Tile::Oxygen => Some((p2, d)),
_ => None,
})
//.inspect(|x| println!(" d: {:?}", x))
}
struct Robot {
program: Program<'static>,
regs: Option<Registers>,
pos: (Word, Word),
moves: usize,
}
impl Robot {
fn new(data: Vec<Word>) -> Self {
let mem = intcode::Memory::from(data).with_memory_expansion();
let program = Program::from(mem);
Robot {
program,
regs: Some(Registers::default()),
pos: (0, 0),
moves: 0,
}
}
fn position(&self) -> &(Word, Word) {
&self.pos
}
fn try_move(&mut self, dir: Direction) -> ((Word, Word), Tile) {
loop {
let mut ret = None;
self.regs = Some(match self.program.eval_from_instruction(self.regs.take().unwrap()).unwrap() {
ExecutionState::HaltedAt(regs) => unreachable!("Halted at: {:?}", regs),
ExecutionState::Paused(regs) => unreachable!("Paused? {:?}", regs),
ExecutionState::InputIO(io) => {
let val: i64 = dir.into();
//println!("robot <-- {}", val);
self.program.handle_input_completion(io, val).unwrap()
},
ExecutionState::OutputIO(io, value) => {
//println!("robot --> {}", value);
let moved = value != 0;
let found = value == 2;
let tile = if found { Tile::Oxygen } else if moved { Tile::Empty } else { Tile::Wall };
let prev = self.pos;
if moved {
self.pos = self.pos.step_in_direction(&dir);
self.moves += 1;
}
// println!("robot movement from {:?} to {:?} ended up {:?}", prev, dir, self.pos);
ret = Some((self.pos, tile));
self.program.handle_output_completion(io)
},
});
if let Some((pos, tile)) = ret {
return (pos, tile);
}
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Direction {
Up,
Right,
Down,
Left
}
impl Into<Word> for Direction {
fn into(self) -> Word {
match self {
Direction::Up => 1,
Direction::Right => 3,
Direction::Down => 2,
Direction::Left => 4,
}
}
}
impl Direction {
fn all() -> impl Iterator<Item = Direction> {
use Direction::*;
[Up, Right, Down, Left].into_iter().copied()
}
fn reverse(&self) -> Direction {
use Direction::*;
match *self {
Up => Down,
Right => Left,
Down => Up,
Left => Right,
}
}
}
trait Coordinates {
fn step_in_direction(&self, dir: &Direction) -> Self;
}
impl Coordinates for (Word, Word) {
fn step_in_direction(&self, dir: &Direction) -> Self {
match *dir {
Direction::Up => (self.0, self.1 + 1),
Direction::Right => (self.0 + 1, self.1),
Direction::Down => (self.0, self.1 - 1),
Direction::Left => (self.0 - 1, self.1),
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Tile {
Wall,
Empty,
Oxygen,
Robot,
Unexplored
}
impl Default for Tile {
fn default() -> Self {
Tile::Unexplored
}
}
impl fmt::Display for Tile {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let ch = match *self {
Tile::Wall => '#',
Tile::Empty => ' ',
Tile::Oxygen => 'o',
Tile::Robot => 'R',
Tile::Unexplored => '?'
};
write!(fmt, "{}", ch)
}
}
| path_to | identifier_name |
main.rs | use std::fmt;
use std::collections::{VecDeque, HashSet};
use intcode::{Word, util::{parse_stdin_program, GameDisplay}, Program, Registers, ExecutionState};
fn main() {
let mut robot = Robot::new(parse_stdin_program());
let mut gd: GameDisplay<Tile> = GameDisplay::default();
gd.insert(robot.position(), Tile::Empty);
let mut oxygen_at = None;
{
let mut work = VecDeque::new();
work.push_back(*robot.position());
while let Some(root) = work.pop_front() {
// perhaps robot.travel(&root, &gd)?
while root != *robot.position() {
match path_to(&gd, robot.position(), &root) {
Some(directions) => {
for d in directions {
let prev = *robot.position();
let (moved_to, _) = robot.try_move(d);
assert_ne!(prev, moved_to);
}
},
None => panic!("Cannot get from {:?} to {:?}", robot.position(), root),
}
}
let unexplored = Direction::all()
.map(|d| (d, root.step_in_direction(&d)))
.filter_map(|(d, p)| match gd.get(&p) {
Some(Tile::Unexplored) | None => Some((d, p)),
Some(_) => None,
})
.collect::<Vec<_>>();
for (d, target) in unexplored {
let (ended_up, tile) = robot.try_move(d);
if tile == Tile::Oxygen {
assert!(oxygen_at.is_none());
oxygen_at = Some(ended_up);
}
if target == ended_up {
gd.insert(&target, tile);
// push to the same side as we are popping will decrease the amount of running
// around on the map so maybe depth first?
work.push_front(target);
let (back_at, _) = robot.try_move(d.reverse());
assert_eq!(back_at, root);
} else {
gd.insert(&target, tile);
}
}
}
}
println!("oxygen at: {:?}", oxygen_at);
println!("robot moves: {}", robot.moves);
println!("stage1: {}", path_to(&gd, &( 0, 0), oxygen_at.as_ref().unwrap()).unwrap().len());
{
// stage2 is probably just a dfs from the oxygen, mark the coordinates and ... push all new
// marked ones to the queue?
let mut frontier = VecDeque::new();
let mut oxygen = HashSet::new();
oxygen.insert(oxygen_at.unwrap());
frontier.push_back((oxygen_at.unwrap(), 0));
let mut prev_time = 0;
let mut minutes = 0;
while let Some((p1, time)) = frontier.pop_front() {
oxygen.insert(p1);
if prev_time != time {
assert!(prev_time < time, "{} should be less than {}", prev_time, time);
prev_time = time;
minutes += 1;
println!("{:>3} minutes ... {} slots oxygenated", minutes, oxygen.len());
}
let unoxinated = Direction::all()
.map(|d| p1.step_in_direction(&d))
.filter_map(|p| match gd.get(&p) {
Some(Tile::Empty) => Some(p),
Some(_) | None => None,
})
.filter(|p| !oxygen.contains(&p))
.collect::<Vec<_>>();
for p2 in unoxinated {
frontier.push_back((p2, time + 1));
}
}
println!("stage2: {}", minutes);
}
}
/// Wasteful dijkstra ... could share the hashmaps across queries maybe?
fn path_to(gd: &GameDisplay<Tile>, pos: &(Word, Word), target: &(Word, Word)) -> Option<Vec<Direction>> {
//println!("path_to: {:?} to {:?}", pos, target);
use std::collections::{HashMap, BinaryHeap};
use std::collections::hash_map::Entry;
use std::cmp;
let mut ret = Vec::new();
let mut work = BinaryHeap::new();
let mut dist = HashMap::new();
let mut prev = HashMap::new();
work.push(cmp::Reverse((0, *pos)));
while let Some(cmp::Reverse((steps_here, p))) = work.pop() {
//println!("path_to: popped {:?}", (p, steps_here));
if p == *target {
//println!("path_to: found target {:?}", p);
let mut backwards = p;
ret.push(p);
while backwards != *pos {
let previous = prev.remove(&backwards).unwrap();
ret.push(previous);
backwards = previous;
}
ret.reverse();
let dirs = ret.windows(2)
.map(|slice| {
let a = slice[0];
let b = slice[1];
let d = (b.0 - a.0, b.1 - a.1);
match d {
( 0,-1) => Direction::Down,
( 0, 1) => Direction::Up,
(-1, 0) => Direction::Left,
( 1, 0) => Direction::Right,
x => unreachable!("cannot have this {:?} between {:?} and {:?}", x, a, b),
}
}).collect();
return Some(dirs);
}
match dist.entry(p) {
Entry::Vacant(vcnt) => {
vcnt.insert(steps_here);
},
Entry::Occupied(mut o) => {
if *o.get() >= steps_here {
*o.get_mut() = steps_here;
} else {
println!("already visited {:?} with lower dist {} than {} from {:?}", p, o.get(), steps_here, prev[&p]);
continue;
} |
if alt < *dist.get(&p2).unwrap_or(&usize::max_value()) {
//println!(" {:?} --{:?}--> {:?}", p, dir, p2);
dist.insert(p2, alt);
prev.insert(p2, p);
work.push(cmp::Reverse((alt, p2)));
}
}
}
None
}
#[test]
fn test_path_to() {
use Direction::*;
let mut gd: GameDisplay<Tile> = GameDisplay::default();
gd.insert(&(-1, 0), Tile::Wall);
gd.insert(&(-1,-1), Tile::Wall);
gd.insert(&( 0,-1), Tile::Wall);
gd.insert(&( 2, 0), Tile::Wall);
gd.insert(&( 2,-1), Tile::Wall);
gd.insert(&( 0, 0), Tile::Empty); // right
gd.insert(&( 1, 0), Tile::Empty); // down
gd.insert(&( 1, 1), Tile::Empty); // down
gd.insert(&( 1, 2), Tile::Empty); // down
gd.insert(&( 1, 3), Tile::Empty); // down
gd.insert(&( 1, 4), Tile::Empty); // down
gd.insert(&( 2, 4), Tile::Empty); // down
gd.insert(&( 3, 4), Tile::Empty); // down
gd.insert(&( 4, 4), Tile::Empty); // down
gd.insert(&( 4, 3), Tile::Empty); // down
gd.insert(&( 4, 2), Tile::Empty); // down
gd.insert(&( 4, 1), Tile::Empty); // down
gd.insert(&( 1,-1), Tile::Empty); // down
gd.insert(&( 1,-2), Tile::Empty); // down
gd.insert(&( 2,-2), Tile::Empty); // right
gd.insert(&( 3,-2), Tile::Empty); // right
gd.insert(&( 3,-1), Tile::Empty);
gd.insert(&( 3, 0), Tile::Empty);
gd.insert(&( 4, 0), Tile::Empty);
println!("{}", gd);
assert_eq!(vec![Right, Down, Down, Right, Right, Up, Up, Right], path_to(&gd, &( 0, 0), &( 4, 0)).unwrap());
}
fn adjacent<'a>(gd: &'a GameDisplay<Tile>, pos: &'a (Word, Word)) -> impl Iterator<Item = ((Word, Word), Direction)> + 'a {
Direction::all()
.into_iter()
.map(move |d| (pos.step_in_direction(&d), d))
.filter_map(move |(p2, d)| gd.get(&p2).map(|t| (p2, d, t)))
//.inspect(|x| println!(" c: {:?}", x))
.filter_map(|(p2, d, t)| match t {
&Tile::Empty | &Tile::Robot | &Tile::Oxygen => Some((p2, d)),
_ => None,
})
//.inspect(|x| println!(" d: {:?}", x))
}
struct Robot {
program: Program<'static>,
regs: Option<Registers>,
pos: (Word, Word),
moves: usize,
}
impl Robot {
fn new(data: Vec<Word>) -> Self {
let mem = intcode::Memory::from(data).with_memory_expansion();
let program = Program::from(mem);
Robot {
program,
regs: Some(Registers::default()),
pos: (0, 0),
moves: 0,
}
}
fn position(&self) -> &(Word, Word) {
&self.pos
}
fn try_move(&mut self, dir: Direction) -> ((Word, Word), Tile) {
loop {
let mut ret = None;
self.regs = Some(match self.program.eval_from_instruction(self.regs.take().unwrap()).unwrap() {
ExecutionState::HaltedAt(regs) => unreachable!("Halted at: {:?}", regs),
ExecutionState::Paused(regs) => unreachable!("Paused? {:?}", regs),
ExecutionState::InputIO(io) => {
let val: i64 = dir.into();
//println!("robot <-- {}", val);
self.program.handle_input_completion(io, val).unwrap()
},
ExecutionState::OutputIO(io, value) => {
//println!("robot --> {}", value);
let moved = value != 0;
let found = value == 2;
let tile = if found { Tile::Oxygen } else if moved { Tile::Empty } else { Tile::Wall };
let prev = self.pos;
if moved {
self.pos = self.pos.step_in_direction(&dir);
self.moves += 1;
}
// println!("robot movement from {:?} to {:?} ended up {:?}", prev, dir, self.pos);
ret = Some((self.pos, tile));
self.program.handle_output_completion(io)
},
});
if let Some((pos, tile)) = ret {
return (pos, tile);
}
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Direction {
Up,
Right,
Down,
Left
}
impl Into<Word> for Direction {
fn into(self) -> Word {
match self {
Direction::Up => 1,
Direction::Right => 3,
Direction::Down => 2,
Direction::Left => 4,
}
}
}
impl Direction {
fn all() -> impl Iterator<Item = Direction> {
use Direction::*;
[Up, Right, Down, Left].into_iter().copied()
}
fn reverse(&self) -> Direction {
use Direction::*;
match *self {
Up => Down,
Right => Left,
Down => Up,
Left => Right,
}
}
}
trait Coordinates {
fn step_in_direction(&self, dir: &Direction) -> Self;
}
impl Coordinates for (Word, Word) {
fn step_in_direction(&self, dir: &Direction) -> Self {
match *dir {
Direction::Up => (self.0, self.1 + 1),
Direction::Right => (self.0 + 1, self.1),
Direction::Down => (self.0, self.1 - 1),
Direction::Left => (self.0 - 1, self.1),
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Tile {
Wall,
Empty,
Oxygen,
Robot,
Unexplored
}
impl Default for Tile {
fn default() -> Self {
Tile::Unexplored
}
}
impl fmt::Display for Tile {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let ch = match *self {
Tile::Wall => '#',
Tile::Empty => ' ',
Tile::Oxygen => 'o',
Tile::Robot => 'R',
Tile::Unexplored => '?'
};
write!(fmt, "{}", ch)
}
} | }
}
for (p2, dir) in adjacent(gd, &p) {
let alt = steps_here + 1; | random_line_split |
booting.py | import logging
import time
import traceback
import warnings
import boardfarm.lib.booting
import boardfarm.lib.voice
from boardfarm.devices.debian_lan import DebianLAN
from boardfarm.exceptions import (
BootFail,
CodeError,
DeviceDoesNotExistError,
NoTFTPServer,
)
from boardfarm.lib.booting_utils import check_and_connect_to_wifi
from boardfarm.library import check_devices
from packaging.version import Version
from termcolor import colored
from boardfarm_docsis.devices.base_devices.board import DocsisCPE
from boardfarm_docsis.lib.booting_utils import (
activate_mitm,
register_fxs_details,
set_static_ip_and_default_gw,
)
from boardfarm_docsis.lib.dns_helper import dns_acs_config
from boardfarm_docsis.use_cases.cmts_interactions import is_bpi_privacy_disabled
from boardfarm_docsis.use_cases.provision_helper import provision_board
logger = logging.getLogger("bft")
def pre_boot_wan_clients(config, env_helper, devices):
if env_helper.get_dns_dict():
# to get reachable and unreachable ips for ACS DNS
devices.wan.auth_dns = True
dns_acs_config(devices, env_helper.get_dns_dict())
tftp_device, tftp_servers = boardfarm.lib.booting.get_tftp(config)
if not tftp_servers:
logger.error(colored("No tftp server found", color="red", attrs=["bold"]))
# currently we must have at least 1 tftp server configured
raise NoTFTPServer
if len(tftp_servers) > 1:
msg = f"Found more than 1 tftp server: {tftp_servers}, using {tftp_device.name}"
logger.error(colored(msg, color="red", attrs=["bold"]))
raise CodeError(msg)
# should we run configure for all the wan devices? or just wan?
for x in devices:
# if isinstance(x, DebianWAN): # does not work for mitm
if hasattr(x, "name") and "wan" in x.name:
logger.info(f"Configuring {x.name}")
x.configure(config=config)
# if more than 1 tftp server should we start them all?
# currently starting the 1 being used
logger.info(f"Starting TFTP server on {tftp_device.name}")
tftp_device.start_tftp_server()
devices.board.tftp_device = tftp_device
def pre_boot_lan_clients(config, env_helper, devices):
for x in devices.lan_clients:
if isinstance(x, DebianLAN):
logger.info(f"Configuring {x.name}")
x.configure()
def pre_boot_wlan_clients(config, env_helper, devices):
for x in getattr(devices, "wlan_clients", []):
logger.info(f"Configuring {x.name}")
x.configure()
def pre_boot_board(config, env_helper, devices):
env_cwmp_v = env_helper.get_cwmp_version()
if env_cwmp_v:
assert Version(env_cwmp_v) == Version(
devices.board.cwmp_version()
), f"CWMP version mismatch, Expected version {env_cwmp_v}"
def pre_boot_env(config, env_helper, devices):
# this should take care of provisioner/tr069/voice/etc
# depending on what the env_helperd has configured
if env_helper.mitm_enabled() and not hasattr(devices, "mitm"):
raise DeviceDoesNotExistError("No mitm device (requested by environment)")
cm_boot_file = None
mta_boot_file = None
if env_helper.has_board_boot_file():
cm_boot_file = env_helper.get_board_boot_file()
if env_helper.has_board_boot_file_mta():
mta_boot_file = env_helper.get_board_boot_file_mta()
devices.board.env_config(cm_boot_file, mta_boot_file, devices.board.mibs_path)
if env_helper.voice_enabled():
dev_list = [
devices.sipcenter,
devices.softphone,
]
if env_helper.get_external_voip():
dev_list.append(devices.softphone2)
dev_list.append(getattr(devices, "FXS", [devices.lan, devices.lan2]))
boardfarm.lib.voice.voice_configure(
dev_list,
devices.sipcenter,
config,
)
prov = getattr(config, "provisioner", None)
if prov:
if env_helper.vendor_encap_opts(ip_proto="ipv4"):
devices.provisioner.vendor_opts_acsv4_url = True
if env_helper.vendor_encap_opts(ip_proto="ipv6"):
devices.provisioner.vendor_opts_acsv6_url = True
devices.provisioner.valid_route_gateway = env_helper.is_route_gateway_valid()
logger.info("Provisioning board")
provision_board()
else:
# should this be an error?
logger.error(
colored(
"No provisioner found! Board provisioned skipped",
color="yellow",
attrs=["bold"],
)
)
if hasattr(devices.board, "pre_boot_env"):
devices.board.pre_boot_env(env_helper)
pre_boot_actions = {
"wan_clients_pre_boot": pre_boot_wan_clients,
"lan_clients_pre_boot": pre_boot_lan_clients,
"wlan_clients_pre_boot": pre_boot_wlan_clients,
"board_pre_boot": pre_boot_board,
"environment_pre_boot": pre_boot_env,
}
def boot_board(config, env_helper, devices):
try:
devices.board.reset()
if env_helper.get_software():
devices.board.flash(env_helper)
# store the timestamp, for uptime check later (in case the board
# crashes on boot)
devices.board.__reset__timestamp = time.time()
devices.cmts.clear_cm_reset(devices.board.cm_mac)
time.sleep(20)
except Exception as e:
logger.critical(colored("\n\nFailed to Boot", color="red", attrs=["bold"]))
logger.error(e)
raise BootFail
boot_actions = {"board_boot": boot_board}
def _wait_for_cm_online(config, env_helper, devices):
for _ in range(180):
if (
devices.cmts.is_cm_online(
ignore_bpi=is_bpi_privacy_disabled(), ignore_partial=True
)
is False
):
# show the arm prompt as it is a log in itself
devices.board.touch()
time.sleep(15)
continue
if devices.board.finalize_boot():
break
logger.info("######Rebooting######")
devices.cmts.clear_cm_reset(devices.board.cm_mac)
time.sleep(20)
else:
msg = "\n\nFailed to Boot: board not online on CMTS"
logger.critical(msg)
raise BootFail(msg)
def post_boot_board(config, env_helper, devices):
try:
_wait_for_cm_online(config, env_helper, devices)
except BootFail:
logger.warning(
colored(
"Board not online on CMTS, resetting and retrying",
color="yellow",
attrs=["bold"],
)
)
devices.board.reset()
devices.cmts.clear_cm_reset(devices.board.cm_mac)
_wait_for_cm_online(config, env_helper, devices)
devices.board.post_boot_init()
board_uptime = devices.board.get_seconds_uptime()
logger.info(f"Time up: {board_uptime}")
if hasattr(devices.board, "__reset__timestamp"):
time_elapsed = time.time() - devices.board.__reset__timestamp
logger.info(f"Time since reboot: {time_elapsed}")
if time_elapsed < board_uptime:
# TODO: the following should be an exception and not
# just a print!!!!
logger.warning("Error: possibly the board did not reset!")
if (time_elapsed - board_uptime) > 60:
logger.warning(
colored(
"Board may have rebooted multiple times after flashing process",
color="yellow",
attrs=["bold"],
)
)
if isinstance(devices.board, DocsisCPE):
pass # maybe new method to be added
else:
# the old way for legacy
devices.board.check_valid_docsis_ip_networking(strict=False)
def post_boot_wan_clients(config, env_helper, devices):
pass
def post_boot_lan_clients(config, env_helper, devices):
for i, v in enumerate(devices.board.dev.lan_clients):
if getattr(env_helper, "has_lan_advertise_identity", None):
for option in ["125", "17"]:
if env_helper.has_lan_advertise_identity(i):
v.configure_dhclient(([option, True],))
else:
v.configure_dhclient(([option, False],))
if devices.board.routing and config.setup_device_networking:
for idx, x in enumerate(devices.board.dev.lan_clients):
if isinstance(x, DebianLAN): # should this use devices.lan_clients?
logger.info(f"Starting LAN client on {x.name}")
for n in range(3):
try:
x.configure_docker_iface()
if env_helper.get_prov_mode() == "ipv6":
x.start_ipv6_lan_client(wan_gw=devices.wan.gw)
if (
devices.board.cm_cfg.dslite
and env_helper.is_dhcpv4_enabled_on_lan()
):
x.start_ipv4_lan_client(wan_gw=devices.wan.gw)
elif env_helper.get_prov_mode() == "dual":
x.start_ipv6_lan_client(wan_gw=devices.wan.gw)
if env_helper.is_dhcpv4_enabled_on_lan():
x.start_ipv4_lan_client(wan_gw=devices.wan.gw)
elif env_helper.is_dhcpv4_enabled_on_lan():
x.start_ipv4_lan_client(wan_gw=devices.wan.gw)
x.configure_proxy_pkgs()
if env_helper.get_prov_mode() in [
"ipv4",
"dual",
] and env_helper.is_set_static_ipv4(idx):
# set static ip address
set_static_ip_and_default_gw(
client=x,
)
break
except Exception as e:
logger.warning(e)
logger.error(
colored(
f"Failed to start lan client on '{x.name}' device, attempt #{n}",
color="red",
attrs=["bold"],
)
)
time.sleep(10)
else:
msg = f"Failed to start lan client on {x.name}"
logger.warning(colored(msg, color="yellow", attrs=["bold"]))
# do not fail the boot with raise BootFail(msg)
# reason: the board config may be such that the
# clients are not getting an ip (see LLCs)
def post_boot_wlan_clients(config, env_helper, devices):
wifi_clients = env_helper.wifi_clients()
if wifi_clients:
# Register all wifi clients in wifi manager
for client in wifi_clients:
|
# Start to connect all clients after registartions done:
for client in wifi_clients:
check_and_connect_to_wifi(devices, client)
logger.info(colored("\nWlan clients:", color="green"))
devices.wlan_clients.registered_clients_summary()
def post_boot_env(config, env_helper, devices):
if env_helper.mitm_enabled():
activate_mitm(devices, env_helper.get_mitm_devices())
eMTA_iface_status = env_helper.get_emta_interface_status()
if eMTA_iface_status:
devices.board.set_eMTA_interface(devices.board.mta_iface, eMTA_iface_status)
if env_helper.voice_enabled() and eMTA_iface_status != "down":
devices.board.wait_for_mta_provisioning()
register_fxs_details(getattr(devices, "FXS", []), devices.board)
cfg = env_helper.get_prov_mode()
tr069check = cfg not in ["disabled", "bridge", "none"]
tr069provision = env_helper.get_tr069_provisioning()
if tr069check:
for _ in range(20):
try:
devices.board.get_cpeid()
break
except Exception as e:
logger.error(e)
warnings.warn("Failed to connect to ACS, retrying")
time.sleep(10)
else:
raise BootFail("Failed to connect to ACS")
if tr069provision:
reset_val = any(
x in env_helper.get_software()
for x in [
"factory_reset",
"pre_flash_factory_reset",
]
)
if reset_val:
for i in tr069provision:
for acs_api in i:
API_func = getattr(devices.acs_server, acs_api)
for param in i[acs_api]:
API_func(param)
else:
raise BootFail(
"Factory reset has to performed for tr069 provisioning. Env json with factory reset true should be used."
)
if hasattr(devices.board, "post_boot_env"):
devices.board.post_boot_env()
post_boot_actions = {
"board_post_boot": post_boot_board,
"wan_clients_post_boot": post_boot_wan_clients,
"lan_clients_post_boot": post_boot_lan_clients,
"environment_post_boot": post_boot_env,
"wlan_clients_connection": post_boot_wlan_clients,
}
def run_actions(actions_dict, actions_name, *args, **kwargs):
logger.info(colored(f"{actions_name} ACTIONS", color="green", attrs=["bold"]))
for key, func in actions_dict.items():
try:
logger.info(colored(f"Action {key} start", color="green", attrs=["bold"]))
start_time = time.time()
func(*args, **kwargs)
logger.info(
colored(
f"\nAction {key} completed. Took {int(time.time() - start_time)} seconds to complete.",
color="green",
attrs=["bold"],
)
)
except Exception as e:
msg = f"\nFailed at: {actions_name}: {key} after {int(time.time() - start_time)} seconds with exception {e}"
logger.error(colored(msg, color="red", attrs=["bold"]))
raise e
logger.info(colored(f"{actions_name} COMPLETED", color="green", attrs=["bold"]))
def boot(config, env_helper, devices, logged=None, actions_list=None):
start_time = time.time()
if not actions_list:
actions_list = ["pre", "boot", "post"]
try:
if "pre" in actions_list:
run_actions(pre_boot_actions, "PRE-BOOT", config, env_helper, devices)
if "boot" in actions_list:
run_actions(boot_actions, "BOOT", config, env_helper, devices)
if "post" in actions_list:
run_actions(post_boot_actions, "POST-BOOT", config, env_helper, devices)
logger.info(
colored(
f"Boot completed in {int(time.time() - start_time)} seconds.",
color="green",
attrs=["bold"],
)
)
except Exception:
traceback.print_exc()
check_devices(devices)
logger.info(
colored(
f"Boot failed after {int(time.time() - start_time)} seconds.",
color="red",
attrs=["bold"],
)
)
raise
| devices.wlan_clients.register(client) | conditional_block |
booting.py | import logging
import time
import traceback
import warnings
import boardfarm.lib.booting
import boardfarm.lib.voice
from boardfarm.devices.debian_lan import DebianLAN
from boardfarm.exceptions import (
BootFail,
CodeError,
DeviceDoesNotExistError,
NoTFTPServer,
)
from boardfarm.lib.booting_utils import check_and_connect_to_wifi
from boardfarm.library import check_devices
from packaging.version import Version
from termcolor import colored
from boardfarm_docsis.devices.base_devices.board import DocsisCPE
from boardfarm_docsis.lib.booting_utils import (
activate_mitm,
register_fxs_details,
set_static_ip_and_default_gw,
)
from boardfarm_docsis.lib.dns_helper import dns_acs_config
from boardfarm_docsis.use_cases.cmts_interactions import is_bpi_privacy_disabled
from boardfarm_docsis.use_cases.provision_helper import provision_board
logger = logging.getLogger("bft")
def pre_boot_wan_clients(config, env_helper, devices):
if env_helper.get_dns_dict():
# to get reachable and unreachable ips for ACS DNS
devices.wan.auth_dns = True
dns_acs_config(devices, env_helper.get_dns_dict())
tftp_device, tftp_servers = boardfarm.lib.booting.get_tftp(config)
if not tftp_servers:
logger.error(colored("No tftp server found", color="red", attrs=["bold"]))
# currently we must have at least 1 tftp server configured
raise NoTFTPServer
if len(tftp_servers) > 1:
msg = f"Found more than 1 tftp server: {tftp_servers}, using {tftp_device.name}"
logger.error(colored(msg, color="red", attrs=["bold"]))
raise CodeError(msg)
# should we run configure for all the wan devices? or just wan?
for x in devices:
# if isinstance(x, DebianWAN): # does not work for mitm
if hasattr(x, "name") and "wan" in x.name:
logger.info(f"Configuring {x.name}")
x.configure(config=config)
# if more than 1 tftp server should we start them all?
# currently starting the 1 being used
logger.info(f"Starting TFTP server on {tftp_device.name}")
tftp_device.start_tftp_server()
devices.board.tftp_device = tftp_device
def pre_boot_lan_clients(config, env_helper, devices):
for x in devices.lan_clients:
if isinstance(x, DebianLAN):
logger.info(f"Configuring {x.name}")
x.configure()
def pre_boot_wlan_clients(config, env_helper, devices):
for x in getattr(devices, "wlan_clients", []):
logger.info(f"Configuring {x.name}")
x.configure()
def pre_boot_board(config, env_helper, devices):
env_cwmp_v = env_helper.get_cwmp_version()
if env_cwmp_v:
assert Version(env_cwmp_v) == Version(
devices.board.cwmp_version()
), f"CWMP version mismatch, Expected version {env_cwmp_v}"
def pre_boot_env(config, env_helper, devices):
# this should take care of provisioner/tr069/voice/etc
# depending on what the env_helperd has configured
if env_helper.mitm_enabled() and not hasattr(devices, "mitm"):
raise DeviceDoesNotExistError("No mitm device (requested by environment)")
cm_boot_file = None
mta_boot_file = None
if env_helper.has_board_boot_file():
cm_boot_file = env_helper.get_board_boot_file()
if env_helper.has_board_boot_file_mta():
mta_boot_file = env_helper.get_board_boot_file_mta()
devices.board.env_config(cm_boot_file, mta_boot_file, devices.board.mibs_path)
if env_helper.voice_enabled():
dev_list = [
devices.sipcenter,
devices.softphone,
]
if env_helper.get_external_voip():
dev_list.append(devices.softphone2)
dev_list.append(getattr(devices, "FXS", [devices.lan, devices.lan2]))
boardfarm.lib.voice.voice_configure(
dev_list,
devices.sipcenter,
config,
)
prov = getattr(config, "provisioner", None)
if prov:
if env_helper.vendor_encap_opts(ip_proto="ipv4"):
devices.provisioner.vendor_opts_acsv4_url = True
if env_helper.vendor_encap_opts(ip_proto="ipv6"):
devices.provisioner.vendor_opts_acsv6_url = True
devices.provisioner.valid_route_gateway = env_helper.is_route_gateway_valid()
logger.info("Provisioning board")
provision_board()
else:
# should this be an error?
logger.error(
colored(
"No provisioner found! Board provisioned skipped",
color="yellow",
attrs=["bold"],
)
)
if hasattr(devices.board, "pre_boot_env"):
devices.board.pre_boot_env(env_helper)
pre_boot_actions = {
"wan_clients_pre_boot": pre_boot_wan_clients,
"lan_clients_pre_boot": pre_boot_lan_clients,
"wlan_clients_pre_boot": pre_boot_wlan_clients,
"board_pre_boot": pre_boot_board,
"environment_pre_boot": pre_boot_env,
}
def boot_board(config, env_helper, devices):
try:
devices.board.reset()
if env_helper.get_software():
devices.board.flash(env_helper)
# store the timestamp, for uptime check later (in case the board
# crashes on boot)
devices.board.__reset__timestamp = time.time()
devices.cmts.clear_cm_reset(devices.board.cm_mac)
time.sleep(20)
except Exception as e:
logger.critical(colored("\n\nFailed to Boot", color="red", attrs=["bold"]))
logger.error(e)
raise BootFail
boot_actions = {"board_boot": boot_board}
def _wait_for_cm_online(config, env_helper, devices):
for _ in range(180):
if (
devices.cmts.is_cm_online(
ignore_bpi=is_bpi_privacy_disabled(), ignore_partial=True
)
is False
):
# show the arm prompt as it is a log in itself
devices.board.touch()
time.sleep(15)
continue
if devices.board.finalize_boot():
break
logger.info("######Rebooting######")
devices.cmts.clear_cm_reset(devices.board.cm_mac)
time.sleep(20)
else:
msg = "\n\nFailed to Boot: board not online on CMTS"
logger.critical(msg)
raise BootFail(msg)
def post_boot_board(config, env_helper, devices):
try:
_wait_for_cm_online(config, env_helper, devices)
except BootFail:
logger.warning(
colored(
"Board not online on CMTS, resetting and retrying",
color="yellow",
attrs=["bold"],
)
)
devices.board.reset()
devices.cmts.clear_cm_reset(devices.board.cm_mac)
_wait_for_cm_online(config, env_helper, devices)
devices.board.post_boot_init()
board_uptime = devices.board.get_seconds_uptime()
logger.info(f"Time up: {board_uptime}")
if hasattr(devices.board, "__reset__timestamp"):
time_elapsed = time.time() - devices.board.__reset__timestamp
logger.info(f"Time since reboot: {time_elapsed}")
if time_elapsed < board_uptime:
# TODO: the following should be an exception and not
# just a print!!!!
logger.warning("Error: possibly the board did not reset!")
if (time_elapsed - board_uptime) > 60:
logger.warning(
colored(
"Board may have rebooted multiple times after flashing process",
color="yellow",
attrs=["bold"],
)
)
if isinstance(devices.board, DocsisCPE):
pass # maybe new method to be added
else:
# the old way for legacy
devices.board.check_valid_docsis_ip_networking(strict=False)
def post_boot_wan_clients(config, env_helper, devices):
pass
def post_boot_lan_clients(config, env_helper, devices):
for i, v in enumerate(devices.board.dev.lan_clients):
if getattr(env_helper, "has_lan_advertise_identity", None):
for option in ["125", "17"]:
if env_helper.has_lan_advertise_identity(i):
v.configure_dhclient(([option, True],))
else:
v.configure_dhclient(([option, False],))
if devices.board.routing and config.setup_device_networking:
for idx, x in enumerate(devices.board.dev.lan_clients):
if isinstance(x, DebianLAN): # should this use devices.lan_clients?
logger.info(f"Starting LAN client on {x.name}")
for n in range(3):
try:
x.configure_docker_iface()
if env_helper.get_prov_mode() == "ipv6":
x.start_ipv6_lan_client(wan_gw=devices.wan.gw)
if (
devices.board.cm_cfg.dslite
and env_helper.is_dhcpv4_enabled_on_lan()
):
x.start_ipv4_lan_client(wan_gw=devices.wan.gw)
elif env_helper.get_prov_mode() == "dual":
x.start_ipv6_lan_client(wan_gw=devices.wan.gw)
if env_helper.is_dhcpv4_enabled_on_lan():
x.start_ipv4_lan_client(wan_gw=devices.wan.gw)
elif env_helper.is_dhcpv4_enabled_on_lan():
x.start_ipv4_lan_client(wan_gw=devices.wan.gw)
x.configure_proxy_pkgs()
if env_helper.get_prov_mode() in [
"ipv4",
"dual",
] and env_helper.is_set_static_ipv4(idx):
# set static ip address
set_static_ip_and_default_gw(
client=x,
)
break
except Exception as e:
logger.warning(e)
logger.error(
colored(
f"Failed to start lan client on '{x.name}' device, attempt #{n}",
color="red",
attrs=["bold"],
)
)
time.sleep(10)
else:
msg = f"Failed to start lan client on {x.name}"
logger.warning(colored(msg, color="yellow", attrs=["bold"]))
# do not fail the boot with raise BootFail(msg)
# reason: the board config may be such that the
# clients are not getting an ip (see LLCs)
def post_boot_wlan_clients(config, env_helper, devices):
wifi_clients = env_helper.wifi_clients()
if wifi_clients:
# Register all wifi clients in wifi manager
for client in wifi_clients:
devices.wlan_clients.register(client)
# Start to connect all clients after registartions done:
for client in wifi_clients:
check_and_connect_to_wifi(devices, client)
logger.info(colored("\nWlan clients:", color="green"))
devices.wlan_clients.registered_clients_summary()
def post_boot_env(config, env_helper, devices):
if env_helper.mitm_enabled():
activate_mitm(devices, env_helper.get_mitm_devices())
eMTA_iface_status = env_helper.get_emta_interface_status()
if eMTA_iface_status:
devices.board.set_eMTA_interface(devices.board.mta_iface, eMTA_iface_status)
if env_helper.voice_enabled() and eMTA_iface_status != "down":
devices.board.wait_for_mta_provisioning()
register_fxs_details(getattr(devices, "FXS", []), devices.board)
cfg = env_helper.get_prov_mode()
tr069check = cfg not in ["disabled", "bridge", "none"]
tr069provision = env_helper.get_tr069_provisioning()
if tr069check:
for _ in range(20):
try:
devices.board.get_cpeid()
break
except Exception as e:
logger.error(e)
warnings.warn("Failed to connect to ACS, retrying")
time.sleep(10)
else:
raise BootFail("Failed to connect to ACS")
if tr069provision:
reset_val = any(
x in env_helper.get_software()
for x in [
"factory_reset",
"pre_flash_factory_reset",
]
)
if reset_val:
for i in tr069provision:
for acs_api in i:
API_func = getattr(devices.acs_server, acs_api)
for param in i[acs_api]:
API_func(param)
else:
raise BootFail(
"Factory reset has to performed for tr069 provisioning. Env json with factory reset true should be used."
)
if hasattr(devices.board, "post_boot_env"):
devices.board.post_boot_env()
post_boot_actions = {
"board_post_boot": post_boot_board,
"wan_clients_post_boot": post_boot_wan_clients,
"lan_clients_post_boot": post_boot_lan_clients,
"environment_post_boot": post_boot_env,
"wlan_clients_connection": post_boot_wlan_clients,
}
def | (actions_dict, actions_name, *args, **kwargs):
logger.info(colored(f"{actions_name} ACTIONS", color="green", attrs=["bold"]))
for key, func in actions_dict.items():
try:
logger.info(colored(f"Action {key} start", color="green", attrs=["bold"]))
start_time = time.time()
func(*args, **kwargs)
logger.info(
colored(
f"\nAction {key} completed. Took {int(time.time() - start_time)} seconds to complete.",
color="green",
attrs=["bold"],
)
)
except Exception as e:
msg = f"\nFailed at: {actions_name}: {key} after {int(time.time() - start_time)} seconds with exception {e}"
logger.error(colored(msg, color="red", attrs=["bold"]))
raise e
logger.info(colored(f"{actions_name} COMPLETED", color="green", attrs=["bold"]))
def boot(config, env_helper, devices, logged=None, actions_list=None):
start_time = time.time()
if not actions_list:
actions_list = ["pre", "boot", "post"]
try:
if "pre" in actions_list:
run_actions(pre_boot_actions, "PRE-BOOT", config, env_helper, devices)
if "boot" in actions_list:
run_actions(boot_actions, "BOOT", config, env_helper, devices)
if "post" in actions_list:
run_actions(post_boot_actions, "POST-BOOT", config, env_helper, devices)
logger.info(
colored(
f"Boot completed in {int(time.time() - start_time)} seconds.",
color="green",
attrs=["bold"],
)
)
except Exception:
traceback.print_exc()
check_devices(devices)
logger.info(
colored(
f"Boot failed after {int(time.time() - start_time)} seconds.",
color="red",
attrs=["bold"],
)
)
raise
| run_actions | identifier_name |
booting.py | import logging | import boardfarm.lib.booting
import boardfarm.lib.voice
from boardfarm.devices.debian_lan import DebianLAN
from boardfarm.exceptions import (
BootFail,
CodeError,
DeviceDoesNotExistError,
NoTFTPServer,
)
from boardfarm.lib.booting_utils import check_and_connect_to_wifi
from boardfarm.library import check_devices
from packaging.version import Version
from termcolor import colored
from boardfarm_docsis.devices.base_devices.board import DocsisCPE
from boardfarm_docsis.lib.booting_utils import (
activate_mitm,
register_fxs_details,
set_static_ip_and_default_gw,
)
from boardfarm_docsis.lib.dns_helper import dns_acs_config
from boardfarm_docsis.use_cases.cmts_interactions import is_bpi_privacy_disabled
from boardfarm_docsis.use_cases.provision_helper import provision_board
logger = logging.getLogger("bft")
def pre_boot_wan_clients(config, env_helper, devices):
if env_helper.get_dns_dict():
# to get reachable and unreachable ips for ACS DNS
devices.wan.auth_dns = True
dns_acs_config(devices, env_helper.get_dns_dict())
tftp_device, tftp_servers = boardfarm.lib.booting.get_tftp(config)
if not tftp_servers:
logger.error(colored("No tftp server found", color="red", attrs=["bold"]))
# currently we must have at least 1 tftp server configured
raise NoTFTPServer
if len(tftp_servers) > 1:
msg = f"Found more than 1 tftp server: {tftp_servers}, using {tftp_device.name}"
logger.error(colored(msg, color="red", attrs=["bold"]))
raise CodeError(msg)
# should we run configure for all the wan devices? or just wan?
for x in devices:
# if isinstance(x, DebianWAN): # does not work for mitm
if hasattr(x, "name") and "wan" in x.name:
logger.info(f"Configuring {x.name}")
x.configure(config=config)
# if more than 1 tftp server should we start them all?
# currently starting the 1 being used
logger.info(f"Starting TFTP server on {tftp_device.name}")
tftp_device.start_tftp_server()
devices.board.tftp_device = tftp_device
def pre_boot_lan_clients(config, env_helper, devices):
for x in devices.lan_clients:
if isinstance(x, DebianLAN):
logger.info(f"Configuring {x.name}")
x.configure()
def pre_boot_wlan_clients(config, env_helper, devices):
for x in getattr(devices, "wlan_clients", []):
logger.info(f"Configuring {x.name}")
x.configure()
def pre_boot_board(config, env_helper, devices):
env_cwmp_v = env_helper.get_cwmp_version()
if env_cwmp_v:
assert Version(env_cwmp_v) == Version(
devices.board.cwmp_version()
), f"CWMP version mismatch, Expected version {env_cwmp_v}"
def pre_boot_env(config, env_helper, devices):
# this should take care of provisioner/tr069/voice/etc
# depending on what the env_helperd has configured
if env_helper.mitm_enabled() and not hasattr(devices, "mitm"):
raise DeviceDoesNotExistError("No mitm device (requested by environment)")
cm_boot_file = None
mta_boot_file = None
if env_helper.has_board_boot_file():
cm_boot_file = env_helper.get_board_boot_file()
if env_helper.has_board_boot_file_mta():
mta_boot_file = env_helper.get_board_boot_file_mta()
devices.board.env_config(cm_boot_file, mta_boot_file, devices.board.mibs_path)
if env_helper.voice_enabled():
dev_list = [
devices.sipcenter,
devices.softphone,
]
if env_helper.get_external_voip():
dev_list.append(devices.softphone2)
dev_list.append(getattr(devices, "FXS", [devices.lan, devices.lan2]))
boardfarm.lib.voice.voice_configure(
dev_list,
devices.sipcenter,
config,
)
prov = getattr(config, "provisioner", None)
if prov:
if env_helper.vendor_encap_opts(ip_proto="ipv4"):
devices.provisioner.vendor_opts_acsv4_url = True
if env_helper.vendor_encap_opts(ip_proto="ipv6"):
devices.provisioner.vendor_opts_acsv6_url = True
devices.provisioner.valid_route_gateway = env_helper.is_route_gateway_valid()
logger.info("Provisioning board")
provision_board()
else:
# should this be an error?
logger.error(
colored(
"No provisioner found! Board provisioned skipped",
color="yellow",
attrs=["bold"],
)
)
if hasattr(devices.board, "pre_boot_env"):
devices.board.pre_boot_env(env_helper)
pre_boot_actions = {
"wan_clients_pre_boot": pre_boot_wan_clients,
"lan_clients_pre_boot": pre_boot_lan_clients,
"wlan_clients_pre_boot": pre_boot_wlan_clients,
"board_pre_boot": pre_boot_board,
"environment_pre_boot": pre_boot_env,
}
def boot_board(config, env_helper, devices):
try:
devices.board.reset()
if env_helper.get_software():
devices.board.flash(env_helper)
# store the timestamp, for uptime check later (in case the board
# crashes on boot)
devices.board.__reset__timestamp = time.time()
devices.cmts.clear_cm_reset(devices.board.cm_mac)
time.sleep(20)
except Exception as e:
logger.critical(colored("\n\nFailed to Boot", color="red", attrs=["bold"]))
logger.error(e)
raise BootFail
boot_actions = {"board_boot": boot_board}
def _wait_for_cm_online(config, env_helper, devices):
for _ in range(180):
if (
devices.cmts.is_cm_online(
ignore_bpi=is_bpi_privacy_disabled(), ignore_partial=True
)
is False
):
# show the arm prompt as it is a log in itself
devices.board.touch()
time.sleep(15)
continue
if devices.board.finalize_boot():
break
logger.info("######Rebooting######")
devices.cmts.clear_cm_reset(devices.board.cm_mac)
time.sleep(20)
else:
msg = "\n\nFailed to Boot: board not online on CMTS"
logger.critical(msg)
raise BootFail(msg)
def post_boot_board(config, env_helper, devices):
try:
_wait_for_cm_online(config, env_helper, devices)
except BootFail:
logger.warning(
colored(
"Board not online on CMTS, resetting and retrying",
color="yellow",
attrs=["bold"],
)
)
devices.board.reset()
devices.cmts.clear_cm_reset(devices.board.cm_mac)
_wait_for_cm_online(config, env_helper, devices)
devices.board.post_boot_init()
board_uptime = devices.board.get_seconds_uptime()
logger.info(f"Time up: {board_uptime}")
if hasattr(devices.board, "__reset__timestamp"):
time_elapsed = time.time() - devices.board.__reset__timestamp
logger.info(f"Time since reboot: {time_elapsed}")
if time_elapsed < board_uptime:
# TODO: the following should be an exception and not
# just a print!!!!
logger.warning("Error: possibly the board did not reset!")
if (time_elapsed - board_uptime) > 60:
logger.warning(
colored(
"Board may have rebooted multiple times after flashing process",
color="yellow",
attrs=["bold"],
)
)
if isinstance(devices.board, DocsisCPE):
pass # maybe new method to be added
else:
# the old way for legacy
devices.board.check_valid_docsis_ip_networking(strict=False)
def post_boot_wan_clients(config, env_helper, devices):
pass
def post_boot_lan_clients(config, env_helper, devices):
for i, v in enumerate(devices.board.dev.lan_clients):
if getattr(env_helper, "has_lan_advertise_identity", None):
for option in ["125", "17"]:
if env_helper.has_lan_advertise_identity(i):
v.configure_dhclient(([option, True],))
else:
v.configure_dhclient(([option, False],))
if devices.board.routing and config.setup_device_networking:
for idx, x in enumerate(devices.board.dev.lan_clients):
if isinstance(x, DebianLAN): # should this use devices.lan_clients?
logger.info(f"Starting LAN client on {x.name}")
for n in range(3):
try:
x.configure_docker_iface()
if env_helper.get_prov_mode() == "ipv6":
x.start_ipv6_lan_client(wan_gw=devices.wan.gw)
if (
devices.board.cm_cfg.dslite
and env_helper.is_dhcpv4_enabled_on_lan()
):
x.start_ipv4_lan_client(wan_gw=devices.wan.gw)
elif env_helper.get_prov_mode() == "dual":
x.start_ipv6_lan_client(wan_gw=devices.wan.gw)
if env_helper.is_dhcpv4_enabled_on_lan():
x.start_ipv4_lan_client(wan_gw=devices.wan.gw)
elif env_helper.is_dhcpv4_enabled_on_lan():
x.start_ipv4_lan_client(wan_gw=devices.wan.gw)
x.configure_proxy_pkgs()
if env_helper.get_prov_mode() in [
"ipv4",
"dual",
] and env_helper.is_set_static_ipv4(idx):
# set static ip address
set_static_ip_and_default_gw(
client=x,
)
break
except Exception as e:
logger.warning(e)
logger.error(
colored(
f"Failed to start lan client on '{x.name}' device, attempt #{n}",
color="red",
attrs=["bold"],
)
)
time.sleep(10)
else:
msg = f"Failed to start lan client on {x.name}"
logger.warning(colored(msg, color="yellow", attrs=["bold"]))
# do not fail the boot with raise BootFail(msg)
# reason: the board config may be such that the
# clients are not getting an ip (see LLCs)
def post_boot_wlan_clients(config, env_helper, devices):
wifi_clients = env_helper.wifi_clients()
if wifi_clients:
# Register all wifi clients in wifi manager
for client in wifi_clients:
devices.wlan_clients.register(client)
# Start to connect all clients after registartions done:
for client in wifi_clients:
check_and_connect_to_wifi(devices, client)
logger.info(colored("\nWlan clients:", color="green"))
devices.wlan_clients.registered_clients_summary()
def post_boot_env(config, env_helper, devices):
if env_helper.mitm_enabled():
activate_mitm(devices, env_helper.get_mitm_devices())
eMTA_iface_status = env_helper.get_emta_interface_status()
if eMTA_iface_status:
devices.board.set_eMTA_interface(devices.board.mta_iface, eMTA_iface_status)
if env_helper.voice_enabled() and eMTA_iface_status != "down":
devices.board.wait_for_mta_provisioning()
register_fxs_details(getattr(devices, "FXS", []), devices.board)
cfg = env_helper.get_prov_mode()
tr069check = cfg not in ["disabled", "bridge", "none"]
tr069provision = env_helper.get_tr069_provisioning()
if tr069check:
for _ in range(20):
try:
devices.board.get_cpeid()
break
except Exception as e:
logger.error(e)
warnings.warn("Failed to connect to ACS, retrying")
time.sleep(10)
else:
raise BootFail("Failed to connect to ACS")
if tr069provision:
reset_val = any(
x in env_helper.get_software()
for x in [
"factory_reset",
"pre_flash_factory_reset",
]
)
if reset_val:
for i in tr069provision:
for acs_api in i:
API_func = getattr(devices.acs_server, acs_api)
for param in i[acs_api]:
API_func(param)
else:
raise BootFail(
"Factory reset has to performed for tr069 provisioning. Env json with factory reset true should be used."
)
if hasattr(devices.board, "post_boot_env"):
devices.board.post_boot_env()
post_boot_actions = {
"board_post_boot": post_boot_board,
"wan_clients_post_boot": post_boot_wan_clients,
"lan_clients_post_boot": post_boot_lan_clients,
"environment_post_boot": post_boot_env,
"wlan_clients_connection": post_boot_wlan_clients,
}
def run_actions(actions_dict, actions_name, *args, **kwargs):
logger.info(colored(f"{actions_name} ACTIONS", color="green", attrs=["bold"]))
for key, func in actions_dict.items():
try:
logger.info(colored(f"Action {key} start", color="green", attrs=["bold"]))
start_time = time.time()
func(*args, **kwargs)
logger.info(
colored(
f"\nAction {key} completed. Took {int(time.time() - start_time)} seconds to complete.",
color="green",
attrs=["bold"],
)
)
except Exception as e:
msg = f"\nFailed at: {actions_name}: {key} after {int(time.time() - start_time)} seconds with exception {e}"
logger.error(colored(msg, color="red", attrs=["bold"]))
raise e
logger.info(colored(f"{actions_name} COMPLETED", color="green", attrs=["bold"]))
def boot(config, env_helper, devices, logged=None, actions_list=None):
start_time = time.time()
if not actions_list:
actions_list = ["pre", "boot", "post"]
try:
if "pre" in actions_list:
run_actions(pre_boot_actions, "PRE-BOOT", config, env_helper, devices)
if "boot" in actions_list:
run_actions(boot_actions, "BOOT", config, env_helper, devices)
if "post" in actions_list:
run_actions(post_boot_actions, "POST-BOOT", config, env_helper, devices)
logger.info(
colored(
f"Boot completed in {int(time.time() - start_time)} seconds.",
color="green",
attrs=["bold"],
)
)
except Exception:
traceback.print_exc()
check_devices(devices)
logger.info(
colored(
f"Boot failed after {int(time.time() - start_time)} seconds.",
color="red",
attrs=["bold"],
)
)
raise | import time
import traceback
import warnings
| random_line_split |
booting.py | import logging
import time
import traceback
import warnings
import boardfarm.lib.booting
import boardfarm.lib.voice
from boardfarm.devices.debian_lan import DebianLAN
from boardfarm.exceptions import (
BootFail,
CodeError,
DeviceDoesNotExistError,
NoTFTPServer,
)
from boardfarm.lib.booting_utils import check_and_connect_to_wifi
from boardfarm.library import check_devices
from packaging.version import Version
from termcolor import colored
from boardfarm_docsis.devices.base_devices.board import DocsisCPE
from boardfarm_docsis.lib.booting_utils import (
activate_mitm,
register_fxs_details,
set_static_ip_and_default_gw,
)
from boardfarm_docsis.lib.dns_helper import dns_acs_config
from boardfarm_docsis.use_cases.cmts_interactions import is_bpi_privacy_disabled
from boardfarm_docsis.use_cases.provision_helper import provision_board
logger = logging.getLogger("bft")
def pre_boot_wan_clients(config, env_helper, devices):
|
def pre_boot_lan_clients(config, env_helper, devices):
for x in devices.lan_clients:
if isinstance(x, DebianLAN):
logger.info(f"Configuring {x.name}")
x.configure()
def pre_boot_wlan_clients(config, env_helper, devices):
for x in getattr(devices, "wlan_clients", []):
logger.info(f"Configuring {x.name}")
x.configure()
def pre_boot_board(config, env_helper, devices):
env_cwmp_v = env_helper.get_cwmp_version()
if env_cwmp_v:
assert Version(env_cwmp_v) == Version(
devices.board.cwmp_version()
), f"CWMP version mismatch, Expected version {env_cwmp_v}"
def pre_boot_env(config, env_helper, devices):
# this should take care of provisioner/tr069/voice/etc
# depending on what the env_helperd has configured
if env_helper.mitm_enabled() and not hasattr(devices, "mitm"):
raise DeviceDoesNotExistError("No mitm device (requested by environment)")
cm_boot_file = None
mta_boot_file = None
if env_helper.has_board_boot_file():
cm_boot_file = env_helper.get_board_boot_file()
if env_helper.has_board_boot_file_mta():
mta_boot_file = env_helper.get_board_boot_file_mta()
devices.board.env_config(cm_boot_file, mta_boot_file, devices.board.mibs_path)
if env_helper.voice_enabled():
dev_list = [
devices.sipcenter,
devices.softphone,
]
if env_helper.get_external_voip():
dev_list.append(devices.softphone2)
dev_list.append(getattr(devices, "FXS", [devices.lan, devices.lan2]))
boardfarm.lib.voice.voice_configure(
dev_list,
devices.sipcenter,
config,
)
prov = getattr(config, "provisioner", None)
if prov:
if env_helper.vendor_encap_opts(ip_proto="ipv4"):
devices.provisioner.vendor_opts_acsv4_url = True
if env_helper.vendor_encap_opts(ip_proto="ipv6"):
devices.provisioner.vendor_opts_acsv6_url = True
devices.provisioner.valid_route_gateway = env_helper.is_route_gateway_valid()
logger.info("Provisioning board")
provision_board()
else:
# should this be an error?
logger.error(
colored(
"No provisioner found! Board provisioned skipped",
color="yellow",
attrs=["bold"],
)
)
if hasattr(devices.board, "pre_boot_env"):
devices.board.pre_boot_env(env_helper)
pre_boot_actions = {
"wan_clients_pre_boot": pre_boot_wan_clients,
"lan_clients_pre_boot": pre_boot_lan_clients,
"wlan_clients_pre_boot": pre_boot_wlan_clients,
"board_pre_boot": pre_boot_board,
"environment_pre_boot": pre_boot_env,
}
def boot_board(config, env_helper, devices):
try:
devices.board.reset()
if env_helper.get_software():
devices.board.flash(env_helper)
# store the timestamp, for uptime check later (in case the board
# crashes on boot)
devices.board.__reset__timestamp = time.time()
devices.cmts.clear_cm_reset(devices.board.cm_mac)
time.sleep(20)
except Exception as e:
logger.critical(colored("\n\nFailed to Boot", color="red", attrs=["bold"]))
logger.error(e)
raise BootFail
boot_actions = {"board_boot": boot_board}
def _wait_for_cm_online(config, env_helper, devices):
for _ in range(180):
if (
devices.cmts.is_cm_online(
ignore_bpi=is_bpi_privacy_disabled(), ignore_partial=True
)
is False
):
# show the arm prompt as it is a log in itself
devices.board.touch()
time.sleep(15)
continue
if devices.board.finalize_boot():
break
logger.info("######Rebooting######")
devices.cmts.clear_cm_reset(devices.board.cm_mac)
time.sleep(20)
else:
msg = "\n\nFailed to Boot: board not online on CMTS"
logger.critical(msg)
raise BootFail(msg)
def post_boot_board(config, env_helper, devices):
try:
_wait_for_cm_online(config, env_helper, devices)
except BootFail:
logger.warning(
colored(
"Board not online on CMTS, resetting and retrying",
color="yellow",
attrs=["bold"],
)
)
devices.board.reset()
devices.cmts.clear_cm_reset(devices.board.cm_mac)
_wait_for_cm_online(config, env_helper, devices)
devices.board.post_boot_init()
board_uptime = devices.board.get_seconds_uptime()
logger.info(f"Time up: {board_uptime}")
if hasattr(devices.board, "__reset__timestamp"):
time_elapsed = time.time() - devices.board.__reset__timestamp
logger.info(f"Time since reboot: {time_elapsed}")
if time_elapsed < board_uptime:
# TODO: the following should be an exception and not
# just a print!!!!
logger.warning("Error: possibly the board did not reset!")
if (time_elapsed - board_uptime) > 60:
logger.warning(
colored(
"Board may have rebooted multiple times after flashing process",
color="yellow",
attrs=["bold"],
)
)
if isinstance(devices.board, DocsisCPE):
pass # maybe new method to be added
else:
# the old way for legacy
devices.board.check_valid_docsis_ip_networking(strict=False)
def post_boot_wan_clients(config, env_helper, devices):
pass
def post_boot_lan_clients(config, env_helper, devices):
for i, v in enumerate(devices.board.dev.lan_clients):
if getattr(env_helper, "has_lan_advertise_identity", None):
for option in ["125", "17"]:
if env_helper.has_lan_advertise_identity(i):
v.configure_dhclient(([option, True],))
else:
v.configure_dhclient(([option, False],))
if devices.board.routing and config.setup_device_networking:
for idx, x in enumerate(devices.board.dev.lan_clients):
if isinstance(x, DebianLAN): # should this use devices.lan_clients?
logger.info(f"Starting LAN client on {x.name}")
for n in range(3):
try:
x.configure_docker_iface()
if env_helper.get_prov_mode() == "ipv6":
x.start_ipv6_lan_client(wan_gw=devices.wan.gw)
if (
devices.board.cm_cfg.dslite
and env_helper.is_dhcpv4_enabled_on_lan()
):
x.start_ipv4_lan_client(wan_gw=devices.wan.gw)
elif env_helper.get_prov_mode() == "dual":
x.start_ipv6_lan_client(wan_gw=devices.wan.gw)
if env_helper.is_dhcpv4_enabled_on_lan():
x.start_ipv4_lan_client(wan_gw=devices.wan.gw)
elif env_helper.is_dhcpv4_enabled_on_lan():
x.start_ipv4_lan_client(wan_gw=devices.wan.gw)
x.configure_proxy_pkgs()
if env_helper.get_prov_mode() in [
"ipv4",
"dual",
] and env_helper.is_set_static_ipv4(idx):
# set static ip address
set_static_ip_and_default_gw(
client=x,
)
break
except Exception as e:
logger.warning(e)
logger.error(
colored(
f"Failed to start lan client on '{x.name}' device, attempt #{n}",
color="red",
attrs=["bold"],
)
)
time.sleep(10)
else:
msg = f"Failed to start lan client on {x.name}"
logger.warning(colored(msg, color="yellow", attrs=["bold"]))
# do not fail the boot with raise BootFail(msg)
# reason: the board config may be such that the
# clients are not getting an ip (see LLCs)
def post_boot_wlan_clients(config, env_helper, devices):
wifi_clients = env_helper.wifi_clients()
if wifi_clients:
# Register all wifi clients in wifi manager
for client in wifi_clients:
devices.wlan_clients.register(client)
# Start to connect all clients after registartions done:
for client in wifi_clients:
check_and_connect_to_wifi(devices, client)
logger.info(colored("\nWlan clients:", color="green"))
devices.wlan_clients.registered_clients_summary()
def post_boot_env(config, env_helper, devices):
if env_helper.mitm_enabled():
activate_mitm(devices, env_helper.get_mitm_devices())
eMTA_iface_status = env_helper.get_emta_interface_status()
if eMTA_iface_status:
devices.board.set_eMTA_interface(devices.board.mta_iface, eMTA_iface_status)
if env_helper.voice_enabled() and eMTA_iface_status != "down":
devices.board.wait_for_mta_provisioning()
register_fxs_details(getattr(devices, "FXS", []), devices.board)
cfg = env_helper.get_prov_mode()
tr069check = cfg not in ["disabled", "bridge", "none"]
tr069provision = env_helper.get_tr069_provisioning()
if tr069check:
for _ in range(20):
try:
devices.board.get_cpeid()
break
except Exception as e:
logger.error(e)
warnings.warn("Failed to connect to ACS, retrying")
time.sleep(10)
else:
raise BootFail("Failed to connect to ACS")
if tr069provision:
reset_val = any(
x in env_helper.get_software()
for x in [
"factory_reset",
"pre_flash_factory_reset",
]
)
if reset_val:
for i in tr069provision:
for acs_api in i:
API_func = getattr(devices.acs_server, acs_api)
for param in i[acs_api]:
API_func(param)
else:
raise BootFail(
"Factory reset has to performed for tr069 provisioning. Env json with factory reset true should be used."
)
if hasattr(devices.board, "post_boot_env"):
devices.board.post_boot_env()
post_boot_actions = {
"board_post_boot": post_boot_board,
"wan_clients_post_boot": post_boot_wan_clients,
"lan_clients_post_boot": post_boot_lan_clients,
"environment_post_boot": post_boot_env,
"wlan_clients_connection": post_boot_wlan_clients,
}
def run_actions(actions_dict, actions_name, *args, **kwargs):
logger.info(colored(f"{actions_name} ACTIONS", color="green", attrs=["bold"]))
for key, func in actions_dict.items():
try:
logger.info(colored(f"Action {key} start", color="green", attrs=["bold"]))
start_time = time.time()
func(*args, **kwargs)
logger.info(
colored(
f"\nAction {key} completed. Took {int(time.time() - start_time)} seconds to complete.",
color="green",
attrs=["bold"],
)
)
except Exception as e:
msg = f"\nFailed at: {actions_name}: {key} after {int(time.time() - start_time)} seconds with exception {e}"
logger.error(colored(msg, color="red", attrs=["bold"]))
raise e
logger.info(colored(f"{actions_name} COMPLETED", color="green", attrs=["bold"]))
def boot(config, env_helper, devices, logged=None, actions_list=None):
start_time = time.time()
if not actions_list:
actions_list = ["pre", "boot", "post"]
try:
if "pre" in actions_list:
run_actions(pre_boot_actions, "PRE-BOOT", config, env_helper, devices)
if "boot" in actions_list:
run_actions(boot_actions, "BOOT", config, env_helper, devices)
if "post" in actions_list:
run_actions(post_boot_actions, "POST-BOOT", config, env_helper, devices)
logger.info(
colored(
f"Boot completed in {int(time.time() - start_time)} seconds.",
color="green",
attrs=["bold"],
)
)
except Exception:
traceback.print_exc()
check_devices(devices)
logger.info(
colored(
f"Boot failed after {int(time.time() - start_time)} seconds.",
color="red",
attrs=["bold"],
)
)
raise
| if env_helper.get_dns_dict():
# to get reachable and unreachable ips for ACS DNS
devices.wan.auth_dns = True
dns_acs_config(devices, env_helper.get_dns_dict())
tftp_device, tftp_servers = boardfarm.lib.booting.get_tftp(config)
if not tftp_servers:
logger.error(colored("No tftp server found", color="red", attrs=["bold"]))
# currently we must have at least 1 tftp server configured
raise NoTFTPServer
if len(tftp_servers) > 1:
msg = f"Found more than 1 tftp server: {tftp_servers}, using {tftp_device.name}"
logger.error(colored(msg, color="red", attrs=["bold"]))
raise CodeError(msg)
# should we run configure for all the wan devices? or just wan?
for x in devices:
# if isinstance(x, DebianWAN): # does not work for mitm
if hasattr(x, "name") and "wan" in x.name:
logger.info(f"Configuring {x.name}")
x.configure(config=config)
# if more than 1 tftp server should we start them all?
# currently starting the 1 being used
logger.info(f"Starting TFTP server on {tftp_device.name}")
tftp_device.start_tftp_server()
devices.board.tftp_device = tftp_device | identifier_body |
transaction.go | package factom
import (
"bytes"
"context"
"crypto/sha256"
"encoding/binary"
"fmt"
"time"
"github.com/Factom-Asset-Tokens/factom/varintf"
)
type FactoidTransaction struct {
// TODO: The header is usually at the top level, is this ok?
FactoidTransactionHeader
FCTInputs []FactoidTransactionIO
FCTOutputs []FactoidTransactionIO
ECOutputs []FactoidTransactionIO
Signatures []FactoidTransactionSignature
}
type FactoidTransactionHeader struct {
// TransactionID is not in the marshalled binary
TransactionID *Bytes32
Version uint64
// TimestampSalt is accurate to the millisecond
TimestampSalt time.Time
}
// factoidTransactionIOs is used as a wrapper for an array of IOs to reuse the
// functionality. This is compared to writing your own loop to handle
// lists of io behavior.
type factoidTransactionIOs []FactoidTransactionIO
type FactoidTransactionIO struct {
Amount uint64
// Address can be an SHA256d(RCD) for FCT in/out, or a public key for EC out.
// It is the encoded bytes into the human readable addresses
Address Bytes32
}
type FactoidTransactionSignature struct {
// SHA256d(RCD) == FactoidIOAddress for the inputs
ReedeemCondition RCD1
SignatureBlock Bytes
}
// IsPopulated returns true if f has already been successfully populated by a
// call to Get. IsPopulated returns false if f.FCTInputs, or f.Signatures are
// nil, or if f.Timestamp is zero.
func (f FactoidTransaction) | () bool {
return f.FCTInputs != nil && // This array should not be nil
f.Signatures != nil &&
!f.TimestampSalt.IsZero()
}
// IsPopulated returns true if s has already been successfully populated by a
// call to Get. IsPopulated returns false if s.SignatureBlock or
// s.ReedeemCondition are nil
func (s FactoidTransactionSignature) IsPopulated() bool {
return s.SignatureBlock != nil
}
// Valid returns if the inputs of the factoid transaction are properly signed
// by the redeem conditions. It will also validate the total inputs is greater
// than the total outputs.
func (f *FactoidTransaction) Valid() bool {
if !f.IsPopulated() {
return false
}
// Validate amounts
if f.TotalFCTInputs() < f.TotalFCTOutputs()+f.TotalECOutput() {
return false
}
// Validate signatures
if len(f.FCTInputs) != len(f.Signatures) {
return false
}
msg, err := f.MarshalLedgerBinary()
if err != nil {
return false
}
for i := range f.FCTInputs {
expAddr := f.Signatures[i].ReedeemCondition.Address()
// RCD should match the input
if bytes.Compare(expAddr[:], f.FCTInputs[i].Address[:]) != 0 {
return false
}
if !f.Signatures[i].Validate(msg) {
return false
}
}
return true
}
func (f *FactoidTransaction) TotalFCTInputs() (total uint64) {
return factoidTransactionIOs(f.FCTInputs).TotalAmount()
}
func (f *FactoidTransaction) TotalFCTOutputs() (total uint64) {
return factoidTransactionIOs(f.FCTOutputs).TotalAmount()
}
// TotalECOutput is delimated in factoishis
func (f *FactoidTransaction) TotalECOutput() (total uint64) {
return factoidTransactionIOs(f.ECOutputs).TotalAmount()
}
func (s factoidTransactionIOs) TotalAmount() (total uint64) {
for _, io := range s {
total += io.Amount
}
return
}
func (s FactoidTransactionSignature) Validate(msg Bytes) bool {
return s.ReedeemCondition.Validate(msg, s.SignatureBlock)
}
// Get queries factomd for the entry corresponding to f.TransactionID, which
// must be not nil. After a successful call all inputs, outputs, and
// the header will be populated
func (f *FactoidTransaction) Get(ctx context.Context, c *Client) error {
// TODO: Test this functionality
// If the TransactionID is nil then we have nothing to query for.
if f.TransactionID == nil {
return fmt.Errorf("txid is nil")
}
// If the Transaction is already populated then there is nothing to do. If
// the Hash is nil, we cannot populate it anyway.
if f.IsPopulated() {
return nil
}
params := struct {
Hash *Bytes32 `json:"hash"`
}{Hash: f.TransactionID}
var result struct {
Data Bytes `json:"data"`
}
if err := c.FactomdRequest(ctx, "raw-data", params, &result); err != nil {
return err
}
if err := f.UnmarshalBinary(result.Data); err != nil {
return err
}
return nil
}
// ComputeTransactionID computes the txid for a given transaction. The txid is
// the sha256 of the ledger fields in a factoid transaction. The ledger fields
// exclude the signature block of the transaction
func (f *FactoidTransaction) ComputeTransactionID() (Bytes32, error) {
data, err := f.MarshalLedgerBinary()
if err != nil {
return Bytes32{}, err
}
return f.computeTransactionID(data)
}
func (f *FactoidTransaction) computeTransactionID(ledgerBinary Bytes) (Bytes32, error) {
txid := Bytes32(sha256.Sum256(ledgerBinary))
return txid, nil
}
// ComputeFullHash computes the fullhash for a given transaction. The fullhash
// is the sha256 of all the fields in a factoid transaction.
func (f *FactoidTransaction) ComputeFullHash() (*Bytes32, error) {
data, err := f.MarshalBinary()
if err != nil {
return nil, err
}
txid := Bytes32(sha256.Sum256(data))
return &txid, nil
}
// MarshalLedgerBinary marshals the transaction ledger fields to their
// binary representation. This excludes the signature blocks
func (f *FactoidTransaction) MarshalLedgerBinary() ([]byte, error) {
// TODO: More checks up front?
if !f.IsPopulated() {
return nil, fmt.Errorf("not populated")
}
// It's very difficult to know the size before marshaling, as
// each in/out has a varint so make the buffer at the end
// The header bytes
header, err := f.MarshalHeaderBinary()
if err != nil {
return nil, err
}
// Inputs
inputs, err := factoidTransactionIOs(f.FCTInputs).MarshalBinary()
if err != nil {
return nil, err
}
// FCT Outputs
fctout, err := factoidTransactionIOs(f.FCTOutputs).MarshalBinary()
if err != nil {
return nil, err
}
// EC Outputs
ecout, err := factoidTransactionIOs(f.ECOutputs).MarshalBinary()
if err != nil {
return nil, err
}
data := make([]byte, len(header)+len(inputs)+len(fctout)+len(ecout))
var i int
i += copy(data[i:], header)
i += copy(data[i:], inputs)
i += copy(data[i:], fctout)
i += copy(data[i:], ecout)
return data, nil
}
// TODO: Re-eval how to do this. Kinda different from the rest
func (f *FactoidTransaction) MarshalBinary() ([]byte, error) {
// TODO: More checks up front?
if !f.IsPopulated() {
return nil, fmt.Errorf("not populated")
}
data, err := f.MarshalLedgerBinary()
if err != nil {
return nil, err
}
for _, s := range f.Signatures {
sig, err := s.MarshalBinary()
if err != nil {
return nil, err
}
data = append(data, sig...)
}
return data, nil
}
// MarshalHeaderBinary marshals the transaction's header to its binary
// representation. See UnmarshalHeaderBinary for encoding details.
func (f *FactoidTransaction) MarshalHeaderBinary() ([]byte, error) {
version := varintf.Encode(f.Version)
data := make([]byte, TransactionHeadMinLen+len(version))
var i int
i += copy(data[i:], version)
// Do the timestamp as 6 bytes in ms
ms := f.TimestampSalt.UnixNano() / 1e6
buf := bytes.NewBuffer(make([]byte, 0, 8))
if err := binary.Write(buf, binary.BigEndian, ms); err != nil {
return nil, err
}
i += copy(data[i:], buf.Bytes()[2:])
data[i] = uint8(len(f.FCTInputs))
i += 1
data[i] = uint8(len(f.FCTOutputs))
i += 1
data[i] = uint8(len(f.ECOutputs))
i += 1
return data, nil
}
// MarshalBinary marshals a set of transaction ios to its binary representation.
// See UnmarshalBinary for encoding details.
func (ios factoidTransactionIOs) MarshalBinary() ([]byte, error) {
var data []byte
for _, io := range ios {
iodata, err := io.MarshalBinary()
if err != nil {
return nil, err
}
data = append(data, iodata...)
}
return data, nil
}
// MarshalBinary marshals a transaction io to its binary representation.
// See UnmarshalBinary for encoding details.
func (io *FactoidTransactionIO) MarshalBinary() ([]byte, error) {
amount := varintf.Encode(io.Amount)
data := make([]byte, 32+len(amount))
var i int
i += copy(data[i:], amount)
i += copy(data[i:], io.Address[:])
return data, nil
}
// MarshalBinary marshals a transaction signature to its binary representation.
// See UnmarshalBinary for encoding details.
func (s *FactoidTransactionSignature) MarshalBinary() ([]byte, error) {
if !s.IsPopulated() {
return nil, fmt.Errorf("not populated")
}
rcdData, err := s.ReedeemCondition.MarshalBinary()
if err != nil {
return nil, err
}
data := make([]byte, len(rcdData)+len(s.SignatureBlock))
var i int
i += copy(data[i:], rcdData)
i += copy(data[i:], s.SignatureBlock)
return data, nil
}
const (
TransactionHeadMinLen = 0 + // Version length is varint
6 + // timestamp
1 + // input count
1 + // factoid output count
1 // EC output count
TransactionTotalMinLen = TransactionHeadMinLen // Coinbases have no body
)
// Decode will consume as many bytes as necessary to unmarshal the factoid
// transaction. It will return the number of bytes read and an error.
func (f *FactoidTransaction) Decode(data []byte) (i int, err error) {
if len(data) < TransactionTotalMinLen {
return 0, fmt.Errorf("insufficient length")
}
// Decode header
version, i := varintf.Decode(data)
if i < 0 {
return 0, fmt.Errorf("version bytes invalid")
}
f.Version = version
msdata := make([]byte, 8)
// TS + counts length check
if len(data) < i+(6+3) {
return 0, fmt.Errorf("not enough bytes to decode tx")
}
copy(msdata[2:], data[i:i+6])
ms := binary.BigEndian.Uint64(msdata)
f.TimestampSalt = time.Unix(0, int64(ms)*1e6)
i += 6
inputCount := uint8(data[i])
i += 1
fctOutputCount := uint8(data[i])
i += 1
ecOutputCount := uint8(data[i])
i += 1
// Decode the body
// Decode the inputs
f.FCTInputs = make([]FactoidTransactionIO, inputCount)
read, err := factoidTransactionIOs(f.FCTInputs).Decode(data[i:])
if err != nil {
return 0, err
}
i += read
// Decode the FCT Outputs
f.FCTOutputs = make([]FactoidTransactionIO, fctOutputCount)
read, err = factoidTransactionIOs(f.FCTOutputs).Decode(data[i:])
if err != nil {
return 0, err
}
i += read
// Decode the EC Outputs
f.ECOutputs = make([]FactoidTransactionIO, ecOutputCount)
read, err = factoidTransactionIOs(f.ECOutputs).Decode(data[i:])
if err != nil {
return 0, err
}
i += read
// All data minus the signatures is the needed binary data to compute
// the txid
ledgerData := data[:i]
// Decode the signature blocks, one per input
f.Signatures = make([]FactoidTransactionSignature, len(f.FCTInputs))
for c := uint8(0); c < uint8(len(f.FCTInputs)); c++ {
// f.Signatures[i] = new(FactoidTransactionSignature)
read, err := f.Signatures[c].Decode(data[i:])
if err != nil {
return 0, err
}
i += read
}
txid, err := f.computeTransactionID(ledgerData)
if err != nil {
return 0, err
}
// If the txid is already set, validate the txid
if f.TransactionID != nil {
if *f.TransactionID != txid {
return 0, fmt.Errorf("invalid txid")
}
}
f.TransactionID = &txid
return i, err
}
// UnmarshalBinary unmarshals the data into a factoid transaction.
func (f *FactoidTransaction) UnmarshalBinary(data []byte) error {
// TODO: Some length checks to prevent too few/too many bytes
_, err := f.Decode(data)
return err
}
// Decode takes a given input and decodes the set of bytes needed to populate
// the set of factoid transactions ios. The set length should be preset before
// calling this function. It will return how many bytes it read and return an error.
func (ios factoidTransactionIOs) Decode(data []byte) (int, error) {
var i int
for c := range ios {
read, err := ios[c].Decode(data[i:])
if err != nil {
return 0, err
}
i += read
}
return i, nil
}
// Decode takes a given input and decodes the set of bytes needed for a full
// transaction input/output. It will return how many bytes it read and an error.
// A FactoidTransactionIO includes an amount and an address.
func (io *FactoidTransactionIO) Decode(data []byte) (int, error) {
amount, i := varintf.Decode(data)
if i < 0 {
return 0, fmt.Errorf("amount is not a valid varint")
}
io.Amount = amount
if len(data)-i < 32 {
return 0, fmt.Errorf("not enough bytes to decode factoidtx")
}
var tmp Bytes32 // TODO: Fix this
copy(tmp[:], data[i:i+32])
io.Address = tmp
i += 32
return i, nil
}
// Decode will take a given input and decode the set of bytes needed for the full
// FactoidTransactionSignature. It will return how many bytes it read and an error.
// A FactoidTransactionSignature includes the RCD type and it's signature block.
func (s *FactoidTransactionSignature) Decode(data []byte) (int, error) {
rcd, i, err := DecodeRCD(data)
if err != nil {
return 0, err
}
// TODO: How do you want to handle this? Have the decode only return the
// concrete rcd1 type?
rcd1, ok := rcd.(*RCD1)
if !ok {
return -1, fmt.Errorf("rcd %d type not supported", rcd.Type())
}
s.ReedeemCondition = *rcd1
s.SignatureBlock = make([]byte, rcd.SignatureBlockSize())
i += copy(s.SignatureBlock, data[i:])
return i, nil
}
| IsPopulated | identifier_name |
transaction.go | package factom
import (
"bytes"
"context"
"crypto/sha256"
"encoding/binary"
"fmt"
"time"
"github.com/Factom-Asset-Tokens/factom/varintf"
)
type FactoidTransaction struct {
// TODO: The header is usually at the top level, is this ok?
FactoidTransactionHeader
FCTInputs []FactoidTransactionIO
FCTOutputs []FactoidTransactionIO
ECOutputs []FactoidTransactionIO
Signatures []FactoidTransactionSignature
}
type FactoidTransactionHeader struct {
// TransactionID is not in the marshalled binary
TransactionID *Bytes32
Version uint64
// TimestampSalt is accurate to the millisecond
TimestampSalt time.Time
}
// factoidTransactionIOs is used as a wrapper for an array of IOs to reuse the
// functionality. This is compared to writing your own loop to handle
// lists of io behavior.
type factoidTransactionIOs []FactoidTransactionIO
type FactoidTransactionIO struct {
Amount uint64
// Address can be an SHA256d(RCD) for FCT in/out, or a public key for EC out.
// It is the encoded bytes into the human readable addresses
Address Bytes32
}
type FactoidTransactionSignature struct {
// SHA256d(RCD) == FactoidIOAddress for the inputs
ReedeemCondition RCD1
SignatureBlock Bytes
}
// IsPopulated returns true if f has already been successfully populated by a
// call to Get. IsPopulated returns false if f.FCTInputs, or f.Signatures are
// nil, or if f.Timestamp is zero.
func (f FactoidTransaction) IsPopulated() bool {
return f.FCTInputs != nil && // This array should not be nil
f.Signatures != nil &&
!f.TimestampSalt.IsZero()
}
// IsPopulated returns true if s has already been successfully populated by a
// call to Get. IsPopulated returns false if s.SignatureBlock or
// s.ReedeemCondition are nil
func (s FactoidTransactionSignature) IsPopulated() bool {
return s.SignatureBlock != nil
}
// Valid returns if the inputs of the factoid transaction are properly signed
// by the redeem conditions. It will also validate the total inputs is greater
// than the total outputs.
func (f *FactoidTransaction) Valid() bool {
if !f.IsPopulated() {
return false
}
// Validate amounts
if f.TotalFCTInputs() < f.TotalFCTOutputs()+f.TotalECOutput() {
return false
}
// Validate signatures
if len(f.FCTInputs) != len(f.Signatures) {
return false
}
msg, err := f.MarshalLedgerBinary()
if err != nil {
return false
}
for i := range f.FCTInputs {
expAddr := f.Signatures[i].ReedeemCondition.Address()
// RCD should match the input
if bytes.Compare(expAddr[:], f.FCTInputs[i].Address[:]) != 0 {
return false
}
if !f.Signatures[i].Validate(msg) {
return false
}
}
return true
}
func (f *FactoidTransaction) TotalFCTInputs() (total uint64) {
return factoidTransactionIOs(f.FCTInputs).TotalAmount()
}
func (f *FactoidTransaction) TotalFCTOutputs() (total uint64) {
return factoidTransactionIOs(f.FCTOutputs).TotalAmount()
}
// TotalECOutput is delimated in factoishis
func (f *FactoidTransaction) TotalECOutput() (total uint64) {
return factoidTransactionIOs(f.ECOutputs).TotalAmount()
}
func (s factoidTransactionIOs) TotalAmount() (total uint64) {
for _, io := range s {
total += io.Amount
}
return
}
func (s FactoidTransactionSignature) Validate(msg Bytes) bool {
return s.ReedeemCondition.Validate(msg, s.SignatureBlock)
}
// Get queries factomd for the entry corresponding to f.TransactionID, which
// must be not nil. After a successful call all inputs, outputs, and
// the header will be populated
func (f *FactoidTransaction) Get(ctx context.Context, c *Client) error {
// TODO: Test this functionality
// If the TransactionID is nil then we have nothing to query for.
if f.TransactionID == nil {
return fmt.Errorf("txid is nil")
}
// If the Transaction is already populated then there is nothing to do. If
// the Hash is nil, we cannot populate it anyway.
if f.IsPopulated() {
return nil
}
params := struct {
Hash *Bytes32 `json:"hash"`
}{Hash: f.TransactionID}
var result struct {
Data Bytes `json:"data"`
}
if err := c.FactomdRequest(ctx, "raw-data", params, &result); err != nil {
return err
}
if err := f.UnmarshalBinary(result.Data); err != nil {
return err
}
return nil
}
// ComputeTransactionID computes the txid for a given transaction. The txid is
// the sha256 of the ledger fields in a factoid transaction. The ledger fields
// exclude the signature block of the transaction
func (f *FactoidTransaction) ComputeTransactionID() (Bytes32, error) {
data, err := f.MarshalLedgerBinary()
if err != nil {
return Bytes32{}, err
}
return f.computeTransactionID(data)
}
func (f *FactoidTransaction) computeTransactionID(ledgerBinary Bytes) (Bytes32, error) {
txid := Bytes32(sha256.Sum256(ledgerBinary))
return txid, nil
}
// ComputeFullHash computes the fullhash for a given transaction. The fullhash
// is the sha256 of all the fields in a factoid transaction.
func (f *FactoidTransaction) ComputeFullHash() (*Bytes32, error) {
data, err := f.MarshalBinary()
if err != nil {
return nil, err
}
txid := Bytes32(sha256.Sum256(data))
return &txid, nil
}
// MarshalLedgerBinary marshals the transaction ledger fields to their
// binary representation. This excludes the signature blocks
func (f *FactoidTransaction) MarshalLedgerBinary() ([]byte, error) {
// TODO: More checks up front?
if !f.IsPopulated() {
return nil, fmt.Errorf("not populated")
}
// It's very difficult to know the size before marshaling, as
// each in/out has a varint so make the buffer at the end
// The header bytes
header, err := f.MarshalHeaderBinary()
if err != nil {
return nil, err
}
// Inputs
inputs, err := factoidTransactionIOs(f.FCTInputs).MarshalBinary()
if err != nil {
return nil, err
}
// FCT Outputs
fctout, err := factoidTransactionIOs(f.FCTOutputs).MarshalBinary()
if err != nil {
return nil, err
}
// EC Outputs
ecout, err := factoidTransactionIOs(f.ECOutputs).MarshalBinary()
if err != nil {
return nil, err
}
data := make([]byte, len(header)+len(inputs)+len(fctout)+len(ecout))
var i int
i += copy(data[i:], header)
i += copy(data[i:], inputs)
i += copy(data[i:], fctout)
i += copy(data[i:], ecout)
return data, nil
}
// TODO: Re-eval how to do this. Kinda different from the rest
func (f *FactoidTransaction) MarshalBinary() ([]byte, error) {
// TODO: More checks up front?
if !f.IsPopulated() {
return nil, fmt.Errorf("not populated")
}
data, err := f.MarshalLedgerBinary()
if err != nil {
return nil, err
}
for _, s := range f.Signatures |
return data, nil
}
// MarshalHeaderBinary marshals the transaction's header to its binary
// representation. See UnmarshalHeaderBinary for encoding details.
func (f *FactoidTransaction) MarshalHeaderBinary() ([]byte, error) {
version := varintf.Encode(f.Version)
data := make([]byte, TransactionHeadMinLen+len(version))
var i int
i += copy(data[i:], version)
// Do the timestamp as 6 bytes in ms
ms := f.TimestampSalt.UnixNano() / 1e6
buf := bytes.NewBuffer(make([]byte, 0, 8))
if err := binary.Write(buf, binary.BigEndian, ms); err != nil {
return nil, err
}
i += copy(data[i:], buf.Bytes()[2:])
data[i] = uint8(len(f.FCTInputs))
i += 1
data[i] = uint8(len(f.FCTOutputs))
i += 1
data[i] = uint8(len(f.ECOutputs))
i += 1
return data, nil
}
// MarshalBinary marshals a set of transaction ios to its binary representation.
// See UnmarshalBinary for encoding details.
func (ios factoidTransactionIOs) MarshalBinary() ([]byte, error) {
var data []byte
for _, io := range ios {
iodata, err := io.MarshalBinary()
if err != nil {
return nil, err
}
data = append(data, iodata...)
}
return data, nil
}
// MarshalBinary marshals a transaction io to its binary representation.
// See UnmarshalBinary for encoding details.
func (io *FactoidTransactionIO) MarshalBinary() ([]byte, error) {
amount := varintf.Encode(io.Amount)
data := make([]byte, 32+len(amount))
var i int
i += copy(data[i:], amount)
i += copy(data[i:], io.Address[:])
return data, nil
}
// MarshalBinary marshals a transaction signature to its binary representation.
// See UnmarshalBinary for encoding details.
func (s *FactoidTransactionSignature) MarshalBinary() ([]byte, error) {
if !s.IsPopulated() {
return nil, fmt.Errorf("not populated")
}
rcdData, err := s.ReedeemCondition.MarshalBinary()
if err != nil {
return nil, err
}
data := make([]byte, len(rcdData)+len(s.SignatureBlock))
var i int
i += copy(data[i:], rcdData)
i += copy(data[i:], s.SignatureBlock)
return data, nil
}
const (
TransactionHeadMinLen = 0 + // Version length is varint
6 + // timestamp
1 + // input count
1 + // factoid output count
1 // EC output count
TransactionTotalMinLen = TransactionHeadMinLen // Coinbases have no body
)
// Decode will consume as many bytes as necessary to unmarshal the factoid
// transaction. It will return the number of bytes read and an error.
func (f *FactoidTransaction) Decode(data []byte) (i int, err error) {
if len(data) < TransactionTotalMinLen {
return 0, fmt.Errorf("insufficient length")
}
// Decode header
version, i := varintf.Decode(data)
if i < 0 {
return 0, fmt.Errorf("version bytes invalid")
}
f.Version = version
msdata := make([]byte, 8)
// TS + counts length check
if len(data) < i+(6+3) {
return 0, fmt.Errorf("not enough bytes to decode tx")
}
copy(msdata[2:], data[i:i+6])
ms := binary.BigEndian.Uint64(msdata)
f.TimestampSalt = time.Unix(0, int64(ms)*1e6)
i += 6
inputCount := uint8(data[i])
i += 1
fctOutputCount := uint8(data[i])
i += 1
ecOutputCount := uint8(data[i])
i += 1
// Decode the body
// Decode the inputs
f.FCTInputs = make([]FactoidTransactionIO, inputCount)
read, err := factoidTransactionIOs(f.FCTInputs).Decode(data[i:])
if err != nil {
return 0, err
}
i += read
// Decode the FCT Outputs
f.FCTOutputs = make([]FactoidTransactionIO, fctOutputCount)
read, err = factoidTransactionIOs(f.FCTOutputs).Decode(data[i:])
if err != nil {
return 0, err
}
i += read
// Decode the EC Outputs
f.ECOutputs = make([]FactoidTransactionIO, ecOutputCount)
read, err = factoidTransactionIOs(f.ECOutputs).Decode(data[i:])
if err != nil {
return 0, err
}
i += read
// All data minus the signatures is the needed binary data to compute
// the txid
ledgerData := data[:i]
// Decode the signature blocks, one per input
f.Signatures = make([]FactoidTransactionSignature, len(f.FCTInputs))
for c := uint8(0); c < uint8(len(f.FCTInputs)); c++ {
// f.Signatures[i] = new(FactoidTransactionSignature)
read, err := f.Signatures[c].Decode(data[i:])
if err != nil {
return 0, err
}
i += read
}
txid, err := f.computeTransactionID(ledgerData)
if err != nil {
return 0, err
}
// If the txid is already set, validate the txid
if f.TransactionID != nil {
if *f.TransactionID != txid {
return 0, fmt.Errorf("invalid txid")
}
}
f.TransactionID = &txid
return i, err
}
// UnmarshalBinary unmarshals the data into a factoid transaction.
func (f *FactoidTransaction) UnmarshalBinary(data []byte) error {
// TODO: Some length checks to prevent too few/too many bytes
_, err := f.Decode(data)
return err
}
// Decode takes a given input and decodes the set of bytes needed to populate
// the set of factoid transactions ios. The set length should be preset before
// calling this function. It will return how many bytes it read and return an error.
func (ios factoidTransactionIOs) Decode(data []byte) (int, error) {
var i int
for c := range ios {
read, err := ios[c].Decode(data[i:])
if err != nil {
return 0, err
}
i += read
}
return i, nil
}
// Decode takes a given input and decodes the set of bytes needed for a full
// transaction input/output. It will return how many bytes it read and an error.
// A FactoidTransactionIO includes an amount and an address.
func (io *FactoidTransactionIO) Decode(data []byte) (int, error) {
amount, i := varintf.Decode(data)
if i < 0 {
return 0, fmt.Errorf("amount is not a valid varint")
}
io.Amount = amount
if len(data)-i < 32 {
return 0, fmt.Errorf("not enough bytes to decode factoidtx")
}
var tmp Bytes32 // TODO: Fix this
copy(tmp[:], data[i:i+32])
io.Address = tmp
i += 32
return i, nil
}
// Decode will take a given input and decode the set of bytes needed for the full
// FactoidTransactionSignature. It will return how many bytes it read and an error.
// A FactoidTransactionSignature includes the RCD type and it's signature block.
func (s *FactoidTransactionSignature) Decode(data []byte) (int, error) {
rcd, i, err := DecodeRCD(data)
if err != nil {
return 0, err
}
// TODO: How do you want to handle this? Have the decode only return the
// concrete rcd1 type?
rcd1, ok := rcd.(*RCD1)
if !ok {
return -1, fmt.Errorf("rcd %d type not supported", rcd.Type())
}
s.ReedeemCondition = *rcd1
s.SignatureBlock = make([]byte, rcd.SignatureBlockSize())
i += copy(s.SignatureBlock, data[i:])
return i, nil
}
| {
sig, err := s.MarshalBinary()
if err != nil {
return nil, err
}
data = append(data, sig...)
} | conditional_block |
transaction.go | package factom
import (
"bytes"
"context"
"crypto/sha256"
"encoding/binary"
"fmt"
"time"
"github.com/Factom-Asset-Tokens/factom/varintf"
)
type FactoidTransaction struct {
// TODO: The header is usually at the top level, is this ok?
FactoidTransactionHeader
FCTInputs []FactoidTransactionIO
FCTOutputs []FactoidTransactionIO
ECOutputs []FactoidTransactionIO
Signatures []FactoidTransactionSignature
}
type FactoidTransactionHeader struct {
// TransactionID is not in the marshalled binary
TransactionID *Bytes32
Version uint64
// TimestampSalt is accurate to the millisecond
TimestampSalt time.Time
}
// factoidTransactionIOs is used as a wrapper for an array of IOs to reuse the
// functionality. This is compared to writing your own loop to handle
// lists of io behavior.
type factoidTransactionIOs []FactoidTransactionIO
type FactoidTransactionIO struct {
Amount uint64
// Address can be an SHA256d(RCD) for FCT in/out, or a public key for EC out.
// It is the encoded bytes into the human readable addresses
Address Bytes32
}
type FactoidTransactionSignature struct {
// SHA256d(RCD) == FactoidIOAddress for the inputs
ReedeemCondition RCD1
SignatureBlock Bytes
}
// IsPopulated returns true if f has already been successfully populated by a
// call to Get. IsPopulated returns false if f.FCTInputs, or f.Signatures are
// nil, or if f.Timestamp is zero.
func (f FactoidTransaction) IsPopulated() bool {
return f.FCTInputs != nil && // This array should not be nil
f.Signatures != nil &&
!f.TimestampSalt.IsZero()
}
// IsPopulated returns true if s has already been successfully populated by a
// call to Get. IsPopulated returns false if s.SignatureBlock or
// s.ReedeemCondition are nil
func (s FactoidTransactionSignature) IsPopulated() bool |
// Valid returns if the inputs of the factoid transaction are properly signed
// by the redeem conditions. It will also validate the total inputs is greater
// than the total outputs.
func (f *FactoidTransaction) Valid() bool {
if !f.IsPopulated() {
return false
}
// Validate amounts
if f.TotalFCTInputs() < f.TotalFCTOutputs()+f.TotalECOutput() {
return false
}
// Validate signatures
if len(f.FCTInputs) != len(f.Signatures) {
return false
}
msg, err := f.MarshalLedgerBinary()
if err != nil {
return false
}
for i := range f.FCTInputs {
expAddr := f.Signatures[i].ReedeemCondition.Address()
// RCD should match the input
if bytes.Compare(expAddr[:], f.FCTInputs[i].Address[:]) != 0 {
return false
}
if !f.Signatures[i].Validate(msg) {
return false
}
}
return true
}
func (f *FactoidTransaction) TotalFCTInputs() (total uint64) {
return factoidTransactionIOs(f.FCTInputs).TotalAmount()
}
func (f *FactoidTransaction) TotalFCTOutputs() (total uint64) {
return factoidTransactionIOs(f.FCTOutputs).TotalAmount()
}
// TotalECOutput is delimated in factoishis
func (f *FactoidTransaction) TotalECOutput() (total uint64) {
return factoidTransactionIOs(f.ECOutputs).TotalAmount()
}
func (s factoidTransactionIOs) TotalAmount() (total uint64) {
for _, io := range s {
total += io.Amount
}
return
}
func (s FactoidTransactionSignature) Validate(msg Bytes) bool {
return s.ReedeemCondition.Validate(msg, s.SignatureBlock)
}
// Get queries factomd for the entry corresponding to f.TransactionID, which
// must be not nil. After a successful call all inputs, outputs, and
// the header will be populated
func (f *FactoidTransaction) Get(ctx context.Context, c *Client) error {
// TODO: Test this functionality
// If the TransactionID is nil then we have nothing to query for.
if f.TransactionID == nil {
return fmt.Errorf("txid is nil")
}
// If the Transaction is already populated then there is nothing to do. If
// the Hash is nil, we cannot populate it anyway.
if f.IsPopulated() {
return nil
}
params := struct {
Hash *Bytes32 `json:"hash"`
}{Hash: f.TransactionID}
var result struct {
Data Bytes `json:"data"`
}
if err := c.FactomdRequest(ctx, "raw-data", params, &result); err != nil {
return err
}
if err := f.UnmarshalBinary(result.Data); err != nil {
return err
}
return nil
}
// ComputeTransactionID computes the txid for a given transaction. The txid is
// the sha256 of the ledger fields in a factoid transaction. The ledger fields
// exclude the signature block of the transaction
func (f *FactoidTransaction) ComputeTransactionID() (Bytes32, error) {
data, err := f.MarshalLedgerBinary()
if err != nil {
return Bytes32{}, err
}
return f.computeTransactionID(data)
}
func (f *FactoidTransaction) computeTransactionID(ledgerBinary Bytes) (Bytes32, error) {
txid := Bytes32(sha256.Sum256(ledgerBinary))
return txid, nil
}
// ComputeFullHash computes the fullhash for a given transaction. The fullhash
// is the sha256 of all the fields in a factoid transaction.
func (f *FactoidTransaction) ComputeFullHash() (*Bytes32, error) {
data, err := f.MarshalBinary()
if err != nil {
return nil, err
}
txid := Bytes32(sha256.Sum256(data))
return &txid, nil
}
// MarshalLedgerBinary marshals the transaction ledger fields to their
// binary representation. This excludes the signature blocks
func (f *FactoidTransaction) MarshalLedgerBinary() ([]byte, error) {
// TODO: More checks up front?
if !f.IsPopulated() {
return nil, fmt.Errorf("not populated")
}
// It's very difficult to know the size before marshaling, as
// each in/out has a varint so make the buffer at the end
// The header bytes
header, err := f.MarshalHeaderBinary()
if err != nil {
return nil, err
}
// Inputs
inputs, err := factoidTransactionIOs(f.FCTInputs).MarshalBinary()
if err != nil {
return nil, err
}
// FCT Outputs
fctout, err := factoidTransactionIOs(f.FCTOutputs).MarshalBinary()
if err != nil {
return nil, err
}
// EC Outputs
ecout, err := factoidTransactionIOs(f.ECOutputs).MarshalBinary()
if err != nil {
return nil, err
}
data := make([]byte, len(header)+len(inputs)+len(fctout)+len(ecout))
var i int
i += copy(data[i:], header)
i += copy(data[i:], inputs)
i += copy(data[i:], fctout)
i += copy(data[i:], ecout)
return data, nil
}
// TODO: Re-eval how to do this. Kinda different from the rest
func (f *FactoidTransaction) MarshalBinary() ([]byte, error) {
// TODO: More checks up front?
if !f.IsPopulated() {
return nil, fmt.Errorf("not populated")
}
data, err := f.MarshalLedgerBinary()
if err != nil {
return nil, err
}
for _, s := range f.Signatures {
sig, err := s.MarshalBinary()
if err != nil {
return nil, err
}
data = append(data, sig...)
}
return data, nil
}
// MarshalHeaderBinary marshals the transaction's header to its binary
// representation. See UnmarshalHeaderBinary for encoding details.
func (f *FactoidTransaction) MarshalHeaderBinary() ([]byte, error) {
version := varintf.Encode(f.Version)
data := make([]byte, TransactionHeadMinLen+len(version))
var i int
i += copy(data[i:], version)
// Do the timestamp as 6 bytes in ms
ms := f.TimestampSalt.UnixNano() / 1e6
buf := bytes.NewBuffer(make([]byte, 0, 8))
if err := binary.Write(buf, binary.BigEndian, ms); err != nil {
return nil, err
}
i += copy(data[i:], buf.Bytes()[2:])
data[i] = uint8(len(f.FCTInputs))
i += 1
data[i] = uint8(len(f.FCTOutputs))
i += 1
data[i] = uint8(len(f.ECOutputs))
i += 1
return data, nil
}
// MarshalBinary marshals a set of transaction ios to its binary representation.
// See UnmarshalBinary for encoding details.
func (ios factoidTransactionIOs) MarshalBinary() ([]byte, error) {
var data []byte
for _, io := range ios {
iodata, err := io.MarshalBinary()
if err != nil {
return nil, err
}
data = append(data, iodata...)
}
return data, nil
}
// MarshalBinary marshals a transaction io to its binary representation.
// See UnmarshalBinary for encoding details.
func (io *FactoidTransactionIO) MarshalBinary() ([]byte, error) {
amount := varintf.Encode(io.Amount)
data := make([]byte, 32+len(amount))
var i int
i += copy(data[i:], amount)
i += copy(data[i:], io.Address[:])
return data, nil
}
// MarshalBinary marshals a transaction signature to its binary representation.
// See UnmarshalBinary for encoding details.
func (s *FactoidTransactionSignature) MarshalBinary() ([]byte, error) {
if !s.IsPopulated() {
return nil, fmt.Errorf("not populated")
}
rcdData, err := s.ReedeemCondition.MarshalBinary()
if err != nil {
return nil, err
}
data := make([]byte, len(rcdData)+len(s.SignatureBlock))
var i int
i += copy(data[i:], rcdData)
i += copy(data[i:], s.SignatureBlock)
return data, nil
}
const (
TransactionHeadMinLen = 0 + // Version length is varint
6 + // timestamp
1 + // input count
1 + // factoid output count
1 // EC output count
TransactionTotalMinLen = TransactionHeadMinLen // Coinbases have no body
)
// Decode will consume as many bytes as necessary to unmarshal the factoid
// transaction. It will return the number of bytes read and an error.
func (f *FactoidTransaction) Decode(data []byte) (i int, err error) {
if len(data) < TransactionTotalMinLen {
return 0, fmt.Errorf("insufficient length")
}
// Decode header
version, i := varintf.Decode(data)
if i < 0 {
return 0, fmt.Errorf("version bytes invalid")
}
f.Version = version
msdata := make([]byte, 8)
// TS + counts length check
if len(data) < i+(6+3) {
return 0, fmt.Errorf("not enough bytes to decode tx")
}
copy(msdata[2:], data[i:i+6])
ms := binary.BigEndian.Uint64(msdata)
f.TimestampSalt = time.Unix(0, int64(ms)*1e6)
i += 6
inputCount := uint8(data[i])
i += 1
fctOutputCount := uint8(data[i])
i += 1
ecOutputCount := uint8(data[i])
i += 1
// Decode the body
// Decode the inputs
f.FCTInputs = make([]FactoidTransactionIO, inputCount)
read, err := factoidTransactionIOs(f.FCTInputs).Decode(data[i:])
if err != nil {
return 0, err
}
i += read
// Decode the FCT Outputs
f.FCTOutputs = make([]FactoidTransactionIO, fctOutputCount)
read, err = factoidTransactionIOs(f.FCTOutputs).Decode(data[i:])
if err != nil {
return 0, err
}
i += read
// Decode the EC Outputs
f.ECOutputs = make([]FactoidTransactionIO, ecOutputCount)
read, err = factoidTransactionIOs(f.ECOutputs).Decode(data[i:])
if err != nil {
return 0, err
}
i += read
// All data minus the signatures is the needed binary data to compute
// the txid
ledgerData := data[:i]
// Decode the signature blocks, one per input
f.Signatures = make([]FactoidTransactionSignature, len(f.FCTInputs))
for c := uint8(0); c < uint8(len(f.FCTInputs)); c++ {
// f.Signatures[i] = new(FactoidTransactionSignature)
read, err := f.Signatures[c].Decode(data[i:])
if err != nil {
return 0, err
}
i += read
}
txid, err := f.computeTransactionID(ledgerData)
if err != nil {
return 0, err
}
// If the txid is already set, validate the txid
if f.TransactionID != nil {
if *f.TransactionID != txid {
return 0, fmt.Errorf("invalid txid")
}
}
f.TransactionID = &txid
return i, err
}
// UnmarshalBinary unmarshals the data into a factoid transaction.
func (f *FactoidTransaction) UnmarshalBinary(data []byte) error {
// TODO: Some length checks to prevent too few/too many bytes
_, err := f.Decode(data)
return err
}
// Decode takes a given input and decodes the set of bytes needed to populate
// the set of factoid transactions ios. The set length should be preset before
// calling this function. It will return how many bytes it read and return an error.
func (ios factoidTransactionIOs) Decode(data []byte) (int, error) {
var i int
for c := range ios {
read, err := ios[c].Decode(data[i:])
if err != nil {
return 0, err
}
i += read
}
return i, nil
}
// Decode takes a given input and decodes the set of bytes needed for a full
// transaction input/output. It will return how many bytes it read and an error.
// A FactoidTransactionIO includes an amount and an address.
func (io *FactoidTransactionIO) Decode(data []byte) (int, error) {
amount, i := varintf.Decode(data)
if i < 0 {
return 0, fmt.Errorf("amount is not a valid varint")
}
io.Amount = amount
if len(data)-i < 32 {
return 0, fmt.Errorf("not enough bytes to decode factoidtx")
}
var tmp Bytes32 // TODO: Fix this
copy(tmp[:], data[i:i+32])
io.Address = tmp
i += 32
return i, nil
}
// Decode will take a given input and decode the set of bytes needed for the full
// FactoidTransactionSignature. It will return how many bytes it read and an error.
// A FactoidTransactionSignature includes the RCD type and it's signature block.
func (s *FactoidTransactionSignature) Decode(data []byte) (int, error) {
rcd, i, err := DecodeRCD(data)
if err != nil {
return 0, err
}
// TODO: How do you want to handle this? Have the decode only return the
// concrete rcd1 type?
rcd1, ok := rcd.(*RCD1)
if !ok {
return -1, fmt.Errorf("rcd %d type not supported", rcd.Type())
}
s.ReedeemCondition = *rcd1
s.SignatureBlock = make([]byte, rcd.SignatureBlockSize())
i += copy(s.SignatureBlock, data[i:])
return i, nil
}
| {
return s.SignatureBlock != nil
} | identifier_body |
transaction.go | package factom
import (
"bytes"
"context"
"crypto/sha256"
"encoding/binary"
"fmt"
"time"
"github.com/Factom-Asset-Tokens/factom/varintf"
)
type FactoidTransaction struct {
// TODO: The header is usually at the top level, is this ok?
FactoidTransactionHeader
FCTInputs []FactoidTransactionIO
FCTOutputs []FactoidTransactionIO
ECOutputs []FactoidTransactionIO
Signatures []FactoidTransactionSignature
}
type FactoidTransactionHeader struct {
// TransactionID is not in the marshalled binary
TransactionID *Bytes32
Version uint64
// TimestampSalt is accurate to the millisecond
TimestampSalt time.Time
}
// factoidTransactionIOs is used as a wrapper for an array of IOs to reuse the
// functionality. This is compared to writing your own loop to handle
// lists of io behavior.
type factoidTransactionIOs []FactoidTransactionIO
type FactoidTransactionIO struct {
Amount uint64
// Address can be an SHA256d(RCD) for FCT in/out, or a public key for EC out.
// It is the encoded bytes into the human readable addresses
Address Bytes32
}
type FactoidTransactionSignature struct {
// SHA256d(RCD) == FactoidIOAddress for the inputs
ReedeemCondition RCD1
SignatureBlock Bytes
}
// IsPopulated returns true if f has already been successfully populated by a
// call to Get. IsPopulated returns false if f.FCTInputs, or f.Signatures are
// nil, or if f.Timestamp is zero.
func (f FactoidTransaction) IsPopulated() bool {
return f.FCTInputs != nil && // This array should not be nil
f.Signatures != nil &&
!f.TimestampSalt.IsZero()
}
// IsPopulated returns true if s has already been successfully populated by a
// call to Get. IsPopulated returns false if s.SignatureBlock or
// s.ReedeemCondition are nil
func (s FactoidTransactionSignature) IsPopulated() bool {
return s.SignatureBlock != nil
}
// Valid returns if the inputs of the factoid transaction are properly signed
// by the redeem conditions. It will also validate the total inputs is greater
// than the total outputs.
func (f *FactoidTransaction) Valid() bool {
if !f.IsPopulated() {
return false
}
// Validate amounts
if f.TotalFCTInputs() < f.TotalFCTOutputs()+f.TotalECOutput() {
return false
}
// Validate signatures
if len(f.FCTInputs) != len(f.Signatures) {
return false
}
msg, err := f.MarshalLedgerBinary()
if err != nil {
return false
}
for i := range f.FCTInputs {
expAddr := f.Signatures[i].ReedeemCondition.Address()
// RCD should match the input
if bytes.Compare(expAddr[:], f.FCTInputs[i].Address[:]) != 0 {
return false
}
if !f.Signatures[i].Validate(msg) {
return false
}
}
return true
}
func (f *FactoidTransaction) TotalFCTInputs() (total uint64) {
return factoidTransactionIOs(f.FCTInputs).TotalAmount()
}
func (f *FactoidTransaction) TotalFCTOutputs() (total uint64) {
return factoidTransactionIOs(f.FCTOutputs).TotalAmount()
}
// TotalECOutput is delimated in factoishis
func (f *FactoidTransaction) TotalECOutput() (total uint64) {
return factoidTransactionIOs(f.ECOutputs).TotalAmount()
}
func (s factoidTransactionIOs) TotalAmount() (total uint64) {
for _, io := range s {
total += io.Amount
}
return
}
func (s FactoidTransactionSignature) Validate(msg Bytes) bool {
return s.ReedeemCondition.Validate(msg, s.SignatureBlock)
}
// Get queries factomd for the entry corresponding to f.TransactionID, which
// must be not nil. After a successful call all inputs, outputs, and
// the header will be populated
func (f *FactoidTransaction) Get(ctx context.Context, c *Client) error {
// TODO: Test this functionality
// If the TransactionID is nil then we have nothing to query for.
if f.TransactionID == nil {
return fmt.Errorf("txid is nil")
}
// If the Transaction is already populated then there is nothing to do. If
// the Hash is nil, we cannot populate it anyway.
if f.IsPopulated() {
return nil
}
params := struct {
Hash *Bytes32 `json:"hash"`
}{Hash: f.TransactionID}
var result struct {
Data Bytes `json:"data"`
}
if err := c.FactomdRequest(ctx, "raw-data", params, &result); err != nil {
return err
}
if err := f.UnmarshalBinary(result.Data); err != nil {
return err
}
return nil
}
// ComputeTransactionID computes the txid for a given transaction. The txid is
// the sha256 of the ledger fields in a factoid transaction. The ledger fields
// exclude the signature block of the transaction
func (f *FactoidTransaction) ComputeTransactionID() (Bytes32, error) {
data, err := f.MarshalLedgerBinary()
if err != nil {
return Bytes32{}, err
}
return f.computeTransactionID(data)
}
func (f *FactoidTransaction) computeTransactionID(ledgerBinary Bytes) (Bytes32, error) {
txid := Bytes32(sha256.Sum256(ledgerBinary))
return txid, nil
}
// ComputeFullHash computes the fullhash for a given transaction. The fullhash
// is the sha256 of all the fields in a factoid transaction.
func (f *FactoidTransaction) ComputeFullHash() (*Bytes32, error) {
data, err := f.MarshalBinary()
if err != nil {
return nil, err
}
txid := Bytes32(sha256.Sum256(data))
return &txid, nil
}
// MarshalLedgerBinary marshals the transaction ledger fields to their
// binary representation. This excludes the signature blocks
func (f *FactoidTransaction) MarshalLedgerBinary() ([]byte, error) {
// TODO: More checks up front?
if !f.IsPopulated() {
return nil, fmt.Errorf("not populated")
}
// It's very difficult to know the size before marshaling, as
// each in/out has a varint so make the buffer at the end
// The header bytes
header, err := f.MarshalHeaderBinary()
if err != nil {
return nil, err
}
// Inputs
inputs, err := factoidTransactionIOs(f.FCTInputs).MarshalBinary()
if err != nil {
return nil, err
}
// FCT Outputs
fctout, err := factoidTransactionIOs(f.FCTOutputs).MarshalBinary()
if err != nil {
return nil, err
}
// EC Outputs
ecout, err := factoidTransactionIOs(f.ECOutputs).MarshalBinary()
if err != nil {
return nil, err
}
data := make([]byte, len(header)+len(inputs)+len(fctout)+len(ecout))
var i int
i += copy(data[i:], header)
i += copy(data[i:], inputs)
i += copy(data[i:], fctout)
i += copy(data[i:], ecout)
return data, nil
}
// TODO: Re-eval how to do this. Kinda different from the rest
func (f *FactoidTransaction) MarshalBinary() ([]byte, error) {
// TODO: More checks up front?
if !f.IsPopulated() {
return nil, fmt.Errorf("not populated")
}
data, err := f.MarshalLedgerBinary()
if err != nil {
return nil, err
}
for _, s := range f.Signatures {
sig, err := s.MarshalBinary()
if err != nil {
return nil, err
}
data = append(data, sig...)
}
return data, nil
}
// MarshalHeaderBinary marshals the transaction's header to its binary
// representation. See UnmarshalHeaderBinary for encoding details.
func (f *FactoidTransaction) MarshalHeaderBinary() ([]byte, error) {
version := varintf.Encode(f.Version)
data := make([]byte, TransactionHeadMinLen+len(version))
var i int
i += copy(data[i:], version)
// Do the timestamp as 6 bytes in ms
ms := f.TimestampSalt.UnixNano() / 1e6
buf := bytes.NewBuffer(make([]byte, 0, 8))
if err := binary.Write(buf, binary.BigEndian, ms); err != nil {
return nil, err
}
i += copy(data[i:], buf.Bytes()[2:])
data[i] = uint8(len(f.FCTInputs))
i += 1
data[i] = uint8(len(f.FCTOutputs))
i += 1
data[i] = uint8(len(f.ECOutputs))
i += 1
return data, nil
}
// MarshalBinary marshals a set of transaction ios to its binary representation.
// See UnmarshalBinary for encoding details.
func (ios factoidTransactionIOs) MarshalBinary() ([]byte, error) {
var data []byte
for _, io := range ios {
iodata, err := io.MarshalBinary()
if err != nil {
return nil, err
}
data = append(data, iodata...)
}
return data, nil | // See UnmarshalBinary for encoding details.
func (io *FactoidTransactionIO) MarshalBinary() ([]byte, error) {
amount := varintf.Encode(io.Amount)
data := make([]byte, 32+len(amount))
var i int
i += copy(data[i:], amount)
i += copy(data[i:], io.Address[:])
return data, nil
}
// MarshalBinary marshals a transaction signature to its binary representation.
// See UnmarshalBinary for encoding details.
func (s *FactoidTransactionSignature) MarshalBinary() ([]byte, error) {
if !s.IsPopulated() {
return nil, fmt.Errorf("not populated")
}
rcdData, err := s.ReedeemCondition.MarshalBinary()
if err != nil {
return nil, err
}
data := make([]byte, len(rcdData)+len(s.SignatureBlock))
var i int
i += copy(data[i:], rcdData)
i += copy(data[i:], s.SignatureBlock)
return data, nil
}
const (
TransactionHeadMinLen = 0 + // Version length is varint
6 + // timestamp
1 + // input count
1 + // factoid output count
1 // EC output count
TransactionTotalMinLen = TransactionHeadMinLen // Coinbases have no body
)
// Decode will consume as many bytes as necessary to unmarshal the factoid
// transaction. It will return the number of bytes read and an error.
func (f *FactoidTransaction) Decode(data []byte) (i int, err error) {
if len(data) < TransactionTotalMinLen {
return 0, fmt.Errorf("insufficient length")
}
// Decode header
version, i := varintf.Decode(data)
if i < 0 {
return 0, fmt.Errorf("version bytes invalid")
}
f.Version = version
msdata := make([]byte, 8)
// TS + counts length check
if len(data) < i+(6+3) {
return 0, fmt.Errorf("not enough bytes to decode tx")
}
copy(msdata[2:], data[i:i+6])
ms := binary.BigEndian.Uint64(msdata)
f.TimestampSalt = time.Unix(0, int64(ms)*1e6)
i += 6
inputCount := uint8(data[i])
i += 1
fctOutputCount := uint8(data[i])
i += 1
ecOutputCount := uint8(data[i])
i += 1
// Decode the body
// Decode the inputs
f.FCTInputs = make([]FactoidTransactionIO, inputCount)
read, err := factoidTransactionIOs(f.FCTInputs).Decode(data[i:])
if err != nil {
return 0, err
}
i += read
// Decode the FCT Outputs
f.FCTOutputs = make([]FactoidTransactionIO, fctOutputCount)
read, err = factoidTransactionIOs(f.FCTOutputs).Decode(data[i:])
if err != nil {
return 0, err
}
i += read
// Decode the EC Outputs
f.ECOutputs = make([]FactoidTransactionIO, ecOutputCount)
read, err = factoidTransactionIOs(f.ECOutputs).Decode(data[i:])
if err != nil {
return 0, err
}
i += read
// All data minus the signatures is the needed binary data to compute
// the txid
ledgerData := data[:i]
// Decode the signature blocks, one per input
f.Signatures = make([]FactoidTransactionSignature, len(f.FCTInputs))
for c := uint8(0); c < uint8(len(f.FCTInputs)); c++ {
// f.Signatures[i] = new(FactoidTransactionSignature)
read, err := f.Signatures[c].Decode(data[i:])
if err != nil {
return 0, err
}
i += read
}
txid, err := f.computeTransactionID(ledgerData)
if err != nil {
return 0, err
}
// If the txid is already set, validate the txid
if f.TransactionID != nil {
if *f.TransactionID != txid {
return 0, fmt.Errorf("invalid txid")
}
}
f.TransactionID = &txid
return i, err
}
// UnmarshalBinary unmarshals the data into a factoid transaction.
func (f *FactoidTransaction) UnmarshalBinary(data []byte) error {
// TODO: Some length checks to prevent too few/too many bytes
_, err := f.Decode(data)
return err
}
// Decode takes a given input and decodes the set of bytes needed to populate
// the set of factoid transactions ios. The set length should be preset before
// calling this function. It will return how many bytes it read and return an error.
func (ios factoidTransactionIOs) Decode(data []byte) (int, error) {
var i int
for c := range ios {
read, err := ios[c].Decode(data[i:])
if err != nil {
return 0, err
}
i += read
}
return i, nil
}
// Decode takes a given input and decodes the set of bytes needed for a full
// transaction input/output. It will return how many bytes it read and an error.
// A FactoidTransactionIO includes an amount and an address.
func (io *FactoidTransactionIO) Decode(data []byte) (int, error) {
amount, i := varintf.Decode(data)
if i < 0 {
return 0, fmt.Errorf("amount is not a valid varint")
}
io.Amount = amount
if len(data)-i < 32 {
return 0, fmt.Errorf("not enough bytes to decode factoidtx")
}
var tmp Bytes32 // TODO: Fix this
copy(tmp[:], data[i:i+32])
io.Address = tmp
i += 32
return i, nil
}
// Decode will take a given input and decode the set of bytes needed for the full
// FactoidTransactionSignature. It will return how many bytes it read and an error.
// A FactoidTransactionSignature includes the RCD type and it's signature block.
func (s *FactoidTransactionSignature) Decode(data []byte) (int, error) {
rcd, i, err := DecodeRCD(data)
if err != nil {
return 0, err
}
// TODO: How do you want to handle this? Have the decode only return the
// concrete rcd1 type?
rcd1, ok := rcd.(*RCD1)
if !ok {
return -1, fmt.Errorf("rcd %d type not supported", rcd.Type())
}
s.ReedeemCondition = *rcd1
s.SignatureBlock = make([]byte, rcd.SignatureBlockSize())
i += copy(s.SignatureBlock, data[i:])
return i, nil
} | }
// MarshalBinary marshals a transaction io to its binary representation. | random_line_split |
core_bert.py | import sys
import codecs
import time
import json
from scipy.spatial.distance import cosine
import code
from models import BertMatch
import torch
import torch.nn as nn
import torch.nn.functional as F
import math
import pkuseg
from elmoformanylangs import Embedder
import numpy as np
from pytorch_transformers import BertTokenizer
import six
import pandas as pd
from config import config
seg = pkuseg.pkuseg()
DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = BertMatch()
model.load_state_dict(torch.load(config.bert_train_modelPath))
model = model.to(DEVICE)
pretrained_model_name_or_path = config.bert_pretrain_model_path
print('bert预训练模型已加载')
class answerCandidate:
def __init__(self, sub = '', pre = '', qRaw = '', qType = 0, score = 0, kbDict = [], wS = 1, wP = 10, wAP = 100):
self.sub = sub # subject
self.pre = pre # predicate
self.qRaw = qRaw # raw question
self.qType = qType # question type
self.score = score # 分数
self.kbDict = kbDict # kd dictionary
self.origin = ''
self.scoreDetail = [0,0,0,0,0]
self.wS = wS # subject的权重
self.wP = wP # oredicate的权重
self.wAP = wAP # answer pattern的权重
self.scoreSub = 0
self.scoreAP = 0
self.scorePre = 0
def calcScore(self, qtList, countCharDict, debug=False, includingObj = [], use_elmo=False):
# 最重要的部分,计算该答案的分数
lenSub = len(self.sub)
scorePre = 0
scoreAP = 0
pre = self.pre
q = self.qRaw
subIndex = q.index(self.sub)
qWithoutSub1 = q[:subIndex] # subject左边的部分
qWithoutSub2 = q[subIndex+lenSub:] # subject右边的部分
qWithoutSub = q.replace(self.sub,'') # 去掉subject剩下的部分
qtKey = (self.qRaw.replace(self.sub,'(SUB)',1) + ' ||| ' + pre) # 把subject换成(sub)然后加上predicate
if qtKey in qtList:
scoreAP = qtList[qtKey] # 查看当前的问题有没有在知识库中出现过
self.scoreAP = scoreAP
qWithoutSubSet1 = set(qWithoutSub1)
qWithoutSubSet2 = set(qWithoutSub2)
qWithoutSubSet = set(qWithoutSub)
preLowerSet = set(pre.lower())
# 找出predicate和问题前后两部分的最大intersection
intersection1 = qWithoutSubSet1 & preLowerSet
intersection2 = qWithoutSubSet2 & preLowerSet
if len(intersection1) > len(intersection2):
maxIntersection = intersection1
else:
maxIntersection = intersection2
# 计算来自predicate的分数,采用最大overlap的character的倒数 1/(n+1)
preFactor = 0
for char in maxIntersection:
if char in countCharDict:
preFactor += 1/(countCharDict[char] + 1)
else:
preFactor += 1
if len(pre) != 0:
scorePre = preFactor / len(qWithoutSubSet | preLowerSet)
else:
scorePre = 0
if len(includingObj) != 0 and scorePre == 0:
for objStr in includingObj:
scorePreTmp = 0
preLowerSet = set(objStr.lower())
intersection1 = qWithoutSubSet1 & preLowerSet
intersection2 = qWithoutSubSet2 & preLowerSet
if len(intersection1) > len(intersection2):
maxIntersection = intersection1
else:
maxIntersection = intersection2
preFactor = 0
for char in maxIntersection:
if char in countCharDict:
preFactor += 1/(countCharDict[char] + 1)
else:
preFactor += 1
scorePreTmp = preFactor / len(qWithoutSubSet | preLowerSet)
if scorePreTmp > scorePre:
scorePre = scorePreTmp
if use_elmo and len(pre) != 0:
preCut = [pre]
qWithoutSubCut = [qWithoutSub]
data_df = pd.DataFrame({'question': qWithoutSubCut, 'sim_question': preCut})
q_data = data_df['question'].apply(sent2ids)
p_data = data_df['sim_question'].apply(sent2ids)
q_data_e = data2tensor([q_data[0]])
p_data_e = data2tensor([p_data[0]])
output = model(q_data_e,p_data_e) #
scorePre_m = output[0][0]##bert模型做相似度计算,第一个值是相似性分数
self.scorePre = scorePre_m
scoreSub = 0
# 计算subject的权重有多高,可能有些subject本身就是更重要一些,一般来说越罕见的entity重要性越高
for char in self.sub:
if char in countCharDict:
scoreSub += 1/(countCharDict[char] + 1)
else:
scoreSub += 1
self.scoreSub = scoreSub
self.scorePre = scorePre
self.score = scoreSub * self.wS + scorePre * self.wP + scoreAP * self.wAP
return self.score
def pad_sequences(sequences, maxlen=None, dtype='int32',
padding='pre', truncating='pre', value=0.):
num_samples = len(sequences)
lengths = []
for x in sequences:
try:
lengths.append(len(x))
except TypeError:
raise ValueError('`sequences` must be a list of iterables. '
'Found non-iterable: ' + str(x))
if maxlen is None:
maxlen = np.max(lengths)
sample_shape = tuple()
for s in sequences:
if len(s) > 0:
sample_shape = np.asarray(s).shape[1:]
break
is_dtype_str = np.issubdtype(dtype, np.str_) or np.issubdtype(dtype, np.unicode_)
if isinstance(value, six.string_types) and dtype != object and not is_dtype_str:
raise ValueError("`dtype` {} is not compatible with `value`'s type: {}\n"
"You should set `dtype=object` for variable length strings."
.format(dtype, type(value)))
x = np.full((num_samples, maxlen) + sample_shape, value, dtype=dtype)
for idx, s in enumerate(sequences):
if not len(s):
continue # empty list/array was found
if truncating == 'pre':
trunc = s[-maxlen:]
elif truncating == 'post':
trunc = s[:maxlen]
else:
raise ValueError('Truncating type "%s" '
'not understood' % truncating)
trunc = np.asarray(trunc, dtype=dtype)
if trunc.shape[1:] != sample_shape:
raise ValueError('Shape of sample %s of sequence at position %s '
'is different from expected shape %s' %
(trunc.shape[1:], idx, sample_shape))
if padding == 'post':
x[idx, :len(trunc)] = trunc
elif padding == 'pre':
x[idx, -len(trunc):] = trunc
else:
raise ValueError('Padding type "%s" not understood' % padding)
return x
def data2tensor(batch_token_ids, pad=True, maxlen=50):
if pad:
batch_token_ids = pad_sequences(batch_token_ids, maxlen=maxlen, padding='post')
batch_token_ids = torch.tensor(batch_token_ids, dtype=torch.long).to(DEVICE)
return batch_token_ids
tokenizer = BertTokenizer.from_pretrained(pretrained_model_name_or_path)
def sent2ids(sent_text):
sent_tokens = ['[CLS]'] + tokenizer.tokenize(sent_text) + ["[SEP]"]
token_ids = tokenizer.convert_tokens_to_ids(sent_tokens)
return token_ids
def getAnswer(sub, pre, kbDict):
answerList = []
for kb in kbDict[sub]:
if pre in kb:
answerList.append(kb[pre])
return answerList
def answerQ (qRaw, lKey, kbDict, qtList, countCharDict, wP=10, threshold=0, debug=False):
q = qRaw.strip().lower() # 问题转化成小写
candidateSet = set()
result = ''
maxScore = 0
bestAnswer = set()
for key in lKey:
if -1 != q.find(key): # 如果问题中出现了该subject,那么我们就要考虑这个subject的triples
for kb in kbDict[key]:
for pre in list(kb):
newAnswerCandidate = answerCandidate(key, pre, q, wP=wP) # 构建一个新的answer candidate
candidateSet.add(newAnswerCandidate)
candidateSetCopy = candidateSet.copy()
if debug:
print('len(candidateSet) = ' + str(len(candidateSetCopy)), end = '\r', flush=True)
candidateSet = set()
candidateSetIndex = set()
for aCandidate in candidateSetCopy:
strTmp = str(aCandidate.sub+'|'+aCandidate.pre)
if strTmp not in candidateSetIndex:
candidateSetIndex.add(strTmp)
candidateSet.add(aCandidate)
# 针对每一个candidate answer,计算该candidate的分数,然后选择分数最高的作为答案
for aCandidate in candidateSet:
scoreTmp = aCandidate.calcScore(qtList, countCharDict,debug)
if scoreTmp > maxScore:
maxScore = scoreTmp
bestAnswer = set()
if scoreTmp == maxScore:
bestAnswer.add(aCandidate)
# 去除一些重复的答案
bestAnswerCopy = bestAnswer.copy()
bestAnswer = set()
for aCandidate in bestAnswerCopy:
aCfound = 0
for aC in bestAnswer:
if aC.pre == aCandidate.pre and aC.sub == aCandidate.sub:
aCfound = 1
break
if aCfound == 0:
bestAnswer.add(aCandidate)
# 加入object的分数
bestAnswerCopy = bestAnswer.copy()
for aCandidate in bestAnswerCopy:
if aCandidate.score == aCandidate.scoreSub:
scoreReCal = aCandidate.calcScore(qtList, countCharDict,debug, includingObj=getAnswer(aCandidate.sub, aCandidate.pre, kbDict))
if scoreReCal > maxScore:
bestAnswer = set()
maxScore = scoreReCal
if scoreReCal == maxScore:
bestAnswer.add(aCandidate)
# 加入cosine similarity
bestAnswerCopy = bestAnswer.copy()
if len(bestAnswer) > 1: # use word vector to remove duplicated answer
for aCandidate in bestAnswerCopy:
scoreReCal = aCandidate.calcScore(qtList, countCharDict,debug, includingObj=getAnswer(aCandidate.sub, aCandidate.pre, kbDict), use_elmo | bestAnswer = set()
maxScore = scoreReCal
if scoreReCal == maxScore:
bestAnswer.add(aCandidate)
if debug:
for ai in bestAnswer:
for kb in kbDict[ai.sub]:
if ai.pre in kb:
print(ai.sub + ' ' +ai.pre + ' '+ kb[ai.pre])
return[bestAnswer,candidateSet]
else:
return bestAnswer
def loadQtList(path, encode = 'utf8'):
qtList = json.load(open(path,'r',encoding=encode))
return qtList
def loadcountCharDict(path, encode = 'utf8'):
countCharDict = json.load(open(path,'r',encoding=encode))
return countCharDict
def answerAllQ(pathInput, pathOutput, lKey, kbDict, qtList, countCharDict, qIDstart=1, wP=10):
fq = open(pathInput, 'r', encoding='utf8')
i = qIDstart
timeStart = time.time()
fo = open(pathOutput, 'w', encoding='utf8')
fo.close()
listQ = []
for line in fq:
if line[1] == 'q':
listQ.append(line[line.index('\t')+1:].strip())
for q in listQ:
fo = open(pathOutput, 'a', encoding='utf8')
result = answerQ(q, lKey, kbDict, qtList, countCharDict, wP=wP)
fo.write('<question id='+str(i)+'>\t' + q.lower() + '\n')
answerLast = ''
if len(result) != 0:
answerSet = []
fo.write('<triple id='+str(i)+'>\t')
for res in result:
answerTmp = getAnswer(res.sub, res.pre, kbDict)
answerSet.append(answerTmp)
fo.write(res.sub.lower() + ' ||| ' + res.pre.lower() + ' ||| '\
+ str(answerTmp) + ' ||| ' + str(res.score) + ' ====== ')
fo.write('\n')
fo.write('<answer id='+str(i)+'>\t')
answerLast = answerSet[0][0]
mulAnswer = False
for ansTmp in answerSet:
for ans in ansTmp:
if ans != answerLast:
mulAnswer = True
continue
if mulAnswer == True:
continue
if mulAnswer == True:
for ansTmp in answerSet:
for ans in ansTmp:
fo.write(ans)
if len(ansTmp) > 1:
fo.write(' | ')
if len(answerSet) > 1:
fo.write(' ||| ')
else:
fo.write(answerLast)
fo.write('\n==================================================\n')
else:
fo.write('<triple id='+str(i)+'>\t')
fo.write('\n')
fo.write('<answer id='+str(i)+'>\t')
fo.write('\n==================================================\n')
print('processing ' + str(i) + 'th Q.\tAv time cost: ' + str((time.time()-timeStart) / i)[:6] + ' sec', end = '\r', flush=True)
fo.close()
i += 1
fq.close()
def loadResAndanswerAllQ(pathInput, pathOutput, pathDict, pathQt, pathCD, encode='utf8', qIDstart=1, wP=10):
kbDict = json.load(open(pathDict, 'r', encoding=encode)) # kbJson.cleanPre.alias.utf8
qtList = loadQtList(pathQt, encode) # outputAP
countCharDict = loadcountCharDict(pathCD) # countChar
answerAllQ(pathInput, pathOutput, list(kbDict), kbDict, qtList, countCharDict, qIDstart=1,wP=wP)
if __name__ == '__main__':
pathInput = config.test_data_path
pathOutput = config.result_bert_path
pathDict = config.kb_process_path
pathQt = config.output_data_path
pathCD = config.countChar_dir
qIDstart = 1
defaultWeightPre = 30
loadResAndanswerAllQ(pathInput,pathOutput,pathDict,pathQt,pathCD,'utf8', qIDstart, defaultWeightPre)
| =True)
if scoreReCal > maxScore:
| conditional_block |
core_bert.py | import sys
import codecs
import time
import json
from scipy.spatial.distance import cosine
import code
from models import BertMatch
import torch
import torch.nn as nn
import torch.nn.functional as F
import math
import pkuseg
from elmoformanylangs import Embedder
import numpy as np
from pytorch_transformers import BertTokenizer
import six
import pandas as pd
from config import config
seg = pkuseg.pkuseg()
DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = BertMatch()
model.load_state_dict(torch.load(config.bert_train_modelPath))
model = model.to(DEVICE)
pretrained_model_name_or_path = config.bert_pretrain_model_path
print('bert预训练模型已加载')
class answerCandidate:
def __init__(self, sub = '', pre = '', qRaw = '', qType = 0, score = 0, kbDict = [], wS = 1, wP = 10, wAP = 100):
self.sub = sub # subject
self.pre = pre # predicate
self.qRaw = qRaw # raw question
self.qType = qType # question type
self.score = score # 分数
self.kbDict = kbDict # kd dictionary
self.origin = ''
self.scoreDetail = [0,0,0,0,0]
self.wS = wS # subject的权重
self.wP = wP # oredicate的权重
self.wAP = wAP # answer pattern的权重
self.scoreSub = 0
self.scoreAP = 0
self.scorePre = 0
def calcScore(self, qtList, countCharDict, debug=False, includingObj = [], use_elmo=False):
# 最重要的部分,计算该答案的分数
lenSub = len(self.sub)
scorePre = 0
scoreAP = 0
pre = self.pre
q = self.qRaw
subIndex = q.index(self.sub)
qWithoutSub1 = q[:subIndex] # subject左边的部分
qWithoutSub2 = q[subIndex+lenSub:] # subject右边的部分
qWithoutSub = q.replace(self.sub,'') # 去掉subject剩下的部分
qtKey = (self.qRaw.replace(self.sub,'(SUB)',1) + ' ||| ' + pre) # 把subject换成(sub)然后加上predicate
if qtKey in qtList:
scoreAP = qtList[qtKey] # 查看当前的问题有没有在知识库中出现过
self.scoreAP = scoreAP
qWithoutSubSet1 = set(qWithoutSub1)
qWithoutSubSet2 = set(qWithoutSub2)
qWithoutSubSet = set(qWithoutSub)
preLowerSet = set(pre.lower())
# 找出predicate和问题前后两部分的最大intersection
intersection1 = qWithoutSubSet1 & preLowerSet
intersection2 = qWithoutSubSet2 & preLowerSet
if len(intersection1) > len(intersection2):
maxIntersection = intersection1
else:
maxIntersection = intersection2
# 计算来自predicate的分数,采用最大overlap的character的倒数 1/(n+1)
preFactor = 0
for char in maxIntersection:
if char in countCharDict:
preFactor += 1/(countCharDict[char] + 1)
else:
preFactor += 1
if len(pre) != 0:
scorePre = preFactor / len(qWithoutSubSet | preLowerSet)
else:
scorePre = 0
if len(includingObj) != 0 and scorePre == 0:
for objStr in includingObj:
scorePreTmp = 0
preLowerSet = set(objStr.lower())
intersection1 = qWithoutSubSet1 & preLowerSet
intersection2 = qWithoutSubSet2 & preLowerSet
if len(intersection1) > len(intersection2):
maxIntersection = intersection1
else:
maxIntersection = intersection2
preFactor = 0
for char in maxIntersection:
if char in countCharDict:
preFactor += 1/(countCharDict[char] + 1)
else:
preFactor += 1
scorePreTmp = preFactor / len(qWithoutSubSet | preLowerSet)
if scorePreTmp > scorePre:
scorePre = scorePreTmp
if use_elmo and len(pre) != 0:
preCut = [pre]
qWithoutSubCut = [qWithoutSub]
data_df = pd.DataFrame({'question': qWithoutSubCut, 'sim_question': preCut})
q_data = data_df['question'].apply(sent2ids)
p_data = data_df['sim_question'].apply(sent2ids)
q_data_e = data2tensor([q_data[0]])
p_data_e = data2tensor([p_data[0]])
output = model(q_data_e,p_data_e) #
scorePre_m = output[0][0]##bert模型做相似度计算,第一个值是相似性分数
self.scorePre = scorePre_m
scoreSub = 0
# 计算subject的权重有多高,可能有些subject本身就是更重要一些,一般来说越罕见的entity重要性越高
for char in self.sub:
if char in countCharDict:
scoreSub += 1/(countCharDict[char] + 1)
else:
scoreSub += 1
self.scoreSub = scoreSub
self.scorePre = scorePre
self.score = scoreSub * self.wS + scorePre * self.wP + scoreAP * self.wAP
return self.score
def pad_sequences(sequences, maxlen=None, dtype='int32',
padding='pre', truncating='pre', value=0.):
num_samples = len(sequences)
lengths = []
for x in sequences:
try:
lengths.append(len(x))
except TypeError:
raise ValueError('`sequences` must be a list of iterables. '
'Found non-iterable: ' + str(x))
if maxlen is None:
maxlen = np.max(lengths)
sample_shape = tuple()
for s in sequences:
if len(s) > 0:
sample_shape = np.asarray(s).shape[1:]
break
is_dtype_str = np.issubdtype(dtype, np.str_) or np.issubdtype(dtype, np.unicode_)
if isinstance(value, six.string_types) and dtype != object and not is_dtype_str:
raise ValueError("`dtype` {} is not compatible with `value`'s type: {}\n"
"You should set `dtype=object` for variable length strings."
.format(dtype, type(value)))
x = np.full((num_samples, maxlen) + sample_shape, value, dtype=dtype)
for idx, s in enumerate(sequences):
if not len(s):
continue # empty list/array was found
if truncating == 'pre':
trunc = s[-maxlen:]
elif truncating == 'post':
trunc = s[:maxlen]
else:
raise ValueError('Truncating type "%s" '
'not understood' % truncating)
trunc = np.asarray(trunc, dtype=dtype)
if trunc.shape[1:] != sample_shape:
raise ValueError('Shape of sample %s of sequence at position %s '
'is different from expected shape %s' %
(trunc.shape[1:], idx, sample_shape))
if padding == 'post':
x[idx, :len(trunc)] = trunc
elif padding == 'pre':
x[idx, -len(trunc):] = trunc
else:
raise ValueError('Padding type "%s" not understood' % padding)
return x
def data2tensor(batch_token_ids, pad=True, maxlen=50):
if pad:
batch_token_ids = pad_sequences(batch_token_ids, maxlen=maxlen, padding='post')
batch_token_ids = torch.tensor(batch_token_ids, dtype=torch.long).to(DEVICE)
return batch_token_ids
tokenizer = BertTokenizer.from_pretrained(pretrained_model_name_or_path)
def sent2ids(sent_text):
sent_tokens = ['[CLS]'] + tokenizer.tokenize(sent_text) + ["[SEP]"]
token_ids = tokenizer.convert_tokens_to_ids(sent_tokens)
return token_ids
def getAnswer(sub, pre, kbDict):
answerList = []
for kb in kbDict[sub]:
if pre in kb:
answerList.append(kb[pre])
return answerList
def answerQ (qRaw, lKey, kbDict, qtList, countCharDict, wP=10, threshold=0, debug=False):
q = qRaw.strip().lower() # 问题转化成小写
candidateSet = set()
| = ''
maxScore = 0
bestAnswer = set()
for key in lKey:
if -1 != q.find(key): # 如果问题中出现了该subject,那么我们就要考虑这个subject的triples
for kb in kbDict[key]:
for pre in list(kb):
newAnswerCandidate = answerCandidate(key, pre, q, wP=wP) # 构建一个新的answer candidate
candidateSet.add(newAnswerCandidate)
candidateSetCopy = candidateSet.copy()
if debug:
print('len(candidateSet) = ' + str(len(candidateSetCopy)), end = '\r', flush=True)
candidateSet = set()
candidateSetIndex = set()
for aCandidate in candidateSetCopy:
strTmp = str(aCandidate.sub+'|'+aCandidate.pre)
if strTmp not in candidateSetIndex:
candidateSetIndex.add(strTmp)
candidateSet.add(aCandidate)
# 针对每一个candidate answer,计算该candidate的分数,然后选择分数最高的作为答案
for aCandidate in candidateSet:
scoreTmp = aCandidate.calcScore(qtList, countCharDict,debug)
if scoreTmp > maxScore:
maxScore = scoreTmp
bestAnswer = set()
if scoreTmp == maxScore:
bestAnswer.add(aCandidate)
# 去除一些重复的答案
bestAnswerCopy = bestAnswer.copy()
bestAnswer = set()
for aCandidate in bestAnswerCopy:
aCfound = 0
for aC in bestAnswer:
if aC.pre == aCandidate.pre and aC.sub == aCandidate.sub:
aCfound = 1
break
if aCfound == 0:
bestAnswer.add(aCandidate)
# 加入object的分数
bestAnswerCopy = bestAnswer.copy()
for aCandidate in bestAnswerCopy:
if aCandidate.score == aCandidate.scoreSub:
scoreReCal = aCandidate.calcScore(qtList, countCharDict,debug, includingObj=getAnswer(aCandidate.sub, aCandidate.pre, kbDict))
if scoreReCal > maxScore:
bestAnswer = set()
maxScore = scoreReCal
if scoreReCal == maxScore:
bestAnswer.add(aCandidate)
# 加入cosine similarity
bestAnswerCopy = bestAnswer.copy()
if len(bestAnswer) > 1: # use word vector to remove duplicated answer
for aCandidate in bestAnswerCopy:
scoreReCal = aCandidate.calcScore(qtList, countCharDict,debug, includingObj=getAnswer(aCandidate.sub, aCandidate.pre, kbDict), use_elmo=True)
if scoreReCal > maxScore:
bestAnswer = set()
maxScore = scoreReCal
if scoreReCal == maxScore:
bestAnswer.add(aCandidate)
if debug:
for ai in bestAnswer:
for kb in kbDict[ai.sub]:
if ai.pre in kb:
print(ai.sub + ' ' +ai.pre + ' '+ kb[ai.pre])
return[bestAnswer,candidateSet]
else:
return bestAnswer
def loadQtList(path, encode = 'utf8'):
qtList = json.load(open(path,'r',encoding=encode))
return qtList
def loadcountCharDict(path, encode = 'utf8'):
countCharDict = json.load(open(path,'r',encoding=encode))
return countCharDict
def answerAllQ(pathInput, pathOutput, lKey, kbDict, qtList, countCharDict, qIDstart=1, wP=10):
fq = open(pathInput, 'r', encoding='utf8')
i = qIDstart
timeStart = time.time()
fo = open(pathOutput, 'w', encoding='utf8')
fo.close()
listQ = []
for line in fq:
if line[1] == 'q':
listQ.append(line[line.index('\t')+1:].strip())
for q in listQ:
fo = open(pathOutput, 'a', encoding='utf8')
result = answerQ(q, lKey, kbDict, qtList, countCharDict, wP=wP)
fo.write('<question id='+str(i)+'>\t' + q.lower() + '\n')
answerLast = ''
if len(result) != 0:
answerSet = []
fo.write('<triple id='+str(i)+'>\t')
for res in result:
answerTmp = getAnswer(res.sub, res.pre, kbDict)
answerSet.append(answerTmp)
fo.write(res.sub.lower() + ' ||| ' + res.pre.lower() + ' ||| '\
+ str(answerTmp) + ' ||| ' + str(res.score) + ' ====== ')
fo.write('\n')
fo.write('<answer id='+str(i)+'>\t')
answerLast = answerSet[0][0]
mulAnswer = False
for ansTmp in answerSet:
for ans in ansTmp:
if ans != answerLast:
mulAnswer = True
continue
if mulAnswer == True:
continue
if mulAnswer == True:
for ansTmp in answerSet:
for ans in ansTmp:
fo.write(ans)
if len(ansTmp) > 1:
fo.write(' | ')
if len(answerSet) > 1:
fo.write(' ||| ')
else:
fo.write(answerLast)
fo.write('\n==================================================\n')
else:
fo.write('<triple id='+str(i)+'>\t')
fo.write('\n')
fo.write('<answer id='+str(i)+'>\t')
fo.write('\n==================================================\n')
print('processing ' + str(i) + 'th Q.\tAv time cost: ' + str((time.time()-timeStart) / i)[:6] + ' sec', end = '\r', flush=True)
fo.close()
i += 1
fq.close()
def loadResAndanswerAllQ(pathInput, pathOutput, pathDict, pathQt, pathCD, encode='utf8', qIDstart=1, wP=10):
kbDict = json.load(open(pathDict, 'r', encoding=encode)) # kbJson.cleanPre.alias.utf8
qtList = loadQtList(pathQt, encode) # outputAP
countCharDict = loadcountCharDict(pathCD) # countChar
answerAllQ(pathInput, pathOutput, list(kbDict), kbDict, qtList, countCharDict, qIDstart=1,wP=wP)
if __name__ == '__main__':
pathInput = config.test_data_path
pathOutput = config.result_bert_path
pathDict = config.kb_process_path
pathQt = config.output_data_path
pathCD = config.countChar_dir
qIDstart = 1
defaultWeightPre = 30
loadResAndanswerAllQ(pathInput,pathOutput,pathDict,pathQt,pathCD,'utf8', qIDstart, defaultWeightPre)
| result | identifier_name |
core_bert.py | import sys
import codecs
import time
import json
from scipy.spatial.distance import cosine
import code
from models import BertMatch
import torch
import torch.nn as nn
import torch.nn.functional as F
import math
import pkuseg
from elmoformanylangs import Embedder
import numpy as np
from pytorch_transformers import BertTokenizer
import six
import pandas as pd
from config import config
seg = pkuseg.pkuseg()
DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = BertMatch()
model.load_state_dict(torch.load(config.bert_train_modelPath))
model = model.to(DEVICE)
pretrained_model_name_or_path = config.bert_pretrain_model_path
print('bert预训练模型已加载')
class answerCandidate:
def __init__(self, sub = '', pre = '', qRaw = '', qType = 0, score = 0, kbDict = [], wS = 1, wP = 10, wAP = 100):
self.sub = sub # subject
self.pre = pre # predicate
self.qRaw = qRaw # raw question
self.qType = qType # question type
self.score = score # 分数
self.kbDict = kbDict # kd dictionary
self.origin = ''
self.scoreDetail = [0,0,0,0,0]
self.wS = wS # subject的权重
self.wP = wP # oredicate的权重
self.wAP = wAP # answer pattern的权重
self.scoreSub = 0
self.scoreAP = 0
self.scorePre = 0
def calcScore(self, qtList, countCharDict, debug=False, includingObj = [], use_elmo=False):
# 最重要的部分,计算该答案的分数
lenSub = len(self.sub)
scorePre = 0
scoreAP = 0
pre = self.pre
q = self.qRaw
subIndex = q.index(self.sub)
qWithoutSub1 = q[:subIndex] # subject左边的部分
qWithoutSub2 = q[subIndex+lenSub:] # subject右边的部分
qWithoutSub = q.replace(self.sub,'') # 去掉subject剩下的部分
qtKey = (self.qRaw.replace(self.sub,'(SUB)',1) + ' ||| ' + pre) # 把subject换成(sub)然后加上predicate
if qtKey in qtList:
scoreAP = qtList[qtKey] # 查看当前的问题有没有在知识库中出现过
self.scoreAP = scoreAP
qWithoutSubSet1 = set(qWithoutSub1)
qWithoutSubSet2 = set(qWithoutSub2)
qWithoutSubSet = set(qWithoutSub)
preLowerSet = set(pre.lower())
# 找出predicate和问题前后两部分的最大intersection
intersection1 = qWithoutSubSet1 & preLowerSet
intersection2 = qWithoutSubSet2 & preLowerSet
if len(intersection1) > len(intersection2):
maxIntersection = intersection1
else:
maxIntersection = intersection2
# 计算来自predicate的分数,采用最大overlap的character的倒数 1/(n+1)
preFactor = 0
for char in maxIntersection:
if char in countCharDict:
preFactor += 1/(countCharDict[char] + 1)
else:
preFactor += 1
if len(pre) != 0:
scorePre = preFactor / len(qWithoutSubSet | preLowerSet)
else:
scorePre = 0
if len(includingObj) != 0 and scorePre == 0:
for objStr in includingObj:
scorePreTmp = 0
preLowerSet = set(objStr.lower())
intersection1 = qWithoutSubSet1 & preLowerSet
intersection2 = qWithoutSubSet2 & preLowerSet
if len(intersection1) > len(intersection2):
maxIntersection = intersection1
else:
maxIntersection = intersection2
preFactor = 0
for char in maxIntersection:
if char in countCharDict:
preFactor += 1/(countCharDict[char] + 1)
else:
preFactor += 1
scorePreTmp = preFactor / len(qWithoutSubSet | preLowerSet)
if scorePreTmp > scorePre:
scorePre = scorePreTmp
if use_elmo and len(pre) != 0:
preCut = [pre]
qWithoutSubCut = [qWithoutSub]
data_df = pd.DataFrame({'question': qWithoutSubCut, 'sim_question': preCut})
q_data = data_df['question'].apply(sent2ids)
p_data = data_df['sim_question'].apply(sent2ids)
q_data_e = data2tensor([q_data[0]])
p_data_e = data2tensor([p_data[0]])
output = model(q_data_e,p_data_e) #
scorePre_m = output[0][0]##bert模型做相似度计算,第一个值是相似性分数
self.scorePre = scorePre_m
scoreSub = 0
# 计算subject的权重有多高,可能有些subject本身就是更重要一些,一般来说越罕见的entity重要性越高
for char in self.sub:
if char in countCharDict:
scoreSub += 1/(countCharDict[char] + 1)
else:
scoreSub += 1
self.scoreSub = scoreSub
self.scorePre = scorePre
self.score = scoreSub * self.wS + scorePre * self.wP + scoreAP * self.wAP
return self.score
def pad_sequences(sequences, maxlen=None, dtype='int32',
padding='pre', truncating='pre', value=0.):
num_samples = len(sequences) | try:
lengths.append(len(x))
except TypeError:
raise ValueError('`sequences` must be a list of iterables. '
'Found non-iterable: ' + str(x))
if maxlen is None:
maxlen = np.max(lengths)
sample_shape = tuple()
for s in sequences:
if len(s) > 0:
sample_shape = np.asarray(s).shape[1:]
break
is_dtype_str = np.issubdtype(dtype, np.str_) or np.issubdtype(dtype, np.unicode_)
if isinstance(value, six.string_types) and dtype != object and not is_dtype_str:
raise ValueError("`dtype` {} is not compatible with `value`'s type: {}\n"
"You should set `dtype=object` for variable length strings."
.format(dtype, type(value)))
x = np.full((num_samples, maxlen) + sample_shape, value, dtype=dtype)
for idx, s in enumerate(sequences):
if not len(s):
continue # empty list/array was found
if truncating == 'pre':
trunc = s[-maxlen:]
elif truncating == 'post':
trunc = s[:maxlen]
else:
raise ValueError('Truncating type "%s" '
'not understood' % truncating)
trunc = np.asarray(trunc, dtype=dtype)
if trunc.shape[1:] != sample_shape:
raise ValueError('Shape of sample %s of sequence at position %s '
'is different from expected shape %s' %
(trunc.shape[1:], idx, sample_shape))
if padding == 'post':
x[idx, :len(trunc)] = trunc
elif padding == 'pre':
x[idx, -len(trunc):] = trunc
else:
raise ValueError('Padding type "%s" not understood' % padding)
return x
def data2tensor(batch_token_ids, pad=True, maxlen=50):
if pad:
batch_token_ids = pad_sequences(batch_token_ids, maxlen=maxlen, padding='post')
batch_token_ids = torch.tensor(batch_token_ids, dtype=torch.long).to(DEVICE)
return batch_token_ids
tokenizer = BertTokenizer.from_pretrained(pretrained_model_name_or_path)
def sent2ids(sent_text):
sent_tokens = ['[CLS]'] + tokenizer.tokenize(sent_text) + ["[SEP]"]
token_ids = tokenizer.convert_tokens_to_ids(sent_tokens)
return token_ids
def getAnswer(sub, pre, kbDict):
answerList = []
for kb in kbDict[sub]:
if pre in kb:
answerList.append(kb[pre])
return answerList
def answerQ (qRaw, lKey, kbDict, qtList, countCharDict, wP=10, threshold=0, debug=False):
q = qRaw.strip().lower() # 问题转化成小写
candidateSet = set()
result = ''
maxScore = 0
bestAnswer = set()
for key in lKey:
if -1 != q.find(key): # 如果问题中出现了该subject,那么我们就要考虑这个subject的triples
for kb in kbDict[key]:
for pre in list(kb):
newAnswerCandidate = answerCandidate(key, pre, q, wP=wP) # 构建一个新的answer candidate
candidateSet.add(newAnswerCandidate)
candidateSetCopy = candidateSet.copy()
if debug:
print('len(candidateSet) = ' + str(len(candidateSetCopy)), end = '\r', flush=True)
candidateSet = set()
candidateSetIndex = set()
for aCandidate in candidateSetCopy:
strTmp = str(aCandidate.sub+'|'+aCandidate.pre)
if strTmp not in candidateSetIndex:
candidateSetIndex.add(strTmp)
candidateSet.add(aCandidate)
# 针对每一个candidate answer,计算该candidate的分数,然后选择分数最高的作为答案
for aCandidate in candidateSet:
scoreTmp = aCandidate.calcScore(qtList, countCharDict,debug)
if scoreTmp > maxScore:
maxScore = scoreTmp
bestAnswer = set()
if scoreTmp == maxScore:
bestAnswer.add(aCandidate)
# 去除一些重复的答案
bestAnswerCopy = bestAnswer.copy()
bestAnswer = set()
for aCandidate in bestAnswerCopy:
aCfound = 0
for aC in bestAnswer:
if aC.pre == aCandidate.pre and aC.sub == aCandidate.sub:
aCfound = 1
break
if aCfound == 0:
bestAnswer.add(aCandidate)
# 加入object的分数
bestAnswerCopy = bestAnswer.copy()
for aCandidate in bestAnswerCopy:
if aCandidate.score == aCandidate.scoreSub:
scoreReCal = aCandidate.calcScore(qtList, countCharDict,debug, includingObj=getAnswer(aCandidate.sub, aCandidate.pre, kbDict))
if scoreReCal > maxScore:
bestAnswer = set()
maxScore = scoreReCal
if scoreReCal == maxScore:
bestAnswer.add(aCandidate)
# 加入cosine similarity
bestAnswerCopy = bestAnswer.copy()
if len(bestAnswer) > 1: # use word vector to remove duplicated answer
for aCandidate in bestAnswerCopy:
scoreReCal = aCandidate.calcScore(qtList, countCharDict,debug, includingObj=getAnswer(aCandidate.sub, aCandidate.pre, kbDict), use_elmo=True)
if scoreReCal > maxScore:
bestAnswer = set()
maxScore = scoreReCal
if scoreReCal == maxScore:
bestAnswer.add(aCandidate)
if debug:
for ai in bestAnswer:
for kb in kbDict[ai.sub]:
if ai.pre in kb:
print(ai.sub + ' ' +ai.pre + ' '+ kb[ai.pre])
return[bestAnswer,candidateSet]
else:
return bestAnswer
def loadQtList(path, encode = 'utf8'):
qtList = json.load(open(path,'r',encoding=encode))
return qtList
def loadcountCharDict(path, encode = 'utf8'):
countCharDict = json.load(open(path,'r',encoding=encode))
return countCharDict
def answerAllQ(pathInput, pathOutput, lKey, kbDict, qtList, countCharDict, qIDstart=1, wP=10):
fq = open(pathInput, 'r', encoding='utf8')
i = qIDstart
timeStart = time.time()
fo = open(pathOutput, 'w', encoding='utf8')
fo.close()
listQ = []
for line in fq:
if line[1] == 'q':
listQ.append(line[line.index('\t')+1:].strip())
for q in listQ:
fo = open(pathOutput, 'a', encoding='utf8')
result = answerQ(q, lKey, kbDict, qtList, countCharDict, wP=wP)
fo.write('<question id='+str(i)+'>\t' + q.lower() + '\n')
answerLast = ''
if len(result) != 0:
answerSet = []
fo.write('<triple id='+str(i)+'>\t')
for res in result:
answerTmp = getAnswer(res.sub, res.pre, kbDict)
answerSet.append(answerTmp)
fo.write(res.sub.lower() + ' ||| ' + res.pre.lower() + ' ||| '\
+ str(answerTmp) + ' ||| ' + str(res.score) + ' ====== ')
fo.write('\n')
fo.write('<answer id='+str(i)+'>\t')
answerLast = answerSet[0][0]
mulAnswer = False
for ansTmp in answerSet:
for ans in ansTmp:
if ans != answerLast:
mulAnswer = True
continue
if mulAnswer == True:
continue
if mulAnswer == True:
for ansTmp in answerSet:
for ans in ansTmp:
fo.write(ans)
if len(ansTmp) > 1:
fo.write(' | ')
if len(answerSet) > 1:
fo.write(' ||| ')
else:
fo.write(answerLast)
fo.write('\n==================================================\n')
else:
fo.write('<triple id='+str(i)+'>\t')
fo.write('\n')
fo.write('<answer id='+str(i)+'>\t')
fo.write('\n==================================================\n')
print('processing ' + str(i) + 'th Q.\tAv time cost: ' + str((time.time()-timeStart) / i)[:6] + ' sec', end = '\r', flush=True)
fo.close()
i += 1
fq.close()
def loadResAndanswerAllQ(pathInput, pathOutput, pathDict, pathQt, pathCD, encode='utf8', qIDstart=1, wP=10):
kbDict = json.load(open(pathDict, 'r', encoding=encode)) # kbJson.cleanPre.alias.utf8
qtList = loadQtList(pathQt, encode) # outputAP
countCharDict = loadcountCharDict(pathCD) # countChar
answerAllQ(pathInput, pathOutput, list(kbDict), kbDict, qtList, countCharDict, qIDstart=1,wP=wP)
if __name__ == '__main__':
pathInput = config.test_data_path
pathOutput = config.result_bert_path
pathDict = config.kb_process_path
pathQt = config.output_data_path
pathCD = config.countChar_dir
qIDstart = 1
defaultWeightPre = 30
loadResAndanswerAllQ(pathInput,pathOutput,pathDict,pathQt,pathCD,'utf8', qIDstart, defaultWeightPre) | lengths = []
for x in sequences: | random_line_split |
core_bert.py | import sys
import codecs
import time
import json
from scipy.spatial.distance import cosine
import code
from models import BertMatch
import torch
import torch.nn as nn
import torch.nn.functional as F
import math
import pkuseg
from elmoformanylangs import Embedder
import numpy as np
from pytorch_transformers import BertTokenizer
import six
import pandas as pd
from config import config
seg = pkuseg.pkuseg()
DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = BertMatch()
model.load_state_dict(torch.load(config.bert_train_modelPath))
model = model.to(DEVICE)
pretrained_model_name_or_path = config.bert_pretrain_model_path
print('bert预训练模型已加载')
class answerCandidate:
def __init__(sel | a list of iterables. '
'Found non-iterable: ' + str(x))
if maxlen is None:
maxlen = np.max(lengths)
sample_shape = tuple()
for s in sequences:
if len(s) > 0:
sample_shape = np.asarray(s).shape[1:]
break
is_dtype_str = np.issubdtype(dtype, np.str_) or np.issubdtype(dtype, np.unicode_)
if isinstance(value, six.string_types) and dtype != object and not is_dtype_str:
raise ValueError("`dtype` {} is not compatible with `value`'s type: {}\n"
"You should set `dtype=object` for variable length strings."
.format(dtype, type(value)))
x = np.full((num_samples, maxlen) + sample_shape, value, dtype=dtype)
for idx, s in enumerate(sequences):
if not len(s):
continue # empty list/array was found
if truncating == 'pre':
trunc = s[-maxlen:]
elif truncating == 'post':
trunc = s[:maxlen]
else:
raise ValueError('Truncating type "%s" '
'not understood' % truncating)
trunc = np.asarray(trunc, dtype=dtype)
if trunc.shape[1:] != sample_shape:
raise ValueError('Shape of sample %s of sequence at position %s '
'is different from expected shape %s' %
(trunc.shape[1:], idx, sample_shape))
if padding == 'post':
x[idx, :len(trunc)] = trunc
elif padding == 'pre':
x[idx, -len(trunc):] = trunc
else:
raise ValueError('Padding type "%s" not understood' % padding)
return x
def data2tensor(batch_token_ids, pad=True, maxlen=50):
if pad:
batch_token_ids = pad_sequences(batch_token_ids, maxlen=maxlen, padding='post')
batch_token_ids = torch.tensor(batch_token_ids, dtype=torch.long).to(DEVICE)
return batch_token_ids
tokenizer = BertTokenizer.from_pretrained(pretrained_model_name_or_path)
def sent2ids(sent_text):
sent_tokens = ['[CLS]'] + tokenizer.tokenize(sent_text) + ["[SEP]"]
token_ids = tokenizer.convert_tokens_to_ids(sent_tokens)
return token_ids
def getAnswer(sub, pre, kbDict):
answerList = []
for kb in kbDict[sub]:
if pre in kb:
answerList.append(kb[pre])
return answerList
def answerQ (qRaw, lKey, kbDict, qtList, countCharDict, wP=10, threshold=0, debug=False):
q = qRaw.strip().lower() # 问题转化成小写
candidateSet = set()
result = ''
maxScore = 0
bestAnswer = set()
for key in lKey:
if -1 != q.find(key): # 如果问题中出现了该subject,那么我们就要考虑这个subject的triples
for kb in kbDict[key]:
for pre in list(kb):
newAnswerCandidate = answerCandidate(key, pre, q, wP=wP) # 构建一个新的answer candidate
candidateSet.add(newAnswerCandidate)
candidateSetCopy = candidateSet.copy()
if debug:
print('len(candidateSet) = ' + str(len(candidateSetCopy)), end = '\r', flush=True)
candidateSet = set()
candidateSetIndex = set()
for aCandidate in candidateSetCopy:
strTmp = str(aCandidate.sub+'|'+aCandidate.pre)
if strTmp not in candidateSetIndex:
candidateSetIndex.add(strTmp)
candidateSet.add(aCandidate)
# 针对每一个candidate answer,计算该candidate的分数,然后选择分数最高的作为答案
for aCandidate in candidateSet:
scoreTmp = aCandidate.calcScore(qtList, countCharDict,debug)
if scoreTmp > maxScore:
maxScore = scoreTmp
bestAnswer = set()
if scoreTmp == maxScore:
bestAnswer.add(aCandidate)
# 去除一些重复的答案
bestAnswerCopy = bestAnswer.copy()
bestAnswer = set()
for aCandidate in bestAnswerCopy:
aCfound = 0
for aC in bestAnswer:
if aC.pre == aCandidate.pre and aC.sub == aCandidate.sub:
aCfound = 1
break
if aCfound == 0:
bestAnswer.add(aCandidate)
# 加入object的分数
bestAnswerCopy = bestAnswer.copy()
for aCandidate in bestAnswerCopy:
if aCandidate.score == aCandidate.scoreSub:
scoreReCal = aCandidate.calcScore(qtList, countCharDict,debug, includingObj=getAnswer(aCandidate.sub, aCandidate.pre, kbDict))
if scoreReCal > maxScore:
bestAnswer = set()
maxScore = scoreReCal
if scoreReCal == maxScore:
bestAnswer.add(aCandidate)
# 加入cosine similarity
bestAnswerCopy = bestAnswer.copy()
if len(bestAnswer) > 1: # use word vector to remove duplicated answer
for aCandidate in bestAnswerCopy:
scoreReCal = aCandidate.calcScore(qtList, countCharDict,debug, includingObj=getAnswer(aCandidate.sub, aCandidate.pre, kbDict), use_elmo=True)
if scoreReCal > maxScore:
bestAnswer = set()
maxScore = scoreReCal
if scoreReCal == maxScore:
bestAnswer.add(aCandidate)
if debug:
for ai in bestAnswer:
for kb in kbDict[ai.sub]:
if ai.pre in kb:
print(ai.sub + ' ' +ai.pre + ' '+ kb[ai.pre])
return[bestAnswer,candidateSet]
else:
return bestAnswer
def loadQtList(path, encode = 'utf8'):
qtList = json.load(open(path,'r',encoding=encode))
return qtList
def loadcountCharDict(path, encode = 'utf8'):
countCharDict = json.load(open(path,'r',encoding=encode))
return countCharDict
def answerAllQ(pathInput, pathOutput, lKey, kbDict, qtList, countCharDict, qIDstart=1, wP=10):
fq = open(pathInput, 'r', encoding='utf8')
i = qIDstart
timeStart = time.time()
fo = open(pathOutput, 'w', encoding='utf8')
fo.close()
listQ = []
for line in fq:
if line[1] == 'q':
listQ.append(line[line.index('\t')+1:].strip())
for q in listQ:
fo = open(pathOutput, 'a', encoding='utf8')
result = answerQ(q, lKey, kbDict, qtList, countCharDict, wP=wP)
fo.write('<question id='+str(i)+'>\t' + q.lower() + '\n')
answerLast = ''
if len(result) != 0:
answerSet = []
fo.write('<triple id='+str(i)+'>\t')
for res in result:
answerTmp = getAnswer(res.sub, res.pre, kbDict)
answerSet.append(answerTmp)
fo.write(res.sub.lower() + ' ||| ' + res.pre.lower() + ' ||| '\
+ str(answerTmp) + ' ||| ' + str(res.score) + ' ====== ')
fo.write('\n')
fo.write('<answer id='+str(i)+'>\t')
answerLast = answerSet[0][0]
mulAnswer = False
for ansTmp in answerSet:
for ans in ansTmp:
if ans != answerLast:
mulAnswer = True
continue
if mulAnswer == True:
continue
if mulAnswer == True:
for ansTmp in answerSet:
for ans in ansTmp:
fo.write(ans)
if len(ansTmp) > 1:
fo.write(' | ')
if len(answerSet) > 1:
fo.write(' ||| ')
else:
fo.write(answerLast)
fo.write('\n==================================================\n')
else:
fo.write('<triple id='+str(i)+'>\t')
fo.write('\n')
fo.write('<answer id='+str(i)+'>\t')
fo.write('\n==================================================\n')
print('processing ' + str(i) + 'th Q.\tAv time cost: ' + str((time.time()-timeStart) / i)[:6] + ' sec', end = '\r', flush=True)
fo.close()
i += 1
fq.close()
def loadResAndanswerAllQ(pathInput, pathOutput, pathDict, pathQt, pathCD, encode='utf8', qIDstart=1, wP=10):
kbDict = json.load(open(pathDict, 'r', encoding=encode)) # kbJson.cleanPre.alias.utf8
qtList = loadQtList(pathQt, encode) # outputAP
countCharDict = loadcountCharDict(pathCD) # countChar
answerAllQ(pathInput, pathOutput, list(kbDict), kbDict, qtList, countCharDict, qIDstart=1,wP=wP)
if __name__ == '__main__':
pathInput = config.test_data_path
pathOutput = config.result_bert_path
pathDict = config.kb_process_path
pathQt = config.output_data_path
pathCD = config.countChar_dir
qIDstart = 1
defaultWeightPre = 30
loadResAndanswerAllQ(pathInput,pathOutput,pathDict,pathQt,pathCD,'utf8', qIDstart, defaultWeightPre)
| f, sub = '', pre = '', qRaw = '', qType = 0, score = 0, kbDict = [], wS = 1, wP = 10, wAP = 100):
self.sub = sub # subject
self.pre = pre # predicate
self.qRaw = qRaw # raw question
self.qType = qType # question type
self.score = score # 分数
self.kbDict = kbDict # kd dictionary
self.origin = ''
self.scoreDetail = [0,0,0,0,0]
self.wS = wS # subject的权重
self.wP = wP # oredicate的权重
self.wAP = wAP # answer pattern的权重
self.scoreSub = 0
self.scoreAP = 0
self.scorePre = 0
def calcScore(self, qtList, countCharDict, debug=False, includingObj = [], use_elmo=False):
# 最重要的部分,计算该答案的分数
lenSub = len(self.sub)
scorePre = 0
scoreAP = 0
pre = self.pre
q = self.qRaw
subIndex = q.index(self.sub)
qWithoutSub1 = q[:subIndex] # subject左边的部分
qWithoutSub2 = q[subIndex+lenSub:] # subject右边的部分
qWithoutSub = q.replace(self.sub,'') # 去掉subject剩下的部分
qtKey = (self.qRaw.replace(self.sub,'(SUB)',1) + ' ||| ' + pre) # 把subject换成(sub)然后加上predicate
if qtKey in qtList:
scoreAP = qtList[qtKey] # 查看当前的问题有没有在知识库中出现过
self.scoreAP = scoreAP
qWithoutSubSet1 = set(qWithoutSub1)
qWithoutSubSet2 = set(qWithoutSub2)
qWithoutSubSet = set(qWithoutSub)
preLowerSet = set(pre.lower())
# 找出predicate和问题前后两部分的最大intersection
intersection1 = qWithoutSubSet1 & preLowerSet
intersection2 = qWithoutSubSet2 & preLowerSet
if len(intersection1) > len(intersection2):
maxIntersection = intersection1
else:
maxIntersection = intersection2
# 计算来自predicate的分数,采用最大overlap的character的倒数 1/(n+1)
preFactor = 0
for char in maxIntersection:
if char in countCharDict:
preFactor += 1/(countCharDict[char] + 1)
else:
preFactor += 1
if len(pre) != 0:
scorePre = preFactor / len(qWithoutSubSet | preLowerSet)
else:
scorePre = 0
if len(includingObj) != 0 and scorePre == 0:
for objStr in includingObj:
scorePreTmp = 0
preLowerSet = set(objStr.lower())
intersection1 = qWithoutSubSet1 & preLowerSet
intersection2 = qWithoutSubSet2 & preLowerSet
if len(intersection1) > len(intersection2):
maxIntersection = intersection1
else:
maxIntersection = intersection2
preFactor = 0
for char in maxIntersection:
if char in countCharDict:
preFactor += 1/(countCharDict[char] + 1)
else:
preFactor += 1
scorePreTmp = preFactor / len(qWithoutSubSet | preLowerSet)
if scorePreTmp > scorePre:
scorePre = scorePreTmp
if use_elmo and len(pre) != 0:
preCut = [pre]
qWithoutSubCut = [qWithoutSub]
data_df = pd.DataFrame({'question': qWithoutSubCut, 'sim_question': preCut})
q_data = data_df['question'].apply(sent2ids)
p_data = data_df['sim_question'].apply(sent2ids)
q_data_e = data2tensor([q_data[0]])
p_data_e = data2tensor([p_data[0]])
output = model(q_data_e,p_data_e) #
scorePre_m = output[0][0]##bert模型做相似度计算,第一个值是相似性分数
self.scorePre = scorePre_m
scoreSub = 0
# 计算subject的权重有多高,可能有些subject本身就是更重要一些,一般来说越罕见的entity重要性越高
for char in self.sub:
if char in countCharDict:
scoreSub += 1/(countCharDict[char] + 1)
else:
scoreSub += 1
self.scoreSub = scoreSub
self.scorePre = scorePre
self.score = scoreSub * self.wS + scorePre * self.wP + scoreAP * self.wAP
return self.score
def pad_sequences(sequences, maxlen=None, dtype='int32',
padding='pre', truncating='pre', value=0.):
num_samples = len(sequences)
lengths = []
for x in sequences:
try:
lengths.append(len(x))
except TypeError:
raise ValueError('`sequences` must be | identifier_body |
connection.rs | #![allow(dead_code)]
use super::pointer::*;
use super::window::*;
use crate::connection::ConnectionOps;
use crate::os::wayland::inputhandler::InputHandler;
use crate::os::wayland::output::OutputHandler;
use crate::os::x11::keyboard::Keyboard;
use crate::screen::{ScreenInfo, Screens};
use crate::spawn::*;
use crate::{Appearance, Connection, ScreenRect, WindowEvent};
use anyhow::{bail, Context};
use mio::unix::SourceFd;
use mio::{Events, Interest, Poll, Token};
use smithay_client_toolkit as toolkit;
use std::cell::RefCell;
use std::collections::HashMap;
use std::os::unix::fs::FileExt;
use std::os::unix::io::FromRawFd;
use std::rc::Rc;
use std::sync::atomic::AtomicUsize;
use toolkit::environment::Environment;
use toolkit::reexports::client::Display;
use toolkit::seat::SeatListener;
use toolkit::shm::AutoMemPool;
use wayland_client::protocol::wl_keyboard::{Event as WlKeyboardEvent, KeymapFormat, WlKeyboard};
use wayland_client::{EventQueue, Main};
toolkit::default_environment!(MyEnvironment, desktop,
fields=[
output_handler: OutputHandler,
input_handler: InputHandler,
],
singles=[
wayland_protocols::wlr::unstable::output_management::v1::client::zwlr_output_manager_v1::ZwlrOutputManagerV1 => output_handler,
wayland_protocols::unstable::text_input::v3::client::zwp_text_input_manager_v3::ZwpTextInputManagerV3 => input_handler,
]);
impl MyEnvironment {
pub fn input_handler(&mut self) -> &mut InputHandler {
&mut self.input_handler
}
}
pub struct WaylandConnection {
should_terminate: RefCell<bool>,
pub(crate) next_window_id: AtomicUsize,
pub(crate) windows: RefCell<HashMap<usize, Rc<RefCell<WaylandWindowInner>>>>,
// Take care with the destruction order: the underlying wayland
// libraries are not safe and require destruction in reverse
// creation order. This list of fields must reflect that otherwise
// we'll segfault on shutdown.
// Rust guarantees that struct fields are dropped in the order
// they appear in the struct, so the Display must be at the
// bottom of this list, and opengl, which depends on everything
// must be ahead of the rest.
pub(crate) gl_connection: RefCell<Option<Rc<crate::egl::GlConnection>>>,
pub(crate) pointer: RefCell<PointerDispatcher>,
pub(crate) keyboard_mapper: RefCell<Option<Keyboard>>,
pub(crate) keyboard_window_id: RefCell<Option<usize>>,
pub(crate) surface_to_window_id: RefCell<HashMap<u32, usize>>,
pub(crate) active_surface_id: RefCell<u32>,
/// Repeats per second
pub(crate) key_repeat_rate: RefCell<i32>,
pub(crate) mem_pool: RefCell<AutoMemPool>,
/// Delay before repeating, in milliseconds
pub(crate) key_repeat_delay: RefCell<i32>,
pub(crate) last_serial: RefCell<u32>,
seat_listener: SeatListener,
pub(crate) environment: Environment<MyEnvironment>,
event_q: RefCell<EventQueue>,
pub(crate) display: RefCell<Display>,
}
impl Drop for WaylandConnection {
fn drop(&mut self) {
self.environment
.with_inner(|env| env.input_handler.shutdown());
}
}
impl WaylandConnection {
pub fn create_new() -> anyhow::Result<Self> {
let (environment, display, event_q) = toolkit::new_default_environment!(
MyEnvironment,
desktop,
fields = [
output_handler: OutputHandler::new(),
input_handler: InputHandler::new(),
]
)?;
let mut pointer = None;
let mut seat_keyboards = HashMap::new();
for seat in environment.get_all_seats() {
if let Some((has_kbd, has_ptr, name)) =
toolkit::seat::with_seat_data(&seat, |seat_data| {
(
seat_data.has_keyboard && !seat_data.defunct,
seat_data.has_pointer && !seat_data.defunct,
seat_data.name.clone(),
)
})
{
if has_kbd {
let keyboard = seat.get_keyboard();
keyboard.quick_assign(|keyboard, event, _| {
let conn = Connection::get().unwrap().wayland();
if let Err(err) = conn.keyboard_event(keyboard, event) {
log::error!("keyboard_event: {:#}", err);
}
});
environment.with_inner(|env| env.input_handler.advise_seat(&seat, &keyboard));
seat_keyboards.insert(name, keyboard);
}
if has_ptr {
pointer.replace(PointerDispatcher::register(
&seat,
environment.require_global(),
environment.require_global(),
environment.require_global(),
environment.get_primary_selection_manager(),
)?);
}
}
}
let pointer =
pointer.ok_or_else(|| anyhow::anyhow!("no seats have an available pointer"))?;
let seat_listener;
{
let env = environment.clone();
seat_listener = environment.listen_for_seats(move |seat, seat_data, _| {
if seat_data.has_keyboard {
if !seat_data.defunct {
// We only want to assign a new keyboard object if we don't already have
// one for this seat. When a seat is being created or updated, the listener
// can receive the same seat multiple times: for example, when switching
// back from another virtual console, the same seat is usually seen four
// times with different data flags:
//
// has_pointer: true; has_keyboard: false
// has_pointer: false; has_keyboard: false
// has_pointer: false; has_keyboard: true
// has_pointer: true; has_keyboard: true
//
// This is essentially telling the client to re-assign its keyboard and
// pointer, but that means that this listener will fire twice with
// has_keyboard set to true. If we assign a handler both times, then we end
// up handling key events twice.
if !seat_keyboards.contains_key(&seat_data.name) {
let keyboard = seat.get_keyboard();
keyboard.quick_assign(|keyboard, event, _| {
let conn = Connection::get().unwrap().wayland();
if let Err(err) = conn.keyboard_event(keyboard, event) {
log::error!("keyboard_event: {:#}", err);
}
});
env.with_inner(|env| env.input_handler.advise_seat(&seat, &keyboard));
seat_keyboards.insert(seat_data.name.clone(), keyboard);
}
} else {
env.with_inner(|env| env.input_handler.seat_defunct(&seat));
}
} else {
// If we previously had a keyboard object on this seat, it's no longer valid if
// has_keyboard is false, so we remove the keyboard object we knew about and
// thereby ensure that we assign a new keyboard object next time the listener
// fires for this seat with has_keyboard = true.
seat_keyboards.remove(&seat_data.name);
}
if seat_data.has_pointer && !seat_data.defunct {
let conn = Connection::get().unwrap().wayland();
conn.pointer.borrow_mut().seat_changed(&seat);
}
});
}
let mem_pool = environment.create_auto_pool()?;
Ok(Self {
display: RefCell::new(display),
environment,
should_terminate: RefCell::new(false),
next_window_id: AtomicUsize::new(1),
windows: RefCell::new(HashMap::new()),
event_q: RefCell::new(event_q),
pointer: RefCell::new(pointer),
seat_listener,
mem_pool: RefCell::new(mem_pool),
gl_connection: RefCell::new(None),
keyboard_mapper: RefCell::new(None),
key_repeat_rate: RefCell::new(25),
key_repeat_delay: RefCell::new(400),
keyboard_window_id: RefCell::new(None),
last_serial: RefCell::new(0),
surface_to_window_id: RefCell::new(HashMap::new()),
active_surface_id: RefCell::new(0),
})
}
fn keyboard_event(
&self,
keyboard: Main<WlKeyboard>,
event: WlKeyboardEvent,
) -> anyhow::Result<()> {
match &event {
WlKeyboardEvent::Enter {
serial, surface, ..
} => {
// update global active surface id
*self.active_surface_id.borrow_mut() = surface.as_ref().id();
*self.last_serial.borrow_mut() = *serial;
if let Some(&window_id) = self
.surface_to_window_id
.borrow()
.get(&surface.as_ref().id())
{
self.keyboard_window_id.borrow_mut().replace(window_id);
self.environment.with_inner(|env| {
if let Some(input) =
env.input_handler.get_text_input_for_keyboard(&keyboard)
{
input.enable();
input.commit();
}
env.input_handler.advise_surface(&surface, &keyboard);
});
} else {
log::warn!("{:?}, no known surface", event);
}
}
WlKeyboardEvent::Leave { serial, .. } => {
if let Some(input) = self
.environment
.with_inner(|env| env.input_handler.get_text_input_for_keyboard(&keyboard))
{
input.disable();
input.commit();
}
*self.last_serial.borrow_mut() = *serial;
}
WlKeyboardEvent::Key { serial, .. } | WlKeyboardEvent::Modifiers { serial, .. } => {
*self.last_serial.borrow_mut() = *serial;
}
WlKeyboardEvent::RepeatInfo { rate, delay } => {
*self.key_repeat_rate.borrow_mut() = *rate;
*self.key_repeat_delay.borrow_mut() = *delay;
}
WlKeyboardEvent::Keymap { format, fd, size } => |
_ => {}
}
if let Some(&window_id) = self.keyboard_window_id.borrow().as_ref() {
if let Some(win) = self.window_by_id(window_id) {
let mut inner = win.borrow_mut();
inner.keyboard_event(event);
}
}
Ok(())
}
pub(crate) fn dispatch_to_focused_window(&self, event: WindowEvent) {
if let Some(&window_id) = self.keyboard_window_id.borrow().as_ref() {
if let Some(win) = self.window_by_id(window_id) {
let mut inner = win.borrow_mut();
inner.events.dispatch(event);
}
}
}
pub(crate) fn next_window_id(&self) -> usize {
self.next_window_id
.fetch_add(1, ::std::sync::atomic::Ordering::Relaxed)
}
fn flush(&self) -> anyhow::Result<()> {
if let Err(e) = self.display.borrow_mut().flush() {
if e.kind() != ::std::io::ErrorKind::WouldBlock {
bail!("Error while flushing display: {}", e);
}
}
Ok(())
}
pub(crate) fn window_by_id(&self, window_id: usize) -> Option<Rc<RefCell<WaylandWindowInner>>> {
self.windows.borrow().get(&window_id).map(Rc::clone)
}
pub(crate) fn with_window_inner<
R,
F: FnOnce(&mut WaylandWindowInner) -> anyhow::Result<R> + Send + 'static,
>(
window: usize,
f: F,
) -> promise::Future<R>
where
R: Send + 'static,
{
let mut prom = promise::Promise::new();
let future = prom.get_future().unwrap();
promise::spawn::spawn_into_main_thread(async move {
if let Some(handle) = Connection::get().unwrap().wayland().window_by_id(window) {
let mut inner = handle.borrow_mut();
prom.result(f(&mut inner));
}
})
.detach();
future
}
fn run_message_loop_impl(&self) -> anyhow::Result<()> {
const TOK_WL: usize = 0xffff_fffc;
const TOK_SPAWN: usize = 0xffff_fffd;
let tok_wl = Token(TOK_WL);
let tok_spawn = Token(TOK_SPAWN);
let mut poll = Poll::new()?;
let mut events = Events::with_capacity(8);
poll.registry().register(
&mut SourceFd(&self.display.borrow().get_connection_fd()),
tok_wl,
Interest::READABLE,
)?;
poll.registry().register(
&mut SourceFd(&SPAWN_QUEUE.raw_fd()),
tok_spawn,
Interest::READABLE,
)?;
while !*self.should_terminate.borrow() {
// Check the spawn queue before we try to sleep; there may
// be work pending and we don't guarantee that there is a
// 1:1 wakeup to queued function, so we need to be assertive
// in order to avoid missing wakeups
let timeout = if SPAWN_QUEUE.run() {
// if we processed one, we don't want to sleep because
// there may be others to deal with
Some(std::time::Duration::from_secs(0))
} else {
None
};
{
let mut event_q = self.event_q.borrow_mut();
if let Err(err) = event_q.dispatch_pending(&mut (), |_, _, _| {}) {
return Err(err).with_context(|| {
format!(
"error during event_q.dispatch protocol_error={:?}",
self.display.borrow().protocol_error()
)
});
}
}
self.flush()?;
if let Err(err) = poll.poll(&mut events, timeout) {
if err.kind() == std::io::ErrorKind::Interrupted {
continue;
}
bail!("polling for events: {:?}", err);
}
for event in &events {
if event.token() == tok_wl {
let event_q = self.event_q.borrow();
if let Some(guard) = event_q.prepare_read() {
if let Err(err) = guard.read_events() {
if err.kind() != std::io::ErrorKind::WouldBlock
&& err.kind() != std::io::ErrorKind::Interrupted
{
return Err(err).with_context(|| {
format!(
"error during event_q.read_events protocol_error={:?}",
self.display.borrow().protocol_error()
)
});
}
}
}
}
}
}
Ok(())
}
pub(crate) fn advise_of_appearance_change(&self, appearance: crate::Appearance) {
for win in self.windows.borrow().values() {
win.borrow_mut().appearance_changed(appearance);
}
}
}
impl ConnectionOps for WaylandConnection {
fn name(&self) -> String {
"Wayland".to_string()
}
fn terminate_message_loop(&self) {
*self.should_terminate.borrow_mut() = true;
}
fn get_appearance(&self) -> Appearance {
match promise::spawn::block_on(crate::os::xdg_desktop_portal::get_appearance()) {
Ok(Some(appearance)) => return appearance,
Ok(None) => {}
Err(err) => {
log::debug!("Unable to resolve appearance using xdg-desktop-portal: {err:#}");
}
}
// fallback
Appearance::Light
}
fn run_message_loop(&self) -> anyhow::Result<()> {
let res = self.run_message_loop_impl();
// Ensure that we drop these eagerly, to avoid
// noisy errors wrt. global destructors unwinding
// in unexpected places
self.windows.borrow_mut().clear();
res
}
fn screens(&self) -> anyhow::Result<Screens> {
if let Some(screens) = self
.environment
.with_inner(|env| env.output_handler.screens())
{
return Ok(screens);
}
let mut by_name = HashMap::new();
let mut virtual_rect: ScreenRect = euclid::rect(0, 0, 0, 0);
for output in self.environment.get_all_outputs() {
toolkit::output::with_output_info(&output, |info| {
let name = if info.name.is_empty() {
format!("{} {}", info.model, info.make)
} else {
info.name.clone()
};
let (width, height) = info
.modes
.iter()
.find(|mode| mode.is_current)
.map(|mode| mode.dimensions)
.unwrap_or((info.physical_size.0, info.physical_size.1));
let rect = euclid::rect(
info.location.0 as isize,
info.location.1 as isize,
width as isize,
height as isize,
);
let scale = info.scale_factor as f64;
virtual_rect = virtual_rect.union(&rect);
by_name.insert(
name.clone(),
ScreenInfo {
name,
rect,
scale,
max_fps: None,
},
);
});
}
// The main screen is the one either at the origin of
// the virtual area, or if that doesn't exist for some weird
// reason, the screen closest to the origin.
let main = by_name
.values()
.min_by_key(|screen| {
screen
.rect
.origin
.to_f32()
.distance_to(euclid::Point2D::origin())
.abs() as isize
})
.ok_or_else(|| anyhow::anyhow!("no screens were found"))?
.clone();
// We don't yet know how to determine the active screen,
// so assume the main screen.
let active = main.clone();
Ok(Screens {
main,
active,
by_name,
virtual_rect,
})
}
}
| {
let file = unsafe { std::fs::File::from_raw_fd(*fd) };
match format {
KeymapFormat::XkbV1 => {
let mut data = vec![0u8; *size as usize];
file.read_exact_at(&mut data, 0)?;
// Dance around CString panicing on the NUL terminator
// in the xkbcommon crate
while let Some(0) = data.last() {
data.pop();
}
let s = String::from_utf8(data)?;
match Keyboard::new_from_string(s) {
Ok(k) => {
self.keyboard_mapper.replace(Some(k));
}
Err(err) => {
log::error!("Error processing keymap change: {:#}", err);
}
}
}
_ => {}
}
} | conditional_block |
connection.rs | #![allow(dead_code)]
use super::pointer::*;
use super::window::*;
use crate::connection::ConnectionOps;
use crate::os::wayland::inputhandler::InputHandler;
use crate::os::wayland::output::OutputHandler;
use crate::os::x11::keyboard::Keyboard;
use crate::screen::{ScreenInfo, Screens};
use crate::spawn::*;
use crate::{Appearance, Connection, ScreenRect, WindowEvent};
use anyhow::{bail, Context};
use mio::unix::SourceFd;
use mio::{Events, Interest, Poll, Token};
use smithay_client_toolkit as toolkit;
use std::cell::RefCell;
use std::collections::HashMap;
use std::os::unix::fs::FileExt;
use std::os::unix::io::FromRawFd;
use std::rc::Rc;
use std::sync::atomic::AtomicUsize;
use toolkit::environment::Environment;
use toolkit::reexports::client::Display;
use toolkit::seat::SeatListener;
use toolkit::shm::AutoMemPool;
use wayland_client::protocol::wl_keyboard::{Event as WlKeyboardEvent, KeymapFormat, WlKeyboard};
use wayland_client::{EventQueue, Main};
toolkit::default_environment!(MyEnvironment, desktop,
fields=[
output_handler: OutputHandler,
input_handler: InputHandler,
],
singles=[
wayland_protocols::wlr::unstable::output_management::v1::client::zwlr_output_manager_v1::ZwlrOutputManagerV1 => output_handler,
wayland_protocols::unstable::text_input::v3::client::zwp_text_input_manager_v3::ZwpTextInputManagerV3 => input_handler,
]);
impl MyEnvironment {
pub fn input_handler(&mut self) -> &mut InputHandler {
&mut self.input_handler
}
}
pub struct WaylandConnection {
should_terminate: RefCell<bool>,
pub(crate) next_window_id: AtomicUsize,
pub(crate) windows: RefCell<HashMap<usize, Rc<RefCell<WaylandWindowInner>>>>,
// Take care with the destruction order: the underlying wayland
// libraries are not safe and require destruction in reverse
// creation order. This list of fields must reflect that otherwise
// we'll segfault on shutdown.
// Rust guarantees that struct fields are dropped in the order
// they appear in the struct, so the Display must be at the
// bottom of this list, and opengl, which depends on everything
// must be ahead of the rest.
pub(crate) gl_connection: RefCell<Option<Rc<crate::egl::GlConnection>>>,
pub(crate) pointer: RefCell<PointerDispatcher>,
pub(crate) keyboard_mapper: RefCell<Option<Keyboard>>,
pub(crate) keyboard_window_id: RefCell<Option<usize>>,
pub(crate) surface_to_window_id: RefCell<HashMap<u32, usize>>,
pub(crate) active_surface_id: RefCell<u32>,
/// Repeats per second
pub(crate) key_repeat_rate: RefCell<i32>,
pub(crate) mem_pool: RefCell<AutoMemPool>,
/// Delay before repeating, in milliseconds
pub(crate) key_repeat_delay: RefCell<i32>,
pub(crate) last_serial: RefCell<u32>,
seat_listener: SeatListener,
pub(crate) environment: Environment<MyEnvironment>,
event_q: RefCell<EventQueue>,
pub(crate) display: RefCell<Display>,
}
impl Drop for WaylandConnection {
fn drop(&mut self) {
self.environment
.with_inner(|env| env.input_handler.shutdown());
}
}
impl WaylandConnection {
pub fn create_new() -> anyhow::Result<Self> {
let (environment, display, event_q) = toolkit::new_default_environment!(
MyEnvironment,
desktop,
fields = [
output_handler: OutputHandler::new(),
input_handler: InputHandler::new(),
]
)?;
let mut pointer = None;
let mut seat_keyboards = HashMap::new();
for seat in environment.get_all_seats() {
if let Some((has_kbd, has_ptr, name)) =
toolkit::seat::with_seat_data(&seat, |seat_data| {
(
seat_data.has_keyboard && !seat_data.defunct,
seat_data.has_pointer && !seat_data.defunct,
seat_data.name.clone(),
)
})
{
if has_kbd {
let keyboard = seat.get_keyboard();
keyboard.quick_assign(|keyboard, event, _| {
let conn = Connection::get().unwrap().wayland();
if let Err(err) = conn.keyboard_event(keyboard, event) {
log::error!("keyboard_event: {:#}", err);
}
});
environment.with_inner(|env| env.input_handler.advise_seat(&seat, &keyboard));
seat_keyboards.insert(name, keyboard);
}
if has_ptr {
pointer.replace(PointerDispatcher::register(
&seat,
environment.require_global(),
environment.require_global(),
environment.require_global(),
environment.get_primary_selection_manager(),
)?);
}
}
}
let pointer =
pointer.ok_or_else(|| anyhow::anyhow!("no seats have an available pointer"))?;
let seat_listener;
{
let env = environment.clone();
seat_listener = environment.listen_for_seats(move |seat, seat_data, _| {
if seat_data.has_keyboard {
if !seat_data.defunct {
// We only want to assign a new keyboard object if we don't already have
// one for this seat. When a seat is being created or updated, the listener
// can receive the same seat multiple times: for example, when switching
// back from another virtual console, the same seat is usually seen four
// times with different data flags:
//
// has_pointer: true; has_keyboard: false
// has_pointer: false; has_keyboard: false
// has_pointer: false; has_keyboard: true
// has_pointer: true; has_keyboard: true
//
// This is essentially telling the client to re-assign its keyboard and
// pointer, but that means that this listener will fire twice with
// has_keyboard set to true. If we assign a handler both times, then we end
// up handling key events twice.
if !seat_keyboards.contains_key(&seat_data.name) {
let keyboard = seat.get_keyboard();
keyboard.quick_assign(|keyboard, event, _| {
let conn = Connection::get().unwrap().wayland();
if let Err(err) = conn.keyboard_event(keyboard, event) {
log::error!("keyboard_event: {:#}", err);
}
});
env.with_inner(|env| env.input_handler.advise_seat(&seat, &keyboard));
seat_keyboards.insert(seat_data.name.clone(), keyboard);
}
} else {
env.with_inner(|env| env.input_handler.seat_defunct(&seat));
}
} else {
// If we previously had a keyboard object on this seat, it's no longer valid if
// has_keyboard is false, so we remove the keyboard object we knew about and
// thereby ensure that we assign a new keyboard object next time the listener
// fires for this seat with has_keyboard = true.
seat_keyboards.remove(&seat_data.name);
}
if seat_data.has_pointer && !seat_data.defunct {
let conn = Connection::get().unwrap().wayland();
conn.pointer.borrow_mut().seat_changed(&seat);
}
});
}
let mem_pool = environment.create_auto_pool()?;
Ok(Self {
display: RefCell::new(display),
environment,
should_terminate: RefCell::new(false),
next_window_id: AtomicUsize::new(1),
windows: RefCell::new(HashMap::new()),
event_q: RefCell::new(event_q),
pointer: RefCell::new(pointer),
seat_listener,
mem_pool: RefCell::new(mem_pool),
gl_connection: RefCell::new(None),
keyboard_mapper: RefCell::new(None),
key_repeat_rate: RefCell::new(25),
key_repeat_delay: RefCell::new(400),
keyboard_window_id: RefCell::new(None),
last_serial: RefCell::new(0),
surface_to_window_id: RefCell::new(HashMap::new()),
active_surface_id: RefCell::new(0),
})
}
fn | (
&self,
keyboard: Main<WlKeyboard>,
event: WlKeyboardEvent,
) -> anyhow::Result<()> {
match &event {
WlKeyboardEvent::Enter {
serial, surface, ..
} => {
// update global active surface id
*self.active_surface_id.borrow_mut() = surface.as_ref().id();
*self.last_serial.borrow_mut() = *serial;
if let Some(&window_id) = self
.surface_to_window_id
.borrow()
.get(&surface.as_ref().id())
{
self.keyboard_window_id.borrow_mut().replace(window_id);
self.environment.with_inner(|env| {
if let Some(input) =
env.input_handler.get_text_input_for_keyboard(&keyboard)
{
input.enable();
input.commit();
}
env.input_handler.advise_surface(&surface, &keyboard);
});
} else {
log::warn!("{:?}, no known surface", event);
}
}
WlKeyboardEvent::Leave { serial, .. } => {
if let Some(input) = self
.environment
.with_inner(|env| env.input_handler.get_text_input_for_keyboard(&keyboard))
{
input.disable();
input.commit();
}
*self.last_serial.borrow_mut() = *serial;
}
WlKeyboardEvent::Key { serial, .. } | WlKeyboardEvent::Modifiers { serial, .. } => {
*self.last_serial.borrow_mut() = *serial;
}
WlKeyboardEvent::RepeatInfo { rate, delay } => {
*self.key_repeat_rate.borrow_mut() = *rate;
*self.key_repeat_delay.borrow_mut() = *delay;
}
WlKeyboardEvent::Keymap { format, fd, size } => {
let file = unsafe { std::fs::File::from_raw_fd(*fd) };
match format {
KeymapFormat::XkbV1 => {
let mut data = vec![0u8; *size as usize];
file.read_exact_at(&mut data, 0)?;
// Dance around CString panicing on the NUL terminator
// in the xkbcommon crate
while let Some(0) = data.last() {
data.pop();
}
let s = String::from_utf8(data)?;
match Keyboard::new_from_string(s) {
Ok(k) => {
self.keyboard_mapper.replace(Some(k));
}
Err(err) => {
log::error!("Error processing keymap change: {:#}", err);
}
}
}
_ => {}
}
}
_ => {}
}
if let Some(&window_id) = self.keyboard_window_id.borrow().as_ref() {
if let Some(win) = self.window_by_id(window_id) {
let mut inner = win.borrow_mut();
inner.keyboard_event(event);
}
}
Ok(())
}
pub(crate) fn dispatch_to_focused_window(&self, event: WindowEvent) {
if let Some(&window_id) = self.keyboard_window_id.borrow().as_ref() {
if let Some(win) = self.window_by_id(window_id) {
let mut inner = win.borrow_mut();
inner.events.dispatch(event);
}
}
}
pub(crate) fn next_window_id(&self) -> usize {
self.next_window_id
.fetch_add(1, ::std::sync::atomic::Ordering::Relaxed)
}
fn flush(&self) -> anyhow::Result<()> {
if let Err(e) = self.display.borrow_mut().flush() {
if e.kind() != ::std::io::ErrorKind::WouldBlock {
bail!("Error while flushing display: {}", e);
}
}
Ok(())
}
pub(crate) fn window_by_id(&self, window_id: usize) -> Option<Rc<RefCell<WaylandWindowInner>>> {
self.windows.borrow().get(&window_id).map(Rc::clone)
}
pub(crate) fn with_window_inner<
R,
F: FnOnce(&mut WaylandWindowInner) -> anyhow::Result<R> + Send + 'static,
>(
window: usize,
f: F,
) -> promise::Future<R>
where
R: Send + 'static,
{
let mut prom = promise::Promise::new();
let future = prom.get_future().unwrap();
promise::spawn::spawn_into_main_thread(async move {
if let Some(handle) = Connection::get().unwrap().wayland().window_by_id(window) {
let mut inner = handle.borrow_mut();
prom.result(f(&mut inner));
}
})
.detach();
future
}
fn run_message_loop_impl(&self) -> anyhow::Result<()> {
const TOK_WL: usize = 0xffff_fffc;
const TOK_SPAWN: usize = 0xffff_fffd;
let tok_wl = Token(TOK_WL);
let tok_spawn = Token(TOK_SPAWN);
let mut poll = Poll::new()?;
let mut events = Events::with_capacity(8);
poll.registry().register(
&mut SourceFd(&self.display.borrow().get_connection_fd()),
tok_wl,
Interest::READABLE,
)?;
poll.registry().register(
&mut SourceFd(&SPAWN_QUEUE.raw_fd()),
tok_spawn,
Interest::READABLE,
)?;
while !*self.should_terminate.borrow() {
// Check the spawn queue before we try to sleep; there may
// be work pending and we don't guarantee that there is a
// 1:1 wakeup to queued function, so we need to be assertive
// in order to avoid missing wakeups
let timeout = if SPAWN_QUEUE.run() {
// if we processed one, we don't want to sleep because
// there may be others to deal with
Some(std::time::Duration::from_secs(0))
} else {
None
};
{
let mut event_q = self.event_q.borrow_mut();
if let Err(err) = event_q.dispatch_pending(&mut (), |_, _, _| {}) {
return Err(err).with_context(|| {
format!(
"error during event_q.dispatch protocol_error={:?}",
self.display.borrow().protocol_error()
)
});
}
}
self.flush()?;
if let Err(err) = poll.poll(&mut events, timeout) {
if err.kind() == std::io::ErrorKind::Interrupted {
continue;
}
bail!("polling for events: {:?}", err);
}
for event in &events {
if event.token() == tok_wl {
let event_q = self.event_q.borrow();
if let Some(guard) = event_q.prepare_read() {
if let Err(err) = guard.read_events() {
if err.kind() != std::io::ErrorKind::WouldBlock
&& err.kind() != std::io::ErrorKind::Interrupted
{
return Err(err).with_context(|| {
format!(
"error during event_q.read_events protocol_error={:?}",
self.display.borrow().protocol_error()
)
});
}
}
}
}
}
}
Ok(())
}
pub(crate) fn advise_of_appearance_change(&self, appearance: crate::Appearance) {
for win in self.windows.borrow().values() {
win.borrow_mut().appearance_changed(appearance);
}
}
}
impl ConnectionOps for WaylandConnection {
fn name(&self) -> String {
"Wayland".to_string()
}
fn terminate_message_loop(&self) {
*self.should_terminate.borrow_mut() = true;
}
fn get_appearance(&self) -> Appearance {
match promise::spawn::block_on(crate::os::xdg_desktop_portal::get_appearance()) {
Ok(Some(appearance)) => return appearance,
Ok(None) => {}
Err(err) => {
log::debug!("Unable to resolve appearance using xdg-desktop-portal: {err:#}");
}
}
// fallback
Appearance::Light
}
fn run_message_loop(&self) -> anyhow::Result<()> {
let res = self.run_message_loop_impl();
// Ensure that we drop these eagerly, to avoid
// noisy errors wrt. global destructors unwinding
// in unexpected places
self.windows.borrow_mut().clear();
res
}
fn screens(&self) -> anyhow::Result<Screens> {
if let Some(screens) = self
.environment
.with_inner(|env| env.output_handler.screens())
{
return Ok(screens);
}
let mut by_name = HashMap::new();
let mut virtual_rect: ScreenRect = euclid::rect(0, 0, 0, 0);
for output in self.environment.get_all_outputs() {
toolkit::output::with_output_info(&output, |info| {
let name = if info.name.is_empty() {
format!("{} {}", info.model, info.make)
} else {
info.name.clone()
};
let (width, height) = info
.modes
.iter()
.find(|mode| mode.is_current)
.map(|mode| mode.dimensions)
.unwrap_or((info.physical_size.0, info.physical_size.1));
let rect = euclid::rect(
info.location.0 as isize,
info.location.1 as isize,
width as isize,
height as isize,
);
let scale = info.scale_factor as f64;
virtual_rect = virtual_rect.union(&rect);
by_name.insert(
name.clone(),
ScreenInfo {
name,
rect,
scale,
max_fps: None,
},
);
});
}
// The main screen is the one either at the origin of
// the virtual area, or if that doesn't exist for some weird
// reason, the screen closest to the origin.
let main = by_name
.values()
.min_by_key(|screen| {
screen
.rect
.origin
.to_f32()
.distance_to(euclid::Point2D::origin())
.abs() as isize
})
.ok_or_else(|| anyhow::anyhow!("no screens were found"))?
.clone();
// We don't yet know how to determine the active screen,
// so assume the main screen.
let active = main.clone();
Ok(Screens {
main,
active,
by_name,
virtual_rect,
})
}
}
| keyboard_event | identifier_name |
connection.rs | #![allow(dead_code)]
use super::pointer::*;
use super::window::*;
use crate::connection::ConnectionOps;
use crate::os::wayland::inputhandler::InputHandler;
use crate::os::wayland::output::OutputHandler;
use crate::os::x11::keyboard::Keyboard;
use crate::screen::{ScreenInfo, Screens};
use crate::spawn::*;
use crate::{Appearance, Connection, ScreenRect, WindowEvent};
use anyhow::{bail, Context};
use mio::unix::SourceFd;
use mio::{Events, Interest, Poll, Token};
use smithay_client_toolkit as toolkit;
use std::cell::RefCell;
use std::collections::HashMap;
use std::os::unix::fs::FileExt;
use std::os::unix::io::FromRawFd;
use std::rc::Rc;
use std::sync::atomic::AtomicUsize;
use toolkit::environment::Environment;
use toolkit::reexports::client::Display;
use toolkit::seat::SeatListener;
use toolkit::shm::AutoMemPool;
use wayland_client::protocol::wl_keyboard::{Event as WlKeyboardEvent, KeymapFormat, WlKeyboard};
use wayland_client::{EventQueue, Main};
toolkit::default_environment!(MyEnvironment, desktop,
fields=[
output_handler: OutputHandler,
input_handler: InputHandler,
],
singles=[
wayland_protocols::wlr::unstable::output_management::v1::client::zwlr_output_manager_v1::ZwlrOutputManagerV1 => output_handler,
wayland_protocols::unstable::text_input::v3::client::zwp_text_input_manager_v3::ZwpTextInputManagerV3 => input_handler,
]);
impl MyEnvironment {
pub fn input_handler(&mut self) -> &mut InputHandler {
&mut self.input_handler
}
}
pub struct WaylandConnection {
should_terminate: RefCell<bool>,
pub(crate) next_window_id: AtomicUsize,
pub(crate) windows: RefCell<HashMap<usize, Rc<RefCell<WaylandWindowInner>>>>,
// Take care with the destruction order: the underlying wayland
// libraries are not safe and require destruction in reverse
// creation order. This list of fields must reflect that otherwise
// we'll segfault on shutdown.
// Rust guarantees that struct fields are dropped in the order
// they appear in the struct, so the Display must be at the
// bottom of this list, and opengl, which depends on everything
// must be ahead of the rest.
pub(crate) gl_connection: RefCell<Option<Rc<crate::egl::GlConnection>>>,
pub(crate) pointer: RefCell<PointerDispatcher>,
pub(crate) keyboard_mapper: RefCell<Option<Keyboard>>,
pub(crate) keyboard_window_id: RefCell<Option<usize>>,
pub(crate) surface_to_window_id: RefCell<HashMap<u32, usize>>,
pub(crate) active_surface_id: RefCell<u32>,
/// Repeats per second
pub(crate) key_repeat_rate: RefCell<i32>,
pub(crate) mem_pool: RefCell<AutoMemPool>,
/// Delay before repeating, in milliseconds
pub(crate) key_repeat_delay: RefCell<i32>,
pub(crate) last_serial: RefCell<u32>,
seat_listener: SeatListener,
pub(crate) environment: Environment<MyEnvironment>,
event_q: RefCell<EventQueue>,
pub(crate) display: RefCell<Display>,
}
impl Drop for WaylandConnection {
fn drop(&mut self) {
self.environment
.with_inner(|env| env.input_handler.shutdown());
}
}
impl WaylandConnection {
pub fn create_new() -> anyhow::Result<Self> {
let (environment, display, event_q) = toolkit::new_default_environment!(
MyEnvironment,
desktop,
fields = [
output_handler: OutputHandler::new(),
input_handler: InputHandler::new(),
]
)?;
let mut pointer = None;
let mut seat_keyboards = HashMap::new();
for seat in environment.get_all_seats() {
if let Some((has_kbd, has_ptr, name)) =
toolkit::seat::with_seat_data(&seat, |seat_data| {
(
seat_data.has_keyboard && !seat_data.defunct,
seat_data.has_pointer && !seat_data.defunct,
seat_data.name.clone(),
)
})
{
if has_kbd {
let keyboard = seat.get_keyboard();
keyboard.quick_assign(|keyboard, event, _| {
let conn = Connection::get().unwrap().wayland();
if let Err(err) = conn.keyboard_event(keyboard, event) {
log::error!("keyboard_event: {:#}", err);
}
});
environment.with_inner(|env| env.input_handler.advise_seat(&seat, &keyboard));
seat_keyboards.insert(name, keyboard);
}
if has_ptr {
pointer.replace(PointerDispatcher::register(
&seat,
environment.require_global(),
environment.require_global(),
environment.require_global(),
environment.get_primary_selection_manager(),
)?);
}
}
}
let pointer =
pointer.ok_or_else(|| anyhow::anyhow!("no seats have an available pointer"))?;
let seat_listener;
{
let env = environment.clone();
seat_listener = environment.listen_for_seats(move |seat, seat_data, _| {
if seat_data.has_keyboard {
if !seat_data.defunct {
// We only want to assign a new keyboard object if we don't already have
// one for this seat. When a seat is being created or updated, the listener
// can receive the same seat multiple times: for example, when switching
// back from another virtual console, the same seat is usually seen four
// times with different data flags:
//
// has_pointer: true; has_keyboard: false
// has_pointer: false; has_keyboard: false
// has_pointer: false; has_keyboard: true
// has_pointer: true; has_keyboard: true
//
// This is essentially telling the client to re-assign its keyboard and
// pointer, but that means that this listener will fire twice with
// has_keyboard set to true. If we assign a handler both times, then we end
// up handling key events twice.
if !seat_keyboards.contains_key(&seat_data.name) {
let keyboard = seat.get_keyboard();
keyboard.quick_assign(|keyboard, event, _| {
let conn = Connection::get().unwrap().wayland();
if let Err(err) = conn.keyboard_event(keyboard, event) {
log::error!("keyboard_event: {:#}", err);
}
});
env.with_inner(|env| env.input_handler.advise_seat(&seat, &keyboard));
seat_keyboards.insert(seat_data.name.clone(), keyboard);
}
} else {
env.with_inner(|env| env.input_handler.seat_defunct(&seat));
}
} else {
// If we previously had a keyboard object on this seat, it's no longer valid if
// has_keyboard is false, so we remove the keyboard object we knew about and
// thereby ensure that we assign a new keyboard object next time the listener
// fires for this seat with has_keyboard = true.
seat_keyboards.remove(&seat_data.name);
}
if seat_data.has_pointer && !seat_data.defunct {
let conn = Connection::get().unwrap().wayland();
conn.pointer.borrow_mut().seat_changed(&seat);
}
});
}
let mem_pool = environment.create_auto_pool()?;
Ok(Self {
display: RefCell::new(display),
environment,
should_terminate: RefCell::new(false),
next_window_id: AtomicUsize::new(1), | windows: RefCell::new(HashMap::new()),
event_q: RefCell::new(event_q),
pointer: RefCell::new(pointer),
seat_listener,
mem_pool: RefCell::new(mem_pool),
gl_connection: RefCell::new(None),
keyboard_mapper: RefCell::new(None),
key_repeat_rate: RefCell::new(25),
key_repeat_delay: RefCell::new(400),
keyboard_window_id: RefCell::new(None),
last_serial: RefCell::new(0),
surface_to_window_id: RefCell::new(HashMap::new()),
active_surface_id: RefCell::new(0),
})
}
fn keyboard_event(
&self,
keyboard: Main<WlKeyboard>,
event: WlKeyboardEvent,
) -> anyhow::Result<()> {
match &event {
WlKeyboardEvent::Enter {
serial, surface, ..
} => {
// update global active surface id
*self.active_surface_id.borrow_mut() = surface.as_ref().id();
*self.last_serial.borrow_mut() = *serial;
if let Some(&window_id) = self
.surface_to_window_id
.borrow()
.get(&surface.as_ref().id())
{
self.keyboard_window_id.borrow_mut().replace(window_id);
self.environment.with_inner(|env| {
if let Some(input) =
env.input_handler.get_text_input_for_keyboard(&keyboard)
{
input.enable();
input.commit();
}
env.input_handler.advise_surface(&surface, &keyboard);
});
} else {
log::warn!("{:?}, no known surface", event);
}
}
WlKeyboardEvent::Leave { serial, .. } => {
if let Some(input) = self
.environment
.with_inner(|env| env.input_handler.get_text_input_for_keyboard(&keyboard))
{
input.disable();
input.commit();
}
*self.last_serial.borrow_mut() = *serial;
}
WlKeyboardEvent::Key { serial, .. } | WlKeyboardEvent::Modifiers { serial, .. } => {
*self.last_serial.borrow_mut() = *serial;
}
WlKeyboardEvent::RepeatInfo { rate, delay } => {
*self.key_repeat_rate.borrow_mut() = *rate;
*self.key_repeat_delay.borrow_mut() = *delay;
}
WlKeyboardEvent::Keymap { format, fd, size } => {
let file = unsafe { std::fs::File::from_raw_fd(*fd) };
match format {
KeymapFormat::XkbV1 => {
let mut data = vec![0u8; *size as usize];
file.read_exact_at(&mut data, 0)?;
// Dance around CString panicing on the NUL terminator
// in the xkbcommon crate
while let Some(0) = data.last() {
data.pop();
}
let s = String::from_utf8(data)?;
match Keyboard::new_from_string(s) {
Ok(k) => {
self.keyboard_mapper.replace(Some(k));
}
Err(err) => {
log::error!("Error processing keymap change: {:#}", err);
}
}
}
_ => {}
}
}
_ => {}
}
if let Some(&window_id) = self.keyboard_window_id.borrow().as_ref() {
if let Some(win) = self.window_by_id(window_id) {
let mut inner = win.borrow_mut();
inner.keyboard_event(event);
}
}
Ok(())
}
pub(crate) fn dispatch_to_focused_window(&self, event: WindowEvent) {
if let Some(&window_id) = self.keyboard_window_id.borrow().as_ref() {
if let Some(win) = self.window_by_id(window_id) {
let mut inner = win.borrow_mut();
inner.events.dispatch(event);
}
}
}
pub(crate) fn next_window_id(&self) -> usize {
self.next_window_id
.fetch_add(1, ::std::sync::atomic::Ordering::Relaxed)
}
fn flush(&self) -> anyhow::Result<()> {
if let Err(e) = self.display.borrow_mut().flush() {
if e.kind() != ::std::io::ErrorKind::WouldBlock {
bail!("Error while flushing display: {}", e);
}
}
Ok(())
}
pub(crate) fn window_by_id(&self, window_id: usize) -> Option<Rc<RefCell<WaylandWindowInner>>> {
self.windows.borrow().get(&window_id).map(Rc::clone)
}
pub(crate) fn with_window_inner<
R,
F: FnOnce(&mut WaylandWindowInner) -> anyhow::Result<R> + Send + 'static,
>(
window: usize,
f: F,
) -> promise::Future<R>
where
R: Send + 'static,
{
let mut prom = promise::Promise::new();
let future = prom.get_future().unwrap();
promise::spawn::spawn_into_main_thread(async move {
if let Some(handle) = Connection::get().unwrap().wayland().window_by_id(window) {
let mut inner = handle.borrow_mut();
prom.result(f(&mut inner));
}
})
.detach();
future
}
fn run_message_loop_impl(&self) -> anyhow::Result<()> {
const TOK_WL: usize = 0xffff_fffc;
const TOK_SPAWN: usize = 0xffff_fffd;
let tok_wl = Token(TOK_WL);
let tok_spawn = Token(TOK_SPAWN);
let mut poll = Poll::new()?;
let mut events = Events::with_capacity(8);
poll.registry().register(
&mut SourceFd(&self.display.borrow().get_connection_fd()),
tok_wl,
Interest::READABLE,
)?;
poll.registry().register(
&mut SourceFd(&SPAWN_QUEUE.raw_fd()),
tok_spawn,
Interest::READABLE,
)?;
while !*self.should_terminate.borrow() {
// Check the spawn queue before we try to sleep; there may
// be work pending and we don't guarantee that there is a
// 1:1 wakeup to queued function, so we need to be assertive
// in order to avoid missing wakeups
let timeout = if SPAWN_QUEUE.run() {
// if we processed one, we don't want to sleep because
// there may be others to deal with
Some(std::time::Duration::from_secs(0))
} else {
None
};
{
let mut event_q = self.event_q.borrow_mut();
if let Err(err) = event_q.dispatch_pending(&mut (), |_, _, _| {}) {
return Err(err).with_context(|| {
format!(
"error during event_q.dispatch protocol_error={:?}",
self.display.borrow().protocol_error()
)
});
}
}
self.flush()?;
if let Err(err) = poll.poll(&mut events, timeout) {
if err.kind() == std::io::ErrorKind::Interrupted {
continue;
}
bail!("polling for events: {:?}", err);
}
for event in &events {
if event.token() == tok_wl {
let event_q = self.event_q.borrow();
if let Some(guard) = event_q.prepare_read() {
if let Err(err) = guard.read_events() {
if err.kind() != std::io::ErrorKind::WouldBlock
&& err.kind() != std::io::ErrorKind::Interrupted
{
return Err(err).with_context(|| {
format!(
"error during event_q.read_events protocol_error={:?}",
self.display.borrow().protocol_error()
)
});
}
}
}
}
}
}
Ok(())
}
pub(crate) fn advise_of_appearance_change(&self, appearance: crate::Appearance) {
for win in self.windows.borrow().values() {
win.borrow_mut().appearance_changed(appearance);
}
}
}
impl ConnectionOps for WaylandConnection {
fn name(&self) -> String {
"Wayland".to_string()
}
fn terminate_message_loop(&self) {
*self.should_terminate.borrow_mut() = true;
}
fn get_appearance(&self) -> Appearance {
match promise::spawn::block_on(crate::os::xdg_desktop_portal::get_appearance()) {
Ok(Some(appearance)) => return appearance,
Ok(None) => {}
Err(err) => {
log::debug!("Unable to resolve appearance using xdg-desktop-portal: {err:#}");
}
}
// fallback
Appearance::Light
}
fn run_message_loop(&self) -> anyhow::Result<()> {
let res = self.run_message_loop_impl();
// Ensure that we drop these eagerly, to avoid
// noisy errors wrt. global destructors unwinding
// in unexpected places
self.windows.borrow_mut().clear();
res
}
fn screens(&self) -> anyhow::Result<Screens> {
if let Some(screens) = self
.environment
.with_inner(|env| env.output_handler.screens())
{
return Ok(screens);
}
let mut by_name = HashMap::new();
let mut virtual_rect: ScreenRect = euclid::rect(0, 0, 0, 0);
for output in self.environment.get_all_outputs() {
toolkit::output::with_output_info(&output, |info| {
let name = if info.name.is_empty() {
format!("{} {}", info.model, info.make)
} else {
info.name.clone()
};
let (width, height) = info
.modes
.iter()
.find(|mode| mode.is_current)
.map(|mode| mode.dimensions)
.unwrap_or((info.physical_size.0, info.physical_size.1));
let rect = euclid::rect(
info.location.0 as isize,
info.location.1 as isize,
width as isize,
height as isize,
);
let scale = info.scale_factor as f64;
virtual_rect = virtual_rect.union(&rect);
by_name.insert(
name.clone(),
ScreenInfo {
name,
rect,
scale,
max_fps: None,
},
);
});
}
// The main screen is the one either at the origin of
// the virtual area, or if that doesn't exist for some weird
// reason, the screen closest to the origin.
let main = by_name
.values()
.min_by_key(|screen| {
screen
.rect
.origin
.to_f32()
.distance_to(euclid::Point2D::origin())
.abs() as isize
})
.ok_or_else(|| anyhow::anyhow!("no screens were found"))?
.clone();
// We don't yet know how to determine the active screen,
// so assume the main screen.
let active = main.clone();
Ok(Screens {
main,
active,
by_name,
virtual_rect,
})
}
} | random_line_split | |
connection.rs | #![allow(dead_code)]
use super::pointer::*;
use super::window::*;
use crate::connection::ConnectionOps;
use crate::os::wayland::inputhandler::InputHandler;
use crate::os::wayland::output::OutputHandler;
use crate::os::x11::keyboard::Keyboard;
use crate::screen::{ScreenInfo, Screens};
use crate::spawn::*;
use crate::{Appearance, Connection, ScreenRect, WindowEvent};
use anyhow::{bail, Context};
use mio::unix::SourceFd;
use mio::{Events, Interest, Poll, Token};
use smithay_client_toolkit as toolkit;
use std::cell::RefCell;
use std::collections::HashMap;
use std::os::unix::fs::FileExt;
use std::os::unix::io::FromRawFd;
use std::rc::Rc;
use std::sync::atomic::AtomicUsize;
use toolkit::environment::Environment;
use toolkit::reexports::client::Display;
use toolkit::seat::SeatListener;
use toolkit::shm::AutoMemPool;
use wayland_client::protocol::wl_keyboard::{Event as WlKeyboardEvent, KeymapFormat, WlKeyboard};
use wayland_client::{EventQueue, Main};
toolkit::default_environment!(MyEnvironment, desktop,
fields=[
output_handler: OutputHandler,
input_handler: InputHandler,
],
singles=[
wayland_protocols::wlr::unstable::output_management::v1::client::zwlr_output_manager_v1::ZwlrOutputManagerV1 => output_handler,
wayland_protocols::unstable::text_input::v3::client::zwp_text_input_manager_v3::ZwpTextInputManagerV3 => input_handler,
]);
impl MyEnvironment {
pub fn input_handler(&mut self) -> &mut InputHandler {
&mut self.input_handler
}
}
pub struct WaylandConnection {
should_terminate: RefCell<bool>,
pub(crate) next_window_id: AtomicUsize,
pub(crate) windows: RefCell<HashMap<usize, Rc<RefCell<WaylandWindowInner>>>>,
// Take care with the destruction order: the underlying wayland
// libraries are not safe and require destruction in reverse
// creation order. This list of fields must reflect that otherwise
// we'll segfault on shutdown.
// Rust guarantees that struct fields are dropped in the order
// they appear in the struct, so the Display must be at the
// bottom of this list, and opengl, which depends on everything
// must be ahead of the rest.
pub(crate) gl_connection: RefCell<Option<Rc<crate::egl::GlConnection>>>,
pub(crate) pointer: RefCell<PointerDispatcher>,
pub(crate) keyboard_mapper: RefCell<Option<Keyboard>>,
pub(crate) keyboard_window_id: RefCell<Option<usize>>,
pub(crate) surface_to_window_id: RefCell<HashMap<u32, usize>>,
pub(crate) active_surface_id: RefCell<u32>,
/// Repeats per second
pub(crate) key_repeat_rate: RefCell<i32>,
pub(crate) mem_pool: RefCell<AutoMemPool>,
/// Delay before repeating, in milliseconds
pub(crate) key_repeat_delay: RefCell<i32>,
pub(crate) last_serial: RefCell<u32>,
seat_listener: SeatListener,
pub(crate) environment: Environment<MyEnvironment>,
event_q: RefCell<EventQueue>,
pub(crate) display: RefCell<Display>,
}
impl Drop for WaylandConnection {
fn drop(&mut self) {
self.environment
.with_inner(|env| env.input_handler.shutdown());
}
}
impl WaylandConnection {
pub fn create_new() -> anyhow::Result<Self> {
let (environment, display, event_q) = toolkit::new_default_environment!(
MyEnvironment,
desktop,
fields = [
output_handler: OutputHandler::new(),
input_handler: InputHandler::new(),
]
)?;
let mut pointer = None;
let mut seat_keyboards = HashMap::new();
for seat in environment.get_all_seats() {
if let Some((has_kbd, has_ptr, name)) =
toolkit::seat::with_seat_data(&seat, |seat_data| {
(
seat_data.has_keyboard && !seat_data.defunct,
seat_data.has_pointer && !seat_data.defunct,
seat_data.name.clone(),
)
})
{
if has_kbd {
let keyboard = seat.get_keyboard();
keyboard.quick_assign(|keyboard, event, _| {
let conn = Connection::get().unwrap().wayland();
if let Err(err) = conn.keyboard_event(keyboard, event) {
log::error!("keyboard_event: {:#}", err);
}
});
environment.with_inner(|env| env.input_handler.advise_seat(&seat, &keyboard));
seat_keyboards.insert(name, keyboard);
}
if has_ptr {
pointer.replace(PointerDispatcher::register(
&seat,
environment.require_global(),
environment.require_global(),
environment.require_global(),
environment.get_primary_selection_manager(),
)?);
}
}
}
let pointer =
pointer.ok_or_else(|| anyhow::anyhow!("no seats have an available pointer"))?;
let seat_listener;
{
let env = environment.clone();
seat_listener = environment.listen_for_seats(move |seat, seat_data, _| {
if seat_data.has_keyboard {
if !seat_data.defunct {
// We only want to assign a new keyboard object if we don't already have
// one for this seat. When a seat is being created or updated, the listener
// can receive the same seat multiple times: for example, when switching
// back from another virtual console, the same seat is usually seen four
// times with different data flags:
//
// has_pointer: true; has_keyboard: false
// has_pointer: false; has_keyboard: false
// has_pointer: false; has_keyboard: true
// has_pointer: true; has_keyboard: true
//
// This is essentially telling the client to re-assign its keyboard and
// pointer, but that means that this listener will fire twice with
// has_keyboard set to true. If we assign a handler both times, then we end
// up handling key events twice.
if !seat_keyboards.contains_key(&seat_data.name) {
let keyboard = seat.get_keyboard();
keyboard.quick_assign(|keyboard, event, _| {
let conn = Connection::get().unwrap().wayland();
if let Err(err) = conn.keyboard_event(keyboard, event) {
log::error!("keyboard_event: {:#}", err);
}
});
env.with_inner(|env| env.input_handler.advise_seat(&seat, &keyboard));
seat_keyboards.insert(seat_data.name.clone(), keyboard);
}
} else {
env.with_inner(|env| env.input_handler.seat_defunct(&seat));
}
} else {
// If we previously had a keyboard object on this seat, it's no longer valid if
// has_keyboard is false, so we remove the keyboard object we knew about and
// thereby ensure that we assign a new keyboard object next time the listener
// fires for this seat with has_keyboard = true.
seat_keyboards.remove(&seat_data.name);
}
if seat_data.has_pointer && !seat_data.defunct {
let conn = Connection::get().unwrap().wayland();
conn.pointer.borrow_mut().seat_changed(&seat);
}
});
}
let mem_pool = environment.create_auto_pool()?;
Ok(Self {
display: RefCell::new(display),
environment,
should_terminate: RefCell::new(false),
next_window_id: AtomicUsize::new(1),
windows: RefCell::new(HashMap::new()),
event_q: RefCell::new(event_q),
pointer: RefCell::new(pointer),
seat_listener,
mem_pool: RefCell::new(mem_pool),
gl_connection: RefCell::new(None),
keyboard_mapper: RefCell::new(None),
key_repeat_rate: RefCell::new(25),
key_repeat_delay: RefCell::new(400),
keyboard_window_id: RefCell::new(None),
last_serial: RefCell::new(0),
surface_to_window_id: RefCell::new(HashMap::new()),
active_surface_id: RefCell::new(0),
})
}
fn keyboard_event(
&self,
keyboard: Main<WlKeyboard>,
event: WlKeyboardEvent,
) -> anyhow::Result<()> {
match &event {
WlKeyboardEvent::Enter {
serial, surface, ..
} => {
// update global active surface id
*self.active_surface_id.borrow_mut() = surface.as_ref().id();
*self.last_serial.borrow_mut() = *serial;
if let Some(&window_id) = self
.surface_to_window_id
.borrow()
.get(&surface.as_ref().id())
{
self.keyboard_window_id.borrow_mut().replace(window_id);
self.environment.with_inner(|env| {
if let Some(input) =
env.input_handler.get_text_input_for_keyboard(&keyboard)
{
input.enable();
input.commit();
}
env.input_handler.advise_surface(&surface, &keyboard);
});
} else {
log::warn!("{:?}, no known surface", event);
}
}
WlKeyboardEvent::Leave { serial, .. } => {
if let Some(input) = self
.environment
.with_inner(|env| env.input_handler.get_text_input_for_keyboard(&keyboard))
{
input.disable();
input.commit();
}
*self.last_serial.borrow_mut() = *serial;
}
WlKeyboardEvent::Key { serial, .. } | WlKeyboardEvent::Modifiers { serial, .. } => {
*self.last_serial.borrow_mut() = *serial;
}
WlKeyboardEvent::RepeatInfo { rate, delay } => {
*self.key_repeat_rate.borrow_mut() = *rate;
*self.key_repeat_delay.borrow_mut() = *delay;
}
WlKeyboardEvent::Keymap { format, fd, size } => {
let file = unsafe { std::fs::File::from_raw_fd(*fd) };
match format {
KeymapFormat::XkbV1 => {
let mut data = vec![0u8; *size as usize];
file.read_exact_at(&mut data, 0)?;
// Dance around CString panicing on the NUL terminator
// in the xkbcommon crate
while let Some(0) = data.last() {
data.pop();
}
let s = String::from_utf8(data)?;
match Keyboard::new_from_string(s) {
Ok(k) => {
self.keyboard_mapper.replace(Some(k));
}
Err(err) => {
log::error!("Error processing keymap change: {:#}", err);
}
}
}
_ => {}
}
}
_ => {}
}
if let Some(&window_id) = self.keyboard_window_id.borrow().as_ref() {
if let Some(win) = self.window_by_id(window_id) {
let mut inner = win.borrow_mut();
inner.keyboard_event(event);
}
}
Ok(())
}
pub(crate) fn dispatch_to_focused_window(&self, event: WindowEvent) |
pub(crate) fn next_window_id(&self) -> usize {
self.next_window_id
.fetch_add(1, ::std::sync::atomic::Ordering::Relaxed)
}
fn flush(&self) -> anyhow::Result<()> {
if let Err(e) = self.display.borrow_mut().flush() {
if e.kind() != ::std::io::ErrorKind::WouldBlock {
bail!("Error while flushing display: {}", e);
}
}
Ok(())
}
pub(crate) fn window_by_id(&self, window_id: usize) -> Option<Rc<RefCell<WaylandWindowInner>>> {
self.windows.borrow().get(&window_id).map(Rc::clone)
}
pub(crate) fn with_window_inner<
R,
F: FnOnce(&mut WaylandWindowInner) -> anyhow::Result<R> + Send + 'static,
>(
window: usize,
f: F,
) -> promise::Future<R>
where
R: Send + 'static,
{
let mut prom = promise::Promise::new();
let future = prom.get_future().unwrap();
promise::spawn::spawn_into_main_thread(async move {
if let Some(handle) = Connection::get().unwrap().wayland().window_by_id(window) {
let mut inner = handle.borrow_mut();
prom.result(f(&mut inner));
}
})
.detach();
future
}
fn run_message_loop_impl(&self) -> anyhow::Result<()> {
const TOK_WL: usize = 0xffff_fffc;
const TOK_SPAWN: usize = 0xffff_fffd;
let tok_wl = Token(TOK_WL);
let tok_spawn = Token(TOK_SPAWN);
let mut poll = Poll::new()?;
let mut events = Events::with_capacity(8);
poll.registry().register(
&mut SourceFd(&self.display.borrow().get_connection_fd()),
tok_wl,
Interest::READABLE,
)?;
poll.registry().register(
&mut SourceFd(&SPAWN_QUEUE.raw_fd()),
tok_spawn,
Interest::READABLE,
)?;
while !*self.should_terminate.borrow() {
// Check the spawn queue before we try to sleep; there may
// be work pending and we don't guarantee that there is a
// 1:1 wakeup to queued function, so we need to be assertive
// in order to avoid missing wakeups
let timeout = if SPAWN_QUEUE.run() {
// if we processed one, we don't want to sleep because
// there may be others to deal with
Some(std::time::Duration::from_secs(0))
} else {
None
};
{
let mut event_q = self.event_q.borrow_mut();
if let Err(err) = event_q.dispatch_pending(&mut (), |_, _, _| {}) {
return Err(err).with_context(|| {
format!(
"error during event_q.dispatch protocol_error={:?}",
self.display.borrow().protocol_error()
)
});
}
}
self.flush()?;
if let Err(err) = poll.poll(&mut events, timeout) {
if err.kind() == std::io::ErrorKind::Interrupted {
continue;
}
bail!("polling for events: {:?}", err);
}
for event in &events {
if event.token() == tok_wl {
let event_q = self.event_q.borrow();
if let Some(guard) = event_q.prepare_read() {
if let Err(err) = guard.read_events() {
if err.kind() != std::io::ErrorKind::WouldBlock
&& err.kind() != std::io::ErrorKind::Interrupted
{
return Err(err).with_context(|| {
format!(
"error during event_q.read_events protocol_error={:?}",
self.display.borrow().protocol_error()
)
});
}
}
}
}
}
}
Ok(())
}
pub(crate) fn advise_of_appearance_change(&self, appearance: crate::Appearance) {
for win in self.windows.borrow().values() {
win.borrow_mut().appearance_changed(appearance);
}
}
}
impl ConnectionOps for WaylandConnection {
fn name(&self) -> String {
"Wayland".to_string()
}
fn terminate_message_loop(&self) {
*self.should_terminate.borrow_mut() = true;
}
fn get_appearance(&self) -> Appearance {
match promise::spawn::block_on(crate::os::xdg_desktop_portal::get_appearance()) {
Ok(Some(appearance)) => return appearance,
Ok(None) => {}
Err(err) => {
log::debug!("Unable to resolve appearance using xdg-desktop-portal: {err:#}");
}
}
// fallback
Appearance::Light
}
fn run_message_loop(&self) -> anyhow::Result<()> {
let res = self.run_message_loop_impl();
// Ensure that we drop these eagerly, to avoid
// noisy errors wrt. global destructors unwinding
// in unexpected places
self.windows.borrow_mut().clear();
res
}
fn screens(&self) -> anyhow::Result<Screens> {
if let Some(screens) = self
.environment
.with_inner(|env| env.output_handler.screens())
{
return Ok(screens);
}
let mut by_name = HashMap::new();
let mut virtual_rect: ScreenRect = euclid::rect(0, 0, 0, 0);
for output in self.environment.get_all_outputs() {
toolkit::output::with_output_info(&output, |info| {
let name = if info.name.is_empty() {
format!("{} {}", info.model, info.make)
} else {
info.name.clone()
};
let (width, height) = info
.modes
.iter()
.find(|mode| mode.is_current)
.map(|mode| mode.dimensions)
.unwrap_or((info.physical_size.0, info.physical_size.1));
let rect = euclid::rect(
info.location.0 as isize,
info.location.1 as isize,
width as isize,
height as isize,
);
let scale = info.scale_factor as f64;
virtual_rect = virtual_rect.union(&rect);
by_name.insert(
name.clone(),
ScreenInfo {
name,
rect,
scale,
max_fps: None,
},
);
});
}
// The main screen is the one either at the origin of
// the virtual area, or if that doesn't exist for some weird
// reason, the screen closest to the origin.
let main = by_name
.values()
.min_by_key(|screen| {
screen
.rect
.origin
.to_f32()
.distance_to(euclid::Point2D::origin())
.abs() as isize
})
.ok_or_else(|| anyhow::anyhow!("no screens were found"))?
.clone();
// We don't yet know how to determine the active screen,
// so assume the main screen.
let active = main.clone();
Ok(Screens {
main,
active,
by_name,
virtual_rect,
})
}
}
| {
if let Some(&window_id) = self.keyboard_window_id.borrow().as_ref() {
if let Some(win) = self.window_by_id(window_id) {
let mut inner = win.borrow_mut();
inner.events.dispatch(event);
}
}
} | identifier_body |
substitution.rs | use std::{cell::RefCell, default::Default, fmt};
use union_find::{QuickFindUf, Union, UnionByRank, UnionFind, UnionResult};
use crate::base::{
fixed::{FixedVec, FixedVecMap},
kind::ArcKind,
symbol::Symbol,
types::{self, ArcType, Flags, FlagsVisitor, Skolem, Type, TypeContext, Walker},
};
use crate::typ::RcType;
#[derive(Debug, PartialEq, Functor)]
pub enum Error<T> {
Occurs(T, T),
}
impl<T> fmt::Display for Error<T>
where
T: fmt::Display,
T: for<'a> types::ToDoc<'a, ::pretty::Arena<'a, ()>, (), ()>,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::Error::*;
match *self {
Occurs(ref var, ref typ) => write!(f, "Variable `{}` occurs in `{}`.", var, typ),
}
}
}
pub struct Substitution<T>
where
T: Substitutable,
{
/// Union-find data structure used to store the relationships of all variables in the
/// substitution
union: RefCell<QuickFindUf<UnionByLevel>>,
/// Vector containing all created variables for this substitution. Needed for the `real` method
/// which needs to always be able to return a `&T` reference
variables: FixedVec<T>,
/// For variables which have been infered to have a real type (not a variable) their types are
/// stored here. As the type stored will never changed we use a `FixedVecMap` lets `real` return
/// `&T` from this map safely.
types: FixedVecMap<T>,
factory: T::Factory,
interner: T::Interner,
variable_cache: RefCell<Vec<T>>,
}
impl<T> TypeContext<Symbol, T> for Substitution<T>
where
T: Substitutable + From<Type<Symbol, T>>,
for<'a> &'a T::Interner: TypeContext<Symbol, T>,
{
gluon_base::forward_type_interner_methods!(Symbol, T, self_, &self_.interner);
}
impl<'a, T> TypeContext<Symbol, T> for &'a Substitution<T>
where
T: Substitutable + From<Type<Symbol, T>>,
&'a T::Interner: TypeContext<Symbol, T>,
{
gluon_base::forward_type_interner_methods!(Symbol, T, self_, &self_.interner);
}
impl<'a> types::Substitution<Symbol, RcType> for &'a Substitution<RcType> {
fn new_var(&mut self) -> RcType {
Substitution::new_var(*self)
}
fn new_skolem(&mut self, name: Symbol, kind: ArcKind) -> RcType {
Substitution::new_skolem(*self, name, kind)
}
}
impl<T> Default for Substitution<T>
where
T: Substitutable,
T::Factory: Default,
T::Interner: Default,
{
fn default() -> Substitution<T> {
Substitution::new(Default::default(), Default::default())
}
}
/// Trait which variables need to implement to allow the substitution to get to the u32 identifying
/// the variable
pub trait Variable {
fn get_id(&self) -> u32;
}
impl Variable for u32 {
fn get_id(&self) -> u32 {
*self
}
}
pub trait VariableFactory {
type Variable: Variable;
fn new(&self, x: u32) -> Self::Variable;
}
impl VariableFactory for () {
type Variable = u32;
fn new(&self, x: u32) -> Self::Variable {
x
}
}
/// Trait implemented on types which may contain substitutable variables
pub trait Substitutable: Sized {
type Variable: Variable;
type Factory: VariableFactory<Variable = Self::Variable>;
type Interner: Default;
/// Constructs a new object from its variable type
fn from_variable(subs: &Substitution<Self>, x: Self::Variable) -> Self;
fn into_variable(&mut self, x: Self::Variable);
fn is_unique(self_: &Self) -> bool;
/// Retrieves the variable if `self` is a variable otherwise returns `None`
fn get_var(&self) -> Option<&Self::Variable>;
fn get_id(&self) -> Option<u32> {
self.get_var().map(|var| var.get_id())
}
fn traverse<'a, F>(&'a self, f: &mut F)
where
F: Walker<'a, Self>;
fn instantiate(&self, subs: &Substitution<Self>) -> Self;
// Allowed return true even if the type does not contain variables but not false if it does
// contain
fn contains_variables(&self) -> bool {
true
}
fn on_union(&self) -> Option<&Self> {
None
}
}
pub fn occurs<T>(typ: &T, subs: &Substitution<T>, var: u32) -> bool
where
T: Substitutable,
{
struct Occurs<'a, T: Substitutable + 'a> {
occurs: bool,
var: u32,
subs: &'a Substitution<T>,
}
impl<'a, 't, T> Walker<'t, T> for Occurs<'a, T>
where
T: Substitutable,
{
fn walk(&mut self, typ: &'t T) {
if !typ.contains_variables() || self.occurs {
return;
}
let typ = self.subs.real(typ);
if let Some(other) = typ.get_var() {
if self.var.get_id() == other.get_id() {
self.occurs = true;
typ.traverse(self);
return;
}
self.subs.update_level(self.var, other.get_id());
}
typ.traverse(self);
}
}
let mut occurs = Occurs {
occurs: false,
var,
subs,
};
occurs.walk(typ);
occurs.occurs
}
/// Specialized union implementation which makes sure that variables with a higher level always
/// point to the lower level variable.
///
/// map.union(1, 2);
/// map.find(2) -> 1
/// map.find(1) -> 1
#[derive(Debug)]
struct UnionByLevel {
rank: UnionByRank,
level: u32,
}
impl Default for UnionByLevel {
fn default() -> UnionByLevel {
UnionByLevel {
rank: UnionByRank::default(),
level: ::std::u32::MAX,
}
}
}
impl Union for UnionByLevel {
#[inline]
fn union(left: UnionByLevel, right: UnionByLevel) -> UnionResult<UnionByLevel> {
use std::cmp::Ordering;
let (rank_result, rank) = match Union::union(left.rank, right.rank) {
UnionResult::Left(l) => (
UnionResult::Left(UnionByLevel {
rank: l,
level: left.level,
}),
l,
),
UnionResult::Right(r) => (
UnionResult::Right(UnionByLevel {
rank: r,
level: left.level,
}),
r,
),
};
match left.level.cmp(&right.level) {
Ordering::Less => UnionResult::Left(UnionByLevel {
rank: rank,
level: left.level,
}),
Ordering::Greater => UnionResult::Right(UnionByLevel {
rank: rank,
level: right.level,
}),
Ordering::Equal => rank_result,
}
}
}
impl<T> fmt::Debug for Substitution<T>
where
T: fmt::Debug + Substitutable,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"Substitution {{ map: {:?}, var_id: {:?} }}",
self.union.borrow(),
self.var_id()
)
}
}
impl<T> Substitution<T>
where
T: Substitutable,
{
pub fn new(factory: T::Factory, interner: T::Interner) -> Substitution<T> {
Substitution {
union: RefCell::new(QuickFindUf::new(0)),
variables: FixedVec::new(),
types: FixedVecMap::new(),
factory: factory,
interner,
variable_cache: Default::default(),
}
}
pub fn var_id(&self) -> u32 {
self.variables.len() as u32
}
pub fn insert(&self, var: u32, t: T) {
match t.get_var() {
Some(_) => ice!(
"Tried to insert variable which is not allowed as that would cause memory \
unsafety"
),
None => match self.types.try_insert(var as usize, t.into()) {
Ok(()) => (),
Err(_) => ice!("Expected variable to not have a type associated with it"),
},
}
}
pub fn replace(&mut self, var: u32, t: T) {
debug_assert!(t.get_id() != Some(var));
self.types.insert(var as usize, t.into());
}
pub fn reset(&mut self, var: u32) {
self.types.remove(var as usize);
}
/// Assumes that no variables unified with anything (but variables < level may exist)
pub fn clear_from(&mut self, level: u32) {
self.union = RefCell::new(QuickFindUf::new(0));
let mut u = self.union.borrow_mut();
for _ in 0..level {
u.insert(UnionByLevel {
..UnionByLevel::default()
});
}
let mut variable_cache = self.variable_cache.borrow_mut();
// Since no types should be unified with anything we can remove all of this and reuse the
// unique values
variable_cache.extend(self.types.drain().filter(T::is_unique));
while self.variables.len() > level as usize {
variable_cache.push(self.variables.pop().unwrap());
}
}
/// Creates a new variable
pub fn new_var(&self) -> T
where
T: Clone,
{
self.new_var_fn(|var| match self.variable_cache.borrow_mut().pop() {
Some(mut typ) => {
T::into_variable(&mut typ, self.factory.new(var));
typ
}
None => T::from_variable(self, self.factory.new(var)),
})
}
pub fn new_var_fn<F>(&self, f: F) -> T
where
T: Clone,
F: FnOnce(u32) -> T,
{
let var_id = self.variables.len() as u32;
let id = self.union.borrow_mut().insert(UnionByLevel {
level: var_id,
..UnionByLevel::default()
});
assert!(id == self.variables.len());
debug!("New var {}", self.variables.len());
let var = f(var_id);
self.variables.push(var.clone().into());
var
}
/// If `typ` is a variable this returns the real unified value of that variable. Otherwise it
/// just returns the type itself. Note that the returned type may contain terms which also need
/// to have `real` called on them.
pub fn real<'r>(&'r self, typ: &'r T) -> &'r T {
match typ.get_id() {
Some(id) => match self.find_type_for_var(id) {
Some(t) => t,
None => typ,
},
_ => typ,
}
}
pub fn | (&self, var: u32) -> Option<&T> {
self.variables.get(var as usize)
}
pub fn find_type_for_var(&self, var: u32) -> Option<&T> {
let mut union = self.union.borrow_mut();
if var as usize >= union.size() {
return None;
}
let index = union.find(var as usize);
self.types.get(index).or_else(|| {
if var == index as u32 {
None
} else {
Some(&self.variables[index as usize])
}
})
}
/// Updates the level of `other` to be the minimum level value of `var` and `other`
pub fn update_level(&self, var: u32, other: u32) {
let level = ::std::cmp::min(self.get_level(var), self.get_level(other));
let mut union = self.union.borrow_mut();
union.get_mut(other as usize).level = level;
}
pub fn set_level(&self, var: u32, level: u32) {
let mut union = self.union.borrow_mut();
union.get_mut(var as usize).level = level;
}
pub fn get_level(&self, mut var: u32) -> u32 {
if let Some(v) = self.find_type_for_var(var) {
var = v.get_var().map_or(var, |v| v.get_id());
}
let mut union = self.union.borrow_mut();
let level = &mut union.get_mut(var as usize).level;
*level = ::std::cmp::min(*level, var);
*level
}
pub fn replace_variable(&self, typ: &T) -> Option<T>
where
T: Clone,
{
match typ.get_id() {
Some(id) => self.find_type_for_var(id).cloned(),
None => None,
}
}
}
pub fn is_variable_unified(subs: &Substitution<RcType>, var: &RcType) -> bool {
match **var {
Type::Variable(ref var) => subs.find_type_for_var(var.id).is_some(),
_ => false,
}
}
impl<T: Substitutable + Clone> Substitution<T> {
pub fn make_real(&self, typ: &mut T) {
*typ = self.real(typ).clone();
}
}
impl<T: Substitutable + PartialEq + Clone> Substitution<T> {
/// Takes `id` and updates the substitution to say that it should have the same type as `typ`
pub fn union(&self, variable: &T, typ: &T) -> Result<Option<T>, Error<T>>
where
T::Variable: Clone,
T: fmt::Display,
{
assert!(variable.get_id().is_some(), "Expected a variable");
let id = variable.get_id().unwrap();
let resolved_type = typ.on_union();
let typ = resolved_type.unwrap_or(typ);
// Nothing needs to be done if both are the same variable already (also prevents the occurs
// check from failing)
if typ.get_var().map_or(false, |other| other.get_id() == id) {
return Ok(None);
}
if occurs(typ, self, id) {
return Err(Error::Occurs(variable.clone(), typ.clone()));
}
{
let id_type = self.find_type_for_var(id);
let other_type = self.real(typ);
if id_type.map_or(false, |x| x == other_type)
|| other_type.get_var().map(|y| y.get_id()) == Some(id)
{
return Ok(None);
}
}
{
let typ = resolved_type.unwrap_or(typ);
match typ.get_var().map(|v| v.get_id()) {
Some(other_id) if variable.get_var().is_some() => {
self.union
.borrow_mut()
.union(id as usize, other_id as usize);
self.update_level(id.get_id(), other_id);
self.update_level(other_id, id.get_id());
}
_ => {
if let Some(other_id) = typ.get_id() {
self.update_level(id.get_id(), other_id);
}
self.insert(id.get_id(), typ.clone());
}
}
}
Ok(resolved_type.cloned())
}
}
impl Substitution<RcType> {
pub fn new_skolem(&self, name: Symbol, kind: ArcKind) -> RcType {
self.new_var_fn(|id| {
let skolem = Skolem { name, id, kind };
match self.variable_cache.borrow_mut().pop() {
Some(mut typ) => {
RcType::set(&mut typ, Type::Skolem(skolem));
typ
}
None => (&*self).skolem(skolem),
}
})
}
pub fn zonk(&self, typ: &RcType) -> RcType {
types::walk_move_type(
typ.clone(),
&mut FlagsVisitor(Flags::HAS_VARIABLES, |typ: &RcType| match typ.get_id() {
Some(id) => match self.find_type_for_var(id) {
Some(t) => Some(self.zonk(t)),
None => None,
},
None => None,
}),
)
}
// Stub kept in case multiple types are attempted again
pub fn bind_arc(&self, typ: &RcType) -> ArcType {
typ.clone()
}
}
| get_var | identifier_name |
substitution.rs | use std::{cell::RefCell, default::Default, fmt};
use union_find::{QuickFindUf, Union, UnionByRank, UnionFind, UnionResult};
use crate::base::{
fixed::{FixedVec, FixedVecMap},
kind::ArcKind,
symbol::Symbol,
types::{self, ArcType, Flags, FlagsVisitor, Skolem, Type, TypeContext, Walker},
};
use crate::typ::RcType;
#[derive(Debug, PartialEq, Functor)]
pub enum Error<T> {
Occurs(T, T),
}
impl<T> fmt::Display for Error<T>
where
T: fmt::Display,
T: for<'a> types::ToDoc<'a, ::pretty::Arena<'a, ()>, (), ()>,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::Error::*;
match *self {
Occurs(ref var, ref typ) => write!(f, "Variable `{}` occurs in `{}`.", var, typ),
}
}
}
pub struct Substitution<T>
where
T: Substitutable,
{
/// Union-find data structure used to store the relationships of all variables in the
/// substitution
union: RefCell<QuickFindUf<UnionByLevel>>,
/// Vector containing all created variables for this substitution. Needed for the `real` method
/// which needs to always be able to return a `&T` reference
variables: FixedVec<T>,
/// For variables which have been infered to have a real type (not a variable) their types are
/// stored here. As the type stored will never changed we use a `FixedVecMap` lets `real` return
/// `&T` from this map safely.
types: FixedVecMap<T>,
factory: T::Factory,
interner: T::Interner,
variable_cache: RefCell<Vec<T>>,
}
impl<T> TypeContext<Symbol, T> for Substitution<T>
where
T: Substitutable + From<Type<Symbol, T>>,
for<'a> &'a T::Interner: TypeContext<Symbol, T>,
{
gluon_base::forward_type_interner_methods!(Symbol, T, self_, &self_.interner);
}
impl<'a, T> TypeContext<Symbol, T> for &'a Substitution<T>
where
T: Substitutable + From<Type<Symbol, T>>,
&'a T::Interner: TypeContext<Symbol, T>,
{
gluon_base::forward_type_interner_methods!(Symbol, T, self_, &self_.interner);
}
impl<'a> types::Substitution<Symbol, RcType> for &'a Substitution<RcType> {
fn new_var(&mut self) -> RcType {
Substitution::new_var(*self)
}
fn new_skolem(&mut self, name: Symbol, kind: ArcKind) -> RcType {
Substitution::new_skolem(*self, name, kind)
}
}
impl<T> Default for Substitution<T>
where
T: Substitutable,
T::Factory: Default,
T::Interner: Default,
{
fn default() -> Substitution<T> {
Substitution::new(Default::default(), Default::default())
}
}
/// Trait which variables need to implement to allow the substitution to get to the u32 identifying
/// the variable
pub trait Variable {
fn get_id(&self) -> u32;
}
impl Variable for u32 {
fn get_id(&self) -> u32 {
*self
}
}
pub trait VariableFactory {
type Variable: Variable;
fn new(&self, x: u32) -> Self::Variable;
}
impl VariableFactory for () {
type Variable = u32;
fn new(&self, x: u32) -> Self::Variable {
x
}
}
/// Trait implemented on types which may contain substitutable variables
pub trait Substitutable: Sized {
type Variable: Variable;
type Factory: VariableFactory<Variable = Self::Variable>;
type Interner: Default;
/// Constructs a new object from its variable type
fn from_variable(subs: &Substitution<Self>, x: Self::Variable) -> Self;
fn into_variable(&mut self, x: Self::Variable);
fn is_unique(self_: &Self) -> bool;
/// Retrieves the variable if `self` is a variable otherwise returns `None`
fn get_var(&self) -> Option<&Self::Variable>;
fn get_id(&self) -> Option<u32> {
self.get_var().map(|var| var.get_id())
}
fn traverse<'a, F>(&'a self, f: &mut F)
where
F: Walker<'a, Self>;
fn instantiate(&self, subs: &Substitution<Self>) -> Self;
// Allowed return true even if the type does not contain variables but not false if it does
// contain
fn contains_variables(&self) -> bool {
true
}
fn on_union(&self) -> Option<&Self> {
None
}
}
pub fn occurs<T>(typ: &T, subs: &Substitution<T>, var: u32) -> bool
where
T: Substitutable,
{
struct Occurs<'a, T: Substitutable + 'a> {
occurs: bool,
var: u32,
subs: &'a Substitution<T>,
}
impl<'a, 't, T> Walker<'t, T> for Occurs<'a, T>
where
T: Substitutable,
{
fn walk(&mut self, typ: &'t T) {
if !typ.contains_variables() || self.occurs {
return;
}
let typ = self.subs.real(typ);
if let Some(other) = typ.get_var() {
if self.var.get_id() == other.get_id() {
self.occurs = true;
typ.traverse(self);
return;
}
self.subs.update_level(self.var, other.get_id());
}
typ.traverse(self);
}
}
let mut occurs = Occurs {
occurs: false,
var,
subs,
};
occurs.walk(typ);
occurs.occurs
}
/// Specialized union implementation which makes sure that variables with a higher level always
/// point to the lower level variable.
///
/// map.union(1, 2);
/// map.find(2) -> 1
/// map.find(1) -> 1
#[derive(Debug)]
struct UnionByLevel {
rank: UnionByRank,
level: u32,
}
impl Default for UnionByLevel {
fn default() -> UnionByLevel {
UnionByLevel {
rank: UnionByRank::default(),
level: ::std::u32::MAX,
}
}
}
impl Union for UnionByLevel {
#[inline]
fn union(left: UnionByLevel, right: UnionByLevel) -> UnionResult<UnionByLevel> {
use std::cmp::Ordering;
let (rank_result, rank) = match Union::union(left.rank, right.rank) {
UnionResult::Left(l) => (
UnionResult::Left(UnionByLevel {
rank: l,
level: left.level,
}),
l,
),
UnionResult::Right(r) => (
UnionResult::Right(UnionByLevel {
rank: r,
level: left.level,
}),
r,
),
};
match left.level.cmp(&right.level) {
Ordering::Less => UnionResult::Left(UnionByLevel {
rank: rank,
level: left.level,
}),
Ordering::Greater => UnionResult::Right(UnionByLevel {
rank: rank,
level: right.level,
}),
Ordering::Equal => rank_result,
}
}
}
impl<T> fmt::Debug for Substitution<T>
where
T: fmt::Debug + Substitutable,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"Substitution {{ map: {:?}, var_id: {:?} }}",
self.union.borrow(),
self.var_id()
)
}
}
impl<T> Substitution<T>
where
T: Substitutable,
{
pub fn new(factory: T::Factory, interner: T::Interner) -> Substitution<T> {
Substitution {
union: RefCell::new(QuickFindUf::new(0)),
variables: FixedVec::new(),
types: FixedVecMap::new(),
factory: factory,
interner,
variable_cache: Default::default(),
}
}
pub fn var_id(&self) -> u32 {
self.variables.len() as u32
}
pub fn insert(&self, var: u32, t: T) {
match t.get_var() {
Some(_) => ice!(
"Tried to insert variable which is not allowed as that would cause memory \
unsafety"
),
None => match self.types.try_insert(var as usize, t.into()) {
Ok(()) => (),
Err(_) => ice!("Expected variable to not have a type associated with it"),
},
}
}
pub fn replace(&mut self, var: u32, t: T) {
debug_assert!(t.get_id() != Some(var));
self.types.insert(var as usize, t.into());
}
pub fn reset(&mut self, var: u32) {
self.types.remove(var as usize);
}
/// Assumes that no variables unified with anything (but variables < level may exist)
pub fn clear_from(&mut self, level: u32) {
self.union = RefCell::new(QuickFindUf::new(0));
let mut u = self.union.borrow_mut();
for _ in 0..level {
u.insert(UnionByLevel {
..UnionByLevel::default()
});
}
let mut variable_cache = self.variable_cache.borrow_mut();
// Since no types should be unified with anything we can remove all of this and reuse the
// unique values
variable_cache.extend(self.types.drain().filter(T::is_unique));
while self.variables.len() > level as usize {
variable_cache.push(self.variables.pop().unwrap());
}
}
/// Creates a new variable
pub fn new_var(&self) -> T
where
T: Clone,
{
self.new_var_fn(|var| match self.variable_cache.borrow_mut().pop() {
Some(mut typ) => {
T::into_variable(&mut typ, self.factory.new(var));
typ
}
None => T::from_variable(self, self.factory.new(var)),
})
}
pub fn new_var_fn<F>(&self, f: F) -> T
where
T: Clone,
F: FnOnce(u32) -> T,
{
let var_id = self.variables.len() as u32;
let id = self.union.borrow_mut().insert(UnionByLevel {
level: var_id,
..UnionByLevel::default()
});
assert!(id == self.variables.len());
debug!("New var {}", self.variables.len());
let var = f(var_id);
self.variables.push(var.clone().into());
var
}
/// If `typ` is a variable this returns the real unified value of that variable. Otherwise it
/// just returns the type itself. Note that the returned type may contain terms which also need
/// to have `real` called on them.
pub fn real<'r>(&'r self, typ: &'r T) -> &'r T {
match typ.get_id() {
Some(id) => match self.find_type_for_var(id) {
Some(t) => t,
None => typ,
},
_ => typ,
}
}
pub fn get_var(&self, var: u32) -> Option<&T> {
self.variables.get(var as usize)
}
pub fn find_type_for_var(&self, var: u32) -> Option<&T> {
let mut union = self.union.borrow_mut();
if var as usize >= union.size() {
return None;
}
let index = union.find(var as usize);
self.types.get(index).or_else(|| {
if var == index as u32 {
None
} else {
Some(&self.variables[index as usize])
}
})
} | let mut union = self.union.borrow_mut();
union.get_mut(other as usize).level = level;
}
pub fn set_level(&self, var: u32, level: u32) {
let mut union = self.union.borrow_mut();
union.get_mut(var as usize).level = level;
}
pub fn get_level(&self, mut var: u32) -> u32 {
if let Some(v) = self.find_type_for_var(var) {
var = v.get_var().map_or(var, |v| v.get_id());
}
let mut union = self.union.borrow_mut();
let level = &mut union.get_mut(var as usize).level;
*level = ::std::cmp::min(*level, var);
*level
}
pub fn replace_variable(&self, typ: &T) -> Option<T>
where
T: Clone,
{
match typ.get_id() {
Some(id) => self.find_type_for_var(id).cloned(),
None => None,
}
}
}
pub fn is_variable_unified(subs: &Substitution<RcType>, var: &RcType) -> bool {
match **var {
Type::Variable(ref var) => subs.find_type_for_var(var.id).is_some(),
_ => false,
}
}
impl<T: Substitutable + Clone> Substitution<T> {
pub fn make_real(&self, typ: &mut T) {
*typ = self.real(typ).clone();
}
}
impl<T: Substitutable + PartialEq + Clone> Substitution<T> {
/// Takes `id` and updates the substitution to say that it should have the same type as `typ`
pub fn union(&self, variable: &T, typ: &T) -> Result<Option<T>, Error<T>>
where
T::Variable: Clone,
T: fmt::Display,
{
assert!(variable.get_id().is_some(), "Expected a variable");
let id = variable.get_id().unwrap();
let resolved_type = typ.on_union();
let typ = resolved_type.unwrap_or(typ);
// Nothing needs to be done if both are the same variable already (also prevents the occurs
// check from failing)
if typ.get_var().map_or(false, |other| other.get_id() == id) {
return Ok(None);
}
if occurs(typ, self, id) {
return Err(Error::Occurs(variable.clone(), typ.clone()));
}
{
let id_type = self.find_type_for_var(id);
let other_type = self.real(typ);
if id_type.map_or(false, |x| x == other_type)
|| other_type.get_var().map(|y| y.get_id()) == Some(id)
{
return Ok(None);
}
}
{
let typ = resolved_type.unwrap_or(typ);
match typ.get_var().map(|v| v.get_id()) {
Some(other_id) if variable.get_var().is_some() => {
self.union
.borrow_mut()
.union(id as usize, other_id as usize);
self.update_level(id.get_id(), other_id);
self.update_level(other_id, id.get_id());
}
_ => {
if let Some(other_id) = typ.get_id() {
self.update_level(id.get_id(), other_id);
}
self.insert(id.get_id(), typ.clone());
}
}
}
Ok(resolved_type.cloned())
}
}
impl Substitution<RcType> {
pub fn new_skolem(&self, name: Symbol, kind: ArcKind) -> RcType {
self.new_var_fn(|id| {
let skolem = Skolem { name, id, kind };
match self.variable_cache.borrow_mut().pop() {
Some(mut typ) => {
RcType::set(&mut typ, Type::Skolem(skolem));
typ
}
None => (&*self).skolem(skolem),
}
})
}
pub fn zonk(&self, typ: &RcType) -> RcType {
types::walk_move_type(
typ.clone(),
&mut FlagsVisitor(Flags::HAS_VARIABLES, |typ: &RcType| match typ.get_id() {
Some(id) => match self.find_type_for_var(id) {
Some(t) => Some(self.zonk(t)),
None => None,
},
None => None,
}),
)
}
// Stub kept in case multiple types are attempted again
pub fn bind_arc(&self, typ: &RcType) -> ArcType {
typ.clone()
}
} |
/// Updates the level of `other` to be the minimum level value of `var` and `other`
pub fn update_level(&self, var: u32, other: u32) {
let level = ::std::cmp::min(self.get_level(var), self.get_level(other)); | random_line_split |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.