file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
lib.rs | // use std::borrow::Cow;
use std::cmp::Ordering;
use std::rc::Rc;
use std::result::Result;
use std::vec::Vec;
#[derive(Debug)]
enum MastError {
InvalidNode,
StoreError(std::io::Error),
}
#[derive(Debug,Clone)]
struct Node {
key: Vec<i32>,
value: Vec<i32>,
link: Vec<Option<Link>>,
dirty: bool,
}
/*
// TODO
impl Clone for Node {
fn clone(&self) -> Node {
panic!("why are you doing this")
}
}
impl ToOwned for Node {
type Owned = Node;
fn to_owned(&self) -> Self::Owned {
return *(self.clone());
}
}*/
#[derive(Clone, Debug)]
enum Link {
// Empty,
MutableNode(Node, Option<Rc<Node>>),
SharedNode(Rc<Node>),
// Node(Cow<'a, Node<'a>>),
Stored(String),
}
pub struct Mast<'a> {
size: u64,
height: u8,
root_link: Link,
branch_factor: u16,
grow_after_size: u64,
shrink_below_size: u64,
key_order: fn(&i32, &i32) -> i8,
key_layer: fn(&i32, u16) -> u8,
_a: std::marker::PhantomData<&'a u32>,
// marshal:
// unmarshal:
// store: InMemoryNodeStore<'a>,
}
const default_branch_factor: u16 = 16;
fn default_order(a: &i32, b: &i32) -> i8 {
if *a < *b {
return -1;
} else if *a > *b {
return 1;
} else {
return 0;
}
}
fn | (v: &i32, branch_factor: u16) -> u8 {
let mut layer = 0;
let mut v = *v;
if branch_factor == 16 {
while v != 0 && v & 0xf == 0 {
v >>= 4;
layer += 1
}
} else {
while v != 0 && v % branch_factor as i32 == 0 {
v /= branch_factor as i32;
layer += 1;
}
}
return layer;
}
impl<'a> Mast<'a> {
pub fn newInMemory() -> Mast<'a> {
return Mast {
size: 0,
height: 0,
root_link: Link::MutableNode(Node::new(default_branch_factor as usize), None),
branch_factor: default_branch_factor,
grow_after_size: default_branch_factor as u64,
shrink_below_size: 1,
key_order: default_order,
key_layer: default_layer,
_a: std::marker::PhantomData,
// store: InMemoryNodeStore::new(),
};
}
fn insert(&mut self, key: i32, value: i32) -> Result<InsertResult, MastError> {
let key_layer = (self.key_layer)(&key, self.branch_factor);
let target_layer = std::cmp::min(key_layer, self.height);
let distance = self.height - target_layer;
let root = load_mut(&mut self.root_link)?;
let res = root.insert(key, value, distance, self.key_order)?;
match res {
InsertResult::Inserted => self.size += 1,
_ => return Ok(res),
};
if self.size > self.grow_after_size
&& root.can_grow(self.height, self.key_layer, self.branch_factor)
{
self.root_link = root
.grow(self.height, self.key_layer, self.branch_factor)
.unwrap();
self.height += 1;
self.shrink_below_size *= self.branch_factor as u64;
self.grow_after_size *= self.branch_factor as u64;
};
Ok(res)
}
fn get(&self, key: &i32) -> Result<Option<&i32>, MastError> {
let mut distance =
self.height - std::cmp::min((self.key_layer)(key, self.branch_factor), self.height);
if distance < 0 { panic!("goo") };
let mut node = load(&self.root_link)?;
loop {
let (equal, i) = get_index_for_key(key, &node.key, self.key_order);
if distance == 0 {
if equal {
return Ok(Some(&node.value[i]));
} else {
return Ok(None);
}
} else {
distance -= 1
}
match node.link[i] {
None => return Ok(None),
Some(ref link) => node = load(link)?,
}
}
}
}
fn load(link: &Link) -> Result<&Node, MastError> {
match link {
// Link::Empty => Ok(Cow::Borrowed(&Node::empty()).to_mut()),
Link::MutableNode(ref node, _) => Ok(node),
Link::SharedNode(ref rc) => Ok(rc),
Link::Stored(_) => unimplemented!("Link::Stored"),
}
}
fn load_mut(link: &mut Link) -> Result<&mut Node, MastError> {
match link {
// Link::Empty => Ok(Cow::Borrowed(&Node::empty()).to_mut()),
Link::MutableNode(ref mut node, _) => Ok(node),
Link::SharedNode(ref mut rc) => {
let mutable = Rc::make_mut(rc).to_owned();
*link = Link::MutableNode(mutable, Some(rc.clone()));
if let Link::MutableNode(ref mut scopey, _) = link {
Ok(scopey)
} else {
panic!("asdf")
}
}
Link::Stored(_) => unimplemented!("Link::Stored"),
}
}
// struct NodeAndSlot<'a>(&'a mut Node<'a>, usize);
/*
struct FindOptions<'a> {
mast: &'a mut Mast<'a>,
target_layer: u8,
current_height: u8,
create_missing_nodes: bool,
node_path: Vec<&'a mut Node>,
link_path: Vec<usize>,
}
*/
impl Node {
fn new(branch_factor: usize) -> Node {
let mut link = Vec::with_capacity(branch_factor + 1);
link.push(None);
Node {
key: Vec::with_capacity(branch_factor),
value: Vec::with_capacity(branch_factor),
link,
dirty: false,
}
}
/*
fn follow(
&'a mut self,
index: usize,
create_ok: bool,
m: &'a mut Mast<'a>,
) -> std::result::Result<&'a mut Node<'a>, std::io::Error> {
if let Some(ref mut links) = self.link {
return Ok(m.load(&mut links[index])?);
} else if !create_ok {
return Ok(self);
}
return Ok(&mut Node::empty());
}*/
fn insert(
&mut self,
key: i32,
value: i32,
distance: u8,
key_order: fn(&i32, &i32) -> i8,
) -> Result<InsertResult, MastError> {
let (equal, i) = get_index_for_key(&key, &self.key, key_order);
if distance != 0 {
let mut z = self.link.get_mut(i).unwrap();
let child = match &mut z {
Some(ref mut link) => load_mut(link)?,
None => {
*z = Some(Link::MutableNode(Node::new(self.key.capacity()), None));
match &mut z {
Some(ref mut link) => load_mut(link)?,
None => panic!("can't load just-set link"),
}
}
};
let res = child.insert(key, value, distance - 1, key_order)?;
match res {
InsertResult::NoChange => (),
_ => self.dirty = true,
};
return Ok(res);
}
if equal {
if value == self.value[i] {
return Ok(InsertResult::NoChange);
}
self.value[i] = value;
self.dirty = true;
return Ok(InsertResult::Updated);
}
let (left_link, right_link) = match self.link.get_mut(i).unwrap() {
Some(ref mut link) => {
let child = load_mut(link)?;
split(child, &key, key_order)?
}
None => (None, None),
};
self.key.insert(i, key);
self.value.insert(i, value);
self.link[i] = right_link;
self.link.insert(i, left_link);
self.dirty = true;
return Ok(InsertResult::Inserted);
}
fn can_grow(
&self,
current_height: u8,
key_layer: fn(&i32, u16) -> u8,
branch_factor: u16,
) -> bool {
for key in &self.key {
if key_layer(key, branch_factor) > current_height {
return true;
}
}
return false;
}
fn grow(
&mut self,
current_height: u8,
key_layer: fn(&i32, u16) -> u8,
branch_factor: u16,
) -> Option<Link> {
let mut new_parent = Node::new(self.key.capacity());
if !self.is_empty() {
for i in 0..self.key.len() {
let key = &self.key[i];
let layer = key_layer(key, branch_factor);
if layer <= current_height {
continue;
}
let new_left = self.extract(i);
new_parent.key.push(self.key[0]);
new_parent.value.push(self.value[0]);
new_parent.link.insert(new_parent.link.len() - 1, new_left);
}
}
let new_right = self.extract(self.key.len());
*new_parent.link.last_mut().unwrap() = new_right;
return new_parent.to_link();
}
fn extract(&mut self, end: usize) -> Option<Link> {
let mut node = Node::new(self.key.capacity());
node.key = self.key.drain(..end).collect();
node.key.reserve(self.key.capacity());
node.value = self.value.drain(..end).collect();
node.value.reserve(self.key.capacity());
node.link = self.link.drain(..=end).collect();
node.link.reserve(self.key.capacity() + 1);
self.link.insert(0, None);
return node.to_link();
}
fn to_link(self) -> Option<Link> {
if self.is_empty() {
return None;
}
return Some(Link::MutableNode(self, None));
}
fn is_empty(&self) -> bool {
return self.key.len() == 0
&& self.value.len() == 0
&& self.link.len() == 1
&& self.link[0].is_none();
}
}
#[derive(Debug)]
enum InsertResult {
Updated,
Inserted,
NoChange,
}
fn split(
node: &mut Node,
key: &i32,
key_order: fn(&i32, &i32) -> i8,
) -> Result<(Option<Link>, Option<Link>), MastError> {
let (equal, i) = get_index_for_key(key, &node.key, key_order);
if equal {
panic!("split not expecting existing key")
}
let mut left_node = Node::new(node.key.capacity());
let mut right_node = Node::new(node.key.capacity());
let (mut left, mut right) = node.key.split_at(i);
left_node.key.extend_from_slice(left);
right_node.key.extend_from_slice(right);
let (mut left, mut right) = node.value.split_at(i);
left_node.value.extend_from_slice(left);
right_node.value.extend_from_slice(left);
let (mut left, mut right) = node.link.split_at(i + 1);
left_node.link.remove(0);
left_node.link.extend_from_slice(left);
right_node.link.extend_from_slice(right);
// repartition left and right subtrees based on new key
if let Some(ref mut cur_left_max_link) = left_node.link[i] {
let left_max = load_mut(cur_left_max_link)?;
let (left_max_link, too_big_link) = split(left_max, key, key_order)?;
left_node.link[i] = left_max_link;
right_node.link[0] = too_big_link;
};
if let Some(ref mut cur_right_min_link) = right_node.link[0] {
let right_min = load_mut(cur_right_min_link)?;
let (too_small_link, right_min_link) = split(right_min, key, key_order)?;
if too_small_link.is_some() {
panic!("bad news!")
}
right_node.link[0] = right_min_link
};
return Ok((left_node.to_link(), right_node.to_link()));
}
fn get_index_for_key(key: &i32, keys: &Vec<i32>, key_order: fn(&i32, &i32) -> i8) -> (bool, usize) {
match keys.binary_search_by(|x| {
let r = key_order(x, key);
if r < 0 {
Ordering::Less
} else if r > 0 {
Ordering::Greater
} else {
Ordering::Equal
}
}) {
Ok(n) => (true, n),
Err(n) => (false, n),
}
}
fn bad_get_index_for_key(
key: &i32,
keys: &Vec<i32>,
key_order: fn(&i32, &i32) -> i8,
) -> (bool, usize) {
let mut cmp: i8 = 1;
let mut i: usize = 0;
while i < keys.len() {
cmp = (key_order)(&keys[i], key);
if cmp >= 0 {
break;
};
i += 1
}
return (cmp == 0, i);
}
/*
fn findNode<'a>(key: i32, options: &mut FindOptions<'a>) -> std::result::Result<(), MastError> {
let mut cmp: i8 = 1;
let mut i: usize = 0;
let keyOrder = options.mast.keyOrder;
let mut node = options.node_path.last().unwrap();
unimplemented!();
while i < node.key.len() {
cmp = (keyOrder)(node.key[i], key);
if cmp >= 0 {
break;
}
i += 1
}
if cmp == 0 || options.current_height == options.target_layer {
return Ok(());
};
let child_link = match node.link {
None => return Err(MastError::InvalidNode),
Some(ref mut link) => link.get_mut(i).unwrap(),
};
let child = load(child_link)?;
options.current_height -= 1;
options.node_path.push(child);
options.link_path.push(i);
return findNode(key, options);
}*/
/*
trait NodeStore<'a> {
fn load(&mut self, link: &'a mut Link) -> Result<&'a mut Node, std::io::Error>;
}
struct InMemoryNodeStore<'a> {
map: std::collections::HashMap<String, Node>,
}
impl<'a> InMemoryNodeStore<'a> {
fn new() -> InMemoryNodeStore<'a> {
InMemoryNodeStore {
map: std::collections::HashMap::new(),
}
}
}
impl<'a> NodeStore<'a> for InMemoryNodeStore<'a> {
fn load(&mut self, link: &'a mut Link) -> Result<&'a mut Node, std::io::Error> {
match link {
// Link::Empty => Ok(Cow::Borrowed(&Node::empty()).to_mut()),
Link::MutableNode(ref mut cow) => Ok(cow.to_mut()),
}
}
}
*/
#[test]
fn test_insert_accessibility() -> std::result::Result<(), MastError> {
let mut t = Mast::newInMemory();
let n = 16 * 16 + 2;
for i in 0..n {
t.insert(i, i)?;
for i in 0..=i {
let v = t.get(&i)?;
assert_eq!(v, Some(&i))
}
}
Ok(())
}
#[test]
fn test_bench_insert() -> std::result::Result<(), MastError> {
let mut t = Mast::newInMemory();
let parts = 4;
let mut n = 16 * 16 * 16;
let mut i = 0;
let mut start = std::time::Instant::now();
for p in 0..parts {
while i < n {
t.insert(i, i)?;
i += 1;
}
let end = std::time::Instant::now();
let diff = end - start;
println!(
"part {}/{}: height:{}, {}/s ({}ns/op) size:{}",
p + 1,
parts,
//diff.as_micros(), // {}μs,
t.height,
1_000_000_000 / (diff.as_nanos() / t.size as u128),
diff.as_nanos() / t.size as u128,
t.size,
);
n *= 16;
start = end;
}
Ok(())
}
#[test]
fn test_int_layer() {
assert_eq!(default_layer(&-528, 16), 1);
assert_eq!(default_layer(&-513, 16), 0);
assert_eq!(default_layer(&-512, 16), 2);
assert_eq!(default_layer(&-256, 16), 2);
assert_eq!(default_layer(&-16, 16), 1);
assert_eq!(default_layer(&-1, 16), 0);
assert_eq!(default_layer(&0, 16), 0);
assert_eq!(default_layer(&1, 16), 0);
assert_eq!(default_layer(&16, 16), 1);
assert_eq!(default_layer(&32, 16), 1);
}
| default_layer | identifier_name |
lib.rs | // use std::borrow::Cow;
use std::cmp::Ordering;
use std::rc::Rc;
use std::result::Result;
use std::vec::Vec;
#[derive(Debug)]
enum MastError {
InvalidNode,
StoreError(std::io::Error),
}
#[derive(Debug,Clone)]
struct Node {
key: Vec<i32>,
value: Vec<i32>,
link: Vec<Option<Link>>,
dirty: bool,
}
/*
// TODO
impl Clone for Node {
fn clone(&self) -> Node {
panic!("why are you doing this")
}
}
impl ToOwned for Node {
type Owned = Node;
fn to_owned(&self) -> Self::Owned {
return *(self.clone());
}
}*/
#[derive(Clone, Debug)]
enum Link {
// Empty,
MutableNode(Node, Option<Rc<Node>>),
SharedNode(Rc<Node>),
// Node(Cow<'a, Node<'a>>),
Stored(String),
}
pub struct Mast<'a> {
size: u64,
height: u8,
root_link: Link,
branch_factor: u16,
grow_after_size: u64,
shrink_below_size: u64,
key_order: fn(&i32, &i32) -> i8,
key_layer: fn(&i32, u16) -> u8,
_a: std::marker::PhantomData<&'a u32>,
// marshal:
// unmarshal:
// store: InMemoryNodeStore<'a>,
}
const default_branch_factor: u16 = 16;
fn default_order(a: &i32, b: &i32) -> i8 |
fn default_layer(v: &i32, branch_factor: u16) -> u8 {
let mut layer = 0;
let mut v = *v;
if branch_factor == 16 {
while v != 0 && v & 0xf == 0 {
v >>= 4;
layer += 1
}
} else {
while v != 0 && v % branch_factor as i32 == 0 {
v /= branch_factor as i32;
layer += 1;
}
}
return layer;
}
impl<'a> Mast<'a> {
pub fn newInMemory() -> Mast<'a> {
return Mast {
size: 0,
height: 0,
root_link: Link::MutableNode(Node::new(default_branch_factor as usize), None),
branch_factor: default_branch_factor,
grow_after_size: default_branch_factor as u64,
shrink_below_size: 1,
key_order: default_order,
key_layer: default_layer,
_a: std::marker::PhantomData,
// store: InMemoryNodeStore::new(),
};
}
fn insert(&mut self, key: i32, value: i32) -> Result<InsertResult, MastError> {
let key_layer = (self.key_layer)(&key, self.branch_factor);
let target_layer = std::cmp::min(key_layer, self.height);
let distance = self.height - target_layer;
let root = load_mut(&mut self.root_link)?;
let res = root.insert(key, value, distance, self.key_order)?;
match res {
InsertResult::Inserted => self.size += 1,
_ => return Ok(res),
};
if self.size > self.grow_after_size
&& root.can_grow(self.height, self.key_layer, self.branch_factor)
{
self.root_link = root
.grow(self.height, self.key_layer, self.branch_factor)
.unwrap();
self.height += 1;
self.shrink_below_size *= self.branch_factor as u64;
self.grow_after_size *= self.branch_factor as u64;
};
Ok(res)
}
fn get(&self, key: &i32) -> Result<Option<&i32>, MastError> {
let mut distance =
self.height - std::cmp::min((self.key_layer)(key, self.branch_factor), self.height);
if distance < 0 { panic!("goo") };
let mut node = load(&self.root_link)?;
loop {
let (equal, i) = get_index_for_key(key, &node.key, self.key_order);
if distance == 0 {
if equal {
return Ok(Some(&node.value[i]));
} else {
return Ok(None);
}
} else {
distance -= 1
}
match node.link[i] {
None => return Ok(None),
Some(ref link) => node = load(link)?,
}
}
}
}
fn load(link: &Link) -> Result<&Node, MastError> {
match link {
// Link::Empty => Ok(Cow::Borrowed(&Node::empty()).to_mut()),
Link::MutableNode(ref node, _) => Ok(node),
Link::SharedNode(ref rc) => Ok(rc),
Link::Stored(_) => unimplemented!("Link::Stored"),
}
}
fn load_mut(link: &mut Link) -> Result<&mut Node, MastError> {
match link {
// Link::Empty => Ok(Cow::Borrowed(&Node::empty()).to_mut()),
Link::MutableNode(ref mut node, _) => Ok(node),
Link::SharedNode(ref mut rc) => {
let mutable = Rc::make_mut(rc).to_owned();
*link = Link::MutableNode(mutable, Some(rc.clone()));
if let Link::MutableNode(ref mut scopey, _) = link {
Ok(scopey)
} else {
panic!("asdf")
}
}
Link::Stored(_) => unimplemented!("Link::Stored"),
}
}
// struct NodeAndSlot<'a>(&'a mut Node<'a>, usize);
/*
struct FindOptions<'a> {
mast: &'a mut Mast<'a>,
target_layer: u8,
current_height: u8,
create_missing_nodes: bool,
node_path: Vec<&'a mut Node>,
link_path: Vec<usize>,
}
*/
impl Node {
fn new(branch_factor: usize) -> Node {
let mut link = Vec::with_capacity(branch_factor + 1);
link.push(None);
Node {
key: Vec::with_capacity(branch_factor),
value: Vec::with_capacity(branch_factor),
link,
dirty: false,
}
}
/*
fn follow(
&'a mut self,
index: usize,
create_ok: bool,
m: &'a mut Mast<'a>,
) -> std::result::Result<&'a mut Node<'a>, std::io::Error> {
if let Some(ref mut links) = self.link {
return Ok(m.load(&mut links[index])?);
} else if !create_ok {
return Ok(self);
}
return Ok(&mut Node::empty());
}*/
fn insert(
&mut self,
key: i32,
value: i32,
distance: u8,
key_order: fn(&i32, &i32) -> i8,
) -> Result<InsertResult, MastError> {
let (equal, i) = get_index_for_key(&key, &self.key, key_order);
if distance != 0 {
let mut z = self.link.get_mut(i).unwrap();
let child = match &mut z {
Some(ref mut link) => load_mut(link)?,
None => {
*z = Some(Link::MutableNode(Node::new(self.key.capacity()), None));
match &mut z {
Some(ref mut link) => load_mut(link)?,
None => panic!("can't load just-set link"),
}
}
};
let res = child.insert(key, value, distance - 1, key_order)?;
match res {
InsertResult::NoChange => (),
_ => self.dirty = true,
};
return Ok(res);
}
if equal {
if value == self.value[i] {
return Ok(InsertResult::NoChange);
}
self.value[i] = value;
self.dirty = true;
return Ok(InsertResult::Updated);
}
let (left_link, right_link) = match self.link.get_mut(i).unwrap() {
Some(ref mut link) => {
let child = load_mut(link)?;
split(child, &key, key_order)?
}
None => (None, None),
};
self.key.insert(i, key);
self.value.insert(i, value);
self.link[i] = right_link;
self.link.insert(i, left_link);
self.dirty = true;
return Ok(InsertResult::Inserted);
}
fn can_grow(
&self,
current_height: u8,
key_layer: fn(&i32, u16) -> u8,
branch_factor: u16,
) -> bool {
for key in &self.key {
if key_layer(key, branch_factor) > current_height {
return true;
}
}
return false;
}
fn grow(
&mut self,
current_height: u8,
key_layer: fn(&i32, u16) -> u8,
branch_factor: u16,
) -> Option<Link> {
let mut new_parent = Node::new(self.key.capacity());
if !self.is_empty() {
for i in 0..self.key.len() {
let key = &self.key[i];
let layer = key_layer(key, branch_factor);
if layer <= current_height {
continue;
}
let new_left = self.extract(i);
new_parent.key.push(self.key[0]);
new_parent.value.push(self.value[0]);
new_parent.link.insert(new_parent.link.len() - 1, new_left);
}
}
let new_right = self.extract(self.key.len());
*new_parent.link.last_mut().unwrap() = new_right;
return new_parent.to_link();
}
fn extract(&mut self, end: usize) -> Option<Link> {
let mut node = Node::new(self.key.capacity());
node.key = self.key.drain(..end).collect();
node.key.reserve(self.key.capacity());
node.value = self.value.drain(..end).collect();
node.value.reserve(self.key.capacity());
node.link = self.link.drain(..=end).collect();
node.link.reserve(self.key.capacity() + 1);
self.link.insert(0, None);
return node.to_link();
}
fn to_link(self) -> Option<Link> {
if self.is_empty() {
return None;
}
return Some(Link::MutableNode(self, None));
}
fn is_empty(&self) -> bool {
return self.key.len() == 0
&& self.value.len() == 0
&& self.link.len() == 1
&& self.link[0].is_none();
}
}
#[derive(Debug)]
enum InsertResult {
Updated,
Inserted,
NoChange,
}
fn split(
node: &mut Node,
key: &i32,
key_order: fn(&i32, &i32) -> i8,
) -> Result<(Option<Link>, Option<Link>), MastError> {
let (equal, i) = get_index_for_key(key, &node.key, key_order);
if equal {
panic!("split not expecting existing key")
}
let mut left_node = Node::new(node.key.capacity());
let mut right_node = Node::new(node.key.capacity());
let (mut left, mut right) = node.key.split_at(i);
left_node.key.extend_from_slice(left);
right_node.key.extend_from_slice(right);
let (mut left, mut right) = node.value.split_at(i);
left_node.value.extend_from_slice(left);
right_node.value.extend_from_slice(left);
let (mut left, mut right) = node.link.split_at(i + 1);
left_node.link.remove(0);
left_node.link.extend_from_slice(left);
right_node.link.extend_from_slice(right);
// repartition left and right subtrees based on new key
if let Some(ref mut cur_left_max_link) = left_node.link[i] {
let left_max = load_mut(cur_left_max_link)?;
let (left_max_link, too_big_link) = split(left_max, key, key_order)?;
left_node.link[i] = left_max_link;
right_node.link[0] = too_big_link;
};
if let Some(ref mut cur_right_min_link) = right_node.link[0] {
let right_min = load_mut(cur_right_min_link)?;
let (too_small_link, right_min_link) = split(right_min, key, key_order)?;
if too_small_link.is_some() {
panic!("bad news!")
}
right_node.link[0] = right_min_link
};
return Ok((left_node.to_link(), right_node.to_link()));
}
fn get_index_for_key(key: &i32, keys: &Vec<i32>, key_order: fn(&i32, &i32) -> i8) -> (bool, usize) {
match keys.binary_search_by(|x| {
let r = key_order(x, key);
if r < 0 {
Ordering::Less
} else if r > 0 {
Ordering::Greater
} else {
Ordering::Equal
}
}) {
Ok(n) => (true, n),
Err(n) => (false, n),
}
}
fn bad_get_index_for_key(
key: &i32,
keys: &Vec<i32>,
key_order: fn(&i32, &i32) -> i8,
) -> (bool, usize) {
let mut cmp: i8 = 1;
let mut i: usize = 0;
while i < keys.len() {
cmp = (key_order)(&keys[i], key);
if cmp >= 0 {
break;
};
i += 1
}
return (cmp == 0, i);
}
/*
fn findNode<'a>(key: i32, options: &mut FindOptions<'a>) -> std::result::Result<(), MastError> {
let mut cmp: i8 = 1;
let mut i: usize = 0;
let keyOrder = options.mast.keyOrder;
let mut node = options.node_path.last().unwrap();
unimplemented!();
while i < node.key.len() {
cmp = (keyOrder)(node.key[i], key);
if cmp >= 0 {
break;
}
i += 1
}
if cmp == 0 || options.current_height == options.target_layer {
return Ok(());
};
let child_link = match node.link {
None => return Err(MastError::InvalidNode),
Some(ref mut link) => link.get_mut(i).unwrap(),
};
let child = load(child_link)?;
options.current_height -= 1;
options.node_path.push(child);
options.link_path.push(i);
return findNode(key, options);
}*/
/*
trait NodeStore<'a> {
fn load(&mut self, link: &'a mut Link) -> Result<&'a mut Node, std::io::Error>;
}
struct InMemoryNodeStore<'a> {
map: std::collections::HashMap<String, Node>,
}
impl<'a> InMemoryNodeStore<'a> {
fn new() -> InMemoryNodeStore<'a> {
InMemoryNodeStore {
map: std::collections::HashMap::new(),
}
}
}
impl<'a> NodeStore<'a> for InMemoryNodeStore<'a> {
fn load(&mut self, link: &'a mut Link) -> Result<&'a mut Node, std::io::Error> {
match link {
// Link::Empty => Ok(Cow::Borrowed(&Node::empty()).to_mut()),
Link::MutableNode(ref mut cow) => Ok(cow.to_mut()),
}
}
}
*/
#[test]
fn test_insert_accessibility() -> std::result::Result<(), MastError> {
let mut t = Mast::newInMemory();
let n = 16 * 16 + 2;
for i in 0..n {
t.insert(i, i)?;
for i in 0..=i {
let v = t.get(&i)?;
assert_eq!(v, Some(&i))
}
}
Ok(())
}
#[test]
fn test_bench_insert() -> std::result::Result<(), MastError> {
let mut t = Mast::newInMemory();
let parts = 4;
let mut n = 16 * 16 * 16;
let mut i = 0;
let mut start = std::time::Instant::now();
for p in 0..parts {
while i < n {
t.insert(i, i)?;
i += 1;
}
let end = std::time::Instant::now();
let diff = end - start;
println!(
"part {}/{}: height:{}, {}/s ({}ns/op) size:{}",
p + 1,
parts,
//diff.as_micros(), // {}μs,
t.height,
1_000_000_000 / (diff.as_nanos() / t.size as u128),
diff.as_nanos() / t.size as u128,
t.size,
);
n *= 16;
start = end;
}
Ok(())
}
#[test]
fn test_int_layer() {
assert_eq!(default_layer(&-528, 16), 1);
assert_eq!(default_layer(&-513, 16), 0);
assert_eq!(default_layer(&-512, 16), 2);
assert_eq!(default_layer(&-256, 16), 2);
assert_eq!(default_layer(&-16, 16), 1);
assert_eq!(default_layer(&-1, 16), 0);
assert_eq!(default_layer(&0, 16), 0);
assert_eq!(default_layer(&1, 16), 0);
assert_eq!(default_layer(&16, 16), 1);
assert_eq!(default_layer(&32, 16), 1);
}
| {
if *a < *b {
return -1;
} else if *a > *b {
return 1;
} else {
return 0;
}
} | identifier_body |
MOS6502.py | """
MOS6502: implements an emulated MOS 6502 processor.
"""
import instructions
from nesPPU import PPU
from nesMemory import Memory
from nesCart import Rom
from nesControllers import Controllers
class Register(object):
"""Representation of a MOS 6502 register. 8-bit except the program counter, which is 16-bit."""
def __init__(self, name, bitwidth):
self.name = name
self.value = 0
self.past = [0] # maybe for rewind?
self.bitwidth = bitwidth # currently unused
self.symb_val = [0] # placeholder until symbolic execution is added
#self.symb_val = [z3.BitVec(self.name, bitwidth)] # maybe for
# symbolic
# execution?
def get_value(self):
"""Return register's current value"""
return self.value
def set_value(self, value):
"""Set register's new value"""
self.value = value
def get_current_symb(self):
"""Return register's current symbolic value"""
return self.symb_val[-1]
def get_initial_symb(self):
"""Return register's initial symbolic value"""
return self.symb_val[0]
def set_initial_symb(self, value):
"""Set register's initial symbolic value"""
self.symb_val[0] = value
class CPU(object):
"""The emulated NES CPU core"""
def __init__(self, baseAddress=0x8000):
self.bitwidth = bitwidth = 8 # 1 byte - 8 bits
self.regs = {'A': Register('A', bitwidth),
'X': Register('X', bitwidth),
'Y': Register('Y', bitwidth),
'PC': Register('PC', bitwidth * 2),
'S': Register('S', bitwidth),
'P': Register('P', bitwidth)}
self.base_address = baseAddress # unused for now
self.pc_size = 2 # 2 bytes for PC - 16 bits
self.memory = Memory()
self.past_memory = []
#self.symbMemory = z3Array('mem', z3.BitVecSort(bitwidth), z3.BitVecSort(8))
self.regs['PC'].set_value(0)
self.regs['S'].set_value(0)
self.cycle = 0
self.global_cycle = 0
self.nmi_flipflop = 0
self.stack_base = 0x0100
self.ppu_mem = 0x2000
self.apu_mem = 0x4000
self.spr_dma = 0x4014
self.channels = 0x4015
self.ctrl1 = 0x4016
self.ctrl2 = 0x4017
self.nmi_vector = 0xFFFA
self.reset_vector = 0xFFFC
self.irq_brk_vector = 0xFFFE
self.ctrl_a = 7
self.ctrl_b = 6
self.ctrl_select = 5
self.ctrl_start = 4
self.ctrl_up = 3
self.ctrl_down = 2
self.ctrl_left = 1
self.ctrl_right = 0
self.rom = None
self.ppu = None
self.controllers = None
self.paused = False
self.pause_reason = None
self.last_four = [0x00] * 4
self.bwrites = []
self.breads = []
def clear_memory(self):
"""Clear (zero out) emulated memory"""
self.memory.ClearMemory()
def reset(self):
"""Reset CPU. Note: reset state is not the same as the initial power-on state"""
# https://wiki.nesdev.com/w/index.php/CPU_power_up_state
ctrl_start = self.read_mem_word(self.reset_vector)
self.set_pc(ctrl_start)
self.set_register('S', 0xFD)
self.set_register('P', 0x24) # MMM: documentation says P = P | 0x04 ?
self.clear_memory() # MMM: documentation says memory is unchanged
self.set_memory(0x01FE, 0xFF)
self.set_memory(0x01FF, 0xFF)
# https://wiki.nesdev.com/w/index.php/PPU_power_up_state
self.ppu = PPU(self) # MMM: PPU object should probably have its own reset() method
self.controllers = Controllers(self)
def map_mem(self, address):
"""Map program memory from the NES ROM into the CPU's memory space"""
return self.rom.mapMem(self, address)
def map_vmem(self, address):
"""Map VMem from the NES ROM into the CPU's memory space"""
return self.rom.mapVMem(self, address)
def load_rom(self, rom_path):
"""Load an NES cartridge ROM from the given .nes file path"""
self.rom = Rom(rom_path, self)
return self.rom
def read_memory(self, address):
"""Return a read of 1 byte of main memory from the given address"""
address = address & 0xFFFF
if address in self.breads:
self.paused = True
self.pause_reason = 'Read at ' + hex(address)
return self.memory.ReadMemory(self, address)
def read_vmemory(self, address):
"""Return a read of 1 byte of video memory from the given address"""
# MMM: isn't VRAM a property of the PPU?
return self.rom.ReadVMemory(self, address)
def read_mem_word(self, address):
"""Return a read of one 16-bit word of memory from the given address"""
value = self.read_memory(address)
value += self.read_memory(address + 1) << 8
return value
def read_mem_word_bug(self, addr):
addr = addr % 0xFFFF
hi_addr = (addr & 0xFF00) | ((addr + 1) & 0xFF)
lo_byte = self.read_memory(addr)
hi_byte = self.read_memory(hi_addr)
return ((hi_byte << 8) | lo_byte)
def read_rel_pc(self, offset):
"""Return 1 byte of memory read using PC-relative addressing"""
return self.read_memory(self.get_register('PC')+offset) & 0xFF
def set_memory(self, address, value):
"""Write the given 1 byte value to the given address in NES memory"""
#self.memory[address] = value & 0xFF
if address in self.bwrites:
self.paused = True
self.pause_reason = 'Write at ' + hex(address)
return self.memory.SetMemory(self, address, value)
def init_memory(self, address, values):
"""Initialize a region of emulated memory to the bytes specified in 'values'"""
for value in values: # writing 1 byte at a time
self.set_memory(address, value)
address = address + 1
def get_memory(self, address, size):
"""Return an arbitrarily sized region of the emulated memory space"""
mem = []
for i in range(0, size):
mem.append(self.read_memory(address+i))
return mem
def get_register(self, name):
"""Return the value of the given register (valid values: A, X, Y, S, P, PC)"""
return self.regs[name].get_value()
def set_pc(self, value):
"""Set the Program Counter of the emulated CPU to the given 16-bit value."""
self.regs['PC'].set_value(value & 0xFFFF)
return value & 0xFFFF
def set_register(self, name, value):
"""Set the given 8-bit register (A, X, Y, S, or P registers only)"""
if name is 'P':
value = value | (1 << 5)
self.regs[name].set_value(value & 0xFF)
return value & 0xFF
def push_byte(self, value):
|
def push_word(self, value):
"""Push the given 16-bit word value onto the emulated CPU stack."""
self.push_byte((value & 0xFF00) >> 8)
return self.push_byte(value & 0xFF)
def pop_byte(self):
"""Return a byte value popped from the emulated CPU stack."""
reg_s = self.get_register('S') + 1
value = self.read_memory(reg_s +self.stack_base)
self.set_register('S', reg_s)
return value
def pop_word(self):
"""Return a 16-bit word value popped from the emulated CPU stack."""
return self.pop_byte() + (self.pop_byte() << 8)
def set_flag(self, flag_name, value):
"""Set the current emulated 8-bit status (flags) register to the given value"""
flags = {'C':0, # Carry
'Z':1, # Zero
'I':2, # Interrupt mask
'D':3, # Decimal
'B':4, # Break
'V':6, # Overflow
'N':7} # Negative
flag_reg = self.get_register('P')
if value == 1:
new_flag = flag_reg | 1 << flags[flag_name]
else:
new_flag = flag_reg & ~(1 << flags[flag_name])
self.set_register('P', new_flag)
def create_overflow_condition(self, old_dst, old_src, new_val, sub_op):
"""Return boolean value whether operation creates an overflow condition"""
if not sub_op:
of_cond = (old_dst^old_src)&0x80 == 0 and (old_dst^new_val) & 0x80 != 0
else:
of_cond = (old_dst^old_src)&0x80 != 0 and (old_dst^new_val) & 0x80 != 0
return of_cond
@staticmethod
def create_carry_condition(new_val, sub_op):
"""Return boolean value whether operation creates a carry condition"""
if not sub_op:
carry_cond = new_val > 0xFF
else:
carry_cond = new_val >= 0
return carry_cond
def ctrl_update_flags(self, flags, old_dst, old_src, new_val, sub_op):
"""Update, as needed, the C or V bits in the emulated flags register"""
of_cond = self.create_overflow_condition(old_dst, old_src, new_val, sub_op)
cf_cond = self.create_carry_condition(new_val, sub_op)
valid_flags = {'C': cf_cond is True,
'Z': new_val & 0xFF == 0,
'V': of_cond is True,
'N': ((new_val & 0x80) != 0)}
for flag in flags:
self.set_flag(flag, valid_flags[flag])
def get_flag(self, flag_name):
"""Return the current emulated 8-bit status (flags) register"""
flags = {'C':0, # Carry
'Z':1, # Zero
'I':2, # Interrctrl_upt mask
'D':3, # Decimal
'B':4, # Break
'V':6, # Overflow
'N':7} # Negative
flags_reg = self.get_register('P')
flag_index = flags[flag_name]
return (flags_reg >> flag_index) & 1
def inc_pc(self, size):
"""Increment the emulated CPU's Program Counter by specified number of bytes."""
current_pc = self.get_register('PC')
self.set_pc(current_pc + size)
def inc_cycles(self, cycles):
"""NES emulation timing is defined by emulated CPU cycles. Increment the cycle count."""
self.cycle += cycles
self.global_cycle += cycles
def handle_nmi(self):
"""Non-Maskable Interrupt handler routine. NMI is generated by PPU upon each V-Blank."""
print "NMI HANDLER"
self.push_word(self.get_register('PC'))
self.push_byte(self.get_register('P'))
self.set_flag('I', 1)
# MMM: somewhere we should check if NMIs are disabled in the status register?
# jump to the NMI vector
target = self.read_mem_word(self.nmi_vector)
self.set_pc(target)
return True
def step(self):
"""Step the emulated CPU: read, decode, and emulate execution of an instruction."""
addr = self.get_register('PC')
opcode = self.read_memory(addr)
try:
instruction = instructions.instructions[opcode]
except StandardError:
print "Failed to decode instruction: " + hex(opcode) + " @ " + hex(addr)
self.paused = True
return addr
instruction.execute(self)
self.last_four.pop(0)
self.last_four.append(addr)
self.cycle += instruction.cycles
self.global_cycle += instruction.cycles
if self.cycle % 4 == 0:
self.controllers.getInput()
if self.cycle > self.ppu.cyclesPerHBlank:
self.ppu.runPPU(self.cycle)
self.cycle = 0
if self.nmi_flipflop == 1:
self.nmi_flipflop = 0
if self.ppu.nmi == 1:
self.handle_nmi()
return self.get_register('PC')
def run_to_break(self, breaks, bwrites, breads):
"""Run the emulated CPU by stepping it until it hits a breakpoint."""
self.paused = False
self.bwrites = bwrites
self.breads = breads
while self.paused is False:
next_inst = self.step()
if next_inst in breaks:
return next_inst
# sleep the ctrl_right amount here after CPU and PPU are stepped
return next_inst
| """Push the given byte value onto the emulated CPU stack."""
reg_s = self.get_register('S')
self.set_memory(reg_s + self.stack_base, value)
self.set_register('S', reg_s - 1)
return reg_s + self.stack_base - 1 | identifier_body |
MOS6502.py | """
MOS6502: implements an emulated MOS 6502 processor.
"""
import instructions
from nesPPU import PPU
from nesMemory import Memory
from nesCart import Rom
from nesControllers import Controllers
class Register(object):
"""Representation of a MOS 6502 register. 8-bit except the program counter, which is 16-bit."""
def __init__(self, name, bitwidth):
self.name = name
self.value = 0
self.past = [0] # maybe for rewind?
self.bitwidth = bitwidth # currently unused
self.symb_val = [0] # placeholder until symbolic execution is added
#self.symb_val = [z3.BitVec(self.name, bitwidth)] # maybe for
# symbolic
# execution?
def get_value(self):
"""Return register's current value"""
return self.value
def set_value(self, value):
"""Set register's new value"""
self.value = value
def get_current_symb(self):
"""Return register's current symbolic value"""
return self.symb_val[-1]
def get_initial_symb(self):
"""Return register's initial symbolic value"""
return self.symb_val[0]
def set_initial_symb(self, value):
"""Set register's initial symbolic value"""
self.symb_val[0] = value
class CPU(object):
"""The emulated NES CPU core"""
def __init__(self, baseAddress=0x8000):
self.bitwidth = bitwidth = 8 # 1 byte - 8 bits
self.regs = {'A': Register('A', bitwidth),
'X': Register('X', bitwidth),
'Y': Register('Y', bitwidth),
'PC': Register('PC', bitwidth * 2),
'S': Register('S', bitwidth),
'P': Register('P', bitwidth)}
self.base_address = baseAddress # unused for now
self.pc_size = 2 # 2 bytes for PC - 16 bits
self.memory = Memory()
self.past_memory = []
#self.symbMemory = z3Array('mem', z3.BitVecSort(bitwidth), z3.BitVecSort(8))
self.regs['PC'].set_value(0)
self.regs['S'].set_value(0)
self.cycle = 0
self.global_cycle = 0
self.nmi_flipflop = 0
self.stack_base = 0x0100
self.ppu_mem = 0x2000
self.apu_mem = 0x4000
self.spr_dma = 0x4014
self.channels = 0x4015
self.ctrl1 = 0x4016
self.ctrl2 = 0x4017
self.nmi_vector = 0xFFFA
self.reset_vector = 0xFFFC
self.irq_brk_vector = 0xFFFE
self.ctrl_a = 7
self.ctrl_b = 6
self.ctrl_select = 5
self.ctrl_start = 4
self.ctrl_up = 3
self.ctrl_down = 2
self.ctrl_left = 1
self.ctrl_right = 0
self.rom = None
self.ppu = None
self.controllers = None
self.paused = False
self.pause_reason = None
self.last_four = [0x00] * 4
self.bwrites = []
self.breads = []
def clear_memory(self):
"""Clear (zero out) emulated memory"""
self.memory.ClearMemory()
def reset(self):
"""Reset CPU. Note: reset state is not the same as the initial power-on state"""
# https://wiki.nesdev.com/w/index.php/CPU_power_up_state
ctrl_start = self.read_mem_word(self.reset_vector)
self.set_pc(ctrl_start)
self.set_register('S', 0xFD)
self.set_register('P', 0x24) # MMM: documentation says P = P | 0x04 ?
self.clear_memory() # MMM: documentation says memory is unchanged
self.set_memory(0x01FE, 0xFF)
self.set_memory(0x01FF, 0xFF)
# https://wiki.nesdev.com/w/index.php/PPU_power_up_state
self.ppu = PPU(self) # MMM: PPU object should probably have its own reset() method
self.controllers = Controllers(self)
def map_mem(self, address):
"""Map program memory from the NES ROM into the CPU's memory space"""
return self.rom.mapMem(self, address)
def map_vmem(self, address):
"""Map VMem from the NES ROM into the CPU's memory space"""
return self.rom.mapVMem(self, address)
def load_rom(self, rom_path):
"""Load an NES cartridge ROM from the given .nes file path"""
self.rom = Rom(rom_path, self)
return self.rom
def read_memory(self, address):
"""Return a read of 1 byte of main memory from the given address"""
address = address & 0xFFFF
if address in self.breads:
self.paused = True
self.pause_reason = 'Read at ' + hex(address)
return self.memory.ReadMemory(self, address)
def read_vmemory(self, address):
"""Return a read of 1 byte of video memory from the given address"""
# MMM: isn't VRAM a property of the PPU?
return self.rom.ReadVMemory(self, address)
def read_mem_word(self, address):
"""Return a read of one 16-bit word of memory from the given address"""
value = self.read_memory(address)
value += self.read_memory(address + 1) << 8
return value
def read_mem_word_bug(self, addr):
addr = addr % 0xFFFF
hi_addr = (addr & 0xFF00) | ((addr + 1) & 0xFF)
lo_byte = self.read_memory(addr)
hi_byte = self.read_memory(hi_addr)
return ((hi_byte << 8) | lo_byte)
def read_rel_pc(self, offset):
"""Return 1 byte of memory read using PC-relative addressing"""
return self.read_memory(self.get_register('PC')+offset) & 0xFF
def set_memory(self, address, value):
"""Write the given 1 byte value to the given address in NES memory"""
#self.memory[address] = value & 0xFF
if address in self.bwrites:
self.paused = True
self.pause_reason = 'Write at ' + hex(address)
return self.memory.SetMemory(self, address, value)
def init_memory(self, address, values):
"""Initialize a region of emulated memory to the bytes specified in 'values'"""
for value in values: # writing 1 byte at a time
self.set_memory(address, value)
address = address + 1
def get_memory(self, address, size):
"""Return an arbitrarily sized region of the emulated memory space"""
mem = []
for i in range(0, size):
mem.append(self.read_memory(address+i))
return mem
def get_register(self, name):
"""Return the value of the given register (valid values: A, X, Y, S, P, PC)"""
return self.regs[name].get_value()
def set_pc(self, value):
"""Set the Program Counter of the emulated CPU to the given 16-bit value."""
self.regs['PC'].set_value(value & 0xFFFF)
return value & 0xFFFF
def set_register(self, name, value):
"""Set the given 8-bit register (A, X, Y, S, or P registers only)"""
if name is 'P':
value = value | (1 << 5)
self.regs[name].set_value(value & 0xFF)
return value & 0xFF
def push_byte(self, value):
"""Push the given byte value onto the emulated CPU stack."""
reg_s = self.get_register('S')
self.set_memory(reg_s + self.stack_base, value)
self.set_register('S', reg_s - 1)
return reg_s + self.stack_base - 1
def push_word(self, value):
"""Push the given 16-bit word value onto the emulated CPU stack."""
self.push_byte((value & 0xFF00) >> 8)
return self.push_byte(value & 0xFF)
def pop_byte(self):
"""Return a byte value popped from the emulated CPU stack."""
reg_s = self.get_register('S') + 1
value = self.read_memory(reg_s +self.stack_base)
self.set_register('S', reg_s)
return value
def pop_word(self):
"""Return a 16-bit word value popped from the emulated CPU stack."""
return self.pop_byte() + (self.pop_byte() << 8)
def set_flag(self, flag_name, value):
"""Set the current emulated 8-bit status (flags) register to the given value"""
flags = {'C':0, # Carry
'Z':1, # Zero
'I':2, # Interrupt mask
'D':3, # Decimal
'B':4, # Break
'V':6, # Overflow
'N':7} # Negative
flag_reg = self.get_register('P')
if value == 1:
new_flag = flag_reg | 1 << flags[flag_name]
else:
new_flag = flag_reg & ~(1 << flags[flag_name])
self.set_register('P', new_flag)
def create_overflow_condition(self, old_dst, old_src, new_val, sub_op):
"""Return boolean value whether operation creates an overflow condition"""
if not sub_op:
of_cond = (old_dst^old_src)&0x80 == 0 and (old_dst^new_val) & 0x80 != 0
else:
of_cond = (old_dst^old_src)&0x80 != 0 and (old_dst^new_val) & 0x80 != 0
return of_cond
@staticmethod
def create_carry_condition(new_val, sub_op):
"""Return boolean value whether operation creates a carry condition"""
if not sub_op:
carry_cond = new_val > 0xFF
else:
carry_cond = new_val >= 0
return carry_cond
def ctrl_update_flags(self, flags, old_dst, old_src, new_val, sub_op):
"""Update, as needed, the C or V bits in the emulated flags register"""
of_cond = self.create_overflow_condition(old_dst, old_src, new_val, sub_op)
cf_cond = self.create_carry_condition(new_val, sub_op)
valid_flags = {'C': cf_cond is True,
'Z': new_val & 0xFF == 0,
'V': of_cond is True,
'N': ((new_val & 0x80) != 0)}
for flag in flags:
self.set_flag(flag, valid_flags[flag])
def get_flag(self, flag_name):
"""Return the current emulated 8-bit status (flags) register"""
flags = {'C':0, # Carry
'Z':1, # Zero
'I':2, # Interrctrl_upt mask
'D':3, # Decimal
'B':4, # Break
'V':6, # Overflow | 'N':7} # Negative
flags_reg = self.get_register('P')
flag_index = flags[flag_name]
return (flags_reg >> flag_index) & 1
def inc_pc(self, size):
"""Increment the emulated CPU's Program Counter by specified number of bytes."""
current_pc = self.get_register('PC')
self.set_pc(current_pc + size)
def inc_cycles(self, cycles):
"""NES emulation timing is defined by emulated CPU cycles. Increment the cycle count."""
self.cycle += cycles
self.global_cycle += cycles
def handle_nmi(self):
"""Non-Maskable Interrupt handler routine. NMI is generated by PPU upon each V-Blank."""
print "NMI HANDLER"
self.push_word(self.get_register('PC'))
self.push_byte(self.get_register('P'))
self.set_flag('I', 1)
# MMM: somewhere we should check if NMIs are disabled in the status register?
# jump to the NMI vector
target = self.read_mem_word(self.nmi_vector)
self.set_pc(target)
return True
def step(self):
"""Step the emulated CPU: read, decode, and emulate execution of an instruction."""
addr = self.get_register('PC')
opcode = self.read_memory(addr)
try:
instruction = instructions.instructions[opcode]
except StandardError:
print "Failed to decode instruction: " + hex(opcode) + " @ " + hex(addr)
self.paused = True
return addr
instruction.execute(self)
self.last_four.pop(0)
self.last_four.append(addr)
self.cycle += instruction.cycles
self.global_cycle += instruction.cycles
if self.cycle % 4 == 0:
self.controllers.getInput()
if self.cycle > self.ppu.cyclesPerHBlank:
self.ppu.runPPU(self.cycle)
self.cycle = 0
if self.nmi_flipflop == 1:
self.nmi_flipflop = 0
if self.ppu.nmi == 1:
self.handle_nmi()
return self.get_register('PC')
def run_to_break(self, breaks, bwrites, breads):
"""Run the emulated CPU by stepping it until it hits a breakpoint."""
self.paused = False
self.bwrites = bwrites
self.breads = breads
while self.paused is False:
next_inst = self.step()
if next_inst in breaks:
return next_inst
# sleep the ctrl_right amount here after CPU and PPU are stepped
return next_inst | random_line_split | |
MOS6502.py | """
MOS6502: implements an emulated MOS 6502 processor.
"""
import instructions
from nesPPU import PPU
from nesMemory import Memory
from nesCart import Rom
from nesControllers import Controllers
class Register(object):
"""Representation of a MOS 6502 register. 8-bit except the program counter, which is 16-bit."""
def __init__(self, name, bitwidth):
self.name = name
self.value = 0
self.past = [0] # maybe for rewind?
self.bitwidth = bitwidth # currently unused
self.symb_val = [0] # placeholder until symbolic execution is added
#self.symb_val = [z3.BitVec(self.name, bitwidth)] # maybe for
# symbolic
# execution?
def get_value(self):
"""Return register's current value"""
return self.value
def set_value(self, value):
"""Set register's new value"""
self.value = value
def get_current_symb(self):
"""Return register's current symbolic value"""
return self.symb_val[-1]
def get_initial_symb(self):
"""Return register's initial symbolic value"""
return self.symb_val[0]
def set_initial_symb(self, value):
"""Set register's initial symbolic value"""
self.symb_val[0] = value
class CPU(object):
"""The emulated NES CPU core"""
def __init__(self, baseAddress=0x8000):
self.bitwidth = bitwidth = 8 # 1 byte - 8 bits
self.regs = {'A': Register('A', bitwidth),
'X': Register('X', bitwidth),
'Y': Register('Y', bitwidth),
'PC': Register('PC', bitwidth * 2),
'S': Register('S', bitwidth),
'P': Register('P', bitwidth)}
self.base_address = baseAddress # unused for now
self.pc_size = 2 # 2 bytes for PC - 16 bits
self.memory = Memory()
self.past_memory = []
#self.symbMemory = z3Array('mem', z3.BitVecSort(bitwidth), z3.BitVecSort(8))
self.regs['PC'].set_value(0)
self.regs['S'].set_value(0)
self.cycle = 0
self.global_cycle = 0
self.nmi_flipflop = 0
self.stack_base = 0x0100
self.ppu_mem = 0x2000
self.apu_mem = 0x4000
self.spr_dma = 0x4014
self.channels = 0x4015
self.ctrl1 = 0x4016
self.ctrl2 = 0x4017
self.nmi_vector = 0xFFFA
self.reset_vector = 0xFFFC
self.irq_brk_vector = 0xFFFE
self.ctrl_a = 7
self.ctrl_b = 6
self.ctrl_select = 5
self.ctrl_start = 4
self.ctrl_up = 3
self.ctrl_down = 2
self.ctrl_left = 1
self.ctrl_right = 0
self.rom = None
self.ppu = None
self.controllers = None
self.paused = False
self.pause_reason = None
self.last_four = [0x00] * 4
self.bwrites = []
self.breads = []
def clear_memory(self):
"""Clear (zero out) emulated memory"""
self.memory.ClearMemory()
def reset(self):
"""Reset CPU. Note: reset state is not the same as the initial power-on state"""
# https://wiki.nesdev.com/w/index.php/CPU_power_up_state
ctrl_start = self.read_mem_word(self.reset_vector)
self.set_pc(ctrl_start)
self.set_register('S', 0xFD)
self.set_register('P', 0x24) # MMM: documentation says P = P | 0x04 ?
self.clear_memory() # MMM: documentation says memory is unchanged
self.set_memory(0x01FE, 0xFF)
self.set_memory(0x01FF, 0xFF)
# https://wiki.nesdev.com/w/index.php/PPU_power_up_state
self.ppu = PPU(self) # MMM: PPU object should probably have its own reset() method
self.controllers = Controllers(self)
def map_mem(self, address):
"""Map program memory from the NES ROM into the CPU's memory space"""
return self.rom.mapMem(self, address)
def map_vmem(self, address):
"""Map VMem from the NES ROM into the CPU's memory space"""
return self.rom.mapVMem(self, address)
def load_rom(self, rom_path):
"""Load an NES cartridge ROM from the given .nes file path"""
self.rom = Rom(rom_path, self)
return self.rom
def read_memory(self, address):
"""Return a read of 1 byte of main memory from the given address"""
address = address & 0xFFFF
if address in self.breads:
self.paused = True
self.pause_reason = 'Read at ' + hex(address)
return self.memory.ReadMemory(self, address)
def read_vmemory(self, address):
"""Return a read of 1 byte of video memory from the given address"""
# MMM: isn't VRAM a property of the PPU?
return self.rom.ReadVMemory(self, address)
def read_mem_word(self, address):
"""Return a read of one 16-bit word of memory from the given address"""
value = self.read_memory(address)
value += self.read_memory(address + 1) << 8
return value
def read_mem_word_bug(self, addr):
addr = addr % 0xFFFF
hi_addr = (addr & 0xFF00) | ((addr + 1) & 0xFF)
lo_byte = self.read_memory(addr)
hi_byte = self.read_memory(hi_addr)
return ((hi_byte << 8) | lo_byte)
def read_rel_pc(self, offset):
"""Return 1 byte of memory read using PC-relative addressing"""
return self.read_memory(self.get_register('PC')+offset) & 0xFF
def set_memory(self, address, value):
"""Write the given 1 byte value to the given address in NES memory"""
#self.memory[address] = value & 0xFF
if address in self.bwrites:
self.paused = True
self.pause_reason = 'Write at ' + hex(address)
return self.memory.SetMemory(self, address, value)
def init_memory(self, address, values):
"""Initialize a region of emulated memory to the bytes specified in 'values'"""
for value in values: # writing 1 byte at a time
self.set_memory(address, value)
address = address + 1
def | (self, address, size):
"""Return an arbitrarily sized region of the emulated memory space"""
mem = []
for i in range(0, size):
mem.append(self.read_memory(address+i))
return mem
def get_register(self, name):
"""Return the value of the given register (valid values: A, X, Y, S, P, PC)"""
return self.regs[name].get_value()
def set_pc(self, value):
"""Set the Program Counter of the emulated CPU to the given 16-bit value."""
self.regs['PC'].set_value(value & 0xFFFF)
return value & 0xFFFF
def set_register(self, name, value):
"""Set the given 8-bit register (A, X, Y, S, or P registers only)"""
if name is 'P':
value = value | (1 << 5)
self.regs[name].set_value(value & 0xFF)
return value & 0xFF
def push_byte(self, value):
"""Push the given byte value onto the emulated CPU stack."""
reg_s = self.get_register('S')
self.set_memory(reg_s + self.stack_base, value)
self.set_register('S', reg_s - 1)
return reg_s + self.stack_base - 1
def push_word(self, value):
"""Push the given 16-bit word value onto the emulated CPU stack."""
self.push_byte((value & 0xFF00) >> 8)
return self.push_byte(value & 0xFF)
def pop_byte(self):
"""Return a byte value popped from the emulated CPU stack."""
reg_s = self.get_register('S') + 1
value = self.read_memory(reg_s +self.stack_base)
self.set_register('S', reg_s)
return value
def pop_word(self):
"""Return a 16-bit word value popped from the emulated CPU stack."""
return self.pop_byte() + (self.pop_byte() << 8)
def set_flag(self, flag_name, value):
"""Set the current emulated 8-bit status (flags) register to the given value"""
flags = {'C':0, # Carry
'Z':1, # Zero
'I':2, # Interrupt mask
'D':3, # Decimal
'B':4, # Break
'V':6, # Overflow
'N':7} # Negative
flag_reg = self.get_register('P')
if value == 1:
new_flag = flag_reg | 1 << flags[flag_name]
else:
new_flag = flag_reg & ~(1 << flags[flag_name])
self.set_register('P', new_flag)
def create_overflow_condition(self, old_dst, old_src, new_val, sub_op):
"""Return boolean value whether operation creates an overflow condition"""
if not sub_op:
of_cond = (old_dst^old_src)&0x80 == 0 and (old_dst^new_val) & 0x80 != 0
else:
of_cond = (old_dst^old_src)&0x80 != 0 and (old_dst^new_val) & 0x80 != 0
return of_cond
@staticmethod
def create_carry_condition(new_val, sub_op):
"""Return boolean value whether operation creates a carry condition"""
if not sub_op:
carry_cond = new_val > 0xFF
else:
carry_cond = new_val >= 0
return carry_cond
def ctrl_update_flags(self, flags, old_dst, old_src, new_val, sub_op):
"""Update, as needed, the C or V bits in the emulated flags register"""
of_cond = self.create_overflow_condition(old_dst, old_src, new_val, sub_op)
cf_cond = self.create_carry_condition(new_val, sub_op)
valid_flags = {'C': cf_cond is True,
'Z': new_val & 0xFF == 0,
'V': of_cond is True,
'N': ((new_val & 0x80) != 0)}
for flag in flags:
self.set_flag(flag, valid_flags[flag])
def get_flag(self, flag_name):
"""Return the current emulated 8-bit status (flags) register"""
flags = {'C':0, # Carry
'Z':1, # Zero
'I':2, # Interrctrl_upt mask
'D':3, # Decimal
'B':4, # Break
'V':6, # Overflow
'N':7} # Negative
flags_reg = self.get_register('P')
flag_index = flags[flag_name]
return (flags_reg >> flag_index) & 1
def inc_pc(self, size):
"""Increment the emulated CPU's Program Counter by specified number of bytes."""
current_pc = self.get_register('PC')
self.set_pc(current_pc + size)
def inc_cycles(self, cycles):
"""NES emulation timing is defined by emulated CPU cycles. Increment the cycle count."""
self.cycle += cycles
self.global_cycle += cycles
def handle_nmi(self):
"""Non-Maskable Interrupt handler routine. NMI is generated by PPU upon each V-Blank."""
print "NMI HANDLER"
self.push_word(self.get_register('PC'))
self.push_byte(self.get_register('P'))
self.set_flag('I', 1)
# MMM: somewhere we should check if NMIs are disabled in the status register?
# jump to the NMI vector
target = self.read_mem_word(self.nmi_vector)
self.set_pc(target)
return True
def step(self):
"""Step the emulated CPU: read, decode, and emulate execution of an instruction."""
addr = self.get_register('PC')
opcode = self.read_memory(addr)
try:
instruction = instructions.instructions[opcode]
except StandardError:
print "Failed to decode instruction: " + hex(opcode) + " @ " + hex(addr)
self.paused = True
return addr
instruction.execute(self)
self.last_four.pop(0)
self.last_four.append(addr)
self.cycle += instruction.cycles
self.global_cycle += instruction.cycles
if self.cycle % 4 == 0:
self.controllers.getInput()
if self.cycle > self.ppu.cyclesPerHBlank:
self.ppu.runPPU(self.cycle)
self.cycle = 0
if self.nmi_flipflop == 1:
self.nmi_flipflop = 0
if self.ppu.nmi == 1:
self.handle_nmi()
return self.get_register('PC')
def run_to_break(self, breaks, bwrites, breads):
"""Run the emulated CPU by stepping it until it hits a breakpoint."""
self.paused = False
self.bwrites = bwrites
self.breads = breads
while self.paused is False:
next_inst = self.step()
if next_inst in breaks:
return next_inst
# sleep the ctrl_right amount here after CPU and PPU are stepped
return next_inst
| get_memory | identifier_name |
MOS6502.py | """
MOS6502: implements an emulated MOS 6502 processor.
"""
import instructions
from nesPPU import PPU
from nesMemory import Memory
from nesCart import Rom
from nesControllers import Controllers
class Register(object):
"""Representation of a MOS 6502 register. 8-bit except the program counter, which is 16-bit."""
def __init__(self, name, bitwidth):
self.name = name
self.value = 0
self.past = [0] # maybe for rewind?
self.bitwidth = bitwidth # currently unused
self.symb_val = [0] # placeholder until symbolic execution is added
#self.symb_val = [z3.BitVec(self.name, bitwidth)] # maybe for
# symbolic
# execution?
def get_value(self):
"""Return register's current value"""
return self.value
def set_value(self, value):
"""Set register's new value"""
self.value = value
def get_current_symb(self):
"""Return register's current symbolic value"""
return self.symb_val[-1]
def get_initial_symb(self):
"""Return register's initial symbolic value"""
return self.symb_val[0]
def set_initial_symb(self, value):
"""Set register's initial symbolic value"""
self.symb_val[0] = value
class CPU(object):
"""The emulated NES CPU core"""
def __init__(self, baseAddress=0x8000):
self.bitwidth = bitwidth = 8 # 1 byte - 8 bits
self.regs = {'A': Register('A', bitwidth),
'X': Register('X', bitwidth),
'Y': Register('Y', bitwidth),
'PC': Register('PC', bitwidth * 2),
'S': Register('S', bitwidth),
'P': Register('P', bitwidth)}
self.base_address = baseAddress # unused for now
self.pc_size = 2 # 2 bytes for PC - 16 bits
self.memory = Memory()
self.past_memory = []
#self.symbMemory = z3Array('mem', z3.BitVecSort(bitwidth), z3.BitVecSort(8))
self.regs['PC'].set_value(0)
self.regs['S'].set_value(0)
self.cycle = 0
self.global_cycle = 0
self.nmi_flipflop = 0
self.stack_base = 0x0100
self.ppu_mem = 0x2000
self.apu_mem = 0x4000
self.spr_dma = 0x4014
self.channels = 0x4015
self.ctrl1 = 0x4016
self.ctrl2 = 0x4017
self.nmi_vector = 0xFFFA
self.reset_vector = 0xFFFC
self.irq_brk_vector = 0xFFFE
self.ctrl_a = 7
self.ctrl_b = 6
self.ctrl_select = 5
self.ctrl_start = 4
self.ctrl_up = 3
self.ctrl_down = 2
self.ctrl_left = 1
self.ctrl_right = 0
self.rom = None
self.ppu = None
self.controllers = None
self.paused = False
self.pause_reason = None
self.last_four = [0x00] * 4
self.bwrites = []
self.breads = []
def clear_memory(self):
"""Clear (zero out) emulated memory"""
self.memory.ClearMemory()
def reset(self):
"""Reset CPU. Note: reset state is not the same as the initial power-on state"""
# https://wiki.nesdev.com/w/index.php/CPU_power_up_state
ctrl_start = self.read_mem_word(self.reset_vector)
self.set_pc(ctrl_start)
self.set_register('S', 0xFD)
self.set_register('P', 0x24) # MMM: documentation says P = P | 0x04 ?
self.clear_memory() # MMM: documentation says memory is unchanged
self.set_memory(0x01FE, 0xFF)
self.set_memory(0x01FF, 0xFF)
# https://wiki.nesdev.com/w/index.php/PPU_power_up_state
self.ppu = PPU(self) # MMM: PPU object should probably have its own reset() method
self.controllers = Controllers(self)
def map_mem(self, address):
"""Map program memory from the NES ROM into the CPU's memory space"""
return self.rom.mapMem(self, address)
def map_vmem(self, address):
"""Map VMem from the NES ROM into the CPU's memory space"""
return self.rom.mapVMem(self, address)
def load_rom(self, rom_path):
"""Load an NES cartridge ROM from the given .nes file path"""
self.rom = Rom(rom_path, self)
return self.rom
def read_memory(self, address):
"""Return a read of 1 byte of main memory from the given address"""
address = address & 0xFFFF
if address in self.breads:
self.paused = True
self.pause_reason = 'Read at ' + hex(address)
return self.memory.ReadMemory(self, address)
def read_vmemory(self, address):
"""Return a read of 1 byte of video memory from the given address"""
# MMM: isn't VRAM a property of the PPU?
return self.rom.ReadVMemory(self, address)
def read_mem_word(self, address):
"""Return a read of one 16-bit word of memory from the given address"""
value = self.read_memory(address)
value += self.read_memory(address + 1) << 8
return value
def read_mem_word_bug(self, addr):
addr = addr % 0xFFFF
hi_addr = (addr & 0xFF00) | ((addr + 1) & 0xFF)
lo_byte = self.read_memory(addr)
hi_byte = self.read_memory(hi_addr)
return ((hi_byte << 8) | lo_byte)
def read_rel_pc(self, offset):
"""Return 1 byte of memory read using PC-relative addressing"""
return self.read_memory(self.get_register('PC')+offset) & 0xFF
def set_memory(self, address, value):
"""Write the given 1 byte value to the given address in NES memory"""
#self.memory[address] = value & 0xFF
if address in self.bwrites:
self.paused = True
self.pause_reason = 'Write at ' + hex(address)
return self.memory.SetMemory(self, address, value)
def init_memory(self, address, values):
"""Initialize a region of emulated memory to the bytes specified in 'values'"""
for value in values: # writing 1 byte at a time
self.set_memory(address, value)
address = address + 1
def get_memory(self, address, size):
"""Return an arbitrarily sized region of the emulated memory space"""
mem = []
for i in range(0, size):
mem.append(self.read_memory(address+i))
return mem
def get_register(self, name):
"""Return the value of the given register (valid values: A, X, Y, S, P, PC)"""
return self.regs[name].get_value()
def set_pc(self, value):
"""Set the Program Counter of the emulated CPU to the given 16-bit value."""
self.regs['PC'].set_value(value & 0xFFFF)
return value & 0xFFFF
def set_register(self, name, value):
"""Set the given 8-bit register (A, X, Y, S, or P registers only)"""
if name is 'P':
value = value | (1 << 5)
self.regs[name].set_value(value & 0xFF)
return value & 0xFF
def push_byte(self, value):
"""Push the given byte value onto the emulated CPU stack."""
reg_s = self.get_register('S')
self.set_memory(reg_s + self.stack_base, value)
self.set_register('S', reg_s - 1)
return reg_s + self.stack_base - 1
def push_word(self, value):
"""Push the given 16-bit word value onto the emulated CPU stack."""
self.push_byte((value & 0xFF00) >> 8)
return self.push_byte(value & 0xFF)
def pop_byte(self):
"""Return a byte value popped from the emulated CPU stack."""
reg_s = self.get_register('S') + 1
value = self.read_memory(reg_s +self.stack_base)
self.set_register('S', reg_s)
return value
def pop_word(self):
"""Return a 16-bit word value popped from the emulated CPU stack."""
return self.pop_byte() + (self.pop_byte() << 8)
def set_flag(self, flag_name, value):
"""Set the current emulated 8-bit status (flags) register to the given value"""
flags = {'C':0, # Carry
'Z':1, # Zero
'I':2, # Interrupt mask
'D':3, # Decimal
'B':4, # Break
'V':6, # Overflow
'N':7} # Negative
flag_reg = self.get_register('P')
if value == 1:
new_flag = flag_reg | 1 << flags[flag_name]
else:
new_flag = flag_reg & ~(1 << flags[flag_name])
self.set_register('P', new_flag)
def create_overflow_condition(self, old_dst, old_src, new_val, sub_op):
"""Return boolean value whether operation creates an overflow condition"""
if not sub_op:
of_cond = (old_dst^old_src)&0x80 == 0 and (old_dst^new_val) & 0x80 != 0
else:
of_cond = (old_dst^old_src)&0x80 != 0 and (old_dst^new_val) & 0x80 != 0
return of_cond
@staticmethod
def create_carry_condition(new_val, sub_op):
"""Return boolean value whether operation creates a carry condition"""
if not sub_op:
carry_cond = new_val > 0xFF
else:
carry_cond = new_val >= 0
return carry_cond
def ctrl_update_flags(self, flags, old_dst, old_src, new_val, sub_op):
"""Update, as needed, the C or V bits in the emulated flags register"""
of_cond = self.create_overflow_condition(old_dst, old_src, new_val, sub_op)
cf_cond = self.create_carry_condition(new_val, sub_op)
valid_flags = {'C': cf_cond is True,
'Z': new_val & 0xFF == 0,
'V': of_cond is True,
'N': ((new_val & 0x80) != 0)}
for flag in flags:
|
def get_flag(self, flag_name):
"""Return the current emulated 8-bit status (flags) register"""
flags = {'C':0, # Carry
'Z':1, # Zero
'I':2, # Interrctrl_upt mask
'D':3, # Decimal
'B':4, # Break
'V':6, # Overflow
'N':7} # Negative
flags_reg = self.get_register('P')
flag_index = flags[flag_name]
return (flags_reg >> flag_index) & 1
def inc_pc(self, size):
"""Increment the emulated CPU's Program Counter by specified number of bytes."""
current_pc = self.get_register('PC')
self.set_pc(current_pc + size)
def inc_cycles(self, cycles):
"""NES emulation timing is defined by emulated CPU cycles. Increment the cycle count."""
self.cycle += cycles
self.global_cycle += cycles
def handle_nmi(self):
"""Non-Maskable Interrupt handler routine. NMI is generated by PPU upon each V-Blank."""
print "NMI HANDLER"
self.push_word(self.get_register('PC'))
self.push_byte(self.get_register('P'))
self.set_flag('I', 1)
# MMM: somewhere we should check if NMIs are disabled in the status register?
# jump to the NMI vector
target = self.read_mem_word(self.nmi_vector)
self.set_pc(target)
return True
def step(self):
"""Step the emulated CPU: read, decode, and emulate execution of an instruction."""
addr = self.get_register('PC')
opcode = self.read_memory(addr)
try:
instruction = instructions.instructions[opcode]
except StandardError:
print "Failed to decode instruction: " + hex(opcode) + " @ " + hex(addr)
self.paused = True
return addr
instruction.execute(self)
self.last_four.pop(0)
self.last_four.append(addr)
self.cycle += instruction.cycles
self.global_cycle += instruction.cycles
if self.cycle % 4 == 0:
self.controllers.getInput()
if self.cycle > self.ppu.cyclesPerHBlank:
self.ppu.runPPU(self.cycle)
self.cycle = 0
if self.nmi_flipflop == 1:
self.nmi_flipflop = 0
if self.ppu.nmi == 1:
self.handle_nmi()
return self.get_register('PC')
def run_to_break(self, breaks, bwrites, breads):
"""Run the emulated CPU by stepping it until it hits a breakpoint."""
self.paused = False
self.bwrites = bwrites
self.breads = breads
while self.paused is False:
next_inst = self.step()
if next_inst in breaks:
return next_inst
# sleep the ctrl_right amount here after CPU and PPU are stepped
return next_inst
| self.set_flag(flag, valid_flags[flag]) | conditional_block |
icip_train_val.py | # !/usr/bin/env python
# coding: utf-8
'''
@File : boost.py
@Time : 2020/04/13 13:46:21
@Author : Wang Kai
@Version : 1.0
@Contact : wk15@mail.ustc.edu.cn
'''
# This is used to build catboost model using extracted features
import argparse
import gc
import time
import os
import math
import catboost
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from scipy import stats
from scipy.stats import spearmanr
from sklearn.cluster import KMeans
from sklearn.metrics import mean_absolute_error, mean_squared_error
from sklearn.model_selection import train_test_split
random_seed = 2020
num_class = 50
os.environ["CUDA_VISIBLE_DEVICES"] = "2"
all_popularity_filepath = "/home/wangkai/ICIP/feature/label/popularity_TRAIN_20337.csv"
cluster_center_filepath = "/home/wangkai/ICIP/feature/label/cluster_center.csv"
cluser_label_filepath = "/home/wangkai/ICIP/feature/label/cluster_label_20337.csv"
# random
train_popularity_filepath = "/home/wangkai/ICIP/feature/label/train_label_random.csv"
validate_popularity_filepath = "/home/wangkai/ICIP/feature/label/validate_label_random.csv"
# # postdate
train_popularity_filepath="/home/wangkai/ICIP/feature/label/train_label_datetaken.csv"
validate_popularity_filepath="/home/wangkai/ICIP/feature/label/validate_label_datetaken.csv"
number_columns = ["PhotoCount", "MeanViews", "Contacts", "GroupsCount", "NumSets", "GroupsAvgPictures",
"GroupsAvgMembers", "Ispro", "HasStats", "AvgGroupsMemb", "AvgGroupPhotos", "NumGroups"] # 12
text_columns = ["Tags", "Title", "Description"] # 3
first_columns = ["FlickrId", "UserId"] # 2
train_feature_filepath = {
"original": "/home/wangkai/ICIP/feature/train/train_feature_20337.csv",
"fasttext": "/home/wangkai/ICIP/feature/train/FastText_tags+des_20337.csv",
"tfidf": "/home/wangkai/ICIP/feature/train/Tfidf_tags+des_20337.csv",
"lsa": "/home/wangkai/ICIP/feature/train/LSA_tags+title+des_20337.csv",
"lda": "/home/wangkai/ICIP/feature/train/LDA_tags+title+des_20337.csv",
"wordchar": "/home/wangkai/ICIP/feature/train/wordchar_tags+title+des_20337.csv",
"userid": "/home/wangkai/ICIP/feature/train/UserId256_20337.csv",
"image": "/home/wangkai/ICIP/feature/train/ResNext101_image_20337.csv"
}
test_feature_filepath = {
"original": "/home/wangkai/ICIP/feature/test/test_feature_7693.csv",
"fasttext": "/home/wangkai/ICIP/feature/test/FastText_tags+des_7693.csv",
"tfidf": "/home/wangkai/ICIP/feature/test/Tfidf_tags+des_7693.csv",
"lsa": "/home/wangkai/ICIP/feature/test/LSA_tags+title+des_7693.csv",
"lda": "/home/wangkai/ICIP/feature/test/LDA_tags+title+des_7693.csv",
"wordchar": "/home/wangkai/ICIP/feature/test/wordchar_tags+title+des_7693.csv",
"userid": "/home/wangkai/ICIP/feature/test/UserId256_7693.csv",
"image": "/home/wangkai/ICIP/feature/test/ResNext101_image_7693.csv"
}
def clutser(num_class=num_class):
df_popularity = pd.read_csv(all_popularity_filepath)
# 归一化
normalized_popularity = df_popularity.iloc[:, 1:].div(
df_popularity["Day30"], axis=0)
# 聚类的label
kmeans = KMeans(n_clusters=num_class, init="k-means++", n_init=100, max_iter=10000,
random_state=random_seed, n_jobs=-1, algorithm="auto").fit(normalized_popularity)
df_label = pd.DataFrame(
{"FlickrId": df_popularity["FlickrId"], "label": kmeans.labels_})
df_label.to_csv(cluser_label_filepath, index=False)
# 聚类中心
df_cluster_center = pd.DataFrame(kmeans.cluster_centers_)
df_cluster_center.columns = ["day"+str(i+1) for i in range(30)]
df_cluster_center.insert(0, column="label", value=np.arange(num_class))
df_cluster_center.to_csv(cluster_center_filepath, index=False)
def load_feature(feature_list, flag="train"):
feature_path = train_feature_filepath if flag == "train" else test_feature_filepath
for i, feature_name in enumerate(feature_list):
print("Loading {} .. | columns
all_feature.drop(useless, axis=1, inplace=True)
print(all_feature)
return all_feature
def calssify_catboost(train, validate):
cat_features = ["UserId"]
# cat_features=[]
train_data = catboost.Pool(
train.iloc[:, 1:-31], train["label"], cat_features=cat_features)
validata_data = catboost.Pool(
validate.iloc[:, 1:-31], validate["label"], cat_features=cat_features)
model = catboost.CatBoostClassifier(iterations=10000, learning_rate=0.01, depth=6, objective="MultiClass", classes_count=num_class, eval_metric="Accuracy", l2_leaf_reg=3.0,
min_data_in_leaf=1, boosting_type="Plain", use_best_model=False, thread_count=-1, task_type="GPU", devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=1000)
model = model.fit(train_data, eval_set=validata_data, plot=False)
# predict label
preds = model.predict(validata_data)
preds = preds.flatten()
print("\nValidate\nACC: {}\tTotal right: {}".format(
np.sum(preds == validate["label"])/len(preds), np.sum(preds == validate["label"])))
# feature importance
df_important = pd.DataFrame(
{"feature_name": model.feature_names_, "importance": model.feature_importances_})
df_important = df_important.sort_values(by=["importance"], ascending=False)
print(df_important)
df_predict_label = pd.DataFrame(
{"FlickrId": validate["FlickrId"], "preds_label": preds})
return model, df_predict_label
def regression_catboost(train, validate):
cat_features = ["UserId"]
# cat_features=[]
p_train, p_validate = np.log(
train["Day30"]/4+1), np.log(validate["Day30"]/4+1)
# p_train,p_validate=train["Day30"],validate["Day30"]
train_data = catboost.Pool(
train.iloc[:, 1:-31], p_train, cat_features=cat_features)
validata_data = catboost.Pool(
validate.iloc[:, 1:-31], p_validate, cat_features=cat_features)
model = catboost.CatBoostRegressor(iterations=35000, learning_rate=0.003, depth=6, objective="MAPE", eval_metric="MAPE", custom_metric=["RMSE", "MAE", "MAPE"], l2_leaf_reg=3.0, min_data_in_leaf=1, boosting_type="Plain", use_best_model=True, thread_count=-1, task_type="GPU", devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=1000, fold_permutation_block=1, bagging_temperature=0)
# model=catboost.CatBoostRegressor(iterations=100000, learning_rate=0.1, depth=6, objective="RMSE", eval_metric="RMSE",custom_metric=["RMSE","MAE","MAPE"], l2_leaf_reg=3.0, min_data_in_leaf=1, boosting_type="Plain", use_best_model=True, thread_count=-1, task_type="CPU",devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=500)
model.fit(train_data, eval_set=validata_data, plot=False)
preds_p_validate = model.predict(validata_data)
preds_day30 = (np.exp(preds_p_validate)-1)*4
src, _ = spearmanr(validate["Day30"], preds_day30)
df_important = pd.DataFrame(
{"feature_name": model.feature_names_, "importance": model.feature_importances_})
df_important = df_important.sort_values(by=["importance"], ascending=False)
print(df_important)
df_predict_day30 = pd.DataFrame(
{"FlickrId": validate["FlickrId"], "Day30": validate["Day30"], "preds_day30": preds_day30})
return model, df_predict_day30
def train(classify_feature_list, regression_feature_list):
df_label = pd.read_csv(cluser_label_filepath)
df_train_popularity = pd.read_csv(train_popularity_filepath)
df_validate_popularity = pd.read_csv(validate_popularity_filepath)
train_label = pd.merge(df_label, df_train_popularity,
on="FlickrId", how="inner")
validate_label = pd.merge(
df_label, df_validate_popularity, on="FlickrId", how="inner")
# Classify
classify_feature = load_feature(classify_feature_list, flag="train")
train = pd.merge(classify_feature, train_label, on="FlickrId", how="inner")
validate = pd.merge(classify_feature, validate_label,
on="FlickrId", how="inner")
classify_model, df_predict_label = calssify_catboost(train, validate)
df_predict_label.to_csv(
"/home/wangkai/ICIP/predict_label.csv", index=False)
df_predict_label = pd.read_csv("/home/wangkai/ICIP/predict_label.csv")
regression_feature = load_feature(regression_feature_list, flag="train")
train = pd.merge(regression_feature, train_label,
on="FlickrId", how="inner")
validate = pd.merge(regression_feature, validate_label,
on="FlickrId", how="inner")
regression_model, df_predict_day30 = regression_catboost(train, validate)
df_predict_day30.to_csv("/home/wangkai/ICIP/temp/predict_day30.csv",index=False)
df_predict_day30=pd.read_csv("/home/wangkai/ICIP/temp/predict_day30.csv")
df_preds = pd.merge(df_predict_label, df_predict_day30,
on="FlickrId", how="inner")
df_cluster_center = pd.read_csv(cluster_center_filepath)
df_temp = pd.merge(df_preds, df_cluster_center, how="left",
left_on="preds_label", right_on="label")
# FlickrId,preds,回归对时,真相
df_preds_result = pd.concat([df_temp["FlickrId"], df_temp.iloc[:, -30:].mul(df_temp["preds_day30"], axis=0),
df_temp.iloc[:, -30:].mul(df_temp["Day30"], axis=0), validate.iloc[:, -30:]], axis=1)
columns = ["FlickrId"]+["preds_day"+str(i+1) for i in range(30)]+[
"regression_truth"+str(i+1) for i in range(30)]+["truth"+str(i+1) for i in range(30)]
df_preds_result.columns = columns
# analysis
y_preds = np.array(df_preds_result.iloc[:, 1:31])
y_regression_true = np.array(df_preds_result.iloc[:, 31:61])
y_true = np.array(df_preds_result.iloc[:, 61:])
# 对于预测结果
rmse_errors = np.sqrt([mean_squared_error(y_true[i], y_preds[i])
for i in range(y_true.shape[0])])
trmse = stats.trim_mean(rmse_errors, 0.25)
median_rmse = np.median(rmse_errors)
src, _ = spearmanr(y_true[:, -1], y_preds[:, -1])
print("\n Predict:")
print("RMSE(trimmed 0.25): {}".format(trmse))
print("RMSE(median): {}".format(median_rmse))
print("SRC: {}".format(src))
# 对于回归为真
rmse_errors = np.sqrt([mean_squared_error(
y_true[i], y_regression_true[i]) for i in range(y_true.shape[0])])
trmse = stats.trim_mean(rmse_errors, 0.25)
median_rmse = np.median(rmse_errors)
src, _ = spearmanr(y_true[:, -1], y_regression_true[:, -1])
print("\n for regression is true:")
print("RMSE(trimmed 0.25): {}".format(trmse))
print("RMSE(median): {}".format(median_rmse))
print("SRC: {}".format(src))
# classify_model = 1
return classify_model, regression_model
def parse_arguments():
parser = argparse.ArgumentParser(description=" ICIP Catboost model")
parser.add_argument("-classify_f", "--classify_feature", type=str,
choices=["original", "fasttext", "tfidf",
"lsa", "lda", "wordchar", "userid", "image"],
nargs="?",
const=["original"],
default=["original", "userid"],
help="which feature will be used for classify")
parser.add_argument("-reg_f", "--regression_feature", type=str,
choices=["original", "fasttext", "tfidf",
"lsa", "lda", "wordchar", "userid", "image"],
nargs="?",
const=["original"],
default=["original", "userid", "lda", "wordchar"],
help="which feature will be used for regression")
parser.add_argument("-output", "--submission_path", type=str,
default="/home/wangkai/ICIP/submission",
help="ICIP file(.csv) will be submit path")
args = parser.parse_args()
return args
if __name__ == "__main__":
args = parse_arguments()
# clutser(num_class=num_class)
calssify_model, regression_model = train(
args.classify_feature, args.regression_feature)
test_classify_feature=load_feature(args.classify_feature,flag="test")
predict_label=calssify_model.predict(test_classify_feature.iloc[:,1:])
df_predict_label=pd.DataFrame({"FlickrId":test_classify_feature["FlickrId"],"preds_label":predict_label.flatten()})
# predict Day30
test_regression_feature=load_feature(args.regression_feature,flag="test")
preds_p_test=regression_model.predict(test_regression_feature.iloc[:,1:])
preds_day30=(np.exp(preds_p_test)-1)*4
df_predict_day30=pd.DataFrame({"FlickrId":test_regression_feature["FlickrId"],"preds_day30":preds_day30})
df_preds=pd.merge(df_predict_label,df_predict_day30,on="FlickrId",how="inner")
df_cluster_center=pd.read_csv(cluster_center_filepath)
df_temp=pd.merge(df_preds,df_cluster_center,how="left",left_on="preds_label",right_on="label")
df_preds_result=pd.concat([df_temp["FlickrId"],df_temp.iloc[:,-30:].mul(df_temp["preds_day30"],axis=0)],axis=1)
df_preds_result.columns=pd.read_csv(all_popularity_filepath).columns
print(df_preds_result)
submission_filepath=os.path.join(args.submission_path,"1_submission.csv")
df_preds_result.to_csv(submission_filepath,index=False) | .".format(feature_name))
feature = pd.read_csv(feature_path[feature_name])
print("feature: {}, len:{}".format(
feature_name, len(feature.columns)-1))
if i == 0:
all_feature = feature
else:
all_feature = pd.merge(all_feature, feature)
useless = text_ | conditional_block |
icip_train_val.py | # !/usr/bin/env python
# coding: utf-8
'''
@File : boost.py
@Time : 2020/04/13 13:46:21
@Author : Wang Kai
@Version : 1.0
@Contact : wk15@mail.ustc.edu.cn
'''
# This is used to build catboost model using extracted features
import argparse
import gc
import time
import os
import math
import catboost
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd | from sklearn.metrics import mean_absolute_error, mean_squared_error
from sklearn.model_selection import train_test_split
random_seed = 2020
num_class = 50
os.environ["CUDA_VISIBLE_DEVICES"] = "2"
all_popularity_filepath = "/home/wangkai/ICIP/feature/label/popularity_TRAIN_20337.csv"
cluster_center_filepath = "/home/wangkai/ICIP/feature/label/cluster_center.csv"
cluser_label_filepath = "/home/wangkai/ICIP/feature/label/cluster_label_20337.csv"
# random
train_popularity_filepath = "/home/wangkai/ICIP/feature/label/train_label_random.csv"
validate_popularity_filepath = "/home/wangkai/ICIP/feature/label/validate_label_random.csv"
# # postdate
train_popularity_filepath="/home/wangkai/ICIP/feature/label/train_label_datetaken.csv"
validate_popularity_filepath="/home/wangkai/ICIP/feature/label/validate_label_datetaken.csv"
number_columns = ["PhotoCount", "MeanViews", "Contacts", "GroupsCount", "NumSets", "GroupsAvgPictures",
"GroupsAvgMembers", "Ispro", "HasStats", "AvgGroupsMemb", "AvgGroupPhotos", "NumGroups"] # 12
text_columns = ["Tags", "Title", "Description"] # 3
first_columns = ["FlickrId", "UserId"] # 2
train_feature_filepath = {
"original": "/home/wangkai/ICIP/feature/train/train_feature_20337.csv",
"fasttext": "/home/wangkai/ICIP/feature/train/FastText_tags+des_20337.csv",
"tfidf": "/home/wangkai/ICIP/feature/train/Tfidf_tags+des_20337.csv",
"lsa": "/home/wangkai/ICIP/feature/train/LSA_tags+title+des_20337.csv",
"lda": "/home/wangkai/ICIP/feature/train/LDA_tags+title+des_20337.csv",
"wordchar": "/home/wangkai/ICIP/feature/train/wordchar_tags+title+des_20337.csv",
"userid": "/home/wangkai/ICIP/feature/train/UserId256_20337.csv",
"image": "/home/wangkai/ICIP/feature/train/ResNext101_image_20337.csv"
}
test_feature_filepath = {
"original": "/home/wangkai/ICIP/feature/test/test_feature_7693.csv",
"fasttext": "/home/wangkai/ICIP/feature/test/FastText_tags+des_7693.csv",
"tfidf": "/home/wangkai/ICIP/feature/test/Tfidf_tags+des_7693.csv",
"lsa": "/home/wangkai/ICIP/feature/test/LSA_tags+title+des_7693.csv",
"lda": "/home/wangkai/ICIP/feature/test/LDA_tags+title+des_7693.csv",
"wordchar": "/home/wangkai/ICIP/feature/test/wordchar_tags+title+des_7693.csv",
"userid": "/home/wangkai/ICIP/feature/test/UserId256_7693.csv",
"image": "/home/wangkai/ICIP/feature/test/ResNext101_image_7693.csv"
}
def clutser(num_class=num_class):
df_popularity = pd.read_csv(all_popularity_filepath)
# 归一化
normalized_popularity = df_popularity.iloc[:, 1:].div(
df_popularity["Day30"], axis=0)
# 聚类的label
kmeans = KMeans(n_clusters=num_class, init="k-means++", n_init=100, max_iter=10000,
random_state=random_seed, n_jobs=-1, algorithm="auto").fit(normalized_popularity)
df_label = pd.DataFrame(
{"FlickrId": df_popularity["FlickrId"], "label": kmeans.labels_})
df_label.to_csv(cluser_label_filepath, index=False)
# 聚类中心
df_cluster_center = pd.DataFrame(kmeans.cluster_centers_)
df_cluster_center.columns = ["day"+str(i+1) for i in range(30)]
df_cluster_center.insert(0, column="label", value=np.arange(num_class))
df_cluster_center.to_csv(cluster_center_filepath, index=False)
def load_feature(feature_list, flag="train"):
feature_path = train_feature_filepath if flag == "train" else test_feature_filepath
for i, feature_name in enumerate(feature_list):
print("Loading {} ...".format(feature_name))
feature = pd.read_csv(feature_path[feature_name])
print("feature: {}, len:{}".format(
feature_name, len(feature.columns)-1))
if i == 0:
all_feature = feature
else:
all_feature = pd.merge(all_feature, feature)
useless = text_columns
all_feature.drop(useless, axis=1, inplace=True)
print(all_feature)
return all_feature
def calssify_catboost(train, validate):
cat_features = ["UserId"]
# cat_features=[]
train_data = catboost.Pool(
train.iloc[:, 1:-31], train["label"], cat_features=cat_features)
validata_data = catboost.Pool(
validate.iloc[:, 1:-31], validate["label"], cat_features=cat_features)
model = catboost.CatBoostClassifier(iterations=10000, learning_rate=0.01, depth=6, objective="MultiClass", classes_count=num_class, eval_metric="Accuracy", l2_leaf_reg=3.0,
min_data_in_leaf=1, boosting_type="Plain", use_best_model=False, thread_count=-1, task_type="GPU", devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=1000)
model = model.fit(train_data, eval_set=validata_data, plot=False)
# predict label
preds = model.predict(validata_data)
preds = preds.flatten()
print("\nValidate\nACC: {}\tTotal right: {}".format(
np.sum(preds == validate["label"])/len(preds), np.sum(preds == validate["label"])))
# feature importance
df_important = pd.DataFrame(
{"feature_name": model.feature_names_, "importance": model.feature_importances_})
df_important = df_important.sort_values(by=["importance"], ascending=False)
print(df_important)
df_predict_label = pd.DataFrame(
{"FlickrId": validate["FlickrId"], "preds_label": preds})
return model, df_predict_label
def regression_catboost(train, validate):
cat_features = ["UserId"]
# cat_features=[]
p_train, p_validate = np.log(
train["Day30"]/4+1), np.log(validate["Day30"]/4+1)
# p_train,p_validate=train["Day30"],validate["Day30"]
train_data = catboost.Pool(
train.iloc[:, 1:-31], p_train, cat_features=cat_features)
validata_data = catboost.Pool(
validate.iloc[:, 1:-31], p_validate, cat_features=cat_features)
model = catboost.CatBoostRegressor(iterations=35000, learning_rate=0.003, depth=6, objective="MAPE", eval_metric="MAPE", custom_metric=["RMSE", "MAE", "MAPE"], l2_leaf_reg=3.0, min_data_in_leaf=1, boosting_type="Plain", use_best_model=True, thread_count=-1, task_type="GPU", devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=1000, fold_permutation_block=1, bagging_temperature=0)
# model=catboost.CatBoostRegressor(iterations=100000, learning_rate=0.1, depth=6, objective="RMSE", eval_metric="RMSE",custom_metric=["RMSE","MAE","MAPE"], l2_leaf_reg=3.0, min_data_in_leaf=1, boosting_type="Plain", use_best_model=True, thread_count=-1, task_type="CPU",devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=500)
model.fit(train_data, eval_set=validata_data, plot=False)
preds_p_validate = model.predict(validata_data)
preds_day30 = (np.exp(preds_p_validate)-1)*4
src, _ = spearmanr(validate["Day30"], preds_day30)
df_important = pd.DataFrame(
{"feature_name": model.feature_names_, "importance": model.feature_importances_})
df_important = df_important.sort_values(by=["importance"], ascending=False)
print(df_important)
df_predict_day30 = pd.DataFrame(
{"FlickrId": validate["FlickrId"], "Day30": validate["Day30"], "preds_day30": preds_day30})
return model, df_predict_day30
def train(classify_feature_list, regression_feature_list):
df_label = pd.read_csv(cluser_label_filepath)
df_train_popularity = pd.read_csv(train_popularity_filepath)
df_validate_popularity = pd.read_csv(validate_popularity_filepath)
train_label = pd.merge(df_label, df_train_popularity,
on="FlickrId", how="inner")
validate_label = pd.merge(
df_label, df_validate_popularity, on="FlickrId", how="inner")
# Classify
classify_feature = load_feature(classify_feature_list, flag="train")
train = pd.merge(classify_feature, train_label, on="FlickrId", how="inner")
validate = pd.merge(classify_feature, validate_label,
on="FlickrId", how="inner")
classify_model, df_predict_label = calssify_catboost(train, validate)
df_predict_label.to_csv(
"/home/wangkai/ICIP/predict_label.csv", index=False)
df_predict_label = pd.read_csv("/home/wangkai/ICIP/predict_label.csv")
regression_feature = load_feature(regression_feature_list, flag="train")
train = pd.merge(regression_feature, train_label,
on="FlickrId", how="inner")
validate = pd.merge(regression_feature, validate_label,
on="FlickrId", how="inner")
regression_model, df_predict_day30 = regression_catboost(train, validate)
df_predict_day30.to_csv("/home/wangkai/ICIP/temp/predict_day30.csv",index=False)
df_predict_day30=pd.read_csv("/home/wangkai/ICIP/temp/predict_day30.csv")
df_preds = pd.merge(df_predict_label, df_predict_day30,
on="FlickrId", how="inner")
df_cluster_center = pd.read_csv(cluster_center_filepath)
df_temp = pd.merge(df_preds, df_cluster_center, how="left",
left_on="preds_label", right_on="label")
# FlickrId,preds,回归对时,真相
df_preds_result = pd.concat([df_temp["FlickrId"], df_temp.iloc[:, -30:].mul(df_temp["preds_day30"], axis=0),
df_temp.iloc[:, -30:].mul(df_temp["Day30"], axis=0), validate.iloc[:, -30:]], axis=1)
columns = ["FlickrId"]+["preds_day"+str(i+1) for i in range(30)]+[
"regression_truth"+str(i+1) for i in range(30)]+["truth"+str(i+1) for i in range(30)]
df_preds_result.columns = columns
# analysis
y_preds = np.array(df_preds_result.iloc[:, 1:31])
y_regression_true = np.array(df_preds_result.iloc[:, 31:61])
y_true = np.array(df_preds_result.iloc[:, 61:])
# 对于预测结果
rmse_errors = np.sqrt([mean_squared_error(y_true[i], y_preds[i])
for i in range(y_true.shape[0])])
trmse = stats.trim_mean(rmse_errors, 0.25)
median_rmse = np.median(rmse_errors)
src, _ = spearmanr(y_true[:, -1], y_preds[:, -1])
print("\n Predict:")
print("RMSE(trimmed 0.25): {}".format(trmse))
print("RMSE(median): {}".format(median_rmse))
print("SRC: {}".format(src))
# 对于回归为真
rmse_errors = np.sqrt([mean_squared_error(
y_true[i], y_regression_true[i]) for i in range(y_true.shape[0])])
trmse = stats.trim_mean(rmse_errors, 0.25)
median_rmse = np.median(rmse_errors)
src, _ = spearmanr(y_true[:, -1], y_regression_true[:, -1])
print("\n for regression is true:")
print("RMSE(trimmed 0.25): {}".format(trmse))
print("RMSE(median): {}".format(median_rmse))
print("SRC: {}".format(src))
# classify_model = 1
return classify_model, regression_model
def parse_arguments():
parser = argparse.ArgumentParser(description=" ICIP Catboost model")
parser.add_argument("-classify_f", "--classify_feature", type=str,
choices=["original", "fasttext", "tfidf",
"lsa", "lda", "wordchar", "userid", "image"],
nargs="?",
const=["original"],
default=["original", "userid"],
help="which feature will be used for classify")
parser.add_argument("-reg_f", "--regression_feature", type=str,
choices=["original", "fasttext", "tfidf",
"lsa", "lda", "wordchar", "userid", "image"],
nargs="?",
const=["original"],
default=["original", "userid", "lda", "wordchar"],
help="which feature will be used for regression")
parser.add_argument("-output", "--submission_path", type=str,
default="/home/wangkai/ICIP/submission",
help="ICIP file(.csv) will be submit path")
args = parser.parse_args()
return args
if __name__ == "__main__":
args = parse_arguments()
# clutser(num_class=num_class)
calssify_model, regression_model = train(
args.classify_feature, args.regression_feature)
test_classify_feature=load_feature(args.classify_feature,flag="test")
predict_label=calssify_model.predict(test_classify_feature.iloc[:,1:])
df_predict_label=pd.DataFrame({"FlickrId":test_classify_feature["FlickrId"],"preds_label":predict_label.flatten()})
# predict Day30
test_regression_feature=load_feature(args.regression_feature,flag="test")
preds_p_test=regression_model.predict(test_regression_feature.iloc[:,1:])
preds_day30=(np.exp(preds_p_test)-1)*4
df_predict_day30=pd.DataFrame({"FlickrId":test_regression_feature["FlickrId"],"preds_day30":preds_day30})
df_preds=pd.merge(df_predict_label,df_predict_day30,on="FlickrId",how="inner")
df_cluster_center=pd.read_csv(cluster_center_filepath)
df_temp=pd.merge(df_preds,df_cluster_center,how="left",left_on="preds_label",right_on="label")
df_preds_result=pd.concat([df_temp["FlickrId"],df_temp.iloc[:,-30:].mul(df_temp["preds_day30"],axis=0)],axis=1)
df_preds_result.columns=pd.read_csv(all_popularity_filepath).columns
print(df_preds_result)
submission_filepath=os.path.join(args.submission_path,"1_submission.csv")
df_preds_result.to_csv(submission_filepath,index=False) | from scipy import stats
from scipy.stats import spearmanr
from sklearn.cluster import KMeans | random_line_split |
icip_train_val.py | # !/usr/bin/env python
# coding: utf-8
'''
@File : boost.py
@Time : 2020/04/13 13:46:21
@Author : Wang Kai
@Version : 1.0
@Contact : wk15@mail.ustc.edu.cn
'''
# This is used to build catboost model using extracted features
import argparse
import gc
import time
import os
import math
import catboost
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from scipy import stats
from scipy.stats import spearmanr
from sklearn.cluster import KMeans
from sklearn.metrics import mean_absolute_error, mean_squared_error
from sklearn.model_selection import train_test_split
random_seed = 2020
num_class = 50
os.environ["CUDA_VISIBLE_DEVICES"] = "2"
all_popularity_filepath = "/home/wangkai/ICIP/feature/label/popularity_TRAIN_20337.csv"
cluster_center_filepath = "/home/wangkai/ICIP/feature/label/cluster_center.csv"
cluser_label_filepath = "/home/wangkai/ICIP/feature/label/cluster_label_20337.csv"
# random
train_popularity_filepath = "/home/wangkai/ICIP/feature/label/train_label_random.csv"
validate_popularity_filepath = "/home/wangkai/ICIP/feature/label/validate_label_random.csv"
# # postdate
train_popularity_filepath="/home/wangkai/ICIP/feature/label/train_label_datetaken.csv"
validate_popularity_filepath="/home/wangkai/ICIP/feature/label/validate_label_datetaken.csv"
number_columns = ["PhotoCount", "MeanViews", "Contacts", "GroupsCount", "NumSets", "GroupsAvgPictures",
"GroupsAvgMembers", "Ispro", "HasStats", "AvgGroupsMemb", "AvgGroupPhotos", "NumGroups"] # 12
text_columns = ["Tags", "Title", "Description"] # 3
first_columns = ["FlickrId", "UserId"] # 2
train_feature_filepath = {
"original": "/home/wangkai/ICIP/feature/train/train_feature_20337.csv",
"fasttext": "/home/wangkai/ICIP/feature/train/FastText_tags+des_20337.csv",
"tfidf": "/home/wangkai/ICIP/feature/train/Tfidf_tags+des_20337.csv",
"lsa": "/home/wangkai/ICIP/feature/train/LSA_tags+title+des_20337.csv",
"lda": "/home/wangkai/ICIP/feature/train/LDA_tags+title+des_20337.csv",
"wordchar": "/home/wangkai/ICIP/feature/train/wordchar_tags+title+des_20337.csv",
"userid": "/home/wangkai/ICIP/feature/train/UserId256_20337.csv",
"image": "/home/wangkai/ICIP/feature/train/ResNext101_image_20337.csv"
}
test_feature_filepath = {
"original": "/home/wangkai/ICIP/feature/test/test_feature_7693.csv",
"fasttext": "/home/wangkai/ICIP/feature/test/FastText_tags+des_7693.csv",
"tfidf": "/home/wangkai/ICIP/feature/test/Tfidf_tags+des_7693.csv",
"lsa": "/home/wangkai/ICIP/feature/test/LSA_tags+title+des_7693.csv",
"lda": "/home/wangkai/ICIP/feature/test/LDA_tags+title+des_7693.csv",
"wordchar": "/home/wangkai/ICIP/feature/test/wordchar_tags+title+des_7693.csv",
"userid": "/home/wangkai/ICIP/feature/test/UserId256_7693.csv",
"image": "/home/wangkai/ICIP/feature/test/ResNext101_image_7693.csv"
}
def clutser(num_class=num_class):
| feature_list, flag="train"):
feature_path = train_feature_filepath if flag == "train" else test_feature_filepath
for i, feature_name in enumerate(feature_list):
print("Loading {} ...".format(feature_name))
feature = pd.read_csv(feature_path[feature_name])
print("feature: {}, len:{}".format(
feature_name, len(feature.columns)-1))
if i == 0:
all_feature = feature
else:
all_feature = pd.merge(all_feature, feature)
useless = text_columns
all_feature.drop(useless, axis=1, inplace=True)
print(all_feature)
return all_feature
def calssify_catboost(train, validate):
cat_features = ["UserId"]
# cat_features=[]
train_data = catboost.Pool(
train.iloc[:, 1:-31], train["label"], cat_features=cat_features)
validata_data = catboost.Pool(
validate.iloc[:, 1:-31], validate["label"], cat_features=cat_features)
model = catboost.CatBoostClassifier(iterations=10000, learning_rate=0.01, depth=6, objective="MultiClass", classes_count=num_class, eval_metric="Accuracy", l2_leaf_reg=3.0,
min_data_in_leaf=1, boosting_type="Plain", use_best_model=False, thread_count=-1, task_type="GPU", devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=1000)
model = model.fit(train_data, eval_set=validata_data, plot=False)
# predict label
preds = model.predict(validata_data)
preds = preds.flatten()
print("\nValidate\nACC: {}\tTotal right: {}".format(
np.sum(preds == validate["label"])/len(preds), np.sum(preds == validate["label"])))
# feature importance
df_important = pd.DataFrame(
{"feature_name": model.feature_names_, "importance": model.feature_importances_})
df_important = df_important.sort_values(by=["importance"], ascending=False)
print(df_important)
df_predict_label = pd.DataFrame(
{"FlickrId": validate["FlickrId"], "preds_label": preds})
return model, df_predict_label
def regression_catboost(train, validate):
cat_features = ["UserId"]
# cat_features=[]
p_train, p_validate = np.log(
train["Day30"]/4+1), np.log(validate["Day30"]/4+1)
# p_train,p_validate=train["Day30"],validate["Day30"]
train_data = catboost.Pool(
train.iloc[:, 1:-31], p_train, cat_features=cat_features)
validata_data = catboost.Pool(
validate.iloc[:, 1:-31], p_validate, cat_features=cat_features)
model = catboost.CatBoostRegressor(iterations=35000, learning_rate=0.003, depth=6, objective="MAPE", eval_metric="MAPE", custom_metric=["RMSE", "MAE", "MAPE"], l2_leaf_reg=3.0, min_data_in_leaf=1, boosting_type="Plain", use_best_model=True, thread_count=-1, task_type="GPU", devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=1000, fold_permutation_block=1, bagging_temperature=0)
# model=catboost.CatBoostRegressor(iterations=100000, learning_rate=0.1, depth=6, objective="RMSE", eval_metric="RMSE",custom_metric=["RMSE","MAE","MAPE"], l2_leaf_reg=3.0, min_data_in_leaf=1, boosting_type="Plain", use_best_model=True, thread_count=-1, task_type="CPU",devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=500)
model.fit(train_data, eval_set=validata_data, plot=False)
preds_p_validate = model.predict(validata_data)
preds_day30 = (np.exp(preds_p_validate)-1)*4
src, _ = spearmanr(validate["Day30"], preds_day30)
df_important = pd.DataFrame(
{"feature_name": model.feature_names_, "importance": model.feature_importances_})
df_important = df_important.sort_values(by=["importance"], ascending=False)
print(df_important)
df_predict_day30 = pd.DataFrame(
{"FlickrId": validate["FlickrId"], "Day30": validate["Day30"], "preds_day30": preds_day30})
return model, df_predict_day30
def train(classify_feature_list, regression_feature_list):
df_label = pd.read_csv(cluser_label_filepath)
df_train_popularity = pd.read_csv(train_popularity_filepath)
df_validate_popularity = pd.read_csv(validate_popularity_filepath)
train_label = pd.merge(df_label, df_train_popularity,
on="FlickrId", how="inner")
validate_label = pd.merge(
df_label, df_validate_popularity, on="FlickrId", how="inner")
# Classify
classify_feature = load_feature(classify_feature_list, flag="train")
train = pd.merge(classify_feature, train_label, on="FlickrId", how="inner")
validate = pd.merge(classify_feature, validate_label,
on="FlickrId", how="inner")
classify_model, df_predict_label = calssify_catboost(train, validate)
df_predict_label.to_csv(
"/home/wangkai/ICIP/predict_label.csv", index=False)
df_predict_label = pd.read_csv("/home/wangkai/ICIP/predict_label.csv")
regression_feature = load_feature(regression_feature_list, flag="train")
train = pd.merge(regression_feature, train_label,
on="FlickrId", how="inner")
validate = pd.merge(regression_feature, validate_label,
on="FlickrId", how="inner")
regression_model, df_predict_day30 = regression_catboost(train, validate)
df_predict_day30.to_csv("/home/wangkai/ICIP/temp/predict_day30.csv",index=False)
df_predict_day30=pd.read_csv("/home/wangkai/ICIP/temp/predict_day30.csv")
df_preds = pd.merge(df_predict_label, df_predict_day30,
on="FlickrId", how="inner")
df_cluster_center = pd.read_csv(cluster_center_filepath)
df_temp = pd.merge(df_preds, df_cluster_center, how="left",
left_on="preds_label", right_on="label")
# FlickrId,preds,回归对时,真相
df_preds_result = pd.concat([df_temp["FlickrId"], df_temp.iloc[:, -30:].mul(df_temp["preds_day30"], axis=0),
df_temp.iloc[:, -30:].mul(df_temp["Day30"], axis=0), validate.iloc[:, -30:]], axis=1)
columns = ["FlickrId"]+["preds_day"+str(i+1) for i in range(30)]+[
"regression_truth"+str(i+1) for i in range(30)]+["truth"+str(i+1) for i in range(30)]
df_preds_result.columns = columns
# analysis
y_preds = np.array(df_preds_result.iloc[:, 1:31])
y_regression_true = np.array(df_preds_result.iloc[:, 31:61])
y_true = np.array(df_preds_result.iloc[:, 61:])
# 对于预测结果
rmse_errors = np.sqrt([mean_squared_error(y_true[i], y_preds[i])
for i in range(y_true.shape[0])])
trmse = stats.trim_mean(rmse_errors, 0.25)
median_rmse = np.median(rmse_errors)
src, _ = spearmanr(y_true[:, -1], y_preds[:, -1])
print("\n Predict:")
print("RMSE(trimmed 0.25): {}".format(trmse))
print("RMSE(median): {}".format(median_rmse))
print("SRC: {}".format(src))
# 对于回归为真
rmse_errors = np.sqrt([mean_squared_error(
y_true[i], y_regression_true[i]) for i in range(y_true.shape[0])])
trmse = stats.trim_mean(rmse_errors, 0.25)
median_rmse = np.median(rmse_errors)
src, _ = spearmanr(y_true[:, -1], y_regression_true[:, -1])
print("\n for regression is true:")
print("RMSE(trimmed 0.25): {}".format(trmse))
print("RMSE(median): {}".format(median_rmse))
print("SRC: {}".format(src))
# classify_model = 1
return classify_model, regression_model
def parse_arguments():
parser = argparse.ArgumentParser(description=" ICIP Catboost model")
parser.add_argument("-classify_f", "--classify_feature", type=str,
choices=["original", "fasttext", "tfidf",
"lsa", "lda", "wordchar", "userid", "image"],
nargs="?",
const=["original"],
default=["original", "userid"],
help="which feature will be used for classify")
parser.add_argument("-reg_f", "--regression_feature", type=str,
choices=["original", "fasttext", "tfidf",
"lsa", "lda", "wordchar", "userid", "image"],
nargs="?",
const=["original"],
default=["original", "userid", "lda", "wordchar"],
help="which feature will be used for regression")
parser.add_argument("-output", "--submission_path", type=str,
default="/home/wangkai/ICIP/submission",
help="ICIP file(.csv) will be submit path")
args = parser.parse_args()
return args
if __name__ == "__main__":
args = parse_arguments()
# clutser(num_class=num_class)
calssify_model, regression_model = train(
args.classify_feature, args.regression_feature)
test_classify_feature=load_feature(args.classify_feature,flag="test")
predict_label=calssify_model.predict(test_classify_feature.iloc[:,1:])
df_predict_label=pd.DataFrame({"FlickrId":test_classify_feature["FlickrId"],"preds_label":predict_label.flatten()})
# predict Day30
test_regression_feature=load_feature(args.regression_feature,flag="test")
preds_p_test=regression_model.predict(test_regression_feature.iloc[:,1:])
preds_day30=(np.exp(preds_p_test)-1)*4
df_predict_day30=pd.DataFrame({"FlickrId":test_regression_feature["FlickrId"],"preds_day30":preds_day30})
df_preds=pd.merge(df_predict_label,df_predict_day30,on="FlickrId",how="inner")
df_cluster_center=pd.read_csv(cluster_center_filepath)
df_temp=pd.merge(df_preds,df_cluster_center,how="left",left_on="preds_label",right_on="label")
df_preds_result=pd.concat([df_temp["FlickrId"],df_temp.iloc[:,-30:].mul(df_temp["preds_day30"],axis=0)],axis=1)
df_preds_result.columns=pd.read_csv(all_popularity_filepath).columns
print(df_preds_result)
submission_filepath=os.path.join(args.submission_path,"1_submission.csv")
df_preds_result.to_csv(submission_filepath,index=False) | df_popularity = pd.read_csv(all_popularity_filepath)
# 归一化
normalized_popularity = df_popularity.iloc[:, 1:].div(
df_popularity["Day30"], axis=0)
# 聚类的label
kmeans = KMeans(n_clusters=num_class, init="k-means++", n_init=100, max_iter=10000,
random_state=random_seed, n_jobs=-1, algorithm="auto").fit(normalized_popularity)
df_label = pd.DataFrame(
{"FlickrId": df_popularity["FlickrId"], "label": kmeans.labels_})
df_label.to_csv(cluser_label_filepath, index=False)
# 聚类中心
df_cluster_center = pd.DataFrame(kmeans.cluster_centers_)
df_cluster_center.columns = ["day"+str(i+1) for i in range(30)]
df_cluster_center.insert(0, column="label", value=np.arange(num_class))
df_cluster_center.to_csv(cluster_center_filepath, index=False)
def load_feature( | identifier_body |
icip_train_val.py | # !/usr/bin/env python
# coding: utf-8
'''
@File : boost.py
@Time : 2020/04/13 13:46:21
@Author : Wang Kai
@Version : 1.0
@Contact : wk15@mail.ustc.edu.cn
'''
# This is used to build catboost model using extracted features
import argparse
import gc
import time
import os
import math
import catboost
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from scipy import stats
from scipy.stats import spearmanr
from sklearn.cluster import KMeans
from sklearn.metrics import mean_absolute_error, mean_squared_error
from sklearn.model_selection import train_test_split
random_seed = 2020
num_class = 50
os.environ["CUDA_VISIBLE_DEVICES"] = "2"
all_popularity_filepath = "/home/wangkai/ICIP/feature/label/popularity_TRAIN_20337.csv"
cluster_center_filepath = "/home/wangkai/ICIP/feature/label/cluster_center.csv"
cluser_label_filepath = "/home/wangkai/ICIP/feature/label/cluster_label_20337.csv"
# random
train_popularity_filepath = "/home/wangkai/ICIP/feature/label/train_label_random.csv"
validate_popularity_filepath = "/home/wangkai/ICIP/feature/label/validate_label_random.csv"
# # postdate
train_popularity_filepath="/home/wangkai/ICIP/feature/label/train_label_datetaken.csv"
validate_popularity_filepath="/home/wangkai/ICIP/feature/label/validate_label_datetaken.csv"
number_columns = ["PhotoCount", "MeanViews", "Contacts", "GroupsCount", "NumSets", "GroupsAvgPictures",
"GroupsAvgMembers", "Ispro", "HasStats", "AvgGroupsMemb", "AvgGroupPhotos", "NumGroups"] # 12
text_columns = ["Tags", "Title", "Description"] # 3
first_columns = ["FlickrId", "UserId"] # 2
train_feature_filepath = {
"original": "/home/wangkai/ICIP/feature/train/train_feature_20337.csv",
"fasttext": "/home/wangkai/ICIP/feature/train/FastText_tags+des_20337.csv",
"tfidf": "/home/wangkai/ICIP/feature/train/Tfidf_tags+des_20337.csv",
"lsa": "/home/wangkai/ICIP/feature/train/LSA_tags+title+des_20337.csv",
"lda": "/home/wangkai/ICIP/feature/train/LDA_tags+title+des_20337.csv",
"wordchar": "/home/wangkai/ICIP/feature/train/wordchar_tags+title+des_20337.csv",
"userid": "/home/wangkai/ICIP/feature/train/UserId256_20337.csv",
"image": "/home/wangkai/ICIP/feature/train/ResNext101_image_20337.csv"
}
test_feature_filepath = {
"original": "/home/wangkai/ICIP/feature/test/test_feature_7693.csv",
"fasttext": "/home/wangkai/ICIP/feature/test/FastText_tags+des_7693.csv",
"tfidf": "/home/wangkai/ICIP/feature/test/Tfidf_tags+des_7693.csv",
"lsa": "/home/wangkai/ICIP/feature/test/LSA_tags+title+des_7693.csv",
"lda": "/home/wangkai/ICIP/feature/test/LDA_tags+title+des_7693.csv",
"wordchar": "/home/wangkai/ICIP/feature/test/wordchar_tags+title+des_7693.csv",
"userid": "/home/wangkai/ICIP/feature/test/UserId256_7693.csv",
"image": "/home/wangkai/ICIP/feature/test/ResNext101_image_7693.csv"
}
def clutser(num_class=num_class):
df_popularity = pd.read_csv(all_popularity_filepath)
# 归一化
normalized_popularity = df_popularity.iloc[:, 1:].div(
df_popularity["Day30"], axis=0)
# 聚类的label
kmeans = KMeans(n_clusters=num_class, init="k-means++", n_init=100, max_iter=10000,
random_state=random_seed, n_jobs=-1, algorithm="auto").fit(normalized_popularity)
df_label = pd.DataFrame(
{"FlickrId": df_popularity["FlickrId"], "label": kmeans.labels_})
df_label.to_csv(cluser_label_filepath, index=False)
# 聚类中心
df_cluster_center = pd.DataFrame(kmeans.cluster_centers_)
df_cluster_center.columns = ["day"+str(i+1) for i in range(30)]
df_cluster_center.insert(0, column="label", value=np.arange(num_class))
df_cluster_center.to_csv(cluster_center_filepath, index=False)
def load_feature(feature_list, flag="train"):
feature_path = train_feature_filepath if flag == "train" else test_feature_filepath
for i, feature_name in enumerate(feature_list):
print("Loading {} ...".format(feature_name))
feature = pd.read_csv(feature_path[feature_name])
print("feature: {}, len:{}".format(
feature_name, len(feature.columns)-1))
if i == 0:
all_feature = feature
else:
all_feature = pd.merge(all_feature, feature)
useless = text_columns
all_feature.drop(useless, axis=1, inplace=True)
print(all_feature)
return all_feature
def calssify_catboost(tr | cat_features = ["UserId"]
# cat_features=[]
train_data = catboost.Pool(
train.iloc[:, 1:-31], train["label"], cat_features=cat_features)
validata_data = catboost.Pool(
validate.iloc[:, 1:-31], validate["label"], cat_features=cat_features)
model = catboost.CatBoostClassifier(iterations=10000, learning_rate=0.01, depth=6, objective="MultiClass", classes_count=num_class, eval_metric="Accuracy", l2_leaf_reg=3.0,
min_data_in_leaf=1, boosting_type="Plain", use_best_model=False, thread_count=-1, task_type="GPU", devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=1000)
model = model.fit(train_data, eval_set=validata_data, plot=False)
# predict label
preds = model.predict(validata_data)
preds = preds.flatten()
print("\nValidate\nACC: {}\tTotal right: {}".format(
np.sum(preds == validate["label"])/len(preds), np.sum(preds == validate["label"])))
# feature importance
df_important = pd.DataFrame(
{"feature_name": model.feature_names_, "importance": model.feature_importances_})
df_important = df_important.sort_values(by=["importance"], ascending=False)
print(df_important)
df_predict_label = pd.DataFrame(
{"FlickrId": validate["FlickrId"], "preds_label": preds})
return model, df_predict_label
def regression_catboost(train, validate):
cat_features = ["UserId"]
# cat_features=[]
p_train, p_validate = np.log(
train["Day30"]/4+1), np.log(validate["Day30"]/4+1)
# p_train,p_validate=train["Day30"],validate["Day30"]
train_data = catboost.Pool(
train.iloc[:, 1:-31], p_train, cat_features=cat_features)
validata_data = catboost.Pool(
validate.iloc[:, 1:-31], p_validate, cat_features=cat_features)
model = catboost.CatBoostRegressor(iterations=35000, learning_rate=0.003, depth=6, objective="MAPE", eval_metric="MAPE", custom_metric=["RMSE", "MAE", "MAPE"], l2_leaf_reg=3.0, min_data_in_leaf=1, boosting_type="Plain", use_best_model=True, thread_count=-1, task_type="GPU", devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=1000, fold_permutation_block=1, bagging_temperature=0)
# model=catboost.CatBoostRegressor(iterations=100000, learning_rate=0.1, depth=6, objective="RMSE", eval_metric="RMSE",custom_metric=["RMSE","MAE","MAPE"], l2_leaf_reg=3.0, min_data_in_leaf=1, boosting_type="Plain", use_best_model=True, thread_count=-1, task_type="CPU",devices="0", random_state=random_seed, verbose=300, early_stopping_rounds=500)
model.fit(train_data, eval_set=validata_data, plot=False)
preds_p_validate = model.predict(validata_data)
preds_day30 = (np.exp(preds_p_validate)-1)*4
src, _ = spearmanr(validate["Day30"], preds_day30)
df_important = pd.DataFrame(
{"feature_name": model.feature_names_, "importance": model.feature_importances_})
df_important = df_important.sort_values(by=["importance"], ascending=False)
print(df_important)
df_predict_day30 = pd.DataFrame(
{"FlickrId": validate["FlickrId"], "Day30": validate["Day30"], "preds_day30": preds_day30})
return model, df_predict_day30
def train(classify_feature_list, regression_feature_list):
df_label = pd.read_csv(cluser_label_filepath)
df_train_popularity = pd.read_csv(train_popularity_filepath)
df_validate_popularity = pd.read_csv(validate_popularity_filepath)
train_label = pd.merge(df_label, df_train_popularity,
on="FlickrId", how="inner")
validate_label = pd.merge(
df_label, df_validate_popularity, on="FlickrId", how="inner")
# Classify
classify_feature = load_feature(classify_feature_list, flag="train")
train = pd.merge(classify_feature, train_label, on="FlickrId", how="inner")
validate = pd.merge(classify_feature, validate_label,
on="FlickrId", how="inner")
classify_model, df_predict_label = calssify_catboost(train, validate)
df_predict_label.to_csv(
"/home/wangkai/ICIP/predict_label.csv", index=False)
df_predict_label = pd.read_csv("/home/wangkai/ICIP/predict_label.csv")
regression_feature = load_feature(regression_feature_list, flag="train")
train = pd.merge(regression_feature, train_label,
on="FlickrId", how="inner")
validate = pd.merge(regression_feature, validate_label,
on="FlickrId", how="inner")
regression_model, df_predict_day30 = regression_catboost(train, validate)
df_predict_day30.to_csv("/home/wangkai/ICIP/temp/predict_day30.csv",index=False)
df_predict_day30=pd.read_csv("/home/wangkai/ICIP/temp/predict_day30.csv")
df_preds = pd.merge(df_predict_label, df_predict_day30,
on="FlickrId", how="inner")
df_cluster_center = pd.read_csv(cluster_center_filepath)
df_temp = pd.merge(df_preds, df_cluster_center, how="left",
left_on="preds_label", right_on="label")
# FlickrId,preds,回归对时,真相
df_preds_result = pd.concat([df_temp["FlickrId"], df_temp.iloc[:, -30:].mul(df_temp["preds_day30"], axis=0),
df_temp.iloc[:, -30:].mul(df_temp["Day30"], axis=0), validate.iloc[:, -30:]], axis=1)
columns = ["FlickrId"]+["preds_day"+str(i+1) for i in range(30)]+[
"regression_truth"+str(i+1) for i in range(30)]+["truth"+str(i+1) for i in range(30)]
df_preds_result.columns = columns
# analysis
y_preds = np.array(df_preds_result.iloc[:, 1:31])
y_regression_true = np.array(df_preds_result.iloc[:, 31:61])
y_true = np.array(df_preds_result.iloc[:, 61:])
# 对于预测结果
rmse_errors = np.sqrt([mean_squared_error(y_true[i], y_preds[i])
for i in range(y_true.shape[0])])
trmse = stats.trim_mean(rmse_errors, 0.25)
median_rmse = np.median(rmse_errors)
src, _ = spearmanr(y_true[:, -1], y_preds[:, -1])
print("\n Predict:")
print("RMSE(trimmed 0.25): {}".format(trmse))
print("RMSE(median): {}".format(median_rmse))
print("SRC: {}".format(src))
# 对于回归为真
rmse_errors = np.sqrt([mean_squared_error(
y_true[i], y_regression_true[i]) for i in range(y_true.shape[0])])
trmse = stats.trim_mean(rmse_errors, 0.25)
median_rmse = np.median(rmse_errors)
src, _ = spearmanr(y_true[:, -1], y_regression_true[:, -1])
print("\n for regression is true:")
print("RMSE(trimmed 0.25): {}".format(trmse))
print("RMSE(median): {}".format(median_rmse))
print("SRC: {}".format(src))
# classify_model = 1
return classify_model, regression_model
def parse_arguments():
parser = argparse.ArgumentParser(description=" ICIP Catboost model")
parser.add_argument("-classify_f", "--classify_feature", type=str,
choices=["original", "fasttext", "tfidf",
"lsa", "lda", "wordchar", "userid", "image"],
nargs="?",
const=["original"],
default=["original", "userid"],
help="which feature will be used for classify")
parser.add_argument("-reg_f", "--regression_feature", type=str,
choices=["original", "fasttext", "tfidf",
"lsa", "lda", "wordchar", "userid", "image"],
nargs="?",
const=["original"],
default=["original", "userid", "lda", "wordchar"],
help="which feature will be used for regression")
parser.add_argument("-output", "--submission_path", type=str,
default="/home/wangkai/ICIP/submission",
help="ICIP file(.csv) will be submit path")
args = parser.parse_args()
return args
if __name__ == "__main__":
args = parse_arguments()
# clutser(num_class=num_class)
calssify_model, regression_model = train(
args.classify_feature, args.regression_feature)
test_classify_feature=load_feature(args.classify_feature,flag="test")
predict_label=calssify_model.predict(test_classify_feature.iloc[:,1:])
df_predict_label=pd.DataFrame({"FlickrId":test_classify_feature["FlickrId"],"preds_label":predict_label.flatten()})
# predict Day30
test_regression_feature=load_feature(args.regression_feature,flag="test")
preds_p_test=regression_model.predict(test_regression_feature.iloc[:,1:])
preds_day30=(np.exp(preds_p_test)-1)*4
df_predict_day30=pd.DataFrame({"FlickrId":test_regression_feature["FlickrId"],"preds_day30":preds_day30})
df_preds=pd.merge(df_predict_label,df_predict_day30,on="FlickrId",how="inner")
df_cluster_center=pd.read_csv(cluster_center_filepath)
df_temp=pd.merge(df_preds,df_cluster_center,how="left",left_on="preds_label",right_on="label")
df_preds_result=pd.concat([df_temp["FlickrId"],df_temp.iloc[:,-30:].mul(df_temp["preds_day30"],axis=0)],axis=1)
df_preds_result.columns=pd.read_csv(all_popularity_filepath).columns
print(df_preds_result)
submission_filepath=os.path.join(args.submission_path,"1_submission.csv")
df_preds_result.to_csv(submission_filepath,index=False) | ain, validate):
| identifier_name |
offset.rs | use std::alloc::{GlobalAlloc, System, Layout};
use std::borrow::Borrow;
use std::cmp;
use std::convert::TryInto;
use std::fmt;
use std::marker::PhantomData;
use std::mem::{self, ManuallyDrop};
use std::num::NonZeroU64;
use std::ptr::NonNull;
use std::hint::unreachable_unchecked;
use thiserror::Error;
use leint::Le;
use owned::{IntoOwned, Take};
use crate::pointee::Pointee;
use crate::refs::Ref;
use crate::blob::*;
use crate::load::*;
use crate::save::*;
use crate::scalar::*;
use crate::ptr::*;
use crate::pile::*;
use crate::heap::*;
#[derive(Clone, Copy)]
#[repr(transparent)]
pub struct Offset<'pile, 'version> {
marker: PhantomData<(
fn(&'pile ()) -> &'pile (),
&'version (),
)>,
raw: Le<NonZeroU64>,
}
unsafe impl Persist for Offset<'_, '_> {}
impl fmt::Debug for Offset<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.get().fmt(f)
}
}
#[derive(Debug, Error)]
#[error("invalid offset")]
#[non_exhaustive]
pub struct ValidateOffsetBlobError;
impl Scalar for Offset<'_, '_> {
const BLOB_LAYOUT: BlobLayout = BlobLayout::new_nonzero(mem::size_of::<Self>());
type ScalarBlobError = ValidateOffsetBlobError;
fn validate_blob<'a>(blob: Blob<'a, Self>) -> Result<ValidBlob<'a, Self>, Self::ScalarBlobError> {
let raw = u64::from_le_bytes(blob.as_bytes().try_into().unwrap());
if raw & 0b1 == 0b1 && (raw >> 1) < Offset::MAX as u64 {
unsafe { Ok(blob.assume_valid()) }
} else {
Err(ValidateOffsetBlobError)
}
}
fn decode_blob<'a>(blob: ValidBlob<'a, Self>) -> Self {
blob.as_value().clone()
}
fn try_deref_blob<'a>(blob: ValidBlob<'a, Self>) -> Result<&'a Self, ValidBlob<'a, Self>> {
Ok(blob.as_value())
}
fn encode_blob<W: WriteBlob>(&self, dst: W) -> Result<W::Ok, W::Error> {
todo!()
}
}
impl AsPtrImpl<Self> for Offset<'_, '_> {
fn as_ptr_impl(this: &Self) -> &Self {
this
}
}
impl<'p, 'v> PersistPtr for Offset<'p, 'v> {
type Zone = !;
type BlobZone = TryPile<'p, 'v>;
}
#[derive(Clone, Copy)]
#[repr(transparent)]
pub struct OffsetMut<'p, 'v, A = System> {
marker: PhantomData<A>,
inner: Offset<'p, 'v>,
}
unsafe impl Persist for OffsetMut<'_, '_> {}
impl fmt::Debug for OffsetMut<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.kind().fmt(f)
}
}
unsafe impl ValidateBlob for OffsetMut<'_, '_> {
type BlobError = ValidateOffsetBlobError;
fn try_blob_layout(_: ()) -> Result<BlobLayout, !> {
Ok(BlobLayout::new_nonzero(mem::size_of::<Self>()))
}
fn validate_blob<'a>(blob: Blob<'a, Self>, ignore_padding: bool) -> Result<ValidBlob<'a, Self>, Self::BlobError> {
let mut fields = blob.validate_fields(ignore_padding);
fields.validate_blob::<Offset>()?;
unsafe { Ok(fields.finish()) }
}
}
impl Load for OffsetMut<'_, '_> {
type Ptr = !;
fn decode_blob(blob: ValidBlob<Self>, _: &<Self::Ptr as Ptr>::BlobZone) -> Self {
blob.as_value().clone()
}
fn try_deref_blob<'a>(blob: ValidBlob<'a, Self>, _: &()) -> Result<&'a Self, ValidBlob<'a, Self>> {
Ok(blob.as_value())
}
}
impl AsPtrImpl<Self> for OffsetMut<'_, '_> {
fn as_ptr_impl(this: &Self) -> &Self {
this
}
}
/*
impl<'p, 'v, A> Borrow<OffsetMut<'p, 'v, A>> for Offset<'p, 'v> {
#[inline(always)]
fn borrow(&self) -> &OffsetMut<'p, 'v, A> {
self.as_ref()
}
}
impl<'p, 'v, A> AsRef<OffsetMut<'p, 'v, A>> for Offset<'p, 'v> {
#[inline(always)]
fn as_ref(&self) -> &OffsetMut<'p, 'v, A> {
// SAFETY: #[repr(transparent)]
unsafe { &*(self as *const Self as *const _) }
}
}
*/
impl<'p, 'v> From<Offset<'p, 'v>> for usize {
fn from(offset: Offset<'p, 'v>) -> usize {
offset.get()
}
}
impl<'p, 'v> From<Offset<'p, 'v>> for OffsetMut<'p, 'v> {
fn from(inner: Offset<'p, 'v>) -> Self {
Self {
marker: PhantomData,
inner,
}
}
}
impl cmp::PartialEq<usize> for Offset<'_, '_> {
fn eq(&self, other: &usize) -> bool {
self.get() == *other
}
}
impl cmp::PartialEq<Offset<'_, '_>> for usize {
fn eq(&self, other: &Offset<'_, '_>) -> bool {
*self == other.get()
}
}
impl<'p, 'v> Offset<'p, 'v> {
/// The largest `Offset`.
pub const MAX: usize = (1 << 62) - 1;
/// Creates a new `Offset`.
///
/// Returns `None` if the offset is out of range:
///
/// ```
/// use hoard::offset::Offset;
///
/// assert!(Offset::new(Offset::MAX + 1)
/// .is_none());
/// ```
///
/// # Examples
///
/// Zero is a valid offset:
///
/// ```
/// use hoard::offset::Offset;
///
/// Offset::new(0).unwrap();
/// ```
pub fn new(offset: usize) -> Option<Self> {
if offset <= Self::MAX {
let offset = offset as u64;
Some(offset.checked_shl(1).map(|offset|
Self {
marker: PhantomData,
raw: NonZeroU64::new(offset | 1).unwrap().into(),
}
).unwrap())
} else {
None
}
}
/// Casts the `Offset` to a different lifetime.
///
/// This is *safe* because an offset by itself has no guarantees associated with it.
#[inline(always)]
pub fn cast<'p2, 'v2>(&self) -> Offset<'p2, 'v2> {
Offset {
marker: PhantomData,
raw: self.raw,
}
}
/// Gets the offset as a `usize`.
///
/// # Examples
///
/// ```
/// use hoard::offset::Offset;
///
/// assert_eq!(Offset::new(0).unwrap().get(), 0);
/// assert_eq!(Offset::new(1).unwrap().get(), 1);
/// ```
#[inline(always)]
pub fn get(&self) -> usize {
(self.raw.get().get() >> 1) as usize
}
/// Creates a dangling `Offset`.
///
/// # Examples
///
/// ```
/// use hoard::offset::Offset;
///
/// assert_eq!(Offset::dangling().get(), Offset::MAX);
/// ```
#[inline(always)]
pub fn dangling() -> Self {
Self::new(Self::MAX).unwrap()
}
/// Erases the lifetime of an `Offset`.
pub fn to_static(&self) -> Offset<'static, 'static> {
Offset {
marker: PhantomData,
raw: self.raw,
}
}
}
/// Enum for the kinds of `OffsetMut`.
#[derive(Debug)]
pub enum Kind<'p, 'v> {
/// An unmodified `Offset`.
Offset(Offset<'p, 'v>),
/// A pointer to something in the heap.
Ptr(HeapPtr),
}
impl<'p, 'v, A> OffsetMut<'p, 'v, A> {
/// Create an `OffsetMut` from a pointer.
///
/// Returns `None` if the alignment is incorrect.
#[inline]
pub fn from_ptr(ptr: NonNull<u16>) -> Option<Self> {
let raw = ptr.as_ptr() as usize as u64;
if raw & 1 == 1 {
unsafe { Some(mem::transmute(ptr.as_ptr() as usize as u64)) }
} else {
None
}
}
/// Creates an `OffsetMut` from a pointer without checking the alignment.
///
/// # Safety
///
/// The pointer must be properly aligned.
#[inline]
pub unsafe fn from_ptr_unchecked(ptr: NonNull<u16>) -> Self {
match Self::from_ptr(ptr) {
Some(this) => this,
None => {
unreachable_unchecked()
}
}
}
/// Returns the kind of offset.
pub fn kind(&self) -> Kind<'p, 'v> {
if self.inner.raw.get().get() & 1 == 1 {
Kind::Offset(self.inner)
} else {
Kind::Ptr(unsafe { mem::transmute(self.inner) })
}
}
/// Gets the `Offset` from a clean `OffsetMut`.
#[inline(always)]
pub fn get_offset(&self) -> Option<Offset<'p, 'v>> {
match self.kind() {
Kind::Offset(offset) => Some(offset),
Kind::Ptr(_) => None,
}
}
/// Gets the pointer from a dirty `OffsetMut`.
#[inline(always)]
pub fn get_ptr(&self) -> Option<HeapPtr> {
match self.kind() {
Kind::Ptr(ptr) => Some(ptr),
Kind::Offset(_) => None,
}
}
}
/*
impl<'p, 'v, A> AsPtr<OffsetMut<'p, 'v, A>> for HeapPtr {
#[inline(always)]
fn as_ptr(&self) -> &OffsetMut<'p, 'v, A> {
static_assertions::assert_eq_size!(OffsetMut, HeapPtr);
unsafe {
&*(self as *const _ as *const _)
}
}
}
*/
impl<'p, 'v> Ptr for OffsetMut<'p, 'v> {
type Zone = TryPile<'p, 'v>;
type BlobZone = TryPile<'p, 'v>;
type Persist = Offset<'p, 'v>;
unsafe fn dealloc<T: ?Sized + Pointee>(&self, metadata: T::Metadata) {
match self.kind() {
Kind::Offset(_) => {},
Kind::Ptr(heap_ptr) => heap_ptr.dealloc::<T>(metadata),
}
}
unsafe fn try_get_dirty_unchecked<T: ?Sized + Pointee>(&self, metadata: T::Metadata) -> Result<&T, Self::Persist> {
match self.kind() {
Kind::Ptr(ptr) => {
todo!()
},
Kind::Offset(offset) => Err(offset),
}
}
}
impl<'p,'v> Default for OffsetMut<'p, 'v> {
fn default() -> Self {
Offset::dangling().into()
}
}
#[derive(Debug, Default)]
pub struct ShallowDumper<'p, 'v> {
marker: PhantomData<OffsetMut<'p, 'v>>,
written: Vec<u8>,
initial_offset: usize,
}
impl<'p, 'v> Saver for ShallowDumper<'p, 'v> {
type SrcPtr = OffsetMut<'p, 'v>;
type DstPtr = Offset<'p, 'v>;
type Error = !;
fn try_save_raw<R, T: ?Sized + ValidateBlob>(&self,
ptr: &Offset<'p, 'v>,
_metadata: T::Metadata,
_f: impl FnOnce(ValidBlob<T>, &<Self::SrcPtr as Ptr>::BlobZone) -> R,
) -> Result<Result<<Self::DstPtr as Ptr>::Persist, R>,
Self::Error>
{
Ok(Ok(*ptr))
}
fn finish_save<T>(&mut self, value_poll: &T) -> Result<Offset<'p, 'v>, Self::Error>
where T: EncodeBlob
{
let offset = self.initial_offset
.checked_add(self.written.len())
.and_then(Offset::new)
.expect("overflow");
let written = mem::replace(&mut self.written, vec![]);
self.written = value_poll.encode_blob(written).into_ok();
Ok(offset)
}
}
impl<'p, 'v> ShallowDumper<'p, 'v> {
pub fn new(initial_offset: usize) -> Self {
Self {
marker: PhantomData,
written: vec![],
initial_offset,
}
}
pub fn from_buf(buf: impl Into<Vec<u8>>) -> Self {
Self {
marker: PhantomData,
initial_offset: 0,
written: buf.into(),
}
}
pub fn | <T: ?Sized>(mut self, value: &T) -> (Vec<u8>, Offset<'p, 'v>)
where T: SavePtr<OffsetMut<'p, 'v>, Offset<'p, 'v>>
{
let mut encoder = value.init_save_ptr();
encoder.save_poll(&mut self).into_ok();
let offset = self.finish_save(&encoder).into_ok();
(self.written, offset)
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::bag::Bag;
#[test]
fn test_shallow_dumper() {
let (buf, offset) = ShallowDumper::new(0).save(&42u8);
//assert_eq!(offset, 0);
//assert_eq!(buf, &[42]);
/*
let own = OffsetMut::alloc(42u8);
let (buf, offset) = ShallowDumper::new(0).save(&own);
assert_eq!(offset, 1);
assert_eq!(buf, &[42, 1,0,0,0,0,0,0,0]);
let own2 = OffsetMut::alloc(own);
let (buf, offset) = ShallowDumper::new(0).save(&own2);
assert_eq!(offset, 9);
assert_eq!(buf,
&[42,
1,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,
]);
*/
}
}
| save | identifier_name |
offset.rs | use std::alloc::{GlobalAlloc, System, Layout};
use std::borrow::Borrow;
use std::cmp;
use std::convert::TryInto;
use std::fmt;
use std::marker::PhantomData;
use std::mem::{self, ManuallyDrop};
use std::num::NonZeroU64;
use std::ptr::NonNull;
use std::hint::unreachable_unchecked;
use thiserror::Error;
use leint::Le;
use owned::{IntoOwned, Take};
use crate::pointee::Pointee;
use crate::refs::Ref;
use crate::blob::*;
use crate::load::*;
use crate::save::*;
use crate::scalar::*;
use crate::ptr::*;
use crate::pile::*;
use crate::heap::*;
#[derive(Clone, Copy)]
#[repr(transparent)]
pub struct Offset<'pile, 'version> {
marker: PhantomData<(
fn(&'pile ()) -> &'pile (),
&'version (),
)>,
raw: Le<NonZeroU64>,
}
unsafe impl Persist for Offset<'_, '_> {}
impl fmt::Debug for Offset<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.get().fmt(f)
}
}
#[derive(Debug, Error)]
#[error("invalid offset")]
#[non_exhaustive]
pub struct ValidateOffsetBlobError;
impl Scalar for Offset<'_, '_> {
const BLOB_LAYOUT: BlobLayout = BlobLayout::new_nonzero(mem::size_of::<Self>());
type ScalarBlobError = ValidateOffsetBlobError;
fn validate_blob<'a>(blob: Blob<'a, Self>) -> Result<ValidBlob<'a, Self>, Self::ScalarBlobError> {
let raw = u64::from_le_bytes(blob.as_bytes().try_into().unwrap());
if raw & 0b1 == 0b1 && (raw >> 1) < Offset::MAX as u64 {
unsafe { Ok(blob.assume_valid()) }
} else {
Err(ValidateOffsetBlobError)
}
}
fn decode_blob<'a>(blob: ValidBlob<'a, Self>) -> Self {
blob.as_value().clone()
}
fn try_deref_blob<'a>(blob: ValidBlob<'a, Self>) -> Result<&'a Self, ValidBlob<'a, Self>> {
Ok(blob.as_value())
}
fn encode_blob<W: WriteBlob>(&self, dst: W) -> Result<W::Ok, W::Error> {
todo!()
}
}
impl AsPtrImpl<Self> for Offset<'_, '_> {
fn as_ptr_impl(this: &Self) -> &Self {
this
}
}
impl<'p, 'v> PersistPtr for Offset<'p, 'v> {
type Zone = !;
type BlobZone = TryPile<'p, 'v>;
}
#[derive(Clone, Copy)]
#[repr(transparent)]
pub struct OffsetMut<'p, 'v, A = System> {
marker: PhantomData<A>,
inner: Offset<'p, 'v>,
}
unsafe impl Persist for OffsetMut<'_, '_> {}
impl fmt::Debug for OffsetMut<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.kind().fmt(f)
}
}
unsafe impl ValidateBlob for OffsetMut<'_, '_> {
type BlobError = ValidateOffsetBlobError;
fn try_blob_layout(_: ()) -> Result<BlobLayout, !> {
Ok(BlobLayout::new_nonzero(mem::size_of::<Self>()))
}
fn validate_blob<'a>(blob: Blob<'a, Self>, ignore_padding: bool) -> Result<ValidBlob<'a, Self>, Self::BlobError> {
let mut fields = blob.validate_fields(ignore_padding);
fields.validate_blob::<Offset>()?;
unsafe { Ok(fields.finish()) }
}
}
impl Load for OffsetMut<'_, '_> {
type Ptr = !;
fn decode_blob(blob: ValidBlob<Self>, _: &<Self::Ptr as Ptr>::BlobZone) -> Self {
blob.as_value().clone()
}
fn try_deref_blob<'a>(blob: ValidBlob<'a, Self>, _: &()) -> Result<&'a Self, ValidBlob<'a, Self>> {
Ok(blob.as_value())
}
}
impl AsPtrImpl<Self> for OffsetMut<'_, '_> {
fn as_ptr_impl(this: &Self) -> &Self {
this
}
}
/*
impl<'p, 'v, A> Borrow<OffsetMut<'p, 'v, A>> for Offset<'p, 'v> {
#[inline(always)]
fn borrow(&self) -> &OffsetMut<'p, 'v, A> {
self.as_ref()
}
}
impl<'p, 'v, A> AsRef<OffsetMut<'p, 'v, A>> for Offset<'p, 'v> {
#[inline(always)]
fn as_ref(&self) -> &OffsetMut<'p, 'v, A> {
// SAFETY: #[repr(transparent)]
unsafe { &*(self as *const Self as *const _) }
}
}
*/
impl<'p, 'v> From<Offset<'p, 'v>> for usize {
fn from(offset: Offset<'p, 'v>) -> usize {
offset.get()
}
}
impl<'p, 'v> From<Offset<'p, 'v>> for OffsetMut<'p, 'v> {
fn from(inner: Offset<'p, 'v>) -> Self {
Self {
marker: PhantomData,
inner,
}
}
}
impl cmp::PartialEq<usize> for Offset<'_, '_> {
fn eq(&self, other: &usize) -> bool {
self.get() == *other
}
}
impl cmp::PartialEq<Offset<'_, '_>> for usize {
fn eq(&self, other: &Offset<'_, '_>) -> bool {
*self == other.get()
}
}
impl<'p, 'v> Offset<'p, 'v> {
/// The largest `Offset`.
pub const MAX: usize = (1 << 62) - 1;
/// Creates a new `Offset`.
///
/// Returns `None` if the offset is out of range:
///
/// ```
/// use hoard::offset::Offset;
///
/// assert!(Offset::new(Offset::MAX + 1)
/// .is_none());
/// ```
///
/// # Examples
///
/// Zero is a valid offset:
///
/// ```
/// use hoard::offset::Offset;
///
/// Offset::new(0).unwrap();
/// ```
pub fn new(offset: usize) -> Option<Self> {
if offset <= Self::MAX {
let offset = offset as u64;
Some(offset.checked_shl(1).map(|offset|
Self {
marker: PhantomData,
raw: NonZeroU64::new(offset | 1).unwrap().into(),
}
).unwrap())
} else {
None
}
}
/// Casts the `Offset` to a different lifetime.
///
/// This is *safe* because an offset by itself has no guarantees associated with it.
#[inline(always)]
pub fn cast<'p2, 'v2>(&self) -> Offset<'p2, 'v2> {
Offset {
marker: PhantomData,
raw: self.raw,
}
}
/// Gets the offset as a `usize`.
///
/// # Examples
///
/// ```
/// use hoard::offset::Offset;
///
/// assert_eq!(Offset::new(0).unwrap().get(), 0);
/// assert_eq!(Offset::new(1).unwrap().get(), 1);
/// ```
#[inline(always)]
pub fn get(&self) -> usize {
(self.raw.get().get() >> 1) as usize
}
/// Creates a dangling `Offset`.
///
/// # Examples
///
/// ```
/// use hoard::offset::Offset;
///
/// assert_eq!(Offset::dangling().get(), Offset::MAX);
/// ```
#[inline(always)]
pub fn dangling() -> Self {
Self::new(Self::MAX).unwrap()
}
/// Erases the lifetime of an `Offset`.
pub fn to_static(&self) -> Offset<'static, 'static> {
Offset {
marker: PhantomData,
raw: self.raw,
}
}
}
/// Enum for the kinds of `OffsetMut`.
#[derive(Debug)]
pub enum Kind<'p, 'v> {
/// An unmodified `Offset`.
Offset(Offset<'p, 'v>),
/// A pointer to something in the heap.
Ptr(HeapPtr),
}
impl<'p, 'v, A> OffsetMut<'p, 'v, A> {
/// Create an `OffsetMut` from a pointer.
///
/// Returns `None` if the alignment is incorrect.
#[inline]
pub fn from_ptr(ptr: NonNull<u16>) -> Option<Self> {
let raw = ptr.as_ptr() as usize as u64;
if raw & 1 == 1 {
unsafe { Some(mem::transmute(ptr.as_ptr() as usize as u64)) }
} else {
None
}
}
/// Creates an `OffsetMut` from a pointer without checking the alignment.
///
/// # Safety
///
/// The pointer must be properly aligned.
#[inline]
pub unsafe fn from_ptr_unchecked(ptr: NonNull<u16>) -> Self {
match Self::from_ptr(ptr) {
Some(this) => this,
None => {
unreachable_unchecked()
}
}
}
/// Returns the kind of offset.
pub fn kind(&self) -> Kind<'p, 'v> {
if self.inner.raw.get().get() & 1 == 1 {
Kind::Offset(self.inner)
} else |
}
/// Gets the `Offset` from a clean `OffsetMut`.
#[inline(always)]
pub fn get_offset(&self) -> Option<Offset<'p, 'v>> {
match self.kind() {
Kind::Offset(offset) => Some(offset),
Kind::Ptr(_) => None,
}
}
/// Gets the pointer from a dirty `OffsetMut`.
#[inline(always)]
pub fn get_ptr(&self) -> Option<HeapPtr> {
match self.kind() {
Kind::Ptr(ptr) => Some(ptr),
Kind::Offset(_) => None,
}
}
}
/*
impl<'p, 'v, A> AsPtr<OffsetMut<'p, 'v, A>> for HeapPtr {
#[inline(always)]
fn as_ptr(&self) -> &OffsetMut<'p, 'v, A> {
static_assertions::assert_eq_size!(OffsetMut, HeapPtr);
unsafe {
&*(self as *const _ as *const _)
}
}
}
*/
impl<'p, 'v> Ptr for OffsetMut<'p, 'v> {
type Zone = TryPile<'p, 'v>;
type BlobZone = TryPile<'p, 'v>;
type Persist = Offset<'p, 'v>;
unsafe fn dealloc<T: ?Sized + Pointee>(&self, metadata: T::Metadata) {
match self.kind() {
Kind::Offset(_) => {},
Kind::Ptr(heap_ptr) => heap_ptr.dealloc::<T>(metadata),
}
}
unsafe fn try_get_dirty_unchecked<T: ?Sized + Pointee>(&self, metadata: T::Metadata) -> Result<&T, Self::Persist> {
match self.kind() {
Kind::Ptr(ptr) => {
todo!()
},
Kind::Offset(offset) => Err(offset),
}
}
}
impl<'p,'v> Default for OffsetMut<'p, 'v> {
fn default() -> Self {
Offset::dangling().into()
}
}
#[derive(Debug, Default)]
pub struct ShallowDumper<'p, 'v> {
marker: PhantomData<OffsetMut<'p, 'v>>,
written: Vec<u8>,
initial_offset: usize,
}
impl<'p, 'v> Saver for ShallowDumper<'p, 'v> {
type SrcPtr = OffsetMut<'p, 'v>;
type DstPtr = Offset<'p, 'v>;
type Error = !;
fn try_save_raw<R, T: ?Sized + ValidateBlob>(&self,
ptr: &Offset<'p, 'v>,
_metadata: T::Metadata,
_f: impl FnOnce(ValidBlob<T>, &<Self::SrcPtr as Ptr>::BlobZone) -> R,
) -> Result<Result<<Self::DstPtr as Ptr>::Persist, R>,
Self::Error>
{
Ok(Ok(*ptr))
}
fn finish_save<T>(&mut self, value_poll: &T) -> Result<Offset<'p, 'v>, Self::Error>
where T: EncodeBlob
{
let offset = self.initial_offset
.checked_add(self.written.len())
.and_then(Offset::new)
.expect("overflow");
let written = mem::replace(&mut self.written, vec![]);
self.written = value_poll.encode_blob(written).into_ok();
Ok(offset)
}
}
impl<'p, 'v> ShallowDumper<'p, 'v> {
pub fn new(initial_offset: usize) -> Self {
Self {
marker: PhantomData,
written: vec![],
initial_offset,
}
}
pub fn from_buf(buf: impl Into<Vec<u8>>) -> Self {
Self {
marker: PhantomData,
initial_offset: 0,
written: buf.into(),
}
}
pub fn save<T: ?Sized>(mut self, value: &T) -> (Vec<u8>, Offset<'p, 'v>)
where T: SavePtr<OffsetMut<'p, 'v>, Offset<'p, 'v>>
{
let mut encoder = value.init_save_ptr();
encoder.save_poll(&mut self).into_ok();
let offset = self.finish_save(&encoder).into_ok();
(self.written, offset)
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::bag::Bag;
#[test]
fn test_shallow_dumper() {
let (buf, offset) = ShallowDumper::new(0).save(&42u8);
//assert_eq!(offset, 0);
//assert_eq!(buf, &[42]);
/*
let own = OffsetMut::alloc(42u8);
let (buf, offset) = ShallowDumper::new(0).save(&own);
assert_eq!(offset, 1);
assert_eq!(buf, &[42, 1,0,0,0,0,0,0,0]);
let own2 = OffsetMut::alloc(own);
let (buf, offset) = ShallowDumper::new(0).save(&own2);
assert_eq!(offset, 9);
assert_eq!(buf,
&[42,
1,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,
]);
*/
}
}
| {
Kind::Ptr(unsafe { mem::transmute(self.inner) })
} | conditional_block |
offset.rs | use std::alloc::{GlobalAlloc, System, Layout};
use std::borrow::Borrow;
use std::cmp;
use std::convert::TryInto;
use std::fmt;
use std::marker::PhantomData;
use std::mem::{self, ManuallyDrop};
use std::num::NonZeroU64;
use std::ptr::NonNull;
use std::hint::unreachable_unchecked;
use thiserror::Error;
use leint::Le;
use owned::{IntoOwned, Take};
use crate::pointee::Pointee;
use crate::refs::Ref;
use crate::blob::*;
use crate::load::*;
use crate::save::*;
use crate::scalar::*;
use crate::ptr::*;
use crate::pile::*;
use crate::heap::*;
#[derive(Clone, Copy)]
#[repr(transparent)]
pub struct Offset<'pile, 'version> {
marker: PhantomData<(
fn(&'pile ()) -> &'pile (),
&'version (),
)>,
raw: Le<NonZeroU64>,
}
unsafe impl Persist for Offset<'_, '_> {}
impl fmt::Debug for Offset<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.get().fmt(f)
}
}
#[derive(Debug, Error)]
#[error("invalid offset")]
#[non_exhaustive]
pub struct ValidateOffsetBlobError;
impl Scalar for Offset<'_, '_> {
const BLOB_LAYOUT: BlobLayout = BlobLayout::new_nonzero(mem::size_of::<Self>());
type ScalarBlobError = ValidateOffsetBlobError;
fn validate_blob<'a>(blob: Blob<'a, Self>) -> Result<ValidBlob<'a, Self>, Self::ScalarBlobError> {
let raw = u64::from_le_bytes(blob.as_bytes().try_into().unwrap());
if raw & 0b1 == 0b1 && (raw >> 1) < Offset::MAX as u64 {
unsafe { Ok(blob.assume_valid()) }
} else {
Err(ValidateOffsetBlobError)
}
}
fn decode_blob<'a>(blob: ValidBlob<'a, Self>) -> Self {
blob.as_value().clone()
}
fn try_deref_blob<'a>(blob: ValidBlob<'a, Self>) -> Result<&'a Self, ValidBlob<'a, Self>> {
Ok(blob.as_value())
}
fn encode_blob<W: WriteBlob>(&self, dst: W) -> Result<W::Ok, W::Error> {
todo!()
}
}
impl AsPtrImpl<Self> for Offset<'_, '_> {
fn as_ptr_impl(this: &Self) -> &Self {
this
}
}
impl<'p, 'v> PersistPtr for Offset<'p, 'v> {
type Zone = !;
type BlobZone = TryPile<'p, 'v>;
}
#[derive(Clone, Copy)]
#[repr(transparent)]
pub struct OffsetMut<'p, 'v, A = System> {
marker: PhantomData<A>,
inner: Offset<'p, 'v>,
}
unsafe impl Persist for OffsetMut<'_, '_> {}
impl fmt::Debug for OffsetMut<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.kind().fmt(f)
}
}
unsafe impl ValidateBlob for OffsetMut<'_, '_> {
type BlobError = ValidateOffsetBlobError;
fn try_blob_layout(_: ()) -> Result<BlobLayout, !> {
Ok(BlobLayout::new_nonzero(mem::size_of::<Self>()))
}
fn validate_blob<'a>(blob: Blob<'a, Self>, ignore_padding: bool) -> Result<ValidBlob<'a, Self>, Self::BlobError> {
let mut fields = blob.validate_fields(ignore_padding);
fields.validate_blob::<Offset>()?;
unsafe { Ok(fields.finish()) }
}
}
impl Load for OffsetMut<'_, '_> {
type Ptr = !;
fn decode_blob(blob: ValidBlob<Self>, _: &<Self::Ptr as Ptr>::BlobZone) -> Self {
blob.as_value().clone()
}
fn try_deref_blob<'a>(blob: ValidBlob<'a, Self>, _: &()) -> Result<&'a Self, ValidBlob<'a, Self>> {
Ok(blob.as_value())
}
}
impl AsPtrImpl<Self> for OffsetMut<'_, '_> {
fn as_ptr_impl(this: &Self) -> &Self {
this
}
}
/*
impl<'p, 'v, A> Borrow<OffsetMut<'p, 'v, A>> for Offset<'p, 'v> {
#[inline(always)]
fn borrow(&self) -> &OffsetMut<'p, 'v, A> {
self.as_ref()
}
}
impl<'p, 'v, A> AsRef<OffsetMut<'p, 'v, A>> for Offset<'p, 'v> {
#[inline(always)] | // SAFETY: #[repr(transparent)]
unsafe { &*(self as *const Self as *const _) }
}
}
*/
impl<'p, 'v> From<Offset<'p, 'v>> for usize {
fn from(offset: Offset<'p, 'v>) -> usize {
offset.get()
}
}
impl<'p, 'v> From<Offset<'p, 'v>> for OffsetMut<'p, 'v> {
fn from(inner: Offset<'p, 'v>) -> Self {
Self {
marker: PhantomData,
inner,
}
}
}
impl cmp::PartialEq<usize> for Offset<'_, '_> {
fn eq(&self, other: &usize) -> bool {
self.get() == *other
}
}
impl cmp::PartialEq<Offset<'_, '_>> for usize {
fn eq(&self, other: &Offset<'_, '_>) -> bool {
*self == other.get()
}
}
impl<'p, 'v> Offset<'p, 'v> {
/// The largest `Offset`.
pub const MAX: usize = (1 << 62) - 1;
/// Creates a new `Offset`.
///
/// Returns `None` if the offset is out of range:
///
/// ```
/// use hoard::offset::Offset;
///
/// assert!(Offset::new(Offset::MAX + 1)
/// .is_none());
/// ```
///
/// # Examples
///
/// Zero is a valid offset:
///
/// ```
/// use hoard::offset::Offset;
///
/// Offset::new(0).unwrap();
/// ```
pub fn new(offset: usize) -> Option<Self> {
if offset <= Self::MAX {
let offset = offset as u64;
Some(offset.checked_shl(1).map(|offset|
Self {
marker: PhantomData,
raw: NonZeroU64::new(offset | 1).unwrap().into(),
}
).unwrap())
} else {
None
}
}
/// Casts the `Offset` to a different lifetime.
///
/// This is *safe* because an offset by itself has no guarantees associated with it.
#[inline(always)]
pub fn cast<'p2, 'v2>(&self) -> Offset<'p2, 'v2> {
Offset {
marker: PhantomData,
raw: self.raw,
}
}
/// Gets the offset as a `usize`.
///
/// # Examples
///
/// ```
/// use hoard::offset::Offset;
///
/// assert_eq!(Offset::new(0).unwrap().get(), 0);
/// assert_eq!(Offset::new(1).unwrap().get(), 1);
/// ```
#[inline(always)]
pub fn get(&self) -> usize {
(self.raw.get().get() >> 1) as usize
}
/// Creates a dangling `Offset`.
///
/// # Examples
///
/// ```
/// use hoard::offset::Offset;
///
/// assert_eq!(Offset::dangling().get(), Offset::MAX);
/// ```
#[inline(always)]
pub fn dangling() -> Self {
Self::new(Self::MAX).unwrap()
}
/// Erases the lifetime of an `Offset`.
pub fn to_static(&self) -> Offset<'static, 'static> {
Offset {
marker: PhantomData,
raw: self.raw,
}
}
}
/// Enum for the kinds of `OffsetMut`.
#[derive(Debug)]
pub enum Kind<'p, 'v> {
/// An unmodified `Offset`.
Offset(Offset<'p, 'v>),
/// A pointer to something in the heap.
Ptr(HeapPtr),
}
impl<'p, 'v, A> OffsetMut<'p, 'v, A> {
/// Create an `OffsetMut` from a pointer.
///
/// Returns `None` if the alignment is incorrect.
#[inline]
pub fn from_ptr(ptr: NonNull<u16>) -> Option<Self> {
let raw = ptr.as_ptr() as usize as u64;
if raw & 1 == 1 {
unsafe { Some(mem::transmute(ptr.as_ptr() as usize as u64)) }
} else {
None
}
}
/// Creates an `OffsetMut` from a pointer without checking the alignment.
///
/// # Safety
///
/// The pointer must be properly aligned.
#[inline]
pub unsafe fn from_ptr_unchecked(ptr: NonNull<u16>) -> Self {
match Self::from_ptr(ptr) {
Some(this) => this,
None => {
unreachable_unchecked()
}
}
}
/// Returns the kind of offset.
pub fn kind(&self) -> Kind<'p, 'v> {
if self.inner.raw.get().get() & 1 == 1 {
Kind::Offset(self.inner)
} else {
Kind::Ptr(unsafe { mem::transmute(self.inner) })
}
}
/// Gets the `Offset` from a clean `OffsetMut`.
#[inline(always)]
pub fn get_offset(&self) -> Option<Offset<'p, 'v>> {
match self.kind() {
Kind::Offset(offset) => Some(offset),
Kind::Ptr(_) => None,
}
}
/// Gets the pointer from a dirty `OffsetMut`.
#[inline(always)]
pub fn get_ptr(&self) -> Option<HeapPtr> {
match self.kind() {
Kind::Ptr(ptr) => Some(ptr),
Kind::Offset(_) => None,
}
}
}
/*
impl<'p, 'v, A> AsPtr<OffsetMut<'p, 'v, A>> for HeapPtr {
#[inline(always)]
fn as_ptr(&self) -> &OffsetMut<'p, 'v, A> {
static_assertions::assert_eq_size!(OffsetMut, HeapPtr);
unsafe {
&*(self as *const _ as *const _)
}
}
}
*/
impl<'p, 'v> Ptr for OffsetMut<'p, 'v> {
type Zone = TryPile<'p, 'v>;
type BlobZone = TryPile<'p, 'v>;
type Persist = Offset<'p, 'v>;
unsafe fn dealloc<T: ?Sized + Pointee>(&self, metadata: T::Metadata) {
match self.kind() {
Kind::Offset(_) => {},
Kind::Ptr(heap_ptr) => heap_ptr.dealloc::<T>(metadata),
}
}
unsafe fn try_get_dirty_unchecked<T: ?Sized + Pointee>(&self, metadata: T::Metadata) -> Result<&T, Self::Persist> {
match self.kind() {
Kind::Ptr(ptr) => {
todo!()
},
Kind::Offset(offset) => Err(offset),
}
}
}
impl<'p,'v> Default for OffsetMut<'p, 'v> {
fn default() -> Self {
Offset::dangling().into()
}
}
#[derive(Debug, Default)]
pub struct ShallowDumper<'p, 'v> {
marker: PhantomData<OffsetMut<'p, 'v>>,
written: Vec<u8>,
initial_offset: usize,
}
impl<'p, 'v> Saver for ShallowDumper<'p, 'v> {
type SrcPtr = OffsetMut<'p, 'v>;
type DstPtr = Offset<'p, 'v>;
type Error = !;
fn try_save_raw<R, T: ?Sized + ValidateBlob>(&self,
ptr: &Offset<'p, 'v>,
_metadata: T::Metadata,
_f: impl FnOnce(ValidBlob<T>, &<Self::SrcPtr as Ptr>::BlobZone) -> R,
) -> Result<Result<<Self::DstPtr as Ptr>::Persist, R>,
Self::Error>
{
Ok(Ok(*ptr))
}
fn finish_save<T>(&mut self, value_poll: &T) -> Result<Offset<'p, 'v>, Self::Error>
where T: EncodeBlob
{
let offset = self.initial_offset
.checked_add(self.written.len())
.and_then(Offset::new)
.expect("overflow");
let written = mem::replace(&mut self.written, vec![]);
self.written = value_poll.encode_blob(written).into_ok();
Ok(offset)
}
}
impl<'p, 'v> ShallowDumper<'p, 'v> {
pub fn new(initial_offset: usize) -> Self {
Self {
marker: PhantomData,
written: vec![],
initial_offset,
}
}
pub fn from_buf(buf: impl Into<Vec<u8>>) -> Self {
Self {
marker: PhantomData,
initial_offset: 0,
written: buf.into(),
}
}
pub fn save<T: ?Sized>(mut self, value: &T) -> (Vec<u8>, Offset<'p, 'v>)
where T: SavePtr<OffsetMut<'p, 'v>, Offset<'p, 'v>>
{
let mut encoder = value.init_save_ptr();
encoder.save_poll(&mut self).into_ok();
let offset = self.finish_save(&encoder).into_ok();
(self.written, offset)
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::bag::Bag;
#[test]
fn test_shallow_dumper() {
let (buf, offset) = ShallowDumper::new(0).save(&42u8);
//assert_eq!(offset, 0);
//assert_eq!(buf, &[42]);
/*
let own = OffsetMut::alloc(42u8);
let (buf, offset) = ShallowDumper::new(0).save(&own);
assert_eq!(offset, 1);
assert_eq!(buf, &[42, 1,0,0,0,0,0,0,0]);
let own2 = OffsetMut::alloc(own);
let (buf, offset) = ShallowDumper::new(0).save(&own2);
assert_eq!(offset, 9);
assert_eq!(buf,
&[42,
1,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,
]);
*/
}
} | fn as_ref(&self) -> &OffsetMut<'p, 'v, A> { | random_line_split |
offset.rs | use std::alloc::{GlobalAlloc, System, Layout};
use std::borrow::Borrow;
use std::cmp;
use std::convert::TryInto;
use std::fmt;
use std::marker::PhantomData;
use std::mem::{self, ManuallyDrop};
use std::num::NonZeroU64;
use std::ptr::NonNull;
use std::hint::unreachable_unchecked;
use thiserror::Error;
use leint::Le;
use owned::{IntoOwned, Take};
use crate::pointee::Pointee;
use crate::refs::Ref;
use crate::blob::*;
use crate::load::*;
use crate::save::*;
use crate::scalar::*;
use crate::ptr::*;
use crate::pile::*;
use crate::heap::*;
#[derive(Clone, Copy)]
#[repr(transparent)]
pub struct Offset<'pile, 'version> {
marker: PhantomData<(
fn(&'pile ()) -> &'pile (),
&'version (),
)>,
raw: Le<NonZeroU64>,
}
unsafe impl Persist for Offset<'_, '_> {}
impl fmt::Debug for Offset<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.get().fmt(f)
}
}
#[derive(Debug, Error)]
#[error("invalid offset")]
#[non_exhaustive]
pub struct ValidateOffsetBlobError;
impl Scalar for Offset<'_, '_> {
const BLOB_LAYOUT: BlobLayout = BlobLayout::new_nonzero(mem::size_of::<Self>());
type ScalarBlobError = ValidateOffsetBlobError;
fn validate_blob<'a>(blob: Blob<'a, Self>) -> Result<ValidBlob<'a, Self>, Self::ScalarBlobError> |
fn decode_blob<'a>(blob: ValidBlob<'a, Self>) -> Self {
blob.as_value().clone()
}
fn try_deref_blob<'a>(blob: ValidBlob<'a, Self>) -> Result<&'a Self, ValidBlob<'a, Self>> {
Ok(blob.as_value())
}
fn encode_blob<W: WriteBlob>(&self, dst: W) -> Result<W::Ok, W::Error> {
todo!()
}
}
impl AsPtrImpl<Self> for Offset<'_, '_> {
fn as_ptr_impl(this: &Self) -> &Self {
this
}
}
impl<'p, 'v> PersistPtr for Offset<'p, 'v> {
type Zone = !;
type BlobZone = TryPile<'p, 'v>;
}
#[derive(Clone, Copy)]
#[repr(transparent)]
pub struct OffsetMut<'p, 'v, A = System> {
marker: PhantomData<A>,
inner: Offset<'p, 'v>,
}
unsafe impl Persist for OffsetMut<'_, '_> {}
impl fmt::Debug for OffsetMut<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.kind().fmt(f)
}
}
unsafe impl ValidateBlob for OffsetMut<'_, '_> {
type BlobError = ValidateOffsetBlobError;
fn try_blob_layout(_: ()) -> Result<BlobLayout, !> {
Ok(BlobLayout::new_nonzero(mem::size_of::<Self>()))
}
fn validate_blob<'a>(blob: Blob<'a, Self>, ignore_padding: bool) -> Result<ValidBlob<'a, Self>, Self::BlobError> {
let mut fields = blob.validate_fields(ignore_padding);
fields.validate_blob::<Offset>()?;
unsafe { Ok(fields.finish()) }
}
}
impl Load for OffsetMut<'_, '_> {
type Ptr = !;
fn decode_blob(blob: ValidBlob<Self>, _: &<Self::Ptr as Ptr>::BlobZone) -> Self {
blob.as_value().clone()
}
fn try_deref_blob<'a>(blob: ValidBlob<'a, Self>, _: &()) -> Result<&'a Self, ValidBlob<'a, Self>> {
Ok(blob.as_value())
}
}
impl AsPtrImpl<Self> for OffsetMut<'_, '_> {
fn as_ptr_impl(this: &Self) -> &Self {
this
}
}
/*
impl<'p, 'v, A> Borrow<OffsetMut<'p, 'v, A>> for Offset<'p, 'v> {
#[inline(always)]
fn borrow(&self) -> &OffsetMut<'p, 'v, A> {
self.as_ref()
}
}
impl<'p, 'v, A> AsRef<OffsetMut<'p, 'v, A>> for Offset<'p, 'v> {
#[inline(always)]
fn as_ref(&self) -> &OffsetMut<'p, 'v, A> {
// SAFETY: #[repr(transparent)]
unsafe { &*(self as *const Self as *const _) }
}
}
*/
impl<'p, 'v> From<Offset<'p, 'v>> for usize {
fn from(offset: Offset<'p, 'v>) -> usize {
offset.get()
}
}
impl<'p, 'v> From<Offset<'p, 'v>> for OffsetMut<'p, 'v> {
fn from(inner: Offset<'p, 'v>) -> Self {
Self {
marker: PhantomData,
inner,
}
}
}
impl cmp::PartialEq<usize> for Offset<'_, '_> {
fn eq(&self, other: &usize) -> bool {
self.get() == *other
}
}
impl cmp::PartialEq<Offset<'_, '_>> for usize {
fn eq(&self, other: &Offset<'_, '_>) -> bool {
*self == other.get()
}
}
impl<'p, 'v> Offset<'p, 'v> {
/// The largest `Offset`.
pub const MAX: usize = (1 << 62) - 1;
/// Creates a new `Offset`.
///
/// Returns `None` if the offset is out of range:
///
/// ```
/// use hoard::offset::Offset;
///
/// assert!(Offset::new(Offset::MAX + 1)
/// .is_none());
/// ```
///
/// # Examples
///
/// Zero is a valid offset:
///
/// ```
/// use hoard::offset::Offset;
///
/// Offset::new(0).unwrap();
/// ```
pub fn new(offset: usize) -> Option<Self> {
if offset <= Self::MAX {
let offset = offset as u64;
Some(offset.checked_shl(1).map(|offset|
Self {
marker: PhantomData,
raw: NonZeroU64::new(offset | 1).unwrap().into(),
}
).unwrap())
} else {
None
}
}
/// Casts the `Offset` to a different lifetime.
///
/// This is *safe* because an offset by itself has no guarantees associated with it.
#[inline(always)]
pub fn cast<'p2, 'v2>(&self) -> Offset<'p2, 'v2> {
Offset {
marker: PhantomData,
raw: self.raw,
}
}
/// Gets the offset as a `usize`.
///
/// # Examples
///
/// ```
/// use hoard::offset::Offset;
///
/// assert_eq!(Offset::new(0).unwrap().get(), 0);
/// assert_eq!(Offset::new(1).unwrap().get(), 1);
/// ```
#[inline(always)]
pub fn get(&self) -> usize {
(self.raw.get().get() >> 1) as usize
}
/// Creates a dangling `Offset`.
///
/// # Examples
///
/// ```
/// use hoard::offset::Offset;
///
/// assert_eq!(Offset::dangling().get(), Offset::MAX);
/// ```
#[inline(always)]
pub fn dangling() -> Self {
Self::new(Self::MAX).unwrap()
}
/// Erases the lifetime of an `Offset`.
pub fn to_static(&self) -> Offset<'static, 'static> {
Offset {
marker: PhantomData,
raw: self.raw,
}
}
}
/// Enum for the kinds of `OffsetMut`.
#[derive(Debug)]
pub enum Kind<'p, 'v> {
/// An unmodified `Offset`.
Offset(Offset<'p, 'v>),
/// A pointer to something in the heap.
Ptr(HeapPtr),
}
impl<'p, 'v, A> OffsetMut<'p, 'v, A> {
/// Create an `OffsetMut` from a pointer.
///
/// Returns `None` if the alignment is incorrect.
#[inline]
pub fn from_ptr(ptr: NonNull<u16>) -> Option<Self> {
let raw = ptr.as_ptr() as usize as u64;
if raw & 1 == 1 {
unsafe { Some(mem::transmute(ptr.as_ptr() as usize as u64)) }
} else {
None
}
}
/// Creates an `OffsetMut` from a pointer without checking the alignment.
///
/// # Safety
///
/// The pointer must be properly aligned.
#[inline]
pub unsafe fn from_ptr_unchecked(ptr: NonNull<u16>) -> Self {
match Self::from_ptr(ptr) {
Some(this) => this,
None => {
unreachable_unchecked()
}
}
}
/// Returns the kind of offset.
pub fn kind(&self) -> Kind<'p, 'v> {
if self.inner.raw.get().get() & 1 == 1 {
Kind::Offset(self.inner)
} else {
Kind::Ptr(unsafe { mem::transmute(self.inner) })
}
}
/// Gets the `Offset` from a clean `OffsetMut`.
#[inline(always)]
pub fn get_offset(&self) -> Option<Offset<'p, 'v>> {
match self.kind() {
Kind::Offset(offset) => Some(offset),
Kind::Ptr(_) => None,
}
}
/// Gets the pointer from a dirty `OffsetMut`.
#[inline(always)]
pub fn get_ptr(&self) -> Option<HeapPtr> {
match self.kind() {
Kind::Ptr(ptr) => Some(ptr),
Kind::Offset(_) => None,
}
}
}
/*
impl<'p, 'v, A> AsPtr<OffsetMut<'p, 'v, A>> for HeapPtr {
#[inline(always)]
fn as_ptr(&self) -> &OffsetMut<'p, 'v, A> {
static_assertions::assert_eq_size!(OffsetMut, HeapPtr);
unsafe {
&*(self as *const _ as *const _)
}
}
}
*/
impl<'p, 'v> Ptr for OffsetMut<'p, 'v> {
type Zone = TryPile<'p, 'v>;
type BlobZone = TryPile<'p, 'v>;
type Persist = Offset<'p, 'v>;
unsafe fn dealloc<T: ?Sized + Pointee>(&self, metadata: T::Metadata) {
match self.kind() {
Kind::Offset(_) => {},
Kind::Ptr(heap_ptr) => heap_ptr.dealloc::<T>(metadata),
}
}
unsafe fn try_get_dirty_unchecked<T: ?Sized + Pointee>(&self, metadata: T::Metadata) -> Result<&T, Self::Persist> {
match self.kind() {
Kind::Ptr(ptr) => {
todo!()
},
Kind::Offset(offset) => Err(offset),
}
}
}
impl<'p,'v> Default for OffsetMut<'p, 'v> {
fn default() -> Self {
Offset::dangling().into()
}
}
#[derive(Debug, Default)]
pub struct ShallowDumper<'p, 'v> {
marker: PhantomData<OffsetMut<'p, 'v>>,
written: Vec<u8>,
initial_offset: usize,
}
impl<'p, 'v> Saver for ShallowDumper<'p, 'v> {
type SrcPtr = OffsetMut<'p, 'v>;
type DstPtr = Offset<'p, 'v>;
type Error = !;
fn try_save_raw<R, T: ?Sized + ValidateBlob>(&self,
ptr: &Offset<'p, 'v>,
_metadata: T::Metadata,
_f: impl FnOnce(ValidBlob<T>, &<Self::SrcPtr as Ptr>::BlobZone) -> R,
) -> Result<Result<<Self::DstPtr as Ptr>::Persist, R>,
Self::Error>
{
Ok(Ok(*ptr))
}
fn finish_save<T>(&mut self, value_poll: &T) -> Result<Offset<'p, 'v>, Self::Error>
where T: EncodeBlob
{
let offset = self.initial_offset
.checked_add(self.written.len())
.and_then(Offset::new)
.expect("overflow");
let written = mem::replace(&mut self.written, vec![]);
self.written = value_poll.encode_blob(written).into_ok();
Ok(offset)
}
}
impl<'p, 'v> ShallowDumper<'p, 'v> {
pub fn new(initial_offset: usize) -> Self {
Self {
marker: PhantomData,
written: vec![],
initial_offset,
}
}
pub fn from_buf(buf: impl Into<Vec<u8>>) -> Self {
Self {
marker: PhantomData,
initial_offset: 0,
written: buf.into(),
}
}
pub fn save<T: ?Sized>(mut self, value: &T) -> (Vec<u8>, Offset<'p, 'v>)
where T: SavePtr<OffsetMut<'p, 'v>, Offset<'p, 'v>>
{
let mut encoder = value.init_save_ptr();
encoder.save_poll(&mut self).into_ok();
let offset = self.finish_save(&encoder).into_ok();
(self.written, offset)
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::bag::Bag;
#[test]
fn test_shallow_dumper() {
let (buf, offset) = ShallowDumper::new(0).save(&42u8);
//assert_eq!(offset, 0);
//assert_eq!(buf, &[42]);
/*
let own = OffsetMut::alloc(42u8);
let (buf, offset) = ShallowDumper::new(0).save(&own);
assert_eq!(offset, 1);
assert_eq!(buf, &[42, 1,0,0,0,0,0,0,0]);
let own2 = OffsetMut::alloc(own);
let (buf, offset) = ShallowDumper::new(0).save(&own2);
assert_eq!(offset, 9);
assert_eq!(buf,
&[42,
1,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,
]);
*/
}
}
| {
let raw = u64::from_le_bytes(blob.as_bytes().try_into().unwrap());
if raw & 0b1 == 0b1 && (raw >> 1) < Offset::MAX as u64 {
unsafe { Ok(blob.assume_valid()) }
} else {
Err(ValidateOffsetBlobError)
}
} | identifier_body |
glyph.rs | use super::math::*;
use crate::config::font::{Font, FontDescription};
use crate::config::ui_config::Delta;
use crate::config::Config;
use crate::cursor;
use alacritty_terminal::ansi::CursorStyle;
use alacritty_terminal::term::CursorKey;
use crossfont::{FontDesc, FontKey, Rasterize, Rasterizer, Size, Slant, Style, Weight};
use fnv::FnvHasher;
use log::*;
use std::collections::HashMap;
use std::hash::BuildHasherDefault;
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub struct GlyphKey {
pub key: crossfont::GlyphKey,
pub wide: bool,
pub zero_width: bool,
}
#[derive(Debug)]
pub struct RasterizedGlyph {
pub rasterized: crossfont::RasterizedGlyph,
pub wide: bool,
pub zero_width: bool,
}
/// `LoadGlyph` allows for copying a rasterized glyph into graphics memory.
pub trait LoadGlyph {
/// Load the rasterized glyph into GPU memory.
fn load_glyph(&mut self, rasterized: &RasterizedGlyph) -> AtlasGlyph;
/// Clear any state accumulated from previous loaded glyphs.
///
/// This can, for instance, be used to reset the texture Atlas.
fn clear(&mut self, cell_size: Vec2<i32>, cell_offset: Vec2<i32>);
}
#[derive(Copy, Debug, Clone)]
pub struct GridAtlasGlyph {
pub atlas_index: usize,
pub line: u16,
pub column: u16,
pub colored: bool,
}
#[derive(Copy, Debug, Clone)]
pub struct QuadAtlasGlyph {
pub atlas_index: usize,
pub uv_bot: f32,
pub uv_left: f32,
pub uv_width: f32,
pub uv_height: f32,
pub top: i16,
pub left: i16,
pub width: i16,
pub height: i16,
pub colored: bool,
}
#[derive(Copy, Debug, Clone)]
pub enum AtlasGlyph {
Grid(GridAtlasGlyph),
Quad(QuadAtlasGlyph),
}
/// Naïve glyph cache.
///
/// Currently only keyed by `char`, and thus not possible to hold different
/// representations of the same code point.
pub struct GlyphCache {
/// Cache of buffered glyphs.
pub cache: HashMap<GlyphKey, AtlasGlyph, BuildHasherDefault<FnvHasher>>,
/// Cache of buffered cursor glyphs.
pub cursor_cache: HashMap<CursorKey, AtlasGlyph, BuildHasherDefault<FnvHasher>>,
/// Rasterizer for loading new glyphs.
rasterizer: Rasterizer,
/// Regular font.
pub font_key: FontKey,
/// Bold font.
pub bold_key: FontKey,
/// Italic font.
pub italic_key: FontKey,
/// Bold italic font.
pub bold_italic_key: FontKey,
/// Font size.
pub font_size: crossfont::Size,
/// Glyph offset.
glyph_offset: Delta<i8>,
/// Font metrics.
pub metrics: crossfont::Metrics,
/// Cell size
pub cell_size: Vec2<i32>,
}
impl GlyphCache {
pub fn new<L>(
mut rasterizer: Rasterizer,
config: &Config,
font: &Font,
loader: &mut L,
) -> Result<GlyphCache, crossfont::Error>
where
L: LoadGlyph,
{
let (regular, bold, italic, bold_italic) = Self::compute_font_keys(font, &mut rasterizer)?;
// Need to load at least one glyph for the face before calling metrics.
// The glyph requested here ('m' at the time of writing) has no special
// meaning.
rasterizer.get_glyph(crossfont::GlyphKey { font_key: regular, c: 'm', size: font.size })?;
let metrics = rasterizer.metrics(regular, font.size)?;
let (cell_width, cell_height) = Self::compute_cell_size(config, &metrics);
let cell_size = Vec2::new(cell_width as i32, cell_height as i32);
let mut cache = Self {
cache: HashMap::default(),
cursor_cache: HashMap::default(),
rasterizer,
font_size: font.size,
font_key: regular,
bold_key: bold,
italic_key: italic,
bold_italic_key: bold_italic,
glyph_offset: font.glyph_offset,
metrics,
cell_size,
};
cache.clear_cache_with_common_glyphs(loader, config);
Ok(cache)
}
/// Computes font keys for (Regular, Bold, Italic, Bold Italic).
fn compute_font_keys(
font: &Font,
rasterizer: &mut Rasterizer,
) -> Result<(FontKey, FontKey, FontKey, FontKey), crossfont::Error> {
let size = font.size;
// Load regular font.
let regular_desc = Self::make_desc(&font.normal(), Slant::Normal, Weight::Normal);
let regular = Self::load_regular_font(rasterizer, ®ular_desc, size)?;
// Helper to load a description if it is not the `regular_desc`.
let mut load_or_regular = |desc: FontDesc| {
if desc == regular_desc { | else {
rasterizer.load_font(&desc, size).unwrap_or_else(|_| regular)
}
};
// Load bold font.
let bold_desc = Self::make_desc(&font.bold(), Slant::Normal, Weight::Bold);
let bold = load_or_regular(bold_desc);
// Load italic font.
let italic_desc = Self::make_desc(&font.italic(), Slant::Italic, Weight::Normal);
let italic = load_or_regular(italic_desc);
// Load bold italic font.
let bold_italic_desc = Self::make_desc(&font.bold_italic(), Slant::Italic, Weight::Bold);
let bold_italic = load_or_regular(bold_italic_desc);
Ok((regular, bold, italic, bold_italic))
}
fn load_regular_font(
rasterizer: &mut Rasterizer,
description: &FontDesc,
size: Size,
) -> Result<FontKey, crossfont::Error> {
match rasterizer.load_font(description, size) {
Ok(font) => Ok(font),
Err(err) => {
error!("{}", err);
let fallback_desc =
Self::make_desc(&Font::default().normal(), Slant::Normal, Weight::Normal);
rasterizer.load_font(&fallback_desc, size)
},
}
}
fn make_desc(desc: &FontDescription, slant: Slant, weight: Weight) -> FontDesc {
let style = if let Some(ref spec) = desc.style {
Style::Specific(spec.to_owned())
} else {
Style::Description { slant, weight }
};
FontDesc::new(desc.family.clone(), style)
}
fn rasterize_glyph(
glyph_key: GlyphKey,
rasterizer: &mut Rasterizer,
glyph_offset: Delta<i8>,
metrics: &crossfont::Metrics,
) -> RasterizedGlyph {
let mut rasterized =
rasterizer.get_glyph(glyph_key.key).unwrap_or_else(|_| Default::default());
rasterized.left += i32::from(glyph_offset.x);
rasterized.top += i32::from(glyph_offset.y);
rasterized.top -= metrics.descent as i32;
RasterizedGlyph { wide: glyph_key.wide, zero_width: glyph_key.zero_width, rasterized }
}
pub fn get<L>(&mut self, glyph_key: GlyphKey, loader: &mut L) -> &AtlasGlyph
where
L: LoadGlyph,
{
let glyph_offset = self.glyph_offset;
let rasterizer = &mut self.rasterizer;
let metrics = &self.metrics;
self.cache.entry(glyph_key).or_insert_with(|| {
let rasterized = Self::rasterize_glyph(glyph_key, rasterizer, glyph_offset, metrics);
loader.load_glyph(&rasterized)
})
}
/// Clear currently cached data in both GL and the registry.
pub fn clear_glyph_cache<L: LoadGlyph>(&mut self, config: &Config, loader: &mut L) {
let (cell_width, cell_height) = Self::compute_cell_size(config, &self.metrics);
self.cell_size = Vec2::new(cell_width as i32, cell_height as i32);
self.cache = HashMap::default();
self.cursor_cache = HashMap::default();
self.clear_cache_with_common_glyphs(loader, config);
}
pub fn update_font_size<L: LoadGlyph>(
&mut self,
config: &Config,
font: &Font,
dpr: f64,
loader: &mut L,
) -> Result<(), crossfont::Error> {
// Update dpi scaling.
self.rasterizer.update_dpr(dpr as f32);
// Recompute font keys.
let (regular, bold, italic, bold_italic) =
Self::compute_font_keys(font, &mut self.rasterizer)?;
self.rasterizer.get_glyph(crossfont::GlyphKey {
font_key: regular,
c: 'm',
size: font.size,
})?;
let metrics = self.rasterizer.metrics(regular, font.size)?;
info!("Font size changed to {:?} with DPR of {}", font.size, dpr);
self.font_size = font.size;
self.font_key = regular;
self.bold_key = bold;
self.italic_key = italic;
self.bold_italic_key = bold_italic;
self.metrics = metrics;
self.clear_glyph_cache(config, loader);
Ok(())
}
pub fn font_metrics(&self) -> crossfont::Metrics {
self.metrics
}
/// Prefetch glyphs that are almost guaranteed to be loaded anyways.
fn clear_cache_with_common_glyphs<L: LoadGlyph>(&mut self, loader: &mut L, config: &Config) {
let glyph_offset = self.glyph_offset;
let metrics = &self.metrics;
let font_size = self.font_size;
let rasterizer = &mut self.rasterizer;
let cell_size = self.cell_size;
let mut atlas_cell_size = self.cell_size;
let mut atlas_cell_offset = Vec2 { x: 0, y: 0 };
type Glyphs = Vec<(GlyphKey, RasterizedGlyph)>;
let glyphs: Glyphs = [self.font_key, self.bold_key, self.italic_key, self.bold_italic_key]
.iter()
.flat_map(|font| {
(32u8..=126u8)
.map(|c| {
let glyph_key = GlyphKey {
wide: false,
zero_width: false,
key: crossfont::GlyphKey {
font_key: *font,
c: c as char,
size: font_size,
},
};
let glyph =
Self::rasterize_glyph(glyph_key, rasterizer, glyph_offset, metrics);
atlas_cell_size.x = std::cmp::max(
atlas_cell_size.x,
glyph.rasterized.left + glyph.rasterized.width,
);
atlas_cell_size.y = std::cmp::max(atlas_cell_size.y, glyph.rasterized.top);
atlas_cell_offset.x =
std::cmp::max(atlas_cell_offset.x, -glyph.rasterized.left);
atlas_cell_offset.y = std::cmp::max(
atlas_cell_offset.y,
glyph.rasterized.height - glyph.rasterized.top,
);
debug!(
"precomp: '{}' left={} top={} w={} h={} off={:?} atlas_cell={:?} \
offset={:?}",
glyph.rasterized.c,
glyph.rasterized.left,
glyph.rasterized.top,
glyph.rasterized.width,
glyph.rasterized.height,
glyph_offset,
atlas_cell_size,
atlas_cell_offset,
);
(glyph_key, glyph)
})
.collect::<Glyphs>()
})
.collect();
info!("Max glyph size: {:?}", cell_size);
loader.clear(atlas_cell_size, atlas_cell_offset);
// Multipass grid render workaround for large font sizes
// Generate cursor glyphs first to ensure that they end up strictly
// in the first atlas/pass
for style in [
CursorStyle::Block,
CursorStyle::Beam,
CursorStyle::Underline,
CursorStyle::HollowBlock,
]
.iter()
{
let cursor_key = CursorKey { style: *style, is_wide: false };
let cursor_glyph = RasterizedGlyph {
wide: false,
zero_width: false,
rasterized: cursor::get_cursor_glyph(
cursor_key.style,
*metrics,
config.ui_config.font.offset.x,
config.ui_config.font.offset.y,
cursor_key.is_wide,
config.cursor.thickness(),
),
};
self.cursor_cache.entry(cursor_key).or_insert_with(|| loader.load_glyph(&cursor_glyph));
}
for glyph in glyphs {
self.cache.entry(glyph.0).or_insert_with(|| loader.load_glyph(&glyph.1));
}
}
/// Calculate font metrics without access to a glyph cache.
pub fn static_metrics(font: Font, dpr: f64) -> Result<crossfont::Metrics, crossfont::Error> {
let mut rasterizer = crossfont::Rasterizer::new(dpr as f32, font.use_thin_strokes())?;
let regular_desc = GlyphCache::make_desc(&font.normal(), Slant::Normal, Weight::Normal);
let regular = Self::load_regular_font(&mut rasterizer, ®ular_desc, font.size)?;
rasterizer.get_glyph(crossfont::GlyphKey { font_key: regular, c: 'm', size: font.size })?;
rasterizer.metrics(regular, font.size)
}
/// Calculate the cell dimensions based on font metrics.
///
/// This will return a tuple of the cell width and height.
#[inline]
pub fn compute_cell_size(config: &Config, metrics: &crossfont::Metrics) -> (f32, f32) {
let offset_x = f64::from(config.ui_config.font.offset.x);
let offset_y = f64::from(config.ui_config.font.offset.y);
(
(metrics.average_advance + offset_x).floor().max(1.) as f32,
(metrics.line_height + offset_y).floor().max(1.) as f32,
)
}
}
|
regular
} | conditional_block |
glyph.rs | use super::math::*;
use crate::config::font::{Font, FontDescription};
use crate::config::ui_config::Delta;
use crate::config::Config;
use crate::cursor;
use alacritty_terminal::ansi::CursorStyle;
use alacritty_terminal::term::CursorKey;
use crossfont::{FontDesc, FontKey, Rasterize, Rasterizer, Size, Slant, Style, Weight};
use fnv::FnvHasher;
use log::*;
use std::collections::HashMap;
use std::hash::BuildHasherDefault;
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub struct GlyphKey {
pub key: crossfont::GlyphKey,
pub wide: bool,
pub zero_width: bool,
}
#[derive(Debug)]
pub struct | {
pub rasterized: crossfont::RasterizedGlyph,
pub wide: bool,
pub zero_width: bool,
}
/// `LoadGlyph` allows for copying a rasterized glyph into graphics memory.
pub trait LoadGlyph {
/// Load the rasterized glyph into GPU memory.
fn load_glyph(&mut self, rasterized: &RasterizedGlyph) -> AtlasGlyph;
/// Clear any state accumulated from previous loaded glyphs.
///
/// This can, for instance, be used to reset the texture Atlas.
fn clear(&mut self, cell_size: Vec2<i32>, cell_offset: Vec2<i32>);
}
#[derive(Copy, Debug, Clone)]
pub struct GridAtlasGlyph {
pub atlas_index: usize,
pub line: u16,
pub column: u16,
pub colored: bool,
}
#[derive(Copy, Debug, Clone)]
pub struct QuadAtlasGlyph {
pub atlas_index: usize,
pub uv_bot: f32,
pub uv_left: f32,
pub uv_width: f32,
pub uv_height: f32,
pub top: i16,
pub left: i16,
pub width: i16,
pub height: i16,
pub colored: bool,
}
#[derive(Copy, Debug, Clone)]
pub enum AtlasGlyph {
Grid(GridAtlasGlyph),
Quad(QuadAtlasGlyph),
}
/// Naïve glyph cache.
///
/// Currently only keyed by `char`, and thus not possible to hold different
/// representations of the same code point.
pub struct GlyphCache {
/// Cache of buffered glyphs.
pub cache: HashMap<GlyphKey, AtlasGlyph, BuildHasherDefault<FnvHasher>>,
/// Cache of buffered cursor glyphs.
pub cursor_cache: HashMap<CursorKey, AtlasGlyph, BuildHasherDefault<FnvHasher>>,
/// Rasterizer for loading new glyphs.
rasterizer: Rasterizer,
/// Regular font.
pub font_key: FontKey,
/// Bold font.
pub bold_key: FontKey,
/// Italic font.
pub italic_key: FontKey,
/// Bold italic font.
pub bold_italic_key: FontKey,
/// Font size.
pub font_size: crossfont::Size,
/// Glyph offset.
glyph_offset: Delta<i8>,
/// Font metrics.
pub metrics: crossfont::Metrics,
/// Cell size
pub cell_size: Vec2<i32>,
}
impl GlyphCache {
pub fn new<L>(
mut rasterizer: Rasterizer,
config: &Config,
font: &Font,
loader: &mut L,
) -> Result<GlyphCache, crossfont::Error>
where
L: LoadGlyph,
{
let (regular, bold, italic, bold_italic) = Self::compute_font_keys(font, &mut rasterizer)?;
// Need to load at least one glyph for the face before calling metrics.
// The glyph requested here ('m' at the time of writing) has no special
// meaning.
rasterizer.get_glyph(crossfont::GlyphKey { font_key: regular, c: 'm', size: font.size })?;
let metrics = rasterizer.metrics(regular, font.size)?;
let (cell_width, cell_height) = Self::compute_cell_size(config, &metrics);
let cell_size = Vec2::new(cell_width as i32, cell_height as i32);
let mut cache = Self {
cache: HashMap::default(),
cursor_cache: HashMap::default(),
rasterizer,
font_size: font.size,
font_key: regular,
bold_key: bold,
italic_key: italic,
bold_italic_key: bold_italic,
glyph_offset: font.glyph_offset,
metrics,
cell_size,
};
cache.clear_cache_with_common_glyphs(loader, config);
Ok(cache)
}
/// Computes font keys for (Regular, Bold, Italic, Bold Italic).
fn compute_font_keys(
font: &Font,
rasterizer: &mut Rasterizer,
) -> Result<(FontKey, FontKey, FontKey, FontKey), crossfont::Error> {
let size = font.size;
// Load regular font.
let regular_desc = Self::make_desc(&font.normal(), Slant::Normal, Weight::Normal);
let regular = Self::load_regular_font(rasterizer, ®ular_desc, size)?;
// Helper to load a description if it is not the `regular_desc`.
let mut load_or_regular = |desc: FontDesc| {
if desc == regular_desc {
regular
} else {
rasterizer.load_font(&desc, size).unwrap_or_else(|_| regular)
}
};
// Load bold font.
let bold_desc = Self::make_desc(&font.bold(), Slant::Normal, Weight::Bold);
let bold = load_or_regular(bold_desc);
// Load italic font.
let italic_desc = Self::make_desc(&font.italic(), Slant::Italic, Weight::Normal);
let italic = load_or_regular(italic_desc);
// Load bold italic font.
let bold_italic_desc = Self::make_desc(&font.bold_italic(), Slant::Italic, Weight::Bold);
let bold_italic = load_or_regular(bold_italic_desc);
Ok((regular, bold, italic, bold_italic))
}
fn load_regular_font(
rasterizer: &mut Rasterizer,
description: &FontDesc,
size: Size,
) -> Result<FontKey, crossfont::Error> {
match rasterizer.load_font(description, size) {
Ok(font) => Ok(font),
Err(err) => {
error!("{}", err);
let fallback_desc =
Self::make_desc(&Font::default().normal(), Slant::Normal, Weight::Normal);
rasterizer.load_font(&fallback_desc, size)
},
}
}
fn make_desc(desc: &FontDescription, slant: Slant, weight: Weight) -> FontDesc {
let style = if let Some(ref spec) = desc.style {
Style::Specific(spec.to_owned())
} else {
Style::Description { slant, weight }
};
FontDesc::new(desc.family.clone(), style)
}
fn rasterize_glyph(
glyph_key: GlyphKey,
rasterizer: &mut Rasterizer,
glyph_offset: Delta<i8>,
metrics: &crossfont::Metrics,
) -> RasterizedGlyph {
let mut rasterized =
rasterizer.get_glyph(glyph_key.key).unwrap_or_else(|_| Default::default());
rasterized.left += i32::from(glyph_offset.x);
rasterized.top += i32::from(glyph_offset.y);
rasterized.top -= metrics.descent as i32;
RasterizedGlyph { wide: glyph_key.wide, zero_width: glyph_key.zero_width, rasterized }
}
pub fn get<L>(&mut self, glyph_key: GlyphKey, loader: &mut L) -> &AtlasGlyph
where
L: LoadGlyph,
{
let glyph_offset = self.glyph_offset;
let rasterizer = &mut self.rasterizer;
let metrics = &self.metrics;
self.cache.entry(glyph_key).or_insert_with(|| {
let rasterized = Self::rasterize_glyph(glyph_key, rasterizer, glyph_offset, metrics);
loader.load_glyph(&rasterized)
})
}
/// Clear currently cached data in both GL and the registry.
pub fn clear_glyph_cache<L: LoadGlyph>(&mut self, config: &Config, loader: &mut L) {
let (cell_width, cell_height) = Self::compute_cell_size(config, &self.metrics);
self.cell_size = Vec2::new(cell_width as i32, cell_height as i32);
self.cache = HashMap::default();
self.cursor_cache = HashMap::default();
self.clear_cache_with_common_glyphs(loader, config);
}
pub fn update_font_size<L: LoadGlyph>(
&mut self,
config: &Config,
font: &Font,
dpr: f64,
loader: &mut L,
) -> Result<(), crossfont::Error> {
// Update dpi scaling.
self.rasterizer.update_dpr(dpr as f32);
// Recompute font keys.
let (regular, bold, italic, bold_italic) =
Self::compute_font_keys(font, &mut self.rasterizer)?;
self.rasterizer.get_glyph(crossfont::GlyphKey {
font_key: regular,
c: 'm',
size: font.size,
})?;
let metrics = self.rasterizer.metrics(regular, font.size)?;
info!("Font size changed to {:?} with DPR of {}", font.size, dpr);
self.font_size = font.size;
self.font_key = regular;
self.bold_key = bold;
self.italic_key = italic;
self.bold_italic_key = bold_italic;
self.metrics = metrics;
self.clear_glyph_cache(config, loader);
Ok(())
}
pub fn font_metrics(&self) -> crossfont::Metrics {
self.metrics
}
/// Prefetch glyphs that are almost guaranteed to be loaded anyways.
fn clear_cache_with_common_glyphs<L: LoadGlyph>(&mut self, loader: &mut L, config: &Config) {
let glyph_offset = self.glyph_offset;
let metrics = &self.metrics;
let font_size = self.font_size;
let rasterizer = &mut self.rasterizer;
let cell_size = self.cell_size;
let mut atlas_cell_size = self.cell_size;
let mut atlas_cell_offset = Vec2 { x: 0, y: 0 };
type Glyphs = Vec<(GlyphKey, RasterizedGlyph)>;
let glyphs: Glyphs = [self.font_key, self.bold_key, self.italic_key, self.bold_italic_key]
.iter()
.flat_map(|font| {
(32u8..=126u8)
.map(|c| {
let glyph_key = GlyphKey {
wide: false,
zero_width: false,
key: crossfont::GlyphKey {
font_key: *font,
c: c as char,
size: font_size,
},
};
let glyph =
Self::rasterize_glyph(glyph_key, rasterizer, glyph_offset, metrics);
atlas_cell_size.x = std::cmp::max(
atlas_cell_size.x,
glyph.rasterized.left + glyph.rasterized.width,
);
atlas_cell_size.y = std::cmp::max(atlas_cell_size.y, glyph.rasterized.top);
atlas_cell_offset.x =
std::cmp::max(atlas_cell_offset.x, -glyph.rasterized.left);
atlas_cell_offset.y = std::cmp::max(
atlas_cell_offset.y,
glyph.rasterized.height - glyph.rasterized.top,
);
debug!(
"precomp: '{}' left={} top={} w={} h={} off={:?} atlas_cell={:?} \
offset={:?}",
glyph.rasterized.c,
glyph.rasterized.left,
glyph.rasterized.top,
glyph.rasterized.width,
glyph.rasterized.height,
glyph_offset,
atlas_cell_size,
atlas_cell_offset,
);
(glyph_key, glyph)
})
.collect::<Glyphs>()
})
.collect();
info!("Max glyph size: {:?}", cell_size);
loader.clear(atlas_cell_size, atlas_cell_offset);
// Multipass grid render workaround for large font sizes
// Generate cursor glyphs first to ensure that they end up strictly
// in the first atlas/pass
for style in [
CursorStyle::Block,
CursorStyle::Beam,
CursorStyle::Underline,
CursorStyle::HollowBlock,
]
.iter()
{
let cursor_key = CursorKey { style: *style, is_wide: false };
let cursor_glyph = RasterizedGlyph {
wide: false,
zero_width: false,
rasterized: cursor::get_cursor_glyph(
cursor_key.style,
*metrics,
config.ui_config.font.offset.x,
config.ui_config.font.offset.y,
cursor_key.is_wide,
config.cursor.thickness(),
),
};
self.cursor_cache.entry(cursor_key).or_insert_with(|| loader.load_glyph(&cursor_glyph));
}
for glyph in glyphs {
self.cache.entry(glyph.0).or_insert_with(|| loader.load_glyph(&glyph.1));
}
}
/// Calculate font metrics without access to a glyph cache.
pub fn static_metrics(font: Font, dpr: f64) -> Result<crossfont::Metrics, crossfont::Error> {
let mut rasterizer = crossfont::Rasterizer::new(dpr as f32, font.use_thin_strokes())?;
let regular_desc = GlyphCache::make_desc(&font.normal(), Slant::Normal, Weight::Normal);
let regular = Self::load_regular_font(&mut rasterizer, ®ular_desc, font.size)?;
rasterizer.get_glyph(crossfont::GlyphKey { font_key: regular, c: 'm', size: font.size })?;
rasterizer.metrics(regular, font.size)
}
/// Calculate the cell dimensions based on font metrics.
///
/// This will return a tuple of the cell width and height.
#[inline]
pub fn compute_cell_size(config: &Config, metrics: &crossfont::Metrics) -> (f32, f32) {
let offset_x = f64::from(config.ui_config.font.offset.x);
let offset_y = f64::from(config.ui_config.font.offset.y);
(
(metrics.average_advance + offset_x).floor().max(1.) as f32,
(metrics.line_height + offset_y).floor().max(1.) as f32,
)
}
}
| RasterizedGlyph | identifier_name |
glyph.rs | use super::math::*;
use crate::config::font::{Font, FontDescription};
use crate::config::ui_config::Delta;
use crate::config::Config;
use crate::cursor;
use alacritty_terminal::ansi::CursorStyle;
use alacritty_terminal::term::CursorKey;
use crossfont::{FontDesc, FontKey, Rasterize, Rasterizer, Size, Slant, Style, Weight};
use fnv::FnvHasher;
use log::*;
use std::collections::HashMap;
use std::hash::BuildHasherDefault;
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub struct GlyphKey {
pub key: crossfont::GlyphKey,
pub wide: bool,
pub zero_width: bool,
}
#[derive(Debug)]
pub struct RasterizedGlyph {
pub rasterized: crossfont::RasterizedGlyph,
pub wide: bool,
pub zero_width: bool,
}
/// `LoadGlyph` allows for copying a rasterized glyph into graphics memory.
pub trait LoadGlyph {
/// Load the rasterized glyph into GPU memory.
fn load_glyph(&mut self, rasterized: &RasterizedGlyph) -> AtlasGlyph;
/// Clear any state accumulated from previous loaded glyphs.
///
/// This can, for instance, be used to reset the texture Atlas.
fn clear(&mut self, cell_size: Vec2<i32>, cell_offset: Vec2<i32>);
}
#[derive(Copy, Debug, Clone)]
pub struct GridAtlasGlyph {
pub atlas_index: usize,
pub line: u16,
pub column: u16,
pub colored: bool,
}
#[derive(Copy, Debug, Clone)]
pub struct QuadAtlasGlyph {
pub atlas_index: usize,
pub uv_bot: f32,
pub uv_left: f32,
pub uv_width: f32,
pub uv_height: f32,
pub top: i16,
pub left: i16,
pub width: i16,
pub height: i16,
pub colored: bool,
}
#[derive(Copy, Debug, Clone)]
pub enum AtlasGlyph {
Grid(GridAtlasGlyph),
Quad(QuadAtlasGlyph),
}
/// Naïve glyph cache.
///
/// Currently only keyed by `char`, and thus not possible to hold different
/// representations of the same code point.
pub struct GlyphCache {
/// Cache of buffered glyphs.
pub cache: HashMap<GlyphKey, AtlasGlyph, BuildHasherDefault<FnvHasher>>,
/// Cache of buffered cursor glyphs.
pub cursor_cache: HashMap<CursorKey, AtlasGlyph, BuildHasherDefault<FnvHasher>>,
/// Rasterizer for loading new glyphs.
rasterizer: Rasterizer,
/// Regular font.
pub font_key: FontKey,
/// Bold font.
pub bold_key: FontKey,
/// Italic font.
pub italic_key: FontKey,
/// Bold italic font.
pub bold_italic_key: FontKey,
/// Font size.
pub font_size: crossfont::Size,
/// Glyph offset.
glyph_offset: Delta<i8>,
/// Font metrics.
pub metrics: crossfont::Metrics,
/// Cell size
pub cell_size: Vec2<i32>,
}
impl GlyphCache {
pub fn new<L>(
mut rasterizer: Rasterizer,
config: &Config,
font: &Font,
loader: &mut L,
) -> Result<GlyphCache, crossfont::Error>
where
L: LoadGlyph,
{
let (regular, bold, italic, bold_italic) = Self::compute_font_keys(font, &mut rasterizer)?;
// Need to load at least one glyph for the face before calling metrics.
// The glyph requested here ('m' at the time of writing) has no special
// meaning.
rasterizer.get_glyph(crossfont::GlyphKey { font_key: regular, c: 'm', size: font.size })?;
let metrics = rasterizer.metrics(regular, font.size)?;
let (cell_width, cell_height) = Self::compute_cell_size(config, &metrics);
let cell_size = Vec2::new(cell_width as i32, cell_height as i32);
let mut cache = Self {
cache: HashMap::default(),
cursor_cache: HashMap::default(),
rasterizer,
font_size: font.size,
font_key: regular,
bold_key: bold,
italic_key: italic,
bold_italic_key: bold_italic,
glyph_offset: font.glyph_offset,
metrics,
cell_size,
};
cache.clear_cache_with_common_glyphs(loader, config);
Ok(cache)
}
/// Computes font keys for (Regular, Bold, Italic, Bold Italic).
fn compute_font_keys(
font: &Font,
rasterizer: &mut Rasterizer,
) -> Result<(FontKey, FontKey, FontKey, FontKey), crossfont::Error> {
let size = font.size;
// Load regular font.
let regular_desc = Self::make_desc(&font.normal(), Slant::Normal, Weight::Normal);
let regular = Self::load_regular_font(rasterizer, ®ular_desc, size)?;
// Helper to load a description if it is not the `regular_desc`.
let mut load_or_regular = |desc: FontDesc| {
if desc == regular_desc {
regular
} else {
rasterizer.load_font(&desc, size).unwrap_or_else(|_| regular)
}
};
// Load bold font.
let bold_desc = Self::make_desc(&font.bold(), Slant::Normal, Weight::Bold);
let bold = load_or_regular(bold_desc);
// Load italic font.
let italic_desc = Self::make_desc(&font.italic(), Slant::Italic, Weight::Normal);
let italic = load_or_regular(italic_desc);
// Load bold italic font.
let bold_italic_desc = Self::make_desc(&font.bold_italic(), Slant::Italic, Weight::Bold);
let bold_italic = load_or_regular(bold_italic_desc);
Ok((regular, bold, italic, bold_italic))
}
fn load_regular_font(
rasterizer: &mut Rasterizer,
description: &FontDesc,
size: Size,
) -> Result<FontKey, crossfont::Error> {
match rasterizer.load_font(description, size) {
Ok(font) => Ok(font),
Err(err) => {
error!("{}", err);
let fallback_desc =
Self::make_desc(&Font::default().normal(), Slant::Normal, Weight::Normal);
rasterizer.load_font(&fallback_desc, size)
},
}
}
fn make_desc(desc: &FontDescription, slant: Slant, weight: Weight) -> FontDesc {
let style = if let Some(ref spec) = desc.style {
Style::Specific(spec.to_owned())
} else {
Style::Description { slant, weight }
};
FontDesc::new(desc.family.clone(), style)
}
fn rasterize_glyph(
glyph_key: GlyphKey,
rasterizer: &mut Rasterizer,
glyph_offset: Delta<i8>,
metrics: &crossfont::Metrics,
) -> RasterizedGlyph {
let mut rasterized =
rasterizer.get_glyph(glyph_key.key).unwrap_or_else(|_| Default::default());
rasterized.left += i32::from(glyph_offset.x);
rasterized.top += i32::from(glyph_offset.y);
rasterized.top -= metrics.descent as i32;
RasterizedGlyph { wide: glyph_key.wide, zero_width: glyph_key.zero_width, rasterized }
}
pub fn get<L>(&mut self, glyph_key: GlyphKey, loader: &mut L) -> &AtlasGlyph
where
L: LoadGlyph,
{
let glyph_offset = self.glyph_offset;
let rasterizer = &mut self.rasterizer;
let metrics = &self.metrics;
self.cache.entry(glyph_key).or_insert_with(|| {
let rasterized = Self::rasterize_glyph(glyph_key, rasterizer, glyph_offset, metrics);
loader.load_glyph(&rasterized)
})
}
/// Clear currently cached data in both GL and the registry.
pub fn clear_glyph_cache<L: LoadGlyph>(&mut self, config: &Config, loader: &mut L) {
let (cell_width, cell_height) = Self::compute_cell_size(config, &self.metrics);
self.cell_size = Vec2::new(cell_width as i32, cell_height as i32);
self.cache = HashMap::default();
self.cursor_cache = HashMap::default();
self.clear_cache_with_common_glyphs(loader, config);
}
pub fn update_font_size<L: LoadGlyph>(
&mut self,
config: &Config,
font: &Font,
dpr: f64,
loader: &mut L,
) -> Result<(), crossfont::Error> {
// Update dpi scaling.
self.rasterizer.update_dpr(dpr as f32);
// Recompute font keys.
let (regular, bold, italic, bold_italic) =
Self::compute_font_keys(font, &mut self.rasterizer)?;
self.rasterizer.get_glyph(crossfont::GlyphKey {
font_key: regular,
c: 'm',
size: font.size,
})?;
let metrics = self.rasterizer.metrics(regular, font.size)?;
info!("Font size changed to {:?} with DPR of {}", font.size, dpr);
self.font_size = font.size;
self.font_key = regular;
self.bold_key = bold;
self.italic_key = italic;
self.bold_italic_key = bold_italic;
self.metrics = metrics;
self.clear_glyph_cache(config, loader);
Ok(())
}
pub fn font_metrics(&self) -> crossfont::Metrics {
self.metrics
}
/// Prefetch glyphs that are almost guaranteed to be loaded anyways.
fn clear_cache_with_common_glyphs<L: LoadGlyph>(&mut self, loader: &mut L, config: &Config) {
let glyph_offset = self.glyph_offset;
let metrics = &self.metrics;
let font_size = self.font_size;
let rasterizer = &mut self.rasterizer;
let cell_size = self.cell_size;
let mut atlas_cell_size = self.cell_size;
let mut atlas_cell_offset = Vec2 { x: 0, y: 0 };
type Glyphs = Vec<(GlyphKey, RasterizedGlyph)>;
let glyphs: Glyphs = [self.font_key, self.bold_key, self.italic_key, self.bold_italic_key]
.iter()
.flat_map(|font| {
(32u8..=126u8)
.map(|c| {
let glyph_key = GlyphKey {
wide: false,
zero_width: false, | size: font_size,
},
};
let glyph =
Self::rasterize_glyph(glyph_key, rasterizer, glyph_offset, metrics);
atlas_cell_size.x = std::cmp::max(
atlas_cell_size.x,
glyph.rasterized.left + glyph.rasterized.width,
);
atlas_cell_size.y = std::cmp::max(atlas_cell_size.y, glyph.rasterized.top);
atlas_cell_offset.x =
std::cmp::max(atlas_cell_offset.x, -glyph.rasterized.left);
atlas_cell_offset.y = std::cmp::max(
atlas_cell_offset.y,
glyph.rasterized.height - glyph.rasterized.top,
);
debug!(
"precomp: '{}' left={} top={} w={} h={} off={:?} atlas_cell={:?} \
offset={:?}",
glyph.rasterized.c,
glyph.rasterized.left,
glyph.rasterized.top,
glyph.rasterized.width,
glyph.rasterized.height,
glyph_offset,
atlas_cell_size,
atlas_cell_offset,
);
(glyph_key, glyph)
})
.collect::<Glyphs>()
})
.collect();
info!("Max glyph size: {:?}", cell_size);
loader.clear(atlas_cell_size, atlas_cell_offset);
// Multipass grid render workaround for large font sizes
// Generate cursor glyphs first to ensure that they end up strictly
// in the first atlas/pass
for style in [
CursorStyle::Block,
CursorStyle::Beam,
CursorStyle::Underline,
CursorStyle::HollowBlock,
]
.iter()
{
let cursor_key = CursorKey { style: *style, is_wide: false };
let cursor_glyph = RasterizedGlyph {
wide: false,
zero_width: false,
rasterized: cursor::get_cursor_glyph(
cursor_key.style,
*metrics,
config.ui_config.font.offset.x,
config.ui_config.font.offset.y,
cursor_key.is_wide,
config.cursor.thickness(),
),
};
self.cursor_cache.entry(cursor_key).or_insert_with(|| loader.load_glyph(&cursor_glyph));
}
for glyph in glyphs {
self.cache.entry(glyph.0).or_insert_with(|| loader.load_glyph(&glyph.1));
}
}
/// Calculate font metrics without access to a glyph cache.
pub fn static_metrics(font: Font, dpr: f64) -> Result<crossfont::Metrics, crossfont::Error> {
let mut rasterizer = crossfont::Rasterizer::new(dpr as f32, font.use_thin_strokes())?;
let regular_desc = GlyphCache::make_desc(&font.normal(), Slant::Normal, Weight::Normal);
let regular = Self::load_regular_font(&mut rasterizer, ®ular_desc, font.size)?;
rasterizer.get_glyph(crossfont::GlyphKey { font_key: regular, c: 'm', size: font.size })?;
rasterizer.metrics(regular, font.size)
}
/// Calculate the cell dimensions based on font metrics.
///
/// This will return a tuple of the cell width and height.
#[inline]
pub fn compute_cell_size(config: &Config, metrics: &crossfont::Metrics) -> (f32, f32) {
let offset_x = f64::from(config.ui_config.font.offset.x);
let offset_y = f64::from(config.ui_config.font.offset.y);
(
(metrics.average_advance + offset_x).floor().max(1.) as f32,
(metrics.line_height + offset_y).floor().max(1.) as f32,
)
}
} | key: crossfont::GlyphKey {
font_key: *font,
c: c as char, | random_line_split |
main.go | package main
import (
"bytes"
"encoding/json"
"errors"
"flag"
"html/template"
"io/ioutil"
nethttp "net/http"
"net/url"
"os"
"strings"
"sync"
"time"
"github.com/companieshouse/gopan/getpan/getpan"
"github.com/companieshouse/gopan/gopan"
gotcha "github.com/companieshouse/gotcha/app"
"github.com/companieshouse/gotcha/events"
"github.com/companieshouse/gotcha/http"
"github.com/ian-kent/go-log/log"
)
var CurrentRelease = "0.12"
type Releases []*Release
type Release struct {
TagName string `json:"tag_name"`
URL string `json:"html_url"`
}
var update_indexes func()
var load_index func(string, string)
func main() {
configure()
args := flag.Args()
if len(args) > 0 && args[0] == "init" {
log.Info("Initialising SmartPAN")
log.Info("=> Installing Perl dependencies")
// FIXME most of this is repeated from getpan/main.go
cfg := getpan.DefaultConfig()
cfg.CacheDir = config.CacheDir
for _, source := range cfg.Sources {
if err := source.Load(); err != nil |
}
deps := &getpan.DependencyList{
Dependencies: make([]*getpan.Dependency, 0),
}
d1, _ := getpan.DependencyFromString("Parse::LocalDistribution", "")
d2, _ := getpan.DependencyFromString("JSON::XS", "")
deps.AddDependency(d1)
deps.AddDependency(d2)
if err := deps.Resolve(); err != nil {
log.Error("Error resolving dependencies: %s", err)
os.Exit(1)
return
}
_, err := deps.Install()
if err != nil {
log.Error("Error installing dependencies: %s", err)
os.Exit(2)
return
}
log.Info(" - Installed %d modules", deps.UniqueInstalled())
log.Info("SmartPAN initialisation complete")
return
}
if config.TestDeps {
perldeps := gopan.TestPerlDeps()
perldeps.Dump()
if !perldeps.Ok {
log.Error("Required perl dependencies are missing")
os.Exit(1)
return
}
}
if len(args) > 0 && args[0] == "import" {
if len(args) < 4 {
log.Error("Invalid arguments, expecting: smartpan import FILE AUTHORID INDEX")
return
}
fname := args[1]
log.Info("Importing module from %s", fname)
log.Info("Author ID: %s", args[2])
log.Info("Index : %s", args[3])
extraParams := map[string]string{
"importinto": args[3],
"authorid": args[2],
"newindex": "",
"cpanmirror": "",
"importurl": "",
"fromdir": "",
}
if strings.HasPrefix(fname, "http://") || strings.HasPrefix(fname, "https://") {
log.Info("URL: %s", fname)
extraParams["importurl"] = fname
request, err := newFormPostRequest(config.RemoteHost+"/import?stream=y", extraParams)
if err != nil {
log.Error("Create request error: %s", err.Error())
return
}
client := &nethttp.Client{}
resp, err := client.Do(request)
if err != nil {
log.Error("Error connecting to host: %s", err.Error())
return
} else {
// TODO stream this
body := &bytes.Buffer{}
_, err := body.ReadFrom(resp.Body)
if err != nil {
log.Error("Error reading response: %s", err.Error())
return
}
resp.Body.Close()
//log.Info("%d", resp.StatusCode)
//log.Info("%s", resp.Header)
log.Info("%s", body.String())
}
} else {
fname = strings.TrimPrefix(fname, "file://")
log.Info("File: %s", fname)
if _, err := os.Stat(fname); err != nil {
log.Error("File not found: %s", err.Error())
return
}
request, err := newfileUploadRequest(config.RemoteHost+"/import?stream=y", extraParams, "fromfile", fname)
if err != nil {
log.Error("Create upload error: %s", err.Error())
return
}
client := &nethttp.Client{}
resp, err := client.Do(request)
if err != nil {
log.Error("Error connecting to host: %s", err.Error())
return
} else {
// TODO stream this
body := &bytes.Buffer{}
_, err := body.ReadFrom(resp.Body)
if err != nil {
log.Error("Error reading response: %s", err.Error())
return
}
resp.Body.Close()
//log.Info("%d", resp.StatusCode)
//log.Info("%s", resp.Header)
log.Info("%s", body.String())
}
}
return
}
config.CurrentRelease = CurrentRelease
var wg sync.WaitGroup
load_index = func(index string, file string) {
indexes[index] = gopan.LoadIndex(file)
}
wg.Add(1)
go func() {
defer wg.Done()
indexes = make(map[string]map[string]*gopan.Source)
// Load CPAN index
if fi, err := os.Stat(config.CacheDir + "/" + config.CPANIndex); err == nil {
config.HasCPANIndex = true
config.CPANIndexDate = fi.ModTime().String()
config.CPANStatus = "Loading"
wg.Add(1)
go func() {
defer wg.Done()
load_index(config.CPANIndex, config.CacheDir+"/"+config.CPANIndex)
config.CPANStatus = "Loaded"
}()
}
// Load BackPAN index
if fi, err := os.Stat(config.CacheDir + "/" + config.BackPANIndex); err == nil {
config.HasBackPANIndex = true
config.BackPANIndexDate = fi.ModTime().String()
config.BackPANStatus = "Loading"
wg.Add(1)
go func() {
defer wg.Done()
load_index(config.BackPANIndex, config.CacheDir+"/"+config.BackPANIndex)
config.BackPANStatus = "Loaded"
}()
}
// Load our secondary indexes
for _, idx := range config.Indexes {
wg.Add(1)
go func() {
defer wg.Done()
load_index(idx, config.CacheDir+"/"+idx)
}()
}
// Load our primary index (this is the only index written back to)
wg.Add(1)
go func() {
defer wg.Done()
load_index(config.Index, config.CacheDir+"/"+config.Index)
}()
}()
update_indexes = func() {
wg.Wait()
wg.Add(1)
go func() {
wg.Wait()
config.ImportAvailable = true
nsrc, nauth, npkg, nprov := gopan.CountIndex(indexes)
// TODO should probably be in the index - needs to udpate when index changes
summary = &Summary{nsrc, nauth, npkg, nprov}
// Do this now so changing the level doesn't interfere with index load
log.Logger().SetLevel(log.Stol(config.LogLevel))
}()
defer wg.Done()
// Create in-memory indexes for UI/search etc
for fname, _ := range indexes {
for idn, idx := range indexes[fname] {
mapped[idx.Name] = make(map[string]map[string]map[string]*gopan.Author)
for _, auth := range idx.Authors {
// author name
if _, ok := mapped[idx.Name][auth.Name[:1]]; !ok {
mapped[idx.Name][auth.Name[:1]] = make(map[string]map[string]*gopan.Author)
}
if _, ok := mapped[idx.Name][auth.Name[:1]][auth.Name[:2]]; !ok {
mapped[idx.Name][auth.Name[:1]][auth.Name[:2]] = make(map[string]*gopan.Author)
}
mapped[idx.Name][auth.Name[:1]][auth.Name[:2]][auth.Name] = auth
// wildcards
if _, ok := mapped[idx.Name]["*"]; !ok {
mapped[idx.Name]["*"] = make(map[string]map[string]*gopan.Author)
}
if _, ok := mapped[idx.Name]["*"]["**"]; !ok {
mapped[idx.Name]["*"]["**"] = make(map[string]*gopan.Author)
}
mapped[idx.Name]["*"]["**"][auth.Name] = auth
// combos
if _, ok := mapped[idx.Name][auth.Name[:1]]["**"]; !ok {
mapped[idx.Name][auth.Name[:1]]["**"] = make(map[string]*gopan.Author)
}
if _, ok := mapped[idx.Name]["*"][auth.Name[:2]]; !ok {
mapped[idx.Name]["*"][auth.Name[:2]] = make(map[string]*gopan.Author)
}
mapped[idx.Name][auth.Name[:1]]["**"][auth.Name] = auth
mapped[idx.Name]["*"][auth.Name[:2]][auth.Name] = auth
for _, pkg := range auth.Packages {
filemap[pkg.AuthorURL()] = idn
for _, prov := range pkg.Provides {
parts := strings.Split(prov.Name, "::")
log.Trace("PACKAGE: %s", prov.Name)
if _, ok := packages[parts[0]]; !ok {
packages[parts[0]] = &PkgSpace{
Namespace: parts[0],
Packages: make([]*gopan.PerlPackage, 0),
Children: make(map[string]*PkgSpace),
Parent: nil,
Versions: make(map[float64]*gopan.PerlPackage),
}
}
if _, ok := idxpackages[idx.Name]; !ok {
idxpackages[idx.Name] = make(map[string]*PkgSpace)
}
if _, ok := idxpackages[idx.Name][parts[0]]; !ok {
idxpackages[idx.Name][parts[0]] = &PkgSpace{
Namespace: parts[0],
Packages: make([]*gopan.PerlPackage, 0),
Children: make(map[string]*PkgSpace),
Parent: nil,
Versions: make(map[float64]*gopan.PerlPackage),
}
}
if len(parts) == 1 {
packages[parts[0]].Packages = append(packages[parts[0]].Packages, prov)
packages[parts[0]].Versions[gopan.VersionFromString(prov.Version)] = prov
idxpackages[idx.Name][parts[0]].Packages = append(idxpackages[idx.Name][parts[0]].Packages, prov)
idxpackages[idx.Name][parts[0]].Versions[gopan.VersionFromString(prov.Version)] = prov
log.Trace("Version linked: %f for %s", gopan.VersionFromString(prov.Version), prov.Name)
} else {
packages[parts[0]].Populate(parts[1:], prov)
idxpackages[idx.Name][parts[0]].Populate(parts[1:], prov)
}
}
}
}
}
}
}
go update_indexes()
// Get latest SmartPAN version
go func() {
res, err := nethttp.Get("https://api.github.com/repos/companieshouse/gopan/releases")
if err != nil {
log.Error("Error getting latest version: %s", err.Error())
return
}
defer res.Body.Close()
b, err := ioutil.ReadAll(res.Body)
if err != nil {
log.Error("Error reading stream: %s", err.Error())
return
}
var r Releases
if err = json.Unmarshal(b, &r); err != nil {
log.Error("Error unmarshalling JSON: %s", err.Error())
return
}
log.Info("Current release: %s", config.CurrentRelease)
rel := strings.TrimPrefix(r[0].TagName, "v")
log.Info("Latest release: %s", rel)
config.LatestRelease = rel
config.UpdateURL = r[0].URL
if config.CurrentRelease < rel {
config.CanUpdate = true
log.Info("Your version of SmartPAN can be updated.")
}
}()
// Create our Gotcha application
var app = gotcha.Create(Asset)
app.Config.Listen = config.Bind
summary = &Summary{0, 0, 0, 0}
app.On(events.BeforeHandler, func(session *http.Session, next func()) {
session.Stash["summary"] = summary
session.Stash["config"] = config
next()
})
// Get the router
r := app.Router
// Create some routes
r.Get("/", search)
r.Post("/", search)
r.Get("/help", help)
r.Get("/settings", settings)
r.Get("/browse", browse)
r.Get("/import", import1)
r.Post("/import", import1)
r.Get("/import/(?P<jobid>[^/]+)", import2)
r.Get("/import/(?P<jobid>[^/]+)/stream", importstream)
r.Post("/get-index/(?P<index>(CPAN|BackPAN))/?", getindex)
// Serve static content (but really use a CDN)
r.Get("/images/(?P<file>.*)", r.Static("assets/images/{{file}}"))
r.Get("/css/(?P<file>.*)", r.Static("assets/css/{{file}}"))
// JSON endpoints
r.Get("/where/(?P<module>[^/]+)/?", where)
r.Get("/where/(?P<module>[^/]+)/(?P<version>[^/]+)/?", where)
// Put these last so they only match /{repo} if nothing else matches
r.Get("/(?P<repo>[^/]+)/?", browse)
r.Get("/(?P<repo>[^/]+)/(?P<type>[^/]+)/?", browse)
r.Get("/(?P<repo>[^/]+)/modules/02packages\\.details\\.txt(?P<gz>\\.gz)?", pkgindex)
r.Get("/(?P<repo>[^/]+)/authors/id/(?P<file>.*\\.tar\\.gz)", download)
r.Post("/delete/(?P<repo>[^/]+)/authors/id/(?P<auth1>[^/]+)/(?P<auth2>[^/]+)/(?P<auth3>[^/]+)/(?P<file>.*\\.tar\\.gz)", delete_file)
r.Get("/(?P<repo>[^/]+)/(?P<type>[^/]+)/(?P<path>.*)/?", browse)
// Start our application
app.Start()
<-make(chan int)
}
func getindex(session *http.Session) {
idx := session.Stash["index"]
switch idx {
case "CPAN":
go func() {
config.CPANStatus = "Downloading"
res, err := nethttp.Get("https://s3-eu-west-1.amazonaws.com/gopan/cpan_index.gz")
if err != nil {
log.Error("Error downloading index: %s", err.Error())
session.RenderException(500, errors.New("Error downloading CPAN index: "+err.Error()))
config.CPANStatus = "Failed"
return
}
defer res.Body.Close()
b, err := ioutil.ReadAll(res.Body)
if err != nil {
log.Error("Error reading index: %s", err.Error())
session.RenderException(500, errors.New("Error reading CPAN index: "+err.Error()))
config.CPANStatus = "Failed"
return
}
fi, err := os.Create(config.CacheDir + "/" + config.CPANIndex)
if err != nil {
log.Error("Error creating output file: %s", err.Error())
session.RenderException(500, errors.New("Error creating output file: "+err.Error()))
config.CPANStatus = "Failed"
return
}
defer fi.Close()
fi.Write(b)
config.CPANStatus = "Downloaded"
config.HasCPANIndex = true
config.CPANIndexDate = time.Now().String()
config.CPANStatus = "Loading"
load_index(config.CPANIndex, config.CacheDir+"/"+config.CPANIndex)
config.CPANStatus = "Indexing"
update_indexes()
config.CPANStatus = "Loaded"
}()
session.Redirect(&url.URL{Path: "/settings"})
return
case "BackPAN":
go func() {
config.BackPANStatus = "Downloading"
res, err := nethttp.Get("https://s3-eu-west-1.amazonaws.com/gopan/backpan_index.gz")
if err != nil {
log.Error("Error downloading index: %s", err.Error())
session.RenderException(500, errors.New("Error downloading BackPAN index: "+err.Error()))
config.BackPANStatus = "Failed"
return
}
defer res.Body.Close()
b, err := ioutil.ReadAll(res.Body)
if err != nil {
log.Error("Error reading index: %s", err.Error())
session.RenderException(500, errors.New("Error reading BackPAN index: "+err.Error()))
config.BackPANStatus = "Failed"
return
}
fi, err := os.Create(config.CacheDir + "/" + config.BackPANIndex)
if err != nil {
log.Error("Error creating output file: %s", err.Error())
session.RenderException(500, errors.New("Error creating output file: "+err.Error()))
config.BackPANStatus = "Failed"
return
}
defer fi.Close()
fi.Write(b)
config.BackPANStatus = "Downloaded"
config.HasBackPANIndex = true
config.BackPANIndexDate = time.Now().String()
config.BackPANStatus = "Loading"
load_index(config.BackPANIndex, config.CacheDir+"/"+config.BackPANIndex)
config.BackPANStatus = "Indexing"
update_indexes()
config.BackPANStatus = "Loaded"
}()
session.Redirect(&url.URL{Path: "/settings"})
return
}
session.RenderNotFound()
}
func help(session *http.Session) {
session.Stash["Title"] = "SmartPAN Help"
html, _ := session.RenderTemplate("help.html")
session.Stash["Page"] = "Help"
session.Stash["Content"] = template.HTML(html)
session.Render("layout.html")
}
func settings(session *http.Session) {
session.Stash["Title"] = "SmartPAN Settings"
html, _ := session.RenderTemplate("settings.html")
session.Stash["Page"] = "Settings"
session.Stash["Content"] = template.HTML(html)
session.Render("layout.html")
}
| {
log.Error("Error loading sources: %s", err)
os.Exit(1)
return
} | conditional_block |
main.go | package main
import (
"bytes"
"encoding/json"
"errors"
"flag"
"html/template"
"io/ioutil"
nethttp "net/http"
"net/url"
"os"
"strings"
"sync"
"time"
"github.com/companieshouse/gopan/getpan/getpan"
"github.com/companieshouse/gopan/gopan"
gotcha "github.com/companieshouse/gotcha/app"
"github.com/companieshouse/gotcha/events"
"github.com/companieshouse/gotcha/http"
"github.com/ian-kent/go-log/log"
)
var CurrentRelease = "0.12"
type Releases []*Release
type Release struct {
TagName string `json:"tag_name"`
URL string `json:"html_url"`
}
var update_indexes func()
var load_index func(string, string)
func main() {
configure()
args := flag.Args()
if len(args) > 0 && args[0] == "init" {
log.Info("Initialising SmartPAN")
log.Info("=> Installing Perl dependencies")
// FIXME most of this is repeated from getpan/main.go
cfg := getpan.DefaultConfig()
cfg.CacheDir = config.CacheDir
for _, source := range cfg.Sources {
if err := source.Load(); err != nil {
log.Error("Error loading sources: %s", err)
os.Exit(1)
return
}
}
deps := &getpan.DependencyList{
Dependencies: make([]*getpan.Dependency, 0),
}
d1, _ := getpan.DependencyFromString("Parse::LocalDistribution", "")
d2, _ := getpan.DependencyFromString("JSON::XS", "")
deps.AddDependency(d1)
deps.AddDependency(d2)
if err := deps.Resolve(); err != nil {
log.Error("Error resolving dependencies: %s", err)
os.Exit(1)
return
}
_, err := deps.Install()
if err != nil {
log.Error("Error installing dependencies: %s", err)
os.Exit(2)
return
}
log.Info(" - Installed %d modules", deps.UniqueInstalled())
log.Info("SmartPAN initialisation complete")
return
}
if config.TestDeps {
perldeps := gopan.TestPerlDeps()
perldeps.Dump()
if !perldeps.Ok {
log.Error("Required perl dependencies are missing")
os.Exit(1)
return
}
}
if len(args) > 0 && args[0] == "import" {
if len(args) < 4 {
log.Error("Invalid arguments, expecting: smartpan import FILE AUTHORID INDEX")
return
}
fname := args[1]
log.Info("Importing module from %s", fname)
log.Info("Author ID: %s", args[2])
log.Info("Index : %s", args[3])
extraParams := map[string]string{
"importinto": args[3],
"authorid": args[2],
"newindex": "",
"cpanmirror": "",
"importurl": "",
"fromdir": "",
}
if strings.HasPrefix(fname, "http://") || strings.HasPrefix(fname, "https://") {
log.Info("URL: %s", fname)
extraParams["importurl"] = fname
request, err := newFormPostRequest(config.RemoteHost+"/import?stream=y", extraParams)
if err != nil {
log.Error("Create request error: %s", err.Error())
return
}
client := &nethttp.Client{}
resp, err := client.Do(request)
if err != nil {
log.Error("Error connecting to host: %s", err.Error())
return
} else {
// TODO stream this
body := &bytes.Buffer{}
_, err := body.ReadFrom(resp.Body)
if err != nil {
log.Error("Error reading response: %s", err.Error())
return
}
resp.Body.Close()
//log.Info("%d", resp.StatusCode)
//log.Info("%s", resp.Header)
log.Info("%s", body.String())
}
} else {
fname = strings.TrimPrefix(fname, "file://")
log.Info("File: %s", fname)
if _, err := os.Stat(fname); err != nil {
log.Error("File not found: %s", err.Error())
return
}
request, err := newfileUploadRequest(config.RemoteHost+"/import?stream=y", extraParams, "fromfile", fname)
if err != nil {
log.Error("Create upload error: %s", err.Error())
return
}
client := &nethttp.Client{}
resp, err := client.Do(request)
if err != nil {
log.Error("Error connecting to host: %s", err.Error())
return
} else {
// TODO stream this
body := &bytes.Buffer{}
_, err := body.ReadFrom(resp.Body)
if err != nil {
log.Error("Error reading response: %s", err.Error())
return
}
resp.Body.Close()
//log.Info("%d", resp.StatusCode)
//log.Info("%s", resp.Header)
log.Info("%s", body.String())
}
}
return
}
config.CurrentRelease = CurrentRelease
var wg sync.WaitGroup
load_index = func(index string, file string) {
indexes[index] = gopan.LoadIndex(file)
}
wg.Add(1)
go func() {
defer wg.Done()
indexes = make(map[string]map[string]*gopan.Source)
// Load CPAN index
if fi, err := os.Stat(config.CacheDir + "/" + config.CPANIndex); err == nil {
config.HasCPANIndex = true
config.CPANIndexDate = fi.ModTime().String()
config.CPANStatus = "Loading"
wg.Add(1)
go func() {
defer wg.Done()
load_index(config.CPANIndex, config.CacheDir+"/"+config.CPANIndex)
config.CPANStatus = "Loaded"
}()
}
// Load BackPAN index
if fi, err := os.Stat(config.CacheDir + "/" + config.BackPANIndex); err == nil {
config.HasBackPANIndex = true
config.BackPANIndexDate = fi.ModTime().String()
config.BackPANStatus = "Loading"
wg.Add(1)
go func() {
defer wg.Done()
load_index(config.BackPANIndex, config.CacheDir+"/"+config.BackPANIndex)
config.BackPANStatus = "Loaded"
}()
}
// Load our secondary indexes
for _, idx := range config.Indexes {
wg.Add(1)
go func() {
defer wg.Done()
load_index(idx, config.CacheDir+"/"+idx)
}()
}
// Load our primary index (this is the only index written back to)
wg.Add(1)
go func() {
defer wg.Done()
load_index(config.Index, config.CacheDir+"/"+config.Index)
}()
}()
update_indexes = func() {
wg.Wait()
wg.Add(1)
go func() {
wg.Wait()
config.ImportAvailable = true
nsrc, nauth, npkg, nprov := gopan.CountIndex(indexes)
// TODO should probably be in the index - needs to udpate when index changes
summary = &Summary{nsrc, nauth, npkg, nprov}
// Do this now so changing the level doesn't interfere with index load
log.Logger().SetLevel(log.Stol(config.LogLevel))
}()
defer wg.Done()
// Create in-memory indexes for UI/search etc
for fname, _ := range indexes {
for idn, idx := range indexes[fname] {
mapped[idx.Name] = make(map[string]map[string]map[string]*gopan.Author)
for _, auth := range idx.Authors {
// author name
if _, ok := mapped[idx.Name][auth.Name[:1]]; !ok {
mapped[idx.Name][auth.Name[:1]] = make(map[string]map[string]*gopan.Author)
}
if _, ok := mapped[idx.Name][auth.Name[:1]][auth.Name[:2]]; !ok {
mapped[idx.Name][auth.Name[:1]][auth.Name[:2]] = make(map[string]*gopan.Author)
}
mapped[idx.Name][auth.Name[:1]][auth.Name[:2]][auth.Name] = auth
// wildcards
if _, ok := mapped[idx.Name]["*"]; !ok {
mapped[idx.Name]["*"] = make(map[string]map[string]*gopan.Author)
}
if _, ok := mapped[idx.Name]["*"]["**"]; !ok {
mapped[idx.Name]["*"]["**"] = make(map[string]*gopan.Author)
}
mapped[idx.Name]["*"]["**"][auth.Name] = auth
// combos
if _, ok := mapped[idx.Name][auth.Name[:1]]["**"]; !ok {
mapped[idx.Name][auth.Name[:1]]["**"] = make(map[string]*gopan.Author)
}
if _, ok := mapped[idx.Name]["*"][auth.Name[:2]]; !ok {
mapped[idx.Name]["*"][auth.Name[:2]] = make(map[string]*gopan.Author)
}
mapped[idx.Name][auth.Name[:1]]["**"][auth.Name] = auth
mapped[idx.Name]["*"][auth.Name[:2]][auth.Name] = auth
for _, pkg := range auth.Packages {
filemap[pkg.AuthorURL()] = idn
for _, prov := range pkg.Provides {
parts := strings.Split(prov.Name, "::")
log.Trace("PACKAGE: %s", prov.Name)
if _, ok := packages[parts[0]]; !ok {
packages[parts[0]] = &PkgSpace{
Namespace: parts[0],
Packages: make([]*gopan.PerlPackage, 0),
Children: make(map[string]*PkgSpace),
Parent: nil,
Versions: make(map[float64]*gopan.PerlPackage),
}
}
if _, ok := idxpackages[idx.Name]; !ok {
idxpackages[idx.Name] = make(map[string]*PkgSpace)
}
if _, ok := idxpackages[idx.Name][parts[0]]; !ok {
idxpackages[idx.Name][parts[0]] = &PkgSpace{
Namespace: parts[0],
Packages: make([]*gopan.PerlPackage, 0),
Children: make(map[string]*PkgSpace),
Parent: nil,
Versions: make(map[float64]*gopan.PerlPackage),
}
}
if len(parts) == 1 {
packages[parts[0]].Packages = append(packages[parts[0]].Packages, prov)
packages[parts[0]].Versions[gopan.VersionFromString(prov.Version)] = prov
idxpackages[idx.Name][parts[0]].Packages = append(idxpackages[idx.Name][parts[0]].Packages, prov)
idxpackages[idx.Name][parts[0]].Versions[gopan.VersionFromString(prov.Version)] = prov
log.Trace("Version linked: %f for %s", gopan.VersionFromString(prov.Version), prov.Name)
} else {
packages[parts[0]].Populate(parts[1:], prov)
idxpackages[idx.Name][parts[0]].Populate(parts[1:], prov)
}
}
}
}
}
}
}
go update_indexes()
// Get latest SmartPAN version
go func() {
res, err := nethttp.Get("https://api.github.com/repos/companieshouse/gopan/releases")
if err != nil {
log.Error("Error getting latest version: %s", err.Error())
return
}
defer res.Body.Close()
b, err := ioutil.ReadAll(res.Body)
if err != nil {
log.Error("Error reading stream: %s", err.Error())
return
}
var r Releases
if err = json.Unmarshal(b, &r); err != nil {
log.Error("Error unmarshalling JSON: %s", err.Error())
return
}
log.Info("Current release: %s", config.CurrentRelease)
rel := strings.TrimPrefix(r[0].TagName, "v")
log.Info("Latest release: %s", rel)
config.LatestRelease = rel
config.UpdateURL = r[0].URL
if config.CurrentRelease < rel {
config.CanUpdate = true
log.Info("Your version of SmartPAN can be updated.")
}
}()
// Create our Gotcha application
var app = gotcha.Create(Asset)
app.Config.Listen = config.Bind
summary = &Summary{0, 0, 0, 0}
app.On(events.BeforeHandler, func(session *http.Session, next func()) {
session.Stash["summary"] = summary
session.Stash["config"] = config
next()
})
// Get the router
r := app.Router
// Create some routes
r.Get("/", search)
r.Post("/", search)
r.Get("/help", help)
r.Get("/settings", settings)
r.Get("/browse", browse)
r.Get("/import", import1)
r.Post("/import", import1)
r.Get("/import/(?P<jobid>[^/]+)", import2)
r.Get("/import/(?P<jobid>[^/]+)/stream", importstream)
r.Post("/get-index/(?P<index>(CPAN|BackPAN))/?", getindex)
// Serve static content (but really use a CDN)
r.Get("/images/(?P<file>.*)", r.Static("assets/images/{{file}}"))
r.Get("/css/(?P<file>.*)", r.Static("assets/css/{{file}}"))
// JSON endpoints
r.Get("/where/(?P<module>[^/]+)/?", where)
r.Get("/where/(?P<module>[^/]+)/(?P<version>[^/]+)/?", where)
// Put these last so they only match /{repo} if nothing else matches
r.Get("/(?P<repo>[^/]+)/?", browse)
r.Get("/(?P<repo>[^/]+)/(?P<type>[^/]+)/?", browse)
r.Get("/(?P<repo>[^/]+)/modules/02packages\\.details\\.txt(?P<gz>\\.gz)?", pkgindex)
r.Get("/(?P<repo>[^/]+)/authors/id/(?P<file>.*\\.tar\\.gz)", download)
r.Post("/delete/(?P<repo>[^/]+)/authors/id/(?P<auth1>[^/]+)/(?P<auth2>[^/]+)/(?P<auth3>[^/]+)/(?P<file>.*\\.tar\\.gz)", delete_file)
r.Get("/(?P<repo>[^/]+)/(?P<type>[^/]+)/(?P<path>.*)/?", browse)
// Start our application
app.Start()
<-make(chan int)
}
func getindex(session *http.Session) {
idx := session.Stash["index"]
switch idx {
case "CPAN":
go func() {
config.CPANStatus = "Downloading"
res, err := nethttp.Get("https://s3-eu-west-1.amazonaws.com/gopan/cpan_index.gz")
if err != nil {
log.Error("Error downloading index: %s", err.Error())
session.RenderException(500, errors.New("Error downloading CPAN index: "+err.Error()))
config.CPANStatus = "Failed"
return
}
defer res.Body.Close()
b, err := ioutil.ReadAll(res.Body)
if err != nil {
log.Error("Error reading index: %s", err.Error())
session.RenderException(500, errors.New("Error reading CPAN index: "+err.Error()))
config.CPANStatus = "Failed"
return
}
fi, err := os.Create(config.CacheDir + "/" + config.CPANIndex)
if err != nil {
log.Error("Error creating output file: %s", err.Error())
session.RenderException(500, errors.New("Error creating output file: "+err.Error()))
config.CPANStatus = "Failed"
return
}
defer fi.Close()
fi.Write(b)
config.CPANStatus = "Downloaded"
config.HasCPANIndex = true
config.CPANIndexDate = time.Now().String()
config.CPANStatus = "Loading"
load_index(config.CPANIndex, config.CacheDir+"/"+config.CPANIndex)
config.CPANStatus = "Indexing"
update_indexes()
config.CPANStatus = "Loaded"
}()
session.Redirect(&url.URL{Path: "/settings"})
return
case "BackPAN":
go func() {
config.BackPANStatus = "Downloading"
res, err := nethttp.Get("https://s3-eu-west-1.amazonaws.com/gopan/backpan_index.gz")
if err != nil {
log.Error("Error downloading index: %s", err.Error())
session.RenderException(500, errors.New("Error downloading BackPAN index: "+err.Error()))
config.BackPANStatus = "Failed"
return
}
defer res.Body.Close()
b, err := ioutil.ReadAll(res.Body)
if err != nil {
log.Error("Error reading index: %s", err.Error())
session.RenderException(500, errors.New("Error reading BackPAN index: "+err.Error()))
config.BackPANStatus = "Failed"
return
}
fi, err := os.Create(config.CacheDir + "/" + config.BackPANIndex)
if err != nil {
log.Error("Error creating output file: %s", err.Error())
session.RenderException(500, errors.New("Error creating output file: "+err.Error()))
config.BackPANStatus = "Failed"
return
}
defer fi.Close()
fi.Write(b)
config.BackPANStatus = "Downloaded"
config.HasBackPANIndex = true
config.BackPANIndexDate = time.Now().String()
config.BackPANStatus = "Loading"
load_index(config.BackPANIndex, config.CacheDir+"/"+config.BackPANIndex)
config.BackPANStatus = "Indexing"
update_indexes()
config.BackPANStatus = "Loaded"
}()
session.Redirect(&url.URL{Path: "/settings"})
return
}
session.RenderNotFound()
}
func | (session *http.Session) {
session.Stash["Title"] = "SmartPAN Help"
html, _ := session.RenderTemplate("help.html")
session.Stash["Page"] = "Help"
session.Stash["Content"] = template.HTML(html)
session.Render("layout.html")
}
func settings(session *http.Session) {
session.Stash["Title"] = "SmartPAN Settings"
html, _ := session.RenderTemplate("settings.html")
session.Stash["Page"] = "Settings"
session.Stash["Content"] = template.HTML(html)
session.Render("layout.html")
}
| help | identifier_name |
main.go | package main
import (
"bytes"
"encoding/json"
"errors"
"flag"
"html/template"
"io/ioutil"
nethttp "net/http"
"net/url"
"os"
"strings"
"sync"
"time"
"github.com/companieshouse/gopan/getpan/getpan"
"github.com/companieshouse/gopan/gopan"
gotcha "github.com/companieshouse/gotcha/app"
"github.com/companieshouse/gotcha/events"
"github.com/companieshouse/gotcha/http"
"github.com/ian-kent/go-log/log"
)
var CurrentRelease = "0.12"
type Releases []*Release
type Release struct {
TagName string `json:"tag_name"`
URL string `json:"html_url"`
}
var update_indexes func()
var load_index func(string, string)
func main() {
configure()
args := flag.Args()
if len(args) > 0 && args[0] == "init" {
log.Info("Initialising SmartPAN")
log.Info("=> Installing Perl dependencies")
// FIXME most of this is repeated from getpan/main.go
cfg := getpan.DefaultConfig()
cfg.CacheDir = config.CacheDir
for _, source := range cfg.Sources {
if err := source.Load(); err != nil {
log.Error("Error loading sources: %s", err)
os.Exit(1)
return
}
}
deps := &getpan.DependencyList{
Dependencies: make([]*getpan.Dependency, 0),
}
d1, _ := getpan.DependencyFromString("Parse::LocalDistribution", "")
d2, _ := getpan.DependencyFromString("JSON::XS", "")
deps.AddDependency(d1)
deps.AddDependency(d2)
if err := deps.Resolve(); err != nil {
log.Error("Error resolving dependencies: %s", err)
os.Exit(1)
return
}
_, err := deps.Install()
if err != nil {
log.Error("Error installing dependencies: %s", err)
os.Exit(2)
return
}
log.Info(" - Installed %d modules", deps.UniqueInstalled())
log.Info("SmartPAN initialisation complete")
return
}
if config.TestDeps {
perldeps := gopan.TestPerlDeps()
perldeps.Dump()
if !perldeps.Ok {
log.Error("Required perl dependencies are missing")
os.Exit(1)
return
}
}
if len(args) > 0 && args[0] == "import" {
if len(args) < 4 {
log.Error("Invalid arguments, expecting: smartpan import FILE AUTHORID INDEX")
return
}
fname := args[1]
log.Info("Importing module from %s", fname)
log.Info("Author ID: %s", args[2])
log.Info("Index : %s", args[3])
extraParams := map[string]string{
"importinto": args[3],
"authorid": args[2],
"newindex": "",
"cpanmirror": "",
"importurl": "",
"fromdir": "",
}
if strings.HasPrefix(fname, "http://") || strings.HasPrefix(fname, "https://") {
log.Info("URL: %s", fname)
extraParams["importurl"] = fname
request, err := newFormPostRequest(config.RemoteHost+"/import?stream=y", extraParams)
if err != nil {
log.Error("Create request error: %s", err.Error())
return
}
client := &nethttp.Client{}
resp, err := client.Do(request)
if err != nil {
log.Error("Error connecting to host: %s", err.Error())
return
} else {
// TODO stream this
body := &bytes.Buffer{}
_, err := body.ReadFrom(resp.Body)
if err != nil {
log.Error("Error reading response: %s", err.Error())
return
}
resp.Body.Close()
//log.Info("%d", resp.StatusCode)
//log.Info("%s", resp.Header)
log.Info("%s", body.String())
}
} else {
fname = strings.TrimPrefix(fname, "file://")
log.Info("File: %s", fname)
if _, err := os.Stat(fname); err != nil {
log.Error("File not found: %s", err.Error())
return
}
request, err := newfileUploadRequest(config.RemoteHost+"/import?stream=y", extraParams, "fromfile", fname)
if err != nil {
log.Error("Create upload error: %s", err.Error())
return
}
client := &nethttp.Client{}
resp, err := client.Do(request)
if err != nil {
log.Error("Error connecting to host: %s", err.Error())
return
} else {
// TODO stream this
body := &bytes.Buffer{}
_, err := body.ReadFrom(resp.Body)
if err != nil {
log.Error("Error reading response: %s", err.Error())
return
}
resp.Body.Close()
//log.Info("%d", resp.StatusCode)
//log.Info("%s", resp.Header)
log.Info("%s", body.String())
}
}
return
}
config.CurrentRelease = CurrentRelease
var wg sync.WaitGroup
load_index = func(index string, file string) {
indexes[index] = gopan.LoadIndex(file)
}
wg.Add(1)
go func() {
defer wg.Done()
indexes = make(map[string]map[string]*gopan.Source)
// Load CPAN index
if fi, err := os.Stat(config.CacheDir + "/" + config.CPANIndex); err == nil {
config.HasCPANIndex = true
config.CPANIndexDate = fi.ModTime().String()
config.CPANStatus = "Loading"
wg.Add(1)
go func() {
defer wg.Done()
load_index(config.CPANIndex, config.CacheDir+"/"+config.CPANIndex)
config.CPANStatus = "Loaded"
}()
}
// Load BackPAN index
if fi, err := os.Stat(config.CacheDir + "/" + config.BackPANIndex); err == nil {
config.HasBackPANIndex = true
config.BackPANIndexDate = fi.ModTime().String()
config.BackPANStatus = "Loading"
wg.Add(1)
go func() {
defer wg.Done()
load_index(config.BackPANIndex, config.CacheDir+"/"+config.BackPANIndex)
config.BackPANStatus = "Loaded"
}()
}
// Load our secondary indexes
for _, idx := range config.Indexes {
wg.Add(1)
go func() {
defer wg.Done()
load_index(idx, config.CacheDir+"/"+idx)
}()
}
// Load our primary index (this is the only index written back to)
wg.Add(1)
go func() {
defer wg.Done()
load_index(config.Index, config.CacheDir+"/"+config.Index)
}()
}()
update_indexes = func() {
wg.Wait()
wg.Add(1)
go func() {
wg.Wait()
config.ImportAvailable = true
nsrc, nauth, npkg, nprov := gopan.CountIndex(indexes)
// TODO should probably be in the index - needs to udpate when index changes
summary = &Summary{nsrc, nauth, npkg, nprov}
// Do this now so changing the level doesn't interfere with index load
log.Logger().SetLevel(log.Stol(config.LogLevel))
}()
defer wg.Done()
// Create in-memory indexes for UI/search etc
for fname, _ := range indexes {
for idn, idx := range indexes[fname] {
mapped[idx.Name] = make(map[string]map[string]map[string]*gopan.Author)
for _, auth := range idx.Authors {
// author name
if _, ok := mapped[idx.Name][auth.Name[:1]]; !ok {
mapped[idx.Name][auth.Name[:1]] = make(map[string]map[string]*gopan.Author)
}
if _, ok := mapped[idx.Name][auth.Name[:1]][auth.Name[:2]]; !ok {
mapped[idx.Name][auth.Name[:1]][auth.Name[:2]] = make(map[string]*gopan.Author)
}
mapped[idx.Name][auth.Name[:1]][auth.Name[:2]][auth.Name] = auth
// wildcards
if _, ok := mapped[idx.Name]["*"]; !ok {
mapped[idx.Name]["*"] = make(map[string]map[string]*gopan.Author)
}
if _, ok := mapped[idx.Name]["*"]["**"]; !ok {
mapped[idx.Name]["*"]["**"] = make(map[string]*gopan.Author)
}
mapped[idx.Name]["*"]["**"][auth.Name] = auth
// combos
if _, ok := mapped[idx.Name][auth.Name[:1]]["**"]; !ok {
mapped[idx.Name][auth.Name[:1]]["**"] = make(map[string]*gopan.Author)
}
if _, ok := mapped[idx.Name]["*"][auth.Name[:2]]; !ok {
mapped[idx.Name]["*"][auth.Name[:2]] = make(map[string]*gopan.Author)
}
mapped[idx.Name][auth.Name[:1]]["**"][auth.Name] = auth
mapped[idx.Name]["*"][auth.Name[:2]][auth.Name] = auth
for _, pkg := range auth.Packages {
filemap[pkg.AuthorURL()] = idn
for _, prov := range pkg.Provides {
parts := strings.Split(prov.Name, "::")
log.Trace("PACKAGE: %s", prov.Name)
if _, ok := packages[parts[0]]; !ok {
packages[parts[0]] = &PkgSpace{
Namespace: parts[0],
Packages: make([]*gopan.PerlPackage, 0),
Children: make(map[string]*PkgSpace),
Parent: nil,
Versions: make(map[float64]*gopan.PerlPackage),
}
}
if _, ok := idxpackages[idx.Name]; !ok {
idxpackages[idx.Name] = make(map[string]*PkgSpace)
}
if _, ok := idxpackages[idx.Name][parts[0]]; !ok {
idxpackages[idx.Name][parts[0]] = &PkgSpace{
Namespace: parts[0],
Packages: make([]*gopan.PerlPackage, 0),
Children: make(map[string]*PkgSpace),
Parent: nil,
Versions: make(map[float64]*gopan.PerlPackage),
}
}
if len(parts) == 1 {
packages[parts[0]].Packages = append(packages[parts[0]].Packages, prov)
packages[parts[0]].Versions[gopan.VersionFromString(prov.Version)] = prov
idxpackages[idx.Name][parts[0]].Packages = append(idxpackages[idx.Name][parts[0]].Packages, prov)
idxpackages[idx.Name][parts[0]].Versions[gopan.VersionFromString(prov.Version)] = prov
log.Trace("Version linked: %f for %s", gopan.VersionFromString(prov.Version), prov.Name)
} else {
packages[parts[0]].Populate(parts[1:], prov)
idxpackages[idx.Name][parts[0]].Populate(parts[1:], prov)
}
}
}
}
}
}
}
go update_indexes()
// Get latest SmartPAN version
go func() {
res, err := nethttp.Get("https://api.github.com/repos/companieshouse/gopan/releases")
if err != nil {
log.Error("Error getting latest version: %s", err.Error())
return
}
defer res.Body.Close()
b, err := ioutil.ReadAll(res.Body)
if err != nil {
log.Error("Error reading stream: %s", err.Error())
return
}
var r Releases
if err = json.Unmarshal(b, &r); err != nil {
log.Error("Error unmarshalling JSON: %s", err.Error())
return
}
log.Info("Current release: %s", config.CurrentRelease)
rel := strings.TrimPrefix(r[0].TagName, "v")
log.Info("Latest release: %s", rel)
config.LatestRelease = rel
config.UpdateURL = r[0].URL
if config.CurrentRelease < rel {
config.CanUpdate = true
log.Info("Your version of SmartPAN can be updated.")
}
}()
// Create our Gotcha application
var app = gotcha.Create(Asset)
app.Config.Listen = config.Bind
summary = &Summary{0, 0, 0, 0}
app.On(events.BeforeHandler, func(session *http.Session, next func()) {
session.Stash["summary"] = summary
session.Stash["config"] = config
next()
})
// Get the router
r := app.Router
// Create some routes
r.Get("/", search)
r.Post("/", search)
r.Get("/help", help)
r.Get("/settings", settings)
r.Get("/browse", browse)
r.Get("/import", import1)
r.Post("/import", import1)
r.Get("/import/(?P<jobid>[^/]+)", import2)
r.Get("/import/(?P<jobid>[^/]+)/stream", importstream)
r.Post("/get-index/(?P<index>(CPAN|BackPAN))/?", getindex)
// Serve static content (but really use a CDN)
r.Get("/images/(?P<file>.*)", r.Static("assets/images/{{file}}"))
r.Get("/css/(?P<file>.*)", r.Static("assets/css/{{file}}"))
// JSON endpoints
r.Get("/where/(?P<module>[^/]+)/?", where)
r.Get("/where/(?P<module>[^/]+)/(?P<version>[^/]+)/?", where)
// Put these last so they only match /{repo} if nothing else matches
r.Get("/(?P<repo>[^/]+)/?", browse)
r.Get("/(?P<repo>[^/]+)/(?P<type>[^/]+)/?", browse)
r.Get("/(?P<repo>[^/]+)/modules/02packages\\.details\\.txt(?P<gz>\\.gz)?", pkgindex)
r.Get("/(?P<repo>[^/]+)/authors/id/(?P<file>.*\\.tar\\.gz)", download)
r.Post("/delete/(?P<repo>[^/]+)/authors/id/(?P<auth1>[^/]+)/(?P<auth2>[^/]+)/(?P<auth3>[^/]+)/(?P<file>.*\\.tar\\.gz)", delete_file)
r.Get("/(?P<repo>[^/]+)/(?P<type>[^/]+)/(?P<path>.*)/?", browse)
// Start our application
app.Start()
<-make(chan int)
}
func getindex(session *http.Session) |
func help(session *http.Session) {
session.Stash["Title"] = "SmartPAN Help"
html, _ := session.RenderTemplate("help.html")
session.Stash["Page"] = "Help"
session.Stash["Content"] = template.HTML(html)
session.Render("layout.html")
}
func settings(session *http.Session) {
session.Stash["Title"] = "SmartPAN Settings"
html, _ := session.RenderTemplate("settings.html")
session.Stash["Page"] = "Settings"
session.Stash["Content"] = template.HTML(html)
session.Render("layout.html")
}
| {
idx := session.Stash["index"]
switch idx {
case "CPAN":
go func() {
config.CPANStatus = "Downloading"
res, err := nethttp.Get("https://s3-eu-west-1.amazonaws.com/gopan/cpan_index.gz")
if err != nil {
log.Error("Error downloading index: %s", err.Error())
session.RenderException(500, errors.New("Error downloading CPAN index: "+err.Error()))
config.CPANStatus = "Failed"
return
}
defer res.Body.Close()
b, err := ioutil.ReadAll(res.Body)
if err != nil {
log.Error("Error reading index: %s", err.Error())
session.RenderException(500, errors.New("Error reading CPAN index: "+err.Error()))
config.CPANStatus = "Failed"
return
}
fi, err := os.Create(config.CacheDir + "/" + config.CPANIndex)
if err != nil {
log.Error("Error creating output file: %s", err.Error())
session.RenderException(500, errors.New("Error creating output file: "+err.Error()))
config.CPANStatus = "Failed"
return
}
defer fi.Close()
fi.Write(b)
config.CPANStatus = "Downloaded"
config.HasCPANIndex = true
config.CPANIndexDate = time.Now().String()
config.CPANStatus = "Loading"
load_index(config.CPANIndex, config.CacheDir+"/"+config.CPANIndex)
config.CPANStatus = "Indexing"
update_indexes()
config.CPANStatus = "Loaded"
}()
session.Redirect(&url.URL{Path: "/settings"})
return
case "BackPAN":
go func() {
config.BackPANStatus = "Downloading"
res, err := nethttp.Get("https://s3-eu-west-1.amazonaws.com/gopan/backpan_index.gz")
if err != nil {
log.Error("Error downloading index: %s", err.Error())
session.RenderException(500, errors.New("Error downloading BackPAN index: "+err.Error()))
config.BackPANStatus = "Failed"
return
}
defer res.Body.Close()
b, err := ioutil.ReadAll(res.Body)
if err != nil {
log.Error("Error reading index: %s", err.Error())
session.RenderException(500, errors.New("Error reading BackPAN index: "+err.Error()))
config.BackPANStatus = "Failed"
return
}
fi, err := os.Create(config.CacheDir + "/" + config.BackPANIndex)
if err != nil {
log.Error("Error creating output file: %s", err.Error())
session.RenderException(500, errors.New("Error creating output file: "+err.Error()))
config.BackPANStatus = "Failed"
return
}
defer fi.Close()
fi.Write(b)
config.BackPANStatus = "Downloaded"
config.HasBackPANIndex = true
config.BackPANIndexDate = time.Now().String()
config.BackPANStatus = "Loading"
load_index(config.BackPANIndex, config.CacheDir+"/"+config.BackPANIndex)
config.BackPANStatus = "Indexing"
update_indexes()
config.BackPANStatus = "Loaded"
}()
session.Redirect(&url.URL{Path: "/settings"})
return
}
session.RenderNotFound()
} | identifier_body |
main.go | package main
import (
"bytes"
"encoding/json"
"errors"
"flag"
"html/template"
"io/ioutil"
nethttp "net/http"
"net/url"
"os"
"strings"
"sync"
"time"
"github.com/companieshouse/gopan/getpan/getpan"
"github.com/companieshouse/gopan/gopan"
gotcha "github.com/companieshouse/gotcha/app"
"github.com/companieshouse/gotcha/events"
"github.com/companieshouse/gotcha/http"
"github.com/ian-kent/go-log/log"
)
var CurrentRelease = "0.12"
type Releases []*Release
type Release struct {
TagName string `json:"tag_name"`
URL string `json:"html_url"`
}
var update_indexes func()
var load_index func(string, string)
func main() {
configure()
args := flag.Args()
if len(args) > 0 && args[0] == "init" {
log.Info("Initialising SmartPAN")
log.Info("=> Installing Perl dependencies")
// FIXME most of this is repeated from getpan/main.go
cfg := getpan.DefaultConfig()
cfg.CacheDir = config.CacheDir
for _, source := range cfg.Sources {
if err := source.Load(); err != nil {
log.Error("Error loading sources: %s", err)
os.Exit(1)
return
}
}
deps := &getpan.DependencyList{
Dependencies: make([]*getpan.Dependency, 0),
}
d1, _ := getpan.DependencyFromString("Parse::LocalDistribution", "")
d2, _ := getpan.DependencyFromString("JSON::XS", "")
deps.AddDependency(d1)
deps.AddDependency(d2)
if err := deps.Resolve(); err != nil {
log.Error("Error resolving dependencies: %s", err)
os.Exit(1)
return
}
_, err := deps.Install()
if err != nil {
log.Error("Error installing dependencies: %s", err)
os.Exit(2)
return
}
log.Info(" - Installed %d modules", deps.UniqueInstalled())
log.Info("SmartPAN initialisation complete")
return
}
if config.TestDeps {
perldeps := gopan.TestPerlDeps()
perldeps.Dump()
if !perldeps.Ok {
log.Error("Required perl dependencies are missing")
os.Exit(1)
return
}
}
if len(args) > 0 && args[0] == "import" {
if len(args) < 4 {
log.Error("Invalid arguments, expecting: smartpan import FILE AUTHORID INDEX")
return
}
fname := args[1]
log.Info("Importing module from %s", fname)
log.Info("Author ID: %s", args[2])
log.Info("Index : %s", args[3])
extraParams := map[string]string{
"importinto": args[3],
"authorid": args[2],
"newindex": "",
"cpanmirror": "",
"importurl": "",
"fromdir": "",
}
if strings.HasPrefix(fname, "http://") || strings.HasPrefix(fname, "https://") {
log.Info("URL: %s", fname)
extraParams["importurl"] = fname
request, err := newFormPostRequest(config.RemoteHost+"/import?stream=y", extraParams)
if err != nil {
log.Error("Create request error: %s", err.Error())
return
}
client := &nethttp.Client{}
resp, err := client.Do(request)
if err != nil {
log.Error("Error connecting to host: %s", err.Error())
return
} else {
// TODO stream this
body := &bytes.Buffer{}
_, err := body.ReadFrom(resp.Body)
if err != nil {
log.Error("Error reading response: %s", err.Error())
return
}
resp.Body.Close()
//log.Info("%d", resp.StatusCode)
//log.Info("%s", resp.Header)
log.Info("%s", body.String())
}
} else {
fname = strings.TrimPrefix(fname, "file://")
log.Info("File: %s", fname)
if _, err := os.Stat(fname); err != nil {
log.Error("File not found: %s", err.Error())
return
}
request, err := newfileUploadRequest(config.RemoteHost+"/import?stream=y", extraParams, "fromfile", fname)
if err != nil {
log.Error("Create upload error: %s", err.Error())
return
}
client := &nethttp.Client{}
resp, err := client.Do(request)
if err != nil {
log.Error("Error connecting to host: %s", err.Error())
return
} else {
// TODO stream this
body := &bytes.Buffer{}
_, err := body.ReadFrom(resp.Body)
if err != nil {
log.Error("Error reading response: %s", err.Error())
return
}
resp.Body.Close()
//log.Info("%d", resp.StatusCode)
//log.Info("%s", resp.Header)
log.Info("%s", body.String())
}
}
return
}
config.CurrentRelease = CurrentRelease
var wg sync.WaitGroup
load_index = func(index string, file string) {
indexes[index] = gopan.LoadIndex(file)
}
wg.Add(1)
go func() {
defer wg.Done()
indexes = make(map[string]map[string]*gopan.Source)
// Load CPAN index
if fi, err := os.Stat(config.CacheDir + "/" + config.CPANIndex); err == nil {
config.HasCPANIndex = true
config.CPANIndexDate = fi.ModTime().String()
config.CPANStatus = "Loading"
wg.Add(1)
go func() {
defer wg.Done()
load_index(config.CPANIndex, config.CacheDir+"/"+config.CPANIndex)
config.CPANStatus = "Loaded"
}()
}
// Load BackPAN index
if fi, err := os.Stat(config.CacheDir + "/" + config.BackPANIndex); err == nil {
config.HasBackPANIndex = true
config.BackPANIndexDate = fi.ModTime().String()
config.BackPANStatus = "Loading"
wg.Add(1)
go func() {
defer wg.Done()
load_index(config.BackPANIndex, config.CacheDir+"/"+config.BackPANIndex)
config.BackPANStatus = "Loaded"
}()
}
// Load our secondary indexes
for _, idx := range config.Indexes {
wg.Add(1)
go func() {
defer wg.Done()
load_index(idx, config.CacheDir+"/"+idx)
}()
}
// Load our primary index (this is the only index written back to)
wg.Add(1)
go func() {
defer wg.Done()
load_index(config.Index, config.CacheDir+"/"+config.Index)
}()
}()
update_indexes = func() {
wg.Wait()
wg.Add(1)
go func() {
wg.Wait()
config.ImportAvailable = true
nsrc, nauth, npkg, nprov := gopan.CountIndex(indexes)
// TODO should probably be in the index - needs to udpate when index changes
summary = &Summary{nsrc, nauth, npkg, nprov}
// Do this now so changing the level doesn't interfere with index load
log.Logger().SetLevel(log.Stol(config.LogLevel))
}()
defer wg.Done()
// Create in-memory indexes for UI/search etc
for fname, _ := range indexes {
for idn, idx := range indexes[fname] {
mapped[idx.Name] = make(map[string]map[string]map[string]*gopan.Author)
for _, auth := range idx.Authors {
// author name
if _, ok := mapped[idx.Name][auth.Name[:1]]; !ok {
mapped[idx.Name][auth.Name[:1]] = make(map[string]map[string]*gopan.Author)
}
if _, ok := mapped[idx.Name][auth.Name[:1]][auth.Name[:2]]; !ok {
mapped[idx.Name][auth.Name[:1]][auth.Name[:2]] = make(map[string]*gopan.Author)
}
mapped[idx.Name][auth.Name[:1]][auth.Name[:2]][auth.Name] = auth
// wildcards
if _, ok := mapped[idx.Name]["*"]; !ok {
mapped[idx.Name]["*"] = make(map[string]map[string]*gopan.Author) | mapped[idx.Name]["*"]["**"][auth.Name] = auth
// combos
if _, ok := mapped[idx.Name][auth.Name[:1]]["**"]; !ok {
mapped[idx.Name][auth.Name[:1]]["**"] = make(map[string]*gopan.Author)
}
if _, ok := mapped[idx.Name]["*"][auth.Name[:2]]; !ok {
mapped[idx.Name]["*"][auth.Name[:2]] = make(map[string]*gopan.Author)
}
mapped[idx.Name][auth.Name[:1]]["**"][auth.Name] = auth
mapped[idx.Name]["*"][auth.Name[:2]][auth.Name] = auth
for _, pkg := range auth.Packages {
filemap[pkg.AuthorURL()] = idn
for _, prov := range pkg.Provides {
parts := strings.Split(prov.Name, "::")
log.Trace("PACKAGE: %s", prov.Name)
if _, ok := packages[parts[0]]; !ok {
packages[parts[0]] = &PkgSpace{
Namespace: parts[0],
Packages: make([]*gopan.PerlPackage, 0),
Children: make(map[string]*PkgSpace),
Parent: nil,
Versions: make(map[float64]*gopan.PerlPackage),
}
}
if _, ok := idxpackages[idx.Name]; !ok {
idxpackages[idx.Name] = make(map[string]*PkgSpace)
}
if _, ok := idxpackages[idx.Name][parts[0]]; !ok {
idxpackages[idx.Name][parts[0]] = &PkgSpace{
Namespace: parts[0],
Packages: make([]*gopan.PerlPackage, 0),
Children: make(map[string]*PkgSpace),
Parent: nil,
Versions: make(map[float64]*gopan.PerlPackage),
}
}
if len(parts) == 1 {
packages[parts[0]].Packages = append(packages[parts[0]].Packages, prov)
packages[parts[0]].Versions[gopan.VersionFromString(prov.Version)] = prov
idxpackages[idx.Name][parts[0]].Packages = append(idxpackages[idx.Name][parts[0]].Packages, prov)
idxpackages[idx.Name][parts[0]].Versions[gopan.VersionFromString(prov.Version)] = prov
log.Trace("Version linked: %f for %s", gopan.VersionFromString(prov.Version), prov.Name)
} else {
packages[parts[0]].Populate(parts[1:], prov)
idxpackages[idx.Name][parts[0]].Populate(parts[1:], prov)
}
}
}
}
}
}
}
go update_indexes()
// Get latest SmartPAN version
go func() {
res, err := nethttp.Get("https://api.github.com/repos/companieshouse/gopan/releases")
if err != nil {
log.Error("Error getting latest version: %s", err.Error())
return
}
defer res.Body.Close()
b, err := ioutil.ReadAll(res.Body)
if err != nil {
log.Error("Error reading stream: %s", err.Error())
return
}
var r Releases
if err = json.Unmarshal(b, &r); err != nil {
log.Error("Error unmarshalling JSON: %s", err.Error())
return
}
log.Info("Current release: %s", config.CurrentRelease)
rel := strings.TrimPrefix(r[0].TagName, "v")
log.Info("Latest release: %s", rel)
config.LatestRelease = rel
config.UpdateURL = r[0].URL
if config.CurrentRelease < rel {
config.CanUpdate = true
log.Info("Your version of SmartPAN can be updated.")
}
}()
// Create our Gotcha application
var app = gotcha.Create(Asset)
app.Config.Listen = config.Bind
summary = &Summary{0, 0, 0, 0}
app.On(events.BeforeHandler, func(session *http.Session, next func()) {
session.Stash["summary"] = summary
session.Stash["config"] = config
next()
})
// Get the router
r := app.Router
// Create some routes
r.Get("/", search)
r.Post("/", search)
r.Get("/help", help)
r.Get("/settings", settings)
r.Get("/browse", browse)
r.Get("/import", import1)
r.Post("/import", import1)
r.Get("/import/(?P<jobid>[^/]+)", import2)
r.Get("/import/(?P<jobid>[^/]+)/stream", importstream)
r.Post("/get-index/(?P<index>(CPAN|BackPAN))/?", getindex)
// Serve static content (but really use a CDN)
r.Get("/images/(?P<file>.*)", r.Static("assets/images/{{file}}"))
r.Get("/css/(?P<file>.*)", r.Static("assets/css/{{file}}"))
// JSON endpoints
r.Get("/where/(?P<module>[^/]+)/?", where)
r.Get("/where/(?P<module>[^/]+)/(?P<version>[^/]+)/?", where)
// Put these last so they only match /{repo} if nothing else matches
r.Get("/(?P<repo>[^/]+)/?", browse)
r.Get("/(?P<repo>[^/]+)/(?P<type>[^/]+)/?", browse)
r.Get("/(?P<repo>[^/]+)/modules/02packages\\.details\\.txt(?P<gz>\\.gz)?", pkgindex)
r.Get("/(?P<repo>[^/]+)/authors/id/(?P<file>.*\\.tar\\.gz)", download)
r.Post("/delete/(?P<repo>[^/]+)/authors/id/(?P<auth1>[^/]+)/(?P<auth2>[^/]+)/(?P<auth3>[^/]+)/(?P<file>.*\\.tar\\.gz)", delete_file)
r.Get("/(?P<repo>[^/]+)/(?P<type>[^/]+)/(?P<path>.*)/?", browse)
// Start our application
app.Start()
<-make(chan int)
}
func getindex(session *http.Session) {
idx := session.Stash["index"]
switch idx {
case "CPAN":
go func() {
config.CPANStatus = "Downloading"
res, err := nethttp.Get("https://s3-eu-west-1.amazonaws.com/gopan/cpan_index.gz")
if err != nil {
log.Error("Error downloading index: %s", err.Error())
session.RenderException(500, errors.New("Error downloading CPAN index: "+err.Error()))
config.CPANStatus = "Failed"
return
}
defer res.Body.Close()
b, err := ioutil.ReadAll(res.Body)
if err != nil {
log.Error("Error reading index: %s", err.Error())
session.RenderException(500, errors.New("Error reading CPAN index: "+err.Error()))
config.CPANStatus = "Failed"
return
}
fi, err := os.Create(config.CacheDir + "/" + config.CPANIndex)
if err != nil {
log.Error("Error creating output file: %s", err.Error())
session.RenderException(500, errors.New("Error creating output file: "+err.Error()))
config.CPANStatus = "Failed"
return
}
defer fi.Close()
fi.Write(b)
config.CPANStatus = "Downloaded"
config.HasCPANIndex = true
config.CPANIndexDate = time.Now().String()
config.CPANStatus = "Loading"
load_index(config.CPANIndex, config.CacheDir+"/"+config.CPANIndex)
config.CPANStatus = "Indexing"
update_indexes()
config.CPANStatus = "Loaded"
}()
session.Redirect(&url.URL{Path: "/settings"})
return
case "BackPAN":
go func() {
config.BackPANStatus = "Downloading"
res, err := nethttp.Get("https://s3-eu-west-1.amazonaws.com/gopan/backpan_index.gz")
if err != nil {
log.Error("Error downloading index: %s", err.Error())
session.RenderException(500, errors.New("Error downloading BackPAN index: "+err.Error()))
config.BackPANStatus = "Failed"
return
}
defer res.Body.Close()
b, err := ioutil.ReadAll(res.Body)
if err != nil {
log.Error("Error reading index: %s", err.Error())
session.RenderException(500, errors.New("Error reading BackPAN index: "+err.Error()))
config.BackPANStatus = "Failed"
return
}
fi, err := os.Create(config.CacheDir + "/" + config.BackPANIndex)
if err != nil {
log.Error("Error creating output file: %s", err.Error())
session.RenderException(500, errors.New("Error creating output file: "+err.Error()))
config.BackPANStatus = "Failed"
return
}
defer fi.Close()
fi.Write(b)
config.BackPANStatus = "Downloaded"
config.HasBackPANIndex = true
config.BackPANIndexDate = time.Now().String()
config.BackPANStatus = "Loading"
load_index(config.BackPANIndex, config.CacheDir+"/"+config.BackPANIndex)
config.BackPANStatus = "Indexing"
update_indexes()
config.BackPANStatus = "Loaded"
}()
session.Redirect(&url.URL{Path: "/settings"})
return
}
session.RenderNotFound()
}
func help(session *http.Session) {
session.Stash["Title"] = "SmartPAN Help"
html, _ := session.RenderTemplate("help.html")
session.Stash["Page"] = "Help"
session.Stash["Content"] = template.HTML(html)
session.Render("layout.html")
}
func settings(session *http.Session) {
session.Stash["Title"] = "SmartPAN Settings"
html, _ := session.RenderTemplate("settings.html")
session.Stash["Page"] = "Settings"
session.Stash["Content"] = template.HTML(html)
session.Render("layout.html")
} | }
if _, ok := mapped[idx.Name]["*"]["**"]; !ok {
mapped[idx.Name]["*"]["**"] = make(map[string]*gopan.Author)
} | random_line_split |
sourcemap.rs | use std::{fs::read_to_string, path::PathBuf};
use rustc_hash::FxHashSet;
use sourcemap::SourceMap;
use swc_common::{comments::SingleThreadedComments, source_map::SourceMapGenConfig};
use swc_ecma_ast::EsVersion;
use swc_ecma_codegen::{self, text_writer::WriteJs, Emitter};
use swc_ecma_parser::{lexer::Lexer, Parser, Syntax};
use swc_ecma_testing::{exec_node_js, JsExecOptions};
static IGNORED_PASS_TESTS: &[&str] = &[
// Temporally ignored
"16c7073c546fdd58.js",
"369fd0a1e40030d8.js",
"3df03e7e138b7760.js",
"5333f04581124314.js",
"a157424306915066.js",
"ce5f3bc27d5ccaac.js",
"d4e81043d808dc31.js",
// Stack size (Stupid parens)
"6b5e7e125097d439.js",
"714be6d28082eaa7.js",
"882910de7dd1aef9.js",
"dd3c63403db5c06e.js",
// Wrong tests (variable name or value is different)
"0339fa95c78c11bd.js",
"0426f15dac46e92d.js",
"0b4d61559ccce0f9.js",
"0f88c334715d2489.js",
"1093d98f5fc0758d.js",
"15d9592709b947a0.js",
"2179895ec5cc6276.js",
"247a3a57e8176ebd.js",
"441a92357939904a.js",
"47f974d6fc52e3e4.js",
"4e1a0da46ca45afe.js",
"5829d742ab805866.js",
"589dc8ad3b9aa28f.js",
"598a5cedba92154d.js",
"72d79750e81ef03d.js",
"7788d3c1e1247da9.js",
"7b72d7b43bedc895.js",
"7dab6e55461806c9.js",
"82c827ccaecbe22b.js",
"87a9b0d1d80812cc.js",
"8c80f7ee04352eba.js",
"96f5d93be9a54573.js",
"988e362ed9ddcac5.js",
"9bcae7c7f00b4e3c.js",
"a8a03a88237c4e8f.js",
"ad06370e34811a6a.js",
"b0fdc038ee292aba.js",
"b62c6dd890bef675.js",
"cb211fadccb029c7.js",
"ce968fcdf3a1987c.js",
"db3c01738aaf0b92.js",
"e1387fe892984e2b.js",
"e71c1d5f0b6b833c.js",
"e8ea384458526db0.js",
// We don't implement Annex B fully.
"1c1e2a43fe5515b6.js",
"3dabeca76119d501.js",
"52aeec7b8da212a2.js",
"59ae0289778b80cd.js",
"a4d62a651f69d815.js",
"c06df922631aeabc.js",
// swc_common issue - `\r` should be treated as a newline
"be2c3fff6426873e.js",
"db66e1e8f3f1faef.js",
"a7b8ce1d4c0f0bc2.js",
"6498dcc494193cb4.js",
"6a240463b40550d2.js",
// TODO: (maybe) fix span of `,`
"641ac9060a206183.js",
"e4cef19dab44335a.js",
"a6806d6fedbf6759.js",
"2dc0ded5a1bff643.js",
"547fa50af16beca7.js",
"547fa50af16beca7.js",
"8c8a7a2941fb6d64.js",
"9e98dbfde77e3dfe.js",
"d9eb39b11bc766f4.js",
"f9888fa1a1e366e7.js",
"78cf02220fb0937c.js",
// TODO(kdy1): Non-ascii char count
"58cb05d17f7ec010.js",
"4d2c7020de650d40.js",
"dafb7abe5b9b44f5.js",
// Our one is better
"1efde9ddd9d6e6ce.module.js",
"d010d377bcfd5565.js",
"ce0aaec02d5d4465.js",
"edd1f39f90576180.js",
"290fdc5a2f826ead.js",
"e71a91c61343cdb1.js",
"409f30dc7efe75d5.js",
"03608b6e222ae700.js",
"e54c1a2fc15cd4b8.js",
"e08e181172bad2b1.js",
"cc793d44a11617e7.js",
"54e70df597a4f9a3.js",
"efef19e06f58fdd9.js",
"e0fc2148b455a6be.js",
"10857a84ed2962f1.js",
"d7c7ff252e84e81d.js",
"0aa6aab640155051.js",
"c80d9415dde647cd.js",
"09e84f25af85b836.js",
"ce8c443eb361e1a2.js",
"affd557fd820e1f2.js",
"ec99a663d6f3983d.js",
"01fd8e8a0a42307b.js",
"e01c7172cf204b92.js",
"12d5bedf1812952a.js",
"df20c9b7a7d534cb.js",
"c767fa4d683aa3ce.js",
"bf8ffad512a5f568.js",
"c8513472857eae9c.js",
"b86b0122e80c330e.js",
"aa7e721756949024.js",
"a830df7cf2e74c9f.js",
"845631d1a33b3409.js",
"066b76285ce79182.js",
"fe2d3b945530c806.js",
"bd28a7d19ac0d50b.js",
"06c7efc128ce74a0.js",
"075c7204d0b0af60.js",
"0827a8316cca777a.js",
"b9a0cb6df76a73d2.js",
"bf210a4f0cf9e352.js",
"6edc155d463535cb.js",
"b8f8dfc41df97add.js",
"b549d045fc8e93bf.js",
"e42f306327c0f578.js",
"9a9cb616daadf90a.js",
"d2ae1c7b6e55143f.js",
"a445a478b4ce0c58.js",
"0d137e8a97ffe083.js",
"b7a6a807ae6db312.js",
"bb8b546cf9db5996.js",
"50ac15a08f7c812f.js",
"a2cb5a14559c6a50.js",
"bbff5671643cc2ea.js",
"c2f12d66ce17d5ab.js",
"13045bfdda0434e0.js",
"10d6486502949e74.js",
"119e9dce4feae643.js",
"1223609b0f7a2129.js",
"177fef3d002eb873.js",
"19ffea7e9e887e08.js",
"1c6c67fcd71f2d08.js",
"1cdce2d337e64b4f.js",
"1f039e0eeb1bc271.js",
"227118dffd2c9935.js",
"250ced8c8e83b389.js",
"a2798917405b080b.js",
"ad6bf12aa7eda975.js",
"24fa28a37061a18f.js",
"252bb992a448270e.js",
"285648c16156804f.js",
"2d10fed2af94fbd1.js",
"3097f73926c93640.js",
"30aee1020fc69090.js",
"312f85fecc352681.js",
"317532451c2ce8ff.js",
"32b635a9667a9fb1.js",
"36224cf8215ad8e4.js",
"37e4a6eca1ece7e5.js",
"38284ea2d9914d86.js",
"3b57183c81070eec.js",
"3bbd75d597d54fe6.js",
"3c1e2ada0ac2b8e3.js",
"3e1a6f702041b599.js",
"3e3a99768a4a1502.js",
"3e69c5cc1a7ac103.js",
"3eac36e29398cdc5.js",
"3ff52d86c77678bd.js",
"43023cd549deee77.js",
"44af28febe2288cc.js",
"478ede4cfe7906d5.js",
"4869454dd215468e.js",
"48b6f8ce65d3b3ee.js",
"4c71e11fbbc56349.js",
"4d833cbc56caaaf9.js",
"4e7c58761e24d77c.js",
"4e7c58761e24d77c.js",
"5641ad33abcd1752.js",
"587400d1c019785a.js",
"58ed6ffb30191684.js",
"5b8d2b991d2c1f5b.js",
"5f730961df66e8e8.js",
"597108fd45a6e79b.js",
"60dcd48a3f6af44f.js",
"62d7c1ee4e1626c4.js",
"665f4940c7cf30c9.js",
"64cc57f82a54b7fb.js",
"66d2dbcb692491ec.module.js",
"697b3d30c1d06918.js",
"698a8cfb0705c277.js",
"69bbdc7c34ed23cc.js",
"6a323491fe75918a.js",
"6b76b8761a049c19.js",
"70bf2c409480ae10.js",
"74c5ebda713c8bd7.js",
"75172741c27c7703.js",
"753a8b016a700975.js",
"77c661b2fbe3dd3a.js",
"784a059faa166072.js",
"7855fbf5ea10e622.js",
"7cd7c68a6131f816.js",
"7df2a606ecc6cd84.js",
"7dfb625b91c5c879.js",
"7fdf990c6f42edcd.module.js",
"80d2351a5ae68524.js",
"84250e15785d8a9e.js",
"85263ecacc7a4dc5.js",
"8628cd459b39ffe8.js",
"870a0b8d891753e9.js",
"8d14286a8cc6ee9d.js",
"8d67ad04bfc356c9.js",
"8ecaef2617d8c6a7.js",
"918e105a2ff6c64a.js",
"92fd8e24864fde0a.js",
"94b8a654a87039b9.js",
"94cb828d5dcfd136.js",
"98df58b0c40fac90.js",
"9949a2e1a6844836.module.js",
"99cdfc40e20af6f5.js",
"9a666205cafd530f.js",
"a454d2e2ab3484e6.js",
"a54cca69085ad35a.js",
"a86a29773d1168d3.js",
"b205355de22689d1.js",
"b93d116fd0409637.js",
"c85bc4de504befc7.js",
"c8689b6da6fd227a.js",
"cda499c521ff60c7.js",
"d4b898b45172a637.js",
"e2ac0bea41202dc9.js",
"f01d9f3c7b2b2717.js",
"f15772354efa5ecf.js",
"f17ec9517a3339d9.js",
"fa5b398eeef697a6.js",
"fa9eaf58f51d6926.js",
"faa4a026e1e86145.js",
"fada2c7bbfabe14a.js",
"fb8db7a71f3755fc.js",
"fbde237f11796df9.js",
"fd5ea844fcc07d3d.js",
"6c5f0dd83c417a5a.js",
"78eb22badc114b6f.js",
"7afd38d79e6795a8.js",
"80950061e291542b.js",
"8a0fc8ea31727188.module.js",
"af97a3752e579223.js",
"bbffb851469a3f0e.js",
"bc302492d441d561.js",
"be2fd5888f434cbd.js",
"f3260491590325af.js",
];
#[testing::fixture("../swc_ecma_parser/tests/test262-parser/pass/*.js")]
fn identity(entry: PathBuf) {
let file_name = entry
.file_name()
.unwrap()
.to_str()
.expect("to_str() failed")
.to_string();
let input = read_to_string(&entry).unwrap();
let ignore = IGNORED_PASS_TESTS.contains(&&*file_name);
if ignore {
return;
}
let is_module = file_name.contains("module");
let msg = format!(
"\n\n========== Running codegen test {}\nSource:\n{}\n",
file_name, input
);
let mut wr = vec![];
::testing::run_test(false, |cm, handler| {
let fm = cm.load_file(&entry).expect("failed to load file");
eprintln!(
"{}\nPos: {:?} ~ {:?} (L{})",
msg,
fm.start_pos,
fm.end_pos,
fm.count_lines()
);
let (expected_code, expected_map, visualizer_url_for_expected) =
match get_expected(&fm.src, is_module) {
Some(v) => v,
None => return Ok(()),
};
println!("Expected code:\n{}", expected_code);
let expected_tokens = print_source_map(&expected_map);
let comments = SingleThreadedComments::default();
let lexer = Lexer::new(
Syntax::default(),
Default::default(),
(&*fm).into(),
Some(&comments),
);
let mut parser: Parser<Lexer> = Parser::new_from(lexer);
let mut src_map = vec![];
{
let mut wr = Box::new(swc_ecma_codegen::text_writer::JsWriter::new(
cm.clone(),
"\n",
&mut wr,
Some(&mut src_map),
)) as Box<dyn WriteJs>;
wr = Box::new(swc_ecma_codegen::text_writer::omit_trailing_semi(wr));
let mut emitter = Emitter {
cfg: swc_ecma_codegen::Config {
minify: true,
target: EsVersion::Es5,
ascii_only: true,
..Default::default()
},
cm: cm.clone(),
wr,
comments: None,
};
// Parse source
if is_module {
emitter
.emit_module(
&parser
.parse_module()
.map_err(|e| e.into_diagnostic(handler).emit())?,
)
.unwrap();
} else {
emitter
.emit_script(
&parser
.parse_script()
.map_err(|e| e.into_diagnostic(handler).emit())?,
)
.unwrap();
}
}
let actual_code = String::from_utf8(wr).unwrap();
let actual_map = cm.build_source_map_with_config(&src_map, None, SourceMapConfigImpl);
let visualizer_url_for_actual = visualizer_url(&actual_code, &actual_map);
let actual_tokens = print_source_map(&actual_map);
let common_tokens = actual_tokens
.iter()
.filter(|a| expected_tokens.contains(&**a))
.map(|v| v.to_string())
.collect::<FxHashSet<_>>();
let actual_tokens_diff = actual_tokens
.iter()
.filter(|a| !common_tokens.contains(&**a))
.map(|v| v.to_string())
.collect::<Vec<_>>();
let expected_tokens_diff = expected_tokens
.iter()
.filter(|a| !common_tokens.contains(&**a))
.map(|v| v.to_string())
.collect::<Vec<_>>();
eprintln!("---- Actual -----");
for s in actual_tokens_diff {
eprintln!("{}", s);
}
eprintln!("---- Expected -----");
for s in expected_tokens_diff {
eprintln!("{}", s);
}
dbg!(&src_map);
if actual_code != expected_code {
// Generated code is different
// We can't ensure that identical sourcemap will mean identical code
eprintln!("Actual code:\n{}", actual_code);
eprintln!("Expected code:\n{}", expected_code);
return Ok(());
}
eprintln!(
"----- Visualizer -----\nExpected: {}\nActual: {}",
visualizer_url_for_expected, visualizer_url_for_actual
);
assert_eq_same_map(&expected_map, &actual_map);
Ok(())
})
.expect("failed to run test");
}
fn get_expected(code: &str, is_module: bool) -> Option<(String, SourceMap, String)> {
let output = exec_node_js(
include_str!("./srcmap.mjs"),
JsExecOptions {
cache: true,
module: true,
args: vec![
code.to_string(),
if is_module {
"module".into()
} else {
"script".into()
},
],
},
)
.ok()?;
let v = serde_json::from_str::<serde_json::Map<String, serde_json::Value>>(&output).unwrap();
let code = v.get("code").unwrap().as_str().unwrap();
let map = v.get("map").unwrap().as_str().unwrap();
let map = SourceMap::from_slice(map.as_bytes()).expect("invalid sourcemap");
let visualizer_url = visualizer_url(code, &map);
Some((code.to_string(), map, visualizer_url))
}
fn print_source_map(map: &SourceMap) -> Vec<String> {
let mut v = map
.tokens()
.map(|t| {
format!(
"Token: {}:{} => {}:{}",
t.get_src_line(),
t.get_src_col(),
t.get_dst_line(),
t.get_dst_col()
)
})
.collect::<Vec<_>>();
v.sort();
v
}
fn assert_eq_same_map(expected: &SourceMap, actual: &SourceMap) {
for expected_token in expected.tokens() {
let actual_token = actual
.lookup_token(expected_token.get_dst_line(), expected_token.get_dst_col())
.unwrap_or_else(|| panic!("token not found: {:?}", expected_token));
if expected_token.get_src_line() == 0 && expected_token.get_src_col() == 0 {
continue;
}
assert_eq!(
expected_token.get_src_line(),
actual_token.get_src_line(),
"line mismatch at {}:{}",
expected_token.get_dst_line(),
expected_token.get_dst_col()
);
assert_eq!(
expected_token.get_src_col(),
actual_token.get_src_col(),
"col mismatch at {}:{}",
expected_token.get_dst_line(),
expected_token.get_dst_col()
);
}
}
/// Creates a url for https://evanw.github.io/source-map-visualization/
fn visualizer_url(code: &str, map: &SourceMap) -> String {
let map = {
let mut buf = vec![];
map.to_writer(&mut buf).unwrap();
String::from_utf8(buf).unwrap()
};
let code_len = format!("{}\0", code.len());
let map_len = format!("{}\0", map.len());
let hash = base64::encode(format!("{}{}{}{}", code_len, code, map_len, map));
format!("https://evanw.github.io/source-map-visualization/#{}", hash)
}
struct SourceMapConfigImpl;
impl SourceMapGenConfig for SourceMapConfigImpl {
fn file_name_to_source(&self, f: &swc_common::FileName) -> String {
f.to_string()
}
fn inline_sources_content(&self, _: &swc_common::FileName) -> bool |
}
| {
true
} | identifier_body |
sourcemap.rs | use std::{fs::read_to_string, path::PathBuf};
use rustc_hash::FxHashSet;
use sourcemap::SourceMap;
use swc_common::{comments::SingleThreadedComments, source_map::SourceMapGenConfig};
use swc_ecma_ast::EsVersion;
use swc_ecma_codegen::{self, text_writer::WriteJs, Emitter};
use swc_ecma_parser::{lexer::Lexer, Parser, Syntax};
use swc_ecma_testing::{exec_node_js, JsExecOptions};
static IGNORED_PASS_TESTS: &[&str] = &[
// Temporally ignored
"16c7073c546fdd58.js",
"369fd0a1e40030d8.js",
"3df03e7e138b7760.js",
"5333f04581124314.js",
"a157424306915066.js",
"ce5f3bc27d5ccaac.js",
"d4e81043d808dc31.js",
// Stack size (Stupid parens)
"6b5e7e125097d439.js",
"714be6d28082eaa7.js",
"882910de7dd1aef9.js",
"dd3c63403db5c06e.js",
// Wrong tests (variable name or value is different)
"0339fa95c78c11bd.js",
"0426f15dac46e92d.js",
"0b4d61559ccce0f9.js",
"0f88c334715d2489.js",
"1093d98f5fc0758d.js",
"15d9592709b947a0.js",
"2179895ec5cc6276.js",
"247a3a57e8176ebd.js",
"441a92357939904a.js",
"47f974d6fc52e3e4.js",
"4e1a0da46ca45afe.js",
"5829d742ab805866.js",
"589dc8ad3b9aa28f.js",
"598a5cedba92154d.js",
"72d79750e81ef03d.js",
"7788d3c1e1247da9.js",
"7b72d7b43bedc895.js",
"7dab6e55461806c9.js",
"82c827ccaecbe22b.js",
"87a9b0d1d80812cc.js",
"8c80f7ee04352eba.js",
"96f5d93be9a54573.js",
"988e362ed9ddcac5.js",
"9bcae7c7f00b4e3c.js",
"a8a03a88237c4e8f.js",
"ad06370e34811a6a.js",
"b0fdc038ee292aba.js",
"b62c6dd890bef675.js",
"cb211fadccb029c7.js",
"ce968fcdf3a1987c.js",
"db3c01738aaf0b92.js",
"e1387fe892984e2b.js",
"e71c1d5f0b6b833c.js",
"e8ea384458526db0.js",
// We don't implement Annex B fully.
"1c1e2a43fe5515b6.js",
"3dabeca76119d501.js",
"52aeec7b8da212a2.js",
"59ae0289778b80cd.js",
"a4d62a651f69d815.js",
"c06df922631aeabc.js",
// swc_common issue - `\r` should be treated as a newline
"be2c3fff6426873e.js",
"db66e1e8f3f1faef.js",
"a7b8ce1d4c0f0bc2.js",
"6498dcc494193cb4.js",
"6a240463b40550d2.js",
// TODO: (maybe) fix span of `,`
"641ac9060a206183.js",
"e4cef19dab44335a.js",
"a6806d6fedbf6759.js",
"2dc0ded5a1bff643.js",
"547fa50af16beca7.js",
"547fa50af16beca7.js",
"8c8a7a2941fb6d64.js",
"9e98dbfde77e3dfe.js",
"d9eb39b11bc766f4.js",
"f9888fa1a1e366e7.js",
"78cf02220fb0937c.js",
// TODO(kdy1): Non-ascii char count
"58cb05d17f7ec010.js",
"4d2c7020de650d40.js",
"dafb7abe5b9b44f5.js",
// Our one is better
"1efde9ddd9d6e6ce.module.js",
"d010d377bcfd5565.js",
"ce0aaec02d5d4465.js",
"edd1f39f90576180.js",
"290fdc5a2f826ead.js",
"e71a91c61343cdb1.js",
"409f30dc7efe75d5.js",
"03608b6e222ae700.js",
"e54c1a2fc15cd4b8.js",
"e08e181172bad2b1.js",
"cc793d44a11617e7.js",
"54e70df597a4f9a3.js",
"efef19e06f58fdd9.js",
"e0fc2148b455a6be.js",
"10857a84ed2962f1.js",
"d7c7ff252e84e81d.js",
"0aa6aab640155051.js",
"c80d9415dde647cd.js",
"09e84f25af85b836.js",
"ce8c443eb361e1a2.js",
"affd557fd820e1f2.js",
"ec99a663d6f3983d.js",
"01fd8e8a0a42307b.js",
"e01c7172cf204b92.js",
"12d5bedf1812952a.js",
"df20c9b7a7d534cb.js",
"c767fa4d683aa3ce.js",
"bf8ffad512a5f568.js",
"c8513472857eae9c.js",
"b86b0122e80c330e.js",
"aa7e721756949024.js",
"a830df7cf2e74c9f.js",
"845631d1a33b3409.js",
"066b76285ce79182.js",
"fe2d3b945530c806.js",
"bd28a7d19ac0d50b.js",
"06c7efc128ce74a0.js",
"075c7204d0b0af60.js",
"0827a8316cca777a.js",
"b9a0cb6df76a73d2.js",
"bf210a4f0cf9e352.js",
"6edc155d463535cb.js",
"b8f8dfc41df97add.js",
"b549d045fc8e93bf.js",
"e42f306327c0f578.js",
"9a9cb616daadf90a.js",
"d2ae1c7b6e55143f.js",
"a445a478b4ce0c58.js",
"0d137e8a97ffe083.js",
"b7a6a807ae6db312.js",
"bb8b546cf9db5996.js",
"50ac15a08f7c812f.js",
"a2cb5a14559c6a50.js",
"bbff5671643cc2ea.js",
"c2f12d66ce17d5ab.js",
"13045bfdda0434e0.js",
"10d6486502949e74.js",
"119e9dce4feae643.js",
"1223609b0f7a2129.js",
"177fef3d002eb873.js",
"19ffea7e9e887e08.js",
"1c6c67fcd71f2d08.js",
"1cdce2d337e64b4f.js",
"1f039e0eeb1bc271.js",
"227118dffd2c9935.js",
"250ced8c8e83b389.js",
"a2798917405b080b.js",
"ad6bf12aa7eda975.js",
"24fa28a37061a18f.js",
"252bb992a448270e.js",
"285648c16156804f.js",
"2d10fed2af94fbd1.js",
"3097f73926c93640.js",
"30aee1020fc69090.js",
"312f85fecc352681.js",
"317532451c2ce8ff.js",
"32b635a9667a9fb1.js",
"36224cf8215ad8e4.js",
"37e4a6eca1ece7e5.js",
"38284ea2d9914d86.js",
"3b57183c81070eec.js",
"3bbd75d597d54fe6.js",
"3c1e2ada0ac2b8e3.js",
"3e1a6f702041b599.js",
"3e3a99768a4a1502.js",
"3e69c5cc1a7ac103.js",
"3eac36e29398cdc5.js",
"3ff52d86c77678bd.js",
"43023cd549deee77.js",
"44af28febe2288cc.js",
"478ede4cfe7906d5.js",
"4869454dd215468e.js",
"48b6f8ce65d3b3ee.js",
"4c71e11fbbc56349.js",
"4d833cbc56caaaf9.js",
"4e7c58761e24d77c.js",
"4e7c58761e24d77c.js",
"5641ad33abcd1752.js",
"587400d1c019785a.js",
"58ed6ffb30191684.js",
"5b8d2b991d2c1f5b.js",
"5f730961df66e8e8.js",
"597108fd45a6e79b.js",
"60dcd48a3f6af44f.js",
"62d7c1ee4e1626c4.js",
"665f4940c7cf30c9.js",
"64cc57f82a54b7fb.js",
"66d2dbcb692491ec.module.js",
"697b3d30c1d06918.js",
"698a8cfb0705c277.js",
"69bbdc7c34ed23cc.js",
"6a323491fe75918a.js",
"6b76b8761a049c19.js",
"70bf2c409480ae10.js",
"74c5ebda713c8bd7.js",
"75172741c27c7703.js",
"753a8b016a700975.js",
"77c661b2fbe3dd3a.js",
"784a059faa166072.js",
"7855fbf5ea10e622.js",
"7cd7c68a6131f816.js",
"7df2a606ecc6cd84.js",
"7dfb625b91c5c879.js",
"7fdf990c6f42edcd.module.js",
"80d2351a5ae68524.js",
"84250e15785d8a9e.js",
"85263ecacc7a4dc5.js",
"8628cd459b39ffe8.js",
"870a0b8d891753e9.js",
"8d14286a8cc6ee9d.js",
"8d67ad04bfc356c9.js",
"8ecaef2617d8c6a7.js",
"918e105a2ff6c64a.js",
"92fd8e24864fde0a.js",
"94b8a654a87039b9.js",
"94cb828d5dcfd136.js",
"98df58b0c40fac90.js",
"9949a2e1a6844836.module.js",
"99cdfc40e20af6f5.js",
"9a666205cafd530f.js",
"a454d2e2ab3484e6.js",
"a54cca69085ad35a.js",
"a86a29773d1168d3.js",
"b205355de22689d1.js",
"b93d116fd0409637.js",
"c85bc4de504befc7.js",
"c8689b6da6fd227a.js",
"cda499c521ff60c7.js",
"d4b898b45172a637.js",
"e2ac0bea41202dc9.js",
"f01d9f3c7b2b2717.js",
"f15772354efa5ecf.js",
"f17ec9517a3339d9.js",
"fa5b398eeef697a6.js",
"fa9eaf58f51d6926.js",
"faa4a026e1e86145.js",
"fada2c7bbfabe14a.js",
"fb8db7a71f3755fc.js",
"fbde237f11796df9.js",
"fd5ea844fcc07d3d.js",
"6c5f0dd83c417a5a.js",
"78eb22badc114b6f.js",
"7afd38d79e6795a8.js",
"80950061e291542b.js",
"8a0fc8ea31727188.module.js",
"af97a3752e579223.js",
"bbffb851469a3f0e.js",
"bc302492d441d561.js",
"be2fd5888f434cbd.js",
"f3260491590325af.js",
];
#[testing::fixture("../swc_ecma_parser/tests/test262-parser/pass/*.js")]
fn identity(entry: PathBuf) {
let file_name = entry
.file_name()
.unwrap()
.to_str()
.expect("to_str() failed")
.to_string();
let input = read_to_string(&entry).unwrap();
let ignore = IGNORED_PASS_TESTS.contains(&&*file_name);
if ignore {
return;
}
let is_module = file_name.contains("module");
let msg = format!(
"\n\n========== Running codegen test {}\nSource:\n{}\n",
file_name, input
);
let mut wr = vec![];
::testing::run_test(false, |cm, handler| {
let fm = cm.load_file(&entry).expect("failed to load file");
eprintln!(
"{}\nPos: {:?} ~ {:?} (L{})",
msg,
fm.start_pos,
fm.end_pos,
fm.count_lines()
);
let (expected_code, expected_map, visualizer_url_for_expected) =
match get_expected(&fm.src, is_module) {
Some(v) => v,
None => return Ok(()),
};
println!("Expected code:\n{}", expected_code);
let expected_tokens = print_source_map(&expected_map);
let comments = SingleThreadedComments::default();
let lexer = Lexer::new(
Syntax::default(),
Default::default(),
(&*fm).into(),
Some(&comments),
);
let mut parser: Parser<Lexer> = Parser::new_from(lexer);
let mut src_map = vec![];
{
let mut wr = Box::new(swc_ecma_codegen::text_writer::JsWriter::new(
cm.clone(),
"\n",
&mut wr,
Some(&mut src_map),
)) as Box<dyn WriteJs>;
wr = Box::new(swc_ecma_codegen::text_writer::omit_trailing_semi(wr));
let mut emitter = Emitter {
cfg: swc_ecma_codegen::Config {
minify: true,
target: EsVersion::Es5,
ascii_only: true,
..Default::default()
},
cm: cm.clone(),
wr,
comments: None,
};
// Parse source
if is_module {
emitter
.emit_module(
&parser
.parse_module()
.map_err(|e| e.into_diagnostic(handler).emit())?,
)
.unwrap();
} else {
emitter
.emit_script(
&parser
.parse_script()
.map_err(|e| e.into_diagnostic(handler).emit())?,
)
.unwrap();
}
}
let actual_code = String::from_utf8(wr).unwrap();
let actual_map = cm.build_source_map_with_config(&src_map, None, SourceMapConfigImpl);
let visualizer_url_for_actual = visualizer_url(&actual_code, &actual_map);
let actual_tokens = print_source_map(&actual_map);
let common_tokens = actual_tokens
.iter()
.filter(|a| expected_tokens.contains(&**a))
.map(|v| v.to_string())
.collect::<FxHashSet<_>>();
let actual_tokens_diff = actual_tokens
.iter()
.filter(|a| !common_tokens.contains(&**a))
.map(|v| v.to_string())
.collect::<Vec<_>>();
let expected_tokens_diff = expected_tokens
.iter()
.filter(|a| !common_tokens.contains(&**a))
.map(|v| v.to_string())
.collect::<Vec<_>>();
eprintln!("---- Actual -----");
for s in actual_tokens_diff {
eprintln!("{}", s);
}
eprintln!("---- Expected -----");
for s in expected_tokens_diff {
eprintln!("{}", s);
}
dbg!(&src_map);
if actual_code != expected_code {
// Generated code is different
// We can't ensure that identical sourcemap will mean identical code
eprintln!("Actual code:\n{}", actual_code);
eprintln!("Expected code:\n{}", expected_code);
return Ok(());
}
eprintln!(
"----- Visualizer -----\nExpected: {}\nActual: {}",
visualizer_url_for_expected, visualizer_url_for_actual
);
assert_eq_same_map(&expected_map, &actual_map);
Ok(())
})
.expect("failed to run test");
}
fn get_expected(code: &str, is_module: bool) -> Option<(String, SourceMap, String)> {
let output = exec_node_js(
include_str!("./srcmap.mjs"),
JsExecOptions {
cache: true,
module: true,
args: vec![
code.to_string(),
if is_module {
"module".into()
} else {
"script".into()
},
],
},
)
.ok()?;
let v = serde_json::from_str::<serde_json::Map<String, serde_json::Value>>(&output).unwrap();
let code = v.get("code").unwrap().as_str().unwrap();
let map = v.get("map").unwrap().as_str().unwrap();
let map = SourceMap::from_slice(map.as_bytes()).expect("invalid sourcemap");
let visualizer_url = visualizer_url(code, &map);
Some((code.to_string(), map, visualizer_url))
}
fn print_source_map(map: &SourceMap) -> Vec<String> {
let mut v = map
.tokens()
.map(|t| {
format!(
"Token: {}:{} => {}:{}",
t.get_src_line(),
t.get_src_col(),
t.get_dst_line(),
t.get_dst_col()
)
})
.collect::<Vec<_>>();
v.sort();
v
}
fn assert_eq_same_map(expected: &SourceMap, actual: &SourceMap) {
for expected_token in expected.tokens() {
let actual_token = actual
.lookup_token(expected_token.get_dst_line(), expected_token.get_dst_col())
.unwrap_or_else(|| panic!("token not found: {:?}", expected_token));
if expected_token.get_src_line() == 0 && expected_token.get_src_col() == 0 {
continue;
}
assert_eq!(
expected_token.get_src_line(),
actual_token.get_src_line(),
"line mismatch at {}:{}",
expected_token.get_dst_line(),
expected_token.get_dst_col()
);
assert_eq!(
expected_token.get_src_col(),
actual_token.get_src_col(),
"col mismatch at {}:{}",
expected_token.get_dst_line(),
expected_token.get_dst_col()
);
}
}
/// Creates a url for https://evanw.github.io/source-map-visualization/
fn visualizer_url(code: &str, map: &SourceMap) -> String {
let map = {
let mut buf = vec![];
map.to_writer(&mut buf).unwrap();
String::from_utf8(buf).unwrap()
};
let code_len = format!("{}\0", code.len());
let map_len = format!("{}\0", map.len());
let hash = base64::encode(format!("{}{}{}{}", code_len, code, map_len, map));
format!("https://evanw.github.io/source-map-visualization/#{}", hash)
}
struct | ;
impl SourceMapGenConfig for SourceMapConfigImpl {
fn file_name_to_source(&self, f: &swc_common::FileName) -> String {
f.to_string()
}
fn inline_sources_content(&self, _: &swc_common::FileName) -> bool {
true
}
}
| SourceMapConfigImpl | identifier_name |
sourcemap.rs | use std::{fs::read_to_string, path::PathBuf};
use rustc_hash::FxHashSet;
use sourcemap::SourceMap;
use swc_common::{comments::SingleThreadedComments, source_map::SourceMapGenConfig};
use swc_ecma_ast::EsVersion;
use swc_ecma_codegen::{self, text_writer::WriteJs, Emitter};
use swc_ecma_parser::{lexer::Lexer, Parser, Syntax};
use swc_ecma_testing::{exec_node_js, JsExecOptions};
static IGNORED_PASS_TESTS: &[&str] = &[
// Temporally ignored
"16c7073c546fdd58.js",
"369fd0a1e40030d8.js",
"3df03e7e138b7760.js",
"5333f04581124314.js",
"a157424306915066.js",
"ce5f3bc27d5ccaac.js",
"d4e81043d808dc31.js",
// Stack size (Stupid parens)
"6b5e7e125097d439.js",
"714be6d28082eaa7.js",
"882910de7dd1aef9.js",
"dd3c63403db5c06e.js",
// Wrong tests (variable name or value is different)
"0339fa95c78c11bd.js",
"0426f15dac46e92d.js",
"0b4d61559ccce0f9.js",
"0f88c334715d2489.js",
"1093d98f5fc0758d.js",
"15d9592709b947a0.js",
"2179895ec5cc6276.js",
"247a3a57e8176ebd.js",
"441a92357939904a.js",
"47f974d6fc52e3e4.js",
"4e1a0da46ca45afe.js",
"5829d742ab805866.js",
"589dc8ad3b9aa28f.js",
"598a5cedba92154d.js",
"72d79750e81ef03d.js",
"7788d3c1e1247da9.js",
"7b72d7b43bedc895.js",
"7dab6e55461806c9.js",
"82c827ccaecbe22b.js",
"87a9b0d1d80812cc.js",
"8c80f7ee04352eba.js",
"96f5d93be9a54573.js",
"988e362ed9ddcac5.js",
"9bcae7c7f00b4e3c.js",
"a8a03a88237c4e8f.js",
"ad06370e34811a6a.js",
"b0fdc038ee292aba.js",
"b62c6dd890bef675.js",
"cb211fadccb029c7.js",
"ce968fcdf3a1987c.js",
"db3c01738aaf0b92.js",
"e1387fe892984e2b.js",
"e71c1d5f0b6b833c.js",
"e8ea384458526db0.js",
// We don't implement Annex B fully.
"1c1e2a43fe5515b6.js",
"3dabeca76119d501.js",
"52aeec7b8da212a2.js",
"59ae0289778b80cd.js",
"a4d62a651f69d815.js",
"c06df922631aeabc.js",
// swc_common issue - `\r` should be treated as a newline
"be2c3fff6426873e.js",
"db66e1e8f3f1faef.js",
"a7b8ce1d4c0f0bc2.js",
"6498dcc494193cb4.js",
"6a240463b40550d2.js",
// TODO: (maybe) fix span of `,`
"641ac9060a206183.js",
"e4cef19dab44335a.js",
"a6806d6fedbf6759.js",
"2dc0ded5a1bff643.js",
"547fa50af16beca7.js",
"547fa50af16beca7.js",
"8c8a7a2941fb6d64.js",
"9e98dbfde77e3dfe.js",
"d9eb39b11bc766f4.js",
"f9888fa1a1e366e7.js",
"78cf02220fb0937c.js",
// TODO(kdy1): Non-ascii char count
"58cb05d17f7ec010.js",
"4d2c7020de650d40.js",
"dafb7abe5b9b44f5.js",
// Our one is better
"1efde9ddd9d6e6ce.module.js",
"d010d377bcfd5565.js",
"ce0aaec02d5d4465.js",
"edd1f39f90576180.js",
"290fdc5a2f826ead.js",
"e71a91c61343cdb1.js",
"409f30dc7efe75d5.js",
"03608b6e222ae700.js",
"e54c1a2fc15cd4b8.js",
"e08e181172bad2b1.js",
"cc793d44a11617e7.js",
"54e70df597a4f9a3.js",
"efef19e06f58fdd9.js",
"e0fc2148b455a6be.js",
"10857a84ed2962f1.js",
"d7c7ff252e84e81d.js",
"0aa6aab640155051.js",
"c80d9415dde647cd.js",
"09e84f25af85b836.js",
"ce8c443eb361e1a2.js",
"affd557fd820e1f2.js",
"ec99a663d6f3983d.js",
"01fd8e8a0a42307b.js",
"e01c7172cf204b92.js",
"12d5bedf1812952a.js",
"df20c9b7a7d534cb.js",
"c767fa4d683aa3ce.js",
"bf8ffad512a5f568.js",
"c8513472857eae9c.js",
"b86b0122e80c330e.js",
"aa7e721756949024.js",
"a830df7cf2e74c9f.js",
"845631d1a33b3409.js",
"066b76285ce79182.js",
"fe2d3b945530c806.js",
"bd28a7d19ac0d50b.js",
"06c7efc128ce74a0.js",
"075c7204d0b0af60.js",
"0827a8316cca777a.js",
"b9a0cb6df76a73d2.js",
"bf210a4f0cf9e352.js",
"6edc155d463535cb.js",
"b8f8dfc41df97add.js",
"b549d045fc8e93bf.js",
"e42f306327c0f578.js",
"9a9cb616daadf90a.js",
"d2ae1c7b6e55143f.js",
"a445a478b4ce0c58.js",
"0d137e8a97ffe083.js",
"b7a6a807ae6db312.js",
"bb8b546cf9db5996.js",
"50ac15a08f7c812f.js",
"a2cb5a14559c6a50.js",
"bbff5671643cc2ea.js",
"c2f12d66ce17d5ab.js",
"13045bfdda0434e0.js",
"10d6486502949e74.js",
"119e9dce4feae643.js",
"1223609b0f7a2129.js",
"177fef3d002eb873.js",
"19ffea7e9e887e08.js",
"1c6c67fcd71f2d08.js",
"1cdce2d337e64b4f.js",
"1f039e0eeb1bc271.js",
"227118dffd2c9935.js",
"250ced8c8e83b389.js",
"a2798917405b080b.js",
"ad6bf12aa7eda975.js",
"24fa28a37061a18f.js",
"252bb992a448270e.js",
"285648c16156804f.js",
"2d10fed2af94fbd1.js",
"3097f73926c93640.js",
"30aee1020fc69090.js",
"312f85fecc352681.js",
"317532451c2ce8ff.js", | "38284ea2d9914d86.js",
"3b57183c81070eec.js",
"3bbd75d597d54fe6.js",
"3c1e2ada0ac2b8e3.js",
"3e1a6f702041b599.js",
"3e3a99768a4a1502.js",
"3e69c5cc1a7ac103.js",
"3eac36e29398cdc5.js",
"3ff52d86c77678bd.js",
"43023cd549deee77.js",
"44af28febe2288cc.js",
"478ede4cfe7906d5.js",
"4869454dd215468e.js",
"48b6f8ce65d3b3ee.js",
"4c71e11fbbc56349.js",
"4d833cbc56caaaf9.js",
"4e7c58761e24d77c.js",
"4e7c58761e24d77c.js",
"5641ad33abcd1752.js",
"587400d1c019785a.js",
"58ed6ffb30191684.js",
"5b8d2b991d2c1f5b.js",
"5f730961df66e8e8.js",
"597108fd45a6e79b.js",
"60dcd48a3f6af44f.js",
"62d7c1ee4e1626c4.js",
"665f4940c7cf30c9.js",
"64cc57f82a54b7fb.js",
"66d2dbcb692491ec.module.js",
"697b3d30c1d06918.js",
"698a8cfb0705c277.js",
"69bbdc7c34ed23cc.js",
"6a323491fe75918a.js",
"6b76b8761a049c19.js",
"70bf2c409480ae10.js",
"74c5ebda713c8bd7.js",
"75172741c27c7703.js",
"753a8b016a700975.js",
"77c661b2fbe3dd3a.js",
"784a059faa166072.js",
"7855fbf5ea10e622.js",
"7cd7c68a6131f816.js",
"7df2a606ecc6cd84.js",
"7dfb625b91c5c879.js",
"7fdf990c6f42edcd.module.js",
"80d2351a5ae68524.js",
"84250e15785d8a9e.js",
"85263ecacc7a4dc5.js",
"8628cd459b39ffe8.js",
"870a0b8d891753e9.js",
"8d14286a8cc6ee9d.js",
"8d67ad04bfc356c9.js",
"8ecaef2617d8c6a7.js",
"918e105a2ff6c64a.js",
"92fd8e24864fde0a.js",
"94b8a654a87039b9.js",
"94cb828d5dcfd136.js",
"98df58b0c40fac90.js",
"9949a2e1a6844836.module.js",
"99cdfc40e20af6f5.js",
"9a666205cafd530f.js",
"a454d2e2ab3484e6.js",
"a54cca69085ad35a.js",
"a86a29773d1168d3.js",
"b205355de22689d1.js",
"b93d116fd0409637.js",
"c85bc4de504befc7.js",
"c8689b6da6fd227a.js",
"cda499c521ff60c7.js",
"d4b898b45172a637.js",
"e2ac0bea41202dc9.js",
"f01d9f3c7b2b2717.js",
"f15772354efa5ecf.js",
"f17ec9517a3339d9.js",
"fa5b398eeef697a6.js",
"fa9eaf58f51d6926.js",
"faa4a026e1e86145.js",
"fada2c7bbfabe14a.js",
"fb8db7a71f3755fc.js",
"fbde237f11796df9.js",
"fd5ea844fcc07d3d.js",
"6c5f0dd83c417a5a.js",
"78eb22badc114b6f.js",
"7afd38d79e6795a8.js",
"80950061e291542b.js",
"8a0fc8ea31727188.module.js",
"af97a3752e579223.js",
"bbffb851469a3f0e.js",
"bc302492d441d561.js",
"be2fd5888f434cbd.js",
"f3260491590325af.js",
];
#[testing::fixture("../swc_ecma_parser/tests/test262-parser/pass/*.js")]
fn identity(entry: PathBuf) {
let file_name = entry
.file_name()
.unwrap()
.to_str()
.expect("to_str() failed")
.to_string();
let input = read_to_string(&entry).unwrap();
let ignore = IGNORED_PASS_TESTS.contains(&&*file_name);
if ignore {
return;
}
let is_module = file_name.contains("module");
let msg = format!(
"\n\n========== Running codegen test {}\nSource:\n{}\n",
file_name, input
);
let mut wr = vec![];
::testing::run_test(false, |cm, handler| {
let fm = cm.load_file(&entry).expect("failed to load file");
eprintln!(
"{}\nPos: {:?} ~ {:?} (L{})",
msg,
fm.start_pos,
fm.end_pos,
fm.count_lines()
);
let (expected_code, expected_map, visualizer_url_for_expected) =
match get_expected(&fm.src, is_module) {
Some(v) => v,
None => return Ok(()),
};
println!("Expected code:\n{}", expected_code);
let expected_tokens = print_source_map(&expected_map);
let comments = SingleThreadedComments::default();
let lexer = Lexer::new(
Syntax::default(),
Default::default(),
(&*fm).into(),
Some(&comments),
);
let mut parser: Parser<Lexer> = Parser::new_from(lexer);
let mut src_map = vec![];
{
let mut wr = Box::new(swc_ecma_codegen::text_writer::JsWriter::new(
cm.clone(),
"\n",
&mut wr,
Some(&mut src_map),
)) as Box<dyn WriteJs>;
wr = Box::new(swc_ecma_codegen::text_writer::omit_trailing_semi(wr));
let mut emitter = Emitter {
cfg: swc_ecma_codegen::Config {
minify: true,
target: EsVersion::Es5,
ascii_only: true,
..Default::default()
},
cm: cm.clone(),
wr,
comments: None,
};
// Parse source
if is_module {
emitter
.emit_module(
&parser
.parse_module()
.map_err(|e| e.into_diagnostic(handler).emit())?,
)
.unwrap();
} else {
emitter
.emit_script(
&parser
.parse_script()
.map_err(|e| e.into_diagnostic(handler).emit())?,
)
.unwrap();
}
}
let actual_code = String::from_utf8(wr).unwrap();
let actual_map = cm.build_source_map_with_config(&src_map, None, SourceMapConfigImpl);
let visualizer_url_for_actual = visualizer_url(&actual_code, &actual_map);
let actual_tokens = print_source_map(&actual_map);
let common_tokens = actual_tokens
.iter()
.filter(|a| expected_tokens.contains(&**a))
.map(|v| v.to_string())
.collect::<FxHashSet<_>>();
let actual_tokens_diff = actual_tokens
.iter()
.filter(|a| !common_tokens.contains(&**a))
.map(|v| v.to_string())
.collect::<Vec<_>>();
let expected_tokens_diff = expected_tokens
.iter()
.filter(|a| !common_tokens.contains(&**a))
.map(|v| v.to_string())
.collect::<Vec<_>>();
eprintln!("---- Actual -----");
for s in actual_tokens_diff {
eprintln!("{}", s);
}
eprintln!("---- Expected -----");
for s in expected_tokens_diff {
eprintln!("{}", s);
}
dbg!(&src_map);
if actual_code != expected_code {
// Generated code is different
// We can't ensure that identical sourcemap will mean identical code
eprintln!("Actual code:\n{}", actual_code);
eprintln!("Expected code:\n{}", expected_code);
return Ok(());
}
eprintln!(
"----- Visualizer -----\nExpected: {}\nActual: {}",
visualizer_url_for_expected, visualizer_url_for_actual
);
assert_eq_same_map(&expected_map, &actual_map);
Ok(())
})
.expect("failed to run test");
}
fn get_expected(code: &str, is_module: bool) -> Option<(String, SourceMap, String)> {
let output = exec_node_js(
include_str!("./srcmap.mjs"),
JsExecOptions {
cache: true,
module: true,
args: vec![
code.to_string(),
if is_module {
"module".into()
} else {
"script".into()
},
],
},
)
.ok()?;
let v = serde_json::from_str::<serde_json::Map<String, serde_json::Value>>(&output).unwrap();
let code = v.get("code").unwrap().as_str().unwrap();
let map = v.get("map").unwrap().as_str().unwrap();
let map = SourceMap::from_slice(map.as_bytes()).expect("invalid sourcemap");
let visualizer_url = visualizer_url(code, &map);
Some((code.to_string(), map, visualizer_url))
}
fn print_source_map(map: &SourceMap) -> Vec<String> {
let mut v = map
.tokens()
.map(|t| {
format!(
"Token: {}:{} => {}:{}",
t.get_src_line(),
t.get_src_col(),
t.get_dst_line(),
t.get_dst_col()
)
})
.collect::<Vec<_>>();
v.sort();
v
}
fn assert_eq_same_map(expected: &SourceMap, actual: &SourceMap) {
for expected_token in expected.tokens() {
let actual_token = actual
.lookup_token(expected_token.get_dst_line(), expected_token.get_dst_col())
.unwrap_or_else(|| panic!("token not found: {:?}", expected_token));
if expected_token.get_src_line() == 0 && expected_token.get_src_col() == 0 {
continue;
}
assert_eq!(
expected_token.get_src_line(),
actual_token.get_src_line(),
"line mismatch at {}:{}",
expected_token.get_dst_line(),
expected_token.get_dst_col()
);
assert_eq!(
expected_token.get_src_col(),
actual_token.get_src_col(),
"col mismatch at {}:{}",
expected_token.get_dst_line(),
expected_token.get_dst_col()
);
}
}
/// Creates a url for https://evanw.github.io/source-map-visualization/
fn visualizer_url(code: &str, map: &SourceMap) -> String {
let map = {
let mut buf = vec![];
map.to_writer(&mut buf).unwrap();
String::from_utf8(buf).unwrap()
};
let code_len = format!("{}\0", code.len());
let map_len = format!("{}\0", map.len());
let hash = base64::encode(format!("{}{}{}{}", code_len, code, map_len, map));
format!("https://evanw.github.io/source-map-visualization/#{}", hash)
}
struct SourceMapConfigImpl;
impl SourceMapGenConfig for SourceMapConfigImpl {
fn file_name_to_source(&self, f: &swc_common::FileName) -> String {
f.to_string()
}
fn inline_sources_content(&self, _: &swc_common::FileName) -> bool {
true
}
} | "32b635a9667a9fb1.js",
"36224cf8215ad8e4.js",
"37e4a6eca1ece7e5.js", | random_line_split |
cloudevents.go | package cfg
import (
"fmt"
"time"
"github.com/Shopify/sarama"
kafkaSarama "github.com/cloudevents/sdk-go/protocol/kafka_sarama/v2"
cloudevents "github.com/cloudevents/sdk-go/v2"
eventclient "github.com/cloudevents/sdk-go/v2/client"
)
const (
CloudEventsProtocolKafkaSarama = "kafka_sarama"
)
// SaramaConfig 用于kafka客户端配置,结构等同于sarama类库
// https://pkg.go.dev/github.com/Shopify/sarama#Config
type SaramaConfig struct {
Net struct {
// 默认:5
MaxOpenRequests int `mapstructure:"max_open_requests"`
// 以下默认:30s
DialTimeout time.Duration `mapstructure:"dial_timeout"`
ReadTimeout time.Duration `mapstructure:"read_timeout"`
WriteTimeout time.Duration `mapstructure:"write_timeout"`
TLS struct {
// 默认:false
Enable bool `mapstructure:"enable"`
// TODO; 不支持开启tls认证
} `mapstructure:"tls"`
SASL struct {
Enable bool `mapstructure:"enable"`
Mechanism string `mapstructure:"mechanism"`
User string `mapstructure:"user"`
Password string `mapstructure:"password"`
} `mapstructure:"sasl"`
KeepAlive time.Duration `mapstructure:"keep_alive"`
} `mapstructure:"net"`
Metadata struct {
// 获取元数据的策略
Retry struct {
// 当集群处于leader选举时最大重试次数,默认:3
Max int `mapstructure:"max"`
// 当集群处于leader选举重试的等扽时间,默认:250ms
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
// 后台与集群同步metadata的间隔,默认: 10m
RefreshFrequency time.Duration `mapstructure:"refresh_frequency"`
// 是否为所有topic维护元数据,默认: true
Full bool `mapstructure:"full"`
// 等待metadata响应的超时时间,默认禁用表示失败则继续重试
// Net.[Dial|Read]Timeout * BrokerCount * (Metadata.Retry.Max + 1) + Metadata.Retry.Backoff * Metadata.Retry.Max
Timeout time.Duration `mapstructure:"timeout"`
// 如果提供的topic不存在是否允许自动创建(前提是集群配置可允许该操作),默认:true
AllowAutoTopicCreation bool `mapstructure:"allow_auto_topic_creation"`
} `mapstructure:"metadata"`
// 生产者相关配置
Producer struct {
// 允许的最大消息大小,最好等于集群配置的:message.max.bytes,默认:1000000
MaxMessageBytes int `mapstructure:"max_message_bytes"`
// 消息生产被集群接收的策略,主要影响是否会丢消息与性能,默认:1
// 设置为0: 生产者不等扽集群的响应,继续下一条
// 设置为1: 生成者等待leader响应,然后在继续下一条
// 设置为-1: 生产者必须等待所有"in-sync"副本响应完成,继续下一条,这个副本由: min.insync.replicas 决定
RequiredAcks int16 `mapstructure:"required_acks"`
// 生产者等扽响应的最长时间,当RequiredAcks设置大于1时才有效,等同于`request.timeout.ms`,默认:10s
Timeout time.Duration `mapstructure:"timeout"`
// 生产的消息使用的压缩算法,默认不压缩,默认:0
Compression int8 `mapstructure:"compression"`
// 压缩的等级,依赖具体压缩算法
CompressionLevel int `mapstructure:"commpression_level"`
// Partitioner PartitionerConstructor
// 如果启用,生产者将确保每个消息只写入一个副本。
Idempotent bool `mapstructure:"idempotent"`
// 消息响应成功或失败是否写入channel里,如果写入则必须被消费,否则可能出现死锁
Return struct {
// 成功的消息是否记录,默认:false
Successes bool `mapstructure:"successes"`
// 失败的消息是否记录,默认:true
Errors bool `mapstructure:"errors"`
} `mapstructure:"return"`
// 生产者达到以下阈值时触发打包消息发送至集群
Flush struct {
// 最大值被 sarama.MaxRequestSiz 限制,值:100 * 1024 * 1024
Bytes int `mapstructure:"bytes"`
// 消息数量阈值,最大限制通过以下MaxMessages控制
Messages int `mapstructure:"messages"`
// 等待时间阈值
Frequency time.Duration `mapstructure:"frequency"`
// 在单一请求broker时允许的最大消息数,设置为0则不限制
MaxMessages int `mapstructure:"max_messages"`
} `mapstructure:"flush"`
// 生产消息失败的重试策略
Retry struct {
// 最大重试次数,等同于jvm的:message.send.max.retries,默认:3
Max int `mapstructure:"max"`
// 重试失败之间等待间隔,等同于jvm的:retry.backoff.ms,默认值:100ms
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
} `mapstructure:"producer"`
// 消费者相关配置
Consumer struct {
Group struct {
Session struct {
// 当broker端未收到消费者的心跳包,超过该时间间隔,则broker认为该消费者离线,将进行重均衡,默认:10s
// 该值必须在broker配置`group.min.session.timeout.ms`与`group.max.session.timeout.ms`之间
Timeout time.Duration `mapstructure:"timeout"`
} `mapstructure:"session"`
Heartbeat struct {
// kafka协调者预期的心跳间隔,用于确保消费者session处于活跃状态,值必须小于session.timeout,默认:3s
// 一般建议设置为session.timeout的3分之一
Interval time.Duration `mapstructure:"interval"`
} `mapstructure:"heartbeat"`
Rebalance struct {
// topic分区分配给消费者的策略,支持:range, roundrobin, sticky,默认:range
// range: 标识使用范围分区分配策略的策略
// roundrobin: 标识使用循环分区分配策略的策略
// sticky: 标识使用粘性分区分配策略的策略
Strategy string `mapstructure:"strategy"`
// 重均衡开始后,消费者加入群组的最大允许时间,默认:60s
Timeout time.Duration `mapstructure:"timeout"`
Retry struct {
// 最大重试次数,默认:4
Max int `mapstructure:"max"`
// 重试失败之间等待间隔,默认:2s
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
} `mapstructure:"rebalance"`
} `mapstructure:"group"`
// 读取分区失败的重试
Retry struct {
// 重试失败之间等待间隔,默认:2s
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
// 控制每个请求所拉取数据的大小,单位bytes
Fetch struct {
// 必须等待的最小消息大小,不要设置为0,等同于jvm `fetch.min.bytes`,默认:1
Min int32 `mapstructure:"min"`
// 每请求从broker获取的消息大小,默认:1MB
// 尽量大于你消息的大部分大小,否则还要做额外的切割,等同于jvm `fetch.message.max.bytes`
Default int32 `mapstructure:"default"`
// 每请求可最大获取的消息大小,值为0表示不限制,等同于jvm `fetch.message.max.bytes`,默认:0
Max int32 `mapstructure:"max"`
} `mapstructure:"fetch"`
// broker在等待消息达到 Consumer.Fetch.Min 大小的最大时间,不要设置为0,默认:250ms
// 建议在 100-500ms,等同于jvm `fetch.wait.max.ms`
MaxWaitTime time.Duration `mapstructure:"max_wait_time"`
// 消费者为用户处理消息所需的最长时间,如果写入消息通道所需的时间超过此时间,则该分区将停止获取更多消息,直到可以再次继续。
// 由于消息通道已缓冲,因此实际宽限时间为 (MaxProcessingTime * ChannelBufferSize),默认:100ms
MaxProcessingTime time.Duration `mapstructure:"max_processing_time"`
// 消息响应成功或失败是否写入channel里,如果写入则必须被消费,否则可能出现死锁
Return struct {
// 失败的消息是否记录,默认:false
Errors bool `mapstructure:"errors"`
} `mapstructure:"return"`
// 控制如何提交消费offset
Offsets struct {
AutoCommit struct {
// 是否自动更新,默认:true
Enable bool `mapstructure:"enable"`
// 自动更新频率,默认:1s
Interval time.Duration `mapstructure:"interval"`
} `mapstructure:"auto_commit"`
// OffsetNewest=-1 代表访问 commit 位置的下一条消息
// OffsetOldest=-2 消费者可以访问到的 topic 里的最早的消息
Initial int64 `mapstructure:"initial"`
Retention time.Duration `mapstructure:"retention"`
// 提交offset失败的重试
Retry struct {
// 最大重试次数,默认:3
Max int `mapstructure:"max"`
} `mapstructure:"retry"`
} `mapstructure:"offsets"`
// 消费隔离级别,ReadUncommitted 或 ReadCommitted,默认:ReadUncommitted
// ReadUncommitted: 可以读取到未提交的数据(报错终止前的数据)
// ReadCommitted: 生产者已提交的数据才能读取到
IsolationLevel int8 `mapstructure:"isolation_level"`
} `mapstructure:"consumer"`
// 标识该消费者
ClientID string `mapstructure:"client_id"`
// 机柜标识,见 'broker.rack'
RackID string `mapstructure:"rack_id"`
// 默认:256
ChannelBufferSize int `mapstructure:"chnnel_buffer_size"`
Version string `mapstructure:"version"`
}
// Parse 解析为 https://pkg.go.dev/github.com/Shopify/sarama#Config
func (s *SaramaConfig) Parse() *sarama.Config {
c := sarama.NewConfig()
// net
if s.Net.MaxOpenRequests != 0 {
c.Net.MaxOpenRequests = s.Net.MaxOpenRequests
}
if s.Net.DialTimeout.Seconds() != 0 {
c.Net.DialTimeout = s.Net.DialTimeout
}
if s.Net.ReadTimeout.Seconds() != 0 {
c.Net.ReadTimeout = s.Net.ReadTimeout
}
if s.Net.WriteTimeout.Seconds() != 0 {
c.Net.WriteTimeout = s.Net.WriteTimeout
}
// TODO; tls目前配置不支持解析,需客户端自己解决
if s.Net.TLS.Enable {
c.Net.TLS.Enable = true
}
if s.Net.SASL.Enable {
c.Net.SASL.Enable = true
c.Net.SASL.Mechanism = sarama.SASLMechanism(s.Net.SASL.Mechanism)
if c.Net.SASL.Mechanism == sarama.SASLTypeSCRAMSHA256 {
c.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: SHA256} }
}
if c.Net.SASL.Mechanism == sarama.SASLTypeSCRAMSHA512 {
c.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: SHA512} }
}
c.Net.SASL.User = s.Net.SASL.User
c.Net.SASL.Password = s.Net.SASL.Password
}
if s.Net.KeepAlive.Seconds() != 0 {
c.Net.KeepAlive = s.Net.KeepAlive
}
// metadata
if s.Metadata.Retry.Max != 0 {
c.Metadata.Retry.Max = s.Metadata.Retry.Max
}
if s.Metadata.Retry.Backoff.Seconds() != 0 {
c.Metadata.Retry.Backoff = s.Metadata.Retry.Backoff
}
if s.Metadata.RefreshFrequency.Seconds() != 0 {
c.Metadata.RefreshFrequency = s.Metadata.RefreshFrequency
}
if s.Metadata.Full {
c.Metadata.Full = true
}
if s.Metadata.Timeout.Seconds() != 0 {
c.Metadata.Timeout = s.Metadata.Timeout
}
if s.Metadata.AllowAutoTopicCreation {
c.Metadata.AllowAutoTopicCreation = true
}
// producer
if s.Producer.MaxMessageBytes != 0 {
c.Producer.MaxMessageBytes = s.Producer.MaxMessageBytes
}
if s.Producer.RequiredAcks != 0 {
c.Producer.RequiredAcks = sarama.RequiredAcks(s.Producer.RequiredAcks)
}
if s.Producer.Timeout.Seconds() != 0 {
c.Producer.Timeout = s.Producer.Timeout
}
if s.Producer.Compression != 0 {
c.Producer.Compression = sarama.CompressionCodec(s.Producer.Compression)
}
if s.Producer.CompressionLevel != 0 {
c.Producer.CompressionLevel = s.Producer.CompressionLevel
}
if s.Producer.Idempotent {
c.Producer.Idempotent = true
}
if s.Producer.Return.Successes {
c.Producer.Return.Successes = true
}
if s.Producer.Return.Errors {
c.Producer.Return.Errors = true
}
if s.Producer.Flush.Bytes != 0 {
c.Producer.Flush.Bytes = s.Producer.Flush.Bytes
}
if s.Producer.Flush.Messages != 0 {
c.Producer.Flush.Messages = s.Producer.Flush.Messages
}
if s.Producer.Flush.Frequency.Seconds() != 0 {
c.Producer.Flush.Frequency = s.Producer.Flush.Frequency
}
if s.Producer.Flush.MaxMessages != 0 {
c.Producer.Flush.MaxMessages = s.Producer.Flush.MaxMessages
}
| etry.Max = s.Producer.Retry.Max
}
if s.Producer.Retry.Backoff.Seconds() != 0 {
c.Producer.Retry.Backoff = s.Producer.Retry.Backoff
}
// consumer
if s.Consumer.Group.Session.Timeout.Seconds() != 0 {
c.Consumer.Group.Session.Timeout = s.Consumer.Group.Session.Timeout
}
if s.Consumer.Group.Heartbeat.Interval.Seconds() != 0 {
c.Consumer.Group.Heartbeat.Interval = s.Consumer.Group.Heartbeat.Interval
}
if s.Consumer.Group.Rebalance.Strategy != "" {
switch s.Consumer.Group.Rebalance.Strategy {
case sarama.RangeBalanceStrategyName:
c.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategyRange
case sarama.RoundRobinBalanceStrategyName:
c.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategyRoundRobin
case sarama.StickyBalanceStrategyName:
c.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategySticky
}
}
if s.Consumer.Group.Rebalance.Timeout.Seconds() != 0 {
c.Consumer.Group.Rebalance.Timeout = s.Consumer.Group.Rebalance.Timeout
}
if s.Consumer.Group.Rebalance.Retry.Max != 0 {
c.Consumer.Group.Rebalance.Retry.Max = s.Consumer.Group.Rebalance.Retry.Max
}
if s.Consumer.Group.Rebalance.Retry.Backoff.Seconds() != 0 {
c.Consumer.Group.Rebalance.Retry.Backoff = s.Consumer.Group.Rebalance.Retry.Backoff
}
if s.Consumer.Retry.Backoff.Seconds() != 0 {
c.Consumer.Retry.Backoff = s.Consumer.Retry.Backoff
}
if s.Consumer.Fetch.Min != 0 {
c.Consumer.Fetch.Min = s.Consumer.Fetch.Min
}
if s.Consumer.Fetch.Max != 0 {
c.Consumer.Fetch.Max = s.Consumer.Fetch.Max
}
if s.Consumer.Fetch.Default != 0 {
c.Consumer.Fetch.Default = s.Consumer.Fetch.Default
}
if s.Consumer.MaxWaitTime.Seconds() != 0 {
c.Consumer.MaxWaitTime = s.Consumer.MaxWaitTime
}
if s.Consumer.MaxProcessingTime.Seconds() != 0 {
c.Consumer.MaxProcessingTime = s.Consumer.MaxProcessingTime
}
if s.Consumer.Return.Errors {
c.Consumer.Return.Errors = true
}
if s.Consumer.Offsets.AutoCommit.Enable {
c.Consumer.Offsets.AutoCommit.Enable = true
c.Consumer.Offsets.AutoCommit.Interval = s.Consumer.Offsets.AutoCommit.Interval
}
if s.Consumer.Offsets.Initial != 0 {
c.Consumer.Offsets.Initial = s.Consumer.Offsets.Initial
}
if s.Consumer.Offsets.Retention.Seconds() != 0 {
c.Consumer.Offsets.Retention = s.Consumer.Offsets.Retention
}
if s.Consumer.Offsets.Retry.Max != 0 {
c.Consumer.Offsets.Retry.Max = s.Consumer.Offsets.Retry.Max
}
if s.Consumer.IsolationLevel != 0 {
c.Consumer.IsolationLevel = sarama.IsolationLevel(s.Consumer.IsolationLevel)
}
if s.ClientID != "" {
c.ClientID = s.ClientID
}
if s.RackID != "" {
c.RackID = s.RackID
}
if s.ChannelBufferSize != 0 {
c.ChannelBufferSize = s.ChannelBufferSize
}
if s.Version != "" {
ver, err := sarama.ParseKafkaVersion(s.Version)
if err != nil {
// 解析版本错误则指定版本1.0.0
c.Version = sarama.V1_0_0_0
} else {
c.Version = ver
}
}
return c
}
// InitCloudEvents 初始化 cloudevents 数据实例
func (c *LocalConfig) InitCloudEvents() error {
if c.CloudEvents == nil || c.CloudEvents.Protocol == "" {
return nil
}
switch c.CloudEvents.Protocol {
case CloudEventsProtocolKafkaSarama:
default:
return fmt.Errorf("not support cloudevents protocol %v", c.CloudEvents.Protocol)
}
saramaConfig := c.CloudEvents.KafkaSarama.Config.Parse()
sender, err := kafkaSarama.NewSender(c.CloudEvents.KafkaSarama.Brokers,
saramaConfig,
c.CloudEvents.KafkaSarama.Topic)
if err != nil {
return err
}
client, err := cloudevents.NewClient(sender, cloudevents.WithTimeNow(), cloudevents.WithUUIDs())
if err != nil {
return err
}
c.eventClient = client
return nil
}
// GetCloudEvents 用于获取 cloudevents 连接客户端
func (c *LocalConfig) GetCloudEvents() (eventclient.Client, error) {
if c.eventClient == nil {
return nil, fmt.Errorf("cloudevents client is nil")
}
return c.eventClient, nil
}
| if s.Producer.Retry.Max != 0 {
c.Producer.R | conditional_block |
cloudevents.go | package cfg
import (
"fmt"
"time"
"github.com/Shopify/sarama"
kafkaSarama "github.com/cloudevents/sdk-go/protocol/kafka_sarama/v2"
cloudevents "github.com/cloudevents/sdk-go/v2"
eventclient "github.com/cloudevents/sdk-go/v2/client"
)
const (
CloudEventsProtocolKafkaSarama = "kafka_sarama"
)
// SaramaConfig 用于kafka客户端配置,结构等同于sarama类库
// https://pkg.go.dev/github.com/Shopify/sarama#Config
type SaramaConfig struct {
Net struct {
// 默认:5
MaxOpenRequests int `mapstructure:"max_open_requests"`
// 以下默认:30s
DialTimeout time.Duration `mapstructure:"dial_timeout"`
ReadTimeout time.Duration `mapstructure:"read_timeout"`
WriteTimeout time.Duration `mapstructure:"write_timeout"`
TLS struct {
// 默认:false
Enable bool `mapstructure:"enable"`
// TODO; 不支持开启tls认证
} `mapstructure:"tls"`
SASL struct {
Enable bool `mapstructure:"enable"`
Mechanism string `mapstructure:"mechanism"`
User string `mapstructure:"user"`
Password string `mapstructure:"password"`
} `mapstructure:"sasl"`
KeepAlive time.Duration `mapstructure:"keep_alive"`
} `mapstructure:"net"`
Metadata struct {
// 获取元数据的策略
Retry struct {
// 当集群处于leader选举时最大重试次数,默认:3
Max int `mapstructure:"max"`
// 当集群处于leader选举重试的等扽时间,默认:250ms
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
// 后台与集群同步metadata的间隔,默认: 10m
RefreshFrequency time.Duration `mapstructure:"refresh_frequency"`
// 是否为所有topic维护元数据,默认: true
Full bool `mapstructure:"full"`
// 等待metadata响应的超时时间,默认禁用表示失败则继续重试
// Net.[Dial|Read]Timeout * BrokerCount * (Metadata.Retry.Max + 1) + Metadata.Retry.Backoff * Metadata.Retry.Max
Timeout time.Duration `mapstructure:"timeout"`
// 如果提供的topic不存在是否允许自动创建(前提是集群配置可允许该操作),默认:true
AllowAutoTopicCreation bool `mapstructure:"allow_auto_topic_creation"`
} `mapstructure:"metadata"`
// 生产者相关配置
Producer struct {
// 允许的最大消息大小,最好等于集群配置的:message.max.bytes,默认:1000000
MaxMessageBytes int `mapstructure:"max_message_bytes"`
// 消息生产被集群接收的策略,主要影响是否会丢消息与性能,默认:1
// 设置为0: 生产者不等扽集群的响应,继续下一条
// 设置为1: 生成者等待leader响应,然后在继续下一条
// 设置为-1: 生产者必须等待所有"in-sync"副本响应完成,继续下一条,这个副本由: min.insync.replicas 决定
RequiredAcks int16 `mapstructure:"required_acks"`
// 生产者等扽响应的最长时间,当RequiredAcks设置大于1时才有效,等同于`request.timeout.ms`,默认:10s
Timeout time.Duration `mapstructure:"timeout"`
// 生产的消息使用的压缩算法,默认不压缩,默认:0
Compression int8 `mapstructure:"compression"`
// 压缩的等级,依赖具体压缩算法
CompressionLevel int `mapstructure:"commpression_level"`
// Partitioner PartitionerConstructor
// 如果启用,生产者将确保每个消息只写入一个副本。
Idempotent bool `mapstructure:"idempotent"`
// 消息响应成功或失败是否写入channel里,如果写入则必须被消费,否则可能出现死锁
Return struct {
// 成功的消息是否记录,默认:false
Successes bool `mapstructure:"successes"`
// 失败的消息是否记录,默认:true
Errors bool `mapstructure:"errors"`
} `mapstructure:"return"`
// 生产者达到以下阈值时触发打包消息发送至集群
Flush struct {
// 最大值被 sarama.MaxRequestSiz 限制,值:100 * 1024 * 1024
Bytes int `mapstructure:"bytes"`
// 消息数量阈值,最大限制通过以下MaxMessages控制
Messages int `mapstructure:"messages"`
// 等待时间阈值
Frequency time.Duration `mapstructure:"frequency"`
// 在单一请求broker时允许的最大消息数,设置为0则不限制
MaxMessages int `mapstructure:"max_messages"`
} `mapstructure:"flush"`
// 生产消息失败的重试策略
Retry struct {
// 最大重试次数,等同于jvm的:message.send.max.retries,默认:3
Max int `mapstructure:"max"`
// 重试失败之间等待间隔,等同于jvm的:retry.backoff.ms,默认值:100ms
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
} `mapstructure:"producer"`
// 消费者相关配置
Consumer struct {
Group struct {
Session struct {
// 当broker端未收到消费者的心跳包,超过该时间间隔,则broker认为该消费者离线,将进行重均衡,默认:10s
// 该值必须在broker配置`group.min.session.timeout.ms`与`group.max.session.timeout.ms`之间
Timeout time.Duration `mapstructure:"timeout"`
} `mapstructure:"session"`
Heartbeat struct {
// kafka协调者预期的心跳间隔,用于确保消费者session处于活跃状态,值必须小于session.timeout,默认:3s
// 一般建议设置为session.timeout的3分之一
Interval time.Duration `mapstructure:"interval"`
} `mapstructure:"heartbeat"`
Rebalance struct {
// topic分区分配给消费者的策略,支持:range, roundrobin, sticky,默认:range
// range: 标识使用范围分区分配策略的策略
// roundrobin: 标识使用循环分区分配策略的策略
// sticky: 标识使用粘性分区分配策略的策略
Strategy string `mapstructure:"strategy"`
// 重均衡开始后,消费者加入群组的最大允许时间,默认:60s
Timeout time.Duration `mapstructure:"timeout"`
Retry struct {
// 最大重试次数,默认:4
Max int `mapstructure:"max"`
// 重试失败之间等待间隔,默认:2s
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
} `mapstructure:"rebalance"`
} `mapstructure:"group"`
// 读取分区失败的重试
Retry struct {
// 重试失败之间等待间隔,默认:2s
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
// 控制每个请求所拉取数据的大小,单位bytes
Fetch struct {
// 必须等待的最小消息大小,不要设置为0,等同于jvm `fetch.min.bytes`,默认:1
Min int32 `mapstructure:"min"`
// 每请求从broker获取的消息大小,默认:1MB
// 尽量大于你消息的大部分大小,否则还要做额外的切割,等同于jvm `fetch.message.max.bytes`
Default int32 `mapstructure:"default"`
// 每请求可最大获取的消息大小,值为0表示不限制,等同于jvm `fetch.message.max.bytes`,默认:0
Max int32 `mapstructure:"max"`
} `mapstructure:"fetch"`
// broker在等待消息达到 Consumer.Fetch.Min 大小的最大时间,不要设置为0,默认:250ms
// 建议在 100-500ms,等同于jvm `fetch.wait.max.ms`
MaxWaitTime time.Duration `mapstructure:"max_wait_time"`
// 消费者为用户处理消息所需的最长时间,如果写入消息通道所需的时间超过此时间,则该分区将停止获取更多消息,直到可以再次继续。
// 由于消息通道已缓冲,因此实际宽限时间为 (MaxProcessingTime * ChannelBufferSize),默认:100ms
MaxProcessingTime time.Duration `mapstructure:"max_processing_time"`
// 消息响应成功或失败是否写入channel里,如果写入则必须被消费,否则可能出现死锁
Return struct {
// 失败的消息是否记录,默认:false
Errors bool `mapstructure:"errors"`
} `mapstructure:"return"`
// 控制如何提交消费offset
Offsets struct {
AutoCommit struct {
// 是否自动更新,默认:true
Enable bool `mapstructure:"enable"`
// 自动更新频率,默认:1s
Interval time.Duration `mapstructure:"interval"`
} `mapstructure:"auto_commit"`
// OffsetNewest=-1 代表访问 commit 位置的下一条消息
// OffsetOldest=-2 消费者可以访问到的 topic 里的最早的消息
Initial int64 `mapstructure:"initial"`
Retention time.Duration `mapstructure:"retention"`
// 提交offset失败的重试
Retry struct {
// 最大重试次数,默认:3
Max int `mapstructure:"max"`
} `mapstructure:"retry"`
} `mapstructure:"offsets"`
// 消费隔离级别,ReadUncommitted 或 ReadCommitted,默认:ReadUncommitted
// ReadUncommitted: 可以读取到未提交的数据(报错终止前的数据)
// ReadCommitted: 生产者已提交的数据才能读取到
IsolationLevel int8 `mapstructure:"isolation_level"`
} `mapstructure:"consumer"`
// 标识该消费者
ClientID string `mapstructure:"client_id"`
// 机柜标识,见 'broker.rack'
RackID string `mapstructure:"rack_id"`
// 默认:256
ChannelBufferSize int `mapstructure:"chnnel_buffer_size"`
Version string `mapstructure:"version"`
}
// Parse 解析为 https://pkg.go.dev/github.com/Shopify/sarama#Config
func (s *SaramaConfig) Parse() *sarama.Config {
c := sarama.NewConfig()
// net
if s.Net.MaxOpenRequests != 0 {
c.Net.MaxOpenRequests = s.Net.MaxOpenRequests
}
if s.Net.DialTimeout.Seconds() != 0 {
c.Net.DialTimeout = s.Net.DialTimeout
}
if s.Net.ReadTimeout.Seconds() != 0 {
c.Net.ReadTimeout = s.Net.ReadTimeout
}
if s.Net.WriteTimeout.Seconds() != 0 {
c.Net.WriteTimeout = s.Net.WriteTimeout
}
// TODO; tls目前配置不支持解析,需客户端自己解决
if s.Net.TLS.Enable {
c.Net.TLS.Enable = true
}
if s.Net.SASL.Enable {
c.Net.SASL.Enable = true
c.Net.SASL.Mechanism = sarama.SASLMechanism(s.Net.SASL.Mechanism)
if c.Net.SASL.Mechanism == sarama.SASLTypeSCRAMSHA256 {
c.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: SHA256} }
}
if c.Net.SASL.Mechanism == sarama.SASLTypeSCRAMSHA512 {
c.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: SHA512} }
}
c.Net.SASL.User = s.Net.SASL.User
c.Net.SASL.Password = s.Net.SASL.Password
}
if s.Net.KeepAlive.Seconds() != 0 {
c.Net.KeepAlive = s.Net.KeepAlive
}
// metadata
if s.Metadata.Retry.Max != 0 {
c.Metadata.Retry.Max = s.Metadata.Retry.Max
}
if s.Metadata.Retry.Backoff.Seconds() != 0 {
c.Metadata.Retry.Backoff = s.Metadata.Retry.Backoff
}
if s.Metadata.RefreshFrequency.Seconds() != 0 {
c.Metadata.RefreshFrequency = s.Metadata.RefreshFrequency
}
if s.Metadata.Full {
c.Metadata.Full = true
}
if s.Metadata.Timeout.Seconds() != 0 {
c.Metadata.Timeout = s.Metadata.Timeout
}
if s.Metadata.AllowAutoTopicCreation {
c.Metadata.AllowAutoTopicCreation = true
}
// producer
if s.Producer.MaxMessageBytes != 0 {
c.Producer.MaxMessageBytes = s.Producer.MaxMessageBytes
}
if s.Producer.RequiredAcks != 0 {
c.Producer.RequiredAcks = sarama.RequiredAcks(s.Producer.RequiredAcks)
}
if s.Producer.Timeout.Seconds() != 0 {
c.Producer.Timeout = s.Producer.Timeout
}
if s.Producer.Compression != 0 {
c.Producer.Compression = sarama.CompressionCodec(s.Producer.Compression)
}
if s.Producer.CompressionLevel != 0 {
c.Producer.CompressionLevel = s.Producer.CompressionLevel
}
if s.Producer.Idempotent {
c.Producer.Idempotent = true
}
if s.Producer.Return.Successes {
c.Producer.Return.Successes = true
}
if s.Producer.Return.Errors {
c.Producer.Return.Errors = true
}
if s.Producer.Flush.Bytes != 0 {
c.Producer.Flush.Bytes = s.Producer.Flush.Bytes
}
if s.Producer.Flush.Messages != 0 {
c.Producer.Flush.Messages = s.Producer.Flush.Messages
}
if s.Producer.Flush.Frequency.Seconds() != 0 {
c.Producer.Flush.Frequency = s.Producer.Flush.Frequency
}
if s.Producer.Flush.MaxMessages != 0 {
c.Producer.Flush.MaxMessages = s.Producer.Flush.MaxMessages
}
if s.Producer.Retry.Max != 0 {
c.Producer.Retry.Max = s.Producer.Retry.Max
}
if s.Producer.Retry.Backoff.Seconds() != 0 {
c.Producer.Retry.Backoff = s.Producer.Retry.Backoff
}
// consumer
if s.Consumer.Group.Session.Timeout.Seconds() != 0 {
c.Consumer.Group.Session.Timeout = s.Consumer.Group.Session.Timeout
}
if s.Consumer.Group.Heartbeat.Interval.Seconds() != 0 {
c.Consumer.Group.Heartbeat.Interval = s.Consumer.Group.Heartbeat.Interval
}
if s.Consumer.Group.Rebalance.Strategy != "" {
switch s.Consumer.Group.Rebalance.Strategy {
case sarama.RangeBalanceStrategyName:
c.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategyRange
case sarama.RoundRobinBalanceStrategyName:
c.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategyRoundRobin
case sarama.StickyBalanceStrategyName:
c.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategySticky
}
}
if s.Consumer.Group.Rebalance.Timeout.Seconds() != 0 {
c.Consumer.Group.Rebalance.Timeout = s.Consumer.Group.Rebalance.Timeout
}
if s.Consumer.Group.Rebalance.Retry.Max != 0 {
c.Consumer.Group.Rebalance.Retry.Max = s.Consumer.Group.Rebalance.Retry.Max
}
if s.Consumer.Group.Rebalance.Retry.Backoff.Seconds() != 0 {
c.Consumer.Group.Rebalance.Retry.Backoff = s.Consumer.Group.Rebalance.Retry.Backoff
}
if s.Consumer.Retry.Backoff.Seconds() != 0 {
c.Consumer.Retry.Backoff = s.Consumer.Retry.Backoff
}
if s.Consumer.Fetch.Min != 0 {
c.Consumer.Fetch.Min = s.Consumer.Fetch.Min
}
if s.Consumer.Fetch.Max != 0 {
c.Consumer.Fetch.Max = s.Consumer.Fetch.Max
}
if s.Consumer.Fetch.Default != 0 {
c.Consumer.Fetch.Default = s.Consumer.Fetch.Default
}
if s.Consumer.MaxWaitTime.Seconds() != 0 {
c.Consumer.MaxWaitTime = s.Consumer.MaxWaitTime
}
if s.Consumer.MaxProcessingTime.Seconds() != 0 {
c.Consumer.MaxProcessingTime = s.Consumer.MaxProcessingTime
}
if s.Consumer.Return.Errors {
c.Consumer.Return.Errors = true
}
if s.Consumer.Offsets.AutoCommit.Enable {
c.Consumer.Offsets.AutoCommit.Enable = true
c.Consumer.Offsets.AutoCommit.Interval = s.Consumer.Offsets.AutoCommit.Interval
}
if s.Consumer.Offsets.Initial != 0 {
c.Consumer.Offsets.Initial = s.Consumer.Offsets.Initial
}
if s.Consumer.Offsets.Retention.Seconds() != 0 {
c.Consumer.Offsets.Retention = s.Consumer.Offsets.Retention
}
if s.Consumer.Offsets.Retry.Max != 0 {
c.Consumer.Offsets.Retry.Max = s.Consumer.Offsets.Retry.Max
}
if s.Consumer.IsolationLevel != 0 {
c.Consumer.IsolationLevel = sarama.IsolationLevel(s.Consumer.IsolationLevel)
}
if s.ClientID != "" {
c.ClientID = s.ClientID
}
if s.RackID != "" {
c.RackID = s.RackID
}
if s.ChannelBufferSize != 0 {
c.ChannelBufferSize = s.ChannelBufferSize
}
if s.Version != "" {
ver, err := sarama.ParseKafkaVersion(s.Version)
if err != nil {
// 解析版本错误则指定版本1.0.0
c.Version = sarama.V1_0_0_0
} else {
c.Version = ver
}
}
return c
}
// InitCloudEvents 初始化 cloudevents 数据实例
func (c *LocalConfig) InitCloudEvents() error {
if c.CloudEvents == nil || c.CloudEvents.Protocol == "" {
return nil
}
switch c.CloudEvents.Protocol {
case CloudEventsProtocolKafkaSarama:
default:
return fmt.Errorf("not support cloudevents protocol %v", c.CloudEvents.Protocol)
}
saramaConfig := c.CloudEvents.KafkaSarama.Config.Parse()
sender, err := kafkaSarama.NewSender(c.CloudEvents.KafkaSarama.Brokers,
saramaConfig,
c.CloudEvents.KafkaSarama.Topic)
if err != nil {
return err
}
client, err := cloudevents.NewClient(sender, cloudevents.WithTimeNow(), cloudevents.WithUUIDs())
if err != nil {
return err
}
c.eventClient = client
return nil
}
// GetCloudEvents 用于获取 cloudevents 连接客户端
func (c *LocalConfig) GetCloudEvents() (eventclient.Client, error) {
if c.eventClient == nil {
return nil, fmt.Errorf("cloudevents client is nil")
}
return c.eventClient, nil
}
| identifier_body | ||
cloudevents.go | package cfg
import (
"fmt"
"time"
"github.com/Shopify/sarama"
kafkaSarama "github.com/cloudevents/sdk-go/protocol/kafka_sarama/v2"
cloudevents "github.com/cloudevents/sdk-go/v2"
eventclient "github.com/cloudevents/sdk-go/v2/client"
)
const (
CloudEventsProtocolKafkaSarama = "kafka_sarama"
)
// SaramaConfig 用于kafka客户端配置,结构等同于sarama类库
// https://pkg.go.dev/github.com/Shopify/sarama#Config
type SaramaConfig struct {
Net struct {
// 默认:5
MaxOpenRequests int `mapstructure:"max_open_requests"`
// 以下默认:30s
DialTimeout time.Duration `mapstructure:"dial_timeout"`
ReadTimeout time.Duration `mapstructure:"read_timeout"`
WriteTimeout time.Duration `mapstructure:"write_timeout"`
TLS struct {
// 默认:false
Enable bool `mapstructure:"enable"`
// TODO; 不支持开启tls认证
} `mapstructure:"tls"`
SASL struct {
Enable bool `mapstructure:"enable"`
Mechanism string `mapstructure:"mechanism"`
User string `mapstructure:"user"`
Password string `mapstructure:"password"`
} `mapstructure:"sasl"`
KeepAlive time.Duration `mapstructure:"keep_alive"`
} `mapstructure:"net"`
Metadata struct {
// 获取元数据的策略
Retry struct {
// 当集群处于leader选举时最大重试次数,默认:3
Max int `mapstructure:"max"`
// 当集群处于leader选举重试的等扽时间,默认:250ms
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
// 后台与集群同步metadata的间隔,默认: 10m
RefreshFrequency time.Duration `mapstructure:"refresh_frequency"`
// 是否为所有topic维护元数据,默认: true
Full bool `mapstructure:"full"`
// 等待metadata响应的超时时间,默认禁用表示失败则继续重试
// Net.[Dial|Read]Timeout * BrokerCount * (Metadata.Retry.Max + 1) + Metadata.Retry.Backoff * Metadata.Retry.Max
Timeout time.Duration `mapstructure:"timeout"`
// 如果提供的topic不存在是否允许自动创建(前提是集群配置可允许该操作),默认:true
AllowAutoTopicCreation bool `mapstructure:"allow_auto_topic_creation"`
} `mapstructure:"metadata"`
// 生产者相关配置
Producer struct {
// 允许的最大消息大小,最好等于集群配置的:message.max.bytes,默认:1000000
MaxMessageBytes int `mapstructure:"max_message_bytes"`
// 消息生产被集群接收的策略,主要影响是否会丢消息与性能,默认:1
// 设置为0: 生产者不等扽集群的响应,继续下一条
// 设置为1: 生成者等待leader响应,然后在继续下一条
// 设置为-1: 生产者必须等待所有"in-sync"副本响应完成,继续下一条,这个副本由: min.insync.replicas 决定
RequiredAcks int16 `mapstructure:"required_acks"`
// 生产者等扽响应的最长时间,当RequiredAcks设置大于1时才有效,等同于`request.timeout.ms`,默认:10s
Timeout time.Duration `mapstructure:"timeout"`
// 生产的消息使用的压缩算法,默认不压缩,默认:0
Compression int8 `mapstructure:"compression"`
// 压缩的等级,依赖具体压缩算法
CompressionLevel int `mapstructure:"commpression_level"`
// Partitioner PartitionerConstructor
// 如果启用,生产者将确保每个消息只写入一个副本。
Idempotent bool `mapstructure:"idempotent"`
// 消息响应成功或失败是否写入channel里,如果写入则必须被消费,否则可能出现死锁
Return struct {
// 成功的消息是否记录,默认:false
Successes bool `mapstructure:"successes"`
// 失败的消息是否记录,默认:true
Errors bool `mapstructure:"errors"`
} `mapstructure:"return"`
// 生产者达到以下阈值时触发打包消息发送至集群
Flush struct {
// 最大值被 sarama.MaxRequestSiz 限制,值:100 * 1024 * 1024
Bytes int `mapstructure:"bytes"`
// 消息数量阈值,最大限制通过以下MaxMessages控制
Messages int `mapstructure:"messages"`
// 等待时间阈值
Frequency time.Duration `mapstructure:"frequency"`
// 在单一请求broker时允许的最大消息数,设置为0则不限制
MaxMessages int `mapstructure:"max_messages"`
} `mapstructure:"flush"`
// 生产消息失败的重试策略
Retry struct {
// 最大重试次数,等同于jvm的:message.send.max.retries,默认:3
Max int `mapstructure:"max"`
// 重试失败之间等待间隔,等同于jvm的:retry.backoff.ms,默认值:100ms
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
} `mapstructure:"producer"`
// 消费者相关配置
Consumer struct {
Group struct {
Session struct {
// 当broker端未收到消费者的心跳包,超过该时间间隔,则broker认为该消费者离线,将进行重均衡,默认:10s
// 该值必须在broker配置`group.min.session.timeout.ms`与`group.max.session.timeout.ms`之间
Timeout time.Duration `mapstructure:"timeout"`
} `mapstructure:"session"`
Heartbeat struct {
// kafka协调者预期的心跳间隔,用于确保消费者session处于活跃状态,值必须小于session.timeout,默认:3s
// 一般建议设置为session.timeout的3分之一
Interval time.Duration `mapstructure:"interval"`
} `mapstructure:"heartbeat"`
Rebalance struct {
// topic分区分配给消费者的策略,支持:range, roundrobin, sticky,默认:range
// range: 标识使用范围分区分配策略的策略
// roundrobin: 标识使用循环分区分配策略的策略
// sticky: 标识使用粘性分区分配策略的策略
Strategy string `mapstructure:"strategy"`
// 重均衡开始后,消费者加入群组的最大允许时间,默认:60s
Timeout time.Duration `mapstructure:"timeout"`
Retry struct {
// 最大重试次数,默认:4
Max int `mapstructure:"max"`
// 重试失败之间等待间隔,默认:2s
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
} `mapstructure:"rebalance"`
} `mapstructure:"group"`
// 读取分区失败的重试
Retry struct {
// 重试失败之间等待间隔,默认:2s
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
// 控制每个请求所拉取数据的大小,单位bytes
Fetch struct {
// 必须等待的最小消息大小,不要设置为0,等同于jvm `fetch.min.bytes`,默认:1
Min int32 `mapstructure:"min"`
// 每请求从broker获取的消息大小,默认:1MB
// 尽量大于你消息的大部分大小,否则还要做额外的切割,等同于jvm `fetch.message.max.bytes`
Default int32 `mapstructure:"default"`
// 每请求可最大获取的消息大小,值为0表示不限制,等同于jvm `fetch.message.max.bytes`,默认:0
Max int32 `mapstructure:"max"`
} `mapstructure:"fetch"`
// broker在等待消息达到 Consumer.Fetch.Min 大小的最大时间,不要设置为0,默认:250ms
// 建议在 100-500ms,等同于jvm `fetch.wait.max.ms`
MaxWaitTime time.Duration `mapstructure:"max_wait_time"`
// 消费者为用户处理消息所需的最长时间,如果写入消息通道所需的时间超过此时间,则该分区将停止获取更多消息,直到可以再次继续。
// 由于消息通道已缓冲,因此实际宽限时间为 (MaxProcessingTime * ChannelBufferSize),默认:100ms
MaxProcessingTime time.Duration `mapstructure:"max_processing_time"`
// 消息响应成功或失败是否写入channel里,如果写入则必须被消费,否则可能出现死锁
Return struct {
// 失败的消息是否记录,默认:false
Errors bool `mapstructure:"errors"`
} `mapstructure:"return"`
// 控制如何提交消费offset
Offsets struct {
AutoCommit struct {
// 是否自动更新,默认:true
Enable bool `mapstructure:"enable"`
// 自动更新频率,默认:1s
Interval time.Duration `mapstructure:"interval"`
} `mapstructure:"auto_commit"`
// OffsetNewest=-1 代表访问 commit 位置的下一条消息
// OffsetOldest=-2 消费者可以访问到的 topic 里的最早的消息
Initial int64 `mapstructure:"initial"`
Retention time.Duration `mapstructure:"retention"`
// 提交offset失败的重试
Retry struct {
// 最大重试次数,默认:3
Max int `mapstructure:"max"`
} `mapstructure:"retry"`
} `mapstructure:"offsets"`
// 消费隔离级别,ReadUncommitted 或 ReadCommitted,默认:ReadUncommitted
// ReadUncommitted: 可以读取到未提交的数据(报错终止前的数据)
// ReadCommitted: 生产者已提交的数据才能读取到
IsolationLevel int8 `mapstructure:"isolation_level"`
} `mapstructure:"consumer"`
// 标识该消费者
ClientID string `mapstructure:"client_id"`
// 机柜标识,见 'broker.rack'
RackID string `mapstructure:"rack_id"`
// 默认:256
ChannelBufferSize int `mapstructure:"chnnel_buffer_size"`
Version string `mapstructure:"version"`
}
// Parse 解析为 https://pkg.go.dev/github.com/Shopify/sarama#Config
func (s *SaramaConfig) Parse() *sarama.Config {
c := sarama.NewConfig()
// net
if s.Net.MaxOpenRequests != 0 {
c.Net.MaxOpenRequests = s.Net.MaxOpenRequests
}
if s.Net.DialTimeout.Seconds() != 0 {
c.Net.DialTimeout = s.Net.DialTimeout
}
if s.Net.ReadTimeout.Seconds() != 0 {
c.Net.ReadTimeout = s.Net.ReadTimeout
}
if s.Net.WriteTimeout.Seconds() != 0 {
c.Net.WriteTimeout = s.Net.WriteTimeout
}
// TODO; tls目前配置不支持解析,需客户端自己解决
if s.Net.TLS.Enable {
c.Net.TLS.Enable = true
}
if s.Net.SASL.Enable {
c.Net.SASL.Enable = true
c.Net.SASL.Mechanism = sarama.SASLMechanism(s.Net.SASL.Mechanism)
if c.Net.SASL.Mechanism == sarama.SASLTypeSCRAMSHA256 {
c.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: SHA256} }
}
if c.Net.SASL.Mechanism == sarama.SASLTypeSCRAMSHA512 {
c.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: SHA512} }
}
c.Net.SASL.User = s.Net.SASL.User
c.Net.SASL.Password = s.Net.SASL.Password
}
if s.Net.KeepAlive.Seconds() != 0 {
c.Net.KeepAlive = s.Net.KeepAlive
}
// metadata
if s.Metadata.Retry.Max != 0 {
c.Metadata.Retry.Max = s.Metadata.Retry.Max
}
if s.Metadata.Retry.Backoff.Seconds() != 0 {
c.Metadata.Retry.Backoff = s.Metadata.Retry.Backoff
}
if s.Metadata.RefreshFrequency.Seconds() != 0 {
c.Metadata.RefreshFrequency = s.Metadata.RefreshFrequency
}
if s.Metadata.Full {
c.Metadata.Full = true
}
if s.Metadata.Timeout.Seconds() != 0 {
c.Metadata.Timeout = s.Metadata.Timeout
}
if s.Metadata.AllowAutoTopicCreation {
c.Metadata.AllowAutoTopicCreation = true
}
// producer
if s.Producer.MaxMessageBytes != 0 {
c.Producer.MaxMessageBytes = s.Producer.MaxMessageBytes
}
if s.Producer.RequiredAcks != 0 {
c.Producer.RequiredAcks = sarama.RequiredAcks(s.Producer.RequiredAcks)
}
if s.Producer.Timeout.Seconds() != 0 {
c.Producer.Timeout = s.Producer.Timeout
}
if s.Producer.Compression != 0 {
c.Producer.Compression = sarama.CompressionCodec(s.Producer.Compression)
}
if s.Producer.CompressionLevel != 0 {
c.Producer.CompressionLevel = s.Producer.CompressionLevel
}
if s.Producer.Idempotent {
c.Producer.Idempotent = true
}
if s.Producer.Return.Successes {
c.Producer.Return.Successes = true
}
if s.Producer.Return.Errors {
c.Producer.Return.Errors = true
}
if s.Producer.Flush.Bytes != 0 {
c.Producer.Flush.Bytes = s.Producer.Flush.Bytes
}
if s.Producer.Flush.Messages != 0 {
c.Producer.Flush.Messages = s.Producer.Flush.Messages
}
if s.Producer.Flush.Frequency.Seconds() != 0 {
c.Producer.Flush.Frequency = s.Producer.Flush.Frequency
}
if s.Producer.Flush.MaxMessages != 0 {
c.Producer.Flush.MaxMessages = s.Producer.Flush.MaxMessages
}
if s.Producer.Retry.Max != 0 {
c.Producer.Retry.Max = s.Producer.Retry.Max
}
if s.Producer.Retry.Backoff.Seconds() != 0 {
c.Producer.Retry.Backoff = s.Producer.Retry.Backoff
}
// consumer
if s.Consumer.Group.Session.Timeout.Seconds() != 0 {
c.Consumer.Group.Session.Timeout = s.Consumer.Group.Session.Timeout
}
if s.Consumer.Group.Heartbeat.Interval.Seconds() != 0 {
c.Consumer.Group.Heartbeat.Interval = s.Consumer.Group.Heartbeat.Interval
}
if s.Consumer.Group.Rebalance.Strategy != "" {
switch s.Consumer.Group.Rebalance.Strategy {
case sarama.RangeBalanceStrategyName:
c.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategyRange
case sarama.RoundRobinBalanceStrategyName:
c.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategyRoundRobin
case sarama.StickyBalanceStrategyName:
c.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategySticky
}
}
if s.Consumer.Group.Rebalance.Timeout.Seconds() != 0 {
c.Consumer.Group.Rebalance.Timeout = s.Consumer.Group.Rebalance.Timeout
}
if s.Consumer.Group.Rebalance.Retry.Max != 0 {
c.Consumer.Group.Rebalance.Retry.Max = s.Consumer.Group.Rebalance.Retry.Max
}
if s.Consumer.Group.Rebalance.Retry.Backoff.Seconds() != 0 {
c.Consumer.Group.Rebalance.Retry.Backoff = s.Consumer.Group.Rebalance.Retry.Backoff
}
if s.Consumer.Retry.Backoff.Seconds() != 0 {
c.Consumer.Retry.Backoff = s.Consumer.Retry.Backoff
}
if s.Consumer.Fetch.Min != 0 {
c.Consumer.Fetch.Min = s.Consumer.Fetch.Min
}
if s.Consumer.Fetch.Max != 0 {
c.Consumer.Fetch.Max = s.Consumer.Fetch.Max
}
if s.Consumer.Fetch.Default != 0 {
c.Consumer.Fetch.Default = s.Consumer.Fetch.Default
}
if s.Consumer.MaxWaitTime.Seconds() != 0 {
c.Consumer.MaxWaitTime = s.Consumer.MaxWaitTime
}
if s.Consumer.MaxProcessingTime.Seconds() != 0 {
c.Consumer.MaxProcessingTime = s.Consumer.MaxProcessingTime
}
if s.Consumer.Return.Errors {
c.Consumer.Return.Errors = true
}
if s.Consumer.Offsets.AutoCommit.Enable {
c.Consumer.Offsets.AutoCommit.Enable = true
c.Consumer.Offsets.AutoCommit.Interval = s.Consumer.Offsets.AutoCommit.Interval
}
if s.Consumer.Offsets.Initial != 0 {
c.Consumer.Offsets.Initial = s.Consumer.Offsets.Initial
}
if s.Consumer.Offsets.Retention.Seconds() != 0 {
c.Consumer.Offsets.Retention = s.Consumer.Offsets.Retention
}
if s.Consumer.Offsets.Retry.Max != 0 {
c.Consumer.Offsets.Retry.Max = s.Consumer.Offsets.Retry.Max
}
if s.Consumer.IsolationLevel != 0 {
c.Consumer.IsolationLevel = sarama.IsolationLevel(s.Consumer.IsolationLevel)
}
if s.ClientID != "" {
c.ClientID = s.ClientID
}
if s.RackID != "" {
c.RackID = s.RackID
}
if s.ChannelBufferSize != 0 {
c.ChannelBufferSize = s.ChannelBufferSize
}
if s.Version != "" {
ver, err := sarama.ParseKafkaVersion(s.Version)
if err != nil {
// 解析版本错误则指定版本1.0.0
c.Version = sarama.V1_0_0_0
} else {
c.Version = ver
}
}
return c
}
// InitCloudEvents 初始化 cloudevents 数据实例
func (c *LocalConfig) InitCloudEvents() error {
if c.CloudEvents == nil || c.CloudEvents.Protocol == "" {
return nil
}
switch c.CloudEvents.Protocol {
case CloudEventsProtocolKafkaSarama:
default:
return fmt.Errorf("not support cloudevents protocol %v", c.CloudEvents.Protocol)
}
saramaConfig := c.CloudEvents.KafkaSarama.Config.Parse()
sender, err := kafkaSarama.NewSender(c.CloudEvents.KafkaSarama.Brokers, |
client, err := cloudevents.NewClient(sender, cloudevents.WithTimeNow(), cloudevents.WithUUIDs())
if err != nil {
return err
}
c.eventClient = client
return nil
}
// GetCloudEvents 用于获取 cloudevents 连接客户端
func (c *LocalConfig) GetCloudEvents() (eventclient.Client, error) {
if c.eventClient == nil {
return nil, fmt.Errorf("cloudevents client is nil")
}
return c.eventClient, nil
} | saramaConfig,
c.CloudEvents.KafkaSarama.Topic)
if err != nil {
return err
} | random_line_split |
cloudevents.go | package cfg
import (
"fmt"
"time"
"github.com/Shopify/sarama"
kafkaSarama "github.com/cloudevents/sdk-go/protocol/kafka_sarama/v2"
cloudevents "github.com/cloudevents/sdk-go/v2"
eventclient "github.com/cloudevents/sdk-go/v2/client"
)
const (
CloudEventsProtocolKafkaSarama = "kafka_sarama"
)
// SaramaConfig 用于kafka客户端配置,结构等同于sarama类库
// https://pkg.go.dev/github.com/Shopify/sarama#Config
type SaramaConfig struct {
Net struct {
// 默认:5
MaxOpenRequests int `mapstructure:"max_open_requests"`
// 以下默认:30s
DialTimeout time.Duration `mapstructure:"dial_timeout"`
ReadTimeout time.Duration `mapstructure:"read_timeout"`
WriteTimeout time.Duration `mapstructure:"write_timeout"`
TLS struct {
// 默认:false
Enable bool `mapstructure:"enable"`
// TODO; 不支持开启tls认证
} `mapstructure:"tls"`
SASL struct {
Enable bool `mapstructure:"enable"`
Mechanism string `mapstructure:"mechanism"`
User string `mapstructure:"user"`
Password string `mapstructure:"password"`
} `mapstructure:"sasl"`
KeepAlive time.Duration `mapstructure:"keep_alive"`
} `mapstructure:"net"`
Metadata struct {
// 获取元数据的策略
Retry struct {
// 当集群处于leader选举时最大重试次数,默认:3
Max int `mapstructure:"max"`
// 当集群处于leader选举重试的等扽时间,默认:250ms
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
// 后台与集群同步metadata的间隔,默认: 10m
RefreshFrequency time.Duration `mapstructure:"refresh_frequency"`
// 是否为所有topic维护元数据,默认: true
Full bool `mapstructure:"full"`
// 等待metadata响应的超时时间,默认禁用表示失败则继续重试
// Net.[Dial|Read]Timeout * BrokerCount * (Metadata.Retry.Max + 1) + Metadata.Retry.Backoff * Metadata.Retry.Max
Timeout time.Duration `mapstructure:"timeout"`
// 如果提供的topic不存在是否允许自动创建(前提是集群配置可允许该操作),默认:true
AllowAutoTopicCreation bool `mapstructure:"allow_auto_topic_creation"`
} `mapstructure:"metadata"`
// 生产者相关配置
Producer struct {
// 允许的最大消息大小,最好等于集群配置的:message.max.bytes,默认:1000000
MaxMessageBytes int `mapstructure:"max_message_bytes"`
// 消息生产被集群接收的策略,主要影响是否会丢消息与性能,默认:1
// 设置为0: 生产者不等扽集群的响应,继续下一条
// 设置为1: 生成者等待leader响应,然后在继续下一条
// 设置为-1: 生产者必须等待所有"in-sync"副本响应完成,继续下一条,这个副本由: min.insync.replicas 决定
RequiredAcks int16 `mapstructure:"required_acks"`
// 生产者等扽响应的最长时间,当RequiredAcks设置大于1时才有效,等同于`request.timeout.ms`,默认:10s
Timeout time.Duration `mapstructure:"timeout"`
// 生产的消息使用的压缩算法,默认不压缩,默认:0
Compression int8 `mapstructure:"compression"`
// 压缩的等级,依赖具体压缩算法
CompressionLevel int `mapstructure:"commpression_level"`
// Partitioner PartitionerConstructor
// 如果启用,生产者将确保每个消息只写入一个副本。
Idempotent bool `mapstructure:"idempotent"`
// 消息响应成功或失败是否写入channel里,如果写入则必须被消费,否则可能出现死锁
Return struct {
// 成功的消息是否记录,默认:false
Successes bool `mapstructure:"successes"`
// 失败的消息是否记录,默认:true
Errors bool `mapstructure:"errors"`
} `mapstructure:"return"`
// 生产者达到以下阈值时触发打包消息发送至集群
Flush struct {
// 最大值被 sarama.MaxRequestSiz 限制,值:100 * 1024 * 1024
Bytes int `mapstructure:"bytes"`
// 消息数量阈值,最大限制通过以下MaxMessages控制
Messages int `mapstructure:"messages"`
// 等待时间阈值
Frequency time.Duration `mapstructure:"frequency"`
// 在单一请求broker时允许的最大消息数,设置为0则不限制
MaxMessages int `mapstructure:"max_messages"`
} `mapstructure:"flush"`
// 生产消息失败的重试策略
Retry struct {
// 最大重试次数,等同于jvm的:message.send.max.retries,默认:3
Max int `mapstructure:"max"`
// 重试失败之间等待间隔,等同于jvm的:retry.backoff.ms,默认值:100ms
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
} `mapstructure:"producer"`
// 消费者相关配置
Consumer struct {
Group struct {
Session struct {
// 当broker端未收到消费者的心跳包,超过该时间间隔,则broker认为该消费者离线,将进行重均衡,默认:10s
// 该值必须在broker配置`group.min.session.timeout.ms`与`group.max.session.timeout.ms`之间
Timeout time.Duration `mapstructure:"timeout"`
} `mapstructure:"session"`
Heartbeat struct {
// kafka协调者预期的心跳间隔,用于确保消费者session处于活跃状态,值必须小于session.timeout,默认:3s
// 一般建议设置为session.timeout的3分之一
Interval time.Duration `mapstructure:"interval"`
} `mapstructure:"heartbeat"`
Rebalance struct {
// topic分区分配给消费者的策略,支持:range, roundrobin, sticky,默认:range
// range: 标识使用范围分区分配策略的策略
// roundrobin: 标识使用循环分区分配策略的策略
// sticky: 标识使用粘性分区分配策略的策略
Strategy string `mapstructure:"strategy"`
// 重均衡开始后,消费者加入群组的最大允许时间,默认:60s
Timeout time.Duration `mapstructure:"timeout"`
Retry struct {
// 最大重试次数,默认:4
Max int `mapstructure:"max"`
// 重试失败之间等待间隔,默认:2s
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
} `mapstructure:"rebalance"`
} `mapstructure:"group"`
// 读取分区失败的重试
Retry struct {
// 重试失败之间等待间隔,默认:2s
Backoff time.Duration `mapstructure:"backoff"`
} `mapstructure:"retry"`
// 控制每个请求所拉取数据的大小,单位bytes
Fetch struct {
// 必须等待的最小消息大小,不要设置为0,等同于jvm `fetch.min.bytes`,默认:1
Min int32 `mapstructure:"min"`
// 每请求从broker获取的消息大小,默认:1MB
// 尽量大于你消息的大部分大小,否则还要做额外的切割,等同于jvm `fetch.message.max.bytes`
Default int32 `mapstructure:"default"`
// 每请求可最大获取的消息大小,值为0表示不限制,等同于jvm `fetch.message.max.bytes`,默认:0
Max int32 `mapstructure:"max"`
} `mapstructure:"fetch"`
// broker在等待消息达到 Consumer.Fetch.Min 大小的最大时间,不要设置为0,默认:250ms
// 建议在 100-500ms,等同于jvm `fetch.wait.max.ms`
MaxWaitTime time.Duration `mapstructure:"max_wait_time"`
// 消费者为用户处理消息所需的最长时间,如果写入消息通道所需的时间超过此时间,则该分区将停止获取更多消息,直到可以再次继续。
// 由于消息通道已缓冲,因此实际宽限时间为 (MaxProcessingTime * ChannelBufferSize),默认:100ms
MaxProcessingTime time.Duration `mapstructure:"max_processing_time"`
// 消息响应成功或失败是否写入channel里,如果写入则必须被消费,否则可能出现死锁
Return struct {
// 失败的消息是否记录,默认:false
Errors bool `mapstructure:"errors"`
} `mapstructure:"return"`
// 控制如何提交消费offset
Offsets struct {
AutoCommit struct {
// 是否自动更新,默认:true
Enable bool `mapstructure:"enable"`
// 自动更新频率,默认:1s
Interval time.Duration `mapstructure:"interval"`
} `mapstructure:"auto_commit"`
// OffsetNewest=-1 代表访问 commit 位置的下一条消息
// OffsetOldest=-2 消费者可以访问到的 topic 里的最早的消息
Initial int64 `mapstructure:"initial"`
Retention time.Duration `mapstructure:"retention"`
// 提交offset失败的重试
Retry struct {
// 最大重试次数,默认:3
Max int `mapstructure:"max"`
} `mapstructure:"retry"`
} `mapstructure:"offsets"`
// 消费隔离级别,ReadUncommitted 或 ReadCommitted,默认:ReadUncommitted
// ReadUncommitted: 可以读取到未提交的数据(报错终止前的数据)
// ReadCommitted: 生产者已提交的数据才能读取到
IsolationLevel int8 `mapstructure:"isolation_level"`
} `mapstructure:"consumer"`
// 标识该消费者
ClientID string `mapstructure:"client_id"`
// 机柜标识,见 'broker.rack'
RackID string `mapstructure:"rack_id"`
// 默认:256
ChannelBufferSize int `mapstructure:"chnnel_buffer_size"`
Version string `mapstructure:"version"`
}
// Parse 解析为 https://pkg.go.dev/github.com/Shopify/sarama#Config
func (s *SaramaConfig) Parse() *sarama.Config {
c := sarama.NewConfig()
// net
if s.Net.MaxOpenRequests != 0 {
c.Net.MaxOpenRequests = s.Net.MaxOpenRequests
}
if s.Net.DialTimeout.Seconds() != 0 {
c.Net.DialTimeout = s.Net.DialTimeout
}
if s.Net.ReadTimeout.Seconds() != 0 {
c.Net.ReadTimeout = s.Net.ReadTimeout
}
if s.Net.WriteTimeout.Seconds() != 0 {
c.Net.WriteTimeout = s.Net.WriteTimeout
}
// TODO; tls目前配置不支持解析,需客户端自己解决
if s.Net.TLS.Enable {
c.Net.TLS.Enable = true
}
if s.Net.SASL.Enable {
c.Net.SASL.Enable = true
c.Net.SASL.Mechanism = sarama.SASLMechanism(s.Net.SASL.Mechanism)
if c.Net.SASL.Mechanism == sarama.SASLTypeSCRAMSHA256 {
c.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: SHA256} }
}
if c.Net.SASL.Mechanism == sarama.SASLTypeSCRAMSHA512 {
c.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: SHA512} }
}
c.Net.SASL.User = s.Net.SASL.User
c.Net.SASL.Password = s.Net.SASL.Password
}
if s.Net.KeepAlive.Seconds() != 0 {
c.Net.KeepAlive = s.Net.KeepAlive
}
// metadata
if s.Metadata.Retry.Max != 0 {
c.Metadata.Retry.Max = s.Metadata.Retry.Max
}
if s.Metadata.Retry.Backoff.Seconds() != 0 {
c.Metadata.Retry.Backoff = s.Metadata.Retry.Backoff
}
if s.Metadata.RefreshFrequency.Seconds() != 0 {
c.Metadata.RefreshFrequency = s.Metadata.RefreshFrequency
}
if s.Metadata.Full {
c.Metadata.Full = true
}
if s.Metadata.Timeout.Seconds() != 0 {
c.Metadata.Timeout = s.Metadata.Timeout
}
if s.Metadata.AllowAutoTopicCreation {
c.Metadata.AllowAutoTopicCreation = true
}
// producer
if s.Producer.MaxMessageBytes != 0 {
c.Producer.MaxMessageBytes = s.Producer.MaxMessageBytes
}
if s.Producer.RequiredAcks != 0 {
c.Producer.RequiredAcks = sarama.RequiredAcks(s.Producer.RequiredAcks)
}
if s.Producer.Timeout.Seconds() != 0 {
c.Producer.Timeout = s.Producer.Timeout
}
if s.Producer.Compression != 0 {
c.Producer.Compression = sarama.CompressionCodec(s.Producer.Compression)
}
if s.Producer.CompressionLevel != 0 {
c.Producer.CompressionLevel = s.Producer.CompressionLevel
}
if s.Producer.Idempotent {
c.Producer.Idempotent = true
}
if s.Producer.Return.Successes {
c.Producer.Return.Successes = true
}
if s.Producer.Return.Errors {
c.Producer.Return.Errors = true
}
if s.Producer.Flush.Bytes != 0 {
c.Producer.Flush | s = s.Producer.Flush.Bytes
}
if s.Producer.Flush.Messages != 0 {
c.Producer.Flush.Messages = s.Producer.Flush.Messages
}
if s.Producer.Flush.Frequency.Seconds() != 0 {
c.Producer.Flush.Frequency = s.Producer.Flush.Frequency
}
if s.Producer.Flush.MaxMessages != 0 {
c.Producer.Flush.MaxMessages = s.Producer.Flush.MaxMessages
}
if s.Producer.Retry.Max != 0 {
c.Producer.Retry.Max = s.Producer.Retry.Max
}
if s.Producer.Retry.Backoff.Seconds() != 0 {
c.Producer.Retry.Backoff = s.Producer.Retry.Backoff
}
// consumer
if s.Consumer.Group.Session.Timeout.Seconds() != 0 {
c.Consumer.Group.Session.Timeout = s.Consumer.Group.Session.Timeout
}
if s.Consumer.Group.Heartbeat.Interval.Seconds() != 0 {
c.Consumer.Group.Heartbeat.Interval = s.Consumer.Group.Heartbeat.Interval
}
if s.Consumer.Group.Rebalance.Strategy != "" {
switch s.Consumer.Group.Rebalance.Strategy {
case sarama.RangeBalanceStrategyName:
c.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategyRange
case sarama.RoundRobinBalanceStrategyName:
c.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategyRoundRobin
case sarama.StickyBalanceStrategyName:
c.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategySticky
}
}
if s.Consumer.Group.Rebalance.Timeout.Seconds() != 0 {
c.Consumer.Group.Rebalance.Timeout = s.Consumer.Group.Rebalance.Timeout
}
if s.Consumer.Group.Rebalance.Retry.Max != 0 {
c.Consumer.Group.Rebalance.Retry.Max = s.Consumer.Group.Rebalance.Retry.Max
}
if s.Consumer.Group.Rebalance.Retry.Backoff.Seconds() != 0 {
c.Consumer.Group.Rebalance.Retry.Backoff = s.Consumer.Group.Rebalance.Retry.Backoff
}
if s.Consumer.Retry.Backoff.Seconds() != 0 {
c.Consumer.Retry.Backoff = s.Consumer.Retry.Backoff
}
if s.Consumer.Fetch.Min != 0 {
c.Consumer.Fetch.Min = s.Consumer.Fetch.Min
}
if s.Consumer.Fetch.Max != 0 {
c.Consumer.Fetch.Max = s.Consumer.Fetch.Max
}
if s.Consumer.Fetch.Default != 0 {
c.Consumer.Fetch.Default = s.Consumer.Fetch.Default
}
if s.Consumer.MaxWaitTime.Seconds() != 0 {
c.Consumer.MaxWaitTime = s.Consumer.MaxWaitTime
}
if s.Consumer.MaxProcessingTime.Seconds() != 0 {
c.Consumer.MaxProcessingTime = s.Consumer.MaxProcessingTime
}
if s.Consumer.Return.Errors {
c.Consumer.Return.Errors = true
}
if s.Consumer.Offsets.AutoCommit.Enable {
c.Consumer.Offsets.AutoCommit.Enable = true
c.Consumer.Offsets.AutoCommit.Interval = s.Consumer.Offsets.AutoCommit.Interval
}
if s.Consumer.Offsets.Initial != 0 {
c.Consumer.Offsets.Initial = s.Consumer.Offsets.Initial
}
if s.Consumer.Offsets.Retention.Seconds() != 0 {
c.Consumer.Offsets.Retention = s.Consumer.Offsets.Retention
}
if s.Consumer.Offsets.Retry.Max != 0 {
c.Consumer.Offsets.Retry.Max = s.Consumer.Offsets.Retry.Max
}
if s.Consumer.IsolationLevel != 0 {
c.Consumer.IsolationLevel = sarama.IsolationLevel(s.Consumer.IsolationLevel)
}
if s.ClientID != "" {
c.ClientID = s.ClientID
}
if s.RackID != "" {
c.RackID = s.RackID
}
if s.ChannelBufferSize != 0 {
c.ChannelBufferSize = s.ChannelBufferSize
}
if s.Version != "" {
ver, err := sarama.ParseKafkaVersion(s.Version)
if err != nil {
// 解析版本错误则指定版本1.0.0
c.Version = sarama.V1_0_0_0
} else {
c.Version = ver
}
}
return c
}
// InitCloudEvents 初始化 cloudevents 数据实例
func (c *LocalConfig) InitCloudEvents() error {
if c.CloudEvents == nil || c.CloudEvents.Protocol == "" {
return nil
}
switch c.CloudEvents.Protocol {
case CloudEventsProtocolKafkaSarama:
default:
return fmt.Errorf("not support cloudevents protocol %v", c.CloudEvents.Protocol)
}
saramaConfig := c.CloudEvents.KafkaSarama.Config.Parse()
sender, err := kafkaSarama.NewSender(c.CloudEvents.KafkaSarama.Brokers,
saramaConfig,
c.CloudEvents.KafkaSarama.Topic)
if err != nil {
return err
}
client, err := cloudevents.NewClient(sender, cloudevents.WithTimeNow(), cloudevents.WithUUIDs())
if err != nil {
return err
}
c.eventClient = client
return nil
}
// GetCloudEvents 用于获取 cloudevents 连接客户端
func (c *LocalConfig) GetCloudEvents() (eventclient.Client, error) {
if c.eventClient == nil {
return nil, fmt.Errorf("cloudevents client is nil")
}
return c.eventClient, nil
}
| .Byte | identifier_name |
smf.go | package smf
import (
"bytes"
"fmt"
"math"
"sort"
"gitlab.com/gomidi/midi"
"gitlab.com/gomidi/midi/midimessage/channel"
"gitlab.com/gomidi/midi/midimessage/meta"
"gitlab.com/gomidi/midi/midimessage/meta/meter"
"gitlab.com/gomidi/midi/reader"
"gitlab.com/gomidi/midi/smf"
"gitlab.com/gomidi/midi/smf/smfwriter"
"gitlab.com/gomidi/midi/writer"
)
func ReadSMF(file string) (*Song, error) {
s := New()
// add the first track
s.AddTrack(false, -1)
var rd = reader.New(
reader.NoLogger(),
reader.Each(s.scanMessage),
)
err := reader.ReadSMFFile(rd, file)
if err != nil {
return nil, err
}
if rd.Header().Type() != 1 {
return nil, fmt.Errorf("wrong SMF type %v, currently only supports type 1, please convert your midi file %s", rd.Header().Type(), file)
}
tpq, isMetric := rd.Header().TimeFormat.(smf.MetricTicks)
if !isMetric {
return nil, fmt.Errorf("wrong time format type %s, currently only supports metric time, please convert your midi file %s", rd.Header().TimeFormat.String(), file)
}
s.ticksPerQN = tpq.Ticks4th()
err = s.finishScan()
if err != nil {
return nil, err
}
return s, nil
}
type TempoChange struct {
AbsPos uint64
TempoBPM float64
}
type TempoChanges []*TempoChange
func (p TempoChanges) Swap(a, b int) {
p[a], p[b] = p[b], p[a]
}
func (p TempoChanges) Len() int {
return len(p)
}
func (p TempoChanges) Less(a, b int) bool {
return p[a].AbsPos < p[b].AbsPos
}
type TimeSigs []*TimeSig
func (p TimeSigs) Swap(a, b int) {
p[a], p[b] = p[b], p[a]
}
func (p TimeSigs) Len() int {
return len(p)
}
func (p TimeSigs) Less(a, b int) bool {
return p[a].AbsPos < p[b].AbsPos
}
type TimeSig struct {
AbsPos uint64
Num uint8
Denom uint8
}
type Song struct {
CopyRight string
Properties map[string]string
Bars []*Bar
Tracks []*Track
scannedMessages []*TrackMessage
scannedTempoChanges TempoChanges
scannedTimeSig TimeSigs
ticksPerQN uint32
lastPos uint64
}
func (s *Song) AddTrack(withContent bool, channel int8) *Track {
t := &Track{
Song: s,
WithContent: withContent,
Channel: channel,
}
s.Tracks = append(s.Tracks, t)
s.RenumberTracks()
return t
}
func (s *Song) createBarsUntil(from, to uint64, num, denom uint8) {
pos := from
for pos < to {
b := s.AddBar(pos, num, denom)
pos = b.EndPos()
}
}
func (s *Song) createBars(firstTimeSig [2]uint8, changes TimeSigs) {
num := firstTimeSig[0]
denom := firstTimeSig[1]
var pos uint64
for _, change := range changes {
if pos < change.AbsPos {
s.createBarsUntil(pos, change.AbsPos, num, denom)
}
num, denom = change.Num, change.Denom
b := s.AddBar(change.AbsPos, num, denom)
pos = b.EndPos()
}
s.createBarsUntil(pos, s.lastPos, num, denom)
s.RenumberBars()
}
func (s *Song) findBar(pos uint64) (bar *Bar) {
for _, b := range s.Bars {
if pos >= b.AbsPos {
bar = b
}
}
return
}
func (s *Song) NoOfContentTracks() (no uint16) {
for _, tr := range s.Tracks {
if tr.WithContent {
no++
}
}
return
}
func (s *Song) finishScan() (err error) {
sort.Sort(s.scannedTempoChanges)
sort.Sort(s.scannedTimeSig)
if len(s.scannedTimeSig) > 0 && s.scannedTimeSig[0].AbsPos == 0 {
var rest TimeSigs
if len(s.scannedTimeSig) > 1 {
rest = s.scannedTimeSig[1:]
}
s.createBars([2]uint8{s.scannedTimeSig[0].Num, s.scannedTimeSig[0].Denom}, rest)
} else {
s.createBars([2]uint8{4, 4}, s.scannedTimeSig)
}
for _, msg := range s.scannedMessages {
b := s.findBar(msg.AbsPos)
if b == nil {
return fmt.Errorf("can't find bar for message: %v at position %v", msg.Message, msg.AbsPos)
}
b.SetMessageByRelTicks(msg.AbsPos-b.AbsPos, msg.TrackNo, msg.Message)
b.SortPositions()
}
return nil
}
func (s *Song) LastTrack() *Track {
return s.Tracks[len(s.Tracks)-1]
}
func (s *Song) scanMessage(p *reader.Position, msg midi.Message) {
if p.AbsoluteTicks > s.lastPos {
s.lastPos = p.AbsoluteTicks
}
if msg == meta.EndOfTrack {
s.AddTrack(false, -1)
return
}
t := s.LastTrack()
switch m := msg.(type) {
case meta.Copyright:
s.CopyRight = m.Text()
case meta.TrackSequenceName:
t.Name = m.Text()
case meta.Instrument:
t.Instrument = m.Text()
case meta.TimeSig:
ts := &TimeSig{
AbsPos: p.AbsoluteTicks,
Num: m.Numerator,
Denom: m.Denominator,
}
s.scannedTimeSig = append(s.scannedTimeSig, ts)
case meta.Tempo:
tc := &TempoChange{
AbsPos: p.AbsoluteTicks,
TempoBPM: m.FractionalBPM(),
}
s.scannedTempoChanges = append(s.scannedTempoChanges, tc)
default:
if msg != nil {
tm := &TrackMessage{}
tm.Message = msg
tm.TrackNo = t.No
tm.AbsPos = p.AbsoluteTicks
s.scannedMessages = append(s.scannedMessages, tm)
t.WithContent = true
if chMsg, is := msg.(channel.Message); is {
if t.Channel >= 0 && uint8(t.Channel) != chMsg.Channel() {
panic(fmt.Sprintf("track no %v (%s) has mixed channel messages for channel %v and %v - not supported", t.No, t.Name, t.Channel, chMsg.Channel()))
}
if t.Channel < 0 {
t.Channel = int8(chMsg.Channel())
}
}
}
}
}
func (s *Song) AddBar(pos uint64, num, denom uint8) *Bar {
b := &Bar{
AbsPos: pos,
Song: s,
TimeSig: [2]uint8{num, denom},
}
s.Bars = append(s.Bars, b)
return b
}
func (s *Song) Save(file string) error {
return writer.WriteSMF(file, s.NoOfContentTracks()+2, s.writeSMF, smfwriter.Format(smf.SMF1), smfwriter.TimeFormat(smf.MetricTicks(s.ticksPerQN)))
}
func (s *Song) writeTimeSigTrack(w *writer.SMF) error {
timesig := [2]uint8{4, 4}
var pos uint64
for _, b := range s.Bars {
if b.TimeSig != timesig {
delta := uint32(b.AbsPos - pos)
w.SetDelta(delta)
w.Write(meter.Meter(b.TimeSig[0], b.TimeSig[1]))
timesig = b.TimeSig
pos = b.AbsPos
}
}
return nil
}
func (s *Song) | (w *writer.SMF) error {
tempo := float32(120.0)
var pos uint64
for _, b := range s.Bars {
for _, p := range b.Positions {
if p.Tempo != 0 && p.Tempo != tempo {
absPos := p.AbsTicks()
delta := uint32(absPos - pos)
w.SetDelta(delta)
w.Write(meta.Tempo(p.Tempo))
tempo = p.Tempo
pos = absPos
}
}
}
return nil
}
func (s *Song) writeSMF(w *writer.SMF) (err error) {
err = s.writeTimeSigTrack(w)
if err != nil {
return
}
err = writer.EndOfTrack(w)
if err != nil {
return
}
err = s.writeTempoTrack(w)
if err != nil {
return
}
err = writer.EndOfTrack(w)
if err != nil {
return
}
for _, tr := range s.Tracks {
if tr.WithContent {
var lastTick uint64
for _, b := range s.Bars {
for _, p := range b.Positions {
ticks := p.AbsTicks()
for _, m := range p.Messages {
if m.TrackNo == tr.No && m.Message != nil {
delta := ticks - lastTick
if tr.Channel < 0 {
panic(fmt.Sprintf("channel for content track no %v (%s) is -1, but content tracks must have channels", tr.No, tr.Name))
}
w.SetChannel(uint8(tr.Channel))
w.SetDelta(uint32(delta))
w.Write(m.Message)
lastTick = ticks
}
}
}
}
err = writer.EndOfTrack(w)
if err != nil {
return
}
}
}
return nil
}
func (s *Song) TrackWidth(i int) uint8 {
// TODO calculate the track width
return 0
}
func KeyToNote(key uint8) string {
nt := key % 12
oct := key / 12
notes := map[uint8]string{
0: "C",
1: "C#",
2: "D",
3: "D#",
4: "E",
5: "F",
6: "F#",
7: "G",
8: "G#",
9: "A",
10: "A#",
11: "B",
}
return fmt.Sprintf("%s%v", notes[nt], oct)
}
func ShowMessage(msg midi.Message) string {
switch v := msg.(type) {
case channel.NoteOn:
return fmt.Sprintf("%s/%v_", KeyToNote(v.Key()), v.Velocity())
case channel.NoteOff:
return fmt.Sprintf("_%s", KeyToNote(v.Key()))
case channel.NoteOffVelocity:
return fmt.Sprintf("_%s", KeyToNote(v.Key()))
/*
case channel.Aftertouch:
case channel.ControlChange:
case channel.Pitchbend:
case channel.PolyAftertouch:
case channel.ProgramChange:
*/
case meta.Lyric:
return fmt.Sprintf("%q", v.Text())
case meta.Text:
return fmt.Sprintf("'%s'", v.Text())
default:
return msg.String()
}
}
func (s *Song) BarLines() string {
var bf bytes.Buffer
fmt.Fprintf(&bf, "| Comment | Mark | Tempo | Beat | ")
for _, t := range s.Tracks {
if t.WithContent {
fmt.Fprintf(&bf, " %s[%v] | ", t.Name, t.Channel)
}
}
fmt.Fprintf(&bf, "\n")
for _, b := range s.Bars {
_ = b
fmt.Fprintf(&bf, "----------- #%v %v/%v --------------\n", b.No, b.TimeSig[0], b.TimeSig[1])
for _, p := range b.Positions {
tempo := ""
if p.Tempo != 0 {
tempo = fmt.Sprintf("%0.2f", tempo)
}
var frac float64
if p.Fraction[1] > 0 {
frac = p.Fraction[0] / p.Fraction[1]
}
beat := fmt.Sprintf("%0.4f", float64(p.Beat)+float64(1)+frac)
fmt.Fprintf(&bf, "| %s | %s | %s | %s | ", p.Comment, p.Mark, tempo, beat)
for _, t := range s.Tracks {
if t.WithContent {
var printed bool
for _, m := range p.Messages {
if m.TrackNo == t.No {
fmt.Fprintf(&bf, " %s | ", ShowMessage(m.Message))
printed = true
}
}
if !printed {
fmt.Fprintf(&bf, " | ")
}
}
}
fmt.Fprintf(&bf, "\n")
}
}
return bf.String()
}
func (s *Song) RenumberBars() {
for i := range s.Bars {
s.Bars[i].No = uint16(i)
}
}
func (s *Song) RenumberTracks() {
for i := range s.Tracks {
s.Tracks[i].No = uint16(i)
}
}
type TrackMessage struct {
TrackNo uint16
AbsPos uint64
Message midi.Message
Position *Position
}
type Positions []*Position
func (p Positions) Swap(a, b int) {
p[a], p[b] = p[b], p[a]
}
func (p Positions) Len() int {
return len(p)
}
func (p Positions) Less(a, b int) bool {
if p[a].Bar.No < p[b].Bar.No {
return true
}
if p[a].Bar.No > p[b].Bar.No {
return false
}
if p[a].Beat < p[b].Beat {
return true
}
if p[a].Beat > p[b].Beat {
return false
}
var frac_a float64
var frac_b float64
if p[a].Fraction[1] > 0 {
frac_a = float64(p[a].Fraction[0]) / float64(p[a].Fraction[1])
}
if p[b].Fraction[1] > 0 {
frac_b = float64(p[b].Fraction[0]) / float64(p[b].Fraction[1])
}
return frac_a < frac_b
}
type Bar struct {
Song *Song
No uint16
TimeSig [2]uint8
Positions Positions
AbsPos uint64
}
func (b *Bar) EndPos() uint64 {
return b.AbsPos + b.Length()
}
func (b *Bar) Length() uint64 {
l := float64(b.Song.ticksPerQN*4*uint32(b.TimeSig[0])) / float64(b.TimeSig[1])
return uint64(math.Round(l))
}
func (b *Bar) SetMessageByRelTicks(ticks uint64, trackNo uint16, msg midi.Message) {
beat := uint8(ticks / uint64(b.Song.ticksPerQN))
ticksRest := ticks % uint64(b.Song.ticksPerQN)
var pos *Position
for _, p := range b.Positions {
if p.Beat == beat && p.WithinFraction(ticksRest) {
pos = p
break
}
}
if pos == nil {
pos = b.AddPosition()
pos.Beat = beat
pos.Fraction[0] = float64(ticksRest)
pos.Fraction[1] = float64(b.Song.ticksPerQN)
}
pos.SetMessage(trackNo, msg)
}
func (b *Bar) AddPosition() *Position {
p := &Position{
Bar: b,
}
b.Positions = append(b.Positions, p)
return p
}
func (b *Bar) Columns() []string {
//cols := make([]string)
return nil
}
func (b *Bar) SortPositions() {
sort.Sort(b.Positions)
}
type Position struct {
Bar *Bar
Comment string
Mark string
Beat uint8
Tempo float32
Fraction [2]float64
Messages []*TrackMessage
}
/*
WithinFraction determines, if the given ticks are within the fraction of the position.
The given ticks must be less than a quarternote (Songs ticks per quarternote).
The fraction is a fraction of a quarternote. So we first have to check, to which fraction
of the qn the given ticks correspond and then to check, if the difference between this fraction
and the fraction of the Position lies within the tolerance
*/
func (p *Position) WithinFraction(ticks uint64) bool {
//tolerance := float64(0.0000001)
tolerance := float64(0.001)
fracTicks := float64(ticks) / float64(p.Bar.Song.ticksPerQN)
if fracTicks >= 1 {
panic("must not happen, we are on the wrong beat")
}
fracPos := p.Fraction[0] / p.Fraction[1]
//fmt.Printf("\nwithin fraction %v vs %v (ticks: %v perQN: %v)\n", fracPos, fracTicks, ticks, p.Bar.Song.ticksPerQN)
return math.Abs(fracPos-fracTicks) < tolerance
}
func (p *Position) AbsTicks() uint64 {
beatTicks := p.Bar.Song.ticksPerQN * uint32(p.Beat)
fracTicks := math.Round((float64(p.Bar.Song.ticksPerQN) * p.Fraction[0]) / p.Fraction[1])
return p.Bar.AbsPos + uint64(beatTicks) + uint64(fracTicks)
}
func (p *Position) AddMessage(track uint16, msg midi.Message) {
tm := &TrackMessage{
TrackNo: track,
Message: msg,
Position: p,
}
p.Messages = append(p.Messages, tm)
}
func (p *Position) GetMessage(track uint16) *TrackMessage {
for _, m := range p.Messages {
if m.TrackNo == track {
return m
}
}
return nil
}
func (p *Position) SetMessage(track uint16, msg midi.Message) {
var tm *TrackMessage
for _, m := range p.Messages {
if m.TrackNo == track {
tm = m
break
}
}
if tm == nil {
p.AddMessage(track, msg)
return
}
tm.Message = msg
}
type Track struct {
Song *Song
No uint16
Channel int8 // -1 == not defined
Name string
Instrument string
Solo bool
Mute bool
RecordArm bool
WithContent bool
External bool // for non editable track
}
/*
-----------------------------------------------------------------------
File | Edit | View | Config (the menu, open the first with ALT+SPACE and then navigate with arrow keys and select with ENTER)
-----------------------------------------------------------------------
Comment | Mark | Bar | Beat || Drums[10] | Bass[9] | Vocal[1] | Piano[1] | (piano track on channel 1 etc)
| | | || S M R | S M R | S M R | S M R | (Solo/Mute/Record indicators)
----------------------------------------------------------------------- (everything above this line is static/non scrollable)
1 | Intro | 4/4 | 1.0 || C3/100 | C5_/120 | | | (drum note is just a 32ths, bass is note on)
| | 144 | 1.0 || | | | | tempo change
#2 (bar change)
| | | 2.25 || C5/60 | _C5 | "hiho" | CC123/100 |
=====I====>===V====C=== position indicator, always the pre-last line of the screen (each = is a bar, each letter is the first letter of a Marker)
F1 Play | F2 Rec | F3 Metro | F4 Keyb | F5 V1 | F6 V2 | F7 V3 | F8 V4 | F9 V5 | F10 Track Properties | F11 Song Properties
(play, record, metronome, Keyboard are switches that indicate if it is active)
(views are a selector; only one view can be active at a time)
*/
func New() *Song {
return &Song{
Properties: map[string]string{},
}
}
| writeTempoTrack | identifier_name |
smf.go | package smf
import (
"bytes"
"fmt"
"math"
"sort"
"gitlab.com/gomidi/midi"
"gitlab.com/gomidi/midi/midimessage/channel"
"gitlab.com/gomidi/midi/midimessage/meta"
"gitlab.com/gomidi/midi/midimessage/meta/meter"
"gitlab.com/gomidi/midi/reader"
"gitlab.com/gomidi/midi/smf"
"gitlab.com/gomidi/midi/smf/smfwriter"
"gitlab.com/gomidi/midi/writer"
)
func ReadSMF(file string) (*Song, error) {
s := New()
// add the first track
s.AddTrack(false, -1)
var rd = reader.New(
reader.NoLogger(),
reader.Each(s.scanMessage),
)
err := reader.ReadSMFFile(rd, file)
if err != nil {
return nil, err
}
if rd.Header().Type() != 1 {
return nil, fmt.Errorf("wrong SMF type %v, currently only supports type 1, please convert your midi file %s", rd.Header().Type(), file)
}
tpq, isMetric := rd.Header().TimeFormat.(smf.MetricTicks)
if !isMetric {
return nil, fmt.Errorf("wrong time format type %s, currently only supports metric time, please convert your midi file %s", rd.Header().TimeFormat.String(), file)
}
s.ticksPerQN = tpq.Ticks4th()
err = s.finishScan()
if err != nil {
return nil, err
}
return s, nil
}
type TempoChange struct {
AbsPos uint64
TempoBPM float64
}
type TempoChanges []*TempoChange
func (p TempoChanges) Swap(a, b int) {
p[a], p[b] = p[b], p[a]
}
func (p TempoChanges) Len() int {
return len(p)
}
func (p TempoChanges) Less(a, b int) bool {
return p[a].AbsPos < p[b].AbsPos
}
type TimeSigs []*TimeSig
func (p TimeSigs) Swap(a, b int) {
p[a], p[b] = p[b], p[a]
}
func (p TimeSigs) Len() int {
return len(p)
}
func (p TimeSigs) Less(a, b int) bool {
return p[a].AbsPos < p[b].AbsPos
}
type TimeSig struct {
AbsPos uint64
Num uint8
Denom uint8
}
type Song struct {
CopyRight string
Properties map[string]string
Bars []*Bar
Tracks []*Track
scannedMessages []*TrackMessage
scannedTempoChanges TempoChanges
scannedTimeSig TimeSigs
ticksPerQN uint32
lastPos uint64
}
func (s *Song) AddTrack(withContent bool, channel int8) *Track {
t := &Track{
Song: s,
WithContent: withContent,
Channel: channel,
}
s.Tracks = append(s.Tracks, t)
s.RenumberTracks()
return t
}
func (s *Song) createBarsUntil(from, to uint64, num, denom uint8) {
pos := from
for pos < to {
b := s.AddBar(pos, num, denom)
pos = b.EndPos()
}
}
func (s *Song) createBars(firstTimeSig [2]uint8, changes TimeSigs) {
num := firstTimeSig[0]
denom := firstTimeSig[1]
var pos uint64
for _, change := range changes {
if pos < change.AbsPos {
s.createBarsUntil(pos, change.AbsPos, num, denom)
}
num, denom = change.Num, change.Denom
b := s.AddBar(change.AbsPos, num, denom)
pos = b.EndPos()
}
s.createBarsUntil(pos, s.lastPos, num, denom)
s.RenumberBars()
}
func (s *Song) findBar(pos uint64) (bar *Bar) {
for _, b := range s.Bars {
if pos >= b.AbsPos {
bar = b
}
}
return
}
func (s *Song) NoOfContentTracks() (no uint16) {
for _, tr := range s.Tracks {
if tr.WithContent {
no++
}
}
return
}
func (s *Song) finishScan() (err error) {
sort.Sort(s.scannedTempoChanges)
sort.Sort(s.scannedTimeSig)
if len(s.scannedTimeSig) > 0 && s.scannedTimeSig[0].AbsPos == 0 {
var rest TimeSigs
if len(s.scannedTimeSig) > 1 {
rest = s.scannedTimeSig[1:]
}
s.createBars([2]uint8{s.scannedTimeSig[0].Num, s.scannedTimeSig[0].Denom}, rest)
} else {
s.createBars([2]uint8{4, 4}, s.scannedTimeSig)
}
for _, msg := range s.scannedMessages {
b := s.findBar(msg.AbsPos)
if b == nil {
return fmt.Errorf("can't find bar for message: %v at position %v", msg.Message, msg.AbsPos)
}
b.SetMessageByRelTicks(msg.AbsPos-b.AbsPos, msg.TrackNo, msg.Message)
b.SortPositions()
}
return nil
}
func (s *Song) LastTrack() *Track {
return s.Tracks[len(s.Tracks)-1]
}
func (s *Song) scanMessage(p *reader.Position, msg midi.Message) {
if p.AbsoluteTicks > s.lastPos {
s.lastPos = p.AbsoluteTicks
}
if msg == meta.EndOfTrack {
s.AddTrack(false, -1)
return
}
t := s.LastTrack()
switch m := msg.(type) {
case meta.Copyright:
s.CopyRight = m.Text()
case meta.TrackSequenceName:
t.Name = m.Text()
case meta.Instrument:
t.Instrument = m.Text()
case meta.TimeSig:
ts := &TimeSig{
AbsPos: p.AbsoluteTicks,
Num: m.Numerator,
Denom: m.Denominator,
}
s.scannedTimeSig = append(s.scannedTimeSig, ts)
case meta.Tempo:
tc := &TempoChange{
AbsPos: p.AbsoluteTicks,
TempoBPM: m.FractionalBPM(),
}
s.scannedTempoChanges = append(s.scannedTempoChanges, tc)
default:
if msg != nil {
tm := &TrackMessage{}
tm.Message = msg
tm.TrackNo = t.No
tm.AbsPos = p.AbsoluteTicks
s.scannedMessages = append(s.scannedMessages, tm)
t.WithContent = true
if chMsg, is := msg.(channel.Message); is {
if t.Channel >= 0 && uint8(t.Channel) != chMsg.Channel() {
panic(fmt.Sprintf("track no %v (%s) has mixed channel messages for channel %v and %v - not supported", t.No, t.Name, t.Channel, chMsg.Channel()))
}
if t.Channel < 0 {
t.Channel = int8(chMsg.Channel())
}
}
}
}
}
func (s *Song) AddBar(pos uint64, num, denom uint8) *Bar {
b := &Bar{
AbsPos: pos,
Song: s,
TimeSig: [2]uint8{num, denom},
}
s.Bars = append(s.Bars, b)
return b
}
func (s *Song) Save(file string) error {
return writer.WriteSMF(file, s.NoOfContentTracks()+2, s.writeSMF, smfwriter.Format(smf.SMF1), smfwriter.TimeFormat(smf.MetricTicks(s.ticksPerQN)))
}
func (s *Song) writeTimeSigTrack(w *writer.SMF) error {
timesig := [2]uint8{4, 4}
var pos uint64
for _, b := range s.Bars {
if b.TimeSig != timesig {
delta := uint32(b.AbsPos - pos)
w.SetDelta(delta)
w.Write(meter.Meter(b.TimeSig[0], b.TimeSig[1]))
timesig = b.TimeSig
pos = b.AbsPos
}
}
return nil
}
func (s *Song) writeTempoTrack(w *writer.SMF) error {
tempo := float32(120.0)
var pos uint64
for _, b := range s.Bars {
for _, p := range b.Positions {
if p.Tempo != 0 && p.Tempo != tempo {
absPos := p.AbsTicks()
delta := uint32(absPos - pos)
w.SetDelta(delta)
w.Write(meta.Tempo(p.Tempo))
tempo = p.Tempo
pos = absPos
}
}
}
return nil
}
func (s *Song) writeSMF(w *writer.SMF) (err error) {
err = s.writeTimeSigTrack(w)
if err != nil {
return
}
err = writer.EndOfTrack(w)
if err != nil {
return
}
err = s.writeTempoTrack(w)
if err != nil {
return
}
err = writer.EndOfTrack(w)
if err != nil {
return
}
for _, tr := range s.Tracks {
if tr.WithContent {
var lastTick uint64
for _, b := range s.Bars {
for _, p := range b.Positions {
ticks := p.AbsTicks()
for _, m := range p.Messages {
if m.TrackNo == tr.No && m.Message != nil {
delta := ticks - lastTick
if tr.Channel < 0 {
panic(fmt.Sprintf("channel for content track no %v (%s) is -1, but content tracks must have channels", tr.No, tr.Name))
}
w.SetChannel(uint8(tr.Channel))
w.SetDelta(uint32(delta))
w.Write(m.Message)
lastTick = ticks
}
}
}
}
err = writer.EndOfTrack(w)
if err != nil {
return
}
}
}
return nil
}
func (s *Song) TrackWidth(i int) uint8 {
// TODO calculate the track width
return 0
}
func KeyToNote(key uint8) string {
nt := key % 12
oct := key / 12
notes := map[uint8]string{
0: "C",
1: "C#",
2: "D",
3: "D#",
4: "E",
5: "F",
6: "F#",
7: "G",
8: "G#",
9: "A",
10: "A#",
11: "B",
}
return fmt.Sprintf("%s%v", notes[nt], oct)
}
func ShowMessage(msg midi.Message) string {
switch v := msg.(type) {
case channel.NoteOn:
return fmt.Sprintf("%s/%v_", KeyToNote(v.Key()), v.Velocity())
case channel.NoteOff:
return fmt.Sprintf("_%s", KeyToNote(v.Key()))
case channel.NoteOffVelocity:
return fmt.Sprintf("_%s", KeyToNote(v.Key()))
/*
case channel.Aftertouch:
case channel.ControlChange:
case channel.Pitchbend:
case channel.PolyAftertouch:
case channel.ProgramChange:
*/
case meta.Lyric:
return fmt.Sprintf("%q", v.Text())
case meta.Text:
return fmt.Sprintf("'%s'", v.Text())
default:
return msg.String()
}
}
func (s *Song) BarLines() string {
var bf bytes.Buffer
fmt.Fprintf(&bf, "| Comment | Mark | Tempo | Beat | ")
for _, t := range s.Tracks {
if t.WithContent {
fmt.Fprintf(&bf, " %s[%v] | ", t.Name, t.Channel)
}
}
fmt.Fprintf(&bf, "\n")
for _, b := range s.Bars {
_ = b
fmt.Fprintf(&bf, "----------- #%v %v/%v --------------\n", b.No, b.TimeSig[0], b.TimeSig[1])
for _, p := range b.Positions {
tempo := ""
if p.Tempo != 0 {
tempo = fmt.Sprintf("%0.2f", tempo)
}
var frac float64
if p.Fraction[1] > 0 {
frac = p.Fraction[0] / p.Fraction[1]
}
beat := fmt.Sprintf("%0.4f", float64(p.Beat)+float64(1)+frac)
fmt.Fprintf(&bf, "| %s | %s | %s | %s | ", p.Comment, p.Mark, tempo, beat)
for _, t := range s.Tracks {
if t.WithContent {
var printed bool
for _, m := range p.Messages {
if m.TrackNo == t.No {
fmt.Fprintf(&bf, " %s | ", ShowMessage(m.Message))
printed = true
}
}
if !printed {
fmt.Fprintf(&bf, " | ")
}
}
}
fmt.Fprintf(&bf, "\n")
}
}
return bf.String()
}
func (s *Song) RenumberBars() {
for i := range s.Bars {
s.Bars[i].No = uint16(i)
}
}
func (s *Song) RenumberTracks() {
for i := range s.Tracks {
s.Tracks[i].No = uint16(i)
}
}
type TrackMessage struct {
TrackNo uint16
AbsPos uint64
Message midi.Message
Position *Position
}
type Positions []*Position
func (p Positions) Swap(a, b int) {
p[a], p[b] = p[b], p[a]
}
func (p Positions) Len() int {
return len(p)
}
func (p Positions) Less(a, b int) bool {
if p[a].Bar.No < p[b].Bar.No {
return true
}
if p[a].Bar.No > p[b].Bar.No {
return false
}
if p[a].Beat < p[b].Beat {
return true
}
if p[a].Beat > p[b].Beat {
return false
}
var frac_a float64
var frac_b float64
if p[a].Fraction[1] > 0 {
frac_a = float64(p[a].Fraction[0]) / float64(p[a].Fraction[1])
}
if p[b].Fraction[1] > 0 {
frac_b = float64(p[b].Fraction[0]) / float64(p[b].Fraction[1])
}
return frac_a < frac_b
}
type Bar struct {
Song *Song
No uint16
TimeSig [2]uint8
Positions Positions
AbsPos uint64
}
func (b *Bar) EndPos() uint64 {
return b.AbsPos + b.Length()
}
func (b *Bar) Length() uint64 {
l := float64(b.Song.ticksPerQN*4*uint32(b.TimeSig[0])) / float64(b.TimeSig[1])
return uint64(math.Round(l))
}
func (b *Bar) SetMessageByRelTicks(ticks uint64, trackNo uint16, msg midi.Message) {
beat := uint8(ticks / uint64(b.Song.ticksPerQN))
ticksRest := ticks % uint64(b.Song.ticksPerQN)
var pos *Position
for _, p := range b.Positions {
if p.Beat == beat && p.WithinFraction(ticksRest) {
pos = p
break
}
}
if pos == nil {
pos = b.AddPosition()
pos.Beat = beat
pos.Fraction[0] = float64(ticksRest)
pos.Fraction[1] = float64(b.Song.ticksPerQN)
}
pos.SetMessage(trackNo, msg)
}
func (b *Bar) AddPosition() *Position {
p := &Position{
Bar: b,
}
b.Positions = append(b.Positions, p)
return p
}
func (b *Bar) Columns() []string {
//cols := make([]string)
return nil
}
func (b *Bar) SortPositions() {
sort.Sort(b.Positions)
}
type Position struct {
Bar *Bar
Comment string
Mark string
Beat uint8
Tempo float32
Fraction [2]float64
Messages []*TrackMessage
}
/*
WithinFraction determines, if the given ticks are within the fraction of the position.
The given ticks must be less than a quarternote (Songs ticks per quarternote).
The fraction is a fraction of a quarternote. So we first have to check, to which fraction
of the qn the given ticks correspond and then to check, if the difference between this fraction
and the fraction of the Position lies within the tolerance
*/
func (p *Position) WithinFraction(ticks uint64) bool {
//tolerance := float64(0.0000001)
tolerance := float64(0.001)
fracTicks := float64(ticks) / float64(p.Bar.Song.ticksPerQN)
if fracTicks >= 1 {
panic("must not happen, we are on the wrong beat")
}
fracPos := p.Fraction[0] / p.Fraction[1]
//fmt.Printf("\nwithin fraction %v vs %v (ticks: %v perQN: %v)\n", fracPos, fracTicks, ticks, p.Bar.Song.ticksPerQN)
return math.Abs(fracPos-fracTicks) < tolerance
}
// AbsTicks returns the absolute tick position of p: the bar start plus
// whole beats (quarter notes) plus the fractional remainder.
func (p *Position) AbsTicks() uint64 {
	beatTicks := p.Bar.Song.ticksPerQN * uint32(p.Beat)
	var fracTicks float64
	if p.Fraction[1] != 0 {
		// BUG FIX: guard against a zero denominator (e.g. a position
		// created via AddPosition without a fraction) — dividing would
		// yield NaN and an implementation-defined uint64 conversion.
		fracTicks = math.Round((float64(p.Bar.Song.ticksPerQN) * p.Fraction[0]) / p.Fraction[1])
	}
	return p.Bar.AbsPos + uint64(beatTicks) + uint64(fracTicks)
}
func (p *Position) AddMessage(track uint16, msg midi.Message) {
tm := &TrackMessage{
TrackNo: track,
Message: msg,
Position: p,
}
p.Messages = append(p.Messages, tm)
}
func (p *Position) GetMessage(track uint16) *TrackMessage {
for _, m := range p.Messages {
if m.TrackNo == track {
return m
}
} | func (p *Position) SetMessage(track uint16, msg midi.Message) {
var tm *TrackMessage
for _, m := range p.Messages {
if m.TrackNo == track {
tm = m
break
}
}
if tm == nil {
p.AddMessage(track, msg)
return
}
tm.Message = msg
}
type Track struct {
Song *Song
No uint16
Channel int8 // -1 == not defined
Name string
Instrument string
Solo bool
Mute bool
RecordArm bool
WithContent bool
External bool // for non editable track
}
/*
-----------------------------------------------------------------------
File | Edit | View | Config (the menu, open the first with ALT+SPACE and then navigate with arrow keys and select with ENTER)
-----------------------------------------------------------------------
Comment | Mark | Bar | Beat || Drums[10] | Bass[9] | Vocal[1] | Piano[1] | (piano track on channel 1 etc)
| | | || S M R | S M R | S M R | S M R | (Solo/Mute/Record indicators)
----------------------------------------------------------------------- (everything above this line is static/non scrollable)
1 | Intro | 4/4 | 1.0 || C3/100 | C5_/120 | | | (drum note is just a 32ths, bass is note on)
| | 144 | 1.0 || | | | | tempo change
#2 (bar change)
| | | 2.25 || C5/60 | _C5 | "hiho" | CC123/100 |
=====I====>===V====C=== position indicator, always the pre-last line of the screen (each = is a bar, each letter is the first letter of a Marker)
F1 Play | F2 Rec | F3 Metro | F4 Keyb | F5 V1 | F6 V2 | F7 V3 | F8 V4 | F9 V5 | F10 Track Properties | F11 Song Properties
(play, record, metronome, Keyboard are switches that indicate if it is active)
(views are a selector; only one view can be active at a time)
*/
func New() *Song {
return &Song{
Properties: map[string]string{},
}
} |
return nil
}
| random_line_split |
smf.go | package smf
import (
"bytes"
"fmt"
"math"
"sort"
"gitlab.com/gomidi/midi"
"gitlab.com/gomidi/midi/midimessage/channel"
"gitlab.com/gomidi/midi/midimessage/meta"
"gitlab.com/gomidi/midi/midimessage/meta/meter"
"gitlab.com/gomidi/midi/reader"
"gitlab.com/gomidi/midi/smf"
"gitlab.com/gomidi/midi/smf/smfwriter"
"gitlab.com/gomidi/midi/writer"
)
func ReadSMF(file string) (*Song, error) {
s := New()
// add the first track
s.AddTrack(false, -1)
var rd = reader.New(
reader.NoLogger(),
reader.Each(s.scanMessage),
)
err := reader.ReadSMFFile(rd, file)
if err != nil {
return nil, err
}
if rd.Header().Type() != 1 {
return nil, fmt.Errorf("wrong SMF type %v, currently only supports type 1, please convert your midi file %s", rd.Header().Type(), file)
}
tpq, isMetric := rd.Header().TimeFormat.(smf.MetricTicks)
if !isMetric {
return nil, fmt.Errorf("wrong time format type %s, currently only supports metric time, please convert your midi file %s", rd.Header().TimeFormat.String(), file)
}
s.ticksPerQN = tpq.Ticks4th()
err = s.finishScan()
if err != nil {
return nil, err
}
return s, nil
}
type TempoChange struct {
AbsPos uint64
TempoBPM float64
}
type TempoChanges []*TempoChange
func (p TempoChanges) Swap(a, b int) {
p[a], p[b] = p[b], p[a]
}
func (p TempoChanges) Len() int {
return len(p)
}
func (p TempoChanges) Less(a, b int) bool {
return p[a].AbsPos < p[b].AbsPos
}
type TimeSigs []*TimeSig
func (p TimeSigs) Swap(a, b int) {
p[a], p[b] = p[b], p[a]
}
func (p TimeSigs) Len() int {
return len(p)
}
func (p TimeSigs) Less(a, b int) bool {
return p[a].AbsPos < p[b].AbsPos
}
type TimeSig struct {
AbsPos uint64
Num uint8
Denom uint8
}
type Song struct {
CopyRight string
Properties map[string]string
Bars []*Bar
Tracks []*Track
scannedMessages []*TrackMessage
scannedTempoChanges TempoChanges
scannedTimeSig TimeSigs
ticksPerQN uint32
lastPos uint64
}
func (s *Song) AddTrack(withContent bool, channel int8) *Track {
t := &Track{
Song: s,
WithContent: withContent,
Channel: channel,
}
s.Tracks = append(s.Tracks, t)
s.RenumberTracks()
return t
}
func (s *Song) createBarsUntil(from, to uint64, num, denom uint8) {
pos := from
for pos < to {
b := s.AddBar(pos, num, denom)
pos = b.EndPos()
}
}
func (s *Song) createBars(firstTimeSig [2]uint8, changes TimeSigs) {
num := firstTimeSig[0]
denom := firstTimeSig[1]
var pos uint64
for _, change := range changes {
if pos < change.AbsPos {
s.createBarsUntil(pos, change.AbsPos, num, denom)
}
num, denom = change.Num, change.Denom
b := s.AddBar(change.AbsPos, num, denom)
pos = b.EndPos()
}
s.createBarsUntil(pos, s.lastPos, num, denom)
s.RenumberBars()
}
func (s *Song) findBar(pos uint64) (bar *Bar) {
for _, b := range s.Bars {
if pos >= b.AbsPos {
bar = b
}
}
return
}
func (s *Song) NoOfContentTracks() (no uint16) {
for _, tr := range s.Tracks {
if tr.WithContent {
no++
}
}
return
}
func (s *Song) finishScan() (err error) {
sort.Sort(s.scannedTempoChanges)
sort.Sort(s.scannedTimeSig)
if len(s.scannedTimeSig) > 0 && s.scannedTimeSig[0].AbsPos == 0 {
var rest TimeSigs
if len(s.scannedTimeSig) > 1 {
rest = s.scannedTimeSig[1:]
}
s.createBars([2]uint8{s.scannedTimeSig[0].Num, s.scannedTimeSig[0].Denom}, rest)
} else {
s.createBars([2]uint8{4, 4}, s.scannedTimeSig)
}
for _, msg := range s.scannedMessages {
b := s.findBar(msg.AbsPos)
if b == nil {
return fmt.Errorf("can't find bar for message: %v at position %v", msg.Message, msg.AbsPos)
}
b.SetMessageByRelTicks(msg.AbsPos-b.AbsPos, msg.TrackNo, msg.Message)
b.SortPositions()
}
return nil
}
func (s *Song) LastTrack() *Track {
return s.Tracks[len(s.Tracks)-1]
}
func (s *Song) scanMessage(p *reader.Position, msg midi.Message) {
if p.AbsoluteTicks > s.lastPos {
s.lastPos = p.AbsoluteTicks
}
if msg == meta.EndOfTrack {
s.AddTrack(false, -1)
return
}
t := s.LastTrack()
switch m := msg.(type) {
case meta.Copyright:
s.CopyRight = m.Text()
case meta.TrackSequenceName:
t.Name = m.Text()
case meta.Instrument:
t.Instrument = m.Text()
case meta.TimeSig:
ts := &TimeSig{
AbsPos: p.AbsoluteTicks,
Num: m.Numerator,
Denom: m.Denominator,
}
s.scannedTimeSig = append(s.scannedTimeSig, ts)
case meta.Tempo:
tc := &TempoChange{
AbsPos: p.AbsoluteTicks,
TempoBPM: m.FractionalBPM(),
}
s.scannedTempoChanges = append(s.scannedTempoChanges, tc)
default:
if msg != nil {
tm := &TrackMessage{}
tm.Message = msg
tm.TrackNo = t.No
tm.AbsPos = p.AbsoluteTicks
s.scannedMessages = append(s.scannedMessages, tm)
t.WithContent = true
if chMsg, is := msg.(channel.Message); is {
if t.Channel >= 0 && uint8(t.Channel) != chMsg.Channel() {
panic(fmt.Sprintf("track no %v (%s) has mixed channel messages for channel %v and %v - not supported", t.No, t.Name, t.Channel, chMsg.Channel()))
}
if t.Channel < 0 {
t.Channel = int8(chMsg.Channel())
}
}
}
}
}
func (s *Song) AddBar(pos uint64, num, denom uint8) *Bar {
b := &Bar{
AbsPos: pos,
Song: s,
TimeSig: [2]uint8{num, denom},
}
s.Bars = append(s.Bars, b)
return b
}
func (s *Song) Save(file string) error {
return writer.WriteSMF(file, s.NoOfContentTracks()+2, s.writeSMF, smfwriter.Format(smf.SMF1), smfwriter.TimeFormat(smf.MetricTicks(s.ticksPerQN)))
}
func (s *Song) writeTimeSigTrack(w *writer.SMF) error {
timesig := [2]uint8{4, 4}
var pos uint64
for _, b := range s.Bars {
if b.TimeSig != timesig {
delta := uint32(b.AbsPos - pos)
w.SetDelta(delta)
w.Write(meter.Meter(b.TimeSig[0], b.TimeSig[1]))
timesig = b.TimeSig
pos = b.AbsPos
}
}
return nil
}
func (s *Song) writeTempoTrack(w *writer.SMF) error {
tempo := float32(120.0)
var pos uint64
for _, b := range s.Bars {
for _, p := range b.Positions {
if p.Tempo != 0 && p.Tempo != tempo {
absPos := p.AbsTicks()
delta := uint32(absPos - pos)
w.SetDelta(delta)
w.Write(meta.Tempo(p.Tempo))
tempo = p.Tempo
pos = absPos
}
}
}
return nil
}
func (s *Song) writeSMF(w *writer.SMF) (err error) {
err = s.writeTimeSigTrack(w)
if err != nil {
return
}
err = writer.EndOfTrack(w)
if err != nil {
return
}
err = s.writeTempoTrack(w)
if err != nil {
return
}
err = writer.EndOfTrack(w)
if err != nil {
return
}
for _, tr := range s.Tracks {
if tr.WithContent {
var lastTick uint64
for _, b := range s.Bars {
for _, p := range b.Positions {
ticks := p.AbsTicks()
for _, m := range p.Messages {
if m.TrackNo == tr.No && m.Message != nil {
delta := ticks - lastTick
if tr.Channel < 0 {
panic(fmt.Sprintf("channel for content track no %v (%s) is -1, but content tracks must have channels", tr.No, tr.Name))
}
w.SetChannel(uint8(tr.Channel))
w.SetDelta(uint32(delta))
w.Write(m.Message)
lastTick = ticks
}
}
}
}
err = writer.EndOfTrack(w)
if err != nil {
return
}
}
}
return nil
}
func (s *Song) TrackWidth(i int) uint8 {
// TODO calculate the track width
return 0
}
// KeyToNote converts a MIDI key number into a note name with octave,
// e.g. 60 -> "C5". Black keys are spelled with sharps.
func KeyToNote(key uint8) string {
	// array lookup instead of rebuilding a map on every call
	notes := [12]string{"C", "C#", "D", "D#", "E", "F", "F#", "G", "G#", "A", "A#", "B"}
	return fmt.Sprintf("%s%v", notes[key%12], key/12)
}
func ShowMessage(msg midi.Message) string {
switch v := msg.(type) {
case channel.NoteOn:
return fmt.Sprintf("%s/%v_", KeyToNote(v.Key()), v.Velocity())
case channel.NoteOff:
return fmt.Sprintf("_%s", KeyToNote(v.Key()))
case channel.NoteOffVelocity:
return fmt.Sprintf("_%s", KeyToNote(v.Key()))
/*
case channel.Aftertouch:
case channel.ControlChange:
case channel.Pitchbend:
case channel.PolyAftertouch:
case channel.ProgramChange:
*/
case meta.Lyric:
return fmt.Sprintf("%q", v.Text())
case meta.Text:
return fmt.Sprintf("'%s'", v.Text())
default:
return msg.String()
}
}
func (s *Song) BarLines() string {
var bf bytes.Buffer
fmt.Fprintf(&bf, "| Comment | Mark | Tempo | Beat | ")
for _, t := range s.Tracks {
if t.WithContent |
}
fmt.Fprintf(&bf, "\n")
for _, b := range s.Bars {
_ = b
fmt.Fprintf(&bf, "----------- #%v %v/%v --------------\n", b.No, b.TimeSig[0], b.TimeSig[1])
for _, p := range b.Positions {
tempo := ""
if p.Tempo != 0 {
tempo = fmt.Sprintf("%0.2f", tempo)
}
var frac float64
if p.Fraction[1] > 0 {
frac = p.Fraction[0] / p.Fraction[1]
}
beat := fmt.Sprintf("%0.4f", float64(p.Beat)+float64(1)+frac)
fmt.Fprintf(&bf, "| %s | %s | %s | %s | ", p.Comment, p.Mark, tempo, beat)
for _, t := range s.Tracks {
if t.WithContent {
var printed bool
for _, m := range p.Messages {
if m.TrackNo == t.No {
fmt.Fprintf(&bf, " %s | ", ShowMessage(m.Message))
printed = true
}
}
if !printed {
fmt.Fprintf(&bf, " | ")
}
}
}
fmt.Fprintf(&bf, "\n")
}
}
return bf.String()
}
func (s *Song) RenumberBars() {
for i := range s.Bars {
s.Bars[i].No = uint16(i)
}
}
func (s *Song) RenumberTracks() {
for i := range s.Tracks {
s.Tracks[i].No = uint16(i)
}
}
type TrackMessage struct {
TrackNo uint16
AbsPos uint64
Message midi.Message
Position *Position
}
type Positions []*Position
func (p Positions) Swap(a, b int) {
p[a], p[b] = p[b], p[a]
}
func (p Positions) Len() int {
return len(p)
}
func (p Positions) Less(a, b int) bool {
if p[a].Bar.No < p[b].Bar.No {
return true
}
if p[a].Bar.No > p[b].Bar.No {
return false
}
if p[a].Beat < p[b].Beat {
return true
}
if p[a].Beat > p[b].Beat {
return false
}
var frac_a float64
var frac_b float64
if p[a].Fraction[1] > 0 {
frac_a = float64(p[a].Fraction[0]) / float64(p[a].Fraction[1])
}
if p[b].Fraction[1] > 0 {
frac_b = float64(p[b].Fraction[0]) / float64(p[b].Fraction[1])
}
return frac_a < frac_b
}
type Bar struct {
Song *Song
No uint16
TimeSig [2]uint8
Positions Positions
AbsPos uint64
}
func (b *Bar) EndPos() uint64 {
return b.AbsPos + b.Length()
}
func (b *Bar) Length() uint64 {
l := float64(b.Song.ticksPerQN*4*uint32(b.TimeSig[0])) / float64(b.TimeSig[1])
return uint64(math.Round(l))
}
func (b *Bar) SetMessageByRelTicks(ticks uint64, trackNo uint16, msg midi.Message) {
beat := uint8(ticks / uint64(b.Song.ticksPerQN))
ticksRest := ticks % uint64(b.Song.ticksPerQN)
var pos *Position
for _, p := range b.Positions {
if p.Beat == beat && p.WithinFraction(ticksRest) {
pos = p
break
}
}
if pos == nil {
pos = b.AddPosition()
pos.Beat = beat
pos.Fraction[0] = float64(ticksRest)
pos.Fraction[1] = float64(b.Song.ticksPerQN)
}
pos.SetMessage(trackNo, msg)
}
func (b *Bar) AddPosition() *Position {
p := &Position{
Bar: b,
}
b.Positions = append(b.Positions, p)
return p
}
func (b *Bar) Columns() []string {
//cols := make([]string)
return nil
}
func (b *Bar) SortPositions() {
sort.Sort(b.Positions)
}
type Position struct {
Bar *Bar
Comment string
Mark string
Beat uint8
Tempo float32
Fraction [2]float64
Messages []*TrackMessage
}
/*
WithinFraction determines, if the given ticks are within the fraction of the position.
The given ticks must be less than a quarternote (Songs ticks per quarternote).
The fraction is a fraction of a quarternote. So we first have to check, to which fraction
of the qn the given ticks correspond and then to check, if the difference between this fraction
and the fraction of the Position lies within the tolerance
*/
func (p *Position) WithinFraction(ticks uint64) bool {
//tolerance := float64(0.0000001)
tolerance := float64(0.001)
fracTicks := float64(ticks) / float64(p.Bar.Song.ticksPerQN)
if fracTicks >= 1 {
panic("must not happen, we are on the wrong beat")
}
fracPos := p.Fraction[0] / p.Fraction[1]
//fmt.Printf("\nwithin fraction %v vs %v (ticks: %v perQN: %v)\n", fracPos, fracTicks, ticks, p.Bar.Song.ticksPerQN)
return math.Abs(fracPos-fracTicks) < tolerance
}
// AbsTicks returns the absolute tick position of p: the bar start plus
// whole beats (quarter notes) plus the fractional remainder.
func (p *Position) AbsTicks() uint64 {
	beatTicks := p.Bar.Song.ticksPerQN * uint32(p.Beat)
	var fracTicks float64
	if p.Fraction[1] != 0 {
		// BUG FIX: guard against a zero denominator (e.g. a position
		// created via AddPosition without a fraction) — dividing would
		// yield NaN and an implementation-defined uint64 conversion.
		fracTicks = math.Round((float64(p.Bar.Song.ticksPerQN) * p.Fraction[0]) / p.Fraction[1])
	}
	return p.Bar.AbsPos + uint64(beatTicks) + uint64(fracTicks)
}
func (p *Position) AddMessage(track uint16, msg midi.Message) {
tm := &TrackMessage{
TrackNo: track,
Message: msg,
Position: p,
}
p.Messages = append(p.Messages, tm)
}
func (p *Position) GetMessage(track uint16) *TrackMessage {
for _, m := range p.Messages {
if m.TrackNo == track {
return m
}
}
return nil
}
func (p *Position) SetMessage(track uint16, msg midi.Message) {
var tm *TrackMessage
for _, m := range p.Messages {
if m.TrackNo == track {
tm = m
break
}
}
if tm == nil {
p.AddMessage(track, msg)
return
}
tm.Message = msg
}
type Track struct {
Song *Song
No uint16
Channel int8 // -1 == not defined
Name string
Instrument string
Solo bool
Mute bool
RecordArm bool
WithContent bool
External bool // for non editable track
}
/*
-----------------------------------------------------------------------
File | Edit | View | Config (the menu, open the first with ALT+SPACE and then navigate with arrow keys and select with ENTER)
-----------------------------------------------------------------------
Comment | Mark | Bar | Beat || Drums[10] | Bass[9] | Vocal[1] | Piano[1] | (piano track on channel 1 etc)
| | | || S M R | S M R | S M R | S M R | (Solo/Mute/Record indicators)
----------------------------------------------------------------------- (everything above this line is static/non scrollable)
1 | Intro | 4/4 | 1.0 || C3/100 | C5_/120 | | | (drum note is just a 32ths, bass is note on)
| | 144 | 1.0 || | | | | tempo change
#2 (bar change)
| | | 2.25 || C5/60 | _C5 | "hiho" | CC123/100 |
=====I====>===V====C=== position indicator, always the pre-last line of the screen (each = is a bar, each letter is the first letter of a Marker)
F1 Play | F2 Rec | F3 Metro | F4 Keyb | F5 V1 | F6 V2 | F7 V3 | F8 V4 | F9 V5 | F10 Track Properties | F11 Song Properties
(play, record, metronome, Keyboard are switches that indicate if it is active)
(views are a selector; only one view can be active at a time)
*/
func New() *Song {
return &Song{
Properties: map[string]string{},
}
}
| {
fmt.Fprintf(&bf, " %s[%v] | ", t.Name, t.Channel)
} | conditional_block |
smf.go | package smf
import (
"bytes"
"fmt"
"math"
"sort"
"gitlab.com/gomidi/midi"
"gitlab.com/gomidi/midi/midimessage/channel"
"gitlab.com/gomidi/midi/midimessage/meta"
"gitlab.com/gomidi/midi/midimessage/meta/meter"
"gitlab.com/gomidi/midi/reader"
"gitlab.com/gomidi/midi/smf"
"gitlab.com/gomidi/midi/smf/smfwriter"
"gitlab.com/gomidi/midi/writer"
)
// ReadSMF reads a type-1 standard MIDI file with metric timing into a
// Song. Any other SMF type or time format is rejected with an error.
func ReadSMF(file string) (*Song, error) {
	s := New()
	// add the first track; scanMessage appends a fresh track on every
	// meta.EndOfTrack, so there is always a "last" track to write into
	s.AddTrack(false, -1)
	var rd = reader.New(
		reader.NoLogger(),
		reader.Each(s.scanMessage),
	)
	err := reader.ReadSMFFile(rd, file)
	if err != nil {
		return nil, err
	}
	// NOTE(review): the whole file is scanned before the header checks
	// below reject it — harmless, but wasted work on unsupported input.
	if rd.Header().Type() != 1 {
		return nil, fmt.Errorf("wrong SMF type %v, currently only supports type 1, please convert your midi file %s", rd.Header().Type(), file)
	}
	tpq, isMetric := rd.Header().TimeFormat.(smf.MetricTicks)
	if !isMetric {
		return nil, fmt.Errorf("wrong time format type %s, currently only supports metric time, please convert your midi file %s", rd.Header().TimeFormat.String(), file)
	}
	s.ticksPerQN = tpq.Ticks4th()
	// turn the scanned raw events into bars and positions
	err = s.finishScan()
	if err != nil {
		return nil, err
	}
	return s, nil
}
// TempoChange records a tempo change event scanned from the file.
type TempoChange struct {
	AbsPos   uint64  // absolute position in ticks
	TempoBPM float64 // tempo in beats per minute
}
type TempoChanges []*TempoChange
func (p TempoChanges) Swap(a, b int) {
p[a], p[b] = p[b], p[a]
}
func (p TempoChanges) Len() int {
return len(p)
}
func (p TempoChanges) Less(a, b int) bool {
return p[a].AbsPos < p[b].AbsPos
}
type TimeSigs []*TimeSig
func (p TimeSigs) Swap(a, b int) {
p[a], p[b] = p[b], p[a]
}
func (p TimeSigs) Len() int {
return len(p)
}
func (p TimeSigs) Less(a, b int) bool {
return p[a].AbsPos < p[b].AbsPos
}
type TimeSig struct {
AbsPos uint64
Num uint8
Denom uint8
}
type Song struct {
CopyRight string
Properties map[string]string
Bars []*Bar
Tracks []*Track
scannedMessages []*TrackMessage
scannedTempoChanges TempoChanges
scannedTimeSig TimeSigs
ticksPerQN uint32
lastPos uint64
}
// AddTrack appends a new track with the given content flag and MIDI
// channel (-1 = undefined), renumbers all tracks and returns the new one.
func (s *Song) AddTrack(withContent bool, channel int8) *Track {
	track := &Track{
		Song:        s,
		WithContent: withContent,
		Channel:     channel,
	}
	s.Tracks = append(s.Tracks, track)
	s.RenumberTracks()
	return track
}
// createBarsUntil appends bars with the given time signature, starting
// at `from`, until `to` is reached or exceeded.
func (s *Song) createBarsUntil(from, to uint64, num, denom uint8) {
	pos := from
	for pos < to {
		b := s.AddBar(pos, num, denom)
		end := b.EndPos()
		if end == pos {
			// BUG FIX: a zero-length bar (e.g. numerator 0 or unset
			// ticksPerQN) previously made this loop spin forever.
			break
		}
		pos = end
	}
}
// createBars builds the song's bar list from the initial time signature
// and the (sorted) list of signature changes, then numbers the bars.
func (s *Song) createBars(firstTimeSig [2]uint8, changes TimeSigs) {
	num := firstTimeSig[0]
	denom := firstTimeSig[1]
	var pos uint64
	for _, change := range changes {
		// fill the gap up to the change with bars of the current signature
		if pos < change.AbsPos {
			s.createBarsUntil(pos, change.AbsPos, num, denom)
		}
		num, denom = change.Num, change.Denom
		b := s.AddBar(change.AbsPos, num, denom)
		pos = b.EndPos()
	}
	// bars after the last change, up to the last scanned event
	s.createBarsUntil(pos, s.lastPos, num, denom)
	s.RenumberBars()
}
// findBar returns the last bar that starts at or before pos, or nil if
// no bar starts that early. Relies on s.Bars being sorted by AbsPos
// ascending (guaranteed by createBars); scans linearly without early exit.
func (s *Song) findBar(pos uint64) (bar *Bar) {
	for _, b := range s.Bars {
		if pos >= b.AbsPos {
			bar = b
		}
	}
	return
}
// NoOfContentTracks counts the tracks that carry MIDI content.
func (s *Song) NoOfContentTracks() (no uint16) {
	for _, track := range s.Tracks {
		if !track.WithContent {
			continue
		}
		no++
	}
	return no
}
// finishScan converts the raw events collected by scanMessage into the
// bar/position structure: it sorts the scanned tempo and time signature
// changes, creates the bars (defaulting to 4/4 when the file defines no
// signature at tick 0) and distributes the messages into them.
func (s *Song) finishScan() (err error) {
	sort.Sort(s.scannedTempoChanges)
	sort.Sort(s.scannedTimeSig)
	if len(s.scannedTimeSig) > 0 && s.scannedTimeSig[0].AbsPos == 0 {
		// a signature at the very start becomes the initial one; the
		// remainder is treated as changes
		var rest TimeSigs
		if len(s.scannedTimeSig) > 1 {
			rest = s.scannedTimeSig[1:]
		}
		s.createBars([2]uint8{s.scannedTimeSig[0].Num, s.scannedTimeSig[0].Denom}, rest)
	} else {
		s.createBars([2]uint8{4, 4}, s.scannedTimeSig)
	}
	// NOTE(review): s.scannedTempoChanges is sorted above but never
	// applied to any position here — tempo data appears to be dropped;
	// confirm whether that is intentional.
	for _, msg := range s.scannedMessages {
		b := s.findBar(msg.AbsPos)
		if b == nil {
			return fmt.Errorf("can't find bar for message: %v at position %v", msg.Message, msg.AbsPos)
		}
		b.SetMessageByRelTicks(msg.AbsPos-b.AbsPos, msg.TrackNo, msg.Message)
		b.SortPositions()
	}
	return nil
}
// LastTrack returns the most recently added track.
// Panics if the song has no tracks; callers guarantee at least one
// (ReadSMF adds a track before scanning starts).
func (s *Song) LastTrack() *Track {
	return s.Tracks[len(s.Tracks)-1]
}
// scanMessage is the reader callback invoked for every MIDI event while
// reading a file. It collects meta information (copyright, names, time
// signatures, tempo changes) and buffers all other messages in
// scannedMessages for finishScan. A meta.EndOfTrack starts a new track.
func (s *Song) scanMessage(p *reader.Position, msg midi.Message) {
	// track the highest tick seen, so createBars knows where the song ends
	if p.AbsoluteTicks > s.lastPos {
		s.lastPos = p.AbsoluteTicks
	}
	if msg == meta.EndOfTrack {
		s.AddTrack(false, -1)
		return
	}
	t := s.LastTrack()
	switch m := msg.(type) {
	case meta.Copyright:
		s.CopyRight = m.Text()
	case meta.TrackSequenceName:
		t.Name = m.Text()
	case meta.Instrument:
		t.Instrument = m.Text()
	case meta.TimeSig:
		ts := &TimeSig{
			AbsPos: p.AbsoluteTicks,
			Num:    m.Numerator,
			Denom:  m.Denominator,
		}
		s.scannedTimeSig = append(s.scannedTimeSig, ts)
	case meta.Tempo:
		tc := &TempoChange{
			AbsPos:   p.AbsoluteTicks,
			TempoBPM: m.FractionalBPM(),
		}
		s.scannedTempoChanges = append(s.scannedTempoChanges, tc)
	default:
		if msg != nil {
			tm := &TrackMessage{}
			tm.Message = msg
			tm.TrackNo = t.No
			tm.AbsPos = p.AbsoluteTicks
			s.scannedMessages = append(s.scannedMessages, tm)
			t.WithContent = true
			// a content track must use a single MIDI channel: remember the
			// first channel seen and reject mixed-channel tracks
			if chMsg, is := msg.(channel.Message); is {
				if t.Channel >= 0 && uint8(t.Channel) != chMsg.Channel() {
					panic(fmt.Sprintf("track no %v (%s) has mixed channel messages for channel %v and %v - not supported", t.No, t.Name, t.Channel, chMsg.Channel()))
				}
				if t.Channel < 0 {
					t.Channel = int8(chMsg.Channel())
				}
			}
		}
	}
}
// AddBar appends a new bar starting at the given absolute tick position
// with the given time signature and returns it.
func (s *Song) AddBar(pos uint64, num, denom uint8) *Bar {
	bar := &Bar{
		Song:    s,
		AbsPos:  pos,
		TimeSig: [2]uint8{num, denom},
	}
	s.Bars = append(s.Bars, bar)
	return bar
}
// Save writes the song as an SMF1 file. Two extra tracks are written in
// addition to the content tracks: one for time signatures and one for
// tempo changes (see writeSMF).
func (s *Song) Save(file string) error {
	return writer.WriteSMF(file, s.NoOfContentTracks()+2, s.writeSMF, smfwriter.Format(smf.SMF1), smfwriter.TimeFormat(smf.MetricTicks(s.ticksPerQN)))
}
// writeTimeSigTrack writes all time signature changes as one SMF track.
// A signature is only emitted when it differs from the previous one;
// the implicit initial signature is 4/4.
func (s *Song) writeTimeSigTrack(w *writer.SMF) error {
	timesig := [2]uint8{4, 4}
	var pos uint64
	for _, b := range s.Bars {
		if b.TimeSig != timesig {
			w.SetDelta(uint32(b.AbsPos - pos))
			// BUG FIX: the write error was silently discarded before.
			if err := w.Write(meter.Meter(b.TimeSig[0], b.TimeSig[1])); err != nil {
				return err
			}
			timesig = b.TimeSig
			pos = b.AbsPos
		}
	}
	return nil
}
// writeTempoTrack writes all tempo changes as one SMF track. A tempo is
// only emitted when it is set (non-zero) and differs from the previous
// one; the implicit initial tempo is 120 BPM (the SMF default).
func (s *Song) writeTempoTrack(w *writer.SMF) error {
	tempo := float32(120.0)
	var pos uint64
	for _, b := range s.Bars {
		for _, p := range b.Positions {
			if p.Tempo != 0 && p.Tempo != tempo {
				absPos := p.AbsTicks()
				w.SetDelta(uint32(absPos - pos))
				// BUG FIX: the write error was silently discarded before.
				if err := w.Write(meta.Tempo(p.Tempo)); err != nil {
					return err
				}
				tempo = p.Tempo
				pos = absPos
			}
		}
	}
	return nil
}
// writeSMF is the callback passed to writer.WriteSMF. It emits the time
// signature track, the tempo track, and then one SMF track per content
// track, each closed with an end-of-track event.
func (s *Song) writeSMF(w *writer.SMF) (err error) {
	err = s.writeTimeSigTrack(w)
	if err != nil {
		return
	}
	err = writer.EndOfTrack(w)
	if err != nil {
		return
	}
	err = s.writeTempoTrack(w)
	if err != nil {
		return
	}
	err = writer.EndOfTrack(w)
	if err != nil {
		return
	}
	for _, tr := range s.Tracks {
		if tr.WithContent {
			// deltas are relative to the previous message of this track
			var lastTick uint64
			for _, b := range s.Bars {
				for _, p := range b.Positions {
					ticks := p.AbsTicks()
					for _, m := range p.Messages {
						if m.TrackNo == tr.No && m.Message != nil {
							delta := ticks - lastTick
							if tr.Channel < 0 {
								panic(fmt.Sprintf("channel for content track no %v (%s) is -1, but content tracks must have channels", tr.No, tr.Name))
							}
							w.SetChannel(uint8(tr.Channel))
							w.SetDelta(uint32(delta))
							// NOTE(review): the error returned by w.Write is
							// ignored here — confirm whether it should abort.
							w.Write(m.Message)
							lastTick = ticks
						}
					}
				}
			}
			err = writer.EndOfTrack(w)
			if err != nil {
				return
			}
		}
	}
	return nil
}
// TrackWidth returns the display width of the i-th track column.
// Currently unimplemented; always returns 0.
func (s *Song) TrackWidth(i int) uint8 {
	// TODO calculate the track width
	return 0
}
// KeyToNote converts a MIDI key number into a note name with octave,
// e.g. 60 -> "C5". Black keys are spelled with sharps.
func KeyToNote(key uint8) string {
	// array lookup instead of rebuilding a map on every call
	notes := [12]string{"C", "C#", "D", "D#", "E", "F", "F#", "G", "G#", "A", "A#", "B"}
	return fmt.Sprintf("%s%v", notes[key%12], key/12)
}
// ShowMessage renders a MIDI message in the compact notation used by
// BarLines: note-on as "C5/100_" (note/velocity), note-off as "_C5",
// lyrics quoted with %q, meta text in single quotes, and the message's
// own String() representation for everything else.
func ShowMessage(msg midi.Message) string {
	switch v := msg.(type) {
	case channel.NoteOn:
		return fmt.Sprintf("%s/%v_", KeyToNote(v.Key()), v.Velocity())
	case channel.NoteOff:
		return fmt.Sprintf("_%s", KeyToNote(v.Key()))
	case channel.NoteOffVelocity:
		return fmt.Sprintf("_%s", KeyToNote(v.Key()))
	/*
		case channel.Aftertouch:
		case channel.ControlChange:
		case channel.Pitchbend:
		case channel.PolyAftertouch:
		case channel.ProgramChange:
	*/
	case meta.Lyric:
		return fmt.Sprintf("%q", v.Text())
	case meta.Text:
		return fmt.Sprintf("'%s'", v.Text())
	default:
		return msg.String()
	}
}
// BarLines renders the song as a human-readable table: a header row
// listing the content tracks, then per bar a separator line followed by
// one row per position with comment, mark, tempo, beat and the messages
// of each content track.
func (s *Song) BarLines() string {
	var bf bytes.Buffer
	fmt.Fprintf(&bf, "| Comment | Mark | Tempo | Beat | ")
	for _, t := range s.Tracks {
		if t.WithContent {
			fmt.Fprintf(&bf, " %s[%v] | ", t.Name, t.Channel)
		}
	}
	fmt.Fprintf(&bf, "\n")
	for _, b := range s.Bars {
		fmt.Fprintf(&bf, "----------- #%v %v/%v --------------\n", b.No, b.TimeSig[0], b.TimeSig[1])
		for _, p := range b.Positions {
			tempo := ""
			if p.Tempo != 0 {
				// BUG FIX: the previous code formatted the (empty) tempo
				// string itself instead of the position's tempo value.
				tempo = fmt.Sprintf("%0.2f", p.Tempo)
			}
			var frac float64
			if p.Fraction[1] > 0 {
				frac = p.Fraction[0] / p.Fraction[1]
			}
			// beats are displayed 1-based
			beat := fmt.Sprintf("%0.4f", float64(p.Beat)+float64(1)+frac)
			fmt.Fprintf(&bf, "| %s | %s | %s | %s | ", p.Comment, p.Mark, tempo, beat)
			for _, t := range s.Tracks {
				if t.WithContent {
					var printed bool
					for _, m := range p.Messages {
						if m.TrackNo == t.No {
							fmt.Fprintf(&bf, " %s | ", ShowMessage(m.Message))
							printed = true
						}
					}
					if !printed {
						fmt.Fprintf(&bf, " | ")
					}
				}
			}
			fmt.Fprintf(&bf, "\n")
		}
	}
	return bf.String()
}
// RenumberBars makes each bar's No match its index in s.Bars.
func (s *Song) RenumberBars() {
	for i, bar := range s.Bars {
		bar.No = uint16(i)
	}
}
// RenumberTracks makes each track's No match its index in s.Tracks.
func (s *Song) RenumberTracks() {
	for i, track := range s.Tracks {
		track.No = uint16(i)
	}
}
type TrackMessage struct {
TrackNo uint16
AbsPos uint64
Message midi.Message
Position *Position
}
type Positions []*Position
func (p Positions) Swap(a, b int) {
p[a], p[b] = p[b], p[a]
}
func (p Positions) Len() int {
return len(p)
}
// Less orders positions by bar number first, then by beat, then by the
// fraction of a quarter note; a zero denominator counts as fraction 0.
func (p Positions) Less(a, b int) bool {
	if p[a].Bar.No < p[b].Bar.No {
		return true
	}
	if p[a].Bar.No > p[b].Bar.No {
		return false
	}
	if p[a].Beat < p[b].Beat {
		return true
	}
	if p[a].Beat > p[b].Beat {
		return false
	}
	var frac_a float64
	var frac_b float64
	if p[a].Fraction[1] > 0 {
		frac_a = float64(p[a].Fraction[0]) / float64(p[a].Fraction[1])
	}
	if p[b].Fraction[1] > 0 {
		frac_b = float64(p[b].Fraction[0]) / float64(p[b].Fraction[1])
	}
	return frac_a < frac_b
}
type Bar struct {
Song *Song
No uint16
TimeSig [2]uint8
Positions Positions
AbsPos uint64
}
// EndPos returns the first tick after this bar (start plus length).
func (b *Bar) EndPos() uint64 {
	return b.AbsPos + b.Length()
}
func (b *Bar) Length() uint64 |
// SetMessageByRelTicks stores msg for the given track at a position
// identified by ticks relative to the bar start. The ticks are split
// into whole beats (quarter notes) and a remainder; an existing position
// on the same beat within fraction tolerance is reused, otherwise a new
// one is created.
func (b *Bar) SetMessageByRelTicks(ticks uint64, trackNo uint16, msg midi.Message) {
	beat := uint8(ticks / uint64(b.Song.ticksPerQN))
	ticksRest := ticks % uint64(b.Song.ticksPerQN)
	var pos *Position
	for _, p := range b.Positions {
		if p.Beat == beat && p.WithinFraction(ticksRest) {
			pos = p
			break
		}
	}
	if pos == nil {
		// no matching position yet: create one carrying the remainder as
		// a fraction of a quarter note
		pos = b.AddPosition()
		pos.Beat = beat
		pos.Fraction[0] = float64(ticksRest)
		pos.Fraction[1] = float64(b.Song.ticksPerQN)
	}
	pos.SetMessage(trackNo, msg)
}
// AddPosition appends a fresh, empty position to the bar and returns it.
func (b *Bar) AddPosition() *Position {
	pos := &Position{Bar: b}
	b.Positions = append(b.Positions, pos)
	return pos
}
func (b *Bar) Columns() []string {
//cols := make([]string)
return nil
}
func (b *Bar) SortPositions() {
sort.Sort(b.Positions)
}
type Position struct {
Bar *Bar
Comment string
Mark string
Beat uint8
Tempo float32
Fraction [2]float64
Messages []*TrackMessage
}
/*
WithinFraction determines, if the given ticks are within the fraction of the position.
The given ticks must be less than a quarternote (Songs ticks per quarternote).
The fraction is a fraction of a quarternote. So we first have to check, to which fraction
of the qn the given ticks correspond and then to check, if the difference between this fraction
and the fraction of the Position lies within the tolerance
*/
func (p *Position) WithinFraction(ticks uint64) bool {
	//tolerance := float64(0.0000001)
	tolerance := float64(0.001)
	// fraction of a quarter note that the given ticks represent
	fracTicks := float64(ticks) / float64(p.Bar.Song.ticksPerQN)
	if fracTicks >= 1 {
		panic("must not happen, we are on the wrong beat")
	}
	// NOTE(review): a zero Fraction[1] makes fracPos NaN, so the
	// comparison below is always false for positions created without a
	// fraction — confirm this is the intended behavior.
	fracPos := p.Fraction[0] / p.Fraction[1]
	//fmt.Printf("\nwithin fraction %v vs %v (ticks: %v perQN: %v)\n", fracPos, fracTicks, ticks, p.Bar.Song.ticksPerQN)
	return math.Abs(fracPos-fracTicks) < tolerance
}
// AbsTicks returns the absolute tick position of p: the bar start plus
// whole beats (quarter notes) plus the fractional remainder.
func (p *Position) AbsTicks() uint64 {
	beatTicks := p.Bar.Song.ticksPerQN * uint32(p.Beat)
	var fracTicks float64
	if p.Fraction[1] != 0 {
		// BUG FIX: guard against a zero denominator (e.g. a position
		// created via AddPosition without a fraction) — dividing would
		// yield NaN and an implementation-defined uint64 conversion.
		fracTicks = math.Round((float64(p.Bar.Song.ticksPerQN) * p.Fraction[0]) / p.Fraction[1])
	}
	return p.Bar.AbsPos + uint64(beatTicks) + uint64(fracTicks)
}
// AddMessage appends a new message for the given track to this position.
func (p *Position) AddMessage(track uint16, msg midi.Message) {
	p.Messages = append(p.Messages, &TrackMessage{
		TrackNo:  track,
		Message:  msg,
		Position: p,
	})
}
// GetMessage returns the message stored for the given track at this
// position, or nil if the track has no message here.
func (p *Position) GetMessage(track uint16) *TrackMessage {
	for i := range p.Messages {
		if p.Messages[i].TrackNo == track {
			return p.Messages[i]
		}
	}
	return nil
}
// SetMessage replaces the message of the given track at this position,
// or adds it if the track has no message here yet.
func (p *Position) SetMessage(track uint16, msg midi.Message) {
	// reuse GetMessage instead of duplicating its lookup loop
	if tm := p.GetMessage(track); tm != nil {
		tm.Message = msg
		return
	}
	p.AddMessage(track, msg)
}
type Track struct {
Song *Song
No uint16
Channel int8 // -1 == not defined
Name string
Instrument string
Solo bool
Mute bool
RecordArm bool
WithContent bool
External bool // for non editable track
}
/*
-----------------------------------------------------------------------
File | Edit | View | Config (the menu, open the first with ALT+SPACE and then navigate with arrow keys and select with ENTER)
-----------------------------------------------------------------------
Comment | Mark | Bar | Beat || Drums[10] | Bass[9] | Vocal[1] | Piano[1] | (piano track on channel 1 etc)
| | | || S M R | S M R | S M R | S M R | (Solo/Mute/Record indicators)
----------------------------------------------------------------------- (everything above this line is static/non scrollable)
1 | Intro | 4/4 | 1.0 || C3/100 | C5_/120 | | | (drum note is just a 32ths, bass is note on)
| | 144 | 1.0 || | | | | tempo change
#2 (bar change)
| | | 2.25 || C5/60 | _C5 | "hiho" | CC123/100 |
=====I====>===V====C=== position indicator, always the pre-last line of the screen (each = is a bar, each letter is the first letter of a Marker)
F1 Play | F2 Rec | F3 Metro | F4 Keyb | F5 V1 | F6 V2 | F7 V3 | F8 V4 | F9 V5 | F10 Track Properties | F11 Song Properties
(play, record, metronome, Keyboard are switches that indicate if it is active)
(views are a selector; only one view can be active at a time)
*/
func New() *Song {
return &Song{
Properties: map[string]string{},
}
}
| {
l := float64(b.Song.ticksPerQN*4*uint32(b.TimeSig[0])) / float64(b.TimeSig[1])
return uint64(math.Round(l))
} | identifier_body |
audio.rs | use crate::config::Config;
use crate::test::InputSampleStream;
use failure::Error;
use num::{Complex, Zero};
use portaudio as pa;
use std::sync::mpsc::{Receiver, Sender};
const CHANNELS: i32 = 1;
const FRAMES: u32 = 256;
const INTERLEAVED: bool = true;
fn run<'c, T>(
mut modulator: Modulate<'c, T>,
rx_sender: Sender<f32>,
sample_rate: f32,
) -> Result<(), pa::Error>
where
T: Iterator<Item = Complex<f32>>,
{
let pa = pa::PortAudio::new()?;
println!("PortAudio");
println!("version: {}", pa.version());
println!("version text: {:?}", pa.version_text());
println!("host count: {}", pa.host_api_count()?);
let default_host = pa.default_host_api()?;
println!("default host: {:#?}", pa.host_api_info(default_host));
let def_input = pa.default_input_device()?;
let input_info = pa.device_info(def_input)?;
println!("Default input device info: {:#?}", &input_info);
// Construct the input stream parameters.
let latency = input_info.default_low_input_latency;
let input_params = pa::StreamParameters::<f32>::new(def_input, CHANNELS, INTERLEAVED, latency);
let def_output = pa.default_output_device()?;
let output_info = pa.device_info(def_output)?;
println!("Default output device info: {:#?}", &output_info);
// Construct the output stream parameters.
let latency = output_info.default_low_output_latency;
let output_params =
pa::StreamParameters::<f32>::new(def_output, CHANNELS, INTERLEAVED, latency);
// Check that the stream format is supported.
pa.is_duplex_format_supported(input_params, output_params, sample_rate as f64)?;
// Construct the settings with which we'll open our duplex stream.
let settings =
pa::DuplexStreamSettings::new(input_params, output_params, sample_rate as f64, FRAMES);
let mut stream = pa.open_blocking_stream(settings)?;
stream.start()?;
// We'll use this function to wait for read/write availability.
fn wait_for_stream<F>(f: F, name: &str) -> u32
where
F: Fn() -> Result<pa::StreamAvailable, pa::error::Error>,
{
loop {
match f() {
Ok(available) => match available {
pa::StreamAvailable::Frames(frames) => return frames as u32,
pa::StreamAvailable::InputOverflowed => println!("Input stream has overflowed"),
pa::StreamAvailable::OutputUnderflowed => {
println!("Output stream has underflowed")
}
},
Err(err) => panic!(
"An error occurred while waiting for the {} stream: {}",
name, err
),
}
}
};
// Now start the main read/write loop! In this example, we pass
// the input buffer directly to the output buffer, so watch out
// for feedback.
loop {
// How many frames are available on the input stream?
let in_frames = wait_for_stream(|| stream.read_available(), "Read");
// If there are frames available, let's take them and add them
// to our buffer.
if in_frames > 0 {
let input_samples = stream.read(in_frames)?;
for samp in input_samples {
rx_sender.send(*samp).unwrap();
}
}
// How many frames are available for writing on the output stream?
let out_frames = wait_for_stream(|| stream.write_available(), "Write");
// If there are frames available for writing and we have some
// to write, then write!
if out_frames > 0 {
// If we have more than enough frames for writing, take
// them from the start of the buffer. Otherwise if we
// have less, just take what we can for now.
let write_frames = out_frames;
let n_write_samples = write_frames as usize * CHANNELS as usize;
let mut flag = false;
stream.write(write_frames, |output| {
for i in 0..n_write_samples {
if let Some(samp) = modulator.next() {
output[i] = samp;
} else {
println!("Tx samples finished. Exiting");
flag = true;
break;
}
}
})?;
if flag {
break;
}
}
}
Ok(())
}
pub fn start_audio<'c>(
tx_receiver: Receiver<Complex<f32>>,
config: &'c Config,
) -> Result<(std::thread::JoinHandle<()>, AudioSampleStream<'c>), Error> {
// For the microphone
let (rx_sender, rx_receiver) = std::sync::mpsc::channel::<f32>();
let sample_rate = config.audio.sample_rate;
let config_c = config.clone();
let handle = std::thread::spawn(move || {
// Use the modulator to go from baseband to carrier frequency
let modulator = Modulate::new(tx_receiver.iter(), sample_rate, &config_c);
run(modulator, rx_sender, sample_rate).unwrap();
});
return Ok((
handle,
AudioSampleStream::new(rx_receiver, sample_rate as f32, config),
));
}
| /// Stream of samples from an audio device
pub struct AudioSampleStream<'c> {
channel: Receiver<f32>,
demod: Demodulate<'c>,
}
impl<'c> AudioSampleStream<'c> {
fn new(channel: Receiver<f32>, sample_rate: f32, config: &'c Config) -> Self {
Self {
channel,
demod: Demodulate::new(sample_rate, config),
}
}
}
impl<'c> InputSampleStream for AudioSampleStream<'c> {}
impl<'c> Iterator for AudioSampleStream<'c> {
type Item = Complex<f32>;
fn next(&mut self) -> Option<Complex<f32>> {
loop {
let in_samp = if let Ok(samp) = self.channel.recv() {
samp
} else {
return None;
};
let out = self.demod.push(in_samp);
if out.is_some() {
return out;
}
}
}
}
/// Upconvert signal from baseband to carrier frequency
struct Modulate<'c, T>
where
T: Iterator<Item = Complex<f32>>,
{
config: &'c Config,
/// Our source of baseband samples
src: T,
/// Number of carrier samples to skip per baseband sample
to_skip: u64,
/// Sample rate of the audio signal
sample_rate: f32,
/// Total number of (audio) samples transmitted so far
num_samps: u64,
/// If true, input is done and we'll always return None
done: bool,
/// The two samples we are currently in between sending
cur_samps: (Complex<f32>, Complex<f32>),
/// The current sample we are sending
cur_ewma: Complex<f32>,
}
impl<'c, T> Modulate<'c, T>
where
T: Iterator<Item = Complex<f32>>,
{
fn new(src: T, sample_rate: f32, config: &'c Config) -> Self {
// For now, sample_rate has to be a multiple of bandwidth. Can
// remove restriction later
let to_skip = sample_rate / config.audio.bandwidth;
assert!((to_skip.round() - to_skip).abs() <= 1e-3);
let to_skip = 1; //to_skip.round() as u64;
Self {
config,
src,
to_skip,
sample_rate,
num_samps: 0,
done: false,
cur_samps: (Complex::zero(), Complex::zero()),
cur_ewma: Complex::zero(),
}
}
}
impl<'c, T> Iterator for Modulate<'c, T>
where
T: Iterator<Item = Complex<f32>>,
{
type Item = f32;
fn next(&mut self) -> Option<f32> {
if self.done {
return None;
}
// See if we need to update the current sample
if self.num_samps % self.to_skip == 0 {
if let Some(x) = self.src.next() {
self.cur_samps = (self.cur_samps.1, x);
} else {
self.done = true;
return None;
}
}
let pi2 = 2. * std::f32::consts::PI;
let e = Complex::new(
0.,
pi2 * self.num_samps as f32 / self.sample_rate * self.config.audio.center_frequency,
)
.exp();
self.num_samps += 1;
// Low-pass filter
// Frequency-domain sinc
//let f = (self.num_samps % self.to_skip) as f32 / self.to_skip as f32;
//let samp = prev * (1. - f) + cur * f;
// EWMA (equivalent to RC-filter)
// let alpha = 1. / (1. + self.to_skip as f32);
// self.cur_ewma = alpha * self.cur_samps.1 + (1. - alpha) * self.cur_ewma;
// let samp = self.cur_ewma;
let samp = self.cur_samps.1;
Some(e.re * samp.re + e.im * samp.im)
}
}
/// Convert to baseband from carrier frequency
struct Demodulate<'c> {
config: &'c Config,
/// Number of carrier samples to skip per baseband sample
to_skip: usize,
/// Sample rate of the audio signal
sample_rate: f32,
/// Total number of (audio) samples transmitted so far
num_samps: u64,
/// Average of the sample so far
samp_avg: Complex<f32>,
}
impl<'c> Demodulate<'c> {
fn new(sample_rate: f32, config: &'c Config) -> Self {
// For now, sample_rate has to be a multiple of bandwidth. Can
// remove restriction later
let to_skip = sample_rate / config.audio.bandwidth;
assert!((to_skip.round() - to_skip).abs() <= 1e-3);
let to_skip = to_skip.round() as usize;
Self {
config,
to_skip,
sample_rate,
num_samps: 0,
samp_avg: Complex::zero(),
}
}
/// Takes an audio sample, and if appropriate, returns a baseband
/// sample
fn push(&mut self, samp: f32) -> Option<Complex<f32>> {
// Add to the average
let pi2 = 2. * std::f32::consts::PI;
let e = Complex::new(
0.,
pi2 * self.num_samps as f32 / self.sample_rate * self.config.audio.center_frequency,
)
.exp();
self.samp_avg += samp * e;
// Should we output something?
let res = if self.num_samps % self.to_skip as u64 == 0 {
let res = Some(self.samp_avg / self.to_skip as f32);
self.samp_avg = Complex::zero();
res
} else {
None
};
self.num_samps += 1;
res
}
}
#[cfg(test)]
mod tests {
use super::{Demodulate, Modulate};
use crate::config::Config;
use num::Complex;
use rand::Rng;
#[test]
fn mod_demod() {
let config = Config::default();
let sample_rate = 44100.;
// The samples we'll transmit
let mut rng = rand_pcg::Pcg32::new(1, 1);
let samples: Vec<_> = (0..100_000)
.map(|_| Complex::new(rng.gen(), rng.gen()))
.collect();
let modulate = Modulate::new(samples.clone().into_iter(), sample_rate, &config);
let mut demodulate = Demodulate::new(sample_rate, &config);
let mut pos = 0;
let mut channel = None;
for x in modulate {
if let Some(out) = demodulate.push(x) {
if channel.is_none() {
channel = Some(out / samples[pos]);
}
let channel = channel.unwrap();
//println!("{:?} {:?}", out.to_polar(), samples[pos].to_polar());
//println!("{} {}", out / samples[pos], channel);
assert!((out / samples[pos] - channel).norm() <= 1e-3);
pos += 1;
}
}
}
} | random_line_split | |
audio.rs | use crate::config::Config;
use crate::test::InputSampleStream;
use failure::Error;
use num::{Complex, Zero};
use portaudio as pa;
use std::sync::mpsc::{Receiver, Sender};
const CHANNELS: i32 = 1;
const FRAMES: u32 = 256;
const INTERLEAVED: bool = true;
fn run<'c, T>(
mut modulator: Modulate<'c, T>,
rx_sender: Sender<f32>,
sample_rate: f32,
) -> Result<(), pa::Error>
where
T: Iterator<Item = Complex<f32>>,
{
let pa = pa::PortAudio::new()?;
println!("PortAudio");
println!("version: {}", pa.version());
println!("version text: {:?}", pa.version_text());
println!("host count: {}", pa.host_api_count()?);
let default_host = pa.default_host_api()?;
println!("default host: {:#?}", pa.host_api_info(default_host));
let def_input = pa.default_input_device()?;
let input_info = pa.device_info(def_input)?;
println!("Default input device info: {:#?}", &input_info);
// Construct the input stream parameters.
let latency = input_info.default_low_input_latency;
let input_params = pa::StreamParameters::<f32>::new(def_input, CHANNELS, INTERLEAVED, latency);
let def_output = pa.default_output_device()?;
let output_info = pa.device_info(def_output)?;
println!("Default output device info: {:#?}", &output_info);
// Construct the output stream parameters.
let latency = output_info.default_low_output_latency;
let output_params =
pa::StreamParameters::<f32>::new(def_output, CHANNELS, INTERLEAVED, latency);
// Check that the stream format is supported.
pa.is_duplex_format_supported(input_params, output_params, sample_rate as f64)?;
// Construct the settings with which we'll open our duplex stream.
let settings =
pa::DuplexStreamSettings::new(input_params, output_params, sample_rate as f64, FRAMES);
let mut stream = pa.open_blocking_stream(settings)?;
stream.start()?;
// We'll use this function to wait for read/write availability.
fn wait_for_stream<F>(f: F, name: &str) -> u32
where
F: Fn() -> Result<pa::StreamAvailable, pa::error::Error>,
{
loop {
match f() {
Ok(available) => match available {
pa::StreamAvailable::Frames(frames) => return frames as u32,
pa::StreamAvailable::InputOverflowed => println!("Input stream has overflowed"),
pa::StreamAvailable::OutputUnderflowed => {
println!("Output stream has underflowed")
}
},
Err(err) => panic!(
"An error occurred while waiting for the {} stream: {}",
name, err
),
}
}
};
// Now start the main read/write loop! In this example, we pass
// the input buffer directly to the output buffer, so watch out
// for feedback.
loop {
// How many frames are available on the input stream?
let in_frames = wait_for_stream(|| stream.read_available(), "Read");
// If there are frames available, let's take them and add them
// to our buffer.
if in_frames > 0 {
let input_samples = stream.read(in_frames)?;
for samp in input_samples {
rx_sender.send(*samp).unwrap();
}
}
// How many frames are available for writing on the output stream?
let out_frames = wait_for_stream(|| stream.write_available(), "Write");
// If there are frames available for writing and we have some
// to write, then write!
if out_frames > 0 {
// If we have more than enough frames for writing, take
// them from the start of the buffer. Otherwise if we
// have less, just take what we can for now.
let write_frames = out_frames;
let n_write_samples = write_frames as usize * CHANNELS as usize;
let mut flag = false;
stream.write(write_frames, |output| {
for i in 0..n_write_samples {
if let Some(samp) = modulator.next() {
output[i] = samp;
} else {
println!("Tx samples finished. Exiting");
flag = true;
break;
}
}
})?;
if flag |
}
}
Ok(())
}
pub fn start_audio<'c>(
tx_receiver: Receiver<Complex<f32>>,
config: &'c Config,
) -> Result<(std::thread::JoinHandle<()>, AudioSampleStream<'c>), Error> {
// For the microphone
let (rx_sender, rx_receiver) = std::sync::mpsc::channel::<f32>();
let sample_rate = config.audio.sample_rate;
let config_c = config.clone();
let handle = std::thread::spawn(move || {
// Use the modulator to go from baseband to carrier frequency
let modulator = Modulate::new(tx_receiver.iter(), sample_rate, &config_c);
run(modulator, rx_sender, sample_rate).unwrap();
});
return Ok((
handle,
AudioSampleStream::new(rx_receiver, sample_rate as f32, config),
));
}
/// Stream of samples from an audio device
pub struct AudioSampleStream<'c> {
channel: Receiver<f32>,
demod: Demodulate<'c>,
}
impl<'c> AudioSampleStream<'c> {
fn new(channel: Receiver<f32>, sample_rate: f32, config: &'c Config) -> Self {
Self {
channel,
demod: Demodulate::new(sample_rate, config),
}
}
}
impl<'c> InputSampleStream for AudioSampleStream<'c> {}
impl<'c> Iterator for AudioSampleStream<'c> {
type Item = Complex<f32>;
fn next(&mut self) -> Option<Complex<f32>> {
loop {
let in_samp = if let Ok(samp) = self.channel.recv() {
samp
} else {
return None;
};
let out = self.demod.push(in_samp);
if out.is_some() {
return out;
}
}
}
}
/// Upconvert signal from baseband to carrier frequency
struct Modulate<'c, T>
where
T: Iterator<Item = Complex<f32>>,
{
config: &'c Config,
/// Our source of baseband samples
src: T,
/// Number of carrier samples to skip per baseband sample
to_skip: u64,
/// Sample rate of the audio signal
sample_rate: f32,
/// Total number of (audio) samples transmitted so far
num_samps: u64,
/// If true, input is done and we'll always return None
done: bool,
/// The two samples we are currently in between sending
cur_samps: (Complex<f32>, Complex<f32>),
/// The current sample we are sending
cur_ewma: Complex<f32>,
}
impl<'c, T> Modulate<'c, T>
where
T: Iterator<Item = Complex<f32>>,
{
fn new(src: T, sample_rate: f32, config: &'c Config) -> Self {
// For now, sample_rate has to be a multiple of bandwidth. Can
// remove restriction later
let to_skip = sample_rate / config.audio.bandwidth;
assert!((to_skip.round() - to_skip).abs() <= 1e-3);
let to_skip = 1; //to_skip.round() as u64;
Self {
config,
src,
to_skip,
sample_rate,
num_samps: 0,
done: false,
cur_samps: (Complex::zero(), Complex::zero()),
cur_ewma: Complex::zero(),
}
}
}
impl<'c, T> Iterator for Modulate<'c, T>
where
T: Iterator<Item = Complex<f32>>,
{
type Item = f32;
fn next(&mut self) -> Option<f32> {
if self.done {
return None;
}
// See if we need to update the current sample
if self.num_samps % self.to_skip == 0 {
if let Some(x) = self.src.next() {
self.cur_samps = (self.cur_samps.1, x);
} else {
self.done = true;
return None;
}
}
let pi2 = 2. * std::f32::consts::PI;
let e = Complex::new(
0.,
pi2 * self.num_samps as f32 / self.sample_rate * self.config.audio.center_frequency,
)
.exp();
self.num_samps += 1;
// Low-pass filter
// Frequency-domain sinc
//let f = (self.num_samps % self.to_skip) as f32 / self.to_skip as f32;
//let samp = prev * (1. - f) + cur * f;
// EWMA (equivalent to RC-filter)
// let alpha = 1. / (1. + self.to_skip as f32);
// self.cur_ewma = alpha * self.cur_samps.1 + (1. - alpha) * self.cur_ewma;
// let samp = self.cur_ewma;
let samp = self.cur_samps.1;
Some(e.re * samp.re + e.im * samp.im)
}
}
/// Convert to baseband from carrier frequency
struct Demodulate<'c> {
config: &'c Config,
/// Number of carrier samples to skip per baseband sample
to_skip: usize,
/// Sample rate of the audio signal
sample_rate: f32,
/// Total number of (audio) samples transmitted so far
num_samps: u64,
/// Average of the sample so far
samp_avg: Complex<f32>,
}
impl<'c> Demodulate<'c> {
fn new(sample_rate: f32, config: &'c Config) -> Self {
// For now, sample_rate has to be a multiple of bandwidth. Can
// remove restriction later
let to_skip = sample_rate / config.audio.bandwidth;
assert!((to_skip.round() - to_skip).abs() <= 1e-3);
let to_skip = to_skip.round() as usize;
Self {
config,
to_skip,
sample_rate,
num_samps: 0,
samp_avg: Complex::zero(),
}
}
/// Takes an audio sample, and if appropriate, returns a baseband
/// sample
fn push(&mut self, samp: f32) -> Option<Complex<f32>> {
// Add to the average
let pi2 = 2. * std::f32::consts::PI;
let e = Complex::new(
0.,
pi2 * self.num_samps as f32 / self.sample_rate * self.config.audio.center_frequency,
)
.exp();
self.samp_avg += samp * e;
// Should we output something?
let res = if self.num_samps % self.to_skip as u64 == 0 {
let res = Some(self.samp_avg / self.to_skip as f32);
self.samp_avg = Complex::zero();
res
} else {
None
};
self.num_samps += 1;
res
}
}
#[cfg(test)]
mod tests {
use super::{Demodulate, Modulate};
use crate::config::Config;
use num::Complex;
use rand::Rng;
#[test]
fn mod_demod() {
let config = Config::default();
let sample_rate = 44100.;
// The samples we'll transmit
let mut rng = rand_pcg::Pcg32::new(1, 1);
let samples: Vec<_> = (0..100_000)
.map(|_| Complex::new(rng.gen(), rng.gen()))
.collect();
let modulate = Modulate::new(samples.clone().into_iter(), sample_rate, &config);
let mut demodulate = Demodulate::new(sample_rate, &config);
let mut pos = 0;
let mut channel = None;
for x in modulate {
if let Some(out) = demodulate.push(x) {
if channel.is_none() {
channel = Some(out / samples[pos]);
}
let channel = channel.unwrap();
//println!("{:?} {:?}", out.to_polar(), samples[pos].to_polar());
//println!("{} {}", out / samples[pos], channel);
assert!((out / samples[pos] - channel).norm() <= 1e-3);
pos += 1;
}
}
}
}
| {
break;
} | conditional_block |
audio.rs | use crate::config::Config;
use crate::test::InputSampleStream;
use failure::Error;
use num::{Complex, Zero};
use portaudio as pa;
use std::sync::mpsc::{Receiver, Sender};
const CHANNELS: i32 = 1;
const FRAMES: u32 = 256;
const INTERLEAVED: bool = true;
fn run<'c, T>(
mut modulator: Modulate<'c, T>,
rx_sender: Sender<f32>,
sample_rate: f32,
) -> Result<(), pa::Error>
where
T: Iterator<Item = Complex<f32>>,
{
let pa = pa::PortAudio::new()?;
println!("PortAudio");
println!("version: {}", pa.version());
println!("version text: {:?}", pa.version_text());
println!("host count: {}", pa.host_api_count()?);
let default_host = pa.default_host_api()?;
println!("default host: {:#?}", pa.host_api_info(default_host));
let def_input = pa.default_input_device()?;
let input_info = pa.device_info(def_input)?;
println!("Default input device info: {:#?}", &input_info);
// Construct the input stream parameters.
let latency = input_info.default_low_input_latency;
let input_params = pa::StreamParameters::<f32>::new(def_input, CHANNELS, INTERLEAVED, latency);
let def_output = pa.default_output_device()?;
let output_info = pa.device_info(def_output)?;
println!("Default output device info: {:#?}", &output_info);
// Construct the output stream parameters.
let latency = output_info.default_low_output_latency;
let output_params =
pa::StreamParameters::<f32>::new(def_output, CHANNELS, INTERLEAVED, latency);
// Check that the stream format is supported.
pa.is_duplex_format_supported(input_params, output_params, sample_rate as f64)?;
// Construct the settings with which we'll open our duplex stream.
let settings =
pa::DuplexStreamSettings::new(input_params, output_params, sample_rate as f64, FRAMES);
let mut stream = pa.open_blocking_stream(settings)?;
stream.start()?;
// We'll use this function to wait for read/write availability.
fn wait_for_stream<F>(f: F, name: &str) -> u32
where
F: Fn() -> Result<pa::StreamAvailable, pa::error::Error>,
{
loop {
match f() {
Ok(available) => match available {
pa::StreamAvailable::Frames(frames) => return frames as u32,
pa::StreamAvailable::InputOverflowed => println!("Input stream has overflowed"),
pa::StreamAvailable::OutputUnderflowed => {
println!("Output stream has underflowed")
}
},
Err(err) => panic!(
"An error occurred while waiting for the {} stream: {}",
name, err
),
}
}
};
// Now start the main read/write loop! In this example, we pass
// the input buffer directly to the output buffer, so watch out
// for feedback.
loop {
// How many frames are available on the input stream?
let in_frames = wait_for_stream(|| stream.read_available(), "Read");
// If there are frames available, let's take them and add them
// to our buffer.
if in_frames > 0 {
let input_samples = stream.read(in_frames)?;
for samp in input_samples {
rx_sender.send(*samp).unwrap();
}
}
// How many frames are available for writing on the output stream?
let out_frames = wait_for_stream(|| stream.write_available(), "Write");
// If there are frames available for writing and we have some
// to write, then write!
if out_frames > 0 {
// If we have more than enough frames for writing, take
// them from the start of the buffer. Otherwise if we
// have less, just take what we can for now.
let write_frames = out_frames;
let n_write_samples = write_frames as usize * CHANNELS as usize;
let mut flag = false;
stream.write(write_frames, |output| {
for i in 0..n_write_samples {
if let Some(samp) = modulator.next() {
output[i] = samp;
} else {
println!("Tx samples finished. Exiting");
flag = true;
break;
}
}
})?;
if flag {
break;
}
}
}
Ok(())
}
pub fn start_audio<'c>(
tx_receiver: Receiver<Complex<f32>>,
config: &'c Config,
) -> Result<(std::thread::JoinHandle<()>, AudioSampleStream<'c>), Error> {
// For the microphone
let (rx_sender, rx_receiver) = std::sync::mpsc::channel::<f32>();
let sample_rate = config.audio.sample_rate;
let config_c = config.clone();
let handle = std::thread::spawn(move || {
// Use the modulator to go from baseband to carrier frequency
let modulator = Modulate::new(tx_receiver.iter(), sample_rate, &config_c);
run(modulator, rx_sender, sample_rate).unwrap();
});
return Ok((
handle,
AudioSampleStream::new(rx_receiver, sample_rate as f32, config),
));
}
/// Stream of samples from an audio device
pub struct AudioSampleStream<'c> {
channel: Receiver<f32>,
demod: Demodulate<'c>,
}
impl<'c> AudioSampleStream<'c> {
fn new(channel: Receiver<f32>, sample_rate: f32, config: &'c Config) -> Self {
Self {
channel,
demod: Demodulate::new(sample_rate, config),
}
}
}
impl<'c> InputSampleStream for AudioSampleStream<'c> {}
impl<'c> Iterator for AudioSampleStream<'c> {
type Item = Complex<f32>;
fn next(&mut self) -> Option<Complex<f32>> {
loop {
let in_samp = if let Ok(samp) = self.channel.recv() {
samp
} else {
return None;
};
let out = self.demod.push(in_samp);
if out.is_some() {
return out;
}
}
}
}
/// Upconvert signal from baseband to carrier frequency
struct Modulate<'c, T>
where
T: Iterator<Item = Complex<f32>>,
{
config: &'c Config,
/// Our source of baseband samples
src: T,
/// Number of carrier samples to skip per baseband sample
to_skip: u64,
/// Sample rate of the audio signal
sample_rate: f32,
/// Total number of (audio) samples transmitted so far
num_samps: u64,
/// If true, input is done and we'll always return None
done: bool,
/// The two samples we are currently in between sending
cur_samps: (Complex<f32>, Complex<f32>),
/// The current sample we are sending
cur_ewma: Complex<f32>,
}
impl<'c, T> Modulate<'c, T>
where
T: Iterator<Item = Complex<f32>>,
{
fn new(src: T, sample_rate: f32, config: &'c Config) -> Self {
// For now, sample_rate has to be a multiple of bandwidth. Can
// remove restriction later
let to_skip = sample_rate / config.audio.bandwidth;
assert!((to_skip.round() - to_skip).abs() <= 1e-3);
let to_skip = 1; //to_skip.round() as u64;
Self {
config,
src,
to_skip,
sample_rate,
num_samps: 0,
done: false,
cur_samps: (Complex::zero(), Complex::zero()),
cur_ewma: Complex::zero(),
}
}
}
impl<'c, T> Iterator for Modulate<'c, T>
where
T: Iterator<Item = Complex<f32>>,
{
type Item = f32;
fn next(&mut self) -> Option<f32> {
if self.done {
return None;
}
// See if we need to update the current sample
if self.num_samps % self.to_skip == 0 {
if let Some(x) = self.src.next() {
self.cur_samps = (self.cur_samps.1, x);
} else {
self.done = true;
return None;
}
}
let pi2 = 2. * std::f32::consts::PI;
let e = Complex::new(
0.,
pi2 * self.num_samps as f32 / self.sample_rate * self.config.audio.center_frequency,
)
.exp();
self.num_samps += 1;
// Low-pass filter
// Frequency-domain sinc
//let f = (self.num_samps % self.to_skip) as f32 / self.to_skip as f32;
//let samp = prev * (1. - f) + cur * f;
// EWMA (equivalent to RC-filter)
// let alpha = 1. / (1. + self.to_skip as f32);
// self.cur_ewma = alpha * self.cur_samps.1 + (1. - alpha) * self.cur_ewma;
// let samp = self.cur_ewma;
let samp = self.cur_samps.1;
Some(e.re * samp.re + e.im * samp.im)
}
}
/// Convert to baseband from carrier frequency
struct | <'c> {
config: &'c Config,
/// Number of carrier samples to skip per baseband sample
to_skip: usize,
/// Sample rate of the audio signal
sample_rate: f32,
/// Total number of (audio) samples transmitted so far
num_samps: u64,
/// Average of the sample so far
samp_avg: Complex<f32>,
}
impl<'c> Demodulate<'c> {
fn new(sample_rate: f32, config: &'c Config) -> Self {
// For now, sample_rate has to be a multiple of bandwidth. Can
// remove restriction later
let to_skip = sample_rate / config.audio.bandwidth;
assert!((to_skip.round() - to_skip).abs() <= 1e-3);
let to_skip = to_skip.round() as usize;
Self {
config,
to_skip,
sample_rate,
num_samps: 0,
samp_avg: Complex::zero(),
}
}
/// Takes an audio sample, and if appropriate, returns a baseband
/// sample
fn push(&mut self, samp: f32) -> Option<Complex<f32>> {
// Add to the average
let pi2 = 2. * std::f32::consts::PI;
let e = Complex::new(
0.,
pi2 * self.num_samps as f32 / self.sample_rate * self.config.audio.center_frequency,
)
.exp();
self.samp_avg += samp * e;
// Should we output something?
let res = if self.num_samps % self.to_skip as u64 == 0 {
let res = Some(self.samp_avg / self.to_skip as f32);
self.samp_avg = Complex::zero();
res
} else {
None
};
self.num_samps += 1;
res
}
}
#[cfg(test)]
mod tests {
use super::{Demodulate, Modulate};
use crate::config::Config;
use num::Complex;
use rand::Rng;
#[test]
fn mod_demod() {
let config = Config::default();
let sample_rate = 44100.;
// The samples we'll transmit
let mut rng = rand_pcg::Pcg32::new(1, 1);
let samples: Vec<_> = (0..100_000)
.map(|_| Complex::new(rng.gen(), rng.gen()))
.collect();
let modulate = Modulate::new(samples.clone().into_iter(), sample_rate, &config);
let mut demodulate = Demodulate::new(sample_rate, &config);
let mut pos = 0;
let mut channel = None;
for x in modulate {
if let Some(out) = demodulate.push(x) {
if channel.is_none() {
channel = Some(out / samples[pos]);
}
let channel = channel.unwrap();
//println!("{:?} {:?}", out.to_polar(), samples[pos].to_polar());
//println!("{} {}", out / samples[pos], channel);
assert!((out / samples[pos] - channel).norm() <= 1e-3);
pos += 1;
}
}
}
}
| Demodulate | identifier_name |
tests.rs | use quickcheck::{QuickCheck, StdGen, TestResult};
use snap::raw::{decompress_len, Decoder, Encoder};
use snap::Error;
#[cfg(feature = "cpp")]
use snappy_cpp as cpp;
// roundtrip is a macro that compresses the input, then decompresses the result
// and compares it with the original input. If they are not equal, then the
// test fails.
macro_rules! roundtrip {
($data:expr) => {{
let d = &$data[..];
assert_eq!(d, &*depress(&press(d)));
}};
}
// errored is a macro that tries to decompress the input and asserts that it
// resulted in an error. If decompression was successful, then the test fails.
macro_rules! errored {
($data:expr, $err:expr) => {
errored!($data, $err, false);
};
($data:expr, $err:expr, $bad_header:expr) => {{
let d = &$data[..];
let mut buf = if $bad_header {
assert_eq!($err, decompress_len(d).unwrap_err());
vec![0; 1024]
} else {
vec![0; decompress_len(d).unwrap()]
};
match Decoder::new().decompress(d, &mut buf) {
Err(ref err) if err == &$err => {}
Err(ref err) => panic!(
"expected decompression to fail with {:?}, \
but got {:?}",
$err, err
),
Ok(n) => {
panic!(
"\nexpected decompression to fail, but did not!
original (len == {:?})
----------------------
{:?}
decompressed (len == {:?})
--------------------------
{:?}
",
d.len(),
d,
n,
buf
);
}
}
}};
}
// testtrip is a macro that defines a test that compresses the input, then
// decompresses the result and compares it with the original input. If they are
// not equal, then the test fails. This test is performed both on the raw
// Snappy format and the framed Snappy format.
//
// If tests are compiled with the cpp feature, then this also tests that the
// C++ library compresses to the same bytes that the Rust library does.
macro_rules! testtrip {
($name:ident, $data:expr) => {
mod $name {
#[test]
fn roundtrip_raw() {
use super::{depress, press};
roundtrip!($data);
}
#[test]
fn roundtrip_frame() {
use super::{read_frame_depress, write_frame_press};
let d = &$data[..];
assert_eq!(d, &*read_frame_depress(&write_frame_press(d)));
}
#[test]
fn read_and_write_frame_encoder_match() {
use super::{read_frame_press, write_frame_press};
let d = &$data[..];
assert_eq!(read_frame_press(d), write_frame_press(d));
}
#[test]
#[cfg(feature = "cpp")]
fn cmpcpp() {
use super::{press, press_cpp};
let data = &$data[..];
let rust = press(data);
let cpp = press_cpp(data);
if rust == cpp {
return;
}
panic!(
"\ncompression results are not equal!
original (len == {:?})
----------------------
{:?}
rust (len == {:?})
------------------
{:?}
cpp (len == {:?})
-----------------
{:?}
",
data.len(),
data,
rust.len(),
rust,
cpp.len(),
cpp
);
}
}
};
}
// testcorrupt is a macro that defines a test that decompresses the input,
// and if the result is anything other than the error given, the test fails.
macro_rules! testerrored {
($name:ident, $data:expr, $err:expr) => {
testerrored!($name, $data, $err, false);
};
($name:ident, $data:expr, $err:expr, $bad_header:expr) => {
#[test]
fn $name() {
errored!($data, $err, $bad_header);
}
};
}
// Simple test cases.
testtrip!(empty, &[]);
testtrip!(one_zero, &[0]);
// Roundtrip all of the benchmark data.
testtrip!(data_html, include_bytes!("../data/html"));
testtrip!(data_urls, include_bytes!("../data/urls.10K"));
testtrip!(data_jpg, include_bytes!("../data/fireworks.jpeg"));
testtrip!(data_pdf, include_bytes!("../data/paper-100k.pdf"));
testtrip!(data_html4, include_bytes!("../data/html_x_4"));
testtrip!(data_txt1, include_bytes!("../data/alice29.txt"));
testtrip!(data_txt2, include_bytes!("../data/asyoulik.txt"));
testtrip!(data_txt3, include_bytes!("../data/lcet10.txt"));
testtrip!(data_txt4, include_bytes!("../data/plrabn12.txt"));
testtrip!(data_pb, include_bytes!("../data/geo.protodata"));
testtrip!(data_gaviota, include_bytes!("../data/kppkn.gtb"));
testtrip!(data_golden, include_bytes!("../data/Mark.Twain-Tom.Sawyer.txt"));
// Do it again, with the Snappy frame format.
// Roundtrip the golden data, starting with the compressed bytes.
#[test]
fn data_golden_rev() {
let data = include_bytes!("../data/Mark.Twain-Tom.Sawyer.txt.rawsnappy");
let data = &data[..];
assert_eq!(data, &*press(&depress(data)));
}
// Miscellaneous tests.
#[test]
fn small_copy() {
use std::iter::repeat;
for i in 0..32 {
let inner: String = repeat('b').take(i).collect();
roundtrip!(format!("aaaa{}aaaabbbb", inner).into_bytes());
}
}
#[test]
fn small_regular() {
let mut i = 1;
while i < 20_000 {
let mut buf = vec![0; i];
for (j, x) in buf.iter_mut().enumerate() {
*x = (j % 10) as u8 + b'a';
}
roundtrip!(buf);
i += 23;
}
}
// Test that triggered an out of bounds write.
#[test]
fn decompress_copy_close_to_end_1() {
let buf = [
27,
0b000010_00,
1,
2,
3,
0b000_000_10,
3,
0,
0b010110_00,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
];
let decompressed = [
1, 2, 3, 1, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26,
];
assert_eq!(decompressed, &*depress(&buf));
}
#[test]
fn decompress_copy_close_to_end_2() {
let buf = [
28,
0b000010_00,
1,
2,
3,
0b000_000_10,
3,
0,
0b010111_00,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
];
let decompressed = [
1, 2, 3, 1, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26, 27,
];
assert_eq!(decompressed, &*depress(&buf));
}
// The `read::FrameEncoder` code uses different code paths depending on buffer
// size, so let's test both. Also, very small buffers are a good stress test.
#[test]
fn read_frame_encoder_big_and_little_buffers() {
use snap::read;
use std::io::{BufReader, Read};
let bytes = &include_bytes!("../data/html")[..];
let mut big =
BufReader::with_capacity(1_000_000, read::FrameEncoder::new(bytes));
let mut big_out = vec![];
big.read_to_end(&mut big_out).unwrap();
// 5 bytes is small enough to break up headers, etc.
let mut little =
BufReader::with_capacity(5, read::FrameEncoder::new(bytes));
let mut little_out = vec![];
little.read_to_end(&mut little_out).unwrap();
assert_eq!(big_out, little_out);
}
// Tests decompression on malformed data.
// An empty buffer.
testerrored!(err_empty, &b""[..], Error::Empty);
// Decompress fewer bytes than the header reports.
testerrored!(
err_header_mismatch,
&b"\x05\x00a"[..],
Error::HeaderMismatch { expected_len: 5, got_len: 1 }
);
// An invalid varint (final byte has continuation bit set).
testerrored!(err_varint1, &b"\xFF"[..], Error::Header, true);
// A varint that overflows u64.
testerrored!(
err_varint2,
&b"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00"[..],
Error::Header,
true
);
// A varint that fits in u64 but overflows u32.
testerrored!(
err_varint3,
&b"\x80\x80\x80\x80\x10"[..],
Error::TooBig { given: 4294967296, max: 4294967295 },
true
);
// A literal whose length is too small.
// Since the literal length is 1, 'h' is read as a literal and 'i' is
// interpreted as a copy 1 operation missing its offset byte.
testerrored!(
err_lit,
&b"\x02\x00hi"[..],
Error::CopyRead { len: 1, src_len: 0 }
);
// A literal whose length is too big.
testerrored!(
err_lit_big1,
&b"\x02\xechi"[..],
Error::Literal { len: 60, src_len: 2, dst_len: 2 }
);
// A literal whose length is too big, requires 1 extra byte to be read, and
// src is too short to read that byte.
testerrored!(
err_lit_big2a,
&b"\x02\xf0hi"[..],
Error::Literal { len: 4, src_len: 2, dst_len: 2 }
);
// A literal whose length is too big, requires 1 extra byte to be read,
// src is too short to read the full literal.
testerrored!(
err_lit_big2b,
&b"\x02\xf0hi\x00\x00\x00"[..],
Error::Literal {
len: 105, // because 105 == 'h' as u8 + 1
src_len: 4,
dst_len: 2,
}
);
// A copy 1 operation that stops at the tag byte. This fails because there's
// no byte to read for the copy offset.
testerrored!(
err_copy1,
&b"\x02\x00a\x01"[..],
Error::CopyRead { len: 1, src_len: 0 }
);
// A copy 2 operation that stops at the tag byte and another copy 2 operation
// that stops after the first byte in the offset.
testerrored!(
err_copy2a,
&b"\x11\x00a\x3e"[..],
Error::CopyRead { len: 2, src_len: 0 }
);
testerrored!(
err_copy2b,
&b"\x11\x00a\x3e\x01"[..],
Error::CopyRead { len: 2, src_len: 1 }
);
// Same as copy 2, but for copy 4.
testerrored!(
err_copy3a,
&b"\x11\x00a\x3f"[..],
Error::CopyRead { len: 4, src_len: 0 }
);
testerrored!(
err_copy3b,
&b"\x11\x00a\x3f\x00"[..],
Error::CopyRead { len: 4, src_len: 1 }
);
testerrored!(
err_copy3c,
&b"\x11\x00a\x3f\x00\x00"[..],
Error::CopyRead { len: 4, src_len: 2 }
);
testerrored!(
err_copy3d,
&b"\x11\x00a\x3f\x00\x00\x00"[..],
Error::CopyRead { len: 4, src_len: 3 }
);
// A copy operation whose offset is zero.
testerrored!(
err_copy_offset_zero,
&b"\x11\x00a\x01\x00"[..],
Error::Offset { offset: 0, dst_pos: 1 }
);
// A copy operation whose offset is too big.
testerrored!(
err_copy_offset_big,
&b"\x11\x00a\x01\xFF"[..],
Error::Offset { offset: 255, dst_pos: 1 }
);
// A copy operation whose length is too big.
testerrored!(
err_copy_len_big,
&b"\x05\x00a\x1d\x01"[..],
Error::CopyWrite { len: 11, dst_len: 4 }
);
// Selected random inputs pulled from quickcheck failure witnesses.
testtrip!(
random1,
&[
0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0, 5, 0, 0,
1, 1, 0, 0, 1, 2, 0, 0, 2, 1, 0, 0, 2, 2, 0, 0, 0, 6, 0, 0, 3, 1, 0,
0, 0, 7, 0, 0, 1, 3, 0, 0, 0, 8, 0, 0, 2, 3, 0, 0, 0, 9, 0, 0, 1, 4,
0, 0, 1, 0, 0, 3, 0, 0, 1, 0, 1, 0, 0, 0, 10, 0, 0, 0, 0, 2, 4, 0, 0,
2, 0, 0, 3, 0, 1, 0, 0, 1, 5, 0, 0, 6, 0, 0, 0, 0, 11, 0, 0, 1, 6, 0,
0, 1, 7, 0, 0, 0, 12, 0, 0, 3, 2, 0, 0, 0, 13, 0, 0, 2, 5, 0, 0, 0, 3,
3, 0, 0, 0, 1, 8, 0, 0, 1, 0, 1, 0, 0, 0, 4, 1, 0, 0, 0, 0, 14, 0, 0,
0, 1, 9, 0, 0, 0, 1, 10, 0, 0, 0, 0, 1, 11, 0, 0, 0, 1, 0, 2, 0, 0, 0,
1, 1, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 2, 6, 0,
0, 0, 0, 0, 1, 12, 0, 0, 0, 0, 0, 3, 4, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0,
0, 1, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
]
);
testtrip!(
random2,
&[
10, 2, 14, 13, 0, 8, 2, 10, 2, 14, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0,
]
);
testtrip!(
random3,
&[0, 0, 0, 4, 1, 4, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,]
);
testtrip!(
random4,
&[
0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0, 5, 0, 0,
1, 1, 0, 0, 1, 2, 0, 0, 1, 3, 0, 0, 1, 4, 0, 0, 2, 1, 0, 0, 0, 4, 0,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
);
// QuickCheck properties for testing that random data roundtrips.
// These properties tend to produce the inputs for the "random" tests above.
#[test]
fn qc_roundtrip() {
fn p(bytes: Vec<u8>) -> bool {
depress(&press(&bytes)) == bytes
}
QuickCheck::new()
.gen(StdGen::new(rand::thread_rng(), 10_000))
.tests(1_000)
.quickcheck(p as fn(_) -> _);
}
#[test]
fn qc_roundtrip_stream() {
fn p(bytes: Vec<u8>) -> TestResult {
if bytes.is_empty() |
TestResult::from_bool(
read_frame_depress(&write_frame_press(&bytes)) == bytes,
)
}
QuickCheck::new()
.gen(StdGen::new(rand::thread_rng(), 10_000))
.tests(1_000)
.quickcheck(p as fn(_) -> _);
}
#[test]
fn test_short_input() {
// Regression test for https://github.com/BurntSushi/rust-snappy/issues/42
use snap::read;
use std::io::Read;
let err =
read::FrameDecoder::new(&b"123"[..]).read_to_end(&mut Vec::new());
assert_eq!(err.unwrap_err().kind(), std::io::ErrorKind::UnexpectedEof);
}
#[test]
#[cfg(feature = "cpp")]
fn qc_cmpcpp() {
fn p(bytes: Vec<u8>) -> bool {
press(&bytes) == press_cpp(&bytes)
}
QuickCheck::new()
.gen(StdGen::new(rand::thread_rng(), 10_000))
.tests(10_000)
.quickcheck(p as fn(_) -> _);
}
// Regression tests.
// See: https://github.com/BurntSushi/rust-snappy/issues/3
#[cfg(target_pointer_width = "32")]
testerrored!(
err_lit_len_overflow1,
&b"\x11\x00\x00\xfc\xfe\xff\xff\xff"[..],
Error::Literal { len: std::u32::MAX as u64, src_len: 0, dst_len: 16 }
);
#[cfg(target_pointer_width = "32")]
testerrored!(
err_lit_len_overflow2,
&b"\x11\x00\x00\xfc\xff\xff\xff\xff"[..],
Error::Literal { len: std::u32::MAX as u64 + 1, src_len: 0, dst_len: 16 }
);
// Helper functions.
fn press(bytes: &[u8]) -> Vec<u8> {
Encoder::new().compress_vec(bytes).unwrap()
}
fn depress(bytes: &[u8]) -> Vec<u8> {
Decoder::new().decompress_vec(bytes).unwrap()
}
fn write_frame_press(bytes: &[u8]) -> Vec<u8> {
use snap::write;
use std::io::Write;
let mut wtr = write::FrameEncoder::new(vec![]);
wtr.write_all(bytes).unwrap();
wtr.into_inner().unwrap()
}
fn read_frame_depress(bytes: &[u8]) -> Vec<u8> {
use snap::read;
use std::io::Read;
let mut buf = vec![];
read::FrameDecoder::new(bytes).read_to_end(&mut buf).unwrap();
buf
}
fn read_frame_press(bytes: &[u8]) -> Vec<u8> {
use snap::read;
use std::io::Read;
let mut buf = vec![];
read::FrameEncoder::new(bytes).read_to_end(&mut buf).unwrap();
buf
}
#[cfg(feature = "cpp")]
fn press_cpp(bytes: &[u8]) -> Vec<u8> {
use snap::raw::max_compress_len;
let mut buf = vec![0; max_compress_len(bytes.len())];
let n = cpp::compress(bytes, &mut buf).unwrap();
buf.truncate(n);
buf
}
| {
return TestResult::discard();
} | conditional_block |
tests.rs | use quickcheck::{QuickCheck, StdGen, TestResult};
use snap::raw::{decompress_len, Decoder, Encoder};
use snap::Error;
#[cfg(feature = "cpp")]
use snappy_cpp as cpp;
// roundtrip is a macro that compresses the input, then decompresses the result
// and compares it with the original input. If they are not equal, then the
// test fails.
macro_rules! roundtrip {
($data:expr) => {{
let d = &$data[..];
assert_eq!(d, &*depress(&press(d)));
}};
}
// errored is a macro that tries to decompress the input and asserts that it
// resulted in an error. If decompression was successful, then the test fails.
macro_rules! errored {
($data:expr, $err:expr) => {
errored!($data, $err, false);
};
($data:expr, $err:expr, $bad_header:expr) => {{
let d = &$data[..];
let mut buf = if $bad_header {
assert_eq!($err, decompress_len(d).unwrap_err());
vec![0; 1024]
} else {
vec![0; decompress_len(d).unwrap()]
};
match Decoder::new().decompress(d, &mut buf) {
Err(ref err) if err == &$err => {}
Err(ref err) => panic!(
"expected decompression to fail with {:?}, \
but got {:?}",
$err, err
),
Ok(n) => {
panic!(
"\nexpected decompression to fail, but did not!
original (len == {:?})
----------------------
{:?}
decompressed (len == {:?})
--------------------------
{:?}
",
d.len(),
d,
n,
buf
);
}
}
}};
}
// testtrip is a macro that defines a test that compresses the input, then
// decompresses the result and compares it with the original input. If they are
// not equal, then the test fails. This test is performed both on the raw
// Snappy format and the framed Snappy format.
//
// If tests are compiled with the cpp feature, then this also tests that the
// C++ library compresses to the same bytes that the Rust library does.
macro_rules! testtrip {
($name:ident, $data:expr) => {
mod $name {
#[test]
fn roundtrip_raw() {
use super::{depress, press};
roundtrip!($data);
}
#[test]
fn roundtrip_frame() {
use super::{read_frame_depress, write_frame_press};
let d = &$data[..];
assert_eq!(d, &*read_frame_depress(&write_frame_press(d)));
}
#[test]
fn read_and_write_frame_encoder_match() {
use super::{read_frame_press, write_frame_press};
let d = &$data[..];
assert_eq!(read_frame_press(d), write_frame_press(d));
}
#[test]
#[cfg(feature = "cpp")]
fn cmpcpp() {
use super::{press, press_cpp};
let data = &$data[..];
let rust = press(data);
let cpp = press_cpp(data);
if rust == cpp {
return;
}
panic!(
"\ncompression results are not equal!
original (len == {:?})
----------------------
{:?}
rust (len == {:?})
------------------
{:?}
cpp (len == {:?})
-----------------
{:?}
",
data.len(),
data,
rust.len(),
rust,
cpp.len(),
cpp
);
}
}
};
}
// testcorrupt is a macro that defines a test that decompresses the input,
// and if the result is anything other than the error given, the test fails.
macro_rules! testerrored {
($name:ident, $data:expr, $err:expr) => {
testerrored!($name, $data, $err, false);
};
($name:ident, $data:expr, $err:expr, $bad_header:expr) => {
#[test]
fn $name() {
errored!($data, $err, $bad_header);
}
};
}
// Simple test cases.
testtrip!(empty, &[]);
testtrip!(one_zero, &[0]);
// Roundtrip all of the benchmark data.
testtrip!(data_html, include_bytes!("../data/html"));
testtrip!(data_urls, include_bytes!("../data/urls.10K"));
testtrip!(data_jpg, include_bytes!("../data/fireworks.jpeg"));
testtrip!(data_pdf, include_bytes!("../data/paper-100k.pdf"));
testtrip!(data_html4, include_bytes!("../data/html_x_4"));
testtrip!(data_txt1, include_bytes!("../data/alice29.txt"));
testtrip!(data_txt2, include_bytes!("../data/asyoulik.txt"));
testtrip!(data_txt3, include_bytes!("../data/lcet10.txt"));
testtrip!(data_txt4, include_bytes!("../data/plrabn12.txt"));
testtrip!(data_pb, include_bytes!("../data/geo.protodata"));
testtrip!(data_gaviota, include_bytes!("../data/kppkn.gtb"));
testtrip!(data_golden, include_bytes!("../data/Mark.Twain-Tom.Sawyer.txt"));
// Do it again, with the Snappy frame format.
// Roundtrip the golden data, starting with the compressed bytes.
#[test]
fn data_golden_rev() {
let data = include_bytes!("../data/Mark.Twain-Tom.Sawyer.txt.rawsnappy");
let data = &data[..];
assert_eq!(data, &*press(&depress(data)));
}
// Miscellaneous tests.
#[test]
fn small_copy() {
use std::iter::repeat;
for i in 0..32 {
let inner: String = repeat('b').take(i).collect();
roundtrip!(format!("aaaa{}aaaabbbb", inner).into_bytes());
}
}
#[test]
fn small_regular() {
let mut i = 1;
while i < 20_000 {
let mut buf = vec![0; i];
for (j, x) in buf.iter_mut().enumerate() {
*x = (j % 10) as u8 + b'a';
}
roundtrip!(buf);
i += 23;
}
}
// Test that triggered an out of bounds write.
#[test]
fn decompress_copy_close_to_end_1() {
let buf = [
27,
0b000010_00,
1,
2,
3,
0b000_000_10,
3,
0,
0b010110_00,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
];
let decompressed = [
1, 2, 3, 1, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26,
];
assert_eq!(decompressed, &*depress(&buf));
}
#[test]
fn decompress_copy_close_to_end_2() {
let buf = [
28,
0b000010_00,
1,
2,
3,
0b000_000_10,
3,
0,
0b010111_00,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
];
let decompressed = [
1, 2, 3, 1, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26, 27,
];
assert_eq!(decompressed, &*depress(&buf));
}
// The `read::FrameEncoder` code uses different code paths depending on buffer
// size, so let's test both. Also, very small buffers are a good stress test.
#[test]
fn read_frame_encoder_big_and_little_buffers() {
use snap::read;
use std::io::{BufReader, Read};
let bytes = &include_bytes!("../data/html")[..];
let mut big =
BufReader::with_capacity(1_000_000, read::FrameEncoder::new(bytes));
let mut big_out = vec![];
big.read_to_end(&mut big_out).unwrap();
// 5 bytes is small enough to break up headers, etc.
let mut little =
BufReader::with_capacity(5, read::FrameEncoder::new(bytes));
let mut little_out = vec![];
little.read_to_end(&mut little_out).unwrap();
assert_eq!(big_out, little_out);
}
// Tests decompression on malformed data.
// An empty buffer.
testerrored!(err_empty, &b""[..], Error::Empty);
// Decompress fewer bytes than the header reports.
testerrored!(
err_header_mismatch,
&b"\x05\x00a"[..],
Error::HeaderMismatch { expected_len: 5, got_len: 1 }
);
// An invalid varint (final byte has continuation bit set).
testerrored!(err_varint1, &b"\xFF"[..], Error::Header, true);
// A varint that overflows u64.
testerrored!(
err_varint2,
&b"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00"[..],
Error::Header,
true
);
// A varint that fits in u64 but overflows u32.
testerrored!(
err_varint3,
&b"\x80\x80\x80\x80\x10"[..],
Error::TooBig { given: 4294967296, max: 4294967295 },
true
);
// A literal whose length is too small.
// Since the literal length is 1, 'h' is read as a literal and 'i' is
// interpreted as a copy 1 operation missing its offset byte.
testerrored!(
err_lit,
&b"\x02\x00hi"[..],
Error::CopyRead { len: 1, src_len: 0 }
);
// A literal whose length is too big.
testerrored!(
err_lit_big1,
&b"\x02\xechi"[..],
Error::Literal { len: 60, src_len: 2, dst_len: 2 }
);
// A literal whose length is too big, requires 1 extra byte to be read, and
// src is too short to read that byte.
testerrored!(
err_lit_big2a,
&b"\x02\xf0hi"[..],
Error::Literal { len: 4, src_len: 2, dst_len: 2 }
);
// A literal whose length is too big, requires 1 extra byte to be read,
// src is too short to read the full literal.
testerrored!(
err_lit_big2b,
&b"\x02\xf0hi\x00\x00\x00"[..],
Error::Literal {
len: 105, // because 105 == 'h' as u8 + 1
src_len: 4,
dst_len: 2,
}
);
// A copy 1 operation that stops at the tag byte. This fails because there's
// no byte to read for the copy offset.
testerrored!(
err_copy1,
&b"\x02\x00a\x01"[..],
Error::CopyRead { len: 1, src_len: 0 }
);
// A copy 2 operation that stops at the tag byte and another copy 2 operation
// that stops after the first byte in the offset.
testerrored!(
err_copy2a,
&b"\x11\x00a\x3e"[..],
Error::CopyRead { len: 2, src_len: 0 }
);
testerrored!(
err_copy2b,
&b"\x11\x00a\x3e\x01"[..],
Error::CopyRead { len: 2, src_len: 1 }
);
// Same as copy 2, but for copy 4.
testerrored!(
err_copy3a,
&b"\x11\x00a\x3f"[..],
Error::CopyRead { len: 4, src_len: 0 }
);
testerrored!(
err_copy3b,
&b"\x11\x00a\x3f\x00"[..],
Error::CopyRead { len: 4, src_len: 1 }
);
testerrored!(
err_copy3c,
&b"\x11\x00a\x3f\x00\x00"[..],
Error::CopyRead { len: 4, src_len: 2 }
);
testerrored!(
err_copy3d,
&b"\x11\x00a\x3f\x00\x00\x00"[..],
Error::CopyRead { len: 4, src_len: 3 }
);
// A copy operation whose offset is zero.
testerrored!(
err_copy_offset_zero,
&b"\x11\x00a\x01\x00"[..],
Error::Offset { offset: 0, dst_pos: 1 }
);
// A copy operation whose offset is too big.
testerrored!(
err_copy_offset_big,
&b"\x11\x00a\x01\xFF"[..],
Error::Offset { offset: 255, dst_pos: 1 }
);
// A copy operation whose length is too big.
testerrored!(
err_copy_len_big,
&b"\x05\x00a\x1d\x01"[..],
Error::CopyWrite { len: 11, dst_len: 4 }
);
// Selected random inputs pulled from quickcheck failure witnesses.
testtrip!(
random1,
&[
0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0, 5, 0, 0,
1, 1, 0, 0, 1, 2, 0, 0, 2, 1, 0, 0, 2, 2, 0, 0, 0, 6, 0, 0, 3, 1, 0,
0, 0, 7, 0, 0, 1, 3, 0, 0, 0, 8, 0, 0, 2, 3, 0, 0, 0, 9, 0, 0, 1, 4,
0, 0, 1, 0, 0, 3, 0, 0, 1, 0, 1, 0, 0, 0, 10, 0, 0, 0, 0, 2, 4, 0, 0,
2, 0, 0, 3, 0, 1, 0, 0, 1, 5, 0, 0, 6, 0, 0, 0, 0, 11, 0, 0, 1, 6, 0,
0, 1, 7, 0, 0, 0, 12, 0, 0, 3, 2, 0, 0, 0, 13, 0, 0, 2, 5, 0, 0, 0, 3,
3, 0, 0, 0, 1, 8, 0, 0, 1, 0, 1, 0, 0, 0, 4, 1, 0, 0, 0, 0, 14, 0, 0,
0, 1, 9, 0, 0, 0, 1, 10, 0, 0, 0, 0, 1, 11, 0, 0, 0, 1, 0, 2, 0, 0, 0,
1, 1, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 2, 6, 0,
0, 0, 0, 0, 1, 12, 0, 0, 0, 0, 0, 3, 4, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0,
0, 1, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
]
);
testtrip!(
random2,
&[
10, 2, 14, 13, 0, 8, 2, 10, 2, 14, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0,
]
);
testtrip!(
random3,
&[0, 0, 0, 4, 1, 4, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,]
);
testtrip!(
random4,
&[
0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0, 5, 0, 0,
1, 1, 0, 0, 1, 2, 0, 0, 1, 3, 0, 0, 1, 4, 0, 0, 2, 1, 0, 0, 0, 4, 0,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
);
// QuickCheck properties for testing that random data roundtrips.
// These properties tend to produce the inputs for the "random" tests above.
#[test]
fn qc_roundtrip() {
fn p(bytes: Vec<u8>) -> bool {
depress(&press(&bytes)) == bytes
}
QuickCheck::new()
.gen(StdGen::new(rand::thread_rng(), 10_000))
.tests(1_000)
.quickcheck(p as fn(_) -> _);
}
#[test]
fn qc_roundtrip_stream() {
fn p(bytes: Vec<u8>) -> TestResult {
if bytes.is_empty() {
return TestResult::discard();
}
TestResult::from_bool(
read_frame_depress(&write_frame_press(&bytes)) == bytes,
)
}
QuickCheck::new()
.gen(StdGen::new(rand::thread_rng(), 10_000))
.tests(1_000)
.quickcheck(p as fn(_) -> _);
}
#[test]
fn test_short_input() {
// Regression test for https://github.com/BurntSushi/rust-snappy/issues/42
use snap::read;
use std::io::Read;
let err =
read::FrameDecoder::new(&b"123"[..]).read_to_end(&mut Vec::new());
assert_eq!(err.unwrap_err().kind(), std::io::ErrorKind::UnexpectedEof);
}
#[test]
#[cfg(feature = "cpp")]
fn qc_cmpcpp() {
fn p(bytes: Vec<u8>) -> bool {
press(&bytes) == press_cpp(&bytes)
}
QuickCheck::new()
.gen(StdGen::new(rand::thread_rng(), 10_000))
.tests(10_000)
.quickcheck(p as fn(_) -> _);
}
// Regression tests.
// See: https://github.com/BurntSushi/rust-snappy/issues/3
#[cfg(target_pointer_width = "32")]
testerrored!(
err_lit_len_overflow1,
&b"\x11\x00\x00\xfc\xfe\xff\xff\xff"[..],
Error::Literal { len: std::u32::MAX as u64, src_len: 0, dst_len: 16 }
);
#[cfg(target_pointer_width = "32")]
testerrored!(
err_lit_len_overflow2,
&b"\x11\x00\x00\xfc\xff\xff\xff\xff"[..],
Error::Literal { len: std::u32::MAX as u64 + 1, src_len: 0, dst_len: 16 }
);
// Helper functions.
fn press(bytes: &[u8]) -> Vec<u8> {
Encoder::new().compress_vec(bytes).unwrap()
}
fn | (bytes: &[u8]) -> Vec<u8> {
Decoder::new().decompress_vec(bytes).unwrap()
}
fn write_frame_press(bytes: &[u8]) -> Vec<u8> {
use snap::write;
use std::io::Write;
let mut wtr = write::FrameEncoder::new(vec![]);
wtr.write_all(bytes).unwrap();
wtr.into_inner().unwrap()
}
fn read_frame_depress(bytes: &[u8]) -> Vec<u8> {
use snap::read;
use std::io::Read;
let mut buf = vec![];
read::FrameDecoder::new(bytes).read_to_end(&mut buf).unwrap();
buf
}
fn read_frame_press(bytes: &[u8]) -> Vec<u8> {
use snap::read;
use std::io::Read;
let mut buf = vec![];
read::FrameEncoder::new(bytes).read_to_end(&mut buf).unwrap();
buf
}
#[cfg(feature = "cpp")]
fn press_cpp(bytes: &[u8]) -> Vec<u8> {
use snap::raw::max_compress_len;
let mut buf = vec![0; max_compress_len(bytes.len())];
let n = cpp::compress(bytes, &mut buf).unwrap();
buf.truncate(n);
buf
}
| depress | identifier_name |
tests.rs | use quickcheck::{QuickCheck, StdGen, TestResult};
use snap::raw::{decompress_len, Decoder, Encoder};
use snap::Error;
#[cfg(feature = "cpp")]
use snappy_cpp as cpp;
// roundtrip is a macro that compresses the input, then decompresses the result
// and compares it with the original input. If they are not equal, then the
// test fails.
macro_rules! roundtrip {
($data:expr) => {{
let d = &$data[..];
assert_eq!(d, &*depress(&press(d)));
}};
}
// errored is a macro that tries to decompress the input and asserts that it
// resulted in an error. If decompression was successful, then the test fails.
macro_rules! errored {
($data:expr, $err:expr) => {
errored!($data, $err, false);
};
($data:expr, $err:expr, $bad_header:expr) => {{
let d = &$data[..];
let mut buf = if $bad_header {
assert_eq!($err, decompress_len(d).unwrap_err());
vec![0; 1024]
} else {
vec![0; decompress_len(d).unwrap()]
};
match Decoder::new().decompress(d, &mut buf) {
Err(ref err) if err == &$err => {}
Err(ref err) => panic!(
"expected decompression to fail with {:?}, \
but got {:?}",
$err, err
),
Ok(n) => {
panic!(
"\nexpected decompression to fail, but did not!
original (len == {:?})
----------------------
{:?}
decompressed (len == {:?})
--------------------------
{:?}
",
d.len(),
d,
n,
buf
);
}
}
}};
}
// testtrip is a macro that defines a test that compresses the input, then
// decompresses the result and compares it with the original input. If they are
// not equal, then the test fails. This test is performed both on the raw
// Snappy format and the framed Snappy format.
//
// If tests are compiled with the cpp feature, then this also tests that the
// C++ library compresses to the same bytes that the Rust library does.
macro_rules! testtrip {
($name:ident, $data:expr) => {
mod $name {
#[test]
fn roundtrip_raw() {
use super::{depress, press};
roundtrip!($data);
}
#[test]
fn roundtrip_frame() {
use super::{read_frame_depress, write_frame_press};
let d = &$data[..];
assert_eq!(d, &*read_frame_depress(&write_frame_press(d)));
}
#[test]
fn read_and_write_frame_encoder_match() {
use super::{read_frame_press, write_frame_press};
let d = &$data[..];
assert_eq!(read_frame_press(d), write_frame_press(d));
}
#[test]
#[cfg(feature = "cpp")]
fn cmpcpp() {
use super::{press, press_cpp};
let data = &$data[..];
let rust = press(data);
let cpp = press_cpp(data);
if rust == cpp {
return;
}
panic!(
"\ncompression results are not equal!
original (len == {:?})
----------------------
{:?}
rust (len == {:?})
------------------
{:?}
cpp (len == {:?})
-----------------
{:?}
",
data.len(),
data,
rust.len(),
rust,
cpp.len(),
cpp
);
}
}
};
}
// testcorrupt is a macro that defines a test that decompresses the input,
// and if the result is anything other than the error given, the test fails.
macro_rules! testerrored {
($name:ident, $data:expr, $err:expr) => {
testerrored!($name, $data, $err, false);
};
($name:ident, $data:expr, $err:expr, $bad_header:expr) => {
#[test]
fn $name() {
errored!($data, $err, $bad_header);
}
};
}
// Simple test cases.
testtrip!(empty, &[]);
testtrip!(one_zero, &[0]);
// Roundtrip all of the benchmark data.
testtrip!(data_html, include_bytes!("../data/html"));
testtrip!(data_urls, include_bytes!("../data/urls.10K"));
testtrip!(data_jpg, include_bytes!("../data/fireworks.jpeg"));
testtrip!(data_pdf, include_bytes!("../data/paper-100k.pdf"));
testtrip!(data_html4, include_bytes!("../data/html_x_4"));
testtrip!(data_txt1, include_bytes!("../data/alice29.txt"));
testtrip!(data_txt2, include_bytes!("../data/asyoulik.txt"));
testtrip!(data_txt3, include_bytes!("../data/lcet10.txt"));
testtrip!(data_txt4, include_bytes!("../data/plrabn12.txt"));
testtrip!(data_pb, include_bytes!("../data/geo.protodata"));
testtrip!(data_gaviota, include_bytes!("../data/kppkn.gtb"));
testtrip!(data_golden, include_bytes!("../data/Mark.Twain-Tom.Sawyer.txt"));
// Do it again, with the Snappy frame format.
// Roundtrip the golden data, starting with the compressed bytes.
#[test]
fn data_golden_rev() |
// Miscellaneous tests.
#[test]
fn small_copy() {
use std::iter::repeat;
for i in 0..32 {
let inner: String = repeat('b').take(i).collect();
roundtrip!(format!("aaaa{}aaaabbbb", inner).into_bytes());
}
}
#[test]
fn small_regular() {
let mut i = 1;
while i < 20_000 {
let mut buf = vec![0; i];
for (j, x) in buf.iter_mut().enumerate() {
*x = (j % 10) as u8 + b'a';
}
roundtrip!(buf);
i += 23;
}
}
// Test that triggered an out of bounds write.
#[test]
fn decompress_copy_close_to_end_1() {
let buf = [
27,
0b000010_00,
1,
2,
3,
0b000_000_10,
3,
0,
0b010110_00,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
];
let decompressed = [
1, 2, 3, 1, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26,
];
assert_eq!(decompressed, &*depress(&buf));
}
#[test]
fn decompress_copy_close_to_end_2() {
let buf = [
28,
0b000010_00,
1,
2,
3,
0b000_000_10,
3,
0,
0b010111_00,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
];
let decompressed = [
1, 2, 3, 1, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26, 27,
];
assert_eq!(decompressed, &*depress(&buf));
}
// The `read::FrameEncoder` code uses different code paths depending on buffer
// size, so let's test both. Also, very small buffers are a good stress test.
#[test]
fn read_frame_encoder_big_and_little_buffers() {
use snap::read;
use std::io::{BufReader, Read};
let bytes = &include_bytes!("../data/html")[..];
let mut big =
BufReader::with_capacity(1_000_000, read::FrameEncoder::new(bytes));
let mut big_out = vec![];
big.read_to_end(&mut big_out).unwrap();
// 5 bytes is small enough to break up headers, etc.
let mut little =
BufReader::with_capacity(5, read::FrameEncoder::new(bytes));
let mut little_out = vec![];
little.read_to_end(&mut little_out).unwrap();
assert_eq!(big_out, little_out);
}
// Tests decompression on malformed data.
// An empty buffer.
testerrored!(err_empty, &b""[..], Error::Empty);
// Decompress fewer bytes than the header reports.
testerrored!(
err_header_mismatch,
&b"\x05\x00a"[..],
Error::HeaderMismatch { expected_len: 5, got_len: 1 }
);
// An invalid varint (final byte has continuation bit set).
testerrored!(err_varint1, &b"\xFF"[..], Error::Header, true);
// A varint that overflows u64.
testerrored!(
err_varint2,
&b"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00"[..],
Error::Header,
true
);
// A varint that fits in u64 but overflows u32.
testerrored!(
err_varint3,
&b"\x80\x80\x80\x80\x10"[..],
Error::TooBig { given: 4294967296, max: 4294967295 },
true
);
// A literal whose length is too small.
// Since the literal length is 1, 'h' is read as a literal and 'i' is
// interpreted as a copy 1 operation missing its offset byte.
testerrored!(
err_lit,
&b"\x02\x00hi"[..],
Error::CopyRead { len: 1, src_len: 0 }
);
// A literal whose length is too big.
testerrored!(
err_lit_big1,
&b"\x02\xechi"[..],
Error::Literal { len: 60, src_len: 2, dst_len: 2 }
);
// A literal whose length is too big, requires 1 extra byte to be read, and
// src is too short to read that byte.
testerrored!(
err_lit_big2a,
&b"\x02\xf0hi"[..],
Error::Literal { len: 4, src_len: 2, dst_len: 2 }
);
// A literal whose length is too big, requires 1 extra byte to be read,
// src is too short to read the full literal.
testerrored!(
err_lit_big2b,
&b"\x02\xf0hi\x00\x00\x00"[..],
Error::Literal {
len: 105, // because 105 == 'h' as u8 + 1
src_len: 4,
dst_len: 2,
}
);
// A copy 1 operation that stops at the tag byte. This fails because there's
// no byte to read for the copy offset.
testerrored!(
err_copy1,
&b"\x02\x00a\x01"[..],
Error::CopyRead { len: 1, src_len: 0 }
);
// A copy 2 operation that stops at the tag byte and another copy 2 operation
// that stops after the first byte in the offset.
testerrored!(
err_copy2a,
&b"\x11\x00a\x3e"[..],
Error::CopyRead { len: 2, src_len: 0 }
);
testerrored!(
err_copy2b,
&b"\x11\x00a\x3e\x01"[..],
Error::CopyRead { len: 2, src_len: 1 }
);
// Same as copy 2, but for copy 4.
testerrored!(
err_copy3a,
&b"\x11\x00a\x3f"[..],
Error::CopyRead { len: 4, src_len: 0 }
);
testerrored!(
err_copy3b,
&b"\x11\x00a\x3f\x00"[..],
Error::CopyRead { len: 4, src_len: 1 }
);
testerrored!(
err_copy3c,
&b"\x11\x00a\x3f\x00\x00"[..],
Error::CopyRead { len: 4, src_len: 2 }
);
testerrored!(
err_copy3d,
&b"\x11\x00a\x3f\x00\x00\x00"[..],
Error::CopyRead { len: 4, src_len: 3 }
);
// A copy operation whose offset is zero.
testerrored!(
err_copy_offset_zero,
&b"\x11\x00a\x01\x00"[..],
Error::Offset { offset: 0, dst_pos: 1 }
);
// A copy operation whose offset is too big.
testerrored!(
err_copy_offset_big,
&b"\x11\x00a\x01\xFF"[..],
Error::Offset { offset: 255, dst_pos: 1 }
);
// A copy operation whose length is too big.
testerrored!(
err_copy_len_big,
&b"\x05\x00a\x1d\x01"[..],
Error::CopyWrite { len: 11, dst_len: 4 }
);
// Selected random inputs pulled from quickcheck failure witnesses.
testtrip!(
random1,
&[
0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0, 5, 0, 0,
1, 1, 0, 0, 1, 2, 0, 0, 2, 1, 0, 0, 2, 2, 0, 0, 0, 6, 0, 0, 3, 1, 0,
0, 0, 7, 0, 0, 1, 3, 0, 0, 0, 8, 0, 0, 2, 3, 0, 0, 0, 9, 0, 0, 1, 4,
0, 0, 1, 0, 0, 3, 0, 0, 1, 0, 1, 0, 0, 0, 10, 0, 0, 0, 0, 2, 4, 0, 0,
2, 0, 0, 3, 0, 1, 0, 0, 1, 5, 0, 0, 6, 0, 0, 0, 0, 11, 0, 0, 1, 6, 0,
0, 1, 7, 0, 0, 0, 12, 0, 0, 3, 2, 0, 0, 0, 13, 0, 0, 2, 5, 0, 0, 0, 3,
3, 0, 0, 0, 1, 8, 0, 0, 1, 0, 1, 0, 0, 0, 4, 1, 0, 0, 0, 0, 14, 0, 0,
0, 1, 9, 0, 0, 0, 1, 10, 0, 0, 0, 0, 1, 11, 0, 0, 0, 1, 0, 2, 0, 0, 0,
1, 1, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 2, 6, 0,
0, 0, 0, 0, 1, 12, 0, 0, 0, 0, 0, 3, 4, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0,
0, 1, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
]
);
testtrip!(
random2,
&[
10, 2, 14, 13, 0, 8, 2, 10, 2, 14, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0,
]
);
testtrip!(
random3,
&[0, 0, 0, 4, 1, 4, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,]
);
testtrip!(
random4,
&[
0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0, 5, 0, 0,
1, 1, 0, 0, 1, 2, 0, 0, 1, 3, 0, 0, 1, 4, 0, 0, 2, 1, 0, 0, 0, 4, 0,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
);
// QuickCheck properties for testing that random data roundtrips.
// These properties tend to produce the inputs for the "random" tests above.
#[test]
fn qc_roundtrip() {
fn p(bytes: Vec<u8>) -> bool {
depress(&press(&bytes)) == bytes
}
QuickCheck::new()
.gen(StdGen::new(rand::thread_rng(), 10_000))
.tests(1_000)
.quickcheck(p as fn(_) -> _);
}
#[test]
fn qc_roundtrip_stream() {
fn p(bytes: Vec<u8>) -> TestResult {
if bytes.is_empty() {
return TestResult::discard();
}
TestResult::from_bool(
read_frame_depress(&write_frame_press(&bytes)) == bytes,
)
}
QuickCheck::new()
.gen(StdGen::new(rand::thread_rng(), 10_000))
.tests(1_000)
.quickcheck(p as fn(_) -> _);
}
#[test]
fn test_short_input() {
// Regression test for https://github.com/BurntSushi/rust-snappy/issues/42
use snap::read;
use std::io::Read;
let err =
read::FrameDecoder::new(&b"123"[..]).read_to_end(&mut Vec::new());
assert_eq!(err.unwrap_err().kind(), std::io::ErrorKind::UnexpectedEof);
}
#[test]
#[cfg(feature = "cpp")]
fn qc_cmpcpp() {
fn p(bytes: Vec<u8>) -> bool {
press(&bytes) == press_cpp(&bytes)
}
QuickCheck::new()
.gen(StdGen::new(rand::thread_rng(), 10_000))
.tests(10_000)
.quickcheck(p as fn(_) -> _);
}
// Regression tests.
// See: https://github.com/BurntSushi/rust-snappy/issues/3
#[cfg(target_pointer_width = "32")]
testerrored!(
err_lit_len_overflow1,
&b"\x11\x00\x00\xfc\xfe\xff\xff\xff"[..],
Error::Literal { len: std::u32::MAX as u64, src_len: 0, dst_len: 16 }
);
#[cfg(target_pointer_width = "32")]
testerrored!(
err_lit_len_overflow2,
&b"\x11\x00\x00\xfc\xff\xff\xff\xff"[..],
Error::Literal { len: std::u32::MAX as u64 + 1, src_len: 0, dst_len: 16 }
);
// Helper functions.
fn press(bytes: &[u8]) -> Vec<u8> {
Encoder::new().compress_vec(bytes).unwrap()
}
fn depress(bytes: &[u8]) -> Vec<u8> {
Decoder::new().decompress_vec(bytes).unwrap()
}
fn write_frame_press(bytes: &[u8]) -> Vec<u8> {
use snap::write;
use std::io::Write;
let mut wtr = write::FrameEncoder::new(vec![]);
wtr.write_all(bytes).unwrap();
wtr.into_inner().unwrap()
}
fn read_frame_depress(bytes: &[u8]) -> Vec<u8> {
use snap::read;
use std::io::Read;
let mut buf = vec![];
read::FrameDecoder::new(bytes).read_to_end(&mut buf).unwrap();
buf
}
fn read_frame_press(bytes: &[u8]) -> Vec<u8> {
use snap::read;
use std::io::Read;
let mut buf = vec![];
read::FrameEncoder::new(bytes).read_to_end(&mut buf).unwrap();
buf
}
#[cfg(feature = "cpp")]
fn press_cpp(bytes: &[u8]) -> Vec<u8> {
use snap::raw::max_compress_len;
let mut buf = vec![0; max_compress_len(bytes.len())];
let n = cpp::compress(bytes, &mut buf).unwrap();
buf.truncate(n);
buf
}
| {
let data = include_bytes!("../data/Mark.Twain-Tom.Sawyer.txt.rawsnappy");
let data = &data[..];
assert_eq!(data, &*press(&depress(data)));
} | identifier_body |
tests.rs | use quickcheck::{QuickCheck, StdGen, TestResult};
use snap::raw::{decompress_len, Decoder, Encoder};
use snap::Error;
#[cfg(feature = "cpp")]
use snappy_cpp as cpp;
// roundtrip is a macro that compresses the input, then decompresses the result
// and compares it with the original input. If they are not equal, then the
// test fails.
macro_rules! roundtrip {
($data:expr) => {{
let d = &$data[..];
assert_eq!(d, &*depress(&press(d)));
}};
}
// errored is a macro that tries to decompress the input and asserts that it
// resulted in an error. If decompression was successful, then the test fails.
macro_rules! errored {
($data:expr, $err:expr) => {
errored!($data, $err, false);
};
($data:expr, $err:expr, $bad_header:expr) => {{
let d = &$data[..];
let mut buf = if $bad_header {
assert_eq!($err, decompress_len(d).unwrap_err());
vec![0; 1024]
} else { | };
match Decoder::new().decompress(d, &mut buf) {
Err(ref err) if err == &$err => {}
Err(ref err) => panic!(
"expected decompression to fail with {:?}, \
but got {:?}",
$err, err
),
Ok(n) => {
panic!(
"\nexpected decompression to fail, but did not!
original (len == {:?})
----------------------
{:?}
decompressed (len == {:?})
--------------------------
{:?}
",
d.len(),
d,
n,
buf
);
}
}
}};
}
// testtrip is a macro that defines a test that compresses the input, then
// decompresses the result and compares it with the original input. If they are
// not equal, then the test fails. This test is performed both on the raw
// Snappy format and the framed Snappy format.
//
// If tests are compiled with the cpp feature, then this also tests that the
// C++ library compresses to the same bytes that the Rust library does.
macro_rules! testtrip {
($name:ident, $data:expr) => {
mod $name {
#[test]
fn roundtrip_raw() {
use super::{depress, press};
roundtrip!($data);
}
#[test]
fn roundtrip_frame() {
use super::{read_frame_depress, write_frame_press};
let d = &$data[..];
assert_eq!(d, &*read_frame_depress(&write_frame_press(d)));
}
#[test]
fn read_and_write_frame_encoder_match() {
use super::{read_frame_press, write_frame_press};
let d = &$data[..];
assert_eq!(read_frame_press(d), write_frame_press(d));
}
#[test]
#[cfg(feature = "cpp")]
fn cmpcpp() {
use super::{press, press_cpp};
let data = &$data[..];
let rust = press(data);
let cpp = press_cpp(data);
if rust == cpp {
return;
}
panic!(
"\ncompression results are not equal!
original (len == {:?})
----------------------
{:?}
rust (len == {:?})
------------------
{:?}
cpp (len == {:?})
-----------------
{:?}
",
data.len(),
data,
rust.len(),
rust,
cpp.len(),
cpp
);
}
}
};
}
// testcorrupt is a macro that defines a test that decompresses the input,
// and if the result is anything other than the error given, the test fails.
macro_rules! testerrored {
($name:ident, $data:expr, $err:expr) => {
testerrored!($name, $data, $err, false);
};
($name:ident, $data:expr, $err:expr, $bad_header:expr) => {
#[test]
fn $name() {
errored!($data, $err, $bad_header);
}
};
}
// Simple test cases.
testtrip!(empty, &[]);
testtrip!(one_zero, &[0]);
// Roundtrip all of the benchmark data.
testtrip!(data_html, include_bytes!("../data/html"));
testtrip!(data_urls, include_bytes!("../data/urls.10K"));
testtrip!(data_jpg, include_bytes!("../data/fireworks.jpeg"));
testtrip!(data_pdf, include_bytes!("../data/paper-100k.pdf"));
testtrip!(data_html4, include_bytes!("../data/html_x_4"));
testtrip!(data_txt1, include_bytes!("../data/alice29.txt"));
testtrip!(data_txt2, include_bytes!("../data/asyoulik.txt"));
testtrip!(data_txt3, include_bytes!("../data/lcet10.txt"));
testtrip!(data_txt4, include_bytes!("../data/plrabn12.txt"));
testtrip!(data_pb, include_bytes!("../data/geo.protodata"));
testtrip!(data_gaviota, include_bytes!("../data/kppkn.gtb"));
testtrip!(data_golden, include_bytes!("../data/Mark.Twain-Tom.Sawyer.txt"));
// Do it again, with the Snappy frame format.
// Roundtrip the golden data, starting with the compressed bytes.
#[test]
fn data_golden_rev() {
let data = include_bytes!("../data/Mark.Twain-Tom.Sawyer.txt.rawsnappy");
let data = &data[..];
assert_eq!(data, &*press(&depress(data)));
}
// Miscellaneous tests.
#[test]
fn small_copy() {
use std::iter::repeat;
for i in 0..32 {
let inner: String = repeat('b').take(i).collect();
roundtrip!(format!("aaaa{}aaaabbbb", inner).into_bytes());
}
}
#[test]
fn small_regular() {
let mut i = 1;
while i < 20_000 {
let mut buf = vec![0; i];
for (j, x) in buf.iter_mut().enumerate() {
*x = (j % 10) as u8 + b'a';
}
roundtrip!(buf);
i += 23;
}
}
// Test that triggered an out of bounds write.
#[test]
fn decompress_copy_close_to_end_1() {
let buf = [
27,
0b000010_00,
1,
2,
3,
0b000_000_10,
3,
0,
0b010110_00,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
];
let decompressed = [
1, 2, 3, 1, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26,
];
assert_eq!(decompressed, &*depress(&buf));
}
#[test]
fn decompress_copy_close_to_end_2() {
let buf = [
28,
0b000010_00,
1,
2,
3,
0b000_000_10,
3,
0,
0b010111_00,
4,
5,
6,
7,
8,
9,
10,
11,
12,
13,
14,
15,
16,
17,
18,
19,
20,
21,
22,
23,
24,
25,
26,
27,
];
let decompressed = [
1, 2, 3, 1, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26, 27,
];
assert_eq!(decompressed, &*depress(&buf));
}
// The `read::FrameEncoder` code uses different code paths depending on buffer
// size, so let's test both. Also, very small buffers are a good stress test.
#[test]
fn read_frame_encoder_big_and_little_buffers() {
use snap::read;
use std::io::{BufReader, Read};
let bytes = &include_bytes!("../data/html")[..];
let mut big =
BufReader::with_capacity(1_000_000, read::FrameEncoder::new(bytes));
let mut big_out = vec![];
big.read_to_end(&mut big_out).unwrap();
// 5 bytes is small enough to break up headers, etc.
let mut little =
BufReader::with_capacity(5, read::FrameEncoder::new(bytes));
let mut little_out = vec![];
little.read_to_end(&mut little_out).unwrap();
assert_eq!(big_out, little_out);
}
// Tests decompression on malformed data.
// An empty buffer.
testerrored!(err_empty, &b""[..], Error::Empty);
// Decompress fewer bytes than the header reports.
testerrored!(
err_header_mismatch,
&b"\x05\x00a"[..],
Error::HeaderMismatch { expected_len: 5, got_len: 1 }
);
// An invalid varint (final byte has continuation bit set).
testerrored!(err_varint1, &b"\xFF"[..], Error::Header, true);
// A varint that overflows u64.
testerrored!(
err_varint2,
&b"\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\x00"[..],
Error::Header,
true
);
// A varint that fits in u64 but overflows u32.
testerrored!(
err_varint3,
&b"\x80\x80\x80\x80\x10"[..],
Error::TooBig { given: 4294967296, max: 4294967295 },
true
);
// A literal whose length is too small.
// Since the literal length is 1, 'h' is read as a literal and 'i' is
// interpreted as a copy 1 operation missing its offset byte.
testerrored!(
err_lit,
&b"\x02\x00hi"[..],
Error::CopyRead { len: 1, src_len: 0 }
);
// A literal whose length is too big.
testerrored!(
err_lit_big1,
&b"\x02\xechi"[..],
Error::Literal { len: 60, src_len: 2, dst_len: 2 }
);
// A literal whose length is too big, requires 1 extra byte to be read, and
// src is too short to read that byte.
testerrored!(
err_lit_big2a,
&b"\x02\xf0hi"[..],
Error::Literal { len: 4, src_len: 2, dst_len: 2 }
);
// A literal whose length is too big, requires 1 extra byte to be read,
// src is too short to read the full literal.
testerrored!(
err_lit_big2b,
&b"\x02\xf0hi\x00\x00\x00"[..],
Error::Literal {
len: 105, // because 105 == 'h' as u8 + 1
src_len: 4,
dst_len: 2,
}
);
// A copy 1 operation that stops at the tag byte. This fails because there's
// no byte to read for the copy offset.
testerrored!(
err_copy1,
&b"\x02\x00a\x01"[..],
Error::CopyRead { len: 1, src_len: 0 }
);
// A copy 2 operation that stops at the tag byte and another copy 2 operation
// that stops after the first byte in the offset.
testerrored!(
err_copy2a,
&b"\x11\x00a\x3e"[..],
Error::CopyRead { len: 2, src_len: 0 }
);
testerrored!(
err_copy2b,
&b"\x11\x00a\x3e\x01"[..],
Error::CopyRead { len: 2, src_len: 1 }
);
// Same as copy 2, but for copy 4.
testerrored!(
err_copy3a,
&b"\x11\x00a\x3f"[..],
Error::CopyRead { len: 4, src_len: 0 }
);
testerrored!(
err_copy3b,
&b"\x11\x00a\x3f\x00"[..],
Error::CopyRead { len: 4, src_len: 1 }
);
testerrored!(
err_copy3c,
&b"\x11\x00a\x3f\x00\x00"[..],
Error::CopyRead { len: 4, src_len: 2 }
);
testerrored!(
err_copy3d,
&b"\x11\x00a\x3f\x00\x00\x00"[..],
Error::CopyRead { len: 4, src_len: 3 }
);
// A copy operation whose offset is zero.
testerrored!(
err_copy_offset_zero,
&b"\x11\x00a\x01\x00"[..],
Error::Offset { offset: 0, dst_pos: 1 }
);
// A copy operation whose offset is too big.
testerrored!(
err_copy_offset_big,
&b"\x11\x00a\x01\xFF"[..],
Error::Offset { offset: 255, dst_pos: 1 }
);
// A copy operation whose length is too big.
testerrored!(
err_copy_len_big,
&b"\x05\x00a\x1d\x01"[..],
Error::CopyWrite { len: 11, dst_len: 4 }
);
// Selected random inputs pulled from quickcheck failure witnesses.
testtrip!(
random1,
&[
0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0, 5, 0, 0,
1, 1, 0, 0, 1, 2, 0, 0, 2, 1, 0, 0, 2, 2, 0, 0, 0, 6, 0, 0, 3, 1, 0,
0, 0, 7, 0, 0, 1, 3, 0, 0, 0, 8, 0, 0, 2, 3, 0, 0, 0, 9, 0, 0, 1, 4,
0, 0, 1, 0, 0, 3, 0, 0, 1, 0, 1, 0, 0, 0, 10, 0, 0, 0, 0, 2, 4, 0, 0,
2, 0, 0, 3, 0, 1, 0, 0, 1, 5, 0, 0, 6, 0, 0, 0, 0, 11, 0, 0, 1, 6, 0,
0, 1, 7, 0, 0, 0, 12, 0, 0, 3, 2, 0, 0, 0, 13, 0, 0, 2, 5, 0, 0, 0, 3,
3, 0, 0, 0, 1, 8, 0, 0, 1, 0, 1, 0, 0, 0, 4, 1, 0, 0, 0, 0, 14, 0, 0,
0, 1, 9, 0, 0, 0, 1, 10, 0, 0, 0, 0, 1, 11, 0, 0, 0, 1, 0, 2, 0, 0, 0,
1, 1, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 1, 2, 1, 0, 0, 0, 0, 0, 2, 6, 0,
0, 0, 0, 0, 1, 12, 0, 0, 0, 0, 0, 3, 4, 0, 0, 0, 0, 0, 7, 0, 0, 0, 0,
0, 1, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
]
);
testtrip!(
random2,
&[
10, 2, 14, 13, 0, 8, 2, 10, 2, 14, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0,
]
);
testtrip!(
random3,
&[0, 0, 0, 4, 1, 4, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,]
);
testtrip!(
random4,
&[
0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0, 5, 0, 0,
1, 1, 0, 0, 1, 2, 0, 0, 1, 3, 0, 0, 1, 4, 0, 0, 2, 1, 0, 0, 0, 4, 0,
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
);
// QuickCheck properties for testing that random data roundtrips.
// These properties tend to produce the inputs for the "random" tests above.
#[test]
fn qc_roundtrip() {
fn p(bytes: Vec<u8>) -> bool {
depress(&press(&bytes)) == bytes
}
QuickCheck::new()
.gen(StdGen::new(rand::thread_rng(), 10_000))
.tests(1_000)
.quickcheck(p as fn(_) -> _);
}
#[test]
fn qc_roundtrip_stream() {
fn p(bytes: Vec<u8>) -> TestResult {
if bytes.is_empty() {
return TestResult::discard();
}
TestResult::from_bool(
read_frame_depress(&write_frame_press(&bytes)) == bytes,
)
}
QuickCheck::new()
.gen(StdGen::new(rand::thread_rng(), 10_000))
.tests(1_000)
.quickcheck(p as fn(_) -> _);
}
#[test]
fn test_short_input() {
// Regression test for https://github.com/BurntSushi/rust-snappy/issues/42
use snap::read;
use std::io::Read;
let err =
read::FrameDecoder::new(&b"123"[..]).read_to_end(&mut Vec::new());
assert_eq!(err.unwrap_err().kind(), std::io::ErrorKind::UnexpectedEof);
}
#[test]
#[cfg(feature = "cpp")]
fn qc_cmpcpp() {
fn p(bytes: Vec<u8>) -> bool {
press(&bytes) == press_cpp(&bytes)
}
QuickCheck::new()
.gen(StdGen::new(rand::thread_rng(), 10_000))
.tests(10_000)
.quickcheck(p as fn(_) -> _);
}
// Regression tests.
// See: https://github.com/BurntSushi/rust-snappy/issues/3
#[cfg(target_pointer_width = "32")]
testerrored!(
err_lit_len_overflow1,
&b"\x11\x00\x00\xfc\xfe\xff\xff\xff"[..],
Error::Literal { len: std::u32::MAX as u64, src_len: 0, dst_len: 16 }
);
#[cfg(target_pointer_width = "32")]
testerrored!(
err_lit_len_overflow2,
&b"\x11\x00\x00\xfc\xff\xff\xff\xff"[..],
Error::Literal { len: std::u32::MAX as u64 + 1, src_len: 0, dst_len: 16 }
);
// Helper functions.
fn press(bytes: &[u8]) -> Vec<u8> {
Encoder::new().compress_vec(bytes).unwrap()
}
fn depress(bytes: &[u8]) -> Vec<u8> {
Decoder::new().decompress_vec(bytes).unwrap()
}
fn write_frame_press(bytes: &[u8]) -> Vec<u8> {
use snap::write;
use std::io::Write;
let mut wtr = write::FrameEncoder::new(vec![]);
wtr.write_all(bytes).unwrap();
wtr.into_inner().unwrap()
}
fn read_frame_depress(bytes: &[u8]) -> Vec<u8> {
use snap::read;
use std::io::Read;
let mut buf = vec![];
read::FrameDecoder::new(bytes).read_to_end(&mut buf).unwrap();
buf
}
fn read_frame_press(bytes: &[u8]) -> Vec<u8> {
use snap::read;
use std::io::Read;
let mut buf = vec![];
read::FrameEncoder::new(bytes).read_to_end(&mut buf).unwrap();
buf
}
#[cfg(feature = "cpp")]
fn press_cpp(bytes: &[u8]) -> Vec<u8> {
use snap::raw::max_compress_len;
let mut buf = vec![0; max_compress_len(bytes.len())];
let n = cpp::compress(bytes, &mut buf).unwrap();
buf.truncate(n);
buf
} | vec![0; decompress_len(d).unwrap()] | random_line_split |
learn.rs | use super::events_from_chunks;
use clap::{App, Arg, ArgGroup, ArgMatches, SubCommand};
use futures::{try_join, Stream, StreamExt, TryStreamExt};
use nanoid::nanoid;
use serde::Serialize;
use serde_json;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::io::{stdin, stdout, AsyncWrite};
use tokio::sync::mpsc;
use tokio_stream::wrappers::ReceiverStream;
use optic_diff_engine::streams;
use optic_diff_engine::{analyze_undocumented_bodies, EndpointCommand, SpecCommand};
use optic_diff_engine::{
BodyAnalysisLocation, HttpInteraction, SpecChunkEvent, SpecEvent, SpecIdGenerator,
SpecProjection, TrailObservationsResult,
};
pub const SUBCOMMAND_NAME: &'static str = "learn";
pub fn create_subcommand<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name(SUBCOMMAND_NAME)
.about("Learns about possible changes to the spec based on interactions or diffs")
.arg(
Arg::with_name("undocumented-bodies")
.long("undocumented-bodies")
.takes_value(false)
.help("Learn shapes of undocumented bodies from interactions piped to stdin"),
)
.arg(
Arg::with_name("shape-diffs")
.long("shape-diffs")
.takes_value(false)
.help("Learn updated shapes from shape diffs piped to stdin"),
)
.group(
ArgGroup::with_name("subject")
.args(&["undocumented-bodies", "shape-diffs"])
.multiple(false)
.required(true),
)
}
pub async fn main<'a>(
command_matches: &'a ArgMatches<'a>,
spec_chunks: Vec<SpecChunkEvent>,
input_queue_size: usize,
) {
let spec_events = events_from_chunks(spec_chunks).await;
if command_matches.is_present("undocumented-bodies") {
let stdin = stdin();
let interaction_lines = streams::http_interaction::json_lines(stdin);
let sink = stdout();
learn_undocumented_bodies(spec_events, input_queue_size, interaction_lines, sink).await;
} else if command_matches.is_present("shape-diffs") {
todo!("shape diffs learning is yet to be implemented");
} else {
unreachable!("subject is required");
}
}
async fn learn_undocumented_bodies<S: 'static + AsyncWrite + Unpin + Send>(
spec_events: Vec<SpecEvent>,
input_queue_size: usize,
interaction_lines: impl Stream<Item = Result<String, std::io::Error>>,
sink: S,
) {
let spec_projection = Arc::new(SpecProjection::from(spec_events));
let (analysis_sender, analysis_receiver) = mpsc::channel(32);
let analyzing_bodies = async move {
let analyze_results = interaction_lines
.map(Ok)
.try_for_each_concurrent(input_queue_size, |interaction_json_result| {
let projection = spec_projection.clone();
let analysis_sender = analysis_sender.clone();
let analyze_task = tokio::spawn(async move {
let analyze_comp = tokio::task::spawn_blocking(move || {
let interaction_json =
interaction_json_result.expect("can rad interaction json line form stdin");
let interaction: HttpInteraction =
serde_json::from_str(&interaction_json).expect("could not parse interaction json");
analyze_undocumented_bodies(&projection, interaction)
});
match analyze_comp.await {
Ok(results) => {
for result in results {
analysis_sender
.send(result)
.await
.expect("could not send analysis result to aggregation channel")
}
}
Err(err) => {
// ignore a single interaction not being able to deserialize
eprintln!("interaction ignored: {}", err);
}
}
});
analyze_task
})
.await;
analyze_results
};
let aggregating_results = tokio::spawn(async move {
let mut analysiss = ReceiverStream::new(analysis_receiver);
let mut id_generator = IdGenerator::default();
let mut observations_by_body_location = HashMap::new();
while let Some(analysis) = analysiss.next().await {
let existing_observations = observations_by_body_location
.entry(analysis.body_location)
.or_insert_with(|| TrailObservationsResult::default());
existing_observations.union(analysis.trail_observations);
}
let mut endpoints_by_endpoint = HashMap::new();
for (body_location, observations) in observations_by_body_location {
let (root_shape_id, body_commands) = observations.into_commands(&mut id_generator);
let endpoint_body = EndpointBody::new(&body_location, root_shape_id, body_commands);
let (path_id, method) = match body_location {
BodyAnalysisLocation::Request {
path_id, method, ..
} => (path_id, method),
BodyAnalysisLocation::Response {
path_id, method, ..
} => (path_id, method),
};
let endpoint_bodies = endpoints_by_endpoint
.entry((path_id, method))
.or_insert_with_key(|(path_id, method)| {
EndpointBodies::new(path_id.clone(), method.clone())
});
endpoint_bodies.push(endpoint_body);
}
streams::write_to_json_lines(sink, endpoints_by_endpoint.values())
.await
.expect("could not write endpoint bodies to stdout");
});
try_join!(analyzing_bodies, aggregating_results).expect("essential worker task panicked");
}
#[derive(Debug, Default)]
struct IdGenerator;
impl SpecIdGenerator for IdGenerator {
fn generate_id(&mut self, prefix: &str) -> String {
// NanoID @ 10 chars:
// - URL-safe,
// - 17 years for a 1% chance of at least one global collision assuming
// writing 1000 ids per hour (https://zelark.github.io/nano-id-cc/)
format!("{}{}", prefix, nanoid!(10))
}
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointBodies {
path_id: String,
method: String,
requests: Vec<EndpointRequestBody>,
responses: Vec<EndpointResponseBody>,
}
impl EndpointBodies {
pub fn new(path_id: String, method: String) -> Self {
Self {
path_id,
method,
requests: vec![],
responses: vec![],
}
}
pub fn push(&mut self, endpoint: EndpointBody) {
match endpoint {
EndpointBody::Request(endpoint_request) => {
self.requests.push(endpoint_request);
}
EndpointBody::Response(endpoint_response) => {
self.responses.push(endpoint_response);
}
}
}
}
#[derive(Debug)]
enum EndpointBody {
Request(EndpointRequestBody),
Response(EndpointResponseBody),
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointRequestBody {
commands: Vec<SpecCommand>,
#[serde(skip)]
path_id: String,
#[serde(skip)]
method: String,
#[serde(flatten)]
body_descriptor: Option<EndpointBodyDescriptor>,
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointResponseBody {
commands: Vec<SpecCommand>,
status_code: u16,
#[serde(skip)]
path_id: String,
#[serde(skip)]
method: String,
#[serde(flatten)]
body_descriptor: Option<EndpointBodyDescriptor>,
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointBodyDescriptor {
content_type: String,
root_shape_id: String,
}
impl EndpointBody {
fn new(
body_location: &BodyAnalysisLocation,
root_shape_id: Option<String>,
body_commands: impl IntoIterator<Item = SpecCommand>,
) -> Self {
let body_descriptor = match root_shape_id {
Some(root_shape_id) => Some(EndpointBodyDescriptor {
content_type: body_location
.content_type()
.expect("root shape id implies a content type to be present")
.clone(),
root_shape_id,
}),
None => None,
};
let mut body = match body_location {
BodyAnalysisLocation::Request {
path_id, method, ..
} => EndpointBody::Request(EndpointRequestBody {
body_descriptor,
path_id: path_id.clone(),
method: method.clone(),
commands: body_commands.into_iter().collect(),
}),
BodyAnalysisLocation::Response {
status_code,
path_id,
method,
..
} => EndpointBody::Response(EndpointResponseBody {
body_descriptor,
path_id: path_id.clone(),
method: method.clone(),
commands: body_commands.into_iter().collect(),
status_code: *status_code,
}),
};
body.append_endpoint_commands();
body
}
fn append_endpoint_commands(&mut self) {
let mut ids = IdGenerator::default();
match self {
EndpointBody::Request(request_body) => {
let request_id = ids.request();
request_body
.commands
.push(SpecCommand::from(EndpointCommand::add_request(
request_id.clone(),
request_body.path_id.clone(),
request_body.method.clone(),
)));
if let Some(body_descriptor) = &request_body.body_descriptor |
}
EndpointBody::Response(response_body) => {
let response_id = ids.response();
response_body.commands.push(SpecCommand::from(
EndpointCommand::add_response_by_path_and_method(
response_id.clone(),
response_body.path_id.clone(),
response_body.method.clone(),
response_body.status_code.clone(),
),
));
if let Some(body_descriptor) = &response_body.body_descriptor {
response_body
.commands
.push(SpecCommand::from(EndpointCommand::set_response_body_shape(
response_id,
body_descriptor.root_shape_id.clone(),
body_descriptor.content_type.clone(),
false,
)));
}
}
};
}
}
#[cfg(test)]
mod test {
use super::*;
use serde_json::json;
#[tokio::main]
#[test]
async fn can_learn_endpoint_bodies_from_interactions() {
let spec_events: Vec<SpecEvent> = serde_json::from_value(json!([
{"PathComponentAdded":{"pathId":"path_id_1","parentPathId":"root","name":"todos"}}
]))
.expect("initial spec events should be valid events");
// TODO: feed actual interactions and assert the output
let interaction_lines = streams::http_interaction::json_lines(tokio::io::empty());
let sink = tokio::io::sink();
learn_undocumented_bodies(spec_events, 1, interaction_lines, sink).await;
}
}
| {
request_body
.commands
.push(SpecCommand::from(EndpointCommand::set_request_body_shape(
request_id,
body_descriptor.root_shape_id.clone(),
body_descriptor.content_type.clone(),
false,
)));
} | conditional_block |
learn.rs | use super::events_from_chunks;
use clap::{App, Arg, ArgGroup, ArgMatches, SubCommand};
use futures::{try_join, Stream, StreamExt, TryStreamExt};
use nanoid::nanoid;
use serde::Serialize;
use serde_json;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::io::{stdin, stdout, AsyncWrite};
use tokio::sync::mpsc;
use tokio_stream::wrappers::ReceiverStream;
use optic_diff_engine::streams;
use optic_diff_engine::{analyze_undocumented_bodies, EndpointCommand, SpecCommand};
use optic_diff_engine::{
BodyAnalysisLocation, HttpInteraction, SpecChunkEvent, SpecEvent, SpecIdGenerator,
SpecProjection, TrailObservationsResult,
};
pub const SUBCOMMAND_NAME: &'static str = "learn";
pub fn create_subcommand<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name(SUBCOMMAND_NAME)
.about("Learns about possible changes to the spec based on interactions or diffs")
.arg(
Arg::with_name("undocumented-bodies")
.long("undocumented-bodies")
.takes_value(false)
.help("Learn shapes of undocumented bodies from interactions piped to stdin"),
)
.arg(
Arg::with_name("shape-diffs")
.long("shape-diffs")
.takes_value(false)
.help("Learn updated shapes from shape diffs piped to stdin"),
)
.group(
ArgGroup::with_name("subject")
.args(&["undocumented-bodies", "shape-diffs"])
.multiple(false)
.required(true),
)
}
pub async fn main<'a>(
command_matches: &'a ArgMatches<'a>,
spec_chunks: Vec<SpecChunkEvent>,
input_queue_size: usize,
) {
let spec_events = events_from_chunks(spec_chunks).await;
if command_matches.is_present("undocumented-bodies") {
let stdin = stdin();
let interaction_lines = streams::http_interaction::json_lines(stdin);
let sink = stdout();
learn_undocumented_bodies(spec_events, input_queue_size, interaction_lines, sink).await;
} else if command_matches.is_present("shape-diffs") {
todo!("shape diffs learning is yet to be implemented");
} else {
unreachable!("subject is required");
}
}
async fn learn_undocumented_bodies<S: 'static + AsyncWrite + Unpin + Send>(
spec_events: Vec<SpecEvent>,
input_queue_size: usize,
interaction_lines: impl Stream<Item = Result<String, std::io::Error>>,
sink: S,
) {
let spec_projection = Arc::new(SpecProjection::from(spec_events));
let (analysis_sender, analysis_receiver) = mpsc::channel(32);
let analyzing_bodies = async move {
let analyze_results = interaction_lines
.map(Ok)
.try_for_each_concurrent(input_queue_size, |interaction_json_result| {
let projection = spec_projection.clone();
let analysis_sender = analysis_sender.clone();
let analyze_task = tokio::spawn(async move {
let analyze_comp = tokio::task::spawn_blocking(move || {
let interaction_json =
interaction_json_result.expect("can rad interaction json line form stdin");
let interaction: HttpInteraction =
serde_json::from_str(&interaction_json).expect("could not parse interaction json");
analyze_undocumented_bodies(&projection, interaction)
});
match analyze_comp.await {
Ok(results) => {
for result in results {
analysis_sender
.send(result)
.await
.expect("could not send analysis result to aggregation channel")
}
}
Err(err) => {
// ignore a single interaction not being able to deserialize
eprintln!("interaction ignored: {}", err);
}
}
});
analyze_task
})
.await;
analyze_results
};
let aggregating_results = tokio::spawn(async move {
let mut analysiss = ReceiverStream::new(analysis_receiver);
let mut id_generator = IdGenerator::default();
let mut observations_by_body_location = HashMap::new();
while let Some(analysis) = analysiss.next().await {
let existing_observations = observations_by_body_location
.entry(analysis.body_location)
.or_insert_with(|| TrailObservationsResult::default());
existing_observations.union(analysis.trail_observations);
}
let mut endpoints_by_endpoint = HashMap::new();
for (body_location, observations) in observations_by_body_location {
let (root_shape_id, body_commands) = observations.into_commands(&mut id_generator);
let endpoint_body = EndpointBody::new(&body_location, root_shape_id, body_commands);
let (path_id, method) = match body_location {
BodyAnalysisLocation::Request {
path_id, method, ..
} => (path_id, method),
BodyAnalysisLocation::Response {
path_id, method, ..
} => (path_id, method),
};
let endpoint_bodies = endpoints_by_endpoint
.entry((path_id, method))
.or_insert_with_key(|(path_id, method)| {
EndpointBodies::new(path_id.clone(), method.clone())
});
endpoint_bodies.push(endpoint_body);
}
streams::write_to_json_lines(sink, endpoints_by_endpoint.values())
.await
.expect("could not write endpoint bodies to stdout");
});
try_join!(analyzing_bodies, aggregating_results).expect("essential worker task panicked");
}
#[derive(Debug, Default)]
struct IdGenerator;
impl SpecIdGenerator for IdGenerator {
fn generate_id(&mut self, prefix: &str) -> String {
// NanoID @ 10 chars:
// - URL-safe,
// - 17 years for a 1% chance of at least one global collision assuming
// writing 1000 ids per hour (https://zelark.github.io/nano-id-cc/)
format!("{}{}", prefix, nanoid!(10))
}
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointBodies {
path_id: String,
method: String,
requests: Vec<EndpointRequestBody>,
responses: Vec<EndpointResponseBody>,
}
impl EndpointBodies {
pub fn new(path_id: String, method: String) -> Self {
Self {
path_id,
method,
requests: vec![],
responses: vec![],
}
}
pub fn push(&mut self, endpoint: EndpointBody) {
match endpoint {
EndpointBody::Request(endpoint_request) => {
self.requests.push(endpoint_request);
}
EndpointBody::Response(endpoint_response) => {
self.responses.push(endpoint_response);
}
}
}
}
#[derive(Debug)]
enum EndpointBody {
Request(EndpointRequestBody),
Response(EndpointResponseBody),
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointRequestBody {
commands: Vec<SpecCommand>,
#[serde(skip)]
path_id: String,
#[serde(skip)]
method: String,
#[serde(flatten)]
body_descriptor: Option<EndpointBodyDescriptor>,
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointResponseBody {
commands: Vec<SpecCommand>,
status_code: u16,
#[serde(skip)]
path_id: String,
#[serde(skip)]
method: String,
#[serde(flatten)]
body_descriptor: Option<EndpointBodyDescriptor>,
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointBodyDescriptor {
content_type: String,
root_shape_id: String,
}
impl EndpointBody {
fn new(
body_location: &BodyAnalysisLocation,
root_shape_id: Option<String>,
body_commands: impl IntoIterator<Item = SpecCommand>,
) -> Self {
let body_descriptor = match root_shape_id {
Some(root_shape_id) => Some(EndpointBodyDescriptor {
content_type: body_location
.content_type()
.expect("root shape id implies a content type to be present")
.clone(),
root_shape_id,
}),
None => None,
};
let mut body = match body_location {
BodyAnalysisLocation::Request {
path_id, method, ..
} => EndpointBody::Request(EndpointRequestBody {
body_descriptor,
path_id: path_id.clone(),
method: method.clone(),
commands: body_commands.into_iter().collect(),
}),
BodyAnalysisLocation::Response {
status_code,
path_id,
method,
..
} => EndpointBody::Response(EndpointResponseBody {
body_descriptor,
path_id: path_id.clone(),
method: method.clone(),
commands: body_commands.into_iter().collect(),
status_code: *status_code,
}),
};
body.append_endpoint_commands();
body
}
fn append_endpoint_commands(&mut self) {
let mut ids = IdGenerator::default();
match self {
EndpointBody::Request(request_body) => {
let request_id = ids.request();
request_body
.commands
.push(SpecCommand::from(EndpointCommand::add_request(
request_id.clone(),
request_body.path_id.clone(),
request_body.method.clone(),
)));
if let Some(body_descriptor) = &request_body.body_descriptor { | body_descriptor.content_type.clone(),
false,
)));
}
}
EndpointBody::Response(response_body) => {
let response_id = ids.response();
response_body.commands.push(SpecCommand::from(
EndpointCommand::add_response_by_path_and_method(
response_id.clone(),
response_body.path_id.clone(),
response_body.method.clone(),
response_body.status_code.clone(),
),
));
if let Some(body_descriptor) = &response_body.body_descriptor {
response_body
.commands
.push(SpecCommand::from(EndpointCommand::set_response_body_shape(
response_id,
body_descriptor.root_shape_id.clone(),
body_descriptor.content_type.clone(),
false,
)));
}
}
};
}
}
#[cfg(test)]
mod test {
use super::*;
use serde_json::json;
#[tokio::main]
#[test]
async fn can_learn_endpoint_bodies_from_interactions() {
let spec_events: Vec<SpecEvent> = serde_json::from_value(json!([
{"PathComponentAdded":{"pathId":"path_id_1","parentPathId":"root","name":"todos"}}
]))
.expect("initial spec events should be valid events");
// TODO: feed actual interactions and assert the output
let interaction_lines = streams::http_interaction::json_lines(tokio::io::empty());
let sink = tokio::io::sink();
learn_undocumented_bodies(spec_events, 1, interaction_lines, sink).await;
}
} | request_body
.commands
.push(SpecCommand::from(EndpointCommand::set_request_body_shape(
request_id,
body_descriptor.root_shape_id.clone(), | random_line_split |
learn.rs | use super::events_from_chunks;
use clap::{App, Arg, ArgGroup, ArgMatches, SubCommand};
use futures::{try_join, Stream, StreamExt, TryStreamExt};
use nanoid::nanoid;
use serde::Serialize;
use serde_json;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::io::{stdin, stdout, AsyncWrite};
use tokio::sync::mpsc;
use tokio_stream::wrappers::ReceiverStream;
use optic_diff_engine::streams;
use optic_diff_engine::{analyze_undocumented_bodies, EndpointCommand, SpecCommand};
use optic_diff_engine::{
BodyAnalysisLocation, HttpInteraction, SpecChunkEvent, SpecEvent, SpecIdGenerator,
SpecProjection, TrailObservationsResult,
};
pub const SUBCOMMAND_NAME: &'static str = "learn";
pub fn create_subcommand<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name(SUBCOMMAND_NAME)
.about("Learns about possible changes to the spec based on interactions or diffs")
.arg(
Arg::with_name("undocumented-bodies")
.long("undocumented-bodies")
.takes_value(false)
.help("Learn shapes of undocumented bodies from interactions piped to stdin"),
)
.arg(
Arg::with_name("shape-diffs")
.long("shape-diffs")
.takes_value(false)
.help("Learn updated shapes from shape diffs piped to stdin"),
)
.group(
ArgGroup::with_name("subject")
.args(&["undocumented-bodies", "shape-diffs"])
.multiple(false)
.required(true),
)
}
pub async fn main<'a>(
command_matches: &'a ArgMatches<'a>,
spec_chunks: Vec<SpecChunkEvent>,
input_queue_size: usize,
) {
let spec_events = events_from_chunks(spec_chunks).await;
if command_matches.is_present("undocumented-bodies") {
let stdin = stdin();
let interaction_lines = streams::http_interaction::json_lines(stdin);
let sink = stdout();
learn_undocumented_bodies(spec_events, input_queue_size, interaction_lines, sink).await;
} else if command_matches.is_present("shape-diffs") {
todo!("shape diffs learning is yet to be implemented");
} else {
unreachable!("subject is required");
}
}
async fn learn_undocumented_bodies<S: 'static + AsyncWrite + Unpin + Send>(
spec_events: Vec<SpecEvent>,
input_queue_size: usize,
interaction_lines: impl Stream<Item = Result<String, std::io::Error>>,
sink: S,
) {
let spec_projection = Arc::new(SpecProjection::from(spec_events));
let (analysis_sender, analysis_receiver) = mpsc::channel(32);
let analyzing_bodies = async move {
let analyze_results = interaction_lines
.map(Ok)
.try_for_each_concurrent(input_queue_size, |interaction_json_result| {
let projection = spec_projection.clone();
let analysis_sender = analysis_sender.clone();
let analyze_task = tokio::spawn(async move {
let analyze_comp = tokio::task::spawn_blocking(move || {
let interaction_json =
interaction_json_result.expect("can rad interaction json line form stdin");
let interaction: HttpInteraction =
serde_json::from_str(&interaction_json).expect("could not parse interaction json");
analyze_undocumented_bodies(&projection, interaction)
});
match analyze_comp.await {
Ok(results) => {
for result in results {
analysis_sender
.send(result)
.await
.expect("could not send analysis result to aggregation channel")
}
}
Err(err) => {
// ignore a single interaction not being able to deserialize
eprintln!("interaction ignored: {}", err);
}
}
});
analyze_task
})
.await;
analyze_results
};
let aggregating_results = tokio::spawn(async move {
let mut analysiss = ReceiverStream::new(analysis_receiver);
let mut id_generator = IdGenerator::default();
let mut observations_by_body_location = HashMap::new();
while let Some(analysis) = analysiss.next().await {
let existing_observations = observations_by_body_location
.entry(analysis.body_location)
.or_insert_with(|| TrailObservationsResult::default());
existing_observations.union(analysis.trail_observations);
}
let mut endpoints_by_endpoint = HashMap::new();
for (body_location, observations) in observations_by_body_location {
let (root_shape_id, body_commands) = observations.into_commands(&mut id_generator);
let endpoint_body = EndpointBody::new(&body_location, root_shape_id, body_commands);
let (path_id, method) = match body_location {
BodyAnalysisLocation::Request {
path_id, method, ..
} => (path_id, method),
BodyAnalysisLocation::Response {
path_id, method, ..
} => (path_id, method),
};
let endpoint_bodies = endpoints_by_endpoint
.entry((path_id, method))
.or_insert_with_key(|(path_id, method)| {
EndpointBodies::new(path_id.clone(), method.clone())
});
endpoint_bodies.push(endpoint_body);
}
streams::write_to_json_lines(sink, endpoints_by_endpoint.values())
.await
.expect("could not write endpoint bodies to stdout");
});
try_join!(analyzing_bodies, aggregating_results).expect("essential worker task panicked");
}
#[derive(Debug, Default)]
struct IdGenerator;
impl SpecIdGenerator for IdGenerator {
fn generate_id(&mut self, prefix: &str) -> String {
// NanoID @ 10 chars:
// - URL-safe,
// - 17 years for a 1% chance of at least one global collision assuming
// writing 1000 ids per hour (https://zelark.github.io/nano-id-cc/)
format!("{}{}", prefix, nanoid!(10))
}
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointBodies {
path_id: String,
method: String,
requests: Vec<EndpointRequestBody>,
responses: Vec<EndpointResponseBody>,
}
impl EndpointBodies {
pub fn new(path_id: String, method: String) -> Self {
Self {
path_id,
method,
requests: vec![],
responses: vec![],
}
}
pub fn push(&mut self, endpoint: EndpointBody) {
match endpoint {
EndpointBody::Request(endpoint_request) => {
self.requests.push(endpoint_request);
}
EndpointBody::Response(endpoint_response) => {
self.responses.push(endpoint_response);
}
}
}
}
#[derive(Debug)]
enum EndpointBody {
Request(EndpointRequestBody),
Response(EndpointResponseBody),
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointRequestBody {
commands: Vec<SpecCommand>,
#[serde(skip)]
path_id: String,
#[serde(skip)]
method: String,
#[serde(flatten)]
body_descriptor: Option<EndpointBodyDescriptor>,
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct | {
commands: Vec<SpecCommand>,
status_code: u16,
#[serde(skip)]
path_id: String,
#[serde(skip)]
method: String,
#[serde(flatten)]
body_descriptor: Option<EndpointBodyDescriptor>,
}
#[derive(Default, Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct EndpointBodyDescriptor {
content_type: String,
root_shape_id: String,
}
impl EndpointBody {
fn new(
body_location: &BodyAnalysisLocation,
root_shape_id: Option<String>,
body_commands: impl IntoIterator<Item = SpecCommand>,
) -> Self {
let body_descriptor = match root_shape_id {
Some(root_shape_id) => Some(EndpointBodyDescriptor {
content_type: body_location
.content_type()
.expect("root shape id implies a content type to be present")
.clone(),
root_shape_id,
}),
None => None,
};
let mut body = match body_location {
BodyAnalysisLocation::Request {
path_id, method, ..
} => EndpointBody::Request(EndpointRequestBody {
body_descriptor,
path_id: path_id.clone(),
method: method.clone(),
commands: body_commands.into_iter().collect(),
}),
BodyAnalysisLocation::Response {
status_code,
path_id,
method,
..
} => EndpointBody::Response(EndpointResponseBody {
body_descriptor,
path_id: path_id.clone(),
method: method.clone(),
commands: body_commands.into_iter().collect(),
status_code: *status_code,
}),
};
body.append_endpoint_commands();
body
}
fn append_endpoint_commands(&mut self) {
let mut ids = IdGenerator::default();
match self {
EndpointBody::Request(request_body) => {
let request_id = ids.request();
request_body
.commands
.push(SpecCommand::from(EndpointCommand::add_request(
request_id.clone(),
request_body.path_id.clone(),
request_body.method.clone(),
)));
if let Some(body_descriptor) = &request_body.body_descriptor {
request_body
.commands
.push(SpecCommand::from(EndpointCommand::set_request_body_shape(
request_id,
body_descriptor.root_shape_id.clone(),
body_descriptor.content_type.clone(),
false,
)));
}
}
EndpointBody::Response(response_body) => {
let response_id = ids.response();
response_body.commands.push(SpecCommand::from(
EndpointCommand::add_response_by_path_and_method(
response_id.clone(),
response_body.path_id.clone(),
response_body.method.clone(),
response_body.status_code.clone(),
),
));
if let Some(body_descriptor) = &response_body.body_descriptor {
response_body
.commands
.push(SpecCommand::from(EndpointCommand::set_response_body_shape(
response_id,
body_descriptor.root_shape_id.clone(),
body_descriptor.content_type.clone(),
false,
)));
}
}
};
}
}
#[cfg(test)]
mod test {
use super::*;
use serde_json::json;
#[tokio::main]
#[test]
async fn can_learn_endpoint_bodies_from_interactions() {
let spec_events: Vec<SpecEvent> = serde_json::from_value(json!([
{"PathComponentAdded":{"pathId":"path_id_1","parentPathId":"root","name":"todos"}}
]))
.expect("initial spec events should be valid events");
// TODO: feed actual interactions and assert the output
let interaction_lines = streams::http_interaction::json_lines(tokio::io::empty());
let sink = tokio::io::sink();
learn_undocumented_bodies(spec_events, 1, interaction_lines, sink).await;
}
}
| EndpointResponseBody | identifier_name |
provision_spec.go | /*
Copyright 2020 The Rook Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package osd
import (
"encoding/json"
"fmt"
"path"
"strings"
"github.com/pkg/errors"
cephv1 "github.com/rook/rook/pkg/apis/ceph.rook.io/v1"
kms "github.com/rook/rook/pkg/daemon/ceph/osd/kms"
"github.com/rook/rook/pkg/operator/ceph/cluster/mon"
"github.com/rook/rook/pkg/operator/ceph/cluster/osd/config"
"github.com/rook/rook/pkg/operator/ceph/controller"
"github.com/rook/rook/pkg/operator/k8sutil"
batch "k8s.io/api/batch/v1"
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
func (c *Cluster) makeJob(osdProps osdProperties, provisionConfig *provisionConfig) (*batch.Job, error) {
podSpec, err := c.provisionPodTemplateSpec(osdProps, v1.RestartPolicyOnFailure, provisionConfig)
if err != nil {
return nil, err
}
if osdProps.onPVC() {
// This is not needed in raw mode and 14.2.8 brings it
// but we still want to do this not to lose backward compatibility with lvm based OSDs...
podSpec.Spec.InitContainers = append(podSpec.Spec.InitContainers, c.getPVCInitContainer(osdProps))
if osdProps.onPVCWithMetadata() {
podSpec.Spec.InitContainers = append(podSpec.Spec.InitContainers, c.getPVCMetadataInitContainer("/srv", osdProps))
}
if osdProps.onPVCWithWal() {
podSpec.Spec.InitContainers = append(podSpec.Spec.InitContainers, c.getPVCWalInitContainer("/wal", osdProps))
}
} else {
podSpec.Spec.NodeSelector = map[string]string{v1.LabelHostname: osdProps.crushHostname}
}
job := &batch.Job{
ObjectMeta: metav1.ObjectMeta{
Name: k8sutil.TruncateNodeNameForJob(prepareAppNameFmt, osdProps.crushHostname),
Namespace: c.clusterInfo.Namespace,
Labels: map[string]string{
k8sutil.AppAttr: prepareAppName,
k8sutil.ClusterAttr: c.clusterInfo.Namespace,
},
},
Spec: batch.JobSpec{
Template: *podSpec,
},
}
if osdProps.onPVC() {
k8sutil.AddLabelToJob(OSDOverPVCLabelKey, osdProps.pvc.ClaimName, job)
k8sutil.AddLabelToJob(CephDeviceSetLabelKey, osdProps.deviceSetName, job)
k8sutil.AddLabelToPod(OSDOverPVCLabelKey, osdProps.pvc.ClaimName, &job.Spec.Template)
k8sutil.AddLabelToPod(CephDeviceSetLabelKey, osdProps.deviceSetName, &job.Spec.Template)
}
k8sutil.AddRookVersionLabelToJob(job)
controller.AddCephVersionLabelToJob(c.clusterInfo.CephVersion, job)
err = c.clusterInfo.OwnerInfo.SetControllerReference(job)
if err != nil {
return nil, err
}
// override the resources of all the init containers and main container with the expected osd prepare resources
c.applyResourcesToAllContainers(&podSpec.Spec, cephv1.GetPrepareOSDResources(c.spec.Resources))
return job, nil
}
// applyResourcesToAllContainers applies consistent resource requests for all containers and all init containers in the pod
func (c *Cluster) applyResourcesToAllContainers(spec *v1.PodSpec, resources v1.ResourceRequirements) {
for i := range spec.InitContainers {
spec.InitContainers[i].Resources = resources
}
for i := range spec.Containers {
spec.Containers[i].Resources = resources
}
}
func (c *Cluster) provisionPodTemplateSpec(osdProps osdProperties, restart v1.RestartPolicy, provisionConfig *provisionConfig) (*v1.PodTemplateSpec, error) {
copyBinariesVolume, copyBinariesContainer := c.getCopyBinariesContainer()
// ceph-volume is currently set up to use /etc/ceph/ceph.conf; this means no user config
// overrides will apply to ceph-volume, but this is unnecessary anyway
volumes := append(controller.PodVolumes(provisionConfig.DataPathMap, c.spec.DataDirHostPath, c.spec.DataDirHostPath, true), copyBinariesVolume)
// create a volume on /dev so the pod can access devices on the host
devVolume := v1.Volume{Name: "devices", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/dev"}}}
udevVolume := v1.Volume{Name: "udev", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/run/udev"}}}
volumes = append(volumes, []v1.Volume{
udevVolume,
devVolume,
mon.CephSecretVolume(),
}...)
if osdProps.onPVC() {
// Create volume config for PVCs
volumes = append(volumes, getPVCOSDVolumes(&osdProps, c.spec.DataDirHostPath, c.clusterInfo.Namespace, true)...)
if osdProps.encrypted {
// If a KMS is configured we populate
if c.spec.Security.KeyManagementService.IsEnabled() {
if c.spec.Security.KeyManagementService.IsVaultKMS() {
volumeTLS, _ := kms.VaultVolumeAndMount(c.spec.Security.KeyManagementService.ConnectionDetails, "")
volumes = append(volumes, volumeTLS)
}
if c.spec.Security.KeyManagementService.IsKMIPKMS() {
volumeKMIP, _ := kms.KMIPVolumeAndMount(c.spec.Security.KeyManagementService.TokenSecretName)
volumes = append(volumes, volumeKMIP)
}
}
}
} else {
// If not running on PVC we mount the rootfs of the host to validate the presence of the LVM package
rootFSVolume := v1.Volume{Name: "rootfs", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/"}}}
volumes = append(volumes, rootFSVolume)
}
if len(volumes) == 0 {
return nil, errors.New("empty volumes")
}
provisionContainer, err := c.provisionOSDContainer(osdProps, copyBinariesContainer.VolumeMounts[0], provisionConfig)
if err != nil {
return nil, errors.Wrap(err, "failed to generate OSD provisioning container")
}
podSpec := v1.PodSpec{
ServiceAccountName: serviceAccountName,
InitContainers: []v1.Container{
*copyBinariesContainer,
},
Containers: []v1.Container{
provisionContainer,
},
RestartPolicy: restart,
Volumes: volumes,
HostNetwork: c.spec.Network.IsHost(),
PriorityClassName: cephv1.GetOSDPriorityClassName(c.spec.PriorityClassNames),
SchedulerName: osdProps.schedulerName,
}
if c.spec.Network.IsHost() {
podSpec.DNSPolicy = v1.DNSClusterFirstWithHostNet
}
if osdProps.onPVC() {
c.applyAllPlacementIfNeeded(&podSpec)
// apply storageClassDeviceSets.preparePlacement
osdProps.getPreparePlacement().ApplyToPodSpec(&podSpec)
} else {
c.applyAllPlacementIfNeeded(&podSpec)
// apply spec.placement.prepareosd
c.spec.Placement[cephv1.KeyOSDPrepare].ApplyToPodSpec(&podSpec)
}
k8sutil.RemoveDuplicateEnvVars(&podSpec)
podMeta := metav1.ObjectMeta{
Name: AppName,
Labels: map[string]string{
k8sutil.AppAttr: prepareAppName,
k8sutil.ClusterAttr: c.clusterInfo.Namespace,
OSDOverPVCLabelKey: osdProps.pvc.ClaimName,
},
Annotations: map[string]string{},
}
cephv1.GetOSDPrepareAnnotations(c.spec.Annotations).ApplyToObjectMeta(&podMeta)
cephv1.GetOSDPrepareLabels(c.spec.Labels).ApplyToObjectMeta(&podMeta)
// ceph-volume --dmcrypt uses cryptsetup that synchronizes with udev on
// host through semaphore
podSpec.HostIPC = osdProps.storeConfig.EncryptedDevice || osdProps.encrypted
return &v1.PodTemplateSpec{
ObjectMeta: podMeta,
Spec: podSpec,
}, nil
}
func (c *Cluster) provisionOSDContainer(osdProps osdProperties, copyBinariesMount v1.VolumeMount, provisionConfig *provisionConfig) (v1.Container, error) {
envVars := c.getConfigEnvVars(osdProps, k8sutil.DataDir, true)
// enable debug logging in the prepare job
envVars = append(envVars, setDebugLogLevelEnvVar(true))
// only 1 of device list, device filter, device path filter and use all devices can be specified. We prioritize in that order.
if len(osdProps.devices) > 0 {
configuredDevices := []config.ConfiguredDevice{}
for _, device := range osdProps.devices {
id := device.Name
if device.FullPath != "" {
id = device.FullPath
}
cd := config.ConfiguredDevice{
ID: id,
StoreConfig: config.ToStoreConfig(device.Config),
}
configuredDevices = append(configuredDevices, cd)
}
marshalledDevices, err := json.Marshal(configuredDevices)
if err != nil {
return v1.Container{}, errors.Wrapf(err, "failed to JSON marshal configured devices for node %q", osdProps.crushHostname)
}
envVars = append(envVars, dataDevicesEnvVar(string(marshalledDevices)))
} else if osdProps.selection.DeviceFilter != "" {
envVars = append(envVars, deviceFilterEnvVar(osdProps.selection.DeviceFilter))
} else if osdProps.selection.DevicePathFilter != "" {
envVars = append(envVars, devicePathFilterEnvVar(osdProps.selection.DevicePathFilter))
} else if osdProps.selection.GetUseAllDevices() {
envVars = append(envVars, deviceFilterEnvVar("all"))
}
envVars = append(envVars, v1.EnvVar{Name: "ROOK_CEPH_VERSION", Value: c.clusterInfo.CephVersion.CephVersionFormatted()})
envVars = append(envVars, crushDeviceClassEnvVar(osdProps.storeConfig.DeviceClass))
envVars = append(envVars, crushInitialWeightEnvVar(osdProps.storeConfig.InitialWeight))
if osdProps.metadataDevice != "" {
envVars = append(envVars, metadataDeviceEnvVar(osdProps.metadataDevice))
}
volumeMounts := append(controller.CephVolumeMounts(provisionConfig.DataPathMap, true), []v1.VolumeMount{
{Name: "devices", MountPath: "/dev"},
{Name: "udev", MountPath: "/run/udev"},
copyBinariesMount,
mon.CephSecretVolumeMount(),
}...)
if controller.LoopDevicesAllowed() {
envVars = append(envVars, v1.EnvVar{Name: "CEPH_VOLUME_ALLOW_LOOP_DEVICES", Value: "true"})
}
// If the OSD runs on PVC
if osdProps.onPVC() {
volumeMounts = append(volumeMounts, getPvcOSDBridgeMount(osdProps.pvc.ClaimName))
// The device list is read by the Rook CLI via environment variables so let's add them
configuredDevices := []config.ConfiguredDevice{
{
ID: fmt.Sprintf("/mnt/%s", osdProps.pvc.ClaimName),
StoreConfig: config.NewStoreConfig(),
},
}
if osdProps.onPVCWithMetadata() |
if osdProps.onPVCWithWal() {
volumeMounts = append(volumeMounts, getPvcWalOSDBridgeMount(osdProps.walPVC.ClaimName))
configuredDevices = append(configuredDevices,
config.ConfiguredDevice{
ID: fmt.Sprintf("/wal/%s", osdProps.walPVC.ClaimName),
StoreConfig: config.NewStoreConfig(),
})
}
marshalledDevices, err := json.Marshal(configuredDevices)
if err != nil {
return v1.Container{}, errors.Wrapf(err, "failed to JSON marshal configured devices for PVC %q", osdProps.crushHostname)
}
envVars = append(envVars, dataDevicesEnvVar(string(marshalledDevices)))
envVars = append(envVars, pvcBackedOSDEnvVar("true"))
envVars = append(envVars, encryptedDeviceEnvVar(osdProps.encrypted))
envVars = append(envVars, pvcNameEnvVar(osdProps.pvc.ClaimName))
if osdProps.encrypted {
// If a KMS is configured we populate volume mounts and env variables
if c.spec.Security.KeyManagementService.IsEnabled() {
if c.spec.Security.KeyManagementService.IsVaultKMS() {
_, volumeMountsTLS := kms.VaultVolumeAndMount(c.spec.Security.KeyManagementService.ConnectionDetails, "")
volumeMounts = append(volumeMounts, volumeMountsTLS)
}
envVars = append(envVars, kms.ConfigToEnvVar(c.spec)...)
if c.spec.Security.KeyManagementService.IsKMIPKMS() {
envVars = append(envVars, cephVolumeRawEncryptedEnvVarFromSecret(osdProps))
_, volmeMountsKMIP := kms.KMIPVolumeAndMount(c.spec.Security.KeyManagementService.TokenSecretName)
volumeMounts = append(volumeMounts, volmeMountsKMIP)
}
} else {
envVars = append(envVars, cephVolumeRawEncryptedEnvVarFromSecret(osdProps))
}
}
} else {
// If not running on PVC we mount the rootfs of the host to validate the presence of the LVM package
volumeMounts = append(volumeMounts, v1.VolumeMount{Name: "rootfs", MountPath: "/rootfs", ReadOnly: true})
}
// Add OSD ID as environment variables.
// When this env is set, prepare pod job will destroy this OSD.
if c.replaceOSD != nil {
// Compare pvc claim name in case of OSDs on PVC
if osdProps.onPVC() {
if strings.Contains(c.replaceOSD.Path, osdProps.pvc.ClaimName) {
envVars = append(envVars, replaceOSDIDEnvVar(fmt.Sprint(c.replaceOSD.ID)))
}
} else {
// Compare the node name in case of OSDs on disk
if c.replaceOSD.Node == osdProps.crushHostname {
envVars = append(envVars, replaceOSDIDEnvVar(fmt.Sprint(c.replaceOSD.ID)))
}
}
}
// run privileged always since we always mount /dev
privileged := true
runAsUser := int64(0)
runAsNonRoot := false
readOnlyRootFilesystem := false
osdProvisionContainer := v1.Container{
Command: []string{path.Join(rookBinariesMountPath, "rook")},
Args: []string{"ceph", "osd", "provision"},
Name: "provision",
Image: c.spec.CephVersion.Image,
ImagePullPolicy: controller.GetContainerImagePullPolicy(c.spec.CephVersion.ImagePullPolicy),
VolumeMounts: volumeMounts,
Env: envVars,
EnvFrom: getEnvFromSources(),
SecurityContext: &v1.SecurityContext{
Privileged: &privileged,
RunAsUser: &runAsUser,
RunAsNonRoot: &runAsNonRoot,
ReadOnlyRootFilesystem: &readOnlyRootFilesystem,
},
Resources: cephv1.GetPrepareOSDResources(c.spec.Resources),
}
return osdProvisionContainer, nil
}
| {
volumeMounts = append(volumeMounts, getPvcMetadataOSDBridgeMount(osdProps.metadataPVC.ClaimName))
configuredDevices = append(configuredDevices,
config.ConfiguredDevice{
ID: fmt.Sprintf("/srv/%s", osdProps.metadataPVC.ClaimName),
StoreConfig: config.NewStoreConfig(),
})
} | conditional_block |
provision_spec.go | /*
Copyright 2020 The Rook Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package osd
import (
"encoding/json"
"fmt"
"path"
"strings"
"github.com/pkg/errors"
cephv1 "github.com/rook/rook/pkg/apis/ceph.rook.io/v1"
kms "github.com/rook/rook/pkg/daemon/ceph/osd/kms"
"github.com/rook/rook/pkg/operator/ceph/cluster/mon"
"github.com/rook/rook/pkg/operator/ceph/cluster/osd/config"
"github.com/rook/rook/pkg/operator/ceph/controller"
"github.com/rook/rook/pkg/operator/k8sutil"
batch "k8s.io/api/batch/v1"
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
func (c *Cluster) makeJob(osdProps osdProperties, provisionConfig *provisionConfig) (*batch.Job, error) {
podSpec, err := c.provisionPodTemplateSpec(osdProps, v1.RestartPolicyOnFailure, provisionConfig)
if err != nil {
return nil, err
}
if osdProps.onPVC() {
// This is not needed in raw mode and 14.2.8 brings it
// but we still want to do this not to lose backward compatibility with lvm based OSDs...
podSpec.Spec.InitContainers = append(podSpec.Spec.InitContainers, c.getPVCInitContainer(osdProps))
if osdProps.onPVCWithMetadata() {
podSpec.Spec.InitContainers = append(podSpec.Spec.InitContainers, c.getPVCMetadataInitContainer("/srv", osdProps))
}
if osdProps.onPVCWithWal() {
podSpec.Spec.InitContainers = append(podSpec.Spec.InitContainers, c.getPVCWalInitContainer("/wal", osdProps))
}
} else {
podSpec.Spec.NodeSelector = map[string]string{v1.LabelHostname: osdProps.crushHostname}
}
job := &batch.Job{
ObjectMeta: metav1.ObjectMeta{
Name: k8sutil.TruncateNodeNameForJob(prepareAppNameFmt, osdProps.crushHostname),
Namespace: c.clusterInfo.Namespace,
Labels: map[string]string{
k8sutil.AppAttr: prepareAppName,
k8sutil.ClusterAttr: c.clusterInfo.Namespace,
},
},
Spec: batch.JobSpec{
Template: *podSpec,
},
}
if osdProps.onPVC() {
k8sutil.AddLabelToJob(OSDOverPVCLabelKey, osdProps.pvc.ClaimName, job)
k8sutil.AddLabelToJob(CephDeviceSetLabelKey, osdProps.deviceSetName, job)
k8sutil.AddLabelToPod(OSDOverPVCLabelKey, osdProps.pvc.ClaimName, &job.Spec.Template)
k8sutil.AddLabelToPod(CephDeviceSetLabelKey, osdProps.deviceSetName, &job.Spec.Template)
}
k8sutil.AddRookVersionLabelToJob(job)
controller.AddCephVersionLabelToJob(c.clusterInfo.CephVersion, job)
err = c.clusterInfo.OwnerInfo.SetControllerReference(job)
if err != nil {
return nil, err
}
// override the resources of all the init containers and main container with the expected osd prepare resources
c.applyResourcesToAllContainers(&podSpec.Spec, cephv1.GetPrepareOSDResources(c.spec.Resources))
return job, nil
}
// applyResourcesToAllContainers applies consistent resource requests for all containers and all init containers in the pod
func (c *Cluster) applyResourcesToAllContainers(spec *v1.PodSpec, resources v1.ResourceRequirements) {
for i := range spec.InitContainers {
spec.InitContainers[i].Resources = resources
}
for i := range spec.Containers {
spec.Containers[i].Resources = resources
}
}
func (c *Cluster) provisionPodTemplateSpec(osdProps osdProperties, restart v1.RestartPolicy, provisionConfig *provisionConfig) (*v1.PodTemplateSpec, error) {
copyBinariesVolume, copyBinariesContainer := c.getCopyBinariesContainer()
// ceph-volume is currently set up to use /etc/ceph/ceph.conf; this means no user config
// overrides will apply to ceph-volume, but this is unnecessary anyway
volumes := append(controller.PodVolumes(provisionConfig.DataPathMap, c.spec.DataDirHostPath, c.spec.DataDirHostPath, true), copyBinariesVolume)
// create a volume on /dev so the pod can access devices on the host
devVolume := v1.Volume{Name: "devices", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/dev"}}}
udevVolume := v1.Volume{Name: "udev", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/run/udev"}}}
volumes = append(volumes, []v1.Volume{
udevVolume,
devVolume,
mon.CephSecretVolume(),
}...)
if osdProps.onPVC() {
// Create volume config for PVCs
volumes = append(volumes, getPVCOSDVolumes(&osdProps, c.spec.DataDirHostPath, c.clusterInfo.Namespace, true)...)
if osdProps.encrypted {
// If a KMS is configured we populate
if c.spec.Security.KeyManagementService.IsEnabled() {
if c.spec.Security.KeyManagementService.IsVaultKMS() {
volumeTLS, _ := kms.VaultVolumeAndMount(c.spec.Security.KeyManagementService.ConnectionDetails, "")
volumes = append(volumes, volumeTLS)
}
if c.spec.Security.KeyManagementService.IsKMIPKMS() {
volumeKMIP, _ := kms.KMIPVolumeAndMount(c.spec.Security.KeyManagementService.TokenSecretName)
volumes = append(volumes, volumeKMIP)
}
}
}
} else {
// If not running on PVC we mount the rootfs of the host to validate the presence of the LVM package
rootFSVolume := v1.Volume{Name: "rootfs", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/"}}}
volumes = append(volumes, rootFSVolume)
}
if len(volumes) == 0 {
return nil, errors.New("empty volumes")
}
provisionContainer, err := c.provisionOSDContainer(osdProps, copyBinariesContainer.VolumeMounts[0], provisionConfig)
if err != nil {
return nil, errors.Wrap(err, "failed to generate OSD provisioning container")
}
podSpec := v1.PodSpec{
ServiceAccountName: serviceAccountName,
InitContainers: []v1.Container{
*copyBinariesContainer,
},
Containers: []v1.Container{
provisionContainer,
},
RestartPolicy: restart,
Volumes: volumes,
HostNetwork: c.spec.Network.IsHost(),
PriorityClassName: cephv1.GetOSDPriorityClassName(c.spec.PriorityClassNames),
SchedulerName: osdProps.schedulerName,
}
if c.spec.Network.IsHost() {
podSpec.DNSPolicy = v1.DNSClusterFirstWithHostNet
}
if osdProps.onPVC() {
c.applyAllPlacementIfNeeded(&podSpec)
// apply storageClassDeviceSets.preparePlacement
osdProps.getPreparePlacement().ApplyToPodSpec(&podSpec)
} else {
c.applyAllPlacementIfNeeded(&podSpec)
// apply spec.placement.prepareosd
c.spec.Placement[cephv1.KeyOSDPrepare].ApplyToPodSpec(&podSpec)
}
k8sutil.RemoveDuplicateEnvVars(&podSpec)
podMeta := metav1.ObjectMeta{
Name: AppName,
Labels: map[string]string{
k8sutil.AppAttr: prepareAppName,
k8sutil.ClusterAttr: c.clusterInfo.Namespace,
OSDOverPVCLabelKey: osdProps.pvc.ClaimName,
},
Annotations: map[string]string{},
}
cephv1.GetOSDPrepareAnnotations(c.spec.Annotations).ApplyToObjectMeta(&podMeta)
cephv1.GetOSDPrepareLabels(c.spec.Labels).ApplyToObjectMeta(&podMeta)
// ceph-volume --dmcrypt uses cryptsetup that synchronizes with udev on
// host through semaphore
podSpec.HostIPC = osdProps.storeConfig.EncryptedDevice || osdProps.encrypted
return &v1.PodTemplateSpec{
ObjectMeta: podMeta,
Spec: podSpec,
}, nil
}
func (c *Cluster) provisionOSDContainer(osdProps osdProperties, copyBinariesMount v1.VolumeMount, provisionConfig *provisionConfig) (v1.Container, error) | {
envVars := c.getConfigEnvVars(osdProps, k8sutil.DataDir, true)
// enable debug logging in the prepare job
envVars = append(envVars, setDebugLogLevelEnvVar(true))
// only 1 of device list, device filter, device path filter and use all devices can be specified. We prioritize in that order.
if len(osdProps.devices) > 0 {
configuredDevices := []config.ConfiguredDevice{}
for _, device := range osdProps.devices {
id := device.Name
if device.FullPath != "" {
id = device.FullPath
}
cd := config.ConfiguredDevice{
ID: id,
StoreConfig: config.ToStoreConfig(device.Config),
}
configuredDevices = append(configuredDevices, cd)
}
marshalledDevices, err := json.Marshal(configuredDevices)
if err != nil {
return v1.Container{}, errors.Wrapf(err, "failed to JSON marshal configured devices for node %q", osdProps.crushHostname)
}
envVars = append(envVars, dataDevicesEnvVar(string(marshalledDevices)))
} else if osdProps.selection.DeviceFilter != "" {
envVars = append(envVars, deviceFilterEnvVar(osdProps.selection.DeviceFilter))
} else if osdProps.selection.DevicePathFilter != "" {
envVars = append(envVars, devicePathFilterEnvVar(osdProps.selection.DevicePathFilter))
} else if osdProps.selection.GetUseAllDevices() {
envVars = append(envVars, deviceFilterEnvVar("all"))
}
envVars = append(envVars, v1.EnvVar{Name: "ROOK_CEPH_VERSION", Value: c.clusterInfo.CephVersion.CephVersionFormatted()})
envVars = append(envVars, crushDeviceClassEnvVar(osdProps.storeConfig.DeviceClass))
envVars = append(envVars, crushInitialWeightEnvVar(osdProps.storeConfig.InitialWeight))
if osdProps.metadataDevice != "" {
envVars = append(envVars, metadataDeviceEnvVar(osdProps.metadataDevice))
}
volumeMounts := append(controller.CephVolumeMounts(provisionConfig.DataPathMap, true), []v1.VolumeMount{
{Name: "devices", MountPath: "/dev"},
{Name: "udev", MountPath: "/run/udev"},
copyBinariesMount,
mon.CephSecretVolumeMount(),
}...)
if controller.LoopDevicesAllowed() {
envVars = append(envVars, v1.EnvVar{Name: "CEPH_VOLUME_ALLOW_LOOP_DEVICES", Value: "true"})
}
// If the OSD runs on PVC
if osdProps.onPVC() {
volumeMounts = append(volumeMounts, getPvcOSDBridgeMount(osdProps.pvc.ClaimName))
// The device list is read by the Rook CLI via environment variables so let's add them
configuredDevices := []config.ConfiguredDevice{
{
ID: fmt.Sprintf("/mnt/%s", osdProps.pvc.ClaimName),
StoreConfig: config.NewStoreConfig(),
},
}
if osdProps.onPVCWithMetadata() {
volumeMounts = append(volumeMounts, getPvcMetadataOSDBridgeMount(osdProps.metadataPVC.ClaimName))
configuredDevices = append(configuredDevices,
config.ConfiguredDevice{
ID: fmt.Sprintf("/srv/%s", osdProps.metadataPVC.ClaimName),
StoreConfig: config.NewStoreConfig(),
})
}
if osdProps.onPVCWithWal() {
volumeMounts = append(volumeMounts, getPvcWalOSDBridgeMount(osdProps.walPVC.ClaimName))
configuredDevices = append(configuredDevices,
config.ConfiguredDevice{
ID: fmt.Sprintf("/wal/%s", osdProps.walPVC.ClaimName),
StoreConfig: config.NewStoreConfig(),
})
}
marshalledDevices, err := json.Marshal(configuredDevices)
if err != nil {
return v1.Container{}, errors.Wrapf(err, "failed to JSON marshal configured devices for PVC %q", osdProps.crushHostname)
}
envVars = append(envVars, dataDevicesEnvVar(string(marshalledDevices)))
envVars = append(envVars, pvcBackedOSDEnvVar("true"))
envVars = append(envVars, encryptedDeviceEnvVar(osdProps.encrypted))
envVars = append(envVars, pvcNameEnvVar(osdProps.pvc.ClaimName))
if osdProps.encrypted {
// If a KMS is configured we populate volume mounts and env variables
if c.spec.Security.KeyManagementService.IsEnabled() {
if c.spec.Security.KeyManagementService.IsVaultKMS() {
_, volumeMountsTLS := kms.VaultVolumeAndMount(c.spec.Security.KeyManagementService.ConnectionDetails, "")
volumeMounts = append(volumeMounts, volumeMountsTLS)
}
envVars = append(envVars, kms.ConfigToEnvVar(c.spec)...)
if c.spec.Security.KeyManagementService.IsKMIPKMS() {
envVars = append(envVars, cephVolumeRawEncryptedEnvVarFromSecret(osdProps))
_, volmeMountsKMIP := kms.KMIPVolumeAndMount(c.spec.Security.KeyManagementService.TokenSecretName)
volumeMounts = append(volumeMounts, volmeMountsKMIP)
}
} else {
envVars = append(envVars, cephVolumeRawEncryptedEnvVarFromSecret(osdProps))
}
}
} else {
// If not running on PVC we mount the rootfs of the host to validate the presence of the LVM package
volumeMounts = append(volumeMounts, v1.VolumeMount{Name: "rootfs", MountPath: "/rootfs", ReadOnly: true})
}
// Add OSD ID as environment variables.
// When this env is set, prepare pod job will destroy this OSD.
if c.replaceOSD != nil {
// Compare pvc claim name in case of OSDs on PVC
if osdProps.onPVC() {
if strings.Contains(c.replaceOSD.Path, osdProps.pvc.ClaimName) {
envVars = append(envVars, replaceOSDIDEnvVar(fmt.Sprint(c.replaceOSD.ID)))
}
} else {
// Compare the node name in case of OSDs on disk
if c.replaceOSD.Node == osdProps.crushHostname {
envVars = append(envVars, replaceOSDIDEnvVar(fmt.Sprint(c.replaceOSD.ID)))
}
}
}
// run privileged always since we always mount /dev
privileged := true
runAsUser := int64(0)
runAsNonRoot := false
readOnlyRootFilesystem := false
osdProvisionContainer := v1.Container{
Command: []string{path.Join(rookBinariesMountPath, "rook")},
Args: []string{"ceph", "osd", "provision"},
Name: "provision",
Image: c.spec.CephVersion.Image,
ImagePullPolicy: controller.GetContainerImagePullPolicy(c.spec.CephVersion.ImagePullPolicy),
VolumeMounts: volumeMounts,
Env: envVars,
EnvFrom: getEnvFromSources(),
SecurityContext: &v1.SecurityContext{
Privileged: &privileged,
RunAsUser: &runAsUser,
RunAsNonRoot: &runAsNonRoot,
ReadOnlyRootFilesystem: &readOnlyRootFilesystem,
},
Resources: cephv1.GetPrepareOSDResources(c.spec.Resources),
}
return osdProvisionContainer, nil
} | identifier_body | |
provision_spec.go | /*
Copyright 2020 The Rook Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package osd
import (
"encoding/json"
"fmt"
"path"
"strings"
"github.com/pkg/errors"
cephv1 "github.com/rook/rook/pkg/apis/ceph.rook.io/v1"
kms "github.com/rook/rook/pkg/daemon/ceph/osd/kms"
"github.com/rook/rook/pkg/operator/ceph/cluster/mon"
"github.com/rook/rook/pkg/operator/ceph/cluster/osd/config"
"github.com/rook/rook/pkg/operator/ceph/controller"
"github.com/rook/rook/pkg/operator/k8sutil"
batch "k8s.io/api/batch/v1"
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
func (c *Cluster) makeJob(osdProps osdProperties, provisionConfig *provisionConfig) (*batch.Job, error) {
podSpec, err := c.provisionPodTemplateSpec(osdProps, v1.RestartPolicyOnFailure, provisionConfig)
if err != nil {
return nil, err
}
if osdProps.onPVC() {
// This is not needed in raw mode and 14.2.8 brings it
// but we still want to do this not to lose backward compatibility with lvm based OSDs...
podSpec.Spec.InitContainers = append(podSpec.Spec.InitContainers, c.getPVCInitContainer(osdProps))
if osdProps.onPVCWithMetadata() {
podSpec.Spec.InitContainers = append(podSpec.Spec.InitContainers, c.getPVCMetadataInitContainer("/srv", osdProps))
}
if osdProps.onPVCWithWal() {
podSpec.Spec.InitContainers = append(podSpec.Spec.InitContainers, c.getPVCWalInitContainer("/wal", osdProps))
}
} else {
podSpec.Spec.NodeSelector = map[string]string{v1.LabelHostname: osdProps.crushHostname}
}
job := &batch.Job{
ObjectMeta: metav1.ObjectMeta{
Name: k8sutil.TruncateNodeNameForJob(prepareAppNameFmt, osdProps.crushHostname),
Namespace: c.clusterInfo.Namespace,
Labels: map[string]string{
k8sutil.AppAttr: prepareAppName,
k8sutil.ClusterAttr: c.clusterInfo.Namespace,
},
},
Spec: batch.JobSpec{
Template: *podSpec,
},
}
if osdProps.onPVC() {
k8sutil.AddLabelToJob(OSDOverPVCLabelKey, osdProps.pvc.ClaimName, job)
k8sutil.AddLabelToJob(CephDeviceSetLabelKey, osdProps.deviceSetName, job)
k8sutil.AddLabelToPod(OSDOverPVCLabelKey, osdProps.pvc.ClaimName, &job.Spec.Template)
k8sutil.AddLabelToPod(CephDeviceSetLabelKey, osdProps.deviceSetName, &job.Spec.Template)
}
k8sutil.AddRookVersionLabelToJob(job)
controller.AddCephVersionLabelToJob(c.clusterInfo.CephVersion, job)
err = c.clusterInfo.OwnerInfo.SetControllerReference(job)
if err != nil {
return nil, err
}
// override the resources of all the init containers and main container with the expected osd prepare resources
c.applyResourcesToAllContainers(&podSpec.Spec, cephv1.GetPrepareOSDResources(c.spec.Resources))
return job, nil
}
// applyResourcesToAllContainers applies consistent resource requests for all containers and all init containers in the pod
func (c *Cluster) applyResourcesToAllContainers(spec *v1.PodSpec, resources v1.ResourceRequirements) {
for i := range spec.InitContainers {
spec.InitContainers[i].Resources = resources
}
for i := range spec.Containers {
spec.Containers[i].Resources = resources
}
}
func (c *Cluster) provisionPodTemplateSpec(osdProps osdProperties, restart v1.RestartPolicy, provisionConfig *provisionConfig) (*v1.PodTemplateSpec, error) {
copyBinariesVolume, copyBinariesContainer := c.getCopyBinariesContainer()
// ceph-volume is currently set up to use /etc/ceph/ceph.conf; this means no user config
// overrides will apply to ceph-volume, but this is unnecessary anyway
volumes := append(controller.PodVolumes(provisionConfig.DataPathMap, c.spec.DataDirHostPath, c.spec.DataDirHostPath, true), copyBinariesVolume)
// create a volume on /dev so the pod can access devices on the host
devVolume := v1.Volume{Name: "devices", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/dev"}}}
udevVolume := v1.Volume{Name: "udev", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/run/udev"}}}
volumes = append(volumes, []v1.Volume{
udevVolume,
devVolume,
mon.CephSecretVolume(),
}...)
if osdProps.onPVC() {
// Create volume config for PVCs
volumes = append(volumes, getPVCOSDVolumes(&osdProps, c.spec.DataDirHostPath, c.clusterInfo.Namespace, true)...)
if osdProps.encrypted {
// If a KMS is configured we populate
if c.spec.Security.KeyManagementService.IsEnabled() {
if c.spec.Security.KeyManagementService.IsVaultKMS() {
volumeTLS, _ := kms.VaultVolumeAndMount(c.spec.Security.KeyManagementService.ConnectionDetails, "")
volumes = append(volumes, volumeTLS)
}
if c.spec.Security.KeyManagementService.IsKMIPKMS() {
volumeKMIP, _ := kms.KMIPVolumeAndMount(c.spec.Security.KeyManagementService.TokenSecretName)
volumes = append(volumes, volumeKMIP)
}
}
}
} else {
// If not running on PVC we mount the rootfs of the host to validate the presence of the LVM package
rootFSVolume := v1.Volume{Name: "rootfs", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/"}}}
volumes = append(volumes, rootFSVolume)
}
if len(volumes) == 0 {
return nil, errors.New("empty volumes")
}
provisionContainer, err := c.provisionOSDContainer(osdProps, copyBinariesContainer.VolumeMounts[0], provisionConfig)
if err != nil {
return nil, errors.Wrap(err, "failed to generate OSD provisioning container")
}
podSpec := v1.PodSpec{
ServiceAccountName: serviceAccountName,
InitContainers: []v1.Container{
*copyBinariesContainer,
},
Containers: []v1.Container{
provisionContainer,
},
RestartPolicy: restart,
Volumes: volumes,
HostNetwork: c.spec.Network.IsHost(),
PriorityClassName: cephv1.GetOSDPriorityClassName(c.spec.PriorityClassNames),
SchedulerName: osdProps.schedulerName,
}
if c.spec.Network.IsHost() {
podSpec.DNSPolicy = v1.DNSClusterFirstWithHostNet
}
if osdProps.onPVC() {
c.applyAllPlacementIfNeeded(&podSpec)
// apply storageClassDeviceSets.preparePlacement
osdProps.getPreparePlacement().ApplyToPodSpec(&podSpec)
} else {
c.applyAllPlacementIfNeeded(&podSpec)
// apply spec.placement.prepareosd
c.spec.Placement[cephv1.KeyOSDPrepare].ApplyToPodSpec(&podSpec) | podMeta := metav1.ObjectMeta{
Name: AppName,
Labels: map[string]string{
k8sutil.AppAttr: prepareAppName,
k8sutil.ClusterAttr: c.clusterInfo.Namespace,
OSDOverPVCLabelKey: osdProps.pvc.ClaimName,
},
Annotations: map[string]string{},
}
cephv1.GetOSDPrepareAnnotations(c.spec.Annotations).ApplyToObjectMeta(&podMeta)
cephv1.GetOSDPrepareLabels(c.spec.Labels).ApplyToObjectMeta(&podMeta)
// ceph-volume --dmcrypt uses cryptsetup that synchronizes with udev on
// host through semaphore
podSpec.HostIPC = osdProps.storeConfig.EncryptedDevice || osdProps.encrypted
return &v1.PodTemplateSpec{
ObjectMeta: podMeta,
Spec: podSpec,
}, nil
}
func (c *Cluster) provisionOSDContainer(osdProps osdProperties, copyBinariesMount v1.VolumeMount, provisionConfig *provisionConfig) (v1.Container, error) {
envVars := c.getConfigEnvVars(osdProps, k8sutil.DataDir, true)
// enable debug logging in the prepare job
envVars = append(envVars, setDebugLogLevelEnvVar(true))
// only 1 of device list, device filter, device path filter and use all devices can be specified. We prioritize in that order.
if len(osdProps.devices) > 0 {
configuredDevices := []config.ConfiguredDevice{}
for _, device := range osdProps.devices {
id := device.Name
if device.FullPath != "" {
id = device.FullPath
}
cd := config.ConfiguredDevice{
ID: id,
StoreConfig: config.ToStoreConfig(device.Config),
}
configuredDevices = append(configuredDevices, cd)
}
marshalledDevices, err := json.Marshal(configuredDevices)
if err != nil {
return v1.Container{}, errors.Wrapf(err, "failed to JSON marshal configured devices for node %q", osdProps.crushHostname)
}
envVars = append(envVars, dataDevicesEnvVar(string(marshalledDevices)))
} else if osdProps.selection.DeviceFilter != "" {
envVars = append(envVars, deviceFilterEnvVar(osdProps.selection.DeviceFilter))
} else if osdProps.selection.DevicePathFilter != "" {
envVars = append(envVars, devicePathFilterEnvVar(osdProps.selection.DevicePathFilter))
} else if osdProps.selection.GetUseAllDevices() {
envVars = append(envVars, deviceFilterEnvVar("all"))
}
envVars = append(envVars, v1.EnvVar{Name: "ROOK_CEPH_VERSION", Value: c.clusterInfo.CephVersion.CephVersionFormatted()})
envVars = append(envVars, crushDeviceClassEnvVar(osdProps.storeConfig.DeviceClass))
envVars = append(envVars, crushInitialWeightEnvVar(osdProps.storeConfig.InitialWeight))
if osdProps.metadataDevice != "" {
envVars = append(envVars, metadataDeviceEnvVar(osdProps.metadataDevice))
}
volumeMounts := append(controller.CephVolumeMounts(provisionConfig.DataPathMap, true), []v1.VolumeMount{
{Name: "devices", MountPath: "/dev"},
{Name: "udev", MountPath: "/run/udev"},
copyBinariesMount,
mon.CephSecretVolumeMount(),
}...)
if controller.LoopDevicesAllowed() {
envVars = append(envVars, v1.EnvVar{Name: "CEPH_VOLUME_ALLOW_LOOP_DEVICES", Value: "true"})
}
// If the OSD runs on PVC
if osdProps.onPVC() {
volumeMounts = append(volumeMounts, getPvcOSDBridgeMount(osdProps.pvc.ClaimName))
// The device list is read by the Rook CLI via environment variables so let's add them
configuredDevices := []config.ConfiguredDevice{
{
ID: fmt.Sprintf("/mnt/%s", osdProps.pvc.ClaimName),
StoreConfig: config.NewStoreConfig(),
},
}
if osdProps.onPVCWithMetadata() {
volumeMounts = append(volumeMounts, getPvcMetadataOSDBridgeMount(osdProps.metadataPVC.ClaimName))
configuredDevices = append(configuredDevices,
config.ConfiguredDevice{
ID: fmt.Sprintf("/srv/%s", osdProps.metadataPVC.ClaimName),
StoreConfig: config.NewStoreConfig(),
})
}
if osdProps.onPVCWithWal() {
volumeMounts = append(volumeMounts, getPvcWalOSDBridgeMount(osdProps.walPVC.ClaimName))
configuredDevices = append(configuredDevices,
config.ConfiguredDevice{
ID: fmt.Sprintf("/wal/%s", osdProps.walPVC.ClaimName),
StoreConfig: config.NewStoreConfig(),
})
}
marshalledDevices, err := json.Marshal(configuredDevices)
if err != nil {
return v1.Container{}, errors.Wrapf(err, "failed to JSON marshal configured devices for PVC %q", osdProps.crushHostname)
}
envVars = append(envVars, dataDevicesEnvVar(string(marshalledDevices)))
envVars = append(envVars, pvcBackedOSDEnvVar("true"))
envVars = append(envVars, encryptedDeviceEnvVar(osdProps.encrypted))
envVars = append(envVars, pvcNameEnvVar(osdProps.pvc.ClaimName))
if osdProps.encrypted {
// If a KMS is configured we populate volume mounts and env variables
if c.spec.Security.KeyManagementService.IsEnabled() {
if c.spec.Security.KeyManagementService.IsVaultKMS() {
_, volumeMountsTLS := kms.VaultVolumeAndMount(c.spec.Security.KeyManagementService.ConnectionDetails, "")
volumeMounts = append(volumeMounts, volumeMountsTLS)
}
envVars = append(envVars, kms.ConfigToEnvVar(c.spec)...)
if c.spec.Security.KeyManagementService.IsKMIPKMS() {
envVars = append(envVars, cephVolumeRawEncryptedEnvVarFromSecret(osdProps))
_, volmeMountsKMIP := kms.KMIPVolumeAndMount(c.spec.Security.KeyManagementService.TokenSecretName)
volumeMounts = append(volumeMounts, volmeMountsKMIP)
}
} else {
envVars = append(envVars, cephVolumeRawEncryptedEnvVarFromSecret(osdProps))
}
}
} else {
// If not running on PVC we mount the rootfs of the host to validate the presence of the LVM package
volumeMounts = append(volumeMounts, v1.VolumeMount{Name: "rootfs", MountPath: "/rootfs", ReadOnly: true})
}
// Add OSD ID as environment variables.
// When this env is set, prepare pod job will destroy this OSD.
if c.replaceOSD != nil {
// Compare pvc claim name in case of OSDs on PVC
if osdProps.onPVC() {
if strings.Contains(c.replaceOSD.Path, osdProps.pvc.ClaimName) {
envVars = append(envVars, replaceOSDIDEnvVar(fmt.Sprint(c.replaceOSD.ID)))
}
} else {
// Compare the node name in case of OSDs on disk
if c.replaceOSD.Node == osdProps.crushHostname {
envVars = append(envVars, replaceOSDIDEnvVar(fmt.Sprint(c.replaceOSD.ID)))
}
}
}
// run privileged always since we always mount /dev
privileged := true
runAsUser := int64(0)
runAsNonRoot := false
readOnlyRootFilesystem := false
osdProvisionContainer := v1.Container{
Command: []string{path.Join(rookBinariesMountPath, "rook")},
Args: []string{"ceph", "osd", "provision"},
Name: "provision",
Image: c.spec.CephVersion.Image,
ImagePullPolicy: controller.GetContainerImagePullPolicy(c.spec.CephVersion.ImagePullPolicy),
VolumeMounts: volumeMounts,
Env: envVars,
EnvFrom: getEnvFromSources(),
SecurityContext: &v1.SecurityContext{
Privileged: &privileged,
RunAsUser: &runAsUser,
RunAsNonRoot: &runAsNonRoot,
ReadOnlyRootFilesystem: &readOnlyRootFilesystem,
},
Resources: cephv1.GetPrepareOSDResources(c.spec.Resources),
}
return osdProvisionContainer, nil
} | }
k8sutil.RemoveDuplicateEnvVars(&podSpec)
| random_line_split |
provision_spec.go | /*
Copyright 2020 The Rook Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package osd
import (
"encoding/json"
"fmt"
"path"
"strings"
"github.com/pkg/errors"
cephv1 "github.com/rook/rook/pkg/apis/ceph.rook.io/v1"
kms "github.com/rook/rook/pkg/daemon/ceph/osd/kms"
"github.com/rook/rook/pkg/operator/ceph/cluster/mon"
"github.com/rook/rook/pkg/operator/ceph/cluster/osd/config"
"github.com/rook/rook/pkg/operator/ceph/controller"
"github.com/rook/rook/pkg/operator/k8sutil"
batch "k8s.io/api/batch/v1"
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
func (c *Cluster) makeJob(osdProps osdProperties, provisionConfig *provisionConfig) (*batch.Job, error) {
podSpec, err := c.provisionPodTemplateSpec(osdProps, v1.RestartPolicyOnFailure, provisionConfig)
if err != nil {
return nil, err
}
if osdProps.onPVC() {
// This is not needed in raw mode and 14.2.8 brings it
// but we still want to do this not to lose backward compatibility with lvm based OSDs...
podSpec.Spec.InitContainers = append(podSpec.Spec.InitContainers, c.getPVCInitContainer(osdProps))
if osdProps.onPVCWithMetadata() {
podSpec.Spec.InitContainers = append(podSpec.Spec.InitContainers, c.getPVCMetadataInitContainer("/srv", osdProps))
}
if osdProps.onPVCWithWal() {
podSpec.Spec.InitContainers = append(podSpec.Spec.InitContainers, c.getPVCWalInitContainer("/wal", osdProps))
}
} else {
podSpec.Spec.NodeSelector = map[string]string{v1.LabelHostname: osdProps.crushHostname}
}
job := &batch.Job{
ObjectMeta: metav1.ObjectMeta{
Name: k8sutil.TruncateNodeNameForJob(prepareAppNameFmt, osdProps.crushHostname),
Namespace: c.clusterInfo.Namespace,
Labels: map[string]string{
k8sutil.AppAttr: prepareAppName,
k8sutil.ClusterAttr: c.clusterInfo.Namespace,
},
},
Spec: batch.JobSpec{
Template: *podSpec,
},
}
if osdProps.onPVC() {
k8sutil.AddLabelToJob(OSDOverPVCLabelKey, osdProps.pvc.ClaimName, job)
k8sutil.AddLabelToJob(CephDeviceSetLabelKey, osdProps.deviceSetName, job)
k8sutil.AddLabelToPod(OSDOverPVCLabelKey, osdProps.pvc.ClaimName, &job.Spec.Template)
k8sutil.AddLabelToPod(CephDeviceSetLabelKey, osdProps.deviceSetName, &job.Spec.Template)
}
k8sutil.AddRookVersionLabelToJob(job)
controller.AddCephVersionLabelToJob(c.clusterInfo.CephVersion, job)
err = c.clusterInfo.OwnerInfo.SetControllerReference(job)
if err != nil {
return nil, err
}
// override the resources of all the init containers and main container with the expected osd prepare resources
c.applyResourcesToAllContainers(&podSpec.Spec, cephv1.GetPrepareOSDResources(c.spec.Resources))
return job, nil
}
// applyResourcesToAllContainers applies consistent resource requests for all containers and all init containers in the pod
func (c *Cluster) applyResourcesToAllContainers(spec *v1.PodSpec, resources v1.ResourceRequirements) {
for i := range spec.InitContainers {
spec.InitContainers[i].Resources = resources
}
for i := range spec.Containers {
spec.Containers[i].Resources = resources
}
}
func (c *Cluster) provisionPodTemplateSpec(osdProps osdProperties, restart v1.RestartPolicy, provisionConfig *provisionConfig) (*v1.PodTemplateSpec, error) {
copyBinariesVolume, copyBinariesContainer := c.getCopyBinariesContainer()
// ceph-volume is currently set up to use /etc/ceph/ceph.conf; this means no user config
// overrides will apply to ceph-volume, but this is unnecessary anyway
volumes := append(controller.PodVolumes(provisionConfig.DataPathMap, c.spec.DataDirHostPath, c.spec.DataDirHostPath, true), copyBinariesVolume)
// create a volume on /dev so the pod can access devices on the host
devVolume := v1.Volume{Name: "devices", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/dev"}}}
udevVolume := v1.Volume{Name: "udev", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/run/udev"}}}
volumes = append(volumes, []v1.Volume{
udevVolume,
devVolume,
mon.CephSecretVolume(),
}...)
if osdProps.onPVC() {
// Create volume config for PVCs
volumes = append(volumes, getPVCOSDVolumes(&osdProps, c.spec.DataDirHostPath, c.clusterInfo.Namespace, true)...)
if osdProps.encrypted {
// If a KMS is configured we populate
if c.spec.Security.KeyManagementService.IsEnabled() {
if c.spec.Security.KeyManagementService.IsVaultKMS() {
volumeTLS, _ := kms.VaultVolumeAndMount(c.spec.Security.KeyManagementService.ConnectionDetails, "")
volumes = append(volumes, volumeTLS)
}
if c.spec.Security.KeyManagementService.IsKMIPKMS() {
volumeKMIP, _ := kms.KMIPVolumeAndMount(c.spec.Security.KeyManagementService.TokenSecretName)
volumes = append(volumes, volumeKMIP)
}
}
}
} else {
// If not running on PVC we mount the rootfs of the host to validate the presence of the LVM package
rootFSVolume := v1.Volume{Name: "rootfs", VolumeSource: v1.VolumeSource{HostPath: &v1.HostPathVolumeSource{Path: "/"}}}
volumes = append(volumes, rootFSVolume)
}
if len(volumes) == 0 {
return nil, errors.New("empty volumes")
}
provisionContainer, err := c.provisionOSDContainer(osdProps, copyBinariesContainer.VolumeMounts[0], provisionConfig)
if err != nil {
return nil, errors.Wrap(err, "failed to generate OSD provisioning container")
}
podSpec := v1.PodSpec{
ServiceAccountName: serviceAccountName,
InitContainers: []v1.Container{
*copyBinariesContainer,
},
Containers: []v1.Container{
provisionContainer,
},
RestartPolicy: restart,
Volumes: volumes,
HostNetwork: c.spec.Network.IsHost(),
PriorityClassName: cephv1.GetOSDPriorityClassName(c.spec.PriorityClassNames),
SchedulerName: osdProps.schedulerName,
}
if c.spec.Network.IsHost() {
podSpec.DNSPolicy = v1.DNSClusterFirstWithHostNet
}
if osdProps.onPVC() {
c.applyAllPlacementIfNeeded(&podSpec)
// apply storageClassDeviceSets.preparePlacement
osdProps.getPreparePlacement().ApplyToPodSpec(&podSpec)
} else {
c.applyAllPlacementIfNeeded(&podSpec)
// apply spec.placement.prepareosd
c.spec.Placement[cephv1.KeyOSDPrepare].ApplyToPodSpec(&podSpec)
}
k8sutil.RemoveDuplicateEnvVars(&podSpec)
podMeta := metav1.ObjectMeta{
Name: AppName,
Labels: map[string]string{
k8sutil.AppAttr: prepareAppName,
k8sutil.ClusterAttr: c.clusterInfo.Namespace,
OSDOverPVCLabelKey: osdProps.pvc.ClaimName,
},
Annotations: map[string]string{},
}
cephv1.GetOSDPrepareAnnotations(c.spec.Annotations).ApplyToObjectMeta(&podMeta)
cephv1.GetOSDPrepareLabels(c.spec.Labels).ApplyToObjectMeta(&podMeta)
// ceph-volume --dmcrypt uses cryptsetup that synchronizes with udev on
// host through semaphore
podSpec.HostIPC = osdProps.storeConfig.EncryptedDevice || osdProps.encrypted
return &v1.PodTemplateSpec{
ObjectMeta: podMeta,
Spec: podSpec,
}, nil
}
func (c *Cluster) | (osdProps osdProperties, copyBinariesMount v1.VolumeMount, provisionConfig *provisionConfig) (v1.Container, error) {
envVars := c.getConfigEnvVars(osdProps, k8sutil.DataDir, true)
// enable debug logging in the prepare job
envVars = append(envVars, setDebugLogLevelEnvVar(true))
// only 1 of device list, device filter, device path filter and use all devices can be specified. We prioritize in that order.
if len(osdProps.devices) > 0 {
configuredDevices := []config.ConfiguredDevice{}
for _, device := range osdProps.devices {
id := device.Name
if device.FullPath != "" {
id = device.FullPath
}
cd := config.ConfiguredDevice{
ID: id,
StoreConfig: config.ToStoreConfig(device.Config),
}
configuredDevices = append(configuredDevices, cd)
}
marshalledDevices, err := json.Marshal(configuredDevices)
if err != nil {
return v1.Container{}, errors.Wrapf(err, "failed to JSON marshal configured devices for node %q", osdProps.crushHostname)
}
envVars = append(envVars, dataDevicesEnvVar(string(marshalledDevices)))
} else if osdProps.selection.DeviceFilter != "" {
envVars = append(envVars, deviceFilterEnvVar(osdProps.selection.DeviceFilter))
} else if osdProps.selection.DevicePathFilter != "" {
envVars = append(envVars, devicePathFilterEnvVar(osdProps.selection.DevicePathFilter))
} else if osdProps.selection.GetUseAllDevices() {
envVars = append(envVars, deviceFilterEnvVar("all"))
}
envVars = append(envVars, v1.EnvVar{Name: "ROOK_CEPH_VERSION", Value: c.clusterInfo.CephVersion.CephVersionFormatted()})
envVars = append(envVars, crushDeviceClassEnvVar(osdProps.storeConfig.DeviceClass))
envVars = append(envVars, crushInitialWeightEnvVar(osdProps.storeConfig.InitialWeight))
if osdProps.metadataDevice != "" {
envVars = append(envVars, metadataDeviceEnvVar(osdProps.metadataDevice))
}
volumeMounts := append(controller.CephVolumeMounts(provisionConfig.DataPathMap, true), []v1.VolumeMount{
{Name: "devices", MountPath: "/dev"},
{Name: "udev", MountPath: "/run/udev"},
copyBinariesMount,
mon.CephSecretVolumeMount(),
}...)
if controller.LoopDevicesAllowed() {
envVars = append(envVars, v1.EnvVar{Name: "CEPH_VOLUME_ALLOW_LOOP_DEVICES", Value: "true"})
}
// If the OSD runs on PVC
if osdProps.onPVC() {
volumeMounts = append(volumeMounts, getPvcOSDBridgeMount(osdProps.pvc.ClaimName))
// The device list is read by the Rook CLI via environment variables so let's add them
configuredDevices := []config.ConfiguredDevice{
{
ID: fmt.Sprintf("/mnt/%s", osdProps.pvc.ClaimName),
StoreConfig: config.NewStoreConfig(),
},
}
if osdProps.onPVCWithMetadata() {
volumeMounts = append(volumeMounts, getPvcMetadataOSDBridgeMount(osdProps.metadataPVC.ClaimName))
configuredDevices = append(configuredDevices,
config.ConfiguredDevice{
ID: fmt.Sprintf("/srv/%s", osdProps.metadataPVC.ClaimName),
StoreConfig: config.NewStoreConfig(),
})
}
if osdProps.onPVCWithWal() {
volumeMounts = append(volumeMounts, getPvcWalOSDBridgeMount(osdProps.walPVC.ClaimName))
configuredDevices = append(configuredDevices,
config.ConfiguredDevice{
ID: fmt.Sprintf("/wal/%s", osdProps.walPVC.ClaimName),
StoreConfig: config.NewStoreConfig(),
})
}
marshalledDevices, err := json.Marshal(configuredDevices)
if err != nil {
return v1.Container{}, errors.Wrapf(err, "failed to JSON marshal configured devices for PVC %q", osdProps.crushHostname)
}
envVars = append(envVars, dataDevicesEnvVar(string(marshalledDevices)))
envVars = append(envVars, pvcBackedOSDEnvVar("true"))
envVars = append(envVars, encryptedDeviceEnvVar(osdProps.encrypted))
envVars = append(envVars, pvcNameEnvVar(osdProps.pvc.ClaimName))
if osdProps.encrypted {
// If a KMS is configured we populate volume mounts and env variables
if c.spec.Security.KeyManagementService.IsEnabled() {
if c.spec.Security.KeyManagementService.IsVaultKMS() {
_, volumeMountsTLS := kms.VaultVolumeAndMount(c.spec.Security.KeyManagementService.ConnectionDetails, "")
volumeMounts = append(volumeMounts, volumeMountsTLS)
}
envVars = append(envVars, kms.ConfigToEnvVar(c.spec)...)
if c.spec.Security.KeyManagementService.IsKMIPKMS() {
envVars = append(envVars, cephVolumeRawEncryptedEnvVarFromSecret(osdProps))
_, volmeMountsKMIP := kms.KMIPVolumeAndMount(c.spec.Security.KeyManagementService.TokenSecretName)
volumeMounts = append(volumeMounts, volmeMountsKMIP)
}
} else {
envVars = append(envVars, cephVolumeRawEncryptedEnvVarFromSecret(osdProps))
}
}
} else {
// If not running on PVC we mount the rootfs of the host to validate the presence of the LVM package
volumeMounts = append(volumeMounts, v1.VolumeMount{Name: "rootfs", MountPath: "/rootfs", ReadOnly: true})
}
// Add OSD ID as environment variables.
// When this env is set, prepare pod job will destroy this OSD.
if c.replaceOSD != nil {
// Compare pvc claim name in case of OSDs on PVC
if osdProps.onPVC() {
if strings.Contains(c.replaceOSD.Path, osdProps.pvc.ClaimName) {
envVars = append(envVars, replaceOSDIDEnvVar(fmt.Sprint(c.replaceOSD.ID)))
}
} else {
// Compare the node name in case of OSDs on disk
if c.replaceOSD.Node == osdProps.crushHostname {
envVars = append(envVars, replaceOSDIDEnvVar(fmt.Sprint(c.replaceOSD.ID)))
}
}
}
// run privileged always since we always mount /dev
privileged := true
runAsUser := int64(0)
runAsNonRoot := false
readOnlyRootFilesystem := false
osdProvisionContainer := v1.Container{
Command: []string{path.Join(rookBinariesMountPath, "rook")},
Args: []string{"ceph", "osd", "provision"},
Name: "provision",
Image: c.spec.CephVersion.Image,
ImagePullPolicy: controller.GetContainerImagePullPolicy(c.spec.CephVersion.ImagePullPolicy),
VolumeMounts: volumeMounts,
Env: envVars,
EnvFrom: getEnvFromSources(),
SecurityContext: &v1.SecurityContext{
Privileged: &privileged,
RunAsUser: &runAsUser,
RunAsNonRoot: &runAsNonRoot,
ReadOnlyRootFilesystem: &readOnlyRootFilesystem,
},
Resources: cephv1.GetPrepareOSDResources(c.spec.Resources),
}
return osdProvisionContainer, nil
}
| provisionOSDContainer | identifier_name |
lib.rs | //! A library that makes linear color calculations and conversion easy and
//! accessible for anyone. It uses the type system to enforce correctness and
//! to avoid mistakes, such as mixing incompatible color types.
//!
//! # It's Never "Just RGB"
//!
//! Colors in, for example, images, are often "gamma corrected", or converted
//! using some non-linear transfer function into a format like sRGB before being
//! stored or displayed. This is done as a compression method and to prevent banding,
//! and is also a bit of a legacy from the ages of the CRT monitors, where the
//! output from the electron gun was nonlinear. The problem is that these formats
//! are *non-linear color spaces*, which means that many operations that you may want
//! to perform on colors (addition, subtraction, multiplication, linear interpolation,
//! etc.) will work unexpectedly when performed in such a non-linear color space. As
//! such, the compression has to be reverted to restore linearity and make sure that
//! many operations on the colors are accurate.
//!
//! For example, this does not work:
//!
//! ```rust
//! // An alias for Rgb<Srgb>, which is what most pictures store.
//! use palette::Srgb;
//!
//! let orangeish = Srgb::new(1.0, 0.6, 0.0);
//! let blueish = Srgb::new(0.0, 0.2, 1.0);
//! // let whateve_it_becomes = orangeish + blueish;
//! ```
//!
//! Instead, they have to be made linear before adding:
//!
//! ```rust
//! // An alias for Rgb<Srgb>, which is what most pictures store.
//! use palette::{Pixel, Srgb};
//!
//! let orangeish = Srgb::new(1.0, 0.6, 0.0).into_linear();
//! let blueish = Srgb::new(0.0, 0.2, 1.0).into_linear();
//! let whateve_it_becomes = orangeish + blueish;
//!
//! // Encode the result back into sRGB and create a byte array
//! let pixel: [u8; 3] = Srgb::from_linear(whateve_it_becomes)
//! .into_format()
//! .into_raw();
//! ```
//!
//! But, even when colors *are* 'linear', there is yet more to explore.
//!
//! The most common way that colors are defined, especially for computer
//! storage, is in terms of so-called *tristimulus values*, meaning that
//! all colors are defined as a vector of three values which may represent
//! any color. The reason colors can generally be stored as only a three
//! dimensional vector, and not an *n* dimensional one, where *n* is some
//! number of possible frequencies of light, is because our eyes contain
//! only three types of cones. Each of these cones have different sensitivity
//! curves to different wavelengths of light, giving us three "dimensions"
//! of sensitivity to color. These cones are often called the S, M, and L
//! (for small, medium, and large) cones, and their sensitivity curves
//! *roughly* position them as most sensitive to "red", "green", and "blue"
//! parts of the spectrum. As such, we can choose only three values to
//! represent any possible color that a human is able to see. An interesting
//! consequence of this is that humans can see two different objects which
//! are emitting *completely different actual light spectra* as the *exact
//! same perceptual color* so long as those wavelengths, when transformed
//! by the sensitivity curves of our cones, end up resulting in the same
//! S, M, and L values sent to our brains.
//!
//! A **color space** (which simply refers to a set of standards by which
//! we map a set of arbitrary values to real-world colors) which uses
//! tristimulus values is often defined in terms of
//!
//! 1. Its **primaries**
//! 2. Its **reference white** or **white point**
//!
//! The **primaries** together represent the total *gamut* (i.e. displayable
//! range of colors) of that color space, while the **white point** defines
//! which concrete tristimulus value corresponds to a real, physical white
//! reflecting object being lit by a known light source and observed by the
//! 'standard observer' (i.e. a standardized model of human color perception).
//!
//! The informal "RGB" color space is such a tristimulus color space, since
//! it is defined by three values, but it is underspecified since we don't
//! know which primaries are being used (i.e. how exactly are the canonical
//! "red", "green", and "blue" defined?), nor its white point. In most cases,
//! when people talk about "RGB" or "Linear RGB" colors, what they are
//! *actually* talking about is the "Linear sRGB" color space, which uses the
//! primaries and white point defined in the sRGB standard, but which *does
//! not* have the (non-linear) sRGB *transfer function* applied.
//!
//! This library takes these things into account, and attempts to provide an
//! interface which will let those who don't care so much about the intricacies
//! of color still use colors correctly, while also allowing the advanced user
//! a high degree of flexibility in how they use it.
//!
//! # Transparency
//!
//! There are many cases where pixel transparency is important, but there are
//! also many cases where it becomes a dead weight, if it's always stored
//! together with the color, but not used. Palette has therefore adopted a
//! structure where the transparency component (alpha) is attachable using the
//! [`Alpha`](crate::Alpha) type, instead of having copies of each color
//! space.
//!
//! This approach comes with the extra benefit of allowing operations to
//! selectively affect the alpha component:
//!
//! ```rust
//! use palette::{LinSrgb, LinSrgba};
//!
//! let mut c1 = LinSrgba::new(1.0, 0.5, 0.5, 0.8);
//! let c2 = LinSrgb::new(0.5, 1.0, 1.0);
//!
//! c1.color = c1.color * c2; //Leave the alpha as it is
//! c1.blue += 0.2; //The color components can easily be accessed
//! c1 = c1 * 0.5; //Scale both the color and the alpha
//! ```
//!
//! # A Basic Workflow
//!
//! The overall workflow can be divided into three steps, where the first and
//! last may be taken care of by other parts of the application:
//!
//! ```text
//! Decoding -> Processing -> Encoding
//! ```
//!
//! ## 1. Decoding
//!
//! Find out what the source format is and convert it to a linear color space.
//! There may be a specification, such as when working with SVG or CSS.
//!
//! When working with RGB or gray scale (luma):
//!
//! * If you are asking your user to enter an RGB value, you are in a gray zone
//! where it depends on the context. It's usually safe to assume sRGB, but
//! sometimes it's already linear.
//!
//! * If you are decoding an image, there may be some meta data that gives you
//! the necessary details. Otherwise it's most commonly sRGB. Usually you
//! will end up with a slice or vector with RGB bytes, which can easily be
//! converted to Palette colors:
//!
//! ```rust
//! # let mut image_buffer: Vec<u8> = vec![];
//! use palette::{Srgb, Pixel};
//!
//! // This works for any (even non-RGB) color type that can have the
//! // buffer element type as component.
//! let color_buffer: &mut [Srgb<u8>] = Pixel::from_raw_slice_mut(&mut image_buffer);
//! ```
//!
//! * If you are getting your colors from the GPU, in a game or other graphical
//! application, or if they are otherwise generated by the application, then
//! chances are that they are already linear. Still, make sure to check that
//! they are not being encoded somewhere.
//!
//! When working with other colors:
//!
//! * For HSL, HSV, HWB: Check if they are based on any other color space than
//! sRGB, such as Adobe or Apple RGB.
//!
//! * For any of the CIE color spaces, check for a specification of white point
//! and light source. These are necessary for converting to RGB and other
//! colors, that depend on perception and "viewing devices". Common defaults
//! are the D65 light source and the sRGB white point. The Palette defaults
//! should take you far.
//!
//! ## 2. Processing
//!
//! When your color has been decoded into some Palette type, it's ready for
//! processing. This includes things like blending, hue shifting, darkening and
//! conversion to other formats. Just make sure that your non-linear RGB is
//! made linear first (`my_srgb.into_linear()`), to make the operations
//! available.
//!
//! Different color spaces have different capabilities, pros and cons. You may
//! have to experiment a bit (or look at the example programs) to find out what
//! gives the desired result.
//!
//! ## 3. Encoding
//!
//! When the desired processing is done, it's time to encode the colors back
//! into some image format. The same rules apply as for the decoding, but the
//! process reversed.
//!
//! # Working with Raw Data
//!
//! Oftentimes, pixel data is stored in a raw buffer such as a `[u8; 3]`. The
//! [`Pixel`](crate::encoding::pixel::Pixel) trait allows for easy interoperation between
//! Palette colors and other crates or systems. `from_raw` can be used to
//! convert into a Palette color, `into_format` converts from `Srgb<u8>` to
//! `Srgb<f32>`, and finally `into_raw` to convert from a Palette color back to
//! a `[u8;3]`.
//!
//! ```rust
//! use approx::assert_relative_eq;
//! use palette::{Srgb, Pixel};
//!
//! let buffer = [255, 0, 255];
//! let raw = Srgb::from_raw(&buffer);
//! assert_eq!(raw, &Srgb::<u8>::new(255u8, 0, 255));
//!
//! let raw_float: Srgb<f32> = raw.into_format();
//! assert_relative_eq!(raw_float, Srgb::new(1.0, 0.0, 1.0));
//!
//! let raw: [u8; 3] = Srgb::into_raw(raw_float.into_format());
//! assert_eq!(raw, buffer);
//! ```
// Keep the standard library when running tests, too
#![cfg_attr(all(not(feature = "std"), not(test)), no_std)]
#![doc(html_root_url = "https://docs.rs/palette/0.5.0/palette/")]
#![warn(missing_docs)]
#[cfg(any(feature = "std", test))]
extern crate core;
#[cfg_attr(test, macro_use)]
extern crate approx;
#[macro_use]
extern crate palette_derive;
#[cfg(feature = "phf")]
extern crate phf;
#[cfg(feature = "serializing")]
#[macro_use]
extern crate serde;
#[cfg(all(test, feature = "serializing"))]
extern crate serde_json;
use float::Float;
use luma::Luma;
pub use alpha::{Alpha, WithAlpha};
pub use blend::Blend;
#[cfg(feature = "std")]
pub use gradient::Gradient;
pub use hsl::{Hsl, Hsla};
pub use hsv::{Hsv, Hsva};
pub use hwb::{Hwb, Hwba};
pub use lab::{Lab, Laba};
pub use lch::{Lch, Lcha};
pub use luma::{GammaLuma, GammaLumaa, LinLuma, LinLumaa, SrgbLuma, SrgbLumaa};
pub use rgb::{GammaSrgb, GammaSrgba, LinSrgb, LinSrgba, Packed, RgbChannels, Srgb, Srgba};
pub use xyz::{Xyz, Xyza};
pub use yxy::{Yxy, Yxya};
pub use color_difference::ColorDifference;
pub use component::*;
pub use convert::{FromColor, IntoColor};
pub use encoding::pixel::Pixel;
pub use hues::{LabHue, RgbHue};
pub use matrix::Mat3;
pub use relative_contrast::{contrast_ratio, RelativeContrast};
//Helper macro for checking ranges and clamping.
#[cfg(test)]
//Checks that a color type clamps correctly: `limited` fields are clamped at
//both ends of their range, `limited_min` fields only at the lower end, and
//`unlimited` fields are never clamped. Values below, within and above the
//declared ranges are generated and compared against the expected results.
macro_rules! assert_ranges {
    //Fold the listed identifiers into a nested tuple pattern that matches the
    //nesting produced by the chained `.zip(..)` calls in the main rule below.
    (@make_tuple $first:pat, $next:ident,) => (($first, $next));
    (@make_tuple $first:pat, $next:ident, $($rest:ident,)*) => (
        assert_ranges!(@make_tuple ($first, $next), $($rest,)*)
    );
    (
        $ty:ident < $($ty_params:ty),+ >;
        limited {$($limited:ident: $limited_from:expr => $limited_to:expr),+}
        limited_min {$($limited_min:ident: $limited_min_from:expr => $limited_min_to:expr),*}
        unlimited {$($unlimited:ident: $unlimited_from:expr => $unlimited_to:expr),*}
    ) => (
        {
            use core::iter::repeat;
            use crate::Limited;
            //Below range: every component must clamp up to its lower bound
            //and `is_valid` must report false.
            {
                print!("checking below limits ... ");
                $(
                    let from = $limited_from;
                    let to = $limited_to;
                    let diff = to - from;
                    let $limited = (1..11).map(|i| from - (i as f64 / 10.0) * diff);
                )+
                $(
                    let from = $limited_min_from;
                    let to = $limited_min_to;
                    let diff = to - from;
                    let $limited_min = (1..11).map(|i| from - (i as f64 / 10.0) * diff);
                )*
                $(
                    let from = $unlimited_from;
                    let to = $unlimited_to;
                    let diff = to - from;
                    let $unlimited = (1..11).map(|i| from - (i as f64 / 10.0) * diff);
                )*
                for assert_ranges!(@make_tuple (), $($limited,)+ $($limited_min,)* $($unlimited,)* ) in repeat(()) $(.zip($limited))+ $(.zip($limited_min))* $(.zip($unlimited))* {
                    let c: $ty<$($ty_params),+> = $ty {
                        $($limited: $limited.into(),)+
                        $($limited_min: $limited_min.into(),)*
                        $($unlimited: $unlimited.into(),)*
                        ..$ty::default() //This prevents exhaustiveness checking
                    };
                    let clamped = c.clamp();
                    let expected: $ty<$($ty_params),+> = $ty {
                        $($limited: $limited_from.into(),)+
                        $($limited_min: $limited_min_from.into(),)*
                        $($unlimited: $unlimited.into(),)*
                        ..$ty::default() //This prevents exhaustiveness checking
                    };
                    assert!(!c.is_valid());
                    assert_relative_eq!(clamped, expected);
                }
                println!("ok")
            }
            //In range: clamping must be a no-op and `is_valid` must be true.
            {
                print!("checking within limits ... ");
                $(
                    let from = $limited_from;
                    let to = $limited_to;
                    let diff = to - from;
                    let $limited = (0..11).map(|i| from + (i as f64 / 10.0) * diff);
                )+
                $(
                    let from = $limited_min_from;
                    let to = $limited_min_to;
                    let diff = to - from;
                    let $limited_min = (0..11).map(|i| from + (i as f64 / 10.0) * diff);
                )*
                $(
                    let from = $unlimited_from;
                    let to = $unlimited_to;
                    let diff = to - from;
                    let $unlimited = (0..11).map(|i| from + (i as f64 / 10.0) * diff);
                )*
                for assert_ranges!(@make_tuple (), $($limited,)+ $($limited_min,)* $($unlimited,)* ) in repeat(()) $(.zip($limited))+ $(.zip($limited_min))* $(.zip($unlimited))* {
                    let c: $ty<$($ty_params),+> = $ty {
                        $($limited: $limited.into(),)+
                        $($limited_min: $limited_min.into(),)*
                        $($unlimited: $unlimited.into(),)*
                        ..$ty::default() //This prevents exhaustiveness checking
                    };
                    let clamped = c.clamp();
                    assert!(c.is_valid());
                    assert_relative_eq!(clamped, c);
                }
                println!("ok")
            }
            //Above range: `limited` components clamp down to their upper
            //bound; `limited_min` and `unlimited` components pass through.
            {
                print!("checking above limits ... ");
                $(
                    let from = $limited_from;
                    let to = $limited_to;
                    let diff = to - from;
                    let $limited = (1..11).map(|i| to + (i as f64 / 10.0) * diff);
                )+
                $(
                    let from = $limited_min_from;
                    let to = $limited_min_to;
                    let diff = to - from;
                    let $limited_min = (1..11).map(|i| to + (i as f64 / 10.0) * diff);
                )*
                $(
                    let from = $unlimited_from;
                    let to = $unlimited_to;
                    let diff = to - from;
                    let $unlimited = (1..11).map(|i| to + (i as f64 / 10.0) * diff);
                )*
                for assert_ranges!(@make_tuple (), $($limited,)+ $($limited_min,)* $($unlimited,)* ) in repeat(()) $(.zip($limited))+ $(.zip($limited_min))* $(.zip($unlimited))* {
                    let c: $ty<$($ty_params),+> = $ty {
                        $($limited: $limited.into(),)+
                        $($limited_min: $limited_min.into(),)*
                        $($unlimited: $unlimited.into(),)*
                        ..$ty::default() //This prevents exhaustiveness checking
                    };
                    let clamped = c.clamp();
                    let expected: $ty<$($ty_params),+> = $ty {
                        $($limited: $limited_to.into(),)+
                        $($limited_min: $limited_min.into(),)*
                        $($unlimited: $unlimited.into(),)*
                        ..$ty::default() //This prevents exhaustiveness checking
                    };
                    assert!(!c.is_valid());
                    assert_relative_eq!(clamped, expected);
                }
                println!("ok")
            }
        }
    );
}
#[macro_use]
mod macros;
pub mod blend;
#[cfg(feature = "std")]
pub mod gradient;
#[cfg(feature = "named")]
pub mod named;
#[cfg(feature = "random")]
mod random_sampling;
mod alpha;
mod hsl;
mod hsv;
mod hwb;
mod lab;
mod lch;
pub mod luma;
pub mod rgb;
mod xyz;
mod yxy;
mod hues;
pub mod chromatic_adaptation;
mod color_difference;
mod component;
pub mod convert;
pub mod encoding;
mod equality;
mod relative_contrast;
pub mod white_point;
pub mod float;
#[doc(hidden)]
pub mod matrix;
/// Clamp `v` to the inclusive range `[min, max]`.
///
/// Values below `min` become `min`, values above `max` become `max`, and
/// anything else (including values incomparable under `PartialOrd`) is
/// returned unchanged.
fn clamp<T: PartialOrd>(v: T, min: T, max: T) -> T {
    if v < min {
        return min;
    }
    if v > max {
        return max;
    }
    v
}
/// A trait for clamping and checking if colors are within their ranges.
pub trait Limited {
    /// Check if the color's components are within the expected ranges.
    fn is_valid(&self) -> bool;
    /// Return a new color where the components have been clamped to the
    /// nearest valid values.
    ///
    /// `self` is left untouched; see [`clamp_self`](Limited::clamp_self) for
    /// the in-place variant.
    fn clamp(&self) -> Self;
    /// Clamp the color's components to the nearest valid values, in place.
    fn clamp_self(&mut self);
}
/// A trait for linear color interpolation.
///
/// ```
/// use approx::assert_relative_eq;
///
/// use palette::{LinSrgb, Mix};
///
/// let a = LinSrgb::new(0.0, 0.5, 1.0);
/// let b = LinSrgb::new(1.0, 0.5, 0.0);
///
/// assert_relative_eq!(a.mix(&b, 0.0), a);
/// assert_relative_eq!(a.mix(&b, 0.5), LinSrgb::new(0.5, 0.5, 0.5));
/// assert_relative_eq!(a.mix(&b, 1.0), b);
/// ```
pub trait Mix {
    /// The type of the mixing factor.
    type Scalar: Float;
    /// Mix the color with another color, by `factor`.
    ///
    /// `factor` should be between `0.0` and `1.0`, where `0.0` will result in
    /// the same color as `self` and `1.0` will result in the same color as
    /// `other`.
    fn mix(&self, other: &Self, factor: Self::Scalar) -> Self;
}
/// The `Shade` trait allows a color to be lightened or darkened.
///
/// ```
/// use approx::assert_relative_eq;
///
/// use palette::{LinSrgb, Shade};
///
/// let a = LinSrgb::new(0.4, 0.4, 0.4);
/// let b = LinSrgb::new(0.6, 0.6, 0.6);
///
/// assert_relative_eq!(a.lighten(0.1), b.darken(0.1));
/// ```
pub trait Shade: Sized {
/// The type of the lighten/darken amount.
type Scalar: Float;
/// Lighten the color by `amount`.
fn lighten(&self, amount: Self::Scalar) -> Self;
/// Darken the color by `amount`.
fn | (&self, amount: Self::Scalar) -> Self {
self.lighten(-amount)
}
}
/// A trait for colors where a hue may be calculated.
///
/// ```
/// use approx::assert_relative_eq;
///
/// use palette::{GetHue, LinSrgb};
///
/// let red = LinSrgb::new(1.0f32, 0.0, 0.0);
/// let green = LinSrgb::new(0.0f32, 1.0, 0.0);
/// let blue = LinSrgb::new(0.0f32, 0.0, 1.0);
/// let gray = LinSrgb::new(0.5f32, 0.5, 0.5);
///
/// assert_relative_eq!(red.get_hue().unwrap(), 0.0.into());
/// assert_relative_eq!(green.get_hue().unwrap(), 120.0.into());
/// assert_relative_eq!(blue.get_hue().unwrap(), 240.0.into());
/// assert_eq!(gray.get_hue(), None);
/// ```
pub trait GetHue {
    /// The kind of hue unit this color space uses.
    ///
    /// The hue is most commonly calculated as an angle around a color circle
    /// and may not always be uniform between color spaces. It's therefore not
    /// recommended to take one type of hue and apply it to a color space that
    /// expects another.
    type Hue;
    /// Calculate a hue if possible.
    ///
    /// Colors in the gray scale have no well-defined hue and should
    /// preferably return `None`.
    fn get_hue(&self) -> Option<Self::Hue>;
}
/// A trait for colors where the hue can be manipulated without conversion.
///
/// The hue type itself comes from the [`GetHue`] supertrait.
pub trait Hue: GetHue {
    /// Return a new copy of `self`, but with a specific hue.
    fn with_hue<H: Into<Self::Hue>>(&self, hue: H) -> Self;
    /// Return a new copy of `self`, but with the hue shifted by `amount`.
    fn shift_hue<H: Into<Self::Hue>>(&self, amount: H) -> Self;
}
/// A trait for colors where the saturation (or chroma) can be manipulated
/// without conversion.
///
/// ```
/// use approx::assert_relative_eq;
///
/// use palette::{Hsv, Saturate};
///
/// let a = Hsv::new(0.0, 0.25, 1.0);
/// let b = Hsv::new(0.0, 1.0, 1.0);
///
/// assert_relative_eq!(a.saturate(1.0), b.desaturate(0.5));
/// ```
pub trait Saturate: Sized {
    /// The type of the (de)saturation factor.
    type Scalar: Float;
    /// Increase the saturation by `factor`.
    fn saturate(&self, factor: Self::Scalar) -> Self;
    /// Decrease the saturation by `factor`.
    ///
    /// The default implementation saturates by the negated factor.
    fn desaturate(&self, factor: Self::Scalar) -> Self {
        self.saturate(-factor)
    }
}
/// Perform a unary or binary operation on each component of a color.
pub trait ComponentWise {
    /// The scalar type for color components.
    type Scalar;
    /// Perform a binary operation on this and an other color.
    ///
    /// `f` is applied componentwise to `self` and `other`, producing the
    /// components of the returned color.
    fn component_wise<F: FnMut(Self::Scalar, Self::Scalar) -> Self::Scalar>(
        &self,
        other: &Self,
        f: F,
    ) -> Self;
    /// Perform a unary operation on this color.
    ///
    /// `f` is applied to each component of `self` in turn.
    fn component_wise_self<F: FnMut(Self::Scalar) -> Self::Scalar>(&self, f: F) -> Self;
}
/// A trait for infallible conversion from `f64`. The conversion may be lossy.
pub trait FromF64 {
    /// Creates a value from an `f64` constant.
    ///
    /// This never fails, but precision may be lost (e.g. when narrowing to
    /// `f32`).
    fn from_f64(c: f64) -> Self;
}
impl FromF64 for f32 {
    #[inline]
    fn from_f64(c: f64) -> Self {
        // Narrowing cast; lossy for values with no exact `f32` representation.
        c as f32
    }
}
impl FromF64 for f64 {
    #[inline]
    fn from_f64(c: f64) -> Self {
        // Identity: the value is already an `f64`.
        c
    }
}
/// A convenience function to convert a constant number to Float Type
#[inline]
fn from_f64<T: FromF64>(c: f64) -> T {
T::from_f64(c)
}
| darken | identifier_name |
lib.rs | //! A library that makes linear color calculations and conversion easy and
//! accessible for anyone. It uses the type system to enforce correctness and
//! to avoid mistakes, such as mixing incompatible color types.
//!
//! # It's Never "Just RGB"
//!
//! Colors in, for example, images, are often "gamma corrected", or converted
//! using some non-linear transfer function into a format like sRGB before being
//! stored or displayed. This is done as a compression method and to prevent banding,
//! and is also a bit of a legacy from the ages of the CRT monitors, where the
//! output from the electron gun was nonlinear. The problem is that these formats
//! are *non-linear color spaces*, which means that many operations that you may want
//! to perform on colors (addition, subtraction, multiplication, linear interpolation,
//! etc.) will work unexpectedly when performed in such a non-linear color space. As
//! such, the compression has to be reverted to restore linearity and make sure that
//! many operations on the colors are accurate.
//!
//! For example, this does not work:
//!
//! ```rust
//! // An alias for Rgb<Srgb>, which is what most pictures store.
//! use palette::Srgb;
//!
//! let orangeish = Srgb::new(1.0, 0.6, 0.0);
//! let blueish = Srgb::new(0.0, 0.2, 1.0);
//! // let whatever_it_becomes = orangeish + blueish;
//! ```
//!
//! Instead, they have to be made linear before adding:
//!
//! ```rust
//! // An alias for Rgb<Srgb>, which is what most pictures store.
//! use palette::{Pixel, Srgb};
//!
//! let orangeish = Srgb::new(1.0, 0.6, 0.0).into_linear();
//! let blueish = Srgb::new(0.0, 0.2, 1.0).into_linear();
//! let whatever_it_becomes = orangeish + blueish;
//!
//! // Encode the result back into sRGB and create a byte array
//! let pixel: [u8; 3] = Srgb::from_linear(whatever_it_becomes)
//!     .into_format()
//!     .into_raw();
//! ```
//!
//! But, even when colors *are* 'linear', there is yet more to explore.
//!
//! The most common way that colors are defined, especially for computer
//! storage, is in terms of so-called *tristimulus values*, meaning that
//! all colors are defined as a vector of three values which may represent
//! any color. The reason colors can generally be stored as only a three
//! dimensional vector, and not an *n* dimensional one, where *n* is some
//! number of possible frequencies of light, is because our eyes contain
//! only three types of cones. Each of these cones have different sensitivity
//! curves to different wavelengths of light, giving us three "dimensions"
//! of sensitivity to color. These cones are often called the S, M, and L
//! (for small, medium, and large) cones, and their sensitivity curves
//! *roughly* position them as most sensitive to "red", "green", and "blue"
//! parts of the spectrum. As such, we can choose only three values to
//! represent any possible color that a human is able to see. An interesting
//! consequence of this is that humans can see two different objects which
//! are emitting *completely different actual light spectra* as the *exact
//! same perceptual color* so long as those wavelengths, when transformed
//! by the sensitivity curves of our cones, end up resulting in the same
//! S, M, and L values sent to our brains.
//!
//! A **color space** (which simply refers to a set of standards by which
//! we map a set of arbitrary values to real-world colors) which uses
//! tristimulus values is often defined in terms of
//!
//! 1. Its **primaries**
//! 2. Its **reference white** or **white point**
//!
//! The **primaries** together represent the total *gamut* (i.e. displayable
//! range of colors) of that color space, while the **white point** defines
//! which concrete tristimulus value corresponds to a real, physical white
//! reflecting object being lit by a known light source and observed by the
//! 'standard observer' (i.e. a standardized model of human color perception).
//!
//! The informal "RGB" color space is such a tristimulus color space, since
//! it is defined by three values, but it is underspecified since we don't
//! know which primaries are being used (i.e. how exactly are the canonical
//! "red", "green", and "blue" defined?), nor its white point. In most cases,
//! when people talk about "RGB" or "Linear RGB" colors, what they are
//! *actually* talking about is the "Linear sRGB" color space, which uses the
//! primaries and white point defined in the sRGB standard, but which *does
//! not* have the (non-linear) sRGB *transfer function* applied.
//!
//! This library takes these things into account, and attempts to provide an
//! interface which will let those who don't care so much about the intricacies
//! of color still use colors correctly, while also allowing the advanced user
//! a high degree of flexibility in how they use it.
//!
//! # Transparency
//!
//! There are many cases where pixel transparency is important, but there are
//! also many cases where it becomes a dead weight, if it's always stored
//! together with the color, but not used. Palette has therefore adopted a
//! structure where the transparency component (alpha) is attachable using the
//! [`Alpha`](crate::Alpha) type, instead of having copies of each color
//! space.
//!
//! This approach comes with the extra benefit of allowing operations to
//! selectively affect the alpha component:
//!
//! ```rust
//! use palette::{LinSrgb, LinSrgba};
//!
//! let mut c1 = LinSrgba::new(1.0, 0.5, 0.5, 0.8);
//! let c2 = LinSrgb::new(0.5, 1.0, 1.0);
//!
//! c1.color = c1.color * c2; //Leave the alpha as it is
//! c1.blue += 0.2; //The color components can easily be accessed
//! c1 = c1 * 0.5; //Scale both the color and the alpha
//! ```
//!
//! # A Basic Workflow
//!
//! The overall workflow can be divided into three steps, where the first and
//! last may be taken care of by other parts of the application:
//!
//! ```text
//! Decoding -> Processing -> Encoding
//! ```
//!
//! ## 1. Decoding
//!
//! Find out what the source format is and convert it to a linear color space.
//! There may be a specification, such as when working with SVG or CSS.
//!
//! When working with RGB or gray scale (luma):
//!
//! * If you are asking your user to enter an RGB value, you are in a gray zone
//! where it depends on the context. It's usually safe to assume sRGB, but
//! sometimes it's already linear.
//!
//! * If you are decoding an image, there may be some meta data that gives you
//! the necessary details. Otherwise it's most commonly sRGB. Usually you
//! will end up with a slice or vector with RGB bytes, which can easily be
//! converted to Palette colors:
//!
//! ```rust
//! # let mut image_buffer: Vec<u8> = vec![];
//! use palette::{Srgb, Pixel};
//!
//! // This works for any (even non-RGB) color type that can have the
//! // buffer element type as component.
//! let color_buffer: &mut [Srgb<u8>] = Pixel::from_raw_slice_mut(&mut image_buffer);
//! ```
//!
//! * If you are getting your colors from the GPU, in a game or other graphical
//! application, or if they are otherwise generated by the application, then
//! chances are that they are already linear. Still, make sure to check that
//! they are not being encoded somewhere.
//!
//! When working with other colors:
//!
//! * For HSL, HSV, HWB: Check if they are based on any other color space than
//! sRGB, such as Adobe or Apple RGB.
//!
//! * For any of the CIE color spaces, check for a specification of white point
//! and light source. These are necessary for converting to RGB and other
//! colors, that depend on perception and "viewing devices". Common defaults
//! are the D65 light source and the sRGB white point. The Palette defaults
//! should take you far.
//!
//! ## 2. Processing
//!
//! When your color has been decoded into some Palette type, it's ready for
//! processing. This includes things like blending, hue shifting, darkening and
//! conversion to other formats. Just make sure that your non-linear RGB is
//! made linear first (`my_srgb.into_linear()`), to make the operations
//! available.
//!
//! Different color spaces have different capabilities, pros and cons. You may
//! have to experiment a bit (or look at the example programs) to find out what
//! gives the desired result.
//!
//! ## 3. Encoding
//!
//! When the desired processing is done, it's time to encode the colors back
//! into some image format. The same rules apply as for the decoding, but the
//! process reversed.
//!
//! # Working with Raw Data
//!
//! Oftentimes, pixel data is stored in a raw buffer such as a `[u8; 3]`. The
//! [`Pixel`](crate::encoding::pixel::Pixel) trait allows for easy interoperation between
//! Palette colors and other crates or systems. `from_raw` can be used to
//! convert into a Palette color, `into_format` converts from `Srgb<u8>` to
//! `Srgb<f32>`, and finally `into_raw` to convert from a Palette color back to
//! a `[u8;3]`.
//!
//! ```rust
//! use approx::assert_relative_eq;
//! use palette::{Srgb, Pixel};
//!
//! let buffer = [255, 0, 255];
//! let raw = Srgb::from_raw(&buffer);
//! assert_eq!(raw, &Srgb::<u8>::new(255u8, 0, 255));
//!
//! let raw_float: Srgb<f32> = raw.into_format();
//! assert_relative_eq!(raw_float, Srgb::new(1.0, 0.0, 1.0));
//!
//! let raw: [u8; 3] = Srgb::into_raw(raw_float.into_format());
//! assert_eq!(raw, buffer);
//! ```
// Keep the standard library when running tests, too
#![cfg_attr(all(not(feature = "std"), not(test)), no_std)]
#![doc(html_root_url = "https://docs.rs/palette/0.5.0/palette/")]
#![warn(missing_docs)]
#[cfg(any(feature = "std", test))]
extern crate core;
#[cfg_attr(test, macro_use)]
extern crate approx;
#[macro_use]
extern crate palette_derive;
#[cfg(feature = "phf")]
extern crate phf;
#[cfg(feature = "serializing")]
#[macro_use]
extern crate serde;
#[cfg(all(test, feature = "serializing"))]
extern crate serde_json;
use float::Float;
use luma::Luma;
pub use alpha::{Alpha, WithAlpha};
pub use blend::Blend;
#[cfg(feature = "std")]
pub use gradient::Gradient;
pub use hsl::{Hsl, Hsla};
pub use hsv::{Hsv, Hsva};
pub use hwb::{Hwb, Hwba};
pub use lab::{Lab, Laba};
pub use lch::{Lch, Lcha};
pub use luma::{GammaLuma, GammaLumaa, LinLuma, LinLumaa, SrgbLuma, SrgbLumaa};
pub use rgb::{GammaSrgb, GammaSrgba, LinSrgb, LinSrgba, Packed, RgbChannels, Srgb, Srgba};
pub use xyz::{Xyz, Xyza};
pub use yxy::{Yxy, Yxya};
pub use color_difference::ColorDifference;
pub use component::*;
pub use convert::{FromColor, IntoColor};
pub use encoding::pixel::Pixel;
pub use hues::{LabHue, RgbHue};
pub use matrix::Mat3;
pub use relative_contrast::{contrast_ratio, RelativeContrast};
//Helper macro for checking ranges and clamping.
#[cfg(test)]
//Checks that a color type clamps correctly: `limited` fields are clamped at
//both ends of their range, `limited_min` fields only at the lower end, and
//`unlimited` fields are never clamped. Values below, within and above the
//declared ranges are generated and compared against the expected results.
macro_rules! assert_ranges {
    //Fold the listed identifiers into a nested tuple pattern that matches the
    //nesting produced by the chained `.zip(..)` calls in the main rule below.
    (@make_tuple $first:pat, $next:ident,) => (($first, $next));
    (@make_tuple $first:pat, $next:ident, $($rest:ident,)*) => (
        assert_ranges!(@make_tuple ($first, $next), $($rest,)*)
    );
    (
        $ty:ident < $($ty_params:ty),+ >;
        limited {$($limited:ident: $limited_from:expr => $limited_to:expr),+}
        limited_min {$($limited_min:ident: $limited_min_from:expr => $limited_min_to:expr),*}
        unlimited {$($unlimited:ident: $unlimited_from:expr => $unlimited_to:expr),*}
    ) => (
        {
            use core::iter::repeat;
            use crate::Limited;
            //Below range: every component must clamp up to its lower bound
            //and `is_valid` must report false.
            {
                print!("checking below limits ... ");
                $(
                    let from = $limited_from;
                    let to = $limited_to;
                    let diff = to - from;
                    let $limited = (1..11).map(|i| from - (i as f64 / 10.0) * diff);
                )+
                $(
                    let from = $limited_min_from;
                    let to = $limited_min_to;
                    let diff = to - from;
                    let $limited_min = (1..11).map(|i| from - (i as f64 / 10.0) * diff);
                )*
                $(
                    let from = $unlimited_from;
                    let to = $unlimited_to;
                    let diff = to - from;
                    let $unlimited = (1..11).map(|i| from - (i as f64 / 10.0) * diff);
                )*
                for assert_ranges!(@make_tuple (), $($limited,)+ $($limited_min,)* $($unlimited,)* ) in repeat(()) $(.zip($limited))+ $(.zip($limited_min))* $(.zip($unlimited))* {
                    let c: $ty<$($ty_params),+> = $ty {
                        $($limited: $limited.into(),)+
                        $($limited_min: $limited_min.into(),)*
                        $($unlimited: $unlimited.into(),)*
                        ..$ty::default() //This prevents exhaustiveness checking
                    };
                    let clamped = c.clamp();
                    let expected: $ty<$($ty_params),+> = $ty {
                        $($limited: $limited_from.into(),)+
                        $($limited_min: $limited_min_from.into(),)*
                        $($unlimited: $unlimited.into(),)*
                        ..$ty::default() //This prevents exhaustiveness checking
                    };
                    assert!(!c.is_valid());
                    assert_relative_eq!(clamped, expected);
                }
                println!("ok")
            }
            //In range: clamping must be a no-op and `is_valid` must be true.
            {
                print!("checking within limits ... ");
                $(
                    let from = $limited_from;
                    let to = $limited_to;
                    let diff = to - from;
                    let $limited = (0..11).map(|i| from + (i as f64 / 10.0) * diff);
                )+
                $(
                    let from = $limited_min_from;
                    let to = $limited_min_to;
                    let diff = to - from;
                    let $limited_min = (0..11).map(|i| from + (i as f64 / 10.0) * diff);
                )*
                $(
                    let from = $unlimited_from;
                    let to = $unlimited_to;
                    let diff = to - from;
                    let $unlimited = (0..11).map(|i| from + (i as f64 / 10.0) * diff);
                )*
                for assert_ranges!(@make_tuple (), $($limited,)+ $($limited_min,)* $($unlimited,)* ) in repeat(()) $(.zip($limited))+ $(.zip($limited_min))* $(.zip($unlimited))* {
                    let c: $ty<$($ty_params),+> = $ty {
                        $($limited: $limited.into(),)+
                        $($limited_min: $limited_min.into(),)*
                        $($unlimited: $unlimited.into(),)*
                        ..$ty::default() //This prevents exhaustiveness checking
                    };
                    let clamped = c.clamp();
                    assert!(c.is_valid());
                    assert_relative_eq!(clamped, c);
                }
                println!("ok")
            }
            //Above range: `limited` components clamp down to their upper
            //bound; `limited_min` and `unlimited` components pass through.
            {
                print!("checking above limits ... ");
                $(
                    let from = $limited_from;
                    let to = $limited_to;
                    let diff = to - from;
                    let $limited = (1..11).map(|i| to + (i as f64 / 10.0) * diff);
                )+
                $(
                    let from = $limited_min_from;
                    let to = $limited_min_to;
                    let diff = to - from;
                    let $limited_min = (1..11).map(|i| to + (i as f64 / 10.0) * diff);
                )*
                $(
                    let from = $unlimited_from;
                    let to = $unlimited_to;
                    let diff = to - from;
                    let $unlimited = (1..11).map(|i| to + (i as f64 / 10.0) * diff);
                )*
                for assert_ranges!(@make_tuple (), $($limited,)+ $($limited_min,)* $($unlimited,)* ) in repeat(()) $(.zip($limited))+ $(.zip($limited_min))* $(.zip($unlimited))* {
                    let c: $ty<$($ty_params),+> = $ty {
                        $($limited: $limited.into(),)+
                        $($limited_min: $limited_min.into(),)*
                        $($unlimited: $unlimited.into(),)*
                        ..$ty::default() //This prevents exhaustiveness checking
                    };
                    let clamped = c.clamp();
                    let expected: $ty<$($ty_params),+> = $ty {
                        $($limited: $limited_to.into(),)+
                        $($limited_min: $limited_min.into(),)*
                        $($unlimited: $unlimited.into(),)*
                        ..$ty::default() //This prevents exhaustiveness checking
                    };
                    assert!(!c.is_valid());
                    assert_relative_eq!(clamped, expected);
                }
                println!("ok")
            }
        }
    );
}
#[macro_use]
mod macros;
pub mod blend;
#[cfg(feature = "std")]
pub mod gradient;
#[cfg(feature = "named")]
pub mod named;
#[cfg(feature = "random")]
mod random_sampling;
mod alpha;
mod hsl;
mod hsv;
mod hwb;
mod lab;
mod lch;
pub mod luma;
pub mod rgb;
mod xyz;
mod yxy;
mod hues;
pub mod chromatic_adaptation;
mod color_difference;
mod component;
pub mod convert;
pub mod encoding;
mod equality;
mod relative_contrast;
pub mod white_point;
pub mod float;
#[doc(hidden)]
pub mod matrix;
/// Clamp `v` to the inclusive range `[min, max]`.
///
/// Values below `min` become `min`, values above `max` become `max`, and
/// anything else (including values incomparable under `PartialOrd`) is
/// returned unchanged.
fn clamp<T: PartialOrd>(v: T, min: T, max: T) -> T {
    if v < min {
        return min;
    }
    if v > max {
        return max;
    }
    v
}
/// A trait for clamping and checking if colors are within their ranges.
pub trait Limited {
    /// Check if the color's components are within the expected ranges.
    fn is_valid(&self) -> bool;
    /// Return a new color where the components have been clamped to the
    /// nearest valid values.
    ///
    /// `self` is left untouched; see [`clamp_self`](Limited::clamp_self) for
    /// the in-place variant.
    fn clamp(&self) -> Self;
    /// Clamp the color's components to the nearest valid values, in place.
    fn clamp_self(&mut self);
}
/// A trait for linear color interpolation.
///
/// ```
/// use approx::assert_relative_eq;
///
/// use palette::{LinSrgb, Mix};
///
/// let a = LinSrgb::new(0.0, 0.5, 1.0);
/// let b = LinSrgb::new(1.0, 0.5, 0.0);
///
/// assert_relative_eq!(a.mix(&b, 0.0), a);
/// assert_relative_eq!(a.mix(&b, 0.5), LinSrgb::new(0.5, 0.5, 0.5));
/// assert_relative_eq!(a.mix(&b, 1.0), b);
/// ```
pub trait Mix {
    /// The type of the mixing factor.
    type Scalar: Float;
    /// Mix the color with another color, by `factor`.
    ///
    /// `factor` should be between `0.0` and `1.0`, where `0.0` will result in
    /// the same color as `self` and `1.0` will result in the same color as
    /// `other`.
    fn mix(&self, other: &Self, factor: Self::Scalar) -> Self;
}
/// The `Shade` trait allows a color to be lightened or darkened.
///
/// ```
/// use approx::assert_relative_eq;
///
/// use palette::{LinSrgb, Shade};
///
/// let a = LinSrgb::new(0.4, 0.4, 0.4);
/// let b = LinSrgb::new(0.6, 0.6, 0.6);
///
/// assert_relative_eq!(a.lighten(0.1), b.darken(0.1));
/// ```
pub trait Shade: Sized {
/// The type of the lighten/darken amount.
type Scalar: Float;
/// Lighten the color by `amount`.
fn lighten(&self, amount: Self::Scalar) -> Self;
/// Darken the color by `amount`.
fn darken(&self, amount: Self::Scalar) -> Self {
self.lighten(-amount)
}
}
/// A trait for colors where a hue may be calculated.
///
/// ```
/// use approx::assert_relative_eq;
///
/// use palette::{GetHue, LinSrgb};
///
/// let red = LinSrgb::new(1.0f32, 0.0, 0.0);
/// let green = LinSrgb::new(0.0f32, 1.0, 0.0);
/// let blue = LinSrgb::new(0.0f32, 0.0, 1.0);
/// let gray = LinSrgb::new(0.5f32, 0.5, 0.5);
///
/// assert_relative_eq!(red.get_hue().unwrap(), 0.0.into());
/// assert_relative_eq!(green.get_hue().unwrap(), 120.0.into());
/// assert_relative_eq!(blue.get_hue().unwrap(), 240.0.into());
/// assert_eq!(gray.get_hue(), None);
/// ```
pub trait GetHue {
/// The kind of hue unit this color space uses.
///
/// The hue is most commonly calculated as an angle around a color circle
/// and may not always be uniform between color spaces. It's therefore not
/// recommended to take one type of hue and apply it to a color space that
/// expects an other.
type Hue;
/// Calculate a hue if possible.
///
/// Colors in the gray scale has no well defined hue and should preferably
/// return `None`.
fn get_hue(&self) -> Option<Self::Hue>;
}
/// A trait for colors where the hue can be manipulated without conversion.
pub trait Hue: GetHue {
/// Return a new copy of `self`, but with a specific hue.
fn with_hue<H: Into<Self::Hue>>(&self, hue: H) -> Self;
/// Return a new copy of `self`, but with the hue shifted by `amount`.
fn shift_hue<H: Into<Self::Hue>>(&self, amount: H) -> Self;
}
/// A trait for colors where the saturation (or chroma) can be manipulated
/// without conversion.
///
/// ```
/// use approx::assert_relative_eq;
///
/// use palette::{Hsv, Saturate};
///
/// let a = Hsv::new(0.0, 0.25, 1.0);
/// let b = Hsv::new(0.0, 1.0, 1.0);
///
/// assert_relative_eq!(a.saturate(1.0), b.desaturate(0.5));
/// ```
pub trait Saturate: Sized {
/// The type of the (de)saturation factor.
type Scalar: Float;
/// Increase the saturation by `factor`.
fn saturate(&self, factor: Self::Scalar) -> Self;
/// Decrease the saturation by `factor`.
fn desaturate(&self, factor: Self::Scalar) -> Self {
self.saturate(-factor)
}
}
/// Perform a unary or binary operation on each component of a color.
pub trait ComponentWise {
/// The scalar type for color components.
type Scalar;
/// Perform a binary operation on this and an other color.
fn component_wise<F: FnMut(Self::Scalar, Self::Scalar) -> Self::Scalar>(
&self,
other: &Self,
f: F,
) -> Self;
/// Perform a unary operation on this color.
fn component_wise_self<F: FnMut(Self::Scalar) -> Self::Scalar>(&self, f: F) -> Self;
}
/// A trait for infallible conversion from `f64`. The conversion may be lossy.
pub trait FromF64 {
/// Creates a value from an `f64` constant.
fn from_f64(c: f64) -> Self;
}
impl FromF64 for f32 {
#[inline]
fn from_f64(c: f64) -> Self {
c as f32
}
}
impl FromF64 for f64 {
#[inline]
fn from_f64(c: f64) -> Self {
c
}
}
/// A convenience function to convert a constant number to Float Type
#[inline]
fn from_f64<T: FromF64>(c: f64) -> T {
T::from_f64(c)
} | println!("ok")
} | random_line_split |
lib.rs | //! A library that makes linear color calculations and conversion easy and
//! accessible for anyone. It uses the type system to enforce correctness and
//! to avoid mistakes, such as mixing incompatible color types.
//!
//! # It's Never "Just RGB"
//!
//! Colors in, for example, images, are often "gamma corrected", or converted
//! using some non-linear transfer function into a format like sRGB before being
//! stored or displayed. This is done as a compression method and to prevent banding,
//! and is also a bit of a legacy from the ages of the CRT monitors, where the
//! output from the electron gun was nonlinear. The problem is that these formats
//! are *non-linear color spaces*, which means that many operations that you may want
//! to perform on colors (addition, subtraction, multiplication, linear interpolation,
//! etc.) will work unexpectedly when performed in such a non-linear color space. As
//! such, the compression has to be reverted to restore linearity and make sure that
//! many operations on the colors are accurate.
//!
//! For example, this does not work:
//!
//! ```rust
//! // An alias for Rgb<Srgb>, which is what most pictures store.
//! use palette::Srgb;
//!
//! let orangeish = Srgb::new(1.0, 0.6, 0.0);
//! let blueish = Srgb::new(0.0, 0.2, 1.0);
//! // let whateve_it_becomes = orangeish + blueish;
//! ```
//!
//! Instead, they have to be made linear before adding:
//!
//! ```rust
//! // An alias for Rgb<Srgb>, which is what most pictures store.
//! use palette::{Pixel, Srgb};
//!
//! let orangeish = Srgb::new(1.0, 0.6, 0.0).into_linear();
//! let blueish = Srgb::new(0.0, 0.2, 1.0).into_linear();
//! let whateve_it_becomes = orangeish + blueish;
//!
//! // Encode the result back into sRGB and create a byte array
//! let pixel: [u8; 3] = Srgb::from_linear(whateve_it_becomes)
//! .into_format()
//! .into_raw();
//! ```
//!
//! But, even when colors *are* 'linear', there is yet more to explore.
//!
//! The most common way that colors are defined, especially for computer
//! storage, is in terms of so-called *tristimulus values*, meaning that
//! all colors are defined as a vector of three values which may represent
//! any color. The reason colors can generally be stored as only a three
//! dimensional vector, and not an *n* dimensional one, where *n* is some
//! number of possible frequencies of light, is because our eyes contain
//! only three types of cones. Each of these cones have different sensitivity
//! curves to different wavelengths of light, giving us three "dimensions"
//! of sensitivity to color. These cones are often called the S, M, and L
//! (for small, medium, and large) cones, and their sensitivity curves
//! *roughly* position them as most sensitive to "red", "green", and "blue"
//! parts of the spectrum. As such, we can choose only three values to
//! represent any possible color that a human is able to see. An interesting
//! consequence of this is that humans can see two different objects which
//! are emitting *completely different actual light spectra* as the *exact
//! same perceptual color* so long as those wavelengths, when transformed
//! by the sensitivity curves of our cones, end up resulting in the same
//! S, M, and L values sent to our brains.
//!
//! A **color space** (which simply refers to a set of standards by which
//! we map a set of arbitrary values to real-world colors) which uses
//! tristimulus values is often defined in terms of
//!
//! 1. Its **primaries**
//! 2. Its **reference white** or **white point**
//!
//! The **primaries** together represent the total *gamut* (i.e. displayable
//! range of colors) of that color space, while the **white point** defines
//! which concrete tristimulus value corresponds to a real, physical white
//! reflecting object being lit by a known light source and observed by the
//! 'standard observer' (i.e. a standardized model of human color perception).
//!
//! The informal "RGB" color space is such a tristimulus color space, since
//! it is defined by three values, but it is underspecified since we don't
//! know which primaries are being used (i.e. how exactly are the canonical
//! "red", "green", and "blue" defined?), nor its white point. In most cases,
//! when people talk about "RGB" or "Linear RGB" colors, what they are
//! *actually* talking about is the "Linear sRGB" color space, which uses the
//! primaries and white point defined in the sRGB standard, but which *does
//! not* have the (non-linear) sRGB *transfer function* applied.
//!
//! This library takes these things into account, and attempts to provide an
//! interface which will let those who don't care so much about the intricacies
//! of color still use colors correctly, while also allowing the advanced user
//! a high degree of flexibility in how they use it.
//!
//! # Transparency
//!
//! There are many cases where pixel transparency is important, but there are
//! also many cases where it becomes a dead weight, if it's always stored
//! together with the color, but not used. Palette has therefore adopted a
//! structure where the transparency component (alpha) is attachable using the
//! [`Alpha`](crate::Alpha) type, instead of having copies of each color
//! space.
//!
//! This approach comes with the extra benefit of allowing operations to
//! selectively affect the alpha component:
//!
//! ```rust
//! use palette::{LinSrgb, LinSrgba};
//!
//! let mut c1 = LinSrgba::new(1.0, 0.5, 0.5, 0.8);
//! let c2 = LinSrgb::new(0.5, 1.0, 1.0);
//!
//! c1.color = c1.color * c2; //Leave the alpha as it is
//! c1.blue += 0.2; //The color components can easily be accessed
//! c1 = c1 * 0.5; //Scale both the color and the alpha
//! ```
//!
//! # A Basic Workflow
//!
//! The overall workflow can be divided into three steps, where the first and
//! last may be taken care of by other parts of the application:
//!
//! ```text
//! Decoding -> Processing -> Encoding
//! ```
//!
//! ## 1. Decoding
//!
//! Find out what the source format is and convert it to a linear color space.
//! There may be a specification, such as when working with SVG or CSS.
//!
//! When working with RGB or gray scale (luma):
//!
//! * If you are asking your user to enter an RGB value, you are in a gray zone
//! where it depends on the context. It's usually safe to assume sRGB, but
//! sometimes it's already linear.
//!
//! * If you are decoding an image, there may be some meta data that gives you
//! the necessary details. Otherwise it's most commonly sRGB. Usually you
//! will end up with a slice or vector with RGB bytes, which can easily be
//! converted to Palette colors:
//!
//! ```rust
//! # let mut image_buffer: Vec<u8> = vec![];
//! use palette::{Srgb, Pixel};
//!
//! // This works for any (even non-RGB) color type that can have the
//! // buffer element type as component.
//! let color_buffer: &mut [Srgb<u8>] = Pixel::from_raw_slice_mut(&mut image_buffer);
//! ```
//!
//! * If you are getting your colors from the GPU, in a game or other graphical
//! application, or if they are otherwise generated by the application, then
//! chances are that they are already linear. Still, make sure to check that
//! they are not being encoded somewhere.
//!
//! When working with other colors:
//!
//! * For HSL, HSV, HWB: Check if they are based on any other color space than
//! sRGB, such as Adobe or Apple RGB.
//!
//! * For any of the CIE color spaces, check for a specification of white point
//! and light source. These are necessary for converting to RGB and other
//! colors, that depend on perception and "viewing devices". Common defaults
//! are the D65 light source and the sRGB white point. The Palette defaults
//! should take you far.
//!
//! ## 2. Processing
//!
//! When your color has been decoded into some Palette type, it's ready for
//! processing. This includes things like blending, hue shifting, darkening and
//! conversion to other formats. Just make sure that your non-linear RGB is
//! made linear first (`my_srgb.into_linear()`), to make the operations
//! available.
//!
//! Different color spaces have different capabilities, pros and cons. You may
//! have to experiment a bit (or look at the example programs) to find out what
//! gives the desired result.
//!
//! ## 3. Encoding
//!
//! When the desired processing is done, it's time to encode the colors back
//! into some image format. The same rules apply as for the decoding, but the
//! process is reversed.
//!
//! # Working with Raw Data
//!
//! Oftentimes, pixel data is stored in a raw buffer such as a `[u8; 3]`. The
//! [`Pixel`](crate::encoding::pixel::Pixel) trait allows for easy interoperation between
//! Palette colors and other crates or systems. `from_raw` can be used to
//! convert into a Palette color, `into_format` converts from `Srgb<u8>` to
//! `Srgb<f32>`, and finally `into_raw` to convert from a Palette color back to
//! a `[u8;3]`.
//!
//! ```rust
//! use approx::assert_relative_eq;
//! use palette::{Srgb, Pixel};
//!
//! let buffer = [255, 0, 255];
//! let raw = Srgb::from_raw(&buffer);
//! assert_eq!(raw, &Srgb::<u8>::new(255u8, 0, 255));
//!
//! let raw_float: Srgb<f32> = raw.into_format();
//! assert_relative_eq!(raw_float, Srgb::new(1.0, 0.0, 1.0));
//!
//! let raw: [u8; 3] = Srgb::into_raw(raw_float.into_format());
//! assert_eq!(raw, buffer);
//! ```
// Keep the standard library when running tests, too
#![cfg_attr(all(not(feature = "std"), not(test)), no_std)]
#![doc(html_root_url = "https://docs.rs/palette/0.5.0/palette/")]
#![warn(missing_docs)]
#[cfg(any(feature = "std", test))]
extern crate core;
#[cfg_attr(test, macro_use)]
extern crate approx;
#[macro_use]
extern crate palette_derive;
#[cfg(feature = "phf")]
extern crate phf;
#[cfg(feature = "serializing")]
#[macro_use]
extern crate serde;
#[cfg(all(test, feature = "serializing"))]
extern crate serde_json;
use float::Float;
use luma::Luma;
pub use alpha::{Alpha, WithAlpha};
pub use blend::Blend;
#[cfg(feature = "std")]
pub use gradient::Gradient;
pub use hsl::{Hsl, Hsla};
pub use hsv::{Hsv, Hsva};
pub use hwb::{Hwb, Hwba};
pub use lab::{Lab, Laba};
pub use lch::{Lch, Lcha};
pub use luma::{GammaLuma, GammaLumaa, LinLuma, LinLumaa, SrgbLuma, SrgbLumaa};
pub use rgb::{GammaSrgb, GammaSrgba, LinSrgb, LinSrgba, Packed, RgbChannels, Srgb, Srgba};
pub use xyz::{Xyz, Xyza};
pub use yxy::{Yxy, Yxya};
pub use color_difference::ColorDifference;
pub use component::*;
pub use convert::{FromColor, IntoColor};
pub use encoding::pixel::Pixel;
pub use hues::{LabHue, RgbHue};
pub use matrix::Mat3;
pub use relative_contrast::{contrast_ratio, RelativeContrast};
//Helper macro for checking ranges and clamping.
#[cfg(test)]
macro_rules! assert_ranges {
(@make_tuple $first:pat, $next:ident,) => (($first, $next));
(@make_tuple $first:pat, $next:ident, $($rest:ident,)*) => (
assert_ranges!(@make_tuple ($first, $next), $($rest,)*)
);
(
$ty:ident < $($ty_params:ty),+ >;
limited {$($limited:ident: $limited_from:expr => $limited_to:expr),+}
limited_min {$($limited_min:ident: $limited_min_from:expr => $limited_min_to:expr),*}
unlimited {$($unlimited:ident: $unlimited_from:expr => $unlimited_to:expr),*}
) => (
{
use core::iter::repeat;
use crate::Limited;
{
print!("checking below limits ... ");
$(
let from = $limited_from;
let to = $limited_to;
let diff = to - from;
let $limited = (1..11).map(|i| from - (i as f64 / 10.0) * diff);
)+
$(
let from = $limited_min_from;
let to = $limited_min_to;
let diff = to - from;
let $limited_min = (1..11).map(|i| from - (i as f64 / 10.0) * diff);
)*
$(
let from = $unlimited_from;
let to = $unlimited_to;
let diff = to - from;
let $unlimited = (1..11).map(|i| from - (i as f64 / 10.0) * diff);
)*
for assert_ranges!(@make_tuple (), $($limited,)+ $($limited_min,)* $($unlimited,)* ) in repeat(()) $(.zip($limited))+ $(.zip($limited_min))* $(.zip($unlimited))* {
let c: $ty<$($ty_params),+> = $ty {
$($limited: $limited.into(),)+
$($limited_min: $limited_min.into(),)*
$($unlimited: $unlimited.into(),)*
..$ty::default() //This prevents exhaustiveness checking
};
let clamped = c.clamp();
let expected: $ty<$($ty_params),+> = $ty {
$($limited: $limited_from.into(),)+
$($limited_min: $limited_min_from.into(),)*
$($unlimited: $unlimited.into(),)*
..$ty::default() //This prevents exhaustiveness checking
};
assert!(!c.is_valid());
assert_relative_eq!(clamped, expected);
}
println!("ok")
}
{
print!("checking within limits ... ");
$(
let from = $limited_from;
let to = $limited_to;
let diff = to - from;
let $limited = (0..11).map(|i| from + (i as f64 / 10.0) * diff);
)+
$(
let from = $limited_min_from;
let to = $limited_min_to;
let diff = to - from;
let $limited_min = (0..11).map(|i| from + (i as f64 / 10.0) * diff);
)*
$(
let from = $unlimited_from;
let to = $unlimited_to;
let diff = to - from;
let $unlimited = (0..11).map(|i| from + (i as f64 / 10.0) * diff);
)*
for assert_ranges!(@make_tuple (), $($limited,)+ $($limited_min,)* $($unlimited,)* ) in repeat(()) $(.zip($limited))+ $(.zip($limited_min))* $(.zip($unlimited))* {
let c: $ty<$($ty_params),+> = $ty {
$($limited: $limited.into(),)+
$($limited_min: $limited_min.into(),)*
$($unlimited: $unlimited.into(),)*
..$ty::default() //This prevents exhaustiveness checking
};
let clamped = c.clamp();
assert!(c.is_valid());
assert_relative_eq!(clamped, c);
}
println!("ok")
}
{
print!("checking above limits ... ");
$(
let from = $limited_from;
let to = $limited_to;
let diff = to - from;
let $limited = (1..11).map(|i| to + (i as f64 / 10.0) * diff);
)+
$(
let from = $limited_min_from;
let to = $limited_min_to;
let diff = to - from;
let $limited_min = (1..11).map(|i| to + (i as f64 / 10.0) * diff);
)*
$(
let from = $unlimited_from;
let to = $unlimited_to;
let diff = to - from;
let $unlimited = (1..11).map(|i| to + (i as f64 / 10.0) * diff);
)*
for assert_ranges!(@make_tuple (), $($limited,)+ $($limited_min,)* $($unlimited,)* ) in repeat(()) $(.zip($limited))+ $(.zip($limited_min))* $(.zip($unlimited))* {
let c: $ty<$($ty_params),+> = $ty {
$($limited: $limited.into(),)+
$($limited_min: $limited_min.into(),)*
$($unlimited: $unlimited.into(),)*
..$ty::default() //This prevents exhaustiveness checking
};
let clamped = c.clamp();
let expected: $ty<$($ty_params),+> = $ty {
$($limited: $limited_to.into(),)+
$($limited_min: $limited_min.into(),)*
$($unlimited: $unlimited.into(),)*
..$ty::default() //This prevents exhaustiveness checking
};
assert!(!c.is_valid());
assert_relative_eq!(clamped, expected);
}
println!("ok")
}
}
);
}
#[macro_use]
mod macros;
pub mod blend;
#[cfg(feature = "std")]
pub mod gradient;
#[cfg(feature = "named")]
pub mod named;
#[cfg(feature = "random")]
mod random_sampling;
mod alpha;
mod hsl;
mod hsv;
mod hwb;
mod lab;
mod lch;
pub mod luma;
pub mod rgb;
mod xyz;
mod yxy;
mod hues;
pub mod chromatic_adaptation;
mod color_difference;
mod component;
pub mod convert;
pub mod encoding;
mod equality;
mod relative_contrast;
pub mod white_point;
pub mod float;
#[doc(hidden)]
pub mod matrix;
// Generic clamp helper used throughout the crate (e.g. by `Limited`
// implementations): restricts `v` to the inclusive range `[min, max]`.
// Note: with float NaN both comparisons below are false, so a NaN `v`
// is returned unchanged rather than clamped.
fn clamp<T: PartialOrd>(v: T, min: T, max: T) -> T {
if v < min {
min
} else if v > max {
max
} else {
v
}
}
/// A trait for clamping and checking if colors are within their ranges.
pub trait Limited {
/// Check if the color's components are within the expected ranges.
fn is_valid(&self) -> bool;
/// Return a new color where the components has been clamped to the nearest
/// valid values.
fn clamp(&self) -> Self;
/// Clamp the color's components to the nearest valid values.
fn clamp_self(&mut self);
}
/// A trait for linear color interpolation.
///
/// ```
/// use approx::assert_relative_eq;
///
/// use palette::{LinSrgb, Mix};
///
/// let a = LinSrgb::new(0.0, 0.5, 1.0);
/// let b = LinSrgb::new(1.0, 0.5, 0.0);
///
/// assert_relative_eq!(a.mix(&b, 0.0), a);
/// assert_relative_eq!(a.mix(&b, 0.5), LinSrgb::new(0.5, 0.5, 0.5));
/// assert_relative_eq!(a.mix(&b, 1.0), b);
/// ```
pub trait Mix {
/// The type of the mixing factor.
type Scalar: Float;
/// Mix the color with another color, by `factor`.
///
/// `factor` should be between `0.0` and `1.0`, where `0.0` will result in
/// the same color as `self` and `1.0` will result in the same color as
/// `other`.
fn mix(&self, other: &Self, factor: Self::Scalar) -> Self;
}
/// The `Shade` trait allows a color to be lightened or darkened.
///
/// ```
/// use approx::assert_relative_eq;
///
/// use palette::{LinSrgb, Shade};
///
/// let a = LinSrgb::new(0.4, 0.4, 0.4);
/// let b = LinSrgb::new(0.6, 0.6, 0.6);
///
/// assert_relative_eq!(a.lighten(0.1), b.darken(0.1));
/// ```
pub trait Shade: Sized {
/// The type of the lighten/darken amount.
type Scalar: Float;
/// Lighten the color by `amount`.
fn lighten(&self, amount: Self::Scalar) -> Self;
/// Darken the color by `amount`.
fn darken(&self, amount: Self::Scalar) -> Self {
self.lighten(-amount)
}
}
/// A trait for colors where a hue may be calculated.
///
/// ```
/// use approx::assert_relative_eq;
///
/// use palette::{GetHue, LinSrgb};
///
/// let red = LinSrgb::new(1.0f32, 0.0, 0.0);
/// let green = LinSrgb::new(0.0f32, 1.0, 0.0);
/// let blue = LinSrgb::new(0.0f32, 0.0, 1.0);
/// let gray = LinSrgb::new(0.5f32, 0.5, 0.5);
///
/// assert_relative_eq!(red.get_hue().unwrap(), 0.0.into());
/// assert_relative_eq!(green.get_hue().unwrap(), 120.0.into());
/// assert_relative_eq!(blue.get_hue().unwrap(), 240.0.into());
/// assert_eq!(gray.get_hue(), None);
/// ```
pub trait GetHue {
/// The kind of hue unit this color space uses.
///
/// The hue is most commonly calculated as an angle around a color circle
/// and may not always be uniform between color spaces. It's therefore not
/// recommended to take one type of hue and apply it to a color space that
/// expects an other.
type Hue;
/// Calculate a hue if possible.
///
/// Colors in the gray scale has no well defined hue and should preferably
/// return `None`.
fn get_hue(&self) -> Option<Self::Hue>;
}
/// A trait for colors where the hue can be manipulated without conversion.
pub trait Hue: GetHue {
/// Return a new copy of `self`, but with a specific hue.
fn with_hue<H: Into<Self::Hue>>(&self, hue: H) -> Self;
/// Return a new copy of `self`, but with the hue shifted by `amount`.
fn shift_hue<H: Into<Self::Hue>>(&self, amount: H) -> Self;
}
/// A trait for colors where the saturation (or chroma) can be manipulated
/// without conversion.
///
/// ```
/// use approx::assert_relative_eq;
///
/// use palette::{Hsv, Saturate};
///
/// let a = Hsv::new(0.0, 0.25, 1.0);
/// let b = Hsv::new(0.0, 1.0, 1.0);
///
/// assert_relative_eq!(a.saturate(1.0), b.desaturate(0.5));
/// ```
pub trait Saturate: Sized {
/// The type of the (de)saturation factor.
type Scalar: Float;
/// Increase the saturation by `factor`.
fn saturate(&self, factor: Self::Scalar) -> Self;
/// Decrease the saturation by `factor`.
fn desaturate(&self, factor: Self::Scalar) -> Self {
self.saturate(-factor)
}
}
/// Perform a unary or binary operation on each component of a color.
pub trait ComponentWise {
/// The scalar type for color components.
type Scalar;
/// Perform a binary operation on this and an other color.
fn component_wise<F: FnMut(Self::Scalar, Self::Scalar) -> Self::Scalar>(
&self,
other: &Self,
f: F,
) -> Self;
/// Perform a unary operation on this color.
fn component_wise_self<F: FnMut(Self::Scalar) -> Self::Scalar>(&self, f: F) -> Self;
}
/// A trait for infallible conversion from `f64`. The conversion may be lossy.
pub trait FromF64 {
/// Creates a value from an `f64` constant.
fn from_f64(c: f64) -> Self;
}
impl FromF64 for f32 {
#[inline]
fn from_f64(c: f64) -> Self {
c as f32
}
}
impl FromF64 for f64 {
#[inline]
fn from_f64(c: f64) -> Self {
c
}
}
/// A convenience function to convert a constant number to Float Type
#[inline]
fn from_f64<T: FromF64>(c: f64) -> T | {
T::from_f64(c)
} | identifier_body | |
chord.js | /*global Raphael, d3, $, define */
/*!
* Compatibility definition for Chord (registers as an AMD module or on the global object)
*/;
(function (name, definition) {
if (typeof define === 'function') { // Module
define(definition);
} else { // Assign to common namespaces or simply the global object (window)
this[name] = definition(function (id) {
return this[id];
});
}
})('Chord', function (require) {
var DataV = require('DataV');
/**
 * Constructor.
 * @param {Object} node  The HTML container element the component is drawn into
 * @param {Object} options  User-defined component options, e.g. canvas size
 */
var Chord = DataV.extend(DataV.Chart, {
initialize: function (node, options) {
this.type = "Chord";
this.node = this.checkContainer(node);
this.matrix = [];
this.groupNames = []; // Array: records the name of each group
// Chart size settings
this.defaults.legend = true;
this.defaults.width = 800;
this.defaults.height = 800;
this.dimension.from = {
type: "string",
required: true,
index: 0,
};
this.dimension.to = {
type: "string",
required: true,
index: 1,
};
/**
 * Value dimension
 */
this.dimension.value = {
type: "number",
required: true,
index: 2
};
// Apply the user-specified options
this.setOptions(options);
this.legendArea = [20, (this.defaults.height - 20 - 220), 200, 220];
if (this.defaults.legend) {
this.xOffset = this.legendArea[2];
} else {
this.xOffset = 0;
}
this.defaults.innerRadius = Math.min((this.defaults.width - this.xOffset), this.defaults.height) * 0.38;
this.defaults.outerRadius = this.defaults.innerRadius * 1.10;
// Create the canvas
this.createCanvas();
}
});
/**
 * Create the drawing canvas.
 *
 * Instantiates the Raphael paper inside the container node, makes the
 * container `position: relative` (so the floating tooltip can be absolutely
 * positioned inside it), and prepares a hidden floating tag for tooltips.
 */
Chord.prototype.createCanvas = function () {
    this.canvas = new Raphael(this.node, this.defaults.width, this.defaults.height);
    // Bug fix: `canvasStyle` was assigned without `var`, leaking a global.
    var canvasStyle = this.node.style;
    canvasStyle.position = "relative";
    this.floatTag = DataV.FloatTag()(this.node);
    this.floatTag.css({
        "visibility": "hidden"
    });
};
/**
 * Get the color for an element category.
 * @param {Number} i  Category index of the element
 * @return {String}  The color value; the palette wraps around modulo its length
 */
Chord.prototype.getColor = function (i) {
    var palette = DataV.getColor();
    var entry = palette[i % palette.length];
    return entry[0];
};
/**
 * Render the chord diagram: lay out the chords, then draw the legend
 * if it is enabled in the defaults.
 */
Chord.prototype.render = function () {
    this.layout();
    if (!this.defaults.legend) {
        return;
    }
    this.drawLegend();
};
/**
 * Draw the legend, one row per group, inside the legend area.
 *
 * Each row consists of: a grey highlight rectangle (hidden by default),
 * a color swatch, the group name, and a transparent rectangle stacked on
 * top that captures mouse events. Clicking a row toggles highlighting of
 * the chords whose source or target is that group.
 */
Chord.prototype.drawLegend = function () {
    var that = this;
    var paper = this.canvas;
    var legendArea = this.legendArea;
    var rectBn = paper.set();
    this.underBn = [];
    var underBn = this.underBn;
    // Bug fix: `i` (and `j` below) were assigned without `var`, leaking globals.
    // NOTE(review): the bound is `<=`, so one extra row with an undefined name
    // is drawn at index groupNum — kept for behavior compatibility; confirm
    // whether `<` was intended.
    for (var i = 0; i <= this.groupNum; i++) {
        // Highlight background (shown on hover/click)
        underBn.push(paper.rect(legendArea[0] + 10, legendArea[1] + 10 + (20 + 3) * i, 180, 20).attr({
            "fill": "#ebebeb",
            "stroke": "none"
            //"r": 3
        }).hide());
        // Color swatch
        paper.rect(legendArea[0] + 10 + 3, legendArea[1] + 10 + (20 + 3) * i + 6, 16, 8).attr({
            "fill": this.getColor(i),
            "stroke": "none"
        });
        // Group name text
        paper.text(legendArea[0] + 10 + 3 + 16 + 8, legendArea[1] + 10 + (20 + 3) * i + 10, this.groupNames[i]).attr({
            "fill": "black",
            "fill-opacity": 1,
            "font-family": "Verdana",
            "font-size": 12
        }).attr({
            "text-anchor": "start"
        });
        // Transparent click target covering the whole row
        rectBn.push(paper.rect(legendArea[0] + 10, legendArea[1] + 10 + (20 + 3) * i, 180, 20).attr({
            "fill": "white",
            "fill-opacity": 0,
            "stroke": "none"
            //"r": 3
        })).data("clicked", 0);
    }
    rectBn.forEach(function (d, i) {
        d.mouseover(function () {
            if (d.data("clicked") === 0) {
                underBn[i].attr('opacity', 0.5);
                underBn[i].show();
            }
        }).mouseout(function () {
            if (d.data("clicked") === 0) {
                underBn[i].hide();
            }
        });
        d.click(function () {
            // Show only this row's highlight, hide every other one.
            for (var j = 0; j < underBn.length; j++) {
                if (j === i) {
                    underBn[j].show();
                } else {
                    underBn[j].hide();
                }
            }
            // Reset the clicked state of every other row.
            rectBn.forEach(function (eachBn) {
                if (eachBn !== d) {
                    eachBn.data("clicked", 0);
                }
            });
            if (d.data("clicked") === 0) {
                underBn[i].attr('opacity', 1);
                underBn[i].show();
                // Fade chords not connected to this group (inner callback
                // parameter renamed from `d` to avoid shadowing the row element).
                that.chordGroups.forEach(function (chord) {
                    if (chord.data('source') !== i && chord.data('target') !== i) {
                        chord.attr({
                            'fill-opacity': 0.1
                        });
                    } else {
                        chord.attr({
                            'fill-opacity': 0.6
                        });
                    }
                });
                d.data("clicked", 1);
            } else if (d.data("clicked") === 1) {
                underBn[i].hide();
                d.data("clicked", 0);
                // Restore all chords to their normal opacity.
                that.chordGroups.forEach(function (chord) {
                    chord.attr({
                        'fill-opacity': 0.6
                    });
                });
            }
        });
    });
};
/**
 * Process the raw source data into the chord matrix.
 *
 * Builds `groupNames` (unique "from" values first, then any "to" values
 * not already seen) and an NxN `matrix` where matrix[a][b] accumulates
 * the value of every row linking group a to group b.
 *
 * @param {Object} map   Dimension mapping (from/to/value column indices)
 */
Chord.prototype.setSource = function (table, map) {
    map = this.map(map);
    var that = this;
    var index = {};
    var groupNum = 0;
    table = table.slice(1); // skip row 0 (the header row)
    table.forEach(function (d) {
        var from = d[map.from];
        if (typeof index[from] === 'undefined') {
            index[from] = groupNum++;
            that.groupNames.push(from);
        }
    });
    table.forEach(function (d) {
        var to = d[map.to];
        if (typeof index[to] === 'undefined') {
            index[to] = groupNum++;
            that.groupNames.push(to);
        }
    });
    this.groupNum = groupNum;
    // Initialize an NxN zero matrix (stray empty statements after the
    // loop bodies removed).
    for (var i = 0; i < groupNum; i++) {
        this.matrix[i] = [];
        for (var j = 0; j < groupNum; j++) {
            this.matrix[i][j] = 0;
        }
    }
    table.forEach(function (d) {
        that.matrix[index[d[map.from]]][index[d[map.to]]] += Number(d[map.value]);
    });
};
/**
*创建chord布局
*/
Chord.prototype.layout = function () {
var floatTag = this.floatTag;
var that = this;
that.canvas.clear();
/*var see = [
[11975, 5871, 8916, 2868],
[1951, 10048, 2060, 6171],
[8010, 16145, 8090, 8045],
[1013, 990, 940, 6907]
];*/
var chordLayout = d3.layout.chord().padding(0.05) //chord segments之间的padding间隔
.sortSubgroups(d3.descending) //chord segments细分后的排序规则
.matrix(that.matrix);
/*var fillColor = d3.scale.ordinal()
.domain(d3.range(4))
.range(["#000000", "#FFDD89", "#957244", "#F26223"]);*/
//groups数组:获取每个组的起始角度、数值、索引等属性
var groups = chordLayout.groups();
//由内外半径、起始角度计算路径字符串
var pathCalc = d3.svg.arc().innerRadius(that.defaults.innerRadius).outerRadius(that.defaults.outerRadius).startAngle(function (d) {
return d.startAngle;
}).endAngle(function (d) {
return d.endAngle;
});
var chords = chordLayout.chords();
//计算弦的路径曲线
var chordCalc = d3.svg.chord().radius(that.defaults.innerRadius);
//Raphael: Paper.path()
var donutEle;
//获取每个环形的字符串表示
var spline;
//表示每条弦的element
var chordEle;
//每条弦的字符串表示
var belt;
var num; //每个group分割小格数
var unitAngle; //每个group所占的角度
var angle;
var radian;
var tickLine;
var tickStr; //每个tick的路径
var xTrans, yTrans;
var aX, aY, bX, bY; //每个tick起始端点的坐标
var anchor;
var rotateStr;
var wordStr;
var word;
var textEl;
var wXTrans, wYTrans;
var tips;
var minValue = 1000;
that.chordGroups = that.canvas.set();
that.donutGroups = that.canvas.set();
$(this.node).append(this.floatTag);
//计算某条弦被赋值为target或source的颜色
var colorCalc = function (index) {
var i = chords[index].target.value > chords[index].source.value ? chords[index].target.index : chords[index].source.index;
return i;
};
//添加透明效果
var mouseOverDonut = function () {
floatTag.html('<div style = "text-align: center;margin:auto;color:'
//+ jqNode.color
+
"#ffffff" + '">' + this.data('text') + '</div>');
floatTag.css({
"visibility": "visible"
});
that.underBn.forEach(function (d) {
d.hide();
});
index = this.data("donutIndex");
that.chordGroups.forEach(function (d) {
if (d.data('source') !== index && d.data('target') !== index) {
d.attr({
'fill-opacity': 0.1
});
} else {
d.attr({
'fill-opacity': 0.6
});
}
});
//fade(this.data("donutIndex"), 0.2);
that.underBn[index].attr('opacity', 0.5).show();
};
var mouseOutDonut = function () {
floatTag.css({
"visibility": "hidden"
});
index = this.data("donutIndex");
that.chordGroups.forEach(function (d) {
if (d.data('source') !== index && d.data('target') !== index) {
d.attr({
'fill-opacity': 0.6
});
}
});
| };
var mouseoverChord = function () {
floatTag.html('<div style="text-align: center;margin:auto;color:#ffffff">' + this.data('text') + '</div>');
floatTag.css({
"visibility": "visible"
});
that.underBn.forEach(function (d) {
d.hide();
});
that.chordGroups.forEach(function (d) {
d.attr("fill-opacity", 0.1);
});
if (navigator.appName !== "Microsoft Internet Explorer") {
this.toFront(); //把当前弦移到画布最上层
}
this.attr("fill-opacity", 0.7);
that.underBn[this.data('source')].attr('opacity', 0.5).show();
};
var mouseoutChord = function () {
floatTag.css({
"visibility": "hidden"
});
//alert("***");
that.chordGroups.forEach(function (d) {
d.attr("fill-opacity", 0.6);
});
//this.attr("fill-opacity", 0.6);
that.underBn[this.data('source')].hide();
};
//画弦*********************************************************
var t;
for (t = 0; t <= chords.length - 1; t++) {
//alert(chords.length);
belt = chordCalc(chords[t]);
//hover到弦上时的效果
tips = that.groupNames[chords[t].source.index] + " to " + that.groupNames[chords[t].target.index] + ": " + that.matrix[chords[t].source.index][chords[t].target.index] + "," + that.groupNames[chords[t].target.index] + " to " + that.groupNames[chords[t].source.index] + ": " + that.matrix[chords[t].target.index][chords[t].source.index];
chordEle = that.canvas.path(belt).
translate((that.defaults.width - this.xOffset) / 2 + this.xOffset, that.defaults.height / 2).attr({
"path": belt,
"fill": that.getColor(colorCalc(t)),
"fill-opacity": 0.6,
"stroke": "#d6d6d6",
"stroke-opacity": 0.1
}).hover(mouseoverChord, mouseoutChord).data("source", chords[t].source.index).data("target", chords[t].target.index);
//.attr("fill", fillColor(chords[t].target.index))
chordEle.data('text', tips);
that.chordGroups.push(chordEle);
}
//画圆弧*********************************************************
var i, r;
var donutName;
var nameStr;
var nameX, nameY;
var ro, a;
var sum = 0;
for (r = 0; r <= groups.length - 1; r++) {
sum += groups[r].value;
}
for (i = 0; i <= groups.length - 1; i++) {
//画外圈的pie图**************************************
//计算每个group的path
spline = pathCalc(groups[i]);
tips = that.groupNames[i] + ": " + Math.round(groups[i].value) + " " + (groups[i].value * 100 / sum).toFixed(2) + "%";
donutEle = that.canvas.path(spline).translate((that.defaults.width - this.xOffset) / 2 + this.xOffset, that.defaults.height / 2).data("donutIndex", i).attr({
"path": spline,
"fill": that.getColor(i),
"stroke": that.getColor(i)
}).mouseover(mouseOverDonut).mouseout(mouseOutDonut);
donutEle.data('text', tips);
that.donutGroups.push(donutEle);
//每个donut上显示名称
ro = groups[i].startAngle * 180 / Math.PI - 86 + 90;
a = (groups[i].startAngle * 180 / Math.PI - 86) * Math.PI / 180;
nameX = ((that.defaults.outerRadius - that.defaults.innerRadius) / 2 + that.defaults.innerRadius) * Math.cos(a);
nameY = ((that.defaults.outerRadius - that.defaults.innerRadius) / 2 + that.defaults.innerRadius) * Math.sin(a);
nameStr = "T" + ((that.defaults.width - that.xOffset) / 2 + that.xOffset) + "," + that.defaults.height / 2 + "R" + ro + "T" + nameX + "," + nameY;
if ((groups[i].endAngle - groups[i].startAngle) * 180 / Math.PI > 10) {
donutName = that.canvas.text().attr("font", "12px Verdana").attr("text", that.groupNames[i]).transform(nameStr);
}
//画刻度和刻度值**************************************
num = groups[i].value / 5000;
//最细分的每个小格代表的数值大小
unitAngle = (groups[i].endAngle - groups[i].startAngle) * 180 / Math.PI / num;
var j;
for (j = 0; j <= num; j++) {
//计算旋转角度和水平、竖直方向所需平移的距离
radian = ((groups[i].startAngle * 180 / Math.PI - 90) + j * unitAngle);
angle = radian * Math.PI / 180;
xTrans = that.defaults.outerRadius * Math.cos(angle);
yTrans = that.defaults.outerRadius * Math.sin(angle);
tickStr = "T" + ((that.defaults.width - that.xOffset) / 2 + that.xOffset) + "," + that.defaults.height / 2 + "T" + xTrans + "," + yTrans;
//刻度线的起点终点坐标
aX = ((that.defaults.width - that.xOffset) / 2 + that.xOffset) + xTrans;
aY = that.defaults.height / 2 + yTrans;
bX = ((that.defaults.width - that.xOffset) / 2 + that.xOffset) + (that.defaults.outerRadius + 6) * Math.cos(angle);
bY = that.defaults.height / 2 + (that.defaults.outerRadius + 6) * Math.sin(angle);
tickLine = "M" + aX + "," + aY + "L" + bX + "," + bY;
that.canvas.path(tickLine).attr({
'stroke': "#929292",
"stroke-width": '1px'
}); //绘制刻度
//每隔五个刻度,绘制一次文字
if (j % 2 === 0) {
//计算text-anchor
if (radian + 90 < 180) {
anchor = "start";
} else {
anchor = "end";
}
//计算文字方向是否需要旋转180度
if (radian + 90 < 180) {
rotateStr = null;
} else {
rotateStr = "R180";
}
wXTrans = (that.defaults.outerRadius + 10) * Math.cos(angle);
wYTrans = (that.defaults.outerRadius + 10) * Math.sin(angle);
word = j % 2 ? "" : Math.round(((groups[i].value / num) * j) / 1000);
wordStr = "T" + ((that.defaults.width - that.xOffset) / 2 + that.xOffset) + "," + that.defaults.height / 2 + "R" + radian
/*(groups[i].startAngle * 180 / Math.PI - 90)*/
+ rotateStr + "T" + wXTrans + "," + wYTrans;
//绘制文字
textEl = that.canvas.text(0, 0, word).attr("font", "12px Verdana").transform(wordStr).attr("text-anchor", anchor).attr('fill', "#929292");
}
}
}
/*this.canvas.text().attr("font", "12px arial").translate((that.defaults.width - this.xOffset) / 2 + this.xOffset, this.defaults.height).attr("text", "The unit of the scale on the periphery is 1000. \n 刻度值的单位为1000。");
*/
};
return Chord;
}); | //fade(this.data("donutIndex"), 0.6);
that.underBn[index].hide();
| conditional_block |
chord.js | /*global Raphael, d3, $, define */
/*!
* Chord的兼容性定义
*/;
(function (name, definition) {
if (typeof define === 'function') { // Module
define(definition);
} else { // Assign to common namespaces or simply the global object (window)
this[name] = definition(function (id) {
return this[id];
});
}
})('Chord', function (require) {
var DataV = require('DataV');
/**
* 构造函数
* @param {Object} node 表示在html的哪个容器中绘制该组件
* @param {Object} options 为用户自定义的组件的属性,比如画布大小
*/
var Chord = DataV.extend(DataV.Chart, {
initialize: function (node, options) {
this.type = "Chord";
this.node = this.checkContainer(node);
this.matrix = [];
this.groupNames = []; //数组:记录每个group的名字
//图的大小设置
this.defaults.legend = true;
this.defaults.width = 800;
this.defaults.height = 800;
this.dimension.from = {
type: "string",
required: true,
index: 0,
};
this.dimension.to = {
type: "string",
required: true,
index: 1,
};
/**
* 值纬度
*/
this.dimension.value = {
type: "number",
required: true,
index: 2
};
//设置用户指定的属性
this.setOptions(options);
this.legendArea = [20, (this.defaults.height - 20 - 220), 200, 220];
if (this.defaults.legend) {
this.xOffset = this.legendArea[2];
} else {
this.xOffset = 0;
}
this.defaults.innerRadius = Math.min((this.defaults.width - this.xOffset), this.defaults.height) * 0.38;
this.defaults.outerRadius = this.defaults.innerRadius * 1.10;
//创建画布
this.createCanvas();
}
});
/**
* 创建画布
*/
Chord.prototype.createCanvas = function () {
this.canvas = new Raphael(this.node, this.defaults.width, this.defaults.height);
canvasStyle = this.node.style;
canvasStyle.position = "relative";
this.floatTag = DataV.FloatTag()(this.node);
this.floatTag.css({
"visibility": "hidden"
});
};
/**
* 获取颜色
* @param {Number} i 元素类别编号
* @return {String} 返回颜色值
*/
Chord.prototype.getColor = function (i) {
var color = DataV.getColor();
return color[i % color.length][0];
};
/**
* 绘制弦图
*/
Chord.prototype.render = function () {
this.layout();
if (this.defaults.legend) {
this.drawLegend();
}
};
/**
* 绘制图例
*/
Chord.prototype.drawLegend = function () {
var that = this;
var paper = this.canvas;
var legendArea = this.legendArea;
var rectBn = paper.set();
this.underBn = [];
var underBn = this.underBn;
for (i = 0; i <= this.groupNum; i++) {
//底框
underBn.push(paper.rect(legendArea[0] + 10, legendArea[1] + 10 + (20 + 3) * i, 180, 20).attr({
"fill": "#ebebeb",
"stroke": "none"
//"r": 3
}).hide());
//色框
paper.rect(legendArea[0] + 10 + 3, legendArea[1] + 10 + (20 + 3) * i + 6, 16, 8).attr({
"fill": this.getColor(i),
"stroke": "none"
});
//文字
paper.text(legendArea[0] + 10 + 3 + 16 + 8, legendArea[1] + 10 + (20 + 3) * i + 10, this.groupNames[i]).attr({
"fill": "black",
"fill-opacity": 1,
"font-family": "Verdana",
"font-size": 12
}).attr({
"text-anchor": "start"
});
//选框
rectBn.push(paper.rect(legendArea[0] + 10, legendArea[1] + 10 + (20 + 3) * i, 180, 20).attr({
"fill": "white",
"fill-opacity": 0,
"stroke": "none"
//"r": 3
})).data("clicked", 0);
}
rectBn.forEach(function (d, i) {
d.mouseover(function () {
if (d.data("clicked") === 0) {
underBn[i].attr('opacity', 0.5);
underBn[i].show();
}
}).mouseout(function () {
if (d.data("clicked") === 0) {
underBn[i].hide();
}
});
d.click(function () {
for (j = 0; j < underBn.length; j++) {
if (j === i) {
underBn[j].show();
} else {
underBn[j].hide();
}
}
rectBn.forEach(function (eachBn) {
if (eachBn !== d) {
eachBn.data("clicked", 0);
}
});
if (d.data("clicked") === 0) {
underBn[i].attr('opacity', 1);
underBn[i].show();
that.chordGroups.forEach(function (d) {
if (d.data('source') !== i && d.data('target') !== i) {
d.attr({
'fill-opacity': 0.1
});
} else {
d.attr({
'fill-opacity': 0.6
});
}
});
d.data("clicked", 1);
} else if (d.data("clicked") === 1) {
underBn[i].hide();
d.data("clicked", 0);
that.chordGroups.forEach(function (d) {
d.attr({
'fill-opacity': 0.6
});
});
}
});
});
};
/**
*对原始数据进行处理
* @param {Array} table 将要被绘制成饼图的二维表数据
*/
Chord.prototype.setSource = function (table, map) {
map = this.map(map);
var that = this;
var index = {};
var groupNum = 0;
table = table.slice(1); // 从第一行开始,第0行舍去
table.forEach(function (d) {
var from = d[map.from];
if (typeof index[from] === 'undefined') {
index[from] = groupNum++;
that.groupNames.push(from);
}
});
table.forEach(function (d) {
var to = d[map.to];
if (typeof index[to] === 'undefined') {
index[to] = groupNum++;
that.groupNames.push(to);
}
});
this.groupNum = groupNum;
for (var i = 0; i < groupNum; i++) {
this.matrix[i] = [];
for (var j = 0; j < groupNum; j++) {
this.matrix[i][j] = 0;
};
};
table.forEach(function (d) {
that.matrix[index[d[map.from]]][index[d[map.to]]] += Number(d[map.value]);
});
};
/**
*创建chord布局
*/
Chord.prototype.layout = function () {
var floatTag = this.floatTag;
var that = this;
that.canvas.clear();
/*var see = [
[11975, 5871, 8916, 2868],
[1951, 10048, 2060, 6171],
[8010, 16145, 8090, 8045],
[1013, 990, 940, 6907]
];*/
var chordLayout = d3.layout.chord().padding(0.05) //chord segments之间的padding间隔
.sortSubgroups(d3.descending) //chord segments细分后的排序规则
.matrix(that.matrix);
/*var fillColor = d3.scale.ordinal()
.domain(d3.range(4))
.range(["#000000", "#FFDD89", "#957244", "#F26223"]);*/
//groups数组:获取每个组的起始角度、数值、索引等属性
var groups = chordLayout.groups();
//由内外半径、起始角度计算路径字符串
var pathCalc = d3.svg.arc().innerRadius(that.defaults.innerRadius).outerRadius(that.defaults.outerRadius).startAngle(function (d) {
return d.startAngle;
}).endAngle(function (d) {
return d.endAngle;
});
var chords = chordLayout.chords();
//计算弦的路径曲线
var chordCalc = d3.svg.chord().radius(that.defaults.innerRadius);
//Raphael: Paper.path()
var donutEle;
//获取每个环形的字符串表示
var spline;
//表示每条弦的element
var chordEle;
//每条弦的字符串表示
var belt;
var num; //每个group分割小格数
var unitAngle; //每个group所占的角度
var angle;
var radian;
var tickLine;
var tickStr; //每个tick的路径
var xTrans, yTrans;
var aX, aY, bX, bY; //每个tick起始端点的坐标
var anchor;
var rotateStr;
var wordStr;
var word;
var textEl;
var wXTrans, wYTrans;
var tips;
var minValue = 1000;
that.chordGroups = that.canvas.set();
that.donutGroups = that.canvas.set();
$(this.node).append(this.floatTag);
//计算某条弦被赋值为target或source的颜色
var colorCalc = function (index) {
var i = chords[index].target.value > chords[index].source.value ? chords[index].target.index : chords[index].source.index;
return i;
};
|
//添加透明效果
var mouseOverDonut = function () {
floatTag.html('<div style = "text-align: center;margin:auto;color:'
//+ jqNode.color
+
"#ffffff" + '">' + this.data('text') + '</div>');
floatTag.css({
"visibility": "visible"
});
that.underBn.forEach(function (d) {
d.hide();
});
index = this.data("donutIndex");
that.chordGroups.forEach(function (d) {
if (d.data('source') !== index && d.data('target') !== index) {
d.attr({
'fill-opacity': 0.1
});
} else {
d.attr({
'fill-opacity': 0.6
});
}
});
//fade(this.data("donutIndex"), 0.2);
that.underBn[index].attr('opacity', 0.5).show();
};
var mouseOutDonut = function () {
floatTag.css({
"visibility": "hidden"
});
index = this.data("donutIndex");
that.chordGroups.forEach(function (d) {
if (d.data('source') !== index && d.data('target') !== index) {
d.attr({
'fill-opacity': 0.6
});
}
});
//fade(this.data("donutIndex"), 0.6);
that.underBn[index].hide();
};
var mouseoverChord = function () {
floatTag.html('<div style="text-align: center;margin:auto;color:#ffffff">' + this.data('text') + '</div>');
floatTag.css({
"visibility": "visible"
});
that.underBn.forEach(function (d) {
d.hide();
});
that.chordGroups.forEach(function (d) {
d.attr("fill-opacity", 0.1);
});
if (navigator.appName !== "Microsoft Internet Explorer") {
this.toFront(); //把当前弦移到画布最上层
}
this.attr("fill-opacity", 0.7);
that.underBn[this.data('source')].attr('opacity', 0.5).show();
};
var mouseoutChord = function () {
floatTag.css({
"visibility": "hidden"
});
//alert("***");
that.chordGroups.forEach(function (d) {
d.attr("fill-opacity", 0.6);
});
//this.attr("fill-opacity", 0.6);
that.underBn[this.data('source')].hide();
};
//画弦*********************************************************
var t;
for (t = 0; t <= chords.length - 1; t++) {
//alert(chords.length);
belt = chordCalc(chords[t]);
//hover到弦上时的效果
tips = that.groupNames[chords[t].source.index] + " to " + that.groupNames[chords[t].target.index] + ": " + that.matrix[chords[t].source.index][chords[t].target.index] + "," + that.groupNames[chords[t].target.index] + " to " + that.groupNames[chords[t].source.index] + ": " + that.matrix[chords[t].target.index][chords[t].source.index];
chordEle = that.canvas.path(belt).
translate((that.defaults.width - this.xOffset) / 2 + this.xOffset, that.defaults.height / 2).attr({
"path": belt,
"fill": that.getColor(colorCalc(t)),
"fill-opacity": 0.6,
"stroke": "#d6d6d6",
"stroke-opacity": 0.1
}).hover(mouseoverChord, mouseoutChord).data("source", chords[t].source.index).data("target", chords[t].target.index);
//.attr("fill", fillColor(chords[t].target.index))
chordEle.data('text', tips);
that.chordGroups.push(chordEle);
}
//画圆弧*********************************************************
var i, r;
var donutName;
var nameStr;
var nameX, nameY;
var ro, a;
var sum = 0;
for (r = 0; r <= groups.length - 1; r++) {
sum += groups[r].value;
}
for (i = 0; i <= groups.length - 1; i++) {
//画外圈的pie图**************************************
//计算每个group的path
spline = pathCalc(groups[i]);
tips = that.groupNames[i] + ": " + Math.round(groups[i].value) + " " + (groups[i].value * 100 / sum).toFixed(2) + "%";
donutEle = that.canvas.path(spline).translate((that.defaults.width - this.xOffset) / 2 + this.xOffset, that.defaults.height / 2).data("donutIndex", i).attr({
"path": spline,
"fill": that.getColor(i),
"stroke": that.getColor(i)
}).mouseover(mouseOverDonut).mouseout(mouseOutDonut);
donutEle.data('text', tips);
that.donutGroups.push(donutEle);
//每个donut上显示名称
ro = groups[i].startAngle * 180 / Math.PI - 86 + 90;
a = (groups[i].startAngle * 180 / Math.PI - 86) * Math.PI / 180;
nameX = ((that.defaults.outerRadius - that.defaults.innerRadius) / 2 + that.defaults.innerRadius) * Math.cos(a);
nameY = ((that.defaults.outerRadius - that.defaults.innerRadius) / 2 + that.defaults.innerRadius) * Math.sin(a);
nameStr = "T" + ((that.defaults.width - that.xOffset) / 2 + that.xOffset) + "," + that.defaults.height / 2 + "R" + ro + "T" + nameX + "," + nameY;
if ((groups[i].endAngle - groups[i].startAngle) * 180 / Math.PI > 10) {
donutName = that.canvas.text().attr("font", "12px Verdana").attr("text", that.groupNames[i]).transform(nameStr);
}
//画刻度和刻度值**************************************
num = groups[i].value / 5000;
//最细分的每个小格代表的数值大小
unitAngle = (groups[i].endAngle - groups[i].startAngle) * 180 / Math.PI / num;
var j;
for (j = 0; j <= num; j++) {
//计算旋转角度和水平、竖直方向所需平移的距离
radian = ((groups[i].startAngle * 180 / Math.PI - 90) + j * unitAngle);
angle = radian * Math.PI / 180;
xTrans = that.defaults.outerRadius * Math.cos(angle);
yTrans = that.defaults.outerRadius * Math.sin(angle);
tickStr = "T" + ((that.defaults.width - that.xOffset) / 2 + that.xOffset) + "," + that.defaults.height / 2 + "T" + xTrans + "," + yTrans;
//刻度线的起点终点坐标
aX = ((that.defaults.width - that.xOffset) / 2 + that.xOffset) + xTrans;
aY = that.defaults.height / 2 + yTrans;
bX = ((that.defaults.width - that.xOffset) / 2 + that.xOffset) + (that.defaults.outerRadius + 6) * Math.cos(angle);
bY = that.defaults.height / 2 + (that.defaults.outerRadius + 6) * Math.sin(angle);
tickLine = "M" + aX + "," + aY + "L" + bX + "," + bY;
that.canvas.path(tickLine).attr({
'stroke': "#929292",
"stroke-width": '1px'
}); //绘制刻度
//每隔五个刻度,绘制一次文字
if (j % 2 === 0) {
//计算text-anchor
if (radian + 90 < 180) {
anchor = "start";
} else {
anchor = "end";
}
//计算文字方向是否需要旋转180度
if (radian + 90 < 180) {
rotateStr = null;
} else {
rotateStr = "R180";
}
wXTrans = (that.defaults.outerRadius + 10) * Math.cos(angle);
wYTrans = (that.defaults.outerRadius + 10) * Math.sin(angle);
word = j % 2 ? "" : Math.round(((groups[i].value / num) * j) / 1000);
wordStr = "T" + ((that.defaults.width - that.xOffset) / 2 + that.xOffset) + "," + that.defaults.height / 2 + "R" + radian
/*(groups[i].startAngle * 180 / Math.PI - 90)*/
+ rotateStr + "T" + wXTrans + "," + wYTrans;
//绘制文字
textEl = that.canvas.text(0, 0, word).attr("font", "12px Verdana").transform(wordStr).attr("text-anchor", anchor).attr('fill', "#929292");
}
}
}
/*this.canvas.text().attr("font", "12px arial").translate((that.defaults.width - this.xOffset) / 2 + this.xOffset, this.defaults.height).attr("text", "The unit of the scale on the periphery is 1000. \n 刻度值的单位为1000。");
*/
};
return Chord;
}); | random_line_split | |
provision.go | package cli
/**
* SPDX-License-Identifier: Apache-2.0
* Copyright 2020 vorteil.io Pty Ltd
*/
import (
"bytes"
"context"
"encoding/base64"
"fmt"
"io"
"io/ioutil"
"os"
"strconv"
"strings"
"github.com/google/uuid"
"github.com/spf13/cobra"
"github.com/vorteil/vorteil/pkg/provisioners"
"github.com/vorteil/vorteil/pkg/provisioners/amazon"
"github.com/vorteil/vorteil/pkg/provisioners/azure"
"github.com/vorteil/vorteil/pkg/provisioners/google"
"github.com/vorteil/vorteil/pkg/provisioners/registry"
"github.com/vorteil/vorteil/pkg/vdisk"
"github.com/vorteil/vorteil/pkg/vio"
"github.com/vorteil/vorteil/pkg/vpkg"
)
var provisionCmd = &cobra.Command{
Use: "provision BUILDABLE PROVISIONER",
Short: "Provision a vorteil buildable",
Long: `Provision a vorteil buildable to a supported provisioner online.
Example Command:
- Provisioning python3 package to an aws provisioner:
$ vorteil images provision ./python3.vorteil ./awsProvisioner
PROVISIONER is a file that has been created with the 'vorteil provisioners new' command.
It tells vorteil where to provision your BUILDABLE to.
If your PROVISIONER was created with a passphrase you can input this passphrase with the
'--passphrase' flag when using the 'provision' command.`,
Args: cobra.ExactArgs(2),
Run: func(cmd *cobra.Command, args []string) {
var provisionFile string
provisionFile = args[1]
// Load the provided provisioner file
if _, err := os.Stat(provisionFile); err != nil {
SetError(fmt.Errorf("Could not read PROVISIONER '%s' , error: %v", provisionFile, err), 1)
return
}
b, err := ioutil.ReadFile(provisionFile)
if err != nil {
SetError(fmt.Errorf("Could not read PROVISIONER '%s' , error: %v", provisionFile, err), 2)
return
}
data, err := provisioners.Decrypt(b, provisionPassPhrase)
if err != nil {
SetError(err, 3)
return
}
ptype, err := provisioners.ProvisionerType(data)
if err != nil {
SetError(err, 4)
return
}
prov, err := registry.NewProvisioner(ptype, log, data)
if err != nil {
SetError(err, 5)
return
}
buildablePath := "."
if len(args) >= 1 {
buildablePath = args[0]
}
pkgBuilder, err := getPackageBuilder("BUILDABLE", buildablePath)
if err != nil {
SetError(err, 9)
return
}
err = modifyPackageBuilder(pkgBuilder)
if err != nil {
SetError(err, 10)
return
}
pkgReader, err := vpkg.ReaderFromBuilder(pkgBuilder)
if err != nil {
SetError(err, 11)
return
}
defer pkgReader.Close()
pkgReader, err = vpkg.PeekVCFG(pkgReader)
if err != nil |
err = initKernels()
if err != nil {
SetError(err, 13)
return
}
f, err := ioutil.TempFile(os.TempDir(), "vorteil.disk")
if err != nil {
SetError(err, 14)
return
}
defer os.Remove(f.Name())
defer f.Close()
err = vdisk.Build(context.Background(), f, &vdisk.BuildArgs{
WithVCFGDefaults: true,
PackageReader: pkgReader,
Format: prov.DiskFormat(),
SizeAlign: int64(prov.SizeAlign()),
KernelOptions: vdisk.KernelOptions{
Shell: flagShell,
},
Logger: log,
})
if err != nil {
SetError(err, 15)
return
}
err = f.Close()
if err != nil {
SetError(err, 16)
return
}
err = pkgReader.Close()
if err != nil {
SetError(err, 17)
return
}
image, err := vio.LazyOpen(f.Name())
if err != nil {
SetError(err, 18)
return
}
if provisionName == "" {
provisionName = generateProvisionUUID()
log.Infof("--name flag what not set using generated uuid '%s'", provisionName)
}
ctx := context.TODO()
err = prov.Provision(&provisioners.ProvisionArgs{
Context: ctx,
Image: image,
Name: provisionName,
Description: provisionDescription,
Force: provisionForce,
ReadyWhenUsable: provisionReadyWhenUsable,
})
if err != nil {
SetError(err, 19)
return
}
fmt.Printf("Finished creating image.\n")
},
}
func generateProvisionUUID() string {
pName := strings.ReplaceAll(uuid.New().String(), "-", "")
// Replace first character with v if its a number
if _, err := strconv.Atoi(pName[:1]); err == nil {
pName = "v" + pName[1:]
}
return pName
}
var (
provisionName string
provisionDescription string
provisionForce bool
provisionReadyWhenUsable bool
provisionPassPhrase string
)
func init() {
f := provisionCmd.Flags()
f.StringVarP(&flagKey, "key", "k", "", "vrepo authentication key")
f.StringVarP(&provisionName, "name", "n", "", "Name of the resulting image on the remote platform.")
f.StringVarP(&provisionDescription, "description", "D", "", "Description for the resulting image, if supported by the platform.")
f.BoolVarP(&provisionForce, "force", "f", false, "Force an overwrite if an existing image conflicts with the new.")
f.BoolVarP(&provisionReadyWhenUsable, "ready-when-usable", "r", false, "Return successfully as soon as the operation is complete, regardless of whether or not the platform is still processing the image.")
f.StringVarP(&provisionPassPhrase, "passphrase", "s", "", "Passphrase used to decrypt encrypted provisioner data.")
}
var provisionersCmd = &cobra.Command{
Use: "provisioners",
Short: "Helper commands for working with Vorteil provisioners",
Long: ``,
Example: ``,
}
var provisionersNewCmd = &cobra.Command{
Use: "new",
Short: "Add a new provisioner.",
}
var (
provisionersNewPassphrase string
// Google Cloud Platform
provisionersNewGoogleBucket string
provisionersNewGoogleKeyFile string
// Amazon Web Services
provisionersNewAmazonKey string
provisionersNewAmazonRegion string
provisionersNewAmazonBucket string
provisionersNewAmazonSecret string
// Azure
provisionersNewAzureContainer string
provisionersNewAzureKeyFile string
provisionersNewAzureLocation string
provisionersNewAzureResourceGroup string
provisionersNewAzureStorageAccountKey string
provisionersNewAzureStorageAccountName string
)
var provisionersNewAmazonEC2Cmd = &cobra.Command{
Use: "amazon-ec2 <OUTPUT_FILE>",
Short: "Add a new AWS (Amazon Web Services) Provisioner.",
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
f, err := os.OpenFile(args[0], os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
SetError(err, 1)
return
}
defer f.Close()
p, err := amazon.NewProvisioner(log, &amazon.Config{
Key: provisionersNewAmazonKey,
Secret: provisionersNewAmazonSecret,
Region: provisionersNewAmazonRegion,
Bucket: provisionersNewAmazonBucket,
})
if err != nil {
SetError(err, 2)
return
}
data, err := p.Marshal()
if err != nil {
SetError(err, 3)
return
}
out := provisioners.Encrypt(data, provisionersNewPassphrase)
_, err = io.Copy(f, bytes.NewReader(out))
if err != nil {
SetError(err, 4)
return
}
},
}
func init() {
f := provisionersNewAmazonEC2Cmd.Flags()
f.StringVarP(&provisionersNewAmazonKey, "key", "k", "", "Access key ID")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("key")
f.StringVarP(&provisionersNewAmazonSecret, "secret", "s", "", "Secret access key")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("secret")
f.StringVarP(&provisionersNewAmazonRegion, "region", "r", "ap-southeast-2", "AWS region")
f.StringVarP(&provisionersNewAmazonBucket, "bucket", "b", "", "AWS bucket")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("bucket")
f.StringVarP(&provisionersNewPassphrase, "passphrase", "p", "", "Passphrase for encrypting exported provisioner data.")
}
var provisionersNewAzureCmd = &cobra.Command{
Use: "azure <OUTPUT_FILE>",
Short: "Add a new Microsoft Azure Provisioner.",
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
f, err := os.OpenFile(args[0], os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
SetError(err, 1)
return
}
defer f.Close()
path := provisionersNewAzureKeyFile
_, err = os.Stat(path)
if err != nil {
SetError(err, 2)
return
}
b, err := ioutil.ReadFile(path)
if err != nil {
SetError(err, 3)
return
}
p, err := azure.NewProvisioner(log, &azure.Config{
Key: base64.StdEncoding.EncodeToString(b),
Container: provisionersNewAzureContainer,
Location: provisionersNewAzureLocation,
ResourceGroup: provisionersNewAzureResourceGroup,
StorageAccountKey: provisionersNewAzureStorageAccountKey,
StorageAccountName: provisionersNewAzureStorageAccountName,
})
if err != nil {
SetError(err, 4)
return
}
data, err := p.Marshal()
if err != nil {
SetError(err, 5)
return
}
out := provisioners.Encrypt(data, provisionersNewPassphrase)
_, err = io.Copy(f, bytes.NewReader(out))
if err != nil {
SetError(err, 6)
return
}
},
}
func init() {
f := provisionersNewAzureCmd.Flags()
f.StringVarP(&provisionersNewAzureKeyFile, "key-file", "k", "", "Azure 'Service Principal' credentials file")
provisionersNewAzureCmd.MarkFlagRequired("key-file")
f.StringVarP(&provisionersNewAzureContainer, "container", "c", "", "Azure container name")
provisionersNewAzureCmd.MarkFlagRequired("container")
f.StringVarP(&provisionersNewAzureResourceGroup, "resource-group", "r", "", "Azure resource group name")
provisionersNewAzureCmd.MarkFlagRequired("resource-group")
f.StringVarP(&provisionersNewAzureLocation, "location", "l", "", "Azure location")
provisionersNewAzureCmd.MarkFlagRequired("location")
f.StringVarP(&provisionersNewAzureStorageAccountKey, "storage-account-key", "s", "", "Azure storage account key")
provisionersNewAzureCmd.MarkFlagRequired("storage-account-key")
f.StringVarP(&provisionersNewAzureStorageAccountName, "storage-account-name", "n", "", "Azure storage account name")
provisionersNewAzureCmd.MarkFlagRequired("storage-account-name")
f.StringVarP(&provisionersNewPassphrase, "passphrase", "p", "", "Passphrase for encrypting exported provisioner data.")
}
var provisionersNewGoogleCmd = &cobra.Command{
Use: "google <OUTPUT_FILE>",
Short: "Add a new Google Cloud (Compute Engine) Provisioner.",
Args: cobra.ExactArgs(1), // Single arg, points to output file
Run: func(cmd *cobra.Command, args []string) {
f, err := os.OpenFile(args[0], os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
SetError(err, 1)
return
}
defer f.Close()
path := provisionersNewGoogleKeyFile
_, err = os.Stat(path)
if err != nil {
SetError(err, 2)
return
}
b, err := ioutil.ReadFile(path)
if err != nil {
SetError(err, 3)
return
}
p, err := google.NewProvisioner(log, &google.Config{
Bucket: provisionersNewGoogleBucket,
Key: base64.StdEncoding.EncodeToString(b),
})
if err != nil {
SetError(err, 4)
return
}
data, err := p.Marshal()
if err != nil {
SetError(err, 5)
return
}
out := provisioners.Encrypt(data, provisionersNewPassphrase)
_, err = io.Copy(f, bytes.NewReader(out))
if err != nil {
SetError(err, 6)
return
}
},
}
func init() {
f := provisionersNewGoogleCmd.Flags()
f.StringVarP(&provisionersNewPassphrase, "passphrase", "p", "", "Passphrase for encrypting exported provisioner data.")
f.StringVarP(&provisionersNewGoogleBucket, "bucket", "b", "", "Name of an existing Google Cloud Storage bucket, for which the provided service account credentials have adequate permissions for object creation/deletion.")
provisionersNewGoogleCmd.MarkFlagRequired("bucket")
f.StringVarP(&provisionersNewGoogleKeyFile, "credentials", "f", "", "Path of an existing JSON-formatted Google Cloud Platform service account credentials file.")
provisionersNewGoogleCmd.MarkFlagRequired("credentials")
}
| {
SetError(err, 12)
return
} | conditional_block |
provision.go | package cli
/**
* SPDX-License-Identifier: Apache-2.0
* Copyright 2020 vorteil.io Pty Ltd
*/
import (
"bytes"
"context"
"encoding/base64"
"fmt"
"io"
"io/ioutil"
"os"
"strconv"
"strings"
"github.com/google/uuid"
"github.com/spf13/cobra"
"github.com/vorteil/vorteil/pkg/provisioners"
"github.com/vorteil/vorteil/pkg/provisioners/amazon"
"github.com/vorteil/vorteil/pkg/provisioners/azure"
"github.com/vorteil/vorteil/pkg/provisioners/google"
"github.com/vorteil/vorteil/pkg/provisioners/registry"
"github.com/vorteil/vorteil/pkg/vdisk"
"github.com/vorteil/vorteil/pkg/vio"
"github.com/vorteil/vorteil/pkg/vpkg"
)
var provisionCmd = &cobra.Command{
Use: "provision BUILDABLE PROVISIONER",
Short: "Provision a vorteil buildable",
Long: `Provision a vorteil buildable to a supported provisioner online.
Example Command:
- Provisioning python3 package to an aws provisioner:
$ vorteil images provision ./python3.vorteil ./awsProvisioner
PROVISIONER is a file that has been created with the 'vorteil provisioners new' command.
It tells vorteil where to provision your BUILDABLE to.
If your PROVISIONER was created with a passphrase you can input this passphrase with the
'--passphrase' flag when using the 'provision' command.`,
Args: cobra.ExactArgs(2),
Run: func(cmd *cobra.Command, args []string) {
var provisionFile string
provisionFile = args[1]
// Load the provided provisioner file
if _, err := os.Stat(provisionFile); err != nil {
SetError(fmt.Errorf("Could not read PROVISIONER '%s' , error: %v", provisionFile, err), 1)
return
}
b, err := ioutil.ReadFile(provisionFile)
if err != nil {
SetError(fmt.Errorf("Could not read PROVISIONER '%s' , error: %v", provisionFile, err), 2)
return
}
data, err := provisioners.Decrypt(b, provisionPassPhrase)
if err != nil {
SetError(err, 3)
return
}
ptype, err := provisioners.ProvisionerType(data)
if err != nil {
SetError(err, 4)
return
}
prov, err := registry.NewProvisioner(ptype, log, data)
if err != nil {
SetError(err, 5)
return
}
buildablePath := "."
if len(args) >= 1 {
buildablePath = args[0]
}
pkgBuilder, err := getPackageBuilder("BUILDABLE", buildablePath)
if err != nil {
SetError(err, 9)
return
}
err = modifyPackageBuilder(pkgBuilder)
if err != nil {
SetError(err, 10)
return
}
pkgReader, err := vpkg.ReaderFromBuilder(pkgBuilder)
if err != nil {
SetError(err, 11)
return
}
defer pkgReader.Close()
pkgReader, err = vpkg.PeekVCFG(pkgReader)
if err != nil {
SetError(err, 12)
return
}
err = initKernels()
if err != nil {
SetError(err, 13)
return
}
f, err := ioutil.TempFile(os.TempDir(), "vorteil.disk")
if err != nil {
SetError(err, 14)
return
}
defer os.Remove(f.Name())
defer f.Close()
err = vdisk.Build(context.Background(), f, &vdisk.BuildArgs{
WithVCFGDefaults: true,
PackageReader: pkgReader,
Format: prov.DiskFormat(),
SizeAlign: int64(prov.SizeAlign()),
KernelOptions: vdisk.KernelOptions{
Shell: flagShell,
},
Logger: log,
})
if err != nil {
SetError(err, 15)
return
}
err = f.Close()
if err != nil {
SetError(err, 16)
return
}
err = pkgReader.Close()
if err != nil {
SetError(err, 17)
return
}
image, err := vio.LazyOpen(f.Name())
if err != nil {
SetError(err, 18)
return
}
if provisionName == "" {
provisionName = generateProvisionUUID()
log.Infof("--name flag what not set using generated uuid '%s'", provisionName)
}
ctx := context.TODO()
err = prov.Provision(&provisioners.ProvisionArgs{
Context: ctx,
Image: image,
Name: provisionName,
Description: provisionDescription,
Force: provisionForce,
ReadyWhenUsable: provisionReadyWhenUsable,
})
if err != nil {
SetError(err, 19)
return
}
fmt.Printf("Finished creating image.\n")
},
}
func generateProvisionUUID() string {
pName := strings.ReplaceAll(uuid.New().String(), "-", "")
// Replace first character with v if its a number
if _, err := strconv.Atoi(pName[:1]); err == nil {
pName = "v" + pName[1:]
}
return pName
}
var (
provisionName string
provisionDescription string
provisionForce bool
provisionReadyWhenUsable bool
provisionPassPhrase string
)
func | () {
f := provisionCmd.Flags()
f.StringVarP(&flagKey, "key", "k", "", "vrepo authentication key")
f.StringVarP(&provisionName, "name", "n", "", "Name of the resulting image on the remote platform.")
f.StringVarP(&provisionDescription, "description", "D", "", "Description for the resulting image, if supported by the platform.")
f.BoolVarP(&provisionForce, "force", "f", false, "Force an overwrite if an existing image conflicts with the new.")
f.BoolVarP(&provisionReadyWhenUsable, "ready-when-usable", "r", false, "Return successfully as soon as the operation is complete, regardless of whether or not the platform is still processing the image.")
f.StringVarP(&provisionPassPhrase, "passphrase", "s", "", "Passphrase used to decrypt encrypted provisioner data.")
}
var provisionersCmd = &cobra.Command{
Use: "provisioners",
Short: "Helper commands for working with Vorteil provisioners",
Long: ``,
Example: ``,
}
var provisionersNewCmd = &cobra.Command{
Use: "new",
Short: "Add a new provisioner.",
}
var (
provisionersNewPassphrase string
// Google Cloud Platform
provisionersNewGoogleBucket string
provisionersNewGoogleKeyFile string
// Amazon Web Services
provisionersNewAmazonKey string
provisionersNewAmazonRegion string
provisionersNewAmazonBucket string
provisionersNewAmazonSecret string
// Azure
provisionersNewAzureContainer string
provisionersNewAzureKeyFile string
provisionersNewAzureLocation string
provisionersNewAzureResourceGroup string
provisionersNewAzureStorageAccountKey string
provisionersNewAzureStorageAccountName string
)
var provisionersNewAmazonEC2Cmd = &cobra.Command{
Use: "amazon-ec2 <OUTPUT_FILE>",
Short: "Add a new AWS (Amazon Web Services) Provisioner.",
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
f, err := os.OpenFile(args[0], os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
SetError(err, 1)
return
}
defer f.Close()
p, err := amazon.NewProvisioner(log, &amazon.Config{
Key: provisionersNewAmazonKey,
Secret: provisionersNewAmazonSecret,
Region: provisionersNewAmazonRegion,
Bucket: provisionersNewAmazonBucket,
})
if err != nil {
SetError(err, 2)
return
}
data, err := p.Marshal()
if err != nil {
SetError(err, 3)
return
}
out := provisioners.Encrypt(data, provisionersNewPassphrase)
_, err = io.Copy(f, bytes.NewReader(out))
if err != nil {
SetError(err, 4)
return
}
},
}
func init() {
f := provisionersNewAmazonEC2Cmd.Flags()
f.StringVarP(&provisionersNewAmazonKey, "key", "k", "", "Access key ID")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("key")
f.StringVarP(&provisionersNewAmazonSecret, "secret", "s", "", "Secret access key")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("secret")
f.StringVarP(&provisionersNewAmazonRegion, "region", "r", "ap-southeast-2", "AWS region")
f.StringVarP(&provisionersNewAmazonBucket, "bucket", "b", "", "AWS bucket")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("bucket")
f.StringVarP(&provisionersNewPassphrase, "passphrase", "p", "", "Passphrase for encrypting exported provisioner data.")
}
var provisionersNewAzureCmd = &cobra.Command{
Use: "azure <OUTPUT_FILE>",
Short: "Add a new Microsoft Azure Provisioner.",
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
f, err := os.OpenFile(args[0], os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
SetError(err, 1)
return
}
defer f.Close()
path := provisionersNewAzureKeyFile
_, err = os.Stat(path)
if err != nil {
SetError(err, 2)
return
}
b, err := ioutil.ReadFile(path)
if err != nil {
SetError(err, 3)
return
}
p, err := azure.NewProvisioner(log, &azure.Config{
Key: base64.StdEncoding.EncodeToString(b),
Container: provisionersNewAzureContainer,
Location: provisionersNewAzureLocation,
ResourceGroup: provisionersNewAzureResourceGroup,
StorageAccountKey: provisionersNewAzureStorageAccountKey,
StorageAccountName: provisionersNewAzureStorageAccountName,
})
if err != nil {
SetError(err, 4)
return
}
data, err := p.Marshal()
if err != nil {
SetError(err, 5)
return
}
out := provisioners.Encrypt(data, provisionersNewPassphrase)
_, err = io.Copy(f, bytes.NewReader(out))
if err != nil {
SetError(err, 6)
return
}
},
}
func init() {
f := provisionersNewAzureCmd.Flags()
f.StringVarP(&provisionersNewAzureKeyFile, "key-file", "k", "", "Azure 'Service Principal' credentials file")
provisionersNewAzureCmd.MarkFlagRequired("key-file")
f.StringVarP(&provisionersNewAzureContainer, "container", "c", "", "Azure container name")
provisionersNewAzureCmd.MarkFlagRequired("container")
f.StringVarP(&provisionersNewAzureResourceGroup, "resource-group", "r", "", "Azure resource group name")
provisionersNewAzureCmd.MarkFlagRequired("resource-group")
f.StringVarP(&provisionersNewAzureLocation, "location", "l", "", "Azure location")
provisionersNewAzureCmd.MarkFlagRequired("location")
f.StringVarP(&provisionersNewAzureStorageAccountKey, "storage-account-key", "s", "", "Azure storage account key")
provisionersNewAzureCmd.MarkFlagRequired("storage-account-key")
f.StringVarP(&provisionersNewAzureStorageAccountName, "storage-account-name", "n", "", "Azure storage account name")
provisionersNewAzureCmd.MarkFlagRequired("storage-account-name")
f.StringVarP(&provisionersNewPassphrase, "passphrase", "p", "", "Passphrase for encrypting exported provisioner data.")
}
var provisionersNewGoogleCmd = &cobra.Command{
Use: "google <OUTPUT_FILE>",
Short: "Add a new Google Cloud (Compute Engine) Provisioner.",
Args: cobra.ExactArgs(1), // Single arg, points to output file
Run: func(cmd *cobra.Command, args []string) {
f, err := os.OpenFile(args[0], os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
SetError(err, 1)
return
}
defer f.Close()
path := provisionersNewGoogleKeyFile
_, err = os.Stat(path)
if err != nil {
SetError(err, 2)
return
}
b, err := ioutil.ReadFile(path)
if err != nil {
SetError(err, 3)
return
}
p, err := google.NewProvisioner(log, &google.Config{
Bucket: provisionersNewGoogleBucket,
Key: base64.StdEncoding.EncodeToString(b),
})
if err != nil {
SetError(err, 4)
return
}
data, err := p.Marshal()
if err != nil {
SetError(err, 5)
return
}
out := provisioners.Encrypt(data, provisionersNewPassphrase)
_, err = io.Copy(f, bytes.NewReader(out))
if err != nil {
SetError(err, 6)
return
}
},
}
func init() {
f := provisionersNewGoogleCmd.Flags()
f.StringVarP(&provisionersNewPassphrase, "passphrase", "p", "", "Passphrase for encrypting exported provisioner data.")
f.StringVarP(&provisionersNewGoogleBucket, "bucket", "b", "", "Name of an existing Google Cloud Storage bucket, for which the provided service account credentials have adequate permissions for object creation/deletion.")
provisionersNewGoogleCmd.MarkFlagRequired("bucket")
f.StringVarP(&provisionersNewGoogleKeyFile, "credentials", "f", "", "Path of an existing JSON-formatted Google Cloud Platform service account credentials file.")
provisionersNewGoogleCmd.MarkFlagRequired("credentials")
}
| init | identifier_name |
provision.go | package cli
/**
* SPDX-License-Identifier: Apache-2.0
* Copyright 2020 vorteil.io Pty Ltd
*/
import (
"bytes"
"context"
"encoding/base64"
"fmt"
"io"
"io/ioutil"
"os"
"strconv"
"strings"
"github.com/google/uuid"
"github.com/spf13/cobra"
"github.com/vorteil/vorteil/pkg/provisioners"
"github.com/vorteil/vorteil/pkg/provisioners/amazon"
"github.com/vorteil/vorteil/pkg/provisioners/azure"
"github.com/vorteil/vorteil/pkg/provisioners/google"
"github.com/vorteil/vorteil/pkg/provisioners/registry"
"github.com/vorteil/vorteil/pkg/vdisk"
"github.com/vorteil/vorteil/pkg/vio"
"github.com/vorteil/vorteil/pkg/vpkg"
)
var provisionCmd = &cobra.Command{
Use: "provision BUILDABLE PROVISIONER",
Short: "Provision a vorteil buildable",
Long: `Provision a vorteil buildable to a supported provisioner online.
Example Command:
- Provisioning python3 package to an aws provisioner:
$ vorteil images provision ./python3.vorteil ./awsProvisioner
PROVISIONER is a file that has been created with the 'vorteil provisioners new' command.
It tells vorteil where to provision your BUILDABLE to.
If your PROVISIONER was created with a passphrase you can input this passphrase with the
'--passphrase' flag when using the 'provision' command.`,
Args: cobra.ExactArgs(2),
Run: func(cmd *cobra.Command, args []string) {
var provisionFile string
provisionFile = args[1]
// Load the provided provisioner file
if _, err := os.Stat(provisionFile); err != nil {
SetError(fmt.Errorf("Could not read PROVISIONER '%s' , error: %v", provisionFile, err), 1)
return
}
b, err := ioutil.ReadFile(provisionFile)
if err != nil {
SetError(fmt.Errorf("Could not read PROVISIONER '%s' , error: %v", provisionFile, err), 2)
return
}
data, err := provisioners.Decrypt(b, provisionPassPhrase)
if err != nil {
SetError(err, 3)
return
}
ptype, err := provisioners.ProvisionerType(data)
if err != nil {
SetError(err, 4)
return
}
prov, err := registry.NewProvisioner(ptype, log, data)
if err != nil {
SetError(err, 5)
return
}
buildablePath := "."
if len(args) >= 1 {
buildablePath = args[0]
}
pkgBuilder, err := getPackageBuilder("BUILDABLE", buildablePath)
if err != nil {
SetError(err, 9)
return
}
err = modifyPackageBuilder(pkgBuilder)
if err != nil {
SetError(err, 10)
return
}
pkgReader, err := vpkg.ReaderFromBuilder(pkgBuilder)
if err != nil {
SetError(err, 11)
return
}
defer pkgReader.Close()
pkgReader, err = vpkg.PeekVCFG(pkgReader)
if err != nil {
SetError(err, 12)
return
}
err = initKernels()
if err != nil {
SetError(err, 13)
return
}
f, err := ioutil.TempFile(os.TempDir(), "vorteil.disk")
if err != nil {
SetError(err, 14)
return
}
defer os.Remove(f.Name())
defer f.Close()
err = vdisk.Build(context.Background(), f, &vdisk.BuildArgs{
WithVCFGDefaults: true,
PackageReader: pkgReader,
Format: prov.DiskFormat(),
SizeAlign: int64(prov.SizeAlign()),
KernelOptions: vdisk.KernelOptions{
Shell: flagShell,
},
Logger: log,
})
if err != nil {
SetError(err, 15)
return
}
err = f.Close()
if err != nil {
SetError(err, 16)
return
}
err = pkgReader.Close()
if err != nil {
SetError(err, 17)
return
}
image, err := vio.LazyOpen(f.Name())
if err != nil {
SetError(err, 18)
return
}
if provisionName == "" {
provisionName = generateProvisionUUID()
log.Infof("--name flag what not set using generated uuid '%s'", provisionName)
}
ctx := context.TODO()
err = prov.Provision(&provisioners.ProvisionArgs{
Context: ctx,
Image: image,
Name: provisionName,
Description: provisionDescription,
Force: provisionForce,
ReadyWhenUsable: provisionReadyWhenUsable,
})
if err != nil {
SetError(err, 19)
return
}
fmt.Printf("Finished creating image.\n")
},
}
func generateProvisionUUID() string {
pName := strings.ReplaceAll(uuid.New().String(), "-", "")
// Replace first character with v if its a number
if _, err := strconv.Atoi(pName[:1]); err == nil {
pName = "v" + pName[1:]
}
return pName
}
var (
provisionName string
provisionDescription string
provisionForce bool
provisionReadyWhenUsable bool
provisionPassPhrase string
)
func init() {
f := provisionCmd.Flags()
f.StringVarP(&flagKey, "key", "k", "", "vrepo authentication key")
f.StringVarP(&provisionName, "name", "n", "", "Name of the resulting image on the remote platform.")
f.StringVarP(&provisionDescription, "description", "D", "", "Description for the resulting image, if supported by the platform.")
f.BoolVarP(&provisionForce, "force", "f", false, "Force an overwrite if an existing image conflicts with the new.") | f.StringVarP(&provisionPassPhrase, "passphrase", "s", "", "Passphrase used to decrypt encrypted provisioner data.")
}
var provisionersCmd = &cobra.Command{
Use: "provisioners",
Short: "Helper commands for working with Vorteil provisioners",
Long: ``,
Example: ``,
}
var provisionersNewCmd = &cobra.Command{
Use: "new",
Short: "Add a new provisioner.",
}
var (
provisionersNewPassphrase string
// Google Cloud Platform
provisionersNewGoogleBucket string
provisionersNewGoogleKeyFile string
// Amazon Web Services
provisionersNewAmazonKey string
provisionersNewAmazonRegion string
provisionersNewAmazonBucket string
provisionersNewAmazonSecret string
// Azure
provisionersNewAzureContainer string
provisionersNewAzureKeyFile string
provisionersNewAzureLocation string
provisionersNewAzureResourceGroup string
provisionersNewAzureStorageAccountKey string
provisionersNewAzureStorageAccountName string
)
var provisionersNewAmazonEC2Cmd = &cobra.Command{
Use: "amazon-ec2 <OUTPUT_FILE>",
Short: "Add a new AWS (Amazon Web Services) Provisioner.",
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
f, err := os.OpenFile(args[0], os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
SetError(err, 1)
return
}
defer f.Close()
p, err := amazon.NewProvisioner(log, &amazon.Config{
Key: provisionersNewAmazonKey,
Secret: provisionersNewAmazonSecret,
Region: provisionersNewAmazonRegion,
Bucket: provisionersNewAmazonBucket,
})
if err != nil {
SetError(err, 2)
return
}
data, err := p.Marshal()
if err != nil {
SetError(err, 3)
return
}
out := provisioners.Encrypt(data, provisionersNewPassphrase)
_, err = io.Copy(f, bytes.NewReader(out))
if err != nil {
SetError(err, 4)
return
}
},
}
func init() {
f := provisionersNewAmazonEC2Cmd.Flags()
f.StringVarP(&provisionersNewAmazonKey, "key", "k", "", "Access key ID")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("key")
f.StringVarP(&provisionersNewAmazonSecret, "secret", "s", "", "Secret access key")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("secret")
f.StringVarP(&provisionersNewAmazonRegion, "region", "r", "ap-southeast-2", "AWS region")
f.StringVarP(&provisionersNewAmazonBucket, "bucket", "b", "", "AWS bucket")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("bucket")
f.StringVarP(&provisionersNewPassphrase, "passphrase", "p", "", "Passphrase for encrypting exported provisioner data.")
}
var provisionersNewAzureCmd = &cobra.Command{
Use: "azure <OUTPUT_FILE>",
Short: "Add a new Microsoft Azure Provisioner.",
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
f, err := os.OpenFile(args[0], os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
SetError(err, 1)
return
}
defer f.Close()
path := provisionersNewAzureKeyFile
_, err = os.Stat(path)
if err != nil {
SetError(err, 2)
return
}
b, err := ioutil.ReadFile(path)
if err != nil {
SetError(err, 3)
return
}
p, err := azure.NewProvisioner(log, &azure.Config{
Key: base64.StdEncoding.EncodeToString(b),
Container: provisionersNewAzureContainer,
Location: provisionersNewAzureLocation,
ResourceGroup: provisionersNewAzureResourceGroup,
StorageAccountKey: provisionersNewAzureStorageAccountKey,
StorageAccountName: provisionersNewAzureStorageAccountName,
})
if err != nil {
SetError(err, 4)
return
}
data, err := p.Marshal()
if err != nil {
SetError(err, 5)
return
}
out := provisioners.Encrypt(data, provisionersNewPassphrase)
_, err = io.Copy(f, bytes.NewReader(out))
if err != nil {
SetError(err, 6)
return
}
},
}
func init() {
f := provisionersNewAzureCmd.Flags()
f.StringVarP(&provisionersNewAzureKeyFile, "key-file", "k", "", "Azure 'Service Principal' credentials file")
provisionersNewAzureCmd.MarkFlagRequired("key-file")
f.StringVarP(&provisionersNewAzureContainer, "container", "c", "", "Azure container name")
provisionersNewAzureCmd.MarkFlagRequired("container")
f.StringVarP(&provisionersNewAzureResourceGroup, "resource-group", "r", "", "Azure resource group name")
provisionersNewAzureCmd.MarkFlagRequired("resource-group")
f.StringVarP(&provisionersNewAzureLocation, "location", "l", "", "Azure location")
provisionersNewAzureCmd.MarkFlagRequired("location")
f.StringVarP(&provisionersNewAzureStorageAccountKey, "storage-account-key", "s", "", "Azure storage account key")
provisionersNewAzureCmd.MarkFlagRequired("storage-account-key")
f.StringVarP(&provisionersNewAzureStorageAccountName, "storage-account-name", "n", "", "Azure storage account name")
provisionersNewAzureCmd.MarkFlagRequired("storage-account-name")
f.StringVarP(&provisionersNewPassphrase, "passphrase", "p", "", "Passphrase for encrypting exported provisioner data.")
}
var provisionersNewGoogleCmd = &cobra.Command{
Use: "google <OUTPUT_FILE>",
Short: "Add a new Google Cloud (Compute Engine) Provisioner.",
Args: cobra.ExactArgs(1), // Single arg, points to output file
Run: func(cmd *cobra.Command, args []string) {
f, err := os.OpenFile(args[0], os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
SetError(err, 1)
return
}
defer f.Close()
path := provisionersNewGoogleKeyFile
_, err = os.Stat(path)
if err != nil {
SetError(err, 2)
return
}
b, err := ioutil.ReadFile(path)
if err != nil {
SetError(err, 3)
return
}
p, err := google.NewProvisioner(log, &google.Config{
Bucket: provisionersNewGoogleBucket,
Key: base64.StdEncoding.EncodeToString(b),
})
if err != nil {
SetError(err, 4)
return
}
data, err := p.Marshal()
if err != nil {
SetError(err, 5)
return
}
out := provisioners.Encrypt(data, provisionersNewPassphrase)
_, err = io.Copy(f, bytes.NewReader(out))
if err != nil {
SetError(err, 6)
return
}
},
}
func init() {
f := provisionersNewGoogleCmd.Flags()
f.StringVarP(&provisionersNewPassphrase, "passphrase", "p", "", "Passphrase for encrypting exported provisioner data.")
f.StringVarP(&provisionersNewGoogleBucket, "bucket", "b", "", "Name of an existing Google Cloud Storage bucket, for which the provided service account credentials have adequate permissions for object creation/deletion.")
provisionersNewGoogleCmd.MarkFlagRequired("bucket")
f.StringVarP(&provisionersNewGoogleKeyFile, "credentials", "f", "", "Path of an existing JSON-formatted Google Cloud Platform service account credentials file.")
provisionersNewGoogleCmd.MarkFlagRequired("credentials")
} | f.BoolVarP(&provisionReadyWhenUsable, "ready-when-usable", "r", false, "Return successfully as soon as the operation is complete, regardless of whether or not the platform is still processing the image.") | random_line_split |
provision.go | package cli
/**
* SPDX-License-Identifier: Apache-2.0
* Copyright 2020 vorteil.io Pty Ltd
*/
import (
"bytes"
"context"
"encoding/base64"
"fmt"
"io"
"io/ioutil"
"os"
"strconv"
"strings"
"github.com/google/uuid"
"github.com/spf13/cobra"
"github.com/vorteil/vorteil/pkg/provisioners"
"github.com/vorteil/vorteil/pkg/provisioners/amazon"
"github.com/vorteil/vorteil/pkg/provisioners/azure"
"github.com/vorteil/vorteil/pkg/provisioners/google"
"github.com/vorteil/vorteil/pkg/provisioners/registry"
"github.com/vorteil/vorteil/pkg/vdisk"
"github.com/vorteil/vorteil/pkg/vio"
"github.com/vorteil/vorteil/pkg/vpkg"
)
var provisionCmd = &cobra.Command{
Use: "provision BUILDABLE PROVISIONER",
Short: "Provision a vorteil buildable",
Long: `Provision a vorteil buildable to a supported provisioner online.
Example Command:
- Provisioning python3 package to an aws provisioner:
$ vorteil images provision ./python3.vorteil ./awsProvisioner
PROVISIONER is a file that has been created with the 'vorteil provisioners new' command.
It tells vorteil where to provision your BUILDABLE to.
If your PROVISIONER was created with a passphrase you can input this passphrase with the
'--passphrase' flag when using the 'provision' command.`,
Args: cobra.ExactArgs(2),
Run: func(cmd *cobra.Command, args []string) {
var provisionFile string
provisionFile = args[1]
// Load the provided provisioner file
if _, err := os.Stat(provisionFile); err != nil {
SetError(fmt.Errorf("Could not read PROVISIONER '%s' , error: %v", provisionFile, err), 1)
return
}
b, err := ioutil.ReadFile(provisionFile)
if err != nil {
SetError(fmt.Errorf("Could not read PROVISIONER '%s' , error: %v", provisionFile, err), 2)
return
}
data, err := provisioners.Decrypt(b, provisionPassPhrase)
if err != nil {
SetError(err, 3)
return
}
ptype, err := provisioners.ProvisionerType(data)
if err != nil {
SetError(err, 4)
return
}
prov, err := registry.NewProvisioner(ptype, log, data)
if err != nil {
SetError(err, 5)
return
}
buildablePath := "."
if len(args) >= 1 {
buildablePath = args[0]
}
pkgBuilder, err := getPackageBuilder("BUILDABLE", buildablePath)
if err != nil {
SetError(err, 9)
return
}
err = modifyPackageBuilder(pkgBuilder)
if err != nil {
SetError(err, 10)
return
}
pkgReader, err := vpkg.ReaderFromBuilder(pkgBuilder)
if err != nil {
SetError(err, 11)
return
}
defer pkgReader.Close()
pkgReader, err = vpkg.PeekVCFG(pkgReader)
if err != nil {
SetError(err, 12)
return
}
err = initKernels()
if err != nil {
SetError(err, 13)
return
}
f, err := ioutil.TempFile(os.TempDir(), "vorteil.disk")
if err != nil {
SetError(err, 14)
return
}
defer os.Remove(f.Name())
defer f.Close()
err = vdisk.Build(context.Background(), f, &vdisk.BuildArgs{
WithVCFGDefaults: true,
PackageReader: pkgReader,
Format: prov.DiskFormat(),
SizeAlign: int64(prov.SizeAlign()),
KernelOptions: vdisk.KernelOptions{
Shell: flagShell,
},
Logger: log,
})
if err != nil {
SetError(err, 15)
return
}
err = f.Close()
if err != nil {
SetError(err, 16)
return
}
err = pkgReader.Close()
if err != nil {
SetError(err, 17)
return
}
image, err := vio.LazyOpen(f.Name())
if err != nil {
SetError(err, 18)
return
}
if provisionName == "" {
provisionName = generateProvisionUUID()
log.Infof("--name flag what not set using generated uuid '%s'", provisionName)
}
ctx := context.TODO()
err = prov.Provision(&provisioners.ProvisionArgs{
Context: ctx,
Image: image,
Name: provisionName,
Description: provisionDescription,
Force: provisionForce,
ReadyWhenUsable: provisionReadyWhenUsable,
})
if err != nil {
SetError(err, 19)
return
}
fmt.Printf("Finished creating image.\n")
},
}
func generateProvisionUUID() string {
pName := strings.ReplaceAll(uuid.New().String(), "-", "")
// Replace first character with v if its a number
if _, err := strconv.Atoi(pName[:1]); err == nil {
pName = "v" + pName[1:]
}
return pName
}
var (
provisionName string
provisionDescription string
provisionForce bool
provisionReadyWhenUsable bool
provisionPassPhrase string
)
func init() |
var provisionersCmd = &cobra.Command{
Use: "provisioners",
Short: "Helper commands for working with Vorteil provisioners",
Long: ``,
Example: ``,
}
var provisionersNewCmd = &cobra.Command{
Use: "new",
Short: "Add a new provisioner.",
}
var (
provisionersNewPassphrase string
// Google Cloud Platform
provisionersNewGoogleBucket string
provisionersNewGoogleKeyFile string
// Amazon Web Services
provisionersNewAmazonKey string
provisionersNewAmazonRegion string
provisionersNewAmazonBucket string
provisionersNewAmazonSecret string
// Azure
provisionersNewAzureContainer string
provisionersNewAzureKeyFile string
provisionersNewAzureLocation string
provisionersNewAzureResourceGroup string
provisionersNewAzureStorageAccountKey string
provisionersNewAzureStorageAccountName string
)
var provisionersNewAmazonEC2Cmd = &cobra.Command{
Use: "amazon-ec2 <OUTPUT_FILE>",
Short: "Add a new AWS (Amazon Web Services) Provisioner.",
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
f, err := os.OpenFile(args[0], os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
SetError(err, 1)
return
}
defer f.Close()
p, err := amazon.NewProvisioner(log, &amazon.Config{
Key: provisionersNewAmazonKey,
Secret: provisionersNewAmazonSecret,
Region: provisionersNewAmazonRegion,
Bucket: provisionersNewAmazonBucket,
})
if err != nil {
SetError(err, 2)
return
}
data, err := p.Marshal()
if err != nil {
SetError(err, 3)
return
}
out := provisioners.Encrypt(data, provisionersNewPassphrase)
_, err = io.Copy(f, bytes.NewReader(out))
if err != nil {
SetError(err, 4)
return
}
},
}
func init() {
f := provisionersNewAmazonEC2Cmd.Flags()
f.StringVarP(&provisionersNewAmazonKey, "key", "k", "", "Access key ID")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("key")
f.StringVarP(&provisionersNewAmazonSecret, "secret", "s", "", "Secret access key")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("secret")
f.StringVarP(&provisionersNewAmazonRegion, "region", "r", "ap-southeast-2", "AWS region")
f.StringVarP(&provisionersNewAmazonBucket, "bucket", "b", "", "AWS bucket")
provisionersNewAmazonEC2Cmd.MarkFlagRequired("bucket")
f.StringVarP(&provisionersNewPassphrase, "passphrase", "p", "", "Passphrase for encrypting exported provisioner data.")
}
var provisionersNewAzureCmd = &cobra.Command{
Use: "azure <OUTPUT_FILE>",
Short: "Add a new Microsoft Azure Provisioner.",
Args: cobra.ExactArgs(1),
Run: func(cmd *cobra.Command, args []string) {
f, err := os.OpenFile(args[0], os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
SetError(err, 1)
return
}
defer f.Close()
path := provisionersNewAzureKeyFile
_, err = os.Stat(path)
if err != nil {
SetError(err, 2)
return
}
b, err := ioutil.ReadFile(path)
if err != nil {
SetError(err, 3)
return
}
p, err := azure.NewProvisioner(log, &azure.Config{
Key: base64.StdEncoding.EncodeToString(b),
Container: provisionersNewAzureContainer,
Location: provisionersNewAzureLocation,
ResourceGroup: provisionersNewAzureResourceGroup,
StorageAccountKey: provisionersNewAzureStorageAccountKey,
StorageAccountName: provisionersNewAzureStorageAccountName,
})
if err != nil {
SetError(err, 4)
return
}
data, err := p.Marshal()
if err != nil {
SetError(err, 5)
return
}
out := provisioners.Encrypt(data, provisionersNewPassphrase)
_, err = io.Copy(f, bytes.NewReader(out))
if err != nil {
SetError(err, 6)
return
}
},
}
func init() {
f := provisionersNewAzureCmd.Flags()
f.StringVarP(&provisionersNewAzureKeyFile, "key-file", "k", "", "Azure 'Service Principal' credentials file")
provisionersNewAzureCmd.MarkFlagRequired("key-file")
f.StringVarP(&provisionersNewAzureContainer, "container", "c", "", "Azure container name")
provisionersNewAzureCmd.MarkFlagRequired("container")
f.StringVarP(&provisionersNewAzureResourceGroup, "resource-group", "r", "", "Azure resource group name")
provisionersNewAzureCmd.MarkFlagRequired("resource-group")
f.StringVarP(&provisionersNewAzureLocation, "location", "l", "", "Azure location")
provisionersNewAzureCmd.MarkFlagRequired("location")
f.StringVarP(&provisionersNewAzureStorageAccountKey, "storage-account-key", "s", "", "Azure storage account key")
provisionersNewAzureCmd.MarkFlagRequired("storage-account-key")
f.StringVarP(&provisionersNewAzureStorageAccountName, "storage-account-name", "n", "", "Azure storage account name")
provisionersNewAzureCmd.MarkFlagRequired("storage-account-name")
f.StringVarP(&provisionersNewPassphrase, "passphrase", "p", "", "Passphrase for encrypting exported provisioner data.")
}
var provisionersNewGoogleCmd = &cobra.Command{
Use: "google <OUTPUT_FILE>",
Short: "Add a new Google Cloud (Compute Engine) Provisioner.",
Args: cobra.ExactArgs(1), // Single arg, points to output file
Run: func(cmd *cobra.Command, args []string) {
f, err := os.OpenFile(args[0], os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
SetError(err, 1)
return
}
defer f.Close()
path := provisionersNewGoogleKeyFile
_, err = os.Stat(path)
if err != nil {
SetError(err, 2)
return
}
b, err := ioutil.ReadFile(path)
if err != nil {
SetError(err, 3)
return
}
p, err := google.NewProvisioner(log, &google.Config{
Bucket: provisionersNewGoogleBucket,
Key: base64.StdEncoding.EncodeToString(b),
})
if err != nil {
SetError(err, 4)
return
}
data, err := p.Marshal()
if err != nil {
SetError(err, 5)
return
}
out := provisioners.Encrypt(data, provisionersNewPassphrase)
_, err = io.Copy(f, bytes.NewReader(out))
if err != nil {
SetError(err, 6)
return
}
},
}
func init() {
f := provisionersNewGoogleCmd.Flags()
f.StringVarP(&provisionersNewPassphrase, "passphrase", "p", "", "Passphrase for encrypting exported provisioner data.")
f.StringVarP(&provisionersNewGoogleBucket, "bucket", "b", "", "Name of an existing Google Cloud Storage bucket, for which the provided service account credentials have adequate permissions for object creation/deletion.")
provisionersNewGoogleCmd.MarkFlagRequired("bucket")
f.StringVarP(&provisionersNewGoogleKeyFile, "credentials", "f", "", "Path of an existing JSON-formatted Google Cloud Platform service account credentials file.")
provisionersNewGoogleCmd.MarkFlagRequired("credentials")
}
| {
f := provisionCmd.Flags()
f.StringVarP(&flagKey, "key", "k", "", "vrepo authentication key")
f.StringVarP(&provisionName, "name", "n", "", "Name of the resulting image on the remote platform.")
f.StringVarP(&provisionDescription, "description", "D", "", "Description for the resulting image, if supported by the platform.")
f.BoolVarP(&provisionForce, "force", "f", false, "Force an overwrite if an existing image conflicts with the new.")
f.BoolVarP(&provisionReadyWhenUsable, "ready-when-usable", "r", false, "Return successfully as soon as the operation is complete, regardless of whether or not the platform is still processing the image.")
f.StringVarP(&provisionPassPhrase, "passphrase", "s", "", "Passphrase used to decrypt encrypted provisioner data.")
} | identifier_body |
NumberPicker.js | "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var _classnames = _interopRequireDefault(require("classnames"));
var _propTypes = _interopRequireDefault(require("prop-types"));
var _react = _interopRequireWildcard(require("react"));
var _uncontrollable = require("uncontrollable");
var _Button = _interopRequireDefault(require("./Button"));
var _Icon = require("./Icon");
var _Localization = require("./Localization");
var _NumberInput = _interopRequireDefault(require("./NumberInput"));
var _Widget = _interopRequireDefault(require("./Widget"));
var _WidgetPicker = _interopRequireDefault(require("./WidgetPicker"));
var CustomPropTypes = _interopRequireWildcard(require("./PropTypes"));
var _useFocusManager = _interopRequireDefault(require("./useFocusManager"));
var _WidgetHelpers = require("./WidgetHelpers");
var _useEventCallback = _interopRequireDefault(require("@restart/hooks/useEventCallback"));
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
function | (obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _objectWithoutProperties(source, excluded) { if (source == null) return {}; var target = _objectWithoutPropertiesLoose(source, excluded); var key, i; if (Object.getOwnPropertySymbols) { var sourceSymbolKeys = Object.getOwnPropertySymbols(source); for (i = 0; i < sourceSymbolKeys.length; i++) { key = sourceSymbolKeys[i]; if (excluded.indexOf(key) >= 0) continue; if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; target[key] = source[key]; } } return target; }
function _objectWithoutPropertiesLoose(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } return target; }
// my tests in ie11/chrome/FF indicate that keyDown repeats
// at about 35ms+/- 5ms after an initial 500ms delay. callback fires on the leading edge
function createInterval(callback) {
let fn;
let id;
const cancel = () => clearTimeout(id);
id = window.setTimeout(fn = () => {
id = window.setTimeout(fn, 35);
callback(); //fire after everything in case the user cancels on the first call
}, 500);
return cancel;
}
function clamp(value, min, max) {
max = max == null ? Infinity : max;
min = min == null ? -Infinity : min;
if (value == null || value === '') return null;
return Math.max(Math.min(typeof value == 'string' ? parseInt(value) : value, max), min);
}
const propTypes = {
value: _propTypes.default.number,
/**
* @example ['onChangePicker', [ [1, null] ]]
*/
onChange: _propTypes.default.func,
/**
* The minimum number that the NumberPicker value.
* @example ['prop', ['min', 0]]
*/
min: _propTypes.default.number,
/**
* The maximum number that the NumberPicker value.
*
* @example ['prop', ['max', 0]]
*/
max: _propTypes.default.number,
/**
* Amount to increase or decrease value when using the spinner buttons.
*
* @example ['prop', ['step', 5]]
*/
step: _propTypes.default.number,
/**
* Specify how precise the `value` should be when typing, incrementing, or decrementing the value.
* When empty, precision is parsed from the current `format` and culture.
*/
precision: _propTypes.default.number,
/**
* A format string used to display the number value. Localizer dependent, read [localization](../localization) for more info.
*
* @example ['prop', { max: 1, min: -1 , defaultValue: 0.2585, format: "{ style: 'percent' }" }]
*/
format: _propTypes.default.any,
/**
* Determines how the NumberPicker parses a number from the localized string representation.
* You can also provide a parser `function` to pair with a custom `format`.
*/
parse: _propTypes.default.func,
incrementIcon: _propTypes.default.node,
decrementIcon: _propTypes.default.node,
/** @ignore */
tabIndex: _propTypes.default.any,
name: _propTypes.default.string,
placeholder: _propTypes.default.string,
onKeyDown: _propTypes.default.func,
onKeyPress: _propTypes.default.func,
onKeyUp: _propTypes.default.func,
autoFocus: _propTypes.default.bool,
/**
* @example ['disabled', ['1']]
*/
disabled: CustomPropTypes.disabled,
/**
* @example ['readOnly', ['1.5']]
*/
readOnly: CustomPropTypes.disabled,
/** Adds a css class to the input container element. */
containerClassName: _propTypes.default.string,
inputProps: _propTypes.default.object,
messages: _propTypes.default.shape({
increment: _propTypes.default.string,
decrement: _propTypes.default.string
}),
/** @ignore */
localizer: _propTypes.default.object
};
const defaultProps = {
incrementIcon: _Icon.caretUp,
decrementIcon: _Icon.caretDown,
min: -Infinity,
max: Infinity,
step: 1
};
/**
* ---
* localized: true
* shortcuts:
* - { key: down arrow, label: decrement value }
* - { key: up arrow, label: increment value }
* - { key: home, label: set value to minimum value, if finite }
* - { key: end, label: set value to maximum value, if finite }
* ---
*
* @public
*/
function NumberPicker(uncontrolledProps) {
const _useUncontrolled = (0, _uncontrollable.useUncontrolled)(uncontrolledProps, {
value: 'onChange'
}),
{
className,
containerClassName,
disabled,
readOnly,
value,
min,
max,
incrementIcon,
decrementIcon,
placeholder,
autoFocus,
tabIndex,
parse,
name,
onChange,
messages,
format,
onKeyDown,
onKeyPress,
onKeyUp,
inputProps,
precision,
step: pStep
} = _useUncontrolled,
elementProps = _objectWithoutProperties(_useUncontrolled, ["className", "containerClassName", "disabled", "readOnly", "value", "min", "max", "incrementIcon", "decrementIcon", "placeholder", "autoFocus", "tabIndex", "parse", "name", "onChange", "messages", "format", "onKeyDown", "onKeyPress", "onKeyUp", "inputProps", "precision", "step"]);
const localizer = (0, _Localization.useLocalizer)(messages, {
number: format
});
const ref = (0, _react.useRef)(null);
const inputRef = (0, _react.useRef)(null);
const repeaterRef = (0, _react.useRef)(null);
const [focusEvents, focused] = (0, _useFocusManager.default)(ref, uncontrolledProps, {
willHandle(focused) {
if (focused) focus();
}
});
const handleMouseDown = (0, _useEventCallback.default)((direction, event) => {
if (event) event.persist();
let method = direction === 'UP' ? increment : decrement;
let value = method(event),
atTop = direction === 'UP' && value === max,
atBottom = direction === 'DOWN' && value === min;
if (atTop || atBottom) handleMouseUp();else if (!repeaterRef.current) {
repeaterRef.current = createInterval(() => {
handleMouseDown(direction, event);
});
}
});
const handleMouseUp = (0, _useEventCallback.default)(() => {
if (!repeaterRef.current) return;
repeaterRef.current();
repeaterRef.current = null;
});
const handleKeyDown = (0, _useEventCallback.default)(event => {
if (readOnly) return;
let key = event.key;
(0, _WidgetHelpers.notify)(onKeyDown, [event]);
if (event.defaultPrevented) return;
if (key === 'End' && isFinite(max)) handleChange(max, event);else if (key === 'Home' && isFinite(min)) handleChange(min, event);else if (key === 'ArrowDown') {
event.preventDefault();
decrement(event);
} else if (key === 'ArrowUp') {
event.preventDefault();
increment(event);
}
});
const handleChange = (rawValue, originalEvent = null) => {
let nextValue = clamp(rawValue, min, max);
if (value !== nextValue) (0, _WidgetHelpers.notify)(onChange, [nextValue, {
rawValue,
originalEvent,
lastValue: value
}]);
};
function focus() {
var _inputRef$current;
(_inputRef$current = inputRef.current) === null || _inputRef$current === void 0 ? void 0 : _inputRef$current.focus();
}
function increment(event) {
return step(pStep, event);
}
function decrement(event) {
return step(-pStep, event);
}
function step(amount, event) {
const nextValue = (value || 0) + amount;
handleChange(precision != null ? parseFloat(round(nextValue, precision)) : nextValue, event);
return nextValue;
}
const clampedValue = clamp(value, min, max);
return /*#__PURE__*/_react.default.createElement(_Widget.default, Object.assign({}, elementProps, {
focused: focused,
disabled: disabled,
readOnly: readOnly,
onKeyDown: handleKeyDown
}, focusEvents, {
ref: ref,
className: (0, _classnames.default)(className, 'rw-number-picker')
}), /*#__PURE__*/_react.default.createElement(_WidgetPicker.default, {
className: containerClassName
}, /*#__PURE__*/_react.default.createElement(_NumberInput.default, Object.assign({}, inputProps, {
role: "spinbutton",
tabIndex: tabIndex,
value: clampedValue,
placeholder: placeholder,
autoFocus: autoFocus,
editing: focused,
localizer: localizer,
parse: parse,
name: name,
min: min,
max: max,
disabled: disabled,
readOnly: readOnly,
onChange: handleChange,
onKeyPress: onKeyPress,
onKeyUp: onKeyUp,
innerRef: inputRef
})), /*#__PURE__*/_react.default.createElement("span", {
className: "rw-input-addon rw-number-picker-spinners"
}, /*#__PURE__*/_react.default.createElement(_Button.default, {
icon: incrementIcon,
className: "rw-picker-btn",
disabled: clampedValue === max || disabled || readOnly,
label: localizer.messages.increment({
value: clampedValue,
min,
max
}),
onMouseUp: () => handleMouseUp(),
onMouseDown: e => handleMouseDown('UP', e),
onMouseLeave: () => handleMouseUp()
}), /*#__PURE__*/_react.default.createElement(_Button.default, {
icon: decrementIcon,
className: "rw-picker-btn",
disabled: clampedValue === min || disabled || readOnly,
label: localizer.messages.decrement({
value: clampedValue,
min,
max
}),
onMouseUp: () => handleMouseUp(),
onMouseDown: e => handleMouseDown('DOWN', e),
onMouseLeave: () => handleMouseUp()
}))));
}
;
NumberPicker.propTypes = propTypes;
NumberPicker.defaultProps = defaultProps;
var _default = NumberPicker; // thank you kendo ui core
// https://github.com/telerik/kendo-ui-core/blob/master/src/kendo.core.js#L1036
exports.default = _default;
function round(value, precision) {
precision = precision || 0;
let parts = ('' + value).split('e');
let valueInt = Math.round(+(parts[0] + 'e' + (parts[1] ? +parts[1] + precision : precision)));
parts = ('' + valueInt).split('e');
valueInt = +(parts[0] + 'e' + (parts[1] ? +parts[1] - precision : -precision));
return valueInt.toFixed(precision);
} | _interopRequireWildcard | identifier_name |
NumberPicker.js | "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var _classnames = _interopRequireDefault(require("classnames"));
var _propTypes = _interopRequireDefault(require("prop-types"));
var _react = _interopRequireWildcard(require("react"));
var _uncontrollable = require("uncontrollable");
var _Button = _interopRequireDefault(require("./Button"));
var _Icon = require("./Icon");
var _Localization = require("./Localization");
var _NumberInput = _interopRequireDefault(require("./NumberInput"));
var _Widget = _interopRequireDefault(require("./Widget"));
var _WidgetPicker = _interopRequireDefault(require("./WidgetPicker"));
var CustomPropTypes = _interopRequireWildcard(require("./PropTypes"));
var _useFocusManager = _interopRequireDefault(require("./useFocusManager"));
var _WidgetHelpers = require("./WidgetHelpers");
var _useEventCallback = _interopRequireDefault(require("@restart/hooks/useEventCallback"));
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
function _interopRequireDefault(obj) |
function _objectWithoutProperties(source, excluded) { if (source == null) return {}; var target = _objectWithoutPropertiesLoose(source, excluded); var key, i; if (Object.getOwnPropertySymbols) { var sourceSymbolKeys = Object.getOwnPropertySymbols(source); for (i = 0; i < sourceSymbolKeys.length; i++) { key = sourceSymbolKeys[i]; if (excluded.indexOf(key) >= 0) continue; if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; target[key] = source[key]; } } return target; }
function _objectWithoutPropertiesLoose(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } return target; }
// my tests in ie11/chrome/FF indicate that keyDown repeats
// at about 35ms+/- 5ms after an initial 500ms delay. callback fires on the leading edge
function createInterval(callback) {
let fn;
let id;
const cancel = () => clearTimeout(id);
id = window.setTimeout(fn = () => {
id = window.setTimeout(fn, 35);
callback(); //fire after everything in case the user cancels on the first call
}, 500);
return cancel;
}
function clamp(value, min, max) {
max = max == null ? Infinity : max;
min = min == null ? -Infinity : min;
if (value == null || value === '') return null;
return Math.max(Math.min(typeof value == 'string' ? parseInt(value) : value, max), min);
}
const propTypes = {
value: _propTypes.default.number,
/**
* @example ['onChangePicker', [ [1, null] ]]
*/
onChange: _propTypes.default.func,
/**
* The minimum number that the NumberPicker value.
* @example ['prop', ['min', 0]]
*/
min: _propTypes.default.number,
/**
* The maximum number that the NumberPicker value.
*
* @example ['prop', ['max', 0]]
*/
max: _propTypes.default.number,
/**
* Amount to increase or decrease value when using the spinner buttons.
*
* @example ['prop', ['step', 5]]
*/
step: _propTypes.default.number,
/**
* Specify how precise the `value` should be when typing, incrementing, or decrementing the value.
* When empty, precision is parsed from the current `format` and culture.
*/
precision: _propTypes.default.number,
/**
* A format string used to display the number value. Localizer dependent, read [localization](../localization) for more info.
*
* @example ['prop', { max: 1, min: -1 , defaultValue: 0.2585, format: "{ style: 'percent' }" }]
*/
format: _propTypes.default.any,
/**
* Determines how the NumberPicker parses a number from the localized string representation.
* You can also provide a parser `function` to pair with a custom `format`.
*/
parse: _propTypes.default.func,
incrementIcon: _propTypes.default.node,
decrementIcon: _propTypes.default.node,
/** @ignore */
tabIndex: _propTypes.default.any,
name: _propTypes.default.string,
placeholder: _propTypes.default.string,
onKeyDown: _propTypes.default.func,
onKeyPress: _propTypes.default.func,
onKeyUp: _propTypes.default.func,
autoFocus: _propTypes.default.bool,
/**
* @example ['disabled', ['1']]
*/
disabled: CustomPropTypes.disabled,
/**
* @example ['readOnly', ['1.5']]
*/
readOnly: CustomPropTypes.disabled,
/** Adds a css class to the input container element. */
containerClassName: _propTypes.default.string,
inputProps: _propTypes.default.object,
messages: _propTypes.default.shape({
increment: _propTypes.default.string,
decrement: _propTypes.default.string
}),
/** @ignore */
localizer: _propTypes.default.object
};
const defaultProps = {
incrementIcon: _Icon.caretUp,
decrementIcon: _Icon.caretDown,
min: -Infinity,
max: Infinity,
step: 1
};
/**
* ---
* localized: true
* shortcuts:
* - { key: down arrow, label: decrement value }
* - { key: up arrow, label: increment value }
* - { key: home, label: set value to minimum value, if finite }
* - { key: end, label: set value to maximum value, if finite }
* ---
*
* @public
*/
function NumberPicker(uncontrolledProps) {
const _useUncontrolled = (0, _uncontrollable.useUncontrolled)(uncontrolledProps, {
value: 'onChange'
}),
{
className,
containerClassName,
disabled,
readOnly,
value,
min,
max,
incrementIcon,
decrementIcon,
placeholder,
autoFocus,
tabIndex,
parse,
name,
onChange,
messages,
format,
onKeyDown,
onKeyPress,
onKeyUp,
inputProps,
precision,
step: pStep
} = _useUncontrolled,
elementProps = _objectWithoutProperties(_useUncontrolled, ["className", "containerClassName", "disabled", "readOnly", "value", "min", "max", "incrementIcon", "decrementIcon", "placeholder", "autoFocus", "tabIndex", "parse", "name", "onChange", "messages", "format", "onKeyDown", "onKeyPress", "onKeyUp", "inputProps", "precision", "step"]);
const localizer = (0, _Localization.useLocalizer)(messages, {
number: format
});
const ref = (0, _react.useRef)(null);
const inputRef = (0, _react.useRef)(null);
const repeaterRef = (0, _react.useRef)(null);
const [focusEvents, focused] = (0, _useFocusManager.default)(ref, uncontrolledProps, {
willHandle(focused) {
if (focused) focus();
}
});
const handleMouseDown = (0, _useEventCallback.default)((direction, event) => {
if (event) event.persist();
let method = direction === 'UP' ? increment : decrement;
let value = method(event),
atTop = direction === 'UP' && value === max,
atBottom = direction === 'DOWN' && value === min;
if (atTop || atBottom) handleMouseUp();else if (!repeaterRef.current) {
repeaterRef.current = createInterval(() => {
handleMouseDown(direction, event);
});
}
});
const handleMouseUp = (0, _useEventCallback.default)(() => {
if (!repeaterRef.current) return;
repeaterRef.current();
repeaterRef.current = null;
});
const handleKeyDown = (0, _useEventCallback.default)(event => {
if (readOnly) return;
let key = event.key;
(0, _WidgetHelpers.notify)(onKeyDown, [event]);
if (event.defaultPrevented) return;
if (key === 'End' && isFinite(max)) handleChange(max, event);else if (key === 'Home' && isFinite(min)) handleChange(min, event);else if (key === 'ArrowDown') {
event.preventDefault();
decrement(event);
} else if (key === 'ArrowUp') {
event.preventDefault();
increment(event);
}
});
const handleChange = (rawValue, originalEvent = null) => {
let nextValue = clamp(rawValue, min, max);
if (value !== nextValue) (0, _WidgetHelpers.notify)(onChange, [nextValue, {
rawValue,
originalEvent,
lastValue: value
}]);
};
function focus() {
var _inputRef$current;
(_inputRef$current = inputRef.current) === null || _inputRef$current === void 0 ? void 0 : _inputRef$current.focus();
}
function increment(event) {
return step(pStep, event);
}
function decrement(event) {
return step(-pStep, event);
}
function step(amount, event) {
const nextValue = (value || 0) + amount;
handleChange(precision != null ? parseFloat(round(nextValue, precision)) : nextValue, event);
return nextValue;
}
const clampedValue = clamp(value, min, max);
return /*#__PURE__*/_react.default.createElement(_Widget.default, Object.assign({}, elementProps, {
focused: focused,
disabled: disabled,
readOnly: readOnly,
onKeyDown: handleKeyDown
}, focusEvents, {
ref: ref,
className: (0, _classnames.default)(className, 'rw-number-picker')
}), /*#__PURE__*/_react.default.createElement(_WidgetPicker.default, {
className: containerClassName
}, /*#__PURE__*/_react.default.createElement(_NumberInput.default, Object.assign({}, inputProps, {
role: "spinbutton",
tabIndex: tabIndex,
value: clampedValue,
placeholder: placeholder,
autoFocus: autoFocus,
editing: focused,
localizer: localizer,
parse: parse,
name: name,
min: min,
max: max,
disabled: disabled,
readOnly: readOnly,
onChange: handleChange,
onKeyPress: onKeyPress,
onKeyUp: onKeyUp,
innerRef: inputRef
})), /*#__PURE__*/_react.default.createElement("span", {
className: "rw-input-addon rw-number-picker-spinners"
}, /*#__PURE__*/_react.default.createElement(_Button.default, {
icon: incrementIcon,
className: "rw-picker-btn",
disabled: clampedValue === max || disabled || readOnly,
label: localizer.messages.increment({
value: clampedValue,
min,
max
}),
onMouseUp: () => handleMouseUp(),
onMouseDown: e => handleMouseDown('UP', e),
onMouseLeave: () => handleMouseUp()
}), /*#__PURE__*/_react.default.createElement(_Button.default, {
icon: decrementIcon,
className: "rw-picker-btn",
disabled: clampedValue === min || disabled || readOnly,
label: localizer.messages.decrement({
value: clampedValue,
min,
max
}),
onMouseUp: () => handleMouseUp(),
onMouseDown: e => handleMouseDown('DOWN', e),
onMouseLeave: () => handleMouseUp()
}))));
}
;
NumberPicker.propTypes = propTypes;
NumberPicker.defaultProps = defaultProps;
var _default = NumberPicker; // thank you kendo ui core
// https://github.com/telerik/kendo-ui-core/blob/master/src/kendo.core.js#L1036
exports.default = _default;
function round(value, precision) {
precision = precision || 0;
let parts = ('' + value).split('e');
let valueInt = Math.round(+(parts[0] + 'e' + (parts[1] ? +parts[1] + precision : precision)));
parts = ('' + valueInt).split('e');
valueInt = +(parts[0] + 'e' + (parts[1] ? +parts[1] - precision : -precision));
return valueInt.toFixed(precision);
} | { return obj && obj.__esModule ? obj : { default: obj }; } | identifier_body |
NumberPicker.js | "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var _classnames = _interopRequireDefault(require("classnames"));
var _propTypes = _interopRequireDefault(require("prop-types"));
var _react = _interopRequireWildcard(require("react"));
var _uncontrollable = require("uncontrollable");
var _Button = _interopRequireDefault(require("./Button"));
var _Icon = require("./Icon");
var _Localization = require("./Localization");
var _NumberInput = _interopRequireDefault(require("./NumberInput"));
var _Widget = _interopRequireDefault(require("./Widget"));
var _WidgetPicker = _interopRequireDefault(require("./WidgetPicker"));
var CustomPropTypes = _interopRequireWildcard(require("./PropTypes"));
var _useFocusManager = _interopRequireDefault(require("./useFocusManager"));
var _WidgetHelpers = require("./WidgetHelpers");
var _useEventCallback = _interopRequireDefault(require("@restart/hooks/useEventCallback"));
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _objectWithoutProperties(source, excluded) { if (source == null) return {}; var target = _objectWithoutPropertiesLoose(source, excluded); var key, i; if (Object.getOwnPropertySymbols) { var sourceSymbolKeys = Object.getOwnPropertySymbols(source); for (i = 0; i < sourceSymbolKeys.length; i++) { key = sourceSymbolKeys[i]; if (excluded.indexOf(key) >= 0) continue; if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; target[key] = source[key]; } } return target; }
function _objectWithoutPropertiesLoose(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } return target; }
// my tests in ie11/chrome/FF indicate that keyDown repeats
// at about 35ms+/- 5ms after an initial 500ms delay. callback fires on the leading edge
function createInterval(callback) {
let fn;
let id;
const cancel = () => clearTimeout(id);
id = window.setTimeout(fn = () => {
id = window.setTimeout(fn, 35);
callback(); //fire after everything in case the user cancels on the first call
}, 500);
return cancel;
}
function clamp(value, min, max) {
max = max == null ? Infinity : max;
min = min == null ? -Infinity : min;
if (value == null || value === '') return null;
return Math.max(Math.min(typeof value == 'string' ? parseInt(value) : value, max), min);
}
const propTypes = {
value: _propTypes.default.number,
/**
* @example ['onChangePicker', [ [1, null] ]]
*/
onChange: _propTypes.default.func,
/**
* The minimum number that the NumberPicker value.
* @example ['prop', ['min', 0]]
*/
min: _propTypes.default.number,
/**
* The maximum number that the NumberPicker value.
*
* @example ['prop', ['max', 0]]
*/
max: _propTypes.default.number,
/**
* Amount to increase or decrease value when using the spinner buttons.
*
* @example ['prop', ['step', 5]]
*/
step: _propTypes.default.number,
/**
* Specify how precise the `value` should be when typing, incrementing, or decrementing the value.
* When empty, precision is parsed from the current `format` and culture.
*/
precision: _propTypes.default.number,
/**
* A format string used to display the number value. Localizer dependent, read [localization](../localization) for more info.
*
* @example ['prop', { max: 1, min: -1 , defaultValue: 0.2585, format: "{ style: 'percent' }" }]
*/
format: _propTypes.default.any,
/**
* Determines how the NumberPicker parses a number from the localized string representation.
* You can also provide a parser `function` to pair with a custom `format`.
*/
parse: _propTypes.default.func,
incrementIcon: _propTypes.default.node,
decrementIcon: _propTypes.default.node,
/** @ignore */
tabIndex: _propTypes.default.any,
name: _propTypes.default.string,
placeholder: _propTypes.default.string,
onKeyDown: _propTypes.default.func,
onKeyPress: _propTypes.default.func,
onKeyUp: _propTypes.default.func,
autoFocus: _propTypes.default.bool,
/**
* @example ['disabled', ['1']]
*/
disabled: CustomPropTypes.disabled,
/**
* @example ['readOnly', ['1.5']]
*/
readOnly: CustomPropTypes.disabled,
/** Adds a css class to the input container element. */
containerClassName: _propTypes.default.string,
inputProps: _propTypes.default.object,
messages: _propTypes.default.shape({
increment: _propTypes.default.string,
decrement: _propTypes.default.string
}),
/** @ignore */
localizer: _propTypes.default.object
};
const defaultProps = {
incrementIcon: _Icon.caretUp,
decrementIcon: _Icon.caretDown,
min: -Infinity,
max: Infinity,
step: 1
};
/**
* ---
* localized: true
* shortcuts:
* - { key: down arrow, label: decrement value }
* - { key: up arrow, label: increment value }
* - { key: home, label: set value to minimum value, if finite }
* - { key: end, label: set value to maximum value, if finite }
* ---
*
* @public
*/
function NumberPicker(uncontrolledProps) {
const _useUncontrolled = (0, _uncontrollable.useUncontrolled)(uncontrolledProps, {
value: 'onChange'
}),
{
className,
containerClassName,
disabled,
readOnly,
value,
min,
max,
incrementIcon,
decrementIcon,
placeholder,
autoFocus,
tabIndex,
parse,
name,
onChange,
messages,
format,
onKeyDown,
onKeyPress,
onKeyUp,
inputProps,
precision,
step: pStep
} = _useUncontrolled,
elementProps = _objectWithoutProperties(_useUncontrolled, ["className", "containerClassName", "disabled", "readOnly", "value", "min", "max", "incrementIcon", "decrementIcon", "placeholder", "autoFocus", "tabIndex", "parse", "name", "onChange", "messages", "format", "onKeyDown", "onKeyPress", "onKeyUp", "inputProps", "precision", "step"]);
const localizer = (0, _Localization.useLocalizer)(messages, {
number: format
});
const ref = (0, _react.useRef)(null);
const inputRef = (0, _react.useRef)(null);
const repeaterRef = (0, _react.useRef)(null);
const [focusEvents, focused] = (0, _useFocusManager.default)(ref, uncontrolledProps, {
willHandle(focused) {
if (focused) focus();
}
});
const handleMouseDown = (0, _useEventCallback.default)((direction, event) => {
if (event) event.persist();
let method = direction === 'UP' ? increment : decrement;
let value = method(event),
atTop = direction === 'UP' && value === max,
atBottom = direction === 'DOWN' && value === min;
if (atTop || atBottom) handleMouseUp();else if (!repeaterRef.current) {
repeaterRef.current = createInterval(() => {
handleMouseDown(direction, event);
});
}
});
const handleMouseUp = (0, _useEventCallback.default)(() => {
if (!repeaterRef.current) return;
repeaterRef.current();
repeaterRef.current = null;
});
const handleKeyDown = (0, _useEventCallback.default)(event => {
if (readOnly) return;
let key = event.key;
(0, _WidgetHelpers.notify)(onKeyDown, [event]);
if (event.defaultPrevented) return;
if (key === 'End' && isFinite(max)) handleChange(max, event);else if (key === 'Home' && isFinite(min)) handleChange(min, event);else if (key === 'ArrowDown') | else if (key === 'ArrowUp') {
event.preventDefault();
increment(event);
}
});
const handleChange = (rawValue, originalEvent = null) => {
let nextValue = clamp(rawValue, min, max);
if (value !== nextValue) (0, _WidgetHelpers.notify)(onChange, [nextValue, {
rawValue,
originalEvent,
lastValue: value
}]);
};
function focus() {
var _inputRef$current;
(_inputRef$current = inputRef.current) === null || _inputRef$current === void 0 ? void 0 : _inputRef$current.focus();
}
function increment(event) {
return step(pStep, event);
}
function decrement(event) {
return step(-pStep, event);
}
function step(amount, event) {
const nextValue = (value || 0) + amount;
handleChange(precision != null ? parseFloat(round(nextValue, precision)) : nextValue, event);
return nextValue;
}
const clampedValue = clamp(value, min, max);
return /*#__PURE__*/_react.default.createElement(_Widget.default, Object.assign({}, elementProps, {
focused: focused,
disabled: disabled,
readOnly: readOnly,
onKeyDown: handleKeyDown
}, focusEvents, {
ref: ref,
className: (0, _classnames.default)(className, 'rw-number-picker')
}), /*#__PURE__*/_react.default.createElement(_WidgetPicker.default, {
className: containerClassName
}, /*#__PURE__*/_react.default.createElement(_NumberInput.default, Object.assign({}, inputProps, {
role: "spinbutton",
tabIndex: tabIndex,
value: clampedValue,
placeholder: placeholder,
autoFocus: autoFocus,
editing: focused,
localizer: localizer,
parse: parse,
name: name,
min: min,
max: max,
disabled: disabled,
readOnly: readOnly,
onChange: handleChange,
onKeyPress: onKeyPress,
onKeyUp: onKeyUp,
innerRef: inputRef
})), /*#__PURE__*/_react.default.createElement("span", {
className: "rw-input-addon rw-number-picker-spinners"
}, /*#__PURE__*/_react.default.createElement(_Button.default, {
icon: incrementIcon,
className: "rw-picker-btn",
disabled: clampedValue === max || disabled || readOnly,
label: localizer.messages.increment({
value: clampedValue,
min,
max
}),
onMouseUp: () => handleMouseUp(),
onMouseDown: e => handleMouseDown('UP', e),
onMouseLeave: () => handleMouseUp()
}), /*#__PURE__*/_react.default.createElement(_Button.default, {
icon: decrementIcon,
className: "rw-picker-btn",
disabled: clampedValue === min || disabled || readOnly,
label: localizer.messages.decrement({
value: clampedValue,
min,
max
}),
onMouseUp: () => handleMouseUp(),
onMouseDown: e => handleMouseDown('DOWN', e),
onMouseLeave: () => handleMouseUp()
}))));
}
;
NumberPicker.propTypes = propTypes;
NumberPicker.defaultProps = defaultProps;
var _default = NumberPicker; // thank you kendo ui core
// https://github.com/telerik/kendo-ui-core/blob/master/src/kendo.core.js#L1036
exports.default = _default;
function round(value, precision) {
precision = precision || 0;
let parts = ('' + value).split('e');
let valueInt = Math.round(+(parts[0] + 'e' + (parts[1] ? +parts[1] + precision : precision)));
parts = ('' + valueInt).split('e');
valueInt = +(parts[0] + 'e' + (parts[1] ? +parts[1] - precision : -precision));
return valueInt.toFixed(precision);
} | {
event.preventDefault();
decrement(event);
} | conditional_block |
NumberPicker.js | "use strict";
Object.defineProperty(exports, "__esModule", {
value: true | });
exports.default = void 0;
var _classnames = _interopRequireDefault(require("classnames"));
var _propTypes = _interopRequireDefault(require("prop-types"));
var _react = _interopRequireWildcard(require("react"));
var _uncontrollable = require("uncontrollable");
var _Button = _interopRequireDefault(require("./Button"));
var _Icon = require("./Icon");
var _Localization = require("./Localization");
var _NumberInput = _interopRequireDefault(require("./NumberInput"));
var _Widget = _interopRequireDefault(require("./Widget"));
var _WidgetPicker = _interopRequireDefault(require("./WidgetPicker"));
var CustomPropTypes = _interopRequireWildcard(require("./PropTypes"));
var _useFocusManager = _interopRequireDefault(require("./useFocusManager"));
var _WidgetHelpers = require("./WidgetHelpers");
var _useEventCallback = _interopRequireDefault(require("@restart/hooks/useEventCallback"));
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }
function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _objectWithoutProperties(source, excluded) { if (source == null) return {}; var target = _objectWithoutPropertiesLoose(source, excluded); var key, i; if (Object.getOwnPropertySymbols) { var sourceSymbolKeys = Object.getOwnPropertySymbols(source); for (i = 0; i < sourceSymbolKeys.length; i++) { key = sourceSymbolKeys[i]; if (excluded.indexOf(key) >= 0) continue; if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue; target[key] = source[key]; } } return target; }
function _objectWithoutPropertiesLoose(source, excluded) { if (source == null) return {}; var target = {}; var sourceKeys = Object.keys(source); var key, i; for (i = 0; i < sourceKeys.length; i++) { key = sourceKeys[i]; if (excluded.indexOf(key) >= 0) continue; target[key] = source[key]; } return target; }
// my tests in ie11/chrome/FF indicate that keyDown repeats
// at about 35ms+/- 5ms after an initial 500ms delay. callback fires on the leading edge
function createInterval(callback) {
let fn;
let id;
const cancel = () => clearTimeout(id);
id = window.setTimeout(fn = () => {
id = window.setTimeout(fn, 35);
callback(); //fire after everything in case the user cancels on the first call
}, 500);
return cancel;
}
function clamp(value, min, max) {
max = max == null ? Infinity : max;
min = min == null ? -Infinity : min;
if (value == null || value === '') return null;
return Math.max(Math.min(typeof value == 'string' ? parseInt(value) : value, max), min);
}
const propTypes = {
value: _propTypes.default.number,
/**
* @example ['onChangePicker', [ [1, null] ]]
*/
onChange: _propTypes.default.func,
/**
* The minimum number that the NumberPicker value.
* @example ['prop', ['min', 0]]
*/
min: _propTypes.default.number,
/**
* The maximum number that the NumberPicker value.
*
* @example ['prop', ['max', 0]]
*/
max: _propTypes.default.number,
/**
* Amount to increase or decrease value when using the spinner buttons.
*
* @example ['prop', ['step', 5]]
*/
step: _propTypes.default.number,
/**
* Specify how precise the `value` should be when typing, incrementing, or decrementing the value.
* When empty, precision is parsed from the current `format` and culture.
*/
precision: _propTypes.default.number,
/**
* A format string used to display the number value. Localizer dependent, read [localization](../localization) for more info.
*
* @example ['prop', { max: 1, min: -1 , defaultValue: 0.2585, format: "{ style: 'percent' }" }]
*/
format: _propTypes.default.any,
/**
* Determines how the NumberPicker parses a number from the localized string representation.
* You can also provide a parser `function` to pair with a custom `format`.
*/
parse: _propTypes.default.func,
incrementIcon: _propTypes.default.node,
decrementIcon: _propTypes.default.node,
/** @ignore */
tabIndex: _propTypes.default.any,
name: _propTypes.default.string,
placeholder: _propTypes.default.string,
onKeyDown: _propTypes.default.func,
onKeyPress: _propTypes.default.func,
onKeyUp: _propTypes.default.func,
autoFocus: _propTypes.default.bool,
/**
* @example ['disabled', ['1']]
*/
disabled: CustomPropTypes.disabled,
/**
* @example ['readOnly', ['1.5']]
*/
readOnly: CustomPropTypes.disabled,
/** Adds a css class to the input container element. */
containerClassName: _propTypes.default.string,
inputProps: _propTypes.default.object,
messages: _propTypes.default.shape({
increment: _propTypes.default.string,
decrement: _propTypes.default.string
}),
/** @ignore */
localizer: _propTypes.default.object
};
const defaultProps = {
incrementIcon: _Icon.caretUp,
decrementIcon: _Icon.caretDown,
min: -Infinity,
max: Infinity,
step: 1
};
/**
* ---
* localized: true
* shortcuts:
* - { key: down arrow, label: decrement value }
* - { key: up arrow, label: increment value }
* - { key: home, label: set value to minimum value, if finite }
* - { key: end, label: set value to maximum value, if finite }
* ---
*
* @public
*/
function NumberPicker(uncontrolledProps) {
const _useUncontrolled = (0, _uncontrollable.useUncontrolled)(uncontrolledProps, {
value: 'onChange'
}),
{
className,
containerClassName,
disabled,
readOnly,
value,
min,
max,
incrementIcon,
decrementIcon,
placeholder,
autoFocus,
tabIndex,
parse,
name,
onChange,
messages,
format,
onKeyDown,
onKeyPress,
onKeyUp,
inputProps,
precision,
step: pStep
} = _useUncontrolled,
elementProps = _objectWithoutProperties(_useUncontrolled, ["className", "containerClassName", "disabled", "readOnly", "value", "min", "max", "incrementIcon", "decrementIcon", "placeholder", "autoFocus", "tabIndex", "parse", "name", "onChange", "messages", "format", "onKeyDown", "onKeyPress", "onKeyUp", "inputProps", "precision", "step"]);
const localizer = (0, _Localization.useLocalizer)(messages, {
number: format
});
const ref = (0, _react.useRef)(null);
const inputRef = (0, _react.useRef)(null);
const repeaterRef = (0, _react.useRef)(null);
const [focusEvents, focused] = (0, _useFocusManager.default)(ref, uncontrolledProps, {
willHandle(focused) {
if (focused) focus();
}
});
const handleMouseDown = (0, _useEventCallback.default)((direction, event) => {
if (event) event.persist();
let method = direction === 'UP' ? increment : decrement;
let value = method(event),
atTop = direction === 'UP' && value === max,
atBottom = direction === 'DOWN' && value === min;
if (atTop || atBottom) handleMouseUp();else if (!repeaterRef.current) {
repeaterRef.current = createInterval(() => {
handleMouseDown(direction, event);
});
}
});
const handleMouseUp = (0, _useEventCallback.default)(() => {
if (!repeaterRef.current) return;
repeaterRef.current();
repeaterRef.current = null;
});
const handleKeyDown = (0, _useEventCallback.default)(event => {
if (readOnly) return;
let key = event.key;
(0, _WidgetHelpers.notify)(onKeyDown, [event]);
if (event.defaultPrevented) return;
if (key === 'End' && isFinite(max)) handleChange(max, event);else if (key === 'Home' && isFinite(min)) handleChange(min, event);else if (key === 'ArrowDown') {
event.preventDefault();
decrement(event);
} else if (key === 'ArrowUp') {
event.preventDefault();
increment(event);
}
});
const handleChange = (rawValue, originalEvent = null) => {
let nextValue = clamp(rawValue, min, max);
if (value !== nextValue) (0, _WidgetHelpers.notify)(onChange, [nextValue, {
rawValue,
originalEvent,
lastValue: value
}]);
};
function focus() {
var _inputRef$current;
(_inputRef$current = inputRef.current) === null || _inputRef$current === void 0 ? void 0 : _inputRef$current.focus();
}
function increment(event) {
return step(pStep, event);
}
function decrement(event) {
return step(-pStep, event);
}
function step(amount, event) {
const nextValue = (value || 0) + amount;
handleChange(precision != null ? parseFloat(round(nextValue, precision)) : nextValue, event);
return nextValue;
}
const clampedValue = clamp(value, min, max);
return /*#__PURE__*/_react.default.createElement(_Widget.default, Object.assign({}, elementProps, {
focused: focused,
disabled: disabled,
readOnly: readOnly,
onKeyDown: handleKeyDown
}, focusEvents, {
ref: ref,
className: (0, _classnames.default)(className, 'rw-number-picker')
}), /*#__PURE__*/_react.default.createElement(_WidgetPicker.default, {
className: containerClassName
}, /*#__PURE__*/_react.default.createElement(_NumberInput.default, Object.assign({}, inputProps, {
role: "spinbutton",
tabIndex: tabIndex,
value: clampedValue,
placeholder: placeholder,
autoFocus: autoFocus,
editing: focused,
localizer: localizer,
parse: parse,
name: name,
min: min,
max: max,
disabled: disabled,
readOnly: readOnly,
onChange: handleChange,
onKeyPress: onKeyPress,
onKeyUp: onKeyUp,
innerRef: inputRef
})), /*#__PURE__*/_react.default.createElement("span", {
className: "rw-input-addon rw-number-picker-spinners"
}, /*#__PURE__*/_react.default.createElement(_Button.default, {
icon: incrementIcon,
className: "rw-picker-btn",
disabled: clampedValue === max || disabled || readOnly,
label: localizer.messages.increment({
value: clampedValue,
min,
max
}),
onMouseUp: () => handleMouseUp(),
onMouseDown: e => handleMouseDown('UP', e),
onMouseLeave: () => handleMouseUp()
}), /*#__PURE__*/_react.default.createElement(_Button.default, {
icon: decrementIcon,
className: "rw-picker-btn",
disabled: clampedValue === min || disabled || readOnly,
label: localizer.messages.decrement({
value: clampedValue,
min,
max
}),
onMouseUp: () => handleMouseUp(),
onMouseDown: e => handleMouseDown('DOWN', e),
onMouseLeave: () => handleMouseUp()
}))));
}
;
NumberPicker.propTypes = propTypes;
NumberPicker.defaultProps = defaultProps;
var _default = NumberPicker; // thank you kendo ui core
// https://github.com/telerik/kendo-ui-core/blob/master/src/kendo.core.js#L1036
exports.default = _default;
function round(value, precision) {
precision = precision || 0;
let parts = ('' + value).split('e');
let valueInt = Math.round(+(parts[0] + 'e' + (parts[1] ? +parts[1] + precision : precision)));
parts = ('' + valueInt).split('e');
valueInt = +(parts[0] + 'e' + (parts[1] ? +parts[1] - precision : -precision));
return valueInt.toFixed(precision);
} | random_line_split | |
imgur.js | // Sends image directly to vision api
// Don't use if image is over 4MB
// Also rotates the image if necessary for display
// image is a base64 image URL string
// orientation is a number between 1 and 8 that should be obtained
// with the getOrientation method.
function sendImageDirect(image, orientation = 1) {
var base64result = image.replace(/^data:image\/[a-z]+;base64,/, "");
console.log("Image is less than 4MB")
queryVisionAPI(base64result);
}
// Sends the image to Imgur to be stored and then Vision.
// Use if image is greater than 4MB.
// Requires a base64Image string with the prepending tags striped out.
function sendImageImgur(image, orientation = 1) {
var base64result = image.replace(/^data:image\/[a-z]+;base64,/, "");
console.log("Image is greater than 4MB, wait for Imgur");
var form = new FormData();
form.append("image", base64result);
var settings = {
"async": true,
"crossDomain": true,
"url": "https://api.imgur.com/3/image",
"method": "POST",
"headers": {
"authorization": `Client-ID ${apiKey.imgur_client_id}`
},
"processData": false,
"contentType": false,
"mimeType": "multipart/form-data",
"data": form
}
$.ajax(settings).done(function(response) {
var res = JSON.parse(response);
console.log(res);
queryVisionAPI(res.data.link);
// displayPic(res.data.link);
});
}
//from http://stackoverflow.com/a/32490603
//Gets the orientation alteration of an image
// based on this picture https://i.stack.imgur.com/VGsAj.gif
//file is a file blob
//call back is the function to perform after completion
// with the paramater of the orientation variable
function getOrientation(file, callback) {
var reader = new FileReader();
reader.onload = function(event) {
var view = new DataView(event.target.result);
if (view.getUint16(0, false) != 0xFFD8) return callback(-2);
var length = view.byteLength,
offset = 2;
while (offset < length) {
var marker = view.getUint16(offset, false);
offset += 2;
if (marker == 0xFFE1) {
if (view.getUint32(offset += 2, false) != 0x45786966) {
return callback(-1);
}
var little = view.getUint16(offset += 6, false) == 0x4949;
offset += view.getUint32(offset + 4, little);
var tags = view.getUint16(offset, little);
offset += 2;
for (var i = 0; i < tags; i++)
if (view.getUint16(offset + (i * 12), little) == 0x0112)
return callback(view.getUint16(offset + (i * 12) + 8, little));
} else if ((marker & 0xFF00) != 0xFF00) break;
else offset += view.getUint16(offset, false);
}
return callback(-1);
};
reader.readAsArrayBuffer(file.slice(0, 64 * 1024));
};
//from https://stackoverflow.com/a/40867559/8630411
//Resets the orientation of an base64 image url string
// based on the orientation from getOrientaion
// or this image https://i.stack.imgur.com/VGsAj.gif
//srcBase64 is a base64 url string
//srcOrientation is a number between 2 and 8
//callback is the function to perform after the orientaion has finished
function resetOrientation(srcBase64, srcOrientation, callback) {
console.log("Transforming image, please wait");
var img = new Image();
img.onload = function() {
var width = img.width,
height = img.height,
canvas = document.createElement('canvas'),
ctx = canvas.getContext("2d");
// set proper canvas dimensions before transform & export
if (4 < srcOrientation && srcOrientation < 9) {
canvas.width = height;
canvas.height = width;
} else {
canvas.width = width;
canvas.height = height;
}
// transform context before drawing image
switch (srcOrientation) {
case 2:
ctx.transform(-1, 0, 0, 1, width, 0);
break;
case 3:
ctx.transform(-1, 0, 0, -1, width, height);
break;
case 4:
ctx.transform(1, 0, 0, -1, 0, height);
break;
case 5:
ctx.transform(0, 1, 1, 0, 0, 0);
break;
case 6:
ctx.transform(0, 1, -1, 0, height, 0);
break;
case 7:
ctx.transform(0, -1, -1, 0, height, width);
break;
case 8:
ctx.transform(0, -1, 1, 0, 0, width);
break;
default:
break;
}
// draw image
ctx.drawImage(img, 0, 0);
// export base64
callback(canvas.toDataURL());
};
img.src = srcBase64;
}
//If the image is not oriented properly uses the resetOrientation function to
// reorient the image
function orientDisplayPic(picture, orientation) {
if (orientation > 1) {
resetOrientation(picture, orientation, function rotate(rotated) {
displayPic(rotated);
});
} else {
displayPic(picture);
}
}
//Displays picture on the page
//picture is a valid string for that an image src tag can use
function displayPic(picture) {
$("#displayImage").attr("src", picture);
$("#displayImage").removeClass("hidden");
$("#drag").removeClass("uploadWanted");
displayLoad();
}
//Display a message and gif to give the user feed back that the program is doing something
//Empties the cloudDesc element, puts it there and uses clouds.gif
function displayLoad() {
var load = $("<img>");
load.addClass("img-center img-responsive");
load.attr("src", "assets/images/clouds.gif");
load.attr("id", "cloud-gif");
$("#cloudDesc").empty();
$("#cloudDesc").attr("hidden", false);
$("#cloudDesc").append("<h3 style='text-align: center;'>Searching for cloud types</h3>");
$("#cloudDesc").append(load);
}
//Display a message to the user, meant to be for errors
//Empties the cloudDesc element and puts the message there
function displayError(message, id = "cloudDesc") {
$("#" + id).empty();
setTimeout(function() {
$("#" + id).attr("hidden", false);
$("#" + id).append("<h3 style='text-align:center;'>" + message + "</h3>");
}, 48);
}
//Make sure that a file is an image and if so sends to the FileReader
//file is a valid file blob
function readPic(file) {
console.log(file);
var fileType = file["type"].split("/")[0];
if (fileType !== "image") {
displayError("Not an image, please use a valid image format: \
JPEG, PNG8, PNG24, GIF, Animated GIF (first frame only), BMP, WEBP, RAW, or ICO");
} else {
loadImage.parseMetaData(file, function(data) {
//default image orientation
var orientation = 0;
//if exif data available, update orientation
if (data.exif) {
orientation = data.exif.get('Orientation');
}
var loadingImage = loadImage(
file,
function(canvas) {
//here's the base64 data result
var base64data = canvas.toDataURL('image/jpeg');
displayPic(base64data);
if (file.size < 4000000) {
sendImageDirect(base64data);
} else {
sendImageImgur(base64data);
}
}, {
//should be set to canvas : true to activate auto fix orientation
canvas: true,
orientation: orientation
}
);
});
// getOrientation(file, function checkRotate(orientation) {
// console.log(orientation);
// var reader = new FileReader();
// getImageBase64(file, function(image) {
// if (file.size < 4000000) {
// orientDisplayPic(image, orientation);
// sendImageDirect(image, orientation);
// } else {
// orientDisplayPic(image, orientation);
// sendImageImgur(image, orientation);
// }
// });
// });
}
}
//Takes a file blob and returns a base64dataURL
function getImageBase64(file, callback) {
var reader = new FileReader();
reader.onload = function gotImage(event) {
var image = event.target.result;
callback(image)
};
reader.readAsDataURL(file);
}
//Checks if a string is a base65URL
//returns boolean
function isBase64image(base64string) {
var legal = base64string[base64string.length - 1] === "=";
console.log(legal);
var base64reg = new RegExp(/data:image\/([a-zA-Z]*);base64,([^\"]*)/);
var valid = base64reg.test(base64string);
console.log(valid);
var image = base64string.search(/data:image/) !== -1;
console.log(image);
return (legal && valid && image);
}
$(document).ready(function addUpload() {
var imgur_client_id = "cffdbdcf9cb88c7";
var imgur_sec = "4e806c50fb260cc521bfe11d4e7edfa22cfbf684";
var authURL = "https://api.imgur.com/oauth2/authorize?client_id=" + imgur_client_id + "&response_type=token"
var auth_token = ""
var refreshForm = new FormData();
refreshForm.append("refresh_token", "4c771b1e1055e3eb3adee452a2155ebd258392d1");
refreshForm.append("client_id", "cffdbdcf9cb88c7");
refreshForm.append("client_secret", "4e806c50fb260cc521bfe11d4e7edfa22cfbf684");
refreshForm.append("grant_type", "refresh_token");
//adds drag and drop listeners to the drag element
$("#drag").on('dragover', function(event) {
event.stopPropagation();
event.preventDefault();
console.log('dragover');
$(this).css('border', '2px solid #66aede');
$(this).css('background-color', 'rgba(255,255,255,0.6)');
$(this).css('color', '#173848');
});
$("#drag").on('dragleave', function(event) {
event.stopPropagation();
event.preventDefault();
console.log('dragleave');
$(this).css('border', '2px dotted #66aede');
$(this).css('background-color', 'rgba(255,255,255,0.4)');
$(this).css('color', '#31708f');
});
$("#drag").on('drop', function(event) {
event.stopPropagation();
event.preventDefault();
$(this).css('border', '2px dotted #66aede');
$(this).css('background-color', 'rgba(255,255,255,0.4)');
$(this).css('color', '#31708f');
var files = event.originalEvent.dataTransfer.files;
console.log(files.length === 0);
if (files.length !== 0) {
//We need to send dropped files to Server
console.log(files);
for (var i = 0; i < files.length; i++) {
readPic(files[i]);
}
} else {
var url = $(event.originalEvent.dataTransfer.getData('text/html')).filter('img').attr('src');
console.log(url);
if (isURL(url)) {
console.log("Upload from another site");
displayPic(url);
queryVisionAPI(url);
} else
if (isBase64image(url)) {
console.log("Upload from another site");
displayPic(url);
queryVisionAPI(url);
} else {
displayError("Not a valid URL");
}
}
});
$("#uploadBtn").on("change", function readFile(event) {
console.log(event);
console.log($(this));
for (var i = 0; i < this.files.length; i++) { |
});
}); |
readPic(this.files[i]);
}
| conditional_block |
imgur.js | // Sends image directly to vision api
// Don't use if image is over 4MB
// Also rotates the image if necessary for display
// image is a base64 image URL string
// orientation is a number between 1 and 8 that should be obtained
// with the getOrientation method.
function | (image, orientation = 1) {
var base64result = image.replace(/^data:image\/[a-z]+;base64,/, "");
console.log("Image is less than 4MB")
queryVisionAPI(base64result);
}
// Sends the image to Imgur to be stored and then Vision.
// Use if image is greater than 4MB.
// Requires a base64Image string with the prepending tags striped out.
function sendImageImgur(image, orientation = 1) {
var base64result = image.replace(/^data:image\/[a-z]+;base64,/, "");
console.log("Image is greater than 4MB, wait for Imgur");
var form = new FormData();
form.append("image", base64result);
var settings = {
"async": true,
"crossDomain": true,
"url": "https://api.imgur.com/3/image",
"method": "POST",
"headers": {
"authorization": `Client-ID ${apiKey.imgur_client_id}`
},
"processData": false,
"contentType": false,
"mimeType": "multipart/form-data",
"data": form
}
$.ajax(settings).done(function(response) {
var res = JSON.parse(response);
console.log(res);
queryVisionAPI(res.data.link);
// displayPic(res.data.link);
});
}
//from http://stackoverflow.com/a/32490603
//Gets the orientation alteration of an image
// based on this picture https://i.stack.imgur.com/VGsAj.gif
//file is a file blob
//call back is the function to perform after completion
// with the paramater of the orientation variable
function getOrientation(file, callback) {
var reader = new FileReader();
reader.onload = function(event) {
var view = new DataView(event.target.result);
if (view.getUint16(0, false) != 0xFFD8) return callback(-2);
var length = view.byteLength,
offset = 2;
while (offset < length) {
var marker = view.getUint16(offset, false);
offset += 2;
if (marker == 0xFFE1) {
if (view.getUint32(offset += 2, false) != 0x45786966) {
return callback(-1);
}
var little = view.getUint16(offset += 6, false) == 0x4949;
offset += view.getUint32(offset + 4, little);
var tags = view.getUint16(offset, little);
offset += 2;
for (var i = 0; i < tags; i++)
if (view.getUint16(offset + (i * 12), little) == 0x0112)
return callback(view.getUint16(offset + (i * 12) + 8, little));
} else if ((marker & 0xFF00) != 0xFF00) break;
else offset += view.getUint16(offset, false);
}
return callback(-1);
};
reader.readAsArrayBuffer(file.slice(0, 64 * 1024));
};
//from https://stackoverflow.com/a/40867559/8630411
//Resets the orientation of an base64 image url string
// based on the orientation from getOrientaion
// or this image https://i.stack.imgur.com/VGsAj.gif
//srcBase64 is a base64 url string
//srcOrientation is a number between 2 and 8
//callback is the function to perform after the orientaion has finished
function resetOrientation(srcBase64, srcOrientation, callback) {
console.log("Transforming image, please wait");
var img = new Image();
img.onload = function() {
var width = img.width,
height = img.height,
canvas = document.createElement('canvas'),
ctx = canvas.getContext("2d");
// set proper canvas dimensions before transform & export
if (4 < srcOrientation && srcOrientation < 9) {
canvas.width = height;
canvas.height = width;
} else {
canvas.width = width;
canvas.height = height;
}
// transform context before drawing image
switch (srcOrientation) {
case 2:
ctx.transform(-1, 0, 0, 1, width, 0);
break;
case 3:
ctx.transform(-1, 0, 0, -1, width, height);
break;
case 4:
ctx.transform(1, 0, 0, -1, 0, height);
break;
case 5:
ctx.transform(0, 1, 1, 0, 0, 0);
break;
case 6:
ctx.transform(0, 1, -1, 0, height, 0);
break;
case 7:
ctx.transform(0, -1, -1, 0, height, width);
break;
case 8:
ctx.transform(0, -1, 1, 0, 0, width);
break;
default:
break;
}
// draw image
ctx.drawImage(img, 0, 0);
// export base64
callback(canvas.toDataURL());
};
img.src = srcBase64;
}
//If the image is not oriented properly uses the resetOrientation function to
// reorient the image
function orientDisplayPic(picture, orientation) {
if (orientation > 1) {
resetOrientation(picture, orientation, function rotate(rotated) {
displayPic(rotated);
});
} else {
displayPic(picture);
}
}
//Displays picture on the page
//picture is a valid string for that an image src tag can use
function displayPic(picture) {
$("#displayImage").attr("src", picture);
$("#displayImage").removeClass("hidden");
$("#drag").removeClass("uploadWanted");
displayLoad();
}
//Display a message and gif to give the user feed back that the program is doing something
//Empties the cloudDesc element, puts it there and uses clouds.gif
function displayLoad() {
var load = $("<img>");
load.addClass("img-center img-responsive");
load.attr("src", "assets/images/clouds.gif");
load.attr("id", "cloud-gif");
$("#cloudDesc").empty();
$("#cloudDesc").attr("hidden", false);
$("#cloudDesc").append("<h3 style='text-align: center;'>Searching for cloud types</h3>");
$("#cloudDesc").append(load);
}
//Display a message to the user, meant to be for errors
//Empties the cloudDesc element and puts the message there
function displayError(message, id = "cloudDesc") {
$("#" + id).empty();
setTimeout(function() {
$("#" + id).attr("hidden", false);
$("#" + id).append("<h3 style='text-align:center;'>" + message + "</h3>");
}, 48);
}
//Make sure that a file is an image and if so sends to the FileReader
//file is a valid file blob
function readPic(file) {
console.log(file);
var fileType = file["type"].split("/")[0];
if (fileType !== "image") {
displayError("Not an image, please use a valid image format: \
JPEG, PNG8, PNG24, GIF, Animated GIF (first frame only), BMP, WEBP, RAW, or ICO");
} else {
loadImage.parseMetaData(file, function(data) {
//default image orientation
var orientation = 0;
//if exif data available, update orientation
if (data.exif) {
orientation = data.exif.get('Orientation');
}
var loadingImage = loadImage(
file,
function(canvas) {
//here's the base64 data result
var base64data = canvas.toDataURL('image/jpeg');
displayPic(base64data);
if (file.size < 4000000) {
sendImageDirect(base64data);
} else {
sendImageImgur(base64data);
}
}, {
//should be set to canvas : true to activate auto fix orientation
canvas: true,
orientation: orientation
}
);
});
// getOrientation(file, function checkRotate(orientation) {
// console.log(orientation);
// var reader = new FileReader();
// getImageBase64(file, function(image) {
// if (file.size < 4000000) {
// orientDisplayPic(image, orientation);
// sendImageDirect(image, orientation);
// } else {
// orientDisplayPic(image, orientation);
// sendImageImgur(image, orientation);
// }
// });
// });
}
}
//Takes a file blob and returns a base64dataURL
function getImageBase64(file, callback) {
var reader = new FileReader();
reader.onload = function gotImage(event) {
var image = event.target.result;
callback(image)
};
reader.readAsDataURL(file);
}
//Checks if a string is a base65URL
//returns boolean
function isBase64image(base64string) {
var legal = base64string[base64string.length - 1] === "=";
console.log(legal);
var base64reg = new RegExp(/data:image\/([a-zA-Z]*);base64,([^\"]*)/);
var valid = base64reg.test(base64string);
console.log(valid);
var image = base64string.search(/data:image/) !== -1;
console.log(image);
return (legal && valid && image);
}
$(document).ready(function addUpload() {
var imgur_client_id = "cffdbdcf9cb88c7";
var imgur_sec = "4e806c50fb260cc521bfe11d4e7edfa22cfbf684";
var authURL = "https://api.imgur.com/oauth2/authorize?client_id=" + imgur_client_id + "&response_type=token"
var auth_token = ""
var refreshForm = new FormData();
refreshForm.append("refresh_token", "4c771b1e1055e3eb3adee452a2155ebd258392d1");
refreshForm.append("client_id", "cffdbdcf9cb88c7");
refreshForm.append("client_secret", "4e806c50fb260cc521bfe11d4e7edfa22cfbf684");
refreshForm.append("grant_type", "refresh_token");
//adds drag and drop listeners to the drag element
$("#drag").on('dragover', function(event) {
event.stopPropagation();
event.preventDefault();
console.log('dragover');
$(this).css('border', '2px solid #66aede');
$(this).css('background-color', 'rgba(255,255,255,0.6)');
$(this).css('color', '#173848');
});
$("#drag").on('dragleave', function(event) {
event.stopPropagation();
event.preventDefault();
console.log('dragleave');
$(this).css('border', '2px dotted #66aede');
$(this).css('background-color', 'rgba(255,255,255,0.4)');
$(this).css('color', '#31708f');
});
$("#drag").on('drop', function(event) {
event.stopPropagation();
event.preventDefault();
$(this).css('border', '2px dotted #66aede');
$(this).css('background-color', 'rgba(255,255,255,0.4)');
$(this).css('color', '#31708f');
var files = event.originalEvent.dataTransfer.files;
console.log(files.length === 0);
if (files.length !== 0) {
//We need to send dropped files to Server
console.log(files);
for (var i = 0; i < files.length; i++) {
readPic(files[i]);
}
} else {
var url = $(event.originalEvent.dataTransfer.getData('text/html')).filter('img').attr('src');
console.log(url);
if (isURL(url)) {
console.log("Upload from another site");
displayPic(url);
queryVisionAPI(url);
} else
if (isBase64image(url)) {
console.log("Upload from another site");
displayPic(url);
queryVisionAPI(url);
} else {
displayError("Not a valid URL");
}
}
});
$("#uploadBtn").on("change", function readFile(event) {
console.log(event);
console.log($(this));
for (var i = 0; i < this.files.length; i++) {
readPic(this.files[i]);
}
});
}); | sendImageDirect | identifier_name |
imgur.js | // Sends image directly to vision api
// Don't use if image is over 4MB
// Also rotates the image if necessary for display
// image is a base64 image URL string
// orientation is a number between 1 and 8 that should be obtained
// with the getOrientation method.
function sendImageDirect(image, orientation = 1) {
var base64result = image.replace(/^data:image\/[a-z]+;base64,/, "");
console.log("Image is less than 4MB")
queryVisionAPI(base64result);
}
// Sends the image to Imgur to be stored and then Vision.
// Use if image is greater than 4MB.
// Requires a base64Image string with the prepending tags striped out.
function sendImageImgur(image, orientation = 1) {
var base64result = image.replace(/^data:image\/[a-z]+;base64,/, "");
console.log("Image is greater than 4MB, wait for Imgur");
var form = new FormData();
form.append("image", base64result);
var settings = {
"async": true,
"crossDomain": true,
"url": "https://api.imgur.com/3/image",
"method": "POST",
"headers": {
"authorization": `Client-ID ${apiKey.imgur_client_id}`
},
"processData": false,
"contentType": false,
"mimeType": "multipart/form-data",
"data": form
}
$.ajax(settings).done(function(response) {
var res = JSON.parse(response);
console.log(res);
queryVisionAPI(res.data.link);
// displayPic(res.data.link);
});
}
//from http://stackoverflow.com/a/32490603
//Gets the orientation alteration of an image
// based on this picture https://i.stack.imgur.com/VGsAj.gif
//file is a file blob
//call back is the function to perform after completion
// with the paramater of the orientation variable
function getOrientation(file, callback) {
var reader = new FileReader();
reader.onload = function(event) {
var view = new DataView(event.target.result);
if (view.getUint16(0, false) != 0xFFD8) return callback(-2);
var length = view.byteLength,
offset = 2;
while (offset < length) {
var marker = view.getUint16(offset, false);
offset += 2;
if (marker == 0xFFE1) {
if (view.getUint32(offset += 2, false) != 0x45786966) {
return callback(-1);
}
var little = view.getUint16(offset += 6, false) == 0x4949;
offset += view.getUint32(offset + 4, little);
var tags = view.getUint16(offset, little);
offset += 2;
for (var i = 0; i < tags; i++)
if (view.getUint16(offset + (i * 12), little) == 0x0112)
return callback(view.getUint16(offset + (i * 12) + 8, little));
} else if ((marker & 0xFF00) != 0xFF00) break;
else offset += view.getUint16(offset, false);
}
return callback(-1);
};
reader.readAsArrayBuffer(file.slice(0, 64 * 1024));
};
//from https://stackoverflow.com/a/40867559/8630411
//Resets the orientation of an base64 image url string
// based on the orientation from getOrientaion
// or this image https://i.stack.imgur.com/VGsAj.gif
//srcBase64 is a base64 url string
//srcOrientation is a number between 2 and 8
//callback is the function to perform after the orientaion has finished
function resetOrientation(srcBase64, srcOrientation, callback) |
//If the image is not oriented properly uses the resetOrientation function to
// reorient the image
function orientDisplayPic(picture, orientation) {
if (orientation > 1) {
resetOrientation(picture, orientation, function rotate(rotated) {
displayPic(rotated);
});
} else {
displayPic(picture);
}
}
//Displays picture on the page
//picture is a valid string for that an image src tag can use
function displayPic(picture) {
$("#displayImage").attr("src", picture);
$("#displayImage").removeClass("hidden");
$("#drag").removeClass("uploadWanted");
displayLoad();
}
//Display a message and gif to give the user feed back that the program is doing something
//Empties the cloudDesc element, puts it there and uses clouds.gif
function displayLoad() {
var load = $("<img>");
load.addClass("img-center img-responsive");
load.attr("src", "assets/images/clouds.gif");
load.attr("id", "cloud-gif");
$("#cloudDesc").empty();
$("#cloudDesc").attr("hidden", false);
$("#cloudDesc").append("<h3 style='text-align: center;'>Searching for cloud types</h3>");
$("#cloudDesc").append(load);
}
//Display a message to the user, meant to be for errors
//Empties the cloudDesc element and puts the message there
function displayError(message, id = "cloudDesc") {
$("#" + id).empty();
setTimeout(function() {
$("#" + id).attr("hidden", false);
$("#" + id).append("<h3 style='text-align:center;'>" + message + "</h3>");
}, 48);
}
//Make sure that a file is an image and if so sends to the FileReader
//file is a valid file blob
function readPic(file) {
console.log(file);
var fileType = file["type"].split("/")[0];
if (fileType !== "image") {
displayError("Not an image, please use a valid image format: \
JPEG, PNG8, PNG24, GIF, Animated GIF (first frame only), BMP, WEBP, RAW, or ICO");
} else {
loadImage.parseMetaData(file, function(data) {
//default image orientation
var orientation = 0;
//if exif data available, update orientation
if (data.exif) {
orientation = data.exif.get('Orientation');
}
var loadingImage = loadImage(
file,
function(canvas) {
//here's the base64 data result
var base64data = canvas.toDataURL('image/jpeg');
displayPic(base64data);
if (file.size < 4000000) {
sendImageDirect(base64data);
} else {
sendImageImgur(base64data);
}
}, {
//should be set to canvas : true to activate auto fix orientation
canvas: true,
orientation: orientation
}
);
});
// getOrientation(file, function checkRotate(orientation) {
// console.log(orientation);
// var reader = new FileReader();
// getImageBase64(file, function(image) {
// if (file.size < 4000000) {
// orientDisplayPic(image, orientation);
// sendImageDirect(image, orientation);
// } else {
// orientDisplayPic(image, orientation);
// sendImageImgur(image, orientation);
// }
// });
// });
}
}
//Takes a file blob and returns a base64dataURL
function getImageBase64(file, callback) {
var reader = new FileReader();
reader.onload = function gotImage(event) {
var image = event.target.result;
callback(image)
};
reader.readAsDataURL(file);
}
//Checks if a string is a base65URL
//returns boolean
function isBase64image(base64string) {
var legal = base64string[base64string.length - 1] === "=";
console.log(legal);
var base64reg = new RegExp(/data:image\/([a-zA-Z]*);base64,([^\"]*)/);
var valid = base64reg.test(base64string);
console.log(valid);
var image = base64string.search(/data:image/) !== -1;
console.log(image);
return (legal && valid && image);
}
$(document).ready(function addUpload() {
var imgur_client_id = "cffdbdcf9cb88c7";
var imgur_sec = "4e806c50fb260cc521bfe11d4e7edfa22cfbf684";
var authURL = "https://api.imgur.com/oauth2/authorize?client_id=" + imgur_client_id + "&response_type=token"
var auth_token = ""
var refreshForm = new FormData();
refreshForm.append("refresh_token", "4c771b1e1055e3eb3adee452a2155ebd258392d1");
refreshForm.append("client_id", "cffdbdcf9cb88c7");
refreshForm.append("client_secret", "4e806c50fb260cc521bfe11d4e7edfa22cfbf684");
refreshForm.append("grant_type", "refresh_token");
//adds drag and drop listeners to the drag element
$("#drag").on('dragover', function(event) {
event.stopPropagation();
event.preventDefault();
console.log('dragover');
$(this).css('border', '2px solid #66aede');
$(this).css('background-color', 'rgba(255,255,255,0.6)');
$(this).css('color', '#173848');
});
$("#drag").on('dragleave', function(event) {
event.stopPropagation();
event.preventDefault();
console.log('dragleave');
$(this).css('border', '2px dotted #66aede');
$(this).css('background-color', 'rgba(255,255,255,0.4)');
$(this).css('color', '#31708f');
});
$("#drag").on('drop', function(event) {
event.stopPropagation();
event.preventDefault();
$(this).css('border', '2px dotted #66aede');
$(this).css('background-color', 'rgba(255,255,255,0.4)');
$(this).css('color', '#31708f');
var files = event.originalEvent.dataTransfer.files;
console.log(files.length === 0);
if (files.length !== 0) {
//We need to send dropped files to Server
console.log(files);
for (var i = 0; i < files.length; i++) {
readPic(files[i]);
}
} else {
var url = $(event.originalEvent.dataTransfer.getData('text/html')).filter('img').attr('src');
console.log(url);
if (isURL(url)) {
console.log("Upload from another site");
displayPic(url);
queryVisionAPI(url);
} else
if (isBase64image(url)) {
console.log("Upload from another site");
displayPic(url);
queryVisionAPI(url);
} else {
displayError("Not a valid URL");
}
}
});
$("#uploadBtn").on("change", function readFile(event) {
console.log(event);
console.log($(this));
for (var i = 0; i < this.files.length; i++) {
readPic(this.files[i]);
}
});
}); | {
console.log("Transforming image, please wait");
var img = new Image();
img.onload = function() {
var width = img.width,
height = img.height,
canvas = document.createElement('canvas'),
ctx = canvas.getContext("2d");
// set proper canvas dimensions before transform & export
if (4 < srcOrientation && srcOrientation < 9) {
canvas.width = height;
canvas.height = width;
} else {
canvas.width = width;
canvas.height = height;
}
// transform context before drawing image
switch (srcOrientation) {
case 2:
ctx.transform(-1, 0, 0, 1, width, 0);
break;
case 3:
ctx.transform(-1, 0, 0, -1, width, height);
break;
case 4:
ctx.transform(1, 0, 0, -1, 0, height);
break;
case 5:
ctx.transform(0, 1, 1, 0, 0, 0);
break;
case 6:
ctx.transform(0, 1, -1, 0, height, 0);
break;
case 7:
ctx.transform(0, -1, -1, 0, height, width);
break;
case 8:
ctx.transform(0, -1, 1, 0, 0, width);
break;
default:
break;
}
// draw image
ctx.drawImage(img, 0, 0);
// export base64
callback(canvas.toDataURL());
};
img.src = srcBase64;
} | identifier_body |
imgur.js | // Sends image directly to vision api
// Don't use if image is over 4MB
// Also rotates the image if necessary for display
// image is a base64 image URL string
// orientation is a number between 1 and 8 that should be obtained
// with the getOrientation method.
function sendImageDirect(image, orientation = 1) {
var base64result = image.replace(/^data:image\/[a-z]+;base64,/, "");
console.log("Image is less than 4MB")
queryVisionAPI(base64result);
}
// Sends the image to Imgur to be stored and then Vision.
// Use if image is greater than 4MB.
// Requires a base64Image string with the prepending tags striped out.
function sendImageImgur(image, orientation = 1) {
var base64result = image.replace(/^data:image\/[a-z]+;base64,/, "");
console.log("Image is greater than 4MB, wait for Imgur");
var form = new FormData();
form.append("image", base64result);
var settings = {
"async": true,
"crossDomain": true,
"url": "https://api.imgur.com/3/image",
"method": "POST",
"headers": {
"authorization": `Client-ID ${apiKey.imgur_client_id}`
},
"processData": false,
"contentType": false,
"mimeType": "multipart/form-data",
"data": form
}
$.ajax(settings).done(function(response) {
var res = JSON.parse(response);
console.log(res);
queryVisionAPI(res.data.link);
// displayPic(res.data.link);
});
}
//from http://stackoverflow.com/a/32490603
//Gets the orientation alteration of an image
// based on this picture https://i.stack.imgur.com/VGsAj.gif
//file is a file blob
//call back is the function to perform after completion
// with the paramater of the orientation variable
function getOrientation(file, callback) {
var reader = new FileReader();
reader.onload = function(event) {
var view = new DataView(event.target.result);
if (view.getUint16(0, false) != 0xFFD8) return callback(-2);
var length = view.byteLength,
offset = 2;
while (offset < length) {
var marker = view.getUint16(offset, false);
offset += 2;
if (marker == 0xFFE1) {
if (view.getUint32(offset += 2, false) != 0x45786966) {
return callback(-1);
}
var little = view.getUint16(offset += 6, false) == 0x4949;
offset += view.getUint32(offset + 4, little);
var tags = view.getUint16(offset, little);
offset += 2;
for (var i = 0; i < tags; i++)
if (view.getUint16(offset + (i * 12), little) == 0x0112)
return callback(view.getUint16(offset + (i * 12) + 8, little));
} else if ((marker & 0xFF00) != 0xFF00) break;
else offset += view.getUint16(offset, false);
}
return callback(-1);
};
reader.readAsArrayBuffer(file.slice(0, 64 * 1024));
};
//from https://stackoverflow.com/a/40867559/8630411
//Resets the orientation of an base64 image url string
// based on the orientation from getOrientaion
// or this image https://i.stack.imgur.com/VGsAj.gif
//srcBase64 is a base64 url string
//srcOrientation is a number between 2 and 8
//callback is the function to perform after the orientaion has finished
function resetOrientation(srcBase64, srcOrientation, callback) {
console.log("Transforming image, please wait");
var img = new Image();
img.onload = function() {
var width = img.width,
height = img.height,
canvas = document.createElement('canvas'),
ctx = canvas.getContext("2d");
// set proper canvas dimensions before transform & export
if (4 < srcOrientation && srcOrientation < 9) {
canvas.width = height;
canvas.height = width;
} else {
canvas.width = width;
canvas.height = height;
}
// transform context before drawing image
switch (srcOrientation) {
case 2:
ctx.transform(-1, 0, 0, 1, width, 0);
break;
case 3:
ctx.transform(-1, 0, 0, -1, width, height);
break;
case 4:
ctx.transform(1, 0, 0, -1, 0, height);
break;
case 5:
ctx.transform(0, 1, 1, 0, 0, 0);
break;
case 6:
ctx.transform(0, 1, -1, 0, height, 0);
break;
case 7:
ctx.transform(0, -1, -1, 0, height, width);
break;
case 8:
ctx.transform(0, -1, 1, 0, 0, width);
break;
default:
break;
}
// draw image
ctx.drawImage(img, 0, 0);
// export base64
callback(canvas.toDataURL());
};
img.src = srcBase64;
}
//If the image is not oriented properly uses the resetOrientation function to
// reorient the image
function orientDisplayPic(picture, orientation) {
if (orientation > 1) {
resetOrientation(picture, orientation, function rotate(rotated) {
displayPic(rotated);
});
} else {
displayPic(picture);
}
}
//Displays picture on the page
//picture is a valid string for that an image src tag can use
function displayPic(picture) {
$("#displayImage").attr("src", picture);
$("#displayImage").removeClass("hidden");
$("#drag").removeClass("uploadWanted");
displayLoad();
}
//Display a message and gif to give the user feed back that the program is doing something
//Empties the cloudDesc element, puts it there and uses clouds.gif
function displayLoad() {
var load = $("<img>");
load.addClass("img-center img-responsive");
load.attr("src", "assets/images/clouds.gif");
load.attr("id", "cloud-gif");
$("#cloudDesc").empty();
$("#cloudDesc").attr("hidden", false);
$("#cloudDesc").append("<h3 style='text-align: center;'>Searching for cloud types</h3>");
$("#cloudDesc").append(load);
}
//Display a message to the user, meant to be for errors
//Empties the cloudDesc element and puts the message there
function displayError(message, id = "cloudDesc") {
$("#" + id).empty();
setTimeout(function() {
$("#" + id).attr("hidden", false);
$("#" + id).append("<h3 style='text-align:center;'>" + message + "</h3>");
}, 48);
}
//Make sure that a file is an image and if so sends to the FileReader
//file is a valid file blob
function readPic(file) {
console.log(file);
var fileType = file["type"].split("/")[0];
if (fileType !== "image") {
displayError("Not an image, please use a valid image format: \
JPEG, PNG8, PNG24, GIF, Animated GIF (first frame only), BMP, WEBP, RAW, or ICO");
} else {
loadImage.parseMetaData(file, function(data) {
//default image orientation
var orientation = 0;
//if exif data available, update orientation
if (data.exif) {
orientation = data.exif.get('Orientation');
}
var loadingImage = loadImage(
file,
function(canvas) {
//here's the base64 data result
var base64data = canvas.toDataURL('image/jpeg');
displayPic(base64data);
if (file.size < 4000000) {
sendImageDirect(base64data);
} else {
sendImageImgur(base64data);
}
}, {
//should be set to canvas : true to activate auto fix orientation
canvas: true,
orientation: orientation
}
);
});
// getOrientation(file, function checkRotate(orientation) {
// console.log(orientation);
// var reader = new FileReader();
// getImageBase64(file, function(image) {
// if (file.size < 4000000) {
// orientDisplayPic(image, orientation);
// sendImageDirect(image, orientation);
// } else {
// orientDisplayPic(image, orientation);
// sendImageImgur(image, orientation);
// }
// });
// });
}
}
//Takes a file blob and returns a base64dataURL
function getImageBase64(file, callback) {
var reader = new FileReader();
reader.onload = function gotImage(event) {
var image = event.target.result;
callback(image)
};
reader.readAsDataURL(file);
}
//Checks if a string is a base65URL
//returns boolean
function isBase64image(base64string) {
var legal = base64string[base64string.length - 1] === "=";
console.log(legal);
var base64reg = new RegExp(/data:image\/([a-zA-Z]*);base64,([^\"]*)/);
var valid = base64reg.test(base64string);
console.log(valid);
var image = base64string.search(/data:image/) !== -1;
console.log(image);
return (legal && valid && image);
}
$(document).ready(function addUpload() {
var imgur_client_id = "cffdbdcf9cb88c7";
var imgur_sec = "4e806c50fb260cc521bfe11d4e7edfa22cfbf684";
var authURL = "https://api.imgur.com/oauth2/authorize?client_id=" + imgur_client_id + "&response_type=token"
var auth_token = ""
var refreshForm = new FormData();
refreshForm.append("refresh_token", "4c771b1e1055e3eb3adee452a2155ebd258392d1");
refreshForm.append("client_id", "cffdbdcf9cb88c7");
refreshForm.append("client_secret", "4e806c50fb260cc521bfe11d4e7edfa22cfbf684");
refreshForm.append("grant_type", "refresh_token");
//adds drag and drop listeners to the drag element
$("#drag").on('dragover', function(event) {
event.stopPropagation();
event.preventDefault();
console.log('dragover');
$(this).css('border', '2px solid #66aede');
$(this).css('background-color', 'rgba(255,255,255,0.6)');
$(this).css('color', '#173848');
});
$("#drag").on('dragleave', function(event) {
event.stopPropagation();
event.preventDefault();
console.log('dragleave');
$(this).css('border', '2px dotted #66aede');
$(this).css('background-color', 'rgba(255,255,255,0.4)');
$(this).css('color', '#31708f');
});
$("#drag").on('drop', function(event) {
event.stopPropagation();
event.preventDefault();
$(this).css('border', '2px dotted #66aede');
$(this).css('background-color', 'rgba(255,255,255,0.4)'); | if (files.length !== 0) {
//We need to send dropped files to Server
console.log(files);
for (var i = 0; i < files.length; i++) {
readPic(files[i]);
}
} else {
var url = $(event.originalEvent.dataTransfer.getData('text/html')).filter('img').attr('src');
console.log(url);
if (isURL(url)) {
console.log("Upload from another site");
displayPic(url);
queryVisionAPI(url);
} else
if (isBase64image(url)) {
console.log("Upload from another site");
displayPic(url);
queryVisionAPI(url);
} else {
displayError("Not a valid URL");
}
}
});
$("#uploadBtn").on("change", function readFile(event) {
console.log(event);
console.log($(this));
for (var i = 0; i < this.files.length; i++) {
readPic(this.files[i]);
}
});
}); | $(this).css('color', '#31708f');
var files = event.originalEvent.dataTransfer.files;
console.log(files.length === 0); | random_line_split |
Model.py | import numpy as np
import pandas as pd
import warnings
from sklearn.ensemble import RandomForestRegressor # 随机森林算法将缺失值补充
from sklearn.model_selection import train_test_split # 数据集划分模块
from scipy.stats import stats # scipy.stats是一个很好的统计推断包
from sklearn.linear_model import LogisticRegression
import math
def optimal_bins(Y, X, n):
"""
:param Y: 目标变量
:param X: 待分箱特征
:param n: 分箱数初始值
:return: 统计值、分箱边界值列表、woe值、iv值
"""
r = 0 # 初始值
total_bad = Y.sum() # 总的坏样本数
total_good = Y.count() - total_bad # 总的好样本数
# 分箱过程
while np.abs(r) < 1:
df1 = pd.DataFrame({'X': X, 'Y': Y, 'bin': pd.qcut(X, n, duplicates='drop')}) # qcut():基于量化的离散化函数
df2 = df1.groupby('bin')
r, p = stats.spearmanr(df2.mean().X, df2.mean().Y)
n = n - 1
# 计算woe值和iv值
df3 = pd.DataFrame()
df3['min_' + X.name] = df2.min().X
df3['max_' + X.name] = df2.max().X
df3['sum'] = df2.sum().Y
df3['total'] = df2.count().Y
df3['rate'] = df2.mean().Y
df3['badattr'] = df3['sum'] / total_bad
df3['goodattr'] = (df3['total'] - df3['sum']) / total_good
df3['woe'] = np.log(df3['badattr'] / df3['goodattr'])
iv = ((df3['badattr'] - df3['goodattr']) * df3['woe']).sum()
df3 = df3.sort_values(by='min_' + X.name).reset_index(drop=True)
# 分箱边界值列表
cut = []
cut.append(float('-inf'))
for i in range(1, n + 1):
qua = X.quantile(i / (n + 1))
cut.append(round(qua, 6))
cut.append(float('inf'))
# woe值列表
woe = list(df3['woe'])
return df3, cut, woe, iv
def custom_bins(Y, X, binList):
"""
:param Y: 目标变量
:param X: 待分箱特征
:param binList: 分箱边界值列表
:return: 统计值、woe值、iv值
"""
r = 0
total_bad = Y.sum()
total_good = Y.count() - total_bad
# 等距分箱
df1 = pd.DataFrame({'X': X, 'Y': Y, 'bin': pd.cut(X, binList)})
df2 = df1.groupby('bin', as_index=True)
r, p = stats.spearmanr(df2.mean().X, df2.mean().Y)
# 计算woe值和iv值
df3 = pd.DataFrame()
df3['min_' + X.name] = df2.min().X
df3['max_' + X.name] = df2.max().X
df3['sum'] = df2.sum().Y
df3['total'] = df2.count().Y
df3['rate'] = df2.mean().Y
df3['badattr'] = df3['sum'] / total_bad
df3['goodattr'] = (df3['total'] - df3['sum']) / total_good
df3['woe'] = np.log(df3['badattr'] / df3['goodattr'])
iv = ((df3['badattr'] - df3['goodattr']) * df3['woe']).sum()
df3 = df3.sort_values(by='min_' + X.name).reset_index(drop=True)
woe = list(df3['woe'])
return df3, woe, iv
# 90D、RevolvingRatio、30-59D、60-89D、Age
class Model:
data = None
clf1 = None # 保存训练模型的情况
def __init__(self): # 初始化时候要导入数据进行训练,保存导入的数据以及模型参数以供使用
self.get_data()
self.train()
def predict(self, info_list: list) -> int:
if self.data is None: # 如果未导入数据则首先导入数据
self.get_data()
# 根据传入的数据信息构造一个字典
dic = {"Label": 1, "90D": info_list[0], "RevolvingRatio": info_list[1], '30-59D': info_list[2],
'60-89D': info_list[3], 'Age': info_list[4]}
# print(self.data)
# 插入要预测的信息
self.data = self.data[['Label', '90D', 'RevolvingRatio', '30-59D', '60-89D', 'Age']]
self.data = self.data.append(dic, ignore_index=True)
# print(self.data)
return self.get_score()
def get_score(self) -> int:
if self.data is None: # 如果未导入数据则首先导入数据
return 0 # 表示错误
ninf = float('-inf')
pinf = float('inf')
cut_thirty = [ninf, 0, 1, 3, 5, pinf] # 30-59D特征
cut_open = [ninf, 1, 2, 3, 5, pinf] # OpenL特征
cut_ninety = [ninf, 0, 1, 3, 5, pinf] # 90D特征
cut_re = [ninf, 0, 1, 2, 3, pinf] # RealEstate特征
cut_sixty = [ninf, 0, 1, 3, pinf] # 60-89D特征
cut_dpt = [ninf, 0, 1, 2, 3, 5, pinf] # Dependents特征
cut_new2 = [ninf, 414, 1209, 2518, pinf]
# 计算统计值、woe 和iv
thirtyDf, woe_thirty, iv_thirty = custom_bins(self.data.Label, self.data['30-59D'], cut_thirty) # 30-59D特征
ninetyDf, woe_ninety, iv_ninety = custom_bins(self.data.Label, self.data['90D'], cut_ninety) # 90D特征
sixtyDf, woe_sixty, iv_sixty = custom_bins(self.data.Label, self.data['60-89D'], cut_sixty) # 60-89D特征
ageDf, cut_age, woe_age, iv_age = optimal_bins(self.data.Label, self.data.Age, n=10)
rrDf, cut_rr, woe_rr, iv_rr = optimal_bins(self.data.Label, self.data.RevolvingRatio, n=10)
n_data = pd.DataFrame()
n_data['90D'] = pd.cut(self.data['90D'], bins=cut_ninety, labels=woe_ninety) # 90D特征
n_data['RevolvingRatio'] = pd.cut(self.data['RevolvingRatio'], bins=cut_rr,
labels=woe_rr) # RevolvingRatio特征
n_data['30-59D'] = pd.cut(self.data['30-59D'], bins=cut_thirty, labels=woe_thirty) # 30-59D特征
n_data['60-89D'] = pd.cut(self.data['60-89D'], bins=cut_sixty, labels=woe_sixty) # 60-89D特征
n_data['Age'] = pd.cut(self.data['Age'], bins=cut_age, labels=woe_age) # Age特征
n_data['Label'] = self.data[['Label']] # 将标签传递
# 特征选择
# print(n_data.tail(10))
X = n_data.iloc[:, 1:] # 特征
y = n_data.iloc[:, 0] # 目标变量
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0) # 训练集:测试集 = 7:3
# 计算分值
# 计算基础分
B = 20 / math.log(2)
A = 600 + B * math.log(1 / 20)
BaseScore = round(A - B * self.clf1.intercept_[0], 0)
# print("评分卡的基础分为:", BaseScore)
# 每个特征列分值计算函数
def score(coef, woe):
"""
:param coef: 特征在逻辑回归模型中对应的参数
:param woe: 特征的WOE编码取值列表
:return: 分值
"""
scores = []
for x in woe:
score = round(-B * coef * x, 0)
scores.append(score)
return scores
# 不同特征各个区间对应的分值
score_ninety = score(self.clf1.coef_[0][0], woe_ninety) # 90D特征
# print("90D特征各个区间对应的分值为:", score_ninety)
score_rr = score(self.clf1.coef_[0][1], woe_rr) # RevolvingRatio特征
# print("RevolvingRatio特征各个区间对应的分值为:", score_rr)
score_thirty = score(self.clf1.coef_[0][2], woe_thirty) # 30-59D特征
# print("30-59D特征各个区间对应的分值为:", score_thirty)
score_sixty = score(self.clf1.coef_[0][3], woe_sixty) # 60-89D特征
# print("60-89D特征各个区间对应的分值为:", score_sixty)
score_age = score(self.clf1.coef_[0][4], woe_age) # Age特征
# print("Age特征各个区间对应的分值为:", score_age)
# 测试集样本转化为分值形式
cardDf = X_test.copy() # 不改变原测试集,在副本上操作
# 将特征值转化为分值
n_data['90D'] = n_data['90D'].replace(woe_ninety, score_ninety)
n_data['RevolvingRatio'] = n_data['RevolvingRatio'].replace(woe_rr, score_rr)
n_data['30-59D'] = n_data['30-59D'].replace(woe_thirty, score_thirty)
n_data['60-89D'] = n_data['60-89D'].replace(woe_sixty, score_sixty)
n_data['Age'] = n_data['Age'].replace(woe_age, score_age)
# print(n_data.head(10)) # 观察此时的测试集副本
# 计算每个样本的分值
n_data['Score'] = BaseScore + n_data['90D'] + n_data['RevolvingRatio'] + \
n_data['30-59D'] + n_data['60-89D'] + n_data['Age']
# print(n_data.head(10))
return int(n_data.tail(1).Score)
def get_data(self):
"""
导入数据操作,首先需要进行数据导入以及预处理
:return: None
"""
self.data = pd.read_csv('datasets/cs-training.csv')
self.data = self.data.iloc[:, 1:] # 舍弃Unnamed: 0列
self.data.columns = ['Label', 'RevolvingRatio', 'Age', '30-59D', 'DebtRatio', 'MonthlyIncome',
'OpenL', '90D', 'RealEstate', '60-89D', 'Dependents'] # 列重命名
# 用MonthlyIncome特征值非空的样本构建训练集,MonthlyIncome特征值缺失的样本构建测试集
rfDf = self.data.iloc[:, [5, 1, 2, 3, 4, 6, 7, 8, 9]] # 原始数据集中的无缺失数值特征
rfDf_train = rfDf.loc[rfDf['MonthlyIncome'].notnull()]
rfDf_test = rfDf.loc[rfDf['MonthlyIncome'].isnull()]
# 划分训练数据和标签(label)
X = rfDf_train.iloc[:, 1:]
y = rfDf_train.iloc[:, 0]
# 训练过程
rf = RandomForestRegressor(random_state=0, n_estimators=200, max_depth=3, n_jobs=-1) # 这里重在理解过程,因此仅简单选取部分参数
rf.fit(X, y)
# 预测过程
pred = rf.predict(rfDf_test.iloc[:, 1:]).round(0) # 预测值四舍五入并保留一位小数点
self.data.loc[(self.data['MonthlyIncome'].isnull()), 'MonthlyIncome'] = pred # 填补缺失值
# Dependents特征处理
self.data['Dependents'].fillna(self.data['Dependents'].mode()[0], inplace=True) # 这里采用众数填充
# 处理百分比类异常值
# RevolvingRatio特征
ruulDf = self.data[self.data['RevolvingRatio'] <= 1] # 去掉高于1的部分
ruul_mean = ruulDf['RevolvingRatio'].mean() # 计算均值
self.data.loc[self.data['RevolvingRatio'] > 1, 'RevolvingRatio'] = ruul_mean # 均值替代
# DebtRatio特征
ruulDf = self.data[self.data['DebtRatio'] <= 1] # 去掉高于1的部分
ruul_mean = ruulDf['DebtRatio'].mean() # 计算均值
self.data.loc[self.data['DebtRatio'] > 1, 'DebtRatio'] = ruul_mean # 均值替代
# 处理逾期特征异常值
self.data.drop(self.data[self.data['30-59D'] > 80].index, inplace=True) # 根据索引删除样本
# 处理年龄特征异常值
self.data.drop(self.data[self.data['Age'] == 0].index, inplace=True) # 根据索引删除样本
self.data.drop(self.data[self.data['Age'] > 96].index, inplace=True)
| data.columns = ['Label', 'RevolvingRatio', 'Age', '30-59D', 'DebtRatio', 'MonthlyIncome',
'OpenL', '90D', 'RealEstate', '60-89D', 'Dependents'] # 列重命名
# print(data.head(10)) # 观察整理后数据集
# MonthlyIncome特征处理
# 用MonthlyIncome特征值非空的样本构建训练集,MonthlyIncome特征值缺失的样本构建测试集
rfDf = data.iloc[:, [5, 1, 2, 3, 4, 6, 7, 8, 9]] # 原始数据集中的无缺失数值特征
rfDf_train = rfDf.loc[rfDf['MonthlyIncome'].notnull()]
rfDf_test = rfDf.loc[rfDf['MonthlyIncome'].isnull()]
# 划分训练数据和标签(label)
X = rfDf_train.iloc[:, 1:]
y = rfDf_train.iloc[:, 0]
# 训练过程
rf = RandomForestRegressor(random_state=0, n_estimators=200, max_depth=3, n_jobs=-1) # 这里重在理解过程,因此仅简单选取部分参数
rf.fit(X, y)
# 预测过程
pred = rf.predict(rfDf_test.iloc[:, 1:]).round(0) # 预测值四舍五入并保留一位小数点
data.loc[(data['MonthlyIncome'].isnull()), 'MonthlyIncome'] = pred # 填补缺失值
# print("此时的MonthlyIncome特征统计指标:\n")
# print(rfDf['MonthlyIncome'].describe())
# Dependents特征处理
data['Dependents'].fillna(data['Dependents'].mode()[0], inplace=True) # 这里采用众数填充
# print("此时Dependents特征统计指标:\n")
# print(data['Dependents'].describe())
# 处理百分比类异常值
# RevolvingRatio特征
ruulDf = data[data['RevolvingRatio'] <= 1] # 去掉高于1的部分
ruul_mean = ruulDf['RevolvingRatio'].mean() # 计算均值
data.loc[data['RevolvingRatio'] > 1, 'RevolvingRatio'] = ruul_mean # 均值替代
# DebtRatio特征
ruulDf = data[data['DebtRatio'] <= 1] # 去掉高于1的部分
ruul_mean = ruulDf['DebtRatio'].mean() # 计算均值
data.loc[data['DebtRatio'] > 1, 'DebtRatio'] = ruul_mean # 均值替代
# 处理逾期特征异常值
data.drop(data[data['30-59D'] > 80].index, inplace=True) # 根据索引删除样本
# print("剩下的样本数为:", data.shape[0])
# 处理年龄特征异常值
data.drop(data[data['Age'] == 0].index, inplace=True) # 根据索引删除样本
data.drop(data[data['Age'] > 96].index, inplace=True)
# print("剩下的样本数为:", data.shape[0])
# 构建新特征
# IncAvg:家庭中每个人分摊的平均月收入
data['IncAvg'] = data['MonthlyIncome'] / (data['Dependents'] + 1)
# MonthlyDept:每月的债务
data['MonthlyDept'] = data['MonthlyIncome'] * data['DebtRatio']
# DeptAvg:家庭中平均每个人分摊每月应还债务
data['DeptAvg'] = data['MonthlyDept'] / (data['Dependents'] + 1)
data[['IncAvg', 'MonthlyDept', 'DeptAvg']].head(10) # 查看新特征
rrDf, cut_rr, woe_rr, iv_rr = optimal_bins(data.Label, data.RevolvingRatio, n=10)
# print(rrDf)
# print(cut_rr)
# MonthlyIncome特征
miDf, cut_mi, woe_mi, iv_mi = optimal_bins(data.Label, data.MonthlyIncome, n=10)
# print("MonthlyIncome特征分箱情况:", cut_mi)
# Age特征
ageDf, cut_age, woe_age, iv_age = optimal_bins(data.Label, data.Age, n=10)
# print("Age特征分箱情况:", cut_age)
# DebtRatio特征
drDf, cut_dr, woe_dr, iv_dr = optimal_bins(data.Label, data.DebtRatio, 10)
# print("DebtRatio特征分箱情况:", cut_dr)
# 自定义分箱区间如下
# 原始特征
ninf = float('-inf')
pinf = float('inf')
cut_thirty = [ninf, 0, 1, 3, 5, pinf] # 30-59D特征
cut_open = [ninf, 1, 2, 3, 5, pinf] # OpenL特征
cut_ninety = [ninf, 0, 1, 3, 5, pinf] # 90D特征
cut_re = [ninf, 0, 1, 2, 3, pinf] # RealEstate特征
cut_sixty = [ninf, 0, 1, 3, pinf] # 60-89D特征
cut_dpt = [ninf, 0, 1, 2, 3, 5, pinf] # Dependents特征
# 新特征
cut_new2 = [ninf, 414, 1209, 2518, pinf] # 新特征MonthlyDept自定义分箱
# 计算统计值、woe和iv
thirtyDf, woe_thirty, iv_thirty = custom_bins(data.Label, data['30-59D'], cut_thirty) # 30-59D特征
openDf, woe_open, iv_open = custom_bins(data.Label, data.OpenL, cut_open) # OpenL特征
ninetyDf, woe_ninety, iv_ninety = custom_bins(data.Label, data['90D'], cut_ninety) # 90D特征
reDf, woe_re, iv_re = custom_bins(data.Label, data.RealEstate, cut_re) # RealEstate特征
sixtyDf, woe_sixty, iv_sixty = custom_bins(data.Label, data['60-89D'], cut_sixty) # 60-89D特征
dptDf, woe_dpt, iv_dpt = custom_bins(data.Label, data.Dependents, cut_dpt) # Dependents特征
newDf2, woe_new2, iv_new2 = custom_bins(data.Label, data.MonthlyDept, cut_new2) # 新特征MonthlyDept
# WOE编码
data['90D'] = pd.cut(data['90D'], bins=cut_ninety, labels=woe_ninety) # 90D特征
data['RevolvingRatio'] = pd.cut(data['RevolvingRatio'], bins=cut_rr, labels=woe_rr) # RevolvingRatio特征
data['30-59D'] = pd.cut(data['30-59D'], bins=cut_thirty, labels=woe_thirty) # 30-59D特征
data['60-89D'] = pd.cut(data['60-89D'], bins=cut_sixty, labels=woe_sixty) # 60-89D特征
data['Age'] = pd.cut(data['Age'], bins=cut_age, labels=woe_age) # Age特征
# 特征选择
data = data[['Label', '90D', 'RevolvingRatio', '30-59D', '60-89D', 'Age']]
# print(data.head(10)) # 此时的数据集
X = data.iloc[:, 1:] # 特征
y = data.iloc[:, 0] # 目标变量
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0) # 训练集:测试集 = 7:3
'''
LogisticRegression一些重要参数的默认值:
penalty:正则化类型,默认值'l2',当solver='liblinear'时还可以选择'l1'
solver:最优化方法,默认值'liblinear',还可以选择'newton-cg', 'lbfgs', 'sag', 'saga'
tol:迭代终止的阈值,默认值为1e-4
max_iter:最大迭代次数,默认值100
(...等其他参数)
'''
model1 = LogisticRegression() # 首先全部采用默认值进行训练
clf1 = model1.fit(X_train, y_train) # 模型训练
# 记录训练后的模型
self.clf1 = clf1 | def train(self):
warnings.filterwarnings('ignore') # 忽略弹出的warnings
data = pd.read_csv('datasets/cs-training.csv')
data = data.iloc[:, 1:] # 舍弃Unnamed: 0列 | random_line_split |
Model.py | import numpy as np
import pandas as pd
import warnings
from sklearn.ensemble import RandomForestRegressor # 随机森林算法将缺失值补充
from sklearn.model_selection import train_test_split # 数据集划分模块
from scipy.stats import stats # scipy.stats是一个很好的统计推断包
from sklearn.linear_model import LogisticRegression
import math
def optimal_bins(Y, X, n):
"""
:param Y: 目标变量
:param X: 待分箱特征
:param n: 分箱数初始值
:return: 统计值、分箱边界值列表、woe值、iv值
"""
r = 0 # 初始值
total_bad = Y.sum() # 总的坏样本数
total_good = Y.count() - total_bad # 总的好样本数
# 分箱过程
while np.abs(r) < 1:
df1 = pd.DataFrame({'X': X, 'Y': Y, 'bin': pd.qcut(X, n, duplicates='drop')}) # qcut():基于量化的离散化函数
df2 = df1.groupby('bin')
r, p = stats.spearmanr(df2.mean().X, df2.mean().Y)
n = n - 1
# 计算woe值和iv值
df3 = pd.DataFrame()
df3['min_' + X.name] = df2.min().X
df3['max_' + X.name] = df2.max().X
df3['sum'] = df2.sum().Y
df3['total'] = df2.count().Y
df3['rate'] = df2.mean().Y
df3['badattr'] = df3['sum'] / total_bad
df3['goodattr'] = (df3['total'] - df3['sum']) / total_good
df3['woe'] = np.log(df3['badattr'] / df3['goodattr'])
iv = ((df3['badattr'] - df3['goodattr']) * df3['woe']).sum()
df3 = df3.sort_values(by='min_' + X.name).reset_index(drop=True)
# 分箱边界值列表
cut = []
cut.append(float('-inf'))
for i in range(1, n + 1):
qua = X.quantile(i / (n + 1))
cut.append(round(qua, 6))
cut.append(float('inf'))
# woe值列表
woe = list(df3['woe'])
return df3, cut, woe, iv
def custom_bins(Y, X, binList):
"""
:param Y: 目标变量
:param X: 待分箱特征
:param binList: 分箱边界值列表
:return: 统计值、woe值、iv值
"""
r = 0
total_bad = Y.sum()
total_good = Y.count() - total_bad
# 等距分箱
df1 = pd.DataFrame({'X': X, 'Y': Y, 'bin': pd.cut(X, binList)})
df2 = df1.groupby('bin', as_index=True)
r, p = stats.spearmanr(df2.mean().X, df2.mean().Y)
# 计算woe值和iv值
df3 = pd.DataFrame()
df3['min_' + X.name] = df2.min().X
df3['max_' + X.name] = df2.max().X
df3['sum'] = df2.sum().Y
df3['total'] = df2.count().Y
df3['rate'] = df2.mean().Y
df3['badattr'] = df3['sum'] / total_bad
df3['goodattr'] = (df3['total'] - df3['sum']) / total_good
df3['woe'] = np.log(df3['badattr'] / df3['goodattr'])
iv = ((df3['badattr'] - df3['goodattr']) * df3['woe']).sum()
df3 = df3.sort_values(by='min_' + X.name).reset_index(drop=True)
woe = list(df3['woe'])
return df3, woe, iv
# 90D、RevolvingRatio、30-59D、60-89D、Age
class Model:
data = None
clf1 = None # 保存训练模型的情况
def __init__(self): # 初始化时候要导入数据进行训练,保存导入的数据以及模型参数以供使用
self.get_data()
self.train()
def predict(self, info_list: list) -> int:
if self.data is None: # 如果未导入数据则首先导入数据
self.get_data( | # 根据传入的数据信息构造一个字典
dic = {"Label": 1, "90D": info_list[0], "RevolvingRatio": info_list[1], '30-59D': info_list[2],
'60-89D': info_list[3], 'Age': info_list[4]}
# print(self.data)
# 插入要预测的信息
self.data = self.data[['Label', '90D', 'RevolvingRatio', '30-59D', '60-89D', 'Age']]
self.data = self.data.append(dic, ignore_index=True)
# print(self.data)
return self.get_score()
def get_score(self) -> int:
if self.data is None: # 如果未导入数据则首先导入数据
return 0 # 表示错误
ninf = float('-inf')
pinf = float('inf')
cut_thirty = [ninf, 0, 1, 3, 5, pinf] # 30-59D特征
cut_open = [ninf, 1, 2, 3, 5, pinf] # OpenL特征
cut_ninety = [ninf, 0, 1, 3, 5, pinf] # 90D特征
cut_re = [ninf, 0, 1, 2, 3, pinf] # RealEstate特征
cut_sixty = [ninf, 0, 1, 3, pinf] # 60-89D特征
cut_dpt = [ninf, 0, 1, 2, 3, 5, pinf] # Dependents特征
cut_new2 = [ninf, 414, 1209, 2518, pinf]
# 计算统计值、woe 和iv
thirtyDf, woe_thirty, iv_thirty = custom_bins(self.data.Label, self.data['30-59D'], cut_thirty) # 30-59D特征
ninetyDf, woe_ninety, iv_ninety = custom_bins(self.data.Label, self.data['90D'], cut_ninety) # 90D特征
sixtyDf, woe_sixty, iv_sixty = custom_bins(self.data.Label, self.data['60-89D'], cut_sixty) # 60-89D特征
ageDf, cut_age, woe_age, iv_age = optimal_bins(self.data.Label, self.data.Age, n=10)
rrDf, cut_rr, woe_rr, iv_rr = optimal_bins(self.data.Label, self.data.RevolvingRatio, n=10)
n_data = pd.DataFrame()
n_data['90D'] = pd.cut(self.data['90D'], bins=cut_ninety, labels=woe_ninety) # 90D特征
n_data['RevolvingRatio'] = pd.cut(self.data['RevolvingRatio'], bins=cut_rr,
labels=woe_rr) # RevolvingRatio特征
n_data['30-59D'] = pd.cut(self.data['30-59D'], bins=cut_thirty, labels=woe_thirty) # 30-59D特征
n_data['60-89D'] = pd.cut(self.data['60-89D'], bins=cut_sixty, labels=woe_sixty) # 60-89D特征
n_data['Age'] = pd.cut(self.data['Age'], bins=cut_age, labels=woe_age) # Age特征
n_data['Label'] = self.data[['Label']] # 将标签传递
# 特征选择
# print(n_data.tail(10))
X = n_data.iloc[:, 1:] # 特征
y = n_data.iloc[:, 0] # 目标变量
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0) # 训练集:测试集 = 7:3
# 计算分值
# 计算基础分
B = 20 / math.log(2)
A = 600 + B * math.log(1 / 20)
BaseScore = round(A - B * self.clf1.intercept_[0], 0)
# print("评分卡的基础分为:", BaseScore)
# 每个特征列分值计算函数
def score(coef, woe):
"""
:param coef: 特征在逻辑回归模型中对应的参数
:param woe: 特征的WOE编码取值列表
:return: 分值
"""
scores = []
for x in woe:
score = round(-B * coef * x, 0)
scores.append(score)
return scores
# 不同特征各个区间对应的分值
score_ninety = score(self.clf1.coef_[0][0], woe_ninety) # 90D特征
# print("90D特征各个区间对应的分值为:", score_ninety)
score_rr = score(self.clf1.coef_[0][1], woe_rr) # RevolvingRatio特征
# print("RevolvingRatio特征各个区间对应的分值为:", score_rr)
score_thirty = score(self.clf1.coef_[0][2], woe_thirty) # 30-59D特征
# print("30-59D特征各个区间对应的分值为:", score_thirty)
score_sixty = score(self.clf1.coef_[0][3], woe_sixty) # 60-89D特征
# print("60-89D特征各个区间对应的分值为:", score_sixty)
score_age = score(self.clf1.coef_[0][4], woe_age) # Age特征
# print("Age特征各个区间对应的分值为:", score_age)
# 测试集样本转化为分值形式
cardDf = X_test.copy() # 不改变原测试集,在副本上操作
# 将特征值转化为分值
n_data['90D'] = n_data['90D'].replace(woe_ninety, score_ninety)
n_data['RevolvingRatio'] = n_data['RevolvingRatio'].replace(woe_rr, score_rr)
n_data['30-59D'] = n_data['30-59D'].replace(woe_thirty, score_thirty)
n_data['60-89D'] = n_data['60-89D'].replace(woe_sixty, score_sixty)
n_data['Age'] = n_data['Age'].replace(woe_age, score_age)
# print(n_data.head(10)) # 观察此时的测试集副本
# 计算每个样本的分值
n_data['Score'] = BaseScore + n_data['90D'] + n_data['RevolvingRatio'] + \
n_data['30-59D'] + n_data['60-89D'] + n_data['Age']
# print(n_data.head(10))
return int(n_data.tail(1).Score)
def get_data(self):
"""
导入数据操作,首先需要进行数据导入以及预处理
:return: None
"""
self.data = pd.read_csv('datasets/cs-training.csv')
self.data = self.data.iloc[:, 1:] # 舍弃Unnamed: 0列
self.data.columns = ['Label', 'RevolvingRatio', 'Age', '30-59D', 'DebtRatio', 'MonthlyIncome',
'OpenL', '90D', 'RealEstate', '60-89D', 'Dependents'] # 列重命名
# 用MonthlyIncome特征值非空的样本构建训练集,MonthlyIncome特征值缺失的样本构建测试集
rfDf = self.data.iloc[:, [5, 1, 2, 3, 4, 6, 7, 8, 9]] # 原始数据集中的无缺失数值特征
rfDf_train = rfDf.loc[rfDf['MonthlyIncome'].notnull()]
rfDf_test = rfDf.loc[rfDf['MonthlyIncome'].isnull()]
# 划分训练数据和标签(label)
X = rfDf_train.iloc[:, 1:]
y = rfDf_train.iloc[:, 0]
# 训练过程
rf = RandomForestRegressor(random_state=0, n_estimators=200, max_depth=3, n_jobs=-1) # 这里重在理解过程,因此仅简单选取部分参数
rf.fit(X, y)
# 预测过程
pred = rf.predict(rfDf_test.iloc[:, 1:]).round(0) # 预测值四舍五入并保留一位小数点
self.data.loc[(self.data['MonthlyIncome'].isnull()), 'MonthlyIncome'] = pred # 填补缺失值
# Dependents特征处理
self.data['Dependents'].fillna(self.data['Dependents'].mode()[0], inplace=True) # 这里采用众数填充
# 处理百分比类异常值
# RevolvingRatio特征
ruulDf = self.data[self.data['RevolvingRatio'] <= 1] # 去掉高于1的部分
ruul_mean = ruulDf['RevolvingRatio'].mean() # 计算均值
self.data.loc[self.data['RevolvingRatio'] > 1, 'RevolvingRatio'] = ruul_mean # 均值替代
# DebtRatio特征
ruulDf = self.data[self.data['DebtRatio'] <= 1] # 去掉高于1的部分
ruul_mean = ruulDf['DebtRatio'].mean() # 计算均值
self.data.loc[self.data['DebtRatio'] > 1, 'DebtRatio'] = ruul_mean # 均值替代
# 处理逾期特征异常值
self.data.drop(self.data[self.data['30-59D'] > 80].index, inplace=True) # 根据索引删除样本
# 处理年龄特征异常值
self.data.drop(self.data[self.data['Age'] == 0].index, inplace=True) # 根据索引删除样本
self.data.drop(self.data[self.data['Age'] > 96].index, inplace=True)
def train(self):
warnings.filterwarnings('ignore') # 忽略弹出的warnings
data = pd.read_csv('datasets/cs-training.csv')
data = data.iloc[:, 1:] # 舍弃Unnamed: 0列
data.columns = ['Label', 'RevolvingRatio', 'Age', '30-59D', 'DebtRatio', 'MonthlyIncome',
'OpenL', '90D', 'RealEstate', '60-89D', 'Dependents'] # 列重命名
# print(data.head(10)) # 观察整理后数据集
# MonthlyIncome特征处理
# 用MonthlyIncome特征值非空的样本构建训练集,MonthlyIncome特征值缺失的样本构建测试集
rfDf = data.iloc[:, [5, 1, 2, 3, 4, 6, 7, 8, 9]] # 原始数据集中的无缺失数值特征
rfDf_train = rfDf.loc[rfDf['MonthlyIncome'].notnull()]
rfDf_test = rfDf.loc[rfDf['MonthlyIncome'].isnull()]
# 划分训练数据和标签(label)
X = rfDf_train.iloc[:, 1:]
y = rfDf_train.iloc[:, 0]
# 训练过程
rf = RandomForestRegressor(random_state=0, n_estimators=200, max_depth=3, n_jobs=-1) # 这里重在理解过程,因此仅简单选取部分参数
rf.fit(X, y)
# 预测过程
pred = rf.predict(rfDf_test.iloc[:, 1:]).round(0) # 预测值四舍五入并保留一位小数点
data.loc[(data['MonthlyIncome'].isnull()), 'MonthlyIncome'] = pred # 填补缺失值
# print("此时的MonthlyIncome特征统计指标:\n")
# print(rfDf['MonthlyIncome'].describe())
# Dependents特征处理
data['Dependents'].fillna(data['Dependents'].mode()[0], inplace=True) # 这里采用众数填充
# print("此时Dependents特征统计指标:\n")
# print(data['Dependents'].describe())
# 处理百分比类异常值
# RevolvingRatio特征
ruulDf = data[data['RevolvingRatio'] <= 1] # 去掉高于1的部分
ruul_mean = ruulDf['RevolvingRatio'].mean() # 计算均值
data.loc[data['RevolvingRatio'] > 1, 'RevolvingRatio'] = ruul_mean # 均值替代
# DebtRatio特征
ruulDf = data[data['DebtRatio'] <= 1] # 去掉高于1的部分
ruul_mean = ruulDf['DebtRatio'].mean() # 计算均值
data.loc[data['DebtRatio'] > 1, 'DebtRatio'] = ruul_mean # 均值替代
# 处理逾期特征异常值
data.drop(data[data['30-59D'] > 80].index, inplace=True) # 根据索引删除样本
# print("剩下的样本数为:", data.shape[0])
# 处理年龄特征异常值
data.drop(data[data['Age'] == 0].index, inplace=True) # 根据索引删除样本
data.drop(data[data['Age'] > 96].index, inplace=True)
# print("剩下的样本数为:", data.shape[0])
# 构建新特征
# IncAvg:家庭中每个人分摊的平均月收入
data['IncAvg'] = data['MonthlyIncome'] / (data['Dependents'] + 1)
# MonthlyDept:每月的债务
data['MonthlyDept'] = data['MonthlyIncome'] * data['DebtRatio']
# DeptAvg:家庭中平均每个人分摊每月应还债务
data['DeptAvg'] = data['MonthlyDept'] / (data['Dependents'] + 1)
data[['IncAvg', 'MonthlyDept', 'DeptAvg']].head(10) # 查看新特征
rrDf, cut_rr, woe_rr, iv_rr = optimal_bins(data.Label, data.RevolvingRatio, n=10)
# print(rrDf)
# print(cut_rr)
# MonthlyIncome特征
miDf, cut_mi, woe_mi, iv_mi = optimal_bins(data.Label, data.MonthlyIncome, n=10)
# print("MonthlyIncome特征分箱情况:", cut_mi)
# Age特征
ageDf, cut_age, woe_age, iv_age = optimal_bins(data.Label, data.Age, n=10)
# print("Age特征分箱情况:", cut_age)
# DebtRatio特征
drDf, cut_dr, woe_dr, iv_dr = optimal_bins(data.Label, data.DebtRatio, 10)
# print("DebtRatio特征分箱情况:", cut_dr)
# 自定义分箱区间如下
# 原始特征
ninf = float('-inf')
pinf = float('inf')
cut_thirty = [ninf, 0, 1, 3, 5, pinf] # 30-59D特征
cut_open = [ninf, 1, 2, 3, 5, pinf] # OpenL特征
cut_ninety = [ninf, 0, 1, 3, 5, pinf] # 90D特征
cut_re = [ninf, 0, 1, 2, 3, pinf] # RealEstate特征
cut_sixty = [ninf, 0, 1, 3, pinf] # 60-89D特征
cut_dpt = [ninf, 0, 1, 2, 3, 5, pinf] # Dependents特征
# 新特征
cut_new2 = [ninf, 414, 1209, 2518, pinf] # 新特征MonthlyDept自定义分箱
# 计算统计值、woe和iv
thirtyDf, woe_thirty, iv_thirty = custom_bins(data.Label, data['30-59D'], cut_thirty) # 30-59D特征
openDf, woe_open, iv_open = custom_bins(data.Label, data.OpenL, cut_open) # OpenL特征
ninetyDf, woe_ninety, iv_ninety = custom_bins(data.Label, data['90D'], cut_ninety) # 90D特征
reDf, woe_re, iv_re = custom_bins(data.Label, data.RealEstate, cut_re) # RealEstate特征
sixtyDf, woe_sixty, iv_sixty = custom_bins(data.Label, data['60-89D'], cut_sixty) # 60-89D特征
dptDf, woe_dpt, iv_dpt = custom_bins(data.Label, data.Dependents, cut_dpt) # Dependents特征
newDf2, woe_new2, iv_new2 = custom_bins(data.Label, data.MonthlyDept, cut_new2) # 新特征MonthlyDept
# WOE编码
data['90D'] = pd.cut(data['90D'], bins=cut_ninety, labels=woe_ninety) # 90D特征
data['RevolvingRatio'] = pd.cut(data['RevolvingRatio'], bins=cut_rr, labels=woe_rr) # RevolvingRatio特征
data['30-59D'] = pd.cut(data['30-59D'], bins=cut_thirty, labels=woe_thirty) # 30-59D特征
data['60-89D'] = pd.cut(data['60-89D'], bins=cut_sixty, labels=woe_sixty) # 60-89D特征
data['Age'] = pd.cut(data['Age'], bins=cut_age, labels=woe_age) # Age特征
# 特征选择
data = data[['Label', '90D', 'RevolvingRatio', '30-59D', '60-89D', 'Age']]
# print(data.head(10)) # 此时的数据集
X = data.iloc[:, 1:] # 特征
y = data.iloc[:, 0] # 目标变量
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0) # 训练集:测试集 = 7:3
'''
LogisticRegression一些重要参数的默认值:
penalty:正则化类型,默认值'l2',当solver='liblinear'时还可以选择'l1'
solver:最优化方法,默认值'liblinear',还可以选择'newton-cg', 'lbfgs', 'sag', 'saga'
tol:迭代终止的阈值,默认值为1e-4
max_iter:最大迭代次数,默认值100
(...等其他参数)
'''
model1 = LogisticRegression() # 首先全部采用默认值进行训练
clf1 = model1.fit(X_train, y_train) # 模型训练
# 记录训练后的模型
self.clf1 = clf1
| )
| identifier_name |
Model.py | import numpy as np
import pandas as pd
import warnings
from sklearn.ensemble import RandomForestRegressor # 随机森林算法将缺失值补充
from sklearn.model_selection import train_test_split # 数据集划分模块
from scipy.stats import stats # scipy.stats是一个很好的统计推断包
from sklearn.linear_model import LogisticRegression
import math
def optimal_bins(Y, X, n):
"""
:param Y: 目标变量
:param X: 待分箱特征
:param n: 分箱数初始值
:return: 统计值、分箱边界值列表、woe值、iv值
"""
r = 0 # 初始值
total_bad = Y.sum() # 总的坏样本数
total_good = Y.count() - total_bad # 总的好样本数
# 分箱过程
while np.abs(r) < 1:
df1 = pd.DataFrame({'X': X, 'Y': Y, 'bin': pd.qcut(X, n, duplicates='drop')}) # qcut():基于量化的离散化函数
df2 = df1.groupby('bin')
r, p = stats.spearmanr(df2.mean().X, df2.mean().Y)
n = n - 1
# 计算woe值和iv值
df3 = pd.DataFrame()
df3['min_' + X.name] = df2.min().X
df3['max_' + X.name] = df2.max().X
df3['sum'] = df2.sum().Y
df3['total'] = df2.count().Y
df3['rate'] = df2.mean().Y
df3['badattr'] = df3['sum'] / total_bad
df3['goodattr'] = (df3['total'] - df3['sum']) / total_good
df3['woe'] = np.log(df3['badattr'] / df3['goodattr'])
iv = ((df3['badattr'] - df3['goodattr']) * df3['woe']).sum()
df3 = df3.sort_values(by='min_' + X.name).reset_index(drop=True)
# 分箱边界值列表
cut = []
cut.append(float('-inf'))
for i in range(1, n + 1):
qua = X.quantile(i / (n + 1))
cut.append(round(qua, 6))
cut.append(float('inf'))
# woe值列表
woe = list(df3['woe'])
return df3, cut, woe, iv
def custom_bins(Y, X, binList):
"" | 边界值列表
:return: 统计值、woe值、iv值
"""
r = 0
total_bad = Y.sum()
total_good = Y.count() - total_bad
# 等距分箱
df1 = pd.DataFrame({'X': X, 'Y': Y, 'bin': pd.cut(X, binList)})
df2 = df1.groupby('bin', as_index=True)
r, p = stats.spearmanr(df2.mean().X, df2.mean().Y)
# 计算woe值和iv值
df3 = pd.DataFrame()
df3['min_' + X.name] = df2.min().X
df3['max_' + X.name] = df2.max().X
df3['sum'] = df2.sum().Y
df3['total'] = df2.count().Y
df3['rate'] = df2.mean().Y
df3['badattr'] = df3['sum'] / total_bad
df3['goodattr'] = (df3['total'] - df3['sum']) / total_good
df3['woe'] = np.log(df3['badattr'] / df3['goodattr'])
iv = ((df3['badattr'] - df3['goodattr']) * df3['woe']).sum()
df3 = df3.sort_values(by='min_' + X.name).reset_index(drop=True)
woe = list(df3['woe'])
return df3, woe, iv
# 90D、RevolvingRatio、30-59D、60-89D、Age
class Model:
data = None
clf1 = None # 保存训练模型的情况
def __init__(self): # 初始化时候要导入数据进行训练,保存导入的数据以及模型参数以供使用
self.get_data()
self.train()
def predict(self, info_list: list) -> int:
if self.data is None: # 如果未导入数据则首先导入数据
self.get_data()
# 根据传入的数据信息构造一个字典
dic = {"Label": 1, "90D": info_list[0], "RevolvingRatio": info_list[1], '30-59D': info_list[2],
'60-89D': info_list[3], 'Age': info_list[4]}
# print(self.data)
# 插入要预测的信息
self.data = self.data[['Label', '90D', 'RevolvingRatio', '30-59D', '60-89D', 'Age']]
self.data = self.data.append(dic, ignore_index=True)
# print(self.data)
return self.get_score()
def get_score(self) -> int:
if self.data is None: # 如果未导入数据则首先导入数据
return 0 # 表示错误
ninf = float('-inf')
pinf = float('inf')
cut_thirty = [ninf, 0, 1, 3, 5, pinf] # 30-59D特征
cut_open = [ninf, 1, 2, 3, 5, pinf] # OpenL特征
cut_ninety = [ninf, 0, 1, 3, 5, pinf] # 90D特征
cut_re = [ninf, 0, 1, 2, 3, pinf] # RealEstate特征
cut_sixty = [ninf, 0, 1, 3, pinf] # 60-89D特征
cut_dpt = [ninf, 0, 1, 2, 3, 5, pinf] # Dependents特征
cut_new2 = [ninf, 414, 1209, 2518, pinf]
# 计算统计值、woe 和iv
thirtyDf, woe_thirty, iv_thirty = custom_bins(self.data.Label, self.data['30-59D'], cut_thirty) # 30-59D特征
ninetyDf, woe_ninety, iv_ninety = custom_bins(self.data.Label, self.data['90D'], cut_ninety) # 90D特征
sixtyDf, woe_sixty, iv_sixty = custom_bins(self.data.Label, self.data['60-89D'], cut_sixty) # 60-89D特征
ageDf, cut_age, woe_age, iv_age = optimal_bins(self.data.Label, self.data.Age, n=10)
rrDf, cut_rr, woe_rr, iv_rr = optimal_bins(self.data.Label, self.data.RevolvingRatio, n=10)
n_data = pd.DataFrame()
n_data['90D'] = pd.cut(self.data['90D'], bins=cut_ninety, labels=woe_ninety) # 90D特征
n_data['RevolvingRatio'] = pd.cut(self.data['RevolvingRatio'], bins=cut_rr,
labels=woe_rr) # RevolvingRatio特征
n_data['30-59D'] = pd.cut(self.data['30-59D'], bins=cut_thirty, labels=woe_thirty) # 30-59D特征
n_data['60-89D'] = pd.cut(self.data['60-89D'], bins=cut_sixty, labels=woe_sixty) # 60-89D特征
n_data['Age'] = pd.cut(self.data['Age'], bins=cut_age, labels=woe_age) # Age特征
n_data['Label'] = self.data[['Label']] # 将标签传递
# 特征选择
# print(n_data.tail(10))
X = n_data.iloc[:, 1:] # 特征
y = n_data.iloc[:, 0] # 目标变量
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0) # 训练集:测试集 = 7:3
# 计算分值
# 计算基础分
B = 20 / math.log(2)
A = 600 + B * math.log(1 / 20)
BaseScore = round(A - B * self.clf1.intercept_[0], 0)
# print("评分卡的基础分为:", BaseScore)
# 每个特征列分值计算函数
def score(coef, woe):
"""
:param coef: 特征在逻辑回归模型中对应的参数
:param woe: 特征的WOE编码取值列表
:return: 分值
"""
scores = []
for x in woe:
score = round(-B * coef * x, 0)
scores.append(score)
return scores
# 不同特征各个区间对应的分值
score_ninety = score(self.clf1.coef_[0][0], woe_ninety) # 90D特征
# print("90D特征各个区间对应的分值为:", score_ninety)
score_rr = score(self.clf1.coef_[0][1], woe_rr) # RevolvingRatio特征
# print("RevolvingRatio特征各个区间对应的分值为:", score_rr)
score_thirty = score(self.clf1.coef_[0][2], woe_thirty) # 30-59D特征
# print("30-59D特征各个区间对应的分值为:", score_thirty)
score_sixty = score(self.clf1.coef_[0][3], woe_sixty) # 60-89D特征
# print("60-89D特征各个区间对应的分值为:", score_sixty)
score_age = score(self.clf1.coef_[0][4], woe_age) # Age特征
# print("Age特征各个区间对应的分值为:", score_age)
# 测试集样本转化为分值形式
cardDf = X_test.copy() # 不改变原测试集,在副本上操作
# 将特征值转化为分值
n_data['90D'] = n_data['90D'].replace(woe_ninety, score_ninety)
n_data['RevolvingRatio'] = n_data['RevolvingRatio'].replace(woe_rr, score_rr)
n_data['30-59D'] = n_data['30-59D'].replace(woe_thirty, score_thirty)
n_data['60-89D'] = n_data['60-89D'].replace(woe_sixty, score_sixty)
n_data['Age'] = n_data['Age'].replace(woe_age, score_age)
# print(n_data.head(10)) # 观察此时的测试集副本
# 计算每个样本的分值
n_data['Score'] = BaseScore + n_data['90D'] + n_data['RevolvingRatio'] + \
n_data['30-59D'] + n_data['60-89D'] + n_data['Age']
# print(n_data.head(10))
return int(n_data.tail(1).Score)
def get_data(self):
"""
导入数据操作,首先需要进行数据导入以及预处理
:return: None
"""
self.data = pd.read_csv('datasets/cs-training.csv')
self.data = self.data.iloc[:, 1:] # 舍弃Unnamed: 0列
self.data.columns = ['Label', 'RevolvingRatio', 'Age', '30-59D', 'DebtRatio', 'MonthlyIncome',
'OpenL', '90D', 'RealEstate', '60-89D', 'Dependents'] # 列重命名
# 用MonthlyIncome特征值非空的样本构建训练集,MonthlyIncome特征值缺失的样本构建测试集
rfDf = self.data.iloc[:, [5, 1, 2, 3, 4, 6, 7, 8, 9]] # 原始数据集中的无缺失数值特征
rfDf_train = rfDf.loc[rfDf['MonthlyIncome'].notnull()]
rfDf_test = rfDf.loc[rfDf['MonthlyIncome'].isnull()]
# 划分训练数据和标签(label)
X = rfDf_train.iloc[:, 1:]
y = rfDf_train.iloc[:, 0]
# 训练过程
rf = RandomForestRegressor(random_state=0, n_estimators=200, max_depth=3, n_jobs=-1) # 这里重在理解过程,因此仅简单选取部分参数
rf.fit(X, y)
# 预测过程
pred = rf.predict(rfDf_test.iloc[:, 1:]).round(0) # 预测值四舍五入并保留一位小数点
self.data.loc[(self.data['MonthlyIncome'].isnull()), 'MonthlyIncome'] = pred # 填补缺失值
# Dependents特征处理
self.data['Dependents'].fillna(self.data['Dependents'].mode()[0], inplace=True) # 这里采用众数填充
# 处理百分比类异常值
# RevolvingRatio特征
ruulDf = self.data[self.data['RevolvingRatio'] <= 1] # 去掉高于1的部分
ruul_mean = ruulDf['RevolvingRatio'].mean() # 计算均值
self.data.loc[self.data['RevolvingRatio'] > 1, 'RevolvingRatio'] = ruul_mean # 均值替代
# DebtRatio特征
ruulDf = self.data[self.data['DebtRatio'] <= 1] # 去掉高于1的部分
ruul_mean = ruulDf['DebtRatio'].mean() # 计算均值
self.data.loc[self.data['DebtRatio'] > 1, 'DebtRatio'] = ruul_mean # 均值替代
# 处理逾期特征异常值
self.data.drop(self.data[self.data['30-59D'] > 80].index, inplace=True) # 根据索引删除样本
# 处理年龄特征异常值
self.data.drop(self.data[self.data['Age'] == 0].index, inplace=True) # 根据索引删除样本
self.data.drop(self.data[self.data['Age'] > 96].index, inplace=True)
def train(self):
warnings.filterwarnings('ignore') # 忽略弹出的warnings
data = pd.read_csv('datasets/cs-training.csv')
data = data.iloc[:, 1:] # 舍弃Unnamed: 0列
data.columns = ['Label', 'RevolvingRatio', 'Age', '30-59D', 'DebtRatio', 'MonthlyIncome',
'OpenL', '90D', 'RealEstate', '60-89D', 'Dependents'] # 列重命名
# print(data.head(10)) # 观察整理后数据集
# MonthlyIncome特征处理
# 用MonthlyIncome特征值非空的样本构建训练集,MonthlyIncome特征值缺失的样本构建测试集
rfDf = data.iloc[:, [5, 1, 2, 3, 4, 6, 7, 8, 9]] # 原始数据集中的无缺失数值特征
rfDf_train = rfDf.loc[rfDf['MonthlyIncome'].notnull()]
rfDf_test = rfDf.loc[rfDf['MonthlyIncome'].isnull()]
# 划分训练数据和标签(label)
X = rfDf_train.iloc[:, 1:]
y = rfDf_train.iloc[:, 0]
# 训练过程
rf = RandomForestRegressor(random_state=0, n_estimators=200, max_depth=3, n_jobs=-1) # 这里重在理解过程,因此仅简单选取部分参数
rf.fit(X, y)
# 预测过程
pred = rf.predict(rfDf_test.iloc[:, 1:]).round(0) # 预测值四舍五入并保留一位小数点
data.loc[(data['MonthlyIncome'].isnull()), 'MonthlyIncome'] = pred # 填补缺失值
# print("此时的MonthlyIncome特征统计指标:\n")
# print(rfDf['MonthlyIncome'].describe())
# Dependents特征处理
data['Dependents'].fillna(data['Dependents'].mode()[0], inplace=True) # 这里采用众数填充
# print("此时Dependents特征统计指标:\n")
# print(data['Dependents'].describe())
# 处理百分比类异常值
# RevolvingRatio特征
ruulDf = data[data['RevolvingRatio'] <= 1] # 去掉高于1的部分
ruul_mean = ruulDf['RevolvingRatio'].mean() # 计算均值
data.loc[data['RevolvingRatio'] > 1, 'RevolvingRatio'] = ruul_mean # 均值替代
# DebtRatio特征
ruulDf = data[data['DebtRatio'] <= 1] # 去掉高于1的部分
ruul_mean = ruulDf['DebtRatio'].mean() # 计算均值
data.loc[data['DebtRatio'] > 1, 'DebtRatio'] = ruul_mean # 均值替代
# 处理逾期特征异常值
data.drop(data[data['30-59D'] > 80].index, inplace=True) # 根据索引删除样本
# print("剩下的样本数为:", data.shape[0])
# 处理年龄特征异常值
data.drop(data[data['Age'] == 0].index, inplace=True) # 根据索引删除样本
data.drop(data[data['Age'] > 96].index, inplace=True)
# print("剩下的样本数为:", data.shape[0])
# 构建新特征
# IncAvg:家庭中每个人分摊的平均月收入
data['IncAvg'] = data['MonthlyIncome'] / (data['Dependents'] + 1)
# MonthlyDept:每月的债务
data['MonthlyDept'] = data['MonthlyIncome'] * data['DebtRatio']
# DeptAvg:家庭中平均每个人分摊每月应还债务
data['DeptAvg'] = data['MonthlyDept'] / (data['Dependents'] + 1)
data[['IncAvg', 'MonthlyDept', 'DeptAvg']].head(10) # 查看新特征
rrDf, cut_rr, woe_rr, iv_rr = optimal_bins(data.Label, data.RevolvingRatio, n=10)
# print(rrDf)
# print(cut_rr)
# MonthlyIncome特征
miDf, cut_mi, woe_mi, iv_mi = optimal_bins(data.Label, data.MonthlyIncome, n=10)
# print("MonthlyIncome特征分箱情况:", cut_mi)
# Age特征
ageDf, cut_age, woe_age, iv_age = optimal_bins(data.Label, data.Age, n=10)
# print("Age特征分箱情况:", cut_age)
# DebtRatio特征
drDf, cut_dr, woe_dr, iv_dr = optimal_bins(data.Label, data.DebtRatio, 10)
# print("DebtRatio特征分箱情况:", cut_dr)
# 自定义分箱区间如下
# 原始特征
ninf = float('-inf')
pinf = float('inf')
cut_thirty = [ninf, 0, 1, 3, 5, pinf] # 30-59D特征
cut_open = [ninf, 1, 2, 3, 5, pinf] # OpenL特征
cut_ninety = [ninf, 0, 1, 3, 5, pinf] # 90D特征
cut_re = [ninf, 0, 1, 2, 3, pinf] # RealEstate特征
cut_sixty = [ninf, 0, 1, 3, pinf] # 60-89D特征
cut_dpt = [ninf, 0, 1, 2, 3, 5, pinf] # Dependents特征
# 新特征
cut_new2 = [ninf, 414, 1209, 2518, pinf] # 新特征MonthlyDept自定义分箱
# 计算统计值、woe和iv
thirtyDf, woe_thirty, iv_thirty = custom_bins(data.Label, data['30-59D'], cut_thirty) # 30-59D特征
openDf, woe_open, iv_open = custom_bins(data.Label, data.OpenL, cut_open) # OpenL特征
ninetyDf, woe_ninety, iv_ninety = custom_bins(data.Label, data['90D'], cut_ninety) # 90D特征
reDf, woe_re, iv_re = custom_bins(data.Label, data.RealEstate, cut_re) # RealEstate特征
sixtyDf, woe_sixty, iv_sixty = custom_bins(data.Label, data['60-89D'], cut_sixty) # 60-89D特征
dptDf, woe_dpt, iv_dpt = custom_bins(data.Label, data.Dependents, cut_dpt) # Dependents特征
newDf2, woe_new2, iv_new2 = custom_bins(data.Label, data.MonthlyDept, cut_new2) # 新特征MonthlyDept
# WOE编码
data['90D'] = pd.cut(data['90D'], bins=cut_ninety, labels=woe_ninety) # 90D特征
data['RevolvingRatio'] = pd.cut(data['RevolvingRatio'], bins=cut_rr, labels=woe_rr) # RevolvingRatio特征
data['30-59D'] = pd.cut(data['30-59D'], bins=cut_thirty, labels=woe_thirty) # 30-59D特征
data['60-89D'] = pd.cut(data['60-89D'], bins=cut_sixty, labels=woe_sixty) # 60-89D特征
data['Age'] = pd.cut(data['Age'], bins=cut_age, labels=woe_age) # Age特征
# 特征选择
data = data[['Label', '90D', 'RevolvingRatio', '30-59D', '60-89D', 'Age']]
# print(data.head(10)) # 此时的数据集
X = data.iloc[:, 1:] # 特征
y = data.iloc[:, 0] # 目标变量
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0) # 训练集:测试集 = 7:3
'''
LogisticRegression一些重要参数的默认值:
penalty:正则化类型,默认值'l2',当solver='liblinear'时还可以选择'l1'
solver:最优化方法,默认值'liblinear',还可以选择'newton-cg', 'lbfgs', 'sag', 'saga'
tol:迭代终止的阈值,默认值为1e-4
max_iter:最大迭代次数,默认值100
(...等其他参数)
'''
model1 = LogisticRegression() # 首先全部采用默认值进行训练
clf1 = model1.fit(X_train, y_train) # 模型训练
# 记录训练后的模型
self.clf1 = clf1
| "
:param Y: 目标变量
:param X: 待分箱特征
:param binList: 分箱 | conditional_block |
Model.py | import numpy as np
import pandas as pd
import warnings
from sklearn.ensemble import RandomForestRegressor # 随机森林算法将缺失值补充
from sklearn.model_selection import train_test_split # 数据集划分模块
from scipy.stats import stats # scipy.stats是一个很好的统计推断包
from sklearn.linear_model import LogisticRegression
import math
def optimal_bins(Y, X, n):
"""
:param Y: 目标变量
:param X: 待分箱特征
:param n: 分箱数初始值
:return: 统计值、分箱边界值列表、woe值、iv值
"""
r = 0 # 初始值
total_bad = Y.sum() # 总的坏样本数
total_good = Y.count() - total_bad # 总的好样本数
# 分箱过程
while np.abs(r) < 1:
df1 = pd.DataFrame({'X': X, 'Y': Y, 'bin': pd.qcut(X, n, duplicates='drop')}) # qcut():基于量化的离散化函数
df2 = df1.groupby('bin')
r, p = stats.spearmanr(df2.mean().X, df2.mean().Y)
n = n - 1
# 计算woe值和iv值
df3 = pd.DataFrame()
df3['min_' + X.name] = df2.min().X
df3['max_' + X.name] = df2.max().X
df3['sum'] = df2.sum().Y
df3['total'] = df2.count().Y
df3['rate'] = df2.mean().Y
df3['badattr'] = df3['sum'] / total_bad
df3['goodattr'] = (df3['total'] - df3['sum']) / total_good
df3['woe'] = np.log(df3['badattr'] / df3['goodattr'])
iv = ((df3['badattr'] - df3['goodattr']) * df3['woe']).sum()
df3 = df3.sort_values(by='min_' + X.name).reset_index(drop=True)
# 分箱边界值列表
cut = []
cut.append(float('-inf'))
for i in range(1, n + 1):
qua = X.quantile(i / (n + 1))
cut.append(round(qua, 6))
cut.append(float('inf'))
# woe值列表
woe = list(df3['woe'])
return df3, cut, woe, iv
def custom_bins(Y, X, binList):
"""
:param Y: 目标变量
:param X: 待分箱特征
:param binList: 分箱边界值列表
:return: 统计值、woe值、iv值
"""
r = 0
total_bad = Y.sum()
total_good = Y.count() - total_bad
# 等距分箱
df1 = pd.DataFrame({'X': X, 'Y': Y, 'bin': pd.cut(X, binList)})
df2 = df1.groupby('bin', as_index=True)
r, p = stats.spearmanr(df2.mean().X, df2.mean().Y)
# 计算woe值和iv值
df3 = pd.DataFrame()
df3['min_' + X.name] = df2.min().X
df3['max_' + X.name] = df2.max().X
df3['sum'] = df2.sum().Y
df3['total'] = df2.count().Y
df3['rate'] = df2.mean().Y
df3['badattr'] = df3['sum'] / total_bad
df3['goodattr'] = (df3['total'] - df3['sum']) / total_good
df3['woe'] = np.log(df3['badattr'] / df3['goodattr'])
iv = ((df3['badattr'] - df3['goodattr']) * df3['woe']).sum()
df3 = df3.sort_values(by='min_' + X.name).reset_index(drop=True)
woe = list(df3['woe'])
return df3, woe, iv
# 90D、RevolvingRatio、30-59D、60-89D、Age
class Model:
data = None
clf1 = None # 保存训练模型的情况
def __init__(self): # 初始化时候要导入数据进行训练,保存导入的数据以及模型参数以供使用
self.get_data()
self.train()
def predict(self, info_list: list) -> int:
if self.data is None: # 如果未导入数据则首先导入数据
self.get_data()
# 根据传入的数据信息构造一个字典
dic = {"Label": 1, "90D": info_list[0], "RevolvingRatio": info_list[1], '30-59D': info_list[2],
'60-89D': info_list[3], 'Age': info_list[4]}
# print(self.data)
# 插入要预测的信息
self.data = self.data[['Label', '90D', 'RevolvingRatio', '30-59D', '60-89D', 'Age']]
self.data = self.data.append(dic, ignore_index=True)
# print(self.data)
return self.get_score()
def get_score(self) -> int:
if self.data is None: # 如果未导入数据则首先导入数据
return 0 # 表示错误
ninf = float('-inf')
pinf = float('inf')
cut_thirty = [ninf, 0, 1, 3, 5, pinf] # 30-59D特征
cut_open = [ninf, 1, 2, 3, 5, pinf] # OpenL特征
cut_ninety = [ninf, 0, 1, 3, 5, pinf] # 90D特征
cut_re = [ninf, 0, 1, 2, 3, pinf] # RealEstate特征
cut_sixty = [ninf, 0, 1, 3, pinf] # 60-89D特征
cut_dpt = [ninf, 0, 1, 2, 3, 5, pinf] # Dependents特征
cut_new2 = [ninf, 414, 1209, 2518, pinf]
# 计算统计值、woe 和iv
thirtyDf, woe_thirty, iv_thirty = custom_bins(self.data.Label, self.data['30-59D'], cut_thirty) # 30-59D特征
ninetyDf, woe_ninety, iv_ninety = custom_bins(self.data.Label, self.data['90D'], cut_ninety) # 90D特征
sixtyDf, woe_sixty, iv_sixty = custom_bins(self.data.Label, self.data['60-89D'], cut_sixty) # 60-89D特征
ageDf, cut_age, woe_age, iv_age = optimal_bins(self.data.Label, self.data.Age, n=10)
rrDf, cut_rr, woe_rr, iv_rr = optimal_bins(self.data.Label, self.data.RevolvingRatio, n=10)
n_data = pd.DataFrame()
n_data['90D'] = pd.cut(self.data['90D'], bins=cut_ninety, labels=woe_ninety) # 90D特征
n_data['RevolvingRatio'] = pd.cut(self.data['RevolvingRatio'], bins=cut_rr,
labels=woe_rr) # RevolvingRatio特征
n_data['30-59D'] = pd.cut(self.data['30-59D'], bins=cut_thirty, labels=woe_thirty) # 30-59D特征
n_data['60-89D'] = pd.cut(self.data['60-89D'], bins=cut_sixty, labels=woe_sixty) # 60-89D特征
n_data['Age'] = pd.cut(self.data['Age'], bins=cut_age, labels=woe_age) # Age特征
n_data['Label'] = self.data[['Label']] # 将标签传递
# 特征选择
# print(n_data.tail(10))
X = n_data.iloc[:, 1:] # 特征
y = n_data.iloc[:, 0] # 目标变量
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0) # 训练集:测试集 = 7:3
# 计算分值
# 计算基础分
B = 20 / math.log(2)
A = 600 + B * math.log(1 / 20)
BaseScore = round(A - B * self.clf1.intercept_[0], 0)
# print("评分卡的基础分为:", BaseScore)
# 每个特征列分值计算函数
def score(coef, woe):
"""
:param coef: 特征在逻辑回归模型中对应的参数
:param woe: 特征的WOE编码取值列表
:return: 分值
"""
scores = []
for x in woe:
score = round(-B * coef * x, 0)
scores.append(score)
return scores
# 不同特征各个区间对应的分值
score_ninety = score(self.clf1.coef_[0][0], woe_ninety) # 90D特征
# print("90D特征各个区间对应的分值为:", score_ninety)
score_rr = score(self.clf1.coef_[0][1], woe_rr) # RevolvingRatio特征
# print("RevolvingRatio特征各个区间对应的分值为:", score_rr)
score_thirty = score(self.clf1.coef_[0][2], woe_thirty) # 30-59D特征
# print("30-59D特征各个区间对应的分值为:", score_thirty)
score_sixty = score(self.clf1.coef_[0][3], woe_sixty) # 60-89D特征
# print("60-89D特征各个区间对应的分值为:", score_sixty)
score_age = score(self.clf1.coef_[0][4], woe_age) # Age特征
# print("Age特征各个区间对应的分值为:", score_age)
# 测试集样本转化为分值形式
cardDf = X_test.copy() # 不改变原测试集,在副本上操作
# 将特征值转化为分值
n_data['90D'] = n_data['90D'].replace(woe_ninety, score_ninety)
n_data['RevolvingRatio'] = n_data['RevolvingRatio'].replace(woe_rr, score_rr)
n_data['30-59D'] = n_data['30-59D'].replace(woe_thirty, score_thirty)
n_data['60-89D'] = n_data['60-89D'].replace(woe_sixty, score_sixty)
n_data['Age'] = n_data['Age'].replace(woe_age, score_age)
# print(n_data.head(10)) # 观察此时的测试集副本
# 计算每个样本的分值
n_data['Score'] = BaseScore + n_data['90D'] + n_data['RevolvingRatio'] + \
n_data['30-59D'] + n_data['60-89D'] + n_data['Age']
# print(n_data.head(10))
return int(n_data.tail(1).Score)
def get_data(self):
"""
导入数据操作,首先需要进行数据导入以及预处理
:return: None
"""
self.data = pd.read_csv('datasets/cs-training.csv')
self.data = self.data.iloc[:, 1:] # 舍弃Unnamed: 0列
self.data.columns = ['Label', 'RevolvingRatio', 'Age', '30-59D', 'DebtRatio', 'MonthlyIncome',
'OpenL', '90D', 'RealEstate', '60-89D', 'Dependents'] # 列重命名
# 用MonthlyIncome特征值非空的样本构建训练集,MonthlyIncome特征值缺失的样本构建测试集
rfDf = self.data.iloc[:, [5, 1, 2, 3, 4, 6, 7, 8, 9]] # 原始数据集中的无缺失数值特征
rfDf_train = rfDf.loc[rfDf['MonthlyIncome'].notnull()]
rfDf_test = rfDf.loc[rfDf['MonthlyIncome'].isnull()]
# 划分训练数据和标签(label)
X = rfDf_train.iloc[:, 1:]
y = rfDf_train.iloc[:, 0]
# 训练过程
rf = RandomForestRegressor(random_state=0, n_estimators=200, max_depth=3, n_jobs=-1) # 这里重在理解过程,因此仅简单选取部分参数
rf.fit(X, y)
# 预测过程
pred = rf.predict(rfDf_test.iloc[:, 1:]).round(0) # 预测值 | s'].describe())
# 处理百分比类异常值
# RevolvingRatio特征
ruulDf = data[data['RevolvingRatio'] <= 1] # 去掉高于1的部分
ruul_mean = ruulDf['RevolvingRatio'].mean() # 计算均值
data.loc[data['RevolvingRatio'] > 1, 'RevolvingRatio'] = ruul_mean # 均值替代
# DebtRatio特征
ruulDf = data[data['DebtRatio'] <= 1] # 去掉高于1的部分
ruul_mean = ruulDf['DebtRatio'].mean() # 计算均值
data.loc[data['DebtRatio'] > 1, 'DebtRatio'] = ruul_mean # 均值替代
# 处理逾期特征异常值
data.drop(data[data['30-59D'] > 80].index, inplace=True) # 根据索引删除样本
# print("剩下的样本数为:", data.shape[0])
# 处理年龄特征异常值
data.drop(data[data['Age'] == 0].index, inplace=True) # 根据索引删除样本
data.drop(data[data['Age'] > 96].index, inplace=True)
# print("剩下的样本数为:", data.shape[0])
# 构建新特征
# IncAvg:家庭中每个人分摊的平均月收入
data['IncAvg'] = data['MonthlyIncome'] / (data['Dependents'] + 1)
# MonthlyDept:每月的债务
data['MonthlyDept'] = data['MonthlyIncome'] * data['DebtRatio']
# DeptAvg:家庭中平均每个人分摊每月应还债务
data['DeptAvg'] = data['MonthlyDept'] / (data['Dependents'] + 1)
data[['IncAvg', 'MonthlyDept', 'DeptAvg']].head(10) # 查看新特征
rrDf, cut_rr, woe_rr, iv_rr = optimal_bins(data.Label, data.RevolvingRatio, n=10)
# print(rrDf)
# print(cut_rr)
# MonthlyIncome特征
miDf, cut_mi, woe_mi, iv_mi = optimal_bins(data.Label, data.MonthlyIncome, n=10)
# print("MonthlyIncome特征分箱情况:", cut_mi)
# Age特征
ageDf, cut_age, woe_age, iv_age = optimal_bins(data.Label, data.Age, n=10)
# print("Age特征分箱情况:", cut_age)
# DebtRatio特征
drDf, cut_dr, woe_dr, iv_dr = optimal_bins(data.Label, data.DebtRatio, 10)
# print("DebtRatio特征分箱情况:", cut_dr)
# 自定义分箱区间如下
# 原始特征
ninf = float('-inf')
pinf = float('inf')
cut_thirty = [ninf, 0, 1, 3, 5, pinf] # 30-59D特征
cut_open = [ninf, 1, 2, 3, 5, pinf] # OpenL特征
cut_ninety = [ninf, 0, 1, 3, 5, pinf] # 90D特征
cut_re = [ninf, 0, 1, 2, 3, pinf] # RealEstate特征
cut_sixty = [ninf, 0, 1, 3, pinf] # 60-89D特征
cut_dpt = [ninf, 0, 1, 2, 3, 5, pinf] # Dependents特征
# 新特征
cut_new2 = [ninf, 414, 1209, 2518, pinf] # 新特征MonthlyDept自定义分箱
# 计算统计值、woe和iv
thirtyDf, woe_thirty, iv_thirty = custom_bins(data.Label, data['30-59D'], cut_thirty) # 30-59D特征
openDf, woe_open, iv_open = custom_bins(data.Label, data.OpenL, cut_open) # OpenL特征
ninetyDf, woe_ninety, iv_ninety = custom_bins(data.Label, data['90D'], cut_ninety) # 90D特征
reDf, woe_re, iv_re = custom_bins(data.Label, data.RealEstate, cut_re) # RealEstate特征
sixtyDf, woe_sixty, iv_sixty = custom_bins(data.Label, data['60-89D'], cut_sixty) # 60-89D特征
dptDf, woe_dpt, iv_dpt = custom_bins(data.Label, data.Dependents, cut_dpt) # Dependents特征
newDf2, woe_new2, iv_new2 = custom_bins(data.Label, data.MonthlyDept, cut_new2) # 新特征MonthlyDept
# WOE编码
data['90D'] = pd.cut(data['90D'], bins=cut_ninety, labels=woe_ninety) # 90D特征
data['RevolvingRatio'] = pd.cut(data['RevolvingRatio'], bins=cut_rr, labels=woe_rr) # RevolvingRatio特征
data['30-59D'] = pd.cut(data['30-59D'], bins=cut_thirty, labels=woe_thirty) # 30-59D特征
data['60-89D'] = pd.cut(data['60-89D'], bins=cut_sixty, labels=woe_sixty) # 60-89D特征
data['Age'] = pd.cut(data['Age'], bins=cut_age, labels=woe_age) # Age特征
# 特征选择
data = data[['Label', '90D', 'RevolvingRatio', '30-59D', '60-89D', 'Age']]
# print(data.head(10)) # 此时的数据集
X = data.iloc[:, 1:] # 特征
y = data.iloc[:, 0] # 目标变量
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0) # 训练集:测试集 = 7:3
'''
LogisticRegression一些重要参数的默认值:
penalty:正则化类型,默认值'l2',当solver='liblinear'时还可以选择'l1'
solver:最优化方法,默认值'liblinear',还可以选择'newton-cg', 'lbfgs', 'sag', 'saga'
tol:迭代终止的阈值,默认值为1e-4
max_iter:最大迭代次数,默认值100
(...等其他参数)
'''
model1 = LogisticRegression() # 首先全部采用默认值进行训练
clf1 = model1.fit(X_train, y_train) # 模型训练
# 记录训练后的模型
self.clf1 = clf1
| 四舍五入并保留一位小数点
self.data.loc[(self.data['MonthlyIncome'].isnull()), 'MonthlyIncome'] = pred # 填补缺失值
# Dependents特征处理
self.data['Dependents'].fillna(self.data['Dependents'].mode()[0], inplace=True) # 这里采用众数填充
# 处理百分比类异常值
# RevolvingRatio特征
ruulDf = self.data[self.data['RevolvingRatio'] <= 1] # 去掉高于1的部分
ruul_mean = ruulDf['RevolvingRatio'].mean() # 计算均值
self.data.loc[self.data['RevolvingRatio'] > 1, 'RevolvingRatio'] = ruul_mean # 均值替代
# DebtRatio特征
ruulDf = self.data[self.data['DebtRatio'] <= 1] # 去掉高于1的部分
ruul_mean = ruulDf['DebtRatio'].mean() # 计算均值
self.data.loc[self.data['DebtRatio'] > 1, 'DebtRatio'] = ruul_mean # 均值替代
# 处理逾期特征异常值
self.data.drop(self.data[self.data['30-59D'] > 80].index, inplace=True) # 根据索引删除样本
# 处理年龄特征异常值
self.data.drop(self.data[self.data['Age'] == 0].index, inplace=True) # 根据索引删除样本
self.data.drop(self.data[self.data['Age'] > 96].index, inplace=True)
def train(self):
warnings.filterwarnings('ignore') # 忽略弹出的warnings
data = pd.read_csv('datasets/cs-training.csv')
data = data.iloc[:, 1:] # 舍弃Unnamed: 0列
data.columns = ['Label', 'RevolvingRatio', 'Age', '30-59D', 'DebtRatio', 'MonthlyIncome',
'OpenL', '90D', 'RealEstate', '60-89D', 'Dependents'] # 列重命名
# print(data.head(10)) # 观察整理后数据集
# MonthlyIncome特征处理
# 用MonthlyIncome特征值非空的样本构建训练集,MonthlyIncome特征值缺失的样本构建测试集
rfDf = data.iloc[:, [5, 1, 2, 3, 4, 6, 7, 8, 9]] # 原始数据集中的无缺失数值特征
rfDf_train = rfDf.loc[rfDf['MonthlyIncome'].notnull()]
rfDf_test = rfDf.loc[rfDf['MonthlyIncome'].isnull()]
# 划分训练数据和标签(label)
X = rfDf_train.iloc[:, 1:]
y = rfDf_train.iloc[:, 0]
# 训练过程
rf = RandomForestRegressor(random_state=0, n_estimators=200, max_depth=3, n_jobs=-1) # 这里重在理解过程,因此仅简单选取部分参数
rf.fit(X, y)
# 预测过程
pred = rf.predict(rfDf_test.iloc[:, 1:]).round(0) # 预测值四舍五入并保留一位小数点
data.loc[(data['MonthlyIncome'].isnull()), 'MonthlyIncome'] = pred # 填补缺失值
# print("此时的MonthlyIncome特征统计指标:\n")
# print(rfDf['MonthlyIncome'].describe())
# Dependents特征处理
data['Dependents'].fillna(data['Dependents'].mode()[0], inplace=True) # 这里采用众数填充
# print("此时Dependents特征统计指标:\n")
# print(data['Dependent | identifier_body |
ledger.go | package ws
import (
"fmt"
"net/http"
"rentroll/rlib"
"time"
)
// LedgerGrid is a structure specifically for the UI Grid.
type LedgerGrid struct {
Recid int64 `json:"recid"` // this is to support the w2ui form
LID int64
GLNumber string
Name string
Active string
AllowPost string
Balance float64
LMDate string
LMAmount float64
LMState string
}
// SearchLedgersResponse is a response string to the search request for receipts
type SearchLedgersResponse struct {
Status string `json:"status"`
Total int64 `json:"total"`
Records []LedgerGrid `json:"records"`
}
// GetLedgerResponse is the response to a GetAR request
type GetLedgerResponse struct {
Status string `json:"status"`
Record ARSendForm `json:"record"`
}
// SvcSearchHandlerLedger generates a report of all ARs defined business d.BID
// wsdoc {
// @Title Search Account Rules
// @URL /v1/ars/:BUI
// @Method POST
// @Synopsis Search Account Rules
// @Description Search all ARs and return those that match the Search Logic.
// @Desc By default, the search is made for receipts from "today" to 31 days prior.
// @Input WebGridSearchRequest
// @Response SearchLedgersResponse
// wsdoc }
func SvcSearchHandlerLedger(w http.ResponseWriter, r *http.Request, d *ServiceData) {
funcname := "SvcSearchHandlerLedger"
fmt.Printf("Entered %s\n", funcname)
switch d.wsSearchReq.Cmd {
case "get":
getLedgerGrid(w, r, d)
break
default:
err := fmt.Errorf("Unhandled command: %s", d.wsSearchReq.Cmd)
SvcGridErrorReturn(w, err, funcname)
return
}
}
// GetAccountBalance returns the balance of the account at time dt
//
func GetAccountBalance(bid, lid int64, dt *time.Time) (float64, rlib.LedgerMarker) {
lm := rlib.GetRALedgerMarkerOnOrBeforeDeprecated(bid, lid, 0, dt) // find nearest ledgermarker, use it as a starting point
bal, _ := rlib.GetAccountActivity(bid, lid, &lm.Dt, dt)
return bal, lm
}
// LMStates is an array of strings describing the meaning of the states a Ledger Marker can have.
var LMStates = []string{
"open", "closed", "locked", "initial",
}
// getLedgerGrid returns a list of ARs for w2ui grid
// wsdoc {
// @Title list ARs
// @URL /v1/ars/:BUI
// @Method GET
// @Synopsis Get Account Rules
// @Description Get all ARs associated with BID
// @Desc By default, the search is made for receipts from "today" to 31 days prior.
// @Input WebGridSearchRequest
// @Response SearchLedgersResponse
// wsdoc }
func getLedgerGrid(w http.ResponseWriter, r *http.Request, d *ServiceData) |
// // SvcFormHandlerAR formats a complete data record for a person suitable for use with the w2ui Form
// // For this call, we expect the URI to contain the BID and the ARID as follows:
// // 0 1 2 3
// // uri /v1/receipt/BUI/ARID
// // The server command can be:
// // get
// // save
// // delete
// //-----------------------------------------------------------------------------------
// func SvcFormHandlerAR(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "SvcFormHandlerAR"
// err error
// )
// fmt.Printf("Entered %s\n", funcname)
// if d.ARID, err = SvcExtractIDFromURI(r.RequestURI, "ARID", 3, w); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// fmt.Printf("Request: %s: BID = %d, ID = %d\n", d.wsSearchReq.Cmd, d.BID, d.ARID)
// switch d.wsSearchReq.Cmd {
// case "get":
// getARForm(w, r, d)
// break
// case "save":
// saveARForm(w, r, d)
// break
// case "delete":
// deleteARForm(w, r, d)
// break
// default:
// err = fmt.Errorf("Unhandled command: %s", d.wsSearchReq.Cmd)
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// }
// // saveARForm returns the requested receipt
// // wsdoc {
// // @Title Save AR
// // @URL /v1/ars/:BUI/:ARID
// // @Method GET
// // @Synopsis Save a AR
// // @Desc This service saves a AR. If :ARID exists, it will
// // @Desc be updated with the information supplied. All fields must
// // @Desc be supplied. If ARID is 0, then a new receipt is created.
// // @Input SaveARInput
// // @Response SvcStatusResponse
// // wsdoc }
// func saveARForm(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "saveARForm"
// foo SaveARInput
// bar SaveAROther
// a rlib.AR
// err error
// )
// fmt.Printf("Entered %s\n", funcname)
// fmt.Printf("record data = %s\n", d.data)
// // get data
// data := []byte(d.data)
// if err := json.Unmarshal(data, &foo); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// if err := json.Unmarshal(data, &bar); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// // migrate foo.Record data to a struct's fields
// rlib.MigrateStructVals(&foo.Record, &a) // the variables that don't need special handling
// fmt.Printf("saveAR - first migrate: a = %#v\n", a)
// var ok bool
// a.BID, ok = rlib.RRdb.BUDlist[bar.Record.BID.ID]
// if !ok {
// e := fmt.Errorf("%s: Could not map BID value: %s", funcname, bar.Record.BID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.CreditLID, ok = rlib.StringToInt64(bar.Record.CreditLID.ID) // CreditLID has drop list
// if !ok {
// e := fmt.Errorf("%s: invalid CreditLID value: %s", funcname, bar.Record.CreditLID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.DebitLID, ok = rlib.StringToInt64(bar.Record.DebitLID.ID) // DebitLID has drop list
// if !ok {
// e := fmt.Errorf("%s: invalid DebitLID value: %s", funcname, bar.Record.DebitLID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.ARType, ok = rlib.StringToInt64(bar.Record.ARType.ID) // ArType has drop list
// if !ok {
// e := fmt.Errorf("%s: Invalid ARType value: %s", funcname, bar.Record.ARType.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// fmt.Printf("saveAR - second migrate: a = %#v\n", a)
// // get PriorToRAStart and PriorToRAStop values and accordingly get RARequired field value
// formBoolMap := [2]bool{foo.Record.PriorToRAStart, foo.Record.PriorToRAStop}
// for raReq, boolMap := range raRequiredMap {
// if boolMap == formBoolMap {
// a.RARequired = int64(raReq)
// break
// }
// }
// // save or update
// if a.ARID == 0 && d.ARID == 0 {
// // This is a new AR
// fmt.Printf(">>>> NEW RECEIPT IS BEING ADDED\n")
// _, err = rlib.InsertAR(&a)
// } else {
// // update existing record
// fmt.Printf("Updating existing AR: %d\n", a.ARID)
// err = rlib.UpdateAR(&a)
// }
// if err != nil {
// e := fmt.Errorf("Error saving receipt (ARID=%d\n: %s", d.ARID, err.Error())
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// SvcWriteSuccessResponseWithID(w, a.ARID)
// }
// // which fields needs to be fetched for SQL query for receipts grid
// var getARQuerySelectFields = rlib.SelectQueryFields{
// "AR.ARID",
// "AR.Name",
// "AR.ARType",
// "AR.DebitLID",
// "debitQuery.Name as DebitLedgerName",
// "AR.CreditLID",
// "creditQuery.Name as CreditLedgerName",
// "AR.Description",
// "AR.DtStart",
// "AR.DtStop",
// "AR.RARequired",
// }
// // for what RARequired value, prior and after value are
// var raRequiredMap = map[int][2]bool{
// 0: {false, false}, // during
// 1: {true, false}, // prior or during
// 2: {false, true}, // after or during
// 3: {true, true}, // after or during or prior
// }
// // getARForm returns the requested ars
// // wsdoc {
// // @Title Get AR
// // @URL /v1/ars/:BUI/:ARID
// // @Method GET
// // @Synopsis Get information on a AR
// // @Description Return all fields for ars :ARID
// // @Input WebGridSearchRequest
// // @Response GetLedgerResponse
// // wsdoc }
// func getARForm(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "getARForm"
// g GetLedgerResponse
// err error
// )
// fmt.Printf("entered %s\n", funcname)
// arQuery := `
// SELECT
// {{.SelectClause}}
// FROM AR
// INNER JOIN GLAccount as debitQuery on AR.DebitLID=debitQuery.LID
// INNER JOIN GLAccount as creditQuery on AR.CreditLID=creditQuery.LID
// WHERE {{.WhereClause}};`
// qc := rlib.QueryClause{
// "SelectClause": strings.Join(getARQuerySelectFields, ","),
// "WhereClause": fmt.Sprintf("AR.BID=%d AND AR.ARID=%d", d.BID, d.ARID),
// }
// // get formatted query with substitution of select, where, order clause
// q := rlib.RenderSQLQuery(arQuery, qc)
// fmt.Printf("db query = %s\n", q)
// // execute the query
// rows, err := rlib.RRdb.Dbrr.Query(q)
// if err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// defer rows.Close()
// for rows.Next() {
// var gg ARSendForm
// gg.BID = getBUDFromBIDList(d.BID)
// err = rows.Scan(&gg.ARID, &gg.Name, &gg.ARType, &gg.DebitLID, &gg.DebitLedgerName, &gg.CreditLID, &gg.CreditLedgerName, &gg.Description, &gg.DtStart, &gg.DtStop, &gg.raRequired)
// if err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// // according to RARequired map, fill out PriorToRAStart, PriorToRAStop values
// raReqMappedVal := raRequiredMap[gg.raRequired]
// gg.PriorToRAStart = raReqMappedVal[0]
// gg.PriorToRAStop = raReqMappedVal[1]
// g.Record = gg
// }
// // error check
// err = rows.Err()
// if err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// g.Status = "success"
// w.Header().Set("Content-Type", "application/json")
// SvcWriteResponse(&g, w)
// }
// // deleteAR request delete AR from database
// // wsdoc {
// // @Title Delete AR
// // @URL /v1/ars/:BUI/:ARID
// // @Method DELETE
// // @Synopsis Delete record for a AR
// // @Description Delete record from database ars :ARID
// // @Input WebGridSearchRequest
// // @Response SvcWriteSuccessResponse
// // wsdoc }
// func deleteARForm(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "deleteARForm"
// del DeleteARForm
// )
// fmt.Printf("Entered %s\n", funcname)
// fmt.Printf("record data = %s\n", d.data)
// if err := json.Unmarshal([]byte(d.data), &del); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// if err := rlib.DeleteAR(del.ARID); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// SvcWriteSuccessResponse(w)
// }
| {
funcname := "getLedgerGrid"
var (
err error
g SearchLedgersResponse
)
rows, err := rlib.RRdb.Prepstmt.GetLedgersForGrid.Query(d.BID, d.wsSearchReq.Limit, d.wsSearchReq.Offset)
if err != nil {
fmt.Printf("%s: Error from DB Query: %s\n", funcname, err.Error())
SvcGridErrorReturn(w, err, funcname)
return
}
defer rows.Close()
dt := time.Time(d.wsSearchReq.SearchDtStart)
i := int64(d.wsSearchReq.Offset)
for rows.Next() {
var acct rlib.GLAccount
rlib.ReadGLAccounts(rows, &acct)
active := "active"
if 1 == acct.Status {
active = "inactive"
}
posts := "yes"
if acct.AllowPost == 0 {
posts = "no"
}
bal, lm := GetAccountBalance(acct.BID, acct.LID, &dt)
state := "??"
j := int(lm.State)
if 0 <= j && j <= 3 {
state = LMStates[j]
}
var lg = LedgerGrid{
Recid: i,
LID: acct.LID,
GLNumber: acct.GLNumber,
Name: acct.Name,
Active: active,
AllowPost: posts,
Balance: bal,
LMDate: lm.Dt.In(rlib.RRdb.Zone).Format("Jan _2, 2006 15:04:05 MST"),
LMAmount: lm.Balance,
LMState: state,
}
g.Records = append(g.Records, lg)
i++
}
// error check
err = rows.Err()
if err != nil {
SvcGridErrorReturn(w, err, funcname)
return
}
g.Status = "success"
g.Total = int64(len(g.Records))
w.Header().Set("Content-Type", "application/json")
SvcWriteResponse(&g, w)
} | identifier_body |
ledger.go | package ws
import (
"fmt"
"net/http"
"rentroll/rlib"
"time"
)
// LedgerGrid is a structure specifically for the UI Grid.
type LedgerGrid struct {
Recid int64 `json:"recid"` // this is to support the w2ui form
LID int64
GLNumber string
Name string
Active string
AllowPost string
Balance float64
LMDate string
LMAmount float64
LMState string
}
// SearchLedgersResponse is a response string to the search request for receipts
type SearchLedgersResponse struct {
Status string `json:"status"`
Total int64 `json:"total"`
Records []LedgerGrid `json:"records"`
}
// GetLedgerResponse is the response to a GetAR request
type GetLedgerResponse struct {
Status string `json:"status"`
Record ARSendForm `json:"record"`
}
// SvcSearchHandlerLedger generates a report of all ARs defined business d.BID
// wsdoc {
// @Title Search Account Rules
// @URL /v1/ars/:BUI
// @Method POST
// @Synopsis Search Account Rules
// @Description Search all ARs and return those that match the Search Logic.
// @Desc By default, the search is made for receipts from "today" to 31 days prior.
// @Input WebGridSearchRequest
// @Response SearchLedgersResponse
// wsdoc }
func SvcSearchHandlerLedger(w http.ResponseWriter, r *http.Request, d *ServiceData) {
funcname := "SvcSearchHandlerLedger"
fmt.Printf("Entered %s\n", funcname)
switch d.wsSearchReq.Cmd {
case "get":
getLedgerGrid(w, r, d)
break
default:
err := fmt.Errorf("Unhandled command: %s", d.wsSearchReq.Cmd)
SvcGridErrorReturn(w, err, funcname)
return
}
}
// GetAccountBalance returns the balance of the account at time dt
//
func GetAccountBalance(bid, lid int64, dt *time.Time) (float64, rlib.LedgerMarker) {
lm := rlib.GetRALedgerMarkerOnOrBeforeDeprecated(bid, lid, 0, dt) // find nearest ledgermarker, use it as a starting point
bal, _ := rlib.GetAccountActivity(bid, lid, &lm.Dt, dt)
return bal, lm
}
// LMStates is an array of strings describing the meaning of the states a Ledger Marker can have.
var LMStates = []string{
"open", "closed", "locked", "initial",
}
// getLedgerGrid returns a list of ARs for w2ui grid
// wsdoc {
// @Title list ARs
// @URL /v1/ars/:BUI
// @Method GET
// @Synopsis Get Account Rules
// @Description Get all ARs associated with BID
// @Desc By default, the search is made for receipts from "today" to 31 days prior.
// @Input WebGridSearchRequest
// @Response SearchLedgersResponse
// wsdoc }
func getLedgerGrid(w http.ResponseWriter, r *http.Request, d *ServiceData) {
funcname := "getLedgerGrid"
var (
err error
g SearchLedgersResponse
)
rows, err := rlib.RRdb.Prepstmt.GetLedgersForGrid.Query(d.BID, d.wsSearchReq.Limit, d.wsSearchReq.Offset)
if err != nil {
fmt.Printf("%s: Error from DB Query: %s\n", funcname, err.Error())
SvcGridErrorReturn(w, err, funcname)
return
}
defer rows.Close()
dt := time.Time(d.wsSearchReq.SearchDtStart)
i := int64(d.wsSearchReq.Offset)
for rows.Next() {
var acct rlib.GLAccount
rlib.ReadGLAccounts(rows, &acct)
active := "active"
if 1 == acct.Status {
active = "inactive"
}
posts := "yes"
if acct.AllowPost == 0 {
posts = "no"
}
bal, lm := GetAccountBalance(acct.BID, acct.LID, &dt)
state := "??"
j := int(lm.State)
if 0 <= j && j <= 3 {
state = LMStates[j]
}
var lg = LedgerGrid{
Recid: i,
LID: acct.LID,
GLNumber: acct.GLNumber,
Name: acct.Name,
Active: active,
AllowPost: posts,
Balance: bal,
LMDate: lm.Dt.In(rlib.RRdb.Zone).Format("Jan _2, 2006 15:04:05 MST"),
LMAmount: lm.Balance,
LMState: state,
}
g.Records = append(g.Records, lg)
i++
}
// error check
err = rows.Err()
if err != nil {
SvcGridErrorReturn(w, err, funcname)
return
}
g.Status = "success"
g.Total = int64(len(g.Records))
w.Header().Set("Content-Type", "application/json")
SvcWriteResponse(&g, w)
}
// // SvcFormHandlerAR formats a complete data record for a person suitable for use with the w2ui Form
// // For this call, we expect the URI to contain the BID and the ARID as follows:
// // 0 1 2 3
// // uri /v1/receipt/BUI/ARID
// // The server command can be:
// // get
// // save
// // delete
// //-----------------------------------------------------------------------------------
// func SvcFormHandlerAR(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "SvcFormHandlerAR"
// err error
// )
// fmt.Printf("Entered %s\n", funcname)
// if d.ARID, err = SvcExtractIDFromURI(r.RequestURI, "ARID", 3, w); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// fmt.Printf("Request: %s: BID = %d, ID = %d\n", d.wsSearchReq.Cmd, d.BID, d.ARID)
// switch d.wsSearchReq.Cmd {
// case "get":
// getARForm(w, r, d)
// break
// case "save":
// saveARForm(w, r, d)
// break
// case "delete":
// deleteARForm(w, r, d)
// break
// default:
// err = fmt.Errorf("Unhandled command: %s", d.wsSearchReq.Cmd)
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// }
// // saveARForm returns the requested receipt
// // wsdoc {
// // @Title Save AR
// // @URL /v1/ars/:BUI/:ARID
// // @Method GET
// // @Synopsis Save a AR
// // @Desc This service saves a AR. If :ARID exists, it will
// // @Desc be updated with the information supplied. All fields must
// // @Desc be supplied. If ARID is 0, then a new receipt is created.
// // @Input SaveARInput
// // @Response SvcStatusResponse
// // wsdoc }
// func saveARForm(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "saveARForm"
// foo SaveARInput
// bar SaveAROther
// a rlib.AR
// err error
// )
// fmt.Printf("Entered %s\n", funcname)
// fmt.Printf("record data = %s\n", d.data)
// // get data
// data := []byte(d.data)
// if err := json.Unmarshal(data, &foo); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// if err := json.Unmarshal(data, &bar); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// // migrate foo.Record data to a struct's fields
// rlib.MigrateStructVals(&foo.Record, &a) // the variables that don't need special handling
// fmt.Printf("saveAR - first migrate: a = %#v\n", a)
// var ok bool
// a.BID, ok = rlib.RRdb.BUDlist[bar.Record.BID.ID]
// if !ok {
// e := fmt.Errorf("%s: Could not map BID value: %s", funcname, bar.Record.BID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.CreditLID, ok = rlib.StringToInt64(bar.Record.CreditLID.ID) // CreditLID has drop list
// if !ok {
// e := fmt.Errorf("%s: invalid CreditLID value: %s", funcname, bar.Record.CreditLID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.DebitLID, ok = rlib.StringToInt64(bar.Record.DebitLID.ID) // DebitLID has drop list
// if !ok {
// e := fmt.Errorf("%s: invalid DebitLID value: %s", funcname, bar.Record.DebitLID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.ARType, ok = rlib.StringToInt64(bar.Record.ARType.ID) // ArType has drop list
// if !ok {
// e := fmt.Errorf("%s: Invalid ARType value: %s", funcname, bar.Record.ARType.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// fmt.Printf("saveAR - second migrate: a = %#v\n", a)
// // get PriorToRAStart and PriorToRAStop values and accordingly get RARequired field value
// formBoolMap := [2]bool{foo.Record.PriorToRAStart, foo.Record.PriorToRAStop}
// for raReq, boolMap := range raRequiredMap {
// if boolMap == formBoolMap {
// a.RARequired = int64(raReq)
// break
// }
// }
// // save or update
// if a.ARID == 0 && d.ARID == 0 {
// // This is a new AR
// fmt.Printf(">>>> NEW RECEIPT IS BEING ADDED\n")
// _, err = rlib.InsertAR(&a)
// } else {
// // update existing record
// fmt.Printf("Updating existing AR: %d\n", a.ARID)
// err = rlib.UpdateAR(&a)
// }
// if err != nil {
// e := fmt.Errorf("Error saving receipt (ARID=%d\n: %s", d.ARID, err.Error())
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// SvcWriteSuccessResponseWithID(w, a.ARID)
// }
// // which fields needs to be fetched for SQL query for receipts grid
// var getARQuerySelectFields = rlib.SelectQueryFields{ | // "AR.DebitLID",
// "debitQuery.Name as DebitLedgerName",
// "AR.CreditLID",
// "creditQuery.Name as CreditLedgerName",
// "AR.Description",
// "AR.DtStart",
// "AR.DtStop",
// "AR.RARequired",
// }
// // for what RARequired value, prior and after value are
// var raRequiredMap = map[int][2]bool{
// 0: {false, false}, // during
// 1: {true, false}, // prior or during
// 2: {false, true}, // after or during
// 3: {true, true}, // after or during or prior
// }
// // getARForm returns the requested ars
// // wsdoc {
// // @Title Get AR
// // @URL /v1/ars/:BUI/:ARID
// // @Method GET
// // @Synopsis Get information on a AR
// // @Description Return all fields for ars :ARID
// // @Input WebGridSearchRequest
// // @Response GetLedgerResponse
// // wsdoc }
// func getARForm(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "getARForm"
// g GetLedgerResponse
// err error
// )
// fmt.Printf("entered %s\n", funcname)
// arQuery := `
// SELECT
// {{.SelectClause}}
// FROM AR
// INNER JOIN GLAccount as debitQuery on AR.DebitLID=debitQuery.LID
// INNER JOIN GLAccount as creditQuery on AR.CreditLID=creditQuery.LID
// WHERE {{.WhereClause}};`
// qc := rlib.QueryClause{
// "SelectClause": strings.Join(getARQuerySelectFields, ","),
// "WhereClause": fmt.Sprintf("AR.BID=%d AND AR.ARID=%d", d.BID, d.ARID),
// }
// // get formatted query with substitution of select, where, order clause
// q := rlib.RenderSQLQuery(arQuery, qc)
// fmt.Printf("db query = %s\n", q)
// // execute the query
// rows, err := rlib.RRdb.Dbrr.Query(q)
// if err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// defer rows.Close()
// for rows.Next() {
// var gg ARSendForm
// gg.BID = getBUDFromBIDList(d.BID)
// err = rows.Scan(&gg.ARID, &gg.Name, &gg.ARType, &gg.DebitLID, &gg.DebitLedgerName, &gg.CreditLID, &gg.CreditLedgerName, &gg.Description, &gg.DtStart, &gg.DtStop, &gg.raRequired)
// if err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// // according to RARequired map, fill out PriorToRAStart, PriorToRAStop values
// raReqMappedVal := raRequiredMap[gg.raRequired]
// gg.PriorToRAStart = raReqMappedVal[0]
// gg.PriorToRAStop = raReqMappedVal[1]
// g.Record = gg
// }
// // error check
// err = rows.Err()
// if err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// g.Status = "success"
// w.Header().Set("Content-Type", "application/json")
// SvcWriteResponse(&g, w)
// }
// // deleteAR request delete AR from database
// // wsdoc {
// // @Title Delete AR
// // @URL /v1/ars/:BUI/:ARID
// // @Method DELETE
// // @Synopsis Delete record for a AR
// // @Description Delete record from database ars :ARID
// // @Input WebGridSearchRequest
// // @Response SvcWriteSuccessResponse
// // wsdoc }
// func deleteARForm(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "deleteARForm"
// del DeleteARForm
// )
// fmt.Printf("Entered %s\n", funcname)
// fmt.Printf("record data = %s\n", d.data)
// if err := json.Unmarshal([]byte(d.data), &del); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// if err := rlib.DeleteAR(del.ARID); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// SvcWriteSuccessResponse(w)
// } | // "AR.ARID",
// "AR.Name",
// "AR.ARType", | random_line_split |
ledger.go | package ws
import (
"fmt"
"net/http"
"rentroll/rlib"
"time"
)
// LedgerGrid is a structure specifically for the UI Grid.
type LedgerGrid struct {
Recid int64 `json:"recid"` // this is to support the w2ui form
LID int64
GLNumber string
Name string
Active string
AllowPost string
Balance float64
LMDate string
LMAmount float64
LMState string
}
// SearchLedgersResponse is a response string to the search request for receipts
type SearchLedgersResponse struct {
Status string `json:"status"`
Total int64 `json:"total"`
Records []LedgerGrid `json:"records"`
}
// GetLedgerResponse is the response to a GetAR request
type GetLedgerResponse struct {
Status string `json:"status"`
Record ARSendForm `json:"record"`
}
// SvcSearchHandlerLedger generates a report of all ARs defined business d.BID
// wsdoc {
// @Title Search Account Rules
// @URL /v1/ars/:BUI
// @Method POST
// @Synopsis Search Account Rules
// @Description Search all ARs and return those that match the Search Logic.
// @Desc By default, the search is made for receipts from "today" to 31 days prior.
// @Input WebGridSearchRequest
// @Response SearchLedgersResponse
// wsdoc }
func SvcSearchHandlerLedger(w http.ResponseWriter, r *http.Request, d *ServiceData) {
funcname := "SvcSearchHandlerLedger"
fmt.Printf("Entered %s\n", funcname)
switch d.wsSearchReq.Cmd {
case "get":
getLedgerGrid(w, r, d)
break
default:
err := fmt.Errorf("Unhandled command: %s", d.wsSearchReq.Cmd)
SvcGridErrorReturn(w, err, funcname)
return
}
}
// GetAccountBalance returns the balance of the account at time dt
//
func | (bid, lid int64, dt *time.Time) (float64, rlib.LedgerMarker) {
lm := rlib.GetRALedgerMarkerOnOrBeforeDeprecated(bid, lid, 0, dt) // find nearest ledgermarker, use it as a starting point
bal, _ := rlib.GetAccountActivity(bid, lid, &lm.Dt, dt)
return bal, lm
}
// LMStates is an array of strings describing the meaning of the states a Ledger Marker can have.
var LMStates = []string{
"open", "closed", "locked", "initial",
}
// getLedgerGrid returns a list of ARs for w2ui grid
// wsdoc {
// @Title list ARs
// @URL /v1/ars/:BUI
// @Method GET
// @Synopsis Get Account Rules
// @Description Get all ARs associated with BID
// @Desc By default, the search is made for receipts from "today" to 31 days prior.
// @Input WebGridSearchRequest
// @Response SearchLedgersResponse
// wsdoc }
func getLedgerGrid(w http.ResponseWriter, r *http.Request, d *ServiceData) {
funcname := "getLedgerGrid"
var (
err error
g SearchLedgersResponse
)
rows, err := rlib.RRdb.Prepstmt.GetLedgersForGrid.Query(d.BID, d.wsSearchReq.Limit, d.wsSearchReq.Offset)
if err != nil {
fmt.Printf("%s: Error from DB Query: %s\n", funcname, err.Error())
SvcGridErrorReturn(w, err, funcname)
return
}
defer rows.Close()
dt := time.Time(d.wsSearchReq.SearchDtStart)
i := int64(d.wsSearchReq.Offset)
for rows.Next() {
var acct rlib.GLAccount
rlib.ReadGLAccounts(rows, &acct)
active := "active"
if 1 == acct.Status {
active = "inactive"
}
posts := "yes"
if acct.AllowPost == 0 {
posts = "no"
}
bal, lm := GetAccountBalance(acct.BID, acct.LID, &dt)
state := "??"
j := int(lm.State)
if 0 <= j && j <= 3 {
state = LMStates[j]
}
var lg = LedgerGrid{
Recid: i,
LID: acct.LID,
GLNumber: acct.GLNumber,
Name: acct.Name,
Active: active,
AllowPost: posts,
Balance: bal,
LMDate: lm.Dt.In(rlib.RRdb.Zone).Format("Jan _2, 2006 15:04:05 MST"),
LMAmount: lm.Balance,
LMState: state,
}
g.Records = append(g.Records, lg)
i++
}
// error check
err = rows.Err()
if err != nil {
SvcGridErrorReturn(w, err, funcname)
return
}
g.Status = "success"
g.Total = int64(len(g.Records))
w.Header().Set("Content-Type", "application/json")
SvcWriteResponse(&g, w)
}
// // SvcFormHandlerAR formats a complete data record for a person suitable for use with the w2ui Form
// // For this call, we expect the URI to contain the BID and the ARID as follows:
// // 0 1 2 3
// // uri /v1/receipt/BUI/ARID
// // The server command can be:
// // get
// // save
// // delete
// //-----------------------------------------------------------------------------------
// func SvcFormHandlerAR(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "SvcFormHandlerAR"
// err error
// )
// fmt.Printf("Entered %s\n", funcname)
// if d.ARID, err = SvcExtractIDFromURI(r.RequestURI, "ARID", 3, w); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// fmt.Printf("Request: %s: BID = %d, ID = %d\n", d.wsSearchReq.Cmd, d.BID, d.ARID)
// switch d.wsSearchReq.Cmd {
// case "get":
// getARForm(w, r, d)
// break
// case "save":
// saveARForm(w, r, d)
// break
// case "delete":
// deleteARForm(w, r, d)
// break
// default:
// err = fmt.Errorf("Unhandled command: %s", d.wsSearchReq.Cmd)
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// }
// // saveARForm returns the requested receipt
// // wsdoc {
// // @Title Save AR
// // @URL /v1/ars/:BUI/:ARID
// // @Method GET
// // @Synopsis Save a AR
// // @Desc This service saves a AR. If :ARID exists, it will
// // @Desc be updated with the information supplied. All fields must
// // @Desc be supplied. If ARID is 0, then a new receipt is created.
// // @Input SaveARInput
// // @Response SvcStatusResponse
// // wsdoc }
// func saveARForm(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "saveARForm"
// foo SaveARInput
// bar SaveAROther
// a rlib.AR
// err error
// )
// fmt.Printf("Entered %s\n", funcname)
// fmt.Printf("record data = %s\n", d.data)
// // get data
// data := []byte(d.data)
// if err := json.Unmarshal(data, &foo); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// if err := json.Unmarshal(data, &bar); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// // migrate foo.Record data to a struct's fields
// rlib.MigrateStructVals(&foo.Record, &a) // the variables that don't need special handling
// fmt.Printf("saveAR - first migrate: a = %#v\n", a)
// var ok bool
// a.BID, ok = rlib.RRdb.BUDlist[bar.Record.BID.ID]
// if !ok {
// e := fmt.Errorf("%s: Could not map BID value: %s", funcname, bar.Record.BID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.CreditLID, ok = rlib.StringToInt64(bar.Record.CreditLID.ID) // CreditLID has drop list
// if !ok {
// e := fmt.Errorf("%s: invalid CreditLID value: %s", funcname, bar.Record.CreditLID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.DebitLID, ok = rlib.StringToInt64(bar.Record.DebitLID.ID) // DebitLID has drop list
// if !ok {
// e := fmt.Errorf("%s: invalid DebitLID value: %s", funcname, bar.Record.DebitLID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.ARType, ok = rlib.StringToInt64(bar.Record.ARType.ID) // ArType has drop list
// if !ok {
// e := fmt.Errorf("%s: Invalid ARType value: %s", funcname, bar.Record.ARType.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// fmt.Printf("saveAR - second migrate: a = %#v\n", a)
// // get PriorToRAStart and PriorToRAStop values and accordingly get RARequired field value
// formBoolMap := [2]bool{foo.Record.PriorToRAStart, foo.Record.PriorToRAStop}
// for raReq, boolMap := range raRequiredMap {
// if boolMap == formBoolMap {
// a.RARequired = int64(raReq)
// break
// }
// }
// // save or update
// if a.ARID == 0 && d.ARID == 0 {
// // This is a new AR
// fmt.Printf(">>>> NEW RECEIPT IS BEING ADDED\n")
// _, err = rlib.InsertAR(&a)
// } else {
// // update existing record
// fmt.Printf("Updating existing AR: %d\n", a.ARID)
// err = rlib.UpdateAR(&a)
// }
// if err != nil {
// e := fmt.Errorf("Error saving receipt (ARID=%d\n: %s", d.ARID, err.Error())
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// SvcWriteSuccessResponseWithID(w, a.ARID)
// }
// // which fields needs to be fetched for SQL query for receipts grid
// var getARQuerySelectFields = rlib.SelectQueryFields{
// "AR.ARID",
// "AR.Name",
// "AR.ARType",
// "AR.DebitLID",
// "debitQuery.Name as DebitLedgerName",
// "AR.CreditLID",
// "creditQuery.Name as CreditLedgerName",
// "AR.Description",
// "AR.DtStart",
// "AR.DtStop",
// "AR.RARequired",
// }
// // for what RARequired value, prior and after value are
// var raRequiredMap = map[int][2]bool{
// 0: {false, false}, // during
// 1: {true, false}, // prior or during
// 2: {false, true}, // after or during
// 3: {true, true}, // after or during or prior
// }
// // getARForm returns the requested ars
// // wsdoc {
// // @Title Get AR
// // @URL /v1/ars/:BUI/:ARID
// // @Method GET
// // @Synopsis Get information on a AR
// // @Description Return all fields for ars :ARID
// // @Input WebGridSearchRequest
// // @Response GetLedgerResponse
// // wsdoc }
// func getARForm(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "getARForm"
// g GetLedgerResponse
// err error
// )
// fmt.Printf("entered %s\n", funcname)
// arQuery := `
// SELECT
// {{.SelectClause}}
// FROM AR
// INNER JOIN GLAccount as debitQuery on AR.DebitLID=debitQuery.LID
// INNER JOIN GLAccount as creditQuery on AR.CreditLID=creditQuery.LID
// WHERE {{.WhereClause}};`
// qc := rlib.QueryClause{
// "SelectClause": strings.Join(getARQuerySelectFields, ","),
// "WhereClause": fmt.Sprintf("AR.BID=%d AND AR.ARID=%d", d.BID, d.ARID),
// }
// // get formatted query with substitution of select, where, order clause
// q := rlib.RenderSQLQuery(arQuery, qc)
// fmt.Printf("db query = %s\n", q)
// // execute the query
// rows, err := rlib.RRdb.Dbrr.Query(q)
// if err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// defer rows.Close()
// for rows.Next() {
// var gg ARSendForm
// gg.BID = getBUDFromBIDList(d.BID)
// err = rows.Scan(&gg.ARID, &gg.Name, &gg.ARType, &gg.DebitLID, &gg.DebitLedgerName, &gg.CreditLID, &gg.CreditLedgerName, &gg.Description, &gg.DtStart, &gg.DtStop, &gg.raRequired)
// if err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// // according to RARequired map, fill out PriorToRAStart, PriorToRAStop values
// raReqMappedVal := raRequiredMap[gg.raRequired]
// gg.PriorToRAStart = raReqMappedVal[0]
// gg.PriorToRAStop = raReqMappedVal[1]
// g.Record = gg
// }
// // error check
// err = rows.Err()
// if err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// g.Status = "success"
// w.Header().Set("Content-Type", "application/json")
// SvcWriteResponse(&g, w)
// }
// // deleteAR request delete AR from database
// // wsdoc {
// // @Title Delete AR
// // @URL /v1/ars/:BUI/:ARID
// // @Method DELETE
// // @Synopsis Delete record for a AR
// // @Description Delete record from database ars :ARID
// // @Input WebGridSearchRequest
// // @Response SvcWriteSuccessResponse
// // wsdoc }
// func deleteARForm(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "deleteARForm"
// del DeleteARForm
// )
// fmt.Printf("Entered %s\n", funcname)
// fmt.Printf("record data = %s\n", d.data)
// if err := json.Unmarshal([]byte(d.data), &del); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// if err := rlib.DeleteAR(del.ARID); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// SvcWriteSuccessResponse(w)
// }
| GetAccountBalance | identifier_name |
ledger.go | package ws
import (
"fmt"
"net/http"
"rentroll/rlib"
"time"
)
// LedgerGrid is a structure specifically for the UI Grid.
type LedgerGrid struct {
Recid int64 `json:"recid"` // this is to support the w2ui form
LID int64
GLNumber string
Name string
Active string
AllowPost string
Balance float64
LMDate string
LMAmount float64
LMState string
}
// SearchLedgersResponse is a response string to the search request for receipts
type SearchLedgersResponse struct {
Status string `json:"status"`
Total int64 `json:"total"`
Records []LedgerGrid `json:"records"`
}
// GetLedgerResponse is the response to a GetAR request
type GetLedgerResponse struct {
Status string `json:"status"`
Record ARSendForm `json:"record"`
}
// SvcSearchHandlerLedger generates a report of all ARs defined business d.BID
// wsdoc {
// @Title Search Account Rules
// @URL /v1/ars/:BUI
// @Method POST
// @Synopsis Search Account Rules
// @Description Search all ARs and return those that match the Search Logic.
// @Desc By default, the search is made for receipts from "today" to 31 days prior.
// @Input WebGridSearchRequest
// @Response SearchLedgersResponse
// wsdoc }
func SvcSearchHandlerLedger(w http.ResponseWriter, r *http.Request, d *ServiceData) {
funcname := "SvcSearchHandlerLedger"
fmt.Printf("Entered %s\n", funcname)
switch d.wsSearchReq.Cmd {
case "get":
getLedgerGrid(w, r, d)
break
default:
err := fmt.Errorf("Unhandled command: %s", d.wsSearchReq.Cmd)
SvcGridErrorReturn(w, err, funcname)
return
}
}
// GetAccountBalance returns the balance of the account at time dt
//
func GetAccountBalance(bid, lid int64, dt *time.Time) (float64, rlib.LedgerMarker) {
lm := rlib.GetRALedgerMarkerOnOrBeforeDeprecated(bid, lid, 0, dt) // find nearest ledgermarker, use it as a starting point
bal, _ := rlib.GetAccountActivity(bid, lid, &lm.Dt, dt)
return bal, lm
}
// LMStates is an array of strings describing the meaning of the states a Ledger Marker can have.
var LMStates = []string{
"open", "closed", "locked", "initial",
}
// getLedgerGrid returns a list of ARs for w2ui grid
// wsdoc {
// @Title list ARs
// @URL /v1/ars/:BUI
// @Method GET
// @Synopsis Get Account Rules
// @Description Get all ARs associated with BID
// @Desc By default, the search is made for receipts from "today" to 31 days prior.
// @Input WebGridSearchRequest
// @Response SearchLedgersResponse
// wsdoc }
func getLedgerGrid(w http.ResponseWriter, r *http.Request, d *ServiceData) {
funcname := "getLedgerGrid"
var (
err error
g SearchLedgersResponse
)
rows, err := rlib.RRdb.Prepstmt.GetLedgersForGrid.Query(d.BID, d.wsSearchReq.Limit, d.wsSearchReq.Offset)
if err != nil {
fmt.Printf("%s: Error from DB Query: %s\n", funcname, err.Error())
SvcGridErrorReturn(w, err, funcname)
return
}
defer rows.Close()
dt := time.Time(d.wsSearchReq.SearchDtStart)
i := int64(d.wsSearchReq.Offset)
for rows.Next() {
var acct rlib.GLAccount
rlib.ReadGLAccounts(rows, &acct)
active := "active"
if 1 == acct.Status {
active = "inactive"
}
posts := "yes"
if acct.AllowPost == 0 {
posts = "no"
}
bal, lm := GetAccountBalance(acct.BID, acct.LID, &dt)
state := "??"
j := int(lm.State)
if 0 <= j && j <= 3 {
state = LMStates[j]
}
var lg = LedgerGrid{
Recid: i,
LID: acct.LID,
GLNumber: acct.GLNumber,
Name: acct.Name,
Active: active,
AllowPost: posts,
Balance: bal,
LMDate: lm.Dt.In(rlib.RRdb.Zone).Format("Jan _2, 2006 15:04:05 MST"),
LMAmount: lm.Balance,
LMState: state,
}
g.Records = append(g.Records, lg)
i++
}
// error check
err = rows.Err()
if err != nil |
g.Status = "success"
g.Total = int64(len(g.Records))
w.Header().Set("Content-Type", "application/json")
SvcWriteResponse(&g, w)
}
// // SvcFormHandlerAR formats a complete data record for a person suitable for use with the w2ui Form
// // For this call, we expect the URI to contain the BID and the ARID as follows:
// // 0 1 2 3
// // uri /v1/receipt/BUI/ARID
// // The server command can be:
// // get
// // save
// // delete
// //-----------------------------------------------------------------------------------
// func SvcFormHandlerAR(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "SvcFormHandlerAR"
// err error
// )
// fmt.Printf("Entered %s\n", funcname)
// if d.ARID, err = SvcExtractIDFromURI(r.RequestURI, "ARID", 3, w); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// fmt.Printf("Request: %s: BID = %d, ID = %d\n", d.wsSearchReq.Cmd, d.BID, d.ARID)
// switch d.wsSearchReq.Cmd {
// case "get":
// getARForm(w, r, d)
// break
// case "save":
// saveARForm(w, r, d)
// break
// case "delete":
// deleteARForm(w, r, d)
// break
// default:
// err = fmt.Errorf("Unhandled command: %s", d.wsSearchReq.Cmd)
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// }
// // saveARForm returns the requested receipt
// // wsdoc {
// // @Title Save AR
// // @URL /v1/ars/:BUI/:ARID
// // @Method GET
// // @Synopsis Save a AR
// // @Desc This service saves a AR. If :ARID exists, it will
// // @Desc be updated with the information supplied. All fields must
// // @Desc be supplied. If ARID is 0, then a new receipt is created.
// // @Input SaveARInput
// // @Response SvcStatusResponse
// // wsdoc }
// func saveARForm(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "saveARForm"
// foo SaveARInput
// bar SaveAROther
// a rlib.AR
// err error
// )
// fmt.Printf("Entered %s\n", funcname)
// fmt.Printf("record data = %s\n", d.data)
// // get data
// data := []byte(d.data)
// if err := json.Unmarshal(data, &foo); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// if err := json.Unmarshal(data, &bar); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// // migrate foo.Record data to a struct's fields
// rlib.MigrateStructVals(&foo.Record, &a) // the variables that don't need special handling
// fmt.Printf("saveAR - first migrate: a = %#v\n", a)
// var ok bool
// a.BID, ok = rlib.RRdb.BUDlist[bar.Record.BID.ID]
// if !ok {
// e := fmt.Errorf("%s: Could not map BID value: %s", funcname, bar.Record.BID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.CreditLID, ok = rlib.StringToInt64(bar.Record.CreditLID.ID) // CreditLID has drop list
// if !ok {
// e := fmt.Errorf("%s: invalid CreditLID value: %s", funcname, bar.Record.CreditLID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.DebitLID, ok = rlib.StringToInt64(bar.Record.DebitLID.ID) // DebitLID has drop list
// if !ok {
// e := fmt.Errorf("%s: invalid DebitLID value: %s", funcname, bar.Record.DebitLID.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// a.ARType, ok = rlib.StringToInt64(bar.Record.ARType.ID) // ArType has drop list
// if !ok {
// e := fmt.Errorf("%s: Invalid ARType value: %s", funcname, bar.Record.ARType.ID)
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// fmt.Printf("saveAR - second migrate: a = %#v\n", a)
// // get PriorToRAStart and PriorToRAStop values and accordingly get RARequired field value
// formBoolMap := [2]bool{foo.Record.PriorToRAStart, foo.Record.PriorToRAStop}
// for raReq, boolMap := range raRequiredMap {
// if boolMap == formBoolMap {
// a.RARequired = int64(raReq)
// break
// }
// }
// // save or update
// if a.ARID == 0 && d.ARID == 0 {
// // This is a new AR
// fmt.Printf(">>>> NEW RECEIPT IS BEING ADDED\n")
// _, err = rlib.InsertAR(&a)
// } else {
// // update existing record
// fmt.Printf("Updating existing AR: %d\n", a.ARID)
// err = rlib.UpdateAR(&a)
// }
// if err != nil {
// e := fmt.Errorf("Error saving receipt (ARID=%d\n: %s", d.ARID, err.Error())
// SvcGridErrorReturn(w, e, funcname)
// return
// }
// SvcWriteSuccessResponseWithID(w, a.ARID)
// }
// // which fields needs to be fetched for SQL query for receipts grid
// var getARQuerySelectFields = rlib.SelectQueryFields{
// "AR.ARID",
// "AR.Name",
// "AR.ARType",
// "AR.DebitLID",
// "debitQuery.Name as DebitLedgerName",
// "AR.CreditLID",
// "creditQuery.Name as CreditLedgerName",
// "AR.Description",
// "AR.DtStart",
// "AR.DtStop",
// "AR.RARequired",
// }
// // for what RARequired value, prior and after value are
// var raRequiredMap = map[int][2]bool{
// 0: {false, false}, // during
// 1: {true, false}, // prior or during
// 2: {false, true}, // after or during
// 3: {true, true}, // after or during or prior
// }
// // getARForm returns the requested ars
// // wsdoc {
// // @Title Get AR
// // @URL /v1/ars/:BUI/:ARID
// // @Method GET
// // @Synopsis Get information on a AR
// // @Description Return all fields for ars :ARID
// // @Input WebGridSearchRequest
// // @Response GetLedgerResponse
// // wsdoc }
// func getARForm(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "getARForm"
// g GetLedgerResponse
// err error
// )
// fmt.Printf("entered %s\n", funcname)
// arQuery := `
// SELECT
// {{.SelectClause}}
// FROM AR
// INNER JOIN GLAccount as debitQuery on AR.DebitLID=debitQuery.LID
// INNER JOIN GLAccount as creditQuery on AR.CreditLID=creditQuery.LID
// WHERE {{.WhereClause}};`
// qc := rlib.QueryClause{
// "SelectClause": strings.Join(getARQuerySelectFields, ","),
// "WhereClause": fmt.Sprintf("AR.BID=%d AND AR.ARID=%d", d.BID, d.ARID),
// }
// // get formatted query with substitution of select, where, order clause
// q := rlib.RenderSQLQuery(arQuery, qc)
// fmt.Printf("db query = %s\n", q)
// // execute the query
// rows, err := rlib.RRdb.Dbrr.Query(q)
// if err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// defer rows.Close()
// for rows.Next() {
// var gg ARSendForm
// gg.BID = getBUDFromBIDList(d.BID)
// err = rows.Scan(&gg.ARID, &gg.Name, &gg.ARType, &gg.DebitLID, &gg.DebitLedgerName, &gg.CreditLID, &gg.CreditLedgerName, &gg.Description, &gg.DtStart, &gg.DtStop, &gg.raRequired)
// if err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// // according to RARequired map, fill out PriorToRAStart, PriorToRAStop values
// raReqMappedVal := raRequiredMap[gg.raRequired]
// gg.PriorToRAStart = raReqMappedVal[0]
// gg.PriorToRAStop = raReqMappedVal[1]
// g.Record = gg
// }
// // error check
// err = rows.Err()
// if err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// g.Status = "success"
// w.Header().Set("Content-Type", "application/json")
// SvcWriteResponse(&g, w)
// }
// // deleteAR request delete AR from database
// // wsdoc {
// // @Title Delete AR
// // @URL /v1/ars/:BUI/:ARID
// // @Method DELETE
// // @Synopsis Delete record for a AR
// // @Description Delete record from database ars :ARID
// // @Input WebGridSearchRequest
// // @Response SvcWriteSuccessResponse
// // wsdoc }
// func deleteARForm(w http.ResponseWriter, r *http.Request, d *ServiceData) {
// var (
// funcname = "deleteARForm"
// del DeleteARForm
// )
// fmt.Printf("Entered %s\n", funcname)
// fmt.Printf("record data = %s\n", d.data)
// if err := json.Unmarshal([]byte(d.data), &del); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// if err := rlib.DeleteAR(del.ARID); err != nil {
// SvcGridErrorReturn(w, err, funcname)
// return
// }
// SvcWriteSuccessResponse(w)
// }
| {
SvcGridErrorReturn(w, err, funcname)
return
} | conditional_block |
client.go | package main
import (
"bytes"
"crypto/aes"
"crypto/cipher"
"crypto/md5"
"crypto/rand"
"crypto/rsa"
"crypto/sha1"
"crypto/x509"
"encoding/base64"
"encoding/pem"
"flag"
"fmt"
"io/ioutil"
"log"
"math"
"net/http"
"net/url"
"os"
"os/exec"
"sync"
"sync/atomic"
"time"
"remote-terminal-client/protocol"
"github.com/gorilla/websocket"
"github.com/kr/pty"
)
const (
stateOpen uint32 = iota
stateConnecting
stateReady
stateReconnecting
stateClose
)
const (
EventOpen string = "open"
EventConnect = "connect"
EventReconnect = "reconnect"
EventError = "error"
)
type Listener func(args ...interface{})
type emitter struct {
listeners map[string][]Listener
m sync.RWMutex
}
type option struct {
AutoReconnect bool
MaxReconnections int32
}
var defaultOption = &option{
AutoReconnect: true,
MaxReconnections: math.MaxInt32,
}
type socketClient struct {
emitter
state uint32
url *url.URL
option *option
transprot protocol.Transport
outChan chan *protocol.Packet
closeChan chan bool
}
var connection *websocket.Conn
//---------------------虚拟终端--------------------------
type wsPty struct {
Cmd *exec.Cmd // pty builds on os.exec
Pty *os.File // a pty is simply an os.File
}
func (wp *wsPty) Start() {
var err error
args := flag.Args()
wp.Cmd = exec.Command(cmdFlag, args...)
wp.Cmd.Env = append(os.Environ(), "TERM=xterm")
wp.Pty, err = pty.Start(wp.Cmd)
if err != nil {
log.Fatalf("Failed to start command: %s\n", err)
}
}
func (wp *wsPty) Stop() {
wp.Pty.Close()
wp.Cmd.Wait()
}
var cmdFlag string
var messageData interface{}
func init() {
flag.StringVar(&cmdFlag, "cmd", "/bin/bash", "command to execute on slave side of the pty")
}
func main() {
wp := wsPty{}
wp.Start()
var conHd = make(map[string]*websocket.Conn)
fmt.Println(RsaEncrypt([]byte("aiyouwei")))
var Header http.Header = map[string][]string{
"moja": {"ccccc, asdasdasdasd"},
"terminal": {"en-esadasdasdwrw"},
"success": {"dasdadas", "wdsadaderew"},
"ticket": {RsaEncrypt([]byte("aiyouwei"))},
}
s, err := Socket("ws://127.0.0.1:3000")
if err != nil {
panic(err)
}
//s.Connect(Header)
//建立主连接
if atomic.CompareAndSwapUint32(&s.state, stateOpen, stateConnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), Header)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateConnecting, Header)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateConnecting, stateReady) {
go s.start(conn, Header)
s.emit(EventConnect)
} else {
conn.Close()
}
}
//建立子连接
go func() {
for {
//每次轮训需要判断连接句柄是否存在
//s, _ := ParseString(messageData)
//fmt.Println("bbbbbbbbbbbbbbbbbbbbbbbbbbbbb")
// in := []byte(s)
// var raw = make(map[string]interface{})
// json.Unmarshal(in, &raw)
// fmt.Println(raw["subconn"])
if messageData == "subconn" {
sub, err := Socket("ws://127.0.0.1:3000?a=sub")
if err != nil {
panic(err)
}
if atomic.CompareAndSwapUint32(&sub.state, stateOpen, stateConnecting) {
subConn, c, err := sub.transprot.Dial(sub.url.String(), Header)
conHd["1"] = c
if err != nil {
sub.emit(EventError, err)
go sub.reconnect(stateConnecting, Header)
return
}
if atomic.CompareAndSwapUint32(&sub.state, stateConnecting, stateReady) {
go sub.start(subConn, Header)
sub.emit(EventConnect)
} else {
subConn.Close()
}
}
| enResult, _ := ParseString(args[0])
messageData = DecryptWithAES("asdasdasdasdasd", enResult)
//fmt.Println(cmd)
//wp.Pty.Write([]byte(cmd))
})
} else if messageData == "cmd" {
fmt.Println("wqeqweqwqw")
} else {
// fmt.Println("qweqwerrrrtytyyyqwwetrtyutuiop")
// decodeBytes, err := base64.StdEncoding.DecodeString(s)
// if err != nil {
// log.Fatalln(err)
// }
// fmt.Println(string(decodeBytes))
}
}
}()
input := []byte("testtttt")
// 演示base64编码
encodeString := base64.StdEncoding.EncodeToString(input)
s.Emit("messgae", encodeString)
//主连接接收消息类型
s.On("message", func(args ...interface{}) {
enResult, _ := ParseString(args[0])
messageData = DecryptWithAES("asdasdasdasdasd", enResult)
//fmt.Println(cmd)
//wp.Pty.Write([]byte(cmd))
})
go func() {
resBuf := make([]byte, 1024)
for {
fmt.Println(string(resBuf))
n, err := wp.Pty.Read(resBuf)
if err != nil {
log.Printf("Failed to read from pty master: %s", err)
return
}
out := make([]byte, base64.StdEncoding.EncodedLen(n))
base64.StdEncoding.Encode(out, resBuf[0:n])
s.Emit("result", string(resBuf[0:n]))
}
}()
for {
}
}
func (e *emitter) On(event string, listener Listener) {
e.m.Lock()
defer e.m.Unlock()
listeners, ok := e.listeners[event]
if ok {
listeners = append(listeners, listener)
} else {
listeners = []Listener{listener}
}
e.listeners[event] = listeners
}
func (e *emitter) emit(event string, args ...interface{}) bool {
e.m.RLock()
listeners, ok := e.listeners[event]
if ok {
for _, listener := range listeners {
listener(args...)
}
}
e.m.RUnlock()
return ok
}
func Socket(urlstring string) (*socketClient, error) {
u, err := url.Parse(urlstring)
if err != nil {
return nil, err
}
u.Path = "/socket.io/"
q := u.Query()
q.Add("EIO", "3")
q.Add("transport", "websocket")
u.RawQuery = q.Encode()
return &socketClient{
emitter: emitter{listeners: make(map[string][]Listener)},
url: u,
option: defaultOption,
transprot: protocol.NewWebSocketTransport(),
outChan: make(chan *protocol.Packet, 64),
closeChan: make(chan bool),
}, nil
}
func (s *socketClient) Connect(requestHeader http.Header) {
if atomic.CompareAndSwapUint32(&s.state, stateOpen, stateConnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), requestHeader)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateConnecting, requestHeader)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateConnecting, stateReady) {
go s.start(conn, requestHeader)
s.emit(EventConnect)
} else {
conn.Close()
}
}
}
func (s *socketClient) Disconnect() {
atomic.StoreUint32(&s.state, stateClose)
close(s.outChan)
close(s.closeChan)
}
func (s *socketClient) Emit(event string, args ...interface{}) {
if atomic.LoadUint32(&s.state) == stateReady && !s.emit(event, args) {
m := &protocol.Message{
Type: protocol.MessageTypeEvent,
Namespace: "/",
ID: -1,
Event: event,
Payloads: args,
}
p, err := m.Encode()
if err != nil {
s.emit(EventError, err)
} else {
s.outChan <- p
}
}
}
func (s *socketClient) reconnect(state uint32, requestHeader http.Header) {
time.Sleep(time.Second)
if atomic.CompareAndSwapUint32(&s.state, state, stateReconnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), requestHeader)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateReconnecting, requestHeader)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateReconnecting, stateReady) {
go s.start(conn, requestHeader)
s.emit(EventReconnect)
} else {
conn.Close()
}
}
}
func (s *socketClient) start(conn protocol.Conn, requestHeader http.Header) {
stopper := make(chan bool)
go s.startRead(conn, stopper)
go s.startWrite(conn, stopper)
select {
case <-stopper:
go s.reconnect(stateReady, requestHeader)
conn.Close()
case <-s.closeChan:
conn.Close()
}
}
func (s *socketClient) startRead(conn protocol.Conn, stopper chan bool) {
defer func() {
recover()
}()
for atomic.LoadUint32(&s.state) == stateReady {
p, err := conn.Read()
if err != nil {
s.emit(EventError, err)
close(stopper)
return
}
switch p.Type {
case protocol.PacketTypeOpen:
h, err := p.DecodeHandshake()
if err != nil {
s.emit(EventError, err)
} else {
go s.startPing(h, stopper)
}
case protocol.PacketTypePing:
s.outChan <- protocol.NewPongPacket()
case protocol.PacketTypeMessage:
m, err := p.DecodeMessage()
if err != nil {
s.emit(EventError, err)
} else {
s.emit(m.Event, m.Payloads...)
}
}
}
}
func (s *socketClient) startWrite(conn protocol.Conn, stopper chan bool) {
defer func() {
recover()
}()
for atomic.LoadUint32(&s.state) == stateReady {
select {
case <-stopper:
return
case p, ok := <-s.outChan:
if !ok {
return
}
err := conn.Write(p)
if err != nil {
s.emit(EventError, err)
close(stopper)
return
}
}
}
}
func (s *socketClient) startPing(h *protocol.Handshake, stopper chan bool) {
defer func() {
recover()
}()
for {
time.Sleep(time.Duration(h.PingInterval) * time.Millisecond)
select {
case <-stopper:
return
case <-s.closeChan:
return
default:
}
if atomic.LoadUint32(&s.state) != stateReady {
return
}
s.outChan <- protocol.NewPingPacket()
}
}
func EncryptWithAES(key, message string) string {
hash := md5.New()
hash.Write([]byte(key))
keyData := hash.Sum(nil)
block, err := aes.NewCipher(keyData)
if err != nil {
panic(err)
}
iv := []byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f}
enc := cipher.NewCBCEncrypter(block, iv)
content := PKCS5Padding([]byte(message), block.BlockSize())
crypted := make([]byte, len(content))
enc.CryptBlocks(crypted, content)
return base64.StdEncoding.EncodeToString(crypted)
}
func DecryptWithAES(key, message string) string {
hash := md5.New()
hash.Write([]byte(key))
keyData := hash.Sum(nil)
block, err := aes.NewCipher(keyData)
if err != nil {
panic(err)
}
iv := []byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f}
messageData, _ := base64.StdEncoding.DecodeString(message)
dec := cipher.NewCBCDecrypter(block, iv)
decrypted := make([]byte, len(messageData))
dec.CryptBlocks(decrypted, messageData)
return string(PKCS5Unpadding(decrypted))
}
func PKCS5Padding(ciphertext []byte, blockSize int) []byte {
padding := blockSize - len(ciphertext)%blockSize
padtext := bytes.Repeat([]byte{byte(padding)}, padding)
return append(ciphertext, padtext...)
}
func PKCS5Unpadding(encrypt []byte) []byte {
padding := encrypt[len(encrypt)-1]
return encrypt[:len(encrypt)-int(padding)]
}
func ParseString(value interface{}) (string, error) {
switch value.(type) {
case string:
return value.(string), nil
default:
return "", fmt.Errorf("unable to casting number %v (type %T)", value, value)
}
}
func RsaEncrypt(data []byte) string {
pubKey, err := ioutil.ReadFile("../public.pem")
if err != nil {
log.Fatal(err.Error())
}
block, _ := pem.Decode(pubKey) //将密钥解析成公钥实例
if block == nil {
fmt.Println("public key error")
return ""
}
pubInterface, err := x509.ParsePKIXPublicKey(block.Bytes) //解析pem.Decode()返回的Block指针实例
if err != nil {
fmt.Println(err)
return ""
}
pub := pubInterface.(*rsa.PublicKey)
res, err := rsa.EncryptOAEP(sha1.New(), rand.Reader, pub, data, nil)
if err != nil {
fmt.Println(err)
return ""
}
return base64.StdEncoding.EncodeToString(res)
} | fmt.Println("pppppppppppppppppppppppppppp")
fmt.Println(conHd["1"])
sub.On("message", func(args ...interface{}) { | random_line_split |
client.go | package main
import (
"bytes"
"crypto/aes"
"crypto/cipher"
"crypto/md5"
"crypto/rand"
"crypto/rsa"
"crypto/sha1"
"crypto/x509"
"encoding/base64"
"encoding/pem"
"flag"
"fmt"
"io/ioutil"
"log"
"math"
"net/http"
"net/url"
"os"
"os/exec"
"sync"
"sync/atomic"
"time"
"remote-terminal-client/protocol"
"github.com/gorilla/websocket"
"github.com/kr/pty"
)
const (
stateOpen uint32 = iota
stateConnecting
stateReady
stateReconnecting
stateClose
)
const (
EventOpen string = "open"
EventConnect = "connect"
EventReconnect = "reconnect"
EventError = "error"
)
type Listener func(args ...interface{})
type emitter struct {
listeners map[string][]Listener
m sync.RWMutex
}
type option struct {
AutoReconnect bool
MaxReconnections int32
}
var defaultOption = &option{
AutoReconnect: true,
MaxReconnections: math.MaxInt32,
}
type socketClient struct {
emitter
state uint32
url *url.URL
option *option
transprot protocol.Transport
outChan chan *protocol.Packet
closeChan chan bool
}
var connection *websocket.Conn
//---------------------虚拟终端--------------------------
type wsPty struct {
Cmd *exec.Cmd // pty builds on os.exec
Pty *os.File // a pty is simply an os.File
}
func (wp *wsPty) Start() {
var err error
args := flag.Args()
wp.Cmd = exec.Command(cmdFlag, args...)
wp.Cmd.Env = append(os.Environ(), "TERM=xterm")
wp.Pty, err = pty.Start(wp.Cmd)
if err != nil {
log.Fatalf("Failed to start command: %s\n", err)
}
}
func (wp *wsPty) Stop() {
wp.Pty.Close()
wp.Cmd.Wait()
}
var cmdFlag string
var messageData interface{}
func init() {
flag.StringVar(&cmdFlag, "cmd", "/bin/bash", "command to execute on slave side of the pty")
}
func main() { | p := wsPty{}
wp.Start()
var conHd = make(map[string]*websocket.Conn)
fmt.Println(RsaEncrypt([]byte("aiyouwei")))
var Header http.Header = map[string][]string{
"moja": {"ccccc, asdasdasdasd"},
"terminal": {"en-esadasdasdwrw"},
"success": {"dasdadas", "wdsadaderew"},
"ticket": {RsaEncrypt([]byte("aiyouwei"))},
}
s, err := Socket("ws://127.0.0.1:3000")
if err != nil {
panic(err)
}
//s.Connect(Header)
//建立主连接
if atomic.CompareAndSwapUint32(&s.state, stateOpen, stateConnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), Header)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateConnecting, Header)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateConnecting, stateReady) {
go s.start(conn, Header)
s.emit(EventConnect)
} else {
conn.Close()
}
}
//建立子连接
go func() {
for {
//每次轮训需要判断连接句柄是否存在
//s, _ := ParseString(messageData)
//fmt.Println("bbbbbbbbbbbbbbbbbbbbbbbbbbbbb")
// in := []byte(s)
// var raw = make(map[string]interface{})
// json.Unmarshal(in, &raw)
// fmt.Println(raw["subconn"])
if messageData == "subconn" {
sub, err := Socket("ws://127.0.0.1:3000?a=sub")
if err != nil {
panic(err)
}
if atomic.CompareAndSwapUint32(&sub.state, stateOpen, stateConnecting) {
subConn, c, err := sub.transprot.Dial(sub.url.String(), Header)
conHd["1"] = c
if err != nil {
sub.emit(EventError, err)
go sub.reconnect(stateConnecting, Header)
return
}
if atomic.CompareAndSwapUint32(&sub.state, stateConnecting, stateReady) {
go sub.start(subConn, Header)
sub.emit(EventConnect)
} else {
subConn.Close()
}
}
fmt.Println("pppppppppppppppppppppppppppp")
fmt.Println(conHd["1"])
sub.On("message", func(args ...interface{}) {
enResult, _ := ParseString(args[0])
messageData = DecryptWithAES("asdasdasdasdasd", enResult)
//fmt.Println(cmd)
//wp.Pty.Write([]byte(cmd))
})
} else if messageData == "cmd" {
fmt.Println("wqeqweqwqw")
} else {
// fmt.Println("qweqwerrrrtytyyyqwwetrtyutuiop")
// decodeBytes, err := base64.StdEncoding.DecodeString(s)
// if err != nil {
// log.Fatalln(err)
// }
// fmt.Println(string(decodeBytes))
}
}
}()
input := []byte("testtttt")
// 演示base64编码
encodeString := base64.StdEncoding.EncodeToString(input)
s.Emit("messgae", encodeString)
//主连接接收消息类型
s.On("message", func(args ...interface{}) {
enResult, _ := ParseString(args[0])
messageData = DecryptWithAES("asdasdasdasdasd", enResult)
//fmt.Println(cmd)
//wp.Pty.Write([]byte(cmd))
})
go func() {
resBuf := make([]byte, 1024)
for {
fmt.Println(string(resBuf))
n, err := wp.Pty.Read(resBuf)
if err != nil {
log.Printf("Failed to read from pty master: %s", err)
return
}
out := make([]byte, base64.StdEncoding.EncodedLen(n))
base64.StdEncoding.Encode(out, resBuf[0:n])
s.Emit("result", string(resBuf[0:n]))
}
}()
for {
}
}
func (e *emitter) On(event string, listener Listener) {
e.m.Lock()
defer e.m.Unlock()
listeners, ok := e.listeners[event]
if ok {
listeners = append(listeners, listener)
} else {
listeners = []Listener{listener}
}
e.listeners[event] = listeners
}
func (e *emitter) emit(event string, args ...interface{}) bool {
e.m.RLock()
listeners, ok := e.listeners[event]
if ok {
for _, listener := range listeners {
listener(args...)
}
}
e.m.RUnlock()
return ok
}
func Socket(urlstring string) (*socketClient, error) {
u, err := url.Parse(urlstring)
if err != nil {
return nil, err
}
u.Path = "/socket.io/"
q := u.Query()
q.Add("EIO", "3")
q.Add("transport", "websocket")
u.RawQuery = q.Encode()
return &socketClient{
emitter: emitter{listeners: make(map[string][]Listener)},
url: u,
option: defaultOption,
transprot: protocol.NewWebSocketTransport(),
outChan: make(chan *protocol.Packet, 64),
closeChan: make(chan bool),
}, nil
}
func (s *socketClient) Connect(requestHeader http.Header) {
if atomic.CompareAndSwapUint32(&s.state, stateOpen, stateConnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), requestHeader)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateConnecting, requestHeader)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateConnecting, stateReady) {
go s.start(conn, requestHeader)
s.emit(EventConnect)
} else {
conn.Close()
}
}
}
func (s *socketClient) Disconnect() {
atomic.StoreUint32(&s.state, stateClose)
close(s.outChan)
close(s.closeChan)
}
func (s *socketClient) Emit(event string, args ...interface{}) {
if atomic.LoadUint32(&s.state) == stateReady && !s.emit(event, args) {
m := &protocol.Message{
Type: protocol.MessageTypeEvent,
Namespace: "/",
ID: -1,
Event: event,
Payloads: args,
}
p, err := m.Encode()
if err != nil {
s.emit(EventError, err)
} else {
s.outChan <- p
}
}
}
func (s *socketClient) reconnect(state uint32, requestHeader http.Header) {
time.Sleep(time.Second)
if atomic.CompareAndSwapUint32(&s.state, state, stateReconnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), requestHeader)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateReconnecting, requestHeader)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateReconnecting, stateReady) {
go s.start(conn, requestHeader)
s.emit(EventReconnect)
} else {
conn.Close()
}
}
}
func (s *socketClient) start(conn protocol.Conn, requestHeader http.Header) {
stopper := make(chan bool)
go s.startRead(conn, stopper)
go s.startWrite(conn, stopper)
select {
case <-stopper:
go s.reconnect(stateReady, requestHeader)
conn.Close()
case <-s.closeChan:
conn.Close()
}
}
func (s *socketClient) startRead(conn protocol.Conn, stopper chan bool) {
defer func() {
recover()
}()
for atomic.LoadUint32(&s.state) == stateReady {
p, err := conn.Read()
if err != nil {
s.emit(EventError, err)
close(stopper)
return
}
switch p.Type {
case protocol.PacketTypeOpen:
h, err := p.DecodeHandshake()
if err != nil {
s.emit(EventError, err)
} else {
go s.startPing(h, stopper)
}
case protocol.PacketTypePing:
s.outChan <- protocol.NewPongPacket()
case protocol.PacketTypeMessage:
m, err := p.DecodeMessage()
if err != nil {
s.emit(EventError, err)
} else {
s.emit(m.Event, m.Payloads...)
}
}
}
}
// startWrite drains outChan and writes each packet to the connection while
// the client is ready. It stops when stopper fires, when outChan is closed
// (Disconnect), or on the first write error (which closes stopper so that
// start() reconnects). The deferred recover absorbs panics such as the read
// and write loops both closing stopper.
func (s *socketClient) startWrite(conn protocol.Conn, stopper chan bool) {
    defer func() {
        recover()
    }()
    for atomic.LoadUint32(&s.state) == stateReady {
        select {
        case <-stopper:
            return
        case p, ok := <-s.outChan:
            if !ok {
                // outChan closed by Disconnect.
                return
            }
            err := conn.Write(p)
            if err != nil {
                s.emit(EventError, err)
                close(stopper)
                return
            }
        }
    }
}
// startPing queues a ping packet every PingInterval milliseconds (value
// negotiated in the handshake) until the connection stops, the client is
// closed, or the state leaves ready. The recover guard absorbs a send on a
// closed outChan during shutdown.
func (s *socketClient) startPing(h *protocol.Handshake, stopper chan bool) {
    defer func() {
        recover()
    }()
    for {
        time.Sleep(time.Duration(h.PingInterval) * time.Millisecond)
        // Non-blocking poll of the shutdown channels before each ping.
        select {
        case <-stopper:
            return
        case <-s.closeChan:
            return
        default:
        }
        if atomic.LoadUint32(&s.state) != stateReady {
            return
        }
        s.outChan <- protocol.NewPingPacket()
    }
}
// EncryptWithAES encrypts message with AES-128-CBC, using MD5(key) as the
// cipher key, and returns the ciphertext base64-encoded.
//
// SECURITY(review): the IV is a fixed constant and the key is derived with
// plain MD5 — both weak; a random per-message IV and a proper KDF should be
// used. Left unchanged because DecryptWithAES depends on the identical
// IV/key derivation.
func EncryptWithAES(key, message string) string {
    hash := md5.New()
    hash.Write([]byte(key))
    keyData := hash.Sum(nil)
    block, err := aes.NewCipher(keyData)
    if err != nil {
        panic(err)
    }
    // Static IV 00..0f; must match DecryptWithAES.
    iv := []byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f}
    enc := cipher.NewCBCEncrypter(block, iv)
    // Pad up to the AES block size before CBC encryption.
    content := PKCS5Padding([]byte(message), block.BlockSize())
    crypted := make([]byte, len(content))
    enc.CryptBlocks(crypted, content)
    return base64.StdEncoding.EncodeToString(crypted)
}
// DecryptWithAES reverses EncryptWithAES: base64-decode the message,
// decrypt with AES-128-CBC (MD5(key) as cipher key, fixed IV) and strip the
// PKCS#5 padding. Malformed input yields "".
//
// Fix: the base64 decode error was discarded and the ciphertext length was
// never validated, so malformed input made CryptBlocks panic (it requires a
// multiple of the block size). Invalid input now fails soft.
func DecryptWithAES(key, message string) string {
    hash := md5.New()
    hash.Write([]byte(key))
    keyData := hash.Sum(nil)
    block, err := aes.NewCipher(keyData)
    if err != nil {
        panic(err)
    }
    // Static IV; must match EncryptWithAES.
    iv := []byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f}
    cipherBytes, err := base64.StdEncoding.DecodeString(message)
    if err != nil || len(cipherBytes) == 0 || len(cipherBytes)%block.BlockSize() != 0 {
        return ""
    }
    dec := cipher.NewCBCDecrypter(block, iv)
    decrypted := make([]byte, len(cipherBytes))
    dec.CryptBlocks(decrypted, cipherBytes)
    return string(PKCS5Unpadding(decrypted))
}
// PKCS5Padding pads ciphertext to a multiple of blockSize by appending n
// bytes that each hold the value n (1..blockSize), per PKCS#5/PKCS#7.
func PKCS5Padding(ciphertext []byte, blockSize int) []byte {
    n := blockSize - len(ciphertext)%blockSize
    padded := make([]byte, 0, len(ciphertext)+n)
    padded = append(padded, ciphertext...)
    for i := 0; i < n; i++ {
        padded = append(padded, byte(n))
    }
    return padded
}
// PKCS5Unpadding strips PKCS#5/PKCS#7 padding: the final byte states how
// many padding bytes to remove. Input that cannot carry valid padding is
// returned unchanged.
//
// Fix: empty input or a padding byte of 0 / greater than the input length
// previously produced a slice-out-of-range panic.
func PKCS5Unpadding(encrypt []byte) []byte {
    if len(encrypt) == 0 {
        return encrypt
    }
    padding := int(encrypt[len(encrypt)-1])
    if padding == 0 || padding > len(encrypt) {
        // Not valid PKCS#5 padding; fail soft instead of panicking.
        return encrypt
    }
    return encrypt[:len(encrypt)-padding]
}
// ParseString returns value as a string, or an error if it holds any other
// dynamic type.
//
// Fix: the error message was garbled ("unable to casting number" for what
// is a string cast); the type switch now also binds the value idiomatically
// instead of re-asserting it.
func ParseString(value interface{}) (string, error) {
    switch v := value.(type) {
    case string:
        return v, nil
    default:
        return "", fmt.Errorf("unable to cast %v (type %T) to string", value, value)
    }
}
// RsaEncrypt RSA-OAEP(SHA-1)-encrypts data with the public key loaded from
// ../public.pem and returns the ciphertext base64-encoded. Parsing or
// encryption failures yield ""; a missing key file aborts the process.
// NOTE(review): the key path is hard-coded and relative to the working
// directory — consider making it configurable.
func RsaEncrypt(data []byte) string {
    pubKey, err := ioutil.ReadFile("../public.pem")
    if err != nil {
        log.Fatal(err.Error())
    }
    // Decode the PEM block into its DER bytes.
    block, _ := pem.Decode(pubKey)
    if block == nil {
        fmt.Println("public key error")
        return ""
    }
    // Parse the Block returned by pem.Decode() into a public key instance.
    pubInterface, err := x509.ParsePKIXPublicKey(block.Bytes)
    if err != nil {
        fmt.Println(err)
        return ""
    }
    // NOTE(review): unchecked type assertion — panics if the key is not RSA.
    pub := pubInterface.(*rsa.PublicKey)
    res, err := rsa.EncryptOAEP(sha1.New(), rand.Reader, pub, data, nil)
    if err != nil {
        fmt.Println(err)
        return ""
    }
    return base64.StdEncoding.EncodeToString(res)
}
|
w | identifier_name |
client.go | package main
import (
"bytes"
"crypto/aes"
"crypto/cipher"
"crypto/md5"
"crypto/rand"
"crypto/rsa"
"crypto/sha1"
"crypto/x509"
"encoding/base64"
"encoding/pem"
"flag"
"fmt"
"io/ioutil"
"log"
"math"
"net/http"
"net/url"
"os"
"os/exec"
"sync"
"sync/atomic"
"time"
"remote-terminal-client/protocol"
"github.com/gorilla/websocket"
"github.com/kr/pty"
)
const (
stateOpen uint32 = iota
stateConnecting
stateReady
stateReconnecting
stateClose
)
const (
EventOpen string = "open"
EventConnect = "connect"
EventReconnect = "reconnect"
EventError = "error"
)
type Listener func(args ...interface{})
type emitter struct {
listeners map[string][]Listener
m sync.RWMutex
}
type option struct {
AutoReconnect bool
MaxReconnections int32
}
var defaultOption = &option{
AutoReconnect: true,
MaxReconnections: math.MaxInt32,
}
type socketClient struct {
emitter
state uint32
url *url.URL
option *option
transprot protocol.Transport
outChan chan *protocol.Packet
closeChan chan bool
}
var connection *websocket.Conn
//---------------------虚拟终端--------------------------
type wsPty struct {
Cmd *exec.Cmd // pty builds on os.exec
Pty *os.File // a pty is simply an os.File
}
// Start launches the command named by the -cmd flag (plus any remaining
// CLI args) attached to a freshly allocated pseudo-terminal, with
// TERM=xterm appended to the environment. Failure to start is fatal.
func (wp *wsPty) Start() {
    var err error
    args := flag.Args()
    wp.Cmd = exec.Command(cmdFlag, args...)
    wp.Cmd.Env = append(os.Environ(), "TERM=xterm")
    wp.Pty, err = pty.Start(wp.Cmd)
    if err != nil {
        log.Fatalf("Failed to start command: %s\n", err)
    }
}
// Stop closes the pty master and waits for the child command to exit,
// reaping the process.
func (wp *wsPty) Stop() {
    wp.Pty.Close()
    wp.Cmd.Wait()
}
var cmdFlag string
var messageData interface{}
// init registers the -cmd flag selecting the program run on the slave side
// of the pty (default /bin/bash).
// NOTE(review): main() never calls flag.Parse(), so this flag is never
// actually honored — confirm and add flag.Parse() there.
func init() {
    flag.StringVar(&cmdFlag, "cmd", "/bin/bash", "command to execute on slave side of the pty")
}
// main wires the client together: it spawns a local pty running a shell,
// establishes the primary socket.io connection, polls for instructions that
// arrive (decrypted) via "message" events, and streams pty output back to
// the server.
//
// NOTE(review): flag.Parse() is never called, so the -cmd flag is unused
// and flag.Args() inside wp.Start() is always empty.
// NOTE(review): messageData is written by listener goroutines and read by
// the polling goroutine without synchronization — data race.
// NOTE(review): the trailing `for {}` is a busy-wait burning a CPU core;
// `select {}` would block for free.
func main() {
    wp := wsPty{}
    wp.Start()
    // Handles for sub-connections, keyed by id (only "1" is ever used).
    var conHd = make(map[string]*websocket.Conn)
    fmt.Println(RsaEncrypt([]byte("aiyouwei")))
    var Header http.Header = map[string][]string{
        "moja":     {"ccccc, asdasdasdasd"},
        "terminal": {"en-esadasdasdwrw"},
        "success":  {"dasdadas", "wdsadaderew"},
        "ticket":   {RsaEncrypt([]byte("aiyouwei"))},
    }
    s, err := Socket("ws://127.0.0.1:3000")
    if err != nil {
        panic(err)
    }
    //s.Connect(Header)
    // Establish the primary connection (inlined copy of Connect so the
    // shared `connection` global gets updated).
    if atomic.CompareAndSwapUint32(&s.state, stateOpen, stateConnecting) {
        conn, c, err := s.transprot.Dial(s.url.String(), Header)
        connection = c
        if err != nil {
            s.emit(EventError, err)
            go s.reconnect(stateConnecting, Header)
            return
        }
        if atomic.CompareAndSwapUint32(&s.state, stateConnecting, stateReady) {
            go s.start(conn, Header)
            s.emit(EventConnect)
        } else {
            conn.Close()
        }
    }
    // Establish sub-connections on demand.
    go func() {
        for {
            // Each polling pass must check whether the connection handle exists.
            //s, _ := ParseString(messageData)
            //fmt.Println("bbbbbbbbbbbbbbbbbbbbbbbbbbbbb")
            // in := []byte(s)
            // var raw = make(map[string]interface{})
            // json.Unmarshal(in, &raw)
            // fmt.Println(raw["subconn"])
            if messageData == "subconn" {
                // NOTE(review): nothing resets messageData, so this branch can
                // fire on every loop pass, dialing a new sub-connection each time.
                sub, err := Socket("ws://127.0.0.1:3000?a=sub")
                if err != nil {
                    panic(err)
                }
                if atomic.CompareAndSwapUint32(&sub.state, stateOpen, stateConnecting) {
                    subConn, c, err := sub.transprot.Dial(sub.url.String(), Header)
                    conHd["1"] = c
                    if err != nil {
                        sub.emit(EventError, err)
                        go sub.reconnect(stateConnecting, Header)
                        return
                    }
                    if atomic.CompareAndSwapUint32(&sub.state, stateConnecting, stateReady) {
                        go sub.start(subConn, Header)
                        sub.emit(EventConnect)
                    } else {
                        subConn.Close()
                    }
                }
                fmt.Println("pppppppppppppppppppppppppppp")
                fmt.Println(conHd["1"])
                sub.On("message", func(args ...interface{}) {
                    enResult, _ := ParseString(args[0])
                    messageData = DecryptWithAES("asdasdasdasdasd", enResult)
                    //fmt.Println(cmd)
                    //wp.Pty.Write([]byte(cmd))
                })
            } else if messageData == "cmd" {
                fmt.Println("wqeqweqwqw")
            } else {
                // fmt.Println("qweqwerrrrtytyyyqwwetrtyutuiop")
                // decodeBytes, err := base64.StdEncoding.DecodeString(s)
                // if err != nil {
                // log.Fatalln(err)
                // }
                // fmt.Println(string(decodeBytes))
            }
        }
    }()
    input := []byte("testtttt")
    // Demonstrate base64 encoding.
    encodeString := base64.StdEncoding.EncodeToString(input)
    // NOTE(review): "messgae" looks like a typo for "message" — confirm the
    // server-side event name before changing it.
    s.Emit("messgae", encodeString)
    // Message handler for the primary connection.
    s.On("message", func(args ...interface{}) {
        enResult, _ := ParseString(args[0])
        messageData = DecryptWithAES("asdasdasdasdasd", enResult)
        //fmt.Println(cmd)
        //wp.Pty.Write([]byte(cmd))
    })
    // Pump pty output back to the server.
    go func() {
        resBuf := make([]byte, 1024)
        for {
            fmt.Println(string(resBuf))
            n, err := wp.Pty.Read(resBuf)
            if err != nil {
                log.Printf("Failed to read from pty master: %s", err)
                return
            }
            // NOTE(review): `out` (the base64 encoding) is computed but the
            // raw bytes are what actually gets emitted — confirm which is
            // intended.
            out := make([]byte, base64.StdEncoding.EncodedLen(n))
            base64.StdEncoding.Encode(out, resBuf[0:n])
            s.Emit("result", string(resBuf[0:n]))
        }
    }()
    for {
    }
}
// On registers listener for event under the write lock. Indexing a missing
// key yields a nil slice, and append on nil creates the bucket, so the
// new-event and existing-event cases collapse into one statement.
func (e *emitter) On(event string, listener Listener) {
    e.m.Lock()
    defer e.m.Unlock()
    e.listeners[event] = append(e.listeners[event], listener)
}
// emit invokes every listener registered for event with args and reports
// whether any listener existed.
//
// Fix: RUnlock is now deferred. Previously a panicking listener skipped the
// explicit RUnlock, leaving the RWMutex read-locked forever, so any later
// On() call would deadlock even after the panic was recovered elsewhere.
func (e *emitter) emit(event string, args ...interface{}) bool {
    e.m.RLock()
    defer e.m.RUnlock()
    listeners, ok := e.listeners[event]
    if ok {
        for _, listener := range listeners {
            listener(args...)
        }
    }
    return ok
}
// Socket builds a socketClient for the given ws:// URL. It rewrites the
// path to /socket.io/ and appends the engine.io v3 websocket query
// parameters; the connection itself is established later (Connect / main).
func Socket(urlstring string) (*socketClient, error) {
    u, err := url.Parse(urlstring)
    if err != nil {
        return nil, err
    }
    u.Path = "/socket.io/"
    q := u.Query()
    q.Add("EIO", "3")
    q.Add("transport", "websocket")
    u.RawQuery = q.Encode()
    return &socketClient{
        emitter:   emitter{listeners: make(map[string][]Listener)},
        url:       u,
        option:    defaultOption,
        transprot: protocol.NewWebSocketTransport(),
        outChan:   make(chan *protocol.Packet, 64),
        closeChan: make(chan bool),
    }, nil
}
// Connect dials the server if the client is still in the initial open
// state. Success promotes the client to ready and starts the I/O loops;
// dial failure emits EventError and enters the reconnect loop. The raw
// websocket handle is stored in the package-level `connection` variable as
// a side effect.
func (s *socketClient) Connect(requestHeader http.Header) {
    if atomic.CompareAndSwapUint32(&s.state, stateOpen, stateConnecting) {
        conn, c, err := s.transprot.Dial(s.url.String(), requestHeader)
        connection = c
        if err != nil {
            s.emit(EventError, err)
            go s.reconnect(stateConnecting, requestHeader)
            return
        }
        if atomic.CompareAndSwapUint32(&s.state, stateConnecting, stateReady) {
            go s.start(conn, requestHeader)
            s.emit(EventConnect)
        } else {
            // State changed underneath us (e.g. Disconnect); discard the dial.
            conn.Close()
        }
    }
}
// Disconnect moves the client into the closed state and closes both
// channels, which makes the reader/writer/ping goroutines return.
// NOTE(review): a concurrent Emit that already passed its state check can
// still send on the closed outChan and panic; the worker goroutines
// recover() from that, but a direct caller would not — confirm intended.
func (s *socketClient) Disconnect() {
    atomic.StoreUint32(&s.state, stateClose)
    close(s.outChan)
    close(s.closeChan)
}
// Emit dispatches an outbound event. If the client is ready and no local
// listener consumed the event, it is encoded as a socket.io message and
// queued on outChan for the writer goroutine.
//
// Fix: forward the payloads with `args...` so local listeners receive them
// individually. Previously `s.emit(event, args)` wrapped the whole slice as
// a single argument, inconsistent with how incoming messages are dispatched
// (`s.emit(m.Event, m.Payloads...)` in startRead).
func (s *socketClient) Emit(event string, args ...interface{}) {
    if atomic.LoadUint32(&s.state) == stateReady && !s.emit(event, args...) {
        m := &protocol.Message{
            Type:      protocol.MessageTypeEvent,
            Namespace: "/",
            ID:        -1,
            Event:     event,
            Payloads:  args,
        }
        p, err := m.Encode()
        if err != nil {
            s.emit(EventError, err)
        } else {
            s.outChan <- p
        }
    }
}
func (s *socketClient) reconnect(state uint32, requestHeader http.Header) {
time.Sleep(time.Second)
if atomic.CompareAndSwapUint32(&s.state, state, stateReconnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), requestHeader)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateReconnecting, requestHeader)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateReconnecting, stateReady) {
go s.start(conn, requestHeader)
s.emit(EventReconnect)
} else {
conn.Clos | uestHeader http.Header) {
stopper := make(chan bool)
go s.startRead(conn, stopper)
go s.startWrite(conn, stopper)
select {
case <-stopper:
go s.reconnect(stateReady, requestHeader)
conn.Close()
case <-s.closeChan:
conn.Close()
}
}
func (s *socketClient) startRead(conn protocol.Conn, stopper chan bool) {
defer func() {
recover()
}()
for atomic.LoadUint32(&s.state) == stateReady {
p, err := conn.Read()
if err != nil {
s.emit(EventError, err)
close(stopper)
return
}
switch p.Type {
case protocol.PacketTypeOpen:
h, err := p.DecodeHandshake()
if err != nil {
s.emit(EventError, err)
} else {
go s.startPing(h, stopper)
}
case protocol.PacketTypePing:
s.outChan <- protocol.NewPongPacket()
case protocol.PacketTypeMessage:
m, err := p.DecodeMessage()
if err != nil {
s.emit(EventError, err)
} else {
s.emit(m.Event, m.Payloads...)
}
}
}
}
func (s *socketClient) startWrite(conn protocol.Conn, stopper chan bool) {
defer func() {
recover()
}()
for atomic.LoadUint32(&s.state) == stateReady {
select {
case <-stopper:
return
case p, ok := <-s.outChan:
if !ok {
return
}
err := conn.Write(p)
if err != nil {
s.emit(EventError, err)
close(stopper)
return
}
}
}
}
func (s *socketClient) startPing(h *protocol.Handshake, stopper chan bool) {
defer func() {
recover()
}()
for {
time.Sleep(time.Duration(h.PingInterval) * time.Millisecond)
select {
case <-stopper:
return
case <-s.closeChan:
return
default:
}
if atomic.LoadUint32(&s.state) != stateReady {
return
}
s.outChan <- protocol.NewPingPacket()
}
}
func EncryptWithAES(key, message string) string {
hash := md5.New()
hash.Write([]byte(key))
keyData := hash.Sum(nil)
block, err := aes.NewCipher(keyData)
if err != nil {
panic(err)
}
iv := []byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f}
enc := cipher.NewCBCEncrypter(block, iv)
content := PKCS5Padding([]byte(message), block.BlockSize())
crypted := make([]byte, len(content))
enc.CryptBlocks(crypted, content)
return base64.StdEncoding.EncodeToString(crypted)
}
func DecryptWithAES(key, message string) string {
hash := md5.New()
hash.Write([]byte(key))
keyData := hash.Sum(nil)
block, err := aes.NewCipher(keyData)
if err != nil {
panic(err)
}
iv := []byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f}
messageData, _ := base64.StdEncoding.DecodeString(message)
dec := cipher.NewCBCDecrypter(block, iv)
decrypted := make([]byte, len(messageData))
dec.CryptBlocks(decrypted, messageData)
return string(PKCS5Unpadding(decrypted))
}
func PKCS5Padding(ciphertext []byte, blockSize int) []byte {
padding := blockSize - len(ciphertext)%blockSize
padtext := bytes.Repeat([]byte{byte(padding)}, padding)
return append(ciphertext, padtext...)
}
func PKCS5Unpadding(encrypt []byte) []byte {
padding := encrypt[len(encrypt)-1]
return encrypt[:len(encrypt)-int(padding)]
}
func ParseString(value interface{}) (string, error) {
switch value.(type) {
case string:
return value.(string), nil
default:
return "", fmt.Errorf("unable to casting number %v (type %T)", value, value)
}
}
func RsaEncrypt(data []byte) string {
pubKey, err := ioutil.ReadFile("../public.pem")
if err != nil {
log.Fatal(err.Error())
}
block, _ := pem.Decode(pubKey) //将密钥解析成公钥实例
if block == nil {
fmt.Println("public key error")
return ""
}
pubInterface, err := x509.ParsePKIXPublicKey(block.Bytes) //解析pem.Decode()返回的Block指针实例
if err != nil {
fmt.Println(err)
return ""
}
pub := pubInterface.(*rsa.PublicKey)
res, err := rsa.EncryptOAEP(sha1.New(), rand.Reader, pub, data, nil)
if err != nil {
fmt.Println(err)
return ""
}
return base64.StdEncoding.EncodeToString(res)
}
| e()
}
}
}
func (s *socketClient) start(conn protocol.Conn, req | conditional_block |
client.go | package main
import (
"bytes"
"crypto/aes"
"crypto/cipher"
"crypto/md5"
"crypto/rand"
"crypto/rsa"
"crypto/sha1"
"crypto/x509"
"encoding/base64"
"encoding/pem"
"flag"
"fmt"
"io/ioutil"
"log"
"math"
"net/http"
"net/url"
"os"
"os/exec"
"sync"
"sync/atomic"
"time"
"remote-terminal-client/protocol"
"github.com/gorilla/websocket"
"github.com/kr/pty"
)
const (
stateOpen uint32 = iota
stateConnecting
stateReady
stateReconnecting
stateClose
)
const (
EventOpen string = "open"
EventConnect = "connect"
EventReconnect = "reconnect"
EventError = "error"
)
type Listener func(args ...interface{})
type emitter struct {
listeners map[string][]Listener
m sync.RWMutex
}
type option struct {
AutoReconnect bool
MaxReconnections int32
}
var defaultOption = &option{
AutoReconnect: true,
MaxReconnections: math.MaxInt32,
}
type socketClient struct {
emitter
state uint32
url *url.URL
option *option
transprot protocol.Transport
outChan chan *protocol.Packet
closeChan chan bool
}
var connection *websocket.Conn
//---------------------虚拟终端--------------------------
type wsPty struct {
Cmd *exec.Cmd // pty builds on os.exec
Pty *os.File // a pty is simply an os.File
}
func (wp *wsPty) Start() {
var err error
args := flag.Args()
wp.Cmd = exec.Command(cmdFlag, args...)
wp.Cmd.Env = append(os.Environ(), "TERM=xterm")
wp.Pty, err = pty.Start(wp.Cmd)
if err != nil {
log.Fatalf("Failed to start command: %s\n", err)
}
}
func (wp *wsPty) Stop() {
wp.Pty.Close()
wp.Cmd.Wait()
}
var cmdFlag string
var messageData interface{}
func init() {
flag.StringVar(&cmdFlag, "cmd", "/bin/bash", "command to execute on slave side of the pty")
}
func main() {
wp := wsPty{}
wp.Start()
var conHd = make(map[string]*websocket.Conn)
fmt.Println(RsaEncrypt([]byte("aiyouwei")))
var Header http.Header = map[string][]string{
"moja": {"ccccc, asdasdasdasd"},
"terminal": {"en-esadasdasdwrw"},
"success": {"dasdadas", "wdsadaderew"},
"ticket": {RsaEncrypt([]byte("aiyouwei"))},
}
s, err := Socket("ws://127.0.0.1:3000")
if err != nil {
panic(err)
}
//s.Connect(Header)
//建立主连接
if atomic.CompareAndSwapUint32(&s.state, stateOpen, stateConnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), Header)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateConnecting, Header)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateConnecting, stateReady) {
go s.start(conn, Header)
s.emit(EventConnect)
} else {
conn.Close()
}
}
//建立子连接
go func() {
for {
//每次轮训需要判断连接句柄是否存在
//s, _ := ParseString(messageData)
//fmt.Println("bbbbbbbbbbbbbbbbbbbbbbbbbbbbb")
// in := []byte(s)
// var raw = make(map[string]interface{})
// json.Unmarshal(in, &raw)
// fmt.Println(raw["subconn"])
if messageData == "subconn" {
sub, err := Socket("ws://127.0.0.1:3000?a=sub")
if err != nil {
panic(err)
}
if atomic.CompareAndSwapUint32(&sub.state, stateOpen, stateConnecting) {
subConn, c, err := sub.transprot.Dial(sub.url.String(), Header)
conHd["1"] = c
if err != nil {
sub.emit(EventError, err)
go sub.reconnect(stateConnecting, Header)
return
}
if atomic.CompareAndSwapUint32(&sub.state, stateConnecting, stateReady) {
go sub.start(subConn, Header)
sub.emit(EventConnect)
} else {
subConn.Close()
}
}
fmt.Println("pppppppppppppppppppppppppppp")
fmt.Println(conHd["1"])
sub.On("message", func(args ...interface{}) {
enResult, _ := ParseString(args[0])
messageData = DecryptWithAES("asdasdasdasdasd", enResult)
//fmt.Println(cmd)
//wp.Pty.Write([]byte(cmd))
})
} else if messageData == "cmd" {
fmt.Println("wqeqweqwqw")
} else {
// fmt.Println("qweqwerrrrtytyyyqwwetrtyutuiop")
// decodeBytes, err := base64.StdEncoding.DecodeString(s)
// if err != nil {
// log.Fatalln(err)
// }
// fmt.Println(string(decodeBytes))
}
}
}()
input := []byte("testtttt")
// 演示base64编码
encodeString := base64.StdEncoding.EncodeToString(input)
s.Emit("messgae", encodeString)
//主连接接收消息类型
s.On("message", func(args ...interface{}) {
enResult, _ := ParseString(args[0])
messageData = DecryptWithAES("asdasdasdasdasd", enResult)
//fmt.Println(cmd)
//wp.Pty.Write([]byte(cmd))
})
go func() {
resBuf := make([]byte, 1024)
for {
fmt.Println(string(resBuf))
n, err := wp.Pty.Read(resBuf)
if err != nil {
log.Printf("Failed to read from pty master: %s", err)
return
}
out := make([]byte, base64.StdEncoding.EncodedLen(n))
base64.StdEncoding.Encode(out, resBuf[0:n])
s.Emit("result", string(resBuf[0:n]))
}
}()
for {
}
}
func (e *emitter) On(event string, listener Listener) {
e.m.Lock()
defer e.m.Unlock()
listeners, ok := e.listeners[event]
if ok {
listeners = append(listeners, listener)
} else {
listeners = []Listener{listener}
}
e.listeners[event] = listeners
}
func (e *emitter) emit(event string, args ...interface{}) bool {
e.m.RLock()
listeners, ok := e.listeners[event]
if ok {
for _, listener := range listeners {
listener(args...)
}
}
e.m.RUnlock()
return ok
}
func Socket(urlstring string) (*socketClient, error) {
u, err := url.Parse(urlstring)
if err != nil {
return nil, err
}
u.Path = "/socket.io/"
q := u.Query()
q.Add("EIO", "3")
q.Add("transport", "websocket")
u.RawQuery = q.Encode()
return &socketClient{
emitter: emitter{listeners: make(map[string][]Listener)},
url: u,
option: defaultOption,
transprot: protocol.NewWebSocketTransport(),
outChan: make(chan *protocol.Packet, 64),
closeChan: make(chan bool),
}, nil
}
func (s *socketClient) Connect(requestHeader http.Header) {
if atomic.CompareAndSwapUint32(&s.state, stateOpen, stateConnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), requestHeader)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateConnecting, requestHeader)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateConnecting, stateReady) {
go s.start(conn, requestHeader)
s.emit(EventConnect)
} else {
conn.Close()
}
}
}
func (s *socketClient) Disconnect() {
atomic.StoreUint32(&s.state, stateClose)
close(s.outChan)
close(s.closeChan)
}
func (s *socketClient) Emit(event string, args ...interface{}) {
if atomic.LoadUint32(&s.state) == stateReady && !s.emit(event, args) {
m := &protocol.Message{
Type: protocol.MessageTypeEvent,
Namespace: "/",
ID: -1,
Event: event,
Payloads: args,
}
p, err := m.Encode()
if err != nil {
s.emit(EventError, err)
} else {
s.outChan <- p
}
}
}
func (s *socketClient) reconnect(state uint32, requestHeader http.Header) {
time.Sleep(time.Second)
if atomic.CompareAndSwapUint32(&s.state, state, stateReconnecting) {
conn, c, err := s.transprot.Dial(s.url.String(), requestHeader)
connection = c
if err != nil {
s.emit(EventError, err)
go s.reconnect(stateReconnecting, requestHeader)
return
}
if atomic.CompareAndSwapUint32(&s.state, stateReconnecting, stateReady) {
go s.start(conn, requestHeader)
s.emit(EventReconnect)
} else {
conn.Close()
}
}
}
func (s *socketClient) start(conn protocol.Conn, requestHeader http.Header) {
stopper := make(chan bool)
go s.startRead(conn, stopper)
go s.startWrite(conn, stopper)
select {
case <-stopper:
go s.reconnect(stateReady, requestHeader)
conn.Close()
case <-s.closeChan:
conn.Close()
}
}
func (s *socketClient) startRead(conn protocol.Conn, stopper chan bool) {
defer func() {
recover()
}()
for atomic.LoadUint32(&s.state) == stateReady {
| nc() {
recover()
}()
for atomic.LoadUint32(&s.state) == stateReady {
select {
case <-stopper:
return
case p, ok := <-s.outChan:
if !ok {
return
}
err := conn.Write(p)
if err != nil {
s.emit(EventError, err)
close(stopper)
return
}
}
}
}
func (s *socketClient) startPing(h *protocol.Handshake, stopper chan bool) {
defer func() {
recover()
}()
for {
time.Sleep(time.Duration(h.PingInterval) * time.Millisecond)
select {
case <-stopper:
return
case <-s.closeChan:
return
default:
}
if atomic.LoadUint32(&s.state) != stateReady {
return
}
s.outChan <- protocol.NewPingPacket()
}
}
func EncryptWithAES(key, message string) string {
hash := md5.New()
hash.Write([]byte(key))
keyData := hash.Sum(nil)
block, err := aes.NewCipher(keyData)
if err != nil {
panic(err)
}
iv := []byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f}
enc := cipher.NewCBCEncrypter(block, iv)
content := PKCS5Padding([]byte(message), block.BlockSize())
crypted := make([]byte, len(content))
enc.CryptBlocks(crypted, content)
return base64.StdEncoding.EncodeToString(crypted)
}
func DecryptWithAES(key, message string) string {
hash := md5.New()
hash.Write([]byte(key))
keyData := hash.Sum(nil)
block, err := aes.NewCipher(keyData)
if err != nil {
panic(err)
}
iv := []byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f}
messageData, _ := base64.StdEncoding.DecodeString(message)
dec := cipher.NewCBCDecrypter(block, iv)
decrypted := make([]byte, len(messageData))
dec.CryptBlocks(decrypted, messageData)
return string(PKCS5Unpadding(decrypted))
}
func PKCS5Padding(ciphertext []byte, blockSize int) []byte {
padding := blockSize - len(ciphertext)%blockSize
padtext := bytes.Repeat([]byte{byte(padding)}, padding)
return append(ciphertext, padtext...)
}
func PKCS5Unpadding(encrypt []byte) []byte {
padding := encrypt[len(encrypt)-1]
return encrypt[:len(encrypt)-int(padding)]
}
func ParseString(value interface{}) (string, error) {
switch value.(type) {
case string:
return value.(string), nil
default:
return "", fmt.Errorf("unable to casting number %v (type %T)", value, value)
}
}
func RsaEncrypt(data []byte) string {
pubKey, err := ioutil.ReadFile("../public.pem")
if err != nil {
log.Fatal(err.Error())
}
block, _ := pem.Decode(pubKey) //将密钥解析成公钥实例
if block == nil {
fmt.Println("public key error")
return ""
}
pubInterface, err := x509.ParsePKIXPublicKey(block.Bytes) //解析pem.Decode()返回的Block指针实例
if err != nil {
fmt.Println(err)
return ""
}
pub := pubInterface.(*rsa.PublicKey)
res, err := rsa.EncryptOAEP(sha1.New(), rand.Reader, pub, data, nil)
if err != nil {
fmt.Println(err)
return ""
}
return base64.StdEncoding.EncodeToString(res)
}
| p, err := conn.Read()
if err != nil {
s.emit(EventError, err)
close(stopper)
return
}
switch p.Type {
case protocol.PacketTypeOpen:
h, err := p.DecodeHandshake()
if err != nil {
s.emit(EventError, err)
} else {
go s.startPing(h, stopper)
}
case protocol.PacketTypePing:
s.outChan <- protocol.NewPongPacket()
case protocol.PacketTypeMessage:
m, err := p.DecodeMessage()
if err != nil {
s.emit(EventError, err)
} else {
s.emit(m.Event, m.Payloads...)
}
}
}
}
func (s *socketClient) startWrite(conn protocol.Conn, stopper chan bool) {
defer fu | identifier_body |
contfilter.go | package main
import (
"encoding/json"
"flag"
"fmt"
"io"
"log"
"os"
"strconv"
"strings"
"time"
)
// Args holds all command-line options for contfilter (populated in init()
// via the flag package).
type Args struct {
    Sample      string  // sample BAM, sorted by read name (required)
    Margin      float64 // how much better the sample match must score
    MinLength   int     // minimum alignment length to consider
    MaxDist     int     // maximum edit distance for a sample match
    Limit       int     // cap on sample reads considered (0 = unlimited)
    Penalty     float64 // multiplier penalizing edit distance in the score
    Output      string  // output BAM path (required)
    Ercc        bool    // drop ERCC spike-in mappings before filtering
    LogFilename string  // optional log file (default stderr)
    Verbose     bool    // per-read decision logging (requires -log)
}
var args = Args{}
var logger *log.Logger
// init registers all command-line flags (values land in the package-level
// `args` struct) and installs a minimal usage message.
func init() {
    log.SetFlags(0)
    flag.StringVar(&args.Sample, "sample", "", "BAM file of the sample you want to filter (sorted by name, required)")
    flag.Float64Var(&args.Margin, "margin", 1.0, "how much better sample needs to be matched")
    flag.IntVar(&args.MinLength, "min-len", 60, "min length for an alignment")
    flag.IntVar(&args.MaxDist, "max-edit-dist", 5, "max edit distance for a sample match")
    flag.IntVar(&args.Limit, "limit", 0, "limit the number of sample reads considered (0 = no limit)")
    flag.Float64Var(&args.Penalty, "edit-penalty", 2.0, "multiple for how to penalize edit distance")
    flag.StringVar(&args.Output, "output", "", "output bam file (required)")
    flag.StringVar(&args.LogFilename, "log", "", "write parameters and stats to a log file")
    flag.BoolVar(&args.Verbose, "verbose", false, "keep a record of what happens to each read in the log (must give -log name)")
    flag.BoolVar(&args.Ercc, "ercc", false, "exclude ERCC mappings from sample before filtering")
    flag.Usage = func() {
        log.Println("usage: contfilter [options] cont1.bam cont2.bam")
        flag.PrintDefaults()
    }
}
// benchmark logs the time elapsed since start under the given label;
// intended for use as `defer benchmark(time.Now(), "phase")`.
func benchmark(start time.Time, label string) {
    elapsed := time.Since(start)
    logger.Printf("%s took %s", label, elapsed)
}
// extract pulls the alignment length (length of the SEQ field, column 10)
// and the edit distance (STAR's nM:i: tag, expected in column 15) out of a
// tab-split SAM record.
//
// Fix: guard the tag-prefix check with a length test — `edit_tag[:5]`
// previously panicked on any tag field shorter than five bytes.
func extract(row []string) (int, int, error) {
    if len(row) < 15 {
        return 0, 0, fmt.Errorf("too few fields")
    }
    match_len := len(row[9])
    edit_tag := row[14]
    if len(edit_tag) < 5 || edit_tag[:5] != "nM:i:" {
        return 0, 0, fmt.Errorf("malformed edit distance tag: %s", edit_tag)
    }
    edit_dist, err := strconv.Atoi(edit_tag[5:])
    if err != nil {
        return 0, 0, fmt.Errorf("failed to parse edit dist: %s", edit_tag)
    }
    return match_len, edit_dist, nil
}
// OpenLogger initializes the package-level logger: stderr by default, or
// the file named by -log when given. Failing to create the log file is
// fatal.
func OpenLogger() {
    if args.LogFilename == "" {
        logger = log.New(os.Stderr, "", 0)
    } else {
        logfile, err := os.Create(args.LogFilename)
        if err != nil {
            log.Fatal(err)
        }
        // NOTE(review): logfile is never closed; acceptable for a
        // process-lifetime log, but worth confirming.
        logger = log.New(logfile, "", 0)
    }
}
// LogArguments records the exact command line plus a JSON dump of the
// parsed arguments to the logger, so a run is reproducible from its log.
func LogArguments() {
    logger.Println("command:", strings.Join(os.Args, " "))
    blob, err := json.MarshalIndent(args, "", " ")
    if err != nil {
        logger.Fatal("failed to marshal arguments")
    }
    logger.Println(string(blob))
}
// MatchesErcc reports whether ERCC filtering is enabled (-ercc) and either
// mate is aligned to an ERCC spike-in reference (SAM RNAME, column 3).
// mate2 may be nil for unpaired reads.
func MatchesErcc(mate1, mate2 []string) bool {
    if !args.Ercc {
        return false
    }
    if strings.Contains(mate1[2], "ERCC") {
        return true
    }
    return mate2 != nil && strings.Contains(mate2[2], "ERCC")
}
func main() {
var kept_percent float64
flag.Parse()
contamination := flag.Args()
startedAt := time.Now()
if len(contamination) == 0 {
logger.Println("must specify at least one contamination mapping BAM file")
os.Exit(1)
}
if args.Output == "" {
logger.Println("must specify -output file")
os.Exit(1)
}
OpenLogger()
LogArguments()
scanner := BamScanner{}
if args.Sample == "" {
scanner.OpenStdin()
} else {
if err := scanner.OpenBam(args.Sample); err != nil {
logger.Fatal(err)
}
}
reads_found := make([]int, len(contamination))
reads_filtered := make([]int, len(contamination))
contScanners := make([]BamScanner, len(contamination))
rejected := make([]bool, len(contamination))
found := make([]bool, len(contamination))
for c := 0; c < len(contamination); c++ {
if err := contScanners[c].OpenBam(contamination[c]); err != nil {
logger.Fatal(err)
}
reads_found[c] = 0
reads_filtered[c] = 0
}
header, err := ReadBamHeader(args.Sample)
if err != nil |
out := BamWriter{}
outfp, err := out.Open(args.Output)
if err != nil {
logger.Fatal(err)
}
io.WriteString(outfp, header)
reads_kept := 0
read_mates_kept := 0
total_reads := 0
total_read_mates := 0
ercc := 0
considered := 0
too_short := 0
too_diverged := 0
err = func() error {
defer scanner.Done()
defer benchmark(startedAt, "processing")
for {
if total_reads > 0 && total_reads%100000 == 0 {
kept_percent = float64(reads_kept) / float64(considered) * 100
logger.Printf("considered %d out of %d so far, kept %0.1f%%\n", considered, total_reads, kept_percent)
}
if args.Limit > 0 && args.Limit == total_reads {
return nil
}
// Set up flags for outcomes wrt each potential source of contamination.
for c, _ := range contamination {
rejected[c] = false
found[c] = false
}
// Read the first mate in a paired end run.
mate1, err := scanner.Record()
if err != nil {
return fmt.Errorf("failed to read from sample BAM: %v after %d lines", err, total_reads)
}
if scanner.Closed {
return nil
}
scanner.Ratchet()
read := mate1[0]
total_reads++
total_read_mates++
// See if we have the second mate of this pair.
mate2, err := scanner.Find(read)
if err != nil {
return fmt.Errorf("failed to read from sample BAM: %v after %d lines", err, total_reads)
}
if mate2 != nil {
scanner.Ratchet()
total_read_mates++
}
var mate1_len int
var mate1_edit_dist int
var mate2_len int
var mate2_edit_dist int
mate1_len, mate1_edit_dist, err = extract(mate1)
if err != nil {
return err
}
if args.Verbose {
logger.Println("found read", read, "mate 1:")
logger.Println(strings.Join(mate1, "\t"))
}
if mate2 != nil {
mate2_len, mate2_edit_dist, err = extract(mate2)
if err != nil {
return err
}
if args.Verbose {
logger.Println("found read", read, "mate 2:")
logger.Println(strings.Join(mate2, "\t"))
}
}
// Filter for ERCC if either mate is mapped to ERCC.
if MatchesErcc(mate1, mate2) {
ercc++
if args.Verbose {
logger.Println("ERCC, rejecting")
}
continue
}
if mate1_len < args.MinLength {
// If we don't have mate2 or if it's also too short, we mark this pair as too short.
if mate2 == nil || mate2_len < args.MinLength {
if args.Verbose {
logger.Println("too short, rejecting")
}
too_short++
continue
}
if args.Verbose {
logger.Println("promoting mate 2")
}
// Mate2 is okay, so we promote it to mate1, and forget mate2
mate1_len = mate2_len
mate1_edit_dist = mate2_edit_dist
mate1 = mate2
mate2 = nil
}
if mate2 != nil && mate2_len < args.MinLength {
// We have a mate2, but it doesn't meet the min length criteria, just forget it.
mate2 = nil
if args.Verbose {
logger.Println("mate 2 too short, forgetting")
}
}
// We treate the filter for edit distance the same way as length.
if mate1_edit_dist > args.MaxDist {
if mate2 == nil || mate2_edit_dist > args.MaxDist {
too_diverged++
if args.Verbose {
logger.Println("too divergent, rejecting")
}
continue
}
if args.Verbose {
logger.Println("promothing mate 2")
}
// Mate2 is okay, so we promote it to mate1, and forget mate2
mate1_len = mate2_len
mate1_edit_dist = mate2_edit_dist
mate1 = mate2
mate2 = nil
}
if mate2 != nil && mate2_edit_dist > args.MaxDist {
// We have a mate2, but it doesn't meet the max edit distance criteria, just forget it.
mate2 = nil
if args.Verbose {
logger.Println("mate 2, too diverged, forgetting")
}
}
// If we get this far it means the read met the preliminary filtering criteria.
considered++
// Compare agains the best score for the read pair.
mate1_score := float64(mate1_len) - float64(mate1_edit_dist)*args.Penalty
var mate2_score float64
best_score := mate1_score
best_len := mate1_len
best_edit_dist := mate1_edit_dist
if mate2 != nil {
mate2_score = float64(mate2_len) - float64(mate2_edit_dist)*args.Penalty
if mate2_score > mate1_score {
best_score = mate2_score
best_len = mate2_len
best_edit_dist = mate2_edit_dist
if args.Verbose {
logger.Printf("mate 2 has better score (%f) than mate 1 (%f)\n", mate2_score, mate1_score)
}
}
}
// Reads in the sample BAM will be rejected if either mate in any of the
// contamination BAM files maps better than in the sampel BAM file.
was_rejected := false
for c := 0; c < len(contamination); c++ {
m := 0
for {
mate, err := contScanners[c].Find(read)
if err != nil {
logger.Fatal(err)
}
if mate == nil {
// No more alignments for this read in this contamination mapping
break
}
m++
if args.Verbose {
logger.Printf("found mapping %d for %s in %s\n", m, mate[0], contamination[c])
logger.Println(strings.Join(mate, "\t"))
}
if !found[c] {
found[c] = true
reads_found[c]++
}
length, edit_dist, err := extract(mate)
if err != nil {
logger.Fatalf("failed to read from %s: %v", contamination[c], err)
}
if length >= args.MinLength {
score := float64(length) - float64(edit_dist)*args.Penalty
if args.Verbose {
logger.Printf("mapping meets length criteria and has score %f\n", score)
}
if best_score <= score+args.Margin {
if args.Verbose {
logger.Println("mapping has better score")
}
if !rejected[c] {
reads_filtered[c]++
rejected[c] = true
was_rejected = true
if args.Verbose {
logger.Printf("read %s with length %d and edit distance %d was rejected "+
"with score %0.1f because in %s it had a score of %0.1f with length "+
"%d and edit distance %d\n",
read, best_len, best_edit_dist, best_score, contamination[c],
score, length, edit_dist)
}
}
} else {
if args.Verbose {
logger.Println("mapping has worse score")
}
}
}
}
}
if !was_rejected {
// This read is okay, output it to the output BAM file.
_, err := fmt.Fprintf(outfp, "%s\n", strings.Join(mate1, "\t"))
if err != nil {
return err
}
reads_kept++
read_mates_kept++
if mate2 != nil {
_, err := fmt.Fprintf(outfp, "%s\n", strings.Join(mate2, "\t"))
if err != nil {
return err
}
read_mates_kept++
}
if args.Verbose {
logger.Printf("kept read %s with length %d and edit distance %d and score %0.1f\n",
read, best_len, best_edit_dist, best_score)
}
}
}
}()
if err != nil {
logger.Fatal(err)
}
outfp.Close()
out.Wait()
logger.Println("Preliminary filtering:")
if args.Ercc {
erccPerc := float64(ercc) / float64(total_reads) * 100
logger.Printf("filtered out %d ERCC reads (%0.1f%%) before comparing to contamination\n", ercc, erccPerc)
}
shortPerc := float64(too_short) / float64(total_reads) * 100
logger.Printf("filtered out %d reads (%0.1f%%) becase their alignment was too short\n", too_short, shortPerc)
divergedPerc := float64(too_diverged) / float64(total_reads) * 100
logger.Printf("filtered out %d reads (%0.1f%%) becase they were too diverged\n", too_diverged, divergedPerc)
logger.Printf("%d reads remaining after preliminary filtering\n", considered)
logger.Println("Contamination filtering:")
for c, cont := range contamination {
n := reads_filtered[c]
perc := float64(n) / float64(considered) * 100
found_perc := float64(reads_found[c]) / float64(considered) * 100
logger.Printf("found %d of %d reads in %s (%0.1f%%)\n", reads_found[c], considered, cont, found_perc)
logger.Printf("rejected %d of %d reads from %s (%0.1f%%)\n", reads_filtered[c], considered, cont, perc)
}
kept_percent = float64(reads_kept) / float64(considered) * 100
total_percent := float64(reads_kept) / float64(total_reads) * 100
logger.Printf("kept %d of %d reads (%0.1f%%), which is %0.1f%% of the %d reads that met preliminary filtering\n",
reads_kept, total_reads, total_percent, kept_percent, considered)
total_mates_percent := float64(read_mates_kept) / float64(total_read_mates) * 100
logger.Printf("kept %d of %d read mates (%0.1f%%)", read_mates_kept, total_read_mates, total_mates_percent)
input_mates_per_pair := float64(total_read_mates) / float64(total_reads)
output_mates_per_pair := float64(read_mates_kept) / float64(reads_kept)
logger.Printf("observed %0.1f mates/read on the input end and %0.1f mates/read on the output end\n",
input_mates_per_pair, output_mates_per_pair)
logger.Println("machine parsable stats:")
stats := []int{
total_reads,
total_read_mates,
ercc,
too_short,
too_diverged,
considered,
reads_kept,
read_mates_kept,
}
stats = append(stats, reads_found...)
stats = append(stats, reads_filtered...)
statsStr := "stats"
for _, s := range stats {
statsStr += fmt.Sprintf("\t%d", s)
}
logger.Println(statsStr)
}
| {
logger.Fatal(err)
} | conditional_block |
contfilter.go | package main
import (
"encoding/json"
"flag"
"fmt"
"io"
"log"
"os"
"strconv"
"strings"
"time"
)
type Args struct {
Sample string
Margin float64
MinLength int
MaxDist int
Limit int
Penalty float64
Output string
Ercc bool
LogFilename string
Verbose bool
}
var args = Args{}
var logger *log.Logger
func init() {
log.SetFlags(0)
flag.StringVar(&args.Sample, "sample", "", "BAM file of the sample you want to filter (sorted by name, required)")
flag.Float64Var(&args.Margin, "margin", 1.0, "how much better sample needs to be matched")
flag.IntVar(&args.MinLength, "min-len", 60, "min length for an alignment")
flag.IntVar(&args.MaxDist, "max-edit-dist", 5, "max edit distance for a sample match")
flag.IntVar(&args.Limit, "limit", 0, "limit the number of sample reads considered (0 = no limit)")
flag.Float64Var(&args.Penalty, "edit-penalty", 2.0, "multiple for how to penalize edit distance")
flag.StringVar(&args.Output, "output", "", "output bam file (required)")
flag.StringVar(&args.LogFilename, "log", "", "write parameters and stats to a log file")
flag.BoolVar(&args.Verbose, "verbose", false, "keep a record of what happens to each read in the log (must give -log name)")
flag.BoolVar(&args.Ercc, "ercc", false, "exclude ERCC mappings from sample before filtering")
flag.Usage = func() {
log.Println("usage: contfilter [options] cont1.bam cont2.bam")
flag.PrintDefaults()
}
}
func benchmark(start time.Time, label string) {
elapsed := time.Since(start)
logger.Printf("%s took %s", label, elapsed)
}
func extract(row []string) (int, int, error) {
if len(row) < 15 {
return 0, 0, fmt.Errorf("too few fields")
}
match_len := len(row[9])
edit_tag := row[14]
if edit_tag[:5] != "nM:i:" {
return 0, 0, fmt.Errorf("malformed edit distance tag: %s", edit_tag)
}
edit_dist, err := strconv.Atoi(edit_tag[5:])
if err != nil {
return 0, 0, fmt.Errorf("failed to parse edit dist: %s", edit_tag)
}
return match_len, edit_dist, nil
}
func OpenLogger() {
if args.LogFilename == "" {
logger = log.New(os.Stderr, "", 0)
} else {
logfile, err := os.Create(args.LogFilename)
if err != nil {
log.Fatal(err)
}
logger = log.New(logfile, "", 0)
}
}
func LogArguments() {
logger.Println("command:", strings.Join(os.Args, " "))
blob, err := json.MarshalIndent(args, "", " ")
if err != nil {
logger.Fatal("failed to marshal arguments")
}
logger.Println(string(blob))
}
func MatchesErcc(mate1, mate2 []string) bool {
return args.Ercc &&
(strings.Contains(mate1[2], "ERCC") || (mate2 != nil && strings.Contains(mate2[2], "ERCC"))) |
func main() {
var kept_percent float64
flag.Parse()
contamination := flag.Args()
startedAt := time.Now()
if len(contamination) == 0 {
logger.Println("must specify at least one contamination mapping BAM file")
os.Exit(1)
}
if args.Output == "" {
logger.Println("must specify -output file")
os.Exit(1)
}
OpenLogger()
LogArguments()
scanner := BamScanner{}
if args.Sample == "" {
scanner.OpenStdin()
} else {
if err := scanner.OpenBam(args.Sample); err != nil {
logger.Fatal(err)
}
}
reads_found := make([]int, len(contamination))
reads_filtered := make([]int, len(contamination))
contScanners := make([]BamScanner, len(contamination))
rejected := make([]bool, len(contamination))
found := make([]bool, len(contamination))
for c := 0; c < len(contamination); c++ {
if err := contScanners[c].OpenBam(contamination[c]); err != nil {
logger.Fatal(err)
}
reads_found[c] = 0
reads_filtered[c] = 0
}
header, err := ReadBamHeader(args.Sample)
if err != nil {
logger.Fatal(err)
}
out := BamWriter{}
outfp, err := out.Open(args.Output)
if err != nil {
logger.Fatal(err)
}
io.WriteString(outfp, header)
reads_kept := 0
read_mates_kept := 0
total_reads := 0
total_read_mates := 0
ercc := 0
considered := 0
too_short := 0
too_diverged := 0
err = func() error {
defer scanner.Done()
defer benchmark(startedAt, "processing")
for {
if total_reads > 0 && total_reads%100000 == 0 {
kept_percent = float64(reads_kept) / float64(considered) * 100
logger.Printf("considered %d out of %d so far, kept %0.1f%%\n", considered, total_reads, kept_percent)
}
if args.Limit > 0 && args.Limit == total_reads {
return nil
}
// Set up flags for outcomes wrt each potential source of contamination.
for c, _ := range contamination {
rejected[c] = false
found[c] = false
}
// Read the first mate in a paired end run.
mate1, err := scanner.Record()
if err != nil {
return fmt.Errorf("failed to read from sample BAM: %v after %d lines", err, total_reads)
}
if scanner.Closed {
return nil
}
scanner.Ratchet()
read := mate1[0]
total_reads++
total_read_mates++
// See if we have the second mate of this pair.
mate2, err := scanner.Find(read)
if err != nil {
return fmt.Errorf("failed to read from sample BAM: %v after %d lines", err, total_reads)
}
if mate2 != nil {
scanner.Ratchet()
total_read_mates++
}
var mate1_len int
var mate1_edit_dist int
var mate2_len int
var mate2_edit_dist int
mate1_len, mate1_edit_dist, err = extract(mate1)
if err != nil {
return err
}
if args.Verbose {
logger.Println("found read", read, "mate 1:")
logger.Println(strings.Join(mate1, "\t"))
}
if mate2 != nil {
mate2_len, mate2_edit_dist, err = extract(mate2)
if err != nil {
return err
}
if args.Verbose {
logger.Println("found read", read, "mate 2:")
logger.Println(strings.Join(mate2, "\t"))
}
}
// Filter for ERCC if either mate is mapped to ERCC.
if MatchesErcc(mate1, mate2) {
ercc++
if args.Verbose {
logger.Println("ERCC, rejecting")
}
continue
}
if mate1_len < args.MinLength {
// If we don't have mate2 or if it's also too short, we mark this pair as too short.
if mate2 == nil || mate2_len < args.MinLength {
if args.Verbose {
logger.Println("too short, rejecting")
}
too_short++
continue
}
if args.Verbose {
logger.Println("promoting mate 2")
}
// Mate2 is okay, so we promote it to mate1, and forget mate2
mate1_len = mate2_len
mate1_edit_dist = mate2_edit_dist
mate1 = mate2
mate2 = nil
}
if mate2 != nil && mate2_len < args.MinLength {
// We have a mate2, but it doesn't meet the min length criteria, just forget it.
mate2 = nil
if args.Verbose {
logger.Println("mate 2 too short, forgetting")
}
}
// We treate the filter for edit distance the same way as length.
if mate1_edit_dist > args.MaxDist {
if mate2 == nil || mate2_edit_dist > args.MaxDist {
too_diverged++
if args.Verbose {
logger.Println("too divergent, rejecting")
}
continue
}
if args.Verbose {
logger.Println("promothing mate 2")
}
// Mate2 is okay, so we promote it to mate1, and forget mate2
mate1_len = mate2_len
mate1_edit_dist = mate2_edit_dist
mate1 = mate2
mate2 = nil
}
if mate2 != nil && mate2_edit_dist > args.MaxDist {
// We have a mate2, but it doesn't meet the max edit distance criteria, just forget it.
mate2 = nil
if args.Verbose {
logger.Println("mate 2, too diverged, forgetting")
}
}
// If we get this far it means the read met the preliminary filtering criteria.
considered++
// Compare agains the best score for the read pair.
mate1_score := float64(mate1_len) - float64(mate1_edit_dist)*args.Penalty
var mate2_score float64
best_score := mate1_score
best_len := mate1_len
best_edit_dist := mate1_edit_dist
if mate2 != nil {
mate2_score = float64(mate2_len) - float64(mate2_edit_dist)*args.Penalty
if mate2_score > mate1_score {
best_score = mate2_score
best_len = mate2_len
best_edit_dist = mate2_edit_dist
if args.Verbose {
logger.Printf("mate 2 has better score (%f) than mate 1 (%f)\n", mate2_score, mate1_score)
}
}
}
// Reads in the sample BAM will be rejected if either mate in any of the
// contamination BAM files maps better than in the sampel BAM file.
was_rejected := false
for c := 0; c < len(contamination); c++ {
m := 0
for {
mate, err := contScanners[c].Find(read)
if err != nil {
logger.Fatal(err)
}
if mate == nil {
// No more alignments for this read in this contamination mapping
break
}
m++
if args.Verbose {
logger.Printf("found mapping %d for %s in %s\n", m, mate[0], contamination[c])
logger.Println(strings.Join(mate, "\t"))
}
if !found[c] {
found[c] = true
reads_found[c]++
}
length, edit_dist, err := extract(mate)
if err != nil {
logger.Fatalf("failed to read from %s: %v", contamination[c], err)
}
if length >= args.MinLength {
score := float64(length) - float64(edit_dist)*args.Penalty
if args.Verbose {
logger.Printf("mapping meets length criteria and has score %f\n", score)
}
if best_score <= score+args.Margin {
if args.Verbose {
logger.Println("mapping has better score")
}
if !rejected[c] {
reads_filtered[c]++
rejected[c] = true
was_rejected = true
if args.Verbose {
logger.Printf("read %s with length %d and edit distance %d was rejected "+
"with score %0.1f because in %s it had a score of %0.1f with length "+
"%d and edit distance %d\n",
read, best_len, best_edit_dist, best_score, contamination[c],
score, length, edit_dist)
}
}
} else {
if args.Verbose {
logger.Println("mapping has worse score")
}
}
}
}
}
if !was_rejected {
// This read is okay, output it to the output BAM file.
_, err := fmt.Fprintf(outfp, "%s\n", strings.Join(mate1, "\t"))
if err != nil {
return err
}
reads_kept++
read_mates_kept++
if mate2 != nil {
_, err := fmt.Fprintf(outfp, "%s\n", strings.Join(mate2, "\t"))
if err != nil {
return err
}
read_mates_kept++
}
if args.Verbose {
logger.Printf("kept read %s with length %d and edit distance %d and score %0.1f\n",
read, best_len, best_edit_dist, best_score)
}
}
}
}()
if err != nil {
logger.Fatal(err)
}
outfp.Close()
out.Wait()
logger.Println("Preliminary filtering:")
if args.Ercc {
erccPerc := float64(ercc) / float64(total_reads) * 100
logger.Printf("filtered out %d ERCC reads (%0.1f%%) before comparing to contamination\n", ercc, erccPerc)
}
shortPerc := float64(too_short) / float64(total_reads) * 100
logger.Printf("filtered out %d reads (%0.1f%%) becase their alignment was too short\n", too_short, shortPerc)
divergedPerc := float64(too_diverged) / float64(total_reads) * 100
logger.Printf("filtered out %d reads (%0.1f%%) becase they were too diverged\n", too_diverged, divergedPerc)
logger.Printf("%d reads remaining after preliminary filtering\n", considered)
logger.Println("Contamination filtering:")
for c, cont := range contamination {
n := reads_filtered[c]
perc := float64(n) / float64(considered) * 100
found_perc := float64(reads_found[c]) / float64(considered) * 100
logger.Printf("found %d of %d reads in %s (%0.1f%%)\n", reads_found[c], considered, cont, found_perc)
logger.Printf("rejected %d of %d reads from %s (%0.1f%%)\n", reads_filtered[c], considered, cont, perc)
}
kept_percent = float64(reads_kept) / float64(considered) * 100
total_percent := float64(reads_kept) / float64(total_reads) * 100
logger.Printf("kept %d of %d reads (%0.1f%%), which is %0.1f%% of the %d reads that met preliminary filtering\n",
reads_kept, total_reads, total_percent, kept_percent, considered)
total_mates_percent := float64(read_mates_kept) / float64(total_read_mates) * 100
logger.Printf("kept %d of %d read mates (%0.1f%%)", read_mates_kept, total_read_mates, total_mates_percent)
input_mates_per_pair := float64(total_read_mates) / float64(total_reads)
output_mates_per_pair := float64(read_mates_kept) / float64(reads_kept)
logger.Printf("observed %0.1f mates/read on the input end and %0.1f mates/read on the output end\n",
input_mates_per_pair, output_mates_per_pair)
logger.Println("machine parsable stats:")
stats := []int{
total_reads,
total_read_mates,
ercc,
too_short,
too_diverged,
considered,
reads_kept,
read_mates_kept,
}
stats = append(stats, reads_found...)
stats = append(stats, reads_filtered...)
statsStr := "stats"
for _, s := range stats {
statsStr += fmt.Sprintf("\t%d", s)
}
logger.Println(statsStr)
} | } | random_line_split |
contfilter.go | package main
import (
"encoding/json"
"flag"
"fmt"
"io"
"log"
"os"
"strconv"
"strings"
"time"
)
type Args struct {
Sample string
Margin float64
MinLength int
MaxDist int
Limit int
Penalty float64
Output string
Ercc bool
LogFilename string
Verbose bool
}
var args = Args{}
var logger *log.Logger
func init() {
log.SetFlags(0)
flag.StringVar(&args.Sample, "sample", "", "BAM file of the sample you want to filter (sorted by name, required)")
flag.Float64Var(&args.Margin, "margin", 1.0, "how much better sample needs to be matched")
flag.IntVar(&args.MinLength, "min-len", 60, "min length for an alignment")
flag.IntVar(&args.MaxDist, "max-edit-dist", 5, "max edit distance for a sample match")
flag.IntVar(&args.Limit, "limit", 0, "limit the number of sample reads considered (0 = no limit)")
flag.Float64Var(&args.Penalty, "edit-penalty", 2.0, "multiple for how to penalize edit distance")
flag.StringVar(&args.Output, "output", "", "output bam file (required)")
flag.StringVar(&args.LogFilename, "log", "", "write parameters and stats to a log file")
flag.BoolVar(&args.Verbose, "verbose", false, "keep a record of what happens to each read in the log (must give -log name)")
flag.BoolVar(&args.Ercc, "ercc", false, "exclude ERCC mappings from sample before filtering")
flag.Usage = func() {
log.Println("usage: contfilter [options] cont1.bam cont2.bam")
flag.PrintDefaults()
}
}
func benchmark(start time.Time, label string) {
elapsed := time.Since(start)
logger.Printf("%s took %s", label, elapsed)
}
func extract(row []string) (int, int, error) {
if len(row) < 15 {
return 0, 0, fmt.Errorf("too few fields")
}
match_len := len(row[9])
edit_tag := row[14]
if edit_tag[:5] != "nM:i:" {
return 0, 0, fmt.Errorf("malformed edit distance tag: %s", edit_tag)
}
edit_dist, err := strconv.Atoi(edit_tag[5:])
if err != nil {
return 0, 0, fmt.Errorf("failed to parse edit dist: %s", edit_tag)
}
return match_len, edit_dist, nil
}
func OpenLogger() {
if args.LogFilename == "" {
logger = log.New(os.Stderr, "", 0)
} else {
logfile, err := os.Create(args.LogFilename)
if err != nil {
log.Fatal(err)
}
logger = log.New(logfile, "", 0)
}
}
func LogArguments() {
logger.Println("command:", strings.Join(os.Args, " "))
blob, err := json.MarshalIndent(args, "", " ")
if err != nil {
logger.Fatal("failed to marshal arguments")
}
logger.Println(string(blob))
}
func MatchesErcc(mate1, mate2 []string) bool {
return args.Ercc &&
(strings.Contains(mate1[2], "ERCC") || (mate2 != nil && strings.Contains(mate2[2], "ERCC")))
}
func main() | {
var kept_percent float64
flag.Parse()
contamination := flag.Args()
startedAt := time.Now()
if len(contamination) == 0 {
logger.Println("must specify at least one contamination mapping BAM file")
os.Exit(1)
}
if args.Output == "" {
logger.Println("must specify -output file")
os.Exit(1)
}
OpenLogger()
LogArguments()
scanner := BamScanner{}
if args.Sample == "" {
scanner.OpenStdin()
} else {
if err := scanner.OpenBam(args.Sample); err != nil {
logger.Fatal(err)
}
}
reads_found := make([]int, len(contamination))
reads_filtered := make([]int, len(contamination))
contScanners := make([]BamScanner, len(contamination))
rejected := make([]bool, len(contamination))
found := make([]bool, len(contamination))
for c := 0; c < len(contamination); c++ {
if err := contScanners[c].OpenBam(contamination[c]); err != nil {
logger.Fatal(err)
}
reads_found[c] = 0
reads_filtered[c] = 0
}
header, err := ReadBamHeader(args.Sample)
if err != nil {
logger.Fatal(err)
}
out := BamWriter{}
outfp, err := out.Open(args.Output)
if err != nil {
logger.Fatal(err)
}
io.WriteString(outfp, header)
reads_kept := 0
read_mates_kept := 0
total_reads := 0
total_read_mates := 0
ercc := 0
considered := 0
too_short := 0
too_diverged := 0
err = func() error {
defer scanner.Done()
defer benchmark(startedAt, "processing")
for {
if total_reads > 0 && total_reads%100000 == 0 {
kept_percent = float64(reads_kept) / float64(considered) * 100
logger.Printf("considered %d out of %d so far, kept %0.1f%%\n", considered, total_reads, kept_percent)
}
if args.Limit > 0 && args.Limit == total_reads {
return nil
}
// Set up flags for outcomes wrt each potential source of contamination.
for c, _ := range contamination {
rejected[c] = false
found[c] = false
}
// Read the first mate in a paired end run.
mate1, err := scanner.Record()
if err != nil {
return fmt.Errorf("failed to read from sample BAM: %v after %d lines", err, total_reads)
}
if scanner.Closed {
return nil
}
scanner.Ratchet()
read := mate1[0]
total_reads++
total_read_mates++
// See if we have the second mate of this pair.
mate2, err := scanner.Find(read)
if err != nil {
return fmt.Errorf("failed to read from sample BAM: %v after %d lines", err, total_reads)
}
if mate2 != nil {
scanner.Ratchet()
total_read_mates++
}
var mate1_len int
var mate1_edit_dist int
var mate2_len int
var mate2_edit_dist int
mate1_len, mate1_edit_dist, err = extract(mate1)
if err != nil {
return err
}
if args.Verbose {
logger.Println("found read", read, "mate 1:")
logger.Println(strings.Join(mate1, "\t"))
}
if mate2 != nil {
mate2_len, mate2_edit_dist, err = extract(mate2)
if err != nil {
return err
}
if args.Verbose {
logger.Println("found read", read, "mate 2:")
logger.Println(strings.Join(mate2, "\t"))
}
}
// Filter for ERCC if either mate is mapped to ERCC.
if MatchesErcc(mate1, mate2) {
ercc++
if args.Verbose {
logger.Println("ERCC, rejecting")
}
continue
}
if mate1_len < args.MinLength {
// If we don't have mate2 or if it's also too short, we mark this pair as too short.
if mate2 == nil || mate2_len < args.MinLength {
if args.Verbose {
logger.Println("too short, rejecting")
}
too_short++
continue
}
if args.Verbose {
logger.Println("promoting mate 2")
}
// Mate2 is okay, so we promote it to mate1, and forget mate2
mate1_len = mate2_len
mate1_edit_dist = mate2_edit_dist
mate1 = mate2
mate2 = nil
}
if mate2 != nil && mate2_len < args.MinLength {
// We have a mate2, but it doesn't meet the min length criteria, just forget it.
mate2 = nil
if args.Verbose {
logger.Println("mate 2 too short, forgetting")
}
}
// We treate the filter for edit distance the same way as length.
if mate1_edit_dist > args.MaxDist {
if mate2 == nil || mate2_edit_dist > args.MaxDist {
too_diverged++
if args.Verbose {
logger.Println("too divergent, rejecting")
}
continue
}
if args.Verbose {
logger.Println("promothing mate 2")
}
// Mate2 is okay, so we promote it to mate1, and forget mate2
mate1_len = mate2_len
mate1_edit_dist = mate2_edit_dist
mate1 = mate2
mate2 = nil
}
if mate2 != nil && mate2_edit_dist > args.MaxDist {
// We have a mate2, but it doesn't meet the max edit distance criteria, just forget it.
mate2 = nil
if args.Verbose {
logger.Println("mate 2, too diverged, forgetting")
}
}
// If we get this far it means the read met the preliminary filtering criteria.
considered++
// Compare agains the best score for the read pair.
mate1_score := float64(mate1_len) - float64(mate1_edit_dist)*args.Penalty
var mate2_score float64
best_score := mate1_score
best_len := mate1_len
best_edit_dist := mate1_edit_dist
if mate2 != nil {
mate2_score = float64(mate2_len) - float64(mate2_edit_dist)*args.Penalty
if mate2_score > mate1_score {
best_score = mate2_score
best_len = mate2_len
best_edit_dist = mate2_edit_dist
if args.Verbose {
logger.Printf("mate 2 has better score (%f) than mate 1 (%f)\n", mate2_score, mate1_score)
}
}
}
// Reads in the sample BAM will be rejected if either mate in any of the
// contamination BAM files maps better than in the sampel BAM file.
was_rejected := false
for c := 0; c < len(contamination); c++ {
m := 0
for {
mate, err := contScanners[c].Find(read)
if err != nil {
logger.Fatal(err)
}
if mate == nil {
// No more alignments for this read in this contamination mapping
break
}
m++
if args.Verbose {
logger.Printf("found mapping %d for %s in %s\n", m, mate[0], contamination[c])
logger.Println(strings.Join(mate, "\t"))
}
if !found[c] {
found[c] = true
reads_found[c]++
}
length, edit_dist, err := extract(mate)
if err != nil {
logger.Fatalf("failed to read from %s: %v", contamination[c], err)
}
if length >= args.MinLength {
score := float64(length) - float64(edit_dist)*args.Penalty
if args.Verbose {
logger.Printf("mapping meets length criteria and has score %f\n", score)
}
if best_score <= score+args.Margin {
if args.Verbose {
logger.Println("mapping has better score")
}
if !rejected[c] {
reads_filtered[c]++
rejected[c] = true
was_rejected = true
if args.Verbose {
logger.Printf("read %s with length %d and edit distance %d was rejected "+
"with score %0.1f because in %s it had a score of %0.1f with length "+
"%d and edit distance %d\n",
read, best_len, best_edit_dist, best_score, contamination[c],
score, length, edit_dist)
}
}
} else {
if args.Verbose {
logger.Println("mapping has worse score")
}
}
}
}
}
if !was_rejected {
// This read is okay, output it to the output BAM file.
_, err := fmt.Fprintf(outfp, "%s\n", strings.Join(mate1, "\t"))
if err != nil {
return err
}
reads_kept++
read_mates_kept++
if mate2 != nil {
_, err := fmt.Fprintf(outfp, "%s\n", strings.Join(mate2, "\t"))
if err != nil {
return err
}
read_mates_kept++
}
if args.Verbose {
logger.Printf("kept read %s with length %d and edit distance %d and score %0.1f\n",
read, best_len, best_edit_dist, best_score)
}
}
}
}()
if err != nil {
logger.Fatal(err)
}
outfp.Close()
out.Wait()
logger.Println("Preliminary filtering:")
if args.Ercc {
erccPerc := float64(ercc) / float64(total_reads) * 100
logger.Printf("filtered out %d ERCC reads (%0.1f%%) before comparing to contamination\n", ercc, erccPerc)
}
shortPerc := float64(too_short) / float64(total_reads) * 100
logger.Printf("filtered out %d reads (%0.1f%%) becase their alignment was too short\n", too_short, shortPerc)
divergedPerc := float64(too_diverged) / float64(total_reads) * 100
logger.Printf("filtered out %d reads (%0.1f%%) becase they were too diverged\n", too_diverged, divergedPerc)
logger.Printf("%d reads remaining after preliminary filtering\n", considered)
logger.Println("Contamination filtering:")
for c, cont := range contamination {
n := reads_filtered[c]
perc := float64(n) / float64(considered) * 100
found_perc := float64(reads_found[c]) / float64(considered) * 100
logger.Printf("found %d of %d reads in %s (%0.1f%%)\n", reads_found[c], considered, cont, found_perc)
logger.Printf("rejected %d of %d reads from %s (%0.1f%%)\n", reads_filtered[c], considered, cont, perc)
}
kept_percent = float64(reads_kept) / float64(considered) * 100
total_percent := float64(reads_kept) / float64(total_reads) * 100
logger.Printf("kept %d of %d reads (%0.1f%%), which is %0.1f%% of the %d reads that met preliminary filtering\n",
reads_kept, total_reads, total_percent, kept_percent, considered)
total_mates_percent := float64(read_mates_kept) / float64(total_read_mates) * 100
logger.Printf("kept %d of %d read mates (%0.1f%%)", read_mates_kept, total_read_mates, total_mates_percent)
input_mates_per_pair := float64(total_read_mates) / float64(total_reads)
output_mates_per_pair := float64(read_mates_kept) / float64(reads_kept)
logger.Printf("observed %0.1f mates/read on the input end and %0.1f mates/read on the output end\n",
input_mates_per_pair, output_mates_per_pair)
logger.Println("machine parsable stats:")
stats := []int{
total_reads,
total_read_mates,
ercc,
too_short,
too_diverged,
considered,
reads_kept,
read_mates_kept,
}
stats = append(stats, reads_found...)
stats = append(stats, reads_filtered...)
statsStr := "stats"
for _, s := range stats {
statsStr += fmt.Sprintf("\t%d", s)
}
logger.Println(statsStr)
} | identifier_body | |
contfilter.go | package main
import (
"encoding/json"
"flag"
"fmt"
"io"
"log"
"os"
"strconv"
"strings"
"time"
)
type Args struct {
Sample string
Margin float64
MinLength int
MaxDist int
Limit int
Penalty float64
Output string
Ercc bool
LogFilename string
Verbose bool
}
var args = Args{}
var logger *log.Logger
func init() {
log.SetFlags(0)
flag.StringVar(&args.Sample, "sample", "", "BAM file of the sample you want to filter (sorted by name, required)")
flag.Float64Var(&args.Margin, "margin", 1.0, "how much better sample needs to be matched")
flag.IntVar(&args.MinLength, "min-len", 60, "min length for an alignment")
flag.IntVar(&args.MaxDist, "max-edit-dist", 5, "max edit distance for a sample match")
flag.IntVar(&args.Limit, "limit", 0, "limit the number of sample reads considered (0 = no limit)")
flag.Float64Var(&args.Penalty, "edit-penalty", 2.0, "multiple for how to penalize edit distance")
flag.StringVar(&args.Output, "output", "", "output bam file (required)")
flag.StringVar(&args.LogFilename, "log", "", "write parameters and stats to a log file")
flag.BoolVar(&args.Verbose, "verbose", false, "keep a record of what happens to each read in the log (must give -log name)")
flag.BoolVar(&args.Ercc, "ercc", false, "exclude ERCC mappings from sample before filtering")
flag.Usage = func() {
log.Println("usage: contfilter [options] cont1.bam cont2.bam")
flag.PrintDefaults()
}
}
func benchmark(start time.Time, label string) {
elapsed := time.Since(start)
logger.Printf("%s took %s", label, elapsed)
}
func extract(row []string) (int, int, error) {
if len(row) < 15 {
return 0, 0, fmt.Errorf("too few fields")
}
match_len := len(row[9])
edit_tag := row[14]
if edit_tag[:5] != "nM:i:" {
return 0, 0, fmt.Errorf("malformed edit distance tag: %s", edit_tag)
}
edit_dist, err := strconv.Atoi(edit_tag[5:])
if err != nil {
return 0, 0, fmt.Errorf("failed to parse edit dist: %s", edit_tag)
}
return match_len, edit_dist, nil
}
func OpenLogger() {
if args.LogFilename == "" {
logger = log.New(os.Stderr, "", 0)
} else {
logfile, err := os.Create(args.LogFilename)
if err != nil {
log.Fatal(err)
}
logger = log.New(logfile, "", 0)
}
}
func | () {
logger.Println("command:", strings.Join(os.Args, " "))
blob, err := json.MarshalIndent(args, "", " ")
if err != nil {
logger.Fatal("failed to marshal arguments")
}
logger.Println(string(blob))
}
func MatchesErcc(mate1, mate2 []string) bool {
return args.Ercc &&
(strings.Contains(mate1[2], "ERCC") || (mate2 != nil && strings.Contains(mate2[2], "ERCC")))
}
func main() {
var kept_percent float64
flag.Parse()
contamination := flag.Args()
startedAt := time.Now()
if len(contamination) == 0 {
logger.Println("must specify at least one contamination mapping BAM file")
os.Exit(1)
}
if args.Output == "" {
logger.Println("must specify -output file")
os.Exit(1)
}
OpenLogger()
LogArguments()
scanner := BamScanner{}
if args.Sample == "" {
scanner.OpenStdin()
} else {
if err := scanner.OpenBam(args.Sample); err != nil {
logger.Fatal(err)
}
}
reads_found := make([]int, len(contamination))
reads_filtered := make([]int, len(contamination))
contScanners := make([]BamScanner, len(contamination))
rejected := make([]bool, len(contamination))
found := make([]bool, len(contamination))
for c := 0; c < len(contamination); c++ {
if err := contScanners[c].OpenBam(contamination[c]); err != nil {
logger.Fatal(err)
}
reads_found[c] = 0
reads_filtered[c] = 0
}
header, err := ReadBamHeader(args.Sample)
if err != nil {
logger.Fatal(err)
}
out := BamWriter{}
outfp, err := out.Open(args.Output)
if err != nil {
logger.Fatal(err)
}
io.WriteString(outfp, header)
reads_kept := 0
read_mates_kept := 0
total_reads := 0
total_read_mates := 0
ercc := 0
considered := 0
too_short := 0
too_diverged := 0
err = func() error {
defer scanner.Done()
defer benchmark(startedAt, "processing")
for {
if total_reads > 0 && total_reads%100000 == 0 {
kept_percent = float64(reads_kept) / float64(considered) * 100
logger.Printf("considered %d out of %d so far, kept %0.1f%%\n", considered, total_reads, kept_percent)
}
if args.Limit > 0 && args.Limit == total_reads {
return nil
}
// Set up flags for outcomes wrt each potential source of contamination.
for c, _ := range contamination {
rejected[c] = false
found[c] = false
}
// Read the first mate in a paired end run.
mate1, err := scanner.Record()
if err != nil {
return fmt.Errorf("failed to read from sample BAM: %v after %d lines", err, total_reads)
}
if scanner.Closed {
return nil
}
scanner.Ratchet()
read := mate1[0]
total_reads++
total_read_mates++
// See if we have the second mate of this pair.
mate2, err := scanner.Find(read)
if err != nil {
return fmt.Errorf("failed to read from sample BAM: %v after %d lines", err, total_reads)
}
if mate2 != nil {
scanner.Ratchet()
total_read_mates++
}
var mate1_len int
var mate1_edit_dist int
var mate2_len int
var mate2_edit_dist int
mate1_len, mate1_edit_dist, err = extract(mate1)
if err != nil {
return err
}
if args.Verbose {
logger.Println("found read", read, "mate 1:")
logger.Println(strings.Join(mate1, "\t"))
}
if mate2 != nil {
mate2_len, mate2_edit_dist, err = extract(mate2)
if err != nil {
return err
}
if args.Verbose {
logger.Println("found read", read, "mate 2:")
logger.Println(strings.Join(mate2, "\t"))
}
}
// Filter for ERCC if either mate is mapped to ERCC.
if MatchesErcc(mate1, mate2) {
ercc++
if args.Verbose {
logger.Println("ERCC, rejecting")
}
continue
}
if mate1_len < args.MinLength {
// If we don't have mate2 or if it's also too short, we mark this pair as too short.
if mate2 == nil || mate2_len < args.MinLength {
if args.Verbose {
logger.Println("too short, rejecting")
}
too_short++
continue
}
if args.Verbose {
logger.Println("promoting mate 2")
}
// Mate2 is okay, so we promote it to mate1, and forget mate2
mate1_len = mate2_len
mate1_edit_dist = mate2_edit_dist
mate1 = mate2
mate2 = nil
}
if mate2 != nil && mate2_len < args.MinLength {
// We have a mate2, but it doesn't meet the min length criteria, just forget it.
mate2 = nil
if args.Verbose {
logger.Println("mate 2 too short, forgetting")
}
}
// We treate the filter for edit distance the same way as length.
if mate1_edit_dist > args.MaxDist {
if mate2 == nil || mate2_edit_dist > args.MaxDist {
too_diverged++
if args.Verbose {
logger.Println("too divergent, rejecting")
}
continue
}
if args.Verbose {
logger.Println("promothing mate 2")
}
// Mate2 is okay, so we promote it to mate1, and forget mate2
mate1_len = mate2_len
mate1_edit_dist = mate2_edit_dist
mate1 = mate2
mate2 = nil
}
if mate2 != nil && mate2_edit_dist > args.MaxDist {
// We have a mate2, but it doesn't meet the max edit distance criteria, just forget it.
mate2 = nil
if args.Verbose {
logger.Println("mate 2, too diverged, forgetting")
}
}
// If we get this far it means the read met the preliminary filtering criteria.
considered++
// Compare agains the best score for the read pair.
mate1_score := float64(mate1_len) - float64(mate1_edit_dist)*args.Penalty
var mate2_score float64
best_score := mate1_score
best_len := mate1_len
best_edit_dist := mate1_edit_dist
if mate2 != nil {
mate2_score = float64(mate2_len) - float64(mate2_edit_dist)*args.Penalty
if mate2_score > mate1_score {
best_score = mate2_score
best_len = mate2_len
best_edit_dist = mate2_edit_dist
if args.Verbose {
logger.Printf("mate 2 has better score (%f) than mate 1 (%f)\n", mate2_score, mate1_score)
}
}
}
// Reads in the sample BAM will be rejected if either mate in any of the
// contamination BAM files maps better than in the sampel BAM file.
was_rejected := false
for c := 0; c < len(contamination); c++ {
m := 0
for {
mate, err := contScanners[c].Find(read)
if err != nil {
logger.Fatal(err)
}
if mate == nil {
// No more alignments for this read in this contamination mapping
break
}
m++
if args.Verbose {
logger.Printf("found mapping %d for %s in %s\n", m, mate[0], contamination[c])
logger.Println(strings.Join(mate, "\t"))
}
if !found[c] {
found[c] = true
reads_found[c]++
}
length, edit_dist, err := extract(mate)
if err != nil {
logger.Fatalf("failed to read from %s: %v", contamination[c], err)
}
if length >= args.MinLength {
score := float64(length) - float64(edit_dist)*args.Penalty
if args.Verbose {
logger.Printf("mapping meets length criteria and has score %f\n", score)
}
if best_score <= score+args.Margin {
if args.Verbose {
logger.Println("mapping has better score")
}
if !rejected[c] {
reads_filtered[c]++
rejected[c] = true
was_rejected = true
if args.Verbose {
logger.Printf("read %s with length %d and edit distance %d was rejected "+
"with score %0.1f because in %s it had a score of %0.1f with length "+
"%d and edit distance %d\n",
read, best_len, best_edit_dist, best_score, contamination[c],
score, length, edit_dist)
}
}
} else {
if args.Verbose {
logger.Println("mapping has worse score")
}
}
}
}
}
if !was_rejected {
// This read is okay, output it to the output BAM file.
_, err := fmt.Fprintf(outfp, "%s\n", strings.Join(mate1, "\t"))
if err != nil {
return err
}
reads_kept++
read_mates_kept++
if mate2 != nil {
_, err := fmt.Fprintf(outfp, "%s\n", strings.Join(mate2, "\t"))
if err != nil {
return err
}
read_mates_kept++
}
if args.Verbose {
logger.Printf("kept read %s with length %d and edit distance %d and score %0.1f\n",
read, best_len, best_edit_dist, best_score)
}
}
}
}()
if err != nil {
logger.Fatal(err)
}
outfp.Close()
out.Wait()
logger.Println("Preliminary filtering:")
if args.Ercc {
erccPerc := float64(ercc) / float64(total_reads) * 100
logger.Printf("filtered out %d ERCC reads (%0.1f%%) before comparing to contamination\n", ercc, erccPerc)
}
shortPerc := float64(too_short) / float64(total_reads) * 100
logger.Printf("filtered out %d reads (%0.1f%%) becase their alignment was too short\n", too_short, shortPerc)
divergedPerc := float64(too_diverged) / float64(total_reads) * 100
logger.Printf("filtered out %d reads (%0.1f%%) becase they were too diverged\n", too_diverged, divergedPerc)
logger.Printf("%d reads remaining after preliminary filtering\n", considered)
logger.Println("Contamination filtering:")
for c, cont := range contamination {
n := reads_filtered[c]
perc := float64(n) / float64(considered) * 100
found_perc := float64(reads_found[c]) / float64(considered) * 100
logger.Printf("found %d of %d reads in %s (%0.1f%%)\n", reads_found[c], considered, cont, found_perc)
logger.Printf("rejected %d of %d reads from %s (%0.1f%%)\n", reads_filtered[c], considered, cont, perc)
}
kept_percent = float64(reads_kept) / float64(considered) * 100
total_percent := float64(reads_kept) / float64(total_reads) * 100
logger.Printf("kept %d of %d reads (%0.1f%%), which is %0.1f%% of the %d reads that met preliminary filtering\n",
reads_kept, total_reads, total_percent, kept_percent, considered)
total_mates_percent := float64(read_mates_kept) / float64(total_read_mates) * 100
logger.Printf("kept %d of %d read mates (%0.1f%%)", read_mates_kept, total_read_mates, total_mates_percent)
input_mates_per_pair := float64(total_read_mates) / float64(total_reads)
output_mates_per_pair := float64(read_mates_kept) / float64(reads_kept)
logger.Printf("observed %0.1f mates/read on the input end and %0.1f mates/read on the output end\n",
input_mates_per_pair, output_mates_per_pair)
logger.Println("machine parsable stats:")
stats := []int{
total_reads,
total_read_mates,
ercc,
too_short,
too_diverged,
considered,
reads_kept,
read_mates_kept,
}
stats = append(stats, reads_found...)
stats = append(stats, reads_filtered...)
statsStr := "stats"
for _, s := range stats {
statsStr += fmt.Sprintf("\t%d", s)
}
logger.Println(statsStr)
}
| LogArguments | identifier_name |
autogen.py | """
Parse public_api.h and generates various stubs around
"""
import attr
import re
import py
from copy import deepcopy
import pycparser
from pycparser import c_ast
from pycparser.c_generator import CGenerator
DISCLAIMER = """
/*
DO NOT EDIT THIS FILE!
This file is automatically generated by tools/autogen.py from tools/public_api.h.
Run this to regenerate:
make autogen
*/
"""
def toC(node):
return toC.gen.visit(node)
toC.gen = CGenerator()
@attr.s
class Function:
_BASE_NAME = re.compile(r'^_?HPy_?')
name = attr.ib()
cpython_name = attr.ib()
node = attr.ib(repr=False)
def _find_typedecl(self, node):
while not isinstance(node, c_ast.TypeDecl):
node = node.type
return node
def base_name(self):
return self._BASE_NAME.sub('', self.name)
def ctx_name(self):
# e.g. "ctx_Module_Create"
return self._BASE_NAME.sub(r'ctx_', self.name)
def ctx_impl_name(self):
return '&%s' % self.ctx_name()
def is_varargs(self):
return (len(self.node.type.args.params) > 0 and
isinstance(self.node.type.args.params[-1], c_ast.EllipsisParam))
def ctx_decl(self):
# e.g. "HPy (*ctx_Module_Create)(HPyContext ctx, HPyModuleDef *def)"
#
# turn the function declaration into a function POINTER declaration
newnode = deepcopy(self.node)
newnode.type = c_ast.PtrDecl(type=newnode.type, quals=[])
# fix the name of the function pointer
typedecl = self._find_typedecl(newnode)
typedecl.declname = self.ctx_name()
return toC(newnode)
def trampoline_def(self):
# static inline HPy HPyModule_Create(HPyContext ctx, HPyModuleDef *def) {
# return ctx->ctx_Module_Create ( ctx, def );
# }
rettype = toC(self.node.type.type)
parts = []
w = parts.append
w('static inline')
w(toC(self.node))
w('{\n ')
# trampolines cannot deal with varargs easily
assert not self.is_varargs()
if rettype == 'void':
w('ctx->%s' % self.ctx_name())
else:
w('return ctx->%s' % self.ctx_name())
w('(')
params = [p.name for p in self.node.type.args.params]
w(', '.join(params))
w(');')
w('\n}')
return ' '.join(parts)
def implementation(self):
def signature(base_name):
# HPy _HPy_API_NAME(Number_Add)(HPyContext ctx, HPy x, HPy y)
newnode = deepcopy(self.node)
typedecl = self._find_typedecl(newnode)
# rename the function
if self.name.startswith('HPy_'):
typedecl.declname = '_HPy_IMPL_NAME_NOPREFIX(%s)' % base_name
else:
typedecl.declname = '_HPy_IMPL_NAME(%s)' % base_name
return toC(newnode)
#
def call(pyfunc, return_type):
# return _py2h(PyNumber_Add(_h2py(x), _h2py(y)))
args = []
for p in self.node.type.args.params:
if toC(p.type) == 'HPyContext':
continue
elif toC(p.type) == 'HPy':
arg = '_h2py(%s)' % p.name
else:
arg = p.name
args.append(arg)
result = '%s(%s)' % (pyfunc, ', '.join(args))
if return_type == 'HPy':
result = '_py2h(%s)' % result
return result
#
lines = []
w = lines.append
pyfunc = self.cpython_name
if not pyfunc:
raise ValueError(f"Cannot generate implementation for {self}")
return_type = toC(self.node.type.type)
w('HPyAPI_STORAGE %s' % signature(self.base_name()))
w('{')
w(' return %s;' % call(pyfunc, return_type))
w('}')
return '\n'.join(lines)
def ctx_pypy_type(self):
return 'void *'
def pypy_stub(self):
signature = toC(self.node)
if self.is_varargs():
return '# %s' % signature
#
argnames = [p.name for p in self.node.type.args.params]
lines = []
w = lines.append
w('@API.func("%s")' % signature)
w('def %s(space, %s):' % (self.name, ', '.join(argnames)))
w(' from rpython.rlib.nonconst import NonConstant # for the annotator')
w(' if NonConstant(False): return 0')
w(' raise NotImplementedError')
w('')
return '\n'.join(lines)
@attr.s
class GlobalVar:
name = attr.ib()
node = attr.ib(repr=False)
def ctx_name(self):
return self.name
def ctx_impl_name(self):
return '(HPy){CONSTANT_%s}' % (self.name.upper(),)
def ctx_decl(self):
return toC(self.node)
def trampoline_def(self):
return None
def ctx_pypy_type(self):
return 'struct _HPy_s'
def pypy_stub(self):
return ''
class FuncDeclVisitor(pycparser.c_ast.NodeVisitor):
def __init__(self, convert_name):
self.declarations = []
self.convert_name = convert_name
def visit_Decl(self, node):
if isinstance(node.type, c_ast.FuncDecl):
self._visit_function(node)
elif isinstance(node.type, c_ast.TypeDecl):
self._visit_global_var(node)
def _visit_function(self, node):
name = node.name
if not name.startswith('HPy') and not name.startswith('_HPy'):
print('WARNING: Ignoring non-hpy declaration: %s' % name)
return
for p in node.type.args.params:
if hasattr(p, 'name') and p.name is None:
raise ValueError("non-named argument in declaration of %s" %
name)
cpy_name = self.convert_name(name)
self.declarations.append(Function(name, cpy_name, node))
def _visit_global_var(self, node):
name = node.name
if not name.startswith('h_'):
print('WARNING: Ignoring non-hpy variable declaration: %s' % name)
return
assert toC(node.type.type) == "HPy"
self.declarations.append(GlobalVar(name, node))
SPECIAL_CASES = {
'HPy_Dup': None,
'HPy_Close': None,
'HPyModule_Create': None,
'HPy_GetAttr': 'PyObject_GetAttr',
'HPy_GetAttr_s': 'PyObject_GetAttrString',
'HPy_HasAttr': 'PyObject_HasAttr',
'HPy_HasAttr_s': 'PyObject_HasAttrString',
'HPy_SetAttr': 'PyObject_SetAttr',
'HPy_SetAttr_s': 'PyObject_SetAttrString',
'HPy_GetItem': 'PyObject_GetItem',
'HPy_GetItem_i': None,
'HPy_GetItem_s': None,
'HPy_SetItem': 'PyObject_SetItem',
'HPy_SetItem_i': None,
'HPy_SetItem_s': None,
'HPy_FromPyObject': None,
'HPy_AsPyObject': None,
'_HPy_CallRealFunctionFromTrampoline': None,
'HPyErr_Occurred': None,
'HPy_Add': 'PyNumber_Add',
'HPy_Subtract': 'PyNumber_Subtract',
'HPy_Multiply': 'PyNumber_Multiply',
'HPy_MatrixMultiply': 'PyNumber_MatrixMultiply',
'HPy_FloorDivide': 'PyNumber_FloorDivide',
'HPy_TrueDivide': 'PyNumber_TrueDivide',
'HPy_Remainder': 'PyNumber_Remainder',
'HPy_Divmod': 'PyNumber_Divmod',
'HPy_Power': 'PyNumber_Power',
'HPy_Negative': 'PyNumber_Negative',
'HPy_Positive': 'PyNumber_Positive',
'HPy_Absolute': 'PyNumber_Absolute',
'HPy_Invert': 'PyNumber_Invert',
'HPy_Lshift': 'PyNumber_Lshift',
'HPy_Rshift': 'PyNumber_Rshift',
'HPy_And': 'PyNumber_And',
'HPy_Xor': 'PyNumber_Xor',
'HPy_Or': 'PyNumber_Or',
'HPy_Index': 'PyNumber_Index',
'HPy_Long': 'PyNumber_Long',
'HPy_Float': 'PyNumber_Float',
'HPy_InPlaceAdd': 'PyNumber_InPlaceAdd',
'HPy_InPlaceSubtract': 'PyNumber_InPlaceSubtract',
'HPy_InPlaceMultiply': 'PyNumber_InPlaceMultiply',
'HPy_InPlaceMatrixMultiply': 'PyNumber_InPlaceMatrixMultiply',
'HPy_InPlaceFloorDivide': 'PyNumber_InPlaceFloorDivide',
'HPy_InPlaceTrueDivide': 'PyNumber_InPlaceTrueDivide',
'HPy_InPlaceRemainder': 'PyNumber_InPlaceRemainder',
'HPy_InPlacePower': 'PyNumber_InPlacePower',
'HPy_InPlaceLshift': 'PyNumber_InPlaceLshift',
'HPy_InPlaceRshift': 'PyNumber_InPlaceRshift',
'HPy_InPlaceAnd': 'PyNumber_InPlaceAnd',
'HPy_InPlaceXor': 'PyNumber_InPlaceXor',
'HPy_InPlaceOr': 'PyNumber_InPlaceOr',
}
def convert_name(hpy_name):
if hpy_name in SPECIAL_CASES:
return SPECIAL_CASES[hpy_name]
return re.sub(r'^_?HPy_?', 'Py', hpy_name)
class AutoGen:
def | (self, filename):
self.ast = pycparser.parse_file(filename, use_cpp=True)
#self.ast.show()
self.collect_declarations()
def get(self, name):
for d in self.declarations:
if d.name == name:
return d
raise KeyError(name)
def collect_declarations(self):
v = FuncDeclVisitor(convert_name)
v.visit(self.ast)
self.declarations = v.declarations
def gen_ctx_decl(self):
# struct _HPyContext_s {
# int ctx_version;
# HPy h_None;
# ...
# HPy (*ctx_Module_Create)(HPyContext ctx, HPyModuleDef *def);
# ...
# }
lines = []
w = lines.append
w('struct _HPyContext_s {')
w(' int ctx_version;')
for f in self.declarations:
w(' %s;' % f.ctx_decl())
w('};')
return '\n'.join(lines)
def gen_ctx_def(self):
# struct _HPyContext_s global_ctx = {
# .ctx_version = 1,
# .h_None = (HPy){CONSTANT_H_NONE},
# ...
# .ctx_Module_Create = &ctx_Module_Create,
# ...
# }
lines = []
w = lines.append
w('struct _HPyContext_s global_ctx = {')
w(' .ctx_version = 1,')
for f in self.declarations:
name = f.ctx_name()
impl = f.ctx_impl_name()
w(' .%s = %s,' % (name, impl))
w('};')
return '\n'.join(lines)
def gen_func_trampolines(self):
lines = []
for f in self.declarations:
trampoline = f.trampoline_def()
if trampoline:
lines.append(trampoline)
lines.append('')
return '\n'.join(lines)
def gen_func_implementations(self):
lines = []
for f in self.declarations:
if not isinstance(f, Function):
continue
if not f.cpython_name:
continue
lines.append(f.implementation())
lines.append('')
return '\n'.join(lines)
def gen_pypy_decl(self):
lines = []
w = lines.append
w("typedef struct _HPyContext_s {")
w(" int ctx_version;")
for f in self.declarations:
w(" %s %s;" % (f.ctx_pypy_type(), f.ctx_name()))
w("} _struct_HPyContext_s;")
w("")
w("")
# generate stubs for all the API functions
for f in self.declarations:
w(f.pypy_stub())
return '\n'.join(lines)
def main():
root = py.path.local(__file__).dirpath().dirpath()
include = root.join('hpy', 'devel', 'include')
autogen_ctx = include.join('universal', 'autogen_ctx.h')
autogen_trampolines = include.join('universal', 'autogen_trampolines.h')
autogen_ctx_def = root.join('hpy', 'universal', 'src', 'autogen_ctx_def.h')
autogen_impl = include.join('common', 'autogen_impl.h')
autogen_pypy = root.join('tools', 'autogen_pypy.txt')
autogen = AutoGen(root.join('tools', 'public_api.h'))
for func in autogen.declarations:
print(func)
ctx_decl = autogen.gen_ctx_decl()
func_trampolines = autogen.gen_func_trampolines()
ctx_def = autogen.gen_ctx_def()
impl = autogen.gen_func_implementations()
pypy_decl = autogen.gen_pypy_decl()
with autogen_ctx.open('w') as f:
print(DISCLAIMER, file=f)
print(ctx_decl, file=f)
with autogen_trampolines.open('w') as f:
print(DISCLAIMER, file=f)
print(func_trampolines, file=f)
with autogen_ctx_def.open('w') as f:
print(DISCLAIMER, file=f)
print(ctx_def, file=f)
with autogen_impl.open('w') as f:
print(DISCLAIMER, file=f)
print(impl, file=f)
with autogen_pypy.open('w') as f:
print(pypy_decl, file=f)
if __name__ == '__main__':
main()
| __init__ | identifier_name |
autogen.py | """
Parse public_api.h and generates various stubs around
"""
import attr
import re
import py
from copy import deepcopy
import pycparser
from pycparser import c_ast
from pycparser.c_generator import CGenerator
DISCLAIMER = """
/*
DO NOT EDIT THIS FILE!
This file is automatically generated by tools/autogen.py from tools/public_api.h.
Run this to regenerate:
make autogen
*/
"""
def toC(node):
return toC.gen.visit(node)
toC.gen = CGenerator()
@attr.s
class Function:
_BASE_NAME = re.compile(r'^_?HPy_?')
name = attr.ib()
cpython_name = attr.ib()
node = attr.ib(repr=False)
def _find_typedecl(self, node):
while not isinstance(node, c_ast.TypeDecl):
node = node.type
return node
def base_name(self):
return self._BASE_NAME.sub('', self.name)
def ctx_name(self):
# e.g. "ctx_Module_Create"
return self._BASE_NAME.sub(r'ctx_', self.name)
def ctx_impl_name(self):
return '&%s' % self.ctx_name()
def is_varargs(self):
return (len(self.node.type.args.params) > 0 and
isinstance(self.node.type.args.params[-1], c_ast.EllipsisParam))
def ctx_decl(self):
# e.g. "HPy (*ctx_Module_Create)(HPyContext ctx, HPyModuleDef *def)"
#
# turn the function declaration into a function POINTER declaration
newnode = deepcopy(self.node)
newnode.type = c_ast.PtrDecl(type=newnode.type, quals=[])
# fix the name of the function pointer
typedecl = self._find_typedecl(newnode)
typedecl.declname = self.ctx_name()
return toC(newnode)
def trampoline_def(self):
# static inline HPy HPyModule_Create(HPyContext ctx, HPyModuleDef *def) {
# return ctx->ctx_Module_Create ( ctx, def );
# }
rettype = toC(self.node.type.type)
parts = []
w = parts.append
w('static inline')
w(toC(self.node))
w('{\n ')
# trampolines cannot deal with varargs easily
assert not self.is_varargs()
if rettype == 'void':
w('ctx->%s' % self.ctx_name())
else:
w('return ctx->%s' % self.ctx_name())
w('(')
params = [p.name for p in self.node.type.args.params]
w(', '.join(params))
w(');')
w('\n}')
return ' '.join(parts)
def implementation(self):
def signature(base_name):
# HPy _HPy_API_NAME(Number_Add)(HPyContext ctx, HPy x, HPy y)
newnode = deepcopy(self.node)
typedecl = self._find_typedecl(newnode)
# rename the function
if self.name.startswith('HPy_'):
typedecl.declname = '_HPy_IMPL_NAME_NOPREFIX(%s)' % base_name
else:
typedecl.declname = '_HPy_IMPL_NAME(%s)' % base_name
return toC(newnode)
#
def call(pyfunc, return_type):
# return _py2h(PyNumber_Add(_h2py(x), _h2py(y)))
args = []
for p in self.node.type.args.params:
if toC(p.type) == 'HPyContext':
continue
elif toC(p.type) == 'HPy':
arg = '_h2py(%s)' % p.name
else:
arg = p.name
args.append(arg)
result = '%s(%s)' % (pyfunc, ', '.join(args))
if return_type == 'HPy':
result = '_py2h(%s)' % result
return result
#
lines = []
w = lines.append
pyfunc = self.cpython_name
if not pyfunc:
raise ValueError(f"Cannot generate implementation for {self}")
return_type = toC(self.node.type.type)
w('HPyAPI_STORAGE %s' % signature(self.base_name()))
w('{')
w(' return %s;' % call(pyfunc, return_type))
w('}')
return '\n'.join(lines)
def ctx_pypy_type(self):
return 'void *'
def pypy_stub(self):
signature = toC(self.node)
if self.is_varargs():
return '# %s' % signature
#
argnames = [p.name for p in self.node.type.args.params]
lines = []
w = lines.append
w('@API.func("%s")' % signature)
w('def %s(space, %s):' % (self.name, ', '.join(argnames)))
w(' from rpython.rlib.nonconst import NonConstant # for the annotator')
w(' if NonConstant(False): return 0')
w(' raise NotImplementedError')
w('')
return '\n'.join(lines)
@attr.s
class GlobalVar:
name = attr.ib()
node = attr.ib(repr=False)
def ctx_name(self):
|
def ctx_impl_name(self):
return '(HPy){CONSTANT_%s}' % (self.name.upper(),)
def ctx_decl(self):
return toC(self.node)
def trampoline_def(self):
return None
def ctx_pypy_type(self):
return 'struct _HPy_s'
def pypy_stub(self):
return ''
class FuncDeclVisitor(pycparser.c_ast.NodeVisitor):
def __init__(self, convert_name):
self.declarations = []
self.convert_name = convert_name
def visit_Decl(self, node):
if isinstance(node.type, c_ast.FuncDecl):
self._visit_function(node)
elif isinstance(node.type, c_ast.TypeDecl):
self._visit_global_var(node)
def _visit_function(self, node):
name = node.name
if not name.startswith('HPy') and not name.startswith('_HPy'):
print('WARNING: Ignoring non-hpy declaration: %s' % name)
return
for p in node.type.args.params:
if hasattr(p, 'name') and p.name is None:
raise ValueError("non-named argument in declaration of %s" %
name)
cpy_name = self.convert_name(name)
self.declarations.append(Function(name, cpy_name, node))
def _visit_global_var(self, node):
name = node.name
if not name.startswith('h_'):
print('WARNING: Ignoring non-hpy variable declaration: %s' % name)
return
assert toC(node.type.type) == "HPy"
self.declarations.append(GlobalVar(name, node))
SPECIAL_CASES = {
'HPy_Dup': None,
'HPy_Close': None,
'HPyModule_Create': None,
'HPy_GetAttr': 'PyObject_GetAttr',
'HPy_GetAttr_s': 'PyObject_GetAttrString',
'HPy_HasAttr': 'PyObject_HasAttr',
'HPy_HasAttr_s': 'PyObject_HasAttrString',
'HPy_SetAttr': 'PyObject_SetAttr',
'HPy_SetAttr_s': 'PyObject_SetAttrString',
'HPy_GetItem': 'PyObject_GetItem',
'HPy_GetItem_i': None,
'HPy_GetItem_s': None,
'HPy_SetItem': 'PyObject_SetItem',
'HPy_SetItem_i': None,
'HPy_SetItem_s': None,
'HPy_FromPyObject': None,
'HPy_AsPyObject': None,
'_HPy_CallRealFunctionFromTrampoline': None,
'HPyErr_Occurred': None,
'HPy_Add': 'PyNumber_Add',
'HPy_Subtract': 'PyNumber_Subtract',
'HPy_Multiply': 'PyNumber_Multiply',
'HPy_MatrixMultiply': 'PyNumber_MatrixMultiply',
'HPy_FloorDivide': 'PyNumber_FloorDivide',
'HPy_TrueDivide': 'PyNumber_TrueDivide',
'HPy_Remainder': 'PyNumber_Remainder',
'HPy_Divmod': 'PyNumber_Divmod',
'HPy_Power': 'PyNumber_Power',
'HPy_Negative': 'PyNumber_Negative',
'HPy_Positive': 'PyNumber_Positive',
'HPy_Absolute': 'PyNumber_Absolute',
'HPy_Invert': 'PyNumber_Invert',
'HPy_Lshift': 'PyNumber_Lshift',
'HPy_Rshift': 'PyNumber_Rshift',
'HPy_And': 'PyNumber_And',
'HPy_Xor': 'PyNumber_Xor',
'HPy_Or': 'PyNumber_Or',
'HPy_Index': 'PyNumber_Index',
'HPy_Long': 'PyNumber_Long',
'HPy_Float': 'PyNumber_Float',
'HPy_InPlaceAdd': 'PyNumber_InPlaceAdd',
'HPy_InPlaceSubtract': 'PyNumber_InPlaceSubtract',
'HPy_InPlaceMultiply': 'PyNumber_InPlaceMultiply',
'HPy_InPlaceMatrixMultiply': 'PyNumber_InPlaceMatrixMultiply',
'HPy_InPlaceFloorDivide': 'PyNumber_InPlaceFloorDivide',
'HPy_InPlaceTrueDivide': 'PyNumber_InPlaceTrueDivide',
'HPy_InPlaceRemainder': 'PyNumber_InPlaceRemainder',
'HPy_InPlacePower': 'PyNumber_InPlacePower',
'HPy_InPlaceLshift': 'PyNumber_InPlaceLshift',
'HPy_InPlaceRshift': 'PyNumber_InPlaceRshift',
'HPy_InPlaceAnd': 'PyNumber_InPlaceAnd',
'HPy_InPlaceXor': 'PyNumber_InPlaceXor',
'HPy_InPlaceOr': 'PyNumber_InPlaceOr',
}
def convert_name(hpy_name):
if hpy_name in SPECIAL_CASES:
return SPECIAL_CASES[hpy_name]
return re.sub(r'^_?HPy_?', 'Py', hpy_name)
class AutoGen:
def __init__(self, filename):
self.ast = pycparser.parse_file(filename, use_cpp=True)
#self.ast.show()
self.collect_declarations()
def get(self, name):
for d in self.declarations:
if d.name == name:
return d
raise KeyError(name)
def collect_declarations(self):
v = FuncDeclVisitor(convert_name)
v.visit(self.ast)
self.declarations = v.declarations
def gen_ctx_decl(self):
# struct _HPyContext_s {
# int ctx_version;
# HPy h_None;
# ...
# HPy (*ctx_Module_Create)(HPyContext ctx, HPyModuleDef *def);
# ...
# }
lines = []
w = lines.append
w('struct _HPyContext_s {')
w(' int ctx_version;')
for f in self.declarations:
w(' %s;' % f.ctx_decl())
w('};')
return '\n'.join(lines)
def gen_ctx_def(self):
# struct _HPyContext_s global_ctx = {
# .ctx_version = 1,
# .h_None = (HPy){CONSTANT_H_NONE},
# ...
# .ctx_Module_Create = &ctx_Module_Create,
# ...
# }
lines = []
w = lines.append
w('struct _HPyContext_s global_ctx = {')
w(' .ctx_version = 1,')
for f in self.declarations:
name = f.ctx_name()
impl = f.ctx_impl_name()
w(' .%s = %s,' % (name, impl))
w('};')
return '\n'.join(lines)
def gen_func_trampolines(self):
lines = []
for f in self.declarations:
trampoline = f.trampoline_def()
if trampoline:
lines.append(trampoline)
lines.append('')
return '\n'.join(lines)
def gen_func_implementations(self):
lines = []
for f in self.declarations:
if not isinstance(f, Function):
continue
if not f.cpython_name:
continue
lines.append(f.implementation())
lines.append('')
return '\n'.join(lines)
def gen_pypy_decl(self):
lines = []
w = lines.append
w("typedef struct _HPyContext_s {")
w(" int ctx_version;")
for f in self.declarations:
w(" %s %s;" % (f.ctx_pypy_type(), f.ctx_name()))
w("} _struct_HPyContext_s;")
w("")
w("")
# generate stubs for all the API functions
for f in self.declarations:
w(f.pypy_stub())
return '\n'.join(lines)
def main():
root = py.path.local(__file__).dirpath().dirpath()
include = root.join('hpy', 'devel', 'include')
autogen_ctx = include.join('universal', 'autogen_ctx.h')
autogen_trampolines = include.join('universal', 'autogen_trampolines.h')
autogen_ctx_def = root.join('hpy', 'universal', 'src', 'autogen_ctx_def.h')
autogen_impl = include.join('common', 'autogen_impl.h')
autogen_pypy = root.join('tools', 'autogen_pypy.txt')
autogen = AutoGen(root.join('tools', 'public_api.h'))
for func in autogen.declarations:
print(func)
ctx_decl = autogen.gen_ctx_decl()
func_trampolines = autogen.gen_func_trampolines()
ctx_def = autogen.gen_ctx_def()
impl = autogen.gen_func_implementations()
pypy_decl = autogen.gen_pypy_decl()
with autogen_ctx.open('w') as f:
print(DISCLAIMER, file=f)
print(ctx_decl, file=f)
with autogen_trampolines.open('w') as f:
print(DISCLAIMER, file=f)
print(func_trampolines, file=f)
with autogen_ctx_def.open('w') as f:
print(DISCLAIMER, file=f)
print(ctx_def, file=f)
with autogen_impl.open('w') as f:
print(DISCLAIMER, file=f)
print(impl, file=f)
with autogen_pypy.open('w') as f:
print(pypy_decl, file=f)
if __name__ == '__main__':
main()
| return self.name | identifier_body |
autogen.py | """
Parse public_api.h and generates various stubs around
"""
import attr
import re
import py
from copy import deepcopy
import pycparser
from pycparser import c_ast
from pycparser.c_generator import CGenerator
DISCLAIMER = """
/*
DO NOT EDIT THIS FILE!
This file is automatically generated by tools/autogen.py from tools/public_api.h.
Run this to regenerate:
make autogen
*/
"""
def toC(node):
return toC.gen.visit(node)
toC.gen = CGenerator()
@attr.s
class Function:
_BASE_NAME = re.compile(r'^_?HPy_?')
name = attr.ib()
cpython_name = attr.ib()
node = attr.ib(repr=False)
def _find_typedecl(self, node):
while not isinstance(node, c_ast.TypeDecl):
node = node.type
return node
def base_name(self):
return self._BASE_NAME.sub('', self.name)
def ctx_name(self):
# e.g. "ctx_Module_Create"
return self._BASE_NAME.sub(r'ctx_', self.name)
def ctx_impl_name(self):
return '&%s' % self.ctx_name()
def is_varargs(self):
return (len(self.node.type.args.params) > 0 and
isinstance(self.node.type.args.params[-1], c_ast.EllipsisParam))
def ctx_decl(self):
# e.g. "HPy (*ctx_Module_Create)(HPyContext ctx, HPyModuleDef *def)"
#
# turn the function declaration into a function POINTER declaration
newnode = deepcopy(self.node)
newnode.type = c_ast.PtrDecl(type=newnode.type, quals=[])
# fix the name of the function pointer
typedecl = self._find_typedecl(newnode)
typedecl.declname = self.ctx_name()
return toC(newnode)
def trampoline_def(self):
# static inline HPy HPyModule_Create(HPyContext ctx, HPyModuleDef *def) {
# return ctx->ctx_Module_Create ( ctx, def );
# }
rettype = toC(self.node.type.type)
parts = []
w = parts.append
w('static inline')
w(toC(self.node))
w('{\n ')
# trampolines cannot deal with varargs easily
assert not self.is_varargs()
if rettype == 'void':
w('ctx->%s' % self.ctx_name())
else:
w('return ctx->%s' % self.ctx_name())
w('(')
params = [p.name for p in self.node.type.args.params]
w(', '.join(params))
w(');')
w('\n}')
return ' '.join(parts)
def implementation(self):
def signature(base_name):
# HPy _HPy_API_NAME(Number_Add)(HPyContext ctx, HPy x, HPy y)
newnode = deepcopy(self.node)
typedecl = self._find_typedecl(newnode)
# rename the function
if self.name.startswith('HPy_'):
typedecl.declname = '_HPy_IMPL_NAME_NOPREFIX(%s)' % base_name
else:
typedecl.declname = '_HPy_IMPL_NAME(%s)' % base_name
return toC(newnode)
#
def call(pyfunc, return_type):
# return _py2h(PyNumber_Add(_h2py(x), _h2py(y)))
args = []
for p in self.node.type.args.params:
|
result = '%s(%s)' % (pyfunc, ', '.join(args))
if return_type == 'HPy':
result = '_py2h(%s)' % result
return result
#
lines = []
w = lines.append
pyfunc = self.cpython_name
if not pyfunc:
raise ValueError(f"Cannot generate implementation for {self}")
return_type = toC(self.node.type.type)
w('HPyAPI_STORAGE %s' % signature(self.base_name()))
w('{')
w(' return %s;' % call(pyfunc, return_type))
w('}')
return '\n'.join(lines)
def ctx_pypy_type(self):
return 'void *'
def pypy_stub(self):
signature = toC(self.node)
if self.is_varargs():
return '# %s' % signature
#
argnames = [p.name for p in self.node.type.args.params]
lines = []
w = lines.append
w('@API.func("%s")' % signature)
w('def %s(space, %s):' % (self.name, ', '.join(argnames)))
w(' from rpython.rlib.nonconst import NonConstant # for the annotator')
w(' if NonConstant(False): return 0')
w(' raise NotImplementedError')
w('')
return '\n'.join(lines)
@attr.s
class GlobalVar:
name = attr.ib()
node = attr.ib(repr=False)
def ctx_name(self):
return self.name
def ctx_impl_name(self):
return '(HPy){CONSTANT_%s}' % (self.name.upper(),)
def ctx_decl(self):
return toC(self.node)
def trampoline_def(self):
return None
def ctx_pypy_type(self):
return 'struct _HPy_s'
def pypy_stub(self):
return ''
class FuncDeclVisitor(pycparser.c_ast.NodeVisitor):
def __init__(self, convert_name):
self.declarations = []
self.convert_name = convert_name
def visit_Decl(self, node):
if isinstance(node.type, c_ast.FuncDecl):
self._visit_function(node)
elif isinstance(node.type, c_ast.TypeDecl):
self._visit_global_var(node)
def _visit_function(self, node):
name = node.name
if not name.startswith('HPy') and not name.startswith('_HPy'):
print('WARNING: Ignoring non-hpy declaration: %s' % name)
return
for p in node.type.args.params:
if hasattr(p, 'name') and p.name is None:
raise ValueError("non-named argument in declaration of %s" %
name)
cpy_name = self.convert_name(name)
self.declarations.append(Function(name, cpy_name, node))
def _visit_global_var(self, node):
name = node.name
if not name.startswith('h_'):
print('WARNING: Ignoring non-hpy variable declaration: %s' % name)
return
assert toC(node.type.type) == "HPy"
self.declarations.append(GlobalVar(name, node))
SPECIAL_CASES = {
'HPy_Dup': None,
'HPy_Close': None,
'HPyModule_Create': None,
'HPy_GetAttr': 'PyObject_GetAttr',
'HPy_GetAttr_s': 'PyObject_GetAttrString',
'HPy_HasAttr': 'PyObject_HasAttr',
'HPy_HasAttr_s': 'PyObject_HasAttrString',
'HPy_SetAttr': 'PyObject_SetAttr',
'HPy_SetAttr_s': 'PyObject_SetAttrString',
'HPy_GetItem': 'PyObject_GetItem',
'HPy_GetItem_i': None,
'HPy_GetItem_s': None,
'HPy_SetItem': 'PyObject_SetItem',
'HPy_SetItem_i': None,
'HPy_SetItem_s': None,
'HPy_FromPyObject': None,
'HPy_AsPyObject': None,
'_HPy_CallRealFunctionFromTrampoline': None,
'HPyErr_Occurred': None,
'HPy_Add': 'PyNumber_Add',
'HPy_Subtract': 'PyNumber_Subtract',
'HPy_Multiply': 'PyNumber_Multiply',
'HPy_MatrixMultiply': 'PyNumber_MatrixMultiply',
'HPy_FloorDivide': 'PyNumber_FloorDivide',
'HPy_TrueDivide': 'PyNumber_TrueDivide',
'HPy_Remainder': 'PyNumber_Remainder',
'HPy_Divmod': 'PyNumber_Divmod',
'HPy_Power': 'PyNumber_Power',
'HPy_Negative': 'PyNumber_Negative',
'HPy_Positive': 'PyNumber_Positive',
'HPy_Absolute': 'PyNumber_Absolute',
'HPy_Invert': 'PyNumber_Invert',
'HPy_Lshift': 'PyNumber_Lshift',
'HPy_Rshift': 'PyNumber_Rshift',
'HPy_And': 'PyNumber_And',
'HPy_Xor': 'PyNumber_Xor',
'HPy_Or': 'PyNumber_Or',
'HPy_Index': 'PyNumber_Index',
'HPy_Long': 'PyNumber_Long',
'HPy_Float': 'PyNumber_Float',
'HPy_InPlaceAdd': 'PyNumber_InPlaceAdd',
'HPy_InPlaceSubtract': 'PyNumber_InPlaceSubtract',
'HPy_InPlaceMultiply': 'PyNumber_InPlaceMultiply',
'HPy_InPlaceMatrixMultiply': 'PyNumber_InPlaceMatrixMultiply',
'HPy_InPlaceFloorDivide': 'PyNumber_InPlaceFloorDivide',
'HPy_InPlaceTrueDivide': 'PyNumber_InPlaceTrueDivide',
'HPy_InPlaceRemainder': 'PyNumber_InPlaceRemainder',
'HPy_InPlacePower': 'PyNumber_InPlacePower',
'HPy_InPlaceLshift': 'PyNumber_InPlaceLshift',
'HPy_InPlaceRshift': 'PyNumber_InPlaceRshift',
'HPy_InPlaceAnd': 'PyNumber_InPlaceAnd',
'HPy_InPlaceXor': 'PyNumber_InPlaceXor',
'HPy_InPlaceOr': 'PyNumber_InPlaceOr',
}
def convert_name(hpy_name):
if hpy_name in SPECIAL_CASES:
return SPECIAL_CASES[hpy_name]
return re.sub(r'^_?HPy_?', 'Py', hpy_name)
class AutoGen:
def __init__(self, filename):
self.ast = pycparser.parse_file(filename, use_cpp=True)
#self.ast.show()
self.collect_declarations()
def get(self, name):
for d in self.declarations:
if d.name == name:
return d
raise KeyError(name)
def collect_declarations(self):
v = FuncDeclVisitor(convert_name)
v.visit(self.ast)
self.declarations = v.declarations
def gen_ctx_decl(self):
# struct _HPyContext_s {
# int ctx_version;
# HPy h_None;
# ...
# HPy (*ctx_Module_Create)(HPyContext ctx, HPyModuleDef *def);
# ...
# }
lines = []
w = lines.append
w('struct _HPyContext_s {')
w(' int ctx_version;')
for f in self.declarations:
w(' %s;' % f.ctx_decl())
w('};')
return '\n'.join(lines)
def gen_ctx_def(self):
# struct _HPyContext_s global_ctx = {
# .ctx_version = 1,
# .h_None = (HPy){CONSTANT_H_NONE},
# ...
# .ctx_Module_Create = &ctx_Module_Create,
# ...
# }
lines = []
w = lines.append
w('struct _HPyContext_s global_ctx = {')
w(' .ctx_version = 1,')
for f in self.declarations:
name = f.ctx_name()
impl = f.ctx_impl_name()
w(' .%s = %s,' % (name, impl))
w('};')
return '\n'.join(lines)
def gen_func_trampolines(self):
lines = []
for f in self.declarations:
trampoline = f.trampoline_def()
if trampoline:
lines.append(trampoline)
lines.append('')
return '\n'.join(lines)
def gen_func_implementations(self):
lines = []
for f in self.declarations:
if not isinstance(f, Function):
continue
if not f.cpython_name:
continue
lines.append(f.implementation())
lines.append('')
return '\n'.join(lines)
def gen_pypy_decl(self):
lines = []
w = lines.append
w("typedef struct _HPyContext_s {")
w(" int ctx_version;")
for f in self.declarations:
w(" %s %s;" % (f.ctx_pypy_type(), f.ctx_name()))
w("} _struct_HPyContext_s;")
w("")
w("")
# generate stubs for all the API functions
for f in self.declarations:
w(f.pypy_stub())
return '\n'.join(lines)
def main():
root = py.path.local(__file__).dirpath().dirpath()
include = root.join('hpy', 'devel', 'include')
autogen_ctx = include.join('universal', 'autogen_ctx.h')
autogen_trampolines = include.join('universal', 'autogen_trampolines.h')
autogen_ctx_def = root.join('hpy', 'universal', 'src', 'autogen_ctx_def.h')
autogen_impl = include.join('common', 'autogen_impl.h')
autogen_pypy = root.join('tools', 'autogen_pypy.txt')
autogen = AutoGen(root.join('tools', 'public_api.h'))
for func in autogen.declarations:
print(func)
ctx_decl = autogen.gen_ctx_decl()
func_trampolines = autogen.gen_func_trampolines()
ctx_def = autogen.gen_ctx_def()
impl = autogen.gen_func_implementations()
pypy_decl = autogen.gen_pypy_decl()
with autogen_ctx.open('w') as f:
print(DISCLAIMER, file=f)
print(ctx_decl, file=f)
with autogen_trampolines.open('w') as f:
print(DISCLAIMER, file=f)
print(func_trampolines, file=f)
with autogen_ctx_def.open('w') as f:
print(DISCLAIMER, file=f)
print(ctx_def, file=f)
with autogen_impl.open('w') as f:
print(DISCLAIMER, file=f)
print(impl, file=f)
with autogen_pypy.open('w') as f:
print(pypy_decl, file=f)
if __name__ == '__main__':
main()
| if toC(p.type) == 'HPyContext':
continue
elif toC(p.type) == 'HPy':
arg = '_h2py(%s)' % p.name
else:
arg = p.name
args.append(arg) | conditional_block |
autogen.py | """
Parse public_api.h and generates various stubs around
"""
import attr
import re
import py
from copy import deepcopy
import pycparser
from pycparser import c_ast
from pycparser.c_generator import CGenerator
DISCLAIMER = """
/*
DO NOT EDIT THIS FILE!
This file is automatically generated by tools/autogen.py from tools/public_api.h.
Run this to regenerate:
make autogen
*/
"""
def toC(node):
return toC.gen.visit(node)
toC.gen = CGenerator()
@attr.s
class Function:
_BASE_NAME = re.compile(r'^_?HPy_?')
name = attr.ib()
cpython_name = attr.ib()
node = attr.ib(repr=False)
def _find_typedecl(self, node):
while not isinstance(node, c_ast.TypeDecl):
node = node.type
return node
def base_name(self):
return self._BASE_NAME.sub('', self.name)
def ctx_name(self):
# e.g. "ctx_Module_Create"
return self._BASE_NAME.sub(r'ctx_', self.name)
def ctx_impl_name(self):
return '&%s' % self.ctx_name()
def is_varargs(self):
return (len(self.node.type.args.params) > 0 and
isinstance(self.node.type.args.params[-1], c_ast.EllipsisParam))
def ctx_decl(self):
# e.g. "HPy (*ctx_Module_Create)(HPyContext ctx, HPyModuleDef *def)"
#
# turn the function declaration into a function POINTER declaration
newnode = deepcopy(self.node)
newnode.type = c_ast.PtrDecl(type=newnode.type, quals=[])
# fix the name of the function pointer
typedecl = self._find_typedecl(newnode)
typedecl.declname = self.ctx_name()
return toC(newnode)
def trampoline_def(self):
# static inline HPy HPyModule_Create(HPyContext ctx, HPyModuleDef *def) {
# return ctx->ctx_Module_Create ( ctx, def );
# }
rettype = toC(self.node.type.type)
parts = []
w = parts.append
w('static inline')
w(toC(self.node))
w('{\n ')
# trampolines cannot deal with varargs easily
assert not self.is_varargs()
if rettype == 'void':
w('ctx->%s' % self.ctx_name())
else:
w('return ctx->%s' % self.ctx_name())
w('(')
params = [p.name for p in self.node.type.args.params]
w(', '.join(params))
w(');')
w('\n}')
return ' '.join(parts)
def implementation(self):
def signature(base_name):
# HPy _HPy_API_NAME(Number_Add)(HPyContext ctx, HPy x, HPy y)
newnode = deepcopy(self.node)
typedecl = self._find_typedecl(newnode)
# rename the function
if self.name.startswith('HPy_'):
typedecl.declname = '_HPy_IMPL_NAME_NOPREFIX(%s)' % base_name
else:
typedecl.declname = '_HPy_IMPL_NAME(%s)' % base_name
return toC(newnode)
#
def call(pyfunc, return_type):
# return _py2h(PyNumber_Add(_h2py(x), _h2py(y)))
args = []
for p in self.node.type.args.params:
if toC(p.type) == 'HPyContext':
continue
elif toC(p.type) == 'HPy':
arg = '_h2py(%s)' % p.name
else:
arg = p.name
args.append(arg)
result = '%s(%s)' % (pyfunc, ', '.join(args))
if return_type == 'HPy':
result = '_py2h(%s)' % result
return result
#
lines = []
w = lines.append
pyfunc = self.cpython_name
if not pyfunc:
raise ValueError(f"Cannot generate implementation for {self}")
return_type = toC(self.node.type.type)
w('HPyAPI_STORAGE %s' % signature(self.base_name()))
w('{')
w(' return %s;' % call(pyfunc, return_type))
w('}')
return '\n'.join(lines)
def ctx_pypy_type(self):
return 'void *'
def pypy_stub(self):
signature = toC(self.node)
if self.is_varargs():
return '# %s' % signature
#
argnames = [p.name for p in self.node.type.args.params]
lines = []
w = lines.append
w('@API.func("%s")' % signature)
w('def %s(space, %s):' % (self.name, ', '.join(argnames)))
w(' from rpython.rlib.nonconst import NonConstant # for the annotator')
w(' if NonConstant(False): return 0')
w(' raise NotImplementedError')
w('')
return '\n'.join(lines)
@attr.s
class GlobalVar:
name = attr.ib()
node = attr.ib(repr=False)
def ctx_name(self):
return self.name
def ctx_impl_name(self):
return '(HPy){CONSTANT_%s}' % (self.name.upper(),)
def ctx_decl(self):
return toC(self.node)
def trampoline_def(self):
return None
def ctx_pypy_type(self):
return 'struct _HPy_s'
def pypy_stub(self):
return ''
class FuncDeclVisitor(pycparser.c_ast.NodeVisitor):
def __init__(self, convert_name):
self.declarations = []
self.convert_name = convert_name
def visit_Decl(self, node):
if isinstance(node.type, c_ast.FuncDecl):
self._visit_function(node)
elif isinstance(node.type, c_ast.TypeDecl):
self._visit_global_var(node)
def _visit_function(self, node):
name = node.name
if not name.startswith('HPy') and not name.startswith('_HPy'):
print('WARNING: Ignoring non-hpy declaration: %s' % name)
return
for p in node.type.args.params:
if hasattr(p, 'name') and p.name is None:
raise ValueError("non-named argument in declaration of %s" %
name)
cpy_name = self.convert_name(name)
self.declarations.append(Function(name, cpy_name, node))
def _visit_global_var(self, node):
name = node.name
if not name.startswith('h_'):
print('WARNING: Ignoring non-hpy variable declaration: %s' % name)
return
assert toC(node.type.type) == "HPy"
self.declarations.append(GlobalVar(name, node))
SPECIAL_CASES = {
'HPy_Dup': None,
'HPy_Close': None,
'HPyModule_Create': None,
'HPy_GetAttr': 'PyObject_GetAttr',
'HPy_GetAttr_s': 'PyObject_GetAttrString',
'HPy_HasAttr': 'PyObject_HasAttr',
'HPy_HasAttr_s': 'PyObject_HasAttrString',
'HPy_SetAttr': 'PyObject_SetAttr',
'HPy_SetAttr_s': 'PyObject_SetAttrString',
'HPy_GetItem': 'PyObject_GetItem',
'HPy_GetItem_i': None,
'HPy_GetItem_s': None,
'HPy_SetItem': 'PyObject_SetItem',
'HPy_SetItem_i': None,
'HPy_SetItem_s': None,
'HPy_FromPyObject': None,
'HPy_AsPyObject': None,
'_HPy_CallRealFunctionFromTrampoline': None,
'HPyErr_Occurred': None,
'HPy_Add': 'PyNumber_Add',
'HPy_Subtract': 'PyNumber_Subtract',
'HPy_Multiply': 'PyNumber_Multiply',
'HPy_MatrixMultiply': 'PyNumber_MatrixMultiply',
'HPy_FloorDivide': 'PyNumber_FloorDivide',
'HPy_TrueDivide': 'PyNumber_TrueDivide',
'HPy_Remainder': 'PyNumber_Remainder',
'HPy_Divmod': 'PyNumber_Divmod',
'HPy_Power': 'PyNumber_Power',
'HPy_Negative': 'PyNumber_Negative',
'HPy_Positive': 'PyNumber_Positive',
'HPy_Absolute': 'PyNumber_Absolute',
'HPy_Invert': 'PyNumber_Invert',
'HPy_Lshift': 'PyNumber_Lshift',
'HPy_Rshift': 'PyNumber_Rshift',
'HPy_And': 'PyNumber_And',
'HPy_Xor': 'PyNumber_Xor',
'HPy_Or': 'PyNumber_Or',
'HPy_Index': 'PyNumber_Index',
'HPy_Long': 'PyNumber_Long',
'HPy_Float': 'PyNumber_Float',
'HPy_InPlaceAdd': 'PyNumber_InPlaceAdd',
'HPy_InPlaceSubtract': 'PyNumber_InPlaceSubtract',
'HPy_InPlaceMultiply': 'PyNumber_InPlaceMultiply',
'HPy_InPlaceMatrixMultiply': 'PyNumber_InPlaceMatrixMultiply',
'HPy_InPlaceFloorDivide': 'PyNumber_InPlaceFloorDivide',
'HPy_InPlaceTrueDivide': 'PyNumber_InPlaceTrueDivide',
'HPy_InPlaceRemainder': 'PyNumber_InPlaceRemainder',
'HPy_InPlacePower': 'PyNumber_InPlacePower',
'HPy_InPlaceLshift': 'PyNumber_InPlaceLshift',
'HPy_InPlaceRshift': 'PyNumber_InPlaceRshift',
'HPy_InPlaceAnd': 'PyNumber_InPlaceAnd',
'HPy_InPlaceXor': 'PyNumber_InPlaceXor',
'HPy_InPlaceOr': 'PyNumber_InPlaceOr',
}
def convert_name(hpy_name):
if hpy_name in SPECIAL_CASES:
return SPECIAL_CASES[hpy_name]
return re.sub(r'^_?HPy_?', 'Py', hpy_name)
class AutoGen:
def __init__(self, filename):
self.ast = pycparser.parse_file(filename, use_cpp=True)
#self.ast.show()
self.collect_declarations()
def get(self, name):
for d in self.declarations:
if d.name == name: | def collect_declarations(self):
v = FuncDeclVisitor(convert_name)
v.visit(self.ast)
self.declarations = v.declarations
def gen_ctx_decl(self):
# struct _HPyContext_s {
# int ctx_version;
# HPy h_None;
# ...
# HPy (*ctx_Module_Create)(HPyContext ctx, HPyModuleDef *def);
# ...
# }
lines = []
w = lines.append
w('struct _HPyContext_s {')
w(' int ctx_version;')
for f in self.declarations:
w(' %s;' % f.ctx_decl())
w('};')
return '\n'.join(lines)
def gen_ctx_def(self):
# struct _HPyContext_s global_ctx = {
# .ctx_version = 1,
# .h_None = (HPy){CONSTANT_H_NONE},
# ...
# .ctx_Module_Create = &ctx_Module_Create,
# ...
# }
lines = []
w = lines.append
w('struct _HPyContext_s global_ctx = {')
w(' .ctx_version = 1,')
for f in self.declarations:
name = f.ctx_name()
impl = f.ctx_impl_name()
w(' .%s = %s,' % (name, impl))
w('};')
return '\n'.join(lines)
def gen_func_trampolines(self):
lines = []
for f in self.declarations:
trampoline = f.trampoline_def()
if trampoline:
lines.append(trampoline)
lines.append('')
return '\n'.join(lines)
def gen_func_implementations(self):
lines = []
for f in self.declarations:
if not isinstance(f, Function):
continue
if not f.cpython_name:
continue
lines.append(f.implementation())
lines.append('')
return '\n'.join(lines)
def gen_pypy_decl(self):
lines = []
w = lines.append
w("typedef struct _HPyContext_s {")
w(" int ctx_version;")
for f in self.declarations:
w(" %s %s;" % (f.ctx_pypy_type(), f.ctx_name()))
w("} _struct_HPyContext_s;")
w("")
w("")
# generate stubs for all the API functions
for f in self.declarations:
w(f.pypy_stub())
return '\n'.join(lines)
def main():
root = py.path.local(__file__).dirpath().dirpath()
include = root.join('hpy', 'devel', 'include')
autogen_ctx = include.join('universal', 'autogen_ctx.h')
autogen_trampolines = include.join('universal', 'autogen_trampolines.h')
autogen_ctx_def = root.join('hpy', 'universal', 'src', 'autogen_ctx_def.h')
autogen_impl = include.join('common', 'autogen_impl.h')
autogen_pypy = root.join('tools', 'autogen_pypy.txt')
autogen = AutoGen(root.join('tools', 'public_api.h'))
for func in autogen.declarations:
print(func)
ctx_decl = autogen.gen_ctx_decl()
func_trampolines = autogen.gen_func_trampolines()
ctx_def = autogen.gen_ctx_def()
impl = autogen.gen_func_implementations()
pypy_decl = autogen.gen_pypy_decl()
with autogen_ctx.open('w') as f:
print(DISCLAIMER, file=f)
print(ctx_decl, file=f)
with autogen_trampolines.open('w') as f:
print(DISCLAIMER, file=f)
print(func_trampolines, file=f)
with autogen_ctx_def.open('w') as f:
print(DISCLAIMER, file=f)
print(ctx_def, file=f)
with autogen_impl.open('w') as f:
print(DISCLAIMER, file=f)
print(impl, file=f)
with autogen_pypy.open('w') as f:
print(pypy_decl, file=f)
if __name__ == '__main__':
main() | return d
raise KeyError(name)
| random_line_split |
appStore.ts | import { observable, action, computed, runInAction, reaction } from 'mobx'
import { get } from '../utils/request'
import { Toast } from 'antd-mobile'
import { getRandom } from '../utils/util'
import Lyric from 'lyric-parser'
interface modeProps {
sequence?: number
shuffle?: number
loop?: number
}
interface PlayHistorysProps{
isAdd?:boolean
song?:any
index?:number
}
const mode: modeProps = {
sequence: 0, //顺序播放
shuffle: 1, //随机播放
loop: 2 //单曲循环
}
class AppStore {
@observable isExpandSider: boolean //侧边栏是否展开
@observable playing: boolean //歌曲是否正在播放
@observable playlist: any[] //播放列表
@observable mode: number //播放模式
@observable currentIndex: number //当前播放歌曲索引
@observable isFullScreen: boolean //是否全屏播放音乐
@observable likeSongs: any[] //喜欢的音乐列表
@observable playHistorys: any[] //播放历史
@observable audio: any //audio
@observable songReady: boolean //歌曲是否已经准备好了播放
@observable currentTime: number //歌曲播放的时间
@observable isShowPlaylist: boolean //是否显示播放列表
@observable lyric: any //歌词
@observable playingLyric: string //正在播放的歌词
@observable playingLineNum: number //正在播放的歌词行数
@observable errorTimer:any
@observable sheetSongs:any
constructor() {
this.isExpandSider = false
this.playing = false
this.playlist = []
this.mode = mode.sequence
this.currentIndex = -1
this.isFullScreen = false
this.likeSongs = JSON.parse(localStorage.getItem('likeSongs')) || []
this.playHistorys = JSON.parse(localStorage.getItem('playHistorys')) || []
this.audio = null
this.songReady = false
this.currentTime = 0
this.isShowPlaylist = false
this.lyric = null
this.playingLyric = ''
this.playingLineNum = 0
this.errorTimer = null
this.sheetSongs = []
//当currentSong变化时作出反应
reaction(() => this.currentSong, () => {
this.currentSongChange()
})
}
/**
* 获取当前播放歌曲,并对数据进行处理
* @returns {*|{}}
*/
// 当playlist,currentIndex变化时,reaction就会触发,所以要对前后的变化进行判断是否是同一首歌
@computed({equals: (prevSong:{id:number}, newSong:{id:number}) =>prevSong.id === newSong.id})
get currentSong() {
let song:any = {}
if (this.playlist[this.currentIndex]) {
//引用类型的赋值一定要注意,这里必须深拷贝,否则song的改变会改变this.playlist,this.playlist的改变又触发计算属性,最后导致报错
song = { ...this.playlist[this.currentIndex] }
song = {...this.playlist[this.currentIndex]}
song.artists = song.ar.map(item => item.name).join('/')
song.image = song.al ? song.al.picUrl : ''
song.url = `https://music.163.com/song/media/outer/url?id=${song.id}.mp3`
song.duration = (song.dt / 1000) || (song.duration) / 1000 || 0
}
return song
}
/**
* 获取播放时间的百分比
* @returns {number}
*/
@computed
get percent() {
if (this.currentSong.duration) {
return this.currentTime / this.currentSong.duration
} else {
return 0
}
}
@action
setStore = (obj) => {
if (Object.prototype.toString.call(obj) !== '[object Object]') {
return
}
for (let [key, value] of Object.entries(obj)) {
this[key] = value
}
}
@action
setSheetSongs = (obj) => {
this.sheetSongs = obj
}
// @action
// getSheetSongs = (size = 0) => {
// if(songs.length >= allList.length){
// return
// }
// this.sheetSongs = this.sheetSongs.slice(size, size + 30)
// let list = []
// //增加两秒的延迟,实际项目中可以不用,这里只是为显示这样一个加载中的过程
// setTimeout(()=>{
// list = allList.slice(size, size + 30)
// setSongs(songs.concat(list))
// },2000)
// }
/**
* 切换侧边栏的折叠展开
*/
@action
toggleExpand = () => {
this.isExpandSider = !this.isExpandSider
}
/**
* 设置是否全屏播放音乐
* @param flag
*/
@action
setFullScreen = (flag) => {
this.isFullScreen = flag
}
/**
* 选择播放歌曲,设置播放列表
* @param obj
* @returns {Promise.<void>}
*/
@action
onSelectSong = async (obj) => {
const { songlist, index } = obj
this.playlist = songlist ? songlist.slice() : []
this.currentIndex = index
this.isFullScreen = true
}
/**
* 当current变化时的处理
* @returns {Promise.<void>}
*/
@action
currentSongChange = () => {
if (!this.currentSong.id) {
return
}
this.lyric && this.lyric.stop()
this.playing = true
this.currentTime = 0
this.playingLineNum = 0
this.playingLyric = ''
this.lyric = null
setTimeout(() => {
this.audio && this.audio.play()
this.getLyric(this.currentSong.id)
})
} | * 获取歌曲歌词
* @param id
* @returns {Promise.<void>}
*/
@action
getLyric = async (id) => {
const res = await get(`/lyric?id=${id}`)
runInAction(() => {
this.lyric = res ? new Lyric(res, this.handler) : null
this.lyric && this.lyric.play()
})
}
/**
* 播放的歌词变化时的处理
* @param lineNum 播放的行数
* @param txt 当前播放歌词
*/
@action
handler = ({ lineNum, txt }) => {
this.playingLyric = txt
this.playingLineNum = lineNum
}
/**
* 切换播放模式
*/
@action
changeMode = () => {
let mode = (this.mode + 1) % 3
const infos = ['顺序播放', '随机播放', '单曲循环']
Toast.info(infos[mode], 1, null, false)
this.mode = mode
}
/**
* 循环播放
*/
@action
loop = () => {
this.audio.currentTime = 0
this.audio.play()
this.playing = true
this.lyric && this.lyric.seek(0)
}
/**
* 切歌,实际上就是维护的currentIndex
* @param direction 上一首(prev) 下一首(next)
*/
@action
changeSong = (direction) => {
let currentIndex = this.currentIndex
if (!this.songReady) {
return
}
if (this.playlist.length === 1) {
this.loop()
return
}
if (this.mode === mode.shuffle) {
currentIndex = getRandom(0, this.playlist.length - 1)
while (currentIndex === this.currentIndex) {
currentIndex = getRandom(0, this.playlist.length - 1)
}
} else {
if (direction === 'prev') {
currentIndex--
if (currentIndex === -1) {
currentIndex = this.playlist.length - 1
}
}
if (direction === 'next') {
currentIndex++
if (currentIndex === this.playlist.length) {
currentIndex = 0
}
}
}
this.currentIndex = currentIndex
this.songReady = false
}
/**
* 暂停/播放音乐
*/
@action
togglePlay = () => {
clearTimeout(this.errorTimer)
if (this.playing) {
this.audio && this.audio.pause()
} else {
this.audio && this.audio.play()
}
this.lyric && this.lyric.togglePlay()
this.playing = !this.playing
}
/**
* 设置喜欢的音乐
* @param isAdd 是否是添加音乐
* @param song 喜欢的音乐
* @param index 索引
*/
@action
setLikes = (song) => {
let likeSongs = this.likeSongs.slice()
const findx = likeSongs.findIndex(item => item.id === song.id)
if (findx !== -1) {
likeSongs.splice(findx, 1)
} else {
likeSongs.unshift(song)
}
localStorage.setItem('likeSongs', JSON.stringify(likeSongs))
this.likeSongs = JSON.parse(localStorage.getItem('likeSongs')) || []
}
/**
* 设置播放历史
* @param isAdd 是否是添加音乐
* @param song 音乐
* @param index 索引
*/
@action
setPlayHistorys = ({ isAdd, song, index }:PlayHistorysProps) => {
let playHistorys = this.playHistorys.slice()
if (isAdd) {
let exist = playHistorys.findIndex(item => item.id === song.id)
if (exist !== -1) {
playHistorys.splice(exist, 1)
}
playHistorys.unshift(song)
} else {
playHistorys.splice(index, 1)
}
localStorage.setItem('playHistorys', JSON.stringify(playHistorys))
this.playHistorys = JSON.parse(localStorage.getItem('playHistorys')) || []
}
/**
* 删除播放列表中的歌曲
* @param index
*/
@action
deleteSong = (index) => {
let playlist = this.playlist.slice()
let currentIndex = this.currentIndex
playlist.splice(index, 1)
if (currentIndex > index || currentIndex === playlist.length) {
currentIndex--
}
if (playlist.length === 0) {
this.isShowPlaylist = false
}
this.playlist = playlist
this.currentIndex = currentIndex
}
/**
* 添加歌曲到播放列表中
* @param song
*/
@action
addSong = (song) => {
let playlist = this.playlist.slice()
let currentIndex = this.currentIndex
const findex = playlist.findIndex(item => item.id === song.id)
if (findex !== -1) {
this.currentIndex = findex
return
}
currentIndex++
playlist.splice(currentIndex, 0, song)
this.playlist = playlist
this.currentIndex = currentIndex
this.isFullScreen = true
}
/**------------------------------------**/
/**
* 当歌曲准备好的处理
*/
@action
onCanPlay = () => {
this.songReady = true
this.setPlayHistorys({
isAdd: true,
song: this.currentSong
})
}
/**
* 播放错误时的处理
*/
@action
onError = () => {
Toast.info('播放错误,自动跳到下一首', 2, null, false)
this.errorTimer = setTimeout(() => {
this.changeSong('next')
}, 2000)
}
/**
* 当歌曲播放结束后的处理
*/
@action
onEnded = () => {
if (this.mode === mode.loop) {
this.loop()
} else {
this.changeSong('next')
}
}
/**
* 播放时间更新时的处理
* @param e
*/
@action
onTimeUpdate = (e) => {
this.currentTime = e.target.currentTime
}
/**
* 当播放百分比变化的处理
* @param percent
*/
onPercentChange = (percent) => {
const currentTime = percent * this.currentSong.duration
this.audio.currentTime = currentTime
this.lyric && this.lyric.seek(currentTime * 1000)
}
}
export default new AppStore() | /** | random_line_split |
appStore.ts | import { observable, action, computed, runInAction, reaction } from 'mobx'
import { get } from '../utils/request'
import { Toast } from 'antd-mobile'
import { getRandom } from '../utils/util'
import Lyric from 'lyric-parser'
interface modeProps {
sequence?: number
shuffle?: number
loop?: number
}
interface PlayHistorysProps{
isAdd?:boolean
song?:any
index?:number
}
const mode: modeProps = {
sequence: 0, //顺序播放
shuffle: 1, //随机播放
loop: 2 //单曲循环
}
class AppStore {
@observable isExpandSider: boolean //侧边栏是否展开
@observable playing: boolean //歌曲是否正在播放
@observable playlist: any[] //播放列表
@observable mode: number //播放模式
@observable currentIndex: number //当前播放歌曲索引
@observable isFullScreen: boolean //是否全屏播放音乐
@observable likeSongs: any[] //喜欢的音乐列表
@observable playHistorys: any[] //播放历史
@observable audio: any //audio
@observable songReady: boolean //歌曲是否已经准备好了播放
@observable currentTime: number //歌曲播放的时间
@observable isShowPlaylist: boolean //是否显示播放列表
@observable lyric: any //歌词
@observable playingLyric: string //正在播放的歌词
@observable playingLineNum: number //正在播放的歌词行数
@observable errorTimer:any
@observable sheetSongs:any
constructor() {
this.isExpandSider = false
this.playing = false
this.playlist = []
this.mode = mode.sequence
this.currentIndex = -1
this.isFullScreen = false
this.likeSongs = JSON.parse(localStorage.getItem('likeSongs')) || []
this.playHistorys = JSON.parse(localStorage.getItem('playHistorys')) || []
this.audio = null
this.songReady = false
this.currentTime = 0
this.isShowPlaylist = false
this.lyric = null
this.playingLyric = ''
this.playingLineNum = 0
this.errorTimer = null
this.sheetSongs = []
//当currentSong变化时作出反应
reaction(() => this.currentSong, () => {
this.currentSongChange()
})
}
/**
* 获取当前播放歌曲,并对数据进行处理
* @returns {*|{}}
*/
// 当playlist,currentIndex变化时,reaction就会触发,所以要对前后的变化进行判断是否是同一首歌
@computed({equals: (prevSong:{id:number}, newSong:{id:number}) =>prevSong.id === newSong.id})
get currentSong() {
let song:any = {}
if (this.playlist[this.currentIndex]) {
//引用类型的赋值一定要注意,这里必须深拷贝,否则song的改变会改变this.playlist,this.playlist的改变又触发计算属性,最后导致报错
song = { ...this.playlist[this.currentIndex] }
song = {...this.playlist[this.currentIndex]}
song.artists = song.ar.map(ite | lue] of Object.entries(obj)) {
this[key] = value
}
}
@action
setSheetSongs = (obj) => {
this.sheetSongs = obj
}
// @action
// getSheetSongs = (size = 0) => {
// if(songs.length >= allList.length){
// return
// }
// this.sheetSongs = this.sheetSongs.slice(size, size + 30)
// let list = []
// //增加两秒的延迟,实际项目中可以不用,这里只是为显示这样一个加载中的过程
// setTimeout(()=>{
// list = allList.slice(size, size + 30)
// setSongs(songs.concat(list))
// },2000)
// }
/**
* 切换侧边栏的折叠展开
*/
@action
toggleExpand = () => {
this.isExpandSider = !this.isExpandSider
}
/**
* 设置是否全屏播放音乐
* @param flag
*/
@action
setFullScreen = (flag) => {
this.isFullScreen = flag
}
/**
* 选择播放歌曲,设置播放列表
* @param obj
* @returns {Promise.<void>}
*/
@action
onSelectSong = async (obj) => {
const { songlist, index } = obj
this.playlist = songlist ? songlist.slice() : []
this.currentIndex = index
this.isFullScreen = true
}
/**
* 当current变化时的处理
* @returns {Promise.<void>}
*/
@action
currentSongChange = () => {
if (!this.currentSong.id) {
return
}
this.lyric && this.lyric.stop()
this.playing = true
this.currentTime = 0
this.playingLineNum = 0
this.playingLyric = ''
this.lyric = null
setTimeout(() => {
this.audio && this.audio.play()
this.getLyric(this.currentSong.id)
})
}
/**
* 获取歌曲歌词
* @param id
* @returns {Promise.<void>}
*/
@action
getLyric = async (id) => {
const res = await get(`/lyric?id=${id}`)
runInAction(() => {
this.lyric = res ? new Lyric(res, this.handler) : null
this.lyric && this.lyric.play()
})
}
/**
* 播放的歌词变化时的处理
* @param lineNum 播放的行数
* @param txt 当前播放歌词
*/
@action
handler = ({ lineNum, txt }) => {
this.playingLyric = txt
this.playingLineNum = lineNum
}
/**
* 切换播放模式
*/
@action
changeMode = () => {
let mode = (this.mode + 1) % 3
const infos = ['顺序播放', '随机播放', '单曲循环']
Toast.info(infos[mode], 1, null, false)
this.mode = mode
}
/**
* 循环播放
*/
@action
loop = () => {
this.audio.currentTime = 0
this.audio.play()
this.playing = true
this.lyric && this.lyric.seek(0)
}
/**
* 切歌,实际上就是维护的currentIndex
* @param direction 上一首(prev) 下一首(next)
*/
@action
changeSong = (direction) => {
let currentIndex = this.currentIndex
if (!this.songReady) {
return
}
if (this.playlist.length === 1) {
this.loop()
return
}
if (this.mode === mode.shuffle) {
currentIndex = getRandom(0, this.playlist.length - 1)
while (currentIndex === this.currentIndex) {
currentIndex = getRandom(0, this.playlist.length - 1)
}
} else {
if (direction === 'prev') {
currentIndex--
if (currentIndex === -1) {
currentIndex = this.playlist.length - 1
}
}
if (direction === 'next') {
currentIndex++
if (currentIndex === this.playlist.length) {
currentIndex = 0
}
}
}
this.currentIndex = currentIndex
this.songReady = false
}
/**
* 暂停/播放音乐
*/
@action
togglePlay = () => {
clearTimeout(this.errorTimer)
if (this.playing) {
this.audio && this.audio.pause()
} else {
this.audio && this.audio.play()
}
this.lyric && this.lyric.togglePlay()
this.playing = !this.playing
}
/**
* 设置喜欢的音乐
* @param isAdd 是否是添加音乐
* @param song 喜欢的音乐
* @param index 索引
*/
@action
setLikes = (song) => {
let likeSongs = this.likeSongs.slice()
const findx = likeSongs.findIndex(item => item.id === song.id)
if (findx !== -1) {
likeSongs.splice(findx, 1)
} else {
likeSongs.unshift(song)
}
localStorage.setItem('likeSongs', JSON.stringify(likeSongs))
this.likeSongs = JSON.parse(localStorage.getItem('likeSongs')) || []
}
/**
* 设置播放历史
* @param isAdd 是否是添加音乐
* @param song 音乐
* @param index 索引
*/
@action
setPlayHistorys = ({ isAdd, song, index }:PlayHistorysProps) => {
let playHistorys = this.playHistorys.slice()
if (isAdd) {
let exist = playHistorys.findIndex(item => item.id === song.id)
if (exist !== -1) {
playHistorys.splice(exist, 1)
}
playHistorys.unshift(song)
} else {
playHistorys.splice(index, 1)
}
localStorage.setItem('playHistorys', JSON.stringify(playHistorys))
this.playHistorys = JSON.parse(localStorage.getItem('playHistorys')) || []
}
/**
* 删除播放列表中的歌曲
* @param index
*/
@action
deleteSong = (index) => {
let playlist = this.playlist.slice()
let currentIndex = this.currentIndex
playlist.splice(index, 1)
if (currentIndex > index || currentIndex === playlist.length) {
currentIndex--
}
if (playlist.length === 0) {
this.isShowPlaylist = false
}
this.playlist = playlist
this.currentIndex = currentIndex
}
/**
* 添加歌曲到播放列表中
* @param song
*/
@action
addSong = (song) => {
let playlist = this.playlist.slice()
let currentIndex = this.currentIndex
const findex = playlist.findIndex(item => item.id === song.id)
if (findex !== -1) {
this.currentIndex = findex
return
}
currentIndex++
playlist.splice(currentIndex, 0, song)
this.playlist = playlist
this.currentIndex = currentIndex
this.isFullScreen = true
}
/**------------------------------------**/
/**
* 当歌曲准备好的处理
*/
@action
onCanPlay = () => {
this.songReady = true
this.setPlayHistorys({
isAdd: true,
song: this.currentSong
})
}
/**
* 播放错误时的处理
*/
@action
onError = () => {
Toast.info('播放错误,自动跳到下一首', 2, null, false)
this.errorTimer = setTimeout(() => {
this.changeSong('next')
}, 2000)
}
/**
* 当歌曲播放结束后的处理
*/
@action
onEnded = () => {
if (this.mode === mode.loop) {
this.loop()
} else {
this.changeSong('next')
}
}
/**
* 播放时间更新时的处理
* @param e
*/
@action
onTimeUpdate = (e) => {
this.currentTime = e.target.currentTime
}
/**
* 当播放百分比变化的处理
* @param percent
*/
onPercentChange = (percent) => {
const currentTime = percent * this.currentSong.duration
this.audio.currentTime = currentTime
this.lyric && this.lyric.seek(currentTime * 1000)
}
}
export default new AppStore() | m => item.name).join('/')
song.image = song.al ? song.al.picUrl : ''
song.url = `https://music.163.com/song/media/outer/url?id=${song.id}.mp3`
song.duration = (song.dt / 1000) || (song.duration) / 1000 || 0
}
return song
}
/**
* 获取播放时间的百分比
* @returns {number}
*/
@computed
get percent() {
if (this.currentSong.duration) {
return this.currentTime / this.currentSong.duration
} else {
return 0
}
}
@action
setStore = (obj) => {
if (Object.prototype.toString.call(obj) !== '[object Object]') {
return
}
for (let [key, va | identifier_body |
appStore.ts | import { observable, action, computed, runInAction, reaction } from 'mobx'
import { get } from '../utils/request'
import { Toast } from 'antd-mobile'
import { getRandom } from '../utils/util'
import Lyric from 'lyric-parser'
interface modeProps {
sequence?: number
shuffle?: number
loop?: number
}
interface PlayHistorysProps{
isAdd?:boolean
song?:any
index?:number
}
const mode: modeProps = {
sequence: 0, //顺序播放
shuffle: 1, //随机播放
loop: 2 //单曲循环
}
class AppStore {
@observable isExpandSider: boolean //侧边栏是否展开
@observable playing: boolean //歌曲是否正在播放
@observable playlist: any[] //播放列表
@observable mode: number //播放模式
@observable currentIndex: number //当前播放歌曲索引
@observable isFullScreen: boolean //是否全屏播放音乐
@observable likeSongs: any[] //喜欢的音乐列表
@observable playHistorys: any[] //播放历史
@observable audio: any //audio
@observable songReady: boolean //歌曲是否已经准备好了播放
@observable currentTime: number //歌曲播放的时间
@observable isShowPlaylist: boolean //是否显示播放列表
@observable lyric: any //歌词
@observable playingLyric: string //正在播放的歌词
@observable playingLineNum: number //正在播放的歌词行数
@observable errorTimer:any
@observable sheetSongs:any
constructor() {
this.isExpandSider = false
this.playing = false
this.playlist = []
this.mode = mode.sequence
this.currentIndex = -1
this.isFullScreen = false
this.likeSongs = JSON.parse(localStorage.getItem('likeSongs')) || []
this.playHistorys = JSON.parse(localStorage.getItem('playHistorys')) || []
this.audio = null
this.songReady = false
this.currentTime = 0
this.isShowPlaylist = false
this.lyric = null
this.playingLyric = ''
this.playingLineNum = 0
this.errorTimer = null
this.sheetSongs = []
//当currentSong变化时作出反应
reaction(() => this.currentSong, () => {
this.currentSongChange()
})
}
/**
* 获取当前播放歌曲,并对数据进行处理
* @returns {*|{}}
*/
// 当playlist,currentIndex变化时,reaction就会触发,所以要对前后的变化进行判断是否是同一首歌
@computed({equals: (prevSong:{id:number}, newSong:{id:number}) =>prevSong.id === newSong.id})
get currentSong() {
let song:any = {}
if (this.playlist[this.currentIndex]) {
//引用类型的赋值一定要注意,这里必须深拷贝,否则song的改变会改变this.playlist,this.playlist的改变又触发计算属性,最后导致报错
song = { ...this.playlist[this.currentIndex] }
song = {...this.playlist[this.currentIndex]}
song.artists = song.ar.map(item => item.name).join('/')
song.image = song.al ? song.al.picUrl : ''
song.url = `https://music.163.com/song/media/outer/url?id=${song.id}.mp3`
song.duration = (song.dt / 1000) || (song.duration) / 1000 || 0
}
return song
}
/**
* 获取播放时间的百分比
* @returns {number}
*/
@computed
get percent() {
if (this.currentSong.duration) {
return this.currentTime / this.currentSong.duration
} else {
return 0
}
}
@action
setStore = (obj) => {
if (Object.prototype.toString.call(obj) !== '[object Object]') {
return
}
for (let [key, value] of Object.entries(obj)) {
this[key] = value
}
}
@action
setSheetSongs = (obj) => {
this.sheetSongs = obj
}
// @action
// getSheetSongs = (size = 0) => {
// if(songs.length >= allList.length){
// return
// }
// this.sheetSongs = this.sheetSongs.slice(size, size + 30)
// let list = []
// //增加两秒的延迟,实际项目中可以不用,这里只是为显示这样一个加载中的过程
// setTimeout(()=>{
// list = allList.slice(size, size + 30)
// setSongs(songs.concat(list))
// },2000)
// }
/**
* 切换侧边栏的折叠展开
*/
@action
toggleExpand = () => {
this.isExpandSider = !this.isExpandSider
}
/**
* 设置是否全屏播放音乐
* @param flag
*/
@action
setFullScreen = (flag) => {
this.isFullScreen = flag
}
/**
* 选择播放歌曲,设置播放列表
* @param obj
* @returns {Promise.<void>}
*/
@action
onSelectSong = async (obj) => {
const { songlist, index } = obj
this.playlist = songlist ? songlist.slice() : []
this.currentIndex = index
this.isFullScreen = true
}
/**
* 当current变化时的处理
* @returns {Promise.<void>}
*/
@action
currentSongChange = () => {
if (!this.currentSong.id) {
return
}
this.lyric && this.lyric.stop()
this.playing = true
this.currentTime = 0
this.playingLineNum = 0
this.playingLyric = ''
this.lyric = null
setTimeout(() => {
this.audio && this.audio.play()
this.getLyric(this.currentSong.id)
})
}
/**
* 获取歌曲歌词
* @param id
* @returns {Promise.<void>}
*/
@action
getLyric = async (id) => {
const res = await get(`/lyric?id=${id}`)
runInAction(() => {
this.lyric = res ? new Lyric(res, this.handler) : null
this.lyric && this.lyric.play()
})
}
/**
* 播放的歌词变化时的处理
* @param lineNum 播放的行数
* @param txt 当前播放歌词
*/
@action
handler = ({ lineNum, txt }) => {
this.playingLyric = txt
this.playingLineNum = lineNum
}
/**
* 切换播放模式
*/
@action
changeMode = () => {
let mode = (this.mode + 1) % 3
const infos = ['顺序播放', '随机播放', '单曲循环']
Toast.info(infos[mode], 1, null, false)
this.mode = mode
}
/**
* 循环播放
*/
@action
loop = () => {
this.audio.currentTime = 0
this.audio.play()
this.playing = true
this.lyric && this.lyric.seek(0)
}
/**
* 切歌,实际上就是维护的currentIndex
* @param direction 上一首(prev) 下一首(next)
*/
@action
changeSong = (direction) => {
let currentIndex = this.currentIndex
if (!this.songReady) {
return
}
if (this.playlist.length === 1) {
this.loop()
return
}
if (this.mode === mode.shuffle) {
currentIndex = getRandom(0, this.playlist.length - 1)
while (currentIndex === this.currentIndex) {
currentIndex = getRandom(0, this.playlist.length - 1)
}
} else {
if (direction === 'prev') {
currentIndex--
if (currentIndex === -1) {
currentIndex = this.playlist.length - 1
}
}
if (direction === 'next') {
currentIndex++
if (currentIndex === this.playlist.length) {
currentIndex = 0
}
}
| this.songReady = false
}
/**
* 暂停/播放音乐
*/
@action
togglePlay = () => {
clearTimeout(this.errorTimer)
if (this.playing) {
this.audio && this.audio.pause()
} else {
this.audio && this.audio.play()
}
this.lyric && this.lyric.togglePlay()
this.playing = !this.playing
}
/**
* 设置喜欢的音乐
* @param isAdd 是否是添加音乐
* @param song 喜欢的音乐
* @param index 索引
*/
@action
setLikes = (song) => {
let likeSongs = this.likeSongs.slice()
const findx = likeSongs.findIndex(item => item.id === song.id)
if (findx !== -1) {
likeSongs.splice(findx, 1)
} else {
likeSongs.unshift(song)
}
localStorage.setItem('likeSongs', JSON.stringify(likeSongs))
this.likeSongs = JSON.parse(localStorage.getItem('likeSongs')) || []
}
/**
* 设置播放历史
* @param isAdd 是否是添加音乐
* @param song 音乐
* @param index 索引
*/
@action
setPlayHistorys = ({ isAdd, song, index }:PlayHistorysProps) => {
let playHistorys = this.playHistorys.slice()
if (isAdd) {
let exist = playHistorys.findIndex(item => item.id === song.id)
if (exist !== -1) {
playHistorys.splice(exist, 1)
}
playHistorys.unshift(song)
} else {
playHistorys.splice(index, 1)
}
localStorage.setItem('playHistorys', JSON.stringify(playHistorys))
this.playHistorys = JSON.parse(localStorage.getItem('playHistorys')) || []
}
/**
* 删除播放列表中的歌曲
* @param index
*/
@action
deleteSong = (index) => {
let playlist = this.playlist.slice()
let currentIndex = this.currentIndex
playlist.splice(index, 1)
if (currentIndex > index || currentIndex === playlist.length) {
currentIndex--
}
if (playlist.length === 0) {
this.isShowPlaylist = false
}
this.playlist = playlist
this.currentIndex = currentIndex
}
/**
* 添加歌曲到播放列表中
* @param song
*/
@action
addSong = (song) => {
let playlist = this.playlist.slice()
let currentIndex = this.currentIndex
const findex = playlist.findIndex(item => item.id === song.id)
if (findex !== -1) {
this.currentIndex = findex
return
}
currentIndex++
playlist.splice(currentIndex, 0, song)
this.playlist = playlist
this.currentIndex = currentIndex
this.isFullScreen = true
}
/**------------------------------------**/
/**
* 当歌曲准备好的处理
*/
@action
onCanPlay = () => {
this.songReady = true
this.setPlayHistorys({
isAdd: true,
song: this.currentSong
})
}
/**
* 播放错误时的处理
*/
@action
onError = () => {
Toast.info('播放错误,自动跳到下一首', 2, null, false)
this.errorTimer = setTimeout(() => {
this.changeSong('next')
}, 2000)
}
/**
* 当歌曲播放结束后的处理
*/
@action
onEnded = () => {
if (this.mode === mode.loop) {
this.loop()
} else {
this.changeSong('next')
}
}
/**
* 播放时间更新时的处理
* @param e
*/
@action
onTimeUpdate = (e) => {
this.currentTime = e.target.currentTime
}
/**
* 当播放百分比变化的处理
* @param percent
*/
onPercentChange = (percent) => {
const currentTime = percent * this.currentSong.duration
this.audio.currentTime = currentTime
this.lyric && this.lyric.seek(currentTime * 1000)
}
}
export default new AppStore() | }
this.currentIndex = currentIndex
| conditional_block |
appStore.ts | import { observable, action, computed, runInAction, reaction } from 'mobx'
import { get } from '../utils/request'
import { Toast } from 'antd-mobile'
import { getRandom } from '../utils/util'
import Lyric from 'lyric-parser'
interface modeProps {
sequence?: number
shuffle?: number
loop?: number
}
interface PlayHistorysProps{
isAdd?:boolean
song?:any
index?:number
}
const mode: modeProps = {
sequence: 0, //顺序播放
shuffle: 1, //随机播放
loop: 2 //单曲循环
}
class AppStore {
@observab | andSider: boolean //侧边栏是否展开
@observable playing: boolean //歌曲是否正在播放
@observable playlist: any[] //播放列表
@observable mode: number //播放模式
@observable currentIndex: number //当前播放歌曲索引
@observable isFullScreen: boolean //是否全屏播放音乐
@observable likeSongs: any[] //喜欢的音乐列表
@observable playHistorys: any[] //播放历史
@observable audio: any //audio
@observable songReady: boolean //歌曲是否已经准备好了播放
@observable currentTime: number //歌曲播放的时间
@observable isShowPlaylist: boolean //是否显示播放列表
@observable lyric: any //歌词
@observable playingLyric: string //正在播放的歌词
@observable playingLineNum: number //正在播放的歌词行数
@observable errorTimer:any
@observable sheetSongs:any
constructor() {
this.isExpandSider = false
this.playing = false
this.playlist = []
this.mode = mode.sequence
this.currentIndex = -1
this.isFullScreen = false
this.likeSongs = JSON.parse(localStorage.getItem('likeSongs')) || []
this.playHistorys = JSON.parse(localStorage.getItem('playHistorys')) || []
this.audio = null
this.songReady = false
this.currentTime = 0
this.isShowPlaylist = false
this.lyric = null
this.playingLyric = ''
this.playingLineNum = 0
this.errorTimer = null
this.sheetSongs = []
//当currentSong变化时作出反应
reaction(() => this.currentSong, () => {
this.currentSongChange()
})
}
/**
* 获取当前播放歌曲,并对数据进行处理
* @returns {*|{}}
*/
// 当playlist,currentIndex变化时,reaction就会触发,所以要对前后的变化进行判断是否是同一首歌
@computed({equals: (prevSong:{id:number}, newSong:{id:number}) =>prevSong.id === newSong.id})
get currentSong() {
let song:any = {}
if (this.playlist[this.currentIndex]) {
//引用类型的赋值一定要注意,这里必须深拷贝,否则song的改变会改变this.playlist,this.playlist的改变又触发计算属性,最后导致报错
song = { ...this.playlist[this.currentIndex] }
song = {...this.playlist[this.currentIndex]}
song.artists = song.ar.map(item => item.name).join('/')
song.image = song.al ? song.al.picUrl : ''
song.url = `https://music.163.com/song/media/outer/url?id=${song.id}.mp3`
song.duration = (song.dt / 1000) || (song.duration) / 1000 || 0
}
return song
}
/**
* 获取播放时间的百分比
* @returns {number}
*/
@computed
get percent() {
if (this.currentSong.duration) {
return this.currentTime / this.currentSong.duration
} else {
return 0
}
}
@action
setStore = (obj) => {
if (Object.prototype.toString.call(obj) !== '[object Object]') {
return
}
for (let [key, value] of Object.entries(obj)) {
this[key] = value
}
}
@action
setSheetSongs = (obj) => {
this.sheetSongs = obj
}
// @action
// getSheetSongs = (size = 0) => {
// if(songs.length >= allList.length){
// return
// }
// this.sheetSongs = this.sheetSongs.slice(size, size + 30)
// let list = []
// //增加两秒的延迟,实际项目中可以不用,这里只是为显示这样一个加载中的过程
// setTimeout(()=>{
// list = allList.slice(size, size + 30)
// setSongs(songs.concat(list))
// },2000)
// }
/**
* 切换侧边栏的折叠展开
*/
@action
toggleExpand = () => {
this.isExpandSider = !this.isExpandSider
}
/**
* 设置是否全屏播放音乐
* @param flag
*/
@action
setFullScreen = (flag) => {
this.isFullScreen = flag
}
/**
* 选择播放歌曲,设置播放列表
* @param obj
* @returns {Promise.<void>}
*/
@action
onSelectSong = async (obj) => {
const { songlist, index } = obj
this.playlist = songlist ? songlist.slice() : []
this.currentIndex = index
this.isFullScreen = true
}
/**
* 当current变化时的处理
* @returns {Promise.<void>}
*/
@action
currentSongChange = () => {
if (!this.currentSong.id) {
return
}
this.lyric && this.lyric.stop()
this.playing = true
this.currentTime = 0
this.playingLineNum = 0
this.playingLyric = ''
this.lyric = null
setTimeout(() => {
this.audio && this.audio.play()
this.getLyric(this.currentSong.id)
})
}
/**
* 获取歌曲歌词
* @param id
* @returns {Promise.<void>}
*/
@action
getLyric = async (id) => {
const res = await get(`/lyric?id=${id}`)
runInAction(() => {
this.lyric = res ? new Lyric(res, this.handler) : null
this.lyric && this.lyric.play()
})
}
/**
* 播放的歌词变化时的处理
* @param lineNum 播放的行数
* @param txt 当前播放歌词
*/
@action
handler = ({ lineNum, txt }) => {
this.playingLyric = txt
this.playingLineNum = lineNum
}
/**
* 切换播放模式
*/
@action
changeMode = () => {
let mode = (this.mode + 1) % 3
const infos = ['顺序播放', '随机播放', '单曲循环']
Toast.info(infos[mode], 1, null, false)
this.mode = mode
}
/**
* 循环播放
*/
@action
loop = () => {
this.audio.currentTime = 0
this.audio.play()
this.playing = true
this.lyric && this.lyric.seek(0)
}
/**
* 切歌,实际上就是维护的currentIndex
* @param direction 上一首(prev) 下一首(next)
*/
@action
changeSong = (direction) => {
let currentIndex = this.currentIndex
if (!this.songReady) {
return
}
if (this.playlist.length === 1) {
this.loop()
return
}
if (this.mode === mode.shuffle) {
currentIndex = getRandom(0, this.playlist.length - 1)
while (currentIndex === this.currentIndex) {
currentIndex = getRandom(0, this.playlist.length - 1)
}
} else {
if (direction === 'prev') {
currentIndex--
if (currentIndex === -1) {
currentIndex = this.playlist.length - 1
}
}
if (direction === 'next') {
currentIndex++
if (currentIndex === this.playlist.length) {
currentIndex = 0
}
}
}
this.currentIndex = currentIndex
this.songReady = false
}
/**
* 暂停/播放音乐
*/
@action
togglePlay = () => {
clearTimeout(this.errorTimer)
if (this.playing) {
this.audio && this.audio.pause()
} else {
this.audio && this.audio.play()
}
this.lyric && this.lyric.togglePlay()
this.playing = !this.playing
}
/**
* 设置喜欢的音乐
* @param isAdd 是否是添加音乐
* @param song 喜欢的音乐
* @param index 索引
*/
@action
setLikes = (song) => {
let likeSongs = this.likeSongs.slice()
const findx = likeSongs.findIndex(item => item.id === song.id)
if (findx !== -1) {
likeSongs.splice(findx, 1)
} else {
likeSongs.unshift(song)
}
localStorage.setItem('likeSongs', JSON.stringify(likeSongs))
this.likeSongs = JSON.parse(localStorage.getItem('likeSongs')) || []
}
/**
* 设置播放历史
* @param isAdd 是否是添加音乐
* @param song 音乐
* @param index 索引
*/
@action
setPlayHistorys = ({ isAdd, song, index }:PlayHistorysProps) => {
let playHistorys = this.playHistorys.slice()
if (isAdd) {
let exist = playHistorys.findIndex(item => item.id === song.id)
if (exist !== -1) {
playHistorys.splice(exist, 1)
}
playHistorys.unshift(song)
} else {
playHistorys.splice(index, 1)
}
localStorage.setItem('playHistorys', JSON.stringify(playHistorys))
this.playHistorys = JSON.parse(localStorage.getItem('playHistorys')) || []
}
/**
* 删除播放列表中的歌曲
* @param index
*/
@action
deleteSong = (index) => {
let playlist = this.playlist.slice()
let currentIndex = this.currentIndex
playlist.splice(index, 1)
if (currentIndex > index || currentIndex === playlist.length) {
currentIndex--
}
if (playlist.length === 0) {
this.isShowPlaylist = false
}
this.playlist = playlist
this.currentIndex = currentIndex
}
/**
* 添加歌曲到播放列表中
* @param song
*/
@action
addSong = (song) => {
let playlist = this.playlist.slice()
let currentIndex = this.currentIndex
const findex = playlist.findIndex(item => item.id === song.id)
if (findex !== -1) {
this.currentIndex = findex
return
}
currentIndex++
playlist.splice(currentIndex, 0, song)
this.playlist = playlist
this.currentIndex = currentIndex
this.isFullScreen = true
}
/**------------------------------------**/
/**
* 当歌曲准备好的处理
*/
@action
onCanPlay = () => {
this.songReady = true
this.setPlayHistorys({
isAdd: true,
song: this.currentSong
})
}
/**
* 播放错误时的处理
*/
@action
onError = () => {
Toast.info('播放错误,自动跳到下一首', 2, null, false)
this.errorTimer = setTimeout(() => {
this.changeSong('next')
}, 2000)
}
/**
* 当歌曲播放结束后的处理
*/
@action
onEnded = () => {
if (this.mode === mode.loop) {
this.loop()
} else {
this.changeSong('next')
}
}
/**
* 播放时间更新时的处理
* @param e
*/
@action
onTimeUpdate = (e) => {
this.currentTime = e.target.currentTime
}
/**
* 当播放百分比变化的处理
* @param percent
*/
onPercentChange = (percent) => {
const currentTime = percent * this.currentSong.duration
this.audio.currentTime = currentTime
this.lyric && this.lyric.seek(currentTime * 1000)
}
}
export default new AppStore() | le isExp | identifier_name |
qt_events.py | #@+leo-ver=5-thin
#@+node:ekr.20140907103315.18766: * @file ../plugins/qt_events.py
"""Leo's Qt event handling code."""
#@+<< about internal bindings >>
#@+node:ekr.20110605121601.18538: ** << about internal bindings >>
#@@language rest
#@+at
# Here are the rules for translating key bindings (in leoSettings.leo) into keys
# for k.bindingsDict:
#
# 1. The case of plain letters is significant: a is not A.
#
# 2. The Shift- prefix can be applied *only* to letters. Leo will ignore (with a
# warning) the shift prefix applied to any other binding, e.g., Ctrl-Shift-(
#
# 3. The case of letters prefixed by Ctrl-, Alt-, Key- or Shift- is *not*
# significant. Thus, the Shift- prefix is required if you want an upper-case
# letter (with the exception of 'bare' uppercase letters.)
#
# The following table illustrates these rules. In each row, the first entry is the
# key (for k.bindingsDict) and the other entries are equivalents that the user may
# specify in leoSettings.leo:
#
# a, Key-a, Key-A
# A, Shift-A
# Alt-a, Alt-A
# Alt-A, Alt-Shift-a, Alt-Shift-A
# Ctrl-a, Ctrl-A
# Ctrl-A, Ctrl-Shift-a, Ctrl-Shift-A
# , Key-!,Key-exclam,exclam
#
# This table is consistent with how Leo already works (because it is consistent
# with Tk's key-event specifiers). It is also, I think, the least confusing set of
# rules.
#@-<< about internal bindings >>
import sys
from typing import Any
from leo.core import leoGlobals as g
from leo.core import leoGui
from leo.core.leoQt import QtCore, QtGui, QtWidgets
from leo.core.leoQt import Key, KeyboardModifier, Type
#@+others
#@+node:ekr.20210512101604.1: ** class LossageData
class LossageData:
def __init__(self, actual_ch, binding, ch, keynum, mods, mods2, mods3, text, toString):
self.actual_ch = actual_ch
self.binding = binding
self.ch = ch
self.keynum = keynum
self.mods = mods
self.mods2 = mods2
self.mods3 = mods3
self.stroke = None # Set later.
self.text = text
self.toString = toString
def __repr__(self):
return (
f"keynum: {self.keynum:>7x} "
f"binding: {self.binding}"
# f"ch: {self.ch:>7s} "
# f"= {self.actual_ch!r}"
# f"mods: {self.mods}, {self.mods2}, {self.mods3}\n"
# f"stroke: {self.stroke!r}\n"
# f"text: {self.text!r}\n"
# f"toString: {self.toString!r}\n"
)
__str__ = __repr__
#@+node:ekr.20141028061518.17: ** class LeoQtEventFilter
class LeoQtEventFilter(QtCore.QObject): # type:ignore
#@+others
#@+node:ekr.20110605121601.18539: *3* filter.ctor
def __init__(self, c, w, tag=''):
|
#@+node:ekr.20110605121601.18540: *3* filter.eventFilter & helpers
def eventFilter(self, obj, event):
"""Return False if Qt should handle the event."""
c, k = self.c, self.c.k
#
# Handle non-key events first.
if not g.app:
return False # For unit tests, but g.unitTesting may be False!
if not self.c.p:
return False # Startup.
#
# Trace events.
if 'events' in g.app.debug:
if isinstance(event, QtGui.QKeyEvent):
self.traceKeys(obj, event)
else:
self.traceEvent(obj, event)
self.traceWidget(event)
#
# Let Qt handle the non-key events.
if self.doNonKeyEvent(event, obj):
return False
#
# Ignore incomplete key events.
if self.shouldIgnoreKeyEvent(event, obj):
return False
#
# Generate a g.KeyStroke for k.masterKeyHandler.
try:
binding, ch, lossage = self.toBinding(event)
if not binding:
return False # Let Qt handle the key.
#
# Pass the KeyStroke to masterKeyHandler.
key_event = self.createKeyEvent(event, c, self.w, ch, binding)
#
# #1933: Update the g.app.lossage
if len(g.app.lossage) > 99:
g.app.lossage.pop()
lossage.stroke = key_event.stroke
g.app.lossage.insert(0, lossage)
#
# Call masterKeyHandler!
k.masterKeyHandler(key_event)
c.outerUpdate()
except Exception:
g.es_exception()
return True # Whatever happens, suppress all other Qt key handling.
#@+node:ekr.20110605195119.16937: *4* filter.createKeyEvent
def createKeyEvent(self, event, c, w, ch, binding):
return leoGui.LeoKeyEvent(
c=self.c,
# char = None doesn't work at present.
# But really, the binding should suffice.
char=ch,
event=event,
binding=binding,
w=w,
x=getattr(event, 'x', None) or 0,
y=getattr(event, 'y', None) or 0,
x_root=getattr(event, 'x_root', None) or 0,
y_root=getattr(event, 'y_root', None) or 0,
)
#@+node:ekr.20180413180751.2: *4* filter.doNonKeyEvent
def doNonKeyEvent(self, event, obj):
"""Handle all non-key event. """
c = self.c
eventType = event.type()
if eventType == Type.WindowActivate:
g.app.gui.onActivateEvent(event, c, obj, self.tag)
elif eventType == Type.WindowDeactivate:
g.app.gui.onDeactivateEvent(event, c, obj, self.tag)
elif eventType == Type.FocusIn:
if self.tag == 'body':
c.frame.body.onFocusIn(obj)
if c.frame and c.frame.top and obj is c.frame.top.lineEdit:
if c.k.getStateKind() == 'getArg':
c.frame.top.lineEdit.restore_selection()
elif eventType == Type.FocusOut and self.tag == 'body':
c.frame.body.onFocusOut(obj)
# Return True unless we have a key event.
return eventType not in (Type.ShortcutOverride, Type.KeyPress, Type.KeyRelease)
#@+node:ekr.20180413180751.3: *4* filter.shouldIgnoreKeyEvent
def shouldIgnoreKeyEvent(self, event, obj):
"""
Return True if we should ignore the key event.
Alas, QLineEdit *only* generates ev.KeyRelease on Windows, Ubuntu,
so the following hack is required.
"""
c = self.c
t = event.type()
isEditWidget = (obj == c.frame.tree.edit_widget(c.p))
if isEditWidget:
# QLineEdit: ignore all key events except keyRelease events.
return t != Type.KeyRelease
if t == Type.KeyPress:
# Hack Alert!
# On some Linux systems (Kubuntu, Debian, the Win or SHIFT-Win keys
# insert garbage symbols into editing areas. Filter out these
# key events. NOTE - this is a *magic number* - who knows if
# it could change in the future?
if event.key() == 0x1000053 and sys.platform == 'linux':
return True
return False # Never ignore KeyPress events.
# This doesn't work. Two shortcut-override events are generated!
# if t == ev.ShortcutOverride and event.text():
# return False # Don't ignore shortcut overrides with a real value.
return True # Ignore everything else.
#@+node:ekr.20110605121601.18543: *4* filter.toBinding & helpers
def toBinding(self, event):
"""
Return (binding, actual_ch):
binding: A user binding, to create g.KeyStroke.
Spelling no longer fragile.
actual_ch: The insertable key, or ''.
"""
mods = self.qtMods(event)
keynum, text, toString, ch = self.qtKey(event)
actual_ch = text or toString
#
# Never allow empty chars, or chars in g.app.gui.ignoreChars
if toString in g.app.gui.ignoreChars:
return None, None, None
ch = ch or toString or ''
if not ch:
return None, None, None
#
# Check for AltGr and Alt+Ctrl keys *before* creating a binding.
actual_ch, ch, mods2 = self.doMacTweaks(actual_ch, ch, mods)
mods3 = self.doAltTweaks(actual_ch, keynum, mods2, toString)
#
# Use *ch* in the binding.
# Clearer w/o f-strings.
binding = '%s%s' % (''.join([f"{z}+" for z in mods3]), ch)
#
# Return the tweaked *actual* char.
binding, actual_ch = self.doLateTweaks(binding, actual_ch)
#
# #1933: Create lossage data.
lossage = LossageData(
actual_ch, binding, ch, keynum, mods, mods2, mods3, text, toString)
return binding, actual_ch, lossage
#@+node:ekr.20180419154543.1: *5* filter.doAltTweaks
def doAltTweaks(self, actual_ch, keynum, mods, toString):
"""Turn AltGr and some Alt-Ctrl keys into plain keys."""
def removeAltCtrl(mods):
for mod in ('Alt', 'Control'):
if mod in mods:
mods.remove(mod)
return mods
#
# Remove Alt, Ctrl for AltGr keys.
# See https://en.wikipedia.org/wiki/AltGr_key
if keynum == Key.Key_AltGr:
return removeAltCtrl(mods)
#
# Never alter complex characters.
if len(actual_ch) != 1:
return mods
#
# #1563: A hack for German and Spanish keyboards:
# Remove *plain* Shift modifier for colon and semicolon.
# https://en.m.wikipedia.org/wiki/German_keyboard_layout
kind = self.keyboard_kind.lower()
if (kind in ('german', 'spanish')
and actual_ch in ":;"
and 'Shift' in mods
and 'Alt' not in mods and 'Control' not in mods
):
mods.remove('Shift')
elif kind == 'us-international':
pass # To do.
#
# Handle Alt-Ctrl modifiers for chars whose that are not ascii.
# Testing: Alt-Ctrl-E is '€'.
if ord(actual_ch) > 127 and 'Alt' in mods and 'Control' in mods:
return removeAltCtrl(mods)
return mods
#@+node:ekr.20180417161548.1: *5* filter.doLateTweaks
def doLateTweaks(self, binding, ch):
"""Make final tweaks. g.KeyStroke does other tweaks later."""
#
# These are needed because ch is separate from binding.
if ch == '\r':
ch = '\n'
if binding == 'Escape':
ch = 'Escape'
#
# Adjust the case of the binding string (for the minibuffer).
if len(ch) == 1 and len(binding) == 1 and ch.isalpha() and binding.isalpha():
if ch != binding:
binding = ch
return binding, ch
#@+node:ekr.20180419160958.1: *5* filter.doMacTweaks
def doMacTweaks(self, actual_ch, ch, mods):
"""Replace MacOS Alt characters."""
if not g.isMac:
return actual_ch, ch, mods
if ch == 'Backspace':
# On the Mac, the reported char can be DEL (7F)
return '\b', ch, mods
if len(mods) == 1 and mods[0] == 'Alt':
# Patch provided by resi147.
# See the thread: special characters in MacOSX, like '@'.
mac_d = {
'/': '\\',
'5': '[',
'6': ']',
'7': '|',
'8': '{',
'9': '}',
'e': '€',
'l': '@',
}
if ch.lower() in mac_d:
# Ignore the case.
actual_ch = ch = g.checkUnicode(mac_d.get(ch.lower()))
mods = []
return actual_ch, ch, mods
#@+node:ekr.20110605121601.18544: *5* filter.qtKey
def qtKey(self, event):
"""
Return the components of a Qt key event.
Modifiers are handled separately.
Return (keynum, text, toString, ch).
keynum: event.key()
ch: chr(keynum) or '' if there is an exception.
toString:
For special keys: made-up spelling that become part of the setting.
For all others: QtGui.QKeySequence(keynum).toString()
text: event.text()
"""
text, toString, ch = '', '', '' # Defaults.
#
# Leo 6.4: Test keynum's directly.
# The values are the same in Qt4, Qt5, Qt6.
keynum = event.key()
if keynum in (
0x01000020, # Key_Shift
0x01000021, # Key_Control
0x01000022, # Key_Meta
0x01000023, # Key_Alt
0x01001103, # Key_AltGr
0x01000024, # Key_CapsLock
):
# Disallow bare modifiers.
return keynum, text, toString, ch
#
# Compute toString and ch.
text = event.text() # This is the unicode character!
toString = QtGui.QKeySequence(keynum).toString()
#
# #1244461: Numpad 'Enter' key does not work in minibuffer
if toString == 'Enter':
toString = 'Return'
if toString == 'Esc':
toString = 'Escape'
try:
ch = chr(keynum)
except ValueError:
pass
return keynum, text, toString, ch
#@+node:ekr.20120204061120.10084: *5* filter.qtMods
def qtMods(self, event):
"""Return the text version of the modifiers of the key event."""
modifiers = event.modifiers()
mod_table = (
(KeyboardModifier.AltModifier, 'Alt'),
(KeyboardModifier.ControlModifier, 'Control'),
(KeyboardModifier.MetaModifier, 'Meta'),
(KeyboardModifier.ShiftModifier, 'Shift'),
# #1448: Replacing this by 'Key' would make separate keypad bindings impossible.
(KeyboardModifier.KeypadModifier, 'KeyPad'),
)
# pylint: disable=superfluous-parens.
mods = [b for a, b in mod_table if (modifiers & a)]
return mods
#@+node:ekr.20140907103315.18767: *3* filter.Tracing
#@+node:ekr.20190922075339.1: *4* filter.traceKeys
def traceKeys(self, obj, event):
if g.unitTesting:
return
e = QtCore.QEvent
key_events = {
e.Type.KeyPress: 'key-press', # 6
e.Type.KeyRelease: 'key-release', # 7
e.Type.Shortcut: 'shortcut', # 117
e.Type.ShortcutOverride: 'shortcut-override', # 51
}
kind = key_events.get(event.type())
if kind:
mods = ','.join(self.qtMods(event))
g.trace(f"{kind:>20}: {mods:>7} {event.text()!r}")
#@+node:ekr.20110605121601.18548: *4* filter.traceEvent
def traceEvent(self, obj, event):
if g.unitTesting:
return
# http://qt-project.org/doc/qt-4.8/qevent.html#properties
exclude_names = ('tree', 'log', 'body', 'minibuffer')
traceActivate = True
traceFocus = False
traceHide = False
traceHover = False
traceKey = False
traceLayout = False
traceMouse = False
tracePaint = False
traceUpdate = False
c, e = self.c, QtCore.QEvent
eventType = event.type()
# http://doc.qt.io/qt-5/qevent.html
show: list[Any] = []
ignore = [
e.Type.MetaCall, # 43
e.Type.Timer, # 1
e.Type.ToolTip, # 110
]
activate_events = (
(e.Type.Close, 'close'), # 19
(e.Type.WindowActivate, 'window-activate'), # 24
(e.Type.WindowBlocked, 'window-blocked'), # 103
(e.Type.WindowUnblocked, 'window-unblocked'), # 104
(e.Type.WindowDeactivate, 'window-deactivate'), # 25
)
focus_events = [
(e.Type.Enter, 'enter'), # 10
(e.Type.Leave, 'leave'), # 11
(e.Type.FocusIn, 'focus-in'), # 8
(e.Type.FocusOut, 'focus-out'), # 9
(e.Type.ShowToParent, 'show-to-parent'), # 26
]
if hasattr(e, 'FocusAboutToChange'):
# pylint: disable=no-member
focus_events.extend([
(e.Type.FocusAboutToChange, 'focus-about-to-change'), # 23
])
hide_events = (
(e.Type.Hide, 'hide'), # 18
(e.Type.HideToParent, 'hide-to-parent'), # 27
# (e.Type.LeaveEditFocus,'leave-edit-focus'), # 151
(e.Type.Show, 'show'), # 17
)
hover_events = (
(e.Type.HoverEnter, 'hover-enter'), # 127
(e.Type.HoverLeave, 'hover-leave'), # 128
(e.Type.HoverMove, 'hover-move'), # 129
)
key_events = [
(e.Type.KeyPress, 'key-press'), # 6
(e.Type.KeyRelease, 'key-release'), # 7
(e.Type.Shortcut, 'shortcut'), # 117
(e.Type.ShortcutOverride, 'shortcut-override'), # 51
]
if hasattr(e, 'InputMethodQuery'):
# pylint: disable=no-member
key_events.extend([
(e.Type.InputMethodQuery, 'input-method-query'), # 207
])
layout_events = [
(e.Type.ChildAdded, 'child-added'), # 68
(e.Type.ChildRemoved, 'child-removed'), # 71
(e.Type.DynamicPropertyChange, 'dynamic-property-change'), # 170
(e.Type.FontChange, 'font-change'), # 97
(e.Type.LayoutRequest, 'layout-request'), # 76
(e.Type.Move, 'move'), # 13 widget's position changed.
(e.Type.Resize, 'resize'), # 14
(e.Type.StyleChange, 'style-change'), # 100
(e.Type.ZOrderChange, 'z-order-change'), # 126
]
if hasattr(e, 'CloseSoftwareInputPanel'):
layout_events.extend([
(e.Type.CloseSoftwareInputPanel, 'close-sip'), # 200
])
mouse_events = (
(e.Type.MouseMove, 'mouse-move'), # 155
(e.Type.MouseButtonPress, 'mouse-press'), # 2
(e.Type.MouseButtonRelease, 'mouse-release'), # 3
(e.Type.Wheel, 'mouse-wheel'), # 31
)
paint_events = [
(e.Type.ChildPolished, 'child-polished'), # 69
(e.Type.PaletteChange, 'palette-change'), # 39
(e.Type.ParentChange, 'parent-change'), # 21
(e.Type.Paint, 'paint'), # 12
(e.Type.Polish, 'polish'), # 75
(e.Type.PolishRequest, 'polish-request'), # 74
]
if hasattr(e, 'RequestSoftwareInputPanel'):
paint_events.extend([
(e.Type.RequestSoftwareInputPanel, 'sip'), # 199
])
update_events = (
(e.Type.UpdateLater, 'update-later'), # 78
(e.Type.UpdateRequest, 'update'), # 77
)
option_table = (
(traceActivate, activate_events),
(traceFocus, focus_events),
(traceHide, hide_events),
(traceHover, hover_events),
(traceKey, key_events),
(traceLayout, layout_events),
(traceMouse, mouse_events),
(tracePaint, paint_events),
(traceUpdate, update_events),
)
for option, table in option_table:
if option:
show.extend(table)
else:
for n, tag in table:
ignore.append(n)
for val, kind in show:
if self.tag in exclude_names:
return
if eventType == val:
tag = (
obj.objectName() if hasattr(obj, 'objectName')
else f"id: {id(obj)}, {obj.__class__.__name__}"
)
if traceKey:
g.trace(
f"{kind:>25} {self.tag:25} "
f"in-state: {repr(c.k and c.k.inState()):5} obj: {tag}")
return
if eventType not in ignore:
tag = (
obj.objectName() if hasattr(obj, 'objectName')
else f"id: {id(obj)}, {obj.__class__.__name__}"
)
g.trace(f"{eventType:>25} {self.tag:25} {tag}")
#@+node:ekr.20131121050226.16331: *4* filter.traceWidget
def traceWidget(self, event):
"""Show unexpected events in unusual widgets."""
verbose = False # Not good for --trace-events
e = QtCore.QEvent
assert isinstance(event, QtCore.QEvent)
et = event.type()
# http://qt-project.org/doc/qt-4.8/qevent.html#properties
ignore_d = {
e.Type.ChildAdded: 'child-added', # 68
e.Type.ChildPolished: 'child-polished', # 69
e.Type.ChildRemoved: 'child-removed', # 71
e.Type.Close: 'close', # 19
e.Type.CloseSoftwareInputPanel: 'close-software-input-panel', # 200
178: 'contents-rect-change', # 178
# e.Type.DeferredDelete:'deferred-delete', # 52 (let's trace this)
e.Type.DynamicPropertyChange: 'dynamic-property-change', # 170
e.Type.FocusOut: 'focus-out', # 9 (We don't care if we are leaving an unknown widget)
e.Type.FontChange: 'font-change', # 97
e.Type.Hide: 'hide', # 18
e.Type.HideToParent: 'hide-to-parent', # 27
e.Type.HoverEnter: 'hover-enter', # 127
e.Type.HoverLeave: 'hover-leave', # 128
e.Type.HoverMove: 'hover-move', # 129
e.Type.KeyPress: 'key-press', # 6
e.Type.KeyRelease: 'key-release', # 7
e.Type.LayoutRequest: 'layout-request', # 76
e.Type.Leave: 'leave', # 11 (We don't care if we are leaving an unknown widget)
# e.Type.LeaveEditFocus:'leave-edit-focus', # 151
e.Type.MetaCall: 'meta-call', # 43
e.Type.Move: 'move', # 13 widget's position changed.
e.Type.MouseButtonPress: 'mouse-button-press', # 2
e.Type.MouseButtonRelease: 'mouse-button-release', # 3
e.Type.MouseButtonDblClick: 'mouse-button-double-click', # 4
e.Type.MouseMove: 'mouse-move', # 5
e.Type.MouseTrackingChange: 'mouse-tracking-change', # 105
e.Type.Paint: 'paint', # 12
e.Type.PaletteChange: 'palette-change', # 39
e.Type.ParentChange: 'parent-change', # 21
e.Type.Polish: 'polish', # 75
e.Type.PolishRequest: 'polish-request', # 74
e.Type.RequestSoftwareInputPanel: 'request-software-input-panel', # 199
e.Type.Resize: 'resize', # 14
e.Type.ShortcutOverride: 'shortcut-override', # 51
e.Type.Show: 'show', # 17
e.Type.ShowToParent: 'show-to-parent', # 26
e.Type.StyleChange: 'style-change', # 100
e.Type.StatusTip: 'status-tip', # 112
e.Type.Timer: 'timer', # 1
e.Type.ToolTip: 'tool-tip', # 110
e.Type.WindowBlocked: 'window-blocked', # 103
e.Type.WindowUnblocked: 'window-unblocked', # 104
e.Type.ZOrderChange: 'z-order-change', # 126
}
focus_d = {
e.Type.DeferredDelete: 'deferred-delete', # 52
e.Type.Enter: 'enter', # 10
e.Type.FocusIn: 'focus-in', # 8
e.Type.WindowActivate: 'window-activate', # 24
e.Type.WindowDeactivate: 'window-deactivate', # 25
}
line_edit_ignore_d = {
e.Type.Enter: 'enter', # 10 (mouse over)
e.Type.Leave: 'leave', # 11 (mouse over)
e.Type.FocusOut: 'focus-out', # 9
e.Type.WindowActivate: 'window-activate', # 24
e.Type.WindowDeactivate: 'window-deactivate', # 25
}
none_ignore_d = {
e.Type.Enter: 'enter', # 10 (mouse over)
e.Type.Leave: 'leave', # 11 (mouse over)
e.Type.FocusOut: 'focus-out', # 9
e.Type.WindowActivate: 'window-activate', # 24
}
if et in ignore_d:
return
w = QtWidgets.QApplication.focusWidget()
if verbose: # Too verbose for --trace-events.
for d in (ignore_d, focus_d, line_edit_ignore_d, none_ignore_d):
t = d.get(et)
if t:
break
else:
t = et
g.trace(f"{t:20} {w.__class__}")
return
if w is None:
if et not in none_ignore_d:
t = focus_d.get(et) or et
g.trace(f"None {t}")
if isinstance(w, QtWidgets.QPushButton):
return
if isinstance(w, QtWidgets.QLineEdit):
if et not in line_edit_ignore_d:
t = focus_d.get(et) or et
if hasattr(w, 'objectName'):
tag = w.objectName()
else:
tag = f"id: {id(w)}, {w.__class__.__name__}"
g.trace(f"{t:20} {tag}")
return
t = focus_d.get(et) or et
if hasattr(w, 'objectName'):
tag = w.objectName()
else:
tag = f"id: {id(w)}, {w.__class__.__name__}"
g.trace(f"{t:20} {tag}")
#@-others
#@-others
#@@language python
#@@tabwidth -4
#@@pagewidth 70
#@-leo
| """Ctor for LeoQtEventFilter class."""
super().__init__()
self.c = c
self.w = w # A leoQtX object, *not* a Qt object.
self.tag = tag
# Debugging.
self.keyIsActive = False
# Pretend there is a binding for these characters.
close_flashers = c.config.getString('close-flash-brackets') or ''
open_flashers = c.config.getString('open-flash-brackets') or ''
self.flashers = open_flashers + close_flashers
# #1563: Support alternate keyboards.
self.keyboard_kind = c.config.getString('keyboard-kind') or 'default-keyboard'
# Support for ctagscompleter.py plugin.
self.ctagscompleter_active = False
self.ctagscompleter_onKey = None | identifier_body |
qt_events.py | #@+leo-ver=5-thin
#@+node:ekr.20140907103315.18766: * @file ../plugins/qt_events.py
"""Leo's Qt event handling code."""
#@+<< about internal bindings >>
#@+node:ekr.20110605121601.18538: ** << about internal bindings >>
#@@language rest
#@+at
# Here are the rules for translating key bindings (in leoSettings.leo) into keys
# for k.bindingsDict:
#
# 1. The case of plain letters is significant: a is not A.
#
# 2. The Shift- prefix can be applied *only* to letters. Leo will ignore (with a
# warning) the shift prefix applied to any other binding, e.g., Ctrl-Shift-(
#
# 3. The case of letters prefixed by Ctrl-, Alt-, Key- or Shift- is *not*
# significant. Thus, the Shift- prefix is required if you want an upper-case
# letter (with the exception of 'bare' uppercase letters.)
#
# The following table illustrates these rules. In each row, the first entry is the
# key (for k.bindingsDict) and the other entries are equivalents that the user may
# specify in leoSettings.leo:
#
# a, Key-a, Key-A
# A, Shift-A
# Alt-a, Alt-A
# Alt-A, Alt-Shift-a, Alt-Shift-A
# Ctrl-a, Ctrl-A
# Ctrl-A, Ctrl-Shift-a, Ctrl-Shift-A
# , Key-!,Key-exclam,exclam
#
# This table is consistent with how Leo already works (because it is consistent
# with Tk's key-event specifiers). It is also, I think, the least confusing set of
# rules.
#@-<< about internal bindings >>
import sys
from typing import Any
from leo.core import leoGlobals as g
from leo.core import leoGui
from leo.core.leoQt import QtCore, QtGui, QtWidgets
from leo.core.leoQt import Key, KeyboardModifier, Type
#@+others
#@+node:ekr.20210512101604.1: ** class LossageData
class LossageData:
def __init__(self, actual_ch, binding, ch, keynum, mods, mods2, mods3, text, toString):
self.actual_ch = actual_ch
self.binding = binding
self.ch = ch
self.keynum = keynum
self.mods = mods
self.mods2 = mods2
self.mods3 = mods3
self.stroke = None # Set later.
self.text = text
self.toString = toString
def __repr__(self):
return (
f"keynum: {self.keynum:>7x} "
f"binding: {self.binding}"
# f"ch: {self.ch:>7s} "
# f"= {self.actual_ch!r}"
# f"mods: {self.mods}, {self.mods2}, {self.mods3}\n"
# f"stroke: {self.stroke!r}\n"
# f"text: {self.text!r}\n"
# f"toString: {self.toString!r}\n"
)
__str__ = __repr__
#@+node:ekr.20141028061518.17: ** class LeoQtEventFilter
class LeoQtEventFilter(QtCore.QObject): # type:ignore
#@+others
#@+node:ekr.20110605121601.18539: *3* filter.ctor
def __init__(self, c, w, tag=''):
"""Ctor for LeoQtEventFilter class."""
super().__init__()
self.c = c
self.w = w # A leoQtX object, *not* a Qt object.
self.tag = tag
# Debugging.
self.keyIsActive = False
# Pretend there is a binding for these characters.
close_flashers = c.config.getString('close-flash-brackets') or ''
open_flashers = c.config.getString('open-flash-brackets') or ''
self.flashers = open_flashers + close_flashers
# #1563: Support alternate keyboards.
self.keyboard_kind = c.config.getString('keyboard-kind') or 'default-keyboard'
# Support for ctagscompleter.py plugin.
self.ctagscompleter_active = False
self.ctagscompleter_onKey = None
#@+node:ekr.20110605121601.18540: *3* filter.eventFilter & helpers
def eventFilter(self, obj, event):
"""Return False if Qt should handle the event."""
c, k = self.c, self.c.k
#
# Handle non-key events first.
if not g.app:
return False # For unit tests, but g.unitTesting may be False!
if not self.c.p:
return False # Startup.
#
# Trace events.
if 'events' in g.app.debug:
if isinstance(event, QtGui.QKeyEvent):
self.traceKeys(obj, event)
else:
self.traceEvent(obj, event)
self.traceWidget(event)
#
# Let Qt handle the non-key events.
if self.doNonKeyEvent(event, obj):
return False
#
# Ignore incomplete key events.
if self.shouldIgnoreKeyEvent(event, obj):
return False
#
# Generate a g.KeyStroke for k.masterKeyHandler.
try:
binding, ch, lossage = self.toBinding(event)
if not binding:
return False # Let Qt handle the key.
#
# Pass the KeyStroke to masterKeyHandler.
key_event = self.createKeyEvent(event, c, self.w, ch, binding)
#
# #1933: Update the g.app.lossage
if len(g.app.lossage) > 99:
g.app.lossage.pop()
lossage.stroke = key_event.stroke
g.app.lossage.insert(0, lossage)
#
# Call masterKeyHandler!
k.masterKeyHandler(key_event)
c.outerUpdate()
except Exception:
g.es_exception()
return True # Whatever happens, suppress all other Qt key handling.
#@+node:ekr.20110605195119.16937: *4* filter.createKeyEvent
def createKeyEvent(self, event, c, w, ch, binding):
return leoGui.LeoKeyEvent(
c=self.c,
# char = None doesn't work at present.
# But really, the binding should suffice.
char=ch,
event=event,
binding=binding,
w=w,
x=getattr(event, 'x', None) or 0,
y=getattr(event, 'y', None) or 0,
x_root=getattr(event, 'x_root', None) or 0,
y_root=getattr(event, 'y_root', None) or 0,
)
#@+node:ekr.20180413180751.2: *4* filter.doNonKeyEvent
def doNonKeyEvent(self, event, obj):
"""Handle all non-key event. """
c = self.c
eventType = event.type()
if eventType == Type.WindowActivate:
g.app.gui.onActivateEvent(event, c, obj, self.tag)
elif eventType == Type.WindowDeactivate:
g.app.gui.onDeactivateEvent(event, c, obj, self.tag)
elif eventType == Type.FocusIn:
if self.tag == 'body':
c.frame.body.onFocusIn(obj)
if c.frame and c.frame.top and obj is c.frame.top.lineEdit:
if c.k.getStateKind() == 'getArg':
c.frame.top.lineEdit.restore_selection()
elif eventType == Type.FocusOut and self.tag == 'body':
c.frame.body.onFocusOut(obj)
# Return True unless we have a key event.
return eventType not in (Type.ShortcutOverride, Type.KeyPress, Type.KeyRelease)
#@+node:ekr.20180413180751.3: *4* filter.shouldIgnoreKeyEvent
def shouldIgnoreKeyEvent(self, event, obj):
"""
Return True if we should ignore the key event.
Alas, QLineEdit *only* generates ev.KeyRelease on Windows, Ubuntu,
so the following hack is required.
"""
c = self.c
t = event.type()
isEditWidget = (obj == c.frame.tree.edit_widget(c.p))
if isEditWidget:
# QLineEdit: ignore all key events except keyRelease events.
return t != Type.KeyRelease
if t == Type.KeyPress:
# Hack Alert!
# On some Linux systems (Kubuntu, Debian, the Win or SHIFT-Win keys
# insert garbage symbols into editing areas. Filter out these
# key events. NOTE - this is a *magic number* - who knows if
# it could change in the future?
if event.key() == 0x1000053 and sys.platform == 'linux':
return True
return False # Never ignore KeyPress events.
# This doesn't work. Two shortcut-override events are generated!
# if t == ev.ShortcutOverride and event.text():
# return False # Don't ignore shortcut overrides with a real value.
return True # Ignore everything else.
#@+node:ekr.20110605121601.18543: *4* filter.toBinding & helpers
def toBinding(self, event):
"""
Return (binding, actual_ch):
binding: A user binding, to create g.KeyStroke.
Spelling no longer fragile.
actual_ch: The insertable key, or ''.
"""
mods = self.qtMods(event)
keynum, text, toString, ch = self.qtKey(event)
actual_ch = text or toString
#
# Never allow empty chars, or chars in g.app.gui.ignoreChars
if toString in g.app.gui.ignoreChars:
return None, None, None
ch = ch or toString or ''
if not ch:
return None, None, None
#
# Check for AltGr and Alt+Ctrl keys *before* creating a binding.
actual_ch, ch, mods2 = self.doMacTweaks(actual_ch, ch, mods)
mods3 = self.doAltTweaks(actual_ch, keynum, mods2, toString)
#
# Use *ch* in the binding.
# Clearer w/o f-strings.
binding = '%s%s' % (''.join([f"{z}+" for z in mods3]), ch)
#
# Return the tweaked *actual* char.
binding, actual_ch = self.doLateTweaks(binding, actual_ch)
#
# #1933: Create lossage data.
lossage = LossageData(
actual_ch, binding, ch, keynum, mods, mods2, mods3, text, toString)
return binding, actual_ch, lossage
#@+node:ekr.20180419154543.1: *5* filter.doAltTweaks
def doAltTweaks(self, actual_ch, keynum, mods, toString):
"""Turn AltGr and some Alt-Ctrl keys into plain keys."""
def removeAltCtrl(mods):
for mod in ('Alt', 'Control'):
if mod in mods:
mods.remove(mod)
return mods
#
# Remove Alt, Ctrl for AltGr keys.
# See https://en.wikipedia.org/wiki/AltGr_key
if keynum == Key.Key_AltGr:
return removeAltCtrl(mods)
#
# Never alter complex characters.
if len(actual_ch) != 1:
return mods
#
# #1563: A hack for German and Spanish keyboards:
# Remove *plain* Shift modifier for colon and semicolon.
# https://en.m.wikipedia.org/wiki/German_keyboard_layout
kind = self.keyboard_kind.lower()
if (kind in ('german', 'spanish')
and actual_ch in ":;"
and 'Shift' in mods
and 'Alt' not in mods and 'Control' not in mods
):
mods.remove('Shift')
elif kind == 'us-international':
pass # To do.
#
# Handle Alt-Ctrl modifiers for chars whose that are not ascii.
# Testing: Alt-Ctrl-E is '€'.
if ord(actual_ch) > 127 and 'Alt' in mods and 'Control' in mods:
return removeAltCtrl(mods)
return mods
#@+node:ekr.20180417161548.1: *5* filter.doLateTweaks
def doLateTweaks(self, binding, ch):
"""Make final tweaks. g.KeyStroke does other tweaks later."""
#
# These are needed because ch is separate from binding.
if ch == '\r':
ch = '\n'
if binding == 'Escape':
ch = 'Escape'
#
# Adjust the case of the binding string (for the minibuffer).
if len(ch) == 1 and len(binding) == 1 and ch.isalpha() and binding.isalpha():
if ch != binding:
binding = ch
return binding, ch
#@+node:ekr.20180419160958.1: *5* filter.doMacTweaks
def doMacTweaks(self, actual_ch, ch, mods):
"""Replace MacOS Alt characters."""
if not g.isMac:
return actual_ch, ch, mods
if ch == 'Backspace':
# On the Mac, the reported char can be DEL (7F)
return '\b', ch, mods
if len(mods) == 1 and mods[0] == 'Alt':
# Patch provided by resi147.
# See the thread: special characters in MacOSX, like '@'.
mac_d = {
'/': '\\',
'5': '[',
'6': ']',
'7': '|',
'8': '{',
'9': '}',
'e': '€',
'l': '@',
}
if ch.lower() in mac_d:
# Ignore the case.
actual_ch = ch = g.checkUnicode(mac_d.get(ch.lower()))
mods = []
return actual_ch, ch, mods
#@+node:ekr.20110605121601.18544: *5* filter.qtKey
def qtKey(self, event):
"""
Return the components of a Qt key event.
Modifiers are handled separately.
Return (keynum, text, toString, ch).
keynum: event.key()
ch: chr(keynum) or '' if there is an exception.
toString:
For special keys: made-up spelling that become part of the setting.
For all others: QtGui.QKeySequence(keynum).toString()
text: event.text()
"""
text, toString, ch = '', '', '' # Defaults.
#
# Leo 6.4: Test keynum's directly.
# The values are the same in Qt4, Qt5, Qt6.
keynum = event.key()
if keynum in (
0x01000020, # Key_Shift
0x01000021, # Key_Control
0x01000022, # Key_Meta
0x01000023, # Key_Alt
0x01001103, # Key_AltGr
0x01000024, # Key_CapsLock
):
# Disallow bare modifiers.
return keynum, text, toString, ch
#
# Compute toString and ch.
text = event.text() # This is the unicode character!
toString = QtGui.QKeySequence(keynum).toString()
#
# #1244461: Numpad 'Enter' key does not work in minibuffer
if toString == 'Enter':
toString = 'Return'
if toString == 'Esc':
toString = 'Escape'
try:
ch = chr(keynum)
except ValueError:
pass
return keynum, text, toString, ch
#@+node:ekr.20120204061120.10084: *5* filter.qtMods
def qtMods(self, event):
"""Return the text version of the modifiers of the key event."""
modifiers = event.modifiers()
mod_table = (
(KeyboardModifier.AltModifier, 'Alt'),
(KeyboardModifier.ControlModifier, 'Control'),
(KeyboardModifier.MetaModifier, 'Meta'),
(KeyboardModifier.ShiftModifier, 'Shift'),
# #1448: Replacing this by 'Key' would make separate keypad bindings impossible.
(KeyboardModifier.KeypadModifier, 'KeyPad'),
)
# pylint: disable=superfluous-parens.
mods = [b for a, b in mod_table if (modifiers & a)]
return mods
#@+node:ekr.20140907103315.18767: *3* filter.Tracing
#@+node:ekr.20190922075339.1: *4* filter.traceKeys
def traceKeys(self, obj, event):
if g.unitTesting:
return
e = QtCore.QEvent
key_events = {
e.Type.KeyPress: 'key-press', # 6
e.Type.KeyRelease: 'key-release', # 7
e.Type.Shortcut: 'shortcut', # 117
e.Type.ShortcutOverride: 'shortcut-override', # 51
}
kind = key_events.get(event.type())
if kind:
mods = ','.join(self.qtMods(event))
g.trace(f"{kind:>20}: {mods:>7} {event.text()!r}")
#@+node:ekr.20110605121601.18548: *4* filter.traceEvent
def traceEvent(self, obj, event):
if g.unitTesting:
return
# http://qt-project.org/doc/qt-4.8/qevent.html#properties
exclude_names = ('tree', 'log', 'body', 'minibuffer')
traceActivate = True
traceFocus = False
traceHide = False
traceHover = False
traceKey = False
traceLayout = False
traceMouse = False
tracePaint = False
traceUpdate = False
c, e = self.c, QtCore.QEvent
eventType = event.type()
# http://doc.qt.io/qt-5/qevent.html
show: list[Any] = []
ignore = [
e.Type.MetaCall, # 43
e.Type.Timer, # 1
e.Type.ToolTip, # 110
]
activate_events = (
(e.Type.Close, 'close'), # 19
(e.Type.WindowActivate, 'window-activate'), # 24
(e.Type.WindowBlocked, 'window-blocked'), # 103
(e.Type.WindowUnblocked, 'window-unblocked'), # 104
(e.Type.WindowDeactivate, 'window-deactivate'), # 25
)
focus_events = [
(e.Type.Enter, 'enter'), # 10
(e.Type.Leave, 'leave'), # 11
(e.Type.FocusIn, 'focus-in'), # 8
(e.Type.FocusOut, 'focus-out'), # 9
(e.Type.ShowToParent, 'show-to-parent'), # 26
]
if hasattr(e, 'FocusAboutToChange'):
# pylint: disable=no-member
focus_events.extend([
(e.Type.FocusAboutToChange, 'focus-about-to-change'), # 23
])
hide_events = (
(e.Type.Hide, 'hide'), # 18
(e.Type.HideToParent, 'hide-to-parent'), # 27
# (e.Type.LeaveEditFocus,'leave-edit-focus'), # 151
(e.Type.Show, 'show'), # 17
)
hover_events = (
(e.Type.HoverEnter, 'hover-enter'), # 127
(e.Type.HoverLeave, 'hover-leave'), # 128
(e.Type.HoverMove, 'hover-move'), # 129
)
key_events = [
(e.Type.KeyPress, 'key-press'), # 6
(e.Type.KeyRelease, 'key-release'), # 7
(e.Type.Shortcut, 'shortcut'), # 117
(e.Type.ShortcutOverride, 'shortcut-override'), # 51
]
if hasattr(e, 'InputMethodQuery'):
# pylint: disable=no-member
key_events.extend([
(e.Type.InputMethodQuery, 'input-method-query'), # 207
])
layout_events = [
(e.Type.ChildAdded, 'child-added'), # 68
(e.Type.ChildRemoved, 'child-removed'), # 71
(e.Type.DynamicPropertyChange, 'dynamic-property-change'), # 170
(e.Type.FontChange, 'font-change'), # 97
(e.Type.LayoutRequest, 'layout-request'), # 76
(e.Type.Move, 'move'), # 13 widget's position changed.
(e.Type.Resize, 'resize'), # 14
(e.Type.StyleChange, 'style-change'), # 100
(e.Type.ZOrderChange, 'z-order-change'), # 126
]
if hasattr(e, 'CloseSoftwareInputPanel'):
layout_events.extend([
(e.Type.CloseSoftwareInputPanel, 'close-sip'), # 200
])
mouse_events = (
(e.Type.MouseMove, 'mouse-move'), # 155
(e.Type.MouseButtonPress, 'mouse-press'), # 2
(e.Type.MouseButtonRelease, 'mouse-release'), # 3
(e.Type.Wheel, 'mouse-wheel'), # 31
)
paint_events = [
(e.Type.ChildPolished, 'child-polished'), # 69
(e.Type.PaletteChange, 'palette-change'), # 39
(e.Type.ParentChange, 'parent-change'), # 21
(e.Type.Paint, 'paint'), # 12
(e.Type.Polish, 'polish'), # 75
(e.Type.PolishRequest, 'polish-request'), # 74
]
if hasattr(e, 'RequestSoftwareInputPanel'):
paint_events.extend([
(e.Type.RequestSoftwareInputPanel, 'sip'), # 199
])
update_events = (
(e.Type.UpdateLater, 'update-later'), # 78
(e.Type.UpdateRequest, 'update'), # 77
)
option_table = (
(traceActivate, activate_events),
(traceFocus, focus_events),
(traceHide, hide_events),
(traceHover, hover_events),
(traceKey, key_events),
(traceLayout, layout_events),
(traceMouse, mouse_events),
(tracePaint, paint_events),
(traceUpdate, update_events),
)
for option, table in option_table:
if option:
show.extend(table)
else:
for n, tag in table:
ignore.append(n)
for val, kind in show:
if self.tag in exclude_names:
return
if eventType == val:
tag = (
obj.objectName() if hasattr(obj, 'objectName')
else f"id: {id(obj)}, {obj.__class__.__name__}"
)
if traceKey:
g.trace(
f"{kind:>25} {self.tag:25} "
f"in-state: {repr(c.k and c.k.inState()):5} obj: {tag}")
return
if eventType not in ignore:
tag = (
obj.objectName() if hasattr(obj, 'objectName')
else f"id: {id(obj)}, {obj.__class__.__name__}"
)
g.trace(f"{eventType:>25} {self.tag:25} {tag}")
#@+node:ekr.20131121050226.16331: *4* filter.traceWidget
def traceWidget(self, event):
"""Show unexpected events in unusual widgets."""
verbose = False # Not good for --trace-events
e = QtCore.QEvent
assert isinstance(event, QtCore.QEvent)
et = event.type()
# http://qt-project.org/doc/qt-4.8/qevent.html#properties
ignore_d = {
e.Type.ChildAdded: 'child-added', # 68
e.Type.ChildPolished: 'child-polished', # 69
e.Type.ChildRemoved: 'child-removed', # 71
e.Type.Close: 'close', # 19
e.Type.CloseSoftwareInputPanel: 'close-software-input-panel', # 200
178: 'contents-rect-change', # 178
# e.Type.DeferredDelete:'deferred-delete', # 52 (let's trace this)
e.Type.DynamicPropertyChange: 'dynamic-property-change', # 170
e.Type.FocusOut: 'focus-out', # 9 (We don't care if we are leaving an unknown widget)
e.Type.FontChange: 'font-change', # 97
e.Type.Hide: 'hide', # 18
e.Type.HideToParent: 'hide-to-parent', # 27
e.Type.HoverEnter: 'hover-enter', # 127
e.Type.HoverLeave: 'hover-leave', # 128
e.Type.HoverMove: 'hover-move', # 129
e.Type.KeyPress: 'key-press', # 6
e.Type.KeyRelease: 'key-release', # 7
e.Type.LayoutRequest: 'layout-request', # 76
e.Type.Leave: 'leave', # 11 (We don't care if we are leaving an unknown widget)
# e.Type.LeaveEditFocus:'leave-edit-focus', # 151
e.Type.MetaCall: 'meta-call', # 43
e.Type.Move: 'move', # 13 widget's position changed.
e.Type.MouseButtonPress: 'mouse-button-press', # 2
e.Type.MouseButtonRelease: 'mouse-button-release', # 3
e.Type.MouseButtonDblClick: 'mouse-button-double-click', # 4
e.Type.MouseMove: 'mouse-move', # 5
e.Type.MouseTrackingChange: 'mouse-tracking-change', # 105
e.Type.Paint: 'paint', # 12
e.Type.PaletteChange: 'palette-change', # 39
e.Type.ParentChange: 'parent-change', # 21
e.Type.Polish: 'polish', # 75
e.Type.PolishRequest: 'polish-request', # 74
e.Type.RequestSoftwareInputPanel: 'request-software-input-panel', # 199
e.Type.Resize: 'resize', # 14
e.Type.ShortcutOverride: 'shortcut-override', # 51
e.Type.Show: 'show', # 17
e.Type.ShowToParent: 'show-to-parent', # 26
e.Type.StyleChange: 'style-change', # 100
e.Type.StatusTip: 'status-tip', # 112
e.Type.Timer: 'timer', # 1
e.Type.ToolTip: 'tool-tip', # 110
e.Type.WindowBlocked: 'window-blocked', # 103
e.Type.WindowUnblocked: 'window-unblocked', # 104
e.Type.ZOrderChange: 'z-order-change', # 126
}
focus_d = {
e.Type.DeferredDelete: 'deferred-delete', # 52
e.Type.Enter: 'enter', # 10
e.Type.FocusIn: 'focus-in', # 8
e.Type.WindowActivate: 'window-activate', # 24
e.Type.WindowDeactivate: 'window-deactivate', # 25
}
line_edit_ignore_d = {
e.Type.Enter: 'enter', # 10 (mouse over)
e.Type.Leave: 'leave', # 11 (mouse over)
e.Type.FocusOut: 'focus-out', # 9
e.Type.WindowActivate: 'window-activate', # 24
e.Type.WindowDeactivate: 'window-deactivate', # 25
}
none_ignore_d = {
e.Type.Enter: 'enter', # 10 (mouse over)
e.Type.Leave: 'leave', # 11 (mouse over)
e.Type.FocusOut: 'focus-out', # 9
e.Type.WindowActivate: 'window-activate', # 24
}
if et in ignore_d:
return
w = QtWidgets.QApplication.focusWidget()
if verbose: # Too verbose for --trace-events.
for d in (ignore_d, focus_d, line_edit_ignore_d, none_ignore_d):
t = d.get(et)
if t:
break
else:
t = et
g.trace(f"{t:20} {w.__class__}")
return
if w is None:
if et not in none_ignore_d:
t = | if isinstance(w, QtWidgets.QPushButton):
return
if isinstance(w, QtWidgets.QLineEdit):
if et not in line_edit_ignore_d:
t = focus_d.get(et) or et
if hasattr(w, 'objectName'):
tag = w.objectName()
else:
tag = f"id: {id(w)}, {w.__class__.__name__}"
g.trace(f"{t:20} {tag}")
return
t = focus_d.get(et) or et
if hasattr(w, 'objectName'):
tag = w.objectName()
else:
tag = f"id: {id(w)}, {w.__class__.__name__}"
g.trace(f"{t:20} {tag}")
#@-others
#@-others
#@@language python
#@@tabwidth -4
#@@pagewidth 70
#@-leo
| focus_d.get(et) or et
g.trace(f"None {t}")
| conditional_block |
qt_events.py | #@+leo-ver=5-thin
#@+node:ekr.20140907103315.18766: * @file ../plugins/qt_events.py
"""Leo's Qt event handling code."""
#@+<< about internal bindings >>
#@+node:ekr.20110605121601.18538: ** << about internal bindings >>
#@@language rest
#@+at
# Here are the rules for translating key bindings (in leoSettings.leo) into keys
# for k.bindingsDict:
#
# 1. The case of plain letters is significant: a is not A.
#
# 2. The Shift- prefix can be applied *only* to letters. Leo will ignore (with a
# warning) the shift prefix applied to any other binding, e.g., Ctrl-Shift-(
#
# 3. The case of letters prefixed by Ctrl-, Alt-, Key- or Shift- is *not*
# significant. Thus, the Shift- prefix is required if you want an upper-case
# letter (with the exception of 'bare' uppercase letters.)
#
# The following table illustrates these rules. In each row, the first entry is the
# key (for k.bindingsDict) and the other entries are equivalents that the user may
# specify in leoSettings.leo:
#
# a, Key-a, Key-A
# A, Shift-A
# Alt-a, Alt-A
# Alt-A, Alt-Shift-a, Alt-Shift-A
# Ctrl-a, Ctrl-A
# Ctrl-A, Ctrl-Shift-a, Ctrl-Shift-A
# , Key-!,Key-exclam,exclam
#
# This table is consistent with how Leo already works (because it is consistent
# with Tk's key-event specifiers). It is also, I think, the least confusing set of
# rules.
#@-<< about internal bindings >>
import sys
from typing import Any
from leo.core import leoGlobals as g
from leo.core import leoGui
from leo.core.leoQt import QtCore, QtGui, QtWidgets
from leo.core.leoQt import Key, KeyboardModifier, Type
#@+others
#@+node:ekr.20210512101604.1: ** class LossageData
class LossageData:
def __init__(self, actual_ch, binding, ch, keynum, mods, mods2, mods3, text, toString):
self.actual_ch = actual_ch
self.binding = binding
self.ch = ch
self.keynum = keynum
self.mods = mods
self.mods2 = mods2
self.mods3 = mods3
self.stroke = None # Set later.
self.text = text
self.toString = toString
def __repr__(self):
return (
f"keynum: {self.keynum:>7x} "
f"binding: {self.binding}"
# f"ch: {self.ch:>7s} "
# f"= {self.actual_ch!r}"
# f"mods: {self.mods}, {self.mods2}, {self.mods3}\n"
# f"stroke: {self.stroke!r}\n"
# f"text: {self.text!r}\n"
# f"toString: {self.toString!r}\n"
)
__str__ = __repr__
#@+node:ekr.20141028061518.17: ** class LeoQtEventFilter
class LeoQtEventFilter(QtCore.QObject): # type:ignore
#@+others
#@+node:ekr.20110605121601.18539: *3* filter.ctor
def __init__(self, c, w, tag=''):
"""Ctor for LeoQtEventFilter class."""
super().__init__()
self.c = c
self.w = w # A leoQtX object, *not* a Qt object.
self.tag = tag
# Debugging.
self.keyIsActive = False
# Pretend there is a binding for these characters.
close_flashers = c.config.getString('close-flash-brackets') or ''
open_flashers = c.config.getString('open-flash-brackets') or ''
self.flashers = open_flashers + close_flashers
# #1563: Support alternate keyboards.
self.keyboard_kind = c.config.getString('keyboard-kind') or 'default-keyboard'
# Support for ctagscompleter.py plugin.
self.ctagscompleter_active = False
self.ctagscompleter_onKey = None
#@+node:ekr.20110605121601.18540: *3* filter.eventFilter & helpers
def eventFilter(self, obj, event):
"""Return False if Qt should handle the event."""
c, k = self.c, self.c.k
#
# Handle non-key events first.
if not g.app:
return False # For unit tests, but g.unitTesting may be False!
if not self.c.p:
return False # Startup.
#
# Trace events.
if 'events' in g.app.debug:
if isinstance(event, QtGui.QKeyEvent):
self.traceKeys(obj, event)
else:
self.traceEvent(obj, event)
self.traceWidget(event)
#
# Let Qt handle the non-key events.
if self.doNonKeyEvent(event, obj):
return False
#
# Ignore incomplete key events.
if self.shouldIgnoreKeyEvent(event, obj):
return False
#
# Generate a g.KeyStroke for k.masterKeyHandler.
try:
binding, ch, lossage = self.toBinding(event)
if not binding:
return False # Let Qt handle the key.
#
# Pass the KeyStroke to masterKeyHandler.
key_event = self.createKeyEvent(event, c, self.w, ch, binding)
#
# #1933: Update the g.app.lossage
if len(g.app.lossage) > 99:
g.app.lossage.pop()
lossage.stroke = key_event.stroke
g.app.lossage.insert(0, lossage)
#
# Call masterKeyHandler!
k.masterKeyHandler(key_event)
c.outerUpdate()
except Exception:
g.es_exception()
return True # Whatever happens, suppress all other Qt key handling.
#@+node:ekr.20110605195119.16937: *4* filter.createKeyEvent
def createKeyEvent(self, event, c, w, ch, binding):
return leoGui.LeoKeyEvent(
c=self.c,
# char = None doesn't work at present.
# But really, the binding should suffice.
char=ch,
event=event,
binding=binding,
w=w,
x=getattr(event, 'x', None) or 0,
y=getattr(event, 'y', None) or 0,
x_root=getattr(event, 'x_root', None) or 0,
y_root=getattr(event, 'y_root', None) or 0,
)
#@+node:ekr.20180413180751.2: *4* filter.doNonKeyEvent
def doNonKeyEvent(self, event, obj):
"""Handle all non-key event. """
c = self.c
eventType = event.type()
if eventType == Type.WindowActivate:
g.app.gui.onActivateEvent(event, c, obj, self.tag)
elif eventType == Type.WindowDeactivate:
g.app.gui.onDeactivateEvent(event, c, obj, self.tag)
elif eventType == Type.FocusIn:
if self.tag == 'body':
c.frame.body.onFocusIn(obj)
if c.frame and c.frame.top and obj is c.frame.top.lineEdit:
if c.k.getStateKind() == 'getArg':
c.frame.top.lineEdit.restore_selection()
elif eventType == Type.FocusOut and self.tag == 'body':
c.frame.body.onFocusOut(obj)
# Return True unless we have a key event.
return eventType not in (Type.ShortcutOverride, Type.KeyPress, Type.KeyRelease)
#@+node:ekr.20180413180751.3: *4* filter.shouldIgnoreKeyEvent
def shouldIgnoreKeyEvent(self, event, obj):
"""
Return True if we should ignore the key event.
Alas, QLineEdit *only* generates ev.KeyRelease on Windows, Ubuntu,
so the following hack is required.
"""
c = self.c
t = event.type()
isEditWidget = (obj == c.frame.tree.edit_widget(c.p))
if isEditWidget:
# QLineEdit: ignore all key events except keyRelease events.
return t != Type.KeyRelease
if t == Type.KeyPress:
# Hack Alert!
# On some Linux systems (Kubuntu, Debian, the Win or SHIFT-Win keys
# insert garbage symbols into editing areas. Filter out these
# key events. NOTE - this is a *magic number* - who knows if
# it could change in the future?
if event.key() == 0x1000053 and sys.platform == 'linux':
return True
return False # Never ignore KeyPress events.
# This doesn't work. Two shortcut-override events are generated!
# if t == ev.ShortcutOverride and event.text():
# return False # Don't ignore shortcut overrides with a real value.
return True # Ignore everything else.
#@+node:ekr.20110605121601.18543: *4* filter.toBinding & helpers
def toBinding(self, event):
"""
Return (binding, actual_ch):
binding: A user binding, to create g.KeyStroke.
Spelling no longer fragile.
actual_ch: The insertable key, or ''.
"""
mods = self.qtMods(event)
keynum, text, toString, ch = self.qtKey(event)
actual_ch = text or toString
#
# Never allow empty chars, or chars in g.app.gui.ignoreChars
if toString in g.app.gui.ignoreChars:
return None, None, None
ch = ch or toString or ''
if not ch:
return None, None, None
#
# Check for AltGr and Alt+Ctrl keys *before* creating a binding.
actual_ch, ch, mods2 = self.doMacTweaks(actual_ch, ch, mods)
mods3 = self.doAltTweaks(actual_ch, keynum, mods2, toString)
#
# Use *ch* in the binding.
# Clearer w/o f-strings.
binding = '%s%s' % (''.join([f"{z}+" for z in mods3]), ch)
#
# Return the tweaked *actual* char.
binding, actual_ch = self.doLateTweaks(binding, actual_ch)
#
# #1933: Create lossage data.
lossage = LossageData(
actual_ch, binding, ch, keynum, mods, mods2, mods3, text, toString)
return binding, actual_ch, lossage
#@+node:ekr.20180419154543.1: *5* filter.doAltTweaks
def doAltTweaks(self, actual_ch, keynum, mods, toString):
"""Turn AltGr and some Alt-Ctrl keys into plain keys."""
def removeAltCtrl(mods):
for mod in ('Alt', 'Control'):
if mod in mods:
mods.remove(mod)
return mods
#
# Remove Alt, Ctrl for AltGr keys.
# See https://en.wikipedia.org/wiki/AltGr_key
if keynum == Key.Key_AltGr:
return removeAltCtrl(mods)
#
# Never alter complex characters.
if len(actual_ch) != 1:
return mods
#
# #1563: A hack for German and Spanish keyboards:
# Remove *plain* Shift modifier for colon and semicolon.
# https://en.m.wikipedia.org/wiki/German_keyboard_layout
kind = self.keyboard_kind.lower()
if (kind in ('german', 'spanish')
and actual_ch in ":;"
and 'Shift' in mods
and 'Alt' not in mods and 'Control' not in mods
):
mods.remove('Shift')
elif kind == 'us-international':
pass # To do.
#
# Handle Alt-Ctrl modifiers for chars whose that are not ascii.
# Testing: Alt-Ctrl-E is '€'.
if ord(actual_ch) > 127 and 'Alt' in mods and 'Control' in mods:
return removeAltCtrl(mods)
return mods
#@+node:ekr.20180417161548.1: *5* filter.doLateTweaks
def doLateTweaks(self, binding, ch):
"""Make final tweaks. g.KeyStroke does other tweaks later."""
#
# These are needed because ch is separate from binding.
if ch == '\r':
ch = '\n'
if binding == 'Escape':
ch = 'Escape'
#
# Adjust the case of the binding string (for the minibuffer).
if len(ch) == 1 and len(binding) == 1 and ch.isalpha() and binding.isalpha():
if ch != binding:
binding = ch
return binding, ch
#@+node:ekr.20180419160958.1: *5* filter.doMacTweaks
def doMacTweaks(self, actual_ch, ch, mods):
"""Replace MacOS Alt characters."""
if not g.isMac:
return actual_ch, ch, mods
if ch == 'Backspace':
# On the Mac, the reported char can be DEL (7F)
return '\b', ch, mods
if len(mods) == 1 and mods[0] == 'Alt':
# Patch provided by resi147.
# See the thread: special characters in MacOSX, like '@'.
mac_d = {
'/': '\\',
'5': '[',
'6': ']',
'7': '|',
'8': '{',
'9': '}',
'e': '€',
'l': '@',
}
if ch.lower() in mac_d:
# Ignore the case.
actual_ch = ch = g.checkUnicode(mac_d.get(ch.lower()))
mods = []
return actual_ch, ch, mods
#@+node:ekr.20110605121601.18544: *5* filter.qtKey
def qtKey(self, event):
"""
Return the components of a Qt key event.
Modifiers are handled separately.
Return (keynum, text, toString, ch).
keynum: event.key()
ch: chr(keynum) or '' if there is an exception.
toString:
For special keys: made-up spelling that become part of the setting.
For all others: QtGui.QKeySequence(keynum).toString()
text: event.text()
"""
text, toString, ch = '', '', '' # Defaults.
#
# Leo 6.4: Test keynum's directly.
# The values are the same in Qt4, Qt5, Qt6.
keynum = event.key()
if keynum in (
0x01000020, # Key_Shift
0x01000021, # Key_Control
0x01000022, # Key_Meta
0x01000023, # Key_Alt
0x01001103, # Key_AltGr
0x01000024, # Key_CapsLock
):
# Disallow bare modifiers.
return keynum, text, toString, ch
#
# Compute toString and ch.
text = event.text() # This is the unicode character!
toString = QtGui.QKeySequence(keynum).toString()
#
# #1244461: Numpad 'Enter' key does not work in minibuffer
if toString == 'Enter':
toString = 'Return'
if toString == 'Esc':
toString = 'Escape'
try:
ch = chr(keynum)
except ValueError:
pass
return keynum, text, toString, ch
#@+node:ekr.20120204061120.10084: *5* filter.qtMods
def qtMods(self, event):
"""Return the text version of the modifiers of the key event."""
modifiers = event.modifiers()
mod_table = (
(KeyboardModifier.AltModifier, 'Alt'),
(KeyboardModifier.ControlModifier, 'Control'),
(KeyboardModifier.MetaModifier, 'Meta'),
(KeyboardModifier.ShiftModifier, 'Shift'),
# #1448: Replacing this by 'Key' would make separate keypad bindings impossible.
(KeyboardModifier.KeypadModifier, 'KeyPad'),
)
# pylint: disable=superfluous-parens.
mods = [b for a, b in mod_table if (modifiers & a)]
return mods
#@+node:ekr.20140907103315.18767: *3* filter.Tracing
#@+node:ekr.20190922075339.1: *4* filter.traceKeys
def traceKeys(self, obj, event):
if g.unitTesting:
return
e = QtCore.QEvent
key_events = {
e.Type.KeyPress: 'key-press', # 6
e.Type.KeyRelease: 'key-release', # 7
e.Type.Shortcut: 'shortcut', # 117
e.Type.ShortcutOverride: 'shortcut-override', # 51
}
kind = key_events.get(event.type())
if kind:
mods = ','.join(self.qtMods(event))
g.trace(f"{kind:>20}: {mods:>7} {event.text()!r}")
#@+node:ekr.20110605121601.18548: *4* filter.traceEvent
def traceEvent(self, obj, event):
if g.unitTesting:
return
# http://qt-project.org/doc/qt-4.8/qevent.html#properties
exclude_names = ('tree', 'log', 'body', 'minibuffer')
traceActivate = True
traceFocus = False
traceHide = False
traceHover = False
traceKey = False
traceLayout = False
traceMouse = False
tracePaint = False
traceUpdate = False
c, e = self.c, QtCore.QEvent
eventType = event.type()
# http://doc.qt.io/qt-5/qevent.html
show: list[Any] = []
ignore = [
e.Type.MetaCall, # 43
e.Type.Timer, # 1
e.Type.ToolTip, # 110
]
activate_events = (
(e.Type.Close, 'close'), # 19
(e.Type.WindowActivate, 'window-activate'), # 24
(e.Type.WindowBlocked, 'window-blocked'), # 103
(e.Type.WindowUnblocked, 'window-unblocked'), # 104
(e.Type.WindowDeactivate, 'window-deactivate'), # 25
)
focus_events = [
(e.Type.Enter, 'enter'), # 10
(e.Type.Leave, 'leave'), # 11
(e.Type.FocusIn, 'focus-in'), # 8
(e.Type.FocusOut, 'focus-out'), # 9
(e.Type.ShowToParent, 'show-to-parent'), # 26
]
if hasattr(e, 'FocusAboutToChange'):
# pylint: disable=no-member
focus_events.extend([
(e.Type.FocusAboutToChange, 'focus-about-to-change'), # 23
])
hide_events = (
(e.Type.Hide, 'hide'), # 18
(e.Type.HideToParent, 'hide-to-parent'), # 27
# (e.Type.LeaveEditFocus,'leave-edit-focus'), # 151
(e.Type.Show, 'show'), # 17
)
hover_events = (
(e.Type.HoverEnter, 'hover-enter'), # 127
(e.Type.HoverLeave, 'hover-leave'), # 128
(e.Type.HoverMove, 'hover-move'), # 129
)
key_events = [
(e.Type.KeyPress, 'key-press'), # 6
(e.Type.KeyRelease, 'key-release'), # 7
(e.Type.Shortcut, 'shortcut'), # 117
(e.Type.ShortcutOverride, 'shortcut-override'), # 51
]
if hasattr(e, 'InputMethodQuery'):
# pylint: disable=no-member
key_events.extend([
(e.Type.InputMethodQuery, 'input-method-query'), # 207
])
layout_events = [
(e.Type.ChildAdded, 'child-added'), # 68
(e.Type.ChildRemoved, 'child-removed'), # 71
(e.Type.DynamicPropertyChange, 'dynamic-property-change'), # 170
(e.Type.FontChange, 'font-change'), # 97
(e.Type.LayoutRequest, 'layout-request'), # 76
(e.Type.Move, 'move'), # 13 widget's position changed.
(e.Type.Resize, 'resize'), # 14
(e.Type.StyleChange, 'style-change'), # 100
(e.Type.ZOrderChange, 'z-order-change'), # 126
]
if hasattr(e, 'CloseSoftwareInputPanel'):
layout_events.extend([
(e.Type.CloseSoftwareInputPanel, 'close-sip'), # 200
])
mouse_events = (
(e.Type.MouseMove, 'mouse-move'), # 155
(e.Type.MouseButtonPress, 'mouse-press'), # 2
(e.Type.MouseButtonRelease, 'mouse-release'), # 3
(e.Type.Wheel, 'mouse-wheel'), # 31
)
paint_events = [
(e.Type.ChildPolished, 'child-polished'), # 69
(e.Type.PaletteChange, 'palette-change'), # 39
(e.Type.ParentChange, 'parent-change'), # 21
(e.Type.Paint, 'paint'), # 12
(e.Type.Polish, 'polish'), # 75
(e.Type.PolishRequest, 'polish-request'), # 74
]
if hasattr(e, 'RequestSoftwareInputPanel'):
paint_events.extend([
(e.Type.RequestSoftwareInputPanel, 'sip'), # 199
])
update_events = (
(e.Type.UpdateLater, 'update-later'), # 78
(e.Type.UpdateRequest, 'update'), # 77
)
option_table = (
(traceActivate, activate_events),
(traceFocus, focus_events),
(traceHide, hide_events),
(traceHover, hover_events),
(traceKey, key_events),
(traceLayout, layout_events),
(traceMouse, mouse_events),
(tracePaint, paint_events),
(traceUpdate, update_events),
)
for option, table in option_table:
if option:
show.extend(table)
else:
for n, tag in table:
ignore.append(n)
for val, kind in show:
if self.tag in exclude_names:
return
if eventType == val:
tag = (
obj.objectName() if hasattr(obj, 'objectName')
else f"id: {id(obj)}, {obj.__class__.__name__}"
)
if traceKey:
g.trace(
f"{kind:>25} {self.tag:25} "
f"in-state: {repr(c.k and c.k.inState()):5} obj: {tag}")
return
if eventType not in ignore:
tag = (
obj.objectName() if hasattr(obj, 'objectName')
else f"id: {id(obj)}, {obj.__class__.__name__}"
)
g.trace(f"{eventType:>25} {self.tag:25} {tag}")
#@+node:ekr.20131121050226.16331: *4* filter.traceWidget
def traceWidget(self, event):
"""Show unexpected events in unusual widgets."""
verbose = False # Not good for --trace-events
e = QtCore.QEvent
assert isinstance(event, QtCore.QEvent)
et = event.type()
# http://qt-project.org/doc/qt-4.8/qevent.html#properties
ignore_d = {
e.Type.ChildAdded: 'child-added', # 68
e.Type.ChildPolished: 'child-polished', # 69
e.Type.ChildRemoved: 'child-removed', # 71
e.Type.Close: 'close', # 19
e.Type.CloseSoftwareInputPanel: 'close-software-input-panel', # 200
178: 'contents-rect-change', # 178
# e.Type.DeferredDelete:'deferred-delete', # 52 (let's trace this)
e.Type.DynamicPropertyChange: 'dynamic-property-change', # 170
e.Type.FocusOut: 'focus-out', # 9 (We don't care if we are leaving an unknown widget)
e.Type.FontChange: 'font-change', # 97
e.Type.Hide: 'hide', # 18
e.Type.HideToParent: 'hide-to-parent', # 27
e.Type.HoverEnter: 'hover-enter', # 127
e.Type.HoverLeave: 'hover-leave', # 128
e.Type.HoverMove: 'hover-move', # 129
e.Type.KeyPress: 'key-press', # 6
e.Type.KeyRelease: 'key-release', # 7
e.Type.LayoutRequest: 'layout-request', # 76
e.Type.Leave: 'leave', # 11 (We don't care if we are leaving an unknown widget)
# e.Type.LeaveEditFocus:'leave-edit-focus', # 151
e.Type.MetaCall: 'meta-call', # 43
e.Type.Move: 'move', # 13 widget's position changed.
e.Type.MouseButtonPress: 'mouse-button-press', # 2
e.Type.MouseButtonRelease: 'mouse-button-release', # 3
e.Type.MouseButtonDblClick: 'mouse-button-double-click', # 4
e.Type.MouseMove: 'mouse-move', # 5
e.Type.MouseTrackingChange: 'mouse-tracking-change', # 105
e.Type.Paint: 'paint', # 12
e.Type.PaletteChange: 'palette-change', # 39
e.Type.ParentChange: 'parent-change', # 21
e.Type.Polish: 'polish', # 75
e.Type.PolishRequest: 'polish-request', # 74
e.Type.RequestSoftwareInputPanel: 'request-software-input-panel', # 199
e.Type.Resize: 'resize', # 14
e.Type.ShortcutOverride: 'shortcut-override', # 51
e.Type.Show: 'show', # 17
e.Type.ShowToParent: 'show-to-parent', # 26
e.Type.StyleChange: 'style-change', # 100
e.Type.StatusTip: 'status-tip', # 112
e.Type.Timer: 'timer', # 1
e.Type.ToolTip: 'tool-tip', # 110
e.Type.WindowBlocked: 'window-blocked', # 103
e.Type.WindowUnblocked: 'window-unblocked', # 104
e.Type.ZOrderChange: 'z-order-change', # 126
}
focus_d = {
e.Type.DeferredDelete: 'deferred-delete', # 52
e.Type.Enter: 'enter', # 10
e.Type.FocusIn: 'focus-in', # 8
e.Type.WindowActivate: 'window-activate', # 24
e.Type.WindowDeactivate: 'window-deactivate', # 25
}
line_edit_ignore_d = {
e.Type.Enter: 'enter', # 10 (mouse over)
e.Type.Leave: 'leave', # 11 (mouse over)
e.Type.FocusOut: 'focus-out', # 9
e.Type.WindowActivate: 'window-activate', # 24
e.Type.WindowDeactivate: 'window-deactivate', # 25
}
none_ignore_d = {
e.Type.Enter: 'enter', # 10 (mouse over)
e.Type.Leave: 'leave', # 11 (mouse over)
e.Type.FocusOut: 'focus-out', # 9
e.Type.WindowActivate: 'window-activate', # 24
}
if et in ignore_d:
return
w = QtWidgets.QApplication.focusWidget()
if verbose: # Too verbose for --trace-events.
for d in (ignore_d, focus_d, line_edit_ignore_d, none_ignore_d):
t = d.get(et)
if t:
break
else:
t = et | if et not in none_ignore_d:
t = focus_d.get(et) or et
g.trace(f"None {t}")
if isinstance(w, QtWidgets.QPushButton):
return
if isinstance(w, QtWidgets.QLineEdit):
if et not in line_edit_ignore_d:
t = focus_d.get(et) or et
if hasattr(w, 'objectName'):
tag = w.objectName()
else:
tag = f"id: {id(w)}, {w.__class__.__name__}"
g.trace(f"{t:20} {tag}")
return
t = focus_d.get(et) or et
if hasattr(w, 'objectName'):
tag = w.objectName()
else:
tag = f"id: {id(w)}, {w.__class__.__name__}"
g.trace(f"{t:20} {tag}")
#@-others
#@-others
#@@language python
#@@tabwidth -4
#@@pagewidth 70
#@-leo | g.trace(f"{t:20} {w.__class__}")
return
if w is None: | random_line_split |
qt_events.py | #@+leo-ver=5-thin
#@+node:ekr.20140907103315.18766: * @file ../plugins/qt_events.py
"""Leo's Qt event handling code."""
#@+<< about internal bindings >>
#@+node:ekr.20110605121601.18538: ** << about internal bindings >>
#@@language rest
#@+at
# Here are the rules for translating key bindings (in leoSettings.leo) into keys
# for k.bindingsDict:
#
# 1. The case of plain letters is significant: a is not A.
#
# 2. The Shift- prefix can be applied *only* to letters. Leo will ignore (with a
# warning) the shift prefix applied to any other binding, e.g., Ctrl-Shift-(
#
# 3. The case of letters prefixed by Ctrl-, Alt-, Key- or Shift- is *not*
# significant. Thus, the Shift- prefix is required if you want an upper-case
# letter (with the exception of 'bare' uppercase letters.)
#
# The following table illustrates these rules. In each row, the first entry is the
# key (for k.bindingsDict) and the other entries are equivalents that the user may
# specify in leoSettings.leo:
#
# a, Key-a, Key-A
# A, Shift-A
# Alt-a, Alt-A
# Alt-A, Alt-Shift-a, Alt-Shift-A
# Ctrl-a, Ctrl-A
# Ctrl-A, Ctrl-Shift-a, Ctrl-Shift-A
# , Key-!,Key-exclam,exclam
#
# This table is consistent with how Leo already works (because it is consistent
# with Tk's key-event specifiers). It is also, I think, the least confusing set of
# rules.
#@-<< about internal bindings >>
import sys
from typing import Any
from leo.core import leoGlobals as g
from leo.core import leoGui
from leo.core.leoQt import QtCore, QtGui, QtWidgets
from leo.core.leoQt import Key, KeyboardModifier, Type
#@+others
#@+node:ekr.20210512101604.1: ** class LossageData
class LossageData:
def __init__(self, actual_ch, binding, ch, keynum, mods, mods2, mods3, text, toString):
self.actual_ch = actual_ch
self.binding = binding
self.ch = ch
self.keynum = keynum
self.mods = mods
self.mods2 = mods2
self.mods3 = mods3
self.stroke = None # Set later.
self.text = text
self.toString = toString
def __repr__(self):
return (
f"keynum: {self.keynum:>7x} "
f"binding: {self.binding}"
# f"ch: {self.ch:>7s} "
# f"= {self.actual_ch!r}"
# f"mods: {self.mods}, {self.mods2}, {self.mods3}\n"
# f"stroke: {self.stroke!r}\n"
# f"text: {self.text!r}\n"
# f"toString: {self.toString!r}\n"
)
__str__ = __repr__
#@+node:ekr.20141028061518.17: ** class LeoQtEventFilter
class LeoQtEventFilter(QtCore.QObject): # type:ignore
#@+others
#@+node:ekr.20110605121601.18539: *3* filter.ctor
def __init__(self, c, w, tag=''):
"""Ctor for LeoQtEventFilter class."""
super().__init__()
self.c = c
self.w = w # A leoQtX object, *not* a Qt object.
self.tag = tag
# Debugging.
self.keyIsActive = False
# Pretend there is a binding for these characters.
close_flashers = c.config.getString('close-flash-brackets') or ''
open_flashers = c.config.getString('open-flash-brackets') or ''
self.flashers = open_flashers + close_flashers
# #1563: Support alternate keyboards.
self.keyboard_kind = c.config.getString('keyboard-kind') or 'default-keyboard'
# Support for ctagscompleter.py plugin.
self.ctagscompleter_active = False
self.ctagscompleter_onKey = None
#@+node:ekr.20110605121601.18540: *3* filter.eventFilter & helpers
def | (self, obj, event):
"""Return False if Qt should handle the event."""
c, k = self.c, self.c.k
#
# Handle non-key events first.
if not g.app:
return False # For unit tests, but g.unitTesting may be False!
if not self.c.p:
return False # Startup.
#
# Trace events.
if 'events' in g.app.debug:
if isinstance(event, QtGui.QKeyEvent):
self.traceKeys(obj, event)
else:
self.traceEvent(obj, event)
self.traceWidget(event)
#
# Let Qt handle the non-key events.
if self.doNonKeyEvent(event, obj):
return False
#
# Ignore incomplete key events.
if self.shouldIgnoreKeyEvent(event, obj):
return False
#
# Generate a g.KeyStroke for k.masterKeyHandler.
try:
binding, ch, lossage = self.toBinding(event)
if not binding:
return False # Let Qt handle the key.
#
# Pass the KeyStroke to masterKeyHandler.
key_event = self.createKeyEvent(event, c, self.w, ch, binding)
#
# #1933: Update the g.app.lossage
if len(g.app.lossage) > 99:
g.app.lossage.pop()
lossage.stroke = key_event.stroke
g.app.lossage.insert(0, lossage)
#
# Call masterKeyHandler!
k.masterKeyHandler(key_event)
c.outerUpdate()
except Exception:
g.es_exception()
return True # Whatever happens, suppress all other Qt key handling.
#@+node:ekr.20110605195119.16937: *4* filter.createKeyEvent
def createKeyEvent(self, event, c, w, ch, binding):
return leoGui.LeoKeyEvent(
c=self.c,
# char = None doesn't work at present.
# But really, the binding should suffice.
char=ch,
event=event,
binding=binding,
w=w,
x=getattr(event, 'x', None) or 0,
y=getattr(event, 'y', None) or 0,
x_root=getattr(event, 'x_root', None) or 0,
y_root=getattr(event, 'y_root', None) or 0,
)
#@+node:ekr.20180413180751.2: *4* filter.doNonKeyEvent
def doNonKeyEvent(self, event, obj):
"""Handle all non-key event. """
c = self.c
eventType = event.type()
if eventType == Type.WindowActivate:
g.app.gui.onActivateEvent(event, c, obj, self.tag)
elif eventType == Type.WindowDeactivate:
g.app.gui.onDeactivateEvent(event, c, obj, self.tag)
elif eventType == Type.FocusIn:
if self.tag == 'body':
c.frame.body.onFocusIn(obj)
if c.frame and c.frame.top and obj is c.frame.top.lineEdit:
if c.k.getStateKind() == 'getArg':
c.frame.top.lineEdit.restore_selection()
elif eventType == Type.FocusOut and self.tag == 'body':
c.frame.body.onFocusOut(obj)
# Return True unless we have a key event.
return eventType not in (Type.ShortcutOverride, Type.KeyPress, Type.KeyRelease)
#@+node:ekr.20180413180751.3: *4* filter.shouldIgnoreKeyEvent
def shouldIgnoreKeyEvent(self, event, obj):
"""
Return True if we should ignore the key event.
Alas, QLineEdit *only* generates ev.KeyRelease on Windows, Ubuntu,
so the following hack is required.
"""
c = self.c
t = event.type()
isEditWidget = (obj == c.frame.tree.edit_widget(c.p))
if isEditWidget:
# QLineEdit: ignore all key events except keyRelease events.
return t != Type.KeyRelease
if t == Type.KeyPress:
# Hack Alert!
# On some Linux systems (Kubuntu, Debian, the Win or SHIFT-Win keys
# insert garbage symbols into editing areas. Filter out these
# key events. NOTE - this is a *magic number* - who knows if
# it could change in the future?
if event.key() == 0x1000053 and sys.platform == 'linux':
return True
return False # Never ignore KeyPress events.
# This doesn't work. Two shortcut-override events are generated!
# if t == ev.ShortcutOverride and event.text():
# return False # Don't ignore shortcut overrides with a real value.
return True # Ignore everything else.
#@+node:ekr.20110605121601.18543: *4* filter.toBinding & helpers
def toBinding(self, event):
"""
Return (binding, actual_ch):
binding: A user binding, to create g.KeyStroke.
Spelling no longer fragile.
actual_ch: The insertable key, or ''.
"""
mods = self.qtMods(event)
keynum, text, toString, ch = self.qtKey(event)
actual_ch = text or toString
#
# Never allow empty chars, or chars in g.app.gui.ignoreChars
if toString in g.app.gui.ignoreChars:
return None, None, None
ch = ch or toString or ''
if not ch:
return None, None, None
#
# Check for AltGr and Alt+Ctrl keys *before* creating a binding.
actual_ch, ch, mods2 = self.doMacTweaks(actual_ch, ch, mods)
mods3 = self.doAltTweaks(actual_ch, keynum, mods2, toString)
#
# Use *ch* in the binding.
# Clearer w/o f-strings.
binding = '%s%s' % (''.join([f"{z}+" for z in mods3]), ch)
#
# Return the tweaked *actual* char.
binding, actual_ch = self.doLateTweaks(binding, actual_ch)
#
# #1933: Create lossage data.
lossage = LossageData(
actual_ch, binding, ch, keynum, mods, mods2, mods3, text, toString)
return binding, actual_ch, lossage
#@+node:ekr.20180419154543.1: *5* filter.doAltTweaks
def doAltTweaks(self, actual_ch, keynum, mods, toString):
"""Turn AltGr and some Alt-Ctrl keys into plain keys."""
def removeAltCtrl(mods):
for mod in ('Alt', 'Control'):
if mod in mods:
mods.remove(mod)
return mods
#
# Remove Alt, Ctrl for AltGr keys.
# See https://en.wikipedia.org/wiki/AltGr_key
if keynum == Key.Key_AltGr:
return removeAltCtrl(mods)
#
# Never alter complex characters.
if len(actual_ch) != 1:
return mods
#
# #1563: A hack for German and Spanish keyboards:
# Remove *plain* Shift modifier for colon and semicolon.
# https://en.m.wikipedia.org/wiki/German_keyboard_layout
kind = self.keyboard_kind.lower()
if (kind in ('german', 'spanish')
and actual_ch in ":;"
and 'Shift' in mods
and 'Alt' not in mods and 'Control' not in mods
):
mods.remove('Shift')
elif kind == 'us-international':
pass # To do.
#
# Handle Alt-Ctrl modifiers for chars whose that are not ascii.
# Testing: Alt-Ctrl-E is '€'.
if ord(actual_ch) > 127 and 'Alt' in mods and 'Control' in mods:
return removeAltCtrl(mods)
return mods
#@+node:ekr.20180417161548.1: *5* filter.doLateTweaks
def doLateTweaks(self, binding, ch):
"""Make final tweaks. g.KeyStroke does other tweaks later."""
#
# These are needed because ch is separate from binding.
if ch == '\r':
ch = '\n'
if binding == 'Escape':
ch = 'Escape'
#
# Adjust the case of the binding string (for the minibuffer).
if len(ch) == 1 and len(binding) == 1 and ch.isalpha() and binding.isalpha():
if ch != binding:
binding = ch
return binding, ch
#@+node:ekr.20180419160958.1: *5* filter.doMacTweaks
def doMacTweaks(self, actual_ch, ch, mods):
"""Replace MacOS Alt characters."""
if not g.isMac:
return actual_ch, ch, mods
if ch == 'Backspace':
# On the Mac, the reported char can be DEL (7F)
return '\b', ch, mods
if len(mods) == 1 and mods[0] == 'Alt':
# Patch provided by resi147.
# See the thread: special characters in MacOSX, like '@'.
mac_d = {
'/': '\\',
'5': '[',
'6': ']',
'7': '|',
'8': '{',
'9': '}',
'e': '€',
'l': '@',
}
if ch.lower() in mac_d:
# Ignore the case.
actual_ch = ch = g.checkUnicode(mac_d.get(ch.lower()))
mods = []
return actual_ch, ch, mods
#@+node:ekr.20110605121601.18544: *5* filter.qtKey
def qtKey(self, event):
"""
Return the components of a Qt key event.
Modifiers are handled separately.
Return (keynum, text, toString, ch).
keynum: event.key()
ch: chr(keynum) or '' if there is an exception.
toString:
For special keys: made-up spelling that become part of the setting.
For all others: QtGui.QKeySequence(keynum).toString()
text: event.text()
"""
text, toString, ch = '', '', '' # Defaults.
#
# Leo 6.4: Test keynum's directly.
# The values are the same in Qt4, Qt5, Qt6.
keynum = event.key()
if keynum in (
0x01000020, # Key_Shift
0x01000021, # Key_Control
0x01000022, # Key_Meta
0x01000023, # Key_Alt
0x01001103, # Key_AltGr
0x01000024, # Key_CapsLock
):
# Disallow bare modifiers.
return keynum, text, toString, ch
#
# Compute toString and ch.
text = event.text() # This is the unicode character!
toString = QtGui.QKeySequence(keynum).toString()
#
# #1244461: Numpad 'Enter' key does not work in minibuffer
if toString == 'Enter':
toString = 'Return'
if toString == 'Esc':
toString = 'Escape'
try:
ch = chr(keynum)
except ValueError:
pass
return keynum, text, toString, ch
#@+node:ekr.20120204061120.10084: *5* filter.qtMods
def qtMods(self, event):
"""Return the text version of the modifiers of the key event."""
modifiers = event.modifiers()
mod_table = (
(KeyboardModifier.AltModifier, 'Alt'),
(KeyboardModifier.ControlModifier, 'Control'),
(KeyboardModifier.MetaModifier, 'Meta'),
(KeyboardModifier.ShiftModifier, 'Shift'),
# #1448: Replacing this by 'Key' would make separate keypad bindings impossible.
(KeyboardModifier.KeypadModifier, 'KeyPad'),
)
# pylint: disable=superfluous-parens.
mods = [b for a, b in mod_table if (modifiers & a)]
return mods
#@+node:ekr.20140907103315.18767: *3* filter.Tracing
#@+node:ekr.20190922075339.1: *4* filter.traceKeys
def traceKeys(self, obj, event):
if g.unitTesting:
return
e = QtCore.QEvent
key_events = {
e.Type.KeyPress: 'key-press', # 6
e.Type.KeyRelease: 'key-release', # 7
e.Type.Shortcut: 'shortcut', # 117
e.Type.ShortcutOverride: 'shortcut-override', # 51
}
kind = key_events.get(event.type())
if kind:
mods = ','.join(self.qtMods(event))
g.trace(f"{kind:>20}: {mods:>7} {event.text()!r}")
#@+node:ekr.20110605121601.18548: *4* filter.traceEvent
def traceEvent(self, obj, event):
if g.unitTesting:
return
# http://qt-project.org/doc/qt-4.8/qevent.html#properties
exclude_names = ('tree', 'log', 'body', 'minibuffer')
traceActivate = True
traceFocus = False
traceHide = False
traceHover = False
traceKey = False
traceLayout = False
traceMouse = False
tracePaint = False
traceUpdate = False
c, e = self.c, QtCore.QEvent
eventType = event.type()
# http://doc.qt.io/qt-5/qevent.html
show: list[Any] = []
ignore = [
e.Type.MetaCall, # 43
e.Type.Timer, # 1
e.Type.ToolTip, # 110
]
activate_events = (
(e.Type.Close, 'close'), # 19
(e.Type.WindowActivate, 'window-activate'), # 24
(e.Type.WindowBlocked, 'window-blocked'), # 103
(e.Type.WindowUnblocked, 'window-unblocked'), # 104
(e.Type.WindowDeactivate, 'window-deactivate'), # 25
)
focus_events = [
(e.Type.Enter, 'enter'), # 10
(e.Type.Leave, 'leave'), # 11
(e.Type.FocusIn, 'focus-in'), # 8
(e.Type.FocusOut, 'focus-out'), # 9
(e.Type.ShowToParent, 'show-to-parent'), # 26
]
if hasattr(e, 'FocusAboutToChange'):
# pylint: disable=no-member
focus_events.extend([
(e.Type.FocusAboutToChange, 'focus-about-to-change'), # 23
])
hide_events = (
(e.Type.Hide, 'hide'), # 18
(e.Type.HideToParent, 'hide-to-parent'), # 27
# (e.Type.LeaveEditFocus,'leave-edit-focus'), # 151
(e.Type.Show, 'show'), # 17
)
hover_events = (
(e.Type.HoverEnter, 'hover-enter'), # 127
(e.Type.HoverLeave, 'hover-leave'), # 128
(e.Type.HoverMove, 'hover-move'), # 129
)
key_events = [
(e.Type.KeyPress, 'key-press'), # 6
(e.Type.KeyRelease, 'key-release'), # 7
(e.Type.Shortcut, 'shortcut'), # 117
(e.Type.ShortcutOverride, 'shortcut-override'), # 51
]
if hasattr(e, 'InputMethodQuery'):
# pylint: disable=no-member
key_events.extend([
(e.Type.InputMethodQuery, 'input-method-query'), # 207
])
layout_events = [
(e.Type.ChildAdded, 'child-added'), # 68
(e.Type.ChildRemoved, 'child-removed'), # 71
(e.Type.DynamicPropertyChange, 'dynamic-property-change'), # 170
(e.Type.FontChange, 'font-change'), # 97
(e.Type.LayoutRequest, 'layout-request'), # 76
(e.Type.Move, 'move'), # 13 widget's position changed.
(e.Type.Resize, 'resize'), # 14
(e.Type.StyleChange, 'style-change'), # 100
(e.Type.ZOrderChange, 'z-order-change'), # 126
]
if hasattr(e, 'CloseSoftwareInputPanel'):
layout_events.extend([
(e.Type.CloseSoftwareInputPanel, 'close-sip'), # 200
])
mouse_events = (
(e.Type.MouseMove, 'mouse-move'), # 155
(e.Type.MouseButtonPress, 'mouse-press'), # 2
(e.Type.MouseButtonRelease, 'mouse-release'), # 3
(e.Type.Wheel, 'mouse-wheel'), # 31
)
paint_events = [
(e.Type.ChildPolished, 'child-polished'), # 69
(e.Type.PaletteChange, 'palette-change'), # 39
(e.Type.ParentChange, 'parent-change'), # 21
(e.Type.Paint, 'paint'), # 12
(e.Type.Polish, 'polish'), # 75
(e.Type.PolishRequest, 'polish-request'), # 74
]
if hasattr(e, 'RequestSoftwareInputPanel'):
paint_events.extend([
(e.Type.RequestSoftwareInputPanel, 'sip'), # 199
])
update_events = (
(e.Type.UpdateLater, 'update-later'), # 78
(e.Type.UpdateRequest, 'update'), # 77
)
option_table = (
(traceActivate, activate_events),
(traceFocus, focus_events),
(traceHide, hide_events),
(traceHover, hover_events),
(traceKey, key_events),
(traceLayout, layout_events),
(traceMouse, mouse_events),
(tracePaint, paint_events),
(traceUpdate, update_events),
)
for option, table in option_table:
if option:
show.extend(table)
else:
for n, tag in table:
ignore.append(n)
for val, kind in show:
if self.tag in exclude_names:
return
if eventType == val:
tag = (
obj.objectName() if hasattr(obj, 'objectName')
else f"id: {id(obj)}, {obj.__class__.__name__}"
)
if traceKey:
g.trace(
f"{kind:>25} {self.tag:25} "
f"in-state: {repr(c.k and c.k.inState()):5} obj: {tag}")
return
if eventType not in ignore:
tag = (
obj.objectName() if hasattr(obj, 'objectName')
else f"id: {id(obj)}, {obj.__class__.__name__}"
)
g.trace(f"{eventType:>25} {self.tag:25} {tag}")
#@+node:ekr.20131121050226.16331: *4* filter.traceWidget
def traceWidget(self, event):
"""Show unexpected events in unusual widgets."""
verbose = False # Not good for --trace-events
e = QtCore.QEvent
assert isinstance(event, QtCore.QEvent)
et = event.type()
# http://qt-project.org/doc/qt-4.8/qevent.html#properties
ignore_d = {
e.Type.ChildAdded: 'child-added', # 68
e.Type.ChildPolished: 'child-polished', # 69
e.Type.ChildRemoved: 'child-removed', # 71
e.Type.Close: 'close', # 19
e.Type.CloseSoftwareInputPanel: 'close-software-input-panel', # 200
178: 'contents-rect-change', # 178
# e.Type.DeferredDelete:'deferred-delete', # 52 (let's trace this)
e.Type.DynamicPropertyChange: 'dynamic-property-change', # 170
e.Type.FocusOut: 'focus-out', # 9 (We don't care if we are leaving an unknown widget)
e.Type.FontChange: 'font-change', # 97
e.Type.Hide: 'hide', # 18
e.Type.HideToParent: 'hide-to-parent', # 27
e.Type.HoverEnter: 'hover-enter', # 127
e.Type.HoverLeave: 'hover-leave', # 128
e.Type.HoverMove: 'hover-move', # 129
e.Type.KeyPress: 'key-press', # 6
e.Type.KeyRelease: 'key-release', # 7
e.Type.LayoutRequest: 'layout-request', # 76
e.Type.Leave: 'leave', # 11 (We don't care if we are leaving an unknown widget)
# e.Type.LeaveEditFocus:'leave-edit-focus', # 151
e.Type.MetaCall: 'meta-call', # 43
e.Type.Move: 'move', # 13 widget's position changed.
e.Type.MouseButtonPress: 'mouse-button-press', # 2
e.Type.MouseButtonRelease: 'mouse-button-release', # 3
e.Type.MouseButtonDblClick: 'mouse-button-double-click', # 4
e.Type.MouseMove: 'mouse-move', # 5
e.Type.MouseTrackingChange: 'mouse-tracking-change', # 105
e.Type.Paint: 'paint', # 12
e.Type.PaletteChange: 'palette-change', # 39
e.Type.ParentChange: 'parent-change', # 21
e.Type.Polish: 'polish', # 75
e.Type.PolishRequest: 'polish-request', # 74
e.Type.RequestSoftwareInputPanel: 'request-software-input-panel', # 199
e.Type.Resize: 'resize', # 14
e.Type.ShortcutOverride: 'shortcut-override', # 51
e.Type.Show: 'show', # 17
e.Type.ShowToParent: 'show-to-parent', # 26
e.Type.StyleChange: 'style-change', # 100
e.Type.StatusTip: 'status-tip', # 112
e.Type.Timer: 'timer', # 1
e.Type.ToolTip: 'tool-tip', # 110
e.Type.WindowBlocked: 'window-blocked', # 103
e.Type.WindowUnblocked: 'window-unblocked', # 104
e.Type.ZOrderChange: 'z-order-change', # 126
}
focus_d = {
e.Type.DeferredDelete: 'deferred-delete', # 52
e.Type.Enter: 'enter', # 10
e.Type.FocusIn: 'focus-in', # 8
e.Type.WindowActivate: 'window-activate', # 24
e.Type.WindowDeactivate: 'window-deactivate', # 25
}
line_edit_ignore_d = {
e.Type.Enter: 'enter', # 10 (mouse over)
e.Type.Leave: 'leave', # 11 (mouse over)
e.Type.FocusOut: 'focus-out', # 9
e.Type.WindowActivate: 'window-activate', # 24
e.Type.WindowDeactivate: 'window-deactivate', # 25
}
none_ignore_d = {
e.Type.Enter: 'enter', # 10 (mouse over)
e.Type.Leave: 'leave', # 11 (mouse over)
e.Type.FocusOut: 'focus-out', # 9
e.Type.WindowActivate: 'window-activate', # 24
}
if et in ignore_d:
return
w = QtWidgets.QApplication.focusWidget()
if verbose: # Too verbose for --trace-events.
for d in (ignore_d, focus_d, line_edit_ignore_d, none_ignore_d):
t = d.get(et)
if t:
break
else:
t = et
g.trace(f"{t:20} {w.__class__}")
return
if w is None:
if et not in none_ignore_d:
t = focus_d.get(et) or et
g.trace(f"None {t}")
if isinstance(w, QtWidgets.QPushButton):
return
if isinstance(w, QtWidgets.QLineEdit):
if et not in line_edit_ignore_d:
t = focus_d.get(et) or et
if hasattr(w, 'objectName'):
tag = w.objectName()
else:
tag = f"id: {id(w)}, {w.__class__.__name__}"
g.trace(f"{t:20} {tag}")
return
t = focus_d.get(et) or et
if hasattr(w, 'objectName'):
tag = w.objectName()
else:
tag = f"id: {id(w)}, {w.__class__.__name__}"
g.trace(f"{t:20} {tag}")
#@-others
#@-others
#@@language python
#@@tabwidth -4
#@@pagewidth 70
#@-leo
| eventFilter | identifier_name |
list.rs | // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License in the LICENSE-APACHE file or at:
// https://www.apache.org/licenses/LICENSE-2.0
//! Dynamic widgets
use std::iter;
use crate::draw::{DrawHandle, SizeHandle};
use crate::event::{Event, Handler, Manager, ManagerState, Response};
use crate::geom::Coord;
use crate::layout::{
self, AxisInfo, Margins, RowPositionSolver, RulesSetter, RulesSolver, SizeRules,
}; | use kas::geom::Rect;
/// A generic row widget
///
/// See documentation of [`List`] type.
pub type Row<W> = List<Horizontal, W>;
/// A generic column widget
///
/// See documentation of [`List`] type.
pub type Column<W> = List<Vertical, W>;
/// A row of boxed widgets
///
/// This is parameterised over handler message type.
///
/// See documentation of [`List`] type.
pub type BoxRow<M> = BoxList<Horizontal, M>;
/// A column of boxed widgets
///
/// This is parameterised over handler message type.
///
/// See documentation of [`List`] type.
pub type BoxColumn<M> = BoxList<Vertical, M>;
/// A row/column of boxed widgets
///
/// This is parameterised over directionality and handler message type.
///
/// See documentation of [`List`] type.
pub type BoxList<D, M> = List<D, Box<dyn Handler<Msg = M>>>;
/// A generic row/column widget
///
/// This type is generic over both directionality and the type of child widgets.
/// Essentially, it is a [`Vec`] which also implements the [`Widget`] trait.
///
/// [`Row`] and [`Column`] are parameterisations with set directionality.
///
/// [`BoxList`] (and its derivatives [`BoxRow`], [`BoxColumn`]) parameterise
/// `W = Box<dyn Widget>`, thus supporting individually boxed child widgets.
/// This allows use of multiple types of child widget at the cost of extra
/// allocation, and requires dynamic dispatch of methods.
///
/// Configuring and resizing elements is O(n) in the number of children.
/// Drawing and event handling is O(log n) in the number of children (assuming
/// only a small number are visible at any one time).
///
/// For fixed configurations of child widgets, [`make_widget`] can be used
/// instead. [`make_widget`] has the advantage that it can support child widgets
/// of multiple types without allocation and via static dispatch, but the
/// disadvantage that drawing and event handling are O(n) in the number of
/// children.
///
/// [`make_widget`]: ../macros/index.html#the-make_widget-macro
#[derive(Clone, Default, Debug)]
pub struct List<D: Directional, W: Widget> {
core: CoreData,
widgets: Vec<W>,
data: layout::DynRowStorage,
direction: D,
}
// We implement this manually, because the derive implementation cannot handle
// vectors of child widgets.
impl<D: Directional, W: Widget> WidgetCore for List<D, W> {
#[inline]
fn core_data(&self) -> &CoreData {
&self.core
}
#[inline]
fn core_data_mut(&mut self) -> &mut CoreData {
&mut self.core
}
#[inline]
fn widget_name(&self) -> &'static str {
"List"
}
#[inline]
fn as_widget(&self) -> &dyn Widget {
self
}
#[inline]
fn as_widget_mut(&mut self) -> &mut dyn Widget {
self
}
#[inline]
fn len(&self) -> usize {
self.widgets.len()
}
#[inline]
fn get(&self, index: usize) -> Option<&dyn Widget> {
self.widgets.get(index).map(|w| w.as_widget())
}
#[inline]
fn get_mut(&mut self, index: usize) -> Option<&mut dyn Widget> {
self.widgets.get_mut(index).map(|w| w.as_widget_mut())
}
fn walk(&self, f: &mut dyn FnMut(&dyn Widget)) {
for child in &self.widgets {
child.walk(f);
}
f(self)
}
fn walk_mut(&mut self, f: &mut dyn FnMut(&mut dyn Widget)) {
for child in &mut self.widgets {
child.walk_mut(f);
}
f(self)
}
}
impl<D: Directional, W: Widget> Widget for List<D, W> {}
impl<D: Directional, W: Widget> Layout for List<D, W> {
fn size_rules(&mut self, size_handle: &mut dyn SizeHandle, axis: AxisInfo) -> SizeRules {
let mut solver = layout::RowSolver::<Vec<u32>, _>::new(
axis,
(self.direction, self.widgets.len()),
&mut self.data,
);
for (n, child) in self.widgets.iter_mut().enumerate() {
solver.for_child(&mut self.data, n, |axis| {
child.size_rules(size_handle, axis)
});
}
solver.finish(&mut self.data, iter::empty(), iter::empty())
}
fn set_rect(&mut self, size_handle: &mut dyn SizeHandle, rect: Rect, _: AlignHints) {
self.core.rect = rect;
let mut setter = layout::RowSetter::<D, Vec<u32>, _>::new(
rect,
Margins::ZERO,
(self.direction, self.widgets.len()),
&mut self.data,
);
for (n, child) in self.widgets.iter_mut().enumerate() {
let align = AlignHints::default();
child.set_rect(size_handle, setter.child_rect(n), align);
}
}
fn find_id(&self, coord: Coord) -> Option<WidgetId> {
let solver = RowPositionSolver::new(self.direction);
if let Some(child) = solver.find_child(&self.widgets, coord) {
return child.find_id(coord);
}
// We should return Some(self), but hit a borrow check error.
// This should however be unreachable anyway.
None
}
fn draw(&self, draw_handle: &mut dyn DrawHandle, mgr: &ManagerState) {
let solver = RowPositionSolver::new(self.direction);
solver.for_children(&self.widgets, draw_handle.target_rect(), |w| {
w.draw(draw_handle, mgr)
});
}
}
impl<D: Directional, W: Widget + Handler> Handler for List<D, W> {
type Msg = <W as Handler>::Msg;
fn handle(&mut self, mgr: &mut Manager, id: WidgetId, event: Event) -> Response<Self::Msg> {
for child in &mut self.widgets {
if id <= child.id() {
return child.handle(mgr, id, event);
}
}
debug_assert!(id == self.id(), "Handler::handle: bad WidgetId");
Response::Unhandled(event)
}
}
impl<D: Directional + Default, W: Widget> List<D, W> {
/// Construct a new instance
///
/// This constructor is available where the direction is determined by the
/// type: for `D: Directional + Default`. In other cases, use
/// [`List::new_with_direction`].
pub fn new(widgets: Vec<W>) -> Self {
List {
core: Default::default(),
widgets,
data: Default::default(),
direction: Default::default(),
}
}
}
impl<D: Directional, W: Widget> List<D, W> {
/// Construct a new instance with explicit direction
pub fn new_with_direction(direction: D, widgets: Vec<W>) -> Self {
List {
core: Default::default(),
widgets,
data: Default::default(),
direction,
}
}
/// True if there are no child widgets
pub fn is_empty(&self) -> bool {
self.widgets.is_empty()
}
/// Returns the number of child widgets
pub fn len(&self) -> usize {
self.widgets.len()
}
/// Returns the number of elements the vector can hold without reallocating.
pub fn capacity(&self) -> usize {
self.widgets.capacity()
}
/// Reserves capacity for at least `additional` more elements to be inserted
/// into the list. See documentation of [`Vec::reserve`].
pub fn reserve(&mut self, additional: usize) {
self.widgets.reserve(additional);
}
/// Remove all child widgets
///
/// Triggers a [reconfigure action](Manager::send_action) if any widget is
/// removed.
pub fn clear(&mut self, mgr: &mut Manager) {
if !self.widgets.is_empty() {
mgr.send_action(TkAction::Reconfigure);
}
self.widgets.clear();
}
/// Append a child widget
///
/// Triggers a [reconfigure action](Manager::send_action).
pub fn push(&mut self, mgr: &mut Manager, widget: W) {
self.widgets.push(widget);
mgr.send_action(TkAction::Reconfigure);
}
/// Remove the last child widget
///
/// Returns `None` if there are no children. Otherwise, this
/// triggers a reconfigure before the next draw operation.
///
/// Triggers a [reconfigure action](Manager::send_action) if any widget is
/// removed.
pub fn pop(&mut self, mgr: &mut Manager) -> Option<W> {
if !self.widgets.is_empty() {
mgr.send_action(TkAction::Reconfigure);
}
self.widgets.pop()
}
/// Inserts a child widget position `index`
///
/// Panics if `index > len`.
///
/// Triggers a [reconfigure action](Manager::send_action).
pub fn insert(&mut self, mgr: &mut Manager, index: usize, widget: W) {
self.widgets.insert(index, widget);
mgr.send_action(TkAction::Reconfigure);
}
/// Removes the child widget at position `index`
///
/// Panics if `index` is out of bounds.
///
/// Triggers a [reconfigure action](Manager::send_action).
pub fn remove(&mut self, mgr: &mut Manager, index: usize) -> W {
let r = self.widgets.remove(index);
mgr.send_action(TkAction::Reconfigure);
r
}
/// Replace the child at `index`
///
/// Panics if `index` is out of bounds.
///
/// Triggers a [reconfigure action](Manager::send_action).
// TODO: in theory it is possible to avoid a reconfigure where both widgets
// have no children and have compatible size. Is this a good idea and can
// we somehow test "has compatible size"?
pub fn replace(&mut self, mgr: &mut Manager, index: usize, mut widget: W) -> W {
std::mem::swap(&mut widget, &mut self.widgets[index]);
mgr.send_action(TkAction::Reconfigure);
widget
}
/// Append child widgets from an iterator
///
/// Triggers a [reconfigure action](Manager::send_action) if any widgets
/// are added.
pub fn extend<T: IntoIterator<Item = W>>(&mut self, mgr: &mut Manager, iter: T) {
let len = self.widgets.len();
self.widgets.extend(iter);
if len != self.widgets.len() {
mgr.send_action(TkAction::Reconfigure);
}
}
/// Resize, using the given closure to construct new widgets
///
/// Triggers a [reconfigure action](Manager::send_action).
pub fn resize_with<F: Fn(usize) -> W>(&mut self, mgr: &mut Manager, len: usize, f: F) {
let l0 = self.widgets.len();
if l0 == len {
return;
} else if l0 > len {
self.widgets.truncate(len);
} else {
self.widgets.reserve(len);
for i in l0..len {
self.widgets.push(f(i));
}
}
mgr.send_action(TkAction::Reconfigure);
}
/// Retain only widgets satisfying predicate `f`
///
/// See documentation of [`Vec::retain`].
///
/// Triggers a [reconfigure action](Manager::send_action) if any widgets
/// are removed.
pub fn retain<F: FnMut(&W) -> bool>(&mut self, mgr: &mut Manager, f: F) {
let len = self.widgets.len();
self.widgets.retain(f);
if len != self.widgets.len() {
mgr.send_action(TkAction::Reconfigure);
}
}
} | use crate::{AlignHints, Directional, Horizontal, Vertical};
use crate::{CoreData, Layout, TkAction, Widget, WidgetCore, WidgetId}; | random_line_split |
list.rs | // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License in the LICENSE-APACHE file or at:
// https://www.apache.org/licenses/LICENSE-2.0
//! Dynamic widgets
use std::iter;
use crate::draw::{DrawHandle, SizeHandle};
use crate::event::{Event, Handler, Manager, ManagerState, Response};
use crate::geom::Coord;
use crate::layout::{
self, AxisInfo, Margins, RowPositionSolver, RulesSetter, RulesSolver, SizeRules,
};
use crate::{AlignHints, Directional, Horizontal, Vertical};
use crate::{CoreData, Layout, TkAction, Widget, WidgetCore, WidgetId};
use kas::geom::Rect;
/// A generic row widget
///
/// See documentation of [`List`] type.
pub type Row<W> = List<Horizontal, W>;
/// A generic column widget
///
/// See documentation of [`List`] type.
pub type Column<W> = List<Vertical, W>;
/// A row of boxed widgets
///
/// This is parameterised over handler message type.
///
/// See documentation of [`List`] type.
pub type BoxRow<M> = BoxList<Horizontal, M>;
/// A column of boxed widgets
///
/// This is parameterised over handler message type.
///
/// See documentation of [`List`] type.
pub type BoxColumn<M> = BoxList<Vertical, M>;
/// A row/column of boxed widgets
///
/// This is parameterised over directionality and handler message type.
///
/// See documentation of [`List`] type.
pub type BoxList<D, M> = List<D, Box<dyn Handler<Msg = M>>>;
/// A generic row/column widget
///
/// This type is generic over both directionality and the type of child widgets.
/// Essentially, it is a [`Vec`] which also implements the [`Widget`] trait.
///
/// [`Row`] and [`Column`] are parameterisations with set directionality.
///
/// [`BoxList`] (and its derivatives [`BoxRow`], [`BoxColumn`]) parameterise
/// `W = Box<dyn Widget>`, thus supporting individually boxed child widgets.
/// This allows use of multiple types of child widget at the cost of extra
/// allocation, and requires dynamic dispatch of methods.
///
/// Configuring and resizing elements is O(n) in the number of children.
/// Drawing and event handling is O(log n) in the number of children (assuming
/// only a small number are visible at any one time).
///
/// For fixed configurations of child widgets, [`make_widget`] can be used
/// instead. [`make_widget`] has the advantage that it can support child widgets
/// of multiple types without allocation and via static dispatch, but the
/// disadvantage that drawing and event handling are O(n) in the number of
/// children.
///
/// [`make_widget`]: ../macros/index.html#the-make_widget-macro
#[derive(Clone, Default, Debug)]
pub struct List<D: Directional, W: Widget> {
core: CoreData,
widgets: Vec<W>,
data: layout::DynRowStorage,
direction: D,
}
// We implement this manually, because the derive implementation cannot handle
// vectors of child widgets.
impl<D: Directional, W: Widget> WidgetCore for List<D, W> {
#[inline]
fn core_data(&self) -> &CoreData {
&self.core
}
#[inline]
fn core_data_mut(&mut self) -> &mut CoreData {
&mut self.core
}
#[inline]
fn widget_name(&self) -> &'static str {
"List"
}
#[inline]
fn as_widget(&self) -> &dyn Widget {
self
}
#[inline]
fn as_widget_mut(&mut self) -> &mut dyn Widget {
self
}
#[inline]
fn len(&self) -> usize {
self.widgets.len()
}
#[inline]
fn get(&self, index: usize) -> Option<&dyn Widget> {
self.widgets.get(index).map(|w| w.as_widget())
}
#[inline]
fn get_mut(&mut self, index: usize) -> Option<&mut dyn Widget> {
self.widgets.get_mut(index).map(|w| w.as_widget_mut())
}
fn walk(&self, f: &mut dyn FnMut(&dyn Widget)) {
for child in &self.widgets {
child.walk(f);
}
f(self)
}
fn walk_mut(&mut self, f: &mut dyn FnMut(&mut dyn Widget)) {
for child in &mut self.widgets {
child.walk_mut(f);
}
f(self)
}
}
impl<D: Directional, W: Widget> Widget for List<D, W> {}
impl<D: Directional, W: Widget> Layout for List<D, W> {
fn size_rules(&mut self, size_handle: &mut dyn SizeHandle, axis: AxisInfo) -> SizeRules {
let mut solver = layout::RowSolver::<Vec<u32>, _>::new(
axis,
(self.direction, self.widgets.len()),
&mut self.data,
);
for (n, child) in self.widgets.iter_mut().enumerate() {
solver.for_child(&mut self.data, n, |axis| {
child.size_rules(size_handle, axis)
});
}
solver.finish(&mut self.data, iter::empty(), iter::empty())
}
fn set_rect(&mut self, size_handle: &mut dyn SizeHandle, rect: Rect, _: AlignHints) {
self.core.rect = rect;
let mut setter = layout::RowSetter::<D, Vec<u32>, _>::new(
rect,
Margins::ZERO,
(self.direction, self.widgets.len()),
&mut self.data,
);
for (n, child) in self.widgets.iter_mut().enumerate() {
let align = AlignHints::default();
child.set_rect(size_handle, setter.child_rect(n), align);
}
}
fn find_id(&self, coord: Coord) -> Option<WidgetId> {
let solver = RowPositionSolver::new(self.direction);
if let Some(child) = solver.find_child(&self.widgets, coord) {
return child.find_id(coord);
}
// We should return Some(self), but hit a borrow check error.
// This should however be unreachable anyway.
None
}
fn draw(&self, draw_handle: &mut dyn DrawHandle, mgr: &ManagerState) {
let solver = RowPositionSolver::new(self.direction);
solver.for_children(&self.widgets, draw_handle.target_rect(), |w| {
w.draw(draw_handle, mgr)
});
}
}
impl<D: Directional, W: Widget + Handler> Handler for List<D, W> {
type Msg = <W as Handler>::Msg;
fn handle(&mut self, mgr: &mut Manager, id: WidgetId, event: Event) -> Response<Self::Msg> {
for child in &mut self.widgets {
if id <= child.id() {
return child.handle(mgr, id, event);
}
}
debug_assert!(id == self.id(), "Handler::handle: bad WidgetId");
Response::Unhandled(event)
}
}
impl<D: Directional + Default, W: Widget> List<D, W> {
/// Construct a new instance
///
/// This constructor is available where the direction is determined by the
/// type: for `D: Directional + Default`. In other cases, use
/// [`List::new_with_direction`].
pub fn new(widgets: Vec<W>) -> Self {
List {
core: Default::default(),
widgets,
data: Default::default(),
direction: Default::default(),
}
}
}
impl<D: Directional, W: Widget> List<D, W> {
/// Construct a new instance with explicit direction
pub fn new_with_direction(direction: D, widgets: Vec<W>) -> Self {
List {
core: Default::default(),
widgets,
data: Default::default(),
direction,
}
}
/// True if there are no child widgets
pub fn is_empty(&self) -> bool {
self.widgets.is_empty()
}
/// Returns the number of child widgets
pub fn len(&self) -> usize {
self.widgets.len()
}
/// Returns the number of elements the vector can hold without reallocating.
pub fn capacity(&self) -> usize {
self.widgets.capacity()
}
/// Reserves capacity for at least `additional` more elements to be inserted
/// into the list. See documentation of [`Vec::reserve`].
pub fn reserve(&mut self, additional: usize) {
self.widgets.reserve(additional);
}
/// Remove all child widgets
///
/// Triggers a [reconfigure action](Manager::send_action) if any widget is
/// removed.
pub fn | (&mut self, mgr: &mut Manager) {
if !self.widgets.is_empty() {
mgr.send_action(TkAction::Reconfigure);
}
self.widgets.clear();
}
/// Append a child widget
///
/// Triggers a [reconfigure action](Manager::send_action).
pub fn push(&mut self, mgr: &mut Manager, widget: W) {
self.widgets.push(widget);
mgr.send_action(TkAction::Reconfigure);
}
/// Remove the last child widget
///
/// Returns `None` if there are no children. Otherwise, this
/// triggers a reconfigure before the next draw operation.
///
/// Triggers a [reconfigure action](Manager::send_action) if any widget is
/// removed.
pub fn pop(&mut self, mgr: &mut Manager) -> Option<W> {
if !self.widgets.is_empty() {
mgr.send_action(TkAction::Reconfigure);
}
self.widgets.pop()
}
/// Inserts a child widget position `index`
///
/// Panics if `index > len`.
///
/// Triggers a [reconfigure action](Manager::send_action).
pub fn insert(&mut self, mgr: &mut Manager, index: usize, widget: W) {
self.widgets.insert(index, widget);
mgr.send_action(TkAction::Reconfigure);
}
/// Removes the child widget at position `index`
///
/// Panics if `index` is out of bounds.
///
/// Triggers a [reconfigure action](Manager::send_action).
pub fn remove(&mut self, mgr: &mut Manager, index: usize) -> W {
let r = self.widgets.remove(index);
mgr.send_action(TkAction::Reconfigure);
r
}
/// Replace the child at `index`
///
/// Panics if `index` is out of bounds.
///
/// Triggers a [reconfigure action](Manager::send_action).
// TODO: in theory it is possible to avoid a reconfigure where both widgets
// have no children and have compatible size. Is this a good idea and can
// we somehow test "has compatible size"?
pub fn replace(&mut self, mgr: &mut Manager, index: usize, mut widget: W) -> W {
std::mem::swap(&mut widget, &mut self.widgets[index]);
mgr.send_action(TkAction::Reconfigure);
widget
}
/// Append child widgets from an iterator
///
/// Triggers a [reconfigure action](Manager::send_action) if any widgets
/// are added.
pub fn extend<T: IntoIterator<Item = W>>(&mut self, mgr: &mut Manager, iter: T) {
let len = self.widgets.len();
self.widgets.extend(iter);
if len != self.widgets.len() {
mgr.send_action(TkAction::Reconfigure);
}
}
/// Resize, using the given closure to construct new widgets
///
/// Triggers a [reconfigure action](Manager::send_action).
pub fn resize_with<F: Fn(usize) -> W>(&mut self, mgr: &mut Manager, len: usize, f: F) {
let l0 = self.widgets.len();
if l0 == len {
return;
} else if l0 > len {
self.widgets.truncate(len);
} else {
self.widgets.reserve(len);
for i in l0..len {
self.widgets.push(f(i));
}
}
mgr.send_action(TkAction::Reconfigure);
}
/// Retain only widgets satisfying predicate `f`
///
/// See documentation of [`Vec::retain`].
///
/// Triggers a [reconfigure action](Manager::send_action) if any widgets
/// are removed.
pub fn retain<F: FnMut(&W) -> bool>(&mut self, mgr: &mut Manager, f: F) {
let len = self.widgets.len();
self.widgets.retain(f);
if len != self.widgets.len() {
mgr.send_action(TkAction::Reconfigure);
}
}
}
| clear | identifier_name |
list.rs | // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License in the LICENSE-APACHE file or at:
// https://www.apache.org/licenses/LICENSE-2.0
//! Dynamic widgets
use std::iter;
use crate::draw::{DrawHandle, SizeHandle};
use crate::event::{Event, Handler, Manager, ManagerState, Response};
use crate::geom::Coord;
use crate::layout::{
self, AxisInfo, Margins, RowPositionSolver, RulesSetter, RulesSolver, SizeRules,
};
use crate::{AlignHints, Directional, Horizontal, Vertical};
use crate::{CoreData, Layout, TkAction, Widget, WidgetCore, WidgetId};
use kas::geom::Rect;
/// A generic row widget
///
/// See documentation of [`List`] type.
pub type Row<W> = List<Horizontal, W>;
/// A generic column widget
///
/// See documentation of [`List`] type.
pub type Column<W> = List<Vertical, W>;
/// A row of boxed widgets
///
/// This is parameterised over handler message type.
///
/// See documentation of [`List`] type.
pub type BoxRow<M> = BoxList<Horizontal, M>;
/// A column of boxed widgets
///
/// This is parameterised over handler message type.
///
/// See documentation of [`List`] type.
pub type BoxColumn<M> = BoxList<Vertical, M>;
/// A row/column of boxed widgets
///
/// This is parameterised over directionality and handler message type.
///
/// See documentation of [`List`] type.
pub type BoxList<D, M> = List<D, Box<dyn Handler<Msg = M>>>;
/// A generic row/column widget
///
/// This type is generic over both directionality and the type of child widgets.
/// Essentially, it is a [`Vec`] which also implements the [`Widget`] trait.
///
/// [`Row`] and [`Column`] are parameterisations with set directionality.
///
/// [`BoxList`] (and its derivatives [`BoxRow`], [`BoxColumn`]) parameterise
/// `W = Box<dyn Widget>`, thus supporting individually boxed child widgets.
/// This allows use of multiple types of child widget at the cost of extra
/// allocation, and requires dynamic dispatch of methods.
///
/// Configuring and resizing elements is O(n) in the number of children.
/// Drawing and event handling is O(log n) in the number of children (assuming
/// only a small number are visible at any one time).
///
/// For fixed configurations of child widgets, [`make_widget`] can be used
/// instead. [`make_widget`] has the advantage that it can support child widgets
/// of multiple types without allocation and via static dispatch, but the
/// disadvantage that drawing and event handling are O(n) in the number of
/// children.
///
/// [`make_widget`]: ../macros/index.html#the-make_widget-macro
#[derive(Clone, Default, Debug)]
pub struct List<D: Directional, W: Widget> {
core: CoreData,
widgets: Vec<W>,
data: layout::DynRowStorage,
direction: D,
}
// We implement this manually, because the derive implementation cannot handle
// vectors of child widgets.
impl<D: Directional, W: Widget> WidgetCore for List<D, W> {
#[inline]
fn core_data(&self) -> &CoreData |
#[inline]
fn core_data_mut(&mut self) -> &mut CoreData {
&mut self.core
}
#[inline]
fn widget_name(&self) -> &'static str {
"List"
}
#[inline]
fn as_widget(&self) -> &dyn Widget {
self
}
#[inline]
fn as_widget_mut(&mut self) -> &mut dyn Widget {
self
}
#[inline]
fn len(&self) -> usize {
self.widgets.len()
}
#[inline]
fn get(&self, index: usize) -> Option<&dyn Widget> {
self.widgets.get(index).map(|w| w.as_widget())
}
#[inline]
fn get_mut(&mut self, index: usize) -> Option<&mut dyn Widget> {
self.widgets.get_mut(index).map(|w| w.as_widget_mut())
}
fn walk(&self, f: &mut dyn FnMut(&dyn Widget)) {
for child in &self.widgets {
child.walk(f);
}
f(self)
}
fn walk_mut(&mut self, f: &mut dyn FnMut(&mut dyn Widget)) {
for child in &mut self.widgets {
child.walk_mut(f);
}
f(self)
}
}
impl<D: Directional, W: Widget> Widget for List<D, W> {}
impl<D: Directional, W: Widget> Layout for List<D, W> {
fn size_rules(&mut self, size_handle: &mut dyn SizeHandle, axis: AxisInfo) -> SizeRules {
let mut solver = layout::RowSolver::<Vec<u32>, _>::new(
axis,
(self.direction, self.widgets.len()),
&mut self.data,
);
for (n, child) in self.widgets.iter_mut().enumerate() {
solver.for_child(&mut self.data, n, |axis| {
child.size_rules(size_handle, axis)
});
}
solver.finish(&mut self.data, iter::empty(), iter::empty())
}
fn set_rect(&mut self, size_handle: &mut dyn SizeHandle, rect: Rect, _: AlignHints) {
self.core.rect = rect;
let mut setter = layout::RowSetter::<D, Vec<u32>, _>::new(
rect,
Margins::ZERO,
(self.direction, self.widgets.len()),
&mut self.data,
);
for (n, child) in self.widgets.iter_mut().enumerate() {
let align = AlignHints::default();
child.set_rect(size_handle, setter.child_rect(n), align);
}
}
fn find_id(&self, coord: Coord) -> Option<WidgetId> {
let solver = RowPositionSolver::new(self.direction);
if let Some(child) = solver.find_child(&self.widgets, coord) {
return child.find_id(coord);
}
// We should return Some(self), but hit a borrow check error.
// This should however be unreachable anyway.
None
}
fn draw(&self, draw_handle: &mut dyn DrawHandle, mgr: &ManagerState) {
let solver = RowPositionSolver::new(self.direction);
solver.for_children(&self.widgets, draw_handle.target_rect(), |w| {
w.draw(draw_handle, mgr)
});
}
}
impl<D: Directional, W: Widget + Handler> Handler for List<D, W> {
type Msg = <W as Handler>::Msg;
fn handle(&mut self, mgr: &mut Manager, id: WidgetId, event: Event) -> Response<Self::Msg> {
for child in &mut self.widgets {
if id <= child.id() {
return child.handle(mgr, id, event);
}
}
debug_assert!(id == self.id(), "Handler::handle: bad WidgetId");
Response::Unhandled(event)
}
}
impl<D: Directional + Default, W: Widget> List<D, W> {
/// Construct a new instance
///
/// This constructor is available where the direction is determined by the
/// type: for `D: Directional + Default`. In other cases, use
/// [`List::new_with_direction`].
pub fn new(widgets: Vec<W>) -> Self {
List {
core: Default::default(),
widgets,
data: Default::default(),
direction: Default::default(),
}
}
}
impl<D: Directional, W: Widget> List<D, W> {
/// Construct a new instance with explicit direction
pub fn new_with_direction(direction: D, widgets: Vec<W>) -> Self {
List {
core: Default::default(),
widgets,
data: Default::default(),
direction,
}
}
/// True if there are no child widgets
pub fn is_empty(&self) -> bool {
self.widgets.is_empty()
}
/// Returns the number of child widgets
pub fn len(&self) -> usize {
self.widgets.len()
}
/// Returns the number of elements the vector can hold without reallocating.
pub fn capacity(&self) -> usize {
self.widgets.capacity()
}
/// Reserves capacity for at least `additional` more elements to be inserted
/// into the list. See documentation of [`Vec::reserve`].
pub fn reserve(&mut self, additional: usize) {
self.widgets.reserve(additional);
}
/// Remove all child widgets
///
/// Triggers a [reconfigure action](Manager::send_action) if any widget is
/// removed.
pub fn clear(&mut self, mgr: &mut Manager) {
if !self.widgets.is_empty() {
mgr.send_action(TkAction::Reconfigure);
}
self.widgets.clear();
}
/// Append a child widget
///
/// Triggers a [reconfigure action](Manager::send_action).
pub fn push(&mut self, mgr: &mut Manager, widget: W) {
self.widgets.push(widget);
mgr.send_action(TkAction::Reconfigure);
}
/// Remove the last child widget
///
/// Returns `None` if there are no children. Otherwise, this
/// triggers a reconfigure before the next draw operation.
///
/// Triggers a [reconfigure action](Manager::send_action) if any widget is
/// removed.
pub fn pop(&mut self, mgr: &mut Manager) -> Option<W> {
if !self.widgets.is_empty() {
mgr.send_action(TkAction::Reconfigure);
}
self.widgets.pop()
}
/// Inserts a child widget position `index`
///
/// Panics if `index > len`.
///
/// Triggers a [reconfigure action](Manager::send_action).
pub fn insert(&mut self, mgr: &mut Manager, index: usize, widget: W) {
self.widgets.insert(index, widget);
mgr.send_action(TkAction::Reconfigure);
}
/// Removes the child widget at position `index`
///
/// Panics if `index` is out of bounds.
///
/// Triggers a [reconfigure action](Manager::send_action).
pub fn remove(&mut self, mgr: &mut Manager, index: usize) -> W {
let r = self.widgets.remove(index);
mgr.send_action(TkAction::Reconfigure);
r
}
/// Replace the child at `index`
///
/// Panics if `index` is out of bounds.
///
/// Triggers a [reconfigure action](Manager::send_action).
// TODO: in theory it is possible to avoid a reconfigure where both widgets
// have no children and have compatible size. Is this a good idea and can
// we somehow test "has compatible size"?
pub fn replace(&mut self, mgr: &mut Manager, index: usize, mut widget: W) -> W {
std::mem::swap(&mut widget, &mut self.widgets[index]);
mgr.send_action(TkAction::Reconfigure);
widget
}
/// Append child widgets from an iterator
///
/// Triggers a [reconfigure action](Manager::send_action) if any widgets
/// are added.
pub fn extend<T: IntoIterator<Item = W>>(&mut self, mgr: &mut Manager, iter: T) {
let len = self.widgets.len();
self.widgets.extend(iter);
if len != self.widgets.len() {
mgr.send_action(TkAction::Reconfigure);
}
}
/// Resize, using the given closure to construct new widgets
///
/// Triggers a [reconfigure action](Manager::send_action).
pub fn resize_with<F: Fn(usize) -> W>(&mut self, mgr: &mut Manager, len: usize, f: F) {
let l0 = self.widgets.len();
if l0 == len {
return;
} else if l0 > len {
self.widgets.truncate(len);
} else {
self.widgets.reserve(len);
for i in l0..len {
self.widgets.push(f(i));
}
}
mgr.send_action(TkAction::Reconfigure);
}
/// Retain only widgets satisfying predicate `f`
///
/// See documentation of [`Vec::retain`].
///
/// Triggers a [reconfigure action](Manager::send_action) if any widgets
/// are removed.
pub fn retain<F: FnMut(&W) -> bool>(&mut self, mgr: &mut Manager, f: F) {
let len = self.widgets.len();
self.widgets.retain(f);
if len != self.widgets.len() {
mgr.send_action(TkAction::Reconfigure);
}
}
}
| {
&self.core
} | identifier_body |
my-orders.js | import React, { useState } from 'react';
import PropTypes from 'prop-types';
import styled, { css } from 'styled-components';
import { useRouter } from 'next/router';
import useTranslation from 'next-translate/useTranslation';
import { FiMessageSquare, FiEye, FiX } from 'react-icons/fi';
import { UserProfile } from '../../../components/UserProfile';
import { Protected } from '../../../components/Authorization';
import { Title } from '../../../components/Common';
import { DataGrid } from '../../../components/DataGrid';
import { IconButton } from '../../../components/Button';
import { ORDERING as orderEnum } from '../../../utils/enums/api.enum';
import { dateToString } from '../../../utils/helper';
import { getDealStatusText } from '../../../utils/technologyOrders';
import { STATUS as dealStatusEnum } from '../../../utils/enums/orders.enum';
import { useModal } from '../../../hooks';
import OrderMessages from '../../../components/OrderMessages';
import EmptyScreen from '../../../components/EmptyScreen';
import { getOrders } from '../../../services';
const sortOptions = [
{ value: 'title', label: 'Título' },
{ value: 'responsible', label: 'Responsável' },
{ value: 'status', label: 'Status' },
{ value: 'order_date', label: 'Data do pedido' },
];
const itemsPerPage = 5;
const getTechnologyDataGrid = (order, openModal, setCurrentOrder) => {
const {
id,
status,
created_at,
technology: { title, users },
} = order;
const owner = users?.find((user) => user?.pivot?.role === 'OWNER');
const orderType = 'technology';
return {
id,
title,
institution: owner.institution.initials,
responsible: owner?.full_name,
status: {
status,
content: getDealStatusText(status),
},
orderDate: dateToString(created_at),
type: 'T',
actions: [
{
variant: 'gray',
ariaLabel: 'Order details',
icon: FiEye,
onClick: () => openModal('technologyOrderDetails', { id }),
},
{
variant: 'info',
ariaLabel: 'Send message to technology owner',
icon: FiMessageSquare,
onClick: () => setCurrentOrder({ ...order, owner }),
},
{
variant: 'remove',
ariaLabel: 'Cancel order',
icon: FiX,
onClick: () => openModal('cancelOrder', { id, orderType }),
disabled:
status === dealStatusEnum.DEAL_CANCELLED ||
status === dealStatusEnum.DEAL_STRUCK,
},
],
};
};
const getServiceDataGrid = (order, openModal, setCurrentOrder) => {
const {
id,
status,
created_at,
service: { name, user },
} = order;
const orderType = 'service';
return {
id,
title: name,
institution: user.institution.initials,
responsible: user.full_name,
status: { status, content: getDealStatusText(status) },
orderDate: dateToString(created_at),
type: 'S',
actions: [
{
variant: 'gray',
ariaLabel: 'Order details',
icon: FiEye,
onClick: () => openModal('serviceOrderDetails', { id }),
},
{
variant: 'info',
ariaLabel: 'Send message to service owner',
icon: FiMessageSquare,
onClick: () => setCurrentOrder({ ...order, owner: user }),
},
{
variant: 'remove',
ariaLabel: 'Cancel order',
icon: FiX,
onClick: () => openModal('cancelOrder', { id, orderType }),
disabled:
status === dealStatusEnum.DEAL_CANCELLED ||
status === dealStatusEnum.DEAL_STRUCK,
},
],
};
};
const solutionMapper = {
technology: getTechnologyDataGrid,
service: getServiceDataGrid,
};
const MyOrders = ({ currentPage, totalPages, totalItems, currentSort, orders }) => {
const { t } = useTranslation(['helper', 'account']);
const router = useRouter();
const { openModal } = useModal();
const [currentOrder, setCurrentOrder] = useState(null);
/**
* Pushes new page number to next/router
*
* @param {string} page Page number.
*/
const handlePagination = (page) => {
const { pathname, query } = router;
query.page = page;
router.push({
pathname,
query,
});
};
/**
* Pushes new sort options to next/router
*
* @param {string} orderBy Grid column to sort items.
* @param {('ASC'|'DESC')} order Sort order.
* @returns {Promise<boolean>} Next router push
*/
const handleSortBy = (orderBy, order = currentSort.order || orderEnum.ASC) => {
const { pathname, query } = router;
delete query.page;
|
return router.push({
pathname,
query,
});
};
return (
<Container>
<Protected>
<UserProfile />
{currentOrder ? (
<OrderMessages
isBuyer
currentOrder={currentOrder}
backToList={() => setCurrentOrder(null)}
/>
) : (
<MainContentContainer>
{orders.length ? (
<>
<Title align="left" noPadding noMargin>
{t('account:titles.myOrders')}
</Title>
<MainContent>
<DataGrid
data={orders.map((order) => {
const solutionData = solutionMapper[order.type](
order,
openModal,
setCurrentOrder,
);
return {
id: solutionData.id,
Título: solutionData.title,
Organização: solutionData.institution,
Responsável: solutionData.responsible,
Status: (
<DealStatus status={solutionData.status.status}>
{solutionData.status.content}
</DealStatus>
),
'Data do pedido': solutionData.orderDate,
Tipo: (
<SolutionType type={order.type}>
{solutionData.type}
</SolutionType>
),
Ações: (
<DealActions>
{solutionData.actions.map((action) => (
<IconButton
key={action.ariaLabel}
variant={action.variant}
aria-label={action.ariaLabel}
onClick={action.onClick}
disabled={action.disabled}
>
<action.icon />
</IconButton>
))}
</DealActions>
),
};
})}
hideItemsByKey={['id']}
currentPage={currentPage}
totalPages={totalPages}
totalItems={totalItems}
itemsPerPage={itemsPerPage}
currentOrder={currentSort.order}
sortOptions={sortOptions}
handlePagination={handlePagination}
handleSortBy={handleSortBy}
enablePagination
/>
</MainContent>
</>
) : (
<EmptyScreen message={t('account:messages.noOrdersToShow')} />
)}
</MainContentContainer>
)}
</Protected>
</Container>
);
};
MyOrders.propTypes = {
orders: PropTypes.arrayOf(PropTypes.shape({})).isRequired,
currentPage: PropTypes.number.isRequired,
totalPages: PropTypes.number.isRequired,
totalItems: PropTypes.number.isRequired,
currentSort: PropTypes.shape({
by: PropTypes.string,
order: PropTypes.string,
}),
};
MyOrders.defaultProps = {
currentSort: {},
};
MyOrders.getInitialProps = async (ctx) => {
const { query } = ctx;
const page = Number(query.page) || 1;
const { orders, totalPages, totalItems } = (await getOrders({ fromCurrentUser: true })) || [];
return {
orders,
currentPage: page,
totalPages,
totalItems,
currentSort: { by: query.orderBy, order: query.order },
sortOptions,
};
};
export const Container = styled.div`
display: flex;
margin: 0 auto;
background-color: ${({ theme }) => theme.colors.whiteSmoke};
padding: 3rem 4rem 6rem;
> section:first-child {
margin-right: 4rem;
}
@media screen and (max-width: 950px) {
flex-direction: column;
> section:first-child {
margin-bottom: 1rem;
}
}
`;
export const MainContentContainer = styled.section`
width: 100%;
`;
export const MainContent = styled.div`
min-height: 80vh;
background-color: ${({ theme }) => theme.colors.white};
padding: 2rem;
`;
export const InfoContainer = styled.div`
display: flex;
align-items: center;
justify-content: space-between;
margin-bottom: 1rem;
@media screen and (max-width: 950px) {
flex-direction: column;
button {
margin-bottom: 1rem;
}
}
`;
const statusModifiers = {
[dealStatusEnum.DEAL_STRUCK]: (colors) => css`
color: ${colors.secondary};
&::before {
background: ${colors.secondary};
}
`,
[dealStatusEnum.DEAL_ONGOING]: (colors) => css`
color: ${colors.lightGray2};
&::before {
background: ${colors.lightGray2};
}
`,
[dealStatusEnum.DEAL_CANCELLED]: (colors) => css`
color: ${colors.red};
&::before {
background: ${colors.red};
}
`,
[dealStatusEnum.DEAL_REQUESTED]: (colors) => css`
color: ${colors.lightGray2};
&::before {
background: ${colors.lightGray2};
}
`,
};
export const DealStatus = styled.div`
${({ theme: { colors }, status }) => css`
display: inline-block;
position: relative;
line-height: 2.4rem;
font-weight: 500;
padding: 0.2rem 0.8rem;
max-width: fit-content;
text-align: center;
&::before {
content: '';
display: block;
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
border-radius: 1.45rem;
opacity: 0.1;
}
${!!status && statusModifiers[status]?.(colors)};
`}
`;
export const DealActions = styled.div`
${({ theme: { screens } }) => css`
display: flex;
flex-wrap: wrap;
justify-content: flex-start;
> button {
margin: 0 1.2rem 0 0;
}
svg {
font-size: 1.4rem;
stroke-width: 3;
}
@media screen and (min-width: ${screens.large}px) {
justify-content: center;
> button {
margin: 0.8rem;
}
}
`}
`;
const solutionTypeModifier = {
technology: (colors) => css`
color: ${colors.darkOrange};
&::before {
background: ${colors.darkOrange};
}
`,
service: (colors) => css`
color: ${colors.darkGreen};
&::before {
background: ${colors.darkGreen};
}
`,
};
const SolutionType = styled.div`
${({ theme: { colors }, type }) => css`
display: inline-block;
position: relative;
line-height: 2.4rem;
font-weight: 500;
padding: 0.2rem 0.8rem;
max-width: fit-content;
text-align: center;
&::before {
content: '';
display: block;
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
border-radius: 1.45rem;
opacity: 0.1;
}
${!!type && solutionTypeModifier[type](colors)};
`}
`;
export default MyOrders; | const shouldOrderAsc = order === orderEnum.DESC && currentSort.by !== orderBy;
query.order = shouldOrderAsc ? orderEnum.ASC : order;
query.orderBy = orderBy; | random_line_split |
database.py | from django.db import connection, transaction, utils
from core.utils.transform import from_dict_list_to_gen, from_csv_file_to_gen
from core.utils.csv_helpers import gen_to_csv
from django.conf import settings
from postgres_copy import CopyManager
from io import StringIO
import itertools
import csv
import random
import math
import os
import json
import logging
logger = logging.getLogger('app')
def execute(sql):
with connection.cursor() as curs:
try:
with transaction.atomic():
curs.execute(sql)
except Exception as e:
logger.error("Database - Execute error: {}".format(e))
def create_gen_from_csv_diff(original_file_path, new_file_path):
new_file = open(new_file_path, 'r')
new_reader = csv.reader(new_file, delimiter=',', quotechar='"', doublequote=True,
quoting=csv.QUOTE_ALL, skipinitialspace=True)
logger.debug(" * Beginning CSV diff process.")
# *** if you want to speed this up, open the file and put the original_reader into a List
# I'm not doing so because I don't have confidence that the server can handle 10+ million rows in Memory
# original_reader = list(csv.reader(open(original_file_path, 'r'))
cursor = 0
count = -1 # offset for headers
# iterate through each csv row
# for new_row in new_reader:
# # pass headers first
with open(new_file_path, 'r') as nf:
new_content = nf.readlines()
for new_row in new_content:
if count == -1:
count = count + 1
yield list(csv.reader(StringIO(new_row), delimiter=',', quotechar='"',
doublequote=True, quoting=csv.QUOTE_ALL, skipinitialspace=True))[0]
continue
found = False
# search for csv row in old file
# original_reader = csv.reader(open(original_file_path, 'r'), delimiter=',', quotechar='"',
# doublequote=True, quoting=csv.QUOTE_ALL, skipinitialspace=True)
# for original_row in original_reader:
#
with open(original_file_path, 'r') as of:
original_content = of.readlines()
for original_row in original_content:
if new_row == original_row:
found = True
break
cursor = cursor + 1
# if cursor % settings.BATCH_SIZE == 0:
logger.debug("Diff cursor at: {}".format(cursor))
if not found:
count = count + 1
if count % settings.BATCH_SIZE == 0:
logger.debug('Performed csv diff on {} records'.format(count))
yield list(csv.reader(StringIO(new_row), delimiter=',', quotechar='"',
doublequote=True, quoting=csv.QUOTE_ALL, skipinitialspace=True))[0]
def write_gen_to_temp_file(gen_rows):
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'set_diff' + str(random.randint(1, 10000000))) + '.mock' if settings.TESTING else '.csv')
headers = iter(next(gen_rows))
with open(temp_file_path, 'w') as temp_file:
writer = csv.writer(temp_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL, skipinitialspace=True)
writer.writerow(headers)
for row in gen_rows:
writer.writerow(row)
return temp_file_path
def seed_from_csv_diff(original_file_path, new_file_path, model, **kwargs):
"""
takes new file, filters it down in size, adds to Set()
takes old file, adds to Set()
saves to temporary file for read to avoid high memory usage
Diff Set() = New file Set() - Old file Set()
- preserves new records
- preserves altered/updated records
- removes duplicate, non updated records
seeds Diff Set() in batches
"""
original_diff_set = set()
new_diff_set = set()
new_file = open(new_file_path, 'r')
headers = new_file.readline().replace('\n', '').split(',')
new_reader = model.update_set_filter(csv.reader(new_file), headers)
original_file = open(original_file_path, 'r')
original_reader = csv.reader(original_file)
next(original_reader, None)
logger.debug(" * Beginning CSV diff process.")
for row in new_reader:
new_diff_set.add(json.dumps(row))
for row in original_reader:
original_diff_set.add(json.dumps(row))
diff = new_diff_set - original_diff_set
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'set_diff' + str(random.randint(1, 10000000))) + '.mock' if settings.TESTING else '.csv')
with open(temp_file_path, 'w') as temp_file:
writer = csv.writer(temp_file, delimiter=',')
writer.writerow(headers)
for row in diff:
writer.writerow(json.loads(row))
diff_gen = from_csv_file_to_gen(temp_file_path, kwargs['update'])
logger.debug(" * Csv diff completed, beginning batch upsert.")
batch_upsert_from_gen(model, diff_gen, settings.BATCH_SIZE, **kwargs)
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
def bulk_insert_from_file(model, file_path, **kwargs):
table_name = model._meta.db_table
logger.debug('creating temp csv with cleaned rows and seeding...')
# create new csv with cleaned rows
temp_file_extension = '.mock' if settings.TESTING else '.csv'
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'clean_csv_' + str(random.randint(1, 10000000))) + temp_file_extension)
update = kwargs['update'] if 'update' in kwargs else None
rows = model.transform_self_from_file(file_path, update=update)
logger.debug("writing temp file for {} at {}".format(table_name, temp_file_path))
gen_to_csv(rows, temp_file_path)
logger.debug("temp file complete for {}".format(table_name))
copy_file(model, file_path=temp_file_path, **kwargs)
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
def copy_file(model, file_path=None, **kwargs):
table_name = model._meta.db_table
with open(file_path, 'r') as file:
columns = file.readline().replace('"', '').replace('\n', '')
sql = copy_query(table_name, columns)
try:
copy_insert_from_csv(table_name, file_path, **kwargs)
except Exception as e:
logger.warning("Database - Bulk Import Error - beginning Batch seeding. Error: {}".format(e))
rows = from_csv_file_to_gen(file_path, kwargs['update'])
batch_upsert_from_gen(model, rows, settings.BATCH_SIZE, **kwargs)
def copy_insert_from_csv(table_name, temp_file_path, **kwargs):
with open(temp_file_path, 'r') as temp_file:
columns = temp_file.readline().replace('"', '').replace('\n', '')
sql = copy_query(table_name, columns)
with transaction.atomic():
if 'overwrite' in kwargs and kwargs['overwrite']:
logger.debug('Overwriting table...')
connection.cursor().execute('DELETE FROM {};'.format(table_name))
logger.debug("* Beginning Bulk CSV copy.")
connection.cursor().copy_expert(sql, temp_file)
logger.debug(" * Bulk CSV copy completed successfully.")
if 'update' in kwargs and kwargs['update']:
reader = csv.reader(open(temp_file_path, 'r'))
next(reader, None) # skip headers
kwargs['update'].rows_created = sum(1 for row in reader)
kwargs['update'].save()
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
def upsert_query(table_name, row, primary_key, ignore_conflict=False):
fields = ', '.join(row.keys())
upsert_fields = ', '.join([k + "= EXCLUDED." + k for k in row.keys()])
placeholders = ', '.join(["%s" for v in row.values()])
conflict_action = "DO NOTHING" if ignore_conflict else "DO UPDATE SET {}".format(upsert_fields)
sql = "INSERT INTO {table_name} ({fields}) VALUES ({values}) ON CONFLICT ({primary_key}) {conflict_action};"
return sql.format(table_name=table_name, fields=fields, values=placeholders, primary_key=primary_key, conflict_action=conflict_action)
def insert_query(table_name, row):
fields = ', '.join(row.keys())
placeholders = ', '.join(["%s" for v in row.values()])
sql = "INSERT INTO {table_name} ({fields}) VALUES ({values})"
return sql.format(table_name=table_name, fields=fields, values=placeholders)
def update_query(table_name, row, primary_key):
fields = ', '.join(['{key} = %s'.format(key=key) for key in row.keys()])
keys = ' AND '.join(['{key} = %s'.format(key=key) for key in primary_key.split(', ')])
sql = 'UPDATE {table_name} SET {fields} WHERE({pk});'
return sql.format(table_name=table_name, fields=fields, pk=keys)
def copy_query(table_name, columns):
return 'COPY {table_name} ({fields}) FROM STDIN WITH (format csv)'.format(table_name=table_name, fields=columns)
def build_row_values(row):
t_row = tuple(row.values())
return tuple(None if x == '' else x for x in t_row)
def build_pkey_tuple(row, pkey):
tup = tuple()
for key in pkey.split(', '):
tup = tup + (row[key],)
return tup
def batch_upsert_from_gen(model, rows, batch_size, **kwargs):
table_name = model._meta.db_table
update = kwargs['update'] if 'update' in kwargs else None
ignore_conflict = kwargs['ignore_conflict'] if 'ignore_conflict' in kwargs else None
with connection.cursor() as curs:
try:
count = 0
while True:
batch = list(itertools.islice(rows, 0, batch_size))
if len(batch) == 0:
logger.info("Database - Batch upserts completed for {}.".format(model.__name__))
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
break
else:
with transaction.atomic():
logger.debug("Seeding next batch for {}.".format(model.__name__))
batch_upsert_rows(model, batch, batch_size, update=update, ignore_conflict=ignore_conflict)
count = count + batch_size
logger.debug("Rows touched: {}".format(count))
except Exception as e:
logger.warning("Unable to batch upsert: {}".format(e))
raise e
# No Conflict = True means DO NOTHING on conflict. False means update on conflict.
def batch_upsert_rows(model, rows, batch_size, update=None, ignore_conflict=False):
table_name = model._meta.db_table
primary_key = model._meta.pk.name
""" Inserts many row, all in the same transaction"""
rows_length = len(rows)
with connection.cursor() as curs:
try:
starting_count = model.objects.count()
with transaction.atomic():
curs.executemany(upsert_query(table_name, rows[0], primary_key, ignore_conflict=ignore_conflict), tuple(
build_row_values(row) for row in rows))
if update:
rows_created = model.objects.count() - starting_count
update.rows_created = update.rows_created + rows_created
update.rows_updated = update.rows_updated + (rows_length - rows_created)
update.save()
except Exception as e:
logger.info('Database - error upserting rows. Doing single row upsert. - Error: {}'.format(e))
upsert_single_rows(model, rows, update=update, ignore_conflict=ignore_conflict)
def upsert_single_rows(model, rows, update=None, ignore_conflict=False):
table_name = model._meta.db_table
primary_key = model._meta.pk.name
rows_created = 0
rows_updated = 0
for row in rows:
try:
with connection.cursor() as curs:
with transaction.atomic():
curs.execute(upsert_query(table_name, row, primary_key, ignore_conflict=ignore_conflict),
build_row_values(row))
rows_updated = rows_updated + 1
rows_created = rows_created + 1
if rows_created % settings.BATCH_SIZE == 0:
logger.debug("{} - seeded {}".format(table_name, rows_created))
if update:
update.rows_created = update.rows_created + rows_created
update.rows_updated = update.rows_updated + rows_created
update.save()
rows_updated = 0
rows_updated = 0
except Exception as e:
logger.error("Database Error * - unable to upsert single record. Error: {}".format(e))
continue
if update:
update.rows_created = update.rows_created + rows_created
update.rows_updated = update.rows_updated + rows_created
update.save()
# https://djangosnippets.org/snippets/1400/
import time
import traceback
import logging
import sys
class Status(object):
def __init__(self):
self.num_successful = 0
self.failed_ids = []
self.done = False
self.cur_idx = 0
def __repr__(self):
return u'<Status: %s/%s, %s failed>' % (
getattr(self, 'cur_idx', '-'),
getattr(self, 'total', '-'),
self.num_failed)
@property
def num_failed(self): return len(self.failed_ids)
def start(self):
self.start_time = time.time()
def finished(self):
self.cur_idx = self.total
self.done = True
self.end_time = time.time()
@property
def rate(self):
if self.done:
end_time = self.end_time
else:
end_time = time.time()
return self.cur_idx / (end_time - self.start_time)
@property
def time_left(self):
rate = self.rate
if rate == 0:
return 0
return (self.total - self.cur_idx) / self.rate
def progress_callback(status):
message = '%d/%d failed=%d, rate~%.2f per second, left~%.2f sec \r' % (
status.cur_idx, status.total, status.num_failed, status.rate, status.time_left)
if status.done:
message = "DONE! - {}".format(message)
print(message)
logger.debug(message)
else:
message = "Progress - {}".format(message)
print(message)
logger.debug(message)
def queryset_foreach(queryset, f, batch_size=1000,
progress_callback=progress_callback, transaction=True):
'''
Call a function for each element in a queryset (actually, any list).
Features:
* stable memory usage (thanks to Django paginators)
* progress indicators
* wraps batches in transactions
* can take managers or even models (e.g., Assertion.objects)
* warns about DEBUG.
* handles failures of single items without dying in general.
* stable even if items are added or removed during processing
(gets a list of ids at the start)
Returns a Status object, with the following interesting attributes
total: number of items in the queryset
num_successful: count of successful items
failed_ids: list of ids of items that failed
'''
from django.conf import settings
if settings.DEBUG:
logger.debug('Warning: DEBUG is on. django.db.connection.queries may use up a lot of memory.')
# Get querysets corresponding to managers
from django.shortcuts import _get_queryset
queryset = _get_queryset(queryset)
# Get a snapshot of all the ids that match the query
logger.debug('qs4e: Getting list of objects')
ids = list(queryset.values_list(queryset.model._meta.pk.name, flat=True))
# Initialize status
status = Status()
status.total = len(ids)
def do_all_objects(objects):
from django.db import transaction
with transaction.atomic():
for id, obj in objects.items():
try:
f(obj)
status.num_successful += 1
except Exception as e: # python 2.5+: doesn't catch KeyboardInterrupt or SystemExit
logger.error(e)
status.failed_ids.append(id)
# if transaction:
# # Wrap each batch in a transaction
# with transaction.atomic():
# do_all_objects = transaction.commit_on_success(do_all_objects)
from django.core.paginator import Paginator
paginator = Paginator(ids, batch_size)
status.start()
progress_callback(status)
for page_num in paginator.page_range: | objects = queryset.in_bulk(page.object_list)
do_all_objects(objects)
status.finished()
progress_callback(status)
return status | status.page = page = paginator.page(page_num)
status.cur_idx = page.start_index() - 1
progress_callback(status) | random_line_split |
database.py | from django.db import connection, transaction, utils
from core.utils.transform import from_dict_list_to_gen, from_csv_file_to_gen
from core.utils.csv_helpers import gen_to_csv
from django.conf import settings
from postgres_copy import CopyManager
from io import StringIO
import itertools
import csv
import random
import math
import os
import json
import logging
logger = logging.getLogger('app')
def execute(sql):
with connection.cursor() as curs:
try:
with transaction.atomic():
curs.execute(sql)
except Exception as e:
logger.error("Database - Execute error: {}".format(e))
def create_gen_from_csv_diff(original_file_path, new_file_path):
new_file = open(new_file_path, 'r')
new_reader = csv.reader(new_file, delimiter=',', quotechar='"', doublequote=True,
quoting=csv.QUOTE_ALL, skipinitialspace=True)
logger.debug(" * Beginning CSV diff process.")
# *** if you want to speed this up, open the file and put the original_reader into a List
# I'm not doing so because I don't have confidence that the server can handle 10+ million rows in Memory
# original_reader = list(csv.reader(open(original_file_path, 'r'))
cursor = 0
count = -1 # offset for headers
# iterate through each csv row
# for new_row in new_reader:
# # pass headers first
with open(new_file_path, 'r') as nf:
new_content = nf.readlines()
for new_row in new_content:
if count == -1:
count = count + 1
yield list(csv.reader(StringIO(new_row), delimiter=',', quotechar='"',
doublequote=True, quoting=csv.QUOTE_ALL, skipinitialspace=True))[0]
continue
found = False
# search for csv row in old file
# original_reader = csv.reader(open(original_file_path, 'r'), delimiter=',', quotechar='"',
# doublequote=True, quoting=csv.QUOTE_ALL, skipinitialspace=True)
# for original_row in original_reader:
#
with open(original_file_path, 'r') as of:
original_content = of.readlines()
for original_row in original_content:
if new_row == original_row:
found = True
break
cursor = cursor + 1
# if cursor % settings.BATCH_SIZE == 0:
logger.debug("Diff cursor at: {}".format(cursor))
if not found:
count = count + 1
if count % settings.BATCH_SIZE == 0:
logger.debug('Performed csv diff on {} records'.format(count))
yield list(csv.reader(StringIO(new_row), delimiter=',', quotechar='"',
doublequote=True, quoting=csv.QUOTE_ALL, skipinitialspace=True))[0]
def write_gen_to_temp_file(gen_rows):
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'set_diff' + str(random.randint(1, 10000000))) + '.mock' if settings.TESTING else '.csv')
headers = iter(next(gen_rows))
with open(temp_file_path, 'w') as temp_file:
writer = csv.writer(temp_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL, skipinitialspace=True)
writer.writerow(headers)
for row in gen_rows:
writer.writerow(row)
return temp_file_path
def seed_from_csv_diff(original_file_path, new_file_path, model, **kwargs):
"""
takes new file, filters it down in size, adds to Set()
takes old file, adds to Set()
saves to temporary file for read to avoid high memory usage
Diff Set() = New file Set() - Old file Set()
- preserves new records
- preserves altered/updated records
- removes duplicate, non updated records
seeds Diff Set() in batches
"""
original_diff_set = set()
new_diff_set = set()
new_file = open(new_file_path, 'r')
headers = new_file.readline().replace('\n', '').split(',')
new_reader = model.update_set_filter(csv.reader(new_file), headers)
original_file = open(original_file_path, 'r')
original_reader = csv.reader(original_file)
next(original_reader, None)
logger.debug(" * Beginning CSV diff process.")
for row in new_reader:
new_diff_set.add(json.dumps(row))
for row in original_reader:
original_diff_set.add(json.dumps(row))
diff = new_diff_set - original_diff_set
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'set_diff' + str(random.randint(1, 10000000))) + '.mock' if settings.TESTING else '.csv')
with open(temp_file_path, 'w') as temp_file:
writer = csv.writer(temp_file, delimiter=',')
writer.writerow(headers)
for row in diff:
writer.writerow(json.loads(row))
diff_gen = from_csv_file_to_gen(temp_file_path, kwargs['update'])
logger.debug(" * Csv diff completed, beginning batch upsert.")
batch_upsert_from_gen(model, diff_gen, settings.BATCH_SIZE, **kwargs)
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
def bulk_insert_from_file(model, file_path, **kwargs):
table_name = model._meta.db_table
logger.debug('creating temp csv with cleaned rows and seeding...')
# create new csv with cleaned rows
temp_file_extension = '.mock' if settings.TESTING else '.csv'
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'clean_csv_' + str(random.randint(1, 10000000))) + temp_file_extension)
update = kwargs['update'] if 'update' in kwargs else None
rows = model.transform_self_from_file(file_path, update=update)
logger.debug("writing temp file for {} at {}".format(table_name, temp_file_path))
gen_to_csv(rows, temp_file_path)
logger.debug("temp file complete for {}".format(table_name))
copy_file(model, file_path=temp_file_path, **kwargs)
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
def copy_file(model, file_path=None, **kwargs):
table_name = model._meta.db_table
with open(file_path, 'r') as file:
columns = file.readline().replace('"', '').replace('\n', '')
sql = copy_query(table_name, columns)
try:
copy_insert_from_csv(table_name, file_path, **kwargs)
except Exception as e:
logger.warning("Database - Bulk Import Error - beginning Batch seeding. Error: {}".format(e))
rows = from_csv_file_to_gen(file_path, kwargs['update'])
batch_upsert_from_gen(model, rows, settings.BATCH_SIZE, **kwargs)
def copy_insert_from_csv(table_name, temp_file_path, **kwargs):
with open(temp_file_path, 'r') as temp_file:
columns = temp_file.readline().replace('"', '').replace('\n', '')
sql = copy_query(table_name, columns)
with transaction.atomic():
if 'overwrite' in kwargs and kwargs['overwrite']:
logger.debug('Overwriting table...')
connection.cursor().execute('DELETE FROM {};'.format(table_name))
logger.debug("* Beginning Bulk CSV copy.")
connection.cursor().copy_expert(sql, temp_file)
logger.debug(" * Bulk CSV copy completed successfully.")
if 'update' in kwargs and kwargs['update']:
reader = csv.reader(open(temp_file_path, 'r'))
next(reader, None) # skip headers
kwargs['update'].rows_created = sum(1 for row in reader)
kwargs['update'].save()
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
def upsert_query(table_name, row, primary_key, ignore_conflict=False):
fields = ', '.join(row.keys())
upsert_fields = ', '.join([k + "= EXCLUDED." + k for k in row.keys()])
placeholders = ', '.join(["%s" for v in row.values()])
conflict_action = "DO NOTHING" if ignore_conflict else "DO UPDATE SET {}".format(upsert_fields)
sql = "INSERT INTO {table_name} ({fields}) VALUES ({values}) ON CONFLICT ({primary_key}) {conflict_action};"
return sql.format(table_name=table_name, fields=fields, values=placeholders, primary_key=primary_key, conflict_action=conflict_action)
def insert_query(table_name, row):
fields = ', '.join(row.keys())
placeholders = ', '.join(["%s" for v in row.values()])
sql = "INSERT INTO {table_name} ({fields}) VALUES ({values})"
return sql.format(table_name=table_name, fields=fields, values=placeholders)
def update_query(table_name, row, primary_key):
|
def copy_query(table_name, columns):
return 'COPY {table_name} ({fields}) FROM STDIN WITH (format csv)'.format(table_name=table_name, fields=columns)
def build_row_values(row):
t_row = tuple(row.values())
return tuple(None if x == '' else x for x in t_row)
def build_pkey_tuple(row, pkey):
tup = tuple()
for key in pkey.split(', '):
tup = tup + (row[key],)
return tup
def batch_upsert_from_gen(model, rows, batch_size, **kwargs):
table_name = model._meta.db_table
update = kwargs['update'] if 'update' in kwargs else None
ignore_conflict = kwargs['ignore_conflict'] if 'ignore_conflict' in kwargs else None
with connection.cursor() as curs:
try:
count = 0
while True:
batch = list(itertools.islice(rows, 0, batch_size))
if len(batch) == 0:
logger.info("Database - Batch upserts completed for {}.".format(model.__name__))
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
break
else:
with transaction.atomic():
logger.debug("Seeding next batch for {}.".format(model.__name__))
batch_upsert_rows(model, batch, batch_size, update=update, ignore_conflict=ignore_conflict)
count = count + batch_size
logger.debug("Rows touched: {}".format(count))
except Exception as e:
logger.warning("Unable to batch upsert: {}".format(e))
raise e
# No Conflict = True means DO NOTHING on conflict. False means update on conflict.
def batch_upsert_rows(model, rows, batch_size, update=None, ignore_conflict=False):
table_name = model._meta.db_table
primary_key = model._meta.pk.name
""" Inserts many row, all in the same transaction"""
rows_length = len(rows)
with connection.cursor() as curs:
try:
starting_count = model.objects.count()
with transaction.atomic():
curs.executemany(upsert_query(table_name, rows[0], primary_key, ignore_conflict=ignore_conflict), tuple(
build_row_values(row) for row in rows))
if update:
rows_created = model.objects.count() - starting_count
update.rows_created = update.rows_created + rows_created
update.rows_updated = update.rows_updated + (rows_length - rows_created)
update.save()
except Exception as e:
logger.info('Database - error upserting rows. Doing single row upsert. - Error: {}'.format(e))
upsert_single_rows(model, rows, update=update, ignore_conflict=ignore_conflict)
def upsert_single_rows(model, rows, update=None, ignore_conflict=False):
table_name = model._meta.db_table
primary_key = model._meta.pk.name
rows_created = 0
rows_updated = 0
for row in rows:
try:
with connection.cursor() as curs:
with transaction.atomic():
curs.execute(upsert_query(table_name, row, primary_key, ignore_conflict=ignore_conflict),
build_row_values(row))
rows_updated = rows_updated + 1
rows_created = rows_created + 1
if rows_created % settings.BATCH_SIZE == 0:
logger.debug("{} - seeded {}".format(table_name, rows_created))
if update:
update.rows_created = update.rows_created + rows_created
update.rows_updated = update.rows_updated + rows_created
update.save()
rows_updated = 0
rows_updated = 0
except Exception as e:
logger.error("Database Error * - unable to upsert single record. Error: {}".format(e))
continue
if update:
update.rows_created = update.rows_created + rows_created
update.rows_updated = update.rows_updated + rows_created
update.save()
# https://djangosnippets.org/snippets/1400/
import time
import traceback
import logging
import sys
class Status(object):
def __init__(self):
self.num_successful = 0
self.failed_ids = []
self.done = False
self.cur_idx = 0
def __repr__(self):
return u'<Status: %s/%s, %s failed>' % (
getattr(self, 'cur_idx', '-'),
getattr(self, 'total', '-'),
self.num_failed)
@property
def num_failed(self): return len(self.failed_ids)
def start(self):
self.start_time = time.time()
def finished(self):
self.cur_idx = self.total
self.done = True
self.end_time = time.time()
@property
def rate(self):
if self.done:
end_time = self.end_time
else:
end_time = time.time()
return self.cur_idx / (end_time - self.start_time)
@property
def time_left(self):
rate = self.rate
if rate == 0:
return 0
return (self.total - self.cur_idx) / self.rate
def progress_callback(status):
message = '%d/%d failed=%d, rate~%.2f per second, left~%.2f sec \r' % (
status.cur_idx, status.total, status.num_failed, status.rate, status.time_left)
if status.done:
message = "DONE! - {}".format(message)
print(message)
logger.debug(message)
else:
message = "Progress - {}".format(message)
print(message)
logger.debug(message)
def queryset_foreach(queryset, f, batch_size=1000,
progress_callback=progress_callback, transaction=True):
'''
Call a function for each element in a queryset (actually, any list).
Features:
* stable memory usage (thanks to Django paginators)
* progress indicators
* wraps batches in transactions
* can take managers or even models (e.g., Assertion.objects)
* warns about DEBUG.
* handles failures of single items without dying in general.
* stable even if items are added or removed during processing
(gets a list of ids at the start)
Returns a Status object, with the following interesting attributes
total: number of items in the queryset
num_successful: count of successful items
failed_ids: list of ids of items that failed
'''
from django.conf import settings
if settings.DEBUG:
logger.debug('Warning: DEBUG is on. django.db.connection.queries may use up a lot of memory.')
# Get querysets corresponding to managers
from django.shortcuts import _get_queryset
queryset = _get_queryset(queryset)
# Get a snapshot of all the ids that match the query
logger.debug('qs4e: Getting list of objects')
ids = list(queryset.values_list(queryset.model._meta.pk.name, flat=True))
# Initialize status
status = Status()
status.total = len(ids)
def do_all_objects(objects):
from django.db import transaction
with transaction.atomic():
for id, obj in objects.items():
try:
f(obj)
status.num_successful += 1
except Exception as e: # python 2.5+: doesn't catch KeyboardInterrupt or SystemExit
logger.error(e)
status.failed_ids.append(id)
# if transaction:
# # Wrap each batch in a transaction
# with transaction.atomic():
# do_all_objects = transaction.commit_on_success(do_all_objects)
from django.core.paginator import Paginator
paginator = Paginator(ids, batch_size)
status.start()
progress_callback(status)
for page_num in paginator.page_range:
status.page = page = paginator.page(page_num)
status.cur_idx = page.start_index() - 1
progress_callback(status)
objects = queryset.in_bulk(page.object_list)
do_all_objects(objects)
status.finished()
progress_callback(status)
return status
| fields = ', '.join(['{key} = %s'.format(key=key) for key in row.keys()])
keys = ' AND '.join(['{key} = %s'.format(key=key) for key in primary_key.split(', ')])
sql = 'UPDATE {table_name} SET {fields} WHERE({pk});'
return sql.format(table_name=table_name, fields=fields, pk=keys) | identifier_body |
database.py | from django.db import connection, transaction, utils
from core.utils.transform import from_dict_list_to_gen, from_csv_file_to_gen
from core.utils.csv_helpers import gen_to_csv
from django.conf import settings
from postgres_copy import CopyManager
from io import StringIO
import itertools
import csv
import random
import math
import os
import json
import logging
logger = logging.getLogger('app')
def execute(sql):
with connection.cursor() as curs:
try:
with transaction.atomic():
curs.execute(sql)
except Exception as e:
logger.error("Database - Execute error: {}".format(e))
def create_gen_from_csv_diff(original_file_path, new_file_path):
new_file = open(new_file_path, 'r')
new_reader = csv.reader(new_file, delimiter=',', quotechar='"', doublequote=True,
quoting=csv.QUOTE_ALL, skipinitialspace=True)
logger.debug(" * Beginning CSV diff process.")
# *** if you want to speed this up, open the file and put the original_reader into a List
# I'm not doing so because I don't have confidence that the server can handle 10+ million rows in Memory
# original_reader = list(csv.reader(open(original_file_path, 'r'))
cursor = 0
count = -1 # offset for headers
# iterate through each csv row
# for new_row in new_reader:
# # pass headers first
with open(new_file_path, 'r') as nf:
new_content = nf.readlines()
for new_row in new_content:
|
def write_gen_to_temp_file(gen_rows):
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'set_diff' + str(random.randint(1, 10000000))) + '.mock' if settings.TESTING else '.csv')
headers = iter(next(gen_rows))
with open(temp_file_path, 'w') as temp_file:
writer = csv.writer(temp_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL, skipinitialspace=True)
writer.writerow(headers)
for row in gen_rows:
writer.writerow(row)
return temp_file_path
def seed_from_csv_diff(original_file_path, new_file_path, model, **kwargs):
"""
takes new file, filters it down in size, adds to Set()
takes old file, adds to Set()
saves to temporary file for read to avoid high memory usage
Diff Set() = New file Set() - Old file Set()
- preserves new records
- preserves altered/updated records
- removes duplicate, non updated records
seeds Diff Set() in batches
"""
original_diff_set = set()
new_diff_set = set()
new_file = open(new_file_path, 'r')
headers = new_file.readline().replace('\n', '').split(',')
new_reader = model.update_set_filter(csv.reader(new_file), headers)
original_file = open(original_file_path, 'r')
original_reader = csv.reader(original_file)
next(original_reader, None)
logger.debug(" * Beginning CSV diff process.")
for row in new_reader:
new_diff_set.add(json.dumps(row))
for row in original_reader:
original_diff_set.add(json.dumps(row))
diff = new_diff_set - original_diff_set
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'set_diff' + str(random.randint(1, 10000000))) + '.mock' if settings.TESTING else '.csv')
with open(temp_file_path, 'w') as temp_file:
writer = csv.writer(temp_file, delimiter=',')
writer.writerow(headers)
for row in diff:
writer.writerow(json.loads(row))
diff_gen = from_csv_file_to_gen(temp_file_path, kwargs['update'])
logger.debug(" * Csv diff completed, beginning batch upsert.")
batch_upsert_from_gen(model, diff_gen, settings.BATCH_SIZE, **kwargs)
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
def bulk_insert_from_file(model, file_path, **kwargs):
table_name = model._meta.db_table
logger.debug('creating temp csv with cleaned rows and seeding...')
# create new csv with cleaned rows
temp_file_extension = '.mock' if settings.TESTING else '.csv'
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'clean_csv_' + str(random.randint(1, 10000000))) + temp_file_extension)
update = kwargs['update'] if 'update' in kwargs else None
rows = model.transform_self_from_file(file_path, update=update)
logger.debug("writing temp file for {} at {}".format(table_name, temp_file_path))
gen_to_csv(rows, temp_file_path)
logger.debug("temp file complete for {}".format(table_name))
copy_file(model, file_path=temp_file_path, **kwargs)
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
def copy_file(model, file_path=None, **kwargs):
table_name = model._meta.db_table
with open(file_path, 'r') as file:
columns = file.readline().replace('"', '').replace('\n', '')
sql = copy_query(table_name, columns)
try:
copy_insert_from_csv(table_name, file_path, **kwargs)
except Exception as e:
logger.warning("Database - Bulk Import Error - beginning Batch seeding. Error: {}".format(e))
rows = from_csv_file_to_gen(file_path, kwargs['update'])
batch_upsert_from_gen(model, rows, settings.BATCH_SIZE, **kwargs)
def copy_insert_from_csv(table_name, temp_file_path, **kwargs):
with open(temp_file_path, 'r') as temp_file:
columns = temp_file.readline().replace('"', '').replace('\n', '')
sql = copy_query(table_name, columns)
with transaction.atomic():
if 'overwrite' in kwargs and kwargs['overwrite']:
logger.debug('Overwriting table...')
connection.cursor().execute('DELETE FROM {};'.format(table_name))
logger.debug("* Beginning Bulk CSV copy.")
connection.cursor().copy_expert(sql, temp_file)
logger.debug(" * Bulk CSV copy completed successfully.")
if 'update' in kwargs and kwargs['update']:
reader = csv.reader(open(temp_file_path, 'r'))
next(reader, None) # skip headers
kwargs['update'].rows_created = sum(1 for row in reader)
kwargs['update'].save()
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
def upsert_query(table_name, row, primary_key, ignore_conflict=False):
fields = ', '.join(row.keys())
upsert_fields = ', '.join([k + "= EXCLUDED." + k for k in row.keys()])
placeholders = ', '.join(["%s" for v in row.values()])
conflict_action = "DO NOTHING" if ignore_conflict else "DO UPDATE SET {}".format(upsert_fields)
sql = "INSERT INTO {table_name} ({fields}) VALUES ({values}) ON CONFLICT ({primary_key}) {conflict_action};"
return sql.format(table_name=table_name, fields=fields, values=placeholders, primary_key=primary_key, conflict_action=conflict_action)
def insert_query(table_name, row):
fields = ', '.join(row.keys())
placeholders = ', '.join(["%s" for v in row.values()])
sql = "INSERT INTO {table_name} ({fields}) VALUES ({values})"
return sql.format(table_name=table_name, fields=fields, values=placeholders)
def update_query(table_name, row, primary_key):
fields = ', '.join(['{key} = %s'.format(key=key) for key in row.keys()])
keys = ' AND '.join(['{key} = %s'.format(key=key) for key in primary_key.split(', ')])
sql = 'UPDATE {table_name} SET {fields} WHERE({pk});'
return sql.format(table_name=table_name, fields=fields, pk=keys)
def copy_query(table_name, columns):
return 'COPY {table_name} ({fields}) FROM STDIN WITH (format csv)'.format(table_name=table_name, fields=columns)
def build_row_values(row):
t_row = tuple(row.values())
return tuple(None if x == '' else x for x in t_row)
def build_pkey_tuple(row, pkey):
tup = tuple()
for key in pkey.split(', '):
tup = tup + (row[key],)
return tup
def batch_upsert_from_gen(model, rows, batch_size, **kwargs):
table_name = model._meta.db_table
update = kwargs['update'] if 'update' in kwargs else None
ignore_conflict = kwargs['ignore_conflict'] if 'ignore_conflict' in kwargs else None
with connection.cursor() as curs:
try:
count = 0
while True:
batch = list(itertools.islice(rows, 0, batch_size))
if len(batch) == 0:
logger.info("Database - Batch upserts completed for {}.".format(model.__name__))
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
break
else:
with transaction.atomic():
logger.debug("Seeding next batch for {}.".format(model.__name__))
batch_upsert_rows(model, batch, batch_size, update=update, ignore_conflict=ignore_conflict)
count = count + batch_size
logger.debug("Rows touched: {}".format(count))
except Exception as e:
logger.warning("Unable to batch upsert: {}".format(e))
raise e
# No Conflict = True means DO NOTHING on conflict. False means update on conflict.
def batch_upsert_rows(model, rows, batch_size, update=None, ignore_conflict=False):
table_name = model._meta.db_table
primary_key = model._meta.pk.name
""" Inserts many row, all in the same transaction"""
rows_length = len(rows)
with connection.cursor() as curs:
try:
starting_count = model.objects.count()
with transaction.atomic():
curs.executemany(upsert_query(table_name, rows[0], primary_key, ignore_conflict=ignore_conflict), tuple(
build_row_values(row) for row in rows))
if update:
rows_created = model.objects.count() - starting_count
update.rows_created = update.rows_created + rows_created
update.rows_updated = update.rows_updated + (rows_length - rows_created)
update.save()
except Exception as e:
logger.info('Database - error upserting rows. Doing single row upsert. - Error: {}'.format(e))
upsert_single_rows(model, rows, update=update, ignore_conflict=ignore_conflict)
def upsert_single_rows(model, rows, update=None, ignore_conflict=False):
table_name = model._meta.db_table
primary_key = model._meta.pk.name
rows_created = 0
rows_updated = 0
for row in rows:
try:
with connection.cursor() as curs:
with transaction.atomic():
curs.execute(upsert_query(table_name, row, primary_key, ignore_conflict=ignore_conflict),
build_row_values(row))
rows_updated = rows_updated + 1
rows_created = rows_created + 1
if rows_created % settings.BATCH_SIZE == 0:
logger.debug("{} - seeded {}".format(table_name, rows_created))
if update:
update.rows_created = update.rows_created + rows_created
update.rows_updated = update.rows_updated + rows_created
update.save()
rows_updated = 0
rows_updated = 0
except Exception as e:
logger.error("Database Error * - unable to upsert single record. Error: {}".format(e))
continue
if update:
update.rows_created = update.rows_created + rows_created
update.rows_updated = update.rows_updated + rows_created
update.save()
# https://djangosnippets.org/snippets/1400/
import time
import traceback
import logging
import sys
class Status(object):
def __init__(self):
self.num_successful = 0
self.failed_ids = []
self.done = False
self.cur_idx = 0
def __repr__(self):
return u'<Status: %s/%s, %s failed>' % (
getattr(self, 'cur_idx', '-'),
getattr(self, 'total', '-'),
self.num_failed)
@property
def num_failed(self): return len(self.failed_ids)
def start(self):
self.start_time = time.time()
def finished(self):
self.cur_idx = self.total
self.done = True
self.end_time = time.time()
@property
def rate(self):
if self.done:
end_time = self.end_time
else:
end_time = time.time()
return self.cur_idx / (end_time - self.start_time)
@property
def time_left(self):
rate = self.rate
if rate == 0:
return 0
return (self.total - self.cur_idx) / self.rate
def progress_callback(status):
message = '%d/%d failed=%d, rate~%.2f per second, left~%.2f sec \r' % (
status.cur_idx, status.total, status.num_failed, status.rate, status.time_left)
if status.done:
message = "DONE! - {}".format(message)
print(message)
logger.debug(message)
else:
message = "Progress - {}".format(message)
print(message)
logger.debug(message)
def queryset_foreach(queryset, f, batch_size=1000,
progress_callback=progress_callback, transaction=True):
'''
Call a function for each element in a queryset (actually, any list).
Features:
* stable memory usage (thanks to Django paginators)
* progress indicators
* wraps batches in transactions
* can take managers or even models (e.g., Assertion.objects)
* warns about DEBUG.
* handles failures of single items without dying in general.
* stable even if items are added or removed during processing
(gets a list of ids at the start)
Returns a Status object, with the following interesting attributes
total: number of items in the queryset
num_successful: count of successful items
failed_ids: list of ids of items that failed
'''
from django.conf import settings
if settings.DEBUG:
logger.debug('Warning: DEBUG is on. django.db.connection.queries may use up a lot of memory.')
# Get querysets corresponding to managers
from django.shortcuts import _get_queryset
queryset = _get_queryset(queryset)
# Get a snapshot of all the ids that match the query
logger.debug('qs4e: Getting list of objects')
ids = list(queryset.values_list(queryset.model._meta.pk.name, flat=True))
# Initialize status
status = Status()
status.total = len(ids)
def do_all_objects(objects):
from django.db import transaction
with transaction.atomic():
for id, obj in objects.items():
try:
f(obj)
status.num_successful += 1
except Exception as e: # python 2.5+: doesn't catch KeyboardInterrupt or SystemExit
logger.error(e)
status.failed_ids.append(id)
# if transaction:
# # Wrap each batch in a transaction
# with transaction.atomic():
# do_all_objects = transaction.commit_on_success(do_all_objects)
from django.core.paginator import Paginator
paginator = Paginator(ids, batch_size)
status.start()
progress_callback(status)
for page_num in paginator.page_range:
status.page = page = paginator.page(page_num)
status.cur_idx = page.start_index() - 1
progress_callback(status)
objects = queryset.in_bulk(page.object_list)
do_all_objects(objects)
status.finished()
progress_callback(status)
return status
| if count == -1:
count = count + 1
yield list(csv.reader(StringIO(new_row), delimiter=',', quotechar='"',
doublequote=True, quoting=csv.QUOTE_ALL, skipinitialspace=True))[0]
continue
found = False
# search for csv row in old file
# original_reader = csv.reader(open(original_file_path, 'r'), delimiter=',', quotechar='"',
# doublequote=True, quoting=csv.QUOTE_ALL, skipinitialspace=True)
# for original_row in original_reader:
#
with open(original_file_path, 'r') as of:
original_content = of.readlines()
for original_row in original_content:
if new_row == original_row:
found = True
break
cursor = cursor + 1
# if cursor % settings.BATCH_SIZE == 0:
logger.debug("Diff cursor at: {}".format(cursor))
if not found:
count = count + 1
if count % settings.BATCH_SIZE == 0:
logger.debug('Performed csv diff on {} records'.format(count))
yield list(csv.reader(StringIO(new_row), delimiter=',', quotechar='"',
doublequote=True, quoting=csv.QUOTE_ALL, skipinitialspace=True))[0] | conditional_block |
database.py | from django.db import connection, transaction, utils
from core.utils.transform import from_dict_list_to_gen, from_csv_file_to_gen
from core.utils.csv_helpers import gen_to_csv
from django.conf import settings
from postgres_copy import CopyManager
from io import StringIO
import itertools
import csv
import random
import math
import os
import json
import logging
logger = logging.getLogger('app')
def execute(sql):
with connection.cursor() as curs:
try:
with transaction.atomic():
curs.execute(sql)
except Exception as e:
logger.error("Database - Execute error: {}".format(e))
def create_gen_from_csv_diff(original_file_path, new_file_path):
new_file = open(new_file_path, 'r')
new_reader = csv.reader(new_file, delimiter=',', quotechar='"', doublequote=True,
quoting=csv.QUOTE_ALL, skipinitialspace=True)
logger.debug(" * Beginning CSV diff process.")
# *** if you want to speed this up, open the file and put the original_reader into a List
# I'm not doing so because I don't have confidence that the server can handle 10+ million rows in Memory
# original_reader = list(csv.reader(open(original_file_path, 'r'))
cursor = 0
count = -1 # offset for headers
# iterate through each csv row
# for new_row in new_reader:
# # pass headers first
with open(new_file_path, 'r') as nf:
new_content = nf.readlines()
for new_row in new_content:
if count == -1:
count = count + 1
yield list(csv.reader(StringIO(new_row), delimiter=',', quotechar='"',
doublequote=True, quoting=csv.QUOTE_ALL, skipinitialspace=True))[0]
continue
found = False
# search for csv row in old file
# original_reader = csv.reader(open(original_file_path, 'r'), delimiter=',', quotechar='"',
# doublequote=True, quoting=csv.QUOTE_ALL, skipinitialspace=True)
# for original_row in original_reader:
#
with open(original_file_path, 'r') as of:
original_content = of.readlines()
for original_row in original_content:
if new_row == original_row:
found = True
break
cursor = cursor + 1
# if cursor % settings.BATCH_SIZE == 0:
logger.debug("Diff cursor at: {}".format(cursor))
if not found:
count = count + 1
if count % settings.BATCH_SIZE == 0:
logger.debug('Performed csv diff on {} records'.format(count))
yield list(csv.reader(StringIO(new_row), delimiter=',', quotechar='"',
doublequote=True, quoting=csv.QUOTE_ALL, skipinitialspace=True))[0]
def write_gen_to_temp_file(gen_rows):
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'set_diff' + str(random.randint(1, 10000000))) + '.mock' if settings.TESTING else '.csv')
headers = iter(next(gen_rows))
with open(temp_file_path, 'w') as temp_file:
writer = csv.writer(temp_file, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL, skipinitialspace=True)
writer.writerow(headers)
for row in gen_rows:
writer.writerow(row)
return temp_file_path
def seed_from_csv_diff(original_file_path, new_file_path, model, **kwargs):
"""
takes new file, filters it down in size, adds to Set()
takes old file, adds to Set()
saves to temporary file for read to avoid high memory usage
Diff Set() = New file Set() - Old file Set()
- preserves new records
- preserves altered/updated records
- removes duplicate, non updated records
seeds Diff Set() in batches
"""
original_diff_set = set()
new_diff_set = set()
new_file = open(new_file_path, 'r')
headers = new_file.readline().replace('\n', '').split(',')
new_reader = model.update_set_filter(csv.reader(new_file), headers)
original_file = open(original_file_path, 'r')
original_reader = csv.reader(original_file)
next(original_reader, None)
logger.debug(" * Beginning CSV diff process.")
for row in new_reader:
new_diff_set.add(json.dumps(row))
for row in original_reader:
original_diff_set.add(json.dumps(row))
diff = new_diff_set - original_diff_set
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'set_diff' + str(random.randint(1, 10000000))) + '.mock' if settings.TESTING else '.csv')
with open(temp_file_path, 'w') as temp_file:
writer = csv.writer(temp_file, delimiter=',')
writer.writerow(headers)
for row in diff:
writer.writerow(json.loads(row))
diff_gen = from_csv_file_to_gen(temp_file_path, kwargs['update'])
logger.debug(" * Csv diff completed, beginning batch upsert.")
batch_upsert_from_gen(model, diff_gen, settings.BATCH_SIZE, **kwargs)
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
def bulk_insert_from_file(model, file_path, **kwargs):
table_name = model._meta.db_table
logger.debug('creating temp csv with cleaned rows and seeding...')
# create new csv with cleaned rows
temp_file_extension = '.mock' if settings.TESTING else '.csv'
temp_file_path = os.path.join(settings.MEDIA_TEMP_ROOT, str(
'clean_csv_' + str(random.randint(1, 10000000))) + temp_file_extension)
update = kwargs['update'] if 'update' in kwargs else None
rows = model.transform_self_from_file(file_path, update=update)
logger.debug("writing temp file for {} at {}".format(table_name, temp_file_path))
gen_to_csv(rows, temp_file_path)
logger.debug("temp file complete for {}".format(table_name))
copy_file(model, file_path=temp_file_path, **kwargs)
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
def copy_file(model, file_path=None, **kwargs):
table_name = model._meta.db_table
with open(file_path, 'r') as file:
columns = file.readline().replace('"', '').replace('\n', '')
sql = copy_query(table_name, columns)
try:
copy_insert_from_csv(table_name, file_path, **kwargs)
except Exception as e:
logger.warning("Database - Bulk Import Error - beginning Batch seeding. Error: {}".format(e))
rows = from_csv_file_to_gen(file_path, kwargs['update'])
batch_upsert_from_gen(model, rows, settings.BATCH_SIZE, **kwargs)
def copy_insert_from_csv(table_name, temp_file_path, **kwargs):
with open(temp_file_path, 'r') as temp_file:
columns = temp_file.readline().replace('"', '').replace('\n', '')
sql = copy_query(table_name, columns)
with transaction.atomic():
if 'overwrite' in kwargs and kwargs['overwrite']:
logger.debug('Overwriting table...')
connection.cursor().execute('DELETE FROM {};'.format(table_name))
logger.debug("* Beginning Bulk CSV copy.")
connection.cursor().copy_expert(sql, temp_file)
logger.debug(" * Bulk CSV copy completed successfully.")
if 'update' in kwargs and kwargs['update']:
reader = csv.reader(open(temp_file_path, 'r'))
next(reader, None) # skip headers
kwargs['update'].rows_created = sum(1 for row in reader)
kwargs['update'].save()
if os.path.isfile(temp_file_path):
os.remove(temp_file_path)
def upsert_query(table_name, row, primary_key, ignore_conflict=False):
fields = ', '.join(row.keys())
upsert_fields = ', '.join([k + "= EXCLUDED." + k for k in row.keys()])
placeholders = ', '.join(["%s" for v in row.values()])
conflict_action = "DO NOTHING" if ignore_conflict else "DO UPDATE SET {}".format(upsert_fields)
sql = "INSERT INTO {table_name} ({fields}) VALUES ({values}) ON CONFLICT ({primary_key}) {conflict_action};"
return sql.format(table_name=table_name, fields=fields, values=placeholders, primary_key=primary_key, conflict_action=conflict_action)
def insert_query(table_name, row):
fields = ', '.join(row.keys())
placeholders = ', '.join(["%s" for v in row.values()])
sql = "INSERT INTO {table_name} ({fields}) VALUES ({values})"
return sql.format(table_name=table_name, fields=fields, values=placeholders)
def update_query(table_name, row, primary_key):
fields = ', '.join(['{key} = %s'.format(key=key) for key in row.keys()])
keys = ' AND '.join(['{key} = %s'.format(key=key) for key in primary_key.split(', ')])
sql = 'UPDATE {table_name} SET {fields} WHERE({pk});'
return sql.format(table_name=table_name, fields=fields, pk=keys)
def copy_query(table_name, columns):
return 'COPY {table_name} ({fields}) FROM STDIN WITH (format csv)'.format(table_name=table_name, fields=columns)
def build_row_values(row):
t_row = tuple(row.values())
return tuple(None if x == '' else x for x in t_row)
def build_pkey_tuple(row, pkey):
tup = tuple()
for key in pkey.split(', '):
tup = tup + (row[key],)
return tup
def batch_upsert_from_gen(model, rows, batch_size, **kwargs):
table_name = model._meta.db_table
update = kwargs['update'] if 'update' in kwargs else None
ignore_conflict = kwargs['ignore_conflict'] if 'ignore_conflict' in kwargs else None
with connection.cursor() as curs:
try:
count = 0
while True:
batch = list(itertools.islice(rows, 0, batch_size))
if len(batch) == 0:
logger.info("Database - Batch upserts completed for {}.".format(model.__name__))
if 'callback' in kwargs and kwargs['callback']:
kwargs['callback']()
break
else:
with transaction.atomic():
logger.debug("Seeding next batch for {}.".format(model.__name__))
batch_upsert_rows(model, batch, batch_size, update=update, ignore_conflict=ignore_conflict)
count = count + batch_size
logger.debug("Rows touched: {}".format(count))
except Exception as e:
logger.warning("Unable to batch upsert: {}".format(e))
raise e
# No Conflict = True means DO NOTHING on conflict. False means update on conflict.
def | (model, rows, batch_size, update=None, ignore_conflict=False):
table_name = model._meta.db_table
primary_key = model._meta.pk.name
""" Inserts many row, all in the same transaction"""
rows_length = len(rows)
with connection.cursor() as curs:
try:
starting_count = model.objects.count()
with transaction.atomic():
curs.executemany(upsert_query(table_name, rows[0], primary_key, ignore_conflict=ignore_conflict), tuple(
build_row_values(row) for row in rows))
if update:
rows_created = model.objects.count() - starting_count
update.rows_created = update.rows_created + rows_created
update.rows_updated = update.rows_updated + (rows_length - rows_created)
update.save()
except Exception as e:
logger.info('Database - error upserting rows. Doing single row upsert. - Error: {}'.format(e))
upsert_single_rows(model, rows, update=update, ignore_conflict=ignore_conflict)
def upsert_single_rows(model, rows, update=None, ignore_conflict=False):
table_name = model._meta.db_table
primary_key = model._meta.pk.name
rows_created = 0
rows_updated = 0
for row in rows:
try:
with connection.cursor() as curs:
with transaction.atomic():
curs.execute(upsert_query(table_name, row, primary_key, ignore_conflict=ignore_conflict),
build_row_values(row))
rows_updated = rows_updated + 1
rows_created = rows_created + 1
if rows_created % settings.BATCH_SIZE == 0:
logger.debug("{} - seeded {}".format(table_name, rows_created))
if update:
update.rows_created = update.rows_created + rows_created
update.rows_updated = update.rows_updated + rows_created
update.save()
rows_updated = 0
rows_updated = 0
except Exception as e:
logger.error("Database Error * - unable to upsert single record. Error: {}".format(e))
continue
if update:
update.rows_created = update.rows_created + rows_created
update.rows_updated = update.rows_updated + rows_created
update.save()
# https://djangosnippets.org/snippets/1400/
import time
import traceback
import logging
import sys
class Status(object):
def __init__(self):
self.num_successful = 0
self.failed_ids = []
self.done = False
self.cur_idx = 0
def __repr__(self):
return u'<Status: %s/%s, %s failed>' % (
getattr(self, 'cur_idx', '-'),
getattr(self, 'total', '-'),
self.num_failed)
@property
def num_failed(self): return len(self.failed_ids)
def start(self):
self.start_time = time.time()
def finished(self):
self.cur_idx = self.total
self.done = True
self.end_time = time.time()
@property
def rate(self):
if self.done:
end_time = self.end_time
else:
end_time = time.time()
return self.cur_idx / (end_time - self.start_time)
@property
def time_left(self):
rate = self.rate
if rate == 0:
return 0
return (self.total - self.cur_idx) / self.rate
def progress_callback(status):
message = '%d/%d failed=%d, rate~%.2f per second, left~%.2f sec \r' % (
status.cur_idx, status.total, status.num_failed, status.rate, status.time_left)
if status.done:
message = "DONE! - {}".format(message)
print(message)
logger.debug(message)
else:
message = "Progress - {}".format(message)
print(message)
logger.debug(message)
def queryset_foreach(queryset, f, batch_size=1000,
progress_callback=progress_callback, transaction=True):
'''
Call a function for each element in a queryset (actually, any list).
Features:
* stable memory usage (thanks to Django paginators)
* progress indicators
* wraps batches in transactions
* can take managers or even models (e.g., Assertion.objects)
* warns about DEBUG.
* handles failures of single items without dying in general.
* stable even if items are added or removed during processing
(gets a list of ids at the start)
Returns a Status object, with the following interesting attributes
total: number of items in the queryset
num_successful: count of successful items
failed_ids: list of ids of items that failed
'''
from django.conf import settings
if settings.DEBUG:
logger.debug('Warning: DEBUG is on. django.db.connection.queries may use up a lot of memory.')
# Get querysets corresponding to managers
from django.shortcuts import _get_queryset
queryset = _get_queryset(queryset)
# Get a snapshot of all the ids that match the query
logger.debug('qs4e: Getting list of objects')
ids = list(queryset.values_list(queryset.model._meta.pk.name, flat=True))
# Initialize status
status = Status()
status.total = len(ids)
def do_all_objects(objects):
from django.db import transaction
with transaction.atomic():
for id, obj in objects.items():
try:
f(obj)
status.num_successful += 1
except Exception as e: # python 2.5+: doesn't catch KeyboardInterrupt or SystemExit
logger.error(e)
status.failed_ids.append(id)
# if transaction:
# # Wrap each batch in a transaction
# with transaction.atomic():
# do_all_objects = transaction.commit_on_success(do_all_objects)
from django.core.paginator import Paginator
paginator = Paginator(ids, batch_size)
status.start()
progress_callback(status)
for page_num in paginator.page_range:
status.page = page = paginator.page(page_num)
status.cur_idx = page.start_index() - 1
progress_callback(status)
objects = queryset.in_bulk(page.object_list)
do_all_objects(objects)
status.finished()
progress_callback(status)
return status
| batch_upsert_rows | identifier_name |
image_processor_2.0.py | #!/usr/bin/python
#Team3238 Cyborg Ferrets 2014 Object Detection Code
#Start with
#python image_processor.py 'path/to/image.jpg'
#don't pass an image argument to use the VideoCapture(0) stream.
# Video capture mode updates the frame to process every video_pause milliseconds, so adjust that.
#set enable_dashboard = True to send range and bearing over smart dashboard network_tables interface.
#set show_windows = False for on-robot, no monitor processing on pandaboard.
#This code is a merge of vision_lib.py, bearing_formula.py, distance_formula.py and team341 java vision detection code from (2012?) competition.
#java -jar SmartDashboard ip 127.0.0.1, for example, will start the dashboard if running on this same host.
#Now tuned for green LEDs.
#expected camera settings (sorry no numbers on camera interface.)
# exposure -> far right
# gain -> far left
# brightness ~ 20% from left
# contrast ~ 20% from left
# color intensity ~ 18% from left
#Runtime configuration flags for dashboard reporting and on-screen debug windows.
enable_dashboard = True
show_windows = False
window_scale = 0.5
window_size = (int(640*window_scale), int(480*window_scale))
from cv2 import *
import numpy as np
import sys
import math
import commands
if enable_dashboard:
    from pynetworktables import *
    SmartDashboard.init()
#pretend the robot is on the network reporting its heading to the SmartDashboard,
# then let the SmartDashboard user modify it and send it back to this code to simulate movement.
# SmartDashboard key names. Only camera_exposure_title was defined before; the
# rest were referenced by __init__/process/aim but never defined, which raised
# NameError at runtime. The values are display labels only and must simply be
# used consistently between Put* and Get* calls.
camera_exposure_title = 'Camera Exposure:'
angle_to_robot_title = 'Angle To Robot:'
camera_offset_position_title = 'Camera Offset Position:'
morph_close_iterations_title = 'Morph Close Iterations:'
angle_to_shooter_title = 'Angle To Shooter:'
camera_color_intensity_title = 'Camera Color Intensity:'
camera_saturation_title = 'Camera Saturation:'
camera_contrast_title = 'Camera Contrast:'
camera_color_hue_title = 'Camera Color Hue:'
camera_brightness_title = 'Camera Brightness:'
camera_gain_title = 'Camera Gain:'
robot_heading_title = 'Robot Heading:'
class ImageProcessor:
    #all these values could be put into the SmartDashboard for live tuning as conditions change.
    # Working image buffers, preallocated at the expected camera resolution.
    default_shape = (480,640,3)
    h = np.zeros(default_shape, dtype=np.uint8)
    s = np.zeros(default_shape, dtype=np.uint8)
    v = np.zeros(default_shape, dtype=np.uint8)
    combined = np.zeros(default_shape, dtype=np.uint8)
    img = np.zeros(default_shape, dtype=np.uint8)
    # HighGUI window titles.
    h_title = "hue"
    s_title = "sat"
    v_title = "val"
    combined_title = "Combined + Morphed"
    targets_title = "Targets"
    #for video capture mode, what approx frame rate do we want? frame rate = approx video_pause + processing time
    video_pause = 1 #0 milliseconds means wait for key press, waitKey takes an integer so 1 millisecond is minimal with this approach.
    #tuned for the camera settings above and the green leds. (Red didn't work as well and requires changing the threshold function to use OR of inverse and normal threshold, because red is on the top and bottom of the hue scale (wraps around.).)
    # Each channel is band-passed to thresh +/- delta by threshold_in_range().
    hue_delta = 15
    sat_delta = 25
    val_delta = 100
    hue_thresh = 80
    sat_thresh = 233
    val_thresh = 212
    max_thresh = 255
    #used for the morphologyEx method that fills in the pixels in the combined image prior to identifying polygons and contours.
    kernel = getStructuringElement(MORPH_RECT, (2,2), anchor=(1,1))
    morph_close_iterations = 9
    #colors in BGR format for drawing the targets over the image.
    selected_target_color = (0,0,255)
    passed_up_target_color = (255,0,0)
    possible_target_color = (0,255,0)
    #used to judge whether a polygon side is near vertical or near horizontal, for filtering out shapes that don't match expected target characteristics
    vert_threshold = math.tan(math.radians(90-20))
    horiz_threshold = math.tan(math.radians(20))
    #used to look for only horizontal or vertical rectangles of an aspect ratio that matches the targets.
    #currently open wide to find both horizontal and vertical targets
    max_target_aspect_ratio = 10 # 1.0 # top target is expected to be 24.5 in x 4 in.
    min_target_aspect_ratio = 0.1 #0.01# 3# 0.5
    angle_to_robot = 0 #camera's 0 bearing to robot's 0 bearing
    camera_offset_position = 0
    # NOTE(review): morph_close_iterations is assigned twice (same value, 9);
    # harmless but the duplicate could be removed.
    morph_close_iterations = 9
    angle_to_shooter = 0 #camera's 0 bearing to shooter's 0 bearing
    camera_color_intensity = 0 #value subject to change
    camera_saturation = 0 #value subject to change
    camera_contrast = 0 #value subject to change
    camera_color_hue = 0 #value subject to change
    camera_brightness = 20 #value subject to change
    camera_gain = 0 #value subject to change
    camera_exposure = 20
    robot_heading = 0.0 #input from SmartDashboard if enabled, else hard coded here.
    x_resolution = 640 #needs to match the camera.
    y_resolution = 480
    #theta = math.radians(49.165) #half of field of view of the camera
    # field_of_view_degrees = 53.0 horizontal field of view
    field_of_view_degrees = 26.4382 # vertical field of view
    theta = math.radians(field_of_view_degrees/2.0) #half of field of view of the camera, in radians to work with math.tan function.
    # real_target_width = 24.5 #inches #24 * 0.0254 #1 inch / 0.254 meters target is 24 inches wide
    real_target_height = 28.5 #using these constants and may not be correct for current robot configuration.
    # NOTE(review): angle_to_shooter is also assigned twice (same value, 0).
    angle_to_shooter = 0
    #not currently using these constants and may not be correct for current robot configuration.
    # target_min_width = 20
    # target_max_width = 200
    # degrees_horiz_field_of_view = 47.0
    # degrees_vert_field_of_view = 480.0/640*degrees_horiz_field_of_view
    # inches_camera_height = 54.0
    # inches_top_target_height = 98 + 2 + 98
    # degrees_camera_pitch = 21.0
    # degrees_sighting_offset = -1.55
def __init__(self, img_path):
self.img_path = img_path
self.layout_result_windows(self.h,self.s,self.v)
self.vc = VideoCapture(0)
SmartDashboard.PutNumber(angle_to_robot_title, self.angle_to_robot)
SmartDashboard.PutNumber(camera_offset_position_title, self.camera_offset_position)
SmartDashboard.PutNumber(morph_close_iterations_title, self.morph_close_iterations)
SmartDashboard.PutNumber(angle_to_shooter_title, self.angle_to_shooter)
SmartDashboard.PutNumber(camera_color_intensity_title, self.camera_color_intensity)
SmartDashboard.PutNumber(camera_exposure_title, self.camera_exposure)
SmartDashboard.PutNumber(camera_saturation.title, self.saturation)
SmartDashboard.PutNumber(camera_contrast_title, self.contrast)
SmartDashboard.PutNumber(camera_color_hue_title, self.camera_color_hue)
SmartDashboard.PutNumber(camera_brihtness_title, self.camera_brightness)
def video_feed(self):
|
def process(self):
if enable_dashboard:
self.camera_saturation = int(SmartDashboard.GetNumber(camera_saturation_title)
self.angle_to_robot = int(SmartDashboard.GetNumber(angle_to_robot_title)
self.camera_offset_postion = int(SmartDashboard.GetNumber(camera_offset_position_title)
self.morph_close_iterations = int(SmartDashboard.GetNumber(morph_close_iterations_title)
self.angle_to_shooter = int(SmartDashboard.GetNumber(angle_to_shooter_title)
self.camera_color_intensity = int(SmartDashboard.GetNumber(camera_color_intensity_title)
self.camera_contrast = int(SmartDashboard.GetNumber(camera_contrast_title)
self.camera_color_hue = int(SmartDashboard.GetNumber(camera_color_hue_title)
self.camera_brightness = int(SmartDashboard.GetNumber(camera_brightness_title)
self.camera_exposure = int(SmartDashboard.GetNumber(camera_exposure_title)
self.camera_gain = int(SmartDashboard.GetNumber(camera_gain_title)
if self.img_path is None:
commands.getoutput(" yavta --set-control '0x009a0901 1' /dev/video0")
#print(commands.getoutput(" yavta --get-control '0x009a0901' /dev/video0") )
commands.getoutput("yavta --set-control '0x009a0902 %s' /dev/video0" % self.camera_exposure)
#print(commands.getoutput(" yavta --get-control '0x009a0902' /dev/video0"))
drawing = np.zeros(self.img.shape, dtype=np.uint8)
self.hsv = cvtColor(self.img, cv.CV_BGR2HSV)
self.h, self.s, self.v = split(self.hsv)
self.h_clipped = self.threshold_in_range(self.h, self.hue_thresh-self.hue_delta, self.hue_thresh+self.hue_delta)
self.s_clipped = self.threshold_in_range(self.s, self.sat_thresh-self.sat_delta, self.sat_thresh+self.sat_delta)
self.v_clipped = self.threshold_in_range(self.v, self.val_thresh-self.val_delta, self.val_thresh+self.val_delta)
if show_windows:
h_scaled = resize(self.h_clipped, window_size)
s_scaled = resize(self.s_clipped, window_size)
v_scaled = resize(self.v_clipped, window_size)
imshow(self.h_title, h_scaled)
imshow(self.s_title, s_scaled)
imshow(self.v_title, v_scaled)
self.find_targets()
if waitKey(self.video_pause) == ord('q'):
exit(1)
    def layout_result_windows(self, h, s, v):
        """Create the debug windows once at startup (no-op when show_windows is False).

        h, s, v: initial channel images used to size/populate the windows.
        """
        if show_windows:
            pos_x, pos_y = 500,500
            # imshow(self.img_path, self.img)
            # Show scaled-down copies so all five windows fit on one monitor.
            h_scaled = resize(h, window_size)
            s_scaled = resize(s, window_size)
            v_scaled = resize(v, window_size)
            combined_scaled = resize(self.combined, window_size)
            img_scaled = resize(self.img, window_size)
            imshow(self.h_title , h_scaled)
            imshow(self.s_title , s_scaled)
            imshow(self.v_title , v_scaled)
            imshow(self.combined_title, combined_scaled)
            imshow(self.targets_title , img_scaled)
            #moveWindow(self.h_title, pos_x*1, pos_y*0);
            #moveWindow(self.s_title, pos_x*0, pos_y*1);
            #moveWindow(self.v_title, pos_x*1, pos_y*1);
            #moveWindow(self.combined_title, pos_x*2, pos_y*0);
            #moveWindow(self.targets_title, pos_x*2, pos_y*1);
            #these seem to be placed alphabetically....
            # createTrackbar( "Hue High Threshold:", self.source_title, self.hue_high_thresh, self.max_thresh, self.update_hue_high_threshold);
            # createTrackbar( "Hue Low Threshold:", self.source_title, self.hue_low_thresh, self.max_thresh, self.update_hue_low_threshold);
            # createTrackbar( "Sat High Threshold:", self.source_title, self.sat_high_thresh, self.max_thresh, self.update_sat_high_threshold);
            # createTrackbar( "Sat Low Threshold:", self.source_title, self.sat_low_thresh, self.max_thresh, self.update_sat_low_threshold);
            # createTrackbar( "Val High Threshold:", self.source_title, self.val_high_thresh, self.max_thresh, self.update_val_high_threshold);
            # createTrackbar( "Val Low Threshold:", self.source_title, self.val_low_thresh, self.max_thresh, self.update_val_low_threshold);
def update_hue_threshold(self, thresh):
delta = 15
self.h_clipped = self.threshold_in_range(self.h, thresh-delta, thresh+delta)
imshow(self.h_title, self.h_clipped)
self.find_targets()
def update_sat_threshold(self, thresh):
delta = 25
self.s_clipped = self.threshold_in_range(self.s, thresh-delta, thresh+delta)
imshow(self.s_title, self.s_clipped)
self.find_targets()
def update_val_threshold(self, thresh):
delta = 100
self.v_clipped = self.threshold_in_range(self.v, thresh-delta, thresh+delta)
imshow(self.v_title, self.v_clipped)
self.find_targets()
def threshold_in_range(self, img, low, high):
unused, above = threshold(img, low, self.max_thresh, THRESH_BINARY)
unused, below = threshold(img, high, self.max_thresh, THRESH_BINARY_INV)
return bitwise_and(above, below)
    def find_targets(self):
        """Combine the clipped H/S/V masks, extract contours, filter them into
        candidate target polygons, pick the lowest one in the frame, and aim.
        """
        #combine all the masks together to get their overlapping regions.
        if True:
            self.reset_targeting()
            self.combined = bitwise_and(self.h_clipped, bitwise_and(self.s_clipped, self.v_clipped))
            #comment above line and uncomment next line to ignore hue channel til we sort out red light hue matching around zero.
            #self.combined = bitwise_and(self.s_clipped, self.v_clipped)
            # Morphological close fills small holes so contours come out solid.
            self.combined = morphologyEx(src=self.combined, op=MORPH_CLOSE, kernel=self.kernel, iterations=self.morph_close_iterations)
            if show_windows:
                combined_scaled = resize(self.combined, window_size)
                imshow(self.combined_title, combined_scaled )
            # findContours mutates its input, so work on a copy.
            self.contoured = self.combined.copy()
            contours, heirarchy = findContours(self.contoured, RETR_LIST, CHAIN_APPROX_TC89_KCOS)
            #print("number of contours found = "+str(len(contours)))
            #contours = [convexHull(c.astype(np.float32),clockwise=True,returnPoints=True) for c in contours]
            #
            polygon_tuples = self.contours_to_polygon_tuples(contours)
            polygons = [self.unpack_polygon(t) for t in polygon_tuples]
            # Marking also selects the lowest-in-frame candidate as the target.
            for polygon_tuple in polygon_tuples:
                self.mark_correct_shape_and_orientation(polygon_tuple)
            if self.selected_target is not None:
                self.draw_target(self.lowest_found_so_far_x, self.lowest_found_so_far, self.selected_target_color)
                drawContours(self.drawing, contours, -1, self.selected_target_color, thickness=10)
                # drawContours(self.drawing, [self.unpack_polygon(self.selected_target).astype(np.int32)], -1, self.selected_target_color, thickness=10)
                self.aim()
            if show_windows:
                drawing_scaled = resize(self.drawing, window_size)
                imshow(self.targets_title, drawing_scaled)
            if enable_dashboard:
                SmartDashboard.PutNumber("Potential Targets:", len(polygons))
            print("Potential Targets:", len(polygons))
    def aim(self):
        """Compute range/bearing to the selected target and report them.

        Precondition: self.selected_target is not None (find_targets only
        calls this after a target has been selected).
        """
        if enable_dashboard:
            # NOTE(review): robot_heading_title is not defined anywhere in this
            # file — confirm it exists, otherwise this raises NameError.
            self.robot_heading = SmartDashboard.GetNumber(robot_heading_title)
        polygon, x, y, w, h = self.selected_target
        self.target_bearing = self.get_bearing(x + w/2.0)
        self.target_range = self.get_range(x, y, w, h)
        # Elevation is disabled; target_elevation keeps the value set by
        # reset_targeting (0) but is still published below.
        #self.target_elevation = self.get_elevation(x, y, w, h)
        print("Range = " + str(self.target_range))
        print("Bearing = " + str(self.target_bearing))
        if enable_dashboard:
            SmartDashboard.PutNumber("Target Range:", self.target_range)
            SmartDashboard.PutNumber("Target Bearing:", self.target_bearing)
            SmartDashboard.PutNumber("Target Elevation:",self.target_elevation)
            SmartDashboard.PutString("Target: ","Acquired!")
def get_bearing(self, target_center_x):
return (self.field_of_view_degrees/self.x_resolution)*(target_center_x-(self.x_resolution/2))-self.angle_to_shooter
def get_range(self, x, y, w, h):
if enable_dashboard:
SmartDashboard.PutNumber("TargetWidth: ",w)
SmartDashboard.PutNumber("TargetHeight",h)
SmartDashboard.PutNumber("TargetX",x)
SmartDashboard.PutNumber("TargetY",y)
return self.distance(h)
    def distance(self, pix_height):
        """Estimate range (inches) from the target's pixel height via the
        pinhole relation: range = H_real * y_res / (2 * h_pix * tan(theta)).
        """
        # NOTE(review): fovr is computed from x_resolution while the returned
        # range uses y_resolution — looks inconsistent, but fovr is only sent
        # to the dashboard for diagnostics, so behavior is unaffected. Confirm.
        fovr = self.x_resolution * self.real_target_height / pix_height
        if enable_dashboard:
            SmartDashboard.PutNumber("FieldOfViewReal", fovr) # = 2w_real
            SmartDashboard.PutNumber("TanTheta", math.tan(self.theta))
            SmartDashboard.PutNumber("fovr/tan(theta)", fovr/math.tan(self.theta))
        return self.real_target_height*self.y_resolution/(2*pix_height*math.tan(self.theta))
    def reset_targeting(self):
        """Clear all per-frame targeting state before a new detection pass."""
        if enable_dashboard:
            SmartDashboard.PutString("Target: ","lost...")
        # Fresh copy of the frame to draw overlays on.
        self.drawing = self.img.copy()
        self.selected_target = None
        self.lowest_found_so_far_x = None
        # Candidate selection keeps the target with the largest center_y.
        self.lowest_found_so_far = 0
        self.target_range = 0
        self.target_bearing = -1
        self.target_elevation = 0
    def mark_correct_shape_and_orientation(self, polygon_tuple):
        """Draw a candidate marker and keep this polygon as the selected target
        if its center is lower in the frame than any seen so far.

        polygon_tuple: (polygon, x, y, w, h) bounding-box tuple.
        """
        p,x,y,w,h = polygon_tuple
        # NOTE(review): the shape/orientation filter is bypassed with 'if True:'
        # (debug toggle); the else branch below is currently unreachable.
        if True: #isContourConvex(p) and 4==len(p) and self.slope_angles_correct(p):
            center_x = int(x + w/2.0)
            center_y = int(y + h/2.0)
            self.draw_target(center_x, center_y, self.possible_target_color)
            # Larger y means lower in the image (image origin is top-left).
            if center_y > self.lowest_found_so_far:
                self.selected_target = polygon_tuple
                self.lowest_found_so_far = center_y
                self.lowest_found_so_far_x = center_x
        else:
            drawContours(self.drawing, [p.astype(np.int32)], -1, self.passed_up_target_color, thickness=7)
def draw_target(self, center_x, center_y, a_color):
#circle(self.drawing,(center_x, center_y), radius=10, color=self.selected_target_color, thickness=5)
radius = 10
a_thickness = 5
line(self.drawing, (center_x - radius, center_y), (center_x + radius, center_y), color=a_color, thickness=a_thickness)
line(self.drawing, (center_x, center_y-radius), (center_x, center_y+radius), color=a_color, thickness=a_thickness)
def slope_angles_correct(self, polygon):
num_near_vert, num_near_horiz = 0,0
for line_starting_point_index in xrange(0,4):
slope = self.get_slope(polygon, line_starting_point_index)
if slope < self.horiz_threshold:
num_near_horiz += 1
if slope > self.vert_threshold:
num_near_vert += 1
return 1 <= num_near_horiz and 2 == num_near_vert
def get_slope(self, p, line_starting_point_index):
line_ending_point_index = (line_starting_point_index+1)%4
dy = p[line_starting_point_index, 0, 1] - p[line_ending_point_index, 0, 1]
dx = p[line_starting_point_index, 0, 0] - p[line_ending_point_index, 0, 0]
slope = sys.float_info.max
if 0 != dx:
slope = abs(float(dy)/dx)
return slope
def unpack_polygon(self,t):
p,x,y,w,h = t
return p
def contours_to_polygon_tuples(self, contours):
polygon_tuples = []
for c in contours:
x, y, w, h = boundingRect(c)
if self.aspect_ratio_and_size_correct(w,h):
p = approxPolyDP(c, 20, False)
polygon_tuples.append((p,x,y,w,h))
return polygon_tuples
def aspect_ratio_and_size_correct(self, width, height):
ratio = float(width)/height #float(height)/width
return ratio < self.max_target_aspect_ratio and ratio > self.min_target_aspect_ratio #and width > self.target_min_width and width < self.target_max_width
#note: we don't want to ignore potential targets based on pixel width and height since range will change the pixel coverage of a real target.
if '__main__'==__name__:
    # Optional CLI argument: path to a still image; omit it to use the camera stream.
    try:
        img_path = sys.argv[1]
    except IndexError:
        # Was a bare 'except:', which also swallowed SystemExit/KeyboardInterrupt;
        # only a missing argv[1] should fall back to camera mode.
        img_path = None
    ImageProcessor(img_path).video_feed()
| while True:
if self.img is not None:
self.process()
if self.img_path is None:
rval, self.img = self.vc.read() #might set to None
else:
self.img = imread(self.img_path) | identifier_body |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.