text stringlengths 8 4.13M |
|---|
/// Namespace type for LeetCode-style solutions.
pub struct Solution;

impl Solution {
    /// LeetCode 310 — Minimum Height Trees.
    ///
    /// Repeatedly peels the current layer of leaves off the tree; the last
    /// one or two remaining nodes are the centroids, i.e. the roots of all
    /// minimum-height trees.
    ///
    /// Because every node in a tree has its neighbours removed one at a time,
    /// the XOR of a node's neighbour ids ends up holding its single remaining
    /// neighbour once its degree drops to 1 — no adjacency lists needed.
    pub fn find_min_height_trees(n: i32, edges: Vec<Vec<i32>>) -> Vec<i32> {
        // With at most two nodes, every node is a centroid.
        if n <= 2 {
            return (0..n).collect();
        }
        let n = n as usize;
        // degree[v]: number of incident edges; xor_sum[v]: XOR of neighbour ids.
        let mut degree: Vec<usize> = vec![0; n];
        let mut xor_sum: Vec<usize> = vec![0; n];
        for edge in &edges {
            let (a, b) = (edge[0] as usize, edge[1] as usize);
            degree[a] += 1;
            degree[b] += 1;
            xor_sum[a] ^= b;
            xor_sum[b] ^= a;
        }
        // Seed with the initial leaves, then peel layer by layer.
        let mut layer: Vec<usize> = (0..n).filter(|&v| degree[v] == 1).collect();
        let mut remaining = n;
        while remaining > 2 {
            remaining -= layer.len();
            let mut next_layer = Vec::new();
            for leaf in layer {
                // A leaf's XOR accumulator holds its sole surviving neighbour.
                let parent = xor_sum[leaf];
                xor_sum[parent] ^= leaf;
                degree[parent] -= 1;
                if degree[parent] == 1 {
                    next_layer.push(parent);
                }
            }
            layer = next_layer;
        }
        layer.into_iter().map(|v| v as i32).collect()
    }
}
#[test]
fn test0310() {
    // Sort the returned roots so the comparison is order-insensitive;
    // the expected vectors are already sorted.
    fn check(n: i32, edges: Vec<Vec<i32>>, expected: Vec<i32>) {
        let mut roots = Solution::find_min_height_trees(n, edges);
        roots.sort();
        assert_eq!(roots, expected);
    }
    check(4, vec![vec![1, 0], vec![1, 2], vec![1, 3]], vec![1]);
    check(
        6,
        vec![vec![0, 3], vec![1, 3], vec![2, 3], vec![4, 3], vec![5, 4]],
        vec![3, 4],
    );
    check(1, vec![], vec![0]);
}
|
use std::ops::{ Index, Mul };
use ::vectors::{ AffineVector, Vector };
/// Symbolic index into a 4x4 affine matrix.
///
/// The named variants address individual cells: the letter is the column
/// (i, j, k, w) and the digit is the 1-based row. `Row(n)` and `Column(n)`
/// carry a flat 0..16 index in row-major and column-major order respectively.
pub enum Cell {
    I1, J1, K1, W1,
    I2, J2, K2, W2,
    I3, J3, K3, W3,
    I4, J4, K4, W4,
    Row(u8),
    Column(u8),
}

impl Cell {
    /// Converts any cell reference into its column-major `Cell::Column` form,
    /// where the flat index is `column * 4 + row` (both zero-based).
    pub fn to_column(&self) -> Cell {
        match self {
            &Cell::I1 => Cell::Column(0), &Cell::J1 => Cell::Column(4), &Cell::K1 => Cell::Column(8), &Cell::W1 => Cell::Column(12),
            &Cell::I2 => Cell::Column(1), &Cell::J2 => Cell::Column(5), &Cell::K2 => Cell::Column(9), &Cell::W2 => Cell::Column(13),
            &Cell::I3 => Cell::Column(2), &Cell::J3 => Cell::Column(6), &Cell::K3 => Cell::Column(10), &Cell::W3 => Cell::Column(14),
            // BUG FIX: K4 previously mapped to Column(14), which is W3's slot.
            // K4 sits in column 2 (k), row 3 (zero-based), so its column-major
            // index is 2 * 4 + 3 = 11.
            &Cell::I4 => Cell::Column(3), &Cell::J4 => Cell::Column(7), &Cell::K4 => Cell::Column(11), &Cell::W4 => Cell::Column(15),
            &Cell::Column(i) => Cell::Column(i),
            // Row-major index r*4+c becomes column-major c*4+r:
            // (i * 4) % 16 recovers 4c and i / 4 recovers r.
            &Cell::Row(i) => Cell::Column((i * 4 % 16) + (i / 4))
        }
    }
}
#[derive(Debug)]
#[derive(PartialEq)]
/// A 4x4 matrix of `f64` for affine (homogeneous-coordinate) transforms.
///
/// Field naming: the letter is the column (i, j, k, w) and the digit is the
/// 1-based row — e.g. `k3` is column k, row 3. Row 1 is (i1, j1, k1, w1).
pub struct AffineMatrix {
i1 : f64, j1 : f64, k1 : f64, w1 : f64,
i2 : f64, j2 : f64, k2 : f64, w2 : f64,
i3 : f64, j3 : f64, k3 : f64, w3 : f64,
i4 : f64, j4 : f64, k4 : f64, w4 : f64
}
impl AffineMatrix {
/// Returns the 1-based `column` (1..=4) of the matrix as an `AffineVector`.
/// Panics via the `Index` impl if `column` is outside 1..=4.
pub fn cvec(&self, column : u8) -> AffineVector {
let start = (column - 1) * 4;
AffineVector::new(
self[Cell::Column(start + 0)],
self[Cell::Column(start + 1)],
self[Cell::Column(start + 2)],
self[Cell::Column(start + 3)])
}
/// Returns the 1-based `row` (1..=4) of the matrix as an `AffineVector`.
/// Panics via the `Index` impl if `row` is outside 1..=4.
pub fn rvec(&self, row : u8) -> AffineVector {
let start = (row - 1) * 4;
AffineVector::new(
self[Cell::Row(start + 0)],
self[Cell::Row(start + 1)],
self[Cell::Row(start + 2)],
self[Cell::Row(start + 3)])
}
/// Matrix product of `self` and `m`.
///
/// NOTE(review): each result cell dots a *column* of `self` with a *row* of
/// `m`; in conventional row(self)·column(m) notation that computes m·self,
/// not self·m. Confirm the intended operand order against callers before
/// changing anything here.
pub fn multiply(&self, m : AffineMatrix) -> AffineMatrix {
let c1 = self.cvec(1);
let c2 = self.cvec(2);
let c3 = self.cvec(3);
let c4 = self.cvec(4);
let r1 = m.rvec(1);
let r2 = m.rvec(2);
let r3 = m.rvec(3);
let r4 = m.rvec(4);
AffineMatrix {
i1: c1.dot(r1), j1: c2.dot(r1), k1: c3.dot(r1), w1: c4.dot(r1),
i2: c1.dot(r2), j2: c2.dot(r2), k2: c3.dot(r2), w2: c4.dot(r2),
i3: c1.dot(r3), j3: c2.dot(r3), k3: c3.dot(r3), w3: c4.dot(r3),
i4: c1.dot(r4), j4: c2.dot(r4), k4: c3.dot(r4), w4: c4.dot(r4),
}
}
/// Applies the matrix to a homogeneous 4-vector: each output component is
/// the dot product of the corresponding matrix row with `a`.
pub fn apply_affine(&self, a : AffineVector) -> AffineVector {
AffineVector::new(self.rvec(1).dot(a), self.rvec(2).dot(a), self.rvec(3).dot(a), self.rvec(4).dot(a))
}
/// Applies the matrix to a 3-vector by lifting it to (x, y, z, 1) and
/// dropping the resulting w component.
/// NOTE(review): no perspective divide — assumes the bottom row is
/// (0, 0, 0, 1), i.e. a purely affine matrix. Confirm for callers that
/// build projective matrices.
pub fn apply_vec3(&self, v : Vector) -> Vector {
let a = self.apply_affine(AffineVector::new(v.x(), v.y(), v.z(), 1.));
Vector::new(a.x(), a.y(), a.z())
}
/// General 4x4 inverse via 2x2 sub-determinants (cofactor/adjugate method):
/// s0..s5 come from the top two rows, c0..c5 from the bottom two.
/// NOTE(review): no singularity check — for det == 0, `d` is infinite and
/// the result is inf/NaN-filled.
pub fn inverse(&self) -> AffineMatrix {
let m = self;
let s0 = m.i1 * m.j2 - m.i2 * m.j1;
let s1 = m.i1 * m.k2 - m.i2 * m.k1;
let s2 = m.i1 * m.w2 - m.i2 * m.w1;
let s3 = m.j1 * m.k2 - m.j2 * m.k1;
let s4 = m.j1 * m.w2 - m.j2 * m.w1;
let s5 = m.k1 * m.w2 - m.k2 * m.w1;
let c5 = m.k3 * m.w4 - m.k4 * m.w3;
let c4 = m.j3 * m.w4 - m.j4 * m.w3;
let c3 = m.j3 * m.k4 - m.j4 * m.k3;
let c2 = m.i3 * m.w4 - m.i4 * m.w3;
let c1 = m.i3 * m.k4 - m.i4 * m.k3;
let c0 = m.i3 * m.j4 - m.i4 * m.j3;
// Reciprocal of the determinant, expanded in terms of the sub-determinants.
let d = 1.0 / (s0 * c5 - s1 * c4 + s2 * c3 + s3 * c2 - s4 * c1 + s5 * c0);
AffineMatrix {
i1: ( m.j2 * c5 - m.k2 * c4 + m.w2 * c3) * d,
j1: (-m.j1 * c5 + m.k1 * c4 - m.w1 * c3) * d,
k1: ( m.j4 * s5 - m.k4 * s4 + m.w4 * s3) * d,
w1: (-m.j3 * s5 + m.k3 * s4 - m.w3 * s3) * d,
i2: (-m.i2 * c5 + m.k2 * c2 - m.w2 * c1) * d,
j2: ( m.i1 * c5 - m.k1 * c2 + m.w1 * c1) * d,
k2: (-m.i4 * s5 + m.k4 * s2 - m.w4 * s1) * d,
w2: ( m.i3 * s5 - m.k3 * s2 + m.w3 * s1) * d,
i3: ( m.i2 * c4 - m.j2 * c2 + m.w2 * c0) * d,
j3: (-m.i1 * c4 + m.j1 * c2 - m.w1 * c0) * d,
k3: ( m.i4 * s4 - m.j4 * s2 + m.w4 * s0) * d,
w3: (-m.i3 * s4 + m.j3 * s2 - m.w3 * s0) * d,
i4: (-m.i2 * c3 + m.j2 * c1 - m.k2 * c0) * d,
j4: ( m.i1 * c3 - m.j1 * c1 + m.k1 * c0) * d,
k4: (-m.i4 * s3 + m.j4 * s1 - m.k4 * s0) * d,
w4: ( m.i3 * s3 - m.j3 * s1 + m.k3 * s0) * d,
}
}
/// Builds a matrix from 16 values in row-major order.
/// Panics if `array` has fewer than 16 elements.
pub fn from_row_major(array : Vec<f64>) -> AffineMatrix {
AffineMatrix {
i1: array[0], j1: array[1], k1: array[2], w1: array[3],
i2: array[4], j2: array[5], k2: array[6], w2: array[7],
i3: array[8], j3: array[9], k3: array[10],w3: array[11],
i4: array[12],j4: array[13],k4: array[14],w4: array[15],
}
}
/// Builds a matrix from 16 values in column-major order.
/// Panics if `array` has fewer than 16 elements.
pub fn from_column_major(array : Vec<f64>) -> AffineMatrix {
AffineMatrix {
i1: array[0], j1: array[4], k1: array[8], w1: array[12],
i2: array[1], j2: array[5], k2: array[9], w2: array[13],
i3: array[2], j3: array[6], k3: array[10],w3: array[14],
i4: array[3], j4: array[7], k4: array[11],w4: array[15],
}
}
// NOTE(review): the constructors below use non-snake_case names, which is
// unconventional Rust; renaming would break callers, so they are kept as-is.
/// The all-zero matrix.
pub fn Zero() -> AffineMatrix {
AffineMatrix::from_row_major(vec![0.0;16])
}
/// The 4x4 identity matrix.
pub fn Identity() -> AffineMatrix {
AffineMatrix {
i1: 1., j1: 0., k1: 0., w1: 0.,
i2: 0., j2: 1., k2: 0., w2: 0.,
i3: 0., j3: 0., k3: 1., w3: 0.,
i4: 0., j4: 0., k4: 0., w4: 1.,
}
}
/// Translation by (x, y, z), stored in the w column.
pub fn Translation(x : f64, y : f64, z : f64) -> AffineMatrix {
AffineMatrix {
i1: 1., j1: 0., k1: 0., w1: x ,
i2: 0., j2: 1., k2: 0., w2: y ,
i3: 0., j3: 0., k3: 1., w3: z ,
i4: 0., j4: 0., k4: 0., w4: 1.,
}
}
/// Rotation by `theta` radians about the X axis (canonical Rx layout).
pub fn RotationX(theta : f64) -> AffineMatrix {
let c = theta.cos();
let s = theta.sin();
AffineMatrix {
i1: 1., j1: 0., k1: 0., w1: 0.,
i2: 0., j2: c , k2: -s, w2: 0.,
i3: 0., j3: s , k3: c , w3: 0.,
i4: 0., j4: 0., k4: 0., w4: 1.,
}
}
/// Rotation by `theta` radians about the Y axis (canonical Ry layout).
pub fn RotationY(theta : f64) -> AffineMatrix {
let c = theta.cos();
let s = theta.sin();
AffineMatrix {
i1: c , j1: 0., k1: s , w1: 0.,
i2: 0., j2: 1., k2: 0., w2: 0.,
i3: -s, j3: 0., k3: c , w3: 0.,
i4: 0., j4: 0., k4: 0., w4: 1.,
}
}
/// Rotation by `theta` radians about the Z axis (canonical Rz layout).
pub fn RotationZ(theta : f64) -> AffineMatrix {
let c = theta.cos();
let s = theta.sin();
AffineMatrix {
i1: c , j1: -s, k1: 0., w1: 0.,
i2: s , j2: c , k2: 0., w2: 0.,
i3: 0., j3: 0., k3: 1., w3: 0.,
i4: 0., j4: 0., k4: 0., w4: 1.,
}
}
/// Per-axis scale by (x, y, z).
pub fn Scale(x : f64, y : f64, z : f64) -> AffineMatrix {
AffineMatrix {
i1: x , j1: 0., k1: 0., w1: 0.,
i2: 0., j2: y , k2: 0., w2: 0.,
i3: 0., j3: 0., k3: z , w3: 0.,
i4: 0., j4: 0., k4: 0., w4: 1.,
}
}
/// Uniform scale by `s` on all three axes.
pub fn UniformScale(s : f64) -> AffineMatrix {
AffineMatrix {
i1: s , j1: 0., k1: 0., w1: 0.,
i2: 0., j2: s , k2: 0., w2: 0.,
i3: 0., j3: 0., k3: s , w3: 0.,
i4: 0., j4: 0., k4: 0., w4: 1.,
}
}
/// Returns the transpose (rows and columns swapped).
pub fn transpose(&self) -> AffineMatrix {
AffineMatrix {
i1: self.i1, j1: self.i2, k1: self.i3, w1: self.i4,
i2: self.j1, j2: self.j2, k2: self.j3, w2: self.j4,
i3: self.k1, j3: self.k2, k3: self.k3, w3: self.k4,
i4: self.w1, j4: self.w2, k4: self.w3, w4: self.w4
}
}
/// Returns the 16 cells flattened in row-major order
/// (inverse of `from_row_major`).
pub fn as_row_major_vec(&self) -> Vec<f64> {
vec![
self.i1, self.j1, self.k1, self.w1,
self.i2, self.j2, self.k2, self.w2,
self.i3, self.j3, self.k3, self.w3,
self.i4, self.j4, self.k4, self.w4
]
}
}
impl Index<Cell> for AffineMatrix {
type Output = f64;
/// Cell lookup. Named variants address fields directly; `Column(n)` uses
/// column-major flat indices (n = column * 4 + row) and `Row(n)` row-major
/// ones (n = row * 4 + column). Panics for any flat index >= 16.
fn index(&self, c : Cell) -> &f64 {
match c {
Cell::I1 => &self.i1, Cell::I2 => &self.i2, Cell::I3 => &self.i3, Cell::I4 => &self.i4,
Cell::J1 => &self.j1, Cell::J2 => &self.j2, Cell::J3 => &self.j3, Cell::J4 => &self.j4,
Cell::K1 => &self.k1, Cell::K2 => &self.k2, Cell::K3 => &self.k3, Cell::K4 => &self.k4,
Cell::W1 => &self.w1, Cell::W2 => &self.w2, Cell::W3 => &self.w3, Cell::W4 => &self.w4,
// Column-major: each group of four consecutive indices walks down one column.
Cell::Column(0) => &self.i1, Cell::Column(4) => &self.j1, Cell::Column(8) => &self.k1, Cell::Column(12) => &self.w1,
Cell::Column(1) => &self.i2, Cell::Column(5) => &self.j2, Cell::Column(9) => &self.k2, Cell::Column(13) => &self.w2,
Cell::Column(2) => &self.i3, Cell::Column(6) => &self.j3, Cell::Column(10) =>&self.k3, Cell::Column(14) => &self.w3,
Cell::Column(3) => &self.i4, Cell::Column(7) => &self.j4, Cell::Column(11) =>&self.k4, Cell::Column(15) => &self.w4,
Cell::Column(_) => panic!("Matrix Index out of bounds"),
// Row-major: each group of four consecutive indices walks along one row.
Cell::Row(0) => &self.i1, Cell::Row(1) => &self.j1, Cell::Row(2) => &self.k1, Cell::Row(3) => &self.w1,
Cell::Row(4) => &self.i2, Cell::Row(5) => &self.j2, Cell::Row(6) => &self.k2, Cell::Row(7) => &self.w2,
Cell::Row(8) => &self.i3, Cell::Row(9) => &self.j3, Cell::Row(10) =>&self.k3, Cell::Row(11) =>&self.w3,
Cell::Row(12) =>&self.i4, Cell::Row(13) =>&self.j4, Cell::Row(14) =>&self.k4, Cell::Row(15) =>&self.w4,
Cell::Row(_) => panic!("Matrix Index out of bounds"),
}
}
}
impl Index<i32> for AffineMatrix {
    type Output = f64;

    /// Row-major flat lookup: `idx = row * 4 + column` (both zero-based).
    /// Panics for any index outside 0..16.
    fn index(&self, idx: i32) -> &f64 {
        match idx {
            // Row 1
            0 => &self.i1, 1 => &self.j1, 2 => &self.k1, 3 => &self.w1,
            // Row 2
            4 => &self.i2, 5 => &self.j2, 6 => &self.k2, 7 => &self.w2,
            // Row 3
            8 => &self.i3, 9 => &self.j3, 10 => &self.k3, 11 => &self.w3,
            // Row 4
            12 => &self.i4, 13 => &self.j4, 14 => &self.k4, 15 => &self.w4,
            _ => panic!("Matrix Index out of bounds"),
        }
    }
}
/// `a * b` delegates to [`AffineMatrix::multiply`].
impl Mul for AffineMatrix {
    type Output = AffineMatrix;

    fn mul(self, rhs: AffineMatrix) -> AffineMatrix {
        self.multiply(rhs)
    }
}
/// `matrix * vector` applies the transform to a 3-vector
/// (lifted to w = 1 internally by `apply_vec3`).
impl Mul<Vector> for AffineMatrix {
    type Output = Vector;

    fn mul(self, rhs: Vector) -> Vector {
        self.apply_vec3(rhs)
    }
}
impl Mul<AffineVector> for AffineMatrix {
type Output = AffineVector;
fn mul(self, v : AffineVector) -> AffineVector {
self.apply_affine(v)
}
} |
use glib::object::IsA;
use glib::translate::*;
use glib_sys::gpointer;
use libc::c_void;
use crate::AsNativeVTable;
use crate::ChildOf;
use crate::Class;
use crate::ClassExt;
use crate::Context;
use crate::MetaClass;
use crate::NativeClass;
use crate::Value;
/// Hand-written extensions for [`Context`] that the code generator cannot
/// express (out-parameters, generic native instances, vtables).
pub trait ContextExtManual: 'static {
/// Evaluates `code` with an optional native `object_instance`/`object_class`
/// bound into the execution scope; `uri` and `line_number` are used for
/// error reporting. Returns the evaluation result plus the scope object
/// created for the call.
fn evaluate_in_object<T>(
&self,
code: &str,
object_instance: Option<T>,
object_class: Option<&Class<T>>,
uri: &str,
line_number: u32,
) -> (Value, Value);
/// Registers a new JavaScript class named `name`, optionally backed by a
/// native vtable for property/method dispatch.
fn register_class<T>(&self, name: &str, vtable: Option<&dyn AsNativeVTable<T>>) -> Class<T>;
/// Registers `name` as a subclass of `parent_class`; `Child` must be able
/// to expose a `Parent` view of itself (see `ChildOf`).
fn register_subclass<'a, Child: ChildOf<Parent>, Parent: 'a>(
&self,
name: &str,
parent_class: &Class<Parent>,
vtable: Option<&dyn AsNativeVTable<Child>>,
) -> Class<Child>;
}
impl<O: IsA<Context>> ContextExtManual for O {
fn evaluate_in_object<T>(
&self,
code: &str,
object_instance: Option<T>,
object_class: Option<&Class<T>>,
uri: &str,
line_number: u32,
) -> (Value, Value) {
// Out-parameter the C call fills with the newly created scope object.
let mut new_object: *mut javascriptcore_sys::JSCValue = std::ptr::null_mut() as _;
let out_param: *mut *mut javascriptcore_sys::JSCValue = &mut new_object as _;
let length = code.len() as isize;
// Ownership of the boxed instance is handed to the JS engine; it is
// reclaimed by the destroy-notify registered with the class
// (see `register_class`). Null when no instance is supplied.
let instance: gpointer = object_instance.map_or_else(std::ptr::null_mut, |o| {
let b = Box::new(o);
Box::into_raw(b) as _
});
unsafe {
let result: Value = from_glib_full(javascriptcore_sys::jsc_context_evaluate_in_object(
self.as_ref().to_glib_none().0,
code.to_glib_none().0,
length,
instance,
object_class.map_or(std::ptr::null_mut(), |o| o.to_glib_none().0),
uri.to_glib_none().0,
line_number,
out_param,
));
// NOTE(review): the result is taken with full transfer, but the out-param
// object only as a borrow — confirm against the C API's transfer
// annotations for jsc_context_evaluate_in_object.
(result, from_glib_borrow(new_object))
}
}
fn register_class<T>(&self, name: &str, vtable: Option<&dyn AsNativeVTable<T>>) -> Class<T> {
// Called by the engine when a JS wrapper dies: frees the boxed native
// instance created in `evaluate_in_object` (or by constructors).
unsafe extern "C" fn destroy_notify_func<T>(data: glib_sys::gpointer) {
let _instance = Box::from_raw(data as *mut T);
}
let destroy_call = Some(destroy_notify_func::<T> as _);
unsafe {
// NOTE(review): the vtable box is intentionally never reclaimed — the
// registered class lives for the lifetime of the context, so this is a
// deliberate leak; confirm if contexts are created/destroyed in a loop.
let b = vtable.map(|v| {
let boxed = Box::new(v.as_vtable());
Box::into_raw(boxed) as *mut javascriptcore_sys::JSCClassVTable
});
let ptr = javascriptcore_sys::jsc_context_register_class(
self.as_ref().to_glib_none().0,
name.to_glib_none().0,
std::ptr::null_mut(),
b.map_or(std::ptr::null_mut(), |x| x),
destroy_call,
);
let class: NativeClass = from_glib_borrow(ptr);
// Root class: the metaclass chain starts with an identity pointer cast.
Class::wrap(class, vec![MetaClass::new(ptr, |input| input)])
}
}
fn register_subclass<'a, Child: ChildOf<Parent>, Parent: 'a>(
&self,
name: &str,
parent_class: &Class<Parent>,
vtable: Option<&dyn AsNativeVTable<Child>>,
) -> Class<Child> {
// Same ownership contract as in `register_class`, monomorphised for Child.
unsafe extern "C" fn destroy_notify_func<T>(data: glib_sys::gpointer) {
let _instance = Box::from_raw(data as *mut T);
}
let destroy_call = Some(destroy_notify_func::<Child> as _);
let parent = parent_class.to_glib_none().0;
// Extend the parent's metaclass chain with this subclass's extractor.
let mut newvec = parent_class.class_list.to_vec();
unsafe {
// NOTE(review): vtable box deliberately leaked, as in `register_class`.
let b = vtable.map(|v| {
let boxed = Box::new(v.as_vtable());
Box::into_raw(boxed) as *mut javascriptcore_sys::JSCClassVTable
});
let ptr = javascriptcore_sys::jsc_context_register_class(
self.as_ref().to_glib_none().0,
name.to_glib_none().0,
parent,
b.map_or(std::ptr::null_mut(), |x| x),
destroy_call,
);
newvec.push(MetaClass::new(ptr, capture_as_parent::<Child, Parent>()));
let class: NativeClass = from_glib_borrow(ptr);
Class::wrap(class, newvec)
}
}
}
/// Returns a monomorphised thunk that converts a raw pointer to a `Child`
/// instance into a raw pointer to its embedded `Parent` view (via
/// `ChildOf::as_parent`). Used in the metaclass chain built by
/// `register_subclass` so parent classes can operate on subclass instances.
fn capture_as_parent<Child: ChildOf<Parent>, Parent>() -> fn(*mut c_void) -> *mut c_void
{
fn extractor<Child: ChildOf<Parent>, Parent>(input: *mut c_void) -> *mut c_void {
unsafe {
// SAFETY-NOTE(review): assumes `input` points at a live `Child`;
// soundness relies on callers only invoking this thunk on instance
// pointers registered for the matching class.
let child_ptr: *mut Child = input as _;
let child: &Child = &*child_ptr;
let parent: &Parent = child.as_parent();
let parent_ptr: *const Parent = parent as _;
parent_ptr as _
}
}
extractor::<Child, Parent> as _
}
#[cfg(test)]
mod tests {
use super::*;
use serial_test_derive::serial;
use crate::ValueExt;
/// Smoke test: evaluating a script with no native instance/class still
/// yields a scope object whose properties reflect the evaluated code.
#[test]
#[serial]
fn evaluate_in_object() {
// GTK must be initialised before the JSC context is usable in this setup.
if !::gtk::is_initialized() {
gtk::init().unwrap();
}
let ctx = Context::new();
let (_res, obj) = ctx.evaluate_in_object::<&str>("var x = 42;", None, None, "", 1);
assert_eq!(obj.object_get_property("x").unwrap().to_int32(), 42);
}
}
|
//! Various utils functions for caching and file management.
use std::collections::hash_map::DefaultHasher;
use std::ffi::OsStr;
use std::hash::{Hash, Hasher};
use std::path::Path;
use std::process::{Command, Output};
pub mod bytelines;
mod io;
pub use self::io::{
count_lines, create_or_overwrite, read_first_lines, read_lines, read_lines_from,
remove_dir_contents,
};
/// Returns the width of displaying `n` on the screen.
///
/// Same with `n.to_string().len()` but without allocation.
/// Returns the width of displaying `n` on the screen.
///
/// Same as `n.to_string().len()` but without allocation; `0` has width 1.
pub fn display_width(n: usize) -> usize {
    // Count the chain n, n/10, n/100, ... down to the last non-empty digit.
    // Starting from `Some(n)` makes 0 yield exactly one element, i.e. width 1.
    std::iter::successors(Some(n), |&rest| if rest >= 10 { Some(rest / 10) } else { None })
        .count()
}
/// Returns true if `dir` is a git repo, including git submodule.
/// Returns true if `dir` is a git repo, including git submodule.
///
/// Checks for a `.git` entry: a directory in a normal repo, a file in a
/// submodule — `exists()` covers both.
pub fn is_git_repo(dir: &Path) -> bool {
    let dot_git = dir.join(".git");
    dot_git.exists()
}
/// Hashes `t` with the standard library's default hasher and returns the
/// 64-bit digest. Deterministic within a process for equal values.
pub fn calculate_hash<T: Hash>(t: &T) -> u64 {
    let mut hasher = DefaultHasher::default();
    t.hash(&mut hasher);
    hasher.finish()
}
/// Converts `shell_cmd` to `Command` with optional working directory.
/// Converts `shell_cmd` to `Command` with optional working directory.
///
/// The whole command string is handed to the platform shell:
/// `cmd /C` on Windows, `bash -c` everywhere else.
pub fn as_std_command<P: AsRef<Path>>(shell_cmd: impl AsRef<OsStr>, dir: Option<P>) -> Command {
    let (shell, flag) = if cfg!(target_os = "windows") {
        ("cmd", "/C")
    } else {
        ("bash", "-c")
    };
    let mut cmd = Command::new(shell);
    cmd.arg(flag).arg(shell_cmd.as_ref());
    if let Some(working_dir) = dir {
        cmd.current_dir(working_dir);
    }
    cmd
}
/// Executes the `shell_cmd` and returns the output.
/// Executes the `shell_cmd` via the platform shell (see [`as_std_command`]),
/// optionally in working directory `dir`, and returns the captured output.
pub fn execute_at<S, P>(shell_cmd: S, dir: Option<P>) -> std::io::Result<Output>
where
    S: AsRef<OsStr>,
    P: AsRef<Path>,
{
    as_std_command(shell_cmd, dir).output()
}
|
#[doc = r"Value read from the register"]
pub struct R {
// Raw register contents captured at read time.
bits: u32,
}
#[doc = r"Value to write to the register"]
pub struct W {
// Raw bits accumulated by the field write proxies before being committed.
bits: u32,
}
impl super::IF2CMSK {
#[doc = r"Modifies the contents of the register"]
#[inline(always)]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
// Read-modify-write: `W` starts from the current hardware value, so
// fields not touched by `f` keep their state.
let bits = self.register.get();
self.register.set(f(&R { bits }, &mut W { bits }).bits);
}
#[doc = r"Reads the contents of the register"]
#[inline(always)]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r"Writes to the register"]
#[inline(always)]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
// Unlike `modify`, the write starts from the reset value: any field not
// set by `f` is written as its reset state.
self.register.set(
f(&mut W {
bits: Self::reset_value(),
})
.bits,
);
}
#[doc = r"Reset value of the register"]
#[inline(always)]
pub const fn reset_value() -> u32 {
0
}
#[doc = r"Writes the reset value to the register"]
#[inline(always)]
pub fn reset(&self) {
self.register.set(Self::reset_value())
}
}
// IF2CMSK.DATAB (bit 0, "Access Data Byte 4 to 7"): read value + write proxy.
#[doc = r"Value of the field"]
pub struct CAN_IF2CMSK_DATABR {
bits: bool,
}
impl CAN_IF2CMSK_DATABR {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _CAN_IF2CMSK_DATABW<'a> {
w: &'a mut W,
}
impl<'a> _CAN_IF2CMSK_DATABW<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
// Clear bit 0, then or in the requested value.
self.w.bits &= !(1 << 0);
self.w.bits |= ((value as u32) & 1) << 0;
self.w
}
}
// IF2CMSK.DATAA (bit 1, "Access Data Byte 0 to 3"): read value + write proxy.
#[doc = r"Value of the field"]
pub struct CAN_IF2CMSK_DATAAR {
bits: bool,
}
impl CAN_IF2CMSK_DATAAR {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _CAN_IF2CMSK_DATAAW<'a> {
w: &'a mut W,
}
impl<'a> _CAN_IF2CMSK_DATAAW<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
// Clear bit 1, then or in the requested value.
self.w.bits &= !(1 << 1);
self.w.bits |= ((value as u32) & 1) << 1;
self.w
}
}
// IF2CMSK.NEWDAT (bit 2, "Access New Data"): read value + write proxy.
#[doc = r"Value of the field"]
pub struct CAN_IF2CMSK_NEWDATR {
bits: bool,
}
impl CAN_IF2CMSK_NEWDATR {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _CAN_IF2CMSK_NEWDATW<'a> {
w: &'a mut W,
}
impl<'a> _CAN_IF2CMSK_NEWDATW<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
// Clear bit 2, then or in the requested value.
self.w.bits &= !(1 << 2);
self.w.bits |= ((value as u32) & 1) << 2;
self.w
}
}
// IF2CMSK.CLRINTPND (bit 3, "Clear Interrupt Pending Bit"): read value + write proxy.
#[doc = r"Value of the field"]
pub struct CAN_IF2CMSK_CLRINTPNDR {
bits: bool,
}
impl CAN_IF2CMSK_CLRINTPNDR {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _CAN_IF2CMSK_CLRINTPNDW<'a> {
w: &'a mut W,
}
impl<'a> _CAN_IF2CMSK_CLRINTPNDW<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
// Clear bit 3, then or in the requested value.
self.w.bits &= !(1 << 3);
self.w.bits |= ((value as u32) & 1) << 3;
self.w
}
}
// IF2CMSK.CONTROL (bit 4, "Access Control Bits"): read value + write proxy.
#[doc = r"Value of the field"]
pub struct CAN_IF2CMSK_CONTROLR {
bits: bool,
}
impl CAN_IF2CMSK_CONTROLR {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _CAN_IF2CMSK_CONTROLW<'a> {
w: &'a mut W,
}
impl<'a> _CAN_IF2CMSK_CONTROLW<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
// Clear bit 4, then or in the requested value.
self.w.bits &= !(1 << 4);
self.w.bits |= ((value as u32) & 1) << 4;
self.w
}
}
// IF2CMSK.ARB (bit 5, "Access Arbitration Bits"): read value + write proxy.
#[doc = r"Value of the field"]
pub struct CAN_IF2CMSK_ARBR {
bits: bool,
}
impl CAN_IF2CMSK_ARBR {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _CAN_IF2CMSK_ARBW<'a> {
w: &'a mut W,
}
impl<'a> _CAN_IF2CMSK_ARBW<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
// Clear bit 5, then or in the requested value.
self.w.bits &= !(1 << 5);
self.w.bits |= ((value as u32) & 1) << 5;
self.w
}
}
// IF2CMSK.MASK (bit 6, "Access Mask Bits"): read value + write proxy.
#[doc = r"Value of the field"]
pub struct CAN_IF2CMSK_MASKR {
bits: bool,
}
impl CAN_IF2CMSK_MASKR {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _CAN_IF2CMSK_MASKW<'a> {
w: &'a mut W,
}
impl<'a> _CAN_IF2CMSK_MASKW<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
// Clear bit 6, then or in the requested value.
self.w.bits &= !(1 << 6);
self.w.bits |= ((value as u32) & 1) << 6;
self.w
}
}
// IF2CMSK.WRNRD (bit 7, "Write, Not Read"): read value + write proxy.
#[doc = r"Value of the field"]
pub struct CAN_IF2CMSK_WRNRDR {
bits: bool,
}
impl CAN_IF2CMSK_WRNRDR {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _CAN_IF2CMSK_WRNRDW<'a> {
w: &'a mut W,
}
impl<'a> _CAN_IF2CMSK_WRNRDW<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
// Clear bit 7, then or in the requested value.
self.w.bits &= !(1 << 7);
self.w.bits |= ((value as u32) & 1) << 7;
self.w
}
}
// IF2CMSK.TXRQST ("Access Transmission Request"): read value + write proxy.
// NOTE(review): this field uses bit 2, the same position as NEWDAT above —
// in Bosch C_CAN-style controllers the two share the bit and the meaning
// depends on the transfer direction (WRNRD); confirm against the SVD/manual.
#[doc = r"Value of the field"]
pub struct CAN_IF2CMSK_TXRQSTR {
bits: bool,
}
impl CAN_IF2CMSK_TXRQSTR {
#[doc = r"Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r"Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r"Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r"Proxy"]
pub struct _CAN_IF2CMSK_TXRQSTW<'a> {
w: &'a mut W,
}
impl<'a> _CAN_IF2CMSK_TXRQSTW<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
// Clear bit 2, then or in the requested value (shared with NEWDAT).
self.w.bits &= !(1 << 2);
self.w.bits |= ((value as u32) & 1) << 2;
self.w
}
}
// Field accessors for the read snapshot: each extracts one bit of `bits`.
impl R {
#[doc = r"Value of the register as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bit 0 - Access Data Byte 4 to 7"]
#[inline(always)]
pub fn can_if2cmsk_datab(&self) -> CAN_IF2CMSK_DATABR {
let bits = ((self.bits >> 0) & 1) != 0;
CAN_IF2CMSK_DATABR { bits }
}
#[doc = "Bit 1 - Access Data Byte 0 to 3"]
#[inline(always)]
pub fn can_if2cmsk_dataa(&self) -> CAN_IF2CMSK_DATAAR {
let bits = ((self.bits >> 1) & 1) != 0;
CAN_IF2CMSK_DATAAR { bits }
}
#[doc = "Bit 2 - Access New Data"]
#[inline(always)]
pub fn can_if2cmsk_newdat(&self) -> CAN_IF2CMSK_NEWDATR {
let bits = ((self.bits >> 2) & 1) != 0;
CAN_IF2CMSK_NEWDATR { bits }
}
#[doc = "Bit 3 - Clear Interrupt Pending Bit"]
#[inline(always)]
pub fn can_if2cmsk_clrintpnd(&self) -> CAN_IF2CMSK_CLRINTPNDR {
let bits = ((self.bits >> 3) & 1) != 0;
CAN_IF2CMSK_CLRINTPNDR { bits }
}
#[doc = "Bit 4 - Access Control Bits"]
#[inline(always)]
pub fn can_if2cmsk_control(&self) -> CAN_IF2CMSK_CONTROLR {
let bits = ((self.bits >> 4) & 1) != 0;
CAN_IF2CMSK_CONTROLR { bits }
}
#[doc = "Bit 5 - Access Arbitration Bits"]
#[inline(always)]
pub fn can_if2cmsk_arb(&self) -> CAN_IF2CMSK_ARBR {
let bits = ((self.bits >> 5) & 1) != 0;
CAN_IF2CMSK_ARBR { bits }
}
#[doc = "Bit 6 - Access Mask Bits"]
#[inline(always)]
pub fn can_if2cmsk_mask(&self) -> CAN_IF2CMSK_MASKR {
let bits = ((self.bits >> 6) & 1) != 0;
CAN_IF2CMSK_MASKR { bits }
}
#[doc = "Bit 7 - Write, Not Read"]
#[inline(always)]
pub fn can_if2cmsk_wrnrd(&self) -> CAN_IF2CMSK_WRNRDR {
let bits = ((self.bits >> 7) & 1) != 0;
CAN_IF2CMSK_WRNRDR { bits }
}
// NOTE(review): reads the same bit as `can_if2cmsk_newdat` — the two
// fields share bit 2 (direction-dependent meaning); see TXRQST note above.
#[doc = "Bit 2 - Access Transmission Request"]
#[inline(always)]
pub fn can_if2cmsk_txrqst(&self) -> CAN_IF2CMSK_TXRQSTR {
let bits = ((self.bits >> 2) & 1) != 0;
CAN_IF2CMSK_TXRQSTR { bits }
}
}
// Field write proxies: each borrows `self` so writes can be chained.
impl W {
#[doc = r"Writes raw bits to the register"]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bit 0 - Access Data Byte 4 to 7"]
#[inline(always)]
pub fn can_if2cmsk_datab(&mut self) -> _CAN_IF2CMSK_DATABW {
_CAN_IF2CMSK_DATABW { w: self }
}
#[doc = "Bit 1 - Access Data Byte 0 to 3"]
#[inline(always)]
pub fn can_if2cmsk_dataa(&mut self) -> _CAN_IF2CMSK_DATAAW {
_CAN_IF2CMSK_DATAAW { w: self }
}
#[doc = "Bit 2 - Access New Data"]
#[inline(always)]
pub fn can_if2cmsk_newdat(&mut self) -> _CAN_IF2CMSK_NEWDATW {
_CAN_IF2CMSK_NEWDATW { w: self }
}
#[doc = "Bit 3 - Clear Interrupt Pending Bit"]
#[inline(always)]
pub fn can_if2cmsk_clrintpnd(&mut self) -> _CAN_IF2CMSK_CLRINTPNDW {
_CAN_IF2CMSK_CLRINTPNDW { w: self }
}
#[doc = "Bit 4 - Access Control Bits"]
#[inline(always)]
pub fn can_if2cmsk_control(&mut self) -> _CAN_IF2CMSK_CONTROLW {
_CAN_IF2CMSK_CONTROLW { w: self }
}
#[doc = "Bit 5 - Access Arbitration Bits"]
#[inline(always)]
pub fn can_if2cmsk_arb(&mut self) -> _CAN_IF2CMSK_ARBW {
_CAN_IF2CMSK_ARBW { w: self }
}
#[doc = "Bit 6 - Access Mask Bits"]
#[inline(always)]
pub fn can_if2cmsk_mask(&mut self) -> _CAN_IF2CMSK_MASKW {
_CAN_IF2CMSK_MASKW { w: self }
}
#[doc = "Bit 7 - Write, Not Read"]
#[inline(always)]
pub fn can_if2cmsk_wrnrd(&mut self) -> _CAN_IF2CMSK_WRNRDW {
_CAN_IF2CMSK_WRNRDW { w: self }
}
// NOTE(review): writes the same bit as `can_if2cmsk_newdat` (shared bit 2).
#[doc = "Bit 2 - Access Transmission Request"]
#[inline(always)]
pub fn can_if2cmsk_txrqst(&mut self) -> _CAN_IF2CMSK_TXRQSTW {
_CAN_IF2CMSK_TXRQSTW { w: self }
}
}
|
extern crate mio;
use mio::*;
use mio::tcp::{TcpListener};
/// Minimal mio event-loop skeleton: registers a TCP listener and polls for
/// readiness events.
fn main() {
const SERVER: Token = Token(0);
let addr = "127.0.0.1:13265".parse().unwrap();
// set up the server socket
let server = TcpListener::bind(&addr).unwrap();
// create a poll instance
let poll = Poll::new().unwrap();
// start listening for incoming connections (edge-triggered readability)
poll.register(
&server,
SERVER,
Ready::readable(),
PollOpt::edge())
.unwrap();
// Create storage for events
let mut events = Events::with_capacity(1024);
loop {
// Blocks until at least one event is ready (no timeout).
poll.poll(&mut events, None).unwrap();
// NOTE(review): events are drained but never handled — incoming
// connections are never accepted. Skeleton only.
for event in events.iter() {
}
}
}
|
use crate::Definition;
use lazy_static::lazy_static;
use regex::Regex;
use std::str::FromStr;
// Domingler does not look for collisions below these IDs: vanilla content
// occupies everything beneath each threshold, so only IDs at or above these
// values are treated as mod-defined and checked for clashes.
pub const ASSUMED_FIRST_WEAPON_ID: u32 = 801;
pub const ASSUMED_FIRST_ARMOUR_ID: u32 = 270;
pub const ASSUMED_FIRST_MONSTER_ID: u32 = 3500;
pub const ASSUMED_FIRST_NAMETYPE_ID: u32 = 171;
pub const ASSUMED_FIRST_SPELL_ID: u32 = 1301;
pub const ASSUMED_FIRST_SITE_ID: u32 = 1501;
pub const ASSUMED_FIRST_NATION_ID: u32 = 111; // dangerously low, if 3 new nations get added this will break
pub const ASSUMED_FIRST_ITEM_ID: u32 = 501;
pub const ASSUMED_FIRST_MONTAG_ID: u32 = 1001;
pub const ASSUMED_FIRST_EVENTCODE_ID: u32 = 301; // technically it's negative but whatever
pub const ASSUMED_FIRST_ENCHANTMENT_ID: u32 = 200;
pub const ASSUMED_FIRST_RESTRICTED_ITEM_ID: u32 = 1;
/// Line matcher for one entity type (weapon, monster, spell, ...).
/// Each regex is `None` when the entity type has no corresponding mod command.
pub struct ModLineScanner {
/// Matches `#new<thing> <id>` lines (capture group `id`).
pub option_new_numbered_regex: Option<&'static Regex>,
/// Matches bare `#new<thing>` lines with no explicit ID.
pub option_new_unnumbered_regex: Option<&'static Regex>,
/// Matches `#select<thing> <id>` lines (capture group `id`).
pub option_select_numbered_regex: Option<&'static Regex>,
/// IDs below this are treated as vanilla content rather than mod-defined.
pub assumed_minimum: u32,
}
impl ModLineScanner {
/// Captures:
/// - #newthing <id>
/// - #newthing
/// - #selectthing <id>
/// - #selectthing "name"
/// Note that a line can be only one of those things so this function returns
/// as soon as one of the regex matches.
/// Returns true if it matched anything.
///
/// NOTE(review): `capture.name("id").unwrap()` assumes every numbered regex
/// defines an `id` group, and `u32::from_str(...).unwrap()` panics on
/// numbers that overflow u32 — both are trusted-input assumptions.
pub fn scan_line<'a>(&self, line: &'a str, thing_definition: &mut Definition<'a>) -> bool {
if let Some(new_numbered_regex) = self.option_new_numbered_regex {
if let Some(capture) = new_numbered_regex.captures(line) {
let found_id = u32::from_str(capture.name("id").unwrap().as_str()).unwrap();
if found_id == 0 {
// New ID of 0 is treated the same as no number
thing_definition.implicit_definitions += 1;
return true;
} else {
let not_already_there = thing_definition.defined_ids.insert(found_id);
if !not_already_there {
println!(
"WARNING: ID in {} was already declared in the same mod",
line
);
}
return true;
}
}
}
if let Some(select_numbered_regex) = self.option_select_numbered_regex {
if let Some(capture) = select_numbered_regex.captures(line) {
let found_id = u32::from_str(capture.name("id").unwrap().as_str()).unwrap();
// Selecting an ID above the vanilla threshold counts as defining it;
// below the threshold it is an edit of vanilla content.
if found_id >= self.assumed_minimum {
thing_definition.defined_ids.insert(found_id);
} else {
thing_definition.vanilla_edited_ids.insert(found_id);
}
return true;
}
}
if let Some(new_unnumbered_regex) = self.option_new_unnumbered_regex {
if new_unnumbered_regex.is_match(line) {
// Unnumbered #new commands get IDs assigned implicitly by the game.
thing_definition.implicit_definitions += 1;
return true;
}
}
false
}
}
lazy_static! {
// Ready-made scanners: each pairs the regexes for one moddable entity type
// with the lowest ID a mod is assumed to be allowed to define for that type
// (ids below `assumed_minimum` are treated as edits of vanilla entries).
/// Weapons:
/// - #newweapon <id>
/// - #newweapon
/// - #selectweapon <id>
pub static ref WEAPON_LINE_SCANNER: ModLineScanner =
ModLineScanner {
option_new_numbered_regex: Some(&NEW_NUMBERED_WEAPON),
option_new_unnumbered_regex: Some(&NEW_UNNUMBERED_WEAPON),
option_select_numbered_regex: Some(&SELECT_NUMBERED_WEAPON),
assumed_minimum: ASSUMED_FIRST_WEAPON_ID,
};
/// Armours:
/// - #newarmor <id>
/// - #newarmor
/// - #selectarmor <id>
pub static ref ARMOUR_LINE_SCANNER: ModLineScanner =
ModLineScanner {
option_new_numbered_regex: Some(&NEW_NUMBERED_ARMOUR),
option_new_unnumbered_regex: Some(&NEW_UNNUMBERED_ARMOUR),
option_select_numbered_regex: Some(&SELECT_NUMBERED_ARMOUR),
assumed_minimum: ASSUMED_FIRST_ARMOUR_ID,
};
/// Monsters:
/// - #newmonster <id>
/// - #newmonster
/// - #selectmonster <id>
pub static ref MONSTER_LINE_SCANNER: ModLineScanner =
ModLineScanner {
option_new_numbered_regex: Some(&NEW_NUMBERED_MONSTER),
option_new_unnumbered_regex: Some(&NEW_UNNUMBERED_MONSTER),
option_select_numbered_regex: Some(&SELECT_NUMBERED_MONSTER),
assumed_minimum: ASSUMED_FIRST_MONSTER_ID,
};
// Spells, items and nations have no numbered "new" command, so
// `option_new_numbered_regex` is None for them.
/// Spells:
/// - #newspell
/// - #selectspell <id>
pub static ref SPELL_LINE_SCANNER: ModLineScanner =
ModLineScanner {
option_new_numbered_regex: None,
option_new_unnumbered_regex: Some(&NEW_UNNUMBERED_SPELL),
option_select_numbered_regex: Some(&SELECT_NUMBERED_SPELL),
assumed_minimum: ASSUMED_FIRST_SPELL_ID,
};
/// Items:
/// - #newitem
/// - #selectitem <id>
pub static ref ITEM_LINE_SCANNER: ModLineScanner =
ModLineScanner {
option_new_numbered_regex: None,
option_new_unnumbered_regex: Some(&NEW_UNNUMBERED_ITEM),
option_select_numbered_regex: Some(&SELECT_NUMBERED_ITEM),
assumed_minimum: ASSUMED_FIRST_ITEM_ID,
};
/// Sites:
/// - #newsite <id>
/// - #newsite
/// - #selectsite <id>
pub static ref SITE_LINE_SCANNER: ModLineScanner =
ModLineScanner {
option_new_numbered_regex: Some(&NEW_NUMBERED_SITE),
option_new_unnumbered_regex: Some(&NEW_UNNUMBERED_SITE),
option_select_numbered_regex: Some(&SELECT_NUMBERED_SITE),
assumed_minimum: ASSUMED_FIRST_SITE_ID,
};
/// Nations:
/// - #newnation
/// - #selectnation <id> (where id >= ASSUMED_FIRST_NATION_ID)
pub static ref NATION_LINE_SCANNER: ModLineScanner =
ModLineScanner {
option_new_numbered_regex: None,
option_new_unnumbered_regex: Some(&NEW_UNNUMBERED_NATION),
option_select_numbered_regex: Some(&SELECT_NUMBERED_NATION),
assumed_minimum: ASSUMED_FIRST_NATION_ID,
};
// The remaining types are only ever "selected" (implicitly defined by use).
/// Name types:
/// - #selectnametype <id>
pub static ref NAMETYPE_LINE_SCANNER: ModLineScanner =
ModLineScanner {
option_new_numbered_regex: None,
option_new_unnumbered_regex: None,
option_select_numbered_regex: Some(&SELECT_NUMBERED_NAMETYPE),
assumed_minimum: ASSUMED_FIRST_NAMETYPE_ID,
};
/// Montags:
/// - #montag <id>
pub static ref MONTAG_LINE_SCANNER: ModLineScanner =
ModLineScanner {
option_new_numbered_regex: None,
option_new_unnumbered_regex: None,
option_select_numbered_regex: Some(&SELECT_NUMBERED_MONTAG),
assumed_minimum: ASSUMED_FIRST_MONTAG_ID,
};
/// Event codes:
/// - #code -<id>
/// - #code2 -<id>
pub static ref EVENTCODE_LINE_SCANNER: ModLineScanner =
ModLineScanner {
option_new_numbered_regex: None,
option_new_unnumbered_regex: None,
option_select_numbered_regex: Some(&SELECT_NUMBERED_EVENTCODE),
assumed_minimum: ASSUMED_FIRST_EVENTCODE_ID,
};
/// Restricted items:
/// - #restricteditem <id>
pub static ref RESTRICTED_ITEM_LINE_SCANNER: ModLineScanner =
ModLineScanner {
option_new_numbered_regex: None,
option_new_unnumbered_regex: None,
option_select_numbered_regex: Some(&SELECT_NUMBERED_RESTRICTED_ITEM),
assumed_minimum: ASSUMED_FIRST_RESTRICTED_ITEM_ID,
};
// Weapons
// Every pattern below captures `prefix` (leading whitespace + command),
// `id` (the numeric argument) and `suffix` (the rest of the line), so a
// matched line can be rewritten with a remapped id.
static ref NEW_NUMBERED_WEAPON: Regex = Regex::new("^\
(?P<prefix>[ \t]*#newweapon[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
static ref NEW_UNNUMBERED_WEAPON: Regex = Regex::new("^\
(?P<prefix>[ \t]*#newweapon)\
(?P<suffix>.*)$\
").unwrap();
static ref SELECT_NUMBERED_WEAPON: Regex = Regex::new("^\
(?P<prefix>[ \t]*#selectweapon[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
// Any command whose numeric argument refers to a weapon number.
pub static ref USE_NUMBERED_WEAPON: Regex = Regex::new("^\
(?P<prefix>[ \t]*#(?:\
newweapon|\
weapon|\
copyweapon|\
secondaryeffect|\
secondaryeffectalways|\
selectweapon)[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
// Armours
static ref NEW_NUMBERED_ARMOUR: Regex = Regex::new("^\
(?P<prefix>[ \t]*#newarmor[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
static ref NEW_UNNUMBERED_ARMOUR: Regex = Regex::new("^\
(?P<prefix>[ \t]*#newarmor)\
(?P<suffix>.*)$\
").unwrap();
pub static ref USE_NUMBERED_ARMOUR: Regex = Regex::new("^\
(?P<prefix>[ \t]*#(?:\
newarmor|\
armor|\
copyarmor)[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
// NOTE(review): NEW_NAMED_ARMOUR is not referenced by any scanner in this
// chunk — confirm it is used elsewhere, otherwise it may be dead code.
static ref NEW_NAMED_ARMOUR: Regex = Regex::new("^\
(?P<prefix>[ \t]*#newarmor[ \t]+\")\
(?P<name>[^\"]+)\
(?P<suffix>\".*)$\
").unwrap();
static ref SELECT_NUMBERED_ARMOUR: Regex = Regex::new("^\
(?P<prefix>[ \t]*#selectarmor[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
// Spells
static ref NEW_UNNUMBERED_SPELL: Regex = Regex::new("^\
(?P<prefix>[ \t]*#newspell)\
(?P<suffix>.*)$\
").unwrap();
static ref SELECT_NUMBERED_SPELL: Regex = Regex::new("^\
(?P<prefix>[ \t]*#selectspell[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
pub static ref USE_NUMBERED_SPELL: Regex = Regex::new("^\
(?P<prefix>[ \t]*#(?:\
selectspell|\
copyspell|\
nextspell\
)[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
// Items
static ref NEW_UNNUMBERED_ITEM: Regex = Regex::new("^\
(?P<prefix>[ \t]*#newitem)\
(?P<suffix>.*)$\
").unwrap();
static ref SELECT_NUMBERED_ITEM: Regex = Regex::new("^\
(?P<prefix>[ \t]*#selectitem[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
pub static ref USE_NUMBERED_ITEM: Regex = Regex::new("^\
(?P<prefix>[ \t]*#(?:\
selectitem|\
startitem|\
copyitem|\
copyspr|\
req_targitem|\
req_targnoitem|\
req_worlditem|\
req_noworlditem\
)[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
// Sites
static ref NEW_NUMBERED_SITE: Regex = Regex::new("^\
(?P<prefix>[ \t]*#newsite[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
static ref SELECT_NUMBERED_SITE: Regex = Regex::new("^\
(?P<prefix>[ \t]*#selectsite[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
// remember to check for numbered sites first
// (this pattern also matches "#newsite <id>", since the id ends up in `suffix`)
static ref NEW_UNNUMBERED_SITE: Regex = Regex::new("^\
(?P<prefix>[ \t]*#newsite)\
(?P<suffix>.*)$\
").unwrap();
pub static ref USE_NUMBERED_SITE: Regex = Regex::new("^\
(?P<prefix>[ \t]*#(?:\
selectsite|\
newsite|\
req_nositenbr|\
addsite|\
removesite|\
hiddensite|\
futuresite|\
onlyatsite\
)[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
// Monsters
static ref NEW_NUMBERED_MONSTER: Regex = Regex::new("^\
(?P<prefix>[ \t]*#newmonster[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
static ref SELECT_NUMBERED_MONSTER: Regex = Regex::new("^\
(?P<prefix>[ \t]*#selectmonster[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
// "#newmonster", or "#newmonster -- whatever"
// n.b. make sure to check it doesn't match the numbered (or named) monster first!
static ref NEW_UNNUMBERED_MONSTER: Regex = Regex::new("^\
(?P<prefix>[ \t]*#newmonster)\
(?P<suffix>.*)$\
").unwrap();
// TODO: optimise regex e.g. matching `[[:digit:]]+d6`
// Matches any command whose numeric argument is a monster number (or a
// negative montag reference — hence the optional `-` in the `id` group),
// capturing `prefix`/`id`/`suffix` for id rewriting.
//
// NOTE(review): the original list contained `6d7units` where the surrounding
// `1d6units`..`16d6units` family strongly suggests `6d6units` was intended;
// both spellings are accepted below so `#6d6units` lines are no longer
// missed while staying backward compatible. Exact duplicate alternatives
// (wallcom, wallunit, uwwallunit, uwwallcom, addrecunit, addreccom, com)
// have been removed; alternation semantics are unchanged.
pub static ref USE_MONSTER: Regex = Regex::new("^\
(?P<prefix>[ \t]*#(?:\
newmonster|\
copyspr|\
monpresentrec|\
ownsmonrec|\
raiseshape|\
shapechange|\
prophetshape|\
firstshape|\
secondshape|\
secondtmpshape|\
forestshape|\
plainshape|\
foreignshape|\
homeshape|\
springshape|\
summershape|\
autumnshape|\
wintershape|\
landshape|\
watershape|\
twiceborn|\
domsummon|\
domsummon2|\
domsummon20|\
raredomsummon|\
templetrainer|\
makemonsters1|\
makemonsters2|\
makemonsters3|\
makemonsters4|\
makemonsters5|\
summon1|\
summon2|\
summon3|\
summon4|\
summon5|\
battlesum1|\
battlesum2|\
battlesum3|\
battlesum4|\
battlesum5|\
batstartsum1|\
batstartsum2|\
batstartsum3|\
batstartsum4|\
batstartsum5|\
batstartsum1d6|\
batstartsum2d6|\
batstartsum3d6|\
batstartsum4d6|\
batstartsum5d6|\
batstartsum6d6|\
batstartsum7d6|\
batstartsum8d6|\
batstartsum9d6|\
farsumcom|\
onlymnr|\
homemon|\
homecom|\
mon|\
com|\
summon|\
summonlvl2|\
summonlvl3|\
summonlvl4|\
wallcom|\
wallunit|\
uwwallunit|\
uwwallcom|\
startcom|\
coastcom1|\
coastcom2|\
addforeignunit|\
addforeigncom|\
forestrec|\
mountainrec|\
swamprec|\
wasterec|\
caverec|\
startscout|\
forestcom|\
mountaincom|\
swampcom|\
wastecom|\
cavecom|\
startunittype1|\
startunittype2|\
addrecunit|\
addreccom|\
uwrec|\
uwcom|\
coastunit1|\
coastunit2|\
coastunit3|\
landrec|\
landcom|\
hero1|\
hero2|\
hero3|\
hero4|\
hero5|\
hero6|\
hero7|\
hero8|\
hero9|\
hero10|\
multihero1|\
multihero2|\
multihero3|\
multihero4|\
multihero5|\
multihero6|\
multihero7|\
defcom1|\
defcom2|\
defunit1|\
defunit1b|\
defunit1c|\
defunit1d|\
defunit2|\
defunit2b|\
addgod|\
delgod|\
cheapgod20|\
cheapgod40|\
guardspirit|\
transform|\
fireboost|\
airboost|\
waterboost|\
earthboost|\
astralboost|\
deathboost|\
natureboost|\
bloodboost|\
holyboost|\
req_monster|\
req_2monsters|\
req_5monsters|\
req_nomonster|\
req_mnr|\
req_nomnr|\
req_deadmnr|\
req_targmnr|\
req_targnomnr|\
assassin|\
stealthcom|\
2com|\
4com|\
5com|\
1unit|\
1d3units|\
2d3units|\
3d3units|\
4d3units|\
1d6units|\
2d6units|\
3d6units|\
4d6units|\
5d6units|\
6d6units|\
6d7units|\
7d6units|\
8d6units|\
9d6units|\
10d6units|\
11d6units|\
12d6units|\
13d6units|\
14d6units|\
15d6units|\
16d6units|\
killmon|\
kill2d6mon|\
killcom|\
copystats|\
coastrec|\
coastcom|\
natmon|\
natcom|\
domshape|\
notdomshape|\
slaver|\
req_godismnr|\
req_godisnotmnr|\
notmnr|\
batstartsum1d3|\
uwdefcom1|\
uwdefcom2|\
uwdefunit1|\
uwdefunit1b|\
uwdefunit1c|\
uwdefunit1d|\
uwdefunit2|\
uwdefunit2b\
)[ \t]+)\
(?P<id>[-]?[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
// Nations
pub static ref SELECT_NUMBERED_NATION: Regex = Regex::new("^\
(?P<prefix>[ \t]*#selectnation[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
static ref NEW_UNNUMBERED_NATION: Regex = Regex::new("^\
(?P<prefix>[ \t]*#newnation)\
(?P<suffix>.*)$\
").unwrap();
// Any command whose numeric argument refers to a nation number.
pub static ref USE_NUMBERED_NATION: Regex = Regex::new("^\
(?P<prefix>[ \t]*#(?:\
selectnation|\
nation|\
restricted|\
notfornation|\
nationrebate|\
req_nation|\
req_nonation|\
req_fornation|\
req_notfornation|\
req_notnation|\
req_notforally|\
req_fullowner|\
req_domowner|\
req_targowner|\
assowner|\
extramsg|\
nat|\
req_targnotowner\
)[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
// Name types
static ref SELECT_NUMBERED_NAMETYPE: Regex = Regex::new("^\
(?P<prefix>[ \t]*#selectnametype[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
pub static ref USE_NAMETYPE: Regex = Regex::new("^\
(?P<prefix>[ \t]*#(?:\
nametype|\
selectnametype\
)[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
// Montags
// NOTE(review): SELECT_NUMBERED_MONTAG and USE_NUMBERED_MONTAG are
// identical patterns — presumably kept separate for symmetry with the
// other entity types (montags are defined purely by use).
static ref SELECT_NUMBERED_MONTAG: Regex = Regex::new("^\
(?P<prefix>[ \t]*#montag[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
pub static ref USE_NUMBERED_MONTAG: Regex = Regex::new("^\
(?P<prefix>[ \t]*#montag[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
// Other
// Event codes are written negative in mod files; the leading `-` is part
// of `prefix`, so the captured `id` is the unsigned magnitude.
static ref SELECT_NUMBERED_EVENTCODE: Regex = Regex::new("^\
(?P<prefix>[ \t]*#(?:code|code2)[ \t]+-)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
pub static ref USE_NUMBERED_EVENTCODE: Regex = Regex::new("^\
(?P<prefix>[ \t]*#(?:\
code|\
code2|\
resetcode|\
req_code|\
req_anycode|\
req_notanycode|\
req_nearbycode|\
req_nearowncode|\
codedelay|\
codedelay2|\
resetcodedelay|\
resetcodedelay2\
)[ \t]+-)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
static ref SELECT_NUMBERED_RESTRICTED_ITEM: Regex = Regex::new("^\
(?P<prefix>[ \t]*#restricteditem[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
pub static ref USE_NUMBERED_RESTRICTED_ITEM: Regex = Regex::new("^\
(?P<prefix>[ \t]*#(?:\
restricteditem|\
userestricteditem\
)[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
// n.b. this does not remap inside spells
pub static ref USE_GLOBAL_ENCHANTMENT: Regex = Regex::new("^\
(?P<prefix>[ \t]*#(?:\
enchrebate50|\
enchrebate20|\
enchrebate10|\
req_noench|\
req_ench|\
req_myench|\
req_friendlyench|\
req_hostileench|\
req_enchdom|\
nationench|\
enchrebate25p|\
enchrebate50p\
)[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
// #damage doubles as an enchantment number in ritual definitions.
pub static ref USE_GLOBAL_ENCHANTMENT_DAMAGE: Regex = Regex::new("^\
(?P<prefix>[ \t]*#(?:\
damage\
)[ \t]+)\
(?P<id>[[:digit:]]+)\
(?P<suffix>.*)$\
").unwrap();
}
|
use readlater::args::{Args, Command};
use structopt::StructOpt;
/// CLI entry point: parse arguments, dispatch the subcommand, and exit
/// with status 0 on success or 1 on failure.
pub fn main() {
    // Every subcommand yields a Result whose Ok value is printable output
    // and whose Err is a reportable failure; `finish` handles both
    // uniformly so each match arm stays a one-liner.
    fn finish<T: std::fmt::Display, E: std::fmt::Display>(
        result: Result<T, E>,
        failure: &str,
        verbose: bool,
    ) -> ! {
        match result {
            Ok(output) => {
                if verbose {
                    println!("{}", output);
                }
                std::process::exit(0);
            }
            Err(e) => {
                eprintln!("{}", failure);
                eprintln!("Reason: {}", e);
                std::process::exit(1);
            }
        }
    }
    let args = Args::from_args();
    let verbose = args.verbose;
    match args.cmd {
        Command::Newsboat(cmd) => finish(
            readlater::readable_article(cmd.url, cmd.title, cmd.desc, cmd.feed_title),
            "Unable to generate epub",
            verbose,
        ),
        Command::Epub(cmd) => finish(
            readlater::generate_epub(&cmd.epub),
            "Unable to generate epub",
            verbose,
        ),
        Command::Rss(cmd) => finish(
            readlater::generate_rss(&cmd.rss),
            "Unable to generate rss feed",
            verbose,
        ),
        // The original message here said "Unable to generate html" — a
        // copy/paste slip; this arm deletes old articles.
        Command::Cleanup(cmd) => finish(
            readlater::cleanup(cmd.days),
            "Unable to clean up old articles",
            verbose,
        ),
        Command::Article(cmd) => finish(
            readlater::readable_article(cmd.url, None, None, None),
            "Unable to generate html",
            verbose,
        ),
    }
}
|
#![feature(hash_drain_filter)]
use std::collections::HashMap;
use lru::LruCache;
use async_std::{prelude::*,fs};
use desert::varint;
mod storage;
use storage::{Storage,FileStorage,RW};
/// (x, y) point — presumably (longitude, latitude) given the root bbox; TODO confirm.
pub type Position = (f32,f32);
/// (min x, min y, max x, max y) bounding box.
pub type BBox = (f32,f32,f32,f32);
/// Identifier of a quad (leaf bucket) in the tree.
pub type QuadId = u64;
/// Identifier of a stored record.
pub type RecordId = u64;
/// Records are grouped into fixed-size id blocks for the id->quad index.
pub type IdBlock = u64;
/// Catch-all error type usable across async tasks.
pub type Error = Box<dyn std::error::Error+Send+Sync>;
/// A record storable in the quadtree. A record either carries its own
/// position or references other records (by id) from which a position can
/// be derived (see `XQ::get_position`).
pub trait Record: Send+Sync+Clone+std::fmt::Debug {
/// Unique id of this record.
fn get_id(&self) -> RecordId;
/// Ids of records this one refers to (used for position lookup).
fn get_refs<'a>(&'a self) -> &'a [RecordId];
/// The record's own position, if it has one.
fn get_position(&self) -> Option<Position>;
/// Serializes a batch of records into one quad file's bytes.
fn pack(records: &HashMap<RecordId,Self>) -> Vec<u8> where Self: Sized;
/// Inverse of `pack`.
fn unpack(buf: &[u8]) -> Result<HashMap<RecordId,Self>,Error>;
}
/// Spatial index tree: interior `Node`s hold children; `Quad` leaves name
/// the on-disk bucket (by `QuadId`) covering their bbox.
#[derive(Debug)]
pub enum QTree {
Node { children: Vec<QTree>, bbox: BBox },
Quad { id: QuadId, bbox: BBox },
}
impl QTree {
    /// Bounding box covering this subtree or leaf quad.
    pub fn bbox(&self) -> BBox {
        // Both variants carry a bbox; an or-pattern extracts it uniformly.
        match *self {
            QTree::Node { bbox, .. } | QTree::Quad { bbox, .. } => bbox,
        }
    }
}
/// An external-memory quadtree over `Record`s backed by a `Storage`.
pub struct XQ<S,R> where S: RW, R: Record {
// Backend that opens named files.
storage: Box<dyn Storage<S>>,
// LRU of open file handles, keyed by file path.
stores: LruCache<String,S>,
// In-memory routing tree from bbox to quad id.
root: QTree,
// Read cache of quad contents.
quad_cache: LruCache<QuadId,HashMap<RecordId,R>>,
// Quad contents modified since the last flush (write buffer).
quad_updates: HashMap<QuadId,HashMap<RecordId,R>>,
// Read cache of id->quad index blocks.
id_cache: LruCache<IdBlock,HashMap<RecordId,QuadId>>,
// Index blocks modified since the last flush (write buffer).
id_updates: HashMap<IdBlock,HashMap<RecordId,QuadId>>,
// Records whose position couldn't be resolved yet; retried in finish().
missing_updates: Vec<R>,
// Next fresh quad id handed out by split_quad().
next_quad_id: QuadId,
}
impl<S,R> XQ<S,R> where S: RW, R: Record {
    /// Creates an empty index rooted at a single world-spanning quad (id 0).
    pub async fn new(storage: Box<dyn Storage<S>>) -> Result<Self,Error> {
        // todo: read tree from storage
        let mut xq = Self {
            storage,
            root: QTree::Quad {
                id: 0,
                bbox: (-180.0,-90.0,180.0,90.0),
            },
            stores: LruCache::new(500),
            quad_cache: LruCache::new(10_000),
            quad_updates: HashMap::new(),
            id_cache: LruCache::new(10_000),
            id_updates: HashMap::new(),
            missing_updates: vec![],
            next_quad_id: 1,
        };
        // Seed the root quad's buffer so first inserts have somewhere to land.
        xq.quad_updates.insert(0, HashMap::new());
        Ok(xq)
    }
    /// Records `id -> q_id` in the id index, pulling the id block from the
    /// pending-write set, the cache, or storage — in that order.
    async fn insert_id(&mut self, id: RecordId, q_id: QuadId) -> Result<(),Error> {
        let b = id_block(id);
        let ifile = id_file(id);
        if let Some(ids) = self.id_updates.get_mut(&b) {
            // Block already staged for writing; update in place.
            ids.insert(id, q_id);
        } else if let Some(mut ids) = self.id_cache.pop(&b) {
            // Promote a cached block into the pending-write set.
            ids.insert(id, q_id);
            self.id_updates.insert(b, ids);
        } else if let Some(s) = self.stores.get_mut(&ifile) {
            // NOTE(review): read_to_end reads from the handle's current
            // position — confirm handles are rewound after earlier writes.
            let mut buf = Vec::new();
            s.read_to_end(&mut buf).await?;
            let mut ids = unpack_ids(&buf)?;
            ids.insert(id, q_id);
            self.id_updates.insert(b, ids);
            // Defensive: drop any stale cached copy of this block.
            self.id_cache.pop(&b);
        } else {
            let mut s = self.storage.open(&ifile).await?;
            let mut buf = Vec::new();
            s.read_to_end(&mut buf).await?;
            let mut ids = unpack_ids(&buf)?;
            ids.insert(id, q_id);
            self.id_updates.insert(b, ids);
            self.stores.put(ifile, s);
        }
        Ok(())
    }
    /// Inserts a batch of records: routes each record to a quad, updates the
    /// id index, stages the records in the quad's write buffer, and splits
    /// any quad that grows past 100_000 entries.
    pub async fn add_records(&mut self, records: &[R]) -> Result<(),Error> {
        let qs = self.get_quads(&records).await?;
        for (q_id,(bbox,ix)) in qs.iter() {
            for i in ix {
                let record = records.get(*i).unwrap();
                self.insert_id(record.get_id(), *q_id).await?;
            }
            let mut item_len = 0;
            self.quad_updates.get_mut(&q_id).map(|items| {
                for i in ix {
                    let r = records.get(*i).unwrap();
                    items.insert(r.get_id(), r.clone());
                }
                item_len = items.len();
            });
            // (A stray per-batch debug println of items.len() was removed here.)
            if item_len > 100_000 {
                self.split_quad(&q_id, &bbox).await?;
            }
        }
        self.check_flush().await?;
        Ok(())
    }
    /// Flushes whichever write buffers have accumulated >= 100_000 entries.
    pub async fn check_flush(&mut self) -> Result<(),Error> {
        // todo: parallel io
        // The two buffers are independent; flush each once it is full.
        if self.quad_updates.len() >= 100_000 {
            self.quad_flush().await?;
        }
        if self.id_updates.len() >= 100_000 {
            self.id_flush().await?;
        }
        Ok(())
    }
    /// Writes all pending quad buffers to storage and moves them into the
    /// read cache.
    pub async fn quad_flush(&mut self) -> Result<(),Error> {
        // todo: parallel io
        // todo: lock quad_updates
        for (q_id,rs) in self.quad_updates.drain() {
            let qfile = quad_file(q_id);
            // NOTE(review): write_all writes at the handle's current position —
            // confirm the storage layer truncates/rewinds before rewrite.
            if let Some(s) = self.stores.get_mut(&qfile) {
                s.write_all(&R::pack(&rs)).await?;
            } else {
                let mut s = self.storage.open(&qfile).await?;
                s.write_all(&R::pack(&rs)).await?;
                self.stores.put(qfile, s);
            }
            self.quad_cache.put(q_id,rs);
        }
        Ok(())
    }
    /// Writes all pending id-index blocks to storage and moves them into
    /// the read cache.
    pub async fn id_flush(&mut self) -> Result<(),Error> {
        // todo: lock id_updates
        // (A leftover debug eprintln of id_updates.len() was removed here.)
        for (b,ids) in self.id_updates.drain() {
            let ifile = id_file_from_block(b);
            if let Some(s) = self.stores.get_mut(&ifile) {
                s.write_all(&pack_ids(&ids)).await?;
            } else {
                let mut s = self.storage.open(&ifile).await?;
                s.write_all(&pack_ids(&ids)).await?;
                self.stores.put(ifile, s);
            }
            self.id_cache.put(b,ids);
        }
        Ok(())
    }
    /// Flushes both quad and id buffers unconditionally.
    pub async fn flush(&mut self) -> Result<(),Error> {
        // todo: parallel io
        self.quad_flush().await?;
        self.id_flush().await?;
        Ok(())
    }
    /// Looks up a record by id: resolves id -> quad via the id index, then
    /// fetches the record from the quad (pending writes, then cache, then
    /// storage).
    pub async fn get_record(&mut self, id: RecordId) -> Result<Option<R>,Error> {
        let b = id_block(id);
        // Pending writes take precedence over the read cache.
        let mut o_q_id = self.id_updates.get(&b).and_then(|ids| ids.get(&id));
        if o_q_id.is_none() {
            o_q_id = self.id_cache.get(&b).and_then(|ids| ids.get(&id));
        }
        let q_id = if let Some(q_id) = o_q_id { *q_id } else {
            // Fall back to reading the whole id block from storage.
            let ifile = id_file(id);
            let mut buf = Vec::new();
            if let Some(s) = self.stores.get_mut(&ifile) {
                s.read_to_end(&mut buf).await?;
            } else {
                let mut s = self.storage.open(&ifile).await?;
                s.read_to_end(&mut buf).await?;
                self.stores.put(ifile, s);
            };
            let ids = unpack_ids(&buf)?;
            let g = ids.get(&id).copied();
            self.id_cache.put(b, ids);
            if g.is_none() { return Ok(None) }
            g.unwrap()
        };
        if let Some(records) = self.quad_updates.get(&q_id) {
            return Ok(records.get(&id).cloned());
        }
        if let Some(records) = self.quad_cache.get(&q_id) {
            return Ok(records.get(&id).cloned());
        }
        let qfile = quad_file(q_id);
        let mut buf = Vec::new();
        if let Some(s) = self.stores.get_mut(&qfile) {
            s.read_to_end(&mut buf).await?;
        } else {
            let mut s = self.storage.open(&qfile).await?;
            s.read_to_end(&mut buf).await?;
            self.stores.put(qfile, s);
        }
        let records = R::unpack(&buf)?;
        let r = records.get(&id).cloned();
        self.quad_cache.put(q_id, records);
        Ok(r)
    }
    /// Resolves a position for `record`, following at most two levels of
    /// first-references when the record carries no position of its own.
    async fn get_position(&mut self, record: &R) -> Result<Option<Position>,Error> {
        if let Some(p) = record.get_position() { return Ok(Some(p)) }
        // First hop: follow the record's first reference.
        let refs = record.get_refs();
        if refs.is_empty() { return Ok(None) }
        let o_r = self.get_record(*refs.first().unwrap()).await?;
        if o_r.is_none() { return Ok(None) }
        let record = o_r.unwrap();
        if let Some(p) = record.get_position() { return Ok(Some(p)) }
        // Second (final) hop.
        let refs = record.get_refs();
        if refs.is_empty() { return Ok(None) }
        let o_r = self.get_record(*refs.first().unwrap()).await?;
        if o_r.is_none() { return Ok(None) }
        let record = o_r.unwrap();
        Ok(record.get_position())
    }
    /// Splits an over-full quad into an nx*ny grid of children and
    /// redistributes its records. The first child reuses the old quad id;
    /// the rest get fresh ids.
    pub async fn split_quad(&mut self, q_id: &QuadId, bbox: &BBox) -> Result<(),Error> {
        // todo: lock quad_updates and quad_cache?
        let records = match (self.quad_updates.remove(q_id),self.quad_cache.pop(q_id)) {
            (Some(rs),_) => rs,
            (_,Some(rs)) => rs,
            (None,None) => unimplemented![], // todo: read from storage
        };
        let (nx,ny) = (4,4);
        let mut quads = vec![];
        for i in 0..nx {
            for j in 0..ny {
                // x extents divide by nx, y extents by ny. (Previously the
                // j axis also divided by nx — harmless while nx == ny, but
                // wrong for any other grid shape.)
                let b = (
                    bbox.0 + (i as f32/(nx as f32))*(bbox.2-bbox.0),
                    bbox.1 + (j as f32/(ny as f32))*(bbox.3-bbox.1),
                    bbox.0 + ((i+1) as f32/(nx as f32))*(bbox.2-bbox.0),
                    bbox.1 + ((j+1) as f32/(ny as f32))*(bbox.3-bbox.1),
                );
                quads.push((b,HashMap::new()));
            }
        }
        for (r_id,r) in records {
            if let Some(p) = self.get_position(&r).await? {
                let i = quads.iter().position(|(b,_)| overlap(&p, &b)).unwrap();
                let q = quads.get_mut(i).unwrap();
                q.1.insert(r_id,r);
            } else {
                // Position not resolvable yet; retried in finish().
                self.missing_updates.push(r);
            }
        }
        // NOTE(review): self.root is not extended with the new children
        // here — confirm the QTree is rebuilt/maintained elsewhere,
        // otherwise routing will not see the new quads.
        let mut i = 0;
        for q in quads {
            if i == 0 {
                for (r_id,_) in q.1.iter() {
                    self.insert_id(*r_id, *q_id).await?;
                }
                self.quad_updates.insert(*q_id, q.1);
            } else {
                let id = self.next_quad_id;
                self.next_quad_id += 1;
                for (r_id,_) in q.1.iter() {
                    self.insert_id(*r_id, id).await?;
                }
                self.quad_updates.insert(id, q.1);
            }
            i += 1;
        }
        self.check_flush().await?;
        Ok(())
    }
    /// Maps each record (by index into `records`) to the quad whose bbox
    /// contains its position. Records without a resolvable position are
    /// pushed onto `missing_updates` instead.
    pub async fn get_quads(&mut self, records: &[R])
    -> Result<HashMap<QuadId,(BBox,Vec<usize>)>,Error> {
        let mut result: HashMap<QuadId,(BBox,Vec<usize>)> = HashMap::new();
        let mut positions = HashMap::new();
        for (i,r) in records.iter().enumerate() {
            if let Some(p) = self.get_position(r).await? {
                positions.insert(i,p);
            } else {
                self.missing_updates.push(r.clone());
            }
        }
        // Breadth-first walk of the routing tree, pruning by bbox overlap.
        let mut cursors = vec![&self.root];
        let mut ncursors = vec![];
        while !cursors.is_empty() {
            ncursors.clear();
            for c in cursors.iter() {
                match c {
                    QTree::Node { children, .. } => {
                        ncursors.extend(children.iter()
                            .filter(|ch| {
                                positions.iter().any(|(_,p)| { overlap(p,&ch.bbox()) })
                            }).collect::<Vec<_>>());
                    },
                    QTree::Quad { id, bbox } => {
                        // Claim every position inside this leaf, removing it
                        // from further consideration.
                        positions.drain_filter(|i,p| {
                            if overlap(p,bbox) {
                                if let Some((_,items)) = result.get_mut(id) {
                                    items.push(*i);
                                } else {
                                    result.insert(*id, (bbox.clone(),vec![*i]));
                                }
                                true
                            } else {
                                false
                            }
                        });
                    }
                }
            }
            // Swap buffers instead of reallocating each level.
            let tmp = ncursors;
            ncursors = cursors;
            cursors = tmp;
        }
        Ok(result)
    }
    /// Repeatedly retries records whose positions could not be resolved,
    /// stopping once everything lands or a pass makes no progress.
    pub async fn finish(&mut self) -> Result<(),Error> {
        let mut prev_len = self.missing_updates.len();
        loop {
            println!["missing.len()={}", prev_len]; // progress logging
            let records = self.missing_updates.drain(..).collect::<Vec<_>>();
            self.add_records(&records).await?;
            let missing_len = self.missing_updates.len();
            // Done, or no progress (avoid looping forever on orphans).
            if missing_len == 0 || missing_len == prev_len {
                break;
            }
            prev_len = self.missing_updates.len();
        }
        println!["skipped {}", self.missing_updates.len()];
        Ok(())
    }
}
impl<R> XQ<fs::File,R> where R: Record {
/// Convenience constructor backed by on-disk files rooted at `path`.
pub async fn open_from_path(path: &str) -> Result<XQ<fs::File,R>,Error> {
Ok(Self::new(Box::new(FileStorage::open_from_path(path).await?)).await?)
}
}
/// True when point `p` lies inside `bbox`, inclusive on all four edges.
fn overlap(p: &Position, bbox: &BBox) -> bool {
    let (x, y) = *p;
    let (min_x, min_y, max_x, max_y) = *bbox;
    (min_x..=max_x).contains(&x) && (min_y..=max_y).contains(&y)
}
/// Storage path for a quad's record file, sharded into 256 directories by
/// the low byte of the quad id.
fn quad_file(q_id: QuadId) -> String {
    format!["q/{:02x}/{:x}",q_id%256,q_id/256]
}
/// Storage path for the id-index file covering `id`'s block. Delegates to
/// `id_file_from_block` so the two path schemes cannot drift apart
/// (previously the format string was duplicated here).
fn id_file(id: RecordId) -> String {
    id_file_from_block(id_block(id))
}
/// Storage path for an id-index block, sharded like `quad_file`.
fn id_file_from_block(b: IdBlock) -> String {
    format!["i/{:02x}/{:x}",b%256,b/256]
}
/// Block number grouping 100_000 consecutive record ids per index file.
fn id_block(id: RecordId) -> IdBlock {
    id/100_000
}
/// Half-open id range `[start, end)` covered by block `b`.
fn id_range(b: IdBlock) -> (RecordId,RecordId) {
    (b*100_000,(b+1)*100_000)
}
/// Decodes an id-index block: a varint count followed by that many
/// (record id, quad id) varint pairs. An empty buffer decodes to an empty
/// map (fresh/absent file).
fn unpack_ids(buf: &[u8]) -> Result<HashMap<RecordId,QuadId>,Error> {
let mut records = HashMap::new();
if buf.is_empty() { return Ok(records) }
let mut offset = 0;
// Leading varint: number of pairs that follow.
let (s,len) = varint::decode(&buf[offset..])?;
offset += s;
for _ in 0..len {
let (s,r_id) = varint::decode(&buf[offset..])?;
offset += s;
let (s,q_id) = varint::decode(&buf[offset..])?;
offset += s;
records.insert(r_id, q_id);
}
Ok(records)
}
/// Encodes an id-index block; inverse of `unpack_ids`. The buffer is sized
/// exactly in a first pass, then filled in a second.
/// NOTE(review): HashMap iteration order is unspecified, so two encodes of
/// the same map may produce different (equivalent) byte streams.
fn pack_ids(records: &HashMap<RecordId,QuadId>) -> Vec<u8> {
// Pass 1: compute the exact encoded size.
let mut size = 0;
size += varint::length(records.len() as u64);
for (r_id,q_id) in records {
size += varint::length(*r_id);
size += varint::length(*q_id);
}
// Pass 2: encode count, then each (record id, quad id) pair.
let mut buf = vec![0;size];
let mut offset = 0;
offset += varint::encode(records.len() as u64, &mut buf[offset..]).unwrap();
for (r_id,q_id) in records {
offset += varint::encode(*r_id, &mut buf[offset..]).unwrap();
offset += varint::encode(*q_id, &mut buf[offset..]).unwrap();
}
buf
}
|
use clipboard_script::{is_jp};
use clipboard_master::{Master, ClipboardHandler, CallbackResult};
use clipboard_win::Clipboard;
use std::io;
///Returns whether text contains only JP kana.
///
/// Used to detect furigana (ruby) lines, which consist solely of kana,
/// spaces, and the ﹅ emphasis-dot marker.
#[inline(always)]
pub fn is_furi_skip<T: AsRef<str>>(text: T) -> bool {
    text.as_ref().chars().all(|ch| {
        matches!(
            ch,
            '﹅' | ' '                  // emphasis dot / plain space
            | '\u{3040}'..='\u{309f}'  // hiragana block
            | '\u{30a0}'..='\u{30ff}'  // katakana block
        )
    })
}
/// Clipboard-change handler holding a reusable text buffer so each event
/// avoids a fresh allocation.
#[derive(Default)]
struct Handler {
buffer: String,
}
impl ClipboardHandler for Handler {
    /// On clipboard change: if the clipboard holds multi-line Japanese
    /// text, rejoin the wrapped lines, dropping blank lines and
    /// furigana-only (kana ruby) lines in between, and write the result
    /// back to the clipboard.
    fn on_clipboard_change(&mut self) -> CallbackResult {
        use clipboard_win::{Setter, Getter};
        const SPLIT_PAT: &[char] = &['\r', '\n'];
        // Keep the clipboard open while we read and (maybe) rewrite it.
        let _clip = match Clipboard::new_attempts(10) {
            Ok(clip) => clip,
            Err(error) => {
                eprintln!("Failed to open clipboard within 10 attempts. Error: {}", error);
                return CallbackResult::Next;
            }
        };
        match clipboard_win::Unicode.read_clipboard(&mut self.buffer) {
            Ok(_) => (),
            Err(_) => return CallbackResult::Next,
        }
        // Only touch multi-line Japanese text.
        if !is_jp(&self.buffer) || !self.buffer.contains(SPLIT_PAT) {
            self.buffer.truncate(0);
            return CallbackResult::Next;
        }
        let text = self.buffer.trim();
        let text_len = self.buffer.len();
        let mut new_text = String::with_capacity((text_len + text_len) / 3);
        let parts = text.split(SPLIT_PAT).map(|part| part.trim()).collect::<Vec<_>>();
        // First and last parts are always kept; only the interior is
        // filtered. `split` always yields at least one part, so indexing
        // parts[0] is safe. Guarding the tail on `parts.len() > 1` fixes
        // the old bug of appending the sole part twice when trimming
        // removed every newline (e.g. "abc\n").
        new_text.push_str(parts[0]);
        if parts.len() > 1 {
            for part in &parts[1..parts.len() - 1] {
                if part.is_empty() || is_furi_skip(part) {
                    continue;
                }
                new_text.push_str(part);
            }
            new_text.push_str(parts[parts.len() - 1]);
        }
        // Only rewrite when something actually changed length-wise.
        if text_len != new_text.len() {
            let _ = clipboard_win::Unicode.write_clipboard(&new_text);
        }
        self.buffer.truncate(0);
        CallbackResult::Next
    }
    /// Log watcher errors and keep listening.
    fn on_clipboard_error(&mut self, error: io::Error) -> CallbackResult {
        eprintln!("Error: {}", error);
        CallbackResult::Next
    }
}
/// Runs the clipboard watcher forever; the master loop's final error (if
/// any) is intentionally ignored — per-event failures are logged by the
/// handler itself.
fn main() {
let _ = Master::new(Handler::default()).run();
}
|
use serde::Serialize;
/// Newtype wrapper around an email-address string.
/// NOTE(review): no validation is applied on construction — confirm that
/// callers guarantee well-formed addresses.
#[derive(Clone, Debug, Serialize)]
pub struct GmailAddress(String);
impl GmailAddress {
pub fn new(email: String) -> Self {
GmailAddress(email)
}
}
use std::fmt;
impl fmt::Display for GmailAddress {
    /// Formats as the bare address string.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.0)
    }
}
|
use sensor::Sensor;
use std::cell::RefCell;
use std::rc::Rc;
/// Owns a set of shared, mutable sensors and updates them as a group.
pub struct SensorCollection {
// Rc<RefCell<..>> lets callers keep live handles to sensors they added.
sensors : Vec<Rc<RefCell<Box<Sensor>>>>,
}
impl SensorCollection {
// Creates empty sensor collection
pub fn new() -> SensorCollection {
SensorCollection { sensors : Vec::new() }
}
// Adds a sensor to the collection
pub fn add(&mut self, sensor: Rc<RefCell<Box<Sensor>>>) {
self.sensors.push(sensor);
}
// Updates all sensors
pub fn update_all(&mut self) -> Result<(), String> {
for s in self.sensors.as_mut_slice() {
try!(s.borrow_mut().update());
}
Ok(())
}
} |
//! Core infrastructure for the proxy application.
//!
//! Conglomerates:
//! - Configuration
//! - Runtime initialization
//! - Admin interfaces
//! - Tap
//! - Metric labeling
#![deny(warnings, rust_2018_idioms)]
pub use linkerd2_addr::{self as addr, Addr, NameAddr};
pub use linkerd2_conditional::Conditional;
pub use linkerd2_drain as drain;
pub use linkerd2_error::{Error, Never, Recover};
pub use linkerd2_exp_backoff as exp_backoff;
pub use linkerd2_metrics as metrics;
pub use linkerd2_opencensus as opencensus;
pub use linkerd2_reconnect as reconnect;
pub use linkerd2_request_filter as request_filter;
pub use linkerd2_router as router;
pub use linkerd2_trace_context as trace_context;
pub mod accept_error;
pub mod admin;
pub mod classify;
pub mod config;
pub mod control;
pub mod dns;
pub mod dst;
pub mod errors;
pub mod handle_time;
pub mod metric_labels;
pub mod profiles;
pub mod proxy;
pub mod serve;
pub mod spans;
pub mod svc;
pub mod telemetry;
pub mod trace;
pub mod transport;
// Header names used to carry destination/identity metadata between the
// proxy and its peers.
pub const CANONICAL_DST_HEADER: &'static str = "l5d-dst-canonical";
pub const DST_OVERRIDE_HEADER: &'static str = "l5d-dst-override";
pub const L5D_REMOTE_IP: &'static str = "l5d-remote-ip";
pub const L5D_SERVER_ID: &'static str = "l5d-server-id";
pub const L5D_CLIENT_ID: &'static str = "l5d-client-id";
pub const L5D_REQUIRE_ID: &'static str = "l5d-require-id";
// Port assumed when an authority/host value carries no explicit port.
const DEFAULT_PORT: u16 = 80;
/// Target address taken from the `l5d-dst-override` header, defaulting the
/// port to 80 when the header value has none.
pub fn http_request_l5d_override_dst_addr<B>(req: &http::Request<B>) -> Result<Addr, addr::Error> {
proxy::http::authority_from_header(req, DST_OVERRIDE_HEADER)
.ok_or(addr::Error::InvalidHost)
.and_then(|a| Addr::from_authority_and_default_port(&a, DEFAULT_PORT))
}
/// Target address taken from the request URI's authority component.
pub fn http_request_authority_addr<B>(req: &http::Request<B>) -> Result<Addr, addr::Error> {
req.uri()
.authority_part()
.ok_or(addr::Error::InvalidHost)
.and_then(|a| Addr::from_authority_and_default_port(a, DEFAULT_PORT))
}
/// Target address taken from the HTTP/1 Host header.
pub fn http_request_host_addr<B>(req: &http::Request<B>) -> Result<Addr, addr::Error> {
use crate::proxy::http::h1;
h1::authority_from_host(req)
.ok_or(addr::Error::InvalidHost)
.and_then(|a| Addr::from_authority_and_default_port(&a, DEFAULT_PORT))
}
/// Original (SO_ORIGINAL_DST-style) destination recorded on the accepted
/// connection, if it isn't local to this proxy.
pub fn http_request_orig_dst_addr<B>(req: &http::Request<B>) -> Result<Addr, addr::Error> {
use crate::transport::tls;
req.extensions()
.get::<tls::accept::Meta>()
.and_then(|m| m.addrs.target_addr_if_not_local())
.map(Addr::Socket)
.ok_or(addr::Error::InvalidHost)
}
/// Absolute deadline attached to a request's extensions, after which
/// dispatch should be abandoned.
#[derive(Copy, Clone, Debug)]
pub struct DispatchDeadline(std::time::Instant);
impl DispatchDeadline {
/// Deadline `allowance` from now (per the tokio timer's clock).
pub fn after(allowance: std::time::Duration) -> DispatchDeadline {
DispatchDeadline(tokio_timer::clock::now() + allowance)
}
/// Reads a previously-attached deadline from a request, if any.
pub fn extract<A>(req: &http::Request<A>) -> Option<std::time::Instant> {
req.extensions().get::<DispatchDeadline>().map(|d| d.0)
}
}
// Shared HTTP metrics registries, specialized by label type.
pub type ControlHttpMetricsRegistry =
proxy::http::metrics::SharedRegistry<metric_labels::ControlLabels, classify::Class>;
pub type HttpEndpointMetricsRegistry =
proxy::http::metrics::SharedRegistry<metric_labels::EndpointLabels, classify::Class>;
pub type HttpRouteMetricsRegistry =
proxy::http::metrics::SharedRegistry<metric_labels::RouteLabels, classify::Class>;
/// Bundle of the metrics registries a proxy stack needs; cheap to clone
/// (registries are shared).
#[derive(Clone)]
pub struct ProxyMetrics {
pub http_handle_time: proxy::http::metrics::handle_time::Scope,
pub http_route: HttpRouteMetricsRegistry,
pub http_route_retry: HttpRouteMetricsRegistry,
pub http_endpoint: HttpEndpointMetricsRegistry,
pub transport: transport::MetricsRegistry,
}
|
// Copyright 2016 `multipart` Crate Developers
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
use mock::{ClientRequest, HttpBuffer};
use server::{MultipartField, ReadEntry, FieldHeaders};
use mime::{self, Mime};
use rand::{self, Rng};
use std::collections::{HashMap, HashSet};
use std::collections::hash_map::{Entry, OccupiedEntry};
use std::fmt;
use std::io::prelude::*;
use std::io::Cursor;
use std::iter::{self, FromIterator};
// Bounds for the randomly generated test data: how many fields/entries to
// create and how long generated strings may be.
const MIN_FIELDS: usize = 1;
const MAX_FIELDS: usize = 3;
const MIN_LEN: usize = 2;
const MAX_LEN: usize = 5;
// Maximum number of leading dashes mixed into generated strings.
const MAX_DASHES: usize = 2;
/// Builds a collection by invoking `gen` a random number of times
/// (between MIN_FIELDS inclusive and MAX_FIELDS exclusive).
fn collect_rand<C: FromIterator<T>, T, F: FnMut() -> T>(mut gen: F) -> C {
    let count = rand::thread_rng().gen_range(MIN_FIELDS, MAX_FIELDS);
    (0..count).map(|_| gen()).collect()
}
// Like Option::expect, but with format!-style panic arguments.
macro_rules! expect_fmt (
($val:expr, $($args:tt)*) => (
match $val {
Some(val) => val,
None => panic!($($args)*),
}
);
);
/// The error is provided as the `err` format argument
macro_rules! expect_ok_fmt (
($val:expr, $($args:tt)*) => (
match $val {
Ok(val) => val,
Err(e) => panic!($($args)*, err=e),
}
);
);
/// Looks up `field.name` in `fields`, returning the occupied entry so the
/// caller can mutate or remove it in place; None if the name is unknown.
fn get_field<'m, V>(field: &FieldHeaders, fields: &'m mut HashMap<String, V>) -> Option<OccupiedEntry<'m, String, V>> {
    if let Entry::Occupied(occupied) = fields.entry(field.name.to_string()) {
        Some(occupied)
    } else {
        None
    }
}
/// The expected dataset for one round-trip test: field name -> set of
/// values, kept separately for text fields and file fields. Entries are
/// removed as the parsed multipart stream is checked against them.
#[derive(Debug)]
struct TestFields {
texts: HashMap<String, HashSet<String>>,
files: HashMap<String, HashSet<FileEntry>>,
}
impl TestFields {
/// Generates a random dataset of text and file fields.
fn gen() -> Self {
TestFields {
texts: collect_rand(|| (gen_string(), collect_rand(gen_string))),
files: collect_rand(|| (gen_string(), FileEntry::gen_many())),
}
}
/// Verifies one parsed multipart field against the expected dataset,
/// removing the matched entry (and its field once exhausted). Returns
/// the reader so the caller can continue to the next field.
fn check_field<M: ReadEntry>(&mut self, mut field: MultipartField<M>) -> M {
// text/plain fields would be considered a file by `TestFields`
if field.headers.content_type.is_none() {
let mut text_entries = expect_fmt!(get_field(&field.headers, &mut self.texts),
"Got text field that wasn't in original dataset: {:?}",
field.headers);
let mut text = String::new();
expect_ok_fmt!(
field.data.read_to_string(&mut text),
"error failed to read text data to string: {:?}\n{err}", field.headers
);
// The value must be one of the expected values for this field name.
assert!(
text_entries.get_mut().remove(&text),
"Got field text data that wasn't in original data set: {:?}\n{:?}\n{:?}",
field.headers,
text,
text_entries.get(),
);
// Drop the field entirely once all its values have been seen.
if text_entries.get().is_empty() {
text_entries.remove_entry();
}
return field.data.into_inner();
}
// Otherwise this is a file field: match on (content type, filename, data).
let mut file_entries = expect_fmt!(get_field(&field.headers, &mut self.files),
"Got file field that wasn't in original dataset: {:?}",
field.headers);
let field_name = field.headers.name.clone();
let (test_entry, inner) = FileEntry::from_field(field);
assert!(
file_entries.get_mut().remove(&test_entry),
"Got field entry that wasn't in original dataset: name: {:?}\n{:?}\nEntries: {:?}",
field_name,
test_entry,
file_entries.get()
);
if file_entries.get().is_empty() {
file_entries.remove_entry();
}
return inner;
}
/// Asserts that every expected field was consumed by `check_field`.
fn assert_is_empty(&self) {
assert!(self.texts.is_empty(), "Text Fields were not exhausted! {:?}", self.texts);
assert!(self.files.is_empty(), "File Fields were not exhausted! {:?}", self.files);
}
}
/// One expected file field: content type, optional filename, and raw payload.
#[derive(Debug, Hash, PartialEq, Eq)]
struct FileEntry {
    content_type: Mime,
    filename: Option<String>,
    data: PrintHex,
}
impl FileEntry {
    /// Reads a server-side field back into a `FileEntry` for comparison with
    /// the generated dataset; also returns the underlying reader.
    fn from_field<M: ReadEntry>(mut field: MultipartField<M>) -> (FileEntry, M) {
        let mut data = Vec::new();
        expect_ok_fmt!(
            field.data.read_to_end(&mut data),
            "Error reading file field: {:?}\n{err}", field.headers
        );
        (
            FileEntry {
                // A missing content-type compares as application/octet-stream.
                content_type: field.headers.content_type.unwrap_or(mime!(Application/OctetStream)),
                filename: field.headers.filename,
                data: PrintHex(data),
            },
            field.data.into_inner()
        )
    }
    /// Generates a random set of file entries for one field name.
    fn gen_many() -> HashSet<FileEntry> {
        collect_rand(Self::gen)
    }
    /// Generates one random entry: coin-flip filename, coin-flip between
    /// string-shaped and arbitrary byte payloads, random mime type.
    fn gen() -> Self {
        let filename = match gen_bool() {
            true => Some(gen_string()),
            false => None,
        };
        let data = PrintHex(match gen_bool() {
            true => gen_string().into_bytes(),
            false => gen_bytes(),
        });
        FileEntry {
            content_type: rand_mime(),
            filename,
            data,
        }
    }
    /// Borrows the filename as `&str`, if any.
    fn filename(&self) -> Option<&str> {
        self.filename.as_ref().map(|s| &**s)
    }
}
/// Wrapper whose `Debug` output renders a byte buffer as comma-separated
/// uppercase hex, e.g. `[A, B, FF]`, instead of `Vec<u8>`'s decimal output.
#[derive(PartialEq, Eq, Hash)]
struct PrintHex(Vec<u8>);
impl fmt::Debug for PrintHex {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "[")?;
        let mut first = true;
        for byte in &self.0 {
            // BUG FIX: the separator must precede every element except the
            // first. The old code appended ", " *after* each element from the
            // second one onward, producing output like "[AB, C, ]".
            if !first {
                write!(f, ", ")?;
            }
            write!(f, "{:X}", byte)?;
            first = false;
        }
        write!(f, "]")
    }
}
// One client/server round-trip: generate random fields, encode them with
// `$client_test`, decode with `$server_test`, then assert everything was seen.
macro_rules! do_test (
    ($client_test:ident, $server_test:ident) => (
        ::init_log();
        info!("Client Test: {:?} Server Test: {:?}", stringify!($client_test),
            stringify!($server_test));
        let mut test_fields = TestFields::gen();
        trace!("Fields for test: {:?}", test_fields);
        let buf = $client_test(&test_fields);
        trace!(
            "\n==Test Buffer Begin==\n{}\n==Test Buffer End==",
            String::from_utf8_lossy(&buf.buf)
        );
        $server_test(buf, &mut test_fields);
        test_fields.assert_is_empty();
    );
);
// 2x2 matrix: {regular, lazy} client against {streaming, entry} server APIs.
#[test]
fn reg_client_reg_server() {
    do_test!(test_client, test_server);
}
#[test]
fn reg_client_entry_server() {
    do_test!(test_client, test_server_entry_api);
}
#[test]
fn lazy_client_reg_server() {
    do_test!(test_client_lazy, test_server);
}
#[test]
fn lazy_client_entry_server() {
    do_test!(test_client_lazy, test_server_entry_api);
}
// Soak-test versions of the four round-trip tests above: each repeats its
// combination for up to `TIME_LIMIT_SECS`. `#[ignore]`d so they only run
// when requested explicitly (e.g. `cargo test -- --ignored`).
mod extended {
    use super::{test_client, test_server, test_server_entry_api, test_client_lazy, TestFields};
    use std::time::Instant;
    const TIME_LIMIT_SECS: u64 = 600;
    #[test]
    #[ignore]
    fn reg_client_reg_server() {
        let started = Instant::now();
        while started.elapsed().as_secs() < TIME_LIMIT_SECS {
            do_test!(test_client, test_server);
        }
    }
    #[test]
    #[ignore]
    fn reg_client_entry_server() {
        let started = Instant::now();
        while started.elapsed().as_secs() < TIME_LIMIT_SECS {
            do_test!(test_client, test_server_entry_api);
        }
    }
    #[test]
    #[ignore]
    fn lazy_client_reg_server() {
        let started = Instant::now();
        while started.elapsed().as_secs() < TIME_LIMIT_SECS {
            do_test!(test_client_lazy, test_server);
        }
    }
    #[test]
    #[ignore]
    fn lazy_client_entry_server() {
        let started = Instant::now();
        while started.elapsed().as_secs() < TIME_LIMIT_SECS {
            do_test!(test_client_lazy, test_server_entry_api);
        }
    }
}
/// Flips a fair coin using the thread-local RNG.
fn gen_bool() -> bool {
    let mut rng = rand::thread_rng();
    rng.gen()
}
/// Generates a random ASCII string shaped `<seg1><dashes><seg2>`: each segment
/// is `MIN_LEN..MAX_LEN+1` chars, with `0..=MAX_DASHES` literal '-' between
/// them (presumably to exercise boundary-like sequences — confirm intent).
fn gen_string() -> String {
    // Both handles come from `thread_rng()`; the draw order below determines
    // which samples feed which value, so it is preserved as-is.
    let mut rng_1 = rand::thread_rng();
    let mut rng_2 = rand::thread_rng();
    let str_len_1 = rng_1.gen_range(MIN_LEN, MAX_LEN + 1);
    let str_len_2 = rng_2.gen_range(MIN_LEN, MAX_LEN + 1);
    let num_dashes = rng_1.gen_range(0, MAX_DASHES + 1);
    rng_1.gen_ascii_chars().take(str_len_1)
        .chain(iter::repeat('-').take(num_dashes))
        .chain(rng_2.gen_ascii_chars().take(str_len_2))
        .collect()
}
/// Random binary payload; currently just a random string's UTF-8 bytes.
fn gen_bytes() -> Vec<u8> {
    gen_string().into()
}
/// Encodes the test fields with the regular (eager) client API, returning the
/// raw multipart buffer it produced.
fn test_client(test_fields: &TestFields) -> HttpBuffer {
    use client::Multipart;
    let request = ClientRequest::default();
    // Flatten the (name -> set-of-values) maps into (name, value) pairs.
    let mut test_files = test_fields.files.iter().flat_map(
        |(name, files)| files.iter().map(move |file| (name, file))
    );
    let test_texts = test_fields.texts.iter().flat_map(
        |(name, texts)| texts.iter().map(move |text| (name, text))
    );
    let mut multipart = Multipart::from_request(request).unwrap();
    // Intersperse file fields amongst text fields
    for (name, text) in test_texts {
        if let Some((file_name, file)) = test_files.next() {
            multipart.write_stream(file_name, &mut &*file.data.0, file.filename(),
                Some(file.content_type.clone())).unwrap();
        }
        multipart.write_text(name, text).unwrap();
    }
    // Write remaining files
    for (file_name, file) in test_files {
        multipart.write_stream(file_name, &mut &*file.data.0, file.filename(),
            Some(file.content_type.clone())).unwrap();
    }
    multipart.send().unwrap()
}
/// Encodes the test fields with the lazy client API, then drains the prepared
/// reader into an `HttpBuffer` equivalent to what `test_client` produces.
fn test_client_lazy(test_fields: &TestFields) -> HttpBuffer {
    use client::lazy::Multipart;
    let mut multipart = Multipart::new();
    let mut test_files = test_fields.files.iter().flat_map(
        |(name, files)| files.iter().map(move |file| (name, file))
    );
    let test_texts = test_fields.texts.iter().flat_map(
        |(name, texts)| texts.iter().map(move |text| (name, text))
    );
    // Same interleaving of file and text fields as the eager client.
    for (name, text) in test_texts {
        if let Some((file_name, file)) = test_files.next() {
            multipart.add_stream(&**file_name, Cursor::new(&file.data.0), file.filename(),
                Some(file.content_type.clone()));
        }
        multipart.add_text(&**name, &**text);
    }
    for (file_name, file) in test_files {
        multipart.add_stream(&**file_name, Cursor::new(&file.data.0), file.filename(),
            Some(file.content_type.clone()));
    }
    let mut prepared = multipart.prepare().unwrap();
    let mut buf = Vec::new();
    // Capture boundary/length before consuming the prepared reader.
    let boundary = prepared.boundary().to_owned();
    let content_len = prepared.content_len();
    prepared.read_to_end(&mut buf).unwrap();
    HttpBuffer::with_buf(buf, boundary, content_len)
}
/// Decodes `buf` with the streaming server API, checking every field off
/// against the expected dataset.
fn test_server(buf: HttpBuffer, fields: &mut TestFields) {
    use server::Multipart;
    let server_buf = buf.for_server();
    // If the client declared a length, it must match the actual payload.
    if let Some(content_len) = server_buf.content_len {
        assert!(content_len == server_buf.data.len() as u64, "Supplied content_len different from actual");
    }
    let mut multipart = Multipart::from_request(server_buf)
        .unwrap_or_else(|_| panic!("Buffer should be multipart!"));
    while let Some(field) = multipart.read_entry_mut().unwrap_opt() {
        fields.check_field(field);
    }
}
/// Same as `test_server`, but exercises the ownership-transferring
/// `into_entry` API: each entry consumes the multipart and yields it back.
fn test_server_entry_api(buf: HttpBuffer, fields: &mut TestFields) {
    use server::Multipart;
    let server_buf = buf.for_server();
    if let Some(content_len) = server_buf.content_len {
        assert!(content_len == server_buf.data.len() as u64, "Supplied content_len different from actual");
    }
    let mut multipart = Multipart::from_request(server_buf)
        .unwrap_or_else(|_| panic!("Buffer should be multipart!"));
    // The first entry must exist: the dataset always has at least one field.
    let entry = multipart.into_entry().expect_alt("Expected entry, got none", "Error reading entry");
    multipart = fields.check_field(entry);
    while let Some(entry) = multipart.into_entry().unwrap_opt() {
        multipart = fields.check_field(entry);
    }
}
/// Picks a random content-type for generated file fields.
fn rand_mime() -> Mime {
    rand::thread_rng().choose(&[
        // TODO: fill this out, preferably with variants that may be hard to parse
        // i.e. containing hyphens, mainly
        mime!(Application/OctetStream),
        mime!(Text/Plain),
        mime!(Image/Png),
    ]).unwrap().clone()
}
|
use std::io;
/// Reads one line from stdin and prints "Yes" when the 3rd char equals the
/// 4th and the 5th equals the 6th (0-based indices 2==3 and 4==5), else "No".
fn main() {
    let mut line = String::new();
    io::stdin().read_line(&mut line).unwrap();
    let chars: Vec<char> = line.chars().collect();
    let matched = chars[2] == chars[3] && chars[4] == chars[5];
    println!("{}", if matched { "Yes" } else { "No" });
}
|
use super::state_prelude::*;
use crate::level_manager::LevelManager;
/// Loading state: drives `LevelManager` setup, then pushes the `Ingame`
/// state once the level loader reports finished (see `update`).
pub struct LevelLoad {
    level_manager: LevelManager,
}
impl LevelLoad {
    /// Loader for `level`, built with the default `LevelManager`.
    pub fn new(level: Level) -> Self {
        let level_manager = LevelManager::new(level);
        Self { level_manager }
    }
    /// Like `new`, but constructed via `LevelManager::with_delete_save`.
    pub fn with_delete_save(level: Level) -> Self {
        let level_manager = LevelManager::with_delete_save(level);
        Self { level_manager }
    }
}
impl<'a, 'b> State<CustomGameData<'a, 'b, CustomData>, StateEvent>
    for LevelLoad
{
    /// Wipes the world, stops audio, and kicks off level setup.
    fn on_start(&mut self, data: StateData<CustomGameData<CustomData>>) {
        data.world.delete_all();
        stop_audio(data.world);
        self.level_manager.setup(data.world);
    }
    /// Wipes the world and resets the per-level resources to their defaults.
    fn on_stop(&mut self, data: StateData<CustomGameData<CustomData>>) {
        data.world.delete_all();
        data.world.write_resource::<TimerRes>().remove_timer();
        data.world.write_resource::<Music>().reset();
        data.world.write_resource::<CheckpointRes>().0 = None;
        data.world.write_resource::<PlayerDeaths>().0 = 0;
        data.world.write_resource::<WinLevel>().0 = false;
        data.world.write_resource::<WinGame>().0 = false;
        // data.world.write_resource::<StopAudio>().0 = true;
    }
    /// Runs the "level_load" dispatcher; pops back when the main menu is
    /// requested, pushes `Ingame` once the loader finishes.
    fn update(
        &mut self,
        data: StateData<CustomGameData<CustomData>>,
    ) -> Trans<CustomGameData<'a, 'b, CustomData>, StateEvent> {
        data.data.update(data.world, "level_load").unwrap();
        if data.world.read_resource::<ToMainMenu>().0 {
            // Consume the flag so it does not re-trigger later.
            data.world.write_resource::<ToMainMenu>().0 = false;
            return Trans::Pop;
        }
        if self.level_manager.level_loader.is_finished() {
            return Trans::Push(Box::new(Ingame::new(
                self.level_manager.level.clone(),
            )));
        }
        Trans::None
    }
    /// Handles level-scoped flags; each flag is consumed after acting on it.
    fn shadow_update(&mut self, data: StateData<CustomGameData<CustomData>>) {
        // Reset level
        if data.world.read_resource::<ResetLevel>().0 {
            self.level_manager.reset(data.world);
            data.world.write_resource::<ResetLevel>().0 = false;
        }
        // Win level
        if data.world.read_resource::<WinLevel>().0 {
            self.level_manager.win_level(data.world);
            data.world.write_resource::<WinLevel>().0 = false;
        }
        // Should save to savefile
        if data.world.read_resource::<ShouldSave>().0 {
            self.level_manager.save_to_savefile(data.world, false);
            data.world.write_resource::<ShouldSave>().0 = false;
        }
    }
}
|
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::alloc::Layout;
use std::fmt;
use std::sync::Arc;
use common_arrow::arrow::bitmap::Bitmap;
use common_exception::ErrorCode;
use common_exception::Result;
use common_expression::types::BooleanType;
use common_expression::types::DataType;
use common_expression::types::NumberDataType;
use common_expression::types::ValueType;
use common_expression::Column;
use common_expression::ColumnBuilder;
use common_expression::Scalar;
use common_io::prelude::*;
use super::aggregate_function::AggregateFunction;
use super::aggregate_function::AggregateFunctionRef;
use super::aggregate_function_factory::AggregateFunctionDescription;
use super::StateAddr;
use crate::aggregates::aggregator_common::assert_variadic_arguments;
/// Per-group aggregation state: bit `i` of `events` is set when the i-th
/// boolean event argument was true on at least one input row.
struct AggregateRetentionState {
    pub events: u32,
}
impl AggregateRetentionState {
    /// Marks event `event` (0-based bit index) as seen.
    #[inline(always)]
    fn add(&mut self, event: u8) {
        self.events |= 1u32 << event;
    }
    /// Combines two partial states: an event counts if either side saw it.
    fn merge(&mut self, other: &Self) {
        self.events |= other.events;
    }
    /// Writes the bitmask into the serialization buffer.
    fn serialize(&self, writer: &mut Vec<u8>) -> Result<()> {
        serialize_into_buf(writer, &self.events)
    }
    /// Restores the bitmask from a serialized buffer.
    fn deserialize(&mut self, reader: &mut &[u8]) -> Result<()> {
        let events = deserialize_from_slice(reader)?;
        self.events = events;
        Ok(())
    }
}
/// `retention(cond_1, ..., cond_k)` aggregate: tracks which of up to 32
/// boolean event conditions fired within each aggregation group.
#[derive(Clone)]
pub struct AggregateRetentionFunction {
    display_name: String,
    // Number of boolean arguments; creation enforces the (1, 32) range
    // via `assert_variadic_arguments`.
    events_size: u8,
}
impl AggregateFunction for AggregateRetentionFunction {
    // NOTE(review): returns the type label, not `display_name` — confirm
    // this matches the factory's expectations for `name()`.
    fn name(&self) -> &str {
        "AggregateRetentionFunction"
    }
    /// Result is an Array(UInt8) of 0/1 flags, one per event argument.
    fn return_type(&self) -> Result<DataType> {
        Ok(DataType::Array(Box::new(DataType::Number(
            NumberDataType::UInt8,
        ))))
    }
    /// Initializes the per-group state with no events seen.
    fn init_state(&self, place: StateAddr) {
        place.write(|| AggregateRetentionState { events: 0 });
    }
    fn state_layout(&self) -> std::alloc::Layout {
        Layout::new::<AggregateRetentionState>()
    }
    /// Accumulates all rows into one state: ORs each event's bit for every
    /// row where the corresponding boolean column is true.
    fn accumulate(
        &self,
        place: StateAddr,
        columns: &[Column],
        _validity: Option<&Bitmap>,
        input_rows: usize,
    ) -> Result<()> {
        let state = place.get::<AggregateRetentionState>();
        // Downcast every argument to a boolean column up front.
        let new_columns = columns
            .iter()
            .map(|col| BooleanType::try_downcast_column(col).unwrap())
            .collect::<Vec<_>>();
        for i in 0..input_rows {
            for j in 0..self.events_size {
                if new_columns[j as usize].get_bit(i) {
                    state.add(j);
                }
            }
        }
        Ok(())
    }
    /// Grouped accumulation: row `i` updates the state at `places[i] + offset`.
    fn accumulate_keys(
        &self,
        places: &[StateAddr],
        offset: usize,
        columns: &[Column],
        _input_rows: usize,
    ) -> Result<()> {
        let new_columns = columns
            .iter()
            .map(|col| BooleanType::try_downcast_column(col).unwrap())
            .collect::<Vec<_>>();
        for (row, place) in places.iter().enumerate() {
            let place = place.next(offset);
            let state = place.get::<AggregateRetentionState>();
            for j in 0..self.events_size {
                if new_columns[j as usize].get_bit(row) {
                    state.add(j);
                }
            }
        }
        Ok(())
    }
    /// Single-row accumulation.
    fn accumulate_row(&self, place: StateAddr, columns: &[Column], row: usize) -> Result<()> {
        let state = place.get::<AggregateRetentionState>();
        let new_columns = columns
            .iter()
            .map(|col| BooleanType::try_downcast_column(col).unwrap())
            .collect::<Vec<_>>();
        for j in 0..self.events_size {
            if new_columns[j as usize].get_bit(row) {
                state.add(j);
            }
        }
        Ok(())
    }
    fn serialize(&self, place: StateAddr, writer: &mut Vec<u8>) -> Result<()> {
        let state = place.get::<AggregateRetentionState>();
        state.serialize(writer)
    }
    fn deserialize(&self, place: StateAddr, reader: &mut &[u8]) -> Result<()> {
        let state = place.get::<AggregateRetentionState>();
        state.deserialize(reader)
    }
    fn merge(&self, place: StateAddr, rhs: StateAddr) -> Result<()> {
        let rhs = rhs.get::<AggregateRetentionState>();
        let state = place.get::<AggregateRetentionState>();
        state.merge(rhs);
        Ok(())
    }
    /// Emits one 0/1 flag per event. The first event gates the rest: if
    /// event 0 never fired, the whole row is zeros (retention semantics).
    #[allow(unused_mut)]
    fn merge_result(&self, place: StateAddr, builder: &mut ColumnBuilder) -> Result<()> {
        let state = place.get::<AggregateRetentionState>();
        let builder = builder.as_array_mut().unwrap();
        let inner = builder
            .builder
            .as_number_mut()
            .unwrap()
            .as_u_int8_mut()
            .unwrap();
        inner.reserve(self.events_size as usize);
        if state.events & 1 == 1 {
            inner.push(1u8);
            for i in 1..self.events_size {
                if state.events & (1 << i) != 0 {
                    inner.push(1u8);
                } else {
                    inner.push(0u8);
                }
            }
        } else {
            for _ in 0..self.events_size {
                inner.push(0u8);
            }
        }
        builder.offsets.push(builder.builder.len() as u64);
        Ok(())
    }
}
impl fmt::Display for AggregateRetentionFunction {
    /// Displays as the name the function was created under.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(&self.display_name)
    }
}
impl AggregateRetentionFunction {
    /// Builds the aggregate from its display name and argument types;
    /// `events_size` is simply the number of boolean event arguments.
    pub fn try_create(
        display_name: &str,
        arguments: Vec<DataType>,
    ) -> Result<AggregateFunctionRef> {
        let function = Self {
            display_name: display_name.to_string(),
            events_size: arguments.len() as u8,
        };
        Ok(Arc::new(function))
    }
}
/// Factory entry point: validates 1..=32 boolean arguments, then delegates
/// to `AggregateRetentionFunction::try_create`.
pub fn try_create_aggregate_retention_function(
    display_name: &str,
    _params: Vec<Scalar>,
    arguments: Vec<DataType>,
) -> Result<AggregateFunctionRef> {
    assert_variadic_arguments(display_name, arguments.len(), (1, 32))?;
    // Every event condition must evaluate to a boolean.
    if arguments.iter().any(|argument| !argument.is_boolean()) {
        return Err(ErrorCode::BadArguments(
            "The arguments of AggregateRetention should be an expression which returns a Boolean result",
        ));
    }
    AggregateRetentionFunction::try_create(display_name, arguments)
}
/// Wraps the creator function in an `AggregateFunctionDescription` for
/// registration with the aggregate-function factory.
pub fn aggregate_retention_function_desc() -> AggregateFunctionDescription {
    AggregateFunctionDescription::creator(Box::new(try_create_aggregate_retention_function))
}
|
// NOTE(review): pre-1.0 Rust dialect (circa 2011): `native mod` declares FFI,
// `ret` is `return`, `auto` infers a type, `s.(i)` indexes, and `check`
// asserts. Left untouched — this does not compile with any modern rustc.
native "rust" mod rustrt {
    fn rust_file_is_dir(str path) -> int;
}
// Returns the platform path separator as a one-character string.
fn path_sep() -> str {
    ret _str.from_char(os_fs.path_sep);
}
type path = str;
// Everything before the last separator, or `p` itself when none is present.
fn dirname(path p) -> path {
    auto sep = path_sep();
    check (_str.byte_len(sep) == 1u);
    let int i = _str.rindex(p, sep.(0));
    if (i == -1) {
        ret p;
    }
    ret _str.substr(p, 0u, i as uint);
}
// Joins two path fragments, avoiding a doubled separator.
fn connect(path pre, path post) -> path {
    auto len = _str.byte_len(pre);
    if (pre.(len - 1u) == (os_fs.path_sep as u8)) { // Trailing '/'?
        ret pre + post;
    }
    ret pre + path_sep() + post;
}
// True when the native runtime reports `p` as a directory.
fn file_is_dir(path p) -> bool {
    ret rustrt.rust_file_is_dir(p) != 0;
}
// Lists `p`'s entries as full paths, skipping "." and "..".
fn list_dir(path p) -> vec[str] {
    auto pl = _str.byte_len(p);
    // Ensure a trailing separator before concatenating entry names.
    if (pl == 0u || p.(pl - 1u) as char != os_fs.path_sep) {
        p += path_sep();
    }
    let vec[str] full_paths = vec();
    for (str filename in os_fs.list_dir(p)) {
        if (!_str.eq(filename, ".")) {if (!_str.eq(filename, "..")) {
            _vec.push[str](full_paths, p + filename);
        }}
    }
    ret full_paths;
}
// Local Variables:
// mode: rust;
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// compile-command: "make -k -C .. 2>&1 | sed -e 's/\\/x\\//x:\\//g'";
// End:
|
// auto generated, do not modify.
// created: Mon Feb 22 23:57:02 2016
// src-file: /QtCore/qloggingcategory.h
// dst-file: /src/core/qloggingcategory.rs
//
// header block begin =>
#![feature(libc)]
#![feature(core)]
#![feature(collections)]
extern crate libc;
use self::libc::*;
// <= header block end
// main block begin =>
// <= main block end
// use block begin =>
use std::ops::Deref;
use super::qstring::*; // 773
// <= use block end
// ext block begin =>
// #[link(name = "Qt5Core")]
// #[link(name = "Qt5Gui")]
// #[link(name = "Qt5Widgets")]
// #[link(name = "QtInline")]
// Raw C shims around QLoggingCategory; names are mangled C++ symbols with a
// C_ prefix. Instance pointers travel as u64 on both sides.
extern {
    fn QLoggingCategory_Class_Size() -> c_int;
    // proto: void QLoggingCategory::QLoggingCategory(const char * category, QtMsgType severityLevel);
    fn C_ZN16QLoggingCategoryC2EPKc9QtMsgType(arg0: *mut c_char, arg1: c_int) -> u64;
    // proto: bool QLoggingCategory::isDebugEnabled();
    fn C_ZNK16QLoggingCategory14isDebugEnabledEv(qthis: u64 /* *mut c_void*/) -> c_char;
    // proto: void QLoggingCategory::~QLoggingCategory();
    fn C_ZN16QLoggingCategoryD2Ev(qthis: u64 /* *mut c_void*/);
    // proto: void QLoggingCategory::QLoggingCategory(const char * category);
    fn C_ZN16QLoggingCategoryC2EPKc(arg0: *mut c_char) -> u64;
    // proto: void QLoggingCategory::setEnabled(QtMsgType type, bool enable);
    fn C_ZN16QLoggingCategory10setEnabledE9QtMsgTypeb(qthis: u64 /* *mut c_void*/, arg0: c_int, arg1: c_char);
    // proto: bool QLoggingCategory::isEnabled(QtMsgType type);
    fn C_ZNK16QLoggingCategory9isEnabledE9QtMsgType(qthis: u64 /* *mut c_void*/, arg0: c_int) -> c_char;
    // proto: bool QLoggingCategory::isWarningEnabled();
    fn C_ZNK16QLoggingCategory16isWarningEnabledEv(qthis: u64 /* *mut c_void*/) -> c_char;
    // proto: bool QLoggingCategory::isInfoEnabled();
    fn C_ZNK16QLoggingCategory13isInfoEnabledEv(qthis: u64 /* *mut c_void*/) -> c_char;
    // proto: const char * QLoggingCategory::categoryName();
    fn C_ZNK16QLoggingCategory12categoryNameEv(qthis: u64 /* *mut c_void*/) -> *mut c_char;
    // proto: bool QLoggingCategory::isCriticalEnabled();
    fn C_ZNK16QLoggingCategory17isCriticalEnabledEv(qthis: u64 /* *mut c_void*/) -> c_char;
    // proto: static QLoggingCategory * QLoggingCategory::defaultCategory();
    fn C_ZN16QLoggingCategory15defaultCategoryEv() -> *mut c_void;
    // proto: static void QLoggingCategory::setFilterRules(const QString & rules);
    fn C_ZN16QLoggingCategory14setFilterRulesERK7QString(arg0: *mut c_void);
} // <= ext block end
// body block begin =>
// class sizeof(QLoggingCategory)=24
// Auto-generated binding (header says "do not modify"); comments only added.
#[derive(Default)]
pub struct QLoggingCategory {
    // qbase: None,
    // Raw pointer to the C++ QLoggingCategory instance, carried as u64.
    pub qclsinst: u64 /* *mut c_void*/,
}
impl /*struct*/ QLoggingCategory {
    // Wraps an existing C++ instance pointer without allocating.
    pub fn inheritFrom(qthis: u64 /* *mut c_void*/) -> QLoggingCategory {
        return QLoggingCategory{qclsinst: qthis, ..Default::default()};
    }
}
// proto: void QLoggingCategory::QLoggingCategory(const char * category, QtMsgType severityLevel);
impl /*struct*/ QLoggingCategory {
    // Overload dispatcher: the tuple type of `value` selects the C++ constructor.
    pub fn new<T: QLoggingCategory_new>(value: T) -> QLoggingCategory {
        let rsthis = value.new();
        return rsthis;
        // return 1;
    }
}
pub trait QLoggingCategory_new {
    fn new(self) -> QLoggingCategory;
}
// proto: void QLoggingCategory::QLoggingCategory(const char * category, QtMsgType severityLevel);
impl<'a> /*trait*/ QLoggingCategory_new for (&'a String, i32) {
    fn new(self) -> QLoggingCategory {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZN16QLoggingCategoryC2EPKc9QtMsgType()};
        let ctysz: c_int = unsafe{QLoggingCategory_Class_Size()};
        // NOTE(review): this calloc'd placeholder is never used or freed (leak),
        // and `String::as_ptr()` is not NUL-terminated while the C++ side expects
        // a C string — confirm the generator's contract before relying on this.
        let qthis_ph: u64 = unsafe{calloc(1, ctysz as usize)} as u64;
        let arg0 = self.0.as_ptr() as *mut c_char;
        let arg1 = self.1 as c_int;
        let qthis: u64 = unsafe {C_ZN16QLoggingCategoryC2EPKc9QtMsgType(arg0, arg1)};
        let rsthis = QLoggingCategory{qclsinst: qthis, ..Default::default()};
        return rsthis;
        // return 1;
    }
}
// proto: bool QLoggingCategory::isDebugEnabled();
impl /*struct*/ QLoggingCategory {
    // Dispatcher; the unit tuple selects the only overload. Returns the C++
    // bool as i8 (non-zero = true).
    pub fn isDebugEnabled<RetType, T: QLoggingCategory_isDebugEnabled<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.isDebugEnabled(self);
        // return 1;
    }
}
pub trait QLoggingCategory_isDebugEnabled<RetType> {
    fn isDebugEnabled(self , rsthis: & QLoggingCategory) -> RetType;
}
// proto: bool QLoggingCategory::isDebugEnabled();
impl<'a> /*trait*/ QLoggingCategory_isDebugEnabled<i8> for () {
    fn isDebugEnabled(self , rsthis: & QLoggingCategory) -> i8 {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK16QLoggingCategory14isDebugEnabledEv()};
        let mut ret = unsafe {C_ZNK16QLoggingCategory14isDebugEnabledEv(rsthis.qclsinst)};
        return ret as i8; // 1
        // return 1;
    }
}
// proto: void QLoggingCategory::~QLoggingCategory();
impl /*struct*/ QLoggingCategory {
    // Manual destructor call; nothing here prevents double-free or use-after-free.
    pub fn free<RetType, T: QLoggingCategory_free<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.free(self);
        // return 1;
    }
}
pub trait QLoggingCategory_free<RetType> {
    fn free(self , rsthis: & QLoggingCategory) -> RetType;
}
// proto: void QLoggingCategory::~QLoggingCategory();
impl<'a> /*trait*/ QLoggingCategory_free<()> for () {
    fn free(self , rsthis: & QLoggingCategory) -> () {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZN16QLoggingCategoryD2Ev()};
        unsafe {C_ZN16QLoggingCategoryD2Ev(rsthis.qclsinst)};
        // return 1;
    }
}
// proto: void QLoggingCategory::QLoggingCategory(const char * category);
impl<'a> /*trait*/ QLoggingCategory_new for (&'a String) {
    fn new(self) -> QLoggingCategory {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZN16QLoggingCategoryC2EPKc()};
        let ctysz: c_int = unsafe{QLoggingCategory_Class_Size()};
        // NOTE(review): placeholder calloc is never freed (leak) and
        // `String::as_ptr()` is not NUL-terminated while C expects a C string —
        // confirm the generator's contract.
        let qthis_ph: u64 = unsafe{calloc(1, ctysz as usize)} as u64;
        let arg0 = self.as_ptr() as *mut c_char;
        let qthis: u64 = unsafe {C_ZN16QLoggingCategoryC2EPKc(arg0)};
        let rsthis = QLoggingCategory{qclsinst: qthis, ..Default::default()};
        return rsthis;
        // return 1;
    }
}
// proto: void QLoggingCategory::setEnabled(QtMsgType type, bool enable);
impl /*struct*/ QLoggingCategory {
    // Dispatcher; the (i32, i8) tuple carries (QtMsgType, bool-as-i8).
    pub fn setEnabled<RetType, T: QLoggingCategory_setEnabled<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.setEnabled(self);
        // return 1;
    }
}
pub trait QLoggingCategory_setEnabled<RetType> {
    fn setEnabled(self , rsthis: & QLoggingCategory) -> RetType;
}
// proto: void QLoggingCategory::setEnabled(QtMsgType type, bool enable);
impl<'a> /*trait*/ QLoggingCategory_setEnabled<()> for (i32, i8) {
    fn setEnabled(self , rsthis: & QLoggingCategory) -> () {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZN16QLoggingCategory10setEnabledE9QtMsgTypeb()};
        let arg0 = self.0 as c_int;
        let arg1 = self.1 as c_char;
        unsafe {C_ZN16QLoggingCategory10setEnabledE9QtMsgTypeb(rsthis.qclsinst, arg0, arg1)};
        // return 1;
    }
}
// proto: bool QLoggingCategory::isEnabled(QtMsgType type);
impl /*struct*/ QLoggingCategory {
    pub fn isEnabled<RetType, T: QLoggingCategory_isEnabled<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.isEnabled(self);
        // return 1;
    }
}
pub trait QLoggingCategory_isEnabled<RetType> {
    fn isEnabled(self , rsthis: & QLoggingCategory) -> RetType;
}
// proto: bool QLoggingCategory::isEnabled(QtMsgType type);
impl<'a> /*trait*/ QLoggingCategory_isEnabled<i8> for (i32) {
    fn isEnabled(self , rsthis: & QLoggingCategory) -> i8 {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK16QLoggingCategory9isEnabledE9QtMsgType()};
        let arg0 = self as c_int;
        let mut ret = unsafe {C_ZNK16QLoggingCategory9isEnabledE9QtMsgType(rsthis.qclsinst, arg0)};
        return ret as i8; // 1
        // return 1;
    }
}
// proto: bool QLoggingCategory::isWarningEnabled();
impl /*struct*/ QLoggingCategory {
    pub fn isWarningEnabled<RetType, T: QLoggingCategory_isWarningEnabled<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.isWarningEnabled(self);
        // return 1;
    }
}
pub trait QLoggingCategory_isWarningEnabled<RetType> {
    fn isWarningEnabled(self , rsthis: & QLoggingCategory) -> RetType;
}
// proto: bool QLoggingCategory::isWarningEnabled();
impl<'a> /*trait*/ QLoggingCategory_isWarningEnabled<i8> for () {
    fn isWarningEnabled(self , rsthis: & QLoggingCategory) -> i8 {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK16QLoggingCategory16isWarningEnabledEv()};
        let mut ret = unsafe {C_ZNK16QLoggingCategory16isWarningEnabledEv(rsthis.qclsinst)};
        return ret as i8; // 1
        // return 1;
    }
}
// proto: bool QLoggingCategory::isInfoEnabled();
impl /*struct*/ QLoggingCategory {
    pub fn isInfoEnabled<RetType, T: QLoggingCategory_isInfoEnabled<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.isInfoEnabled(self);
        // return 1;
    }
}
pub trait QLoggingCategory_isInfoEnabled<RetType> {
    fn isInfoEnabled(self , rsthis: & QLoggingCategory) -> RetType;
}
// proto: bool QLoggingCategory::isInfoEnabled();
impl<'a> /*trait*/ QLoggingCategory_isInfoEnabled<i8> for () {
    fn isInfoEnabled(self , rsthis: & QLoggingCategory) -> i8 {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK16QLoggingCategory13isInfoEnabledEv()};
        let mut ret = unsafe {C_ZNK16QLoggingCategory13isInfoEnabledEv(rsthis.qclsinst)};
        return ret as i8; // 1
        // return 1;
    }
}
// proto: const char * QLoggingCategory::categoryName();
impl /*struct*/ QLoggingCategory {
    pub fn categoryName<RetType, T: QLoggingCategory_categoryName<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.categoryName(self);
        // return 1;
    }
}
pub trait QLoggingCategory_categoryName<RetType> {
    fn categoryName(self , rsthis: & QLoggingCategory) -> RetType;
}
// proto: const char * QLoggingCategory::categoryName();
impl<'a> /*trait*/ QLoggingCategory_categoryName<String> for () {
    fn categoryName(self , rsthis: & QLoggingCategory) -> String {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK16QLoggingCategory12categoryNameEv()};
        let mut ret = unsafe {C_ZNK16QLoggingCategory12categoryNameEv(rsthis.qclsinst)};
        let slen = unsafe {strlen(ret as *const i8)} as usize;
        // NOTE(review): `String::from_raw_parts` takes ownership of Qt-owned
        // memory (and claims capacity slen+1); dropping the String frees memory
        // Rust did not allocate — almost certainly UB. Confirm before using.
        return unsafe{String::from_raw_parts(ret as *mut u8, slen, slen+1)};
        // return 1;
    }
}
// proto: bool QLoggingCategory::isCriticalEnabled();
impl /*struct*/ QLoggingCategory {
    pub fn isCriticalEnabled<RetType, T: QLoggingCategory_isCriticalEnabled<RetType>>(& self, overload_args: T) -> RetType {
        return overload_args.isCriticalEnabled(self);
        // return 1;
    }
}
pub trait QLoggingCategory_isCriticalEnabled<RetType> {
    fn isCriticalEnabled(self , rsthis: & QLoggingCategory) -> RetType;
}
// proto: bool QLoggingCategory::isCriticalEnabled();
impl<'a> /*trait*/ QLoggingCategory_isCriticalEnabled<i8> for () {
    fn isCriticalEnabled(self , rsthis: & QLoggingCategory) -> i8 {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZNK16QLoggingCategory17isCriticalEnabledEv()};
        let mut ret = unsafe {C_ZNK16QLoggingCategory17isCriticalEnabledEv(rsthis.qclsinst)};
        return ret as i8; // 1
        // return 1;
    }
}
// proto: static QLoggingCategory * QLoggingCategory::defaultCategory();
impl /*struct*/ QLoggingCategory {
    // Static method: no receiver, wraps the returned C++ pointer.
    pub fn defaultCategory_s<RetType, T: QLoggingCategory_defaultCategory_s<RetType>>( overload_args: T) -> RetType {
        return overload_args.defaultCategory_s();
        // return 1;
    }
}
pub trait QLoggingCategory_defaultCategory_s<RetType> {
    fn defaultCategory_s(self ) -> RetType;
}
// proto: static QLoggingCategory * QLoggingCategory::defaultCategory();
impl<'a> /*trait*/ QLoggingCategory_defaultCategory_s<QLoggingCategory> for () {
    fn defaultCategory_s(self ) -> QLoggingCategory {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZN16QLoggingCategory15defaultCategoryEv()};
        let mut ret = unsafe {C_ZN16QLoggingCategory15defaultCategoryEv()};
        let mut ret1 = QLoggingCategory::inheritFrom(ret as u64);
        return ret1;
        // return 1;
    }
}
// proto: static void QLoggingCategory::setFilterRules(const QString & rules);
impl /*struct*/ QLoggingCategory {
    pub fn setFilterRules_s<RetType, T: QLoggingCategory_setFilterRules_s<RetType>>( overload_args: T) -> RetType {
        return overload_args.setFilterRules_s();
        // return 1;
    }
}
pub trait QLoggingCategory_setFilterRules_s<RetType> {
    fn setFilterRules_s(self ) -> RetType;
}
// proto: static void QLoggingCategory::setFilterRules(const QString & rules);
impl<'a> /*trait*/ QLoggingCategory_setFilterRules_s<()> for (&'a QString) {
    fn setFilterRules_s(self ) -> () {
        // let qthis: *mut c_void = unsafe{calloc(1, 32)};
        // unsafe{_ZN16QLoggingCategory14setFilterRulesERK7QString()};
        let arg0 = self.qclsinst as *mut c_void;
        unsafe {C_ZN16QLoggingCategory14setFilterRulesERK7QString(arg0)};
        // return 1;
    }
}
// <= body block end
|
// Each of the m lists must contribute one value (1..=n) such that no single
// value is picked more than ceil(m/2) times; prints "YES" plus one valid
// assignment, or "NO". (Problem statement inferred from the checks below.)
fn solve(n: usize, m: usize, a: Vec<Vec<usize>>) {
    // freq[x] = total occurrences of x across all lists (index 0 unused).
    let mut freq = vec![0; n + 1];
    for r in &a {
        for &x in r {
            freq[x] += 1;
        }
    }
    let max = freq.iter().copied().max().unwrap();
    // ceil(m/2): the per-value pick budget.
    let lim = (m + 1) / 2;
    let mut ans = Vec::new();
    if max > lim {
        // Some value `tgt` occurs more than the budget. Give `tgt` to up to
        // `lim` lists, visiting the shortest lists first (they have the fewest
        // fallback options); every other list takes any non-`tgt` element.
        ans = vec![0; m]; // 0 = "no pick found" sentinel
        let mut a: Vec<(usize, Vec<usize>)> = a.into_iter().enumerate().collect();
        a.sort_by_key(|(_, r)| r.len());
        let tgt = freq.iter().position(|&x| x == max).unwrap();
        let mut choose = 0;
        for (i, a) in a {
            if choose < lim && a.contains(&tgt) {
                choose += 1;
                ans[i] = tgt;
            } else if let Some(oth) = a.iter().copied().find(|&x| x != tgt) {
                ans[i] = oth;
            }
        }
    } else {
        // No value can exceed the budget even if every list picks its head.
        for a in a {
            ans.push(a[0]);
        }
    }
    // A remaining sentinel means some list could not be assigned any value.
    if ans.iter().any(|&x| x == 0) {
        println!("NO");
    } else {
        println!("YES");
        let ans: Vec<String> = ans.into_iter().map(|x| x.to_string()).collect();
        println!("{}", ans.join(" "));
    }
}
/// Reads `t` test cases from stdin. Each case: `n` (value range) and `m`
/// (list count), then `m` records of `k` followed by `k` values.
fn main() {
    let stdin = std::io::stdin();
    let mut rd = ProconReader::new(stdin.lock());
    let t: usize = rd.get();
    for _ in 0..t {
        let n: usize = rd.get();
        let m: usize = rd.get();
        let a: Vec<Vec<usize>> = (0..m)
            .map(|_| {
                let k: usize = rd.get();
                rd.get_vec(k)
            })
            .collect();
        solve(n, m, a);
    }
}
/// Whitespace-separated token reader for competitive-programming input.
pub struct ProconReader<R> {
    r: R,
    l: String,
    i: usize,
}
impl<R: std::io::BufRead> ProconReader<R> {
    /// Wraps a buffered reader; nothing is consumed until the first `get`.
    pub fn new(reader: R) -> Self {
        Self {
            r: reader,
            l: String::new(),
            i: 0,
        }
    }
    /// Reads the next space-separated token and parses it as `T`.
    /// Panics on EOF or when the token fails to parse.
    pub fn get<T>(&mut self) -> T
    where
        T: std::str::FromStr,
        <T as std::str::FromStr>::Err: std::fmt::Debug,
    {
        self.skip_blanks();
        // After skipping blanks a non-space character must sit at the cursor.
        assert!(self.i < self.l.len());
        assert_ne!(&self.l[self.i..=self.i], " ");
        let rest = &self.l[self.i..];
        let token_len = rest.find(' ').unwrap_or(rest.len());
        let parsed = rest[..token_len]
            .parse()
            .unwrap_or_else(|e| panic!("{:?}, attempt to read `{}`", e, rest));
        self.i += token_len;
        parsed
    }
    /// Advances the cursor past spaces, pulling fresh lines as needed.
    fn skip_blanks(&mut self) {
        loop {
            if let Some(offset) = self.l[self.i..].find(|ch: char| ch != ' ') {
                self.i += offset;
                return;
            }
            // Current line exhausted — fetch the next one (panic on EOF).
            let mut buf = String::new();
            let num_bytes = self
                .r
                .read_line(&mut buf)
                .unwrap_or_else(|_| panic!("invalid UTF-8"));
            assert!(num_bytes > 0, "reached EOF :(");
            self.l = buf
                .trim_end_matches('\n')
                .trim_end_matches('\r')
                .to_string();
            self.i = 0;
        }
    }
    /// Reads `n` tokens into a `Vec`.
    pub fn get_vec<T>(&mut self, n: usize) -> Vec<T>
    where
        T: std::str::FromStr,
        <T as std::str::FromStr>::Err: std::fmt::Debug,
    {
        (0..n).map(|_| self.get()).collect()
    }
    /// Reads one token and splits it into characters.
    pub fn get_chars(&mut self) -> Vec<char> {
        self.get::<String>().chars().collect()
    }
}
|
use crate::configuration::{new_configuration, Configuration};
use command::CreatePostCommand;
use db::SqlxPostDb;
use domain::new_post_domain;
use domain::PostDb;
use domain::PostDomain;
use std::time::Duration;
/// Lazily-initialized container wiring together configuration, the
/// database connection and the domain/command layers.
pub struct ServiceRegistry {
    sqlx_post_db: Option<SqlxPostDb>, // populated by `init`; `None` until then
}
impl ServiceRegistry {
    /// Creates an empty registry; `init` must be awaited before any of
    /// the database-backed getters is used.
    pub fn new() -> Self {
        Self { sqlx_post_db: None }
    }
    /// Loads configuration and connects to the database.
    ///
    /// Panics if the configuration cannot be loaded or the connection
    /// cannot be established.
    pub async fn init(&mut self) {
        let conf = self.get_config();
        self.sqlx_post_db = Some(
            db::connect(
                &*conf.database.uri,
                conf.database.min_conn,
                conf.database.max_conn,
                Duration::from_secs(conf.database.max_lifetime),
            )
            .await
            .expect("failed to create db"),
        );
    }
    /// Loads the application configuration from its default source.
    pub fn get_config(&self) -> Configuration {
        // `expect` instead of bare `unwrap` so a bad/missing config says why.
        new_configuration().expect("failed to load configuration")
    }
    /// Builds a post domain backed by the connected database.
    pub fn get_post_domain(&self) -> impl PostDomain {
        new_post_domain(self.get_db_sqlx())
    }
    /// Returns a clone of the DB handle; panics if `init` was never awaited.
    pub fn get_db_sqlx(&self) -> impl PostDb {
        self.sqlx_post_db.clone().expect("db not created")
    }
    /// Builds the create-post command on top of the post domain.
    pub fn get_create_post_command(&self) -> CreatePostCommand<impl PostDomain> {
        CreatePostCommand::new(self.get_post_domain())
    }
}
// `new` takes no arguments, so also expose the conventional `Default`
// entry point (silences clippy's `new_without_default`).
impl Default for ServiceRegistry {
    fn default() -> Self {
        Self::new()
    }
}
|
#![feature(asm)]
#![feature(linkage)]
#![deny(warnings)]
#[macro_use]
extern crate log;
extern crate alloc;
use {
alloc::collections::VecDeque,
async_std::task_local,
core::{cell::Cell, future::Future, pin::Pin},
git_version::git_version,
kernel_hal::PageTableTrait,
lazy_static::lazy_static,
std::fmt::{Debug, Formatter},
std::fs::{File, OpenOptions},
std::io::Error,
std::os::unix::io::AsRawFd,
std::sync::Mutex,
std::time::{Duration, SystemTime},
tempfile::tempdir,
};
pub use kernel_hal::defs::*;
use kernel_hal::vdso::*;
pub use kernel_hal::*;
use std::io::Read;
pub use trapframe::syscall_fn_entry as syscall_entry;
#[cfg(target_os = "macos")]
include!("macos.rs");
/// Opaque thread handle; this libos HAL backs "kernel threads" with
/// async-std tasks, so no real handle needs to be kept.
#[repr(C)]
pub struct Thread {
    thread: usize, // placeholder, always 0
}
impl Thread {
    /// Spawns `future` as an async-std task. `_vmtoken` is accepted for
    /// ABI compatibility with the bare-metal HAL but unused here.
    #[export_name = "hal_thread_spawn"]
    pub fn spawn(
        future: Pin<Box<dyn Future<Output = ()> + Send + 'static>>,
        _vmtoken: usize,
    ) -> Self {
        async_std::task::spawn(future);
        Thread { thread: 0 }
    }
    /// Records the current task's thread/process IDs in task-local storage.
    #[export_name = "hal_thread_set_tid"]
    pub fn set_tid(tid: u64, pid: u64) {
        TID.with(|x| x.set(tid));
        PID.with(|x| x.set(pid));
    }
    /// Returns the `(tid, pid)` pair previously stored by `set_tid`.
    #[export_name = "hal_thread_get_tid"]
    pub fn get_tid() -> (u64, u64) {
        (TID.with(|x| x.get()), PID.with(|x| x.get()))
    }
}
// Per-async-task IDs; each spawned task observes its own copy.
task_local! {
    static TID: Cell<u64> = Cell::new(0);
    static PID: Cell<u64> = Cell::new(0);
}
#[export_name = "hal_context_run"]
unsafe fn context_run(context: &mut UserContext) {
context.run_fncall();
}
/// Page Table
#[repr(C)]
pub struct PageTable {
table_phys: PhysAddr,
}
impl PageTable {
/// Create a new `PageTable`.
#[allow(clippy::new_without_default)]
#[export_name = "hal_pt_new"]
pub fn new() -> Self {
PageTable { table_phys: 0 }
}
}
impl PageTableTrait for PageTable {
    /// Map the page of `vaddr` to the frame of `paddr` with `flags`.
    ///
    /// Implemented by mmapping the shared pmem file at the frame's offset,
    /// so the "physical" frame becomes visible at `vaddr`.
    #[export_name = "hal_pt_map"]
    fn map(&mut self, vaddr: VirtAddr, paddr: PhysAddr, flags: MMUFlags) -> Result<()> {
        debug_assert!(page_aligned(vaddr));
        debug_assert!(page_aligned(paddr));
        let prot = flags.to_mmap_prot();
        mmap(FRAME_FILE.as_raw_fd(), paddr, PAGE_SIZE, vaddr, prot);
        Ok(())
    }
    /// Unmap the page of `vaddr`.
    #[export_name = "hal_pt_unmap"]
    fn unmap(&mut self, vaddr: VirtAddr) -> Result<()> {
        self.unmap_cont(vaddr, 1)
    }
    /// Change the `flags` of the page of `vaddr`.
    #[export_name = "hal_pt_protect"]
    fn protect(&mut self, vaddr: VirtAddr, flags: MMUFlags) -> Result<()> {
        debug_assert!(page_aligned(vaddr));
        let prot = flags.to_mmap_prot();
        let ret = unsafe { libc::mprotect(vaddr as _, PAGE_SIZE, prot) };
        assert_eq!(ret, 0, "failed to mprotect: {:?}", Error::last_os_error());
        Ok(())
    }
    /// Query the physical address which the page of `vaddr` maps to.
    #[export_name = "hal_pt_query"]
    fn query(&mut self, vaddr: VirtAddr) -> Result<PhysAddr> {
        debug_assert!(page_aligned(vaddr));
        // No reverse mapping is kept by this mock implementation.
        unimplemented!()
    }
    /// Get the physical address of root page table.
    #[export_name = "hal_pt_table_phys"]
    fn table_phys(&self) -> PhysAddr {
        self.table_phys
    }
    /// Unmap `pages` consecutive pages starting at `vaddr` in one munmap.
    #[export_name = "hal_pt_unmap_cont"]
    fn unmap_cont(&mut self, vaddr: VirtAddr, pages: usize) -> Result<()> {
        if pages == 0 {
            return Ok(());
        }
        debug_assert!(page_aligned(vaddr));
        let ret = unsafe { libc::munmap(vaddr as _, PAGE_SIZE * pages) };
        assert_eq!(ret, 0, "failed to munmap: {:?}", Error::last_os_error());
        Ok(())
    }
}
/// A "physical" frame, identified by its byte offset into the pmem file.
#[repr(C)]
pub struct PhysFrame {
    paddr: PhysAddr,
}
impl Debug for PhysFrame {
    fn fmt(&self, f: &mut Formatter<'_>) -> core::result::Result<(), std::fmt::Error> {
        write!(f, "PhysFrame({:#x})", self.paddr)
    }
}
lazy_static! {
    // Free list of frame offsets. Starts at PAGE_SIZE: offset 0 is excluded
    // and serves as the dedicated zero frame (see `zero_frame_addr`).
    static ref AVAILABLE_FRAMES: Mutex<VecDeque<usize>> =
        Mutex::new((PAGE_SIZE..PMEM_SIZE).step_by(PAGE_SIZE).collect());
}
impl PhysFrame {
    /// Pops a frame off the free list; `None` when "physical" memory is
    /// exhausted.
    #[export_name = "hal_frame_alloc"]
    pub fn alloc() -> Option<Self> {
        let ret = AVAILABLE_FRAMES
            .lock()
            .unwrap()
            .pop_front()
            .map(|paddr| PhysFrame { paddr });
        trace!("frame alloc: {:?}", ret);
        ret
    }
    /// Address of the reserved zero frame (offset 0, never on the free list).
    #[export_name = "hal_zero_frame_paddr"]
    pub fn zero_frame_addr() -> PhysAddr {
        0
    }
}
impl Drop for PhysFrame {
    // Dropping a frame returns it to the free list for reuse.
    #[export_name = "hal_frame_dealloc"]
    fn drop(&mut self) {
        trace!("frame dealloc: {:?}", self);
        AVAILABLE_FRAMES.lock().unwrap().push_back(self.paddr);
    }
}
/// Translates a "physical" address to the virtual address where the pmem
/// file is mapped into this process.
fn phys_to_virt(paddr: PhysAddr) -> VirtAddr {
    /// Map physical memory from here.
    const PMEM_BASE: VirtAddr = 0x8_0000_0000;
    PMEM_BASE + paddr
}
/// Ensure physical memory are mmapped and accessible.
fn ensure_mmap_pmem() {
    // Touching the lazy static forces `create_pmem_file` (and its mmap of
    // the whole pmem range) to have run.
    FRAME_FILE.as_raw_fd();
}
/// Read physical memory from `paddr` to `buf`.
///
/// Panics if `[paddr, paddr + buf.len())` exceeds `PMEM_SIZE`.
#[export_name = "hal_pmem_read"]
pub fn pmem_read(paddr: PhysAddr, buf: &mut [u8]) {
    trace!("pmem read: paddr={:#x}, len={:#x}", paddr, buf.len());
    assert!(paddr + buf.len() <= PMEM_SIZE);
    ensure_mmap_pmem();
    unsafe {
        // SAFETY: range bounds-checked above; pmem is mapped at phys_to_virt.
        (phys_to_virt(paddr) as *const u8).copy_to_nonoverlapping(buf.as_mut_ptr(), buf.len());
    }
}
/// Write physical memory to `paddr` from `buf`.
///
/// Panics if `[paddr, paddr + buf.len())` exceeds `PMEM_SIZE`.
#[export_name = "hal_pmem_write"]
pub fn pmem_write(paddr: PhysAddr, buf: &[u8]) {
    trace!("pmem write: paddr={:#x}, len={:#x}", paddr, buf.len());
    assert!(paddr + buf.len() <= PMEM_SIZE);
    ensure_mmap_pmem();
    unsafe {
        // SAFETY: range bounds-checked above; pmem is mapped at phys_to_virt.
        buf.as_ptr()
            .copy_to_nonoverlapping(phys_to_virt(paddr) as _, buf.len());
    }
}
/// Zero physical memory at `[paddr, paddr + len)`
#[export_name = "hal_pmem_zero"]
pub fn pmem_zero(paddr: PhysAddr, len: usize) {
    trace!("pmem_zero: addr={:#x}, len={:#x}", paddr, len);
    assert!(paddr + len <= PMEM_SIZE);
    ensure_mmap_pmem();
    unsafe {
        // SAFETY: range bounds-checked above; pmem is mapped at phys_to_virt.
        core::ptr::write_bytes(phys_to_virt(paddr) as *mut u8, 0, len);
    }
}
/// Copy content of `src` frame to `target` frame
#[export_name = "hal_frame_copy"]
pub fn frame_copy(src: PhysAddr, target: PhysAddr) {
    trace!("frame_copy: {:#x} <- {:#x}", target, src);
    assert!(src + PAGE_SIZE <= PMEM_SIZE && target + PAGE_SIZE <= PMEM_SIZE);
    ensure_mmap_pmem();
    unsafe {
        // SAFETY: both full pages were bounds-checked above.
        let buf = phys_to_virt(src) as *const u8;
        buf.copy_to_nonoverlapping(phys_to_virt(target) as _, PAGE_SIZE);
    }
}
/// Flush the physical frame.
#[export_name = "hal_frame_flush"]
pub fn frame_flush(_target: PhysAddr) {
    // do nothing (no cache to flush in a userspace mock)
}
// Page size used throughout this HAL.
const PAGE_SIZE: usize = 0x1000;
// True when `x` is a multiple of PAGE_SIZE.
fn page_aligned(x: VirtAddr) -> bool {
    x % PAGE_SIZE == 0
}
const PMEM_SIZE: usize = 0x4000_0000; // 1GiB
lazy_static! {
    // Backing file for all "physical" memory; created on first use.
    static ref FRAME_FILE: File = create_pmem_file();
}
/// Creates the temporary file backing "physical" memory, sizes it to
/// `PMEM_SIZE`, and maps the whole range at `phys_to_virt(0)`.
fn create_pmem_file() -> File {
    let dir = tempdir().expect("failed to create pmem dir");
    let path = dir.path().join("pmem");
    // workaround on macOS to avoid permission denied.
    // see https://jiege.ch/software/2020/02/07/macos-mmap-exec/ for analysis on this problem.
    #[cfg(target_os = "macos")]
    std::mem::forget(dir);
    let file = OpenOptions::new()
        .read(true)
        .write(true)
        .create(true)
        .open(&path)
        .expect("failed to create pmem file");
    file.set_len(PMEM_SIZE as u64)
        .expect("failed to resize file");
    trace!("create pmem file: path={:?}, size={:#x}", path, PMEM_SIZE);
    let prot = libc::PROT_READ | libc::PROT_WRITE;
    mmap(file.as_raw_fd(), 0, PMEM_SIZE, phys_to_virt(0), prot);
    file
}
/// Mmap frame file `fd` to `vaddr`.
fn mmap(fd: libc::c_int, offset: usize, len: usize, vaddr: VirtAddr, prot: libc::c_int) {
    // workaround on macOS to write text section.
    #[cfg(target_os = "macos")]
    let prot = if prot & libc::PROT_EXEC != 0 {
        prot | libc::PROT_WRITE
    } else {
        prot
    };
    let ret = unsafe {
        // MAP_FIXED: the caller chose `vaddr` deliberately; fail loudly below
        // if the kernel cannot honor it.
        let flags = libc::MAP_SHARED | libc::MAP_FIXED;
        libc::mmap(vaddr as _, len, prot, flags, fd, offset as _)
    } as usize;
    trace!(
        "mmap file: fd={}, offset={:#x}, len={:#x}, vaddr={:#x}, prot={:#b}",
        fd,
        offset,
        len,
        vaddr,
        prot,
    );
    assert_eq!(ret, vaddr, "failed to mmap: {:?}", Error::last_os_error());
}
/// Conversion from HAL MMU flags to `mmap`/`mprotect` protection bits.
trait FlagsExt {
    fn to_mmap_prot(&self) -> libc::c_int;
}
impl FlagsExt for MMUFlags {
    /// Translates each MMU permission bit into its `PROT_*` counterpart.
    fn to_mmap_prot(&self) -> libc::c_int {
        let read = if self.contains(MMUFlags::READ) {
            libc::PROT_READ
        } else {
            0
        };
        let write = if self.contains(MMUFlags::WRITE) {
            libc::PROT_WRITE
        } else {
            0
        };
        let exec = if self.contains(MMUFlags::EXECUTE) {
            libc::PROT_EXEC
        } else {
            0
        };
        read | write | exec
    }
}
lazy_static! {
    // Buffered serial input bytes and the wakeup callbacks notified on
    // each new byte.
    static ref STDIN: Mutex<VecDeque<u8>> = Mutex::new(VecDeque::new());
    static ref STDIN_CALLBACK: Mutex<Vec<Box<dyn Fn() -> bool + Send + Sync>>> =
        Mutex::new(Vec::new());
}
/// Put a char by serial interrupt handler.
fn serial_put(x: u8) {
    STDIN.lock().unwrap().push_back(x);
    // Run every registered callback; drop the ones that return true.
    STDIN_CALLBACK.lock().unwrap().retain(|f| !f());
}
/// Registers a callback invoked on each incoming serial byte; a callback
/// returning `true` is removed after it fires.
#[export_name = "hal_serial_set_callback"]
pub fn serial_set_callback(callback: Box<dyn Fn() -> bool + Send + Sync>) {
    STDIN_CALLBACK.lock().unwrap().push(callback);
}
/// Copies up to `buf.len()` buffered bytes into `buf`; returns the count.
#[export_name = "hal_serial_read"]
pub fn serial_read(buf: &mut [u8]) -> usize {
    let mut stdin = STDIN.lock().unwrap();
    let len = stdin.len().min(buf.len());
    for c in &mut buf[..len] {
        *c = stdin.pop_front().unwrap();
    }
    len
}
/// Output a char to console.
#[export_name = "hal_serial_write"]
pub fn serial_write(s: &str) {
    // Written to stderr, not stdout.
    eprint!("{}", s);
}
/// Get current time.
#[export_name = "hal_timer_now"]
pub fn timer_now() -> Duration {
    SystemTime::now()
        .duration_since(SystemTime::UNIX_EPOCH)
        .unwrap()
}
/// Set a new timer.
///
/// After `deadline`, the `callback` will be called.
#[export_name = "hal_timer_set"]
pub fn timer_set(deadline: Duration, callback: Box<dyn FnOnce(Duration) + Send + Sync>) {
    // One OS thread per timer: sleep until the deadline, then fire.
    std::thread::spawn(move || {
        let now = timer_now();
        if deadline > now {
            std::thread::sleep(deadline - now);
        }
        callback(timer_now());
    });
}
#[export_name = "hal_vdso_constants"]
pub fn vdso_constants() -> VdsoConstants {
let tsc_frequency = 3000u16;
let mut constants = VdsoConstants {
max_num_cpus: 1,
features: Features {
cpu: 0,
hw_breakpoint_count: 0,
hw_watchpoint_count: 0,
},
dcache_line_size: 0,
icache_line_size: 0,
ticks_per_second: tsc_frequency as u64 * 1_000_000,
ticks_to_mono_numerator: 1000,
ticks_to_mono_denominator: tsc_frequency as u32,
physmem: PMEM_SIZE as u64,
version_string_len: 0,
version_string: Default::default(),
};
constants.set_version_string(git_version!(
prefix = "git-",
args = ["--always", "--abbrev=40", "--dirty=-dirty"]
));
constants
}
#[export_name = "hal_current_pgtable"]
pub fn current_page_table() -> usize {
0
}
/// Initialize the HAL.
///
/// This function must be called at the beginning.
pub fn init() {
    #[cfg(target_os = "macos")]
    unsafe {
        register_sigsegv_handler();
    }
    // spawn a thread to read stdin
    // TODO: raw mode
    std::thread::spawn(|| {
        // Feed each byte into the serial buffer as if it were an interrupt.
        for i in std::io::stdin().bytes() {
            serial_put(i.unwrap());
        }
    });
}
/// Detects a Linux framebuffer at `/dev/fb0`, maps it, and publishes its
/// geometry in the global `FRAME_BUFFER`. Returns silently when no
/// framebuffer is available or any ioctl/mmap step fails.
pub fn init_framebuffer() {
    const FBIOGET_VSCREENINFO: u64 = 0x4600;
    const FBIOGET_FSCREENINFO: u64 = 0x4602;
    // The path handed to `libc::open` must be a NUL-terminated C string;
    // a bare Rust `&str` is not, so the literal carries an explicit `\0`.
    #[cfg(target_arch = "aarch64")]
    let fbfd = unsafe { libc::open("/dev/fb0\0".as_ptr(), libc::O_RDWR) };
    #[cfg(not(target_arch = "aarch64"))]
    let fbfd = unsafe { libc::open("/dev/fb0\0".as_ptr() as *const i8, libc::O_RDWR) };
    if fbfd < 0 {
        return;
    }
    /// Mirror of the kernel's `fb_fix_screeninfo` ABI struct.
    #[repr(C)]
    #[derive(Debug, Default)]
    struct FbFixScreeninfo {
        id: [u8; 16],
        smem_start: u64,
        smem_len: u32,
        type_: u32,
        type_aux: u32,
        visual: u32,
        xpanstep: u16,
        ypanstep: u16,
        ywrapstep: u16,
        line_length: u32,
        mmio_start: u64,
        mmio_len: u32,
        accel: u32,
        capabilities: u16,
        reserved: [u16; 2],
    }
    impl FbFixScreeninfo {
        /// Size of the framebuffer memory in bytes.
        pub fn size(&self) -> u32 {
            self.smem_len
        }
    }
    /// Mirror of the kernel's `fb_var_screeninfo` ABI struct.
    #[repr(C)]
    #[derive(Debug, Default)]
    struct FbVarScreeninfo {
        xres: u32,
        yres: u32,
        xres_virtual: u32,
        yres_virtual: u32,
        xoffset: u32,
        yoffset: u32,
        bits_per_pixel: u32,
        grayscale: u32,
        red: FbBitfield,
        green: FbBitfield,
        blue: FbBitfield,
        transp: FbBitfield,
        nonstd: u32,
        activate: u32,
        height: u32,
        width: u32,
        accel_flags: u32,
        pixclock: u32,
        left_margin: u32,
        right_margin: u32,
        upper_margin: u32,
        lower_margin: u32,
        hsync_len: u32,
        vsync_len: u32,
        sync: u32,
        vmode: u32,
        rotate: u32,
        colorspace: u32,
        reserved: [u32; 4],
    }
    impl FbVarScreeninfo {
        /// Visible resolution as `(x, y)`.
        pub fn resolution(&self) -> (u32, u32) {
            (self.xres, self.yres)
        }
    }
    /// Mirror of the kernel's `fb_bitfield` ABI struct.
    #[repr(C)]
    #[derive(Debug, Default)]
    pub struct FbBitfield {
        offset: u32,
        length: u32,
        msb_right: u32,
    }
    let mut vinfo = FbVarScreeninfo::default();
    if unsafe { libc::ioctl(fbfd, FBIOGET_VSCREENINFO, &mut vinfo) } < 0 {
        return;
    }
    let mut finfo = FbFixScreeninfo::default();
    if unsafe { libc::ioctl(fbfd, FBIOGET_FSCREENINFO, &mut finfo) } < 0 {
        return;
    }
    let size = finfo.size() as usize;
    let addr = unsafe {
        libc::mmap(
            std::ptr::null_mut::<libc::c_void>(),
            size,
            libc::PROT_READ | libc::PROT_WRITE,
            libc::MAP_SHARED,
            fbfd,
            0,
        )
    };
    // MAP_FAILED is (void*)-1; treat any negative-looking pointer as failure.
    if (addr as isize) < 0 {
        return;
    }
    let (width, height) = vinfo.resolution();
    let addr = addr as usize;
    let fb_info = FramebufferInfo {
        xres: width,
        yres: height,
        xres_virtual: width,
        yres_virtual: height,
        xoffset: 0,
        yoffset: 0,
        // NOTE(review): format/depth are assumed 32-bit RGBA regardless of
        // vinfo.bits_per_pixel — confirm against target devices.
        depth: ColorDepth::ColorDepth32,
        format: ColorFormat::RGBA8888,
        // paddr: virt_to_phys(addr),
        paddr: addr,
        vaddr: addr,
        screen_size: size,
    };
    *FRAME_BUFFER.write() = Some(fb_info);
}
// Callback signatures for raw mouse packets and keyboard events.
type MouseCallbackFn = dyn Fn([u8; 3]) + Send + Sync;
type KBDCallbackFn = dyn Fn(u16, i32) + Send + Sync;
lazy_static! {
    static ref MOUSE_CALLBACK: Mutex<Vec<Box<MouseCallbackFn>>> = Mutex::new(Vec::new());
    static ref KBD_CALLBACK: Mutex<Vec<Box<KBDCallbackFn>>> = Mutex::new(Vec::new());
}
/// Registers a callback invoked with each 3-byte mouse packet.
#[export_name = "hal_mice_set_callback"]
pub fn mice_set_callback(callback: Box<dyn Fn([u8; 3]) + Send + Sync>) {
    MOUSE_CALLBACK.lock().unwrap().push(callback);
}
/// Registers a callback invoked with each keyboard `(code, value)` event.
#[export_name = "hal_kbd_set_callback"]
pub fn kbd_set_callback(callback: Box<dyn Fn(u16, i32) + Send + Sync>) {
    KBD_CALLBACK.lock().unwrap().push(callback);
}
/// Spawns a polling thread that reads raw input events from
/// `/dev/input/event1` and fans keyboard events out to `KBD_CALLBACK`.
fn init_kbd() {
    // NOTE(review): `expect` panics when the device is missing, which makes
    // the `fd < 0` guard below unreachable — confirm whether a graceful
    // return (like init_framebuffer) was intended.
    let fd = std::fs::File::open("/dev/input/event1").expect("Failed to open input event device.");
    // ??
    /* let inputfd = unsafe {
        libc::open(
            "/dev/input/event1".as_ptr() as *const i8,
            libc::O_RDONLY /* | libc::O_NONBLOCK */,
        )
    }; */
    if fd.as_raw_fd() < 0 {
        return;
    }
    /// Mirror of the kernel's `timeval` in an input event.
    #[repr(C)]
    #[derive(Debug, Copy, Clone, Default)]
    pub struct TimeVal {
        pub sec: usize,
        pub usec: usize,
    }
    /// Mirror of the kernel's `input_event` ABI struct.
    #[repr(C)]
    #[derive(Debug, Copy, Clone, Default)]
    struct InputEvent {
        time: TimeVal,
        type_: u16,
        code: u16,
        value: i32,
    }
    std::thread::spawn(move || {
        use core::mem::{size_of, transmute, transmute_copy};
        let ev = InputEvent::default();
        const LEN: usize = size_of::<InputEvent>();
        // Reuse one raw byte buffer sized exactly to an InputEvent.
        let mut buf: [u8; LEN] = unsafe { transmute(ev) };
        loop {
            // Poll roughly every 8 ms.
            std::thread::sleep(std::time::Duration::from_millis(8));
            let ret =
                unsafe { libc::read(fd.as_raw_fd(), buf.as_mut_ptr() as *mut libc::c_void, LEN) };
            if ret < 0 {
                break;
            }
            // SAFETY-adjacent note: reinterprets the raw bytes as an
            // InputEvent (all fields are plain integers).
            let ev: InputEvent = unsafe { transmute_copy(&buf) };
            // type_ == 1 selects key events (EV_KEY).
            if ev.type_ == 1 {
                KBD_CALLBACK.lock().unwrap().iter().for_each(|callback| {
                    callback(ev.code, ev.value);
                });
            }
        }
    });
}
/// Spawns a polling thread that reads 3-byte packets from
/// `/dev/input/mice` and fans them out to `MOUSE_CALLBACK`.
fn init_mice() {
    // NOTE(review): as in init_kbd, `expect` panics before the `fd < 0`
    // guard can run — confirm intended failure mode.
    let fd = std::fs::File::open("/dev/input/mice").expect("Failed to open input event device.");
    if fd.as_raw_fd() < 0 {
        return;
    }
    std::thread::spawn(move || {
        let mut buf = [0u8; 3];
        loop {
            // Poll roughly every 8 ms.
            std::thread::sleep(std::time::Duration::from_millis(8));
            let ret =
                unsafe { libc::read(fd.as_raw_fd(), buf.as_mut_ptr() as *mut libc::c_void, 3) };
            if ret < 0 {
                break;
            }
            MOUSE_CALLBACK.lock().unwrap().iter().for_each(|callback| {
                callback(buf);
            });
        }
    });
}
/// Starts the keyboard and mouse polling threads.
pub fn init_input() {
    init_kbd();
    init_mice();
}
#[cfg(test)]
mod tests {
    use super::*;
    /// A valid virtual address base to mmap.
    const VBASE: VirtAddr = 0x2_00000000;
    // Maps the same frame at two virtual pages and checks that a write
    // through one mapping is visible through the other.
    #[test]
    fn map_unmap() {
        let mut pt = PageTable::new();
        let flags = MMUFlags::READ | MMUFlags::WRITE;
        // map 2 pages to 1 frame
        pt.map(VBASE, 0x1000, flags).unwrap();
        pt.map(VBASE + 0x1000, 0x1000, flags).unwrap();
        unsafe {
            const MAGIC: usize = 0xdead_beaf;
            (VBASE as *mut usize).write(MAGIC);
            assert_eq!(((VBASE + 0x1000) as *mut usize).read(), MAGIC);
        }
        pt.unmap(VBASE + 0x1000).unwrap();
    }
}
|
//! Private module for selective re-export. See [`SequentialConsistencyTester`].
use crate::semantics::{ConsistencyTester, SequentialSpec};
use std::collections::{btree_map, BTreeMap, VecDeque};
use std::fmt::Debug;
/// This tester captures a potentially concurrent history of operations and
/// validates that it adheres to a [`SequentialSpec`] based on the
/// [sequential consistency] model. This model requires that operations be
/// applied atomically and that operations within a thread are sequential
/// (they have a total order within the thread).
///
/// If you're not sure whether to pick this or [`LinearizabilityTester`], favor
/// `LinearizabilityTester`.
///
/// # Sequential Consistency
///
/// Unlike with [linearizability], there is no intrinsic order of operations across threads, even
/// if they are fully sequenced in "real-time" (defined more precisely below). Anomalies are
/// therefore possible as threads do not necessarily agree on viable orders of operations. For
/// example, the later read by Thread 2 in this diagram is allowed to return the value prior to
/// Thread 1's write even though the operations are seemingly not concurrent:
///
/// ```text
/// -----------Time------------------------------>
/// Thread 1: [write invoked... and returns]
/// Thread 2: [read invoked... and returns]
/// ```
///
/// While "real time" is a common way to phrase an implicit total ordering on non-concurrent events
/// spanning threads, a more precise way to think about this is that prior to Thread 2 starting its
/// read, Thread 1 is capable of communicating with Thread 2 indicating that the write finished.
/// This perspective avoids introducing the notion of a shared global time, which is often a
/// misleading perspective when it comes to distributed systems (or even modern physics in
/// general).
///
/// If sequential consistency is checked at the edges of the system rather than at the clients,
/// then more anomalies are possible. For example, if a single thread executes a write then a read
/// and sequential consistency is checked when messages enter/leave the system, then the client can
/// observe stale reads of their own writes, something that would be disallowed when checking
/// sequential consistency when messages leave/enter the client.
///
/// The [`SequentialSpec`] will imply additional ordering constraints based on semantics specific
/// to each operation. For example, a value cannot be popped off a stack before it is pushed. It is
/// then the responsibility of the checker to establish whether a valid total ordering of events
/// exists under these constraints.
///
/// See also: [`LinearizabilityTester`].
///
/// [linearizability]: https://en.wikipedia.org/wiki/Linearizability
/// [sequential consistency]: https://en.wikipedia.org/wiki/Sequential_consistency
/// [`LinearizabilityTester`]: crate::semantics::LinearizabilityTester
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
#[allow(clippy::type_complexity)]
pub struct SequentialConsistencyTester<ThreadId, RefObj: SequentialSpec> {
    /// Initial state of the reference object.
    init_ref_obj: RefObj,
    /// Completed (op, return) pairs per thread, in per-thread program order.
    history_by_thread: BTreeMap<ThreadId, VecDeque<(RefObj::Op, RefObj::Ret)>>,
    /// At most one invoked-but-not-returned operation per thread.
    in_flight_by_thread: BTreeMap<ThreadId, RefObj::Op>,
    /// Cleared permanently once an invalid invoke/return is observed.
    is_valid_history: bool,
}
#[allow(clippy::len_without_is_empty)] // no use case for an emptiness check
impl<T: Ord, RefObj: SequentialSpec> SequentialConsistencyTester<T, RefObj> {
/// Constructs a [`SequentialConsistencyTester`].
pub fn new(init_ref_obj: RefObj) -> Self {
Self {
init_ref_obj,
history_by_thread: Default::default(),
in_flight_by_thread: Default::default(),
is_valid_history: true,
}
}
/// Indicates the aggregate number of operations completed or in flight
/// across all threads.
pub fn len(&self) -> usize {
let mut len = self.in_flight_by_thread.len();
for history in self.history_by_thread.values() {
len += history.len();
}
len
}
}
impl<T, RefObj> ConsistencyTester<T, RefObj> for SequentialConsistencyTester<T, RefObj>
where
    T: Copy + Debug + Ord,
    RefObj: Clone + SequentialSpec,
    RefObj::Op: Clone + Debug,
    RefObj::Ret: Clone + Debug + PartialEq,
{
    /// Indicates that a thread invoked an operation. Returns `Ok(...)` if the
    /// history is valid, even if it is not sequentially consistent.
    ///
    /// See [`SequentialConsistencyTester::serialized_history`].
    fn on_invoke(&mut self, thread_id: T, op: RefObj::Op) -> Result<&mut Self, String> {
        if !self.is_valid_history {
            return Err("Earlier history was invalid.".to_string());
        }
        // A thread may only have one operation in flight at a time.
        let in_flight_elem = self.in_flight_by_thread.entry(thread_id);
        if let btree_map::Entry::Occupied(occupied_op_entry) = in_flight_elem {
            self.is_valid_history = false;
            return Err(format!(
                "Thread already has an operation in flight. thread_id={:?}, op={:?}, history_by_thread={:?}",
                thread_id, occupied_op_entry.get(), self.history_by_thread));
        };
        in_flight_elem.or_insert(op);
        self.history_by_thread
            .entry(thread_id)
            .or_insert_with(VecDeque::new); // `serialize` requires entry
        Ok(self)
    }
    /// Indicates that a thread's earlier operation invocation returned. Returns
    /// `Ok(...)` if the history is valid, even if it is not sequentially
    /// consistent.
    ///
    /// See [`SequentialConsistencyTester::serialized_history`].
    fn on_return(&mut self, thread_id: T, ret: RefObj::Ret) -> Result<&mut Self, String> {
        if !self.is_valid_history {
            return Err("Earlier history was invalid.".to_string());
        }
        // Pair the return with the thread's pending invocation, if any.
        let op = match self.in_flight_by_thread.remove(&thread_id) {
            None => {
                self.is_valid_history = false;
                return Err(format!(
                    "There is no in-flight invocation for this thread ID. \
                     thread_id={:?}, unexpected_return={:?}, history={:?}",
                    thread_id,
                    ret,
                    self.history_by_thread
                        .entry(thread_id)
                        .or_insert_with(VecDeque::new)
                ));
            }
            Some(op) => op,
        };
        // Record the completed operation in per-thread program order.
        self.history_by_thread
            .entry(thread_id)
            .or_insert_with(VecDeque::new)
            .push_back((op, ret));
        Ok(self)
    }
    /// Indicates whether the recorded history is sequentially consistent.
    fn is_consistent(&self) -> bool {
        self.serialized_history().is_some()
    }
}
impl<T, RefObj> SequentialConsistencyTester<T, RefObj>
where
    T: Copy + Debug + Ord,
    RefObj: Clone + SequentialSpec,
    RefObj::Op: Clone + Debug,
    RefObj::Ret: Clone + Debug + PartialEq,
{
    /// Attempts to serialize the recorded partially ordered operation history
    /// into a total order that is consistent with a reference object's
    /// operational semantics.
    pub fn serialized_history(&self) -> Option<Vec<(RefObj::Op, RefObj::Ret)>>
    where
        RefObj: Clone,
        RefObj::Op: Clone,
        RefObj::Ret: Clone,
    {
        if !self.is_valid_history {
            return None;
        }
        Self::serialize(
            Vec::new(),
            &self.init_ref_obj,
            &self.history_by_thread,
            &self.in_flight_by_thread,
        )
    }
    /// Recursive backtracking search: tries every thread whose next event
    /// (completed op, or in-flight invocation) can validly extend the total
    /// order, and recurses on the reduced history. `Cow` defers cloning the
    /// maps until a branch actually consumes an event.
    #[allow(clippy::type_complexity)]
    fn serialize(
        valid_history: Vec<(RefObj::Op, RefObj::Ret)>, // total order
        ref_obj: &RefObj,
        remaining_history_by_thread: &BTreeMap<T, VecDeque<(RefObj::Op, RefObj::Ret)>>, // partial order
        in_flight_by_thread: &BTreeMap<T, RefObj::Op>,
    ) -> Option<Vec<(RefObj::Op, RefObj::Ret)>>
    where
        RefObj: Clone,
        RefObj::Op: Clone,
        RefObj::Ret: Clone,
    {
        // Return collected total order when there is no remaining partial order to interleave.
        let done = remaining_history_by_thread
            .iter()
            .all(|(_id, h)| h.is_empty());
        if done {
            return Some(valid_history);
        }
        // Otherwise try remaining interleavings.
        for (thread_id, remaining_history) in remaining_history_by_thread.iter() {
            let mut remaining_history_by_thread =
                std::borrow::Cow::Borrowed(remaining_history_by_thread);
            let mut in_flight_by_thread = std::borrow::Cow::Borrowed(in_flight_by_thread);
            let (ref_obj, valid_history) = if remaining_history.is_empty() {
                // Case 1: No remaining history to interleave. Maybe in-flight.
                if !in_flight_by_thread.contains_key(thread_id) {
                    continue;
                }
                // An in-flight op has no observed return, so any return the
                // reference object produces is acceptable.
                let op = in_flight_by_thread.to_mut().remove(thread_id).unwrap(); // `contains_key` above
                let mut ref_obj = ref_obj.clone();
                let ret = ref_obj.invoke(&op);
                let mut valid_history = valid_history.clone();
                valid_history.push((op, ret));
                (ref_obj, valid_history)
            } else {
                // Case 2: Has remaining history to interleave.
                let (op, ret) = remaining_history_by_thread
                    .to_mut()
                    .get_mut(thread_id)
                    .unwrap() // iterator returned this thread ID
                    .pop_front()
                    .unwrap(); // `!is_empty()` above
                let mut ref_obj = ref_obj.clone();
                // A completed op must reproduce its observed return value.
                if !ref_obj.is_valid_step(&op, &ret) {
                    continue;
                }
                let mut valid_history = valid_history.clone();
                valid_history.push((op, ret));
                (ref_obj, valid_history)
            };
            if let Some(valid_history) = Self::serialize(
                valid_history,
                &ref_obj,
                &remaining_history_by_thread,
                &in_flight_by_thread,
            ) {
                return Some(valid_history);
            }
        }
        None
    }
}
impl<T: Ord, RefObj> Default for SequentialConsistencyTester<T, RefObj>
where
    RefObj: Default + SequentialSpec,
{
    /// Delegates to [`SequentialConsistencyTester::new`] with the reference
    /// object's default value.
    fn default() -> Self {
        Self::new(RefObj::default())
    }
}
// Hand-written serde impl (rather than derive) because the associated
// types `RefObj::Op`/`RefObj::Ret` carry their own bounds.
impl<T, RefObj> serde::Serialize for SequentialConsistencyTester<T, RefObj>
where
    RefObj: serde::Serialize + SequentialSpec,
    RefObj::Op: serde::Serialize,
    RefObj::Ret: serde::Serialize,
    T: Ord + serde::Serialize,
{
    fn serialize<Ser: serde::Serializer>(&self, ser: Ser) -> Result<Ser::Ok, Ser::Error> {
        use serde::ser::SerializeStruct;
        let mut out = ser.serialize_struct("SequentialConsistencyTester", 4)?;
        out.serialize_field("init_ref_obj", &self.init_ref_obj)?;
        out.serialize_field("history_by_thread", &self.history_by_thread)?;
        out.serialize_field("in_flight_by_thread", &self.in_flight_by_thread)?;
        out.serialize_field("is_valid_history", &self.is_valid_history)?;
        out.end()
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::semantics::register::*;
    use crate::semantics::vec::*;
    // Double-invoke and return-without-invoke must poison the history.
    #[test]
    fn rejects_invalid_history() -> Result<(), String> {
        assert_eq!(
            SequentialConsistencyTester::new(Register('A'))
                .on_invoke(99, RegisterOp::Write('B'))?
                .on_invoke(99, RegisterOp::Write('C')),
            Err("Thread already has an operation in flight. thread_id=99, op=Write('B'), history_by_thread={99: []}".to_string()));
        assert_eq!(
            SequentialConsistencyTester::new(Register('A'))
                .on_invret(99, RegisterOp::Write('B'), RegisterRet::WriteOk)?
                .on_invret(99, RegisterOp::Write('C'), RegisterRet::WriteOk)?
                .on_return(99, RegisterRet::WriteOk),
            Err("There is no in-flight invocation for this thread ID. \
                 thread_id=99, \
                 unexpected_return=WriteOk, \
                 history=[(Write('B'), WriteOk), (Write('C'), WriteOk)]"
                .to_string())
        );
        Ok(())
    }
    // In-flight writes may be ordered before or after a completed read.
    #[test]
    fn identifies_serializable_register_history() -> Result<(), String> {
        assert_eq!(
            SequentialConsistencyTester::new(Register('A'))
                .on_invoke(0, RegisterOp::Write('B'))?
                .on_invret(1, RegisterOp::Read, RegisterRet::ReadOk('A'))?
                .serialized_history(),
            Some(vec![(RegisterOp::Read, RegisterRet::ReadOk('A')),])
        );
        assert_eq!(
            SequentialConsistencyTester::new(Register('A'))
                .on_invret(0, RegisterOp::Read, RegisterRet::ReadOk('B'))?
                .on_invoke(1, RegisterOp::Write('B'))?
                .serialized_history(),
            Some(vec![
                (RegisterOp::Write('B'), RegisterRet::WriteOk),
                (RegisterOp::Read, RegisterRet::ReadOk('B')),
            ])
        );
        Ok(())
    }
    // A read of a value never written cannot be serialized.
    #[test]
    fn identifies_unserializable_register_history() -> Result<(), String> {
        assert_eq!(
            SequentialConsistencyTester::new(Register('A'))
                .on_invret(0, RegisterOp::Read, RegisterRet::ReadOk('B'))?
                .serialized_history(),
            None
        );
        Ok(())
    }
    #[test]
    fn identifies_serializable_vec_history() -> Result<(), String> {
        assert_eq!(
            SequentialConsistencyTester::new(Vec::new())
                .on_invoke(0, VecOp::Push(10))?
                .serialized_history(),
            Some(vec![])
        );
        assert_eq!(
            SequentialConsistencyTester::new(Vec::new())
                .on_invoke(0, VecOp::Push(10))?
                .on_invret(1, VecOp::Pop, VecRet::PopOk(None))?
                .serialized_history(),
            Some(vec![(VecOp::Pop, VecRet::PopOk(None)),])
        );
        assert_eq!(
            SequentialConsistencyTester::new(Vec::new())
                .on_invret(1, VecOp::Pop, VecRet::PopOk(Some(10)))?
                .on_invret(0, VecOp::Push(10), VecRet::PushOk)?
                .on_invret(0, VecOp::Pop, VecRet::PopOk(Some(20)))?
                .on_invoke(0, VecOp::Push(30))?
                .on_invret(1, VecOp::Push(20), VecRet::PushOk)?
                .on_invret(1, VecOp::Pop, VecRet::PopOk(None))?
                .serialized_history(),
            Some(vec![
                (VecOp::Push(10), VecRet::PushOk),
                (VecOp::Pop, VecRet::PopOk(Some(10))),
                (VecOp::Push(20), VecRet::PushOk),
                (VecOp::Pop, VecRet::PopOk(Some(20))),
                (VecOp::Pop, VecRet::PopOk(None)),
            ])
        );
        Ok(())
    }
    // Thread 1's own program order (Len before Pop of 10) cannot be honored.
    #[test]
    fn identifies_unserializable_vec_history() -> Result<(), String> {
        assert_eq!(
            SequentialConsistencyTester::new(Vec::new())
                .on_invret(0, VecOp::Push(10), VecRet::PushOk)?
                .on_invoke(0, VecOp::Push(20))?
                .on_invret(1, VecOp::Len, VecRet::LenOk(2))?
                .on_invret(1, VecOp::Pop, VecRet::PopOk(Some(10)))?
                .on_invret(1, VecOp::Pop, VecRet::PopOk(Some(20)))?
                .serialized_history(),
            None
        );
        Ok(())
    }
}
|
extern crate slkparser;
extern crate bencher;
use bencher::Bencher;
use bencher::benchmark_group;
use bencher::benchmark_main;
use slkparser::SLKScanner;
/// Benchmarks a full scan of the AbilityData SLK file: the iterator is
/// drained so every record is actually parsed.
fn bench_scanning_ability_data(b: &mut Bencher) {
    b.iter(|| {
        let slk_reader = SLKScanner::open("../resources/slk/AbilityData.slk");
        for _ in slk_reader{}
    });
}
benchmark_group!(benches, bench_scanning_ability_data);
benchmark_main!(benches);
|
pub mod borrowing;
pub mod enum_and_match;
pub mod guess;
pub mod lang_intro;
|
// Module name kept as `JSONObject` for compatibility with existing callers;
// silence rustc's non_snake_case warning instead of breaking the public path.
#[allow(non_snake_case)]
pub mod JSONObject {
    /// Placeholder constructor; currently does nothing.
    pub fn new() {}
}
use crate::{language_client::LanguageClient, types::WorkspaceEditWithCursor};
use anyhow::{anyhow, Result};
use jsonrpc_core::Value;
use lsp_types::{Command, Location};
use serde::Deserialize;
use std::path::PathBuf;
// Runnable wraps the two possible shapes of a runnable action from rust-analyzer. Old-ish versions
// of it will use BinRunnable, whereas the newer ones use CargoRunnable.
#[derive(Debug, Deserialize, PartialEq)]
#[serde(untagged)]
#[allow(clippy::large_enum_variant)]
enum Runnable {
    /// Legacy shape: a binary name plus its raw argument list.
    Bin(BinRunnable),
    /// Newer shape: structured cargo arguments.
    Generic(GenericRunnable),
}
/// Legacy rust-analyzer runnable: an executable and its arguments.
#[derive(Debug, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
struct BinRunnable {
    pub label: String,
    pub bin: String,
    pub args: Vec<String>,
}
/// Newer rust-analyzer runnable with structured cargo invocation data.
#[derive(Debug, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
struct GenericRunnable {
    pub label: String,
    pub kind: GenericRunnableKind,
    pub location: Option<lsp_types::LocationLink>,
    pub args: GenericRunnableArgs,
}
/// Argument bundle for a [`GenericRunnable`].
#[derive(Debug, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
struct GenericRunnableArgs {
    pub workspace_root: Option<PathBuf>,
    pub cargo_args: Vec<String>,      // args before `--`
    pub executable_args: Vec<String>, // args after `--`
}
/// Only cargo-kind runnables are currently produced.
#[derive(Debug, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
enum GenericRunnableKind {
    Cargo,
}
/// Command identifiers sent by rust-analyzer that this client understands.
pub mod command {
    pub const SHOW_REFERENCES: &str = "rust-analyzer.showReferences";
    pub const SELECT_APPLY_SOURCE_CHANGE: &str = "rust-analyzer.selectAndApplySourceChange";
    pub const APPLY_SOURCE_CHANGE: &str = "rust-analyzer.applySourceChange";
    pub const RUN_SINGLE: &str = "rust-analyzer.runSingle";
    pub const RUN: &str = "rust-analyzer.run";
}
impl LanguageClient {
    /// Dispatches a rust-analyzer-specific LSP `Command`.
    ///
    /// Returns `Ok(true)` when the command was recognized and handled,
    /// `Ok(false)` when it is not a rust-analyzer command this client knows.
    pub fn handle_rust_analyzer_command(&self, cmd: &Command) -> Result<bool> {
        match cmd.command.as_str() {
            command::SHOW_REFERENCES => {
                // The location list is the third argument; default to empty.
                let locations = cmd
                    .arguments
                    .clone()
                    .unwrap_or_default()
                    .get(2)
                    .cloned()
                    .unwrap_or_else(|| Value::Array(vec![]));
                let locations: Vec<Location> = serde_json::from_value(locations)?;
                self.present_list("References", &locations)?;
            }
            command::SELECT_APPLY_SOURCE_CHANGE => {
                // Each argument is a list of workspace edits to apply.
                if let Some(ref edits) = cmd.arguments {
                    for edit in edits {
                        let workspace_edits = <Vec<WorkspaceEditWithCursor>>::deserialize(edit)?;
                        for edit in workspace_edits {
                            self.apply_workspace_edit(&edit.workspace_edit)?;
                            if let Some(cursor_position) = edit.cursor_position {
                                // LSP positions are 0-based; vim's are 1-based.
                                self.vim()?.cursor(
                                    cursor_position.position.line + 1,
                                    cursor_position.position.character + 1,
                                )?;
                            }
                        }
                    }
                }
            }
            command::APPLY_SOURCE_CHANGE => {
                // Same as above, but each argument is a single edit.
                if let Some(ref edits) = cmd.arguments {
                    for edit in edits {
                        let edit = WorkspaceEditWithCursor::deserialize(edit)?;
                        self.apply_workspace_edit(&edit.workspace_edit)?;
                        if let Some(cursor_position) = edit.cursor_position {
                            self.vim()?.cursor(
                                cursor_position.position.line + 1,
                                cursor_position.position.character + 1,
                            )?;
                        }
                    }
                }
            }
            command::RUN_SINGLE | command::RUN => {
                let has_term: i32 = self.vim()?.eval("exists(':terminal')")?;
                if has_term == 0 {
                    return Err(anyhow!("Terminal support is required for this action"));
                }
                if let Some(ref args) = cmd.arguments {
                    if let Some(args) = args.first() {
                        let runnable = Runnable::deserialize(args)?;
                        let cmd = match runnable {
                            Runnable::Bin(runnable) => {
                                format!("term {} {}", runnable.bin, runnable.args.join(" "))
                            }
                            Runnable::Generic(runnable) => format!(
                                "term cargo {} -- {}",
                                runnable.args.cargo_args.join(" "),
                                runnable.args.executable_args.join(" "),
                            ),
                        };
                        // NOTE(review): quotes are stripped wholesale before
                        // handing the command to vim — confirm arguments
                        // containing quoted spaces survive this.
                        self.vim()?.command(cmd.replace('"', ""))?;
                    }
                }
            }
            _ => return Ok(false),
        }
        Ok(true)
    }
}
#[cfg(test)]
mod test {
    // NOTE(review): `use super::*` below already brings `Runnable` into
    // scope; this explicit import is redundant but harmless.
    use super::Runnable;
    use super::*;
    use lsp_types::Command;
    use serde::Deserialize;
    // A "bin" runnable is the older flat shape: executable + args.
    #[test]
    fn test_deserialize_bin_runnable() {
        let cmd = r#"{
        "title":"Run",
        "command":"rust-analyzer.runSingle",
        "arguments": [
            {
                "args":["run","--package","somepkg","--bin","somebin"],
                "bin":"cargo",
                "cwd":"/home/dev/somebin",
                "extraArgs":[],
                "label":"run binary"
            }
        ]
        }"#;
        let cmd: Command = serde_json::from_str(cmd).unwrap();
        let actual = Runnable::deserialize(cmd.arguments.unwrap().first().unwrap())
            .expect("failed deserializing bin runnable");
        let expected = Runnable::Bin(BinRunnable {
            label: "run binary".into(),
            bin: "cargo".into(),
            args: vec!["run", "--package", "somepkg", "--bin", "somebin"]
                .into_iter()
                .map(|it| it.into())
                .collect(),
        });
        assert_eq!(actual, expected);
    }
    // A "generic" runnable nests cargo/executable args under `args` and
    // carries a camelCase `kind` tag.
    #[test]
    fn test_deserialize_generic_runnable() {
        let cmd = r#"{
            "title":"▶︎ Run",
            "command":"rust-analyzer.runSingle",
            "arguments":[{
                "args":{
                    "cargoArgs":["run","--package","somepkg","--bin","somebin"],
                    "executableArgs":[],
                    "workspaceRoot":"/home/dev/test"
                },
                "kind":"cargo",
                "label":"run binary"
            }]
        }"#;
        let cmd: Command = serde_json::from_str(cmd).unwrap();
        let actual = Runnable::deserialize(cmd.arguments.unwrap().first().unwrap())
            .expect("failed deserializing cargo runnable");
        let expected = Runnable::Generic(GenericRunnable {
            label: "run binary".into(),
            kind: GenericRunnableKind::Cargo,
            location: None,
            args: GenericRunnableArgs {
                workspace_root: Some("/home/dev/test".into()),
                cargo_args: vec!["run", "--package", "somepkg", "--bin", "somebin"]
                    .into_iter()
                    .map(|it| it.into())
                    .collect(),
                executable_args: vec![],
            },
        });
        assert_eq!(actual, expected);
    }
}
|
// Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use zerocopy::ByteSlice;
/// Returns the tail of `bytes` with the first `skip` bytes dropped, or
/// `None` when `bytes` holds fewer than `skip` bytes.
pub fn skip<B: ByteSlice>(bytes: B, skip: usize) -> Option<B> {
    if bytes.len() >= skip {
        Some(bytes.split_at(skip).1)
    } else {
        None
    }
}
|
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Canonical text spellings for booleans, NULL, and special floats —
// presumably used when formatting/parsing values as text; confirm at call sites.
pub const TRUE_BYTES_LOWER: &str = "true";
pub const FALSE_BYTES_LOWER: &str = "false";
pub const TRUE_BYTES_NUM: &str = "1";
pub const FALSE_BYTES_NUM: &str = "0";
pub const NULL_BYTES_UPPER: &str = "NULL";
pub const NULL_BYTES_LOWER: &str = "null";
// MySQL-style escaped NULL marker for TSV-like output.
pub const NULL_BYTES_ESCAPE: &str = "\\N";
pub const NAN_BYTES_SNAKE: &str = "NaN";
pub const NAN_BYTES_LOWER: &str = "nan";
pub const INF_BYTES_LOWER: &str = "inf";
pub const INF_BYTES_LONG: &str = "Infinity";
// The size of the I/O read/write block buffer by default (100 MiB).
pub const DEFAULT_BLOCK_BUFFER_SIZE: usize = 100 * 1024 * 1024;
// The size of the I/O read/write block index buffer by default (300 KiB).
pub const DEFAULT_BLOCK_INDEX_BUFFER_SIZE: usize = 300 * 1024;
// The maximum number of rows in a block by default.
pub const DEFAULT_BLOCK_MAX_ROWS: usize = 1000 * 1000;
// The minimum number of rows in a block by default.
pub const DEFAULT_BLOCK_MIN_ROWS: usize = 800 * 1000;
|
mod console_render;
use console_render::framebuffer::Framebuffer;
use console_render::color::Color;
use console_render::geometry::{Line, Point};
use console_render::world::{Wall, World, Texture, TextureCell};
use std::f64::consts::PI;
use std::io::{self};
// Procedural brick-wall texture lookup for integer cell coordinates (x, y):
// rows 0 and 3 plus two offset vertical seams are grey "mortar", everything
// else is red brick. The returned references point at constant struct
// literals, which rustc promotes to 'static storage, so any lifetime 'a is
// satisfiable.
fn texture_cell_generator<'a>(x:usize, y:usize) -> &'a TextureCell {
    if y == 0 || y == 3 || (y < 3 && x == 5) || (y > 3 && x == 2) {
        // Mortar cell: background grey, no glyph or foreground color.
        return &TextureCell{character: None, fg_color: None, bg_color: Some(Color{r: 100, g: 100, b: 100})};
    }
    // Brick cell: solid red background.
    return &TextureCell{character: None, fg_color: None, bg_color: Some(Color{r: 255, g: 0, b: 0})};
}
// Interactive raycaster demo: builds a square room of textured walls, then
// renders frames to the terminal and steps the camera from WASD/QE input
// read line-by-line from stdin.
fn main() {
    // Create world: one shared 6x6 procedural texture mapped onto four walls
    // forming a 20x20 square room centered on the origin.
    let texture = Texture::from_generator(6, 6, Point{x: 6.0, y: 6.0}, &texture_cell_generator);
    let mut world = World{
        framebuffer: Framebuffer::new(128, 48),
        walls: vec![
            Wall{
                line: Line{
                    start: Point{x: -10.0, y: 10.0},
                    end: Point{x: 10.0, y: 10.0},
                },
                texture: &texture,
            },
            Wall{
                line: Line{
                    start: Point{x: 10.0, y: 10.0},
                    end: Point{x: 10.0, y: -10.0},
                },
                texture: &texture,
            },
            Wall{
                line: Line{
                    start: Point{x: 10.0, y: -10.0},
                    end: Point{x: -10.0, y: -10.0},
                },
                texture: &texture,
            },
            Wall{
                line: Line{
                    start: Point{x: -10.0, y: -10.0},
                    end: Point{x: -10.0, y: 10.0},
                },
                texture: &texture,
            },
        ],
        // Camera starts at the room center with a 90° horizontal FOV.
        pos: Point{x: 0.0, y: 0.0},
        cam_rot: 0.0,
        cam_hfov: PI * 0.5,
        cam_range: 30.0,
    };
    let mut running = true;
    while running {
        // Render one frame, then block for a line of input; every character
        // on the line is interpreted as a command.
        print!("{}\nWASD: move; QE: rotate camera; X: quit; Enter: next frame\n> ", world.render());
        let mut input = String::new();
        match io::stdin().read_line(&mut input) {
            Ok(_) => {
                for c in input.chars() {
                    // Case-insensitive: map each char to its lowercase form.
                    let lower_c = c.to_lowercase().next().unwrap();
                    if lower_c == 'w' {
                        // Move one unit along the camera's facing direction.
                        world.pos = world.pos + Point::from_normal(world.cam_rot, 1.0);
                    }
                    else if lower_c == 's' {
                        world.pos = world.pos - Point::from_normal(world.cam_rot, 1.0);
                    }
                    else if lower_c == 'd' {
                        // Strafe: offset the movement direction by 90°.
                        world.pos = world.pos + Point::from_normal(world.cam_rot + PI * 0.5, 1.0);
                    }
                    else if lower_c == 'a' {
                        world.pos = world.pos - Point::from_normal(world.cam_rot + PI * 0.5, 1.0);
                    }
                    else if lower_c == 'q' {
                        // Rotate in steps of PI/32 (5.625°).
                        world.cam_rot -= PI / 32.0;
                    }
                    else if lower_c == 'e' {
                        world.cam_rot += PI / 32.0;
                    }
                    else if lower_c == 'x' {
                        running = false;
                    }
                }
            }
            // Treat stdin errors (e.g. EOF) as a request to quit.
            Err(_) => running = false,
        }
    }
}
|
/// Prints the little-endian byte encoding of `$n` and asserts it equals
/// `$expected`. Expands to a block so it can be used in expression position.
macro_rules! check {
    ($n:expr, $expected:expr) => {{
        println!("{:?}", $n.to_le_bytes());
        assert_eq!($expected, $n.to_le_bytes());
    }};
}
// Exercises `to_le_bytes` for every unsigned integer width: in little-endian
// order the value 1 occupies the lowest-index (least significant) byte.
fn main() {
    check!(1u8, [1]);
    check!(1u16, [1, 0]);
    check!(1u32, [1, 0, 0, 0]);
    check!(1u64, [1, 0, 0, 0, 0, 0, 0, 0]);
    check!(1u128, [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]);
}
|
use serde::{Deserialize, Deserializer};
pub use de::MCProtoDeserializer;
pub(crate) use de::Seq;
mod de;
pub(crate) mod read;
pub fn deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error>
where
T: ?Sized + Deserialize<'de>,
D: Deserializer<'de>,
{
Deserialize::deserialize(deserializer)
}
|
//! Demo ページ
use yew::prelude::*;
/// Properties of the [`Demo`] page component.
#[derive(Properties, Clone, PartialEq)]
pub struct Props {
    // Selects which demo to render; unknown ids render an empty page.
    pub demo_id: usize,
}
/// Demo page component: renders a static demo chosen by `props.demo_id`.
pub struct Demo {
    props: Props,
}
impl Component for Demo {
    type Message = ();
    type Properties = Props;
    // Store the props; no link is needed since the page has no interaction.
    fn create(props: Self::Properties, _: ComponentLink<Self>) -> Self {
        Self { props }
    }
    // The component has no messages, so it never re-renders on update.
    fn update(&mut self, _: Self::Message) -> ShouldRender {
        false
    }
    // NOTE(review): prop changes are ignored (always returns false), so
    // navigating to a different demo_id will not re-render — confirm this is
    // intended.
    fn change(&mut self, _: Self::Properties) -> ShouldRender {
        false
    }
    // Renders the demo selected by demo_id; only id 1 (U-Net music source
    // separation) is implemented, everything else renders empty.
    fn view(&self) -> Html {
        match self.props.demo_id {
            1 => { // U-Netで音楽パート分離
                html! {
                    <>
                        <h2>{"U-Netで音楽パート分離"}</h2>
                        <a href="https://github.com/mori97/U-Net_MUSDB18">{"[GitHub]"}</a><br/>
                        {"ディープラーニングで楽曲を4つのパート"}
                        <ul>
                            <li>{"ボーカル(vocal)"}</li>
                            <li>{"ドラム(drums)"}</li>
                            <li>{"ベース(bass)"}</li>
                            <li>{"その他(others)"}</li>
                        </ul>
                        {"に分離します。"}
                        <h3>{"Demo"}</h3>
                        <h4>{"入力"}</h4>
                        <div>
                            <audio controls=true src="audio/unetdemo_mixture.wav">
                                {"Your browser does not support the <code>audio</code> element."}
                            </audio>
                            <figure class="spectrograms">
                                <img src="images/unetdemo_spec_mixture.png" alt="楽曲のスペクトログラム" />
                                <figcaption>{"楽曲のスペクトログラム"}</figcaption>
                            </figure>
                        </div>
                        <h4>{"出力"}</h4>
                        <h5>{"ボーカル(vocal)パート"}</h5>
                        <div>
                            <audio controls=true src="audio/unetdemo_vocal.wav">
                                {"Your browser does not support the <code>audio</code> element."}
                            </audio>
                            <figure class="spectrograms">
                                <img src="images/unetdemo_spec_vocal.png" alt="ボーカル(vocal)のスペクトログラム" />
                                <figcaption>{"ボーカル(vocal)のスペクトログラム"}</figcaption>
                            </figure>
                        </div>
                        <h5>{"ドラム(drums)パート"}</h5>
                        <div>
                            <audio controls=true src="audio/unetdemo_drum.wav">
                                {"Your browser does not support the <code>audio</code> element."}
                            </audio>
                            <figure class="spectrograms">
                                <img src="images/unetdemo_spec_drums.png" alt="ドラム(drums)のスペクトログラム" />
                                <figcaption>{"ドラム(drums)のスペクトログラム"}</figcaption>
                            </figure>
                        </div>
                        <h5>{"ベース(bass)パート"}</h5>
                        <div>
                            <audio controls=true src="audio/unetdemo_bass.wav">
                                {"Your browser does not support the <code>audio</code> element."}
                            </audio>
                            <figure class="spectrograms">
                                <img src="images/unetdemo_spec_bass.png" alt="ベース(Bass)のスペクトログラム" />
                                <figcaption>{"ベース(Bass)のスペクトログラム"}</figcaption>
                            </figure>
                        </div>
                        <h5>{"その他(others)パート"}</h5>
                        <div>
                            <audio controls=true src="audio/unetdemo_others.wav">
                                {"Your browser does not support the <code>audio</code> element."}
                            </audio>
                            <figure class="spectrograms">
                                <img src="images/unetdemo_spec_others.png" alt="その他(others)のスペクトログラム" />
                                <figcaption>{"その他(others)のスペクトログラム"}</figcaption>
                            </figure>
                        </div>
                    </>
                }
            },
            _ => {
                html! {}
            },
        }
    }
}
|
// Copyright 2018 Fredrik Portström <https://portstrom.com>
// This is free software distributed under the terms specified in
// the file LICENSE at the top-level directory of this distribution.
/*!
Parse XML dumps exported from MediaWiki.
This module parses [XML dumps](https://www.mediawiki.org/wiki/Help:Export)
exported from MediaWiki, providing each page from the dump through an iterator.
This is useful for parsing
the [dumps from Wikipedia and other Wikimedia projects](https://dumps.wikimedia.org).
# Caution
If you need to parse any wiki text extracted from a dump, please use the crate
Parse Wiki Text ([crates.io](https://crates.io/crates/parse_wiki_text),
[Github](https://github.com/portstrom/parse_wiki_text)).
Correctly parsing wiki text requires dealing with an astonishing amount
of difficult and counterintuitive cases. Parse Wiki Text automatically deals
with all these cases, giving you an unambiguous tree of parsed elements
that is easy to work with.
# Limitations
This module only parses dumps containing only one revision of each page.
This is what you get from the page `Special:Export` when enabling the option
“Include only the current revision, not the full history”, as well as what you
get from the Wikimedia dumps with file names ending with `-pages-articles.xml.bz2`.
This module ignores the `siteinfo` element, every child element of the `page`
element except `ns`, `revision` and `title`, and every element inside the
`revision` element except `format`, `model` and `text`.
Until there is a real use case that justifies going beyond these limitations,
they will remain in order to avoid premature design driven by imagined requirements.
# Examples
Parse a bzip2 compressed file and distinguish ordinary articles from other pages.
A running example with complete error handling is available in the
`examples` folder.
```rust,no_run
fn main() {
let file = std::fs::File::open("example.xml.bz2").unwrap();
let file = std::io::BufReader::new(file);
let file = bzip2::bufread::BzDecoder::new(file);
let file = std::io::BufReader::new(file);
for result in parse_mediawiki_dump::parse(file) {
match result {
Err(error) => {
eprintln!("Error: {}", error);
break;
}
Ok(page) => if page.namespace.into_inner() == 0 && match &page.format {
None => false,
Some(format) => format == "text/x-wiki"
} && match &page.model {
None => false,
Some(model) => model == "wikitext"
} {
println!(
"The page {title:?} is an ordinary article with byte length {length}.",
title = page.title,
length = page.text.len()
);
} else {
println!("The page {:?} has something special to it.", page.title);
}
}
}
}
```
*/
#![forbid(unsafe_code)]
#![warn(missing_docs)]
use quick_xml::{events::Event, Reader};
use std::{
convert::TryInto, hash::Hash, io::BufRead, marker::PhantomData,
str::FromStr, borrow::Cow,
};
/**
The default namespace type in the [`Page`] struct.
It wraps a signed integer because the corresponding field in the database
(the [`page_namespace`] field in the `page` table) is
a signed integer. However, all namespaces in the dump are positive numbers.
The two negative namespaces, -1 (Special) and -2 (Media)
never actually appear in the `page` table or in the XML dump.
The [`FromNamespaceId`] trait can be implemented to convert this type into
an enum that represents the namespaces of a particular MediaWiki installation.
[`page_namespace`]:
https://www.mediawiki.org/wiki/Manual:Page_table#page_namespace
*/
#[derive(Debug, Clone, Copy, Ord, PartialOrd, Eq, PartialEq, Hash, Default)]
pub struct NamespaceId(pub i32);
impl NamespaceId {
    /// Wraps an `i32` in a `NamespaceId`.
    pub const fn new(n: i32) -> Self {
        NamespaceId(n)
    }
    /// Unwraps the inner `i32`.
    pub const fn into_inner(self) -> i32 {
        self.0
    }
}
impl From<i32> for NamespaceId {
    fn from(n: i32) -> Self {
        NamespaceId(n)
    }
}
impl From<NamespaceId> for i32 {
    fn from(id: NamespaceId) -> Self {
        id.into_inner()
    }
}
impl FromStr for NamespaceId {
    type Err = <i32 as FromStr>::Err;
    // Parses the inner `i32` and wraps it; errors pass through unchanged.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        s.parse::<i32>().map(NamespaceId)
    }
}
/**
Trait for a fallible conversion from [`NamespaceId`].
Required by the `namespace` field in the [`Page`] struct.
Automatically implemented for types that can be converted from `NamespaceId`
by [`TryInto::try_into`].
# Implementation
The trait can be implemented with the [`impl_namespace`] macro.
A type implementing `FromNamespaceId` should include values for namespace ids
-2 to 15, because they are present in all MediaWiki installations
according to the [MediaWiki documentation][built-in namespaces]
and all of 0 to 15 are likely to be found in a `pages-meta-current.xml` dump file.
[built-in namespaces]: https://www.mediawiki.org/wiki/Manual:Namespace#Built-in_namespaces
```rust
use std::convert::TryFrom;
use parse_mediawiki_dump::{FromNamespaceId, impl_namespace, NamespaceId};
impl_namespace! {
/// A type containing the built-in MediaWiki namespaces.
pub enum Namespace {
Media = -2,
Special = -1,
Main = 0,
Talk = 1,
User = 2,
UserTalk = 3,
Wiktionary = 4,
WiktionaryTalk = 5,
File = 6,
FileTalk = 7,
MediaWiki = 8,
MediaWikiTalk = 9,
Template = 10,
TemplateTalk = 11,
Help = 12,
HelpTalk = 13,
Category = 14,
CategoryTalk = 15,
}
}
fn main() {
assert_eq!(
Namespace::from_namespace_id(NamespaceId(0)),
Some(Namespace::Main)
);
assert_eq!(
Namespace::from_namespace_id(NamespaceId(11)),
Some(Namespace::TemplateTalk)
);
}
```
*/
pub trait FromNamespaceId: Sized {
    /// Converts fallibly from `NamespaceId`.
    fn from_namespace_id(id: NamespaceId) -> Option<Self>;
}
// Blanket implementation: every type that `NamespaceId` can be fallibly
// converted into gets `FromNamespaceId` for free.
impl<T> FromNamespaceId for T
where
    NamespaceId: TryInto<T>,
{
    fn from_namespace_id(id: NamespaceId) -> Option<Self> {
        match id.try_into() {
            Ok(converted) => Some(converted),
            Err(_) => None,
        }
    }
}
// Child elements of `page` that the parser distinguishes; anything else is
// classified as `Unknown` and skipped.
enum PageChildElement {
    Ns,
    Revision,
    Title,
    Redirect,
    Unknown,
}
// Child elements of `revision` that the parser distinguishes; anything else
// is classified as `Unknown` and skipped.
enum RevisionChildElement {
    Format,
    Model,
    Text,
    Unknown,
}
#[derive(Debug)]
/// The error type for `Parser`.
pub enum Error {
    /// Format not matching expectations.
    ///
    /// Indicates the position in the stream.
    Format(usize),
    /// The source contains a feature not supported by the parser.
    ///
    /// In particular, this means a `page` element contains more than one `revision` element.
    NotSupported(usize),
    /// Error from the XML reader.
    XmlReader(quick_xml::Error),
    /// Namespace id could not be converted to selected namespace type.
    // Carries the offending id and the stream position where it was read.
    #[allow(missing_docs)]
    Namespace { id: NamespaceId, position: usize },
}
/**
Parsed page.
Parsed from the `page` element.
Generic over the type of the namespace, which must be convertible
from `NamespaceId` with `TryInto`. Use [`parse_with_namespace`] to select
a custom type for the namespace; [`parse`] uses the default, `NamespaceId`.
Although the `format` and `model` elements are defined as mandatory in the
[schema], previous versions of the schema don't contain them.
Therefore the corresponding fields can be `None`.
The implementations of [`PartialOrd`], [`Ord`], [`PartialEq`], [`Eq`],
and [`Hash`] for this type only look at the `namespace` and `title` fields,
as the `page` table is set up so that this pair of fields is unique
for every page (see the [database installation script]).
[schema]: https://www.mediawiki.org/xml/export-0.10.xsd
[database installation script]:
https://phabricator.wikimedia.org/source/mediawiki/browse/master/maintenance/tables.sql;aa3c07964c56$279
*/
#[derive(Debug, Clone)]
// `N` is the namespace type; every parsing function requires it to
// implement `FromNamespaceId`.
pub struct Page<N> {
    /// The title of the page with the namespace prefix.
    ///
    /// Parsed from the text content of the `title` element in the `page` element.
    pub title: String,
    /**
    The [namespace](https://www.mediawiki.org/wiki/Manual:Namespace)
    of the page.
    Parsed from the text content of the `ns` element in the `page` element.
    All parsing functions require that this field implement `FromNamespaceId`.
    */
    pub namespace: N,
    /// The format of the revision if any.
    ///
    /// Parsed from the text content of the `format` element in the `revision`
    /// element. `None` if the element is not present.
    ///
    /// For ordinary articles the format is `text/x-wiki`.
    pub format: Option<String>,
    /// The model of the revision if any.
    ///
    /// Parsed from the text content of the `model` element in the `revision`
    /// element. `None` if the element is not present.
    ///
    /// For ordinary articles the model is `wikitext`.
    pub model: Option<String>,
    /// The text of the revision.
    ///
    /// Parsed from the text content of the `text` element in the `revision` element.
    pub text: String,
    /// The redirect target if any.
    ///
    /// Parsed from the content of the `title` attribute of the `redirect`
    /// element in the `page` element.
    ///
    /// For pages that are not redirects, the `redirect` element is not present.
    pub redirect_title: Option<String>,
}
/// Parser working as an iterator over pages.
pub struct Parser<R: BufRead, Namespace> {
    // Scratch buffer reused across `read_namespaced_event` calls.
    buffer: Vec<u8>,
    // Separate scratch buffer for namespace resolution.
    namespace_buffer: Vec<u8>,
    reader: Reader<R>,
    // Whether the root `mediawiki` start tag has been consumed yet.
    started: bool,
    // Carries the `Namespace` type parameter; holds no runtime data.
    phantom: PhantomData<Namespace>,
}
impl<N> PartialEq for Page<N>
where
    N: PartialEq,
{
    // A page's identity is the (namespace, title) pair; all other fields
    // are ignored, mirroring the uniqueness constraint of the `page` table.
    fn eq(&self, other: &Self) -> bool {
        (&self.namespace, &self.title) == (&other.namespace, &other.title)
    }
}
impl<N> Eq for Page<N> where N: Eq {}
impl<N> PartialOrd for Page<N>
where
    N: PartialOrd,
{
    // Order by namespace first; break ties on the title. Incomparable
    // namespaces make the pages incomparable.
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        match self.namespace.partial_cmp(&other.namespace) {
            Some(by_namespace) => {
                Some(by_namespace.then_with(|| self.title.cmp(&other.title)))
            }
            None => None,
        }
    }
}
impl<N> Ord for Page<N>
where
    N: Ord,
{
    // Order by namespace first; break ties on the title.
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        let by_namespace = self.namespace.cmp(&other.namespace);
        by_namespace.then_with(|| self.title.cmp(&other.title))
    }
}
impl<N> Hash for Page<N>
where
N: Hash,
{
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.namespace.hash(state);
self.title.hash(state);
}
}
impl std::fmt::Display for Error {
fn fmt(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
Error::Format(position) => {
write!(formatter, "Invalid format at position {}", position)
}
Error::NotSupported(position) => write!(
formatter,
"The element at position {} is not supported",
position
),
Error::XmlReader(error) => error.fmt(formatter),
Error::Namespace { id, position } => write!(
formatter,
"The namespace {} at position {} was not recognized",
id.into_inner(),
position,
),
}
}
}
impl std::error::Error for Error {
    // Only the XML reader variant wraps an underlying error source.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            Self::XmlReader(error) => Some(error),
            _ => None,
        }
    }
}
impl From<quick_xml::Error> for Error {
    // Enables `?` on quick-xml results inside the parser.
    fn from(value: quick_xml::Error) -> Self {
        Self::XmlReader(value)
    }
}
impl<R: BufRead, N: FromNamespaceId> Iterator for Parser<R, N> {
    type Item = Result<Page<N>, Error>;
    fn next(&mut self) -> Option<Self::Item> {
        // The free function `next` returns Result<Option<Page>, Error>;
        // this inverts it into Option<Result<..>>. The inner `item?`
        // propagates the `None` (end of dump) out of this method, which
        // ends the iteration; errors are yielded as `Some(Err(..))`.
        Some(match next(self) {
            Err(error) => Err(error),
            Ok(item) => Ok(item?),
        })
    }
}
// Returns true when `namespace` is exactly the MediaWiki export-0.10 XML
// namespace; a missing namespace never matches.
fn match_namespace(namespace: Option<&[u8]>) -> bool {
    const EXPORT_NS: &[u8] = b"http://www.mediawiki.org/xml/export-0.10/";
    namespace.map_or(false, |candidate| candidate == EXPORT_NS)
}
// Pulls the next `page` element out of the stream, or `Ok(None)` when the
// root `mediawiki` element closes. The `match match` constructs first
// classify the raw XML event, then act on the classification, so the big
// borrow of `parser.buffer` ends before the body runs.
fn next<R: BufRead, N: FromNamespaceId>(
    parser: &mut Parser<R, N>,
) -> Result<Option<Page<N>>, Error> {
    // On the first call, consume the root `mediawiki` start tag; any other
    // start tag at the top level is a format error.
    if !parser.started {
        loop {
            parser.buffer.clear();
            if let (namespace, Event::Start(event)) =
                parser.reader.read_namespaced_event(
                    &mut parser.buffer,
                    &mut parser.namespace_buffer,
                )?
            {
                if match_namespace(namespace)
                    && event.local_name() == b"mediawiki"
                {
                    break;
                }
                return Err(Error::Format(parser.reader.buffer_position()));
            }
        }
        parser.started = true;
    }
    // Scan for the next `page` start tag; an End event here closes the root
    // element, i.e. the dump is exhausted. Other elements are skipped whole.
    loop {
        parser.buffer.clear();
        if !match parser.reader.read_namespaced_event(
            &mut parser.buffer,
            &mut parser.namespace_buffer,
        )? {
            (_, Event::End(_)) => return Ok(None),
            (namespace, Event::Start(event)) => {
                match_namespace(namespace) && event.local_name() == b"page"
            }
            _ => continue,
        } {
            skip_element(parser)?;
            continue;
        }
        // Accumulate the children of this `page` element.
        let mut format = None;
        let mut model = None;
        let mut namespace = None;
        let mut redirect_title = None;
        let mut text = None;
        let mut title = None;
        loop {
            parser.buffer.clear();
            match match parser.reader.read_namespaced_event(
                &mut parser.buffer,
                &mut parser.namespace_buffer,
            )? {
                // `</page>`: the page is complete only if ns, text and
                // title were all seen.
                (_, Event::End(_)) => {
                    return match (namespace, text, title) {
                        (Some(namespace), Some(text), Some(title)) => {
                            Ok(Some(Page {
                                format,
                                model,
                                namespace,
                                redirect_title,
                                text,
                                title,
                            }))
                        }
                        _ => {
                            Err(Error::Format(parser.reader.buffer_position()))
                        }
                    }
                }
                (namespace, Event::Start(event)) => {
                    if match_namespace(namespace) {
                        match event.local_name() {
                            b"ns" => PageChildElement::Ns,
                            // `redirect` carries its payload in the `title`
                            // attribute rather than in text content.
                            b"redirect" => {
                                let title_attribute = event
                                    .attributes()
                                    .filter_map(|r| r.ok())
                                    .find(|attr| attr.key == b"title");
                                redirect_title = match title_attribute {
                                    Some(attr) => {
                                        Some(attr.unescape_and_decode_value(
                                            &parser.reader,
                                        )?)
                                    }
                                    None => {
                                        return Err(Error::Format(
                                            parser.reader.buffer_position(),
                                        ))
                                    }
                                };
                                PageChildElement::Redirect
                            }
                            b"revision" => PageChildElement::Revision,
                            b"title" => PageChildElement::Title,
                            _ => PageChildElement::Unknown,
                        }
                    } else {
                        PageChildElement::Unknown
                    }
                }
                _ => continue,
            } {
                PageChildElement::Ns => {
                    match parse_text(parser, &namespace)?.parse::<NamespaceId>()
                    {
                        Err(_) => {
                            return Err(Error::Format(
                                parser.reader.buffer_position(),
                            ))
                        }
                        // Convert the raw id into the caller-selected
                        // namespace type.
                        Ok(value) => {
                            namespace =
                                Some(N::from_namespace_id(value).ok_or_else(
                                    || Error::Namespace {
                                        id: value,
                                        position:
                                            parser.reader.buffer_position(),
                                    },
                                )?);
                            continue;
                        }
                    }
                }
                // The attribute was already captured above; skip the body.
                PageChildElement::Redirect => skip_element(parser)?,
                PageChildElement::Revision => {
                    // Only single-revision dumps are supported: a second
                    // `revision` (text already set) is an error.
                    if text.is_some() {
                        return Err(Error::NotSupported(
                            parser.reader.buffer_position(),
                        ));
                    }
                    loop {
                        parser.buffer.clear();
                        match match parser.reader.read_namespaced_event(
                            &mut parser.buffer,
                            &mut parser.namespace_buffer,
                        )? {
                            // `</revision>`: `text` is mandatory.
                            (_, Event::End(_)) => match text {
                                None => {
                                    return Err(Error::Format(
                                        parser.reader.buffer_position(),
                                    ))
                                }
                                Some(_) => break,
                            },
                            (namespace, Event::Start(event)) => {
                                if match_namespace(namespace) {
                                    match event.local_name() {
                                        b"format" => {
                                            RevisionChildElement::Format
                                        }
                                        b"model" => RevisionChildElement::Model,
                                        b"text" => RevisionChildElement::Text,
                                        _ => RevisionChildElement::Unknown,
                                    }
                                } else {
                                    RevisionChildElement::Unknown
                                }
                            }
                            _ => continue,
                        } {
                            RevisionChildElement::Format => {
                                format = Some(parse_text(parser, &format)?)
                            }
                            RevisionChildElement::Model => {
                                model = Some(parse_text(parser, &model)?)
                            }
                            RevisionChildElement::Text => {
                                text = Some(parse_text(parser, &text)?)
                            }
                            RevisionChildElement::Unknown => {
                                skip_element(parser)?
                            }
                        }
                    }
                    continue;
                }
                PageChildElement::Title => {
                    title = Some(parse_text(parser, &title)?);
                    continue;
                }
                PageChildElement::Unknown => skip_element(parser)?,
            }
        }
    }
}
/// Creates a parser for a stream in which namespaces are represented as
/// [`NamespaceId`]. Equivalent to `parse_with_namespace` with the second
/// generic argument set to `NamespaceId` (`parse_with_namespace::<_, NamespaceId>`).
///
/// The stream is parsed as an XML dump exported from MediaWiki. The parser is
/// an iterator over the pages in the dump.
pub fn parse<R: BufRead>(source: R) -> Parser<R, NamespaceId> {
    // Same as parse_with_namespace with the namespace type pinned.
    parse_with_namespace::<R, NamespaceId>(source)
}
/// Creates a parser for a stream. Allows you to select a type for the namespace.
///
/// The stream is parsed as an XML dump exported from MediaWiki. The parser is
/// an iterator over the pages in the dump.
pub fn parse_with_namespace<R: BufRead, N: FromNamespaceId>(
    source: R,
) -> Parser<R, N> {
    // Empty elements are expanded so `<page/>` yields Start + End events,
    // keeping the event handling uniform.
    let reader = {
        let mut reader = Reader::from_reader(source);
        reader.expand_empty_elements(true);
        reader
    };
    Parser {
        reader,
        started: false,
        buffer: Vec::new(),
        namespace_buffer: Vec::new(),
        phantom: PhantomData,
    }
}
// Reads the text content of the element just opened and its closing tag.
// `output` is the destination Option: if it already holds a value, the
// element is a duplicate and that is a format error.
fn parse_text<R: BufRead, N: FromNamespaceId>(
    parser: &mut Parser<R, N>,
    output: &Option<impl Sized>,
) -> Result<String, Error> {
    if output.is_some() {
        return Err(Error::Format(parser.reader.buffer_position()));
    }
    parser.buffer.clear();
    let text = match parser
        .reader
        .read_namespaced_event(
            &mut parser.buffer,
            &mut parser.namespace_buffer,
        )?
        .1
    {
        Event::Text(text) => text.unescape_and_decode(&parser.reader)?,
        // An immediate End means the element was empty: empty string.
        Event::End { .. } => return Ok(String::new()),
        _ => return Err(Error::Format(parser.reader.buffer_position())),
    };
    // After the text node, the next event must be the closing tag.
    parser.buffer.clear();
    if let Event::End(_) = parser
        .reader
        .read_namespaced_event(
            &mut parser.buffer,
            &mut parser.namespace_buffer,
        )?
        .1
    {
        Ok(text)
    } else {
        Err(Error::Format(parser.reader.buffer_position()))
    }
}
// Consumes events until the element whose start tag was just read is
// closed, ignoring arbitrarily nested children.
fn skip_element<R: BufRead, N: FromNamespaceId>(
    parser: &mut Parser<R, N>,
) -> Result<(), quick_xml::Error> {
    // Nesting depth relative to the element being skipped.
    let mut depth = 0usize;
    loop {
        parser.buffer.clear();
        let (_, event) = parser.reader.read_namespaced_event(
            &mut parser.buffer,
            &mut parser.namespace_buffer,
        )?;
        match event {
            // The matching close tag of the skipped element itself.
            Event::End(_) if depth == 0 => return Ok(()),
            Event::End(_) => depth -= 1,
            Event::Start(_) => depth += 1,
            _ => {}
        }
    }
}
/**
Enclose a namespace enum definition to derive the [`FromNamespaceId`] trait
as well as other [common traits] ([`Debug`], [`Eq`], [`PartialEq`], [`Ord`],
[`PartialOrd`], [`Clone`], [`Copy`], [`Hash`]) for it.
[common traits]:
https://rust-lang.github.io/api-guidelines/interoperability.html#c-common-traits
*/
#[macro_export]
macro_rules! impl_namespace {
    (
        $(#[$attribute:meta])*
        $visibility:vis enum $namespace:ident {
            $($variant:ident = $id:literal),* $(,)?
        }
    ) => {
        // Re-emit the enum with the common traits derived and an i32
        // representation matching the namespace ids.
        $(#[$attribute])*
        #[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Clone, Copy, Hash)]
        #[repr(i32)]
        $visibility enum $namespace {
            $($variant = $id,)*
        }
        // TryFrom<NamespaceId> is what the blanket FromNamespaceId impl
        // builds on, so this also provides `from_namespace_id`.
        impl ::std::convert::TryFrom<::parse_mediawiki_dump::NamespaceId> for $namespace {
            type Error = &'static str;
            fn try_from(id: ::parse_mediawiki_dump::NamespaceId) -> Result<Self, Self::Error> {
                match i32::from(id) {
                    $($id => Ok($namespace::$variant),)*
                    _ => Err("invalid namespace id"),
                }
            }
        }
    };
}
|
use core::marker::PhantomData;
use crate::*;
/// Generic helper for libm functions, abstracting over f32 and f64. <br/>
/// # Type Parameter:
/// - `T`: Either `f32` or `f64`
///
/// # Examples
/// ```rust
/// use libm::{self, Libm};
///
/// const PI_F32: f32 = 3.1415927410e+00;
/// const PI_F64: f64 = 3.1415926535897931160e+00;
///
/// assert!(Libm::<f32>::cos(0.0f32) == libm::cosf(0.0));
/// assert!(Libm::<f32>::sin(PI_F32) == libm::sinf(PI_F32));
///
/// assert!(Libm::<f64>::cos(0.0f64) == libm::cos(0.0));
/// assert!(Libm::<f64>::sin(PI_F64) == libm::sin(PI_F64));
/// ```
// Zero-sized selector type: `PhantomData` only records which float type
// the associated functions operate on.
pub struct Libm<T>(PhantomData<T>);
macro_rules! libm_helper {
    // Entry arm: opens `impl Libm<$t>` and expands the function list inside.
    ($t:ident, funcs: $funcs:tt) => {
        impl Libm<$t> {
            #![allow(unused_parens)]
            libm_helper! { $funcs }
        }
    };
    // List arm: splits the brace-wrapped, `;`-separated list into items.
    ({$($func:tt);*}) => {
        $(
            libm_helper! { $func }
        )*
    };
    // Item arm: emits one inline wrapper forwarding to the free libm fn.
    ((fn $func:ident($($arg:ident: $arg_typ:ty),*) -> ($($ret_typ:ty),*); => $libm_fn:ident)) => {
        #[inline(always)]
        pub fn $func($($arg: $arg_typ),*) -> ($($ret_typ),*) {
            $libm_fn($($arg),*)
        }
    };
}
// Instantiate the f32 API: each tuple maps a generic method name on
// `Libm<f32>` to the corresponding `*f`-suffixed free function.
libm_helper! {
    f32,
    funcs: {
        (fn acos(x: f32) -> (f32); => acosf);
        (fn acosh(x: f32) -> (f32); => acoshf);
        (fn asin(x: f32) -> (f32); => asinf);
        (fn asinh(x: f32) -> (f32); => asinhf);
        (fn atan(x: f32) -> (f32); => atanf);
        (fn atan2(y: f32, x: f32) -> (f32); => atan2f);
        (fn atanh(x: f32) -> (f32); => atanhf);
        (fn cbrt(x: f32) -> (f32); => cbrtf);
        (fn ceil(x: f32) -> (f32); => ceilf);
        (fn copysign(x: f32, y: f32) -> (f32); => copysignf);
        (fn cos(x: f32) -> (f32); => cosf);
        (fn cosh(x: f32) -> (f32); => coshf);
        (fn erf(x: f32) -> (f32); => erff);
        (fn erfc(x: f32) -> (f32); => erfcf);
        (fn exp(x: f32) -> (f32); => expf);
        (fn exp2(x: f32) -> (f32); => exp2f);
        (fn exp10(x: f32) -> (f32); => exp10f);
        (fn expm1(x: f32) -> (f32); => expm1f);
        (fn fabs(x: f32) -> (f32); => fabsf);
        (fn fdim(x: f32, y: f32) -> (f32); => fdimf);
        (fn floor(x: f32) -> (f32); => floorf);
        (fn fma(x: f32, y: f32, z: f32) -> (f32); => fmaf);
        (fn fmax(x: f32, y: f32) -> (f32); => fmaxf);
        (fn fmin(x: f32, y: f32) -> (f32); => fminf);
        (fn fmod(x: f32, y: f32) -> (f32); => fmodf);
        (fn frexp(x: f32) -> (f32, i32); => frexpf);
        (fn hypot(x: f32, y: f32) -> (f32); => hypotf);
        (fn ilogb(x: f32) -> (i32); => ilogbf);
        (fn j0(x: f32) -> (f32); => j0f);
        (fn j1(x: f32) -> (f32); => j1f);
        (fn jn(n: i32, x: f32) -> (f32); => jnf);
        (fn ldexp(x: f32, n: i32) -> (f32); => ldexpf);
        (fn lgamma_r(x: f32) -> (f32, i32); => lgammaf_r);
        (fn lgamma(x: f32) -> (f32); => lgammaf);
        (fn log(x: f32) -> (f32); => logf);
        (fn log1p(x: f32) -> (f32); => log1pf);
        (fn log2(x: f32) -> (f32); => log2f);
        (fn log10(x: f32) -> (f32); => log10f);
        (fn modf(x: f32) -> (f32, f32); => modff);
        (fn nextafter(x: f32, y: f32) -> (f32); => nextafterf);
        (fn pow(x: f32, y: f32) -> (f32); => powf);
        (fn remainder(x: f32, y: f32) -> (f32); => remainderf);
        (fn remquo(x: f32, y: f32) -> (f32, i32); => remquof);
        (fn rint(x: f32) -> (f32); => rintf);
        (fn round(x: f32) -> (f32); => roundf);
        (fn scalbn(x: f32, n: i32) -> (f32); => scalbnf);
        (fn sin(x: f32) -> (f32); => sinf);
        (fn sincos(x: f32) -> (f32, f32); => sincosf);
        (fn sinh(x: f32) -> (f32); => sinhf);
        (fn sqrt(x: f32) -> (f32); => sqrtf);
        (fn tan(x: f32) -> (f32); => tanf);
        (fn tanh(x: f32) -> (f32); => tanhf);
        (fn tgamma(x: f32) -> (f32); => tgammaf);
        (fn trunc(x: f32) -> (f32); => truncf);
        (fn y0(x: f32) -> (f32); => y0f);
        (fn y1(x: f32) -> (f32); => y1f);
        (fn yn(n: i32, x: f32) -> (f32); => ynf)
    }
}
// Instantiate the f64 API: method names map directly onto the unsuffixed
// free functions.
libm_helper! {
    f64,
    funcs: {
        (fn acos(x: f64) -> (f64); => acos);
        (fn acosh(x: f64) -> (f64); => acosh);
        (fn asin(x: f64) -> (f64); => asin);
        (fn asinh(x: f64) -> (f64); => asinh);
        (fn atan(x: f64) -> (f64); => atan);
        (fn atan2(y: f64, x: f64) -> (f64); => atan2);
        (fn atanh(x: f64) -> (f64); => atanh);
        (fn cbrt(x: f64) -> (f64); => cbrt);
        (fn ceil(x: f64) -> (f64); => ceil);
        (fn copysign(x: f64, y: f64) -> (f64); => copysign);
        (fn cos(x: f64) -> (f64); => cos);
        (fn cosh(x: f64) -> (f64); => cosh);
        (fn erf(x: f64) -> (f64); => erf);
        (fn erfc(x: f64) -> (f64); => erfc);
        (fn exp(x: f64) -> (f64); => exp);
        (fn exp2(x: f64) -> (f64); => exp2);
        (fn exp10(x: f64) -> (f64); => exp10);
        (fn expm1(x: f64) -> (f64); => expm1);
        (fn fabs(x: f64) -> (f64); => fabs);
        (fn fdim(x: f64, y: f64) -> (f64); => fdim);
        (fn floor(x: f64) -> (f64); => floor);
        (fn fma(x: f64, y: f64, z: f64) -> (f64); => fma);
        (fn fmax(x: f64, y: f64) -> (f64); => fmax);
        (fn fmin(x: f64, y: f64) -> (f64); => fmin);
        (fn fmod(x: f64, y: f64) -> (f64); => fmod);
        (fn frexp(x: f64) -> (f64, i32); => frexp);
        (fn hypot(x: f64, y: f64) -> (f64); => hypot);
        (fn ilogb(x: f64) -> (i32); => ilogb);
        (fn j0(x: f64) -> (f64); => j0);
        (fn j1(x: f64) -> (f64); => j1);
        (fn jn(n: i32, x: f64) -> (f64); => jn);
        (fn ldexp(x: f64, n: i32) -> (f64); => ldexp);
        (fn lgamma_r(x: f64) -> (f64, i32); => lgamma_r);
        (fn lgamma(x: f64) -> (f64); => lgamma);
        (fn log(x: f64) -> (f64); => log);
        (fn log1p(x: f64) -> (f64); => log1p);
        (fn log2(x: f64) -> (f64); => log2);
        (fn log10(x: f64) -> (f64); => log10);
        (fn modf(x: f64) -> (f64, f64); => modf);
        (fn nextafter(x: f64, y: f64) -> (f64); => nextafter);
        (fn pow(x: f64, y: f64) -> (f64); => pow);
        (fn remainder(x: f64, y: f64) -> (f64); => remainder);
        (fn remquo(x: f64, y: f64) -> (f64, i32); => remquo);
        (fn rint(x: f64) -> (f64); => rint);
        (fn round(x: f64) -> (f64); => round);
        (fn scalbn(x: f64, n: i32) -> (f64); => scalbn);
        (fn sin(x: f64) -> (f64); => sin);
        (fn sincos(x: f64) -> (f64, f64); => sincos);
        (fn sinh(x: f64) -> (f64); => sinh);
        (fn sqrt(x: f64) -> (f64); => sqrt);
        (fn tan(x: f64) -> (f64); => tan);
        (fn tanh(x: f64) -> (f64); => tanh);
        (fn tgamma(x: f64) -> (f64); => tgamma);
        (fn trunc(x: f64) -> (f64); => trunc);
        (fn y0(x: f64) -> (f64); => y0);
        (fn y1(x: f64) -> (f64); => y1);
        (fn yn(n: i32, x: f64) -> (f64); => yn)
    }
}
|
use std::io;
use super::super::{Key, Block};
use cipher;
use mem;
/// Decrypts one 8-byte CBC chunk: deciphers `chunk` with `key`, XORs the
/// result against `prev` (the previous ciphertext block, initially the IV),
/// then advances `prev` to the current ciphertext block.
///
/// NOTE(review): assumes `chunk` holds at least one full block — callers in
/// this file always pass exactly 8 bytes; confirm `mem::read_block`'s contract.
fn decrypt_chunk(key: &Key, prev: &mut Block, chunk: &[u8]) -> [u8; 8] {
let input_block = mem::read_block(chunk);
let mut decrypted_block = cipher::decipher(&key, input_block);
// CBC unchaining: XOR the deciphered block with the previous ciphertext block.
decrypted_block[0] ^= prev[0];
decrypted_block[1] ^= prev[1];
// The *ciphertext* block (not the plaintext) becomes the next chaining value.
*prev = *input_block;
*mem::write_block(&decrypted_block)
}
/// Wraps an underlying `std::io::BufRead` so that bytes read get
/// decrypted on the way through.
///
/// # Example:
/// ```.ignore
/// use std::fs::File;
/// use std::io::{BufReader, Read};
/// use tea::io::Reader;
///
/// let f = File::open("foo.txt").ok().unwrap();
/// let mut decrypt_f = Reader::new(BufReader::new(f),
/// [1, 2, 3, 4], [5, 6]);
/// let mut s = "".to_string();
/// decrypt_f.read_to_string(&mut s).ok().unwrap();
/// ```
pub struct Reader<R: io::BufRead> {
source: R, // underlying encrypted byte stream
key: Key, // key used to decipher each 8-byte block
prev: Block, // previous ciphertext block (starts as the IV) for CBC unchaining
buf: Vec<u8>, // decrypted bytes not yet handed to the caller (at most one block)
}
impl<R: io::BufRead> Reader<R> {
    /// Wraps `source` in a `Reader` that decrypts with `key`, seeding the
    /// CBC chain with the initialization vector `iv`.
    pub fn new(source: R, key: Key, iv: Block) -> Reader<R> {
        Reader {
            source,
            key,
            prev: iv,
            // Holds at most one decrypted 8-byte block between read() calls.
            buf: Vec::with_capacity(8),
        }
    }
}
impl<R: io::BufRead> io::Read for Reader<R> {
/// Reads from `source`, decrypts the data, and writes the result
/// to `buf`.
///
/// NOTE(review): this code targets a pre-1.x Rust: `try!` instead of `?`,
/// `clone_from_slice` is used as if it returns the number of elements
/// copied (since Rust 1.7 it returns `()` and panics on length mismatch),
/// and `Vec::push_all` was removed in favor of `extend_from_slice`.
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
let mut pos = 0;
while pos < buf.len() {
{
let encrypted_bytes = try!(self.source.fill_buf());
if encrypted_bytes.is_empty() {
// Source exhausted: flush whatever is left in self.buf, stripping
// the trailing padding (last byte encodes the pad length).
if !self.buf.is_empty() {
// Handle padding bytes.
let real_bytes = self.buf.len() - self.buf[self.buf.len()-1] as usize;
// Weird scoping follows. We want to discard
// the lifetime where real_slice is borrowing
// from self.buf so we can truncate it, but we
// still want to know what n is, so we have to
// let that value escape.
let n = {
let real_slice = &self.buf[..real_bytes];
let n2 = buf[pos..].clone_from_slice(real_slice);
pos += n2;
n2
};
// Either everything fit (drop the stash) or keep the tail for
// the next read() call.
if n == real_bytes {
self.buf.truncate(0);
} else {
self.buf = self.buf.split_off(n);
}
}
return Ok(pos);
} else {
assert!(encrypted_bytes.len() >= 8,
"not enough bytes to decrypt, encrypted data should be a multiple of 8 bytes but we got {}", encrypted_bytes.len());
// Drain any previously-decrypted leftovers before producing more.
if !self.buf.is_empty() {
let n = buf[pos..].clone_from_slice(&self.buf);
pos += n;
if n == self.buf.len() {
self.buf.truncate(0);
} else {
// Caller's buffer is full; stash the remainder and return.
self.buf = self.buf.split_off(n);
return Ok(pos);
}
}
// Decrypt the next 8-byte block into the stash.
self.buf.push_all(&decrypt_chunk(&self.key, &mut self.prev, &encrypted_bytes[0..8]));
}
}
// Tell the BufRead source we consumed exactly one block.
self.source.consume(8);
}
Ok(pos)
}
}
#[test]
/// Round-trip test: encrypt 128 bytes through `Writer` in chunks of every
/// size from 1 to 64, then decrypt through `Reader` and expect the original.
fn it_works() {
use std::io::{Read, Write};
use super::Writer;
for chunk_size in 1..65 {
let input: Vec<u8> = (0u8..128).collect();
let mut writer = Writer::new(io::Cursor::new(Vec::with_capacity(128)),
[1, 2, 3, 4], [5, 6]);
for chunk in input.chunks(chunk_size) {
assert_eq!(writer.write(chunk).ok().unwrap(), chunk.len());
}
let crypted = writer.close().ok().unwrap().into_inner();
// One extra 8-byte block: the padding block appended at close().
assert!(crypted.len() == input.len() + 8);
assert!(crypted != input);
let mut reader = Reader::new(io::Cursor::new(crypted),
[1, 2, 3, 4], [5, 6]);
let mut decrypted: Vec<u8> = Vec::new();
assert!(reader.read_to_end(&mut decrypted).is_ok());
assert_eq!(decrypted, input);
}
}
|
use super::super::controls::Knob;
// A boxed, dynamically-dispatched stream of f64 samples.
type FloatStream = Box<Iterator<Item=f64>>;
/// Scales every sample from `generator` by the current value of a volume knob.
pub struct FilterVolume {
generator: FloatStream, // upstream sample source
value: Knob, // volume control read on every sample
}
impl FilterVolume {
    /// Wraps `generator` so that each yielded sample is multiplied by the
    /// current setting of the `volume` knob.
    pub fn new(generator: FloatStream, volume: Knob) -> FilterVolume {
        FilterVolume {
            generator,
            value: volume,
        }
    }
}
impl Iterator for FilterVolume {
type Item = f64;
fn next(&mut self) -> Option<f64> {
if let Some(x) = self.generator.next() {
return Some(x * self.value.read())
}
None
}
} |
mod aes;
extern crate base64;
extern crate rand;
use std::fs::File;
use std::io::prelude::*;
use rand::Rng;
/// Cryptopals challenge 11: encrypt a repetitive plaintext under a random
/// mode and guess, from the ciphertext alone, which mode was used.
fn main() {
    let plaintext = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa".as_bytes();
    let ciphertext = random_cipher_encryption(&plaintext);
    // Identical plaintext blocks ⇒ identical ciphertext blocks under ECB.
    let guess = if detection_oracle(&ciphertext) { "ecb" } else { "cbc" };
    println!("{}", guess);
}
/// Returns `true` when the ciphertext looks ECB-encrypted: a repeated-block
/// score of 2 or more from `aes::detect_mode` counts as ECB.
fn detection_oracle(ciphertext: &[u8]) -> bool {
    aes::detect_mode(ciphertext) >= 2
}
/// Pads and encrypts `text` under a fresh random key, picking ECB or CBC by
/// coin flip (the chosen mode is printed so the oracle can be verified).
///
/// Fix: the original initialized `let mut ciphertext = Vec::new()` and then
/// unconditionally overwrote it in both branches — a dead store and needless
/// `mut`. The ciphertext is now bound directly from the branch expression.
fn random_cipher_encryption(text: &[u8]) -> Vec<u8> {
    let mut rng = rand::thread_rng();
    let key = random_key();
    let iv = random_key();
    // Random prefix/suffix around the plaintext, then pad to a 16-byte boundary.
    let input = aes::pad(scramble_input(text), 16);
    if rng.gen::<u8>() % 2 == 0 {
        println!("encrypting cbc");
        aes::encrypt_aes_cbc(&input, &key, &iv)
    } else {
        println!("encrypting ecb");
        aes::encrypt_aes_ecb(&input, &key)
    }
}
/// Generates 16 uniformly random bytes — used here both as an AES-128 key
/// and as a CBC initialization vector.
fn random_key() -> [u8; 16] {
    let mut rng = rand::thread_rng();
    let mut key = [0; 16];
    for byte in key.iter_mut() {
        *byte = rng.gen::<u8>();
    }
    key
}
/// Surrounds `text` with the same random count (5–9) of random bytes on each
/// side, mimicking the cryptopals "random prefix/suffix" oracle input.
///
/// Fixes over the original: unused loop variables are now `_`, the manual
/// byte-by-byte copy of `text` is replaced with `extend_from_slice`, the
/// output vector is preallocated, and dead commented-out code is removed.
fn scramble_input(text: &[u8]) -> Vec<u8> {
    let mut rng = rand::thread_rng();
    // gen_range(5, 10) is half-open: 5..=9 bytes per side.
    let bytes_num: u8 = rng.gen_range(5, 10);
    let pad = bytes_num as usize;
    let mut scrambled = Vec::with_capacity(text.len() + 2 * pad);
    for _ in 0..bytes_num {
        scrambled.push(rng.gen::<u8>());
    }
    scrambled.extend_from_slice(text);
    for _ in 0..bytes_num {
        scrambled.push(rng.gen::<u8>());
    }
    scrambled
}
|
#![allow(unused_variables, non_upper_case_globals, non_snake_case, unused_unsafe, non_camel_case_types, dead_code, clippy::all)]
#[cfg(feature = "Win32_Foundation")]
#[inline]
/// Machine-generated windows-rs binding: stops hardware thread profiling for
/// a handle previously returned by `EnableThreadProfiling`. Returns a Win32
/// error code — presumably ERROR_SUCCESS (0) on success; confirm against the
/// Win32 documentation.
pub unsafe fn DisableThreadProfiling<'a, Param0: ::windows::core::IntoParam<'a, super::super::super::Foundation::HANDLE>>(performancedatahandle: Param0) -> u32 {
#[cfg(windows)]
{
#[link(name = "windows")]
extern "system" {
fn DisableThreadProfiling(performancedatahandle: super::super::super::Foundation::HANDLE) -> u32;
}
// transmute here is the generator's identity-cast idiom for ABI return values.
::core::mem::transmute(DisableThreadProfiling(performancedatahandle.into_param().abi()))
}
#[cfg(not(windows))]
unimplemented!("Unsupported target OS");
}
#[cfg(feature = "Win32_Foundation")]
#[inline]
/// Machine-generated windows-rs binding: enables thread profiling on the
/// given thread and writes the profiling handle to `performancedatahandle`.
/// Returns a Win32 error code.
pub unsafe fn EnableThreadProfiling<'a, Param0: ::windows::core::IntoParam<'a, super::super::super::Foundation::HANDLE>>(threadhandle: Param0, flags: u32, hardwarecounters: u64, performancedatahandle: *mut super::super::super::Foundation::HANDLE) -> u32 {
#[cfg(windows)]
{
#[link(name = "windows")]
extern "system" {
fn EnableThreadProfiling(threadhandle: super::super::super::Foundation::HANDLE, flags: u32, hardwarecounters: u64, performancedatahandle: *mut super::super::super::Foundation::HANDLE) -> u32;
}
::core::mem::transmute(EnableThreadProfiling(threadhandle.into_param().abi(), ::core::mem::transmute(flags), ::core::mem::transmute(hardwarecounters), ::core::mem::transmute(performancedatahandle)))
}
#[cfg(not(windows))]
unimplemented!("Unsupported target OS");
}
/// Machine-generated windows-rs struct mirroring the Win32
/// HARDWARE_COUNTER_DATA layout (one hardware counter sample).
#[derive(:: core :: clone :: Clone, :: core :: marker :: Copy)]
#[repr(C)]
pub struct HARDWARE_COUNTER_DATA {
pub Type: HARDWARE_COUNTER_TYPE,
pub Reserved: u32,
pub Value: u64,
}
impl HARDWARE_COUNTER_DATA {}
impl ::core::default::Default for HARDWARE_COUNTER_DATA {
fn default() -> Self {
// All-zero bytes are a valid value for this plain-C struct.
unsafe { ::core::mem::zeroed() }
}
}
impl ::core::fmt::Debug for HARDWARE_COUNTER_DATA {
fn fmt(&self, fmt: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
fmt.debug_struct("HARDWARE_COUNTER_DATA").field("Type", &self.Type).field("Reserved", &self.Reserved).field("Value", &self.Value).finish()
}
}
impl ::core::cmp::PartialEq for HARDWARE_COUNTER_DATA {
fn eq(&self, other: &Self) -> bool {
self.Type == other.Type && self.Reserved == other.Reserved && self.Value == other.Value
}
}
impl ::core::cmp::Eq for HARDWARE_COUNTER_DATA {}
unsafe impl ::windows::core::Abi for HARDWARE_COUNTER_DATA {
type Abi = Self;
}
/// Newtype over the Win32 HARDWARE_COUNTER_TYPE enumeration.
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: marker :: Copy, :: core :: clone :: Clone, :: core :: default :: Default, :: core :: fmt :: Debug)]
#[repr(transparent)]
pub struct HARDWARE_COUNTER_TYPE(pub i32);
pub const PMCCounter: HARDWARE_COUNTER_TYPE = HARDWARE_COUNTER_TYPE(0i32);
pub const MaxHardwareCounterType: HARDWARE_COUNTER_TYPE = HARDWARE_COUNTER_TYPE(1i32);
impl ::core::convert::From<i32> for HARDWARE_COUNTER_TYPE {
fn from(value: i32) -> Self {
Self(value)
}
}
unsafe impl ::windows::core::Abi for HARDWARE_COUNTER_TYPE {
type Abi = Self;
}
/// Machine-generated windows-rs struct mirroring the Win32 PERFORMANCE_DATA
/// layout returned by `ReadThreadProfilingData`.
#[derive(:: core :: clone :: Clone, :: core :: marker :: Copy)]
#[repr(C)]
pub struct PERFORMANCE_DATA {
pub Size: u16,
pub Version: u8,
pub HwCountersCount: u8,
pub ContextSwitchCount: u32,
pub WaitReasonBitMap: u64,
pub CycleTime: u64,
pub RetryCount: u32,
pub Reserved: u32,
pub HwCounters: [HARDWARE_COUNTER_DATA; 16],
}
impl PERFORMANCE_DATA {}
impl ::core::default::Default for PERFORMANCE_DATA {
fn default() -> Self {
// All-zero bytes are a valid value for this plain-C struct.
unsafe { ::core::mem::zeroed() }
}
}
impl ::core::fmt::Debug for PERFORMANCE_DATA {
fn fmt(&self, fmt: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
fmt.debug_struct("PERFORMANCE_DATA")
.field("Size", &self.Size)
.field("Version", &self.Version)
.field("HwCountersCount", &self.HwCountersCount)
.field("ContextSwitchCount", &self.ContextSwitchCount)
.field("WaitReasonBitMap", &self.WaitReasonBitMap)
.field("CycleTime", &self.CycleTime)
.field("RetryCount", &self.RetryCount)
.field("Reserved", &self.Reserved)
.field("HwCounters", &self.HwCounters)
.finish()
}
}
impl ::core::cmp::PartialEq for PERFORMANCE_DATA {
fn eq(&self, other: &Self) -> bool {
self.Size == other.Size && self.Version == other.Version && self.HwCountersCount == other.HwCountersCount && self.ContextSwitchCount == other.ContextSwitchCount && self.WaitReasonBitMap == other.WaitReasonBitMap && self.CycleTime == other.CycleTime && self.RetryCount == other.RetryCount && self.Reserved == other.Reserved && self.HwCounters == other.HwCounters
}
}
impl ::core::cmp::Eq for PERFORMANCE_DATA {}
unsafe impl ::windows::core::Abi for PERFORMANCE_DATA {
type Abi = Self;
}
#[cfg(feature = "Win32_Foundation")]
#[inline]
/// Machine-generated windows-rs binding: writes into `enabled` whether
/// thread profiling is active for the given thread. Returns a Win32 error
/// code.
pub unsafe fn QueryThreadProfiling<'a, Param0: ::windows::core::IntoParam<'a, super::super::super::Foundation::HANDLE>>(threadhandle: Param0, enabled: *mut super::super::super::Foundation::BOOLEAN) -> u32 {
#[cfg(windows)]
{
#[link(name = "windows")]
extern "system" {
fn QueryThreadProfiling(threadhandle: super::super::super::Foundation::HANDLE, enabled: *mut super::super::super::Foundation::BOOLEAN) -> u32;
}
::core::mem::transmute(QueryThreadProfiling(threadhandle.into_param().abi(), ::core::mem::transmute(enabled)))
}
#[cfg(not(windows))]
unimplemented!("Unsupported target OS");
}
#[cfg(feature = "Win32_Foundation")]
#[inline]
/// Machine-generated windows-rs binding: reads the accumulated profiling
/// counters for a profiling handle into `performancedata`. Returns a Win32
/// error code.
pub unsafe fn ReadThreadProfilingData<'a, Param0: ::windows::core::IntoParam<'a, super::super::super::Foundation::HANDLE>>(performancedatahandle: Param0, flags: u32, performancedata: *mut PERFORMANCE_DATA) -> u32 {
#[cfg(windows)]
{
#[link(name = "windows")]
extern "system" {
fn ReadThreadProfilingData(performancedatahandle: super::super::super::Foundation::HANDLE, flags: u32, performancedata: *mut PERFORMANCE_DATA) -> u32;
}
::core::mem::transmute(ReadThreadProfilingData(performancedatahandle.into_param().abi(), ::core::mem::transmute(flags), ::core::mem::transmute(performancedata)))
}
#[cfg(not(windows))]
unimplemented!("Unsupported target OS");
}
|
#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals, clashing_extern_declarations, clippy::all)]
#[link(name = "windows")]
extern "system" {}
// Machine-generated WinRT projections: GPIO provider interfaces are opaque
// COM pointers at this ABI level.
pub type GpioPinProviderValueChangedEventArgs = *mut ::core::ffi::c_void;
pub type IGpioControllerProvider = *mut ::core::ffi::c_void;
pub type IGpioPinProvider = *mut ::core::ffi::c_void;
pub type IGpioProvider = *mut ::core::ffi::c_void;
/// WinRT Windows.Devices.Gpio.Provider.ProviderGpioPinDriveMode enum,
/// projected as a transparent i32 newtype.
#[repr(transparent)]
pub struct ProviderGpioPinDriveMode(pub i32);
impl ProviderGpioPinDriveMode {
pub const Input: Self = Self(0i32);
pub const Output: Self = Self(1i32);
pub const InputPullUp: Self = Self(2i32);
pub const InputPullDown: Self = Self(3i32);
pub const OutputOpenDrain: Self = Self(4i32);
pub const OutputOpenDrainPullUp: Self = Self(5i32);
pub const OutputOpenSource: Self = Self(6i32);
pub const OutputOpenSourcePullDown: Self = Self(7i32);
}
impl ::core::marker::Copy for ProviderGpioPinDriveMode {}
impl ::core::clone::Clone for ProviderGpioPinDriveMode {
fn clone(&self) -> Self {
*self
}
}
/// Signal edge selector for pin value-changed events.
#[repr(transparent)]
pub struct ProviderGpioPinEdge(pub i32);
impl ProviderGpioPinEdge {
pub const FallingEdge: Self = Self(0i32);
pub const RisingEdge: Self = Self(1i32);
}
impl ::core::marker::Copy for ProviderGpioPinEdge {}
impl ::core::clone::Clone for ProviderGpioPinEdge {
fn clone(&self) -> Self {
*self
}
}
/// Logical pin level (Low/High).
#[repr(transparent)]
pub struct ProviderGpioPinValue(pub i32);
impl ProviderGpioPinValue {
pub const Low: Self = Self(0i32);
pub const High: Self = Self(1i32);
}
impl ::core::marker::Copy for ProviderGpioPinValue {}
impl ::core::clone::Clone for ProviderGpioPinValue {
fn clone(&self) -> Self {
*self
}
}
/// Pin sharing policy when a pin is opened.
#[repr(transparent)]
pub struct ProviderGpioSharingMode(pub i32);
impl ProviderGpioSharingMode {
pub const Exclusive: Self = Self(0i32);
pub const SharedReadOnly: Self = Self(1i32);
}
impl ::core::marker::Copy for ProviderGpioSharingMode {}
impl ::core::clone::Clone for ProviderGpioSharingMode {
fn clone(&self) -> Self {
*self
}
}
|
use franklin_crypto::bellman::bn256::{Bn256, Fr};
use franklin_crypto::bellman::Engine;
use rand::{Rand, SeedableRng, XorShiftRng};
#[allow(dead_code)]
use rescue_poseidon::generic_hash;
use rescue_poseidon::poseidon::PoseidonParams;
use rescue_poseidon::rescue::RescueParams;
use rescue_poseidon::rescue::{
generic_rescue_hash, generic_rescue_var_length, rescue_hash, rescue_hash_var_length,
};
use rescue_poseidon::rescue_prime::RescuePrimeParams;
use rescue_poseidon::GenericSponge;
use rescue_poseidon::Sponge;
use std::convert::TryInto;
/// Deterministic RNG for the examples, so every run hashes the same inputs.
pub(crate) fn init_rng() -> XorShiftRng {
const TEST_SEED: [u32; 4] = [0x5dbe6259, 0x8d313d76, 0x3237db17, 0xe5bc0654];
XorShiftRng::from_seed(TEST_SEED)
}
/// Runs every example in sequence; each example asserts its own invariants.
// NOTE(review): "nuumber" below is a typo carried in the function's own name;
// renaming would need a coordinated change at the definition site too.
fn main() {
run_generic_hash_fixed_length::<Bn256>();
run_rescue_fixed_length_example::<Bn256>();
run_rescue_var_length_example::<Bn256>();
run_generic_rescue_fixed_length_example::<Bn256>();
run_generic_rescue_var_length_example::<Bn256>();
run_generic_sponge_with_rescue_params::<Bn256>();
run_generic_sponge_with_single_squeeze::<Bn256>();
run_generic_sponge_with_requested_nuumber_output::<Bn256>();
}
/// Shows that `generic_hash` accepts any parameter family (Rescue, Poseidon,
/// RescuePrime) over the same fixed-length input.
// NOTE(review): the type parameter `E` is unused — the body hard-codes Bn256.
fn run_generic_hash_fixed_length<E: Engine>() {
const RATE: usize = 2;
const WIDTH: usize = 3;
const INPUT_LENGTH: usize = 2;
let rng = &mut init_rng();
let inputs: [Fr; INPUT_LENGTH] = (0..INPUT_LENGTH)
.map(|_| Fr::rand(rng))
.collect::<Vec<Fr>>()
.try_into()
.expect("constant array");
// we can send all type of params so lets start with rescue
let rescue_params = RescueParams::<Bn256, RATE, WIDTH>::default();
let result = generic_hash(&rescue_params, &inputs);
assert_eq!(result.len(), RATE);
// now, hash with poseidon params
let poseidon_params = PoseidonParams::<Bn256, RATE, WIDTH>::default();
let result = generic_hash(&poseidon_params, &inputs);
assert_eq!(result.len(), RATE);
// // now, hash with rescue prime params
let rescue_prime_params = RescuePrimeParams::<Bn256, RATE, WIDTH>::default();
let result = generic_hash(&rescue_prime_params, &inputs);
assert_eq!(result.len(), RATE);
}
/// Fixed-length convenience wrapper: hashes a const-generic-sized input.
fn run_rescue_fixed_length_example<E: Engine>() {
const INPUT_LENGTH: usize = 2;
let rng = &mut init_rng();
let input = (0..INPUT_LENGTH)
.map(|_| Fr::rand(rng))
.collect::<Vec<Fr>>();
let result = rescue_hash::<Bn256, INPUT_LENGTH>(&input.try_into().expect("static vector"));
assert_eq!(result.len(), 2);
}
/// Variable-length wrapper: input length is only known at runtime.
fn run_rescue_var_length_example<E: Engine>() {
let rng = &mut init_rng();
let input = (0..4).map(|_| Fr::rand(rng)).collect::<Vec<Fr>>();
let result = rescue_hash_var_length::<Bn256>(&input);
assert_eq!(result.len(), 2);
}
/// Generic fixed-length variant with explicit RATE/WIDTH const parameters.
fn run_generic_rescue_fixed_length_example<E: Engine>() {
const WIDTH: usize = 3;
const RATE: usize = 2;
const INPUT_LENGTH: usize = 5;
let rng = &mut init_rng();
let input = (0..INPUT_LENGTH)
.map(|_| Fr::rand(rng))
.collect::<Vec<Fr>>();
let result = generic_rescue_hash::<Bn256, RATE, WIDTH, INPUT_LENGTH>(
&input.try_into().expect("static vector"),
);
assert_eq!(result.len(), RATE);
}
/// Generic variable-length variant.
fn run_generic_rescue_var_length_example<E: Engine>() {
const WIDTH: usize = 3;
const RATE: usize = 2;
const INPUT_LENGTH: usize = 8; // input length should be multiple of RATE
let rng = &mut init_rng();
let input = (0..INPUT_LENGTH)
.map(|_| Fr::rand(rng))
.collect::<Vec<Fr>>();
let result = generic_rescue_var_length::<Bn256, RATE, WIDTH>(&input);
assert_eq!(result.len(), RATE);
}
/// Drives the sponge API directly: absorb then squeeze with default output
/// length (None ⇒ RATE elements).
fn run_generic_sponge_with_rescue_params<E: Engine>() {
const WIDTH: usize = 3;
const RATE: usize = 2;
let rng = &mut init_rng();
let input = (0..2).map(|_| Fr::rand(rng)).collect::<Vec<Fr>>();
let new_params = RescueParams::<Bn256, RATE, WIDTH>::default();
let mut hasher = GenericSponge::from_params(&new_params);
hasher.absorb(&input);
let result = hasher.squeeze(None);
assert_eq!(result.len(), RATE);
}
/// Squeeze a single element instead of the full rate.
fn run_generic_sponge_with_single_squeeze<E: Engine>() {
const WIDTH: usize = 3;
const RATE: usize = 2;
let rng = &mut init_rng();
let input = (0..2).map(|_| Fr::rand(rng)).collect::<Vec<Fr>>();
let new_params = RescueParams::<Bn256, RATE, WIDTH>::default();
let mut hasher = GenericSponge::from_params(&new_params);
hasher.absorb(&input);
let result = hasher.squeeze(Some(1));
// Specifying output length may cause to lose some bits of hash result
assert_eq!(result.len(), 1);
}
/// Squeeze more elements than the rate (sponge keeps permuting as needed).
fn run_generic_sponge_with_requested_nuumber_output<E: Engine>() {
const WIDTH: usize = 3;
const RATE: usize = 2;
let rng = &mut init_rng();
let input = (0..2).map(|_| Fr::rand(rng)).collect::<Vec<Fr>>();
let requested_number_of_output: usize = 6;
let new_params = RescueParams::<Bn256, RATE, WIDTH>::default();
let mut hasher = GenericSponge::from_params(&new_params);
hasher.absorb(&input);
let result = hasher.squeeze(Some(requested_number_of_output));
assert_eq!(result.len(), requested_number_of_output);
}
|
use alloc::slice::Iter;
/// Bit flags of the macOS kauth vnode scope (KAUTH_VNODE_* in
/// sys/kauth.h), modeled as an enum with explicit discriminants.
// NOTE(review): discriminants are isize-typed; `1 << 31` assumes a 64-bit
// target (it would overflow a 32-bit isize) — confirm supported targets.
#[derive(PartialEq, Debug, Clone)]
pub enum KAuthVNodeAction {
READ_DATA = 1 << 1,
WRITE_DATA = 1 << 2,
EXECUTE = 1 << 3,
DELETE = 1 << 4,
APPEND_DATA = 1 << 5,
DELETE_CHILD = 1 << 6,
READ_ATTRIBUTES = 1 << 7,
WRITE_ATTRIBUTES = 1 << 8,
READ_EXTATTRIBUTES = 1 << 9,
WRITE_EXTATTRIBUTES = 1 << 10,
READ_SECURITY = 1 << 11,
WRITE_SECURITY = 1 << 12,
TAKE_OWNERSHIP = 1 << 13,
SYNCHRONIZE = 1 << 20,
LINKTARGET = 1 << 25,
CHECKIMMUTABLE = 1 << 26,
SEARCHBYANYONE = 1 << 29,
NOIMMUTABLE = 1 << 30,
ACCESS = 1 << 31,
}
impl KAuthVNodeAction {
    /// Returns a borrowing iterator over one instance of every vnode-action
    /// flag, useful for decoding a kauth action bitmask flag by flag.
    pub fn iterator() -> Iter<'static, KAuthVNodeAction> {
        static ACTIONS: [KAuthVNodeAction; 19] = [
            KAuthVNodeAction::READ_DATA,
            KAuthVNodeAction::WRITE_DATA,
            KAuthVNodeAction::EXECUTE,
            KAuthVNodeAction::DELETE,
            KAuthVNodeAction::APPEND_DATA,
            KAuthVNodeAction::DELETE_CHILD,
            KAuthVNodeAction::READ_ATTRIBUTES,
            KAuthVNodeAction::WRITE_ATTRIBUTES,
            KAuthVNodeAction::READ_EXTATTRIBUTES,
            KAuthVNodeAction::WRITE_EXTATTRIBUTES,
            KAuthVNodeAction::READ_SECURITY,
            KAuthVNodeAction::WRITE_SECURITY,
            KAuthVNodeAction::TAKE_OWNERSHIP,
            KAuthVNodeAction::SYNCHRONIZE,
            KAuthVNodeAction::LINKTARGET,
            KAuthVNodeAction::CHECKIMMUTABLE,
            KAuthVNodeAction::SEARCHBYANYONE,
            KAuthVNodeAction::NOIMMUTABLE,
            KAuthVNodeAction::ACCESS,
        ];
        // Fix: use `iter()` rather than `into_iter()`. The signature promises
        // a borrowing slice iterator, and on edition 2021 `into_iter()` on an
        // array would attempt to move out of the `static` (compile error).
        ACTIONS.iter()
    }
}
/// Directory-specific aliases for the kauth vnode flags — same bit values,
/// different semantic names when the vnode is a directory.
#[derive(PartialEq, Debug, Clone)]
pub enum KAuthVNodeDirectoryAction {
LIST_DIRECTORY = 1 << 1, // KAUTH_VNODE_ACTION::READ_DATA,
ADD_FILE = 1 << 2, // KAUTH_VNODE_ACTION::WRITE_DATA,
SEARCH = 1 << 3, // KAUTH_VNODE_ACTION::EXECUTE,
ADD_SUBDIRECTORY = 1 << 5, // KAUTH_VNODE_ACTION::APPEND_DATA,
}
impl KAuthVNodeDirectoryAction {
    /// Returns a borrowing iterator over every directory-action flag.
    pub fn iterator() -> Iter<'static, KAuthVNodeDirectoryAction> {
        static ACTIONS: [KAuthVNodeDirectoryAction; 4] = [
            KAuthVNodeDirectoryAction::LIST_DIRECTORY,
            KAuthVNodeDirectoryAction::ADD_FILE,
            KAuthVNodeDirectoryAction::SEARCH,
            KAuthVNodeDirectoryAction::ADD_SUBDIRECTORY,
        ];
        // Fix: `iter()` instead of `into_iter()` — matches the declared
        // `Iter<'static, _>` return type and avoids the edition-2021
        // array-by-value IntoIterator, which cannot move out of a static.
        ACTIONS.iter()
    }
}
|
/// Reference (brute-force) solution: sums the first `n` terms of the
/// sequence y₀ = x, yₖ₊₁ = yₖ² mod m.
fn naive(n: usize, x: usize, m: usize) -> usize {
    std::iter::successors(Some(x), |&y| Some(y * y % m))
        .take(n)
        .sum()
}
/// Computes sum of the first n terms of y0=x, y_{k+1}=y_k^2 mod m in
/// O(m) time by detecting the cycle the sequence must enter (all values
/// are < m, so a repeat occurs within m+1 steps) and multiplying out the
/// repeated cycle sum.
fn main() {
let stdin = std::io::stdin();
let mut rd = ProconReader::new(stdin.lock());
let n: usize = rd.get();
let x: usize = rd.get();
let m: usize = rd.get();
// println!("{}", naive(n, x, m));
//
// let mut y = x;
// let mut seen = std::collections::HashSet::new();
// for i in 0..1000 {
// println!("{} {}", i, y);
// if !seen.insert(y) {
// break;
// }
// y = y * y % m;
// }
// last[v] = index at which value v was last pushed; 0 doubles as "unseen",
// so the element at index 0 is only recognized on its *second* repeat —
// detection then fires one step later, which still yields a valid (p, q).
let mut last = vec![0; m];
let mut a = vec![];
let mut y = x;
for i in 0..n {
if last[y] > 0 {
// p = start of the cycle, q = cycle length,
// r = number of whole cycles in the remaining n - p terms,
// s = leftover terms after the last whole cycle.
let p = last[y];
let q = i - p;
let r = (n - p) / q;
assert!(r >= 1);
let s = (n - p) % q;
// println!("{:?}", &a[0..p]);
// println!("{:?}", &a[p..]);
// println!("{:?}", &a[p..(p + s)]);
// prefix + r copies of the cycle + the partial cycle.
let ans = a[0..p].iter().sum::<usize>()
+ a[p..].iter().sum::<usize>() * r
+ a[p..(p + s)].iter().sum::<usize>();
println!("{}", ans);
return;
}
a.push(y);
last[y] = i;
y = y * y % m;
}
// n terms exhausted before any repeat: plain sum.
println!("{}", a.iter().sum::<usize>());
}
/*
Example: input "17 2 1001" (n=17, x=2, m=1001)
=> expected output: 4838
*/
/// Minimal whitespace-delimited token reader for competitive programming.
pub struct ProconReader<R: std::io::Read> {
    reader: R,
}

impl<R: std::io::Read> ProconReader<R> {
    /// Wraps any `Read` source (a stdin lock, a byte slice, …).
    pub fn new(reader: R) -> Self {
        Self { reader }
    }

    /// Reads the next token (delimited by space, `\n` or `\r`) and parses it
    /// as `T`. Panics with "Parse Error." when parsing fails — including on
    /// the empty token produced at end of input.
    pub fn get<T: std::str::FromStr>(&mut self) -> T {
        use std::io::Read;
        let mut token = Vec::new();
        for byte in self.reader.by_ref().bytes().map(|b| b.unwrap()) {
            let is_delim = byte == b' ' || byte == b'\n' || byte == b'\r';
            if !is_delim {
                token.push(byte);
            } else if token.is_empty() {
                continue; // skip leading whitespace
            } else {
                break; // delimiter ends the token (and is consumed)
            }
        }
        std::str::from_utf8(&token)
            .unwrap()
            .parse()
            .ok()
            .expect("Parse Error.")
    }
}
|
use crate::auth;
use crate::diesel::QueryDsl;
use crate::diesel::RunQueryDsl;
use crate::handlers::paginate::*;
use crate::handlers::types::*;
use crate::model::{ChatList, User, UserChat};
use crate::schema::unread_user_chat::dsl::*;
use crate::schema::user_chat::dsl::id as user_chat_id;
use crate::schema::user_chat::dsl::user_id as sender_id;
use crate::schema::user_chat::dsl::*;
use crate::schema::users::dsl::id as user_id;
use crate::schema::users::dsl::*;
use crate::Pool;
use actix_web::web;
use diesel::dsl::delete;
use diesel::prelude::*;
/// Returns the users this account has unread chats with, most recently
/// updated first. The user id is taken from the decoded auth token.
///
/// NOTE(review): `decoded_token.parse::<i32>().unwrap()` and `db.get().unwrap()`
/// panic on bad input / pool exhaustion — consider surfacing these as errors.
pub fn get_chat_list_db(
db: web::Data<Pool>,
token: String,
) -> Result<Response<Vec<User>>, diesel::result::Error> {
let conn = db.get().unwrap();
let decoded_token = auth::decode_token(&token);
// `other` / `updated_at` are columns of unread_user_chat (glob-imported).
let chat_list: Vec<ChatList> = unread_user_chat
.filter(other.eq(&decoded_token.parse::<i32>().unwrap()))
.order(updated_at.desc())
.load::<ChatList>(&conn)?;
//find a better way to do this
//seems to be the only solution now due to diesel limitations
// N+1 query: one lookup per chat partner.
let mut return_val: Vec<User> = vec![];
for item in chat_list {
let user = users.find(item.user_id).first::<User>(&conn)?;
return_val.push(user)
}
Ok(Response::new(true, return_val))
}
/// Returns a paginated page of messages (with their senders) exchanged
/// between the authenticated user and `other_user_id`, newest first, plus
/// the total page count.
pub fn get_all_message_db(
db: web::Data<Pool>,
token: String,
other_user_id: web::Path<IdPathInfo>,
item: web::Query<PaginateQuery>,
) -> Result<Response<(Vec<(UserChat, User)>, i64)>, diesel::result::Error> {
let conn = db.get().unwrap();
let decoded_token = auth::decode_token(&token);
// NOTE(review): eq_any over both ids returns whichever of the two users
// comes first — presumably intended as an existence anchor for
// belonging_to; confirm this matches the endpoint's contract.
let user: User = users
.filter(user_id.eq_any(vec![
&decoded_token.parse::<i32>().unwrap(),
&other_user_id.id,
]))
.first::<User>(&conn)?;
// Restrict to messages where both sender and receiver are in the pair.
let all_chats: (Vec<(UserChat, User)>, i64) = UserChat::belonging_to(&user)
.inner_join(users)
.filter(user_id.eq_any(vec![&user.id, &other_user_id.id]))
.filter(reciever.eq_any(vec![&user.id, &other_user_id.id]))
.order(user_chat_id.desc())
.paginate(item.page)
.per_page(item.per_page)
.load_and_count_pages::<(UserChat, User)>(&conn)?;
Ok(Response::new(true, all_chats))
}
/// Deletes a single message by id, but only when the authenticated user is
/// its sender (the sender filter enforces authorization at the SQL level).
///
/// NOTE(review): deleting zero rows (wrong id or not the sender) still
/// reports success — confirm that is the intended behavior.
pub fn delete_message_db(
db: web::Data<Pool>,
token: String,
chat_id: web::Path<IdPathInfo>,
) -> Result<Response<String>, diesel::result::Error> {
let conn = db.get().unwrap();
let decoded_token = auth::decode_token(&token);
let user: User = users
.find(decoded_token.parse::<i32>().unwrap())
.first::<User>(&conn)?;
let _count2 = delete(
user_chat
.filter(user_chat_id.eq(chat_id.id))
.filter(sender_id.eq(user.id)),
)
.execute(&conn)?;
Ok(Response::new(
true,
"message deleted successfully".to_string(),
))
}
|
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This is an equivalent of issue #50504, but for declarative macros.
#![feature(decl_macro, rustc_attrs)]
// Hygiene fixture: idents passed in from the call site resolve at the call
// site, so referring to them from a nested module generated here must fail.
macro genmod($FromOutside: ident, $Outer: ident) {
type A = $FromOutside;
struct $Outer;
mod inner {
type A = $FromOutside; // `FromOutside` shouldn't be available from here
type Inner = $Outer; // `Outer` shouldn't be available from here
}
}
// Transparent hygiene: names written literally in the macro body resolve at
// the expansion site, but still must not leak into the generated module.
#[rustc_transparent_macro]
macro genmod_transparent() {
type A = FromOutside;
struct Outer;
mod inner {
type A = FromOutside; //~ ERROR cannot find type `FromOutside` in this scope
type Inner = Outer; //~ ERROR cannot find type `Outer` in this scope
}
}
// Same check for legacy (macro_rules!) hygiene.
macro_rules! genmod_legacy { () => {
type A = FromOutside;
struct Outer;
mod inner {
type A = FromOutside; //~ ERROR cannot find type `FromOutside` in this scope
type Inner = Outer; //~ ERROR cannot find type `Outer` in this scope
}
}}
// Instantiates each macro so the expected resolution errors are produced.
fn check() {
struct FromOutside;
genmod!(FromOutside, Outer); //~ ERROR cannot find type `FromOutside` in this scope
//~| ERROR cannot find type `Outer` in this scope
}
fn check_transparent() {
struct FromOutside;
genmod_transparent!();
}
fn check_legacy() {
struct FromOutside;
genmod_legacy!();
}
|
extern crate pine;
use pine::ast::syntax_type::{SimpleSyntaxType, SyntaxType};
use pine::libs::plot;
use pine::libs::print;
use pine::runtime::data_src::{Callback, DataSrc, NoneCallback};
use pine::runtime::output::OutputData;
use pine::runtime::AnySeries;
// Pine script computing a 5-period sum-based moving average of `close`.
// (The string body is runtime data for the pine parser — do not reformat.)
const MA_SCRIPT: &str = "
N = 5
ma = close
// ma = (close + close[1] + close[2] + close[3] + close[4]) / 5
for i = 1 to N - 1
ma := ma + close[i]
ma := ma / N
plot(ma)
";
#[test]
/// Runs MA_SCRIPT over close = 1..=5 and checks the plotted series: the MA
/// is undefined (None) until 5 bars are available, then equals 3.
fn datasrc_test() {
let lib_info = pine::LibInfo::new(
vec![plot::declare_var()],
vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
);
let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
parser.parse_src(String::from(MA_SCRIPT)).unwrap();
let data = vec![(
"close",
AnySeries::from_float_vec(vec![
Some(1f64),
Some(2f64),
Some(3f64),
Some(4f64),
Some(5f64),
]),
)];
let out_data = parser.run_with_data(data, None);
assert_eq!(
out_data.unwrap().data_list[0],
Some(OutputData::new(vec![vec![
None,
None,
None,
None,
Some(3f64)
]]))
);
// assert!(parser.run_with_data(data, None).is_ok());
}
// Pine script with a user-defined function that references its own previous
// output (sum[1]) — exercises per-call series state.
const FUNC_SCRIPT: &str = "
pine_ema(x, y) =>
sum = 0.0
sum := x + (y * sum[1] ? y * sum[1] : 0)
sum
plot(pine_ema(close, 2))
";
#[test]
/// Checks the recurrence sum = close + 2*sum[1] over close = 2,4,8,16,32.
fn func_call_test() {
let lib_info = pine::LibInfo::new(
vec![plot::declare_var()],
vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
);
let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
parser.parse_src(String::from(FUNC_SCRIPT)).unwrap();
let data = vec![(
"close",
AnySeries::from_float_vec(vec![
Some(2f64),
Some(4f64),
Some(8f64),
Some(16f64),
Some(32f64),
]),
)];
// assert!(parser.run_with_data(data, None).is_ok());
let out_data = parser.run_with_data(data, None);
assert_eq!(
out_data.unwrap().data_list[0],
Some(OutputData::new(vec![vec![
Some(2f64),
Some(8f64),
Some(24f64),
Some(64f64),
Some(160f64)
]]))
);
}
// Pine script whose if/else branches each declare a local series and read
// its previous value — exercises branch-local series history.
const IF_ELSE_SCRIPT: &str = "
m = if close > open
s = close
s[1]
else
t = open
t[1]
plot(m)
";
#[test]
/// Verifies the branch-local histories: each branch only remembers values
/// from bars on which that same branch executed.
fn if_else_test() {
let lib_info = pine::LibInfo::new(
vec![plot::declare_var()],
vec![
("close", SyntaxType::Series(SimpleSyntaxType::Float)),
("open", SyntaxType::Series(SimpleSyntaxType::Float)),
],
);
let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
parser.parse_src(String::from(IF_ELSE_SCRIPT)).unwrap();
let data = vec![
(
"close",
AnySeries::from_float_vec(vec![
Some(1f64),
Some(3f64),
Some(5f64),
Some(7f64),
Some(9f64),
]),
),
(
"open",
AnySeries::from_float_vec(vec![
Some(0f64),
Some(4f64),
Some(4f64),
Some(8f64),
Some(8f64),
]),
),
];
// assert!(parser.run_with_data(data, None).is_ok());
let out_data = parser.run_with_data(data, None);
assert_eq!(
out_data.unwrap().data_list[0],
Some(OutputData::new(vec![vec![
None,
None,
Some(1f64),
Some(4f64),
Some(5f64)
]]))
);
}
// Pine script declaring a `var` (bar-persistent) inside a for-loop body —
// only checks that parsing + execution succeed.
const FOR_RANGE_SCRIPT: &str = "
float val = 0
for i = 1 to 5
var sum = 0.0
sum := sum + 1
val := sum
";
#[test]
/// Smoke test: the script runs without error on two bars of data.
fn for_range_test() {
let lib_info = pine::LibInfo::new(
vec![print::declare_var()],
vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
);
let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
parser.parse_src(String::from(FOR_RANGE_SCRIPT)).unwrap();
let data = vec![(
"close",
AnySeries::from_float_vec(vec![Some(1f64), Some(3f64)]),
)];
assert!(parser.run_with_data(data, None).is_ok());
}
// Hand-written EMA in pine: alpha = 2/(y+1), classic exponential smoothing
// with sum[1] as the previous EMA value.
const EMA_SCRIPT: &str = "
pine_ema(x, y) =>
alpha = 2 / (y + 1)
sum = 0.0
sum := alpha * x + (1 - alpha) * (sum[1] ? sum[1] : 0)
sum
plot(pine_ema(close, 3.0))
";
#[test]
/// EMA(3) over close = 2, 4: alpha = 0.5, so outputs are 1 and 2.5.
fn ema_test() {
let lib_info = pine::LibInfo::new(
vec![plot::declare_var()],
vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
);
let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
parser.parse_src(String::from(EMA_SCRIPT)).unwrap();
let data = vec![(
"close",
AnySeries::from_float_vec(vec![Some(2f64), Some(4f64)]),
)];
// assert!(parser.run_with_data(data, None).is_ok());
let out_data = parser.run_with_data(data, None);
assert_eq!(
out_data.unwrap().data_list[0],
Some(OutputData::new(vec![vec![Some(1f64), Some(2.5f64),]]))
);
}
// Pine script computing MACD two ways: by hand from the built-in ema(), and
// via the built-in macd() — the test asserts both agree series-by-series.
const MACD_SCRIPT: &str = "
pine_ema(x, y) =>
alpha = 2 / (y + 1)
sum = 0.0
sum := alpha * x + (1 - alpha) * (sum[1] ? sum[1] : 0)
sum
pine_macd(fastlen, slowlen, siglen) =>
// DIF=EMA_{{(close,12)}}-EMA_{{(close,26)}}
dif = ema(close, fastlen) - ema(close, slowlen)
// DEM=EMA_{{(DIF,9)}}
dem = ema(dif, siglen)
//OSC=DIF-DEM=DIF-MACD
osc = dif - dem
[dif, dem, osc]
[m1, m2, m3] = pine_macd(3, 7, 3)
plot(m1)
plot(m2)
plot(m3)
[macdLine, signalLine, histLine] = macd(close, 3, 7, 3)
plot(macdLine)
plot(signalLine)
plot(histLine)
";
#[test]
/// Cross-checks the hand-rolled MACD (plots 0-2) against the built-in
/// macd() (plots 3-5) — no absolute expected values, only equality.
fn macd_test() {
use pine::libs::{ema, macd};
let lib_info = pine::LibInfo::new(
vec![
plot::declare_var(),
macd::declare_var(),
ema::declare_ema_var(),
],
vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
);
let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
parser.parse_src(String::from(MACD_SCRIPT)).unwrap();
let data = vec![(
"close",
AnySeries::from_float_vec(vec![Some(2f64), Some(4f64), Some(1f64), Some(12f64)]),
)];
// assert!(parser.run_with_data(data, None).is_ok());
let out_data = parser.run_with_data(data, None);
// assert_eq!(
// out_data.as_ref().unwrap().data_list[0],
// Some(OutputData::new(vec![vec![
// Some(12.763532763532766f64),
// Some(32.82882444136006f64),
// ]]))
// );
println!(
"get data {:?} {:?}",
out_data.as_ref().unwrap().data_list[0],
out_data.as_ref().unwrap().data_list[3]
);
assert_eq!(
out_data.as_ref().unwrap().data_list[0],
out_data.as_ref().unwrap().data_list[3],
);
assert_eq!(
out_data.as_ref().unwrap().data_list[1],
out_data.as_ref().unwrap().data_list[4],
);
assert_eq!(
out_data.as_ref().unwrap().data_list[2],
out_data.as_ref().unwrap().data_list[5],
);
}
#[test]
/// Type check: reassigning a float series to a bool must be rejected at
/// parse time.
fn assign_test() {
let lib_info = pine::LibInfo::new(
vec![print::declare_var()],
vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
);
let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
assert!(parser
.parse_src(String::from("m = close\nm := true"))
.is_err());
}
#[test]
/// Smallest valid script — a bare plot(close) — must parse cleanly.
fn plot_only_test() {
let lib_info = pine::LibInfo::new(
vec![plot::declare_var()],
vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
);
let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
println!("{:?}", parser.parse_src(String::from("plot(close)")));
assert!(parser.parse_src(String::from("plot(close)")).is_ok());
}
// ALMA (Arnaud Legoux MA) computed with the built-in and re-implemented in
// pine with a Gaussian-weighted window; the test compares the two.
const ALMA_SCRIPT: &str = "
m1 = (alma(close, 4, 0.85, 2.0))
// same on pine, but much less efficient
pine_alma(series, windowsize, offset, sigma) =>
m = floor(offset * (windowsize - 1))
s = windowsize / sigma
norm = 0.0
sum = 0.0
for i = 0 to windowsize - 1
weight = exp(-1 * pow(i - m, 2) / (2 * pow(s, 2)))
norm := norm + weight
sum := sum + series[windowsize - i - 1] * weight
sum / norm
m2 = (pine_alma(close, 4, 0.85, 2.0))
";
#[test]
/// Runs both ALMA variants, then pulls m1 and m2 out of the runtime context
/// by variable index and compares them (floored, to absorb float noise).
fn alma_test() {
use pine::ast::stat_expr_types::VarIndex;
use pine::helper::pine_ref_to_f64_series;
use pine::libs::{alma, ceil, cos, pow};
use pine::runtime::{NoneCallback, VarOperate};
let lib_info = pine::LibInfo::new(
vec![
print::declare_var(),
cos::declare_exp_var(),
ceil::declare_floor_var(),
pow::declare_var(),
alma::declare_var(),
],
vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
);
let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
parser.parse_src(String::from(ALMA_SCRIPT)).unwrap();
let data = vec![(
"close",
AnySeries::from_float_vec(vec![
Some(200f64),
Some(400f64),
Some(400f64),
Some(400f64),
Some(400f64),
Some(400f64),
]),
)];
assert!(parser.run_with_data(data, None).is_ok());
// VarIndex(0, 0) is m1; VarIndex(2, 0) is m2 — presumably assigned in
// declaration order with pine_alma occupying slot 1; confirm if the
// runtime's slot allocation changes.
let result1 = parser
.get_runner()
.get_context()
.move_var(VarIndex::new(0, 0));
let result2 = parser
.get_runner()
.get_context()
.move_var(VarIndex::new(2, 0));
let val1 = pine_ref_to_f64_series(result1);
let val2 = pine_ref_to_f64_series(result2);
assert_eq!(
val1.unwrap().index_value(1).unwrap().unwrap().floor(),
val2.unwrap().index_value(1).unwrap().unwrap().floor()
);
// println!("{:?} {:?}", result1, result2);
}
// Pine script computing a 2-bar SMA via the built-in `sma` and via a scripted
// accumulation loop; the test checks both produce the same value.
const SMA_SCRIPT: &str = "
m1 = (sma(close, 2))
// same on pine, but much less efficient
pine_sma(x, y) =>
    sum = 0.0
    for i = 0 to y - 1
        sum := sum + x[i] / y
    sum
m2 = (pine_sma(close, 2))
";

// Compares m1 (built-in sma) with m2 (scripted sma) after a two-bar run.
#[test]
fn sma_test() {
    use pine::ast::stat_expr_types::VarIndex;
    use pine::helper::pine_ref_to_f64_series;
    use pine::libs::sma;
    use pine::runtime::{NoneCallback, VarOperate};
    let lib_info = pine::LibInfo::new(
        vec![sma::declare_sma_var()],
        vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(SMA_SCRIPT)).unwrap();
    let data = vec![(
        "close",
        AnySeries::from_float_vec(vec![Some(200f64), Some(400f64)]),
    )];
    assert!(parser.run_with_data(data, None).is_ok());
    // Var indices presumably follow declaration order: 0 = m1, 2 = m2.
    let result1 = parser
        .get_runner()
        .get_context()
        .move_var(VarIndex::new(0, 0));
    let result2 = parser
        .get_runner()
        .get_context()
        .move_var(VarIndex::new(2, 0));
    let val1 = pine_ref_to_f64_series(result1);
    let val2 = pine_ref_to_f64_series(result2);
    println!("val {:?} {:?}", val1, val2);
    // floor() sidesteps floating-point rounding differences between the paths.
    assert_eq!(
        val1.unwrap().index_value(1).unwrap().unwrap().floor(),
        val2.unwrap().index_value(1).unwrap().unwrap().floor()
    );
    // println!("{:?} {:?}", result1, result2);
}
// Pine script computing Bollinger Bands via the built-in `bb` and via a
// scripted reimplementation; the test checks all three bands agree.
const BB_SCRIPT: &str = "
[middle, upper, lower] = bb(close, 2, 4)
// the same on pine
f_bb(src, length, mult) =>
    float basis = sma(src, length)
    float dev = mult * stdev(src, length)
    [basis, basis + dev, basis - dev]
[pineMiddle, pineUpper, pineLower] = f_bb(close, 2, 4)
";

// Compares each built-in band with its scripted counterpart on two bars.
#[test]
fn bb_test() {
    use pine::ast::stat_expr_types::VarIndex;
    use pine::helper::pine_ref_to_f64_series;
    use pine::libs::{bb, sma};
    use pine::runtime::{NoneCallback, VarOperate};
    let lib_info = pine::LibInfo::new(
        vec![
            bb::declare_var(),
            sma::declare_sma_var(),
            sma::declare_stdev_var(),
        ],
        vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(BB_SCRIPT)).unwrap();
    let data = vec![(
        "close",
        AnySeries::from_float_vec(vec![Some(200f64), Some(400f64)]),
    )];
    assert!(parser.run_with_data(data, None).is_ok());
    // Var indices presumably follow declaration order: 0..2 are the built-in
    // bands, 3 is f_bb, 4..6 the scripted bands — confirm against the runtime.
    let middle = pine_ref_to_f64_series(parser.move_var(VarIndex::new(0, 0)));
    let upper = pine_ref_to_f64_series(parser.move_var(VarIndex::new(1, 0)));
    let lower = pine_ref_to_f64_series(parser.move_var(VarIndex::new(2, 0)));
    let pine_middle = pine_ref_to_f64_series(parser.move_var(VarIndex::new(4, 0)));
    let pine_upper = pine_ref_to_f64_series(parser.move_var(VarIndex::new(5, 0)));
    let pine_lower = pine_ref_to_f64_series(parser.move_var(VarIndex::new(6, 0)));
    // floor() sidesteps floating-point rounding differences between the paths.
    assert_eq!(
        middle.unwrap().index_value(1).unwrap().unwrap().floor(),
        pine_middle
            .unwrap()
            .index_value(1)
            .unwrap()
            .unwrap()
            .floor()
    );
    assert_eq!(
        upper.unwrap().index_value(1).unwrap().unwrap().floor(),
        pine_upper.unwrap().index_value(1).unwrap().unwrap().floor()
    );
    assert_eq!(
        lower.unwrap().index_value(1).unwrap().unwrap().floor(),
        pine_lower.unwrap().index_value(1).unwrap().unwrap().floor()
    );
    // println!("{:?} {:?}", result1, result2);
}
// Pine script computing Bollinger Band Width via the built-in `bbw` and via a
// scripted reimplementation; the test checks both agree.
const BBW_SCRIPT: &str = "
m1 = (bbw(close, 2, 4))
// the same on pine
f_bbw(src, length, mult) =>
    float basis = sma(src, length)
    float dev = mult * stdev(src, length)
    ((basis + dev) - (basis - dev)) / basis
m2 = f_bbw(close, 2, 4)
";

// Compares m1 (built-in bbw) with m2 (scripted bbw) on two bars.
#[test]
fn bbw_test() {
    use pine::ast::stat_expr_types::VarIndex;
    use pine::helper::pine_ref_to_f64_series;
    use pine::libs::{bbw, sma};
    use pine::runtime::{NoneCallback, VarOperate};
    let lib_info = pine::LibInfo::new(
        vec![
            bbw::declare_var(),
            sma::declare_sma_var(),
            sma::declare_stdev_var(),
        ],
        vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(BBW_SCRIPT)).unwrap();
    let data = vec![(
        "close",
        AnySeries::from_float_vec(vec![Some(200f64), Some(400f64)]),
    )];
    assert!(parser.run_with_data(data, None).is_ok());
    // Var indices presumably follow declaration order: 0 = m1, 2 = m2.
    let result1 = pine_ref_to_f64_series(parser.move_var(VarIndex::new(0, 0)));
    let result2 = pine_ref_to_f64_series(parser.move_var(VarIndex::new(2, 0)));
    // floor() sidesteps floating-point rounding differences between the paths.
    assert_eq!(
        result1.unwrap().index_value(1).unwrap().unwrap().floor(),
        result2.unwrap().index_value(1).unwrap().unwrap().floor()
    );
}
// Pine script computing the Chande Momentum Oscillator via the built-in `cmo`
// and via a scripted reimplementation; the test checks both agree.
const CMO_SCRIPT: &str = "
m1 = cmo(close, 2)
// the same on pine
f_cmo(src, length) =>
    float mom = change(src)
    float sm1 = sum((mom >= 0) ? mom : 0.0, length)
    float sm2 = sum((mom >= 0) ? 0.0 : -mom, length)
    100 * (sm1 - sm2) / (sm1 + sm2)
m2 = f_cmo(close, 2)
";

// Compares m1 (built-in cmo) with m2 (scripted cmo) on three bars.
#[test]
fn cmo_test() {
    use pine::ast::stat_expr_types::VarIndex;
    use pine::helper::pine_ref_to_f64_series;
    use pine::libs::{change, cmo, sum};
    use pine::runtime::{NoneCallback, VarOperate};
    let lib_info = pine::LibInfo::new(
        vec![
            cmo::declare_var(),
            change::declare_change_var(),
            sum::declare_var(),
        ],
        vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(CMO_SCRIPT)).unwrap();
    let data = vec![(
        "close",
        AnySeries::from_float_vec(vec![Some(200f64), Some(400f64), Some(200f64)]),
    )];
    assert!(parser.run_with_data(data, None).is_ok());
    // Var indices presumably follow declaration order: 0 = m1, 2 = m2.
    let result1 = pine_ref_to_f64_series(parser.move_var(VarIndex::new(0, 0)));
    let result2 = pine_ref_to_f64_series(parser.move_var(VarIndex::new(2, 0)));
    println!("result {:?} {:?}", result1, result2);
    // floor() sidesteps floating-point rounding differences between the paths.
    assert_eq!(
        result1.unwrap().index_value(1).unwrap().unwrap().floor(),
        result2.unwrap().index_value(1).unwrap().unwrap().floor()
    );
}
// Pine script computing Keltner Channels via the built-in `kc` and via a
// scripted reimplementation; the test checks all three channel lines agree.
const KC_SCRIPT: &str = "
[middle, upper, lower] = kc(close, 3, 4, false)
// the same on pine
f_kc(src, length, mult, useTrueRange) =>
    float basis = ema(src, length)
    float range = (useTrueRange) ? tr : (high - low)
    float rangeEma = ema(range, length)
    [basis, basis + rangeEma * mult, basis - rangeEma * mult]
[pineMiddle, pineUpper, pineLower] = f_kc(close, 3, 4, false)
";

// Compares each built-in channel line with its scripted counterpart.
#[test]
fn kc_test() {
    use pine::ast::stat_expr_types::VarIndex;
    use pine::helper::pine_ref_to_f64_series;
    use pine::libs::{ema, kc, tr};
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![kc::declare_var(), ema::declare_ema_var(), tr::declare_var()],
        vec![
            ("close", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("high", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("low", SyntaxType::Series(SimpleSyntaxType::Float)),
        ],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(KC_SCRIPT)).unwrap();
    let data = vec![
        (
            "close",
            AnySeries::from_float_vec(vec![Some(10f64), Some(20f64)]),
        ),
        (
            "high",
            AnySeries::from_float_vec(vec![Some(10f64), Some(20f64)]),
        ),
        (
            "low",
            AnySeries::from_float_vec(vec![Some(10f64), Some(20f64)]),
        ),
    ];
    assert!(parser.run_with_data(data, None).is_ok());
    // Asserts the series at var indices x and y carry the same floored value;
    // indices presumably follow declaration order (0..2 built-in, 4..6 scripted).
    let is_equal = |parser: &mut pine::PineScript, x, y| {
        let result1 = pine_ref_to_f64_series(parser.move_var(VarIndex::new(x, 0)));
        let result2 = pine_ref_to_f64_series(parser.move_var(VarIndex::new(y, 0)));
        println!("result {:?} {:?}", result1, result2);
        assert_eq!(
            result1.unwrap().index_value(1).unwrap().unwrap().floor(),
            result2.unwrap().index_value(1).unwrap().unwrap().floor()
        );
    };
    is_equal(&mut parser, 0, 4);
    is_equal(&mut parser, 1, 5);
    is_equal(&mut parser, 2, 6);
}
// Pine script computing Keltner Channel Width via the built-in `kcw` and via
// a scripted reimplementation; the test checks both agree.
const KCW_SCRIPT: &str = "
m1 = kcw(close, 5, 4, true)
// the same on pine
f_kcw(src, length, mult, useTrueRange) =>
    float basis = ema(src, length)
    float range = (useTrueRange) ? tr : (high - low)
    float rangeEma = ema(range, length)
    ((basis + rangeEma * mult) - (basis - rangeEma * mult)) / basis
m2 = f_kcw(close, 5, 4, true)
";

// Compares m1 (built-in kcw) with m2 (scripted kcw) on two bars.
#[test]
fn kcw_test() {
    use pine::ast::stat_expr_types::VarIndex;
    use pine::helper::pine_ref_to_f64_series;
    use pine::libs::{ema, kcw, tr};
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![
            kcw::declare_var(),
            ema::declare_ema_var(),
            tr::declare_var(),
        ],
        vec![
            ("close", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("high", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("low", SyntaxType::Series(SimpleSyntaxType::Float)),
        ],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(KCW_SCRIPT)).unwrap();
    let data = vec![
        (
            "close",
            AnySeries::from_float_vec(vec![Some(10f64), Some(20f64)]),
        ),
        (
            "high",
            AnySeries::from_float_vec(vec![Some(10f64), Some(20f64)]),
        ),
        (
            "low",
            AnySeries::from_float_vec(vec![Some(10f64), Some(20f64)]),
        ),
    ];
    assert!(parser.run_with_data(data, None).is_ok());
    // Asserts the series at var indices x and y carry the same floored value.
    let is_equal = |parser: &mut pine::PineScript, x, y| {
        let result1 = pine_ref_to_f64_series(parser.move_var(VarIndex::new(x, 0)));
        let result2 = pine_ref_to_f64_series(parser.move_var(VarIndex::new(y, 0)));
        println!("result {:?} {:?}", result1, result2);
        assert_eq!(
            result1.unwrap().index_value(1).unwrap().unwrap().floor(),
            result2.unwrap().index_value(1).unwrap().unwrap().floor()
        );
    };
    is_equal(&mut parser, 0, 2);
}
// Pine script destructuring `macd` twice, once with `_` placeholders, plus a
// bare `_` assignment; exercises tuple destructuring and the wildcard name.
const MACD_EXAMPLE_SCRIPT: &str = "
[macdLine, signalLine, histLine] = macd(close, 12, 26, 9)
[_, signalLine2, _] = macd(close, 12, 26, 9)
_ = 12
";

// Checks signalLine and signalLine2 hold the same values after the run.
#[test]
fn macd_example_test() {
    use pine::ast::stat_expr_types::VarIndex;
    use pine::helper::pine_ref_to_f64_series;
    use pine::libs::macd;
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![macd::declare_var()],
        vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(MACD_EXAMPLE_SCRIPT)).unwrap();
    let data = vec![(
        "close",
        AnySeries::from_float_vec(vec![Some(10f64), Some(20f64)]),
    )];
    assert!(parser.run_with_data(data, None).is_ok());
    // Asserts the series at var indices x and y carry the same floored value;
    // presumably 1 = signalLine and 3 = signalLine2 — confirm index layout.
    let is_equal = |parser: &mut pine::PineScript, x, y| {
        let result1 = pine_ref_to_f64_series(parser.move_var(VarIndex::new(x, 0)));
        let result2 = pine_ref_to_f64_series(parser.move_var(VarIndex::new(y, 0)));
        println!("result {:?} {:?}", result1, result2);
        assert_eq!(
            result1.unwrap().index_value(1).unwrap().unwrap().floor(),
            result2.unwrap().index_value(1).unwrap().unwrap().floor()
        );
    };
    is_equal(&mut parser, 1, 3);
}
// Pine script computing RSI via the built-in `rsi` and via a scripted
// reimplementation.
// NOTE(review): pine_rsi computes `res` but returns `rs` (the raw ratio, not
// the 100-scaled RSI), yet the test passes — confirm whether the built-in
// `rsi(src, len)` here also yields the ratio, or whether the floored values
// merely coincide on this data.
const RSI_SCRIPT: &str = "
m1 = rsi(close, 2)
// same on pine, but less efficient
pine_rsi(x, y) =>
    u = max(x - x[1], 0) // upward change
    d = max(x[1] - x, 0) // downward change
    rs = rma(u, y) / rma(d, y)
    res = 100 - 100 / (1 + rs)
    rs
m2 = pine_rsi(close, 2)
";

// Compares m1 (built-in rsi) with m2 (scripted pine_rsi) on three bars.
#[test]
fn rsi_test() {
    use pine::ast::stat_expr_types::VarIndex;
    use pine::helper::pine_ref_to_f64_series;
    use pine::libs::{ema, max, rsi};
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![
            ema::declare_rma_var(),
            max::declare_max_var(),
            rsi::declare_var(),
        ],
        vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(RSI_SCRIPT)).unwrap();
    let data = vec![(
        "close",
        AnySeries::from_float_vec(vec![Some(20f64), Some(10f64), Some(5f64)]),
    )];
    assert!(parser.run_with_data(data, None).is_ok());
    // Asserts the series at var indices x and y carry the same floored value.
    let is_equal = |parser: &mut pine::PineScript, x, y| {
        let result1 = pine_ref_to_f64_series(parser.move_var(VarIndex::new(x, 0)));
        let result2 = pine_ref_to_f64_series(parser.move_var(VarIndex::new(y, 0)));
        println!("result {:?} {:?}", result1, result2);
        assert_eq!(
            result1.unwrap().index_value(1).unwrap().unwrap().floor(),
            result2.unwrap().index_value(1).unwrap().unwrap().floor()
        );
    };
    is_equal(&mut parser, 0, 2);
}
// Hand-written RSI in Pine built from rma/max/min/change; used to exercise
// NA-heavy input (leading `None` bars).
const MY_RSI_SCRIPT: &str = r#"
src = close
len = 14
up = rma(max(change(src), 0), len)
down = rma(-min(change(src), 0), len)
rsia = down == 0 ? 100 : up == 0 ? 0 : 100 - (100 / (1 + up / down))
"#;

// Smoke test: MY_RSI_SCRIPT must parse and run on data that is mostly `na`.
#[test]
fn myrsi_test() {
    // Only names actually used are imported; the original also pulled in
    // VarIndex, pine_ref_to_f64_series and rsi, which were unused and
    // triggered `unused_imports` warnings.
    use pine::libs::{change, ema, max};
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![
            change::declare_change_var(),
            max::declare_max_var(),
            max::declare_min_var(),
            ema::declare_rma_var(),
        ],
        vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(MY_RSI_SCRIPT)).unwrap();
    // Four leading `None` bars force the rma/change chain through its
    // missing-value path before real data arrives.
    let data = vec![(
        "close",
        AnySeries::from_float_vec(vec![None, None, None, None, Some(0f64), Some(0f64)]),
    )];
    assert!(parser.run_with_data(data, None).is_ok());
}
// Pine script computing the Money Flow Index via the built-in `mfi` and via a
// scripted reimplementation; the test checks both agree.
const MFI_SCRIPT: &str = "
m1 = mfi(close, 2)
// the same on pine
f_mfi(src, length) =>
    float upper = sum(volume * (change(src) <= 0.0 ? 0.0 : src), length)
    float lower = sum(volume * (change(src) >= 0.0 ? 0.0 : src), length)
    if na(lower)
        float res = na
        res
    else
        rsi(upper, lower)
m2 = f_mfi(close, 2)
";

// Compares m1 (built-in mfi) with m2 (scripted mfi) with unit volume.
#[test]
fn mfi_test() {
    use pine::ast::stat_expr_types::VarIndex;
    use pine::helper::pine_ref_to_f64_series;
    use pine::libs::{change, mfi, na, rsi, sum};
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![
            change::declare_change_var(),
            mfi::declare_var(),
            na::declare_var(),
            rsi::declare_var(),
            sum::declare_var(),
        ],
        vec![
            ("close", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("volume", SyntaxType::Series(SimpleSyntaxType::Int)),
        ],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(MFI_SCRIPT)).unwrap();
    let data = vec![
        (
            "close",
            AnySeries::from_float_vec(vec![Some(20f64), Some(10f64), Some(5f64)]),
        ),
        (
            "volume",
            AnySeries::from_int_vec(vec![Some(1i64), Some(1i64), Some(1i64)]),
        ),
    ];
    assert!(parser.run_with_data(data, None).is_ok());
    // Asserts the series at var indices x and y carry the same value
    // (exact equality here — no floor — since both paths share the rsi impl).
    let is_equal = |parser: &mut pine::PineScript, x, y| {
        let result1 = pine_ref_to_f64_series(parser.move_var(VarIndex::new(x, 0)));
        let result2 = pine_ref_to_f64_series(parser.move_var(VarIndex::new(y, 0)));
        assert_eq!(
            result1.unwrap().index_value(1).unwrap(),
            result2.unwrap().index_value(1).unwrap()
        );
    };
    is_equal(&mut parser, 0, 2);
}
// Full study script: computes money flow from hlc3/volume and plots the
// built-in `rsi(upper, lower)` next to a scripted pine_rsi; the two plotted
// series must match. Also exercises study(), input() and format.price.
const MFI2_SCRIPT: &str = r#"
study(title="Money Flow", shorttitle="MFI", format=format.price, precision=2)
length = input(title="Length", type=input.integer, defval=14, minval=1, maxval=2000)
src = hlc3
upper = sum(volume * (change(src) <= 0 ? 0 : src), length)
lower = sum(volume * (change(src) >= 0 ? 0 : src), length)
pine_rsi(x, y) =>
    // u = max(x - x[1], 0) // upward change
    // d = max( y[1] - y, 0) // downward change
    rs = x / y // rma(u, 2) / rma(d, 2)
    res = 100 - 100 / (1 + rs)
    res
mf = rsi(upper, lower)
mf2 = pine_rsi(upper, lower)
plot(mf)
plot(mf2)
"#;

// Runs MFI2_SCRIPT on three OHLCV bars and checks the two plot outputs match.
#[test]
fn mfi2_test() {
    use pine::ast::stat_expr_types::VarIndex;
    use pine::helper::pine_ref_to_f64_series;
    use pine::libs::{change, format, hlc3, input, mfi, na, plot, rsi, study, sum};
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![
            study::declare_var(),
            format::declare_var(),
            input::declare_var(),
            hlc3::declare_var(),
            change::declare_change_var(),
            mfi::declare_var(),
            na::declare_var(),
            rsi::declare_var(),
            sum::declare_var(),
            plot::declare_var(),
        ],
        vec![
            ("high", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("low", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("close", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("volume", SyntaxType::Series(SimpleSyntaxType::Int)),
        ],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(MFI2_SCRIPT)).unwrap();
    let data = vec![
        (
            "high",
            AnySeries::from_float_vec(vec![Some(30f64), Some(20f64), Some(1f64)]),
        ),
        (
            "low",
            AnySeries::from_float_vec(vec![Some(10f64), Some(10f64), Some(5f64)]),
        ),
        (
            "close",
            AnySeries::from_float_vec(vec![Some(20f64), Some(10f64), Some(5f64)]),
        ),
        (
            "volume",
            AnySeries::from_int_vec(vec![Some(1i64), Some(1i64), Some(1i64)]),
        ),
    ];
    let out_data = parser.run_with_data(data, None);
    assert!(out_data.is_ok());
    // plot(mf) and plot(mf2) are output slots 0 and 1; their series must match.
    let data_list = out_data.unwrap().data_list;
    assert_eq!(
        data_list[0].as_ref().unwrap().series[0],
        data_list[1].as_ref().unwrap().series[0]
    );
}
// Pine script plotting the built-in `tsi` next to a scripted double-EMA
// reimplementation; the two plotted series must match.
const TSI_SCRIPT: &str = r#"
pine_tsi(x, s, l) =>
    v1 = ema(ema(x - x[1], l), s)
    v2 = ema(ema(abs(x - x[1]), l), s)
    v1 / v2
plot(tsi(close, 2, 2))
plot(pine_tsi(close, 2, 2))
"#;

// Runs TSI_SCRIPT on three bars and checks the two plot outputs match.
#[test]
fn tsi_test() {
    use pine::libs::{abs, ema, plot, tsi};
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![
            tsi::declare_var(),
            ema::declare_ema_var(),
            abs::declare_var(),
            plot::declare_var(),
        ],
        vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(TSI_SCRIPT)).unwrap();
    let data = vec![(
        "close",
        AnySeries::from_float_vec(vec![Some(20f64), Some(10f64), Some(5f64)]),
    )];
    let out_data = parser.run_with_data(data, None);
    assert!(out_data.is_ok());
    // The two plot calls produce output slots 0 and 1.
    let data_list = out_data.unwrap().data_list;
    assert_eq!(
        data_list[0].as_ref().unwrap().series[0],
        data_list[1].as_ref().unwrap().series[0]
    );
}
// Pine script plotting the built-in `stoch` next to a scripted
// highest/lowest reimplementation; the two plotted series must match.
const STOCH_SCRIPT: &str = r#"
pine_stoch(close, high, low, length) =>
    100 * (close - lowest(low, length)) / (highest(high, length) - lowest(low, length))
plot(stoch(close, high, low, 2))
plot(pine_stoch(close, high, low, 2))
"#;

// Runs STOCH_SCRIPT on three OHLC bars and checks the two plot outputs match.
#[test]
fn stoch_test() {
    use pine::libs::{highest, lowest, plot, stoch};
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![
            stoch::declare_var(),
            lowest::declare_var(),
            highest::declare_var(),
            plot::declare_var(),
        ],
        vec![
            ("close", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("high", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("low", SyntaxType::Series(SimpleSyntaxType::Float)),
        ],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(STOCH_SCRIPT)).unwrap();
    let data = vec![
        (
            "close",
            AnySeries::from_float_vec(vec![Some(20f64), Some(10f64), Some(5f64)]),
        ),
        (
            "high",
            AnySeries::from_float_vec(vec![Some(30f64), Some(10f64), Some(10f64)]),
        ),
        (
            "low",
            AnySeries::from_float_vec(vec![Some(10f64), Some(5f64), Some(5f64)]),
        ),
    ];
    let out_data = parser.run_with_data(data, None);
    assert!(out_data.is_ok());
    // The two plot calls produce output slots 0 and 1.
    let data_list = out_data.unwrap().data_list;
    assert_eq!(
        data_list[0].as_ref().unwrap().series[0],
        data_list[1].as_ref().unwrap().series[0]
    );
}
// Pine script computing a volume-weighted MA via the built-in `vwma` and via
// a scripted sma-based reimplementation; the test checks both agree.
// NOTE(review): the const/test are named SWMA but actually exercise vwma —
// possibly a leftover name; confirm before renaming.
const SWMA_SCRIPT: &'static str = "
m1 = vwma(close, 15)
// same on pine, but less efficient
pine_vwma(x, y) =>
    sma(x * volume, y) / sma(volume, y)
m2 = pine_vwma(close, 15)
";

// Compares m1 (built-in vwma) with m2 (scripted vwma) with unit volume.
#[test]
fn swma_test() {
    use pine::ast::stat_expr_types::VarIndex;
    use pine::helper::pine_ref_to_f64_series;
    use pine::libs::{sma, vwma};
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![vwma::declare_var(), sma::declare_sma_var()],
        vec![
            ("close", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("volume", SyntaxType::Series(SimpleSyntaxType::Int)),
        ],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(SWMA_SCRIPT)).unwrap();
    let data = vec![
        (
            "close",
            AnySeries::from_float_vec(vec![Some(20f64), Some(10f64), Some(5f64), Some(10f64)]),
        ),
        (
            "volume",
            AnySeries::from_int_vec(vec![Some(1i64), Some(1i64), Some(1i64), Some(1i64)]),
        ),
    ];
    assert!(parser.run_with_data(data, None).is_ok());
    // Asserts the series at var indices x and y carry the same value.
    let is_equal = |parser: &mut pine::PineScript, x, y| {
        let result1 = pine_ref_to_f64_series(parser.move_var(VarIndex::new(x, 0)));
        let result2 = pine_ref_to_f64_series(parser.move_var(VarIndex::new(y, 0)));
        assert_eq!(
            result1.unwrap().index_value(1).unwrap(),
            result2.unwrap().index_value(1).unwrap()
        );
    };
    is_equal(&mut parser, 0, 2);
}
// Pine script computing the Directional Movement Index via a scripted
// dirmov/myadx pair and via the built-in `dmi`; the test checks the three
// outputs (plus, minus, adx) agree.
const DMI_SCRIPT: &'static str = r#"
adxlen = 2
dilen = 2
dirmov(len) =>
    up = change(high)
    down = -change(low)
    truerange = rma(tr, len)
    plus = fixnan(100 * rma(up > down and up > 0 ? up : 0, len) / truerange)
    minus = fixnan(100 * rma(down > up and down > 0 ? down : 0, len) / truerange)
    [plus, minus]
myadx(dilen, adxlen) =>
    [plus, minus] = dirmov(dilen)
    sum = plus + minus
    adx = 100 * rma(abs(plus - minus) / (sum == 0 ? 1 : sum), adxlen)
    [plus, minus, adx]
[sig, up, down] = myadx(dilen, adxlen)
[diplus, diminus, adx] = dmi(dilen, adxlen)
"#;

// Compares each scripted DMI output with its built-in counterpart.
#[test]
fn dmi_test() {
    use pine::ast::stat_expr_types::VarIndex;
    use pine::helper::pine_ref_to_f64_series;
    use pine::libs::{abs, change, dmi, ema, fixnan, plot, tr};
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![
            change::declare_change_var(),
            ema::declare_rma_var(),
            tr::declare_var(),
            fixnan::declare_var(),
            abs::declare_var(),
            dmi::declare_var(),
            plot::declare_var(),
        ],
        vec![
            ("close", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("high", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("low", SyntaxType::Series(SimpleSyntaxType::Float)),
        ],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(DMI_SCRIPT)).unwrap();
    let data = vec![
        (
            "close",
            AnySeries::from_float_vec(vec![Some(20f64), Some(10f64), Some(5f64), Some(10f64)]),
        ),
        (
            "high",
            AnySeries::from_float_vec(vec![Some(30f64), Some(10f64), Some(10f64), Some(20f64)]),
        ),
        (
            "low",
            AnySeries::from_float_vec(vec![Some(10f64), Some(10f64), Some(5f64), Some(8f64)]),
        ),
    ];
    assert!(parser.run_with_data(data, None).is_ok());
    // Asserts the series at var indices x and y carry the same value;
    // indices presumably follow declaration order: 4..6 scripted [sig, up,
    // down], 7..9 built-in [diplus, diminus, adx] — confirm layout.
    let is_equal = |parser: &mut pine::PineScript, x, y| {
        let result1 = pine_ref_to_f64_series(parser.move_var(VarIndex::new(x, 0)));
        let result2 = pine_ref_to_f64_series(parser.move_var(VarIndex::new(y, 0)));
        assert_eq!(
            result1.unwrap().index_value(1).unwrap(),
            result2.unwrap().index_value(1).unwrap()
        );
    };
    is_equal(&mut parser, 4, 7);
    is_equal(&mut parser, 5, 8);
    is_equal(&mut parser, 6, 9);
}
// Pine script computing the Hull MA via the built-in `hma` and via the
// classic WMA formula; the test checks both agree.
const HMA_SCRIPT: &'static str = r#"
src = close
length = 2
hmaBuildIn = hma(src, length)
// X=2*WMA(C,ROUND(N/2))-WMA(C,N);
// HULLMA=WMA(X,ROUND(SQRT(N)));
x = 2 * wma(src, floor(length / 2)) - wma(src, length)
hullma = wma(x, round(sqrt(length)))
"#;

// Compares hmaBuildIn with the scripted hullma on four bars.
#[test]
fn hma_test() {
    use pine::ast::stat_expr_types::VarIndex;
    use pine::helper::pine_ref_to_f64_series;
    use pine::libs::{ceil, cos, hma, sma};
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![
            hma::declare_var(),
            sma::declare_wma_var(),
            ceil::declare_round_var(),
            ceil::declare_floor_var(),
            cos::declare_sqrt_var(),
        ],
        vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(HMA_SCRIPT)).unwrap();
    let data = vec![(
        "close",
        AnySeries::from_float_vec(vec![Some(20f64), Some(10f64), Some(5f64), Some(10f64)]),
    )];
    assert!(parser.run_with_data(data, None).is_ok());
    // Asserts the series at var indices x and y carry the same value;
    // presumably 2 = hmaBuildIn and 4 = hullma — confirm index layout.
    let is_equal = |parser: &mut pine::PineScript, x, y| {
        let result1 = pine_ref_to_f64_series(parser.move_var(VarIndex::new(x, 0)));
        let result2 = pine_ref_to_f64_series(parser.move_var(VarIndex::new(y, 0)));
        assert_eq!(
            result1.unwrap().index_value(1).unwrap(),
            result2.unwrap().index_value(1).unwrap()
        );
    };
    is_equal(&mut parser, 2, 4);
}
// Pine script plotting the built-in `stdev` next to a scripted
// population-style reimplementation; the two plotted series must match.
const STDEV_SCRIPT: &str = "
plot(stdev(close, 2))
pstdev(Series, Period) =>
    mean = sum(Series, Period) / Period
    summation = 0.0
    for i=0 to Period-1
        sampleMinusMean = nz(Series[i]) - mean
        summation := summation + sampleMinusMean * sampleMinusMean
    sqrt(summation / Period)
plot(pstdev(close, 2))
";

// Runs STDEV_SCRIPT on three bars and checks the last values of the two
// plotted series match.
#[test]
fn stdev_test() {
    // Only names actually used are imported; the original also pulled in
    // `abs` and `ema`, which were unused and triggered `unused_imports`
    // warnings.
    use pine::libs::{cos, nz, plot, sma, sum};
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![
            sma::declare_stdev_var(),
            sum::declare_var(),
            nz::declare_var(),
            cos::declare_sqrt_var(),
            plot::declare_var(),
        ],
        vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(STDEV_SCRIPT)).unwrap();
    let data = vec![(
        "close",
        AnySeries::from_float_vec(vec![Some(20f64), Some(10f64), Some(5f64)]),
    )];
    let out_data = parser.run_with_data(data, None);
    assert!(out_data.is_ok());
    // The two plot calls produce output slots 0 and 1.
    let data_list = out_data.unwrap().data_list;
    assert_eq!(
        data_list[0].as_ref().unwrap().series[0].last(),
        data_list[1].as_ref().unwrap().series[0].last()
    );
}
// Pine script plotting the CCI formula written out with sma/dev next to the
// built-in `cci`; the two plotted series must match.
const CCI_SCRIPT: &str = "
length = 2
src = close
cci1 = (src - sma(src, length)) / (0.015 * dev(src, length))
plot(cci1)
plot(cci(src, length))
";

// Runs CCI_SCRIPT on three bars and checks the last values of the two
// plotted series match.
#[test]
fn cci_test() {
    use pine::libs::{cci, plot, sma};
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![
            sma::declare_dev_var(),
            sma::declare_sma_var(),
            cci::declare_var(),
            plot::declare_var(),
        ],
        vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(CCI_SCRIPT)).unwrap();
    let data = vec![(
        "close",
        AnySeries::from_float_vec(vec![Some(20f64), Some(10f64), Some(5f64)]),
    )];
    let out_data = parser.run_with_data(data, None);
    assert!(out_data.is_ok());
    // The two plot calls produce output slots 0 and 1.
    let data_list = out_data.unwrap().data_list;
    assert_eq!(
        data_list[0].as_ref().unwrap().series[0].last(),
        data_list[1].as_ref().unwrap().series[0].last()
    );
}
// Pine script plotting the built-in `correlation` next to the covariance /
// (stdev * stdev) formula; the two plotted series must match.
const CORRELATION_SCRIPT: &str = "
plot(correlation(close,open,2))
conv = sma(close * open, 2) - sma(close, 2) * sma(open, 2)
s = conv / (stdev(close, 2) * stdev(open, 2))
plot(s)
";

// Runs CORRELATION_SCRIPT on three bars of close/open and checks the last
// values of the two plotted series match.
#[test]
fn correlation_test() {
    use pine::libs::{correlation, plot, sma};
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![
            sma::declare_stdev_var(),
            sma::declare_sma_var(),
            correlation::declare_var(),
            plot::declare_var(),
        ],
        vec![
            ("close", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("open", SyntaxType::Series(SimpleSyntaxType::Float)),
        ],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(CORRELATION_SCRIPT)).unwrap();
    let data = vec![
        (
            "close",
            AnySeries::from_float_vec(vec![Some(20f64), Some(10f64), Some(5f64)]),
        ),
        (
            "open",
            AnySeries::from_float_vec(vec![Some(10f64), Some(20f64), Some(5f64)]),
        ),
    ];
    let out_data = parser.run_with_data(data, None);
    assert!(out_data.is_ok());
    // The two plot calls produce output slots 0 and 1.
    let data_list = out_data.unwrap().data_list;
    assert_eq!(
        data_list[0].as_ref().unwrap().series[0].last(),
        data_list[1].as_ref().unwrap().series[0].last()
    );
}
// Pine script exercising plot() with named arguments: title, style, a nested
// conditional color expression using hex color literals, and opacity.
const MYPLOT_SCRIPT: &'static str = "
// Plot colors
col_grow_above = #26A69A
col_grow_below = #FFCDD2
col_fall_above = #B2DFDB
col_fall_below = #EF5350
plot(close, title='Histogram',
     style=plot.style_columns,
     color=(close>=0 ?
            (close[1] < close ? col_grow_above : col_fall_above) :
            (close[1] < close ? col_grow_below : col_fall_below) ),
     opacity=0)
";

// Smoke test: MYPLOT_SCRIPT must parse and run without error.
#[test]
fn myplot_test() {
    use pine::libs::plot;
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![plot::declare_var()],
        vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(MYPLOT_SCRIPT)).unwrap();
    let data = vec![(
        "close",
        AnySeries::from_float_vec(vec![Some(20f64), Some(10f64), Some(5f64), Some(10f64)]),
    )];
    assert!(parser.run_with_data(data, None).is_ok());
}
// Pine script declaring a source-typed input with `close` as its default.
const INPUT_SOURCE_SCRIPT: &'static str = "
src = input(title='Source', type=input.source, defval=close)
";

// Runs the script while supplying the input value explicitly via
// InputVal::Source; must run without error.
#[test]
fn input_source_test() {
    use pine::libs::input;
    use pine::runtime::output::InputVal;
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![input::declare_var()],
        vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(INPUT_SOURCE_SCRIPT)).unwrap();
    let data = vec![(
        "close",
        AnySeries::from_float_vec(vec![Some(20f64), Some(10f64), Some(5f64), Some(10f64)]),
    )];
    assert!(parser
        .run(
            vec![Some(InputVal::Source(String::from("close")))],
            data,
            None
        )
        .is_ok());
}
// Pine script referencing every built-in data source (some more than once)
// so the generated IO info can be checked for the deduplicated source list.
const SRC_SCRIPT: &'static str = "
m = input(1, 'hello', 'int')
plot(close + m)
plot(close)
plot(high)
plot(high)
plot(open)
plot(low)
plot(time)
plot(volume)
plot(bar_index)
";

// Checks gen_io_info() reports the set of input sources the script actually
// references, deduplicated (high is plotted twice but listed once).
// Renamed from `inpur_srcs_test` — the original test name was misspelled;
// test functions have no callers, so the rename is safe.
#[test]
fn input_srcs_test() {
    use pine::libs::{input, plot, time};
    use pine::runtime::output::InputSrc;
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![
            input::declare_var(),
            plot::declare_var(),
            time::declare_var(),
        ],
        vec![
            ("close", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("high", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("low", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("open", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("volume", SyntaxType::Series(SimpleSyntaxType::Int)),
            ("_time", SyntaxType::Series(SimpleSyntaxType::Int)),
            ("bar_index", SyntaxType::Series(SimpleSyntaxType::Int)),
        ],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(SRC_SCRIPT)).unwrap();
    let io_info = parser.gen_io_info().unwrap();
    assert_eq!(
        io_info.get_input_srcs(),
        &vec![InputSrc::new(
            None,
            vec![
                String::from("close"),
                String::from("high"),
                String::from("open"),
                String::from("low"),
                String::from("time"),
                String::from("volume"),
            ]
        )]
    );
}
// Minimal script containing only a study() declaration.
const STUDY_ONLY_SCRIPT: &'static str = r#"
study(title="CCC", shorttitle="CCC")
"#;

// Checks gen_io_info() reports the study metadata and the script runs with no
// inputs and no data.
#[test]
fn study_only_test() {
    use pine::libs::study;
    use pine::runtime::output::{ScriptPurpose, StudyScript};
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![study::declare_var()],
        vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(STUDY_ONLY_SCRIPT)).unwrap();
    let io_info = parser.gen_io_info().unwrap();
    assert_eq!(
        io_info.get_script_type(),
        &Some(ScriptPurpose::Study(StudyScript {
            title: String::from("CCC"),
            shorttitle: Some(String::from("CCC")),
            overlay: None,
            format: None,
            precision: None
        }))
    );
    // let data = vec![("close", AnySeries::from_float_vec(vec![Some(20f64)]))];
    // Run once and reuse the result; the original executed the script twice
    // (once for the debug print, once for the assertion).
    let run_result = parser.run(vec![], vec![], None);
    println!("res {:?}", run_result);
    assert!(run_result.is_ok());
}
// Minimal script plotting the integer `volume` input.
const VOLUME_SCRIPT: &'static str = r#"
plot(volume)
"#;

// Checks that an int input series is plotted and surfaces as floats in the
// output data.
#[test]
fn volume_test() {
    use pine::libs::plot;
    // Only OutputData is used; the original also imported ScriptPurpose and
    // StudyScript, which were unused and triggered `unused_imports` warnings.
    use pine::runtime::output::OutputData;
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![plot::declare_var()],
        vec![("volume", SyntaxType::Series(SimpleSyntaxType::Int))],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(VOLUME_SCRIPT)).unwrap();
    assert!(parser.gen_io_info().is_ok());
    // let data = vec![("close", AnySeries::from_float_vec(vec![Some(20f64)]))];
    // First run with no data exercises the empty-input path.
    println!("res {:?}", parser.run(vec![], vec![], None));
    let result = parser.run_with_data(
        vec![("volume", AnySeries::from_int_vec(vec![Some(20i64)]))],
        None,
    );
    assert!(result.is_ok());
    // The int input 20 must come back as the float 20.0 in the plot output.
    assert_eq!(
        result.unwrap().data_list,
        vec![Some(OutputData::new(vec![vec![Some(20.0f64)]]))]
    );
}
// Pine script reading dayofmonth/dayofweek and comparing dayofweek against
// the named weekday constants.
const TIME_SCRIPT: &'static str = r#"
int a1 = dayofmonth
int a2 = dayofweek
float a3 = dayofweek==dayofweek.monday ? 1.0 :2.0
float a4 = dayofweek==dayofweek.tuesday ? 1.0 :2.0
plot(a1)
plot(a2)
plot(a3)
plot(a4)
"#;

// Runs TIME_SCRIPT on two epoch-millisecond timestamps with an
// Asia/Shanghai symbol and checks the derived calendar values.
#[test]
fn dayofweek_test() {
    use pine::libs::{plot, year};
    // Only OutputData and SymbolInfo are used; the original also imported
    // ScriptPurpose and StudyScript, which were unused and triggered
    // `unused_imports` warnings.
    use pine::runtime::output::{OutputData, SymbolInfo};
    use pine::runtime::NoneCallback;
    use std::rc::Rc;
    let lib_info = pine::LibInfo::new(
        vec![
            plot::declare_var(),
            year::declare_dayofmonth_var(),
            year::declare_dayofweek_var(),
        ],
        vec![("_time", SyntaxType::Series(SimpleSyntaxType::Int))],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(TIME_SCRIPT)).unwrap();
    assert!(parser.gen_io_info().is_ok());
    // let data = vec![("close", AnySeries::from_float_vec(vec![Some(20f64)]))];
    // First run with no data exercises the empty-input path.
    println!("res {:?}", parser.run(vec![], vec![], None));
    let result = parser.run_with_data(
        vec![(
            "_time",
            AnySeries::from_int_vec(vec![Some(1587978379382i64), Some(1588003200000)]),
        )],
        Some(Rc::new(SymbolInfo {
            symbol_type: String::from("future"),
            timezone: String::from("Asia/Shanghai"),
            ticker: String::from("BATS:MSFT"),
            session: String::from("regular"),
            trade_start: String::from(""),
            trade_end: String::from(""),
            root: Some(String::from("le")),
            currency: String::from("USD"),
            description: String::from("des"),
            mintick: 1f64,
        })),
    );
    assert!(result.is_ok());
    // Expected values are the day-of-month / day-of-week of the two
    // timestamps interpreted in the Asia/Shanghai timezone.
    assert_eq!(
        result.unwrap().data_list,
        vec![
            Some(OutputData::new(vec![vec![Some(27f64), Some(28f64)]])),
            Some(OutputData::new(vec![vec![Some(2f64), Some(3f64)]])),
            Some(OutputData::new(vec![vec![Some(1f64), Some(2f64)]])),
            Some(OutputData::new(vec![vec![Some(2f64), Some(1f64)]]))
        ]
    );
}
// VWAP-style script: daily-anchored cumulative sum of typical price * volume
// over cumulative volume, restarting whenever the "D" time bucket changes.
const RUN1_SCRIPT: &'static str = r#"
study(title="VWAPG", shorttitle="VWAPG")
src = (high + low + open)/3
t = time("D")
start = na(t[1]) or t > t[1]
sumSrc = src * volume
sumVol = volume
sumSrc := start ? sumSrc : sumSrc + sumSrc[1]
sumVol := start ? sumVol : sumVol + sumVol[1]
// You can use built-in vwap() function instead.
plot(sumSrc / sumVol, title="VWAPG", color=color.blue)
"#;
// Checks that gen_io_info groups all five inputs into a single InputSrc
// (with the internal `_time` input surfacing as "time"), then runs one bar.
#[test]
fn run1_test() {
    use pine::libs::{color, na, plot, study, time};
    use pine::runtime::output::InputSrc;
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![
            study::declare_var(),
            color::declare_var(),
            na::declare_var(),
            plot::declare_var(),
            time::declare_var(),
        ],
        vec![
            ("high", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("low", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("open", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("volume", SyntaxType::Series(SimpleSyntaxType::Int)),
            ("_time", SyntaxType::Series(SimpleSyntaxType::Int)),
        ],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(RUN1_SCRIPT)).unwrap();
    let io_info = parser.gen_io_info().unwrap();
    println!("io info {:?}", io_info);
    assert_eq!(
        io_info.get_input_srcs(),
        &vec![InputSrc::new(
            None,
            vec![
                String::from("high"),
                String::from("low"),
                String::from("open"),
                String::from("time"),
                String::from("volume")
            ]
        )]
    );
    let data = vec![
        ("high", AnySeries::from_float_vec(vec![Some(20f64)])),
        ("low", AnySeries::from_float_vec(vec![Some(20f64)])),
        ("open", AnySeries::from_float_vec(vec![Some(20f64)])),
        ("time", AnySeries::from_int_vec(vec![Some(20i64)])),
        ("volume", AnySeries::from_int_vec(vec![Some(20i64)])),
    ];
    assert!(parser.run_with_data(data, None).is_ok());
}
// Script taking an EMA of the log of close.
const EMA2_SCRIPT: &str = r#"
out = ema(log(close), 2)
plot(out, color=color.maroon, title="TRIX")
"#;
// Runs the EMA-of-log script twice on the same parser instance: first with a
// single zero close (degenerate log(0) input), then with a normal series.
#[test]
fn ema2_test() {
    use pine::libs::{color, cos, ema};
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![
            cos::declare_log_var(),
            color::declare_var(),
            ema::declare_ema_var(),
            plot::declare_var(),
        ],
        vec![("close", SyntaxType::Series(SimpleSyntaxType::Float))],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(EMA2_SCRIPT)).unwrap();
    let data = vec![(
        "close",
        AnySeries::from_float_vec(vec![Some(100f64), Some(101f64), Some(102f64)]),
    )];
    // close = 0 makes log(close) degenerate; the run itself must still succeed.
    let out_data = parser.run_with_data(
        vec![("close", AnySeries::from_float_vec(vec![Some(0f64)]))],
        None,
    );
    assert!(out_data.is_ok());
    let out_data = parser.run_with_data(data, None);
    println!("Now data {:?}", out_data);
    // NOTE(review): the second run's outcome is not asserted — the commented
    // line suggests an error was once expected here; confirm the intent.
    // assert!(out_data.is_err());
}
// On-balance volume: cumulative sum of sign(change(close)) * volume.
const OBV_SCRIPT: &'static str = r#"
src = close
obv = cum(sign(change(src)) * volume)
plot(obv, color=color.blue, title="OBV")
"#;
// Three bars of close/volume; verifies the cumulative OBV series, including
// that the leading na from change() contributes 0 to the running sum.
#[test]
fn obv_test() {
    use pine::libs::{change, color, cos, cum};
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![
            cos::declare_sign_var(),
            color::declare_var(),
            change::declare_change_var(),
            cum::declare_var(),
            plot::declare_var(),
        ],
        vec![
            ("close", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("volume", SyntaxType::Series(SimpleSyntaxType::Int)),
        ],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(OBV_SCRIPT)).unwrap();
    let data = vec![
        (
            "close",
            AnySeries::from_float_vec(vec![Some(100f64), Some(101f64), Some(100f64)]),
        ),
        (
            "volume",
            AnySeries::from_int_vec(vec![Some(1i64), Some(2i64), Some(3i64)]),
        ),
    ];
    // Trace per bar: change = na 1 -1 -> sign = na 1 -1 -> sign*volume =
    // na 2 -3 -> cum = 0 2 -1.
    let out_data = parser.run_with_data(data, None);
    assert!(out_data.is_ok());
    assert_eq!(
        out_data.unwrap().data_list,
        vec![Some(OutputData::new(vec![vec![
            Some(0f64),
            Some(2f64),
            Some(-1f64)
        ]]))]
    );
}
// Williams %R: 100 * (close - highest) / (highest - lowest), here with a
// user-defined function `_pr` and a lookback of 2 bars.
const WR_SCRIPT: &'static str = r#"
_pr(length) =>
    max = highest(length)
    min = lowest(length)
    100 * (close - max) / (max - min)
percentR = _pr(2)
plot(percentR, title="%R", color=#ff6d00, opacity=0)
"#;
// Three OHLC bars; the expected values confirm highest/lowest read the
// high/low series respectively (e.g. bar 0: 100*(2-4)/(4-2) = -100).
#[test]
fn wr_test() {
    use pine::libs::{highest, lowest};
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![
            highest::declare_var(),
            lowest::declare_var(),
            plot::declare_var(),
        ],
        vec![
            ("close", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("high", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("low", SyntaxType::Series(SimpleSyntaxType::Float)),
        ],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(WR_SCRIPT)).unwrap();
    let data = vec![
        (
            "close",
            AnySeries::from_float_vec(vec![Some(2f64), Some(4f64), Some(4f64)]),
        ),
        (
            "high",
            AnySeries::from_float_vec(vec![Some(4f64), Some(6f64), Some(8f64)]),
        ),
        (
            "low",
            AnySeries::from_float_vec(vec![Some(2f64), Some(4f64), Some(2f64)]),
        ),
    ];
    // Per bar: max = 4 6 8, min = 2 2 2, close-max = -2 -2 -4, max-min = 2 4 6.
    let out_data = parser.run_with_data(data, None);
    assert!(out_data.is_ok());
    assert_eq!(
        out_data.unwrap().data_list,
        vec![Some(OutputData::new(vec![vec![
            Some(-100f64),
            Some(-50f64),
            Some(-200f64 / 3f64)
        ]]))]
    );
}
// Chaikin-oscillator script. With equal short/long periods the two EMAs of
// accdist should cancel, so this exercises the accdist/ema wiring rather
// than a meaningful oscillator value. Renamed from `Chaikin_SCRIPT` to
// SCREAMING_SNAKE_CASE, consistent with every other *_SCRIPT constant.
const CHAIKIN_SCRIPT: &'static str = r#"
short = 2
long = 2
osc = ema(accdist, short) - ema(accdist, long)
plot(osc)
"#;
// Runs three OHLCV bars through the script and asserts the first plotted
// sample is populated (not na).
#[test]
fn chaikin_test() {
    use pine::libs::{accdist, ema};
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![
            ema::declare_ema_var(),
            accdist::declare_var(),
            plot::declare_var(),
        ],
        vec![
            ("close", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("high", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("low", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("volume", SyntaxType::Series(SimpleSyntaxType::Int)),
        ],
    );
    let mut parser = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    parser.parse_src(String::from(CHAIKIN_SCRIPT)).unwrap();
    let data = vec![
        (
            "close",
            AnySeries::from_float_vec(vec![Some(2f64), Some(4f64), Some(4f64)]),
        ),
        (
            "high",
            AnySeries::from_float_vec(vec![Some(4f64), Some(6f64), Some(8f64)]),
        ),
        (
            "low",
            AnySeries::from_float_vec(vec![Some(2f64), Some(4f64), Some(2f64)]),
        ),
        (
            "volume",
            AnySeries::from_int_vec(vec![Some(2i64), Some(4i64), Some(2i64)]),
        ),
    ];
    assert!(parser.gen_io_info().is_ok());
    let out_data = parser.run_with_data(data, None);
    assert!(out_data.is_ok());
    println!("Out data {:?}", out_data.as_ref().unwrap().data_list);
    assert!(out_data.as_ref().unwrap().data_list[0]
        .as_ref()
        .unwrap()
        .series[0][0]
        .is_some());
}
// Script with ten plot statements — stresses multi-output bookkeeping.
const MYPLOT2_SCRIPT: &'static str = r#"
plot(open)
plot(close)
plot(close)
plot(close)
plot(close)
plot(close)
plot(close)
plot(close)
plot(close)
plot(close)
"#;
// Runs the ten-plot script over 400 bars, three times on the same parser,
// and checks every plot output keeps the full 400-sample series each time.
#[test]
fn myplot2_test() {
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![plot::declare_var()],
        vec![
            ("close", SyntaxType::Series(SimpleSyntaxType::Float)),
            ("open", SyntaxType::Series(SimpleSyntaxType::Float)),
        ],
    );
    let mut runner = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    runner.parse_src(String::from(MYPLOT2_SCRIPT)).unwrap();
    // 400 bars of synthetic, monotonically increasing prices.
    let closes: Vec<Option<f64>> = (0..400).map(|m| Some(100f64 + m as f64)).collect();
    let openes: Vec<Option<f64>> = (0..400).map(|m| Some(100f64 + m as f64)).collect();
    let data = vec![
        ("close", AnySeries::from_float_vec(closes)),
        ("open", AnySeries::from_float_vec(openes)),
    ];
    assert!(runner.gen_io_info().is_ok());
    for _ in 0..3 {
        let out_data = runner.run_with_data(data.clone(), None);
        assert!(out_data.is_ok());
        println!("Out data {:?}", out_data.as_ref().unwrap().data_list);
        let data_list = &out_data.as_ref().unwrap().data_list;
        for i in 0..10 {
            let plot_output = data_list[i].as_ref().unwrap();
            assert_eq!(plot_output.series[0].len(), 400);
        }
    }
}
// Feeds dayofweek through alma() and highest() to combine calendar built-ins
// with windowed indicators.
const ALMA_TIME_SCRIPT: &'static str = r#"
plot(alma(dayofweek, 4, 0.85, 2.0))
plot(highest(dayofweek, 2))
"#;
// Drives 16 daily bars through the script and checks the final value of
// each of the two plots is populated.
#[test]
fn alma_time_test() {
    use pine::libs::{alma, highest, year};
    use pine::runtime::NoneCallback;
    let lib_info = pine::LibInfo::new(
        vec![
            plot::declare_var(),
            alma::declare_var(),
            year::declare_dayofweek_var(),
            highest::declare_var(),
        ],
        vec![("_time", SyntaxType::Series(SimpleSyntaxType::Int))],
    );
    let mut runner = pine::PineScript::new_with_libinfo(lib_info, Some(&NoneCallback()));
    runner.parse_src(String::from(ALMA_TIME_SCRIPT)).unwrap();
    // One bar per day: timestamps 24h (in ms) apart, starting just past epoch.
    let times: Vec<Option<i64>> = (0..16i64)
        .map(|day| Some(100i64 + day * 1000 * 3600 * 24))
        .collect();
    let data = vec![("_time", AnySeries::from_int_vec(times))];
    assert!(runner.gen_io_info().is_ok());
    let out_data = runner.run_with_data(data.clone(), None);
    assert!(out_data.is_ok());
    println!("Out data {:?}", out_data.as_ref().unwrap().data_list);
    for i in 0..2 {
        assert!(out_data.as_ref().unwrap().data_list[i]
            .as_ref()
            .unwrap()
            .series[0][15]
            .is_some());
    }
}
|
#![allow(unused_variables, non_upper_case_globals, non_snake_case, unused_unsafe, non_camel_case_types, dead_code, clippy::all)]
// Machine-generated windows-rs projection of the WinRT class
// Windows.Globalization.Collation.CharacterGrouping: a transparent newtype
// over the object's IInspectable interface pointer.
#[repr(transparent)]
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: clone :: Clone, :: core :: fmt :: Debug)]
pub struct CharacterGrouping(pub ::windows::core::IInspectable);
impl CharacterGrouping {
    // Vtable slot 6: the `First` HSTRING property. Slots 0-5 are the
    // IUnknown/IInspectable members (see ICharacterGrouping_abi below).
    pub fn First(&self) -> ::windows::core::Result<::windows::core::HSTRING> {
        let this = self;
        unsafe {
            // Generated pattern: zero-init the out-param, let the ABI call
            // fill it, then convert with from_abi.
            let mut result__: ::core::mem::ManuallyDrop<::windows::core::HSTRING> = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<::windows::core::HSTRING>(result__)
        }
    }
    // Vtable slot 7: the `Label` HSTRING property.
    pub fn Label(&self) -> ::windows::core::Result<::windows::core::HSTRING> {
        let this = self;
        unsafe {
            let mut result__: ::core::mem::ManuallyDrop<::windows::core::HSTRING> = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).7)(::core::mem::transmute_copy(this), &mut result__).from_abi::<::windows::core::HSTRING>(result__)
        }
    }
}
// Generated trait plumbing for CharacterGrouping: runtime-type metadata plus
// the standard conversions into IUnknown/IInspectable used when passing the
// object as a WinRT parameter.
unsafe impl ::windows::core::RuntimeType for CharacterGrouping {
    const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"rc(Windows.Globalization.Collation.CharacterGrouping;{fae761bb-805d-4bb0-95bb-c1f7c3e8eb8e})");
}
unsafe impl ::windows::core::Interface for CharacterGrouping {
    type Vtable = ICharacterGrouping_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0xfae761bb_805d_4bb0_95bb_c1f7c3e8eb8e);
}
impl ::windows::core::RuntimeName for CharacterGrouping {
    const NAME: &'static str = "Windows.Globalization.Collation.CharacterGrouping";
}
// `value.0 .0` reaches through the IInspectable wrapper to its inner IUnknown.
impl ::core::convert::From<CharacterGrouping> for ::windows::core::IUnknown {
    fn from(value: CharacterGrouping) -> Self {
        value.0 .0
    }
}
impl ::core::convert::From<&CharacterGrouping> for ::windows::core::IUnknown {
    fn from(value: &CharacterGrouping) -> Self {
        value.0 .0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for CharacterGrouping {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Owned(self.0 .0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for &'a CharacterGrouping {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Borrowed(&self.0 .0)
    }
}
impl ::core::convert::From<CharacterGrouping> for ::windows::core::IInspectable {
    fn from(value: CharacterGrouping) -> Self {
        value.0
    }
}
impl ::core::convert::From<&CharacterGrouping> for ::windows::core::IInspectable {
    fn from(value: &CharacterGrouping) -> Self {
        value.0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for CharacterGrouping {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
        ::windows::core::Param::Owned(self.0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for &'a CharacterGrouping {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
        ::windows::core::Param::Borrowed(&self.0)
    }
}
// NOTE(review): Send/Sync are asserted by the code generator; soundness
// relies on the underlying WinRT object being agile — confirm via metadata.
unsafe impl ::core::marker::Send for CharacterGrouping {}
unsafe impl ::core::marker::Sync for CharacterGrouping {}
// Generated projection of Windows.Globalization.Collation.CharacterGroupings:
// an activatable WinRT class that is also iterable/indexable as a collection
// of CharacterGrouping (see the IIterable/IVectorView casts below).
#[repr(transparent)]
#[derive(:: core :: cmp :: PartialEq, :: core :: cmp :: Eq, :: core :: clone :: Clone, :: core :: fmt :: Debug)]
pub struct CharacterGroupings(pub ::windows::core::IInspectable);
impl CharacterGroupings {
    // Default activation through the class's IActivationFactory.
    pub fn new() -> ::windows::core::Result<Self> {
        Self::IActivationFactory(|f| f.activate_instance::<Self>())
    }
    // Process-wide cached activation factory (generated pattern).
    fn IActivationFactory<R, F: FnOnce(&::windows::core::IActivationFactory) -> ::windows::core::Result<R>>(callback: F) -> ::windows::core::Result<R> {
        static mut SHARED: ::windows::core::FactoryCache<CharacterGroupings, ::windows::core::IActivationFactory> = ::windows::core::FactoryCache::new();
        unsafe { SHARED.call(callback) }
    }
    // ICharacterGroupings vtable slot 6: Lookup(text) -> HSTRING.
    pub fn Lookup<'a, Param0: ::windows::core::IntoParam<'a, ::windows::core::HSTRING>>(&self, text: Param0) -> ::windows::core::Result<::windows::core::HSTRING> {
        let this = self;
        unsafe {
            let mut result__: ::core::mem::ManuallyDrop<::windows::core::HSTRING> = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), text.into_param().abi(), &mut result__).from_abi::<::windows::core::HSTRING>(result__)
        }
    }
    // The collection methods below first QI `self` to the relevant
    // Foundation.Collections interface, then call through its vtable.
    #[cfg(feature = "Foundation_Collections")]
    pub fn First(&self) -> ::windows::core::Result<super::super::Foundation::Collections::IIterator<CharacterGrouping>> {
        let this = &::windows::core::Interface::cast::<super::super::Foundation::Collections::IIterable<CharacterGrouping>>(self)?;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), &mut result__).from_abi::<super::super::Foundation::Collections::IIterator<CharacterGrouping>>(result__)
        }
    }
    #[cfg(feature = "Foundation_Collections")]
    pub fn GetAt(&self, index: u32) -> ::windows::core::Result<CharacterGrouping> {
        let this = &::windows::core::Interface::cast::<super::super::Foundation::Collections::IVectorView<CharacterGrouping>>(self)?;
        unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), index, &mut result__).from_abi::<CharacterGrouping>(result__)
        }
    }
    #[cfg(feature = "Foundation_Collections")]
    pub fn Size(&self) -> ::windows::core::Result<u32> {
        let this = &::windows::core::Interface::cast::<super::super::Foundation::Collections::IVectorView<CharacterGrouping>>(self)?;
        unsafe {
            let mut result__: u32 = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).7)(::core::mem::transmute_copy(this), &mut result__).from_abi::<u32>(result__)
        }
    }
    #[cfg(feature = "Foundation_Collections")]
    pub fn IndexOf<'a, Param0: ::windows::core::IntoParam<'a, CharacterGrouping>>(&self, value: Param0, index: &mut u32) -> ::windows::core::Result<bool> {
        let this = &::windows::core::Interface::cast::<super::super::Foundation::Collections::IVectorView<CharacterGrouping>>(self)?;
        unsafe {
            let mut result__: bool = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).8)(::core::mem::transmute_copy(this), value.into_param().abi(), index, &mut result__).from_abi::<bool>(result__)
        }
    }
    #[cfg(feature = "Foundation_Collections")]
    pub fn GetMany(&self, startindex: u32, items: &mut [<CharacterGrouping as ::windows::core::DefaultType>::DefaultType]) -> ::windows::core::Result<u32> {
        let this = &::windows::core::Interface::cast::<super::super::Foundation::Collections::IVectorView<CharacterGrouping>>(self)?;
        unsafe {
            let mut result__: u32 = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).9)(::core::mem::transmute_copy(this), startindex, items.len() as u32, ::core::mem::transmute_copy(&items), &mut result__).from_abi::<u32>(result__)
        }
    }
    // Parameterized activation: ICharacterGroupingsFactory::Create(language).
    pub fn Create<'a, Param0: ::windows::core::IntoParam<'a, ::windows::core::HSTRING>>(language: Param0) -> ::windows::core::Result<CharacterGroupings> {
        Self::ICharacterGroupingsFactory(|this| unsafe {
            let mut result__: ::windows::core::RawPtr = ::core::mem::zeroed();
            (::windows::core::Interface::vtable(this).6)(::core::mem::transmute_copy(this), language.into_param().abi(), &mut result__).from_abi::<CharacterGroupings>(result__)
        })
    }
    // Cached factory for the parameterized constructor above.
    pub fn ICharacterGroupingsFactory<R, F: FnOnce(&ICharacterGroupingsFactory) -> ::windows::core::Result<R>>(callback: F) -> ::windows::core::Result<R> {
        static mut SHARED: ::windows::core::FactoryCache<CharacterGroupings, ICharacterGroupingsFactory> = ::windows::core::FactoryCache::new();
        unsafe { SHARED.call(callback) }
    }
}
// Generated trait plumbing for CharacterGroupings: runtime metadata, the
// IUnknown/IInspectable conversions, fallible casts to the collection
// interfaces, and IntoIterator support.
unsafe impl ::windows::core::RuntimeType for CharacterGroupings {
    const SIGNATURE: ::windows::core::ConstBuffer = ::windows::core::ConstBuffer::from_slice(b"rc(Windows.Globalization.Collation.CharacterGroupings;{b8d20a75-d4cf-4055-80e5-ce169c226496})");
}
unsafe impl ::windows::core::Interface for CharacterGroupings {
    type Vtable = ICharacterGroupings_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0xb8d20a75_d4cf_4055_80e5_ce169c226496);
}
impl ::windows::core::RuntimeName for CharacterGroupings {
    const NAME: &'static str = "Windows.Globalization.Collation.CharacterGroupings";
}
impl ::core::convert::From<CharacterGroupings> for ::windows::core::IUnknown {
    fn from(value: CharacterGroupings) -> Self {
        value.0 .0
    }
}
impl ::core::convert::From<&CharacterGroupings> for ::windows::core::IUnknown {
    fn from(value: &CharacterGroupings) -> Self {
        value.0 .0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for CharacterGroupings {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Owned(self.0 .0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IUnknown> for &'a CharacterGroupings {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IUnknown> {
        ::windows::core::Param::Borrowed(&self.0 .0)
    }
}
impl ::core::convert::From<CharacterGroupings> for ::windows::core::IInspectable {
    fn from(value: CharacterGroupings) -> Self {
        value.0
    }
}
impl ::core::convert::From<&CharacterGroupings> for ::windows::core::IInspectable {
    fn from(value: &CharacterGroupings) -> Self {
        value.0.clone()
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for CharacterGroupings {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
        ::windows::core::Param::Owned(self.0)
    }
}
impl<'a> ::windows::core::IntoParam<'a, ::windows::core::IInspectable> for &CharacterGroupings {
    fn into_param(self) -> ::windows::core::Param<'a, ::windows::core::IInspectable> {
        ::windows::core::Param::Borrowed(&self.0)
    }
}
// Fallible casts to IIterable<CharacterGrouping> — these QueryInterface at
// runtime, hence TryFrom rather than From.
#[cfg(feature = "Foundation_Collections")]
impl ::core::convert::TryFrom<CharacterGroupings> for super::super::Foundation::Collections::IIterable<CharacterGrouping> {
    type Error = ::windows::core::Error;
    fn try_from(value: CharacterGroupings) -> ::windows::core::Result<Self> {
        ::core::convert::TryFrom::try_from(&value)
    }
}
#[cfg(feature = "Foundation_Collections")]
impl ::core::convert::TryFrom<&CharacterGroupings> for super::super::Foundation::Collections::IIterable<CharacterGrouping> {
    type Error = ::windows::core::Error;
    fn try_from(value: &CharacterGroupings) -> ::windows::core::Result<Self> {
        ::windows::core::Interface::cast(value)
    }
}
#[cfg(feature = "Foundation_Collections")]
impl<'a> ::windows::core::IntoParam<'a, super::super::Foundation::Collections::IIterable<CharacterGrouping>> for CharacterGroupings {
    fn into_param(self) -> ::windows::core::Param<'a, super::super::Foundation::Collections::IIterable<CharacterGrouping>> {
        ::windows::core::IntoParam::into_param(&self)
    }
}
#[cfg(feature = "Foundation_Collections")]
impl<'a> ::windows::core::IntoParam<'a, super::super::Foundation::Collections::IIterable<CharacterGrouping>> for &CharacterGroupings {
    fn into_param(self) -> ::windows::core::Param<'a, super::super::Foundation::Collections::IIterable<CharacterGrouping>> {
        ::core::convert::TryInto::<super::super::Foundation::Collections::IIterable<CharacterGrouping>>::try_into(self).map(::windows::core::Param::Owned).unwrap_or(::windows::core::Param::None)
    }
}
// Same pattern for IVectorView<CharacterGrouping>.
#[cfg(feature = "Foundation_Collections")]
impl ::core::convert::TryFrom<CharacterGroupings> for super::super::Foundation::Collections::IVectorView<CharacterGrouping> {
    type Error = ::windows::core::Error;
    fn try_from(value: CharacterGroupings) -> ::windows::core::Result<Self> {
        ::core::convert::TryFrom::try_from(&value)
    }
}
#[cfg(feature = "Foundation_Collections")]
impl ::core::convert::TryFrom<&CharacterGroupings> for super::super::Foundation::Collections::IVectorView<CharacterGrouping> {
    type Error = ::windows::core::Error;
    fn try_from(value: &CharacterGroupings) -> ::windows::core::Result<Self> {
        ::windows::core::Interface::cast(value)
    }
}
#[cfg(feature = "Foundation_Collections")]
impl<'a> ::windows::core::IntoParam<'a, super::super::Foundation::Collections::IVectorView<CharacterGrouping>> for CharacterGroupings {
    fn into_param(self) -> ::windows::core::Param<'a, super::super::Foundation::Collections::IVectorView<CharacterGrouping>> {
        ::windows::core::IntoParam::into_param(&self)
    }
}
#[cfg(feature = "Foundation_Collections")]
impl<'a> ::windows::core::IntoParam<'a, super::super::Foundation::Collections::IVectorView<CharacterGrouping>> for &CharacterGroupings {
    fn into_param(self) -> ::windows::core::Param<'a, super::super::Foundation::Collections::IVectorView<CharacterGrouping>> {
        ::core::convert::TryInto::<super::super::Foundation::Collections::IVectorView<CharacterGrouping>>::try_into(self).map(::windows::core::Param::Owned).unwrap_or(::windows::core::Param::None)
    }
}
// NOTE(review): Send/Sync asserted by the generator; relies on the WinRT
// object being agile — confirm via metadata.
unsafe impl ::core::marker::Send for CharacterGroupings {}
unsafe impl ::core::marker::Sync for CharacterGroupings {}
#[cfg(all(feature = "Foundation_Collections"))]
impl ::core::iter::IntoIterator for CharacterGroupings {
    type Item = CharacterGrouping;
    type IntoIter = super::super::Foundation::Collections::VectorViewIterator<Self::Item>;
    fn into_iter(self) -> Self::IntoIter {
        ::core::iter::IntoIterator::into_iter(&self)
    }
}
#[cfg(all(feature = "Foundation_Collections"))]
impl ::core::iter::IntoIterator for &CharacterGroupings {
    type Item = CharacterGrouping;
    type IntoIter = super::super::Foundation::Collections::VectorViewIterator<Self::Item>;
    fn into_iter(self) -> Self::IntoIter {
        // A failed cast yields an empty iterator rather than a panic.
        super::super::Foundation::Collections::VectorViewIterator::new(::core::convert::TryInto::try_into(self).ok())
    }
}
// Hidden ABI definitions. Each *_abi tuple struct mirrors the raw COM
// vtable: entries 0-5 are the IUnknown + IInspectable members
// (QueryInterface, AddRef, Release, GetIids, GetRuntimeClassName,
// GetTrustLevel), followed by the interface's own methods.
#[repr(transparent)]
#[doc(hidden)]
pub struct ICharacterGrouping(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for ICharacterGrouping {
    type Vtable = ICharacterGrouping_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0xfae761bb_805d_4bb0_95bb_c1f7c3e8eb8e);
}
#[repr(C)]
#[doc(hidden)]
pub struct ICharacterGrouping_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    // Slot 6: First; slot 7: Label (both HSTRING out-params).
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::core::mem::ManuallyDrop<::windows::core::HSTRING>) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, result__: *mut ::core::mem::ManuallyDrop<::windows::core::HSTRING>) -> ::windows::core::HRESULT,
);
#[repr(transparent)]
#[doc(hidden)]
pub struct ICharacterGroupings(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for ICharacterGroupings {
    type Vtable = ICharacterGroupings_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0xb8d20a75_d4cf_4055_80e5_ce169c226496);
}
#[repr(C)]
#[doc(hidden)]
pub struct ICharacterGroupings_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    // Slot 6: Lookup(text) -> HSTRING.
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, text: ::core::mem::ManuallyDrop<::windows::core::HSTRING>, result__: *mut ::core::mem::ManuallyDrop<::windows::core::HSTRING>) -> ::windows::core::HRESULT,
);
#[repr(transparent)]
#[doc(hidden)]
pub struct ICharacterGroupingsFactory(pub ::windows::core::IInspectable);
unsafe impl ::windows::core::Interface for ICharacterGroupingsFactory {
    type Vtable = ICharacterGroupingsFactory_abi;
    const IID: ::windows::core::GUID = ::windows::core::GUID::from_u128(0x99ea9fd9_886d_4401_9f98_69c82d4c2f78);
}
#[repr(C)]
#[doc(hidden)]
pub struct ICharacterGroupingsFactory_abi(
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, iid: &::windows::core::GUID, interface: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr) -> u32,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, count: *mut u32, values: *mut *mut ::windows::core::GUID) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, value: *mut i32) -> ::windows::core::HRESULT,
    // Slot 6: Create(language) -> CharacterGroupings.
    pub unsafe extern "system" fn(this: ::windows::core::RawPtr, language: ::core::mem::ManuallyDrop<::windows::core::HSTRING>, result__: *mut ::windows::core::RawPtr) -> ::windows::core::HRESULT,
);
|
use futures_util::future::try_join_all;
use lazy_static::lazy_static;
use std::path::Path;
use std::time::{Duration, UNIX_EPOCH};
use time::{OffsetDateTime, UtcOffset};
use tokio::fs::{self, DirEntry};
// HTML directory template
// Placeholders are substituted in Directory::render with String::replacen:
// {title} (twice: <title> and <h1>), {columns} and {column} (CSS grid
// settings), and {content} (the generated rows). The raw string itself is
// emitted verbatim otherwise.
const TEMPLATE: &str = r#"<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Index of {title}</title>
<style>
body {
    font-family: "Segoe UI", Segoe,Tahoma,Arial, Verdana, sans-serif;
    padding: 0 16px 0;
    margin: 0;
}
h1 {
    font-weight: normal;
    word-wrap: break-word;
}
main {
    display: grid;
    grid-template-columns: {columns};
}
a:first-child {
    grid-column: {column};
}
a, time, span {
    height: 28px;
    line-height: 28px;
    text-overflow: ellipsis;
    overflow: hidden;
    white-space: nowrap;
}
a {
    color: #2a7ae2;
    text-decoration: none;
}
a:hover {
    text-decoration: underline;
}
a:active, a:visited {
    color: #1756a9;
}
time, span {
    padding-left: 16px;
}
@media (prefers-color-scheme: dark) {
    body {
        background-color: #1e2022;
        color: #d5d5d5;
    }
}
</style>
</head>
<body>
<h1>Index of {title}</h1>
<main>
<a href="../">../</a>
{content}
</main>
</body>
</html>
"#;
// Configuration for rendering a directory index page.
#[derive(Debug, Clone)]
pub struct Directory {
    // When Some, each row shows the modification time rendered with this
    // format string (passed to format_datetime / the time crate).
    pub time: Option<String>,
    // When true, each file row shows a human-readable size column.
    pub size: bool,
}
impl Directory {
    /// Render an HTML index page for `dir`, titled `title`.
    ///
    /// Entries whose names start with '.' are skipped; an entry whose name
    /// is not valid UTF-8, or any read error, aborts with `Err(())`.
    pub async fn render(&self, dir: &Path, title: &str) -> Result<String, ()> {
        let mut entries = fs::read_dir(dir).await.map_err(|_| ())?;
        let mut row_futures = vec![];
        while let Some(entry) = entries.next_entry().await.map_err(|_| ())? {
            let file_name = entry.file_name();
            let name = file_name.to_str().ok_or(())?;
            if name.starts_with('.') {
                continue;
            }
            row_futures.push(Self::render_row(entry, &self.time, self.size));
        }
        let content = try_join_all(row_futures).await?.join("");
        // Grid layout depends on which optional columns are enabled.
        let (columns, column) = match (&self.time, self.size) {
            (None, false) => ("auto", "1 / 2"),            // name only
            (Some(_), true) => ("auto auto 1fr", "1 / 4"), // name + time + size
            _ => ("auto 1fr", "1 / 3"),                    // name + one extra
        };
        Ok(TEMPLATE
            .replacen("{title}", title, 2)
            .replacen("{columns}", columns, 1)
            .replacen("{column}", column, 1)
            .replacen("{content}", &content, 1))
    }
    /// Build one grid row (<a>, optional <time>, optional <span>) for `entry`.
    async fn render_row(entry: DirEntry, time: &Option<String>, size: bool) -> Result<String, ()> {
        let meta = entry.metadata().await.map_err(|_| ())?;
        let name_os = entry.file_name();
        let name = name_os.to_str().unwrap(); // render() already verified UTF-8
        // Directories get a trailing '/' in both href and label.
        let mut row = if meta.is_file() {
            format!("<a href=\"{}\">{}</a>", name, name)
        } else {
            format!("<a href=\"{}/\">{}/</a>", name, name)
        };
        if let Some(format) = &time {
            let since_epoch = meta
                .modified()
                .map_err(|_| ())?
                .duration_since(UNIX_EPOCH)
                .map_err(|_| ())?;
            row.push_str(&format!(
                "<time>{}</time>",
                format_datetime(since_epoch, format)
            ));
        }
        if size {
            // Only files get a size; directories still get an empty cell so
            // the grid columns stay aligned.
            let cell = if meta.is_file() {
                format!("<span>{}</span>", format_size(meta.len()))
            } else {
                String::from("<span></span>")
            };
            row.push_str(&cell);
        }
        Ok(row)
    }
}
// Format a duration-since-UNIX-epoch with the time crate, shifted into the
// machine's local UTC offset (resolved once and cached).
fn format_datetime(dur: Duration, format: &str) -> String {
    lazy_static! {
        // NOTE(review): this `unwrap` panics if the local offset cannot be
        // determined on the platform — confirm that is acceptable.
        static ref UTC_OFFSET: UtcOffset = UtcOffset::try_current_local_offset().unwrap();
    }
    let datetime = OffsetDateTime::from_unix_timestamp(dur.as_secs() as i64).to_offset(*UTC_OFFSET);
    datetime.format(format)
}
/// Format a byte count for humans: "123 B", "1.00 KB", … up to "16.00 EB".
///
/// Values below 1024 are printed as integers in bytes; everything else gets
/// two decimals and a binary-prefixed unit.
fn format_size(n: u64) -> String {
    const UNITS: [char; 6] = ['K', 'M', 'G', 'T', 'P', 'E'];
    if n < 1024 {
        return format!("{} B", n);
    }
    // Select the unit by repeated division instead of floating-point
    // logarithms, which can misround at exact powers of 1024.
    let mut value = n as f64 / 1024.0; // n >= 1024, so at least 'K'
    let mut unit = 0;
    while value >= 1024.0 && unit + 1 < UNITS.len() {
        value /= 1024.0;
        unit += 1;
    }
    format!("{:.2} {}B", value, UNITS[unit])
}
#[test]
fn test_format_size() {
    // (input bytes, expected rendering)
    let cases: &[(u64, &str)] = &[
        (0, "0 B"),
        (1, "1 B"),
        (1023, "1023 B"),
        (1024, "1.00 KB"),
        (1 * 1024 * 1024, "1.00 MB"),
        (1 * 1024 * 1024 * 1024 * 1024, "1.00 TB"),
        (u64::max_value(), "16.00 EB"),
    ];
    for &(input, want) in cases {
        assert_eq!(format_size(input), want);
    }
}
|
use crate::binding::http::{to_event, Headers};
use crate::Event;
use actix_web::web::BytesMut;
use actix_web::{web, HttpRequest};
use async_trait::async_trait;
use futures::future::LocalBoxFuture;
use futures::{FutureExt, StreamExt};
use http::header::{AsHeaderName, HeaderName, HeaderValue};
/// Implement Headers for the actix HeaderMap
impl<'a> Headers<'a> for actix_web::http::HeaderMap {
    // Boxed trait object erases the concrete HeaderMap iterator type.
    type Iterator = Box<dyn Iterator<Item = (&'a HeaderName, &'a HeaderValue)> + 'a>;
    // Delegates to HeaderMap::get using the header name's string form.
    fn get<K: AsHeaderName>(&self, key: K) -> Option<&HeaderValue> {
        self.get(key.as_str())
    }
    fn iter(&'a self) -> Self::Iterator {
        Box::new(self.iter())
    }
}
/// Method to transform an incoming [`HttpRequest`] to [`Event`].
pub async fn request_to_event(
req: &HttpRequest,
mut payload: web::Payload,
) -> std::result::Result<Event, actix_web::error::Error> {
let mut bytes = BytesMut::new();
while let Some(item) = payload.next().await {
bytes.extend_from_slice(&item?);
}
to_event(req.headers(), bytes.to_vec()).map_err(actix_web::error::ErrorBadRequest)
}
/// So that an actix-web handler may take an Event parameter
impl actix_web::FromRequest for Event {
    type Config = ();
    type Error = actix_web::Error;
    type Future = LocalBoxFuture<'static, std::result::Result<Self, Self::Error>>;
    fn from_request(r: &HttpRequest, p: &mut actix_web::dev::Payload) -> Self::Future {
        // Take ownership of the body stream (leaves an empty payload behind)
        // and clone the request so the returned future can be `'static`.
        let payload = web::Payload(p.take());
        let request = r.to_owned();
        async move { request_to_event(&request, payload).await }.boxed_local()
    }
}
/// Extension Trait for [`HttpRequest`] which acts as a wrapper for the function [`request_to_event()`].
///
/// This trait is sealed and cannot be implemented for types outside of this crate.
#[async_trait(?Send)]
pub trait HttpRequestExt: private::Sealed {
    /// Convert this [`HttpRequest`] into an [`Event`].
    ///
    /// `payload` is the request body stream; it is consumed in full.
    async fn to_event(
        &self,
        mut payload: web::Payload,
    ) -> std::result::Result<Event, actix_web::error::Error>;
}
#[async_trait(?Send)]
impl HttpRequestExt for HttpRequest {
    // Thin delegation to the free function; see `request_to_event`.
    async fn to_event(
        &self,
        payload: web::Payload,
    ) -> std::result::Result<Event, actix_web::error::Error> {
        request_to_event(self, payload).await
    }
}
mod private {
    // Sealing the RequestExt
    // Only `HttpRequest` gets the extension trait; no outside impls.
    pub trait Sealed {}
    impl Sealed for actix_web::HttpRequest {}
}
#[cfg(test)]
mod tests {
    use super::*;
    use actix_web::test;
    use crate::test::fixtures;
    use serde_json::json;
    // Binary-mode request: event attributes carried in `ce-*` headers,
    // no body.
    #[actix_rt::test]
    async fn test_request() {
        let expected = fixtures::v10::minimal_string_extension();
        let (req, payload) = test::TestRequest::post()
            .header("ce-specversion", "1.0")
            .header("ce-id", "0001")
            .header("ce-type", "test_event.test_application")
            .header("ce-source", "http://localhost/")
            .header("ce-someint", "10")
            .to_http_parts();
        let resp = req.to_event(web::Payload(payload)).await.unwrap();
        assert_eq!(expected, resp);
    }
    // Binary-mode request with a JSON data payload and extension attributes.
    #[actix_rt::test]
    async fn test_request_with_full_data() {
        let expected = fixtures::v10::full_binary_json_data_string_extension();
        let (req, payload) = test::TestRequest::post()
            .header("ce-specversion", "1.0")
            .header("ce-id", "0001")
            .header("ce-type", "test_event.test_application")
            .header("ce-subject", "cloudevents-sdk")
            .header("ce-source", "http://localhost/")
            .header("ce-time", fixtures::time().to_rfc3339())
            .header("ce-string_ex", "val")
            .header("ce-int_ex", "10")
            .header("ce-bool_ex", "true")
            .header("content-type", "application/json")
            .set_json(&fixtures::json_data())
            .to_http_parts();
        let resp = req.to_event(web::Payload(payload)).await.unwrap();
        assert_eq!(expected, resp);
    }
    // Structured-mode request: the whole event is a single
    // `application/cloudevents+json` body.
    #[actix_rt::test]
    async fn test_structured_request_with_full_data() {
        let payload = json!({
            "specversion": "1.0",
            "id": "0001",
            "type": "test_event.test_application",
            "subject": "cloudevents-sdk",
            "source": "http://localhost/",
            "time": fixtures::time().to_rfc3339(),
            "string_ex": "val",
            "int_ex": "10",
            "bool_ex": "true",
            "datacontenttype": "application/json",
            "data": fixtures::json_data()
        });
        let bytes = serde_json::to_string(&payload).expect("Failed to serialize test data to json");
        let expected = fixtures::v10::full_json_data_string_extension();
        let (req, payload) = test::TestRequest::post()
            .header("content-type", "application/cloudevents+json")
            .set_payload(bytes)
            .to_http_parts();
        let resp = req.to_event(web::Payload(payload)).await.unwrap();
        assert_eq!(expected, resp);
    }
}
|
//! The per-partition data nested in a query [`QueryResponse`].
//!
//! [`QueryResponse`]: super::response::QueryResponse
use arrow::record_batch::RecordBatch;
use data_types::TransitionPartitionId;
/// Response data for a single partition.
#[derive(Debug)]
pub(crate) struct PartitionResponse {
    /// Stream of snapshots.
    // NOTE(review): materialized as a `Vec`, not an actual stream — the
    // doc wording predates the type; confirm ordering guarantees.
    batches: Vec<RecordBatch>,
    /// Partition ID.
    id: TransitionPartitionId,
    /// Count of persisted Parquet files for this partition by this ingester instance.
    completed_persistence_count: u64,
}
impl PartitionResponse {
    /// Bundle the snapshot batches, partition identity and persistence
    /// counter into a response.
    pub(crate) fn new(
        data: Vec<RecordBatch>,
        id: TransitionPartitionId,
        completed_persistence_count: u64,
    ) -> Self {
        Self {
            batches: data,
            id,
            completed_persistence_count,
        }
    }
    /// Identifier of the partition this data belongs to.
    pub(crate) fn id(&self) -> &TransitionPartitionId {
        &self.id
    }
    /// Number of Parquet files this ingester instance has persisted for the
    /// partition.
    pub(crate) fn completed_persistence_count(&self) -> u64 {
        self.completed_persistence_count
    }
    /// Consume the response, yielding its record batches.
    pub(crate) fn into_record_batches(self) -> Vec<RecordBatch> {
        self.batches
    }
}
|
use super::abstract_container::AbstractContainer;
/// Abstract-factory interface: anything that can produce fresh containers.
pub trait ContainerFactory {
    // Each call returns a brand-new, independently owned container.
    fn new_container(&self) -> Box<dyn AbstractContainer>;
}
// Blanket impl: any closure returning a boxed container is itself a factory,
// so call sites can pass `|| Box::new(...)` wherever a factory is expected.
impl<F> ContainerFactory for F
where
    F: Fn() -> Box<dyn AbstractContainer>,
{
    fn new_container(&self) -> Box<dyn AbstractContainer> {
        self()
    }
}
|
use crate::{
config::IoxConfigExt,
physical_optimizer::chunk_extraction::extract_chunks,
provider::{chunks_to_physical_nodes, DeduplicateExec},
QueryChunk,
};
use datafusion::{
common::tree_node::{Transformed, TreeNode},
config::ConfigOptions,
error::Result,
physical_optimizer::PhysicalOptimizerRule,
physical_plan::{union::UnionExec, ExecutionPlan},
};
use hashbrown::HashMap;
use observability_deps::tracing::warn;
use std::sync::Arc;
/// Split de-duplication operations based on partitions.
///
/// This should usually be more cost-efficient.
// Stateless marker type; all logic lives in the `PhysicalOptimizerRule` impl.
#[derive(Debug, Default)]
pub struct PartitionSplit;
impl PhysicalOptimizerRule for PartitionSplit {
    // Rewrite `DeduplicateExec(chunks)` into
    // `UnionExec(DeduplicateExec(chunks of partition 1), ...)`, one dedup per
    // partition, so each partition can be de-duplicated independently.
    fn optimize(
        &self,
        plan: Arc<dyn ExecutionPlan>,
        config: &ConfigOptions,
    ) -> Result<Arc<dyn ExecutionPlan>> {
        plan.transform_up(&|plan| {
            let plan_any = plan.as_any();
            if let Some(dedup_exec) = plan_any.downcast_ref::<DeduplicateExec>() {
                // DeduplicateExec always has exactly one input.
                let mut children = dedup_exec.children();
                assert_eq!(children.len(), 1);
                let child = children.remove(0);
                // Bail out if the subtree is not a plain chunk scan we know
                // how to reassemble.
                let Some((schema, chunks, output_sort_key)) = extract_chunks(child.as_ref()) else {
                    return Ok(Transformed::No(plan));
                };
                let mut chunks_by_partition: HashMap<_, Vec<Arc<dyn QueryChunk>>> =
                    Default::default();
                for chunk in chunks {
                    chunks_by_partition
                        .entry(chunk.partition_id().clone())
                        .or_default()
                        .push(chunk);
                }
                // If there are not multiple partitions (0 or 1), then this optimizer is a no-op. Signal that to the
                // optimizer framework.
                if chunks_by_partition.len() < 2 {
                    return Ok(Transformed::No(plan));
                }
                // Protect against degenerative plans
                let max_dedup_partition_split = config
                    .extensions
                    .get::<IoxConfigExt>()
                    .cloned()
                    .unwrap_or_default()
                    .max_dedup_partition_split;
                if chunks_by_partition.len() > max_dedup_partition_split {
                    warn!(
                        n_partitions = chunks_by_partition.len(),
                        max_dedup_partition_split,
                        "cannot split dedup operation based on partition, too many partitions"
                    );
                    return Ok(Transformed::No(plan));
                }
                // ensure deterministic order (HashMap iteration order is not)
                let mut chunks_by_partition = chunks_by_partition.into_iter().collect::<Vec<_>>();
                chunks_by_partition.sort_by(|a, b| a.0.cmp(&b.0));
                // One DeduplicateExec per partition, all unioned together.
                let out = UnionExec::new(
                    chunks_by_partition
                        .into_iter()
                        .map(|(_p_id, chunks)| {
                            Arc::new(DeduplicateExec::new(
                                chunks_to_physical_nodes(
                                    &schema,
                                    output_sort_key.as_ref(),
                                    chunks,
                                    config.execution.target_partitions,
                                ),
                                dedup_exec.sort_keys().to_vec(),
                                dedup_exec.use_chunk_order_col(),
                            )) as _
                        })
                        .collect(),
                );
                return Ok(Transformed::Yes(Arc::new(out)));
            }
            Ok(Transformed::No(plan))
        })
    }
    fn name(&self) -> &str {
        "partition_split"
    }
    fn schema_check(&self) -> bool {
        true
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::physical_optimizer::{
        dedup::test_util::{chunk, dedup_plan},
        test_util::OptimizationTest,
    };
    use data_types::{PartitionHashId, PartitionId, TransitionPartitionId};
    // No chunks at all: nothing to split, the plan must pass through.
    #[test]
    fn test_no_chunks() {
        let schema = chunk(1).schema().clone();
        let plan = dedup_plan(schema, vec![]);
        let opt = PartitionSplit;
        insta::assert_yaml_snapshot!(
            OptimizationTest::new(plan, opt),
            @r###"
        ---
        input:
          - " DeduplicateExec: [tag1@1 ASC,tag2@2 ASC,time@3 ASC]"
          - " EmptyExec: produce_one_row=false"
        output:
          Ok:
            - " DeduplicateExec: [tag1@1 ASC,tag2@2 ASC,time@3 ASC]"
            - " EmptyExec: produce_one_row=false"
        "###
        );
    }
    // All chunks share one partition: the rule is a no-op by design.
    #[test]
    fn test_same_partition() {
        let chunk1 = chunk(1);
        let chunk2 = chunk(2);
        let chunk3 = chunk(3).with_dummy_parquet_file();
        let schema = chunk1.schema().clone();
        let plan = dedup_plan(schema, vec![chunk1, chunk2, chunk3]);
        let opt = PartitionSplit;
        insta::assert_yaml_snapshot!(
            OptimizationTest::new(plan, opt),
            @r###"
        ---
        input:
          - " DeduplicateExec: [tag1@1 ASC,tag2@2 ASC,time@3 ASC]"
          - " UnionExec"
          - " RecordBatchesExec: batches_groups=2 batches=0 total_rows=0"
          - " ParquetExec: file_groups={1 group: [[3.parquet]]}, projection=[field, tag1, tag2, time]"
        output:
          Ok:
            - " DeduplicateExec: [tag1@1 ASC,tag2@2 ASC,time@3 ASC]"
            - " UnionExec"
            - " RecordBatchesExec: batches_groups=2 batches=0 total_rows=0"
            - " ParquetExec: file_groups={1 group: [[3.parquet]]}, projection=[field, tag1, tag2, time]"
        "###
        );
    }
    // Two partitions: expect a UnionExec with one DeduplicateExec each.
    #[test]
    fn test_different_partitions() {
        let chunk1 = chunk(1).with_partition(1);
        let chunk2 = chunk(2).with_partition(2);
        // use at least 3 parquet files for one of the two partitions to validate that `target_partitions` is forwared correctly
        let chunk3 = chunk(3).with_dummy_parquet_file().with_partition(1);
        let chunk4 = chunk(4).with_dummy_parquet_file().with_partition(2);
        let chunk5 = chunk(5).with_dummy_parquet_file().with_partition(1);
        let chunk6 = chunk(6).with_dummy_parquet_file().with_partition(1);
        let schema = chunk1.schema().clone();
        let plan = dedup_plan(schema, vec![chunk1, chunk2, chunk3, chunk4, chunk5, chunk6]);
        let opt = PartitionSplit;
        let mut config = ConfigOptions::default();
        config.execution.target_partitions = 2;
        insta::assert_yaml_snapshot!(
            OptimizationTest::new_with_config(plan, opt, &config),
            @r###"
        ---
        input:
          - " DeduplicateExec: [tag1@1 ASC,tag2@2 ASC,time@3 ASC]"
          - " UnionExec"
          - " RecordBatchesExec: batches_groups=2 batches=0 total_rows=0"
          - " ParquetExec: file_groups={2 groups: [[3.parquet, 5.parquet], [4.parquet, 6.parquet]]}, projection=[field, tag1, tag2, time]"
        output:
          Ok:
            - " UnionExec"
            - " DeduplicateExec: [tag1@1 ASC,tag2@2 ASC,time@3 ASC]"
            - " UnionExec"
            - " RecordBatchesExec: batches_groups=1 batches=0 total_rows=0"
            - " ParquetExec: file_groups={2 groups: [[3.parquet, 6.parquet], [5.parquet]]}, projection=[field, tag1, tag2, time]"
            - " DeduplicateExec: [tag1@1 ASC,tag2@2 ASC,time@3 ASC]"
            - " UnionExec"
            - " RecordBatchesExec: batches_groups=1 batches=0 total_rows=0"
            - " ParquetExec: file_groups={1 group: [[4.parquet]]}, projection=[field, tag1, tag2, time]"
        "###
        );
    }
    // Mixed legacy (numeric) and deterministic (hash) partition IDs must
    // still be grouped and split correctly.
    #[test]
    fn test_different_partitions_with_and_without_hash_ids() {
        // Partition without hash ID in the catalog
        let legacy_partition_id = 1;
        let legacy_transition_partition_id =
            TransitionPartitionId::Deprecated(PartitionId::new(legacy_partition_id));
        // Partition with hash ID in the catalog
        let transition_partition_id =
            TransitionPartitionId::Deterministic(PartitionHashId::arbitrary_for_testing());
        let chunk1 = chunk(1).with_partition_id(legacy_transition_partition_id.clone());
        let chunk2 = chunk(2).with_partition_id(transition_partition_id.clone());
        let chunk3 = chunk(3)
            .with_dummy_parquet_file()
            .with_partition_id(legacy_transition_partition_id.clone());
        let chunk4 = chunk(4)
            .with_dummy_parquet_file()
            .with_partition_id(transition_partition_id.clone());
        let chunk5 = chunk(5)
            .with_dummy_parquet_file()
            .with_partition_id(legacy_transition_partition_id.clone());
        let chunk6 = chunk(6)
            .with_dummy_parquet_file()
            .with_partition_id(legacy_transition_partition_id.clone());
        let schema = chunk1.schema().clone();
        let plan = dedup_plan(schema, vec![chunk1, chunk2, chunk3, chunk4, chunk5, chunk6]);
        let opt = PartitionSplit;
        let mut config = ConfigOptions::default();
        config.execution.target_partitions = 2;
        insta::assert_yaml_snapshot!(
            OptimizationTest::new_with_config(plan, opt, &config),
            @r###"
        ---
        input:
          - " DeduplicateExec: [tag1@1 ASC,tag2@2 ASC,time@3 ASC]"
          - " UnionExec"
          - " RecordBatchesExec: batches_groups=2 batches=0 total_rows=0"
          - " ParquetExec: file_groups={2 groups: [[3.parquet, 5.parquet], [4.parquet, 6.parquet]]}, projection=[field, tag1, tag2, time]"
        output:
          Ok:
            - " UnionExec"
            - " DeduplicateExec: [tag1@1 ASC,tag2@2 ASC,time@3 ASC]"
            - " UnionExec"
            - " RecordBatchesExec: batches_groups=1 batches=0 total_rows=0"
            - " ParquetExec: file_groups={2 groups: [[3.parquet, 6.parquet], [5.parquet]]}, projection=[field, tag1, tag2, time]"
            - " DeduplicateExec: [tag1@1 ASC,tag2@2 ASC,time@3 ASC]"
            - " UnionExec"
            - " RecordBatchesExec: batches_groups=1 batches=0 total_rows=0"
            - " ParquetExec: file_groups={1 group: [[4.parquet]]}, projection=[field, tag1, tag2, time]"
        "###
        );
    }
    // Partition count above `max_dedup_partition_split`: rule must decline.
    #[test]
    fn test_max_split() {
        let chunk1 = chunk(1).with_partition(1);
        let chunk2 = chunk(2).with_partition(2);
        let chunk3 = chunk(3).with_partition(3);
        let schema = chunk1.schema().clone();
        let plan = dedup_plan(schema, vec![chunk1, chunk2, chunk3]);
        let opt = PartitionSplit;
        let mut config = ConfigOptions::default();
        config.extensions.insert(IoxConfigExt {
            max_dedup_partition_split: 2,
            ..Default::default()
        });
        insta::assert_yaml_snapshot!(
            OptimizationTest::new_with_config(plan, opt, &config),
            @r###"
        ---
        input:
          - " DeduplicateExec: [tag1@1 ASC,tag2@2 ASC,time@3 ASC]"
          - " UnionExec"
          - " RecordBatchesExec: batches_groups=3 batches=0 total_rows=0"
        output:
          Ok:
            - " DeduplicateExec: [tag1@1 ASC,tag2@2 ASC,time@3 ASC]"
            - " UnionExec"
            - " RecordBatchesExec: batches_groups=3 batches=0 total_rows=0"
        "###
        );
    }
}
|
extern crate pcap_file;
use pcap_file::{PcapReader, PcapWriter};
// Reference capture consumed by the round-trip tests below (1455 bytes,
// embedded at compile time).
static DATA: &'static[u8; 1455] = include_bytes!("test_in.pcap");
/// Reading the fixture must account for every byte: the 24-byte global
/// header plus, per packet, a 16-byte record header and its payload.
#[test]
fn read() {
    let reader = PcapReader::new(&DATA[..]).unwrap();
    // Start from the global header size and accumulate each record.
    let total = reader.fold(24, |acc, packet| {
        let packet = packet.unwrap();
        acc + 16 + packet.data.len()
    });
    assert_eq!(total, DATA.len());
}
/// Round-trip: reading every packet and writing it back with the same
/// global header must reproduce the input byte-for-byte.
#[test]
fn read_write() {
    let reader = PcapReader::new(&DATA[..]).unwrap();
    let header = reader.header;
    let mut writer = PcapWriter::with_header(header, Vec::new()).unwrap();
    for packet in reader {
        writer.write_packet(&packet.unwrap()).unwrap();
    }
    let out = writer.into_writer();
    assert_eq!(&DATA[..], &out[..]);
}
use std::collections::{HashMap, HashSet};
/// One program from the AoC 2017 day 7 input: a named tower, its own
/// weight, and the names of the towers standing on top of it (if any).
#[derive(Debug)]
struct Tower {
    name: String,
    weight: usize,
    // `None` for leaf towers, `Some` with at least one child otherwise.
    subtowers: Option<Vec<String>>,
}
fn main() {
    // Puzzle input is embedded at compile time.
    let input = include_str!("../input2.txt");
    println!("{}", day_7_part_1(input));
    day_7_part_2(input);
}
fn day_7_part_1(input: &str) -> String {
let towers = parse_towers(input);
let iter = towers.iter();
let iter2 = iter.clone().filter(|t| t.subtowers.is_some());
let mut iter = iter.filter(|t| t.subtowers.is_some()).peekable();
let mut set_of_subtowers = Vec::new();
for subtower_list in iter2.map(|t| t.subtowers.clone().unwrap()) {
for subtower in subtower_list {
set_of_subtowers.push(subtower);
}
}
while set_of_subtowers.contains(&iter.peek().unwrap().name) {
let _ = iter.next();
}
iter.next().unwrap().name.clone()
}
/// Part 2 (work in progress): compute the corrected weight of the one
/// unbalanced tower.
///
/// NOTE(review): this function is unfinished — it computes the total
/// subtree weights and prints them, but `weight` is never updated, so it
/// always returns 0.
fn day_7_part_2(input: &str) -> usize {
    let mut weight = 0;
    let mut towers = HashMap::new();
    let mut towers_total_weight = HashMap::new();
    let base_tower = day_7_part_1(input);
    let vec_towers = parse_towers(input);
    // Re-key the towers by name for O(1) lookup during traversal.
    for t in vec_towers {
        towers.insert(t.name, (t.weight, t.subtowers));
    }
    get_total_weights(&towers, &mut towers_total_weight, base_tower);
    println!("{:?}", towers_total_weight);
    weight
}
/// Find the corrected weight for the single unbalanced tower in the
/// subtree rooted at `start`.
///
/// `towers` maps each name to `(own weight, children)`;
/// `towers_total_weight` maps each name to its precomputed subtree total
/// (see `get_total_weights`). Returns `Some(corrected own weight)` when an
/// imbalance is found, `None` when the subtree is balanced.
///
/// Fix: the previous version was incomplete (a bare `if` with no condition
/// or body) and did not compile.
fn find_divergent_weight(
    towers: &HashMap<String, (usize, Option<Vec<String>>)>,
    towers_total_weight: &HashMap<String, usize>,
    start: String,
) -> Option<usize> {
    let (_, subtowers) = towers.get(&start)?;
    // A node can only be unbalanced if it carries at least two children.
    let children = match subtowers {
        Some(c) if c.len() >= 2 => c,
        _ => return None,
    };
    // Subtree totals of all children, in declaration order.
    let totals: Vec<usize> = children
        .iter()
        .map(|c| *towers_total_weight.get(c).unwrap())
        .collect();
    // Count how often each total occurs; a balanced node has one distinct total.
    let mut counts: HashMap<usize, usize> = HashMap::new();
    for &t in &totals {
        *counts.entry(t).or_insert(0) += 1;
    }
    if counts.len() < 2 {
        return None;
    }
    // The puzzle guarantees exactly one outlier among >= 3 children.
    let &odd = counts.iter().find(|&(_, &n)| n == 1).map(|(w, _)| w)?;
    let &expected = counts.iter().find(|&(_, &n)| n > 1).map(|(w, _)| w)?;
    let odd_child = children[totals.iter().position(|&t| t == odd)?].clone();
    // If the odd child is itself unbalanced deeper down, that is the real culprit.
    if let Some(fix) = find_divergent_weight(towers, towers_total_weight, odd_child.clone()) {
        return Some(fix);
    }
    // Otherwise this child's own weight must absorb the whole difference.
    // Signed arithmetic: the outlier may be lighter than the expected total.
    let own = towers.get(&odd_child).unwrap().0 as isize;
    Some((own + expected as isize - odd as isize) as usize)
}
/// Recursively compute the total weight (own weight plus all descendants)
/// of the subtree rooted at `start`, memoizing every node's total into
/// `towers_total_weight`. Returns the total for `start`.
fn get_total_weights(
    towers: &HashMap<String, (usize, Option<Vec<String>>)>,
    towers_total_weight: &mut HashMap<String, usize>,
    start: String,
) -> usize {
    let (own_weight, children) = towers.get(&start).unwrap();
    let total = match children {
        // Leaf: the subtree total is just the tower's own weight.
        None => *own_weight,
        // Inner node: own weight plus the totals of every child subtree.
        Some(children) => {
            let mut sum = *own_weight;
            for child in children {
                sum += get_total_weights(towers, towers_total_weight, child.clone());
            }
            sum
        }
    };
    towers_total_weight.insert(start, total);
    total
}
fn parse_towers(input: &str) -> Vec<Tower> {
let mut towers = Vec::new();
for line in input.lines() {
let mut parts = line.split_whitespace();
let name = parts.next().unwrap().to_string();
let weight = parts
.next()
.unwrap()
.trim_matches(|c| c == '(' || c == ')')
.parse()
.unwrap();
let mut subtowers = Vec::new();
while let Some(s) = parts.next() {
if s != "->" {
subtowers.push(s.trim_right_matches(',').to_string());
}
}
towers.push(Tower {
name: name,
weight: weight,
subtowers: if subtowers.len() >= 1 {
Some(subtowers)
} else {
None
},
})
}
towers
}
|
use failure::Error;
use flags::Flags;
use instruction::{Condition, Instruction, Operation};
use memory::{Load, Memory, Store, VideoMemory};
use registers::Registers;
use rom::Rom;
use self::Condition::*;
use self::Operation::*;
/// Virtual-machine CPU state: memory, register file, flags, the two
/// pointers, and the drawing state used by graphics instructions.
///
/// All fields start zeroed via `Default`; `Cpu::new` additionally copies a
/// ROM into the start of memory.
#[derive(Default)]
pub struct Cpu {
    memory: Memory,
    registers: Registers,
    flags: Flags,
    program_counter: u16,
    stack_pointer: u16,
    video_memory: VideoMemory,
    // Graphics state: background palette index and sprite dimensions.
    background: u8,
    sprite_height: u8,
    sprite_width: u8,
    // Whether subsequently drawn sprites are mirrored.
    flip_horizontal: bool,
    flip_vertical: bool,
}
impl Cpu {
    /// Build a CPU with the ROM contents copied to address 0 of memory.
    pub fn new(rom: &Rom) -> Cpu {
        let mut memory = Memory::new();
        memory.store(0usize, &rom.content);
        Cpu {
            memory,
            ..Default::default()
        }
    }
    /// Fetch, decode and execute one instruction.
    pub fn step(&mut self) -> Result<(), Error> {
        let instruction = self.fetch();
        // TODO: Use a checked add when incrementing the program counter.
        // Each instruction word is 4 bytes, and the PC is advanced *before*
        // execution so jumps can simply overwrite it.
        self.program_counter += 4;
        self.execute(instruction)
    }
    /// Read the instruction word at the current program counter.
    fn fetch(&self) -> Instruction {
        let data = self.memory.load(self.program_counter);
        Instruction::new(data)
    }
    /// Decode `instruction` and dispatch to the matching `op_*` handler.
    ///
    /// NOTE(review): the `match` has no catch-all arm, so adding a new
    /// `Operation` variant will (intentionally) fail to compile until it is
    /// handled here.
    fn execute(&mut self, instruction: Instruction) -> Result<(), Error> {
        let operation = instruction.operation()?;
        // println!("{:?} {:?}", instruction, operation);
        match operation {
            NOP => self.op_nop(),
            JMPI => self.op_jmpi(&instruction),
            LDIR => self.op_ldir(&instruction),
        };
        Ok(())
    }
    /// NOP: do nothing.
    fn op_nop(&mut self) {}
    /// JMPI: unconditional jump to the immediate address HHLL.
    fn op_jmpi(&mut self, instruction: &Instruction) {
        self.program_counter = instruction.hhll();
    }
    /// LDIR: load the immediate value HHLL into register X.
    fn op_ldir(&mut self, instruction: &Instruction) {
        self.registers[instruction.x()] = instruction.hhll();
    }
}
|
use std::fmt::{self, Debug, Formatter};
use super::{FreeBlock, FreeBlockRef};
/// This struct is similar in nature to `Block`, but is used
/// to store the size of the preceding data region when the block is free.
///
/// When the next neighboring block is freed, a check is performed to
/// see if it can be combined with its preceding block, the one containing
/// this footer, this combining operation is called "coalescing".
///
/// If the blocks can be coalesced, then the footer is used to get the size of this
/// block, so that the address of the block header can be calculated. That address
/// is then used to access the header and update it with new metadata
// The wrapped `usize` is the usable size, in bytes, of the block this
// footer terminates. `repr(transparent)` keeps it layout-identical to a
// bare `usize` in memory.
#[derive(Copy, Clone, PartialEq, Eq)]
#[repr(transparent)]
pub struct BlockFooter(usize);
impl BlockFooter {
    /// Create a footer recording `size` usable bytes for its block.
    #[inline]
    pub const fn new(size: usize) -> Self {
        Self(size)
    }
    /// Number of usable bytes in the block this footer terminates.
    #[allow(unused)]
    #[inline]
    pub fn usable_size(&self) -> usize {
        self.0
    }
    /// Recover a reference to the free block this footer belongs to by
    /// stepping `self.0` bytes backwards from the footer's own address.
    ///
    /// # Safety
    /// The footer must be embedded at the end of a live free block whose
    /// usable size is exactly `self.0`, so that the computed address is a
    /// valid, properly aligned `FreeBlock` header within the same
    /// allocation.
    #[allow(unused)]
    #[inline]
    pub unsafe fn to_block(&self) -> FreeBlockRef {
        let raw = self as *const _ as *const u8;
        // Unary negation instead of the old `(self.0 as isize) * -1`
        // (clippy: `neg_multiply`); the value is identical.
        let header_offset = -(self.0 as isize);
        FreeBlockRef::from_raw(raw.offset(header_offset) as *mut FreeBlock)
    }
}
impl Debug for BlockFooter {
    // Renders as `BlockFooter { size: N }`; `debug_struct` also supports
    // the pretty (`{:#?}`) form for free.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        f.debug_struct("BlockFooter")
            .field("size", &self.0)
            .finish()
    }
}
|
use cursive::theme::BaseColor::*;
use cursive::theme::Color::*;
use cursive::theme::PaletteColor::*;
use cursive::theme::*;
use crate::config::Config;
// Pull an optional color override out of the config's `theme` section,
// falling back to `$default` when the key is absent. Panics when a
// configured color string cannot be parsed.
//
// NOTE(review): `.expect(&format!(..))` builds the panic message even on
// the success path (clippy `expect_fun_call`); `unwrap_or_else` with
// `panic!` would avoid that allocation.
macro_rules! load_color {
    ( $cfg: expr, $member: ident, $default: expr ) => {
        $cfg.theme
            .as_ref()
            .and_then(|t| t.$member.clone())
            .map(|c| Color::parse(c.as_ref()).expect(&format!("Failed to parse color \"{}\"", c)))
            .unwrap_or($default)
    };
}
/// Build the cursive [`Theme`] from the user configuration, substituting a
/// hard-coded default for every color that is not configured.
pub fn load(cfg: &Config) -> Theme {
    let mut palette = Palette::default();
    let borders = BorderStyle::Simple;
    // Standard cursive palette slots.
    palette[Background] = load_color!(cfg, background, TerminalDefault);
    palette[View] = load_color!(cfg, background, TerminalDefault);
    palette[Primary] = load_color!(cfg, primary, TerminalDefault);
    palette[Secondary] = load_color!(cfg, secondary, Dark(Blue));
    palette[TitlePrimary] = load_color!(cfg, title, Dark(Red));
    palette[Tertiary] = load_color!(cfg, highlight, TerminalDefault);
    palette[Highlight] = load_color!(cfg, highlight_bg, Dark(Red));
    // Application-defined palette entries, looked up by name elsewhere.
    palette.set_color("playing", load_color!(cfg, playing, Dark(Blue)));
    palette.set_color(
        "playing_selected",
        load_color!(cfg, playing_selected, Light(Blue)),
    );
    palette.set_color("playing_bg", load_color!(cfg, playing_bg, TerminalDefault));
    palette.set_color("error", load_color!(cfg, error, TerminalDefault));
    palette.set_color("error_bg", load_color!(cfg, error_bg, Dark(Red)));
    palette.set_color(
        "statusbar_progress",
        load_color!(cfg, statusbar_progress, Dark(Blue)),
    );
    palette.set_color(
        "statusbar_progress_bg",
        load_color!(cfg, statusbar_progress_bg, Light(Black)),
    );
    palette.set_color("statusbar", load_color!(cfg, statusbar, Dark(Yellow)));
    palette.set_color(
        "statusbar_bg",
        load_color!(cfg, statusbar_bg, TerminalDefault),
    );
    palette.set_color("cmdline", load_color!(cfg, cmdline, TerminalDefault));
    palette.set_color("cmdline_bg", load_color!(cfg, cmdline_bg, TerminalDefault));
    Theme {
        shadow: false,
        palette,
        borders,
    }
}
|
// Nightly-only: the `llvm_asm!` macro is feature-gated.
#![feature(llvm_asm)]
// `extern "C"` gives the function a C ABI so it can be targeted from
// inline assembly.
extern "C" fn foo() { }
fn main() {
    unsafe {
        // SAFETY: calls `foo` via `callq`. The "s" constraint passes the
        // function as a symbol operand; "volatile" keeps the asm from being
        // optimized away.
        llvm_asm!("callq $0" :: "s"(foo) :: "volatile");
    }
}
|
use super::{Color, Context};
/// Fill the whole render target with `color`. `ctx` is unused here; it
/// exists to keep a uniform backend signature.
#[allow(unused_variables)]
pub fn clear(ctx: &mut Context, color: Color) {
    macroquad::prelude::clear_background(color);
}
/// Draw a filled axis-aligned rectangle with top-left corner (`x`, `y`)
/// and size `w` x `h`.
#[allow(unused_variables)]
pub fn draw_rectangle(ctx: &mut Context, x: f32, y: f32, w: f32, h: f32, color: Color) {
    macroquad::prelude::draw_rectangle(x, y, w, h, color)
}
/// Draw the outline of an axis-aligned rectangle with the given stroke
/// `thickness`.
#[allow(unused_variables)]
pub fn draw_rectangle_lines(
    ctx: &mut Context,
    x: f32,
    y: f32,
    w: f32,
    h: f32,
    thickness: f32,
    color: Color,
) {
    macroquad::prelude::draw_rectangle_lines(x, y, w, h, thickness, color)
}
/// Deprecated
///
/// Draw an axis-aligned line of length `len` starting at (`x`, `y`):
/// horizontal when `horizontal` is true, vertical otherwise. Prefer calling
/// [`draw_line`] directly.
pub fn draw_straight_line(
    ctx: &mut Context,
    x: f32,
    y: f32,
    len: f32,
    horizontal: bool,
    thickness: f32,
    color: Color,
) {
    match horizontal {
        // Bug fix: this branch previously passed `x` as the start
        // y-coordinate (`draw_line(ctx, x, x, x + len, y, ..)`), drawing a
        // skewed segment from (x, x) instead of a horizontal one from (x, y).
        true => draw_line(ctx, x, y, x + len, y, thickness, color),
        false => draw_line(ctx, x, y, x, y + len, thickness, color),
    }
}
/// Draw a line segment from (`x1`, `y1`) to (`x2`, `y2`) with the given
/// stroke `thickness`.
pub fn draw_line(
    _: &mut Context,
    x1: f32,
    y1: f32,
    x2: f32,
    y2: f32,
    thickness: f32,
    color: Color,
) {
    macroquad::prelude::draw_line(x1, y1, x2, y2, thickness, color)
}
/// Draw a filled circle of radius `r` centered at (`x`, `y`).
#[allow(unused_variables)]
pub fn draw_circle(_: &mut Context, x: f32, y: f32, r: f32, color: Color) {
    macroquad::prelude::draw_circle(x, y, r, color);
}
|
use audio_core::{Buf, Channel, Channels, ExactSizeBuf, ReadBuf};
/// Make a buffer into a read adapter that implements [ReadBuf].
///
/// # Examples
///
/// ```rust
/// use audio::Buf;
/// use audio::io;
///
/// let from = audio::interleaved![[1, 2, 3, 4]; 2];
/// let mut to = audio::interleaved![[0; 4]; 2];
///
/// let mut to = io::ReadWrite::empty(to);
///
/// io::copy_remaining(io::Read::new((&from).skip(2).limit(1)), &mut to);
/// io::copy_remaining(io::Read::new((&from).limit(1)), &mut to);
///
/// assert_eq!(to.as_ref().as_slice(), &[3, 3, 1, 1, 0, 0, 0, 0]);
/// ```
pub struct Read<B> {
    // The wrapped buffer.
    buf: B,
    // Number of frames still available for reading; counts down as the
    // reader advances.
    available: usize,
}
impl<B> Read<B> {
    /// Construct a new reading adapter.
    ///
    /// The constructed reader will be initialized so that the number of bytes
    /// available for reading are equal to what's reported by
    /// [ExactSizeBuf::frames].
    ///
    /// # Examples
    ///
    /// ```rust
    /// use audio::{ReadBuf, ExactSizeBuf};
    /// use audio::io;
    ///
    /// let buffer = audio::interleaved![[1, 2, 3, 4], [5, 6, 7, 8]];
    /// assert_eq!(buffer.frames(), 4);
    ///
    /// let buffer = io::Read::new(buffer);
    ///
    /// assert!(buffer.has_remaining());
    /// assert_eq!(buffer.remaining(), 4);
    /// ```
    #[inline]
    pub fn new(buf: B) -> Self
    where
        B: ExactSizeBuf,
    {
        // Start with the whole buffer readable.
        let available = buf.frames();
        Self { buf, available }
    }
    /// Construct a new reading adapter.
    ///
    /// The constructed reader will be initialized so that there are no frames
    /// available for reading.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use audio::{ReadBuf, ExactSizeBuf};
    /// use audio::io;
    ///
    /// let buffer = audio::interleaved![[1, 2, 3, 4], [5, 6, 7, 8]];
    /// assert_eq!(buffer.frames(), 4);
    ///
    /// let buffer = io::Read::empty(buffer);
    ///
    /// assert!(!buffer.has_remaining());
    /// assert_eq!(buffer.remaining(), 0);
    /// ```
    pub fn empty(buf: B) -> Self {
        Self { buf, available: 0 }
    }
    /// Access the underlying buffer.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use audio::Buf;
    /// use audio::{io, wrap};
    ///
    /// let from: audio::Interleaved<i16> = audio::interleaved![[1, 2, 3, 4]; 4];
    /// let mut from = io::Read::new(from);
    ///
    /// io::copy_remaining(&mut from, wrap::interleaved(&mut [0i16; 16][..], 4));
    ///
    /// assert_eq!(from.as_ref().channels(), 4);
    /// ```
    #[inline]
    pub fn as_ref(&self) -> &B {
        &self.buf
    }
    /// Access the underlying buffer mutably.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use audio::Buf;
    /// use audio::{io, wrap};
    ///
    /// let from: audio::Interleaved<i16> = audio::interleaved![[1, 2, 3, 4]; 4];
    /// let mut from = io::Read::new(from);
    ///
    /// io::copy_remaining(&mut from, wrap::interleaved(&mut [0i16; 16][..], 4));
    ///
    /// from.as_mut().resize_channels(2);
    ///
    /// assert_eq!(from.channels(), 2);
    /// ```
    #[inline]
    pub fn as_mut(&mut self) -> &mut B {
        &mut self.buf
    }
    /// Convert into the underlying buffer.
    ///
    /// # Examples
    ///
    /// ```rust
    /// use audio::Buf;
    /// use audio::{io, wrap};
    ///
    /// let from: audio::Interleaved<i16> = audio::interleaved![[1, 2, 3, 4]; 4];
    /// let mut from = io::Read::new(from);
    ///
    /// io::copy_remaining(&mut from, wrap::interleaved(&mut [0i16; 16][..], 4));
    ///
    /// let from = from.into_inner();
    ///
    /// assert_eq!(from.channels(), 4);
    /// ```
    #[inline]
    pub fn into_inner(self) -> B {
        self.buf
    }
    /// Set the number of frames read.
    ///
    /// This can be used to rewind the internal cursor to a previously written
    /// frame if needed. Or, if the underlying buffer has changed for some
    /// reason, like if it was written to through a call to [Read::as_mut].
    ///
    /// # Examples
    ///
    /// ```rust
    /// use audio::{Buf, Channels, ReadBuf};
    /// use audio::io;
    ///
    /// fn read_from_buf(mut read: impl Buf + Channels<i16> + ReadBuf) {
    ///     let mut out = audio::interleaved![[0; 4]; 2];
    ///     io::copy_remaining(read, io::Write::new(&mut out));
    /// }
    ///
    /// let mut buffer = io::Read::new(audio::interleaved![[1, 2, 3, 4], [5, 6, 7, 8]]);
    /// read_from_buf(&mut buffer);
    ///
    /// assert!(!buffer.has_remaining());
    ///
    /// buffer.set_read(0);
    ///
    /// assert!(buffer.has_remaining());
    /// ```
    #[inline]
    pub fn set_read(&mut self, read: usize)
    where
        B: ExactSizeBuf,
    {
        // Saturating: a `read` past the end simply marks the reader empty.
        self.available = self.buf.frames().saturating_sub(read);
    }
}
impl<B> ReadBuf for Read<B> {
    // Frames still available for reading.
    fn remaining(&self) -> usize {
        self.available
    }
    // Saturating: advancing past the end simply leaves nothing remaining.
    fn advance(&mut self, n: usize) {
        self.available = self.available.saturating_sub(n);
    }
}
impl<B> ExactSizeBuf for Read<B>
where
    B: ExactSizeBuf,
{
    fn frames(&self) -> usize {
        // NOTE(review): this reports the number of frames *consumed* so far
        // (total minus still-available), not the underlying buffer length —
        // confirm this is the intended contract for ExactSizeBuf.
        self.buf.frames().saturating_sub(self.available)
    }
}
// Structural queries are delegated unchanged to the wrapped buffer.
impl<B> Buf for Read<B>
where
    B: Buf,
{
    fn frames_hint(&self) -> Option<usize> {
        self.buf.frames_hint()
    }
    fn channels(&self) -> usize {
        self.buf.channels()
    }
}
impl<B, T> Channels<T> for Read<B>
where
    B: Channels<T>,
{
    fn channel(&self, channel: usize) -> Channel<'_, T> {
        // Expose only the unread tail of each channel.
        self.buf.channel(channel).tail(self.available)
    }
}
|
#![feature(proc_macro_hygiene, decl_macro)]
#[macro_use] extern crate rocket;
use rocket_contrib::templates::Template;
use std::collections::HashMap;
/// Render the landing page from the `index` template.
#[get("/")]
fn index() -> Template {
    let mut context = HashMap::<String, String>::new();
    // "welcome" is presumably referenced by the template — confirm against
    // templates/index.
    context.insert("welcome".to_string(), "Welcome to Rocket !".to_string());
    Template::render("index", &context)
}
use rocket_contrib::serve::StaticFiles;
fn main() {
    // Mount the dynamic route and static assets on the same root; the
    // template fairing must be attached for `Template::render` to work.
    rocket::ignite()
        .mount("/", routes![index])
        .mount("/", StaticFiles::from("./public"))
        .attach(Template::fairing())
        .launch();
}
|
#![allow(dead_code)]
// Single-episode entry point: delegate straight to the module's `run`.
mod ep32;
fn main() {
    ep32::run();
}
|
use std::collections::HashMap;
use std::io::BufReader;
use std::io::prelude::*;
use std::net::{TcpListener, TcpStream};
use std::str;
use std::thread;
use http::request::Request;
use http::response::Response;
/// HTTP server
#[derive(Clone)]
pub struct Server {
    // Human-readable server name.
    pub name: String,
    // Handlers run in registration order for every request.
    pub handlers: Vec<fn(Request, Response) -> Response>,
    // TODO: All of that could be in `serve_static` handler
    // Filesystem root for static-file serving.
    pub root_path: String,
    // Whether the TRACE method is permitted.
    pub allow_trace: bool,
    // File names tried, in order, when a directory is requested.
    pub directory_indexes: Vec<String>,
    // File extension -> Content-Type value.
    pub content_types: HashMap<String, String>
}
impl Server {
    /// Create a server with default configuration: no handlers, serving
    /// from the current directory, with HTML/plain-text content types
    /// preregistered.
    pub fn new() -> Server {
        let mut content_types = HashMap::new();
        content_types.insert("html".into(), "text/html".into());
        content_types.insert("txt".into(), "text/plain".into());
        Server {
            name: "Simpleton HTTP Server".into(),
            handlers: Vec::new(),
            root_path: ".".into(),
            allow_trace: false,
            directory_indexes: vec!["index.htm".into(), "index.html".into()],
            content_types: content_types
        }
    }
    /// Register a handler; handlers run in registration order.
    pub fn add_handler(&mut self, f: fn(Request, Response) -> Response) {
        self.handlers.push(f);
    }
    /// Bind to `binding` and serve connections forever, one thread per
    /// client. Bind or accept errors are printed and terminate the loop.
    pub fn listen(self, binding: &str) {
        let listener = match TcpListener::bind(binding) {
            Ok(l) => l,
            Err(e) => {
                println!("Error: {}", e);
                return
            }
        };
        for incoming in listener.incoming() {
            let stream = match incoming {
                Ok(s) => s,
                Err(e) => {
                    println!("Error: {}", e);
                    return
                }
            };
            // Each connection gets its own thread and its own copy of the
            // server configuration.
            let server = self.clone();
            thread::spawn(move || handle_client(stream, server));
        }
        drop(listener);
    }
}
/// Serve one connection: read the request head, parse it, then run every
/// registered handler.
fn handle_client(stream: TcpStream, server: Server) {
    // Read raw request message: header lines up to the first blank line.
    // NOTE(review): any request body is left unread on the socket.
    let mut lines = vec![];
    let mut reader = BufReader::new(&stream);
    for line in reader.by_ref().lines() {
        match line {
            // I/O error mid-request: silently drop the connection.
            Err(_) => return,
            Ok(line) => {
                if line == "" {
                    break
                } else {
                    lines.push(line)
                }
            }
        }
    }
    let request_message = lines.join("\n");
    // Create Request message
    let mut req = match Request::from_str(&request_message) {
        Err(_) => return,
        Ok(req) => req
    };
    // Set the IP address of the client in Request
    let ip = match stream.peer_addr() {
        Err(_) => return,
        Ok(peer_addr) => peer_addr.ip()
    };
    req.ip = ip.to_string();
    // Create Response message
    let mut res = Response::new(server.clone());
    // Call all handlers
    // NOTE(review): the response is written to the socket once per handler,
    // so with more than one handler the client receives multiple responses —
    // confirm whether only the final response should be written.
    for handler in &server.handlers {
        match stream.try_clone() {
            Ok(stream) => {
                res = handler(req.clone(), res.clone());
                res.write(&stream);
            },
            Err(e) => { panic!("{}", e) }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use http::request::Request;
    use http::response::Response;
    /// Minimal handler used as a registration fixture.
    // Fix: this was `#[warn(unused_variables)]`, which merely (re-)enables
    // the lint; `allow` is what silences the warning for the unused `req`.
    #[allow(unused_variables)]
    fn custom_handler(req: Request, mut res: Response) -> Response {
        res.send("Hello, World!".as_bytes());
        res
    }
    // A fresh server starts with no handlers registered.
    #[test]
    fn test_new() {
        let server = Server::new();
        assert!(server.handlers.is_empty());
    }
    // Registering a handler grows the handler list by one.
    #[test]
    fn test_add_handler() {
        let mut server = Server::new();
        assert_eq!(server.handlers.len(), 0);
        server.add_handler(custom_handler);
        assert_eq!(server.handlers.len(), 1);
    }
}
|
//! Contains the `ColumnIndex`, `Row`, and `FromRow` traits.
use crate::database::Database;
use crate::decode::Decode;
use crate::types::{Type, TypeInfo};
use crate::value::{HasRawValue, RawValue};
use serde::de::DeserializeOwned;
/// A type that can be used to index into a [`Row`].
///
/// The [`get`] and [`try_get`] methods of [`Row`] accept any type that implements `ColumnIndex`.
/// This trait is implemented for strings which are used to look up a column by name, and for
/// `usize` which is used as a positional index into the row.
///
/// This trait is sealed and cannot be implemented for types outside of rbatis_core.
///
/// [`Row`]: trait.Row.html
/// [`get`]: trait.Row.html#method.get
/// [`try_get`]: trait.Row.html#method.try_get
pub trait ColumnIndex<'c, R>
where
    Self: private_column_index::Sealed,
    R: Row<'c> + ?Sized,
{
    /// Returns a valid positional index into the row, [`ColumnIndexOutOfBounds`], or,
    /// [`ColumnNotFound`].
    ///
    /// [`ColumnNotFound`]: ../enum.Error.html#variant.ColumnNotFound
    /// [`ColumnIndexOutOfBounds`]: ../enum.Error.html#variant.ColumnIndexOutOfBounds
    fn index(&self, row: &R) -> crate::Result<usize>;
}
// Forwarding impl: a reference to a valid index is itself a valid index,
// so callers may pass `&"name"` or `&0` without dereferencing.
impl<'c, R, I> ColumnIndex<'c, R> for &'_ I
where
    R: Row<'c>,
    I: ColumnIndex<'c, R> + ?Sized,
{
    #[inline]
    fn index(&self, row: &R) -> crate::Result<usize> {
        (**self).index(row)
    }
}
// Prevent users from implementing the `ColumnIndex` trait.
mod private_column_index {
    pub trait Sealed {}

    impl Sealed for usize {} // positional index
    impl Sealed for str {} // by-name index
    impl<T> Sealed for &'_ T where T: Sealed + ?Sized {} // references to either
}
/// Represents a single row from the database.
///
/// Applications should not generally need to use this trait. Values of this trait are only
/// encountered when manually implementing [`FromRow`] (as opposed to deriving) or iterating
/// a [`Cursor`] (returned from [`Query::fetch`]).
///
/// This trait is sealed and cannot be implemented for types outside of rbatis_core.
///
/// [`FromRow`]: crate::row::FromRow
/// [`Cursor`]: crate::cursor::Cursor
/// [`Query::fetch`]: crate::query::Query::fetch
pub trait Row<'c>
where
    Self: private_row::Sealed + Unpin + Send + Sync,
{
    /// The `Database` this `Row` is implemented for.
    type Database: Database;

    /// Returns `true` if this row has no columns.
    #[inline]
    fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Returns the number of columns in this row.
    fn len(&self) -> usize;

    /// Index into the database row and decode a single value.
    ///
    /// A string index can be used to access a column by name and a `usize` index
    /// can be used to access a column by position.
    ///
    /// ```rust,ignore
    /// # let mut cursor = rbatis_core::query("SELECT id, name FROM users")
    /// #     .fetch(&mut conn);
    /// #
    /// # let row = cursor.next().await?.unwrap();
    /// #
    /// let id: i32 = row.get("id"); // a column named "id"
    /// let name: &str = row.get(1); // the second column in the result
    /// ```
    ///
    /// # Panics
    /// Panics if the column does not exist or its value cannot be decoded into the requested type.
    /// See [`try_get`](#method.try_get) for a non-panicking version.
    #[inline]
    fn get<T, I>(&self, index: I) -> T
    where
        T: Type<Self::Database>,
        I: ColumnIndex<'c, Self>,
        T: Decode<'c, Self::Database>,
    {
        self.try_get::<T, I>(index).unwrap()
    }

    /// Index into the database row and decode a single value.
    ///
    /// See [`try_get_unchecked`](#method.try_get_unchecked).
    ///
    /// # Panics
    /// Panics if the value cannot be decoded into the requested type.
    #[inline]
    fn get_unchecked<T, I>(&self, index: I) -> T
    where
        T: Type<Self::Database>,
        I: ColumnIndex<'c, Self>,
        T: Decode<'c, Self::Database>,
    {
        self.try_get_unchecked::<T, I>(index).unwrap()
    }

    /// Index into the database row and decode a single value.
    ///
    /// A string index can be used to access a column by name and a `usize` index
    /// can be used to access a column by position.
    ///
    /// ```rust,ignore
    /// # let mut cursor = rbatis_core::query("SELECT id, name FROM users")
    /// #     .fetch(&mut conn);
    /// #
    /// # let row = cursor.next().await?.unwrap();
    /// #
    /// let id: i32 = row.try_get("id")?; // a column named "id"
    /// let name: &str = row.try_get(1)?; // the second column in the result
    /// ```
    ///
    /// # Errors
    /// * [`ColumnNotFound`] if the column by the given name was not found.
    /// * [`ColumnIndexOutOfBounds`] if the `usize` index was greater than the number of columns in the row.
    /// * [`Decode`] if the value could not be decoded into the requested type.
    ///
    /// [`Decode`]: crate::Error::Decode
    /// [`ColumnNotFound`]: crate::Error::ColumnNotFound
    /// [`ColumnIndexOutOfBounds`]: crate::Error::ColumnIndexOutOfBounds
    fn try_get<T, I>(&self, index: I) -> crate::Result<T>
    where
        T: Type<Self::Database>,
        I: ColumnIndex<'c, Self>,
        T: Decode<'c, Self::Database>,
    {
        let value = self.try_get_raw(index)?;

        if let Some(expected_ty) = value.type_info() {
            // NOTE: If there is no type, the value is NULL. This is fine. If the user tries
            //       to get this into a non-Option we catch that elsewhere and report as
            //       UnexpectedNullError.
            if !expected_ty.compatible(&T::type_info()) {
                return Err(crate::Error::mismatched_types::<Self::Database, T>(
                    expected_ty,
                ));
            }
        }

        T::decode(value)
    }

    /// Index into the database row and decode the raw value as JSON into `T`
    /// via `serde_json`.
    ///
    /// # Errors
    /// Fails if the raw value cannot be converted to JSON or if the JSON value
    /// cannot be deserialized into `T`.
    #[inline]
    fn json_decode<T, I>(&self, index: I) -> crate::Result<T>
    where
        I: ColumnIndex<'c, Self>,
        T: DeserializeOwned,
    {
        let value = self.try_get_raw(index)?;
        let v = value.try_to_json()?;
        // Map the serde error into the crate's decode error instead of an
        // open-coded match with explicit returns.
        serde_json::from_value(v).map_err(|e| {
            decode_err!(
                "unexpected value {:?} for serde_json::from_value",
                e.to_string()
            )
        })
    }

    /// Index into the database row and decode a single value.
    ///
    /// Unlike [`try_get`](#method.try_get), this method does not check that the type
    /// being returned from the database is compatible with the Rust type and just blindly tries
    /// to decode the value. An example of where this could be useful is decoding a Postgres
    /// enumeration as a Rust string (instead of deriving a new Rust enum).
    #[inline]
    fn try_get_unchecked<T, I>(&self, index: I) -> crate::Result<T>
    where
        T: Type<Self::Database>,
        I: ColumnIndex<'c, Self>,
        T: Decode<'c, Self::Database>,
    {
        self.try_get_raw(index).and_then(T::decode)
    }

    #[doc(hidden)]
    fn try_get_raw<I>(
        &self,
        index: I,
    ) -> crate::Result<<Self::Database as HasRawValue<'c>>::RawValue>
    where
        I: ColumnIndex<'c, Self>;
}
// Prevent users from implementing the `Row` trait.
pub(crate) mod private_row {
    /// Sealing marker: implemented only by this crate's row types.
    pub trait Sealed {}
}
/// Associate [`Database`] with a [`Row`] of a generic lifetime.
///
/// ---
///
/// The upcoming Rust feature, [Generic Associated Types], should obviate
/// the need for this trait.
///
/// [Generic Associated Types]: https://www.google.com/search?q=generic+associated+types+rust&oq=generic+associated+types+rust&aqs=chrome..69i57j0l5.3327j0j7&sourceid=chrome&ie=UTF-8
pub trait HasRow<'c> {
    /// The `Database` this family of rows belongs to.
    type Database: Database;

    /// The concrete `Row` implementation for this database.
    type Row: Row<'c, Database = Self::Database>;
}
/// A record that can be built from a row returned by the database.
///
/// In order to use [`query_as`] the output type must implement `FromRow`.
///
/// # Deriving
/// This trait can be automatically derived by rbatis_core for any struct. The generated implementation
/// will consist of a sequence of calls to [`Row::try_get`] using the name from each
/// struct field.
///
/// ```rust,ignore
/// #[derive(rbatis_core::FromRow)]
/// struct User {
/// id: i32,
/// name: String,
/// }
/// ```
///
/// [`query_as`]: crate::query_as
/// [`Row::try_get`]: crate::row::Row::try_get
pub trait FromRow<'c, R>
where
    Self: Sized,
    R: Row<'c>,
{
    /// Builds a value of `Self` from a borrowed database row.
    #[allow(missing_docs)]
    fn from_row(row: &R) -> crate::Result<Self>;
}
// Macros to help unify the internal implementations as a good chunk
// is very similar
#[allow(unused_macros)]
macro_rules! impl_from_row_for_tuple {
    // `$db`: database type; `$r`: concrete row type; each `($idx) -> $T`
    // pair maps tuple position `$idx` to type parameter `$T`.
    ($db:ident, $r:ident; $( ($idx:tt) -> $T:ident );+;) => {
        impl<'c, $($T,)+> crate::row::FromRow<'c, $r<'c>> for ($($T,)+)
        where
            $($T: 'c,)+
            $($T: crate::types::Type<$db>,)+
            $($T: crate::decode::Decode<'c, $db>,)+
        {
            #[inline]
            fn from_row(row: &$r<'c>) -> crate::Result<Self> {
                use crate::row::Row;

                // Decode each tuple element positionally with `try_get`.
                Ok(($(row.try_get($idx as usize)?,)+))
            }
        }
    };
}
#[allow(unused_macros)]
macro_rules! impl_from_row_for_tuples {
    // Instantiates `impl_from_row_for_tuple!` for tuple arities 1 through 9.
    ($db:ident, $r:ident) => {
        impl_from_row_for_tuple!($db, $r;
            (0) -> T1;
        );

        impl_from_row_for_tuple!($db, $r;
            (0) -> T1;
            (1) -> T2;
        );

        impl_from_row_for_tuple!($db, $r;
            (0) -> T1;
            (1) -> T2;
            (2) -> T3;
        );

        impl_from_row_for_tuple!($db, $r;
            (0) -> T1;
            (1) -> T2;
            (2) -> T3;
            (3) -> T4;
        );

        impl_from_row_for_tuple!($db, $r;
            (0) -> T1;
            (1) -> T2;
            (2) -> T3;
            (3) -> T4;
            (4) -> T5;
        );

        impl_from_row_for_tuple!($db, $r;
            (0) -> T1;
            (1) -> T2;
            (2) -> T3;
            (3) -> T4;
            (4) -> T5;
            (5) -> T6;
        );

        impl_from_row_for_tuple!($db, $r;
            (0) -> T1;
            (1) -> T2;
            (2) -> T3;
            (3) -> T4;
            (4) -> T5;
            (5) -> T6;
            (6) -> T7;
        );

        impl_from_row_for_tuple!($db, $r;
            (0) -> T1;
            (1) -> T2;
            (2) -> T3;
            (3) -> T4;
            (4) -> T5;
            (5) -> T6;
            (6) -> T7;
            (7) -> T8;
        );

        impl_from_row_for_tuple!($db, $r;
            (0) -> T1;
            (1) -> T2;
            (2) -> T3;
            (3) -> T4;
            (4) -> T5;
            (5) -> T6;
            (6) -> T7;
            (7) -> T8;
            (8) -> T9;
        );
    };
}
#[allow(unused_macros)]
macro_rules! impl_map_row_for_row {
    // For a concrete row type `$R` of database `$DB`, lets any closure
    // `FnMut($R) -> O` (or `-> crate::Result<O>`) act as a row mapper.
    ($DB:ident, $R:ident) => {
        impl<O: Unpin, F> crate::query::MapRow<$DB> for F
        where
            F: for<'c> FnMut($R<'c>) -> O,
        {
            type Output = O;

            fn map_row(&mut self, row: $R) -> O {
                (self)(row)
            }
        }

        impl<O: Unpin, F> crate::query::TryMapRow<$DB> for F
        where
            F: for<'c> FnMut($R<'c>) -> crate::Result<O>,
        {
            type Output = O;

            fn try_map_row(&mut self, row: $R) -> crate::Result<O> {
                (self)(row)
            }
        }
    };
}
#[allow(unused_macros)]
macro_rules! impl_from_row_for_row {
    // Identity impl: a row "decodes" into itself.
    //
    // NOTE(review): this generates `fn from_row(row: $R<'c>)` (by value),
    // while the `FromRow` trait above declares `fn from_row(row: &R)`
    // (by reference). As written the generated impl would not match the
    // trait's method signature — confirm which signature is current before
    // invoking this macro.
    ($R:ident) => {
        impl<'c> crate::row::FromRow<'c, $R<'c>> for $R<'c> {
            #[inline]
            fn from_row(row: $R<'c>) -> crate::Result<Self> {
                Ok(row)
            }
        }
    };
}
|
use std::io::Read;
/// Tries every sign assignment for `digits[1..]` (bit `j` of the mask
/// negates `digits[j + 1]`) and returns the first formula, in mask order,
/// whose signed sum of all digits equals 7 — e.g. `"1+2+2+2=7"`.
///
/// Masks are enumerated in increasing order, so the answer matches the
/// original hard-coded `2^3` enumeration for 4-digit inputs while also
/// working for any number of digits.
fn solve(digits: &[i32]) -> Option<String> {
    let rest = &digits[1..];
    for mask in 0..(1usize << rest.len()) {
        // Apply the sign pattern encoded by `mask`.
        let signed: Vec<i32> = rest
            .iter()
            .enumerate()
            .map(|(j, &d)| if (mask >> j) & 1 == 1 { -d } else { d })
            .collect();
        if digits[0] + signed.iter().sum::<i32>() == 7 {
            // Negative terms carry their own '-'; positive terms need a '+'.
            let mut formula = digits[0].to_string();
            for term in signed {
                if term >= 0 {
                    formula.push('+');
                }
                formula.push_str(&term.to_string());
            }
            formula.push_str("=7");
            return Some(formula);
        }
    }
    None
}

fn main() {
    let mut buf = String::new();
    // Read all of stdin; the first whitespace-separated token is the digit string.
    std::io::stdin().read_to_string(&mut buf).unwrap();
    let s = buf.split_whitespace().next().unwrap();
    let digits: Vec<i32> = s.chars().map(|c| c.to_digit(10).unwrap() as i32).collect();
    if let Some(formula) = solve(&digits) {
        println!("{}", formula);
    }
}
|
use crate::error::ServiceError;
use crate::models::user::User;
use crate::Pool;
use actix_identity::Identity;
use actix_web::{error::BlockingError, web, HttpResponse};
use diesel::prelude::*;
use serde::{Deserialize, Serialize};
/// Credentials received in the login request body.
#[derive(Deserialize)]
pub struct AuthData {
    pub username: String,
    pub password: String,
}
/// User view stripped of credential fields; this is what gets serialized
/// into the session identity cookie.
#[derive(Serialize)]
pub struct SlimUser {
    pub username: String,
}
impl From<User> for SlimUser {
fn from(user: User) -> Self {
SlimUser {
username: user.username,
}
}
}
/// Returns the serialized identity of the current session, or
/// `Unauthorized` when no identity cookie is present.
pub async fn get(id: Identity) -> Result<HttpResponse, ServiceError> {
    match id.identity() {
        Some(user) => Ok(HttpResponse::Ok().json(user)),
        None => Err(ServiceError::Unauthorized),
    }
}
pub async fn login(
auth_data: web::Json<AuthData>,
id: Identity,
pool: web::Data<Pool>,
) -> Result<HttpResponse, ServiceError> {
let res = web::block(move || query(auth_data.into_inner(), pool)).await;
match res {
Ok(user) => {
id.remember(serde_json::to_string(&user).unwrap());
Ok(HttpResponse::Ok().finish())
}
Err(err) => match err {
BlockingError::Error(unauthorized_error) => Err(unauthorized_error),
BlockingError::Canceled => Err(ServiceError::InternalServerError),
},
}
}
/// Clears the session identity; `Unauthorized` when no session exists.
pub async fn logout(id: Identity) -> Result<HttpResponse, ServiceError> {
    // Idiomatic presence check instead of `if let Some(_) = ...`.
    if id.identity().is_some() {
        id.forget();
        Ok(HttpResponse::Ok().finish())
    } else {
        Err(ServiceError::Unauthorized)
    }
}
/// Recomputes the argon2 hash of `password` with the stored `salt` and the
/// crate's default config, then compares the encoded result against the
/// stored `hash` string.
///
/// NOTE(review): the comparison is plain string equality (not constant
/// time), and because `Config::default()` is hard-coded, hashes created
/// with different argon2 parameters would never match. If `hash` is a full
/// encoded argon2 string, `argon2::verify_encoded` would be the more robust
/// primitive — confirm the stored hash format before switching.
fn verify(password: &str, salt: &str, hash: &str) -> bool {
    argon2::hash_encoded(
        password.as_bytes(),
        salt.as_bytes(),
        &argon2::Config::default(),
    )
    .unwrap()
    .as_str()
        == hash
}
/// Loads the user matching `auth_data.username` and checks the password.
///
/// Returns the slim user on success; `Unauthorized` for an unknown user or
/// a bad password; `InternalServerError` if no pooled connection could be
/// checked out. Runs on a blocking worker (see `login`), so synchronous
/// diesel calls are fine here.
fn query(auth_data: AuthData, pool: web::Data<Pool>) -> Result<SlimUser, ServiceError> {
    use crate::schema::api_user::dsl::{api_user, username};

    // A pool checkout can fail (exhausted / timed out); surface that as an
    // error instead of panicking the blocking worker thread.
    let conn = &pool.get().map_err(|_| ServiceError::InternalServerError)?;
    let mut items = api_user
        .filter(username.eq(&auth_data.username))
        .load::<User>(conn)?;
    // Take the (presumably unique) match, if any.
    if let Some(user) = items.pop() {
        if verify(&auth_data.password, &user.salt, &user.hash) {
            return Ok(user.into());
        }
    }
    Err(ServiceError::Unauthorized)
}
|
#![deny(rust_2018_compatibility, rust_2018_idioms)]
#[macro_use]
extern crate gfx;
// Reexport modules from gfx_voxel while stuff is moving
// from Hematite to the library.
pub use gfx_voxel::{array, cube};
use std::cmp::max;
use std::f32::consts::PI;
use std::fs::File;
use std::path::{Path, PathBuf};
use std::time::Instant;
use crate::array::*;
use crate::shader::Renderer;
use docopt::Docopt;
use flate2::read::GzDecoder;
use gfx::traits::Device;
use glutin_window::*;
use piston::event_loop::{EventLoop, EventSettings, Events};
use piston::input::{AfterRenderEvent, MouseRelativeEvent, PressEvent, RenderEvent, UpdateEvent};
use piston::window::{AdvancedWindow, OpenGLWindow, Size, Window, WindowSettings};
use vecmath::{vec3_add, vec3_normalized, vec3_scale};
pub mod chunk;
pub mod minecraft;
pub mod shader;
use crate::minecraft::biome::Biomes;
use crate::minecraft::block_state::BlockStates;
// docopt derives the CLI grammar from this string, so the `Args` struct
// below must stay in sync with <world>, --path and --mcversion.
static USAGE: &str = "
hematite, Minecraft made in Rust!
Usage:
  hematite [options] <world>
Options:
  -p, --path            Fully qualified path for world folder.
  --mcversion=<version> Minecraft version [default: 1.8.8].
";
/// Command-line arguments decoded by docopt from `USAGE`.
#[derive(RustcDecodable)]
struct Args {
    /// `<world>`: world name, or a full path when `--path` is given.
    arg_world: String,
    /// `-p, --path`: treat `arg_world` as a fully qualified path.
    flag_path: bool,
    /// `--mcversion`: Minecraft asset version to fetch.
    flag_mcversion: String,
}
/// Builds typed color (sRGBA8) and depth/stencil render-target views for the
/// window's main framebuffer from its raw dimensions.
fn create_main_targets(
    dim: gfx::texture::Dimensions,
) -> (
    gfx::handle::RenderTargetView<gfx_device_gl::Resources, gfx::format::Srgba8>,
    gfx::handle::DepthStencilView<gfx_device_gl::Resources, gfx::format::DepthStencil>,
) {
    use gfx::format::{DepthStencil, Format, Formatted, Srgba8};
    use gfx_core::memory::Typed;

    let color_format: Format = <Srgba8 as Formatted>::get_format();
    let depth_format: Format = <DepthStencil as Formatted>::get_format();
    // Create the raw targets, then wrap them in their typed handles.
    let (output_color, output_stencil) =
        gfx_device_gl::create_main_targets_raw(dim, color_format.0, depth_format.0);
    let output_color = Typed::new(output_color);
    let output_stencil = Typed::new(output_stencil);
    (output_color, output_stencil)
}
fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|dopt| dopt.decode())
.unwrap_or_else(|e| e.exit());
// Automagically pull MC assets
minecraft::fetch_assets(&args.flag_mcversion);
// Automagically expand path if world is located at
// $MINECRAFT_ROOT/saves/<world_name>
let world = if args.flag_path {
PathBuf::from(&args.arg_world)
} else {
let mut mc_path = minecraft::vanilla_root_path();
mc_path.push("saves");
mc_path.push(args.arg_world);
mc_path
};
let file_name = world.join("level.dat");
let level_reader = GzDecoder::new(File::open(file_name).unwrap());
let level = minecraft::nbt::Nbt::from_reader(level_reader).unwrap();
println!("{:?}", level);
let player_pos: [f32; 3] = Array::from_iter(
level["Data"]["Player"]["Pos"]
.as_double_list()
.unwrap()
.iter()
.map(|&x| x as f32),
);
let player_chunk = [player_pos.x(), player_pos.z()].map(|x| (x / 16.0).floor() as i32);
let player_rot = level["Data"]["Player"]["Rotation"].as_float_list().unwrap();
let player_yaw = player_rot[0];
let player_pitch = player_rot[1];
let regions = player_chunk.map(|x| x >> 5);
let region_file = world.join(format!("region/r.{}.{}.mca", regions[0], regions[1]));
let region = minecraft::region::Region::open(®ion_file).unwrap();
let loading_title = format!(
"Hematite loading... - {}",
world.file_name().unwrap().to_str().unwrap()
);
let mut window: GlutinWindow = WindowSettings::new(loading_title, [854, 480])
.fullscreen(false)
.exit_on_esc(true)
.samples(0)
.vsync(false)
.graphics_api(shader_version::opengl::OpenGL::V3_2)
.build()
.unwrap();
let (mut device, mut factory) =
gfx_device_gl::create(|s| window.get_proc_address(s) as *const _);
let Size {
width: w,
height: h,
} = window.size();
let (target_view, depth_view) = create_main_targets((w as u16, h as u16, 1, (0_u8).into()));
let assets = Path::new("./assets");
// Load biomes.
let biomes = Biomes::load(assets);
// Load block state definitions and models.
let block_states = BlockStates::load(assets, &mut factory);
let encoder = factory.create_command_buffer().into();
let mut renderer = Renderer::new(
factory,
encoder,
target_view,
depth_view,
block_states.texture.surface.clone(),
);
let mut chunk_manager = chunk::ChunkManager::new();
println!("Started loading chunks...");
let c_bases = player_chunk.map(|x| max(0, (x & 0x1f) - 8) as u8);
for cz in c_bases[1]..c_bases[1] + 16 {
for cx in c_bases[0]..c_bases[0] + 16 {
if let Some(column) = region.get_chunk_column(cx, cz) {
let (cx, cz) = (cx as i32 + regions[0] * 32, cz as i32 + regions[1] * 32);
chunk_manager.add_chunk_column(cx, cz, column)
}
}
}
println!("Finished loading chunks.");
let projection_mat = camera_controllers::CameraPerspective {
fov: 70.0,
near_clip: 0.1,
far_clip: 1000.0,
aspect_ratio: {
let Size {
width: w,
height: h,
} = window.size();
(w as f32) / (h as f32)
},
}
.projection();
renderer.set_projection(projection_mat);
let mut first_person_settings = camera_controllers::FirstPersonSettings::keyboard_wasd();
first_person_settings.mouse_sensitivity_horizontal = 0.5;
first_person_settings.mouse_sensitivity_vertical = 0.5;
first_person_settings.speed_horizontal = 8.0;
first_person_settings.speed_vertical = 4.0;
let mut first_person = camera_controllers::FirstPerson::new(player_pos, first_person_settings);
first_person.yaw = PI - player_yaw / 180.0 * PI;
first_person.pitch = player_pitch / 180.0 * PI;
let mut fps_counter = fps_counter::FPSCounter::new();
let mut pending_chunks = vec![];
chunk_manager.each_chunk_and_neighbors(|coords, buffer, chunks, column_biomes| {
pending_chunks.push((coords, buffer, chunks, column_biomes));
});
let mut capture_cursor = false;
println!("Press C to capture mouse");
let mut staging_buffer = vec![];
let mut events = Events::new(EventSettings::new().ups(120).max_fps(10_000));
while let Some(e) = events.next(&mut window) {
use piston::input::Button::Keyboard;
use piston::input::Key;
if e.render_args().is_some() {
// Apply the same y/z camera offset vanilla minecraft has.
let mut camera = first_person.camera(0.0);
camera.position[1] += 1.62;
let mut xz_forward = camera.forward;
xz_forward[1] = 0.0;
xz_forward = vec3_normalized(xz_forward);
camera.position = vec3_add(camera.position, vec3_scale(xz_forward, 0.1));
let view_mat = camera.orthogonal();
renderer.set_view(view_mat);
renderer.clear();
let mut num_chunks: usize = 0;
let mut num_sorted_chunks: usize = 0;
let mut num_total_chunks: usize = 0;
let start_time = Instant::now();
chunk_manager.each_chunk(|cx, cy, cz, _, buffer| {
if let Some(buffer) = buffer.borrow_mut().as_mut() {
num_total_chunks += 1;
let inf = f32::INFINITY;
let mut bb_min = [inf, inf, inf];
let mut bb_max = [-inf, -inf, -inf];
let xyz = [cx, cy, cz].map(|x| x as f32 * 16.0);
for &dx in [0.0, 16.0].iter() {
for &dy in [0.0, 16.0].iter() {
for &dz in [0.0, 16.0].iter() {
use vecmath::col_mat4_transform;
let v = vec3_add(xyz, [dx, dy, dz]);
let xyzw = col_mat4_transform(view_mat, [v[0], v[1], v[2], 1.0]);
let v = col_mat4_transform(projection_mat, xyzw);
let xyz = vec3_scale([v[0], v[1], v[2]], 1.0 / v[3]);
bb_min = Array::from_fn(|i| bb_min[i].min(xyz[i]));
bb_max = Array::from_fn(|i| bb_max[i].max(xyz[i]));
}
}
}
let cull_bits: [bool; 3] = Array::from_fn(|i| {
let (min, max) = (bb_min[i], bb_max[i]);
min.signum() == max.signum() && min.abs().min(max.abs()) >= 1.0
});
if !cull_bits.iter().any(|&cull| cull) {
renderer.render(buffer);
num_chunks += 1;
if bb_min[0] < 0.0 && bb_max[0] > 0.0 || bb_min[1] < 0.0 && bb_max[1] > 0.0
{
num_sorted_chunks += 1;
}
}
}
});
let end_duration = start_time.elapsed();
renderer.flush(&mut device);
let frame_end_duration = start_time.elapsed();
let fps = fps_counter.tick();
let title = format!(
"Hematite sort={} render={} total={} in {:.2}ms+{:.2}ms @ {}FPS - {}",
num_sorted_chunks,
num_chunks,
num_total_chunks,
end_duration.as_secs() as f64
+ end_duration.subsec_nanos() as f64 / 1_000_000_000.0,
frame_end_duration.as_secs() as f64
+ frame_end_duration.subsec_nanos() as f64 / 1_000_000_000.0,
fps,
world.file_name().unwrap().to_str().unwrap()
);
window.set_title(title);
}
if e.after_render_args().is_some() {
device.cleanup();
}
if e.update_args().is_some() {
use std::i32;
// HACK(eddyb) find the closest chunk to the player.
// The pending vector should be sorted instead.
let pp = first_person.position.map(|x| (x / 16.0).floor() as i32);
let closest = pending_chunks
.iter()
.enumerate()
.fold(
(None, i32::max_value()),
|(best_i, best_dist), (i, &(cc, _, _, _))| {
let xyz = [cc[0] - pp[0], cc[1] - pp[1], cc[2] - pp[2]].map(|x| x * x);
let dist = xyz[0] + xyz[1] + xyz[2];
if dist < best_dist {
(Some(i), dist)
} else {
(best_i, best_dist)
}
},
)
.0;
let pending = closest.and_then(|i| {
// Vec swap_remove doesn't return Option anymore
match pending_chunks.len() {
0 => None,
_ => Some(pending_chunks.swap_remove(i)),
}
});
if let Some((coords, buffer, chunks, column_biomes)) = pending {
minecraft::block_state::fill_buffer(
&block_states,
&biomes,
&mut staging_buffer,
coords,
chunks,
column_biomes,
);
*buffer.borrow_mut() = Some(renderer.create_buffer(&staging_buffer[..]));
staging_buffer.clear();
if pending_chunks.is_empty() {
println!("Finished filling chunk vertex buffers.");
}
}
}
if let Some(Keyboard(Key::C)) = e.press_args() {
println!(
"Turned cursor capture {}",
if capture_cursor { "off" } else { "on" }
);
capture_cursor = !capture_cursor;
window.set_capture_cursor(capture_cursor);
}
if e.mouse_relative_args().is_some() && !capture_cursor {
// Don't send the mouse event to the FPS controller.
continue;
}
first_person.event(&e);
}
}
|
use liblumen_alloc::erts::exception;
use liblumen_alloc::erts::process::Process;
use liblumen_alloc::erts::term::prelude::Term;
/// `+/2` infix operator
///
/// Adds `augend` and `addend` on behalf of `process`. The project's
/// `number_infix_operator!` macro expands to the actual arithmetic;
/// `checked_add` names the integer fast path and `+` the general operator
/// (exact overflow/fallback behavior lives in the macro, defined elsewhere).
#[native_implemented::function(erlang:+/2)]
pub fn result(process: &Process, augend: Term, addend: Term) -> exception::Result<Term> {
    number_infix_operator!(augend, addend, process, checked_add, +)
}
|
#![allow(non_camel_case_types, non_upper_case_globals, overflowing_literals)]
pub mod plugin;
use std::os::raw::*;
// krb5/krb5.h:136
/// Opaque profile handle (only ever used behind a pointer).
pub enum _profile_t {}

// Fixed-width integer aliases mirroring the krb5 headers.
pub type krb5_octet = u8;
pub type krb5_int16 = i16;
pub type krb5_ui_2 = u16;
pub type krb5_int32 = i32;
pub type krb5_ui_4 = u32;

pub const VALID_INT_BITS: krb5_int32 = 2147483647;
pub const VALID_UINT_BITS: krb5_ui_4 = 4294967295;
pub const KRB5_INT32_MAX: krb5_int32 = 2147483647;
pub const KRB5_INT32_MIN: krb5_int32 = (-KRB5_INT32_MAX-1);
// not sure, it overflows a signed value, but it's like this in
// the original source code.
// NOTE(review): 65535 wraps to -1 as a krb5_int16; this compiles only
// because the crate allows `overflowing_literals`. It mirrors the C
// header's #define verbatim.
pub const KRB5_INT16_MAX: krb5_int16 = 65535;
pub const KRB5_INT16_MIN: krb5_int16 = (-KRB5_INT16_MAX-1);

// krb5/krb5.h:167
pub const FALSE: krb5_boolean = 0;
pub const TRUE: krb5_boolean = 1;

/// C-style boolean: zero is false, non-zero is true.
pub type krb5_boolean = c_uint;
pub type krb5_msgtype = c_uint;
/// Key version number.
pub type krb5_kvno = c_uint;
/// Address type tag (see the `ADDRTYPE_*` constants).
pub type krb5_addrtype = krb5_int32;
/// Encryption type identifier (see the `ENCTYPE_*` constants).
pub type krb5_enctype = krb5_int32;
/// Checksum type identifier (see the `CKSUMTYPE_*` constants).
pub type krb5_cksumtype = krb5_int32;
pub type krb5_authdatatype = krb5_int32;
pub type krb5_keyusage = krb5_int32;
pub type krb5_cryptotype = krb5_int32;
pub type krb5_preauthtype = krb5_int32;
pub type krb5_flags = krb5_int32;
pub type krb5_timestamp = krb5_int32;
pub type krb5_error_code = krb5_int32;
pub type krb5_deltat = krb5_int32;
pub type krb5_magic = krb5_error_code;
/// Length-prefixed byte buffer used throughout the krb5 API.
#[repr(C)]
pub struct krb5_data {
    pub magic: krb5_magic,
    pub length: c_uint,
    pub data: *mut c_char,
}

/// Like `krb5_data`, but carrying raw octets instead of `c_char`s.
#[repr(C)]
pub struct krb5_octet_data {
    pub magic: krb5_magic,
    pub length: c_uint,
    pub data: *mut krb5_octet,
}

pub const SALT_TYPE_AFS_LENGTH: c_uint = 65535;
pub const SALT_TYPE_NO_LENGTH: c_uint = 65535;

pub type krb5_pointer = *mut c_void;
pub type krb5_const_pointer = *const c_void;
/// A Kerberos principal: a realm plus a list of name components.
#[repr(C)]
pub struct krb5_principal_data {
    pub magic: krb5_magic,
    pub realm: krb5_data,
    /// An array of strings
    pub data: *mut krb5_data,
    // presumably the number of elements in `data` — confirm against krb5.h
    pub length: krb5_int32,
    // one of the `KRB5_NT_*` name-type constants below
    pub type_: krb5_int32,
}

pub type krb5_principal = *mut krb5_principal_data;

/// Name type not known
pub const KRB5_NT_UNKNOWN: krb5_int32 = 0;
/// Just the name of the principal as in DCE, or for users
pub const KRB5_NT_PRINCIPAL: krb5_int32 = 1;
/// Service and other unique instance (krbtgt)
pub const KRB5_NT_SRV_INST: krb5_int32 = 2;
/// Service with host name as instance (telnet, rcommands)
pub const KRB5_NT_SRV_HST: krb5_int32 = 3;
/// Service with host as remaining components
pub const KRB5_NT_SRV_XHST: krb5_int32 = 4;
/// Unique ID
pub const KRB5_NT_UID: krb5_int32 = 5;
/// PKINIT
pub const KRB5_NT_X500_PRINCIPAL: krb5_int32 = 6;
/// Name in form of SMTP email name
pub const KRB5_NT_SMTP_NAME: krb5_int32 = 7;
/// Windows 2000 UPN
pub const KRB5_NT_ENTERPRISE_PRINCIPAL: krb5_int32 = 10;
/// Well-known (special) principal
pub const KRB5_NT_WELLKNOWN: krb5_int32 = 11;
/// First component of NT_WELLKNOWN principals
pub const KRB5_WELLKNOWN_NAMESTR: &'static str = "WELLKNOWN";
/// Windows 2000 UPN and SID
pub const KRB5_NT_MS_PRINCIPAL: krb5_int32 = -128;
/// NT 4 style name
pub const KRB5_NT_MS_PRINCIPAL_AND_ID: krb5_int32 = -129;
/// NT 4 style name and SID
pub const KRB5_NT_ENT_PRINCIPAL_AND_ID: krb5_int32 = -130;

/// Constant version of `krb5_principal_data`
pub type krb5_const_principal = *const krb5_principal_data;

// not sure how to translate these functions since I'm unsure
// about the type of `context`.
// krb5/krb5.h:261

/// Constant for realm referrals
pub const KRB5_REFERRAL_REALM: &'static str = "";
// krb5/krb5.h:267
// FFI declarations resolved against the system libkrb5.
#[link(name = "krb5")]
extern "C" {
    /// Check for a match with KRB5_REFERRAL_REALM
    ///
    /// `r`: Realm to check
    /// returns `TRUE` if `r` is zero-length, `FALSE` otherwise
    pub fn krb5_is_referral_realm(r: *const krb5_data) -> krb5_boolean;

    /// Return an anonymous realm data.
    ///
    /// This function returns constant storage that must not be freed.
    ///
    /// see also: `KRB5_ANONYMOUS_REALMSTR`
    pub fn krb5_anonymous_realm() -> *const krb5_data;

    /// Build an anonymous principal.
    ///
    /// This function returns constant storage that must not be freed.
    ///
    /// see also: `KRB5_ANONYMOUS_PRINCSTR`
    pub fn krb5_anonymous_principal() -> krb5_const_principal;
}
/// Anonymous realm
pub const KRB5_ANONYMOUS_REALMSTR: &'static str = "WELLKNOWN:ANONYMOUS";
/// Anonymous principal name
pub const KRB5_ANONYMOUS_PRINCSTR: &'static str = "ANONYMOUS";

/// Structure for address
#[repr(C)]
pub struct krb5_address {
    pub magic: krb5_magic,
    pub addrtype: krb5_addrtype,
    pub length: c_uint,
    pub contents: *mut krb5_octet,
}

// krb5/krb5.h:316
// Values for `krb5_address.addrtype`.
pub const ADDRTYPE_INET: krb5_addrtype = 0x0002;
pub const ADDRTYPE_CHAOS: krb5_addrtype = 0x0005;
pub const ADDRTYPE_XNS: krb5_addrtype = 0x0006;
pub const ADDRTYPE_ISO: krb5_addrtype = 0x0007;
pub const ADDRTYPE_DDP: krb5_addrtype = 0x0010;
pub const ADDRTYPE_NETBIOS: krb5_addrtype = 0x0014;
pub const ADDRTYPE_INET6: krb5_addrtype = 0x0018;
pub const ADDRTYPE_ADDRPORT: krb5_addrtype = 0x0100;
pub const ADDRTYPE_IPPORT: krb5_addrtype = 0x0101;
#[allow(non_snake_case)]
pub fn ADDRTYPE_IS_LOCAL(addr: krb5_addrtype) -> bool {
addr & 0x8000 != 0
}
/// Opaque library context.
pub enum _krb5_context {}
pub type krb5_context = *mut _krb5_context;

/// Opaque authentication context.
pub enum _krb5_auth_context {}
pub type krb5_auth_context = *mut _krb5_auth_context;

pub enum _krb5_cryptosystem_entry {}

/// Exposed contents of a key
#[repr(C)]
pub struct krb5_keyblock {
    pub magic: krb5_magic,
    pub enctype: krb5_enctype,
    pub length: c_uint,
    pub contents: *mut krb5_octet,
}

pub enum krb5_key_st {}

/// Opaque identifier for a key.
///
/// Use with the `krb5_k` APIs for better performance for repeated operations with
/// the same key and usage. Key identifiers must not be used simultaneously
/// within multiple threads, as they may contain mutable internal state and are
/// not mutex-protected.
pub type krb5_key = *mut krb5_key_st;

// ifdef KRB5_OLD_CRYPTO
#[cfg(feature = "krb5_old_crypto")]
#[repr(C)]
pub struct krb5_encrypt_block {
    pub magic: krb5_magic,
    pub crypto_entry: krb5_enctype,
    pub key: *mut krb5_keyblock,
}

#[repr(C)]
pub struct krb5_checksum {
    pub magic: krb5_magic,
    pub checksum_type: krb5_cksumtype,
    pub length: c_uint,
    pub contents: *mut krb5_octet,
}

/// Ciphertext together with the enctype/key-version metadata describing it.
#[repr(C)]
pub struct krb5_enc_data {
    pub magic: krb5_magic,
    pub enctype: krb5_enctype,
    pub kvno: krb5_kvno,
    pub ciphertext: krb5_data,
}

/// Structure to describe a region of text to be encrypted or decrypted.
///
/// The `flags` member describes the type of the iov.
/// The `data` member points to the memory that will be manipulated.
/// All iov APIs take a pointer to the first element of an array of `krb5_crypto_iov`'s
/// along with the size of that array. Buffer contents are manipulated in-place;
/// data is overwritten. Callers must allocate the right number of `krb5_crypto_iov`
/// structures before calling into an iov API.
#[repr(C)]
pub struct krb5_crypto_iov {
    /// `KRB5_CRYPTO_TYPE` type of the iov
    pub flags: krb5_cryptotype,
    pub data: krb5_data,
}
pub const ENCTYPE_NULL: krb5_enctype = 0x0000;
/// DES cbc mode with CRC-32
pub const ENCTYPE_DES_CBC_CRC: krb5_enctype = 0x0001;
/// DES cbc mode with RSA-MD4
pub const ENCTYPE_DES_CBC_MD4: krb5_enctype = 0x0002;
/// DES cbc mode with RSA-MD5
pub const ENCTYPE_DES_CBC_MD5: krb5_enctype = 0x0003;
/// DES cbc mode raw
#[deprecated]
pub const ENCTYPE_DES_CBC_RAW: krb5_enctype = 0x0004;
/// DES-3 cbc with SHA1
#[deprecated]
pub const ENCTYPE_DES3_CBC_SHA: krb5_enctype = 0x0005;
/// DES-3 cbc mode raw
#[deprecated]
pub const ENCTYPE_DES3_CBC_RAW: krb5_enctype = 0x0006;
#[deprecated]
pub const ENCTYPE_DES_HMAC_SHA1: krb5_enctype = 0x0008;

// PKINIT
/// DSA with SHA1, CMS signature
pub const ENCTYPE_DSA_SHA1_CMS: krb5_enctype = 0x0009;
/// MD5 with RSA, CMS signature
pub const ENCTYPE_MD5_RSA_CMS: krb5_enctype = 0x000a;
/// SHA1 with RSA, CMS signature
pub const ENCTYPE_SHA1_RSA_CMS: krb5_enctype = 0x000b;
/// RC2 cbc mode, CMS enveloped data
pub const ENCTYPE_RC2_CBC_ENV: krb5_enctype = 0x000c;
/// RSA encryption, CMS enveloped data
pub const ENCTYPE_RSA_ENV: krb5_enctype = 0x000d;
/// RSA w/OEAP encryption, CMS enveloped data
pub const ENCTYPE_RSA_ES_OAEP_ENV: krb5_enctype = 0x000e;
/// DES-3 cbc mode, CMS enveloped data
pub const ENCTYPE_DES3_CBC_ENV: krb5_enctype = 0x000f;

pub const ENCTYPE_DES3_CBC_SHA1: krb5_enctype = 0x0010;
/// RFC 3962
pub const ENCTYPE_AES128_CTS_HMAC_SHA1_96: krb5_enctype = 0x0011;
/// RFC 3962
pub const ENCTYPE_AES256_CTS_HMAC_SHA1_96: krb5_enctype = 0x0012;
pub const ENCTYPE_ARCFOUR_HMAC: krb5_enctype = 0x0017;
pub const ENCTYPE_ARCFOUR_HMAC_EXP: krb5_enctype = 0x0018;
/// RFC 6803
pub const ENCTYPE_CAMELLIA128_CTS_CMAC: krb5_enctype = 0x0019;
/// RFC 6803
pub const ENCTYPE_CAMELLIA256_CTS_CMAC: krb5_enctype = 0x001a;
pub const ENCTYPE_UNKNOWN: krb5_enctype = 0x01ff;

pub const CKSUMTYPE_CRC32: krb5_cksumtype = 0x0001;
pub const CKSUMTYPE_RSA_MD4: krb5_cksumtype = 0x0002;
pub const CKSUMTYPE_RSA_MD4_DES: krb5_cksumtype = 0x0003;
pub const CKSUMTYPE_DESCBC: krb5_cksumtype = 0x0004;
pub const CKSUMTYPE_RSA_MD5: krb5_cksumtype = 0x0007;
pub const CKSUMTYPE_RSA_MD5_DES: krb5_cksumtype = 0x0008;
pub const CKSUMTYPE_NIST_SHA: krb5_cksumtype = 0x0009;
pub const CKSUMTYPE_HMAC_SHA1_DES3: krb5_cksumtype = 0x000c;
/// RFC 3962. Used with `ENCTYPE_AES128_CTS_HMAC_SHA1_96`
pub const CKSUMTYPE_HMAC_SHA1_96_AES128: krb5_cksumtype = 0x000f;
/// RFC 3962. Used with `ENCTYPE_AES256_CTS_HMAC_SHA1_96`
pub const CKSUMTYPE_HMAC_SHA1_96_AES256: krb5_cksumtype = 0x0010;
/// RFC 6803.
pub const CKSUMTYPE_CMAC_CAMELLIA128: krb5_cksumtype = 0x0011;
/// RFC 6803
pub const CKSUMTYPE_CMAC_CAMELLIA256: krb5_cksumtype = 0x0012;
/// Microsoft netlogon cksumtype
pub const CKSUMTYPE_MD5_HMAC_ARCFOUR: krb5_cksumtype = -137;
/// Microsoft md5 hmac cksumtype
pub const CKSUMTYPE_HMAC_MD5_ARCFOUR: krb5_cksumtype = -138;

// A wild enum appears!
pub const KRB5_C_RANDSOURCE_OLDAPI: u32 = 0;
/// NOTE(review): this name misspells "SOURCE" (the C header's constant is
/// `KRB5_C_RANDSOURCE_OSRAND`); it is kept as-is for backward compatibility.
/// Prefer the correctly spelled alias below.
pub const KRB5_C_RANDSORUCE_OSRAND: u32 = 1;
/// Correctly spelled alias for `KRB5_C_RANDSORUCE_OSRAND`.
pub const KRB5_C_RANDSOURCE_OSRAND: u32 = KRB5_C_RANDSORUCE_OSRAND;
pub const KRB5_C_RANDSOURCE_TRUSTEDPARTY: u32 = 2;
pub const KRB5_C_RANDSOURCE_TIMING: u32 = 3;
pub const KRB5_C_RANDSOURCE_EXTERNAL_PROTOCOL: u32 = 4;
pub const KRB5_C_RANDSOURCE_MAX: u32 = 5;
// TODO: krb5_roundup
// krb5_x
// krb5_xc
#[link(name = "krb5")]
extern "C" {
    /// Encrypt data using a key (operates on keyblock).
    ///
    /// `context`: Library context
    /// `key`: Encryption key
    /// `usage`: Key usage (see `KRB5_KEYUSAGE` types)
    /// `cipher_state`: Cipher state; specify `NULL` if not needed.
    /// `input`: Data to be encrypted
    /// `output`: Encrypted data.
    ///
    /// This function encrypts the data block `input` and stores the output into
    /// `output`. The actual encryption key will be derived from `key` and `usage`
    /// if key derivation is specified for the encryption type. If non-null,
    /// `cipher_state` specifies the beginning state for the encryption operation,
    /// and is updated with the state to be passed as input to the next operation.
    ///
    /// Note: the caller must initialize `output` and allocate at least enough
    /// space for the result (using `krb5_c_encrypt_length()` to determine the amount
    /// of space needed). `output.length` will be set to the actual length of the
    /// ciphertext.
    ///
    /// returns `0` on success, otherwise - Kerberos error codes
    pub fn krb5_c_encrypt(context: krb5_context,
                          key: *const krb5_keyblock,
                          usage: krb5_keyusage,
                          cipher_state: *const krb5_data,
                          input: *const krb5_data,
                          output: *mut krb5_enc_data) -> krb5_error_code;
    /// Decrypt data using a key (operates on keyblock)
    ///
    /// `context`: Library context
    /// `key`: Encryption key
    /// `usage`: Key usage (see `KRB5_KEYUSAGE` types)
    /// `cipher_state`: Cipher state; specify NULL if not needed.
    /// `input`: Encrypted data
    /// `output`: Decrypted data
    ///
    /// This function decrypts the data block `input` and stores the output into
    /// `output`. The actual decryption key will be derived from `key` and `usage`
    /// if key derivation is specified for the encryption type. If non-null,
    /// `cipher_state` specifies the beginning state for the decryption operation,
    /// and is updated with the state to be passed as input to the next operation.
    ///
    /// Note: The caller must initialize `output` and allocate at least enough
    /// space for the result. The usual practice is to allocate an output buffer as
    /// long as the ciphertext, and let `krb5_c_decrypt()` trim `output.length`.
    /// For some enctypes, the resulting `output.length` may include padding bytes.
    ///
    /// returns 0 on success, Kerberos error codes otherwise.
    pub fn krb5_c_decrypt(context: krb5_context,
                          key: *const krb5_keyblock,
                          usage: krb5_keyusage,
                          cipher_state: *const krb5_data,
                          input: *const krb5_enc_data,
                          output: *mut krb5_data) -> krb5_error_code;
    /// Compute the ciphertext length for a given plaintext length.
    ///
    /// Stores in `length` the number of bytes of ciphertext produced by
    /// encrypting `inputlen` bytes with `enctype` (header, padding and
    /// trailer included).
    pub fn krb5_c_encrypt_length(context: krb5_context,
                                 enctype: krb5_enctype,
                                 inputlen: usize,
                                 length: *mut usize) -> krb5_error_code;
    /// Return the cipher block size of `enctype` in `blocksize`.
    pub fn krb5_c_block_size(context: krb5_context,
                             enctype: krb5_enctype,
                             blocksize: *mut usize) -> krb5_error_code;
    /// Return the number of random input bytes (`keybytes`) and the
    /// resulting key size (`keylength`) needed to generate a key for
    /// `enctype`.
    pub fn krb5_c_keylengths(context: krb5_context,
                             enctype: krb5_enctype,
                             keybytes: *mut usize,
                             keylength: *mut usize) -> krb5_error_code;
    /// Initialize a new cipher state in `new_state` for use with `key`
    /// and `usage`; released via krb5's `krb5_c_free_state()`.
    pub fn krb5_c_init_state(context: krb5_context,
                             key: *const krb5_keyblock,
                             usage: krb5_keyusage,
                             new_state: *mut krb5_data) -> krb5_error_code;
// TODO: Doc
pub fn krb_c_free_state(context: krb5_context,
key: *const krb5_keyblock,
state: *mut krb5_data) -> krb5_error_code;
// TODO: Doc
pub fn krb5_c_prf(context: krb5_context,
keyblock: *const krb5_keyblock,
input: *mut krb5_data,
output: *mut krb5_data)-> krb5_error_code;
// TODO: Doc
pub fn krb5_c_prf_length(context: krb5_context,
enctype: krb5_enctype,
len: *mut usize) -> krb5_error_code;
// TODO: Doc
pub fn krb5_c_fx_cf2_simple(context: krb5_context,
k1: *mut krb5_keyblock,
pepper1: *const c_char,
k2: *mut krb5_keyblock,
pepper2: *const c_char,
out: *mut *mut krb5_keyblock) -> krb5_error_code;
// TODO: Doc
pub fn krb5_c_make_random_key(context: krb5_context,
enctype: krb5_enctype,
k5_random_key: *mut krb5_keyblock) -> krb5_error_code;
// TODO: Doc
pub fn krb5_c_random_to_key(context: krb5_context,
enctype: krb5_enctype,
random_data: *mut krb5_data,
k5_random_key: *mut krb5_keyblock) -> krb5_error_code;
// TODO: Doc
pub fn krb5_c_random_add_entropy(context: krb5_context,
randsource: c_uint,
data: *const krb5_data) -> krb5_error_code;
// TODO: Doc
pub fn krb5_c_random_make_octets(context: krb5_context,
data: *mut krb5_data) -> krb5_error_code;
// TODO: Doc
pub fn krb5_c_random_os_entropy(context: krb5_context,
strong: c_int,
success: *mut c_int) -> krb5_error_code;
// TODO: Doc
#[deprecated]
pub fn krb5_c_random_seed(context: krb5_context,
data: *mut krb5_data) -> krb5_error_code;
// TODO: Doc
pub fn krb5_c_string_to_key(context: krb5_context,
enctype: krb5_enctype,
string: *const krb5_data,
salt: *const krb5_data,
key: *mut krb5_keyblock) -> krb5_error_code;
// TODO: Doc
pub fn krb5_c_string_to_key_with_params(context: krb5_context,
enctype: krb5_enctype,
string: *const krb5_data,
salt: *const krb5_data,
params: *const krb5_data,
key: *mut krb5_keyblock) -> krb5_error_code;
// TODO: Doc
pub fn krb5_c_enctype_compare(context: krb5_context,
e1: krb5_enctype,
e2: krb5_enctype,
similiar: *mut krb5_boolean) -> krb5_error_code;
// TODO: Doc
pub fn krb5_c_make_checksum(context: krb5_context,
cksumtype: krb5_cksumtype,
key: *const krb5_keyblock,
usage: krb5_keyusage,
input: *const krb5_data,
cksum: *mut krb5_checksum) -> krb5_error_code;
// TODO: Doc
pub fn krb5_c_verify_checksum(context: krb5_context,
key: *const krb5_keyblock,
usage: krb5_keyusage,
data: *const krb5_data,
cksum: *const krb5_checksum,
valid: *mut krb5_boolean) -> krb5_error_code;
// TODO Doc
pub fn krb5_c_checksum_length(context: krb5_context,
cksumtype: krb5_cksumtype,
length: *mut usize) -> krb5_error_code;
// TODO: Doc
pub fn krb5_c_keyed_checksum_types(context: krb5_context,
enctype: krb5_enctype,
count: *mut c_uint,
cksumtypes: *mut *mut krb5_cksumtype) -> krb5_error_code;
}
pub const KRB5_KEYUSAGE_AS_REQ_PA_ENC_TS: krb5_keyusage = 1;
pub const KRB5_KEYUSAGE_KDC_REP_TICKET: krb5_keyusage = 2;
pub const KRB5_KEYUSAGE_AS_REP_ENCPART: krb5_keyusage = 3;
pub const KRB5_KEYUSAGE_TGS_REQ_AD_SESSKEY: krb5_keyusage = 4;
pub const KRB5_KEYUSAGE_TGS_REQ_AD_SUBKEY: krb5_keyusage = 5;
pub const KRB5_KEYUSAGE_TGS_REQ_AUTH_CKSUM: krb5_keyusage = 6;
pub const KRB5_KEYUSAGE_TGS_REQ_AUTH: krb5_keyusage = 7;
pub const KRB5_KEYUSAGE_TGS_REP_ENCPART_SESSKEY: krb5_keyusage = 8;
pub const KRB5_KEYUSAGE_TGS_REP_ENCPART_SUBKEY: krb5_keyusage = 9;
pub const KRB5_KEYUSAGE_AP_REQ_AUTH_CKSUM: krb5_keyusage = 10;
pub const KRB5_KEYUSAGE_AP_REQ_AUTH: krb5_keyusage = 11;
pub const KRB5_KEYUSAGE_AP_REP_ENCPART: krb5_keyusage = 12;
pub const KRB5_KEYUSAGE_KRB_PRIV_ENCPART: krb5_keyusage = 13;
pub const KRB5_KEYUSAGE_KRB_CRED_ENCPART: krb5_keyusage = 14;
pub const KRB5_KEYUSAGE_KRB_SAFE_CKSUM: krb5_keyusage = 15;
pub const KRB5_KEYUSAGE_APP_DATA_ENCRYPT: krb5_keyusage = 16;
pub const KRB5_KEYUSAGE_APP_DATA_CKSUM: krb5_keyusage = 17;
pub const KRB5_KEYUSAGE_KRB_ERROR_CKSUM: krb5_keyusage = 18;
pub const KRB5_KEYUSAGE_AD_KDCISSUED_CKSUM: krb5_keyusage = 19;
pub const KRB5_KEYUSAGE_AD_MTE: krb5_keyusage = 20;
pub const KRB5_KEYUSAGE_AD_ITE: krb5_keyusage = 21;
pub const KRB5_KEYUSAGE_GSS_TOK_MIC: krb5_keyusage = 22;
pub const KRB5_KEYUSAGE_GSS_TOK_WRAP_INTEG: krb5_keyusage = 23;
pub const KRB5_KEYUSAGE_GSS_TOK_WRAP_PRIV: krb5_keyusage = 24;
pub const KRB5_KEYUSAGE_PA_SAM_CHALLENGE_CKSUM: krb5_keyusage = 25;
/// Note conflict with `KRB5_KEYUSAGE_PA_SAM_CHALLENGE_TRACKID`
pub const KRB5_KEYUSAGE_PA_S4U_X509_USER_REQUEST: krb5_keyusage = 26;
/// Note conflict with `KRB5_KEYUSAGE_PA_SAM_RESPONSE`
pub const KRB5_KEYUSAGE_PA_S4U_X509_USER_REPLY: krb5_keyusage = 27;
pub const KRB5_KEYUSAGE_PA_REFERRAL: krb5_keyusage = 26;
pub const KRB5_KEYUSAGE_AD_SIGNEDPATH: krb5_keyusage = -21;
pub const KRB5_KEYUSAGE_IAKERB_FINISHED: krb5_keyusage = 42;
pub const KRB5_KEYUSAGE_PA_PKINIT_KX: krb5_keyusage = 44;
/// See RFC 6560 section 4.2
pub const KRB5_KEYUSAGE_PA_OTP_REQUEST: krb5_keyusage = 45;
pub const KRB5_KEYUSAGE_FAST_REQ_CHKSUM: krb5_keyusage = 50;
pub const KRB5_KEYUSAGE_FAST_ENC: krb5_keyusage = 51;
pub const KRB5_KEYUSAGE_FAST_REP: krb5_keyusage = 52;
pub const KRB5_KEYUSAGE_FAST_FINISHED: krb5_keyusage = 53;
pub const KRB5_KEYUSAGE_ENC_CHALLENGE_CLIENT: krb5_keyusage = 54;
pub const KRB5_KEYUSAGE_ENC_CHALLENGE_KDC: krb5_keyusage = 55;
pub const KRB5_KEYUSAGE_AS_REQ: krb5_keyusage = 56;
#[link(name = "krb5")]
extern "C" {
    /// Verify that `ktype` is a valid encryption type.
    pub fn krb5_c_valid_enctype(ktype: krb5_enctype) -> krb5_boolean;
    /// Verify that `ctype` is a valid checksum type.
    pub fn krb5_c_valid_cksumtype(ctype: krb5_cksumtype) -> krb5_boolean;
    /// Test whether the checksum type `ctype` is collision-proof.
    pub fn krb5_c_is_coll_proof_cksum(ctype: krb5_cksumtype) -> krb5_boolean;
    /// Test whether the checksum type `ctype` uses a key.
    pub fn krb5_c_is_keyed_cksum(ctype: krb5_cksumtype) -> krb5_boolean;
}
/// [in] ignored
pub const KRB5_CRYPTO_TYPE_EMPTY: krb5_cryptotype = 0;
/// [out] header
pub const KRB5_CRYPTO_TYPE_HEADER: krb5_cryptotype = 1;
/// [in, out] plaintext
pub const KRB5_CRYPTO_TYPE_DATA: krb5_cryptotype = 2;
/// [in] associated data
pub const KRB5_CRYPTO_TYPE_SIGN_ONLY: krb5_cryptotype = 3;
/// [out] padding
pub const KRB5_CRYPTO_TYPE_PADDING: krb5_cryptotype = 4;
/// [out] checksum for encrypt
pub const KRB5_CRYPTO_TYPE_TRAILER: krb5_cryptotype = 5;
/// [out] checksum for MIC
pub const KRB5_CRYPTO_TYPE_CHECKSUM: krb5_cryptotype = 6;
/// [in] entire message without decomposing the strucutre into
/// header, data and trailer buffers
pub const KRB5_CRYPTO_TYPE_STREAM: krb5_cryptotype = 7;
#[link(name = "krb5")]
extern "C" {
// TODO: Doc
pub fn krb5_c_make_checksum_iov(context: krb5_context,
cksumtype: krb5_cksumtype,
key: *const krb5_keyblock,
usage: krb5_keyusage,
data: *mut krb5_crypto_iov,
num_data: usize) -> krb5_error_code;
// TODO: Doc
pub fn krb5_c_verify_checksum_iov(context: krb5_context,
cksumtype: krb5_cksumtype,
key: *const krb5_keyblock,
usage: krb5_keyusage,
data: *const krb5_crypto_iov,
num_data: usize,
valid: *mut krb5_boolean) -> krb5_error_code;
// TODO: Doc
pub fn krb5_c_encrypt_iov(context: krb5_context,
keyblock: *const krb5_keyblock,
usage: krb5_keyusage,
cipher_state: *const krb5_data,
data: *mut krb5_crypto_iov,
num_data: usize) -> krb5_error_code;
// TODO: Doc
pub fn krb5_c_decypt_iov(context: krb5_context,
keyblock: *const krb5_keyblock,
usage: krb5_keyusage,
cipher_state: *const krb5_data,
data: *mut krb5_crypto_iov,
num_data: usize) -> krb5_error_code;
// TODO: Doc
pub fn krb5_c_crypto_length(context: krb5_context,
enctype: krb5_enctype,
type_: krb5_cryptotype,
size: *mut c_uint) -> krb5_error_code;
// TODO: Doc
pub fn krb5_c_crypto_length_iov(context: krb5_context,
enctype: krb5_enctype,
data: *mut krb5_crypto_iov,
num_data: usize) -> krb5_error_code;
// TODO: Doc
pub fn krb5_c_padding_length(context: krb5_context,
enctype: krb5_enctype,
data_length: usize,
size: *mut c_uint) -> krb5_error_code;
// TODO: Doc
pub fn krb5_k_create_key(context: krb5_context,
key_data: *const krb5_keyblock,
out: *mut krb5_key) -> krb5_error_code;
// TODO: Doc
pub fn krb5_k_reference_key(context: krb5_context,
key: krb5_key);
// TODO: Doc
pub fn krb5_k_key_keyblock(context: krb5_context,
key: krb5_key,
key_data: *mut *mut krb5_keyblock) -> krb5_error_code;
// TODO: Doc
pub fn krb5_k_key_enctype(context: krb5_context,
key: krb5_key) -> krb5_enctype;
// TODO: Doc
pub fn krb5_k_encrypt(context: krb5_context,
key: krb5_key,
usage: krb5_keyusage,
cipher_state: *const krb5_data,
input: *const krb5_data,
output: *mut krb5_enc_data) -> krb5_error_code;
// TODO: Doc
pub fn krb5_k_encrypt_iov(context: krb5_context,
key: krb5_key,
usage: krb5_keyusage,
cipher_state: *const krb5_data,
data: *mut krb5_crypto_iov,
num_data: usize) -> krb5_error_code;
// TODO: Doc
pub fn krb5_k_decrypt(context: krb5_context,
key: krb5_key,
usage: krb5_keyusage,
cipher_state: *const krb5_data,
input: *const krb5_enc_data,
output: *mut krb5_data) -> krb5_error_code;
// TODO: Doc
pub fn krb5_k_decrypt_iov(context: krb5_context,
key: krb5_key,
usage: krb5_keyusage,
cipher_state: *const krb5_data,
data: *mut krb5_crypto_iov,
num_data: usize) -> krb5_error_code;
// TODO: Doc
pub fn krb5_k_make_checksum(context: krb5_context,
cksumtype: krb5_cksumtype,
key: krb5_key,
usage: krb5_keyusage,
input: *const krb5_data,
cksum: *mut krb5_checksum) -> krb5_error_code;
// TODO: Doc
pub fn krb5_k_make_checksum_iov(context: krb5_context,
cksumtype: krb5_cksumtype,
key: krb5_key,
usage: krb5_keyusage,
data: *mut krb5_crypto_iov,
num_data: usize) -> krb5_error_code;
// TODO: Doc
pub fn krb5_k_verify_checksum(context: krb5_context,
key: krb5_key,
usage: krb5_keyusage,
data: *const krb5_data,
cksum: *const krb5_checksum,
valid: *mut krb5_boolean) -> krb5_error_code;
// TODO: Doc
pub fn krb5_k_verify_checksum_iov(context: krb5_context,
cksumtype: krb5_cksumtype,
key: krb5_key,
usage: krb5_keyusage,
data: *const krb5_crypto_iov,
num_data: usize,
valid: *mut krb5_boolean) -> krb5_error_code;
// TODO: Doc
pub fn krb5_k_prf(context: krb5_context,
key: krb5_key,
input: *mut krb5_data,
output: *mut krb5_data) -> krb5_error_code;
//ifdef KRB5_OLD_CRYPTO
// TODO: Doc
#[cfg(feature = "krb5_old_crypto")]
#[deprecated(note = "Replaced by krb5_c_* API family.")]
pub fn krb5_encrypt(context: krb5_context,
inptr: krb5_const_pointer,
outptr: krb5_pointer,
size: usize,
eblock: *mut krb5_encrypt_block,
ivec: krb5_pointer) -> krb5_error_code;
// TODO: Doc
#[cfg(feature = "krb5_old_crypto")]
#[deprecated(note = "Replaced by krb5_c_* API family.")]
pub fn krb5_decrypt(context: krb5_context,
inptr: krb5_const_pointer,
outpt: krb5_pointer,
size: usize,
eblock: *mut krb5_encrypt_block,
ivec: krb5_pointer) -> krb5_error_code;
// TODO: Doc
#[cfg(feature = "krb5_old_crypto")]
#[deprecated(note = "Replaced by krb5_c_* API family.")]
pub fn krb5_process_key(context: krb5_context,
eblock: *mut krb5_encrypt_block,
key: *const krb5_keyblock) -> krb5_error_code;
// TODO: Doc
#[cfg(feature = "krb5_old_crypto")]
#[deprecated(note = "Replaced by krb5_c_* API family.")]
pub fn krb5_finish_key(context: krb5_context,
eblock: *mut krb5_encrypt_block) -> krb5_error_code;
// TODO: Doc
#[cfg(feature = "krb5_old_crypto")]
#[deprecated(note = "Replaced by krb5_c_* API family.")]
pub fn krb5_string_to_key(context: krb5_context,
eblock: *const krb5_encrypt_block,
keyblock: *mut krb5_keyblock,
data: *const krb5_data,
salt: *const krb5_data) -> krb5_error_code;
// TODO: Doc
#[cfg(feature = "krb5_old_crypto")]
#[deprecated(note = "Replaced by krb5_c_* API family.")]
pub fn krb5_init_random_key(context: krb5_context,
eblock: *const krb5_encrypt_block,
keyblock: *const krb5_keyblock,
ptr: *mut krb5_pointer) -> krb5_error_code;
// TODO: Doc
#[cfg(feature = "krb5_old_crypto")]
#[deprecated(note = "Replaced by krb5_c_* API family.")]
pub fn krb5_finish_random_key(context: krb5_context,
eblock: *const krb5_encrypt_block,
ptr: *mut krb5_pointer) -> krb5_error_code;
// TODO: Doc
#[cfg(feature = "krb5_old_crypto")]
#[deprecated(note = "Replaced by krb5_c_* API family.")]
pub fn krb5_random_key(context: krb5_context,
eblock: *const krb5_encrypt_block,
ptr: krb5_pointer,
keyblock: *mut *mut krb5_keyblock) -> krb5_error_code;
// TODO: Doc
#[cfg(feature = "krb5_old_crypto")]
#[deprecated(note = "Replaced by krb5_c_* API family.")]
pub fn krb5_eblock_enctype(context: krb5_context,
eblock: *const krb5_encrypt_block) -> krb5_enctype;
// TODO: Doc
#[cfg(feature = "krb5_old_crypto")]
#[deprecated(note = "Replaced by krb5_c_* API family.")]
pub fn krb5_use_enctype(context: krb5_context,
eblock: *mut krb5_encrypt_block,
enctype: krb5_enctype) -> krb5_error_code;
// TODO: Doc
#[cfg(feature = "krb5_old_crypto")]
#[deprecated(note = "Replaced by krb5_c_* API family.")]
pub fn krb5_encrypt_size(length: usize, crypto: krb5_enctype) -> usize;
// TODO: Doc
#[cfg(feature = "krb5_old_crypto")]
#[deprecated(note = "See `krb5_c_checksum_length()`")]
pub fn krb5_checksum_size(context: krb5_context, ctype: krb5_cksumtype) -> usize;
// TODO: Doc
#[cfg(feature = "krb5_old_crypto")]
#[deprecated(note = "See `krb5_c_make_chekcsum()`")]
pub fn krb5_calculate_checksum(context: krb5_context,
ctype: krb5_cksumtype,
in_: krb5_const_pointer,
in_length: usize,
seed: krb5_const_pointer,
seed_length: usize,
outcksum: *mut krb5_checksum) -> krb5_error_code;
// TODO: Doc
#[cfg(feature = "krb5_old_crypto")]
#[deprecated(note = "See `krb5_c_verify_checksum()`")]
pub fn krb5_verify_checksum(context: krb5_context,
ctype: krb5_cksumtype,
cksum: *const krb5_checksum,
in_: krb5_const_pointer,
in_length: usize,
seed: krb5_const_pointer,
seed_length: usize) -> krb5_error_code;
// endif KRB5_OLD_CRYPTO
}
// KDC option flags for the kdc_options field of a KDC-REQ (RFC 4120 §5.4.1).
pub const KDC_OPT_FORWARDABLE: krb5_flags = 0x40000000;
pub const KDC_OPT_FORWARDED: krb5_flags = 0x20000000;
pub const KDC_OPT_PROXIABLE: krb5_flags = 0x10000000;
pub const KDC_OPT_PROXY: krb5_flags = 0x08000000;
pub const KDC_OPT_ALLOW_POSTDATED: krb5_flags = 0x04000000;
pub const KDC_OPT_POSTDATED: krb5_flags = 0x02000000;
pub const KDC_OPT_RENEWABLE: krb5_flags = 0x00800000;
pub const KDC_OPT_CNAME_IN_ADDL_TKT: krb5_flags = 0x00020000;
pub const KDC_OPT_CANONICALIZE: krb5_flags = 0x00010000;
pub const KDC_OPT_REQUEST_ANONYMOUS: krb5_flags = 0x00008000;
/// Disable transited check (correctly-spelled name).
pub const KDC_OPT_DISABLE_TRANSITED_CHECK: krb5_flags = 0x00000020;
/// Misspelled variant kept for backward compatibility; prefer
/// `KDC_OPT_DISABLE_TRANSITED_CHECK`.
#[deprecated(note = "Use KDC_OPT_DISABLE_TRANSITED_CHECK")]
pub const KDC_OPT_DISABLE_TRANSITED_CHEDK: krb5_flags = KDC_OPT_DISABLE_TRANSITED_CHECK;
pub const KDC_OPT_RENEWABLE_OK: krb5_flags = 0x00000010;
pub const KDC_OPT_ENC_TKT_IN_SKEY: krb5_flags = 0x00000008;
pub const KDC_OPT_RENEW: krb5_flags = 0x00000002;
pub const KDC_OPT_VALIDATE: krb5_flags = 0x00000001;
/// Mask of option bits shared between KDC options and ticket flags.
pub const KDC_TKT_COMMON_MASK: krb5_flags = 0x54800000;
/// Reserved AP option bit — the high bit of the 32-bit option word
/// (correctly-spelled name).
// NOTE(review): 0x80000000 does not fit in a positive signed-32-bit
// literal (krb5.h declares `typedef krb5_int32 krb5_flags`), so the
// value is written as a u32 literal and cast; the bit pattern is
// identical whatever integer type `krb5_flags` aliases.
pub const AP_OPTS_RESERVED: krb5_flags = 0x80000000u32 as krb5_flags;
/// Misspelled variant kept for backward compatibility; prefer
/// `AP_OPTS_RESERVED`.
#[deprecated(note = "Use AP_OPTS_RESERVED")]
pub const AP_OPTS_RESERVERD: krb5_flags = AP_OPTS_RESERVED;
/// Use session key
pub const AP_OPTS_USE_SESSION_KEY: krb5_flags = 0x40000000;
/// Perform a mutual authentication exchange
pub const AP_OPTS_MUTUAL_REQUIRED: krb5_flags = 0x20000000;
pub const AP_OPTS_ETYPE_NEGOTIATION: krb5_flags = 0x00000002;
/// Generate a subsession key from the current session key obtained from
/// the credentials
pub const AP_OPTS_USE_SUBKEY: krb5_flags = 0x00000001;
/// Mask of AP option bits that appear on the wire.
pub const AP_OPTS_WIRE_MASK: krb5_flags = 0xfffffff0u32 as krb5_flags;
pub const AD_TYPE_RESERVED: u16 = 0x8000;
pub const AD_TYPE_EXTERNAL: u16 = 0x4000;
pub const AD_TYPE_REGISTERED: u16 = 0x2000;
pub const AD_TYPE_FIELD_TYPE_MASK: u16 = 0x1fff;
pub const TKT_FLG_FORWARDABLE: krb5_flags = 0x40000000;
pub const TKT_FLG_FORWARDED: krb5_flags = 0x20000000;
pub const TKT_FLG_PROXIABLE: krb5_flags = 0x10000000;
pub const TKT_FLG_PROXY: krb5_flags = 0x08000000;
pub const TKT_FLG_MAY_POSTDATE: krb5_flags = 0x04000000;
pub const TKT_FLG_POSTDATED: krb5_flags = 0x02000000;
pub const TKT_FLG_INVALID: krb5_flags = 0x01000000;
pub const TKT_FLG_RENEWABLE: krb5_flags = 0x00800000;
pub const TKT_FLG_INITIAL: krb5_flags = 0x00400000;
pub const TKT_FLG_PRE_AUTH: krb5_flags = 0x00200000;
pub const TKT_FLG_HW_AUTH: krb5_flags = 0x00100000;
pub const TKT_FLG_TRANSIT_POLICY_CHECKED: krb5_flags = 0x00080000;
pub const TKT_FLG_OK_AS_DELEGATE: krb5_flags = 0x00040000;
pub const TKT_FLG_ENC_PA_REP: krb5_flags = 0x00010000;
pub const TKT_FLG_ANONYMOUS: krb5_flags = 0x00008000;
pub const LR_TYPE_THIS_SERVER_ONLY: u16 = 0x8000;
pub const LR_TYPE_INTERPRETATION_MASK: u16 = 0x7fff;
pub const MSEC_DIRBIT: u16 = 0x8000;
pub const MSEC_VAL_MASK: u16 = 0x7fff;
/// Kerberos protocol version number (RFC 4120 `pvno`).
pub const KRB5_PVNO: usize = 4;
/// Initial authentication request
pub const KRB5_AS_REQ: krb5_msgtype = 10;
/// Response to AS request
pub const KRB5_AS_REP: krb5_msgtype = 11;
/// Ticket granting server request
pub const KRB5_TGS_REQ: krb5_msgtype = 12;
/// Response to TGS request
pub const KRB5_TGS_REP: krb5_msgtype = 13;
/// Auth req to application server
pub const KRB5_AP_REQ: krb5_msgtype = 14;
/// Response to mutual AP request
pub const KRB5_AP_REP: krb5_msgtype = 15;
/// Safe application message
pub const KRB5_SAFE: krb5_msgtype = 20;
/// Private application message
pub const KRB5_PRIV: krb5_msgtype = 21;
/// Cred forwarding message
pub const KRB5_CRED: krb5_msgtype = 22;
/// Error response
pub const KRB5_ERROR: krb5_msgtype = 30;
// TODO: Find the proper type for these
pub const KRB5_LRQ_NONE: isize = 0;
pub const KRB5_LRQ_ALL_LAST_TGT: isize = 1;
pub const KRB5_LRQ_ONE_LAST_TGT: isize = -1;
pub const KRB5_LRQ_ALL_LAST_INITIAL: isize = 2;
pub const KRB5_LRQ_ONE_LAST_INITIAL: isize = -2;
pub const KRB5_LRQ_ALL_LAST_TGT_ISSUED: isize = 3;
pub const KRB5_LRQ_ONE_LAST_TGT_ISSUED: isize = -3;
pub const KRB5_LRQ_ALL_LAST_RENEWAL: isize = 4;
pub const KRB5_LRQ_ONE_LAST_RENEWAL: isize = -4;
pub const KRB5_LRQ_ALL_LAST_REQ: isize = 5;
pub const KRB5_LRQ_ONE_LAST_REQ: isize = -5;
pub const KRB5_LRQ_ALL_PW_EXPTIME: isize = 6;
pub const KRB5_LRQ_ONE_PW_EXPTIME: isize = -6;
pub const KRB5_LRQ_ALL_ACCT_EXPTIME: isize = 7;
pub const KRB5_LRQ_ONE_ACCT_EXPTIME: isize = -7;
pub const KRB5_PADATA_NONE: isize = 0;
pub const KRB5_PADATA_AP_REQ: isize = 1;
pub const KRB5_PADATA_TGS_REQ: isize = KRB5_PADATA_AP_REQ;
/// RFC 4120
pub const KRB5_PADATA_ENC_TIMESTAMP: isize = 2;
/// RFC 4120
pub const KRB5_PADATA_PW_SALT: isize = 3;
/// Not used, key encrypted within self
pub const KRB5_PADATA_ENC_ENCKEY: isize = 4;
/// timestamp encrytped in key, RFC 4120
pub const KRB5_PADATA_ENC_UNIX_TIME: isize = 5;
/// SecurId passcode. RFC 4120
pub const KRB5_PADATA_ENC_SANDIA_SECURID: isize = 6;
/// Sesame project. RFC 4120
pub const KRB5_PADATA_SESAME: isize = 7;
/// OSF DCE. RFC 4120
pub const KRB5_PADATA_OSF_DCE: isize = 8;
/// Cybersafe, RFC 4120
pub const KRB5_CYBERSAFE_SECUREID: isize = 9;
/// Cygnus, RFC 4120, 3961
pub const KRB5_PADATA_AFS3_SALT: isize = 10;
/// Etype info for preauth. RFC 4120
pub const KRB5_PADATA_ETYPE_INFO: isize = 11;
/// SAM/OTP
pub const KRB5_PADATA_SAM_CHALLENGE: isize = 12;
/// SAM/OTP
pub const KRB5_PADATA_SAM_RESPONSE: isize = 13;
/// PKINIT
pub const KRB5_PADATA_PK_AS_REQ_OLD: isize = 14;
/// PKINIT
pub const KRB5_PADATA_PK_AS_REP_OLD: isize = 15;
/// PKINIT. RFC 4556
pub const KRB5_PADATA_PK_AS_REQ: isize = 16;
/// PKINIT. RFC 4556
pub const KRB5_PADATA_PK_AS_REP: isize = 17;
/// RFC 4120
pub const KRB5_PADATA_ETYPE_INFO2: isize = 19;
/// RFC 4120 (correctly-spelled name).
pub const KRB5_PADATA_USE_SPECIFIED_KVNO: isize = 20;
/// Misspelled variant kept for backward compatibility; prefer
/// `KRB5_PADATA_USE_SPECIFIED_KVNO`.
#[deprecated(note = "Use KRB5_PADATA_USE_SPECIFIED_KVNO")]
pub const KRB5_PADATA_USE_SEPCIFIED_KVNO: isize = KRB5_PADATA_USE_SPECIFIED_KVNO;
/// Windows 2000 referrals. RFC 6820
pub const KRB5_PADATA_SVR_REFERRAL_INFO: isize = 20;
/// SAM/OTP. RFC 4120
pub const KRB5_PADATA_SAM_REDIRECT: isize = 21;
/// Embedded in typed data. RFC 4120
pub const KRB5_PADATA_GET_FROM_TYPED_DATA: isize = 22;
/// Draft challenge system
pub const KRB5_PADATA_REFERRAL: isize = 25;
/// draft challenge system, updated
pub const KRB5_PADATA_SAM_CHALLENGE_2: isize = 30;
/// draft challenge system, updated
pub const KRB5_PADATA_SAM_RESPONSE_2: isize = 31;
/// include Windows PAC
pub const KRB5_PADATA_PAC_REQUEST: isize = 128;
/// username protocol transition request
pub const KRB5_PADATA_FOR_USER: isize = 129;
/// certificate protocol transition request
pub const KRB5_PADATA_S4U_X509_USER: isize = 130;
/// AS checksum
pub const KRB5_PADATA_AS_CHECKSUM: isize = 132;
/// RFC 6113
pub const KRB5_PADATA_FX_COOKIE: isize = 133;
/// RFC 6113
pub const KRB5_PADATA_FX_FAST: isize = 136;
/// RFC 6113
pub const KRB5_PADATA_FX_ERROR: isize = 137;
/// RFC 6113
pub const KRB5_PADATA_ENCRYPTED_CHALLENGE: isize = 138;
/// RFC 6560 section 4.1
pub const KRB5_PADATA_OTP_CHALLENGE: isize = 141;
/// RFC 6560 section 4.2
pub const KRB5_PADATA_OTP_REQUEST: isize = 142;
/// RFC 6560 section 4.3
pub const KRB5_PADATA_OTP_PIN_CHANGE: isize = 144;
/// RFC 6112
pub const KRB5_PADATA_PKINIT_KX: isize = 147;
/// RFC 6806
pub const KRB5_ENCPADATA_REQ_ENC_PA_REP: isize = 149;
pub const KRB5_SAM_USE_SAD_AS_KEY: isize = 0x80000000;
pub const KRB5_SAM_SEND_ENCRYPTED_SAD: isize = 0x40000000;
/// currently must be zero
pub const KRB5_SAM_MUST_PK_ENCRYPT_SAD: isize = 0x20000000;
/// Transited encoding types
pub const KRB5_DOMAIN_X500_COMPRESS: isize = 1;
/// alternate authentication types
pub const KRB5_ALTAUTH_ATT_CHALLENGE_RESPONSE: isize = 64;
/// RFC 4120 AD-IF-RELEVANT (correctly-spelled name).
pub const KRB5_AUTHDATA_IF_RELEVANT: krb5_authdatatype = 1;
/// Misspelled variant ("KBR5") kept for backward compatibility; prefer
/// `KRB5_AUTHDATA_IF_RELEVANT`.
#[deprecated(note = "Use KRB5_AUTHDATA_IF_RELEVANT")]
pub const KBR5_AUTHDATA_IF_RELEVANT: krb5_authdatatype = KRB5_AUTHDATA_IF_RELEVANT;
pub const KRB5_AUTHDATA_KDC_ISSUED: krb5_authdatatype = 4;
pub const KRB5_AUTHDATA_AND_OR: krb5_authdatatype = 5;
pub const KRB5_AUTHDATA_MANDATORY_FOR_KDC: krb5_authdatatype = 8;
pub const KRB5_AUTHDATA_INITIAL_VERIFIED_CAS: krb5_authdatatype = 9;
pub const KRB5_AUTHDATA_OSF_DC: krb5_authdatatype = 64;
pub const KRB5_AUTHDATA_SESAME: krb5_authdatatype = 65;
pub const KRB5_AUTHDATA_WIN2K_PAC: krb5_authdatatype = 128;
/// RFC 4537
pub const KRB5_AUTHDATA_ETYPE_NEGOTIATION: krb5_authdatatype = 129;
/// formerly 142 in krb5 1.8
pub const KRB5_AUTHDATA_SIGNTICKET: krb5_authdatatype = 512;
pub const KRB5_AUTHDATA_FX_ARMOR: krb5_authdatatype = 71;
// TODO: find the proper type for these
/// Success
pub const KRB5_KPASSWD_SUCCESS: isize = 0;
/// Malformed request
pub const KRB5_KPASSWD_MALFORMED: isize = 1;
/// Server error
pub const KRB5_KPASSWD_HARDERROR: isize = 2;
/// Authentication error
pub const KRB5_KPASSWD_AUTHERROR: isize = 3;
/// Password change rejected
pub const KRB5_KPASSWD_SOFTERROR: isize = 4;
/// Not authorized
pub const KRB5_KPASSWD_ACCESSDENIED: isize = 5;
/// Unknown RPC version
pub const KRB5_KPASSWD_BAD_VERSION: isize = 6;
/// The presented credentials were not obtained using a password directly
pub const KRB5_KPASSWD_INITIAL_FLAG_NEEDED: isize = 7;
/// Lifetime and validity times of a ticket.
#[repr(C)]
pub struct krb5_ticket_times {
    /// Time at which the client was originally authenticated.
    pub authtime: krb5_timestamp,
    /// Optional start of validity — presumably 0 when absent; confirm
    /// against krb5.h.
    pub starttime: krb5_timestamp,
    /// Time after which the ticket is no longer valid.
    pub endtime: krb5_timestamp,
    /// Latest time to which a renewable ticket may be renewed.
    pub renew_till: krb5_timestamp,
}
/// A single authorization-data element.
#[repr(C)]
pub struct krb5_authdata {
    pub magic: krb5_magic,
    /// Element type (see the `KRB5_AUTHDATA_*` constants).
    pub ad_type: krb5_authdatatype,
    /// Length of `contents` in bytes.
    pub length: c_uint,
    /// Pointer to `length` bytes of type-specific data.
    pub contents: *mut krb5_octet,
}
/// Transited-realms encoding carried in a ticket.
#[repr(C)]
pub struct krb5_transited {
    pub magic: krb5_magic,
    /// Encoding type of `tr_contents` (see `KRB5_DOMAIN_X500_COMPRESS`).
    pub tr_type: krb5_octet,
    /// Encoded list of realms transited on the way to this ticket.
    pub tr_contents: krb5_data,
}
/// Decrypted payload (EncTicketPart) of a ticket.
#[repr(C)]
pub struct krb5_enc_tkt_part {
    pub magic: krb5_magic,
    /// Ticket flags (`TKT_FLG_*`).
    pub flags: krb5_flags,
    /// Session key — NOTE(review): ownership/free conventions are not
    /// visible here; confirm against the krb5 API docs before freeing.
    pub session: *mut krb5_keyblock,
    /// Client principal the ticket was issued to.
    pub client: krb5_principal,
    /// Realms transited between the client's realm and this one.
    pub transited: krb5_transited,
    /// Ticket lifetime information.
    pub times: krb5_ticket_times,
    /// Array of client addresses — presumably NULL-terminated; confirm.
    pub caddrs: *mut *mut krb5_address,
    /// Array of authorization-data elements — presumably NULL-terminated;
    /// confirm.
    pub authorization_data: *mut *mut krb5_authdata,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_ticket {
pub magic: krb5_magic,
pub server: krb5_principal,
pub enc_part: krb5_enc_data,
pub enc_part2: *mut krb5_enc_tkt_part,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_authenticator {
pub magic: krb5_magic,
pub client: krb5_principal,
pub checksum: *mut krb5_checksum,
pub cusec: krb5_int32,
pub ctime: krb5_timestamp,
pub subkey: *mut krb5_keyblock,
pub seq_number: krb5_ui_4,
pub authorization_data: *mut *mut krb5_authdata,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_tkt_authent {
pub magic: krb5_magic,
pub ticket: *mut krb5_ticket,
pub authenticator: *mut krb5_authenticator,
pub ap_options: krb5_flags,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_creds {
pub magic: krb5_magic,
pub client: krb5_principal,
pub server: krb5_principal,
pub keyblock: krb5_keyblock,
pub times: krb5_ticket_times,
pub is_skey: krb5_boolean,
pub ticket_flags: krb5_flags,
pub addresses: *mut *mut krb5_address,
pub ticket: krb5_data,
pub second_ticket: krb5_data,
pub authdata: *mut *mut krb5_authdata,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_last_req_entry {
pub magic: krb5_magic,
pub lr_type: krb5_int32,
pub value: krb5_timestamp,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_pa_data {
pub magic: krb5_magic,
pub pa_type: krb5_preauthtype,
pub length: c_uint,
pub contents: *mut krb5_octet,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_typed_data {
pub magic: krb5_magic,
pub type_: krb5_int32,
pub length: c_uint,
pub data: *mut krb5_octet,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_kdc_req {
pub magic: krb5_magic,
pub msg_type: krb5_msgtype,
pub padata: *mut *mut krb5_pa_data,
pub kdc_options: krb5_flags,
pub client: krb5_principal,
pub server: krb5_principal,
pub from: krb5_timestamp,
pub till: krb5_timestamp,
pub rtime: krb5_timestamp,
pub nonce: krb5_int32,
pub nktypes: c_int,
pub ktype: *mut krb5_enctype,
pub addressses: *mut *mut krb5_address,
pub authorization_data: krb5_enc_data,
pub unenc_authdata: *mut *mut krb5_authdata,
pub second_ticket: *mut *mut krb5_ticket,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_enc_kdc_rep_part {
pub magic: krb5_magic,
pub msg_type: krb5_msgtype,
pub session: *mut krb5_keyblock,
pub last_req: *mut *mut krb5_last_req_entry,
pub nonce: krb5_int32,
pub key_exp: krb5_timestamp,
pub flags: krb5_flags,
pub times: krb5_ticket_times,
pub server: krb5_principal,
pub caddrs: *mut *mut krb5_address,
pub enc_padata: *mut *mut krb5_pa_data,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_kdc_rep {
pub magic: krb5_magic,
pub msg_type: krb5_msgtype,
pub padata: *mut *mut krb5_pa_data,
pub client: krb5_principal,
pub ticket: *mut krb5_ticket,
pub enc_part: krb5_enc_data,
pub enc_part2: *mut krb5_enc_kdc_rep_part,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_error {
pub magic: krb5_magic,
pub ctime: krb5_timestamp,
pub cusec: krb5_int32,
pub susec: krb5_int32,
pub stime: krb5_timestamp,
pub error: krb5_ui_4,
pub client: krb5_principal,
pub server: krb5_principal,
pub text: krb5_data,
pub e_data: krb5_data,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_ap_req {
pub magic: krb5_magic,
pub ap_options: krb5_flags,
pub ticket: *mut krb5_ticket,
pub authenticator: krb5_enc_data,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_ap_rep {
pub magic: krb5_magic,
pub enc_part: krb5_enc_data,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_ap_rep_enc_part {
pub magic: krb5_magic,
pub ctime: krb5_timestamp,
pub cusec: krb5_int32,
pub subkey: *mut krb5_keyblock,
pub seq_number: krb5_ui_4,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_response {
pub magic: krb5_magic,
pub message_type: krb5_octet,
pub response: krb5_data,
pub expected_nonce: krb5_int32,
pub request_time: krb5_timestamp,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_cred_info {
pub magic: krb5_magic,
pub session: *mut krb5_keyblock,
pub client: krb5_principal,
pub server: krb5_principal,
pub flags: krb5_flags,
pub times: krb5_ticket_times,
pub caddrs: *mut *mut krb5_address,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_cred_enc_part {
pub magic: krb5_magic,
pub nonce: krb5_int32,
pub timestamp: krb5_timestamp,
pub usec: krb5_int32,
pub s_address: *mut krb5_address,
pub r_address: *mut krb5_address,
pub ticket_info: *mut *mut krb5_cred_info,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_cred {
pub magic: krb5_magic,
pub tickets: *mut *mut krb5_ticket,
pub enc_part: krb5_enc_data,
pub enc_part2: *mut krb5_cred_enc_part,
}
// TODO: Docs
#[repr(C)]
pub struct passwd_phrase_element {
pub magic: krb5_magic,
pub passwd: *mut krb5_data,
pub phrase: *mut krb5_data,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_pwd_data {
pub magic: krb5_magic,
pub sequence_count: c_int,
pub element: *mut *mut passwd_phrase_element,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_pa_svr_referral_data {
pub principal: krb5_principal,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_pa_server_referral_data {
pub referred_realm: *mut krb5_data,
pub true_principal_name: krb5_principal,
pub requested_principal_name: krb5_principal,
pub referral_valid_until: krb5_timestamp,
pub rep_cksum: krb5_checksum,
}
// TODO: Docs
#[repr(C)]
pub struct krb5_pa_pac_req {
pub include_pac: krb5_boolean,
}
// krb5/krb5.h:2151
// TODO: Find the proper datatypes
/// Prevent replays with timestamps and replay cache
pub const KRB5_AUTH_CONTEXT_DO_TIME: krb5_flags = 0x00000001;
/// Save timestamps for application
pub const KRB5_AUTH_CONTEXT_RET_TIME: krb5_flags = 0x00000002;
/// Prevent replays with sequence numbers
pub const KRB5_AUTH_CONTEXT_DO_SEQUENCE: krb5_flags = 0x00000004;
/// Save sequence numbers for application
pub const KRB5_AUTH_CONTEXT_RET_SEQUENCE: krb5_flags = 0x00000008;
pub const KRB5_AUTH_CONTEXT_PERMIT_ALL: krb5_flags = 0x00000010;
pub const KRB5_AUTH_CONTEXT_USE_SUBKEY: krb5_flags = 0x00000020;
// TODO: Docs
#[repr(C)]
pub struct krb5_replay_data {
pub timestamp: krb5_timestamp,
pub usec: krb5_int32,
pub seq: krb5_ui_4,
}
/// Generate the local network address
pub const KRB5_AUTH_CONTEXT_GENERATE_LOCAL_ADDR: krb5_flags = 0x00000001;
/// Generate the remote network address
pub const KRB5_AUTH_CONTEXT_GENERATE_REMOTE_ADDR: krb5_flags = 0x00000002;
/// Generate the local network address and the local port
pub const KRB5_AUTH_CONTEXT_GENERATE_LOCAL_FULL_ADDR: krb5_flags = 0x00000004;
/// Generate the remote network address and the remote port
pub const KRB5_AUTH_CONTEXT_GENERATE_REMOTE_FULL_ADDR: krb5_flags = 0x00000008;
pub type krb5_mk_req_checksum_func = extern "C" fn(krb5_context, krb5_auth_context, *mut c_void, *mut *mut krb5_data) -> krb5_error_code;
pub type krb5_cc_cursor = krb5_pointer;
pub enum _krb5_ccache {}
pub type krb5_ccache = *mut _krb5_ccache;
pub enum _krb5_cc_ops {}
pub type krb5_cc_ops = *mut _krb5_cc_ops;
pub enum _krb5_cccol_cursor {}
pub type krb5_cccol_cursor = *mut _krb5_cccol_cursor;
// Matching flags for krb5_cc_retrieve_cred() / krb5_cc_remove_cred().
/// The requested lifetime must be at least as great as the time specified.
pub const KRB5_TC_MATCH_TIMES: krb5_flags = 0x00000001;
/// The is_skey field must match exactly
pub const KRB5_TC_MATCH_IS_KEY: krb5_flags = 0x00000002;
/// Correctly-named alias for `KRB5_TC_MATCH_IS_KEY`: the C constant is
/// `KRB5_TC_MATCH_IS_SKEY`. The misspelled name is kept for backward
/// compatibility.
pub const KRB5_TC_MATCH_IS_SKEY: krb5_flags = KRB5_TC_MATCH_IS_KEY;
/// All the flags set in the match credentials must be set
pub const KRB5_TC_MATCH_FLAGS: krb5_flags = 0x00000004;
/// All the time fields must match exactly
pub const KRB5_TC_MATCH_TIMES_EXACT: krb5_flags = 0x00000008;
/// All the flags must match exactly
pub const KRB5_TC_MATCH_FLAGS_EXACT: krb5_flags = 0x00000010;
/// The authorization data must match
pub const KRB5_TC_MATCH_AUTHDATA: krb5_flags = 0x00000020;
/// Only the name portion of the principal name must match
pub const KRB5_TC_MATCH_SRV_NAMEONLY: krb5_flags = 0x00000040;
/// The second ticket must match
pub const KRB5_TC_MATCH_2ND_TKT: krb5_flags = 0x00000080;
/// The encryption key type must match
pub const KRB5_TC_MATCH_KTYPE: krb5_flags = 0x00000100;
/// The supported key types must match
pub const KRB5_TC_SUPPORTED_KTYPES: krb5_flags = 0x00000200;
/// Open and close the file for each cache operation
pub const KRB5_TC_OPENCLOSE: krb5_flags = 0x00000001;
pub const KRB5_TC_NOTICKKET: krb5_flags = 0x00000002;
/// Correctly-named alias for `KRB5_TC_NOTICKKET`: the C constant is
/// `KRB5_TC_NOTICKET`. The misspelled name is kept for backward
/// compatibility.
pub const KRB5_TC_NOTICKET: krb5_flags = KRB5_TC_NOTICKKET;
#[link(name = "krb5")]
extern "C" {
/// Retrieve the name, but not the type, of a credential cache.
pub fn krb5_cc_get_name(context: krb5_context,
cache: krb5_ccache) -> *const c_char;
/// Retrieve the full "type:name" of a credential cache; free the result
/// with krb5_free_string.
pub fn krb5_cc_get_full_name(context: krb5_context,
cache: krb5_ccache,
fullname_out: *mut *mut c_char) -> krb5_error_code;
/// Deprecated: create a new cache of the same type as `*cache` with a
/// unique name.
#[cfg(feature = "krb5_deprecated")]
pub fn krb5_cc_gen_new(context: krb5_context,
cache: *mut krb5_ccache) -> krb5_error_code;
/// Initialize (or re-initialize) a cache for `principal`, discarding
/// any previous contents.
pub fn krb5_cc_initialize(context: krb5_context,
cache: krb5_ccache,
principal: krb5_principal) -> krb5_error_code;
/// Destroy a cache's stored credentials and invalidate the handle.
pub fn krb5_cc_destroy(context: krb5_context,
cache: krb5_ccache) -> krb5_error_code;
/// Close a cache handle without affecting the stored credentials.
pub fn krb5_cc_close(context: krb5_context,
cache: krb5_ccache) -> krb5_error_code;
/// Store credentials in a cache.
pub fn krb5_cc_store_cred(context: krb5_context,
cache: krb5_ccache,
creds: *mut krb5_creds) -> krb5_error_code;
/// Retrieve a credential matching `mcreds` under `KRB5_TC_MATCH_*`
/// flags; free the result with krb5_free_cred_contents.
pub fn krb5_cc_retrieve_cred(context: krb5_context,
cache: krb5_ccache,
flags: krb5_flags,
mcreds: *mut krb5_creds,
creds: *mut krb5_creds) -> krb5_error_code;
/// Retrieve the default principal of a cache; free with
/// krb5_free_principal.
pub fn krb5_cc_get_principal(context: krb5_context,
cache: krb5_ccache,
principal: *mut krb5_principal) -> krb5_error_code;
/// Begin iterating over the credentials in a cache.
pub fn krb5_cc_start_seq_get(context: krb5_context,
cache: krb5_ccache,
cursor: *mut krb5_cc_cursor) -> krb5_error_code;
/// Fetch the next credential in an iteration.
pub fn krb5_cc_next_cred(context: krb5_context,
cache: krb5_ccache,
cursor: *mut krb5_cc_cursor,
creds: *mut krb5_creds) -> krb5_error_code;
/// Finish an iteration and release the cursor.
pub fn krb5_cc_end_seq_get(context: krb5_context,
cache: krb5_ccache,
cursor: *mut krb5_cc_cursor) -> krb5_error_code;
// TODO: Doc
pub fn krb5_cc_remove_cred(context: krb5_context,
cache: krb5_ccache,
flags: krb5_flags) -> krb5_error_code;
/// Set the operational flags (e.g. KRB5_TC_OPENCLOSE) of a cache.
pub fn krb5_cc_set_flags(context: krb5_context,
cache: krb5_ccache,
flags: krb5_flags) -> krb5_error_code;
/// Retrieve the operational flags of a cache.
pub fn krb5_cc_get_flags(context: krb5_context,
cache: krb5_ccache,
flags: *mut krb5_flags) -> krb5_error_code;
/// Retrieve the type (e.g. "FILE") of a credential cache.
pub fn krb5_cc_get_type(context: krb5_context,
cache: krb5_ccache) -> *const c_char;
/// Move the contents of `src` into `dst` and destroy `src`.
pub fn krb5_cc_move(context: krb5_context,
src: krb5_ccache,
dst: krb5_ccache) -> krb5_error_code;
/// Retrieve the time the cache was last modified.
pub fn krb5_cc_last_change_time(context: krb5_context,
ccache: krb5_ccache,
change_time: *mut krb5_timestamp) -> krb5_error_code;
/// Lock a cache for exclusive access; pair with krb5_cc_unlock.
pub fn krb5_cc_lock(context: krb5_context,
ccache: krb5_ccache) -> krb5_error_code;
/// Release a lock taken with krb5_cc_lock.
pub fn krb5_cc_unlock(context: krb5_context,
ccache: krb5_ccache) -> krb5_error_code;
/// Begin iterating over the credential cache collection.
pub fn krb5_cccol_cursor_new(context: krb5_context,
cursor: *mut krb5_cccol_cursor) -> krb5_error_code;
/// Fetch the next cache in the collection.
pub fn krb5_cccol_cursor_next(context: krb5_context,
cursor: krb5_cccol_cursor,
ccache: *mut krb5_ccache) -> krb5_error_code;
/// Release a collection cursor.
pub fn krb5_cccol_cursor_free(context: krb5_context,
cursor: *mut krb5_cccol_cursor) -> krb5_error_code;
/// Check whether any cache in the collection contains credentials.
pub fn krb5_cccol_have_content(context: krb5_context) -> krb5_error_code;
/// Retrieve the most recent modification time of any cache in the
/// collection.
pub fn krb5_cccol_last_change_time(context: krb5_context,
change_time: *mut krb5_timestamp) -> krb5_error_code;
/// Lock the whole cache collection; pair with krb5_cccol_unlock.
pub fn krb5_cccol_lock(context: krb5_context) -> krb5_error_code;
/// Release a lock taken with krb5_cccol_lock.
pub fn krb5_cccol_unlock(context: krb5_context) -> krb5_error_code;
/// Create a new cache of type `type_` with a unique name.
pub fn krb5_cc_new_unique(context: krb5_context,
type_: *const c_char,
hint: *const c_char,
id: *mut krb5_ccache) -> krb5_error_code;
}
/// Opaque replay cache handle.
pub enum krb5_rc_st {}
pub type krb5_rcache = *mut krb5_rc_st;
/// Long enough for MAXPATHLEN + some extra
pub const MAX_KEYTAB_NAME_LEN: usize = 1100;
/// Cursor for sequential traversal of a key table.
pub type krb5_kt_cursor = krb5_pointer;
/// A single key table entry.
#[repr(C)]
pub struct krb5_keytab_entry {
// Sanity-check magic number
pub magic: krb5_magic,
// Principal of this key
pub principal: krb5_principal,
// Time the entry was written to the keytab
pub timestamp: krb5_timestamp,
// Key version number
pub vno: krb5_kvno,
// The key itself
pub key: krb5_keyblock
}
/// Opaque key table handle.
pub enum _krb5_kt {}
pub type krb5_keytab = *mut _krb5_kt;
#[link(name = "krb5")]
extern "C" {
/// Return the type of a key table (e.g. "FILE").
pub fn krb5_kt_get_type(context: krb5_context,
keytab: krb5_keytab) -> *const c_char;
/// Copy the name of a key table into `name` (at most `namelen` bytes).
pub fn krb5_kt_get_name(context: krb5_context,
keytab: krb5_keytab,
name: *mut c_char,
namelen: c_uint) -> krb5_error_code;
/// Close a key table handle.
pub fn krb5_kt_close(context: krb5_context,
keytab: krb5_keytab) -> krb5_error_code;
/// Look up an entry by principal, key version and enctype; free the
/// result with krb5_free_keytab_entry_contents.
pub fn krb5_kt_get_entry(context: krb5_context,
keytab: krb5_keytab,
principal: krb5_principal,
vno: krb5_kvno,
enctype: krb5_enctype,
entry: *mut krb5_keytab_entry) -> krb5_error_code;
/// Begin iterating over keytab entries.
pub fn krb5_kt_start_seq_get(context: krb5_context,
keytab: krb5_keytab,
cursor: *mut krb5_kt_cursor) -> krb5_error_code;
/// Fetch the next keytab entry in an iteration.
pub fn krb5_kt_next_entry(context: krb5_context,
keytab: krb5_keytab,
entry: *mut krb5_keytab_entry,
cursor: *mut krb5_kt_cursor) -> krb5_error_code;
/// Finish a keytab iteration and release the cursor.
pub fn krb5_kt_end_seq_get(context: krb5_context,
keytab: krb5_keytab,
cursor: *mut krb5_kt_cursor) -> krb5_error_code;
/// Check whether the keytab exists and contains any entries.
pub fn krb5_kt_have_content(context: krb5_context,
keytab: krb5_keytab) -> krb5_error_code;
}
// Flags for krb5_init_context_profile().
/// Use secure context configuration
pub const KRB5_INIT_CONTEXT_SECURE: krb5_flags = 0x1;
/// Use KDC configuration if available
pub const KRB5_INIT_CONTEXT_KDC: krb5_flags = 0x2;
#[link(name = "krb5")]
extern "C" {
/// Create a krb5 library context.
pub fn krb5_init_context(context: *mut krb5_context) -> krb5_error_code;
/// Create a krb5 library context using only secure (root-safe)
/// configuration sources.
pub fn krb5_init_secure_context(context: *mut krb5_context) -> krb5_error_code;
/// Create a krb5 library context using a specified profile and
/// `KRB5_INIT_CONTEXT_*` flags.
///
/// NOTE(review): `context` is an out-parameter in the C prototype
/// (`krb5_context *context`); the original binding declared it by
/// value, which does not match the C ABI.
pub fn krb5_init_context_profile(profile: *mut _profile_t,
flags: krb5_flags,
context: *mut krb5_context) -> krb5_error_code;
/// Free a krb5 library context.
pub fn krb5_free_context(context: krb5_context);
/// Copy a krb5 library context into a newly allocated context.
///
/// NOTE(review): `nctx_out` is an out-parameter in the C prototype
/// (`krb5_context *nctx_out`); the original binding declared it by
/// value, which does not match the C ABI.
pub fn krb5_copy_context(ctx: krb5_context,
nctx_out: *mut krb5_context) -> krb5_error_code;
/// Set the default TGS encryption type list for the context.
pub fn krb5_set_default_tgs_enctypes(context: krb5_context,
etypes: *const krb5_enctype) -> krb5_error_code;
/// Return the permitted encryption types; free with krb5_free_enctypes.
pub fn krb5_get_permitted_enctypes(context: krb5_context,
ktypes: *mut *mut krb5_enctype) -> krb5_error_code;
/// Report whether the library was built thread-safe.
pub fn krb5_is_thread_safe() -> krb5_boolean;
/// Decrypt a ticket using the key fetched from a keytab.
pub fn krb5_server_decrypt_ticket_keytab(context: krb5_context,
kt: krb5_keytab,
ticket: *mut krb5_ticket) -> krb5_error_code;
/// Free an array of credentials (e.g. TGTs returned by forwarding
/// calls).
///
/// NOTE(review): renamed the misspelled parameter `rgts` to `tgts`
/// (parameter names do not affect the C ABI).
pub fn krb5_free_tgt_creds(context: krb5_context,
tgts: *mut *mut krb5_creds);
}
// Option flags for krb5_get_credentials().
/// Want user-user ticket
pub const KRB5_GC_USER_USER: krb5_flags = 1;
/// Want cached ticket only
pub const KRB5_GC_CACHED: krb5_flags = 2;
/// Set canonicalize KDC option
pub const KRB5_GC_CANONICALIZE: krb5_flags = 4;
/// Do not store in credential cache
pub const KRB5_GC_NO_STORE: krb5_flags = 8;
/// Acquire forwardable tickets
pub const KRB5_GC_FORWARDABLE: krb5_flags = 16;
/// Disable transited check
pub const KRB5_GC_NO_TRANSIT_CHECK: krb5_flags = 32;
/// Constrained delegation
pub const KRB5_GC_CONSTRAINED_DELEGATION: krb5_flags = 64;
#[link(name = "krb5")]
extern "C" {
/// Get a service ticket, using the cache and a TGS exchange if
/// necessary; `options` is a mask of KRB5_GC_* flags. Free `out_creds`
/// with krb5_free_creds.
pub fn krb5_get_credentials(context: krb5_context,
options: krb5_flags,
ccache: krb5_ccache,
in_creds: *mut krb5_creds,
out_creds: *mut *mut krb5_creds) -> krb5_error_code;
/// Get credentials by validating a postdated ticket.
pub fn krb5_get_credentials_validate(context: krb5_context,
options: krb5_flags,
ccache: krb5_ccache,
in_creds: *mut krb5_creds,
out_creds: *mut *mut krb5_creds) -> krb5_error_code;
/// Get credentials by renewing an existing ticket.
pub fn krb5_get_credentials_renew(context: krb5_context,
options: krb5_flags,
ccache: krb5_ccache,
in_creds: *mut krb5_creds,
out_creds: *mut *mut krb5_creds) -> krb5_error_code;
/// Create a KRB_AP_REQ for `service`/`hostname`, fetching credentials
/// from `ccache` as necessary.
pub fn krb5_mk_req(context: krb5_context,
auth_context: *mut krb5_auth_context,
ap_req_options: krb5_flags,
service: *mut c_char,
hostname: *mut c_char,
in_data: *mut krb5_data,
ccache: krb5_ccache,
outbuf: *mut krb5_data) -> krb5_error_code;
/// Create a KRB_AP_REQ using already-obtained credentials.
pub fn krb5_mk_req_extended(context: krb5_context,
auth_context: *mut krb5_auth_context,
ap_req_options: krb5_flags,
in_data: *mut krb5_data,
in_creds: *mut krb5_creds,
outbuf: *mut krb5_data) -> krb5_error_code;
/// Format and encrypt a KRB_AP_REP message.
pub fn krb5_mk_rep(context: krb5_context,
auth_context: krb5_auth_context,
outbuf: *mut krb5_data) -> krb5_error_code;
/// Format and encrypt a KRB_AP_REP message for DCE RPC.
pub fn krb5_mk_rep_dce(context: krb5_context,
auth_context: krb5_auth_context,
outbuf: *mut krb5_data) -> krb5_error_code;
/// Parse and decrypt a KRB_AP_REP message; free `repl` with
/// krb5_free_ap_rep_enc_part.
pub fn krb5_rd_rep(context: krb5_context,
auth_context: krb5_auth_context,
inbuf: *const krb5_data,
repl: *mut *mut krb5_ap_rep_enc_part) -> krb5_error_code;
/// Parse and decrypt a DCE-style KRB_AP_REP, returning its nonce.
pub fn krb5_rd_rep_dce(context: krb5_context,
auth_context: krb5_auth_context,
inbuf: *const krb5_data,
nonce: *mut krb5_ui_4) -> krb5_error_code;
/// Format and encode a KRB_ERROR message.
pub fn krb5_mk_error(context: krb5_context,
dec_err: *const krb5_error,
enc_err: *mut krb5_data) -> krb5_error_code;
/// Decode a KRB-ERROR message; free `dec_error` with krb5_free_error.
pub fn krb5_rd_error(context: krb5_context,
enc_errbuf: *const krb5_data,
dec_error: *mut *mut krb5_error) -> krb5_error_code;
/// Process a KRB-SAFE message, verifying its integrity.
pub fn krb5_rd_safe(context: krb5_context,
auth_context: krb5_auth_context,
inbuf: *const krb5_data,
outbuf: *mut krb5_data,
outdata: *mut krb5_replay_data) -> krb5_error_code;
/// Process a KRB-PRIV message, decrypting the user data.
pub fn krb5_rd_priv(context: krb5_context,
auth_context: krb5_auth_context,
inbuf: *const krb5_data,
outbuf: *mut krb5_data,
outdata: *mut krb5_replay_data) -> krb5_error_code;
/// Convert a string principal name into a krb5_principal; free with
/// krb5_free_principal.
pub fn krb5_parse_name(context: krb5_context,
name: *const c_char,
principal_out: *mut krb5_principal) -> krb5_error_code;
}
// Flags for krb5_parse_name_flags().
/// Error if realm is present
pub const KRB5_PRINCIPAL_PARSE_NO_REALM: krb5_flags = 0x1;
/// Error if realm is not present
pub const KRB5_PRINCIPAL_PARSE_REQUIRE_REALM: krb5_flags = 0x2;
/// Create single-component enterprise principal
pub const KRB5_PRINCIPAL_PARSE_ENTERPRSIE: krb5_flags = 0x4;
/// Correctly-named alias for `KRB5_PRINCIPAL_PARSE_ENTERPRSIE`: the C
/// constant is `KRB5_PRINCIPAL_PARSE_ENTERPRISE`. The misspelled name
/// is kept for backward compatibility.
pub const KRB5_PRINCIPAL_PARSE_ENTERPRISE: krb5_flags = KRB5_PRINCIPAL_PARSE_ENTERPRSIE;
/// Ignore realm if present
pub const KRB5_PRINCIPAL_PARSE_IGNORE_REALM: krb5_flags = 0x8;
#[link(name = "krb5")]
extern "C" {
/// Convert a string principal name to a krb5_principal, with
/// KRB5_PRINCIPAL_PARSE_* flags controlling realm treatment.
pub fn krb5_parse_name_flags(context: krb5_context,
name: *const c_char,
flags: krb5_flags,
principal_out: *mut krb5_principal) -> krb5_error_code;
/// Convert a principal to its string representation; free `name` with
/// krb5_free_unparsed_name.
pub fn krb5_unparse_name(context: krb5_context,
principal: krb5_const_principal,
name: *mut *mut c_char) -> krb5_error_code;
/// Like krb5_unparse_name, but reuses/resizes the buffer at `*name`,
/// whose size is tracked in/out through `*size`.
pub fn krb5_unparse_name_ext(context: krb5_context,
principal: krb5_const_principal,
name: *mut *mut c_char,
size: *mut c_uint) -> krb5_error_code;
}
// Flags for krb5_unparse_name_flags().
/// Omit realm if it is the local realm
pub const KRB5_PRINCIPAL_UNPARSE_SHORT: krb5_flags = 0x1;
/// Omit realm always
pub const KRB5_PRINCIPAL_UNPARSE_NO_REALM: krb5_flags = 0x2;
/// Don't escape special characters
pub const KRB5_PRINCIPAL_UNPARSE_DISPLAY: krb5_flags = 0x4;
#[link(name = "krb5")]
extern "C" {
/// Convert a principal to a string, with KRB5_PRINCIPAL_UNPARSE_* flags
/// controlling realm display; free `name` with krb5_free_unparsed_name.
pub fn krb5_unparse_name_flags(context: krb5_context,
principal: krb5_const_principal,
flags: krb5_flags,
name: *mut *mut c_char) -> krb5_error_code;
/// Convert a principal to a string with unparse flags, reusing and
/// resizing the buffer at `*name` like `krb5_unparse_name_ext`.
///
/// NOTE(review): `size` is an in/out pointer in the C prototype
/// (`unsigned int *size`), matching `krb5_unparse_name_ext` above; the
/// original binding declared it by value, which does not match the C
/// ABI.
pub fn krb5_unparse_name_flags_ext(context: krb5_context,
principal: krb5_const_principal,
flags: krb5_flags,
name: *mut *mut c_char,
size: *mut c_uint) -> krb5_error_code;
/// Set the realm field of a principal, replacing any previous realm.
pub fn krb5_set_principal_realm(context: krb5_context,
principal: krb5_principal,
realm: *const c_char) -> krb5_error_code;
/// Search a list of addresses for a specified address.
///
/// NOTE(review): `addrlist` is `krb5_address *const *` in the C
/// prototype, i.e. a pointer to an array of `krb5_address` pointers;
/// the original binding declared it with one level of indirection
/// missing (`*mut krb5_address`), which does not match the C ABI.
pub fn krb5_address_search(context: krb5_context,
addr: *const krb5_address,
addrlist: *mut *const krb5_address) -> krb5_boolean;
/// Compare two addresses for equality.
pub fn krb5_address_compare(context: krb5_context,
addr1: *const krb5_address,
addr2: *const krb5_address) -> krb5_boolean;
/// Return an ordering (<0, 0, >0) of two addresses.
pub fn krb5_address_order(context: krb5_context,
addr1: *const krb5_address,
addr2: *const krb5_address) -> c_int;
/// Test whether two principals are in the same realm.
pub fn krb5_realm_compare(context: krb5_context,
princ1: krb5_const_principal,
princ2: krb5_const_principal) -> krb5_boolean;
/// Compare two principals, including their realms, for equality.
pub fn krb5_principal_compare(context: krb5_context,
princ1: krb5_const_principal,
princ2: krb5_const_principal) -> krb5_boolean;
/// Compare two principals ignoring their realms.
pub fn krb5_principal_compare_any_realm(context: krb5_context,
princ1: krb5_const_principal,
princ2: krb5_const_principal) -> krb5_boolean;
}
// Flags for krb5_principal_compare_flags().
/// Ignore the realm component when comparing
pub const KRB5_PRINCIPAL_COMPARE_INGORE_REALM: krb5_flags = 1;
/// Correctly-named alias for `KRB5_PRINCIPAL_COMPARE_INGORE_REALM`:
/// the C constant is `KRB5_PRINCIPAL_COMPARE_IGNORE_REALM`. The
/// misspelled name is kept for backward compatibility.
pub const KRB5_PRINCIPAL_COMPARE_IGNORE_REALM: krb5_flags = KRB5_PRINCIPAL_COMPARE_INGORE_REALM;
/// Treat principals as enterprise principals (UPNs)
pub const KRB5_PRINCIPAL_COMPARE_ENTERPRSIE: krb5_flags = 2;
/// Correctly-named alias for `KRB5_PRINCIPAL_COMPARE_ENTERPRSIE`: the
/// C constant is `KRB5_PRINCIPAL_COMPARE_ENTERPRISE`. The misspelled
/// name is kept for backward compatibility.
pub const KRB5_PRINCIPAL_COMPARE_ENTERPRISE: krb5_flags = KRB5_PRINCIPAL_COMPARE_ENTERPRSIE;
/// Compare names case-insensitively
pub const KRB5_PRINCIPAL_COMPARE_CASEFOLD: krb5_flags = 4;
/// Treat principal names as UTF-8 strings
pub const KRB5_PRINCIPAL_COMPARE_UTF8: krb5_flags = 8;
#[link(name = "krb5")]
extern "C" {
/// Compare two principals with KRB5_PRINCIPAL_COMPARE_* flags.
pub fn krb5_principal_compare_flags(context: krb5_context,
princ1: krb5_const_principal,
princ2: krb5_const_principal,
flags: krb5_flags) -> krb5_boolean;
/// Allocate a keyblock of the given enctype and key length; free with
/// krb5_free_keyblock.
pub fn krb5_init_keyblock(context: krb5_context,
enctype: krb5_enctype,
length: usize,
out: *mut *mut krb5_keyblock) -> krb5_error_code;
/// Copy a keyblock into newly allocated storage.
pub fn krb5_copy_keyblock(context: krb5_context,
from: *const krb5_keyblock,
to: *mut *mut krb5_keyblock) -> krb5_error_code;
/// Copy the contents of a keyblock into an existing keyblock.
pub fn krb5_copy_keyblock_contents(context: krb5_context,
from: *const krb5_keyblock,
to: *mut krb5_keyblock) -> krb5_error_code;
/// Copy a credentials structure.
pub fn krb5_copy_creds(context: krb5_context,
incred: *const krb5_creds,
outcred: *mut *mut krb5_creds) -> krb5_error_code;
/// Copy a krb5_data object.
pub fn krb5_copy_data(context: krb5_context,
indata: *const krb5_data,
outdata: *mut *mut krb5_data) -> krb5_error_code;
/// Copy a principal.
pub fn krb5_copy_principal(context: krb5_context,
inprinc: krb5_const_principal,
outprinc: *mut krb5_principal) -> krb5_error_code;
/// Copy an array of addresses.
pub fn krb5_copy_addresses(context: krb5_context,
inaddr: *mut *const krb5_address,
outaddr: *mut *mut krb5_address) -> krb5_error_code;
/// Copy a ticket structure.
pub fn krb5_copy_ticket(context: krb5_context,
from: *const krb5_ticket,
pto: *mut *mut krb5_ticket) -> krb5_error_code;
/// Copy an array of authorization data.
pub fn krb5_copy_authdata(context: krb5_context,
in_authdat: *mut *const krb5_authdata,
out: *mut *mut krb5_authdata) -> krb5_error_code;
/// Find authorization data elements of type `ad_type`, searching the
/// ticket and AP-REQ authdata lists.
pub fn krb5_find_authdata(context: krb5_context,
ticket_authdata: *mut *const krb5_authdata,
ap_req_authdata: *mut *const krb5_authdata,
ad_type: krb5_authdatatype,
results: *mut *mut *mut krb5_authdata) -> krb5_error_code;
/// Merge two authorization data lists into a new list.
pub fn krb5_merge_authdata(context: krb5_context,
inauthdat1: *mut *const krb5_authdata,
inauthdat2: *mut *const krb5_authdata,
outauthdat: *mut *mut *mut krb5_authdata) -> krb5_error_code;
/// Copy an authenticator structure.
pub fn krb5_copy_authenticator(context: krb5_context,
authfrom: *const krb5_authenticator,
authto: *mut *mut krb5_authenticator) -> krb5_error_code;
/// Copy a checksum structure.
pub fn krb5_copy_checksum(context: krb5_context,
ckfrom: *const krb5_checksum,
ckto: *mut *mut krb5_checksum) -> krb5_error_code;
/// Open a replay cache for a server, named after `piece`.
pub fn krb5_get_server_rcache(context: krb5_context,
piece: *const krb5_data,
rcptr: *mut krb5_rcache) -> krb5_error_code;
/// Build a principal from a realm of length `rlen` and variadic
/// component arguments.
pub fn krb5_build_principal_ext(context: krb5_context,
princ: *mut krb5_principal,
rlen: c_uint,
realm: *const c_char, ...) -> krb5_error_code;
/// Build a principal from a realm of length `rlen` and variadic
/// NULL-terminated name-component strings.
///
/// NOTE(review): renamed the misspelled parameter `real` to `realm`
/// for consistency with `krb5_build_principal_ext` (parameter names do
/// not affect the C ABI).
pub fn krb5_build_principal(context: krb5_context,
princ: *mut krb5_principal,
rlen: c_uint,
realm: *const c_char, ...) -> krb5_error_code;
// #[cfg(feature = "krb5_deprecated")]
// TODO: Doc
// TODO: krb5_build_principal_va
// TODO: Doc
// TODO: pub fn krb5_build_principal_alloc_va
/// Convert a Kerberos v4 (name, instance, realm) triple to a v5
/// principal.
pub fn krb5_425_conv_principal(context: krb5_context,
name: *const c_char,
instance: *const c_char,
realm: *const c_char,
princ: *mut krb5_principal) -> krb5_error_code;
/// Convert a v5 principal to Kerberos v4 (name, instance, realm)
/// strings written into caller-supplied buffers.
pub fn krb5_524_conv_principal(context: krb5_context,
princ: krb5_const_principal,
name: *mut c_char,
inst: *mut c_char,
realm: *mut c_char) -> krb5_error_code;
}
/// Opaque Kerberos v4 credentials structure, used only by the
/// deprecated v4 compatibility call below.
#[deprecated]
pub enum credentials {}
#[link(name = "krb5")]
extern "C" {
/// Convert Kerberos v5 credentials to Kerberos v4 credentials
/// (deprecated v4 compatibility).
#[allow(deprecated)]
pub fn krb5_524_convert_creds(context: krb5_context,
v5creds: *mut krb5_creds,
v4creds: *mut credentials) -> c_int;
// TODO: krb524_init_ets
/// Resolve a keytab name (e.g. "FILE:/etc/krb5.keytab") to a handle.
pub fn krb5_kt_resolve(context: krb5_context,
name: *const c_char,
ktid: *mut krb5_keytab) -> krb5_error_code;
/// Duplicate a keytab handle.
pub fn krb5_kt_dup(context: krb5_context,
in_: krb5_keytab,
out: *mut krb5_keytab) -> krb5_error_code;
/// Copy the default keytab name into `name`.
pub fn krb5_kt_default_name(context: krb5_context,
name: *mut c_char,
name_size: c_int) -> krb5_error_code;
/// Resolve the default keytab.
pub fn krb5_kt_default(context: krb5_context,
id: *mut krb5_keytab) -> krb5_error_code;
/// Resolve the default client keytab.
pub fn krb5_kt_client_default(context: krb5_context,
keytab_out: *mut krb5_keytab) -> krb5_error_code;
/// Free the contents of a keytab entry (but not the entry itself).
pub fn krb5_free_keytab_entry_contents(context: krb5_context,
entry: *mut krb5_keytab_entry) -> krb5_error_code;
/// Free the contents of a keytab entry (alternate name for
/// krb5_free_keytab_entry_contents).
pub fn krb5_kt_free_entry(context: krb5_context,
entry: *mut krb5_keytab_entry) -> krb5_error_code;
/// Remove an entry from a keytab.
pub fn krb5_kt_remove_entry(context: krb5_context,
id: krb5_keytab,
entry: *mut krb5_keytab_entry) -> krb5_error_code;
/// Add a new entry to a keytab.
pub fn krb5_kt_add_entry(context: krb5_context,
id: krb5_keytab,
entry: *mut krb5_keytab_entry) -> krb5_error_code;
/// Convert a principal name into the default salt for that principal.
pub fn krb5_principal2salt(context: krb5_context,
pr: krb5_const_principal,
ret: *mut krb5_data) -> krb5_error_code;
/// Resolve a cache name (e.g. "FILE:/tmp/krb5cc_0") to a handle.
pub fn krb5_cc_resolve(context: krb5_context,
name: *const c_char,
cache: *mut krb5_ccache) -> krb5_error_code;
/// Duplicate a credential cache handle.
pub fn krb5_cc_dup(context: krb5_context,
in_: krb5_ccache,
out: *mut krb5_ccache) -> krb5_error_code;
/// Return the name of the default credential cache.
pub fn krb5_cc_default_name(context: krb5_context) -> *const c_char;
/// Set the default credential cache name for the context.
pub fn krb5_cc_set_default_name(context: krb5_context,
name: *const c_char) -> krb5_error_code;
/// Resolve the default credential cache.
pub fn krb5_cc_default(context: krb5_context,
ccache: *mut krb5_ccache) -> krb5_error_code;
/// Copy the contents of one credential cache to another.
pub fn krb5_cc_copy_creds(context: krb5_context,
incc: krb5_ccache,
outcc: krb5_ccache) -> krb5_error_code;
/// Get a configuration value stored in the cache under a
/// pseudo-principal.
pub fn krb5_cc_get_config(context: krb5_context,
id: krb5_ccache,
principal: krb5_const_principal,
key: *const c_char,
data: *mut krb5_data) -> krb5_error_code;
/// Store a configuration value in the cache under a pseudo-principal.
pub fn krb5_cc_set_config(context: krb5_context,
id: krb5_ccache,
principal: krb5_const_principal,
key: *const c_char,
data: *mut krb5_data) -> krb5_error_code;
/// Test whether a principal is a configuration pseudo-principal.
pub fn krb5_is_config_principal(context: krb5_context,
principal: krb5_const_principal) -> krb5_boolean;
/// Make the given cache the primary cache of its collection.
pub fn krb5_cc_switch(context: krb5_context,
cache: krb5_ccache) -> krb5_error_code;
/// Determine whether a cache type supports switching.
pub fn krb5_cc_support_switch(context: krb5_context,
type_: *const c_char) -> krb5_boolean;
/// Find a cache in the collection whose default principal is `client`.
pub fn krb5_cc_cache_match(context: krb5_context,
client: krb5_principal,
cache_out: *mut krb5_ccache) -> krb5_error_code;
/// Select the best cache in the collection for communicating with
/// `server`.
pub fn krb5_cc_select(context: krb5_context,
server: krb5_principal,
cache_out: *mut krb5_ccache,
princ_out: *mut krb5_principal) -> krb5_error_code;
pub fn krb5_free_principal(context: krb5_context,
val: krb5_principal);
// TODO: Doc
pub fn krb5_free_authenticator(context: krb5_context,
val: *mut krb5_authenticator);
// TODO: Doc
pub fn krb5_free_addresses(context: krb5_context,
val: *mut *mut krb5_address);
// TODO: Doc
pub fn krb5_free_authdata(context: krb5_context,
val: *mut *mut krb5_authdata);
// TODO: Doc
pub fn krb5_free_ticket(context: krb5_context,
val: *mut krb5_ticket);
// TODO: Doc
pub fn krb5_free_error(context: krb5_context,
val: *mut krb5_error);
// TODO: Doc
pub fn krb5_free_creds(context: krb5_context,
val: *mut krb5_creds);
// TODO: Doc
pub fn krb5_free_cred_contents(context: krb5_context,
val: *mut krb5_creds);
// TODO: Doc
pub fn krb5_free_checksum(context: krb5_context,
val: *mut krb5_checksum);
// TODO: Doc
pub fn krb5_free_checksum_contents(context: krb5_context,
val: *mut krb5_checksum);
// TODO: Doc
pub fn krb5_free_keyblock(context: krb5_context,
val: *mut krb5_keyblock);
// TODO: Doc
pub fn krb5_free_keyblock_contents(context: krb5_context,
val: *mut krb5_keyblock);
// TODO: Doc
pub fn krb5_free_ap_rep_enc_part(context: krb5_context,
val: *mut krb5_ap_rep_enc_part);
// TODO: Doc
pub fn krb5_free_data(context: krb5_context,
val: *mut krb5_data);
// TODO: Doc
pub fn krb5_free_octet_data(context: krb5_context,
val: *mut krb5_octet_data);
// TODO: Doc
pub fn krb5_free_data_contents(context: krb5_context,
val: *mut krb5_data);
// TODO: Doc
pub fn krb5_free_unparsed_name(context: krb5_context,
val: *mut c_char);
// TODO: Doc
pub fn krb5_free_string(context: krb5_context,
val: *mut c_char);
// TODO: Doc
pub fn krb5_free_enctypes(context: krb5_context,
val: *mut krb5_enctype);
// TODO: Doc
pub fn krb5_free_cksumtypes(context: krb5_context,
val: *mut krb5_cksumtype);
/// Retrieve the current time (seconds and microseconds), adjusted by
/// the context's clock offset.
pub fn krb5_us_timeofday(context: krb5_context,
seconds: *mut krb5_timestamp,
microseconds: *mut krb5_int32) -> krb5_error_code;
/// Retrieve the current time, adjusted by the context's clock offset.
pub fn krb5_timeofday(context: krb5_context,
timeret: *mut krb5_timestamp) -> krb5_error_code;
/// Check if `date` is within the allowable clock skew of the current
/// time.
pub fn krb5_check_clockskew(context: krb5_context,
date: krb5_timestamp) -> krb5_error_code;
/// Return all the local machine's addresses as an array.
pub fn krb5_os_localaddr(context: krb5_context,
addr: *mut *mut *mut krb5_address) -> krb5_error_code;
/// Retrieve the default realm; free with krb5_free_default_realm.
pub fn krb5_get_default_realm(context: krb5_context,
lrealm: *mut *mut c_char) -> krb5_error_code;
/// Override the default realm for the context.
pub fn krb5_set_default_realm(context: krb5_context,
lrealm: *const c_char) -> krb5_error_code;
/// Free a realm string returned by krb5_get_default_realm.
pub fn krb5_free_default_realm(context: krb5_context,
lrealm: *mut c_char);
/// Generate a full principal name from a service name and hostname.
pub fn krb5_sname_to_principal(context: krb5_context,
hostname: *const c_char,
sname: *const c_char,
type_: krb5_int32,
ret_princ: *mut krb5_principal) -> krb5_error_code;
/// Test whether `princ` matches the (possibly wildcarded) `matching`
/// principal.
pub fn krb5_sname_match(context: krb5_context,
matching: krb5_const_principal,
princ: krb5_const_principal) -> krb5_boolean;
/// Change a password for an existing principal using its old-password
/// credentials.
pub fn krb5_change_password(context: krb5_context,
creds: *mut krb5_creds,
newpw: *mut c_char,
result_code: *mut c_int,
result_code_string: *mut krb5_data,
result_string: *mut krb5_data) -> krb5_error_code;
/// Set a password for a principal using specified credentials
/// (set-password protocol).
pub fn krb5_set_password(context: krb5_context,
creds: *mut krb5_creds,
newpw: *mut c_char,
change_password_for: krb5_principal,
result_code: *mut c_int,
result_code_string: *mut krb5_data,
result_string: *mut krb5_data) -> krb5_error_code;
/// Set a password for a principal using credentials from a cache.
///
/// NOTE(review): the exported C symbol is
/// `krb5_set_password_using_ccache`; the original binding misspelled
/// it (`krb5_set_password_useing_ccache`), a symbol that does not
/// exist in libkrb5 and could never resolve at link time — so the
/// rename cannot break any working caller.
pub fn krb5_set_password_using_ccache(context: krb5_context,
ccache: krb5_ccache,
newpw: *mut c_char,
change_password_for: krb5_principal,
result_code: *mut c_int,
result_code_string: *mut krb5_data,
result_string: *mut krb5_data) -> krb5_error_code;
/// Convert a password-policy server response into a displayable
/// message; free `message_out` with krb5_free_string.
pub fn krb5_chpw_message(context: krb5_context,
server_string: *const krb5_data,
message_out: *mut *mut c_char) -> krb5_error_code;
/// Retrieve a handle to the configuration profile used by the context.
pub fn krb5_get_profile(context: krb5_context,
profile: *mut *mut _profile_t) -> krb5_error_code;
/// Obtain an initial ticket using a password (deprecated in C in favor
/// of krb5_get_init_creds_password).
///
/// NOTE(review): fixed the misspelled attribute `#[deprectated]`,
/// which made this item fail to compile whenever the
/// `krb5_deprecated` feature was enabled.
#[cfg(feature = "krb5_deprecated")]
#[deprecated]
pub fn krb5_get_in_tkt_with_password(context: krb5_context,
options: krb5_flags,
addrs: *mut *const krb5_address,
ktypes: *mut krb5_enctype,
pre_auth_types: *mut krb5_preauthtype,
password: *const c_char,
ccache: krb5_ccache,
creds: *mut krb5_creds,
ret_as_reply: *mut *mut krb5_kdc_rep) -> krb5_error_code;
/// Obtain an initial ticket using a pre-shared key (deprecated in C in
/// favor of the get_init_creds family).
///
/// NOTE(review): fixed the misspelled attribute `#[deprectated]`
/// (compile error when the `krb5_deprecated` feature is enabled), and
/// corrected the sixth parameter: the C prototype takes
/// `const krb5_keyblock *key`, not a password string — the original
/// binding appears to have been copy-pasted from the password variant.
#[cfg(feature = "krb5_deprecated")]
#[deprecated]
pub fn krb5_get_in_tkt_with_skey(context: krb5_context,
options: krb5_flags,
addrs: *mut *const krb5_address,
ktypes: *mut krb5_enctype,
pre_auth_types: *mut krb5_preauthtype,
key: *const krb5_keyblock,
ccache: krb5_ccache,
creds: *mut krb5_creds,
ret_as_reply: *mut *mut krb5_kdc_rep) -> krb5_error_code;
// krb5/krb5.h:5133
/// Obtain an initial ticket using a keytab (deprecated in C in favor
/// of krb5_get_init_creds_keytab).
///
/// NOTE(review): fixed the misspelled attribute `#[deprectated]`,
/// which made this item fail to compile whenever the
/// `krb5_deprecated` feature was enabled.
#[cfg(feature = "krb5_deprecated")]
#[deprecated]
pub fn krb5_get_in_tkt_with_keytab(context: krb5_context,
options: krb5_flags,
addrs: *mut *const krb5_address,
ktypes: *mut krb5_enctype,
pre_auth_types: *mut krb5_preauthtype,
arg_keytab: krb5_keytab,
ccache: krb5_ccache,
creds: *mut krb5_creds,
ret_as_reply: *mut *mut krb5_kdc_rep) -> krb5_error_code;
/// Parse and decrypt a KRB_AP_REQ message, returning the decrypted
/// ticket in `ticket` on success; free with krb5_free_ticket.
///
/// NOTE(review): renamed the misspelled parameter `auht_context` to
/// `auth_context` (parameter names do not affect the C ABI).
pub fn krb5_rd_req(context: krb5_context,
auth_context: *mut krb5_auth_context,
inbuf: *const krb5_data,
server: krb5_const_principal,
keytab: krb5_keytab,
ap_req_options: *mut krb5_flags,
ticket: *mut *mut krb5_ticket) -> krb5_error_code;
/// Retrieve a service key from a key table; `keyprocarg` is a keytab
/// name, or NULL for the default keytab.
pub fn krb5_kt_read_service_key(context: krb5_context,
keyprocarg: krb5_pointer,
principal: krb5_principal,
vno: krb5_kvno,
enctype: krb5_enctype,
key: *mut *mut krb5_keyblock) -> krb5_error_code;
/// Format a KRB-SAFE message, protecting `userdata` with an integrity
/// checksum.
///
/// NOTE(review): the last argument is `krb5_replay_data *outdata` in
/// the C prototype (it may be NULL unless sequence/time flags are set
/// on the auth context); the original binding passed the struct by
/// value, which does not match the C ABI. Cf. `krb5_mk_priv` below.
pub fn krb5_mk_safe(context: krb5_context,
auth_context: krb5_auth_context,
userdata: *mut krb5_data,
outbuf: *mut krb5_data,
outdata: *mut krb5_replay_data) -> krb5_error_code;
/// Format a KRB-PRIV message, encrypting the user data.
pub fn krb5_mk_priv(context: krb5_context,
auth_context: krb5_auth_context,
userdata: *const krb5_data,
outbuf: *mut krb5_data,
outdata: *mut krb5_replay_data) -> krb5_error_code;
/// Client-side authenticated-connection setup: send an AP-REQ over
/// `fd` and (optionally) receive the AP-REP.
///
/// NOTE(review): renamed the misspelled parameter `aapl_version` to
/// `appl_version`, matching `krb5_recvauth` below (parameter names do
/// not affect the C ABI).
pub fn krb5_sendauth(context: krb5_context,
auth_context: *mut krb5_auth_context,
fd: krb5_pointer,
appl_version: *mut c_char,
client: krb5_principal,
server: krb5_principal,
ap_req_options: krb5_flags,
in_data: *mut krb5_data,
in_creds: *mut krb5_creds,
ccache: krb5_ccache,
error: *mut *mut krb5_error,
rep_result: *mut *mut krb5_ap_rep_enc_part,
out_creds: *mut *mut krb5_creds) -> krb5_error_code;
/// Server-side counterpart to krb5_sendauth: receive and validate an
/// authentication exchange over `fd`.
///
/// NOTE(review): `auth_context` is `krb5_auth_context *` in the C
/// prototype (cf. `krb5_recvauth_version` just below and
/// `krb5_sendauth` above); the original binding declared it by value,
/// which does not match the C ABI.
pub fn krb5_recvauth(context: krb5_context,
auth_context: *mut krb5_auth_context,
fd: krb5_pointer,
appl_version: *mut c_char,
server: krb5_principal,
flags: krb5_int32,
keytab: krb5_keytab,
ticket: *mut *mut krb5_ticket) -> krb5_error_code;
/// Like krb5_recvauth, but returns the application version string
/// sent by the client instead of matching it against an expected one.
pub fn krb5_recvauth_version(context: krb5_context,
auth_context: *mut krb5_auth_context,
fd: krb5_pointer,
server: krb5_principal,
flags: krb5_int32,
keytab: krb5_keytab,
ticket: *mut *mut krb5_ticket,
version: *mut krb5_data) -> krb5_error_code;
/// Format a sequence of KRB-CRED messages for the given credentials.
pub fn krb5_mk_ncred(context: krb5_context,
auth_context: krb5_auth_context,
ppcreds: *mut *mut krb5_creds,
ppdata: *mut *mut krb5_data,
outdata: *mut krb5_replay_data) -> krb5_error_code;
/// Format a KRB-CRED message for a single set of credentials.
pub fn krb5_mk_1cred(context: krb5_context,
auth_context: krb5_auth_context,
pcreds: *mut krb5_creds,
ppdata: *mut *mut krb5_data,
outdata: *mut krb5_replay_data) -> krb5_error_code;
/// Read and validate a KRB-CRED message, returning the credentials it
/// contains.
pub fn krb5_rd_cred(context: krb5_context,
auth_context: krb5_auth_context,
pcreddata: *mut krb5_data,
pppcreds: *mut *mut *mut krb5_creds,
outdata: *mut krb5_replay_data) -> krb5_error_code;
/// Get a forwarded TGT for `client` and format a KRB-CRED message
/// suitable for sending to `rhost`.
pub fn krb5_fwd_tgt_creds(context: krb5_context,
auth_context: krb5_auth_context,
rhost: *mut c_char,
client: krb5_principal,
server: krb5_principal,
cc: krb5_ccache,
forwardable: c_int,
outbuf: *mut krb5_data) -> krb5_error_code;
/// Create and initialize an authentication context.
pub fn krb5_auth_con_init(context: krb5_context,
auth_context: *mut krb5_auth_context) -> krb5_error_code;
/// Free an authentication context.
pub fn krb5_auth_con_free(context: krb5_context,
auth_context: krb5_auth_context) -> krb5_error_code;
/// Set KRB5_AUTH_CONTEXT_* flags on an auth context.
pub fn krb5_auth_con_setflags(context: krb5_context,
auth_context: krb5_auth_context,
flags: krb5_int32) -> krb5_error_code;
/// Retrieve the flags of an auth context.
pub fn krb5_auth_con_getflags(context: krb5_context,
auth_context: krb5_auth_context,
flags: *mut krb5_int32) -> krb5_error_code;
/// Set the checksum callback (and its callback data) used when
/// building AP-REQ messages.
pub fn krb5_auth_con_set_checksum_func(context: krb5_context,
auth_context: krb5_auth_context,
func: Option<krb5_mk_req_checksum_func>,
data: *mut c_void) -> krb5_error_code;
/// Retrieve the checksum callback and its callback data.
pub fn krb5_auth_con_get_checksum_func(context: krb5_context,
auth_context: krb5_auth_context,
func: *mut Option<krb5_mk_req_checksum_func>,
data: *mut *mut c_void) -> krb5_error_code;
/// Set the local and remote addresses of an auth context.
pub fn krb5_auth_con_setaddrs(context: krb5_context,
auth_context: krb5_auth_context,
local_addr: *mut krb5_address,
remote_addr: *mut krb5_address) -> krb5_error_code;
/// Retrieve copies of the local and remote addresses.
pub fn krb5_auth_con_getaddrs(context: krb5_context,
auth_context: krb5_auth_context,
local_addr: *mut *mut krb5_address,
remote_addr: *mut *mut krb5_address) -> krb5_error_code;
/// Set the local and remote ports of an auth context.
pub fn krb5_auth_con_setports(context: krb5_context,
auth_context: krb5_auth_context,
local_port: *mut krb5_address,
remote_port: *mut krb5_address) -> krb5_error_code;
/// Set the session key for user-to-user authentication.
pub fn krb5_auth_con_setuseruserkey(context: krb5_context,
auth_context: krb5_auth_context,
keyblock: *mut krb5_keyblock) -> krb5_error_code;
/// Retrieve a copy of the session key.
pub fn krb5_auth_con_getkey(context: krb5_context,
auth_context: krb5_auth_context,
keyblock: *mut *mut krb5_keyblock) -> krb5_error_code;
/// Retrieve the session key as a krb5_key.
pub fn krb5_auth_con_getkey_k(context: krb5_context,
auth_context: krb5_auth_context,
key: *mut krb5_key) -> krb5_error_code;
/// Retrieve a copy of the send subkey.
pub fn krb5_auth_con_getsendsubkey(ctx: krb5_context,
ac: krb5_auth_context,
keyblock: *mut *mut krb5_keyblock) -> krb5_error_code;
/// Retrieve the send subkey as a krb5_key.
pub fn krb5_auth_con_getsendsubkey_k(ctx: krb5_context,
ac: krb5_auth_context,
key: *mut krb5_key) -> krb5_error_code;
/// Retrieve a copy of the receive subkey.
pub fn krb5_auth_con_getrecvsubkey(ctx: krb5_context,
ac: krb5_auth_context,
keyblock: *mut *mut krb5_keyblock) -> krb5_error_code;
/// Retrieve the receive subkey as a krb5_key.
pub fn krb5_auth_con_getrecvsubkey_k(ctx: krb5_context,
ac: krb5_auth_context,
key: *mut krb5_key) -> krb5_error_code;
/// Set the send subkey from a keyblock.
pub fn krb5_auth_con_setsendsubkey(ctx: krb5_context,
ac: krb5_auth_context,
keyblock: *mut krb5_keyblock) -> krb5_error_code;
/// Set the send subkey from a krb5_key.
pub fn krb5_auth_con_setsendsubkey_k(ctx: krb5_context,
ac: krb5_auth_context,
key: krb5_key) -> krb5_error_code;
/// Set the receive subkey from a keyblock.
pub fn krb5_auth_con_setrecvsubkey(ctx: krb5_context,
ac: krb5_auth_context,
keyblock: *mut krb5_keyblock) -> krb5_error_code;
/// Set the receive subkey from a krb5_key.
pub fn krb5_auth_con_setrecvsubkey_k(ctx: krb5_context,
ac: krb5_auth_context,
key: krb5_key) -> krb5_error_code;
/// Deprecated: use krb5_auth_con_getsendsubkey instead.
#[cfg(feature = "krb5_deprecated")]
#[deprecated]
pub fn krb5_auth_con_getlocalsubkey(context: krb5_context,
auth_context: krb5_auth_context,
keyblock: *mut *mut krb5_keyblock) -> krb5_error_code;
/// Deprecated: use krb5_auth_con_getrecvsubkey instead.
#[cfg(feature = "krb5_deprecated")]
#[deprecated]
pub fn krb5_auth_con_getremotesubkey(context: krb5_context,
auth_context: krb5_auth_context,
keyblock: *mut *mut krb5_keyblock) -> krb5_error_code;
/// Retrieve the local sequence number of an auth context.
pub fn krb5_auth_con_getlocalseqnumber(context: krb5_context,
auth_context: krb5_auth_context,
seqnumber: *mut krb5_int32) -> krb5_error_code;
/// Retrieve the remote sequence number of an auth context.
pub fn krb5_auth_con_getremoteseqnumber(context: krb5_context,
auth_context: krb5_auth_context,
seqnumber: *mut krb5_int32) -> krb5_error_code;
/// Deprecated in the C API.
#[cfg(feature = "krb5_deprecated")]
#[deprecated]
pub fn krb5_auth_con_initivector(context: krb5_context,
auth_context: krb5_auth_context) -> krb5_error_code;
/// Set the replay cache used by an auth context.
pub fn krb5_auth_con_setrcache(context: krb5_context,
auth_context: krb5_auth_context,
rcache: krb5_rcache) -> krb5_error_code;
/// Retrieve the replay cache of an auth context.
pub fn krb5_auth_con_getrcache(context: krb5_context,
auth_context: krb5_auth_context,
rcache: *mut krb5_rcache) -> krb5_error_code;
/// Retrieve the authenticator from a processed AP-REQ; free with
/// krb5_free_authenticator.
pub fn krb5_auth_con_getauthenticator(context: krb5_context,
auth_context: krb5_auth_context,
authenticator: *mut *mut krb5_authenticator) -> krb5_error_code;
/// Require a particular checksum type in incoming AP-REQs.
pub fn krb5_auth_con_set_req_cksumtype(context: krb5_context,
auth_context: krb5_auth_context,
cksumtype: krb5_cksumtype) -> krb5_error_code;
}
/// Character separating components of hierarchical realm names.
pub const KRB5_REALM_BRANCH_CHAR: c_char = b'.' as c_char;
#[link(name = "krb5")]
extern "C" {
/// Read a password from the terminal, prompting with `prompt` (and
/// verifying with `prompt2` if non-NULL); `size_return` is in/out:
/// buffer size on entry, password length on exit.
pub fn krb5_read_password(context: krb5_context,
prompt: *const c_char,
prompt2: *const c_char,
return_pwd: *mut c_char,
size_return: *mut c_uint) -> krb5_error_code;
/// Translate a principal name to a local username; `lname` is a
/// caller-supplied buffer of `lnsize_in` bytes.
pub fn krb5_aname_to_localname(context: krb5_context,
aname: krb5_const_principal,
lnsize_in: c_int,
lname: *mut c_char) -> krb5_error_code;
// TODO: Doc
pub fn krb5_get_host_realm(context: krb5_error_code,
host: *const c_char,
realmsp: *mut *mut *mut c_char) -> krb5_error_code;
/// Determine fallback realms for a host; free the list with
/// krb5_free_host_realm.
pub fn krb5_get_fallback_host_realm(context: krb5_context,
hdata: *mut krb5_data,
realmsp: *mut *mut *mut c_char) -> krb5_error_code;
/// Free a realm list returned by krb5_get_host_realm or
/// krb5_get_fallback_host_realm.
pub fn krb5_free_host_realm(context: krb5_context,
realmlist: *mut *const c_char) -> krb5_error_code;
// TODO: Doc
pub fn krb5_kuserok(context: krb5_error_code,
principal: krb5_principal,
luser: *const c_char) -> krb5_boolean;
// TODO: Doc
pub fn krb5_auth_con_getnaddrs(context: krb5_context,
auth_context: krb5_auth_context,
infd: c_int,
flags: c_int) -> krb5_error_code;
// TODO: Doc
pub fn krb5_set_real_time(context: krb5_context,
seconds: krb5_timestamp,
microseconds: krb5_int32) -> krb5_error_code;
// TODO: Doc
pub fn krb5_get_time_offsets(context: krb5_context,
seconds: *mut krb5_timestamp,
microseconds: *mut krb5_int32) -> krb5_error_code;
// TODO: Doc
pub fn krb5_string_to_enctype(string: *mut c_char,
enctypep: *mut krb5_enctype) -> krb5_error_code;
// TODO: Doc
pub fn krb5_string_to_salttype(string: *mut c_char,
salttypep: *mut krb5_int32) -> krb5_error_code;
// TODO: Doc
pub fn krb5_string_to_cksumtypep(string: *mut c_char,
cksumtypep: *mut krb5_cksumtype) -> krb5_error_code;
// TODO: Doc
pub fn krb5_string_to_timestamp(string: *mut c_char,
timestamp: *mut krb5_timestamp) -> krb5_error_code;
// TODO: Doc
pub fn krb5_string_to_deltat(string: *mut c_char,
deltatp: *mut krb5_deltat) -> krb5_error_code;
// TODO: Doc
pub fn krb5_enctype_to_string(enctype: krb5_enctype,
buffer: *mut c_char,
buflen: usize) -> krb5_error_code;
// TODO: Doc
pub fn krb5_enctype_to_name(enctype: krb5_enctype,
shortest: krb5_boolean,
buffer: *mut c_char,
buflen: usize) -> krb5_error_code;
// TODO: Doc
pub fn krb5_salttype_to_string(salttype: krb5_int32,
buffer: *mut c_char,
buflen: usize) -> krb5_error_code;
// TODO: Doc
pub fn krb5_cksumtype_to_string(cksumtype: krb5_cksumtype,
buffer: *mut c_char,
buflen: usize) -> krb5_error_code;
// TODO: Doc
pub fn krb5_timestamp_to_string(timestamp: krb5_timestamp,
buffer: *mut c_char,
buflen: usize) -> krb5_error_code;
// TODO: Doc
pub fn krb5_timestamp_to_sfstring(timestamp: krb5_timestamp,
buffer: *mut c_char,
buflen: usize,
pad: *mut c_char) -> krb5_error_code;
// TODO: Doc
pub fn krb5_deltat_to_string(deltat: krb5_deltat,
buffer: *mut c_char,
buflen: usize) -> krb5_error_code;
}
// TODO: `KRB5_TGS_NAME` constant
pub const KRB5_TGS_NAME_SIZE: usize = 6;
pub const KRB5_RECVAUTH_SKIP_VERSION: krb5_flags = 0x0001;
pub const KRB5_RECVAUTH_BADAUTHVERS: krb5_flags = 0x0002;
// A single prompt passed to a prompter callback: prompt text, a flag
// (presumably non-zero means "do not echo the input" — verify against
// krb5.h), and the buffer that receives the user's reply.
#[repr(C)]
pub struct krb5_prompt {
    pub prompt: *mut c_char,
    pub hidden: c_int,
    pub reply: *mut krb5_data,
}
// Callback type used by libkrb5 to collect interactive input.
// NOTE: last argument is actually `krb5_prompt prompts[]` in the original source,
// But this should be equivalent.
pub type krb5_prompter_fct = extern "C" fn(context: krb5_context,
                                           data: *mut c_void,
                                           name: *const c_char,
                                           banner: *const c_char,
                                           num_prompts: c_int,
                                           prompts: *mut krb5_prompt) -> krb5_error_code;
// Stock prompter implementation (reads from the terminal) provided by libkrb5.
#[link(name = "krb5")]
extern "C" {
    // TODO: Doc
    // NOTE: last argument is actually `krb5_prompt prompts[]` in the original source,
    // But this should be equivalent.
    pub fn krb5_prompter_posix(context: krb5_context,
                               data: *mut c_void,
                               name: *const c_char,
                               banner: *const c_char,
                               num_prompts: c_int,
                               prompts: *mut krb5_prompt) -> krb5_error_code;
}
// TODO: `KRB5_RESPONDER_QUESTION_PASSWRD` constant
// TODO: `KRB5_RESPONDER_QUESTION_OTP` constant
// OTP responder token formats and collection flags.
// TODO: Doc
pub const KRB5_RESPONDER_OTP_FORMAT_DECIMAL: krb5_flags = 0;
// TODO: Doc
pub const KRB5_RESPONDER_OTP_FORMAT_HEXADECIMAL: krb5_flags = 1;
// TODO: Doc
pub const KRB5_RESPONDER_OTP_FORMAT_ALPHANUMERIC: krb5_flags = 2;
// TODO: Doc
pub const KRB5_RESPONDER_OTP_FLAGS_COLLECT_TOKEN: krb5_flags = 0x0001;
// TODO: Doc
pub const KRB5_RESPONDER_OTP_FLAGS_COLLECT_PIN: krb5_flags = 0x0002;
// TODO: Doc
pub const KRB5_RESPONDER_OTP_FLAGS_NEXTOTP: krb5_flags = 0x0004;
// TODO: Doc
pub const KRB5_RESPONDER_OTP_FLAGS_SEPERATE_PIN: krb5_flags = 0x0008;
// TODO: `KRB5_RESPONDER_QUESTION_PKINIT` constant
// PKINIT responder token-state flags.
// TODO: Doc
pub const KRB5_RESPONDER_PKINIT_FLAGS_TOKEN_USER_PIN_COUNT_LOW: krb5_flags = (1 << 0);
// TODO: Doc
pub const KRB5_RESPONDER_PKINIT_FLAGS_TOKEN_USER_PIN_FINAL_TRY: krb5_flags = (1 << 1);
// TODO: Doc
pub const KRB5_RESPONDER_PKINIT_FLAGS_TOKEN_USER_PIN_LOCKED: krb5_flags = (1 << 2);
// TODO: Doc
// NOTE: where is `krb5_responder_context_st` really defined?
// I cannot find it in the original source file.
// Opaque struct for now
pub enum krb5_responder_context_st {}
pub type krb5_responder_context = *mut krb5_responder_context_st;
// Generic responder question/answer interface.
#[link(name = "krb5")]
extern "C" {
    // TODO: Doc
    pub fn krb5_responder_list_questions(ctx: krb5_context,
                                         rctx: krb5_responder_context) -> *const *const c_char;
    // TODO: Doc
    pub fn krb5_responder_get_challenge(ctx: krb5_context,
                                        rctx: krb5_responder_context,
                                        question: *const c_char) -> *const c_char;
    // TODO: Doc
    pub fn krb5_responder_set_answer(ctx: krb5_context,
                                     rctx: krb5_responder_context,
                                     question: *const c_char,
                                     answer: *const c_char) -> krb5_error_code;
}
// Callback type invoked by libkrb5 so an application can answer
// responder questions.
pub type krb5_responder_fn = extern "C" fn(ctx: krb5_context,
                                           data: *mut c_void,
                                           rctx: krb5_responder_context) -> krb5_error_code;
// Description of one OTP token involved in a challenge (C layout).
#[repr(C)]
pub struct krb5_responder_otp_tokeninfo {
    pub flags: krb5_flags,
    pub format: krb5_int32,
    pub length: krb5_int32,
    pub vendor: *mut c_char,
    pub challenge: *mut c_char,
    pub token_id: *mut c_char,
    pub alg_id: *mut c_char,
}
// TODO: Doc
#[repr(C)]
pub struct krb5_responder_otp_challenge {
pub service: *mut c_char,
pub tokeninfo: *mut *mut krb5_responder_otp_challenge,
}
// OTP-specific responder helpers.
#[link(name = "krb5")]
extern "C" {
    // TODO: Doc
    pub fn krb5_responder_otp_get_challenge(ctx: krb5_context,
                                            rctx: krb5_responder_context,
                                            chl: *mut *mut krb5_responder_otp_challenge) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_responder_otp_set_answer(ctx: krb5_context,
                                         rctx: krb5_responder_context,
                                         ti: usize,
                                         value: *const c_char,
                                         pin: *const c_char) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_responder_otp_challenge_free(ctx: krb5_context,
                                             rctx: krb5_responder_context,
                                             chl: *mut krb5_responder_otp_challenge);
}
// One PKINIT identity (certificate identity string plus token flags); C layout.
#[repr(C)]
pub struct krb5_responder_pkinit_identity {
    pub identity: *mut c_char,
    pub token_flags: krb5_int32,
}
// PKINIT challenge: an array of identity pointers; C layout.
#[repr(C)]
pub struct krb5_responder_pkinit_challenge {
    pub identities: *mut *mut krb5_responder_pkinit_identity,
}
// PKINIT-specific responder helpers.
#[link(name = "krb5")]
extern "C" {
    // TODO: Doc
    pub fn krb5_responder_pkinit_get_challenge(ctx: krb5_context,
                                               rctx: krb5_responder_context,
                                               chl_out: *mut *mut krb5_responder_pkinit_challenge) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_responder_pkinit_set_answer(ctx: krb5_context,
                                            rctx: krb5_responder_context,
                                            identity: *const c_char,
                                            pin: *const c_char) -> krb5_error_code;
    // TODO: Doc
    // FIX: `chl` was declared as `*mut krb5_responder_pkinit_identity`; the C
    // prototype frees a whole challenge (`krb5_responder_pkinit_challenge *`),
    // matching what `krb5_responder_pkinit_get_challenge` returns.
    pub fn krb5_responder_pkinit_challenge_free(ctx: krb5_context,
                                                rctx: krb5_responder_context,
                                                chl: *mut krb5_responder_pkinit_challenge);
}
// Options for the get_init_creds family (C layout); the `flags` field records
// which of the other fields have been explicitly set, using the
// KRB5_GET_INIT_CREDS_OPT_* bits below.
#[repr(C)]
pub struct krb5_get_init_creds_opt {
    pub flags: krb5_flags,
    pub tkt_life: krb5_deltat,
    pub renew_life: krb5_deltat,
    pub forwardable: c_int,
    pub proxiable: c_int,
    pub etype_list: *mut krb5_enctype,
    pub etype_list_length: c_int,
    pub address_list: *mut *mut krb5_address,
    pub preauth_list: *mut krb5_preauthtype,
    pub preauth_list_length: c_int,
    pub salt: *mut krb5_data,
}
pub const KRB5_GET_INIT_CREDS_OPT_TKT_LIFE: krb5_flags = 0x0001;
pub const KRB5_GET_INIT_CREDS_OPT_RENEW_LIFE: krb5_flags = 0x0002;
pub const KRB5_GET_INIT_CREDS_OPT_FORWARDABLE: krb5_flags = 0x0004;
pub const KRB5_GET_INIT_CREDS_OPT_PROXIABLE: krb5_flags = 0x0008;
pub const KRB5_GET_INIT_CREDS_OPT_ETYPE_LIST: krb5_flags = 0x0010;
pub const KRB5_GET_INIT_CREDS_OPT_ADDRESS_LIST: krb5_flags = 0x0020;
pub const KRB5_GET_INIT_CREDS_OPT_PREAUTH_LIST: krb5_flags = 0x0040;
pub const KRB5_GET_INIT_CREDS_OPT_SALT: krb5_flags = 0x0080;
pub const KRB5_GET_INIT_CREDS_OPT_CHG_PWD_PRMPT: krb5_flags = 0x0100;
pub const KRB5_GET_INIT_CREDS_OPT_CANONICALIZE: krb5_flags = 0x0200;
pub const KRB5_GET_INIT_CREDS_OPT_ANONYMOUS: krb5_flags = 0x0400;
// Allocation and field setters for `krb5_get_init_creds_opt`.
#[link(name = "krb5")]
extern "C" {
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_alloc(context: krb5_context,
                                         opt: *mut *mut krb5_get_init_creds_opt) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_free(context: krb5_context,
                                        opt: *mut krb5_get_init_creds_opt);
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_set_tkt_life(opt: *mut krb5_get_init_creds_opt,
                                                tkt_life: krb5_deltat);
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_set_renew_life(opt: *mut krb5_get_init_creds_opt,
                                                  renew_life: krb5_deltat);
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_set_forwardable(opt: *mut krb5_get_init_creds_opt,
                                                   forwardable: c_int);
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_set_proxiable(opt: *mut krb5_get_init_creds_opt,
                                                 proxiable: c_int);
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_set_canonicalize(opt: *mut krb5_get_init_creds_opt,
                                                    canonicalize: c_int);
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_set_anonymous(opt: *mut krb5_get_init_creds_opt,
                                                 anonymous: c_int);
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_set_etype_list(opt: *mut krb5_get_init_creds_opt,
                                                  etype_list: *mut krb5_enctype,
                                                  etype_list_length: c_int);
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_set_address_list(opt: *mut krb5_get_init_creds_opt,
                                                    addresses: *mut *mut krb5_address);
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_set_preauth_list(opt: *mut krb5_get_init_creds_opt,
                                                    preauth_list: *mut krb5_preauthtype,
                                                    preauth_list_length: c_int);
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_set_salt(opt: *mut krb5_get_init_creds_opt,
                                            salt: *mut krb5_data);
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_set_change_password_prompt(opt: *mut krb5_get_init_creds_opt,
                                                              prompt: c_int);
}
// Attribute/value pair for preauth option configuration (C layout).
#[repr(C)]
pub struct krb5_gic_opt_pa_data {
    pub attr: *mut c_char,
    pub value: *mut c_char,
}
// Extended (context-taking) setters for `krb5_get_init_creds_opt`.
#[link(name = "krb5")]
extern "C" {
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_set_pa(context: krb5_context,
                                          opt: *mut krb5_get_init_creds_opt,
                                          attr: *const c_char,
                                          value: *const c_char) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_set_fast_ccache_name(context: krb5_context,
                                                        opt: *mut krb5_get_init_creds_opt,
                                                        fast_ccache_name: *const c_char) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_set_fast_ccache(context: krb5_context,
                                                   opt: *mut krb5_get_init_creds_opt,
                                                   ccache: krb5_ccache) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_set_in_ccache(context: krb5_context,
                                                 opt: *mut krb5_get_init_creds_opt,
                                                 ccache: krb5_ccache) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_set_out_ccache(context: krb5_context,
                                                  opt: *mut krb5_get_init_creds_opt,
                                                  ccache: krb5_ccache) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_set_fast_flags(context: krb5_context,
                                                  opt: *mut krb5_get_init_creds_opt,
                                                  flags: krb5_flags) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_get_fast_flags(context: krb5_context,
                                                  opt: *mut krb5_get_init_creds_opt,
                                                  out_flags: *mut krb5_flags) -> krb5_error_code;
}
// TODO: Doc
pub const KRB5_FAST_REQUIRED: krb5_flags = 0x0001;
// Callback invoked with password/account expiration times after an initial
// credentials exchange.
// FIX: made `pub` — this alias appears in the public signature of
// `krb5_get_init_creds_opt_set_expire_callback`, so callers could not name
// the type while it was private (every other alias in this file is `pub`).
pub type krb5_expire_callback_func = extern "C" fn(context: krb5_context,
                                                   data: *mut c_void,
                                                   password_expiration: krb5_timestamp,
                                                   account_expiration: krb5_timestamp,
                                                   is_last_req: krb5_boolean);
// Expire callback / responder registration, and password-based initial creds.
#[link(name = "krb5")]
extern "C" {
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_set_expire_callback(context: krb5_context,
                                                       opt: *mut krb5_get_init_creds_opt,
                                                       cb: krb5_expire_callback_func,
                                                       data: *mut c_void) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_get_init_creds_opt_set_responder(context: krb5_context,
                                                 opt: *mut krb5_get_init_creds_opt,
                                                 responder: Option<krb5_responder_fn>,
                                                 data: *mut c_void) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_get_init_creds_password(context: krb5_context,
                                        creds: *mut krb5_creds,
                                        client: krb5_principal,
                                        password: *const c_char,
                                        prompter: Option<krb5_prompter_fct>,
                                        data: *mut c_void,
                                        start_time: krb5_deltat,
                                        in_tkt_service: *const c_char,
                                        k5_gic_options: *const krb5_get_init_creds_opt) -> krb5_error_code;
}
// Opaque handle for a stepwise initial-credentials exchange.
pub enum _krb5_init_creds_context {}
pub type krb5_init_creds_context = *mut _krb5_init_creds_context;
// TODO: Doc
pub const KRB5_INIT_CREDS_STEP_FLAG_CONTINUE: krb5_flags = 0x1;
// Stepwise initial-credentials API (`krb5_init_creds_*`).
#[link(name = "krb5")]
extern "C" {
    // TODO: Doc
    pub fn krb5_init_creds_free(context: krb5_context,
                                ctx: krb5_init_creds_context);
    // TODO: Doc
    pub fn krb5_init_creds_get(context: krb5_context,
                               ctx: krb5_init_creds_context) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_init_creds_get_creds(context: krb5_context,
                                     ctx: krb5_init_creds_context,
                                     creds: *mut krb5_creds) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_init_creds_get_error(context: krb5_context,
                                     ctx: krb5_init_creds_context,
                                     error: *mut *mut krb5_error) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_init_creds_init(context: krb5_context,
                                client: krb5_principal,
                                prompter: Option<krb5_prompter_fct>,
                                data: *mut c_void,
                                start_time: krb5_deltat,
                                options: *mut krb5_get_init_creds_opt,
                                ctx: *mut krb5_init_creds_context) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_init_creds_set_keytab(context: krb5_context,
                                      ctx: krb5_init_creds_context,
                                      keytab: krb5_keytab) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_init_creds_step(context: krb5_context,
                                ctx: krb5_init_creds_context,
                                in_: *mut krb5_data,
                                out: *mut krb5_data,
                                realm: *mut krb5_data,
                                flags: *mut c_uint) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_init_creds_set_password(context: krb5_context,
                                        ctx: krb5_init_creds_context,
                                        password: *const c_char) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_init_creds_set_service(context: krb5_context,
                                       ctx: krb5_init_creds_context,
                                       service: *const c_char) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_init_creds_get_times(context: krb5_context,
                                     ctx: krb5_init_creds_context,
                                     times: *mut krb5_ticket_times) -> krb5_error_code;
}
// Opaque handle for a stepwise service-ticket acquisition.
pub enum _krb5_tkt_creds_context {}
pub type krb5_tkt_creds_context = *mut _krb5_tkt_creds_context;
// Stepwise service-ticket API (`krb5_tkt_creds_*`).
#[link(name = "krb5")]
extern "C" {
    // TODO: Doc
    // FIX: `ctx` was declared by value (`krb5_tkt_creds_context`); in the C
    // prototype it is an out-parameter (`krb5_tkt_creds_context *ctx`), so it
    // must be a pointer for the newly created context handle to be returned.
    pub fn krb5_tkt_creds_init(context: krb5_context,
                               ccache: krb5_ccache,
                               creds: *mut krb5_creds,
                               options: krb5_flags,
                               ctx: *mut krb5_tkt_creds_context) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_tkt_creds_get(context: krb5_context,
                              ctx: krb5_tkt_creds_context) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_tkt_creds_get_creds(context: krb5_context,
                                    ctx: krb5_tkt_creds_context,
                                    creds: *mut krb5_creds) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_tkt_creds_free(context: krb5_context,
                               ctx: krb5_tkt_creds_context);
}
// TODO: Doc
pub const KRB5_TKT_CREDS_STEP_FLAG_CONTINUE: krb5_flags = 0x1;
// Stepwise ticket exchange plus keytab-based initial credentials.
#[link(name = "krb5")]
extern "C" {
    // TODO: Doc
    pub fn krb5_tkt_creds_step(context: krb5_context,
                               ctx: krb5_tkt_creds_context,
                               in_: *mut krb5_data,
                               out: *mut krb5_data,
                               realm: *mut krb5_data,
                               flags: *mut c_uint) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_tkt_creds_get_times(context: krb5_context,
                                    ctx: krb5_tkt_creds_context,
                                    times: *mut krb5_ticket_times) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_get_init_creds_keytab(context: krb5_context,
                                      creds: *mut krb5_creds,
                                      client: krb5_principal,
                                      arg_keytab: krb5_keytab,
                                      start_time: krb5_deltat,
                                      in_tkt_service: *const c_char,
                                      k5_gic_options: *const krb5_get_init_creds_opt) -> krb5_error_code;
}
// Options controlling verification of freshly acquired initial credentials
// (C layout); `flags` records which fields were explicitly set.
#[repr(C)]
pub struct krb5_verify_init_creds_opt {
    pub flags: krb5_flags,
    pub ap_req_nofail: c_int,
}
// TODO: Doc
pub const KRB5_VERIFY_INIT_CREDS_OPT_AP_REQ_NOFAIL: krb5_flags = 0x0001;
// Credential verification/renewal, ticket decoding, and appdefault lookups.
#[link(name = "krb5")]
extern "C" {
    // TODO: Doc
    pub fn krb5_verify_init_creds_opt_init(k5_vic_options: *mut krb5_verify_init_creds_opt);
    // TODO: Doc
    pub fn krb5_verify_init_creds_opt_set_ap_req_nofail(k5_vic_options: *mut krb5_verify_init_creds_opt,
                                                        ap_req_nofail: c_int);
    // TODO: Doc
    pub fn krb5_verify_init_creds(context: krb5_context,
                                  creds: *mut krb5_creds,
                                  server: krb5_principal,
                                  keytab: krb5_keytab,
                                  ccache: *mut krb5_ccache,
                                  options: *mut krb5_verify_init_creds_opt) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_get_validated_creds(context: krb5_context,
                                    creds: *mut krb5_creds,
                                    client: krb5_principal,
                                    ccache: krb5_ccache,
                                    in_tkt_service: *const c_char) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_get_renewed_creds(context: krb5_context,
                                  creds: *mut krb5_creds,
                                  client: krb5_principal,
                                  ccache: krb5_ccache,
                                  in_tkt_service: *const c_char) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_decode_ticket(code: *const krb5_data,
                              rep: *mut *mut krb5_ticket) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_appdefault_string(context: krb5_context,
                                  appname: *const c_char,
                                  realm: *const krb5_data,
                                  option: *const c_char,
                                  default_value: *const c_char,
                                  ret_value: *mut *mut c_char);
    // TODO: Doc
    pub fn krb5_appdefault_boolean(context: krb5_context,
                                   appname: *const c_char,
                                   realm: *const krb5_data,
                                   option: *const c_char,
                                   default_value: c_int,
                                   ret_value: *mut c_int);
}
// Prompt-type discriminators returned by `krb5_get_prompt_types`.
// TODO: Doc
pub const KRB5_PROMPT_TYPE_PASSWORD: krb5_prompt_type = 0x1;
// TODO: Doc
pub const KRB5_PROMPT_TYPE_NEW_PASSWORD: krb5_prompt_type = 0x2;
// TODO: Doc
pub const KRB5_PROMPT_TYPE_NEW_PASSWORD_AGAIN: krb5_prompt_type = 0x3;
// TODO: Doc
pub const KRB5_PROMPT_TYPE_PREAUTH: krb5_prompt_type = 0x4;
pub type krb5_prompt_type = krb5_int32;
// Prompt-type query, extended error messages, and authdata helpers.
#[link(name = "krb5")]
extern "C" {
    // TODO: Doc
    pub fn krb5_get_prompt_types(context: krb5_context) -> *mut krb5_prompt_type;
    // TODO: Doc
    pub fn krb5_set_error_message(ctx: krb5_context,
                                  code: krb5_error_code,
                                  fmt: *const c_char, ...);
    // TODO: Doc
    // TODO: `krb5_vset_error_message` function (va_list)!
    // TODO: Doc
    pub fn krb5_copy_error_message(dest_ctx: krb5_context,
                                   src_ctx: krb5_context);
    // TODO: Doc
    pub fn krb5_get_error_message(ctx: krb5_context,
                                  code: krb5_error_code) -> *const c_char;
    // TODO: Doc
    pub fn krb5_free_error_message(ctx: krb5_context,
                                   msg: *const c_char);
    // TODO: Doc
    pub fn krb5_clear_error_message(ctx: krb5_context);
    // TODO: Doc
    pub fn krb5_decode_authdata_container(context: krb5_context,
                                          type_: krb5_authdatatype,
                                          container: *const krb5_authdata,
                                          authdata: *mut *mut *mut krb5_authdata) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_make_authdata_kdc_issued(context: krb5_context,
                                         key: *const krb5_keyblock,
                                         issuer: krb5_const_principal,
                                         authdata: *mut *const krb5_authdata,
                                         ad_kdcissued: *mut *mut *mut krb5_authdata) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_verify_authdata_kdc_issued(context: krb5_context,
                                           key: *const krb5_keyblock,
                                           ad_kdcissued: *const krb5_authdata,
                                           issuer: *mut krb5_principal,
                                           authdata: *mut *mut *mut krb5_authdata) -> krb5_error_code;
}
// PAC buffer type identifiers.
// TODO: Doc
pub const KRB5_PAC_LOGON_INFO: krb5_ui_4 = 1;
// TODO: Doc
pub const KRB5_PAC_CREDENTIALS_INFO: krb5_ui_4 = 2;
// TODO: Doc
pub const KRB5_PAC_SERVER_CHECKSUM: krb5_ui_4 = 6;
pub const KRB5_PRIVSVR_CHECKSUM: krb5_ui_4 = 7;
// TODO: Doc
pub const KRB5_PAC_CLIENT_INFO: krb5_ui_4 = 10;
// TODO: Doc
pub const KRB5_PAC_DELEGATION_INFO: krb5_ui_4 = 11;
// TODO: Doc
pub const KRB5_PAC_UPN_DNS_INFO: krb5_ui_4 = 12;
// Opaque PAC handle.
pub enum krb5_pac_data {}
pub type krb5_pac = *mut krb5_pac_data;
// PAC construction, parsing, verification, and signing.
#[link(name = "krb5")]
extern "C" {
    // TODO: Doc
    pub fn krb5_pac_add_buffer(context: krb5_context,
                               pac: krb5_pac,
                               type_: krb5_ui_4,
                               data: *const krb5_data) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_pac_free(context: krb5_context,
                         pac: krb5_pac);
    // TODO: Doc
    pub fn krb5_pac_get_buffer(context: krb5_context,
                               pac: krb5_pac,
                               type_: krb5_ui_4,
                               data: *mut krb5_data) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_pac_get_types(context: krb5_context,
                              pac: krb5_pac,
                              len: *mut usize,
                              types: *mut *mut krb5_ui_4) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_pac_init(context: krb5_context,
                         pac: *mut krb5_pac) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_pac_parse(context: krb5_context,
                          ptr: *const c_void,
                          len: usize,
                          pac: *mut krb5_pac) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_pac_verify(context: krb5_context,
                           pac: krb5_pac,
                           authtime: krb5_timestamp,
                           principal: krb5_const_principal,
                           server: *const krb5_keyblock,
                           privsvr: *const krb5_keyblock) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_pac_sign(context: krb5_context,
                         pac: krb5_pac,
                         authtime: krb5_timestamp,
                         principal: krb5_const_principal,
                         server_key: *const krb5_keyblock,
                         privsvr_key: *const krb5_keyblock,
                         data: *mut krb5_data) -> krb5_error_code;
    // TODO: Doc
    // FIX: libkrb5 exports `krb5_allow_weak_crypto` (trailing "o"); there is
    // no `krb5_allow_weak_crypt` symbol, so this declaration could never
    // link.  The Rust name is kept for backward compatibility and mapped onto
    // the real symbol with `link_name`.
    #[link_name = "krb5_allow_weak_crypto"]
    pub fn krb5_allow_weak_crypt(context: krb5_context,
                                 enable: krb5_boolean) -> krb5_error_code;
}
// A formatted trace event message (C layout).
#[repr(C)]
pub struct krb5_trace_info {
    pub message: *const c_char,
}
// Callback type receiving trace events.
pub type krb5_trace_callback = extern "C" fn(context: krb5_context,
                                             info: *const krb5_trace_info,
                                             cb_data: *mut c_void);
// Trace-logging configuration.
#[link(name = "krb5")]
extern "C" {
    // TODO: Doc
    pub fn krb5_set_trace_callback(context: krb5_context,
                                   fn_: Option<krb5_trace_callback>,
                                   cb_data: *mut c_void) -> krb5_error_code;
    // TODO: Doc
    pub fn krb5_set_trace_filename(context: krb5_context,
                                   filename: *const c_char) -> krb5_error_code;
}
// include <et/com_err.h>
// Kerberos protocol and library error codes; the values form a contiguous
// sequence starting at the com_err base -1765328384 (KRB5KDC_ERR_NONE).
pub const KRB5KDC_ERR_NONE: krb5_error_code = (-1765328384);
pub const KRB5KDC_ERR_NAME_EXP: krb5_error_code = (-1765328383);
pub const KRB5KDC_ERR_SERVICE_EXP: krb5_error_code = (-1765328382);
pub const KRB5KDC_ERR_BAD_PVNO: krb5_error_code = (-1765328381);
pub const KRB5KDC_ERR_C_OLD_MAST_KVNO: krb5_error_code = (-1765328380);
// FIX: was -1765327379 (328 mistyped as 327), which breaks the otherwise
// contiguous sequence (C_OLD_MAST_KVNO = -1765328380,
// C_PRINCIPAL_UNKNOWN = -1765328378); protocol error 5 is -1765328379.
pub const KRB5KDC_ERR_S_OLD_MAST_KVNO: krb5_error_code = (-1765328379);
pub const KRB5KDC_ERR_C_PRINCIPAL_UNKNOWN: krb5_error_code = (-1765328378);
pub const KRB5KDC_ERR_S_PRINCIPAL_UNKNOWN: krb5_error_code = (-1765328377);
pub const KRB5KDC_ERR_PRINCIPAL_NOT_UNIQUE: krb5_error_code = (-1765328376);
pub const KRB5KDC_ERR_NULL_KEY: krb5_error_code = (-1765328375);
pub const KRB5KDC_ERR_CANNOT_POSTDATE: krb5_error_code = (-1765328374);
pub const KRB5KDC_ERR_NEVER_VALID: krb5_error_code = (-1765328373);
pub const KRB5KDC_ERR_POLICY: krb5_error_code = (-1765328372);
pub const KRB5KDC_ERR_BADOPTION: krb5_error_code = (-1765328371);
pub const KRB5KDC_ERR_ETYPE_NOSUPP: krb5_error_code = (-1765328370);
pub const KRB5KDC_ERR_SUMTYPE_NOSUPP: krb5_error_code = (-1765328369);
pub const KRB5KDC_ERR_PADATA_TYPE_NOSUPP: krb5_error_code = (-1765328368);
pub const KRB5KDC_ERR_TRTYPE_NOSUPPP: krb5_error_code = (-1765328367);
pub const KRB5KDC_ERR_CLIENT_REVOKED: krb5_error_code = (-1765328366);
pub const KRB5KDC_ERR_SERVICE_REVOKED: krb5_error_code = (-1765328365);
pub const KRB5KDC_ERR_TGT_REVOKED: krb5_error_code = (-1765328364);
pub const KRB5KDC_ERR_CLIENT_NOTYET: krb5_error_code = (-1765328363);
pub const KRB5KDC_ERR_SERVICE_NOTYET: krb5_error_code = (-1765328362);
pub const KRB5KDC_ERR_KEY_EXP: krb5_error_code = (-1765328361);
pub const KRB5KDC_ERR_PREAUTH_FAILED: krb5_error_code = (-1765328360);
pub const KRB5KDC_ERR_PREAUTH_REQUIRED: krb5_error_code = (-1765328359);
pub const KRB5KDC_ERR_SERVER_NOMATCH: krb5_error_code = (-1765328358);
pub const KRB5KDC_ERR_MUST_USE_USER2USER: krb5_error_code = (-1765328357);
pub const KRB5KDC_ERR_PATH_NOT_ACCEPTED: krb5_error_code = (-1765328356);
pub const KRB5KDC_ERR_SVC_UNAVAILABLE: krb5_error_code = (-1765328355);
pub const KRB5PLACEHOLD_30: krb5_error_code = (-1765328354);
pub const KRB5KRB_AP_ERR_BAD_INTEGRITY: krb5_error_code = (-1765328353);
pub const KRB5KRB_AP_ERR_TKT_EXPIRED: krb5_error_code = (-1765328352);
pub const KRB5KRB_AP_ERR_TKT_NYV: krb5_error_code = (-1765328351);
pub const KRB5KRB_AP_ERR_REPEAT: krb5_error_code = (-1765328350);
pub const KRB5KRB_AP_ERR_NOT_US: krb5_error_code = (-1765328349);
pub const KRB5KRB_AP_ERR_BADMATCH: krb5_error_code = (-1765328348);
pub const KRB5KRB_AP_ERR_SKES: krb5_error_code = (-1765328347);
pub const KRB5KRB_AP_ERR_BADADDR: krb5_error_code = (-1765328346);
pub const KRB5KRB_AP_ERR_BADVERSION: krb5_error_code = (-1765328345);
pub const KRB5KRB_AP_ERR_MSG_TYPE: krb5_error_code = (-1765328344);
pub const KRB5KRB_AP_ERR_MODIFIED: krb5_error_code = (-1765328343);
pub const KRB5KRB_AP_ERR_BADORDER: krb5_error_code = (-1765328342);
pub const KRB5KRB_AP_ERR_ILL_CR_TKT: krb5_error_code = (-1765328341);
pub const KRB5KRB_AP_ERR_BADKEYVER: krb5_error_code = (-1765328340);
pub const KRB5KRB_AP_ERR_NOKEY: krb5_error_code = (-1765328339);
pub const KRB5KRB_AP_ERR_MUT_FAIL: krb5_error_code = (-1765328338);
pub const KRB5KRB_AP_ERR_BADDIRECTION: krb5_error_code = (-1765328337);
pub const KRB5KRB_AP_ERR_METHOD: krb5_error_code = (-1765328336);
pub const KRB5KRB_AP_ERR_BADSEQ: krb5_error_code = (-1765328335);
pub const KRB5KRB_AP_ERR_INAPP_CKSUM: krb5_error_code = (-1765328334);
pub const KRB5KRB_AP_PATH_NOT_ACCEPTED: krb5_error_code = (-1765328333);
pub const KRB5KRB_ERR_RESPONSE_TOO_BIG: krb5_error_code = (-1765328332);
pub const KRB5PLACEHOLD_53: krb5_error_code = (-1765328331);
pub const KRB5PLACEHOLD_54: krb5_error_code = (-1765328330);
pub const KRB5PLACEHOLD_55: krb5_error_code = (-1765328329);
pub const KRB5PLACEHOLD_56: krb5_error_code = (-1765328328);
pub const KRB5PLACEHOLD_57: krb5_error_code = (-1765328327);
pub const KRB5PLACEHOLD_58: krb5_error_code = (-1765328326);
pub const KRB5PLACEHOLD_59: krb5_error_code = (-1765328325);
pub const KRB5KRB_ERR_GENERIC: krb5_error_code = (-1765328324);
pub const KRB5KRB_ERR_FIELD_TOOLONG: krb5_error_code = (-1765328323);
pub const KRB5KRB_ERR_CLIENT_NOT_TRUSTED: krb5_error_code = (-1765328322);
pub const KRB5KRB_ERR_KDC_NOT_TRUSTED: krb5_error_code = (-1765328321);
pub const KRB5KRB_ERR_INVALID_SIG: krb5_error_code = (-1765328320);
pub const KRB5KRB_ERR_DH_KEY_PARAMETERS_NOT_ACCEPTED: krb5_error_code = (-1765328319);
pub const KRB5KRB_ERR_CERTIFICATE_MISMATCH: krb5_error_code = (-1765328318);
pub const KRB5KRB_AP_ERR_NO_TGT: krb5_error_code = (-1765328317);
pub const KRB5KDC_ERR_WRONG_REALM: krb5_error_code = (-1765328316);
pub const KRB5KRB_APP_ERR_USER_TO_USER_REQUIRED: krb5_error_code = (-1765328315);
pub const KRB5KDC_ERR_CANT_VERIFY_CERTIFICATE: krb5_error_code = (-1765328314);
pub const KRB5KDC_ERR_INVALID_CERTIFICATE: krb5_error_code = (-1765328313);
pub const KRB5KDC_ERR_REVOKED_CERTIFICATE: krb5_error_code = (-1765328312);
pub const KRB5KDC_ERR_REVOCATION_STATUS_UNKNOWN: krb5_error_code = (-1765328311);
pub const KRB5KDC_ERR_REVOCATION_STATUS_UNAVAILABLE: krb5_error_code = (-1765328310);
pub const KRB5KDC_ERR_CLIENT_NAME_MISMATCH: krb5_error_code = (-1765328309);
pub const KRB5KDC_ERR_KDC_NAME_MISMATCH: krb5_error_code = (-1765328308);
pub const KRB5KDC_ERR_INCONSISTENT_KEY_PURPOSE: krb5_error_code = (-1765328307);
pub const KRB5KDC_ERR_DIGEST_IN_CERT_NOT_ACCEPTED: krb5_error_code = (-1765328306);
pub const KRB5KDC_ERR_PA_CHECKSUM_IN_CERT_NOT_ACCEPTED: krb5_error_code = (-1765328305);
pub const KRB5KDC_ERR_DIGEST_IN_SIGNED_DATA_NOT_ACCEPTED: krb5_error_code = (-1765328304);
pub const KRB5KDC_ERR_PUBLIC_KEY_ENCRYPTION_NOT_SUPPORTED: krb5_error_code = (-1765328303);
pub const KRB5PLACEHOLD_82: krb5_error_code = (-1765328302);
pub const KRB5PLACEHOLD_83: krb5_error_code = (-1765328301);
pub const KRB5PLACEHOLD_84: krb5_error_code = (-1765328300);
pub const KRB5KRB_AP_ERR_IAKERB_KDC_NOT_FOUND: krb5_error_code = (-1765328299);
pub const KRB5KRB_AP_ERR_IAKERB_KDC_NO_RESPONSE: krb5_error_code = (-1765328298);
pub const KRB5PLACEHOLD_87: krb5_error_code = (-1765328297);
pub const KRB5PLACEHOLD_88: krb5_error_code = (-1765328296);
pub const KRB5PLACEHOLD_89: krb5_error_code = (-1765328295);
pub const KRB5PLACEHOLD_90: krb5_error_code = (-1765328294);
pub const KRB5PLACEHOLD_91: krb5_error_code = (-1765328293);
pub const KRB5PLACEHOLD_92: krb5_error_code = (-1765328292);
pub const KRB5KDC_ERR_UNKNOWN_CRITICAL_FAST_OPTION: krb5_error_code = (-1765328291);
pub const KRB5PLACEHOLD_94: krb5_error_code = (-1765328290);
pub const KRB5PLACEHOLD_95: krb5_error_code = (-1765328289);
pub const KRB5PLACEHOLD_96: krb5_error_code = (-1765328288);
pub const KRB5PLACEHOLD_97: krb5_error_code = (-1765328287);
pub const KRB5PLACEHOLD_98: krb5_error_code = (-1765328286);
pub const KRB5PLACEHOLD_99: krb5_error_code = (-1765328285);
pub const KRB5KDC_ERR_NO_ACCEPTABLE_KDF: krb5_error_code = (-1765328284);
pub const KRB5PLACEHOLD_101: krb5_error_code = (-1765328283);
pub const KRB5PLACEHOLD_102: krb5_error_code = (-1765328282);
pub const KRB5PLACEHOLD_103: krb5_error_code = (-1765328281);
pub const KRB5PLACEHOLD_104: krb5_error_code = (-1765328280);
pub const KRB5PLACEHOLD_105: krb5_error_code = (-1765328279);
pub const KRB5PLACEHOLD_106: krb5_error_code = (-1765328278);
pub const KRB5PLACEHOLD_107: krb5_error_code = (-1765328277);
pub const KRB5PLACEHOLD_108: krb5_error_code = (-1765328276);
pub const KRB5PLACEHOLD_109: krb5_error_code = (-1765328275);
pub const KRB5PLACEHOLD_110: krb5_error_code = (-1765328274);
pub const KRB5PLACEHOLD_111: krb5_error_code = (-1765328273);
pub const KRB5PLACEHOLD_112: krb5_error_code = (-1765328272);
pub const KRB5PLACEHOLD_113: krb5_error_code = (-1765328271);
pub const KRB5PLACEHOLD_114: krb5_error_code = (-1765328270);
pub const KRB5PLACEHOLD_115: krb5_error_code = (-1765328269);
pub const KRB5PLACEHOLD_116: krb5_error_code = (-1765328268);
pub const KRB5PLACEHOLD_117: krb5_error_code = (-1765328267);
pub const KRB5PLACEHOLD_118: krb5_error_code = (-1765328266);
pub const KRB5PLACEHOLD_119: krb5_error_code = (-1765328265);
pub const KRB5PLACEHOLD_120: krb5_error_code = (-1765328264);
pub const KRB5PLACEHOLD_121: krb5_error_code = (-1765328263);
pub const KRB5PLACEHOLD_122: krb5_error_code = (-1765328262);
pub const KRB5PLACEHOLD_123: krb5_error_code = (-1765328261);
pub const KRB5PLACEHOLD_124: krb5_error_code = (-1765328260);
pub const KRB5PLACEHOLD_125: krb5_error_code = (-1765328259);
pub const KRB5PLACEHOLD_126: krb5_error_code = (-1765328258);
pub const KRB5PLACEHOLD_127: krb5_error_code = (-1765328257);
pub const KRB5_ERR_RCSID: krb5_error_code = (-1765328256);
pub const KRB5_LIBOS_BADLOCKFLAG: krb5_error_code = (-1765328255);
pub const KRB5_LIBOS_CANTREADPWD: krb5_error_code = (-1765328254);
pub const KRB5_LIBOS_BADPWDMATCH: krb5_error_code = (-1765328253);
pub const KRB5_LIBOS_PWDINTR: krb5_error_code = (-1765328252);
pub const KRB5_PARSE_ILLCHAR: krb5_error_code = (-1765328251);
pub const KRB5_PARSE_MALFORMED: krb5_error_code = (-1765328250);
pub const KRB5_CONFIG_CANTOPEN: krb5_error_code = (-1765328249);
pub const KRB5_CONFIG_BADFORMAT: krb5_error_code = (-1765328248);
pub const KRB5_CONFIG_NOTENUFSPACE: krb5_error_code = (-1765328247);
pub const KRB5_BADMSGTYPE: krb5_error_code = (-1765328246);
pub const KRB5_CC_BADNAME: krb5_error_code = (-1765328245);
pub const KRB5_CC_UNKNOWN_TYPE: krb5_error_code = (-1765328244);
pub const KRB5_CC_NOTFOUND: krb5_error_code = (-1765328243);
pub const KRB5_CC_END: krb5_error_code = (-1765328242);
pub const KRB5_NO_TKT_SUPPLIED: krb5_error_code = (-1765328241);
pub const KRB5KRB_AP_WRONG_PRINC: krb5_error_code = (-1765328240);
pub const KRB5KRB_AP_ERR_TKT_INVALID: krb5_error_code = (-1765328239);
pub const KRB5_PRINC_NOMATCH: krb5_error_code = (-1765328238);
pub const KRB5_KDCREP_MODIFIED: krb5_error_code = (-1765328237);
pub const KRB5_KDCREP_SKEW: krb5_error_code = (-1765328236);
pub const KRB5_IN_TKT_REALM_MISMATCH: krb5_error_code = (-1765328235);
pub const KRB5_PROG_ETYPE_NOSUPP: krb5_error_code = (-1765328234);
pub const KRB5_PROG_KEYTYPE_NOSUPP: krb5_error_code = (-1765328233);
pub const KRB5_WRONG_ETYPE: krb5_error_code = (-1765328232);
pub const KRB5_PROG_SUMTYPE_NOSUPP: krb5_error_code = (-1765328231);
pub const KRB5_REALM_UNKNOWN: krb5_error_code = (-1765328230);
pub const KRB5_SERVICE_UNKNOWN: krb5_error_code = (-1765328229);
pub const KRB5_KDC_UNREACH: krb5_error_code = (-1765328228);
pub const KRB5_NO_LOCALNAME: krb5_error_code = (-1765328227);
pub const KRB5_MUTUAL_FAILED: krb5_error_code = (-1765328226);
pub const KRB5_RC_TYPE_EXISTS: krb5_error_code = (-1765328225);
pub const KRB5_RC_MALLOC: krb5_error_code = (-1765328224);
pub const KRB5_RC_TYPE_NOTFOUND: krb5_error_code = (-1765328223);
pub const KRB5_RC_UNKNOWN: krb5_error_code = (-1765328222);
pub const KRB5_RC_REPLAY: krb5_error_code = (-1765328221);
pub const KRB5_RC_IO: krb5_error_code = (-1765328220);
pub const KRB5_RC_NOIO: krb5_error_code = (-1765328219);
pub const KRB5_RC_PARSE: krb5_error_code = (-1765328218);
pub const KRB5_RC_IO_EOF: krb5_error_code = (-1765328217);
pub const KRB5_RC_IO_MALLOC: krb5_error_code = (-1765328216);
pub const KRB5_RC_IO_PERM: krb5_error_code = (-1765328215);
pub const KRB5_RC_IO_IO: krb5_error_code = (-1765328214);
pub const KRB5_RC_IO_SPACE: krb5_error_code = (-1765328212);
pub const KRB5_TRANS_CANTOPEN: krb5_error_code = (-1765328211);
pub const KRB5_TRANS_BADFORMAT: krb5_error_code = (-1765328210);
pub const KRB5_LNAME_CANTOPEN: krb5_error_code = (-1765328209);
pub const KRB5_LNAME_NOTRANS: krb5_error_code = (-1765328208);
pub const KRB5_LNAME_BADFORMAT: krb5_error_code = (-1765328207);
pub const KRB5_CRYPTO_INTERNAL: krb5_error_code = (-1765328206);
pub const KRB5_KT_BADNAME: krb5_error_code = (-1765328205);
pub const KRB5_KT_UNKNOWN_TYPE: krb5_error_code = (-1765328204);
pub const KRB5_KT_NOTFOUND: krb5_error_code = (-1765328203);
pub const KRB5_KT_END: krb5_error_code = (-1765328202);
pub const KRB5_KT_NOWRITE: krb5_error_code = (-1765328201);
pub const KRB5_KT_IOERR: krb5_error_code = (-1765328200);
pub const KRB5_NO_TKT_IN_RLM: krb5_error_code = (-1765328199);
pub const KRB5DES_BAD_KEYPAR: krb5_error_code = (-1765328198);
pub const KRB5DES_WEAK_KEY: krb5_error_code = (-1765328197);
pub const KRB5_BAD_ENCTYPE: krb5_error_code = (-1765328196);
pub const KRB5_BAD_KEYSIZE: krb5_error_code = (-1765328195);
pub const KRB5_BAD_MSIZE: krb5_error_code = (-1765328194);
pub const KRB5_CC_TYPE_EXISTS: krb5_error_code = (-1765328193);
pub const KRB5_KT_TYPE_EXISTS: krb5_error_code = (-1765328192);
pub const KRB5_CC_IO: krb5_error_code = (-1765328191);
pub const KRB5_FCC_PERM: krb5_error_code = (-1765328190);
pub const KRB5_FCC_NOFILE: krb5_error_code = (-1765328189);
pub const KRB5_FCC_INTERNAL: krb5_error_code = (-1765328188);
pub const KRB5_CC_WRITE: krb5_error_code = (-1765328187);
pub const KRB5_CC_NOMEM: krb5_error_code = (-1765328186);
pub const KRB5_CC_FORMAT: krb5_error_code = (-1765328185);
pub const KRB5_CC_NOT_KTYPE: krb5_error_code = (-1765328184);
pub const KRB5_INVALID_FLAGS: krb5_error_code = (-1765328183);
pub const KRB5_NO_2ND_TKT: krb5_error_code = (-1765328182);
pub const KRB5_NOCREDS_SUPPLIED: krb5_error_code = (-1765328181);
pub const KRB5_SENDAUTH_BADAUTHVERS: krb5_error_code = (-1765328180);
pub const KRB5_SENDAUTH_BADAPPLVERS: krb5_error_code = (-1765328179);
pub const KRB5_SENDAUTH_BADRESPONSE: krb5_error_code = (-1765328178);
pub const KRB5_SENDAUTH_REJECTED: krb5_error_code = (-1765328177);
pub const KRB5_PREAUTH_BAD_TYPE: krb5_error_code = (-1765328176);
pub const KRB5_PREAUTH_NO_KEY: krb5_error_code = (-1765328175);
pub const KRB5_PREAUTH_FAILED: krb5_error_code = (-1765328174);
pub const KRB5_RCACHE_BADVNO: krb5_error_code = (-1765328173);
pub const KRB5_CCACHE_BADVNO: krb5_error_code = (-1765328172);
pub const KRB5_KEYTAB_BADVNO: krb5_error_code = (-1765328171);
pub const KRB5_PROG_ATYPE_NOSUPP: krb5_error_code = (-1765328170);
pub const KRB5_RC_REQUIRED: krb5_error_code = (-1765328169);
pub const KRB5_ERR_BAD_HOSTNAME: krb5_error_code = (-1765328168);
pub const KRB5_ERR_HOST_REALM_UNKNOWN: krb5_error_code = (-1765328167);
pub const KRB5_SNAME_UNSUPP_NAMETYPE: krb5_error_code = (-1765328166);
pub const KRB5KRB_AP_ERR_V4_REPLY: krb5_error_code = (-1765328165);
pub const KRB5_REALM_CANT_RESOLVE: krb5_error_code = (-1765328164);
pub const KRB5_TKT_NOT_FORWARDABLE: krb5_error_code = (-1765328163);
pub const KRB5_FWD_BAD_PRINCIPAL: krb5_error_code = (-1765328162);
pub const KRB5_GET_IN_TKT_LOOP: krb5_error_code = (-1765328161);
pub const KRB5_CONFIG_NODEFREALM: krb5_error_code = (-1765328160);
pub const KRB5_SAM_UNSUPPORTED: krb5_error_code = (-1765328159);
pub const KRB5_SAM_INVALID_ETYPE: krb5_error_code = (-1765328158);
pub const KRB5_SAM_NO_CHECKSUM: krb5_error_code = (-1765328157);
pub const KRB5_SAM_BAD_CHECKSUM: krb5_error_code = (-1765328156);
pub const KRB5_KT_NAME_TOOLONG: krb5_error_code = (-1765328155);
pub const KRB5_KT_KVNONOTFOUND: krb5_error_code = (-1765328154);
pub const KRB5_APPL_EXPIRED: krb5_error_code = (-1765328153);
pub const KRB5_LIB_EXPIRED: krb5_error_code = (-1765328152);
pub const KRB5_CHPW_PWDNULL: krb5_error_code = (-1765328151);
pub const KRB5_CHPW_FAIL: krb5_error_code = (-1765328150);
pub const KRB5_KT_FORMAT: krb5_error_code = (-1765328149);
pub const KRB5_NOPERM_ETYPE: krb5_error_code = (-1765328148);
pub const KRB5_CONFIG_ETYPE_NOSUPP: krb5_error_code = (-1765328147);
pub const KRB5_OBSOLETE_FN: krb5_error_code = (-1765328146);
pub const KRB5_EAI_FAIL: krb5_error_code = (-1765328145);
pub const KRB5_EAI_NODATA: krb5_error_code = (-1765328144);
pub const KRB5_EAI_NONAME: krb5_error_code = (-1765328143);
pub const KRB5_EAI_SERVICE: krb5_error_code = (-1765328142);
pub const KRB5_ERR_NUMERIC_REALM: krb5_error_code = (-1765328141);
pub const KRB5_ERR_BAD_S2K_PARAMS: krb5_error_code = (-1765328140);
pub const KRB5_ERR_NO_SERVICE: krb5_error_code = (-1765328139);
pub const KRB5_CC_READONLY: krb5_error_code = (-1765328138);
pub const KRB5_CC_NOSUPP: krb5_error_code = (-1765328137);
pub const KRB5_DELTAT_BADFORMAT: krb5_error_code = (-1765328136);
pub const KRB5_PLUGIN_NO_HANDLE: krb5_error_code = (-1765328135);
pub const KRB5_PLUGIN_OP_NOTSUPP: krb5_error_code = (-1765328134);
pub const KRB5_ERR_INVALID_UTF8: krb5_error_code = (-1765328133);
pub const KRB5_ERR_FAST_REQUIRED: krb5_error_code = (-1765328132);
pub const KRB5_LOCAL_ADDR_REQUIRED: krb5_error_code = (-1765328131);
pub const KRB5_REMOTE_ADDR_REQUIRED: krb5_error_code = (-1765328130);
pub const KRB5_TRACE_NOSUPP: krb5_error_code = (-1765328129);
// NOTE: from krb5/krb5.h : 8445
// not quite sure how to translate this.
// Uninhabited enums used as opaque C types: they can only appear behind
// pointers on the Rust side, which is the pre-`extern type` idiom for
// FFI handles whose layout is unknown here.
pub enum error_table {}
pub enum et_krb5_error_table {}
// TODO: not defined here. search where from!
// Opaque node of the com_err error-table linked list; only ever passed as
// `*mut *mut et_list` to the *_error_table_r initializers below.
pub enum et_list {}
#[link(name = "krb5")]
extern "C" {
// TODO: Doc
// NOTE: also extern in header
// Registers the "krb5" com_err table with the process-global list.
pub fn initialize_krb5_error_table();
// Reentrant variant: registers the table on the caller-supplied list.
pub fn initialize_krb5_error_table_r(list: *mut *mut et_list);
}
// Base value of the "krb5" com_err table; individual codes above are
// base + offset.
pub const ERROR_TABLE_BASE_krb5: krb5_error_code = (-1765328384);
// TODO: Two defines here for compability with older versions
// include et/com_err.h
// Error codes from the com_err table "k5e1": plugin loader, DIR/KEYRING
// credential cache (DCC/KCC), and KCM daemon client errors.
pub const KRB5_PLUGIN_VER_NOTSUPP: krb5_error_code = (-1750600192);
pub const KRB5_PLUGIN_BAD_MODULE_SPEC: krb5_error_code = (-1750600191);
pub const KRB5_PLUGIN_NAME_NOTFOUND: krb5_error_code = (-1750600190);
pub const KRB5KDC_ERR_DISCARD: krb5_error_code = (-1750600189);
pub const KRB5_DCC_CANNOT_CREATE: krb5_error_code = (-1750600188);
pub const KRB5_KCC_INVALID_ANCHOR: krb5_error_code = (-1750600187);
pub const KRB5_KCC_UNKNOWN_VERSION: krb5_error_code = (-1750600186);
pub const KRB5_KCC_INVALID_UID: krb5_error_code = (-1750600185);
pub const KRB5_KCM_MALFORMED_REPLY: krb5_error_code = (-1750600184);
pub const KRB5_KCM_RPC_ERROR: krb5_error_code = (-1750600183);
pub const KRB5_KCM_REPLY_TOO_BIG: krb5_error_code = (-1750600182);
pub const KRB5_KCM_NO_SERVER: krb5_error_code = (-1750600181);
// extern const here
#[link(name = "krb5")]
extern "C" {
// TODO: Doc
// NOTE: Also extern in header
// Registers the "k5e1" com_err table with the process-global list.
pub fn initialize_k5e1_error_table();
// TODO: Doc
// NOTE: also extern in header
// Reentrant variant: registers the table on the caller-supplied list.
pub fn initialize_k5e1_error_table_r(list: *mut *mut et_list);
}
// Base value of the "k5e1" com_err table.
pub const ERROR_TABLE_BASE_k5e1: krb5_error_code = (-1750600192);
// TODO: two defines for compability with older versions.
// TODO: include et/com_err.h
// Error codes from the com_err table "kdb5": KDC database (KDB) backend and
// update-log (KRB5_LOG_*) errors, as generated from kdb5_err.h.
pub const KRB5_KDB_RCSID: krb5_error_code = (-1780008448);
pub const KRB5_KDB_INUSE: krb5_error_code = (-1780008447);
pub const KRB5_KDB_UK_SERROR: krb5_error_code = (-1780008446);
pub const KRB5_KDB_UK_RERROR: krb5_error_code = (-1780008445);
pub const KRB5_KDB_UNAUTH: krb5_error_code = (-1780008444);
pub const KRB5_KDB_NOENTRY: krb5_error_code = (-1780008443);
pub const KRB5_KDB_ILL_WILDCARD: krb5_error_code = (-1780008442);
pub const KRB5_KDB_DB_INUSE: krb5_error_code = (-1780008441);
pub const KRB5_KDB_DB_CHANGED: krb5_error_code = (-1780008440);
pub const KRB5_KDB_TRUNCATED_RECORD: krb5_error_code = (-1780008439);
pub const KRB5_KDB_RECURSIVELOCK: krb5_error_code = (-1780008438);
pub const KRB5_KDB_NOTLOCKED: krb5_error_code = (-1780008437);
pub const KRB5_KDB_BADLOCKMODE: krb5_error_code = (-1780008436);
pub const KRB5_KDB_DBNOTINITED: krb5_error_code = (-1780008435);
pub const KRB5_KDB_DBINITED: krb5_error_code = (-1780008434);
pub const KRB5_KDB_ILLDIRECTION: krb5_error_code = (-1780008433);
pub const KRB5_KDB_NOMASTERKEY: krb5_error_code = (-1780008432);
pub const KRB5_KDB_BADMASTERKEY: krb5_error_code = (-1780008431);
pub const KRB5_KDB_INVALIDKEYSIZE: krb5_error_code = (-1780008430);
pub const KRB5_KDB_CANTREAD_STORED: krb5_error_code = (-1780008429);
pub const KRB5_KDB_BADSTORED_MKEY: krb5_error_code = (-1780008428);
pub const KRB5_KDB_NOACTMASTERKEY: krb5_error_code = (-1780008427);
pub const KRB5_KDB_KVNONOMATCH: krb5_error_code = (-1780008426);
pub const KRB5_KDB_STORED_MKEY_NOTCURRENT: krb5_error_code = (-1780008425);
pub const KRB5_KDB_CANTLOCK_DB: krb5_error_code = (-1780008424);
pub const KRB5_KDB_DB_CORRUPT: krb5_error_code = (-1780008423);
pub const KRB5_KDB_BAD_VERSION: krb5_error_code = (-1780008422);
pub const KRB5_KDB_BAD_SALTTYPE: krb5_error_code = (-1780008421);
pub const KRB5_KDB_BAD_ENCTYPE: krb5_error_code = (-1780008420);
pub const KRB5_KDB_BAD_CREATEFLAGS: krb5_error_code = (-1780008419);
pub const KRB5_KDB_NO_PERMITTED_KEY: krb5_error_code = (-1780008418);
pub const KRB5_KDB_NO_MATCHING_KEY: krb5_error_code = (-1780008417);
pub const KRB5_KDB_DBTYPE_NOTFOUND: krb5_error_code = (-1780008416);
pub const KRB5_KDB_DBTYPE_NOSUP: krb5_error_code = (-1780008415);
pub const KRB5_KDB_DBTYPE_INIT: krb5_error_code = (-1780008414);
pub const KRB5_KDB_SERVER_INTERNAL_ERR: krb5_error_code = (-1780008413);
pub const KRB5_KDB_ACCESS_ERROR: krb5_error_code = (-1780008412);
pub const KRB5_KDB_INTERNAL_ERROR: krb5_error_code = (-1780008411);
pub const KRB5_KDB_CONSTRAINT_VIOLATION: krb5_error_code = (-1780008410);
pub const KRB5_LOG_CONV: krb5_error_code = (-1780008409);
pub const KRB5_LOG_UNSTABLE: krb5_error_code = (-1780008408);
pub const KRB5_LOG_CORRUPT: krb5_error_code = (-1780008407);
pub const KRB5_LOG_ERROR: krb5_error_code = (-1780008406);
pub const KRB5_KDB_DBTYPE_MISMATCH: krb5_error_code = (-1780008405);
pub const KRB5_KDB_POLICY_REF: krb5_error_code = (-1780008404);
pub const KRB5_KDB_STRINGS_TOOLONG: krb5_error_code = (-1780008403);
// TODO: extern const struct.
#[link(name = "krb5")]
extern "C" {
// NOTE: also extern in header
// Registers the "kdb5" com_err table with the process-global list.
pub fn initialize_kdb5_error_table();
// NOTE: also extern in header
// Reentrant variant: registers the table on the caller-supplied list.
pub fn initialize_kdb5_error_table_r(list: *mut *mut et_list);
}
// Base value of the "kdb5" com_err table.
pub const ERROR_TABLE_BASE_kdb5: krb5_error_code = (-1780008448);
// TODO: two macros for compability with older versions
// TODO: include et/com_err.h
// Structure "magic number" tags from the com_err table "kv5m"
// (kv5m_err.h). These tag the `magic` field of library structures for
// internal consistency checks; they are codes in the error-table space,
// not error returns in the usual sense.
pub const KV5M_NONE: krb5_error_code = (-1760647424);
pub const KV5M_PRINCIPAL: krb5_error_code = (-1760647423);
pub const KV5M_DATA: krb5_error_code = (-1760647422);
pub const KV5M_KEYBLOCK: krb5_error_code = (-1760647421);
pub const KV5M_CHECKSUM: krb5_error_code = (-1760647420);
pub const KV5M_ENCRYPT_BLOCK: krb5_error_code = (-1760647419);
pub const KV5M_ENC_DATA: krb5_error_code = (-1760647418);
pub const KV5M_CRYPTOSYSTEM_ENTRY: krb5_error_code = (-1760647417);
pub const KV5M_CS_TABLE_ENTRY: krb5_error_code = (-1760647416);
pub const KV5M_CHECKSUM_ENTRY: krb5_error_code = (-1760647415);
pub const KV5M_AUTHDATA: krb5_error_code = (-1760647414);
pub const KV5M_TRANSITED: krb5_error_code = (-1760647413);
pub const KV5M_ENC_TKT_PART: krb5_error_code = (-1760647412);
pub const KV5M_TICKET: krb5_error_code = (-1760647411);
pub const KV5M_AUTHENTICATOR: krb5_error_code = (-1760647410);
pub const KV5M_TKT_AUTHENT: krb5_error_code = (-1760647409);
pub const KV5M_CREDS: krb5_error_code = (-1760647408);
pub const KV5M_LAST_REQ_ENTRY: krb5_error_code = (-1760647407);
pub const KV5M_PA_DATA: krb5_error_code = (-1760647406);
pub const KV5M_KDC_REQ: krb5_error_code = (-1760647405);
pub const KV5M_ENC_KDC_REP_PART: krb5_error_code = (-1760647404);
pub const KV5M_KDC_REP: krb5_error_code = (-1760647403);
pub const KV5M_ERROR: krb5_error_code = (-1760647402);
pub const KV5M_AP_REQ: krb5_error_code = (-1760647401);
pub const KV5M_AP_REP: krb5_error_code = (-1760647400);
pub const KV5M_AP_REP_ENC_PART: krb5_error_code = (-1760647399);
pub const KV5M_RESPONSE: krb5_error_code = (-1760647398);
pub const KV5M_SAFE: krb5_error_code = (-1760647397);
pub const KV5M_PRIV: krb5_error_code = (-1760647396);
pub const KV5M_PRIV_ENC_PART: krb5_error_code = (-1760647395);
pub const KV5M_CRED: krb5_error_code = (-1760647394);
pub const KV5M_CRED_INFO: krb5_error_code = (-1760647393);
pub const KV5M_CRED_ENC_PART: krb5_error_code = (-1760647392);
pub const KV5M_PWD_DATA: krb5_error_code = (-1760647391);
pub const KV5M_ADDRESS: krb5_error_code = (-1760647390);
pub const KV5M_KEYTAB_ENTRY: krb5_error_code = (-1760647389);
pub const KV5M_CONTEXT: krb5_error_code = (-1760647388);
pub const KV5M_OS_CONTEXT: krb5_error_code = (-1760647387);
pub const KV5M_ALT_METHOD: krb5_error_code = (-1760647386);
pub const KV5M_ETYPE_INFO_ENTRY: krb5_error_code = (-1760647385);
pub const KV5M_DB_CONTEXT: krb5_error_code = (-1760647384);
pub const KV5M_AUTH_CONTEXT: krb5_error_code = (-1760647383);
pub const KV5M_KEYTAB: krb5_error_code = (-1760647382);
pub const KV5M_RCACHE: krb5_error_code = (-1760647381);
pub const KV5M_CCACHE: krb5_error_code = (-1760647380);
pub const KV5M_PREAUTH_OPS: krb5_error_code = (-1760647379);
pub const KV5M_SAM_CHALLENGE: krb5_error_code = (-1760647378);
pub const KV5M_SAM_CHALLENGE_2: krb5_error_code = (-1760647377);
pub const KV5M_SAM_KEY: krb5_error_code = (-1760647376);
pub const KV5M_ENC_SAM_RESPONSE_ENC: krb5_error_code = (-1760647375);
pub const KV5M_ENC_SAM_RESPONSE_ENC_2: krb5_error_code = (-1760647374);
pub const KV5M_SAM_RESPONSE: krb5_error_code = (-1760647373);
pub const KV5M_SAM_RESPONSE_2: krb5_error_code = (-1760647372);
pub const KV5M_PREDICTED_SAM_RESPONSE: krb5_error_code = (-1760647371);
pub const KV5M_PASSWD_PHRASE_ELEMENT: krb5_error_code = (-1760647370);
pub const KV5M_GSS_OID: krb5_error_code = (-1760647369);
pub const KV5M_GSS_QUEUE: krb5_error_code = (-1760647368);
pub const KV5M_FAST_ARMORED_REQ: krb5_error_code = (-1760647367);
pub const KV5M_FAST_REQ: krb5_error_code = (-1760647366);
pub const KV5M_FAST_RESPONSE: krb5_error_code = (-1760647365);
pub const KV5M_AUTHDATA_CONTEXT: krb5_error_code = (-1760647364);
// TODO: extern const here
#[link(name = "krb5")]
extern "C" {
// NOTE: also extern in the header
// Registers the "kv5m" com_err table with the process-global list.
pub fn initialize_kv5m_error_table();
// NOTE: also extern in the header
// Reentrant variant: registers the table on the caller-supplied list.
pub fn initialize_kv5m_error_table_r(list: *mut *mut et_list);
}
// Base value of the "kv5m" com_err table.
pub const ERROR_TABLE_BASE_kv5m: krb5_error_code = (-1760647424);
// TODO: Two macros for compability with older versions
// TODO: include et/com_err.h
// Error codes from the com_err table "k524": the krb524 (Kerberos v5 to v4
// ticket conversion) service.
pub const KRB524_BADKEY: krb5_error_code = (-1750206208);
pub const KRB524_BADADDR: krb5_error_code = (-1750206207);
pub const KRB524_BADPRINC: krb5_error_code = (-1750206206);
pub const KRB524_BADREALM: krb5_error_code = (-1750206205);
pub const KRB524_V4ERR: krb5_error_code = (-1750206204);
pub const KRB524_ENCFULL: krb5_error_code = (-1750206203);
pub const KRB524_DECEMPTY: krb5_error_code = (-1750206202);
pub const KRB524_NOTRESP: krb5_error_code = (-1750206201);
pub const KRB524_KRB4_DISABLED: krb5_error_code = (-1750206200);
// TODO extern const here
#[link(name = "krb5")]
extern "C" {
// NOTE: also extern in header
// Registers the "k524" com_err table with the process-global list.
pub fn initialize_k524_error_table();
// NOTE: also extern in header
// Reentrant variant: registers the table on the caller-supplied list.
pub fn initialize_k524_error_table_r(list: *mut *mut et_list);
}
// Base value of the "k524" com_err table.
pub const ERROR_TABLE_BASE_k524: krb5_error_code = (-1750206208);
// TODO: two macros for compability with older versions
// TODO: include et/com_err.h
// Error codes from the com_err table "asn1": ASN.1 encoder/decoder errors
// (asn1_err.h). Unlike the other tables above, this base is positive.
pub const ASN1_BAD_TIMEFORMAT: krb5_error_code = (1859794432);
pub const ASN1_MISSING_FIELD: krb5_error_code = (1859794433);
pub const ASN1_MISPLACED_FIELD: krb5_error_code = (1859794434);
pub const ASN1_TYPE_MISMATCH: krb5_error_code = (1859794435);
pub const ASN1_OVERFLOW: krb5_error_code = (1859794436);
pub const ASN1_OVERRUN: krb5_error_code = (1859794437);
pub const ASN1_BAD_ID: krb5_error_code = (1859794438);
pub const ASN1_BAD_LENGTH: krb5_error_code = (1859794439);
pub const ASN1_BAD_FORMAT: krb5_error_code = (1859794440);
pub const ASN1_PARSE_ERROR: krb5_error_code = (1859794441);
pub const ASN1_BAD_GMTIME: krb5_error_code = (1859794442);
pub const ASN1_MISMATCH_INDEF: krb5_error_code = (1859794443);
pub const ASN1_MISSING_EOC: krb5_error_code = (1859794444);
pub const ASN1_OMITTED: krb5_error_code = (1859794445);
// TODO: extern const here..
// com_err initializers for the "asn1" error table.
//
// The Rust-side identifiers keep the historical misspelling ("initlialize")
// so existing callers continue to compile, but `#[link_name]` binds them to
// the correctly spelled C symbols generated by compile_et —
// `initialize_asn1_error_table` / `initialize_asn1_error_table_r` — which
// are what libkrb5 actually exports. Without the rename, referencing either
// function fails at link time because the misspelled symbol does not exist.
#[link(name = "krb5")]
extern "C" {
// NOTE: also extern in header
// Registers the "asn1" com_err table with the process-global list.
#[link_name = "initialize_asn1_error_table"]
pub fn initlialize_asn1_error_table();
// NOTE: also extern in header
// Reentrant variant: registers the table on the caller-supplied list.
#[link_name = "initialize_asn1_error_table_r"]
pub fn initlialize_asn1_error_table_r(list: *mut *mut et_list);
}
// Base value of the "asn1" com_err table (codes above are base + offset).
pub const ERROR_TABLE_BASE_asn1: krb5_error_code = (1859794432);
// TODO: two macros for compatibility with older versions
// Smoke tests for krb5 APIs that accept nullable function-pointer callbacks:
// each test passes `None` for the callback and asserts the library accepts it
// (and, where applicable, round-trips it). These link and run against the
// system libkrb5.
#[cfg(test)]
mod test_nullable_callbacks {
use std::ptr;
use std::os::raw::*;
use crate as k5;
/// Clearing the checksum callback with None and reading it back must yield
/// a null callback and null data pointer.
#[test]
fn test_checksum_fn_set_get_null() {
let mut ctx : k5::krb5_context = ptr::null_mut();
let mut actx: k5::krb5_auth_context = ptr::null_mut();
assert_eq!(0, unsafe {
k5::krb5_init_context(&mut ctx)
});
assert_eq!(0, unsafe {
k5::krb5_auth_con_init(ctx, &mut actx)
});
/* Erase callback. */
assert_eq!(0, unsafe {
k5::krb5_auth_con_set_checksum_func
(ctx, actx, None, ptr::null_mut())
});
// NOTE(review): if `krb5_mk_req_checksum_func` is already an
// Option-wrapped fn pointer (bindgen style), this is a nested
// Option<Option<..>> — confirm the alias matches the pointer type
// expected by krb5_auth_con_get_checksum_func.
let mut dst_func: Option<k5::krb5_mk_req_checksum_func> = None;
let mut dst_data: *mut c_void = ptr::null_mut();
/* Retrieve callback; result should be None. */
assert_eq!(0, unsafe {
k5::krb5_auth_con_get_checksum_func
(ctx, actx, &mut dst_func, &mut dst_data)
});
assert_eq!(dst_func, None);
assert_eq!(dst_data, ptr::null_mut());
assert_eq!(0, unsafe { k5::krb5_auth_con_free(ctx, actx) });
unsafe { k5::krb5_free_context(ctx) };
}
/// Setting the responder callback on init-creds options to None must be
/// accepted without error.
#[test]
fn test_responder_fn_set_null() {
let mut ctx : k5::krb5_context = ptr::null_mut();
assert_eq!(0, unsafe {
k5::krb5_init_context(&mut ctx)
});
let mut opts: *mut k5::krb5_get_init_creds_opt = ptr::null_mut();
assert_eq!(0, unsafe {
k5::krb5_get_init_creds_opt_alloc(ctx, &mut opts)
});
/* Erase callback. */
assert_eq!(0, unsafe {
k5::krb5_get_init_creds_opt_set_responder
(ctx, opts, None, ptr::null_mut())
});
unsafe { k5::krb5_get_init_creds_opt_free(ctx, opts) };
unsafe { k5::krb5_free_context(ctx) };
}
/* Test ignored by default as it will normally end up contacting a
non-existent KDC or hitting another environment-dependent error. */
/// Passing a null prompter callback with no password must fail cleanly
/// (nonzero error) rather than dereference the null callback.
#[ignore]
#[test]
fn test_prompter_fn_set_null() {
let mut ctx : k5::krb5_context = ptr::null_mut();
assert_eq!(0, unsafe {
k5::krb5_init_context(&mut ctx)
});
// Zero-initialized output struct for the (expected-to-fail) call.
let mut creds: k5::krb5_creds = unsafe {
std::mem::MaybeUninit::zeroed().assume_init()
};
// NUL-terminated principal name for krb5_parse_name.
let name: &'static [u8] = b"test\0";
let mut princ: k5::krb5_principal = ptr::null_mut();
assert_eq!(0, unsafe {
k5::krb5_parse_name(ctx, name.as_ptr() as *const c_char, &mut princ)
});
/* Call without password and with a null callback; this operation
is expected to fail. The test is considered successful if below
call does not segfault on account of a null pointer dereference. */
assert_ne!(0, unsafe {
k5::krb5_get_init_creds_password
(ctx,
&mut creds,
princ,
ptr::null(), /* don't set password -> use prompter */
None, /* unset prompter callback */
ptr::null_mut(),
0,
ptr::null(),
ptr::null())
});
unsafe { k5::krb5_free_principal(ctx, princ) };
unsafe { k5::krb5_free_context(ctx) };
}
/// Clearing the trace callback with None must be accepted without error.
#[test]
fn test_trace_fn_set_null() {
let mut ctx : k5::krb5_context = ptr::null_mut();
assert_eq!(0, unsafe {
k5::krb5_init_context(&mut ctx)
});
/* Erase callback. */
assert_eq!(0, unsafe {
k5::krb5_set_trace_callback
(ctx, None, ptr::null_mut())
});
unsafe { k5::krb5_free_context(ctx) };
}
}
|
// Copyright 2018 TiKV Project Authors. Licensed under Apache-2.0.
// Crate root for the TiKV codec crate: wires up the buffer/byte/number codec
// modules and re-exports their public types. Nightly features are required
// (core_intrinsics, ptr_offset_from; `test` only for benches).
#![cfg_attr(test, feature(test))]
#![feature(core_intrinsics)]
#![feature(ptr_offset_from)]
// quick_error macros are used by the error module.
#[macro_use]
extern crate quick_error;
#[cfg(test)]
extern crate test;
// Linked for its global allocator side effect only.
#[allow(unused_extern_crates)]
extern crate tikv_alloc;
mod buffer;
mod byte;
mod convert;
mod error;
mod number;
// Glob-importable prelude bundling the codec traits callers typically need.
pub mod prelude {
pub use super::buffer::{BufferReader, BufferWriter};
pub use super::byte::MemComparableByteCodec;
pub use super::number::{BufferNumberDecoder, BufferNumberEncoder};
}
// Flat re-exports of the main codec types at the crate root.
pub use self::buffer::{BufferReader, BufferWriter};
pub use self::byte::MemComparableByteCodec;
pub use self::error::{Error, Result};
pub use self::number::NumberCodec;
|
#![allow(missing_docs)]
// Visitor infrastructure over the WebIDL AST: declares the pretty-printing
// submodule and pulls in the AST types used by the visitor traits generated
// by the `make_visitor!` macro below.
/// Converts AST to a pretty printed source string.
pub mod pretty_print;
use super::ast::*;
pub use self::pretty_print::PrettyPrintVisitor;
macro_rules! make_visitor {
($visitor_trait_name:ident, $($mutability:ident)*) => {
#[cfg_attr(rustfmt, rustfmt_skip)]
pub trait $visitor_trait_name<'ast> {
// Override the following functions. The `walk` functions are the default behavior.
/// This is the initial function used to start traversing the AST. By default, this
/// will simply recursively walk down the AST without performing any meaningful action.
fn visit(&mut self, definitions: &'ast $($mutability)* [Definition]) {
for definition in definitions {
self.visit_definition(definition);
}
}
fn visit_argument(&mut self, argument: &'ast $($mutability)* Argument) {
self.walk_argument(argument);
}
fn visit_argument_list_extended_attribute(
&mut self,
ex: &'ast $($mutability)* ArgumentListExtendedAttribute)
{
self.walk_argument_list_extended_attribute(ex);
}
fn visit_attribute(&mut self, attribute: &'ast $($mutability)* Attribute) {
self.walk_attribute(attribute);
}
fn visit_callback(&mut self, callback: &'ast $($mutability)* Callback) {
self.walk_callback(callback);
}
fn visit_callback_interface(
&mut self,
callback_interface: &'ast $($mutability)* CallbackInterface)
{
self.walk_callback_interface(callback_interface);
}
fn visit_const(&mut self, const_: &'ast $($mutability)* Const) {
self.walk_const(const_);
}
fn visit_const_type(&mut self, const_type: &'ast $($mutability)* ConstType) {
self.walk_const_type(const_type);
}
fn visit_const_value(&mut self, _const_value: &'ast $($mutability)* ConstValue) {}
fn visit_default_value(&mut self, default_value: &'ast $($mutability)* DefaultValue) {
self.walk_default_value(default_value);
}
fn visit_definition(&mut self, definition: &'ast $($mutability)* Definition) {
self.walk_definition(definition);
}
fn visit_dictionary(&mut self, dictionary: &'ast $($mutability)* Dictionary) {
self.walk_dictionary(dictionary);
}
fn visit_dictionary_member(&mut self,
dictionary_member: &'ast $($mutability)* DictionaryMember) {
self.walk_dictionary_member(dictionary_member);
}
fn visit_enum(&mut self, enum_: &'ast $($mutability)* Enum) {
self.walk_enum(enum_);
}
fn visit_explicit_stringifier_operation(
&mut self,
op: &'ast $($mutability)* ExplicitStringifierOperation)
{
self.walk_explicit_stringifier_operation(op);
}
fn visit_extended_attribute(&mut self,
ex: &'ast $($mutability)* ExtendedAttribute) {
self.walk_extended_attribute(ex);
}
fn visit_identifier(&mut self, _identifier: &'ast $($mutability)* str) {}
fn visit_identifier_extended_attribute(
&mut self,
ex: &'ast $($mutability)* IdentifierExtendedAttribute)
{
self.walk_identifier_extended_attribute(ex);
}
fn visit_identifier_list_extended_attribute(
&mut self,
ex: &'ast $($mutability)* IdentifierListExtendedAttribute)
{
self.walk_identifier_list_extended_attribute(ex);
}
fn visit_implicit_stringifier_operation(
&mut self,
op: &'ast $($mutability)* ImplicitStringifierOperation)
{
self.walk_implicit_stringifier_operation(op);
}
fn visit_includes(&mut self, includes: &'ast $($mutability)* Includes) {
self.walk_includes(includes);
}
fn visit_interface(&mut self, interface: &'ast $($mutability)* Interface) {
self.walk_interface(interface);
}
fn visit_interface_member(&mut self,
interface_member: &'ast $($mutability)* InterfaceMember) {
self.walk_interface_member(interface_member);
}
fn visit_iterable(&mut self, iterable: &'ast $($mutability)* Iterable) {
self.walk_iterable(iterable);
}
fn visit_maplike(&mut self, maplike: &'ast $($mutability)* Maplike) {
self.walk_maplike(maplike);
}
fn visit_mixin(&mut self, mixin: &'ast $($mutability)* Mixin) {
self.walk_mixin(mixin);
}
fn visit_mixin_member(&mut self, mixin_member: &'ast $($mutability)* MixinMember) {
self.walk_mixin_member(mixin_member);
}
fn visit_named_argument_list_extended_attribute(
&mut self,
ex: &'ast $($mutability)* NamedArgumentListExtendedAttribute)
{
self.walk_named_argument_list_extended_attribute(ex);
}
fn visit_namespace(&mut self, namespace: &'ast $($mutability)* Namespace) {
self.walk_namespace(namespace);
}
fn visit_namespace_member(&mut self,
namespace_member: &'ast $($mutability)* NamespaceMember) {
self.walk_namespace_member(namespace_member);
}
fn visit_non_partial_dictionary(
&mut self,
dictionary: &'ast $($mutability)* NonPartialDictionary)
{
self.walk_non_partial_dictionary(dictionary);
}
fn visit_non_partial_interface(&mut self,
interface: &'ast $($mutability)* NonPartialInterface) {
self.walk_non_partial_interface(interface);
}
fn visit_non_partial_mixin(&mut self,
mixin: &'ast $($mutability)* NonPartialMixin) {
self.walk_non_partial_mixin(mixin);
}
fn visit_non_partial_namespace(&mut self,
namespace: &'ast $($mutability)* NonPartialNamespace) {
self.walk_non_partial_namespace(namespace);
}
fn visit_operation(&mut self, operation: &'ast $($mutability)* Operation) {
self.walk_operation(operation);
}
fn visit_other(&mut self, other: &'ast $($mutability)* Other) {
self.walk_other(other);
}
fn visit_other_extended_attribute(&mut self,
ex: &'ast $($mutability)* OtherExtendedAttribute) {
self.walk_other_extended_attribute(ex);
}
fn visit_partial_dictionary(&mut self,
dictionary: &'ast $($mutability)* PartialDictionary) {
self.walk_partial_dictionary(dictionary);
}
fn visit_partial_interface(&mut self,
interface: &'ast $($mutability)* PartialInterface) {
self.walk_partial_interface(interface);
}
fn visit_partial_mixin(&mut self,
mixin: &'ast $($mutability)* PartialMixin) {
self.walk_partial_mixin(mixin);
}
fn visit_partial_namespace(&mut self,
namespace: &'ast $($mutability)* PartialNamespace) {
self.walk_partial_namespace(namespace);
}
fn visit_regular_attribute(&mut self,
regular_attribute: &'ast $($mutability)* RegularAttribute) {
self.walk_regular_attribute(regular_attribute);
}
fn visit_regular_operation(&mut self,
regular_operation: &'ast $($mutability)* RegularOperation) {
self.walk_regular_operation(regular_operation);
}
fn visit_return_type(&mut self, return_type: &'ast $($mutability)* ReturnType) {
self.walk_return_type(return_type);
}
fn visit_setlike(&mut self, setlike: &'ast $($mutability)* Setlike) {
self.walk_setlike(setlike);
}
fn visit_special(&mut self, _special: &'ast $($mutability)* Special) {}
fn visit_special_operation(&mut self,
special_operation: &'ast $($mutability)* SpecialOperation) {
self.walk_special_operation(special_operation);
}
fn visit_static_attribute(&mut self,
static_attribute: &'ast $($mutability)* StaticAttribute) {
self.walk_static_attribute(static_attribute);
}
fn visit_static_operation(&mut self,
static_operation: &'ast $($mutability)* StaticOperation) {
self.walk_static_operation(static_operation);
}
fn visit_string_type(&mut self, _string_type: &'ast $($mutability)* StringType) {}
fn visit_stringifier_attribute(&mut self,
attribute: &'ast $($mutability)* StringifierAttribute) {
self.walk_stringifier_attribute(attribute);
}
fn visit_stringifier_operation(&mut self,
operation: &'ast $($mutability)* StringifierOperation) {
self.walk_stringifier_operation(operation);
}
fn visit_type(&mut self, type_: &'ast $($mutability)* Type) {
self.walk_type(type_);
}
fn visit_type_kind(&mut self, type_kind: &'ast $($mutability)* TypeKind) {
self.walk_type_kind(type_kind);
}
fn visit_typedef(&mut self, typedef: &'ast $($mutability)* Typedef) {
self.walk_typedef(typedef);
}
// The `walk` functions are not meant to be overridden.
fn walk_argument(&mut self, argument: &'ast $($mutability)* Argument) {
for extended_attribute in &$($mutability)* argument.extended_attributes {
self.visit_extended_attribute(extended_attribute);
}
self.visit_type(&$($mutability)* argument.type_);
self.visit_identifier(&$($mutability)* argument.name);
if let Some(ref $($mutability)* default_value) = argument.default {
self.visit_default_value(default_value);
}
}
fn walk_argument_list_extended_attribute(
&mut self,
ex: &'ast $($mutability)* ArgumentListExtendedAttribute)
{
self.visit_identifier(&$($mutability)* ex.name);
for argument in &$($mutability)* ex.arguments {
self.visit_argument(argument);
}
}
fn walk_attribute(&mut self, attribute: &'ast $($mutability)* Attribute) {
match *attribute {
Attribute::Regular(ref $($mutability)* attribute) => {
self.visit_regular_attribute(attribute);
}
Attribute::Static(ref $($mutability)* attribute) => {
self.visit_static_attribute(attribute);
}
Attribute::Stringifier(ref $($mutability)* attribute) => {
self.visit_stringifier_attribute(attribute);
}
}
}
fn walk_callback(&mut self, callback: &'ast $($mutability)* Callback) {
for extended_attribute in &$($mutability)* callback.extended_attributes {
self.visit_extended_attribute(extended_attribute);
}
self.visit_identifier(&$($mutability)* callback.name);
self.visit_return_type(&$($mutability)* callback.return_type);
for argument in &$($mutability)* callback.arguments {
self.visit_argument(argument);
}
}
fn walk_callback_interface(&mut self,
callback_interface: &'ast $($mutability)* CallbackInterface)
{
for extended_attribute in &$($mutability)* callback_interface.extended_attributes {
self.visit_extended_attribute(extended_attribute);
}
self.visit_identifier(&$($mutability)* callback_interface.name);
if let Some(ref $($mutability)* inherits) = callback_interface.inherits {
self.visit_identifier(inherits);
}
for member in &$($mutability)* callback_interface.members {
self.visit_interface_member(member);
}
}
fn walk_const(&mut self, const_: &'ast $($mutability)* Const) {
for extended_attribute in &$($mutability)* const_.extended_attributes {
self.visit_extended_attribute(extended_attribute);
}
self.visit_const_type(&$($mutability)* const_.type_);
self.visit_identifier(&$($mutability)* const_.name);
self.visit_const_value(&$($mutability)* const_.value);
}
fn walk_const_type(&mut self, const_type: &'ast $($mutability)* ConstType) {
if let ConstType::Identifier(ref $($mutability)* identifier) = *const_type {
self.visit_identifier(identifier);
}
}
fn walk_default_value(&mut self, default_value: &'ast $($mutability)* DefaultValue) {
if let DefaultValue::ConstValue(ref $($mutability)* const_value) = *default_value {
self.visit_const_value(const_value);
}
}
fn walk_definition(&mut self, definition: &'ast $($mutability)* Definition) {
match *definition {
Definition::Callback(ref $($mutability)* callback) => {
self.visit_callback(callback);
}
Definition::Dictionary(ref $($mutability)* dictionary) => {
self.visit_dictionary(dictionary);
}
Definition::Enum(ref $($mutability)* enum_) => {
self.visit_enum(enum_);
}
Definition::Includes(ref $($mutability)* includes) => {
self.visit_includes(includes);
}
Definition::Interface(ref $($mutability)* interface) => {
self.visit_interface(interface);
}
Definition::Mixin(ref $($mutability)* mixin) => {
self.visit_mixin(mixin);
}
Definition::Namespace(ref $($mutability)* namespace) => {
self.visit_namespace(namespace);
}
Definition::Typedef(ref $($mutability)* typedef) => {
self.visit_typedef(typedef);
}
}
}
fn walk_dictionary(&mut self, dictionary: &'ast $($mutability)* Dictionary) {
match *dictionary {
Dictionary::NonPartial(ref $($mutability)* dictionary) => {
self.visit_non_partial_dictionary(dictionary);
}
Dictionary::Partial(ref $($mutability)* dictionary) => {
self.visit_partial_dictionary(dictionary);
}
}
}
// Visits a dictionary member's extended attributes, type, name, and the
// optional default value, in declaration order.
fn walk_dictionary_member(&mut self,
                          dictionary_member: &'ast $($mutability)* DictionaryMember)
{
    for extended_attribute in &$($mutability)* dictionary_member.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_type(&$($mutability)* dictionary_member.type_);
    self.visit_identifier(&$($mutability)* dictionary_member.name);
    if let Some(ref $($mutability)* default_value) = dictionary_member.default {
        self.visit_default_value(default_value);
    }
}
// Visits an enum's extended attributes and its name.
fn walk_enum(&mut self, enum_: &'ast $($mutability)* Enum) {
    for extended_attribute in &$($mutability)* enum_.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_identifier(&$($mutability)* enum_.name);
}
// Visits an explicit `stringifier` operation: attributes, return type,
// optional name, then each argument.
fn walk_explicit_stringifier_operation(
    &mut self,
    op: &'ast $($mutability)* ExplicitStringifierOperation)
{
    for extended_attribute in &$($mutability)* op.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_return_type(&$($mutability)* op.return_type);
    if let Some(ref $($mutability)* name) = op.name {
        self.visit_identifier(name);
    }
    for argument in &$($mutability)* op.arguments {
        self.visit_argument(argument);
    }
}
// Dispatches on the form of an extended attribute; the `NoArguments`
// payload is an `Other`, hence `visit_other`.
fn walk_extended_attribute(&mut self,
                           ex: &'ast $($mutability)* ExtendedAttribute)
{
    use ast::ExtendedAttribute::*;
    match *ex {
        ArgumentList(ref $($mutability)* extended_attribute) => {
            self.visit_argument_list_extended_attribute(extended_attribute);
        }
        Identifier(ref $($mutability)* extended_attribute) => {
            self.visit_identifier_extended_attribute(extended_attribute);
        }
        IdentifierList(ref $($mutability)* extended_attribute) => {
            self.visit_identifier_list_extended_attribute(extended_attribute);
        }
        NamedArgumentList(ref $($mutability)* extended_attribute) => {
            self.visit_named_argument_list_extended_attribute(extended_attribute);
        }
        NoArguments(ref $($mutability)* extended_attribute) => {
            self.visit_other(extended_attribute);
        }
    }
}
// Visits the `lhs=rhs` pair of an identifier extended attribute.
fn walk_identifier_extended_attribute(
    &mut self,
    ex: &'ast $($mutability)* IdentifierExtendedAttribute)
{
    self.visit_identifier(&$($mutability)* ex.lhs);
    self.visit_other(&$($mutability)* ex.rhs);
}
// Visits `lhs=(id1, id2, ...)`: the left identifier, then each on the right.
fn walk_identifier_list_extended_attribute(
    &mut self,
    ex: &'ast $($mutability)* IdentifierListExtendedAttribute)
{
    self.visit_identifier(&$($mutability)* ex.lhs);
    for identifier in &$($mutability)* ex.rhs {
        self.visit_identifier(identifier);
    }
}
// An implicit stringifier operation has only extended attributes to visit.
fn walk_implicit_stringifier_operation(
    &mut self,
    op: &'ast $($mutability)* ImplicitStringifierOperation)
{
    for extended_attribute in &$($mutability)* op.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
}
// Visits an `A includes B` statement: attributes, includer, then includee.
fn walk_includes(&mut self, includes: &'ast $($mutability)* Includes) {
    for extended_attribute in &$($mutability)* includes.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_identifier(&$($mutability)* includes.includer);
    self.visit_identifier(&$($mutability)* includes.includee);
}
// Routes an interface to its callback, non-partial, or partial visitor.
fn walk_interface(&mut self, interface: &'ast $($mutability)* Interface) {
    match *interface {
        Interface::Callback(ref $($mutability)* interface) => {
            self.visit_callback_interface(interface);
        }
        Interface::NonPartial(ref $($mutability)* interface) => {
            self.visit_non_partial_interface(interface);
        }
        Interface::Partial(ref $($mutability)* interface) => {
            self.visit_partial_interface(interface);
        }
    }
}
// Dispatches on the kind of interface member.
fn walk_interface_member(&mut self,
                         interface_member: &'ast $($mutability)* InterfaceMember) {
    match *interface_member {
        InterfaceMember::Attribute(ref $($mutability)* member) => {
            self.visit_attribute(member);
        }
        InterfaceMember::Const(ref $($mutability)* member) => {
            self.visit_const(member);
        }
        InterfaceMember::Iterable(ref $($mutability)* member) => {
            self.visit_iterable(member);
        }
        InterfaceMember::Maplike(ref $($mutability)* member) => {
            self.visit_maplike(member);
        }
        InterfaceMember::Operation(ref $($mutability)* member) => {
            self.visit_operation(member);
        }
        InterfaceMember::Setlike(ref $($mutability)* member) => {
            self.visit_setlike(member);
        }
    }
}
// Visits an `iterable<...>` declaration; the key type is only present in
// the two-type (pair-iterator) form.
fn walk_iterable(&mut self, iterable: &'ast $($mutability)* Iterable) {
    for extended_attribute in &$($mutability)* iterable.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    if let Some(ref $($mutability)* key_type) = iterable.key_type {
        self.visit_type(key_type);
    }
    self.visit_type(&$($mutability)* iterable.value_type);
}
// Visits a `maplike<K, V>` declaration: attributes, key type, value type.
fn walk_maplike(&mut self, maplike: &'ast $($mutability)* Maplike) {
    for extended_attribute in &$($mutability)* maplike.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_type(&$($mutability)* maplike.key_type);
    self.visit_type(&$($mutability)* maplike.value_type);
}
// Routes a mixin to its non-partial or partial visitor.
fn walk_mixin(&mut self, mixin: &'ast $($mutability)* Mixin) {
    match *mixin {
        Mixin::NonPartial(ref $($mutability)* mixin) => {
            self.visit_non_partial_mixin(mixin);
        }
        Mixin::Partial(ref $($mutability)* mixin) => {
            self.visit_partial_mixin(mixin);
        }
    }
}
// Dispatches on the kind of mixin member.
fn walk_mixin_member(&mut self, mixin_member: &'ast $($mutability)* MixinMember) {
    match *mixin_member {
        MixinMember::Attribute(ref $($mutability)* member) => {
            self.visit_attribute(member);
        }
        MixinMember::Const(ref $($mutability)* member) => {
            self.visit_const(member);
        }
        MixinMember::Operation(ref $($mutability)* member) => {
            self.visit_operation(member);
        }
    }
}
// Visits `lhs=rhs(args...)`: both identifiers, then each argument.
fn walk_named_argument_list_extended_attribute(
    &mut self,
    ex: &'ast $($mutability)* NamedArgumentListExtendedAttribute)
{
    self.visit_identifier(&$($mutability)* ex.lhs_name);
    self.visit_identifier(&$($mutability)* ex.rhs_name);
    for argument in &$($mutability)* ex.rhs_arguments {
        self.visit_argument(argument);
    }
}
// Routes a namespace to its non-partial or partial visitor.
fn walk_namespace(&mut self, namespace: &'ast $($mutability)* Namespace) {
    match *namespace {
        Namespace::NonPartial(ref $($mutability)* namespace) => {
            self.visit_non_partial_namespace(namespace);
        }
        Namespace::Partial(ref $($mutability)* namespace) => {
            self.visit_partial_namespace(namespace);
        }
    }
}
// Dispatches on the kind of namespace member.
fn walk_namespace_member(&mut self,
                         namespace_member: &'ast $($mutability)* NamespaceMember) {
    match *namespace_member {
        NamespaceMember::Attribute(ref $($mutability)* member) => {
            self.visit_attribute(member);
        }
        NamespaceMember::Operation(ref $($mutability)* member) => {
            self.visit_operation(member);
        }
    }
}
// Visits a full dictionary: attributes, name, optional parent, members.
fn walk_non_partial_dictionary(
    &mut self,
    dictionary: &'ast $($mutability)* NonPartialDictionary)
{
    for extended_attribute in &$($mutability)* dictionary.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_identifier(&$($mutability)* dictionary.name);
    if let Some(ref $($mutability)* inherits) = dictionary.inherits {
        self.visit_identifier(inherits);
    }
    for member in &$($mutability)* dictionary.members {
        self.visit_dictionary_member(member);
    }
}
// Visits a full interface: attributes, name, optional parent, members.
fn walk_non_partial_interface(
    &mut self,
    interface: &'ast $($mutability)* NonPartialInterface)
{
    for extended_attribute in &$($mutability)* interface.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_identifier(&$($mutability)* interface.name);
    if let Some(ref $($mutability)* inherits) = interface.inherits {
        self.visit_identifier(inherits);
    }
    for member in &$($mutability)* interface.members {
        self.visit_interface_member(member);
    }
}
// Visits a full mixin: attributes, name, members (mixins cannot inherit).
fn walk_non_partial_mixin( &mut self, mixin: &'ast $($mutability)* NonPartialMixin)
{
    for extended_attribute in &$($mutability)* mixin.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_identifier(&$($mutability)* mixin.name);
    for member in &$($mutability)* mixin.members {
        self.visit_mixin_member(member);
    }
}
// Visits a full namespace: attributes, name, members.
fn walk_non_partial_namespace(
    &mut self,
    namespace: &'ast $($mutability)* NonPartialNamespace)
{
    for extended_attribute in &$($mutability)* namespace.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_identifier(&$($mutability)* namespace.name);
    for member in &$($mutability)* namespace.members {
        self.visit_namespace_member(member);
    }
}
// Dispatches on the kind of operation.
fn walk_operation(&mut self, operation: &'ast $($mutability)* Operation) {
    match *operation {
        Operation::Regular(ref $($mutability)* operation) => {
            self.visit_regular_operation(operation);
        }
        Operation::Special(ref $($mutability)* operation) => {
            self.visit_special_operation(operation);
        }
        Operation::Static(ref $($mutability)* operation) => {
            self.visit_static_operation(operation);
        }
        Operation::Stringifier(ref $($mutability)* operation) => {
            self.visit_stringifier_operation(operation);
        }
    }
}
// Of the `Other` variants, only `Identifier` has a visitable child.
fn walk_other(&mut self, other: &'ast $($mutability)* Other) {
    if let Other::Identifier(ref $($mutability)* identifier) = *other {
        self.visit_identifier(identifier);
    }
}
// Walks the recursive "other" extended-attribute forms: both variants carry
// an optional payload plus an optional tail (`rest`) of further attributes.
fn walk_other_extended_attribute(&mut self,
                                 ex: &'ast $($mutability)* OtherExtendedAttribute)
{
    match *ex {
        OtherExtendedAttribute::Nested {
            ref $($mutability)* inner,
            ref $($mutability)* rest,
            ..
        } => {
            if let Some(ref $($mutability)* inner) = *inner {
                self.visit_extended_attribute(inner);
            }
            if let Some(ref $($mutability)* rest) = *rest {
                self.visit_extended_attribute(rest);
            }
        }
        OtherExtendedAttribute::Other {
            ref $($mutability)* other,
            ref $($mutability)* rest,
            ..
        } => {
            if let Some(ref $($mutability)* other) = *other {
                self.visit_other(other);
            }
            if let Some(ref $($mutability)* rest) = *rest {
                self.visit_extended_attribute(rest);
            }
        }
    }
}
// Visits a `partial dictionary`: attributes, name, members.
fn walk_partial_dictionary(&mut self,
                           partial_dictionary: &'ast $($mutability)* PartialDictionary)
{
    for extended_attribute in &$($mutability)* partial_dictionary.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_identifier(&$($mutability)* partial_dictionary.name);
    for member in &$($mutability)* partial_dictionary.members {
        self.visit_dictionary_member(member);
    }
}
// Visits a `partial interface`: attributes, name, members.
fn walk_partial_interface(&mut self,
                          partial_interface: &'ast $($mutability)* PartialInterface)
{
    for extended_attribute in &$($mutability)* partial_interface.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_identifier(&$($mutability)* partial_interface.name);
    for member in &$($mutability)* partial_interface.members {
        self.visit_interface_member(member);
    }
}
// Visits a partial mixin: attributes, name, members.
fn walk_partial_mixin(&mut self, partial_mixin: &'ast $($mutability)* PartialMixin)
{
    for extended_attribute in &$($mutability)* partial_mixin.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_identifier(&$($mutability)* partial_mixin.name);
    for member in &$($mutability)* partial_mixin.members {
        self.visit_mixin_member(member);
    }
}
// Visits a `partial namespace`: attributes, name, members.
fn walk_partial_namespace(&mut self,
                          partial_namespace: &'ast $($mutability)* PartialNamespace)
{
    for extended_attribute in &$($mutability)* partial_namespace.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_identifier(&$($mutability)* partial_namespace.name);
    for member in &$($mutability)* partial_namespace.members {
        self.visit_namespace_member(member);
    }
}
// Visits a regular attribute: extended attributes, type, then name.
fn walk_regular_attribute(&mut self,
                          regular_attribute: &'ast $($mutability)* RegularAttribute)
{
    for extended_attribute in &$($mutability)* regular_attribute.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_type(&$($mutability)* regular_attribute.type_);
    self.visit_identifier(&$($mutability)* regular_attribute.name);
}
// Visits a regular operation: attributes, return type, optional name, args.
fn walk_regular_operation(&mut self,
                          regular_operation: &'ast $($mutability)* RegularOperation)
{
    for extended_attribute in &$($mutability)* regular_operation.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_return_type(&$($mutability)* regular_operation.return_type);
    if let Some(ref $($mutability)* name) = regular_operation.name {
        self.visit_identifier(name);
    }
    for argument in &$($mutability)* regular_operation.arguments {
        self.visit_argument(argument);
    }
}
// A `void` return type has no child; only non-void carries a type to visit.
fn walk_return_type(&mut self, return_type: &'ast $($mutability)* ReturnType) {
    if let ReturnType::NonVoid(ref $($mutability)* type_) = *return_type {
        self.visit_type(type_);
    }
}
// Visits a `setlike<T>` declaration: attributes, then the element type.
fn walk_setlike(&mut self, setlike: &'ast $($mutability)* Setlike) {
    for extended_attribute in &$($mutability)* setlike.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_type(&$($mutability)* setlike.type_);
}
// Visits a special operation: attributes, special keywords (getter/setter/
// deleter), return type, optional name, then arguments.
fn walk_special_operation(&mut self,
                          special_operation: &'ast $($mutability)* SpecialOperation)
{
    for extended_attribute in &$($mutability)* special_operation.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    for special_keyword in &$($mutability)* special_operation.special_keywords {
        self.visit_special(special_keyword);
    }
    self.visit_return_type(&$($mutability)* special_operation.return_type);
    if let Some(ref $($mutability)* name) = special_operation.name {
        self.visit_identifier(name);
    }
    for argument in &$($mutability)* special_operation.arguments {
        self.visit_argument(argument);
    }
}
// Visits a static attribute: extended attributes, type, then name.
fn walk_static_attribute(&mut self,
                         static_attribute: &'ast $($mutability)* StaticAttribute)
{
    for extended_attribute in &$($mutability)* static_attribute.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_type(&$($mutability)* static_attribute.type_);
    self.visit_identifier(&$($mutability)* static_attribute.name);
}
// Visits a static operation: attributes, return type, optional name, args.
fn walk_static_operation(&mut self,
                         static_operation: &'ast $($mutability)* StaticOperation)
{
    for extended_attribute in &$($mutability)* static_operation.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_return_type(&$($mutability)* static_operation.return_type);
    if let Some(ref $($mutability)* name) = static_operation.name {
        self.visit_identifier(name);
    }
    for argument in &$($mutability)* static_operation.arguments {
        self.visit_argument(argument);
    }
}
// Visits a stringifier attribute: extended attributes, type, then name.
fn walk_stringifier_attribute(
    &mut self,
    attribute: &'ast $($mutability)* StringifierAttribute)
{
    for extended_attribute in &$($mutability)* attribute.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_type(&$($mutability)* attribute.type_);
    self.visit_identifier(&$($mutability)* attribute.name);
}
// Routes a stringifier operation to its explicit or implicit visitor.
fn walk_stringifier_operation(
    &mut self,
    stringifier_operation: &'ast $($mutability)* StringifierOperation)
{
    match *stringifier_operation {
        StringifierOperation::Explicit(ref $($mutability)* operation) => {
            self.visit_explicit_stringifier_operation(operation);
        }
        StringifierOperation::Implicit(ref $($mutability)* operation) => {
            self.visit_implicit_stringifier_operation(operation);
        }
    }
}
// Visits a type: its extended attributes first, then the type kind.
fn walk_type(&mut self, type_: &'ast $($mutability)* Type) {
    for extended_attribute in &$($mutability)* type_.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_type_kind(&$($mutability)* type_.kind);
}
// Recurses into composite type kinds; primitive kinds (the `_` arm) are
// leaves with nothing to visit.
fn walk_type_kind(&mut self, type_kind: &'ast $($mutability)* TypeKind) {
    match *type_kind {
        TypeKind::FrozenArray(ref $($mutability)* type_) => {
            self.visit_type(type_);
        }
        TypeKind::Identifier(ref $($mutability)* identifier) => {
            self.visit_identifier(identifier);
        }
        TypeKind::Promise(ref $($mutability)* return_type) => {
            self.visit_return_type(return_type);
        }
        TypeKind::Record(_, ref $($mutability)* type_) => {
            // Record keys are restricted string types; only the value type
            // is walked here.
            self.visit_type(type_);
        }
        TypeKind::Sequence(ref $($mutability)* type_) => {
            self.visit_type(type_);
        }
        TypeKind::Union(ref $($mutability)* types) => {
            for type_ in types {
                self.visit_type(type_);
            }
        }
        _ => (),
    }
}
// Visits a typedef: attributes, the aliased type, then the new name.
fn walk_typedef(&mut self, typedef: &'ast $($mutability)* Typedef) {
    for extended_attribute in &$($mutability)* typedef.extended_attributes {
        self.visit_extended_attribute(extended_attribute);
    }
    self.visit_type(&$($mutability)* typedef.type_);
    self.visit_identifier(&$($mutability)* typedef.name);
}
}
}
}
// Instantiate the visitor trait twice from the macro above: one walking
// shared `&` references and one walking `&mut` references.
make_visitor!(ImmutableVisitor,);
make_visitor!(MutableVisitor, mut);
|
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::panic;
use errors::FatalError;
use proc_macro::{TokenStream, __internal};
use syntax::ast::{self, ItemKind, Attribute, Mac};
use syntax::attr::{mark_used, mark_known};
use syntax::codemap::Span;
use syntax::ext::base::*;
use syntax::visit::Visitor;
// AST visitor that marks every attribute whose name appears in the wrapped
// list as both used and known, silencing unused/unknown-attribute lints.
struct MarkAttrs<'a>(&'a [ast::Name]);
impl<'a> Visitor<'a> for MarkAttrs<'a> {
    fn visit_attribute(&mut self, attr: &Attribute) {
        if self.0.contains(&attr.name()) {
            mark_used(attr);
            mark_known(attr);
        }
    }
    // Do not descend into macro invocations.
    fn visit_mac(&mut self, _mac: &Mac) {}
}
// A registered custom derive: `inner` is the user's proc-macro entry point
// and `attrs` lists the helper attribute names the derive declared.
pub struct ProcMacroDerive {
    inner: fn(TokenStream) -> TokenStream,
    attrs: Vec<ast::Name>,
}
impl ProcMacroDerive {
    pub fn new(inner: fn(TokenStream) -> TokenStream, attrs: Vec<ast::Name>) -> ProcMacroDerive {
        ProcMacroDerive { inner: inner, attrs: attrs }
    }
}
impl MultiItemModifier for ProcMacroDerive {
    // Expands a `#[derive(...)]` invocation: validates the annotated item is a
    // struct/enum/union, runs the user's proc macro inside a catch_unwind, and
    // parses the produced token stream back into items. Emits errors (and
    // raises FatalError) on panic or unparseable output.
    fn expand(&self,
              ecx: &mut ExtCtxt,
              span: Span,
              _meta_item: &ast::MetaItem,
              item: Annotatable)
              -> Vec<Annotatable> {
        // Derives are only meaningful on items; reject everything else.
        let item = match item {
            Annotatable::Item(item) => item,
            Annotatable::ImplItem(_) |
            Annotatable::TraitItem(_) |
            Annotatable::ForeignItem(_) |
            Annotatable::Stmt(_) |
            Annotatable::Expr(_) => {
                ecx.span_err(span, "proc-macro derives may only be \
                                    applied to a struct, enum, or union");
                return Vec::new()
            }
        };
        // Within items, only type-definition kinds are allowed.
        match item.node {
            ItemKind::Struct(..) |
            ItemKind::Enum(..) |
            ItemKind::Union(..) => {},
            _ => {
                ecx.span_err(span, "proc-macro derives may only be \
                                    applied to a struct, enum, or union");
                return Vec::new()
            }
        }
        // Mark attributes as known, and used.
        MarkAttrs(&self.attrs).visit_item(&item);
        let input = __internal::new_token_stream(ecx.resolver.eliminate_crate_var(item.clone()));
        // Run the user's derive function; a panic inside it must not take
        // down the compiler, so catch it and report instead.
        let res = __internal::set_sess(ecx, || {
            let inner = self.inner;
            panic::catch_unwind(panic::AssertUnwindSafe(|| inner(input)))
        });
        let stream = match res {
            Ok(stream) => stream,
            Err(e) => {
                let msg = "proc-macro derive panicked";
                let mut err = ecx.struct_span_fatal(span, msg);
                // Surface the panic payload when it is a string-like message.
                if let Some(s) = e.downcast_ref::<String>() {
                    err.help(&format!("message: {}", s));
                }
                if let Some(s) = e.downcast_ref::<&'static str>() {
                    err.help(&format!("message: {}", s));
                }
                err.emit();
                FatalError.raise();
            }
        };
        // Compare error counts before/after parsing so errors emitted while
        // parsing the derive output are attributed to the derive.
        let error_count_before = ecx.parse_sess.span_diagnostic.err_count();
        __internal::set_sess(ecx, || {
            let msg = "proc-macro derive produced unparseable tokens";
            match __internal::token_stream_parse_items(stream) {
                // fail if there have been errors emitted
                Ok(_) if ecx.parse_sess.span_diagnostic.err_count() > error_count_before => {
                    ecx.struct_span_fatal(span, msg).emit();
                    FatalError.raise();
                }
                Ok(new_items) => new_items.into_iter().map(Annotatable::Item).collect(),
                Err(_) => {
                    // FIXME: handle this better
                    ecx.struct_span_fatal(span, msg).emit();
                    FatalError.raise();
                }
            }
        })
    }
}
|
extern crate bootstrap_rs as bootstrap;
extern crate bootstrap_gl as gl;
extern crate stopwatch;
use bootstrap::window::*;
use gl::types::*;
use std::time::{Duration, Instant};
use stopwatch::PrettyDuration;
// Three vertices (x, y, z each) of a triangle spanning clip space.
static VERTEX_POSITIONS: &'static [f32] = &[
    -1.0, -1.0, 0.0,
    1.0, -1.0, 0.0,
    0.0, 1.0, 0.0,
];
// Demo entry point: opens a window, sets up a minimal GL pipeline for a
// single triangle, and redraws it every frame while timing the loop.
fn main() {
    // Open a window to be used as a target for rendering.
    let mut window = Window::new("Hello, Triangle!").unwrap();
    // Create an OpenGL context for the window.
    let device_context = window.platform().device_context();
    let context = unsafe {
        let context = gl::create_context(device_context).unwrap();
        gl::make_current(context);
        context
    };
    // Create a vertex buffer to store the vertices of the triangle. We provide it with data and
    // specify the layout of that data.
    let buffer_name = gl::gen_buffer().unwrap();
    unsafe {
        gl::bind_buffer(BufferTarget::Array, buffer_name);
        gl::buffer_data(BufferTarget::Array, VERTEX_POSITIONS, BufferUsage::StaticDraw);
    }
    // Create the vertex array object to hold the state needed to draw.
    let vertex_array_name = gl::gen_vertex_array().unwrap();
    unsafe {
        gl::bind_vertex_array(vertex_array_name);
        gl::enable_vertex_attrib_array(AttributeLocation::from_index(0));
        gl::vertex_attrib_pointer(
            AttributeLocation::from_index(0),
            3, // components per vertex
            GlType::Float,
            False, // not normalized
            0, // stride (bytes)
            0, // offset (bytes)
        );
    }
    // Swap interval 0 disables vsync so the loop runs as fast as it can.
    unsafe { gl::platform::set_swap_interval(0); }
    let mut last_frame_time = Instant::now();
    'outer: loop {
        // Drain pending window messages; a Close message ends the program.
        while let Some(message) = window.next_message() {
            match message {
                Message::Close => break 'outer,
                _ => {},
            }
        }
        unsafe {
            gl::clear(ClearBufferMask::Color | ClearBufferMask::Depth);
            gl::draw_arrays(
                DrawMode::Triangles,
                0, // offset
                3, // elements to draw
            );
        }
        // Time just the buffer swap separately from the rest of the frame.
        let swap_time = unsafe {
            let timer = Instant::now();
            gl::swap_buffers(context);
            timer.elapsed()
        };
        // Warn when a frame overshoots ~17 ms (roughly one 60 Hz frame).
        if last_frame_time.elapsed() > Duration::new(0, 17_000_000) {
            println!("!!! loop time exceeded: {:?}, swap time: {:?}", PrettyDuration(last_frame_time.elapsed()), PrettyDuration(swap_time));
        } else {
            // println!("loop time: {:?}, swap time: {:?}", PrettyDuration(last_frame_time.elapsed()), PrettyDuration(swap_time));
        }
        // while last_frame_time.elapsed() < Duration::new(0, 16_666_666) {}
        last_frame_time = Instant::now();
    }
}
|
// Copyright 2022 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::cmp::Ordering;
use std::cmp::Ordering::Less;
use std::intrinsics::assume;
use std::mem;
use std::ptr;
use common_arrow::arrow::bitmap::MutableBitmap;
use crate::types::*;
use crate::with_number_mapped_type;
use crate::Column;
use crate::Scalar;
/// Maintains the current top-`limit` scalar values seen so far, stored as a
/// binary heap once full.
#[derive(Clone)]
pub struct TopKSorter {
    // Buffered values; once `len == limit` this is a heap whose root
    // (`data[0]`) is the current worst element of the top-k set.
    data: Vec<Scalar>,
    // Number of values to keep.
    limit: usize,
    // `true` keeps the smallest values (ascending top-k), `false` the largest.
    asc: bool,
}
impl TopKSorter {
    /// Creates a sorter keeping `limit` values; `asc` selects whether "top"
    /// means smallest (`true`) or largest (`false`).
    pub fn new(limit: usize, asc: bool) -> Self {
        Self {
            data: Vec::with_capacity(limit),
            limit,
            asc,
        }
    }
    // Push the column into this sorter and update the bitmap.
    // The bitmap could be used in filter: rows whose bit is cleared can never
    // be part of the top-k result.
    pub fn push_column(&mut self, col: &Column, bitmap: &mut MutableBitmap) {
        with_number_mapped_type!(|NUM_TYPE| match col.data_type() {
            DataType::Number(NumberDataType::NUM_TYPE) =>
                self.push_column_internal::<NumberType::<NUM_TYPE>>(col, bitmap),
            DataType::String => self.push_column_internal::<StringType>(col, bitmap),
            DataType::Timestamp => self.push_column_internal::<TimestampType>(col, bitmap),
            DataType::Date => self.push_column_internal::<DateType>(col, bitmap),
            // Unsupported types are ignored; their rows stay selected.
            _ => {}
        });
    }
    // Typed core of `push_column`: fills the buffer up to `limit`, then for
    // each further value either admits it (replacing the current worst) or
    // clears the row's bit in `bitmap`.
    fn push_column_internal<T: ValueType>(&mut self, col: &Column, bitmap: &mut MutableBitmap)
    where for<'a> T::ScalarRef<'a>: Ord {
        let col = T::try_downcast_column(col).unwrap();
        for (i, value) in T::iter_column(&col).enumerate() {
            // Skip rows that an earlier filter already dropped.
            if !bitmap.get(i) {
                continue;
            }
            if self.data.len() < self.limit {
                self.data.push(T::upcast_scalar(T::to_owned_scalar(value)));
                // Heapify exactly once, when the buffer first reaches capacity.
                if self.data.len() == self.limit {
                    self.make_heap();
                }
            } else if !self.push_value(value) {
                bitmap.set(i, false);
            }
        }
    }
    // Tries to admit `value` into the (full) heap. Returns true when the heap
    // root was replaced, false when `value` cannot beat the current worst.
    #[inline]
    fn push_value<T: ValueType>(&mut self, value: T::ScalarRef<'_>) -> bool
    where for<'a> T::ScalarRef<'a>: Ord {
        let order = self.ordering();
        unsafe {
            // SAFETY: callers only invoke this once `data` has been filled to
            // `limit` (see push_column_internal).
            assume(self.data.len() == self.limit);
        }
        let data = self.data[0].as_ref();
        let data = T::try_downcast_scalar(&data).unwrap();
        let value = T::upcast_gat(value);
        if Ord::cmp(&data, &value) != order {
            drop(data);
            // Replace the root and sift it down to restore the heap.
            self.data[0] = T::upcast_scalar(T::to_owned_scalar(value));
            self.adjust();
            true
        } else {
            false
        }
    }
    // Returns true when, given a column's (min, max) bounds, no value of that
    // column could enter the heap. Always false until the heap is full.
    #[inline]
    pub fn never_match(&self, (min, max): &(Scalar, Scalar)) -> bool {
        if self.data.len() != self.limit {
            return false;
        }
        (self.asc && &self.data[0] < min) || (!self.asc && &self.data[0] > max)
    }
    // Like `never_match`, for a single scalar value.
    #[inline]
    pub fn never_match_value(&self, val: &Scalar) -> bool {
        if self.data.len() != self.limit {
            return false;
        }
        (self.asc && &self.data[0] < val) || (!self.asc && &self.data[0] > val)
    }
    // Like `never_match`, but checks every value in `col` individually.
    #[inline]
    pub fn never_match_any(&self, col: &Column) -> bool {
        if self.data.len() != self.limit {
            return false;
        }
        with_number_mapped_type!(|NUM_TYPE| match col.data_type() {
            DataType::Number(NumberDataType::NUM_TYPE) =>
                self.never_match_any_internal::<NumberType::<NUM_TYPE>>(col),
            DataType::String => self.never_match_any_internal::<StringType>(col),
            DataType::Timestamp => self.never_match_any_internal::<TimestampType>(col),
            DataType::Date => self.never_match_any_internal::<DateType>(col),
            _ => false,
        })
    }
    // Typed core of `never_match_any`: true only if no value of `col` could
    // beat the current heap root.
    fn never_match_any_internal<T: ValueType>(&self, col: &Column) -> bool
    where for<'a> T::ScalarRef<'a>: Ord {
        let col = T::try_downcast_column(col).unwrap();
        let data = self.data[0].as_ref();
        for val in T::iter_column(&col) {
            // NOTE(review): this downcast is loop-invariant and could be
            // hoisted above the loop.
            let data = T::try_downcast_scalar(&data).unwrap();
            if (self.asc && data >= val) || (!self.asc && data <= val) {
                return false;
            }
        }
        true
    }
    // Builds the heap invariant over the buffered values.
    fn make_heap(&mut self) {
        let ordering = self.ordering();
        let data = self.data.as_mut_slice();
        make_heap(data, &mut |a, b| a.cmp(b) == ordering);
    }
    // Restores the heap invariant after the root was replaced.
    fn adjust(&mut self) {
        let ordering = self.ordering();
        let data = self.data.as_mut_slice();
        adjust_heap(data, 0, data.len(), &mut |a, b| a.cmp(b) == ordering);
    }
    // Comparison direction used as the "is_less" predicate: ascending top-k
    // keeps a max-heap (root = largest kept), descending the reverse.
    fn ordering(&self) -> Ordering {
        if self.asc { Less } else { Less.reverse() }
    }
}
/// Arranges `v` into a binary heap ordered by `is_less`, in place.
#[inline]
fn make_heap<T, F>(v: &mut [T], is_less: &mut F)
where F: FnMut(&T, &T) -> bool {
    let len = v.len();
    // A slice of fewer than two elements is already a heap.
    if len < 2 {
        return;
    }
    // Sift down every internal node, starting from the last parent and
    // finishing at the root — the classic bottom-up heapify.
    for parent in (0..=(len - 2) / 2).rev() {
        adjust_heap(v, parent, len, is_less);
    }
}
/// `adjust_heap` performs a sift-down: the element at `hole_index` is moved
/// toward the leaves until the heap property defined by `is_less` holds for
/// the sub-heap rooted at `hole_index` (children in `[0, len)`).
#[inline]
fn adjust_heap<T, F>(v: &mut [T], hole_index: usize, len: usize, is_less: &mut F)
where F: FnMut(&T, &T) -> bool {
    let mut left_child = hole_index * 2 + 1;
    // SAFETY: we ensure hole_index point to a properly initialized value of type T
    let mut tmp = unsafe { mem::ManuallyDrop::new(ptr::read(&v[hole_index])) };
    let mut hole = InsertionHole {
        src: &mut *tmp,
        dest: &mut v[hole_index],
    };
    // Panic safety:
    //
    // If `is_less` panics at any point during the process, `hole` will get dropped and fill the
    // hole in `v` with the unconsumed range in `buf`, thus ensuring that `v` still holds every
    // object it initially held exactly once.
    // SAFETY:
    // we ensure src/dest point to a properly initialized value of type T
    // src is valid for reads of `count * size_of::()` bytes.
    // dest is valid for reads of `count * size_of::()` bytes.
    // Both `src` and `dst` are properly aligned.
    unsafe {
        while left_child < len {
            // SAFETY:
            // we ensure left_child and left_child + 1 are between [0, len)
            // Pick the "greater" (per is_less) of the two children.
            if left_child + 1 < len {
                left_child +=
                    is_less(v.get_unchecked(left_child), v.get_unchecked(left_child + 1)) as usize;
            }
            // SAFETY:
            // left_child and hole.dest point to a properly initialized value of type T
            // If the displaced element is still less than the child, move the
            // child up into the hole and descend; otherwise the hole is final.
            if is_less(&*tmp, v.get_unchecked(left_child)) {
                ptr::copy_nonoverlapping(&v[left_child], hole.dest, 1);
                hole.dest = &mut v[left_child];
            } else {
                break;
            }
            left_child = left_child * 2 + 1;
        }
    }
    // These codes is from std::sort_by
    // When dropped, copies from `src` into `dest`.
    // (`hole` drops here, writing the displaced element into its final slot.)
    struct InsertionHole<T> {
        src: *mut T,
        dest: *mut T,
    }
    impl<T> Drop for InsertionHole<T> {
        fn drop(&mut self) {
            // SAFETY:
            // we ensure src/dest point to a properly initialized value of type T
            // src is valid for reads of `count * size_of::()` bytes.
            // dest is valid for reads of `count * size_of::()` bytes.
            // Both `src` and `dst` are properly aligned.
            unsafe {
                ptr::copy_nonoverlapping(self.src, self.dest, 1);
            }
        }
    }
}
|
use cipher::{self, Mode};
use encoding::base64::*;
use std::fs;
/// Cryptopals challenge 10: decrypt a base64-encoded file with AES in CBC
/// mode under a fixed key and print the recovered plaintext.
fn main() {
    println!("🔓 Challenge 10");
    let cbc = cipher::new(Mode::CBC);
    // The ciphertext file is base64 broken across lines; concatenating the
    // lines strips the newlines before decoding.
    let encoded: String = fs::read_to_string("challenges/data/chal10.txt")
        .unwrap()
        .lines()
        .collect();
    let ciphertext = Base64::from_str(&encoded).unwrap().as_bytes();
    let key = b"YELLOW SUBMARINE".to_vec();
    let plaintext = cbc.decrypt(&key, &ciphertext);
    println!("decrypted message: \n{:?}", String::from_utf8(plaintext).unwrap());
}
|
// -*- rust -*-
import driver.session;
import front.ast;
import lib.llvm.False;
import lib.llvm.llvm;
import lib.llvm.mk_object_file;
import lib.llvm.mk_section_iter;
import middle.fold;
import middle.metadata;
import middle.trans;
import middle.ty;
import back.x86;
import util.common;
import util.common.span;
import std._str;
import std._uint;
import std._vec;
import std.ebml;
import std.fs;
import std.io;
import std.option;
import std.option.none;
import std.option.some;
import std.os;
import std.map.hashmap;
// TODO: map to a real type here.
// Crate-reader context: compiler session, cache of already-loaded crates
// (keyed by crate name), search paths, and the next crate number to assign.
type env = @rec(
    session.session sess,
    @hashmap[str, int] crate_cache,
    vec[str] library_search_paths,
    mutable int next_crate_num
);
// Outcome of resolving a name in external metadata: the resolved def id,
// or the identifier that could not be found.
tag resolve_result {
    rr_ok(ast.def_id);
    rr_not_found(ast.ident);
}
// Type decoding
// Compact string representation for ty.t values. API ty_str & parse_from_str.
// (The second has to be authed pure.) Extra parameters are for converting
// to/from def_ids in the string rep. Whatever format you choose should not
// contain pipe characters.
// Callback to translate defs to strs or back.
// Callback that translates a serialized def string back into a def_id.
type str_def = fn(str) -> ast.def_id;
// Parser state over a compact type string: the string, a mutable cursor,
// the total length, and the type store used to intern parsed types.
type pstate = rec(str rep, mutable uint pos, uint len,
                  @ty.type_store tystore);
// Returns the byte at the cursor without advancing; a space past the end.
fn peek(@pstate st) -> u8 {
    if (st.pos < st.len) {ret st.rep.(st.pos) as u8;}
    else {ret ' ' as u8;}
}
// Consumes and returns the byte at the cursor; fails when input is exhausted.
fn next(@pstate st) -> u8 {
    if (st.pos >= st.len) {fail;}
    auto ch = st.rep.(st.pos);
    st.pos = st.pos + 1u;
    ret ch as u8;
}
// Parses one type from the compact string `rep`, logging an error if the
// parse does not consume the whole string.
fn parse_ty_str(str rep, str_def sd, @ty.type_store tystore) -> @ty.t {
    auto len = _str.byte_len(rep);
    auto st = @rec(rep=rep, mutable pos=0u, len=len, tystore=tystore);
    auto result = parse_ty(st, sd);
    if (st.pos != len) {
        log_err "parse_ty_str: incomplete parse, stopped at byte "
            + _uint.to_str(st.pos, 10u) + " of "
            + _uint.to_str(len, 10u) + " in str '" + rep + "'";
    }
    ret result;
}
// Recursive-descent parser for the compact type encoding: the first byte
// selects the type constructor, composite forms recurse on their parts.
fn parse_ty(@pstate st, str_def sd) -> @ty.t {
    alt (next(st) as char) {
        case ('n') { ret ty.mk_nil(st.tystore); }
        case ('b') { ret ty.mk_bool(st.tystore); }
        case ('i') { ret ty.mk_int(st.tystore); }
        case ('u') { ret ty.mk_uint(st.tystore); }
        case ('l') { ret ty.mk_float(st.tystore); }
        // 'M' prefixes a machine type; the next byte picks width/signedness.
        case ('M') {
            alt (next(st) as char) {
                case ('b') { ret ty.mk_mach(st.tystore, common.ty_u8); }
                case ('w') { ret ty.mk_mach(st.tystore, common.ty_u16); }
                case ('l') { ret ty.mk_mach(st.tystore, common.ty_u32); }
                case ('d') { ret ty.mk_mach(st.tystore, common.ty_u64); }
                case ('B') { ret ty.mk_mach(st.tystore, common.ty_i8); }
                case ('W') { ret ty.mk_mach(st.tystore, common.ty_i16); }
                case ('L') { ret ty.mk_mach(st.tystore, common.ty_i32); }
                case ('D') { ret ty.mk_mach(st.tystore, common.ty_i64); }
                case ('f') { ret ty.mk_mach(st.tystore, common.ty_f32); }
                case ('F') { ret ty.mk_mach(st.tystore, common.ty_f64); }
            }
        }
        case ('c') { ret ty.mk_char(st.tystore); }
        case ('s') { ret ty.mk_str(st.tystore); }
        // Tag (enum) type: 't[' def '|' params... ']'.
        case ('t') {
            check(next(st) as char == '[');
            auto def = parse_def(st, sd);
            let vec[@ty.t] params = vec();
            while (peek(st) as char != ']') {
                params += vec(parse_ty(st, sd));
            }
            st.pos = st.pos + 1u;
            ret ty.mk_tag(st.tystore, def, params);
        }
        case ('p') { ret ty.mk_param(st.tystore, parse_int(st) as uint); }
        case ('@') { ret ty.mk_box(st.tystore, parse_mt(st, sd)); }
        case ('V') { ret ty.mk_vec(st.tystore, parse_mt(st, sd)); }
        case ('P') { ret ty.mk_port(st.tystore, parse_ty(st, sd)); }
        case ('C') { ret ty.mk_chan(st.tystore, parse_ty(st, sd)); }
        // Tuple: 'T[' member-mts... ']'.
        case ('T') {
            check(next(st) as char == '[');
            let vec[ty.mt] params = vec();
            while (peek(st) as char != ']') {
                params += vec(parse_mt(st, sd));
            }
            st.pos = st.pos + 1u;
            ret ty.mk_tup(st.tystore, params);
        }
        // Record: 'R[' (name '=' mt)... ']'.
        case ('R') {
            check(next(st) as char == '[');
            let vec[ty.field] fields = vec();
            while (peek(st) as char != ']') {
                auto name = "";
                while (peek(st) as char != '=') {
                    name += _str.unsafe_from_byte(next(st));
                }
                st.pos = st.pos + 1u;
                fields += vec(rec(ident=name, mt=parse_mt(st, sd)));
            }
            st.pos = st.pos + 1u;
            ret ty.mk_rec(st.tystore, fields);
        }
        // 'F' = fn, 'W' = iter: both share the fn-signature encoding.
        case ('F') {
            auto func = parse_ty_fn(st, sd);
            ret ty.mk_fn(st.tystore, ast.proto_fn, func._0, func._1);
        }
        case ('W') {
            auto func = parse_ty_fn(st, sd);
            ret ty.mk_fn(st.tystore, ast.proto_iter, func._0, func._1);
        }
        // Native fn: ABI byte followed by a fn signature.
        case ('N') {
            auto abi;
            alt (next(st) as char) {
                case ('r') {abi = ast.native_abi_rust;}
                case ('c') {abi = ast.native_abi_cdecl;}
                case ('l') {abi = ast.native_abi_llvm;}
            }
            auto func = parse_ty_fn(st, sd);
            ret ty.mk_native_fn(st.tystore,abi,func._0,func._1);
        }
        // Object: 'O[' (proto name sig)... ']' — one entry per method.
        case ('O') {
            check(next(st) as char == '[');
            let vec[ty.method] methods = vec();
            while (peek(st) as char != ']') {
                auto proto;
                alt (next(st) as char) {
                    case ('W') {proto = ast.proto_iter;}
                    case ('F') {proto = ast.proto_fn;}
                }
                auto name = "";
                while (peek(st) as char != '[') {
                    name += _str.unsafe_from_byte(next(st));
                }
                auto func = parse_ty_fn(st, sd);
                methods += vec(rec(proto=proto,
                                   ident=name,
                                   inputs=func._0,
                                   output=func._1));
            }
            st.pos += 1u;
            ret ty.mk_obj(st.tystore, methods);
        }
        case ('X') { ret ty.mk_var(st.tystore, parse_int(st)); }
        case ('E') { ret ty.mk_native(st.tystore); }
        case ('Y') { ret ty.mk_type(st.tystore); }
    }
}
// Parse a mutability-qualified type: a leading 'm' means mutable, '?' means
// maybe-mutable; any other character (not consumed) means immutable.
fn parse_mt(@pstate st, str_def sd) -> ty.mt {
    auto mut;
    alt (peek(st) as char) {
        case ('m') {next(st); mut = ast.mut;}
        case ('?') {next(st); mut = ast.maybe_mut;}
        case (_) {mut=ast.imm;}
    }
    ret rec(ty=parse_ty(st, sd), mut=mut);
}
// Accumulate characters up to the '|' terminator and hand the resulting
// string to the caller-supplied translator `sd`, which maps it to a def id.
fn parse_def(@pstate st, str_def sd) -> ast.def_id {
    auto def = "";
    while (peek(st) as char != '|') {
        def += _str.unsafe_from_byte(next(st));
    }
    st.pos = st.pos + 1u;  // skip the '|' terminator
    ret sd(def);
}
// Parse a non-negative decimal integer; stops at the first non-digit
// without consuming it. Returns 0 if no digits are present.
fn parse_int(@pstate st) -> int {
    auto n = 0;
    while (true) {
        auto cur = peek(st) as char;
        if (cur < '0' || cur > '9') {break;}
        st.pos = st.pos + 1u;
        n *= 10;
        n += (cur as int) - ('0' as int);
    }
    ret n;
}
// Parse a function type's argument list "[...]" followed by its return
// type. A '&' before an argument type marks pass-by-alias; the default
// mode is by-value.
fn parse_ty_fn(@pstate st, str_def sd) -> tup(vec[ty.arg], @ty.t) {
    check(next(st) as char == '[');
    let vec[ty.arg] inputs = vec();
    while (peek(st) as char != ']') {
        auto mode = ast.val;
        if (peek(st) as char == '&') {
            mode = ast.alias;
            st.pos = st.pos + 1u;
        }
        inputs += vec(rec(mode=mode, ty=parse_ty(st, sd)));
    }
    st.pos = st.pos + 1u;  // skip the ']'
    ret tup(inputs, parse_ty(st, sd));
}
// Rust metadata parsing
// Parse a serialized def id of the form "<crate_num>:<def_num>" (both
// decimal). Fails hard if the ':' separator is missing.
fn parse_def_id(vec[u8] buf) -> ast.def_id {
    auto colon_idx = 0u;
    auto len = _vec.len[u8](buf);
    while (colon_idx < len && buf.(colon_idx) != (':' as u8)) {
        colon_idx += 1u;
    }
    if (colon_idx == len) {
        log_err "didn't find ':' when parsing def id";
        fail;
    }
    auto crate_part = _vec.slice[u8](buf, 0u, colon_idx);
    auto def_part = _vec.slice[u8](buf, colon_idx + 1u, len);
    auto crate_num = _uint.parse_buf(crate_part, 10u) as int;
    auto def_num = _uint.parse_buf(def_part, 10u) as int;
    ret tup(crate_num, def_num);
}
// Look up an entry in an EBML hash-index doc: read the 256-entry bucket
// table (4 bytes per entry), walk the selected bucket's elements, and
// return the doc whose key matches `eq_fn`. Each bucket element is a
// 4-byte position followed by the key bytes.
fn lookup_hash(&ebml.doc d, fn(vec[u8]) -> bool eq_fn, uint hash)
        -> option.t[ebml.doc] {
    auto index = ebml.get_doc(d, metadata.tag_index);
    auto table = ebml.get_doc(index, metadata.tag_index_table);
    auto hash_pos = table.start + (hash % 256u) * 4u;
    auto pos = ebml.be_uint_from_bytes(d.data, hash_pos, 4u);
    auto bucket = ebml.doc_at(d.data, pos);
    // Awkward logic because we can't ret from foreach yet
    auto result = option.none[ebml.doc];
    auto belt = metadata.tag_index_buckets_bucket_elt;
    for each (ebml.doc elt in ebml.tagged_docs(bucket, belt)) {
        alt (result) {
            case (option.none[ebml.doc]) {
                auto pos = ebml.be_uint_from_bytes(elt.data, elt.start, 4u);
                if (eq_fn(_vec.slice[u8](elt.data, elt.start+4u, elt.end))) {
                    result = option.some[ebml.doc](ebml.doc_at(d.data, pos));
                }
            }
            case (_) {}
        }
    }
    ret result;
}
// Given a path and serialized crate metadata, returns the ID of the
// definition the path refers to.
// Resolve a dotted item path (e.g. "a.b.c") against the crate's path
// index; returns rr_ok with the def id on success, rr_not_found otherwise.
fn resolve_path(vec[ast.ident] path, vec[u8] data) -> resolve_result {
    fn eq_item(vec[u8] data, str s) -> bool {
        ret _str.eq(_str.unsafe_from_bytes(data), s);
    }
    auto s = _str.connect(path, ".");
    auto md = ebml.new_doc(data);
    auto paths = ebml.get_doc(md, metadata.tag_paths);
    auto eqer = bind eq_item(_, s);
    alt (lookup_hash(paths, eqer, metadata.hash_path(s))) {
        case (option.some[ebml.doc](?d)) {
            auto did_doc = ebml.get_doc(d, metadata.tag_def_id);
            ret rr_ok(parse_def_id(ebml.doc_data(did_doc)));
        }
        case (option.none[ebml.doc]) {
            ret rr_not_found(s);
        }
    }
}
// Look up an item doc by its def number in the items index; the key bytes
// start with a 4-byte big-endian item id.
fn maybe_find_item(int item_id, &ebml.doc items) -> option.t[ebml.doc] {
    fn eq_item(vec[u8] bytes, int item_id) -> bool {
        ret ebml.be_uint_from_bytes(bytes, 0u, 4u) as int == item_id;
    }
    auto eqer = bind eq_item(_, item_id);
    ret lookup_hash(items, eqer, metadata.hash_def_num(item_id));
}
// Like maybe_find_item, but callers assert the item exists: the alt has
// no none-arm, so a missing item is a fatal pattern-match failure.
fn find_item(int item_id, &ebml.doc items) -> ebml.doc {
    alt (maybe_find_item(item_id, items)) {
        case (option.some[ebml.doc](?d)) {ret d;}
    }
}
// Looks up an item in the given metadata and returns an EBML doc pointing
// to the item data.
fn lookup_item(int item_id, vec[u8] data) -> ebml.doc {
    auto items = ebml.get_doc(ebml.new_doc(data), metadata.tag_items);
    ret find_item(item_id, items);
}
// Read an item's kind byte (a char code such as 'f', 't', 'o'; see
// item_kind_to_str for the mapping) from its metadata doc.
fn item_kind(&ebml.doc item) -> u8 {
    auto kind = ebml.get_doc(item, metadata.tag_items_data_item_kind);
    ret ebml.doc_as_uint(kind) as u8;
}
// Read an item's link-time symbol name from its metadata doc.
fn item_symbol(&ebml.doc item) -> str {
    auto sym = ebml.get_doc(item, metadata.tag_items_data_item_symbol);
    ret _str.unsafe_from_bytes(ebml.doc_data(sym));
}
// For a tag-variant item, read the def id of the tag it belongs to.
fn variant_tag_id(&ebml.doc d) -> ast.def_id {
    auto tagdoc = ebml.get_doc(d, metadata.tag_items_data_item_tag_id);
    ret parse_def_id(ebml.doc_data(tagdoc));
}
// Decode an item's serialized type, rewriting external def ids so they
// are expressed relative to the crate being read (this_cnum).
fn item_type(&ebml.doc item, int this_cnum, @ty.type_store tystore) -> @ty.t {
    fn parse_external_def_id(int this_cnum, str s) -> ast.def_id {
        // FIXME: This is completely wrong when linking against a crate
        // that, in turn, links against another crate. We need a mapping
        // from crate ID to crate "meta" attributes as part of the crate
        // metadata.
        auto buf = _str.bytes(s);
        auto external_def_id = parse_def_id(buf);
        ret tup(this_cnum, external_def_id._1);
    }
    auto tp = ebml.get_doc(item, metadata.tag_items_data_item_type);
    auto s = _str.unsafe_from_bytes(ebml.doc_data(tp));
    ret parse_ty_str(s, bind parse_external_def_id(this_cnum, _), tystore);
}
// Read an item's type-parameter count; defaults to 0 when the tag is
// absent (the loop body only runs if the tagged doc exists).
fn item_ty_param_count(&ebml.doc item, int this_cnum) -> uint {
    let uint ty_param_count = 0u;
    auto tp = metadata.tag_items_data_item_ty_param_count;
    for each (ebml.doc p in ebml.tagged_docs(item, tp)) {
        ty_param_count = ebml.vint_at(ebml.doc_data(p), 0u)._0;
    }
    ret ty_param_count;
}
// Collect the def ids of a tag item's variants, re-homed to this crate.
fn tag_variant_ids(&ebml.doc item, int this_cnum) -> vec[ast.def_id] {
    let vec[ast.def_id] ids = vec();
    auto v = metadata.tag_items_data_item_variant;
    for each (ebml.doc p in ebml.tagged_docs(item, v)) {
        auto ext = parse_def_id(ebml.doc_data(p));
        _vec.push[ast.def_id](ids, tup(this_cnum, ext._1));
    }
    ret ids;
}
// Open an object file via LLVM and scan its sections for the Rust
// metadata section (named per the target, see x86.get_meta_sect_name);
// returns its raw bytes, or none if the file can't be read or has no
// metadata section.
fn get_metadata_section(str filename) -> option.t[vec[u8]] {
    auto mb = llvm.LLVMRustCreateMemoryBufferWithContentsOfFile
        (_str.buf(filename));
    if (mb as int == 0) {ret option.none[vec[u8]];}  // file open failed
    auto of = mk_object_file(mb);
    auto si = mk_section_iter(of.llof);
    while (llvm.LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False) {
        auto name_buf = llvm.LLVMGetSectionName(si.llsi);
        auto name = _str.str_from_cstr(name_buf);
        if (_str.eq(name, x86.get_meta_sect_name())) {
            auto cbuf = llvm.LLVMGetSectionContents(si.llsi);
            auto csz = llvm.LLVMGetSectionSize(si.llsi);
            auto cvbuf = cbuf as _vec.vbuf;
            ret option.some[vec[u8]](_vec.vec_from_vbuf[u8](cvbuf, csz));
        }
        llvm.LLVMMoveToNextSection(si.llsi);
    }
    ret option.none[vec[u8]];
}
// Search the library paths for the named crate, load its metadata
// section, and register it with the session under `cnum`. Fatal error
// if the crate cannot be found in any search path.
fn load_crate(session.session sess,
        int cnum,
        ast.ident ident,
        vec[str] library_search_paths) {
    auto filename = parser.default_native_name(sess, ident);
    for (str library_search_path in library_search_paths) {
        auto path = fs.connect(library_search_path, filename);
        alt (get_metadata_section(path)) {
            case (option.some[vec[u8]](?cvec)) {
                sess.set_external_crate(cnum, rec(name=ident, data=cvec));
                ret;
            }
            case (_) {}
        }
    }
    log_err #fmt("can't open crate '%s' (looked for '%s' in lib search path)",
        ident, filename);
    fail;
}
// Fold hook for `use` view items: load the referenced crate on first
// sight (caching by ident so each crate is loaded once) and rewrite the
// view item to record its assigned crate number.
fn fold_view_item_use(&env e, &span sp, ast.ident ident,
        vec[@ast.meta_item] meta_items, ast.def_id id, option.t[int] cnum_opt)
        -> @ast.view_item {
    auto cnum;
    if (!e.crate_cache.contains_key(ident)) {
        cnum = e.next_crate_num;
        load_crate(e.sess, cnum, ident, e.library_search_paths);
        e.crate_cache.insert(ident, e.next_crate_num);
        e.next_crate_num += 1;
    } else {
        cnum = e.crate_cache.get(ident);
    }
    auto viu = ast.view_item_use(ident, meta_items, id, some[int](cnum));
    ret @fold.respan[ast.view_item_](sp, viu);
}
// Reads external crates referenced by "use" directives.
fn read_crates(session.session sess,
        @ast.crate crate,
        vec[str] library_search_paths) -> @ast.crate {
    auto e = @rec(
        sess=sess,
        crate_cache=@common.new_str_hash[int](),
        library_search_paths=library_search_paths,
        mutable next_crate_num=1  // crate number 0 is the local crate
    );
    auto f = fold_view_item_use;
    auto fld = @rec(fold_view_item_use=f with *fold.new_identity_fold[env]());
    ret fold.fold_crate[env](e, fld, crate);
}
// Whether an item kind (see item_kind_to_str for the char codes) can
// carry type parameters; consts, native types/mods and mods cannot.
// Fatal error on an unrecognized kind char.
fn kind_has_type_params(u8 kind_ch) -> bool {
    // FIXME: It'd be great if we had u8 char literals.
    if (kind_ch == ('c' as u8)) { ret false; }
    else if (kind_ch == ('f' as u8)) { ret true; }
    else if (kind_ch == ('F' as u8)) { ret true; }
    else if (kind_ch == ('y' as u8)) { ret true; }
    else if (kind_ch == ('o' as u8)) { ret true; }
    else if (kind_ch == ('t' as u8)) { ret true; }
    else if (kind_ch == ('T' as u8)) { ret false; }
    else if (kind_ch == ('m' as u8)) { ret false; }
    else if (kind_ch == ('n' as u8)) { ret false; }
    else if (kind_ch == ('v' as u8)) { ret true; }
    else {
        log_err #fmt("kind_has_type_params(): unknown kind char: %d",
            kind_ch as int);
        fail;
    }
}
// Crate metadata queries
// Resolve a path inside an external crate to an ast.def, translating the
// item's kind char into the corresponding def constructor. Def ids are
// re-homed to `cnum` so they are valid in the current compilation.
fn lookup_def(session.session sess, int cnum, vec[ast.ident] path)
        -> option.t[ast.def] {
    auto data = sess.get_external_crate(cnum).data;
    auto did;
    alt (resolve_path(path, data)) {
        case (rr_ok(?di)) { did = di; }
        case (rr_not_found(?name)) {
            ret none[ast.def];
        }
    }
    auto item = lookup_item(did._1, data);
    auto kind_ch = item_kind(item);
    did = tup(cnum, did._1);  // re-home the def id to the external crate
    // FIXME: It'd be great if we had u8 char literals.
    auto def;
    if (kind_ch == ('c' as u8)) { def = ast.def_const(did); }
    else if (kind_ch == ('f' as u8)) { def = ast.def_fn(did); }
    else if (kind_ch == ('F' as u8)) { def = ast.def_native_fn(did); }
    else if (kind_ch == ('y' as u8)) { def = ast.def_ty(did); }
    else if (kind_ch == ('o' as u8)) { def = ast.def_obj(did); }
    else if (kind_ch == ('T' as u8)) { def = ast.def_native_ty(did); }
    else if (kind_ch == ('t' as u8)) {
        // We treat references to tags as references to types.
        def = ast.def_ty(did);
    } else if (kind_ch == ('m' as u8)) { def = ast.def_mod(did); }
    else if (kind_ch == ('n' as u8)) { def = ast.def_native_mod(did); }
    else if (kind_ch == ('v' as u8)) {
        auto tid = variant_tag_id(item);
        tid = tup(cnum, tid._1);
        def = ast.def_variant(tid, did);
    } else {
        log_err #fmt("lookup_def(): unknown kind char: %d", kind_ch as int);
        fail;
    }
    ret some[ast.def](def);
}
// Fetch an external item's type and type-parameter count; kinds that
// can't have type params (see kind_has_type_params) report 0.
fn get_type(session.session sess, @ty.type_store tystore, ast.def_id def)
        -> ty.ty_param_count_and_ty {
    auto external_crate_id = def._0;
    auto data = sess.get_external_crate(external_crate_id).data;
    auto item = lookup_item(def._1, data);
    auto t = item_type(item, external_crate_id, tystore);
    auto tp_count;
    auto kind_ch = item_kind(item);
    auto has_ty_params = kind_has_type_params(kind_ch);
    if (has_ty_params) {
        tp_count = item_ty_param_count(item, external_crate_id);
    } else {
        tp_count = 0u;
    }
    ret tup(tp_count, t);
}
// Fetch an external item's link-time symbol name.
fn get_symbol(session.session sess, ast.def_id def) -> str {
    auto external_crate_id = def._0;
    auto data = sess.get_external_crate(external_crate_id).data;
    auto item = lookup_item(def._1, data);
    ret item_symbol(item);
}
// Build variant_info records for every variant of an external tag: each
// variant's ctor type is decoded and, when it is a fn type, its argument
// types are extracted (a non-fn ctor type means a nullary variant).
fn get_tag_variants(session.session sess,
        @ty.type_store tystore,
        ast.def_id def) -> vec[trans.variant_info] {
    auto external_crate_id = def._0;
    auto data = sess.get_external_crate(external_crate_id).data;
    auto items = ebml.get_doc(ebml.new_doc(data), metadata.tag_items);
    auto item = find_item(def._1, items);
    let vec[trans.variant_info] infos = vec();
    auto variant_ids = tag_variant_ids(item, external_crate_id);
    for (ast.def_id did in variant_ids) {
        auto item = find_item(did._1, items);
        auto ctor_ty = item_type(item, external_crate_id, tystore);
        let vec[@ty.t] arg_tys = vec();
        alt (ctor_ty.struct) {
            case (ty.ty_fn(_, ?args, _)) {
                for (ty.arg a in args) {
                    arg_tys += vec(a.ty);
                }
            }
            case (_) {
                // Nullary tag variant.
            }
        }
        infos += vec(rec(args=arg_tys, ctor_ty=ctor_ty, id=did));
    }
    ret infos;
}
// Print a human-readable listing of the metadata in the given object
// file, or a diagnostic if no metadata section is present.
fn list_file_metadata(str path, io.writer out) {
    alt (get_metadata_section(path)) {
        case (option.some[vec[u8]](?bytes)) {
            list_crate_metadata(bytes, out);
        }
        case (option.none[vec[u8]]) {
            out.write_str("Could not find metadata in " + path + ".\n");
        }
    }
}
// Decode a path-index element: a 4-byte big-endian item position
// followed by the path string itself.
fn read_path(&ebml.doc d) -> tup(str, uint) {
    auto desc = ebml.doc_data(d);
    auto pos = ebml.be_uint_from_bytes(desc, 0u, 4u);
    auto pathbytes = _vec.slice[u8](desc, 4u, _vec.len[u8](desc));
    auto path = _str.unsafe_from_bytes(pathbytes);
    ret tup(path, pos);
}
// Walk every bucket of the crate's path index and print each path with a
// short description of the item it refers to.
fn list_crate_metadata(vec[u8] bytes, io.writer out) {
    auto md = ebml.new_doc(bytes);
    auto paths = ebml.get_doc(md, metadata.tag_paths);
    auto items = ebml.get_doc(md, metadata.tag_items);
    auto index = ebml.get_doc(paths, metadata.tag_index);
    auto bs = ebml.get_doc(index, metadata.tag_index_buckets);
    for each (ebml.doc bucket in
            ebml.tagged_docs(bs, metadata.tag_index_buckets_bucket)) {
        auto et = metadata.tag_index_buckets_bucket_elt;
        for each (ebml.doc elt in ebml.tagged_docs(bucket, et)) {
            auto data = read_path(elt);
            auto def = ebml.doc_at(bytes, data._1);
            auto did_doc = ebml.get_doc(def, metadata.tag_def_id);
            auto did = parse_def_id(ebml.doc_data(did_doc));
            out.write_str(#fmt("%s (%s)\n", data._0,
                describe_def(items, did)));
        }
    }
}
// Describe a def id as a short kind string; defs from other crates are
// just labeled "external".
fn describe_def(&ebml.doc items, ast.def_id id) -> str {
    if (id._0 != 0) {ret "external";}
    alt (maybe_find_item(id._1 as int, items)) {
        case (option.some[ebml.doc](?item)) {
            ret item_kind_to_str(item_kind(item));
        }
        case (option.none[ebml.doc]) {
            ret "??"; // Native modules don't seem to get item entries.
        }
    }
}
// Map an item kind char to its display name. Note: no default arm, so an
// unknown kind is a fatal pattern-match failure.
fn item_kind_to_str(u8 kind) -> str {
    alt (kind as char) {
        case ('c') {ret "const";}
        case ('f') {ret "fn";}
        case ('F') {ret "native fn";}
        case ('y') {ret "type";}
        case ('o') {ret "obj";}
        case ('T') {ret "native type";}
        case ('t') {ret "type";}
        case ('m') {ret "mod";}
        case ('n') {ret "native mod";}
        case ('v') {ret "tag";}
    }
}
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// compile-command: "make -k -C $RBUILD 2>&1 | sed -e 's/\\/x\\//x:\\//g'";
// End:
|
mod websocket;
use crate::lttp::{
app_state::Update,
server_config::ServerConfigUpdate,
AppState,
DungeonState,
DungeonUpdate,
GameState,
LocationState,
LocationUpdate,
ServerConfig,
};
use axum::{
extract::{
self,
ws::{
Message,
WebSocket,
WebSocketUpgrade,
},
Extension,
Path,
},
response::{
IntoResponse,
Json,
},
routing::{
get,
post,
},
Router,
};
use serde_json::json;
use std::sync::Arc;
use tower::ServiceBuilder;
use tower_http::cors::CorsLayer;
use tracing::info;
/// Build the HTTP router: REST endpoints for server config, dungeon,
/// game and location state, plus a websocket endpoint for live updates.
/// The shared `AppState` is injected into every handler via an
/// `Extension` layer, and CORS is left fully permissive.
pub fn build(app_state: Arc<AppState>) -> Router {
    let cors_layer = CorsLayer::permissive();
    Router::new()
        .route("/config", get(get_config).post(post_config))
        .route("/dungeon_state", get(get_dungeon_state))
        .route("/dungeon_state/:dungeon", post(post_dungeon_state))
        .route("/game_state", get(get_game_state))
        .route("/location_state", get(get_location_state))
        .route("/location_state/:location", post(post_location_state))
        .route("/ws", get(websocket_upgrade_handler))
        .layer(ServiceBuilder::new().layer(cors_layer).layer(Extension(app_state)))
}
#[allow(clippy::unused_async)]
/// GET /config — return the current server configuration as JSON.
/// A poisoned lock is reported to the client as a plain error string.
async fn get_config(Extension(app_state): Extension<Arc<AppState>>) -> impl IntoResponse {
    let server_config = match app_state.server_config.read() {
        Ok(ac) => ac.clone(),
        Err(e) => return Err(format!("Unable to get app config: {e:?}")),
    };
    Ok(Json(json!(server_config)))
}
#[allow(clippy::unused_async)]
/// POST /config — apply a partial server-config update, then echo back
/// the full resulting configuration as JSON.
async fn post_config(
    Extension(app_state): Extension<Arc<AppState>>,
    extract::Json(config_update): extract::Json<ServerConfigUpdate>,
) -> impl IntoResponse {
    info!("Received server config update: {:?}", config_update);
    if let Err(e) = app_state.update_server_config(config_update) {
        return Err(format!("Unable to update server config: {e:?}"));
    };
    // Re-read so the response reflects the state actually stored.
    let server_config = match app_state.server_config.read() {
        Ok(sc) => sc.clone(),
        Err(e) => return Err(format!("Unable to get new server config: {e:?}")),
    };
    Ok(Json(json!(server_config)))
}
#[allow(clippy::unused_async)]
/// GET /game_state — return the current game state as JSON.
async fn get_game_state(Extension(app_state): Extension<Arc<AppState>>) -> impl IntoResponse {
    let game_state = match app_state.game_state.read() {
        Ok(gs) => gs.clone(),
        Err(e) => return Err(format!("Unable to get game state: {e:?}")),
    };
    Ok(Json(json!(game_state)))
}
#[allow(clippy::unused_async)]
/// GET /location_state — return only the `locations` collection of the
/// location state as JSON.
async fn get_location_state(Extension(app_state): Extension<Arc<AppState>>) -> impl IntoResponse {
    let location_state = match app_state.location_state.read() {
        Ok(ls) => ls.clone(),
        Err(e) => return Err(format!("Unable to get location state: {e:?}")),
    };
    Ok(Json(json!(location_state.locations)))
}
#[allow(clippy::unused_async)]
/// GET /dungeon_state — return only the `dungeons` collection of the
/// dungeon state as JSON.
async fn get_dungeon_state(Extension(app_state): Extension<Arc<AppState>>) -> impl IntoResponse {
    let dungeon_state = match app_state.dungeon_state.read() {
        Ok(ds) => ds.clone(),
        Err(e) => return Err(format!("Unable to get dungeon state: {e:?}")),
    };
    Ok(Json(json!(dungeon_state.dungeons)))
}
#[allow(clippy::unused_async)]
/// POST /dungeon_state/:dungeon — apply an update to the named dungeon
/// and respond with that dungeon's new state as JSON.
async fn post_dungeon_state(
    Path(dungeon): Path<String>,
    Extension(app_state): Extension<Arc<AppState>>,
    extract::Json(dungeon_update): extract::Json<DungeonUpdate>,
) -> impl IntoResponse {
    if let Err(e) = app_state.set_dungeon_state(&dungeon, dungeon_update) {
        return Err(format!("Unable to set dungeon state: {e:?}"));
    };
    // Re-read so the response reflects the state actually stored.
    let new_state = match app_state.dungeon_state.read() {
        Ok(ds) => ds.get(&dungeon),
        Err(e) => return Err(format!("Unable to get dungeon state: {e:?}")),
    };
    Ok(Json(json!(new_state)))
}
#[allow(clippy::unused_async)]
/// POST /location_state/:location — apply an update to the named
/// location and respond with that location's new state as JSON.
async fn post_location_state(
    Path(location): Path<String>,
    Extension(app_state): Extension<Arc<AppState>>,
    extract::Json(location_update): extract::Json<LocationUpdate>,
) -> impl IntoResponse {
    if let Err(e) = app_state.set_location_state(&location, location_update) {
        return Err(format!("Unable to set location state: {e:?}"));
    };
    // Re-read so the response reflects the state actually stored.
    let new_state = match app_state.location_state.read() {
        Ok(ls) => ls.get(&location),
        Err(e) => return Err(format!("Unable to get location state: {e:?}")),
    };
    Ok(Json(json!(new_state)))
}
#[allow(clippy::unused_async)]
/// GET /ws — upgrade the connection to a websocket and hand it to
/// `websocket_handler` together with the shared application state.
async fn websocket_upgrade_handler(
    ws: WebSocketUpgrade,
    Extension(app_state): Extension<Arc<AppState>>,
) -> impl IntoResponse {
    ws.on_upgrade(|socket| websocket_handler(socket, Extension(app_state)))
}
#[allow(clippy::unused_async)]
/// Per-connection websocket loop: first push a full snapshot (items,
/// dungeons, locations) to the new client, then forward broadcast update
/// notifications for as long as the broadcast channel stays open. Send
/// failures are deliberately ignored (`.ok()`) — a dropped client simply
/// stops receiving.
async fn websocket_handler(mut socket: WebSocket, Extension(app_state): Extension<Arc<AppState>>) {
    // Initial snapshot: game state ("Item" message) ...
    if let Some(game_state) = clone_game_state(&app_state) {
        if let Ok(message) = serde_json::to_string(&websocket::ServerMessage::Item(game_state)) {
            socket.send(Message::Text(message)).await.ok();
        }
    }
    // ... then dungeons ...
    if let Some(dungeon_state) = clone_dungeon_state(&app_state) {
        if let Ok(message) =
            serde_json::to_string(&websocket::ServerMessage::Dungeon(dungeon_state.dungeons))
        {
            socket.send(Message::Text(message)).await.ok();
        }
    }
    // ... then locations.
    if let Some(location_state) = clone_location_state(&app_state) {
        if let Ok(message) =
            serde_json::to_string(&websocket::ServerMessage::Location(location_state.locations))
        {
            socket.send(Message::Text(message)).await.ok();
        }
    }
    // Subscribing after the snapshot means updates raced in between could
    // be missed; the next broadcast of that kind re-sends the full state.
    let mut updates = app_state.update_sender.subscribe();
    while let Ok(update_type) = updates.recv().await {
        // The broadcast only carries the *kind* of change; re-read the
        // current state and send it in full.
        let update_message = match update_type {
            Update::Items => clone_game_state(&app_state).map(websocket::ServerMessage::Item),
            Update::Dungeons => {
                clone_dungeon_state(&app_state)
                    .map(|ds| websocket::ServerMessage::Dungeon(ds.dungeons))
            }
            Update::Locations => {
                clone_location_state(&app_state)
                    .map(|ls| websocket::ServerMessage::Location(ls.locations))
            }
            Update::Config => clone_server_config(&app_state).map(websocket::ServerMessage::Config),
        };
        if let Some(message) = update_message {
            if let Ok(string_message) = serde_json::to_string(&message) {
                socket.send(Message::Text(string_message)).await.ok();
            }
        }
    }
}
/// Snapshot the current game state; `None` if the lock is poisoned.
fn clone_game_state(app_state: &Arc<AppState>) -> Option<GameState> {
    // `.ok()` replaces the manual `if let Ok(..) { Some(..) } else { None }`.
    app_state.game_state.read().map(|gs| gs.clone()).ok()
}
/// Snapshot the current dungeon state; `None` if the lock is poisoned.
fn clone_dungeon_state(app_state: &Arc<AppState>) -> Option<DungeonState> {
    // `.ok()` replaces the manual `if let Ok(..) { Some(..) } else { None }`.
    app_state.dungeon_state.read().map(|ds| ds.clone()).ok()
}
/// Snapshot the current location state; `None` if the lock is poisoned.
fn clone_location_state(app_state: &Arc<AppState>) -> Option<LocationState> {
    // `.ok()` replaces the manual `if let Ok(..) { Some(..) } else { None }`.
    app_state.location_state.read().map(|ls| ls.clone()).ok()
}
/// Snapshot the current server config; `None` if the lock is poisoned.
fn clone_server_config(app_state: &Arc<AppState>) -> Option<ServerConfig> {
    // `.ok()` replaces the manual `if let Ok(..) { Some(..) } else { None }`.
    app_state.server_config.read().map(|sc| sc.clone()).ok()
}
|
extern crate rayon_logs;
use itertools::Itertools;
use itertools::{kmerge, merge};
use rayon::prelude::*;
use rayon::{join, join_context, ThreadPool, ThreadPoolBuilder};
/// k-way merge of `n` consecutive sorted runs of `input` into `buffer`,
/// then copy the result back into `input`. `buffer` must be at least as
/// long as `input`.
/// NOTE(review): `chunks_mut` panics when its chunk size is 0, i.e. when
/// `n > input.len()` — TODO confirm callers guarantee `n <= input.len()`.
pub fn merge_n(input: &mut [u64], buffer: &mut [u64], n: usize) {
    let chunksize = input.len() / n;
    // If len is not divisible by n this yields n (or n+1) chunks; kmerge
    // handles any number of runs.
    let inputs: Vec<&mut [u64]> = input.chunks_mut(chunksize).collect();
    buffer
        .iter_mut()
        .zip(kmerge(inputs))
        .for_each(|(o, i)| *o = *i);
    input.iter_mut().zip(buffer).for_each(|(o, i)| *o = *i);
}
/// 2-way merge of the two sorted halves of `input` into `buffer`, then
/// copy the result back into `input`. `buffer` must be at least as long
/// as `input`.
pub fn merge_2(input: &mut [u64], buffer: &mut [u64]) {
    let chunksize = input.len() / 2;
    let inputs: Vec<&mut [u64]> = input.chunks_mut(chunksize).collect();
    buffer
        .iter_mut()
        .zip(inputs[0].iter().merge(inputs[1].iter()))
        .for_each(|(o, i)| *o = *i);
    input.iter_mut().zip(buffer).for_each(|(o, i)| *o = *i);
}
/// Sequential n-way mergesort: split `input` into `split` chunks, sort
/// each recursively, k-way merge into `buffer`, copy back into `input`.
/// `buffer` is scratch space of at least `input.len()`.
pub fn mergesort_n(input: &mut [u64], buffer: &mut [u64], split: usize) {
    if input.len() == 0 || input.len() == 1 {
        // those are sorted by default
        return;
    }
    let mut chunksize = input.len() / split;
    if chunksize == 0 || input.len() % split != 0 {
        // if we have less elements than tasks (chunsize == 0)
        // just use a few less tasks
        // if we can't evently divide input on tasks, we give the first tasks a bit more
        chunksize = chunksize + 1;
    }
    let mut inputs: Vec<&mut [u64]> = input.chunks_mut(chunksize).collect();
    let buffers: Vec<&mut [u64]> = buffer.chunks_mut(chunksize).collect();
    inputs.iter_mut().zip(buffers).for_each(|(i, b)| {
        mergesort_n(i, b, split);
    });
    buffer
        .iter_mut()
        .zip(kmerge(inputs))
        .for_each(|(o, i)| *o = *i);
    input.iter_mut().zip(buffer).for_each(|(o, i)| *o = *i); // write back
}
/// Sequential binary mergesort: recursively sort both halves in place,
/// merge them into `buffer`, then copy the merged result back into
/// `input`. `buffer` is scratch space of at least `input.len()`.
pub fn mergesort_2(input: &mut [u64], buffer: &mut [u64]) {
    if input.len() == 0 || input.len() == 1 {
        // those are sorted by default
        return;
    }
    let (input1, input2) = input.split_at_mut(input.len() / 2);
    let (buffer1, buffer2) = buffer.split_at_mut(buffer.len() / 2);
    mergesort_2(input1, buffer1);
    mergesort_2(input2, buffer2);
    buffer
        .iter_mut()
        .zip(input1.iter().merge(input2.iter()))
        .for_each(|(o, i)| *o = *i);
    input.iter_mut().zip(buffer).for_each(|(o, i)| *o = *i); // write back
}
/// n-way mergesort with a recursion cutoff: at `level == 0` fall back to
/// the standard-library sort; otherwise split into `split` chunks, sort
/// each chunk *in parallel* with the sequential `mergesort_n`, then
/// k-way merge into `buffer` and copy back into `input`.
/// NOTE(review): `level` is only consulted at this top call — the
/// parallel chunk sorts use plain `mergesort_n`, which ignores it.
pub fn mergesort_n_stop(input: &mut [u64], buffer: &mut [u64], split: usize, level: u64) {
    if level == 0 {
        input.sort();
        return;
    }
    let mut chunksize = input.len() / split;
    if chunksize == 0 || input.len() % split != 0 {
        // if we have less elements than tasks (chunsize == 0)
        // just use a few less tasks
        // if we can't evently divide input on tasks, we give the first tasks a bit more
        chunksize = chunksize + 1;
    }
    let mut inputs: Vec<&mut [u64]> = input.chunks_mut(chunksize).collect();
    let buffers: Vec<&mut [u64]> = buffer.chunks_mut(chunksize).collect();
    // inputs.iter().zip(buffers).collect().par_iter();
    inputs
        .iter_mut()
        .zip(buffers)
        .collect::<Vec<(&mut &mut [u64], &mut [u64])>>()
        .par_iter_mut()
        .for_each(|(i, b)| {
            //inputs.par_iter_mut().zip(buffers).for_each(|(i, b)| {
            mergesort_n(i, b, split);
        });
    buffer
        .iter_mut()
        .zip(kmerge(inputs))
        .for_each(|(o, i)| *o = *i);
    input.iter_mut().zip(buffer).for_each(|(o, i)| *o = *i); // write back
}
/// Binary mergesort variant that ping-pongs the buffer roles: the halves
/// of `buffer` are sorted (using the halves of `input` as their scratch)
/// and then merged directly into `input`, saving the write-back copy the
/// other variants do.
/// NOTE(review): at `level == 0` this sorts whatever currently *is* in
/// the buffer halves — so the result is only correct if `buffer` holds a
/// copy of the data to be sorted on entry. TODO confirm the caller's
/// contract; as written, calling it like `mergesort_2` (uninitialized
/// buffer) would produce garbage.
pub fn mergesort_2_stop(input: &mut [u64], buffer: &mut [u64], level: u64) {
    if level == 0 {
        input.sort();
        return;
    }
    let (input1, input2) = input.split_at_mut(input.len() / 2);
    let (buffer1, buffer2) = buffer.split_at_mut(buffer.len() / 2);
    // Roles swapped on recursion: the buffer halves are the data here.
    mergesort_2_stop(buffer1, input1, level - 1);
    mergesort_2_stop(buffer2, input2, level - 1);
    input
        .iter_mut()
        .zip(buffer1.iter().merge(buffer2.iter()))
        .for_each(|(o, i)| *o = *i);
}
/// Parallel binary mergesort via rayon_logs::join_context: both halves
/// are sorted concurrently; when the second closure observes that it was
/// stolen to another worker (`c.migrated()`), its remaining recursion
/// depth is reset to 2 instead of decremented, so stolen subtrees keep
/// splitting a little before falling back to `sort()` at level 0.
pub fn parallel_mergesort_2(input: &mut [u64], buffer: &mut [u64], level: u64) {
    if level == 0 {
        input.sort();
        return;
    }
    let (input1, input2) = input.split_at_mut(input.len() / 2);
    let (buffer1, buffer2) = buffer.split_at_mut(buffer.len() / 2);
    rayon_logs::join_context(
        |_| parallel_mergesort_2(input1, buffer1, level - 1),
        |c| {
            let level = if c.migrated() { 2 } else { level - 1 };
            parallel_mergesort_2(input2, buffer2, level);
        },
    );
    buffer
        .iter_mut()
        .zip(input1.iter().merge(input2.iter()))
        .for_each(|(o, i)| *o = *i);
    input.iter_mut().zip(buffer).for_each(|(o, i)| *o = *i); // write back
}
/// Parallel n-way mergesort: the `split` chunks are sorted through a
/// logged parallel iterator, then k-way merged into `buffer` and copied
/// back. Mirrors the `migrated()` trick of `parallel_mergesort_2`, but
/// since par_iter has no migration callback it compares thread indices
/// instead: a chunk that runs on a different thread than the caller gets
/// its level reset to 2.
/// NOTE(review): `current_thread_index().unwrap()` panics if called from
/// outside a rayon thread pool — TODO confirm callers always run inside
/// a pool.
pub fn parallel_mergesort_n(input: &mut [u64], buffer: &mut [u64], split: usize, level: u64) {
    if level == 0 {
        input.sort();
        return;
    }
    let mut chunksize = input.len() / split;
    if chunksize == 0 || input.len() % split != 0 {
        // if we have less elements than tasks (chunsize == 0)
        // just use a few less tasks
        // if we can't evently divide input on tasks, we give the first tasks a bit more
        chunksize = chunksize + 1;
    }
    let mut inputs: Vec<&mut [u64]> = input.chunks_mut(chunksize).collect();
    let buffers: Vec<&mut [u64]> = buffer.chunks_mut(chunksize).collect();
    /*
    inputs.par_iter_mut().zip(buffers).for_each(|(i, b)| {
        parallel_mergesort_n(i, b, split, level - 1);
    });
    */
    // join can only do 2 tasks, we need n. par_iter can't check if a process is migrated, so we do
    // that manually with current_thread_index()
    let idx = rayon::current_thread_index().unwrap();
    rayon_logs::Logged::new(inputs.par_iter_mut().zip(buffers)).for_each(|(i, b)| {
        let level = if rayon::current_thread_index().unwrap() == idx {
            level - 1
        } else {
            2
        };
        parallel_mergesort_n(i, b, split, level);
    });
    buffer
        .iter_mut()
        .zip(kmerge(inputs))
        .for_each(|(o, i)| *o = *i);
    input.iter_mut().zip(buffer).for_each(|(o, i)| *o = *i); // write back
}
/// Reference implementation mimicking rayon's own parallel merge sort
/// strategy: recurse in parallel down to 2000-element leaves, sort each
/// leaf with the standard library, merge pairwise through `buffer`.
pub fn parallel_mergesort_rayon(input: &mut [u64], buffer: &mut [u64]) {
    // basically how the paralallism is implemented in rayon. Just use chunks of 2000 elements.
    // They also have parallel merging and some more stuff,
    if input.len() <= 2000 {
        input.sort();
        return;
    }
    let (input1, input2) = input.split_at_mut(input.len() / 2);
    let (buffer1, buffer2) = buffer.split_at_mut(buffer.len() / 2);
    rayon_logs::join(
        || parallel_mergesort_rayon(input1, buffer1),
        || parallel_mergesort_rayon(input2, buffer2),
    );
    buffer
        .iter_mut()
        .zip(input1.iter().merge(input2.iter()))
        .for_each(|(o, i)| *o = *i);
    input.iter_mut().zip(buffer).for_each(|(o, i)| *o = *i); // write back
}
|
// Copyright (c) 2020 DarkWeb Design
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
/// Return part of a string.
///
/// # Description
///
/// Returns the portion of string specified by the start and length parameters.
/// Positions are counted in characters (Unicode scalar values), not bytes, so
/// multi-byte characters count as one position.
///
/// # Parameters
///
/// **start**
///
/// If start is non-negative, the returned string will start at the start'th position in string,
/// counting from zero. For instance, in the string 'abcdef', the character at position 0 is 'a',
/// the character at position 2 is 'c', and so forth.
///
/// If start is negative, the returned string will start at the start'th character from the end of
/// string.
///
/// If string is less than start characters long, *None* will be returned.
///
/// **length**
///
/// If length is given and is positive, the string returned will contain at most length characters
/// beginning from start (depending on the length of string).
///
/// If length is given and is negative, then that many characters will be omitted from the end of
/// string (after the start position has been calculated when a start is negative). If start denotes
/// the position of this truncation or beyond, *None* will be returned.
///
/// If length is given and is 0, *None* will be returned.
///
/// # Examples
///
/// Example #1 substr() examples
///
/// ```
/// use phpify::string::substr;
///
/// assert_eq!(substr("abcdef", 1, std::isize::MAX).unwrap(), "bcdef");
/// assert_eq!(substr("abcdef", 1, 3).unwrap(), "bcd");
/// assert_eq!(substr("abcdef", 0, 4).unwrap(), "abcd");
/// assert_eq!(substr("abcdef", 0, 8).unwrap(), "abcdef");
/// assert_eq!(substr("abcdef", -1, 1).unwrap(), "f");
/// ```
///
/// Example #2 using a negative start
///
/// ```
/// use phpify::string::substr;
///
/// assert_eq!(substr("abcdef", -1, std::isize::MAX).unwrap(), "f");
/// assert_eq!(substr("abcdef", -2, std::isize::MAX).unwrap(), "ef");
/// assert_eq!(substr("abcdef", -3, 1).unwrap(), "d");
/// ```
///
/// Example #3 using a negative length
///
/// ```
/// use phpify::string::substr;
///
/// assert_eq!(substr("abcdef", 0, -1).unwrap(), "abcde");
/// assert_eq!(substr("abcdef", 2, -1).unwrap(), "cde");
/// assert_eq!(substr("abcdef", 4, -4), None);
/// assert_eq!(substr("abcdef", -3, -1).unwrap(), "de");
/// ```
pub fn substr<S>(string: S, start: isize, length: isize) -> Option<String>
where
    S: AsRef<str> {
    let string = string.as_ref();
    let mut start = start;
    let mut length = length;
    // Measure in characters, not bytes, so the bounds checks below agree
    // with the char-based extraction at the end (fixes inconsistent
    // results for non-ASCII input, where byte length > char count).
    let string_length = string.chars().count() as isize;
    if start > 0 && start >= string_length {
        return None;
    }
    if length == 0 {
        return None;
    }
    if start < 0 {
        // Negative start counts from the end; clamp to the beginning.
        start = string_length + start;
        if start < 0 {
            start = 0;
        }
    }
    if length < 0 {
        // Negative length omits characters from the end; None if the
        // truncation point is at or before start.
        if start > string_length + length {
            return None;
        }
        length = string_length + length - start;
    }
    if start == 0 && length == string_length {
        // Fast path: the whole string was requested.
        return Some(string.to_string());
    }
    Some(string.chars().skip(start as usize).take(length as usize).collect::<String>())
}
#[cfg(test)]
mod tests {
    use crate::string::substr;
    // Covers positive/negative start, positive/negative length, the
    // whole-string fast path, and the three None cases (zero length,
    // start past the end, negative length reaching start).
    #[test]
    fn test() {
        let string = &"Hello World".to_string();
        assert_eq!(substr(string, 0, 5), Some("Hello".to_string()));
        assert_eq!(substr(string, 6, 5), Some("World".to_string()));
        assert_eq!(substr(string, 0, 20), Some("Hello World".to_string()));
        assert_eq!(substr(string, 0, -6), Some("Hello".to_string()));
        assert_eq!(substr(string, 3, -3), Some("lo Wo".to_string()));
        assert_eq!(substr(string, -11, 11), Some("Hello World".to_string()));
        assert_eq!(substr(string, -20, 11), Some("Hello World".to_string()));
        assert_eq!(substr(string, 0, 0), None);
        assert_eq!(substr(string, 11, 1), None);
        assert_eq!(substr(string, 7, -5), None);
    }
}
|
use crate::{
event::{log_schema, Event, Value},
sinks::util::{
http::{BatchedHttpSink, HttpClient, HttpSink},
service2::TowerRequestConfig,
BatchConfig, BatchSettings, BoxedRawValue, JsonArrayBuffer, UriSerde,
},
topology::config::{DataType, SinkConfig, SinkContext, SinkDescription},
};
use futures::TryFutureExt;
use futures01::Sink;
use http::{Request, StatusCode, Uri};
use serde::{Deserialize, Serialize};
use serde_json::json;
lazy_static::lazy_static! {
    // Honeycomb batch-ingest API base; the dataset name is appended per
    // request in `build_uri`.
    static ref HOST: UriSerde = Uri::from_static("https://api.honeycomb.io/1/batch").into();
}
/// Configuration for the `honeycomb` sink: team API key, target dataset,
/// and the usual batching/request tuning knobs.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct HoneycombConfig {
    // Honeycomb team API key, sent as the X-Honeycomb-Team header.
    api_key: String,
    // TODO: we probably want to make this a template
    // but this limits us in how we can do our healthcheck.
    dataset: String,
    #[serde(default)]
    batch: BatchConfig,
    #[serde(default)]
    request: TowerRequestConfig,
}
// Register this sink under the name "honeycomb".
inventory::submit! {
    SinkDescription::new_without_default::<HoneycombConfig>("honeycomb")
}
#[typetag::serde(name = "honeycomb")]
impl SinkConfig for HoneycombConfig {
    /// Assemble the batched HTTP sink (100 KiB / 1 s batches by default,
    /// overridable via `batch`) plus an async healthcheck future.
    fn build(&self, cx: SinkContext) -> crate::Result<(super::RouterSink, super::Healthcheck)> {
        let request_settings = self.request.unwrap_with(&TowerRequestConfig::default());
        let batch_settings = BatchSettings::default()
            .bytes(bytesize::kib(100u64))
            .timeout(1)
            .parse_config(self.batch)?;
        let client = HttpClient::new(cx.resolver(), None)?;
        let sink = BatchedHttpSink::new(
            self.clone(),
            JsonArrayBuffer::new(batch_settings.size),
            request_settings,
            batch_settings.timeout,
            client.clone(),
            cx.acker(),
        )
        .sink_map_err(|e| error!("Fatal honeycomb sink error: {}", e));
        // Bridge the std future onto the 0.1 futures runtime.
        let healthcheck = Box::new(Box::pin(healthcheck(self.clone(), client)).compat());
        Ok((Box::new(sink), healthcheck))
    }
    /// This sink consumes log events only.
    fn input_type(&self) -> DataType {
        DataType::Log
    }
    fn sink_type(&self) -> &'static str {
        "honeycomb"
    }
}
#[async_trait::async_trait]
impl HttpSink for HoneycombConfig {
    type Input = serde_json::Value;
    type Output = Vec<BoxedRawValue>;
    /// Encode a log event as a honeycomb batch entry: the schema
    /// timestamp field is pulled out (falling back to "now" when absent
    /// or not a timestamp) and the remaining fields become `data`.
    fn encode_event(&self, event: Event) -> Option<Self::Input> {
        let mut log = event.into_log();
        let timestamp = if let Some(Value::Timestamp(ts)) = log.remove(log_schema().timestamp_key())
        {
            ts
        } else {
            chrono::Utc::now()
        };
        Some(json!({
            "timestamp": timestamp.to_rfc3339_opts(chrono::SecondsFormat::Nanos, true),
            "data": log.all_fields(),
        }))
    }
    /// Build the batch POST: JSON array body, team key in the
    /// X-Honeycomb-Team header.
    async fn build_request(&self, events: Self::Output) -> crate::Result<http::Request<Vec<u8>>> {
        let uri = self.build_uri();
        let request = Request::post(uri).header("X-Honeycomb-Team", self.api_key.clone());
        let buf = serde_json::to_vec(&events).unwrap();
        request.body(buf).map_err(Into::into)
    }
}
impl HoneycombConfig {
    /// Joins the static batch endpoint with the configured dataset name.
    /// Panics if the resulting string is not a valid URI (only possible with
    /// a malformed dataset name).
    fn build_uri(&self) -> Uri {
        let uri = format!("{}/{}", HOST.clone(), self.dataset);
        uri.parse::<http::Uri>()
            .expect("This should be a valid uri")
    }
}
/// Verifies the configured API key by POSTing an empty batch.
///
/// A 400 response is treated as healthy — presumably the empty payload is
/// rejected only after authentication succeeds (TODO confirm against the
/// Honeycomb API docs). A 401 surfaces the server-provided `error` message
/// when present; any other status is reported with its body.
async fn healthcheck(config: HoneycombConfig, mut client: HttpClient) -> crate::Result<()> {
    let req = config
        .build_request(Vec::new())
        .await?
        .map(hyper::Body::from);
    let res = client.send(req).await?;
    let status = res.status();
    let body = hyper::body::to_bytes(res.into_body()).await?;
    if status == StatusCode::BAD_REQUEST {
        Ok(())
    } else if status == StatusCode::UNAUTHORIZED {
        // Prefer the JSON `error` field from the response body.
        let json: serde_json::Value = serde_json::from_slice(&body[..])?;
        let message = if let Some(s) = json
            .as_object()
            .and_then(|o| o.get("error"))
            .and_then(|s| s.as_str())
        {
            s.to_string()
        } else {
            "Token is not valid, 401 returned.".to_string()
        };
        Err(message.into())
    } else {
        let body = String::from_utf8_lossy(&body[..]);
        Err(format!(
            "Server returned unexpected error status: {} body: {}",
            status, body
        )
        .into())
    }
}
|
use piston::input::RenderArgs;
use opengl_graphics::GlGraphics;
use graphics::Context;
/// Something that can draw itself with Piston's OpenGL backend.
pub trait Renderable {
    /// Draws the object into `gl` using the given drawing context.
    /// Takes `&mut self` so implementors may update render-local state.
    fn render(&mut self, ctx: &Context, gl: &mut GlGraphics);
}
#![cfg_attr(not(feature = "std"), no_std)]
/// A runtime module template with necessary imports
/// Feel free to remove or edit this file as needed.
/// If you change the name of this file, make sure to update its references in runtime/src/lib.rs
/// If you remove this file, you can remove those references
/// For more guidance on Substrate modules, see the example module
/// https://github.com/paritytech/substrate/blob/master/srml/example/src/lib.rs
extern crate perml_tokens as tokens;
extern crate srml_support as support;
extern crate sr_primitives as runtime_primitives;
extern crate parity_codec;
extern crate srml_system as system;
extern crate sr_std;
extern crate core as core_;
extern crate perml_collections;
use support::{decl_module, decl_storage, decl_event, StorageMap, StorageValue, dispatch::Result, Parameter, rstd};
use runtime_primitives::traits::{SimpleArithmetic, Bounded, One, CheckedAdd, CheckedSub, Zero};
use parity_codec::{Encode, Decode};
use system::{ensure_signed, RawOrigin};
use tokens::{Symbol, Token};
use rstd::result;
use perml_collections::CodecBTreeMap;
use sr_std::prelude::*;
use core_::cmp::Ordering;
use runtime_primitives::codec::{Input, Output};
use support::rstd::collections::btree_map::BTreeMap;
/// Side of an order in a trading pair.
#[derive(Encode, Decode, PartialEq, Eq, Clone, Debug)]
pub enum OrderType {
    Buy = 0,
    Sell = 1,
}
impl Ord for OrderType {
fn cmp(&self, other: &Self) -> Ordering {
self.cmp(other)
}
}
impl PartialOrd for OrderType {
    /// Delegates to the total order defined by `Ord`.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
/// Module configuration trait; builds on the `tokens` module.
pub trait Trait: tokens::Trait {
    // Identifier type for orders (monotonically increasing sequence).
    type OrderId: Parameter + Default + Bounded + SimpleArithmetic;
    // Price type used for limit orders; must be totally ordered so prices
    // can be kept in sorted maps.
    type PriceType: Parameter + Default + Bounded + SimpleArithmetic + Eq + PartialEq + Ord;
    // Overarching event type of the runtime.
    type Event: From<Event<Self>> + Into<<Self as system::Trait>::Event>;
}
// Shorthand for the token balance type of the `tokens` module.
type TokenBalanceOf<T> = <T as tokens::Trait>::TokenBalance;
// Shorthand for the runtime's block number type.
type BlockNumber<T> = <T as system::Trait>::BlockNumber;
/// A single fill (partial or complete execution) recorded against an order.
#[derive(Encode, Decode, Default, PartialEq, Clone)]
pub struct Filled<T: Trait> {
    // Price at which the fill executed.
    pub price: T::PriceType,
    // Amount filled.
    pub amount: <T as tokens::Trait>::TokenBalance,
    pub block_number: BlockNumber<T>, // Block in which the fill occurred.
}
/// A resting limit order together with its fill history.
#[derive(Encode, Decode, PartialEq, Clone)]
pub struct Order<T: Trait> {
    // Unique order id.
    pub id: T::OrderId,
    // Owning account.
    pub acc: T::AccountId,
    // First token of the trading pair.
    pub sym0: Symbol,
    // Second token of the trading pair.
    pub sym1: Symbol,
    // Buy or sell side.
    pub side: OrderType,
    // Limit price.
    pub price: T::PriceType,
    // Total amount placed.
    pub total: <T as tokens::Trait>::TokenBalance,
    // Total amount filled so far.
    pub total_filled: <T as tokens::Trait>::TokenBalance,
    // Individual fills, in execution order.
    pub fills: Vec<Filled<T>>,
    // Block in which the order was placed.
    pub block_number: BlockNumber<T>,
}
impl <T: Trait> Order<T> {
    /// Applies a fill to this order and records it in `fills`.
    ///
    /// The fill is capped at the outstanding (unfilled) amount. Returns
    /// `true` when the order is now completely filled.
    pub fn update(&mut self, fill_amount: <T as tokens::Trait>::TokenBalance, fill_price: T::PriceType) -> bool {
        let remain_amount = self.total.clone() - self.total_filled.clone();
        // Cap the fill at what is still outstanding; a fill that meets or
        // exceeds the remainder completes the order.
        let finish = fill_amount >= remain_amount;
        let fill_amount_ = if finish {
            remain_amount
        } else {
            fill_amount
        };
        // BUG FIX: the filled total must *increase* by the fill amount; the
        // original subtracted it, which underflows on the first fill.
        self.total_filled = self.total_filled.clone() + fill_amount_.clone();
        let fill = Filled {
            price: fill_price,
            amount: fill_amount_,
            block_number: <system::Module<T>>::block_number(),
        };
        self.fills.push(fill);
        finish
    }
}
decl_storage! {
    trait Store for Module<T: Trait> as pendingorders {
        // Last assigned order id (monotonic sequence).
        pub OrderSeq get(order_id): T::OrderId;
        // (pair first symbol, pair second symbol, side) => ordered set of
        // quoted prices (BTreeMap used as a sorted set; values are unit).
        pub PriceList get(price_list): map (Symbol, Symbol, OrderType) => CodecBTreeMap<T::PriceType, ()>;
        // (pair, side, price) => order ids grouped and ordered by the block
        // number in which they were placed.
        pub OrderIdMap get(order_id_map): map(Symbol, Symbol, OrderType, T::PriceType) => CodecBTreeMap<BlockNumber<T>, Vec<T::OrderId>>;
        // order_id => Order
        pub OrderMap get(order_map): map T::OrderId => Option<Order<T>>;
        // Owner of each order id.
        pub OrderOf get(order_of): map T::OrderId => T::AccountId;
        // All order ids belonging to an account.
        pub Orders get(orders): map T::AccountId => Vec<T::OrderId>;
    }
}
decl_module! {
    pub struct Module<T: Trait> for enum Call where origin: T::Origin {
        fn deposit_event<T>() = default;
        // Place a limit order on the (sym0, sym1) pair.
        pub fn order(origin,
            sym0: Symbol,
            sym1: Symbol,
            price: T::PriceType,
            amount: TokenBalanceOf<T>,
            side: OrderType
        ) -> Result
        {
            Self::do_order(origin, sym0, sym1, price, amount, side)
        }
        // Cancel a resting order owned by the caller.
        pub fn cancel_order(origin, order_id: T::OrderId, order_type: OrderType) -> Result {
            Self::do_cancel_order(origin, order_id, order_type)
        }
    }
}
decl_event!(
    pub enum Event<T> where
        <T as system::Trait>::AccountId,
        <T as self::Trait>::OrderId,
        <T as self::Trait>::PriceType,
        TokenBalance = TokenBalanceOf<T>
    {
        // An order was placed:
        // (account, first symbol, second symbol, price, amount, side).
        Ordered(AccountId, Symbol, Symbol, PriceType, TokenBalance, OrderType),
        // An order was canceled: (account, order id).
        OrderCanceled(AccountId, OrderId),
    }
);
impl<T: Trait> Module<T> {
fn next_order_id() -> result::Result<T::OrderId, &'static str> {
let order_id = Self::order_id();
let next_id = order_id.checked_add(&One::one()).ok_or("Token id overflow")?;
Ok(next_id)
}
fn insert_orderid_to_orders(acc: T::AccountId, order_id: T::OrderId) -> Result{
// 某个用户的所有order
let mut order_vec = Self::orders(&acc);
order_vec.push(order_id);
<Orders<T>>::insert(acc, order_vec);
Ok(())
}
fn do_order(origin: T::Origin,
sym0: Symbol,
sym1: Symbol,
price: T::PriceType,
amount: TokenBalanceOf<T>,
side: OrderType) -> Result
{
let sender = ensure_signed(origin)?;
let new_origin: T::Origin = RawOrigin::Signed(sender.clone()).into();
// 检查交易对是否已注册
let symbols = (sym0.clone(), sym1.clone());
let symbol_pair_vec: Vec<(Symbol, Symbol)> = <tokens::Module<T>>::symbol_pairs(&1u32);
assert!(symbol_pair_vec.contains(&symbols), "Symbol pair not registered");
// 检查余额是否足够
let token_key = (sender.clone(), sym1.clone());
let free_token = <tokens::Module<T>>::free_token(token_key);
assert!(free_token > amount, "Insufficient tokens to order");
// 构造order
let order_id = Self::next_order_id()?;
let filled_vec:Vec<Filled<T>> = Vec::new();
let block_num = <system::Module<T>>::block_number();
let order: Order<T> = Order {
id: order_id.clone(),
acc: sender.clone(),
sym0: sym0.clone(),
sym1: sym1.clone(),
side: side.clone(),
price: price.clone(),
total: amount.clone(),
total_filled: Zero::zero(),
fills: filled_vec,
block_number: block_num.clone()
};
// 冻结资金
if side == OrderType::Buy {
<tokens::Module<T>>::freeze(new_origin, sender.clone(), sym1.clone(), amount.clone())?;
} else {
<tokens::Module<T>>::freeze(new_origin, sender.clone(), sym0.clone(), amount.clone())?;
}
// 检查price
let price_list_key = (sym0.clone(), sym1.clone(), side.clone());
let prices = Self::price_list(price_list_key.clone());
let order_id_map_key = (sym0.clone(), sym1.clone(), side.clone(), price.clone());
if prices.0.contains_key(&price) {
// 已有当前报价的挂单
// 当前block number的所有order_id列表
let mut order_ids_by_bn = Self::order_id_map(&order_id_map_key);
let tmp_vec: Vec<T::OrderId> = Vec::new();
let mut order_ids_vec = order_ids_by_bn.0.get(&block_num).unwrap_or(&tmp_vec).clone();
// 插入当前order
order_ids_vec.push(order_id.clone());
// 插入OrderIdMap
order_ids_by_bn.0.insert(block_num.clone(), order_ids_vec);
<OrderIdMap<T>>::insert(&order_id_map_key, order_ids_by_bn);
} else {
// 没有当前报价的挂单
// 插入当前报价
let mut btm: BTreeMap<T::PriceType, ()> = BTreeMap::new();
btm.insert(price.clone(), ());
let cbtm = CodecBTreeMap(btm);
<PriceList<T>>::insert(price_list_key.clone(), cbtm);
// 插入OrderIdMap
let mut order_id_vec:Vec<T::OrderId> = Vec::new();
order_id_vec.push(order_id.clone());
let mut btm: BTreeMap<BlockNumber<T>, Vec<T::OrderId>> = BTreeMap::new();
btm.insert(block_num.clone(), order_id_vec);
let cbtm = CodecBTreeMap(btm);
<OrderIdMap<T>>::insert(&order_id_map_key, cbtm);
}
// 当前order插入OrderMap
<OrderMap<T>>::insert(order_id.clone(), order);
// 当前orderid插入用户的orders
Self::insert_orderid_to_orders(sender.clone(), order_id.clone());
Self::deposit_event(RawEvent::Ordered(sender, sym0, sym1, price, amount, side));
Ok(())
}
fn do_cancel_order(origin: T::Origin, order_id: T::OrderId, order_type: OrderType) -> Result {
let sender = ensure_signed(origin)?;
let new_origin: T::Origin = RawOrigin::Signed(sender.clone()).into();
// 检查订单号属于本人
let mut order_id_vec = Self::orders(sender.clone());
let order_idx = match order_id_vec.binary_search(&order_id) {
Ok(idx) => idx,
_ => return Err("Order id dose not belong to this account"),
};
// 0.得到order实例
let order = match Self::order_map(&order_id) {
Some(order) => order,
None => return Err("Order dose not exists"),
};
// 1.从用户所有order中删除
order_id_vec.remove(order_idx);
<Orders<T>>::insert(sender.clone(), order_id_vec);
// 2.从OrderOf中删除
<OrderOf<T>>::remove(&order_id);
// 3.从OrderMap中删除
<OrderMap<T>>::remove(&order_id);
// 4.修改OrderIdMap,如果对供应区块号的订单列表为空,则删除对应的区块号
let price = order.price;
let sym0 = order.sym0;
let sym1 = order.sym1;
let side = order.side;
let remain_amount = order.total - order.total_filled;
let block_number = order.block_number;
let price_list_key = (sym0.clone(), sym1.clone(), side.clone());
let order_id_map_key = (sym0.clone(), sym1.clone(), side.clone(), price.clone());
let mut bn_btreemap = Self::order_id_map(order_id_map_key.clone());
// 该区块提交的order_id列表
let mut bn_order_id_vec:Vec<T::OrderId> = match bn_btreemap.0.get(&block_number) {
Some(vec) => vec.to_vec(),
None => Vec::new(),
};
let order_id_idx = match bn_order_id_vec.binary_search(&order_id) {
Ok(idx) => idx,
_ => bn_order_id_vec.len() + 1,
};
bn_order_id_vec.remove(order_id_idx);
if bn_order_id_vec.len() == 0 {
bn_btreemap.0.remove(&block_number);
if bn_btreemap.0.len() == 0 {
<OrderIdMap<T>>::remove(&order_id_map_key);
// 5.如果当前报价没有挂单,在priceList中删除报价
<PriceList<T>>::remove(&price_list_key);
}
} else {
bn_btreemap.0.insert(block_number, bn_order_id_vec);
<OrderIdMap<T>>::insert(&order_id_map_key, bn_btreemap);
}
// 6.解冻token
if side == OrderType::Buy {
<tokens::Module<T>>::unfreeze(new_origin, sender.clone(), sym1.clone(), remain_amount.clone())?;
} else {
<tokens::Module<T>>::unfreeze(new_origin, sender.clone(), sym0.clone(), remain_amount.clone())?;
}
Ok(())
}
}
/// tests for this module
#[cfg(test)]
mod tests {
    use super::*;
    use runtime_io::with_externalities;
    use primitives::{H256, Blake2Hasher};
    use support::{impl_outer_origin, assert_ok};
    use runtime_primitives::{
        BuildStorage,
        traits::{BlakeTwo256, IdentityLookup},
        testing::{Digest, DigestItem, Header},
    };
    impl_outer_origin! {
        pub enum Origin for Test {}
    }
    // Minimal mock runtime.
    #[derive(Clone, Eq, PartialEq)]
    pub struct Test;
    impl system::Trait for Test {
        type Origin = Origin;
        type Index = u64;
        type BlockNumber = u64;
        type Hash = H256;
        type Hashing = BlakeTwo256;
        type Digest = Digest;
        type AccountId = u64;
        type Lookup = IdentityLookup<Self::AccountId>;
        type Header = Header;
        type Event = ();
        type Log = DigestItem;
    }
    // NOTE(review): `Trait` also requires `tokens::Trait` plus the `OrderId`
    // and `PriceType` associated types, none of which are provided here —
    // this impl looks like unedited template code and likely fails to
    // compile; the mock needs to be completed.
    impl Trait for Test {
        type Event = ();
    }
    type pendingorders = Module<Test>;
    // This function basically just builds a genesis storage key/value store according to
    // our desired mockup.
    fn new_test_ext() -> runtime_io::TestExternalities<Blake2Hasher> {
        system::GenesisConfig::<Test>::default().build_storage().unwrap().0.into()
    }
    #[test]
    fn it_works_for_default_value() {
        with_externalities(&mut new_test_ext(), || {
            // NOTE(review): `do_something`/`something` do not exist in this
            // module — this test is a leftover from the module template and
            // should be replaced with real `order`/`cancel_order` coverage.
            assert_ok!(pendingorders::do_something(Origin::signed(1), 42));
            // asserting that the stored value is equal to what we stored
            assert_eq!(pendingorders::something(), Some(42));
        });
    }
}
|
use std::time::Duration;
use libusb::{DeviceDescriptor, DeviceHandle, Error, Result};
/// Human-readable identification strings read from a USB device's
/// string descriptors.
#[derive(Debug)]
pub struct DeviceInfo {
    pub manufacturer: String,
    pub name: String,
    pub serial: String,
}
impl DeviceInfo {
pub(super) fn read(handle: &DeviceHandle, descriptor: &DeviceDescriptor) -> Result<DeviceInfo> {
let timeout = Duration::from_secs(1);
let languages = handle.read_languages(timeout)?;
let language = match languages.first() {
Some(value) => *value,
None => return Err(Error::NotFound),
};
let manufacturer = handle.read_manufacturer_string(language, descriptor, timeout)?;
let name = handle.read_product_string(language, descriptor, timeout)?;
let serial = handle.read_serial_number_string(language, descriptor, timeout)?;
Ok(DeviceInfo {
manufacturer,
name,
serial,
})
}
}
|
use crate::cpu::CPU;
use crate::memory_map::{IOMem, Mem, Mem16};
use crate::register::{Flag, Imm16, Imm8, Reg16, Reg8, SignedImm8};
use crate::{Either, ReadWriteError, Src};
use std::fmt;
/// Describes a condition, used in jumps, calls and returns.
/// It can be read to get the boolean value of the condition.
#[derive(Debug, Copy, Clone)]
pub enum Condition {
    /// Zero flag clear.
    NZ,
    /// Carry flag clear.
    NC,
    /// Zero flag set.
    Z,
    /// Carry flag set.
    C,
}
impl fmt::Display for Condition {
    /// Formats the condition using its assembly mnemonic.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mnemonic = match self {
            Condition::NZ => "NZ",
            Condition::NC => "NC",
            Condition::Z => "Z",
            Condition::C => "C",
        };
        write!(f, "{}", mnemonic)
    }
}
impl Src<bool> for Condition {
    /// Evaluates the condition against the CPU's flag register.
    /// `NZ`/`NC` are the negations of the `Z`/`C` flag reads.
    fn try_read(&self, cpu: &CPU) -> Result<bool, ReadWriteError> {
        match self {
            Condition::NZ => Flag::Z.try_read(cpu).map(|b| !b),
            Condition::NC => Flag::C.try_read(cpu).map(|b| !b),
            Condition::Z => Flag::Z.try_read(cpu),
            Condition::C => Flag::C.try_read(cpu),
        }
    }
}
/// A Decoder, used to translate bytes into the corresponding
/// instruction. Stateless: all functionality lives in associated functions.
pub struct Decoder;
impl Decoder {
    /// Parses a CB instruction byte into its opcode.
    ///
    /// All 256 CB opcodes follow a regular layout: the low 3 bits select the
    /// operand (B, C, D, E, H, L, (HL), A) and, for BIT/RES/SET, bits 3-5
    /// select the bit index.
    fn parse_cb(cpu: &CPU, pc: u16) -> Opcode {
        let value = Mem(pc).read(cpu);
        // Operand selected by the low 3 bits; index 6 is the (HL) memory form.
        let location: Either<Reg8, Mem<Reg16>> = match value % 8 {
            0 => Either::Left(Reg8::B),
            1 => Either::Left(Reg8::C),
            2 => Either::Left(Reg8::D),
            3 => Either::Left(Reg8::E),
            4 => Either::Left(Reg8::H),
            5 => Either::Left(Reg8::L),
            6 => Either::Right(Mem(Reg16::HL)),
            7 => Either::Left(Reg8::A),
            _ => unreachable!(),
        };
        // Bit index for BIT/RES/SET (bits 3-5 of the opcode byte).
        let bit = (value / 8) % 8;
        match value {
            0x00..=0x05 | 0x07 => Opcode::RLC(location.left().unwrap()),
            0x06 => Opcode::RLCMem(location.right().unwrap()),
            0x08..=0x0D | 0x0F => Opcode::RRC(location.left().unwrap()),
            0x0E => Opcode::RRCMem(location.right().unwrap()),
            0x10..=0x15 | 0x17 => Opcode::RL(location.left().unwrap()),
            0x16 => Opcode::RLMem(location.right().unwrap()),
            0x18..=0x1D | 0x1F => Opcode::RR(location.left().unwrap()),
            0x1E => Opcode::RRMem(location.right().unwrap()),
            0x20..=0x25 | 0x27 => Opcode::SLA(location.left().unwrap()),
            0x26 => Opcode::SLAMem(location.right().unwrap()),
            0x28..=0x2D | 0x2F => Opcode::SRA(location.left().unwrap()),
            0x2E => Opcode::SRAMem(location.right().unwrap()),
            0x30..=0x35 | 0x37 => Opcode::SWAP(location.left().unwrap()),
            0x36 => Opcode::SWAPMem(location.right().unwrap()),
            0x38..=0x3D | 0x3F => Opcode::SRL(location.left().unwrap()),
            0x3E => Opcode::SRLMem(location.right().unwrap()),
            0x40..=0x45
            | 0x47..=0x4D
            | 0x4F
            | 0x50..=0x55
            | 0x57..=0x5D
            | 0x5F
            | 0x60..=0x65
            | 0x67..=0x6D
            | 0x6F
            | 0x70..=0x75
            | 0x77..=0x7D
            | 0x7F => Opcode::BIT(bit, location.left().unwrap()),
            0x46 | 0x4E | 0x56 | 0x5E | 0x66 | 0x6E | 0x76 | 0x7E => {
                Opcode::BITMem(bit, location.right().unwrap())
            }
            0x80..=0x85
            | 0x87..=0x8D
            | 0x8F
            | 0x90..=0x95
            | 0x97..=0x9D
            | 0x9F
            | 0xA0..=0xA5
            | 0xA7..=0xAD
            | 0xAF
            | 0xB0..=0xB5
            | 0xB7..=0xBD
            | 0xBF => Opcode::RES(bit, location.left().unwrap()),
            0x86 | 0x8E | 0x96 | 0x9E | 0xA6 | 0xAE | 0xB6 | 0xBE => {
                Opcode::RESMem(bit, location.right().unwrap())
            }
            0xC0..=0xC5
            | 0xC7..=0xCD
            | 0xCF
            | 0xD0..=0xD5
            | 0xD7..=0xDD
            | 0xDF
            | 0xE0..=0xE5
            | 0xE7..=0xED
            | 0xEF
            | 0xF0..=0xF5
            | 0xF7..=0xFD
            | 0xFF => Opcode::SET(bit, location.left().unwrap()),
            0xC6 | 0xCE | 0xD6 | 0xDE | 0xE6 | 0xEE | 0xF6 | 0xFE => {
                Opcode::SETMem(bit, location.right().unwrap())
            }
        }
    }
    /// Decodes the current instruction pointed to by the PC register, and returns
    /// it. Panics if the address is unreadable.
    pub fn decode(cpu: &CPU) -> Opcode {
        Decoder::decode_with_context(cpu, Reg16::PC.read(cpu)).unwrap()
    }
    /// Decodes the instruction pointed to by a provided address in memory.
    /// Returns `None` when the byte at `pc` cannot be read.
    pub fn decode_with_context(cpu: &CPU, pc: u16) -> Option<Opcode> {
        let value = Mem(pc).try_read(cpu);
        match value {
            Err(_) => None,
            Ok(value) => {
                // 16-bit register selected by bits 4-5 (SP variant, used by
                // arithmetic/load opcode rows).
                let reg16_arithmetic: Reg16 = match (value / 0x10) % 4 {
                    0 => Reg16::BC,
                    1 => Reg16::DE,
                    2 => Reg16::HL,
                    3 => Reg16::SP,
                    _ => unreachable!(),
                };
                // Same selector, AF variant (used by PUSH/POP).
                let reg16_stack = match (value / 0x10) % 4 {
                    0 => Reg16::BC,
                    1 => Reg16::DE,
                    2 => Reg16::HL,
                    3 => Reg16::AF,
                    _ => unreachable!(),
                };
                // Destination operand encoded in bits 3-5.
                let dst: Either<Reg8, Mem<Reg16>> = match (value / 8) % 8 {
                    0 => Either::Left(Reg8::B),
                    1 => Either::Left(Reg8::C),
                    2 => Either::Left(Reg8::D),
                    3 => Either::Left(Reg8::E),
                    4 => Either::Left(Reg8::H),
                    5 => Either::Left(Reg8::L),
                    6 => Either::Right(Mem(Reg16::HL)),
                    7 => Either::Left(Reg8::A),
                    _ => unreachable!(),
                };
                // Source operand encoded in bits 0-2.
                let src: Either<Reg8, Mem<Reg16>> = match value % 8 {
                    0 => Either::Left(Reg8::B),
                    1 => Either::Left(Reg8::C),
                    2 => Either::Left(Reg8::D),
                    3 => Either::Left(Reg8::E),
                    4 => Either::Left(Reg8::H),
                    5 => Either::Left(Reg8::L),
                    6 => Either::Right(Mem(Reg16::HL)),
                    7 => Either::Left(Reg8::A),
                    _ => unreachable!(),
                };
                // RST target: 0x00, 0x08, ..., 0x38.
                let rst_vector = ((value % 0x40) / 8) * 8;
                Some(match value {
                    0x00 => Opcode::NOP,
                    0x10 => Opcode::STOP,
                    0x20 => Opcode::JR(Some(Condition::NZ), SignedImm8),
                    0x30 => Opcode::JR(Some(Condition::NC), SignedImm8),
                    0x01 | 0x11 | 0x21 | 0x31 => Opcode::LD16(reg16_arithmetic, Imm16),
                    0x02 | 0x12 => Opcode::LDRegMemA(Mem(reg16_arithmetic), Reg8::A),
                    0x22 => Opcode::LDHLMemIncA(Mem(Reg16::HL), Reg8::A),
                    0x32 => Opcode::LDHLMemDecA(Mem(Reg16::HL), Reg8::A),
                    0x03 | 0x13 | 0x23 | 0x33 => Opcode::INC16(reg16_arithmetic),
                    0x04 | 0x14 | 0x24 => Opcode::INC(dst.left().unwrap()),
                    0x34 => Opcode::INCHLMem(Mem(Reg16::HL)),
                    0x05 | 0x15 | 0x25 => Opcode::DEC(dst.left().unwrap()),
                    0x35 => Opcode::DECHLMem(Mem(Reg16::HL)),
                    0x06 | 0x16 | 0x26 => Opcode::LDRegImm(dst.left().unwrap(), Imm8),
                    0x36 => Opcode::LDHLMemImm(Mem(Reg16::HL), Imm8),
                    0x07 => Opcode::RLCA,
                    0x17 => Opcode::RLA,
                    0x27 => Opcode::DAA,
                    0x37 => Opcode::SCF,
                    0x08 => Opcode::LDImmMemSP(Mem(Imm16), Reg16::SP),
                    0x18 => Opcode::JR(None, SignedImm8),
                    0x28 => Opcode::JR(Some(Condition::Z), SignedImm8),
                    0x38 => Opcode::JR(Some(Condition::C), SignedImm8),
                    0x09 | 0x19 | 0x29 | 0x39 => Opcode::ADDHL(Reg16::HL, reg16_arithmetic),
                    0x0A | 0x1A => Opcode::LDARegMem(Reg8::A, Mem(reg16_arithmetic)),
                    0x2A => Opcode::LDAHLMemInc(Reg8::A, Mem(Reg16::HL)),
                    0x3A => Opcode::LDAHLMemDec(Reg8::A, Mem(Reg16::HL)),
                    0x0B | 0x1B | 0x2B | 0x3B => Opcode::DEC16(reg16_arithmetic),
                    0x0C | 0x1C | 0x2C | 0x3C => Opcode::INC(dst.left().unwrap()),
                    0x0D | 0x1D | 0x2D | 0x3D => Opcode::DEC(dst.left().unwrap()),
                    0x0E | 0x1E | 0x2E | 0x3E => Opcode::LDRegImm(dst.left().unwrap(), Imm8),
                    0x0F => Opcode::RRCA,
                    0x1F => Opcode::RRA,
                    0x2F => Opcode::CPL,
                    0x3F => Opcode::CCF,
                    0x40..=0x7F => {
                        if value == 0x76 {
                            Opcode::HALT
                        } else if value % 8 == 6 {
                            Opcode::LDRegHLMem(dst.left().unwrap(), src.right().unwrap())
                        } else if (0x70..=0x77).contains(&value) {
                            Opcode::LDHLMemReg(dst.right().unwrap(), src.left().unwrap())
                        } else {
                            Opcode::LD(dst.left().unwrap(), src.left().unwrap())
                        }
                    }
                    0x80..=0x85 | 0x87 => Opcode::ADD(Reg8::A, src.left().unwrap()),
                    0x86 => Opcode::ADDHLMem(Reg8::A, src.right().unwrap()),
                    0x88..=0x8D | 0x8F => Opcode::ADC(Reg8::A, src.left().unwrap()),
                    0x8E => Opcode::ADCHLMem(Reg8::A, src.right().unwrap()),
                    0x90..=0x95 | 0x97 => Opcode::SUB(Reg8::A, src.left().unwrap()),
                    0x96 => Opcode::SUBHLMem(Reg8::A, src.right().unwrap()),
                    0x98..=0x9D | 0x9F => Opcode::SBC(Reg8::A, src.left().unwrap()),
                    0x9E => Opcode::SBCHLMem(Reg8::A, src.right().unwrap()),
                    0xA0..=0xA5 | 0xA7 => Opcode::AND(Reg8::A, src.left().unwrap()),
                    0xA6 => Opcode::ANDHLMem(Reg8::A, src.right().unwrap()),
                    0xA8..=0xAD | 0xAF => Opcode::XOR(Reg8::A, src.left().unwrap()),
                    0xAE => Opcode::XORHLMem(Reg8::A, src.right().unwrap()),
                    0xB0..=0xB5 | 0xB7 => Opcode::OR(Reg8::A, src.left().unwrap()),
                    0xB6 => Opcode::ORHLMem(Reg8::A, src.right().unwrap()),
                    0xB8..=0xBD | 0xBF => Opcode::CP(Reg8::A, src.left().unwrap()),
                    0xBE => Opcode::CPHLMem(Reg8::A, src.right().unwrap()),
                    0xC0 => Opcode::RET(Some(Condition::NZ)),
                    0xD0 => Opcode::RET(Some(Condition::NC)),
                    0xE0 => Opcode::LDHImmMemA(IOMem(Imm8), Reg8::A),
                    0xF0 => Opcode::LDHAImmMem(Reg8::A, IOMem(Imm8)),
                    0xC1 | 0xD1 | 0xE1 | 0xF1 => Opcode::POP(reg16_stack),
                    0xC2 => Opcode::JP(Some(Condition::NZ), Imm16),
                    0xD2 => Opcode::JP(Some(Condition::NC), Imm16),
                    0xE2 => Opcode::LDHCMemA(IOMem(Reg8::C), Reg8::A),
                    0xF2 => Opcode::LDHACMem(Reg8::A, IOMem(Reg8::C)),
                    0xC3 => Opcode::JP(None, Imm16),
                    0xD3 | 0xE3 => Opcode::Illegal,
                    0xF3 => Opcode::DI,
                    0xC4 => Opcode::CALL(Some(Condition::NZ), Imm16),
                    0xD4 => Opcode::CALL(Some(Condition::NC), Imm16),
                    0xE4 | 0xF4 => Opcode::Illegal,
                    0xC5 | 0xD5 | 0xE5 | 0xF5 => Opcode::PUSH(reg16_stack),
                    0xC6 => Opcode::ADDImm(Reg8::A, Imm8),
                    0xD6 => Opcode::SUBImm(Reg8::A, Imm8),
                    0xE6 => Opcode::ANDImm(Reg8::A, Imm8),
                    0xF6 => Opcode::ORImm(Reg8::A, Imm8),
                    0xC7 | 0xD7 | 0xE7 | 0xF7 => Opcode::RST(rst_vector),
                    0xC8 => Opcode::RET(Some(Condition::Z)),
                    0xD8 => Opcode::RET(Some(Condition::C)),
                    0xE8 => Opcode::ADDSP(Reg16::SP, SignedImm8),
                    0xF8 => Opcode::LDHLSPOffset(SignedImm8),
                    0xC9 => Opcode::RET(None),
                    0xD9 => Opcode::RETI,
                    0xE9 => Opcode::JPHL,
                    0xF9 => Opcode::LDSPHL,
                    0xCA => Opcode::JP(Some(Condition::Z), Imm16),
                    0xDA => Opcode::JP(Some(Condition::C), Imm16),
                    0xEA => Opcode::LDImmMemA(Mem(Imm16), Reg8::A),
                    0xFA => Opcode::LDAImmMem(Reg8::A, Mem(Imm16)),
                    // BUG FIX: `pc + 1` panics (debug) on overflow when a CB
                    // prefix sits at 0xFFFF; wrap like the hardware PC does.
                    0xCB => Decoder::parse_cb(cpu, pc.wrapping_add(1)),
                    0xDB | 0xEB => Opcode::Illegal,
                    0xFB => Opcode::EI,
                    0xCC => Opcode::CALL(Some(Condition::Z), Imm16),
                    0xDC => Opcode::CALL(Some(Condition::C), Imm16),
                    0xEC | 0xFC => Opcode::Illegal,
                    0xCD => Opcode::CALL(None, Imm16),
                    0xDD | 0xED | 0xFD => Opcode::Illegal,
                    0xCE => Opcode::ADCImm(Reg8::A, Imm8),
                    0xDE => Opcode::SBCImm(Reg8::A, Imm8),
                    0xEE => Opcode::XORImm(Reg8::A, Imm8),
                    0xFE => Opcode::CPImm(Reg8::A, Imm8),
                    0xCF | 0xDF | 0xEF | 0xFF => Opcode::RST(rst_vector),
                })
            }
        }
    }
}
/// The different instructions available for the CPU.
///
/// Naming convention: a `Mem` suffix marks the variant that operates on a
/// memory operand (usually `(HL)`), an `Imm` suffix marks an immediate
/// operand read from the byte(s) following the opcode.
#[derive(Debug, Copy, Clone)]
pub enum Opcode {
    // Special (Pink)
    NOP,
    STOP,
    HALT,
    DI,
    EI,
    // Jumps, calls, returns (Orange)
    JR(Option<Condition>, SignedImm8),
    RET(Option<Condition>),
    JP(Option<Condition>, Imm16),
    CALL(Option<Condition>, Imm16),
    RETI,
    JPHL,
    RST(u8),
    // 16-bit operations (Salmon)
    INC16(Reg16),
    DEC16(Reg16),
    ADDHL(Reg16, Reg16),
    ADDSP(Reg16, SignedImm8),
    // 16-bit loads & stack operations (Green)
    LD16(Reg16, Imm16),
    LDImmMemSP(Mem<Imm16>, Reg16), // 0x08
    POP(Reg16),
    PUSH(Reg16),
    LDHLSPOffset(SignedImm8),
    LDSPHL,
    // Loads (Blue)
    LDARegMem(Reg8, Mem<Reg16>),
    LDHLMemIncA(Mem<Reg16>, Reg8),
    LDHLMemDecA(Mem<Reg16>, Reg8),
    LDRegMemA(Mem<Reg16>, Reg8),
    LDAHLMemInc(Reg8, Mem<Reg16>),
    LDAHLMemDec(Reg8, Mem<Reg16>),
    LDRegImm(Reg8, Imm8),
    LDHLMemImm(Mem<Reg16>, Imm8),
    LD(Reg8, Reg8),
    LDRegHLMem(Reg8, Mem<Reg16>),
    LDHLMemReg(Mem<Reg16>, Reg8),
    LDHImmMemA(IOMem<Imm8>, Reg8),
    LDHAImmMem(Reg8, IOMem<Imm8>),
    LDHCMemA(IOMem<Reg8>, Reg8),
    LDHACMem(Reg8, IOMem<Reg8>),
    LDImmMemA(Mem<Imm16>, Reg8),
    LDAImmMem(Reg8, Mem<Imm16>),
    // Arithmetic operations (Yellow)
    INC(Reg8),
    INCHLMem(Mem<Reg16>),
    DEC(Reg8),
    DECHLMem(Mem<Reg16>),
    DAA,
    SCF,
    CPL,
    CCF,
    ADD(Reg8, Reg8),
    ADDHLMem(Reg8, Mem<Reg16>),
    ADDImm(Reg8, Imm8),
    ADC(Reg8, Reg8),
    ADCHLMem(Reg8, Mem<Reg16>),
    ADCImm(Reg8, Imm8),
    SUB(Reg8, Reg8),
    SUBHLMem(Reg8, Mem<Reg16>),
    SUBImm(Reg8, Imm8),
    SBC(Reg8, Reg8),
    SBCHLMem(Reg8, Mem<Reg16>),
    SBCImm(Reg8, Imm8),
    AND(Reg8, Reg8),
    ANDHLMem(Reg8, Mem<Reg16>),
    ANDImm(Reg8, Imm8),
    XOR(Reg8, Reg8),
    XORHLMem(Reg8, Mem<Reg16>),
    XORImm(Reg8, Imm8),
    OR(Reg8, Reg8),
    ORHLMem(Reg8, Mem<Reg16>),
    ORImm(Reg8, Imm8),
    CP(Reg8, Reg8),
    CPHLMem(Reg8, Mem<Reg16>),
    CPImm(Reg8, Imm8),
    // Bit operations (Cyan)
    RLCA,
    RLA,
    RRCA,
    RRA,
    RLC(Reg8),
    RLCMem(Mem<Reg16>),
    RRC(Reg8),
    RRCMem(Mem<Reg16>),
    RL(Reg8),
    RLMem(Mem<Reg16>),
    RR(Reg8),
    RRMem(Mem<Reg16>),
    SLA(Reg8),
    SLAMem(Mem<Reg16>),
    SRA(Reg8),
    SRAMem(Mem<Reg16>),
    SWAP(Reg8),
    SWAPMem(Mem<Reg16>),
    SRL(Reg8),
    SRLMem(Mem<Reg16>),
    BIT(u8, Reg8),
    BITMem(u8, Mem<Reg16>),
    RES(u8, Reg8),
    RESMem(u8, Mem<Reg16>),
    SET(u8, Reg8),
    SETMem(u8, Mem<Reg16>),
    // Illegal
    Illegal,
}
impl Opcode {
    /// Returns the number of cycles an instruction takes. If there is a
    /// condition associated to the instruction, both the "action taken" count
    /// and the "action not taken" count are returned.
    ///
    /// Counts are machine cycles (1 M-cycle = 4 T-cycles).
    pub fn cycles(&self) -> (usize, Option<usize>) {
        match self {
            Opcode::CALL(c, _) => match c {
                Some(_) => (6, Some(3)),
                None => (6, None),
            },
            Opcode::RET(c) => match c {
                Some(_) => (5, Some(2)),
                None => (4, None),
            },
            Opcode::LDImmMemSP(_, _) => (5, None),
            Opcode::JP(c, _) => match c {
                Some(_) => (4, Some(3)),
                None => (4, None),
            },
            Opcode::PUSH(_)
            | Opcode::RST(_)
            | Opcode::ADDSP(_, _)
            | Opcode::RETI
            | Opcode::LDImmMemA(_, _)
            | Opcode::RLCMem(_)
            | Opcode::RRCMem(_)
            | Opcode::RLMem(_)
            | Opcode::RRMem(_)
            | Opcode::SLAMem(_)
            | Opcode::SRAMem(_)
            | Opcode::SWAPMem(_)
            | Opcode::SRLMem(_)
            | Opcode::SETMem(_, _)
            | Opcode::RESMem(_, _)
            | Opcode::LDAImmMem(_, _) => (4, None),
            Opcode::JR(c, _) => match c {
                Some(_) => (3, Some(2)),
                None => (3, None),
            },
            Opcode::LD16(_, _)
            | Opcode::INCHLMem(_)
            | Opcode::DECHLMem(_)
            | Opcode::LDHLMemImm(_, _)
            | Opcode::LDHImmMemA(_, _)
            | Opcode::LDHAImmMem(_, _)
            | Opcode::POP(_)
            // BUG FIX: BIT b,(HL) takes 12 T-cycles (3 M-cycles) per Pan
            // Docs — only RES/SET (HL) take 16; it was grouped with them.
            | Opcode::BITMem(_, _)
            | Opcode::LDHLSPOffset(_) => (3, None),
            Opcode::LDRegMemA(_, _)
            | Opcode::LDHLMemDecA(_, _)
            | Opcode::LDHLMemIncA(_, _)
            | Opcode::INC16(_)
            | Opcode::LDRegImm(_, _)
            | Opcode::ADDHL(_, _)
            | Opcode::LDARegMem(_, _)
            | Opcode::LDAHLMemDec(_, _)
            | Opcode::LDAHLMemInc(_, _)
            | Opcode::DEC16(_)
            | Opcode::LDRegHLMem(_, _)
            | Opcode::LDHLMemReg(_, _)
            | Opcode::ADDHLMem(_, _)
            | Opcode::ADCHLMem(_, _)
            | Opcode::SUBHLMem(_, _)
            | Opcode::SBCHLMem(_, _)
            | Opcode::ANDHLMem(_, _)
            | Opcode::XORHLMem(_, _)
            | Opcode::ORHLMem(_, _)
            | Opcode::CPHLMem(_, _)
            | Opcode::ADDImm(_, _)
            | Opcode::ADCImm(_, _)
            | Opcode::SUBImm(_, _)
            | Opcode::SBCImm(_, _)
            | Opcode::ANDImm(_, _)
            | Opcode::XORImm(_, _)
            | Opcode::ORImm(_, _)
            | Opcode::CPImm(_, _)
            | Opcode::LDHCMemA(_, _)
            | Opcode::LDHACMem(_, _)
            | Opcode::RLC(_)
            | Opcode::RRC(_)
            | Opcode::RL(_)
            | Opcode::RR(_)
            | Opcode::SLA(_)
            | Opcode::SRA(_)
            | Opcode::SWAP(_)
            | Opcode::SRL(_)
            | Opcode::BIT(_, _)
            | Opcode::SET(_, _)
            | Opcode::RES(_, _)
            | Opcode::LDSPHL => (2, None),
            _ => (1, None),
        }
    }
    /// Returns the size of the instruction, in bytes.
    /// CB-prefixed instructions count the prefix byte (size 2).
    pub fn size(&self) -> usize {
        match self {
            Opcode::LD16(_, _)
            | Opcode::LDImmMemSP(_, _)
            | Opcode::JP(_, _)
            | Opcode::CALL(_, _)
            | Opcode::LDImmMemA(_, _)
            | Opcode::LDAImmMem(_, _) => 3,
            Opcode::STOP
            | Opcode::JR(_, _)
            | Opcode::LDRegImm(_, _)
            | Opcode::LDHLMemImm(_, _)
            | Opcode::ADDImm(_, _)
            | Opcode::ADCImm(_, _)
            | Opcode::SUBImm(_, _)
            | Opcode::SBCImm(_, _)
            | Opcode::ANDImm(_, _)
            | Opcode::XORImm(_, _)
            | Opcode::ORImm(_, _)
            | Opcode::CPImm(_, _)
            | Opcode::LDHImmMemA(_, _)
            | Opcode::LDHAImmMem(_, _)
            | Opcode::ADDSP(_, _)
            | Opcode::RLC(_)
            | Opcode::RLCMem(_)
            | Opcode::RRC(_)
            | Opcode::RRCMem(_)
            | Opcode::RL(_)
            | Opcode::RLMem(_)
            | Opcode::RR(_)
            | Opcode::RRMem(_)
            | Opcode::SLA(_)
            | Opcode::SLAMem(_)
            | Opcode::SRA(_)
            | Opcode::SRAMem(_)
            | Opcode::SWAP(_)
            | Opcode::SWAPMem(_)
            | Opcode::SRL(_)
            | Opcode::SRLMem(_)
            | Opcode::BIT(_, _)
            | Opcode::BITMem(_, _)
            | Opcode::SET(_, _)
            | Opcode::SETMem(_, _)
            | Opcode::RES(_, _)
            | Opcode::RESMem(_, _)
            | Opcode::LDHLSPOffset(_) => 2,
            _ => 1,
        }
    }
    /// Renders the instruction as assembly text, reading its immediate
    /// operands from memory at `pc + 1`. Fails with `"??"` when the operand
    /// bytes cannot be read.
    pub fn try_display(&self, cpu: &CPU, pc: Option<u16>) -> Result<String, String> {
        let pc = pc.unwrap_or_else(|| Reg16::PC.read(cpu));
        // BUG FIX: `pc + 1` overflows (debug panic) when pc == 0xFFFF.
        let signed_imm8 = Mem(pc.wrapping_add(1)).try_read(cpu).map(|i| i as i8);
        let imm8 = Mem(pc.wrapping_add(1)).try_read(cpu);
        let imm16 = Mem16(pc.wrapping_add(1)).try_read(cpu);
        match (signed_imm8, imm8, imm16) {
            (Ok(signed_imm8), Ok(imm8), Ok(imm16)) => Ok(match self {
                Opcode::NOP => String::from("NOP"),
                Opcode::STOP => String::from("STOP 0"),
                Opcode::HALT => String::from("HALT"),
                Opcode::DI => String::from("DI"),
                Opcode::EI => String::from("EI"),
                Opcode::JR(c, _) => match c {
                    Some(c) => format!("JR {}, %{:+}", c, signed_imm8),
                    None => format!("JR %{:+}", signed_imm8),
                },
                Opcode::RET(c) => match c {
                    Some(c) => format!("RET {}", c),
                    None => String::from("RET"),
                },
                Opcode::JP(c, _) => match c {
                    Some(c) => format!("JP {}, $0x{:04x}", c, imm16),
                    None => format!("JP $0x{:04x}", imm16),
                },
                Opcode::CALL(c, _) => match c {
                    Some(c) => format!("CALL {}, $0x{:04x}", c, imm16),
                    None => format!("CALL $0x{:04x}", imm16),
                },
                Opcode::RETI => String::from("RETI"),
                Opcode::JPHL => String::from("JP (HL)"),
                Opcode::RST(v) => format!("RST 0x{:02x}", v),
                Opcode::INC16(r) => format!("INC {}", r),
                Opcode::DEC16(r) => format!("DEC {}", r),
                Opcode::ADDHL(dst, src) => format!("ADD {}, {}", dst, src),
                Opcode::ADDSP(dst, _) => format!("ADD {}, %{:+}", dst, signed_imm8),
                Opcode::LD16(r, _) => format!("LD {}, %0x{:04x}", r, imm16),
                Opcode::LDImmMemSP(_, r) => format!("LD (0x{:04x}), {}", imm16, r),
                Opcode::POP(r) => format!("POP {}", r),
                Opcode::PUSH(r) => format!("PUSH {}", r),
                Opcode::LDHLSPOffset(_) => format!("LD HL, SP+0x{:02x}", signed_imm8),
                Opcode::LDSPHL => String::from("LD SP, HL"),
                Opcode::LDARegMem(r, m) => format!("LD {}, {}", r, m),
                Opcode::LDHLMemIncA(m, r) => format!("LD ({}+), {}", m.0, r),
                Opcode::LDHLMemDecA(m, r) => format!("LD ({}-), {}", m.0, r),
                Opcode::LDRegMemA(m, r) => format!("LD {}, {}", r, m),
                Opcode::LDAHLMemInc(r, m) => format!("LD {}, ({}+)", r, m.0),
                Opcode::LDAHLMemDec(r, m) => format!("LD {}, ({}-)", r, m.0),
                Opcode::LDRegImm(r, _) => format!("LD {}, %0x{:02x}", r, imm8),
                Opcode::LDHLMemImm(r, _) => format!("LD {}, %0x{:02x}", r, imm8),
                Opcode::LD(d, s) => format!("LD {}, {}", d, s),
                Opcode::LDRegHLMem(r, m) => format!("LD {}, {}", r, m),
                Opcode::LDHLMemReg(m, r) => format!("LD {}, {}", m, r),
                Opcode::LDHImmMemA(_, r) => format!("LDH ($FF00+0x{:02x}), {}", imm8, r),
                Opcode::LDHAImmMem(r, _) => format!("LDH {}, ($FF00+0x{:02x})", r, imm8),
                Opcode::LDHCMemA(_, r) => format!("LDH ($FF00+C), {}", r),
                Opcode::LDHACMem(r, _) => format!("LDH {}, ($FF00+C)", r),
                Opcode::LDImmMemA(_, r) => format!("LD ($0x{:04x}), {}", imm16, r),
                Opcode::LDAImmMem(r, _) => format!("LD {}, ($0x{:04x})", r, imm16),
                Opcode::INC(r) => format!("INC {}", r),
                Opcode::INCHLMem(m) => format!("INC {}", m),
                Opcode::DEC(r) => format!("DEC {}", r),
                Opcode::DECHLMem(m) => format!("DEC {}", m),
                Opcode::DAA => String::from("DAA"),
                Opcode::SCF => String::from("SCF"),
                Opcode::CPL => String::from("CPL"),
                Opcode::CCF => String::from("CCF"),
                Opcode::ADD(d, s) => format!("ADD {}, {}", d, s),
                Opcode::ADDHLMem(r, m) => format!("ADD {}, {}", r, m),
                // BUG FIX (here and for the seven ALU-immediate ops below):
                // these take an 8-bit immediate; `imm16` was printed before,
                // showing the next *two* bytes instead of the operand.
                Opcode::ADDImm(r, _) => format!("ADD {}, %0x{:02x}", r, imm8),
                Opcode::SUB(d, s) => format!("SUB {}, {}", d, s),
                Opcode::SUBHLMem(r, m) => format!("SUB {}, {}", r, m),
                Opcode::SUBImm(r, _) => format!("SUB {}, %0x{:02x}", r, imm8),
                Opcode::ADC(d, s) => format!("ADC {}, {}", d, s),
                Opcode::ADCHLMem(r, m) => format!("ADC {}, {}", r, m),
                Opcode::ADCImm(r, _) => format!("ADC {}, %0x{:02x}", r, imm8),
                Opcode::SBC(d, s) => format!("SBC {}, {}", d, s),
                Opcode::SBCHLMem(r, m) => format!("SBC {}, {}", r, m),
                Opcode::SBCImm(r, _) => format!("SBC {}, %0x{:02x}", r, imm8),
                Opcode::AND(d, s) => format!("AND {}, {}", d, s),
                Opcode::ANDHLMem(r, m) => format!("AND {}, {}", r, m),
                Opcode::ANDImm(r, _) => format!("AND {}, %0x{:02x}", r, imm8),
                Opcode::XOR(d, s) => format!("XOR {}, {}", d, s),
                Opcode::XORHLMem(r, m) => format!("XOR {}, {}", r, m),
                Opcode::XORImm(r, _) => format!("XOR {}, %0x{:02x}", r, imm8),
                Opcode::OR(d, s) => format!("OR {}, {}", d, s),
                Opcode::ORHLMem(r, m) => format!("OR {}, {}", r, m),
                Opcode::ORImm(r, _) => format!("OR {}, %0x{:02x}", r, imm8),
                Opcode::CP(d, s) => format!("CP {}, {}", d, s),
                Opcode::CPHLMem(r, m) => format!("CP {}, {}", r, m),
                Opcode::CPImm(r, _) => format!("CP {}, %0x{:02x}", r, imm8),
                Opcode::RLCA => String::from("RLCA"),
                Opcode::RLA => String::from("RLA"),
                Opcode::RRCA => String::from("RRCA"),
                Opcode::RRA => String::from("RRA"),
                Opcode::RLC(d) => format!("RLC {}", d),
                Opcode::RLCMem(d) => format!("RLC {}", d),
                Opcode::RRC(d) => format!("RRC {}", d),
                Opcode::RRCMem(d) => format!("RRC {}", d),
                Opcode::RL(d) => format!("RL {}", d),
                Opcode::RLMem(d) => format!("RL {}", d),
                Opcode::RR(d) => format!("RR {}", d),
                Opcode::RRMem(d) => format!("RR {}", d),
                Opcode::SLA(d) => format!("SLA {}", d),
                Opcode::SLAMem(d) => format!("SLA {}", d),
                Opcode::SRA(d) => format!("SRA {}", d),
                Opcode::SRAMem(d) => format!("SRA {}", d),
                Opcode::SWAP(d) => format!("SWAP {}", d),
                Opcode::SWAPMem(d) => format!("SWAP {}", d),
                Opcode::SRL(d) => format!("SRL {}", d),
                Opcode::SRLMem(d) => format!("SRL {}", d),
                Opcode::BIT(b, d) => format!("BIT {}, {}", b, d),
                Opcode::BITMem(b, d) => format!("BIT {}, {}", b, d),
                Opcode::SET(b, d) => format!("SET {}, {}", b, d),
                Opcode::SETMem(b, d) => format!("SET {}, {}", b, d),
                Opcode::RES(b, d) => format!("RES {}, {}", b, d),
                Opcode::RESMem(b, d) => format!("RES {}, {}", b, d),
                Opcode::Illegal => String::from("<illegal opcode>"),
            }),
            _ => Err("??".to_string()),
        }
    }
    /// Returns a `String` representation of the instruction, using the
    /// provided address to interpret the context. If the provided address
    /// is `None`, the current PC register is used as an address.
    ///
    /// Panics if the operand bytes cannot be read; use `try_display` to
    /// handle that case.
    pub fn display(&self, cpu: &CPU, pc: Option<u16>) -> String {
        // `.unwrap()` (not `.ok().unwrap()`) so a panic shows the Err value.
        self.try_display(cpu, pc).unwrap()
    }
}
|
/// Entry point: announces the forward-Euler demo on stdout.
fn main() {
    let banner = "Forward Euler!";
    println!("{}", banner);
}
|
#![allow(non_snake_case)]
/// Sums every element of `slice` on top of the starting value `default`.
///
/// Returns `default` unchanged for an empty slice.
pub fn sum(slice: &[i32], default: i32) -> i32 {
    // Iterator `sum` replaces the manual accumulation loop.
    default + slice.iter().sum::<i32>()
}
/// Returns the distinct values of `vec`, preserving first-occurrence order.
pub fn distinct(vec: &Vec<i32>) -> Vec<i32> {
    use std::collections::HashSet;
    let mut seen = HashSet::new();
    // `HashSet::insert` returns `false` for values already present, so the
    // filter keeps only each value's first occurrence. This is O(n) instead
    // of the previous O(n^2) `Vec::contains` scan per element.
    vec.iter().filter(|v| seen.insert(**v)).copied().collect()
}
/// Returns the elements of `vec` for which `oxPred` returns `true`,
/// preserving order.
pub fn filter(vec: &Vec<i32>, oxPred: &(Fn(i32) -> bool)) -> Vec<i32> {
    // Iterator chain replaces the manual push loop; `copied` avoids the
    // explicit dereferences.
    vec.iter().copied().filter(|&x| oxPred(x)).collect()
}
|
use core::mem::size_of;
use color::*;
use {EntryPoint, elf, kernel_proto};
use conf::Configuration;
use uefi::status::*;
use uefi::boot_services::protocols;
use uefi::boot_services::{BootServices,
AllocateType,
MemoryDescriptor,
MemoryType};
use {PATH_CONFIG,
PATH_FALLBACK_KERNEL};
use {get_conout,
get_boot_services,
get_image_handle,
get_runtime_services,
get_graphics_output};
/// Bootloader entry point: locates the boot volume, loads the configured
/// kernel image, captures the UEFI memory map, exits boot services, and
/// jumps to the kernel with a `kernel_proto::Info` structure.
///
/// # Panics
/// Panics on any UEFI protocol/file failure rather than returning `Err`.
pub extern fn init() -> Result<(), ()> {
    let boot_services = get_boot_services();
    let image_handle = get_image_handle();
    let runtime_services = get_runtime_services();
    let gop = get_graphics_output();
    {
        // Obtain the "LoadedImage" representing the bootloader, from which we get the boot volume
        let image_proto: &protocols::LoadedImage = boot_services.handle_protocol(&image_handle).expect("image_handle - LoadedImage");
        // NOTE(review): (4,4) is presumably the media/file-path device-path node type - confirm against the UEFI spec.
        if image_proto.file_path.type_code() != (4,4) {
            panic!("Loader wans't loaded from a filesystem - type_code = {:?}", image_proto.file_path.type_code());
        }
        let system_volume_fs: &protocols::SimpleFileSystem = boot_services.handle_protocol(&image_proto.device_handle).expect("image_proto - FileProtocol");
        // - Get the root of this volume and load the bootloader configuration file from it
        let system_volume_root = system_volume_fs.open_volume().expect("system_volume_fs - File");
        let config = match Configuration::from_file(boot_services, &system_volume_root, PATH_CONFIG.into()) {
            Ok(c) => c,
            Err(e) => panic!("Failed to load config file: {:?}", e),
            };
        // - Load the kernel.
        let entrypoint = load_kernel_file(boot_services, &system_volume_root, &config.kernel).expect("Unable to load kernel");
        // Save memory map
        let (map_size, map_key, ent_size, ent_ver, map) = {
            let mut map_size = 0;
            let mut map_key = 0;
            let mut ent_size = 0;
            let mut ent_ver = 0;
            // First call with a null buffer only queries the required size,
            // so BUFFER_TOO_SMALL is the expected outcome here.
            match unsafe { (boot_services.get_memory_map)(&mut map_size, ::core::ptr::null_mut(), &mut map_key, &mut ent_size, &mut ent_ver) }
            {
            SUCCESS => {},
            BUFFER_TOO_SMALL => {},
            e => panic!("Sorry, get_memory_map() Failed :( - {:?}", e),
            }
            assert_eq!( ent_size, size_of::<MemoryDescriptor>() );
            let mut map;
            // Allocating the buffer can itself grow the map, so retry until
            // the map fits.
            loop
            {
                map = boot_services.allocate_pool_vec( MemoryType::LoaderData, map_size / ent_size ).unwrap();
                match unsafe { (boot_services.get_memory_map)(&mut map_size, map.as_mut_ptr(), &mut map_key, &mut ent_size, &mut ent_ver) }
                {
                SUCCESS => break,
                BUFFER_TOO_SMALL => continue,
                e => panic!("get_memory_map 2 - {:?}", e),
                }
            }
            // SAFETY: get_memory_map wrote `map_size` bytes of valid descriptors.
            unsafe {
                map.set_len( map_size / ent_size );
            }
            (map_size, map_key, ent_size, ent_ver, map)
            };
        unsafe {
            // `map_key` must match the most recent get_memory_map call or this fails.
            (boot_services.exit_boot_services)(image_handle, map_key).expect("Sorry, exit_boot_services() failed");
            // NOTE(review): the descriptors still carry the values returned by
            // get_memory_map - confirm virtual addresses are populated before
            // handing the map to set_virtual_address_map.
            (runtime_services.set_virtual_address_map)(map_size, ent_size, ent_ver, map.as_ptr()).expect("Sorry, set_virtual_address_map() failed :(");
        }
        let boot_info = kernel_proto::Info {
            runtime_services: runtime_services as *const _ as *const (),
            // TODO: Get from the configuration
            cmdline_ptr: 1 as *const u8,
            cmdline_len: 0,
            map_addr: map.as_ptr() as usize as u64,
            map_entnum: map.len() as u32,
            map_entsz: size_of::<MemoryDescriptor>() as u32,
            vid_addr: gop.mode.frame_buffer_base as *mut Color,
            width: unsafe { (*gop.mode.info).horizontal_resolution },
            height: unsafe { (*gop.mode.info).vertical_resolution }
            };
        // - Execute kernel (passing a magic value and general boot information)
        entrypoint(0x71FF0EF1, &boot_info);
    }
    Ok(())
}
/// Loads an ELF kernel from `filename` on volume `sys_vol`: copies every
/// PT_LOAD segment to its requested physical address, zero-fills the BSS
/// tail, and returns the image entry point.
///
/// # Panics
/// Panics if the file cannot be opened/read or a segment allocation fails.
fn load_kernel_file(boot_services: &::uefi::boot_services::BootServices, sys_vol: &protocols::File, filename: &::uefi::CStr16) -> Result<EntryPoint, ::uefi::Status>
{
    let mut kernel_file = match sys_vol.open_read(filename) {
        Ok(k) => k,
        Err(e) => panic!("Failed to open kernel '{}' - {:?}", filename, e),
        };
    // Load kernel from this file (ELF).
    let elf_hdr = {
        let mut hdr = elf::ElfHeader::default();
        // SAFE: Converts to POD for read
        kernel_file.read( unsafe { ::core::slice::from_raw_parts_mut( &mut hdr as *mut _ as *mut u8, size_of::<elf::ElfHeader>() ) } ).expect("ElfHeader read");
        hdr
        };
    elf_hdr.check_header();
    for i in 0 .. elf_hdr.e_phnum
    {
        let mut ent = elf::PhEnt::default();
        kernel_file.set_position(elf_hdr.e_phoff as u64 + (i as usize * size_of::<elf::PhEnt>()) as u64 ).expect("PhEnt seek");
        // SAFE: Converts to POD for read
        kernel_file.read( unsafe { ::core::slice::from_raw_parts_mut( &mut ent as *mut _ as *mut u8, size_of::<elf::PhEnt>() ) } ).expect("PhEnt read");
        // p_type == 1 is PT_LOAD.
        if ent.p_type == 1
        {
            println!("- {:#x}+{:#x} loads +{:#x}+{:#x}",
                ent.p_paddr, ent.p_memsz,
                ent.p_offset, ent.p_filesz
                );
            let mut addr = ent.p_paddr as u64;
            // BUGFIX: previously `ent.p_memsz as usize / 0x4096` - 0x4096 is
            // 16534, not the 4 KiB page size (0x1000 = 4096), and the plain
            // division rounded down, under-allocating for any segment that is
            // not an exact page multiple. Allocate ceil(p_memsz / 4096) pages.
            // SAFE: Correct call to FFI
            unsafe { (boot_services.allocate_pages)(
                AllocateType::Address,
                MemoryType::LoaderData,
                (ent.p_memsz as usize + 0xFFF) / 0x1000,
                &mut addr
                ).expect("Allocating pages for program segment") };
            // SAFE: This memory has just been allocated by the above
            let data_slice = unsafe { ::core::slice::from_raw_parts_mut(ent.p_paddr as usize as *mut u8, ent.p_memsz as usize) };
            kernel_file.set_position(ent.p_offset as u64).expect("seek segment");
            kernel_file.read( &mut data_slice[.. ent.p_filesz as usize] ).expect("read segment");
            // Zero the BSS portion (p_filesz .. p_memsz).
            for b in &mut data_slice[ent.p_filesz as usize .. ent.p_memsz as usize] {
                *b = 0;
            }
        }
    }
    // SAFE: Assuming that the executable is sane, and that it follows the correct calling convention
    Ok(unsafe { ::core::mem::transmute(elf_hdr.e_entry as usize) })
}
|
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use spec::{LinkArgs, LinkerFlavor, TargetOptions};
use std::default::Default;
pub fn opts() -> TargetOptions {
let mut args = LinkArgs::new();
args.insert(LinkerFlavor::Msvc,
vec!["/NOLOGO".to_string(),
"/NXCOMPAT".to_string()]);
TargetOptions {
function_sections: true,
dynamic_linking: true,
executables: true,
dll_prefix: "".to_string(),
dll_suffix: ".dll".to_string(),
exe_suffix: ".exe".to_string(),
staticlib_prefix: "".to_string(),
staticlib_suffix: ".lib".to_string(),
target_family: Some("windows".to_string()),
is_like_windows: true,
is_like_msvc: true,
pre_link_args: args,
crt_static_allows_dylibs: true,
crt_static_respected: true,
abi_return_struct_as_int: true,
emit_debug_gdb_scripts: false,
requires_uwtable: true,
.. Default::default()
}
}
|
mod error;
use gstreamer::glib::{MainLoop, SourceId};
use gstreamer::prelude::*;
use gstreamer::*;
use gstreamer_audio::{AudioFormat, AudioInfo};
use std::io::Write;
use std::slice;
use std::sync::{Arc, Mutex};
const CHUNK_SIZE: usize = 1024; /* Amount of bytes we are sending in each buffer */
const SAMPLE_RATE: u32 = 44100; /* Samples per second we are sending */
/* Structure to contain all our information, so we can pass it to callbacks */
struct CustomData {
    /* For waveform generation */
    a: f64, // current oscillator sample value (updated by push_data)
    b: f64, // oscillator velocity term for `a`
    c: f64, // slow modulator value driving the frequency sweep
    d: f64, // modulator velocity term for `c`
    num_samples: usize, // total samples pushed so far; used to timestamp buffers
    source_id: Option<SourceId>, // idle-handler id while we are actively feeding appsrc
}
/* This method is called by the idle GSource in the mainloop, to feed CHUNK_SIZE bytes into appsrc.
 * The idle handler is added to the mainloop when appsrc requests us to start sending data (need-data signal)
 * and is removed when appsrc has enough data (enough-data signal).
 */
fn push_data(data: Arc<Mutex<CustomData>>, app_source: &Element) -> Continue {
    let num_samples = CHUNK_SIZE / 2; /* Because each sample is 16 bits */
    /* Create a new empty buffer */
    let mut buffer = Buffer::with_size(CHUNK_SIZE).unwrap();
    {
        let mut data = data.lock().unwrap();
        /* Set its timestamp and duration, derived from the running sample count */
        let buffer_inner = buffer.get_mut().unwrap();
        buffer_inner.set_pts(Some(ClockTime::from_nseconds(
            (data.num_samples * 1_000_000_000 / SAMPLE_RATE as usize) as _,
        )));
        buffer_inner.set_duration(Some(ClockTime::from_nseconds(
            (num_samples * 1_000_000_000 / SAMPLE_RATE as usize) as _,
        )));
        /* Generate some psychodelic waveforms */
        data.c += data.d;
        data.d -= data.c / 1000.0;
        let freq = 1100.0 + 1000.0 * data.d;
        let mut map = buffer_inner.map_writable().unwrap();
        // SAFETY: the writable map covers CHUNK_SIZE bytes; reinterpreting it
        // as i16 halves the element count accordingly (map.len() / 2).
        unsafe {
            let map = slice::from_raw_parts_mut(
                map.as_mut_slice().as_mut_ptr() as *mut i16,
                map.len() / 2,
            );
            for raw in map {
                data.a += data.b;
                data.b -= data.a / freq;
                let value = (500.0 * data.a).floor() as i64;
                // NOTE(review): panics if the waveform ever leaves i16 range -
                // presumably the oscillator stays bounded; confirm.
                *raw = i16::try_from(value).unwrap();
            }
        }
        data.num_samples += num_samples;
    }
    /* Push the buffer into the appsrc */
    let ret: FlowReturn = app_source.emit_by_name("push-buffer", &[&buffer]);
    /* Free the buffer now that we are done with it */
    if ret != FlowReturn::Ok {
        /* We got some error, stop sending data */
        return Continue(false);
    }
    Continue(true)
}
/* This signal callback triggers when appsrc needs data. Here, we add an idle handler
 * to the mainloop to start pushing data into the appsrc */
fn start_feed(data: Arc<Mutex<CustomData>>, app_source: &Element) {
    let data_for_idle = Arc::clone(&data);
    let mut data = data.lock().unwrap();
    /* Already feeding: the idle handler is installed, nothing to do. */
    if data.source_id.is_some() {
        return;
    }
    println!("Start feeding");
    let app_source = app_source.clone();
    data.source_id = Some(glib::idle_add(move || {
        // Fix: the previous `let mut data = Arc::clone(..)` binding was
        // immediately moved and its `mut` unused; pass the clone directly.
        push_data(Arc::clone(&data_for_idle), &app_source)
    }));
}
/* This callback triggers when appsrc has enough data and we can stop sending.
 * We remove the idle handler from the mainloop */
fn stop_feed(source_id: &mut Option<SourceId>) {
    // `Option::take` replaces the `is_none()` check plus
    // `mem::replace(.., None).unwrap()` (clippy: mem_replace_option_with_none);
    // behaviour is unchanged: nothing happens when no handler is installed.
    if let Some(id) = source_id.take() {
        println!("Stop feeding");
        id.remove();
    }
}
/* The appsink has received a buffer */
fn new_sample(sink: &Element) -> FlowReturn {
    /* Pull the sample (its contents are not inspected here) and print a
     * progress marker for every buffer received. */
    let _sample: Sample = sink.emit_by_name("pull-sample", &[]);
    print!("*");
    let _ = std::io::stdout().flush();
    FlowReturn::Ok
}
/* This function is called when an error message is posted on the bus */
fn error_cb(err: &message::Error, main_loop: &MainLoop) {
    /* Report the failing element and any debug detail, then stop the loop. */
    let source = err.src().unwrap().name();
    let details = err.debug().unwrap_or_else(|| "none".into());
    eprintln!("Error received from element {}: {}", source, err.error());
    eprintln!("Debugging information: {}", details);
    main_loop.quit();
}
/// Builds and runs an `appsrc -> tee -> {audio playback, wavescope video,
/// appsink}` pipeline, feeding generated waveform samples via `push_data`.
fn main() -> Result<(), error::Error> {
    // Shared state for the waveform generator callbacks.
    let data = Arc::new(Mutex::new(CustomData {
        a: 0.0,
        b: 1.0,
        c: 0.0,
        d: 1.0,
        num_samples: 0,
        source_id: None,
    }));
    /* Initialize GStreamer */
    init()?;
    /* Create the elements */
    let app_source = ElementFactory::make("appsrc")
        .name("audio_source")
        .build()?;
    let tee = ElementFactory::make("tee").name("tee").build()?;
    let audio_queue = ElementFactory::make("queue").name("audio_queue").build()?;
    let audio_convert1 = ElementFactory::make("audioconvert")
        .name("audio_convert1")
        .build()?;
    let audio_resample = ElementFactory::make("audioresample")
        .name("audio_resample")
        .build()?;
    let audio_sink = ElementFactory::make("autoaudiosink")
        .name("audio_sink")
        .build()?;
    let video_queue = ElementFactory::make("queue").name("video_queue").build()?;
    let audio_convert2 = ElementFactory::make("audioconvert")
        .name("audio_convert2")
        .build()?;
    let visual = ElementFactory::make("wavescope").name("visual").build()?;
    let video_convert = ElementFactory::make("videoconvert")
        .name("video_convert")
        .build()?;
    let video_sink = ElementFactory::make("autovideosink")
        .name("video_sink")
        .build()?;
    let app_queue = ElementFactory::make("queue").name("app_queue").build()?;
    let app_sink = ElementFactory::make("appsink").name("app_sink").build()?;
    /* Create the empty pipeline */
    let pipeline = Pipeline::new(Some("test-pipeline"));
    /* Configure wavescope */
    visual.set_property_from_str("shader", "none");
    visual.set_property_from_str("style", "dots");
    /* Configure appsrc */
    let info = AudioInfo::builder(AudioFormat::S16le, SAMPLE_RATE, 1).build()?;
    let audio_caps = info.to_caps()?;
    app_source.set_property("caps", &audio_caps);
    app_source.set_property_from_str("format", "time");
    /* appsrc drives feeding via need-data / enough-data signals. */
    let app_source_for_need_data = app_source.clone();
    let data_for_need_data = Arc::clone(&data);
    app_source.connect("need-data", false, move |_| {
        start_feed(Arc::clone(&data_for_need_data), &app_source_for_need_data);
        None
    });
    let data_for_enough_data = Arc::clone(&data);
    app_source.connect("enough-data", false, move |_| {
        let mut data = data_for_enough_data.lock().unwrap();
        stop_feed(&mut data.source_id);
        None
    });
    /* Configure appsink */
    app_sink.set_properties(&[("emit-signals", &true), ("caps", &audio_caps)]);
    let app_sink_for_new_sample = app_sink.clone();
    app_sink.connect("new-sample", false, move |_| {
        Some(new_sample(&app_sink_for_new_sample).to_value())
    });
    drop(audio_caps);
    /* Link all elements that can be automatically linked because they have "Always" pads */
    pipeline.add_many(&[
        &app_source,
        &tee,
        &audio_queue,
        &audio_convert1,
        &audio_resample,
        &audio_sink,
        &video_queue,
        &audio_convert2,
        &visual,
        &video_convert,
        &video_sink,
        &app_queue,
        &app_sink,
    ])?;
    if Element::link_many(&[&app_source, &tee]).is_err()
        || Element::link_many(&[&audio_queue, &audio_convert1, &audio_resample, &audio_sink])
            .is_err()
        || Element::link_many(&[
            &video_queue,
            &audio_convert2,
            &visual,
            &video_convert,
            &video_sink,
        ])
        .is_err()
        || Element::link_many(&[&app_queue, &app_sink]).is_err()
    {
        panic!("Elements could not be linked.");
    }
    /* Manually link the Tee, which has "Request" pads */
    let tee_audio_pad = tee.request_pad_simple("src_%u").unwrap();
    println!(
        "Obtained request pad {} for audio branch.",
        tee_audio_pad.name()
    );
    let queue_audio_pad = audio_queue.static_pad("sink").unwrap();
    let tee_video_pad = tee.request_pad_simple("src_%u").unwrap();
    println!(
        "Obtained request pad {} for video branch.",
        tee_video_pad.name()
    );
    let queue_video_pad = video_queue.static_pad("sink").unwrap();
    let tee_app_pad = tee.request_pad_simple("src_%u").unwrap();
    println!(
        "Obtained request pad {} for app branch.",
        tee_app_pad.name()
    );
    let queue_app_pad = app_queue.static_pad("sink").unwrap();
    if tee_audio_pad.link(&queue_audio_pad).is_err()
        || tee_video_pad.link(&queue_video_pad).is_err()
        || tee_app_pad.link(&queue_app_pad).is_err()
    {
        panic!("Tee could not be linked");
    }
    drop(queue_audio_pad);
    drop(queue_video_pad);
    drop(queue_app_pad);
    /* Create a GLib Main Loop and set it to run */
    let main_loop = glib::MainLoop::new(None, false);
    /* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
    let bus = pipeline.bus().unwrap();
    bus.add_signal_watch();
    let main_loop_for_error_cb = main_loop.clone();
    bus.connect_message(Some("error"), move |_, message| match message.view() {
        MessageView::Error(err) => {
            error_cb(err, &main_loop_for_error_cb);
        }
        _ => {
            /* Only "error" messages were subscribed above, so any other
             * message view here is unexpected. */
            panic!();
        }
    });
    drop(bus);
    /* Start playing the pipeline */
    let _ = pipeline.set_state(State::Playing);
    main_loop.run();
    /* Release the request pads from the Tee, and unref them */
    tee.release_request_pad(&tee_audio_pad);
    tee.release_request_pad(&tee_video_pad);
    tee.release_request_pad(&tee_app_pad);
    drop(tee_audio_pad);
    drop(tee_video_pad);
    drop(tee_app_pad);
    /* Free resources */
    let _ = pipeline.set_state(State::Null);
    drop(pipeline);
    Ok(())
}
|
pub mod treapimpl;
pub use crate::treapimpl::v2::Treap;
|
use quicksilver::geom::Vector;
#[derive(Clone, Debug, PartialEq)]
// A moving object on the grid with a constant velocity and a display color.
pub struct Asteroid {
    pub pos: Vector,      // current position, in grid coordinates
    pub velocity: Vector, // displacement per unit of time
    pub color: &'static str, // color name used when drawing
}
impl Asteroid {
    // Advances the position by `velocity * time_delta` (simple Euler step).
    pub fn update(&mut self, time_delta: f32) {
        self.pos += self.velocity * time_delta;
    }
}
#[derive(Clone,Copy)]
// Represents a sectioned screen area; useful for drawing a single object
// inside the grid instead of using absolute coordinates.
pub struct Grid {
    // The surface of the screen, in pixels.
    screen_size: quicksilver::geom::Vector,
    // How many slots the grid has per side; the grid is always square.
    pub grid_size: f32,
    // Computed tile size in pixels (screen_size / grid_size per axis).
    tile_size: quicksilver::geom::Vector,
}
impl Grid {
    // Builds a grid over a `width` x `height` pixel surface with `grid`
    // square cells per side.
    pub fn new(width: f32, height: f32, grid: f32) -> Grid {
        let screen_size = Vector::new(width, height);
        let scale = Vector::new(1.0 / grid, 1.0 / grid);
        Grid {
            screen_size,
            grid_size: grid,
            tile_size: screen_size.times(scale),
        }
    }
    // Whether a point lies inside the grid's coordinate range.
    pub fn is_in(self, point: Vector) -> bool {
        point.x >= 0.0 && point.x < self.grid_size && point.y >= 0.0 && point.y < self.grid_size
    }
    // Converts a grid-space point to screen pixels, centred in its cell.
    // Fails when the point lies outside the grid.
    pub fn translate_to_screen(self, point: Vector) -> Result<Vector, &'static str> {
        if self.is_in(point) {
            let half_cell = self.tile_size.times(Vector::new(0.5, 0.5));
            Ok(half_cell + point.times(self.tile_size))
        } else {
            Err("the point is outside of drawable surface")
        }
    }
    // Two points collide when their screen-space euclidean distance is below
    // half a tile; points outside the grid are used as-is.
    pub fn collide(self, a: Vector, b: Vector) -> bool {
        let first = self.translate_to_screen(a).unwrap_or(a);
        let second = self.translate_to_screen(b).unwrap_or(b);
        first.distance(second) < self.tile_size.len() / 2.0
    }
}
#[cfg(test)]
mod tests {
    // Note this useful idiom: importing names from outer (for mod tests) scope.
    use super::*;
    #[test]
    fn test_grid_new() {
        // A 100x100 surface with 100 cells yields 1px tiles.
        let grid = Grid::new(100.0, 100.0, 100.0);
        assert_eq!(grid.grid_size, 100.0);
        assert_eq!(grid.tile_size.x, 1.0);
        assert_eq!(grid.tile_size.y, 1.0);
    }
    #[test]
    fn test_is_in() {
        let grid = Grid::new(100.0, 100.0, 10.0);
        assert!(grid.is_in(Vector::new(0.0, 0.0)));
        assert!(grid.is_in(Vector::new(9.0, 9.0)));
        assert!(grid.is_in(Vector::new(5.0, 5.0)));
        // Bounds are [0, grid_size) on each axis.
        assert!(!grid.is_in(Vector::new(0.0, -1.0)));
        assert!(!grid.is_in(Vector::new(10.0, 1.0)));
        assert!(!grid.is_in(Vector::new(10.0, 5.0)));
    }
    #[test]
    fn test_translate() {
        let grid = Grid::new(100.0, 100.0, 10.0);
        // Cell (2,2) of 10px tiles maps to its centre at (25,25).
        let point = Vector::new(2.0,2.0);
        match grid.translate_to_screen(point) {
            Ok(p) => assert_eq!(p, Vector::new(25.0, 25.0)),
            Err(e) => panic!("Fail translation: {}", e),
        }
        // Out-of-grid points must be rejected.
        let point = Vector::new(12.0,8.0);
        match grid.translate_to_screen(point) {
            Ok(_) => panic!("Expected error"),
            Err(_) => (),
        }
    }
    #[test]
    fn test_collide() {
        let grid = Grid::new(100.0, 100.0, 10.0);
        assert!(
            grid.collide(Vector::new(0.2, 0.2), Vector::new(0.2, 0.2)),
            "the same point always collide."
        );
        assert!(
            grid.collide(Vector::new(0.2, 0.2), Vector::new(0.3, 0.3)),
            "points in same cell is a collision."
        );
        assert!(
            !grid.collide(Vector::new(5.0, 5.0), Vector::new(10.0, 10.0)),
            "points near but in different cell don't collide."
        );
    }
}
|
use crate::abst::{Controller, Input, Presenter};
// Test double implementing both `Controller` and `Presenter`: replays a
// scripted list of inputs and asserts on the calculated result.
pub struct Mock {
    inputs: Vec<Input>,   // inputs returned by `get_inputs`
    expected: f64,        // value `show_result` asserts against
}
impl Mock {
    /// Builds a mock from scripted inputs and the value the presenter should
    /// receive as the calculation result.
    #[allow(dead_code)]
    pub fn new(inputs: impl IntoIterator<Item = Input>, expected: f64) -> Self {
        let inputs: Vec<Input> = inputs.into_iter().collect();
        Self { inputs, expected }
    }
}
impl Controller for Mock {
    /// Returns a copy of the scripted inputs.
    fn get_inputs(&self) -> Vec<Input> {
        // `Vec::clone` replaces the equivalent but noisier
        // `iter().cloned().collect()`.
        self.inputs.clone()
    }
}
impl Presenter for Mock {
    /// Logs a syntax error to stderr.
    fn show_error(&self, error: crate::exp::SyntaxError) {
        // Fix broken message: was "Occured an error" (typo + wrong word order).
        eprintln!("An error occurred: {:?}", error);
    }
    /// Asserts that the calculated value matches the expected one.
    fn show_result(&self, calculated: f64) {
        assert_eq!(calculated, self.expected);
    }
}
|
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::ty::same_type_and_consts;
use clippy_utils::{meets_msrv, msrvs};
use if_chain::if_chain;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::Applicability;
use rustc_hir::{
self as hir,
def::{CtorOf, DefKind, Res},
def_id::LocalDefId,
intravisit::{walk_inf, walk_ty, NestedVisitorMap, Visitor},
Expr, ExprKind, FnRetTy, FnSig, GenericArg, HirId, Impl, ImplItemKind, Item, ItemKind, Path, QPath, TyKind,
};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::hir::map::Map;
use rustc_middle::ty::AssocKind;
use rustc_semver::RustcVersion;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::Span;
use rustc_typeck::hir_ty_to_ty;
declare_clippy_lint! {
    /// ### What it does
    /// Checks for unnecessary repetition of structure name when a
    /// replacement with `Self` is applicable.
    ///
    /// ### Why is this bad?
    /// Unnecessary repetition. Mixed use of `Self` and struct
    /// name
    /// feels inconsistent.
    ///
    /// ### Known problems
    /// - Unaddressed false negative in fn bodies of trait implementations
    /// - False positive with associated types in traits (#4140)
    ///
    /// ### Example
    /// ```rust
    /// struct Foo {}
    /// impl Foo {
    ///     fn new() -> Foo {
    ///         Foo {}
    ///     }
    /// }
    /// ```
    /// could be
    /// ```rust
    /// struct Foo {}
    /// impl Foo {
    ///     fn new() -> Self {
    ///         Self {}
    ///     }
    /// }
    /// ```
    #[clippy::version = "pre 1.29.0"]
    pub USE_SELF,
    nursery,
    "unnecessary structure name repetition whereas `Self` is applicable"
}
#[derive(Default)]
pub struct UseSelf {
    // Minimum supported Rust version configured for this lint, if any.
    msrv: Option<RustcVersion>,
    // One entry per item currently being visited; only the top of the stack
    // is relevant for linting (see `check_item`).
    stack: Vec<StackItem>,
}
impl UseSelf {
    /// Creates the lint pass with an optional minimum supported Rust version.
    #[must_use]
    pub fn new(msrv: Option<RustcVersion>) -> Self {
        let mut lint_pass = Self::default();
        lint_pass.msrv = msrv;
        lint_pass
    }
}
#[derive(Debug)]
enum StackItem {
    // An `impl` whose self type is eligible for `Self` suggestions.
    Check {
        impl_id: LocalDefId,
        // Nesting depth of bodies entered (maintained by check_body/_post).
        in_body: u32,
        // `hir::Ty` nodes that must not be replaced with `Self`.
        types_to_skip: FxHashSet<HirId>,
    },
    // An item in which linting is suppressed (nested items, lifetimes, ...).
    NoCheck,
}
// Registers the pass and the single lint it emits.
impl_lint_pass!(UseSelf => [USE_SELF]);
const SEGMENTS_MSG: &str = "segments should be composed of at least 1 element";
impl<'tcx> LateLintPass<'tcx> for UseSelf {
    // Pushes a stack entry per visited item: `Check` for lintable `impl`s,
    // `NoCheck` for everything else.
    fn check_item(&mut self, _cx: &LateContext<'_>, item: &Item<'_>) {
        if matches!(item.kind, ItemKind::OpaqueTy(_)) {
            // skip over `ItemKind::OpaqueTy` in order to lint `foo() -> impl <..>`
            return;
        }
        // We push the self types of `impl`s on a stack here. Only the top type on the stack is
        // relevant for linting, since this is the self type of the `impl` we're currently in. To
        // avoid linting on nested items, we push `StackItem::NoCheck` on the stack to signal, that
        // we're in an `impl` or nested item, that we don't want to lint
        let stack_item = if_chain! {
            if let ItemKind::Impl(Impl { self_ty, .. }) = item.kind;
            if let TyKind::Path(QPath::Resolved(_, item_path)) = self_ty.kind;
            let parameters = &item_path.segments.last().expect(SEGMENTS_MSG).args;
            if parameters.as_ref().map_or(true, |params| {
                !params.parenthesized && !params.args.iter().any(|arg| matches!(arg, GenericArg::Lifetime(_)))
            });
            then {
                StackItem::Check {
                    impl_id: item.def_id,
                    in_body: 0,
                    types_to_skip: std::iter::once(self_ty.hir_id).collect(),
                }
            } else {
                StackItem::NoCheck
            }
        };
        self.stack.push(stack_item);
    }
    fn check_item_post(&mut self, _: &LateContext<'_>, item: &Item<'_>) {
        // Mirror of check_item: pop unless the item was skipped there.
        if !matches!(item.kind, ItemKind::OpaqueTy(_)) {
            self.stack.pop();
        }
    }
    fn check_impl_item(&mut self, cx: &LateContext<'_>, impl_item: &hir::ImplItem<'_>) {
        // We want to skip types in trait `impl`s that aren't declared as `Self` in the trait
        // declaration. The collection of those types is all this method implementation does.
        if_chain! {
            if let ImplItemKind::Fn(FnSig { decl, .. }, ..) = impl_item.kind;
            if let Some(&mut StackItem::Check {
                impl_id,
                ref mut types_to_skip,
                ..
            }) = self.stack.last_mut();
            if let Some(impl_trait_ref) = cx.tcx.impl_trait_ref(impl_id);
            then {
                // `self_ty` is the semantic self type of `impl <trait> for <type>`. This cannot be
                // `Self`.
                let self_ty = impl_trait_ref.self_ty();
                // `trait_method_sig` is the signature of the function, how it is declared in the
                // trait, not in the impl of the trait.
                let trait_method = cx
                    .tcx
                    .associated_items(impl_trait_ref.def_id)
                    .find_by_name_and_kind(cx.tcx, impl_item.ident, AssocKind::Fn, impl_trait_ref.def_id)
                    .expect("impl method matches a trait method");
                let trait_method_sig = cx.tcx.fn_sig(trait_method.def_id);
                let trait_method_sig = cx.tcx.erase_late_bound_regions(trait_method_sig);
                // `impl_inputs_outputs` is an iterator over the types (`hir::Ty`) declared in the
                // implementation of the trait.
                let output_hir_ty = if let FnRetTy::Return(ty) = &decl.output {
                    Some(&**ty)
                } else {
                    None
                };
                let impl_inputs_outputs = decl.inputs.iter().chain(output_hir_ty);
                // `impl_hir_ty` (of type `hir::Ty`) represents the type written in the signature.
                //
                // `trait_sem_ty` (of type `ty::Ty`) is the semantic type for the signature in the
                // trait declaration. This is used to check if `Self` was used in the trait
                // declaration.
                //
                // If `any`where in the `trait_sem_ty` the `self_ty` was used verbatim (as opposed
                // to `Self`), we want to skip linting that type and all subtypes of it. This
                // avoids suggestions to e.g. replace `Vec<u8>` with `Vec<Self>`, in an `impl Trait
                // for u8`, when the trait always uses `Vec<u8>`.
                //
                // See also https://github.com/rust-lang/rust-clippy/issues/2894.
                for (impl_hir_ty, trait_sem_ty) in impl_inputs_outputs.zip(trait_method_sig.inputs_and_output) {
                    if trait_sem_ty.walk(cx.tcx).any(|inner| inner == self_ty.into()) {
                        let mut visitor = SkipTyCollector::default();
                        visitor.visit_ty(impl_hir_ty);
                        types_to_skip.extend(visitor.types_to_skip);
                    }
                }
            }
        }
    }
    fn check_body(&mut self, _: &LateContext<'_>, _: &hir::Body<'_>) {
        // `hir_ty_to_ty` cannot be called in `Body`s or it will panic (sometimes). But in bodies
        // we can use `cx.typeck_results.node_type(..)` to get the `ty::Ty` from a `hir::Ty`.
        // However the `node_type()` method can *only* be called in bodies.
        if let Some(&mut StackItem::Check { ref mut in_body, .. }) = self.stack.last_mut() {
            *in_body = in_body.saturating_add(1);
        }
    }
    fn check_body_post(&mut self, _: &LateContext<'_>, _: &hir::Body<'_>) {
        if let Some(&mut StackItem::Check { ref mut in_body, .. }) = self.stack.last_mut() {
            *in_body = in_body.saturating_sub(1);
        }
    }
    // Lints a written type that resolves to the current impl's self type.
    fn check_ty(&mut self, cx: &LateContext<'_>, hir_ty: &hir::Ty<'_>) {
        if_chain! {
            if !hir_ty.span.from_expansion();
            if meets_msrv(self.msrv.as_ref(), &msrvs::TYPE_ALIAS_ENUM_VARIANTS);
            if let Some(&StackItem::Check {
                impl_id,
                in_body,
                ref types_to_skip,
            }) = self.stack.last();
            if let TyKind::Path(QPath::Resolved(_, path)) = hir_ty.kind;
            if !matches!(path.res, Res::SelfTy(..) | Res::Def(DefKind::TyParam, _));
            if !types_to_skip.contains(&hir_ty.hir_id);
            let ty = if in_body > 0 {
                cx.typeck_results().node_type(hir_ty.hir_id)
            } else {
                hir_ty_to_ty(cx.tcx, hir_ty)
            };
            if same_type_and_consts(ty, cx.tcx.type_of(impl_id));
            let hir = cx.tcx.hir();
            // prevents false positive on `#[derive(serde::Deserialize)]`
            if !hir.span(hir.get_parent_node(hir_ty.hir_id)).in_derive_expansion();
            then {
                span_lint(cx, hir_ty.span);
            }
        }
    }
    // Lints expressions (struct literals, ctor calls, unit variants) whose
    // type is the current impl's self type.
    fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
        if_chain! {
            if !expr.span.from_expansion();
            if meets_msrv(self.msrv.as_ref(), &msrvs::TYPE_ALIAS_ENUM_VARIANTS);
            if let Some(&StackItem::Check { impl_id, .. }) = self.stack.last();
            if cx.typeck_results().expr_ty(expr) == cx.tcx.type_of(impl_id);
            then {} else { return; }
        }
        match expr.kind {
            ExprKind::Struct(QPath::Resolved(_, path), ..) => match path.res {
                Res::SelfTy(..) => (),
                Res::Def(DefKind::Variant, _) => lint_path_to_variant(cx, path),
                _ => span_lint(cx, path.span),
            },
            // tuple struct instantiation (`Foo(arg)` or `Enum::Foo(arg)`)
            ExprKind::Call(fun, _) => {
                if let ExprKind::Path(QPath::Resolved(_, path)) = fun.kind {
                    if let Res::Def(DefKind::Ctor(ctor_of, _), ..) = path.res {
                        match ctor_of {
                            CtorOf::Variant => lint_path_to_variant(cx, path),
                            CtorOf::Struct => span_lint(cx, path.span),
                        }
                    }
                }
            },
            // unit enum variants (`Enum::A`)
            ExprKind::Path(QPath::Resolved(_, path)) => lint_path_to_variant(cx, path),
            _ => (),
        }
    }
    extract_msrv_attr!(LateContext);
}
// Collects the `HirId`s of all type nodes in a subtree so they can be
// excluded from linting (see `check_impl_item`).
#[derive(Default)]
struct SkipTyCollector {
    types_to_skip: Vec<HirId>,
}
impl<'tcx> Visitor<'tcx> for SkipTyCollector {
    type Map = Map<'tcx>;
    // Inference arguments (`_`) are recorded and skipped as well.
    fn visit_infer(&mut self, inf: &hir::InferArg) {
        self.types_to_skip.push(inf.hir_id);
        walk_inf(self, inf);
    }
    // Record every type node, then recurse into its sub-types.
    fn visit_ty(&mut self, hir_ty: &hir::Ty<'_>) {
        self.types_to_skip.push(hir_ty.hir_id);
        walk_ty(self, hir_ty);
    }
    // No nested-item traversal is needed for plain type collection.
    fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
        NestedVisitorMap::None
    }
}
// Emits the machine-applicable suggestion to replace `span` with `Self`.
fn span_lint(cx: &LateContext<'_>, span: Span) {
    span_lint_and_sugg(
        cx,
        USE_SELF,
        span,
        "unnecessary structure name repetition",
        "use the applicable keyword",
        "Self".to_owned(),
        Applicability::MachineApplicable,
    );
}
// For a path to an enum variant (`Enum::Variant`), lint only the type part of
// the path (up to and including the type's generic args), keeping the
// variant segment intact.
fn lint_path_to_variant(cx: &LateContext<'_>, path: &Path<'_>) {
    if let [.., self_seg, _variant] = path.segments {
        let span = path
            .span
            .with_hi(self_seg.args().span_ext().unwrap_or(self_seg.ident.span).hi());
        span_lint(cx, span);
    }
}
|
use std::convert::Into;
use std::fmt::{Display, Formatter};
/// Newtype wrapper around a `String` key.
#[derive(Clone, Debug)]
pub struct Node(pub String);

impl Display for Node {
    /// Formats the node as its raw key.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl Node {
    /// Creates a node from its key.
    pub fn new(key: String) -> Self {
        Self(key)
    }
}

// `From` is preferred over a hand-written `Into` (clippy::from_over_into):
// the standard library's blanket impl still provides `Node: Into<String>`,
// so every existing `.into()` call site keeps working.
impl From<Node> for String {
    fn from(node: Node) -> String {
        node.0
    }
}
|
use std::char;
use std::collections::BTreeSet;
use fst::Map;
// Serialized FST mapping lowercase character names to packed codepoint
// values, produced by the build script at compile time.
const BYTES: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/unicode/name_fst.bin"));
// Pulls in the generated tables (including `AMBIGUOUS_CHARS`, used below).
include!(concat!(env!("OUT_DIR"), "/unicode/names.rs"));
// Looks up a single lowercase `word` in the FST. A set byte in bits 32..40
// of the stored value marks an ambiguous name whose low 32 bits index
// `AMBIGUOUS_CHARS`; otherwise the value itself is the codepoint.
fn query_fst(word: &str) -> Vec<char> {
    lazy_static! {
        // Built lazily on first use from the embedded FST bytes.
        static ref FST: Map<Vec<u8>> = Map::new(BYTES.to_owned()).unwrap();
    }
    let mut chars: Vec<char> = vec![];
    if let Some(cp) = FST.get(word) {
        if cp & (0xff << 32) != 0 {
            // Ambiguous entry: expand via the generated table.
            let index: usize = (cp as u32) as usize;
            for ch in AMBIGUOUS_CHARS[index].chars() {
                chars.push(ch);
            }
        } else if let Some(ch) = char::from_u32(cp as u32) {
            chars.push(ch)
        }
    }
    chars
}
/// Resolves a free-form name query to candidate characters.
///
/// The lowercased query is first looked up verbatim; when that yields
/// nothing and the query has several words, the per-word results are
/// intersected (logical AND).
pub fn lookup_by_query(query: &str) -> Vec<char> {
    let query = query.to_lowercase();
    // An exact match on the full query wins outright.
    let direct = query_fst(&query);
    if !direct.is_empty() {
        return direct;
    }
    let mut candidates: BTreeSet<char> = BTreeSet::new();
    if query.contains(char::is_whitespace) {
        for (idx, word) in query.split_whitespace().enumerate() {
            let word_matches: BTreeSet<char> = query_fst(word).into_iter().collect();
            if idx == 0 {
                // Seed the candidate set from the first word.
                candidates = word_matches;
            } else {
                candidates = candidates.intersection(&word_matches).cloned().collect();
                // Short-circuit once the intersection is empty.
                if candidates.is_empty() {
                    return vec![];
                }
            }
        }
    }
    candidates.into_iter().collect()
}
|
#[doc = "Reader of register SR"]
pub type R = crate::R<u32, super::SR>;
#[doc = "Reader of field `TAMP1F`"]
pub type TAMP1F_R = crate::R<bool, bool>;
#[doc = "Reader of field `TAMP2F`"]
pub type TAMP2F_R = crate::R<bool, bool>;
#[doc = "Reader of field `TAMP3F`"]
pub type TAMP3F_R = crate::R<bool, bool>;
#[doc = "Reader of field `TAMP4F`"]
pub type TAMP4F_R = crate::R<bool, bool>;
#[doc = "Reader of field `TAMP5F`"]
pub type TAMP5F_R = crate::R<bool, bool>;
#[doc = "Reader of field `TAMP6F`"]
pub type TAMP6F_R = crate::R<bool, bool>;
#[doc = "Reader of field `TAMP7F`"]
pub type TAMP7F_R = crate::R<bool, bool>;
#[doc = "Reader of field `TAMP8F`"]
pub type TAMP8F_R = crate::R<bool, bool>;
#[doc = "Reader of field `ITAMP1F`"]
pub type ITAMP1F_R = crate::R<bool, bool>;
#[doc = "Reader of field `ITAMP2F`"]
pub type ITAMP2F_R = crate::R<bool, bool>;
#[doc = "Reader of field `ITAMP3F`"]
pub type ITAMP3F_R = crate::R<bool, bool>;
#[doc = "Reader of field `ITAMP5F`"]
pub type ITAMP5F_R = crate::R<bool, bool>;
#[doc = "Reader of field `ITAMP8F`"]
pub type ITAMP8F_R = crate::R<bool, bool>;
impl R {
    // Each accessor extracts one status flag from the raw register value
    // (`self.bits`) by shifting the flag's bit position down and masking the
    // low bit. Generated-style code; bit positions come from the device SVD.
    #[doc = "Bit 0 - TAMP1F"]
    #[inline(always)]
    pub fn tamp1f(&self) -> TAMP1F_R {
        TAMP1F_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - TAMP2F"]
    #[inline(always)]
    pub fn tamp2f(&self) -> TAMP2F_R {
        TAMP2F_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - TAMP3F"]
    #[inline(always)]
    pub fn tamp3f(&self) -> TAMP3F_R {
        TAMP3F_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - TAMP4F"]
    #[inline(always)]
    pub fn tamp4f(&self) -> TAMP4F_R {
        TAMP4F_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - TAMP5F"]
    #[inline(always)]
    pub fn tamp5f(&self) -> TAMP5F_R {
        TAMP5F_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - TAMP6F"]
    #[inline(always)]
    pub fn tamp6f(&self) -> TAMP6F_R {
        TAMP6F_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - TAMP7F"]
    #[inline(always)]
    pub fn tamp7f(&self) -> TAMP7F_R {
        TAMP7F_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 7 - TAMP8F"]
    #[inline(always)]
    pub fn tamp8f(&self) -> TAMP8F_R {
        TAMP8F_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    // Internal tamper flags occupy a separate bit range starting at bit 16;
    // note the gaps (bits 19, 21, 22 have no field in this register).
    #[doc = "Bit 16 - ITAMP1F"]
    #[inline(always)]
    pub fn itamp1f(&self) -> ITAMP1F_R {
        ITAMP1F_R::new(((self.bits >> 16) & 0x01) != 0)
    }
    #[doc = "Bit 17 - ITAMP2F"]
    #[inline(always)]
    pub fn itamp2f(&self) -> ITAMP2F_R {
        ITAMP2F_R::new(((self.bits >> 17) & 0x01) != 0)
    }
    #[doc = "Bit 18 - ITAMP3F"]
    #[inline(always)]
    pub fn itamp3f(&self) -> ITAMP3F_R {
        ITAMP3F_R::new(((self.bits >> 18) & 0x01) != 0)
    }
    #[doc = "Bit 20 - ITAMP5F"]
    #[inline(always)]
    pub fn itamp5f(&self) -> ITAMP5F_R {
        ITAMP5F_R::new(((self.bits >> 20) & 0x01) != 0)
    }
    #[doc = "Bit 23 - ITAMP8F"]
    #[inline(always)]
    pub fn itamp8f(&self) -> ITAMP8F_R {
        ITAMP8F_R::new(((self.bits >> 23) & 0x01) != 0)
    }
}
|
use objc_foundation::NSObject;
use super::{AvCaptureInput, AvCaptureInputPort};
use super::super::AvCaptureDevice;
/// Capture Device Input
///
/// `AVCaptureDeviceInput` is a concrete sub-class of `AVCaptureInput` you use to capture data from
/// an `AVCaptureDevice` object.
pub struct AvCaptureDeviceInput {
/// The device with which the input is associated.
pub device: AvCaptureDevice,
/// The capture input’s ports.
///
/// # Discussion
///
/// The array contains one or more instances of `AVCaptureInputPort`.
///
/// Each individual `AVCaptureInputPort` instance posts
/// an `AVCaptureInputPortFormatDescriptionDidChange` when the `formatDescription` of that
/// port changes.
pub ports: Vec<Box<AvCaptureInputPort>>,
/// `AVCaptureDeviceInput`
pub(super::super) obj: *mut NSObject,
}
impl AvCaptureDeviceInput {
    /// Initializes an input to use a specified device.
    ///
    /// Allocates the underlying Objective-C `AVCaptureDeviceInput` object and
    /// initializes it with the device's raw object pointer.
    ///
    /// NOTE(review): `error` is handed to `initWithDevice:error:` but never
    /// inspected afterwards (see the TODO), so initialization failures are
    /// silently ignored; `ports` is also left empty rather than being
    /// populated from the native object — confirm this is intentional.
    pub fn init__device(device: AvCaptureDevice) -> AvCaptureDeviceInput {
        use ffi::AVCaptureDeviceInput;
        unsafe {
            let error = 0 as *mut NSObject; // TODO
            // Create an instance of `AVCaptureDeviceInput` by calling `alloc`, then `init`.
            let obj: *mut NSObject = msg_send![&AVCaptureDeviceInput, alloc];
            let obj: *mut NSObject = msg_send![obj, initWithDevice:device.obj error:&error];
            AvCaptureDeviceInput { obj, device, ports: vec![] }
        }
    }
}
impl AvCaptureInput for AvCaptureDeviceInput {
    /// Exposes this input's ports, as required by the `AvCaptureInput` trait.
    fn ports(&self) -> &Vec<Box<AvCaptureInputPort>> {
        let Self { ports, .. } = self;
        ports
    }
}
impl Drop for AvCaptureDeviceInput {
fn drop(&mut self) {
let _: () = unsafe {
msg_send![self.obj, release]
};
}
} |
use std::io;
/// Reads two integers from one line of stdin and prints "Yay!" when both are
/// at most 8, ":(" otherwise.
fn main() {
    let mut input = String::new();
    io::stdin().read_line(&mut input).unwrap();
    let mut tokens = input.split_whitespace();
    let a: i64 = tokens.next().unwrap().parse().unwrap();
    let b: i64 = tokens.next().unwrap().parse().unwrap();
    // Choose the message as an expression instead of branching on println.
    let answer = if a <= 8 && b <= 8 { "Yay!" } else { ":(" };
    println!("{}", answer);
}
|
// Copyright 2023 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::sync::atomic::AtomicU64;
use std::sync::atomic::Ordering;
/// Counters gathered while pruning segments and blocks.
///
/// Each `*_before` / `*_after` pair records how many items entered a pruning
/// stage and how many survived it. All fields are atomics, so the struct can
/// be shared by reference and updated concurrently.
#[derive(Default)]
pub struct FusePruningStatistics {
    /// Segment range pruning stats.
    pub segments_range_pruning_before: AtomicU64,
    pub segments_range_pruning_after: AtomicU64,
    /// Block range pruning stats.
    pub blocks_range_pruning_before: AtomicU64,
    pub blocks_range_pruning_after: AtomicU64,
    /// Block bloom filter pruning stats.
    pub blocks_bloom_pruning_before: AtomicU64,
    pub blocks_bloom_pruning_after: AtomicU64,
}
impl FusePruningStatistics {
    // NOTE(review): despite the `set_` prefix, every setter below uses
    // `fetch_add`, so repeated calls ACCUMULATE into the counter rather than
    // overwrite it. Renaming to `add_*` would be clearer but would break
    // existing callers. `Ordering::Relaxed` is used throughout: these are
    // independent statistics counters with no cross-counter ordering needs.
    pub fn set_segments_range_pruning_before(&self, v: u64) {
        self.segments_range_pruning_before
            .fetch_add(v, Ordering::Relaxed);
    }
    pub fn get_segments_range_pruning_before(&self) -> u64 {
        self.segments_range_pruning_before.load(Ordering::Relaxed)
    }
    pub fn set_segments_range_pruning_after(&self, v: u64) {
        self.segments_range_pruning_after
            .fetch_add(v, Ordering::Relaxed);
    }
    pub fn get_segments_range_pruning_after(&self) -> u64 {
        self.segments_range_pruning_after.load(Ordering::Relaxed)
    }
    pub fn set_blocks_range_pruning_before(&self, v: u64) {
        self.blocks_range_pruning_before
            .fetch_add(v, Ordering::Relaxed);
    }
    pub fn get_blocks_range_pruning_before(&self) -> u64 {
        self.blocks_range_pruning_before.load(Ordering::Relaxed)
    }
    pub fn set_blocks_range_pruning_after(&self, v: u64) {
        self.blocks_range_pruning_after
            .fetch_add(v, Ordering::Relaxed);
    }
    pub fn get_blocks_range_pruning_after(&self) -> u64 {
        self.blocks_range_pruning_after.load(Ordering::Relaxed)
    }
    pub fn set_blocks_bloom_pruning_before(&self, v: u64) {
        self.blocks_bloom_pruning_before
            .fetch_add(v, Ordering::Relaxed);
    }
    pub fn get_blocks_bloom_pruning_before(&self) -> u64 {
        self.blocks_bloom_pruning_before.load(Ordering::Relaxed)
    }
    pub fn set_blocks_bloom_pruning_after(&self, v: u64) {
        self.blocks_bloom_pruning_after
            .fetch_add(v, Ordering::Relaxed);
    }
    pub fn get_blocks_bloom_pruning_after(&self) -> u64 {
        self.blocks_bloom_pruning_after.load(Ordering::Relaxed)
    }
}
|
use dlal_component_base::{component, json, serde_json, Body, CmdResult};
// Declares the `Component` struct and its framework plumbing via the dlal
// base macro: MIDI in/out, the framework features it uses, its fields, and
// the metadata for its commands. The fields drive a "strum" effect: queued
// note-on messages (`msgs`) are replayed spread over `duration` seconds,
// following the down/up `pattern`.
component!(
    {"in": ["midi"], "out": ["midi"]},
    [
        "run_size",
        "sample_rate",
        "multi",
        {"name": "field_helpers", "fields": ["pattern"], "kinds": ["json"]},
        {"name": "field_helpers", "fields": ["duration"], "kinds": ["rw", "json"]},
    ],
    {
        pattern: Vec<String>,
        duration: f32,
        msgs: Vec<Vec<u8>>,
        age: f32,
        i_pattern: usize,
        i_msgs: usize,
    },
    {
        "pattern": {
            "args": [
                {
                    "name": "pattern",
                    "desc": "array of strings; d for downstroke, u for upstroke",
                },
            ],
        },
        "duration": {
            "args": [
                {
                    "name": "duration",
                    "desc": "how long a strum lasts in seconds",
                },
            ],
        },
    },
);
impl Component {
    /// Finishes the current stroke and cyclically advances `i_pattern` to the
    /// next stroke symbol ("d" or "u"), skipping any other entries. Always
    /// moves at least one step forward.
    fn next_stroke(&mut self) {
        self.msgs.clear();
        self.i_msgs = 0;
        loop {
            self.i_pattern = (self.i_pattern + 1) % self.pattern.len();
            if matches!(self.pattern[self.i_pattern].as_str(), "d" | "u") {
                break;
            }
        }
    }
}
impl ComponentTrait for Component {
    /// Defaults: a single-downstroke pattern and a 30 ms strum.
    fn init(&mut self) {
        self.pattern = vec!["d".into()];
        self.duration = 0.03;
    }
    /// Advances the current strum by one audio run, emitting any queued
    /// note-on messages whose scheduled time within the strum has passed.
    fn run(&mut self) {
        // age == 0 marks a freshly collected chord: order the notes by pitch
        // (byte 1 of each MIDI message), and reverse for an upstroke so the
        // highest note sounds first.
        if self.age == 0.0 {
            self.msgs.sort_by(|a, b| a[1].cmp(&b[1]));
            if self.pattern[self.i_pattern] == "u" {
                self.msgs.reverse();
            }
        }
        // Advance elapsed time by one run's duration (run_size samples).
        self.age += self.run_size as f32 / self.sample_rate as f32;
        match self.msgs.len() {
            0 => (),
            // A lone note is emitted immediately and the stroke is done.
            1 => {
                self.multi_midi(&self.msgs[0]);
                self.next_stroke();
            }
            // Multiple notes are spread evenly across `duration` seconds:
            // note i is due at duration * i / (len - 1). Emit every note
            // whose due time has passed; finish the stroke after the last.
            _ => {
                while self.age > self.duration * self.i_msgs as f32 / (self.msgs.len() as f32 - 1.0) {
                    self.multi_midi(&self.msgs[self.i_msgs]);
                    self.i_msgs += 1;
                    if self.i_msgs == self.msgs.len() {
                        self.next_stroke();
                        break;
                    }
                }
            }
        }
    }
    /// Queues note-on messages for the strum; forwards everything else via
    /// `multi_midi` unchanged.
    fn midi(&mut self, msg: &[u8]) {
        // Pass through anything that is not a note-on with nonzero velocity
        // (short messages, other status bytes, and velocity-0 note-offs).
        if msg.len() < 3 || msg[0] & 0xf0 != 0x90 || msg[2] == 0 {
            self.multi_midi(msg);
            return;
        }
        // Queue the note and restart the strum clock.
        self.msgs.push(msg.to_vec());
        self.age = 0.0;
    }
}
impl Component {
    /// Command handler for `pattern`: replaces the strum pattern with the
    /// supplied array of strings, and always replies with the (possibly
    /// unchanged) current pattern as JSON.
    fn pattern_cmd(&mut self, body: serde_json::Value) -> CmdResult {
        if let Ok(v) = body.arg::<Vec<_>>(0) {
            // Keep `pattern` non-empty: other code indexes into it, so an
            // empty pattern is replaced by a single downstroke.
            if v.is_empty() {
                // idiom fix: `is_empty()` instead of `len() == 0`
                self.pattern = vec!["d".into()];
            } else {
                self.pattern = v;
            }
        }
        Ok(Some(json!(self.pattern)))
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.