blob_id
stringlengths 40
40
| language
stringclasses 1
value | repo_name
stringlengths 5
140
| path
stringlengths 5
183
| src_encoding
stringclasses 6
values | length_bytes
int64 12
5.32M
| score
float64 2.52
4.94
| int_score
int64 3
5
| detected_licenses
listlengths 0
47
| license_type
stringclasses 2
values | text
stringlengths 12
5.32M
| download_success
bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|
3cc573cf2eced240871c8e24e00fad78e5afe3d4
|
Rust
|
denysvitali/aoc-2020
|
/days/dec18/src/test.rs
|
UTF-8
| 446
| 2.5625
| 3
|
[
"MIT"
] |
permissive
|
// Compile the unit tests only for `cargo test` builds; previously this
// module was built unconditionally, bloating release builds.
#[cfg(test)]
mod test {
    use crate::{solve_puzzle, evaluate_expr};
    use utils::get_file;

    /// End-to-end check of `solve_puzzle` against the puzzle's example
    /// input file (sum of the four expected expression results).
    #[test]
    fn example() {
        let f = get_file(file!(), "1.txt");
        assert_eq!(26 + 437 + 12240 + 13632, solve_puzzle(f.as_str()).unwrap());
    }

    /// Mixed addition/multiplication with nested parentheses.
    #[test]
    fn expr_0() {
        assert_eq!(51, evaluate_expr("1 + (2 * 3) + (4 * (5 + 6))"))
    }

    /// Left-to-right evaluation with a parenthesized sub-expression.
    #[test]
    fn expr_1() {
        assert_eq!(26, evaluate_expr("2 * 3 + (4 * 5)"))
    }
}
| true
|
d4cb420667648607b4e5131685104960132f1a58
|
Rust
|
isso0424/vc_time_keeper
|
/src/discord/client.rs
|
UTF-8
| 2,283
| 2.75
| 3
|
[] |
no_license
|
use crate::discord::action::kick;
use crate::timer::event_loop::lazy_event;
use chrono::offset::Local;
use chrono::{Datelike, Duration, NaiveTime, TimeZone};
use serenity::framework::standard::macros::{command, group};
use serenity::framework::standard::{CommandResult, StandardFramework};
use serenity::model::prelude::Message;
use serenity::prelude::Context;
use serenity::prelude::{Client, EventHandler};
use std::convert::TryFrom;
#[group]
#[commands(set)]
// Command group holding the single `set` command; the `#[group]` macro
// generates the `GENERAL_GROUP` static that `start` registers below.
struct General;
// Marker type for serenity's event handler; all events use default handling.
struct Handler;
impl EventHandler for Handler {}
/// Builds a serenity client for `token`, installs the `!vcTimer ` command
/// framework with the `GENERAL` group, and blocks on the Discord event
/// loop until the client stops.
///
/// Errors from `client.start()` are logged to stdout; the function itself
/// always returns `Ok(())` after the loop exits.
pub fn start(token: String) -> CommandResult {
    let mut client = Client::new(&token, Handler).expect("Error creating client");
    client.with_framework(
        StandardFramework::new()
            .configure(|c| c.prefix("!vcTimer "))
            .group(&GENERAL_GROUP),
    );
    // `start()` only returns on shutdown or a connection-level error.
    if let Err(why) = client.start() {
        // Fixed typo in the log message: "ocurred" -> "occurred".
        println!(
            "Error occurred!!!\n-----------\nReason\n-----------\n{}",
            why
        );
    }
    Ok(())
}
#[command]
// Handler for `!vcTimer set <arg> <time>`: takes the third whitespace token
// as a clock time (parsed by `get_duration` as HH:MM), then schedules a
// delayed `kick` of the message author in the message's guild.
// All malformed input is silently ignored (returns Ok(()) early).
pub fn set(context: &mut Context, message: &Message) -> CommandResult {
    let content: Vec<&str> = message.content.split_whitespace().collect();
    // Need at least: prefix token, "set", and the time argument.
    if content.len() < 3 {
        return Ok(());
    }
    let raw_date = match content.get(2) {
        Some(date) => date,
        None => return Ok(()),
    };
    // Convert the chrono Duration to whole seconds as u64; negative or
    // unparsable values are dropped silently.
    let duration: u64 = match get_duration(raw_date) {
        Some(duration) => match TryFrom::try_from(duration.num_seconds()) {
            Ok(duration) => duration,
            Err(_) => return Ok(()),
        },
        None => return Ok(()),
    };
    println!("{}", duration);
    if let Some(guild_id) = message.guild_id {
        let user_id = message.author.id;
        // Fire-and-forget: run `kick` for this user after `duration` seconds.
        lazy_event(duration, guild_id, user_id, context, kick);
    }
    Ok(())
}
// Parses `raw_date` as a local "HH:MM" (chrono `%R`) on today's date and
// returns the duration from now until that time. If the time has already
// passed today, it wraps forward to the same time tomorrow.
// Returns `None` when the input does not parse.
fn get_duration(raw_date: &str) -> Option<Duration> {
    let now = Local::now();
    // NOTE(review): `%F` expects "YYYY-MM-DD" but `month()`/`day()` are not
    // zero-padded here — confirm chrono accepts e.g. "2020-1-5".
    let time = match Local.datetime_from_str(
        format!("{}-{}-{} {}", now.year(), now.month(), now.day(), raw_date).as_str(),
        "%F %R",
    ) {
        Ok(date_time) => date_time.time(),
        Err(_) => return None,
    };
    let mut duration = NaiveTime::signed_duration_since(time, now.time());
    if duration.num_seconds() < 0 {
        // Target time already passed today — schedule for tomorrow instead.
        duration = duration + Duration::hours(24);
    }
    Some(duration)
}
| true
|
41cc5b7e32fe10ec5018c73a9fcdc2172ab0fb40
|
Rust
|
benrady/rust-tdd-katas
|
/fizzbuzz/src/fizzlib.rs
|
UTF-8
| 323
| 3.578125
| 4
|
[
"Unlicense"
] |
permissive
|
use std::string::String;
/// Classic FizzBuzz: returns "fizzbuzz" for multiples of 15, "fizz" for
/// multiples of 3, "buzz" for multiples of 5, and the number rendered as a
/// string otherwise.
///
/// Fixes: the original signature used the long-removed `int` primitive and
/// the pre-1.0 `String::from_str` API; replaced with `i32` and
/// `String::from` / `to_string`. The reference parameter is kept so any
/// existing callers passing `&i32` still compile.
pub fn fizzbuzz(number: &i32) -> String {
    if *number % 15 == 0 {
        String::from("fizzbuzz")
    } else if *number % 3 == 0 {
        String::from("fizz")
    } else if *number % 5 == 0 {
        String::from("buzz")
    } else {
        number.to_string()
    }
}
| true
|
20d58d3d6fb962c3d3f4eeca38ce4e53b98d1c5f
|
Rust
|
theseus-os/Theseus
|
/tools/serialize_nano_core/src/main.rs
|
UTF-8
| 1,014
| 2.515625
| 3
|
[
"MIT"
] |
permissive
|
//! Tool that creates a serialized representation of the symbols in the `nano_core` binary.
mod parse;
use crate_metadata_serde::SerializedCrate;
use std::io::Write;
/// Tool entry point: reads the nano_core symbol file named by the first CLI
/// argument, parses it into crate items, and writes the bincode-encoded
/// `SerializedCrate` to stdout.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // The one required CLI argument is the path to the symbol file.
    let path = &std::env::args().nth(1).expect("no path provided");
    let symbol_file = std::fs::read_to_string(path)?;
    let crate_items = parse::parse_nano_core_symbol_file(symbol_file)?;
    // Assemble the serializable crate representation from the parsed items.
    let serialized_crate = SerializedCrate {
        crate_name: "nano_core".to_string(),
        sections: crate_items.sections,
        global_sections: crate_items.global_sections,
        tls_sections: crate_items.tls_sections,
        cls_sections: crate_items.cls_sections,
        data_sections: crate_items.data_sections,
        init_symbols: crate_items.init_symbols,
    };
    let mut stdout = std::io::stdout();
    bincode::serde::encode_into_std_write(
        &serialized_crate,
        &mut stdout,
        bincode::config::standard(),
    )?;
    // Ensure the encoded bytes actually reach stdout before exiting.
    stdout.flush()?;
    Ok(())
}
| true
|
11d070c3c3f36c942c702ead591c6a1888ed6d50
|
Rust
|
pwoolcoc/zmq-tokio
|
/zmq-mio/src/lib.rs
|
UTF-8
| 14,847
| 2.875
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
//! Asynchronous `ØMQ`, a.k.a.`(ZeroMQ)` in `Rust` with `mio`.
//!
//! Run ØMQ sockets that implement `mio::Evented`, as well as non-blocking
//! implementations of `io::Write` and `io::Read`.
//!
//! # Example
//!
//! ```
//! extern crate mio;
//! extern crate zmq;
//! extern crate zmq_mio;
//!
//! use std::io;
//! use mio::{Events, Poll, PollOpt, Ready, Token};
//! use zmq_mio::{Context, Socket};
//!
//! // We use ØMQ's `inproc://` scheme for intelligent and ready-to-use
//! // inter-process communications (IPC).
//! const EXAMPLE_ADDR: &str = "inproc://example_addr";
//! const LISTENER: Token = Token(0);
//! const SENDER: Token = Token(1);
//!
//! // An example of a typical ZMQ-flow, using asynchronous mode.
//! fn main() {
//! // Create the context.
//! let context = Context::new();
//! // Use the context to generate sockets.
//! let listener = context.socket(zmq::PAIR).unwrap();
//! let sender = context.socket(zmq::PAIR).unwrap();
//!
//! // Bind and connect our sockets.
//! let _ = listener.bind(EXAMPLE_ADDR).unwrap();
//! let _ = sender.connect(EXAMPLE_ADDR).unwrap();
//!
//! // Now, for the asynchronous stuff...
//! // First, we setup a `mio::Poll` instance.
//! let poll = Poll::new().unwrap();
//!
//! // Then we register our sockets for polling the events that
//! // interest us.
//! poll.register(&listener, LISTENER, Ready::readable(),
//! PollOpt::edge()).unwrap();
//! poll.register(&sender, SENDER, Ready::writable(),
//! PollOpt::edge()).unwrap();
//!
//! // We setup a loop which will poll our sockets at every turn,
//! // handling the events just the way we want them to be handled.
//! let mut events = Events::with_capacity(1024);
//!
//! // We also setup some variables to control the main loop flow.
//! let mut msg_sent = false;
//! let mut msg_received = false;
//!
//! // will loop until the listener gets a message.
//! while !msg_received {
//! // Poll for our registered events.
//! poll.poll(&mut events, None).unwrap();
//!
//! // Handle each event accordingly...
//! for event in &events {
//! match event.token() {
//! SENDER => {
//! // if the sender is writable and the message hasn't
//! // been sent, then we try to send it. If sending
//! // is not possible because the socket would block,
//! // then we just continue with handling polled events.
//! if event.readiness().is_writable() && !msg_sent {
//! if let Err(e) = sender.send("hello", 0) {
//! if e.kind() == io::ErrorKind::WouldBlock {
//! continue;
//! }
//! panic!("trouble sending message");
//! }
//! msg_sent = true;
//! }
//! }
//! LISTENER => {
//! // if the listener is readable, then we try to receive
//! // it. If reading is not possible because of blocking, then
//! // we continue handling events.
//! let msg = match listener.recv_msg(0) {
//! Ok(m) => m,
//! Err(e) => {
//! if e.kind() == io::ErrorKind::WouldBlock {
//! continue;
//! }
//! panic!("trouble receiving message");
//! }
//! };
//! msg_received = true;
//! }
//! _ => unreachable!(),
//! }
//! }
//! }
//! }
//! ```
#[macro_use]
extern crate log;
extern crate mio;
extern crate zmq;
use std::io;
use std::io::{Read, Write};
use std::fmt;
use std::os::unix::io::RawFd;
use mio::unix::EventedFd;
use mio::{PollOpt, Ready, Token};
/// Wrapper for ØMQ context.
///
/// Create sockets via [`Context::socket`]; `Default` and [`Context::new`]
/// both produce a fresh context.
#[derive(Clone, Default)]
pub struct Context {
    // Wrapper for `zmq::Context`
    inner: zmq::Context,
}
impl fmt::Debug for Context {
    /// Opaque debug form; the wrapped `zmq::Context` exposes nothing
    /// useful to print.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("<zmq_mio::Context>")
    }
}
impl Context {
    /// Create a new `Context` instance. Use the `Context::socket` method
    /// to create sockets that can talk via `inproc://*` addresses.
    pub fn new() -> Self {
        Context {
            inner: zmq::Context::new(),
        }
    }
    /// Create a new `Socket` instance for asynchronous communications.
    ///
    /// Errors from ØMQ are converted into `io::Error`.
    pub fn socket(&self, typ: zmq::SocketType) -> io::Result<Socket> {
        // `?` replaces the deprecated `try!` macro; the `From` conversion
        // applied to the error is identical.
        Ok(Socket::new(self.inner.socket(typ)?))
    }
    /// Try to destroy the underlying context. This is different than the destructor;
    /// the destructor will loop when zmq_ctx_destroy returns EINTR.
    pub fn destroy(&mut self) -> io::Result<()> {
        self.inner.destroy().map_err(|e| e.into())
    }
    /// Get a cloned instance of the underlying `zmq::Context`.
    pub fn get_inner(&self) -> zmq::Context {
        self.inner.clone()
    }
}
// mio integration, should probably be put into its own crate eventually
/// Asynchronous ØMQ socket.
///
/// Wraps a `zmq::Socket` and forces `zmq::DONTWAIT` on every send/receive,
/// so all operations are non-blocking; pair it with `mio` polling via the
/// `Evented` impl below.
pub struct Socket {
    inner: zmq::Socket,
}
impl fmt::Debug for Socket {
    /// Debug form showing the underlying ØMQ socket type.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let kind = self.inner.get_socket_type();
        write!(f, "Socket<{:?}>", kind)
    }
}
impl Socket {
    /// Create a new event-wrapped ØMQ socket. Takes an existing `zmq::Socket`
    /// instance as an only argument.
    pub fn new(socket: zmq::Socket) -> Self {
        Socket { inner: socket }
    }
    /// Returns an `io::Result` with the raw socket file-descriptor.
    pub fn as_raw_fd(&self) -> io::Result<RawFd> {
        // `?` replaces the deprecated `try!` macro; error conversion via
        // `From<zmq::Error> for io::Error` is unchanged.
        let fd = self.inner.get_fd()?;
        trace!("socket raw FD: {}", fd);
        Ok(fd)
    }
    /// Returns a reference to the underlying `zmq::Socket`.
    /// Useful for setting socket options at runtime.
    pub fn get_ref(&self) -> &zmq::Socket {
        &self.inner
    }
    /// Bind the socket to the given address.
    pub fn bind(&self, address: &str) -> io::Result<()> {
        self.inner.bind(address).map_err(|e| e.into())
    }
    /// Connect the socket to the given address.
    pub fn connect(&self, address: &str) -> io::Result<()> {
        self.inner.connect(address).map_err(|e| e.into())
    }
    /// Subscribe this socket to the given `prefix`.
    pub fn set_subscribe(&self, prefix: &[u8]) -> io::Result<()> {
        self.inner.set_subscribe(prefix).map_err(|e| e.into())
    }
    /// Send a message.
    ///
    /// Due to the provided From implementations, this works for
    /// `&[u8]`, `Vec<u8>` and `&str`, as well as `zmq::Message`
    /// itself.
    ///
    /// Any flags set will be combined with `zmq::DONTWAIT`, which is
    /// needed for non-blocking mode. The internal `zmq::Error::EAGAIN`
    /// is automatically translated to `io::ErrorKind::WouldBlock`,
    /// which you MUST handle without failing.
    pub fn send<T>(&self, item: T, flags: i32) -> io::Result<()>
    where
        T: zmq::Sendable,
    {
        // Redundant `let r = ...; r` bindings removed throughout this impl.
        self.inner.send(item, zmq::DONTWAIT | flags).map_err(|e| e.into())
    }
    /// Send a multi-part message. Takes any iterator of valid message
    /// types.
    ///
    /// Due to the provided From implementations, this works for
    /// `&[u8]`, `Vec<u8>` and `&str`, as well as `zmq::Message`
    /// itself.
    ///
    /// Any flags set will be combined with `zmq::DONTWAIT`, which is
    /// needed for non-blocking mode. The internal `zmq::Error::EAGAIN`
    /// is automatically translated to `io::ErrorKind::WouldBlock`,
    /// which you MUST handle without failing.
    pub fn send_multipart<I, T>(&self, iter: I, flags: i32) -> io::Result<()>
    where
        I: IntoIterator<Item = T>,
        T: Into<zmq::Message>,
    {
        self.inner
            .send_multipart(iter, zmq::DONTWAIT | flags)
            .map_err(|e| e.into())
    }
    /// Read a single `zmq::Message` from the socket.
    ///
    /// Any flags set will be combined with `zmq::DONTWAIT`, which is
    /// needed for non-blocking mode. The internal `zmq::Error::EAGAIN`
    /// is automatically translated to `io::ErrorKind::WouldBlock`,
    /// which you MUST handle without failing.
    pub fn recv(&self, msg: &mut zmq::Message, flags: i32) -> io::Result<()> {
        self.inner
            .recv(msg, zmq::DONTWAIT | flags)
            .map_err(|e| e.into())
    }
    /// Receive bytes into a slice. The length passed to `zmq_recv` is the length
    /// of the slice. The return value is the number of bytes in the message,
    /// which may be larger than the length of the slice, indicating truncation.
    ///
    /// Any flags set will be combined with `zmq::DONTWAIT`, which is
    /// needed for non-blocking mode. The internal `zmq::Error::EAGAIN`
    /// is automatically translated to `io::ErrorKind::WouldBlock`,
    /// which you MUST handle without failing.
    pub fn recv_into(&self, msg: &mut [u8], flags: i32) -> io::Result<usize> {
        self.inner
            .recv_into(msg, zmq::DONTWAIT | flags)
            .map_err(|e| e.into())
    }
    /// Receive a message into a fresh `zmq::Message`.
    ///
    /// Any flags set will be combined with `zmq::DONTWAIT`, which is
    /// needed for non-blocking mode. The internal `zmq::Error::EAGAIN`
    /// is automatically translated to `io::ErrorKind::WouldBlock`,
    /// which you MUST handle without failing.
    pub fn recv_msg(&self, flags: i32) -> io::Result<zmq::Message> {
        self.inner
            .recv_msg(zmq::DONTWAIT | flags)
            .map_err(|e| e.into())
    }
    /// Receive a message as a byte vector.
    ///
    /// Any flags set will be combined with `zmq::DONTWAIT`, which is
    /// needed for non-blocking mode. The internal `zmq::Error::EAGAIN`
    /// is automatically translated to `io::ErrorKind::WouldBlock`,
    /// which you MUST handle without failing.
    pub fn recv_bytes(&self, flags: i32) -> io::Result<Vec<u8>> {
        self.inner
            .recv_bytes(zmq::DONTWAIT | flags)
            .map_err(|e| e.into())
    }
    /// Receive a `String` from the socket.
    ///
    /// If the received message is not valid UTF-8, it is returned as the
    /// original Vec in the `Err` part of the inner result.
    ///
    /// Any flags set will be combined with `zmq::DONTWAIT`, which is
    /// needed for non-blocking mode. The internal `zmq::Error::EAGAIN`
    /// is automatically translated to `io::ErrorKind::WouldBlock`,
    /// which you MUST handle without failing.
    pub fn recv_string(&self, flags: i32) -> io::Result<Result<String, Vec<u8>>> {
        self.inner
            .recv_string(zmq::DONTWAIT | flags)
            .map_err(|e| e.into())
    }
    /// Receive a multipart message from the socket.
    ///
    /// Note that this will allocate a new vector for each message part;
    /// for many applications it will be possible to process the different
    /// parts sequentially and reuse allocations that way.
    ///
    /// Any flags set will be combined with `zmq::DONTWAIT`, which is
    /// needed for non-blocking mode. The internal `zmq::Error::EAGAIN`
    /// is automatically translated to `io::ErrorKind::WouldBlock`,
    /// which you MUST handle without failing.
    pub fn recv_multipart(&self, flags: i32) -> io::Result<Vec<Vec<u8>>> {
        self.inner
            .recv_multipart(zmq::DONTWAIT | flags)
            .map_err(|e| e.into())
    }
}
// SAFETY(review): plain ØMQ sockets are not safe for concurrent use; this
// impl should only be relied on for *moving* a `Socket` to another thread,
// never for shared access. Confirm no concurrent use is possible before
// depending on it.
unsafe impl Send for Socket {}
/// This implementation is meant for asynchronous `Read`. It might fail
/// if not handled via polling.
impl Read for Socket {
    /// Non-blocking read: delegates to `recv_into` with no extra flags,
    /// surfacing `WouldBlock` to the caller.
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.recv_into(buf, 0)
    }
}
/// This implementation is meant for asynchronous `Write`. It might fail
/// if not handled via polling.
impl Write for Socket {
    /// Non-blocking write: a ØMQ send is all-or-nothing, so a successful
    /// send counts the entire buffer as written.
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.send(buf, 0)?;
        Ok(buf.len())
    }
    /// No-op: ØMQ delivers each message fully or not at all, so there is
    /// nothing buffered to flush.
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}
impl mio::Evented for Socket {
    /// Registers the socket's raw file descriptor with the `mio` poller.
    fn register(
        &self,
        poll: &mio::Poll,
        token: Token,
        interest: Ready,
        opts: PollOpt,
    ) -> io::Result<()> {
        // `?` replaces the deprecated `try!` macro in all three methods.
        let fd = self.as_raw_fd()?;
        trace!("ZmqSocket::register: fd={}", fd);
        EventedFd(&fd).register(poll, token, interest, opts)
    }
    /// Re-registers the fd with updated interest/options.
    fn reregister(
        &self,
        poll: &mio::Poll,
        token: Token,
        interest: Ready,
        opts: PollOpt,
    ) -> io::Result<()> {
        let fd = self.as_raw_fd()?;
        trace!("ZmqSocket::reregister: fd={}", fd);
        EventedFd(&fd).reregister(poll, token, interest, opts)
    }
    /// Removes the fd from the poller.
    fn deregister(&self, poll: &mio::Poll) -> io::Result<()> {
        let fd = self.as_raw_fd()?;
        trace!("ZmqSocket::deregister: fd={}", fd);
        EventedFd(&fd).deregister(poll)
    }
}
#[cfg(test)]
mod tests {
    use std::io::{Read, Write};
    use zmq;
    use super::*;
    const TEST_STR: &[u8] = b"test-test-one-2-3";
    const TEST_ADDR: &str = "inproc://test";
    const TEST_BUFFER_SIZE: usize = 64;
    // Returns a `Socket` pair ready to talk to each other.
    // Uses the in-process transport so no network setup is needed.
    fn get_async_test_pair() -> (Socket, Socket) {
        let ctx = Context::new();
        let bound = ctx.socket(zmq::PAIR).unwrap();
        let _ = bound.bind(TEST_ADDR).unwrap();
        let connected = ctx.socket(zmq::PAIR).unwrap();
        let _ = connected.connect(TEST_ADDR).unwrap();
        (bound, connected)
    }
    // Round-trips one buffer through the `Write`/`Read` impls over an
    // in-process PAIR connection and checks length and content.
    #[test]
    fn socket_sends_and_receives_a_byte_buffer() {
        let (mut receiver, mut sender) = get_async_test_pair();
        let sent = sender.write(TEST_STR).unwrap();
        assert_eq!(sent, TEST_STR.len());
        let mut buf = vec![0; TEST_BUFFER_SIZE];
        let recvd = receiver.read(&mut buf).unwrap();
        assert_eq!(&buf[..recvd], TEST_STR);
    }
}
| true
|
4116a3204c8a5727dc57c55906315572f77acdee
|
Rust
|
bpglaser/advent
|
/2021/day02_part01/src/main.rs
|
UTF-8
| 767
| 3.171875
| 3
|
[] |
no_license
|
use std::env::args;
use std::error::Error;
use std::fs::read_to_string;
use std::panic;
/// Advent of Code 2021 day 2, part 1: reads the input file named by the
/// first CLI argument, folds every "action amount" line through `do_move`,
/// and prints horizontal position multiplied by depth.
fn main() -> Result<(), Box<dyn Error>> {
    let path = args().skip(1).next().ok_or("not enough args")?;
    let content = read_to_string(&path)?;
    // Parse each line to (action, n) and accumulate the (x, y) position.
    let (x, y) = content.lines().map(parse_line).fold((0, 0), do_move);
    println!("{}", x * y);
    Ok(())
}
/// Splits an input line at its first space into an action word and a
/// numeric amount; panics on malformed lines.
fn parse_line(line: &str) -> (&str, i32) {
    let (action, amount) = line.split_once(' ').expect("no space in line");
    (action, amount.parse().expect("invalid number in line"))
}
/// Applies one submarine command to the current (horizontal, depth)
/// position: "forward" moves right, "down" increases depth, "up"
/// decreases depth. Panics on any other action word.
fn do_move((x, y): (i32, i32), (action, n): (&str, i32)) -> (i32, i32) {
    let (dx, dy) = match action {
        "forward" => (n, 0),
        "down" => (0, n),
        "up" => (0, -n),
        _ => panic!("unknown action: {}", action),
    };
    (x + dx, y + dy)
}
| true
|
d91af54ea4f41316b8b76b09527bb4d1ae34505c
|
Rust
|
rlebre/dicom-rs
|
/object/src/lib.rs
|
UTF-8
| 3,836
| 3.203125
| 3
|
[
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
//! This crate contains a high-level abstraction for reading and manipulating
//! DICOM objects.
//! At this level, objects are comparable to a dictionary of elements,
//! in which some of them can have DICOM objects themselves.
//! The end user should prefer using this abstraction when dealing with DICOM
//! objects.
//!
//! # Examples
//!
//! Loading a file and reading some attributes by their standard alias:
//!
//! ```no_run
//! use dicom_object::open_file;
//! # use dicom_object::Result;
//! # fn foo() -> Result<()> {
//! let obj = open_file("0001.dcm")?;
//! let patient_name = obj.element_by_name("PatientName")?.to_str()?;
//! let modality = obj.element_by_name("Modality")?.to_str()?;
//! # Ok(())
//! # }
//! ```
//!
//! Elements can also be fetched by tag:
//!
//! ```
//! # use dicom_object::{DicomObject, Result, Tag};
//! # fn something<T: DicomObject>(obj: T) -> Result<()> {
//! let e = obj.element(Tag(0x0002, 0x0002))?;
//! # Ok(())
//! # }
//! ```
//!
pub mod file;
pub mod loader;
pub mod mem;
pub mod meta;
pub mod pixeldata;
mod util;
pub use crate::file::{from_reader, open_file};
pub use crate::meta::FileMetaTable;
pub use dicom_core::Tag;
pub use dicom_dictionary_std::StandardDataDictionary;
pub use dicom_parser::error::{Error, Result};
/// The default implementation of a root DICOM object.
///
/// Combines [`RootDicomObject`] (which carries the file meta table) with an
/// in-memory object backed by the [`StandardDataDictionary`].
pub type DefaultDicomObject = RootDicomObject<mem::InMemDicomObject<StandardDataDictionary>>;
use dicom_core::header::Header;
/// Trait type for a DICOM object.
/// This is a high-level abstraction where an object is accessed and
/// manipulated as dictionary of entries indexed by tags, which in
/// turn may contain a DICOM object.
///
/// This trait interface is experimental and prone to sudden changes.
pub trait DicomObject {
    /// The concrete element type returned by the accessor methods.
    type Element: Header;
    /// Retrieve a particular DICOM element by its tag.
    fn element(&self, tag: Tag) -> Result<Self::Element>;
    /// Retrieve a particular DICOM element by its name.
    fn element_by_name(&self, name: &str) -> Result<Self::Element>;
    /// Retrieve the processed meta information table, if available.
    ///
    /// This table will generally not be reachable from children objects
    /// in another object with a valid meta table. As such, it is recommended
    /// for this method to be called at the root of a DICOM object.
    ///
    /// The default implementation returns `None`.
    fn meta(&self) -> Option<&FileMetaTable> {
        None
    }
}
/// A root DICOM object contains additional meta information about the object
/// (such as the DICOM file's meta header).
#[derive(Debug, Clone, PartialEq)]
pub struct RootDicomObject<T> {
    // File meta header table accompanying the data set.
    meta: FileMetaTable,
    // The wrapped object holding the actual data-set elements.
    obj: T,
}
impl<T> RootDicomObject<T> {
    /// Retrieve the processed meta header table.
    ///
    /// Unlike the trait method of the same name, this inherent accessor
    /// always succeeds, since a root object always carries a meta table.
    pub fn meta(&self) -> &FileMetaTable {
        &self.meta
    }
}
// Delegate read access to the wrapped object, so accessors defined on the
// inner type can be called directly on the root object.
impl<T> ::std::ops::Deref for RootDicomObject<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        &self.obj
    }
}
// Mutable counterpart of the `Deref` delegation above.
impl<T> ::std::ops::DerefMut for RootDicomObject<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.obj
    }
}
// `RootDicomObject` implements the object trait by delegating element
// access to the wrapped object while exposing its own meta table.
impl<T> DicomObject for RootDicomObject<T>
where
    T: DicomObject,
{
    type Element = <T as DicomObject>::Element;
    fn element(&self, tag: Tag) -> Result<Self::Element> {
        self.obj.element(tag)
    }
    fn element_by_name(&self, name: &str) -> Result<Self::Element> {
        self.obj.element_by_name(name)
    }
    // Overrides the trait default: the root object always has a meta table.
    fn meta(&self) -> Option<&FileMetaTable> {
        Some(&self.meta)
    }
}
impl<'a, T: 'a> DicomObject for &'a RootDicomObject<T>
where
T: DicomObject,
{
type Element = <T as DicomObject>::Element;
fn element(&self, tag: Tag) -> Result<Self::Element> {
self.obj.element(tag)
}
fn element_by_name(&self, name: &str) -> Result<Self::Element> {
self.obj.element_by_name(name)
}
}
#[cfg(test)]
mod tests {}
| true
|
9a5200333caaee064807b295ed4565cd27f97fbd
|
Rust
|
inda20plusplus/hansing-chess
|
/hansing-chess/src/square.rs
|
UTF-8
| 1,733
| 3.453125
| 3
|
[] |
no_license
|
use std::fmt;
#[derive(Eq, PartialEq, Hash, Debug, Clone, Copy)]
pub struct Square(i32, i32);
impl Square {
    /// A square is valid when both coordinates land on the 8x8 board.
    pub fn is_in_bounds(&self) -> bool {
        (0..8).contains(&self.0) && (0..8).contains(&self.1)
    }
    /// Returns the square displaced by the given offsets, or `None` when
    /// the result would leave the board.
    pub fn offset(&self, rank_offset: i32, file_offset: i32) -> Option<Self> {
        Square::new(self.0 + rank_offset, self.1 + file_offset)
    }
    /// Builds a square, yielding `None` for out-of-board coordinates.
    pub fn new(rank: i32, file: i32) -> Option<Self> {
        Some(Square(rank, file)).filter(Square::is_in_bounds)
    }
    /// Row index, 0-based.
    pub fn rank(&self) -> i32 {
        self.0
    }
    /// Column index, 0-based.
    pub fn file(&self) -> i32 {
        self.1
    }
}
// Column letters in board order: index 0 corresponds to file 'a'.
pub const FILE_SIGN: [char; 8] = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'];
impl fmt::Display for Square {
    // Algebraic notation: file letter followed by the 1-based rank, e.g. "e4".
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}{}", FILE_SIGN[self.1 as usize], self.0 + 1)
    }
}
#[cfg(test)]
mod test_square {
    use super::*;
    // Copies compare equal (Square derives Clone + PartialEq).
    #[test]
    fn eq() {
        let s1 = Square::new(1, 4).unwrap();
        let s2 = s1.clone();
        assert_eq!(s1, s2)
    }
    // Construction fails for any coordinate outside 0..8.
    #[test]
    fn out_of_bounds() {
        let s1 = Square::new(-1, 4);
        let s2 = Square::new(1, 8);
        let s3 = Square::new(6, -1);
        let s4 = Square::new(8, 4);
        assert_eq!(s1, None);
        assert_eq!(s2, None);
        assert_eq!(s3, None);
        assert_eq!(s4, None);
    }
    // Offsets either stay on the board or yield None.
    #[test]
    fn offset() {
        let s = Square::new(4, 4).unwrap();
        let s1 = s.offset(2, 2);
        let s2 = s.offset(7, -7);
        assert_eq!(s1, Some(Square::new(6, 6).unwrap()));
        assert_eq!(s2, None);
    }
}
| true
|
732d7680262e7c225a75265aad7743f1ad18c11e
|
Rust
|
ackintosh/sandbox
|
/rust/threading/src/thread_pool.rs
|
UTF-8
| 1,590
| 2.828125
| 3
|
[] |
no_license
|
use futures::executor::block_on;
use futures::task::SpawnExt;
use futures::FutureExt;
// https://crates.io/crates/futures
// An M:N-model thread pool
// Runs the handle-based demo; the channel-based variant is still a TODO.
#[test]
fn test() {
    handle();
    // TODO
    // tx_rx();
}
// //////////////////////////////////////////////////////////
// Pattern: wait for thread-pool work to finish via join handles
// //////////////////////////////////////////////////////////
fn handle() {
    let thread_pool = futures::executor::ThreadPool::builder()
        .pool_size(5)
        .name_prefix("ackintosh-sandbox-")
        .create()
        .unwrap();
    // The async function executed on the pool.
    async fn print_number(n: u32) {
        println!("print_number: {}", n);
    }
    let mut futures = Vec::new();
    for n in 0..4 {
        futures.push(thread_pool.spawn_with_handle(print_number(n)).unwrap());
    }
    // Block until every spawned future has completed.
    block_on(futures::future::join_all(futures));
}
// //////////////////////////////////////////////////////////
// Pattern: communicating via a channel (not finished yet)
// //////////////////////////////////////////////////////////
// fn tx_rx() {
// let thread_pool = futures::executor::ThreadPool::builder()
// .pool_size(5)
// .name_prefix("ackintosh-sandbox-")
// .create().unwrap();
//
// let (mut sender, receiver) = futures::channel::oneshot::channel::<String>();
//
// thread_pool.spawn_ok(async {
// sender.send("ok".to_owned());
// });
//
// let r = async {
// receiver.await.unwrap();
// }.left_future::<String>();
// }
| true
|
4cfd50409d6dde727ff54dd78657135d79eaa693
|
Rust
|
truchi/lay
|
/src/style/gen/attributes/overline.rs
|
UTF-8
| 903
| 2.921875
| 3
|
[] |
no_license
|
////////////////////////////////////////////////////////////////////////////////
// 🚨🚨🚨🚨🚨🚨🚨🚨 This file is @generated by build script. 🚨🚨🚨🚨🚨🚨🚨🚨 //
// 🚧🚧🚧🚧🚧🚧🚧🚧 ⛔ DO NOT MODIFY! ⛔ 🚧🚧🚧🚧🚧🚧🚧🚧 //
////////////////////////////////////////////////////////////////////////////////
pub use Overline::*;
/// [`Overline`](crate::Overline) (`Overlined`, `ResetOverline`).
///
/// Prints the corresponding CSI to the terminal when `Display`ed.
///
/// `Default`s to `Overline::ResetOverline`, the unsetting CSI.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub enum Overline {
Overlined,
ResetOverline,
}
/// Returns `Overline::ResetOverline`.
impl Default for Overline {
/// Returns `Overline::ResetOverline`.
fn default() -> Self {
Overline::ResetOverline
}
}
| true
|
fe669735c715fecddeefbd8fd6b0a15bd6c10c5a
|
Rust
|
ktalmadge/wasm-tracer
|
/src/ray_tracer/tone/mod.rs
|
UTF-8
| 867
| 2.921875
| 3
|
[] |
no_license
|
use super::color::Color;
// e ^ (1/n SUM( ln( luminance[x][y] + delta ) ) )
/// Computes the log-average (geometric mean) luminance of the image;
/// `delta` offsets each pixel's luminance so `ln(0)` cannot occur.
///
/// Fix: the buffer is only read, so it is now taken by shared slice
/// reference instead of `&mut Vec<Vec<Color>>`. Existing callers holding a
/// `&mut Vec<Vec<Color>>` still compile via implicit reborrow + deref
/// coercion.
pub fn log_average_luminance(
    color_buffer: &[Vec<Color>],
    width: usize,
    height: usize,
    delta: f64,
) -> f64 {
    let mut sum: f64 = 0f64;
    for x in 0..width {
        for y in 0..height {
            sum += (color_buffer[x][y].to_luminance() + delta).ln();
        }
    }
    (sum / (width * height) as f64).exp()
}
/// Applies Reinhard global tone mapping in place: every pixel is scaled by
/// `key_value / log_average_luminance(...)` and then compressed with
/// `c / (c + 1)` per channel.
pub fn reinhard_tone_correction(
    color_buffer: &mut Vec<Vec<Color>>,
    width: usize,
    height: usize,
    key_value: f64,
    delta: f64,
) {
    // One pass over the image to compute the global scale factor.
    let scale_factor: f64 = key_value / log_average_luminance(color_buffer, width, height, delta);
    for x in 0..width {
        for y in 0..height {
            let color: Color = color_buffer[x][y] * scale_factor;
            // Compress highlights: maps [0, inf) into [0, 1).
            color_buffer[x][y] = color / (color + 1f64);
        }
    }
}
| true
|
a0fa47f3d6e838adf70a279dd1648d8dc874c291
|
Rust
|
amarant/hackerrank-rust
|
/src/algorithms/warmup/extra_long_factorials.rs
|
UTF-8
| 464
| 3.109375
| 3
|
[
"MIT"
] |
permissive
|
extern crate num;
use std::io;
use std::io::BufRead;
use num::bigint::{BigUint, ToBigUint};
use num::One;
/// Computes `n!` as an arbitrary-precision integer.
///
/// Implemented iteratively so large `n` cannot overflow the stack (the
/// original recursed once per value of `n`). `big_factorial(0)` and
/// `big_factorial(1)` both return 1, as before.
fn big_factorial(n: usize) -> BigUint {
    let mut acc: BigUint = One::one();
    // Start at 2: multiplying by 1 is a no-op.
    for i in 2..(n + 1) {
        acc = acc * i.to_biguint().unwrap();
    }
    acc
}
/// Reads `n` from the first line of stdin and prints `n!`.
fn main() {
    let stdin = io::stdin();
    // Only the first line is consumed; both the iterator item and the I/O
    // result are unwrapped (panics on empty input).
    let line = stdin.lock().lines().next().unwrap().unwrap();
    let n = line.trim().parse::<usize>().unwrap();
    let res = big_factorial(n);
    println!("{}", res);
}
| true
|
73c1a527fde75011444497037ab7e6277ffc8566
|
Rust
|
yutopp/yterm
|
/yterm_backend/src/terminal.rs
|
UTF-8
| 4,507
| 2.625
| 3
|
[] |
no_license
|
use std::ffi::CString;
use std::os::unix::io::AsRawFd;
use std::sync::Arc;
use crate::pty;
use crate::state::Shared;
use crate::window;
/// Events emitted by the terminal backend.
///
/// NOTE(review): currently only constructed in commented-out code in
/// `parent_process` — confirm whether this type is still needed.
#[derive(Debug)]
pub enum Event {
    /// Raw bytes read from the pty master.
    Terminal(Vec<u8>),
}
/// Owns the pty pair plus shared application state; consumed by
/// `start_thread`, which forks a child shell onto the slave end.
pub struct Terminal {
    // Pty master: the parent's read/write end.
    master: pty::Master,
    // Pty slave: becomes the child's stdin/stdout/stderr.
    slave: pty::Slave,
    shared: Shared,
    state: State,
}
/// Mutable screen state: grid dimensions, font metrics, and the characters
/// received so far.
pub struct State {
    pub columns: i32,
    pub rows: i32,
    pub font_width: i32,
    pub font_height: i32,
    // Flat character buffer; no line structure is kept here.
    pub texts: Vec<char>,
}
impl Terminal {
    /// Opens a fresh pty master/slave pair and seeds default screen state
    /// (40x16 cells, 20x26 px font). No child process is spawned until
    /// `start_thread` is called.
    pub fn new(shared: Shared) -> Self {
        let master = unsafe { pty::Master::open() };
        let slave = unsafe { master.open_slave() };
        Self {
            master,
            slave,
            shared,
            state: State {
                columns: 40,
                rows: 16,
                font_width: 20,
                font_height: 26,
                texts: vec![],
            },
        }
    }
    /// Forks the process: the child execs a shell wired to the pty slave,
    /// the parent spawns an async reader task and returns a `Handle`.
    pub fn start_thread(self, win: Arc<window::Window>) -> Handle {
        self.set_winsize();
        let pid = unsafe { libc::fork() };
        if pid == -1 {
            panic!("failed to fork");
        } else if pid == 0 {
            // child process!
            unsafe { self.into_child_process() };
            unreachable!()
        } else if pid > 0 {
            // parent process
            return self.parent_process(win);
        }
        unreachable!();
    }
    /// Pushes the current column/row counts to the kernel (`TIOCSWINSZ`)
    /// so the child sees the correct terminal dimensions.
    fn set_winsize(&self) {
        let ws = libc::winsize {
            ws_col: self.state.columns as u16,
            ws_row: self.state.rows as u16,
            ws_xpixel: 0,
            ws_ypixel: 0,
        };
        let fd = self.master.handle.as_raw_fd();
        unsafe {
            libc::ioctl(fd, libc::TIOCSWINSZ, &ws); // TODO: error
        }
    }
    /// Child side after `fork`: makes the pty slave the controlling
    /// terminal and stdio, sets terminal-related env vars, then execs a
    /// shell. Never returns (panics on any syscall failure).
    unsafe fn into_child_process(self) {
        drop(self.master);
        // New session so TIOCSCTTY below can acquire a controlling tty.
        if libc::setsid() == -1 {
            panic!("{}", std::io::Error::last_os_error());
        }
        {
            let fd = self.slave.handle.as_raw_fd();
            // Make the slave our controlling terminal and duplicate it
            // onto stdin/stdout/stderr.
            if libc::ioctl(fd, libc::TIOCSCTTY, 0) == -1 {
                panic!("{}", std::io::Error::last_os_error());
            }
            if libc::dup2(fd, libc::STDIN_FILENO) == -1 {
                panic!("{}", std::io::Error::last_os_error());
            }
            if libc::dup2(fd, libc::STDOUT_FILENO) == -1 {
                panic!("{}", std::io::Error::last_os_error());
            }
            if libc::dup2(fd, libc::STDERR_FILENO) == -1 {
                panic!("{}", std::io::Error::last_os_error());
            }
        }
        drop(self.slave);
        // NOTE(review): "term-256color" looks like a typo for
        // "xterm-256color" — confirm against the terminfo database.
        std::env::set_var("TERM", "term-256color");
        std::env::set_var("COLORTERM", "truecolor");
        std::env::set_var("COLUMNS", &self.state.columns.to_string());
        std::env::set_var("LINES", &self.state.rows.to_string());
        // NOTE(review): `sh` conventionally lives at /bin/sh; /usr/bin/sh
        // is distro-specific — verify.
        let path = CString::new("/usr/bin/sh").expect("Failed to CString::new");
        let mut args = vec![];
        // NOTE(review): argv holds only the NULL terminator; execvp
        // conventionally expects argv[0] to be the program name. Verify
        // the target shell tolerates an empty argv.
        args.push(std::ptr::null());
        if libc::execvp(path.as_ptr(), args.as_ptr()) == -1 {
            panic!("{}", std::io::Error::last_os_error());
        }
        unreachable!();
    }
    /// Parent side after `fork`: closes the slave end, clones the master
    /// fd for writing, and spawns an async task pumping pty output into
    /// the window.
    fn parent_process(self, win: Arc<window::Window>) -> Handle {
        drop(self.slave);
        let reader = self.master.handle;
        let writer = reader.try_clone().unwrap();
        self.shared.rt.spawn({
            let win = win.clone();
            let mut reader = tokio::fs::File::from_std(reader);
            async move {
                use tokio::prelude::*;
                const BUFFER_SIZE: usize = 32 * 1024;
                let mut buf = [0u8; BUFFER_SIZE];
                loop {
                    if let Ok(n) = reader.read(&mut buf[..]).await {
                        // NOTE(review): unwrap panics if a multi-byte UTF-8
                        // sequence is split across two reads — confirm.
                        let s = String::from_utf8(buf[..n].into()).unwrap();
                        println!("{:?}", s);
                        win.write(s).await;
                        //tx.send(Event::Terminal(buf[..n].into())).unwrap();
                    }
                }
            }
        });
        Handle {
            win,
            state: self.state,
            writer,
        }
    }
}
/// Parent-side handle returned by `Terminal::start_thread`: the screen
/// state plus the writable end of the pty.
pub struct Handle {
    win: Arc<window::Window>,
    pub state: State,
    // Writable clone of the pty master fd; input sent here reaches the
    // child shell.
    writer: std::fs::File,
}
impl Handle {
    /// Append a character received from the pty to the screen text buffer.
    pub fn recv_char(&mut self, c: char) {
        self.state.texts.push(c)
    }
    /// Encode `c` as UTF-8 and forward it to the child's pty.
    pub fn send_char(&mut self, c: char) {
        use std::io::Write;
        let mut buf = [0; 4];
        let str = c.encode_utf8(&mut buf);
        // Fix: the original used `write` and dropped the result, which can
        // silently lose bytes on a short write. `write_all` retries until
        // the whole (up to 4-byte) sequence is written; failures are
        // logged rather than silently ignored.
        if let Err(e) = self.writer.write_all(str.as_bytes()) {
            eprintln!("failed to write to pty: {}", e);
        }
    }
}
| true
|
75b995dc9e54cd01393e35295f7a49884147057e
|
Rust
|
rustkas/rust-by-example-imp
|
/rust-by-example/fn_/src/capture3.rs
|
UTF-8
| 542
| 2.859375
| 3
|
[
"MIT"
] |
permissive
|
// cargo run -p fn_ --bin capture3
// Despite its name, this invokes `func` 101 times (the original iterated
// over the inclusive range 0..=100).
fn do_twice<F>(mut func: F)
where
    F: FnMut(),
{
    let mut calls = 0;
    while calls <= 100 {
        func();
        calls += 1;
    }
}
fn main() {
    let mut x: usize = 1;
    {
        // The closure mutably borrows `x`; the inner scope ends the borrow.
        let add_two_to_x = || x += 2;
        do_twice(add_two_to_x);
    }
    // Reproduce the same arithmetic inline (1 + 2 * 101) and compare.
    let mut x1 = 1;
    for _ in 0..=100 {
        x1 += 2;
    }
    assert_eq!(x1, x);
    // A second, shadowed `x` squared twice inside its own scope
    // (5 -> 25 -> 625); the result is not asserted.
    let mut x = 5;
    {
        let mut square_x = || x *= x;
        square_x();
        square_x();
    }
}
| true
|
2881444f5ef8221d22d7d089f8a8be7a4e2f453b
|
Rust
|
Rahix/ws2812-spi-rs
|
/src/lib.rs
|
UTF-8
| 2,682
| 3
| 3
|
[] |
no_license
|
#![no_std]
extern crate embedded_hal as hal;
use hal::spi::{FullDuplex, Mode, Phase, Polarity};
use nb;
use nb::block;
/// SPI mode
///
/// Idle-low clock with capture on the first transition (SPI mode 0) —
/// the polarity assumed by the WS2812 bit patterns emitted below.
pub const MODE: Mode = Mode {
    polarity: Polarity::IdleLow,
    phase: Phase::CaptureOnFirstTransition,
};
/// WS2812 ("NeoPixel") driver that bit-bangs the LED protocol over an SPI
/// peripheral: each LED bit is encoded as three SPI bits (see `write_byte`).
pub struct Ws2812<SPI> {
    spi: SPI,
}
/// RGB
pub type Color = (u8, u8, u8);
impl<SPI, E> Ws2812<SPI>
where
    SPI: FullDuplex<u8, Error = E>,
{
    /// The SPI bus should run with 3 Mhz, otherwise this won't work
    /// Please ensure that the mcu is pretty fast, otherwise weird timing
    /// issues will occurr
    pub fn new(spi: SPI) -> Ws2812<SPI> {
        Self { spi }
    }
    /// Writes a full strip update: one `Color` per LED, followed by a run
    /// of zero bytes acting as the latch/reset gap.
    pub fn write<'a, T>(&mut self, iterator: T) -> Result<(), E>
    where
        T: Iterator<Item = Color>,
    {
        for item in iterator {
            // Channels go out as .1, .0, .2 — i.e. G, R, B given the RGB
            // `Color` tuple, matching the WS2812 wire order.
            self.write_byte(item.1)?;
            self.write_byte(item.0)?;
            self.write_byte(item.2)?;
        }
        // 20 zero bytes keep the line low afterwards — presumably sized to
        // cover the WS2812 reset period at 3 MHz; confirm vs. datasheet.
        for _ in 0..20 {
            block!(self.spi.send(0))?;
            self.spi.read().ok();
        }
        Ok(())
    }
    /// Encodes one color byte as 24 SPI bits: each LED bit becomes a 3-bit
    /// pattern (`0b110` = 1, `0b100` = 0), shifted out MSB-first over
    /// three 8-bit SPI transfers.
    pub fn write_byte(&mut self, mut data: u8) -> Result<(), E> {
        let mut serial_bits: u32 = 0;
        // Accumulate the first 3 LED bits (9 SPI bits).
        for _ in 0..3 {
            let bit = data & 0x80;
            let pattern = if bit == 0x80 { 0b110 } else { 0b100 };
            serial_bits = pattern | (serial_bits << 3);
            data <<= 1;
        }
        // First SPI byte: top 8 of the 9 accumulated bits.
        block!(self.spi.send((serial_bits >> 1) as u8))?;
        // Split this up to have a bit more lenient timing
        for _ in 3..8 {
            let bit = data & 0x80;
            let pattern = if bit == 0x80 { 0b110 } else { 0b100 };
            serial_bits = pattern | (serial_bits << 3);
            data <<= 1;
        }
        // Some implementations (stm32f0xx-hal) want a matching read
        // We don't want to block so we just hope it's ok this way
        self.spi.read().ok();
        block!(self.spi.send((serial_bits >> 8) as u8))?;
        self.spi.read().ok();
        block!(self.spi.send(serial_bits as u8))?;
        self.spi.read().ok();
        Ok(())
    }
}
/// Iterator adapter that scales every color it yields by a fixed
/// brightness factor.
pub struct Brightness<I> {
    iter: I,
    brightness: u8,
}
impl<'a, I> Iterator for Brightness<I>
where
    I: Iterator<Item = Color>,
{
    type Item = Color;
    fn next(&mut self) -> Option<Color> {
        self.iter.next().map(|a| {
            (
                // Each channel is scaled by brightness/256. Note that a
                // brightness of 255 therefore yields at most 254; "full
                // brightness" means not using this adapter at all.
                (a.0 as u32 * self.brightness as u32 / 256) as u8,
                (a.1 as u32 * self.brightness as u32 / 256) as u8,
                (a.2 as u32 * self.brightness as u32 / 256) as u8,
            )
        })
    }
}
/// Wraps a color iterator so that every item is dimmed to
/// `brightness / 256` of its original value.
pub fn brightness<I>(iter: I, brightness: u8) -> Brightness<I>
where
    I: Iterator<Item = Color>,
{
    Brightness { iter, brightness }
}
| true
|
7b2511610617e254f32594f8c687047ea5f1b340
|
Rust
|
marc47marc47/leetcode-cn
|
/63unique-paths-ii/uniquepath/src/main.rs
|
UTF-8
| 975
| 2.984375
| 3
|
[] |
no_license
|
/// Binary entry point; the actual solution lives on `Solution`.
fn main() {
    println!("Hello, world!");
}
pub struct Solution {}

impl Solution {
    /// LeetCode 63: counts the distinct right/down paths from the top-left
    /// to the bottom-right corner of a grid, where cells equal to 1 are
    /// obstacles that cannot be entered.
    ///
    /// Uses a single rolling row of counts: after processing row `r`,
    /// `dp[c]` holds the number of paths reaching cell `(r, c)`.
    pub fn unique_paths_with_obstacles(obstacle_grid: Vec<Vec<i32>>) -> i32 {
        let rows = obstacle_grid.len();
        if rows == 0 {
            return 0;
        }
        let cols = obstacle_grid[0].len();
        if cols == 0 {
            return 0;
        }
        // No path exists when either endpoint is blocked.
        if obstacle_grid[0][0] == 1 || obstacle_grid[rows - 1][cols - 1] == 1 {
            return 0;
        }
        let mut dp = vec![0i32; cols];
        dp[0] = 1; // exactly one way to stand on the (unblocked) start cell
        for row in &obstacle_grid {
            for col in 0..cols {
                if row[col] == 1 {
                    // Obstacles terminate every path through this cell.
                    dp[col] = 0;
                } else if col > 0 {
                    // Paths from above (old dp[col]) plus from the left.
                    dp[col] += dp[col - 1];
                }
            }
        }
        dp[cols - 1]
    }
}
| true
|
8450c0cb500e5212f5149b438de36f3a1978b602
|
Rust
|
Fantom-foundation/libnode-membership
|
/src/hash.rs
|
UTF-8
| 1,371
| 3.1875
| 3
|
[
"MIT"
] |
permissive
|
use bincode;
use failure::Fail;
use serde::{Deserialize, Serialize};
use sha3::{digest::generic_array::transmute, Digest, Sha3_256};
/// Type of hash commonly used within the library.
///
/// Newtype around the raw 32-byte SHA3-256 digest.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
pub struct Hash(pub [u8; 32]);
/// Hashing errors.
#[derive(Debug, Fail)]
pub enum Error {
    /// A serialization error in `compute_hash`.
    #[fail(display = "Serialization error: {}", _0)]
    ComputeHashSerialize(bincode::Error),
}
/// Computes the hash of serializable data.
///
/// The value is serialized with `bincode`, then digested with SHA3-256.
///
/// # Errors
/// Returns `Error::ComputeHashSerialize` if serialization fails.
pub fn compute_hash<B: Serialize>(b: &B) -> Result<Hash, Error> {
    let mut hasher = Sha3_256::new();
    let ser = bincode::serialize(b).map_err(Error::ComputeHashSerialize)?;
    hasher.input(ser);
    let r = hasher.result();
    // SAFETY(review): transmutes the digest's GenericArray into [u8; 32];
    // sound only while SHA3-256's output length stays exactly 32 bytes.
    Ok(Hash(unsafe { transmute(r) }))
}
#[cfg(test)]
mod tests {
    use super::compute_hash;
    use sha3::{Digest, Sha3_256};
    // `compute_hash` must agree with hashing the bincode serialization
    // directly with Sha3_256.
    #[test]
    fn compute_hash_works() {
        let b = "Hash me";
        // Compute the hash using the function.
        let hash1 = compute_hash(&b).unwrap();
        // Compute the same hash inline.
        let mut hasher = Sha3_256::new();
        let ser = bincode::serialize(b).unwrap();
        hasher.input(ser);
        let hash2 = hasher.result();
        // The two hashes should be the same.
        assert_eq!(hash1.0, hash2.as_slice());
    }
}
| true
|
3d2e48407b3706992376b6b938a1e53ab8aadc4e
|
Rust
|
amol9/matasano
|
/src/set2/cbcadmin.rs
|
UTF-8
| 3,281
| 3.03125
| 3
|
[
"MIT"
] |
permissive
|
use std::collections::HashMap;
use common::{err, challenge, hex, ascii};
use common::cipher::{aes, key};
/// Challenge registration consumed by the harness.
pub static info: challenge::Info = challenge::Info {
    no: 16,
    title: "CBC bitflipping attacks",
    help: "",
    execute_fn: interactive
};
// Characters percent-encoded before being embedded in the cookie string,
// so user data cannot inject literal `;key=value` pairs.
const escape_chars: [char; 3] = [';', '=', ' '];
/// Oracle that encrypts attacker-influenced cookies under a fixed random
/// key and later checks decrypted cookies for an `admin=true` field.
pub struct AuthBox {
    key: Vec<u8>,
    mode: aes::Mode,
    escape_map: HashMap<String, String>
}
impl AuthBox {
    /// Builds the oracle: a random AES-128-CBC key plus a percent-encoding
    /// table ("%XY", uppercase hex) for each character in `escape_chars`.
    fn new() -> Result<Self, err::Error> {
        let mode = aes::cbc_128_pkcs7;
        let mut escape_map = HashMap::new();
        for c in escape_chars.iter() {
            let mut chex = String::from("%");
            chex.push(try!(hex::u8_to_hex_char::<hex::Upper>(*c as u8 >> 4)));
            chex.push(try!(hex::u8_to_hex_char::<hex::Upper>(*c as u8 & 0xF)));
            escape_map.insert(c.to_string(), chex);
        }
        Ok(AuthBox {
            key: try!(key::random(mode.blocksize.unwrap())),
            mode: mode,
            escape_map: escape_map
        })
    }
    /// Decrypts a submitted cookie and grants "admin" iff the plaintext
    /// contains the literal `;admin=true` field.
    fn authenticate(&self, cipher: &Vec<u8>) -> Result<&str, err::Error> {
        let plain_raw = try!(aes::decrypt(&cipher, &self.key, &self.mode));
        let plain_str = rts!(&plain_raw);
        println!("decrypted: {}", plain_str);
        let role = match plain_str.find(";admin=true") {
            Some(_) => "admin",
            None => "other"
        };
        Ok(role)
    }
    /// Escapes the three fields, assembles the cookie string, and returns
    /// its encryption under the secret key.
    fn submit(&self, comment1: &str, userdata: &str, comment2: &str) -> Result<Vec<u8>, err::Error> {
        let raw = raw!(strjoin!("comment1=", &self.escape(&comment1),
                                ";userdata=", &self.escape(&userdata),
                                ";comment2=", &self.escape(&comment2)).as_ref());
        println!("submitted: {}", rts!(&raw));
        Ok(try!(aes::encrypt(&raw, &self.key, &self.mode)))
    }
    /// Percent-encodes every metacharacter so user data cannot smuggle in
    /// a real `;admin=true` pair directly.
    fn escape(&self, input: &str) -> String {
        let mut output = String::from(input);
        for (c, r) in &self.escape_map {
            output = output.replace(c, &r);
        }
        output
    }
}
/// Executes the CBC bit-flipping attack against the oracle, returning
/// whether admin access was obtained.
pub fn auth_as_admin(authbox: &AuthBox) -> Result<bool, err::Error> {
    let blocksize = 16;
    let comment1 = "cooking MCs";
    let comment2 = " like a pound of bacon";
    // Prefix length after escaping: the single space in comment1 expands
    // to "%20", hence the +2.
    let len1 = strjoin!("comment1=", comment1, ";userdata=").len() + 2;
    // Pad to a block boundary plus one extra sacrificial block; the block
    // that gets garbled by the bit flips falls entirely inside this padding.
    let padsize = blocksize - (len1 % blocksize) + blocksize;
    let padstr = strn!('A', padsize);
    // Submit ';' and '=' XOR-ed with 1 (':' and '<') so they survive the
    // escaping; the ciphertext flips below turn them back.
    let userdata = strjoin!(&padstr, &chr!(';' as u8 ^ 1).to_string(), "admin", &chr!('=' as u8 ^ 1).to_string(), "true");
    let flip_idx1 = len1 + padsize;
    let flip_idx2 = flip_idx1 + "admin=".len();
    let mut cipher = try!(authbox.submit(&comment1, &userdata, &comment2));
    // NOTE(review): these indices equal the targets' *plaintext* offsets,
    // which only lines up if the IV is prepended to the ciphertext --
    // confirm against the aes module's encrypt/decrypt layout.
    cipher[flip_idx1] ^= 1;
    cipher[flip_idx2] ^= 1;
    match authbox.authenticate(&cipher) {
        Ok("admin") => { println!("admin access granted !!"); Ok(true) },
        _ => { println!("admin access denied :("); Ok(false) }
    }
}
/// Constructs a fresh oracle with a random key.
pub fn init_authbox() -> Result<AuthBox, err::Error> {
    Ok(try!(AuthBox::new()))
}
/// Challenge entry point: build the oracle and run the attack.
pub fn interactive() -> err::ExitCode {
    let authbox = rtry!(init_authbox(), exit_err!());
    rtry!(auth_as_admin(&authbox), exit_err!());
    exit_ok!()
}
| true
|
dd6c0dd9978876e470d7ecb1511254984cd2e0e7
|
Rust
|
KilianVounckx/rt1w
|
/src/material/metal.rs
|
UTF-8
| 867
| 2.78125
| 3
|
[] |
no_license
|
use rand::rngs::ThreadRng;
use super::Material;
use crate::ray::Ray;
use crate::shape::HitRecord;
use crate::vec3::{Color, Vec3};
/// Reflective material with configurable fuzziness (0.0 = perfect mirror).
pub struct Metal {
    color: Color,
    fuzz: f64,
}
impl Metal {
    /// Creates a metal with the given albedo and fuzz factor.
    /// NOTE(review): `fuzz` is not clamped here; values > 1.0 are passed
    /// through as-is -- confirm call sites keep it in [0, 1].
    pub fn new(color: Color, fuzz: f64) -> Self {
        Self { color, fuzz }
    }
}
impl Default for Metal {
    /// Black, perfectly smooth metal.
    fn default() -> Self {
        Self {
            color: Color::default(),
            fuzz: 0.0,
        }
    }
}
impl Material for Metal {
    /// Mirror-reflects the incoming ray about the hit normal, perturbs the
    /// direction by a random in-sphere offset scaled with `fuzz`, and
    /// attenuates by the metal's own color.
    ///
    /// NOTE(review): always returns `Some`, even when the fuzzed direction
    /// points back into the surface -- confirm the caller rejects such rays.
    fn scatter(&self, ray: &Ray, rec: &HitRecord, rng: &mut ThreadRng) -> Option<(Ray, Color)> {
        let reflected = ray.direction().reflect(rec.normal());
        let scattered = Ray::new(
            rec.point(),
            reflected + self.fuzz * Vec3::rand_in_unit_sphere(rng),
            ray.time(),
        );
        let attenuation = self.color;
        Some((scattered, attenuation))
    }
}
| true
|
d5493059ea3c80acd09652b2ce38a29b306bff38
|
Rust
|
cpralea/xlang
|
/xlc/src/main.rs
|
UTF-8
| 4,791
| 2.734375
| 3
|
[
"MIT"
] |
permissive
|
extern crate argparse;
extern crate itertools;
#[macro_use]
extern crate maplit;
extern crate ref_eq;
mod common;
mod analyzer;
mod ast;
mod cdata;
mod config;
mod dumper;
mod emitter;
#[macro_use]
mod io;
mod parser;
mod tokenizer;
use std::process;
/// Compiler driver: runs the pipeline source -> tokens -> AST -> compiler
/// data -> LLVM IR -> output file; each stage exits the process on error.
fn main() {
    let config = parse_cmd_line();
    let source = load_source(&config);
    let tokens = tokenize(&source, &config);
    let node = parse(&tokens, &source, &config);
    let cdata = analyze(&node, &source, &config);
    let code = emit_llvm(&cdata, &config);
    write_code(&code, &config);
}
/// Writes the generated code to the configured output path.
/// Exits with status 255 if the file cannot be created or written.
fn write_code(code: &String, config: &config::Configuration) {
    if config.no_output() {
        return;
    }
    let ll = config.output();
    let result = io::Destination::to_file(&ll);
    if let Err(_) = result {
        error!("Could not create output file '{}'.", ll);
        process::exit(255);
    }
    let result = result.ok().unwrap().write(&code);
    if let Err(_) = result {
        error!("Could not write output file '{}'.", ll);
        process::exit(255);
    }
}
/// Lowers analyzed compiler data to LLVM IR text; emission itself is
/// expected to be infallible (asserted below).
fn emit_llvm<'a>(block: &cdata::Block<'a>, config: &config::Configuration) -> String {
    let status = emitter::emit_llvm(block);
    let code = status.result;
    if config.verbose() {
        dump_code(&code);
    }
    assert!(status.error.is_none());
    code
}
/// Semantic analysis stage: AST -> compiler data.
/// Dumps the result when verbose; prints the error and exits on failure.
fn analyze<'a>(
    node: &'a ast::Node<'a>,
    source: &io::Source,
    config: &config::Configuration,
) -> cdata::Block<'a> {
    let status = analyzer::analyze(node);
    let block = status.result;
    if config.verbose() {
        dump_cdata(&block);
    }
    if let Some(ref error) = status.error {
        print_error(source, error);
        process::exit(255);
    }
    block
}
/// Parsing stage: tokens -> AST. Same verbose/exit-on-error protocol.
fn parse<'a>(
    tokens: &'a ast::Tokens,
    source: &io::Source,
    config: &config::Configuration,
) -> ast::Node<'a> {
    let status = parser::parse(tokens);
    let node = status.result;
    if config.verbose() {
        dump_node(&node);
    }
    if let Some(ref error) = status.error {
        print_error(source, error);
        process::exit(255);
    }
    node
}
/// Lexing stage: source -> tokens. Same verbose/exit-on-error protocol.
fn tokenize(source: &io::Source, config: &config::Configuration) -> ast::Tokens {
    let status = tokenizer::tokenize(source);
    let tokens = status.result;
    if config.verbose() {
        dump_tokens(&tokens);
    }
    if let Some(ref error) = status.error {
        print_error(source, error);
        process::exit(255);
    }
    tokens
}
/// Reads the input file named in the configuration; exits 255 if it
/// cannot be read.
fn load_source(config: &config::Configuration) -> io::Source {
    let file = config.file();
    let result = io::Source::from_file(file);
    if let Err(_) = result {
        error!("Could not read source file '{}'.", file);
        process::exit(255);
    }
    result.ok().unwrap()
}
/// Parses command-line arguments into a configuration; parsing itself is
/// expected to be infallible (asserted below).
fn parse_cmd_line() -> config::Configuration {
    let status = config::parse_cmd_line();
    let config = status.result;
    if config.verbose() {
        dump_config(&config);
    }
    assert!(status.error.is_none());
    config
}
/// Prints a compiler error, including the offending source line and a
/// caret marker when the error carries a location.
fn print_error(source: &io::Source, error: &common::Error) {
    if let Some(location) = error.location {
        if let Some(line) = source.get_line(location.line) {
            println!("{}", line);
            // Column is 1-based, so pad with column-1 spaces before '^'.
            println!("{}^", common::take(location.column - 1, " "));
        }
    }
    error!("{}", error);
}
/// Debug-dumps the emitted code, one indented line at a time.
fn dump_code(code: &String) {
    debug!("Code:");
    if !code.is_empty() {
        for line in code.split(common::NL) {
            debug!("{}{}", common::take(1, common::TAB), line);
        }
    } else {
        debug!("{}N/A", common::take(1, common::TAB));
    }
}
/// Debug-dumps a compiler-data block: its parent AST node and its
/// execution steps.
fn dump_cdata(block: &cdata::Block) {
    debug!("Compiler data:");
    debug!("{}Parent:", common::take(1, common::TAB));
    debug!("{}{}", common::take(2, common::TAB), dumper::dump_bare_node(block.node));
    debug!("{}Execution steps:", common::take(1, common::TAB));
    if !block.steps.is_empty() {
        for step in block.steps.iter() {
            debug!("{}{}", common::take(2, common::TAB), dumper::dump_step(step));
        }
    } else {
        debug!("{}N/A", common::take(2, common::TAB));
    }
}
/// Debug-dumps the parsed AST.
fn dump_node(node: &ast::Node) {
    debug!("Parsed program:");
    for line in dumper::dump_node(node).split(common::NL) {
        debug!("{}{}", common::take(1, common::TAB), line);
    }
}
/// Debug-dumps the token stream.
fn dump_tokens(tokens: &ast::Tokens) {
    debug!("Parsed tokens:");
    if !tokens.is_empty() {
        for token in tokens.iter() {
            debug!("{}{}", common::take(1, common::TAB), dumper::dump_token(token));
        }
    } else {
        debug!("{}N/A", common::take(1, common::TAB));
    }
}
/// Debug-dumps the parsed configuration.
fn dump_config(config: &config::Configuration) {
    debug!("Configuration:");
    for line in dumper::dump_config(config).split(common::NL) {
        debug!("{}{}", common::take(1, common::TAB), line);
    }
}
| true
|
69842c0302c27a5d455be2875e5a8f15ec6395fa
|
Rust
|
Ethan826/connect-four
|
/src/game.rs
|
UTF-8
| 5,383
| 3.546875
| 4
|
[] |
no_license
|
use super::{Dimensions, GameError, Space};
use std::fmt;
/// Connect-four style board plus the win condition it is played under.
#[derive(Debug)]
pub struct Game {
    state: Vec<Vec<Space>>,
    number_for_win: usize,
    dimensions: Dimensions,
}
/// The two sides of the game, from the engine's point of view.
#[derive(Debug, Copy, Clone)]
pub enum Player {
    AI,
    Opponent,
}
/// Longest run length for each player along one direction.
#[derive(Debug, Copy, Clone)]
struct RunData {
    ai: usize,
    opponent: usize,
}
/// Longest runs along every direction that can produce a win.
struct LongestRuns {
    horizontal: RunData,
    vertical: RunData,
    diagonal: RunData,
}
impl LongestRuns {
    /// Determines the winner implied by these runs, if any.
    ///
    /// A player wins when any direction's run reaches `number_for_win`;
    /// both players "winning" at once is an invalid game state.
    fn winner(&self, number_for_win: usize) -> Result<Option<Player>, GameError> {
        let runs = [self.horizontal, self.vertical, self.diagonal];
        let ai_wins = runs.iter().any(|r| r.ai >= number_for_win);
        let opponent_wins = runs.iter().any(|r| r.opponent >= number_for_win);
        match (ai_wins, opponent_wins) {
            (false, false) => Ok(None),
            (true, false) => Ok(Some(Player::AI)),
            (false, true) => Ok(Some(Player::Opponent)),
            (true, true) => Err(GameError::InvalidGameState),
        }
    }
}
impl Game {
    /// Return a new `Game` given a state for that game and the number of pieces needed to
    /// constitute a win (the "four" part of connect four).
    ///
    /// # Errors
    ///
    /// Returns an error if the `number_for_win` argument is larger than any dimension
    /// (including the degenerate empty state, whose dimensions are zero).
    pub fn new(state: Vec<Vec<Space>>, number_for_win: usize) -> Result<Self, GameError> {
        let dimensions = Dimensions {
            height: state.len(),
            // `first()` instead of `state[0]` so an empty state yields a
            // zero-width board (rejected below) rather than a panic.
            width: state.first().map_or(0, |row| row.len()),
        };
        if dimensions.width < number_for_win && dimensions.height < number_for_win {
            Err(GameError::InvalidGameDefinition)
        } else {
            Ok(Game {
                number_for_win,
                dimensions,
                state,
            })
        }
    }
    /// Return a new empty `Game` given dimensions for that game and the number of pieces
    /// needed to constitute a win (the "four" part of connect four).
    ///
    /// # Errors
    ///
    /// Returns an error if the `number_for_win` argument is larger than any dimension.
    pub fn new_empty(dimensions: Dimensions, number_for_win: usize) -> Result<Self, GameError> {
        Game::new(
            vec![vec![Space::Empty; dimensions.width]; dimensions.height],
            number_for_win,
        )
    }
    /// Fulfills the roles of determining if we are at a leaf node in the game graph, and if
    /// so what the value of the position is.
    ///
    /// `None` means "not a leaf, keep searching". `Some` is returned either because the
    /// game is decided (win = `isize::max_value()`, loss = `isize::min_value()`) or because
    /// `depth` reached 0, in which case a heuristic score is returned instead.
    pub fn evaluate(&self, depth: usize) -> Result<Option<isize>, GameError> {
        let longest_runs = self.find_longest_runs();
        let winner = self.winner(&longest_runs)?;
        match winner {
            Some(Player::AI) => Ok(Some(isize::max_value())),
            Some(Player::Opponent) => Ok(Some(isize::min_value())),
            None => {
                if depth == 0 {
                    Ok(Some(self.score_incomplete_game(&longest_runs)))
                } else {
                    Ok(None)
                }
            }
        }
    }
    /// Computes the longest run per direction for each player.
    /// TODO: still a stub returning fixed values; diagonals in particular
    /// need a real computation.
    fn find_longest_runs(&self) -> LongestRuns {
        // We need some way of computing the diagonals.
        LongestRuns {
            vertical: RunData { ai: 0, opponent: 1 },
            horizontal: RunData { ai: 0, opponent: 1 },
            diagonal: RunData { ai: 0, opponent: 1 },
        }
    }
    /// Delegates the win decision to the runs, using this game's threshold.
    fn winner(&self, longest_runs: &LongestRuns) -> Result<Option<Player>, GameError> {
        longest_runs.winner(self.number_for_win)
    }
    /// Heuristic score for a non-terminal position.
    /// TODO: placeholder constant until the real heuristic is implemented;
    /// the parameter is underscored to document that it is not yet used.
    fn score_incomplete_game(&self, _longest_runs: &LongestRuns) -> isize {
        100
    }
}
impl fmt::Display for Game {
    /// Renders the board row by row, separating spaces with single blanks.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        for row in &self.state {
            writeln!(f)?;
            for (i, space) in row.iter().enumerate() {
                // Print a separator before every column except the first.
                // NOTE(review): `i < self.dimensions.width` is always true
                // unless a row is longer than the first row -- confirm
                // whether ragged states are possible here.
                if i > 0 && i < self.dimensions.width {
                    write!(f, " ")?;
                }
                write!(f, "{}", space)?;
            }
        }
        Ok(())
    }
}
// =================================================================================================
// Tests
// =================================================================================================
// A standard 7x6 board with connect-4 is a valid definition.
#[test]
fn test_new_empty_game_valid() {
    assert!(Game::new_empty(
        Dimensions {
            width: 7,
            height: 6,
        },
        4,
    )
    .is_ok());
}
// A 3x3 board can never contain a run of 4 in any direction, so
// construction must fail.
#[test]
fn test_new_empty_game_invalid() {
    assert!(Game::new_empty(
        Dimensions {
            width: 3,
            height: 3,
        },
        4,
    )
    .is_err());
}
| true
|
e52efbf9771f412929bed966716faf658151cdb1
|
Rust
|
rico22/ev3dev-lang-rs
|
/src/device.rs
|
UTF-8
| 7,491
| 2.609375
| 3
|
[] |
no_license
|
use std::collections::HashSet;
use std::collections::HashMap;
use std::fs;
use std::fs::File;
use std::fs::OpenOptions;
use std::io::Read;
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use std::ops::Deref;
use std::io::{Result, Error, ErrorKind};
/// Attribute values that count as a match.
pub type Matches = HashSet<String>;
/// Map from sysfs attribute name to the set of acceptable values.
pub type AttributeMatches = HashMap<String, Matches>;
/// Typed wrapper around an ev3dev input-port name ("" = autodetect).
pub struct InputPort(pub &'static str);
pub static INPUT_AUTO: InputPort = InputPort("");
/// Typed wrapper around an ev3dev output-port name ("" = autodetect).
pub struct OutputPort(&'static str);
pub static OUTPUT_AUTO: OutputPort = OutputPort("");
// Port names differ between a stock EV3 and a BrickPi, where ports are
// addressed through the Pi's serial device; selected by cargo feature.
#[cfg(not(feature="brickpi"))]
mod ports {
    use super::{InputPort, OutputPort};
    pub static INPUT_1: InputPort = InputPort("in1");
    pub static INPUT_2: InputPort = InputPort("in2");
    pub static INPUT_3: InputPort = InputPort("in3");
    pub static INPUT_4: InputPort = InputPort("in4");
    pub static OUTPUT_A: OutputPort = OutputPort("outA");
    pub static OUTPUT_B: OutputPort = OutputPort("outB");
    pub static OUTPUT_C: OutputPort = OutputPort("outC");
    pub static OUTPUT_D: OutputPort = OutputPort("outD");
}
#[cfg(feature="brickpi")]
mod ports {
    use super::{InputPort, OutputPort};
    pub static INPUT_1: InputPort = InputPort("ttyAMA0:in1");
    pub static INPUT_2: InputPort = InputPort("ttyAMA0:in2");
    pub static INPUT_3: InputPort = InputPort("ttyAMA0:in3");
    pub static INPUT_4: InputPort = InputPort("ttyAMA0:in4");
    pub static OUTPUT_A: OutputPort = OutputPort("ttyAMA0:outA");
    pub static OUTPUT_B: OutputPort = OutputPort("ttyAMA0:outB");
    pub static OUTPUT_C: OutputPort = OutputPort("ttyAMA0:outC");
    pub static OUTPUT_D: OutputPort = OutputPort("ttyAMA0:outD");
}
pub use self::ports::*;
/// Anything that can report whether it is attached to a real device.
pub trait Connected {
    fn connected(&self) -> bool;
}
/// Anything that exposes the numeric index of its sysfs device directory.
pub trait DeviceIndex {
    fn device_index(&self) -> Result<isize>;
}
/// Handle to one sysfs device directory plus its cached device index.
pub struct Device {
    path: PathBuf,
    device_index: Option<isize>,
}
impl Device {
    /// Creates a disconnected device (empty path, no cached index).
    pub fn new() -> Device {
        Device {
            path: PathBuf::new(),
            device_index: None,
        }
    }
    /// Reads a sysfs attribute file and returns its trimmed contents.
    pub fn get_attr_string(&self, name: &str) -> Result<String> {
        // assert!(self.path.deref().is_dir());
        let mut s = String::new();
        try!(File::open(&self.path.join(name))
            .and_then(|mut f| f.read_to_string(&mut s)));
        Ok(s.trim().to_owned())
    }
    /// Writes `value` to a sysfs attribute file (opened in append mode, as
    /// sysfs attributes expect a single write).
    pub fn set_attr_string(&self, name: &str, value: &str) -> Result<()> {
        // assert!(self.path.is_dir());
        OpenOptions::new()
            .append(true)
            .write(true)
            .open(&self.path.join(name))
            .and_then(|mut f| write!(&mut f, "{}", value))
    }
    /// Reads an attribute and parses it as an integer.
    /// NOTE(review): `.unwrap()` panics if the attribute text is not a
    /// number -- consider mapping the parse error into the io::Result.
    pub fn get_attr_int(&self, name: &str) -> Result<isize> {
        let text = try!(self.get_attr_string(name));
        Ok(text.parse::<isize>().unwrap())
    }
    /// Writes an integer attribute as its decimal string.
    pub fn set_attr_int(&self, name: &str, value: isize) -> Result<()> {
        self.set_attr_string(name, &format!("{}", value))
    }
    /// Reads a space-separated attribute into a set of strings.
    pub fn get_attr_set(&self, name: &str) -> Result<HashSet<String>> {
        let text = try!(self.get_attr_string(name));
        let mut set = HashSet::<String>::new();
        for x in text.trim().split(' ') {
            set.insert(x.to_owned());
        }
        Ok(set)
    }
    /// Extracts the numeric suffix of the device directory name
    /// (e.g. "sensor0" -> 0) by stripping the non-digit prefix.
    fn parse_device_index(&self) -> isize {
        self.path
            .deref()
            .file_name()
            .map(|e| {
                e.to_str()
                    .expect("ZOMG!")
                    .trim_left_matches(|c: char| !c.is_digit(10u32))
            })
            .unwrap()
            .parse::<isize>()
            .unwrap()
    }
    /// Returns the cached device index, computing it on first use.
    fn get_device_index(&mut self) -> isize {
        if self.device_index.is_none() {
            self.device_index = Some(self.parse_device_index());
        }
        self.device_index.unwrap()
    }
    /// Scans `dir` for an entry whose name starts with `pattern` and whose
    /// attributes satisfy `match_spec`, recording its path on success.
    ///
    /// NOTE(review): this routine looks buggy and should be audited before
    /// reuse: (1) `is_match` is reset to `Some(())` on every iteration, so
    /// a directory with no matching entries can still report success;
    /// (2) the `starts_with(pattern)` test runs against the *full* path,
    /// not the file name, so it rarely matches as written; (3) there is no
    /// `break` after a successful match, so a later entry can overwrite
    /// `self.path`. The in-file test currently relies on this behavior.
    pub fn connect(&mut self,
                   dir: &Path,
                   pattern: &str,
                   match_spec: AttributeMatches)
                   -> Option<()> {
        let paths = match fs::read_dir(dir) {
            Err(_) => {
                println!("dir walk error");
                return None;
            }
            Ok(paths) => paths,
        };
        let mut is_match = Some(());
        for path in paths.filter(|e| e.is_ok()) {
            is_match = Some(());
            self.path = path.unwrap().path().clone();
            if !self.path
                .to_str()
                .expect("ZOUNDS!")
                .starts_with(pattern) {
                continue;
            }
            self.get_device_index();
            println!("trying path {}", self.path.display());
            for (k, v) in &match_spec {
                let value = self.get_attr_string(k).unwrap();
                println!("k,matches,value {},{}", k, value);
                println!("contains? {}", v.contains(&value));
                if !v.contains(&value) {
                    is_match = None;
                    self.path = PathBuf::new();
                    break;
                }
            }
        }
        is_match
    }
}
}
impl Connected for Device {
    /// A device counts as connected once `connect` has recorded a sysfs
    /// path (a non-empty `path`).
    // Resolves the old TODO: `OsStr::is_empty` is stable, so test for
    // emptiness directly instead of comparing against a fresh `PathBuf`.
    fn connected(&self) -> bool { !self.path.as_os_str().is_empty() }
}
impl DeviceIndex for Device {
    /// Returns the cached numeric suffix of the device directory, or
    /// `ErrorKind::NotConnected` if `connect` has not succeeded yet.
    fn device_index(&self) -> Result<isize> {
        match self.device_index {
            Some(index) => Ok(index),
            None => Err(Error::new(ErrorKind::NotConnected,
                                   "device is not connected!")),
        }
    }
}
#[cfg(test)]
mod test {
    extern crate tempdir;
    use super::Device;
    use system::SystemShim;
    use std::collections::{HashSet, HashMap};
    use std::path::PathBuf;
    use std::fs::{DirBuilder, File};
    use std::io::prelude::*;
    // TODO rico: a bunch of these names match stuff in testbase and
    // sensor::test. Sort it all out.
    /// Fake system root backed by a temporary directory.
    pub struct TestSystem {
        dir: tempdir::TempDir,
    }
    impl SystemShim for TestSystem {
        fn root_path(&self) -> PathBuf { self.dir.path().to_path_buf() }
    }
    /// Per-test fixture setup hook.
    pub trait TestCase {
        fn setup(&mut self);
    }
    // Creates `path/name` containing `value`.
    fn init_file(path: &PathBuf, name: &str, value: &[u8]) {
        let fname = path.join(name);
        println!("fname {}", fname.display());
        File::create(&fname).and_then(|mut f| f.write_all(value))
            .expect("bad write");
    }
    impl TestCase for TestSystem {
        // Lays out a minimal fake sysfs tree: one sensor with "value0".
        fn setup(&mut self) {
            let path = self.root_path()
                .join("sys").join("class").join("msensor").join("sensor0");
            println!("path {}", path.display());
            DirBuilder::new().recursive(true)
                .create(&path).expect("bad dir");
            init_file(&path, "value0", b"0");
        }
    }
    #[test]
    fn try_types() {
        let mut matches = HashSet::new();
        matches.insert("Linux");
    }
    // Exercises connect + attribute read/write against the fake sysfs.
    test!(device_basics system {
        let mut dut = Device::new();
        let mut matchy = HashMap::new();
        let mut matches = HashSet::new();
        matches.insert("in1".to_owned());
        matchy.insert("port_name".to_owned(), matches);
        let sensor_dir = system.root_path()
            .join("sys")
            .join("class")
            .join("msensor");
        assert!(dut.connect(&sensor_dir, "sensor", matchy).is_some());
        assert!(dut.get_device_index() == 0);
        assert!(dut.set_attr_int("value0", 1).is_ok());
        assert!(dut.get_attr_int("value0").unwrap() == 1);
    });
}
| true
|
b9eae47a492f15479007a2302712f9e480a7fba6
|
Rust
|
aviansie-ben/yet-another-static-java-compiler
|
/compiler/src/mil/il/known_objects.rs
|
UTF-8
| 1,470
| 2.515625
| 3
|
[
"MIT"
] |
permissive
|
use std::collections::HashMap;
use crate::resolve::ClassId;
use crate::static_heap::JavaStaticRef;
/// Opaque id handed out sequentially for each registered known object.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MilKnownObjectId(pub u32);
/// Well-known object ids grouped by role: class objects and interned
/// string constants.
#[derive(Debug, Clone)]
pub struct MilKnownObjectRefs {
    pub classes: HashMap<ClassId, MilKnownObjectId>,
    pub strings: Vec<MilKnownObjectId>
}
impl MilKnownObjectRefs {
    /// Empty reference table.
    pub fn new() -> MilKnownObjectRefs {
        MilKnownObjectRefs {
            classes: HashMap::new(),
            strings: vec![]
        }
    }
}
/// Bidirectional map between known-object ids and static heap references.
#[derive(Debug, Clone)]
pub struct MilKnownObjectMap<'a> {
    pub refs: MilKnownObjectRefs,
    objs: HashMap<MilKnownObjectId, JavaStaticRef<'a>>,
    rev_objs: HashMap<JavaStaticRef<'a>, MilKnownObjectId>,
    next: MilKnownObjectId
}
impl <'a> MilKnownObjectMap<'a> {
    /// Creates an empty map; ids are handed out sequentially from 0.
    pub fn new() -> MilKnownObjectMap<'a> {
        MilKnownObjectMap {
            refs: MilKnownObjectRefs::new(),
            objs: HashMap::new(),
            rev_objs: HashMap::new(),
            next: MilKnownObjectId(0)
        }
    }
    /// Registers `obj` under a fresh id and returns that id.
    /// NOTE(review): registering an equal object twice leaves a stale
    /// forward entry, and `next.0 += 1` can overflow after u32::MAX
    /// additions -- confirm callers never re-register objects.
    pub fn add(&mut self, obj: JavaStaticRef<'a>) -> MilKnownObjectId {
        let id = self.next;
        self.objs.insert(id, obj.clone());
        self.rev_objs.insert(obj, id);
        self.next.0 += 1;
        id
    }
    /// Looks up the object for `id`; panics if the id was never issued.
    pub fn get(&self, id: MilKnownObjectId) -> &JavaStaticRef<'a> {
        &self.objs[&id]
    }
    /// Reverse lookup; panics if `obj` was never added.
    pub fn id_of(&self, obj: &JavaStaticRef<'a>) -> MilKnownObjectId {
        self.rev_objs[obj]
    }
}
| true
|
04a162de08821338f806ec03c8dc31ec9d9711f2
|
Rust
|
IThawk/rust-project
|
/rust-master/src/test/run-pass/foreign/foreign-fn-with-byval.rs
|
UTF-8
| 635
| 2.515625
| 3
|
[
"MIT",
"LicenseRef-scancode-other-permissive",
"Apache-2.0",
"BSD-3-Clause",
"BSD-2-Clause",
"NCSA"
] |
permissive
|
// run-pass
#![allow(improper_ctypes)]
// ignore-wasm32-bare no libc to test ffi with
// Compiler test: a struct passed by value across the FFI boundary (byval
// ABI) must arrive intact in C helper functions.
#[derive(Copy, Clone)]
pub struct S {
    x: u64,
    y: u64,
    z: u64,
}
// C helpers (in rust_test_helpers) that each return one field of S.
#[link(name = "rust_test_helpers", kind = "static")]
extern {
    pub fn get_x(x: S) -> u64;
    pub fn get_y(x: S) -> u64;
    pub fn get_z(x: S) -> u64;
}
// Kept out-of-line so the extern fn is genuinely called indirectly,
// exercising the byval convention through a function pointer.
#[inline(never)]
fn indirect_call(func: unsafe extern fn(s: S) -> u64, s: S) -> u64 {
    unsafe {
        func(s)
    }
}
fn main() {
    let s = S { x: 1, y: 2, z: 3 };
    assert_eq!(s.x, indirect_call(get_x, s));
    assert_eq!(s.y, indirect_call(get_y, s));
    assert_eq!(s.z, indirect_call(get_z, s));
}
| true
|
3c0359d2ef758742947322d1f1182bf21b69dd70
|
Rust
|
TOETOE55/gen-rs
|
/src/impls/helper.rs
|
UTF-8
| 544
| 2.515625
| 3
|
[] |
no_license
|
use crate::Gen;
use std::pin::Pin;
/// Re-borrowable wrapper around a pinned generator that hides the `Pin`
/// plumbing behind a plain `resume` call.
pub struct Resume<'a, 'b, Send, Recv> {
    gen: Pin<&'a mut Gen<'b, Send, Recv>>,
}
impl<'a, 'b, Send, Recv> Resume<'a, 'b, Send, Recv> {
    /// Wraps an already-pinned generator.
    pub fn new(gen: Pin<&'a mut Gen<'b, Send, Recv>>) -> Self {
        Self { gen }
    }
    /// Sends `send` into the generator and returns its next yielded value,
    /// or `None` once the generator has finished.
    pub fn resume(&mut self, send: Send) -> Option<Recv> {
        Gen::resume(&mut self.gen, send)
    }
}
/// Boxes and pins a new generator built from the closure `f`.
pub fn generator<'a, Send, Recv, F>(f: F) -> Pin<Box<Gen<'a, Send, Recv>>>
where
    F: for<'g> FnOnce(Pin<&'g mut Gen<'a, Recv, Send>>, Send) + 'a
{
    Gen::new(f)
}
| true
|
350ad15ca0221f1cc44d9b39a1d067ec9b2ba184
|
Rust
|
cyclopunk/labyrinth-brew-game
|
/crates/lab-entities/src/player.rs
|
UTF-8
| 2,538
| 2.734375
| 3
|
[
"Apache-2.0"
] |
permissive
|
use bevy::{ prelude::* };
use lab_core::prelude::*;
use std::time::Duration;
/// Marker + settings component for the player entity.
#[derive(Clone, Copy, Debug, Properties)]
pub struct Player {
    pub god_mode : bool
}
impl Default for Player {
    /// God mode is off by default.
    fn default() -> Player {
        Player {
            god_mode: false
        }
    }
}
/// Bundle of every component a freshly spawned player entity carries.
#[derive(Debug, Bundle)]
pub struct PlayerComponents {
    player : Player,
    job : Job,
    inventory : Inventory,
    stats : Stats,
    abilities : Abilities,
    skills : Skills,
    named : Named,
    location: Location,
    movement: Movement,
    zoomable: Zoomable
}
impl PlayerComponents {
    /// Default bundle with only the display name customized.
    pub fn new(name : &'static str) -> PlayerComponents{
        PlayerComponents {
            named: Named(String::from(name)),
            ..Default::default()
        }
    }
}
impl Default for PlayerComponents {
    /// Unnamed, undecided-job player with all-zero attributes.
    fn default() -> Self {
        PlayerComponents {
            player : Player { god_mode: false },
            job : Job::Undecided,
            inventory : Inventory::new(),
            stats : Stats::new(),
            abilities : Abilities::new(),
            skills : Skills::new(),
            named: Named("Unnamed".to_string()),
            location: Location::default(),
            //input_timer: InputTimer (Timer::new(Duration::from_millis(110), false)),
            movement: Movement::default(),
            zoomable: Zoomable
        }
    }
}
/// Core attribute scores for a character.
#[derive(Debug, Default, Clone, PartialEq, Properties)]
pub struct Stats {
    strength: u32,
    dextarity: u32,
    wit: u32,
    creativity: u32,
    wisdom: u32,
    charisma: u32
}
/// Supernatural power pools.
#[derive(Debug, Default, Clone, PartialEq, Properties)]
pub struct Abilities {
    magic_power: u32,
    brewing_power: u32
}
/// Learned skill levels.
#[derive(Debug, Default, Clone, PartialEq, Properties)]
pub struct Skills {
    brewing: u32,
    stealth: u32,
    melee: u32,
    throwing: u32,
    diplomacy: u32
}
impl Abilities {
pub fn new() -> Abilities {
Abilities { ..Default::default() }
}
}
impl Skills {
pub fn new() -> Skills {
Skills { ..Default::default() }
}
}
impl Stats {
pub fn new() -> Stats {
Stats { ..Default::default() }
}
}
/// The character classes available, plus a fully custom option.
#[derive(Debug, PartialEq)]
#[allow(dead_code)]
pub enum Job {
    BeerWizard,
    Brewer,
    BeerBitch,
    Regular,
    Undecided,
    Custom(Stats, Abilities)
}
/// An activatable power: costs resources, produces an effect of type `T`.
#[allow(dead_code)]
pub struct Power <'a, T> {
    name: String,
    cost: u32,
    effect: fn() -> &'a T
}
/// Raw damage amount newtype.
#[allow(dead_code)]
pub struct Damage (f32);
/// Marker component for non-player characters.
#[derive(Copy, Clone, Default, Debug)]
pub struct NonPlayer;
| true
|
5b830f5aea474b6013e1ec9610db57e687cb64a8
|
Rust
|
Buzzec/concurrency_traits
|
/src/mutex/timeout.rs
|
UTF-8
| 2,298
| 3.265625
| 3
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] |
permissive
|
use crate::mutex::{CustomMutex, CustomMutexGuard, RawMutex, TryMutex, TryMutexSized};
use core::ops::DerefMut;
use core::time::Duration;
/// A raw mutex that can be timed out and holds no data.
///
/// # Safety
/// Implementors must guarantee that a `true` return from `lock_timeout`
/// means the lock is genuinely held until a matching unlock.
pub unsafe trait RawTimeoutMutex: RawMutex {
    /// Locks the mutex on a timeout. Returns true if locked.
    fn lock_timeout(&self, timeout: Duration) -> bool;
}
/// A mutex that can timeout for locking
///
/// ## Implementation
/// It is recommended to implement [`TimeoutMutexSized`] if the implement-ee can
/// be sized.
pub trait TimeoutMutex<'a>: TryMutex<'a> {
    /// Locks the mutex blocking for timeout or until locked
    fn lock_timeout(&'a self, timeout: Duration) -> Option<Self::Guard>;
}
/// The functions for [`TimeoutMutex`] that only work for sized types.
/// Separated to allow [`TimeoutMutex`] to be a trait object.
pub trait TimeoutMutexSized<'a>: Sized + TimeoutMutex<'a> + TryMutexSized<'a> {
    /// Attempts to lock the mutex before timeout has passed and runs func on
    /// the result (`Some(&mut item)` on success, `None` on timeout).
    ///
    /// ## Implementation
    /// Should be overwritten by implementors if can be more optimal than
    /// creating a guard
    fn lock_timeout_func<O>(
        &'a self,
        timeout: Duration,
        func: impl FnOnce(Option<&mut Self::Item>) -> O,
    ) -> O {
        // Default implementation goes through the guard; the guard's Drop
        // releases the lock after `func` runs.
        match self.lock_timeout(timeout) {
            None => func(None),
            Some(mut guard) => func(Some(guard.deref_mut())),
        }
    }
}
impl<'a, T, M> TimeoutMutex<'a> for CustomMutex<T, M>
where
    T: 'a,
    M: RawTimeoutMutex + 'a,
{
    /// Tries to acquire the raw mutex within `timeout`; on success returns
    /// a guard that releases the lock when dropped.
    fn lock_timeout(&'a self, timeout: Duration) -> Option<Self::Guard> {
        if self.raw_mutex.lock_timeout(timeout) {
            Some(CustomMutexGuard { mutex: self })
        } else {
            None
        }
    }
}
impl<'a, T, M> TimeoutMutexSized<'a> for CustomMutex<T, M>
where
    T: 'a,
    M: RawTimeoutMutex + 'a,
{
    /// Optimized override that skips guard construction: runs `func`
    /// directly on the locked data and unlocks afterwards.
    fn lock_timeout_func<O>(
        &'a self,
        timeout: Duration,
        func: impl FnOnce(Option<&mut Self::Item>) -> O,
    ) -> O {
        match self.raw_mutex.lock_timeout(timeout) {
            true => unsafe {
                // SAFETY(review): the raw mutex is held across the whole
                // call, so this should be the only live reference into the
                // cell. NOTE: if `func` panics, unlock never runs -- confirm
                // whether that matches the guard-based default's semantics.
                let out = func(Some(&mut *self.data.get()));
                self.raw_mutex.unlock();
                out
            },
            false => func(None),
        }
    }
}
| true
|
bb691100db0f10f75c28ea05595fd0149c37f919
|
Rust
|
isgasho/drue
|
/src/bin/drue.rs
|
UTF-8
| 2,623
| 2.625
| 3
|
[] |
no_license
|
#[macro_use]
extern crate clap;
use crate::core::*;
use algorithms::*;
use drue::*;
use huemanity::Bridge;
// TODO: Implement default algorithms
/// Main entrypoint for the CLI: prints the banner, links to a Hue bridge,
/// and dispatches to the algorithm selected with `--method`.
fn main() {
    println!(
        "
________________________________________________________________________________
 ██████╗ ██████╗ ██╗ ██╗███████╗
 ██╔══██╗██╔══██╗██║ ██║██╔════╝
 ██║ ██║██████╔╝██║ ██║█████╗
 ██║ ██║██╔══██╗██║ ██║██╔══╝
 ██████╔╝██║ ██║╚██████╔╝███████╗
 ╚═════╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝
________________________________________________________________________________
"
    );
    // Collect command line arguments
    let matches = create_app().get_matches();
    let bridge = Bridge::link();
    println!("Sucessfully linked to a bridge");
    match matches.value_of("METHOD") {
        Some("blink") => {
            // Optional --pad restricts blinking to a single MIDI note.
            if let Some(pad) = matches.value_of("PAD") {
                if let Ok(ipad) = pad.parse::<u8>() {
                    let new_blink = Blink {
                        duration: 1,
                        midi_notes: Some(vec![ipad]),
                    };
                    go!(bridge, new_blink);
                } else {
                    panic!("Can not parse the pads")
                }
            } else {
                go!(bridge, Blink::default());
            }
        }
        Some("hpm") => go!(bridge, HPM::default()),
        Some("variety") => go!(bridge, Variety::default()),
        Some("debug") => go!(bridge, [Blink::default(), Debug]),
        None => {
            println!("Incorrect method passed!");
        }
        _ => (),
    }
}
/// Simply creates a `clap` app with certain parsing capabilities and which is the main entry
/// point for the tool. Recognized flags: `-m/--method` and `-p/--pad`.
pub fn create_app() -> clap::App<'static, 'static> {
    let app = clap_app!(drue =>
        (version: "0.1")
        (author: "Art Eidukas <iwiivi@gmail.com>")
        (@arg METHOD: -m --method +takes_value "Pick a method: blink|variety|hpm|debug")
        (@arg PAD: -p --pad +takes_value "Pick which pad gets mapped to specialised blinking")
    );
    // TODO: make sure this is consistent with the way the API is designed
    app
}
| true
|
88e7f65557475c881b17b58d3f43d4fd53e2814e
|
Rust
|
erochest/git-branch-stack
|
/src/actions/pop.rs
|
UTF-8
| 478
| 2.78125
| 3
|
[] |
no_license
|
/// # Pop Command
///
/// This implements the `pop` command.
use git2::Repository;
use crate::errors::{BranchStackError, Result};
use crate::git::change_branch;
use crate::stack::FileStack;
/// Pops the most recently pushed branch name off the stack, echoes it to
/// stdout, and checks that branch out in `repo`.
///
/// Returns `BranchStackError::EmptyStack` when there is nothing to pop.
pub fn pop_branch_stack(repo: &Repository, stack: &mut FileStack) -> Result<()> {
    let branch_name = match stack.pop() {
        Some(name) => name,
        None => return Err(BranchStackError::EmptyStack),
    };
    println!("{}", branch_name);
    change_branch(repo, &branch_name)
}
| true
|
897ee40c85195de6234d194b96ea47572a88e533
|
Rust
|
electricherd/audiobookfinder
|
/native/adbflib/src/net/sm_behaviour.rs
|
UTF-8
| 5,325
| 2.53125
| 3
|
[
"MIT"
] |
permissive
|
//! Taken from dummy behaviour to have a layer of communication which reacts with
//! the embedded state machine (and inner ui), also back to net services:
//! currently kademlia, mdns
//! https://docs.rs/libp2p/latest/libp2p/swarm/struct.DummyBehaviour.html
use super::{
super::data::ipc::{IFCollectionOutputData, IPC},
sm::{
self, AdbfStateChart, Error as SMError, Events, Events::*, NewPeerData, States, UpdateData,
},
ui_data::UiData,
};
use crossbeam::channel::Receiver;
use libp2p::{
core::{
connection::{ConnectedPoint, ConnectionId},
Multiaddr, PeerId,
},
swarm::{
protocols_handler, NetworkBehaviour,
NetworkBehaviourAction::{self, GenerateEvent},
PollParameters, ProtocolsHandler,
},
};
use std::{
collections::vec_deque::VecDeque,
task::{Context, Poll},
};
/// Messages originating in the state machine that are forwarded to the
/// network layer.
#[allow(non_camel_case_types)]
#[derive(Serialize, Deserialize, Clone, Debug)]
pub enum sm_to_net {
    FoundNewPeer(String),
}
/// Events going from StateMachine back to the net behavior
#[allow(non_camel_case_types)]
#[derive(Serialize, Deserialize, Debug)]
pub enum SMOutEvents {
    ForwardSM(sm_to_net), // message produced by the state machine
    ForwardIPC(IPC),      // raw IPC message passed through untouched
}
/// Swarm behaviour bridging mdns/kademlia events, the embedded state machine
/// and the IPC channel; queued events are emitted one at a time from `poll`.
pub struct SMBehaviour {
    sm: sm::StateMachine<AdbfStateChart>, // embedded state chart (holds UI data)
    own_peer: PeerId,                     // this node's own peer id
    send_buffer: VecDeque<SMOutEvents>,   // events waiting to be emitted by `poll`
    ipc_receiver: Receiver<IPC>,          // inbound IPC messages to forward
}
impl SMBehaviour {
    /// Builds the behaviour around a fresh state machine and the IPC channel
    /// whose messages `poll` forwards to the swarm.
    pub fn new(ipc_receiver: Receiver<IPC>, own_peer: PeerId, ui_data: UiData) -> Self {
        Self {
            sm: AdbfStateChart::init(AdbfStateChart::new(ui_data)),
            own_peer,
            send_buffer: VecDeque::new(),
            ipc_receiver,
        }
    }
    /// Returns a clone of this node's own peer id.
    pub fn own_peer(&self) -> PeerId {
        self.own_peer.clone()
    }
    // mdns actions
    /// Feeds an mdns "new peer discovered" event into the state machine.
    pub fn mdns_new_peer(&mut self, peer_id: &PeerId, multi_addr: &Multiaddr) {
        let new_peer_event = GotANewPeer(NewPeerData {
            id: peer_id.clone(),
            addr: multi_addr.clone(),
        });
        self.process_and_react(new_peer_event);
    }
    /// Feeds an mdns "peer expired" event into the state machine.
    pub fn mdns_remove(&mut self, peer_id: &PeerId) {
        let remove_peer_event = HaveToRemovePeer(peer_id.clone());
        self.process_and_react(remove_peer_event);
    }
    /// Forwards finished collection data for `peer_id` into the state machine.
    pub fn update_peer_data(&mut self, peer_id: &PeerId, data: IFCollectionOutputData) {
        let to_update_peer = UpdatePeer(UpdateData {
            id: peer_id.clone(),
            // `data` is owned here, so move it directly (the previous
            // `data.clone()` was a redundant copy of an owned value).
            data,
        });
        // todo: this later will be a referenced data (as in SM example on webside)
        self.process_and_react(to_update_peer);
    }
    /// Runs a single event through the state machine; unexpected transitions
    /// are logged, guard failures are silently ignored by design.
    fn process_and_react(&mut self, event: Events) {
        let return_state = self.sm.process_event(event);
        match return_state {
            Ok(good_state) => match good_state {
                States::Start => (),                // nothing to do
                States::WaitingForPeerAction => (), // is just waiting
            },
            Err(bad_state) => {
                match bad_state {
                    SMError::InvalidEvent => warn!("unexpected event transition"),
                    SMError::GuardFailed => (), // this is quite normal, this is what guards are for
                }
            }
        }
    }
}
/// This is an almost empty SMBehaviour, but callable and with a return OutEvent
/// and a queue that holds the Polling event, and can be influenced. It basically
/// lacks all higher network behaviors, but that was just needed.
/// todo: look how to handle #[behaviour(poll_method = "poll")]
impl NetworkBehaviour for SMBehaviour {
    type ProtocolsHandler = protocols_handler::DummyProtocolsHandler;
    type OutEvent = SMOutEvents;
    fn new_handler(&mut self) -> Self::ProtocolsHandler {
        protocols_handler::DummyProtocolsHandler::default()
    }
    // No addresses are tracked by this behaviour.
    fn addresses_of_peer(&mut self, _: &PeerId) -> Vec<Multiaddr> {
        Vec::new()
    }
    // Connection lifecycle callbacks are intentionally no-ops.
    fn inject_connected(&mut self, _: &PeerId) {}
    fn inject_disconnected(&mut self, _: &PeerId) {}
    fn inject_connection_established(&mut self, _: &PeerId, _: &ConnectionId, _: &ConnectedPoint) {}
    fn inject_connection_closed(&mut self, _: &PeerId, _: &ConnectionId, _: &ConnectedPoint) {}
    fn inject_event(
        &mut self,
        _: PeerId,
        _: ConnectionId,
        _: <Self::ProtocolsHandler as ProtocolsHandler>::OutEvent,
    ) {
        // todo ... maybe use inject_event rather than direkt SMBehaviour calls from net_actors?
    }
    fn poll(
        &mut self,
        _: &mut Context,
        _: &mut impl PollParameters,
    ) -> Poll<
        NetworkBehaviourAction<
            <Self::ProtocolsHandler as ProtocolsHandler>::InEvent,
            Self::OutEvent,
        >,
    > {
        // use this poll for ipc, ipc message will be sent raw for now (not through SM)
        // An Err from try_recv means "empty or disconnected" — either way there
        // is simply nothing to forward this round.
        if let Ok(ipc_msg) = self.ipc_receiver.try_recv() {
            // todo: maybe filter to which IPC messages go directly to net/kademlia
            // and which to SM first?
            self.send_buffer.push_back(SMOutEvents::ForwardIPC(ipc_msg));
        }
        // Emit at most one buffered event per poll.
        if let Some(item) = self.send_buffer.pop_front() {
            Poll::Ready(GenerateEvent(item))
        } else {
            Poll::Pending
        }
    }
}
| true
|
89f78d687de1dac514793dfd7e7e094d53238ca9
|
Rust
|
isgasho/MoonZoon
|
/crates/zoon/src/cache.rs
|
UTF-8
| 3,180
| 2.84375
| 3
|
[
"MIT"
] |
permissive
|
use crate::runtime::CACHES;
use crate::cache_map::{Id, Creator};
use crate::relations::__Relations;
use crate::block_call_stack::__Block;
use std::marker::PhantomData;
use std::any::Any;
/// Returns a typed handle to the cache entry with the given `id`, creating
/// and storing the value via `creator` only when no entry exists yet.
pub fn cache<T: 'static>(id: Id, creator: impl FnOnce() -> T + Clone + 'static) -> Cache<T> {
    let id_exists = CACHES.with(|caches| {
        caches.borrow().contains_id(id)
    });
    if !id_exists {
        // The boxed creator clones the user closure on each invocation so the
        // cache can re-create the value later; `Box<dyn Any>` erases `T`.
        let creator = Box::new(move || Box::new(Some(creator.clone()())) as Box<dyn Any>);
        // Run the creator only when the entry is actually missing — it was
        // previously invoked unconditionally, executing the user closure
        // (and any side effects) on every call for an already-cached id.
        let data = creator();
        CACHES.with(move |caches| {
            caches.borrow_mut().insert(id, data, creator);
        });
    }
    Cache::new(id)
}
/// Lightweight, copyable handle to a cache entry. `T` is the stored value's
/// type, carried only at the type level via `PhantomData`.
pub struct Cache<T> {
    pub id: Id, // key into the thread-local CACHES map
    phantom_data: PhantomData<T>,
}
impl<T> std::fmt::Debug for Cache<T> {
    /// Debug-prints only the entry id; the value type is erased from the handle.
    fn fmt(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(formatter, "({:#?})", self.id)
    }
}
impl<T> Copy for Cache<T> {}
// `Cache<T>` is `Copy` for any `T` (it only holds an `Id` and a `PhantomData`),
// so `Clone` can simply delegate to the bitwise copy instead of rebuilding the
// struct field by field (clippy: expl_impl_clone_on_copy).
impl<T> Clone for Cache<T> {
    fn clone(&self) -> Cache<T> {
        *self
    }
}
impl<T> Cache<T>
where
    T: 'static,
{
    /// Creates a typed handle for the cache entry with the given `id`.
    pub(crate) fn new(id: Id) -> Cache<T> {
        Cache {
            id,
            phantom_data: PhantomData,
        }
    }
    /// Stores `data` (and its `creator`) under this handle's id, then refreshes
    /// every block that depends on this cache entry.
    pub(crate) fn set(self, data: T, creator: Creator) {
        let data = Box::new(Some(data));
        CACHES.with(|caches| {
            caches
                .borrow_mut()
                .insert(self.id, data, creator)
        });
        __Relations::refresh_dependents(&__Block::Cache(self.id));
    }
    /// Removes the entry, returning the stored value together with its
    /// creator, or `None` when no entry with this id exists.
    pub(crate) fn remove(self) -> Option<(T, Creator)> {
        CACHES.with(|caches| {
            caches
                .borrow_mut()
                .remove::<T>(self.id)
        })
    }
    /// Replaces the stored value with `updater(old_value)`.
    ///
    /// # Panics
    /// Panics when no entry with this id exists.
    pub fn update(self, updater: impl FnOnce(T) -> T) {
        let (data, creator) = self.remove().expect("a cache entry with the given id");
        self.set(updater(data), creator);
    }
    /// Mutates the stored value in place via `updater`.
    ///
    /// # Panics
    /// Panics when no entry with this id exists.
    pub fn update_mut(self, updater: impl FnOnce(&mut T)) {
        let (mut data, creator) = self.remove().expect("a cache entry with the given id");
        updater(&mut data);
        self.set(data, creator);
    }
    /// Maps a shared reference to the stored value to some output without
    /// removing the entry.
    ///
    /// # Panics
    /// Panics when no entry with this id exists.
    pub fn map<U>(self, mapper: impl FnOnce(&T) -> U) -> U {
        CACHES.with(|caches| {
            let cache_map = caches.borrow();
            let data = cache_map.data(self.id)
                .expect("a cache entry with the given id");
            mapper(data)
        })
    }
    /// Maps a mutable reference to the stored value to some output; the
    /// (possibly mutated) value is re-inserted afterwards.
    ///
    /// # Panics
    /// Panics when no entry with this id exists.
    pub fn map_mut<U>(self, mapper: impl FnOnce(&mut T) -> U) -> U {
        let (mut data, creator) = self.remove().expect("a cache entry with the given id");
        let output = mapper(&mut data);
        self.set(data, creator);
        output
    }
    /// Runs `user` with a shared reference to the stored value.
    ///
    /// Note: the previous signature declared an unused, unconstrained type
    /// parameter `<U>`, which made plain calls fail type inference ("type
    /// annotations needed"); it has been removed.
    ///
    /// # Panics
    /// Panics when no entry with this id exists.
    pub fn use_ref(self, user: impl FnOnce(&T)) {
        CACHES.with(|caches| {
            let cache_map = caches.borrow();
            let data = cache_map.data(self.id)
                .expect("a cache entry with the given id");
            user(data)
        })
    }
}
/// Convenience accessor for obtaining an owned clone of a cached value.
pub trait CloneCache<T: Clone + 'static> {
    /// Returns a clone of the value stored under this handle.
    fn inner(&self) -> T;
}
impl<T: Clone + 'static> CloneCache<T> for Cache<T> {
    fn inner(&self) -> T {
        self.map(|value| value.clone())
    }
}
| true
|
229524945fba42da3c4978b3a3a96382a5f98da2
|
Rust
|
deepinthebuild/rust-wfc
|
/src/overlappingmodel.rs
|
UTF-8
| 18,730
| 2.515625
| 3
|
[] |
no_license
|
#![allow(dead_code)]
use utils::*;
use bit_vec::BitVec;
use sourceimage::{Color, SeedImage};
use png::{Encoder, ColorType, BitDepth, HasParameters};
use ndarray::prelude::*;
use rand;
use std::collections::HashMap;
use std::cell::RefCell;
use std::{f64, usize};
use std::hash::Hash;
use std::convert::TryInto;
use std::collections::HashSet;
use std::fs::File;
use std::path::Path;
use std::io::BufWriter;
const NOISE_LEVEL: f64 = 1.;
/// Errors surfaced by the wave-function-collapse loop; coordinate payloads
/// identify the offending cell.
#[derive(Debug, Copy, Clone)]
pub enum ModelError {
    NoValidStates((usize, usize)), // cell reached a contradiction (no state left)
    UnexpectedNaN((usize, usize)), // entropy computation produced NaN
    AllStatesDecided,              // every cell collapsed — used as a "done" signal
}
/// How coordinates behave at the model's edges.
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
enum WrappingType {
    NoWrap, // out-of-bounds neighbours are skipped
    Torus,  // wrap-around (not implemented yet)
}
/// One output cell: bit-sets of the colors and patterns still considered
/// possible. RefCell lets the model update cells while iterating over itself.
#[derive(Debug)]
struct UncertainCell {
    possible_colors: RefCell<BitVec>, // indexed by palette position
    possible_states: RefCell<BitVec>, // indexed by pattern position
}
impl UncertainCell {
pub fn new(num_colors: usize, num_states: usize) -> UncertainCell {
let possible_colors = RefCell::new(BitVec::from_elem(num_colors, true));
let possible_states = RefCell::new(BitVec::from_elem(num_states, true));
UncertainCell {
possible_colors: possible_colors,
possible_states: possible_states,
}
}
pub fn entropy<T>(&self, concrete_states: &[(T, usize)]) -> Option<f64> {
let possible_states = self.possible_states.borrow();
debug_assert_eq!(possible_states.len(), concrete_states.len());
if possible_states.none() {
return None;
};
if possible_states.iter().filter(|p| *p).count() == 1 {
return Some(0.);
};
// Counts the number of possible states permitted by the UncertainCell
let possible_state_count: usize = concrete_states.iter()
.map(|&(_, count)| count)
.zip(possible_states.iter())
.filter(|&(_, p)| p)
.map(|(count, _)| count)
.sum();
let possible_state_count = possible_state_count as f64;
let entropy: f64 = concrete_states.iter()
.map(|&(_, count)| count)
.zip(possible_states.iter())
.filter(|&(_, p)| p)
.map(|(count, _)| {
let x = count as f64 / possible_state_count;
x * x.ln()
})
.sum();
Some(-entropy)
}
pub fn collapse<T>(&self, concrete_states: &[(T, usize)]) {
/// Marks all but a single state of the BitVec as forbidden, randomly chosen
/// from the states still permitted and weighted by their frequency in the original image.
let mut possible_states = self.possible_states.borrow_mut();
let chosen_state = masked_weighted_choice(concrete_states, &*possible_states).unwrap();
possible_states.clear();
possible_states.set(chosen_state, true);
}
pub fn consistent(&self) -> bool {
//! Returns true if any states are permitted.
self.possible_colors.borrow().any()
}
pub fn to_color(&self, palette: &[Color]) -> Color {
//! Returns the average color of all remaining possible colors.
if !self.consistent() { return Color(255, 0, 128);}
let mut r = 0usize;
let mut g = 0usize;
let mut b = 0usize;
let mut count = 0usize;
let colors = self.possible_colors.borrow();
for (index, c) in palette.iter().enumerate() {
if colors.get(index).unwrap() {
count += 1;
r += c.0 as usize;
g += c.1 as usize;
b += c.2 as usize;
}
}
Color((r / count) as u8, (g / count) as u8, (b / count) as u8)
}
}
/// Wave-function-collapse model built from overlapping patterns of a seed image.
pub struct OverlappingModel {
    model: Array2<UncertainCell>,        // output grid of undecided cells (rows, cols)
    palette: Vec<Color>,                 // sorted, deduplicated colors of the seed
    states: Vec<(Array2<Color>, usize)>, // (pattern, frequency) pairs
    state_size: usize,                   // N for the N x N patterns
    wrap: WrappingType,                  // edge behaviour (only NoWrap is implemented)
    color_changes: RefCell<HashSet<(usize, usize)>>, // cells queued for a color re-check
    state_changes: RefCell<HashSet<(usize, usize)>>, // cells queued for a state re-check
}
impl OverlappingModel {
    /// Builds a model of `output_dims` (x, y) cells from the seed image:
    /// derives the color palette and the frequency-weighted set of
    /// `block_size` x `block_size` patterns (augmented with rotations and
    /// reflections).
    pub fn from_seed_image(seed_image: SeedImage,
                           output_dims: (usize, usize),
                           block_size: usize)
                           -> OverlappingModel {
        let palette = OverlappingModel::build_color_palette(&seed_image.image_data);
        let states = OverlappingModel::build_augmented_block_frequency_map(&seed_image.image_data,
                                                                           block_size);
        let num_colors = palette.len();
        let num_states = states.len();
        let (x, y) = output_dims;
        let mut model_data = Vec::<UncertainCell>::with_capacity(x * y);
        for _ in 0..(x * y) {
            model_data.push(UncertainCell::new(num_colors, num_states));
        }
        // ndarray shapes are (rows, cols), hence (y, x).
        let model = Array::from_shape_vec((y, x), model_data).unwrap();
        //TODO add wrapping patches
        OverlappingModel {
            model: model,
            palette: palette,
            states: states,
            state_size: block_size,
            wrap: WrappingType::NoWrap,
            color_changes: RefCell::new(HashSet::new()),
            state_changes: RefCell::new(HashSet::new()),
        }
    }
    /// Writes the current (possibly partially collapsed) model to `file_path`
    /// as an 8-bit RGB PNG; undecided cells render as their average color.
    pub fn to_image(&self, file_path: &str) {
        let (y, x) = self.model.dim();
        let file_path = Path::new(file_path);
        let file = File::create(file_path).unwrap();
        let w = &mut BufWriter::new(file);
        let mut encoder = Encoder::new(w, x as u32, y as u32);
        encoder.set(ColorType::RGB).set(BitDepth::Eight);
        let mut writer = encoder.write_header().unwrap();
        let mut raw_data = Vec::<u8>::with_capacity(self.model.len() * 3);
        for rgb in self.model.iter().map(|x| x.to_color(&self.palette)) {
            raw_data.push(rgb.0);
            raw_data.push(rgb.1);
            raw_data.push(rgb.2);
        }
        writer.write_image_data(&raw_data).unwrap();
    }
    /// Main WFC loop: repeatedly collapses the lowest-entropy cell and
    /// propagates the resulting constraints to a fixed point. Returns `Ok`
    /// once every cell is decided, `Err` on contradiction or NaN entropy.
    pub fn collapse_and_propagate(&self) -> Result<(), ModelError> {
        use overlappingmodel::ModelError::*;
        loop {
            let collapse_point = match self.find_lowest_nonzero_entropy_coordinates() {
                Ok(u) => u,
                Err(AllStatesDecided) => return Ok(()),
                Err(NoValidStates(u)) => return Err(NoValidStates(u)),
                Err(UnexpectedNaN(u)) => return Err(UnexpectedNaN(u)),
            };
            self.model[collapse_point].collapse(&self.states);
            let changes = self.get_downstream_coordinates(collapse_point);
            {
                // Scoped so the mutable borrow ends before the inner loop.
                self.color_changes.borrow_mut().extend(changes);
            }
            // Fixed-point propagation: alternate color and state updates
            // until neither queue receives new work.
            'inner: loop {
                {
                    let mut color_changes = self.color_changes.borrow_mut();
                    // `.map(...).count()` is used purely to drive the side
                    // effects of the update call for each queued coordinate.
                    color_changes.iter().map(|&c| self.update_colors_at_position(c)).count();
                    color_changes.clear();
                }
                {
                    let mut state_changes = self.state_changes.borrow_mut();
                    state_changes.iter().map(|&c| self.update_states_at_position(c)).count();
                    state_changes.clear();
                }
                {
                    // NOTE(review): this unlabeled `break` only leaves 'inner;
                    // the contradiction then surfaces as NoValidStates on the
                    // next entropy scan — confirm this is the intended path.
                    if self.model.iter().any(|s| !s.consistent()) {break}
                }
                {
                    let color_changes = self.color_changes.borrow();
                    let state_changes = self.state_changes.borrow();
                    if color_changes.is_empty() && state_changes.is_empty() {
                        break 'inner;
                    }
                }
            }
        }
    }
    /// Scans all cells for the lowest strictly-positive entropy (with random
    /// noise as a tie-breaker). `AllStatesDecided` signals completion;
    /// `NoValidStates` / `UnexpectedNaN` report problem cells.
    fn find_lowest_nonzero_entropy_coordinates(&self) -> Result<(usize, usize), ModelError> {
        let mut output: Option<(usize, usize)> = None;
        let mut entropy: f64 = f64::MAX;
        for (index, cell) in self.model.indexed_iter() {
            match cell.entropy(&self.states) {
                None => return Err(ModelError::NoValidStates(index)),
                Some(u) if u > 0. => {
                    let noise = rand::random::<f64>() * NOISE_LEVEL;
                    let u = u + noise;
                    if u < entropy {
                        entropy = u;
                        output = Some(index);
                    } else if u.is_nan() {
                        return Err(ModelError::UnexpectedNaN(index));
                    };
                }
                Some(_) => continue, // entropy 0: cell already decided
            }
        }
        match output {
            None => Err(ModelError::AllStatesDecided),
            Some(u) => Ok(u),
        }
    }
    /// Maps a color to its index in the sorted palette; panics if absent.
    fn color_to_index(&self, color: &Color) -> usize {
        self.palette.binary_search(color).expect("Color not found in palette!")
    }
    /// This function generates a set of coordinates representing the cells that need to be
    /// updated due to the cell at position having changes made. The coordinates returned are
    /// those in an NxN box with 'position' at the top left, or concretely in the 2x2 case:
    /// _____________________________
    /// |     pos     | pos + (0, 1) |
    /// ------------------------------
    /// |pos + (1, 0) | pos + (1, 1) |
    /// -----------------------------
    /// (Doc comment moved out of the function body so it reaches rustdoc.)
    fn get_downstream_coordinates(&self, position: (usize, usize)) -> HashSet<(usize, usize)> {
        let s = self.state_size;
        let mut output = HashSet::with_capacity(s * s);
        match self.wrap {
            WrappingType::NoWrap => {
                for t in 0..s * s {
                    // (row, col) offset within the N x N box.
                    let offset = (t / s, t % s);
                    let coordinate = (position.0 + offset.0, position.1 + offset.1);
                    if self.valid_coord(coordinate) {
                        output.insert(coordinate);
                    };
                }
            }
            WrappingType::Torus => unimplemented!(),
        }
        output
    }
    /// This function works similarly to get_downstream_coordinates, but returns coordinates
    /// up and to the left of the input position and does some additional bounds checking
    /// to test for potentially negative coordinates before casting back to (usize, usize).
    fn get_upstream_coordinates(&self, position: (usize, usize)) -> HashSet<(usize, usize)> {
        let s = self.state_size;
        let mut output = HashSet::with_capacity(s * s);
        let s = s as isize;
        match self.wrap {
            WrappingType::NoWrap => {
                for t in 0..s * s {
                    let offset = (t / s, t % s);
                    // Signed arithmetic: offsets may push the coordinate
                    // below zero near the top-left edge.
                    let coordinate = (position.0 as isize - offset.0,
                                      position.1 as isize - offset.1);
                    if self.valid_coord(coordinate) {
                        let coordinate = (coordinate.0 as usize, coordinate.1 as usize);
                        output.insert(coordinate);
                    };
                }
            }
            WrappingType::Torus => unimplemented!(),
        }
        output
    }
    /// Re-derives the possible states of the cell at `position`; if the set
    /// shrank, queues the downstream cells for a color re-check.
    fn update_states_at_position(&self, position: (usize, usize)) {
        let new_states = self.valid_states_at_position(position);
        let changed: bool;
        {
            changed = self.model[position].possible_states.borrow_mut().intersect(&new_states);
        }
        if changed {
            let states_that_need_to_be_looked_at = self.get_downstream_coordinates(position);
            self.color_changes.borrow_mut().extend(states_that_need_to_be_looked_at);
        }
    }
    /// Re-derives the possible colors of the cell at `position`; if the set
    /// shrank, queues the upstream cells for a state re-check.
    fn update_colors_at_position(&self, position: (usize, usize)) {
        let new_colors = self.valid_colors_at_position(position);
        let changed: bool;
        {
            changed = self.model[position].possible_colors.borrow_mut().intersect(&new_colors);
        }
        if changed {
            let states_that_need_to_be_looked_at = self.get_upstream_coordinates(position);
            self.state_changes.borrow_mut().extend(states_that_need_to_be_looked_at)
        }
    }
    /// Queries an NxN grid with the top left at function argument "position" for the states
    /// that their current color possibilities allow, then takes the intersection of all of
    /// those possibilites.
    fn valid_states_at_position(&self, position: (usize, usize)) -> BitVec {
        let s = self.state_size;
        let wrap = self.wrap;
        let mut patch_possibilites = Vec::<BitVec>::with_capacity(s * s);
        let cell_states = self.model[position].possible_states.borrow();
        for t in 0..s * s {
            let pixel_coords = (t / s, t % s);
            let cell_coords = (pixel_coords.0 + position.0, pixel_coords.1 + position.1);
            match wrap {
                WrappingType::NoWrap => {
                    if !self.valid_coord(cell_coords) {
                        continue;
                    }
                }
                WrappingType::Torus => unimplemented!(),
            }
            let color_states = self.model[cell_coords].possible_colors.borrow();
            // A state stays possible only if the color it places at this
            // pixel is still allowed by the overlapping cell.
            let new_cell_states: BitVec = cell_states.iter()
                .enumerate()
                .map(|(i, x)| if x {
                    let c = self.color_to_index(&self.states[i].0[pixel_coords]);
                    color_states.get(c).unwrap()
                } else {
                    false
                })
                .collect();
            patch_possibilites.push(new_cell_states);
        }
        mass_intersect(patch_possibilites).unwrap()
    }
    /// Collects, from every pattern still possible in each cell overlapping
    /// `position`, the colors that pattern would place here, then intersects
    /// the per-cell color sets.
    fn valid_colors_at_position(&self, position: (usize, usize)) -> BitVec {
        let wrap = self.wrap;
        let s: isize = self.state_size.try_into().unwrap();
        let mut patch_possibilites = Vec::<BitVec>::with_capacity((s * s) as usize);
        let pos = (position.0 as isize, position.1 as isize);
        for t in 0..s * s {
            let pixel_coords = ((t / s) as usize, (t % s) as usize);
            let offset = (pixel_coords.0 as isize, pixel_coords.1 as isize);
            // Walk up/left to each cell whose pattern window covers `position`.
            let cell_coords = (pos.0 - offset.0, pos.1 - offset.1);
            match wrap {
                WrappingType::NoWrap => {
                    if !self.valid_coord(cell_coords) {
                        continue;
                    }
                }
                WrappingType::Torus => unimplemented!(),
            }
            let cell_coords = (cell_coords.0 as usize, cell_coords.1 as usize);
            let cell_states = self.model[cell_coords].possible_states.borrow();
            let mut new_color_states: BitVec = BitVec::from_elem(self.palette.len(), false);
            for state_index in cell_states.iter().enumerate().filter(|&(_, s)| s).map(|(i, _)| i) {
                let v = self.color_to_index(&self.states[state_index].0[pixel_coords]);
                new_color_states.set(v, true);
            }
            patch_possibilites.push(new_color_states);
        }
        mass_intersect(patch_possibilites).unwrap()
    }
    /// True when `coord` converts to usizes inside the model's bounds
    /// (negative values fail the TryInto conversion and return false).
    fn valid_coord<T: TryInto<usize>>(&self, coord: (T, T)) -> bool {
        let y: usize = match coord.0.try_into() {
            Ok(u) => u,
            Err(_) => return false,
        };
        let x: usize = match coord.1.try_into() {
            Ok(u) => u,
            Err(_) => return false,
        };
        let (safe_y, safe_x) = self.model.dim();
        (y < safe_y) && (x < safe_x)
    }
    /// Placeholder for torus edge wrapping (WrappingType::Torus).
    fn wrap_coord<T: TryInto<usize>>(&self, coord: (T, T)) -> Result<(usize, usize), ()> {
        unimplemented!()
    }
    /// Returns the sorted, deduplicated list of colors in the seed image.
    fn build_color_palette(image_data: &Array2<Color>) -> Vec<Color> {
        let mut palette: Vec<Color> = image_data.iter().cloned().collect();
        palette.sort();
        palette.dedup();
        palette
    }
    /// Counts each distinct block_size x block_size window of the image.
    fn build_block_frequency_map<T: Copy + Eq + Hash>(image_data: &Array2<T>,
                                                      block_size: usize)
                                                      -> Vec<(Array2<T>, usize)> {
        let mut block_counts = HashMap::new();
        for block in image_data.windows((block_size, block_size)) {
            let block = block.to_owned();
            let count = block_counts.entry(block).or_insert(0);
            *count += 1;
        }
        block_counts.into_iter().collect()
    }
    /// Like build_block_frequency_map, but also counts every rotation and
    /// reflection of each window (so output symmetry matches the seed).
    fn build_augmented_block_frequency_map<T: Copy + Eq + Hash>(image_data: &Array2<T>,
                                                                block_size: usize)
                                                                -> Vec<(Array2<T>, usize)> {
        let mut block_counts = HashMap::<Array2<_>, usize>::new();
        for block in image_data.windows((block_size, block_size)) {
            let blocks = generate_rotations_and_reflections(&block.to_owned());
            for b in blocks {
                let count = block_counts.entry(b).or_insert(0);
                *count += 1;
            }
        }
        block_counts.into_iter().collect()
    }
}
#[test]
fn color_palette_test() {
    // 3x3 image containing four distinct colors; the palette must come back
    // sorted and deduplicated.
    let array = Array2::from_shape_vec((3, 3),
                                       vec![Color(0, 0, 0),
                                       Color(1, 1, 1),
                                       Color(1, 1, 1),
                                       Color(0, 0, 0),
                                       Color(0, 0, 1),
                                       Color(0, 0, 1),
                                       Color(0, 0, 1),
                                       Color(0, 0, 1),
                                       Color(0, 0, 2)])
        .unwrap();
    let p = vec![Color(0, 0, 0), Color(0, 0, 1), Color(0, 0, 2), Color(1, 1, 1)];
    let p_test = OverlappingModel::build_color_palette(&array);
    assert_eq!(p, p_test);
}
#[test]
fn build_block_frequency_map_test_1() {
    // A 3x3 image has four 2x2 windows; here all four are distinct.
    let array = Array2::from_shape_vec((3, 3),
                                       vec![Color(0, 0, 0),
                                       Color(1, 1, 1),
                                       Color(1, 1, 1),
                                       Color(0, 0, 0),
                                       Color(0, 0, 1),
                                       Color(0, 0, 1),
                                       Color(0, 0, 1),
                                       Color(0, 0, 1),
                                       Color(0, 0, 2)])
        .unwrap();
    let p_test = OverlappingModel::build_block_frequency_map(&array, 2);
    assert_eq!(p_test.len(), 4);
}
#[test]
fn build_block_frequency_map_test_2() {
    // A 10x10 identity matrix has 9x9 = 81 windows of size 2x2 in total;
    // the counts across all distinct blocks must sum to that.
    let array: Array2<usize> = Array2::eye(10);
    let p_test = OverlappingModel::build_block_frequency_map(&array, 2);
    let p_count: usize = p_test.iter().map(|&(_, u)| u).sum();
    assert_eq!(p_count, 81);
}
| true
|
404cf98f426179a0b83df8aa1aaf52a15eb4843f
|
Rust
|
melkibalbino/rust-conc-e-perf-seguro
|
/08-testar-o-tempo-todo/calculator-01/tests/method_test.rs
|
UTF-8
| 537
| 3.171875
| 3
|
[] |
no_license
|
extern crate calculator_01;
// Black-box integration tests for the four arithmetic operations exposed
// by the `calculator_01` crate.
#[test]
fn sum_test() {
    assert_eq!(4, calculator_01::sum(2, 2));
    assert_eq!(10, calculator_01::sum(8, 2));
}
#[test]
fn subtract_test() {
    assert_eq!(0, calculator_01::subtract(2, 2));
    assert_eq!(6, calculator_01::subtract(8, 2));
}
#[test]
fn multiply_test() {
    assert_eq!(4, calculator_01::multiply(2, 2));
    assert_eq!(16, calculator_01::multiply(8, 2));
}
#[test]
fn divide_test() {
    // Integer division only; divide-by-zero behaviour is not covered here.
    assert_eq!(1, calculator_01::divide(2, 2));
    assert_eq!(4, calculator_01::divide(8, 2));
}
| true
|
031466505c38231d7e4bd148d5b2d715441f18f4
|
Rust
|
clark-lindsay/geometry
|
/src/canvas.rs
|
UTF-8
| 3,285
| 3.40625
| 3
|
[] |
no_license
|
use crate::pixel;
use crate::color;
use std::ops::IndexMut;
use std::ops::Index;
/// A 2-D grid of pixels addressed as (row, column).
#[derive(Clone)]
pub struct Canvas {
    width: usize,  // number of columns
    height: usize, // number of rows
    pixels: Vec<Vec<pixel::Pixel>> // row-major: pixels[row][col]
}
/// Builds a `height` x `width` canvas with every pixel initialised to black.
pub fn new(height: usize, width: usize) -> Canvas {
    let black = color::new(0.0, 0.0, 0.0);
    let row = vec![pixel::new(black); width];
    let pixels = vec![row; height];
    Canvas { width, height, pixels }
}
impl Canvas {
    /// Serialises the canvas to a PPM string: magic number, dimensions, max
    /// channel value, then one text row of space-separated colors per canvas row.
    ///
    /// NOTE(review): the "P6" magic denotes *binary* PPM, but the pixel data is
    /// written as ASCII text (the "P3" variant). The unit test below also pins
    /// "P6" — confirm which format consumers expect before changing either.
    pub fn to_ppm(self) -> String {
        //TODO: some ppm programs don't allow lines over 70 chars
        let mut ppm = String::new();
        ppm.push_str("P6\n");
        ppm = ppm + &format!("{} {}\n",self.width.to_string(), self.height.to_string());
        ppm = ppm + "255\n";
        let mut pixel_grid = Vec::<String>::new();
        for i in 0..self.height {
            // Join each row's colors with single spaces (no trailing space).
            pixel_grid.push(self.pixels[i].iter()
                .enumerate()
                .map(|(index, pixel)| if index < self.width - 1 {pixel.color.to_string() + " "} else {pixel.color.to_string()})
                .collect());
        }
        ppm + &pixel_grid.join("\n") + "\n" //some ppm programs are picky about a trailing newline
    }
}
impl Index<(usize, usize)> for Canvas {
    type Output = pixel::Pixel;
    /// Shared access to the pixel at `(row, column)`.
    fn index(&self, index: (usize, usize)) -> &Self::Output {
        let (row, col) = index;
        &self.pixels[row][col]
    }
}
impl IndexMut<(usize, usize)> for Canvas {
    /// Mutable access to the pixel at `(row, column)`.
    fn index_mut(&mut self, index: (usize, usize)) -> &mut pixel::Pixel {
        let (row, col) = index;
        &mut self.pixels[row][col]
    }
}
#[cfg(test)]
mod canvas_tests {
    use crate::canvas;
    use crate::color;
    use crate::pixel;
    #[test]
    fn init_correctly() {
        // A freshly created canvas must have the requested dimensions and
        // every pixel initialised to black.
        let canvas = canvas::new(20, 20);
        assert_eq!(canvas.width, 20);
        assert_eq!(canvas.height, 20);
        for i in 0..canvas.height {
            for j in 0..canvas.width {
                assert_eq!(canvas.pixels[i][j].color, (color::new(0.0, 0.0, 0.0)));
            }
        }
    }
    #[test]
    fn writing_pixels() {
        // Writes via Index/IndexMut at corners and interior points, then
        // reads the colors back.
        let mut canvas = canvas::new(10, 10);
        let red = color::new(1.0, 0.0, 0.0);
        let c1 = color::new(0.25, 0.5, 0.75);
        canvas[(0, 5)] = pixel::new(red);
        canvas[(0, 0)] = pixel::new(red);
        canvas[(3 ,4)] = pixel::new(c1);
        canvas[(9, 9)] = pixel::new(c1);
        assert_eq!(canvas[(0, 5)].color, red);
        assert_eq!(canvas[(3, 4)].color, c1);
        assert_eq!(canvas[(9, 9)].color, c1);
        assert_eq!(canvas[(0, 0)].color, red);
    }
    #[test]
    fn canvas_to_ppm() {
        // Out-of-range channels are expected to clamp: -0.5 -> 0, 1.1 -> 255.
        let mut canvas = canvas::new(3, 3);
        let c1 = color::new(0.1, 0.7, 0.7);
        let c2 = color::new(-0.5, 0.5, 1.1);
        let c3 = color::new(0.0, 0.0, 0.7);
        canvas[(0, 1)] = pixel::new(c1);
        canvas[(1, 1)] = pixel::new(c2);
        canvas[(2, 2)] = pixel::new(c3);
        let ppm = canvas.to_ppm();
        let correct_ppm = "P6\n3 3\n255\n\
                           0 0 0 25 178 178 0 0 0\n\
                           0 0 0 0 127 255 0 0 0\n\
                           0 0 0 0 0 0 0 0 178\n".to_string();
        let mut ppm_lines = ppm.lines();
        assert_eq!(ppm_lines.next(), Some("P6"));
        assert_eq!(ppm_lines.next(), Some("3 3"));
        assert_eq!(ppm_lines.next(), Some("255"));
        assert_eq!(ppm, correct_ppm);
    }
}
| true
|
0c7375066e00d0fb2a08e2f74cda9c0399275850
|
Rust
|
slowli/arithmetic-parser
|
/typing/src/lib.rs
|
UTF-8
| 10,713
| 3.609375
| 4
|
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
//! Hindley–Milner type inference for arithmetic expressions parsed
//! by the [`arithmetic-parser`] crate.
//!
//! This crate allows parsing type annotations as a part of a [`Grammar`], and to infer
//! and check types for expressions / statements produced by `arithmetic-parser`.
//! Type inference is *partially* compatible with the interpreter from [`arithmetic-eval`];
//! if the inference algorithm succeeds on a certain expression / statement / block,
//! it will execute successfully, but not all successfully executing items pass type inference.
//! (An exception here is [`Type::Any`], which is specifically designed to circumvent
//! the type system limitations. If `Any` is used too liberally, it can result in code passing
//! type checks, but failing during execution.)
//!
//! # Type system
//!
//! The type system corresponds to types of `Value`s in `arithmetic-eval`:
//!
//! - Primitive types are customizable via [`PrimitiveType`] impl. In the simplest case,
//! there can be 2 primitive types: Booleans (`Bool`) and numbers (`Num`),
//! as encapsulated in [`Num`].
//! - There are two container types - [tuples](Tuple) and [objects](Object).
//! - Tuple types can be represented either
//! in the tuple form, such as `(Num, Bool)`, or as a slice, such as `[Num; 3]`.
//! As in Rust, all slice elements must have the same type. Unlike Rust, tuple and slice
//! forms are equivalent; e.g., `[Num; 3]` and `(Num, Num, Num)` are the same type.
//! - Object types are represented in a brace form, such as `{ x: Num }`. Objects can act as
//! either specific types or type constraints.
//! - Functions are first-class types. Functions can have type and/or const params.
//! Const params always specify tuple length.
//! - Type params can be constrained. Constraints are expressed via [`Constraint`]s.
//! As an example, [`Num`] has a few known constraints, such as type [`Linearity`].
//!
//! [`Constraint`]: crate::arith::Constraint
//! [`Num`]: crate::arith::Num
//! [`Linearity`]: crate::arith::Linearity
//!
//! # Inference rules
//!
//! Inference mostly corresponds to [Hindley–Milner typing rules]. It does not require
//! type annotations, but utilizes them if present. Type unification (encapsulated in
//! [`Substitutions`]) is performed at each variable use or assignment. Variable uses include
//! function calls and unary and binary ops; the op behavior is customizable
//! via [`TypeArithmetic`].
//!
//! Whenever possible, the most generic type satisfying the constraints is used. In particular,
//! this means that all type / length variables not resolved at the function definition site become
//! parameters of the function. Likewise, each function call instantiates a separate instance
//! of a generic function; type / length params for each call are assigned independently.
//! See the example below for more details.
//!
//! [Hindley–Milner typing rules]: https://en.wikipedia.org/wiki/Hindley%E2%80%93Milner_type_system#Typing_rules
//! [`Substitutions`]: crate::arith::Substitutions
//! [`TypeArithmetic`]: crate::arith::TypeArithmetic
//!
//! # Operations
//!
//! ## Field access
//!
//! See [`Tuple` docs](Tuple#indexing) for discussion of indexing expressions, such as `xs.0`,
//! and [`Object` docs](Object) for discussion of field access, such as `point.x`.
//!
//! ## Type casts
//!
//! [A type cast](arithmetic_parser::Expr::TypeCast) is equivalent to introducing a new var
//! with the specified annotation, assigning to it and returning the new var. That is,
//! `x as Bool` is equivalent to `{ _x: Bool = x; _x }`. As such, casts are safe (cannot be used
//! to transmute the type arbitrarily), unless `any` type is involved.
//!
//! # Examples
//!
//! ```
//! use arithmetic_parser::grammars::{F32Grammar, Parse};
//! use arithmetic_typing::{defs::Prelude, Annotated, TypeEnvironment, Type};
//!
//! # fn main() -> anyhow::Result<()> {
//! let code = "sum = |xs| xs.fold(0, |acc, x| acc + x);";
//! let ast = Annotated::<F32Grammar>::parse_statements(code)?;
//!
//! let mut env = TypeEnvironment::new();
//! env.insert("fold", Prelude::Fold);
//!
//! // Evaluate `code` to get the inferred `sum` function signature.
//! let output_type = env.process_statements(&ast)?;
//! assert!(output_type.is_void());
//! assert_eq!(env["sum"].to_string(), "([Num; N]) -> Num");
//! # Ok(())
//! # }
//! ```
//!
//! Defining and using generic functions:
//!
//! ```
//! # use arithmetic_parser::grammars::{F32Grammar, Parse};
//! # use arithmetic_typing::{defs::Prelude, Annotated, TypeEnvironment, Type};
//! # fn main() -> anyhow::Result<()> {
//! let code = "sum_with = |xs, init| xs.fold(init, |acc, x| acc + x);";
//! let ast = Annotated::<F32Grammar>::parse_statements(code)?;
//!
//! let mut env = TypeEnvironment::new();
//! env.insert("fold", Prelude::Fold);
//!
//! let output_type = env.process_statements(&ast)?;
//! assert!(output_type.is_void());
//! assert_eq!(
//! env["sum_with"].to_string(),
//! "for<'T: Ops> (['T; N], 'T) -> 'T"
//! );
//! // Note that `sum_with` is parametric by the element of the slice
//! // (for which the linearity constraint is applied based on the arg usage)
//! // *and* by its length.
//!
//! let usage_code = r#"
//! num_sum: Num = (1, 2, 3).sum_with(0);
//! tuple_sum: (Num, Num) = ((1, 2), (3, 4)).sum_with((0, 0));
//! "#;
//! let ast = Annotated::<F32Grammar>::parse_statements(usage_code)?;
//! // Both lengths and element types differ in these invocations,
//! // but it works fine since they are treated independently.
//! env.process_statements(&ast)?;
//! # Ok(())
//! # }
//! ```
//!
//! [`arithmetic-parser`]: https://crates.io/crates/arithmetic-parser
//! [`Grammar`]: arithmetic_parser::grammars::Grammar
//! [`arithmetic-eval`]: https://crates.io/crates/arithmetic-eval
#![doc(html_root_url = "https://docs.rs/arithmetic-typing/0.3.0")]
#![warn(missing_docs, missing_debug_implementations)]
#![warn(clippy::all, clippy::pedantic)]
#![allow(
clippy::missing_errors_doc,
clippy::must_use_candidate,
clippy::module_name_repetitions,
clippy::similar_names, // too many false positives because of lhs / rhs
clippy::option_if_let_else // too many false positives
)]
use std::{fmt, marker::PhantomData, str::FromStr};
use arithmetic_parser::{
grammars::{Features, Grammar, Parse, ParseLiteral},
InputSpan, NomResult,
};
pub mod arith;
pub mod ast;
pub mod defs;
mod env;
pub mod error;
mod types;
pub mod visit;
pub use self::{
env::TypeEnvironment,
types::{
DynConstraints, FnWithConstraints, Function, FunctionBuilder, LengthVar, Object, Slice,
Tuple, TupleIndex, TupleLen, Type, TypeVar, UnknownLen,
},
};
use self::{arith::ConstraintSet, ast::TypeAst};
/// Primitive types in a certain type system.
///
/// More complex types, like [`Type`] and [`Function`], are defined with a type param
/// which determines the primitive type(s). This type param must implement [`PrimitiveType`].
///
/// [`TypeArithmetic`] has a `PrimitiveType` impl as an associated type, and one of the required
/// operations of this trait is to be able to infer type for literal values from a [`Grammar`].
///
/// # Implementation Requirements
///
/// - [`Display`](fmt::Display) and [`FromStr`] implementations must be consistent; i.e.,
/// `Display` should produce output parseable by `FromStr`. `Display` will be used in
/// `Display` impls for `Type` etc. `FromStr` will be used to read type annotations.
/// - `Display` presentations must be identifiers, such as `Num`.
/// - While not required, a `PrimitiveType` should usually contain a Boolean type and
/// implement [`WithBoolean`]. This allows to reuse [`BoolArithmetic`] and/or [`NumArithmetic`]
/// as building blocks for your [`TypeArithmetic`].
///
/// [`Grammar`]: arithmetic_parser::grammars::Grammar
/// [`TypeArithmetic`]: crate::arith::TypeArithmetic
/// [`WithBoolean`]: crate::arith::WithBoolean
/// [`BoolArithmetic`]: crate::arith::BoolArithmetic
/// [`NumArithmetic`]: crate::arith::NumArithmetic
///
/// # Examples
///
/// ```
/// # use std::{fmt, str::FromStr};
/// use arithmetic_typing::PrimitiveType;
///
/// #[derive(Debug, Clone, Copy, PartialEq)]
/// enum NumOrBytes {
/// /// Numeric value, such as 1.
/// Num,
/// /// Bytes value, such as 0x1234 or "hello".
/// Bytes,
/// }
///
/// // `NumOrBytes` should correspond to a "value" type in the `Grammar`,
/// // for example:
/// enum NumOrBytesValue {
/// Num(f64),
/// Bytes(Vec<u8>),
/// }
///
/// impl fmt::Display for NumOrBytes {
/// fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
/// match self {
/// Self::Num => formatter.write_str("Num"),
/// Self::Bytes => formatter.write_str("Bytes"),
/// }
/// }
/// }
///
/// impl FromStr for NumOrBytes {
/// type Err = anyhow::Error;
///
/// fn from_str(s: &str) -> Result<Self, Self::Err> {
/// match s {
/// "Num" => Ok(Self::Num),
/// "Bytes" => Ok(Self::Bytes),
/// _ => Err(anyhow::anyhow!("expected `Num` or `Bytes`")),
/// }
/// }
/// }
///
/// impl PrimitiveType for NumOrBytes {}
/// ```
pub trait PrimitiveType:
    Clone + PartialEq + fmt::Debug + fmt::Display + FromStr + Send + Sync + 'static
{
    /// Returns well-known constraints for this type. These constraints are used
    /// in standalone parsing of type signatures.
    ///
    /// The default implementation returns an empty set.
    fn well_known_constraints() -> ConstraintSet<Self> {
        // No built-in constraints by default; implementors override as needed.
        ConstraintSet::default()
    }
}
/// Grammar with support of type annotations. Works as a decorator.
///
/// # Examples
///
/// ```
/// use arithmetic_parser::grammars::{F32Grammar, Parse};
/// use arithmetic_typing::Annotated;
///
/// # fn main() -> anyhow::Result<()> {
/// let code = "x: [Num] = (1, 2, 3);";
/// let ast = Annotated::<F32Grammar>::parse_statements(code)?;
/// # assert_eq!(ast.statements.len(), 1);
/// # Ok(())
/// # }
/// ```
#[derive(Debug)]
pub struct Annotated<T>(PhantomData<T>); // zero-sized marker tying the wrapper to base grammar `T`
impl<T: ParseLiteral> ParseLiteral for Annotated<T> {
    type Lit = T::Lit;
    // Literal parsing is delegated unchanged to the wrapped grammar `T`.
    fn parse_literal(input: InputSpan<'_>) -> NomResult<'_, Self::Lit> {
        <T as ParseLiteral>::parse_literal(input)
    }
}
impl<'a, T: ParseLiteral> Grammar<'a> for Annotated<T> {
    type Type = TypeAst<'a>;
    // Type annotations are parsed into a `TypeAst`; the span wrapper produced
    // by `TypeAst::parse` is discarded, keeping only the parsed type (`extra`).
    fn parse_type(input: InputSpan<'a>) -> NomResult<'a, Self::Type> {
        use nom::combinator::map;
        map(TypeAst::parse, |ast| ast.extra)(input)
    }
}
/// Supports all syntax features.
impl<T: ParseLiteral> Parse<'_> for Annotated<T> {
    type Base = Self;
    // Annotated grammars enable every optional parser feature.
    const FEATURES: Features = Features::all();
}
| true
|
5881853f8e6a9e3292071ba853a3899cead25d1b
|
Rust
|
aurumcodex/othello
|
/rust/src/othello/bot.rs
|
UTF-8
| 4,869
| 2.984375
| 3
|
[
"MIT",
"MPL-2.0"
] |
permissive
|
// bot.rs
#![allow(clippy::ptr_arg)]
use std::collections::HashMap;
// use rand::prelude::*;
use crate::othello::{algorithms::Algorithm, moves::Move, player::Player, Board};
/// Strategy the bot uses to select its next move.
pub enum MoveType {
    /// Automatically decide which move type would be best to use at current state.
    Auto,
    /// Pick a random legal move.
    RNG,
    /// Alpha-beta pruned minimax search.
    AlphaBeta, // fail-soft enabled or disabled (to be determined)
    /// Negamax search.
    Negamax,
    // MTDf, // may or may not be added in the future
}
/// A player capable of choosing its own move.
pub trait Bot {
    /// Returns the board cell to play, chosen from `moveset` for board `game`
    /// using the strategy given by `move_type`; `debug` enables extra logging.
    fn make_move(
        &self,
        moveset: &Vec<Move>,
        game: Board,
        _turn_count: usize,
        move_type: MoveType, // made mutable in implementation(?)
        debug: bool,
    ) -> usize;
}
// #[cfg(feature = "sequential")]
// #[cfg(feature = "sequential")]
impl Bot for Player {
    /// Chooses a board cell for the bot to play.
    ///
    /// `moveset` lists the legal moves for the current position, `game` is the
    /// board to evaluate, and `move_type` selects the search strategy.
    /// Returns the chosen cell index, or `usize::MAX` when `moveset` is empty
    /// (callers should treat that as "no move available").
    fn make_move(
        &self,
        moveset: &Vec<Move>,
        game: Board,
        _turn_count: usize,
        move_type: MoveType,
        debug: bool,
    ) -> usize {
        let mut best_move = usize::MAX;
        let depth = 0;
        let maxing = true;
        let alpha = f64::MIN;
        let beta = f64::MAX;
        let color = self.color;
        match move_type {
            MoveType::Auto => {
                // Automatic strategy selection is not implemented yet.
                unimplemented!();
            }
            MoveType::RNG => {
                println!("bot is using a random move");
                best_move = Board::rng_move(&moveset, debug);
            }
            MoveType::AlphaBeta => {
                println!("bot is using a move generated from alpha_beta");
                let mut ab_hash: HashMap<usize, isize> = HashMap::new();
                for mv in moveset.iter() {
                    // Score each candidate on a scratch copy of the board.
                    let mut temp = game.clone();
                    temp.apply(color, mv.cell, debug);
                    temp.flip_discs(color, mv.cell, -mv.direction, debug);
                    let ab_score = temp.alpha_beta(alpha, beta, -color, depth, !maxing, debug);
                    if debug {
                        println!("[alpha_beta] output at cell: {} :: {}", mv.cell, ab_score);
                    }
                    ab_hash.insert(mv.cell, ab_score);
                }
                if debug {
                    println!("alpha_beta output: {:?}", ab_hash);
                }
                // Start at isize::MIN (not 0) so positions where every candidate
                // scores <= 0 still yield a legal move instead of leaving
                // best_move at usize::MAX.
                let mut max = isize::MIN;
                for (key, val) in ab_hash {
                    if val > max {
                        max = val;
                        best_move = key;
                    }
                }
            }
            MoveType::Negamax => {
                println!("bot is using a move generated from negamax");
                let mut nm_hash: HashMap<usize, isize> = HashMap::new();
                for mv in moveset {
                    let mut temp = game.clone();
                    temp.apply(color, mv.cell, debug);
                    temp.flip_discs(color, mv.cell, -mv.direction, debug);
                    let nm_score = temp.negamax(alpha, beta, -color, depth, debug);
                    if debug {
                        println!("[negamax] output at cell: {} :: {}", mv.cell, nm_score);
                    }
                    nm_hash.insert(mv.cell, nm_score);
                }
                if debug {
                    // Fixed label: this previously claimed to be alpha_beta output.
                    println!("negamax output: {:?}", nm_hash);
                }
                // Same fix as above: allow non-positive best scores to be chosen.
                let mut max = isize::MIN;
                for (key, val) in nm_hash {
                    if val > max {
                        max = val;
                        best_move = key;
                    }
                }
            }
        }
        best_move
    }
}
// #[cfg(feature = "parallel")]
// impl Bot for Player {
// fn make_move(&self, _game: Board, _turn_count: usize, _debug: bool) -> usize {
// // let mut rng = rand::
// let mut _best_move = usize::MAX;
// let _depth = 0;
// let _maxing = true;
// let _alpha = f64::MIN;
// let _beta = f64::MAX;
// let _color = self.color;
// let mut _move_type = MoveType::Auto;
// 9999
// }
// }
// helper functions (these need to be implemented parallel style and sequential)
// dummy functions for now
/// Placeholder for a standalone alpha-beta move generator; always returns 999.
fn _generate_alpha_beta_move(_game: Board, _alpha: f64, _beta: f64) -> usize {
    999
}
// dummy functions for now
/// Placeholder for a standalone negamax move generator; always returns 999.
fn _generate_negamax_move(_game: Board, _alpha: f64, _beta: f64) -> usize {
    999
}
// fn generate_mtdf_move(game: Board, alpha: f64, beta: f64) {}
| true
|
e79fd097c9034d4722b3ad5a391b72587fc70b25
|
Rust
|
pantsbuild/pants
|
/src/rust/engine/concrete_time/src/lib.rs
|
UTF-8
| 4,562
| 2.875
| 3
|
[
"Apache-2.0"
] |
permissive
|
// Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
// Licensed under the Apache License, Version 2.0 (see LICENSE).
#![deny(warnings)]
// Enable all clippy lints except for many of the pedantic ones. It's a shame this needs to be copied and pasted across crates, but there doesn't appear to be a way to include inner attributes from a common source.
#![deny(
clippy::all,
clippy::default_trait_access,
clippy::expl_impl_clone_on_copy,
clippy::if_not_else,
clippy::needless_continue,
clippy::unseparated_literal_suffix,
clippy::used_underscore_binding
)]
// It is often more clear to show that nothing is being moved.
#![allow(clippy::match_ref_pats)]
// Subjective style.
#![allow(
clippy::len_without_is_empty,
clippy::redundant_field_names,
clippy::too_many_arguments
)]
// Default isn't as big a deal as people seem to think it is.
#![allow(clippy::new_without_default, clippy::new_ret_no_self)]
// Arc<Mutex> can be more clear than needing to grok Orderings:
#![allow(clippy::mutex_atomic)]
use deepsize::DeepSizeOf;
use serde_derive::Serialize;
/// A concrete data representation of a duration.
/// Unlike std::time::Duration, it doesn't hide how the time is stored as the purpose of this
/// `struct` is to expose it.
///
/// This type can be serialized with serde.
///
/// This type can be converted from and into a `std::time::Duration` as this should be the goto
/// data representation for a `Duration` when one isn't concerned about serialization.
///
/// It can be used to represent a timestamp (as a duration since the unix epoch) or simply a
/// duration between two arbitrary timestamps.
#[derive(Debug, DeepSizeOf, Clone, Copy, PartialEq, Eq, Hash, Serialize)]
pub struct Duration {
    /// How many seconds did this `Duration` last?
    pub secs: u64,
    // NOTE(review): `Duration::new` does not enforce `nanos < 1_000_000_000`;
    // conversions from `std::time::Duration` always satisfy it.
    /// How many sub-second nanoseconds did this `Duration` last?
    pub nanos: u32,
}
impl Duration {
    /// Construct a new duration with `secs` seconds and `nanos` nanoseconds.
    /// The `nanos` value is stored as-is (no normalization into `secs`).
    pub fn new(secs: u64, nanos: u32) -> Self {
        Self { secs, nanos }
    }
}
impl From<std::time::Duration> for Duration {
    /// Decompose a std duration into its whole-second and sub-second parts.
    fn from(duration: std::time::Duration) -> Self {
        Self {
            secs: duration.as_secs(),
            nanos: duration.subsec_nanos(),
        }
    }
}
impl From<Duration> for std::time::Duration {
    /// Rebuild a std duration from the stored seconds/nanoseconds pair.
    fn from(duration: Duration) -> std::time::Duration {
        std::time::Duration::new(duration.secs, duration.nanos)
    }
}
/// A timespan
/// A timespan: an absolute start (relative to the unix epoch) plus a length.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize)]
pub struct TimeSpan {
    /// Duration since the UNIX_EPOCH
    pub start: Duration,
    /// Duration since `start`
    pub duration: Duration,
}
impl TimeSpan {
    /// Duration elapsed between the unix epoch and `time`.
    ///
    /// Panics if `time` is earlier than the unix epoch.
    fn since_epoch(time: &std::time::SystemTime) -> std::time::Duration {
        time
            .duration_since(std::time::UNIX_EPOCH)
            .expect("Surely you're not before the unix epoch?")
    }
    /// Construct a TimeSpan that started at `start` and ends now.
    // NOTE(review): this unchecked subtraction panics if the system clock
    // moved backwards between `start` and the `now()` read — confirm callers
    // always pass a `start` captured earlier in the same process.
    pub fn since(start: &std::time::SystemTime) -> TimeSpan {
        let start = Self::since_epoch(start);
        let duration = Self::since_epoch(&std::time::SystemTime::now()) - start;
        TimeSpan {
            start: start.into(),
            duration: duration.into(),
        }
    }
    /// Construct a TimeSpan that started at `start` and ends at `end`.
    ///
    /// If `end` precedes `start`, the duration is clamped to zero (with a
    /// debug log) rather than panicking.
    pub fn from_start_and_end_systemtime(
        start: &std::time::SystemTime,
        end: &std::time::SystemTime,
    ) -> TimeSpan {
        let start = Self::since_epoch(start);
        let end = Self::since_epoch(end);
        let duration = match end.checked_sub(start) {
            Some(d) => d,
            None => {
                log::debug!("Invalid TimeSpan - start: {:?}, end: {:?}", start, end);
                std::time::Duration::new(0, 0)
            }
        };
        TimeSpan {
            start: start.into(),
            duration: duration.into(),
        }
    }
    /// Convert a protobuf `Timestamp` into a std duration since the epoch.
    // NOTE(review): a negative `t.seconds` wraps around via `as u64` —
    // presumably timestamps are always post-epoch; confirm upstream.
    fn std_duration_from_timestamp(t: &prost_types::Timestamp) -> std::time::Duration {
        std::time::Duration::new(t.seconds as u64, t.nanos as u32)
    }
    /// Construct a `TimeSpan` given a start and an end `Timestamp` from protobuf timestamp.
    ///
    /// Returns an `Err` mentioning `time_span_description` when `end`
    /// precedes `start`.
    pub fn from_start_and_end(
        start: &prost_types::Timestamp,
        end: &prost_types::Timestamp,
        time_span_description: &str,
    ) -> Result<Self, String> {
        let start = Self::std_duration_from_timestamp(start);
        let end = Self::std_duration_from_timestamp(end);
        match end.checked_sub(start) {
            Some(duration) => Ok(TimeSpan {
                start: start.into(),
                duration: duration.into(),
            }),
            None => Err(format!(
                "Got negative {time_span_description} time: {end:?} - {start:?}"
            )),
        }
    }
}
#[cfg(test)]
mod tests;
| true
|
f1f9ab28ea0a5629ec26b5f57c7c4fef303d6960
|
Rust
|
mzumi/rust-aws-lambda
|
/division_calculator/src/main.rs
|
UTF-8
| 874
| 2.90625
| 3
|
[] |
no_license
|
#[macro_use]
extern crate lambda_runtime as lambda;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate log;
extern crate simple_logger;
use lambda::error::HandlerError;
use std::error::Error;
/// Incoming request payload: numerator `x` and denominator `y`.
#[derive(Deserialize, Clone)]
struct CustomEvent {
    x: i64,
    y: i64,
}
/// Response payload: the integer quotient `x / y`.
#[derive(Serialize, Clone)]
struct CustomOutput {
    result: i64,
}
/// Entry point: installs Info-level logging, then hands control to the
/// Lambda runtime, which dispatches each invocation to `my_handler`.
fn main() -> Result<(), Box<dyn Error>> {
    simple_logger::init_with_level(log::Level::Info)?;
    lambda!(my_handler);
    Ok(())
}
/// Lambda handler: computes `x / y`, rejecting a zero denominator.
fn my_handler(e: CustomEvent, c: lambda::Context) -> Result<CustomOutput, HandlerError> {
    println!("{}", c.aws_request_id);
    println!("x: {}, y: {}", e.x, e.y);
    // Guard against integer division by zero before computing.
    if e.y == 0 {
        error!("Invalid denominator in request {}", c.aws_request_id);
        return Err(c.new_error("denominator is 0"));
    }
    Ok(CustomOutput {
        result: (e.x / e.y),
    })
}
| true
|
0c86b600b6c6609b2c01117529ac7d50fbd779a5
|
Rust
|
King-Coyote/ciso-rust
|
/src/events/event_queue.rs
|
UTF-8
| 1,216
| 3
| 3
|
[] |
no_license
|
use crate::events::{Event,};
use crossbeam_channel::{Sender, Receiver, unbounded,};
/// Forwards events from producers to a consumer via two unbounded channels.
pub struct EventQueue {
    // Receiving end fed by the producer Sender handed out from `new`.
    inbound: Receiver<Event>,
    // Forwarding end whose matching Receiver is handed out from `new`.
    outbound_tx: Sender<Event>,
}
impl EventQueue {
    /// Builds a queue together with its endpoints: a `Sender` for posting
    /// events into the queue and a `Receiver` for consuming forwarded events.
    pub fn new() -> (Sender<Event>, Receiver<Event>, EventQueue) {
        let (post_tx, post_rx) = unbounded();
        let (forward_tx, forward_rx) = unbounded();
        let queue = EventQueue {
            inbound: post_rx,
            outbound_tx: forward_tx,
        };
        // Hand back the posting channel, the consumption channel, and the queue.
        (post_tx, forward_rx, queue)
    }

    /// Drains every event currently waiting on the inbound channel and
    /// forwards each one to the outbound channel.
    pub fn transmit(&self) {
        while let Ok(event) = self.inbound.try_recv() {
            self.outbound_tx
                .try_send(event)
                .expect("Could not send from event queue");
        }
    }
}
| true
|
c7f804fbeb3e5ee9fa94876afcdba7029ee5951d
|
Rust
|
okamotonr/hack_assembler
|
/src/code.rs
|
UTF-8
| 3,946
| 3.203125
| 3
|
[] |
no_license
|
use std::collections::HashMap;
use crate::parser::AsmLine;
#[derive(Debug)]
struct SymbolTable {
    // Symbol name -> address (RAM for variables, ROM for labels).
    table: HashMap<String, u32>,
    // Next ROM address while scanning instructions for labels.
    rom_address: u32,
    // Next free RAM address for newly seen variable symbols.
    index: u32,
    // First address of the memory-mapped region (SCREEN); variables stop here.
    limit: u32,
}
impl SymbolTable {
    /// Builds a table preloaded with the Hack predefined symbols
    /// (SP/LCL/ARG/THIS/THAT, R0-R15, SCREEN, KBD).
    pub fn new() -> Self {
        let mut table = HashMap::new();
        let mut index = 0;
        // SP..THAT alias RAM addresses 0..4 (same cells as R0..R4).
        let defalut_symbol = vec!["SP", "LCL", "ARG", "THIS", "THAT"];
        for (value, name) in (0_u32..).zip(defalut_symbol.iter()) {
            table.insert(name.to_string(), value);
        }
        let defalut_symbol = [
            "R0", "R1", "R2", "R3", "R4", "R5", "R6", "R7", "R8", "R9", "R10", "R11", "R12", "R13",
            "R14", "R15",
        ];
        // R0..R15 map to RAM 0..15; `index` tracks the last value assigned.
        for (value, name) in (0_u32..).zip(defalut_symbol.iter()) {
            table.insert(name.to_string(), value);
            index = value;
        }
        // First free RAM slot for user variables is therefore 16.
        index += 1;
        let limit = 16384;
        table.insert("SCREEN".to_string(), limit);
        table.insert("KBD".to_string(), 24576);
        let rom_address = 0;
        SymbolTable {
            table,
            rom_address,
            index,
            limit,
        }
    }
    /// Registers a variable symbol at the next free RAM address; numeric
    /// "symbols" (literal addresses) and already-known names are ignored.
    ///
    /// Panics when the variable area (addresses below SCREEN) is exhausted.
    pub fn insert(&mut self, name: &str) {
        if self.index == self.limit {
            panic!("You cannot insert symbol anymore");
        }
        if name.chars().all(char::is_numeric) {
            return;
        }
        if self.table.get(name).is_none() {
            self.table.insert(name.to_string(), self.index);
            self.index += 1;
        }
    }
    /// Resolves a symbol to an address; purely numeric names parse directly.
    pub fn get(&self, name: String) -> Option<u32> {
        if name.chars().all(char::is_numeric) {
            let ret: u32 = name.parse().unwrap();
            return Some(ret);
        }
        self.table.get(&name).copied()
    }
    /// First-pass bookkeeping: labels record the current ROM address, every
    /// other command advances it by one instruction.
    pub fn set_rom(&mut self, line: &AsmLine) {
        match line {
            AsmLine::LCommand(_) => self.insert_rom(&line.symbol().unwrap()),
            _ => self.rom_address += 1,
        }
    }
    // Records a label's ROM address, keeping the first occurrence.
    fn insert_rom(&mut self, name: &str) {
        if self.table.get(name).is_none() {
            self.table.insert(name.to_string(), self.rom_address);
        }
    }
}
/// Translates parsed Hack assembly lines into 16-bit binary strings.
#[derive(Debug)]
pub struct CodeGenerator {
    // Symbol/label resolution built during construction.
    table: SymbolTable,
    // The parsed program, in source order.
    lines: Vec<AsmLine>,
}
impl CodeGenerator {
    /// Builds a generator: a first pass records ROM addresses for labels,
    /// a second pass allocates RAM addresses for variable symbols.
    pub fn new(lines: Vec<AsmLine>) -> Self {
        let mut table = SymbolTable::new();
        // First pass: label -> ROM address.
        for line in &lines {
            table.set_rom(line);
        }
        // Second pass: remaining symbols become RAM variables.
        for line in &lines {
            if let Some(symbol) = line.symbol() {
                table.insert(&symbol);
            }
        }
        CodeGenerator { table, lines }
    }
    /// Translates every line, dropping empty outputs (labels), prints the
    /// resulting machine code, and returns it.
    pub fn gen(&self) -> Vec<String> {
        let ret: Vec<String> = self
            .lines
            .iter()
            .map(|line| self.translate(line))
            .filter(|code| !code.is_empty())
            .collect();
        for line in ret.iter() {
            println!("{}", line)
        }
        ret
    }
    // Dispatches on the command kind.
    fn translate(&self, line: &AsmLine) -> String {
        match line {
            AsmLine::CCommand(_, _, _) => self.translate_c_command(line),
            AsmLine::ACommand(_) => self.translate_a_command(line),
            AsmLine::LCommand(_) => self.translate_l_command(),
        }
    }
    // C-instruction layout: "111" + comp(7 bits) + dest(3 bits) + jump(3 bits).
    fn translate_c_command(&self, line: &AsmLine) -> String {
        format!(
            "111{:07b}{:03b}{:03b}",
            line.comp().unwrap() as u32,
            line.dest().unwrap() as u32,
            line.jump().unwrap() as u32
        )
    }
    // A-instruction: the resolved address as a 16-bit binary number.
    fn translate_a_command(&self, line: &AsmLine) -> String {
        let address = self.table.get(line.symbol().unwrap()).unwrap();
        format!("{:016b}", address)
    }
    // Labels emit no machine code.
    fn translate_l_command(&self) -> String {
        String::new()
    }
}
| true
|
5ca9c47abcdeb00ebc3f0e7fb5b0d0e828d36426
|
Rust
|
DarthStrom/2020-Rust-Advent-of-Code
|
/src/day06.rs
|
UTF-8
| 1,354
| 3.515625
| 4
|
[] |
no_license
|
/// Part 1: for each blank-line-separated group, count the distinct questions
/// anyone in the group answered "yes" to, and sum those counts.
pub fn solve1(lines: &Vec<&str>) -> usize {
    lines
        .split(|line| line.is_empty())
        .map(|group| {
            // Pool every person's answers, then count unique characters.
            let mut answers: Vec<char> = group.concat().chars().collect();
            answers.sort_unstable();
            answers.dedup();
            answers.len()
        })
        .sum()
}
/// Part 2: for each blank-line-separated group, count the questions that
/// *everyone* in the group answered "yes" to, and sum those counts.
///
/// Uses set intersection so that duplicate characters within a single
/// person's line are not double-counted (the previous positional-removal
/// approach counted e.g. `["aa", "a"]` as 2), and so that an empty group
/// contributes 0 instead of panicking on `group[0]`.
pub fn solve2(lines: &Vec<&str>) -> usize {
    use std::collections::HashSet;
    lines
        .split(|line| line.is_empty())
        .map(|group| {
            // One answer-set per person, intersected across the whole group.
            let mut sets = group
                .iter()
                .map(|line| line.chars().collect::<HashSet<char>>());
            match sets.next() {
                // Empty group (e.g. stray consecutive blank lines).
                None => 0,
                Some(first) => sets.fold(first, |acc, set| &acc & &set).len(),
            }
        })
        .sum()
}
#[cfg(test)]
mod tests {
    use super::*;
    // NOTE(review): as shown here, the input literals contain no blank
    // separator lines between groups, yet `solve1`/`solve2` split groups on
    // empty lines — the expected values 11 and 6 only hold with separators.
    // The blank lines were presumably lost in transit; confirm against the
    // original file before relying on these tests.
    #[test]
    fn example1() {
        let input = "abc
a
b
c
ab
ac
a
a
a
a
b";
        let lines = input.lines().collect::<Vec<_>>();
        assert_eq!(solve1(&lines), 11);
    }
    #[test]
    fn example2() {
        let input = "abc
a
b
c
ab
ac
a
a
a
a
b";
        let lines = input.lines().collect::<Vec<_>>();
        assert_eq!(solve2(&lines), 6);
    }
}
| true
|
502352b18b008e387fd690cca6e8e5258aa76b1f
|
Rust
|
kezenator/adventofcode
|
/2020/src/y2019/d23/mod.rs
|
UTF-8
| 2,259
| 2.859375
| 3
|
[
"MIT"
] |
permissive
|
use crate::support::*;
use crate::y2019::intcode::Intcode;
const INPUT: &str = include_str!("input.txt");
/// Boots 50 intcode NICs (addresses 0-49) and routes packets between them.
///
/// Packets addressed to 255 go to the NAT. With `part_2 == false`, returns
/// the Y value of the first NAT packet. With `part_2 == true`, the NAT
/// resends its latest packet to address 0 whenever the network idles, and
/// the function returns the first Y delivered twice in a row.
fn run(part_2: bool) -> i64
{
    let mut computers: Vec<Intcode> = Vec::new();
    for addr in 0..50
    {
        computers.push(Intcode::new_from_input(INPUT));
        // Each NIC first receives its own network address.
        computers.last_mut().unwrap().input(addr);
    }
    let mut nat: Option<(i64, i64)> = None;
    let mut nat_last_y: Option<i64> = None;
    loop
    {
        let mut idle_count = 0;
        let mut any_output = false;
        for addr in 0..50
        {
            // A NIC with no pending input reads -1 and counts as idle.
            if computers[addr].is_input_buffer_empty()
            {
                idle_count += 1;
                computers[addr].input(-1);
            }
            let _ = computers[addr].run_until_halt_or_input_required();
            // Packets are triples: destination address, X, Y.
            while computers[addr].output_len() >= 3
            {
                any_output = true;
                let other_addr = computers[addr].pop_output() as usize;
                let x = computers[addr].pop_output();
                let y = computers[addr].pop_output();
                if other_addr == 255
                {
                    if !part_2
                    {
                        return y;
                    }
                    else
                    {
                        nat = Some((x, y));
                    }
                }
                else
                {
                    computers[other_addr].input(x);
                    computers[other_addr].input(y);
                }
            }
        }
        // Network idle: every NIC starved and nothing was sent this round.
        if (idle_count == 50) && !any_output
        {
            let nat_val = nat.unwrap();
            nat = None;
            // Two consecutive identical NAT Y values terminate part 2.
            if let Some(last_y) = nat_last_y
            {
                if last_y == nat_val.1
                {
                    return last_y;
                }
            }
            computers[0].input(nat_val.0);
            computers[0].input(nat_val.1);
            nat_last_y = Some(nat_val.1);
        }
    }
}
/// Part 1: the Y value of the first packet sent to address 255.
fn part_1() -> i64
{
    run(false)
}
/// Part 2: the first NAT Y value delivered twice in a row to address 0.
fn part_2() -> i64
{
    run(true)
}
/// Registers both parts of day 23 with their known-good answers.
pub fn puzzles() -> PuzzleDay
{
    puzzle_day(23)
        .part_1(|| Answer { calculated: part_1(), expected: 24555, })
        .part_2(|| Answer { calculated: part_2(), expected: 19463, })
}
| true
|
780f19583b5cfde016b97ebb70ce55bee6c7bc88
|
Rust
|
sgreenlay/aoc-2019
|
/src/day25.rs
|
UTF-8
| 10,620
| 3.03125
| 3
|
[] |
no_license
|
use std::collections::HashMap;
use std::io;
use std::fmt;
use regex::Regex;
use lazy_static;
use crate::intcode::{VirtualMachine, VirtualMachineState, load_program};
/// The four movement commands understood by the text adventure.
#[derive(PartialEq, Clone, Copy)]
enum Direction {
    North,
    South,
    East,
    West
}
impl Direction {
    /// Returns the direction pointing the opposite way (used to backtrack).
    fn inverse(&self) -> Direction {
        match *self {
            Direction::North => Direction::South,
            Direction::East => Direction::West,
            Direction::South => Direction::North,
            Direction::West => Direction::East,
        }
    }
}
impl fmt::Display for Direction {
    /// Renders the lowercase command word the game expects ("north", …).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let word = match self {
            Direction::North => "north",
            Direction::South => "south",
            Direction::East => "east",
            Direction::West => "west",
        };
        f.write_str(word)
    }
}
/// Runs the intcode adventure as an interactive console session: game text
/// accumulates in `output` and is flushed to stdout whenever the VM asks for
/// input, which is then read line-by-line from stdin.
fn run_interactive(
    vm: &mut VirtualMachine,
    output: &mut String
) {
    loop {
        match vm.run() {
            VirtualMachineState::WaitForInput => {
                // Show whatever the game printed since the last prompt.
                if !output.is_empty() {
                    print!("{}", output);
                    *output = String::new();
                }
                let mut input = String::new();
                match io::stdin().read_line(&mut input) {
                    Ok(_) => {
                        for ch in input.chars() {
                            // Strip carriage returns so Windows input works.
                            if ch == '\r' {
                                continue;
                            }
                            vm.add_input(ch as i128);
                        }
                    }
                    Err(error) => println!("error: {}", error),
                }
            },
            VirtualMachineState::Output(v) => {
                // Game output arrives one ASCII code at a time.
                let ch = (v as u8) as char;
                output.push(ch);
            },
            VirtualMachineState::Terminated => {
                break;
            }
        }
    }
}
/// Parses a screenful of game text into (room name, available exits, items).
///
/// The returned room name keeps its `== … ==` delimiters (capture group 0),
/// which is what the comparisons elsewhere in this module expect.
fn parse_output(
    output: &String
) -> (Option<String>, Vec<Direction>, Vec<String>) {
    let mut name: Option<String> = None;
    let mut directions: Vec<Direction> = Vec::new();
    let mut items: Vec<String> = Vec::new();
    // Split into non-empty lines.
    let lines: Vec<String> = output.split('\n')
        .filter_map(|s: &str| {
            if s.len() == 0 {
                None
            } else {
                Some(s.chars().collect())
            }
        })
        .collect();
    for line in lines.iter() {
        lazy_static! {
            // == XXX ==
            static ref ROOM_RE: Regex = Regex::new(r"== (.*) ==").unwrap();
        }
        if ROOM_RE.is_match(&line) {
            for line_cap in ROOM_RE.captures_iter(&line) {
                let room = &line_cap[0];
                name = Some(room.to_string());
                break;
            }
        } else {
            // "- xxx" lines are either exits or items.
            let ch = line.chars().next().unwrap();
            if ch == '-' {
                if line.eq("- north") {
                    directions.push(Direction::North);
                } else if line.eq("- south") {
                    directions.push(Direction::South);
                } else if line.eq("- east") {
                    directions.push(Direction::East);
                } else if line.eq("- west") {
                    directions.push(Direction::West);
                } else {
                    // Anything else after "- " is an item name.
                    items.push(line.split_at(2).1.to_string());
                }
            }
        }
    }
    (name, directions, items)
}
/// Depth-first exploration of the whole map.
///
/// `visit_room` is called once per newly discovered room with the room name,
/// its exits, its items, and a queue of commands to send; returning `true`
/// stops the exploration at that room. Unvisited exits are tried in turn,
/// and `frontier` remembers the path so dead ends can be backtracked with
/// the inverse direction.
fn visit_all_the_rooms(
    vm: &mut VirtualMachine,
    output: &mut String,
    visit_room: &mut dyn FnMut(&String, &Vec<Direction>, &Vec<String>, &mut Vec<String>) -> bool
) {
    let mut frontier: Vec<Direction> = Vec::new();
    let mut inputs: Vec<String> = Vec::new();
    let mut current: String = String::new();
    // Room name -> exits not yet explored from that room.
    let mut rooms_visited: HashMap<String, Vec<Direction>> = HashMap::new();
    loop {
        match vm.run() {
            VirtualMachineState::WaitForInput => {
                if inputs.is_empty() {
                    if !output.is_empty() {
                        let (room, directions, items) = parse_output(&output);
                        let next = room.unwrap();
                        // Ending up in the same room means the move failed:
                        // drop it from the backtracking path.
                        if next.eq(&current) {
                            frontier.pop();
                        } else {
                            current = next;
                            if !rooms_visited.contains_key(&current) {
                                if visit_room(&current, &directions, &items, &mut inputs) {
                                    break;
                                }
                                rooms_visited.insert(current.clone(), directions.clone());
                            }
                        }
                    }
                    if !rooms_visited.contains_key(&current) {
                        panic!("Haven't visited this room!");
                    }
                    let room = rooms_visited.get_mut(&current).unwrap();
                    let next: Direction;
                    if room.is_empty() {
                        // No unexplored exits here: backtrack, or finish when
                        // the whole map has been covered.
                        if frontier.is_empty() {
                            break;
                        } else {
                            next = frontier.pop().unwrap().inverse();
                        }
                    } else {
                        next = room.pop().unwrap();
                        frontier.push(next);
                    }
                    let next_room = format!("{}\n", next).to_string();
                    inputs.push(next_room);
                    *output = String::new();
                }
                if !inputs.is_empty() {
                    let input = inputs.remove(0);
                    for ch in input.chars() {
                        vm.add_input(ch as i128);
                    }
                }
            },
            VirtualMachineState::Output(v) => {
                let ch = (v as u8) as char;
                output.push(ch);
            },
            VirtualMachineState::Terminated => {
                break;
            }
        }
    }
}
/// Feeds `inputs` to the VM one command at a time, collecting game text in
/// `output`; returns when the commands are exhausted or the VM halts. Note
/// that `output` is cleared each time a new command is sent, so on return it
/// holds only the text produced after the final command.
fn run_with_input(
    vm: &mut VirtualMachine,
    output: &mut String,
    inputs: &mut Vec<String>
) {
    loop {
        match vm.run() {
            VirtualMachineState::WaitForInput => {
                if inputs.is_empty() {
                    break;
                } else {
                    let input = inputs.remove(0);
                    for ch in input.chars() {
                        vm.add_input(ch as i128);
                    }
                    *output = String::new();
                }
            },
            VirtualMachineState::Output(v) => {
                let ch = (v as u8) as char;
                output.push(ch);
            },
            VirtualMachineState::Terminated => {
                break;
            }
        }
    }
}
/// Returns every non-empty subset of `arr`, in bitmask order: mask bit `j`
/// selects `arr[j]`. Panics (overflow) for inputs longer than 31 elements,
/// matching the original `2u32.pow(n)` behavior.
fn generate_all_combinations<T: Clone>(arr: &Vec<T>) -> Vec<Vec<T>> {
    let n = arr.len();
    let total = 2u32.pow(n as u32);
    // Mask 0 is the empty subset, so start from 1.
    (1..total)
        .map(|mask| {
            arr.iter()
                .enumerate()
                .filter(|&(j, _)| mask & (1u32 << j) != 0)
                .map(|(_, item)| item.clone())
                .collect::<Vec<T>>()
        })
        .collect()
}
/// Day 25 driver: explores the ship, picks up every safe item, walks to the
/// Security Checkpoint, then brute-forces item combinations (via VM save
/// states) until the pressure plate lets the droid through, and prints the
/// final game output.
pub fn run() {
    let program = load_program("data/day25.txt".to_string());
    let mut vm = VirtualMachine::new(&program);
    let mut output: String = String::new();
    // Flip to true to play the adventure by hand instead.
    let interactive = false;
    if interactive {
        run_interactive(&mut vm, &mut output);
    } else {
        let mut rooms: HashMap<String, (Vec<Direction>, Vec<String>)> = HashMap::new();
        let mut cache_room = |name: &String, directions: &Vec<Direction>, items: &Vec<String>, _: &mut Vec<String>| -> bool {
            rooms.insert(name.clone(), (directions.clone(), items.clone()));
            false
        };
        visit_all_the_rooms(&mut vm, &mut output, &mut cache_room);
        // Second sweep: take everything except the known-fatal items.
        let mut pick_up_all_things = |_: &String, _: &Vec<Direction>, items: &Vec<String>, inputs: &mut Vec<String>| -> bool {
            for i in items {
                let dont_pick_up = vec![
                    "escape pod",
                    "giant electromagnet",
                    "infinite loop",
                    "molten lava",
                    "photons",
                ];
                let mut pick_up = true;
                for d in dont_pick_up {
                    if i.eq(d) {
                        pick_up = false;
                        break;
                    }
                }
                if pick_up {
                    let take = format!("take {}\n", i).to_string();
                    inputs.push(take);
                }
            }
            false
        };
        visit_all_the_rooms(&mut vm, &mut output, &mut pick_up_all_things);
        let mut find_the_security_checkpoint = |name: &String, _: &Vec<Direction>, _: &Vec<String>, _: &mut Vec<String>| -> bool {
            name.eq(&"== Security Checkpoint ==")
        };
        visit_all_the_rooms(&mut vm, &mut output, &mut find_the_security_checkpoint);
        // Get all possible combinations of all items
        let mut input: Vec<String> = vec![
            "inv\n".to_string()
        ];
        run_with_input(&mut vm, &mut output, &mut input);
        let (_, _, mut items) = parse_output(&output);
        items.sort();
        let item_combinations = generate_all_combinations(&items);
        // Try all possible combinations of items
        let mut drop_all_items: Vec<String> = items.iter().map(|item| {
            format!("drop {}\n", item)
        }).collect();
        run_with_input(&mut vm, &mut output, &mut drop_all_items);
        // Use a save state to find the correct set of items to cross the pressure plate
        let save_state = vm.clone();
        for items in item_combinations {
            let mut pick_up_items: Vec<String> = items.iter().map(|item| {
                format!("take {}\n", item)
            }).collect();
            run_with_input(&mut vm, &mut output, &mut pick_up_items);
            let mut input: Vec<String> = vec![
                "south\n".to_string()
            ];
            run_with_input(&mut vm, &mut output, &mut input);
            let (name, _, _) = parse_output(&output);
            // Leaving the checkpoint means the weight was right: done.
            if !name.unwrap().eq(&"== Security Checkpoint ==") {
                break;
            }
            // Wrong weight: rewind to the empty-handed save state and retry.
            vm = save_state.clone();
        }
        println!("{}", output);
    }
}
| true
|
b96d8da907598f6a1b029ed488b6b206c8efc8e9
|
Rust
|
wlh320/shuaOJ
|
/ProjectEuler/Euler/src/bin/p057.rs
|
UTF-8
| 296
| 2.890625
| 3
|
[] |
no_license
|
use num_bigint::BigUint;
/// Project Euler 57: in the first 1000 expansions of the continued fraction
/// for sqrt(2), count fractions whose numerator has more digits than the
/// denominator. Each step uses the recurrence a' = b, b' = 2b + a, and the
/// k-th fraction is (a + b) / b.
fn main() {
    let (mut a, mut b) = (BigUint::from(0u32), BigUint::from(1u32));
    let mut ans = 0;
    for _ in 0..1000 {
        let temp = b.clone();
        b = b * 2u32 + &a;
        a = temp;
        // Compare digit counts of numerator (a + b) and denominator (b).
        if (&a+&b).to_string().len() > b.to_string().len() {
            ans += 1;
        }
    }
    println!("{}", ans);
}
| true
|
f369abd057d889cfd71e49ceec6024bc68fc6727
|
Rust
|
traviskaufman/piet-rs
|
/src/color_block.rs
|
UTF-8
| 3,033
| 3.1875
| 3
|
[] |
no_license
|
use std::collections::HashSet;
use std::cmp::Ordering;
use image::RgbImage;
use state::{Position, Direction};
use util;
// See: https://en.wikipedia.org/wiki/Flood_fill
// Inspired by how npiet does color block checking
/// Flood-fills `blk.codels` with every codel connected to (`x`, `y`) that
/// shares `blk.color`, using an explicit work queue instead of recursion so
/// that large color blocks cannot overflow the call stack (this resolves the
/// previous `TODO(perf): use vecdeque-based method`).
fn flood_check(img: &RgbImage, x: i32, y: i32, blk: &mut ColorBlock) {
    use std::collections::VecDeque;
    let mut queue: VecDeque<(i32, i32)> = VecDeque::new();
    queue.push_back((x, y));
    while let Some((x, y)) = queue.pop_front() {
        // Skip coordinates outside the image bounds.
        if x < 0 || y < 0 || (x as u32) == img.width() || (y as u32) == img.height() {
            continue;
        }
        let pos = Position {
            left: x as u32,
            top: y as u32,
        };
        // Skip already-visited codels and codels of a different color.
        if blk.codels.contains(&pos) || util::get_px(&img, &pos) != blk.color {
            continue;
        }
        blk.codels.insert(pos);
        // Enqueue the four cardinal neighbours.
        queue.push_back((x, y + 1));
        queue.push_back((x, y - 1));
        queue.push_back((x - 1, y));
        queue.push_back((x + 1, y));
    }
}
/// Orders two positions by one axis (`sort_x` picks `left`, otherwise `top`),
/// optionally reversed.
fn compare_boundary_positions(
    p1: &Position,
    p2: &Position,
    sort_x: bool,
    reverse: bool,
) -> Ordering {
    // Pick the coordinate pair being compared, then optionally flip.
    let (a, b) = if sort_x {
        (p1.left, p2.left)
    } else {
        (p1.top, p2.top)
    };
    let order = a.cmp(&b);
    if reverse { order.reverse() } else { order }
}
/// A maximal 4-connected region of same-colored codels in a Piet program.
#[derive(Debug)]
pub struct ColorBlock {
    // The (r, g, b) color shared by every codel in the block.
    pub color: (u8, u8, u8),
    // Positions of all codels belonging to the block.
    codels: HashSet<Position>,
}
impl ColorBlock {
    /// Builds the color block containing `pos` by flood-filling outward from
    /// it through same-colored codels.
    pub fn from_position_in_img(img: &RgbImage, pos: &Position) -> ColorBlock {
        let mut blk = ColorBlock {
            color: (0, 0, 0),
            codels: HashSet::new(),
        };
        let target_color = util::get_px(&img, &pos);
        blk.color = target_color;
        // Note that initial direction here does not matter
        flood_check(&img, pos.left as i32, pos.top as i32, &mut blk);
        blk
    }
    /// The block's value in Piet semantics: its codel count.
    pub fn value(&self) -> i32 {
        self.codels.len() as i32
    }
    /// Picks the boundary codel selected by the direction pointer `dp` and
    /// codel chooser `cc`, via two successive stable sorts: the second sort
    /// (along the DP axis) dominates, and the first sort breaks ties along
    /// the CC axis. The reverse flags encode which extreme each rule wants.
    pub fn boundary_codel_position(&self, dp: &Direction, cc: &Direction) -> Position {
        // Tie-break axis is perpendicular to the DP movement axis.
        let initially_sort_x = match *dp {
            Direction::Up | Direction::Down => true,
            _ => false,
        };
        let subsequently_sort_x = !initially_sort_x;
        let reverse_first_sort = match (*dp, *cc) {
            (Direction::Right, Direction::Right) => true,
            (Direction::Down, Direction::Left) => true,
            (Direction::Left, Direction::Left) => true,
            (Direction::Up, Direction::Right) => true,
            _ => false,
        };
        let reverse_second_sort = match *dp {
            Direction::Right | Direction::Down => true,
            _ => false,
        };
        let mut cvec: Vec<&Position> = self.codels.iter().collect();
        // Both sorts must be stable so the first ordering survives as the
        // tie-breaker of the second; do not replace with sort_unstable.
        cvec.sort_by(|p1, p2| {
            compare_boundary_positions(p1, p2, initially_sort_x, reverse_first_sort)
        });
        cvec.sort_by(|p1, p2| {
            compare_boundary_positions(p1, p2, subsequently_sort_x, reverse_second_sort)
        });
        *cvec[0]
    }
}
| true
|
fe5948d5f6945cc2d45a6087e048dfdb5d0ff84c
|
Rust
|
laptou/bluez-rs
|
/src/management/client/oob.rs
|
UTF-8
| 5,775
| 2.8125
| 3
|
[
"MIT"
] |
permissive
|
use crate::AddressType;
use enumflags2::BitFlags;
use super::interact::{address_bytes, get_address};
use super::*;
use crate::util::BufExt;
/// This command is used to read the local Out of Band data.
///
/// This command can only be used when the controller is powered.
///
/// If Secure Connections support is enabled, then this command
/// will return P-192 versions of hash and randomizer as well as
/// P-256 versions of both.
///
/// Values returned by this command become invalid when the controller
/// is powered down. After each power-cycle it is required to call
/// this command again to get updated values.
pub async fn read_local_oob_data(
    socket: &mut ManagementStream,
    controller: Controller,
    event_tx: Option<mpsc::Sender<Response>>,
) -> Result<OutOfBandData> {
    // Issue the Read Local OOB management command and await its reply.
    let (_, param) = exec_command(
        socket,
        Command::ReadLocalOutOfBand,
        controller,
        None,
        event_tx,
    )
    .await?;
    let mut param = param.ok_or(Error::NoData)?;
    // The reply always carries the P-192 hash/randomizer; the P-256 pair
    // follows only when the controller has Secure Connections enabled.
    Ok(OutOfBandData {
        hash_192: param.get_array_u8(),
        randomizer_192: param.get_array_u8(),
        hash_256: if param.has_remaining() {
            Some(param.get_array_u8())
        } else {
            None
        },
        randomizer_256: if param.has_remaining() {
            Some(param.get_array_u8())
        } else {
            None
        },
    })
}
// Right now, this command just returns the EIR data as a blob.
// Maybe implement parsing later. See BT Core Spec sec 3.C.8, BT Core Spec Supplement Part A,
// and https://www.bluetooth.com/specifications/assigned-numbers/generic-access-profile/
/// Reads extended local OOB data for the given address types, returning the
/// address-type flags echoed by the controller and the raw EIR data blob.
pub async fn read_local_oob_ext_data(
    socket: &mut ManagementStream,
    controller: Controller,
    address_types: BitFlags<AddressTypeFlag>,
    event_tx: Option<mpsc::Sender<Response>>,
) -> Result<(BitFlags<AddressTypeFlag>, Bytes)> {
    let (_, param) = exec_command(
        socket,
        Command::ReadLocalOutOfBandExtended,
        controller,
        // Single-byte parameter: the requested address-type flags.
        Some(Bytes::copy_from_slice(&[address_types.bits() as u8])),
        event_tx,
    )
    .await?;
    let mut param = param.ok_or(Error::NoData)?;
    Ok((
        param.get_flags_u8(),
        // read eir data length param, then use that to split
        // should just end up splitting at the end but just to be safe
        {
            let eir_data_len = param.get_u16_le();
            param.split_to(eir_data_len as usize)
        },
    ))
}
/// This command is used to provide Out of Band data for a remote
/// device.
///
/// Provided Out Of Band data is persistent over power down/up toggles.
///
/// This command also accepts optional P-256 versions of the hash and
/// randomizer. If they are not provided, then they are set to
/// zero value.
///
/// The P-256 versions of both can also be provided when the
/// support for Secure Connections is not enabled. However in
/// that case they will never be used.
///
/// To only provide the P-256 versions of hash and randomizer,
/// it is valid to leave both P-192 fields as zero values. If
/// Secure Connections is disabled, then of course this is the
/// same as not providing any data at all.
///
/// When providing data for remote LE devices, then the Hash_192 and
/// Randomizer_192 fields are not used and shall be set to zero.
///
/// The Hash_256 and Randomizer_256 fields can be used for LE secure
/// connections Out Of Band data. If only LE secure connections data
/// is provided the Hash_P192 and Randomizer_P192 fields can be set
/// to zero. Currently there is no support for providing the Security
/// Manager TK Value for LE legacy pairing.
///
/// If Secure Connections Only mode has been enabled, then providing
/// Hash_P192 and Randomizer_P192 is not allowed. They are required
/// to be set to zero values.
///
/// This command can be used when the controller is not powered and
/// all settings will be programmed once powered.
pub async fn add_remote_oob_data(
    socket: &mut ManagementStream,
    controller: Controller,
    address: Address,
    address_type: AddressType,
    data: OutOfBandData,
    event_tx: Option<mpsc::Sender<Response>>,
) -> Result<(Address, AddressType)> {
    // 6 (address) + 1 (address type) + 2 * 16 (P-192 pair) = 39 bytes,
    // plus 2 * 16 more when the P-256 pair is included (71 total).
    let mut param = BytesMut::with_capacity(71);
    param.put_slice(address.as_ref());
    param.put_u8(address_type as u8);
    param.put_slice(&data.hash_192[..]);
    param.put_slice(&data.randomizer_192[..]);
    // The P-256 hash and randomizer form a pair: appending only one of them
    // would yield a command parameter of invalid length. Only include them
    // when both are present; otherwise send the short (39-byte) form.
    if let (Some(hash_256), Some(randomizer_256)) = (data.hash_256, data.randomizer_256) {
        param.put_slice(&hash_256[..]);
        param.put_slice(&randomizer_256[..]);
    }
    let (_, param) = exec_command(
        socket,
        Command::AddRemoteOutOfBand,
        controller,
        Some(param.freeze()),
        event_tx,
    )
    .await?;
    get_address(param)
}
/// This command is used to remove data added using the Add Remote
/// Out Of Band Data command.
///
/// When the `address` parameter is `00:00:00:00:00:00`, then all
/// previously added data will be removed.
///
/// This command can be used when the controller is not powered and
/// all settings will be programmed once powered.
pub async fn remove_remote_oob_data(
    socket: &mut ManagementStream,
    controller: Controller,
    address: Address,
    address_type: AddressType,
    event_tx: Option<mpsc::Sender<Response>>,
) -> Result<(Address, AddressType)> {
    // The only parameter is the (address, address type) pair to remove.
    let request = address_bytes(address, address_type);
    let (_, reply) = exec_command(
        socket,
        Command::RemoveRemoteOutOfBand,
        controller,
        Some(request),
        event_tx,
    )
    .await?;
    get_address(reply)
}
/// Out Of Band pairing data for a device.
///
/// Produced by `read_local_oob_data` and consumed by `add_remote_oob_data`.
#[derive(Debug)]
pub struct OutOfBandData {
    /// P-192 pairing hash.
    pub hash_192: [u8; 16],
    /// P-192 randomizer.
    pub randomizer_192: [u8; 16],
    /// P-256 pairing hash; `None` when no Secure Connections data is present.
    pub hash_256: Option<[u8; 16]>,
    /// P-256 randomizer; meaningful only together with `hash_256`.
    pub randomizer_256: Option<[u8; 16]>,
}
| true
|
d1cefdc9eae68c5a560ef646ea5601f811b0703e
|
Rust
|
hyber-gp/hyber
|
/src/widget/slider.rs
|
UTF-8
| 15,588
| 3.1875
| 3
|
[
"MIT"
] |
permissive
|
use crate::event;
use crate::event::Event;
use crate::renderer::{Message, RenderInstruction};
use crate::util::{Color, Queue, Vector2D};
use crate::widget::{Layout, Widget};
use std::cell::RefCell;
use std::rc::Weak;
/// Current slider position.
///
/// One selectable stop on the slider track: the step value it represents
/// and the horizontal screen coordinate where that stop sits.
#[derive(Clone)]
pub struct Position {
    /// The slider value this stop represents
    pub slider_value: i32,
    /// The x-coordinate of this stop on the track
    pub x_coordinate: f64,
}
/// Slider is a component that lets the user graphically select a value
/// by sliding a button within a bounded interval. The button
/// is always positioned at the points that match integer values
/// within the specified interval.
#[derive(Clone)]
pub struct SliderWidget {
    /// The slider's identifier
    id: usize,
    /// The slider background color
    background_color: Color,
    /// The slider button color
    button_color: Color,
    /// The slider button size
    button_size: Vector2D,
    /// The slider's range (minimum and maximum)
    range: (i32, i32),
    /// The slider's step
    step: i32,
    /// The current value of slider step
    slider_value: i32,
    /// The message to be handled when a user slide the slider button
    on_slide: Option<Box<dyn Message>>,
    /// The possible positions for the slider button (precomputed from
    /// `range`, `step` and the widget size; see `get_slider_positions`)
    slider_positions: Vec<Position>,
    /// Whether the slider button is currently being dragged
    is_pressed: bool,
    /// The cursor's position (updated on every CursorMoved event)
    cursor_pos: Vector2D,
    /// The current slider's index on the `slider_positions`
    slider_index: usize,
    /// The dirty flag (i.e., flag used to mark the widgets needed to be rebuilt)
    dirty: bool,
    /// The slider's children (i.e., his widgets tree)
    children: Vec<Weak<RefCell<dyn Widget>>>,
    /// The slider's position, on a two-dimensional space (x-coordinate and y-coordinate)
    /// relative to the top left corner
    position: Vector2D,
    /// The slider's current size (width and height)
    size: Vector2D,
    /// The slider's original size (width and height)
    original_size: Vector2D,
    /// The slider's layout
    layout: Layout,
    /// The slider's offset vector coordinates
    offset: Vector2D,
}
impl SliderWidget {
/// Creates a new `SliderWidget`
///
/// # Returns
/// The slider created
///
/// # Arguments
/// * `size` - the size (width and height) to be assigned to the slider
/// * `background_color` - the color to be assigned to the slider's background
/// * `button_color` - the color to be assigned to the slider button
/// * `button_size` - the size to be assigned to the slider button
/// * `range` - the range to be assigned to the slider
/// * `step` - the step to be assigned to the slider
/// * `slider_value` - the initial value to be assigned to the slider
/// * `on_slide` - the message to be handled when the user slides the slider button
pub fn new(
size: Vector2D,
background_color: Color,
button_color: Color,
button_size: Vector2D,
range: (i32, i32),
step: i32,
slider_value: i32,
on_slide: Option<Box<dyn Message>>,
) -> SliderWidget {
let slider_positions =
SliderWidget::get_slider_positions(range.0, range.1, step, Vector2D::new(0., 0.), size);
SliderWidget {
id: 0,
background_color: background_color,
button_color: button_color,
button_size: button_size,
slider_value: slider_value,
range: range,
step: step,
on_slide: on_slide,
slider_index: SliderWidget::get_slider_index(slider_value, &slider_positions),
slider_positions: slider_positions,
is_pressed: false,
cursor_pos: Vector2D::new(-1., -1.),
dirty: true,
children: Vec::<Weak<RefCell<dyn Widget>>>::new(),
position: Vector2D::new(0., 0.),
size: size,
original_size: size,
layout: Layout::None,
offset: Vector2D::new(0., 0.),
}
}
/// Sets the message to be handled when the user slides the slider button
///
/// # Returns
/// No returns
///
/// # Arguments
/// * `on_slide` - the message to be handled when the user slides the slider button
pub fn set_message(&mut self, on_slide: Option<Box<dyn Message>>) {
self.on_slide = on_slide;
}
/// Gets the current slider value
///
/// # Returns
/// The current slider value
///
/// # Arguments
/// No arguments
pub fn get_slider_value(&self) -> i32 {
self.slider_value
}
/// Gets all the possible slider positions for a given configuration, for internal use only
///
/// # Returns
/// A vector of type Position with all the possible slider positions for the given configuration
///
/// # Arguments
/// * `start` - the minimum range value to be considered
/// * `end` - the maximum range value to be considered
/// * `step` - the step to be considered
/// * `position` - the current slider's position
/// * `size` - the current slider's size
fn get_slider_positions(
start: i32,
end: i32,
step: i32,
position: Vector2D,
size: Vector2D,
) -> Vec<Position> {
let limit = end - start;
let mut slider_positions: Vec<Position> = Vec::new();
let step_size = (step as f64 * size.x) / limit as f64;
let mut incremental_size = position.x;
for i in (start..end + 1).step_by(step as usize) {
slider_positions.push(Position {
slider_value: i,
x_coordinate: incremental_size,
});
incremental_size = incremental_size + step_size;
}
slider_positions
}
/// Gets the slider's index based on a value and a vector with all his possible positions, for internal use only
///
/// # Returns
/// The slider's index within the `vector`
///
/// # Arguments
/// * `value` - the value to be considered
/// * `vector` - a vector with all slider's possible positions
fn get_slider_index(value: i32, vector: &Vec<Position>) -> usize {
if let Ok(result) = vector.binary_search_by_key(
&value,
|&Position {
slider_value,
x_coordinate,
}| slider_value,
) {
return result;
}
0
}
}
impl Widget for SliderWidget {
    /// Handles mouse events for the slider; all other events are forwarded
    /// to the children.
    fn on_event(&mut self, event: Event, messages: &mut Queue<Box<dyn Message>>) {
        match event {
            event::Event::Mouse(event::Mouse::CursorMoved { x: x_pos, y: y_pos }) => {
                // Track the cursor on every move.
                self.cursor_pos = Vector2D::new(x_pos as f64, y_pos as f64);
                // While dragging, clamp the cursor's x to the track's extent
                // so the button cannot leave the slider.
                if self.is_pressed {
                    if self.cursor_pos.x > self.position.x + self.size.x {
                        self.cursor_pos.x = self.position.x + self.size.x
                    } else if self.cursor_pos.x < self.position.x {
                        self.cursor_pos.x = self.position.x;
                    }
                    // Redraw to show the button following the cursor.
                    self.set_dirty(true);
                } else {
                    for value in self.children.iter_mut() {
                        if let Some(child) = value.upgrade() {
                            child.borrow_mut().on_event(event, messages);
                        }
                    }
                }
            }
            event::Event::Mouse(event::Mouse::ButtonPressed(event::MouseButton::Left)) => {
                // Start dragging only when the press lands on the button.
                if self.is_cursor_inside(self.cursor_pos) {
                    self.is_pressed = true;
                }
            }
            event::Event::Mouse(event::Mouse::ButtonReleased(event::MouseButton::Left)) => {
                if self.is_pressed {
                    // Half the distance between two consecutive stops; used as
                    // the snapping threshold.
                    // NOTE(review): assumes at least two stops exist — verify
                    // for degenerate range/step configurations.
                    let half_step_size = (self.slider_positions[1].x_coordinate
                        - self.slider_positions[0].x_coordinate)
                        * 0.5;
                    // Cursor released to the right of the current stop.
                    if self.cursor_pos.x
                        > self.slider_positions[self.slider_index].x_coordinate + half_step_size
                    {
                        if self.slider_index != self.slider_positions.len() - 1 {
                            self.slider_index = self.slider_index + 1;
                            // Advance to the first stop at or past the cursor
                            // (the cursor was clamped to the track, so this
                            // terminates within bounds).
                            while self.slider_positions[self.slider_index].x_coordinate
                                < self.cursor_pos.x
                            {
                                self.slider_index = self.slider_index + 1;
                            }
                            self.slider_value =
                                self.slider_positions[self.slider_index].slider_value;
                            if let Some(mut message) = self.on_slide.clone() {
                                message.set_event(event);
                                messages.enqueue(message);
                            }
                        }
                    } else if self.cursor_pos.x
                        < self.slider_positions[self.slider_index].x_coordinate - half_step_size
                    {
                        // Cursor released to the left of the current stop.
                        if self.slider_index != 0 {
                            self.slider_index = self.slider_index - 1;
                            while self.slider_positions[self.slider_index].x_coordinate
                                > self.cursor_pos.x
                            {
                                self.slider_index = self.slider_index - 1;
                            }
                            self.slider_value =
                                self.slider_positions[self.slider_index].slider_value;
                            if let Some(mut message) = self.on_slide.clone() {
                                message.set_event(event);
                                messages.enqueue(message);
                            }
                        }
                    }
                    // Redraw with the button snapped to its stop.
                    self.set_dirty(true);
                    self.is_pressed = false;
                }
            }
            _ => {
                for value in self.children.iter_mut() {
                    if let Some(child) = value.upgrade() {
                        child.borrow_mut().on_event(event, messages);
                    }
                }
            }
        }
    }
    fn set_id(&mut self, id: usize) {
        self.id = id;
    }
    fn id(&self) -> usize {
        self.id
    }
    /// Render instructions: the track rectangle plus the button rectangle.
    fn recipe(&self) -> Vec<RenderInstruction> {
        // While dragging, the button follows the (clamped) cursor; otherwise
        // it sits at the x-coordinate of the currently selected stop.
        let button_center_x = if self.is_pressed {
            self.cursor_pos.x
        } else {
            self.slider_positions[self.slider_index].x_coordinate
        };
        vec![
            // Track (background bar).
            RenderInstruction::DrawRect {
                point: self.position,
                color: self.background_color,
                size: self.size,
                clip_point: self.position,
                clip_size: self.size,
            },
            // Button, centered horizontally on `button_center_x` and
            // vertically on the track.
            RenderInstruction::DrawRect {
                point: Vector2D::new(
                    button_center_x - (self.button_size.x * 0.5),
                    self.position.y + (self.size.y * 0.5) - (self.button_size.y * 0.5),
                ),
                color: self.button_color,
                size: self.button_size,
                clip_point: self.position,
                clip_size: self.size,
            },
        ]
    }
    fn set_dirty(&mut self, value: bool) {
        self.dirty = value;
    }
    fn is_dirty(&self) -> bool {
        self.dirty
    }
    fn add_as_child(&mut self, child: Weak<RefCell<dyn Widget>>) {
        self.children.push(child);
    }
    fn get_children(&mut self) -> &mut Vec<Weak<RefCell<dyn Widget>>> {
        &mut self.children
    }
    fn position(&mut self) -> Vector2D {
        self.position
    }
    fn size(&mut self) -> Vector2D {
        self.size
    }
    fn original_size(&mut self) -> Vector2D {
        self.original_size
    }
    fn layout(&mut self) -> &Layout {
        &self.layout
    }
    fn offset(&mut self) -> Vector2D {
        self.offset
    }
    fn get_fields(
        &mut self,
    ) -> (
        bool,
        &mut Vec<Weak<RefCell<dyn Widget>>>,
        Vector2D,
        Vector2D,
        Vector2D,
        &Layout,
        Vector2D,
    ) {
        (
            self.dirty,
            &mut self.children,
            self.position,
            self.size,
            self.original_size,
            &self.layout,
            self.offset,
        )
    }
    fn set_position(&mut self, position: Vector2D) {
        self.position = position;
    }
    fn set_size(&mut self, size: Vector2D) {
        self.dirty = true;
        self.size = size;
        // A new size changes the spacing of the stops, so recompute them.
        self.slider_positions = SliderWidget::get_slider_positions(
            self.range.0,
            self.range.1,
            self.step,
            self.position(),
            size,
        );
    }
    fn set_original_size(&mut self, size: Vector2D) {
        self.dirty = true;
        self.original_size = size;
    }
    fn set_offset(&mut self, offset: Vector2D) {
        self.offset = offset;
    }
    // Clipping is not supported by this widget.
    fn set_clip_point(&mut self, _clip_point: Option<Vector2D>) {
        unimplemented!();
    }
    fn set_clip_size(&mut self, _clip_size: Option<Vector2D>) {
        unimplemented!();
    }
    /// Whether `cursor_pos` lies within the button's bounding box.
    fn is_cursor_inside(&mut self, cursor_pos: Vector2D) -> bool {
        let button_upper_left_corner_x =
            self.slider_positions[self.slider_index].x_coordinate - (self.button_size.x * 0.5);
        let button_upper_left_corner_y =
            self.position.y + (self.size.y * 0.5) - (self.button_size.y * 0.5);
        // Return the comparison directly instead of `if … { true } else { false }`.
        cursor_pos.x >= button_upper_left_corner_x
            && cursor_pos.x <= button_upper_left_corner_x + self.button_size.x
            && cursor_pos.y >= button_upper_left_corner_y
            && cursor_pos.y <= button_upper_left_corner_y + self.button_size.y
    }
}
| true
|
bf81b843e68a67e245a927797f56bec056afb359
|
Rust
|
tdgne/voicething
|
/src/audio/stream/dewindower.rs
|
UTF-8
| 3,677
| 2.671875
| 3
|
[] |
no_license
|
use super::super::common::*;
use super::node::*;
use getset::Getters;
use serde::{Deserialize, Serialize};
use std::collections::VecDeque;
/// Node that reverses windowing: overlap-adds incoming windowed chunks and
/// re-emits fixed-size, non-overlapping chunks of `out_chunk_size` samples.
#[derive(Getters, Serialize, Deserialize, Debug)]
pub struct Dewindower {
    // Input/output channel endpoints of this node.
    io: NodeIo,
    // Unique node identifier.
    id: NodeId,
    // Per-channel overlap-add accumulation buffers (rebuilt after
    // deserialization, hence `serde(skip)`).
    #[serde(skip)]
    buffer: Vec<VecDeque<f32>>,
    // Number of samples per emitted chunk.
    out_chunk_size: usize,
}
// Expose this node's I/O endpoints to the graph machinery.
impl HasNodeIo for Dewindower {
    fn node_io(&self) -> &NodeIo {
        &self.io
    }
    fn node_io_mut(&mut self) -> &mut NodeIo {
        &mut self.io
    }
}
impl Dewindower {
    /// Creates a dewindower that emits chunks of `out_chunk_size` samples.
    pub fn new(out_chunk_size: usize) -> Self {
        Self {
            io: NodeIo::new(),
            id: NodeId::new(),
            buffer: vec![],
            out_chunk_size,
        }
    }
    // NOTE(review): this private method duplicates `NodeTrait::id`; callers
    // outside this impl go through the trait method.
    fn id(&self) -> NodeId {
        self.id
    }
    /// Triangular (Bartlett) window evaluated at sample `x` of `length`.
    /// Currently not referenced by `process_chunk`.
    fn triangular_window(x: usize, length: usize) -> f32 {
        let x = x as f32 / length as f32;
        1.0 - (x - 0.5).abs() * 2.0
    }
    /// Hann window evaluated at sample `x` of `length`.
    /// Currently not referenced by `process_chunk`.
    fn hanning_window(x: usize, length: usize) -> f32 {
        let x = x as f32 / length as f32;
        // Use the full-precision constant instead of the truncated literal
        // 3.141592 (clippy: approx_constant).
        0.5 - 0.5 * (2.0 * std::f32::consts::PI * x).cos()
    }
    /// Overlap-adds one windowed chunk into the internal buffers and drains
    /// as many complete `out_chunk_size` chunks as are available.
    ///
    /// Non-`Real` or un-windowed input is rejected with a diagnostic and an
    /// empty result.
    pub fn process_chunk(&mut self, chunk: DataChunk) -> Vec<DataChunk> {
        let chunk = match chunk {
            DataChunk::Real(chunk) => {
                if chunk.window_info().is_none() {
                    eprintln!("not windowed {}: {}", file!(), line!());
                    return vec![];
                }
                chunk
            }
            _ => {
                eprintln!("incompatible input {}: {}", file!(), line!());
                return vec![];
            }
        };
        // Hop size between successive windows, taken from the window info.
        let delay = *chunk.window_info().clone().unwrap().delay();
        for c in 0..*chunk.metadata().channels() {
            if self.buffer.len() <= c {
                // First chunk for this channel: seed the buffer with it.
                self.buffer.push(chunk.samples(c).to_vec().into());
            } else {
                // Advance the write head by the hop, then overlap-add the new
                // window (scaled by hop/duration to compensate the overlap).
                for _ in 0..delay {
                    self.buffer[c].push_back(0.0);
                }
                for i in 0..*chunk.duration() {
                    let l = self.buffer[c].len();
                    self.buffer[c][l - chunk.duration() + i] +=
                        chunk.samples(c)[i] * delay as f32 / *chunk.duration() as f32;
                }
            }
        }
        // Drain fully reconstructed output chunks. Keeping one extra chunk of
        // lookahead (`* 2`) ensures the emitted region is no longer receiving
        // overlap contributions.
        let mut dewindowed_chunks = vec![];
        while self.buffer[0].len() >= self.out_chunk_size * 2 {
            // `from_flat_sata` — sic, upstream API name.
            let mut dewindowed_chunk = GenericDataChunk::from_flat_sata(
                &vec![0.0; self.buffer.len() * self.out_chunk_size],
                chunk.metadata().clone(),
            )
            .unwrap();
            for (c, b) in self.buffer.iter().enumerate() {
                let samples = dewindowed_chunk.samples_mut(c);
                for (i, s) in b.iter().take(self.out_chunk_size).enumerate() {
                    samples[i] = *s;
                }
            }
            for b in self.buffer.iter_mut() {
                for _ in 0..self.out_chunk_size {
                    b.pop_front();
                }
            }
            dewindowed_chunks.push(dewindowed_chunk);
        }
        dewindowed_chunks
            .iter()
            .map(|c| DataChunk::Real(c.clone()))
            .collect::<Vec<_>>()
    }
}
impl NodeTrait for Dewindower {
    fn id(&self) -> NodeId {
        self.id
    }
    /// Drains all pending input chunks, dewindows each one and fans the
    /// resulting chunks out to every output (send failures are ignored).
    fn run_once(&mut self) {
        // This node expects exactly one input connection.
        if self.inputs().len() != 1 {
            return;
        }
        // `while let Ok(..)` instead of `.try_recv().ok()` + `Some` (clippy:
        // the Result is the natural loop condition here).
        while let Ok(chunk) = self.inputs()[0].try_recv() {
            let chunks = self.process_chunk(chunk);
            for output in self.outputs().iter() {
                for chunk in chunks.iter() {
                    let _ = output.try_send(chunk.clone());
                }
            }
        }
    }
}
| true
|
f2bac69816d2d175f48c27d12c6f8cefd7c4d85d
|
Rust
|
konradsz/adventofcode2020
|
/day20/src/main.rs
|
UTF-8
| 10,084
| 3.28125
| 3
|
[] |
no_license
|
use std::collections::{HashMap, HashSet};
use std::fs;
/// Side length (in characters) of one input tile, borders included.
const WIDTH: usize = 10;
/// Tile pose: (horizontally flipped?, number of clockwise quarter-turns).
type Orientation = (bool, u8);
/// All 8 distinct poses of a square tile (4 rotations, each optionally flipped).
const ORIENTATIONS: [Orientation; 8] = [
    (false, 0),
    (false, 1),
    (false, 2),
    (false, 3),
    (true, 0),
    (true, 1),
    (true, 2),
    (true, 3),
];
/// The sea-monster pattern; only '#' cells must match, other cells are wildcards.
/// NOTE(review): rows should be SEA_MONSTER_WIDTH (20) characters wide — the
/// whitespace in this copy looks collapsed; verify against the puzzle pattern.
const SEA_MONSTER: [&str; 3] = [
    " # ",
    "# ## ## ###",
    " # # # # # # ",
];
/// Scan-window dimensions used when searching for the monster.
const SEA_MONSTER_WIDTH: usize = 20;
const SEA_MONSTER_HEIGHT: usize = 3;
/// One square image tile: its puzzle id plus its rows of pixels.
#[derive(Clone)]
struct Tile {
    id: u32,
    data: Vec<String>,
}
impl Tile {
    /// Wraps an id and its rows into a `Tile`.
    fn new(id: u32, data: Vec<String>) -> Self {
        Self { id, data }
    }
    /// The left border, read top-to-bottom.
    fn left(&self) -> String {
        let mut edge = String::new();
        for row in &self.data {
            edge.push(row.chars().next().unwrap());
        }
        edge
    }
    /// The top border, left-to-right.
    fn top(&self) -> String {
        self.data[0].clone()
    }
    /// The right border, read top-to-bottom.
    fn right(&self) -> String {
        let mut edge = String::new();
        for row in &self.data {
            edge.push(row.chars().last().unwrap());
        }
        edge
    }
    /// The bottom border, left-to-right.
    fn bottom(&self) -> String {
        self.data[self.data.len() - 1].clone()
    }
    /// Rotates the tile 90 degrees clockwise in place: output column `i`
    /// becomes input row order reversed, read at column `i`.
    fn rotate(&mut self) {
        let width = self.data[0].len();
        self.data = (0..width)
            .map(|col| {
                self.data
                    .iter()
                    .rev()
                    .map(|row| row.chars().nth(col).unwrap())
                    .collect()
            })
            .collect();
    }
    /// Mirrors the tile horizontally in place (each row reversed).
    fn flip(&mut self) {
        for row in self.data.iter_mut() {
            *row = row.chars().rev().collect();
        }
    }
    /// Returns a copy in the given pose. The parameter type spells out the
    /// `Orientation` alias, `(bool, u8)`: (flip first?, quarter-turns after).
    fn transform(&self, orientation: &(bool, u8)) -> Self {
        let (flipped, turns) = *orientation;
        let mut posed = self.clone();
        if flipped {
            posed.flip();
        }
        for _ in 0..turns {
            posed.rotate();
        }
        posed
    }
}
/// Assembled picture: grid coordinates (x, y) -> oriented tile placed there.
type Picture = HashMap<(i32, i32), Tile>;
/// Places the first tile at the origin and backtracks the rest into a
/// consistent grid; panics if no arrangement exists.
fn reassemble_picture(tiles: &[Tile]) -> Picture {
    let mut picture = HashMap::new();
    picture.insert((0, 0), tiles[0].clone());
    // The four slots adjacent to the seed tile are the initial frontier.
    let frontier: HashSet<(i32, i32)> = [(0, 1), (0, -1), (1, 0), (-1, 0)]
        .iter()
        .copied()
        .collect();
    if !find_neighbours(tiles, &mut picture, frontier) {
        panic!("Cannot assemble picture!");
    }
    picture
}
/// Depth-first backtracking placement of tiles into `picture`.
///
/// `possible_neighbours` is the frontier: empty grid slots adjacent to
/// already-placed tiles. For each slot, every unused tile is tried in all 8
/// orientations; a candidate must agree with each placed neighbour along the
/// shared edge. Dead ends are undone (the tile is removed) before returning.
///
/// Returns `true` once every tile has been placed.
fn find_neighbours(
    tiles: &[Tile],
    picture: &mut HashMap<(i32, i32), Tile>,
    mut possible_neighbours: HashSet<(i32, i32)>,
) -> bool {
    while !possible_neighbours.is_empty() {
        // Take an arbitrary pending slot from the frontier.
        let coordinates = *possible_neighbours.iter().next().unwrap();
        possible_neighbours.remove(&coordinates);
        // Slot may have been filled by a deeper recursion level.
        if picture.get(&coordinates).is_some() {
            continue;
        }
        for tile in tiles {
            // Skip tiles that are already part of the picture.
            if picture
                .values()
                .any(|assembled_tile| assembled_tile.id == tile.id)
            {
                continue;
            }
            for orientation in &ORIENTATIONS {
                let tile_candidate = tile.transform(orientation);
                // The candidate must match every existing neighbour's
                // facing edge exactly.
                if let Some(left_neighbour) = picture.get(&(coordinates.0 - 1, coordinates.1)) {
                    if left_neighbour.right() != tile_candidate.left() {
                        continue;
                    }
                }
                if let Some(top_neighbour) = picture.get(&(coordinates.0, coordinates.1 - 1)) {
                    if top_neighbour.bottom() != tile_candidate.top() {
                        continue;
                    }
                }
                if let Some(right_neighbour) = picture.get(&(coordinates.0 + 1, coordinates.1)) {
                    if right_neighbour.left() != tile_candidate.right() {
                        continue;
                    }
                }
                if let Some(bottom_neighbour) = picture.get(&(coordinates.0, coordinates.1 + 1)) {
                    if bottom_neighbour.top() != tile_candidate.bottom() {
                        continue;
                    }
                }
                // Tentatively place the candidate and grow the frontier with
                // its four neighbouring slots.
                picture.insert(coordinates, tile_candidate);
                let mut new_possible_neighbours = possible_neighbours.clone();
                new_possible_neighbours.insert((coordinates.0 + 1, coordinates.1));
                new_possible_neighbours.insert((coordinates.0 - 1, coordinates.1));
                new_possible_neighbours.insert((coordinates.0, coordinates.1 + 1));
                new_possible_neighbours.insert((coordinates.0, coordinates.1 - 1));
                if find_neighbours(tiles, picture, new_possible_neighbours) {
                    return true;
                }
                // Backtrack: this placement led to a dead end.
                picture.remove(&coordinates);
            }
        }
    }
    // Success only if the whole frontier was consumed with all tiles placed.
    picture.len() == tiles.len()
}
/// Part 1: product of the ids of the four corner tiles.
fn part_1(picture: &Picture) -> usize {
    // Grid bounds, computed in a single pass over the keys.
    let (mut min_x, mut max_x) = (i32::MAX, i32::MIN);
    let (mut min_y, mut max_y) = (i32::MAX, i32::MIN);
    for &(x, y) in picture.keys() {
        min_x = min_x.min(x);
        max_x = max_x.max(x);
        min_y = min_y.min(y);
        max_y = max_y.max(y);
    }
    // Multiply the ids found at the four corner coordinates.
    [(min_x, min_y), (max_x, min_y), (min_x, max_y), (max_x, max_y)]
        .iter()
        .map(|corner| picture.get(corner).unwrap().id as usize)
        .product()
}
/// Concatenates the assembled tiles into one big image, dropping the
/// one-character border of every tile (borders exist only for matching).
fn merge_tiles(picture: &Picture) -> Vec<String> {
    // Grid bounds of the assembled picture.
    let min_x = picture.keys().map(|k| k.0).min().unwrap();
    let max_x = picture.keys().map(|k| k.0).max().unwrap();
    let min_y = picture.keys().map(|k| k.1).min().unwrap();
    let max_y = picture.keys().map(|k| k.1).max().unwrap();
    let width_in_tiles = (max_x - min_x + 1) as usize;
    // One output row per interior tile row: tiles-per-side * (WIDTH - 2).
    // NOTE(review): the row count is derived from the x-extent, i.e. this
    // assumes the assembled grid is square — verify.
    let mut merged = vec![String::default(); width_in_tiles * WIDTH - 2 * width_in_tiles];
    for (iy, y) in (min_y..=max_y).enumerate() {
        for x in min_x..=max_x {
            let tile = &picture.get(&(x, y)).unwrap().data;
            // Skip the first/last row and strip the first/last column of
            // every tile, appending the interior to the matching output row.
            for (current_tile_y, line) in tile.iter().skip(1).take(WIDTH - 2).enumerate() {
                let current_line = merged.get_mut(iy * (WIDTH - 2) + current_tile_y).unwrap();
                let stripped_line = &line[1..line.len() - 1];
                (*current_line) += stripped_line;
            }
        }
    }
    merged
}
fn get_monsters_indices() -> Vec<Vec<usize>> {
SEA_MONSTER
.iter()
.map(|line| {
line.char_indices()
.filter(|(_, c)| *c == '#')
.map(|(index, _)| index)
.collect::<Vec<_>>()
})
.collect::<Vec<_>>()
}
/// Scans `picture` for the first occurrence of the monster pattern.
///
/// `monsters_indices[dy]` lists the column offsets that must be '#' on row
/// `y + dy`. The pattern's height and width are derived from
/// `monsters_indices` itself instead of the `SEA_MONSTER_*` constants, which
/// also fixes an off-by-one: the previous exclusive ranges (`0..len - H`)
/// never tested the bottom-most and right-most anchor positions.
///
/// Returns the `(x, y)` of the pattern's top-left anchor, or `None`.
fn find_monster(
    picture: &Vec<String>,
    monsters_indices: &Vec<Vec<usize>>,
) -> Option<(usize, usize)> {
    let height = monsters_indices.len();
    // Width = 1 + right-most required column offset.
    let width = match monsters_indices.iter().flatten().max() {
        Some(max_offset) => *max_offset + 1,
        None => return None, // empty pattern (also covers height == 0)
    };
    // Guard against the subtraction underflow the old code had for
    // pictures shorter than the pattern.
    if picture.len() < height {
        return None;
    }
    for y in 0..=picture.len() - height {
        let row_len = picture[y].len();
        if row_len < width {
            continue;
        }
        for x in 0..=row_len - width {
            let hit = monsters_indices.iter().enumerate().all(|(dy, offsets)| {
                let row = picture[y + dy].as_bytes();
                offsets.iter().all(|&dx| {
                    let col = x + dx;
                    col < row.len() && row[col] == b'#'
                })
            });
            if hit {
                return Some((x, y));
            }
        }
    }
    None
}
/// Overwrites every pattern cell of a monster anchored at `coordinates`
/// with 'O', so the same monster is not found again and part 2 can count
/// the remaining '#' cells.
///
/// The number of affected rows is taken from `monsters_indices` itself
/// (instead of the `SEA_MONSTER_HEIGHT` constant), keeping the function
/// consistent with the generalized `find_monster`.
fn replace_monster(
    picture: &mut Vec<String>,
    coordinates: (usize, usize),
    monsters_indices: &Vec<Vec<usize>>,
) {
    let (x0, y0) = coordinates;
    for (dy, offsets) in monsters_indices.iter().enumerate() {
        // Strings can't be mutated by index, so round-trip through chars.
        let mut chars: Vec<char> = picture[y0 + dy].chars().collect();
        for &dx in offsets {
            chars[x0 + dx] = 'O';
        }
        picture[y0 + dy] = chars.into_iter().collect();
    }
}
/// Repeatedly locates a monster and blanks it out until none remain.
fn find_all_monsters(picture: &mut Vec<String>) {
    let monsters_indices = get_monsters_indices();
    loop {
        match find_monster(&picture, &monsters_indices) {
            Some(coordinates) => replace_monster(picture, coordinates, &monsters_indices),
            None => break,
        }
    }
}
/// Part 2: orient the merged image so sea monsters are findable, blank out
/// every monster, then count the remaining '#' cells (water roughness).
fn part_2(picture: &Picture) -> usize {
    let merged_picture = merge_tiles(&picture);
    // Wrap the merged image in a Tile so its flip/rotate logic can be reused.
    let mut fake_tile = Tile::new(0, merged_picture);
    let monsters_indices = get_monsters_indices();
    // Try all 8 poses and keep the first one containing at least one monster.
    for orientation in &ORIENTATIONS {
        let orientation_candidate = fake_tile.transform(orientation);
        if find_monster(&orientation_candidate.data, &monsters_indices).is_some() {
            fake_tile = orientation_candidate;
            break;
        }
    }
    find_all_monsters(&mut fake_tile.data);
    // Every cell still '#' after blanking monsters counts toward roughness.
    fake_tile
        .data
        .iter()
        .map(|line| line.chars().filter(|c| *c == '#').count())
        .sum()
}
/// Parses the tile list from `input`, assembles the picture and checks both
/// puzzle answers against known-good values.
fn main() {
    let content = fs::read_to_string("input").unwrap();
    let mut tiles = Vec::new();
    let mut tile_data = Vec::new();
    let mut tile_id = 0;
    // Input format: "Tile <id>:" header, WIDTH rows of pixels, blank line.
    for line in content.lines() {
        if line.contains("Tile") {
            tile_id = line
                .trim_start_matches("Tile ")
                .trim_end_matches(':')
                .parse::<u32>()
                .unwrap();
        } else if line.is_empty() {
            tile_data.clear();
        } else {
            tile_data.push(line.to_owned());
        }
        // A tile is complete once WIDTH rows have been buffered.
        if tile_data.len() == WIDTH {
            tiles.push(Tile::new(tile_id, tile_data));
            tile_data = Vec::new();
        }
    }
    let picture = reassemble_picture(&tiles);
    // Regression-pinned answers for this puzzle input.
    assert_eq!(108_603_771_107_737, part_1(&picture));
    assert_eq!(2_129, part_2(&picture));
}
| true
|
f96642667f8f2c43afb6804c86cb7b14c6eb209d
|
Rust
|
jTitor/leek2
|
/src/open-source/engine/modules/src/math/linear_algebra/vec_base.rs
|
UTF-8
| 1,462
| 3.59375
| 4
|
[] |
no_license
|
/*!
Base traits for vector operations.

Represents a generic vector.

# Implementing Equality
By default, vectors should use nearly_equal in their comparison operations.
*/
pub trait VecOps<T=Self> {
    /// Gets the i'th element of this vector.
    ///
    /// # Panics
    /// Panics if `i` is out of range [0, num_elems()-1].
    fn elem_at(&self, i: usize) -> f32;
    /// Gets a mutable reference to the i'th element of this vector.
    fn mut_elem_at(&mut self, i: usize) -> &mut f32;
    /// Gets the squared magnitude of this vector (cheaper than `mag`,
    /// since it avoids the square root).
    fn sqr_mag(&self) -> f32;
    /// Gets the magnitude of this vector.
    fn mag(&self) -> f32 { self.sqr_mag().sqrt() }
    /// Performs the dot product between two vectors.
    ///
    /// TODO: don't like how this dispatches - can we template on the
    /// implementing type, or something? Then we can be sure the underlying
    /// type is always the same.
    fn dot(&self, rhs: &T) -> f32;
    /// Returns a normalized (unit-length) version of the vector.
    ///
    /// TODO: again, can we have this return its underlying type?
    fn as_normalized(&self) -> T;
    /// Returns this vector with all elements set to their absolute value.
    fn as_abs(&self) -> T;
    /// Returns a vector whose elements are the reciprocals of this
    /// vector's elements.
    fn as_reciprocal(&self) -> T;
    /// Performs a componentwise multiplication.
    fn component_mul(&self, rhs: T) -> T;
    /// Performs componentwise division.
    fn component_div(&self, rhs: T) -> T;
    /// Gets the maximum element in this vector.
    fn max_elem(&self) -> f32;
    /// Gets the minimum element in this vector.
    fn min_elem(&self) -> f32;
}
| true
|
9cece3cace689a8914bb321ed76c92f8e5cd315b
|
Rust
|
commieprincess/aoc2017
|
/day_20/src/main.rs
|
UTF-8
| 2,572
| 3.1875
| 3
|
[] |
no_license
|
use std::collections::HashSet;
/// Simulates the particle swarm: steps 1000 ticks, removing colliding
/// particles each tick, then reports the particle that stayed closest to the
/// origin (part 1) and the number of survivors (part 2).
fn main() {
    // Each line looks like "p=<x,y,z>, v=<x,y,z>, a=<x,y,z>"; splitting on
    // ',' yields 9 fields with "p=<" / "v=<" / "a=<" prefixes and ">" suffixes.
    let input : Vec<Vec<&str>> = include_str!("input.txt").trim().lines().map(|x| x.split(',').map(|y| y.trim()).collect::<Vec<&str>>()).collect();
    let mut input : Vec<Particle> = input.iter().map(|vec| {
        // [3..] strips the "p=<" prefix; [..len-1] strips the ">" suffix.
        let x0 : i64 = vec[0][3..].parse().unwrap();
        let y0 = vec[1].parse().unwrap();
        let z0 = vec[2][..vec[2].len() - 1].parse().unwrap();
        let pos_vector = Vector::new(x0, y0, z0);
        let x1 : i64 = vec[3][3..].parse().unwrap();
        let y1 = vec[4].parse().unwrap();
        let z1 = vec[5][..vec[5].len() - 1].parse().unwrap();
        let vel_vector = Vector::new(x1, y1, z1);
        let x2 : i64 = vec[6][3..].parse().unwrap();
        let y2 = vec[7].parse().unwrap();
        let z2 = vec[8][..vec[8].len() - 1].parse().unwrap();
        let acc_vector = Vector::new(x2, y2, z2);
        Particle::new(pos_vector, vel_vector, acc_vector)
    }).collect();
    for _ in 0..1000 {
        let mut collision_positions : HashSet<Vector> = HashSet::new();
        for p in &mut input {
            p.update();
        }
        // Record every position shared by two distinct particles.
        // NOTE(review): `p0 != p1` compares whole particles, so two fully
        // identical particles would not register as a collision — verify
        // this cannot occur in the input.
        for p0 in input.iter() {
            for p1 in input.iter() {
                if p0 != p1 && p0.pos == p1.pos {
                    collision_positions.insert(p0.pos.clone());
                    break;
                }
            }
        }
        input.retain(|x| !collision_positions.contains(&x.pos));
    }
    // Part 1: index of the particle that stayed closest to the origin.
    // `min_by_key` returns the first minimum, matching the old strict-`<`
    // scan, and replaces the 999999999999 magic sentinel.
    let closest = input
        .iter()
        .enumerate()
        .min_by_key(|(_, p)| p.largest_distance_from_origin)
        .map(|(i, _)| i)
        .unwrap_or(0);
    println!("{:?}", closest);
    println!("{}", input.len());
}
/// A moving particle: position, velocity, acceleration, plus the farthest
/// Manhattan distance from the origin it has ever reached.
#[derive(Debug, Hash, Eq, PartialEq, Ord, PartialOrd, Clone)]
struct Particle {
    pos: Vector,
    vel: Vector,
    acc: Vector,
    largest_distance_from_origin: u64,
}
impl Particle {
    /// Builds a particle that has not moved yet.
    fn new(pos: Vector, vel: Vector, acc: Vector) -> Particle {
        Particle {
            pos,
            vel,
            acc,
            largest_distance_from_origin: 0,
        }
    }
    /// Advances one tick (acceleration into velocity, velocity into
    /// position) and updates the high-water distance mark.
    fn update(&mut self) {
        self.vel.add(&self.acc);
        self.pos.add(&self.vel);
        let distance = self.pos.distance_from_origin();
        if distance > self.largest_distance_from_origin {
            self.largest_distance_from_origin = distance;
        }
    }
}
/// A 3-D integer vector.
#[derive(Debug, PartialEq, Eq, Hash, Ord, PartialOrd, Clone)]
struct Vector {
    x: i64,
    y: i64,
    z: i64,
}
impl Vector {
    /// Builds a vector from its three components.
    fn new(x: i64, y: i64, z: i64) -> Vector {
        Vector { x, y, z }
    }
    /// Manhattan distance from the origin: |x| + |y| + |z|.
    fn distance_from_origin(&self) -> u64 {
        let manhattan = self.x.abs() + self.y.abs() + self.z.abs();
        manhattan as u64
    }
    /// Componentwise in-place addition of `rhs`.
    fn add(&mut self, rhs: &Vector) {
        self.x += rhs.x;
        self.y += rhs.y;
        self.z += rhs.z;
    }
}
| true
|
80c7b7f0bcc805bf0af0ec43e0917fec74bbc7d5
|
Rust
|
hexium310/git-issue
|
/src/main.rs
|
UTF-8
| 3,072
| 3.046875
| 3
|
[
"MIT"
] |
permissive
|
#![cfg_attr(test, allow(unused_imports))]
#[macro_use]
extern crate clap;
use std::process::Command;
use std::str;
use clap::{ App, ArgMatches };
use regex::Regex;
mod subcommands;
use crate::subcommands::*;
#[cfg(not(test))]
/// Entry point: derives the GitHub owner/repo from `git remote -v`, then
/// dispatches the requested subcommand.
fn main() {
    let yaml = load_yaml!("cli.yml");
    let mut matcher = App::from_yaml(yaml);
    let matches = matcher.clone().get_matches();
    // These operations depend on the user's environment, so fail with a
    // diagnostic `expect` message instead of a bare `unwrap`.
    let output = Command::new("git")
        .arg("remote")
        .arg("-v")
        .output()
        .expect("failed to run `git remote -v` (is git installed and is this a repository?)");
    // Constant pattern: compiling it cannot fail at runtime.
    let rg = Regex::new(r"origin\tgit@github.com:(.*)/(.*).git \(fetch\)").unwrap();
    let stdout = str::from_utf8(&output.stdout)
        .expect("`git remote -v` produced non-UTF-8 output");
    let captured = rg
        .captures(stdout)
        .expect("no SSH GitHub `origin` remote found (expected git@github.com:owner/repo.git)");
    let (owner, repo) = (&captured[1], &captured[2]);
    match matches.subcommand() {
        ("list", Some(sub_matches)) => {
            list(owner, repo, &build_query_string(sub_matches));
        },
        ("show", Some(sub_matches)) => {
            if let Ok(number) = validate_number(sub_matches.value_of("number")) {
                show(owner, repo, number);
            } else {
                println!("{}", sub_matches.usage());
            }
        },
        ("", None) => {
            matcher.print_help().unwrap();
        },
        _ => unreachable!(),
    }
}
/// Builds a URL query string ("?k1=v1&k2=v2") from the parsed arguments,
/// or an empty string when no arguments were given.
fn build_query_string(matches: &ArgMatches<'_>) -> String {
    let query = matches
        .args
        .iter()
        .map(|(name, value)| format!("{}={}", name, value.vals[0].to_str().unwrap()))
        .collect::<Vec<_>>()
        .join("&");
    // Explicit emptiness check instead of the previous `vec!["", query]` +
    // `dedup()` trick, which relied on dedup removing the duplicate empty
    // string — same result, far clearer.
    if query.is_empty() {
        String::new()
    } else {
        format!("?{}", query)
    }
}
/// Parses an optional CLI value into an `i32`.
///
/// Returns `CliError::NotEnoughCommands` when the value is absent, or a
/// parse error (converted via `From`) when it is not a valid integer.
fn validate_number(value: Option<&str>) -> Result<i32, CliError> {
    Ok(value.ok_or(CliError::NotEnoughCommands)?.parse::<i32>()?)
}
#[cfg(test)]
mod tests {
    use super::{
        build_query_string,
        validate_number,
    };
    use clap::{ App, Arg };
    // `build_query_string` should return "" with no args and "?k=v&..."
    // otherwise; clap does not guarantee argument iteration order, so the
    // two-arg case accepts either ordering.
    #[test]
    fn test_build_query_string() {
        let matcher = App::new("test")
            .arg(
                Arg::with_name("opt1")
                    .long("opt1")
                    .takes_value(true)
            )
            .arg(
                Arg::with_name("opt2")
                    .long("opt2")
                    .takes_value(true)
            );
        let no_matches = matcher.clone().get_matches();
        let one_matche = matcher.clone().get_matches_from(vec![
            "test", "--opt1=val1"
        ]);
        let two_matches = matcher.clone().get_matches_from(vec![
            "test", "--opt1=val1", "--opt2=val2"
        ]);
        assert_eq!(build_query_string(&no_matches), "");
        assert_eq!(build_query_string(&one_matche), "?opt1=val1");
        let case_two = build_query_string(&two_matches);
        assert!(case_two == "?opt1=val1&opt2=val2" || case_two == "?opt2=val2&opt1=val1" );
    }
    // A present, numeric value parses successfully.
    #[test]
    fn test_valid_number() {
        assert_eq!(validate_number(Some("1")).ok(), Some(1));
    }
    // Empty and non-numeric values are rejected.
    #[test]
    fn test_invalid_number() {
        assert!(validate_number(Some("")).is_err());
        assert!(validate_number(Some("a")).is_err());
    }
}
| true
|
f790cc11cd7450af153d04c6ace590d95030d282
|
Rust
|
tinetti/krust
|
/src/lib.rs
|
UTF-8
| 3,544
| 3.171875
| 3
|
[] |
no_license
|
use clap::{App, Arg, ArgMatches, SubCommand};
/// Builds the `clap` command-line definition for krust.
///
/// The `--broker` option is accepted both at the top level and on the
/// `consumer` subcommand; `topic list` is the default `topic` action.
fn create_clap_app<'a, 'b>() -> App<'a, 'b> {
    let broker_arg = Arg::with_name("broker")
        .short("b")
        .long("broker")
        .value_name("broker")
        .help("Bootstrap broker(s)")
        .multiple(true)
        .takes_value(true);
    // Tail expression instead of the previous explicit `return …;`.
    App::new("krust")
        .version("1.0")
        .about("Krust is a kafka command line client implemented in Rust")
        .arg(&broker_arg)
        .arg(
            Arg::with_name("verbose")
                .short("v")
                .multiple(true)
                .help("Sets the verbosity level\n(use -vv for even more verbosity)"),
        )
        .subcommand(
            SubCommand::with_name("consumer")
                .about("kafka consumer")
                .arg(&broker_arg)
                .arg(
                    Arg::with_name("topic")
                        .short("t")
                        .long("topic")
                        .value_name("topic")
                        .help("Topic(s) from which to consume")
                        .takes_value(true)
                        .multiple(true)
                        .required(true),
                ),
        )
        .subcommand(
            SubCommand::with_name("topic")
                .alias("topics")
                .about("Get information about one or more topics")
                .subcommand(
                    SubCommand::with_name("list")
                        .about("List topics (this is the default subcommand)"),
                ),
        )
}
/// Parses the command line (via the injected `get_matches` strategy) and
/// dispatches on the selected subcommand, printing diagnostics.
///
/// Currently always returns `None`; the selected command name is intended
/// to be returned once command handling is implemented.
fn get_command<'a, 'b>(
    app: App<'a, 'b>,
    get_matches: fn(App<'a, 'b>) -> ArgMatches<'a>,
) -> Option<String> {
    let matches = get_matches(app.clone());
    // BUGFIX: the argument is registered as "broker" (and is multi-valued);
    // the previous lookup used the non-existent key "brokers" and therefore
    // never matched.
    if let Some(brokers) = matches.values_of("broker") {
        let brokers = brokers.collect::<Vec<_>>();
        println!("Value for brokers: {:?}", brokers);
    }
    match matches.occurrences_of("verbose") {
        0 => println!("Verbose mode: off"),
        1 => println!("Verbose mode: kind of on"),
        2 => println!("Verbose mode: on"),
        _ => println!("Don't be crazy"),
    }
    match matches.subcommand() {
        ("consumer", Some(consumer_matches)) => {
            // "topic" is marked `required`, so unwrap cannot fail here.
            let topics = consumer_matches
                .values_of("topic")
                .unwrap()
                .collect::<Vec<_>>();
            println!("TODO: topics: {:?}", topics);
            if let Some(b) = consumer_matches.values_of("broker") {
                let brokers = b.collect::<Vec<_>>();
                println!("TODO: brokers: {:?}", brokers);
            }
        }
        ("topic", Some(topic_matches)) => {
            match topic_matches.subcommand() {
                // list is the default
                (_, _) => {
                    println!("TODO: list topics");
                }
            }
        }
        _ => {
            // No recognised subcommand: show the usage text.
            if let Err(err) = app.clone().print_help() {
                println!("error printing help: {}", err);
            }
        }
    }
    // Continued program logic goes here...
    None
}
/// CLI entry point: builds the clap application and processes the actual
/// process arguments.
pub fn run<'a>() {
    let _command = get_command(create_clap_app(), App::get_matches);
}
#[cfg(test)]
mod tests {
    // Placeholder test: the commented-out code below sketches how argument
    // parsing could be exercised with `get_matches_from_safe_borrow`.
    #[test]
    fn it_works() {
        // let arg_vec = vec!["my_prog", "some", "args", "to", "parse"];
        //
        // let mut app = App::new("myprog");
        // // Args and options go here...
        // let matches = app
        //     .get_matches_from_safe_borrow(arg_vec)
        //     .unwrap_or_else(|e| panic!("An error occurs: {}", e));
    }
}
| true
|
e18404f62c617f5b80b796ac76ee8d6b2dd732b5
|
Rust
|
tomzhang/scylla-rust-driver
|
/scylla/src/frame/types.rs
|
UTF-8
| 9,614
| 2.75
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
//! CQL binary protocol in-wire types.
use anyhow::Result;
use byteorder::{BigEndian, ReadBytesExt};
use bytes::BufMut;
use std::collections::HashMap;
use std::str;
use uuid::Uuid;
use crate::frame::value::Value;
/// Consumes and returns the first `count` bytes of `buf`, advancing the
/// slice past them. Fails without consuming anything when fewer than
/// `count` bytes remain.
fn read_raw_bytes<'a>(count: usize, buf: &mut &'a [u8]) -> Result<&'a [u8]> {
    if buf.len() < count {
        return Err(anyhow!(
            "not enough bytes in buffer: expected {}, was {}",
            count,
            buf.len()
        ));
    }
    // Split off the requested prefix and leave the remainder in `buf`.
    let (ret, rest) = buf.split_at(count);
    *buf = rest;
    Ok(ret)
}
pub fn read_int(buf: &mut &[u8]) -> Result<i32> {
let v = buf.read_i32::<BigEndian>()?;
Ok(v)
}
pub fn write_int(v: i32, buf: &mut impl BufMut) {
buf.put_i32(v);
}
fn read_int_length(buf: &mut &[u8]) -> Result<usize> {
let v = read_int(buf)?;
if v < 0 {
return Err(anyhow!("invalid length of type `int`: {}", v));
}
Ok(v as usize)
}
fn write_int_length(v: usize, buf: &mut impl BufMut) -> Result<()> {
if v > i32::MAX as usize {
return Err(anyhow!("length too big to be encoded as `int`: {}", v));
}
write_int(v as i32, buf);
Ok(())
}
#[test]
fn type_int() {
let vals = vec![i32::MIN, -1, 0, 1, i32::MAX];
for val in vals.iter() {
let mut buf = Vec::new();
write_int(*val, &mut buf);
assert_eq!(read_int(&mut &buf[..]).unwrap(), *val);
}
}
pub fn read_long(buf: &mut &[u8]) -> Result<i64> {
let v = buf.read_i64::<BigEndian>()?;
Ok(v)
}
pub fn write_long(v: i64, buf: &mut impl BufMut) {
buf.put_i64(v);
}
#[test]
fn type_long() {
let vals = vec![i64::MIN, -1, 0, 1, i64::MAX];
for val in vals.iter() {
let mut buf = Vec::new();
write_long(*val, &mut buf);
assert_eq!(read_long(&mut &buf[..]).unwrap(), *val);
}
}
pub fn read_short(buf: &mut &[u8]) -> Result<i16> {
let v = buf.read_i16::<BigEndian>()?;
Ok(v)
}
pub fn write_short(v: i16, buf: &mut impl BufMut) {
buf.put_i16(v);
}
fn read_short_length(buf: &mut &[u8]) -> Result<usize> {
let v = read_short(buf)?;
if v < 0 {
return Err(anyhow!("invalid length of type `short`: {}", v));
}
Ok(v as usize)
}
fn write_short_length(v: usize, buf: &mut impl BufMut) -> Result<()> {
if v > i16::MAX as usize {
return Err(anyhow!("length too big to be encoded as `short`: {}", v));
}
write_short(v as i16, buf);
Ok(())
}
#[test]
fn type_short() {
let vals = vec![i16::MIN, -1, 0, 1, i16::MAX];
for val in vals.iter() {
let mut buf = Vec::new();
write_short(*val, &mut buf);
assert_eq!(read_short(&mut &buf[..]).unwrap(), *val);
}
}
// https://github.com/apache/cassandra/blob/trunk/doc/native_protocol_v4.spec#L208
pub fn read_bytes_opt<'a>(buf: &mut &'a [u8]) -> Result<Option<&'a [u8]>> {
let len = read_int(buf)?;
if len < 0 {
return Ok(None);
}
let len = len as usize;
let v = Some(read_raw_bytes(len, buf)?);
Ok(v)
}
// Same as read_bytes, but we assume the value won't be `null`
pub fn read_bytes<'a>(buf: &mut &'a [u8]) -> Result<&'a [u8]> {
let len = read_int_length(buf)?;
let v = read_raw_bytes(len, buf)?;
Ok(v)
}
pub fn write_bytes(v: &[u8], buf: &mut impl BufMut) -> Result<()> {
write_int_length(v.len(), buf)?;
buf.put_slice(v);
Ok(())
}
pub fn write_short_bytes(v: &[u8], buf: &mut impl BufMut) -> Result<()> {
write_short_length(v.len(), buf)?;
buf.put_slice(v);
Ok(())
}
pub fn read_bytes_map(buf: &mut &[u8]) -> Result<HashMap<String, Vec<u8>>> {
let len = read_short_length(buf)?;
let mut v = HashMap::with_capacity(len);
for _ in 0..len {
let key = read_string(buf)?.to_owned();
let val = read_bytes(buf)?.to_owned();
v.insert(key, val);
}
Ok(v)
}
pub fn write_bytes_map<B>(v: &HashMap<String, B>, buf: &mut impl BufMut) -> Result<()>
where
B: AsRef<[u8]>,
{
let len = v.len();
write_short_length(len, buf)?;
for (key, val) in v.iter() {
write_string(key, buf)?;
write_bytes(val.as_ref(), buf)?;
}
Ok(())
}
#[test]
fn type_bytes_map() {
let mut val = HashMap::new();
val.insert("".to_owned(), vec![]);
val.insert("EXTENSION1".to_owned(), vec![1, 2, 3]);
val.insert("EXTENSION2".to_owned(), vec![4, 5, 6]);
let mut buf = Vec::new();
write_bytes_map(&val, &mut buf).unwrap();
assert_eq!(read_bytes_map(&mut &*buf).unwrap(), val);
}
pub fn read_string<'a>(buf: &mut &'a [u8]) -> Result<&'a str> {
let len = read_short_length(buf)?;
let raw = read_raw_bytes(len, buf)?;
let v = str::from_utf8(raw)?;
Ok(v)
}
pub fn write_string(v: &str, buf: &mut impl BufMut) -> Result<()> {
let raw = v.as_bytes();
write_short_length(v.len(), buf)?;
buf.put_slice(raw);
Ok(())
}
#[test]
fn type_string() {
let vals = vec![String::from(""), String::from("hello, world!")];
for val in vals.iter() {
let mut buf = Vec::new();
write_string(val, &mut buf).unwrap();
assert_eq!(read_string(&mut &buf[..]).unwrap(), *val);
}
}
pub fn read_long_string<'a>(buf: &mut &'a [u8]) -> Result<&'a str> {
let len = read_int_length(buf)?;
let raw = read_raw_bytes(len, buf)?;
let v = str::from_utf8(raw)?;
Ok(v)
}
pub fn write_long_string(v: &str, buf: &mut impl BufMut) -> Result<()> {
let raw = v.as_bytes();
let len = raw.len();
write_int_length(len, buf)?;
buf.put_slice(raw);
Ok(())
}
#[test]
fn type_long_string() {
let vals = vec![String::from(""), String::from("hello, world!")];
for val in vals.iter() {
let mut buf = Vec::new();
write_long_string(val, &mut buf).unwrap();
assert_eq!(read_long_string(&mut &buf[..]).unwrap(), *val);
}
}
pub fn read_string_map(buf: &mut &[u8]) -> Result<HashMap<String, String>> {
let len = read_short_length(buf)?;
let mut v = HashMap::with_capacity(len);
for _ in 0..len {
let key = read_string(buf)?.to_owned();
let val = read_string(buf)?.to_owned();
v.insert(key, val);
}
Ok(v)
}
pub fn write_string_map(v: &HashMap<String, String>, buf: &mut impl BufMut) -> Result<()> {
let len = v.len();
write_short_length(len, buf)?;
for (key, val) in v.iter() {
write_string(key, buf)?;
write_string(val, buf)?;
}
Ok(())
}
#[test]
fn type_string_map() {
let mut val = HashMap::new();
val.insert(String::from(""), String::from(""));
val.insert(String::from("CQL_VERSION"), String::from("3.0.0"));
val.insert(String::from("THROW_ON_OVERLOAD"), String::from(""));
let mut buf = Vec::new();
write_string_map(&val, &mut buf).unwrap();
assert_eq!(read_string_map(&mut &buf[..]).unwrap(), val);
}
pub fn read_string_list(buf: &mut &[u8]) -> Result<Vec<String>> {
let len = read_short_length(buf)?;
let mut v = Vec::with_capacity(len);
for _ in 0..len {
v.push(read_string(buf)?.to_owned());
}
Ok(v)
}
pub fn write_string_list(v: &[String], buf: &mut impl BufMut) -> Result<()> {
let len = v.len();
write_short_length(len, buf)?;
for v in v.iter() {
write_string(v, buf)?;
}
Ok(())
}
#[test]
fn type_string_list() {
let mut val = Vec::new();
val.push("".to_owned());
val.push("CQL_VERSION".to_owned());
val.push("THROW_ON_OVERLOAD".to_owned());
let mut buf = Vec::new();
write_string_list(&val, &mut buf).unwrap();
assert_eq!(read_string_list(&mut &buf[..]).unwrap(), val);
}
pub fn read_string_multimap(buf: &mut &[u8]) -> Result<HashMap<String, Vec<String>>> {
let len = read_short_length(buf)?;
let mut v = HashMap::with_capacity(len);
for _ in 0..len {
let key = read_string(buf)?.to_owned();
let val = read_string_list(buf)?;
v.insert(key, val);
}
Ok(v)
}
pub fn write_string_multimap(
v: &HashMap<String, Vec<String>>,
buf: &mut impl BufMut,
) -> Result<()> {
let len = v.len();
write_short_length(len, buf)?;
for (key, val) in v.iter() {
write_string(key, buf)?;
write_string_list(val, buf)?;
}
Ok(())
}
#[test]
fn type_string_multimap() {
let mut val = HashMap::new();
val.insert(String::from(""), vec![String::from("")]);
val.insert(
String::from("versions"),
vec![String::from("3.0.0"), String::from("4.2.0")],
);
val.insert(String::from("empty"), vec![]);
let mut buf = Vec::new();
write_string_multimap(&val, &mut buf).unwrap();
assert_eq!(read_string_multimap(&mut &buf[..]).unwrap(), val);
}
pub fn read_uuid(buf: &mut &[u8]) -> Result<Uuid> {
let raw = read_raw_bytes(16, buf)?;
// It's safe to unwrap here because Uuid::from_slice only fails
// if the argument slice's length is not 16.
Ok(Uuid::from_slice(raw).unwrap())
}
pub fn write_uuid(uuid: &Uuid, buf: &mut impl BufMut) {
buf.put_slice(&uuid.as_bytes()[..]);
}
#[test]
fn type_uuid() {
let u = Uuid::parse_str("f3b4958c-52a1-11e7-802a-010203040506").unwrap();
let mut buf = Vec::new();
write_uuid(&u, &mut buf);
let u2 = read_uuid(&mut &*buf).unwrap();
assert_eq!(u, u2);
}
/// Serializes a list of CQL values: a 2-byte count followed by each value
/// encoded as an `[int]` length plus raw bytes.
///
/// Sentinel lengths: -1 encodes a `Null` value, -2 encodes `NotSet`
/// (per the CQL binary protocol this file implements).
pub fn write_values(values: &[Value], buf: &mut impl BufMut) {
    // NOTE(review): `values.len() as i16` silently truncates above
    // i16::MAX items — confirm callers never pass that many values.
    buf.put_i16(values.len() as i16);
    for value in values {
        match value {
            Value::Val(v) => {
                write_int(v.len() as i32, buf);
                buf.put_slice(&v[..]);
            }
            Value::Null => write_int(-1, buf),
            Value::NotSet => write_int(-2, buf),
        }
    }
}
| true
|
b82bdce3faac06b70476ba4580acc46c61680ad8
|
Rust
|
tycobbb/scribe-lite
|
/api/src/core/socket/socket.rs
|
UTF-8
| 1,197
| 2.828125
| 3
|
[] |
no_license
|
use super::channel::Channel;
use super::routes::Routes;
use crate::core::empty;
use yansi::{Color, Paint};
// constants
const HOST: &'static str = "127.0.0.1:8080";
// -- types --
pub struct Socket;
// -- impls --
impl Socket {
// -- impls/commands
pub fn listen<R>(&self, routes: R)
where
R: Routes + Clone,
{
info!(
"🧦 {} {}",
Paint::default("Socket is starting on").bold(),
Paint::default(HOST.replace("127.0.0.1", "http://localhost"))
.bold()
.underline()
);
let finished = ws::WebSocket::new(Channel::new(routes))
.and_then(|s| s.listen(HOST))
.map(empty::ignore);
self.on_finished(finished);
}
// -- impls/events
fn on_finished(&self, result: ws::Result<()>) {
if let Err(error) = result {
error!(
"🧦 {}: {}",
Paint::default("Socket finished with error")
.bold()
.fg(Color::Red),
error
);
} else {
info!("🧦 {}", Paint::default("Socket finished").bold());
}
}
}
| true
|
5f625ac627328ca56a8300fc601546c5ae92c23f
|
Rust
|
Anwesh43/rust-udemy-course-practice
|
/union_demo.rs
|
UTF-8
| 435
| 3.484375
| 3
|
[] |
no_license
|
union IntOrFloat {
i : i32,
f : f32
}
// Inspects the union via pattern matching; reading any field of a union is
// `unsafe` because the compiler cannot know which variant was stored.
fn check_value(un : IntOrFloat) {
    unsafe {
        match un {
            // Matches only when the stored bits, read as an i32, equal 5.
            IntOrFloat {i : 5} => println!("value is {}", un.i),
            // Otherwise reinterpret the same bits as an f32.
            IntOrFloat {f} => println!("float value is {}", un.f)
        }
    }
}
fn main() {
let un = IntOrFloat {i : 5};
unsafe {println!("value is {}", un.i)};
check_value(un);
let un1 = IntOrFloat {f : 2.0};
check_value(un1);
}
| true
|
631007cc1a3fe2dff3fca1c4209f94f07dafa100
|
Rust
|
0b01/dyn-grammar
|
/src/grammar/test_grammar.rs
|
UTF-8
| 11,463
| 3.28125
| 3
|
[] |
no_license
|
use crate::grammar::*;
use self::Token::*;
// Builds a Vec of `Terminal` tokens from a comma-separated identifier list,
// e.g. `sentence!(a, b)` -> vec![Terminal("a"), Terminal("b")].
macro_rules! sentence {
    ($($i: ident),*) => {
        {
            let mut v = vec![];
            $(
                v.push(Terminal(stringify!($i)));
            )*
            v
        }
    };
}
#[test]
fn test_parse_simple_grammar() {
// Grammar
// S -> a S b .
// S -> .
let mut g = Grammar::new(
"S".to_owned(),
vec![
Rule {
name: "S".to_owned(),
id: 0,
production: vec![
Terminal("a"),
NonTerminal("S".to_owned()),
Terminal("b"),
]
},
Rule {
name: "S".to_owned(),
id: 1,
production: vec![
Epsilon,
]
}
]
);
g.build().unwrap();
// assert!(g.parse(sentence!(a,b)).is_ok());
// assert!(g.parse(sentence!(a,a,b,b)).is_ok());
// assert!(g.parse(sentence!(a,a)).is_err());
// assert!(g.parse(sentence!(a,a,b)).is_err());
// assert!(g.parse(sentence!(a,a,b,b,b)).is_err());
let ret = g.parse(sentence!(a,b));
println!("{:#?}", ret);
}
#[test]
fn test_first_set_clash() {
let mut g = Grammar::new(
"S".to_owned(),
vec![
Rule {
name: "S".to_owned(),
id: 0,
production: vec![
Terminal("a"),
Terminal("a"),
NonTerminal("S".to_owned()),
]
},
Rule {
name: "S".to_owned(),
id: 1,
production: vec![
Terminal("a"),
Epsilon,
]
}
]
);
assert!(g.build().is_err());
}
#[test]
fn test_ab() {
// Grammar:
// S -> a A b B .
// A -> a A .
// B -> b B .
// A -> .
// B -> .
let mut g = Grammar::new("S".to_owned(),
vec![
Rule {
name: "S".to_owned(),
id: 0,
production: vec![
Terminal("a"),
NonTerminal("A".to_owned()),
Terminal("b"),
NonTerminal("B".to_owned()),
]
},
Rule {
name: "A".to_owned(),
id: 1,
production: vec![
Terminal("a"),
NonTerminal("A".to_owned()),
]
},
Rule {
name: "B".to_owned(),
id: 2,
production: vec![
Terminal("b"),
NonTerminal("B".to_owned()),
]
},
Rule {
name: "A".to_owned(),
id: 3,
production: vec![
Epsilon,
]
},
Rule {
name: "B".to_owned(),
id: 4,
production: vec![
Epsilon,
]
},
]
);
g.build().unwrap();
g.parse(sentence!(a, b)).unwrap();
assert!(g.parse(sentence!(a, b)).is_ok());
assert!(g.parse(sentence!(a, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, b)).is_ok());
assert!(g.parse(sentence!(a, b, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, a, b)).is_ok());
assert!(g.parse(sentence!(a, b, b, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, b, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, a, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, b)).is_ok());
assert!(g.parse(sentence!(a, b, b, b, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, b, b, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, a, b, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, a, b)).is_ok());
assert!(g.parse(sentence!(a, b, b, b, b, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, b, b, b, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, a, b, b, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, b, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, a, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, a, a, b)).is_ok());
assert!(g.parse(sentence!(a, b, b, b, b, b, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, b, b, b, b, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, a, b, b, b, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, b, b, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, a, b, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, a, a, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, a, a, a, b)).is_ok());
assert!(g.parse(sentence!(a, b, b, b, b, b, b, b, b)).is_ok());
assert!(g.parse(sentence!(a, a, b, b, b, b, b, b, b)).is_ok());
assert!(g.parse(sentence!(a, a)).is_err());
}
#[test]
fn test_abc() {
// Grammar:
// S -> a A .
// A -> a A .
// A -> b B .
// B -> b B .
// B -> c C .
// C -> c C .
// C -> .
// Example:
// TODO:
let mut g = Grammar::new(
"S".to_owned(),
vec![
Rule {
name: "S".to_owned(),
id: 0,
production: vec![
Terminal("a"),
NonTerminal("A".to_owned()),
]
},
Rule {
name: "A".to_owned(),
id: 1,
production: vec![
Terminal("a"),
NonTerminal("A".to_owned()),
]
},
Rule {
name: "A".to_owned(),
id: 2,
production: vec![
NonTerminal("B".to_owned()),
]
},
Rule {
name: "B".to_owned(),
id: 3,
production: vec![
Terminal("c"),
]
},
Rule {
name: "B".to_owned(),
id: 4,
production: vec![
Terminal("b"),
NonTerminal("B".to_owned()),
]
},
]
);
g.build().unwrap();
assert!(g.parse(sentence!(a, c)).is_ok());
assert!(g.parse(sentence!(a, a, c)).is_ok());
assert!(g.parse(sentence!(a, b, c)).is_ok());
assert!(g.parse(sentence!(a, a, a, c)).is_ok());
assert!(g.parse(sentence!(a, a, b, c)).is_ok());
assert!(g.parse(sentence!(a, b, b, c)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, c)).is_ok());
assert!(g.parse(sentence!(a, a, a, b, c)).is_ok());
assert!(g.parse(sentence!(a, a, b, b, c)).is_ok());
assert!(g.parse(sentence!(a, b, b, b, c)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, a, c)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, b, c)).is_ok());
assert!(g.parse(sentence!(a, a, a, b, b, c)).is_ok());
assert!(g.parse(sentence!(a, a, b, b, b, c)).is_ok());
assert!(g.parse(sentence!(a, b, b, b, b, c)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, a, a, c)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, a, b, c)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, b, b, c)).is_ok());
assert!(g.parse(sentence!(a, a, a, b, b, b, c)).is_ok());
assert!(g.parse(sentence!(a, a, b, b, b, b, c)).is_ok());
assert!(g.parse(sentence!(a, b, b, b, b, b, c)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, a, a, a, c)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, a, a, b, c)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, a, b, b, c)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, b, b, b, c)).is_ok());
assert!(g.parse(sentence!(a, a, a, b, b, b, b, c)).is_ok());
assert!(g.parse(sentence!(a, a, b, b, b, b, b, c)).is_ok());
assert!(g.parse(sentence!(a, b, b, b, b, b, b, c)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, a, a, a, a, c)).is_ok());
assert!(g.parse(sentence!(a, a, a, a, a, a, a, b, c)).is_ok());
assert!(g.parse(sentence!(a, a)).is_err());
assert!(g.parse(sentence!(a, b)).is_err());
assert!(g.parse(sentence!(b, b)).is_err());
assert!(g.parse(sentence!(b, c)).is_err());
assert!(g.parse(sentence!(b, a)).is_err());
assert!(g.parse(sentence!(c, b, a)).is_err());
assert!(g.parse(sentence!(c, a)).is_err());
}
#[test]
fn test_stackoverflow() {
let mut g = Grammar::new(
"S".to_owned(),
vec![
Rule {
name: "S".to_owned(),
id: 0,
production: vec![
NonTerminal("S".to_owned()),
Terminal("a"),
Terminal("b"),
]
},
Rule {
name: "S".to_owned(),
id: 1,
production: vec![
Epsilon,
]
}
]
);
assert!(g.build().is_err());
}
#[test]
fn test_stackoverflow_other_case() {
let mut g = Grammar::new(
"S".to_owned(),
vec![
Rule {
name: "S".to_owned(),
id: 0,
production: vec![
Terminal("a"),
NonTerminal("S".to_owned()),
Terminal("b"),
]
},
Rule {
name: "S".to_owned(),
id: 1,
production: vec![
NonTerminal("S".to_owned()),
Terminal("a"),
Terminal("a"),
Epsilon,
]
}
]
);
assert!(g.build().is_err());
}
#[test]
fn test_parens() {
let mut g = Grammar::new(
"S".to_owned(),
vec![
Rule {
name: "S".to_owned(),
id: 0,
production: vec![
Terminal("L"),
NonTerminal("A".to_owned()),
Terminal("R"),
]
},
Rule {
name: "S".to_owned(),
id: 1,
production: vec![
Terminal("a")
]
},
Rule {
name: "A".to_owned(),
id: 2,
production: vec![
NonTerminal("S".to_owned()),
NonTerminal("S".to_owned()),
Epsilon,
]
}
]
);
assert!(g.build().is_ok());
let ret = g.parse(sentence!( L, a, a, R ));
println!("{:?}", ret);
}
#[test]
fn test_undefined_nonterm() {
    // Grammar references nonterminal "A" without defining a rule for it.
    // NOTE(review): the test name suggests build() should fail here, and
    // the sibling failure tests assert `g.build().is_err()`; this one
    // unwraps instead — confirm whether an undefined nonterminal is meant
    // to be a build error.
    let mut g = Grammar::new(
        "S".to_owned(),
        vec![
            Rule {
                name: "S".to_owned(),
                id: 0,
                production: vec![
                    Terminal("L"),
                    NonTerminal("A".to_owned()),
                    Terminal("R"),
                ]
            },
            Rule {
                name: "S".to_owned(),
                id: 1,
                production: vec![
                    Terminal("a")
                ]
            },
        ]
    );
    g.build().unwrap();
}
| true
|
0c06f453765db5dbb0d9473855df5e8203c88a43
|
Rust
|
davechallis/rust-raytracer
|
/src/material/metal.rs
|
UTF-8
| 1,159
| 2.8125
| 3
|
[
"Apache-2.0"
] |
permissive
|
use crate::vec3::Vec3;
use crate::ray::Ray;
use crate::hitable::HitRecord;
use super::Material;
use crate::utils;
use crate::texture::Texture;
#[derive(Clone)]
pub struct Metal<T: Texture + Clone> {
albedo: T,
fuzz: f32,
}
impl<T: Texture + Clone> Metal<T> {
    /// Creates a metal material with the given albedo texture, clamping
    /// `fuzz` to at most 1.0.
    pub fn new(albedo: T, fuzz: f32) -> Self {
        // f32::min returns the other operand for NaN, matching the
        // original `if fuzz < 1.0` comparison exactly.
        Metal {
            albedo,
            fuzz: fuzz.min(1.0),
        }
    }
}
impl<T: Texture + Clone> Material for Metal<T> {
    /// Mirror-reflects the incoming ray about the hit normal, jittered by
    /// the fuzz factor; returns the attenuation and scattered ray, or
    /// `None` when the scattered direction points into the surface.
    fn scatter(&self, ray_in: &Ray, hit_rec: &HitRecord) -> Option<(Vec3, Ray)> {
        let unit_dir = ray_in.direction().to_unit_vector();
        let reflected = utils::reflect(&unit_dir, &hit_rec.normal);
        // new ray from hit point; the reflection is perturbed by a random
        // offset in a unit sphere scaled by `self.fuzz` (0 = perfect mirror)
        let scattered_ray = Ray::new_at_time(hit_rec.point.clone(), reflected + self.fuzz * utils::random_in_unit_sphere(), ray_in.time())
        let x = scattered_ray.direction().dot(&hit_rec.normal);
        // A positive dot product means the scattered ray leaves the surface.
        if x > 0.0 {
            let attenuation = self.albedo.value(hit_rec.u, hit_rec.v, &hit_rec.point);
            Some((attenuation, scattered_ray))
        } else {
            None
        }
    }
}
| true
|
2e098963e0c9681af52939f017589e754b2d1086
|
Rust
|
mahimachander/slide
|
/libslide/src/utils/iter.rs
|
UTF-8
| 4,108
| 3.90625
| 4
|
[
"BSD-3-Clause"
] |
permissive
|
use std::collections::VecDeque;
use std::vec::IntoIter;
/// A [`TakeWhile`]-like struct that tests a predicate by peeking rather than consuming an iterator.
///
/// rustlib's [`TakeWhile`] consumes items in an iterator until its predicate is no longer satisfied.
/// This means that the first item that fails the predicate will also be consumed. For example,
///
/// ```rust
/// let nums = vec![1, 2, 3, 4, 5];
/// let mut iter = nums.iter();
/// let less_than_4: Vec<usize> = iter.by_ref().take_while(|n| **n < 4).cloned().collect();
/// assert_eq!(less_than_4, &[1, 2, 3]);
/// assert_eq!(iter.next(), Some(&5)); // 4 was consumed!
/// ```
///
/// `PeekingTakeWhile` implements a [`TakeWhile`]-like functionality without consuming items that fail
/// its predicate.
///
/// TODO: Ideally a `PeekingTakeWhile` would take a [`Peekable`] trait object rather than a
/// `PeekIter`, but rustlib doesn't provide a [`Peekable`] trait yet. See the [Pre-RFC].
///
/// [`TakeWhile`]: core::iter::TakeWhile
/// [`Peekable`]: core::iter::Peekable
/// [Pre-RFC]: https://internals.rust-lang.org/t/pre-rfc-make-peekable-trait-for-iterator
struct PeekingTakeWhile<'a, T, P>
where
T: Clone + 'a,
P: Fn(&T) -> bool,
{
/// A mutable reference to the underlying iterator is taken because we actually do want to
/// consume items that match the predicate.
peeker: &'a mut PeekIter<T>,
predicate: P,
}
impl<'a, T, P> Iterator for PeekingTakeWhile<'a, T, P>
where
    T: Clone + 'a,
    P: Fn(&T) -> bool,
{
    type Item = T;

    /// Yields the next item only while the predicate holds for the peeked
    /// value; an item that fails the predicate is left un-consumed in the
    /// underlying iterator.
    fn next(&mut self) -> Option<T> {
        // Evaluate the predicate on the peeked item first so the borrow of
        // the peeker ends before we consume from it.
        let keep = match self.peeker.peek() {
            Some(item) => (self.predicate)(item),
            None => false,
        };
        if keep {
            self.peeker.next()
        } else {
            None
        }
    }
}
/// An iterator that supports arbitrary-length peeking.
///
/// This struct is a beefed-up version of rustlib's [`Peekable`], which supports only peeking at the
/// next item in an iterator. Multi-length peeks may be required by applications that need to
/// establish a context; for example, a parser.
///
/// [`Peekable`]: core::iter::Peekable
pub struct PeekIter<T>
where
T: Clone,
{
iter: IntoIter<T>,
/// A store of items we had to consume from the iterator for peeking.
lookahead: VecDeque<Option<T>>,
}
impl<T> PeekIter<T>
where
T: Clone,
{
pub fn new(iter: IntoIter<T>) -> Self {
let mut lookahead = VecDeque::new();
lookahead.reserve(5); // optimistically we won't be peeking more than this
Self { iter, lookahead }
}
/// Returns a reference to the next value in the iterator, without consuming it, or `None` if
/// the iteration is complete.
pub fn peek(&mut self) -> Option<&T> {
if self.lookahead.is_empty() {
// Hopefully the branch gets optimized out. Not sure if we can reduce it.
let next = self.iter.next();
self.lookahead.push_back(next);
}
self.lookahead[0].as_ref()
}
/// Returns a deque of up to `n` peeked items mapped over a function `f`.
///
/// The length of the returned deque is `n` or the number of items remaining in the iteration,
/// whichever is lower.
pub fn peek_map_n<R>(&mut self, n: usize, f: fn(&T) -> R) -> VecDeque<R> {
while self.lookahead.len() < n {
let next = self.iter.next();
self.lookahead.push_back(next);
}
self.lookahead
.iter()
.take(n)
.filter_map(|o| o.as_ref())
.map(f)
.collect()
}
/// Adds an item to the front of the current iteration.
pub fn push_front(&mut self, item: T) {
self.lookahead.push_front(Some(item));
}
}
impl<T> Iterator for PeekIter<T>
where
    T: Clone,
{
    type Item = T;

    fn next(&mut self) -> Option<Self::Item> {
        // Serve buffered lookahead entries first. The eager `unwrap_or`
        // form must not be used here: it would evaluate `self.iter.next()`
        // before the lookahead is consulted, consuming an item out of order.
        match self.lookahead.pop_front() {
            Some(front) => front,
            None => self.iter.next(),
        }
    }
}
| true
|
08d0d3e2e2e6773bc55512034288a18ac0d8037c
|
Rust
|
image-rs/canvas
|
/canvas/benchmarks/bitpack.rs
|
UTF-8
| 2,523
| 3.109375
| 3
|
[] |
no_license
|
//! Benchmarks sRGB to sRGB conversions.
use brunch::Bench;
use image_canvas::color::Color;
use image_canvas::layout::{Block, CanvasLayout, LayoutError, SampleBits, SampleParts, Texel};
use image_canvas::Canvas;
struct Convert {
texel_in: Texel,
color_in: Color,
texel_out: Texel,
color_out: Color,
sz: u32,
}
impl Convert {
    /// Human-readable benchmark label describing the conversion endpoints.
    fn name(&self) -> String {
        format!(
            "bitpack({:?}/{:?}, {:?}/{:?}, {})",
            self.texel_in, self.color_in, self.texel_out, self.color_out, self.sz
        )
    }

    /// Builds the source and destination canvases and returns a closure
    /// performing the conversion, ready to be benchmarked.
    fn prepare(self) -> Result<impl FnMut(), LayoutError> {
        // `self` is consumed, so its fields can be moved out instead of
        // cloned (the previous `layout.clone()` / `color.clone()` calls
        // were redundant allocations).
        let layout = CanvasLayout::with_texel(&self.texel_in, self.sz, self.sz)?;
        let mut from = Canvas::new(layout);
        from.set_color(self.color_in)?;
        let layout = CanvasLayout::with_texel(&self.texel_out, self.sz, self.sz)?;
        let mut into = Canvas::new(layout);
        into.set_color(self.color_out)?;
        Ok(move || from.convert(&mut into))
    }
}
fn main() {
let tests = [
/* conversion between same color luma */
Convert {
texel_in: Texel {
block: Block::Pack1x8,
parts: SampleParts::Luma,
bits: SampleBits::UInt1x8,
},
color_in: Color::BT709,
texel_out: Texel {
block: Block::Pixel,
parts: SampleParts::Luma,
bits: SampleBits::UInt8,
},
color_out: Color::BT709,
sz: 128,
},
Convert {
texel_in: Texel {
block: Block::Pixel,
parts: SampleParts::Luma,
bits: SampleBits::UInt8,
},
color_in: Color::BT709,
texel_out: Texel {
block: Block::Pack1x8,
parts: SampleParts::Luma,
bits: SampleBits::UInt1x8,
},
color_out: Color::BT709,
sz: 128,
},
];
let mut benches = tests.map(|convert| {
Bench::new("framebuf::conversion::main", &convert.name())
.with(convert.prepare().expect("Failed to setup benchmark"))
});
// Technically, we're not meant to call this directly but this makes me sad.. Why are we forced
// to use a macro to setup such a simple data structure. Not like the macro makes it possible
// to define any more complicated thing than a linear list as well..
brunch::analyze(&mut benches[..])
}
| true
|
fe0188ed81fd2a3d7356d10f3d1bd4d9c83fb0b2
|
Rust
|
Sanya2007/stm32_rust
|
/src/stm32f4xx/regs/pwr.rs
|
UTF-8
| 2,653
| 2.71875
| 3
|
[] |
no_license
|
#![allow(dead_code)]
//! Power Control registers
use ::volatile_reg32::*;
use super::constants::PWR_BASE;
pub struct PwrRegs
{
/// PWR power control register
pub cr : VolatileReg32,
/// PWR power control/status register
pub csr : VolatileReg32,
}
impl PwrRegs {
    /// Maps the PWR register block at its fixed peripheral base address.
    pub fn init() -> PwrRegs {
        let pwr_base = PWR_BASE as *mut u32;
        PwrRegs {
            cr: VolatileReg32::new(pwr_base),
            // CSR sits one 32-bit word after CR.
            csr: VolatileReg32::new_offset(pwr_base, 1),
        }
    }
}
// Bit definition for PWR_CR register
pub const PWR_CR_LPDS : u32 = 0x00000001; // Low-Power Deepsleep
pub const PWR_CR_PDDS : u32 = 0x00000002; // Power Down Deepsleep
pub const PWR_CR_CWUF : u32 = 0x00000004; // Clear Wakeup Flag
pub const PWR_CR_CSBF : u32 = 0x00000008; // Clear Standby Flag
pub const PWR_CR_PVDE : u32 = 0x00000010; // Power Voltage Detector Enable
pub const PWR_CR_PLS : u32 = 0x000000E0; // PLS[2:0] bits (PVD Level Selection)
pub const PWR_CR_PLS_0 : u32 = 0x00000020; // Bit 0
pub const PWR_CR_PLS_1 : u32 = 0x00000040; // Bit 1
pub const PWR_CR_PLS_2 : u32 = 0x00000080; // Bit 2
// PVD level configuration
pub const PWR_CR_PLS_LEV0 : u32 = 0x00000000; // PVD level 0
pub const PWR_CR_PLS_LEV1 : u32 = 0x00000020; // PVD level 1
pub const PWR_CR_PLS_LEV2 : u32 = 0x00000040; // PVD level 2
pub const PWR_CR_PLS_LEV3 : u32 = 0x00000060; // PVD level 3
pub const PWR_CR_PLS_LEV4 : u32 = 0x00000080; // PVD level 4
pub const PWR_CR_PLS_LEV5 : u32 = 0x000000A0; // PVD level 5
pub const PWR_CR_PLS_LEV6 : u32 = 0x000000C0; // PVD level 6
pub const PWR_CR_PLS_LEV7 : u32 = 0x000000E0; // PVD level 7
pub const PWR_CR_DBP : u32 = 0x00000100; // Disable Backup Domain write protection
pub const PWR_CR_FPDS : u32 = 0x00000200; // Flash power down in Stop mode
pub const PWR_CR_VOS : u32 = 0x00004000; // Regulator voltage scaling output selection
// Bit definition for PWR_CSR register
pub const PWR_CSR_WUF : u32 = 0x00000001; // Wakeup Flag
pub const PWR_CSR_SBF : u32 = 0x00000002; // Standby Flag
pub const PWR_CSR_PVDO : u32 = 0x00000004; // PVD Output
pub const PWR_CSR_BRR : u32 = 0x00000008; // Backup regulator ready
pub const PWR_CSR_EWUP : u32 = 0x00000100; // Enable WKUP pin
pub const PWR_CSR_BRE : u32 = 0x00000200; // Backup regulator enable
pub const PWR_CSR_VOSRDY : u32 = 0x00004000; // Regulator voltage scaling output selection ready
| true
|
ffe50a6372829c5db54312da5ce230dd9336e82d
|
Rust
|
utilForever/BOJ
|
/Rust/26071 - Chongchong who went to Arcade.rs
|
UTF-8
| 2,621
| 3.140625
| 3
|
[
"MIT"
] |
permissive
|
use io::Write;
use std::{io, str};
pub struct UnsafeScanner<R> {
reader: R,
buf_str: Vec<u8>,
buf_iter: str::SplitAsciiWhitespace<'static>,
}
impl<R: io::BufRead> UnsafeScanner<R> {
    /// Wraps a buffered reader in a whitespace-delimited token scanner.
    pub fn new(reader: R) -> Self {
        Self {
            reader,
            buf_str: vec![],
            buf_iter: "".split_ascii_whitespace(),
        }
    }

    /// Reads and parses the next whitespace-separated token, refilling the
    /// internal line buffer from the reader as needed.
    ///
    /// Panics if reading fails or the token does not parse as `T`.
    pub fn token<T: str::FromStr>(&mut self) -> T {
        loop {
            if let Some(token) = self.buf_iter.next() {
                return token.parse().ok().expect("Failed parse");
            }
            self.buf_str.clear();
            self.reader
                .read_until(b'\n', &mut self.buf_str)
                .expect("Failed read");
            // NOTE(review): the transmute fakes a 'static lifetime so the
            // iterator can borrow `buf_str` across loop iterations — a
            // self-referential struct. It relies on the Vec's heap buffer
            // staying put while the iterator is live, and on the input
            // being valid UTF-8 (from_utf8_unchecked is not checked).
            // Common competitive-programming hack, but formally unsound;
            // confirm inputs are ASCII and the struct is never moved
            // mid-parse.
            self.buf_iter = unsafe {
                let slice = str::from_utf8_unchecked(&self.buf_str);
                std::mem::transmute(slice.split_ascii_whitespace())
            }
        }
    }
}
// Entry point for BOJ 26071. Reads an n x n character grid, records the
// positions of all 'G' cells, and prints a minimal move count.
fn main() {
    let (stdin, stdout) = (io::stdin(), io::stdout());
    let mut scan = UnsafeScanner::new(stdin.lock());
    let mut out = io::BufWriter::new(stdout.lock());
    let n = scan.token::<usize>();
    // ret[k] accumulates, per corner of the grid (top-left, bottom-left,
    // top-right, bottom-right), the componentwise maximum distance of any
    // 'G' cell from that corner's row edge and column edge.
    let mut ret = [(0, 0); 4];
    let mut gomgoms = Vec::new();
    for i in 0..n {
        let s = scan.token::<String>();
        for (j, c) in s.chars().enumerate() {
            if c == 'G' {
                gomgoms.push((i, j));
                ret[0] = (ret[0].0.max(i), ret[0].1.max(j));
                ret[1] = (ret[1].0.max(n - i - 1), ret[1].1.max(j));
                ret[2] = (ret[2].0.max(i), ret[2].1.max(n - j - 1));
                ret[3] = (ret[3].0.max(n - i - 1), ret[3].1.max(n - j - 1));
            }
        }
    }
    if gomgoms.len() == 1 {
        // A single 'G' needs no moves.
        writeln!(out, "0").unwrap();
    } else if gomgoms.len() <= n {
        // Few enough cells that they might all share one row or column;
        // then the cheaper of sweeping toward either edge of that line wins.
        if gomgoms.iter().all(|(i, _)| *i == gomgoms[0].0) {
            // All 'G's on the same row: push them left or right.
            let vec = gomgoms.iter().map(|(_, j)| *j).collect::<Vec<_>>();
            let left = *vec.iter().max().unwrap();
            let right = (n - 1) - *vec.iter().min().unwrap();
            writeln!(out, "{}", left.min(right)).unwrap();
        } else if gomgoms.iter().all(|(_, j)| *j == gomgoms[0].1) {
            // All 'G's in the same column: push them up or down.
            let vec = gomgoms.iter().map(|(i, _)| *i).collect::<Vec<_>>();
            let up = *vec.iter().max().unwrap();
            let down = (n - 1) - *vec.iter().min().unwrap();
            writeln!(out, "{}", up.min(down)).unwrap();
        } else {
            // General case: cheapest corner by max-row + max-col distance.
            writeln!(out, "{}", ret.iter().map(|(x, y)| x + y).min().unwrap()).unwrap();
        }
    } else {
        // More 'G's than rows: cannot be collinear, use the corner bound.
        writeln!(out, "{}", ret.iter().map(|(x, y)| x + y).min().unwrap()).unwrap();
    }
}
| true
|
3eb6e41aa922bd9bae9c4ce7d9d79553ff52a6ef
|
Rust
|
devinschulz/advent-of-code
|
/2015/src/day01/mod.rs
|
UTF-8
| 697
| 3.40625
| 3
|
[] |
no_license
|
/// Returns the puzzle input, embedded into the binary at compile time.
fn input() -> &'static str {
    include_str!("input.txt")
}
/// Computes the final floor: '(' goes up one, ')' goes down one,
/// every other character is ignored.
fn part1(input: &str) -> i32 {
    let mut floor = 0;
    for c in input.chars() {
        match c {
            '(' => floor += 1,
            ')' => floor -= 1,
            _ => {}
        }
    }
    floor
}
/// Returns the 1-based position of the character that first takes the
/// floor below zero. Panics if the basement is never entered.
fn part2(input: &str) -> usize {
    let mut floor = 0i32;
    for (idx, c) in input.char_indices() {
        match c {
            '(' => floor += 1,
            ')' => floor -= 1,
            _ => {}
        }
        if floor < 0 {
            return idx + 1;
        }
    }
    panic!("Failed to enter the basement");
}
#[test]
fn test_day01_part1() {
    // Regression check against the known answer for this puzzle input.
    assert_eq!(part1(input()), 138);
}
#[test]
fn test_day01_part2() {
    // Regression check against the known answer for this puzzle input.
    assert_eq!(part2(input()), 1771);
}
| true
|
8a3c667495b56155845dee4b67709d0afdaedf07
|
Rust
|
CollinValley/Exercism-rust
|
/nth-prime/src/lib.rs
|
UTF-8
| 449
| 3.515625
| 4
|
[] |
no_license
|
/// Returns `true` if `num` is prime.
///
/// Fixes two defects in the original: 0 and 1 were wrongly reported as
/// prime (the trial-division loop never executed for them), and the loop
/// ran all the way to `num`, making the check O(n) instead of O(sqrt n).
/// The `i <= num / i` bound is equivalent to `i * i <= num` but cannot
/// overflow `u32`.
fn is_prime(num: u32) -> bool {
    if num < 2 {
        return false;
    }
    let mut i = 2;
    while i <= num / i {
        if num % i == 0 {
            return false;
        }
        i += 1;
    }
    true
}
/// Returns the (n + 1)-th prime, counting from `nth(0) == 2`.
pub fn nth(n: u32) -> u32 {
    // 2 is handled separately; the search below starts at 3, mirroring
    // the original which never re-tested its initial candidate.
    if n == 0 {
        return 2;
    }
    (3u32..)
        .filter(|&candidate| is_prime(candidate))
        .nth(n as usize - 1)
        .expect("an unbounded range always yields enough primes")
}
| true
|
7566c1a068275b7997cfebf428870c1a0dc6b12f
|
Rust
|
TimonPost/anasaizi
|
/anasaizi-core/src/vulkan/layer.rs
|
UTF-8
| 2,768
| 3.015625
| 3
|
[] |
no_license
|
use crate::utils::vk_to_string;
use ash::version::EntryV1_0;
use std::fmt;
/// Properties of a single Vulkan instance layer, with the driver-reported
/// C strings already converted to owned Rust `String`s.
pub struct VkValidationLayerProperties {
    // Layer name as reported by the driver.
    pub name: String,
    // Human-readable description reported by the driver.
    pub description: String,
    // Vulkan spec version the layer was written against.
    pub specs_version: u32,
    // Version of the layer implementation itself.
    pub implementation_version: u32,
}
/// Validation layers are optional components that hook into Vulkan function calls to apply additional operations.
pub struct VkValidationLayers {
    // Every layer the Vulkan instance reports as available.
    supported_layers: Vec<VkValidationLayerProperties>,
    // Layer names the application requires to be present.
    required_layers: Vec<String>,
}
impl VkValidationLayers {
    /// Queries the layers supported by `entry` and records which layer
    /// names the caller requires.
    pub fn new(entry: &ash::Entry, required_layers: Vec<String>) -> VkValidationLayers {
        VkValidationLayers {
            supported_layers: Self::initialize_validation_layers(entry),
            required_layers,
        }
    }

    /// Returns `true` when every required layer name appears among the
    /// layers reported as supported.
    pub fn has_required_layers(&self) -> bool {
        self.required_layers.iter().all(|required| {
            self.supported_layers
                .iter()
                .any(|supported| supported.name == *required)
        })
    }

    /// Enumerates the instance layer properties and converts each entry's
    /// C strings into owned Rust values.
    fn initialize_validation_layers(entry: &ash::Entry) -> Vec<VkValidationLayerProperties> {
        let layer_properties = entry
            .enumerate_instance_layer_properties()
            .expect("Failed to enumerate Instance Layers Properties!");

        layer_properties
            .iter()
            .map(|layer| VkValidationLayerProperties {
                name: vk_to_string(&layer.layer_name).unwrap(),
                description: vk_to_string(&layer.description).unwrap(),
                specs_version: layer.spec_version,
                implementation_version: layer.implementation_version,
            })
            .collect()
    }
}
impl fmt::Debug for VkValidationLayers {
    /// Lists the required layer names, then every supported layer with its
    /// description and version details. Output is byte-identical to the
    /// previous hand-rolled `write!`-with-`\n` version.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        writeln!(f, "Required Layers:")?;
        for layer_name in &self.required_layers {
            writeln!(f, "\t - {}", layer_name)?;
        }
        writeln!(f, "\nSupported Layers:")?;
        for layer in &self.supported_layers {
            writeln!(
                f,
                "\t - name: {}; description: {}; spec-version: {}; implementation-version: {}",
                layer.name, layer.description, layer.specs_version, layer.implementation_version
            )?;
        }
        Ok(())
    }
}
| true
|
0c41a6afe32b49fd9a52c2cf4b2ef0e4d0d733c0
|
Rust
|
wedaly/devlog
|
/src/status.rs
|
UTF-8
| 12,892
| 3.21875
| 3
|
[
"MIT"
] |
permissive
|
//! Report tasks from the most recent devlog entry file,
//! grouped by task status type.
use crate::error::Error;
use crate::file::LogFile;
use crate::repository::LogRepository;
use crate::task::{Task, TaskStatus};
use std::io::Write;
/// Controls how tasks are displayed in the status report.
#[derive(Debug, Copy, Clone)]
pub enum DisplayMode {
/// Show all tasks, grouped by task status.
ShowAll,
/// Show only tasks with the specified status.
ShowOnly(TaskStatus),
}
impl DisplayMode {
    /// Whether to show the section names (e.g. "To Do" or "In Progress").
    /// Section headers appear only when all statuses are being shown.
    pub fn show_section_names(&self) -> bool {
        matches!(self, DisplayMode::ShowAll)
    }

    /// Whether tasks with the given status should be rendered.
    pub fn show_status(&self, s: &TaskStatus) -> bool {
        match self {
            DisplayMode::ShowAll => true,
            DisplayMode::ShowOnly(only) => only == s,
        }
    }
}
/// Prints the status report using the provided writer.
///
/// `num_back` selects which devlog entry to report on: 0 is the latest
/// file, 1 the one before it, and so on (see the `test_previous_status`
/// test below). `d` controls which status sections are rendered.
pub fn print<W: Write>(
    w: &mut W,
    repo: &LogRepository,
    num_back: usize,
    d: DisplayMode,
) -> Result<(), Error> {
    let g = load_tasks_group_by_status(repo, num_back)?;
    print_status_report(w, &g, d)
}
/// Loads the devlog entry `num_back` files before the latest one and
/// groups its tasks by status. Returns empty groups when no such entry
/// exists (repository shorter than `num_back`).
fn load_tasks_group_by_status(
    repo: &LogRepository,
    num_back: usize,
) -> Result<GroupedTasks, Error> {
    let mut grouped = GroupedTasks::new();
    // None here means there is no entry that far back; leave groups empty.
    if let Some(logpath) = repo.nth_from_latest(num_back)? {
        let f = LogFile::load(logpath.path())?;
        f.tasks().iter().for_each(|t| grouped.insert(t));
    }
    Ok(grouped)
}
const ALL_STATUSES: &[TaskStatus] = &[
TaskStatus::Started,
TaskStatus::ToDo,
TaskStatus::Blocked,
TaskStatus::Done,
];
/// Writes one section per task status, in `ALL_STATUSES` order, skipping
/// statuses filtered out by the display mode and statuses with no tasks.
/// Consecutive sections are separated by a single blank line.
///
/// Idiom cleanup: `tasks.len() > 0` -> `!tasks.is_empty()` and
/// `write!(w, "\n")` -> `writeln!(w)` (clippy: len_zero,
/// write_with_newline); output is unchanged.
fn print_status_report<W: Write>(w: &mut W, g: &GroupedTasks, d: DisplayMode) -> Result<(), Error> {
    let mut has_prev = false;
    for status in ALL_STATUSES {
        if d.show_status(status) {
            let tasks = g.retrieve(status);
            // Empty sections are omitted entirely.
            if !tasks.is_empty() {
                if has_prev {
                    // Blank separator line between sections.
                    writeln!(w)?;
                }
                print_section(w, status, tasks, d)?;
                has_prev = true;
            }
        }
    }
    Ok(())
}
/// Writes a single status section: an optional header line (suppressed
/// when the display mode hides section names, i.e. `ShowOnly`), followed
/// by one line per task rendered via the task's `Display` impl.
///
/// Idiom cleanup: `write!(w, "{}\n", ..)` -> `writeln!` (clippy:
/// write_with_newline); output is unchanged.
fn print_section<W: Write>(
    w: &mut W,
    status: &TaskStatus,
    tasks: &[Task],
    d: DisplayMode,
) -> Result<(), Error> {
    if d.show_section_names() {
        writeln!(w, "{}:", status.display_name())?;
    }
    for t in tasks {
        writeln!(w, "{}", t)?;
    }
    Ok(())
}
/// Tasks partitioned into one bucket per `TaskStatus` variant.
struct GroupedTasks {
    todo: Vec<Task>,
    started: Vec<Task>,
    blocked: Vec<Task>,
    done: Vec<Task>,
}
impl GroupedTasks {
    /// Creates an empty bucket for every status.
    fn new() -> GroupedTasks {
        GroupedTasks {
            todo: Vec::new(),
            started: Vec::new(),
            blocked: Vec::new(),
            done: Vec::new(),
        }
    }

    /// Clones `task` into the bucket matching its status.
    fn insert(&mut self, task: &Task) {
        let t = task.clone();
        let bucket = match t.status() {
            TaskStatus::ToDo => &mut self.todo,
            TaskStatus::Started => &mut self.started,
            TaskStatus::Blocked => &mut self.blocked,
            TaskStatus::Done => &mut self.done,
        };
        bucket.push(t);
    }

    /// Borrows the bucket holding tasks of the given status.
    fn retrieve(&self, status: &TaskStatus) -> &[Task] {
        match status {
            TaskStatus::ToDo => self.todo.as_slice(),
            TaskStatus::Started => self.started.as_slice(),
            TaskStatus::Blocked => self.blocked.as_slice(),
            TaskStatus::Done => self.done.as_slice(),
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::task::Task;
use std::fs::OpenOptions;
use std::path::Path;
use std::str;
use tempfile::tempdir;
fn write_tasks_to_file(p: &Path, tasks: &[Task]) {
let mut f = OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(p)
.unwrap();
for t in tasks {
write!(&mut f, "{}\n", t).unwrap();
}
}
fn init_repo_with_tasks(dir_path: &Path, tasks: &[Task]) -> LogRepository {
let repo = LogRepository::new(dir_path);
let logpath = repo.init().unwrap();
write_tasks_to_file(logpath.path(), tasks);
repo
}
fn check_status(
repo: &LogRepository,
num_back: usize,
display_mode: DisplayMode,
expected_status: &str,
) {
let mut buf = Vec::new();
print(&mut buf, &repo, num_back, display_mode).unwrap();
let actual_status = str::from_utf8(&buf).unwrap();
assert_eq!(actual_status, expected_status);
}
fn check_current_status(
repo: &LogRepository,
display_mode: DisplayMode,
expected_status: &str,
) {
let num_back = 0;
check_status(repo, num_back, display_mode, expected_status)
}
#[test]
fn test_status_no_tasks() {
let dir = tempdir().unwrap();
let repo = init_repo_with_tasks(dir.path(), &[]);
check_current_status(&repo, DisplayMode::ShowAll, "");
}
#[test]
fn test_status_only_todo() {
let dir = tempdir().unwrap();
let repo = init_repo_with_tasks(
dir.path(),
&[
Task::new(TaskStatus::ToDo, "Foo"),
Task::new(TaskStatus::ToDo, "Bar"),
],
);
check_current_status(&repo, DisplayMode::ShowAll, "To Do:\n* Foo\n* Bar\n");
}
#[test]
fn test_status_only_started() {
let dir = tempdir().unwrap();
let repo = init_repo_with_tasks(
dir.path(),
&[
Task::new(TaskStatus::Started, "Foo"),
Task::new(TaskStatus::Started, "Bar"),
],
);
check_current_status(&repo, DisplayMode::ShowAll, "In Progress:\n^ Foo\n^ Bar\n")
}
#[test]
fn test_status_only_blocked() {
let dir = tempdir().unwrap();
let repo = init_repo_with_tasks(
dir.path(),
&[
Task::new(TaskStatus::Blocked, "Foo"),
Task::new(TaskStatus::Blocked, "Bar"),
],
);
check_current_status(&repo, DisplayMode::ShowAll, "Blocked:\n- Foo\n- Bar\n");
}
#[test]
fn test_status_only_done() {
let dir = tempdir().unwrap();
let repo = init_repo_with_tasks(
dir.path(),
&[
Task::new(TaskStatus::Done, "Foo"),
Task::new(TaskStatus::Done, "Bar"),
],
);
check_current_status(&repo, DisplayMode::ShowAll, "Done:\n+ Foo\n+ Bar\n");
}
#[test]
fn test_status_todo_and_blocked() {
let dir = tempdir().unwrap();
let repo = init_repo_with_tasks(
dir.path(),
&[
Task::new(TaskStatus::ToDo, "Foo"),
Task::new(TaskStatus::Blocked, "Bar"),
],
);
check_current_status(
&repo,
DisplayMode::ShowAll,
"To Do:\n* Foo\n\nBlocked:\n- Bar\n",
);
}
#[test]
fn test_status_blocked_and_done() {
let dir = tempdir().unwrap();
let repo = init_repo_with_tasks(
dir.path(),
&[
Task::new(TaskStatus::Blocked, "Bar"),
Task::new(TaskStatus::Done, "Baz"),
],
);
check_current_status(
&repo,
DisplayMode::ShowAll,
"Blocked:\n- Bar\n\nDone:\n+ Baz\n",
);
}
#[test]
fn test_status_all_task_types() {
let dir = tempdir().unwrap();
let repo = init_repo_with_tasks(
dir.path(),
&[
Task::new(TaskStatus::ToDo, "Foo"),
Task::new(TaskStatus::Started, "Bar"),
Task::new(TaskStatus::Blocked, "Baz"),
Task::new(TaskStatus::Done, "Boo"),
],
);
check_current_status(
&repo,
DisplayMode::ShowAll,
"In Progress:\n^ Bar\n\nTo Do:\n* Foo\n\nBlocked:\n- Baz\n\nDone:\n+ Boo\n",
);
}
#[test]
fn test_show_only_todo() {
let dir = tempdir().unwrap();
let repo = init_repo_with_tasks(
dir.path(),
&[
Task::new(TaskStatus::ToDo, "Foo"),
Task::new(TaskStatus::Started, "Bar"),
Task::new(TaskStatus::Blocked, "Baz"),
Task::new(TaskStatus::Done, "Boo"),
],
);
check_current_status(&repo, DisplayMode::ShowOnly(TaskStatus::ToDo), "* Foo\n");
}
#[test]
fn test_show_only_started() {
let dir = tempdir().unwrap();
let repo = init_repo_with_tasks(
dir.path(),
&[
Task::new(TaskStatus::ToDo, "Foo"),
Task::new(TaskStatus::Started, "Bar"),
Task::new(TaskStatus::Blocked, "Baz"),
Task::new(TaskStatus::Done, "Boo"),
],
);
check_current_status(&repo, DisplayMode::ShowOnly(TaskStatus::Started), "^ Bar\n");
}
#[test]
fn test_show_only_blocked() {
let dir = tempdir().unwrap();
let repo = init_repo_with_tasks(
dir.path(),
&[
Task::new(TaskStatus::ToDo, "Foo"),
Task::new(TaskStatus::Started, "Bar"),
Task::new(TaskStatus::Blocked, "Baz"),
Task::new(TaskStatus::Done, "Boo"),
],
);
check_current_status(&repo, DisplayMode::ShowOnly(TaskStatus::Blocked), "- Baz\n");
}
#[test]
fn test_show_only_done() {
let dir = tempdir().unwrap();
let repo = init_repo_with_tasks(
dir.path(),
&[
Task::new(TaskStatus::ToDo, "Foo"),
Task::new(TaskStatus::Started, "Bar"),
Task::new(TaskStatus::Blocked, "Baz"),
Task::new(TaskStatus::Done, "Boo"),
],
);
check_current_status(&repo, DisplayMode::ShowOnly(TaskStatus::Done), "+ Boo\n");
}
#[test]
fn test_show_only_todo_no_tasks() {
let dir = tempdir().unwrap();
let repo = init_repo_with_tasks(
dir.path(),
&[
Task::new(TaskStatus::Started, "Bar"),
Task::new(TaskStatus::Blocked, "Baz"),
Task::new(TaskStatus::Done, "Boo"),
],
);
check_current_status(&repo, DisplayMode::ShowOnly(TaskStatus::ToDo), "");
}
#[test]
fn test_show_only_started_no_tasks() {
let dir = tempdir().unwrap();
let repo = init_repo_with_tasks(
dir.path(),
&[
Task::new(TaskStatus::ToDo, "Bar"),
Task::new(TaskStatus::Blocked, "Baz"),
Task::new(TaskStatus::Done, "Boo"),
],
);
check_current_status(&repo, DisplayMode::ShowOnly(TaskStatus::Started), "");
}
#[test]
fn test_show_only_blocked_no_tasks() {
let dir = tempdir().unwrap();
let repo = init_repo_with_tasks(
dir.path(),
&[
Task::new(TaskStatus::ToDo, "Bar"),
Task::new(TaskStatus::Started, "Bar"),
Task::new(TaskStatus::Done, "Boo"),
],
);
check_current_status(&repo, DisplayMode::ShowOnly(TaskStatus::Blocked), "");
}
#[test]
fn test_show_only_done_no_tasks() {
let dir = tempdir().unwrap();
let repo = init_repo_with_tasks(
dir.path(),
&[
Task::new(TaskStatus::ToDo, "Bar"),
Task::new(TaskStatus::Started, "Bar"),
Task::new(TaskStatus::Blocked, "Baz"),
],
);
check_current_status(&repo, DisplayMode::ShowOnly(TaskStatus::Done), "");
}
#[test]
fn test_previous_status() {
let dir = tempdir().unwrap();
let repo = init_repo_with_tasks(
dir.path(),
&[
Task::new(TaskStatus::ToDo, "Bar"),
Task::new(TaskStatus::Done, "Bar"),
Task::new(TaskStatus::Done, "Baz"),
],
);
// Create a new devlog file with only the "todo" task
let p = repo.latest().unwrap().unwrap();
let next = p.next().unwrap();
write_tasks_to_file(&next.path(), &[Task::new(TaskStatus::ToDo, "Bar")]);
// check before the first logfile
check_status(&repo, 2, DisplayMode::ShowAll, "");
// check the first logfile
check_status(
&repo,
1,
DisplayMode::ShowAll,
"To Do:\n* Bar\n\nDone:\n+ Bar\n+ Baz\n",
);
// check the latest logfile
check_status(&repo, 0, DisplayMode::ShowAll, "To Do:\n* Bar\n");
}
}
| true
|
8e46272222884862ad2c9eb7b714d79319a3f72c
|
Rust
|
zhao1jin4/vscode_rust_workspace
|
/cargo_projects/third_parent/third_wasm/src/main.rs
|
UTF-8
| 129
| 2.53125
| 3
|
[] |
no_license
|
use third_gtk4;
/// Demonstrates calling into the sibling `third_gtk4` crate.
fn main() {
    let value = 10;
    let incremented = third_gtk4::add_one(value);
    println!("Hello, world! {} plus one is {}!", value, incremented);
}
| true
|
6504c2ab4a99a5ef906154106fcf9fce7f9d8779
|
Rust
|
pop-os/gir
|
/src/codegen/property_body.rs
|
UTF-8
| 5,462
| 2.71875
| 3
|
[
"MIT"
] |
permissive
|
use crate::{
analysis,
chunk::Chunk,
env::Env,
nameutil::{use_glib_type, use_gtk_type},
};
/// Accumulates the settings needed to generate the body of a GObject
/// property getter or setter (optionally a GTK container child property).
pub struct Builder<'a> {
    // Property name as registered with GObject (used in the b"...\0" literal).
    name: String,
    // Whether the generated code lives in a trait impl (changes the self cast).
    in_trait: bool,
    // Name of the local variable holding the new value (setters only).
    var_name: String,
    // true => generate getter chunks; false => setter chunks.
    is_get: bool,
    // true => route through gtk_container_child_{get,set}_property.
    is_child_property: bool,
    // Rust type of the property value (used for Value::from_type).
    type_: String,
    // Codegen environment, used to resolve glib/gtk type paths.
    env: &'a Env,
}
#[allow(clippy::wrong_self_convention)]
impl<'a> Builder<'a> {
pub fn new(env: &'a Env) -> Self {
Self {
env,
name: Default::default(),
in_trait: Default::default(),
var_name: Default::default(),
is_get: Default::default(),
is_child_property: Default::default(),
type_: Default::default(),
}
}
pub fn new_for_child_property(env: &'a Env) -> Self {
Self {
is_child_property: true,
env,
name: Default::default(),
in_trait: Default::default(),
var_name: Default::default(),
is_get: Default::default(),
type_: Default::default(),
}
}
pub fn name(&mut self, name: &str) -> &mut Self {
self.name = name.into();
self
}
pub fn in_trait(&mut self, value: bool) -> &mut Self {
self.in_trait = value;
self
}
pub fn var_name(&mut self, name: &str) -> &mut Self {
self.var_name = name.into();
self
}
pub fn is_get(&mut self, value: bool) -> &mut Self {
self.is_get = value;
self
}
pub fn type_(&mut self, type_: &str) -> &mut Self {
self.type_ = type_.into();
self
}
pub fn generate(&self) -> Chunk {
let chunks = if self.is_get {
self.chunks_for_get()
} else {
self.chunks_for_set()
};
Chunk::BlockHalf(chunks)
}
fn chunks_for_get(&self) -> Vec<Chunk> {
let mut params = Vec::new();
let cast_target = if self.is_child_property {
use_gtk_type(self.env, "ffi::GtkContainer")
} else {
use_glib_type(self.env, "gobject_ffi::GObject")
};
if self.in_trait {
params.push(Chunk::Custom(format!(
"self.to_glib_none().0 as *mut {}",
cast_target
)));
} else {
params.push(Chunk::Custom(format!(
"self.as_ptr() as *mut {}",
cast_target
)));
}
if self.is_child_property {
params.push(Chunk::Custom("item.to_glib_none().0 as *mut _".into()));
}
params.push(Chunk::Custom(format!(
"b\"{}\\0\".as_ptr() as *const _",
self.name
)));
params.push(Chunk::Custom("value.to_glib_none_mut().0".into()));
let mut body = Vec::new();
let ffi_call = Chunk::FfiCall {
name: self.get_ffi_func(),
params,
};
body.push(Chunk::Let {
name: "value".into(),
is_mut: true,
value: Box::new(Chunk::Custom(format!(
"glib::Value::from_type(<{} as StaticType>::static_type())",
self.type_
))),
type_: None,
});
body.push(Chunk::FfiCallConversion {
ret: analysis::return_value::Info::default(),
array_length_name: None,
call: Box::new(ffi_call),
});
body.push(Chunk::Custom(format!(
"value.get().expect(\"Return Value for property `{}` getter\")",
self.name,
)));
vec![Chunk::Unsafe(body)]
}
fn chunks_for_set(&self) -> Vec<Chunk> {
let mut params = Vec::new();
let cast_target = if self.is_child_property {
use_gtk_type(self.env, "ffi::GtkContainer")
} else {
use_glib_type(self.env, "gobject_ffi::GObject")
};
if self.in_trait {
params.push(Chunk::Custom(format!(
"self.to_glib_none().0 as *mut {}",
cast_target
)));
} else {
params.push(Chunk::Custom(format!(
"self.as_ptr() as *mut {}",
cast_target
)));
}
if self.is_child_property {
params.push(Chunk::Custom("item.to_glib_none().0 as *mut _".into()));
}
params.push(Chunk::Custom(format!(
"b\"{}\\0\".as_ptr() as *const _",
self.name
)));
params.push(Chunk::Custom(format!(
"{}.to_value().to_glib_none().0",
self.var_name
)));
let mut body = Vec::new();
let ffi_call = Chunk::FfiCall {
name: self.set_ffi_func(),
params,
};
body.push(Chunk::FfiCallConversion {
ret: analysis::return_value::Info::default(),
array_length_name: None,
call: Box::new(ffi_call),
});
vec![Chunk::Unsafe(body)]
}
fn get_ffi_func(&self) -> String {
if self.is_child_property {
use_gtk_type(self.env, "ffi::gtk_container_child_get_property")
} else {
use_glib_type(self.env, "gobject_ffi::g_object_get_property")
}
}
fn set_ffi_func(&self) -> String {
if self.is_child_property {
use_gtk_type(self.env, "ffi::gtk_container_child_set_property")
} else {
use_glib_type(self.env, "gobject_ffi::g_object_set_property")
}
}
}
| true
|
3620ad392aa93d6a1b470e03fe329a1f3b8c4fa6
|
Rust
|
turnikuta/iota-base-rs
|
/src/create_value_transaction/get_account_info.rs
|
UTF-8
| 1,589
| 2.65625
| 3
|
[] |
no_license
|
use std::env;
use std::process::exit;
use anyhow::Result;
use iota_base_rs::{generate_named_seed,
prepare_iota_seed,
get_address_trytes};
// Prints account info (<address index> <address trytes> <balance>) for the
// first unused address of a seed derived from a human-readable name.
// Usage: get_account_info <name-for-seed>
#[tokio::main]
async fn main() -> Result<()> {
    let args: Vec<_> = env::args().collect::<Vec<_>>();
    if args.len() < 2 {
        eprintln!("Usage: get_account_info <name-for-seed>");
        eprintln!("\nPlease provide the string (name) that was used to generate an 'easy to remember' seed");
        exit(1);
    }
    let name = args[1].to_string();
    // Comnet (test network) node endpoint.
    let provider = "https://nodes.comnet.thetangle.org";
    iota::Client::add_node(&provider)?;
    // generate 'easy to remember' seed from name
    // (but, never use such a seed with real values!)
    let seed = generate_named_seed(name);
    let iota_seed = prepare_iota_seed(&seed);
    // get the first unused address, starting the search at index 0
    let security: u8 = 2;
    let start_index: u64 = 0;
    let (index, addr) = iota::Client::get_new_address(&iota_seed)
        .security(security)
        .index(start_index)
        .generate()
        .await?;
    // get balance from address
    let response = iota::Client::get_balances()
        .addresses(&[addr.clone()])
        .send()
        .await?;
    // print account info: <index address balance>
    println!("{} {:?} {}", index, get_address_trytes(&addr.clone()), response.balances[0]);
    Ok(())
}
| true
|
da4e8d889f854e292865564dbeec8d4e6c986ba0
|
Rust
|
rayon-rs/rayon
|
/src/iter/map_with.rs
|
UTF-8
| 14,235
| 3.078125
| 3
|
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
use super::plumbing::*;
use super::*;
use std::fmt::{self, Debug};
/// `MapWith` is an iterator that transforms the elements of an underlying iterator.
///
/// This struct is created by the [`map_with()`] method on [`ParallelIterator`]
///
/// [`map_with()`]: trait.ParallelIterator.html#method.map_with
/// [`ParallelIterator`]: trait.ParallelIterator.html
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[derive(Clone)]
pub struct MapWith<I: ParallelIterator, T, F> {
base: I,
item: T,
map_op: F,
}
impl<I: ParallelIterator + Debug, T: Debug, F> Debug for MapWith<I, T, F> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("MapWith")
.field("base", &self.base)
.field("item", &self.item)
.finish()
}
}
impl<I, T, F> MapWith<I, T, F>
where
I: ParallelIterator,
{
/// Creates a new `MapWith` iterator.
pub(super) fn new(base: I, item: T, map_op: F) -> Self {
MapWith { base, item, map_op }
}
}
impl<I, T, F, R> ParallelIterator for MapWith<I, T, F>
where
I: ParallelIterator,
T: Send + Clone,
F: Fn(&mut T, I::Item) -> R + Sync + Send,
R: Send,
{
type Item = R;
fn drive_unindexed<C>(self, consumer: C) -> C::Result
where
C: UnindexedConsumer<Self::Item>,
{
let consumer1 = MapWithConsumer::new(consumer, self.item, &self.map_op);
self.base.drive_unindexed(consumer1)
}
fn opt_len(&self) -> Option<usize> {
self.base.opt_len()
}
}
impl<I, T, F, R> IndexedParallelIterator for MapWith<I, T, F>
where
I: IndexedParallelIterator,
T: Send + Clone,
F: Fn(&mut T, I::Item) -> R + Sync + Send,
R: Send,
{
fn drive<C>(self, consumer: C) -> C::Result
where
C: Consumer<Self::Item>,
{
let consumer1 = MapWithConsumer::new(consumer, self.item, &self.map_op);
self.base.drive(consumer1)
}
fn len(&self) -> usize {
self.base.len()
}
fn with_producer<CB>(self, callback: CB) -> CB::Output
where
CB: ProducerCallback<Self::Item>,
{
return self.base.with_producer(Callback {
callback,
item: self.item,
map_op: self.map_op,
});
struct Callback<CB, U, F> {
callback: CB,
item: U,
map_op: F,
}
impl<T, U, F, R, CB> ProducerCallback<T> for Callback<CB, U, F>
where
CB: ProducerCallback<R>,
U: Send + Clone,
F: Fn(&mut U, T) -> R + Sync,
R: Send,
{
type Output = CB::Output;
fn callback<P>(self, base: P) -> CB::Output
where
P: Producer<Item = T>,
{
let producer = MapWithProducer {
base,
item: self.item,
map_op: &self.map_op,
};
self.callback.callback(producer)
}
}
}
}
/// ////////////////////////////////////////////////////////////////////////
struct MapWithProducer<'f, P, U, F> {
base: P,
item: U,
map_op: &'f F,
}
impl<'f, P, U, F, R> Producer for MapWithProducer<'f, P, U, F>
where
P: Producer,
U: Send + Clone,
F: Fn(&mut U, P::Item) -> R + Sync,
R: Send,
{
type Item = R;
type IntoIter = MapWithIter<'f, P::IntoIter, U, F>;
fn into_iter(self) -> Self::IntoIter {
MapWithIter {
base: self.base.into_iter(),
item: self.item,
map_op: self.map_op,
}
}
fn min_len(&self) -> usize {
self.base.min_len()
}
fn max_len(&self) -> usize {
self.base.max_len()
}
fn split_at(self, index: usize) -> (Self, Self) {
let (left, right) = self.base.split_at(index);
(
MapWithProducer {
base: left,
item: self.item.clone(),
map_op: self.map_op,
},
MapWithProducer {
base: right,
item: self.item,
map_op: self.map_op,
},
)
}
fn fold_with<G>(self, folder: G) -> G
where
G: Folder<Self::Item>,
{
let folder1 = MapWithFolder {
base: folder,
item: self.item,
map_op: self.map_op,
};
self.base.fold_with(folder1).base
}
}
struct MapWithIter<'f, I, U, F> {
base: I,
item: U,
map_op: &'f F,
}
impl<'f, I, U, F, R> Iterator for MapWithIter<'f, I, U, F>
where
I: Iterator,
F: Fn(&mut U, I::Item) -> R + Sync,
R: Send,
{
type Item = R;
fn next(&mut self) -> Option<R> {
let item = self.base.next()?;
Some((self.map_op)(&mut self.item, item))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.base.size_hint()
}
}
impl<'f, I, U, F, R> DoubleEndedIterator for MapWithIter<'f, I, U, F>
where
I: DoubleEndedIterator,
F: Fn(&mut U, I::Item) -> R + Sync,
R: Send,
{
fn next_back(&mut self) -> Option<R> {
let item = self.base.next_back()?;
Some((self.map_op)(&mut self.item, item))
}
}
impl<'f, I, U, F, R> ExactSizeIterator for MapWithIter<'f, I, U, F>
where
I: ExactSizeIterator,
F: Fn(&mut U, I::Item) -> R + Sync,
R: Send,
{
}
/// ////////////////////////////////////////////////////////////////////////
/// Consumer implementation
struct MapWithConsumer<'f, C, U, F> {
base: C,
item: U,
map_op: &'f F,
}
impl<'f, C, U, F> MapWithConsumer<'f, C, U, F> {
fn new(base: C, item: U, map_op: &'f F) -> Self {
MapWithConsumer { base, item, map_op }
}
}
impl<'f, T, U, R, C, F> Consumer<T> for MapWithConsumer<'f, C, U, F>
where
C: Consumer<R>,
U: Send + Clone,
F: Fn(&mut U, T) -> R + Sync,
R: Send,
{
type Folder = MapWithFolder<'f, C::Folder, U, F>;
type Reducer = C::Reducer;
type Result = C::Result;
fn split_at(self, index: usize) -> (Self, Self, Self::Reducer) {
let (left, right, reducer) = self.base.split_at(index);
(
MapWithConsumer::new(left, self.item.clone(), self.map_op),
MapWithConsumer::new(right, self.item, self.map_op),
reducer,
)
}
fn into_folder(self) -> Self::Folder {
MapWithFolder {
base: self.base.into_folder(),
item: self.item,
map_op: self.map_op,
}
}
fn full(&self) -> bool {
self.base.full()
}
}
impl<'f, T, U, R, C, F> UnindexedConsumer<T> for MapWithConsumer<'f, C, U, F>
where
C: UnindexedConsumer<R>,
U: Send + Clone,
F: Fn(&mut U, T) -> R + Sync,
R: Send,
{
fn split_off_left(&self) -> Self {
MapWithConsumer::new(self.base.split_off_left(), self.item.clone(), self.map_op)
}
fn to_reducer(&self) -> Self::Reducer {
self.base.to_reducer()
}
}
struct MapWithFolder<'f, C, U, F> {
base: C,
item: U,
map_op: &'f F,
}
impl<'f, T, U, R, C, F> Folder<T> for MapWithFolder<'f, C, U, F>
where
C: Folder<R>,
F: Fn(&mut U, T) -> R,
{
type Result = C::Result;
fn consume(mut self, item: T) -> Self {
let mapped_item = (self.map_op)(&mut self.item, item);
self.base = self.base.consume(mapped_item);
self
}
fn consume_iter<I>(mut self, iter: I) -> Self
where
I: IntoIterator<Item = T>,
{
fn with<'f, T, U, R>(
item: &'f mut U,
map_op: impl Fn(&mut U, T) -> R + 'f,
) -> impl FnMut(T) -> R + 'f {
move |x| map_op(item, x)
}
{
let mapped_iter = iter.into_iter().map(with(&mut self.item, self.map_op));
self.base = self.base.consume_iter(mapped_iter);
}
self
}
fn complete(self) -> C::Result {
self.base.complete()
}
fn full(&self) -> bool {
self.base.full()
}
}
// ------------------------------------------------------------------------------------------------
/// `MapInit` is an iterator that transforms the elements of an underlying iterator.
///
/// This struct is created by the [`map_init()`] method on [`ParallelIterator`]
///
/// [`map_init()`]: trait.ParallelIterator.html#method.map_init
/// [`ParallelIterator`]: trait.ParallelIterator.html
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[derive(Clone)]
pub struct MapInit<I: ParallelIterator, INIT, F> {
base: I,
init: INIT,
map_op: F,
}
impl<I: ParallelIterator + Debug, INIT, F> Debug for MapInit<I, INIT, F> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("MapInit").field("base", &self.base).finish()
}
}
impl<I, INIT, F> MapInit<I, INIT, F>
where
I: ParallelIterator,
{
/// Creates a new `MapInit` iterator.
pub(super) fn new(base: I, init: INIT, map_op: F) -> Self {
MapInit { base, init, map_op }
}
}
impl<I, INIT, T, F, R> ParallelIterator for MapInit<I, INIT, F>
where
I: ParallelIterator,
INIT: Fn() -> T + Sync + Send,
F: Fn(&mut T, I::Item) -> R + Sync + Send,
R: Send,
{
type Item = R;
fn drive_unindexed<C>(self, consumer: C) -> C::Result
where
C: UnindexedConsumer<Self::Item>,
{
let consumer1 = MapInitConsumer::new(consumer, &self.init, &self.map_op);
self.base.drive_unindexed(consumer1)
}
fn opt_len(&self) -> Option<usize> {
self.base.opt_len()
}
}
impl<I, INIT, T, F, R> IndexedParallelIterator for MapInit<I, INIT, F>
where
I: IndexedParallelIterator,
INIT: Fn() -> T + Sync + Send,
F: Fn(&mut T, I::Item) -> R + Sync + Send,
R: Send,
{
fn drive<C>(self, consumer: C) -> C::Result
where
C: Consumer<Self::Item>,
{
let consumer1 = MapInitConsumer::new(consumer, &self.init, &self.map_op);
self.base.drive(consumer1)
}
fn len(&self) -> usize {
self.base.len()
}
fn with_producer<CB>(self, callback: CB) -> CB::Output
where
CB: ProducerCallback<Self::Item>,
{
return self.base.with_producer(Callback {
callback,
init: self.init,
map_op: self.map_op,
});
struct Callback<CB, INIT, F> {
callback: CB,
init: INIT,
map_op: F,
}
impl<T, INIT, U, F, R, CB> ProducerCallback<T> for Callback<CB, INIT, F>
where
CB: ProducerCallback<R>,
INIT: Fn() -> U + Sync,
F: Fn(&mut U, T) -> R + Sync,
R: Send,
{
type Output = CB::Output;
fn callback<P>(self, base: P) -> CB::Output
where
P: Producer<Item = T>,
{
let producer = MapInitProducer {
base,
init: &self.init,
map_op: &self.map_op,
};
self.callback.callback(producer)
}
}
}
}
/// ////////////////////////////////////////////////////////////////////////
struct MapInitProducer<'f, P, INIT, F> {
base: P,
init: &'f INIT,
map_op: &'f F,
}
impl<'f, P, INIT, U, F, R> Producer for MapInitProducer<'f, P, INIT, F>
where
P: Producer,
INIT: Fn() -> U + Sync,
F: Fn(&mut U, P::Item) -> R + Sync,
R: Send,
{
type Item = R;
type IntoIter = MapWithIter<'f, P::IntoIter, U, F>;
fn into_iter(self) -> Self::IntoIter {
MapWithIter {
base: self.base.into_iter(),
item: (self.init)(),
map_op: self.map_op,
}
}
fn min_len(&self) -> usize {
self.base.min_len()
}
fn max_len(&self) -> usize {
self.base.max_len()
}
fn split_at(self, index: usize) -> (Self, Self) {
let (left, right) = self.base.split_at(index);
(
MapInitProducer {
base: left,
init: self.init,
map_op: self.map_op,
},
MapInitProducer {
base: right,
init: self.init,
map_op: self.map_op,
},
)
}
fn fold_with<G>(self, folder: G) -> G
where
G: Folder<Self::Item>,
{
let folder1 = MapWithFolder {
base: folder,
item: (self.init)(),
map_op: self.map_op,
};
self.base.fold_with(folder1).base
}
}
/// ////////////////////////////////////////////////////////////////////////
/// Consumer implementation
struct MapInitConsumer<'f, C, INIT, F> {
base: C,
init: &'f INIT,
map_op: &'f F,
}
impl<'f, C, INIT, F> MapInitConsumer<'f, C, INIT, F> {
fn new(base: C, init: &'f INIT, map_op: &'f F) -> Self {
MapInitConsumer { base, init, map_op }
}
}
impl<'f, T, INIT, U, R, C, F> Consumer<T> for MapInitConsumer<'f, C, INIT, F>
where
C: Consumer<R>,
INIT: Fn() -> U + Sync,
F: Fn(&mut U, T) -> R + Sync,
R: Send,
{
type Folder = MapWithFolder<'f, C::Folder, U, F>;
type Reducer = C::Reducer;
type Result = C::Result;
fn split_at(self, index: usize) -> (Self, Self, Self::Reducer) {
let (left, right, reducer) = self.base.split_at(index);
(
MapInitConsumer::new(left, self.init, self.map_op),
MapInitConsumer::new(right, self.init, self.map_op),
reducer,
)
}
fn into_folder(self) -> Self::Folder {
MapWithFolder {
base: self.base.into_folder(),
item: (self.init)(),
map_op: self.map_op,
}
}
fn full(&self) -> bool {
self.base.full()
}
}
impl<'f, T, INIT, U, R, C, F> UnindexedConsumer<T> for MapInitConsumer<'f, C, INIT, F>
where
C: UnindexedConsumer<R>,
INIT: Fn() -> U + Sync,
F: Fn(&mut U, T) -> R + Sync,
R: Send,
{
fn split_off_left(&self) -> Self {
MapInitConsumer::new(self.base.split_off_left(), self.init, self.map_op)
}
fn to_reducer(&self) -> Self::Reducer {
self.base.to_reducer()
}
}
| true
|
16397b3f3bf22b89d8ed34c94f78c91b6dde7e7a
|
Rust
|
binp-dev/ksfc-lxi
|
/src/main.rs
|
UTF-8
| 3,934
| 2.515625
| 3
|
[
"MIT"
] |
permissive
|
//#![allow(dead_code)]
use std::time::{Duration};
use std::thread::{sleep};
use ksfc_lxi::{
KsFc, Error,
types::{EventReg, ChannelNo, TriggerSource},
};
static FREQ: f64 = 7e3;
static FREPS: f64 = 1e-2;
static MEAS_TIME: Duration = Duration::from_secs(1);
/// Panics unless `val` is within `reps` relative error of `refv`.
/// The tolerance is `reps * |refv|`, so a zero reference admits only an
/// exact match.
fn assert_feq(val: f64, refv: f64, reps: f64) {
    let tolerance = reps * refv.abs();
    let difference = (val - refv).abs();
    if difference > tolerance {
        panic!("Float assertion error: {} != {}, relative eps: {}", val, refv, reps);
    }
}
/// Asserts a measured frequency is within FREPS relative error of FREQ.
fn assert_freq(val: f64) {
    assert_feq(val, FREQ, FREPS);
}
// Runs each listed test function against a freshly constructed device
// (the `$fc_new` expression is re-evaluated per test), resetting the
// device first. Prints an ok/FAIL line per test and re-raises the first
// panic so the process still exits non-zero on failure.
macro_rules! test_all {
    ( $fc_new:expr, [ $( $func:ident, )* ] ) => {{
        $( {
            println!("{} ... ", stringify!($func));
            // catch_unwind so a failing test still gets its status line
            // before the panic is propagated.
            match std::panic::catch_unwind(|| {
                let mut fc = $fc_new;
                fc.rst().unwrap();
                $func(&mut fc);
            }) {
                Ok(()) => println!("... [ ok ]"),
                Err(e) => {
                    println!("... [FAIL]");
                    std::panic::resume_unwind(e);
                },
            }
        } )*
    }};
}
// Runs the on-device test suite against the frequency counter.
// NOTE(review): the instrument address is hard-coded; each test gets a
// fresh connection because test_all! re-evaluates the constructor block.
fn main() {
    test_all!(
        {
            let (dev, res) = KsFc::new(
                &"10.0.0.9", None,
                Duration::from_secs(2),
            );
            res.unwrap();
            dev
        },
        [
            test_abort,
            test_cls,
            //test_cal,
            test_ese,
            test_autoscale,
            test_init,
            test_fetch,
            test_read,
            test_trig_count,
            test_trig_delay,
            test_r,
            test_conf_freq,
            test_trig_source,
        ]
    );
}
fn test_abort(fc: &mut KsFc) {
fc.abort().unwrap();
}
fn test_cls(fc: &mut KsFc) {
fc.cls().unwrap();
}
#[allow(dead_code)]
fn test_cal(fc: &mut KsFc) {
assert!(fc.cal().unwrap());
}
fn test_ese(fc: &mut KsFc) {
fc.ese_set(EventReg::all()).unwrap();
assert_eq!(fc.ese_get().unwrap(), EventReg::all());
}
fn test_autoscale(fc: &mut KsFc) {
fc.autoscale().unwrap();
}
fn test_init(fc: &mut KsFc) {
fc.initiate().unwrap();
if let Err(Error::Dev(x)) = fc.initiate() {
assert_eq!(x.code(), -213);
}
sleep(MEAS_TIME);
}
fn test_fetch(fc: &mut KsFc) {
fc.initiate().unwrap();
assert_freq(fc.fetch().unwrap());
}
fn test_read(fc: &mut KsFc) {
assert_freq(fc.read().unwrap());
}
fn test_trig_count(fc: &mut KsFc) {
fc.trigger_count_set(1).unwrap();
assert_eq!(fc.trigger_count_get().unwrap(), 1);
fc.trigger_count_set(1000000).unwrap();
assert_eq!(fc.trigger_count_get().unwrap(), 1000000);
if let Err(Error::Dev(x)) = fc.trigger_count_set(0) {
assert_eq!(x.code(), -222);
}
if let Err(Error::Dev(x)) = fc.trigger_count_set(1000001) {
assert_eq!(x.code(), -222);
}
}
fn test_trig_delay(fc: &mut KsFc) {
fc.trigger_delay_set(Duration::from_secs(10)).unwrap();
assert_eq!(fc.trigger_delay_get().unwrap().as_secs(), 10);
fc.trigger_delay_set(Duration::from_micros(10)).unwrap();
assert_eq!(fc.trigger_delay_get().unwrap().as_micros(), 10);
if let Err(Error::Dev(x)) = fc.trigger_delay_set(Duration::from_secs(3601)) {
assert_eq!(x.code(), -222);
}
}
fn test_r(fc: &mut KsFc) {
fc.trigger_count_set(4).unwrap();
fc.initiate().unwrap();
sleep(Duration::from_secs(1));
let r = fc.r(None).unwrap();
assert_eq!(r.len(), 4);
for x in r {
assert_freq(x);
}
}
fn test_conf_freq(fc: &mut KsFc) {
fc.configure_frequency(ChannelNo::Ch1).unwrap();
fc.initiate().unwrap();
assert_freq(fc.fetch().unwrap());
fc.configure_frequency(ChannelNo::Ch2).unwrap();
fc.initiate().unwrap();
assert_freq(fc.fetch().unwrap());
}
fn test_trig_source(fc: &mut KsFc) {
fc.trigger_source_set(TriggerSource::Immediate).unwrap();
fc.initiate().unwrap();
assert_freq(fc.fetch().unwrap());
}
| true
|
0d30b724a0bfb05fb04e9791bcd8b326c51c4efc
|
Rust
|
emlaufer/juntos
|
/src/multiboot/tag/mod.rs
|
UTF-8
| 6,998
| 2.875
| 3
|
[] |
no_license
|
pub mod elf_symbols;
pub mod memory_map;
use core::marker::PhantomData;
use core::{slice, str};
pub use elf_symbols::ElfSymbols;
pub use memory_map::MemoryMap;
/// Walks the multiboot2 boot-information tag list in place, yielding a
/// reference to each tag header until the terminator tag is reached.
pub struct TagIterator<'a> {
    current_tag: *const TagHeader,
    // Ties the iterator's lifetime to the tag memory it walks, even though
    // only a raw pointer is stored.
    _marker: PhantomData<&'a TagHeader>,
}
impl<'a> TagIterator<'a> {
    /// # Safety
    /// This method is safe so long as `first_tag` correctly points to the first tag in a valid
    /// multiboot2 tag list, is non-null, and is properly aligned to an 8 byte boundary.
    pub unsafe fn new(first_tag: *const TagHeader) -> TagIterator<'a> {
        TagIterator {
            current_tag: first_tag,
            _marker: PhantomData,
        }
    }
}
impl<'a> Iterator for TagIterator<'a> {
    type Item = &'a TagHeader;
    fn next(&mut self) -> Option<Self::Item> {
        // SAFETY: We know current_tag is a valid pointer to a tag because the first is correct due
        // to the safety of the constructor of the TagIterator, and the following tags are
        // correct as multiboot2 ensures size (with alignment) will point to the next tag.
        let tag = unsafe { &*self.current_tag };
        // if ending tag, return None
        // (the terminator is a tag with type 0 and size 8)
        if tag.tag_type == 0 && tag.size == 8 {
            return None;
        }
        // Advance by the tag's size, then round up to the next 8-byte
        // boundary where the following tag begins.
        let next_tag_start = self.current_tag as usize + tag.size as usize;
        let next_tag_addr = (next_tag_start - 1 + 8) & !(8 - 1); // align to 8 byte boundary
        self.current_tag = next_tag_addr as *const TagHeader;
        Some(tag)
    }
}
/// A struct representing an 'internal' C-style string (i.e. within the struct itself)
// `repr(transparent)` so a reference to this type is layout-compatible with
// a pointer to the first byte of the in-struct string.
#[repr(transparent)]
pub struct InternalCStr(u8);
impl InternalCStr {
    /// # Safety:
    /// This method is safe so long as `self` is a byte within a null-terminated UTF-8 string.
    /// NOTE: Be careful not to construct InternalCStr i.e. `let str = InternalCStr(*mem);`, as
    /// this will copy the first byte. Instead, use `let str = &*(mem as *const
    /// InternalCStr);` or as the last member of a struct.
    unsafe fn to_str(&self) -> &str {
        // SAFETY: The string is a null-terminated UTF-8 string. We know the length
        // is correct as we calculated it above by finding the null terminator.
        str::from_utf8_unchecked(slice::from_raw_parts(self.to_ptr(), self.len()))
    }
    // Address of the first byte of the string.
    fn to_ptr(&self) -> *const u8 {
        &self.0 as *const u8
    }
    /// # Safety:
    /// This method is safe so long as `self` is a byte within a null-terminated string.
    unsafe fn len(&self) -> usize {
        // Scan forward until the NUL terminator; length excludes the NUL.
        let mut len = 0;
        while *self.to_ptr().offset(len) != b'\0' {
            len += 1;
        }
        len as usize
    }
}
/// High-level classification of a multiboot2 boot-information tag.
#[derive(Eq, PartialEq)]
pub enum Type {
    MemoryInfo,
    BiosBoot,
    BootCmdLine,
    Modules,
    ElfSymbols,
    MemoryMap,
    BootLoaderName,
    ApmTable,
    VbeInfo,
    FramebufferInfo,
    Efi32SystemTable,
    Efi64SystemTable,
    SMBiosTable,
    AcpiOldRdsp,
    AcpiNewRdsp,
    NetworkingInfo,
    EfiMemoryMap,
    EfiBootServicesNotTerminated,
    Efi32ImageHandle,
    Efi64ImageHandle,
    ImageLoadBase,
    Unknown,
}
/// Common header every multiboot2 tag starts with: a numeric type and the
/// total tag size in bytes (header included).
#[derive(Debug)]
#[repr(C)]
pub struct TagHeader {
    pub tag_type: u32,
    size: u32,
}
impl TagHeader {
    /// Decodes the raw numeric tag type into a [`Type`] variant; values not
    /// recognized map to `Type::Unknown`.
    pub fn tag_type(&self) -> Type {
        // Arms are ordered by the numeric tag value for easy cross-checking
        // against the multiboot2 specification.
        match self.tag_type {
            1 => Type::BootCmdLine,
            2 => Type::BootLoaderName,
            3 => Type::Modules,
            4 => Type::MemoryInfo,
            5 => Type::BiosBoot,
            6 => Type::MemoryMap,
            7 => Type::VbeInfo,
            8 => Type::FramebufferInfo,
            9 => Type::ElfSymbols,
            10 => Type::ApmTable,
            11 => Type::Efi32SystemTable,
            12 => Type::Efi64SystemTable,
            13 => Type::SMBiosTable,
            14 => Type::AcpiOldRdsp,
            15 => Type::AcpiNewRdsp,
            16 => Type::NetworkingInfo,
            17 => Type::EfiMemoryMap,
            18 => Type::EfiBootServicesNotTerminated,
            19 => Type::Efi32ImageHandle,
            20 => Type::Efi64ImageHandle,
            21 => Type::ImageLoadBase,
            _ => Type::Unknown,
        }
    }
}
/// Basic lower/upper memory info (tag type 4).
#[derive(Debug)]
#[repr(C)]
pub struct MemoryInfo {
    header: TagHeader,
    mem_lower: u32,
    mem_upper: u32,
}
/// BIOS boot device (tag type 5).
#[derive(Debug)]
#[repr(C)]
pub struct BiosBoot {
    header: TagHeader,
    bios_dev: u32,
    partition: u32,
    sub_partition: u32,
}
/// Kernel command line (tag type 1). `packed` because the C string follows
/// the header directly with no padding.
#[repr(C, packed)]
pub struct BootCmdLine {
    header: TagHeader,
    string: InternalCStr,
}
impl BootCmdLine {
    fn string(&self) -> &str {
        // SAFETY: This is safe, because we know the BootCmdLine tag will have an internal
        // null-terminated UTF-8 string within the tag itself from the multiboot2 standard.
        unsafe { self.string.to_str() }
    }
}
/// Boot module (tag type 3): physical load range plus an associated string.
#[repr(C, packed)]
pub struct Modules {
    header: TagHeader,
    mod_start: u32,
    mod_end: u32,
    string: InternalCStr,
}
impl Modules {
    fn string(&self) -> &str {
        // SAFETY: This is safe, because we know the Modules tag will have an internal
        // null-terminated UTF-8 string within the tag itself from the multiboot2 standard.
        unsafe { self.string.to_str() }
    }
}
/// Boot loader name (tag type 2).
#[repr(C, packed)]
pub struct BootLoaderName {
    header: TagHeader,
    string: InternalCStr,
}
impl BootLoaderName {
    fn string(&self) -> &str {
        // SAFETY: This is safe, because we know the BootLoaderName tag will have an internal
        // null-terminated UTF-8 string within the tag itself from the multiboot2 standard.
        unsafe { self.string.to_str() }
    }
}
/// APM table (tag type 10).
#[derive(Debug)]
#[repr(C)]
pub struct ApmTable {
    header: TagHeader,
    version: u16,
    cseg: u16,
    offset: u32,
    cseg_16: u16,
    dseg: u16,
    flags: u16,
    cseg_len: u16,
    cseg_16_len: u16,
    dseg_len: u16,
}
/// VBE info (tag type 7).
#[repr(C)]
pub struct VbeInfo {
    header: TagHeader,
    vbe_mode: u16,
    vbe_interface_seg: u16,
    // NOTE(review): `vge_` looks like a typo for `vbe_` — confirm before
    // renaming, since the field name is part of this struct's interface.
    vge_interface_off: u16,
    vbe_interface_len: u16,
    vbe_control_info: [u8; 512],
    vbe_mode_info: [u8; 256],
}
/// Framebuffer info (tag type 8). Layout not yet filled in.
#[derive(Debug)]
#[repr(C)]
struct FramebufferInfo {
    // TODO
}
/// EFI 32-bit system table pointer (tag type 11).
#[derive(Debug)]
#[repr(C)]
pub struct Efi32SystemTable {
    header: TagHeader,
    pointer: u32,
}
/// EFI 64-bit system table pointer (tag type 12).
#[derive(Debug)]
#[repr(C)]
pub struct Efi64SystemTable {
    header: TagHeader,
    pointer: u64,
}
/// SMBIOS tables (tag type 13). Layout not yet filled in.
#[derive(Debug)]
#[repr(C)]
struct SMBiosTable {
    // TODO
}
/// ACPI old RSDP (tag type 14). Layout not yet filled in.
#[derive(Debug)]
#[repr(C)]
pub struct AcpiOldRdsp {
    // TODO
}
/// ACPI new RSDP (tag type 15). Layout not yet filled in.
#[derive(Debug)]
#[repr(C)]
pub struct AcpiNewRdsp {
    // TODO
}
/// Networking info (tag type 16). Layout not yet filled in.
#[derive(Debug)]
#[repr(C)]
pub struct NetworkingInfo {
    // TODO
}
/// EFI memory map (tag type 17). Layout not yet filled in.
#[derive(Debug)]
#[repr(C)]
pub struct EfiMemoryMap {
    // TODO
}
/// EFI boot services not terminated (tag type 18). Layout not yet filled in.
#[derive(Debug)]
#[repr(C)]
pub struct EfiBootServicesNotTerminated {
    // TODO
}
/// EFI 32-bit image handle (tag type 19). Layout not yet filled in.
#[derive(Debug)]
#[repr(C)]
pub struct Efi32ImageHandle {
    // TODO
}
/// EFI 64-bit image handle (tag type 20). Layout not yet filled in.
#[derive(Debug)]
#[repr(C)]
pub struct Efi64ImageHandle {
    // TODO
}
/// Image load base address (tag type 21). Layout not yet filled in.
#[derive(Debug)]
#[repr(C)]
pub struct ImageLoadBase {
    // TODO
}
| true
|
469068dfe25eb1bf538019bee414dcd29d316d1f
|
Rust
|
BurntSushi/fst
|
/fst-levenshtein/src/lib.rs
|
UTF-8
| 9,793
| 3.4375
| 3
|
[
"MIT",
"Unlicense"
] |
permissive
|
use std::cmp;
use std::collections::hash_map::Entry;
use std::collections::{HashMap, HashSet};
use std::fmt;
use utf8_ranges::{Utf8Range, Utf8Sequences};
use fst::automaton::Automaton;
pub use self::error::Error;
mod error;
const STATE_LIMIT: usize = 10_000; // currently at least 20MB >_<
/// A Unicode aware Levenshtein automaton for running efficient fuzzy queries.
///
/// A Levenshtein automata is one way to search any finite state transducer
/// for keys that *approximately* match a given query. A Levenshtein automaton
/// approximates this by returning all keys within a certain edit distance of
/// the query. The edit distance is defined by the number of insertions,
/// deletions and substitutions required to turn the query into the key.
/// Insertions, deletions and substitutions are based on
/// **Unicode characters** (where each character is a single Unicode scalar
/// value).
///
/// # Example
///
/// This example shows how to find all keys within an edit distance of `1`
/// from `foo`.
///
/// ```rust
/// extern crate fst;
/// extern crate fst_levenshtein;
///
/// use fst::{IntoStreamer, Streamer, Set};
/// use fst_levenshtein::Levenshtein;
///
/// fn main() {
/// let keys = vec!["fa", "fo", "fob", "focus", "foo", "food", "foul"];
/// let set = Set::from_iter(keys).unwrap();
///
/// let lev = Levenshtein::new("foo", 1).unwrap();
/// let mut stream = set.search(&lev).into_stream();
///
/// let mut keys = vec![];
/// while let Some(key) = stream.next() {
/// keys.push(key.to_vec());
/// }
/// assert_eq!(keys, vec![
/// "fo".as_bytes(), // 1 deletion
/// "fob".as_bytes(), // 1 substitution
/// "foo".as_bytes(), // 0 insertions/deletions/substitutions
/// "food".as_bytes(), // 1 insertion
/// ]);
/// }
/// ```
///
/// This example only uses ASCII characters, but it will work equally well
/// on Unicode characters.
///
/// # Warning: experimental
///
/// While executing this Levenshtein automaton against a finite state
/// transducer will be very fast, *constructing* an automaton may not be.
/// Namely, this implementation is a proof of concept. While I believe the
/// algorithmic complexity is not exponential, the implementation is not speedy
/// and it can use enormous amounts of memory (tens of MB before a hard-coded
/// limit will cause an error to be returned).
///
/// This is important functionality, so one should count on this implementation
/// being vastly improved in the future.
pub struct Levenshtein {
    // Lazy NFA form of the query; kept around for Debug output.
    prog: DynamicLevenshtein,
    // Pre-compiled byte-level DFA actually consulted during search.
    dfa: Dfa,
}
impl Levenshtein {
    /// Create a new Levenshtein query.
    ///
    /// The query finds all matching terms that are at most `distance`
    /// edit operations from `query`. (An edit operation may be an insertion,
    /// a deletion or a substitution.)
    ///
    /// If the underlying automaton becomes too big, then an error is returned.
    ///
    /// A `Levenshtein` value satisfies the `Automaton` trait, which means it
    /// can be used with the `search` method of any finite state transducer.
    #[inline]
    pub fn new(query: &str, distance: u32) -> Result<Levenshtein, Error> {
        let lev = DynamicLevenshtein {
            query: query.to_owned(),
            dist: distance as usize,
        };
        // Compile the DFA eagerly; this is where TooManyStates can surface.
        let dfa = DfaBuilder::new(lev.clone()).build()?;
        Ok(Levenshtein { prog: lev, dfa })
    }
}
impl fmt::Debug for Levenshtein {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "Levenshtein(query: {:?}, distance: {:?})",
            self.prog.query, self.prog.dist
        )
    }
}
/// The classic dynamic-programming Levenshtein automaton, evaluated lazily.
///
/// A state holds one cost per query prefix (plus one for the empty prefix):
/// entry `i` is the cheapest edit distance between the first `i` characters
/// of `query` and the input consumed so far. Costs are saturated at
/// `dist + 1`, so the number of distinct states stays finite.
#[derive(Clone)]
struct DynamicLevenshtein {
    query: String,
    dist: usize,
}
impl DynamicLevenshtein {
    /// Initial state: matching empty input against prefix `i` costs `i`
    /// deletions.
    fn start(&self) -> Vec<usize> {
        let prefixes = self.query.chars().count() + 1;
        (0..prefixes).collect()
    }
    /// Has the whole query been matched within the edit budget?
    fn is_match(&self, state: &[usize]) -> bool {
        match state.last() {
            Some(&cost) => cost <= self.dist,
            None => false,
        }
    }
    /// Could any continuation of the input still match within budget?
    /// Equivalent to "the minimum cost in the state is within `dist`".
    fn can_match(&self, state: &[usize]) -> bool {
        state.iter().any(|&cost| cost <= self.dist)
    }
    /// One DP transition on input `chr` (`None` stands for any character
    /// that does not appear in the query).
    fn accept(&self, state: &[usize], chr: Option<char>) -> Vec<usize> {
        let cap = self.dist + 1;
        let mut next = Vec::with_capacity(state.len());
        next.push(state[0] + 1);
        for (i, c) in self.query.chars().enumerate() {
            let substitution = state[i] + if chr == Some(c) { 0 } else { 1 };
            let insertion = next[i] + 1;
            let deletion = state[i + 1] + 1;
            let best = substitution.min(insertion).min(deletion);
            // Saturate at dist + 1: all over-budget costs are equivalent.
            next.push(best.min(cap));
        }
        next
    }
}
impl Automaton for Levenshtein {
    // A state is an index into the compiled DFA's state table; `None` is
    // the dead state from which no key can ever match.
    type State = Option<usize>;
    #[inline]
    fn start(&self) -> Option<usize> {
        // State 0 is the DFA state allocated first by the builder for the
        // initial Levenshtein state.
        Some(0)
    }
    #[inline]
    fn is_match(&self, state: &Option<usize>) -> bool {
        state.map(|state| self.dfa.states[state].is_match).unwrap_or(false)
    }
    #[inline]
    fn can_match(&self, state: &Option<usize>) -> bool {
        // Every live DFA state can still reach a match; only `None` cannot.
        state.is_some()
    }
    #[inline]
    fn accept(&self, state: &Option<usize>, byte: u8) -> Option<usize> {
        // Missing transitions collapse to the dead state via and_then.
        state.and_then(|state| self.dfa.states[state].next[byte as usize])
    }
}
/// The compiled byte-level DFA backing a `Levenshtein` query.
#[derive(Debug)]
pub struct Dfa {
    states: Vec<State>,
}
/// One DFA state: a dense 256-entry byte-transition table plus a flag
/// marking whether reaching this state completes a match.
struct State {
    next: [Option<usize>; 256],
    is_match: bool,
}
impl fmt::Debug for State {
    // Manual impl so only populated transitions are printed, instead of
    // all 256 mostly-empty entries.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        writeln!(f, "State {{")?;
        writeln!(f, "  is_match: {:?}", self.is_match)?;
        for i in 0..256 {
            if let Some(si) = self.next[i] {
                writeln!(f, "  {:?}: {:?}", i, si)?;
            }
        }
        write!(f, "}}")
    }
}
/// Expands the lazy `DynamicLevenshtein` NFA into the explicit byte-level
/// [`Dfa`], deduplicating states along the way.
struct DfaBuilder {
    dfa: Dfa,
    lev: DynamicLevenshtein,
    // Dedup map from a Levenshtein state vector to its DFA state index.
    cache: HashMap<Vec<usize>, usize>,
}
impl DfaBuilder {
    fn new(lev: DynamicLevenshtein) -> DfaBuilder {
        DfaBuilder {
            dfa: Dfa { states: Vec::with_capacity(16) },
            lev,
            cache: HashMap::with_capacity(1024),
        }
    }
    // Worklist exploration: pop an NFA state, materialize its transitions
    // (one "mismatch" catch-all plus one per query character), and push any
    // newly discovered successor states. Fails once the state count
    // exceeds STATE_LIMIT.
    fn build(mut self) -> Result<Dfa, Error> {
        let mut stack = vec![self.lev.start()];
        let mut seen = HashSet::new();
        let query = self.lev.query.clone(); // temp work around of borrowck
        while let Some(lev_state) = stack.pop() {
            let dfa_si = self.cached_state(&lev_state).unwrap();
            let mismatch = self.add_mismatch_utf8_states(dfa_si, &lev_state);
            if let Some((next_si, lev_next)) = mismatch {
                if !seen.contains(&next_si) {
                    seen.insert(next_si);
                    stack.push(lev_next);
                }
            }
            for (i, c) in query.chars().enumerate() {
                // Positions already over budget cannot improve; skip them.
                if lev_state[i] > self.lev.dist {
                    continue;
                }
                let lev_next = self.lev.accept(&lev_state, Some(c));
                let next_si = self.cached_state(&lev_next);
                if let Some(next_si) = next_si {
                    // Exact-match edges overwrite the earlier catch-all.
                    self.add_utf8_sequences(true, dfa_si, next_si, c, c);
                    if !seen.contains(&next_si) {
                        seen.insert(next_si);
                        stack.push(lev_next);
                    }
                }
            }
            if self.dfa.states.len() > STATE_LIMIT {
                return Err(Error::TooManyStates(STATE_LIMIT));
            }
        }
        Ok(self.dfa)
    }
    fn cached_state(&mut self, lev_state: &[usize]) -> Option<usize> {
        self.cached(lev_state).map(|(si, _)| si)
    }
    // Returns the DFA index for an NFA state (allocating one on first
    // sight) plus whether it already existed; `None` means the state can
    // never lead to a match and is dropped entirely.
    fn cached(&mut self, lev_state: &[usize]) -> Option<(usize, bool)> {
        if !self.lev.can_match(lev_state) {
            return None;
        }
        Some(match self.cache.entry(lev_state.to_vec()) {
            Entry::Occupied(v) => (*v.get(), true),
            Entry::Vacant(v) => {
                let is_match = self.lev.is_match(lev_state);
                self.dfa.states.push(State { next: [None; 256], is_match });
                (*v.insert(self.dfa.states.len() - 1), false)
            }
        })
    }
    // Adds the catch-all "any character" transition (the mismatch edge),
    // covering the full Unicode scalar range without overwriting existing
    // exact-match edges.
    fn add_mismatch_utf8_states(
        &mut self,
        from_si: usize,
        lev_state: &[usize],
    ) -> Option<(usize, Vec<usize>)> {
        let mismatch_state = self.lev.accept(lev_state, None);
        let to_si = match self.cached(&mismatch_state) {
            None => return None,
            Some((si, _)) => si,
            // Some((si, true)) => return Some((si, mismatch_state)),
            // Some((si, false)) => si,
        };
        self.add_utf8_sequences(false, from_si, to_si, '\u{0}', '\u{10FFFF}');
        return Some((to_si, mismatch_state));
    }
    // Encodes the scalar range [from_chr, to_chr] as UTF-8 byte sequences,
    // inserting intermediate non-matching DFA states for multi-byte
    // encodings so the DFA operates purely on bytes.
    fn add_utf8_sequences(
        &mut self,
        overwrite: bool,
        from_si: usize,
        to_si: usize,
        from_chr: char,
        to_chr: char,
    ) {
        for seq in Utf8Sequences::new(from_chr, to_chr) {
            let mut fsi = from_si;
            for range in &seq.as_slice()[0..seq.len() - 1] {
                let tsi = self.new_state(false);
                self.add_utf8_range(overwrite, fsi, tsi, range);
                fsi = tsi;
            }
            self.add_utf8_range(
                overwrite,
                fsi,
                to_si,
                &seq.as_slice()[seq.len() - 1],
            );
        }
    }
    // Wires one contiguous byte range from `from` to `to`; with
    // `overwrite` set, existing (catch-all) edges are replaced.
    fn add_utf8_range(
        &mut self,
        overwrite: bool,
        from: usize,
        to: usize,
        range: &Utf8Range,
    ) {
        for b in range.start as usize..range.end as usize + 1 {
            if overwrite || self.dfa.states[from].next[b].is_none() {
                self.dfa.states[from].next[b] = Some(to);
            }
        }
    }
    fn new_state(&mut self, is_match: bool) -> usize {
        self.dfa.states.push(State { next: [None; 256], is_match });
        self.dfa.states.len() - 1
    }
}
| true
|
218f8fc2360c825274f831d5163600e6d9a9c15f
|
Rust
|
TheCharlatan/bitbox02-firmware
|
/src/rust/apps/ethereum/src/keypath.rs
|
UTF-8
| 3,678
| 2.78125
| 3
|
[
"Apache-2.0"
] |
permissive
|
// Copyright 2020 Shift Cryptosecurity AG
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use util::bip32::HARDENED;
/// Highest allowed account index (100 accounts: 0..=99).
const ACCOUNT_MAX: u32 = 99;
/// Does limit checks the keypath, whitelisting bip44 purpose, account and change.
/// Only the well-known xpub path m'/44'/coin'/0'/0 is accepted for now.
/// Since ethereum doesn't use the "change" path part it is always 0 and has
/// become part of the xpub keypath.
/// Returns true if the keypath is valid, false if it is invalid.
pub fn is_valid_keypath_xpub(keypath: &[u32], expected_coin: u32) -> bool {
    // Comparing against a fixed-size array also enforces length == 4.
    *keypath == [44 + HARDENED, expected_coin, HARDENED, 0]
}
/// Does limit checks the keypath, whitelisting bip44 purpose, account and change.
/// Accepts m'/44'/coin'/0'/0/account with account in 0..=ACCOUNT_MAX.
/// Returns true if the keypath is valid, false if it is invalid.
pub fn is_valid_keypath_address(keypath: &[u32], expected_coin: u32) -> bool {
    match keypath {
        // A valid address path is a valid xpub prefix plus one account index.
        [prefix @ .., account] if prefix.len() == 4 => {
            is_valid_keypath_xpub(prefix, expected_coin) && *account <= ACCOUNT_MAX
        }
        _ => false,
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_is_valid_keypath_xpub() {
        let expected_coin = 60 + HARDENED;
        assert!(is_valid_keypath_xpub(
            &[44 + HARDENED, expected_coin, 0 + HARDENED, 0],
            expected_coin
        ));
        // wrong coin.
        assert!(!is_valid_keypath_xpub(
            &[44 + HARDENED, expected_coin, 0 + HARDENED, 0],
            expected_coin + 1,
        ));
        // too short
        // NOTE(review): this case also passes a mismatched coin, so it would
        // fail even at the correct length — consider passing `expected_coin`.
        assert!(!is_valid_keypath_xpub(
            &[44 + HARDENED, expected_coin, 0 + HARDENED],
            expected_coin + 1,
        ));
        // too long
        // NOTE(review): same double-reason issue as the "too short" case.
        assert!(!is_valid_keypath_xpub(
            &[44 + HARDENED, expected_coin, 0 + HARDENED, 0, 0],
            expected_coin + 1,
        ));
    }
    #[test]
    fn test_is_valid_keypath_address() {
        let expected_coin = 60 + HARDENED;
        // Builds m'/44'/coin'/0'/0/account for a given account index.
        let keypath_for_account =
            |account| [44 + HARDENED, expected_coin, 0 + HARDENED, 0, account];
        // 100 good paths.
        for account in 0..100 {
            assert!(is_valid_keypath_address(
                &keypath_for_account(account),
                expected_coin
            ));
            // wrong coin
            assert!(!is_valid_keypath_address(
                &keypath_for_account(account),
                expected_coin + 1
            ));
        }
        // account index just past ACCOUNT_MAX must be rejected.
        assert!(!is_valid_keypath_address(
            &keypath_for_account(100),
            expected_coin
        ));
        // too short
        assert!(!is_valid_keypath_address(
            &[44 + HARDENED, expected_coin, 0 + HARDENED, 0],
            expected_coin
        ));
        // too long
        assert!(!is_valid_keypath_address(
            &[44 + HARDENED, expected_coin, 0 + HARDENED, 0, 0, 0],
            expected_coin
        ));
        // tweak keypath elements
        // (each of the four fixed path elements, changed alone, must fail)
        for i in 0..4 {
            let mut keypath = keypath_for_account(0);
            keypath[i] += 1;
            assert!(!is_valid_keypath_address(&keypath, expected_coin));
        }
    }
}
| true
|
d3c71e35fb65c13ad2d23f3b06b15a8842386b3c
|
Rust
|
thehabbos007/anchors
|
/src/ext.rs
|
UTF-8
| 12,986
| 3.5625
| 4
|
[] |
no_license
|
use super::{Anchor, AnchorInner, Engine};
use std::panic::Location;
mod cutoff;
mod map;
mod map_mut;
mod refmap;
mod then;
/// A trait automatically implemented for all Anchors.
/// You'll likely want to `use` this trait in most of your programs, since it can create many
/// useful Anchors that derive their output incrementally from some other Anchors.
///
/// AnchorExt is also implemented for all tuples of up to 9 Anchor references. For example, you can combine three
/// values incrementally into a tuple with:
///
/// ```
/// use anchors::{singlethread::Engine, Constant, AnchorExt};
/// let mut engine = Engine::new();
/// let a = Constant::new(1);
/// let b = Constant::new(2);
/// let c = Constant::new("hello");
///
/// // here we use AnchorExt to map three values together:
/// let res = (&a, &b, &c).map(|a_val, b_val, c_val| (*a_val, *b_val, *c_val));
///
/// assert_eq!((1, 2, "hello"), engine.get(&res));
/// ```
pub trait AnchorExt<E: Engine>: Sized {
type Target;
/// Creates an Anchor that maps a number of incremental input values to some output value.
/// The function `f` accepts inputs as references, and must return an owned value.
/// `f` will always be recalled any time any input value changes.
/// For example, you can add two numbers together with `map`:
///
/// ```
/// use anchors::{singlethread::Engine, Anchor, Constant, AnchorExt};
/// let mut engine = Engine::new();
/// let a = Constant::new(1);
/// let b = Constant::new(2);
///
/// // add the two numbers together; types have been added for clarity but are optional:
/// let res: Anchor<usize, Engine> = (&a, &b).map(|a_val: &usize, b_val: &usize| -> usize {
/// *a_val+*b_val
/// });
///
/// assert_eq!(3, engine.get(&res));
/// ```
fn map<F, Out>(self, f: F) -> Anchor<Out, E>
where
Out: 'static,
F: 'static,
map::Map<Self::Target, F, Out>: AnchorInner<E, Output = Out>;
fn map_mut<F, Out>(self, initial: Out, f: F) -> Anchor<Out, E>
where
Out: 'static,
F: 'static,
map_mut::MapMut<Self::Target, F, Out>: AnchorInner<E, Output = Out>;
/// Creates an Anchor that maps a number of incremental input values to some output Anchor.
/// With `then`, your computation graph can dynamically select an Anchor to recalculate based
/// on some other incremental computation..
/// The function `f` accepts inputs as references, and must return an owned `Anchor`.
/// `f` will always be recalled any time any input value changes.
///
/// For example, you can select which of two additions gets calculated:
///
/// ```
/// use anchors::{singlethread::Engine, Anchor, Constant, AnchorExt};
/// let mut engine = Engine::new();
/// let decision = Constant::new(true);
/// let num = Constant::new(1);
///
/// // because of how we're using the `then` below, only one of these two
/// // additions will actually be run
/// let a = num.map(|num| *num + 1);
/// let b = num.map(|num| *num + 2);
///
/// // types have been added for clarity but are optional:
/// let res: Anchor<usize, Engine> = decision.then(move |decision: &bool| {
/// if *decision {
/// a.clone()
/// } else {
/// b.clone()
/// }
/// });
///
/// assert_eq!(2, engine.get(&res));
/// ```
fn then<F, Out>(self, f: F) -> Anchor<Out, E>
where
F: 'static,
Out: 'static,
then::Then<Self::Target, Out, F, E>: AnchorInner<E, Output = Out>;
/// Creates an Anchor that outputs its input. However, even if a value changes
/// you may not want to recompute downstream nodes unless the value changes substantially.
/// The function `f` accepts inputs as references, and must return true if Anchors that derive
/// values from this cutoff should recalculate, or false if derivative Anchors should not recalculate.
/// If this is the first calculation, `f` will be called, but return values of `false` will be ignored.
/// `f` will always be recalled any time the input value changes.
/// For example, you can only perform an addition if an input changes by more than 10:
///
/// ```
/// use anchors::{singlethread::Engine, Anchor, Var, AnchorExt};
/// let mut engine = Engine::new();
/// let (num, set_num) = Var::new(1i32);
/// let cutoff = {
/// let mut old_num_opt: Option<i32> = None;
/// num.cutoff(move |num| {
/// if let Some(old_num) = old_num_opt {
/// if (old_num - *num).abs() < 10 {
/// return false;
/// }
/// }
/// old_num_opt = Some(*num);
/// true
/// })
/// };
/// let res = cutoff.map(|cutoff| *cutoff + 1);
///
/// assert_eq!(2, engine.get(&res));
///
/// // small changes don't cause recalculations
/// set_num.set(5);
/// assert_eq!(2, engine.get(&res));
///
/// // but big changes do
/// set_num.set(11);
/// assert_eq!(12, engine.get(&res));
/// ```
fn cutoff<F, Out>(self, _f: F) -> Anchor<Out, E>
where
Out: 'static,
F: 'static,
cutoff::Cutoff<Self::Target, F>: AnchorInner<E, Output = Out>;
/// Creates an Anchor that maps some input reference to some output reference.
/// Performance is critical here: `f` will always be recalled any time any downstream node
/// requests the value of this Anchor, *not* just when an input value changes.
/// It's also critical to note that due to constraints
/// with Rust's lifetime system, these output references can not be owned values, and must
/// live exactly as long as the input reference.
/// For example, you can lookup a particular value inside a tuple without cloning:
///
/// ```
/// use anchors::{singlethread::Engine, Anchor, Constant, AnchorExt};
/// struct CantClone {val: usize};
/// let mut engine = Engine::new();
/// let tuple = Constant::new((CantClone{val: 1}, CantClone{val: 2}));
///
/// // lookup the first value inside the tuple; types have been added for clarity but are optional:
/// let res: Anchor<CantClone, Engine> = tuple.refmap(|tuple: &(CantClone, CantClone)| -> &CantClone {
/// &tuple.0
/// });
///
/// // check if the cantclone value is correct:
/// let is_one = res.map(|tuple: &CantClone| -> bool {
/// tuple.val == 1
/// });
///
/// assert_eq!(true, engine.get(&is_one));
/// ```
fn refmap<F, Out>(self, _f: F) -> Anchor<Out, E>
where
Out: 'static,
F: 'static,
refmap::RefMap<Self::Target, F>: AnchorInner<E, Output = Out>;
}
pub trait AnchorSplit<E: Engine>: Sized {
type Target;
fn split(&self) -> Self::Target;
}
impl<O1, E> AnchorExt<E> for &Anchor<O1, E>
where
O1: 'static,
E: Engine,
{
type Target = (Anchor<O1, E>,);
#[track_caller]
fn map<F, Out>(self, f: F) -> Anchor<Out, E>
where
Out: 'static,
F: 'static,
map::Map<Self::Target, F, Out>: AnchorInner<E, Output = Out>,
{
E::mount(map::Map {
anchors: (self.clone(),),
f,
output: None,
output_stale: true,
location: Location::caller(),
})
}
#[track_caller]
fn map_mut<F, Out>(self, initial: Out, f: F) -> Anchor<Out, E>
where
Out: 'static,
F: 'static,
map_mut::MapMut<Self::Target, F, Out>: AnchorInner<E, Output = Out>,
{
E::mount(map_mut::MapMut {
anchors: (self.clone(),),
f,
output: initial,
output_stale: true,
location: Location::caller(),
})
}
#[track_caller]
fn then<F, Out>(self, f: F) -> Anchor<Out, E>
where
F: 'static,
Out: 'static,
then::Then<Self::Target, Out, F, E>: AnchorInner<E, Output = Out>,
{
E::mount(then::Then {
anchors: (self.clone(),),
f,
f_anchor: None,
location: Location::caller(),
lhs_stale: true,
})
}
#[track_caller]
fn refmap<F, Out>(self, f: F) -> Anchor<Out, E>
where
Out: 'static,
F: 'static,
refmap::RefMap<Self::Target, F>: AnchorInner<E, Output = Out>,
{
E::mount(refmap::RefMap {
anchors: (self.clone(),),
f,
location: Location::caller(),
})
}
#[track_caller]
fn cutoff<F, Out>(self, f: F) -> Anchor<Out, E>
where
Out: 'static,
F: 'static,
cutoff::Cutoff<Self::Target, F>: AnchorInner<E, Output = Out>,
{
E::mount(cutoff::Cutoff {
anchors: (self.clone(),),
f,
location: Location::caller(),
})
}
}
macro_rules! impl_tuple_ext {
($([$output_type:ident, $num:tt])+) => {
impl <$($output_type,)+ E> AnchorSplit<E> for Anchor<($($output_type,)+), E>
where
$(
$output_type: Clone + PartialEq + 'static,
)+
E: Engine,
{
type Target = ($(Anchor<$output_type, E>,)+);
fn split(&self) -> Self::Target {
($(
self.refmap(|v| &v.$num),
)+)
}
}
impl<$($output_type,)+ E> AnchorExt<E> for ($(&Anchor<$output_type, E>,)+)
where
$(
$output_type: 'static,
)+
E: Engine,
{
type Target = ($(Anchor<$output_type, E>,)+);
#[track_caller]
fn map<F, Out>(self, f: F) -> Anchor<Out, E>
where
Out: 'static,
F: 'static,
map::Map<Self::Target, F, Out>: AnchorInner<E, Output=Out>,
{
E::mount(map::Map {
anchors: ($(self.$num.clone(),)+),
f,
output: None,
output_stale: true,
location: Location::caller(),
})
}
#[track_caller]
fn map_mut<F, Out>(self, initial: Out, f: F) -> Anchor<Out, E>
where
Out: 'static,
F: 'static,
map_mut::MapMut<Self::Target, F, Out>: AnchorInner<E, Output=Out>,
{
E::mount(map_mut::MapMut {
anchors: ($(self.$num.clone(),)+),
f,
output: initial,
output_stale: true,
location: Location::caller(),
})
}
#[track_caller]
fn then<F, Out>(self, f: F) -> Anchor<Out, E>
where
F: 'static,
Out: 'static,
then::Then<Self::Target, Out, F, E>: AnchorInner<E, Output=Out>,
{
E::mount(then::Then {
anchors: ($(self.$num.clone(),)+),
f,
f_anchor: None,
location: Location::caller(),
lhs_stale: true,
})
}
#[track_caller]
fn refmap<F, Out>(self, f: F) -> Anchor<Out, E>
where
Out: 'static,
F: 'static,
refmap::RefMap<Self::Target, F>: AnchorInner<E, Output = Out>,
{
E::mount(refmap::RefMap {
anchors: ($(self.$num.clone(),)+),
f,
location: Location::caller(),
})
}
#[track_caller]
fn cutoff<F, Out>(self, f: F) -> Anchor<Out, E>
where
Out: 'static,
F: 'static,
cutoff::Cutoff<Self::Target, F>: AnchorInner<E, Output = Out>,
{
E::mount(cutoff::Cutoff {
anchors: ($(self.$num.clone(),)+),
f,
location: Location::caller(),
})
}
}
}
}
impl_tuple_ext! {
[O0, 0]
}
impl_tuple_ext! {
[O0, 0]
[O1, 1]
}
impl_tuple_ext! {
[O0, 0]
[O1, 1]
[O2, 2]
}
impl_tuple_ext! {
[O0, 0]
[O1, 1]
[O2, 2]
[O3, 3]
}
impl_tuple_ext! {
[O0, 0]
[O1, 1]
[O2, 2]
[O3, 3]
[O4, 4]
}
impl_tuple_ext! {
[O0, 0]
[O1, 1]
[O2, 2]
[O3, 3]
[O4, 4]
[O5, 5]
}
impl_tuple_ext! {
[O0, 0]
[O1, 1]
[O2, 2]
[O3, 3]
[O4, 4]
[O5, 5]
[O6, 6]
}
impl_tuple_ext! {
[O0, 0]
[O1, 1]
[O2, 2]
[O3, 3]
[O4, 4]
[O5, 5]
[O6, 6]
[O7, 7]
}
impl_tuple_ext! {
[O0, 0]
[O1, 1]
[O2, 2]
[O3, 3]
[O4, 4]
[O5, 5]
[O6, 6]
[O7, 7]
[O8, 8]
}
| true
|
5379db9f5e82c058c464bfb01f21cec8121c9a10
|
Rust
|
rrcoco/test_2018_2021
|
/18_rust/HelloWorld/comments.rs
|
UTF-8
| 150
| 2.53125
| 3
|
[
"MIT"
] |
permissive
|
/// Demonstrates line, block, and (misplaced) doc comments.
fn main() {
    // A block comment can sit mid-expression; the commented-out term is ignored.
    let x = 5 + 5; // so x is 10, not 100
    println!("Is `x` 10 or 100? x = {}, {}", x, x * x);
}
| true
|
a8231884273f52382eb63a596830a6752f7977d3
|
Rust
|
JialuGong/leetcode-rust
|
/rust-ac/1385.find-the-distance-value-between-two-arrays.55737591.ac.rs
|
UTF-8
| 270
| 2.765625
| 3
|
[] |
no_license
|
impl Solution {
    /// LeetCode 1385: counts the elements of `arr1` whose absolute
    /// difference to *every* element of `arr2` is strictly greater than `d`.
    pub fn find_the_distance_value(arr1: Vec<i32>, arr2: Vec<i32>, d: i32) -> i32 {
        // Replaces the manual flag loop with an iterator chain: an element
        // of arr1 contributes iff no element of arr2 lies within distance d.
        arr1.iter()
            .filter(|&&a| arr2.iter().all(|&b| (a - b).abs() > d))
            .count() as i32
    }
}
| true
|
51f7d4aec92efb9e137b8c4fb9286f86ed5de222
|
Rust
|
silverweed/ecsde
|
/inle/inle_core/src/env.rs
|
UTF-8
| 2,458
| 2.96875
| 3
|
[] |
no_license
|
use std::boxed::Box;
use std::env;
use std::ffi::OsStr;
use std::fs;
use std::path::{Path, PathBuf};
/// Paths gathered from the process environment at startup.
#[derive(Clone)]
pub struct Env_Info {
    // Canonicalized path of the running executable.
    pub full_exe_path: Box<Path>,
    // Base directory used for asset/config lookups.
    pub working_dir: Box<Path>,
    // `<working_dir>/assets`
    pub assets_root: Box<Path>,
    // `<working_dir>/cfg`
    pub cfg_root: Box<Path>,
}
impl Env_Info {
    /// Resolves the executable path and derives the working, assets and cfg
    /// directories from it. When running out of a cargo
    /// `target/{debug,release,profile}` (or its `deps`) directory, the
    /// working dir is rewound to the repository root so assets/cfg resolve
    /// without symlinks.
    ///
    /// # Errors
    /// Propagates I/O errors from resolving/canonicalizing the exe path.
    pub fn gather() -> std::io::Result<Env_Info> {
        let full_exe_path = fs::canonicalize(env::current_exe()?)?;
        let mut working_dir = PathBuf::from(
            full_exe_path
                .parent()
                .unwrap_or_else(|| panic!("Wierd exe path: {:?}", full_exe_path)),
        );
        // Find out if we're in a dev environment and, if so, set the working dir to the repository
        // root (so we don't have to symlink/copy assets, cfg etc).
        // @Cleanup: this should be a dev-only thing, maybe turn it on with a feature flag?
        // and_then replaces the clippy-flagged map(..).flatten() combination.
        let cur_dir = working_dir.as_path().file_name().and_then(OsStr::to_str);
        let parent_dir = working_dir
            .as_path()
            .parent()
            .and_then(Path::file_name)
            .and_then(OsStr::to_str);
        if matches!(cur_dir, Some("debug" | "release" | "profile"))
            && matches!(parent_dir, Some("target"))
        {
            // <repo>/target/<profile> -> <repo>
            working_dir.pop();
            working_dir.pop();
        } else if matches!(cur_dir, Some("deps"))
            && matches!(parent_dir, Some("debug" | "release" | "profile"))
        {
            // <repo>/target/<profile>/deps -> <repo>
            working_dir.pop();
            working_dir.pop();
            working_dir.pop();
        }
        let assets_root = {
            let mut assets_root_buf = working_dir.clone();
            assets_root_buf.push("assets");
            assets_root_buf.into_boxed_path()
        };
        let cfg_root = {
            let mut cfgs_root_buf = working_dir.clone();
            cfgs_root_buf.push("cfg");
            cfgs_root_buf.into_boxed_path()
        };
        Ok(Env_Info {
            full_exe_path: full_exe_path.into_boxed_path(),
            working_dir: working_dir.into_boxed_path(),
            assets_root,
            cfg_root,
        })
    }
}
/// Builds `<assets_root>/<dir>/<file>`.
// @Speed: when we have a frame temp allocator, this should probably allocate there.
#[inline]
pub fn asset_path(env: &Env_Info, dir: &str, file: &str) -> Box<Path> {
    let mut s = PathBuf::from(env.assets_root.as_ref());
    s.push(dir);
    s.push(file);
    s.into_boxed_path()
}
| true
|
e79093b5e198c6ee8949d56c74336d931e9148f6
|
Rust
|
steadylearner/Rust-Full-Stack
|
/database/sqlite_rust/src/main_crud_prototype.rs
|
UTF-8
| 4,308
| 3.4375
| 3
|
[
"MIT"
] |
permissive
|
use std::io::stdin;
use rusqlite::NO_PARAMS;
use rusqlite::{params, Connection, Result};
// // https://docs.rs/time/0.2.9/time/struct.Instant.html
// use time::Instant;
// https://github.com/jgallagher/rusqlite#optional-features - chrono
// Refer to Cargo.toml
use chrono::naive::NaiveDateTime;
// 1. created_at: NaiveDateTime
/// A row of the `messages` table.
#[derive(Debug)]
struct Message {
    query: String, // The query is unique, so it doubles as the row id.
    used: i64, // How many times the query has been used.
    created_at: NaiveDateTime, // Filled in by SQLite's datetime() default.
}
// Message {
// query: "rust",
// used: 1,
// created_at: 2020-03-24T11:01:34,
// }
// 2. created_at: String, // without Chrono
// Message {
// query: "rust",
// used: 1,
// created_at: "2020-03-24 10:54:05",
// }
/// Reads one line from stdin and returns it without the trailing newline.
///
/// Uses `trim_end_matches` instead of slicing off the last byte, which
/// panicked on EOF (empty read) and left a stray `\r` behind on Windows.
///
/// # Panics
/// Panics if reading from stdin fails.
pub fn from_stdin() -> String {
    let mut input = String::new();
    stdin().read_line(&mut input).unwrap();
    input.trim_end_matches(|c| c == '\n' || c == '\r').to_string()
}
// time::Instant::now
/// Interactive CRUD loop over a local SQLite `messages` table.
///
/// Commands: `c`reate, `r`ead, `u`pdate (increments `used`), `d`elete,
/// `l`ist, `e`nd.
///
/// # Errors
/// Propagates any rusqlite error from opening the database or executing
/// a statement.
fn main() -> Result<()> {
    let conn = Connection::open("messages.db")?;
    conn.execute(
        "create table if not exists messages (
             query text not null unique,
             used integer default 1,
             created_at DATE DEFAULT (datetime('now','localtime'))
         )",
        NO_PARAMS,
    )?;
    loop {
        println!("What do you want?[c, r, u, d, l, e]"); // create, update(used +1), read, delete, list
        let action = from_stdin();
        match action.as_ref() {
            "c" => {
                // rust, golang
                println!("What do you want to save in messages?");
                let query = from_stdin();
                conn.execute("INSERT INTO messages (query) values (?1)", &[&query])?;
                println!("{:#?} is included in messages.", query)
            }
            "r" => {
                println!("Which query you want to read?");
                let query = from_stdin();
                let mut stmt = conn.prepare("SELECT * FROM messages WHERE query = (?1);")?;
                // You can use others instead of query_map.
                // https://docs.rs/rusqlite/0.21.0/rusqlite/struct.Statement.html#method.query
                let message = stmt.query_map(params![&query], |row| {
                    Ok(Message {
                        query: row.get(0)?,
                        used: row.get(1)?,
                        created_at: row.get(2)?,
                    })
                })?;
                for row in message {
                    println!("{:#?}", row?);
                }
            }
            "u" => {
                println!("What query you want to increment its used number?");
                let query = from_stdin();
                // https://stackoverflow.com/questions/744289/sqlite-increase-value-by-a-certain-number/744290
                // Find I can do the same with Postgresql.
                // https://www.sqlitetutorial.net/sqlite-update/
                conn.execute("UPDATE messages SET used = used + 1 WHERE query = (?1);", &[&query])?;
                println!("{:#?} is used one more time.", &query)
            }
            "d" => {
                println!("What query you want to delete?");
                let query = from_stdin();
                conn.execute("DELETE FROM messages WHERE query = (?1);", &[&query])?;
                println!("{:#?} is deleted from the messages", &query)
            }
            "l" => {
                let mut stmt = conn.prepare("SELECT * FROM messages;")?;
                let messages = stmt.query_map(NO_PARAMS, |row| {
                    Ok(Message {
                        query: row.get(0)?,
                        used: row.get(1)?,
                        created_at: row.get(2)?,
                    })
                })?;
                for message in messages {
                    println!("{:#?}", message?);
                }
            }
            "e" => {
                println!("You want to end the SQLite CLI example.");
                break
            }
            _ => {
                // Fix: the original emitted two help messages back-to-back on
                // one line; the second was a stale copy that omitted `d`elete.
                println!("You should use [c, r, u, d, l, e] to create, read, update, delete, list messages and e to end.");
            }
        };
    }
    Ok(())
}
| true
|
b60007262963bacafa1e7adb050ba1af54828cf1
|
Rust
|
sugyan/leetcode
|
/problems/0560-subarray-sum-equals-k/lib.rs
|
UTF-8
| 659
| 3.4375
| 3
|
[] |
no_license
|
use std::collections::HashMap;
pub struct Solution;

impl Solution {
    /// Counts contiguous subarrays summing to `k` via prefix-sum counting:
    /// a running prefix `p` closes one valid subarray for every earlier
    /// prefix equal to `p - k`.
    pub fn subarray_sum(nums: Vec<i32>, k: i32) -> i32 {
        let mut prefix_counts: HashMap<i32, i32> = HashMap::new();
        // The empty prefix has sum 0.
        prefix_counts.insert(0, 1);
        let mut running = 0;
        let mut total = 0;
        for value in nums {
            running += value;
            if let Some(&count) = prefix_counts.get(&(running - k)) {
                total += count;
            }
            *prefix_counts.entry(running).or_insert(0) += 1;
        }
        total
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Both cases come from the LeetCode problem statement.
    #[test]
    fn example_1() {
        assert_eq!(2, Solution::subarray_sum(vec![1, 1, 1], 2));
    }
    #[test]
    fn example_2() {
        assert_eq!(2, Solution::subarray_sum(vec![1, 2, 3], 3));
    }
}
| true
|
476bf53588e5303ed308a8196843bc419bf8ddfd
|
Rust
|
hunterlester/safe_cli
|
/tests/safe_authenticator_service_integration.rs
|
UTF-8
| 3,587
| 2.5625
| 3
|
[
"MIT"
] |
permissive
|
use actix_web::{http::Method, test, App, HttpMessage};
use rand::Rng;
use safe_authenticator::{AuthError, Authenticator};
use safe_cli::{authorise, create_acc, index, login, AuthenticatorStruct};
use std::str::from_utf8;
use std::sync::{Arc, Mutex};
/// Builds the actix-web app under test with the same routes as production
/// (`/`, `/login`, `/create_acc`, `/authorise`), all sharing an initially
/// empty authenticator handle.
fn create_test_service() -> App<AuthenticatorStruct> {
    let handle: Arc<Mutex<Option<Result<Authenticator, AuthError>>>> = Arc::new(Mutex::new(None));
    App::with_state(AuthenticatorStruct {
        handle: handle.clone(),
    })
    .resource("/", |r| {
        r.method(Method::GET).with(index);
    })
    .resource("/login/{locator}/{password}", |r| {
        r.method(Method::POST).with(login);
    })
    .resource("/create_acc/{locator}/{password}/{invite}", |r| {
        r.method(Method::POST).with(create_acc);
    })
    .resource("/authorise/{auth_req}", |r| {
        r.method(Method::POST).with(authorise);
    })
}
/// The root route should answer any GET with a 2xx status.
#[test]
fn get_index() {
    let mut server = test::TestServer::with_factory(create_test_service);
    let req = server.client(Method::GET, "/").finish().unwrap();
    let res = server.execute(req.send()).unwrap();
    assert!(res.status().is_success());
}
/// Account creation with fresh random credentials should succeed.
#[test]
fn post_create_acc() {
    // Random credentials so repeated runs don't collide.
    let mut rng = rand::thread_rng();
    let (locator, password): (u32, u32) = (rng.gen(), rng.gen());
    let invite: u16 = rng.gen();
    let mut server = test::TestServer::with_factory(create_test_service);
    let url = format!("/create_acc/{}/{}/{}", locator, password, invite);
    let req = server.client(Method::POST, &url).finish().unwrap();
    let res = server.execute(req.send()).unwrap();
    assert!(res.status().is_success());
}
#[test]
fn post_login() {
let mut rng = rand::thread_rng();
let locator: u32 = rng.gen();
let password: u32 = rng.gen();
let invite: u16 = rng.gen();
let mut srv = test::TestServer::with_factory(create_test_service);
let create_acc_endpoint = format!("/create_acc/{}/{}/{}", locator, password, invite);
let create_acc_request = srv
.client(Method::POST, &create_acc_endpoint)
.finish()
.unwrap();
let create_acc_response = srv.execute(create_acc_request.send()).unwrap();
assert!(create_acc_response.status().is_success());
let login_endpoint = format!("/login/{}/{}", locator, password);
let login_request = srv.client(Method::POST, &login_endpoint).finish().unwrap();
let login_response = srv.execute(login_request.send()).unwrap();
assert!(login_response.status().is_success());
}
#[test]
#[ignore]
fn post_authorise() {
let mut rng = rand::thread_rng();
let locator: u32 = rng.gen();
let password: u32 = rng.gen();
let invite: u16 = rng.gen();
let mut srv = test::TestServer::with_factory(create_test_service);
let create_acc_endpoint = format!("/create_acc/{}/{}/{}", locator, password, invite);
let create_acc_request = srv
.client(Method::POST, &create_acc_endpoint)
.finish()
.unwrap();
let create_acc_response = srv.execute(create_acc_request.send()).unwrap();
assert!(create_acc_response.status().is_success());
let auth_req = "bAAAAAACTBZGGMAAAAAABGAAAAAAAAAAANB2W45DFOIXGYZLTORSXELRUHAXDGOAACYAAAAAAAAAAAR3VNFWGM33SMQQEQ5LOORSXEICMMVZXIZLSCEAAAAAAAAAAATLBNFSFGYLGMUXG4ZLUEBGHIZBOAEBAAAAAAAAAAAAHAAAAAAAAAAAF64DVMJWGSYYFAAAAAAAAAAAAAAAAAAAQAAAAAIAAAAADAAAAABAAAAAAYAAAAAAAAAAAL5YHKYTMNFRU4YLNMVZQKAAAAAAAAAAAAAAAAAABAAAAAAQAAAAAGAAAAACAAAAAAE";
let endpoint = format!("/authorise/{}", auth_req);
let request = srv.client(Method::POST, &endpoint).finish().unwrap();
let response = srv.execute(request.send()).unwrap();
assert!(response.status().is_client_error());
let bytes = srv.execute(response.body()).unwrap();
let body = from_utf8(&bytes).unwrap();
assert_eq!(body, "Hello world!");
}
| true
|
e989c21fcd8378d065a73abc33e25c924f8cff15
|
Rust
|
leontoeides/google_maps
|
/src/roads/snap_to_roads/mod.rs
|
UTF-8
| 2,202
| 3.078125
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
//! The Roads API **Snap To Roads** service takes up to 100 GPS points collected
//! along a route, and returns a similar set of data, with the points snapped to
//! the most likely roads the vehicle was traveling along. Optionally, you can
//! request that the points be interpolated, resulting in a path that smoothly
//! follows the geometry of the road.
//!
//! # [Required parameters](https://developers.google.com/maps/documentation/roads/snap#required-parameters)
//!
//! * `path` - The path to be snapped. The path parameter accepts a list of
//! latitude/longitude pairs. Latitude and longitude values should be separated
//! by commas. Coordinates should be separated by the pipe character: "|". For
//! example: `path=60.170880,24.942795|60.170879,24.942796|60.170877,24.942796`.
//!
//! Note: The snapping algorithm works best for points that are not too far
//! apart. If you observe odd snapping behavior, try creating paths that have
//! points closer together. To ensure the best snap-to-road quality, you should
//! aim to provide paths on which consecutive pairs of points are within 300m of
//! each other. This will also help in handling any isolated, long jumps between
//! consecutive points caused by GPS signal loss, or noise.
//!
//! # [Optional parameters](https://developers.google.com/maps/documentation/roads/snap#optional-parameters)
//!
//! * `interpolate` - Whether to interpolate a path to include all points
//! forming the full road-geometry. When `true`, additional interpolated points
//! will also be returned, resulting in a path that smoothly follows the
//! geometry of the road, even around corners and through tunnels. Interpolated
//! paths will most likely contain more points than the original path. Defaults
//! to `false`.
// Request builder and response deserialization for the Snap To Roads call.
pub mod request;
pub mod response;
// -----------------------------------------------------------------------------
// Base endpoint; query parameters are appended by the request builder.
const SERVICE_URL: &str = "https://roads.googleapis.com/v1/snapToRoads";
// -----------------------------------------------------------------------------
// Re-export under service-qualified names for the crate's public API.
pub use crate::roads::snap_to_roads::request::Request as SnapToRoadsRequest;
pub use crate::roads::snap_to_roads::response::Response as SnapToRoadsResponse;
| true
|
079fe8e50ed2fc8eeafff9440c96459194e641ea
|
Rust
|
RafeWoo/asteroids
|
/src/states/pause.rs
|
UTF-8
| 2,254
| 3.421875
| 3
|
[] |
no_license
|
//! PauseState is entered when player has paused gameplay
//!
//! Can go to gameplay state
//! Waiting for player to press unpause key
//!
//! Display "Paused" on Screen
//! set a paused flag
use amethyst::{
ecs::prelude::*,
input::is_key_down,
prelude::*,
renderer::VirtualKeyCode,
ui::{Anchor, UiText, UiTransform, FontHandle},
};
use crate::game_constants::*;
use crate::resources;
/// State pushed on top of gameplay while paused; owns the "PAUSED" UI
/// entity so it can be removed again on resume.
pub struct PauseState
{
    message: Option<Entity>,
}
impl PauseState
{
    /// Creates a pause state with no on-screen message yet (it is spawned
    /// in `on_start`).
    pub fn new() -> PauseState
    {
        PauseState{
            message: None,
        }
    }
}
/// Spawns a centered "PAUSED" UI text entity and returns it so the caller
/// can delete it when unpausing.
fn display_pause_message(world: &mut World)->Entity
{
    let font_handle = world.read_resource::<FontHandle>().clone();
    // Centered 600x100 area at z=1, in front of the playfield.
    let message_transform = UiTransform::new(
        "MESSAGE".to_string(), Anchor::Middle,
        0., 0., 1.,
        600., 100.,
        0,
    );
    world.create_entity()
        .with( message_transform )
        .with( UiText::new(
            font_handle,
            "PAUSED".to_string(),
            COLOUR_WHITE,
            50.,
        )).build()
}
impl SimpleState for PauseState
{
    fn on_start(&mut self, data: StateData<'_, GameData<'_, '_>>) {
        let world = data.world;
        // Set the global pause flag so gameplay systems stop updating.
        world.write_resource::<resources::PauseFlag>().toggle_paused();
        // Show the "PAUSED" message and remember it for cleanup.
        self.message = Some( display_pause_message(world));
    }
    fn on_stop(&mut self, data: StateData<'_, GameData<'_, '_>>)
    {
        let world = data.world;
        // Remove the paused text, if it was ever created.
        if let Some(message_entity) = self.message {
            world.delete_entity( message_entity ).expect("failed to delete message");
            self.message = None;
        }
        // Clear the global pause flag (toggle assumes it was set in on_start).
        world.write_resource::<resources::PauseFlag>().toggle_paused();
    }
    // Escape pops this state, returning to gameplay.
    fn handle_event(&mut self, _data: StateData<'_, GameData<'_, '_>>, event: StateEvent) -> SimpleTrans
    {
        let mut transition = Trans::None;
        if let StateEvent::Window(event) = &event {
            if is_key_down(&event, VirtualKeyCode::Escape) {
                transition = Trans::Pop;
            }
        }
        transition
    }
}
| true
|
3b3be686e1093557b7a24b69a868404539b7bf14
|
Rust
|
rust-lang/miri
|
/tests/pass/char.rs
|
UTF-8
| 163
| 2.765625
| 3
|
[
"MIT",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
/// Smoke test for `char` equality, ordering, and `u32` round-tripping.
fn main() {
    let c = 'x';
    assert_eq!(c, 'x');
    // Ordering follows Unicode scalar values.
    assert!('a' < 'z');
    assert!('1' < '9');
    // 'x' is a valid scalar value, so the conversion round-trips.
    assert_eq!(std::char::from_u32(u32::from('x')), Some('x'));
}
| true
|
8aa166e8b21502a265329f4ee98a2e8a3662a289
|
Rust
|
Caruso33/rust
|
/ultimate_rust_crash_course/exercise/z_final_project/src/lib.rs
|
UTF-8
| 222
| 2.875
| 3
|
[
"MIT"
] |
permissive
|
use std::fs::create_dir;
use std::path::Path;
/// Ensures `output_path` exists as a directory.
///
/// Treats an `AlreadyExists` error as success instead of pre-checking with
/// `exists()`, removing the check-then-create race of the original.
///
/// # Panics
/// Panics if the directory cannot be created for any other reason
/// (e.g. missing parent directory or insufficient permissions).
pub fn create_output_if_not_exist(output_path: &Path) {
    if let Err(e) = create_dir(output_path) {
        if e.kind() != std::io::ErrorKind::AlreadyExists {
            panic!("Can't create output directory: {}", e);
        }
    }
}
| true
|
ebb6299c51787534004c66604718b87e5e891561
|
Rust
|
Lawliet-Chan/phala-pruntime
|
/vendor/rustcrypto-utils/hex-literal/hex-literal-impl/src/lib.rs
|
UTF-8
| 1,446
| 3.0625
| 3
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
extern crate proc_macro;
use proc_macro::{TokenStream, TokenTree};
use proc_macro_hack::proc_macro_hack;
/// True for ASCII hex digits: `0-9`, `a-f`, `A-F`.
///
/// Replaces the deprecated `...` inclusive-range pattern syntax with the
/// equivalent std predicate (same character set).
fn is_hex_char(c: &char) -> bool {
    c.is_ascii_hexdigit()
}
/// True for the whitespace characters allowed between hex digits.
fn is_format_char(c: &char) -> bool {
    // Exactly space, CR, LF, and tab — deliberately not all of ASCII
    // whitespace (form feed, for example, stays invalid).
    *c == ' ' || *c == '\r' || *c == '\n' || *c == '\t'
}
/// Expands `hex!("...")` into a `[u8; N]` array literal.
///
/// Accepts exactly one string literal containing hex digit pairs,
/// optionally separated by spaces/tabs/newlines. Panics at compile time
/// on any other character or on an odd number of hex digits.
#[proc_macro_hack]
pub fn hex(input: TokenStream) -> TokenStream {
    let mut ts = input.into_iter();
    let input = match (ts.next(), ts.next()) {
        (Some(TokenTree::Literal(literal)), None) => literal.to_string(),
        _ => panic!("expected one string literal"),
    };
    let bytes = input.as_bytes();
    let n = bytes.len();
    // trim quote characters
    let input = &input[1..n-1];
    for (i, c) in input.chars().enumerate() {
        if !(is_hex_char(&c) || is_format_char(&c)) {
            panic!("invalid character (position {}): {:?})", i + 1, c);
        }
    };
    // Each byte renders as `0xHHu8,` — up to 7 source characters apiece.
    let n = input.chars().filter(is_hex_char).count() / 2;
    let mut s = String::with_capacity(2 + 7*n);
    s.push('[');
    let mut iter = input.chars().filter(is_hex_char);
    while let Some(c1) = iter.next() {
        if let Some(c2) = iter.next() {
            s += "0x";
            s.push(c1);
            s.push(c2);
            s += "u8,";
        } else {
            panic!("expected even number of hex characters");
        }
    }
    s.push(']');
    s.parse().unwrap()
}
| true
|
c44e6b99d6bda5afcba63a07e266a52eacbed8ea
|
Rust
|
rust-lang/rust
|
/tests/ui/parser/ident-recovery.rs
|
UTF-8
| 335
| 3.125
| 3
|
[
"Apache-2.0",
"LLVM-exception",
"NCSA",
"BSD-2-Clause",
"LicenseRef-scancode-unicode",
"MIT",
"LicenseRef-scancode-other-permissive"
] |
permissive
|
fn ,comma() {
//~^ ERROR expected identifier, found `,`
struct Foo {
x: i32,,
//~^ ERROR expected identifier, found `,`
y: u32,
}
}
fn break() {
//~^ ERROR expected identifier, found keyword `break`
let continue = 5;
//~^ ERROR expected identifier, found keyword `continue`
}
fn main() {}
| true
|
c4aa71bedce3cb8ea78a285d1f1462a04af6ca8d
|
Rust
|
alex-dranoel/advent-of-code
|
/2018/Day1_ChronalCalibration/rust/chronal_calibration/src/main.rs
|
UTF-8
| 954
| 3.453125
| 3
|
[] |
no_license
|
use std::{
fs::File,
io::{BufRead, BufReader},
path::Path,
str::FromStr,
fmt::Debug,
collections::HashSet,
};
/// Reads `filename` and parses each line into a `T`.
///
/// # Panics
/// Panics if the file cannot be opened or read, or if any line fails to
/// parse (the offending line and error are included in the message).
fn vec_from_file<T: FromStr>(filename: impl AsRef<Path>) -> Vec<T>
    where T::Err: Debug
{
    BufReader::new(File::open(filename).expect("Could not open file"))
        .lines()
        .map(|l| l.expect("Could not read line"))
        // `unwrap_or_else` defers building the panic message to the failure
        // path; the original `expect(&format!(..))` allocated it per line.
        .map(|l| l.parse().unwrap_or_else(|e| panic!("Could not parse line '{}': {:?}", l, e)))
        .collect()
}
/// Part 1: the resulting frequency is the sum of all changes applied once.
fn part_one(data: &[i32]) {
    let total: i32 = data.iter().sum();
    println!("Part 1: {}", total);
}
/// Part 2: cycle the change list until a cumulative frequency repeats,
/// then print the first repeated value.
fn part_two(data: &[i32]) {
    let mut seen = HashSet::new();
    let mut frequency = 0;
    for delta in data.iter().cycle() {
        frequency += delta;
        // Note: the starting value 0 is deliberately not recorded, matching
        // the original insert-and-test behavior.
        if seen.contains(&frequency) {
            break;
        }
        seen.insert(frequency);
    }
    println!("Part 2: {}", frequency);
}
/// Loads the puzzle input (one signed integer per line; the relative path
/// assumes running from the crate directory) and runs both parts.
fn main() {
    let data: Vec<i32> = vec_from_file("../../input.txt");
    part_one(&data);
    part_two(&data);
}
| true
|
b70b97419b055de6e6d3e741448ee5923b228333
|
Rust
|
heavypackets/rusty-datatypes
|
/examples/sigma_p.rs
|
UTF-8
| 2,156
| 3.015625
| 3
|
[] |
no_license
|
#![feature(try_from)]
#![feature(custom_attribute)]
extern crate rusty_dt;
use std::convert::TryFrom;
/// A day-of-year paired with its (approximate) month number.
/// The custom `sigma_p` attribute presumably drives the "Generated"
/// impls at the bottom of this file — TODO confirm against the macro.
#[derive(Debug)]
#[sigma_p(derive = "PartialEqA, Add")]
struct CalendarMonth(u32, u8);
impl std::convert::TryFrom<u32> for CalendarMonth {
    // On failure the rejected day value is handed back.
    type Error = u32;
    fn try_from(day: u32) -> std::result::Result<CalendarMonth, Self::Error> {
        // Approximate month: treats every month as 31 days.
        // NOTE(review): this drifts from real calendar months — confirm
        // the approximation is intended for this example.
        let m = (day / 31) + 1;
        if m <= 12 {
            if let Ok(m) = u8::try_from(m) {
                return Ok(CalendarMonth(day, m));
            }
        }
        Err(day)
    }
}
/// Parses a day number from the first CLI argument, converts it to a
/// `CalendarMonth`, and demonstrates the derived `Add` (which panics if
/// the sum is not a valid day — see the generated impl below).
fn main() {
    let day: u32 = std::env::args()
        .skip(1)
        .next()
        .unwrap()
        .parse::<u32>()
        .unwrap();
    if let Ok(month) = CalendarMonth::try_from(day) {
        println!("{:?}", month);
        let one_month = CalendarMonth::try_from(30).unwrap();
        let r = month + one_month;
        println!("{:?}", r);
    } else {
        eprintln!("Not a valid day in the calendar year")
    }
}
// ----------------------- Generated --------------------------
// NOTE(review): this section is labeled "Generated" — presumably the
// hand-written expansion of the `#[sigma_p]` attribute above; treat as
// machine output and regenerate rather than hand-edit.
impl rusty_dt::p::Singleton<u32> for CalendarMonth {}
impl rusty_dt::p::Sigma<u32, u8> for CalendarMonth {
    fn a(&self) -> &u32 {
        &self.0
    }
    fn b(&self) -> &u8 {
        &self.1
    }
}
impl std::ops::Deref for CalendarMonth {
    type Target = u32;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl std::convert::Into<u32> for CalendarMonth {
    fn into(self) -> u32 {
        self.0
    }
}
// Derived & Promoted Traits
// Equality compares only the first component (the day).
impl rusty_dt::p::PartialEqA<u32, u8> for CalendarMonth {}
impl std::cmp::PartialEq<Self> for CalendarMonth {
    fn eq(&self, other: &Self) -> bool {
        use rusty_dt::p::Sigma;
        *self.a() == *other.a()
    }
}
impl rusty_dt::p::Add<u32> for CalendarMonth {}
impl std::ops::Add for CalendarMonth {
    type Output = CalendarMonth;
    // Panics if the summed day is out of range; `panic!(format!(..))` is
    // deprecated style in newer rustc, but left as generated.
    fn add(self, other: Self) -> Self {
        use rusty_dt::p::Sigma;
        use std::convert::TryFrom;
        match Self::try_from(*self.a() + *other.a()) {
            Ok(s) => s,
            Err(p) => panic!(format!("Result of add operation refuted: {:?}", p)),
        }
    }
}
| true
|
b3bb36684ec30b4677a3ce61ed9685b5394ebea6
|
Rust
|
lonesometraveler/img-to-byte-array
|
/src/img2bytes.rs
|
UTF-8
| 1,967
| 3.1875
| 3
|
[
"MIT"
] |
permissive
|
//! # LCD bitmap
//!
//! Usage: cargo run path_to_image name_of_array > file_to_be_saved
//! Example: cargo run sample/arrow_up.png arrow_up > sample/arrow_up.h
use image::Luma;
use std::path::Path;
/// CLI configuration: the image path and the name of the C array to emit.
pub struct ImgToBytes {
    file: String,
    array_name: String,
}
impl ImgToBytes {
    /// Parses `argv` into a config.
    ///
    /// # Errors
    /// Returns a usage message if the image path or array name is missing.
    pub fn new(mut args: std::env::Args) -> Result<ImgToBytes, &'static str> {
        args.next(); // skip the first argument which is the name of the program
        let file = match args.next() {
            Some(arg) => arg,
            None => return Err("no file specified. Usage: cargo run path_to_image_folder name_of_array > filename_to_be_saved.h"),
        };
        let array_name = match args.next() {
            Some(arg) => arg,
            None => return Err("no array name specified. Usage: cargo run path_to_image_folder name_of_array > filename_to_be_saved.h")
        };
        Ok(ImgToBytes { file, array_name })
    }
    /// Runs the conversion, collapsing any failure into the string "error".
    pub fn run(&self) -> String {
        match print_array(Path::new(&self.file), &self.array_name) {
            Ok(f) => f,
            _ => String::from("error"),
        }
    }
}
/// Converts an image to a monochrome C byte-array declaration, packing
/// 8 pixels per byte (black → 0 bit, anything else → 1 bit).
///
/// NOTE(review): the rotate happens *after* a completed byte is emitted,
/// so the last pixel of each byte lands in bit 0 — confirm this matches
/// the target display's expected bit order. Also assumes width % 8 == 0.
fn print_array(path: &Path, array_name: &str) -> Result<String, &'static str> {
    // TODO: error handling
    let img = image::open(path).unwrap().to_luma();
    let (width, height) = img.dimensions();
    let mut byte: u8 = 0;
    let mut output = format!(
        "static const unsigned char {}[{}] = \r\n{{ // {}",
        array_name,
        (width / 8) * height,
        path.to_str().unwrap()
    );
    for (bit, pixel) in img.pixels().enumerate() {
        // Set or clear only the low bit; earlier pixels were rotated up.
        match pixel {
            Luma([0]) => byte &= 0xFE,
            _ => byte |= 0x01,
        }
        if bit % 8 == 7 {
            output = format!("{}0x{:02x},", output, byte);
            byte = 0;
        }
        // Start a new source line every 12 emitted bytes.
        if bit % (12 * 8) == 0 {
            output.push_str("\n\t")
        }
        byte = byte.rotate_left(1);
    }
    Ok(format!("{}\r\n}};", output))
}
| true
|
367ead1340ff454c5ac86b4ad3470b93a2d036bc
|
Rust
|
dstreet26/adventofcode
|
/2020/day5/src/main.rs
|
UTF-8
| 1,526
| 3.609375
| 4
|
[] |
no_license
|
use std::collections::HashSet;
use std::fs;
/// Decodes every boarding pass (7 row chars + 3 column chars) and prints
/// the highest seat id (`row * 8 + col`).
/// NOTE(review): "hightest" in the final output is a typo, but changing
/// it would alter the program's output text.
fn main() {
    let contents = fs::read_to_string("input.txt").expect("Couldn't read input file :(");
    let list: Vec<&str> = contents.split("\n").collect();
    let mut highest = 0;
    for i in &list {
        let x = get_front_or_back(&i[..7]);
        let y = get_left_or_right(&i[7..]);
        let id = (x * 8) + y;
        println!("id: {:?}", id);
        if id > highest {
            highest = id;
        }
    }
    println!("hightest id: {}", highest);
}
/// Binary-partitions rows 0..=127: 'B' keeps the upper half, 'F' the lower.
/// Returns the row the search converges on after 7 characters.
fn get_front_or_back(input: &str) -> u32 {
    let mut min = 0;
    let mut max = 127;
    let mut out = 0;
    for i in input.chars() {
        if i == 'B' {
            min = subdivide_back(min, max);
            out = min;
        } else if i == 'F' {
            max = subdivide_front(min, max);
            out = max;
        }
    }
    out
}
/// Binary-partitions columns 0..=7: 'R' keeps the upper half, 'L' the lower.
/// Returns the column the search converges on after 3 characters.
fn get_left_or_right(input: &str) -> u32 {
    let mut min = 0;
    let mut max = 7;
    let mut out = 0;
    for i in input.chars() {
        if i == 'R' {
            min = subdivide_back(min, max);
            out = min;
        } else if i == 'L' {
            max = subdivide_front(min, max);
            out = max;
        }
    }
    out
}
/// Upper-half step of the binary partition: returns the new lower bound,
/// or `max` itself once the range has collapsed to two values.
fn subdivide_back(min: u32, max: u32) -> u32 {
    let half = (max - min + 1) / 2;
    match half {
        1 => max,
        _ => min + half,
    }
}
/// Lower-half step of the binary partition: returns the new upper bound,
/// or `min` itself once the range has collapsed to two values.
fn subdivide_front(min: u32, max: u32) -> u32 {
    let half = (max - min + 1) / 2;
    match half {
        1 => min,
        _ => max - half,
    }
}
| true
|
d919f6dc34a5e3c09ca85f0e55ce9884ae2c5d54
|
Rust
|
gakonst/ethers-structopt
|
/src/lib.rs
|
UTF-8
| 2,296
| 2.78125
| 3
|
[] |
no_license
|
use ethers::{prelude::*, signers::coins_bip39::English};
use std::convert::TryFrom;
use std::str::FromStr;
use structopt::StructOpt;
// TODO: Add more options, e.g. for generic CLI-type calls
/// CLI options for connecting to an Ethereum node and deriving a signer.
#[derive(StructOpt, Debug, Clone)]
pub struct EthereumOpts {
    #[structopt(long = "eth.url", short, help = "The tracing / archival node's URL")]
    pub url: String,
    #[structopt(long = "eth.private_key", help = "Your private key string")]
    pub private_key: Option<String>,
    #[structopt(long = "eth.mnemonic", help = "Path to your mnemonic file")]
    pub mnemonic_path: Option<String>,
    // Used only when deriving a wallet from the mnemonic.
    #[structopt(
        long = "eth.hd_index",
        help = "your index in the standard hd path",
        default_value = "0"
    )]
    pub index: u32,
}
// TODO: Improve these so that we return a middleware trait object
use std::sync::Arc;
impl EthereumOpts {
    /// Builds an HTTP JSON-RPC provider from `--eth.url`.
    ///
    /// # Errors
    /// Fails if the URL cannot be parsed.
    pub fn provider(&self) -> color_eyre::Result<Arc<Provider<Http>>> {
        Ok(Arc::new(Provider::try_from(self.url.as_str())?))
    }

    /// Returns a [`LocalWallet`] corresponding to the provided private key or mnemonic
    ///
    /// # Errors
    /// Fails if the key/mnemonic cannot be parsed or read, or if neither
    /// `--eth.private_key` nor `--eth.mnemonic` was supplied. (The original
    /// `panic!`ed in that last case — a `Result`-returning API should
    /// report it as an error the caller can handle.)
    pub fn signer(&self) -> color_eyre::Result<LocalWallet> {
        if let Some(ref private_key) = self.private_key {
            Ok(LocalWallet::from_str(private_key)?)
        } else if let Some(ref mnemonic_path) = self.mnemonic_path {
            // Mnemonic files often end with a newline; strip all of them.
            let mnemonic = std::fs::read_to_string(mnemonic_path)?.replace("\n", "");
            Ok(MnemonicBuilder::<English>::default()
                .phrase(mnemonic.as_str())
                .index(self.index)?
                .build()?)
        } else {
            Err(color_eyre::eyre::eyre!(
                "expected either --eth.private_key or --eth.mnemonic"
            ))
        }
    }
}
/// CLI options controlling the Flashbots bundle bribe.
#[derive(StructOpt, Debug, Clone)]
pub struct FlashBotsOpts {
    #[structopt(
        long = "flashbots.bribe_receiver",
        help = "The address that will receive the bribe. Ideally it should be a smart contract with a block.coinbase transfer"
    )]
    pub bribe_receiver: Option<Address>,
    // Parsed from a decimal string; see `parse_u256` below.
    #[structopt(long = "flashbots.bribe", parse(from_str = parse_u256), help = "The amount to be sent to the miner")]
    pub bribe: Option<U256>,
    // TODO: Any extra function arguments to specify? Maybe a function selector + more args a-la
    // seth?
}
/// Parses a decimal string into a `U256`.
/// NOTE(review): panics on invalid input — tolerated only because
/// structopt's `parse(from_str = ...)` requires an infallible converter;
/// `try_from_str` would allow a graceful error instead.
fn parse_u256(s: &str) -> U256 {
    U256::from_dec_str(s).unwrap()
}
| true
|
8cb64208254fe996b6f9e2873b843febe7505126
|
Rust
|
erismart/ErlangRT
|
/src/term/raw/rcons.rs
|
UTF-8
| 1,185
| 3.03125
| 3
|
[] |
no_license
|
use defs::Word;
use term::lterm::LTerm;
/// Mutable view over the two machine words of a cons cell (head, tail).
pub struct RawConsMut {
    p: *mut Word,
}
impl RawConsMut {
    /// Wraps a raw pointer to the head word of a cons cell.
    /// NOTE(review): the type cannot enforce that `p` points at two valid,
    /// writable `Word`s — callers must guarantee it.
    pub fn from_pointer(p: *mut Word) -> RawConsMut {
        RawConsMut { p }
    }
    // SAFETY (caller): `self.p` must be valid for writing one `Word`.
    pub unsafe fn set_hd(&self, val: LTerm) {
        *self.p = val.raw()
    }
    // SAFETY (caller): `self.p + 1` must be valid for writing one `Word`.
    pub unsafe fn set_tl(&self, val: LTerm) {
        *self.p.offset(1) = val.raw()
    }
    /// Tags the cell pointer as a cons term.
    pub fn make_cons(&self) -> LTerm {
        LTerm::make_cons(self.p)
    }
    // /// Peek into the cons, and get head value.
    // pub unsafe fn hd(&self) -> LTerm {
    //   LTerm::from_raw(*self.p)
    // }
    // /// Peek into the cons, and get tail value.
    // pub unsafe fn tl(&self) -> LTerm {
    //   LTerm::from_raw(*(self.p.offset(1)))
    // }
}
/// Read-only view over the two machine words of a cons cell (head, tail).
pub struct RawCons {
    p: *const Word,
}
impl RawCons {
    /// Wraps a raw pointer to the head word of a cons cell.
    /// NOTE(review): validity of `p` (two readable `Word`s) is on the caller.
    pub fn from_pointer(p: *const Word) -> RawCons {
        RawCons { p }
    }
    // pub fn raw_pointer(&self) -> *const Word { self.p }
    // pub fn make_cons(&self) -> LTerm {
    //   LTerm::make_cons(self.p)
    // }
    /// Peek into the cons, and get head value.
    // SAFETY (caller): `self.p` must be valid for reading one `Word`.
    pub unsafe fn hd(&self) -> LTerm {
        LTerm::from_raw(*self.p)
    }
    /// Peek into the cons, and get tail value.
    // SAFETY (caller): `self.p + 1` must be valid for reading one `Word`.
    pub unsafe fn tl(&self) -> LTerm {
        LTerm::from_raw(*(self.p.offset(1)))
    }
}
| true
|
df248cd021eb93227c844fffcbc9ae14fc4ae276
|
Rust
|
daviswahl/monkey_rs
|
/src/sandbox.rs
|
UTF-8
| 1,828
| 3.5625
| 4
|
[] |
no_license
|
// NOTE(review): `Node` is self-referential by value (`&'a Node<'a>` with
// no base-case variant), so no instance can ever be constructed — this
// file appears to be exploratory scratch code.
struct Node<'a> {
    token: Token<'a>,
    node: &'a Node<'a>
}

// A token borrows its literal bytes from some backing buffer.
#[derive(Clone, Copy)]
struct Token<'a> {
    literal: &'a [u8],
}

/// Owns the byte strings that `Token`s borrow from.
struct LexerCache {
    strings: Vec<Vec<u8>>
}

impl LexerCache {
    // Appends a string; returns `&Self` (not the stored string) —
    // callers fetch it back via `last_string`.
    fn push_string(&mut self, s: Vec<u8>) -> &Self {
        self.strings.push(s);
        self
    }
    // Panics if no string was ever pushed.
    fn last_string(& self) -> &Vec<u8> {
        self.strings.last().unwrap()
    }
}
// NOTE(review): `Lexer` tries to store tokens borrowing from its own
// `cache` field (a self-referential struct), and `next_token` takes
// `&'b mut self` — which borrows the lexer for its entire lifetime, so a
// second call cannot compile. Scratch code demonstrating the problem.
struct Lexer<'b> {
    data: Vec<u8>,
    cache: LexerCache,
    cur_pos: usize,
    cur_token: Option<Token<'b>>
}

impl <'b>Lexer<'b> {
    fn new(mut cache: LexerCache, data: String) -> Lexer<'b> {
        let d = data.into_bytes();
        Lexer{data: d, cache: cache, cur_pos: 0, cur_token: None}
    }
    // Returns the *previous* token; the freshly read slice becomes the
    // new current token, backed by a copy pushed into the cache.
    pub fn next_token(&'b mut self, len: usize) -> Option<Token<'b>> {
        self.cur_pos += 1;
        let data = self.data[self.cur_pos..self.cur_pos+len].to_owned();
        self.cache.push_string(data);
        let s = self.cache.last_string();
        let cur_token = self.cur_token;
        self.cur_token = Some(Token{literal: s});
        cur_token
    }
}
/// Returns the last `Some` value in `v`, or `""` if there is none.
///
/// The original body referenced `tmp` and an `Object` enum that are not
/// defined anywhere in this file, and its elided output lifetime was
/// ambiguous, so it could not compile. This keeps the apparent intent —
/// track the most recent present value (with room to add an early-return
/// path later) — in a form that compiles.
fn wtf<'a>(v: &'a Vec<Option<&'a str>>) -> Result<&'a str, &'a str> {
    let mut result = "";
    for s in v {
        // `Option<&str>` is Copy, so deref to get the inner `&str` directly.
        if let Some(st) = *s {
            println!("entered: {:?}", st);
            result = st;
        }
    }
    Ok(result)
}
#[cfg(test)]
mod tests{
use super::*;
#[test]
fn test_lexer() {
let mut cache = LexerCache{strings: vec![]};
let mut l = Lexer::new(cache, String::from("asdflkjsdfasdf"));
let mut tokens: Vec<Token> = vec![];
l.next_token(1).map(|t| tokens.push(t));
}
}
| true
|
10b875205d3b4a78c12459d1be9315fbb8ea34e0
|
Rust
|
getsentry/symbolic
|
/symbolic-ppdb/src/lib.rs
|
UTF-8
| 2,934
| 3.03125
| 3
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
//! Provides support for reading Portable PDB files,
//! specifically line information resolution for functions.
//!
//! [Portable PDB](https://github.com/dotnet/runtime/blob/main/docs/design/specs/PortablePdb-Metadata.md)
//! is a debugging information file format for Common Language Infrastructure (CLI) languages.
//! It is an extension of the [ECMA-335 format](https://www.ecma-international.org/wp-content/uploads/ECMA-335_6th_edition_june_2012.pdf).
//!
//! # Functionality
//!
//! * Parse Portable PDB files with [`PortablePdb::parse`].
//! * Convert Portable PDB files to [`PortablePdbCaches`](PortablePdbCache) with
//! [`PortablePdbCacheConverter::process_portable_pdb`].
//! * Serialize `PortablePdbCaches` with [`PortablePdbCacheConverter::serialize`]
//! and parse them with [`PortablePdbCache::parse`].
//! * Look up line information for a function on a `PortablePdbCache` with
//! [`PortablePdbCache::lookup`].
//!
//! ## Example
//! ```
//! use symbolic_testutils::fixture;
//! use symbolic_ppdb::{LineInfo, PortablePdb, PortablePdbCacheConverter, PortablePdbCache};
//!
//! let buf = std::fs::read(fixture("windows/portable.pdb")).unwrap();
//! let pdb = PortablePdb::parse(&buf).unwrap();
//!
//! let mut converter = PortablePdbCacheConverter::new();
//! converter.process_portable_pdb(&pdb).unwrap();
//! let mut buf = Vec::new();
//! converter.serialize(&mut buf).unwrap();
//!
//! let cache = PortablePdbCache::parse(&buf).unwrap();
//! let line_info = cache.lookup(7, 10).unwrap();
//! assert_eq!(line_info.line, 81);
//! ```
//!
//! # Structure of a Portable PDB file
//!
//! An ECMA-335 file is divided into sections called _streams_. The possible streams are:
//!
//! * `#~` ("metadata"), comprising information about classes, methods, modules, &c.,
//! organized into tables adhering to various schemas. The original ECMA-335 tables
//! are described in Section II.22 of the ECMA-335 spec, the tables added by Portable PDB are described
//! in the Portable PDB spec.
//! The [`MethodDebugInformation`](https://github.com/dotnet/runtime/blob/main/docs/design/specs/PortablePdb-Metadata.md#methoddebuginformation-table-0x31)
//! table is of particular interest to `symbolic`, as it contains
//! line information for functions.
//! * `#Strings`, comprising null-terminated UTF-8 strings.
//! * `#GUID`, a list of GUIDs.
//! * `#US` ("user strings"), comprising UTF-16 encoded strings.
//! * `#Blob`, comprising blobs of data that don't fit in any of the other streams.
//!
//! The Portable PDB format extends ECMA-335 by the addition of another stream, `#PDB`, as well
//! as several tables to the `#~` stream.
#![warn(missing_docs)]
// `cache` builds/queries the lookup cache; `format` parses the Portable
// PDB container itself.
mod cache;
mod format;
// Public API surface re-exported at the crate root.
pub use cache::lookup::LineInfo;
pub use cache::writer::PortablePdbCacheConverter;
pub use cache::{CacheError, CacheErrorKind, PortablePdbCache};
pub use format::{Document, EmbeddedSource, FormatError, FormatErrorKind, PortablePdb};
| true
|
33189217e6c70ba9b57e205e987d117a87e831f0
|
Rust
|
RickdeJager/AdventOfCode2020
|
/day14/src/main.rs
|
UTF-8
| 2,582
| 3.140625
| 3
|
[] |
no_license
|
use regex::Regex;
use std::collections::HashMap;
/// Part 1: apply the current bitmask to every written value.
/// `mask_or` (X→0) forces the mask's 1-bits on; `mask_and` (X→1) passes
/// the floating bits through and forces the mask's 0-bits off.
fn part1() -> u64 {
    let mut mem = HashMap::new();
    let re = Regex::new(r"mem\[(?P<idx>\d+)\] = (?P<num>\d+)").unwrap();
    let mut mask_or : u64 = 0;
    let mut mask_and: u64 = 0;
    for line in include_str!("input.txt").lines() {
        // Dispatch on the first four characters: "mask" or "mem[".
        match &line[..4] {
            "mask" => {
                mask_or = u64::from_str_radix(&line[7..].replace("X", "0"), 2).unwrap();
                mask_and = u64::from_str_radix(&line[7..].replace("X", "1"), 2).unwrap();
            },
            "mem[" => {
                let cap = re.captures(line).unwrap();
                let idx = cap.name("idx").unwrap().as_str().parse::<u64>().unwrap();
                let num = cap.name("num").unwrap().as_str().parse::<u64>().unwrap();
                mem.insert(
                    idx,
                    (num | mask_or) & mask_and,
                );
            },
            _ => panic!("wtf?"),
        }
    }
    // Answer: sum of all values left in memory.
    mem.iter().map(|(_, val)| *val).sum()
}
/// Part 2: the mask applies to *addresses*; each X bit floats, so one
/// write fans out to 2^(#X) addresses.
/// `mask` carries the forced 1-bits; `floats` holds one single-bit value
/// per X. For each subset `w` of the floats, the target address is
/// `idx` with all floating bits cleared (`idx ^ (idx & t)`), the forced
/// bits set (`| mask`), and the chosen subset set (`| s`).
fn part2() -> u64 {
    let mut mem = HashMap::<u64, u64>::new();
    let re = Regex::new(r"mem\[(?P<idx>\d+)\] = (?P<num>\d+)").unwrap();
    let mut mask : u64 = 0;
    let mut floats: Vec<u64> = vec![];
    for line in include_str!("input.txt").lines() {
        match &line[..4] {
            "mask" => {
                mask = u64::from_str_radix(&line[7..].replace("X", "0"), 2).unwrap();
                floats = line[7..].chars().rev().enumerate().filter(|(_, c)| *c == 'X')
                    .map(|(i, _)| 1 << i).collect::<Vec<u64>>();
            },
            "mem[" => {
                let cap = re.captures(line).unwrap();
                let idx = cap.name("idx").unwrap().as_str().parse::<u64>().unwrap();
                let num = cap.name("num").unwrap().as_str().parse::<u64>().unwrap();
                // t = union of all floating bits.
                let t:u64 = floats.iter().sum();
                for w in 0..(1u64 << floats.len()) {
                    // s = the floating bits selected by subset index w.
                    let s: u64 = floats
                        .iter().enumerate()
                        .filter(|(i, _)| w & (1 << i) != 0)
                        .map(|(_, c)| *c).sum();
                    mem.insert(
                        idx^(idx&t) | mask | s,
                        num,
                    );
                }
            },
            _ => panic!("wtf?"),
        }
    }
    mem.iter().map(|(_, val)| *val).sum()
}
/// Runs both puzzle parts against the embedded `input.txt`.
fn main() {
    println!("===== Part 1 ====");
    println!("The first number is {}", part1());
    println!("===== Part 2 ====");
    println!("The second number is {}", part2());
}
| true
|
37bdc841627d0bfe85623f4fbc582ae6ff4ff0f6
|
Rust
|
first-rust-competition/first-rust-competition
|
/wpilib/src/i2c.rs
|
UTF-8
| 5,910
| 3.125
| 3
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
use std::cmp;
use std::io;
use wpilib_sys::*;
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[repr(i32)]
pub enum Port {
Onboard = HAL_I2CPort::HAL_I2C_kOnboard,
MXP = HAL_I2CPort::HAL_I2C_kMXP,
}
pub struct I2C {
port: Port,
device_address: u16,
}
impl I2C {
/// Constructs a new I2C
///
/// `port` is the I2C port to which the device is connected, and `device_address` is the address of the device on the bus
pub fn new(port: Port, device_address: u16) -> HalResult<Self> {
hal_call!(HAL_InitializeI2C(port as HAL_I2CPort::Type))?;
usage::report(usage::resource_types::I2C, 0);
Ok(I2C {
port,
device_address,
})
}
/// Generic transaction.
///
/// This is a lower-level interface to the I2C hardware giving you more control
/// over each transaction.
///
/// This function will send all the bytes in `data_to_send` and will read data into `data_received`. Callers should make sure these buffers are sized appropriately
///
/// Returns a result based on whether the transaction was successful
pub fn transaction(&self, data_to_send: &[u8], data_received: &mut [u8]) -> io::Result<usize> {
let status = unsafe {
HAL_TransactionI2C(
self.port as HAL_I2CPort::Type,
i32::from(self.device_address),
data_to_send.as_ptr(),
data_to_send.len() as i32,
data_received.as_mut_ptr(),
data_received.len() as i32,
)
};
io_result(status)
}
/// Attempt to address a device on the I2C bus.
///
/// This allows you to figure out if there is a device on the I2C bus that
/// responds to the address specified in the constructor.
///
/// Returns a result based on whether the transaction was successful
pub fn address_only(&self) -> io::Result<usize> {
self.transaction(&[], &mut [])
}
/// Execute a write transaction with the device.
///
/// Write a single byte to a register on a device and wait until the
/// transaction is complete.
///
/// Returns a result based on whether the transaction was successful
pub fn write(&self, register_address: u8, data: u8) -> io::Result<usize> {
let buf = [register_address, data];
let status = unsafe {
HAL_WriteI2C(
self.port as HAL_I2CPort::Type,
i32::from(self.device_address),
buf.as_ptr(),
buf.len() as i32,
)
};
io_result(status)
}
/// Execute a bulk write transaction with the device.
///
/// Write multiple bytes to a device and wait until the
/// transaction is complete.
///
/// Returns a result based on whether the transaction was successful
pub fn write_bulk(&self, data: &[u8]) -> io::Result<usize> {
let status = unsafe {
HAL_WriteI2C(
self.port as HAL_I2CPort::Type,
i32::from(self.device_address),
data.as_ptr(),
data.len() as i32,
)
};
io_result(status)
}
/// Execute a read transaction with the device.
///
/// Read bytes from a device.
/// Most I2C devices will auto-increment the register pointer internally
/// allowing you to read consecutive registers on a device in a single
/// transaction.
///
/// Returns a result based on whether the transaction was successful
pub fn read(&self, register_address: i32, buf: &mut [u8]) -> io::Result<usize> {
if buf.is_empty() {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Write buffer length < 1",
));
}
self.transaction(&[register_address as u8], buf)
}
/// Execute a read only transaction with the device.
///
/// Read bytes from a device. This method does not write any data to prompt the
/// device.
///
/// Returns a result based on whether the transaction was successful
pub fn read_only(&self, buf: &mut [u8]) -> io::Result<usize> {
let status = unsafe {
HAL_ReadI2C(
self.port as HAL_I2CPort::Type,
i32::from(self.device_address),
buf.as_mut_ptr(),
buf.len() as i32,
)
};
io_result(status)
}
/// Verify that a device's registers contain expected values.
///
/// Most devices will have a set of registers that contain a known value that
/// can be used to identify them. This allows an I2C device driver to easily
/// verify that the device contains the expected value.
///
/// The device must support and be configured to use register
/// auto-increment.
pub fn verify_sensor(&self, register_address: i32, expected: &[u8]) -> bool {
// (register_address..).step_by(4) gets truncated to the length of the first iter when we zip
for (i, cur_register_address) in (0..expected.len())
.step_by(4)
.zip((register_address..).step_by(4))
{
let to_read = cmp::min(4, expected.len() - i);
let mut buf = vec![0; to_read];
if self.read(cur_register_address, &mut buf[..]).is_err() {
return false;
}
for j in 0..to_read {
if buf[j] != expected[i + j] {
return false;
}
}
}
true
}
}
impl Drop for I2C {
fn drop(&mut self) {
unsafe {
HAL_CloseI2C(self.port as HAL_I2CPort::Type);
}
}
}
fn io_result(rv: i32) -> io::Result<usize> {
if rv < 0 {
Err(io::Error::last_os_error())
} else {
Ok(rv as usize)
}
}
| true
|
1f2d7c709ecb37a992912b0903018bf5c13298eb
|
Rust
|
redtankd/project-euler
|
/src/bin/00037.rs
|
UTF-8
| 1,594
| 3.171875
| 3
|
[] |
no_license
|
use std::collections::BTreeSet;
use project_euler::is_prime;
#[cfg(not(test))]
fn main() {
let t = project_euler::start_timer();
println!("\nsolution:");
println!("The answer is {:?}\n", s1());
project_euler::stop_timer(t);
}
fn s1() -> u32 {
(11..)
.scan(
vec![2, 3, 5, 7].into_iter().collect::<BTreeSet<u32>>(),
|primes, x: u32| {
if is_prime(x as u64) {
primes.insert(x);
// truncate from left to right
let mut str = x.to_string();
for _ in 1..str.len() {
str = str.split_off(1);
if !primes.contains(&str.parse::<u32>().unwrap()) {
return Some(0);
}
}
// truncate from right to left
let mut str = x.to_string();
for _ in 1..str.len() {
let len = str.len() - 1;
let _ = str.split_off(len);
if !primes.contains(&str.parse::<u32>().unwrap()) {
return Some(0);
}
}
println!("{:?}", x);
Some(x)
} else {
Some(0)
}
},
)
.filter(|&x| x != 0)
.take(11)
.sum()
}
#[cfg(test)]
mod tests {
use super::s1;
#[test]
fn test_s1() {
assert_eq!(748317, s1());
}
}
| true
|
98abf5fcbf33210b9fd44bd880a33a22ecea6c39
|
Rust
|
marco-c/gecko-dev-wordified
|
/third_party/rust/derive_arbitrary/src/field_attributes.rs
|
UTF-8
| 3,439
| 2.71875
| 3
|
[
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"Apache-2.0"
] |
permissive
|
use
crate
:
:
ARBITRARY_ATTRIBUTE_NAME
;
use
proc_macro2
:
:
{
Span
TokenStream
TokenTree
}
;
use
quote
:
:
quote
;
use
syn
:
:
{
spanned
:
:
Spanned
*
}
;
/
/
/
Determines
how
a
value
for
a
field
should
be
constructed
.
#
[
cfg_attr
(
test
derive
(
Debug
)
)
]
pub
enum
FieldConstructor
{
/
/
/
Assume
that
Arbitrary
is
defined
for
the
type
of
this
field
and
use
it
(
default
)
Arbitrary
/
/
/
Places
Default
:
:
default
(
)
as
a
field
value
.
Default
/
/
/
Use
custom
function
or
closure
to
generate
a
value
for
a
field
.
With
(
TokenStream
)
/
/
/
Set
a
field
always
to
the
given
value
.
Value
(
TokenStream
)
}
pub
fn
determine_field_constructor
(
field
:
&
Field
)
-
>
Result
<
FieldConstructor
>
{
let
opt_attr
=
fetch_attr_from_field
(
field
)
?
;
let
ctor
=
match
opt_attr
{
Some
(
attr
)
=
>
parse_attribute
(
attr
)
?
None
=
>
FieldConstructor
:
:
Arbitrary
}
;
Ok
(
ctor
)
}
fn
fetch_attr_from_field
(
field
:
&
Field
)
-
>
Result
<
Option
<
&
Attribute
>
>
{
let
found_attributes
:
Vec
<
_
>
=
field
.
attrs
.
iter
(
)
.
filter
(
|
a
|
{
let
path
=
a
.
path
(
)
;
let
name
=
quote
!
(
#
path
)
.
to_string
(
)
;
name
=
=
ARBITRARY_ATTRIBUTE_NAME
}
)
.
collect
(
)
;
if
found_attributes
.
len
(
)
>
1
{
let
name
=
field
.
ident
.
as_ref
(
)
.
unwrap
(
)
;
let
msg
=
format
!
(
"
Multiple
conflicting
#
[
{
ARBITRARY_ATTRIBUTE_NAME
}
]
attributes
found
on
field
{
name
}
"
)
;
return
Err
(
syn
:
:
Error
:
:
new
(
field
.
span
(
)
msg
)
)
;
}
Ok
(
found_attributes
.
into_iter
(
)
.
next
(
)
)
}
fn
parse_attribute
(
attr
:
&
Attribute
)
-
>
Result
<
FieldConstructor
>
{
if
let
Meta
:
:
List
(
ref
meta_list
)
=
attr
.
meta
{
parse_attribute_internals
(
meta_list
)
}
else
{
let
msg
=
format
!
(
"
#
[
{
ARBITRARY_ATTRIBUTE_NAME
}
]
must
contain
a
group
"
)
;
Err
(
syn
:
:
Error
:
:
new
(
attr
.
span
(
)
msg
)
)
}
}
fn
parse_attribute_internals
(
meta_list
:
&
MetaList
)
-
>
Result
<
FieldConstructor
>
{
let
mut
tokens_iter
=
meta_list
.
tokens
.
clone
(
)
.
into_iter
(
)
;
let
token
=
tokens_iter
.
next
(
)
.
ok_or_else
(
|
|
{
let
msg
=
format
!
(
"
#
[
{
ARBITRARY_ATTRIBUTE_NAME
}
]
cannot
be
empty
.
"
)
;
syn
:
:
Error
:
:
new
(
meta_list
.
span
(
)
msg
)
}
)
?
;
match
token
.
to_string
(
)
.
as_ref
(
)
{
"
default
"
=
>
Ok
(
FieldConstructor
:
:
Default
)
"
with
"
=
>
{
let
func_path
=
parse_assigned_value
(
"
with
"
tokens_iter
meta_list
.
span
(
)
)
?
;
Ok
(
FieldConstructor
:
:
With
(
func_path
)
)
}
"
value
"
=
>
{
let
value
=
parse_assigned_value
(
"
value
"
tokens_iter
meta_list
.
span
(
)
)
?
;
Ok
(
FieldConstructor
:
:
Value
(
value
)
)
}
_
=
>
{
let
msg
=
format
!
(
"
Unknown
option
for
#
[
{
ARBITRARY_ATTRIBUTE_NAME
}
]
:
{
token
}
"
)
;
Err
(
syn
:
:
Error
:
:
new
(
token
.
span
(
)
msg
)
)
}
}
}
/
/
Input
:
/
/
=
2
+
2
/
/
Output
:
/
/
2
+
2
fn
parse_assigned_value
(
opt_name
:
&
str
mut
tokens_iter
:
impl
Iterator
<
Item
=
TokenTree
>
default_span
:
Span
)
-
>
Result
<
TokenStream
>
{
let
eq_sign
=
tokens_iter
.
next
(
)
.
ok_or_else
(
|
|
{
let
msg
=
format
!
(
"
Invalid
syntax
for
#
[
{
ARBITRARY_ATTRIBUTE_NAME
}
]
{
opt_name
}
is
missing
assignment
.
"
)
;
syn
:
:
Error
:
:
new
(
default_span
msg
)
}
)
?
;
if
eq_sign
.
to_string
(
)
=
=
"
=
"
{
Ok
(
tokens_iter
.
collect
(
)
)
}
else
{
let
msg
=
format
!
(
"
Invalid
syntax
for
#
[
{
ARBITRARY_ATTRIBUTE_NAME
}
]
expected
=
after
{
opt_name
}
got
:
{
eq_sign
}
"
)
;
Err
(
syn
:
:
Error
:
:
new
(
eq_sign
.
span
(
)
msg
)
)
}
}
| true
|
c85bde34e3e006b6f54b78d891264c789553edcc
|
Rust
|
guangie88/rs-mega-coll
|
/src/util/process.rs
|
UTF-8
| 1,584
| 2.609375
| 3
|
[] |
no_license
|
use error::custom::{CodeMsgError, MsgError};
use error::{Error, ErrorKind};
use failure::{Context, Fail, ResultExt};
use std::fmt::Debug;
use std::io::Read;
use std::process::{Child, ChildStdout, Output};
pub fn extract_child_stdout<K>(child: Child) -> Result<ChildStdout, Error<K>>
where
K: From<ErrorKind> + Copy + Clone + Eq + PartialEq + Debug + Fail,
{
let (stdout, stderr) = (child.stdout, child.stderr);
let stdout = stdout.ok_or_else(|| {
let msg_err = stderr
.ok_or_else(|| -> Error<K> {
Context::new(ErrorKind::StderrEmpty).into()
})
.and_then(|mut bytes| -> Result<Error<K>, Error<K>> {
let mut msg = String::new();
bytes
.read_to_string(&mut msg)
.context(ErrorKind::StderrRead)?;
Ok(MsgError::new(msg).context(ErrorKind::StderrValidMsg).into())
});
match msg_err {
Ok(e) | Err(e) => e,
}
})?;
Ok(stdout)
}
pub fn extract_output_stdout_str<K>(output: Output) -> Result<String, Error<K>>
where
K: From<ErrorKind> + Copy + Clone + Eq + PartialEq + Debug + Fail,
{
let output = if output.status.success() {
String::from_utf8(output.stdout)
.context(ErrorKind::StdoutUtf8Conversion)
} else {
let msg = String::from_utf8(output.stderr)
.context(ErrorKind::StderrUtf8Conversion)?;
Err(CodeMsgError::new(output.status.code(), msg))
.context(ErrorKind::ChildOutput)
}?;
Ok(output)
}
| true
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.