text stringlengths 8 4.13M |
|---|
// Copyright (c) 2016-2017 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
mod health;
mod spec;
mod config;
pub mod hooks;
use std;
use std::collections::HashMap;
use std::env;
use std::fmt;
use std::fs::File;
use std::io::BufWriter;
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::result;
use std::str::FromStr;
use std::sync::{Arc, RwLock, RwLockReadGuard};
use std::time::{Duration, Instant};
use ansi_term::Colour::{Yellow, Red, Green};
use butterfly;
use butterfly::rumor::service::Service as ServiceRumor;
use common::ui::UI;
use hcore::fs::FS_ROOT_PATH;
use hcore::service::ServiceGroup;
use hcore::crypto::hash;
use hcore::package::{PackageIdent, PackageInstall};
use hcore::util::deserialize_using_from_str;
use hcore::util::perm::{set_owner, set_permissions};
use serde;
use toml;
use self::hooks::{HOOK_PERMISSIONS, Hook, HookTable};
use config::GossipListenAddr;
use error::{Error, Result, SupError};
use http_gateway;
use fs;
use manager::{self, signals};
use census::{CensusRing, ElectionStatus};
use supervisor::{Supervisor, RuntimeConfig};
use util;
pub use self::config::{ServiceConfig, Pkg};
pub use self::health::{HealthCheck, SmokeCheck};
pub use self::spec::{DesiredState, ServiceBind, ServiceSpec, StartStyle};
// Log key used by the output/error macros to tag messages from this module.
static LOGKEY: &'static str = "SR";

lazy_static! {
    // Minimum interval between two consecutive health-check runs for a
    // service (see `execute_hooks`); `last_health_check` is seeded in `new()`
    // so the first check fires immediately.
    static ref HEALTH_CHECK_INTERVAL: Duration = {
        Duration::from_millis(30_000)
    };
}
/// A single supervised Habitat service: its package, rendered configuration,
/// hooks, and the runtime bookkeeping used by the manager's run loop.
#[derive(Debug, Serialize)]
pub struct Service {
    // Rendered service configuration (package defaults + gossip).
    pub config: ServiceConfig,
    // Gossiped service files already written, keyed filename -> incarnation.
    current_service_files: HashMap<String, u64>,
    // Depot URL used to install/update this service's package.
    pub depot_url: String,
    health_check: HealthCheck,
    // True once `initialize()` has completed; cleared by `update_package`.
    initialized: bool,
    // Last observed election status; lets `tick` log transitions only once.
    last_election_status: ElectionStatus,
    // Set when the service must be reloaded / reconfigured on the next pass
    // through `execute_hooks`.
    needs_reload: bool,
    needs_reconfiguration: bool,
    // Installed package, shared with the supervisor behind a lock.
    #[serde(serialize_with="serialize_lock")]
    package: Arc<RwLock<PackageInstall>>,
    pub service_group: ServiceGroup,
    smoke_check: SmokeCheck,
    // Path of the spec file this service was loaded from.
    pub spec_file: PathBuf,
    pub spec_ident: PackageIdent,
    pub start_style: StartStyle,
    pub topology: Topology,
    pub update_strategy: UpdateStrategy,
    // Binds requested by the spec; checked against the census before init.
    #[serde(skip_serializing)]
    spec_binds: Vec<ServiceBind>,
    hooks: HookTable,
    // Alternate config-template source, when the spec overrides the package.
    config_from: Option<PathBuf>,
    // Time of the last health check; backdated in `new()` so the first tick
    // runs a check immediately.
    #[serde(skip_serializing)]
    last_health_check: Instant,
    #[serde(skip_serializing)]
    manager_fs_cfg: Arc<manager::FsCfg>,
    supervisor: Supervisor,
}
impl Service {
    /// Validates `spec` against the installed `package` and assembles a new
    /// `Service`, wiring up its rendered config, hook table, and supervisor.
    fn new(package: PackageInstall,
           spec: ServiceSpec,
           gossip_listen: &GossipListenAddr,
           http_listen: &http_gateway::ListenAddr,
           manager_fs_cfg: Arc<manager::FsCfg>,
           organization: Option<&str>)
           -> Result<Service> {
        spec.validate(&package)?;
        let spec_file = manager_fs_cfg.specs_path.join(spec.file_name());
        let service_group = ServiceGroup::new(&package.ident().name, spec.group, organization)?;
        let runtime_cfg = Self::runtime_config_from(&package)?;
        // Config templates come from the package unless the spec points elsewhere.
        let config_root = spec.config_from
            .clone()
            .unwrap_or(package.installed_path.clone());
        let svc_cfg = ServiceConfig::new(&package,
                                         &runtime_cfg,
                                         config_root,
                                         spec.binds.clone(),
                                         &gossip_listen,
                                         &http_listen)?;
        let hook_template_path = svc_cfg.config_root.join("hooks");
        let hooks_path = fs::svc_hooks_path(service_group.service());
        // The package is shared between the service and its supervisor.
        let locked_package = Arc::new(RwLock::new(package));
        Ok(Service {
            config: svc_cfg,
            current_service_files: HashMap::new(),
            depot_url: spec.depot_url,
            health_check: HealthCheck::default(),
            hooks: HookTable::default().load_hooks(&service_group,
                                                   &hooks_path,
                                                   &hook_template_path),
            initialized: false,
            last_election_status: ElectionStatus::None,
            needs_reload: false,
            needs_reconfiguration: false,
            manager_fs_cfg: manager_fs_cfg,
            supervisor: Supervisor::new(locked_package.clone(), &service_group, runtime_cfg),
            package: locked_package,
            service_group: service_group,
            smoke_check: SmokeCheck::default(),
            spec_binds: spec.binds,
            spec_ident: spec.ident,
            spec_file: spec_file,
            start_style: spec.start_style,
            topology: spec.topology,
            update_strategy: spec.update_strategy,
            config_from: spec.config_from,
            // Backdate so the first tick runs a health check immediately.
            last_health_check: Instant::now() - *HEALTH_CHECK_INTERVAL,
        })
    }
fn runtime_config_from(package: &PackageInstall) -> Result<RuntimeConfig> {
let (svc_user, svc_group) = util::users::get_user_and_group(&package)?;
let mut env = match package.runtime_environment() {
Ok(r) => r,
Err(e) => return Err(sup_error!(Error::HabitatCore(e))),
};
// FIXME: Devise a way to make OS independent so we don't have to muck with env.
Self::run_path(&mut env)?;
Ok(RuntimeConfig::new(svc_user, svc_group, env))
}
pub fn load(spec: ServiceSpec,
gossip_listen: &GossipListenAddr,
http_listen: &http_gateway::ListenAddr,
manager_fs_cfg: Arc<manager::FsCfg>,
organization: Option<&str>)
-> Result<Service> {
let mut ui = UI::default();
let package = match PackageInstall::load(&spec.ident, Some(&Path::new(&*FS_ROOT_PATH))) {
Ok(package) => {
match spec.update_strategy {
UpdateStrategy::AtOnce => {
try!(util::pkg::maybe_install_newer(&mut ui, &spec, package))
}
UpdateStrategy::None | UpdateStrategy::Rolling => package,
}
}
Err(_) => {
outputln!("Package {} not found locally, installing from {}",
Yellow.bold().paint(spec.ident.to_string()),
&spec.depot_url);
try!(util::pkg::install(&mut ui, &spec.depot_url, &spec.ident))
}
};
Self::new(package,
spec,
gossip_listen,
http_listen,
manager_fs_cfg,
organization)
}
pub fn add(&self) -> Result<()> {
outputln!("Adding {}",
Yellow.bold().paint(self.package().ident().to_string()));
self.create_svc_path()?;
Ok(())
}
/// Create the service path for this package.
fn create_svc_path(&self) -> Result<()> {
let (user, group) = try!(util::users::get_user_and_group(&self.package()));
debug!("Creating svc paths");
if let Err(e) = Self::create_dir_all(self.svc_path()) {
outputln!("Can't create directory {}",
self.svc_path().to_str().unwrap());
outputln!("If this service is running as non-root, you'll need to create \
{} and give the current user write access to it",
self.svc_path().to_str().unwrap());
return Err(e);
}
try!(Self::create_dir_all(self.svc_config_path()));
try!(set_owner(self.svc_config_path(), &user, &group));
try!(set_permissions(self.svc_config_path(), 0o700));
try!(Self::create_dir_all(self.svc_data_path()));
try!(set_owner(self.svc_data_path(), &user, &group));
try!(set_permissions(self.svc_data_path(), 0o700));
try!(Self::create_dir_all(self.svc_files_path()));
try!(set_owner(self.svc_files_path(), &user, &group));
try!(set_permissions(self.svc_files_path(), 0o700));
try!(Self::create_dir_all(self.svc_hooks_path()));
try!(Self::create_dir_all(self.svc_var_path()));
try!(set_owner(self.svc_var_path(), &user, &group));
try!(set_permissions(self.svc_var_path(), 0o700));
try!(Self::remove_symlink(self.svc_static_path()));
try!(Self::create_dir_all(self.svc_static_path()));
try!(set_owner(self.svc_static_path(), &user, &group));
try!(set_permissions(self.svc_static_path(), 0o700));
try!(Self::create_dir_all(self.svc_logs_path()));
// TODO: Not 100% if this directory is still needed, but for the moment it's still here -
// FIN
try!(Self::create_dir_all(self.svc_path().join("toml")));
try!(set_permissions(self.svc_path().join("toml"), 0o700));
Ok(())
}
fn start(&mut self) {
if let Some(err) = self.supervisor.start().err() {
outputln!(preamble self.service_group, "Service start failed: {}", err);
} else {
self.needs_reload = false;
self.needs_reconfiguration = false;
}
}
pub fn stop(&mut self) {
if let Err(err) = self.supervisor.stop() {
outputln!(preamble self.service_group, "Service stop failed: {}", err);
}
}
fn reload(&mut self) {
self.needs_reload = false;
if self.is_down() || self.hooks.reload.is_none() {
if let Some(err) = self.supervisor.restart().err() {
outputln!(preamble self.service_group, "Service restart failed: {}", err);
}
} else {
let hook = self.hooks.reload.as_ref().unwrap();
hook.run(&self.service_group, self.runtime_cfg());
}
}
pub fn down(&mut self) -> Result<()> {
self.supervisor.down()
}
pub fn send_signal(&self, signal: u32) -> Result<()> {
match self.supervisor.child {
Some(ref child) => signals::send_signal(child.id(), signal),
None => {
debug!("No process to send the signal to");
Ok(())
}
}
}
fn is_down(&self) -> bool {
self.supervisor.child.is_none()
}
/// Instructs the service's process supervisor to reap dead children.
fn check_process(&mut self) {
self.supervisor.check_process()
}
    /// Drives one iteration of the service's run loop against the latest
    /// gossip and census state.
    ///
    /// An uninitialized service waits until all of its binds can be resolved
    /// in the census. Configuration and file updates are then pushed, and
    /// hooks run according to topology: standalone services always execute
    /// them, leader topology only once an election has finished. Returns true
    /// when this tick wrote an updated service rumor.
    pub fn tick(&mut self, butterfly: &butterfly::Server, census_ring: &CensusRing) -> bool {
        let mut service_rumor_written = false;
        if !self.initialized {
            if !self.all_bindings_present(census_ring) {
                outputln!(preamble self.service_group, "Waiting to initialize service.");
                return service_rumor_written;
            }
        }
        service_rumor_written = self.update_configuration(butterfly, census_ring);
        match self.topology {
            Topology::Standalone => {
                self.execute_hooks();
            }
            Topology::Leader => {
                let census_group = census_ring
                    .census_group_for(&self.service_group)
                    .expect("Service Group's census entry missing from list!");
                let current_election_status = &census_group.election_status;
                // Each arm logs its state only when it differs from the last
                // observed status, so we don't spam output on every tick.
                match census_group.election_status {
                    ElectionStatus::None => {
                        if self.last_election_status != *current_election_status {
                            outputln!(preamble self.service_group,
                                      "Waiting to execute hooks; {}",
                                      Yellow.bold().paint("election hasn't started"));
                            self.last_election_status = *current_election_status;
                        }
                    }
                    ElectionStatus::ElectionInProgress => {
                        if self.last_election_status != *current_election_status {
                            outputln!(preamble self.service_group,
                                      "Waiting to execute hooks; {}",
                                      Yellow.bold().paint("election in progress."));
                            self.last_election_status = *current_election_status;
                        }
                    }
                    ElectionStatus::ElectionNoQuorum => {
                        if self.last_election_status != *current_election_status {
                            outputln!(preamble self.service_group,
                                      "Waiting to execute hooks; {}, {}.",
                                      Yellow.bold().paint("election in progress"),
                                      Red.bold().paint("and we have no quorum"));
                            self.last_election_status = *current_election_status
                        }
                    }
                    ElectionStatus::ElectionFinished => {
                        let leader_id = census_group
                            .leader_id
                            .as_ref()
                            .expect("No leader with finished election");
                        if self.last_election_status != *current_election_status {
                            outputln!(preamble self.service_group,
                                      "Executing hooks; {} is the leader",
                                      Green.bold().paint(leader_id.to_string()));
                            self.last_election_status = *current_election_status;
                        }
                        // Hooks only run once the election is decided.
                        self.execute_hooks()
                    }
                }
            }
        }
        service_rumor_written
    }
pub fn to_spec(&self) -> ServiceSpec {
let mut spec = ServiceSpec::default_for(self.spec_ident.clone());
spec.group = self.service_group.group().to_string();
spec.depot_url = self.depot_url.clone();
spec.topology = self.topology;
spec.update_strategy = self.update_strategy;
spec.binds = self.spec_binds.clone();
spec.start_style = self.start_style;
spec.config_from = self.config_from.clone();
spec
}
fn all_bindings_present(&self, census_ring: &CensusRing) -> bool {
let mut ret = true;
for ref bind in self.spec_binds.iter() {
if census_ring
.census_group_for(&bind.service_group)
.is_none() {
ret = false;
outputln!(preamble self.service_group,
"The specified service group '{}' for binding '{}' is not (yet?) present in the census data.",
Green.bold().paint(format!("{}", bind.service_group)), Green.bold().paint(format!("{}", bind.name)));
}
}
ret
}
    /// Pushes gossiped config and files to disk, rewrites the rendered
    /// service configuration when inputs changed, and recompiles hooks as
    /// needed. Returns true when this call updated our own service rumor.
    fn update_configuration(&mut self,
                            butterfly: &butterfly::Server,
                            census_ring: &CensusRing)
                            -> bool {
        let mut service_rumor_written = false;
        self.config.populate(&self.service_group, census_ring);
        self.persist_service_files(butterfly);
        let svc_cfg_updated = self.persist_service_config(butterfly);
        if svc_cfg_updated || census_ring.changed {
            if svc_cfg_updated {
                service_rumor_written = self.update_service_rumor_cfg(butterfly);
                if let Some(err) = self.config.reload_gossip().err() {
                    outputln!(preamble self.service_group, "error loading gossip config, {}", err);
                }
            }
            // `write()` returning Ok(true) means the rendered config changed
            // on disk, which forces a reconfigure and a hook recompile.
            match self.config.write() {
                Ok(true) => {
                    self.needs_reconfiguration = true;
                    self.hooks.compile(&self.service_group, &self.config);
                    if let Some(err) = self.copy_run().err() {
                        outputln!(preamble self.service_group, "Failed to copy run hook: {}", err);
                    }
                }
                Ok(false) => (),
                Err(e) => {
                    outputln!(preamble self.service_group,
                              "Failed to write service configuration: {}",
                              e);
                }
            }
        }
        service_rumor_written
    }

    /// Read-locks and returns the service's installed package.
    ///
    /// Panics if the lock is poisoned.
    pub fn package(&self) -> RwLockReadGuard<PackageInstall> {
        self.package.read().expect("Package lock poisoned")
    }

    /// Swaps in an updated `package`: reloads hooks and service config from
    /// the new install, stops the running process, and clears `initialized`
    /// so the next tick re-initializes on the new version.
    pub fn update_package(&mut self, package: PackageInstall) {
        let runtime_cfg = match Self::runtime_config_from(&package) {
            Ok(c) => c,
            Err(err) => {
                outputln!(preamble self.service_group,
                          "Unable to extract svc_user, svc_group, and env_vars \
                           from updated package, {}", err);
                return;
            }
        };
        // Same precedence as `new()`: spec-provided config dir wins.
        let config_root = self.config_from
            .clone()
            .unwrap_or(package.installed_path.clone());
        let hooks_path = fs::svc_hooks_path(self.service_group.service());
        self.hooks = HookTable::default().load_hooks(&self.service_group,
                                                     hooks_path,
                                                     &config_root.join("hooks"));
        if let Some(err) = self.config
               .reload_package(&package, config_root, &runtime_cfg)
               .err() {
            outputln!(preamble self.service_group,
                      "Failed to reload service config with updated package: {}", err);
        }
        *self.package.write().expect("Package lock poisoned") = package;
        if let Err(err) = self.supervisor.down() {
            outputln!(preamble self.service_group,
                      "Error stopping process while updating package: {}", err);
        }
        // Force re-initialization (and restart) with the new package.
        self.initialized = false;
    }
pub fn to_rumor<T: ToString>(&self, member_id: T) -> ServiceRumor {
let exported = match self.config.to_exported() {
Ok(exported) => Some(exported),
Err(err) => {
outputln!(preamble self.service_group,
"Failed to generate exported cfg for service rumor: {}",
Red.bold().paint(format!("{}", err)));
None
}
};
ServiceRumor::new(member_id.to_string(),
&self.package().ident,
&self.service_group,
&*self.config.sys,
exported.as_ref())
}
/// Run initialization hook if present
fn initialize(&mut self) {
if self.initialized {
return;
}
outputln!(preamble self.service_group, "Initializing");
self.hooks.compile(&self.service_group, &self.config);
if let Some(err) = self.copy_run().err() {
outputln!(preamble self.service_group, "Failed to copy run hook: {}", err);
}
self.initialized = true;
if let Some(ref hook) = self.hooks.init {
self.initialized = hook.run(&self.service_group, self.runtime_cfg())
}
}
pub fn populate(&mut self, census_ring: &CensusRing) {
self.config.populate(&self.service_group, census_ring)
}
    /// Run the reconfigure hook if present, clearing the pending
    /// reconfiguration flag first.
    ///
    /// NOTE(review): the previous comment claimed this returns false when the
    /// hook is absent to trigger a restart, but the function returns `()`;
    /// restart-on-reconfigure is actually driven from `execute_hooks`.
    fn reconfigure(&mut self) {
        self.needs_reconfiguration = false;
        if let Some(ref hook) = self.hooks.reconfigure {
            hook.run(&self.service_group, self.runtime_cfg());
        }
    }
fn post_run(&mut self) {
if let Some(ref hook) = self.hooks.post_run {
hook.run(&self.service_group, self.runtime_cfg());
}
}
/// Modifies PATH env with the full run path for this package. This path is composed of any
/// binary paths specified by this package, or its TDEPS, plus a path to a BusyBox(non-windows),
/// plus the existing value of the PATH variable.
///
/// This means we work on any operating system, as long as you can invoke the Supervisor,
/// without having to worry much about context.
fn run_path(run_env: &mut HashMap<String, String>) -> Result<()> {
let path_key = "PATH".to_string();
let mut paths: Vec<PathBuf> = match run_env.get(&path_key) {
Some(path) => env::split_paths(&path).collect(),
None => vec![],
};
// Lets join the run paths to the FS_ROOT
// In most cases, this does nothing and should only mutate
// the paths in a windows studio where FS_ROOT_PATH will
// be the studio root path (ie c:\hab\studios\...). In any other
// environment FS_ROOT will be "/" and this will not make any
// meaningful change.
for i in 0..paths.len() {
if paths[i].starts_with("/") {
paths[i] = Path::new(&*FS_ROOT_PATH).join(paths[i].strip_prefix("/").unwrap());
}
}
run_env.insert(path_key,
util::path::append_interpreter_and_path(&mut paths)?);
Ok(())
}
fn runtime_cfg(&self) -> &RuntimeConfig {
&self.supervisor.runtime_config
}
pub fn suitability(&self) -> Option<u64> {
if !self.initialized {
return None;
}
self.hooks
.suitability
.as_ref()
.and_then(|hook| hook.run(&self.service_group, self.runtime_cfg()))
}
    // Per-service directory accessors. All of these key off the service
    // *name* (`service_group.service()`), not the full service group.

    /// Returns the root path for service configuration, files, and data.
    fn svc_path(&self) -> PathBuf {
        fs::svc_path(&self.service_group.service())
    }

    /// Returns the path to the service configuration.
    fn svc_config_path(&self) -> PathBuf {
        fs::svc_config_path(&self.service_group.service())
    }

    /// Returns the path to the service data.
    fn svc_data_path(&self) -> PathBuf {
        fs::svc_data_path(&self.service_group.service())
    }

    /// Returns the path to the service's gossiped config files.
    fn svc_files_path(&self) -> PathBuf {
        fs::svc_files_path(&self.service_group.service())
    }

    /// Returns the path to the service hooks.
    fn svc_hooks_path(&self) -> PathBuf {
        fs::svc_hooks_path(&self.service_group.service())
    }

    /// Returns the path to the service static content.
    fn svc_static_path(&self) -> PathBuf {
        fs::svc_static_path(&self.service_group.service())
    }

    /// Returns the path to the service variable state.
    fn svc_var_path(&self) -> PathBuf {
        fs::svc_var_path(&self.service_group.service())
    }

    /// Returns the path to the service logs.
    fn svc_logs_path(&self) -> PathBuf {
        fs::svc_logs_path(&self.service_group.service())
    }
/// this function wraps create_dir_all so we can give friendly error
/// messages to the user.
fn create_dir_all<P: AsRef<Path>>(path: P) -> Result<()> {
debug!("Creating dir with subdirs: {:?}", &path.as_ref());
if let Err(e) = std::fs::create_dir_all(&path) {
Err(sup_error!(Error::Permissions(format!("Can't create {:?}, {}", &path.as_ref(), e))))
} else {
Ok(())
}
}
fn cache_health_check(&self, check_result: HealthCheck) {
let state_file = self.manager_fs_cfg
.data_path
.join(format!("{}.health", self.service_group.service()));
let tmp_file = state_file.with_extension("tmp");
let file = match File::create(&tmp_file) {
Ok(file) => file,
Err(err) => {
warn!("Couldn't open temporary health check file, {}, {}",
self.service_group,
err);
return;
}
};
let mut writer = BufWriter::new(file);
if let Some(err) = writer
.write_all((check_result as i8).to_string().as_bytes())
.err() {
warn!("Couldn't write to temporary health check state file, {}, {}",
self.service_group,
err);
}
if let Some(err) = std::fs::rename(&tmp_file, &state_file).err() {
warn!("Couldn't finalize health check state file, {}, {}",
self.service_group,
err);
}
}
// Copy the "run" file to the svc path.
fn copy_run(&self) -> Result<()> {
let svc_run = self.svc_path().join(hooks::RunHook::file_name());
match self.hooks.run {
Some(ref hook) => {
try!(std::fs::copy(hook.path(), &svc_run));
try!(set_permissions(&svc_run.to_str().unwrap(), HOOK_PERMISSIONS));
}
None => {
let run = self.package()
.installed_path()
.join(hooks::RunHook::file_name());
match std::fs::metadata(&run) {
Ok(_) => {
try!(std::fs::copy(&run, &svc_run));
try!(set_permissions(&svc_run, HOOK_PERMISSIONS));
}
Err(err) => {
outputln!(preamble self.service_group, "Error finding run file: {}", err);
}
}
}
}
Ok(())
}
    /// Runs the service's lifecycle hooks for this tick.
    ///
    /// First time through: initialize, then start the process and run the
    /// post-run hook. On subsequent ticks: reap dead children, run the
    /// periodic health check, and reload/reconfigure when flagged or when the
    /// process is down.
    fn execute_hooks(&mut self) {
        if !self.initialized {
            self.initialize();
            // `initialize()` may leave `initialized` false (failed init
            // hook); only start once it succeeded.
            if self.initialized {
                self.start();
                self.post_run();
            }
        } else {
            self.check_process();
            if Instant::now().duration_since(self.last_health_check) >= *HEALTH_CHECK_INTERVAL {
                self.run_health_check_hook();
            }
            // A down process is brought back via `reload()` as well.
            if self.needs_reload || self.is_down() || self.needs_reconfiguration {
                self.reload();
                if self.needs_reconfiguration {
                    self.reconfigure()
                }
            }
        }
    }
/// Run file_updated hook if present
fn file_updated(&self) -> bool {
if self.initialized {
if let Some(ref hook) = self.hooks.file_updated {
return hook.run(&self.service_group, self.runtime_cfg());
}
}
false
}
/// Write service configuration from gossip data to disk.
///
/// Returns true if a change was made and false if there were no updates.
fn persist_service_config(&mut self, butterfly: &butterfly::Server) -> bool {
if let Some((incarnation, config)) =
butterfly.service_config_for(&*self.service_group, Some(self.config.incarnation)) {
self.config.incarnation = incarnation;
self.write_butterfly_service_config(config)
} else {
false
}
}
/// Write service files from gossip data to disk.
///
/// Returnst rue if a file was changed, added, or removed, and false if there were no updates.
fn persist_service_files(&mut self, butterfly: &butterfly::Server) -> bool {
let mut updated = false;
for (incarnation, filename, body) in
butterfly
.service_files_for(&*self.service_group, &self.current_service_files)
.into_iter() {
if self.write_butterfly_service_file(filename, incarnation, body) {
updated = true;
}
}
if updated { self.file_updated() } else { false }
}
/// attempt to remove a symlink in the /svc/run/foo/ directory if
/// the link exists.
fn remove_symlink<P: AsRef<Path>>(p: P) -> Result<()> {
let p = p.as_ref();
if !p.exists() {
return Ok(());
}
// note: we're NOT using p.metadata() here as that will follow the
// symlink, which returns smd.file_type().is_symlink() == false in all cases.
let smd = try!(p.symlink_metadata());
if smd.file_type().is_symlink() {
try!(std::fs::remove_file(p));
}
Ok(())
}
fn run_health_check_hook(&mut self) {
let check_result = if let Some(ref hook) = self.hooks.health_check {
hook.run(&self.service_group, self.runtime_cfg())
} else {
match self.supervisor.status() {
(true, _) => HealthCheck::Ok,
(false, _) => HealthCheck::Critical,
}
};
self.last_health_check = Instant::now();
self.cache_health_check(check_result);
}
/// Update our own service rumor with a new configuration from the packages exported
/// configuration data.
///
/// The run loop's last updated census is a required parameter on this function to inform the
/// main loop that we, ourselves, updated the service counter when we updated ourselves.
fn update_service_rumor_cfg(&self, butterfly: &butterfly::Server) -> bool {
if let Some(cfg) = self.config.to_exported().ok() {
let me = butterfly.member_id().to_string();
let mut updated = None;
butterfly
.service_store
.with_rumor(&*self.service_group, &me, |rumor| {
if let Some(rumor) = rumor {
let mut rumor = rumor.clone();
let incarnation = rumor.get_incarnation() + 1;
rumor.set_incarnation(incarnation);
// TODO FN: the updated toml API returns a `Result` when
// serializing--we should handle this and not potentially panic
*rumor.mut_cfg() =
toml::ser::to_vec(&cfg).expect("Can't serialize to TOML bytes");
updated = Some(rumor);
}
});
if let Some(rumor) = updated {
butterfly.insert_service(rumor);
return true;
}
}
false
}
    /// Writes a gossiped service file into the service's `files/` directory,
    /// recording the rumor incarnation we have seen for it.
    ///
    /// The new content's hash is compared against the file already on disk so
    /// unchanged files are skipped. Changed content is written to a
    /// `<name>.write` sibling and renamed into place (so readers never see a
    /// partial file), then chowned to the service user and set to 0640.
    /// Returns true only when on-disk content changed.
    fn write_butterfly_service_file(&mut self,
                                    filename: String,
                                    incarnation: u64,
                                    body: Vec<u8>)
                                    -> bool {
        self.current_service_files
            .insert(filename.clone(), incarnation);
        let on_disk_path = self.svc_files_path().join(filename);
        // A missing/unreadable file hashes to "" and therefore always differs.
        let current_checksum = match hash::hash_file(&on_disk_path) {
            Ok(current_checksum) => current_checksum,
            Err(e) => {
                debug!("Failed to get current checksum for {:?}: {}",
                       on_disk_path,
                       e);
                String::new()
            }
        };
        let new_checksum = hash::hash_bytes(&body)
            .expect("We failed to hash a Vec<u8> in a method that can't return an error; not \
                     even sure what this means");
        if new_checksum != current_checksum {
            let new_filename = format!("{}.write", on_disk_path.to_string_lossy());
            let mut new_file = match File::create(&new_filename) {
                Ok(new_file) => new_file,
                Err(e) => {
                    outputln!(preamble self.service_group,
                              "Service file from butterfly failed to open the new file {}: {}",
                              new_filename,
                              Red.bold().paint(format!("{}", e)));
                    return false;
                }
            };
            if let Err(e) = new_file.write_all(&body) {
                outputln!(preamble self.service_group,
                          "Service file from butterfly failed to write {}: {}",
                          new_filename,
                          Red.bold().paint(format!("{}", e)));
                return false;
            }
            if let Err(e) = std::fs::rename(&new_filename, &on_disk_path) {
                outputln!(preamble self.service_group,
                          "Service file from butterfly failed to rename {} to {}: {}",
                          new_filename,
                          on_disk_path.to_string_lossy(),
                          Red.bold().paint(format!("{}", e)));
                return false;
            }
            if let Err(e) = set_owner(&on_disk_path,
                                      &self.supervisor.runtime_config.svc_user,
                                      &self.supervisor.runtime_config.svc_group) {
                outputln!(preamble self.service_group,
                          "Service file from butterfly failed to set ownership on {}: {}",
                          on_disk_path.to_string_lossy(),
                          Red.bold().paint(format!("{}", e)));
                return false;
            }
            if let Err(e) = set_permissions(&on_disk_path, 0o640) {
                outputln!(preamble self.service_group,
                          "Service file from butterfly failed to set permissions on {}: {}",
                          on_disk_path.to_string_lossy(),
                          Red.bold().paint(format!("{}", e)));
                return false;
            }
            outputln!(preamble self.service_group,
                      "Service file updated from butterfly {}: {}",
                      on_disk_path.to_string_lossy(),
                      Green.bold().paint(new_checksum));
            true
        } else {
            false
        }
    }

    /// Writes gossiped service configuration to `<svc>/gossip.toml` when its
    /// serialized form differs from what is on disk.
    ///
    /// Same write-to-`.write`-then-rename / chown / 0640 sequence as
    /// `write_butterfly_service_file`. Returns true only when on-disk content
    /// changed.
    fn write_butterfly_service_config(&mut self, config: toml::Value) -> bool {
        let encoded = toml::ser::to_string(&config)
            .expect("Failed to serialize service configuration to a string in a method that \
                     can't return an error; this could be made better");
        let on_disk_path = self.svc_path().join("gossip.toml");
        // A missing/unreadable file hashes to "" and therefore always differs.
        let current_checksum = match hash::hash_file(&on_disk_path) {
            Ok(current_checksum) => current_checksum,
            Err(e) => {
                debug!("Failed to get current checksum for {:?}: {}",
                       on_disk_path,
                       e);
                String::new()
            }
        };
        let new_checksum = hash::hash_string(&encoded)
            .expect("We failed to hash a string in a method that can't return an error; not even \
                     sure what this means");
        if new_checksum != current_checksum {
            let new_filename = format!("{}.write", on_disk_path.to_string_lossy());
            let mut new_file = match File::create(&new_filename) {
                Ok(new_file) => new_file,
                Err(e) => {
                    outputln!(preamble self.service_group,
                              "Service configuration from butterfly failed to open the new file: \
                               {}",
                              Red.bold().paint(format!("{}", e)));
                    return false;
                }
            };
            if let Err(e) = new_file.write_all(encoded.as_bytes()) {
                outputln!(preamble self.service_group,
                          "Service configuration from butterfly failed to write: {}",
                          Red.bold().paint(format!("{}", e)));
                return false;
            }
            if let Err(e) = std::fs::rename(&new_filename, &on_disk_path) {
                outputln!(preamble self.service_group,
                          "Service configuration from butterfly failed to rename: {}",
                          Red.bold().paint(format!("{}", e)));
                return false;
            }
            if let Err(e) = set_owner(&on_disk_path,
                                      &self.supervisor.runtime_config.svc_user,
                                      &self.supervisor.runtime_config.svc_group) {
                outputln!(preamble self.service_group,
                          "Service configuration from butterfly failed to set ownership: {}",
                          Red.bold().paint(format!("{}", e)));
                return false;
            }
            if let Err(e) = set_permissions(&on_disk_path, 0o640) {
                outputln!(preamble self.service_group,
                          "Service configuration from butterfly failed to set permissions: {}",
                          Red.bold().paint(format!("{}", e)));
                return false;
            }
            outputln!(preamble self.service_group,
                      "Service configuration updated from butterfly: {}",
                      Green.bold().paint(new_checksum));
            true
        } else {
            false
        }
    }
}
impl fmt::Display for Service {
    /// Formats the service as its package's display form.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Delegate to the package's Display impl directly; the old
        // `self.package().to_string()` allocated an intermediate String just
        // to re-format it.
        write!(f, "{}", *self.package())
    }
}
/// Topology of a running service: independent peers (`Standalone`) or a
/// group that elects a single leader (`Leader`).
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum Topology {
    Standalone,
    Leader,
}

impl Topology {
    /// Canonical lowercase name used in specs and serialized output.
    fn as_str(&self) -> &str {
        match *self {
            Topology::Standalone => "standalone",
            Topology::Leader => "leader",
        }
    }
}
impl FromStr for Topology {
type Err = SupError;
fn from_str(topology: &str) -> result::Result<Self, Self::Err> {
match topology {
"leader" => Ok(Topology::Leader),
"standalone" => Ok(Topology::Standalone),
_ => Err(sup_error!(Error::InvalidTopology(String::from(topology)))),
}
}
}
impl fmt::Display for Topology {
    /// Writes the same string form used for serialization.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(self.as_str())
    }
}

impl Default for Topology {
    /// Services are standalone unless a topology is requested.
    fn default() -> Topology {
        Topology::Standalone
    }
}
// NOTE: hand-written (pre-derive-era) serde impls; `Topology` round-trips
// through its `FromStr`/`as_str` string form ("standalone"/"leader").
impl serde::Deserialize for Topology {
    fn deserialize<D>(deserializer: D) -> result::Result<Self, D::Error>
        where D: serde::Deserializer
    {
        deserialize_using_from_str(deserializer)
    }
}

impl serde::Serialize for Topology {
    fn serialize<S>(&self, serializer: S) -> result::Result<S::Ok, S::Error>
        where S: serde::Serializer
    {
        serializer.serialize_str(self.as_str())
    }
}
/// How a running service consumes package updates: never (`None`),
/// immediately on each release (`AtOnce`), or coordinated across the group
/// (`Rolling`).
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum UpdateStrategy {
    None,
    AtOnce,
    Rolling,
}

impl UpdateStrategy {
    /// Canonical lowercase/hyphenated name used in specs and serialized
    /// output.
    fn as_str(&self) -> &str {
        match *self {
            UpdateStrategy::Rolling => "rolling",
            UpdateStrategy::AtOnce => "at-once",
            UpdateStrategy::None => "none",
        }
    }
}
impl FromStr for UpdateStrategy {
type Err = SupError;
fn from_str(strategy: &str) -> result::Result<Self, Self::Err> {
match strategy {
"none" => Ok(UpdateStrategy::None),
"at-once" => Ok(UpdateStrategy::AtOnce),
"rolling" => Ok(UpdateStrategy::Rolling),
_ => Err(sup_error!(Error::InvalidUpdateStrategy(String::from(strategy)))),
}
}
}
impl fmt::Display for UpdateStrategy {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.as_str())
}
}
impl Default for UpdateStrategy {
    // Services take no automatic updates unless a strategy is requested.
    fn default() -> UpdateStrategy {
        UpdateStrategy::None
    }
}

// NOTE: hand-written (pre-derive-era) serde impls; `UpdateStrategy`
// round-trips through its `FromStr`/`as_str` string form
// ("none"/"at-once"/"rolling").
impl serde::Deserialize for UpdateStrategy {
    fn deserialize<D>(deserializer: D) -> result::Result<Self, D::Error>
        where D: serde::Deserializer
    {
        deserialize_using_from_str(deserializer)
    }
}

impl serde::Serialize for UpdateStrategy {
    fn serialize<S>(&self, serializer: S) -> result::Result<S::Ok, S::Error>
        where S: serde::Serializer
    {
        serializer.serialize_str(self.as_str())
    }
}

// Serializes the shared package as its display string; used by the
// `#[serde(serialize_with)]` attribute on `Service::package`. Panics if the
// lock is poisoned.
fn serialize_lock<S>(x: &Arc<RwLock<PackageInstall>>, s: S) -> result::Result<S::Ok, S::Error>
    where S: serde::Serializer
{
    s.serialize_str(&x.read().expect("Package lock poisoned").to_string())
}
#[cfg(test)]
mod test {
use std::str::FromStr;
use toml;
use super::{Topology, UpdateStrategy};
use error::Error::*;
#[test]
fn topology_default() {
// This should always be the default topology, if this default gets changed, we have
// a failing test to confirm we changed our minds
assert_eq!(Topology::default(), Topology::Standalone);
}
#[test]
fn topology_from_str() {
let topology_str = "leader";
let topology = Topology::from_str(topology_str).unwrap();
assert_eq!(topology, Topology::Leader);
}
#[test]
fn topology_from_str_invalid() {
let topology_str = "dope";
match Topology::from_str(topology_str) {
Err(e) => {
match e.err {
InvalidTopology(s) => assert_eq!("dope", s),
wrong => panic!("Unexpected error returned: {:?}", wrong),
}
}
Ok(_) => panic!("String should fail to parse"),
}
}
#[test]
fn topology_to_string() {
let topology = Topology::Standalone;
assert_eq!("standalone", topology.to_string())
}
#[test]
fn topology_toml_deserialize() {
#[derive(Deserialize)]
struct Data {
key: Topology,
}
let toml = r#"
key = "leader"
"#;
let data: Data = toml::from_str(toml).unwrap();
assert_eq!(data.key, Topology::Leader);
}
#[test]
fn topology_toml_serialize() {
#[derive(Serialize)]
struct Data {
key: Topology,
}
let data = Data { key: Topology::Leader };
let toml = toml::to_string(&data).unwrap();
assert!(toml.starts_with(r#"key = "leader""#))
}
#[test]
fn update_strategy_default() {
// This should always be the default update strategy, if this default gets changed, we have
// a failing test to confirm we changed our minds
assert_eq!(UpdateStrategy::default(), UpdateStrategy::None);
}
#[test]
fn update_strategy_from_str() {
let strategy_str = "at-once";
let strategy = UpdateStrategy::from_str(strategy_str).unwrap();
assert_eq!(strategy, UpdateStrategy::AtOnce);
}
#[test]
fn update_strategy_from_str_invalid() {
let strategy_str = "dope";
match UpdateStrategy::from_str(strategy_str) {
Err(e) => {
match e.err {
InvalidUpdateStrategy(s) => assert_eq!("dope", s),
wrong => panic!("Unexpected error returned: {:?}", wrong),
}
}
Ok(_) => panic!("String should fail to parse"),
}
}
#[test]
fn update_strategy_to_string() {
let strategy = UpdateStrategy::AtOnce;
assert_eq!("at-once", strategy.to_string())
}
#[test]
fn update_strategy_toml_deserialize() {
    // A quoted TOML string value must deserialize into the matching enum variant.
    #[derive(Deserialize)]
    struct Data {
        key: UpdateStrategy,
    }
    let toml = r#"
key = "at-once"
"#;
    let parsed = toml::from_str::<Data>(toml).unwrap();
    assert_eq!(parsed.key, UpdateStrategy::AtOnce);
}
#[test]
fn update_strategy_toml_serialize() {
    // Serializing must emit the kebab-case variant name as a TOML string.
    #[derive(Serialize)]
    struct Data {
        key: UpdateStrategy,
    }
    let serialized = toml::to_string(&Data { key: UpdateStrategy::AtOnce }).unwrap();
    assert!(serialized.starts_with(r#"key = "at-once""#));
}
}
|
#![warn(missing_docs)]
use std::error::Error;
use auto_impl::auto_impl;
use slotmap::Key;
use super::ops::DelayType;
use super::{Color, GraphNodeId, GraphSubgraphId};
/// Trait for writing textual representations of graphs, i.e. mermaid or dot graphs.
///
/// Implementations are driven in document order: `write_prologue` first,
/// `write_epilogue` last, with subgraph/node/edge methods in between.
#[auto_impl(&mut, Box)]
pub trait GraphWrite {
    /// Error type emitted by writing.
    type Err: Error;
    /// Begin the graph. First method called.
    fn write_prologue(&mut self) -> Result<(), Self::Err>;
    /// Begin writing a subgraph.
    fn write_subgraph_start(
        &mut self,
        sg_id: GraphSubgraphId,
        stratum: usize,
    ) -> Result<(), Self::Err>;
    /// Write a node, possibly within a subgraph.
    fn write_node(
        &mut self,
        node_id: GraphNodeId,
        node: &str, //&Node,
        node_color: Color,
        // `None` means the node is emitted at the top level.
        in_subgraph: Option<GraphSubgraphId>,
    ) -> Result<(), Self::Err>;
    /// Write an edge, possibly within a subgraph.
    fn write_edge(
        &mut self,
        src_id: GraphNodeId,
        dst_id: GraphNodeId,
        delay_type: Option<DelayType>,
        label: Option<&str>,
        in_subgraph: Option<GraphSubgraphId>,
    ) -> Result<(), Self::Err>;
    /// Write the nodes associated with a single variable name, within a subgraph.
    fn write_subgraph_varname(
        &mut self,
        sg_id: GraphSubgraphId,
        varname: &str,
        varname_nodes: impl Iterator<Item = GraphNodeId>,
    ) -> Result<(), Self::Err>;
    /// End writing a subgraph.
    fn write_subgraph_end(&mut self) -> Result<(), Self::Err>;
    /// End the graph. Last method called.
    fn write_epilogue(&mut self) -> Result<(), Self::Err>;
}
/// A [`GraphWrite`] implementation that emits Mermaid flowchart syntax.
pub struct Mermaid<W> {
    /// Underlying `std::fmt::Write` sink the graph text is written into.
    write: W,
}
impl<W> Mermaid<W> {
    /// Creates a new Mermaid graph writer wrapping the given sink.
    pub fn new(write: W) -> Self {
        Self { write }
    }
}
impl<W> GraphWrite for Mermaid<W>
where
W: std::fmt::Write,
{
type Err = std::fmt::Error;
fn write_prologue(&mut self) -> Result<(), Self::Err> {
writeln!(
self.write,
r"%%{{init:{{'theme':'base','themeVariables':{{'clusterBkg':'#ddd','clusterBorder':'#888'}}}}}}%%",
)?;
writeln!(self.write, "flowchart TD")?;
writeln!(
self.write,
"classDef pullClass fill:#8af,stroke:#000,text-align:left,white-space:pre"
)?;
writeln!(
self.write,
"classDef pushClass fill:#ff8,stroke:#000,text-align:left,white-space:pre"
)?;
writeln!(
self.write,
"linkStyle default stroke:#aaa,stroke-width:4px,color:red,font-size:1.5em;"
)?;
Ok(())
}
fn write_subgraph_start(
&mut self,
sg_id: GraphSubgraphId,
stratum: usize,
) -> Result<(), Self::Err> {
writeln!(
self.write,
"{:t$}subgraph sg_{sg:?} [\"sg_{sg:?} stratum {:?}\"]",
"",
stratum,
sg = sg_id.data(),
t = 0,
)?;
Ok(())
}
fn write_node(
&mut self,
node_id: GraphNodeId,
node: &str, //&Node,
node_color: Color,
in_subgraph: Option<GraphSubgraphId>,
) -> Result<(), Self::Err> {
let class_str = match node_color {
Color::Push => "pushClass",
Color::Pull => "pullClass",
_ => "otherClass",
};
let label = format!(
r#"{:t$}{id:?}{lbracket}"{id_label} <code>{code}</code>"{rbracket}:::{class}"#,
"",
id = node_id.data(),
id_label = if node.contains('\n') {
format!("<div style=text-align:center>({:?})</div>", node_id.data())
} else {
format!("({:?})", node_id.data())
},
class = class_str,
lbracket = match node_color {
Color::Push => r"[/",
Color::Pull => r"[\",
_ => "[",
},
code = node
.replace('&', "&")
.replace('<', "<")
.replace('>', ">")
.replace('"', """)
// Mermaid entity codes
// https://mermaid.js.org/syntax/flowchart.html#entity-codes-to-escape-characters
.replace('#', "#")
// Not really needed, newline literals seem to work
.replace('\n', "<br>")
// Mermaid font awesome fa
// https://github.com/mermaid-js/mermaid/blob/e4d2118d4bfa023628a020b7ab1f8c491e6dc523/packages/mermaid/src/diagrams/flowchart/flowRenderer-v2.js#L62
.replace("fa:fa", "fa:<wbr>fa")
.replace("fab:fa", "fab:<wbr>fa")
.replace("fal:fa", "fal:<wbr>fa")
.replace("far:fa", "far:<wbr>fa")
.replace("fas:fa", "fas:<wbr>fa"),
rbracket = match node_color {
Color::Push => r"\]",
Color::Pull => r"/]",
_ => "]",
},
t = if in_subgraph.is_some() { 4 } else { 0 },
);
writeln!(self.write, "{}", label)?;
Ok(())
}
fn write_edge(
&mut self,
src_id: GraphNodeId,
dst_id: GraphNodeId,
delay_type: Option<DelayType>,
label: Option<&str>,
in_subgraph: Option<GraphSubgraphId>,
) -> Result<(), Self::Err> {
let src_str = format!("{:?}", src_id.data());
let dest_str = format!("{:?}", dst_id.data());
writeln!(
self.write,
"{:t$}{src}{label}{delay}{dst}",
"",
src = src_str.trim(),
label = if let Some(label) = &label {
if Some(DelayType::Stratum) == delay_type {
format!("=={}", label.trim())
} else {
format!("--{}", label.trim())
}
} else {
"".to_string()
},
delay = if Some(DelayType::Stratum) == delay_type {
"===o"
} else {
"--->"
},
dst = dest_str.trim(),
t = if in_subgraph.is_some() { 4 } else { 0 },
)?;
Ok(())
}
fn write_subgraph_varname(
&mut self,
sg_id: GraphSubgraphId,
varname: &str,
varname_nodes: impl Iterator<Item = GraphNodeId>,
) -> Result<(), Self::Err> {
writeln!(
self.write,
"{:t$}subgraph sg_{sg:?}_var_{var} [\"var <tt>{var}</tt>\"]",
"",
sg = sg_id.data(),
var = varname,
t = 4,
)?;
for local_named_node in varname_nodes {
writeln!(self.write, "{:t$}{:?}", "", local_named_node.data(), t = 8)?;
}
writeln!(self.write, "{:t$}end", "", t = 4)?;
Ok(())
}
fn write_subgraph_end(&mut self) -> Result<(), Self::Err> {
writeln!(self.write, "{:t$}end", "", t = 0)?;
Ok(())
}
fn write_epilogue(&mut self) -> Result<(), Self::Err> {
// No-op.
Ok(())
}
}
/// A [`GraphWrite`] implementation that emits Graphviz dot syntax.
pub struct Dot<W> {
    /// Underlying `std::fmt::Write` sink the graph text is written into.
    write: W,
}
impl<W> Dot<W> {
    /// Creates a new dot graph writer wrapping the given sink.
    pub fn new(write: W) -> Self {
        Self { write }
    }
}
impl<W> GraphWrite for Dot<W>
where
    W: std::fmt::Write,
{
    type Err = std::fmt::Error;
    // Opens the top-level `digraph { ... }`; `write_epilogue` emits the matching `}`.
    fn write_prologue(&mut self) -> Result<(), Self::Err> {
        writeln!(self.write, "digraph {{")?;
        Ok(())
    }
    // Subgraphs become dot `cluster` subgraphs (the "cluster" name prefix is what
    // makes Graphviz draw a box around them).
    fn write_subgraph_start(
        &mut self,
        sg_id: GraphSubgraphId,
        stratum: usize,
    ) -> Result<(), Self::Err> {
        writeln!(
            self.write,
            "{:t$}subgraph \"cluster n{:?}\" {{",
            "",
            sg_id.data(),
            t = 4,
        )?;
        writeln!(self.write, "{:t$}fillcolor=\"#dddddd\"", "", t = 8)?;
        writeln!(self.write, "{:t$}style=filled", "", t = 8)?;
        writeln!(
            self.write,
            "{:t$}label = \"sg_{:?}\\nstratum {}\"",
            "",
            sg_id.data(),
            stratum,
            t = 8,
        )?;
        Ok(())
    }
    fn write_node(
        &mut self,
        node_id: GraphNodeId,
        node: &str, //&Node,
        node_color: Color,
        in_subgraph: Option<GraphSubgraphId>,
    ) -> Result<(), Self::Err> {
        // Escape quotes for the dot label; "\l" is dot's left-justified linebreak.
        let nm = node.replace('"', "\\\"").replace('\n', "\\l");
        let label = format!("n{:?}", node_id.data());
        // Node shape encodes the color (push/pull/handoff/computation).
        let shape_str = match node_color {
            Color::Push => "house",
            Color::Pull => "invhouse",
            Color::Hoff => "parallelogram",
            Color::Comp => "circle",
        };
        let color_str = match node_color {
            Color::Push => "style = filled, color = \"#ffff00\"",
            Color::Pull => "style = filled, color = \"#0022ff\", fontcolor = \"#ffffff\"",
            Color::Hoff => "style = filled, color = \"#ddddff\"",
            Color::Comp => "style = filled, color = white",
        };
        write!(
            self.write,
            "{:t$}{} [label=\"({}) {}{}\"",
            "",
            label,
            label,
            nm,
            // if contains linebreak left-justify by appending another "\\l"
            if nm.contains("\\l") { "\\l" } else { "" },
            t = if in_subgraph.is_some() { 8 } else { 4 },
        )?;
        write!(self.write, ", fontname=Monaco")?;
        write!(self.write, ", shape={}", shape_str)?;
        write!(self.write, ", {}", color_str)?;
        writeln!(self.write, "]")?;
        Ok(())
    }
    fn write_edge(
        &mut self,
        src_id: GraphNodeId,
        dst_id: GraphNodeId,
        delay_type: Option<DelayType>,
        label: Option<&str>,
        in_subgraph: Option<GraphSubgraphId>,
    ) -> Result<(), Self::Err> {
        let mut properties = Vec::new();
        if let Some(label) = label {
            properties.push(format!("label=\"{}\"", label));
        };
        // Mark stratum-crossing edges distinctly (red with a box arrowhead).
        if Some(DelayType::Stratum) == delay_type {
            properties.push("arrowhead=box, color=red".to_string());
        };
        writeln!(
            self.write,
            "{:t$}n{:?} -> n{:?}{}",
            "",
            src_id.data(),
            dst_id.data(),
            if !properties.is_empty() {
                format!(" [{}]", properties.join(", "))
            } else {
                "".to_string()
            },
            t = if in_subgraph.is_some() { 8 } else { 4 },
        )?;
        Ok(())
    }
    fn write_subgraph_varname(
        &mut self,
        sg_id: GraphSubgraphId,
        varname: &str,
        varname_nodes: impl Iterator<Item = GraphNodeId>,
    ) -> Result<(), Self::Err> {
        writeln!(
            self.write,
            "{:t$}subgraph \"cluster sg_{sg:?}_var_{var}\" {{",
            "",
            sg = sg_id.data(),
            var = varname,
            t = 8,
        )?;
        writeln!(
            self.write,
            "{:t$}label=\"var {var}\"",
            "",
            var = varname,
            t = 12,
        )?;
        for local_named_node in varname_nodes {
            writeln!(
                self.write,
                "{:t$}n{:?}",
                "",
                local_named_node.data(),
                t = 12
            )?;
        }
        writeln!(self.write, "{:t$}}}", "", t = 8)?;
        Ok(())
    }
    fn write_subgraph_end(&mut self) -> Result<(), Self::Err> {
        // subgraph footer
        writeln!(self.write, "{:t$}}}", "", t = 4)?;
        Ok(())
    }
    fn write_epilogue(&mut self) -> Result<(), Self::Err> {
        writeln!(self.write, "}}")?;
        Ok(())
    }
}
|
use std::fs::File;
use std::io::Read;
/// Sum over all groups of the number of questions every member answered "yes" to
/// (Advent of Code 2020, day 6 part 2).
///
/// Groups are separated by blank lines; each member's answers are one line of chars.
fn count_unanimous(input: &str) -> usize {
    input
        .split("\n\n")
        .map(|group| {
            // Number of people = number of non-empty lines. The previous
            // `count('\n') + 1` overcounted by one when a group carried a
            // trailing newline (e.g. the last group of the file).
            let group_size = group.lines().filter(|line| !line.is_empty()).count();
            let mut answers: Vec<char> = group.chars().filter(|c| !c.is_whitespace()).collect();
            answers.sort();
            let mut unique = answers.clone();
            unique.dedup();
            // A question counts when every member answered it.
            unique
                .iter()
                .filter(|&&q| answers.iter().filter(|&&a| a == q).count() == group_size)
                .count()
        })
        .sum()
}
fn main() {
    let mut file = File::open("d06-input").expect("file not found");
    let mut input = String::new();
    file.read_to_string(&mut input).expect("something went wrong reading file");
    println!("Sum of answers: {}", count_unanimous(&input));
}
use std::borrow::Cow;
use std::io;
use futures::{future, FutureExt};
use futures::future::{lazy, Future};
use futures::channel::mpsc::unbounded;
use futures::channel::oneshot::{channel, Receiver};
use tokio::runtime::current_thread::Handle;
use tokio_timer::{timer::Timer, clock::Clock};
use tokio_net::driver::Reactor;
use crate::arbiter::{Arbiter, SystemArbiter};
use crate::runtime::Runtime;
use crate::system::System;
use tokio_executor::current_thread::CurrentThread;
/// Builder struct for an actix runtime.
///
/// Either use `Builder::build` to create a system and start actors.
/// Alternatively, use `Builder::run` to start the tokio runtime and
/// run a function in its context.
pub struct Builder {
    /// Name of the System. Defaults to "actix" if unset.
    name: Cow<'static, str>,
    /// The clock to use; defaults to the system clock (see `Builder::clock`).
    clock: Clock,
    /// Whether the Arbiter will stop the whole System on uncaught panic. Defaults to false.
    stop_on_panic: bool,
}
impl Builder {
    /// Creates a builder with default settings: name "actix", system clock,
    /// and `stop_on_panic = false`.
    pub(crate) fn new() -> Self {
        Builder {
            name: Cow::Borrowed("actix"),
            clock: Clock::new(),
            stop_on_panic: false,
        }
    }
    /// Sets the name of the System.
    pub fn name<T: Into<String>>(mut self, name: T) -> Self {
        self.name = Cow::Owned(name.into());
        self
    }
    /// Set the Clock instance that will be used by this System.
    ///
    /// Defaults to the system clock.
    pub fn clock(mut self, clock: Clock) -> Self {
        self.clock = clock;
        self
    }
    /// Sets the option 'stop_on_panic' which controls whether the System is stopped when an
    /// uncaught panic is thrown from a worker thread.
    ///
    /// Defaults to false.
    pub fn stop_on_panic(mut self, stop_on_panic: bool) -> Self {
        self.stop_on_panic = stop_on_panic;
        self
    }
    /// Create new System.
    ///
    /// This method panics if it can not create tokio runtime
    pub fn build(self) -> SystemRunner {
        self.create_runtime(|| {})
    }
    /// Create new System that can run asynchronously.
    ///
    /// This method panics if it cannot start the system arbiter
    pub(crate) fn build_async(self, executor: Handle) -> AsyncSystemRunner {
        self.create_async_runtime(executor)
    }
    /// This function will start tokio runtime and will finish once the
    /// `System::stop()` message get called.
    /// Function `f` get called within tokio runtime context.
    pub fn run<F>(self, f: F) -> io::Result<()>
    where
        F: FnOnce() + 'static,
    {
        self.create_runtime(f).run()
    }
    /// Wires up a System and its arbiter on an already-running executor
    /// (no new runtime is created).
    fn create_async_runtime(self, executor: Handle) -> AsyncSystemRunner {
        // `stop` resolves with the exit code once `System::stop()` is called.
        let (stop_tx, stop) = channel();
        let (sys_sender, sys_receiver) = unbounded();
        let system = System::construct(sys_sender, Arbiter::new_system(), self.stop_on_panic);
        // system arbiter
        let arb = SystemArbiter::new(stop_tx, sys_receiver);
        // start the system arbiter
        executor.spawn(arb).expect("could not start system arbiter");
        AsyncSystemRunner { stop, system }
    }
    /// Builds a fresh single-threaded runtime, spawns the system arbiter on it,
    /// and runs `f` inside the runtime context before returning the runner.
    fn create_runtime<F>(self, f: F) -> SystemRunner
    where
        F: FnOnce() + 'static,
    {
        let (stop_tx, stop) = channel();
        let (sys_sender, sys_receiver) = unbounded();
        let system = System::construct(sys_sender, Arbiter::new_system(), self.stop_on_panic);
        // system arbiter
        let arb = SystemArbiter::new(stop_tx, sys_receiver);
        let mut rt = self.build_rt().unwrap();
        rt.spawn(arb);
        // init system arbiter and run configuration method
        let _ = rt.block_on(lazy(move |_| {
            f();
            Ok::<_, ()>(())
        }));
        SystemRunner { rt, stop, system }
    }
    /// Assembles the runtime stack: reactor (IO) -> timer -> current-thread executor.
    pub(crate) fn build_rt(&self) -> io::Result<Runtime> {
        // We need a reactor to receive events about IO objects from kernel
        let reactor = Reactor::new()?;
        let reactor_handle = reactor.handle();
        // Place a timer wheel on top of the reactor. If there are no timeouts to fire, it'll let the
        // reactor pick up some new external events.
        let timer = Timer::new_with_now(reactor, self.clock.clone());
        let timer_handle = timer.handle();
        // And now put a single-threaded executor on top of the timer. When there are no futures ready
        // to do something, it'll let the timer or the reactor to generate some new stimuli for the
        // futures to continue in their life.
        let executor = CurrentThread::new_with_park(timer);
        Ok(Runtime::new2(
            reactor_handle,
            timer_handle,
            self.clock.clone(),
            executor,
        ))
    }
}
/// Drives a System on an existing executor without blocking the current thread.
#[derive(Debug)]
pub(crate) struct AsyncSystemRunner {
    /// Resolves with the exit code sent by `System::stop()`.
    stop: Receiver<i32>,
    /// The System being run (held so it stays alive for the runner's duration).
    system: System,
}
impl AsyncSystemRunner {
    /// This function will start event loop and returns a future that
    /// resolves once the `System::stop()` function is called.
    ///
    /// A non-zero code sent via `System::stop()` surfaces as an `io::Error`.
    pub(crate) fn run_nonblocking(self) -> impl Future<Output = Result<(),io::Error>> + Send {
        let AsyncSystemRunner { stop, .. } = self;
        // run loop
        future::lazy(|_| {
            // Register this thread's arbiter before awaiting the stop signal.
            Arbiter::run_system();
            async {
                let res = match stop.await {
                    Ok(code) => {
                        if code != 0 {
                            Err(io::Error::new(
                                io::ErrorKind::Other,
                                format!("Non-zero exit code: {}", code),
                            ))
                        } else {
                            Ok(())
                        }
                    }
                    // The sender was dropped without sending a code.
                    Err(e) => Err(io::Error::new(io::ErrorKind::Other, e)),
                };
                // Always tear the system down, regardless of the stop outcome.
                Arbiter::stop_system();
                return res;
            }
        }).flatten()
    }
}
/// Helper object that runs System's event loop
#[must_use = "SystemRunner must be run"]
#[derive(Debug)]
pub struct SystemRunner {
    /// The single-threaded runtime the system executes on.
    rt: Runtime,
    /// Resolves with the exit code sent by `System::stop()`.
    stop: Receiver<i32>,
    /// The System being run (held so it stays alive for the runner's duration).
    system: System,
}
impl SystemRunner {
    /// This function will start event loop and will finish once the
    /// `System::stop()` function is called.
    ///
    /// A non-zero code sent via `System::stop()` surfaces as an `io::Error`.
    pub fn run(self) -> io::Result<()> {
        let SystemRunner { mut rt, stop, .. } = self;
        // run loop
        // Register this thread's arbiter before blocking on the stop signal.
        let _ = rt.block_on(async {
            Arbiter::run_system();
            Ok::<_, ()>(())
        });
        let result = match rt.block_on(stop) {
            Ok(code) => {
                if code != 0 {
                    Err(io::Error::new(
                        io::ErrorKind::Other,
                        format!("Non-zero exit code: {}", code),
                    ))
                } else {
                    Ok(())
                }
            }
            // The sender was dropped without sending a code.
            Err(e) => Err(io::Error::new(io::ErrorKind::Other, e)),
        };
        // Tear down after the loop exits, whatever the outcome.
        Arbiter::stop_system();
        result
    }
    /// Execute a future and wait for result.
    ///
    /// The system arbiter is started before, and stopped after, running `fut`.
    pub fn block_on<F, O>(&mut self, fut: F) -> O
    where
        F: Future<Output = O>,
    {
        let _ = self.rt.block_on(async {
            Arbiter::run_system();
        });
        let res = self.rt.block_on(fut);
        let _ = self.rt.block_on(async {
            Arbiter::stop_system();
        });
        res
    }
}
|
use std::env;
use std::process;
/// Command-line options parsed from `env::args` (see `Options::new`).
pub struct Options {
    /// Input filename (aligned Multi-FASTA), from `-i`. Required.
    pub input : String,
    /// Output filename, from `-o`. Required.
    pub output : String,
    //pub weight : String,
    /// Tolerate non-standard amino-acid types: "yes" or "no" (`-t`, default "yes").
    pub tolerate : String,
    /// Substitution matrix name, e.g. "blosum62" (`-m`).
    pub matrix : String,
    /// Colorize terminal output: "yes" or "no" (`-c`, default "no").
    pub colorize : String,
}
impl Options {
    /// Parses `env::args` into an `Options`.
    ///
    /// Never returns on bad input: `show_usage` prints help and exits the
    /// process when required arguments are missing, a value flag has no
    /// following value, or a value is outside the accepted set.
    pub fn new() -> Options
    {
        let argv: Vec<String> = env::args().collect();
        let argc: usize = argv.len();
        // Defaults; -i and -o are required and have no defaults.
        let mut arg_i = String::new();
        let mut arg_o = String::new();
        let mut arg_t = String::from("yes");
        let mut arg_m = String::from("blosum62");
        let mut arg_c = String::from("no");
        if argc < 5 { show_usage(&argv[0]) };
        let mut i: usize = 1;
        while i < argc {
            // Every recognized flag except -h consumes the next argument as its value.
            let target: &mut String = match argv[i].as_str() {
                "-i" => &mut arg_i,
                "-o" => &mut arg_o,
                "-t" => &mut arg_t,
                "-m" => &mut arg_m,
                "-c" => &mut arg_c,
                // -h and any unknown flag: print usage and exit.
                _ => {
                    show_usage(&argv[0]);
                    unreachable!() // show_usage calls process::exit
                }
            };
            i += 1;
            // Guard against a value flag given as the last argument; the previous
            // implementation indexed past the end of argv and panicked here.
            if i >= argc { show_usage(&argv[0]); }
            *target = argv[i].clone();
            i += 1;
        }
        // Validate restricted values; exits via show_usage on anything unexpected.
        match arg_t.as_str() {
            "yes" | "no" => (),
            _ => show_usage(&argv[0]),
        }
        match arg_m.as_str() {
            "blosum45" | "blosum50" | "blosum62" | "blosum80" | "blosum90"
            | "pam30" | "pam70" | "pam250" | "pet91mod" | "blosum62mod" => (),
            _ => show_usage(&argv[0]),
        }
        match arg_c.as_str() {
            "yes" | "no" => (),
            _ => show_usage(&argv[0]),
        }
        Options {
            input: arg_i,
            output: arg_o,
            tolerate: arg_t,
            matrix: arg_m,
            colorize: arg_c,
        }
    }
    /// Prints the parsed parameter set to stdout.
    pub fn show_parameter(&self)
    {
        println!("\nParameter set :");
        println!("===========================================");
        println!("Input filename : {}", self.input);
        // "Onput" typo fixed in the user-facing label.
        println!("Output filename : {}", self.output);
        //println!( "Weighting method : {}", self.weight );
        println!("Non-standard AA : {}", self.tolerate);
        println!("Substitution matrix : {}", self.matrix);
        println!("Colorize AA : {}", self.colorize);
        println!("===========================================");
    }
}
fn show_usage( arg : &String )
{
println!( "Usage: {} [Options] \n\nOptions :\n\n", *arg );
println!( " -i Input filename in aligned Multi-FASTA format, REQUIRED." );
println!( " -o Onput filename, REQUIRED." );
//println!( " -w Method of sequence weighting ('hen' or 'va', default 'hen').\n hen : Position-sased method by Henikoff and Henikoff\n va : Distance-Based method by Vingron and Argos" );
println!( " -t Tolerate non-standard AA types (such as B, Z and X) in input file ('yes' or 'no', default 'yes').\n yes : All non-standard AAs are converted to gaps.\n no : The program halts if the input file includes non-standard AA types." );
println!( " -m Substitution scoring matrix (default 'blosum62').
blosum45 : BLOSUM45
blosum50 : BLOSUM50
blosum62 : BLOSUM62
blosum80 : BLOSUM80
blosum90 : BLOSUM90
pam30 : PAM30
pam70 : PAM70
pam250 : PAM250
pet91mod : Modified version of PET91
blosum62mod : Modified version of BLOSUM62" );
println!( " -c Colorize each AA displayed on the terminal based on their stereochemical properties ('yes' or 'no', default 'no')." );
println!( " -h Print this help, ignore all other arguments." );
println!( "\n" );
process::exit( 1 );
}
|
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
use super::{
rescue, CYCLE_LENGTH as HASH_CYCLE_LEN, SIG_CYCLE_LENGTH as SIG_CYCLE_LEN, TRACE_WIDTH,
};
use crate::utils::{are_equal, is_binary, is_zero, not, EvaluationResult};
use winterfell::{
math::{fields::f128::BaseElement, FieldElement},
Air, AirContext, Assertion, ByteWriter, EvaluationFrame, ProofOptions, Serializable, TraceInfo,
TransitionConstraintDegree,
};
// CONSTANTS
// ================================================================================================
const TWO: BaseElement = BaseElement::new(2);
// AGGREGATE LAMPORT PLUS SIGNATURE AIR
// ================================================================================================
/// Public inputs for the aggregate Lamport+ signature AIR: one
/// (public key, message) pair of field-element pairs per signature.
pub struct PublicInputs {
    /// Public key for each aggregated signature, as two field elements.
    pub pub_keys: Vec<[BaseElement; 2]>,
    /// Signed message for each aggregated signature, as two field elements.
    pub messages: Vec<[BaseElement; 2]>,
}
impl Serializable for PublicInputs {
    /// Serializes all public keys followed by all messages into `target`.
    fn write_into<W: ByteWriter>(&self, target: &mut W) {
        target.write(&self.pub_keys);
        target.write(&self.messages);
    }
}
/// AIR (algebraic intermediate representation) for verifying an aggregation
/// of Lamport+ signatures; see `PublicInputs` for the per-signature inputs.
pub struct LamportAggregateAir {
    /// Constraint-system context built from trace info, degrees, and proof options.
    context: AirContext<BaseElement>,
    /// One public key (two field elements) per aggregated signature.
    pub_keys: Vec<[BaseElement; 2]>,
    /// One message (two field elements) per aggregated signature.
    messages: Vec<[BaseElement; 2]>,
}
impl Air for LamportAggregateAir {
    type BaseElement = BaseElement;
    type PublicInputs = PublicInputs;
    // CONSTRUCTOR
    // --------------------------------------------------------------------------------------------
    /// Builds the AIR: declares the degree of every transition constraint and
    /// stores the public inputs for use in `get_assertions`.
    fn new(trace_info: TraceInfo, pub_inputs: PublicInputs, options: ProofOptions) -> Self {
        // define degrees for all transition constraints
        let degrees = vec![
            TransitionConstraintDegree::with_cycles(2, vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN]), // m0 bit is binary
            TransitionConstraintDegree::with_cycles(2, vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN]), // m1 bit is binary
            TransitionConstraintDegree::with_cycles(
                1,
                vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN, SIG_CYCLE_LEN],
            ), // m0 accumulation
            TransitionConstraintDegree::with_cycles(
                1,
                vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN, SIG_CYCLE_LEN],
            ), // m1 accumulation
            // secret key 1 hashing
            TransitionConstraintDegree::with_cycles(5, vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN]),
            TransitionConstraintDegree::with_cycles(5, vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN]),
            TransitionConstraintDegree::with_cycles(5, vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN]),
            TransitionConstraintDegree::with_cycles(5, vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN]),
            TransitionConstraintDegree::with_cycles(5, vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN]),
            TransitionConstraintDegree::with_cycles(5, vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN]),
            // secret key 2 hashing
            TransitionConstraintDegree::with_cycles(5, vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN]),
            TransitionConstraintDegree::with_cycles(5, vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN]),
            TransitionConstraintDegree::with_cycles(5, vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN]),
            TransitionConstraintDegree::with_cycles(5, vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN]),
            TransitionConstraintDegree::with_cycles(5, vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN]),
            TransitionConstraintDegree::with_cycles(5, vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN]),
            // public key hashing
            TransitionConstraintDegree::with_cycles(5, vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN]),
            TransitionConstraintDegree::with_cycles(5, vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN]),
            TransitionConstraintDegree::with_cycles(5, vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN]),
            TransitionConstraintDegree::with_cycles(5, vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN]),
            TransitionConstraintDegree::with_cycles(5, vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN]),
            TransitionConstraintDegree::with_cycles(5, vec![HASH_CYCLE_LEN, SIG_CYCLE_LEN]),
        ];
        assert_eq!(TRACE_WIDTH, trace_info.width());
        LamportAggregateAir {
            context: AirContext::new(trace_info, degrees, options),
            pub_keys: pub_inputs.pub_keys,
            messages: pub_inputs.messages,
        }
    }
    fn context(&self) -> &AirContext<Self::BaseElement> {
        &self.context
    }
    /// Evaluates all transition constraints for one step of the execution trace.
    fn evaluate_transition<E: FieldElement + From<Self::BaseElement>>(
        &self,
        frame: &EvaluationFrame<E>,
        periodic_values: &[E],
        result: &mut [E],
    ) {
        let current = frame.current();
        let next = frame.next();
        // expected state width is TRACE_WIDTH field elements
        debug_assert_eq!(TRACE_WIDTH, current.len());
        debug_assert_eq!(TRACE_WIDTH, next.len());
        // split periodic values into flags and Rescue round constants
        let sig_cycle_end_flag = periodic_values[0];
        let power_of_two = periodic_values[1];
        let hash_flag = periodic_values[2];
        let ark = &periodic_values[3..];
        // evaluate the constraints
        evaluate_constraints(
            result,
            current,
            next,
            ark,
            hash_flag,
            sig_cycle_end_flag,
            power_of_two,
        );
    }
    /// Boundary assertions tying trace registers to the public inputs at the
    /// first and last step of every signature cycle.
    fn get_assertions(&self) -> Vec<Assertion<Self::BaseElement>> {
        let last_cycle_step = SIG_CYCLE_LEN - 1;
        let messages = transpose(&self.messages);
        let pub_keys = transpose(&self.pub_keys);
        vec![
            // --- set assertions against the first step of every cycle: 0, 1024, 2048 etc. -------
            // message aggregators should be set to zeros
            Assertion::periodic(2, 0, SIG_CYCLE_LEN, BaseElement::ZERO),
            Assertion::periodic(3, 0, SIG_CYCLE_LEN, BaseElement::ZERO),
            // for private key hasher, last 4 state register should be set to zeros
            Assertion::periodic(6, 0, SIG_CYCLE_LEN, BaseElement::ZERO),
            Assertion::periodic(7, 0, SIG_CYCLE_LEN, BaseElement::ZERO),
            Assertion::periodic(8, 0, SIG_CYCLE_LEN, BaseElement::ZERO),
            Assertion::periodic(9, 0, SIG_CYCLE_LEN, BaseElement::ZERO),
            Assertion::periodic(12, 0, SIG_CYCLE_LEN, BaseElement::ZERO),
            Assertion::periodic(13, 0, SIG_CYCLE_LEN, BaseElement::ZERO),
            Assertion::periodic(14, 0, SIG_CYCLE_LEN, BaseElement::ZERO),
            Assertion::periodic(15, 0, SIG_CYCLE_LEN, BaseElement::ZERO),
            // for public key hasher, all registers should be set to zeros
            Assertion::periodic(16, 0, SIG_CYCLE_LEN, BaseElement::ZERO),
            Assertion::periodic(17, 0, SIG_CYCLE_LEN, BaseElement::ZERO),
            Assertion::periodic(18, 0, SIG_CYCLE_LEN, BaseElement::ZERO),
            Assertion::periodic(19, 0, SIG_CYCLE_LEN, BaseElement::ZERO),
            Assertion::periodic(20, 0, SIG_CYCLE_LEN, BaseElement::ZERO),
            Assertion::periodic(21, 0, SIG_CYCLE_LEN, BaseElement::ZERO),
            // --- set assertions against the last step of every cycle: 1023, 2047, 3071 etc. -----
            // last bits of message bit registers should be set to zeros; this is because we truncate
            // message elements to 127 bits each - so, 128th bit must always be zero
            Assertion::periodic(0, last_cycle_step, SIG_CYCLE_LEN, BaseElement::ZERO),
            Assertion::periodic(1, last_cycle_step, SIG_CYCLE_LEN, BaseElement::ZERO),
            // message accumulator registers should be set to message element values
            Assertion::sequence(2, last_cycle_step, SIG_CYCLE_LEN, messages.0),
            Assertion::sequence(3, last_cycle_step, SIG_CYCLE_LEN, messages.1),
            // public key hasher should terminate with public key elements
            Assertion::sequence(16, last_cycle_step, SIG_CYCLE_LEN, pub_keys.0),
            Assertion::sequence(17, last_cycle_step, SIG_CYCLE_LEN, pub_keys.1),
        ]
    }
    /// Periodic columns: signature-cycle mask, powers of two, hash-cycle mask,
    /// and the Rescue round constants.
    fn get_periodic_column_values(&self) -> Vec<Vec<Self::BaseElement>> {
        let mut result = vec![];
        // signature cycle mask: 1023 zeros followed by 1 one
        let mut sig_cycle_mask = vec![BaseElement::ZERO; SIG_CYCLE_LEN];
        sig_cycle_mask[SIG_CYCLE_LEN - 1] = BaseElement::ONE;
        result.push(sig_cycle_mask);
        // build powers of two column
        let mut powers_of_two = vec![BaseElement::ZERO; SIG_CYCLE_LEN];
        let mut current_power_of_two = BaseElement::ONE;
        powers_of_two[0] = BaseElement::ONE;
        for (i, value) in powers_of_two.iter_mut().enumerate().skip(1) {
            // we switch to a new power of two once every 8 steps; this is so that a
            // new power of two is available for every hash cycle
            if i % HASH_CYCLE_LEN == 0 {
                current_power_of_two *= TWO;
            }
            *value = current_power_of_two;
        }
        result.push(powers_of_two);
        // add hash cycle mask (seven ones followed by a zero), and rescue round constants
        result.push(HASH_CYCLE_MASK.to_vec());
        result.append(&mut rescue::get_round_constants());
        result
    }
}
// HELPER FUNCTIONS
// ================================================================================================
#[rustfmt::skip]
fn evaluate_constraints<E: FieldElement + From<BaseElement>>(
result: &mut [E],
current: &[E],
next: &[E],
ark: &[E],
hash_flag: E,
sig_cycle_end_flag: E,
power_of_two: E,
) {
// when hash_flag = 1 (which happens on all steps except steps which are one less than a
// multiple of 8 - e.g. all steps except for 7, 15, 23 etc.), and we are not on the last step
// of a signature cycle make sure the contents of the first 4 registers are copied over, and
// for other registers, Rescue constraints are applied separately for hashing secret and
// public keys
let flag = not(sig_cycle_end_flag) * hash_flag;
result.agg_constraint(0, flag, are_equal(current[0], next[0]));
result.agg_constraint(1, flag, are_equal(current[1], next[1]));
result.agg_constraint(2, flag, are_equal(current[2], next[2]));
result.agg_constraint(3, flag, are_equal(current[3], next[3]));
rescue::enforce_round(&mut result[4..10], ¤t[4..10], &next[4..10], ark, flag);
rescue::enforce_round(&mut result[10..16], ¤t[10..16], &next[10..16], ark, flag);
rescue::enforce_round(&mut result[16..22], ¤t[16..22], &next[16..22], ark, flag);
// when hash_flag = 0 (which happens on steps which are one less than a multiple of 8 - e.g. 7,
// 15, 23 etc.), and we are not on the last step of a signature cycle:
let flag = not(sig_cycle_end_flag) * not(hash_flag);
// make sure values inserted into registers 0 and 1 are binary
result.agg_constraint(0, flag, is_binary(current[0]));
result.agg_constraint(1, flag, is_binary(current[1]));
// make sure message values were aggregated correctly in registers 2 and 3
let next_m0 = current[2] + current[0] * power_of_two;
result.agg_constraint(2, flag, are_equal(next_m0, next[2]));
let next_m1 = current[3] + current[1] * power_of_two;
result.agg_constraint(3, flag, are_equal(next_m1, next[3]));
// registers 6..10 and 12..16 were set to zeros
result.agg_constraint(4, flag, is_zero(next[6]));
result.agg_constraint(5, flag, is_zero(next[7]));
result.agg_constraint(6, flag, is_zero(next[8]));
result.agg_constraint(7, flag, is_zero(next[9]));
result.agg_constraint(8, flag, is_zero(next[12]));
result.agg_constraint(9, flag, is_zero(next[13]));
result.agg_constraint(10, flag, is_zero(next[14]));
result.agg_constraint(11, flag, is_zero(next[15]));
// contents of registers 20 and 21 (capacity section of public key hasher state) were
// copied over to the next step
result.agg_constraint(12, flag, are_equal(current[20], next[20]));
result.agg_constraint(13, flag, are_equal(current[21], next[21]));
// when current bit of m0 = 1, hash of private key 1 (which should be equal to public key)
// should be injected into the hasher state for public key aggregator
let m0_bit = current[0];
result.agg_constraint(14, flag * m0_bit,are_equal(current[16] + current[4], next[16]));
result.agg_constraint(15, flag * m0_bit, are_equal(current[17] + current[5], next[17]));
// when current bit of m1 = 1, hash of private key 2 (which should be equal to public key)
// should be injected into the hasher state for public key aggregator
let m1_bit = current[1];
result.agg_constraint(16, flag * m1_bit, are_equal(current[18] + current[10], next[18]));
result.agg_constraint(17, flag * m1_bit, are_equal(current[19] + current[11], next[19]));
}
/// Splits a slice of 2-element arrays into two column vectors:
/// `([a0, b0], [a1, b1], ...)` -> `([a0, a1, ...], [b0, b1, ...])`.
///
/// Generalized over any `Copy` element type (callers using `BaseElement` are
/// unaffected). Fixes the capacity computation: the old code used
/// `values[0].len()` — always 2, and a panic on an empty slice — instead of
/// the number of rows.
fn transpose<T: Copy>(values: &[[T; 2]]) -> (Vec<T>, Vec<T>) {
    let n = values.len();
    let mut r1 = Vec::with_capacity(n);
    let mut r2 = Vec::with_capacity(n);
    for element in values {
        r1.push(element[0]);
        r2.push(element[1]);
    }
    (r1, r2)
}
// MASKS
// ================================================================================================
/// Hash-cycle mask: one for each of the seven Rescue round steps, zero for the
/// final step of every 8-step hash cycle (see `get_periodic_column_values`).
const HASH_CYCLE_MASK: [BaseElement; HASH_CYCLE_LEN] = [
    BaseElement::ONE,
    BaseElement::ONE,
    BaseElement::ONE,
    BaseElement::ONE,
    BaseElement::ONE,
    BaseElement::ONE,
    BaseElement::ONE,
    BaseElement::ZERO,
];
|
//TODO: make a build script that builds out each of these for the given platform
//TODO: create a launcher system
#[cfg(feature = "dx11")]
pub extern crate gfx_backend_dx11 as back;
#[cfg(feature = "dx12")]
pub extern crate gfx_backend_dx12 as back;
//TODO: make webgl work
#[cfg(any(feature = "gl", feature = "wgl"))]
pub extern crate gfx_backend_gl as back;
#[cfg(feature = "metal")]
pub extern crate gfx_backend_metal as back;
#[cfg(feature = "vulkan")]
pub extern crate gfx_backend_vulkan as back;
// #[cfg(not(any(
// feature = "vulkan",
// feature = "metal",
// feature = "dx11",
// feature = "dx12",
// )))]
// compile_error!("you should specify a backend api (vulkan, metal, dx11, dx12)");
#[cfg(not(any(
feature = "vulkan",
feature = "metal",
feature = "dx11",
feature = "gl",
// feature = "wgl", webgl does not work yet
feature = "dx12",
)))]
pub extern crate gfx_backend_empty as back;
|
/// Returns the 1-based century containing `year` (e.g. 1900 -> 19, 1901 -> 20).
fn century(year: u32) -> u32 {
    (year - 1) / 100 + 1
}
|
/// Packs four character/byte expressions into a little-endian `u32` FourCC code:
/// `$a` is the least-significant byte, `$d` the most-significant.
macro_rules! fourcc_code {
    ($a:expr, $b:expr, $c:expr, $d:expr) => {
        (($a as u32) | (($b as u32) << 8) | (($c as u32) << 16) | (($d as u32) << 24))
    };
}
/// Pixel formats identified by FourCC codes (four ASCII bytes packed
/// little-endian by `fourcc_code!`).
#[derive(Debug)]
#[repr(u32)]
pub enum Format {
    /// FourCC "RG24" (0x34324752).
    RGB888 = fourcc_code!('R', 'G', '2', '4'),
    /// FourCC "XR24".
    XRGB8888 = fourcc_code!('X', 'R', '2', '4'),
}
#[cfg(test)]
mod tests {
    #[test]
    fn test_format_enum() {
        // "RG24" packed little-endian: 'R' | 'G'<<8 | '2'<<16 | '4'<<24.
        assert_eq!(super::Format::RGB888 as u32, 0x34324752);
    }
}
|
/// A person with a borrowed name; the struct cannot outlive the borrowed str.
#[derive(Debug)]
struct Person<'a> {
    name: &'a str
}
/// Wraps `name` in a `Person` that borrows it for the same lifetime.
fn make_person(name: &str) -> Person<'_> {
    Person { name }
}
/// Returns a greeting with the `'static` lifetime (the literal lives in the binary).
fn make_static_str() -> &'static str {
    let greeting: &'static str = "hello";
    greeting
}
/// Returns "hello" with a caller-chosen lifetime; valid because the literal is `'static`.
fn make_ref_str<'a>() -> &'a str {
    const GREETING: &str = "hello";
    GREETING
}
/// Appends " world" to `s` in place (every call appends again).
fn modify_string(s: &mut String) {
    // `+=` on String delegates to push_str.
    *s += " world";
}
#[cfg(test)]
mod test {
    use super::*;
    #[test]
    fn test_make_static_str() {
        assert_eq!("hello", make_static_str());
    }
    #[test]
    fn test_make_ref_str() {
        // The 'static literal is usable at the caller-chosen lifetime.
        assert_eq!("hello", make_ref_str());
    }
    #[test]
    fn test_modify_string() {
        let mut s = String::from("hello");
        modify_string(&mut s);
        assert_eq!("hello world", s);
        // A second call appends again — the function is not idempotent.
        modify_string(&mut s);
        assert_eq!("hello world world", s);
    }
    #[test]
    fn test_make_person() {
        let name = "unnamed";
        let person = make_person(name);
        assert_eq!("Person { name: \"unnamed\" }", format!("{:?}", person))
    }
}
#[doc = "Reader of register HWCFGR2"]
pub type R = crate::R<u32, super::HWCFGR2>;
#[doc = "Reader of field `CFG1`"]
pub type CFG1_R = crate::R<u8, u8>;
#[doc = "Reader of field `CFG2`"]
pub type CFG2_R = crate::R<u8, u8>;
// svd2rust-style generated accessors: CFG1 and CFG2 are 4-bit fields packed
// into the low byte of HWCFGR2.
impl R {
    #[doc = "Bits 0:3 - CFG1"]
    #[inline(always)]
    pub fn cfg1(&self) -> CFG1_R {
        CFG1_R::new((self.bits & 0x0f) as u8)
    }
    #[doc = "Bits 4:7 - CFG2"]
    #[inline(always)]
    pub fn cfg2(&self) -> CFG2_R {
        CFG2_R::new(((self.bits >> 4) & 0x0f) as u8)
    }
}
|
use std::error::Error;
use std::fs::read_to_string;
use std::path::PathBuf;
use structopt::StructOpt;
use crate::model::*;
use crate::PROJECT_TEMP_FILE;
mod common;
mod change_column;
mod change_tags;
/// Arguments for the git `commit-msg` hook.
#[derive(StructOpt, Debug)]
pub struct CommitMsg {
    // Path to the file holding the commit message, as passed by git.
    file: PathBuf,
}
/// `commit-msg` hook entry point: applies column and tag changes recorded in
/// the project temp file, driven by the commit message text, then persists
/// the project. A no-op (Ok) when the temp file does not exist.
pub fn commit_msg(args: CommitMsg) -> Result<(), Box<dyn Error>> {
    // Nothing staged by earlier hooks — nothing to do.
    if !PathBuf::from(PROJECT_TEMP_FILE).exists() {
        return Ok(());
    }
    let mut project = GitProject::open()?;
    let message = read_to_string(&args.file)?;
    change_column::change_column(&mut project, &message)?;
    change_tags::change_tags(&mut project, &message)?;
    project.save()
}
|
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
/// Objects that can be cast to another object in a saturating way.
///
/// = Remarks
///
/// This is used to cast between integer type so that `256.saturating_cast():u8 == 255`.
pub trait SaturatingCast<T> {
    /// Converts `self` to `T`, clamping to `T`'s range instead of wrapping.
    fn saturating_cast(self) -> T;
}
// truncations that always preserve the value
//
// For every (src, dst) pair below all source values fit in the destination,
// so a plain `as` cast is already value-preserving (and thus saturating).
macro_rules! trnc_up {
    ($src:ty as $($dst:ty),+) => {
        $(impl SaturatingCast<$dst> for $src {
            fn saturating_cast(self) -> $dst {
                self as $dst
            }
        })+
    }
}
trnc_up!(u8 as u8, u16, i16, u32, i32, u64, i64, u128, i128, usize, isize);
trnc_up!(i8 as i8, i16, i32, i64, i128, isize);
trnc_up!(u16 as u16, u32, i32, u64, i64, u128, i128, usize, isize);
trnc_up!(i16 as i16, i32, i64, i128, isize);
trnc_up!(u32 as u32, u64, i64, u128, i128, usize);
trnc_up!(i32 as i32, i64, i128, isize);
trnc_up!(u64 as u64, u128, i128);
trnc_up!(i64 as i64, i128);
trnc_up!(u128 as u128);
trnc_up!(i128 as i128);
trnc_up!(usize as u64, u128, i128, usize);
trnc_up!(isize as i64, i128, isize);
// Pointer-width-dependent pairs: only value-preserving when usize/u64
// (resp. isize/i64) have compatible widths on the target.
#[cfg(target_pointer_width = "64")]
trnc_up!(u64 as usize);
#[cfg(target_pointer_width = "64")]
trnc_up!(i64 as isize);
#[cfg(target_pointer_width = "32")]
trnc_up!(usize as u32);
#[cfg(target_pointer_width = "32")]
trnc_up!(isize as i32);
// truncations of signed types to unsigned types of equal or larger width
// (negative values clamp to 0; non-negative values always fit)
macro_rules! trnc_s2u_up {
    ($src:ty as $($dst:ty),+) => {
        $(impl SaturatingCast<$dst> for $src {
            fn saturating_cast(self) -> $dst {
                if self >= 0 {
                    self as $dst
                } else {
                    0
                }
            }
        })+
    }
}
trnc_s2u_up!(i8 as u8, u16, u32, u64, u128, usize);
trnc_s2u_up!(i16 as u16, u32, u64, u128, usize);
trnc_s2u_up!(i32 as u32, u64, u128, usize);
trnc_s2u_up!(i64 as u64, u128);
trnc_s2u_up!(i128 as u128);
trnc_s2u_up!(isize as usize, u128);
#[cfg(target_pointer_width = "64")]
trnc_s2u_up!(i64 as usize);
#[cfg(target_pointer_width = "32")]
trnc_s2u_up!(isize as u32);
// truncation of signed types to types of smaller width
// (clamps to the destination's min/max)
macro_rules! trnc_s_down {
    ($src:ty as $($dst:ident),+) => {
        $(impl SaturatingCast<$dst> for $src {
            fn saturating_cast(self) -> $dst {
                if self > $dst::max_value() as $src {
                    $dst::max_value()
                } else if self < $dst::min_value() as $src {
                    $dst::min_value()
                } else {
                    self as $dst
                }
            }
        })+
    }
}
trnc_s_down!(i16 as u8, i8);
trnc_s_down!(i32 as u8, i8, u16, i16);
trnc_s_down!(i64 as u8, i8, u16, i16, u32, i32);
// NOTE(review): i128 -> u64 appears in neither this list nor trnc_s2u_up —
// confirm whether that pair is intentionally unsupported.
trnc_s_down!(i128 as u8, i8, u16, i16, u32, i32, i64, usize, isize);
trnc_s_down!(isize as u8, i8, u16, i16);
#[cfg(target_pointer_width = "64")]
trnc_s_down!(isize as u32, i32);
#[cfg(target_pointer_width = "32")]
trnc_s_down!(i64 as usize, isize);
// truncation of unsigned types to types of equal or smaller width
// (only an upper clamp is needed; unsigned sources are never negative)
macro_rules! trnc_u_down {
    ($src:ty as $($dst:ident),+) => {
        $(impl SaturatingCast<$dst> for $src {
            fn saturating_cast(self) -> $dst {
                if self > $dst::max_value() as $src {
                    $dst::max_value()
                } else {
                    self as $dst
                }
            }
        })+
    }
}
trnc_u_down!(u16 as i8, u8, i16);
trnc_u_down!(u32 as i8, u8, i16, u16, i32);
trnc_u_down!(u64 as i8, u8, i16, u16, i32, u32, i64);
trnc_u_down!(u128 as i8, u8, i16, u16, i32, u32, i64, u64, i128, usize, isize);
trnc_u_down!(usize as i8, u8, i16, u16, i32);
#[cfg(target_pointer_width = "64")]
trnc_u_down!(usize as u32, i64);
#[cfg(target_pointer_width = "32")]
trnc_u_down!(u64 as isize, usize);
use std::collections::{HashMap, HashSet, VecDeque};
use std::io;
use ckb_logger::{debug, warn};
use futures::{
prelude::*,
sync::mpsc::{channel, Receiver, Sender},
Async, Poll, Stream,
};
use p2p::{
bytes::Bytes,
context::{ProtocolContext, ProtocolContextMutRef},
multiaddr::Multiaddr,
traits::ServiceProtocol,
utils::{is_reachable, multiaddr_to_socketaddr},
SessionId,
};
use rand::seq::SliceRandom;
use tokio::timer::Interval;
use std::convert::TryFrom;
use std::time::{Duration, Instant};
/// How often `Discovery` wakes to run its timer-driven housekeeping.
const CHECK_INTERVAL: Duration = Duration::from_secs(3);
mod addr;
mod protocol;
mod substream;
pub use crate::{
addr::{AddrKnown, AddressManager, MisbehaveResult, Misbehavior, RawAddr},
protocol::{DiscoveryMessage, Node, Nodes},
substream::{Substream, SubstreamKey, SubstreamValue},
};
use crate::{addr::DEFAULT_MAX_KNOWN, substream::RemoteAddress};
/// p2p `ServiceProtocol` adapter: owns the `Discovery` state machine until
/// `init` spawns it as a future task, and routes per-session inbound bytes
/// into it through channels.
pub struct DiscoveryProtocol<M> {
    // Taken (`Option::take`) once in `init` when spawned as a future task.
    discovery: Option<Discovery<M>>,
    // Cloneable handle used to register new substreams with `discovery`.
    discovery_handle: DiscoveryHandle,
    // Per-session senders feeding received bytes to the matching substream.
    discovery_senders: HashMap<SessionId, Sender<Vec<u8>>>,
}
impl<M: AddressManager> DiscoveryProtocol<M> {
    /// Wraps a `Discovery` state machine for use as a service protocol,
    /// grabbing a handle to it before storing it for later hand-off.
    pub fn new(discovery: Discovery<M>) -> DiscoveryProtocol<M> {
        // Struct fields are evaluated top-to-bottom: the handle is taken
        // while `discovery` is still borrowable, then the value is moved.
        DiscoveryProtocol {
            discovery_handle: discovery.handle(),
            discovery: Some(discovery),
            discovery_senders: HashMap::default(),
        }
    }
}
impl<M: AddressManager + Send + 'static> ServiceProtocol for DiscoveryProtocol<M> {
    /// Spawns the owned `Discovery` stream as this service's future task.
    fn init(&mut self, context: &mut ProtocolContext) {
        debug!("protocol [discovery({})]: init", context.proto_id);
        // `discovery` is moved out here; `init` must only run once per
        // instance or the `unwrap` below panics.
        let discovery_task = self
            .discovery
            .take()
            .map(|discovery| {
                debug!("Start discovery future_task");
                discovery
                    .for_each(|()| Ok(()))
                    .map_err(|err| {
                        warn!("discovery stream error: {:?}", err);
                    })
                    .then(|_| {
                        debug!("End of discovery");
                        Ok(())
                    })
            })
            .unwrap();
        if context.future_task(discovery_task).is_err() {
            warn!("start discovery fail");
        };
    }
    /// Registers a new session: wires a bounded channel into a `Substream`
    /// and hands it to the discovery state machine.
    fn connected(&mut self, context: ProtocolContextMutRef, _: &str) {
        let session = context.session;
        debug!(
            "protocol [discovery] open on session [{}], address: [{}], type: [{:?}]",
            session.id, session.address, session.ty
        );
        // At most 8 in-flight raw messages buffered per session.
        let (sender, receiver) = channel(8);
        self.discovery_senders.insert(session.id, sender);
        let substream = Substream::new(context, receiver);
        match self.discovery_handle.substream_sender.try_send(substream) {
            Ok(_) => {
                debug!("Send substream success");
            }
            Err(err) => {
                // TODO: handle channel is full (wait for poll API?)
                warn!("Send substream failed : {:?}", err);
            }
        }
    }
    /// Drops the session's sender so the discovery side observes stream end.
    fn disconnected(&mut self, context: ProtocolContextMutRef) {
        self.discovery_senders.remove(&context.session.id);
        debug!(
            "protocol [discovery] close on session [{}]",
            context.session.id
        );
    }
    /// Forwards raw protocol bytes to the owning session's substream channel.
    fn received(&mut self, context: ProtocolContextMutRef, data: Bytes) {
        debug!("[received message]: length={}", data.len());
        if let Some(ref mut sender) = self.discovery_senders.get_mut(&context.session.id) {
            // TODO: handle channel is full (wait for poll API?)
            if let Err(err) = sender.try_send(data.to_vec()) {
                if err.is_full() {
                    warn!("channel is full");
                } else if err.is_disconnected() {
                    warn!("channel is disconnected");
                } else {
                    warn!("other channel error: {:?}", err);
                }
            }
        }
    }
}
/// Core discovery state machine; driven as a `Stream` by the future task
/// spawned in `DiscoveryProtocol::init`.
pub struct Discovery<M> {
    // Upper bound of addresses remembered per substream. Default: 5000
    // (DEFAULT_MAX_KNOWN).
    max_known: usize,
    // Address Manager
    addr_mgr: M,
    // The Nodes not yet been yield (drained one batch per poll)
    pending_nodes: VecDeque<(SubstreamKey, SessionId, Nodes)>,
    // For manage those substreams
    substreams: HashMap<SubstreamKey, SubstreamValue>,
    // For add new substream to Discovery
    substream_sender: Sender<Substream>,
    // For add new substream to Discovery (receiving end of the sender above)
    substream_receiver: Receiver<Substream>,
    // Substreams flagged for removal on the current/next poll pass
    dead_keys: HashSet<SubstreamKey>,
    // Optional override of the default announce/query cycle
    dynamic_query_cycle: Option<Duration>,
    // Fires every CHECK_INTERVAL; polled to keep the task scheduled
    check_interval: Interval,
    // When true, only globally reachable IPs are announced to peers
    global_ip_only: bool,
}
/// Cloneable handle for registering new substreams with a running `Discovery`.
#[derive(Clone)]
pub struct DiscoveryHandle {
    pub substream_sender: Sender<Substream>,
}
impl<M: AddressManager> Discovery<M> {
    /// Query cycle means checking and synchronizing the cycle time of the currently connected node, default is 24 hours
    pub fn new(addr_mgr: M, query_cycle: Option<Duration>) -> Discovery<M> {
        // At most 8 queued substream registrations.
        let (substream_sender, substream_receiver) = channel(8);
        let check_interval = Interval::new_interval(CHECK_INTERVAL);
        Discovery {
            check_interval,
            max_known: DEFAULT_MAX_KNOWN,
            addr_mgr,
            pending_nodes: VecDeque::default(),
            substreams: HashMap::default(),
            substream_sender,
            substream_receiver,
            dead_keys: HashSet::default(),
            dynamic_query_cycle: query_cycle,
            global_ip_only: true,
        }
    }
    /// Turning off global ip only mode will allow any ip to be broadcast, default is true
    pub fn global_ip_only(mut self, global_ip_only: bool) -> Self {
        self.global_ip_only = global_ip_only;
        self
    }
    /// Borrows the address manager.
    pub fn addr_mgr(&self) -> &M {
        &self.addr_mgr
    }
    /// Returns a cloneable handle for registering substreams.
    pub fn handle(&self) -> DiscoveryHandle {
        DiscoveryHandle {
            substream_sender: self.substream_sender.clone(),
        }
    }
    /// Drains all pending substream registrations into `self.substreams`.
    /// Errors only if the registration channel itself fails.
    fn recv_substreams(&mut self) -> Result<(), io::Error> {
        loop {
            match self.substream_receiver.poll() {
                Ok(Async::Ready(Some(substream))) => {
                    let key = substream.key();
                    debug!("Received a substream: key={:?}", key);
                    let value = SubstreamValue::new(
                        key.direction,
                        substream,
                        self.max_known,
                        self.dynamic_query_cycle,
                    );
                    self.substreams.insert(key, value);
                }
                // We hold a sender ourselves (`substream_sender`), so the
                // channel can never be observed as closed here.
                Ok(Async::Ready(None)) => unreachable!(),
                Ok(Async::NotReady) => {
                    debug!("Discovery.substream_receiver Async::NotReady");
                    break;
                }
                Err(err) => {
                    debug!("receive substream error: {:?}", err);
                    return Err(io::ErrorKind::Other.into());
                }
            }
        }
        Ok(())
    }
    /// Polls the interval timer until NotReady so the current task stays
    /// registered for periodic wake-ups; the ticks carry no data and all
    /// timer errors are swallowed (logged only).
    fn check_interval(&mut self) {
        loop {
            match self.check_interval.poll() {
                Ok(Async::Ready(Some(_))) => {}
                Ok(Async::Ready(None)) => {
                    debug!("Discovery check_interval poll finished");
                    break;
                }
                Ok(Async::NotReady) => break,
                Err(err) => {
                    debug!("Discovery check_interval poll error: {:?}", err);
                    break;
                }
            }
        }
    }
}
impl<M: AddressManager> Stream for Discovery<M> {
    type Item = ();
    type Error = io::Error;
    /// One pass of the discovery loop: accept new substreams, pump every
    /// substream's I/O, gossip announced addresses to a couple of random
    /// peers, then yield once per pending batch of discovered nodes.
    fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
        debug!("Discovery.poll()");
        self.recv_substreams()?;
        self.check_interval();
        // Collects `addr` for announcement unless global-ip-only filtering
        // rejects it (unresolvable multiaddrs are rejected too).
        let announce_fn =
            |announce_multiaddrs: &mut Vec<Multiaddr>, global_ip_only: bool, addr: &Multiaddr| {
                if !global_ip_only
                    || multiaddr_to_socketaddr(addr)
                        .map(|addr| is_reachable(addr.ip()))
                        .unwrap_or_default()
                {
                    announce_multiaddrs.push(addr.clone());
                }
            };
        let mut announce_multiaddrs = Vec::new();
        // Pump every substream: receive, send, and collect announcements.
        for (key, value) in self.substreams.iter_mut() {
            value.check_timer();
            match value.receive_messages(&mut self.addr_mgr) {
                Ok(Some((session_id, nodes_list))) => {
                    for nodes in nodes_list {
                        self.pending_nodes
                            .push_back((key.clone(), session_id, nodes));
                    }
                }
                Ok(None) => {
                    // stream close
                    self.dead_keys.insert(key.clone());
                }
                Err(err) => {
                    debug!("substream {:?} receive messages error: {:?}", key, err);
                    // remove the substream
                    self.dead_keys.insert(key.clone());
                }
            }
            match value.send_messages() {
                Ok(_) => {}
                Err(err) => {
                    debug!("substream {:?} send messages error: {:?}", key, err);
                    // remove the substream
                    self.dead_keys.insert(key.clone());
                }
            }
            if value.announce {
                // Only listen addresses are announced.
                if let RemoteAddress::Listen(ref addr) = value.remote_addr {
                    announce_fn(&mut announce_multiaddrs, self.global_ip_only, addr)
                }
                value.announce = false;
                value.last_announce = Some(Instant::now());
            }
        }
        // Reap substreams flagged dead above and forget their addresses in
        // every surviving substream's known-address set.
        let mut dead_addr = Vec::default();
        for key in self.dead_keys.drain() {
            if let Some(addr) = self.substreams.remove(&key) {
                dead_addr.push(RawAddr::try_from(addr.remote_addr.into_inner()).unwrap());
            }
        }
        if !dead_addr.is_empty() {
            self.substreams
                .values_mut()
                .for_each(|value| value.addr_known.remove(dead_addr.iter()));
        }
        // Gossip each announced address to at most 2 randomly chosen
        // substreams that have not yet seen it.
        let mut rng = rand::thread_rng();
        let mut remain_keys = self.substreams.keys().cloned().collect::<Vec<_>>();
        debug!("announce_multiaddrs: {:?}", announce_multiaddrs);
        for announce_multiaddr in announce_multiaddrs.into_iter() {
            let announce_addr = RawAddr::try_from(announce_multiaddr.clone()).unwrap();
            remain_keys.shuffle(&mut rng);
            for i in 0..2 {
                if let Some(key) = remain_keys.get(i) {
                    if let Some(value) = self.substreams.get_mut(key) {
                        debug!(
                            ">> send {} to: {:?}, contains: {}",
                            announce_multiaddr,
                            value.remote_addr,
                            value.addr_known.contains(&announce_addr)
                        );
                        // Cap the per-substream announce queue at 10.
                        if value.announce_multiaddrs.len() < 10
                            && !value.addr_known.contains(&announce_addr)
                        {
                            value.announce_multiaddrs.push(announce_multiaddr.clone());
                            value.addr_known.insert(announce_addr);
                        }
                    }
                }
            }
        }
        // Flush queued announcements as `Nodes` messages, one address per
        // `Node` entry, then push them out.
        for (key, value) in self.substreams.iter_mut() {
            let announce_multiaddrs = value.announce_multiaddrs.split_off(0);
            if !announce_multiaddrs.is_empty() {
                let items = announce_multiaddrs
                    .into_iter()
                    .map(|addr| Node {
                        addresses: vec![addr],
                    })
                    .collect::<Vec<_>>();
                let nodes = Nodes {
                    announce: true,
                    items,
                };
                value
                    .pending_messages
                    .push_back(DiscoveryMessage::Nodes(nodes));
            }
            match value.send_messages() {
                Ok(_) => {}
                Err(err) => {
                    debug!("substream {:?} send messages error: {:?}", key, err);
                    // remove the substream
                    self.dead_keys.insert(key.clone());
                }
            }
        }
        // Yield one pending batch per poll; NotReady when nothing is queued
        // (the channel/timer polls above keep the task scheduled).
        match self.pending_nodes.pop_front() {
            Some((_key, session_id, nodes)) => {
                let addrs = nodes
                    .items
                    .into_iter()
                    .flat_map(|node| node.addresses.into_iter())
                    .collect::<Vec<_>>();
                self.addr_mgr.add_new_addrs(session_id, addrs);
                Ok(Async::Ready(Some(())))
            }
            None => Ok(Async::NotReady),
        }
    }
}
|
// This file is part of Substrate.
// Copyright (C) 2017-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! # Assets Module
//!
//! A simple, secure module for dealing with fungible assets.
//!
//! ## Overview
//!
//! The Assets module provides functionality for asset management of fungible asset classes
//! with a fixed supply, including:
//!
//! * Asset Issuance
//! * Asset Transfer
//! * Asset Destruction
//!
//! To use it in your runtime, you need to implement the assets [`Trait`](./trait.Trait.html).
//!
//! The supported dispatchable functions are documented in the [`Call`](./enum.Call.html) enum.
//!
//! ### Terminology
//!
//! * **Asset issuance:** The creation of a new asset, whose total supply will belong to the account
//! that issues the asset.
//! * **Asset transfer:** The action of transferring assets from one account to another.
//! * **Asset destruction:** The process of an account removing its entire holding of an asset.
//! * **Fungible asset:** An asset whose units are interchangeable.
//! * **Non-fungible asset:** An asset for which each unit has unique characteristics.
//!
//! ### Goals
//!
//! The assets system in Substrate is designed to make the following possible:
//!
//! * Issue a unique asset to its creator's account.
//! * Move assets between accounts.
//! * Remove an account's balance of an asset when requested by that account's owner and update the
//! asset's total supply.
//!
//! ## Interface
//!
//! ### Dispatchable Functions
//!
//! * `issue` - Issues the total supply of a new fungible asset to the account of the caller of the
//! function.
//! * `transfer` - Transfers an `amount` of units of fungible asset `id` from the balance of
//! the function caller's account (`origin`) to a `target` account.
//! * `destroy` - Destroys the entire holding of a fungible asset `id` associated with the account
//! that called the function.
//!
//! Please refer to the [`Call`](./enum.Call.html) enum and its associated variants for
//! documentation on each function.
//!
//! ### Public Functions
//! <!-- Original author of descriptions: @gavofyork -->
//!
//! * `balance` - Get the asset `id` balance of `who`.
//! * `total_supply` - Get the total supply of an asset `id`.
//!
//! Please refer to the [`Module`](./struct.Module.html) struct for details on publicly available
//! functions.
//!
//! ## Usage
//!
//! The following example shows how to use the Assets module in your runtime by exposing public
//! functions to:
//!
//! * Issue a new fungible asset for a token distribution event (airdrop).
//! * Query the fungible asset holding balance of an account.
//! * Query the total supply of a fungible asset that has been issued.
//!
//! ### Prerequisites
//!
//! Import the Assets module and types and derive your runtime's configuration traits from the
//! Assets module trait.
//!
//! ### Simple Code Snippet
//!
//! ```rust,ignore
//! use pallet_assets as assets;
//! use frame_support::{decl_module, dispatch, ensure};
//! use frame_system::ensure_signed;
//!
//! pub trait Trait: assets::Trait { }
//!
//! decl_module! {
//! pub struct Module<T: Trait> for enum Call where origin: T::Origin {
//! pub fn issue_token_airdrop(origin) -> dispatch::DispatchResult {
//! let sender = ensure_signed(origin).map_err(|e| e.as_str())?;
//!
//! const ACCOUNT_ALICE: u64 = 1;
//! const ACCOUNT_BOB: u64 = 2;
//! const COUNT_AIRDROP_RECIPIENTS: u64 = 2;
//! const TOKENS_FIXED_SUPPLY: u64 = 100;
//!
//! ensure!(!COUNT_AIRDROP_RECIPIENTS.is_zero(), "Divide by zero error.");
//!
//! let asset_id = Self::next_asset_id();
//!
//! <NextAssetId<T>>::mutate(|asset_id| *asset_id += 1);
//! <Balances<T>>::insert((asset_id, &ACCOUNT_ALICE), TOKENS_FIXED_SUPPLY / COUNT_AIRDROP_RECIPIENTS);
//! <Balances<T>>::insert((asset_id, &ACCOUNT_BOB), TOKENS_FIXED_SUPPLY / COUNT_AIRDROP_RECIPIENTS);
//! <TotalSupply<T>>::insert(asset_id, TOKENS_FIXED_SUPPLY);
//!
//! Self::deposit_event(RawEvent::Issued(asset_id, sender, TOKENS_FIXED_SUPPLY));
//! Ok(())
//! }
//! }
//! }
//! ```
//!
//! ## Assumptions
//!
//! Below are assumptions that must be held when using this module. If any of
//! them are violated, the behavior of this module is undefined.
//!
//! * The total count of assets should be less than `Trait::AssetId::max_value()`.
//!
//! ## Related Modules
//!
//! * [`System`](../frame_system/index.html)
//! * [`Support`](../frame_support/index.html)
// Ensure we're `no_std` when compiling for Wasm.
#![cfg_attr(not(feature = "std"), no_std)]
use frame_support::{decl_error, decl_event, decl_module, decl_storage, ensure, Parameter};
use frame_system::ensure_signed;
use sp_runtime::traits::One;
use sp_runtime::traits::{AtLeast32Bit, AtLeast32BitUnsigned, Member, StaticLookup, Zero};
/// The module configuration trait.
///
/// Implement this for your runtime to use the Assets module.
pub trait Trait: frame_system::Trait {
    /// The overarching event type.
    type Event: From<Event<Self>> + Into<<Self as frame_system::Trait>::Event>;
    /// The units in which we record balances.
    type Balance: Member + Parameter + AtLeast32BitUnsigned + Default + Copy;
    /// The arithmetic type of asset identifier.
    type AssetId: Parameter + AtLeast32Bit + Default + Copy;
}
// Dispatchable calls of the Assets module: issue, transfer, destroy.
decl_module! {
    pub struct Module<T: Trait> for enum Call where origin: T::Origin {
        type Error = Error<T>;
        fn deposit_event() = default;
        /// Issue a new class of fungible assets. There are, and will only ever be, `total`
        /// such assets and they'll all belong to the `origin` initially. It will have an
        /// identifier `AssetId` instance: this will be specified in the `Issued` event.
        ///
        /// # <weight>
        /// - `O(1)`
        /// - 1 storage mutation (codec `O(1)`).
        /// - 2 storage writes (codec `O(1)`).
        /// - 1 event.
        /// # </weight>
        #[weight = 0]
        fn issue(origin, #[compact] total: T::Balance) {
            let origin = ensure_signed(origin)?;
            let id = Self::next_asset_id();
            // Module docs assume the asset count stays below
            // AssetId::max_value(), so this increment is not checked.
            <NextAssetId<T>>::mutate(|id| *id += One::one());
            <Balances<T>>::insert((id, &origin), total);
            <TotalSupply<T>>::insert(id, total);
            Self::deposit_event(RawEvent::Issued(id, origin, total));
        }
        /// Move some assets from one holder to another.
        ///
        /// # <weight>
        /// - `O(1)`
        /// - 1 static lookup
        /// - 2 storage mutations (codec `O(1)`).
        /// - 1 event.
        /// # </weight>
        #[weight = 0]
        fn transfer(origin,
            #[compact] id: T::AssetId,
            target: <T::Lookup as StaticLookup>::Source,
            #[compact] amount: T::Balance
        ) {
            let origin = ensure_signed(origin)?;
            let origin_account = (id, origin.clone());
            let origin_balance = <Balances<T>>::get(&origin_account);
            let target = T::Lookup::lookup(target)?;
            // All checks before any state change (verify-first).
            ensure!(!amount.is_zero(), Error::<T>::AmountZero);
            ensure!(origin_balance >= amount, Error::<T>::BalanceLow);
            Self::deposit_event(RawEvent::Transferred(id, origin, target.clone(), amount));
            <Balances<T>>::insert(origin_account, origin_balance - amount);
            // Cannot exceed Balance range: per-asset balances are bounded by
            // the fixed total supply minted in `issue`.
            <Balances<T>>::mutate((id, target), |balance| *balance += amount);
        }
        /// Destroy any assets of `id` owned by `origin`.
        ///
        /// # <weight>
        /// - `O(1)`
        /// - 1 storage mutation (codec `O(1)`).
        /// - 1 storage deletion (codec `O(1)`).
        /// - 1 event.
        /// # </weight>
        #[weight = 0]
        fn destroy(origin, #[compact] id: T::AssetId) {
            let origin = ensure_signed(origin)?;
            // `take` removes the balance entry and returns its value.
            let balance = <Balances<T>>::take((id, &origin));
            ensure!(!balance.is_zero(), Error::<T>::BalanceZero);
            <TotalSupply<T>>::mutate(id, |total_supply| *total_supply -= balance);
            Self::deposit_event(RawEvent::Destroyed(id, origin, balance));
        }
    }
}
// Events emitted by the Assets module; see the per-variant docs below.
decl_event! {
    pub enum Event<T> where
        <T as frame_system::Trait>::AccountId,
        <T as Trait>::Balance,
        <T as Trait>::AssetId,
    {
        /// Some assets were issued. \[asset_id, owner, total_supply\]
        Issued(AssetId, AccountId, Balance),
        /// Some assets were transferred. \[asset_id, from, to, amount\]
        Transferred(AssetId, AccountId, AccountId, Balance),
        /// Some assets were destroyed. \[asset_id, owner, balance\]
        Destroyed(AssetId, AccountId, Balance),
    }
}
// Dispatch errors raised by this module's calls.
decl_error! {
    pub enum Error for Module<T: Trait> {
        /// Transfer amount should be non-zero
        AmountZero,
        /// Account balance must be greater than or equal to the transfer amount
        BalanceLow,
        /// Balance should be non-zero
        BalanceZero,
    }
}
// Storage: per-(asset, account) balances, the next free asset id, and each
// asset's fixed total supply.
decl_storage! {
    trait Store for Module<T: Trait> as Assets {
        /// The number of units of assets held by any given account.
        Balances: map hasher(blake2_128_concat) (T::AssetId, T::AccountId) => T::Balance;
        /// The next asset identifier up for grabs.
        NextAssetId get(fn next_asset_id): T::AssetId;
        /// The total unit supply of an asset.
        ///
        /// TWOX-NOTE: `AssetId` is trusted, so this is safe.
        TotalSupply: map hasher(twox_64_concat) T::AssetId => T::Balance;
    }
}
// The main implementation block for the module.
impl<T: Trait> Module<T> {
    // Public immutables
    /// Get the asset `id` balance of `who`.
    // NOTE(review): absent keys yield `T::Balance::default()` (zero) per the
    // storage map's default-value behavior.
    pub fn balance(id: T::AssetId, who: T::AccountId) -> T::Balance {
        <Balances<T>>::get((id, who))
    }
    /// Get the total supply of an asset `id`.
    pub fn total_supply(id: T::AssetId) -> T::Balance {
        <TotalSupply<T>>::get(id)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use frame_support::{
        assert_noop, assert_ok, impl_outer_origin, parameter_types, weights::Weight,
    };
    use sp_core::H256;
    use sp_runtime::{
        testing::Header,
        traits::{BlakeTwo256, IdentityLookup},
        Perbill,
    };
    impl_outer_origin! {
        pub enum Origin for Test where system = frame_system {}
    }
    // Minimal mock runtime wiring frame_system and this pallet together.
    #[derive(Clone, Eq, PartialEq)]
    pub struct Test;
    parameter_types! {
        pub const BlockHashCount: u64 = 250;
        pub const MaximumBlockWeight: Weight = 1024;
        pub const MaximumBlockLength: u32 = 2 * 1024;
        pub const AvailableBlockRatio: Perbill = Perbill::one();
    }
    impl frame_system::Trait for Test {
        type BaseCallFilter = ();
        type Origin = Origin;
        type Index = u64;
        type Call = ();
        type BlockNumber = u64;
        type Hash = H256;
        type Hashing = BlakeTwo256;
        type AccountId = u64;
        type Lookup = IdentityLookup<Self::AccountId>;
        type Header = Header;
        type Event = ();
        type BlockHashCount = BlockHashCount;
        type MaximumBlockWeight = MaximumBlockWeight;
        type DbWeight = ();
        type BlockExecutionWeight = ();
        type ExtrinsicBaseWeight = ();
        type MaximumExtrinsicWeight = MaximumBlockWeight;
        type AvailableBlockRatio = AvailableBlockRatio;
        type MaximumBlockLength = MaximumBlockLength;
        type Version = ();
        type PalletInfo = ();
        type AccountData = ();
        type OnNewAccount = ();
        type OnKilledAccount = ();
        type SystemWeightInfo = ();
    }
    impl Trait for Test {
        type Event = ();
        type Balance = u64;
        type AssetId = u32;
    }
    type Assets = Module<Test>;
    // Fresh, empty storage for each test.
    fn new_test_ext() -> sp_io::TestExternalities {
        frame_system::GenesisConfig::default().build_storage::<Test>().unwrap().into()
    }
    #[test]
    fn issuing_asset_units_to_issuer_should_work() {
        new_test_ext().execute_with(|| {
            // First issued asset gets id 0; issuer holds the full supply.
            assert_ok!(Assets::issue(Origin::signed(1), 100));
            assert_eq!(Assets::balance(0, 1), 100);
        });
    }
    #[test]
    fn querying_total_supply_should_work() {
        new_test_ext().execute_with(|| {
            assert_ok!(Assets::issue(Origin::signed(1), 100));
            assert_eq!(Assets::balance(0, 1), 100);
            assert_ok!(Assets::transfer(Origin::signed(1), 0, 2, 50));
            assert_eq!(Assets::balance(0, 1), 50);
            assert_eq!(Assets::balance(0, 2), 50);
            assert_ok!(Assets::transfer(Origin::signed(2), 0, 3, 31));
            assert_eq!(Assets::balance(0, 1), 50);
            assert_eq!(Assets::balance(0, 2), 19);
            assert_eq!(Assets::balance(0, 3), 31);
            // Destroying account 3's holding (31) shrinks supply: 100 - 31 = 69.
            assert_ok!(Assets::destroy(Origin::signed(3), 0));
            assert_eq!(Assets::total_supply(0), 69);
        });
    }
    // NOTE(review): despite the name, this transfers 50 of an available 100 —
    // i.e. within balance; it duplicates the happy-path transfer scenario.
    #[test]
    fn transferring_amount_above_available_balance_should_work() {
        new_test_ext().execute_with(|| {
            assert_ok!(Assets::issue(Origin::signed(1), 100));
            assert_eq!(Assets::balance(0, 1), 100);
            assert_ok!(Assets::transfer(Origin::signed(1), 0, 2, 50));
            assert_eq!(Assets::balance(0, 1), 50);
            assert_eq!(Assets::balance(0, 2), 50);
        });
    }
    #[test]
    fn transferring_amount_more_than_available_balance_should_not_work() {
        new_test_ext().execute_with(|| {
            assert_ok!(Assets::issue(Origin::signed(1), 100));
            assert_eq!(Assets::balance(0, 1), 100);
            assert_ok!(Assets::transfer(Origin::signed(1), 0, 2, 50));
            assert_eq!(Assets::balance(0, 1), 50);
            assert_eq!(Assets::balance(0, 2), 50);
            // After destroying the remaining 50, any further transfer fails.
            assert_ok!(Assets::destroy(Origin::signed(1), 0));
            assert_eq!(Assets::balance(0, 1), 0);
            assert_noop!(Assets::transfer(Origin::signed(1), 0, 1, 50), Error::<Test>::BalanceLow);
        });
    }
    #[test]
    fn transferring_less_than_one_unit_should_not_work() {
        new_test_ext().execute_with(|| {
            assert_ok!(Assets::issue(Origin::signed(1), 100));
            assert_eq!(Assets::balance(0, 1), 100);
            assert_noop!(Assets::transfer(Origin::signed(1), 0, 2, 0), Error::<Test>::AmountZero);
        });
    }
    #[test]
    fn transferring_more_units_than_total_supply_should_not_work() {
        new_test_ext().execute_with(|| {
            assert_ok!(Assets::issue(Origin::signed(1), 100));
            assert_eq!(Assets::balance(0, 1), 100);
            assert_noop!(Assets::transfer(Origin::signed(1), 0, 2, 101), Error::<Test>::BalanceLow);
        });
    }
    #[test]
    fn destroying_asset_balance_with_positive_balance_should_work() {
        new_test_ext().execute_with(|| {
            assert_ok!(Assets::issue(Origin::signed(1), 100));
            assert_eq!(Assets::balance(0, 1), 100);
            assert_ok!(Assets::destroy(Origin::signed(1), 0));
        });
    }
    #[test]
    fn destroying_asset_balance_with_zero_balance_should_not_work() {
        new_test_ext().execute_with(|| {
            assert_ok!(Assets::issue(Origin::signed(1), 100));
            assert_eq!(Assets::balance(0, 2), 0);
            assert_noop!(Assets::destroy(Origin::signed(2), 0), Error::<Test>::BalanceZero);
        });
    }
}
|
// Peripheral driver modules exposed by this HAL.
pub mod clock;
pub mod pwm;
pub mod sercom;
pub mod timer;
pub mod trng;
// ADC support is gated behind the `unproven` feature flag.
#[cfg(feature="unproven")]
pub mod adc;
|
/**
* File format: (Version \n\0\0\1)
* Magic number:
* "BKTree: " + "0000\n"
* Checksum: "SHA256: " + hex sha-256 of the remainder of the file following this newline + "\n---\n"
* CBOR encoded header as a map:
* "Created-On": ISO-8601 timestamp
* "Node-Format": "8 bits distance, 8 bits child"
* "Node-Bytes": integer, node storage size
* "Node-Offset": integer, byte offset after the end of the header where nodes start
* Should be "0\n"
* "Node-Count": optional, integer, number of nodes
* "Key-Format": "fixed 64 bits" (future work: "variable length\n")
* "Key-Offset": integer, byte offset after header where keys start
* "Key-Bytes": integer, key storage size
 * "Padding": optional, '.' repeated (0 to 63 times) until the byte after the end
* of header marker is 64-byte aligned from the start of the file.
*
 * Binary data: Offsets in the header start counting from here. The first byte of the node array
 * is at offset 0.
* * node array
* * 0 padding to next 64-byte-aligned position from the start of the file.
* * key array
*/
//use memmap::MmapOptions;
use memmap::Mmap;
use std::fs::File;
use std::io::Result as IOResult;
use std::io::{BufRead, BufReader};
use std::io::{Seek, SeekFrom};
//use std::error::Error;
use sha2::{Digest, Sha256};
use std::error;
use std::io;
/// Memory-maps `filename` read-only and returns the mapping.
///
/// NOTE(review): `offset` and `length` are currently ignored — the whole file
/// is mapped (cf. the commented-out `MmapOptions` line below); confirm whether
/// callers expect a windowed mapping instead.
fn open_mmap(filename: &str, offset: usize, length: usize) -> IOResult<Mmap> {
    let file = File::open(filename)?;
    // let mmap = unsafe { MmapOptions::new().map(&file)? };
    // SAFETY assumption: the mapping is only sound while no other process
    // truncates or mutates the underlying file — upheld by convention only.
    let mmap = unsafe { Mmap::map(&file)? };
    Ok(mmap)
}
/// Strips leading elements equal to `val` from a sequence, returning the
/// remainder as an owned value.
///
/// Unlike `str::trim_start_matches` (which returns a borrowed slice), these
/// implementations return owned copies.
trait TrimStart {
    type Elt;
    fn trim_start_matches(&self, val: Self::Elt) -> Self;
}
impl TrimStart for String {
    type Elt = char;
    /// Returns `self` with every leading occurrence of `val` removed.
    fn trim_start_matches(&self, val: char) -> Self {
        // Bug fix: the previous version consumed the first non-matching char
        // from the iterator before collecting, silently dropping it from the
        // result (e.g. "hello".trim_start_matches('x') returned "ello").
        self.chars().skip_while(|&c| c == val).collect()
    }
}
impl TrimStart for Vec<u8> {
    type Elt = u8;
    /// Returns `self` with every leading occurrence of `val` removed.
    fn trim_start_matches(&self, val: u8) -> Self {
        // Bug fix: the previous version indexed `self[i]` without a bounds
        // check and panicked on an empty vector or one consisting entirely
        // of `val` bytes.
        let start = self.iter().position(|&b| b != val).unwrap_or(self.len());
        self[start..].to_vec()
    }
}
/// CBOR-encoded descriptor header; fields are serialized under the
/// `Name-Like` keys described in the file-format comment above.
#[derive(Debug, Default, Deserialize, Serialize)]
pub struct FileDescrHeader {
    /// ISO-8601 creation timestamp.
    #[serde(rename = "Created-On")]
    pub created_on: String,
    /// Node layout descriptor, e.g. "8 bits distance, 8 bits child".
    #[serde(rename = "Node-Format")]
    pub node_format: String,
    /// Node storage size in bytes.
    #[serde(rename = "Node-Bytes")]
    pub node_bytes: u64,
    /// Byte offset of the node array, counted from the end of the header.
    #[serde(rename = "Node-Offset")]
    pub node_offset: u64,
    /// Number of nodes.
    // NOTE(review): the format comment says Node-Count is optional, but this
    // field is required on deserialization (no Option/default) — confirm.
    #[serde(rename = "Node-Count")]
    pub node_count: u64,
    /// Key layout descriptor, e.g. "fixed 64 bits".
    #[serde(rename = "Key-Format")]
    pub key_format: String,
    /// Byte offset of the key array, counted from the end of the header.
    #[serde(rename = "Key-Offset")]
    pub key_offset: u64,
    /// Key storage size in bytes.
    #[serde(rename = "Key-Bytes")]
    pub key_bytes: u64,
    // Alignment filler ('.' repeated); sized by `encode`.
    #[serde(rename = "Padding", default)]
    padding: String,
}
impl FileDescrHeader {
    /// Serializes the header as CBOR, sizing the `Padding` field so the byte
    /// following the encoded header is 64-byte aligned from file start
    /// (`offset` = bytes already written before the header).
    pub fn encode(&mut self, offset: usize) -> Vec<u8> {
        // Ensure 64 byte alignment
        const ALIGNMENT: usize = 64;
        // First pass: encode with empty padding to measure the base length.
        self.padding = "".to_string();
        let mut buffer = serde_cbor::to_vec(&self).unwrap();
        // NOTE(review): this assumes that growing `padding` by n characters
        // grows the CBOR output by exactly n + 1 bytes, but CBOR text strings
        // only gain the extra length byte once they exceed 23 bytes — short
        // paddings may trip the assert below; confirm with real offsets.
        let padding = ALIGNMENT - (offset + buffer.len() + 1) % ALIGNMENT;
        self.padding = ".".repeat(padding);
        // Second pass: re-encode with the padding in place.
        buffer = serde_cbor::to_vec(&self).unwrap();
        assert_eq!(0, (offset + buffer.len()) % ALIGNMENT);
        return buffer;
    }
}
/// Parsed file prefix: magic/version line, checksum line, and the CBOR
/// descriptor.
#[derive(Debug, Default)]
pub struct Header {
    // Raw magic/version line bytes as read from the file.
    version: Vec<u8>,
    // Hex SHA-256 digest bytes of the remainder of the file.
    checksum: Vec<u8>,
    // NOTE(review): `read` never populates this — it stays Default; the CBOR
    // descriptor is presumably decoded elsewhere. Confirm.
    descr: FileDescrHeader,
}
/// Expected magic line.
/// NOTE(review): `Header::read` compares bytes obtained via
/// `read_until('\n')` — which include the trailing newline — against this
/// newline-less constant, so the comparison looks unsatisfiable; the format
/// comment above also spells the magic "BKTree: " with different case.
/// Confirm against the writer side.
pub const MAGIC_VERSION: &'static str = "BKTREE: 0000";
/// Checksum-type label expected at the start of the checksum line.
pub const HASH_HEADER_NAME: &'static str = "SHA256";
/// NOTE(review): presumably the byte length of the fixed prefix before the
/// CBOR descriptor — not referenced in the visible code; confirm.
pub const PREFIX_SIZE: usize = 86;
impl Header {
    /// Reads the file prefix (magic line and checksum line) and, when
    /// `verify_checksum` is set, hashes the remainder of the file and
    /// compares it against the stored digest. On success the reader is left
    /// positioned at the start of the CBOR descriptor.
    ///
    /// NOTE(review): `header.descr` is never populated here.
    pub fn read(
        file: &mut File,
        verify_checksum: bool,
    ) -> Result<Header, Box<dyn error::Error + 'static>> {
        let mut header: Header = Default::default();
        let mut reader = BufReader::new(file);
        // Check the magic number
        // NOTE(review): read_until keeps the '\n' delimiter in `version`,
        // while MAGIC_VERSION has no newline — see note on the constant.
        reader.read_until('\n' as u8, &mut header.version)?;
        if header.version != MAGIC_VERSION.as_bytes() {
            return Err("Unknown file format (expected \"BKTREE: 0000\")".into());
        }
        // Read the checksum
        // NOTE(review): read_until keeps the ':' delimiter in
        // `checksum_type`, but HASH_HEADER_NAME has none — confirm.
        let mut checksum_type: Vec<u8> = Vec::new();
        reader.read_until(':' as u8, &mut checksum_type)?;
        if checksum_type != HASH_HEADER_NAME.as_bytes() {
            return Err("Unknown checksum format (expected \"SHA256\")".into());
        }
        let mut checksum: Vec<u8> = Vec::new();
        // NOTE(review): the io::Result of this read is silently discarded
        // (no `?`), and the trailing '\n' stays in `checksum`; only leading
        // spaces are trimmed below — confirm intended.
        reader.read_until('\n' as u8, &mut checksum);
        header.checksum = checksum.trim_start_matches(' ' as u8);
        // Remember where the CBOR descriptor starts so we can rewind after
        // the full-file hash below.
        let descr_start = reader.seek(SeekFrom::Current(0))?;
        if verify_checksum {
            // sha2's Digest implements io::Write, so io::copy streams the
            // rest of the file into the hasher.
            let mut hasher = Sha256::new();
            let n = io::copy(&mut reader, &mut hasher)?;
            let found = format!("{:x}", hasher.result());
            if found.as_bytes() != header.checksum.as_slice() {
                return Err(format!(
                    "Checksum failure. Found {:?}, expected {:?}",
                    found, header.checksum
                )
                .into());
            }
        }
        reader.seek(SeekFrom::Start(descr_start))?;
        return Ok(header);
    }
}
|
extern crate stream_cipher;
extern crate block_cipher_trait;
extern crate salsa20_family;
use block_cipher_trait::generic_array::GenericArray;
use salsa20_family::Salsa20;
use stream_cipher::NewStreamCipher;
use stream_cipher::StreamCipher;
use stream_cipher::SyncStreamCipherSeek;
// Salsa20 test-vector fixtures. Key/IV sizes match the cipher's parameters
// (256-bit key, 64-bit nonce).
#[cfg(test)]
const KEY_BYTES: usize = 32;
#[cfg(test)]
const IV_BYTES: usize = 8;
// All-zero key.
#[cfg(test)]
const KEY0: [u8; KEY_BYTES] =
    [ 0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00 ];
// Key with only the top bit of the first byte set.
#[cfg(test)]
const KEY1: [u8; KEY_BYTES] =
    [ 0x80, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00 ];
// Sequential-byte key used by the longer tests.
#[cfg(test)]
const KEY_LONG: [u8; KEY_BYTES] =
    [ 1, 2, 3, 4,
      5, 6, 7, 8,
      9, 10, 11, 12,
      13, 14, 15, 16,
      17, 18, 19, 20,
      21, 22, 23, 24,
      25, 26, 27, 28,
      29, 30, 31, 32 ];
// All-zero nonce.
#[cfg(test)]
const IV0: [u8; IV_BYTES] = [0; IV_BYTES];
// Nonce with only the top bit of the first byte set.
#[cfg(test)]
const IV1: [u8; IV_BYTES] =
    [ 0x80, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00 ];
// Nonce with only the low bit of the last byte set.
#[cfg(test)]
const IVHI: [u8; IV_BYTES] =
    [ 0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x01 ];
// Arbitrary nonce (digits of pi) for the long keystream test.
#[cfg(test)]
const IV_LONG: [u8; IV_BYTES] =
    [ 3, 1, 4, 1,
      5, 9, 2, 6 ];
// Expected first keystream block for (KEY1, IV0).
#[cfg(test)]
const EXPECTED_KEY1_IV0: [u8; 64] = [
    0xe3, 0xbe, 0x8f, 0xdd,
    0x8b, 0xec, 0xa2, 0xe3,
    0xea, 0x8e, 0xf9, 0x47,
    0x5b, 0x29, 0xa6, 0xe7,
    0x00, 0x39, 0x51, 0xe1,
    0x09, 0x7a, 0x5c, 0x38,
    0xd2, 0x3b, 0x7a, 0x5f,
    0xad, 0x9f, 0x68, 0x44,
    0xb2, 0x2c, 0x97, 0x55,
    0x9e, 0x27, 0x23, 0xc7,
    0xcb, 0xbd, 0x3f, 0xe4,
    0xfc, 0x8d, 0x9a, 0x07,
    0x44, 0x65, 0x2a, 0x83,
    0xe7, 0x2a, 0x9c, 0x46,
    0x18, 0x76, 0xaf, 0x4d,
    0x7e, 0xf1, 0xa1, 0x17
];
// Expected first keystream block for (KEY0, IV1).
#[cfg(test)]
const EXPECTED_KEY0_IV1: [u8; 64] = [
    0x2a, 0xba, 0x3d, 0xc4,
    0x5b, 0x49, 0x47, 0x00,
    0x7b, 0x14, 0xc8, 0x51,
    0xcd, 0x69, 0x44, 0x56,
    0xb3, 0x03, 0xad, 0x59,
    0xa4, 0x65, 0x66, 0x28,
    0x03, 0x00, 0x67, 0x05,
    0x67, 0x3d, 0x6c, 0x3e,
    0x29, 0xf1, 0xd3, 0x51,
    0x0d, 0xfc, 0x04, 0x05,
    0x46, 0x3c, 0x03, 0x41,
    0x4e, 0x0e, 0x07, 0xe3,
    0x59, 0xf1, 0xf1, 0x81,
    0x6c, 0x68, 0xb2, 0x43,
    0x4a, 0x19, 0xd3, 0xee,
    0xe0, 0x46, 0x48, 0x73
];
// Expected first keystream block for (KEY0, IVHI).
#[cfg(test)]
const EXPECTED_KEY0_IVHI: [u8; 64] = [
    0xb4, 0x7f, 0x96, 0xaa,
    0x96, 0x78, 0x61, 0x35,
    0x29, 0x7a, 0x3c, 0x4e,
    0xc5, 0x6a, 0x61, 0x3d,
    0x0b, 0x80, 0x09, 0x53,
    0x24, 0xff, 0x43, 0x23,
    0x9d, 0x68, 0x4c, 0x57,
    0xff, 0xe4, 0x2e, 0x1c,
    0x44, 0xf3, 0xcc, 0x01,
    0x16, 0x13, 0xdb, 0x6c,
    0xdc, 0x88, 0x09, 0x99,
    0xa1, 0xe6, 0x5a, 0xed,
    0x12, 0x87, 0xfc, 0xb1,
    0x1c, 0x83, 0x9c, 0x37,
    0x12, 0x07, 0x65, 0xaf,
    0xa7, 0x3e, 0x50, 0x75
];
// Expected first 256 keystream bytes for (KEY_LONG, IV_LONG); also the
// reference for the seek/offset test below.
#[cfg(test)]
const EXPECTED_LONG: [u8; 256] = [
    0x6e, 0xbc, 0xbd, 0xbf,
    0x76, 0xfc, 0xcc, 0x64,
    0xab, 0x05, 0x54, 0x2b,
    0xee, 0x8a, 0x67, 0xcb,
    0xc2, 0x8f, 0xa2, 0xe1,
    0x41, 0xfb, 0xef, 0xbb,
    0x3a, 0x2f, 0x9b, 0x22,
    0x19, 0x09, 0xc8, 0xd7,
    0xd4, 0x29, 0x52, 0x58,
    0xcb, 0x53, 0x97, 0x70,
    0xdd, 0x24, 0xd7, 0xac,
    0x34, 0x43, 0x76, 0x9f,
    0xfa, 0x27, 0xa5, 0x0e,
    0x60, 0x64, 0x42, 0x64,
    0xdc, 0x8b, 0x6b, 0x61,
    0x26, 0x83, 0x37, 0x2e,
    0x08, 0x5d, 0x0a, 0x12,
    0xbf, 0x24, 0x0b, 0x18,
    0x9c, 0xe2, 0xb7, 0x82,
    0x89, 0x86, 0x2b, 0x56,
    0xfd, 0xc9, 0xfc, 0xff,
    0xc3, 0x3b, 0xef, 0x93,
    0x25, 0xa2, 0xe8, 0x1b,
    0x98, 0xfb, 0x3f, 0xb9,
    0xaa, 0x04, 0xcf, 0x43,
    0x46, 0x15, 0xce, 0xff,
    0xeb, 0x98, 0x5c, 0x1c,
    0xb0, 0x8d, 0x84, 0x40,
    0xe9, 0x0b, 0x1d, 0x56,
    0xdd, 0xea, 0xea, 0x16,
    0xd9, 0xe1, 0x5a, 0xff,
    0xff, 0x1f, 0x69, 0x8c,
    0x48, 0x3c, 0x7a, 0x46,
    0x6a, 0xf1, 0xfe, 0x06,
    0x25, 0x74, 0xad, 0xfd,
    0x2b, 0x06, 0xa6, 0x2b,
    0x4d, 0x98, 0x44, 0x07,
    0x19, 0xea, 0x77, 0x63,
    0x85, 0xc4, 0x70, 0x34,
    0x9a, 0x7e, 0xd6, 0x96,
    0x95, 0x83, 0x46, 0x3e,
    0xd5, 0xd2, 0x6b, 0x8f,
    0xef, 0xcc, 0xb2, 0x05,
    0xda, 0x0f, 0x5b, 0xfa,
    0x98, 0xc7, 0x78, 0x12,
    0xfe, 0x75, 0x6b, 0x09,
    0xea, 0xcc, 0x28, 0x2a,
    0xa4, 0x2f, 0x4b, 0xaf,
    0xa7, 0x96, 0x33, 0x18,
    0x90, 0x46, 0xe2, 0xb2,
    0x0f, 0x35, 0xb3, 0xe0,
    0xe5, 0x4a, 0xa3, 0xb9,
    0x29, 0xe2, 0x3c, 0x0f,
    0x47, 0xdc, 0x7b, 0xcd,
    0x4f, 0x92, 0x8b, 0x2a,
    0x97, 0x64, 0xbe, 0x7d,
    0x4b, 0x8a, 0x50, 0xf9,
    0x80, 0xa5, 0x0b, 0x35,
    0xad, 0x80, 0x87, 0x37,
    0x5e, 0x0c, 0x55, 0x6e,
    0xcb, 0xe6, 0xa7, 0x16,
    0x1e, 0x86, 0x53, 0xce,
    0x93, 0x91, 0xe1, 0xe6,
    0x71, 0x0e, 0xd4, 0xf1
];
#[test]
fn salsa20_KEY1_IV0() {
    // Encrypting an all-zero buffer yields the raw keystream.
    let mut cipher = Salsa20::new(&GenericArray::from(KEY1), &GenericArray::from(IV0));
    let mut buf = [0; 64];
    cipher.encrypt(&mut buf);
    assert_eq!(&buf[..], &EXPECTED_KEY1_IV0[..]);
}
#[test]
fn salsa20_KEY0_IV1() {
    // Encrypting an all-zero buffer yields the raw keystream.
    let mut cipher = Salsa20::new(&GenericArray::from(KEY0), &GenericArray::from(IV1));
    let mut buf = [0; 64];
    cipher.encrypt(&mut buf);
    assert_eq!(&buf[..], &EXPECTED_KEY0_IV1[..]);
}
#[test]
fn salsa20_KEY0_IVHI() {
    // Encrypting an all-zero buffer yields the raw keystream.
    let mut cipher = Salsa20::new(&GenericArray::from(KEY0), &GenericArray::from(IVHI));
    let mut buf = [0; 64];
    cipher.encrypt(&mut buf);
    assert_eq!(&buf[..], &EXPECTED_KEY0_IVHI[..]);
}
#[test]
fn salsa20_LONG() {
    // Four consecutive keystream blocks in one call.
    let mut cipher = Salsa20::new(&GenericArray::from(KEY_LONG), &GenericArray::from(IV_LONG));
    let mut buf = [0; 256];
    cipher.encrypt(&mut buf);
    assert_eq!(&buf[..], &EXPECTED_LONG[..]);
}
#[test]
fn salsa20_offsets() {
    // For every split (idx, middle, last), seek to idx and encrypt the range
    // in two chunks; the result must match the contiguous reference stream.
    for idx in 0..256 {
        for middle in idx..256 {
            for last in middle..256 {
                let mut cipher =
                    Salsa20::new(&GenericArray::from(KEY_LONG), &GenericArray::from(IV_LONG));
                let mut buf = [0; 256];
                cipher.seek(idx as u64);
                cipher.encrypt(&mut buf[idx..middle]);
                cipher.encrypt(&mut buf[middle..last]);
                assert_eq!(&buf[idx..last], &EXPECTED_LONG[idx..last]);
            }
        }
    }
}
|
use std::net::SocketAddr;
use hydroflow::hydroflow_syntax;
use hydroflow::util::{UdpSink, UdpStream};
use crate::helpers::parse_command;
use crate::protocol::KVSMessage;
use crate::GraphType;
/// Runs the KVS client: forwards commands typed on stdin to the server and
/// prints responses as they arrive. Optionally dumps the dataflow graph in
/// the requested format before running.
pub(crate) async fn run_client(
    outbound: UdpSink,
    inbound: UdpStream,
    server_addr: SocketAddr,
    graph: Option<GraphType>,
) {
    println!("Client live!");
    let mut hf = hydroflow_syntax! {
        // set up channels
        outbound_chan = dest_sink_serde(outbound);
        // Deserialize inbound datagrams, drop the sender address, and split
        // Response messages from everything else.
        inbound_chan = source_stream_serde(inbound) -> map(Result::unwrap) -> map(|(m, _a)| m) -> demux(|m, var_args!(resps, errs)| match m {
            KVSMessage::Response {..} => resps.give(m),
            _ => errs.give(m),
        });
        inbound_chan[errs] -> for_each(|m| println!("Received unexpected message type: {:?}", m));
        // read in commands from stdin and forward to server
        source_stdin()
            -> filter_map(|line| parse_command(line.unwrap()))
            -> map(|msg| { (msg, server_addr) })
            -> outbound_chan;
        // print inbound msgs
        inbound_chan[resps] -> for_each(|m| println!("Got a Response: {:?}", m));
    };
    // Optionally emit the compiled dataflow graph before running.
    if let Some(graph) = graph {
        let serde_graph = hf
            .meta_graph()
            .expect("No graph found, maybe failed to parse.");
        match graph {
            GraphType::Mermaid => {
                println!("{}", serde_graph.to_mermaid());
            }
            GraphType::Dot => {
                println!("{}", serde_graph.to_dot())
            }
            GraphType::Json => {
                // JSON output not implemented yet.
                unimplemented!();
                // println!("{}", serde_graph.to_json())
            }
        }
    }
    // Drive the dataflow until the process is terminated.
    hf.run_async().await.unwrap();
}
|
/// Wraps an expression in `ordered_float::NotNan`, panicking at runtime if
/// the value is NaN. Intended for literals/values known to be non-NaN.
#[macro_export]
macro_rules! not_nan {
    ( $l:expr ) => {
        ordered_float::NotNan::new($l).unwrap()
    };
}
|
// svd2rust-generated register accessors for the CSR17 context-swap register;
// generated code, keep edits to a minimum.
#[doc = "Register `CSR17` reader"]
pub type R = crate::R<CSR17_SPEC>;
#[doc = "Register `CSR17` writer"]
pub type W = crate::W<CSR17_SPEC>;
#[doc = "Field `CSR17` reader - CSR17"]
pub type CSR17_R = crate::FieldReader<u32>;
#[doc = "Field `CSR17` writer - CSR17"]
pub type CSR17_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 32, O, u32>;
impl R {
    #[doc = "Bits 0:31 - CSR17"]
    #[inline(always)]
    pub fn csr17(&self) -> CSR17_R {
        // The single field spans the whole 32-bit register.
        CSR17_R::new(self.bits)
    }
}
impl W {
    #[doc = "Bits 0:31 - CSR17"]
    #[inline(always)]
    #[must_use]
    pub fn csr17(&mut self) -> CSR17_W<CSR17_SPEC, 0> {
        CSR17_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "context swap registers\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`csr17::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`csr17::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CSR17_SPEC;
impl crate::RegisterSpec for CSR17_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`csr17::R`](R) reader structure"]
impl crate::Readable for CSR17_SPEC {}
#[doc = "`write(|w| ..)` method takes [`csr17::W`](W) writer structure"]
impl crate::Writable for CSR17_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CSR17 to value 0"]
impl crate::Resettable for CSR17_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use crate::{
gui::{BuildContext, UiMessage, UiNode},
scene::{EditorScene, Selection},
send_sync_message,
sidebar::{
base::BaseSection, camera::CameraSection, decal::DecalSection, light::LightSection,
lod::LodGroupEditor, mesh::MeshSection, particle::ParticleSystemSection,
physics::PhysicsSection, sound::SoundSection, sprite::SpriteSection,
terrain::TerrainSection,
},
GameEngine, Message,
};
use rg3d::{
core::{color::Color, pool::Handle, scope_profile},
gui::{
border::BorderBuilder,
brush::Brush,
check_box::CheckBoxBuilder,
color::ColorFieldBuilder,
expander::ExpanderBuilder,
message::{MessageDirection, WidgetMessage},
numeric::NumericUpDownBuilder,
scroll_viewer::ScrollViewerBuilder,
stack_panel::StackPanelBuilder,
text::TextBuilder,
vec::vec3::Vec3EditorBuilder,
widget::WidgetBuilder,
window::{WindowBuilder, WindowTitle},
HorizontalAlignment, Thickness, VerticalAlignment,
},
};
use std::sync::mpsc::Sender;
mod base;
mod camera;
mod decal;
mod light;
mod lod;
mod mesh;
mod particle;
mod physics;
mod sound;
mod sprite;
mod terrain;
/// Height of one property row in the sidebar grids, in UI units.
const ROW_HEIGHT: f32 = 25.0;
/// Width of the label column, in UI units. Presumably consumed by the
/// section submodules — not referenced in this file's visible code.
const COLUMN_WIDTH: f32 = 140.0;
/// The "Properties" side panel: a window holding one collapsible section per
/// node kind, kept in sync with the current scene selection.
pub struct SideBar {
    pub window: Handle<UiNode>,
    // Content host; hidden whenever the selection is not a single object.
    scroll_viewer: Handle<UiNode>,
    base_section: BaseSection,
    lod_editor: LodGroupEditor,
    // Channel for pushing editor commands/messages out of the sidebar.
    sender: Sender<Message>,
    light_section: LightSection,
    camera_section: CameraSection,
    particle_system_section: ParticleSystemSection,
    sprite_section: SpriteSection,
    mesh_section: MeshSection,
    physics_section: PhysicsSection,
    sound_section: SoundSection,
    decal_section: DecalSection,
    pub terrain_section: TerrainSection,
}
/// Builds a vertically centered caption placed in column 0 of `row`.
fn make_text_mark(ctx: &mut BuildContext, text: &str, row: usize) -> Handle<UiNode> {
    let widget = WidgetBuilder::new()
        .with_vertical_alignment(VerticalAlignment::Center)
        .with_margin(Thickness::left(4.0))
        .on_row(row)
        .on_column(0);
    TextBuilder::new(widget).with_text(text).build(ctx)
}
/// Wraps `content` in a titled, collapsible section with a grey border.
fn make_section(name: &str, content: Handle<UiNode>, ctx: &mut BuildContext) -> Handle<UiNode> {
    // Header text shown on the expander.
    let header = TextBuilder::new(WidgetBuilder::new().with_margin(Thickness::left(3.0)))
        .with_vertical_text_alignment(VerticalAlignment::Center)
        .with_text(name)
        .build(ctx);
    // Collapsible container holding the section body.
    let expander = ExpanderBuilder::new(WidgetBuilder::new())
        .with_header(header)
        .with_content(content)
        .build(ctx);
    BorderBuilder::new(
        WidgetBuilder::new()
            .with_margin(Thickness::uniform(1.0))
            .with_child(expander)
            .with_foreground(Brush::Solid(Color::opaque(130, 130, 130))),
    )
    .build(ctx)
}
/// Builds a 3-component vector editor in column 1 of `row`.
fn make_vec3_input_field(ctx: &mut BuildContext, row: usize) -> Handle<UiNode> {
    let widget = WidgetBuilder::new()
        .with_margin(Thickness::uniform(1.0))
        .on_row(row)
        .on_column(1);
    Vec3EditorBuilder::new(widget).build(ctx)
}
/// Builds a clamped float spin-box in column 1 of `row`.
fn make_f32_input_field(
    ctx: &mut BuildContext,
    row: usize,
    min: f32,
    max: f32,
    step: f32,
) -> Handle<UiNode> {
    let widget = WidgetBuilder::new()
        .with_height(ROW_HEIGHT)
        .on_row(row)
        .with_margin(Thickness::uniform(1.0))
        .on_column(1);
    NumericUpDownBuilder::new(widget)
        .with_min_value(min)
        .with_max_value(max)
        .with_step(step)
        .build(ctx)
}
/// Builds an integer spin-box (precision 0) in column 1 of `row`.
fn make_int_input_field(
    ctx: &mut BuildContext,
    row: usize,
    min: i32,
    max: i32,
    step: i32,
) -> Handle<UiNode> {
    let widget = WidgetBuilder::new()
        .on_row(row)
        .with_margin(Thickness::uniform(1.0))
        .on_column(1);
    // The numeric widget is float-based; integer semantics come from the
    // zero precision plus integral min/max/step.
    NumericUpDownBuilder::new(widget)
        .with_min_value(min as f32)
        .with_max_value(max as f32)
        .with_step(step as f32)
        .with_precision(0)
        .build(ctx)
}
/// Builds a color picker field in column 1 of `row`.
fn make_color_input_field(ctx: &mut BuildContext, row: usize) -> Handle<UiNode> {
    let widget = WidgetBuilder::new()
        .on_row(row)
        .with_margin(Thickness::uniform(1.0))
        .on_column(1);
    ColorFieldBuilder::new(widget).build(ctx)
}
/// Builds a left-aligned check box in column 1 of `row`.
fn make_bool_input_field(ctx: &mut BuildContext, row: usize) -> Handle<UiNode> {
    let widget = WidgetBuilder::new()
        .with_horizontal_alignment(HorizontalAlignment::Left)
        .on_row(row)
        .with_margin(Thickness::uniform(1.0))
        .on_column(1);
    CheckBoxBuilder::new(widget).build(ctx)
}
impl SideBar {
    // Fix: line `for §ion in &[` was mojibake (an HTML-entity-corrupted
    // `for &section in &[`) and did not compile; restored below. No other
    // behavior changes.

    /// Builds the "Properties" window with all node sections stacked inside
    /// a scroll viewer (initially hidden until something is selected).
    pub fn new(ctx: &mut BuildContext, sender: Sender<Message>) -> Self {
        let scroll_viewer;
        let base_section = BaseSection::new(ctx);
        let lod_editor = LodGroupEditor::new(ctx, sender.clone());
        let light_section = LightSection::new(ctx, sender.clone());
        let camera_section = CameraSection::new(ctx, sender.clone());
        let particle_system_section = ParticleSystemSection::new(ctx, sender.clone());
        let sprite_section = SpriteSection::new(ctx, sender.clone());
        let mesh_section = MeshSection::new(ctx, sender.clone());
        let physics_section = PhysicsSection::new(ctx, sender.clone());
        let terrain_section = TerrainSection::new(ctx);
        let sound_section = SoundSection::new(ctx);
        let decal_section = DecalSection::new(ctx);
        let window = WindowBuilder::new(WidgetBuilder::new())
            .can_minimize(false)
            .with_content({
                scroll_viewer =
                    ScrollViewerBuilder::new(WidgetBuilder::new().with_visibility(false))
                        .with_content(
                            StackPanelBuilder::new(WidgetBuilder::new().with_children([
                                base_section.section,
                                light_section.section,
                                camera_section.section,
                                particle_system_section.section,
                                sprite_section.section,
                                mesh_section.section,
                                terrain_section.section,
                                physics_section.section,
                                sound_section.section,
                                decal_section.section,
                            ]))
                            .build(ctx),
                        )
                        .build(ctx);
                scroll_viewer
            })
            .with_title(WindowTitle::text("Properties"))
            .build(ctx);
        Self {
            scroll_viewer,
            window,
            base_section,
            sender,
            lod_editor,
            light_section,
            camera_section,
            particle_system_section,
            sprite_section,
            mesh_section,
            physics_section,
            terrain_section,
            sound_section,
            decal_section,
        }
    }

    /// Refreshes the sidebar widgets from the current scene selection:
    /// shows/hides the panel and the per-kind sections, then pushes the
    /// selected object's state into each section's widgets.
    pub fn sync_to_model(&mut self, editor_scene: &EditorScene, engine: &mut GameEngine) {
        scope_profile!();
        // The panel is only visible for a single-object selection.
        send_sync_message(
            &engine.user_interface,
            WidgetMessage::visibility(
                self.scroll_viewer,
                MessageDirection::ToWidget,
                editor_scene.selection.is_single_selection(),
            ),
        );
        let scene = &engine.scenes[editor_scene.scene];
        let ui = &engine.user_interface;
        match &editor_scene.selection {
            Selection::Graph(selection) => {
                if selection.is_single_selection() {
                    let node_handle = selection.nodes()[0];
                    if scene.graph.is_valid_handle(node_handle) {
                        let node = &scene.graph[node_handle];
                        let ui = &mut engine.user_interface;
                        // Graph selection: base section on, sound section off.
                        send_sync_message(
                            ui,
                            WidgetMessage::visibility(
                                self.base_section.section,
                                MessageDirection::ToWidget,
                                true,
                            ),
                        );
                        send_sync_message(
                            ui,
                            WidgetMessage::visibility(
                                self.sound_section.section,
                                MessageDirection::ToWidget,
                                false,
                            ),
                        );
                        // Each section decides for itself whether it applies
                        // to this node kind.
                        self.base_section.sync_to_model(node, ui);
                        self.lod_editor.sync_to_model(node, scene, ui);
                        self.light_section.sync_to_model(node, ui);
                        self.camera_section.sync_to_model(node, ui);
                        self.particle_system_section.sync_to_model(node, ui);
                        self.sprite_section.sync_to_model(node, ui);
                        self.decal_section.sync_to_model(node, ui);
                        self.mesh_section.sync_to_model(node, ui);
                        self.terrain_section.sync_to_model(node, ui);
                        self.physics_section.sync_to_model(editor_scene, engine);
                    }
                }
            }
            Selection::Sound(selection) => {
                // Sound selection: hide every graph-node section…
                for &section in &[
                    self.base_section.section,
                    self.sprite_section.section,
                    self.decal_section.section,
                    self.light_section.section,
                    self.camera_section.section,
                    self.particle_system_section.section,
                    self.mesh_section.section,
                    self.terrain_section.section,
                    self.physics_section.section,
                ] {
                    send_sync_message(
                        ui,
                        WidgetMessage::visibility(section, MessageDirection::ToWidget, false),
                    );
                }
                // …and show only the sound section.
                send_sync_message(
                    ui,
                    WidgetMessage::visibility(
                        self.sound_section.section,
                        MessageDirection::ToWidget,
                        true,
                    ),
                );
                if selection.is_single_selection() {
                    if let Some(first) = selection.first() {
                        let state = scene.sound_context.state();
                        if state.is_valid_handle(first) {
                            self.sound_section
                                .sync_to_model(state.source(first), &mut engine.user_interface);
                        }
                    }
                }
            }
            _ => {}
        }
    }

    /// Routes a UI message to whichever section owns the originating widget,
    /// letting it mutate the selected node/sound accordingly.
    pub fn handle_ui_message(
        &mut self,
        message: &UiMessage,
        editor_scene: &EditorScene,
        engine: &mut GameEngine,
    ) {
        scope_profile!();
        match &editor_scene.selection {
            Selection::Graph(selection) => {
                if selection.is_single_selection() {
                    self.physics_section
                        .handle_ui_message(message, editor_scene, engine);
                    let scene = &mut engine.scenes[editor_scene.scene];
                    let graph = &mut scene.graph;
                    let node_handle = selection.nodes()[0];
                    let node = &mut graph[node_handle];
                    // Only react to messages coming from widgets, not to
                    // our own sync messages sent toward them.
                    if message.direction() == MessageDirection::FromWidget {
                        self.light_section
                            .handle_ui_message(message, node, node_handle);
                        self.camera_section.handle_ui_message(
                            message,
                            node,
                            node_handle,
                            &engine.user_interface,
                            engine.resource_manager.clone(),
                        );
                        self.particle_system_section.handle_ui_message(
                            message,
                            node,
                            node_handle,
                            &engine.user_interface,
                        );
                        self.sprite_section
                            .handle_ui_message(message, node, node_handle);
                        self.decal_section.handle_ui_message(
                            message,
                            &mut engine.user_interface,
                            engine.resource_manager.clone(),
                            node_handle,
                            &self.sender,
                        );
                        self.mesh_section
                            .handle_ui_message(message, node, node_handle);
                        self.base_section.handle_ui_message(
                            message,
                            &self.sender,
                            node,
                            node_handle,
                            &mut engine.user_interface,
                            &mut self.lod_editor,
                        );
                        self.terrain_section.handle_ui_message(
                            message,
                            &mut engine.user_interface,
                            graph,
                            node_handle,
                            &self.sender,
                        );
                        self.lod_editor.handle_ui_message(
                            message,
                            node_handle,
                            scene,
                            &mut engine.user_interface,
                        );
                    }
                }
            }
            Selection::Sound(selection) => {
                if selection.is_single_selection() {
                    if let Some(first) = selection.first() {
                        let scene = &mut engine.scenes[editor_scene.scene];
                        let state = scene.sound_context.state();
                        if state.is_valid_handle(first) {
                            self.sound_section.handle_message(
                                message,
                                &self.sender,
                                state.source(first),
                                first,
                                &engine.user_interface,
                                engine.resource_manager.clone(),
                            );
                        }
                    }
                }
            }
            _ => {}
        }
    }
}
|
pub mod svg_movie;
|
//! This example shows how to add an IP address to the given link, with minimal error handling.
//! You need to be root to run this example.
use std::env;
use std::thread::spawn;
use futures::{Future, Stream};
use ipnetwork::IpNetwork;
use tokio_core::reactor::Core;
use netlink_packet_route::link::nlas::LinkNla;
use rtnetlink::{new_connection, ErrorKind};
/// Adds the given IP address to the named link.
/// NOTE(review): written against the futures-0.1 / tokio-core era rtnetlink
/// API — confirm against the crate version in Cargo.toml.
fn main() {
    // Parse the arguments
    let args: Vec<String> = env::args().collect();
    if args.len() != 3 {
        return usage();
    }
    let link_name = &args[1];
    let ip: IpNetwork = args[2].parse().unwrap_or_else(|_| {
        eprintln!("invalid address");
        std::process::exit(1);
    });
    // Create a netlink connection, and a handle to send requests via this connection
    let (connection, handle) = new_connection().unwrap();
    // Spawn the connection on the event loop (a dedicated thread running its
    // own reactor drives the netlink socket).
    spawn(move || Core::new().unwrap().run(connection));
    handle
        // Get the "link" handle
        .link()
        // Create a "get" request from the link handle. We could tweak the request here, before
        // calling "execute()"
        .get()
        // Turn the request into a runnable future
        .execute()
        // The future is a stream of link message. We are interested only in a specific link, so we
        // filter out the other message.
        .filter(|link_msg| {
            // Match on the interface-name attribute of each link message.
            for nla in &link_msg.nlas {
                if let LinkNla::IfName(ref name) = nla {
                    return name == link_name;
                }
            }
            false
        })
        .take(1)
        .for_each(|link_msg| {
            handle
                // Get an "address" handle
                .address()
                // Create an "add" request
                .add(link_msg.header.index, ip.ip(), ip.prefix())
                // Turn the request into a future
                .execute()
                .and_then(|_| {
                    println!("done");
                    Ok(())
                })
                .or_else(|e| match e.kind() {
                    // We handle permission denied errors gracefully
                    ErrorKind::NetlinkError(ref err_msg) if err_msg.code == -1 => {
                        eprintln!("permission denied!");
                        Ok(())
                    }
                    // but just propagate any other error
                    _ => Err(e),
                })
        })
        // Block the main thread until the whole chain completes.
        .wait()
        .unwrap();
}
/// Prints usage instructions for the example to stderr.
fn usage() {
    eprintln!(
        "usage:
    cargo run --example add_address -- <link_name> <ip_address>
Note that you need to run this program as root. Instead of running cargo as root,
build the example normally:
    cd rtnetlink ; cargo build --example add_address
Then find the binary in the target directory:
    cd ../target/debug/example ; sudo ./add_address <link_name> <ip_address>"
    );
}
|
pub mod gameobjects;
pub mod components;
pub mod rendering; |
use serde::{Serialize, Deserialize};
use super::location::Location;
use chrono::{DateTime, Local};
/// returned by the mvg api
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ConnectionList{
    pub connection_list: Vec<Connection>
}
/// Description of one time-dependent connection
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Connection{
    zoom_notice_to: bool,
    zoom_notice_from: bool,
    from: Location,
    to: Location,
    // Departure/arrival as unix-epoch milliseconds (see the
    // `departure_time`/`arrival_time` accessors below).
    departure: u64,
    arrival: u64,
    connection_part_list: Vec<ConnectionPart>,
    efa_ticket_ids: Vec<String>,
    server_id: u64,
    ring_from: u8,
    ring_to: u8,
    old_tarif: bool,
    banner_hash: String
}
/// Transportation from one to another location by one product of public traffic
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Transportation{
    stops: Vec<Stop>,
    from: Location,
    to: Location,
    path: Vec<Location>,
    path_description: Vec<PathDescriptor>,
    interchange_path: Vec<Location>,
    // Unix-epoch milliseconds, per the API's time encoding.
    departure: u64,
    arrival: u64,
    delay: i32,
    arr_delay: i32,
    cancelled: bool,
    product: Product,
    label: String,
    server_id: String,
    destination: String,
    sev: bool,
    zoom_notice_departure: bool,
    zoom_notice_arrival: bool,
    departure_platform: String,
    departure_stop_position_number: u8,
    arrival_platform: String,
    arrival_stop_position_number: u8,
    no_changing_required: bool,
    from_id: String,
    departure_id: String,
    info_messages: Option<Vec<String>>
}
/// A stop during a transportation
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Stop{
    location: Location,
    time: u64,
    delay: i32,
    arr_delay: i32,
}
/// Part of a connection which has to be walked
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Footway{
    from: Location,
    to: Location,
    path: Vec<Location>,
    path_description: Vec<PathDescriptor>,
    interchange_path: Vec<()>,
    departure: u64,
    arrival: u64,
    cancelled: bool,
    zoom_notice_departure: bool,
    zoom_notice_arrival: bool,
    departure_stop_position_number: u8,
    arrival_stop_position_number: u8,
    no_changing_required: bool,
}
/// Representing one part of a connection.
/// The JSON field `connectionPartType` selects the variant
/// (internally tagged serde enum).
#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "connectionPartType")]
#[serde(rename_all = "UPPERCASE")]
pub enum ConnectionPart{
    Transportation(Transportation),
    Footway(Footway)
}
/// Segment of a path between two indices, with a level (presumably floor
/// level for station navigation — TODO confirm).
#[derive(Serialize, Deserialize, Debug)]
struct PathDescriptor{
    from: u8,
    to: u8,
    level: i8
}
/// Kind of public-transport vehicle.
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "UPPERCASE")]
pub enum Product{
    SBahn,
    UBahn,
    Bus,
    Bahn,
    Tram
}
impl Connection{
    /// the starting location
    pub fn from(&self) -> &Location{
        &self.from
    }
    /// the destination
    pub fn to(&self) -> &Location{
        &self.to
    }
    /// Start time, converted from the API's unix-epoch milliseconds.
    pub fn departure_time(&self) -> DateTime<Local> {
        // `departure` is already u64, so the previous `as u64` cast was
        // redundant; the needless `let`-then-return is folded as well.
        DateTime::<Local>::from(
            std::time::UNIX_EPOCH + std::time::Duration::from_millis(self.departure),
        )
    }
    /// End time, converted from the API's unix-epoch milliseconds.
    pub fn arrival_time(&self) -> DateTime<Local> {
        DateTime::<Local>::from(
            std::time::UNIX_EPOCH + std::time::Duration::from_millis(self.arrival),
        )
    }
    /// list of different connection parts
    pub fn connection_parts(&self) -> &Vec<ConnectionPart>{
        &self.connection_part_list
    }
}
impl Transportation{
    /// starting location
    pub fn from(&self) -> &Location{
        &self.from
    }
    /// destination
    pub fn to(&self) -> &Location{
        &self.to
    }
    /// transporting product (e.g. UBAHN, SBAHN)
    pub fn product(&self) -> &Product{
        &self.product
    }
    /// label of transporting product (e.g. U6, S7)
    pub fn label(&self) -> &String{
        &self.label
    }
}
use crate::data::channel;
use std::convert::TryFrom;
use std::fmt::{self, Display};
use std::str::FromStr;
/// A wildcard Channel specifier.
///
/// This type represents a value suitable for use with [wildcard subscribe]
/// feature.
///
/// Currently you can have up to three levels deep with your channel segment
/// hierarchies, `a.b.c`, for example.
///
/// Developers MUST use .* to denote a wildcard subscription. Just having a * at
/// the end of channel names will NOT get translated to .* and will result in a
/// subscribe error.
///
/// [wildcard subscribe]: https://support.pubnub.com/support/solutions/folders/14000109563
#[derive(Debug, Clone, PartialEq, Eq, Hash, Default)]
pub struct WildcardSpec(String);
impl WildcardSpec {
    // Validity rules (see
    // https://support.pubnub.com/support/solutions/articles/14000043664-how-many-channel-segments-are-supported-with-wildcard-subscribe-):
    // no leading dot, at most three dot-separated segments, an asterisk only
    // directly after a dot and only as the final character, no trailing dot.
    fn is_valid(s: &str) -> bool {
        if s.starts_with('.') {
            // Cannot start with the dot.
            return false;
        }
        // Single pass, tracking the previous character and the dot count.
        let mut dots_seen = 0;
        let mut prev: Option<char> = None;
        for c in s.chars() {
            if prev == Some('*') {
                // An asterisk is only allowed as the very last character.
                return false;
            }
            if c == '*' && prev != Some('.') {
                // An asterisk must immediately follow a dot.
                return false;
            }
            if c == '.' {
                dots_seen += 1;
                if dots_seen > 2 {
                    // There must be at most three segments.
                    return false;
                }
            }
            prev = Some(c);
        }
        // A dot at the very end would have to be followed by an asterisk,
        // so it is invalid.
        prev != Some('.')
    }
    /// Create a new [`WildcardSpec`] skipping the validity check.
    #[must_use]
    pub fn from_string_unchecked(s: String) -> Self {
        Self(s)
    }
}
impl TryFrom<String> for WildcardSpec {
    type Error = String;

    /// Validates `value` and wraps it; the original string is handed back
    /// as the error on failure.
    fn try_from(value: String) -> Result<Self, Self::Error> {
        if Self::is_valid(&value) {
            Ok(Self(value))
        } else {
            Err(value)
        }
    }
}
impl FromStr for WildcardSpec {
    type Err = ();

    /// Validates `s` and clones it into a new spec; the error carries no
    /// information.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if Self::is_valid(s) {
            Ok(Self(s.to_owned()))
        } else {
            Err(())
        }
    }
}
// Borrowing accessors: expose the inner string without copying.
impl AsRef<String> for WildcardSpec {
    fn as_ref(&self) -> &String {
        &self.0
    }
}
impl AsRef<str> for WildcardSpec {
    fn as_ref(&self) -> &str {
        self.0.as_str()
    }
}
// Display delegates to the inner string verbatim.
impl Display for WildcardSpec {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}
impl From<channel::Name> for WildcardSpec {
    fn from(name: channel::Name) -> Self {
        // Name is guaranteed to be valid from the wildcard spec perspective.
        Self::from_string_unchecked(name.into())
    }
}
#[cfg(test)]
mod tests {
    use super::WildcardSpec;
    // Shorthand delegating to the private associated function.
    fn is_valid(s: &str) -> bool {
        WildcardSpec::is_valid(s)
    }
    #[test]
    fn valid() {
        assert_eq!(is_valid("stocks.*"), true); // from https://support.pubnub.com/support/solutions/articles/14000043663-how-do-i-subscribe-to-a-wildcard-channel-
        assert_eq!(is_valid(""), true);
    }
    #[test]
    fn valid_from_docs() {
        // From https://support.pubnub.com/support/solutions/articles/14000043664-how-many-channel-segments-are-supported-with-wildcard-subscribe-
        assert_eq!(is_valid("a.*"), true);
        assert_eq!(is_valid("a.b.*"), true);
        assert_eq!(is_valid("a.b"), true);
        assert_eq!(is_valid("a.b.c"), true);
        // Technically speaking, the last two examples are just single channels
        // without any wildcards, but you can subscribe to any of the above
        // forms.
    }
    #[test]
    fn invalid_incorrect_from_docs() {
        // From https://support.pubnub.com/support/solutions/articles/14000043664-how-many-channel-segments-are-supported-with-wildcard-subscribe-
        assert_eq!(is_valid("*"), false); // can not wildcard at the top level to subscribe to all channels
        assert_eq!(is_valid(".*"), false); // can not start with a .
        assert_eq!(is_valid("a.*.b"), false); // * must be at the end
        assert_eq!(is_valid("a."), false); // the . must be followed by a * when it is at the end of the name
        assert_eq!(is_valid("a*"), false); // * must always be preceded with a .
        assert_eq!(is_valid("a*b"), false); // * must always be preceded with a . and .* must always be at the end
        // NOTE: The above invalid channel names will actually succeed if you
        // attempt to subscribe to them. They will even succeed when you publish
        // to them. But they will operate as literal channel names and not as
        // wildcard channels. So it is highly recommended that you do not use
        // the invalid forms so as not to confuse the intent of the channel
        // names with wildcard behaviors.
    }
    #[test]
    fn invalid_faulty_from_docs() {
        // From https://support.pubnub.com/support/solutions/articles/14000043664-how-many-channel-segments-are-supported-with-wildcard-subscribe-
        // As stated above, there are valid wildcard channel names and invalid
        // wildcard channel names and even the invalid channel names can be
        // successfully subscribed to without error and published to and
        // subscribes to those invalid channel names will succeed. But it only
        // works as a single, literal channel name and with wildcard behaviors.
        // The one exception to this rule is channels with more than two .
        //characters. If you attempt to subscribe to a channel with more than
        // two . characters (more than three segments) it will succeed, but
        // you will not be able to publish to those channels.
        assert_eq!(is_valid("a.b.c.d"), false); // too many segments
        assert_eq!(is_valid("a.b.c.*"), false); // too many segments
        // If you do attempt to publish to channel names with more than three
        // segments (three or more . delimiters), then you will receive a 400
        // INVALID error response.
    }
}
|
//! This file contains the types necessary to collect various types of stats.
use crate::loom::sync::atomic::{AtomicU64, Ordering::Relaxed};
/// This type contains methods to retrieve stats from a Tokio runtime.
#[derive(Debug)]
pub struct RuntimeStats {
    workers: Box<[WorkerStats]>,
}
/// This type contains methods to retrieve stats from a worker thread on a Tokio runtime.
// Aligned to 128 bytes so adjacent worker slots do not share cache lines.
#[derive(Debug)]
#[repr(align(128))]
pub struct WorkerStats {
    park_count: AtomicU64,
    steal_count: AtomicU64,
    poll_count: AtomicU64,
}
impl RuntimeStats {
    /// Allocates one zeroed stats slot per worker thread.
    pub(crate) fn new(worker_threads: usize) -> Self {
        let workers = (0..worker_threads)
            .map(|_| WorkerStats {
                park_count: AtomicU64::new(0),
                steal_count: AtomicU64::new(0),
                poll_count: AtomicU64::new(0),
            })
            .collect();
        Self { workers }
    }
    /// Returns a slice containing the worker stats for each worker thread.
    pub fn workers(&self) -> impl Iterator<Item = &WorkerStats> {
        self.workers.iter()
    }
}
impl WorkerStats {
    /// Returns the total number of times this worker thread has parked.
    pub fn park_count(&self) -> u64 {
        self.park_count.load(Relaxed)
    }
    /// Returns the number of tasks this worker has stolen from other worker
    /// threads.
    pub fn steal_count(&self) -> u64 {
        self.steal_count.load(Relaxed)
    }
    /// Returns the number of times this worker has polled a task.
    pub fn poll_count(&self) -> u64 {
        self.poll_count.load(Relaxed)
    }
}
/// Per-worker accumulator: counters are bumped as plain integers on the
/// worker thread and only published to the shared atomics in `submit`.
pub(crate) struct WorkerStatsBatcher {
    my_index: usize, // index of this worker's slot in RuntimeStats::workers
    park_count: u64,
    steal_count: u64,
    poll_count: u64,
}
impl WorkerStatsBatcher {
    /// Creates a zeroed batcher for the worker at `my_index`.
    pub(crate) fn new(my_index: usize) -> Self {
        Self {
            my_index,
            park_count: 0,
            steal_count: 0,
            poll_count: 0,
        }
    }
    /// Publishes the locally accumulated counters to this worker's slot in
    /// the shared stats (plain stores; readers use relaxed loads).
    pub(crate) fn submit(&mut self, to: &RuntimeStats) {
        let worker = &to.workers[self.my_index];
        worker.park_count.store(self.park_count, Relaxed);
        worker.steal_count.store(self.steal_count, Relaxed);
        worker.poll_count.store(self.poll_count, Relaxed);
    }
    /// Called just before the worker parks.
    pub(crate) fn about_to_park(&mut self) {
        self.park_count += 1;
    }
    /// Called after the worker wakes; currently a no-op hook.
    pub(crate) fn returned_from_park(&mut self) {}
    /// Adds `by` to the steal counter (multi-threaded runtime only).
    #[cfg(feature = "rt-multi-thread")]
    pub(crate) fn incr_steal_count(&mut self, by: u16) {
        self.steal_count += u64::from(by);
    }
    /// Bumps the poll counter by one.
    pub(crate) fn incr_poll_count(&mut self) {
        self.poll_count += 1;
    }
}
|
use super::error::Error;
use super::header::Header;
use super::types;
use num_traits::FromPrimitive;
use std::io::Read;
/// One ELF program (segment) header; field names mirror the Elf_Phdr
/// `p_*` members.
#[derive(Default, Debug, Clone)]
pub struct SegmentHeader {
    pub phtype: types::SegmentType, // p_type
    pub flags: types::SegmentFlags, // p_flags
    pub offset: u64,                // p_offset: segment's offset in the file
    pub vaddr: u64,                 // p_vaddr: virtual load address
    pub paddr: u64,                 // p_paddr: physical address
    pub filesz: u64,                // p_filesz: bytes stored in the file
    pub memsz: u64,                 // p_memsz: bytes occupied in memory
    pub align: u64,                 // p_align
}
impl SegmentHeader {
    /// Parses one program header from `io`. `eh` (the already-parsed file
    /// header) supplies the endianness used by the `elf_read_*` macros and
    /// the 32-/64-bit class that selects the field layout.
    ///
    /// Returns `Error::InvalidSegmentType` if `p_type` is unrecognized.
    pub fn from_reader<R>(io: &mut R, eh: &Header) -> Result<SegmentHeader, Error>
    where
        R: Read,
    {
        let mut r = SegmentHeader::default();
        let reb = elf_read_u32!(eh, io)?;
        r.phtype = match types::SegmentType::from_u32(reb) {
            Some(v) => v,
            None => return Err(Error::InvalidSegmentType(reb)),
        };
        // The two ELF classes lay the remaining fields out differently:
        // 64-bit stores p_flags immediately after p_type, while 32-bit
        // stores it after p_memsz; 32-bit fields are widened to u64 here.
        match eh.ident_class {
            types::Class::Class64 => {
                r.flags =
                    types::SegmentFlags::from_bits_truncate(u64::from(elf_read_u32!(eh, io)?));
                r.offset = elf_read_u64!(eh, io)?;
                r.vaddr = elf_read_u64!(eh, io)?;
                r.paddr = elf_read_u64!(eh, io)?;
                r.filesz = elf_read_u64!(eh, io)?;
                r.memsz = elf_read_u64!(eh, io)?;
                r.align = elf_read_u64!(eh, io)?;
            }
            types::Class::Class32 => {
                r.offset = u64::from(elf_read_u32!(eh, io)?);
                r.vaddr = u64::from(elf_read_u32!(eh, io)?);
                r.paddr = u64::from(elf_read_u32!(eh, io)?);
                r.filesz = u64::from(elf_read_u32!(eh, io)?);
                r.memsz = u64::from(elf_read_u32!(eh, io)?);
                r.flags =
                    types::SegmentFlags::from_bits_truncate(u64::from(elf_read_u32!(eh, io)?));
                r.align = u64::from(elf_read_u32!(eh, io)?);
            }
        };
        Ok(r)
    }
}
|
use memlib::logger::MinimalLogger;
use memlib::memory;
use log::*;
use std::error::Error;
mod config;
mod hacks;
mod sdk;
/// Name of the process to attach to.
pub const PROCESS_NAME: &str = "csgo.exe";
/// Tick rate constant; interpretation is up to `hacks::hack_loop`.
pub const CHEAT_TICKRATE: u64 = 1;
const LOG_LEVEL: LevelFilter = LevelFilter::Debug;
/// Initializes logging, opens a memory handle to the target process, builds
/// the game context, and runs the hack loop until it exits or errors.
fn run() -> Result<(), Box<dyn Error>> {
    MinimalLogger::init(LOG_LEVEL)?;
    let handle = memory::Handle::new(PROCESS_NAME)?;
    let ctx = sdk::GameContext::new(handle);
    hacks::hack_loop(ctx)?;
    Ok(())
}
/// Entry point: maps the outcome of `run` onto a process exit code —
/// 0 on success, 1 (after logging the error) on failure.
fn main() {
    let code = match run() {
        Ok(()) => 0,
        Err(err) => {
            error!("{}", err);
            1
        }
    };
    std::process::exit(code);
}
|
/// Returns the number of Collatz steps needed to reach 1 from `n`,
/// or `None` for `n == 0` (which never reaches 1).
///
/// Implemented iteratively: the previous recursive version consumed one
/// stack frame per step and could overflow the stack on long chains.
/// The arithmetic (`3 * n + 1`) is unchanged, so overflow behavior for
/// very large `n` is identical to the original.
pub fn collatz(n: u64) -> Option<u64> {
    if n == 0 {
        return None;
    }
    let mut current = n;
    let mut steps = 0u64;
    while current != 1 {
        current = if current % 2 == 0 {
            current / 2
        } else {
            3 * current + 1
        };
        steps += 1;
    }
    Some(steps)
}
|
use x86_64::VirtAddr;
use x86_64::structures::gdt::{GlobalDescriptorTable, Descriptor, SegmentSelector};
use x86_64::structures::tss::TaskStateSegment;
/// IST slot reserved for the double-fault handler's dedicated stack.
pub const DOUBLE_FAULT_IST_INDEX: u16 = 0;
#[no_mangle]
pub static mut TSS: TaskStateSegment = TaskStateSegment::new();
// Raw bit positions within a 64-bit GDT descriptor.
#[allow(unused)] const GDT_DPL0: u64 = 0 << 45; // DPL field (bits 45-46) = ring 0
#[allow(unused)] const GDT_DPL3: u64 = 3 << 45; // DPL field = ring 3
#[allow(unused)] const GDT_PRESENT: u64 = 1 << 47; // present bit
#[allow(unused)] const GDT_LONG_MODE: u64 = 1 << 53; // 64-bit code segment flag
#[allow(unused)] const GDT_CODE_READABLE: u64 = 1 << 41; // bit 41: readable (for code segments)
#[allow(unused)] const GDT_DATA_WRITABLE: u64 = 1 << 41; // bit 41: writable (for data segments)
#[allow(unused)] const GDT_TYPE_SYSTEM: u64 = 0 << 44; // S flag clear: system segment
#[allow(unused)] const GDT_TYPE_USER: u64 = 1 << 44; // S flag set: code/data segment
#[allow(unused)] const GDT_EXECUTABLE: u64 = 1 << 43; // bit 43: executable
// const GDT
lazy_static! {
    /// The global descriptor table plus the selectors needed later to
    /// reload the segment registers and the TSS (see `init`).
    pub static ref GDT: (GlobalDescriptorTable, Selectors) = {
        let mut gdt = GlobalDescriptorTable::new();
        // Ring-0 code/data descriptors.
        let kernel_code = gdt.add_entry(Descriptor::UserSegment(GDT_PRESENT | GDT_TYPE_USER | GDT_DPL0 | GDT_CODE_READABLE | GDT_LONG_MODE | GDT_EXECUTABLE));
        let kernel_data = gdt.add_entry(Descriptor::UserSegment(GDT_PRESENT | GDT_TYPE_USER | GDT_DPL0 | GDT_DATA_WRITABLE | GDT_LONG_MODE));
        // Ring-3 code/data descriptors.
        let mut user_code = gdt.add_entry(Descriptor::UserSegment(GDT_PRESENT | GDT_TYPE_USER | GDT_DPL3 | GDT_CODE_READABLE | GDT_LONG_MODE | GDT_EXECUTABLE));
        let mut user_data = gdt.add_entry(Descriptor::UserSegment(GDT_PRESENT | GDT_TYPE_USER | GDT_DPL3 | GDT_DATA_WRITABLE | GDT_LONG_MODE));
        // Force RPL = 3 on the user selectors so they can be loaded from ring 3.
        user_code.0 = user_code.0 | 3;
        user_data.0 = user_data.0 | 3;
        // let code_selector = gdt.add_entry(Descriptor::kernel_code_segment());
        // let user_code = gdt.add_entry({
        //     let mut desc = Descriptor::kernel_code_segment();
        //     if let Descriptor::UserSegment(ref mut value) = desc {
        //         *value = *value | 3 << 45 | 1 << 41;
        //         println!("User Seg {:x}", *value);
        //         // *value = *value | 1 << 42
        //     }
        //     desc
        // });
        // let user_data = gdt.add_entry(Descriptor::UserSegment(GDT_PRESENT | GDT_TYPE_USER | 3 << 45 | 1 << 44 | 1 << 47 | 1 << 41));
        let tss_selector = gdt.add_entry(Descriptor::tss_segment(unsafe { &TSS }));
        // println!("GDT: code_selector={:?}, tss_selector={:?}", code_selector, tss_selector);
        (gdt, Selectors { kernel_code, kernel_data, user_code, user_data, tss_selector })
    };
}
/// Segment selectors returned by GDT construction, used by `init` to
/// reload CS/SS/DS/ES/FS/GS and the task register.
pub struct Selectors {
    pub kernel_code: SegmentSelector,
    pub kernel_data: SegmentSelector,
    pub user_code: SegmentSelector,   // RPL already forced to 3
    pub user_data: SegmentSelector,   // RPL already forced to 3
    pub tss_selector: SegmentSelector
}
/// Installs the double-fault stack into the TSS, loads the GDT, then
/// reloads every segment register and the task register.
///
/// Must run before anything relies on the selectors in `GDT.1`.
pub fn init() {
    use x86_64::instructions::segmentation::*;
    use x86_64::instructions::tables::load_tss;
    unsafe {
        // Give the double-fault handler its own known-good stack (IST slot 0)
        // so it can run even when the fault was caused by a bad kernel stack.
        TSS.interrupt_stack_table[DOUBLE_FAULT_IST_INDEX as usize] = {
            const STACK_SIZE: usize = 4096;
            static mut STACK: [u8; STACK_SIZE] = [0; STACK_SIZE];
            let stack_start = VirtAddr::from_ptr(&STACK);
            let stack_end = stack_start + STACK_SIZE;
            // x86 stacks grow downward, so the table stores the top (end).
            stack_end
        };
    }
    GDT.0.load();
    unsafe {
        // Reload all segment registers so they reference the new GDT,
        // then load the task register with the TSS selector.
        set_cs(GDT.1.kernel_code);
        load_ss(GDT.1.kernel_data);
        load_ds(GDT.1.kernel_data);
        load_es(GDT.1.kernel_data);
        load_fs(GDT.1.kernel_data);
        load_gs(GDT.1.kernel_data);
        load_tss(GDT.1.tss_selector);
    }
}
/// Sets `privilege_stack_table[0]` (RSP0): the stack the CPU switches to
/// on a ring-3 → ring-0 transition.
pub fn set_kernel_stack(ptr: VirtAddr) {
    unsafe {
        TSS.privilege_stack_table[0] = ptr;
    }
}
|
#![feature(option_result_contains)]
// #![allow(dead_code)]
#![allow(unused_imports)]
#[macro_use]
extern crate io_error;
extern crate strum;
extern crate strum_macros;
extern crate fixed;
extern crate log;
pub mod methods;
pub mod operators;
pub mod pddl_parser;
#[cfg(test)]
mod tests {
    /// Smoke test: verifies the test harness itself runs.
    #[test]
    fn it_works() {
        let sum = 2 + 2;
        assert_eq!(sum, 4);
    }
}
|
#![allow(unused_imports)]
#![allow(dead_code)]
extern crate rand;
use std::io::{self, BufRead, BufReader, Read, Write};
use std::net::{TcpListener, TcpStream, ToSocketAddrs};
use std::str;
use std::{thread, time};
use rand::prelude::*;
/// ASCII SOH (0x01), the standard FIX field delimiter.
pub const SOH: char = '\u{01}';
/// Builds a pseudo-random FIX-like message: ten random `tag=value` fields
/// followed by a `10=...` trailer, each terminated by SOH.
///
/// NOTE(review): the `10=` trailer is a random number, not a computed FIX
/// checksum — fine for exercising a parser, not for a conformant session.
///
/// Fix: the loop variable was named `i` but never used; replaced with `_`.
fn create_fix_message() -> String {
    let mut rng = rand::thread_rng();
    let mut msg = String::with_capacity(64);
    for _ in 0..10 {
        let tag: u16 = rng.gen_range(1, 500);
        let value: u64 = rng.gen_range(1000, 10000);
        msg.push_str(format!("{}={}", tag, value).as_str());
        msg.push(SOH);
    }
    let trailer_value: u16 = rng.gen();
    msg.push_str(format!("10={}", trailer_value).as_str());
    msg.push(SOH);
    msg
}
/// Interactively sends FIX messages to `addrs`, deliberately split into two
/// TCP writes with a delay in between (to exercise reassembly on the peer).
/// Answering anything other than y/Y ends the session.
///
/// Fixes: the second slice previously started at `len/2 + 1`, silently
/// dropping the byte at index `len/2`; `read_line`/`write` Results were
/// ignored; `write` is now `write_all` so partial writes cannot lose data.
fn send_fix_message<A: ToSocketAddrs>(addrs: A) -> io::Result<()> {
    let mut stream = TcpStream::connect(addrs).expect("could not connect");
    let std_in = io::stdin();
    let mut buff = String::new();
    loop {
        buff.clear();
        println!("send message [y/n]");
        std_in.read_line(&mut buff)?;
        if buff.starts_with("y") || buff.starts_with("Y") {
            let msg = create_fix_message();
            println!("sending string: {}", msg);
            let half = msg.len() / 2;
            stream.write_all(msg[..half].as_bytes())?;
            thread::sleep(time::Duration::from_millis(5000));
            stream.write_all(msg[half..].as_bytes())?;
        } else {
            return Ok(());
        }
        // thread::sleep(time::Duration::from_millis(1000));
    }
}
/// Entry point: runs the echo server and reports any I/O error on stdout.
fn main() {
    // send_random_bytes("127.0.0.1:4375");
    if let Err(e) = start_server() {
        println!("{:?}", e);
    }
}
/// Debug helper: forever sends sixteen 'c' bytes plus a newline to `addr`
/// every five seconds.
///
/// Fixes: `write` could silently send only part of the buffer — now
/// `write_all`; results are discarded explicitly (`let _`) to keep the
/// original best-effort behavior instead of tripping unused-Result warnings.
fn send_random_bytes<A: ToSocketAddrs>(addr: A) {
    let mut stream = TcpStream::connect(addr).expect("could not connect");
    loop {
        let buff = [b'c'; 16];
        let _ = stream.write_all(&buff);
        let _ = stream.write_all(&[b'\n']);
        thread::sleep(time::Duration::from_millis(5000));
    }
}
/// Accepts connections on 127.0.0.1:4378 and prints everything received to
/// stdout, one buffered chunk at a time, until each peer disconnects.
///
/// Cleanups: removed a needless `mut`, replaced `0 as usize` with a plain
/// literal, and restructured the fill_buf handling so the borrow of the
/// reader ends before `consume` is called.
fn start_server() -> io::Result<usize> {
    let listener = TcpListener::bind("127.0.0.1:4378").unwrap();
    for stream in listener.incoming() {
        let stream = stream.unwrap();
        let mut buf_reader = BufReader::new(stream);
        loop {
            let byte_used = {
                let chunk = match buf_reader.fill_buf() {
                    Ok(chunk) => chunk,
                    Err(e) => {
                        println!("Error occured");
                        return Err(e);
                    }
                };
                if chunk.is_empty() {
                    // EOF: peer closed the connection; wait for the next one.
                    println!("nothing came");
                    break;
                }
                println!("{}", str::from_utf8(chunk).unwrap());
                chunk.len()
            };
            buf_reader.consume(byte_used);
        }
    }
    Ok(0)
}
|
use std::error::Error as StdError;
use std::marker;
use std::result::Result as StdResult;
use crate::{Result, Error};
/// Lazily decode the data bytes, it can be used to avoid CPU intensive decoding
/// before making sure we really need to decode it (e.g. based on the key).
#[derive(Default)]
pub struct LazyDecode<C>(marker::PhantomData<C>);
// "Decoding" here never fails and never copies: the raw bytes are captured
// as-is, and the real decode is deferred to `Lazy::decode`.
impl<'a, C: 'static> heed_traits::BytesDecode<'a> for LazyDecode<C> {
    type DItem = Lazy<'a, C>;
    fn bytes_decode(bytes: &'a [u8]) -> StdResult<Self::DItem, Box<dyn StdError>> {
        Ok(Lazy { data: bytes, _phantom: marker::PhantomData })
    }
}
/// Owns bytes that can be decoded on demand.
// NOTE(review): despite the wording above, this borrows the bytes for 'a
// rather than owning them — which is what makes it Copy.
#[derive(Copy, Clone)]
pub struct Lazy<'a, C> {
    data: &'a [u8],
    _phantom: marker::PhantomData<C>,
}
impl<'a, C: heed_traits::BytesDecode<'a>> Lazy<'a, C> {
    /// Performs the deferred decode of the captured bytes, mapping any
    /// decoder error into this crate's `Error::Decoding`.
    pub fn decode(&self) -> Result<C::DItem> {
        C::bytes_decode(self.data).map_err(Error::Decoding)
    }
}
|
// Copied from nom master:
// https://github.com/Geal/nom/blob/a38188f333c29d00c32a3082bec5491d2eefa33f/src/sequence.rs#L591-L687
// Will be released in nom 2.0.
#![cfg(feature = "parsing")]
#[macro_export]
#[doc(hidden)]
// Public entry point: delegates to `do_parse_impl!` with an initial
// consumed-byte count of 0, which the impl rules thread through to fix up
// `Needed::Size` values on incomplete input.
macro_rules! do_parse (
    ($i:expr, $($rest:tt)*) => (
        {
            do_parse_impl!($i, 0usize, $($rest)*)
        }
    );
);
/// Internal parser, do not use directly
#[doc(hidden)]
#[macro_export]
macro_rules! do_parse_impl (
    // Terminal rule: only the result tuple remains.
    ($i:expr, $consumed:expr, ( $($rest:expr),* )) => (
        ::nom::IResult::Done($i, ( $($rest),* ))
    );
    // Bare-ident step: normalize to call!() and recurse.
    ($i:expr, $consumed:expr, $e:ident >> $($rest:tt)*) => (
        do_parse_impl!($i, $consumed, call!($e) >> $($rest)*);
    );
    // Macro step, output discarded. On Incomplete(Size), the needed size is
    // offset by $consumed so it is relative to the original input, not the
    // current position.
    ($i:expr, $consumed:expr, $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => (
        {
            match $submac!($i, $($args)*) {
                ::nom::IResult::Error(e) => ::nom::IResult::Error(e),
                ::nom::IResult::Incomplete(::nom::Needed::Unknown) =>
                    ::nom::IResult::Incomplete(::nom::Needed::Unknown),
                ::nom::IResult::Incomplete(::nom::Needed::Size(i)) =>
                    ::nom::IResult::Incomplete(::nom::Needed::Size($consumed + i)),
                ::nom::IResult::Done(i,_) => {
                    // Recurse with $consumed grown by how many bytes this
                    // step consumed (input_len before minus after).
                    do_parse_impl!(i,
                        $consumed + (::nom::InputLength::input_len(&($i)) -
                            ::nom::InputLength::input_len(&i)), $($rest)*)
                },
            }
        }
    );
    // Bare-ident step binding its output to $field.
    ($i:expr, $consumed:expr, $field:ident : $e:ident >> $($rest:tt)*) => (
        do_parse_impl!($i, $consumed, $field: call!($e) >> $($rest)*);
    );
    // Macro step binding its output to $field.
    ($i:expr, $consumed:expr, $field:ident : $submac:ident!( $($args:tt)* ) >> $($rest:tt)*) => (
        {
            match $submac!($i, $($args)*) {
                ::nom::IResult::Error(e) => ::nom::IResult::Error(e),
                ::nom::IResult::Incomplete(::nom::Needed::Unknown) =>
                    ::nom::IResult::Incomplete(::nom::Needed::Unknown),
                ::nom::IResult::Incomplete(::nom::Needed::Size(i)) =>
                    ::nom::IResult::Incomplete(::nom::Needed::Size($consumed + i)),
                ::nom::IResult::Done(i,o) => {
                    let $field = o;
                    do_parse_impl!(i,
                        $consumed + (::nom::InputLength::input_len(&($i)) -
                            ::nom::InputLength::input_len(&i)), $($rest)*)
                },
            }
        }
    );
    // ending the chain
    ($i:expr, $consumed:expr, $e:ident >> ( $($rest:tt)* )) => (
        do_parse_impl!($i, $consumed, call!($e) >> ( $($rest)* ));
    );
    ($i:expr, $consumed:expr, $submac:ident!( $($args:tt)* ) >> ( $($rest:tt)* )) => (
        match $submac!($i, $($args)*) {
            ::nom::IResult::Error(e) => ::nom::IResult::Error(e),
            ::nom::IResult::Incomplete(::nom::Needed::Unknown) =>
                ::nom::IResult::Incomplete(::nom::Needed::Unknown),
            ::nom::IResult::Incomplete(::nom::Needed::Size(i)) =>
                ::nom::IResult::Incomplete(::nom::Needed::Size($consumed + i)),
            ::nom::IResult::Done(i,_) => {
                ::nom::IResult::Done(i, ( $($rest)* ))
            },
        }
    );
    ($i:expr, $consumed:expr, $field:ident : $e:ident >> ( $($rest:tt)* )) => (
        do_parse_impl!($i, $consumed, $field: call!($e) >> ( $($rest)* ) );
    );
    ($i:expr, $consumed:expr, $field:ident : $submac:ident!( $($args:tt)* ) >> ( $($rest:tt)* )) => (
        match $submac!($i, $($args)*) {
            ::nom::IResult::Error(e) => ::nom::IResult::Error(e),
            ::nom::IResult::Incomplete(::nom::Needed::Unknown) =>
                ::nom::IResult::Incomplete(::nom::Needed::Unknown),
            ::nom::IResult::Incomplete(::nom::Needed::Size(i)) =>
                ::nom::IResult::Incomplete(::nom::Needed::Size($consumed + i)),
            ::nom::IResult::Done(i,o) => {
                let $field = o;
                ::nom::IResult::Done(i, ( $($rest)* ))
            },
        }
    );
);
|
use crate::base::{ArchivedRkyvRequest, ErrorCode, RkyvGenericResponse};
use crate::storage::local::FileStorage;
/// Applies the commit phase of a write request against local storage by
/// dispatching on the archived request variant.
///
/// Compound operations (mkdir, rename, create, ...) and lock requests must
/// never reach this function — the transaction coordinator and LockTable
/// break them up / handle them first — and read-only requests are not
/// writes at all; all of those arms are `unreachable!`.
pub fn commit_write(
    request: &ArchivedRkyvRequest,
    file_storage: &FileStorage,
) -> Result<RkyvGenericResponse, ErrorCode> {
    match request {
        ArchivedRkyvRequest::Fsync { inode } => file_storage.fsync(inode.into()),
        ArchivedRkyvRequest::HardlinkRollback {
            inode,
            last_modified_time,
        } => file_storage.hardlink_rollback(inode.into(), last_modified_time.into()),
        ArchivedRkyvRequest::Utimens {
            inode,
            atime,
            mtime,
            context,
        } => file_storage.utimens(
            inode.into(),
            atime.as_ref().map(|x| x.into()),
            mtime.as_ref().map(|x| x.into()),
            context.into(),
        ),
        ArchivedRkyvRequest::SetXattr {
            inode,
            key,
            value,
            context,
        } => file_storage.set_xattr(inode.into(), key.as_str(), value, context.into()),
        ArchivedRkyvRequest::RemoveXattr {
            inode,
            key,
            context,
        } => file_storage.remove_xattr(inode.into(), key.as_str(), context.into()),
        // Compound operations: decomposed upstream, never committed whole.
        ArchivedRkyvRequest::Mkdir { .. }
        | ArchivedRkyvRequest::Hardlink { .. }
        | ArchivedRkyvRequest::Rename { .. }
        | ArchivedRkyvRequest::Create { .. }
        | ArchivedRkyvRequest::Unlink { .. }
        | ArchivedRkyvRequest::Rmdir { .. } => {
            unreachable!("Transaction coordinator should break these up into internal requests");
        }
        ArchivedRkyvRequest::Chmod {
            inode,
            mode,
            context,
        } => file_storage.chmod(inode.into(), mode.into(), context.into()),
        ArchivedRkyvRequest::Chown {
            inode,
            uid,
            gid,
            context,
        } => file_storage.chown(
            inode.into(),
            uid.as_ref().map(|x| x.into()),
            gid.as_ref().map(|x| x.into()),
            context.into(),
        ),
        ArchivedRkyvRequest::Truncate {
            inode,
            new_length,
            context,
        } => file_storage.truncate(inode.into(), new_length.into(), context.into()),
        ArchivedRkyvRequest::Write {
            inode,
            offset,
            data,
        } => file_storage.write(inode.into(), offset.into(), data),
        ArchivedRkyvRequest::RemoveLink {
            parent,
            name,
            link_inode_and_uid,
            context,
            ..
        } => file_storage.remove_link(
            parent.into(),
            name.as_str(),
            link_inode_and_uid
                .as_ref()
                .map(|x| (x.inode.into(), x.uid.into())),
            context.into(),
        ),
        ArchivedRkyvRequest::ReplaceLink {
            parent,
            name,
            new_inode,
            kind,
            context,
            ..
        } => file_storage.replace_link(
            parent.into(),
            name.as_str(),
            new_inode.into(),
            kind.into(),
            context.into(),
        ),
        ArchivedRkyvRequest::CreateLink {
            inode,
            parent,
            name,
            kind,
            context,
            ..
        } => file_storage.create_link(
            inode.into(),
            parent.into(),
            name.as_str(),
            // NOTE(review): create_link takes context before kind, unlike
            // create_inode below — confirm against FileStorage's signature.
            context.into(),
            kind.into(),
        ),
        ArchivedRkyvRequest::CreateInode {
            parent,
            uid,
            gid,
            mode,
            kind,
            ..
        } => file_storage.create_inode(
            parent.into(),
            uid.into(),
            gid.into(),
            mode.into(),
            kind.into(),
        ),
        ArchivedRkyvRequest::HardlinkIncrement { inode } => {
            file_storage.hardlink_stage0_link_increment(inode.into())
        }
        ArchivedRkyvRequest::UpdateParent {
            inode, new_parent, ..
        } => file_storage.update_parent(inode.into(), new_parent.into()),
        ArchivedRkyvRequest::UpdateMetadataChangedTime { inode, .. } => {
            file_storage.update_metadata_changed_time(inode.into())
        }
        ArchivedRkyvRequest::DecrementInode {
            inode,
            decrement_count,
            ..
        } => file_storage.decrement_inode_link_count(inode.into(), decrement_count.into()),
        ArchivedRkyvRequest::Lock { .. } | ArchivedRkyvRequest::Unlock { .. } => {
            unreachable!("This should have been handled by the LockTable");
        }
        // Read-only / metadata queries: never part of a write commit.
        ArchivedRkyvRequest::FilesystemReady
        | ArchivedRkyvRequest::FilesystemInformation
        | ArchivedRkyvRequest::FilesystemChecksum
        | ArchivedRkyvRequest::FilesystemCheck
        | ArchivedRkyvRequest::Read { .. }
        | ArchivedRkyvRequest::ReadRaw { .. }
        | ArchivedRkyvRequest::Lookup { .. }
        | ArchivedRkyvRequest::GetAttr { .. }
        | ArchivedRkyvRequest::ListDir { .. }
        | ArchivedRkyvRequest::ListXattrs { .. }
        | ArchivedRkyvRequest::GetXattr { .. }
        | ArchivedRkyvRequest::LatestCommit { .. }
        | ArchivedRkyvRequest::RaftGroupLeader { .. }
        | ArchivedRkyvRequest::RaftMessage { .. } => {
            unreachable!()
        }
    }
}
|
use crate::config::{Config, ThemeSetting};
use crate::display_action::DisplayAction;
use crate::models::Manager;
use crate::models::Window;
use crate::models::Workspace;
use crate::DisplayEvent;
#[cfg(test)]
mod mock_display_server;
pub mod xlib_display_server;
use std::sync::Arc;
#[cfg(test)]
pub use self::mock_display_server::MockDisplayServer;
pub use self::xlib_display_server::XlibDisplayServer;
/// Backend-agnostic interface to a display server (the X11 backend in
/// production, a mock under `#[cfg(test)]`). Default method bodies are
/// no-ops so a backend only implements what it needs.
pub trait DisplayServer<C: Config> {
    /// Creates the backend from the loaded config and theme.
    fn new(config: C, theme: Arc<ThemeSetting>) -> Self;
    /// Polls the backend for pending display events.
    fn get_next_events(&mut self) -> Vec<DisplayEvent>;
    /// Applies new theme settings; no-op by default.
    fn update_theme_settings(&mut self, _settings: Arc<ThemeSetting>) {}
    /// Pushes window state to the display; no-op by default.
    fn update_windows(
        &self,
        _windows: Vec<&Window>,
        _focused: Option<&Window>,
        _manager: &Manager,
    ) {
    }
    /// Pushes workspace state to the display; no-op by default.
    fn update_workspaces(&self, _windows: Vec<&Workspace>, _focused: Option<&Workspace>) {}
    /// Executes a display action, optionally yielding a follow-up event;
    /// returns `None` by default.
    fn execute_action(&mut self, _act: DisplayAction) -> Option<DisplayEvent> {
        None
    }
}
|
use super::util;
use super::mapping;
use super::controls;
use super::rendering;
use super::ai;
use super::player_actions;
use super::equipment::{ Equipment, Slot };
use super::data::{
Object,
Fighter,
DeathCallback,
Game,
Tcod,
MessageLog,
PlayerAction,
Item,
};
use tcod::input::{self, Event,};
use tcod::console::*;
use crate::PLAYER;
use std::io::{Read, Write};
use std::fs::File;
use std::error::Error;
/// Starts a fresh game: builds the player with starting stats, generates the
/// first dungeon level, equips a starting dagger, initializes field-of-view,
/// and logs the welcome message. Returns the object list and game state.
pub fn new_game(tcod: &mut Tcod) -> (Vec<Object>, Game) {
    // The player character always lives at index PLAYER in `objects`.
    let mut hero = Object::new(0, 0, '@', tcod::colors::WHITE, "Player", true);
    hero.alive = true;
    hero.fighter = Some(Fighter {
        base_max_hp: 100,
        hp: 100,
        base_defense: 1,
        base_power: 4,
        on_death: DeathCallback::Player,
        xp: 0,
    });
    let mut objects = vec![hero];
    let first_level = 1;
    let mut game = Game {
        map: mapping::make_map(&mut objects, first_level), // Generate the map
        log: vec![],                                       // Game messages log
        inventory: vec![],                                 // Player inventory
        dungeon_level: 1,
    };
    // Starting weapon: a dagger, already equipped in the left hand.
    let mut starting_weapon = Object::new(0, 0, '-', tcod::colors::SKY, "dagger", false);
    starting_weapon.item = Some(Item::Sword);
    starting_weapon.equipment = Some(Equipment {
        equipped: true,
        slot: Slot::LeftHand,
        defense_bonus: 0,
        power_bonus: 2,
        max_hp_bonus: 0,
    });
    game.inventory.push(starting_weapon);
    rendering::init_fov(&game.map, tcod);
    // Friendly welcoming message
    game.log.add(
        "Welcome, stranger, to the Mysterious Mysteries of the Abysslike Catacombs! Prepare to die.",
        tcod::colors::RED,
    );
    (objects, game)
}
// GAME LOOP
/// Runs the main game loop until the window closes or the player exits.
///
/// Each iteration: poll one input event, render (recomputing FOV only when
/// the player moved), flush, apply level-ups, handle the player's action,
/// then let every AI-controlled object take a turn.
pub fn play_game(objects: &mut Vec<Object>, game: &mut Game, tcod: &mut Tcod) {
    // Sentinel position forces an FOV recompute on the first frame.
    let mut previous_player_position = (-1, -1);
    let mut key = Default::default();
    while !tcod.root.window_closed() {
        tcod.con.clear();
        // Poll a single pending mouse/key event; anything else resets `key`
        // so stale keypresses are not re-processed.
        match tcod::input::check_for_event(input::MOUSE | input::KEY_PRESS) {
            Some(
                (_, Event::Mouse(m))
            ) => tcod.mouse = m,
            Some(
                (_, Event::Key(k))
            ) => key = k,
            _ => key = Default::default()
        }
        // Only recompute field-of-view when the player actually moved.
        let fov_recompute = previous_player_position != (objects[PLAYER].pos());
        rendering::render_all(
            fov_recompute,
            &objects,
            game,
            tcod,
        );
        tcod.root.flush();
        player_actions::level_up(objects, game, tcod);
        previous_player_position = objects[PLAYER].pos();
        let player_action = controls::handle_keys(
            key,
            objects,
            game,
            tcod,
        );
        if player_action == PlayerAction::Exit {
            // Persist the session before leaving the loop.
            save_game(objects, game).unwrap();
            break;
        }
        // Monsters act only if the player is alive and actually spent a turn.
        if objects[PLAYER].alive && player_action != PlayerAction::NoTurnTaken {
            // util::growl(objects, game); // Useful for in-game debug
            for id in 0..objects.len() {
                if objects[id].ai.is_some() {
                    ai::ai_take_turn(id, objects, game, &tcod.fov);
                }
            }
        }
    }
}
/// Serializes the object list and game state as JSON into a file named
/// `savegame` in the working directory, overwriting any previous save.
///
/// Fix: bare trait object `Box<Error>` → `Box<dyn Error>` (same type,
/// removes the deprecation warning on modern editions).
pub fn save_game(objects: &[Object], game: &Game) -> Result<(), Box<dyn Error>> {
    let save_data = serde_json::to_string(&(objects, game))?;
    let mut file = File::create("savegame")?;
    file.write_all(save_data.as_bytes())?;
    Ok(())
}
pub fn load_game() -> Result<(Vec<Object>, Game), Box<Error>> {
let mut json_save_state = String::new();
let mut file = File::open("savegame")?;
file.read_to_string(&mut json_save_state)?;
let result =
serde_json::from_str::<(Vec<Object>, Game)>(&json_save_state)?;
Ok(result)
}
|
mod cli;
/// Entry point: builds the project structure described by the CLI arguments,
/// propagating any I/O error as the process result.
fn main() -> std::io::Result<()> {
    cli::Project::new().build_structure()
}
|
mod event;
mod flags;
mod key;
pub use self::event::*;
pub use self::flags::*;
pub use self::key::*;
|
use sys;
/// Safe wrapper around Bullet's sequential-impulse constraint solver.
pub struct ConstraintSolver {
    // Boxed so the C++ object has a stable address for the raw pointer
    // handed out by `as_ptr`.
    solver: Box<sys::btSequentialImpulseConstraintSolver>,
}
impl ConstraintSolver {
    /// Constructs the solver through the generated FFI constructor.
    pub fn new() -> Self {
        ConstraintSolver {
            solver: Box::new(unsafe { sys::btSequentialImpulseConstraintSolver::new() }),
        }
    }
    /// Returns a raw base-class (`btConstraintSolver`) pointer for passing
    /// into Bullet APIs.
    ///
    /// NOTE(review): this casts a shared borrow to `*mut`; callers must not
    /// mutate through it while other Rust references are live.
    pub fn as_ptr(&self) -> *mut sys::btConstraintSolver {
        &*self.solver as *const _ as *mut _
    }
}
impl Drop for ConstraintSolver {
    fn drop(&mut self) {
        // Explicitly run the C++ destructor; the Box then frees the storage.
        unsafe {
            sys::btSequentialImpulseConstraintSolver_btSequentialImpulseConstraintSolver_destructor(&mut *self.solver as *mut _);
        }
    }
}
|
mod cluster_client;
mod node_client;
mod peer_client;
mod tcp_client;
pub use cluster_client::RemoteRaftGroups;
pub use node_client::NodeClient;
pub use peer_client::PeerClient;
pub use peer_client::TcpPeerClient;
|
use std::collections::HashMap;
use std::fs;
/// Reads a protein string (one-letter amino-acid codes) from `filename`,
/// sums the per-residue masses, and prints the total. Unknown characters
/// are reported and skipped rather than aborting.
///
/// Fix: the table previously stored masses as strings and re-parsed (with
/// `unwrap`) on every character; it now stores `f64` directly. The literals
/// are the same values, so the printed result is unchanged.
pub fn run(filename: &str) {
    let protein = fs::read_to_string(filename).expect("Something went wrong reading the file");
    let protein = protein.trim();
    // Per-residue masses (values match the Rosalind PRTM monoisotopic table).
    let table: HashMap<char, f64> = [
        ('A', 71.03711),
        ('C', 103.00919),
        ('D', 115.02694),
        ('E', 129.04259),
        ('F', 147.06841),
        ('G', 57.02146),
        ('H', 137.05891),
        ('I', 113.08406),
        ('K', 128.09496),
        ('L', 113.08406),
        ('M', 131.04049),
        ('N', 114.04293),
        ('P', 97.05276),
        ('Q', 128.05858),
        ('R', 156.10111),
        ('S', 87.03203),
        ('T', 101.04768),
        ('V', 99.06841),
        ('W', 186.07931),
        ('Y', 163.06333),
    ]
    .iter()
    .cloned()
    .collect();
    let mut weight = 0.0;
    for p in protein.chars() {
        match table.get(&p) {
            Some(&val) => weight += val,
            None => println!("Invalid codon: {}", p),
        }
    }
    println!("{}", weight);
}
|
pub use crate::apex::blackboard::abstraction::*;
pub use crate::apex::buffer::abstraction::*;
pub use crate::apex::error::abstraction::*;
pub use crate::apex::event::abstraction::*;
pub use crate::apex::file_system::abstraction::*;
pub use crate::apex::interrupt::abstraction::*;
pub use crate::apex::limits;
pub use crate::apex::logbook::abstraction::*;
pub use crate::apex::memory_block::abstraction::*;
pub use crate::apex::multicore::abstraction::*;
pub use crate::apex::mutex::abstraction::*;
pub use crate::apex::name_service::abstraction::*;
pub use crate::apex::partition::abstraction::*;
pub use crate::apex::process::abstraction::*;
pub use crate::apex::queuing::abstraction::*;
pub use crate::apex::queuing_list::abstraction::*;
pub use crate::apex::sampling::abstraction::*;
pub use crate::apex::sap::abstraction::*;
pub use crate::apex::schedules::abstraction::*;
pub use crate::apex::semaphore::abstraction::*;
pub use crate::apex::sp_data::abstraction::*;
pub use crate::apex::time::abstraction::*;
pub use crate::apex::types::abstraction::*;
|
pub mod operations;
pub mod op_types;
mod imagemagick_commands; |
/*===============================================================================================*/
// Copyright 2016 Kyle Finlay
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*===============================================================================================*/
extern crate serde;
use ::resource::config::ConfigLoader;
use ::resource::plugin::PluginLoader;
use ::util::Directory;
use self::serde::{Deserialize, Serialize};
use std::cell::RefCell;
use std::rc::Rc;
/*===============================================================================================*/
/*------RESOURCE MANAGER STRUCT------------------------------------------------------------------*/
/*===============================================================================================*/
/// Interface for resource loading and management.
pub struct ResourceManager {
    // Private
    // Shared, interiorly-mutable handles so the loaders can be handed out
    // via the `get_*_loader_raw` accessors while the manager keeps using them.
    _config_loader: Rc<RefCell<ConfigLoader>>,
    _plugin_loader: Rc<RefCell<PluginLoader>>,
}
/*===============================================================================================*/
/*------RESOURCE MANAGER PUBLIC METHODS----------------------------------------------------------*/
/*===============================================================================================*/
impl ResourceManager {
    /// Initializes the Resource Manager.
    ///
    /// The config loader is borrowed immutably; the plugin loader is
    /// borrowed mutably and handed `self` so it can look up resources.
    pub fn init (&mut self) {
        info! ("Initializing the Resource Manager.");
        self._config_loader.borrow ().init ();
        self._plugin_loader.borrow_mut ().init (self);
    }
/*-----------------------------------------------------------------------------------------------*/
    /// Creates a new config file.
    ///
    /// Delegates to the config loader, writing `T::default()` under
    /// `config_name` in the persistent config directory.
    pub fn new_config<T: Default + Serialize> (&self, config_name: &str) -> Result<(), ()> {
        let p_config_dir = &Directory::get_persistent_config_directory ();
        self._config_loader.borrow ().new_config::<T> (p_config_dir, config_name)
    }
/*-----------------------------------------------------------------------------------------------*/
    /// Loads a config file.
    pub fn load_config<T: Deserialize> (&self, config_name: &str) -> Result<T, ()> {
        let p_config_dir = &Directory::get_persistent_config_directory ();
        self._config_loader.borrow ().load_config::<T> (p_config_dir, config_name)
    }
/*-----------------------------------------------------------------------------------------------*/
    /// Saves a config file.
    pub fn save_config<T: Serialize> (&self, config_name: &str, config_data: &T) -> Result<(), ()> {
        let p_config_dir = &Directory::get_persistent_config_directory ();
        self._config_loader.borrow ().save_config::<T> (p_config_dir, config_name, config_data)
    }
/*-----------------------------------------------------------------------------------------------*/
    /// Returns a pointer to the config loader instance.
    ///
    /// NOTE(review): marked `unsafe` presumably to discourage holding the
    /// handle across RefCell borrows by the manager — confirm intent.
    pub unsafe fn get_config_loader_raw (&self) -> Rc<RefCell<ConfigLoader>> {
        self._config_loader.clone ()
    }
/*-----------------------------------------------------------------------------------------------*/
    /// Returns a pointer to the plugin loader instance.
    pub unsafe fn get_plugin_loader_raw (&self) -> Rc<RefCell<PluginLoader>> {
        self._plugin_loader.clone ()
    }
/*===============================================================================================*/
/*------RESOURCE MANAGER PUBLIC STATIC METHODS---------------------------------------------------*/
/*===============================================================================================*/
    /// Create a new instance of the Resource Manager.
    pub fn new () -> ResourceManager {
        ResourceManager {
            _config_loader: Rc::new (RefCell::new (ConfigLoader {})),
            _plugin_loader: Rc::new (RefCell::new (PluginLoader::new ()))
        }
    }
}
/*-----------------------------------------------------------------------------------------------*/
impl Default for ResourceManager {
fn default () -> ResourceManager {
ResourceManager::new ()
}
}
|
/*
* BitTorrent bencode decoder demo (Rust)
*
* Copyright (c) 2021 Project Nayuki. (MIT License)
* https://www.nayuki.io/page/bittorrent-bencode-format-tools
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
* - The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* - The Software is provided "as is", without warranty of any kind, express or
* implied, including but not limited to the warranties of merchantability,
* fitness for a particular purpose and noninfringement. In no event shall the
* authors or copyright holders be liable for any claim, damages or other
* liability, whether in an action of contract, tort or otherwise, arising from,
* out of or in connection with the Software or the use or other dealings in the
* Software.
*/
mod bencode;
use bencode::Bencode;
/// Reads the file named by the sole command-line argument, parses its data
/// as bencode, then prints the structure hierarchically to standard output.
/// Prints a usage message and exits with status 1 on bad arguments.
fn main() -> std::io::Result<()> {
    let argv: Vec<String> = std::env::args().collect();
    // Require exactly one argument naming an existing regular file
    // (short-circuit keeps argv[1] from being touched when absent).
    if argv.len() != 2 || !std::path::Path::new(&argv[1]).is_file() {
        eprintln!("Usage: {} Input.torrent", argv[0]);
        std::process::exit(1);
    }
    let file = std::path::Path::new(&argv[1]);
    let mut inp: std::fs::File = std::fs::OpenOptions::new().read(true).open(file)?;
    let parsed: Bencode = Bencode::parse(&mut inp)?;
    print_bencode_value(&parsed, 0);
    Ok(())
}
// Recursively prints the given value/structure to standard output,
// with at least the given indentation depth.
fn print_bencode_value(obj: &Bencode, depth: u32) {
    match obj {
        Bencode::Int(val) => {
            println!("Integer: {}", val);
        },
        Bencode::Bytes(ref bytes) => {
            print!("Byte string ({}) ", bytes.len());
            // Print as text if valid UTF-8, otherwise as space-separated hex,
            // truncated with "..." after 31 bytes (i == 30) to keep output short.
            match std::str::from_utf8(bytes) {
                Ok(s) => println!("(text): {}", s),
                Err(_) => {
                    print!("(binary): ");
                    for (i, b) in bytes.iter().enumerate() {
                        print!("{:02X}", b);
                        if i + 1 < bytes.len() {
                            print!(" ");
                            if i == 30 {
                                print!("...");
                                break;
                            }
                        }
                    }
                    println!();
                },
            }
        },
        Bencode::List(ref list) => {
            // Children are printed one level deeper, prefixed by their index.
            println!("List:");
            for (i, item) in list.iter().enumerate() {
                print_indent(depth + 1);
                print!("{} = ", i);
                print_bencode_value(item, depth + 1);
            }
        },
        Bencode::Dict(ref dict) => {
            // Keys are byte strings: printed as text when UTF-8, hex otherwise.
            println!("Dictionary:");
            for (key, item) in dict {
                print_indent(depth + 1);
                match std::str::from_utf8(key) {
                    Ok(s) => print!("{}", s),
                    Err(_) => {
                        for (i, b) in key.iter().enumerate() {
                            if i > 0 {
                                print!(" ");
                            }
                            print!("{:02X}", b);
                        }
                    }
                }
                print!(" = ");
                print_bencode_value(item, depth + 1);
            }
        },
    }
}
// Prints the given multiple of indentation whitespace
// to standard output, without a trailing newline.
fn print_indent(depth: u32) {
    let mut remaining = depth;
    while remaining > 0 {
        print!(" ");
        remaining -= 1;
    }
}
|
#[doc = "Register `CCR` reader"]
pub type R = crate::R<CCR_SPEC>;
#[doc = "Register `CCR` writer"]
pub type W = crate::W<CCR_SPEC>;
#[doc = "Field `CKMODE` reader - ADC clock mode These bits are set and cleared by software to define the ADC clock scheme (which is common to both master and slave ADCs): In all synchronous clock modes, there is no jitter in the delay from a timer trigger to the start of a conversion. Note: The software is allowed to write these bits only when the ADCs are disabled (ADCAL = 0, JADSTART = 0, ADSTART = 0, ADSTP = 0, ADDIS = 0 and ADEN = 0)."]
pub type CKMODE_R = crate::FieldReader;
#[doc = "Field `CKMODE` writer - ADC clock mode These bits are set and cleared by software to define the ADC clock scheme (which is common to both master and slave ADCs): In all synchronous clock modes, there is no jitter in the delay from a timer trigger to the start of a conversion. Note: The software is allowed to write these bits only when the ADCs are disabled (ADCAL = 0, JADSTART = 0, ADSTART = 0, ADSTP = 0, ADDIS = 0 and ADEN = 0)."]
pub type CKMODE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 2, O>;
#[doc = "Field `PRESC` reader - ADC prescaler These bits are set and cleared by software to select the frequency of the clock to the ADC. The clock is common for all the ADCs. other: reserved Note: The software is allowed to write these bits only when the ADC is disabled (ADCAL = 0, JADSTART = 0, ADSTART = 0, ADSTP = 0, ADDIS = 0 and ADEN = 0). The ADC prescaler value is applied only when CKMODE\\[1:0\\]
= 0b00."]
pub type PRESC_R = crate::FieldReader;
#[doc = "Field `PRESC` writer - ADC prescaler These bits are set and cleared by software to select the frequency of the clock to the ADC. The clock is common for all the ADCs. other: reserved Note: The software is allowed to write these bits only when the ADC is disabled (ADCAL = 0, JADSTART = 0, ADSTART = 0, ADSTP = 0, ADDIS = 0 and ADEN = 0). The ADC prescaler value is applied only when CKMODE\\[1:0\\]
= 0b00."]
pub type PRESC_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 4, O>;
#[doc = "Field `VREFEN` reader - VREFINT enable This bit is set and cleared by software to enable/disable the VREFINT channel."]
pub type VREFEN_R = crate::BitReader;
#[doc = "Field `VREFEN` writer - VREFINT enable This bit is set and cleared by software to enable/disable the VREFINT channel."]
pub type VREFEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `TSEN` reader - VSENSE enable This bit is set and cleared by software to control VSENSE."]
pub type TSEN_R = crate::BitReader;
#[doc = "Field `TSEN` writer - VSENSE enable This bit is set and cleared by software to control VSENSE."]
pub type TSEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
#[doc = "Field `VBATEN` reader - VBAT enable This bit is set and cleared by software to control."]
pub type VBATEN_R = crate::BitReader;
#[doc = "Field `VBATEN` writer - VBAT enable This bit is set and cleared by software to control."]
pub type VBATEN_W<'a, REG, const O: u8> = crate::BitWriter<'a, REG, O>;
impl R {
    // svd2rust-generated reader accessors: each extracts a field from the
    // captured raw register value by shifting and masking.
    #[doc = "Bits 16:17 - ADC clock mode These bits are set and cleared by software to define the ADC clock scheme (which is common to both master and slave ADCs): In all synchronous clock modes, there is no jitter in the delay from a timer trigger to the start of a conversion. Note: The software is allowed to write these bits only when the ADCs are disabled (ADCAL = 0, JADSTART = 0, ADSTART = 0, ADSTP = 0, ADDIS = 0 and ADEN = 0)."]
    #[inline(always)]
    pub fn ckmode(&self) -> CKMODE_R {
        // 2-bit field at offset 16.
        CKMODE_R::new(((self.bits >> 16) & 3) as u8)
    }
    #[doc = "Bits 18:21 - ADC prescaler These bits are set and cleared by software to select the frequency of the clock to the ADC. The clock is common for all the ADCs. other: reserved Note: The software is allowed to write these bits only when the ADC is disabled (ADCAL = 0, JADSTART = 0, ADSTART = 0, ADSTP = 0, ADDIS = 0 and ADEN = 0). The ADC prescaler value is applied only when CKMODE\\[1:0\\]
= 0b00."]
    #[inline(always)]
    pub fn presc(&self) -> PRESC_R {
        // 4-bit field at offset 18.
        PRESC_R::new(((self.bits >> 18) & 0x0f) as u8)
    }
    #[doc = "Bit 22 - VREFINT enable This bit is set and cleared by software to enable/disable the VREFINT channel."]
    #[inline(always)]
    pub fn vrefen(&self) -> VREFEN_R {
        VREFEN_R::new(((self.bits >> 22) & 1) != 0)
    }
    #[doc = "Bit 23 - VSENSE enable This bit is set and cleared by software to control VSENSE."]
    #[inline(always)]
    pub fn tsen(&self) -> TSEN_R {
        TSEN_R::new(((self.bits >> 23) & 1) != 0)
    }
    #[doc = "Bit 24 - VBAT enable This bit is set and cleared by software to control."]
    #[inline(always)]
    pub fn vbaten(&self) -> VBATEN_R {
        VBATEN_R::new(((self.bits >> 24) & 1) != 0)
    }
}
impl W {
    // svd2rust-generated writer proxies: each returns a typed field writer
    // anchored at the field's bit offset within the CCR register.
    #[doc = "Bits 16:17 - ADC clock mode These bits are set and cleared by software to define the ADC clock scheme (which is common to both master and slave ADCs): In all synchronous clock modes, there is no jitter in the delay from a timer trigger to the start of a conversion. Note: The software is allowed to write these bits only when the ADCs are disabled (ADCAL = 0, JADSTART = 0, ADSTART = 0, ADSTP = 0, ADDIS = 0 and ADEN = 0)."]
    #[inline(always)]
    #[must_use]
    pub fn ckmode(&mut self) -> CKMODE_W<CCR_SPEC, 16> {
        CKMODE_W::new(self)
    }
    #[doc = "Bits 18:21 - ADC prescaler These bits are set and cleared by software to select the frequency of the clock to the ADC. The clock is common for all the ADCs. other: reserved Note: The software is allowed to write these bits only when the ADC is disabled (ADCAL = 0, JADSTART = 0, ADSTART = 0, ADSTP = 0, ADDIS = 0 and ADEN = 0). The ADC prescaler value is applied only when CKMODE\\[1:0\\]
= 0b00."]
    #[inline(always)]
    #[must_use]
    pub fn presc(&mut self) -> PRESC_W<CCR_SPEC, 18> {
        PRESC_W::new(self)
    }
    #[doc = "Bit 22 - VREFINT enable This bit is set and cleared by software to enable/disable the VREFINT channel."]
    #[inline(always)]
    #[must_use]
    pub fn vrefen(&mut self) -> VREFEN_W<CCR_SPEC, 22> {
        VREFEN_W::new(self)
    }
    #[doc = "Bit 23 - VSENSE enable This bit is set and cleared by software to control VSENSE."]
    #[inline(always)]
    #[must_use]
    pub fn tsen(&mut self) -> TSEN_W<CCR_SPEC, 23> {
        TSEN_W::new(self)
    }
    #[doc = "Bit 24 - VBAT enable This bit is set and cleared by software to control."]
    #[inline(always)]
    #[must_use]
    pub fn vbaten(&mut self) -> VBATEN_W<CCR_SPEC, 24> {
        VBATEN_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe: a raw write bypasses the field proxies, so the caller is
    // responsible for respecting the register's layout and reserved bits.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "ADC common control register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`ccr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`ccr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct CCR_SPEC;
// 32-bit register.
impl crate::RegisterSpec for CCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`ccr::R`](R) reader structure"]
impl crate::Readable for CCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`ccr::W`](W) writer structure"]
impl crate::Writable for CCR_SPEC {
    // No write-0-to-modify / write-1-to-modify fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets CCR to value 0"]
impl crate::Resettable for CCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
use std::sync::Arc;
use chrono::Utc;
use eyre::Report;
use hashbrown::HashMap;
use rosu_v2::prelude::Beatmapset;
use twilight_model::channel::Message;
use crate::{
commands::osu::MapsetEntry, core::Context, custom_client::OsuTrackerMapsetEntry,
embeds::OsuTrackerMapsetsEmbed, BotResult,
};
use super::{Pages, Pagination};
/// Paginated embed over osutracker mapset entries.
pub struct OsuTrackerMapsetsPagination {
    // Message the paginated embed is attached to.
    msg: Message,
    // Paging state (index, per_page, total_pages).
    pages: Pages,
    // All entries being paged through.
    entries: Vec<OsuTrackerMapsetEntry>,
    // Cache of mapset metadata, keyed by mapset id.
    mapsets: HashMap<u32, MapsetEntry>,
    ctx: Arc<Context>,
}
impl OsuTrackerMapsetsPagination {
    /// Creates a pagination over the given entries, ten entries per page.
    pub fn new(
        ctx: Arc<Context>,
        msg: Message,
        entries: Vec<OsuTrackerMapsetEntry>,
        mapsets: HashMap<u32, MapsetEntry>,
    ) -> Self {
        let pages = Pages::new(10, entries.len());

        Self {
            msg,
            pages,
            entries,
            mapsets,
            ctx,
        }
    }
}
#[async_trait]
impl Pagination for OsuTrackerMapsetsPagination {
    type PageData = OsuTrackerMapsetsEmbed;

    fn msg(&self) -> &Message {
        &self.msg
    }

    fn pages(&self) -> Pages {
        self.pages
    }

    fn pages_mut(&mut self) -> &mut Pages {
        &mut self.pages
    }

    fn single_step(&self) -> usize {
        self.pages.per_page
    }

    /// Builds the embed for the current page, lazily fetching any mapset
    /// that is not cached yet: database first, then the osu! API (caching
    /// the API result back into the database on success).
    async fn build_page(&mut self) -> BotResult<Self::PageData> {
        let index = self.pages.index;
        // Use the configured page size instead of a hard-coded 10 so the
        // slice stays consistent with `single_step` and `Pages::new`.
        let end = (index + self.pages.per_page).min(self.entries.len());
        let entries = &self.entries[index..end];

        for entry in entries {
            let mapset_id = entry.mapset_id;

            if self.mapsets.contains_key(&mapset_id) {
                continue;
            }

            let mapset_fut = self.ctx.psql().get_beatmapset::<Beatmapset>(mapset_id);

            let mapset = match mapset_fut.await {
                Ok(mapset) => mapset,
                Err(_) => {
                    // Not in the database; fetch from the API and cache it.
                    let mapset = self.ctx.osu().beatmapset(mapset_id).await?;

                    if let Err(err) = self.ctx.psql().insert_beatmapset(&mapset).await {
                        warn!("{:?}", Report::new(err));
                    }

                    mapset
                }
            };

            let entry = MapsetEntry {
                creator: mapset.creator_name,
                name: format!("{} - {}", mapset.artist, mapset.title),
                mapset_id,
                // Unranked mapsets get "now" as a placeholder date.
                ranked_date: mapset.ranked_date.unwrap_or_else(Utc::now),
                user_id: mapset.creator_id,
            };

            self.mapsets.insert(mapset_id, entry);
        }

        let page = self.page();
        let pages = self.pages.total_pages;
        let embed = OsuTrackerMapsetsEmbed::new(entries, &self.mapsets, (page, pages));

        Ok(embed)
    }
}
|
use std::str::FromStr;
use std::time::Duration as StdDuration;
use nom::{
branch::alt,
bytes::complete::tag,
character::complete::digit1,
combinator::{all_consuming, map_res, opt},
error::{Error, ErrorKind, ParseError},
number::complete::float,
sequence::{preceded, separated_pair, terminated, tuple},
Err, IResult,
};
const YEAR_IN_S: u64 = 31556952; // gregorian - includes leap-seconds
/// An ISO 8601 duration split into its calendar and time components.
///
/// Components are `f32` because the standard allows fractional values
/// (e.g. `PT0.5S` or `PT1,5S`).
#[derive(Debug, PartialEq)]
pub struct Duration {
    pub year: f32,
    pub month: f32,
    pub day: f32,
    pub hour: f32,
    pub minute: f32,
    pub second: f32,
}
impl Duration {
    /// Constructs a `Duration` from its individual components.
    pub fn new(year: f32, month: f32, day: f32, hour: f32, minute: f32, second: f32) -> Self {
        Duration {
            year,
            month,
            day,
            hour,
            minute,
            second,
        }
    }
    /// Converts into a `std::time::Duration`, using a Gregorian-year and a
    /// 30-day-month approximation.
    ///
    /// NOTE(review): every component except `second` is rounded to the
    /// nearest whole unit (e.g. 0.5 years rounds to 1 year); only seconds
    /// keep their fractional part (as milliseconds). Confirm this is
    /// acceptable before relying on fractional non-second inputs.
    pub fn to_std(&self) -> StdDuration {
        // Fractional seconds are carried separately as milliseconds.
        let millis = (self.second.fract() * 1000.0).round() as u64;
        StdDuration::from_millis(
            (self.year.round() as u64 * YEAR_IN_S
                + self.month.round() as u64 * 60 * 60 * 24 * 30 // there is no official answer on how long a month is, so 30 days will have to do
                + self.day.round() as u64 * 60 * 60 * 24
                + self.hour.round() as u64 * 60 * 60
                + self.minute.round() as u64 * 60
                + self.second.trunc() as u64)
                * 1000
                + millis,
        )
    }
    /// Parses an ISO 8601 duration string such as `P1Y2M3DT4H5M6S` or
    /// `P2W`; the whole input must be consumed.
    pub fn parse(input: &str) -> Result<Duration, Err<Error<&str>>> {
        let (_, duration) = all_consuming(preceded(
            tag("P"),
            alt((parse_week_format, parse_basic_format)),
        ))(input)?;
        Ok(duration)
    }
}
fn decimal_comma_number(input: &str) -> IResult<&str, f32> {
map_res(separated_pair(digit1, tag(","), digit1), |(a, b)| {
f32::from_str(&format!("{}.{}", a, b))
})(input)
}
/// Builds a parser for a number (dot-decimal, comma-decimal, or plain
/// integer) immediately followed by `designator`, e.g. `12M` or `1,5H`.
fn value_with_designator(designator: &str) -> impl Fn(&str) -> IResult<&str, f32> + '_ {
    move |input| {
        let number = alt((
            float,
            decimal_comma_number,
            map_res(digit1, f32::from_str),
        ));

        terminated(number, tag(designator))(input)
    }
}
/// Parses the designator form `[nY][nM][nD][T[nH][nM][nS]]` (the leading
/// `P` is consumed by the caller). At least one component must be present.
fn parse_basic_format(input: &str) -> IResult<&str, Duration> {
    // Date components, each optional.
    let (input, (year, month, day)) = tuple((
        opt(value_with_designator("Y")),
        opt(value_with_designator("M")),
        opt(value_with_designator("D")),
    ))(input)?;
    // Time components only appear after a 'T' separator.
    let (input, time) = opt(preceded(
        tag("T"),
        tuple((
            opt(value_with_designator("H")),
            opt(value_with_designator("M")),
            opt(value_with_designator("S")),
        )),
    ))(input)?;
    let (hour, minute, second) = time.unwrap_or_default();
    // An entirely empty duration ("P" alone) is rejected.
    if year.is_none()
        && month.is_none()
        && day.is_none()
        && hour.is_none()
        && minute.is_none()
        && second.is_none()
    {
        Err(Err::Error(ParseError::from_error_kind(
            input,
            ErrorKind::Verify,
        )))
    } else {
        Ok((
            input,
            Duration {
                year: year.unwrap_or_default(),
                month: month.unwrap_or_default(),
                day: day.unwrap_or_default(),
                hour: hour.unwrap_or_default(),
                minute: minute.unwrap_or_default(),
                second: second.unwrap_or_default(),
            },
        ))
    }
}
/// Parses the week form (`nW`), mapping weeks onto days (1 week = 7 days).
fn parse_week_format(input: &str) -> IResult<&str, Duration> {
    let (rest, weeks) = value_with_designator("W")(input)?;

    let duration = Duration {
        year: 0.,
        month: 0.,
        day: weeks * 7.,
        hour: 0.,
        minute: 0.,
        second: 0.,
    };

    Ok((rest, duration))
}
/// Placeholder for the ISO 8601 extended format (`PYYYY-MM-DDThh:mm:ss`);
/// not implemented yet, so calling this panics.
fn _parse_extended_format(_input: &str) -> IResult<&str, Duration> {
    unimplemented!()
}
|
pub mod opaque;
pub mod password;
pub mod ssh_key;
pub mod x509;
|
//! Input tokenizer
use std;
use std::borrow::ToOwned;
use std::rc::Rc;
use parser::tokens::{Token, SourceLocation, dummy_source};
use parser::util::{SharedString, rcstr, rcstring};
// --- Lexer: Error -------------------------------------------------------------
const SYMBOL_CHARS: &'static str = "+-*/%\\=<>!?&_#$§^`.,:@";
pub type LexerResult<T> = Result<T, LexerError>;
pub enum LexerError {
    /// A specific character was required but something else was found.
    UnexpectedChar {
        expected: SharedString,
        found: SharedString,
        location: SourceLocation
    },
    /// The current character cannot start any known token.
    UnknownToken {
        token: SharedString, // result of curr_repr
        location: SourceLocation
    },
    /// A numeric literal that failed to parse.
    InvalidInteger {
        input: SharedString,
        location: SourceLocation
    }
}
impl std::fmt::Debug for LexerError {
    /// Renders a human-readable description of the error and its location.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match *self {
            LexerError::UnexpectedChar { ref expected, ref found, ref location } => {
                write!(f, "unexpected character: expected `{}`, but found {} at {}",
                       expected, found, location)
            },
            LexerError::UnknownToken { ref token, ref location } => {
                write!(f, "unknown token: `{}` at {}", token, location)
            },
            LexerError::InvalidInteger { ref input, ref location } => {
                write!(f, "invalid integer: `{}` at {}", input, location)
            }
        }
    }
}
// Bails out of the enclosing function with a `LexerError::UnknownToken`.
macro_rules! unknown_token(
    ($token:expr; $location:expr) => (
        return Err(LexerError::UnknownToken {
            token: $token.clone(),
            location: $location
        })
    )
);
// Bails out of the enclosing function with a `LexerError::InvalidInteger`.
macro_rules! invalid_number(
    ($input:expr; $location:expr) => (
        return Err(LexerError::InvalidInteger {
            input: $input.clone(),
            location: $location
        })
    )
);
// --- Lexer --------------------------------------------------------------------
pub trait Lexer {
    /// Get the source location of the current token
    fn get_source(&self) -> SourceLocation;
    /// Get the next token (EOF is reported as a `Token::EOF`, not an error)
    fn next_token(&mut self) -> LexerResult<Token>;
    /// Tokenize the remaining input into a vector
    fn tokenize(&mut self) -> LexerResult<Vec<Token>>;
}
/// Lexer for tokenize a file from disk/memory.
/// Lexer that tokenizes an in-memory string (e.g. a file read from disk).
pub struct FileLexer<'a> {
    // The full input text.
    source: &'a str,
    // File name used in reported source locations.
    file: SharedString,
    // Byte length of `source`.
    len: usize,
    // Current byte offset into `source`.
    pos: usize,
    // Current character, or `None` at EOF.
    curr: Option<char>,
    // Current 1-based line number.
    lineno: usize
}
impl<'a> FileLexer<'a> {
    /// Creates a lexer over `source`; `file` is only used in locations.
    pub fn new(source: &'a str, file: &'a str) -> FileLexer<'a> {
        FileLexer {
            source: source,
            file: rcstr(file),
            len: source.len(),
            pos: 0,
            curr: if source.len() > 0 { Some(source.char_at(0)) } else { None },
            lineno: 1
        }
    }
    /// --- Internal methods: Helpers -------------------------------------------
    /// Whether we've reached EOF
    fn is_eof(&self) -> bool {
        self.curr.is_none()
    }
    /// Move on to the next char
    ///
    /// NOTE(review): `pos` advances by one per bump while `nextch` skips
    /// over multi-byte UTF-8 sequences -- confirm `pos` stays in sync with
    /// the actual byte offset for non-ASCII input.
    fn bump(&mut self) {
        self.curr = self.nextch();
        self.pos += 1;
        debug!("Moved on to {:?}", self.curr)
    }
    /// Get the next char if possible
    fn nextch(&self) -> Option<char> {
        let mut new_pos = self.pos + 1;
        // When encountering multi-byte UTF-8, we may stop in the middle
        // of it. Fast forward till we see the next actual char or EOF
        while !self.source.is_char_boundary(new_pos)
            && self.pos < self.len {
            new_pos += 1;
        }
        if new_pos < self.len {
            Some(self.source.char_at(new_pos))
        } else {
            None
        }
    }
    /// Consumes the current char if it equals `expect`; otherwise returns
    /// an `UnexpectedChar` error describing what was found instead.
    fn expect(&mut self, expect: char) -> LexerResult<()> {
        if self.curr != Some(expect) {
            let expect_str = [expect].iter().cloned().collect::<String>().escape_default();
            let found_str = match self.curr {
                Some(_) => format!("'{}'", self.curr_repr()),
                None => "EOF".to_owned()
            };
            return Err(LexerError::UnexpectedChar {
                expected: rcstring(expect_str),
                found: rcstring(found_str),
                location: self.get_source()
            });
        }
        self.bump();
        Ok(())
    }
    /// Get a printable representation of the current char
    fn curr_repr(&self) -> SharedString {
        match self.curr {
            Some(c) => {
                Rc::new([c].iter().cloned().collect::<String>().escape_default())
            },
            None => rcstr("EOF")
        }
    }
    /// Collect a series of chars starting at the current character
    fn collect<F>(&mut self, cond: F) -> SharedString where F: Fn(&char) -> bool {
        let mut chars = vec![];
        debug!("start colleting");
        while let Some(c) = self.curr {
            if cond(&c) {
                chars.push(c);
                self.bump();
            } else {
                debug!("colleting finished");
                break;
            }
        }
        Rc::new(chars.iter().cloned().collect::<String>())
    }
    /// Skips chars for as long as `cond` holds (nothing is collected).
    fn eat_all<F>(&mut self, cond: F) where F: Fn(&char) -> bool {
        while let Some(c) = self.curr {
            if cond(&c) { self.bump(); }
            else { break; }
        }
    }
    // --- Internal methods: Tokenizers -----------------------------------------
    /// Tokenize a number
    ///
    /// An optional leading '-' is folded into the sign; digits and '.' are
    /// then collected and parsed as a float.
    fn tokenize_number(&mut self) -> LexerResult<Token> {
        let sign = if self.curr == Some('-') {
            self.bump();
            -1.0
        } else {
            1.0
        };
        let number = self.collect(|c| c.is_numeric() || *c == '.');
        let number = if let Ok(m) = number.parse() { m }
        else { invalid_number!(number; self.get_source()) };
        Ok(Token::NUMBER(sign * number))
    }
    /// Tokenize a symbol (alphanumerics plus the SYMBOL_CHARS set)
    fn tokenize_symbol(&mut self) -> LexerResult<Token> {
        let symbol = self.collect(|c| {
            c.is_alphanumeric() || SYMBOL_CHARS.contains_char(*c)
        });
        Ok(Token::SYMBOL(symbol))
    }
    /// Tokenize a string
    ///
    /// Reads until an unescaped closing '"', then rewrites the supported
    /// backslash escape sequences into their literal characters.
    fn tokenize_string(&mut self) -> LexerResult<Token> {
        self.bump();
        let mut string = vec![];
        let mut escaped = false;
        while let Some(c) = self.curr {
            if c == '"' && !escaped {
                break;
            } else {
                string.push(c);
                self.bump();
                escaped = c == '\\';
            }
        }
        try!(self.expect('"'));
        let string = string.iter().cloned().collect::<String>()
            .replace("\\n", "\n")
            .replace("\\r", "\r")
            .replace("\\t", "\t")
            .replace("\\\\", "\\")
            .replace("\\\"", "\"")
            .replace("\\0", "\0");
        Ok(Token::STRING(Rc::new(string)))
    }
    /// Read the next token and return it
    ///
    /// `Ok(None)` means the input was ignorable (whitespace or a `;`
    /// comment); `Ok(Some(Token::EOF))` is returned at end of input.
    fn read_token(&mut self) -> LexerResult<Option<Token>> {
        let c = match self.curr {
            Some(c) => c,
            None => return Ok(Some(Token::EOF))
        };
        let token = match c {
            c if c.is_numeric() => {
                try!(self.tokenize_number())
            },
            c if c.is_alphanumeric() || SYMBOL_CHARS.contains_char(c) => {
                try!(self.tokenize_symbol())
            },
            '"' => {
                try!(self.tokenize_string())
            }
            '(' => { self.bump(); Token::LPAREN },
            ')' => { self.bump(); Token::RPAREN },
            '{' => { self.bump(); Token::LBRACE },
            '}' => { self.bump(); Token::RBRACE },
            ';' => {
                // A ';' comment runs to the end of the line.
                self.eat_all(|c| *c != '\n');
                return Ok(None);
            },
            c if c.is_whitespace() => {
                if c == '\n' { self.lineno += 1; }
                self.bump();
                return Ok(None);
            },
            _ => {
                unknown_token!(self.curr_repr(); self.get_source())
                // UNKNOWN(format!("{}", c).into_string())
            }
        };
        Ok(Some(token))
    }
}
impl<'a> Lexer for FileLexer<'a> {
    /// Builds the location (file name + current line) for error reporting.
    fn get_source(&self) -> SourceLocation {
        SourceLocation {
            filename: self.file.clone(),
            lineno: self.lineno
        }
    }
    /// Returns the next significant token, skipping ignorable input;
    /// yields `Token::EOF` once the input is exhausted.
    fn next_token(&mut self) -> LexerResult<Token> {
        if self.is_eof() {
            Ok(Token::EOF)
        } else {
            let mut tok = try!(self.read_token());
            while tok.is_none() {
                // Token is to be ignored, try next one
                tok = try!(self.read_token());
            }
            Ok(tok.unwrap()) // Can't really be None any more
        }
    }
    /// Eagerly tokenizes the whole input (the trailing EOF token is not
    /// included in the result).
    #[allow(dead_code)] // Used for tests
    fn tokenize(&mut self) -> LexerResult<Vec<Token>> {
        let mut tokens = vec![];
        // NOTE: We can't use `for c in self.iter` because then we can't
        // access `self.iter` inside the body because it's borrowed.
        while !self.is_eof() {
            debug!("Processing {:?}", self.curr);
            if let Some(t) = try!(self.read_token()) {
                tokens.push(t);
            }
            debug!("So far: {:?}", tokens);
        }
        Ok(tokens)
    }
}
// A dummy lexer that takes it's tokens from a vector. Used for parser testing.
impl Lexer for Vec<Token> {
    /// Dummy lexers have no real source; report a placeholder location.
    fn get_source(&self) -> SourceLocation {
        dummy_source()
    }
    /// Pops tokens from the front of the vector; yields `EOF` once empty.
    fn next_token(&mut self) -> LexerResult<Token> {
        if self.is_empty() {
            Ok(Token::EOF)
        } else {
            Ok(self.remove(0))
        }
    }
    /// Returns a copy of the remaining tokens without consuming them.
    fn tokenize(&mut self) -> LexerResult<Vec<Token>> {
        Ok(self.clone())
    }
}
#[cfg(test)]
mod tests {
    use parser::lexer::{Lexer, FileLexer};
    use parser::tokens::Token;
    use parser::tokens::Token::*;
    use parser::util::rcstr;
    /// Helper: fully tokenizes `src`, panicking on any lexer error.
    fn tokenize(src: &'static str) -> Vec<Token> {
        FileLexer::new(src, "<test>").tokenize().unwrap()
    }
    #[test]
    fn test_symbol() {
        assert_eq!(tokenize("+"),
                   vec![SYMBOL(rcstr("+"))]);
        assert_eq!(tokenize("-"),
                   vec![SYMBOL(rcstr("-"))]);
        assert_eq!(tokenize("*"),
                   vec![SYMBOL(rcstr("*"))]);
        assert_eq!(tokenize("/"),
                   vec![SYMBOL(rcstr("/"))]);
    }
    #[test]
    fn test_number() {
        assert_eq!(tokenize("123"),
                   vec![NUMBER(123.)]);
    }
    // Disabled: a bare '-' currently lexes as a symbol, so negative
    // literals are not produced by `tokenize` on their own.
    /*#[test]
    fn test_number_neg() {
        assert_eq!(tokenize("-123"),
                   vec![NUMBER(-123)]);
    }*/
    #[test]
    fn test_parens() {
        assert_eq!(tokenize("("),
                   vec![LPAREN]);
        assert_eq!(tokenize(")"),
                   vec![RPAREN]);
    }
}
use wasm_bindgen::prelude::*;
use crate::{active_tab, goto_page};
/// Navigates the single-page app to the quadcopter project page.
#[wasm_bindgen]
pub async fn quadcopter() {
    // Set active tab.
    // (Empty string: no nav tab is highlighted for a project page.)
    active_tab("");
    // Go to the page.
    // The `ver` query parameter acts as a cache-busting token.
    goto_page(
        "/projects/quadcopter",
        "/api/projects/quadcopter/quadcopter.html?ver=HoXPgcmMerc",
        "Quadcopter",
    )
    .await;
}
|
use std::io;
use std::io::BufRead;
mod ledger;
use ledger::account::Account;
use ledger::title::Title;
/// A ledger transaction: a title line followed by its account postings.
#[derive(Debug)]
struct Transaction {
    title: Title,
    accounts: Vec<Account>,
}
/// A parse error, pointing at the offending input line.
struct ErrorMsg<'a> {
    // 1-based line number where parsing failed.
    line_no: i32,
    message: &'a str,
}
/// Parses ledger-style input into transactions.
///
/// A transaction starts with a title line (recognized by `Title::parse`);
/// lines beginning with a space after it are parsed as account postings.
///
/// Returns all parsed transactions, or an `ErrorMsg` naming the first
/// offending line. (Previously this always fell through to a placeholder
/// `Err(... "todo")`, discarding every successfully parsed transaction.)
fn parse<'a>(source: impl BufRead) -> Result<Vec<Transaction>, ErrorMsg<'a>> {
    let mut num = 0i32;
    let mut lines_iter = source.lines();
    let mut txs: Vec<Transaction> = vec![];
    while let Some(line) = lines_iter.next() {
        num += 1;
        if let Ok(line) = line {
            if let Some(title) = Title::parse(&line) {
                let mut accs: Vec<Account> = vec![];
                // NOTE(review): this inner loop consumes every remaining
                // line, so a second transaction's title line would be
                // swallowed here. Confirm inputs only ever contain one
                // title, or switch to a peekable iterator that stops at
                // the next title line.
                while let Some(Ok(line)) = lines_iter.next() {
                    num += 1;
                    if line.starts_with(' ') {
                        if let Some(account) = Account::parse(&line) {
                            accs.push(account);
                        } else {
                            return Err(ErrorMsg {
                                line_no: num,
                                message: "Account format error",
                            });
                        }
                    }
                }
                txs.push(Transaction {
                    title,
                    accounts: accs,
                });
            }
        } else {
            return Err(ErrorMsg {
                line_no: num,
                message: "Input error",
            });
        }
    }
    Ok(txs)
}
/// Reads a ledger from stdin and reports the outcome of parsing it.
///
/// Previously the parse result was bound to an unused variable and
/// silently discarded; surface it instead.
fn main() {
    match parse(io::stdin().lock()) {
        Ok(txs) => println!("parsed {} transaction(s)", txs.len()),
        Err(err) => eprintln!("error at line {}: {}", err.line_no, err.message),
    }
}
|
use crate::common::*;
/// A module, the top-level type produced by the parser. So-named because
/// although at present, all justfiles consist of a single module, in the future
/// we will likely have multi-module and multi-file justfiles.
///
/// Not all successful parses result in valid justfiles, so additional
/// consistency checks and name resolution are performed by the `Analyzer`,
/// which produces a `Justfile` from a `Module`.
///
/// The `'src` lifetime ties the parsed items to the source text they
/// borrow from.
#[derive(Debug)]
pub(crate) struct Module<'src> {
    /// Items in the justfile
    pub(crate) items: Vec<Item<'src>>,
    /// Non-fatal warnings encountered during parsing
    pub(crate) warnings: Vec<Warning>,
}
|
extern crate pkg_config;
use std::env;
use std::process;
/// Cargo build script: probe for OpenSSL via pkg-config.
///
/// On success, the include paths are exported as `cargo:include` and the
/// script exits 0; otherwise it exits 1 so the caller knows probing failed.
fn main() {
    let info = match pkg_config::find_library("openssl") {
        Ok(info) => info,
        Err(_) => process::exit(1),
    };

    let joined = env::join_paths(info.include_paths).unwrap();
    println!("cargo:include={}", joined.to_str().unwrap());
    process::exit(0);
}
|
/// Identifier for an entity (a plain `usize` alias).
pub type Entity = usize;
/// A single element of a Morse-code stream.
#[derive(Debug)]
pub enum Signal {
    Dot,
    Dash,
    /// Gap between letters.
    ShortGap,
    /// Gap between words.
    LongGap,
}

/// Decodes a sequence of dots and dashes into the letter it spells.
///
/// Previously only 'a' and 'b' were recognized; this covers the full
/// international Morse alphabet a-z. Returns `None` for sequences that are
/// not a known letter, including the empty sequence and any sequence
/// containing a gap signal.
pub fn signals_to_char(signals: &[Signal]) -> Option<char> {
    use Signal::*;

    // Render the sequence as "."/"-" text; gaps have no letter encoding.
    let mut code = String::with_capacity(signals.len());
    for signal in signals {
        match signal {
            Dot => code.push('.'),
            Dash => code.push('-'),
            ShortGap | LongGap => return None,
        }
    }

    // International Morse code, letters a-z.
    let c = match code.as_str() {
        ".-" => 'a',
        "-..." => 'b',
        "-.-." => 'c',
        "-.." => 'd',
        "." => 'e',
        "..-." => 'f',
        "--." => 'g',
        "...." => 'h',
        ".." => 'i',
        ".---" => 'j',
        "-.-" => 'k',
        ".-.." => 'l',
        "--" => 'm',
        "-." => 'n',
        "---" => 'o',
        ".--." => 'p',
        "--.-" => 'q',
        ".-." => 'r',
        "..." => 's',
        "-" => 't',
        "..-" => 'u',
        "...-" => 'v',
        ".--" => 'w',
        "-..-" => 'x',
        "-.--" => 'y',
        "--.." => 'z',
        _ => return None,
    };

    Some(c)
}
/// Streaming Morse decoder: buffers dots/dashes until a gap arrives.
pub struct SignalProcessor {
    // Dots/dashes received since the last gap.
    previous_signals: Vec<Signal>,
}
impl SignalProcessor {
    /// Creates a processor with no buffered signals.
    pub fn new() -> Self {
        SignalProcessor {
            previous_signals: Vec::new(),
        }
    }

    /// Feeds one signal into the processor.
    ///
    /// Dots and dashes are buffered and yield `None`. A gap flushes the
    /// buffer and yields the decoded letter (empty string if the buffer
    /// did not spell a known letter); `LongGap` appends a trailing space
    /// to mark a word boundary.
    pub fn next(&mut self, signal: Signal) -> Option<String> {
        use Signal::*;

        let suffix = match signal {
            Dot | Dash => {
                self.previous_signals.push(signal);
                return None;
            }
            ShortGap => "",
            LongGap => " ",
        };

        let buffered: Vec<_> = self.previous_signals.drain(..).collect();
        let decoded = match signals_to_char(&buffered) {
            Some(c) => format!("{}{}", c, suffix),
            None => suffix.to_string(),
        };

        Some(decoded)
    }
}
/// Demo: feeds a short signal sequence through the processor, printing
/// the decoder output after each signal.
fn main() {
    use Signal::*;

    let signals = vec![
        Dot, Dash, ShortGap, Dash, Dot, Dot, Dot, LongGap, Dot, Dash, ShortGap,
    ];

    let mut processor = SignalProcessor::new();

    for signal in signals {
        println!("{:?}", processor.next(signal));
    }
}
|
use std::cmp::{max, min};
/// Solves the Day 05 Part 1 puzzle with respect to the given input.
/// Solves the Day 05 Part 1 puzzle with respect to the given input.
///
/// Counts grid cells covered by two or more vents, considering only
/// single-cell, horizontal, and vertical vents.
pub fn part_1(input: String) {
    // Each line has the form "x1,y1 -> x2,y2".
    let parse_pair = |half: &str| {
        let mut nums = half.split(',');
        let x = nums.next().unwrap().parse::<usize>().unwrap();
        let y = nums.next().unwrap().parse::<usize>().unwrap();
        (x, y)
    };

    let vents: Vec<Vent> = input
        .lines()
        .map(|line| {
            let mut halves = line.split(" -> ");
            let (x1, y1) = parse_pair(halves.next().unwrap());
            let (x2, y2) = parse_pair(halves.next().unwrap());
            Vent { x1, y1, x2, y2 }
        })
        .collect();

    // Field dimensions: one past the largest coordinate in each direction.
    let w = vents.iter().map(|v| max(v.x1, v.x2)).max().unwrap() + 1;
    let h = vents.iter().map(|v| max(v.y1, v.y2)).max().unwrap() + 1;

    let mut field = vec![vec![0i32; w]; h];
    plot_spots(&mut field, &vents);
    plot_flats(&mut field, &vents);
    plot_verts(&mut field, &vents);

    let points = field.into_iter().flatten().filter(|&v| v > 1).count();
    println!("{}", points);
}
/// Solves the Day 05 Part 2 puzzle with respect to the given input.
/// Solves the Day 05 Part 2 puzzle with respect to the given input.
///
/// Like part 1, but 45-degree diagonal vents are plotted as well.
pub fn part_2(input: String) {
    // Each line has the form "x1,y1 -> x2,y2".
    let parse_pair = |half: &str| {
        let mut nums = half.split(',');
        let x = nums.next().unwrap().parse::<usize>().unwrap();
        let y = nums.next().unwrap().parse::<usize>().unwrap();
        (x, y)
    };

    let vents: Vec<Vent> = input
        .lines()
        .map(|line| {
            let mut halves = line.split(" -> ");
            let (x1, y1) = parse_pair(halves.next().unwrap());
            let (x2, y2) = parse_pair(halves.next().unwrap());
            Vent { x1, y1, x2, y2 }
        })
        .collect();

    // Field dimensions: one past the largest coordinate in each direction.
    let w = vents.iter().map(|v| max(v.x1, v.x2)).max().unwrap() + 1;
    let h = vents.iter().map(|v| max(v.y1, v.y2)).max().unwrap() + 1;

    let mut field = vec![vec![0i32; w]; h];
    plot_spots(&mut field, &vents);
    plot_flats(&mut field, &vents);
    plot_verts(&mut field, &vents);
    plot_diags(&mut field, &vents);

    let points = field.into_iter().flatten().filter(|&v| v > 1).count();
    println!("{}", points);
}
/// A vent line segment from (x1, y1) to (x2, y2), endpoints inclusive.
struct Vent {
    x1: usize,
    y1: usize,
    x2: usize,
    y2: usize,
}
/// Plots all the vents which occupy only a single cell.
fn plot_spots(field: &mut Vec<Vec<i32>>, vents: &Vec<Vent>) {
let filter = |vent: &&Vent| vent.x1 == vent.x2 && vent.y1 == vent.y2;
for vent in vents.iter().filter(filter) {
field[vent.y1][vent.x1] += 1;
}
}
/// Plots all the vents which form a horizontal line.
fn plot_flats(field: &mut Vec<Vec<i32>>, vents: &Vec<Vent>) {
let filter = |vent: &&Vent| vent.x1 != vent.x2 && vent.y1 == vent.y2;
for vent in vents.iter().filter(filter) {
let y = vent.y1;
let x1 = min(vent.x1, vent.x2);
let x2 = max(vent.x1, vent.x2);
for x in x1..x2 + 1 {
field[y][x] += 1;
}
}
}
/// Plots all the vents which form a vertical line.
fn plot_verts(field: &mut Vec<Vec<i32>>, vents: &Vec<Vent>) {
let filter = |vent: &&Vent| vent.x1 == vent.x2 && vent.y1 != vent.y2;
for vent in vents.iter().filter(filter) {
let x = vent.x1;
let y1 = min(vent.y1, vent.y2);
let y2 = max(vent.y1, vent.y2);
for y in y1..y2 + 1 {
field[y][x] += 1;
}
}
}
/// Plots all the vents which form a diagonal line.
/// Plots all the vents which form a diagonal line.
fn plot_diags(field: &mut Vec<Vec<i32>>, vents: &Vec<Vent>) {
    let filter = |vent: &&Vent| vent.x1 != vent.x2 && vent.y1 != vent.y2;
    for vent in vents.iter().filter(filter) {
        // `same` is true when x and y grow in the same direction
        // ("\"-shaped diagonal); false for a "/"-shaped one.
        let same = (vent.x1 < vent.x2) == (vent.y1 < vent.y2);
        let min_x = min(vent.x1, vent.x2);
        let max_x = max(vent.x1, vent.x2);
        // Walk the x-distance one step at a time, deriving the signed
        // x and y offsets from the step count.
        for adx in 0..(max_x as i32 - min_x as i32).abs() + 1 {
            let dx = if vent.x1 < vent.x2 { adx } else { -adx };
            let x = vent.x1 as i32 + dx;
            let y = vent.y1 as i32 + if same { dx } else { -dx };
            field[y as usize][x as usize] += 1;
        }
    }
}
|
#![feature(generic_associated_types)]
use crystalorb::{client::stage::Stage, Config, TweeningMethod};
use test_env_log::test;
mod common;
use common::MockClientServer;
/// After the clocks are forcibly desynchronized mid-session, the client
/// must re-align its clock with the server within `UPDATE_COUNT` updates,
/// for a range of desync magnitudes (including large negative offsets).
#[test]
fn when_server_and_client_clocks_desync_then_client_should_resync_quickly() {
    const UPDATE_COUNT: usize = 200;
    const TIMESTEP_SECONDS: f64 = 1.0 / 64.0;
    for desync_seconds in &[
        0.0f64,
        0.5f64,
        -0.5f64,
        -1.0f64,
        -100.0f64,
        -1000.0f64,
        -10000.0f64,
    ] {
        // GIVEN a server and client in a perfect network.
        let mut mock_client_server = MockClientServer::new(Config {
            lag_compensation_latency: TIMESTEP_SECONDS * 16.0,
            blend_latency: 0.2,
            timestep_seconds: TIMESTEP_SECONDS,
            clock_sync_needed_sample_count: 8,
            clock_sync_request_period: 0.0,
            clock_sync_assumed_outlier_rate: 0.2,
            max_tolerable_clock_deviation: 0.1,
            snapshot_send_period: 0.1,
            update_delta_seconds_max: 0.5,
            timestamp_skip_threshold_seconds: 1.0,
            fastforward_max_per_step: 10,
            tweening_method: TweeningMethod::MostRecentlyPassed,
        });
        mock_client_server.client_1_net.connect();
        mock_client_server.client_2_net.connect();
        // GIVEN that the client is ready and synced up.
        mock_client_server.update_until_clients_ready(TIMESTEP_SECONDS);
        match mock_client_server.client_1.stage() {
            Stage::Ready(client) => {
                assert_eq!(
                    client.last_completed_timestamp(),
                    mock_client_server
                        .server
                        .estimated_client_last_completed_timestamp()
                        // Note: + 1 since client is overshooting.
                        + 1,
                    "Precondition: clocks are initially synced up"
                );
            }
            _ => unreachable!(),
        }
        // WHEN the client and server clocks are desynchronized.
        mock_client_server.client_1_clock_offset = *desync_seconds;
        // THEN the client should quickly offset its own clock to agree with the server.
        for _ in 0..UPDATE_COUNT {
            mock_client_server.update(TIMESTEP_SECONDS);
        }
        match mock_client_server.client_1.stage() {
            Stage::Ready(client) => {
                assert_eq!(
                    client.last_completed_timestamp(),
                    mock_client_server
                        .server
                        .estimated_client_last_completed_timestamp()
                        // Note: + 1 since client is overshooting.
                        + 1,
                    "Condition: Client synced up after {} updates (desync by {})",
                    UPDATE_COUNT,
                    desync_seconds
                );
            }
            _ => unreachable!(),
        }
    }
}
/// When the client first connects with an already-skewed clock, the
/// initial clock-sync handshake must compute an accurate offset, for both
/// positive and negative skews of various magnitudes.
#[test]
fn when_client_connects_then_client_calculates_correct_initial_clock_offset() {
    const TIMESTEP_SECONDS: f64 = 1.0 / 64.0;
    for desync_seconds in &[
        0.0f64,
        0.5f64,
        1.0f64,
        100.0f64,
        1000.0f64,
        10000.0f64,
        -0.5f64,
        -1.0f64,
        -100.0f64,
        -1000.0f64,
        -10000.0f64,
    ] {
        // GIVEN a server and client in a perfect network.
        let mut mock_client_server = MockClientServer::new(Config {
            lag_compensation_latency: TIMESTEP_SECONDS * 16.0,
            blend_latency: 0.2,
            timestep_seconds: TIMESTEP_SECONDS,
            clock_sync_needed_sample_count: 8,
            clock_sync_request_period: 0.0,
            clock_sync_assumed_outlier_rate: 0.2,
            max_tolerable_clock_deviation: 0.1,
            snapshot_send_period: 0.1,
            update_delta_seconds_max: 0.5,
            timestamp_skip_threshold_seconds: 1.0,
            fastforward_max_per_step: 10,
            tweening_method: TweeningMethod::MostRecentlyPassed,
        });
        mock_client_server.client_1_net.connect();
        mock_client_server.client_2_net.connect();
        // GIVEN that the client and server clocks initially disagree.
        mock_client_server.client_1_clock_offset = *desync_seconds;
        // WHEN the client initially connects.
        mock_client_server.update_until_clients_ready(TIMESTEP_SECONDS);
        // THEN the client should accurately offset its own clock to agree with the server.
        match mock_client_server.client_1.stage() {
            Stage::Ready(client) => {
                assert_eq!(
                    client.last_completed_timestamp(),
                    mock_client_server
                        .server
                        .estimated_client_last_completed_timestamp()
                        // Note: + 1 since client is overshooting.
                        + 1,
                    "Precondition: clocks are initially synced up"
                );
            }
            _ => unreachable!(),
        }
    }
}
|
#[macro_use]
extern crate nom;
pub mod lexer;
pub mod ast;
pub mod parser; |
use std::fmt::{self, Display, Formatter};
/// A zero-based line/column position in the source text.
#[derive(Copy, Clone, Debug, Default, Eq, PartialEq)]
pub struct Position {
    line: usize,
    col: usize,
}
impl Position {
    /// Builds the span starting at `self` and ending at `other`.
    fn to(self, other: Position) -> Span {
        Span { from: self, to: other }
    }
}
impl Display for Position {
    /// Formats as `line:col`, converting from 0-based to 1-based.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        let line = self.line + 1;
        let col = self.col + 1;
        write!(f, "{}:{}", line, col)
    }
}
/// A region of source text, from one position to another.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct Span {
    pub from: Position,
    pub to: Position,
}
impl Display for Span {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}-{}", self.from, self.to)
}
}
/// A character stream that tracks line/column positions and supports
/// single-character lookahead.
pub struct Source<'a, I>
where
    I: Iterator<Item = char>,
{
    pub name: String,
    chars: &'a mut I,
    // One-slot lookahead: the peeked char plus snapshots of the current
    // and previous positions taken *before* the peek consumed the char.
    peeked: Option<(Option<char>, Position, Position)>,
    // Position of the next character to be produced.
    cur_pos: Position,
    // Position of the most recently produced character.
    prev_pos: Position,
}
impl<'a, I> Source<'a, I>
where
    I: Iterator<Item = char>,
{
    /// Wraps a character iterator, starting at line 0, column 0.
    pub fn new(name: String, chars: &'a mut I) -> Source<'a, I> {
        Source {
            name,
            chars,
            peeked: None,
            cur_pos: Position::default(),
            prev_pos: Position::default(),
        }
    }
    // Advance one column, remembering the previous position.
    fn incr_col(&mut self) {
        self.prev_pos = self.cur_pos;
        self.cur_pos.col += 1;
    }
    // Advance to the start of the next line.
    fn incr_line(&mut self) {
        self.prev_pos = self.cur_pos;
        self.cur_pos.line += 1;
        self.cur_pos.col = 0;
    }
    /// Position of the next character, accounting for pending lookahead.
    fn current_pos(&self) -> Position {
        match self.peeked {
            Some((_, pos, _)) => pos,
            None => self.cur_pos,
        }
    }
    /// Position of the most recently produced character, accounting for
    /// pending lookahead.
    fn previous_pos(&self) -> Position {
        match self.peeked {
            Some((_, _, pos)) => pos,
            None => self.prev_pos,
        }
    }
    /// Looks at the next character without logically consuming it. The
    /// pre-peek positions are stashed alongside the char so that
    /// `current_pos`/`previous_pos` keep reporting pre-peek values.
    fn peek(&mut self) -> Option<<Self as Iterator>::Item> {
        match self.peeked {
            None => {
                let cur_pos = self.current_pos();
                let prev_pos = self.previous_pos();
                let next = self.next();
                self.peeked = Some((next, cur_pos, prev_pos));
                next
            }
            Some((p, _, _)) => p,
        }
    }
    /// Consumes the source, turning it into a token iterator.
    pub fn tokenize(self) -> Tokens<'a, I> {
        Tokens(self)
    }
}
impl<'a, I> Iterator for Source<'a, I>
where
    I: Iterator<Item = char>,
{
    type Item = char;
    /// Yields the next character, consuming a previously peeked one first,
    /// and updates the line/column bookkeeping for fresh characters.
    fn next(&mut self) -> Option<Self::Item> {
        match self.peeked.take() {
            Some((item, _, _)) => item,
            None => {
                let next = self.chars.next();
                match next {
                    Some('\n') => self.incr_line(),
                    Some(_) => self.incr_col(),
                    None => (),
                }
                next
            }
        }
    }
}
/// A lexical token produced by `Tokens`.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Token {
    OpeningBrace,
    ClosingBrace,
    /// Any bare word that does not lex as a number.
    Ident(String),
    /// A numeric literal, kept as its source text.
    Num(String),
}
/// Iterator adapter that groups a `Source`'s characters into tokens.
pub struct Tokens<'a, I>(Source<'a, I>)
where
    I: Iterator<Item = char>;
impl<'a, I> Tokens<'a, I>
where
    I: Iterator<Item = char>,
{
    /// Skips characters for as long as `predicate` holds.
    fn discard_while<F>(&mut self, predicate: F)
    where
        F: Fn(char) -> bool + Copy,
    {
        loop {
            match self.0.peek() {
                Some(ch) if predicate(ch) => {
                    self.0.next();
                }
                _ => break,
            }
        }
    }

    /// Appends characters to `target` for as long as `predicate` holds.
    fn read_while<F>(&mut self, predicate: F, target: &mut String)
    where
        F: Fn(char) -> bool + Copy,
    {
        loop {
            match self.0.peek() {
                Some(ch) if predicate(ch) => {
                    target.push(ch);
                    self.0.next();
                }
                _ => break,
            }
        }
    }
}
impl<'a, I> Iterator for Tokens<'a, I>
where
    I: Iterator<Item = char>,
{
    type Item = (Token, Span);

    /// Produces the next token together with its source span, or `None`
    /// at end of input.
    ///
    /// Tokens are delimited by whitespace and parentheses. A token is a
    /// number when it starts with a digit, or with `-` immediately
    /// followed by a digit; anything else is an identifier. (Previously
    /// the boundary predicate was duplicated as three identical closures;
    /// it is now a single helper.)
    fn next(&mut self) -> Option<Self::Item> {
        // Token boundaries: parentheses and whitespace.
        fn is_boundary(ch: char) -> bool {
            ch == '(' || ch == ')' || ch.is_whitespace()
        }

        self.discard_while(char::is_whitespace);

        let start_pos = self.0.current_pos();
        let ch = self.0.next()?;
        let next = self.0.peek();

        let tok = match (ch, next) {
            ('(', _) => Token::OpeningBrace,
            (')', _) => Token::ClosingBrace,
            // A digit, or '-' immediately followed by a digit, starts a
            // numeric literal; the rest of the word is consumed as-is.
            (ch, peeked)
                if ch.is_ascii_digit()
                    || (ch == '-' && peeked.map_or(false, |p| p.is_ascii_digit())) =>
            {
                let mut text = String::new();
                text.push(ch);
                self.read_while(|c| !is_boundary(c), &mut text);
                Token::Num(text)
            }
            (ch, _) => {
                let mut text = String::new();
                text.push(ch);
                self.read_while(|c| !is_boundary(c), &mut text);
                Token::Ident(text)
            }
        };

        let span = start_pos.to(self.0.previous_pos());
        Some((tok, span))
    }
}
|
/// Entry point: prints a greeting, then demonstrates calling helpers.
fn main() {
    println!("Hello world!");
    another_function();
    another_function2(5);
}
/// Prints a fixed demo message.
fn another_function() {
    // Fixed typo in the output: "ANother" -> "Another".
    println!("Another function");
}
/// Prints the given integer in a fixed format.
fn another_function2(x: i32) {
    println!("The value of x = {}", x);
}
|
#![macro_use]
use std::ops::*;
use std::cmp::*;
use num::*;
/// Returns `true` when the absolute difference of `a` and `b` is below `eps`.
#[inline]
pub fn nearly_equal_eps<T>(a: T, b: T, eps: T) -> bool
    where T: Sub<Output = T> + Signed + PartialOrd
{
    abs(a - b) < eps
}
/// Returns true, when a and b is nearly equal (within Float epsilon range)
///
/// Combines three strategies: exact equality short-circuits (this also covers
/// equal infinities), values at or near zero are compared with an absolute
/// threshold, and everything else with a relative one.
pub fn nearly_equal<T>(a: T, b: T) -> bool
    where T: Sub<Output = T> + Signed + PartialOrd + Zero + Float + Copy
{
    let abs_a = abs(a);
    let abs_b = abs(b);
    let diff = (a - b).abs();
    // Machine epsilon of T, used as the relative tolerance.
    let eps: T = Float::epsilon();
    if a == b {
        // Handle infinities.
        true
    } else if a == zero() || b == zero() || diff < Float::min_positive_value() {
        // One of a or b is zero (or both are extremely close to it,) use absolute error.
        diff < (eps * Float::min_positive_value())
    } else {
        // Use relative error.
        // Clamping the denominator avoids it becoming infinite when
        // abs_a + abs_b overflows to infinity.
        diff / (abs_a + abs_b).min(Float::max_value()) < eps
    }
}
/// Calculates the cotangent of `a` as the reciprocal of its tangent.
#[inline]
pub fn cotan<T>(a:T) -> T where T: Float {
    let tangent = a.tan();
    tangent.recip()
}
/// Converts the value from degrees into radians
#[inline]
pub fn to_radians<T>(degree:T) -> T where T: Float {
degree.to_radians()
} |
use std::net::Ipv4Addr;
use std::env;
use stremio_addon_sdk::server::{serve_http, ServerOptions};
mod manifest;
use manifest::get_manifest;
mod handlers;
use handlers::build;
#[tokio::main]
async fn main() {
    // get the Manifest, which is declared in manifest.rs
    let manifest = get_manifest();
    // get the handlers, declared in handlers.rs
    let interface = build(manifest);
    // Port comes from the environment when set and parseable, else 1337.
    let port =
        env::var("PORT")
            .ok()
            .and_then(|port| port.parse().ok())
            .unwrap_or(1337);
    // HTTP server settings
    let options = ServerOptions {
        cache_max_age: 3600 * 24 * 3, // cache for 3 days
        port,
        ip: Ipv4Addr::new(0, 0, 0, 0).into(),
    };
    // run HTTP server asynchronously
    // BUG FIX: futures are lazy — the future returned by `serve_http` was
    // previously dropped without being polled, so the server never ran.
    serve_http(interface, options).await;
}
|
use particle::collide::absorb::Absorber;
use particle::collide::collide::Collider;
use std::marker::PhantomData;
/// Marker trait for token-like values; requires ordering and cloning.
pub trait TokenLike: Ord + Clone {}
impl TokenLike for () {}
// NOTE(review): this blanket impl overlaps with the `()` impl above
// (`()` is `Ord + Clone`); presumably this compiles only under the nightly
// specialization feature the file uses elsewhere (`default fn`) — confirm.
impl<T> TokenLike for T
    where T: Ord + Clone
{
}
/// Iteration over a borrowed `SackType`; placeholder, always panics.
impl<'a, C: 'a, D: 'a, B: 'a> IntoIterator for &'a SackType<C, D, B>
    where B: SackBacker
{
    type Item = (C, D);
    type IntoIter = SackIter<C, D, B>;
    fn into_iter(self) -> SackIter<C, D, B> {
        unimplemented!()
    }
}
/// Iteration over an owned `Sack`; placeholder, always panics.
impl<'a, C: 'a, D: 'a, B: 'a, T: 'a> IntoIterator for Sack<C, D, B, T>
    where B: SackBacker
{
    type Item = (C, D);
    type IntoIter = SackIter<C, D, B>;
    fn into_iter(self) -> SackIter<C, D, B> {
        unimplemented!()
    }
}
/// Iteration over a borrowed `MultiSack`; placeholder, always panics.
impl<'a, C: 'a, D: 'a, B: 'a> IntoIterator for &'a MultiSack<C, D, B>
    where B: SackBacker
{
    type Item = (C, D);
    type IntoIter = SackIter<C, D, B>;
    fn into_iter(self) -> SackIter<C, D, B> {
        unimplemented!()
    }
}
/// Iterator over exactly one `(C, D)` pair; placeholder implementation.
pub struct SingleIter<C, D>(C, D);
impl<C, D> Iterator for SingleIter<C, D> {
    type Item = (C, D);
    fn next(&mut self) -> Option<(C, D)> {
        unimplemented!()
    }
}
/// Iterator over a sack's `(C, D)` pairs; currently carries no state.
pub struct SackIter<C, D, B> {
    _phantom: PhantomData<(C, D, B)>,
}
impl<C, D, B> Iterator for SackIter<C, D, B> {
    type Item = (C, D);
    fn next(&mut self) -> Option<Self::Item> {
        unimplemented!()
    }
}
// impl<'a, C: 'a> SackLike<'a, C, (), C> for ()
// where C: SackBacker
// {
// }
impl SackStorable for () {}
// NOTE(review): overlaps with the `()` impl above; presumably relies on the
// nightly specialization feature used elsewhere in this file — confirm.
impl<T> SackStorable for T
    where T: Copy
{
}
/// Marker for values that can be stored inside a sack.
pub trait SackStorable: Clone {}
/// Marker for types that can back a sack's storage.
pub trait SackBacker: Clone {}
impl<'a, C, D> SackBacker for (&'a C, &'a D)
    where D: SackStorable,
          C: SackStorable
{
}
impl SackBacker for () {}
/// A sack: auxiliary data `t` paired with storage `b` (see `SackType`).
#[derive(Debug,Clone)]
pub struct Sack<C, D, B, T = ()> {
    pub t: T,
    pub b: SackType<C, D, B>,
}
impl<C, D, B, T> Default for Sack<C, D, B, T>
    where B: Default,
          T: Default
{
    // `default fn` marks this as specializable (nightly `specialization`).
    // The default sack is multi-backed with default backing and metadata.
    default fn default() -> Self {
        Sack {
            b: SackType::Multi(MultiSack {
                b: B::default(),
                _phantom: PhantomData,
            }),
            t: T::default(),
        }
    }
}
impl<C, D, B, T> Sack<C, D, B, T>
    where B: SackBacker,
          T: Default
{
    /// Builds a single-entry sack from a pair, with default metadata `t`.
    fn new(s: (C, D)) -> Self {
        let (c, d) = s;
        Sack {
            t: T::default(),
            b: SackType::Single(c, d),
        }
    }
}
// impl<'a, C1: 'a, D1: 'a, T1: 'a> Absorber<'a, C1, C1, D1, D1, (C1,D1), (C1,D1), T1, T1> for Sack<C1, D1, (C1,D1), T1> where C1:SackStorable,D1:SackStorable{
// fn absorb(mut self, s2: Sack<C1, D1,(C1,D1) >) -> Self {
// match self.b {
// SackType::Single(c, d) => unimplemented!(),
// SackType::Multi(s) => unimplemented!(),
// SackType::Empty => Sack { t: self.t, b: s2.b },
// }
// }
// }
/// Same-shape absorption: merges `s2` into `s1`, preserving `s1`'s metadata.
/// `default fn` marks this as specializable (nightly `specialization`).
impl<'a, C1: 'a, D1: 'a, B1: 'a, T1: 'a> Absorber<'a, C1, C1, D1, D1, B1, B1, T1> for Sack<C1, D1, B1, T1>
    where B1: SackBacker,
          C1: SackStorable,
          D1: SackStorable,
{
    default fn absorb(s1: Sack<C1, D1, B1, T1>, s2: Sack<C1, D1, B1>) -> Self {
        let t = s1.t;
        match s1.b {
            // Single-entry case not written yet.
            SackType::Single(c, d) => unimplemented!(),
            SackType::Multi(b) => {
                // Delegate to the multi-sack absorber, then rewrap with `t`.
                Sack {
                    t: t,
                    b: SackType::Multi(MultiSack::absorb(Sack {
                        t: (),
                        b: SackType::Multi(b),
                    },
                    s2)),
                }
            }
            // Absorbing into an empty sack simply adopts `s2`'s storage.
            SackType::Empty => Sack { t: t, b: s2.b },
        }
    }
}
// impl<'a, C1: 'a, D1: 'a, B1: 'a, T1: 'a> Absorber<'a, C1, C1, D1, D1, B1, (C1,D1), T1, T1> for Sack<C1, D1, B1, T1>
// where B1: SackBacker,C1:SackStorable,D1:SackStorable
// {
// fn absorb(mut self, s2: Sack<C1, D1, (C1,D1)>) -> Self
// where B1: SackBacker {
// match self.b {
// SackType::Single(c, d) => unimplemented!(),
// SackType::Multi(s) => {
// for s in self.b {
// self.absorb(Sack::from(s));
// };
// self
// },
// SackType::Empty => self.absorb(s2),
// }
// }
// }
// impl<'a, C1: 'a, D1: 'a, B1: 'a, T1: 'a> Absorber<'a, C1, C1, D1, D1, B1, B1, T1, T1> for Sack<C1, D1, B1, T1>
// where B1: SackBacker
// {
// default fn absorb(mut self, s2: Sack<C1, D1, (C1, D1)>) -> Self
// where B1: SackBacker {
// match self.b {
// SackType::Single(c, d) => unimplemented!(),
// SackType::Multi(s) => {
// for s in self.b {
// self.absorb(Sack::from(s));
// }
// self
// }
// SackType::Empty => self.absorb(s2),
// }
// }
// }
// impl<'a, C1: 'a, C2: 'a, D1: 'a, D2: 'a, B1: 'a, B2: 'a, T1: 'a, T2: 'a> Absorber<'a, C1, C2, D1, D2, B1, B2, T1, T2> for Sack<C1, D1, B1, T1>
// where B1: SackBacker,
// B2: SackBacker
// {
// fn absorb(mut self, s2: Sack<C2, D2, B2>) -> Self
// where B1: SackBacker,
// B2: SackBacker {
// unimplemented!()
// }
// }
//impl<C, D, B> From<(C, D)> for Sack<C, D, B> where B:SackBacker{
// default fn from(pair: (C, D)) -> Self {
// Sack::new(pair)
// }
//}
// Changed from a hand-written `Into` impl to `From`: the standard blanket
// impl then provides `Into<Sack<C, D, B, ()>>` for free, so existing
// `.into()` call sites keep working (implementing `From` is the idiom).
impl<C, D, B> From<SackType<C, D, B>> for Sack<C, D, B, ()> {
    /// Converts raw storage into a `Sack` with unit metadata.
    /// All variants are still unimplemented placeholders.
    fn from(value: SackType<C, D, B>) -> Self {
        match value {
            SackType::Empty => unimplemented!(),
            SackType::Single(c, d) => unimplemented!(),
            SackType::Multi(b) => unimplemented!(),
        }
    }
}
/// Fully generic collision between two sacks; placeholder, always panics.
impl<'a, C1, C2, C3, C4, D1, D2, D3, D4, B1, B2, B3, B4, T1, T2, T3, T4> Collider<'a, C1, C2, C3, C4, D1, D2, D3, D4, B1, B2, B3, B4, T1, T2, T3, T4> for Sack<C1, D1, B1, T1>
    where B1: SackBacker,
          B2: SackBacker,
          B3: SackBacker,
          B4: SackBacker
{
    fn collide(_s1: &'a SackType<C1, D1, B1>, _s2: &'a SackType<C2, D2, B2>) -> (&'a SackType<C3, D3, B3>, &'a SackType<C4, D4, B4>) {
        unimplemented!();
    }
}
/// Storage shape of a sack: empty, a single pair, or multi-entry backing.
#[derive(Debug,Clone)]
pub enum SackType<C, D, B> {
    Empty,
    Single(C, D),
    Multi(MultiSack<C, D, B>),
}
impl<C, D, B> IntoIterator for SackType<C, D, B> {
    type Item = (C, D);
    // NOTE(review): iterates with a pair-backed iterator regardless of `B`.
    type IntoIter = SackIter<C, D, (C, D)>;
    fn into_iter(self) -> SackIter<C, D, (C, D)> {
        unimplemented!()
    }
}
// #[derive(Debug)]
// pub struct EmptySack {}
/// A sack backed directly by a single `(C, D)` pair.
pub type SackPair<C, D> = Sack<C, D, (C, D)>;
impl<C, D> From<(C, D)> for Sack<C, D, (C, D), ()> {
    /// Wraps a pair as a single-entry sack with unit metadata.
    fn from(pair: (C, D)) -> Self {
        Sack {
            t: (),
            b: (SackType::Single(pair.0, pair.1)),
        }
    }
}
// pub struct SackPair<C, D>(C, D);
// impl<'a, C, D> SackLike<'a, C, D, (&'a C, &'a D)> for (&'a C, &'a D)
// where C: SackStorable,
// D: SackStorable
// {
// }
/// Multi-entry sack storage; `b` is the backing container.
#[derive(Debug,Clone)]
pub struct MultiSack<C, D, B> {
    b: B,
    _phantom: PhantomData<(C, D)>,
}
/// Absorption into a multi-sack; requires a compatible `Collider` impl.
/// Placeholder, always panics.
impl<'a, C1: 'a, C2: 'a, D1: 'a, D2: 'a, B1: 'a, B2: 'a, T1: 'a> Absorber<'a, C1, C2, D1, D2, B1, B2, T1> for MultiSack<C1, D1, B1>
    where B2: SackBacker,
          B1: SackBacker,
          MultiSack<C1, D1, B1>: Collider<'a, C1, C2, C1, (), D1, D2, D1, (), B1, B2, B1, (), T1, (), T1, ()>
{
    fn absorb(s1: Sack<C1, D1, B1, T1>, s2: Sack<C2, D2, B2>) -> Self {
        unimplemented!();
    }
}
/// Unit-metadata collision for multi-sacks; placeholder, always panics.
impl<'a, C1: 'a, C2: 'a, C3: 'a, C4: 'a, D1: 'a, D2: 'a, D3: 'a, D4: 'a, B1: 'a, B2: 'a, B3: 'a, B4: 'a>
    Collider<'a, C1, C2, C3, C4, D1, D2, D3, D4, B1, B2, B3, B4, (), (), (), ()>
    for MultiSack<C1, D1, B1>
    where B2: SackBacker,
          B1: SackBacker,
          B3: SackBacker,
          B4: SackBacker
{
    fn collide(_s1: &'a SackType<C1, D1, B1>, _s2: &'a SackType<C2, D2, B2>) -> (&'a SackType<C3, D3, B3>, &'a SackType<C4, D4, B4>) {
        unimplemented!();
    }
}
impl<C, D, B> Default for MultiSack<C, D, B>
    where B: SackBacker + Default
{
    /// An empty multi-sack over a default backing container.
    fn default() -> Self {
        MultiSack {
            b: B::default(),
            _phantom: PhantomData,
        }
    }
}
// NOTE(review): the sack itself acts as its own iterator here; placeholder.
impl<C, D, B> Iterator for MultiSack<C, D, B> {
    type Item = (C, D);
    fn next(&mut self) -> Option<(C, D)> {
        unimplemented!()
    }
}
|
use crate::generate::src::{quotable_to_src, quote, Src, ToSrc};
use crate::grammar::ParseNodeShape;
use crate::grammar::{Grammar, MatchesEmpty, Rule, RuleWithNamedFields};
use crate::scannerless::Pat as SPat;
use ordermap::{Entry, OrderMap, OrderSet};
use std::borrow::Cow;
use std::cell::RefCell;
use std::fmt::Write as FmtWrite;
use std::hash::Hash;
use std::ops::Add;
use std::rc::Rc;
use std::{iter, mem};
/// Input-pattern behavior needed when generating Rust source for a matcher.
pub trait RustInputPat {
    /// The Rust slice type the generated parser consumes (e.g. `str`).
    fn rust_slice_ty() -> Src;
    /// Rust source matching this input pattern.
    fn rust_matcher(&self) -> Src;
}
impl<S: AsRef<str>> RustInputPat for SPat<S> {
    fn rust_slice_ty() -> Src {
        // Scannerless patterns consume string slices.
        quote!(str)
    }
    fn rust_matcher(&self) -> Src {
        match self {
            // A literal string becomes verbatim source text.
            SPat::String(s) => Src::new(s.as_ref()),
            // A character range becomes an inclusive range.
            SPat::Range(start, end) => quote!(#start..=#end),
        }
    }
}
/// Shared lookup state for code generation: the grammar's named rules plus
/// interior-mutable caches for anonymous rules, descriptions, and shapes.
struct RuleMap<'a, Pat> {
    named: &'a OrderMap<String, RuleWithNamedFields<Pat>>,
    // Anonymous (unnamed) rules, identified by insertion index.
    anon: RefCell<OrderSet<Rc<Rule<Pat>>>>,
    // Cached human-readable descriptions per rule.
    desc: RefCell<OrderMap<Rc<Rule<Pat>>, String>>,
    // Cached parse-node shapes for anonymous rules.
    anon_shape: RefCell<OrderMap<Rc<Rule<Pat>>, ParseNodeShape<ParseNodeKind>>>,
}
/// One entry in the generated parse-node table.
struct ParseNode {
    kind: ParseNodeKind,
    desc: String,
    shape: ParseNodeShape<ParseNodeKind>,
    // Generated Rust type for the node's handle, when it has one.
    ty: Option<Src>,
}
/// One enum variant of an `Or` rule, as recovered by `find_variant_fields`.
struct Variant<'a, Pat> {
    rule: Rc<Rule<Pat>>,
    name: &'a str,
    fields: OrderMap<&'a str, OrderSet<Vec<usize>>>,
}
impl<Pat: PartialEq> RuleWithNamedFields<Pat> {
fn find_variant_fields(&self) -> Option<Vec<Variant<'_, Pat>>> {
if let Rule::Or(cases) = &*self.rule {
if self.fields.is_empty() {
return None;
}
let mut variants: Vec<_> = cases
.iter()
.map(|rule| Variant {
rule: rule.clone(),
name: "",
fields: OrderMap::new(),
})
.collect();
for (field, paths) in &self.fields {
for path in paths {
match path[..] {
[] => return None,
[variant] if variants[variant].name != "" => return None,
[variant] => variants[variant].name = field,
// FIXME: use [variant, rest @ ..] when possible.
_ => {
variants[path[0]]
.fields
.entry(&field[..])
.or_insert_with(OrderSet::new)
.insert(path[1..].to_vec());
}
}
}
}
if variants.iter().any(|x| x.name == "") {
return None;
}
Some(variants)
} else {
None
}
}
}
impl<Pat> Rule<Pat> {
    /// Type of a field reachable via several paths: the common type when all
    /// paths agree, `()` otherwise.
    fn field_pathset_type(&self, paths: &OrderSet<Vec<usize>>) -> Src {
        let ty = self.field_type(paths.get_index(0).unwrap());
        if paths.len() > 1 {
            // HACK(eddyb) find a way to compare `Src` w/o printing (`to_ugly_string`).
            let ty_string = ty.to_ugly_string();
            for path in paths.iter().skip(1) {
                if self.field_type(path).to_ugly_string() != ty_string {
                    return quote!(());
                }
            }
        }
        ty
    }
    /// Generated Rust type of the sub-rule reached by `path` (a chain of
    /// child indices starting at `self`).
    fn field_type(&self, path: &[usize]) -> Src {
        match self {
            Rule::Empty | Rule::Eat(_) | Rule::NegativeLookahead(_) => {
                // Leaves carry no structure; the path must terminate here.
                assert_eq!(path, []);
                quote!(())
            }
            Rule::Call(r) => {
                let ident = Src::ident(r);
                quote!(#ident<'a, 'i, I>)
            }
            Rule::Concat(rules) => {
                if path.is_empty() {
                    return quote!(());
                }
                rules[path[0]].field_type(&path[1..])
            }
            Rule::Or(cases) => cases[path[0]].field_type(&path[1..]),
            // `Opt` has a single child, addressed as index 0.
            Rule::Opt(rule) => [rule][path[0]].field_type(&path[1..]),
            Rule::RepeatMany(elem, _) | Rule::RepeatMore(elem, _) => {
                assert_eq!(path, []);
                let elem = elem.field_type(&[]);
                quote!([#elem])
            }
        }
    }
}
// FIXME(eddyb) this should just work with `self: &Rc<Self>` on inherent methods,
// but that still requires `#![feature(arbitrary_self_types)]`.
/// `Rc<Rule>` helpers that need access to the shared `RuleMap` caches.
trait RcRuleRuleMapMethods<Pat>: Sized {
    fn parse_node_kind(&self, rules: &RuleMap<'_, Pat>) -> ParseNodeKind;
    fn parse_node_desc(&self, rules: &RuleMap<'_, Pat>) -> String;
    fn fill_parse_node_shape(&self, rules: &RuleMap<'_, Pat>);
}
impl<Pat: Ord + Hash + RustInputPat> RcRuleRuleMapMethods<Pat> for Rc<Rule<Pat>> {
    /// Kind of this rule's parse node: named rules keep their name, any
    /// other rule is interned into `rules.anon` and numbered by position.
    fn parse_node_kind(&self, rules: &RuleMap<'_, Pat>) -> ParseNodeKind {
        if let Rule::Call(r) = &**self {
            return ParseNodeKind::NamedRule(r.clone());
        }
        if let Some((i, _)) = rules.anon.borrow().get_full(self) {
            return ParseNodeKind::Anon(i);
        }
        // Not interned yet: the new entry's index is the current length.
        let i = rules.anon.borrow().len();
        rules.anon.borrow_mut().insert(self.clone());
        ParseNodeKind::Anon(i)
    }
    /// Human-readable description of this rule, memoized in `rules.desc`.
    fn parse_node_desc(&self, rules: &RuleMap<'_, Pat>) -> String {
        if let Some(desc) = rules.desc.borrow().get(self) {
            return desc.clone();
        }
        let desc = self.parse_node_desc_uncached(rules);
        match rules.desc.borrow_mut().entry(self.clone()) {
            Entry::Vacant(entry) => entry.insert(desc).clone(),
            // The lookup above already returned early for cached entries.
            Entry::Occupied(_) => unreachable!(),
        }
    }
    // FIXME(eddyb) this probably doesn't need the "fill" API anymore.
    /// Computes and caches the parse-node shape for an anonymous rule.
    fn fill_parse_node_shape(&self, rules: &RuleMap<'_, Pat>) {
        if let Rule::Call(_) = **self {
            // Named rules get their shape elsewhere.
            return;
        }
        if rules.anon_shape.borrow().contains_key(self) {
            return;
        }
        let shape = Rule::parse_node_shape_uncached(self, rules);
        rules.anon_shape.borrow_mut().insert(self.clone(), shape);
    }
}
impl<Pat: Ord + Hash + RustInputPat> Rule<Pat> {
    /// Renders this rule as a human-readable grammar fragment (used for
    /// parse-node descriptions / diagnostics).
    fn parse_node_desc_uncached(&self, rules: &RuleMap<'_, Pat>) -> String {
        match self {
            Rule::Empty => "".to_string(),
            Rule::Eat(pat) => pat.rust_matcher().to_pretty_string(),
            Rule::NegativeLookahead(pat) => format!("!{}", pat.rust_matcher().to_pretty_string()),
            Rule::Call(r) => r.clone(),
            Rule::Concat([left, right]) => format!(
                "({} {})",
                left.parse_node_desc(rules),
                right.parse_node_desc(rules)
            ),
            Rule::Or(cases) => {
                assert!(cases.len() > 1);
                let mut desc = format!("({}", cases[0].parse_node_desc(rules));
                for rule in &cases[1..] {
                    desc += " | ";
                    desc += &rule.parse_node_desc(rules);
                }
                desc + ")"
            }
            Rule::Opt(rule) => format!("{}?", rule.parse_node_desc(rules)),
            Rule::RepeatMany(elem, None) => format!("{}*", elem.parse_node_desc(rules)),
            Rule::RepeatMany(elem, Some(sep)) => format!(
                "{}* % {}",
                elem.parse_node_desc(rules),
                sep.parse_node_desc(rules)
            ),
            Rule::RepeatMore(elem, None) => format!("{}+", elem.parse_node_desc(rules)),
            Rule::RepeatMore(elem, Some(sep)) => format!(
                "{}+ % {}",
                elem.parse_node_desc(rules),
                sep.parse_node_desc(rules)
            ),
        }
    }
    /// Structural shape of this rule's parse node; repeats are lowered onto
    /// derived helper rules (interned via `parse_node_kind`).
    fn parse_node_shape_uncached(
        rc_self: &Rc<Self>,
        rules: &RuleMap<'_, Pat>,
    ) -> ParseNodeShape<ParseNodeKind> {
        match &**rc_self {
            Rule::Empty | Rule::Eat(_) | Rule::NegativeLookahead(_) => ParseNodeShape::Opaque,
            // Named rules are handled by the caller, never interned here.
            Rule::Call(_) => unreachable!(),
            Rule::Concat([left, right]) => {
                ParseNodeShape::Split(left.parse_node_kind(rules), right.parse_node_kind(rules))
            }
            Rule::Or(_) => ParseNodeShape::Choice,
            Rule::Opt(rule) => ParseNodeShape::Opt(rule.parse_node_kind(rules)),
            // `e*` is treated as `(e+)?`.
            Rule::RepeatMany(elem, sep) => ParseNodeShape::Opt(
                Rc::new(Rule::RepeatMore(elem.clone(), sep.clone())).parse_node_kind(rules),
            ),
            // `e+` is treated as `e (e*)`.
            Rule::RepeatMore(rule, None) => ParseNodeShape::Split(
                rule.parse_node_kind(rules),
                Rc::new(Rule::RepeatMany(rule.clone(), None)).parse_node_kind(rules),
            ),
            // `e+ % sep` is treated as `e (sep (e+ % sep))?`.
            Rule::RepeatMore(elem, Some(sep)) => ParseNodeShape::Split(
                elem.parse_node_kind(rules),
                Rc::new(Rule::Opt(Rc::new(Rule::Concat([
                    sep.clone(),
                    rc_self.clone(),
                ]))))
                .parse_node_kind(rules),
            ),
        }
    }
}
/// Identity of a parse node: a named grammar rule, or a numbered anonymous rule.
#[derive(Clone)]
enum ParseNodeKind {
    NamedRule(String),
    Anon(usize),
}
impl ParseNodeKind {
    /// Identifier used for this kind in generated code (`Name` or `_N`).
    fn ident(&self) -> Src {
        match self {
            ParseNodeKind::NamedRule(name) => Src::ident(name),
            ParseNodeKind::Anon(i) => Src::ident(format!("_{}", i)),
        }
    }
}
impl ToSrc for ParseNodeKind {
    fn to_src(&self) -> Src {
        let ident = self.ident();
        // All kinds live in the generated `_P` enum.
        quote!(_P::#ident)
    }
}
quotable_to_src!(ParseNodeKind);
impl ToSrc for ParseNodeShape<ParseNodeKind> {
    /// Emits the shape as a `ParseNodeShape::...` expression in generated code.
    fn to_src(&self) -> Src {
        let variant = match self {
            ParseNodeShape::Opaque => quote!(Opaque),
            ParseNodeShape::Alias(inner) => quote!(Alias(#inner)),
            ParseNodeShape::Choice => quote!(Choice),
            ParseNodeShape::Opt(inner) => quote!(Opt(#inner)),
            ParseNodeShape::Split(left, right) => quote!(Split(#left, #right)),
        };
        quote!(ParseNodeShape::#variant)
    }
}
quotable_to_src!(ParseNodeShape<ParseNodeKind>);
/// Label of a generated code block: a named rule's entry point, or a
/// numbered continuation nested inside a parent label.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
enum CodeLabel {
    NamedRule(String),
    Nested { parent: Rc<CodeLabel>, i: usize },
}
impl CodeLabel {
    /// Flattens the label chain into one name (`parent__i__j...`).
    fn flattened_name(&self) -> Cow<'_, str> {
        match self {
            CodeLabel::NamedRule(r) => r.into(),
            CodeLabel::Nested { parent, i } => {
                let mut name = parent.flattened_name().into_owned();
                name += "__";
                let _ = write!(name, "{}", i);
                name.into()
            }
        }
    }
    fn flattened_ident(&self) -> Src {
        Src::ident(self.flattened_name())
    }
}
impl ToSrc for CodeLabel {
    fn to_src(&self) -> Src {
        let ident = self.flattened_ident();
        // All labels live in the generated `_C` enum.
        quote!(_C::#ident)
    }
}
quotable_to_src!(CodeLabel);
impl<Pat: Ord + Hash + MatchesEmpty + RustInputPat> Grammar<Pat> {
    /// Generates the complete Rust source for this grammar's parser:
    /// per-rule types and impls, the parse function, the parse-node tables,
    /// and the code-label declarations.
    pub fn generate_rust(&self) -> Src {
        self.check();
        let rules = &RuleMap {
            named: &self.rules,
            anon: RefCell::new(OrderSet::new()),
            desc: RefCell::new(OrderMap::new()),
            anon_shape: RefCell::new(OrderMap::new()),
        };
        let mut out = concat!(
            include_str!("templates/imports.rs"),
            include_str!("templates/header.rs")
        )
        .parse::<Src>()
        .unwrap();
        for (name, rule) in rules.named {
            out += declare_rule(name, rule, rules) + impl_parse_with::<Pat>(name);
        }
        let mut code_labels = OrderMap::new();
        out += define_parse_fn(rules, &mut code_labels);
        // Filling shapes can intern *new* anonymous rules, so iterate by
        // index rather than holding a borrow of `rules.anon` across the loop.
        let mut i = 0;
        while i < rules.anon.borrow().len() {
            let rule = rules.anon.borrow().get_index(i).unwrap().clone();
            rule.fill_parse_node_shape(rules);
            i += 1;
        }
        let all_parse_nodes: Vec<ParseNode> = rules
            .named
            .iter()
            .map(|(name, rule)| {
                let ident = Src::ident(name);
                ParseNode {
                    kind: ParseNodeKind::NamedRule(name.to_string()),
                    desc: name.clone(),
                    shape: if rule.fields.is_empty() {
                        ParseNodeShape::Opaque
                    } else {
                        ParseNodeShape::Alias(rule.rule.parse_node_kind(rules))
                    },
                    ty: Some(quote!(#ident<'_, '_, _>)),
                }
            })
            .chain(rules.anon.borrow().iter().map(|rule| ParseNode {
                kind: rule.parse_node_kind(rules),
                desc: rule.parse_node_desc(rules),
                shape: rules.anon_shape.borrow()[rule].clone(),
                // Only repeats of tokens or named rules get a typed handle.
                ty: match &**rule {
                    Rule::RepeatMany(elem, _) | Rule::RepeatMore(elem, _) => match &**elem {
                        Rule::Eat(_) => Some(quote!([()])),
                        Rule::Call(r) => {
                            let ident = Src::ident(r);
                            Some(quote!([#ident<'_, '_, _>]))
                        }
                        _ => None,
                    },
                    _ => None,
                },
            }))
            .collect();
        out + declare_parse_node_kind(&all_parse_nodes)
            + impl_debug_for_handle_any(&all_parse_nodes)
            + code_label_decl_and_impls(rules, &code_labels)
    }
}
/// State threaded through code generation for one code path.
/// Note: generated code is built *backwards* — combinators prepend.
#[must_use]
struct Continuation<'a> {
    code_labels: &'a mut OrderMap<Rc<CodeLabel>, usize>,
    // Label of the generated function currently being emitted.
    fn_code_label: &'a mut Rc<CodeLabel>,
    // Completed `label => { code }` match arms.
    code_label_arms: &'a mut Vec<Src>,
    code: Code,
    // NOTE(review): one entry per pending `pop_state`; a `Some` appears to
    // record the (return label, enclosing function label) of a frame that
    // was split into its own function — confirm against push/pop_state.
    nested_frames: Vec<Option<(Rc<CodeLabel>, Rc<CodeLabel>)>>,
}
/// Generated code, either inlined directly or reachable through a label.
#[derive(Clone)]
enum Code {
    Inline(Src),
    Label(Rc<CodeLabel>),
}
impl Continuation<'_> {
    /// Allocates the next `Nested` label under the current function label.
    fn next_code_label(&mut self) -> Rc<CodeLabel> {
        let counter = self
            .code_labels
            .entry(self.fn_code_label.clone())
            .or_insert(0);
        let label = Rc::new(CodeLabel::Nested {
            parent: self.fn_code_label.clone(),
            i: *counter,
        });
        *counter += 1;
        label
    }
    /// Reborrowing "clone": shares the mutable generation state but copies
    /// the per-path `code` and `nested_frames`.
    fn clone(&mut self) -> Continuation<'_> {
        Continuation {
            code_labels: self.code_labels,
            fn_code_label: self.fn_code_label,
            code_label_arms: self.code_label_arms,
            code: self.code.clone(),
            nested_frames: self.nested_frames.clone(),
        }
    }
    /// Forces `code` into inline form; a label becomes a thread spawn
    /// targeting that label.
    fn to_inline(&mut self) -> &mut Src {
        // HACK(eddyb) remove `self.code` juggling post-NLL
        let replacement = match self.code {
            Code::Inline(_) => None,
            Code::Label(ref label) => Some(Code::Inline(quote!(
                c.code = #label;
                p.threads.spawn(c, _range);
            ))),
        };
        if let Some(replacement) = replacement {
            self.code = replacement;
        }
        match self.code {
            Code::Inline(ref mut code) => code,
            Code::Label(_) => unreachable!(),
        }
    }
    /// Forces `code` into label form, reifying inline code under a fresh label.
    fn to_label(&mut self) -> &mut Rc<CodeLabel> {
        match self.code {
            Code::Label(ref mut label) => label,
            Code::Inline(_) => {
                let label = self.next_code_label();
                self.reify_as(label);
                self.to_label()
            }
        }
    }
    /// Emits the current inline code as a `#label => { ... }` match arm and
    /// replaces `code` with that label.
    fn reify_as(&mut self, label: Rc<CodeLabel>) {
        // HACK(eddyb) remove awkward scope post-NLL
        let code = {
            let code = self.to_inline();
            quote!(#label => {#code})
        };
        self.code_label_arms.push(code);
        self.code = Code::Label(label);
    }
}
/// A continuation transformer; implemented for closures and `Thunk`s.
trait ContFn {
    fn apply(self, cont: Continuation<'_>) -> Continuation<'_>;
}
impl<F: FnOnce(Continuation<'_>) -> Continuation<'_>> ContFn for F {
    fn apply(self, cont: Continuation<'_>) -> Continuation<'_> {
        self(cont)
    }
}
/// Applies `self.0` first, then `self.1`.
struct Compose<F, G>(F, G);
impl<F: ContFn, G: ContFn> ContFn for Compose<F, G> {
    fn apply(self, cont: Continuation<'_>) -> Continuation<'_> {
        self.1.apply(self.0.apply(cont))
    }
}
/// A deferred continuation transformer; sequenced with `+`.
#[must_use]
struct Thunk<F>(F);
impl<F> Thunk<F> {
    fn new(f: F) -> Self
    where
        F: FnOnce(Continuation<'_>) -> Continuation<'_>,
    {
        Thunk(f)
    }
}
impl<F, G> Add<Thunk<G>> for Thunk<F> {
    type Output = Thunk<Compose<G, F>>;
    /// `a + b` generates `a`'s code before `b`'s; since code is built
    /// back-to-front, `b` is applied to the continuation first.
    fn add(self, other: Thunk<G>) -> Self::Output {
        Thunk(Compose(other.0, self.0))
    }
}
impl<F: ContFn> ContFn for Thunk<F> {
    fn apply(self, cont: Continuation<'_>) -> Continuation<'_> {
        self.0.apply(cont)
    }
}
/// Builds a `Thunk` that prepends the quoted tokens to the continuation's
/// inline code (generation runs back-to-front).
macro_rules! thunk {
    ($($t:tt)*) => {{
        let prefix = quote!($($t)*);
        Thunk::new(move |mut cont| {
            // HACK(eddyb) remove awkward scope post-NLL
            {
                let code = cont.to_inline();
                let suffix = mem::replace(code, prefix);
                *code += suffix;
            }
            cont
        })
    }}
}
/// Feeds the saved state (`c.state`) to `f`, opening a nested frame that a
/// matching `push_state` (processed later, since generation runs backwards)
/// will close.
fn pop_state<F: ContFn>(f: impl FnOnce(Src) -> Thunk<F>) -> Thunk<impl ContFn> {
    f(quote!(c.state))
        + Thunk::new(|mut cont| {
            if let Some(&None) = cont.nested_frames.last() {
                // Split the code emitted so far into its own generated
                // function and record where to return to.
                *cont.nested_frames.last_mut().unwrap() =
                    Some((cont.to_label().clone(), cont.fn_code_label.clone()));
                *cont.fn_code_label = cont.next_code_label();
                cont.code_labels.insert(cont.fn_code_label.clone(), 0);
                cont.code = Code::Inline(quote!());
                cont = ret().apply(cont);
            }
            cont.nested_frames.push(None);
            cont
        })
}
/// Saves `state` into `c.state` and closes the nested frame opened by the
/// matching `pop_state`, turning the split-off code into a called function.
fn push_state(state: Src) -> Thunk<impl ContFn> {
    thunk!(c.state = #state;)
        + Thunk::new(move |mut cont| {
            if let Some((ret_label, outer_fn_label)) = cont.nested_frames.pop().unwrap() {
                let inner_fn_label = mem::replace(cont.fn_code_label, outer_fn_label);
                cont.reify_as(inner_fn_label);
                cont = call(mem::replace(cont.to_label(), ret_label)).apply(cont);
            }
            cont
        })
}
/// Wraps the continuation's code in `if #condition { ... }`.
fn check(condition: Src) -> Thunk<impl ContFn> {
    Thunk::new(move |mut cont| {
        // HACK(eddyb) remove awkward scope post-NLL
        {
            let code = cont.to_inline();
            *code = quote!(
                if #condition {
                    #code
                }
            );
        }
        cont
    })
}
/// Emits a call to `callee`, returning to the continuation's label afterward.
fn call(callee: Rc<CodeLabel>) -> Thunk<impl ContFn> {
    Thunk::new(move |mut cont| {
        // HACK(eddyb) remove awkward scope post-NLL
        cont.code = {
            let label = cont.to_label().clone();
            Code::Inline(quote!(
                c.code = #label;
                p.call(Call { callee: #callee, range: _range }, c);
            ))
        };
        cont
    })
}
/// Emits a return from the current generated function; nothing may follow,
/// so the remaining continuation must be empty.
fn ret() -> Thunk<impl ContFn> {
    thunk!(p.ret(c, _range);)
        + Thunk::new(|mut cont| {
            assert!(cont.to_inline().is_empty());
            cont
        })
}
/// Emits an SPPF node registration for `parse_node_kind` with `child`.
fn sppf_add(parse_node_kind: &ParseNodeKind, child: Src) -> Thunk<impl ContFn> {
    thunk!(p.sppf_add(#parse_node_kind, c.fn_input.subtract_suffix(_range), #child);)
}
/// Abstraction over one or more alternative thunks, as fed to `parallel`:
/// a single `Thunk`, a pair, or an iterator of them.
trait ForEachThunk {
    fn for_each_thunk(self, cont: &mut Continuation<'_>, f: impl FnMut(Continuation<'_>));
}
impl<F> ForEachThunk for Thunk<F>
where
    F: ContFn,
{
    fn for_each_thunk(self, cont: &mut Continuation<'_>, mut f: impl FnMut(Continuation<'_>)) {
        // Each alternative runs on its own clone of the continuation.
        f(self.apply(cont.clone()));
    }
}
impl<T, U> ForEachThunk for (T, U)
where
    T: ForEachThunk,
    U: ForEachThunk,
{
    fn for_each_thunk(self, cont: &mut Continuation<'_>, mut f: impl FnMut(Continuation<'_>)) {
        self.0.for_each_thunk(cont, &mut f);
        self.1.for_each_thunk(cont, &mut f);
    }
}
/// Adapter turning an iterator of thunks into a `ForEachThunk`.
struct ThunkIter<I>(I);
impl<I, T> ForEachThunk for ThunkIter<I>
where
    I: Iterator<Item = T>,
    T: ForEachThunk,
{
    fn for_each_thunk(self, cont: &mut Continuation<'_>, mut f: impl FnMut(Continuation<'_>)) {
        self.0.for_each(|x| {
            x.for_each_thunk(cont, &mut f);
        });
    }
}
/// Runs alternative continuations "in parallel": each thunk is applied to a
/// clone of the current continuation and the generated fragments are
/// concatenated, all sharing a single join label.
fn parallel(thunks: impl ForEachThunk) -> Thunk<impl ContFn> {
    Thunk::new(|mut cont| {
        // Force the join point into a label so every branch can reach it.
        cont.to_label();
        let mut code = quote!();
        let mut child_nested_frames = None;
        let nested_frames = cont.nested_frames.clone();
        thunks.for_each_thunk(&mut cont, |mut child_cont| {
            // All branches must leave the same number of nested frames.
            if let Some(prev) = child_nested_frames {
                assert_eq!(child_cont.nested_frames.len(), prev);
            } else {
                child_nested_frames = Some(child_cont.nested_frames.len());
            }
            if let Some(Some((ret_label, outer_fn_label))) =
                child_cont.nested_frames.last().cloned()
            {
                // Idiom fix: `if let None = expr` replaced with `expr.is_none()`.
                if nested_frames[child_cont.nested_frames.len() - 1].is_none() {
                    let inner_fn_label = mem::replace(child_cont.fn_code_label, outer_fn_label);
                    child_cont.reify_as(inner_fn_label);
                    child_cont =
                        call(mem::replace(child_cont.to_label(), ret_label)).apply(child_cont);
                    *child_cont.nested_frames.last_mut().unwrap() = None;
                }
            }
            assert_eq!(
                child_cont.nested_frames[..],
                nested_frames[..child_cont.nested_frames.len()]
            );
            code += child_cont.to_inline().clone();
        });
        cont.code = Code::Inline(code);
        if let Some(child_nested_frames) = child_nested_frames {
            while cont.nested_frames.len() > child_nested_frames {
                assert_eq!(cont.nested_frames.pop(), Some(None));
            }
        }
        cont
    })
}
/// Makes `thunk` optional by running it in parallel with a no-op branch.
fn opt(thunk: Thunk<impl ContFn>) -> Thunk<impl ContFn> {
    parallel((thunk, thunk!()))
}
/// Ties a recursive knot: gives `f` a label it can `call` to re-enter the
/// code it generates, wrapping that code in a fresh generated function.
fn fix<F: ContFn>(f: impl FnOnce(Rc<CodeLabel>) -> Thunk<F>) -> Thunk<impl ContFn> {
    Thunk::new(|mut cont| {
        // The recursive body gets a clean slate of nested frames.
        let nested_frames = mem::replace(&mut cont.nested_frames, vec![]);
        let ret_label = cont.to_label().clone();
        cont.code = Code::Inline(quote!());
        let label = cont.next_code_label();
        let outer_fn_label = mem::replace(cont.fn_code_label, label.clone());
        cont.code_labels.insert(label.clone(), 0);
        cont = (reify_as(label.clone()) + f(label) + ret()).apply(cont);
        // Restore the enclosing function's context, then call the new one.
        *cont.fn_code_label = outer_fn_label;
        cont.nested_frames = nested_frames;
        cont = call(mem::replace(cont.to_label(), ret_label)).apply(cont);
        cont
    })
}
/// Thunk form of `Continuation::reify_as`.
fn reify_as(label: Rc<CodeLabel>) -> Thunk<impl ContFn> {
    Thunk::new(|mut cont| {
        cont.reify_as(label);
        cont
    })
}
impl<Pat: Ord + Hash + RustInputPat> Rule<Pat> {
    // HACK(eddyb) the `Rc<Self>` points to the same `Self` value as `self`,
    // but can't be `self` itself without `#![feature(arbitrary_self_types)]`.
    /// Emits the parsing code for this rule. When `rc_self_and_rules` is
    /// provided, SPPF nodes are also recorded via `push_state`/`pop_state`;
    /// when it is `None`, only recognition code is generated.
    fn generate_parse<'a>(
        &'a self,
        rc_self_and_rules: Option<(&'a Rc<Self>, &'a RuleMap<'_, Pat>)>,
    ) -> Thunk<impl ContFn + 'a> {
        if let Some((rc_self, _)) = rc_self_and_rules {
            assert!(std::ptr::eq(self, &**rc_self));
        }
        Thunk::new(move |cont| match (self, rc_self_and_rules) {
            (Rule::Empty, _) => cont,
            (Rule::Eat(pat), _) => {
                let pat = pat.rust_matcher();
                check(quote!(let Some(_range) = p.input_consume_left(_range, #pat))).apply(cont)
            }
            (Rule::NegativeLookahead(pat), _) => {
                let pat = pat.rust_matcher();
                check(quote!(p.input_consume_left(_range, #pat).is_none())).apply(cont)
            }
            (Rule::Call(r), _) => call(Rc::new(CodeLabel::NamedRule(r.clone()))).apply(cont),
            (Rule::Concat([left, right]), None) => {
                (left.generate_parse(None) + right.generate_parse(None)).apply(cont)
            }
            (Rule::Concat([left, right]), Some((rc_self, rules))) => {
                // Save the split point between `left` and `right`, then record
                // the concat's SPPF node once both halves have parsed.
                (thunk!(assert_eq!(_range.start(), c.fn_input.start());)
                    + left.generate_parse(Some((left, rules)))
                    + push_state(quote!(c.fn_input.subtract_suffix(_range).len()))
                    + right.generate_parse(Some((right, rules)))
                    + pop_state(|state| sppf_add(&rc_self.parse_node_kind(rules), state)))
                .apply(cont)
            }
            (Rule::Or(cases), None) => parallel(ThunkIter(
                cases.iter().map(|rule| rule.generate_parse(None)),
            ))
            .apply(cont),
            (Rule::Or(cases), Some((rc_self, rules))) => {
                // Each case records which alternative matched via its kind.
                (thunk!(assert_eq!(_range.start(), c.fn_input.start());)
                    + parallel(ThunkIter(cases.iter().map(|rule| {
                        let parse_node_kind = rule.parse_node_kind(rules);
                        push_state(quote!(#parse_node_kind.to_usize()))
                            + rule.generate_parse(Some((rule, rules)))
                    })))
                    + pop_state(|state| sppf_add(&rc_self.parse_node_kind(rules), state)))
                .apply(cont)
            }
            (Rule::Opt(rule), _) => {
                opt(rule.generate_parse(rc_self_and_rules.map(|(_, rules)| (rule, rules))))
                    .apply(cont)
            }
            (Rule::RepeatMany(rule, None), None) => {
                fix(|label| opt(rule.generate_parse(None) + call(label))).apply(cont)
            }
            (Rule::RepeatMany(rule, None), Some((_, rules))) => fix(|label| {
                // `e*` records nodes for the derived `e+` rule.
                let more = Rc::new(Rule::RepeatMore(rule.clone(), None));
                opt(thunk!(assert_eq!(_range.start(), c.fn_input.start());)
                    + rule.generate_parse(Some((rule, rules)))
                    + push_state(quote!(c.fn_input.subtract_suffix(_range).len()))
                    + call(label)
                    + pop_state(move |state| sppf_add(&more.parse_node_kind(rules), state)))
            })
            .apply(cont),
            (Rule::RepeatMany(elem, Some(sep)), _) => {
                // HACK(eddyb) remove extra variables post-NLL
                // `e* % sep` is lowered to `(e+ % sep)?`.
                let rule = Rc::new(Rule::RepeatMore(elem.clone(), Some(sep.clone())));
                let cont =
                    opt(rule.generate_parse(rc_self_and_rules.map(|(_, rules)| (&rule, rules))))
                        .apply(cont);
                cont
            }
            (Rule::RepeatMore(rule, None), None) => {
                fix(|label| rule.generate_parse(None) + opt(call(label))).apply(cont)
            }
            (Rule::RepeatMore(elem, Some(sep)), None) => {
                fix(|label| elem.generate_parse(None) + opt(sep.generate_parse(None) + call(label)))
                    .apply(cont)
            }
            (Rule::RepeatMore(rule, None), Some((rc_self, rules))) => fix(|label| {
                thunk!(assert_eq!(_range.start(), c.fn_input.start());)
                    + rule.generate_parse(Some((rule, rules)))
                    + push_state(quote!(c.fn_input.subtract_suffix(_range).len()))
                    + opt(call(label))
                    + pop_state(|state| sppf_add(&rc_self.parse_node_kind(rules), state))
            })
            .apply(cont),
            (Rule::RepeatMore(elem, Some(sep)), Some((rc_self, rules))) => fix(|label| {
                // Nested frames: the optional `sep (e+ % sep)` tail records a
                // node for the derived `Concat([sep, self])` rule.
                thunk!(assert_eq!(_range.start(), c.fn_input.start());)
                    + elem.generate_parse(Some((elem, rules)))
                    + push_state(quote!(c.fn_input.subtract_suffix(_range).len()))
                    + opt(thunk!(assert_eq!(_range.start(), c.fn_input.start());)
                        + sep.generate_parse(None)
                        + push_state(quote!(c.fn_input.subtract_suffix(_range).len()))
                        + call(label)
                        + pop_state(|state| {
                            sppf_add(
                                &Rc::new(Rule::Concat([sep.clone(), rc_self.clone()]))
                                    .parse_node_kind(rules),
                                state,
                            )
                        }))
                    + pop_state(|state| sppf_add(&rc_self.parse_node_kind(rules), state))
            })
            .apply(cont),
        })
    }
}
impl<Pat: Ord + Hash + RustInputPat> Rule<Pat> {
    /// Emits the "shape" pattern consumed by the generated traversal macros:
    /// `_`/`?` for leaves, tuples for concats, `{ ... }` for choices, and
    /// `[ ... ]` for optional parts. `refutable` marks positions that may be
    /// absent in a given parse.
    fn generate_traverse_shape(&self, refutable: bool, rules: &RuleMap<'_, Pat>) -> Src {
        match self {
            Rule::Empty
            | Rule::Eat(_)
            | Rule::NegativeLookahead(_)
            | Rule::Call(_)
            | Rule::RepeatMany(..)
            | Rule::RepeatMore(..) => {
                if refutable {
                    quote!(?)
                } else {
                    quote!(_)
                }
            }
            Rule::Concat([left, right]) => {
                let left = left.generate_traverse_shape(refutable, rules);
                let right = right.generate_traverse_shape(refutable, rules);
                quote!((#left, #right))
            }
            Rule::Or(cases) => {
                let cases_idx = cases.iter().enumerate().map(|(i, _)| {
                    let i_var_ident = Src::ident(format!("_{}", i));
                    // HACK(eddyb) workaround `quote!(#i)` producing `0usize`.
                    let i = ::proc_macro2::Literal::usize_unsuffixed(i);
                    quote!(#i #i_var_ident)
                });
                let cases_node_kind = cases.iter().map(|rule| rule.parse_node_kind(rules));
                let cases_shape = cases
                    .iter()
                    .map(|rule| rule.generate_traverse_shape(true, rules));
                quote!({ #(#cases_idx: #cases_node_kind => #cases_shape,)* })
            }
            Rule::Opt(rule) => {
                // The inner rule may be absent, hence always refutable.
                let shape = rule.generate_traverse_shape(true, rules);
                quote!([#shape])
            }
        }
    }
}
/// Generates the inherent `parse` entry point and the `OwnedHandle::with`
/// accessor for the generated type of the named rule `name`.
fn impl_parse_with<Pat>(name: &str) -> Src
where
    Pat: RustInputPat,
{
    let ident = Src::ident(name);
    let code_label = Rc::new(CodeLabel::NamedRule(name.to_string()));
    let parse_node_kind = ParseNodeKind::NamedRule(name.to_string());
    let rust_slice_ty = Pat::rust_slice_ty();
    quote!(
        impl<I> #ident<'_, '_, I>
            where I: gll::runtime::Input<Slice = #rust_slice_ty>,
        {
            pub fn parse(input: I) -> gll::runtime::ParseResult<OwnedHandle<I, Self>> {
                let handle = |forest_and_node| OwnedHandle {
                    forest_and_node,
                    _marker: PhantomData,
                };
                gll::runtime::Parser::parse(
                    input,
                    #code_label,
                    #parse_node_kind,
                ).map(handle).map_err(|err| err.map_partial(handle))
            }
        }
        impl<I: gll::runtime::Input> OwnedHandle<I, #ident<'_, '_, I>> {
            pub fn with<R>(&self, f: impl for<'a, 'i> FnOnce(Handle<'a, 'i, I, #ident<'a, 'i, I>>) -> R) -> R {
                self.forest_and_node.unpack_ref(|_, forest_and_node| {
                    let (ref forest, node) = *forest_and_node;
                    f(Handle {
                        node,
                        forest,
                        _marker: PhantomData,
                    })
                })
            }
        }
    )
}
/// Declares the generated Rust type for a named rule — an enum when the rule
/// is enum-shaped (see `find_variant_fields`), a struct otherwise — together
/// with its Debug, from-SPPF, and one/all iteration impls.
fn declare_rule<Pat>(name: &str, rule: &RuleWithNamedFields<Pat>, rules: &RuleMap<'_, Pat>) -> Src
where
    Pat: Ord + Hash + RustInputPat,
{
    let ident = Src::ident(name);
    let variants = rule.find_variant_fields();
    let variants: Option<&[Variant<'_, Pat>]> = variants.as_ref().map(|x| &**x);
    // Handle type for a field; refutable fields are wrapped in `Option`.
    let field_handle_ty = |rule: &Rule<_>, paths| {
        let ty = rule.field_pathset_type(paths);
        let handle_ty = quote!(Handle<'a, 'i, I, #ty>);
        if rule.field_pathset_is_refutable(paths) {
            quote!(Option<#handle_ty>)
        } else {
            handle_ty
        }
    };
    let rule_ty_def = if let Some(variants) = variants {
        let variants = variants.iter().map(|v| {
            let variant_ident = Src::ident(v.name);
            if v.fields.is_empty() {
                // Tuple-like variant holding the whole case's handle.
                let field_ty = v.rule.field_type(&[]);
                quote!(#variant_ident(Handle<'a, 'i, I, #field_ty>))
            } else {
                let fields_ident = v.fields.keys().map(Src::ident);
                let fields_ty = v
                    .fields
                    .values()
                    .map(|paths| field_handle_ty(&v.rule, paths));
                quote!(#variant_ident {
                    #(#fields_ident: #fields_ty),*
                })
            }
        });
        quote!(
            #[allow(non_camel_case_types)]
            pub enum #ident<'a, 'i, I: gll::runtime::Input> {
                #(#variants),*
            }
        )
    } else {
        let fields_ident = rule.fields.keys().map(Src::ident);
        let fields_ty = rule
            .fields
            .values()
            .map(|paths| field_handle_ty(&rule.rule, paths));
        // A fieldless struct still needs the lifetimes/`I` to appear.
        let marker_field = if rule.fields.is_empty() {
            Some(quote!(_marker: PhantomData<(&'a (), &'i (), I)>,))
        } else {
            None
        };
        quote!(
            #[allow(non_camel_case_types)]
            pub struct #ident<'a, 'i, I: gll::runtime::Input> {
                #(pub #fields_ident: #fields_ty),*
                #marker_field
            }
        )
    };
    rule_ty_def
        + rule_debug_impls(name, &rule, variants)
        + impl_rule_from_sppf(name, &rule, variants, rules)
        + impl_rule_one_and_all(name, &rule, variants, rules)
}
/// Generates the constructors that build a rule's user-visible value out of
/// a traversed SPPF result tuple `_r`: one `<variant>_from_sppf` method per
/// variant for enum rules, or a single `from_sppf` for struct rules.
fn impl_rule_from_sppf<Pat>(
    name: &str,
    rule: &RuleWithNamedFields<Pat>,
    variants: Option<&[Variant<'_, Pat>]>,
    rules: &RuleMap<'_, Pat>,
) -> Src
where
    Pat: Ord + Hash + RustInputPat,
{
    let ident = Src::ident(name);
    // Builds the expression extracting one field's node(s) from `_r` by
    // tuple-indexing along each path in the field's path set.
    let field_handle_expr = |rule: &Rule<_>, paths: &OrderSet<Vec<usize>>| {
        let paths_expr = paths.iter().map(|path| {
            // HACK(eddyb) workaround `quote!(#i)` producing `0usize`.
            let path = path
                .iter()
                .cloned()
                .map(::proc_macro2::Literal::usize_unsuffixed);
            quote!(_r #(.#path)*)
        });
        if rule.field_pathset_is_refutable(paths) {
            // Refutable: take the first present path, if any.
            quote!(None #(.or(#paths_expr))* .map(|node| Handle {
                node,
                forest,
                _marker: PhantomData,
            }))
        } else {
            assert_eq!(paths.len(), 1);
            quote!(Handle {
                node: #(#paths_expr)*,
                forest,
                _marker: PhantomData,
            })
        }
    };
    let methods = if let Some(variants) = variants {
        let variants_from_sppf_ident = variants
            .iter()
            .map(|v| Src::ident(format!("{}_from_sppf", v.name)));
        let variants_shape = variants
            .iter()
            .map(|v| v.rule.generate_traverse_shape(false, rules));
        let variants_body = variants.iter().map(|v| {
            let variant_ident = Src::ident(&v.name);
            if v.fields.is_empty() {
                quote!(#ident::#variant_ident(Handle {
                    node: _node,
                    forest,
                    _marker: PhantomData,
                }))
            } else {
                let fields_ident = v.fields.keys().map(Src::ident);
                let fields_expr = v
                    .fields
                    .values()
                    .map(|paths| field_handle_expr(&v.rule, paths));
                quote!(#ident::#variant_ident {
                    #(#fields_ident: #fields_expr),*
                })
            }
        });
        quote!(#(
            #[allow(non_snake_case)]
            fn #variants_from_sppf_ident(
                forest: &'a gll::runtime::ParseForest<'i, _P, I>,
                _node: ParseNode<'i, _P>,
                _r: traverse!(typeof(ParseNode<'i, _P>) #variants_shape),
            ) -> Self {
                #variants_body
            }
        )*)
    } else {
        let shape = rule.rule.generate_traverse_shape(false, rules);
        let fields_ident = rule.fields.keys().map(Src::ident);
        let fields_expr = rule
            .fields
            .values()
            .map(|paths| field_handle_expr(&rule.rule, paths));
        // Fieldless structs never read `forest`; silence the unused warning.
        let marker_field = if rule.fields.is_empty() {
            Some(quote!(_marker: { let _ = forest; PhantomData },))
        } else {
            None
        };
        quote!(
            fn from_sppf(
                forest: &'a gll::runtime::ParseForest<'i, _P, I>,
                _node: ParseNode<'i, _P>,
                _r: traverse!(typeof(ParseNode<'i, _P>) #shape),
            ) -> Self {
                #ident {
                    #(#fields_ident: #fields_expr),*
                    #marker_field
                }
            }
        )
    };
    quote!(impl<'a, 'i, I: gll::runtime::Input> #ident<'a, 'i, I> {
        #methods
    })
}
/// Generates `Handle::one` (the unique parse, or `Ambiguity` if several)
/// and `Handle::all` (an iterator over every ambiguous interpretation)
/// for the named rule's handle type.
fn impl_rule_one_and_all<Pat>(
    name: &str,
    rule: &RuleWithNamedFields<Pat>,
    variants: Option<&[Variant<'_, Pat>]>,
    rules: &RuleMap<'_, Pat>,
) -> Src
where
    Pat: Ord + Hash + RustInputPat,
{
    let ident = Src::ident(name);
    let (one, all) = if let Some(variants) = variants {
        // FIXME(eddyb) figure out a more efficient way to reuse
        // iterators with `quote!(...)` than `.collect::<Vec<_>>()`.
        let i_ident = (0..variants.len())
            .map(|i| Src::ident(format!("_{}", i)))
            .collect::<Vec<_>>();
        let variants_from_sppf_ident = variants
            .iter()
            .map(|v| Src::ident(format!("{}_from_sppf", v.name)))
            .collect::<Vec<_>>();
        let variants_kind = variants
            .iter()
            .map(|v| v.rule.parse_node_kind(rules))
            .collect::<Vec<_>>();
        let variants_shape = variants
            .iter()
            .map(|v| v.rule.generate_traverse_shape(false, rules))
            .collect::<Vec<_>>();
        (
            quote!(
                let node = _sppf.one_choice(node)?;
                match node.kind {
                    #(#variants_kind => {
                        let r = traverse!(one(_sppf, node) #variants_shape);
                        #ident::#variants_from_sppf_ident(self.forest, node, r)
                    })*
                    _ => unreachable!()
                }
            ),
            quote!(
                // A local enum so each variant's distinct iterator type can
                // be returned from the same `flat_map` closure.
                #[derive(Clone)]
                enum Iter<#(#i_ident),*> {
                    #(#i_ident(#i_ident)),*
                }
                impl<T #(, #i_ident: Iterator<Item = T>)*> Iterator for Iter<#(#i_ident),*>
                {
                    type Item = T;
                    fn next(&mut self) -> Option<T> {
                        match self {
                            #(Iter::#i_ident(iter) => iter.next()),*
                        }
                    }
                }
                _sppf.all_choices(node).flat_map(move |node| {
                    match node.kind {
                        #(#variants_kind => Iter::#i_ident(
                            traverse!(all(_sppf) #variants_shape)
                                .apply(node)
                                .map(move |r| #ident::#variants_from_sppf_ident(self.forest, node, r))
                        ),)*
                        _ => unreachable!(),
                    }
                })
            ),
        )
    } else {
        let shape = rule.rule.generate_traverse_shape(false, rules);
        (
            quote!(
                let r = traverse!(one(_sppf, node) #shape);
                #ident::from_sppf(self.forest, node, r)
            ),
            quote!(
                traverse!(all(_sppf) #shape)
                    .apply(node)
                    .map(move |r| #ident::from_sppf(self.forest, node, r))
            ),
        )
    };
    quote!(impl<'a, 'i, I> Handle<'a, 'i, I, #ident<'a, 'i, I>>
        where I: gll::runtime::Input,
    {
        pub fn one(self) -> Result<#ident<'a, 'i, I>, Ambiguity<Self>> {
            // HACK(eddyb) using a closure to catch `Err`s from `?`
            (|| Ok({
                let _sppf = self.forest;
                let node = self.node.unpack_alias();
                #one
            }))().map_err(|gll::runtime::MoreThanOne| Ambiguity(self))
        }
        pub fn all(self) -> impl Iterator<Item = #ident<'a, 'i, I>> {
            let _sppf = self.forest;
            let node = self.node.unpack_alias();
            #all
        }
    })
}
/// Concatenates the `fmt::Debug` impl for the rule type itself with the
/// Debug impls for its `Handle`/`OwnedHandle` wrappers.
fn rule_debug_impls<Pat>(
    name: &str,
    rule: &RuleWithNamedFields<Pat>,
    variants: Option<&[Variant<'_, Pat>]>,
) -> Src {
    rule_debug_impl(name, rule, variants) + rule_handle_debug_impl(name, !rule.fields.is_empty())
}
/// Generates `fmt::Debug` for the rule's user-visible type, mirroring its
/// shape: `debug_tuple`/`debug_struct` per variant for enum rules, one
/// `debug_struct` for struct rules. Refutable (`Option`) fields are only
/// printed when present.
fn rule_debug_impl<Pat>(
    name: &str,
    rule: &RuleWithNamedFields<Pat>,
    variants: Option<&[Variant<'_, Pat>]>,
) -> Src {
    let ident = Src::ident(name);
    let body = if let Some(variants) = variants {
        // Patterns binding each variant's fields to `f_<name>` locals.
        let variants_pat = variants.iter().map(|v| {
            let variant_ident = Src::ident(&v.name);
            if v.fields.is_empty() {
                quote!(#ident::#variant_ident(x))
            } else {
                let fields_ident = v.fields.keys().map(Src::ident);
                let fields_var_ident = v
                    .fields
                    .keys()
                    .map(|field_name| Src::ident(format!("f_{}", field_name)));
                quote!(#ident::#variant_ident {
                    #(#fields_ident: #fields_var_ident,)*
                })
            }
        });
        let variants_body = variants.iter().map(|v| {
            let variant_path_str = format!("{}::{}", name, v.name);
            if v.fields.is_empty() {
                quote!(f.debug_tuple(#variant_path_str).field(x).finish(),)
            } else {
                let fields_debug = v.fields.iter().map(|(field_name, paths)| {
                    let field_var_ident = Src::ident(format!("f_{}", field_name));
                    if v.rule.field_pathset_is_refutable(paths) {
                        quote!(if let Some(field) = #field_var_ident {
                            d.field(#field_name, field);
                        })
                    } else {
                        quote!(d.field(#field_name, #field_var_ident);)
                    }
                });
                quote!({
                    let mut d = f.debug_struct(#variant_path_str);
                    #(#fields_debug)*
                    d.finish()
                })
            }
        });
        quote!(match self {
            #(#variants_pat => #variants_body)*
        })
    } else {
        let fields_debug = rule.fields.iter().map(|(field_name, paths)| {
            let field_ident = Src::ident(field_name);
            if rule.rule.field_pathset_is_refutable(paths) {
                quote!(if let Some(ref field) = self.#field_ident {
                    d.field(#field_name, field);
                })
            } else {
                quote!(d.field(#field_name, &self.#field_ident);)
            }
        });
        quote!(
            let mut d = f.debug_struct(#name);
            #(#fields_debug)*
            d.finish()
        )
    };
    quote!(impl<I: gll::runtime::Input> fmt::Debug for #ident<'_, '_, I> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            #body
        }
    })
}
/// Generates `fmt::Debug` for `Handle<..., Rule>` and `OwnedHandle<..., Rule>`:
/// the source-span info first, then (only for rules with fields) every
/// ambiguous interpretation, `|`-separated.
fn rule_handle_debug_impl(name: &str, has_fields: bool) -> Src {
    let ident = Src::ident(name);
    let body = if !has_fields {
        // Fieldless rules have nothing beyond the source info to print.
        quote!()
    } else {
        quote!(
            write!(f, " => ")?;
            let mut first = true;
            for x in self.all() {
                if !first {
                    write!(f, " | ")?;
                }
                first = false;
                fmt::Debug::fmt(&x, f)?;
            }
        )
    };
    quote!(
        impl<'a, 'i, I: gll::runtime::Input> fmt::Debug for Handle<'a, 'i, I, #ident<'a, 'i, I>> {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                write!(f, "{:?}", self.source_info())?;
                #body
                Ok(())
            }
        }
        impl<I: gll::runtime::Input> fmt::Debug for OwnedHandle<I, #ident<'_, '_, I>> {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                // Delegate to the borrowed handle's Debug.
                self.with(|handle| handle.fmt(f))
            }
        }
    )
}
/// Compiles every named rule to continuation-passing parse code and emits
/// the single `CodeStep::step` dispatcher that executes one code label at
/// a time. Also records, via `code_labels`, how many nested labels each
/// rule's compilation produced.
fn define_parse_fn<Pat>(
    rules: &RuleMap<'_, Pat>,
    code_labels: &mut OrderMap<Rc<CodeLabel>, usize>,
) -> Src
where
    Pat: Ord + Hash + RustInputPat,
{
    let mut code_label_arms = vec![];
    for (name, rule) in rules.named {
        let code_label = Rc::new(CodeLabel::NamedRule(name.clone()));
        // Only rules with named fields need the rule map while generating
        // parse code (for field bookkeeping).
        let rules = if rule.fields.is_empty() {
            None
        } else {
            Some(rules)
        };
        (rule
            .rule
            .generate_parse(rules.map(|rules| (&rule.rule, rules)))
            + ret())
            .apply(Continuation {
                code_labels,
                fn_code_label: &mut code_label.clone(),
                code_label_arms: &mut code_label_arms,
                code: Code::Inline(quote!()),
                nested_frames: vec![],
            })
            .reify_as(code_label);
    }
    let rust_slice_ty = Pat::rust_slice_ty();
    quote!(impl<I> gll::runtime::CodeStep<_P, I> for _C
        where I: gll::runtime::Input<Slice = #rust_slice_ty>,
    {
        fn step<'i>(
            p: &mut gll::runtime::Parser<'i, _P, _C, I>,
            mut c: gll::runtime::Continuation<'i, _C>,
            _range: gll::runtime::Range<'i>,
        ) {
            match c.code {
                #(#code_label_arms)*
            }
        }
    })
}
/// Declares the `_P` enum of parse-node kinds (one variant per node, doc'd
/// with the node's grammar description) and implements `Display` plus the
/// `ParseNodeKind` trait (shape lookup and usize round-tripping) for it.
fn declare_parse_node_kind(all_parse_nodes: &[ParseNode]) -> Src {
    let nodes_i = (0..all_parse_nodes.len()).map(|i| {
        // HACK(eddyb) workaround `quote!(#i)` producing `0usize`.
        let i = ::proc_macro2::Literal::usize_unsuffixed(i);
        quote!(#i)
    });
    // FIXME(eddyb) figure out a more efficient way to reuse
    // iterators with `quote!(...)` than `.collect::<Vec<_>>()`.
    let nodes_kind = all_parse_nodes
        .iter()
        .map(|node| &node.kind)
        .collect::<Vec<_>>();
    let nodes_kind_ident = nodes_kind.iter().map(|kind| kind.ident());
    // Back-tick-quoted description, with embedded back-ticks escaped.
    let nodes_desc = all_parse_nodes
        .iter()
        .map(|node| Src::new(format!("`{}`", node.desc.replace('`', "\\`"))))
        .collect::<Vec<_>>();
    let nodes_shape = all_parse_nodes.iter().map(|node| &node.shape);
    quote!(
        #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
        pub enum _P {
            #(
                #[doc = #nodes_desc]
                #nodes_kind_ident,
            )*
        }
        impl fmt::Display for _P {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                let s = match *self {
                    #(#nodes_kind => #nodes_desc),*
                };
                write!(f, "{}", s)
            }
        }
        impl ParseNodeKind for _P {
            fn shape(self) -> ParseNodeShape<Self> {
                match self {
                    #(#nodes_kind => #nodes_shape),*
                }
            }
            fn from_usize(i: usize) -> Self {
                match i {
                    #(#nodes_i => #nodes_kind,)*
                    _ => unreachable!(),
                }
            }
            fn to_usize(self) -> usize { self as usize }
        }
    )
}
/// Generates `fmt::Debug` for the type-erased `Handle<..., Any>`: dispatch
/// on the node kind to the typed handle's Debug when the node has a
/// user-visible type, falling back to the unit handle otherwise.
fn impl_debug_for_handle_any(all_parse_nodes: &[ParseNode]) -> Src {
    let arms = all_parse_nodes
        .iter()
        .filter_map(|ParseNode { kind, ty, .. }| {
            // Only nodes with an associated user type get a typed arm.
            ty.as_ref().map(|ty| {
                quote!(#kind => write!(f, "{:?}", Handle::<_, #ty> {
                    node: self.node,
                    forest: self.forest,
                    _marker: PhantomData,
                }),)
            })
        });
    quote!(impl<I: gll::runtime::Input> fmt::Debug for Handle<'_, '_, I, Any> {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            match self.node.kind {
                #(#arms)*
                _ => write!(f, "{:?}", Handle::<_, ()> {
                    node: self.node,
                    forest: self.forest,
                    _marker: PhantomData,
                }),
            }
        }
    })
}
/// Declares the `_C` enum of code labels (one per named rule plus one per
/// nested continuation label) and implements `CodeLabel::enclosing_fn`,
/// which maps each nested label back to its parent rule label.
fn code_label_decl_and_impls<Pat>(
    rules: &RuleMap<'_, Pat>,
    code_labels: &OrderMap<Rc<CodeLabel>, usize>,
) -> Src {
    let all_labels = rules
        .named
        .keys()
        .map(|r| CodeLabel::NamedRule(r.clone()))
        .chain(code_labels.iter().flat_map(|(fn_label, &counter)| {
            // `counter` nested labels were generated under `fn_label`.
            iter::repeat(fn_label.clone())
                .zip(0..counter)
                .map(|(parent, i)| CodeLabel::Nested { parent, i })
        }))
        .map(Rc::new)
        .collect::<Vec<_>>();
    let all_labels_ident = all_labels.iter().map(|label| label.flattened_ident());
    // Nested labels that are not themselves function entry points resolve
    // to their parent; everything else is its own enclosing fn.
    let all_labels_enclosing_fn = all_labels.iter().map(|label| match &**label {
        CodeLabel::Nested { parent, .. } if !code_labels.contains_key(label) => parent,
        _ => label,
    });
    quote!(
        #[allow(non_camel_case_types)]
        #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
        enum _C {
            #(#all_labels_ident),*
        }
        impl CodeLabel for _C {
            fn enclosing_fn(self) -> Self {
                match self {
                    #(#all_labels => #all_labels_enclosing_fn),*
                }
            }
        }
    )
}
|
use super::{Measure, Measurements};
use sightglass_data::Phase;
/// For users who want to record measurements on their own, this no-op measure lets the tool be
/// used without the overhead of any measurement activity. TODO: document an example using `perf`
/// together with `start`/`end` (how should `NoopMeasure::start` be referenced?).
#[derive(Default)]
pub struct NoopMeasure;

impl NoopMeasure {
    /// Creates a new no-op measure.
    ///
    /// Also derives `Default` (clippy `new_without_default`): a unit struct
    /// with a `new` should be constructible via `Default` as well.
    pub fn new() -> Self {
        Self
    }
}
impl Measure for NoopMeasure {
    /// Intentionally does nothing at phase start.
    fn start(&mut self, _phase: Phase) {}
    /// Intentionally records nothing at phase end.
    fn end(&mut self, _phase: Phase, _measurements: &mut Measurements) {}
}
|
use std::fs::File;
use std::io::Read;
use scan_fmt::*;
use std::collections::HashMap;
/// Advent of Code 2018 day 7, part 1: topologically order the steps from
/// the dependency edges in `input`, always taking the alphabetically
/// smallest available step next, and print the resulting order.
fn main() {
    let mut file = File::open("input").unwrap();
    let mut buf = String::new();
    file.read_to_string(&mut buf).unwrap();
    // Each line encodes one edge: `src` must be finished before `dst`.
    let edges: Vec<(char, char)> = buf
        .lines()
        .map(|line| {
            let (src, dst) = scan_fmt!(
                line,
                "Step {} must be finished before step {} can begin.",
                char, char
            );
            (src.unwrap(), dst.unwrap())
        })
        .collect();
    // Adjacency lists. `entry(..).or_default()` (clippy `or_fun_call` /
    // `or_insert(Vec::new())`) also registers nodes that only ever appear
    // on one side of an edge, so every node ends up in both maps.
    let out_edges: HashMap<char, Vec<char>> = edges
        .iter()
        .fold(HashMap::new(), |mut acc, (src, dst)| {
            acc.entry(*dst).or_default();
            acc.entry(*src).or_default().push(*dst);
            acc
        });
    let in_edges: HashMap<char, Vec<char>> = edges
        .iter()
        .fold(HashMap::new(), |mut acc, (src, dst)| {
            acc.entry(*src).or_default();
            acc.entry(*dst).or_default().push(*src);
            acc
        });
    // Number of unfinished prerequisites per step.
    let mut fan_ins: HashMap<char, usize> = in_edges
        .iter()
        .map(|(dst, srcs)| (*dst, srcs.len()))
        .collect();
    // Steps whose prerequisites are all satisfied.
    let mut available: Vec<char> = fan_ins
        .iter()
        .filter(|(_, num_srcs)| **num_srcs == 0)
        .map(|(dst, _)| *dst)
        .collect();
    loop {
        // Keep descending order so `pop` yields the smallest step.
        available.sort_unstable_by(|a, b| b.cmp(a));
        if let Some(src) = available.pop() {
            print!("{}", src);
            // Completing `src` unlocks any successor whose fan-in drops to 0.
            out_edges.get(&src).unwrap().iter().for_each(|dst| {
                let fan_in = fan_ins.get_mut(dst).unwrap();
                *fan_in -= 1;
                if *fan_in == 0 {
                    available.push(*dst)
                }
            });
        } else {
            break;
        }
    }
    println!();
}
|
use crate::sys::unix::net::{new_ip_socket, socket_addr};
use crate::sys::unix::{SourceFd, TcpStream};
use crate::{event, Interest, Registry, Token};
use std::fmt;
use std::io;
use std::mem::size_of;
use std::net::{self, SocketAddr};
use std::os::unix::io::{AsRawFd, FromRawFd, IntoRawFd, RawFd};
/// Unix backend for mio's TCP listener: a thin wrapper around the standard
/// library listener whose raw fd is registered with the poller.
pub struct TcpListener {
    inner: net::TcpListener,
}
impl TcpListener {
    /// Creates a listener bound to `addr`.
    ///
    /// The socket is built manually (rather than via `net::TcpListener::bind`)
    /// so options can be set on the fd before `bind`/`listen`.
    pub fn bind(addr: SocketAddr) -> io::Result<TcpListener> {
        new_ip_socket(addr, libc::SOCK_STREAM).and_then(|socket| {
            // Set SO_REUSEADDR (mirrors what libstd does).
            syscall!(setsockopt(
                socket,
                libc::SOL_SOCKET,
                libc::SO_REUSEADDR,
                &1 as *const libc::c_int as *const libc::c_void,
                size_of::<libc::c_int>() as libc::socklen_t,
            ))
            .and_then(|_| {
                let (raw_addr, raw_addr_length) = socket_addr(&addr);
                syscall!(bind(socket, raw_addr, raw_addr_length))
            })
            // Listen with a backlog of 1024 pending connections.
            .and_then(|_| syscall!(listen(socket, 1024)))
            .map_err(|err| {
                // Close the socket if we hit an error, ignoring the error
                // from closing since we can't pass back two errors.
                let _ = unsafe { libc::close(socket) };
                err
            })
            .map(|_| TcpListener {
                // SAFETY-relevant: ownership of the fd transfers to the
                // std listener, which will close it on drop.
                inner: unsafe { net::TcpListener::from_raw_fd(socket) },
            })
        })
    }
    /// Wraps an already-bound standard-library listener.
    pub fn from_std(inner: net::TcpListener) -> TcpListener {
        TcpListener { inner }
    }
    /// Returns the local address the listener is bound to.
    pub fn local_addr(&self) -> io::Result<SocketAddr> {
        self.inner.local_addr()
    }
    /// Duplicates the underlying socket into a second listener.
    pub fn try_clone(&self) -> io::Result<TcpListener> {
        self.inner.try_clone().map(|s| TcpListener { inner: s })
    }
    /// Accepts a connection, switching the new stream to non-blocking mode
    /// before handing it back (required for readiness-based polling).
    pub fn accept(&self) -> io::Result<(TcpStream, SocketAddr)> {
        self.inner.accept().and_then(|(inner, addr)| {
            inner
                .set_nonblocking(true)
                .map(|()| (TcpStream::new(inner), addr))
        })
    }
    /// Sets the IP_TTL option on the socket.
    pub fn set_ttl(&self, ttl: u32) -> io::Result<()> {
        self.inner.set_ttl(ttl)
    }
    /// Reads the IP_TTL option from the socket.
    pub fn ttl(&self) -> io::Result<u32> {
        self.inner.ttl()
    }
    /// Returns and clears any pending error on the socket (SO_ERROR).
    pub fn take_error(&self) -> io::Result<Option<io::Error>> {
        self.inner.take_error()
    }
}
/// Registration with the poller is delegated to `SourceFd`, which operates
/// directly on the listener's raw file descriptor.
impl event::Source for TcpListener {
    fn register(
        &mut self,
        registry: &Registry,
        token: Token,
        interests: Interest,
    ) -> io::Result<()> {
        SourceFd(&self.as_raw_fd()).register(registry, token, interests)
    }
    fn reregister(
        &mut self,
        registry: &Registry,
        token: Token,
        interests: Interest,
    ) -> io::Result<()> {
        SourceFd(&self.as_raw_fd()).reregister(registry, token, interests)
    }
    fn deregister(&mut self, registry: &Registry) -> io::Result<()> {
        SourceFd(&self.as_raw_fd()).deregister(registry)
    }
}
/// Debug-formats as the wrapped standard-library listener.
impl fmt::Debug for TcpListener {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&self.inner, f)
    }
}
impl FromRawFd for TcpListener {
    /// Takes ownership of `fd`; the caller must ensure it is a valid,
    /// listening TCP socket not owned elsewhere (per `FromRawFd`'s contract).
    unsafe fn from_raw_fd(fd: RawFd) -> TcpListener {
        TcpListener {
            inner: net::TcpListener::from_raw_fd(fd),
        }
    }
}
impl IntoRawFd for TcpListener {
    /// Relinquishes ownership of the fd to the caller (no close on drop).
    fn into_raw_fd(self) -> RawFd {
        self.inner.into_raw_fd()
    }
}
impl AsRawFd for TcpListener {
    /// Borrows the underlying fd without transferring ownership.
    fn as_raw_fd(&self) -> RawFd {
        self.inner.as_raw_fd()
    }
}
|
/*
Copyright (c) 2023 Uber Technologies, Inc.
<p>Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
except in compliance with the License. You may obtain a copy of the License at
<p>http://www.apache.org/licenses/LICENSE-2.0
<p>Unless required by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing permissions and
limitations under the License.
*/
use crate::models::capture_group_patterns::ConcreteSyntax;
use crate::models::concrete_syntax::get_all_matches_for_concrete_syntax;
use crate::models::{default_configs::JAVA, language::PiranhaLanguage};
/// Shared harness for the concrete-syntax matcher tests: parses `code` as
/// Java, runs `pattern` against the first child of the parse-tree root,
/// and asserts both the match count and the captured variable bindings.
///
/// `expected_vars[i]` lists `(variable, value)` pairs that must appear in
/// the i-th match's capture map.
fn run_test(
    code: &str, pattern: &str, expected_matches: usize, expected_vars: Vec<Vec<(&str, &str)>>,
) {
    let java = PiranhaLanguage::from(JAVA);
    let mut parser = java.parser();
    let tree = parser.parse(code.as_bytes(), None).unwrap();
    let meta = ConcreteSyntax(String::from(pattern));
    let (matches, _is_match_found) = get_all_matches_for_concrete_syntax(
        // Match against the class declaration, not the whole file node.
        &tree.root_node().child(0).unwrap(),
        code.as_bytes(),
        &meta,
        true,
        None,
    );
    assert_eq!(matches.len(), expected_matches);
    for (i, vars) in expected_vars.iter().enumerate() {
        let match_item = &matches[i];
        for &(var, expected_val) in vars {
            let val = match_item.matches.get(var).unwrap();
            assert_eq!(val, expected_val);
        }
    }
}
/// A pattern with two holes matches exactly one field declaration and
/// captures both variables.
#[test]
fn test_single_match() {
    run_test(
        "class Example { public int a = 10; }",
        "public int :[name] = :[value];",
        1,
        vec![vec![("name", "a"), ("value", "10")]],
    );
}
/// The same pattern matches each of two field declarations, with per-match
/// captures in document order.
#[test]
fn test_multiple_match() {
    run_test(
        "class Example { public int a = 10; public int b = 20; }",
        "public int :[name] = :[value];",
        2,
        vec![
            vec![("name", "a"), ("value", "10")],
            vec![("name", "b"), ("value", "20")],
        ],
    );
}
/// A pattern whose literal text (`String`) does not occur yields no matches.
#[test]
fn test_no_match() {
    run_test(
        "class Example { public int a = 10; }",
        "public String :[name] = :[value];",
        0,
        vec![],
    );
}
|
//! This example demonstrates using the [attribute macro](https://doc.rust-lang.org/reference/procedural-macros.html#attribute-macros)
//! [`inline`] to expand struct fields to individual columns in a [`Table`] display.
//!
//! * Note that without inlining a struct or enum field, those objects
//! must implement the [`Display`] trait as they will be represented in
//! a single column with the value of their [`ToString`] output.
use tabled::{Table, Tabled};
/// One table row; the nested `currency` struct is flattened into its own
/// columns via `#[tabled(inline)]`.
#[derive(Tabled)]
struct Country {
    name: &'static str,
    capital_city: &'static str,
    surface_area_km2: f32,
    #[tabled(inline)]
    currency: Currency,
}
/// Currency details inlined into the country row.
/// NOTE(review): the field names presumably become column headers via the
/// `Tabled` derive — confirm against the rendered output.
#[derive(Tabled)]
struct Currency {
    str: &'static str,
    short: &'static str,
}
impl Country {
fn new(
name: &'static str,
national_currency: &'static str,
national_currency_short: &'static str,
capital_city: &'static str,
surface_area_km2: f32,
) -> Self {
Self {
name,
capital_city,
surface_area_km2,
currency: Currency {
str: national_currency,
short: national_currency_short,
},
}
}
}
/// Renders three sample countries as a table; the nested `Currency` is
/// expanded into its own columns by `#[tabled(inline)]`.
fn main() {
    let data = [
        Country::new("Afghanistan", "Afghani", "AFN", "Kabul", 652867.0),
        Country::new("Angola", "Kwanza", "AOA", "Luanda", 1246700.0),
        Country::new("Canada", "Canadian Dollar", "CAD", "Ottawa", 9984670.0),
    ];
    println!("{}", Table::new(data));
}
|
pub mod adjacent;
pub mod hash;
pub mod rect;
|
use file;
/// Day 2 driver: computes both checksum variants for the puzzle input,
/// then runs the two worked examples from the problem statement.
pub fn run() {
    let inputs = file::read_inputs("Day2.txt");
    let lines = read_values(&inputs);
    part1(&lines);
    part2(&lines);
    part2_recursive(&lines);
    // String literals are already `&str`; the extra `&` the original added
    // was a needless borrow (clippy `needless_borrow`).
    part1(&read_values("5 1 9 5\n7 5 3\n2 4 6 8"));
    part2(&read_values("5 9 2 8\n9 4 7 3\n3 8 6 5"));
}
/// Parses whitespace-separated unsigned integers, one row per line,
/// stopping at the first empty line. Each row is sorted in descending
/// order so callers can rely on `row[0]` being the maximum and the last
/// element being the minimum.
///
/// Panics if any token fails to parse as `u32` (acceptable for puzzle input).
fn read_values(inputs: &str) -> Vec<Vec<u32>> {
    let mut values = Vec::new();
    for line in inputs.split('\n') {
        if line.is_empty() {
            // A blank line (e.g. the trailing newline) terminates the input.
            break;
        }
        // Build the row directly; the original pushed an empty Vec, then
        // overwrote it via `values[len - 1] = line.to_owned()`, costing an
        // extra allocation and a clone per row.
        let mut row: Vec<u32> = line
            .split(|c| c == ' ' || c == '\t')
            .map(|v| v.parse::<u32>().unwrap())
            .collect();
        // Descending order: largest first, smallest last.
        row.sort_unstable_by(|a, b| b.cmp(a));
        values.push(row);
    }
    values
}
/// Part 1 checksum: sum over rows of (max - min). Rows arrive sorted in
/// descending order from `read_values`, so the max is the first element
/// and the min is the last.
fn part1(lines: &Vec<Vec<u32>>) {
    let mut sum: u32 = 0;
    for line in lines {
        // BUG FIX: the original indexed with `lines.len() - 1` (the number
        // of rows) instead of `line.len() - 1` (the last element of this
        // row), producing wrong checksums and panicking whenever a row was
        // shorter than the row count.
        sum += line[0] - line[line.len() - 1];
    }
    println!("Checksum {}", sum);
}
/// Part 2 checksum: for each row, find a pair where the earlier (larger)
/// value is evenly divisible by a later one and add the quotient to the
/// running total.
fn part2(lines: &Vec<Vec<u32>>) {
    let mut total: u32 = 0;
    for row in lines {
        let mut quotient = 0;
        for (i, &a) in row.iter().enumerate() {
            // Only the inner search stops at a hit, matching the original:
            // a later starting element may still overwrite the quotient.
            for &b in &row[i + 1..] {
                if a % b == 0 {
                    quotient = a / b;
                    break;
                }
            }
        }
        total += quotient;
    }
    println!("New checksum {}", total);
}
/// Part 2 checksum computed recursively: sums `find_divisor` over all rows.
fn part2_recursive(lines: &Vec<Vec<u32>>) {
    // Sum straight from the iterator; the original collected into an
    // intermediate `Vec<u32>` only to iterate it again (needless_collect).
    let sum: u32 = lines.iter().map(|line| find_divisor(line)).sum();
    println!("New checksum recursive {}", sum);
}
/// Returns the quotient for the first element (scanning left to right)
/// that is evenly divisible by some later element, or 0 when no such pair
/// exists (including for an empty row).
fn find_divisor(line: &Vec<u32>) -> u32 {
    match line.split_first() {
        None => 0,
        Some((comp, rest)) => {
            for value in rest {
                if *comp % *value == 0 {
                    return *comp / *value;
                }
            }
            // `comp` has no divisor among the remaining values; recurse on
            // the tail. (Signature forces a fresh Vec per recursion step.)
            find_divisor(&rest.to_vec())
        }
    }
}
use std::fmt;
use std::ops::{Deref, DerefMut};
use std::iter::FromIterator;
use std::cell::{RefCell, Ref, RefMut};
use std::ptr;
use std::mem;
use std::ffi::{CStr, CString};
use std::any::TypeId;
use std::marker::PhantomData;
use std::collections::{HashMap, VecDeque};
use std::collections::hash_map::Entry as HashMapEntry;
use std::os::raw::{c_char, c_int, c_void};
use ffi;
use error::*;
use util::*;
/// A rust-side handle to an internal Lua value.
#[derive(Debug, Clone)]
pub enum LuaValue<'lua> {
    /// The Lua value `nil`.
    Nil,
    /// `true` or `false`.
    Boolean(bool),
    /// A raw pointer value passed through Lua without lifecycle management.
    LightUserData(LightUserData),
    /// An integer (`lua_Integer`).
    Integer(LuaInteger),
    /// A floating-point number (`lua_Number`).
    Number(LuaNumber),
    /// A reference to a Lua string.
    String(LuaString<'lua>),
    /// A reference to a Lua table.
    Table(LuaTable<'lua>),
    /// A reference to a Lua function.
    Function(LuaFunction<'lua>),
    /// A reference to registered userdata.
    UserData(LuaUserData<'lua>),
    /// A reference to a Lua coroutine.
    Thread(LuaThread<'lua>),
}
pub use self::LuaValue::Nil as LuaNil;
/// Trait for types convertible to LuaValue.
pub trait ToLua<'a> {
    /// Consumes `self`, producing the Lua representation or a conversion error.
    fn to_lua(self, lua: &'a Lua) -> LuaResult<LuaValue<'a>>;
}
/// Trait for types convertible from LuaValue.
pub trait FromLua<'a>: Sized {
    /// Converts a Lua value into `Self`, or errors if the value is unsuitable.
    fn from_lua(lua_value: LuaValue<'a>, lua: &'a Lua) -> LuaResult<Self>;
}
/// Multiple lua values used for both argument passing and also for multiple return values.
/// Backed by a `VecDeque` so results can be pushed at the front in
/// stack-pop order (see `LuaFunction::call`).
#[derive(Debug, Clone)]
pub struct LuaMultiValue<'lua>(VecDeque<LuaValue<'lua>>);
impl<'lua> LuaMultiValue<'lua> {
    /// Creates an empty multi-value (zero arguments / zero results).
    pub fn new() -> LuaMultiValue<'lua> {
        LuaMultiValue(VecDeque::new())
    }
}
/// Collects an iterator of values into a multi-value, preserving order.
impl<'lua> FromIterator<LuaValue<'lua>> for LuaMultiValue<'lua> {
    fn from_iter<I: IntoIterator<Item = LuaValue<'lua>>>(iter: I) -> Self {
        LuaMultiValue(VecDeque::from_iter(iter))
    }
}
/// Consuming iteration over the contained values, front to back.
impl<'lua> IntoIterator for LuaMultiValue<'lua> {
    type Item = LuaValue<'lua>;
    type IntoIter = <VecDeque<LuaValue<'lua>> as IntoIterator>::IntoIter;
    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}
/// Exposes the inner `VecDeque` API (len, push_front, etc.) directly.
impl<'lua> Deref for LuaMultiValue<'lua> {
    type Target = VecDeque<LuaValue<'lua>>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// Mutable access to the inner `VecDeque`.
impl<'lua> DerefMut for LuaMultiValue<'lua> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
/// Trait for types convertible into multiple Lua values (call arguments).
pub trait ToLuaMulti<'a> {
    fn to_lua_multi(self, lua: &'a Lua) -> LuaResult<LuaMultiValue<'a>>;
}
/// Trait for types convertible from multiple Lua values (call results).
pub trait FromLuaMulti<'a>: Sized {
    fn from_lua_multi(values: LuaMultiValue<'a>, lua: &'a Lua) -> LuaResult<Self>;
}
/// Boxed Rust callback callable from Lua: receives the interpreter and the
/// call's arguments, returns the call's results.
type LuaCallback = Box<for<'lua> FnMut(&'lua Lua, LuaMultiValue<'lua>)
    -> LuaResult<LuaMultiValue<'lua>>>;
/// An owned slot in the Lua registry, keeping a Lua value alive from the
/// Rust side. All handle types (`LuaString`, `LuaTable`, ...) wrap one.
struct LuaRef<'lua> {
    lua: &'lua Lua,
    // Key returned by `luaL_ref`, released again in `Drop`.
    registry_id: c_int,
}
/// Shows only the registry id; the `Lua` handle itself is not printable.
impl<'lua> fmt::Debug for LuaRef<'lua> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "LuaRef({})", self.registry_id)
    }
}
/// Cloning allocates a fresh registry slot for the same Lua value by
/// pushing the referenced value and popping it back into a new ref.
impl<'lua> Clone for LuaRef<'lua> {
    fn clone(&self) -> Self {
        unsafe {
            self.lua.push_ref(self.lua.state, self);
            self.lua.pop_ref(self.lua.state)
        }
    }
}
/// Releases the registry slot so the referenced value can be collected.
impl<'lua> Drop for LuaRef<'lua> {
    fn drop(&mut self) {
        unsafe {
            ffi::luaL_unref(self.lua.state, ffi::LUA_REGISTRYINDEX, self.registry_id);
        }
    }
}
/// Lua's native integer type (`lua_Integer`).
pub type LuaInteger = ffi::lua_Integer;
/// Lua's native floating-point type (`lua_Number`).
pub type LuaNumber = ffi::lua_Number;
/// A "light userdata" value: a bare pointer with no lifecycle managed by Lua.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct LightUserData(pub *mut c_void);
/// Handle to an internal lua string
#[derive(Clone, Debug)]
pub struct LuaString<'lua>(LuaRef<'lua>);
impl<'lua> LuaString<'lua> {
    /// Returns the string's contents, erroring if they are not valid UTF-8.
    ///
    /// NOTE(review): the returned `&str` is derived from the pointer
    /// `lua_tostring` yields while the value is on the stack, but it is
    /// returned after the value has been popped — confirm the interpreter
    /// cannot invalidate that memory while the borrow is live.
    pub fn get(&self) -> LuaResult<&str> {
        let lua = self.0.lua;
        unsafe {
            stack_guard(lua.state, 0, || {
                check_stack(lua.state, 1)?;
                lua.push_ref(lua.state, &self.0);
                // The ref is created from a string, so the type must hold.
                assert_eq!(ffi::lua_type(lua.state, -1), ffi::LUA_TSTRING);
                let s = CStr::from_ptr(ffi::lua_tostring(lua.state, -1)).to_str()?;
                ffi::lua_pop(lua.state, 1);
                Ok(s)
            })
        }
    }
}
/// Handle to an internal lua table
#[derive(Clone, Debug)]
pub struct LuaTable<'lua>(LuaRef<'lua>);
impl<'lua> LuaTable<'lua> {
    /// Sets `self[key] = value`, going through any `__newindex` metamethod.
    pub fn set<K: ToLua<'lua>, V: ToLua<'lua>>(&self, key: K, value: V) -> LuaResult<()> {
        let lua = self.0.lua;
        let key = key.to_lua(lua)?;
        let value = value.to_lua(lua)?;
        unsafe {
            error_guard(lua.state, 0, 0, |state| {
                check_stack(state, 3)?;
                lua.push_ref(state, &self.0);
                lua.push_value(state, key)?;
                lua.push_value(state, value)?;
                ffi::lua_settable(state, -3);
                Ok(())
            })
        }
    }
    /// Gets `self[key]`, going through any `__index` metamethod, and
    /// converts the result to `V`.
    pub fn get<K: ToLua<'lua>, V: FromLua<'lua>>(&self, key: K) -> LuaResult<V> {
        let lua = self.0.lua;
        let key = key.to_lua(lua)?;
        unsafe {
            let res = error_guard(lua.state, 0, 0, |state| {
                check_stack(state, 2)?;
                lua.push_ref(state, &self.0);
                lua.push_value(state, key.to_lua(lua)?)?;
                ffi::lua_gettable(state, -2);
                let res = lua.pop_value(state)?;
                ffi::lua_pop(state, 1);
                Ok(res)
            })?;
            V::from_lua(res, lua)
        }
    }
    /// Shorthand for checking whether get(key) is nil
    pub fn has<K: ToLua<'lua>>(&self, key: K) -> LuaResult<bool> {
        let lua = self.0.lua;
        let key = key.to_lua(lua)?;
        unsafe {
            error_guard(lua.state, 0, 0, |state| {
                check_stack(state, 2)?;
                lua.push_ref(state, &self.0);
                lua.push_value(state, key)?;
                ffi::lua_gettable(state, -2);
                let has = ffi::lua_isnil(state, -1) == 0;
                // Pop both the looked-up value and the table.
                ffi::lua_pop(state, 2);
                Ok(has)
            })
        }
    }
    /// Set a field in the table, without invoking metamethods
    pub fn raw_set<K: ToLua<'lua>, V: ToLua<'lua>>(&self, key: K, value: V) -> LuaResult<()> {
        let lua = self.0.lua;
        unsafe {
            stack_guard(lua.state, 0, || {
                check_stack(lua.state, 3)?;
                lua.push_ref(lua.state, &self.0);
                lua.push_value(lua.state, key.to_lua(lua)?)?;
                lua.push_value(lua.state, value.to_lua(lua)?)?;
                ffi::lua_rawset(lua.state, -3);
                ffi::lua_pop(lua.state, 1);
                Ok(())
            })
        }
    }
    /// Get a field in the table, without invoking metamethods
    pub fn raw_get<K: ToLua<'lua>, V: FromLua<'lua>>(&self, key: K) -> LuaResult<V> {
        let lua = self.0.lua;
        unsafe {
            stack_guard(lua.state, 0, || {
                check_stack(lua.state, 2)?;
                lua.push_ref(lua.state, &self.0);
                lua.push_value(lua.state, key.to_lua(lua)?)?;
                // BUG FIX: this called `lua_gettable`, which invokes the
                // `__index` metamethod — contradicting this method's
                // documented raw-access contract and the `lua_rawset` used
                // by its `raw_set` counterpart.
                ffi::lua_rawget(lua.state, -2);
                let res = V::from_lua(lua.pop_value(lua.state)?, lua)?;
                ffi::lua_pop(lua.state, 1);
                Ok(res)
            })
        }
    }
    /// Equivalent to the result of the lua '#' operator.
    pub fn length(&self) -> LuaResult<LuaInteger> {
        let lua = self.0.lua;
        unsafe {
            error_guard(lua.state, 0, 0, |state| {
                check_stack(state, 1)?;
                lua.push_ref(state, &self.0);
                Ok(ffi::luaL_len(state, -1))
            })
        }
    }
    /// Equivalent to the result of the lua '#' operator, without invoking the
    /// __len metamethod.
    pub fn raw_length(&self) -> LuaResult<LuaInteger> {
        let lua = self.0.lua;
        unsafe {
            stack_guard(lua.state, 0, || {
                check_stack(lua.state, 1)?;
                lua.push_ref(lua.state, &self.0);
                let len = ffi::lua_rawlen(lua.state, -1);
                ffi::lua_pop(lua.state, 1);
                Ok(len as LuaInteger)
            })
        }
    }
    /// Loop over each key, value pair in the table
    pub fn for_each_pair<K, V, F>(&self, mut f: F) -> LuaResult<()>
        where K: FromLua<'lua>,
              V: FromLua<'lua>,
              F: FnMut(K, V)
    {
        let lua = self.0.lua;
        unsafe {
            stack_guard(lua.state, 0, || {
                check_stack(lua.state, 4)?;
                lua.push_ref(lua.state, &self.0);
                ffi::lua_pushnil(lua.state);
                while ffi::lua_next(lua.state, -2) != 0 {
                    // Duplicate the key: one copy is consumed by conversion,
                    // the other stays on the stack for the next lua_next.
                    ffi::lua_pushvalue(lua.state, -2);
                    let key = K::from_lua(lua.pop_value(lua.state)?, lua)?;
                    let value = V::from_lua(lua.pop_value(lua.state)?, lua)?;
                    f(key, value);
                }
                ffi::lua_pop(lua.state, 1);
                Ok(())
            })
        }
    }
    /// Loop over the table, strictly interpreting the table as an array, and
    /// fail if it is not a proper lua array.
    pub fn for_each_array_value<V: FromLua<'lua>, F: FnMut(V)>(&self, mut f: F) -> LuaResult<()> {
        let lua = self.0.lua;
        unsafe {
            stack_guard(lua.state, 0, || {
                let mut count = 0;
                check_stack(lua.state, 4)?;
                lua.push_ref(lua.state, &self.0);
                let len = ffi::lua_rawlen(lua.state, -1) as ffi::lua_Integer;
                ffi::lua_pushnil(lua.state);
                while ffi::lua_next(lua.state, -2) != 0 {
                    // Every key must be an integer in 1..=len.
                    let mut isnum = 0;
                    let i = ffi::lua_tointegerx(lua.state, -2, &mut isnum);
                    if isnum == 0 {
                        return Err("Not all table keys are integers".into());
                    } else if i > len {
                        return Err("integer key in table is greater than length".into());
                    } else if i <= 0 {
                        return Err("integer key in table is less than 1".into());
                    }
                    // Skip missing keys
                    while count < (i - 1) as usize {
                        f(V::from_lua(LuaNil, lua)?);
                        count += 1;
                    }
                    f(V::from_lua(lua.pop_value(lua.state)?, lua)?);
                    count += 1;
                }
                ffi::lua_pop(lua.state, 1);
                Ok(())
            })
        }
    }
    /// Collect all the pairs in the table into a Vec
    pub fn pairs<K: FromLua<'lua>, V: FromLua<'lua>>(&self) -> LuaResult<Vec<(K, V)>> {
        let mut pairs = Vec::new();
        self.for_each_pair(|k, v| pairs.push((k, v)))?;
        Ok(pairs)
    }
    /// Collect all the values in an array-like table into a Vec
    pub fn array_values<V: FromLua<'lua>>(&self) -> LuaResult<Vec<V>> {
        let mut values = Vec::new();
        self.for_each_array_value(|v| values.push(v))?;
        Ok(values)
    }
}
/// Handle to an internal lua function
#[derive(Clone, Debug)]
pub struct LuaFunction<'lua>(LuaRef<'lua>);
impl<'lua> LuaFunction<'lua> {
    /// Calls the function with `args`, converting all of its return values
    /// into `R`. Lua errors are surfaced (with a traceback) as `Err`.
    pub fn call<A: ToLuaMulti<'lua>, R: FromLuaMulti<'lua>>(&self, args: A) -> LuaResult<R> {
        let lua = self.0.lua;
        unsafe {
            stack_guard(lua.state, 0, || {
                let args = args.to_lua_multi(lua)?;
                let nargs = args.len() as c_int;
                check_stack(lua.state, nargs + 3)?;
                let stack_start = ffi::lua_gettop(lua.state);
                lua.push_ref(lua.state, &self.0);
                for arg in args {
                    lua.push_value(lua.state, arg)?;
                }
                handle_error(lua.state,
                             pcall_with_traceback(lua.state, nargs, ffi::LUA_MULTRET))?;
                // Everything above the pre-call top is a return value;
                // popping reverses order, so results go in at the front.
                let nresults = ffi::lua_gettop(lua.state) - stack_start;
                let mut results = LuaMultiValue::new();
                for _ in 0..nresults {
                    results.push_front(lua.pop_value(lua.state)?);
                }
                R::from_lua_multi(results, lua)
            })
        }
    }
    /// Returns a new function that calls `self` with `args` prepended to
    /// whatever arguments the returned function is invoked with.
    pub fn bind<A: ToLuaMulti<'lua>>(&self, args: A) -> LuaResult<LuaFunction<'lua>> {
        // C trampoline: upvalue 1 is the target function, upvalue 2 the
        // bound-argument count, upvalues 3.. the bound arguments themselves.
        unsafe extern "C" fn bind_call_impl(state: *mut ffi::lua_State) -> c_int {
            let nargs = ffi::lua_gettop(state);
            let nbinds = ffi::lua_tointeger(state, ffi::lua_upvalueindex(2)) as c_int;
            check_stack(state, nbinds + 1).expect("not enough space to handle bound arguments");
            ffi::lua_pushvalue(state, ffi::lua_upvalueindex(1));
            ffi::lua_insert(state, 1);
            // TODO: This is quadratic
            for i in 0..nbinds {
                ffi::lua_pushvalue(state, ffi::lua_upvalueindex(i + 3));
                ffi::lua_insert(state, i + 2);
            }
            ffi::lua_call(state, nargs + nbinds, ffi::LUA_MULTRET);
            ffi::lua_gettop(state)
        }
        let lua = self.0.lua;
        unsafe {
            stack_guard(lua.state, 0, || {
                let args = args.to_lua_multi(lua)?;
                let nargs = args.len() as c_int;
                check_stack(lua.state, nargs + 2)?;
                lua.push_ref(lua.state, &self.0);
                ffi::lua_pushinteger(lua.state, nargs as ffi::lua_Integer);
                for arg in args {
                    lua.push_value(lua.state, arg)?;
                }
                // Capture function + count + args as the closure's upvalues.
                ffi::lua_pushcclosure(lua.state, bind_call_impl, nargs + 2);
                Ok(LuaFunction(lua.pop_ref(lua.state)))
            })
        }
    }
}
/// A LuaThread is Active before the coroutine function finishes, Dead after it finishes, and in
/// Error state if error has been called inside the coroutine.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum LuaThreadStatus {
    /// The coroutine has finished running and cannot be resumed.
    Dead,
    /// The coroutine is suspended (yielded) or has not started yet.
    Active,
    /// An error was raised inside the coroutine.
    Error,
}
/// Handle to an internal Lua coroutine (a reference held in the Lua registry).
#[derive(Clone, Debug)]
pub struct LuaThread<'lua>(LuaRef<'lua>);
impl<'lua> LuaThread<'lua> {
    /// If this thread has yielded a value, will return Some, otherwise the thread is finished and
    /// this will return None.
    ///
    /// NOTE(review): the body always ends in `.map(|r| Some(r))`, so as
    /// written this never actually returns `Ok(None)` — confirm against the
    /// intended contract above.
    pub fn resume<A: ToLuaMulti<'lua>, R: FromLuaMulti<'lua>>(&self,
                                                              args: A)
                                                              -> LuaResult<Option<R>> {
        let lua = self.0.lua;
        unsafe {
            stack_guard(lua.state, 0, || {
                check_stack(lua.state, 1)?;
                // Fetch the raw coroutine state from the registry reference.
                lua.push_ref(lua.state, &self.0);
                let thread_state = ffi::lua_tothread(lua.state, -1);
                ffi::lua_pop(lua.state, 1);
                let args = args.to_lua_multi(lua)?;
                let nargs = args.len() as c_int;
                // Arguments are pushed onto the coroutine's own stack.
                check_stack(thread_state, nargs)?;
                for arg in args {
                    lua.push_value(thread_state, arg)?;
                }
                handle_error(lua.state,
                             resume_with_traceback(thread_state, lua.state, nargs))?;
                // Whatever the coroutine left on its stack is the yield/return.
                let nresults = ffi::lua_gettop(thread_state);
                let mut results = LuaMultiValue::new();
                for _ in 0..nresults {
                    results.push_front(lua.pop_value(thread_state)?);
                }
                R::from_lua_multi(results, lua).map(|r| Some(r))
            })
        }
    }
    /// Report the coroutine's status, derived from `lua_status` plus whether
    /// any values remain on its stack (a never-started coroutine is Active).
    pub fn status(&self) -> LuaResult<LuaThreadStatus> {
        let lua = self.0.lua;
        unsafe {
            stack_guard(lua.state, 0, || {
                check_stack(lua.state, 1)?;
                lua.push_ref(lua.state, &self.0);
                let thread_state = ffi::lua_tothread(lua.state, -1);
                ffi::lua_pop(lua.state, 1);
                let status = ffi::lua_status(thread_state);
                if status != ffi::LUA_OK && status != ffi::LUA_YIELD {
                    Ok(LuaThreadStatus::Error)
                } else if status == ffi::LUA_YIELD || ffi::lua_gettop(thread_state) > 0 {
                    Ok(LuaThreadStatus::Active)
                } else {
                    Ok(LuaThreadStatus::Dead)
                }
            })
        }
    }
}
/// These are the metamethods that can be overridden using this API
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum LuaMetaMethod {
    // Arithmetic operators.
    Add,
    Sub,
    Mul,
    Div,
    Mod,
    Pow,
    /// Unary minus.
    Unm,
    /// The `..` concatenation operator.
    Concat,
    /// The `#` length operator.
    Len,
    // Comparison operators.
    Eq,
    Lt,
    Le,
    // Field access and assignment.
    Index,
    NewIndex,
    /// Calling the value like a function.
    Call,
}
/// Methods added will be added to the __index table on the metatable for the
/// userdata, so they can be called as userdata:method(args) as expected. If
/// there are any regular methods, and an "Index" metamethod is given, it will
/// be called as a *fallback* if the index doesn't match an existing regular
/// method.
pub struct LuaUserDataMethods<T> {
    // Regular methods, exposed through the metatable's __index table.
    methods: HashMap<String, LuaCallback>,
    // Metamethods, installed directly on the metatable.
    meta_methods: HashMap<LuaMetaMethod, LuaCallback>,
    // Ties the collection to the userdata type without storing a T.
    _type: PhantomData<T>,
}
impl<T: LuaUserDataType> LuaUserDataMethods<T> {
    /// Add a regular method as a function which accepts a &T parameter
    pub fn add_method<M>(&mut self, name: &str, method: M)
        where M: 'static + for<'a, 'lua> FnMut(&'lua Lua, &'a T, LuaMultiValue<'lua>)
                                               -> LuaResult<LuaMultiValue<'lua>>
    {
        self.methods
            .insert(name.to_owned(), Self::box_method(method));
    }
    /// Add a regular method as a function which accepts a &mut T parameter
    pub fn add_method_mut<M>(&mut self, name: &str, method: M)
        where M: 'static + for<'a, 'lua> FnMut(&'lua Lua, &'a mut T, LuaMultiValue<'lua>)
                                               -> LuaResult<LuaMultiValue<'lua>>
    {
        self.methods
            .insert(name.to_owned(), Self::box_method_mut(method));
    }
    /// Add a regular method as a function which accepts generic arguments, the first argument will
    /// always be a LuaUserData of real type T
    pub fn add_function<F>(&mut self, name: &str, function: F)
        where F: 'static + for<'a, 'lua> FnMut(&'lua Lua, LuaMultiValue<'lua>)
                                               -> LuaResult<LuaMultiValue<'lua>>
    {
        self.methods.insert(name.to_owned(), Box::new(function));
    }
    /// Add a metamethod as a function which accepts a &T parameter
    pub fn add_meta_method<M>(&mut self, meta: LuaMetaMethod, method: M)
        where M: 'static + for<'a, 'lua> FnMut(&'lua Lua, &'a T, LuaMultiValue<'lua>)
                                               -> LuaResult<LuaMultiValue<'lua>>
    {
        self.meta_methods.insert(meta, Self::box_method(method));
    }
    /// Add a metamethod as a function which accepts a &mut T parameter
    pub fn add_meta_method_mut<M>(&mut self, meta: LuaMetaMethod, method: M)
        where M: 'static + for<'a, 'lua> FnMut(&'lua Lua, &'a mut T, LuaMultiValue<'lua>)
                                               -> LuaResult<LuaMultiValue<'lua>>
    {
        self.meta_methods.insert(meta, Self::box_method_mut(method));
    }
    /// Add a metamethod as a function which accepts generic arguments, the first argument will
    /// always be a LuaUserData of real type T
    pub fn add_meta_function<F>(&mut self, meta: LuaMetaMethod, function: F)
        where F: 'static + for<'a, 'lua> FnMut(&'lua Lua, LuaMultiValue<'lua>)
                                               -> LuaResult<LuaMultiValue<'lua>>
    {
        self.meta_methods.insert(meta, Box::new(function));
    }
    // Wrap a &T method: the first Lua argument is checked and immutably
    // borrowed as the userdata receiver before the method body runs.
    fn box_method<M>(mut method: M) -> LuaCallback
        where M: 'static + for<'a, 'lua> FnMut(&'lua Lua, &'a T, LuaMultiValue<'lua>)
                                               -> LuaResult<LuaMultiValue<'lua>>
    {
        Box::new(move |lua, mut args| if let Some(front) = args.pop_front() {
                     let userdata = LuaUserData::from_lua(front, lua)?;
                     let userdata = userdata.borrow::<T>()?;
                     method(lua, &userdata, args)
                 } else {
                     Err("No userdata supplied as first argument to method".into())
                 })
    }
    // Wrap a &mut T method: like box_method, but takes the RefCell's mutable
    // borrow, so it will error if the value is already borrowed.
    fn box_method_mut<M>(mut method: M) -> LuaCallback
        where M: 'static + for<'a, 'lua> FnMut(&'lua Lua, &'a mut T, LuaMultiValue<'lua>)
                                               -> LuaResult<LuaMultiValue<'lua>>
    {
        Box::new(move |lua, mut args| if let Some(front) = args.pop_front() {
                     let userdata = LuaUserData::from_lua(front, lua)?;
                     let mut userdata = userdata.borrow_mut::<T>()?;
                     method(lua, &mut userdata, args)
                 } else {
                     Err("No userdata supplied as first argument to method".into())
                 })
    }
}
/// Trait for types that can be converted to `LuaUserData`
pub trait LuaUserDataType: 'static + Sized {
    /// Register this type's methods and metamethods; the default registers none.
    fn add_methods(_methods: &mut LuaUserDataMethods<Self>) {}
}
/// Handle to an internal instance of custom userdata. All userdata in this API
/// is based around RefCell, to best match the mutable semantics of the lua
/// language.
#[derive(Clone, Debug)]
pub struct LuaUserData<'lua>(LuaRef<'lua>);
impl<'lua> LuaUserData<'lua> {
    /// Whether this userdata's payload is a `RefCell<T>` for the given `T`.
    pub fn is<T: LuaUserDataType>(&self) -> bool {
        self.inspect(|_: &RefCell<T>| Ok(())).is_ok()
    }
    /// Borrow this userdata out of the internal RefCell that is held in lua.
    pub fn borrow<T: LuaUserDataType>(&self) -> LuaResult<Ref<T>> {
        self.inspect(|cell| Ok(cell.try_borrow()?))
    }
    /// Borrow mutably this userdata out of the internal RefCell that is held in lua.
    pub fn borrow_mut<T: LuaUserDataType>(&self) -> LuaResult<RefMut<T>> {
        self.inspect(|cell| Ok(cell.try_borrow_mut()?))
    }
    // Push the userdata, verify its metatable matches the registered one for
    // T (i.e. the payload really is a RefCell<T>), then hand the payload to
    // `func`.
    fn inspect<'a, T, R, F>(&'a self, func: F) -> LuaResult<R>
        where T: LuaUserDataType,
              F: FnOnce(&'a RefCell<T>) -> LuaResult<R>
    {
        unsafe {
            let lua = self.0.lua;
            stack_guard(lua.state, 0, move || {
                check_stack(lua.state, 3)?;
                lua.push_ref(lua.state, &self.0);
                let userdata = ffi::lua_touserdata(lua.state, -1);
                if userdata.is_null() {
                    return Err("value not userdata".into());
                }
                if ffi::lua_getmetatable(lua.state, -1) == 0 {
                    return Err("value has no metatable".into());
                }
                // Compare the value's metatable with T's registered metatable.
                ffi::lua_rawgeti(lua.state,
                                 ffi::LUA_REGISTRYINDEX,
                                 lua.userdata_metatable::<T>()? as ffi::lua_Integer);
                if ffi::lua_rawequal(lua.state, -1, -2) == 0 {
                    return Err("wrong metatable type for lua userdata".into());
                }
                let res = func(&*(userdata as *const RefCell<T>));
                // Pop the userdata and the two metatables pushed above.
                ffi::lua_pop(lua.state, 3);
                res
            })
        }
    }
}
/// Top level Lua struct which holds the lua state itself.
pub struct Lua {
    // The state this handle operates on (a coroutine state for callbacks).
    state: *mut ffi::lua_State,
    // The main state that owns `state`; used to check handle provenance.
    main_state: *mut ffi::lua_State,
    // True for the short-lived Lua values constructed inside callbacks;
    // those must not close the state on drop.
    ephemeral: bool,
}
impl Drop for Lua {
    fn drop(&mut self) {
        unsafe {
            // Only the owning (non-ephemeral) instance closes the state.
            if !self.ephemeral {
                ffi::lua_close(self.state);
            }
        }
    }
}
impl Lua {
    /// Create a fresh Lua state with the standard libraries opened and this
    /// wrapper's support structures installed in the registry.
    pub fn new() -> Lua {
        unsafe {
            let state = ffi::luaL_newstate();
            ffi::luaL_openlibs(state);
            // Registry slot holding a HashMap<TypeId, c_int> mapping each
            // registered userdata type to the registry id of its metatable.
            // A __gc metamethod drops the map when the state closes.
            stack_guard(state, 0, || {
                ffi::lua_pushlightuserdata(state,
                                           &LUA_USERDATA_REGISTRY_KEY as *const u8 as *mut c_void);
                let registered_userdata =
                    ffi::lua_newuserdata(state,
                                         mem::size_of::<RefCell<HashMap<TypeId, c_int>>>()) as
                    *mut RefCell<HashMap<TypeId, c_int>>;
                ptr::write(registered_userdata, RefCell::new(HashMap::new()));
                ffi::lua_newtable(state);
                push_string(state, "__gc");
                ffi::lua_pushcfunction(state, destructor::<RefCell<HashMap<TypeId, c_int>>>);
                ffi::lua_rawset(state, -3);
                ffi::lua_setmetatable(state, -2);
                ffi::lua_rawset(state, ffi::LUA_REGISTRYINDEX);
                Ok(())
            }).unwrap();
            // Shared metatable for boxed Rust callbacks: drops the box on
            // __gc and hides itself via __metatable = false.
            stack_guard(state, 0, || {
                ffi::lua_pushlightuserdata(state,
                                           &FUNCTION_METATABLE_REGISTRY_KEY as *const u8 as
                                           *mut c_void);
                ffi::lua_newtable(state);
                push_string(state, "__gc");
                ffi::lua_pushcfunction(state, destructor::<LuaCallback>);
                ffi::lua_rawset(state, -3);
                push_string(state, "__metatable");
                ffi::lua_pushboolean(state, 0);
                ffi::lua_rawset(state, -3);
                ffi::lua_rawset(state, ffi::LUA_REGISTRYINDEX);
                Ok(())
            }).unwrap();
            // Replace global pcall/xpcall with wrappers that are safe in the
            // presence of this binding's error handling.
            stack_guard(state, 0, || {
                ffi::lua_rawgeti(state, ffi::LUA_REGISTRYINDEX, ffi::LUA_RIDX_GLOBALS);
                push_string(state, "pcall");
                ffi::lua_pushcfunction(state, safe_pcall);
                ffi::lua_rawset(state, -3);
                push_string(state, "xpcall");
                ffi::lua_pushcfunction(state, safe_xpcall);
                ffi::lua_rawset(state, -3);
                ffi::lua_pop(state, 1);
                Ok(())
            }).unwrap();
            Lua {
                state,
                main_state: state,
                ephemeral: false,
            }
        }
    }
    /// Load `source` as a chunk (optionally named `name` for error messages),
    /// run it, and convert whatever it returns into `R`.
    pub fn load<'lua, R: FromLuaMulti<'lua>>(&'lua self,
                                             source: &str,
                                             name: Option<&str>)
                                             -> LuaResult<R> {
        unsafe {
            stack_guard(self.state, 0, || {
                let stack_start = ffi::lua_gettop(self.state);
                handle_error(self.state,
                             if let Some(name) = name {
                                 let name = CString::new(name.to_owned())?;
                                 ffi::luaL_loadbuffer(self.state,
                                                      source.as_ptr() as *const c_char,
                                                      source.len(),
                                                      name.as_ptr())
                             } else {
                                 ffi::luaL_loadbuffer(self.state,
                                                      source.as_ptr() as *const c_char,
                                                      source.len(),
                                                      ptr::null())
                             })?;
                check_stack(self.state, 2)?;
                handle_error(self.state,
                             pcall_with_traceback(self.state, 0, ffi::LUA_MULTRET))?;
                // Everything above stack_start is a return value of the chunk.
                let nresults = ffi::lua_gettop(self.state) - stack_start;
                let mut results = LuaMultiValue::new();
                for _ in 0..nresults {
                    results.push_front(self.pop_value(self.state)?);
                }
                R::from_lua_multi(results, self)
            })
        }
    }
    /// Evaluate the given expression or statement inside this Lua state, and if it is an
    /// expression or a statement with return, this returns the value.
    pub fn eval<'lua, R: FromLuaMulti<'lua>>(&'lua self, source: &str) -> LuaResult<R> {
        unsafe {
            stack_guard(self.state, 0, || {
                let stack_start = ffi::lua_gettop(self.state);
                // First, try interpreting the lua as an expression by adding
                // "return", then as a statement. This is the same thing the
                // actual lua repl does.
                let return_source = "return ".to_owned() + source;
                let mut res = ffi::luaL_loadbuffer(self.state,
                                                   return_source.as_ptr() as *const c_char,
                                                   return_source.len(),
                                                   ptr::null());
                if res == ffi::LUA_ERRSYNTAX {
                    ffi::lua_pop(self.state, 1);
                    res = ffi::luaL_loadbuffer(self.state,
                                               source.as_ptr() as *const c_char,
                                               source.len(),
                                               ptr::null());
                }
                handle_error(self.state, res)?;
                check_stack(self.state, 2)?;
                handle_error(self.state,
                             pcall_with_traceback(self.state, 0, ffi::LUA_MULTRET))?;
                let nresults = ffi::lua_gettop(self.state) - stack_start;
                let mut results = LuaMultiValue::new();
                for _ in 0..nresults {
                    results.push_front(self.pop_value(self.state)?);
                }
                R::from_lua_multi(results, self)
            })
        }
    }
    /// Intern `s` as a Lua string and return a handle to it.
    pub fn create_string(&self, s: &str) -> LuaResult<LuaString> {
        unsafe {
            stack_guard(self.state, 0, || {
                check_stack(self.state, 1)?;
                ffi::lua_pushlstring(self.state, s.as_ptr() as *const c_char, s.len());
                Ok(LuaString(self.pop_ref(self.state)))
            })
        }
    }
    /// Create a new, empty Lua table.
    pub fn create_empty_table(&self) -> LuaResult<LuaTable> {
        unsafe {
            stack_guard(self.state, 0, || {
                check_stack(self.state, 1)?;
                ffi::lua_newtable(self.state);
                Ok(LuaTable(self.pop_ref(self.state)))
            })
        }
    }
    /// Create a table populated from an iterator of key/value pairs.
    pub fn create_table<'lua, K, V, I>(&'lua self, cont: I) -> LuaResult<LuaTable>
        where K: ToLua<'lua>,
              V: ToLua<'lua>,
              I: IntoIterator<Item = (K, V)>
    {
        unsafe {
            stack_guard(self.state, 0, || {
                check_stack(self.state, 3)?;
                ffi::lua_newtable(self.state);
                for (k, v) in cont.into_iter() {
                    self.push_value(self.state, k.to_lua(self)?)?;
                    self.push_value(self.state, v.to_lua(self)?)?;
                    ffi::lua_rawset(self.state, -3);
                }
                Ok(LuaTable(self.pop_ref(self.state)))
            })
        }
    }
    /// Create an array-style table from a sequence, using Lua's 1-based
    /// integer keys.
    pub fn create_array_table<'lua, T, I>(&'lua self, cont: I) -> LuaResult<LuaTable>
        where T: ToLua<'lua>,
              I: IntoIterator<Item = T>
    {
        self.create_table(cont.into_iter().enumerate().map(|(k, v)| (k + 1, v)))
    }
    /// Wrap a Rust closure as a callable Lua function.
    pub fn create_function<F>(&self, func: F) -> LuaResult<LuaFunction>
        where F: 'static + for<'a> FnMut(&'a Lua, LuaMultiValue<'a>) -> LuaResult<LuaMultiValue<'a>>
    {
        self.create_callback_function(Box::new(func))
    }
    /// Create a coroutine whose body is the given Lua function.
    pub fn create_thread<'lua>(&'lua self, func: LuaFunction<'lua>) -> LuaResult<LuaThread<'lua>> {
        unsafe {
            stack_guard(self.state, 0, move || {
                check_stack(self.state, 1)?;
                let thread_state = ffi::lua_newthread(self.state);
                // The function goes onto the new coroutine's own stack.
                self.push_ref(thread_state, &func.0);
                Ok(LuaThread(self.pop_ref(self.state)))
            })
        }
    }
    /// Move `data` into a Lua userdata (wrapped in a RefCell) and attach the
    /// metatable registered for `T`.
    pub fn create_userdata<T>(&self, data: T) -> LuaResult<LuaUserData>
        where T: LuaUserDataType
    {
        unsafe {
            stack_guard(self.state, 0, move || {
                check_stack(self.state, 2)?;
                let data = RefCell::new(data);
                let data_userdata = ffi::lua_newuserdata(self.state,
                                                         mem::size_of::<RefCell<T>>()) as
                                    *mut RefCell<T>;
                ptr::write(data_userdata, data);
                ffi::lua_rawgeti(self.state,
                                 ffi::LUA_REGISTRYINDEX,
                                 self.userdata_metatable::<T>()? as ffi::lua_Integer);
                ffi::lua_setmetatable(self.state, -2);
                Ok(LuaUserData(self.pop_ref(self.state)))
            })
        }
    }
    /// Returns a handle to the globals table
    pub fn globals<'lua>(&'lua self) -> LuaResult<LuaTable<'lua>> {
        unsafe {
            check_stack(self.state, 1)?;
            ffi::lua_rawgeti(self.state, ffi::LUA_REGISTRYINDEX, ffi::LUA_RIDX_GLOBALS);
            Ok(LuaTable(self.pop_ref(self.state)))
        }
    }
    /// Coerce a Lua value to a string using Lua's own conversion rules
    /// (numbers convert; most other types do not).
    pub fn coerce_string<'lua>(&'lua self, v: LuaValue<'lua>) -> LuaResult<LuaString<'lua>> {
        match v {
            LuaValue::String(s) => Ok(s),
            v => unsafe {
                stack_guard(self.state, 0, || {
                    check_stack(self.state, 1)?;
                    self.push_value(self.state, v)?;
                    // NOTE(review): on the error path the pushed value is not
                    // popped here — confirm stack_guard rebalances on Err.
                    if ffi::lua_tostring(self.state, -1).is_null() {
                        Err("cannot convert lua value to string".into())
                    } else {
                        Ok(LuaString(self.pop_ref(self.state)))
                    }
                })
            },
        }
    }
    /// Coerce a Lua value to an integer using Lua's conversion rules.
    pub fn coerce_integer(&self, v: LuaValue) -> LuaResult<LuaInteger> {
        match v {
            LuaValue::Integer(i) => Ok(i),
            LuaValue::Number(n) => Ok(n as LuaInteger),
            v => unsafe {
                stack_guard(self.state, 0, || {
                    check_stack(self.state, 1)?;
                    self.push_value(self.state, v)?;
                    let mut isint = 0;
                    let i = ffi::lua_tointegerx(self.state, -1, &mut isint);
                    // NOTE(review): the Err branch leaves the pushed value on
                    // the stack — confirm stack_guard rebalances on Err.
                    if isint == 0 {
                        Err("cannot convert lua value to integer".into())
                    } else {
                        ffi::lua_pop(self.state, 1);
                        Ok(i)
                    }
                })
            },
        }
    }
    /// Coerce a Lua value to a number using Lua's conversion rules.
    pub fn coerce_number(&self, v: LuaValue) -> LuaResult<LuaNumber> {
        match v {
            LuaValue::Integer(i) => Ok(i as LuaNumber),
            LuaValue::Number(n) => Ok(n),
            v => unsafe {
                stack_guard(self.state, 0, || {
                    check_stack(self.state, 1)?;
                    self.push_value(self.state, v)?;
                    let mut isnum = 0;
                    let n = ffi::lua_tonumberx(self.state, -1, &mut isnum);
                    if isnum == 0 {
                        Err("cannot convert lua value to number".into())
                    } else {
                        ffi::lua_pop(self.state, 1);
                        Ok(n)
                    }
                })
            },
        }
    }
    /// Convert a Rust value into a single Lua value.
    pub fn from<'lua, T: ToLua<'lua>>(&'lua self, t: T) -> LuaResult<LuaValue<'lua>> {
        t.to_lua(self)
    }
    /// Convert a single Lua value into a Rust value.
    pub fn to<'lua, T: FromLua<'lua>>(&'lua self, value: LuaValue<'lua>) -> LuaResult<T> {
        T::from_lua(value, self)
    }
    /// Convert a Rust value into a Lua multi-value.
    pub fn pack<'lua, T: ToLuaMulti<'lua>>(&'lua self, t: T) -> LuaResult<LuaMultiValue<'lua>> {
        t.to_lua_multi(self)
    }
    /// Convert a Lua multi-value into a Rust value.
    pub fn unpack<'lua, T: FromLuaMulti<'lua>>(&'lua self,
                                               value: LuaMultiValue<'lua>)
                                               -> LuaResult<T> {
        T::from_lua_multi(value, self)
    }
    // Box a Rust callback into a Lua closure: the boxed callback lives in a
    // userdata (upvalue 1) whose metatable runs its destructor on __gc.
    fn create_callback_function(&self, func: LuaCallback) -> LuaResult<LuaFunction> {
        unsafe extern "C" fn callback_call_impl(state: *mut ffi::lua_State) -> c_int {
            callback_error(state, || {
                // Ephemeral handle: shares the state but must not close it.
                let lua = Lua {
                    state: state,
                    main_state: main_state(state),
                    ephemeral: true,
                };
                let func = &mut *(ffi::lua_touserdata(state, ffi::lua_upvalueindex(1)) as
                                  *mut LuaCallback);
                let nargs = ffi::lua_gettop(state);
                let mut args = LuaMultiValue::new();
                for _ in 0..nargs {
                    args.push_front(lua.pop_value(state)?);
                }
                let results = func(&lua, args)?;
                let nresults = results.len() as c_int;
                for r in results {
                    lua.push_value(state, r)?;
                }
                Ok(nresults)
            })
        }
        unsafe {
            stack_guard(self.state, 0, move || {
                check_stack(self.state, 2)?;
                let func_userdata = ffi::lua_newuserdata(self.state,
                                                         mem::size_of::<LuaCallback>()) as
                                    *mut LuaCallback;
                ptr::write(func_userdata, func);
                ffi::lua_pushlightuserdata(self.state,
                                           &FUNCTION_METATABLE_REGISTRY_KEY as *const u8 as
                                           *mut c_void);
                ffi::lua_gettable(self.state, ffi::LUA_REGISTRYINDEX);
                ffi::lua_setmetatable(self.state, -2);
                ffi::lua_pushcclosure(self.state, callback_call_impl, 1);
                Ok(LuaFunction(self.pop_ref(self.state)))
            })
        }
    }
    // Push a LuaValue onto the given state's stack (net stack change +1).
    unsafe fn push_value(&self, state: *mut ffi::lua_State, value: LuaValue) -> LuaResult<()> {
        stack_guard(state, 1, move || {
            match value {
                LuaValue::Nil => {
                    ffi::lua_pushnil(state);
                }
                LuaValue::Boolean(b) => {
                    ffi::lua_pushboolean(state, if b { 1 } else { 0 });
                }
                LuaValue::LightUserData(ud) => {
                    ffi::lua_pushlightuserdata(state, ud.0);
                }
                LuaValue::Integer(i) => {
                    ffi::lua_pushinteger(state, i);
                }
                LuaValue::Number(n) => {
                    ffi::lua_pushnumber(state, n);
                }
                LuaValue::String(s) => {
                    self.push_ref(state, &s.0);
                }
                LuaValue::Table(t) => {
                    self.push_ref(state, &t.0);
                }
                LuaValue::Function(f) => {
                    self.push_ref(state, &f.0);
                }
                LuaValue::UserData(ud) => {
                    self.push_ref(state, &ud.0);
                }
                LuaValue::Thread(t) => {
                    self.push_ref(state, &t.0);
                }
            }
            Ok(())
        })
    }
    // Pop the top of the stack into a LuaValue (net stack change -1).
    // Reference-like types (string/table/function/userdata/thread) are moved
    // into the registry via pop_ref rather than copied.
    unsafe fn pop_value(&self, state: *mut ffi::lua_State) -> LuaResult<LuaValue> {
        stack_guard(state, -1, || match ffi::lua_type(state, -1) {
            ffi::LUA_TNIL => {
                ffi::lua_pop(state, 1);
                Ok(LuaNil)
            }
            ffi::LUA_TBOOLEAN => {
                let b = LuaValue::Boolean(ffi::lua_toboolean(state, -1) != 0);
                ffi::lua_pop(state, 1);
                Ok(b)
            }
            ffi::LUA_TLIGHTUSERDATA => {
                let ud = LuaValue::LightUserData(LightUserData(ffi::lua_touserdata(state, -1)));
                ffi::lua_pop(state, 1);
                Ok(ud)
            }
            ffi::LUA_TNUMBER => {
                if ffi::lua_isinteger(state, -1) != 0 {
                    let i = LuaValue::Integer(ffi::lua_tointeger(state, -1));
                    ffi::lua_pop(state, 1);
                    Ok(i)
                } else {
                    let n = LuaValue::Number(ffi::lua_tonumber(state, -1));
                    ffi::lua_pop(state, 1);
                    Ok(n)
                }
            }
            ffi::LUA_TSTRING => Ok(LuaValue::String(LuaString(self.pop_ref(state)))),
            ffi::LUA_TTABLE => Ok(LuaValue::Table(LuaTable(self.pop_ref(state)))),
            ffi::LUA_TFUNCTION => Ok(LuaValue::Function(LuaFunction(self.pop_ref(state)))),
            ffi::LUA_TUSERDATA => Ok(LuaValue::UserData(LuaUserData(self.pop_ref(state)))),
            ffi::LUA_TTHREAD => Ok(LuaValue::Thread(LuaThread(self.pop_ref(state)))),
            _ => Err("Unsupported type in pop_value".into()),
        })
    }
    // Push the value behind a registry reference onto the stack. Panics if
    // the reference was created by a different Lua main state.
    unsafe fn push_ref(&self, state: *mut ffi::lua_State, lref: &LuaRef) {
        assert_eq!(lref.lua.main_state,
                   self.main_state,
                   "Lua instance passed LuaValue created from a different Lua");
        ffi::lua_rawgeti(state,
                         ffi::LUA_REGISTRYINDEX,
                         lref.registry_id as ffi::lua_Integer);
    }
    // Pop the top of the stack into the registry and return a reference to it.
    unsafe fn pop_ref(&self, state: *mut ffi::lua_State) -> LuaRef {
        let registry_id = ffi::luaL_ref(state, ffi::LUA_REGISTRYINDEX);
        LuaRef {
            lua: self,
            registry_id: registry_id,
        }
    }
    // Return the registry id of T's metatable, building and caching it on
    // first use from T::add_methods().
    unsafe fn userdata_metatable<T: LuaUserDataType>(&self) -> LuaResult<c_int> {
        // Used if both an __index metamethod is set and regular methods, checks methods table
        // first, then __index metamethod.
        unsafe extern "C" fn meta_index_impl(state: *mut ffi::lua_State) -> c_int {
            ffi::lua_pushvalue(state, -1);
            ffi::lua_gettable(state, ffi::lua_upvalueindex(1));
            if ffi::lua_isnil(state, -1) == 0 {
                ffi::lua_insert(state, -3);
                ffi::lua_pop(state, 2);
                1
            } else {
                ffi::lua_pop(state, 1);
                ffi::lua_pushvalue(state, ffi::lua_upvalueindex(2));
                ffi::lua_insert(state, -3);
                ffi::lua_call(state, 2, 1);
                1
            }
        }
        stack_guard(self.state, 0, move || {
            check_stack(self.state, 3)?;
            ffi::lua_pushlightuserdata(self.state,
                                       &LUA_USERDATA_REGISTRY_KEY as *const u8 as *mut c_void);
            ffi::lua_gettable(self.state, ffi::LUA_REGISTRYINDEX);
            let registered_userdata = ffi::lua_touserdata(self.state, -1) as
                                      *mut RefCell<HashMap<TypeId, c_int>>;
            let mut map = (*registered_userdata).borrow_mut();
            ffi::lua_pop(self.state, 1);
            match map.entry(TypeId::of::<T>()) {
                HashMapEntry::Occupied(entry) => Ok(*entry.get()),
                HashMapEntry::Vacant(entry) => {
                    ffi::lua_newtable(self.state);
                    let mut methods = LuaUserDataMethods {
                        methods: HashMap::new(),
                        meta_methods: HashMap::new(),
                        _type: PhantomData,
                    };
                    T::add_methods(&mut methods);
                    let has_methods = !methods.methods.is_empty();
                    if has_methods {
                        push_string(self.state, "__index");
                        ffi::lua_newtable(self.state);
                        check_stack(self.state, methods.methods.len() as c_int * 2)?;
                        for (k, m) in methods.methods {
                            push_string(self.state, &k);
                            self.push_value(self.state,
                                            LuaValue::Function(self.create_callback_function(m)?))?;
                            ffi::lua_rawset(self.state, -3);
                        }
                        ffi::lua_rawset(self.state, -3);
                    }
                    check_stack(self.state, methods.meta_methods.len() as c_int * 2)?;
                    for (k, m) in methods.meta_methods {
                        if k == LuaMetaMethod::Index && has_methods {
                            // Chain: methods table first, user __index second.
                            push_string(self.state, "__index");
                            ffi::lua_pushvalue(self.state, -1);
                            ffi::lua_gettable(self.state, -3);
                            self.push_value(self.state,
                                            LuaValue::Function(self.create_callback_function(m)?))?;
                            ffi::lua_pushcclosure(self.state, meta_index_impl, 2);
                            ffi::lua_rawset(self.state, -3);
                        } else {
                            let name = match k {
                                LuaMetaMethod::Add => "__add",
                                LuaMetaMethod::Sub => "__sub",
                                LuaMetaMethod::Mul => "__mul",
                                LuaMetaMethod::Div => "__div",
                                LuaMetaMethod::Mod => "__mod",
                                LuaMetaMethod::Pow => "__pow",
                                LuaMetaMethod::Unm => "__unm",
                                LuaMetaMethod::Concat => "__concat",
                                LuaMetaMethod::Len => "__len",
                                LuaMetaMethod::Eq => "__eq",
                                LuaMetaMethod::Lt => "__lt",
                                LuaMetaMethod::Le => "__le",
                                LuaMetaMethod::Index => "__index",
                                // BUG FIX: was "__newIndex"; Lua metamethod
                                // names are all-lowercase, so the metamethod
                                // was never triggered by assignments.
                                LuaMetaMethod::NewIndex => "__newindex",
                                LuaMetaMethod::Call => "__call",
                            };
                            push_string(self.state, name);
                            self.push_value(self.state,
                                            LuaValue::Function(self.create_callback_function(m)?))?;
                            ffi::lua_rawset(self.state, -3);
                        }
                    }
                    push_string(self.state, "__gc");
                    ffi::lua_pushcfunction(self.state, destructor::<RefCell<T>>);
                    ffi::lua_rawset(self.state, -3);
                    push_string(self.state, "__metatable");
                    ffi::lua_pushboolean(self.state, 0);
                    ffi::lua_rawset(self.state, -3);
                    let id = ffi::luaL_ref(self.state, ffi::LUA_REGISTRYINDEX);
                    entry.insert(id);
                    Ok(id)
                }
            }
        })
    }
}
// These statics are never read; only their *addresses* are used (as
// lightuserdata) to form unique keys into the Lua registry.
static LUA_USERDATA_REGISTRY_KEY: u8 = 0;
static FUNCTION_METATABLE_REGISTRY_KEY: u8 = 0;
|
// This module attempts to implement the Generalized Policy Iteration
// (GPI) algorithm as an abstract concept, allowing clients to
// "plug in" different policy evaluation/improvement algorithms.
use std::collections::HashMap;
use rand::Rng;
use game::{State, Action, Deck, Reward, MIN_SUM, MAX_SUM, MIN_CARD, MAX_CARD};
use game::Action::*;
use util::increment;
// This trait encapsulates a specific algorithm to use for GPI.
pub trait Alg {
    // Given the current state, return the action that maximizes reward
    // in the long-term.
    fn choose_best_action(&self, state: State) -> Action;
    // Return the expected long-term reward if we take the given action
    // at the given state.
    fn get_expected_reward(&self, state: State, action: Action) -> Reward;
    // A hook that's called whenever an episode begins. Default: no-op.
    fn on_episode_begin(&mut self) {
    }
    // Whether or not the algorithm needs a valid 'next_action' passed to
    // on_episode_step(). Default: false.
    fn needs_next_action(&self) -> bool {
        false
    }
    // A hook that's called whenever an episode transitions from one state
    // to another, as the result of an action. Can optionally return a
    // successor action to use next.
    //
    // Note that next_action will only be valid if needs_next_action()
    // returns true.
    fn on_episode_step(&mut self, state: State, action: Action,
                       reward: Reward, next_state: State,
                       next_action: Option<Action>) -> Option<Action> {
        // Default implementation ignores everything and chooses nothing.
        let _ = (state, action, reward, next_state, next_action);
        None
    }
    // A hook that's called whenever an episode ends. Implementations can
    // use this to e.g. update their value functions.
    fn on_episode_end(&mut self) {
    }
    // Print the expected reward for every state given that we
    // take the optimal action at each state. Rows are player sums (highest
    // first), columns are the dealer's showing card; values are scaled by
    // 100 and truncated to integers for compact display.
    fn print_optimal_values(&self) {
        let dealer_rng = MIN_CARD..MAX_CARD + 1;
        for player in (MIN_SUM..MAX_SUM + 1).rev() {
            for dealer in dealer_rng.clone() {
                let state = State { dealer, player };
                let action = self.choose_best_action(state);
                let value = self.get_expected_reward(state, action);
                let ivalue = (value * 100.0) as i32;
                print!("{:4} ", ivalue);
            }
            println!("  <- player sum = {}", player);
        }
        // Horizontal separator under the value grid.
        for _ in dealer_rng.clone() {
            print!("----");
        }
        println!();
        // Column labels: the dealer's visible card, with 1 shown as Ace.
        for dealer in dealer_rng {
            if dealer == 1 {
                print!("   A ");
            } else {
                print!("{:4} ", dealer);
            }
        }
        println!("  <- dealer showing");
    }
}
// A policy maps states to actions and observes episode lifecycle events.
pub trait Policy {
    // Select the action to take in the given state.
    fn choose_action(&mut self, state: State) -> Action;
    fn on_episode_begin(&mut self);
    // Called on each state transition; may return the next action to take.
    fn on_episode_step(&mut self, state: State, action: Action,
                       reward: Reward, next_state: State) -> Option<Action>;
    fn on_episode_end(&mut self);
}
// How the exploration rate epsilon is determined.
#[derive(Debug, PartialEq)]
enum EpsilonType {
    // Epsilon decays per state with the number of visits.
    Varying,
    // Epsilon is a fixed probability.
    Constant(f32),
}
// An epsilon-greedy policy: with probability epsilon take a random action,
// otherwise take the algorithm's current best action.
pub struct EpsilonGreedyPolicy<T: Rng, U: Alg> {
    // Visit counts per state; only maintained for EpsilonType::Varying.
    times_visited: HashMap<State, f32>,
    rng: T,
    pub alg: U,
    epsilon: EpsilonType,
}
impl<T: Rng, U: Alg> EpsilonGreedyPolicy<T, U> {
    // Build a policy that uses the per-state, visit-count-based epsilon
    // schedule by default.
    pub fn new(rng: T, alg: U) -> Self {
        let times_visited = HashMap::new();
        let epsilon = EpsilonType::Varying;
        EpsilonGreedyPolicy { times_visited, rng, alg, epsilon }
    }
    // Builder-style switch to a fixed exploration probability.
    pub fn with_constant_epsilon(mut self, value: f32) -> Self {
        self.epsilon = EpsilonType::Constant(value);
        self
    }
    // Pick Hit or Stick with equal probability.
    fn exploratory_action(&mut self) -> Action {
        match self.rng.gen_weighted_bool(2) {
            true => Hit,
            false => Stick,
        }
    }
    // Roll against the current epsilon to decide whether to explore.
    fn should_explore(&mut self, state: State) -> bool {
        let epsilon = match self.epsilon {
            EpsilonType::Constant(value) => value,
            EpsilonType::Varying => {
                // Epsilon = N0 / (N0 + visits(state)), so exploration decays
                // as a state is seen more often.
                let n_0 = 100.0;
                let visits = self.times_visited.get(&state).cloned().unwrap_or(0.0);
                n_0 / (n_0 + visits)
            }
        };
        self.rng.next_f32() < epsilon
    }
}
impl<T: Rng, U: Alg> Policy for EpsilonGreedyPolicy<T, U> {
    // Explore with probability epsilon, otherwise exploit the algorithm's
    // current best action.
    fn choose_action(&mut self, state: State) -> Action {
        if self.should_explore(state) {
            self.exploratory_action()
        } else {
            self.alg.choose_best_action(state)
        }
    }
    fn on_episode_begin(&mut self) {
        self.alg.on_episode_begin();
    }
    // Record the visit (for the varying epsilon schedule) and forward the
    // transition to the algorithm, supplying a next action only if asked.
    fn on_episode_step(&mut self, state: State, action: Action,
                       reward: Reward, next_state: State) -> Option<Action> {
        if self.epsilon == EpsilonType::Varying {
            increment(&mut self.times_visited, state, 1.0);
        }
        // Argh, I wanted to just pass the policy in as the last argument, so
        // that the algorithm (e.g. Sarsa) could calculate the next action
        // only if it needed to, but that raised an error complaining that
        // `EpsilonGreedyPolicy` didn't implement `Policy`.
        //
        // Then I tried changing the last parameter to just being
        // `FnMut(State) -> Action`, but that raised errors with the
        // borrow checker when I tried passing `self.choose_action`.
        //
        // Then I tried using RefCells to make this class' mutability
        // more granular, but that didn't work either:
        //
        // https://github.com/toolness/ml-fun/pull/2
        //
        // The only remaining option is to create Alg::needs_next_action(),
        // and pass the next action to the algorithm only if that returns
        // true.
        //
        // It should also be noted that tinkering with the calling
        // convention of `Alg.on_episode_step()` is difficult once we
        // have multiple trait implementations in place, since we have to
        // change the trait definition *and* every implementation site
        // just to see what the borrow checker thinks.
        let next_action = if self.alg.needs_next_action() {
            Some(self.choose_action(next_state))
        } else {
            None
        };
        self.alg.on_episode_step(state, action, reward, next_state,
                                 next_action)
    }
    fn on_episode_end(&mut self) {
        self.alg.on_episode_end();
    }
}
// Driver for Generalized Policy Iteration: repeatedly plays episodes with
// the given deck, letting the policy (and its algorithm) learn from them.
pub struct Gpi<T: Deck, U: Policy> {
    // Number of episodes played so far.
    episodes: i32,
    deck: T,
    pub policy: U,
}
impl<T: Deck, U: Policy> Gpi<T, U> {
    // Build a driver that has played no episodes yet.
    pub fn new(deck: T, policy: U) -> Self {
        Gpi {
            episodes: 0,
            deck,
            policy,
        }
    }
    // Play one full episode, invoking the policy's lifecycle hooks and
    // stepping the game until a terminal state is reached.
    pub fn play_episode(&mut self) {
        let mut state = State::new(&mut self.deck);
        self.policy.on_episode_begin();
        let mut action = self.policy.choose_action(state);
        while !state.is_terminal() {
            let (next_state, reward) = state.step(&mut self.deck, action);
            // The step hook may dictate the successor action (e.g. Sarsa);
            // when it declines, ask the policy to pick one for next_state.
            action = match self.policy.on_episode_step(state, action, reward,
                                                       next_state) {
                Some(next_action) => next_action,
                None => self.policy.choose_action(next_state),
            };
            state = next_state;
        }
        self.policy.on_episode_end();
        self.episodes += 1;
    }
    // Play `count` episodes back to back.
    pub fn play_episodes(&mut self, count: i32) {
        for _ in 0..count {
            self.play_episode();
        }
    }
}
#[cfg(test)]
pub mod tests {
    use game::{RngDeck, State, Action, Reward};
    use rand::thread_rng;
    use gpi::{Gpi, Alg, EpsilonGreedyPolicy, EpsilonType};
    // Minimal Alg stub: always returns the configured action and reward.
    pub struct DumbAlg {
        pub action: Action,
        pub reward: Reward,
    }
    impl Alg for DumbAlg {
        fn choose_best_action(&self, _: State) -> Action {
            self.action
        }
        fn get_expected_reward(&self, _: State, _: Action) -> Reward {
            self.reward
        }
    }
    // Episodes should run to completion and be counted.
    #[test]
    fn test_play_episodes_works() {
        let deck = RngDeck::new(thread_rng());
        let policy = EpsilonGreedyPolicy::new(thread_rng(), DumbAlg {
            action: Action::Hit,
            reward: 0.0,
        });
        // new() defaults to the varying epsilon schedule.
        assert_eq!(policy.epsilon, EpsilonType::Varying);
        let mut gpi = Gpi::new(deck, policy);
        gpi.play_episodes(3);
        assert_eq!(gpi.episodes, 3);
    }
    // The builder should switch the policy to a constant epsilon.
    #[test]
    fn test_constant_epsilon_works() {
        let policy = EpsilonGreedyPolicy::new(thread_rng(), DumbAlg {
            action: Action::Hit,
            reward: 0.0,
        }).with_constant_epsilon(0.5);
        assert_eq!(policy.epsilon, EpsilonType::Constant(0.5));
    }
}
|
// Copyright (c) The Starcoin Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::difficulty;
use crate::difficulty::{difficult_to_target, target_to_difficulty};
use anyhow::{Error, Result};
use argon2::{self, Config};
use byteorder::{ByteOrder, LittleEndian, ReadBytesExt, WriteBytesExt};
use config::NodeConfig;
use logger::prelude::*;
use rand::Rng;
use std::convert::TryFrom;
use std::io::Cursor;
use std::sync::Arc;
use traits::ChainReader;
use traits::{Consensus, ConsensusHeader};
use types::block::BlockHeader;
use types::{H256, U256};
/// Consensus header for the Argon2 proof-of-work: just the solving nonce.
#[derive(Clone, Debug)]
pub struct ArgonConsensusHeader {
    pub nonce: u64,
}
impl ConsensusHeader for ArgonConsensusHeader {}
impl TryFrom<Vec<u8>> for ArgonConsensusHeader {
    type Error = Error;
    /// Decode a header from its little-endian byte representation; fails if
    /// fewer than 8 bytes are available.
    fn try_from(value: Vec<u8>) -> Result<Self> {
        let nonce = Cursor::new(value.as_slice()).read_u64::<LittleEndian>()?;
        Ok(ArgonConsensusHeader { nonce })
    }
}
// Implementing `From` (rather than `Into` directly) follows Rust convention;
// the standard blanket impl still provides `Into<Vec<u8>>`, so existing
// `.into()` call sites keep working unchanged.
impl From<ArgonConsensusHeader> for Vec<u8> {
    /// Encode the header as its 8-byte little-endian nonce.
    fn from(header: ArgonConsensusHeader) -> Self {
        let mut buf = vec![0u8; 8];
        LittleEndian::write_u64(buf.as_mut(), header.nonce);
        buf
    }
}
/// Argon2-based proof-of-work consensus implementation (stateless).
#[derive(Clone)]
pub struct ArgonConsensus {}
impl Consensus for ArgonConsensus {
    type ConsensusHeader = ArgonConsensusHeader;
    /// Difficulty for the next block, derived from the chain's next
    /// required work target.
    fn calculate_next_difficulty(
        _config: Arc<NodeConfig>,
        reader: &dyn ChainReader,
    ) -> Result<U256> {
        let target = difficulty::get_next_work_required(reader)?;
        Ok(target_to_difficulty(target))
    }
    /// Brute-force search for a nonce whose Argon2 PoW hash is small enough.
    fn solve_consensus_header(header_hash: &[u8], difficulty: U256) -> Self::ConsensusHeader {
        let mut nonce = generate_nonce();
        loop {
            let pow_hash: U256 = calculate_hash(&set_header_nonce(&header_hash, nonce))
                .expect("calculate hash should work")
                .into();
            // NOTE(review): this loop compares the hash against `difficulty`
            // directly, while `verify` below compares against
            // `difficult_to_target(difficulty)` — confirm both sides mean
            // the same quantity.
            if pow_hash > difficulty {
                // NOTE(review): `nonce += 1` will panic on overflow in debug
                // builds if the search ever wraps past u64::MAX.
                nonce += 1;
                continue;
            }
            break;
        }
        ArgonConsensusHeader { nonce }
    }
    /// Verify a block header: its difficulty must equal the computed next
    /// difficulty, and its embedded nonce must satisfy the proof-of-work
    /// over the parent hash's hex encoding.
    fn verify(
        config: Arc<NodeConfig>,
        reader: &dyn ChainReader,
        header: &BlockHeader,
    ) -> Result<()> {
        let difficulty = ArgonConsensus::calculate_next_difficulty(config, reader)?;
        if header.difficulty() != difficulty {
            return Err(anyhow::Error::msg("Invalid difficulty"));
        }
        let consensus_header: ArgonConsensusHeader =
            ArgonConsensusHeader::try_from(header.consensus_header().to_vec())?;
        let nonce = consensus_header.nonce;
        // The PoW input is the hex string of the parent hash, not raw bytes.
        let header = header.parent_hash().to_hex();
        // NOTE(review): `{:o}` prints the nonce in octal — confirm intended.
        debug!(
            "Verify header, nonce, difficulty :{:?}, {:o}, {:x}",
            header, nonce, difficulty
        );
        if verify(header.as_bytes(), nonce, difficulty) {
            Ok(())
        } else {
            Err(anyhow::Error::msg("Invalid header"))
        }
    }
}
/// Serialize a u64 into its 8-byte little-endian representation.
pub fn u64_to_vec(u: u64) -> Vec<u8> {
    let mut bytes = Vec::with_capacity(8);
    // Writing into a Vec cannot fail.
    bytes.write_u64::<LittleEndian>(u).unwrap();
    bytes
}
/// Check that `nonce` is a valid proof-of-work solution for `header`: the
/// Argon2 hash of the nonce-suffixed header must not exceed the target
/// derived from `difficulty`. A hashing failure counts as invalid.
fn verify(header: &[u8], nonce: u64, difficulty: U256) -> bool {
    let pow_header = set_header_nonce(header, nonce);
    // match instead of is_err()/unwrap(): no panic path, and the final
    // comparison is returned directly as the boolean result.
    match calculate_hash(&pow_header) {
        Ok(hash) => {
            let hash_u256: U256 = hash.into();
            hash_u256 <= difficult_to_target(difficulty)
        }
        Err(_) => false,
    }
}
/// Hash `header` with Argon2 at a raised memory cost (1024), using the
/// header itself as the salt, and return the digest as an H256.
pub fn calculate_hash(header: &[u8]) -> Result<H256> {
    let mut cfg = Config::default();
    cfg.mem_cost = 1024;
    let digest = argon2::hash_raw(header, header, &cfg)?;
    let hash: H256 = digest.as_slice().into();
    Ok(hash)
}
/// Produce a random starting nonce for the mining search.
/// (A stray `rng.gen::<u64>()` whose result was discarded has been removed;
/// the range draw below is the only value used.)
fn generate_nonce() -> u64 {
    let mut rng = rand::thread_rng();
    rng.gen_range(0, u64::max_value())
}
/// Replace the last 8 bytes of `header` with the little-endian encoding of
/// `nonce`, returning the new buffer.
///
/// Uses `saturating_sub` so headers shorter than 8 bytes are left whole and
/// the nonce is appended; the original `len - 8` underflowed (panicking in
/// debug builds) for such inputs.
pub fn set_header_nonce(header: &[u8], nonce: u64) -> Vec<u8> {
    let mut buf = header.to_owned();
    buf.truncate(header.len().saturating_sub(8));
    // Writing into a Vec cannot fail; the Result is deliberately ignored.
    let _ = buf.write_u64::<LittleEndian>(nonce);
    buf
}
/// Reads a `u64` from the first 8 little-endian bytes of `v`.
///
/// Uses `u64::from_le_bytes` from the standard library instead of the
/// `byteorder` helper; like the original, this panics if `v` holds fewer
/// than 8 bytes and ignores any extra bytes.
pub fn vec_to_u64(v: Vec<u8>) -> u64 {
    let mut buf = [0u8; 8];
    buf.copy_from_slice(&v[..8]);
    u64::from_le_bytes(buf)
}
|
//! Extract book code information for book clusters.
use std::path::PathBuf;
use crate::arrow::polars::nonnull_schema;
use crate::arrow::writer::open_parquet_writer;
use crate::ids::codes::*;
use crate::prelude::*;
use polars::prelude::*;
static GRAPH_NODE_FILE: &str = "book-links/cluster-graph-nodes.parquet";
#[derive(Args, Debug)]
#[command(name = "extract-books")]
/// Extract cluster book codes for a particular namespace.
pub struct ExtractBooks {
    /// Specify output file
    #[arg(short = 'o', long = "output")]
    output: PathBuf,
    /// Output numspaced book codes instead of original IDs.
    #[arg(short = 'C', long = "numspaced-book-codes")]
    book_codes: bool,
    /// Specify the name of the book code field.
    #[arg(
        short = 'n',
        long = "name",
        name = "FIELD",
        default_value = "book_code"
    )]
    field_name: String,
    /// Specify an additional file to join into the results.
    #[arg(long = "join-file", name = "LINKFILE")]
    join_file: Option<PathBuf>,
    /// Specify a field to read from the join file.
    #[arg(long = "join-field", name = "LINKFIELD")]
    join_field: Option<String>,
    /// Extract book codes in namespace NS.
    #[arg(name = "NS")]
    namespace: String,
}
impl Command for ExtractBooks {
    /// Filters the cluster-graph node table down to one namespace and writes
    /// (book code, cluster) rows — optionally joined with an extra file — to
    /// the requested Parquet output.
    fn exec(&self) -> Result<()> {
        require_working_root()?;
        let ns = NS::by_name(&self.namespace).ok_or(anyhow!("invalid namespace"))?;
        let data = LazyFrame::scan_parquet(GRAPH_NODE_FILE, default())?;
        // Either emit the raw numspaced code, or subtract the namespace base
        // to recover the original source ID.
        let bc_col = if self.book_codes {
            info!(
                "writing numspaced book codes in column {}",
                &self.field_name
            );
            col("book_code").alias(&self.field_name)
        } else {
            info!("writing source book IDs in column {}", &self.field_name);
            (col("book_code") - lit(ns.base())).alias(&self.field_name)
        };
        // Book codes encode their namespace as `code / NS_MULT_BASE`.
        let filtered = data
            .filter((col("book_code") / lit(NS_MULT_BASE)).eq(lit(ns.code())))
            .select(&[bc_col, col("cluster")]);
        let results = if let Some(jf) = &self.join_file {
            let join = LazyFrame::scan_parquet(jf, default())?;
            // Left join keeps books that have no counterpart in the join file.
            let join = filtered.join(
                join,
                &[col(&self.field_name)],
                &[col(&self.field_name)],
                JoinType::Left,
            );
            if let Some(fld) = &self.join_field {
                join.select(&[col(&self.field_name), col(fld), col("cluster")])
            } else {
                join
            }
        } else {
            filtered
        };
        info!("collecting results");
        let mut frame = results.collect()?;
        frame.rechunk();
        info!("got {} book links", frame.height());
        // NOTE(review): a left join can introduce nulls in the joined column;
        // confirm `nonnull_schema` tolerates that case.
        let schema = nonnull_schema(&frame);
        let writer = open_parquet_writer(&self.output, schema)?;
        writer.write_and_finish(frame.iter_chunks())?;
        Ok(())
    }
}
|
mod atlas_rect;
use vulkano::command_buffer::SubpassContents;
use gristmill::asset::image::{Image, TileAtlasImage};
use gristmill::renderer::{LoadContext, RenderContext, scene};
use gristmill::geometry2d::*;
use super::{Entity, World};
use atlas_rect::{Texture, TileAtlasTexture, AtlasRectPipeline};
/// A drawable image reference: either a whole texture, or one tile of an atlas.
#[derive(Clone)]
pub enum Sprite {
    /// A standalone texture drawn in full.
    Texture(Texture),
    /// A tile atlas plus the 2D index of the tile to draw.
    Tile(TileAtlasTexture, Index2D),
}
/// Renders the `Sprite`-bearing entities of a `World` scene graph.
pub struct SpriteRenderer {
    atlas_rect_pipeline: AtlasRectPipeline,
    // Integer upscale factor applied when converting framebuffer dimensions.
    scale: u32,
}
impl SpriteRenderer {
    /// Sets the integer upscaling factor applied to the whole scene.
    pub fn set_scale(&mut self, scale: u32) {
        self.scale = scale;
    }
    /// Uploads a plain image and wraps it as a drawable sprite.
    pub fn load_image(&mut self, context: &mut LoadContext, image: &Image) -> Sprite {
        Sprite::Texture(self.atlas_rect_pipeline.load_image(context, image))
    }
    /// Uploads a tile-atlas image; the sprite starts at the default tile index.
    pub fn load_tile_image(&mut self, context: &mut LoadContext, image: &TileAtlasImage) -> Sprite {
        Sprite::Tile(self.atlas_rect_pipeline.load_tile_image(context, image), Index2D::default())
    }
    /// Draws `entity` (if it has a sprite), then recurses into its children.
    /// Positions accumulate: each node is offset from its parent's resolved
    /// position.
    fn render_entity(&mut self, context: &mut RenderContext, scene: &World, entity: Entity, parent_position: Point) {
        let obj = scene.forest.get(entity);
        let obj_position = obj.position.offset_from(parent_position);
        if let Some(sprite) = obj.sprite.as_ref() {
            // Sprites are currently always tinted white (no per-entity color).
            let color = gristmill::color::white();
            match sprite {
                Sprite::Texture(texture) => self.atlas_rect_pipeline.draw_rect_full(context, obj_position, texture, color),
                Sprite::Tile(texture, index) => self.atlas_rect_pipeline.draw_tile(context, obj_position, texture, *index, color),
            }
        }
        for child in scene.forest.iter_children(entity) {
            self.render_entity(context, scene, *child, obj_position);
        }
    }
}
impl scene::SceneRenderer for SpriteRenderer {
    type RenderType = scene::Geometry2D;
    type Scene = World;
    fn contents() -> SubpassContents { SubpassContents::Inline }
    /// Builds the renderer with an unscaled (1:1) viewport.
    fn new(context: &mut LoadContext) -> Self {
        let atlas_rect_pipeline = AtlasRectPipeline::new(context);
        SpriteRenderer { atlas_rect_pipeline, scale: 1 }
    }
    /// Converts framebuffer size to logical units by dividing by the scale.
    fn set_dimensions(&mut self, dimensions: Size) {
        let width = dimensions.width as f32 / self.scale as f32;
        let height = dimensions.height as f32 / self.scale as f32;
        self.atlas_rect_pipeline.set_dimensions([width, height]);
    }
    fn pre_render(&mut self, _context: &mut RenderContext, _scene: &mut World) {}
    /// Walks the scene graph starting at the render root.
    fn render(&mut self, context: &mut RenderContext, scene: &mut World) {
        self.render_entity(context, scene, scene.render_root, Point::origin());
    }
}
|
/// If `profile` feature is enabled, records high-level profiling information to `profile.csv`.
/// Recording is done via a thread-local buffer and dedicated file writing thread, in an attempt to
/// mitigate overhead.
///
/// When the `profile` feature is disabled the invocation compiles to nothing,
/// so call sites pay zero runtime cost.
#[macro_export(local_inner_macros)]
macro_rules! profile {
    ($name: expr) => {
        // Forward to the real implementation only when profiling is compiled in.
        #[cfg(feature = "profile")]
        cao_profile::profile!($name)
    };
}
|
use nix::errno::Errno;
use nix::unistd::Pid;
use nix::sys::ptrace;
use core::ffi::c_void;
/// A software breakpoint planted in a traced (ptrace'd) process.
/// (Lower-case type name kept for compatibility with existing callers.)
#[derive(Clone)]
#[derive(Copy)]
#[derive(Debug)]
pub struct breakpoint {
    // Original byte at `addr`, restored when the breakpoint is disabled.
    saved_data: u8,
    // Address where the trap byte is written.
    pub addr: usize,
    // Whether the trap byte is currently patched into the tracee.
    pub enabled: bool,
    // The traced process.
    pub pid: Pid,
}
impl breakpoint {
    /// Creates a disabled breakpoint for process `proc` at `target_addr`.
    /// (Name kept as `New` for source compatibility with existing callers.)
    pub fn New(proc: Pid, target_addr: usize) -> breakpoint {
        breakpoint {
            saved_data: 0,
            addr: target_addr,
            enabled: false,
            pid: proc,
        }
    }
    /// Arms the breakpoint: saves the original low byte at `addr`, then
    /// patches in 0xCC (the x86 INT3 opcode).
    ///
    /// Fix: a failing `ptrace::write` is now propagated with `?` instead of
    /// being silently discarded (the old code returned Ok even when the
    /// patch never landed).
    pub fn enable(&mut self) -> Result<(), Errno> {
        let data = ptrace::read(self.pid, self.addr as *mut c_void)? as u64;
        self.saved_data = (data & 0xff) as u8;
        // 0xcc is the single-byte INT3 breakpoint instruction.
        let patched = (data & !0xff) | 0xcc;
        unsafe { ptrace::write(self.pid, self.addr as *mut c_void, patched as *mut c_void) }?;
        self.enabled = true;
        Ok(())
    }
    /// Disarms the breakpoint by restoring the saved original byte.
    /// Write failures are likewise propagated to the caller.
    pub fn disable(&mut self) -> Result<(), Errno> {
        let data = ptrace::read(self.pid, self.addr as *mut c_void)? as u64;
        let restored = (data & !0xff) | (self.saved_data as u64);
        unsafe { ptrace::write(self.pid, self.addr as *mut c_void, restored as *mut c_void) }?;
        self.enabled = false;
        Ok(())
    }
}
//currently unimplemented
//How we store breakpoints. Was initially a hashmap of addresses and breakpoints,
//but that made it inconvenient to reference breakpoints (users had to type the
//whole address to enable/disable/delete/do anything), so we use a vector of
//breakpoints instead (accessible via index).
//The second vector (addr_list) is to prevent duplicate breakpoints (the
//original reason for choosing a hashmap).
/// Index-addressable collection of breakpoints.
/// Deleted slots become `None` (with address 0) so indices stay stable.
pub struct bp_storage {
    pub bp_list: Vec<Option<breakpoint>>,
    // Parallel list of addresses, used to reject duplicate breakpoints.
    pub addr_list: Vec<usize>,
}
impl bp_storage {
    /// Creates an empty breakpoint store.
    /// (Name kept as `New` for source compatibility with existing callers.)
    pub fn New() -> bp_storage {
        bp_storage {
            bp_list: Vec::new(),
            addr_list: Vec::new(),
        }
    }
    /// Registers `bp`; fails if a breakpoint already exists at its address.
    pub fn insert(&mut self, bp: breakpoint) -> Result<(), ()> {
        // Breakpoint already set at that address.
        if self.addr_list.contains(&bp.addr) {
            return Err(());
        }
        self.addr_list.push(bp.addr);
        self.bp_list.push(Some(bp));
        Ok(())
    }
    /// Removes the breakpoint at `idx`, disabling it best-effort first.
    /// The slot is kept (as `None`) so later indices stay stable.
    ///
    /// Fix: bounds are checked with `get_mut`, avoiding the `len() - 1`
    /// usize-underflow panic the old check hit on an empty list.
    pub fn delete(&mut self, idx: usize) -> Result<(), ()> {
        let slot = self.bp_list.get_mut(idx).ok_or(())?;
        let mut bp = slot.take().ok_or(())?;
        // Best-effort: still delete the entry even if the tracee is gone.
        let _ = bp.disable();
        self.addr_list[idx] = 0;
        Ok(())
    }
    /// Arms the breakpoint at `idx`. Fix: a ptrace failure inside
    /// `breakpoint::enable` is now reported as `Err(())` instead of being
    /// silently swallowed; bad or deleted indices also return `Err(())`.
    pub fn enable(&mut self, idx: usize) -> Result<(), ()> {
        match self.bp_list.get_mut(idx) {
            Some(Some(bp)) => bp.enable().map_err(|_| ()),
            _ => Err(()),
        }
    }
    /// Disarms the breakpoint at `idx`; failures are reported as `Err(())`.
    pub fn disable(&mut self, idx: usize) -> Result<(), ()> {
        match self.bp_list.get_mut(idx) {
            Some(Some(bp)) => bp.disable().map_err(|_| ()),
            _ => Err(()),
        }
    }
    /// True if a breakpoint is registered at address `addr`.
    /// (Fix: the parameter is an address, not a list index — renamed from
    /// the misleading `idx`.)
    pub fn contains(&self, addr: usize) -> bool {
        self.addr_list.contains(&addr)
    }
}
|
// svd2rust-style generated accessor types for the VLCR register.
#[doc = "Register `VLCR` reader"]
pub type R = crate::R<VLCR_SPEC>;
#[doc = "Register `VLCR` writer"]
pub type W = crate::W<VLCR_SPEC>;
#[doc = "Field `HLINE` reader - Horizontal line duration"]
pub type HLINE_R = crate::FieldReader<u16>;
#[doc = "Field `HLINE` writer - Horizontal line duration"]
// 15-bit field occupying bits 0:14.
pub type HLINE_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 15, O, u16>;
impl R {
    #[doc = "Bits 0:14 - Horizontal line duration"]
    #[inline(always)]
    pub fn hline(&self) -> HLINE_R {
        // 0x7fff keeps the 15 HLINE bits.
        HLINE_R::new((self.bits & 0x7fff) as u16)
    }
}
impl W {
    #[doc = "Bits 0:14 - Horizontal line duration"]
    #[inline(always)]
    #[must_use]
    pub fn hline(&mut self) -> HLINE_W<VLCR_SPEC, 0> {
        HLINE_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe: bypasses field-level checking; callers may set reserved bits.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
// Generated register-spec marker type and trait impls for VLCR.
#[doc = "DSI Host video line configuration register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`vlcr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`vlcr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct VLCR_SPEC;
impl crate::RegisterSpec for VLCR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`vlcr::R`](R) reader structure"]
impl crate::Readable for VLCR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`vlcr::W`](W) writer structure"]
impl crate::Writable for VLCR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets VLCR to value 0"]
impl crate::Resettable for VLCR_SPEC {
    const RESET_VALUE: Self::Ux = 0;
}
|
/// Phrase-classification helpers used to pick Bob's reply.
trait Bob {
    /// True when the phrase is shouted.
    fn is_shouting(&self) -> bool;
    /// True when nothing is actually said.
    fn is_silent(&self) -> bool;
}
impl Bob for str {
    /// True when the phrase contains at least one letter and every letter is
    /// uppercase. The letter requirement keeps letter-free input (e.g.
    /// "1, 2, 3") from counting as shouting, which the old version did.
    fn is_shouting(&self) -> bool {
        self.chars().any(|c| c.is_alphabetic())
            && self.chars().all(|c| !c.is_alphabetic() || c.is_uppercase())
    }
    /// True when the phrase is empty or all whitespace.
    ///
    /// Fix: the old test was inverted (`!c.is_whitespace()`), which
    /// classified every whitespace-free phrase — e.g. "hello" — as silence.
    fn is_silent(&self) -> bool {
        self.chars().all(|c| c.is_whitespace())
    }
}
/// Picks Bob's canned response: questions, silence, and shouting each get a
/// dedicated reply; anything else earns "Whatever.".
pub fn reply(s: &str) -> &'static str {
    if s.ends_with('?') {
        return "Sure.";
    }
    if s.is_silent() {
        return "Fine. Be that way!";
    }
    if s.is_shouting() {
        return "Whoa, chill out!";
    }
    "Whatever."
}
|
/// AoC 2020 day 2 part 1: counts passwords whose letter frequency falls
/// inside the "min-max letter: password" policy range.
///
/// Fix: iterate with `lines()` and skip blank lines, so a trailing newline
/// (or CRLF line endings) in the input file no longer trips the 3-field
/// assertion below.
pub fn solve_v1() -> i32 {
    let data = super::load_file("day2.txt");
    let mut count = 0;
    for line in data.lines().filter(|l| !l.trim().is_empty()) {
        let parts: Vec<&str> = line.split(' ').map(str::trim).collect();
        assert_eq!(parts.len(), 3, "Invalid input file");
        // "min-max" occurrence bounds.
        let limits: Vec<i32> = parts[0]
            .split('-')
            .map(|s| s.parse::<i32>().unwrap())
            .collect();
        let letter = parts[1].chars().next().unwrap();
        let password = parts[2];
        let occurrences = password.chars().filter(|&c| c == letter).count() as i32;
        if (limits[0]..=limits[1]).contains(&occurrences) {
            count += 1;
        }
    }
    count
}
/// AoC 2020 day 2 part 2: counts passwords where exactly one of the two
/// 1-based positions holds the policy letter.
///
/// Fix: iterate with `lines()` and skip blank lines, so a trailing newline
/// in the input file no longer trips the 3-field assertion.
pub fn solve_v2() -> i32 {
    let data = super::load_file("day2.txt");
    let mut count = 0;
    for line in data.lines().filter(|l| !l.trim().is_empty()) {
        let parts: Vec<&str> = line.split(' ').map(str::trim).collect();
        assert_eq!(parts.len(), 3, "Invalid input file");
        // "pos1-pos2" — 1-based character positions, not a range.
        let positions: Vec<usize> = parts[0]
            .split('-')
            .map(|s| s.parse::<usize>().unwrap())
            .collect();
        let letter = parts[1].chars().next().unwrap();
        let password = parts[2];
        let first = password.chars().nth(positions[0] - 1).unwrap();
        let second = password.chars().nth(positions[1] - 1).unwrap();
        // Exactly one of the two positions must hold the letter (XOR).
        if (first == letter) != (second == letter) {
            count += 1;
        }
    }
    count
}
|
pub mod ast;
use self::ast::{Assertion, Atom, Clause, Const, Term, Var};
use lalrpop_util::lalrpop_mod;
use std::collections::HashMap;
use std::fmt::{Display, Formatter};
use std::io::Write;
lalrpop_mod!(pub parser);
/// A substitution: maps variables to the terms they are bound to.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Environment(HashMap<Var, Term>);
// A knowledge base is the list of asserted clauses (facts and rules).
pub type KnowledgeBase = Vec<Assertion>;
pub type Assertions = Vec<Assertion>;
/// Unification failure: the two terms cannot be made equal.
#[derive(Debug, Copy, Clone)]
enum UnifyErr {
    NoUnify,
}
/// Query failure: no (further) solutions exist.
#[derive(Debug, Copy, Clone)]
enum SolveErr {
    NoSolution,
}
/// Outcome of a successful solve: either a final answer, or an answer plus
/// the choice points that could still yield further solutions.
#[derive(Debug, Clone)]
enum Solution {
    Answer(String),
    Choicepoint(String, Vec<Choicepoint>),
}
/// A saved backtracking state: the untried assertions, the substitution,
/// the goal clause to resume with, and the resolution depth.
#[derive(Debug, Clone)]
struct Choicepoint {
    assertions: KnowledgeBase,
    environment: Environment,
    clause: Clause,
    depth: usize,
}
impl Display for Environment {
    /// Renders the answer substitution. Only top-level (depth-0) variables
    /// are shown; an empty substitution prints "Yes".
    fn fmt(&self, f: &mut Formatter) -> Result<(), std::fmt::Error> {
        // Keep only user-visible variables (renumber depth 0), sorted.
        let mut env: Vec<_> = self.0.iter().filter(|(Var(_, n), _)| *n == 0).collect();
        env.sort();
        let mut response = String::from("\n");
        let last = env.last().cloned();
        match last {
            None => Ok(write!(f, "Yes")?),
            Some((Var(last_x, _), last_t)) => {
                // All but the last binding end with a newline; the last ends
                // with a trailing space instead.
                for (Var(x, _), t) in &env[..env.len() - 1] {
                    response.push_str(&format!("{} = {}\n", x, self.substitute_term(t)))
                }
                response.push_str(&format!("{} = {} ", last_x, self.substitute_term(last_t)));
                Ok(write!(f, "{}", response)?)
            }
        }
    }
}
impl Environment {
    /// Creates an empty substitution.
    fn new() -> Self {
        Environment(HashMap::new())
    }
    /// Binds variable `x` to term `t`, overwriting any previous binding.
    fn insert(&mut self, x: Var, t: Term) {
        self.0.insert(x, t);
    }
    /// Returns the binding of `x`, or `x` itself (as a term) when unbound.
    fn lookup(&self, x: &Var) -> Term {
        match self.0.get(x) {
            Some(t) => t.clone(),
            None => Term::Var(x.clone()),
        }
    }
    /// Applies the substitution to `t`: variable chains are chased until a
    /// fixed point, and atom arguments are rewritten in place using an
    /// explicit worklist (avoids recursion on deeply nested atoms).
    fn substitute_term(&self, t: &Term) -> Term {
        // Constants are unaffected by substitution.
        if let Term::Const(_) = t {
            return t.clone();
        }
        let mut t = t.clone();
        let mut temp = t;
        loop {
            match temp {
                Term::Var(x) => {
                    t = self.lookup(&x);
                    // Unbound (maps to itself): the chain ends here.
                    if Term::Var(x) == t {
                        return t;
                    }
                    temp = t;
                }
                Term::Atom(mut a) => {
                    // Substitute the atom's arguments level by level.
                    let mut next_atoms = Vec::new();
                    self.substitute_atom(&mut a, &mut next_atoms);
                    while let Some(a) = next_atoms.pop() {
                        self.substitute_atom(a, &mut next_atoms);
                    }
                    return Term::Atom(a);
                }
                Term::Const(_) => return temp,
            }
        }
    }
    /// Substitutes one atom's direct arguments; nested atoms are pushed onto
    /// `next` for the caller's worklist rather than recursed into.
    fn substitute_atom<'a>(&self, a: &'a mut Atom, next: &mut Vec<&'a mut Atom>) {
        for arg in &mut a.args {
            match arg {
                ref t @ Term::Var(_) => {
                    *arg = self.substitute_term(*t);
                }
                Term::Atom(ref mut a) => next.push(a),
                _ => (),
            }
        }
    }
    /// Unifies two terms under the current substitution, returning the
    /// extended substitution on success (consumes `self`).
    fn unify_terms(self, t1: &Term, t2: &Term) -> Result<Self, UnifyErr> {
        match (self.substitute_term(t1), self.substitute_term(t2)) {
            // Already identical after substitution: nothing to add.
            (ref t1, ref t2) if t1 == t2 => Ok(self),
            (Term::Var(y), t) | (t, Term::Var(y)) => {
                // Occurs check: refuse bindings that would build cyclic terms.
                if occurs(&y, &t) {
                    return Err(UnifyErr::NoUnify);
                }
                let mut env = self;
                env.insert(y, t);
                Ok(env)
            }
            (
                Term::Atom(Atom {
                    name: ref c1,
                    args: ref ts1,
                    ..
                }),
                Term::Atom(Atom {
                    name: ref c2,
                    args: ref ts2,
                    ..
                }),
            ) if c1 == c2 => {
                // Same functor: unify argument lists, walking nested
                // atom/atom pairs iteratively via a worklist.
                let mut next_atoms = Vec::new();
                let mut env = self.unify_list_level(ts1, ts2, &mut next_atoms)?;
                while let Some((a1, a2)) = next_atoms.pop() {
                    if a1.name != a2.name {
                        return Err(UnifyErr::NoUnify);
                    }
                    let next_env = env.unify_list_level(&a1.args, &a2.args, &mut next_atoms)?;
                    env = next_env;
                }
                Ok(env)
            }
            _ => Err(UnifyErr::NoUnify),
        }
    }
    /// Unifies two argument lists one level deep: non-atom pairs are unified
    /// immediately, atom/atom pairs are deferred onto `next_atoms`.
    fn unify_list_level<'a>(
        self,
        l1: &'a [Term],
        l2: &'a [Term],
        next_atoms: &mut Vec<(&'a Atom, &'a Atom)>,
    ) -> Result<Environment, UnifyErr> {
        if l1.len() != l2.len() {
            return Err(UnifyErr::NoUnify);
        }
        let terms = l1.iter().zip(l2.iter());
        let mut env = self;
        for (t1, t2) in terms {
            if let (Term::Atom(ref a1), Term::Atom(ref a2)) = (t1, t2) {
                next_atoms.push((a1, a2));
            } else {
                env = env.unify_terms(t1, t2)?;
            }
        }
        Ok(env)
    }
    /// Unifies two term lists pairwise (lengths must match).
    fn unify_lists(&self, l1: &[Term], l2: &[Term]) -> Result<Self, UnifyErr> {
        if l1.len() != l2.len() {
            return Err(UnifyErr::NoUnify);
        }
        l1.iter()
            .zip(l2.iter())
            .fold(Ok(self.clone()), |env, (t1, t2)| env?.unify_terms(t1, t2))
    }
    /// Unifies two atoms: functor names must match, then the argument lists.
    fn unify_atoms(&self, a1: &Atom, a2: &Atom) -> Result<Self, UnifyErr> {
        if a1.name == a2.name {
            return self.unify_lists(&a1.args, &a2.args);
        }
        Err(UnifyErr::NoUnify)
    }
    /// Tries to resolve goal `a` against the assertions in `asrl` (searched
    /// from the back). On success returns the remaining untried assertions
    /// (kept for backtracking), the extended environment, and the matched
    /// clause body; `n` is used to freshen the assertion's variables.
    fn reduce_atom(
        &self,
        n: usize,
        a: &Atom,
        asrl: &[Assertion],
    ) -> Option<(KnowledgeBase, Environment, Clause)> {
        let mut asrl = asrl.to_vec();
        while let Some(Assertion {
            head: ref b,
            clause: ref lst,
        }) = asrl.pop()
        {
            let next_env = self.unify_atoms(a, &renumber_atom(n, b));
            match next_env {
                Ok(next_env) => {
                    return Some((
                        asrl,
                        next_env,
                        lst.iter().map(|a| renumber_atom(n, a)).collect(),
                    ));
                }
                Err(UnifyErr::NoUnify) => {
                    continue;
                }
            }
        }
        None
    }
    /// Resolves clause `c` against knowledge base `kb`, backtracking through
    /// `ch` (the choice-point stack). `n` is the resolution depth used to
    /// freshen clause variables. The builtin `halt/0` exits the process.
    fn solve(
        self,
        mut ch: Vec<Choicepoint>,
        kb: &[Assertion],
        asrl: &[Assertion],
        mut c: Clause,
        mut n: usize,
    ) -> Result<Solution, SolveErr> {
        let mut env = self;
        let mut asrl = asrl;
        let mut next_asrl = Some(asrl.to_vec());
        while let Some(a) = c.pop() {
            let Atom {
                name: Const(ref atom_name),
                arity,
                ..
            } = a;
            if atom_name == "halt" && arity == 0 {
                std::process::exit(0);
            }
            // `None` means "start from the full knowledge base"; otherwise
            // resume from a choice point's remaining assertions.
            asrl = match next_asrl {
                None => kb,
                Some(ref assertions) => assertions,
            };
            match env.reduce_atom(n, &a, asrl) {
                // Goal cannot be reduced: backtrack to the last choice point.
                None => match ch.pop() {
                    None => return Err(SolveErr::NoSolution),
                    Some(Choicepoint {
                        assertions: ch_asrl,
                        environment: next_env,
                        clause: gs,
                        depth: next_n,
                    }) => {
                        env = next_env;
                        next_asrl = Some(ch_asrl);
                        c = gs;
                        n = next_n;
                    }
                },
                // Reduced: record a choice point for the untried assertions,
                // then continue with the instantiated clause body appended.
                Some((ch_asrl, next_env, mut d)) => {
                    let mut ch_clause = c.clone();
                    ch_clause.push(a);
                    let mut ch_buffer = vec![Choicepoint {
                        assertions: ch_asrl,
                        environment: env,
                        clause: ch_clause,
                        depth: n,
                    }];
                    ch_buffer.extend_from_slice(&ch);
                    d.extend_from_slice(&c);
                    env = next_env;
                    ch = ch_buffer;
                    next_asrl = None;
                    c = d;
                    n += 1;
                }
            }
        }
        // All goals solved: report the answer, noting whether choice points
        // remain that could produce further solutions.
        Ok(match (&env.to_string()[..], &ch[..]) {
            (answer, []) => Solution::Answer(String::from(answer)),
            (answer, _) => {
                let answer = if answer == "Yes" { "Yes " } else { answer };
                Solution::Choicepoint(String::from(answer), ch)
            }
        })
    }
}
/// Occurs check: does variable `x` appear anywhere inside term `t`?
fn occurs(x: &Var, t: &Term) -> bool {
    if let Term::Var(y) = t {
        return x == y;
    }
    if let Term::Atom(a) = t {
        return occurs_atom(x, a);
    }
    // Constants never contain variables.
    false
}
/// Occurs check over an atom: iterative depth-first search through nested
/// argument atoms, looking for variable `x`.
fn occurs_atom(x: &Var, a: &Atom) -> bool {
    let mut stack = vec![a];
    while let Some(atom) = stack.pop() {
        for term in &atom.args {
            match term {
                Term::Var(y) => {
                    if x == y {
                        return true;
                    }
                }
                Term::Atom(inner) => stack.push(inner),
                Term::Const(_) => {}
            }
        }
    }
    false
}
/// Returns `t` with its variable's renumber index replaced by `n`;
/// constants are cloned unchanged and atoms are renumbered recursively.
fn renumber_term(n: usize, t: &Term) -> Term {
    match t {
        Term::Var(Var(name, _)) => Term::Var(Var(name.clone(), n)),
        Term::Const(_) => t.clone(),
        Term::Atom(a) => Term::Atom(renumber_atom(n, a)),
    }
}
/// Returns a copy of `a` with every variable's renumber index set to `n`
/// (used to freshen a clause's variables per resolution depth). Nested atoms
/// are processed with an explicit worklist instead of recursion.
fn renumber_atom(n: usize, a: &Atom) -> Atom {
    let mut a = a.clone();
    let mut next_atoms = Vec::new();
    renumber_atom_level(n, &mut a, &mut next_atoms);
    while let Some(a) = next_atoms.pop() {
        renumber_atom_level(n, a, &mut next_atoms);
    }
    a
}
/// Renumbers the variables in one atom's direct arguments; nested atoms are
/// pushed onto `next` for the caller's worklist rather than recursed into.
fn renumber_atom_level<'a>(n: usize, a: &'a mut Atom, next: &mut Vec<&'a mut Atom>) {
    for arg in &mut a.args {
        match arg {
            // `ref t @` reads the old value while `*arg` is being replaced.
            ref t @ Term::Var(_) => {
                *arg = renumber_term(n, *t);
            }
            Term::Atom(ref mut a) => next.push(a),
            _ => (),
        }
    }
}
/// Backtracks: resumes the solver from the most recent choice point, or
/// reports that no further solutions exist.
fn continue_search(kb: &[Assertion], mut ch: Vec<Choicepoint>) -> Result<Solution, SolveErr> {
    let Choicepoint {
        assertions,
        environment,
        clause,
        depth,
    } = match ch.pop() {
        Some(cp) => cp,
        None => return Err(SolveErr::NoSolution),
    };
    environment.solve(ch, kb, &assertions, clause, depth)
}
/// Runs query `c` against `kb`, printing each solution. In interactive mode
/// the user types `;` to request the next solution; otherwise every solution
/// is also collected into the returned vector.
pub fn solve_toplevel(interactive: bool, kb: &[Assertion], c: Clause) -> Vec<String> {
    let env = Environment::new();
    let asrl = kb;
    let mut s = env.solve(Vec::new(), kb, asrl, c, 1);
    let mut answers = Vec::new();
    // Tracks whether at least one solution was already reported, so "No."
    // is only printed for queries with no solutions at all.
    let mut found = false;
    loop {
        match s {
            Err(SolveErr::NoSolution) if found => break,
            Err(SolveErr::NoSolution) => {
                println!("\nNo.");
                if !interactive {
                    answers.push(String::from("No"))
                }
                break;
            }
            Ok(Solution::Choicepoint(answer, ch)) => {
                found = true;
                print!("{}", answer);
                if !interactive {
                    answers.push(answer)
                }
                std::io::stdout().flush().expect("Could not flush stdout");
                if interactive {
                    let mut input_buffer = String::new();
                    std::io::stdin()
                        .read_line(&mut input_buffer)
                        .expect("error reading input");
                    // `;` (with either line ending) asks for the next
                    // solution; any other input stops the search.
                    match &input_buffer[..] {
                        ";\r\n" | ";\n" => {
                            s = continue_search(kb, ch);
                        }
                        _ => break,
                    }
                } else {
                    s = continue_search(kb, ch);
                }
            }
            Ok(Solution::Answer(answer)) => {
                println!("\n{}.", answer);
                if !interactive {
                    answers.push(answer)
                }
                break;
            }
        }
    }
    answers
}
#[cfg(test)]
mod tests {
    //! Unit tests for term/atom/list unification and the occurs check.
    use super::*;
    /// Asserts that `env` holds exactly the bindings in `results`,
    /// ignoring order.
    fn unification_result(env: &Environment, results: &mut [(Var, Term)]) {
        let mut env: Vec<_> = env.0.iter().map(|(v, t)| (v.clone(), t.clone())).collect();
        env.sort();
        results.sort();
        assert_eq!(env, results);
    }
    #[test]
    fn test_unify_1_succeeds() {
        let x = Term::Atom(Atom::new(
            "foo",
            vec![Term::Atom(Atom::new(
                "bar",
                vec![Term::Var(Var::new("X", 0))],
            ))],
        ));
        let f = Term::Atom(Atom::new(
            "foo",
            vec![Term::Atom(Atom::new(
                "bar",
                vec![Term::Const(Const::new("z"))],
            ))],
        ));
        let env = Environment::new().unify_terms(&x, &f);
        unification_result(
            &env.unwrap(),
            &mut [(Var::new("X", 0), Term::Const(Const::new("z")))],
        );
    }
    #[test]
    #[should_panic]
    fn test_unify_1_fails() {
        // Functor mismatch (baz vs bar) must not unify.
        let x = Term::Atom(Atom::new(
            "foo",
            vec![Term::Atom(Atom::new(
                "baz",
                vec![Term::Var(Var::new("X", 0))],
            ))],
        ));
        let f = Term::Atom(Atom::new(
            "foo",
            vec![Term::Atom(Atom::new(
                "bar",
                vec![Term::Const(Const::new("z"))],
            ))],
        ));
        let env = Environment::new().unify_terms(&x, &f);
        env.unwrap();
    }
    #[test]
    fn test_unify_2_succeeds() {
        let x = Term::Var(Var::new("X", 0));
        let f = Term::Atom(Atom::new(
            "foo",
            vec![Term::Atom(Atom::new(
                "bar",
                vec![Term::Const(Const::new("a"))],
            ))],
        ));
        let env = Environment::new().unify_terms(&f, &x);
        unification_result(&env.unwrap(), &mut [(Var::new("X", 0), f)]);
    }
    #[test]
    fn test_unify_3_succeeds() {
        let x = Term::Var(Var::new("X", 0));
        let y = Term::Var(Var::new("Y", 0));
        let env = Environment::new().unify_terms(&x, &y);
        unification_result(&env.unwrap(), &mut [(Var::new("X", 0), y)]);
    }
    #[test]
    fn test_unify_4_succeeds() {
        // A variable unifies with itself without creating a binding.
        let x1 = Term::Var(Var::new("X", 0));
        let x2 = Term::Var(Var::new("X", 0));
        let env = Environment::new().unify_terms(&x1, &x2);
        unification_result(&env.unwrap(), &mut []);
    }
    #[test]
    fn test_unify_5_succeeds() {
        let a1 = Term::Const(Const::new("a"));
        let a2 = Term::Const(Const::new("a"));
        let env = Environment::new().unify_terms(&a1, &a2);
        unification_result(&env.unwrap(), &mut []);
    }
    #[test]
    #[should_panic]
    fn test_unify_5_fails() {
        let a1 = Term::Const(Const::new("a"));
        let a2 = Term::Const(Const::new("b"));
        let env = Environment::new().unify_terms(&a1, &a2);
        env.unwrap();
    }
    #[test]
    fn test_unify_6_succeeds() {
        let x = Term::Atom(Atom::new(
            "foo",
            vec![Term::Atom(Atom::new(
                "bar",
                vec![Term::Var(Var::new("X", 0)), Term::Const(Const::new("q"))],
            ))],
        ));
        let f = Term::Atom(Atom::new(
            "foo",
            vec![Term::Atom(Atom::new(
                "bar",
                vec![Term::Const(Const::new("z")), Term::Var(Var::new("V", 0))],
            ))],
        ));
        let env = Environment::new().unify_terms(&x, &f);
        unification_result(
            &env.unwrap(),
            &mut [
                (Var::new("V", 0), Term::Const(Const::new("q"))),
                (Var::new("X", 0), Term::Const(Const::new("z"))),
            ],
        );
    }
    #[test]
    fn test_unify_7_succeeds() {
        // Classic textbook example with chained variable bindings.
        let p1 = Term::Atom(Atom::new(
            "p",
            vec![
                Term::Var(Var::new("Z", 0)),
                Term::Atom(Atom::new(
                    "h",
                    vec![Term::Var(Var::new("Z", 0)), Term::Var(Var::new("W", 0))],
                )),
                Term::Atom(Atom::new("f", vec![Term::Var(Var::new("W", 0))])),
            ],
        ));
        let p2 = Term::Atom(Atom::new(
            "p",
            vec![
                Term::Atom(Atom::new("f", vec![Term::Var(Var::new("X", 0))])),
                Term::Atom(Atom::new(
                    "h",
                    vec![
                        Term::Var(Var::new("Y", 0)),
                        Term::Atom(Atom::new("f", vec![Term::Const(Const::new("a"))])),
                    ],
                )),
                Term::Var(Var::new("Y", 0)),
            ],
        ));
        let env = Environment::new().unify_terms(&p1, &p2);
        unification_result(
            &env.unwrap(),
            &mut [
                (
                    Var::new("W", 0),
                    Term::Atom(Atom::new("f", vec![Term::Const(Const::new("a"))])),
                ),
                (Var::new("X", 0), Term::Var(Var::new("W", 0))),
                (
                    Var::new("Y", 0),
                    Term::Atom(Atom::new("f", vec![Term::Var(Var::new("W", 0))])),
                ),
                (
                    Var::new("Z", 0),
                    Term::Atom(Atom::new("f", vec![Term::Var(Var::new("X", 0))])),
                ),
            ],
        )
    }
    #[test]
    #[should_panic]
    fn test_unify_7_fails() {
        let p1 = Term::Atom(Atom::new(
            "p",
            vec![
                Term::Var(Var::new("Z", 0)),
                Term::Atom(Atom::new(
                    "g",
                    vec![Term::Var(Var::new("Z", 0)), Term::Var(Var::new("W", 0))],
                )),
                Term::Atom(Atom::new("f", vec![Term::Var(Var::new("W", 0))])),
            ],
        ));
        let p2 = Term::Atom(Atom::new(
            "p",
            vec![
                Term::Atom(Atom::new("f", vec![Term::Var(Var::new("X", 0))])),
                Term::Atom(Atom::new(
                    "h",
                    vec![
                        Term::Var(Var::new("Y", 0)),
                        Term::Atom(Atom::new("f", vec![Term::Const(Const::new("a"))])),
                    ],
                )),
                Term::Var(Var::new("Y", 0)),
            ],
        ));
        let env = Environment::new().unify_terms(&p1, &p2);
        env.unwrap();
    }
    #[test]
    fn test_unify_8_succeeds() {
        let f1 = Term::Atom(Atom::new(
            "f",
            vec![
                Term::Var(Var::new("X", 0)),
                Term::Atom(Atom::new(
                    "g",
                    vec![
                        Term::Var(Var::new("X", 0)),
                        Term::Atom(Atom::new("a", vec![])),
                    ],
                )),
            ],
        ));
        let f2 = Term::Atom(Atom::new(
            "f",
            vec![
                Term::Atom(Atom::new("b", vec![])),
                Term::Var(Var::new("Y", 0)),
            ],
        ));
        let env = Environment::new().unify_terms(&f1, &f2);
        unification_result(
            &env.unwrap(),
            &mut [
                (Var::new("X", 0), Term::Atom(Atom::new("b", vec![]))),
                (
                    Var::new("Y", 0),
                    Term::Atom(Atom::new(
                        "g",
                        vec![
                            Term::Atom(Atom::new("b", vec![])),
                            Term::Atom(Atom::new("a", vec![])),
                        ],
                    )),
                ),
            ],
        )
    }
    #[test]
    fn test_unify_9_succeeds() {
        // Same shape as test 8, but via unify_atoms directly.
        let f1 = Atom::new(
            "f",
            vec![
                Term::Var(Var::new("X", 0)),
                Term::Atom(Atom::new(
                    "g",
                    vec![
                        Term::Var(Var::new("X", 0)),
                        Term::Atom(Atom::new("a", vec![])),
                    ],
                )),
            ],
        );
        let f2 = Atom::new(
            "f",
            vec![
                Term::Atom(Atom::new("b", vec![])),
                Term::Var(Var::new("Y", 0)),
            ],
        );
        let env = Environment::new().unify_atoms(&f1, &f2);
        unification_result(
            &env.unwrap(),
            &mut [
                (Var::new("X", 0), Term::Atom(Atom::new("b", vec![]))),
                (
                    Var::new("Y", 0),
                    Term::Atom(Atom::new(
                        "g",
                        vec![
                            Term::Atom(Atom::new("b", vec![])),
                            Term::Atom(Atom::new("a", vec![])),
                        ],
                    )),
                ),
            ],
        )
    }
    #[test]
    #[should_panic]
    fn test_unify_9_fails() {
        // Occurs check: X cannot bind to a term containing X.
        let f1 = Atom::new(
            "f",
            vec![
                Term::Var(Var::new("X", 0)),
                Term::Atom(Atom::new(
                    "g",
                    vec![
                        Term::Var(Var::new("X", 0)),
                        Term::Atom(Atom::new("a", vec![])),
                    ],
                )),
            ],
        );
        let f2 = Atom::new(
            "f",
            vec![
                Term::Atom(Atom::new("b", vec![])),
                Term::Var(Var::new("X", 0)),
            ],
        );
        let env = Environment::new().unify_atoms(&f1, &f2);
        env.unwrap();
    }
    #[test]
    fn test_unify_10_succeeds() {
        let l1 = vec![Term::Atom(Atom::new("a", vec![]))];
        let l2 = vec![Term::Var(Var::new("X", 1))];
        let env = Environment::new().unify_lists(&l1, &l2);
        unification_result(
            &env.unwrap(),
            &mut [(Var::new("X", 1), Term::Atom(Atom::new("a", vec![])))],
        )
    }
    #[test]
    #[should_panic]
    fn test_unify_10_fails() {
        // Length mismatch must not unify.
        let l1 = vec![
            Term::Atom(Atom::new("a", vec![])),
            Term::Atom(Atom::new("a", vec![])),
        ];
        let l2 = vec![Term::Var(Var::new("X", 0))];
        let env = Environment::new().unify_lists(&l1, &l2);
        env.unwrap();
    }
    #[test]
    #[should_panic]
    fn test_unify_11_fails() {
        let l1 = vec![Term::Atom(Atom::new("a", vec![]))];
        let l2 = vec![Term::Atom(Atom::new("b", vec![]))];
        let env = Environment::new().unify_lists(&l1, &l2);
        env.unwrap();
    }
    #[test]
    fn test_unify_12_succeeds() {
        let l1 = vec![
            Term::Atom(Atom::new(
                "a",
                vec![Term::Atom(Atom::new(
                    "x",
                    vec![Term::Const(Const::new("c"))],
                ))],
            )),
            Term::Atom(Atom::new("b", vec![])),
        ];
        let l2 = vec![
            Term::Atom(Atom::new("a", vec![Term::Var(Var::new("X", 0))])),
            Term::Atom(Atom::new("b", vec![])),
        ];
        let env = Environment::new().unify_lists(&l1, &l2);
        unification_result(
            &env.unwrap(),
            &mut [(
                Var::new("X", 0),
                Term::Atom(Atom::new("x", vec![Term::Const(Const::new("c"))])),
            )],
        )
    }
    #[test]
    #[should_panic]
    fn test_unify_12_fails() {
        let l1 = vec![
            Term::Atom(Atom::new(
                "a",
                vec![Term::Atom(Atom::new(
                    "x",
                    vec![Term::Const(Const::new("c"))],
                ))],
            )),
            Term::Atom(Atom::new("q", vec![])),
        ];
        let l2 = vec![
            Term::Atom(Atom::new("a", vec![Term::Var(Var::new("X", 0))])),
            Term::Atom(Atom::new("b", vec![])),
        ];
        let env = Environment::new().unify_lists(&l1, &l2);
        env.unwrap();
    }
    #[test]
    fn test_occurs_1_succeeds() {
        let v = Var::new("X", 0);
        let t = Term::Var(Var::new("X", 0));
        assert!(occurs(&v, &t))
    }
    #[test]
    fn test_occurs_1_fails() {
        // Same name, different renumber index: distinct variables.
        let v = Var::new("X", 0);
        let t = Term::Var(Var::new("X", 1));
        assert!(!occurs(&v, &t))
    }
    #[test]
    fn test_occurs_2_fails() {
        let v = Var::new("X", 0);
        let t = Term::Var(Var::new("Y", 0));
        assert!(!occurs(&v, &t))
    }
    #[test]
    fn test_occurs_3_succeeds() {
        let v = Var::new("X", 0);
        let t = Term::Atom(Atom::new(
            "x",
            vec![Term::Atom(Atom::new(
                "y",
                vec![Term::Var(Var::new("X", 0))],
            ))],
        ));
        assert!(occurs(&v, &t))
    }
    #[test]
    fn test_occurs_3_fails() {
        let v = Var::new("X", 0);
        let t = Term::Atom(Atom::new(
            "x",
            vec![Term::Atom(Atom::new(
                "y",
                vec![Term::Var(Var::new("Var", 0))],
            ))],
        ));
        assert!(!occurs(&v, &t))
    }
}
|
use clap::{crate_version, App, Arg};
use nix::sched::{clone, CloneFlags};
use nix::sys::signal::Signal;
use nix::sys::wait::waitpid;
use nix::unistd::execvp;
use std::ffi::{CStr, CString};
use std::process;
// Entry point for the cloned child: replace its image with the target
// command. execvp only returns on failure (expect aborts then), so the
// trailing 0 is never reached on success.
fn child_func(args: &[&CStr]) -> isize {
    execvp(&args[0], &args).expect("exec() failed");
    0
}
// 1 MiB stack for the cloned child.
const STACK_LENGTH: usize = 1024 * 1024;
/// Parses the namespace flags, clones a child into the requested new
/// namespaces, execs the given command there, and waits for it to exit.
fn main() {
    let matches = App::new("ns-child-exec")
        .version(crate_version!())
        .arg(
            Arg::with_name("ipc")
                .help("unshare IPC namespace")
                .short("i")
                .long("ipc"),
        )
        .arg(
            Arg::with_name("mount")
                .help("unshare mount namespace")
                .short("m")
                .long("mount"),
        )
        .arg(
            Arg::with_name("net")
                .help("unshare network namespace")
                .short("n")
                .long("net"),
        )
        .arg(
            Arg::with_name("pid")
                .help("unshare PID namespace")
                .short("p")
                .long("pid"),
        )
        .arg(
            Arg::with_name("uts")
                .help("unshare UTS namespace")
                .short("u")
                .long("uts"),
        )
        .arg(
            Arg::with_name("user")
                .help("unshare user namespace")
                .short("U")
                .long("user"),
        )
        .arg(
            Arg::with_name("verbose")
                .help("verbose operation")
                .short("v")
                .long("verbose"),
        )
        .arg(Arg::with_name("cmd").index(1).required(true))
        .arg(Arg::with_name("arg").multiple(true))
        .get_matches();
    // Translate each present flag into the matching CLONE_NEW* bit.
    let mut flags = CloneFlags::empty();
    if matches.is_present("ipc") {
        flags.set(CloneFlags::CLONE_NEWIPC, true)
    }
    if matches.is_present("mount") {
        flags.set(CloneFlags::CLONE_NEWNS, true)
    }
    if matches.is_present("net") {
        flags.set(CloneFlags::CLONE_NEWNET, true)
    }
    if matches.is_present("pid") {
        flags.set(CloneFlags::CLONE_NEWPID, true)
    }
    if matches.is_present("uts") {
        flags.set(CloneFlags::CLONE_NEWUTS, true)
    }
    if matches.is_present("user") {
        flags.set(CloneFlags::CLONE_NEWUSER, true)
    }
    let verbose = matches.is_present("verbose");
    let mut child_stack: [u8; STACK_LENGTH] = [0; STACK_LENGTH];
    let cmd = matches.value_of("cmd").unwrap();
    // Build the argv for execvp: command first, then any extra arguments.
    let mut args_exec_owned: Vec<CString> = vec![CString::new(cmd).unwrap()];
    if matches.is_present("arg") {
        matches
            .values_of("arg")
            .unwrap()
            .for_each(|a| args_exec_owned.push(CString::new(a).unwrap()));
    }
    let args_exec: Vec<&CStr> = args_exec_owned.iter().map(CString::as_c_str).collect();
    // SIGCHLD makes the child's exit reportable to waitpid() below.
    let pid = clone(
        Box::new(|| child_func(&args_exec)),
        &mut child_stack,
        flags,
        Some(Signal::SIGCHLD as i32),
    )
    .expect("clone() failed");
    if verbose {
        println!("ns-child-exec: PID of child created by clone is {}", pid);
    }
    // Parent process: Wait for child.
    waitpid(pid, None).expect("waitpid() failed");
    if verbose {
        println!("ns-child-exec: Terminating");
    }
    process::exit(0);
}
|
/// Concatenates `in1` and `in2` into `out` (which must be exactly the
/// combined length). Uses `copy_from_slice` since `i32` is `Copy`
/// (memcpy semantics), and asserts the length precondition up front so a
/// mismatch fails with a clear message instead of a slice-length panic.
fn merge(in1: &[i32], in2: &[i32], out: &mut [i32]) {
    assert_eq!(
        in1.len() + in2.len(),
        out.len(),
        "merge: output length must equal the sum of the input lengths"
    );
    let (left, right) = out.split_at_mut(in1.len());
    left.copy_from_slice(in1);
    right.copy_from_slice(in2);
}
// least significant digit radix sort
fn radix_sort(data: &mut [i32]) {
    // One stable partition pass per magnitude bit. Bit 31 is the sign bit
    // and is handled by the dedicated pass below.
    for bit in 0..31 {
        let (zeros, ones): (Vec<i32>, Vec<i32>) =
            data.iter().partition(|&&x| (x >> bit) & 1 == 0);
        merge(&zeros, &ones, data);
    }
    // Sign pass: negative values come before non-negative ones.
    let (negative, positive): (Vec<i32>, Vec<i32>) = data.iter().partition(|&&x| x < 0);
    merge(&negative, &positive, data);
}
fn main() {
let mut data = [170, 45, 75, -90, -802, 24, 2, 66, -17, 2];
println!("Before: {:?}", data);
radix_sort(&mut data);
println!("After: {:?}", data);
} |
// svd2rust-style generated accessor types for the RCC_AXIDIVR register.
#[doc = "Register `RCC_AXIDIVR` reader"]
pub type R = crate::R<RCC_AXIDIVR_SPEC>;
#[doc = "Register `RCC_AXIDIVR` writer"]
pub type W = crate::W<RCC_AXIDIVR_SPEC>;
#[doc = "Field `AXIDIV` reader - AXIDIV"]
pub type AXIDIV_R = crate::FieldReader;
#[doc = "Field `AXIDIV` writer - AXIDIV"]
// 3-bit field occupying bits 0:2.
pub type AXIDIV_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 3, O>;
#[doc = "Field `AXIDIVRDY` reader - AXIDIVRDY"]
// Read-only ready flag (bit 31) — note there is no writer type for it.
pub type AXIDIVRDY_R = crate::BitReader;
impl R {
    #[doc = "Bits 0:2 - AXIDIV"]
    #[inline(always)]
    pub fn axidiv(&self) -> AXIDIV_R {
        // Mask 7 keeps the 3 AXIDIV bits.
        AXIDIV_R::new((self.bits & 7) as u8)
    }
    #[doc = "Bit 31 - AXIDIVRDY"]
    #[inline(always)]
    pub fn axidivrdy(&self) -> AXIDIVRDY_R {
        AXIDIVRDY_R::new(((self.bits >> 31) & 1) != 0)
    }
}
impl W {
    #[doc = "Bits 0:2 - AXIDIV"]
    #[inline(always)]
    #[must_use]
    pub fn axidiv(&mut self) -> AXIDIV_W<RCC_AXIDIVR_SPEC, 0> {
        AXIDIV_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    // Unsafe: bypasses field-level checking; callers may set reserved bits.
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
// Generated register-spec marker type and trait impls for RCC_AXIDIVR.
#[doc = "This register is used to control the AXI Matrix clock prescaler. Refer to Section: Sub-system clock generation for additional information. If TZEN = , this register can only be modified in secure mode.\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`rcc_axidivr::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`rcc_axidivr::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct RCC_AXIDIVR_SPEC;
impl crate::RegisterSpec for RCC_AXIDIVR_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`rcc_axidivr::R`](R) reader structure"]
impl crate::Readable for RCC_AXIDIVR_SPEC {}
#[doc = "`write(|w| ..)` method takes [`rcc_axidivr::W`](W) writer structure"]
impl crate::Writable for RCC_AXIDIVR_SPEC {
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets RCC_AXIDIVR to value 0x8000_0000"]
// Reset value: AXIDIVRDY (bit 31) set, divider field zero.
impl crate::Resettable for RCC_AXIDIVR_SPEC {
    const RESET_VALUE: Self::Ux = 0x8000_0000;
}
|
// Generated from vec_mask.rs.tera template. Edit the template, not the generated file.
#[cfg(not(target_arch = "spirv"))]
use core::fmt;
use core::ops::*;
/// A 3-dimensional `u32` vector mask.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
#[repr(C, align(16))]
pub struct BVec3A {
    pub x: u32,
    pub y: u32,
    pub z: u32,
}

// Maps a `bool` (as index 0 or 1) onto an all-zeros / all-ones lane.
const MASK: [u32; 2] = [0, 0xff_ff_ff_ff];

impl BVec3A {
    /// All false.
    pub const FALSE: Self = Self::splat(false);

    /// All true.
    pub const TRUE: Self = Self::splat(true);

    /// Creates a new vector mask.
    #[inline(always)]
    pub const fn new(x: bool, y: bool, z: bool) -> Self {
        Self {
            x: MASK[x as usize],
            y: MASK[y as usize],
            z: MASK[z as usize],
        }
    }

    /// Creates a vector with all elements set to `v`.
    #[inline]
    pub const fn splat(v: bool) -> Self {
        Self::new(v, v, v)
    }

    /// Returns a bitmask with the lowest 3 bits set from the elements of `self`.
    ///
    /// A true element results in a `1` bit and a false element in a `0` bit. Element `x` goes
    /// into the first lowest bit, element `y` into the second, etc.
    #[inline]
    pub fn bitmask(self) -> u32 {
        (self.x & 1) | ((self.y & 1) << 1) | ((self.z & 1) << 2)
    }

    /// Returns true if any of the elements are true, false otherwise.
    #[inline]
    pub fn any(self) -> bool {
        (self.x | self.y | self.z) & 1 == 1
    }

    /// Returns true if all the elements are true, false otherwise.
    #[inline]
    pub fn all(self) -> bool {
        self.x & self.y & self.z & 1 == 1
    }

    /// Tests the value at `index`.
    ///
    /// Panics if `index` is greater than 2.
    #[inline]
    pub fn test(&self, index: usize) -> bool {
        if index > 2 {
            panic!("index out of bounds");
        }
        self.into_bool_array()[index]
    }

    /// Sets the element at `index`.
    ///
    /// Panics if `index` is greater than 2.
    #[inline]
    pub fn set(&mut self, index: usize, value: bool) {
        let lane = match index {
            0 => &mut self.x,
            1 => &mut self.y,
            2 => &mut self.z,
            _ => panic!("index out of bounds"),
        };
        *lane = MASK[value as usize];
    }

    // Collapses each lane to its low bit, as a `bool` triple.
    #[inline]
    fn into_bool_array(self) -> [bool; 3] {
        [self.x & 1 == 1, self.y & 1 == 1, self.z & 1 == 1]
    }

    // Raw lane values, unchanged.
    #[inline]
    fn into_u32_array(self) -> [u32; 3] {
        [self.x, self.y, self.z]
    }
}
impl Default for BVec3A {
    /// The default mask is all-false.
    #[inline]
    fn default() -> Self {
        Self::FALSE
    }
}

impl BitAnd for BVec3A {
    type Output = Self;

    /// Lane-wise bitwise AND.
    #[inline]
    fn bitand(self, rhs: Self) -> Self {
        let (x, y, z) = (self.x & rhs.x, self.y & rhs.y, self.z & rhs.z);
        Self { x, y, z }
    }
}

impl BitAndAssign for BVec3A {
    /// Lane-wise in-place bitwise AND.
    #[inline]
    fn bitand_assign(&mut self, rhs: Self) {
        self.x &= rhs.x;
        self.y &= rhs.y;
        self.z &= rhs.z;
    }
}

impl BitOr for BVec3A {
    type Output = Self;

    /// Lane-wise bitwise OR.
    #[inline]
    fn bitor(self, rhs: Self) -> Self {
        let (x, y, z) = (self.x | rhs.x, self.y | rhs.y, self.z | rhs.z);
        Self { x, y, z }
    }
}

impl BitOrAssign for BVec3A {
    /// Lane-wise in-place bitwise OR.
    #[inline]
    fn bitor_assign(&mut self, rhs: Self) {
        self.x |= rhs.x;
        self.y |= rhs.y;
        self.z |= rhs.z;
    }
}

impl BitXor for BVec3A {
    type Output = Self;

    /// Lane-wise bitwise XOR.
    #[inline]
    fn bitxor(self, rhs: Self) -> Self {
        let (x, y, z) = (self.x ^ rhs.x, self.y ^ rhs.y, self.z ^ rhs.z);
        Self { x, y, z }
    }
}

impl BitXorAssign for BVec3A {
    /// Lane-wise in-place bitwise XOR.
    #[inline]
    fn bitxor_assign(&mut self, rhs: Self) {
        self.x ^= rhs.x;
        self.y ^= rhs.y;
        self.z ^= rhs.z;
    }
}

impl Not for BVec3A {
    type Output = Self;

    /// Lane-wise bitwise complement.
    #[inline]
    fn not(self) -> Self {
        let (x, y, z) = (!self.x, !self.y, !self.z);
        Self { x, y, z }
    }
}
#[cfg(not(target_arch = "spirv"))]
impl fmt::Debug for BVec3A {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let arr = self.into_u32_array();
write!(
f,
"{}({:#x}, {:#x}, {:#x})",
stringify!(BVec3A),
arr[0],
arr[1],
arr[2]
)
}
}
#[cfg(not(target_arch = "spirv"))]
impl fmt::Display for BVec3A {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let arr = self.into_bool_array();
write!(f, "[{}, {}, {}]", arr[0], arr[1], arr[2])
}
}
impl From<BVec3A> for [bool; 3] {
#[inline]
fn from(mask: BVec3A) -> Self {
mask.into_bool_array()
}
}
impl From<BVec3A> for [u32; 3] {
#[inline]
fn from(mask: BVec3A) -> Self {
mask.into_u32_array()
}
}
|
//! Graphics helpers
use crate::graphics::bitmap::{BitmapMode, PalletMode};
use crate::graphics::colour::{self, Colour};
/// The colours of a rainbow
// Six bands drawn top-to-bottom in this order: red, orange, yellow,
// green, blue, violet (indigo omitted — presumably to keep six bands;
// confirm against the `colour` module if it matters).
const RAINBOW: [Colour; 6] = [
    colour::R,
    colour::O,
    colour::Y,
    colour::G,
    colour::B,
    colour::V,
];
/// Draw a rainbow with the given `thickness` to the graphic instance `g`.
pub fn rainbow<T: BitmapMode<u16>>(g: &mut T, thickness: usize) {
    let (width, height, _) = g.bounds();
    // Vertically centre the stack of colour bands.
    let top = height / 2 - RAINBOW.len() * thickness / 2;
    for col in 0..width {
        for (band, colour) in RAINBOW.iter().enumerate() {
            let band_top = top + thickness * band;
            for row in band_top..band_top + thickness {
                g.set(col, row, colour.u16());
            }
        }
    }
}
/// Draw a rainbow with the given `thickness` to the graphic instance `g`,
/// where `g` is a pallet mode.
pub fn rainbow2<T: BitmapMode<u8> + PalletMode<u16>>(g: &mut T, thickness: usize) {
    // Pallet slots 1..=6 receive the rainbow colours (slot 0 is untouched).
    for (slot, colour) in RAINBOW.iter().enumerate() {
        g.set_pallet(slot + 1, colour.u16())
    }
    let (width, height, _) = g.bounds();
    // Vertically centre the stack of colour bands.
    let top = height / 2 - RAINBOW.len() * thickness / 2;
    for col in 0..width {
        for band in 0..RAINBOW.len() {
            let band_top = top + thickness * band;
            for row in band_top..band_top + thickness {
                // Pixels store the pallet index, not the colour itself.
                g.set(col, row, (band + 1) as u8);
            }
        }
    }
}
|
use game_state;
use std::str;
pub fn get_rook_moves(state: &game_state::GameState, piece_coord: Vec<u8>) -> Vec<String> {
let mut can_move_here = true;
let mut allowed_rook_moves: Vec<String> = Vec::new();
//down
let down = (0i8,1i8);
//up
let up = (0i8,-1i8);
//left
let left = (-1i8,0i8);
//right
let right = (1i8,0i8);
let mut move_piece_coord0 = piece_coord[0] as i8;
let mut move_piece_coord1 = piece_coord[1] as i8;
//down
while true {
move_piece_coord0 += down.0;
move_piece_coord1 += down.1;
if (move_piece_coord1 as u8) < 'G' as u8 {
let mut coord:Vec<u8> = Vec::new();
coord.push(move_piece_coord0 as u8);
coord.push(move_piece_coord1 as u8);
if let Ok(playerat_opt) = state.get_player_color_at(coord.as_slice()) {
if let Some(playerAt) = playerat_opt {
if playerAt == state.player_turn {
can_move_here = false;
}
}
if can_move_here {
let mut rook_move_str = String::new();
rook_move_str.push(piece_coord[0] as char);
rook_move_str.push(piece_coord[1] as char);
rook_move_str.push('-');
rook_move_str.push(move_piece_coord0 as u8 as char);
rook_move_str.push(move_piece_coord1 as u8 as char);
allowed_rook_moves.push(rook_move_str);
} else {
break;
}
}
} else {
break;
}
}
//up
move_piece_coord0 = piece_coord[0] as i8;
move_piece_coord1 = piece_coord[1] as i8;
can_move_here = true;
while true {
move_piece_coord0 += up.0;
move_piece_coord1 += up.1;
can_move_here = true;
if (move_piece_coord1 as u8) > '0' as u8 {
let mut coord:Vec<u8> = Vec::new();
coord.push(move_piece_coord0 as u8);
coord.push(move_piece_coord1 as u8);
if let Ok(playerat_opt) = state.get_player_color_at(coord.as_slice()) {
if let Some(playerAt) = playerat_opt {
if playerAt == state.player_turn {
can_move_here = false;
}
}
if can_move_here {
let mut rook_move_str = String::new();
rook_move_str.push(piece_coord[0] as char);
rook_move_str.push(piece_coord[1] as char);
rook_move_str.push('-');
rook_move_str.push(move_piece_coord0 as u8 as char);
rook_move_str.push(move_piece_coord1 as u8 as char);
allowed_rook_moves.push(rook_move_str);
} else {
break;
}
}
} else {
break;
}
}
//left
move_piece_coord0 = piece_coord[0] as i8;
move_piece_coord1 = piece_coord[1] as i8;
can_move_here = true;
while true {
move_piece_coord0 += left.0;
move_piece_coord1 += left.1;
can_move_here = true;
if (move_piece_coord0 as u8) > 'A' as u8 {
let mut coord:Vec<u8> = Vec::new();
coord.push(move_piece_coord0 as u8);
coord.push(move_piece_coord1 as u8);
if let Ok(playerat_opt) = state.get_player_color_at(coord.as_slice()) {
if let Some(playerAt) = playerat_opt {
if playerAt == state.player_turn {
can_move_here = false;
}
}
if can_move_here {
let mut rook_move_str = String::new();
rook_move_str.push(piece_coord[0] as char);
rook_move_str.push(piece_coord[1] as char);
rook_move_str.push('-');
rook_move_str.push(move_piece_coord0 as u8 as char);
rook_move_str.push(move_piece_coord1 as u8 as char);
allowed_rook_moves.push(rook_move_str);
} else {
break;
}
}
} else {
break;
}
}
//right
move_piece_coord0 = piece_coord[0] as i8;
move_piece_coord1 = piece_coord[1] as i8;
can_move_here = true;
while true {
move_piece_coord0 += right.0;
move_piece_coord1 += right.1;
can_move_here = true;
if (move_piece_coord0 as u8) < 'I' as u8 {
let mut coord:Vec<u8> = Vec::new();
coord.push(move_piece_coord0 as u8);
coord.push(move_piece_coord1 as u8);
if let Ok(playerat_opt) = state.get_player_color_at(coord.as_slice()) {
if let Some(playerAt) = playerat_opt {
if playerAt == state.player_turn {
can_move_here = false;
}
}
if can_move_here {
let mut rook_move_str = String::new();
rook_move_str.push(piece_coord[0] as char);
rook_move_str.push(piece_coord[1] as char);
rook_move_str.push('-');
rook_move_str.push(move_piece_coord0 as u8 as char);
rook_move_str.push(move_piece_coord1 as u8 as char);
allowed_rook_moves.push(rook_move_str);
} else {
break;
}
}
} else {
break;
}
}
allowed_rook_moves
}
|
use std::fs::File;
use std::io::prelude::*;
use std::env;
use std::process;
use std::str;
use std::collections::HashMap;
/// Reads the whole file at `file_path` into a `String`.
/// Panics (with the messages below) if the file cannot be opened or read.
fn load_file(file_path: &str) -> String {
    let mut file = File::open(file_path).expect("Unable to open file");
    let mut contents = String::new();
    file.read_to_string(&mut contents).expect("cant read file");
    contents
}
/// Fully reacts the polymer and returns the length of the residue.
///
/// Adjacent units of the same type but opposite polarity (ASCII values
/// differing by exactly 32, e.g. `a`/`A`) annihilate, and removals cascade.
/// The input buffer is left unmodified.
///
/// Rewritten from the original two-pointer version, which (a) used byte 42
/// (`*`) as an in-band "deleted" sentinel — silently dropping any `*` that
/// legitimately appears in the input — and (b) obscured the cascade logic.
/// A stack makes it explicit: each incoming unit either annihilates the
/// current stack top or is pushed; the stack ends up as the reacted polymer.
fn react(bytes: &mut Vec<u8>) -> usize {
    let mut stack: Vec<u8> = Vec::with_capacity(bytes.len());
    for &unit in bytes.iter() {
        match stack.last() {
            // Same letter, opposite case: the pair cancels.
            Some(&top) if (top as i32 - unit as i32).abs() == 32 => {
                stack.pop();
            }
            _ => stack.push(unit),
        }
    }
    stack.len()
}
fn main() {
let args: Vec<String> = env::args().collect();
if args.len() != 2 {
println!("day5 <file>");
process::exit(1);
}
let row = load_file(&args[1]);
let bytes = row.as_bytes().to_vec();
let mut input = bytes.clone();
let reacted_len = react(&mut input);
println!("Part A answer: reacted len is {}", reacted_len);
let mut map = HashMap::new();
for s in 65..91 {
let mut input: Vec<u8> = bytes
.iter()
.filter(|x| *x != &s && *x != &(s + 32))
.map(|x| *x)
.collect();
let reacted_len = react(&mut input);
map.insert(s as char, reacted_len);
}
let min = map.iter().min_by_key(|&(_, item)| item).unwrap();
println!("Part B answer: shortest fixed polymer is {}", min.1);
}
|
use cgmath::prelude::ElementWise;
use cgmath::{dot, Array, EuclideanSpace, InnerSpace, Point3, Vector3};
use rand::Rng;
use crate::light::{Color, Light, Ray};
pub type Triangle = [Point3<f64>; 3];
// A single triangle treated as a (bounded) plane in the scene.
pub struct Plane {
    // Pseudo-unique identifier derived from vertex and colour sums in
    // `new`; used only for equality (see `PartialEq` below). Collisions
    // are possible — float sums are not a robust key.
    pub id: f64,
    // The three corner points.
    pub vertices: Triangle,
    // Surface colour, multiplied into reflected rays.
    pub color: Color,
    // Unit normal, precomputed from the vertices in `new`.
    pub normal: Vector3<f64>,
}
/// Barycentric point-in-triangle test; `point` is assumed to lie in the
/// triangle's plane (the caller intersects with the plane first).
fn is_point_in_triangle(vertices: Triangle, point: &Point3<f64>) -> bool {
    // Edge vectors rooted at vertices[0].
    let edge0 = vertices[2] - vertices[0];
    let edge1 = vertices[1] - vertices[0];
    let to_point = *point - vertices[0];
    // Pairwise dot products feeding the barycentric solve.
    let d00 = dot(edge0, edge0);
    let d01 = dot(edge0, edge1);
    let d02 = dot(edge0, to_point);
    let d11 = dot(edge1, edge1);
    let d12 = dot(edge1, to_point);
    // Barycentric coordinates (u, v) relative to vertices[0].
    let inv_denom = 1.0 / (d00 * d11 - d01 * d01);
    let u = (d11 * d02 - d01 * d12) * inv_denom;
    let v = (d00 * d12 - d01 * d02) * inv_denom;
    // Inside iff both coordinates are non-negative and they sum below 1.
    u >= 0.0 && v >= 0.0 && u + v < 1.0
}
/// Unit normal of the triangle's plane; orientation follows the
/// (v2 - v0) x (v1 - v0) winding.
fn normal(vertices: &Triangle) -> Vector3<f64> {
    (vertices[2] - vertices[0])
        .cross(vertices[1] - vertices[0])
        .normalize()
}
impl Plane {
    /// Builds a triangle-plane, deriving `id` and the cached normal.
    pub fn new(vertices: Triangle, color: Color) -> Self {
        Plane {
            // id = sum of all vertex components plus the colour components.
            // NOTE(review): float sums are a weak identity — distinct planes
            // can collide; fine only while equality is best-effort.
            id: vertices.iter().fold(0.0, |sum, p| p.sum() + sum as f64) + color.sum(),
            vertices: vertices,
            color: color,
            normal: normal(&vertices),
        }
    }
    /// Ray/triangle intersection: returns the hit point, or `None` when the
    /// ray is parallel to the plane, hits behind the origin, or misses the
    /// triangle's extent.
    pub fn intersect(&self, ray: &Ray) -> Option<Point3<f64>> {
        let n = self.normal;
        // Cosine between the ray and the normal; exact-zero compare means
        // near-parallel rays still pass through and produce huge `d`s.
        let col = dot(n, ray.direction);
        if col == 0.0 {
            None
        } else {
            // Signed distance along the ray to the plane.
            let d = dot(self.vertices[0] - ray.pos, n) / col;
            if d < 0.0 {
                // Plane lies behind the ray origin.
                return None;
            }
            let point = ray.pos + d * ray.direction;
            if is_point_in_triangle(self.vertices, &point) {
                Some(point)
            } else {
                None
            }
        }
    }
    /// Random point on the triangle using the standard square-root warp
    /// of two uniform variates (weights sum to 1 by construction).
    pub fn random_point(&self) -> Point3<f64> {
        let mut rng = rand::thread_rng();
        let (r0, r1): (f64, f64) = (rng.gen(), rng.gen());
        ((1.0 - r0.sqrt()) * self.vertices[0])
            .add_element_wise(r0.sqrt() * (1.0 - r1) * self.vertices[1])
            .add_element_wise((r0.sqrt() * r1) * self.vertices[2])
    }
    /// True when `p0` and `p1` lie on the same side of this plane
    /// (compares the signs of their normal-projected offsets).
    pub fn are_on_same_side(&self, p0: Point3<f64>, p1: Point3<f64>) -> bool {
        dot(self.normal, self.vertices[0] - p0).signum()
            == dot(self.normal, self.vertices[0] - p1).signum()
    }
    /// Mirror-reflects `ray` about this plane at the intersection point.
    ///
    /// Panics (via `unwrap`) if `ray` does not intersect the triangle —
    /// callers must only pass rays known to hit it.
    pub fn reflect(&self, ray: &Ray) -> Ray {
        let pos = self.intersect(ray).unwrap();
        // Householder reflection of the direction about the normal.
        let direction = ray.direction - 2.0 * dot(self.normal, ray.direction) * self.normal;
        Ray {
            pos: pos,
            // Flip the result if it would pass through the plane, so the
            // reflected ray stays on the incoming ray's side.
            direction: if self.are_on_same_side(ray.pos, pos + direction) {
                direction
            } else {
                -direction
            },
            // Surface colour filters the ray's colour component-wise.
            color: ray.color.mul_element_wise(self.color),
        }
    }
}
impl PartialEq<Plane> for Plane {
    /// Planes compare equal when their derived float `id`s match.
    fn eq(&self, other: &Plane) -> bool {
        self.id == other.id
    }
}
|
use anyhow::{Error, Result};
use hyper::{Body, Response, StatusCode};
use std::fmt::Display;
use url::Url;
// Error describing an HTTP response with a non-success status code.
#[derive(Debug)]
pub struct HttpError {
    // Status the server responded with.
    pub status_code: StatusCode,
    // URL the failing request was sent to.
    pub request_url: Url,
    // Response body; not yet captured (always `None` as constructed here).
    pub body: Option<Body>,
}
impl Display for HttpError {
    /// Renders "Request to <url> responded with <status>".
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // `Url` implements `Display`, so format it directly instead of
        // allocating an intermediate `String` with `.to_string()`.
        write!(
            f,
            "Request to {} responded with {}",
            self.request_url, self.status_code
        )
    }
}
impl std::error::Error for HttpError {}
impl HttpError {
pub fn is_error(path: Url, response: &Response<Body>) -> Result<()> {
if response.status().as_u16() >= 300 {
return Err(Error::new(HttpError {
status_code: response.status(),
request_url: path,
// This must be implemented in the future for
// better debugging experience
body: None,
}));
}
Ok(())
}
}
|
use std::path::PathBuf;
// A project identified by its location on disk.
#[derive(Clone)]
pub struct Project {
    // Filesystem root of the project.
    pub directory: PathBuf,
}
|
use log::{LevelFilter, Log, Metadata, Record};
// Minimal stderr logger for the CLI.
#[allow(dead_code)]
struct Logger {
    // Optional module-path filter; currently never set or consulted by
    // `log`/`enabled` — hence the `dead_code` allowance above.
    pub filter: Option<String>,
}
impl Log for Logger {
    /// Accepts every record; filtering is not implemented.
    fn enabled(&self, _: &Metadata) -> bool {
        true
    }

    /// Writes `[module] message` to stderr, stripping the crate-local
    /// `rust_tree_sitter_cli::` prefix from the module path.
    fn log(&self, record: &Record) {
        let module = record
            .module_path()
            .unwrap_or_default()
            .trim_start_matches("rust_tree_sitter_cli::");
        eprintln!("[{}] {}", module, record.args());
    }

    /// stderr is unbuffered here; nothing to flush.
    fn flush(&self) {}
}
/// Installs `Logger` as the global logger at `Info` level.
/// Panics if a global logger was already set (call exactly once).
pub fn init() {
    let logger = Logger { filter: None };
    log::set_boxed_logger(Box::new(logger)).unwrap();
    log::set_max_level(LevelFilter::Info);
}
|
// svd2rust-generated accessor boilerplate for the FDCAN_NBTP register
// (Nominal Bit Timing and Prescaler). Field masks and offsets mirror the
// SVD; regenerate from the SVD rather than editing by hand.
#[doc = "Register `FDCAN_NBTP` reader"]
pub type R = crate::R<FDCAN_NBTP_SPEC>;
#[doc = "Register `FDCAN_NBTP` writer"]
pub type W = crate::W<FDCAN_NBTP_SPEC>;
// NOTE(review): the SVD names this field TSEG2 (no N prefix) while its
// sibling is NTSEG1 — kept exactly as generated.
#[doc = "Field `TSEG2` reader - Nominal Time segment after sample point"]
pub type TSEG2_R = crate::FieldReader;
#[doc = "Field `TSEG2` writer - Nominal Time segment after sample point"]
pub type TSEG2_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 7, O>;
#[doc = "Field `NTSEG1` reader - Nominal Time segment before sample point"]
pub type NTSEG1_R = crate::FieldReader;
#[doc = "Field `NTSEG1` writer - Nominal Time segment before sample point"]
pub type NTSEG1_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 8, O>;
#[doc = "Field `NBRP` reader - Bit Rate Prescaler"]
pub type NBRP_R = crate::FieldReader<u16>;
#[doc = "Field `NBRP` writer - Bit Rate Prescaler"]
pub type NBRP_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 9, O, u16>;
#[doc = "Field `NSJW` reader - NSJW: Nominal (Re)Synchronization Jump Width"]
pub type NSJW_R = crate::FieldReader;
#[doc = "Field `NSJW` writer - NSJW: Nominal (Re)Synchronization Jump Width"]
pub type NSJW_W<'a, REG, const O: u8> = crate::FieldWriter<'a, REG, 7, O>;
impl R {
    #[doc = "Bits 0:6 - Nominal Time segment after sample point"]
    #[inline(always)]
    pub fn tseg2(&self) -> TSEG2_R {
        // 7-bit field at offset 0.
        TSEG2_R::new((self.bits & 0x7f) as u8)
    }
    #[doc = "Bits 8:15 - Nominal Time segment before sample point"]
    #[inline(always)]
    pub fn ntseg1(&self) -> NTSEG1_R {
        // 8-bit field at offset 8.
        NTSEG1_R::new(((self.bits >> 8) & 0xff) as u8)
    }
    #[doc = "Bits 16:24 - Bit Rate Prescaler"]
    #[inline(always)]
    pub fn nbrp(&self) -> NBRP_R {
        // 9-bit field at offset 16 (needs a u16 reader).
        NBRP_R::new(((self.bits >> 16) & 0x01ff) as u16)
    }
    #[doc = "Bits 25:31 - NSJW: Nominal (Re)Synchronization Jump Width"]
    #[inline(always)]
    pub fn nsjw(&self) -> NSJW_R {
        // 7-bit field at offset 25.
        NSJW_R::new(((self.bits >> 25) & 0x7f) as u8)
    }
}
impl W {
    #[doc = "Bits 0:6 - Nominal Time segment after sample point"]
    #[inline(always)]
    #[must_use]
    pub fn tseg2(&mut self) -> TSEG2_W<FDCAN_NBTP_SPEC, 0> {
        TSEG2_W::new(self)
    }
    #[doc = "Bits 8:15 - Nominal Time segment before sample point"]
    #[inline(always)]
    #[must_use]
    pub fn ntseg1(&mut self) -> NTSEG1_W<FDCAN_NBTP_SPEC, 8> {
        NTSEG1_W::new(self)
    }
    #[doc = "Bits 16:24 - Bit Rate Prescaler"]
    #[inline(always)]
    #[must_use]
    pub fn nbrp(&mut self) -> NBRP_W<FDCAN_NBTP_SPEC, 16> {
        NBRP_W::new(self)
    }
    #[doc = "Bits 25:31 - NSJW: Nominal (Re)Synchronization Jump Width"]
    #[inline(always)]
    #[must_use]
    pub fn nsjw(&mut self) -> NSJW_W<FDCAN_NBTP_SPEC, 25> {
        NSJW_W::new(self)
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
}
#[doc = "FDCAN Nominal Bit Timing and Prescaler Register\n\nYou can [`read`](crate::generic::Reg::read) this register and get [`fdcan_nbtp::R`](R). You can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero) this register using [`fdcan_nbtp::W`](W). You can also [`modify`](crate::generic::Reg::modify) this register. See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub struct FDCAN_NBTP_SPEC;
impl crate::RegisterSpec for FDCAN_NBTP_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [`fdcan_nbtp::R`](R) reader structure"]
impl crate::Readable for FDCAN_NBTP_SPEC {}
#[doc = "`write(|w| ..)` method takes [`fdcan_nbtp::W`](W) writer structure"]
impl crate::Writable for FDCAN_NBTP_SPEC {
    // No write-0-to-clear / write-1-to-clear fields in this register.
    const ZERO_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
    const ONE_TO_MODIFY_FIELDS_BITMAP: Self::Ux = 0;
}
#[doc = "`reset()` method sets FDCAN_NBTP to value 0x0600_0a03"]
impl crate::Resettable for FDCAN_NBTP_SPEC {
    const RESET_VALUE: Self::Ux = 0x0600_0a03;
}
|
use super::super::prelude::{
HMODULE
};
pub type Module = HMODULE; |
// Entry point intentionally empty; this file's behaviour lives in its tests.
fn main() {}
#[cfg(test)]
mod tests {
    // Sanity check that the test harness runs at all.
    #[test]
    fn you_can_assert_eq() { assert_eq!(2, 2); }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.