content
stringlengths 12
392k
| id
int64 0
1.08k
|
|---|---|
pub fn write_bin_len<W>(wr: &mut W, len: u32) -> Result<Marker, ValueWriteError>
where W: Write
{
if len < 256 {
try!(write_marker(wr, Marker::Bin8));
write_data_u8(wr, len as u8).and(Ok(Marker::Bin8))
} else if len < 65536 {
try!(write_marker(wr, Marker::Bin16));
write_data_u16(wr, len as u16).and(Ok(Marker::Bin16))
} else {
try!(write_marker(wr, Marker::Bin32));
write_data_u32(wr, len).and(Ok(Marker::Bin32))
}
}
| 800
|
/// Parsing a declaration with an invalid variable name must fail, and the
/// failure must downcast to a `ParseError`.
fn test_invalid_variable_4() {
    let result = parse(&"$invalid: [ 1, 2, 3]");
    let err = result.unwrap_err();
    assert!(err.downcast_ref::<ParseError>().is_some());
}
| 801
|
/// Parallel quicksort using rayon's work-stealing `join`.
///
/// Partitions `v` around a pivot, then sorts the two halves in parallel: the
/// second closure is queued so an idle worker can steal it while this thread
/// runs the first.
///
/// FIX: removed a leftover debug `println!("{:?}", v)` that printed the whole
/// slice on every recursive call (the `Debug` bound is kept so the public
/// signature is unchanged).
pub fn quick_sort_rayon<T: Send + PartialOrd + Debug>(v: &mut [T]) {
    // One element (or none) is already sorted.
    if v.len() <= 1 {
        return;
    }
    // `pivot` presumably partitions in place and returns the pivot's final
    // index (the element at `p` is skipped below) — confirm in its definition.
    let p = pivot(v);
    let (a, b) = v.split_at_mut(p);
    // Put the second closure on the queue then start the first; if another
    // thread is ready it will steal the second. This recurses down the stack.
    rayon::join(|| quick_sort_rayon(a), || quick_sort_rayon(&mut b[1..]));
}
| 802
|
/// Returns `value` decremented by one.
fn sub(value: i32) -> i32 {
    let decremented = value - 1;
    decremented
}
| 803
|
fn
store
(
&
self
_val
:
(
)
_order
:
Ordering
)
{
}
| 804
|
/// Opens a hard-coded `Cargo.toml`, reads it line by line through a buffered
/// reader, and debug-prints every line that was read successfully (lines that
/// fail to read are silently skipped, as in the original best-effort loop).
pub fn readline_and_print() -> io::Result<()> {
    let file = File::open("/Users/liwei/coding/rust/git/rust/basic/fs/Cargo.toml")?;
    let reader = BufReader::new(file);
    for line in reader.lines().flatten() {
        println!("{:?}", line);
    }
    Ok(())
}
| 805
|
/// Runs the yield-based spin loop with its flag set to `false`.
/// NOTE(review): the flag's meaning is defined by `yield_spin_loop`, which is
/// not visible here — presumably `false` selects the "unfair" mode named in
/// this wrapper; confirm against that function.
fn yield_spin_loop_unfair() {
yield_spin_loop(false);
}
| 806
|
/// Converts an arbitrary schema name into a PascalCase Rust type name, then
/// passes it through `fixup` to repair anything that is still not a valid
/// identifier.
fn to_rust_typestr(s: &str) -> String {
    use inflector::cases::pascalcase::to_pascal_case;
    fixup(to_pascal_case(s))
}
| 807
|
/// Breadth-first search outward from `from`, returning a clone of the first
/// reachable node whose `used` field is 0, or `None` if there is none.
///
/// FIX: the frontier handover previously did `scan_list = temp_list.clone()`
/// even though `temp_list` was dropped immediately afterwards — the clone was
/// a pure waste; the vector is now moved instead.
fn find_first_zero_space_node(nodes: &HashMap<String, Node>, from: &Node) -> Option<Node> {
    // Current BFS frontier.
    let mut scan_list: Vec<&Node> = vec![from];
    // Names already enqueued, so each node is expanded at most once.
    let mut used_list: HashSet<String> = HashSet::new();
    loop {
        let mut next_frontier: Vec<&Node> = Vec::new();
        let mut any_found = false;
        for node in &scan_list {
            for name in node.get_neighbours() {
                if used_list.contains(&name) {
                    continue;
                }
                any_found = true;
                used_list.insert(name.clone());
                let neighbour = nodes.get(&name).unwrap();
                if neighbour.used == 0 {
                    return Some(neighbour.clone());
                }
                next_frontier.push(neighbour);
            }
        }
        // Move (don't clone) the next frontier into place.
        scan_list = next_frontier;
        if !any_found {
            break;
        }
    }
    None
}
| 808
|
/// Downloads `url` and verifies the body's SHA-256 digest against the
/// hex-encoded `hash`; returns the bytes on success or `Error::HashError`
/// on digest mismatch. Network, I/O and hex-decode failures propagate via `?`.
fn download_and_verify(url: &str, hash: &str) -> crate::Result<Vec<u8>> {
common::print_info(format!("Downloading {}", url).as_str())?;
let response = attohttpc::get(url).send()?;
let data: Vec<u8> = response.bytes()?;
common::print_info("validating hash")?;
// SHA-256 over the full downloaded body.
let mut hasher = sha2::Sha256::new();
hasher.update(&data);
let url_hash = hasher.finalize().to_vec();
// `hash` is expected to be a hex string (e.g. from a release manifest).
let expected_hash = hex::decode(hash)?;
if expected_hash == url_hash {
Ok(data)
} else {
Err(crate::Error::HashError)
}
}
| 809
|
/// Builds an unevaluated application node `f arg` as a shared, mutable value.
/// `computed: false` marks it as a thunk still awaiting evaluation.
pub fn ap(f: Value, arg: Value) -> Value {
Rc::new(RefCell::new(V {
val: Value_::Apply(f, arg),
computed: false,
}))
}
| 810
|
/// Aborts the process if `func_1` does not return the expected constant.
/// NOTE(review): `abort()` here is an extern/unsafe call — presumably libc's
/// `abort`; confirm the declaration elsewhere in the file.
fn func_2() {
if func_1() != Some( CONSTANT ) {
unsafe { abort(); }
}
}
| 811
|
// Encoding test: INC with a byte-sized, scaled-and-displaced indirect operand
// must assemble to exactly these machine-code bytes in 32-bit (Dword) mode.
fn inc_8() {
run_test(&Instruction { mnemonic: Mnemonic::INC, operand1: Some(IndirectScaledDisplaced(EAX, Two, 1716018741, Some(OperandSize::Byte), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[254, 4, 69, 53, 94, 72, 102], OperandSize::Dword)
}
| 812
|
/// End-to-end check of `cargo fuzz init`: the generated scaffolding (fuzz
/// dir, fuzz Cargo.toml, targets dir, default target file) must all exist,
/// and the generated target must actually run.
fn init() {
let project = project("init").build();
project.cargo_fuzz().arg("init").assert().success();
assert!(project.fuzz_dir().is_dir());
assert!(project.fuzz_cargo_toml().is_file());
assert!(project.fuzz_targets_dir().is_dir());
assert!(project.fuzz_target_path("fuzz_target_1").is_file());
// Smoke-run the generated target for a single iteration (-runs=1).
project
.cargo_fuzz()
.arg("run")
.arg("fuzz_target_1")
.arg("--")
.arg("-runs=1")
.assert()
.success();
}
| 813
|
/// Returns `true` if any method of any resource supports media download or
/// declares a media-upload description.
fn any_method_supports_media(resources: &[Resource]) -> bool {
    resources
        .iter()
        .flat_map(|resource| resource.methods.iter())
        .any(|method| method.supports_media_download || method.media_upload.is_some())
}
| 814
|
// Encoding test: INC on the DX register must assemble to the single opcode
// byte 0x42 in 16-bit (Word) mode.
fn inc_1() {
run_test(&Instruction { mnemonic: Mnemonic::INC, operand1: Some(Direct(DX)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[66], OperandSize::Word)
}
| 815
|
/// Example server: a shared page-backed counter exposed on three routes
/// ("/" shows it, "/add" increments, "/sub" decrements), served on port 8080.
fn main() {
println!("Visit http://localhost:8080 to try this example.");
//Read the page before we start
let page = Arc::new(read_string("examples/handler_storage/page.html").unwrap());
//The shared counter state
let value = Arc::new(RwLock::new(0));
// Each route gets its own Counter handler sharing `page` and `value`;
// only the optional `operation` differs per route.
let router = insert_routes!{
TreeRouter::new() => {
"/" => Get: Counter{
page: page.clone(),
value: value.clone(),
operation: None
},
"/add" => Get: Counter{
page: page.clone(),
value: value.clone(),
operation: Some(add)
},
"/sub" => Get: Counter{
page: page.clone(),
value: value.clone(),
operation: Some(sub)
}
}
};
// Start the server; remaining settings come from Server::default().
let server_result = Server {
host: 8080.into(),
handlers: router,
content_type: content_type!(Text / Html; Charset = Utf8),
..Server::default()
}.run();
match server_result {
Ok(_server) => {},
Err(e) => println!("could not start server: {}", e.description())
}
}
| 816
|
/// Catch-all handler: responds 404 with a JSON body for any unmatched route.
async fn fallback_route(_req: HttpRequest) -> impl Responder {
HttpResponse::NotFound().json("Route not found")
}
| 817
|
/// Constrains the bitwise XOR of two unsigned scalars of the same type.
///
/// Both operands are decomposed into fixed-width little-endian bit vectors,
/// XORed bit by bit, and the result bits are packed back into one element.
///
/// FIX: the right operand's bit decomposition was registered under the
/// namespace label "left bits", colliding with the left operand's namespace;
/// it is now correctly labelled "right bits".
pub fn bit_xor<E, CS>(cs: CS, left: &Scalar<E>, right: &Scalar<E>) -> Result<Scalar<E>, Error>
where
    E: IEngine,
    CS: ConstraintSystem<E>,
{
    fn inner<E, CS>(mut cs: CS, left: &Scalar<E>, right: &Scalar<E>) -> Result<Scalar<E>, Error>
    where
        E: IEngine,
        CS: ConstraintSystem<E>,
    {
        // XOR is only defined here for two unsigned operands of one type.
        let scalar_type = zinc_types::ScalarType::expect_same(left.get_type(), right.get_type())?;
        scalar_type.assert_signed(false)?;
        let len = scalar_type.bitlength::<E>();
        let left_bits = left
            .to_expression::<CS>()
            .into_bits_le_fixed(cs.namespace(|| "left bits"), len)?;
        let right_bits = right
            .to_expression::<CS>()
            .into_bits_le_fixed(cs.namespace(|| "right bits"), len)?;
        // Per-bit XOR constraints, each in its own namespace.
        let result_bits = left_bits
            .into_iter()
            .zip(right_bits)
            .enumerate()
            .map(|(i, (l_bit, r_bit))| {
                Boolean::xor(cs.namespace(|| format!("bit {}", i)), &l_bit, &r_bit)
            })
            .collect::<Result<Vec<Boolean>, SynthesisError>>()?;
        // Recombine the bit vector into a single field element.
        let result = AllocatedNum::pack_bits_to_element(cs.namespace(|| "result"), &result_bits)?;
        Ok(Scalar::new_unchecked_variable(
            result.get_value(),
            result.get_variable(),
            scalar_type,
        ))
    }
    auto_const!(inner, cs, left, right)
}
| 818
|
pub
fn
load
(
&
self
)
-
>
T
{
unsafe
{
atomic_load
(
self
.
as_ptr
(
)
)
}
}
| 819
|
// Encoding test: INC with a byte-sized scaled-indexed-displaced operand must
// assemble to exactly these bytes in 64-bit (Qword) mode.
fn inc_12() {
run_test(&Instruction { mnemonic: Mnemonic::INC, operand1: Some(IndirectScaledIndexedDisplaced(RDI, RDI, Four, 553850230, Some(OperandSize::Byte), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[254, 132, 191, 118, 21, 3, 33], OperandSize::Qword)
}
| 820
|
/// Brute-forces a single-byte XOR key: decrypts `data` under every candidate
/// key and returns the key whose plaintext scores best (lowest) under the
/// frequency analysis in `freq_analysis::text_score`.
///
/// FIXES: the key loop was `0..255`, which never tried key 0xFF; it is now
/// `0..=255`. The magic sentinel score `1_000_000f64` (which silently dropped
/// any candidate scoring above it) is replaced with `f64::INFINITY`.
fn find_single_byte_key(data: &[u8]) -> u8 {
    let mut best_key = 0;
    let mut best_score = f64::INFINITY;
    // Try all 256 possible key bytes; `<=` lets later keys win exact ties,
    // as in the original.
    for key in 0..=255u8 {
        let decrypted = utils::single_byte_xor(data, key);
        let score = freq_analysis::text_score(&decrypted);
        if score <= best_score {
            best_score = score;
            best_key = key;
        }
    }
    best_key
}
| 821
|
/// SWAR (SIMD-within-a-register) byte-wise unsigned "less than or equal".
///
/// Treats `x` and `y` as eight packed u8 lanes; the returned word has the
/// high bit of each lane set where x_byte <= y_byte, with all other bits
/// clear. NOTE(review): correctness depends on `H8` (defined elsewhere)
/// being the per-byte high-bit mask 0x8080_8080_8080_8080 — confirm. This is
/// a classic branchless comparison from the Hacker's Delight family.
pub fn u_le8(x: u64, y: u64) -> u64 {
((((y | H8) - (x & !H8)) | (x ^ y)) ^ (x & !y)) & H8
}
| 822
|
// NOTE(review): this function does not compile as written and reads like a
// work-in-progress. Problems visible in this block alone:
//   * `"" as str` is not a valid cast (an `&str` literal or `String` is
//     needed for `lightType`);
//   * `Vec3f::new(x: .., y: .., z: ..)` uses struct-field syntax inside a
//     function call, which Rust does not allow;
//   * the bare `split_info = buf.split(" ");` assigns to a binding that was
//     never declared in that scope (and is immediately shadowed anyway);
//   * `key`/`radius` are parsed into `&str` bindings via `.parse()`, which
//     cannot produce `&str`;
//   * the signature promises a `Model` return value, but no value is ever
//     returned (the function ends by pushing into `lights`).
// The code is left untouched; only review comments were added, because the
// intended parsing format cannot be reconstructed safely from this block.
fn parseLightInfo(reader: &mut BufReader<&File>, buf: &mut String, lights: &mut Vec<Light>) -> Model {
let mut light = Light {
lightType: "" as str,
radius: 0.0,
period: 0,
position: Vec3f::new(0.0, 0.0, 0.0),
Color: Vec3f::new(0.0, 0.0, 0.0),
};
//Firstly, read the LigthType
reader.read_line(buf);
let lightType: &str = buf.trim().clone();
let mut key = "";
let mut radius = "";
let mut period = 0;
if lightType == "o" || lightType == "l" {
let mut infoIndex = 0;
reader.read_line(buf);
let mut split_info = buf.split(" ");
key = split_info.next().unwrap().parse().unwrap();
radius = split_info.next().unwrap().parse().unwrap();
period = split_info.next().unwrap().parse().unwrap();
}
let mut infoIndex = 0;
while infoIndex < 2 {
//Then, read the position and Color Info
split_info = buf.split(" ");
let mut fieldInfo = 0;
reader.read_line(buf);
let mut split_info = buf.split(" ");
key = split_info.next().unwrap().parse().unwrap();
if infoIndex == 1 {
light.position = Vec3f::new(
x: split_info.next().unwrap().parse().unwrap(),
y: split_info.next().unwrap().parse().unwrap(),
z: split_info.next().unwrap().parse().unwrap(),
)
} else {
light.Color = Vec3f::new(
x: split_info.next().unwrap().parse().unwrap(),
y: split_info.next().unwrap().parse().unwrap(),
z: split_info.next().unwrap().parse().unwrap(),
)
}
infoIndex += 1
}
//Finally, we only need to read an empty line to finish the model parsing process
reader.read_line(buf);
lights.push(light);
}
| 823
|
/// Throughput benchmark: repeatedly runs `nbrokers` broker rounds against a
/// fresh table, computing ops/ms per trial, until it has collected `nhits`
/// trials whose deviation from the running mean is within `tolerance`, or
/// until the trial/time budget runs out.
fn main() {
let nhits = 20;
let ntrials = 2000;
// Wall-clock budget: 60 seconds in nanoseconds.
let nnanos = 60 * 1000 * 1000 * 1000;
let ntransfers = 1000;
let nbrokers = 8;
let tolerance = 0.05;
let ops = (ntransfers * nbrokers) as f64;
let million = (1000 * 1000) as f64;
let mut sum = 0.0;
let mut hits = 0;
let mut trial = 0;
let limit = time::precise_time_ns() + nnanos;
while hits < nhits && trial < ntrials && time::precise_time_ns() < limit {
// Fresh table per trial so trials are independent.
let mut table = HashMapOfTreeMap::new();
let start = time::precise_time_ns();
for _ in 0..nbrokers {
broker(&mut table, ntransfers);
}
let end = time::precise_time_ns();
let ns = (end - start) as f64;
// ops per millisecond for this trial.
let x = ops / ns * million;
sum += x;
let n = (trial + 1) as f64;
let mean = sum / n;
// Relative deviation from the running mean; only "stable" trials count.
let dev = (x - mean).abs() / mean;
if dev <= tolerance {
println!("{:5} {:8.2} ops/ms ({:8.2})", trial, x, mean);
hits += 1;
}
trial += 1;
}
}
| 824
|
/// A-normalizes a CFG node: replaces `beta`'s body `CFG(kind, name, args)`
/// with `let n1 = a1, ... in CFG(kind, name, n1...)`, binding each original
/// argument to a fresh variable. Panics (unimplemented) on non-CFG bodies.
fn split_node(ng: &mut NameGen, beta: &RcNode) {
let t = beta.get_body();
match &*t {
Term::CFG { kind, name, args } => {
// One fresh name per argument.
let names1 = ng.fresh_name_list(args.len());
// The rebuilt CFG term references only the fresh variables.
let args1 = names1.iter().map(|x| Term::var(x)).collect();
let t1 = Term::mk_cfg(kind.clone(), name, args1);
// Bindings: fresh name -> original argument term.
let bs1 = names1
.iter()
.zip(args)
.map(|(n, t)| (n.clone(), Rc::clone(t)))
.collect();
let let_term = Term::mk_let(t1, bs1);
replace_subtree(beta, &let_term);
}
_ => unimplemented!(),
}
}
| 825
|
/// Implements the Redcode CMP instruction.
///
/// CMP compares the A-value to the B-value under the instruction's modifier.
/// If they compare equal, the instruction after the next one (PC + 2) is
/// queued, skipping one instruction; otherwise the next instruction (PC + 1)
/// is queued.
pub fn cmp_op(inputs: OpInputs) -> EmulatorResult<()> {
let a = inputs.regs.a;
let b = inputs.regs.b;
// Which fields participate in the comparison depends on the modifier.
let is_equal = match inputs.regs.current.instr.modifier {
Modifier::A => a.a_field == b.a_field,
Modifier::B => a.b_field == b.b_field,
Modifier::AB => a.a_field == b.b_field,
Modifier::BA => a.b_field == b.a_field,
// F: both fields pairwise; X: fields crossed.
Modifier::F => a.a_field == b.a_field && a.b_field == b.b_field,
Modifier::X => a.a_field == b.b_field && a.b_field == b.a_field,
// I: whole instruction including opcode plus both fields.
Modifier::I => {
a.instr == b.instr
&& a.a_field == b.a_field
&& a.b_field == b.b_field
}
};
// Increment PC twice if the condition holds, otherwise increment once
let amt = if is_equal { 2 } else { 1 };
inputs.pq.push_back(
offset(inputs.regs.current.idx, amt, inputs.core_size)?,
inputs.warrior_id,
)?;
Ok(())
}
| 826
|
/// Wraps generated impl code in a uniquely named `const _IMPL_<TY>_FOR_<NAME>`
/// block that re-imports the `nbt` crate as `_nbt`, so the emitted code is
/// hygienic regardless of the user's own imports (the classic serde-style
/// dummy-const trick for pre-2018 macro output).
fn wrap_use<T: quote::ToTokens>(name: syn::Ident, ty: &str, content: &T) -> quote::Tokens {
let dummy_const = syn::Ident::new(&format!("_IMPL_{}_FOR_{}", ty.to_uppercase(), name), Span::call_site());
quote! {
#[allow(non_upper_case_globals, unused_attributes, unused_qualifications)]
const #dummy_const: () = {
extern crate nbt as _nbt;
#content
};
}
}
| 827
|
/// Breakpoint (#BP, vector 3) interrupt handler: forwards to the common
/// exception handler with the vector number.
pub extern "x86-interrupt" fn break_point() { CommonExceptionHandler( 3); }
| 828
|
/// Writes a single-byte "fixed value" MessagePack marker (the marker byte is
/// the entire encoded value), wrapping any I/O failure in
/// `FixedValueWriteError`.
fn write_fixval<W>(wr: &mut W, marker: Marker) -> Result<(), FixedValueWriteError>
where W: Write
{
wr.write_u8(marker.to_u8()).map_err(|err| FixedValueWriteError(From::from(err)))
}
| 829
|
/// Advent-of-Code day six driver: initializes logging (default level "warn"),
/// runs both parts against the sample orbit maps, then runs the real input.
fn main() {
env_logger::init_from_env(Env::default().filter_or(env_logger::DEFAULT_FILTER_ENV, "warn"));
// Sample inputs from the puzzle statement for parts 1 and 2.
example!(Six;
RunFor::Part1, (), "COM)B\nB)C\nC)D\nD)E\nE)F\nB)G\nG)H\nD)I\nE)J\nJ)K\nK)L",
RunFor::Part2, (), "COM)B\nB)C\nC)D\nD)E\nE)F\nB)G\nG)H\nD)I\nE)J\nJ)K\nK)L\nK)YOU\nI)SAN"
);
run::<Six>((), include_str!("6_input.txt"));
}
| 830
|
/// Converts a schema id into a valid Rust `syn::Ident` by first producing a
/// PascalCase type name and then turning it into an identifier.
fn schema_id_to_ident(id: &str) -> syn::Ident {
to_ident(&to_rust_typestr(id))
}
| 831
|
/// Criterion benchmark of a TCP-source -> TCP-sink vector topology.
///
/// For each writer count in {1, 2, 4, 8, 16}, builds a fresh topology per
/// iteration, pushes `num_lines` random lines of `line_size` bytes from each
/// writer, and measures end-to-end throughput in bytes.
fn bench_topology(c: &mut Criterion, bench_name: &'static str) {
let num_lines: usize = 10_000;
let line_size: usize = 100;
let in_addr = next_addr();
let out_addr = next_addr();
let mut group = c.benchmark_group(format!("{}/{}", bench_name, "topology"));
group.sampling_mode(SamplingMode::Flat);
// Encapsulate noise seen in
// https://github.com/timberio/vector/runs/1746002475
group.noise_threshold(0.10);
for &num_writers in [1, 2, 4, 8, 16].iter() {
// Throughput is total bytes written across all writers.
group.throughput(Throughput::Bytes(
(num_lines * line_size * num_writers) as u64,
));
group.bench_with_input(
BenchmarkId::new(
"tcp_socket",
format!(
"{:02}_{}",
num_writers,
if num_writers == 1 {
"writer"
} else {
"writers"
}
),
),
&num_writers,
|b, &num_writers| {
b.iter_batched(
// Setup (not timed): pre-generate input, build and start the
// topology, wait until the TCP source accepts connections.
|| {
let input_lines: Vec<Vec<String>> = (0..num_writers)
.into_iter()
.map(|_| random_lines(line_size).take(num_lines).collect())
.collect();
let mut config = config::Config::builder();
config.add_source(
"in",
sources::socket::SocketConfig::make_basic_tcp_config(in_addr),
);
config.add_sink(
"out",
&["in"],
sinks::socket::SocketSinkConfig::make_basic_tcp_config(
out_addr.to_string(),
),
);
let rt = runtime();
let (output_lines, topology) = rt.block_on(async move {
let output_lines = CountReceiver::receive_lines(out_addr);
let (topology, _crash) =
start_topology(config.build().unwrap(), false).await;
wait_for_tcp(in_addr).await;
(output_lines, topology)
});
(input_lines, rt, topology, output_lines)
},
// Timed section: send all writers' lines concurrently, stop the
// topology, and check every line came out the other end.
|(input_lines, rt, topology, output_lines)| {
rt.block_on(async move {
let sends = stream::iter(input_lines)
.map(|lines| send_lines(in_addr, lines))
.collect::<Vec<_>>()
.await;
future::try_join_all(sends).await.unwrap();
topology.stop().await;
let output_lines = output_lines.await;
debug_assert_eq!(num_lines * num_writers, output_lines.len());
output_lines
});
},
// Each iteration gets a fresh topology.
BatchSize::PerIteration,
);
},
);
}
group.finish();
}
| 832
|
/// Renders an optional MAC address, substituting a fixed placeholder string
/// when the address is absent.
fn mac_to_string(mac: Option<MacAddr>) -> String {
    mac.map_or_else(|| "Unknown mac address".to_string(), |m| m.to_string())
}
| 833
|
/// Builds a (futures-0.1 style) future that downloads every vendor index in
/// `vidx_list`, expands each index into its PDSC descriptors, downloads them
/// all with `progress` reporting, and resolves to the downloaded file paths.
pub fn update_future<'a, C, I, P>(
config: &'a Config,
vidx_list: I,
client: &'a Client<C, Body>,
logger: &'a Logger,
progress: P
) -> impl Future<Item = Vec<PathBuf>, Error = Error> + 'a
where C: Connect,
I: IntoIterator<Item = String> + 'a,
P: DownloadProgress + 'a,
{
let parsed_vidx = download_vidx_list(vidx_list, client, logger);
// Each successfully parsed vendor index fans out into a stream of PDSCs;
// failed indexes are dropped by filter_map.
let pdsc_list = parsed_vidx
.filter_map(move |vidx| vidx.map(|v| flatmap_pdscs(v, client, logger)))
.flatten();
download_stream(config, pdsc_list, client, logger, progress).collect()
}
| 834
|
/// Quicksort that sorts the left partition on a spawned OS thread while the
/// current thread sorts the right partition, then joins.
///
/// NOTE(review): `RawSend` smuggles a `*mut [T]` across the thread boundary
/// to satisfy the `'static` bound on `thread::spawn`; soundness relies
/// entirely on the `join()` below completing before `v`'s borrow ends — the
/// compiler cannot verify this, hence the `unsafe` block.
pub fn threaded_quick_sort<T: 'static + PartialOrd + Debug + Send>(v: &mut [T]) {
if v.len() <= 1 {
return;
}
// `pivot` presumably partitions in place and returns the pivot's final
// index (the element at `p` is excluded below) — confirm at its definition.
let p = pivot(v);
println!("{:?}", v);
let (a, b) = v.split_at_mut(p);
let raw_a = a as *mut [T];
let raw_s = RawSend(raw_a);
unsafe {
// Left half on a new thread.
let handle = std::thread::spawn(move || {
threaded_quick_sort(&mut *raw_s.0);
});
// Right half (skipping the pivot element) on this thread.
threaded_quick_sort(&mut b[1..]);
// compiler doesn't know that we join these
// We do
handle.join().ok();
}
}
| 835
|
/// Boots the seed (WASM) application with its default model and immediately
/// dispatches an initial `FetchData` message.
pub fn render() {
// let a = ext_rec();
// let some = a.calcArea();
// log!("got event! {}", some);
// log!("got event!");
let app = seed::App::build(|_, _| Model::default(), update, view)
.finish()
.run();
// Kick off the first data load as soon as the app is mounted.
app.update(Msg::FetchData);
}
| 836
|
/// Performs a system call through the shared task buffer and takes back both
/// the kernel's reply message and a typed payload copied out of the buffer.
///
/// NOTE(review): safety relies on the kernel writing a valid `T` of exactly
/// `size_of::<T>()` bytes into `payload_data` before returning, and on
/// `task_buffer_addr()` pointing at a live, exclusive `TaskBuffer` — neither
/// is checkable from this block; confirm against the syscall ABI.
fn system_call_take_payload<T: Any + Clone>(message: SystemCall) -> (SystemCall, T) {
use core::mem::{size_of};
let addr = task_buffer_addr();
unsafe {
let buffer = &mut *(addr as *mut TaskBuffer);
// Place the request, trap into the kernel, then read the reply.
buffer.call = Some(message);
system_call_raw();
// Reinterpret the raw payload bytes as a T and clone it out.
let payload_addr = &mut buffer.payload_data as *mut _ as *mut T;
let payload_data = &*payload_addr;
// Guard against a missing or mis-sized payload.
assert!(buffer.payload_length != 0 && buffer.payload_length == size_of::<T>());
(buffer.call.take().unwrap(), payload_data.clone())
}
}
| 837
|
/// `cargo fuzz help` must exit successfully.
fn help() {
cargo_fuzz().arg("help").assert().success();
}
| 838
|
/// Benchmarks `fyrstikk_tal_kombinasjonar` at inputs 40 and 2000 in one
/// criterion group using flat sampling with a reduced sample count.
fn criterion_benchmark(c: &mut Criterion) {
let mut group = c.benchmark_group("fyrstikker");
// Flat sampling + only 10 samples: the function under test is slow.
group.sampling_mode(SamplingMode::Flat).sample_size(10);
group.bench_function("fyrstikker 40", |b| {
b.iter(|| fyrstikk_tal_kombinasjonar(40))
});
group.bench_function("fyrstikker 2000", |b| {
b.iter(|| fyrstikk_tal_kombinasjonar(2000))
});
group.finish();
}
| 839
|
/// Prompts for a number (Spanish UI strings) and reports whether it is even
/// (`par`) or odd (`impar`). Panics if the input is not a valid i32.
fn main() {
let num = input("Ingrese un número: ")
.unwrap()
.parse::<i32>()
.expect("Expected a number");
if num % 2 == 0 {
println!("`{}` es un número par.", num);
} else {
println!("`{}` es un número impar", num);
}
}
| 840
|
/// 3x3 median filter over a 640x480 grayscale frame.
///
/// Border pixels (the outermost row and column) are left at the fresh
/// `ImageBuffer` default; every interior pixel is replaced by the median of
/// its 3x3 neighbourhood.
///
/// FIXES: (1) the median of 9 sorted samples is index 4, not 5; (2) the loop
/// ranges stopped one pixel short (`1..638` / `1..478`), leaving the last
/// interior column and row unfiltered — now `1..639` / `1..479`.
pub fn median_filter(frame : ImageBuffer<Luma<u8>, Vec<u8>>) -> ImageBuffer<Luma<u8>, Vec<u8>> {
    let mut result = ImageBuffer::new(640, 480);
    let mut kernel = [0; 9];
    for i in 1..639 {
        for j in 1..479 {
            // Gather the 3x3 neighbourhood centred on (i, j).
            for k in 0..3 {
                for l in 0..3 {
                    let index = k + 3 * l;
                    let coord_x = (i + k - 1) as u32;
                    let coord_y = (j + l - 1) as u32;
                    kernel[index] = frame[(coord_x, coord_y)].channels()[0];
                }
            }
            // Median = middle element (index 4) of the 9 sorted samples.
            kernel.sort_unstable();
            let pixel_value = kernel[4];
            result.put_pixel(i as u32, j as u32, Luma([pixel_value]));
        }
    }
    result
}
| 841
|
/// Reads `.gitmodules` at commit `at`, resolves each configured submodule to
/// the commit recorded in the tree, and returns the subset that are
/// rust-lang(-nursery) repositories not on the explicit exclude list.
fn get_submodules(
repo: &Repository,
at: &Commit,
) -> Result<Vec<Submodule>, Box<dyn std::error::Error>> {
let submodule_cfg = modules_file(&repo, &at)?;
let submodule_cfg = Config::parse(&submodule_cfg)?;
// Map each submodule's path to its remote URL by pairing the config's
// "<name>.path" entries with the matching "<name>.url" entries.
let mut path_to_url = HashMap::new();
let entries = submodule_cfg.entries(None)?;
for entry in &entries {
let entry = entry?;
let name = entry.name().unwrap();
if name.ends_with(".path") {
let url = name.replace(".path", ".url");
let url = submodule_cfg.get_string(&url).unwrap();
path_to_url.insert(entry.value().unwrap().to_owned(), url);
}
}
let mut submodules = Vec::new();
let tree = at.tree()?;
for (path, url) in &path_to_url {
let path = Path::new(&path);
let entry = tree.get_path(&path);
// the submodule may not actually exist
let entry = match entry {
Ok(e) => e,
Err(_) => continue,
};
// A submodule appears in the tree as a commit ("gitlink") entry.
assert_eq!(entry.kind().unwrap(), git2::ObjectType::Commit);
submodules.push(Submodule {
path: path.to_owned(),
commit: entry.id(),
repository: url.to_owned(),
});
}
// Keep only rust-lang repositories that are not on the exclude list
// (checked both with and without a ".git" suffix).
submodules.retain(|s| {
let is_rust =
s.repository.contains("rust-lang") || s.repository.contains("rust-lang-nursery");
let exclude = vec![
"https://github.com/rust-lang/llvm.git",
"https://github.com/rust-lang/llvm-project.git",
"https://github.com/rust-lang/lld.git",
"https://github.com/rust-lang-nursery/clang.git",
"https://github.com/rust-lang-nursery/lldb.git",
"https://github.com/rust-lang/libuv.git",
"https://github.com/rust-lang/gyp.git",
"https://github.com/rust-lang/jemalloc.git",
"https://github.com/rust-lang/compiler-rt.git",
"https://github.com/rust-lang/hoedown.git",
];
is_rust
&& !exclude.contains(&s.repository.as_str())
&& !exclude.contains(&&*format!("{}.git", s.repository))
});
Ok(submodules)
}
| 842
|
/// Integration check: fetching the eth_btc price ticker from the initialized
/// exchange must succeed; the response is printed for inspection.
async fn get_price_ticker() {
let exchange = init().await;
let req = GetPriceTickerRequest {
market_pair: "eth_btc".to_string(),
};
let resp = exchange.get_price_ticker(&req).await.unwrap();
println!("{:?}", resp);
}
| 843
|
/// Running the 64-bit W^X crash fixture must fail with `MemOutOfBound`
/// rather than succeed or crash the VM.
pub fn test_wxorx_crash_64() {
let buffer = fs::read("tests/programs/wxorx_crash_64").unwrap().into();
let result = run::<u64, SparseMemory<u64>>(&buffer, &vec!["wxorx_crash_64".into()]);
assert_eq!(result.err(), Some(Error::MemOutOfBound));
}
| 844
|
/// Ensures the shared `alt` query parameter's enum accepts the value
/// `"media"`, appending a `Media` variant when the API description did not
/// already list one. Does nothing when there is no `alt` parameter or it is
/// not enum-typed.
fn add_media_to_alt_param(params: &mut [Param]) {
    // Locate the `alt` parameter, if present.
    if let Some(alt_param) = params.iter_mut().find(|p| p.id == "alt") {
        // Only enum-typed `alt` parameters can be extended.
        if let Param {
            typ:
                Type {
                    type_desc: TypeDesc::Enum(enum_desc),
                    ..
                },
            ..
        } = alt_param
        {
            // `!any(..)` replaces the noisier `find(..).is_none()`.
            if !enum_desc.iter().any(|d| d.value == "media") {
                enum_desc.push(EnumDesc {
                    description: Some("Upload/Download media content".to_owned()),
                    ident: parse_quote! {Media},
                    value: "media".to_owned(),
                })
            }
        }
    }
}
| 845
|
/// Establishes a WebSocket client over an already-connected stream, using the
/// default TLS connector and default WebSocket config (thin convenience
/// wrapper around `client_async_tls_with_connector_and_config`).
pub async fn client_async_tls<R, S>(
request: R,
stream: S,
) -> Result<(WebSocketStream<ClientStream<S>>, Response), Error>
where
R: IntoClientRequest + Unpin,
S: 'static + tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin,
AutoStream<S>: Unpin,
{
client_async_tls_with_connector_and_config(request, stream, None, None).await
}
| 846
|
/// AoC 2018 day 2 part 2: finds the two box IDs differing by exactly one
/// character and prints the letters they have in common (same position,
/// same character), in order.
pub fn part2() {
let input = crate::common::read_stdin_to_string();
let matches = find_part2_matches(&input).expect("No matches found");
// Walk both IDs in lockstep and keep only positions where they agree.
let common_letters: String = matches
.0
.chars()
.zip(matches.1.chars())
.filter(|(letter_1, letter_2)| letter_1 == letter_2)
.map(|letters| letters.0)
.collect();
println!(
"the common letters between the two correct box IDs: {}",
common_letters
);
}
| 847
|
/// Cryptopals-style attack on CTR mode reused with a fixed nonce: encrypts
/// the corpus under one keystream, truncates all ciphertexts to equal length,
/// then breaks each keystream byte column-by-column via single-byte-XOR
/// frequency analysis and prints the recovered plaintexts.
fn main() {
let key = utils::random_key();
// Fixed nonce 0 for every message — that reuse is what makes this breakable.
let nonce = utils::u64_to_bytes(0);
let crypter = stream::CTR::new(&key, &nonce).unwrap();
let d = load_data();
let mut d: Vec<Vec<u8>> = d.into_iter().map(|s| crypter.crypt(&s)).collect();
truncate_to_shortest(&mut d);
// Transpose so each row holds all ciphertext bytes XORed with the same
// keystream byte.
let d_t = transpose(&d);
let mut key = Vec::<u8>::new();
for block in d_t {
key.push(find_single_byte_key(&block));
}
for entry in d {
println!("{}",
str::from_utf8(&utils::repeating_key_xor(&entry, &key)).unwrap());
}
}
| 848
|
/// Parses a raw JSON game description into a `game::Game`.
///
/// Returns `None` on malformed JSON or on an empty/ill-formed timeline set.
/// NOTE(review): this models a multi-timeline (5D-chess-style) game; the
/// exact semantics of timeline indices and `de_l` live elsewhere — the
/// comments below describe only what this block visibly computes.
pub fn parse(raw: &str) -> Option<game::Game> {
let game_raw: GameRaw = serde_json::from_str(raw).ok()?;
// Half-integer indices (±0.5) indicate an even initial timeline count.
let even_initial_timelines = game_raw
.timelines
.iter()
.any(|tl| tl.index == -0.5 || tl.index == 0.5);
let min_timeline = game_raw.timelines
.iter()
.map(|tl| tl.index)
.min_by_key(|x| (*x) as isize)?;
let max_timeline = game_raw.timelines
.iter()
.map(|tl| tl.index)
.max_by_key(|x| (*x) as isize)?;
// Width of the "active" band of timelines around zero.
let timeline_width = ((-min_timeline).min(max_timeline) + 1.0).round();
let active_timelines = game_raw.timelines
.iter()
.filter(|tl| tl.index.abs() <= timeline_width);
// The "present" is the earliest last-state time across active timelines.
let present = active_timelines
.map(|tl| tl.begins_at + (tl.states.len() as isize) - 1)
.min()?;
let mut res = game::Game::new(game_raw.width, game_raw.height);
res.info.present = present;
res.info.min_timeline = de_l(min_timeline, even_initial_timelines);
res.info.max_timeline = de_l(max_timeline, even_initial_timelines);
res.info.active_player = game_raw.active_player;
res.info.even_initial_timelines = even_initial_timelines;
// Convert every raw timeline and key it by its normalized index.
for tl in game_raw.timelines.into_iter() {
res.timelines.insert(
de_l(tl.index, even_initial_timelines),
de_timeline(tl, even_initial_timelines),
);
}
Some(res)
}
| 849
|
/// Root endpoint: responds 200 with a JSON banner identifying the API.
async fn index(_req: HttpRequest) -> impl Responder {
HttpResponse::Ok().json("Catalog API root")
}
| 850
|
/// Mounts the subaction renderer into the page and wires up two DOM buttons:
/// a Run/Stop toggle for playback and a Perspective/Orthographic camera
/// toggle. The renderer starts paused.
pub async fn render_window_wasm(subaction: brawllib_rs::high_level_fighter::HighLevelSubaction) {
use brawllib_rs::renderer::app::state::{AppEventIncoming, State};
use brawllib_rs::renderer::app::App;
use wasm_bindgen::prelude::*;
use web_sys::HtmlElement;
let document = web_sys::window().unwrap().document().unwrap();
let body = document.body().unwrap();
// Container the renderer draws into.
let parent_div = document.create_element("div").unwrap();
parent_div
.dyn_ref::<HtmlElement>()
.unwrap()
.style()
.set_css_text("margin: auto; width: 80%; aspect-ratio: 4 / 2; background-color: black");
body.append_child(&parent_div).unwrap();
let app = App::new_insert_into_element(parent_div, subaction).await;
let event_tx = app.get_event_tx();
// Frame counter read-out (updated elsewhere).
let frame = document.create_element("p").unwrap();
frame.set_inner_html("Frame: 0");
body.append_child(&frame).unwrap();
// Run/Stop toggle: the button's label doubles as the playback state.
let button = document.create_element("button").unwrap();
body.append_child(&button).unwrap();
let button_move = button.clone();
button_move.set_inner_html("Run");
let event_tx_move = event_tx.clone();
let do_thing = Closure::wrap(Box::new(move || {
if button_move.inner_html() == "Stop" {
event_tx_move
.send(AppEventIncoming::SetState(State::Pause))
.unwrap();
button_move.set_inner_html("Run");
} else {
event_tx_move
.send(AppEventIncoming::SetState(State::Play))
.unwrap();
button_move.set_inner_html("Stop");
}
}) as Box<dyn FnMut()>);
button
.dyn_ref::<HtmlElement>()
.unwrap()
.set_onclick(Some(do_thing.as_ref().unchecked_ref()));
// Camera toggle, using the same label-as-state trick.
let button = document.create_element("button").unwrap();
body.append_child(&button).unwrap();
let button_move = button.clone();
button_move.set_inner_html("Perspective");
let do_thing = Closure::wrap(Box::new(move || {
if button_move.inner_html() == "Orthographic" {
event_tx
.send(AppEventIncoming::SetPerspective(false))
.unwrap();
button_move.set_inner_html("Perspective");
} else {
event_tx
.send(AppEventIncoming::SetPerspective(true))
.unwrap();
button_move.set_inner_html("Orthographic");
}
}) as Box<dyn FnMut()>);
button
.dyn_ref::<HtmlElement>()
.unwrap()
.set_onclick(Some(do_thing.as_ref().unchecked_ref()));
// Start paused; run() hands control to the render loop.
app.get_event_tx()
.send(AppEventIncoming::SetState(State::Pause))
.unwrap();
app.run();
}
| 851
|
// Encoding test: INC with a byte-sized scaled-indexed-displaced operand must
// assemble to exactly these bytes in 64-bit (Qword) mode.
fn inc_10() {
run_test(&Instruction { mnemonic: Mnemonic::INC, operand1: Some(IndirectScaledIndexedDisplaced(RAX, RDI, Two, 366931006, Some(OperandSize::Byte), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[254, 132, 120, 62, 236, 222, 21], OperandSize::Qword)
}
| 852
|
/// Reflects vector `v` about normal `n`: r = v - 2(v·n)n.
/// NOTE(review): the formula is only a true mirror reflection when `n` is
/// unit-length — confirm the call sites normalize.
fn reflect(v: &Vec3, n: &Vec3) -> Vec3 {
*v - *n * 2.0 * dot(v, n)
}
| 853
|
/// Appends `u` to `buf` as two 16-bit halves — low half first, then high —
/// delegating each to `write_uint16`. Returns the total bytes written.
pub fn write_uint32(buf: &mut Vec<u8>, u: u32) -> usize {
    let low = (u & 0xffff) as u16;
    let high = ((u >> 16) & 0xffff) as u16;
    write_uint16(buf, low) + write_uint16(buf, high)
}
| 854
|
/// Parses the CLI and either generates a fresh config file (`generate`,
/// returning `CliStatus::Exit`) or validates the Twilio credentials and loads
/// the app config (`run`, returning `CliStatus::Continue`).
pub fn parse_opts() -> Result<CliStatus, Error> {
let opt = Opt::from_args();
log::debug!("Cli opts are: {:?}", opt);
match opt.cmd {
Command::Generate => {
generate_empty_config().context("Failed to generate config")?;
log::info!("config.yml generated");
Ok(CliStatus::Exit)
}
Command::Run {
config,
twil_sid,
twil_token,
twil_from,
} => {
// All three Twilio credentials are required together.
if twil_sid.is_none() || twil_token.is_none() || twil_from.is_none() {
bail!("TWIL_ACCOUNT_SID, TWIL_AUTH_TOKEN & TWIL_FROM env variables must be set, or passed via --twil-sid, --twil-token & --twil-from");
}
// Safe: all three were just checked to be Some.
let twil_sid = twil_sid.unwrap();
let twil_token = twil_token.unwrap();
let twil_from = twil_from.unwrap();
let app_config = AppConfig::new(config, twil_sid, twil_token, twil_from)
.context("Failed to get config")?;
Ok(CliStatus::Continue(app_config))
}
}
}
| 855
|
/// AoC 2018 day 5 part 2: for each unit type, removes every occurrence of
/// that type (both cases) from the polymer, fully reacts the remainder via
/// `solve_1`, and returns the shortest resulting length.
///
/// FIX: dropped a redundant `input.to_string()` — `str::replace` already
/// returns a fresh `String` — and replaced the bare `unwrap` with an
/// `expect` stating why the minimum always exists.
fn solve_2(input: &str) -> usize {
    "abcdefghijklmnopqrstuvwxyz"
        .chars()
        .map(|c| {
            // Strip both the lowercase and uppercase form of the unit.
            let stripped = input.replace(c, "");
            stripped.replace(c.to_ascii_uppercase(), "")
        })
        .map(|s| solve_1(&s))
        .min()
        .expect("alphabet is non-empty, so min() always yields a value")
}
| 856
|
// Wraps a server name in an IRC message prefix token with empty user/host.
// NOTE(review): this is pre-1.0 Rust (`~str` owned strings, `~""` literals);
// it will not compile on any modern toolchain without porting to `String`.
fn build_serverprefix(s: ~str) -> Result<IRCToken, ~str> {
Ok(PrefixT(Prefix {nick: s, user: ~"", host: ~""}))
}
| 857
|
/// Exercises core `Vec` behaviour: construction (explicit vs inferred types),
/// fold, push/pop, insert/remove, reverse, capacity growth, collect from a
/// range, and slice borrowing. The exact capacity assertions depend on the
/// precise push order, so the statements below must not be reordered.
fn vector_test() {
{
// Explicitly typed vs type-inferred construction produce equal vectors.
fn build_vector() -> Vec<i16> {
let mut v: Vec<i16> = Vec::<i16>::new();
v.push(10i16);
v.push(20i16);
v
}
fn build_vector_2() -> Vec<i16> {
let mut v = Vec::new();
v.push(10);
v.push(20);
v
}
let v1 = build_vector();
let v2 = build_vector_2();
assert_eq!(v1, v2);
}
// fold multiplies all elements: 2*3*5*7 = 210.
let mut v1 = vec![2, 3, 5, 7];
assert_eq!(v1.iter().fold(1, |a, b| a * b), 210);
v1.push(11);
v1.push(13);
assert_eq!(v1.iter().fold(1, |a, b| a * b), 30030);
let mut v2 = Vec::new();
v2.push("step");
v2.push("on");
v2.push("no");
v2.push("pets");
assert_eq!(v2, vec!["step", "on", "no", "pets"]);
// Ranges collect into vectors.
let v3: Vec<i32> = (0..5).collect();
assert_eq!(v3, [0, 1, 2, 3, 4]);
let mut v4 = vec!["a man", "a plan", "a canal", "panama"];
v4.reverse();
assert_eq!(v4, vec!["panama", "a canal", "a plan", "a man"]);
// with_capacity preallocates; pushing past capacity doubles it (2 -> 4).
let mut v5 = Vec::with_capacity(2);
assert_eq!(v5.len(), 0);
assert_eq!(v5.capacity(), 2);
v5.push(1);
v5.push(2);
assert_eq!(v5.len(), 2);
assert_eq!(v5.capacity(), 2);
v5.push(3);
assert_eq!(v5.len(), 3);
assert_eq!(v5.capacity(), 4);
// insert shifts elements right; remove shifts left.
let mut v6 = vec![10, 20, 30, 40, 50];
v6.insert(3, 35);
assert_eq!(v6, [10, 20, 30, 35, 40, 50]);
v6.remove(1);
assert_eq!(v6, [10, 30, 35, 40, 50]);
// pop returns Some(last) until empty, then None.
let mut v7 = vec!["carmen", "miranda"];
assert_eq!(v7.pop(), Some("miranda"));
assert_eq!(v7.pop(), Some("carmen"));
assert_eq!(v7.pop(), None);
// let languages: Vec<String> = std::env::args().skip(1).collect();
let languages = vec!["Lisp", "Scheme", "C", "C++", "Fortran"];
let mut v8 = Vec::new();
for l in languages {
if l.len() % 2 == 0 {
v8.push("functional");
} else {
v8.push("imperative");
}
}
assert_eq!(
v8,
[
"functional",
"functional",
"imperative",
"imperative",
"imperative"
]
);
// slice
let v9: Vec<f64> = vec![0.0, 0.707, 1.0, 0.707];
let a9: [f64; 4] = [0.0, 0.707, 1.0, 0.707];
let sv: &[f64] = &v9;
let sa: &[f64] = &a9;
assert_eq!(sv[0..2], [0.0, 0.707]);
assert_eq!(sa[2..], [1.0, 0.707]);
assert_eq!(&sv[1..3], [0.707, 1.0]);
}
| 858
|
/// Stable merge sort by value: splits the vector in half, recursively sorts
/// each half, then merges them. On ties the element from the left half is
/// taken first, preserving the original relative order.
pub fn merge_sort<T: PartialOrd + Debug>(mut v: Vec<T>) -> Vec<T> {
    // Base case: zero or one element is already sorted.
    if v.len() <= 1 {
        return v;
    }
    // Split, then sort both halves recursively.
    let right_half = v.split_off(v.len() / 2);
    let left = merge_sort(v);
    let right = merge_sort(right_half);
    // Merge: repeatedly take the smaller front element; prefer the left on
    // ties (only strictly-smaller right elements jump the queue).
    let mut merged = Vec::with_capacity(left.len() + right.len());
    let mut l = left.into_iter().peekable();
    let mut r = right.into_iter().peekable();
    loop {
        match (l.peek(), r.peek()) {
            (Some(a), Some(b)) => {
                if b < a {
                    merged.push(r.next().unwrap());
                } else {
                    merged.push(l.next().unwrap());
                }
            }
            // One side exhausted: drain the remainder of the other.
            (Some(_), None) => {
                merged.extend(l);
                return merged;
            }
            (None, _) => {
                merged.extend(r);
                return merged;
            }
        }
    }
}
| 859
|
/// Initializes the NimBLE GATT server: allocates the (intentionally leaked)
/// service definition table, brings up the GAP/GATT services, then counts and
/// registers the services, asserting each step succeeded. Returns 0.
pub unsafe fn gatt_svr_init() -> i32 {
// Leaks the eff out of the svc_def
let svcs_ptr = alloc_svc_def();
print_svcs(svcs_ptr);
ble_svc_gap_init();
ble_svc_gatt_init();
let mut rc;
// Count configuration resources for the service table...
rc = ble_gatts_count_cfg(svcs_ptr);
esp_assert!(rc == 0, cstr!("RC err after ble_gatts_count_cfg\n"));
// ...then register the services themselves.
rc = ble_gatts_add_svcs(svcs_ptr);
esp_assert!(rc == 0, cstr!("RC err after ble_gatts_add_svcs\n"));
return 0;
}
| 860
|
/// Splits a u16 into its (high, low) bytes, big-endian order.
pub fn u16_to_u8(n: u16) -> (u8, u8) {
    let high = (n >> 8) as u8;
    let low = (n & 0xff) as u8;
    (high, low)
}
| 861
|
/// Returns the median of `set`: the middle element of the sorted values for
/// odd lengths, or the mean of the two middle elements for even lengths.
///
/// FIXES: the even-length branch contained a garbled call
/// (`mean(©[middle_index - 1..middle_index])` — a mangled `&copy`) whose
/// slice covered only ONE element; the median of an even-length set is the
/// average of BOTH middle elements, computed directly here.
///
/// # Panics
/// Panics on an empty slice, or if any element is NaN (incomparable).
pub fn median(set: &[f32]) -> f32 {
    assert!(!set.is_empty(), "median of an empty slice is undefined");
    // Work on a sorted copy so the input is left untouched.
    let mut sorted = set.to_vec();
    sorted.sort_by(|a, b| a.partial_cmp(b).unwrap());
    let mid = sorted.len() / 2;
    if sorted.len() % 2 == 0 {
        (sorted[mid - 1] + sorted[mid]) / 2.0
    } else {
        sorted[mid]
    }
}
| 862
|
// Encoding test: FIST with a word-sized scaled-indexed operand must assemble
// to exactly these bytes in 32-bit (Dword) mode.
fn fist_5() {
run_test(&Instruction { mnemonic: Mnemonic::FIST, operand1: Some(IndirectScaledIndexed(EDX, ECX, Four, Some(OperandSize::Word), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[223, 20, 138], OperandSize::Dword)
}
| 863
|
/// Replays the table's rows grouped by timestamp (in ascending order, via
/// BTreeMap) and asserts that after applying each timestamp's updates the
/// balances across all accounts sum to zero — i.e. money is conserved.
///
/// FIX: the balance sum previously cloned the entire tracker HashMap on
/// every timestamp just to iterate it; it now sums over `values()` directly.
fn expect_money_conserved(table: &Table) {
    // Group rows by timestamp; BTreeMap iteration is time-ordered.
    let mut history: BTreeMap<u32, Vec<Row>> = BTreeMap::new();
    for c in table.scan() {
        history.entry(c.t).or_insert_with(Vec::new).push(Row { k: c.k, v: c.v });
    }
    // Latest balance per account key.
    let mut tracker: HashMap<i32, i32> = HashMap::new();
    for (_, rs) in history {
        for r in rs {
            tracker.insert(r.k, r.v);
        }
        let sum: i32 = tracker.values().sum();
        assert!(sum == 0);
    }
}
| 864
|
/// Draws a card footer: a styled background quad plus the optional foot
/// element rendered into the first child layout. Returns the combined
/// primitive group and the mouse interaction reported by the foot content
/// (or the default interaction when there is no foot).
fn draw_foot<Message, B>(
renderer: &mut Renderer<B>,
foot: &Option<Element<'_, Message, Renderer<B>>>,
layout: Layout<'_>,
cursor_position: Point,
viewport: &Rectangle,
style: &Style,
) -> (Primitive, mouse::Interaction)
where
B: Backend + backend::Text,
{
let mut foot_children = layout.children();
// Background quad fills the whole footer bounds.
let foot_background = Primitive::Quad {
bounds: layout.bounds(),
background: style.foot_background,
border_radius: style.border_radius,
border_width: 0.0,
border_color: Color::TRANSPARENT,
};
// No foot content -> empty primitive and default interaction.
let (foot, foot_mouse_interaction) = foot.as_ref().map_or_else(
|| (Primitive::None, mouse::Interaction::default()),
|foot| {
foot.draw(
renderer,
&Defaults {
text: defaults::Text {
color: style.foot_text_color,
},
},
foot_children
.next()
.expect("Graphics: Layout should have a foot content layout"),
cursor_position,
viewport,
)
},
);
// Background first so the content draws on top.
(
Primitive::Group {
primitives: vec![foot_background, foot],
},
foot_mouse_interaction,
)
}
| 865
|
/// Asks the kernel to mark the task identified by `target` as inactive via
/// the `TaskSetInactive` system call.
pub fn task_set_inactive(target: CAddr) {
system_call(SystemCall::TaskSetInactive {
request: target
});
}
| 866
|
/// Build the dataflow that turns timely reachability log events into the
/// arranged traces backing the reachability log index.
///
/// Events are replayed from `event_queue`, flattened into one update per
/// `(source, port, update_type, ts, diff)` tuple, pre-arranged by worker,
/// and finally arranged per active `LogVariant`. Returns one
/// `(trace handle, replay token)` pair per installed index.
pub(super) fn construct<A: Allocate>(
    worker: &mut timely::worker::Worker<A>,
    config: &LoggingConfig,
    event_queue: EventQueue<ReachabilityEvent>,
    compute_state: &ComputeState,
) -> BTreeMap<LogVariant, (KeysValsHandle, Rc<dyn Any>)> {
    // Batch times into intervals of at least 1ms to bound update volume.
    let interval_ms = std::cmp::max(1, config.interval.as_millis());
    // A dataflow for multiple log-derived arrangements.
    let traces = worker.dataflow_named("Dataflow: timely reachability logging", move |scope| {
        let (mut logs, token) = Some(event_queue.link).mz_replay(
            scope,
            "reachability logs",
            config.interval,
            event_queue.activator,
        );
        // If logging is disabled, we still need to install the indexes, but we can leave them
        // empty. We do so by immediately filtering all logs events.
        if !config.enable_logging {
            logs = logs.filter(|_| false);
        }
        use timely::dataflow::operators::generic::builder_rc::OperatorBuilder;
        // Restrict results by those logs that are meant to be active.
        let logs_active = vec![LogVariant::Timely(TimelyLog::Reachability)];
        let mut flatten = OperatorBuilder::new(
            "Timely Reachability Logging Flatten ".to_string(),
            scope.clone(),
        );
        use timely::dataflow::channels::pact::Pipeline;
        let mut input = flatten.new_input(&logs, Pipeline);
        let (mut updates_out, updates) = flatten.new_output();
        let mut buffer = Vec::new();
        // Custom operator: unpack each logged (addr, massaged) event into one
        // update per inner tuple, rounding times up to the next interval edge.
        flatten.build(move |_capability| {
            move |_frontiers| {
                let mut updates = updates_out.activate();
                let mut updates_session = ConsolidateBuffer::new(&mut updates, 0);
                input.for_each(|cap, data| {
                    data.swap(&mut buffer);
                    for (time, worker, (addr, massaged)) in buffer.drain(..) {
                        // Round the event time up to the next interval boundary.
                        let time_ms = (((time.as_millis() / interval_ms) + 1) * interval_ms)
                            .try_into()
                            .expect("must fit");
                        for (source, port, update_type, ts, diff) in massaged {
                            let datum = (update_type, addr.clone(), source, port, worker, ts);
                            updates_session.give(&cap, ((datum, ()), time_ms, diff));
                        }
                    }
                });
            }
        });
        // Pre-arrange, exchanging each update to the worker it describes.
        let updates = updates
            .as_collection()
            .mz_arrange_core::<_, RowSpine<_, _, _, _>>(
                Exchange::new(|(((_, _, _, _, w, _), ()), _, _)| u64::cast_from(*w)),
                "PreArrange Timely reachability",
                compute_state.enable_arrangement_size_logging,
            );
        let mut result = BTreeMap::new();
        for variant in logs_active {
            if config.index_logs.contains_key(&variant) {
                // Key/value permutation for the index declared on this variant.
                let key = variant.index_by();
                let (_, value) = permutation_for_arrangement(
                    &key.iter()
                        .cloned()
                        .map(MirScalarExpr::Column)
                        .collect::<Vec<_>>(),
                    variant.desc().arity(),
                );
                // Reused packer buffer; note the closure keeps it across calls.
                let mut row_buf = Row::default();
                let updates = updates.as_collection(
                    move |(update_type, addr, source, port, worker, ts), _| {
                        let row_arena = RowArena::default();
                        let update_type = if *update_type { "source" } else { "target" };
                        // List datum: operator address followed by the source id.
                        row_buf.packer().push_list(
                            addr.iter()
                                .chain_one(source)
                                .map(|id| Datum::UInt64(u64::cast_from(*id))),
                        );
                        let datums = &[
                            row_arena.push_unary_row(row_buf.clone()),
                            Datum::UInt64(u64::cast_from(*port)),
                            Datum::UInt64(u64::cast_from(*worker)),
                            Datum::String(update_type),
                            Datum::from(ts.clone()),
                        ];
                        row_buf.packer().extend(key.iter().map(|k| datums[*k]));
                        let key_row = row_buf.clone();
                        row_buf.packer().extend(value.iter().map(|k| datums[*k]));
                        let value_row = row_buf.clone();
                        (key_row, value_row)
                    },
                );
                let trace = updates
                    .mz_arrange::<RowSpine<_, _, _, _>>(
                        &format!("Arrange {:?}", variant),
                        compute_state.enable_arrangement_size_logging,
                    )
                    .trace;
                result.insert(variant.clone(), (trace, Rc::clone(&token)));
            }
        }
        result
    });
    traces
}
| 867
|
fn
drop
(
&
mut
self
)
{
if
mem
:
:
needs_drop
:
:
<
T
>
(
)
{
unsafe
{
self
.
as_ptr
(
)
.
drop_in_place
(
)
;
}
}
}
| 868
|
/// Advent of Code day 8 driver: parse the instruction file, execute it, and
/// print the final largest register value (part 1) and the largest value
/// ever held during execution (part 2).
fn main() {
    let input_path = "input.txt";
    let instructions = parse_file(input_path);
    let (registers, peak_value) = process_instructions(&instructions);
    println!("Day 8, part 1: {}", get_largest_register_value(&registers));
    println!("Day 8, part 2: {}", peak_value);
}
| 869
|
/// Allocate a boxed slice of length `len`, producing element `i` by
/// calling `f(i)`.
///
/// Collecting from the sized range reserves exactly `len` slots up front,
/// so the buffer is filled without reallocation.
pub fn make_buffer_with_indexed<T, F: FnMut(usize) -> T>(len: usize, f: F) -> Box<[T]> {
    (0..len).map(f).collect()
}
| 870
|
// Introspection round-trip test: a SQL schema with a two-column unique index
// must produce a datamodel whose model carries the matching multi-field
// `IndexDefinition` (same name, fields, and Unique type).
fn multi_field_uniques_are_preserved_when_generating_data_model_from_a_schema() {
    // Expected datamodel: User(id PK autoincrement, name, lastname) plus a
    // unique index over (name, lastname).
    let ref_data_model = Datamodel {
        models: vec![Model {
            database_name: None,
            name: "User".to_string(),
            documentation: None,
            is_embedded: false,
            is_commented_out: false,
            fields: vec![
                Field::ScalarField(ScalarField {
                    name: "id".to_string(),
                    arity: FieldArity::Required,
                    field_type: FieldType::Base(ScalarType::Int, None),
                    database_name: None,
                    default_value: Some(DMLDefault::Expression(ValueGenerator::new_autoincrement())),
                    is_unique: false,
                    is_id: true,
                    documentation: None,
                    is_generated: false,
                    is_updated_at: false,
                    is_commented_out: false,
                }),
                Field::ScalarField(ScalarField::new(
                    "name",
                    FieldArity::Required,
                    FieldType::Base(ScalarType::String, None),
                )),
                Field::ScalarField(ScalarField::new(
                    "lastname",
                    FieldArity::Required,
                    FieldType::Base(ScalarType::String, None),
                )),
            ],
            is_generated: false,
            indices: vec![datamodel::dml::IndexDefinition {
                name: Some("name_last_name_unique".to_string()),
                fields: vec!["name".to_string(), "lastname".to_string()],
                tpe: datamodel::dml::IndexType::Unique,
            }],
            id_fields: vec![],
        }],
        enums: vec![],
    };
    // Input SQL schema mirroring the model above, with the compound unique
    // index expressed as a schema-level `Index`.
    let schema = SqlSchema {
        tables: vec![Table {
            name: "User".to_string(),
            columns: vec![
                Column {
                    name: "id".to_string(),
                    tpe: ColumnType {
                        data_type: "integer".to_string(),
                        full_data_type: "integer".to_string(),
                        character_maximum_length: None,
                        family: ColumnTypeFamily::Int,
                        arity: ColumnArity::Required,
                    },
                    default: None,
                    auto_increment: true,
                },
                Column {
                    name: "name".to_string(),
                    tpe: ColumnType {
                        data_type: "text".to_string(),
                        full_data_type: "text".to_string(),
                        character_maximum_length: None,
                        family: ColumnTypeFamily::String,
                        arity: ColumnArity::Required,
                    },
                    default: None,
                    auto_increment: false,
                },
                Column {
                    name: "lastname".to_string(),
                    tpe: ColumnType {
                        data_type: "text".to_string(),
                        full_data_type: "text".to_string(),
                        character_maximum_length: None,
                        family: ColumnTypeFamily::String,
                        arity: ColumnArity::Required,
                    },
                    default: None,
                    auto_increment: false,
                },
            ],
            indices: vec![Index {
                name: "name_last_name_unique".to_string(),
                columns: vec!["name".to_string(), "lastname".to_string()],
                tpe: IndexType::Unique,
            }],
            primary_key: Some(PrimaryKey {
                columns: vec!["id".to_string()],
                sequence: None,
                constraint_name: None,
            }),
            foreign_keys: vec![],
        }],
        enums: vec![],
        sequences: vec![],
    };
    // Run introspection for Postgres and compare the full datamodel.
    let introspection_result = calculate_datamodel(&schema, &SqlFamily::Postgres).expect("calculate data model");
    assert_eq!(introspection_result.data_model, ref_data_model);
}
| 871
|
/// Overwrite every element of `b` with a clone of `v`.
///
/// Uses the standard `slice::fill` (which clones the value into each slot)
/// instead of routing through a closure-based helper.
pub fn fill_buffer_clone<T: Clone>(b: &mut [T], v: &T) {
    b.fill(v.clone())
}
| 872
|
/// Print the full lyrics of "The Twelve Days of Christmas", one verse per
/// day, with a blank line after each verse.
fn main() {
    let days = ["first", "second", "third", "fourth", "fifth", "sixth", "seventh", "eighth", "ninth", "tenth", "eleventh", "twelfth"];
    let lyrics = ["A partridge in a pear tree", "Two turtle doves, and", "Three french hens", "Four calling birds", "Five golden rings", "Six geese a-laying", "Seven swans a-swimming", "Eight maids a-milking", "Nine ladies dancing", "Ten lords a-leaping", "Eleven pipers piping", "Twelve drummers drumming"];
    for day in 0..12 {
        println!("On the {} day of Christmas, my true love sent to me", days[day]);
        if day == 0 {
            // First verse is just the single line (plus the blank separator).
            println!("{}\n", lyrics[day]);
        } else {
            // Later verses count down from today's gift to the partridge.
            let mut verse = String::new();
            for line in (0..=day).rev() {
                verse.push_str(lyrics[line]);
                verse.push_str("\n");
            }
            println!("{}", verse)
        };
    }
}
| 873
|
/// Interleave bytes from the request's previous key, current key, and
/// big-endian output-file-size into `dest`, one triple per round, stopping
/// when the shortest of the three sources runs out. Extra capacity in
/// `dest` is left untouched; always returns `true`.
///
/// NOTE(review): the (prev, curr, size-byte) interleaving is an unusual
/// layout for a key scan — confirm this is the intended wire format.
pub fn causet_partitioner_request_scan(request: &CausetPartitionerRequest, dest: &mut [u8]) -> bool {
    // Bind the byte array to a local first: iterating the temporary returned
    // by `to_be_bytes()` directly would borrow a value that is dropped at the
    // end of the statement (E0716) and fail to compile.
    let size_bytes = request.current_output_file_size.to_be_bytes();
    let mut out = dest.iter_mut();
    for ((prev, curr), size) in request
        .prev_user_soliton_id
        .iter()
        .zip(request.current_user_soliton_id.iter())
        .zip(size_bytes.iter())
    {
        // Writes are best-effort: once `dest` is full, remaining triples are
        // consumed but discarded (matching the original `map` on `next()`).
        if let Some(slot) = out.next() {
            *slot = *prev;
        }
        if let Some(slot) = out.next() {
            *slot = *curr;
        }
        if let Some(slot) = out.next() {
            *slot = *size;
        }
    }
    true
}
| 874
|
/// Open a TCP connection to the host/port named in `request`, then complete
/// the WebSocket (and, when `connector` is provided, TLS) handshake with the
/// given `config` applied to the resulting stream.
///
/// # Errors
/// TCP connection failures surface as `Error::Io`; handshake failures are
/// propagated from `client_async_tls_with_connector_and_config`.
pub async fn connect_async_with_tls_connector_and_config<R>(
    request: R,
    connector: Option<Connector>,
    config: Option<WebSocketConfig>,
) -> Result<(WebSocketStream<ConnectStream>, Response), Error>
where
    R: IntoClientRequest + Unpin,
{
    let request: Request = request.into_client_request()?;
    // Derive the TCP endpoint from the request URL.
    let domain = domain(&request)?;
    let port = port(&request)?;
    let try_socket = TcpStream::connect((domain.as_str(), port)).await;
    let socket = try_socket.map_err(Error::Io)?;
    client_async_tls_with_connector_and_config(request, socket, connector, config).await
}
| 875
|
/// Clear the color buffer of the currently bound framebuffer.
/// Only `COLOR_BUFFER_BIT` is cleared; depth and stencil are untouched.
pub fn clear(gl: &gl::Gl) {
    gl.clear(gl::COLOR_BUFFER_BIT);
}
| 876
|
// Regression test: running this crafted ELF must fail with
// `MemWriteOnExecutablePage` instead of silently writing executable memory.
pub fn test_load_elf_crash_64() {
    let buffer = fs::read("tests/programs/load_elf_crash_64").unwrap().into();
    let result = run::<u64, SparseMemory<u64>>(&buffer, &vec!["load_elf_crash_64".into()]);
    assert_eq!(result.err(), Some(Error::MemWriteOnExecutablePage));
}
| 877
|
/// Minimal mpsc demo: a spawned thread sends one id over a channel and the
/// main thread blocks on `recv` and prints it.
fn main() {
    // Channel for passing `i32` values between threads.
    let (sender, receiver) = mpsc::channel::<i32>();
    // Producer thread: send a single message (the id 1).
    thread::spawn(move || {
        sender.send(1).unwrap();
    });
    // Block until the id arrives, then print it.
    println!("receive {}", receiver.recv().unwrap());
}
| 878
|
/// Return the (empty) database type name as a C string pointer.
///
/// The original returned `CString::new("").unwrap().as_ptr()`, which is a
/// dangling pointer: the `CString` temporary is dropped at the end of the
/// statement, a pitfall called out in the `CString::as_ptr` docs. Pointing
/// at a `'static` NUL-terminated byte string keeps the pointer valid for
/// the program's lifetime.
pub extern "C" fn soliton_panic_causet_partitioner_database_type_name() -> *const c_char {
    b"\0".as_ptr() as *const c_char
}
| 879
|
// Known-answer test for `solve_2` on the sample polymer from the puzzle
// statement; the expected result is 4.
fn test_solve_2() {
    assert_eq!(solve_2("dabAcCaCBAcCcaDA"), 4);
}
| 880
|
/// Wire a `ConflictResolverFactory` into `ledger`: one end of a fresh
/// channel backs the client handle handed to the ledger, the other end is
/// served by `ConflictResolverFactoryServer { key }` on a spawned server.
///
/// NOTE(review): errors from `set_conflict_resolver_factory` are routed to
/// `ledger_crash_callback` — confirm crashing is the intended failure policy.
pub fn start_conflict_resolver_factory(ledger: &mut Ledger_Proxy, key: Vec<u8>) {
    let (s1, s2) = Channel::create(ChannelOpts::Normal).unwrap();
    let resolver_client = ConflictResolverFactory_Client::from_handle(s1.into_handle());
    let resolver_client_ptr = ::fidl::InterfacePtr {
        inner: resolver_client,
        version: ConflictResolverFactory_Metadata::VERSION,
    };
    // Server result deliberately detached; it lives for the channel's lifetime.
    let _ = fidl::Server::new(ConflictResolverFactoryServer { key }, s2).spawn();
    ledger.set_conflict_resolver_factory(Some(resolver_client_ptr)).with(ledger_crash_callback);
}
| 881
|
/// Cryptopals challenge 4: among all hex-encoded lines in `data/4.txt`,
/// find the one most likely single-byte-XOR encrypted (lowest confidence
/// score) and print its decryption alongside the score.
pub fn challenge_4() {
    let mut file = File::open("data/4.txt").unwrap();
    let mut contents = String::new();
    file.read_to_string(&mut contents).unwrap();
    // Best candidate so far: (ciphertext, key, score); lower score wins.
    let mut best = (vec![], 0, f64::INFINITY);
    for candidate in contents.lines() {
        let bytes = hex::decode(candidate).expect("Test");
        let (key, score) = crypto::find_single_xor(&bytes);
        if score < best.2 {
            best = (bytes, key, score);
        }
    }
    // Decrypt the winner by repeating its single-byte key.
    let plaintext = crypto::xor_repeating(&best.0, &vec![best.1]);
    println!("{} {}", String::from_utf8(plaintext).unwrap(), best.2);
}
| 882
|
/// Parse the simulation's initial state from stdin.
///
/// Expected input: "<w> <h>" line, "<x> <y>" line, a heading line (N/E/S/W),
/// a turn-count line, then the grid ('#' = black, '.' = white), one row per
/// line. Panics on malformed input.
fn read_init() -> State {
    let stdin = io::stdin();
    let mut input = stdin.lock().lines().map(|l| l.unwrap());
    // Line 1: board dimensions.
    let dims_line = input.next().unwrap();
    let mut dims = dims_line.split(" ");
    let width: u32 = dims.next().unwrap().parse().unwrap();
    let height: u32 = dims.next().unwrap().parse().unwrap();
    // Line 2: the ant's starting coordinates.
    let pos_line = input.next().unwrap();
    let mut pos = pos_line.split(" ");
    let start_x: u32 = pos.next().unwrap().parse().unwrap();
    let start_y: u32 = pos.next().unwrap().parse().unwrap();
    // Line 3: initial heading.
    let heading = match input.next().unwrap().trim() {
        "N" => Direction::N,
        "E" => Direction::E,
        "S" => Direction::S,
        "W" => Direction::W,
        _ => unreachable!(),
    };
    // Line 4: number of turns to simulate.
    let turns: u32 = input.next().unwrap().parse().unwrap();
    // Remaining lines: the grid cells.
    let grid = input
        .map(|row| {
            row.chars()
                .map(|cell| match cell {
                    '#' => GridCell::Black,
                    '.' => GridCell::White,
                    _ => unreachable!(),
                })
                .collect()
        })
        .collect::<Vec<Vec<_>>>();
    State {
        width: width as usize,
        height: height as usize,
        ant_x: start_x as usize,
        ant_y: start_y as usize,
        ant_dir: heading,
        turns_left: turns as usize,
        grid: grid,
    }
}
| 883
|
/// Single left-to-right reduction pass: drop each adjacent pair of units
/// for which `does_react` holds, keeping everything else.
///
/// NOTE(review): one pass does not re-examine the unit exposed by a removal;
/// presumably the caller repeats `reduce` until a fixpoint — confirm.
fn reduce(input: &str) -> String {
    // The unit currently awaiting a reaction partner, if any.
    let mut pending: Option<char> = None;
    let mut output = String::new();
    for unit in input.chars() {
        match pending {
            // Nothing held yet: hold this unit.
            None => pending = Some(unit),
            // Held unit reacts with the new one: both are annihilated.
            Some(held) if does_react(held, unit) => pending = None,
            // No reaction: the held unit survives; hold the new one.
            Some(held) => {
                output.push(held);
                pending = Some(unit);
            }
        }
    }
    // Flush the final held unit, if any.
    if let Some(held) = pending {
        output.push(held);
    }
    output
}
| 884
|
/// Build the NimBLE GATT service table (heart-rate service + device-info
/// service) as a leaked, NUL-terminated array of `ble_gatt_svc_def`.
///
/// The `leaky_box!` allocations are intentionally never freed: the BLE stack
/// keeps raw pointers into this table for the program's lifetime. Each
/// characteristic/service list ends with a null sentinel entry as the C API
/// requires.
fn alloc_svc_def() -> *const ble_gatt_svc_def {
    leaky_box!(
        // Service 1: heart-rate, with a notify-only measurement
        // characteristic and a read-only body-sensor-location characteristic.
        ble_gatt_svc_def {
            type_: BLE_GATT_SVC_TYPE_PRIMARY as u8,
            uuid: ble_uuid16_declare!(GATT_HRS_UUID),
            includes: ptr::null_mut(),
            characteristics: leaky_box!(
                ble_gatt_chr_def {
                    uuid: ble_uuid16_declare!(GATT_HRS_MEASUREMENT_UUID),
                    access_cb: Some(gatt_svr_chr_access_heart_rate),
                    arg: (ptr::null_mut()),
                    descriptors: (ptr::null_mut()),
                    flags: BLE_GATT_CHR_F_NOTIFY as u16,
                    min_key_size: 0,
                    // The stack writes the attribute handle into this global.
                    val_handle: (unsafe { &mut HRS_HRM_HANDLE as *mut u16 }),
                },
                ble_gatt_chr_def {
                    uuid: ble_uuid16_declare!(GATT_HRS_BODY_SENSOR_LOC_UUID),
                    access_cb: Some(gatt_svr_chr_access_heart_rate),
                    arg: (ptr::null_mut()),
                    descriptors: (ptr::null_mut()),
                    flags: BLE_GATT_CHR_F_READ as u16,
                    min_key_size: 0,
                    val_handle: ptr::null_mut(),
                },
                null_ble_gatt_chr_def()
            )
        },
        // Service 2: device information, exposing manufacturer name and
        // model number as read-only characteristics.
        ble_gatt_svc_def {
            type_: BLE_GATT_SVC_TYPE_PRIMARY as u8,
            uuid: ble_uuid16_declare!(GATT_DEVICE_INFO_UUID),
            includes: ptr::null_mut(),
            characteristics: leaky_box!(
                ble_gatt_chr_def {
                    uuid: ble_uuid16_declare!(GATT_MANUFACTURER_NAME_UUID),
                    access_cb: Some(gatt_svr_chr_access_device_info),
                    arg: (ptr::null_mut()),
                    descriptors: (ptr::null_mut()),
                    flags: BLE_GATT_CHR_F_READ as u16,
                    min_key_size: 0,
                    val_handle: (ptr::null_mut()),
                },
                ble_gatt_chr_def {
                    uuid: ble_uuid16_declare!(GATT_MODEL_NUMBER_UUID),
                    access_cb: Some(gatt_svr_chr_access_device_info),
                    arg: (ptr::null_mut()),
                    descriptors: (ptr::null_mut()),
                    flags: BLE_GATT_CHR_F_READ as u16,
                    min_key_size: 0,
                    val_handle: (ptr::null_mut()),
                },
                null_ble_gatt_chr_def()
            )
        },
        null_ble_gatt_svc_def()
    )
}
| 885
|
/// Parse the guard-duty log lines into one `Day` per shift.
///
/// Events are sorted chronologically, then consumed shift by shift: each
/// shift starts with a `BeginsShift` event, and the 60 minutes of the
/// midnight hour are walked while toggling awake/asleep on matching
/// wake/sleep events.
fn initialize(lines: &Vec<String>) -> Vec<Day> {
    let regex = Regex::new(r"(\d\d-\d\d) ((?:23|00):\d\d)\] (Guard #(\d*)|wakes|falls)").expect("Building Regex failed");
    let mut events = lines.iter().map(|l| GuardEvent::from_line(l, &regex)).collect::<Vec<GuardEvent>>();
    // Chronological order: by date, ties broken by minute.
    events.sort_by(|GuardEvent {date: date1, minute: minute1, ..}, GuardEvent {date: date2, minute: minute2, ..}| {
        date1.cmp(date2).then(minute1.cmp(minute2))
    });
    let mut days = Vec::new();
    let mut events_iter = events.iter();
    let mut event_option = events_iter.next();
    while event_option.is_some() {
        // Every shift must open with a BeginsShift event carrying the guard id.
        let event = event_option.unwrap();
        assert_eq!(event.action, BeginsShift);
        let mut current_day = Day::new(event.guard_id.unwrap());
        let mut is_awake = true;
        event_option = events_iter.next();
        // Walk minutes 0..60, flipping the awake flag whenever the next
        // wake/sleep event lands on the current minute.
        for minute in 0..60 {
            if event_option.map_or(false, |e| e.action != BeginsShift && e.minute == minute) {
                is_awake = !is_awake;
                event_option = events_iter.next();
            }
            current_day.set_next_minute(is_awake);
        }
        days.push(current_day);
    }
    days
}
| 886
|
// Verifies correct decoding when an RVC (2-byte compressed) instruction sits
// on the very last halfword of an executable page, with data immediately
// after it on the next page.
pub fn test_rvc_pageend() {
    // The last instruction of a executable memory page is an RVC instruction.
    let buffer = fs::read("tests/programs/rvc_pageend").unwrap().into();
    let core_machine =
        DefaultCoreMachine::<u64, SparseMemory<u64>>::new(ISA_IMC, VERSION0, u64::max_value());
    let mut machine = DefaultMachineBuilder::new(core_machine).build();
    machine
        .load_program(&buffer, &vec!["rvc_end".into()])
        .unwrap();
    let anchor_pc: u64 = 69630;
    // Ensure that anchor_pc is in the end of the page
    assert_eq!(anchor_pc as usize % RISCV_PAGESIZE, RISCV_PAGESIZE - 2);
    let memory = machine.memory_mut();
    // Ensure that the data segment is located at anchor_pc + 2
    let data0 = memory.load16(&(anchor_pc + 2)).unwrap().to_u32();
    assert_eq!(data0, 4);
    let data1 = memory.load16(&(anchor_pc + 6)).unwrap().to_u32();
    assert_eq!(data1, 2);
    // Ensure that the anchor instruction is "c.jr a0"
    let anchor_inst = memory.load16(&anchor_pc).unwrap().to_u16();
    assert_eq!(anchor_inst, 0x8502);
    // The program must run to completion and exit 0.
    let result = machine.run();
    assert!(result.is_ok());
    assert_eq!(result.unwrap(), 0);
}
| 887
|
/// Build the installer `ResourceMap` from the resource files declared in
/// `settings`.
///
/// Each file gets a GUID derived from its filename, an absolute path, and a
/// sanitized id; files are then attached to `ResourceDirectory` chains that
/// mirror their on-disk directory structure.
///
/// # Errors
/// Fails if the sanitizing regex cannot compile, the current directory is
/// unreadable, or a resource-file entry cannot be resolved.
fn generate_resource_data(settings: &Settings) -> crate::Result<ResourceMap> {
    let mut resources = ResourceMap::new();
    // Strips every character that is not a word character or '.' when
    // deriving the resource id from the filename.
    let regex = Regex::new(r"[^\w\d\.]")?;
    let cwd = std::env::current_dir()?;
    for src in settings.resource_files() {
        let src = src?;
        let filename = src
            .file_name()
            .expect("failed to extract resource filename")
            .to_os_string()
            .into_string()
            .expect("failed to convert resource filename to string");
        let resource_path = cwd
            .join(src.clone())
            .into_os_string()
            .into_string()
            .expect("failed to read resource path");
        let resource_entry = ResourceFile {
            guid: generate_guid(filename.as_bytes()).to_string(),
            path: resource_path,
            id: regex.replace_all(&filename, "").to_string(),
        };
        // split the resource path directories, dropping `.`/`..` segments
        let mut directories = src
            .components()
            .filter(|component| {
                let comp = component.as_os_str();
                comp != "." && comp != ".."
            })
            .collect::<Vec<_>>();
        // The final component is the filename itself, not a directory.
        directories.truncate(directories.len() - 1);
        // transform the directory structure to a chained vec structure
        for directory in directories {
            let directory_name = directory
                .as_os_str()
                .to_os_string()
                .into_string()
                .expect("failed to read resource folder name");
            // if the directory is already on the map
            if resources.contains_key(&directory_name) {
                let directory_entry = resources
                    .get_mut(&directory_name)
                    .expect("Unable to handle resources");
                if directory_entry.name == directory_name {
                    // the directory entry is the root of the chain
                    directory_entry.add_file(resource_entry.clone());
                } else if let Some(index) = directory_entry
                    .directories
                    .iter()
                    .position(|f| f.name == directory_name)
                {
                    // the directory entry is already a part of the chain
                    let dir = directory_entry
                        .directories
                        .get_mut(index)
                        .expect("Unable to get directory");
                    dir.add_file(resource_entry.clone());
                } else {
                    // push it to the chain
                    directory_entry.directories.push(ResourceDirectory {
                        name: directory_name.clone(),
                        directories: vec![],
                        files: vec![resource_entry.clone()],
                    });
                }
            } else {
                resources.insert(
                    directory_name.clone(),
                    ResourceDirectory {
                        name: directory_name.clone(),
                        directories: vec![],
                        files: vec![resource_entry.clone()],
                    },
                );
            }
        }
    }
    Ok(resources)
}
| 888
|
/// Query provider: collect every inherent impl in the local crate by walking
/// all HIR items and recording them into the collector's map.
pub fn crate_inherent_impls(tcx: TyCtxt<'_>, (): ()) -> CrateInherentImpls {
    let mut collect = InherentCollect { tcx, impls_map: Default::default() };
    for id in tcx.hir().items() {
        collect.check_item(id);
    }
    collect.impls_map
}
| 889
|
/// Recursively delete the directory at `path`, mapping any I/O failure to
/// its display string.
pub fn acrn_remove_dir(path: &str) -> Result<(), String> {
    match fs::remove_dir_all(path) {
        Ok(()) => Ok(()),
        Err(err) => Err(err.to_string()),
    }
}
| 890
|
// Encoding test: INC with a direct EDX operand in 32-bit mode must assemble
// to the single byte 0x42 (decimal 66); delegates to the `run_test` harness.
fn inc_21() {
    run_test(&Instruction { mnemonic: Mnemonic::INC, operand1: Some(Direct(EDX)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[66], OperandSize::Dword)
}
| 891
|
// Encoding test: INC of a dword at [SI+122] in 16-bit mode must assemble to
// the bytes [0x66, 0xFF, 0x44, 0x7A] (decimal 102, 255, 68, 122).
fn inc_20() {
    run_test(&Instruction { mnemonic: Mnemonic::INC, operand1: Some(IndirectDisplaced(SI, 122, Some(OperandSize::Dword), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 255, 68, 122], OperandSize::Word)
}
| 892
|
/// Skip leading spaces and tabs (but not newlines), returning the rest of
/// the input.
///
/// Equivalent to nom's `space0`, which recognizes zero or more ' ' / '\t'
/// characters, but implemented with the standard library — no parser
/// combinator machinery or `unwrap` needed.
fn skip_whitespace(input: &str) -> &str {
    input.trim_start_matches(|c| c == ' ' || c == '\t')
}
| 893
|
/// Copy the file at `path` into a VMO and return it, together with the
/// file's byte length, as a `mem::Buffer`.
///
/// # Errors
/// Fails if the file cannot be opened, copied into a VMO, or stat'ed.
fn load_asset_to_vmo(path: &Path) -> Result<mem::Buffer, Error> {
    let file = File::open(path)?;
    let vmo = fdio::get_vmo_copy_from_file(&file)?;
    let size = file.metadata()?.len();
    Ok(mem::Buffer { vmo, size })
}
| 894
|
pub fn smart_to_total_words() -> TotalWords {
let mut words_vec: Vec<Words> = Vec::new();
for i in 1..22 {
let words = smart_to_words(i);
words_vec.push(words);
}
TotalWords::new(words_vec)
}
| 895
|
/// Hard-fault exception handler: never returns; panics with a debug dump of
/// the stacked exception frame so the fault site can be diagnosed.
fn hard_fault(ef: &ExceptionFrame) -> ! {
    panic!("Hardfault... : {:#?}", ef);
}
| 896
|
/// Append "hello, world!\n" to `foo.txt`, creating the file if needed.
///
/// An open failure is printed (not propagated); only the write itself can
/// return an error to the caller.
pub fn file_append() -> io::Result<()> {
    let filename = "foo.txt";
    // Open (or create) the file in read/write append mode.
    let opened = OpenOptions::new()
        .read(true)
        .write(true)
        .create(true)
        .append(true)
        .open(filename);
    match opened {
        Ok(mut stream) => stream.write_all(b"hello, world!\n")?,
        Err(err) => println!("{:?}", err),
    }
    Ok(())
}
| 897
|
/// Read a whitespace-separated matrix of `T` from stdin until EOF; each
/// input line becomes one row.
///
/// Parse failures panic via `unwrap`, which suits throwaway / contest-style
/// input handling.
fn read_matrix<T>() -> Vec<Vec<T>>
where
    T: std::str::FromStr,
    T::Err: std::fmt::Debug,
{
    use std::io::prelude::*;
    let stdin = stdin();
    // Large read buffer plus one reused line String avoids a fresh
    // allocation per input line.
    let mut reader = BufReader::with_capacity(100 * 1024, stdin);
    let mut line = String::with_capacity(100);
    let mut matrix: Vec<Vec<T>> = Vec::new();
    while reader.read_line(&mut line).unwrap() > 0 {
        matrix.push(
            line.trim()
                .split_whitespace()
                .map(|s| s.parse().unwrap())
                .collect(),
        );
        line.clear();
    }
    return matrix;
}
| 898
|
/// Write `contents` to the board named `board_name` through a fresh
/// `Configurator` rooted at the global working folder.
///
/// # Errors
/// Propagates the error string from `Configurator::write_board`.
pub fn write_board(board_name: String, contents: String) -> Result<(), String> {
    let mut configurator = Configurator::new();
    // SAFETY: reads the mutable global `WORKING_FOLDER`; assumes no other
    // thread mutates it concurrently — TODO confirm single-threaded access.
    unsafe {
        configurator.set_working_folder(WORKING_FOLDER.clone());
    }
    configurator.write_board(board_name, contents)
}
| 899
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.