content
stringlengths 12
392k
| id
int64 0
1.08k
|
|---|---|
/// Advent of Code 2015 day 1: computes Santa's final floor.
/// Each '(' goes up one floor and each ')' goes down one.
///
/// Takes `&str` (instead of `&String`) so any string slice works; callers
/// holding a `String` still compile via deref coercion.
///
/// # Panics
/// Panics on any character other than '(' or ')'.
fn santa(instruction: &str) -> i32 {
    let mut floor: i32 = 0;
    for paren in instruction.chars() {
        // The per-character debug println! was removed: it dominated runtime
        // on large inputs and spammed stdout.
        match paren {
            '(' => floor += 1,
            ')' => floor -= 1,
            _ => panic!("unexpected character in instructions: {:?}", paren),
        }
    }
    floor
}
| 500
|
/// Postgres index access method `amcostestimate` callback: fills in the
/// planner's cost, selectivity, correlation and page-count outputs for a
/// scan of this (ZomboDB) index.
///
/// # Safety
/// All pointer arguments must be valid; the Postgres planner guarantees
/// this when it invokes the callback.
pub unsafe extern "C" fn amcostestimate(
_root: *mut pg_sys::PlannerInfo,
path: *mut pg_sys::IndexPath,
_loop_count: f64,
index_startup_cost: *mut pg_sys::Cost,
index_total_cost: *mut pg_sys::Cost,
index_selectivity: *mut pg_sys::Selectivity,
index_correlation: *mut f64,
index_pages: *mut f64,
) {
let path = path.as_ref().expect("path argument is NULL");
let indexinfo = path.indexinfo.as_ref().expect("indexinfo in path is NULL");
// Take a share lock on the index relation while we inspect it.
let index_relation = unsafe {
PgRelation::with_lock(
indexinfo.indexoid,
pg_sys::AccessShareLock as pg_sys::LOCKMODE,
)
};
let heap_relation = index_relation
.heap_relation()
.expect("failed to get heap relation for index");
// Start from "free" costs: this index has no on-disk pages to read.
*index_correlation = 1.0;
*index_startup_cost = 0.0;
*index_pages = 0.0;
*index_total_cost = 0.0;
// go with the smallest already-calculated selectivity.
// these would have been calculated in zdb_restrict()
*index_selectivity = 1.0;
// The element type of `indexclauses` changed in Postgres 12
// (RestrictInfo -> IndexClause), hence the cfg split below.
#[cfg(any(feature = "pg10", feature = "pg11"))]
let index_clauses = PgList::<pg_sys::RestrictInfo>::from_pg(path.indexclauses);
#[cfg(any(feature = "pg12", feature = "pg13", feature = "pg14", feature = "pg15"))]
let index_clauses = PgList::<pg_sys::IndexClause>::from_pg(path.indexclauses);
for clause in index_clauses.iter_ptr() {
#[cfg(any(feature = "pg10", feature = "pg11"))]
let ri = clause.as_ref().expect("restrict info is NULL");
#[cfg(any(feature = "pg12", feature = "pg13", feature = "pg14", feature = "pg15"))]
let ri = clause
.as_ref()
.unwrap()
.rinfo
.as_ref()
.expect("restrict info in index clause is NULL");
// Only selectivities > 0 participate; presumably <= 0 means
// "not calculated" -- TODO confirm against zdb_restrict().
if ri.norm_selec > 0f64 {
*index_selectivity = ri.norm_selec.min(*index_selectivity);
}
}
// Total cost scales with the number of heap tuples we expect to touch.
let reltuples = heap_relation.reltuples().unwrap_or(1f32) as f64;
*index_total_cost += *index_selectivity * reltuples * pg_sys::cpu_index_tuple_cost;
// we subtract random_page_cost from the total cost because Postgres assumes we'll read at least
// one index page, and that's just not true for ZomboDB -- we have no pages on disk
//
// Assuming default values for random_page_cost and seq_page_cost, this should always
// get our IndexScans set to a lower cost than a sequential scan, which we don't necessarily prefer,
// allowing Postgres to instead prefer to use our index for plans where it can actually use one
*index_total_cost -= pg_sys::random_page_cost;
}
| 501
|
/// Reports whether the 1-D spans [x0, x1] and [ox0, ox1] overlap:
/// true when either span contains an endpoint of the other.
fn does_line_intersect(x0: i64, x1: i64, ox0: i64, ox1: i64) -> bool {
    let within = |lo: i64, hi: i64, v: i64| lo <= v && v <= hi;
    within(x0, x1, ox0)
        || within(x0, x1, ox1)
        || within(ox0, ox1, x0)
        || within(ox0, ox1, x1)
}
| 502
|
/// Thread entry point that forever drains the global debug-info channel
/// and prints each message it receives; receive errors are ignored and the
/// loop simply tries again.
extern "C" fn debug_info_thread_fn() {
    loop {
        match DEBUG_INFO_RECEIVER.recv() {
            Ok(debug_info) => debug_info.print(),
            Err(_) => {}
        }
    }
}
| 503
|
// Encoding test: `INC cl` with 16-bit operand size must assemble to
// 0xFE 0xC1 (opcode 0xFE /0, ModRM 0xC1 = 193 selects CL).
fn inc_5() {
run_test(&Instruction { mnemonic: Mnemonic::INC, operand1: Some(Direct(CL)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[254, 193], OperandSize::Word)
}
| 504
|
/// Seed `update` function: dispatches UI messages, triggers the async data
/// fetch, and renders the chart.js chart once data arrives.
fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {
match msg {
Msg::Clicked => {
model.test = model.test+1;
},
Msg::FetchData=> {
// skip() suppresses the re-render for this message; the subsequent
// DataFetched message will drive the UI update instead.
orders
.skip()
.perform_cmd(fetch_data());
},
Msg::DataFetched(data) => {
model.data = Some(data.result.unwrap().data.unwrap());
use wasm_bindgen::JsCast;
// Grab the <canvas id="myChart"> element and its 2d context straight
// from the DOM, bypassing Seed's virtual DOM.
let doc:web_sys::HtmlCanvasElement = document().get_element_by_id("myChart").unwrap().dyn_into().unwrap();
let context:web_sys::CanvasRenderingContext2d = doc.get_context("2d").unwrap().unwrap().dyn_into().unwrap();
// Convert fetched records into chart points via the Point intermediary.
let points : Vec<PointMod> = model.data.clone().unwrap().into_iter().map(|data| Point::from(data).into() ).collect();
chartjs::Chart::load_js_lib(context, points);
}
// Fetch errors are deliberately swallowed here.
Msg::OnFetchError { label: _, fail_reason: _ } => {}
}
}
| 505
|
/// Asserts that a `Memory` implementation accepts both a single-byte store
/// and a zero-length bulk store without reporting an error.
fn assert_memory_store_empty_bytes<M: Memory>(memory: &mut M) {
assert!(memory.store_byte(0, 0, 42).is_ok());
// Writing an empty slice must be an accepted no-op, not an error.
assert!(memory.store_bytes(0, &[]).is_ok());
}
| 506
|
/// Cryptopals set 1, challenge 2: XOR two equal-length hex strings and
/// print the hex-encoded result.
pub fn challenge_2() {
    let lhs = hex::decode("1c0111001f010100061a024b53535009181c").unwrap();
    let rhs = hex::decode("686974207468652062756c6c277320657965").unwrap();
    let xored = crypto::xor_repeating(&lhs, &rhs);
    println!("{}", hex::encode(xored));
}
| 507
|
/// Verifies that `lz_str` URI-component compression emits only URL-safe
/// characters and round-trips, and that a known compressed blob containing
/// spaces (interchangeable with '+') still decompresses to the expected text.
fn specific_url_encoded() {
eprintln!("check that all chars are URL safe");
// Build ~1000 random floats worth of text as a stress input.
let mut test_string = String::new();
for _ in 0..1000 {
write!(&mut test_string, "{} ", rand::thread_rng().gen::<f64>())
.expect("write rand float to string")
}
let compressed = lz_str::compress_to_encoded_uri_component(
&test_string.encode_utf16().collect::<Vec<u16>>(),
);
// '=' and '/' are the characters that would need percent-encoding.
assert!(!compressed.contains('='));
assert!(!compressed.contains('/'));
let decompressed =
lz_str::decompress_from_encoded_uri_component(&compressed).expect("Valid Decompress");
// Round trip must reproduce the original UTF-16 code units exactly.
assert_eq!(
decompressed,
test_string.encode_utf16().collect::<Vec<u16>>()
);
eprintln!("check that + and ' ' are interchangeable in decompression");
// Known-good plaintext / compressed pair; the compressed blob contains
// literal spaces where '+' would normally appear.
let decompressed = "During tattooing, ink is injected into the skin, initiating an immune response, and cells called \"macrophages\" move into the area and \"eat up\" the ink. The macrophages carry some of the ink to the body\'s lymph nodes, but some that are filled with ink stay put, embedded in the skin. That\'s what makes the tattoo visible under the skin. Dalhousie Uiversity\'s Alec Falkenham is developing a topical cream that works by targeting the macrophages that have remained at the site of the tattoo. New macrophages move in to consume the previously pigment-filled macrophages and then migrate to the lymph nodes, eventually taking all the dye with them. \"When comparing it to laser-based tattoo removal, in which you see the burns, the scarring, the blisters, in this case, we\'ve designed a drug that doesn\'t really have much off-target effect,\" he said. \"We\'re not targeting any of the normal skin cells, so you won\'t see a lot of inflammation. In fact, based on the process that we\'re actually using, we don\'t think there will be any inflammation at all and it would actually be anti-inflammatory.";
let compressed = "CIVwTglgdg5gBAFwIYIQezdGAaO0DWeAznlAFYCmAxghQCanqIAWFcR 0u0ECEKWOEih4AtqJBQ2YCkQAOaKEQq5hDKhQA2mklSTb6cAESikVMGjnMkMWUbii0ANzbQmCVkJlIhUBkYoUOBA5ew9XKHwAOjgAFU9Tc0trW10kMDAAT3Y0UTY0ADMWCMJ3TwAjNDpMgHISTUzRKzgoKtlccpAEHLyWIPS2AogDBgB3XmZSQiJkbLku3ApRcvo6Q2hi9k4oGPiUOrhR627TfFlN5FQMOCcIIghyzTZJNbBNjmgY4H1mNBB7tgAVQgLjA9wQtRIAEEnlQ4AAxfRnKDWUTEOBrFyaSyCHzoOQQPSaODmQJojxBUZoMD4EjlbLIMC2PiwTaJCxWGznCndawuOAyUzQQxBcLsXj5Ipiy7oNAxAByFFGDjMHJS50c-I2TCoiiIIF6YrkMlufyIDTgBJgeSgCAAtEMRiqkpzUr4GOERKIIDAwCg2GU2A0mpNWmsiIsXLaQPoLchtvBY5tqmxxh5iqIYkYAOqsES6prpQS8RBoOCaJDKMB28qVwwy66C5z6bgiI6EyaZP7sCgBirgJS4MVEPQZLBDiqaO60MGtlh3El13CjCg1fnhn1SBg OhgEDwHkYtCyKA1brebTZPlsCRUSaFAp2xnMuAUAoFagIbD2TxEJAQOgs2zVcZBaNBumfCgWUTKBskKTZWjAUxiQ fMtB0XAiDLLsQEORQzx7NgfGxbp4OgAoK3EARFBiABJEQCjML84FrZQGEUTZjTQDQiBIQ8VxqUCmJjS9gnuWBlzYOh8Ig5gCGKUDxm0FiiNg0gKKQKi A4-plLUPBuipEBNG3GgRItFZfD4O1yMo0x0CyKIgAAA$$";
let decomp2 =
lz_str::decompress_from_encoded_uri_component(compressed).expect("Valid Decompress");
assert_eq!(decompressed.encode_utf16().collect::<Vec<u16>>(), decomp2);
}
| 508
|
/// Exercises LineSegment/Region intersection in 2-D: segment-vs-segment,
/// region-vs-segment, and the symmetric segment-vs-region calls must agree.
fn test_line_intersections() {
let p1 = Point::new(vec![1.0, 0.0]);
let p2 = Point::new(vec![3.0, 2.0]);
let p3 = Point::new(vec![2.0, 0.0]);
let p3a = Point::new(vec![2.0, 3.0]);
let p4 = Point::new(vec![2.0, 4.0]);
let p5 = Point::new(vec![1.0, 1.0]);
let p6 = Point::new(vec![2.5, 3.0]);
let p7 = Point::new(vec![1.0, 2.0]);
let p8 = Point::new(vec![0.0, -1.0]);
let p9 = Point::new(vec![4.0, 3.0]);
// ls1 runs diagonally; ls2 is vertical crossing it; ls3 is vertical above it.
let ls1 = LineSegment::new(p1, p2);
let ls2 = LineSegment::new(p3, p4.clone());
let ls3 = LineSegment::new(p3a, p4);
assert_eq!(ls1.intersects_line_segment(&ls2), Ok(true));
assert_eq!(ls1.intersects_line_segment(&ls3), Ok(false));
// r1 straddles ls1, r2 sits off to its side, r3 encloses it entirely.
let r1 = Region::from_points(&p5, &p6);
let r2 = Region::from_points(&p7, &p6);
let r3 = Region::from_points(&p8, &p9);
// Each region/segment pair is checked in both call directions.
assert_eq!(r1.intersects_line_segment(&ls1), Ok(true));
assert_eq!(ls1.intersects_region(&r1), Ok(true));
assert_eq!(r2.intersects_line_segment(&ls1), Ok(false));
assert_eq!(ls1.intersects_region(&r2), Ok(false));
assert_eq!(r3.intersects_line_segment(&ls1), Ok(true));
assert_eq!(ls1.intersects_region(&r3), Ok(true));
}
| 509
|
/// Builds an `AsyncIO` pair over the process's stdin/stdout: stdin is put
/// into non-blocking mode via `tokio_file_unix` and registered with the
/// default tokio reactor; stdout uses tokio's standard async writer.
///
/// Panics if stdin cannot be duplicated or set non-blocking.
pub fn stdio() -> AsyncIO<impl AsyncRead, impl AsyncWrite> {
let fin = tokio_file_unix::File::new_nb(tokio_file_unix::raw_stdin().unwrap())
.unwrap()
.into_reader(&tokio::reactor::Handle::default())
.unwrap();
let fout = tokio::io::stdout();
AsyncIO { fin, fout }
}
| 510
|
/// Parses one pattern side such as "#./.#" into a square boolean matrix:
/// '#' maps to true, any other cell byte to false, and '/' row separators
/// are discarded before the cells are collected.
fn to_matrix(side: &str) -> Matrix<bool> {
    let cells = side
        .bytes()
        .filter(|&b| b != b'/')
        .map(|b| b == b'#')
        .collect();
    Matrix::square_from_vec(cells)
}
| 511
|
/// Re-runs the program with the instruction at `pc` swapped to `op`.
/// If execution now terminates (the program counter runs one past the last
/// instruction) the final accumulator is printed and `true` is returned;
/// if the program still loops, returns `false`.
fn print_fixed_acc(inss: &[Instruction], op: Operation, pc: usize) -> bool {
// Work on a copy so the caller's program is left untouched.
let mut fixed_inss = inss.to_vec();
fixed_inss[pc].op = op;
match Evaluator::new(&mut fixed_inss).eval_until_loop() {
// pc == len means we stepped past the end: normal termination.
(final_pc, final_acc, _) if final_pc == fixed_inss.len() => {
println!("{}", final_acc);
true
}
_ => false,
}
}
| 512
|
/// Tries flipping each `nop`<->`jmp` instruction in turn until one change
/// makes the program terminate (Advent of Code 2020 day 8 part 2 style),
/// then stops at the first successful fix.
fn main() -> Result<()> {
let inss = parse_instructions()?;
for (pc, ins) in inss.iter().enumerate() {
match ins.op {
Operation::Nothing => {
// Don't invert zero `nop`s as `jmp +0` results in a loop.
if ins.arg != 0 && print_fixed_acc(&inss, Operation::Jump, pc) {
break;
}
}
Operation::Jump => {
// Finish as soon as one inversion fixes the code.
if print_fixed_acc(&inss, Operation::Nothing, pc) {
break;
}
}
// `acc` instructions are never candidates for inversion.
Operation::Accumulate => {}
}
}
Ok(())
}
| 513
|
/// Writes `exit_code` to I/O port 0xf4 — presumably QEMU's `isa-debug-exit`
/// device (TODO confirm the device is mapped at 0xf4 in the QEMU invocation),
/// which terminates the emulator with a status derived from the value.
fn exit_qemu(exit_code: u32) {
use x86_64::instructions::port::Port;
unsafe {
// SAFETY: the port write only signals the emulator; it does not touch
// guest memory.
let mut port = Port::new(0xf4);
port.write(exit_code);
}
}
| 514
|
/// Looks up a previously registered `Code` keyed by
/// (module, index, old_unique, unique, arity) in the global nested map,
/// returning a copy of it if every level of the key is present.
///
/// The deeply nested `and_then` closures were flattened into a single `?`
/// chain, and `map(|code| *code)` became the idiomatic `.copied()`.
pub fn get(
    module: &Atom,
    index: &Index,
    old_unique: &OldUnique,
    unique: &Unique,
    arity: &Arity,
) -> Option<Code> {
    // The read guard lives until the end of this expression, which is long
    // enough because the final `.copied()` returns an owned value.
    RW_LOCK_CODE_BY_ARITY_BY_UNIQUE_BY_OLD_UNIQUE_BY_INDEX_BY_MODULE
        .read()
        .get(module)?
        .get(index)?
        .get(old_unique)?
        .get(unique)?
        .get(arity)
        .copied()
}
| 515
|
/// Clears the display and draws the colour name selected by `n`
/// (1..=6 map to fixed names; anything else shows "black").
fn set_screen(n: u8, disp: &mut Screen) {
    let color_name = match n {
        1 => "red",
        2 => "yellow",
        3 => "white",
        4 => "aqua",
        5 => "purple",
        6 => "blue",
        _ => "black",
    };
    disp.clear();
    // 8x16 font, binary "on" colour.
    let style = TextStyleBuilder::new(Font8x16)
        .text_color(BinaryColor::On)
        .build();
    Text::new(color_name, Point::zero())
        .into_styled(style)
        .draw(disp)
        .unwrap();
    disp.flush();
}
| 516
|
/// One-time process bootstrap: initializes Vector's tracing (at "warn"
/// level, all boolean options off) and its metrics subsystem.
fn boot() {
vector::trace::init(false, false, "warn", false);
vector::metrics::init().expect("metrics initialization failed");
}
| 517
|
/// Checks a PostgreSQL connection out of the global connection pool.
///
/// # Panics
/// Panics if no connection can be obtained (pool exhausted or database
/// unreachable). `expect` replaces the bare `unwrap()` so the failure is
/// self-describing in the panic message.
pub fn establish_connection() -> PooledConnection<ConnectionManager<PgConnection>> {
    pool()
        .get()
        .expect("failed to check a database connection out of the pool")
}
| 518
|
/// Lexes and nests the tokens of `source` starting at `span`, collecting
/// the full token stream (which always ends with an EOF token).
fn collect_tokens(source: &Source, span: Span, report: &Report) -> Vec<NestedToken> {
let mut iter = source.iter_from_span(span).unwrap();
// Scope the lexer borrow so `iter` is released before we return.
let tokens = {
let mut lexer = Lexer::new(&mut iter, report);
let nest = Nest::new(&mut lexer);
nest.collect::<Vec<_>>()
};
assert!(!tokens.is_empty()); // should include EOF
tokens
}
| 519
|
/// Installs the `console_error_panic_hook` panic hook (when that feature is
/// enabled) so wasm panics show readable messages in the browser console.
pub fn set_panic_hook() {
// When the `console_error_panic_hook` feature is enabled, we can call the
// `set_panic_hook` function at least once during initialization, and then
// we will get better error messages if our code ever panics.
//
// For more details see
// https://github.com/rustwasm/console_error_panic_hook#readme
#[cfg(feature = "console_error_panic_hook")]
console_error_panic_hook::set_once();
}
| 520
|
/// Advent of Code 2021 day 22 part 1: applies "on"/"off" cuboid steps to
/// the set of lit cells, restricted to the -50..=50 cube, and prints the
/// number of cells left on.
fn part_1() {
// Captures the three x/y/z min..max coordinate pairs from each line.
let re = Regex::new(r".*=(-?\d+)\.+(-?\d+).*=(-?\d+)\.+(-?\d+).*=(-?\d+)\.+(-?\d+)").unwrap();
let mut pts: HashSet<(i32, i32, i32)> = HashSet::new();
for l in include_str!("input.txt").split("\n") {
// t = [xmin, xmax, ymin, ymax, zmin, zmax]
let t: Vec<_> = re
.captures(l)
.unwrap()
.iter()
.skip(1)
.map(|x| x.unwrap().as_str().parse::<i32>().unwrap())
.collect();
// Skip cuboids entirely outside the -50..=50 initialization region.
if t[0] > 50 || t[1] < -50 || t[2] > 50 || t[3] < -50 || t[4] > 50 || t[5] < -50 {
continue;
}
// Clamp the remaining cuboid to the region and apply it cell by cell.
for x in max(t[0], -50)..min(50, t[1] + 1) {
for y in max(t[2], -50)..min(50, t[3] + 1) {
for z in max(t[4], -50)..min(50, t[5] + 1) {
if l.contains("on") {
pts.insert((x, y, z));
} else {
pts.remove(&(x, y, z));
}
}
}
}
}
println!("{}", pts.len());
}
| 521
|
/// Grid-storage puzzle driver (Advent of Code 2016 day 22 style): parses
/// `df`-like node lines, counts viable pairs (part 1), then simulates
/// shuffling the empty node to move the target data to x0y0-ish (part 2).
fn main() {
let text = match read_input::read_text("input.txt") {
Ok(t) => t,
Err(e) => panic!("{:?}", e),
};
let re = Regex::new(r"\s+").unwrap();
let mut nodes: HashMap<String, Node> = HashMap::new();
for line in text.lines() {
// Only filesystem lines (paths starting with '/') describe nodes.
if line.starts_with("/") {
let words = re.split(line).collect::<Vec<&str>>();
// The node path ends in "-xN-yM"; pieces[1]/pieces[2] are the coords.
let pieces = words[0].split("-").collect::<Vec<&str>>();
let coords = format!("{}{}", pieces[1], pieces[2]);
nodes.insert(coords.clone(), Node::new(coords, words[1].to_string(), words[2].to_string(), words[3].to_string()));
}
}
let mut pairs: Vec<String> = Vec::new();
// part 1
for (_, node) in &nodes {
pairs.append(&mut find_pairs_for_node(&nodes, &node));
}
println!("part 1 pairs: {:?}", pairs.len());
// part 2: walk the empty node next to the top-right corner, then step the
// goal data leftward along the precomputed path.
if let Some(zero_space) = find_first_zero_space_node(&nodes, nodes.get(&format!("x{}y{}", MAX_X, 0)).unwrap()) {
println!("\n\nZero space node, from top right {:?}\n\n", zero_space);
let result = move_node_data_to_coords(&nodes, &zero_space, &format!("x{}y{}", MAX_X, 0), Vec::new());
println!("Count to move 0 to {} : {}", result.1, result.0);
let mut data_node = result.2.get(&result.1).unwrap().clone();
println!("Moved data amount: {} to: {}\n", data_node.used, data_node.coords);
let path = get_path_for_data(&result.2, &data_node.coords);
println!("{:?}", path);
let mut zero_node = result.2.get(&format!("x{}y{}", MAX_X, 0)).unwrap().clone();
print_nodes(&result.2);
let mut state = result.2;
let mut move_count = 0;
let mut index = 0;
loop {
move_data_node_to_index_of_path(&mut state, &mut data_node, &mut zero_node, &path[index], &mut move_count);
index += 1;
if index == path.len() {
break
}
}
println!("Moved 0 count: {}, move data count: {}, total: {}", result.0, move_count, result.0 + move_count);
}
}
| 522
|
/// Builds the fuzzy-match scoring table: one row per pattern character,
/// each row listing every choice position that character can match, with
/// running best scores. Returns `None` if any pattern char is unmatched.
fn build_graph(choice: &str, pattern: &str) -> Option<Vec<Vec<MatchingStatus>>> {
let mut scores = vec![];
let mut match_start_idx = 0; // to ensure that the pushed char are able to match the pattern
let mut pat_prev_ch = '\0';
// initialize the match positions and inline scores
for (pat_idx, pat_ch) in pattern.chars().enumerate() {
let mut vec = vec![];
let mut choice_prev_ch = '\0';
for (idx, ch) in choice.chars().enumerate() {
// Case-insensitive match, but only at or after the earliest position
// that still leaves room for the preceding pattern chars.
if ch.to_ascii_lowercase() == pat_ch.to_ascii_lowercase() && idx >= match_start_idx {
let score = fuzzy_score(ch, idx, choice_prev_ch, pat_ch, pat_idx, pat_prev_ch);
vec.push(MatchingStatus {
idx,
score,
final_score: score,
adj_num: 1,
back_ref: 0,
});
}
choice_prev_ch = ch;
}
if vec.is_empty() {
// not matched
return None;
}
match_start_idx = vec[0].idx + 1;
scores.push(vec);
pat_prev_ch = pat_ch;
}
// calculate max scores considering adjacent characters
for pat_idx in 1..scores.len() {
// split_at_mut lets us read the previous row while mutating the current.
let (first_half, last_half) = scores.split_at_mut(pat_idx);
let prev_row = &first_half[first_half.len() - 1];
let cur_row = &mut last_half[0];
for idx in 0..cur_row.len() {
let next = cur_row[idx];
let prev = if idx > 0 {
cur_row[idx - 1]
} else {
MatchingStatus::default()
};
// Candidate score if we extend the chain ending just left of us.
let mut score_before_idx = prev.final_score - prev.score + next.score;
score_before_idx += PENALTY_UNMATCHED * ((next.idx - prev.idx) as i64);
score_before_idx -= if prev.adj_num == 0 {
BONUS_ADJACENCY
} else {
0
};
// Best predecessor in the previous row whose index lies strictly
// between prev.idx and next.idx.
let (back_ref, score, adj_num) = prev_row
.iter()
.enumerate()
.take_while(|&(_, &MatchingStatus { idx, .. })| idx < next.idx)
.skip_while(|&(_, &MatchingStatus { idx, .. })| idx < prev.idx)
.map(|(back_ref, cur)| {
let adj_num = next.idx - cur.idx - 1;
let mut final_score = cur.final_score + next.score;
final_score += if adj_num == 0 {
BONUS_ADJACENCY
} else {
PENALTY_UNMATCHED * adj_num as i64
};
(back_ref, final_score, adj_num)
})
.max_by_key(|&(_, x, _)| x)
.unwrap_or((prev.back_ref, score_before_idx, prev.adj_num));
// Keep whichever of the two candidates scores higher.
cur_row[idx] = if idx > 0 && score < score_before_idx {
MatchingStatus {
final_score: score_before_idx,
back_ref: prev.back_ref,
adj_num,
..next
}
} else {
MatchingStatus {
final_score: score,
back_ref,
adj_num,
..next
}
};
}
}
Some(scores)
}
| 523
|
fn trait_test() {
{
use std::io::Write;
fn say_hello(out: &mut Write) -> std::io::Result<()> {
out.write_all(b"hello world\n")?;
out.flush()
}
// use std::fs::File;
// let mut local_file = File::create("hello.txt");
// say_hello(&mut local_file).expect("error"); // could not work, now
let mut bytes = vec![];
say_hello(&mut bytes).expect("error"); // works
assert_eq!(bytes, b"hello world\n");
// 11.1
let mut buf: Vec<u8> = vec![];
buf.write_all(b"hello").expect("error");
}
// 11.1.1
{
use std::io::Write;
let mut buf: Vec<u8> = vec![];
// let writer: Write = buf; // error: `Write` does not have a constant size
let writer: &mut Write = &mut buf; // ok
writer.write_all(b"hello").expect("error");
assert_eq!(buf, b"hello");
}
// 11.1.3
{
use std::io::Write;
fn say_hello<W: Write>(out: &mut W) -> std::io::Result<()> {
out.write_all(b"hello world\n")?;
out.flush()
}
let mut buf: Vec<u8> = vec![];
buf.write_all(b"hello").expect("error");
buf::<Vec>.write_all(b"hello").expect("error");
// let v1 = (0 .. 1000).collect(); // error: can't infer type
let v2 = (0..1000).collect::<Vec<i32>>(); // ok
// /// Run a query on large, partitioned data set.
// /// See <http://research.google.com/archive/mapreduce.html>.
// fn run_query<M: Mapper + Serialize, R: Reducer + Serialize>(data: &dataSet, map: M, reduce: R) -> Results {
// }
//
// fun run_query<M, R>(data: &Dataset, map: M, reduce: R) -> Results
// where M: Mapper + Serialize,
// R: Reducer + Serialize
// {}
// fn nearest<'t, 'c, P>(target: &'t P, candidates: &'c [P]) -> &'c P
// where P: MeasureDistance
// {}
//
// impl PancakeStack {
// fn Push<:T Topping>(&mut self, goop: T) - PancakeResult<()> {
// }
// }
// type PancakeResult<T> = Result<T, PancakeError>;
}
{
// struct Broom {
// name: String,
// height: u32,
// health: u32,
// position: (f32, f32, f32),
// intent: BroomIntent,
// }
// impl Broom {
// fn boomstick_range(&self) -> Range<i32> {
// self.y - self.height - 1 .. self.y
// }
// }
// trait Visible {
// fn draw(&self, canvas: &mut Canvas);
// fn hit_test(&self, x: i32, y: i32) -> bool;
// }
// impl Visible for Broom {
// fn draw(&self, canvas: &mut Canvas) {
// //for y in self.y - self.height - 1 .. self.y {
// for y in self.broomstick_range() {
// canvas.write_at(self.x, y, '|');
// }
// canvas.write_at(self.x, y, 'M');
// }
// }
// fn hit_test(&self, x: i32, y:i32) -> bool {
// self.x == x
// && self.y - self.height - 1 <= y
// && y <- self.y
// }
}
{
// 11.2.1
/// A writer that ignores whatever data you write to it.
pub struct Sink;
use std::io::{Result, Write};
impl Write for Sink {
fn write(&mut self, buf: &[u8]) -> Result<usize> {
Ok(buf.len())
}
fn flush(&mut self) -> Result<()> {
Ok(())
}
}
}
{
// 11.2.2
trait IsEmoji {
fn is_emoji(&self) -> bool;
}
impl IsEmoji for char {
fn is_emoji(&self) -> bool {
return false;
}
}
assert_eq!('$'.is_emoji(), false);
use std::io::{self, Write};
struct HtmlDocument;
trait WriteHtml {
fn write_html(&mut self, html: &HtmlDocument) -> std::io::Result<()>;
}
impl<W: Write> WriteHtml for W {
fn write_html(&mut self, html: &HtmlDocument) -> io::Result<()> {
Ok(())
}
}
extern crate serde;
use serde::Serialize;
use serde_json;
use std::collections::HashMap;
use std::fs::File;
pub fn save_configuration(config: &HashMap<String, String>) -> std::io::Result<()> {
let writer = File::create("test.json").expect("error");
let mut serializer = serde_json::Serializer::new(writer);
config.serialize(&mut serializer).expect("error");
Ok(())
}
{
// 11.2.3
}
}
}
| 524
|
/// CLI entry point: runs as either a server or a client depending on argv.
/// On success the process never returns normally (`Result<!, _>`): every
/// non-error path either loops in server/client or calls `process::exit`.
async fn main() -> Result<!, String> {
// Begin by parsing the arguments. We are either a server or a client, and
// we need an address and potentially a sleep duration.
let args: Vec<_> = env::args().collect();
match &*args {
[_, mode, url] if mode == "server" => server(url).await?,
// Client with no input file reads from stdin.
[_, mode, url] if mode == "client" => client(url, tokio::io::stdin()).await?,
[_, mode, url, input_file] if mode == "client" => {
match tokio::fs::File::open(input_file).await {
Ok(file) => client(url, file).await?,
Err(err) => {
eprintln!("Failed to open input_file: \"{}\", error: {}", input_file, err);
process::exit(2);
}
}
}
_ => {
eprintln!("Usage:\n{0} server <url>\n or\n{0} client <url> [input_file]", args[0]);
process::exit(1);
}
}
}
| 525
|
/// Classic FizzBuzz over 1..=number: multiples of 3 become "Fizz",
/// multiples of 5 become "Buzz", multiples of both become "FizzBuzz",
/// and everything else is the number rendered as a string.
fn fizz_buzz(number: i32) -> Vec<String> {
    // Starting from 1 sidesteps any special-casing of 0; a non-positive
    // `number` simply yields an empty vector.
    (1..=number)
        .map(|n| {
            let by_three = n % 3 == 0;
            let by_five = n % 5 == 0;
            if by_three && by_five {
                String::from("FizzBuzz")
            } else if by_three {
                String::from("Fizz")
            } else if by_five {
                String::from("Buzz")
            } else {
                n.to_string()
            }
        })
        .collect()
}
| 526
|
/// Estimates how many multiexp terms fit in `mem` bytes after reserving a
/// `MEMORY_PADDING` fraction of headroom and per-core window buffers.
fn calc_chunk_size<E>(mem: u64, core_count: usize) -> usize
where
E: Engine,
{
// Per-term footprint: one affine point in each group plus one exponent.
let aff_size = std::mem::size_of::<E::G1Affine>() + std::mem::size_of::<E::G2Affine>();
let exp_size = exp_size::<E>();
let proj_size = std::mem::size_of::<E::G1>() + std::mem::size_of::<E::G2>();
// (usable memory - per-core bucket storage) / per-term footprint.
// NOTE(review): the subtraction can underflow for very small `mem`;
// confirm callers guarantee enough memory before calling.
((((mem as f64) * (1f64 - MEMORY_PADDING)) as usize)
- (2 * core_count * ((1 << MAX_WINDOW_SIZE) + 1) * proj_size))
/ (aff_size + exp_size)
}
| 527
|
/// A rule counts as "not skipped" when it is absent entirely, or present
/// with its `skip` flag unset.
fn is_not_skipped(rule: &Option<SerdeValue>) -> bool {
    match rule {
        Some(value) => !value.skip,
        None => true,
    }
}
| 528
|
/// Runs the fuzzy matcher and returns the best total score together with
/// the choice-string indices of the matched characters (in order), or
/// `None` when `pattern` cannot be matched. An empty pattern trivially
/// matches with score 0.
pub fn fuzzy_indices(choice: &str, pattern: &str) -> Option<(i64, Vec<usize>)> {
if pattern.is_empty() {
return Some((0, Vec::new()));
}
let mut picked = vec![];
let scores = build_graph(choice, pattern)?;
// The best overall match ends at the highest-scoring cell of the last row.
let last_row = &scores[scores.len() - 1];
let (mut next_col, &MatchingStatus { final_score, .. }) = last_row
.iter()
.enumerate()
.max_by_key(|&(_, x)| x.final_score)
.expect("fuzzy_indices failed to iterate over last_row");
// Walk the back-references from the last pattern row up to the first,
// collecting the matched index at each step.
let mut pat_idx = scores.len() as i64 - 1;
while pat_idx >= 0 {
let status = scores[pat_idx as usize][next_col];
next_col = status.back_ref;
picked.push(status.idx);
pat_idx -= 1;
}
// Indices were collected back-to-front; restore natural order.
picked.reverse();
Some((final_score, picked))
}
| 529
|
/// Wasm entry point: installs panic/log hooks, deserializes the embedded
/// subaction data, and kicks off the async render loop.
fn main() {
std::panic::set_hook(Box::new(console_error_panic_hook::hook));
console_log::init_with_level(log::Level::Warn).expect("could not initialize logger");
// The subaction data is baked into the binary at compile time.
let fighter_bytes = include_bytes!("subaction_data.bin");
let subaction = bincode::deserialize(fighter_bytes).unwrap();
wasm_bindgen_futures::spawn_local(render_window_wasm(subaction));
}
| 530
|
// Encoding test: `KXORD k5, k4, k5` (AVX-512 mask-register XOR) must
// assemble to the VEX-encoded bytes [196, 225, 221, 71, 237].
fn kxord_1() {
run_test(&Instruction { mnemonic: Mnemonic::KXORD, operand1: Some(Direct(K5)), operand2: Some(Direct(K4)), operand3: Some(Direct(K5)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 225, 221, 71, 237], OperandSize::Dword)
}
| 531
|
/// Interrupt stub: forwards interrupt number 45 to the common dispatcher.
pub extern "x86-interrupt" fn coprocessor() { CommonInterruptHandler(45); }
| 532
|
/// Seed view: renders the page header plus an empty <canvas id="myChart">
/// that the chart.js integration draws into from `update`.
fn view(model: &Model) -> impl View<Msg> {
// let scatter_plot = model.plot_state.view().map_message(|orig_msg|{
// Msg::PlotMsg(orig_msg)
// });
div![
class!["w-screen", "flex", "flex-col"], // root container
div![class!["flex", "w-full"], // header container
h1![class!["pb-4", "text-center", "flex-grow-1", "text-4xl", "md:text-6xl", "tracking-wider"],
"Gringos Shower Monitor"],
],
div![class!["w-11/12", "md:w-4/5", "m-auto"], // chart container
div![class!["w-full", "flex", "justify-center"], // chart
// scatter_plot
// <canvas id="myChart" width="200" height="200"></canvas>
canvas![attrs![At::Id => "myChart"]]
]
]
// button![ simple_ev("click", Msg::Clicked), "Click me!" ],
// h3![ model.test.to_string() ]
]
}
| 533
|
/// Tour of Rust string features: literals and escapes, raw strings, byte
/// strings, UTF-8 lengths vs char counts, formatting, and common &str
/// methods (concat/join/case/contains/replace/trim/split).
fn string_test() {
// literal
let speech = "\"Ouch!\" said the well.\n";
println!("{}", speech);
// Multi-line literal: the newline and leading spaces are part of the string.
println!(
"In the room the women come and go,
Singing of Mount Abora"
);
// A trailing backslash swallows the newline and following indentation.
println!(
"It was a bright, cold day in Aplil, and \
there were four of us \
more or less."
);
// Raw string: backslashes are literal, handy for Windows paths and regexes.
let default_win_install_path = r"C:\Program Files\Gorillas";
println!("{}", default_win_install_path);
// let pattern = Regex::new(r"\d(\.\d+)*");
println!(
r###"
This raw string started with 'r###"'.
Therefore it does not end until we reach a quote mark ('"')
followed immediately by three pound signs ('###'):
"###
);
// byte strings
let method = b"GET";
assert_eq!(method, &[b'G', b'E', b'T']);
let noodles = "noodles".to_string();
let oodles = &noodles[1..];
let poodles = "\u{CA0}_\u{CA0}";
// len() counts UTF-8 bytes (each U+0CA0 is 3 bytes); chars() counts
// code points.
assert_eq!(oodles.len(), 6);
assert_eq!(poodles.len(), 7);
assert_eq!(poodles.chars().count(), 3);
// let mut s = "hello";
// s[0] = 'c'; error: tye thpe 'str' cannot be mutably indexed
// s.push('\n'); error: no method named `push` found for type `&str`
assert_eq!(
format!("{}° {:02}’ {:02}” N", 24, 5, 23),
"24° 05’ 23” N".to_string()
);
let bits = vec!["veni", "vidi", "vici"];
assert_eq!(bits.concat(), "venividivici");
assert_eq!(bits.join(","), "veni,vidi,vici");
assert!("ONE".to_lowercase() == "one");
assert!("peanut".contains("nut"));
assert_eq!("\u{CA0}_\u{CA0}".replace("\u{CA0}", "■"), "■_■");
assert_eq!(" clean\n".trim(), "clean");
for word in "veni, vidi, vici".split(", ") {
assert!(word.starts_with("v"));
}
}
| 534
|
/// Returns the contents of the `.gitmodules` file in `at`'s tree, or an
/// empty string when the commit has no such entry.
///
/// Cleanups: the `else { return ... }` became a plain match arm, and the
/// needless `&repo` double-reference (`repo` is already `&Repository`)
/// was dropped.
fn modules_file(repo: &Repository, at: &Commit) -> Result<String, Box<dyn std::error::Error>> {
    match at.tree()?.get_name(".gitmodules") {
        Some(modules) => Ok(String::from_utf8(
            modules.to_object(repo)?.peel_to_blob()?.content().into(),
        )?),
        None => Ok(String::new()),
    }
}
| 535
|
/// Reads the file named by `config.file_path`, searches it for
/// `config.target`, and prints every matching line to stdout.
pub fn run(config: Config) -> Result<(), Box<dyn Error>> {
    let contents = read_to_string(config.file_path)?;
    for matching_line in search(&config.target, &contents) {
        println!("{}", matching_line);
    }
    Ok(())
}
| 536
|
/// Array-iterator `next` implementation (ECMAScript-style): advances the
/// "array iterator next index" slot and yields `{ value, done }` results,
/// finishing (and clearing the iterated object) once the index reaches the
/// array's `length`.
fn next(args: Args) -> Result<Value, Value> {
let o = args.this();
// `next` must be called on the iterator object itself.
if o.type_of() != ValueType::Object {
return Err(Value::new_error(args.agent(), "invalid receiver"));
}
let a = o.get_slot("iterated object");
// A null slot means iteration already completed.
if a == Value::Null {
return Value::new_iter_result(args.agent(), Value::Null, true);
}
let index = if let Value::Number(n) = o.get_slot("array iterator next index") {
n
} else {
unreachable!();
};
// Read the live `length` property each call, as the array may change.
let len = if let Value::Number(n) = a.get(
args.agent(),
Value::from("length").to_object_key(args.agent())?,
)? {
n
} else {
return Err(Value::new_error(args.agent(), "invalid array length"));
};
if index >= len {
// Exhausted: drop the reference so later calls stay done.
o.set_slot("iterated object", Value::Null);
return Value::new_iter_result(args.agent(), Value::Null, true);
}
o.set_slot("array iterator next index", Value::from(index + 1.0));
let value = a.get(
args.agent(),
Value::from(index).to_object_key(args.agent())?,
)?;
Value::new_iter_result(args.agent(), value, false)
}
| 537
|
/// Exception stub: forwards exception number 14 (page fault) to the common handler.
pub extern "x86-interrupt" fn page_fault() { CommonExceptionHandler(14); }
| 538
|
/// `cargo fuzz run <target> <file>` with a single corpus file must run just
/// that input (not the whole corpus): the passing input runs (and the
/// fuzzing_repro message appears) while the failing input is never touched.
fn run_one_input() {
let corpus = Path::new("fuzz").join("corpus").join("run_one");
let project = project("run_one_input")
.with_fuzz()
.fuzz_target(
"run_one",
r#"
#![no_main]
use libfuzzer_sys::fuzz_target;
fuzz_target!(|data: &[u8]| {
#[cfg(fuzzing_repro)]
eprintln!("Reproducing a crash");
assert!(data.is_empty());
});
"#,
)
// "pass" is empty (assert holds); "fail" would trip the assert if run.
.file(corpus.join("pass"), "")
.file(corpus.join("fail"), "not empty")
.build();
project
.cargo_fuzz()
.arg("run")
.arg("run_one")
.arg(corpus.join("pass"))
.assert()
.stderr(
predicate::str::contains("Running 1 inputs 1 time(s) each.")
.and(predicate::str::contains(
"Running: fuzz/corpus/run_one/pass",
))
.and(predicate::str::contains("Reproducing a crash")),
)
.success();
}
| 539
|
/// Enables the named Windows privilege (e.g. "SeDebugPrivilege") on the
/// given access token by looking up its LUID and calling
/// `AdjustTokenPrivileges`. Returns the last OS error on failure.
fn set_privilege(handle: HANDLE, name: &str) -> Result<(), std::io::Error> {
let mut luid: LUID = LUID {
LowPart: 0,
HighPart: 0,
};
// Privilege names must be passed as wide (UTF-16) strings.
let name: U16CString = name.try_into().unwrap();
let r = unsafe {LookupPrivilegeValueW(std::ptr::null(),name.as_ptr(), &mut luid )};
if r == 0 {
return Err(std::io::Error::last_os_error());
}
let mut privilege = TOKEN_PRIVILEGES{
PrivilegeCount: 1,
Privileges: [LUID_AND_ATTRIBUTES {Luid: luid, Attributes: SE_PRIVILEGE_ENABLED}],
};
// NOTE(review): the 4th argument is the size of the *previous-state*
// buffer; since PreviousState is null here, 0 would be the conventional
// value — confirm size_of::<TOKEN_PRIVILEGES>() is intentional.
let r = unsafe {
AdjustTokenPrivileges(handle, false as i32, &mut privilege, std::mem::size_of::<TOKEN_PRIVILEGES>() as u32, std::ptr::null_mut(), std::ptr::null_mut())
};
if r == 0 {
return Err(std::io::Error::last_os_error());
}
Ok(())
}
| 540
|
/// Registers both PCI host-controller drivers (legacy and native) with the
/// device manager.
pub fn register()
{
device_manager::register_driver(&s_pci_legacy_driver);
device_manager::register_driver(&s_pci_native_driver);
}
| 541
|
/// Parsing a template that references an undefined variable must fail, and
/// the error must downcast to `VariableNotDefinedError`.
///
/// Cleanup: the string literal is already a `&str`; the extra `&` borrow
/// (`&"..."` producing `&&str`) was redundant.
fn test_with_error() {
    let parsed_data = parse("test: $false_var");
    assert!(parsed_data
        .unwrap_err()
        .downcast_ref::<VariableNotDefinedError>()
        .is_some());
}
| 542
|
/// Parses the process's command-line arguments into an `Args` value.
///
/// The parser is a small state machine: `last_flag` remembers which flag is
/// awaiting its value; a bare token with no pending flag is treated as the
/// port number. Most errors print a message and exit(1).
fn parse_args() -> Args {
let env_args = env::args();
if env_args.len() < 2 {
println!("{}", USAGE);
std::process::exit(1)
}
let mut args = Args {
port_number: 0,
group: Group::Singleton,
num_worker_threads: 0,
upstream: None,
downstream: None,
};
let mut last_flag = Flag::None;
for arg in env_args.skip(1) {
match last_flag {
Flag::None => {
match &*arg {
"-w" | "--workers" => last_flag = Flag::Workers,
"-ig" | "--in-group" => {
// --in-group and --lock-server are mutually exclusive.
if args.group != Group::Singleton {
error!("A server cannot both be in a group and a lock server.");
std::process::exit(1)
}
last_flag = Flag::InGroup
}
"-ls" | "--lock-server" => {
if args.group != Group::Singleton {
error!("A server cannot both be in a group and a lock server.");
std::process::exit(1)
}
args.group = Group::LockServer
}
"-up" | "--upstream" => {
last_flag = Flag::Upstream
}
"-dwn" | "--downstream" => {
last_flag = Flag::Downstream
}
// Any other bare token must be the port number.
port => {
match port.parse() {
Ok(port) => args.port_number = port,
Err(e) => {
error!("Invalid flag: {}.", port);
debug!("caused by {}", e);
std::process::exit(1)
}
}
}
}
}
Flag::Workers => {
match arg.parse() {
Ok(num_workers) => {
// Zero workers is nonsensical; clamp to 1 with a warning.
if num_workers == 0 {
println!("WARNING: Number of worker threads must be non-zero, will default to 1");
args.num_worker_threads = 1
}
else {
args.num_worker_threads = num_workers
}
last_flag = Flag::None
}
Err(e) => {
error!("Invalid <num worker threads> at '--workers': {}.", e);
std::process::exit(1)
}
}
}
Flag::Upstream => {
match arg.parse() {
Ok(addr) => {
args.upstream = Some(addr)
}
Err(e) => {
// NOTE(review): unlike the other branches this logs but does
// NOT exit, leaving `upstream` unset — confirm intentional.
error!("Invalid <upstream addr> at '--upstream': {}.", e);
}
}
last_flag = Flag::None;
}
Flag::Downstream => {
match arg.parse() {
Ok(addr) => {
args.downstream = Some(addr)
}
Err(e) => {
// NOTE(review): same as --upstream, parse failure does not exit.
error!("Invalid <downstream addr> at '--downstream': {}.", e);
}
}
last_flag = Flag::None;
}
Flag::InGroup => {
// Expected form: <server num>:<num servers in group>.
let split: Vec<_> = arg.split(':').collect();
if split.len() != 2 {
error!("Invalid '--in-group {}': must be in the form of '--in-group <server num>:<num servers in group>'.", arg);
std::process::exit(1)
}
match (split[0].parse(), split[1].parse()) {
(Ok(server_num), Ok(group_size)) => {
// Server numbers are zero-based, so must be < group size.
if group_size <= server_num {
error!("<server num>: {} must be less than <num servers in group>: {} in '--in-group'",
split[0], split[1]);
std::process::exit(1)
}
last_flag = Flag::None;
args.group = Group::InGroup(server_num, group_size)
}
(Err(e1), Err(e2)) => {
error!("Invalid <server num>: {} '{}' at '--in-group'", e1, split[0]);
error!("Invalid <num servers in group>: {} '{}' at '--in-group'",
e2, split[1]);
std::process::exit(1)
}
(Err(e1), _) => {
error!("Invalid <server num>: {} '{}' at '--in-group'", e1, split[0]);
std::process::exit(1)
}
(_, Err(e2)) => {
error!("Invalid <num servers in group>: {} '{}' at '--in-group'",
e2, split[1]);
std::process::exit(1)
}
}
}
}
}
// A flag still pending after the last argument means its value is missing.
match last_flag {
Flag::None => args,
Flag::InGroup => {
error!("Missing <server num>:<num servers in group> for '--in-group'");
std::process::exit(1)
},
Flag::Workers => {
error!("Missing <num worker threads> for '--workers'");
std::process::exit(1)
}
Flag::Downstream => {
error!("Missing <downstream addr> for '--downstream'");
std::process::exit(1)
}
Flag::Upstream => {
error!("Missing <upstream addr> for '--upstream'");
std::process::exit(1)
}
}
}
| 543
|
/// Cryptopals challenge 13 — not implemented yet; prints a placeholder.
pub fn challenge_13() {
println!("TODO");
}
| 544
|
/// Issues the `RetypeTask` system call for the (source, target) capability
/// address pair.
pub fn retype_task(source: CAddr, target: CAddr) {
system_call(SystemCall::RetypeTask {
request: (source, target),
});
}
| 545
|
/// All-null/zero `ble_gatt_chr_def`, used as the terminator entry of a
/// NimBLE characteristic table.
const fn null_ble_gatt_chr_def() -> ble_gatt_chr_def {
    ble_gatt_chr_def {
        uuid: ptr::null(),
        access_cb: None,
        arg: ptr::null_mut(),
        descriptors: ptr::null_mut(),
        flags: 0,
        min_key_size: 0,
        val_handle: ptr::null_mut(),
    }
}
| 546
|
/// End-to-end check that `cargo fuzz build <name>` builds only the named
/// target and leaves its sibling untouched.
fn build_one() {
    let project = project("build_one").with_fuzz().build();
    // Register two fuzz targets so one can be built selectively.
    for name in ["build_one_a", "build_one_b"].iter().copied() {
        project.cargo_fuzz().arg("add").arg(name).assert().success();
    }
    // A full build guarantees the build directory exists so that
    // `fuzz_build_dir()` won't panic.
    project.cargo_fuzz().arg("build").assert().success();
    let release_dir = project.fuzz_build_dir().join("release");
    let a_bin = release_dir.join("build_one_a");
    let b_bin = release_dir.join("build_one_b");
    // Delete both binaries so we can observe exactly which one is rebuilt.
    fs::remove_file(&a_bin).unwrap();
    fs::remove_file(&b_bin).unwrap();
    assert!(!a_bin.is_file());
    assert!(!b_bin.is_file());
    // Building only `build_one_a` must leave `build_one_b` absent.
    project
        .cargo_fuzz()
        .arg("build")
        .arg("build_one_a")
        .assert()
        .success();
    assert!(a_bin.is_file());
    assert!(!b_bin.is_file());
}
| 547
|
fn main() {
let argv = os::args();
let size = from_str::<uint>(argv[1]).unwrap();
// println!("{}",size);
let align = from_str::<uint>(argv[2]).unwrap();
// println!("{}", align);
let aligned = align_to(size,align);
println!("{} by {} = {}", size, align, aligned);
// print_uint(*argv[1]);
}
| 548
|
/// Build per-version thanks data, fold every release's map into a single
/// all-time map, and render the site.
fn run() -> Result<(), Box<dyn std::error::Error>> {
    let by_version = generate_thanks()?;
    let mut versions = by_version.values();
    // Seed the all-time map with the first release, then merge in the rest.
    let mut all_time = versions.next().unwrap().clone();
    for map in versions {
        all_time.extend(map.clone());
    }
    site::render(by_version, all_time)?;
    Ok(())
}
| 549
|
/// Run a fuzz target for a bounded number of executions, then verify that
/// `cargo fuzz coverage` merges profiling data into `coverage.profdata`.
fn run_with_coverage() {
    let target = "with_coverage";
    let project = project("run_with_coverage")
        .with_fuzz()
        .fuzz_target(
            target,
            r#"
            #![no_main]
            use libfuzzer_sys::fuzz_target;
            fuzz_target!(|data: &[u8]| {
                println!("{:?}", data);
            });
        "#,
        )
        .build();
    // Exercise the target so there is something to profile.
    let run_result = project
        .cargo_fuzz()
        .arg("run")
        .arg(target)
        .arg("--")
        .arg("-runs=100")
        .assert();
    run_result
        .stderr(predicate::str::contains("Done 100 runs"))
        .success();
    // The coverage subcommand must report a successful merge...
    let coverage_result = project.cargo_fuzz().arg("coverage").arg(target).assert();
    coverage_result
        .stderr(predicate::str::contains("Coverage data merged and saved"))
        .success();
    // ...and actually leave the merged profile on disk.
    let profdata_file = project.fuzz_coverage_dir(target).join("coverage.profdata");
    assert!(profdata_file.exists(), "Coverage data file not generated");
}
| 550
|
/// Clone (or refresh) a bare mirror of `url` under `repos/<slug>` and
/// return its path. Each slug is fetched at most once per process run.
fn update_repo(url: &str) -> Result<PathBuf, Box<dyn std::error::Error>> {
    // Reduce the URL to a host-relative slug by stripping known prefixes
    // (checked in the same order as before) and a trailing ".git".
    let mut slug = url;
    for prefix in &[
        "https://github.com/",
        "git://github.com/",
        "https://git.chromium.org/",
    ] {
        if let Some(rest) = slug.strip_prefix(prefix) {
            slug = rest;
        }
    }
    if let Some(rest) = slug.strip_suffix(".git") {
        slug = rest;
    }
    let path_s = format!("repos/{}", slug);
    let path = PathBuf::from(&path_s);
    // Only the first caller per slug performs any network work.
    if !UPDATED.lock().unwrap().insert(slug.to_string()) {
        return Ok(path);
    }
    if !path.exists() {
        git(&["clone", "--bare", &url, &path_s])?;
    } else if should_update() {
        // `path_s` never ends in ".git" (stripped above), so this temp
        // name cannot collide with the mirror itself.
        let tmp = format!("{}.git", path_s);
        std::fs::rename(&path, &tmp)?;
        git(&[
            "clone",
            "--bare",
            "--dissociate",
            "--reference",
            &tmp,
            &url,
            &path_s,
        ])?;
        std::fs::remove_dir_all(&tmp)?;
    }
    Ok(path)
}
| 551
|
/// Start an HTTP GET against the shower-data endpoint and decode the JSON
/// body into a `Msg::DataFetched` message.
fn fetch_data() -> impl Future<Item = Msg, Error = Msg> {
    let endpoint = "http://192.168.15.22:8000/shower_data";
    let request = Request::new(endpoint);
    request.fetch_json(Msg::DataFetched)
}
| 552
|
/// Ensure `cargo fuzz run --fuzz-dir <dir>` works when the fuzz directory
/// lives somewhere other than the default location.
fn run_with_different_fuzz_dir() {
    let (fuzz_dir, mut builder) = project_with_fuzz_dir(
        "project_likes_to_move_it",
        Some("dir_likes_to_move_it_move_it"),
    );
    let project = builder
        .with_fuzz()
        .fuzz_target(
            "you_like_to_move_it",
            r#"
            #![no_main]
            use libfuzzer_sys::fuzz_target;
            fuzz_target!(|_data: &[u8]| {
            });
        "#,
        )
        .build();
    // NOTE(review): the target is invoked with `-runs=1` yet the assertion
    // expects "Done 2 runs" — confirm against libFuzzer's run accounting.
    project
        .cargo_fuzz()
        .arg("run")
        .arg("--fuzz-dir")
        .arg(fuzz_dir)
        .arg("you_like_to_move_it")
        .arg("--")
        .arg("-runs=1")
        .assert()
        .stderr(predicate::str::contains("Done 2 runs"))
        .success();
}
| 553
|
fn
test_value
(
)
-
>
Result
<
(
)
>
{
let
db
=
checked_memory_handle
(
)
?
;
db
.
execute
(
"
INSERT
INTO
foo
(
i
)
VALUES
(
?
1
)
"
[
Value
:
:
Integer
(
10
)
]
)
?
;
assert_eq
!
(
10i64
db
.
one_column
:
:
<
i64
>
(
"
SELECT
i
FROM
foo
"
)
?
)
;
Ok
(
(
)
)
}
| 554
|
/// JMZ: jump if zero. When the tested B-value is zero, queue PC + A-pointer;
/// otherwise queue the next instruction (PC + 1). JMZ.I behaves like JMZ.F,
/// i.e. it jumps only when both the A- and B-number of the B-instruction
/// are zero.
pub fn jmz_op(inputs: OpInputs) -> EmulatorResult<()> {
    let a_ptr = inputs.regs.a;
    let b_val = inputs.regs.b;
    let should_jump = match inputs.regs.current.instr.modifier {
        // Tested value: the A-number of the B instruction.
        Modifier::A | Modifier::BA => b_val.a_field == 0,
        // Tested value: the B-number of the B instruction.
        Modifier::B | Modifier::AB => b_val.b_field == 0,
        // Both numbers of the B instruction must be zero.
        Modifier::F | Modifier::X | Modifier::I => {
            b_val.a_field == 0 && b_val.b_field == 0
        }
    };
    let destination = if should_jump {
        a_ptr.idx
    } else {
        offset(inputs.regs.current.idx, 1, inputs.core_size)?
    };
    inputs.pq.push_back(destination, inputs.warrior_id)?;
    Ok(())
}
| 555
|
/// Build a `ProjectBuilder` whose fuzz directory is `fuzz_dir_opt`
/// (defaulting to "custom_dir"), returning that directory's path as a
/// string alongside the builder.
fn project_with_fuzz_dir(
    project_name: &str,
    fuzz_dir_opt: Option<&str>,
) -> (String, ProjectBuilder) {
    let dir_name = fuzz_dir_opt.unwrap_or("custom_dir");
    let root = next_root();
    let fuzz_dir_path = root.join(dir_name);
    let fuzz_dir_string = fuzz_dir_path.display().to_string();
    let builder = project_with_params(project_name, root, fuzz_dir_path);
    (fuzz_dir_string, builder)
}
| 556
|
/// Shared interrupt handler: renders "[INT:NN,C]" at screen position
/// (70, 0) — NN is the decimal vector number, C a rolling 0-9 invocation
/// counter — then signals end-of-interrupt to the PIC.
fn CommonInterruptHandler(vector: u8) {
    static mut common_count: u8 = 0;
    let mut text = b"[INT: , ]".clone();
    // Two decimal digits of the vector number.
    text[5] = b'0' + vector / 10;
    text[6] = b'0' + vector % 10;
    unsafe {
        // SAFETY: assumes this handler is not re-entered while running
        // (the original code relies on the same property) so the static
        // counter has a single writer at a time.
        text[8] = b'0' + common_count;
        common_count = (common_count + 1) % 10;
    }
    print_string(70, 0, &text);
    SendEOI((vector - pic::PIC_IRQSTARTVECTOR) as u16);
}
| 557
|
/// Prompt for a 16-digit credit card number on stdin, strip the trailing
/// line terminator, and hand the digits to the validity checker.
fn main() {
    println!("Welcome to my credit card verifier. Please input number to check.");
    let mut input = String::new();
    io::stdin()
        .read_line(&mut input)
        .expect("Failed to read the input");
    // read_line keeps the line terminator; remove a single "\n" or "\r\n".
    if input.ends_with('\n') {
        input.pop();
        if input.ends_with('\r') {
            input.pop();
        }
    }
    if input.len() != 16 {
        panic!("Card number is not the correct length")
    }
    println!("Now checking the validity of the credit card number.");
    number_vectorizer(&input);
}
| 558
|
fn get_command(stream: &mut TcpStream, buf: &mut[u8]) -> Result<Task, Error> {
let buf_sz = stream.read(buf).expect("failed to read from stream");
let buf_usize = buf_sz as usize;
let v = match serde_json::from_slice::<Task>(&buf[..buf_usize]){
Ok(v) => v,
Err(e) => return Err(e)
};
Ok(v)
}
| 559
|
/// Exercises `buffered(n)` over a channel carrying futures.
///
/// With capacity 2, both queued futures are driven at once, so results are
/// yielded in completion order (3 before 5 here). With capacity 1, only the
/// first future is driven: completing the second produces nothing until the
/// first finishes, after which results come out in queue order (5, then 3).
fn buffered() {
    let (tx, rx) = channel::<_, u32>();
    // Two one-shot promises; failures are mapped to sentinel error values.
    let (a, b) = promise::<u32>();
    let (c, d) = promise::<u32>();
    // Queue both futures on the channel, then drop the sender.
    tx.send(Ok(b.map_err(|_| 2).boxed()))
        .and_then(|tx| tx.send(Ok(d.map_err(|_| 4).boxed())))
        .forget();
    let mut rx = rx.buffered(2);
    sassert_empty(&mut rx);
    // At capacity 2 the second future's completion is visible immediately.
    c.complete(3);
    sassert_next(&mut rx, 3);
    sassert_empty(&mut rx);
    a.complete(5);
    sassert_next(&mut rx, 5);
    sassert_done(&mut rx);
    // Same setup, but with a buffer of one.
    let (tx, rx) = channel::<_, u32>();
    let (a, b) = promise::<u32>();
    let (c, d) = promise::<u32>();
    tx.send(Ok(b.map_err(|_| 2).boxed()))
        .and_then(|tx| tx.send(Ok(d.map_err(|_| 4).boxed())))
        .forget();
    let mut rx = rx.buffered(1);
    sassert_empty(&mut rx);
    // At capacity 1 the second future's completion is not yet observable.
    c.complete(3);
    sassert_empty(&mut rx);
    a.complete(5);
    sassert_next(&mut rx, 5);
    sassert_next(&mut rx, 3);
    sassert_done(&mut rx);
}
| 560
|
/// NOP: queue the instruction after the current one. Beyond operand
/// evaluation, advancing this warrior's PC is the only observable effect.
pub fn nop_op(inputs: OpInputs) -> EmulatorResult<()> {
    let next_pc = offset(inputs.regs.current.idx, 1, inputs.core_size)?;
    inputs.pq.push_back(next_pc, inputs.warrior_id)?;
    Ok(())
}
| 561
|
/// Intentionally empty entry point; does nothing.
fn main() {}
| 562
|
/// "Figure 1b" race reproduction: one thread clears a shared `Option`
/// while another checks it and then unwraps it again, so an unlucky
/// interleaving hits the `expect("null dereference")` between the check
/// and the use. Extra filler threads only widen the scheduling space.
fn figure1b(num_threads: usize) {
    assert!(num_threads >= 2);
    let shared = Arc::new(Mutex::new(Some(1)));
    let shared_reader = Arc::clone(&shared);
    // Filler threads: 5 steps each, purely for extra scheduling points.
    for _ in 0..num_threads - 2 {
        thread::spawn(|| {
            for _ in 0..5 {
                thread::sleep(Duration::from_millis(1));
            }
        });
    }
    // Writer: clears the shared value halfway through its 10 steps.
    thread::spawn(move || {
        for _ in 0..5 {
            thread::sleep(Duration::from_millis(1));
        }
        *shared.lock().unwrap() = None;
        for _ in 0..4 {
            thread::sleep(Duration::from_millis(1));
        }
    });
    // Reader: classic check-then-act — the value can be cleared between
    // the `is_some` probe and the `expect`.
    thread::spawn(move || {
        for _ in 0..4 {
            thread::sleep(Duration::from_millis(1));
        }
        let saw_value = {
            let saw_value = shared_reader.lock().unwrap().is_some();
            saw_value
        };
        if saw_value {
            let _ = shared_reader.lock().unwrap().expect("null dereference");
        }
        for _ in 0..4 {
            thread::sleep(Duration::from_millis(1));
        }
    });
}
| 563
|
/// Entry point: reads the parenthesis instruction string from the first
/// CLI argument and prints the floor Santa ends on.
fn main() {
    // Fail with a usage message instead of an opaque index-out-of-bounds
    // panic when the argument is missing; also avoids collecting all of
    // argv just to read one element.
    let instruction = std::env::args()
        .nth(1)
        .expect("usage: santa <instructions>");
    println!("{}", santa(&instruction));
}
| 564
|
/// Resolve `cmd_name` against the directories in `PATH`, returning the
/// first existing candidate's full path, or the bare name when nothing
/// matches (or `PATH` is unset).
fn find_realpath(cmd_name: &str) -> String {
    if let Some(paths) = env::var_os("PATH") {
        for dir in env::split_paths(&paths) {
            let candidate = Path::new(&dir).join(cmd_name);
            if candidate.exists() {
                return candidate.to_str().unwrap().to_string();
            }
        }
    }
    cmd_name.to_string()
}
| 565
|
/// Take a non-payload message from the channel at `target` and return its
/// raw word; panics when the message is not `ChannelMessage::Raw`.
pub fn channel_take_raw(target: CAddr) -> u64 {
    match channel_take_nonpayload(target) {
        ChannelMessage::Raw(raw) => raw,
        _ => panic!(),
    }
}
| 566
|
// Suppress the deny-by-default `overflowing_literals` lint for the
// deliberately overflowing casts demonstrated below.
#[allow(overflowing_literals)]
fn main() {
    let decimal = 65.4321_f32;
    // Error! Implicit type conversion is not allowed in Rust.
    // (This line was previously live code and made the program fail to
    // compile; it is now commented out as the adjacent note intended.)
    // let integer: u8 = decimal;
    // Explicit conversion.
    let integer = decimal as u8;
    let character = integer as char;
    println!("Casting: {} -> {} -> {}", decimal, integer, character);
    // When casting any value to an unsigned type T, std::T::MAX + 1 is
    // added or subtracted until the value fits in T.
    // 1000 already fits in a u16, so nothing changes.
    println!("1000 as a u16 is : {}", 1000 as u16);
    // 1000 - 256 - 256 - 256 = 232
    // Under the hood only the least-significant 8 bits are kept; the
    // remaining high bits are truncated.
    println!("1000 as u8 is : {}", 1000 as u8);
    // -1 + 256 = 255
    println!(" -1 as a u8 is : {}", (-1i8) as u8);
    println!("1000 mod 256 is : {}", 1000 % 256);
    // Casting to a signed type is equivalent to:
    // 1. casting to the corresponding unsigned type, then
    // 2. taking the two's complement.
    // 128 cast to u8 stays 128; its 8-bit two's complement is -128.
    println!(" 128 as a i8 is : {}", 128 as i8);
    // From the example above, 1000 as u8 -> 232,
    // whose 8-bit two's complement is -24.
    println!("1000 as a i8 is : {}", 1000 as i8);
    println!(" 232 as a i8 is : {}", 232 as i8);
}
| 567
|
/// Forward packets from the link layer into the QUIC connection as
/// datagrams until the link yields no more packets to send.
async fn link_to_quic(mut link: LinkSender, quic: Arc<AsyncConnection>) -> Result<(), Error> {
    while let Some(mut packet) = link.next_send().await {
        // Release the packet's inner locks before handing its buffer off.
        packet.drop_inner_locks();
        quic.dgram_send(packet.bytes_mut()).await?;
    }
    Ok(())
}
| 568
|
/// LeetCode 985: each query `[val, idx]` adds `val` to `a[idx]`; after
/// every query, record the sum of the even values in `a`.
///
/// Keeps a running even-sum and adjusts it per query instead of rescanning
/// the whole array each time, turning O(n·q) into O(n + q).
pub fn sum_even_after_queries(mut a: Vec<i32>, queries: Vec<Vec<i32>>) -> Vec<i32> {
    let mut even_sum: i32 = a.iter().filter(|&&n| n % 2 == 0).sum();
    let mut ret = Vec::with_capacity(queries.len());
    for query in queries {
        let idx = query[1] as usize;
        // Remove the old value's contribution if it was even...
        if a[idx] % 2 == 0 {
            even_sum -= a[idx];
        }
        a[idx] += query[0];
        // ...and add the new value's contribution if it is even.
        if a[idx] % 2 == 0 {
            even_sum += a[idx];
        }
        ret.push(even_sum);
    }
    ret
}
| 569
|
/// Map a database URL scheme to its default port; anything unrecognized
/// is treated as non-relative.
fn db_scheme_type_mapper(scheme: &str) -> SchemeType {
    if scheme == "postgres" {
        SchemeType::Relative(5432)
    } else if scheme == "mysql" {
        SchemeType::Relative(3306)
    } else {
        SchemeType::NonRelative
    }
}
| 570
|
/// Apply a 3x3 Sobel edge detector to a 640x480 grayscale frame.
///
/// The one-pixel outer border stays black because the kernel needs all
/// eight neighbours.
///
/// Bug fix: the loops previously ran to 638/478, skipping the last interior
/// column (638) and row (478); `1..639` / `1..479` cover every pixel that
/// has a full neighbourhood.
pub fn sobel(frame: ImageBuffer<Luma<u8>, Vec<u8>>) -> ImageBuffer<Luma<u8>, Vec<u8>> {
    let mut result = ImageBuffer::new(640, 480);
    for i in 1..639 {
        for j in 1..479 {
            let sample = |x: u32, y: u32| frame[(x, y)].channels()[0] as i32;
            let north_west = sample(i - 1, j - 1);
            let north = sample(i, j - 1);
            let north_east = sample(i + 1, j - 1);
            let west = sample(i - 1, j);
            let east = sample(i + 1, j);
            let south_west = sample(i - 1, j + 1);
            let south = sample(i, j + 1);
            let south_east = sample(i + 1, j + 1);
            // Horizontal and vertical gradient estimates.
            let gx: i32 = north_west + south_west + (2 * west) - north_east - south_east - (2 * east);
            let gy: i32 = north_west + north_east + (2 * north) - south_west - south_east - (2 * south);
            // Gradient magnitude; the float-to-u8 cast saturates at 255
            // (Rust >= 1.45 saturating casts).
            let magnitude: u8 = (((gx * gx) + (gy * gy)) as f32).sqrt() as u8;
            result.put_pixel(i, j, Luma([magnitude]));
        }
    }
    result
}
| 571
|
pub
fn
fetch_nand
(
&
self
val
:
bool
)
-
>
bool
{
let
a
=
unsafe
{
&
*
(
self
.
as_ptr
(
)
as
*
const
AtomicBool
)
}
;
a
.
fetch_nand
(
val
Ordering
:
:
AcqRel
)
}
| 572
|
/// Regression test: a `let (x :: xs) = ...` cons-pattern binding must
/// type-check, giving `tail` a list-to-list function type and `t` an
/// int-list type.
fn test_let_cons() {
    let parse_result = make_module(
        indoc! {r#"
        tail x = let (x :: xs) = x in xs
        t = tail [1, 2, 3]
        "#},
        default_sym_table(),
    );
    assert!(parse_result.is_ok());
    let module = parse_result.unwrap();
    let decls = module.get_decls();
    // `tail` : (unknown trait) -> [unknown trait]
    assert_eq!(
        decls[0].get_type(),
        Some(FuncType::new_func_type(
            Some(vec![TRAIT_UNKNOWN.clone_box()]),
            ListType::new_list(TRAIT_UNKNOWN.clone_box())
        ))
    );
    // `t` : [Int]
    assert_eq!(decls[1].get_type(), Some(ListType::new_list(BasicType::int())));
}
| 573
|
pub fn write_array_len<W>(wr: &mut W, len: u32) -> Result<Marker, ValueWriteError>
where W: Write
{
if len < 16 {
let marker = Marker::FixArray(len as u8);
try!(write_fixval(wr, marker));
Ok(marker)
} else if len < 65536 {
try!(write_marker(wr, Marker::Array16));
write_data_u16(wr, len as u16).and(Ok(Marker::Array16))
} else {
try!(write_marker(wr, Marker::Array32));
write_data_u32(wr, len).and(Ok(Marker::Array32))
}
}
| 574
|
/// Median-filter a 640x480 grayscale frame with a square kernel of side
/// `kernel_size` (expected odd; the check is currently disabled), taking
/// each pixel's median via an intensity histogram. Out-of-bounds
/// neighbours are simply skipped rather than mirrored or clamped.
pub fn median_filter_hist(frame: ImageBuffer<Luma<u8>, Vec<u8>>, kernel_size: usize) -> ImageBuffer<Luma<u8>, Vec<u8>> {
    //assert!(kernel_size % 2 == 1, "Kernel size must be odd.");
    let mut result = ImageBuffer::new(640, 480);
    let radius = ((kernel_size - 1) / 2) as i32;
    for x in 0..640 {
        for y in 0..480 {
            let mut hist = Histogram::new();
            // Accumulate every in-bounds neighbour inside the window.
            for nx in (x as i32 - radius)..=(x as i32 + radius) {
                for ny in (y as i32 - radius)..=(y as i32 + radius) {
                    if (0..640).contains(&nx) && (0..480).contains(&ny) {
                        hist.increment(frame[(nx as u32, ny as u32)].channels()[0]);
                    }
                }
            }
            result.put_pixel(x as u32, y as u32, Luma([hist.median()]));
        }
    }
    result
}
| 575
|
/// Connect a WebSocket using an optional TLS connector and the default
/// websocket configuration; thin wrapper over the `_and_config` variant.
pub async fn connect_async_with_tls_connector<R>(
    request: R,
    connector: Option<Connector>,
) -> Result<(WebSocketStream<ConnectStream>, Response), Error>
where
    R: IntoClientRequest + Unpin,
{
    // `None` selects the default websocket configuration.
    let config = None;
    connect_async_with_tls_connector_and_config(request, connector, config).await
}
| 576
|
async fn get_config(db: &FdbTransactional) -> Result<Config, dyn Error> {
let config_file = db.get_config_file().await?;
let config_file_content = db.get_config_file_content().await?;
let config = Config::new(config_file, config_file_content);
Ok(config)
}
| 577
|
/// Return simultaneous mutable references to `ts[a]` and `ts[b]`
/// (in that order). Panics when `a == b` or either index is out of range.
fn get_disjoint<T>(ts: &mut [T], a: usize, b: usize) -> (&mut T, &mut T) {
    assert!(a != b, "a ({}) and b ({}) must be disjoint", a, b);
    assert!(a < ts.len(), "a ({}) is out of bounds", a);
    assert!(b < ts.len(), "b ({}) is out of bounds", b);
    // Split at the larger index so each reference lives in its own half,
    // then order the pair to match (a, b).
    let (lo, hi) = if a < b { (a, b) } else { (b, a) };
    let (head, tail) = ts.split_at_mut(hi);
    let (lo_ref, hi_ref) = (&mut head[lo], &mut tail[0]);
    if a < b {
        (lo_ref, hi_ref)
    } else {
        (hi_ref, lo_ref)
    }
}
| 578
|
fn decode_ppm_image(cursor: &mut Cursor<Vec<u8>>) -> Result<Image, Box<std::error::Error>> {
let mut image = Image {
width : 0,
height: 0,
pixels: vec![]
};
// read header
let mut c: [u8; 2] = [0; 2];
cursor.read(&mut c)?;
match &c {
b"P6" => { },
_ => { bail!("error") }
}
let w = read_num(cursor)?;
let h = read_num(cursor)?;
let cr = read_num(cursor)?;
print!("width: {}, height: {}, color range: {}\n", w, h, cr);
// TODO: Parse the image here
let mut pxls:Vec<Vec<Pixel>> = vec![];
let mut buff: [u8; 1] = [0];
loop{
cursor.read(&mut buff)?;
match &buff {
b" " | b"\t" | b"\n" => {},
_ => { cursor.seek(std::io::SeekFrom::Current(-1)); break; }
};
};
for x in 0..h {
let mut row: Vec<Pixel> = vec!();
for y in 0..w {
let mut mv: Vec<u8> = vec![];
for mut z in 0..3 {
mv.push(cursor.read_u8()?);
}
let px = Pixel {
R: mv[0] as u32,
G: mv[1] as u32,
B: mv[2] as u32
};
row.push(px);
}
pxls.insert(0, row);
}
image = Image {
width : w,
height: h,
pixels: pxls
};
Ok(image)
}
| 579
|
/// Debugging aid: ask the kernel to dump its capability-pool list.
pub fn debug_cpool_list() {
    system_call(SystemCall::DebugCPoolList);
}
| 580
|
/// Day 2, part 1: count the rules whose password contains the rule's
/// letter between `min` and `max` times (inclusive).
///
/// Takes a slice instead of `&Vec<_>`; existing `&Vec<PasswordRule>`
/// call sites still work via deref coercion.
fn part1(rules: &[PasswordRule]) -> usize {
    rules
        .iter()
        .filter(|rule| (rule.min..=rule.max).contains(&rule.password.matches(rule.letter).count()))
        .count()
}
| 581
|
/// Property-style check of `Memory::load_bytes` against `store_bytes`:
/// a round-trip must return the stored bytes, and loads that run past the
/// end of memory (by length, by address, or near the top of the address
/// space) must fail with `Error::MemOutOfBound`. Zero-length loads are
/// expected to succeed regardless of the address.
fn assert_memory_load_bytes<R: Rng, M: Memory>(
    rng: &mut R,
    memory: &mut M,
    buffer_size: usize,
    addr: u64,
) {
    // Fill a buffer with random bytes and store it at `addr`.
    let mut buffer_store = Vec::<u8>::new();
    buffer_store.resize(buffer_size, 0);
    rng.fill(buffer_store.as_mut_slice());
    memory
        .store_bytes(addr, &buffer_store.as_slice())
        .expect("store bytes failed");
    // Round-trip: the loaded bytes must equal the stored bytes.
    let buffer_load = memory
        .load_bytes(addr, buffer_store.len() as u64)
        .expect("load bytes failed")
        .to_vec();
    assert!(buffer_load.cmp(&buffer_store).is_eq());
    // length out of bound: a size strictly larger than memory must fail.
    let outofbound_size = if buffer_store.is_empty() {
        memory.memory_size() + 1
    } else {
        buffer_store.len() + memory.memory_size()
    };
    let ret = memory.load_bytes(addr, outofbound_size as u64);
    assert!(ret.is_err());
    assert_eq!(ret.err().unwrap(), Error::MemOutOfBound);
    // address out of bound: only a zero-length load may succeed there.
    let ret = memory.load_bytes(
        addr + memory.memory_size() as u64 + 1,
        buffer_store.len() as u64,
    );
    if buffer_store.is_empty() {
        assert!(ret.is_ok())
    } else {
        assert!(ret.is_err());
        assert_eq!(ret.err().unwrap(), Error::MemOutOfBound);
    }
    // addr + size is overflow
    // NOTE(review): 0xFFFFFFFFFFFFFF is 2^56 - 1, not u64::MAX — presumably
    // an address just below the architectural limit; confirm the intent.
    let ret = memory.load_bytes(addr + (0xFFFFFFFFFFFFFF - addr), buffer_store.len() as u64);
    if buffer_store.is_empty() {
        assert!(ret.is_ok());
    } else {
        assert!(ret.is_err());
        assert_eq!(ret.err().unwrap(), Error::MemOutOfBound);
    }
}
| 582
|
/// GATT access callback for the device-information service: answers reads
/// of the model-number and manufacturer-name characteristics by appending
/// the corresponding static string to the response mbuf.
extern "C" fn gatt_svr_chr_access_device_info(
    _conn_handle: u16,
    _attr_handle: u16,
    ctxt: *mut ble_gatt_access_ctxt,
    _arg: *mut ::core::ffi::c_void,
) -> i32 {
    // SAFETY: assumed — NimBLE passes a valid access context for the
    // duration of the callback, matching the upstream C examples.
    let uuid: u16 = unsafe { ble_uuid_u16((*(*ctxt).__bindgen_anon_1.chr).uuid) };
    if uuid == GATT_MODEL_NUMBER_UUID {
        let status = unsafe {
            os_mbuf_append(
                (*ctxt).om,
                MODEL_NUM.as_ptr() as *const c_void,
                MODEL_NUM.len() as u16,
            )
        };
        if status == 0 {
            0
        } else {
            BLE_ATT_ERR_INSUFFICIENT_RES as i32
        }
    } else if uuid == GATT_MANUFACTURER_NAME_UUID {
        let status = unsafe {
            os_mbuf_append(
                (*ctxt).om,
                MANUF_NAME.as_ptr() as *const c_void,
                MANUF_NAME.len() as u16,
            )
        };
        if status == 0 {
            0
        } else {
            BLE_ATT_ERR_INSUFFICIENT_RES as i32
        }
    } else {
        // Any other characteristic is unexpected here.
        BLE_ATT_ERR_UNLIKELY as i32
    }
}
| 583
|
/// Copy `payload` into the per-task kernel buffer alongside `message`,
/// perform the raw system call, and return the call value the kernel left
/// behind in the buffer.
///
/// NOTE(review): nothing here checks that `T` fits inside
/// `buffer.payload_data` — presumably guaranteed by `TaskBuffer`'s
/// declared size; confirm against its definition.
fn system_call_put_payload<T: Any>(message: SystemCall, payload: T) -> SystemCall {
    use core::mem::{size_of};
    let addr = task_buffer_addr();
    unsafe {
        // The task buffer lives at the address reported by `task_buffer_addr`.
        let buffer = &mut *(addr as *mut TaskBuffer);
        buffer.call = Some(message);
        buffer.payload_length = size_of::<T>();
        // Reinterpret the payload area as a `T` slot and move the payload in.
        let payload_addr = &mut buffer.payload_data as *mut _ as *mut T;
        let payload_data = &mut *payload_addr;
        *payload_data = payload;
        system_call_raw();
        // The kernel's reply is read back out of `buffer.call`.
        buffer.call.take().unwrap()
    }
}
| 584
|
/// Parse any assignment form: a plain single assignment, an abbreviated
/// (operator) assignment, or an assignment with a `rescue` modifier.
pub(crate) fn assignment_expression(i: Input) -> NodeResult {
    let mut parser = alt((
        single::single_assignment_expression,
        abbreviated::abbreviated_assignment_expression,
        assignment_with_rescue_modifier,
    ));
    parser(i)
}
| 585
|
pub fn smart_to_words_vec() -> Vec<Words> {
let mut words_vec: Vec<Words> = Vec::new();
for i in 1..22 {
let words = smart_to_words(i);
words_vec.push(words);
}
words_vec
}
| 586
|
/// A program whose ELF headers carry an invalid 64-bit file offset must be
/// rejected with `ElfSegmentAddrOrSizeError` rather than loaded.
pub fn test_invalid_file_offset64() {
    let program = fs::read("tests/programs/invalid_file_offset64").unwrap();
    let args = vec!["invalid_file_offset64".into()];
    let result = run::<u64, SparseMemory<u64>>(&program.into(), &args);
    assert_eq!(result.err(), Some(Error::ElfSegmentAddrOrSizeError));
}
| 587
|
/// Introspection round-trip: a column backed by a unique index must come
/// back as a scalar field with `is_unique: true`, while a plain column
/// stays non-unique.
fn uniqueness_is_preserved_when_generating_data_model_from_a_schema() {
    // Expected datamodel: one model with a non-unique and a unique int field.
    let ref_data_model = Datamodel {
        models: vec![Model {
            database_name: None,
            name: "Table1".to_string(),
            documentation: None,
            is_embedded: false,
            is_commented_out: false,
            fields: vec![
                Field::ScalarField(ScalarField::new(
                    "non_unique",
                    FieldArity::Optional,
                    FieldType::Base(ScalarType::Int, None),
                )),
                Field::ScalarField(ScalarField {
                    name: "unique".to_string(),
                    arity: FieldArity::Required,
                    field_type: FieldType::Base(ScalarType::Int, None),
                    database_name: None,
                    default_value: None,
                    // The property under test.
                    is_unique: true,
                    is_id: false,
                    documentation: None,
                    is_generated: false,
                    is_updated_at: false,
                    is_commented_out: false,
                }),
            ],
            is_generated: false,
            indices: vec![],
            id_fields: vec![],
        }],
        enums: vec![],
    };
    // Input SQL schema: the same two columns plus a unique index on "unique".
    let schema = SqlSchema {
        tables: vec![Table {
            name: "Table1".to_string(),
            columns: vec![
                Column {
                    name: "non_unique".to_string(),
                    tpe: ColumnType::pure(ColumnTypeFamily::Int, ColumnArity::Nullable),
                    default: None,
                    auto_increment: false,
                },
                Column {
                    name: "unique".to_string(),
                    tpe: ColumnType::pure(ColumnTypeFamily::Int, ColumnArity::Required),
                    default: None,
                    auto_increment: false,
                },
            ],
            indices: vec![Index {
                name: "unique".to_string(),
                columns: vec!["unique".to_string()],
                tpe: IndexType::Unique,
            }],
            primary_key: None,
            foreign_keys: vec![],
        }],
        enums: vec![],
        sequences: vec![],
    };
    // Introspect against Postgres and compare with the expected datamodel.
    let introspection_result = calculate_datamodel(&schema, &SqlFamily::Postgres).expect("calculate data model");
    assert_eq!(introspection_result.data_model, ref_data_model);
}
| 588
|
/// Translate a Cranelift value's type into a Souper integer type.
///
/// Results of `icmp`/`icmp_imm` are modelled as width 1 regardless of
/// their Cranelift type; every other value keeps its bit width. Only
/// scalar integer values are supported (asserted).
fn souper_type_of(dfg: &ir::DataFlowGraph, val: ir::Value) -> Option<ast::Type> {
    let ty = dfg.value_type(val);
    assert!(ty.is_int());
    assert_eq!(ty.lane_count(), 1);
    let is_cmp = dfg.value_def(val).inst().map_or(false, |inst| {
        let opcode = dfg.insts[inst].opcode();
        opcode == ir::Opcode::IcmpImm || opcode == ir::Opcode::Icmp
    });
    let width = if is_cmp {
        1
    } else {
        ty.bits().try_into().unwrap()
    };
    Some(ast::Type { width })
}
| 589
|
/// Merge every file under `config/` into one configuration and return it
/// flattened to string key/value pairs.
pub fn get_settings() -> HashMap<String, String> {
    let sources: Vec<_> = glob("config/*")
        .unwrap()
        .map(|path| File::from(path.unwrap()))
        .collect();
    let mut settings = Config::default();
    settings.merge(sources).unwrap();
    // Flatten the merged configuration into a string map.
    settings.try_into::<HashMap<String, String>>().unwrap()
}
| 590
|
/// Walk `expr` and compute its borrow signature: the stack of lifetimes
/// its reference chain carries, plus a unique id for the produced value.
pub fn borrow_expr<'a>(
    expr: &'a Expr<'a>,
    var_id: &mut u64,
    ast: &'a Ast<'a>,
    borrowstate: &mut State<(Vec<&'a str>, u64)>,
) -> Result<(Vec<&'a str>, u64), BorrowError<'a>> {
    match &expr.value {
        // Reference literals never appear directly in source expressions.
        Value::Literal(Literal::Ref(_)) => panic!(),
        // Taking a reference pushes the local lifetime onto the stack.
        Value::UnOp(UnOp::Ref(_), operand) => {
            let (mut lifetimes, id) = borrow_expr(operand, var_id, ast, borrowstate)?;
            lifetimes.push(LOCAL_LIFE);
            Ok((lifetimes, id))
        }
        // Dereferencing pops the outermost lifetime.
        Value::UnOp(UnOp::Deref, operand) => {
            let (mut lifetimes, id) = borrow_expr(operand, var_id, ast, borrowstate)?;
            lifetimes.pop();
            Ok((lifetimes, id))
        }
        // Plain computations yield a fresh, unborrowed value.
        Value::Literal(_) | Value::UnOp(_, _) | Value::BinOp(_, _, _) => {
            let id = *var_id;
            *var_id += 1;
            Ok((vec![], id))
        }
        Value::Call(ident, args) => borrow_call(&ident, &args, var_id, ast, borrowstate),
        // Identifiers reuse the lifetimes recorded for the binding.
        Value::Ident(ident) => Ok(borrowstate.get(*ident).unwrap().clone()),
    }
}
| 591
|
/// Worked example from the puzzle statement: the polymer
/// "dabAcCaCBAcCcaDA" reduces to 10 units.
fn test_solve_1() {
    let reduced_len = solve_1("dabAcCaCBAcCcaDA");
    assert_eq!(reduced_len, 10);
}
| 592
|
/// Pass-through stub: returns the partitioner context untouched. The
/// column arguments are accepted for interface compatibility but are not
/// yet consulted — their underscore prefixes silence the unused-parameter
/// warnings and document the intent.
pub fn causet_partitioner_scan_column_as_string(
    context: Box<CausetPartitionerContext>,
    _column_name: &[u8],
    _column_value: &[u8],
) -> Box<CausetPartitionerContext> {
    context
}
| 593
|
/// Walk the commit range `from..to` (or everything reachable from `to`
/// when `from` is empty) and credit each canonicalized author with the
/// commits they authored, co-authored, or reviewed via bors.
fn build_author_map_(
    repo: &Repository,
    reviewers: &Reviewers,
    mailmap: &Mailmap,
    from: &str,
    to: &str,
) -> Result<AuthorMap, Box<dyn std::error::Error>> {
    let mut walker = repo.revwalk()?;
    if repo.revparse_single(to).is_err() {
        // If a commit is not found, try fetching it.
        git(&[
            "--git-dir",
            repo.path().to_str().unwrap(),
            "fetch",
            "origin",
            to,
        ])?;
    }
    if from == "" {
        // No lower bound: walk everything reachable from `to`.
        let to = repo.revparse_single(to)?.peel_to_commit()?.id();
        walker.push(to)?;
    } else {
        walker.push_range(&format!("{}..{}", from, to))?;
    }
    let mut author_map = AuthorMap::new();
    for oid in walker {
        let oid = oid?;
        let commit = repo.find_commit(oid)?;
        let mut commit_authors = Vec::new();
        if !is_rollup_commit(&commit) {
            // We ignore the author of rollup-merge commits, and account for
            // that author once by counting the reviewer of all bors merges. For
            // rollups, we consider that this is the most relevant person, which
            // is usually the case.
            //
            // Otherwise, a single rollup with N PRs attributes N commits to the author of the
            // rollup, which isn't fair.
            commit_authors.push(Author::from_sig(commit.author()));
        }
        // Reviewer credit from bors merge messages. Unknown reviewers are
        // reported but non-fatal; any other error aborts the walk.
        match parse_bors_reviewer(&reviewers, &repo, &commit) {
            Ok(Some(reviewers)) => commit_authors.extend(reviewers),
            Ok(None) => {}
            Err(ErrorContext(msg, e)) => {
                if e.is::<reviewers::UnknownReviewer>() {
                    eprintln!("Unknown reviewer: {}", ErrorContext(msg, e));
                } else {
                    return Err(ErrorContext(msg, e).into());
                }
            }
        }
        // Co-authored-by trailers also earn credit.
        commit_authors.extend(commit_coauthors(&commit));
        for author in commit_authors {
            let author = mailmap.canonicalize(&author);
            author_map.add(author, oid);
        }
    }
    Ok(author_map)
}
| 594
|
fn
load
(
&
self
_order
:
Ordering
)
{
}
| 595
|
/// Post-order helper for the longest-zigzag-path problem. Returns
/// `(run, best)` where `run` is the longest zigzag continuing from this
/// node in the direction the parent would need next (`left` records which
/// edge led here), and `best` is the longest zigzag anywhere in the
/// subtree. Counts nodes, so an empty subtree yields (0, 1).
///
/// Fix: the outer `root` binding never needed `mut` (only the `RefMut`
/// does — its `take()` calls mutate the node's children), so the
/// unused-mut warning is gone.
fn zig_zag(root: Option<Rc<RefCell<TreeNode>>>, left: bool) -> (i32, i32) {
    if root.is_none() {
        return (0, 1);
    }
    let root = root.unwrap();
    let mut node = root.borrow_mut();
    // Recurse into both children, detaching them as we go.
    let (left_run, left_best) = zig_zag(node.left.take(), true);
    let (right_run, right_best) = zig_zag(node.right.take(), false);
    let (via_left, via_right) = (1 + left_run, 1 + right_run);
    let best = via_left.max(via_right).max(left_best).max(right_best);
    // A parent that arrived via a left edge must continue to the right.
    (if left { via_right } else { via_left }, best)
}
| 596
|
/// Intersect the 1-D intervals [p0, p1] and [op0, op1]: the result starts
/// at the larger start and ends at the smaller end. (The result can be
/// "inverted" — start > end — when the intervals do not overlap.)
fn get_line_intersection(p0: i64, p1: i64, op0: i64, op1: i64) -> (i64, i64) {
    (p0.max(op0), p1.min(op1))
}
| 597
|
/// Print the answers for both puzzle parts using the bundled input file.
fn main() {
    let input = include_str!("../data/input.txt");
    println!("Part 1 Result - {}", solve_1(input));
    println!("Part 2 Result - {}", solve_2(input));
}
| 598
|
/// Classify an input line: "Driver <name>" (exactly 2 tokens),
/// "Trip <name> ..." (exactly 5 tokens), or unknown.
pub fn validate_line_type(data: &str) -> DataType {
    let tokens: Vec<&str> = data.split(' ').collect();
    // The token slices borrow `data`, not `tokens`, so `tokens` itself can
    // still be moved into the Trip variant below.
    match (tokens.first().copied(), tokens.len()) {
        (Some("Driver"), 2) => DataType::Driver(tokens[1].to_string()),
        (Some("Trip"), 5) => DataType::Trip(tokens[1].to_string(), tokens),
        _ => DataType::Unknown,
    }
}
| 599
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.