Dataset Viewer
Auto-converted to Parquet Duplicate
content
stringlengths
12
392k
id
int64
0
1.97k
// Build helper: shells out to `packfolder.exe` to pack the `src/frontend`
// directory into the `dupa.rc` resource file. Panics (via `expect`) if the
// tool cannot be spawned. NOTE(review): the expect message is non-English
// slang; consider replacing it with a descriptive English message.
fn main() { std::process::Command::new("packfolder.exe").args(&["src/frontend", "dupa.rc", "-binary"]) .output().expect("no i ciul"); }
0
// Creates a GPU vertex buffer sized for a *fully populated* chunk (capacity
// x*y*z voxels, cube verts plus one palette index per 3 verts), so the buffer
// only needs recreating when the chunk capacity changes, then uploads the
// current mesh vertices if any exist. Returns the buffer plus bookkeeping.
// NOTE(review): max size assumes f32-sized elements throughout — confirm the
// palette-index bytes are meant to be scaled by size_of::<f32>() as well.
fn create_mesh_buffer_verts( chunk: &Chunk, device: &wgpu::Device, queue: &wgpu::Queue, ) -> MeshBufferVerts { // Calculate total length of buffer e.g. a full chunk of different voxels. This way a new buffer only has to be created when the voxel capacity is changed. let verts = Mesh::verts(chunk); let vert_len = verts.len(); let single_cube_verts = Mesh::cube_verts().len(); let single_cube_color_verts = (single_cube_verts / 3) * std::mem::size_of::<PaletteIndexType>(); // One PaletteIndexType per 3 verts let max_voxels = { let (x, y, z) = chunk.capacity(); x * y * z }; let max_buf_size = (single_cube_verts + single_cube_color_verts) * max_voxels * std::mem::size_of::<f32>(); let buffer = device.create_buffer(&wgpu::BufferDescriptor { label: None, mapped_at_creation: false, size: max_buf_size as u64, usage: wgpu::BufferUsage::VERTEX | wgpu::BufferUsage::COPY_DST, }); if vert_len > 0 { queue.write_buffer(&buffer, 0, bytemuck::cast_slice(&verts)); } MeshBufferVerts { buffer, vert_len, max_capacity: chunk.capacity(), } }
1
// Front-end entry point (seed framework): builds and runs the app, then
// kicks off the initial data fetch via `Msg::FetchData`.
pub fn render() { // let a = ext_rec(); // let some = a.calcArea(); // log!("got event! {}", some); // log!("got event!"); let app = seed::App::build(|_, _| Model::default(), update, view) .finish() .run(); app.update(Msg::FetchData); }
2
// Proc-macro helper: resolves the path to the `ruma-api` crate regardless of
// whether the caller depends on it directly (possibly renamed), indirectly
// via the umbrella `ruma` crate, or not at all (fallback to `::ruma_api`).
pub fn import_ruma_api() -> TokenStream { if let Ok(FoundCrate::Name(possibly_renamed)) = crate_name("ruma-api") { let import = Ident::new(&possibly_renamed, Span::call_site()); quote! { ::#import } } else if let Ok(FoundCrate::Name(possibly_renamed)) = crate_name("ruma") { let import = Ident::new(&possibly_renamed, Span::call_site()); quote! { ::#import::api } } else { quote! { ::ruma_api } } }
3
// Queries PBS via `qstat -f -F json <job_id>` and parses the job's remaining
// walltime. Bails with the exit status plus stderr/stdout on command failure.
pub fn get_remaining_timelimit(ctx: &PbsContext, job_id: &str) -> anyhow::Result<Duration> { let result = Command::new(&ctx.qstat_path) .args(&["-f", "-F", "json", job_id]) .output()?; if !result.status.success() { anyhow::bail!( "qstat command exited with {}: {}\n{}", result.status, String::from_utf8_lossy(&result.stderr), String::from_utf8_lossy(&result.stdout) ); } let output = String::from_utf8_lossy(&result.stdout).into_owned(); log::debug!("qstat output: {}", output.trim()); parse_pbs_job_remaining_time(job_id, output.as_str()) }
4
// CLI entry for Advent of Code 2017 day 1: reads the input file, picks the
// digit-comparison offset (1 for part 1, len/2 for part 2) and prints the
// result. `unwrap` on I/O is acceptable for this throwaway binary.
fn main() { let matches = App::new("Advent of Code 2017 - Day 1") .arg(Arg::with_name("filename") .required(true)) .arg(Arg::with_name("part") .possible_values(&["1", "2"])) .get_matches(); let filename = matches.value_of("filename").unwrap(); let part = matches.value_of("part").unwrap_or("1"); let mut file = File::open(filename).unwrap(); let mut contents = String::new(); file.read_to_string(&mut contents).unwrap(); contents = contents.trim().to_string(); let skip: usize = match part { "1" => 1, _ => contents.len() / 2, }; let result = solve(&contents, skip); println!("{}", result); }
5
// Displays an image in an SDL2 window (clamped to the current display size),
// drawing it pixel-by-pixel with rows flipped vertically, then idles in an
// event loop (sleeping 250ms per poll) until the window is closed.
fn show_image(image: &Image) { let sdl = sdl2::init().unwrap(); let video_subsystem = sdl.video().unwrap(); let display_mode = video_subsystem.current_display_mode(0).unwrap(); let w = match display_mode.w as u32 > image.width { true => image.width, false => display_mode.w as u32 }; let h = match display_mode.h as u32 > image.height { true => image.height, false => display_mode.h as u32 }; let window = video_subsystem .window("Image", w, h) .build() .unwrap(); let mut canvas = window .into_canvas() .present_vsync() .build() .unwrap(); let black = sdl2::pixels::Color::RGB(0, 0, 0); let mut event_pump = sdl.event_pump().unwrap(); // render image canvas.set_draw_color(black); canvas.clear(); for r in 0..image.height { for c in 0..image.width { let pixel = &image.pixels[image.height as usize - r as usize - 1][c as usize]; canvas.set_draw_color(Color::RGB(pixel.R as u8, pixel.G as u8, pixel.B as u8)); canvas.fill_rect(Rect::new(c as i32, r as i32, 1, 1)).unwrap(); } } canvas.present(); 'main: loop { for event in event_pump.poll_iter() { match event { sdl2::event::Event::Quit {..} => break 'main, _ => {}, } } sleep(Duration::new(0, 250000000)); } }
6
// When a player joins, sends a creation packet for every existing entity that
// has a CreationPacketCreator component, so the new client sees the world.
// Panics if the query yields an entity the world no longer knows about.
pub fn on_player_join_send_existing_entities(event: &PlayerJoinEvent, world: &mut World) { let network = world.get::<Network>(event.player); for (entity, creator) in <Read<CreationPacketCreator>>::query().iter_entities(world.inner()) { let accessor = world .entity(entity) .expect("query yielded entity which does not exist"); let packet = creator.get(&accessor); network.send_boxed(packet); } }
7
/// Returns the characters that belong to the requested charset category.
///
/// Bug fix: the `Uppercase` arm was missing the letters 'O' and 'P'
/// ("...KLMNQRST..."), while the `Lowercase` arm contained all 26 letters.
/// The full A-Z range is restored so both alphabet arms agree.
pub fn charset(charset: &CharsetType) -> Vec<char> {
    match charset {
        // Full lowercase alphabet.
        CharsetType::Lowercase => "abcdefghijklmnopqrstuvwxyz".chars().collect(),
        // Full uppercase alphabet (previously missing 'O' and 'P').
        CharsetType::Uppercase => "ABCDEFGHIJKLMNOPQRSTUVWXYZ".chars().collect(),
        // Restricted symbol set.
        CharsetType::Symbols => "_*&|!?@$#=%".chars().collect(),
        // Decimal digits.
        CharsetType::Numbers => "0123456789".chars().collect(),
        // All printable ASCII punctuation.
        CharsetType::Special => r###"!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~"###.chars().collect(),
    }
}
8
// Regression test: the "outdated" makefile task should be disabled in both
// checked modes (flags presumably mean enabled/fork — TODO confirm in helper).
fn makefile_outdated_test() { makefile_task_enabled_test("outdated", false, false); }
9
// Test: creating a primary element with a body that already exists must be
// rejected with HTTP 409 Conflict.
fn create_duplication() { let body_str = "duplicate"; rocket_helpers::create_test_element(body_str); rocket_helpers::create_test_object_expect_status( ObjectType::PrimaryElement, body_str, Status::Conflict, ); }
10
// AST folding helper: applies a unary "+"/"-" token to an integer node and
// returns the resulting Node::Int. Panics if the node is not Int, the token
// is not T_UNARY_NUM, or the sign string is unrecognized.
// NOTE(review): local binding "polarty" is a typo for "polarity".
pub fn unary_num(t_unary: Token, n_simple_numeric: Node) -> Node { let mut numeric_value = if let Node::Int(int_value) = n_simple_numeric { int_value } else { panic!(); }; if let Token::T_UNARY_NUM(polarty) = t_unary { match polarty.as_ref() { "+" => (), "-" => { numeric_value = 0 - numeric_value; }, _ => { panic!(); } } } else { panic!(); } return Node::Int(numeric_value); }
11
// Cortex-M hard-fault handler: dump the exception frame via panic (never returns).
fn hard_fault(ef: &ExceptionFrame) -> ! { panic!("Hardfault... : {:#?}", ef); }
12
// Crawler entry: takes an optional start URL from argv (falling back to
// START_URL), investigates it with a caching HTTP agent, and steps the agent
// until it reports completion, dumping the store size as it goes.
async fn main() { let start_url = std::env::args() .skip(1) .next() .unwrap_or(START_URL.to_string()); let store = MemoryStore::new(); let mut agent = Agent::new(curiosity(), store.clone(), CachedHttp::default()); agent .investigate(NamedNode::new(start_url).unwrap()) .await .unwrap(); while !agent.next().await.unwrap() { dbg!(store.len()); } println!("{}", show(&store)); dbg!(store.len(), "done"); }
13
// Histogram test: verifies quantile_below for raw vs corrected histograms at
// the 5ms and 100s marks of the standard fixture.
fn quantile_atorbelow() { let Loaded { hist, raw, .. } = load_histograms(); assert_near!(0.9999, raw.quantile_below(5000), 0.0001); assert_near!(0.5, hist.quantile_below(5000), 0.0001); assert_near!(1.0, hist.quantile_below(100000000_u64), 0.0001); }
14
// Test: running the build-file-increment task with no pre-existing build
// number file must create it with "1" and export CARGO_MAKE_BUILD_NUMBER=1.
// Uses a temp directory and cleans the env var before reading results.
fn makefile_build_file_increment_no_file_test() { let mut file = test::get_temp_test_directory("makefile_build_file_increment_no_file_test"); file.push("build_file_increment_test_no_file"); fsio::directory::delete(&file).unwrap(); file.push("buildnumber.txt"); envmnt::set("CARGO_MAKE_BUILD_NUMBER_FILE", &file); let name = "build-file-increment"; let config = load_descriptor(); let task = get_task(name, &config); let flow_info = create_flow_info(&config); let step = Step { name: name.to_string(), config: task, }; runner::run_task(&flow_info, Rc::new(RefCell::new(FlowState::new())), &step); envmnt::remove("CARGO_MAKE_BUILD_NUMBER_FILE"); let text = fsio::file::read_text_file(&file).unwrap(); assert_eq!(text, "1"); let number = envmnt::get_or_panic("CARGO_MAKE_BUILD_NUMBER"); assert_eq!(number, "1"); }
15
// Cryptopals set 1 challenge 1 driver: convert the fixed hex string to base64.
fn main() { let input = "49276d206b696c6c696e6720796f757220627261696e20\ 6c696b65206120706f69736f6e6f7573206d757368726f6f6d"; println!("{}", encoding::hexstr_to_base64str(input)); }
16
/// Collects the characters that match position-for-position in both box IDs
/// (zips the two strings and keeps only the positions where they agree).
fn get_common_chars(box_one: &String, box_two: &String) -> String {
    let mut common = String::new();
    for (a, b) in box_one.chars().zip(box_two.chars()) {
        if a == b {
            common.push(a);
        }
    }
    common
}
17
// Downloads `url` and verifies its SHA-256 digest against the expected hex
// string; returns the bytes on match, Error::HashError on mismatch.
fn download_and_verify(url: &str, hash: &str) -> crate::Result<Vec<u8>> { common::print_info(format!("Downloading {}", url).as_str())?; let response = attohttpc::get(url).send()?; let data: Vec<u8> = response.bytes()?; common::print_info("validating hash")?; let mut hasher = sha2::Sha256::new(); hasher.update(&data); let url_hash = hasher.finalize().to_vec(); let expected_hash = hex::decode(hash)?; if expected_hash == url_hash { Ok(data) } else { Err(crate::Error::HashError) } }
18
// CLI dispatch: "server <url>" runs the server; "client <url>" streams stdin;
// "client <url> <file>" streams a file (exit 2 if it cannot be opened);
// anything else prints usage and exits 1. Return type `!` — never returns Ok.
async fn main() -> Result<!, String> { // Begin by parsing the arguments. We are either a server or a client, and // we need an address and potentially a sleep duration. let args: Vec<_> = env::args().collect(); match &*args { [_, mode, url] if mode == "server" => server(url).await?, [_, mode, url] if mode == "client" => client(url, tokio::io::stdin()).await?, [_, mode, url, input_file] if mode == "client" => { match tokio::fs::File::open(input_file).await { Ok(file) => client(url, file).await?, Err(err) => { eprintln!("Failed to open input_file: \"{}\", error: {}", input_file, err); process::exit(2); } } } _ => { eprintln!("Usage:\n{0} server <url>\n or\n{0} client <url> [input_file]", args[0]); process::exit(1); } } }
19
// Integration test for `sit`: init a repo, install a reducer that sets
// {value: "hello"}, create an item plus a record, then check `sit reduce`
// outputs exactly {id, value}.
fn item() { let dir = TestDir::new("sit", "item"); dir.cmd() .arg("init") .expect_success(); dir.create_file(".sit/reducers/test.js",r#" module.exports = function(state, record) { return Object.assign(state, {value: "hello"}); } "#); let id = String::from_utf8(dir.cmd().arg("item").expect_success().stdout).unwrap(); // create a record Repository::open(dir.path(".sit")).unwrap().item(id.trim()).unwrap().new_record(vec![("test", &b""[..])].into_iter(), true).unwrap(); let output = String::from_utf8(dir.cmd().args(&["reduce", id.trim()]).expect_success().stdout).unwrap(); use serde_json::Map; let mut expect = Map::new(); expect.insert("id".into(), serde_json::Value::String(id.trim().into())); expect.insert("value".into(), serde_json::Value::String("hello".into())); assert_eq!(serde_json::from_str::<serde_json::Value>(output.trim()).unwrap(), serde_json::Value::Object(expect)); }
20
// Benchmark endpoint: loads up to LIMIT rows from Postgres, bulk-indexes them
// into Elasticsearch, and prints timing for each phase. Env vars DATABASE_URL,
// ELASTICSEARCH_URL and LIMIT are required (panics if missing). The French
// log/response strings are intentional runtime text and left untouched.
async fn elastic_handler() -> impl Responder { dotenv().ok(); let start = Instant::now(); let postgres_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set"); let elasticsearch_url = env::var("ELASTICSEARCH_URL").expect("ELASTICSEARCH_URL must be set"); let data_limit = env::var("LIMIT").expect("LIMIT must be set"); let postgres_duration = start.elapsed(); let limit: i64 = data_limit.parse().unwrap(); let postgres_database = postgres::new(&postgres_url); println!("size is {}", postgres_database.get_size()); let postgres_load_duration = Instant::now(); println!( "postgres connection is {}", postgres_database.check_connection() ); let data = postgres_database.load(limit); let postgres_load_duration = postgres_load_duration.elapsed(); let elastic_load_duration = Instant::now(); println!( "load data into ElastiSearch in {} bulk requests", data.len() ); let elastic = elastic::new(&elasticsearch_url); match elastic.bulky(&postgres_database).await { Ok(a) => a, _ => unreachable!(), }; let elastic_load_duration: Duration = elastic_load_duration.elapsed(); let full_duration: Duration = start.elapsed(); println!("lancement postgres {:?}", postgres_duration); println!("chargement postgres {:?}", postgres_load_duration); println!("chargement elastic {:?}", elastic_load_duration); println!("chargement global {:?}", full_duration); HttpResponse::Ok().body("Load lancé") }
21
// Locates the current crate's Cargo.toml by running `cargo locate-project`
// and parsing its JSON stdout ({"root": "<path>"}). Panics if cargo cannot be
// spawned; returns CargoLoadFailure on a non-zero exit status.
fn current_manifest_path() -> ManifestResult { let locate_result = Command::new("cargo").arg("locate-project").output(); let output = match locate_result { Ok(out) => out, Err(e) => { print!("Failure: {:?}", e); panic!("Cargo failure to target project via locate-project") } }; if !output.status.success() { return Err(ManifestLoadError::CargoLoadFailure); } #[derive(Deserialize)] struct Data { root: String, } let stdout = String::from_utf8(output.stdout).unwrap(); let decoded: Data = serde_json::from_str(&stdout).unwrap(); let path = Path::new(&decoded.root).to_owned(); // debug!("manifest: {:?}", path); Ok(path) }
22
// Fetches `url`, requires a 2xx status and a Content-Type header, then parses
// the body bytes into an RDF graph using that content type.
async fn http_lookup(url: &Url) -> Result<Graph, Berr> { let resp = reqwest::get(url.as_str()).await?; if !resp.status().is_success() { return Err("unsucsessful GET".into()); } let content_type = resp .headers() .get(CONTENT_TYPE) .ok_or("no content-type header in response")? .to_str()? .to_string(); let bytes = resp.bytes().await?; into_rdf(&bytes, &content_type).map_err(Into::into) }
23
// Cryptopals challenge 11: feed the file's base64 payload to the encryption
// oracle and report whether the ciphertext looks like ECB or CBC.
pub fn challenge_11() { let mut file = File::open("data/11.txt").unwrap(); let mut all_file = String::new(); file.read_to_string(&mut all_file).unwrap(); let data = base64::decode(&all_file).unwrap(); let it_data = crypto::encryption_oracle(data.clone()); if crypto::is_ecb(it_data) { println!("ECB"); } else { println!("CBC"); } }
24
/// Returns the successor of `value` (i.e. `value + 1`).
fn add(value: i32) -> i32 {
    1 + value
}
25
// Demo: constructs a Point and prints its x coordinate and distance from origin.
fn main() { println!("Hello, world!"); let poin = Point {x:10.0,y:20.0}; println!("x part of point is {} and distance from origin is {}.",poin.x(),poin.distance_from_origin()); }
26
// Test: two queries sharing a sender but with different names must not
// compare equal (name participates in Query equality).
fn create_new_queries_different_name_same_sender_is_not_equal() { let (tx, _) = mpsc::channel(); let query_one: Query<String> = Query::new("dummy_query_one", tx.clone()); let query_two: Query<String> = Query::new("dummy_query_two", tx.clone()); assert!(query_one != query_two); }
27
// Generated AWS-SDK-style serializer: writes each Some(...) field of
// UpdateUserInput as a JSON key; StorageRule is a nested object. Do not edit
// by hand beyond comments — this mirrors the smithy code-generator output.
pub fn serialize_structure_crate_input_update_user_input( object: &mut smithy_json::serialize::JsonObjectWriter, input: &crate::input::UpdateUserInput, ) { if let Some(var_46) = &input.given_name { object.key("GivenName").string(var_46); } if let Some(var_47) = &input.grant_poweruser_privileges { object .key("GrantPoweruserPrivileges") .string(var_47.as_str()); } if let Some(var_48) = &input.locale { object.key("Locale").string(var_48.as_str()); } if let Some(var_49) = &input.storage_rule { let mut object_50 = object.key("StorageRule").start_object(); crate::json_ser::serialize_structure_crate_model_storage_rule_type(&mut object_50, var_49); object_50.finish(); } if let Some(var_51) = &input.surname { object.key("Surname").string(var_51); } if let Some(var_52) = &input.time_zone_id { object.key("TimeZoneId").string(var_52); } if let Some(var_53) = &input.r#type { object.key("Type").string(var_53.as_str()); } }
28
/// Password check (AoC-style): digits must never decrease reading left to
/// right, and at least one digit must repeat (a run of two or more).
/// Digits are consumed from least significant to most significant.
fn valid1(mut x: i32) -> bool {
    let mut prev = x % 10;
    x /= 10;
    let mut run_len = 1;
    let mut has_pair = false;
    while x > 0 {
        let digit = x % 10;
        if digit > prev {
            // The digit to the left is larger than its neighbour:
            // the number decreases somewhere, so it is invalid.
            return false;
        }
        run_len = if digit == prev { run_len + 1 } else { 1 };
        has_pair = has_pair || run_len == 2;
        prev = digit;
        x /= 10;
    }
    has_pair
}
29
// Failure-path test: `sit reduce` on a nonexistent item must exit non-zero.
fn no_item() { let dir = TestDir::new("sit", "no_item"); dir.cmd() .arg("init") .expect_success(); dir.cmd().args(&["reduce", "some-item"]).expect_failure(); }
30
// Renders the sources as a prettytable with owner/name (dimmed separator),
// id, formatted UTC update time and title, printed to stdout.
fn print_sources_table(sources: &[Source]) { let mut table = new_table(); table.set_titles(row![bFg => "Name", "ID", "Updated (UTC)", "Title"]); for source in sources.iter() { let full_name = format!( "{}{}{}", source.owner.0.dimmed(), "/".dimmed(), source.name.0 ); table.add_row(row![ full_name, source.id.0, source.updated_at.format("%Y-%m-%d %H:%M:%S"), source.title ]); } table.printstd(); }
31
// Negative test: after genesis with a funded faucet and an already-existing
// ALICE account, storing the faucet contract and calling it targeting ALICE
// must revert with mint::Error::InvalidContext (transfer to an existing
// account is rejected). Statement order matters (genesis -> store -> call).
fn should_fail_transfer_to_existing_account() { let accounts = { let faucet_account = GenesisAccount::account( FAUCET.clone(), Motes::new(DEFAULT_ACCOUNT_INITIAL_BALANCE.into()), None, ); let alice_account = GenesisAccount::account( ALICE.clone(), Motes::new(MINIMUM_ACCOUNT_CREATION_BALANCE.into()), None, ); let mut tmp: Vec<GenesisAccount> = DEFAULT_ACCOUNTS.clone(); tmp.push(faucet_account); tmp.push(alice_account); tmp }; let run_genesis_request = utils::create_run_genesis_request(accounts); let mut builder = InMemoryWasmTestBuilder::default(); builder.run_genesis(&run_genesis_request); let store_faucet_request = ExecuteRequestBuilder::standard( *FAUCET_ADDR, CONTRACT_FAUCET_STORED, RuntimeArgs::default(), ) .build(); builder.exec(store_faucet_request).expect_success().commit(); let faucet_account = { let tmp = builder .query(None, Key::Account(*FAUCET_ADDR), &[]) .unwrap(); tmp.as_account().cloned().unwrap() }; let faucet_hash = { let faucet_key = faucet_account.named_keys().get("faucet").cloned().unwrap(); faucet_key.into_hash().unwrap() }; let faucet_request_amount = U512::from(FAUCET_REQUEST_AMOUNT); let faucet_request = ExecuteRequestBuilder::contract_call_by_hash( *FAUCET_ADDR, faucet_hash.into(), CONTRACT_FAUCET_ENTRYPOINT, runtime_args! { ARG_TARGET => *ALICE_ADDR, ARG_AMOUNT => faucet_request_amount, }, ) .build(); builder.exec(faucet_request).commit(); match builder.get_error() { Some(engine_state::Error::Exec(execution::Error::Revert(api_error))) if api_error == ApiError::from(mint::Error::InvalidContext) => {} _ => panic!("should be an error"), } }
32
// Produces a stable u64 fingerprint of a vertex layout by hashing the
// attribute slice with FxHasher (fast, not DoS-resistant — fine for caching).
fn calculate_layout_hash(layout: &[VertexAttribute]) -> u64 { let mut hasher = FxHasher::default(); layout.hash(&mut hasher); hasher.finish() }
33
// Adds `source` to the catalog entry for `msgid`, appending to the existing
// message's source list if present (catalogs must not contain duplicate
// msgids, hence the update-or-add dance at the end).
fn add_message(catalog: &mut Catalog, msgid: &str, source: &str) { let sources = match catalog.find_message(msgid) { Some(msg) => format!("{}\n{}", msg.source, source), None => String::from(source), }; let message = Message::new_singular("", &sources, "", "", msgid, ""); // Carefully update the existing message or add a new one. It's an // error to create a catalog with duplicate msgids. match catalog.find_message_index(msgid) { Some(&idx) => catalog.update_message_by_index(idx, message).unwrap(), None => catalog.add_message(message), } }
34
// ESP32 bare-metal demo: sets up clocks, disables all watchdogs, configures
// UART0 at 115200 so dprint!() works, initializes I2C0 on GPIO4/5, then sends
// the raw SSD1306 init sequence and blits the embedded Rust logo bitmap.
// Never returns. NOTE(review): the dprintln! string literal legitimately
// spans two source lines (embedded newline); magic byte sequences are the
// SSD1306 command stream — changing their order changes display behavior.
fn main() -> ! { use hal::dport::Split; use hal::serial::{self, Serial}; let mut dp = target::Peripherals::take().expect("Failed to obtain Peripherals"); let (_, dport_clock_control) = dp.DPORT.split(); let mut pins = dp.GPIO.split(); let clkcntrl = esp32_hal::clock_control::ClockControl::new( dp.RTCCNTL, dp.APB_CTRL, dport_clock_control, esp32_hal::clock_control::XTAL_FREQUENCY_AUTO, ) .unwrap(); let (clkcntrl_config, mut watchdog) = clkcntrl.freeze().unwrap(); watchdog.disable(); let (_, _, _, mut watchdog0) = esp32_hal::timer::Timer::new(dp.TIMG0, clkcntrl_config); let (_, _, _, mut watchdog1) = esp32_hal::timer::Timer::new(dp.TIMG1, clkcntrl_config); watchdog0.disable(); watchdog1.disable(); // Setup serial so dprint!() works. let _serial: Serial<_, _, _> = Serial::new( dp.UART0, serial::Pins { tx: pins.gpio1, rx: pins.gpio3, cts: None, rts: None, }, serial::config::Config { baudrate: 115200.Hz(), ..serial::config::Config::default() }, clkcntrl_config, ) .unwrap(); dprintln!("init done. 
APB freq: {}\r", clkcntrl_config.apb_frequency()); enable_peripheral(Peripheral::I2C0); reset_peripheral(Peripheral::I2C0); let i2c_period = u32::from(clkcntrl_config.apb_frequency() / 100_000); use core::convert::TryInto; setup_i2c( &mut dp.I2C0, i2c_period.try_into().unwrap(), &mut pins.gpio4, &mut pins.gpio5, ); send_i2c_data(&mut dp.I2C0, 0b0111100, &[0, 0xAE, 0xD5, 0x80, 0xA8]); send_ssd1306_cmd(&mut dp.I2C0, 63); send_i2c_data(&mut dp.I2C0, 0b0111100, &[0, 0xD3, 0, 0x40, 0x8D]); send_ssd1306_cmd(&mut dp.I2C0, 0x14); send_i2c_data(&mut dp.I2C0, 0b0111100, &[0, 0x20, 0, 0xa1, 0xc8]); send_ssd1306_cmd(&mut dp.I2C0, 0xDA); send_ssd1306_cmd(&mut dp.I2C0, 0x12); send_ssd1306_cmd(&mut dp.I2C0, 0x81); send_ssd1306_cmd(&mut dp.I2C0, 0xCF); send_ssd1306_cmd(&mut dp.I2C0, 0xD9); send_ssd1306_cmd(&mut dp.I2C0, 0xF1); send_i2c_data( &mut dp.I2C0, 0b0111100, &[0, 0xDB, 0x40, 0xA4, 0xA7, 0x2E, 0xAF], ); send_i2c_data(&mut dp.I2C0, 0b0111100, &[0, 0x22, 0, 0xff]); send_i2c_data(&mut dp.I2C0, 0b0111100, &[0, 0x21, 0, 127]); let rust_logo = include_bytes!("logo.raw"); for y in (0..64).step_by(8) { for x in (0..16).step_by(2) { let mut line = [0u8; 17]; line[0] = 0x40; if 4 <= x && x < 12 { for j in 0..16 { for k in 0..8 { use bit::BitIndex; line[j + 1] |= if rust_logo[(k + y) * 8 + j / 8 + (x - 4)].bit(7 - j % 8) { 1 << k } else { 0 }; } } } send_i2c_data(&mut dp.I2C0, 0b0111100, &line); } } dprintln!("done\r"); loop {} }
35
// Reads engine state for `key` out of a ledger page snapshot and hands the
// result to `done`. A missing key is Ok(None); an *unparseable* stored value
// is deliberately mapped to Ok(None) via .ok() so bad state gets overwritten;
// FIDL/ledger errors become Err(()).
fn state_from_snapshot<F>( snapshot: ::fidl::InterfacePtr<PageSnapshot_Client>, key: Vec<u8>, done: F, ) where F: Send + FnOnce(Result<Option<Engine>, ()>) + 'static, { assert_eq!(PageSnapshot_Metadata::VERSION, snapshot.version); let mut snapshot_proxy = PageSnapshot_new_Proxy(snapshot.inner); // TODO get a reference when too big snapshot_proxy.get(key).with(move |raw_res| { let state = match raw_res.map(|res| ledger::value_result(res)) { // the .ok() has the behavior of acting like invalid state is empty // and thus deleting invalid state and overwriting it with good state Ok(Ok(Some(buf))) => Ok(buf_to_state(&buf).ok()), Ok(Ok(None)) => { info!("No state in conflicting page"); Ok(None) } Err(err) => { warn!("FIDL failed on initial response: {:?}", err); Err(()) } Ok(Err(err)) => { warn!("Ledger failed to retrieve key: {:?}", err); Err(()) } }; done(state); }); }
36
// Parser-combinator test: when the first of three parsers fails, tuple_3 must
// report that parser's expectation ("hello") at position 1:1.
fn tuple_3_run_simple_parsers_fails_with_error_at_first_parser() { let expected = Err(ParserFailure::new_err( String::from("hello"), Some(String::from("world")), Position::new(1, 1, 0) )); let actual = tuple_3( p_hello(), p_u32(), p_true() ).run(String::from("world123true")); assert_eq!(actual, expected); }
37
// Sass compiler test: an hsl() lightness scale around hue 180 must pass
// through unchanged (input equals expected output).
fn cyan() { assert_eq!( runner().ok("a {\ \n step-1: hsl(180, 100%, 0%);\ \n step-2: hsl(180, 100%, 10%);\ \n step-3: hsl(180, 100%, 20%);\ \n step-4: hsl(180, 100%, 30%);\ \n step-5: hsl(180, 100%, 40%);\ \n step-6: hsl(180, 100%, 50%);\ \n step-7: hsl(180, 100%, 60%);\ \n step-8: hsl(180, 100%, 70%);\ \n step-9: hsl(180, 100%, 80%);\ \n step-10: hsl(180, 100%, 90%);\ \n step-11: hsl(180, 100%, 100%);\ \n}\n"), "a {\ \n step-1: hsl(180, 100%, 0%);\ \n step-2: hsl(180, 100%, 10%);\ \n step-3: hsl(180, 100%, 20%);\ \n step-4: hsl(180, 100%, 30%);\ \n step-5: hsl(180, 100%, 40%);\ \n step-6: hsl(180, 100%, 50%);\ \n step-7: hsl(180, 100%, 60%);\ \n step-8: hsl(180, 100%, 70%);\ \n step-9: hsl(180, 100%, 80%);\ \n step-10: hsl(180, 100%, 90%);\ \n step-11: hsl(180, 100%, 100%);\ \n}\n" ); }
38
// Draws the next value below `max` from the global RNG. Panics if the RG
// mutex is poisoned (a prior panic while holding the lock).
pub fn rand(max: usize) -> usize { RG.lock().unwrap().next_val(max) }
39
// Prints the fields of a Food value; takes `data` by value (moves/consumes it).
fn func_struct(data: Food){ println!("restaurant => {}", data.restaurant); println!("item => {}", data.item); println!("price => {}", data.price); }
40
// actix-web entry: mounts the benchmark handlers plus a root hello route and
// serves on 127.0.0.1:9293 until shutdown.
async fn main() -> std::io::Result<()> { HttpServer::new(|| { App::new() .service(hey) .service(elastic_handler) .service(mongo_hanler) .route("/", web::get().to(|| HttpResponse::Ok().body("coucou"))) }) .bind("127.0.0.1:9293")? .run() .await }
41
// Test: a service answering with a 301 redirect must be classified as
// HomoServiceStatus::Invalid with no remote address recorded.
async fn requests_invalid_service() { let container = MockContainer::default(); let source = container.services().homo_request().source(); let service_url = Url::parse("https://example.org").unwrap(); *(source.lock().await) = Box::new(|| { ( make_redirect_response(StatusCode::MOVED_PERMANENTLY, "https://twitter.com/kb10uy"), Duration::from_secs(0), ) }); let result = request_service(container.clone(), service_url) .await .unwrap(); assert_case!( result, HomoServiceResponse { duration: Duration::from_secs(0), remote_address: None, status: HomoServiceStatus::Invalid, }, "Request for HomoService succeeds" ); }
42
// Recursively verifies that `dst` contains no files absent from `src`;
// collects all unexpected paths at this level into one Err. Note: errors
// found in recursive calls propagate immediately via `?` rather than being
// merged into this level's list.
fn check_extra_files(src: &Path, dst: &Path) -> Result<(), Error> { let mut errors = Vec::<String>::new(); for entry in fs::read_dir(dst).context(format!("Reading {:?}", dst))? { let entry = entry?; let dst_path = entry.path(); let src_path = src.join(entry.file_name()); if !src_path.exists() { errors.push(format!("Unexpected file {:?}", dst_path)); } else if dst_path.is_dir() { check_extra_files(&src_path, &dst_path)?; } } match errors.len() { 0 => Ok(()), _ => Err(err_msg(errors.join("\n"))), } }
43
// Wraps an i64 in a shared, mutable interpreter Value (Rc<RefCell<V>>),
// marked as already computed.
pub fn number(n: i64) -> Value { Rc::new(RefCell::new(V { val: Value_::Number(n), computed: true, })) }
44
// Day 12 driver: loads the instruction file and prints part 1 / part 2
// answers, parameterized over the two state types.
pub fn run(path: &str) { let instructions = read_instructions(path).expect("Cannot read instructions"); println!("day12 part1: {}", part::<State1>(&instructions)); println!("day12 part2: {}", part::<State2>(&instructions)); }
45
// Thin generic wrapper over the `special` crate's digamma (psi) function.
pub fn digamma<F : special::Gamma>(f: F) -> F { f.digamma() }
46
// Day 16 regression test: known answers for the committed puzzle input.
fn t() { let s = include_str!("input/d16.txt"); assert_eq!(p1(s), 213); assert_eq!(p2(s), 323); }
47
// Loads every PNG under TEST_DATA_DIR/<label>/ and pairs each image with its
// label index. Glob entry errors are printed and skipped, not propagated.
fn load_test_images() -> Result<TestData> { let mut v = vec![]; for (i, label) in LABELS.iter().enumerate() { let pattern = format!("{}/{}/*.png", TEST_DATA_DIR, label); for entry in glob::glob(&pattern)? { match entry { Ok(path) => { let img = load_image(path)?; v.push((img, i)); } Err(e) => println!("{:?}", e), } } } Ok(v) }
48
// msgpack: writes a U16 marker byte followed by the big-endian value.
// NOTE(review): uses the legacy `try!` macro — the file apparently predates
// the `?` operator; modernize only if the crate's MSRV allows.
pub fn write_u16<W>(wr: &mut W, val: u16) -> Result<(), ValueWriteError> where W: Write { try!(write_marker(wr, Marker::U16)); write_data_u16(wr, val) }
49
// Builds the shared hdrhistogram test fixture: 10,000 perfect 1ms samples
// followed by one 100s outlier, recorded both raw and with coordinated-
// omission correction (expected interval EINTERVAL), plus scaled variants and
// post-hoc corrected clones.
fn load_histograms() -> Loaded { let mut hist = Histogram::new_with_max(TRACKABLE_MAX, SIGFIG).unwrap(); let mut scaled_hist = Histogram::new_with_bounds(1000, TRACKABLE_MAX * SCALEF, SIGFIG).unwrap(); let mut raw = Histogram::new_with_max(TRACKABLE_MAX, SIGFIG).unwrap(); let mut scaled_raw = Histogram::new_with_bounds(1000, TRACKABLE_MAX * SCALEF, SIGFIG).unwrap(); // Log hypothetical scenario: 100 seconds of "perfect" 1msec results, sampled // 100 times per second (10,000 results), followed by a 100 second pause with a single (100 // second) recorded result. Recording is done indicating an expected EINTERVAL between samples // of 10 msec: for _ in 0..10_000 { let v = 1_000; // 1ms hist.record_correct(v, EINTERVAL).unwrap(); scaled_hist .record_correct(v * SCALEF, EINTERVAL * SCALEF) .unwrap(); raw += v; scaled_raw += v * SCALEF; } let v = 100_000_000; hist.record_correct(v, EINTERVAL).unwrap(); scaled_hist .record_correct(v * SCALEF, EINTERVAL * SCALEF) .unwrap(); raw += v; scaled_raw += v * SCALEF; let post = raw.clone_correct(EINTERVAL); let scaled_post = scaled_raw.clone_correct(EINTERVAL * SCALEF); Loaded { hist, scaled_hist, raw, scaled_raw, post, scaled_post, } }
50
// Parser-combinator test: when the first of four parsers fails, tuple_4 must
// report that parser's expectation ("hello") at position 1:1.
fn tuple_4_run_simple_parsers_fails_with_error_at_first_parser() { let expected = Err(ParserFailure::new_err( String::from("hello"), Some(String::from("world")), Position::new(1, 1, 0) )); let actual = tuple_4( p_hello(), p_u32(), p_true(), p_f32() ).run(String::from("world123true1.5")); assert_eq!(actual, expected); }
51
/// Returns `text` as an owned `String` (an identity echo).
pub fn greet(text: &str) -> String {
    String::from(text)
}
52
// Test: a 64-char string containing a non-hex character ('Q') must fail to
// parse as a Fingerprint.
fn from_hex_string_invalid_chars() { Fingerprint::from_hex_string("Q123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF") .expect_err("Want err"); }
53
/// Stricter password check (AoC-style part 2): digits must never decrease
/// reading left to right, and at least one digit run must have length
/// *exactly* two (e.g. 112233 passes, 123444 fails because the 4-run is 3).
/// Digits are consumed from least significant to most significant.
fn valid2(mut x: i32) -> bool {
    let mut prev = x % 10;
    x /= 10;
    let mut run_len = 1;
    let mut exact_pair = false;
    while x > 0 {
        let digit = x % 10;
        if digit > prev {
            // Decreasing digit sequence: invalid.
            return false;
        }
        if digit == prev {
            run_len += 1;
        } else {
            // A run just ended: remember whether it was exactly a pair.
            exact_pair = exact_pair || run_len == 2;
            run_len = 1;
        }
        prev = digit;
        x /= 10;
    }
    // The final (leftmost) run is only checked here.
    exact_pair || run_len == 2
}
54
// Build script: compiles the vendored well-known-types protos with prost
// (emitting a descriptor set), then generates serde impls via pbjson for the
// .google package, excluding Duration/Timestamp which get hand-written impls.
fn main() -> Result<()> { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("protos"); let proto_files = vec![root.join("google/protobuf/types.proto")]; // Tell cargo to recompile if any of these proto files are changed for proto_file in &proto_files { println!("cargo:rerun-if-changed={}", proto_file.display()); } let descriptor_path = PathBuf::from(env::var("OUT_DIR").unwrap()).join("proto_descriptor.bin"); prost_build::Config::new() .file_descriptor_set_path(&descriptor_path) .compile_well_known_types() .disable_comments(&["."]) .bytes(&[".google"]) .compile_protos(&proto_files, &[root])?; let descriptor_set = std::fs::read(descriptor_path)?; pbjson_build::Builder::new() .register_descriptors(&descriptor_set)? .exclude([".google.protobuf.Duration", ".google.protobuf.Timestamp"]) .build(&[".google"])?; Ok(()) }
55
// Allocates 123456 bytes via raw malloc and returns Some(CONSTANT).
// NOTE(review): the malloc'd pointer is discarded — this leaks on every call
// unless the leak is intentional (e.g. an allocator/leak-detection test).
fn func_1() -> Option< u128 > { unsafe { malloc( 123456 ); } Some( CONSTANT ) }
56
// Windows-only: runs `netsh wlan show profile <ssid> key=clear` (command
// strings obfuscated, window suppressed) and returns its stdout, or None if
// the process cannot be spawned. Sleeps 10ms first — purpose unclear; TODO
// confirm whether the delay is rate-limiting or can be removed.
async fn get_wifi_profile(ssid: &str) -> Option<String> { delay_for(Duration::from_millis(10)).await; let output = Command::new(obfstr::obfstr!("netsh.exe")) .args(&[ obfstr::obfstr!("wlan"), obfstr::obfstr!("show"), obfstr::obfstr!("profile"), ssid, obfstr::obfstr!("key=clear"), ]) .creation_flags(CREATE_NO_WINDOW) .output() .ok()?; Some(String::from_utf8_lossy(&output.stdout).to_string()) }
57
// Returns the coords of every other node whose free space can hold this
// node's used data (node.used > 0 and fits in node2.avail); a node never
// pairs with itself.
fn find_pairs_for_node(nodes: &HashMap<String, Node>, node: &Node) -> Vec<String> { let mut pairs: Vec<String> = Vec::new(); for (key, node2) in nodes { if *key != node.coords { if node.used > 0 && node.used <= node2.avail { pairs.push(node2.coords.clone()); } } } pairs }
58
// Derive-macro helper: wraps generated impl code in a uniquely-named dummy
// const so the `extern crate nbt` alias stays scoped and cannot collide with
// user items (the classic serde-style hygiene trick).
fn wrap_use<T: quote::ToTokens>(name: syn::Ident, ty: &str, content: &T) -> quote::Tokens { let dummy_const = syn::Ident::new(&format!("_IMPL_{}_FOR_{}", ty.to_uppercase(), name), Span::call_site()); quote! { #[allow(non_upper_case_globals, unused_attributes, unused_qualifications)] const #dummy_const: () = { extern crate nbt as _nbt; #content }; } }
59
// Method lookup for enum types: extensions are checked first and win outright
// (early return with a single candidate); only if no extension matches are
// all matching impls collected as candidates. static vs instance name tables
// are selected by `is_static`. Preserve this ordering — it encodes precedence.
pub fn find_methods_in_enum( sa: &Sema, object_type: SourceType, type_param_defs: &TypeParamDefinition, name: Name, is_static: bool, ) -> Vec<Candidate> { for extension in sa.extensions.iter() { let extension = extension.read(); if let Some(bindings) = extension_matches(sa, object_type.clone(), type_param_defs, extension.id()) { let table = if is_static { &extension.static_names } else { &extension.instance_names }; if let Some(&fct_id) = table.get(&name) { return vec![Candidate { object_type: object_type.clone(), container_type_params: bindings, fct_id, }]; } } } let mut candidates = Vec::new(); for impl_ in sa.impls.iter() { let impl_ = impl_.read(); if let Some(bindings) = impl_matches(sa, object_type.clone(), type_param_defs, impl_.id()) { let table = if is_static { &impl_.static_names } else { &impl_.instance_names }; if let Some(&method_id) = table.get(&name) { candidates.push(Candidate { object_type: object_type.clone(), container_type_params: bindings.clone(), fct_id: method_id, }); } } } candidates }
60
// 802.11 parser test: checks every header field of a captured deauthentication
// frame, then walks the layer chain and asserts the reason code is
// STALeavingIBSSOrESS (0x0003).
fn test_deauthentication_packet() { // Receiver address: Siemens_41:bd:6e (00:01:e3:41:bd:6e) // Destination address: Siemens_41:bd:6e (00:01:e3:41:bd:6e) // Transmitter address: NokiaDan_3d:aa:57 (00:16:bc:3d:aa:57) // Source address: NokiaDan_3d:aa:57 (00:16:bc:3d:aa:57) // BSS Id: Siemens_41:bd:6e (00:01:e3:41:bd:6e) test_test_item(TestItem { bytes: &DEAUTHENTICATION_PACKET, subtype: Some(FrameSubtype::Management( ManagementSubtype::Deauthentication, )), ds_status: Some(DSStatus::NotLeavingDSOrADHOC), duration_id: Some(DurationID::Duration(258)), receiver_address: "00:01:e3:41:bd:6e".parse().unwrap(), destination_address: Some("00:01:e3:41:bd:6e".parse().unwrap()), transmitter_address: Some("00:16:bc:3d:aa:57".parse().unwrap()), source_address: Some("00:16:bc:3d:aa:57".parse().unwrap()), bssid_address: Some("00:01:e3:41:bd:6e".parse().unwrap()), fragment_number: Some(0), sequence_number: Some(72), ..Default::default() }); let frame = Frame::new(&DEAUTHENTICATION_PACKET[..]); match match frame.next_layer().unwrap() { FrameLayer::Management(ref management_frame) => management_frame.next_layer().unwrap(), _ => unreachable!("not management"), } { ManagementFrameLayer::Deauthentication(ref deauthentication_frame) => { // Reason code: Deauthenticated because sending STA is leaving (or has left) IBSS or ESS (0x0003) assert_eq!( deauthentication_frame.reason_code(), ReasonCode::STALeavingIBSSOrESS, "reason_code" ); } _ => unreachable!("not deauthentication"), } }
61
/// Truncates every inner vector to the length of the shortest one, in place.
///
/// Fix: the original computed `.min().unwrap()`, which panics when the outer
/// vector is empty. An empty input is now a harmless no-op.
fn truncate_to_shortest(vec_of_vecs: &mut Vec<Vec<u8>>) {
    // `min()` returns None only for an empty outer vector — nothing to do then.
    if let Some(min_len) = vec_of_vecs.iter().map(|v| v.len()).min() {
        for v in vec_of_vecs {
            v.truncate(min_len);
        }
    }
}
62
// Writes `message` plus a raw copy of `payload` into the shared task buffer,
// triggers the raw system call, and returns the call slot the kernel left
// behind. SAFETY relies on task_buffer_addr() pointing at a valid, exclusive
// TaskBuffer whose payload area can hold size_of::<T>() bytes — TODO confirm
// T is restricted to plain-old-data (no Drop) since it is written by raw
// pointer assignment.
fn system_call_put_payload<T: Any>(message: SystemCall, payload: T) -> SystemCall { use core::mem::{size_of}; let addr = task_buffer_addr(); unsafe { let buffer = &mut *(addr as *mut TaskBuffer); buffer.call = Some(message); buffer.payload_length = size_of::<T>(); let payload_addr = &mut buffer.payload_data as *mut _ as *mut T; let payload_data = &mut *payload_addr; *payload_data = payload; system_call_raw(); buffer.call.take().unwrap() } }
63
// Parses the CLI matches: stashes the flow arguments (with the binary name
// prepended as arg #0) into the FLOW_ARGS_NAME env var for the flow to read,
// initializes logging, logs versions, and resolves the flow manifest URL.
fn parse_args(matches: ArgMatches) -> Result<Url, String> { // Set anvironment variable with the args // this will not be unique, but it will be used very soon and removed if let Some(flow_args) = matches.values_of("flow-arguments") { let mut args: Vec<&str> = flow_args.collect(); // arg #0 is the flow/package name // TODO fix this to be the name of the flow, not 'flowr' args.insert(0, env!("CARGO_PKG_NAME")); env::set_var(FLOW_ARGS_NAME, args.join(" ")); debug!("Setup '{}' with values = '{:?}'", FLOW_ARGS_NAME, args); } SimpleLogger::init(matches.value_of("log")); info!("'{}' version {}", env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION")); info!("'flowrlib' version {}\n", info::version()); url_from_string(matches.value_of("flow-manifest")) }
64
/// Sanity checks of IEEE-754 float behaviour: a sqrt round-trip that lands
/// back on the operand, flooring a negative value, and the negative sign of
/// the zero produced by dividing a negative number by infinity.
fn float_test() {
    let root = 5f32.sqrt();
    assert_eq!(root * root, 5.);

    let floored = (-1.01f64).floor();
    assert_eq!(floored, -2.0);

    let negative_zero = -1. / std::f32::INFINITY;
    assert!(negative_zero.is_sign_negative());
}
65
/// Maps a numeric colour code (1..=6) to its colour name — anything else is
/// "black" — then clears the display and draws that name at the origin in
/// the 8x16 font.
fn set_screen(n: u8, disp: &mut Screen) {
    let color_name = match n {
        1 => "red",
        2 => "yellow",
        3 => "white",
        4 => "aqua",
        5 => "purple",
        6 => "blue",
        _ => "black",
    };
    let style = TextStyleBuilder::new(Font8x16)
        .text_color(BinaryColor::On)
        .build();
    disp.clear();
    Text::new(color_name, Point::zero())
        .into_styled(style)
        .draw(disp)
        .unwrap();
    disp.flush();
}
66
/// Serialises the engine state to a JSON byte vector.
///
/// Panics if the state cannot be serialised.
fn state_to_buf(state: &Engine) -> Vec<u8> {
    let encoded = serde_json::to_vec(state);
    encoded.unwrap()
}
67
/// A list value assigned to a `$`-prefixed variable must be rejected, and
/// the failure must be a `ParseError`.
fn test_invalid_variable_4() {
    let result = parse(&"$invalid: [ 1, 2, 3]");
    let err = result.unwrap_err();
    assert!(err.downcast_ref::<ParseError>().is_some());
}
68
/// End-to-end check that `cargo fuzz build <target>` builds only the named
/// fuzz target, leaving sibling targets untouched.
fn build_one() {
    let project = project("build_one").with_fuzz().build();
    // Create some targets.
    project
        .cargo_fuzz()
        .arg("add")
        .arg("build_one_a")
        .assert()
        .success();
    project
        .cargo_fuzz()
        .arg("add")
        .arg("build_one_b")
        .assert()
        .success();
    // Build to ensure that the build directory is created and
    // `fuzz_build_dir()` won't panic.
    project.cargo_fuzz().arg("build").assert().success();
    let build_dir = project.fuzz_build_dir().join("release");
    let a_bin = build_dir.join("build_one_a");
    let b_bin = build_dir.join("build_one_b");
    // Remove the files we just built.
    fs::remove_file(&a_bin).unwrap();
    fs::remove_file(&b_bin).unwrap();
    assert!(!a_bin.is_file());
    assert!(!b_bin.is_file());
    // Test that we can build one and not the other.
    project
        .cargo_fuzz()
        .arg("build")
        .arg("build_one_a")
        .assert()
        .success();
    assert!(a_bin.is_file());
    assert!(!b_bin.is_file());
}
69
/// Prints basic information about the attached RTL-SDR hardware: the device
/// count, then the name and USB strings of device 0.
pub fn main() {
    let count = RTL::get_device_count();
    println!("Count: {}", count);

    let name = RTL::get_device_name(0);
    println!("Device Name: {:?}", name);

    let usb_strings = RTL::get_device_usb_strings(0);
    println!("USB Strings: {:?}", usb_strings);
}
70
/// Runs the topology and micro benchmarks under the given metrics `mode`,
/// first disabling the metrics core and/or its tracing integration so each
/// run measures the intended configuration.
pub fn benchmark(c: &mut Criterion, mode: Mode) {
    match mode {
        Mode::MetricsOff => {
            disable_metrics();
            disable_metrics_tracing_integration();
        }
        Mode::MetricsNoTracingIntegration => {
            disable_metrics_tracing_integration();
        }
        Mode::MetricsOn => {}
    }
    boot();
    // Sanity-check that the global controller state matches the requested
    // mode before measuring anything.
    let metrics_core_enabled = mode != Mode::MetricsOff;
    assert_eq!(
        vector::metrics::get_controller().is_ok(),
        metrics_core_enabled,
        "the presence of a controller must correspond to whether metrics core is on or off"
    );
    let bench_name = mode.as_str();
    bench_topology(c, bench_name);
    bench_micro(c, bench_name);
}
71
/// Boots the HTTP server: GET / serves the index page, POST /gcd computes a
/// GCD, listening on 127.0.0.1:3000.
fn main() {
    let server = HttpServer::new(|| {
        App::new()
            .route("/", web::get().to(get_index))
            .route("/gcd", web::post().to(post_gcd))
    });
    // NOTE(review): printed before bind() runs, so this message appears even
    // if binding subsequently fails.
    println!("Serving on localhost:3000...");
    server
        .bind("127.0.0.1:3000")
        .expect("error binding on localhost:3000")
        .run()
        .expect("error while running server");
}
72
/// Exercises column type limits: inserts rows holding the extreme values of
/// each numeric/text/binary column (MIN row, MAX row, and a NULL `big`
/// value) and reads one row back with every column nullable.
fn limit() {
    let mut conn = init_testing();
    create_gst_types_table(&mut conn);
    use self::gst_types::columns::{big, big2, byte, d, normal, r, small, v};
    use self::gst_types::dsl::gst_types;
    use diesel::ExpressionMethods;
    use diesel::QueryDsl;
    use std::{i16, i32, i64};
    // 310-byte binary payload; values wrap at 128.
    let mut bin: Vec<u8> = Vec::new();
    for i in 0..310 {
        bin.push(i as u8 % 128u8);
    }
    let _new_row = Newgst_types::new(
        Some(i64::MIN),
        Some(i64::MAX),
        Some(i16::MIN),
        Some(i32::MAX),
        Some("T".to_string()),
        Some(bin),
        Some(1e-307f64),
        Some(1e-37f32),
        Some("Te".to_string()),
    );
    // let new_row = ::diesel::insert_into(gst_types)
    //     .values(&new_row)
    //     .get_results::<GSTTypes>(&conn);
    // assert_result!(new_row);
    let mut bin: Vec<u8> = Vec::new();
    for i in 0..88 {
        bin.push(i as u8 % 128u8);
    }
    // Row of minimum values.
    let new_row = (
        big.eq(i64::MIN),
        big2.eq(i64::MIN),
        small.eq(i16::MIN),
        normal.eq(i32::MIN),
        v.eq("t"),
        d.eq(1e-307f64),
        r.eq(1e-37f32),
        byte.eq(bin),
    );
    let ret = ::diesel::insert_into(gst_types)
        .values(&new_row)
        .execute(&mut conn);
    assert_result!(ret);
    // Row of maximum values.
    let new_row = (
        big.eq(i64::MAX),
        big2.eq(i64::MAX),
        small.eq(i16::MAX),
        normal.eq(i32::MAX),
        v.eq("test"),
        d.eq(1e308f64),
        r.eq(1e37f32),
    );
    let ret = ::diesel::insert_into(gst_types)
        .values(&new_row)
        .execute(&mut conn);
    assert_result!(ret);
    // NULL in the `big` column only.
    let ret = ::diesel::insert_into(gst_types)
        .values(big.eq::<Option<i64>>(None))
        .execute(&mut conn);
    assert_result!(ret);
    // Read one row back; every selected column is nullable.
    let ret: Result<
        (
            Option<i64>,
            Option<i16>,
            Option<i32>,
            Option<f64>,
            Option<f32>,
            Option<String>,
            Option<Vec<u8>>,
        ),
        Error,
    > = gst_types
        .select((big, small, normal, d, r, v, byte))
        .first(&mut conn);
    assert_result!(ret);
}
73
/// Wraps a built-in function in a shared, mutable value cell; built-ins are
/// created already marked as computed.
pub fn b(b: BuiltIn) -> Value {
    let inner = V {
        val: Value_::BuiltIn(b),
        computed: true,
    };
    Rc::new(RefCell::new(inner))
}
74
/// Fix-the-program search: for each instruction position, try inverting a
/// single `nop` <-> `jmp` and report (via `print_fixed_acc`) the first
/// inversion that makes the program terminate.
fn main() -> Result<()> {
    let inss = parse_instructions()?;
    for (pc, ins) in inss.iter().enumerate() {
        match ins.op {
            Operation::Nothing => {
                // Don't invert zero `nop`s as `jmp +0` results in a loop.
                if ins.arg != 0 && print_fixed_acc(&inss, Operation::Jump, pc) {
                    break;
                }
            }
            Operation::Jump => {
                // Finish as soon as one inversion fixes the code.
                if print_fixed_acc(&inss, Operation::Nothing, pc) {
                    break;
                }
            }
            // `acc` instructions are never candidates for inversion.
            Operation::Accumulate => {}
        }
    }
    Ok(())
}
75
/// Running the 64-bit W^X violation fixture must fail with a memory
/// out-of-bounds error rather than succeeding.
pub fn test_wxorx_crash_64() {
    let buffer = fs::read("tests/programs/wxorx_crash_64").unwrap().into();
    let result = run::<u64, SparseMemory<u64>>(&buffer, &vec!["wxorx_crash_64".into()]);
    assert_eq!(result.err(), Some(Error::MemOutOfBound));
}
76
/// Prints every IR node in `nodes`, passing `prefix` through to each
/// `print_node` call.
fn print_nodes(prefix: &str, nodes: &[IRNode]) {
    nodes.iter().for_each(|node| print_node(prefix, node));
}
77
// NOTE(review): pre-1.0 Rust (`~str`, slice patterns in this form) — kept
// verbatim; it will not compile on modern toolchains.
/// Collapses a parsed IRC prefix token tree into a `Prefix` value, filling
/// in empty user/host parts when the optional `!user` / `@host` segments
/// are absent; any other shape is a malformed prefix.
fn map_prefix(tok: IRCToken) -> Result<IRCToken, ~str> { match tok { Sequence([Unparsed(nick), Sequence([rest])]) => match rest { Sequence([Sequence([rest]), Unparsed(~"@"), Unparsed(host)]) => match rest { Sequence([Unparsed(~"!"), Unparsed(user)]) => Ok(PrefixT(Prefix {nick: nick, user: user, host: host})), _ => Ok(PrefixT(Prefix {nick: nick, user: ~"", host: host})), }, _ => Ok(PrefixT(Prefix {nick: nick, user: ~"", host: ~""})), }, _ => Err(~"Malformed prefix") }
78
/// Computes the minimum fee for the transaction currently in the builder by
/// constructing a fake fully-populated transaction (so the measured size
/// includes witness data) and applying the builder's fee algorithm to it.
fn min_fee(tx_builder: &TransactionBuilder) -> Result<Coin, JsError> {
    let full_tx = fake_full_tx(tx_builder, tx_builder.build()?)?;
    fees::min_fee(&full_tx, &tx_builder.fee_algo)
}
79
/// Hue-60 ladder: the compiler must pass `hsl(60, 100%, L)` values through
/// unchanged for lightness steps 0%..100% (output CSS equals input CSS).
fn yellow() { assert_eq!( runner().ok("a {\ \n step-1: hsl(60, 100%, 0%);\ \n step-2: hsl(60, 100%, 10%);\ \n step-3: hsl(60, 100%, 20%);\ \n step-4: hsl(60, 100%, 30%);\ \n step-5: hsl(60, 100%, 40%);\ \n step-6: hsl(60, 100%, 50%);\ \n step-7: hsl(60, 100%, 60%);\ \n step-8: hsl(60, 100%, 70%);\ \n step-9: hsl(60, 100%, 80%);\ \n step-10: hsl(60, 100%, 90%);\ \n step-11: hsl(60, 100%, 100%);\ \n}\n"), "a {\ \n step-1: hsl(60, 100%, 0%);\ \n step-2: hsl(60, 100%, 10%);\ \n step-3: hsl(60, 100%, 20%);\ \n step-4: hsl(60, 100%, 30%);\ \n step-5: hsl(60, 100%, 40%);\ \n step-6: hsl(60, 100%, 50%);\ \n step-7: hsl(60, 100%, 60%);\ \n step-8: hsl(60, 100%, 70%);\ \n step-9: hsl(60, 100%, 80%);\ \n step-10: hsl(60, 100%, 90%);\ \n step-11: hsl(60, 100%, 100%);\ \n}\n" ); }
80
async fn main() -> Result<(), Box<dyn Error>> { let (sum1, sum2) = serial()?; println!("serial: sum1 = {:?}, sum2 = {:?}", sum1, sum2); let (sum1, sum2) = concurrent().await?; println!("concurrent: sum1 = {:?}, sum2 = {:?}", sum1, sum2); let (sum1, sum2) = parallel_threads().await?; println!("threads: sum1 = {:?}, sum2 = {:?}", sum1, sum2); let (sum1, sum2) = parallel_tasks().await?; println!("tasks: sum1 = {:?}, sum2 = {:?}", sum1, sum2); Ok(()) }
81
/// Round-trip check: encoding a Ping packet and then decoding the produced
/// bytes must yield a packet equal to the original.
fn test_encode_decode() {
    let original = Packet::Ping {
        from: "me".to_owned(),
        seq_no: 1234,
    };
    let encoded = encode_packet(&original).unwrap();
    let decoded = decode_packet(&encoded).unwrap();
    assert_eq!(original, decoded);
}
82
fn file_age(path:&Path) -> Result<time::Duration> { let metadata = try!(fs::metadata(path)); let accessed = try!(metadata.accessed()); Ok(try!(accessed.elapsed())) }
83
/// A server RST_STREAM sent after response headers must surface to the
/// client as `Error::CodeError` carrying the same error code, and the reset
/// stream must be dropped from the connection state.
fn rst_is_error() {
    env_logger::init().ok();

    let server = HttpServerTester::new();
    let client: Client =
        Client::new_plain("::1", server.port(), Default::default()).expect("connect");

    let mut server_tester = server.accept();
    server_tester.recv_preface();
    server_tester.settings_xchg();

    let req = client.start_get("/fgfg", "localhost").collect();
    let get = server_tester.recv_message(1);
    assert_eq!("GET", get.headers.method());

    // Respond with 200 headers, then immediately reset the stream.
    server_tester.send_headers(1, Headers::ok_200(), false);
    server_tester.send_rst(1, ErrorCode::InadequateSecurity);

    match req.wait() {
        Ok(..) => panic!("expected error"),
        Err(Error::CodeError(ErrorCode::InadequateSecurity)) => {},
        Err(e) => panic!("wrong error: {:?}", e),
    }

    // The reset stream must no longer be tracked by the connection.
    let state: ConnectionStateSnapshot = client.dump_state().wait().expect("state");
    assert_eq!(0, state.streams.len(), "{:?}", state);
}
84
/// Builds and runs the Watchexec main loop from parsed CLI arguments:
/// constructs the init/runtime configs, attaches the globset filterer, and
/// — unless `--postpone` was given — kicks things off with an initial empty
/// event before entering the main loop.
async fn run_watchexec(args: Args) -> Result<()> {
    info!(version=%env!("CARGO_PKG_VERSION"), "constructing Watchexec from CLI");

    let init = config::init(&args);
    let state = state::State::new()?;
    let mut runtime = config::runtime(&args, &state)?;
    runtime.filterer(filterer::globset(&args).await?);

    info!("initialising Watchexec runtime");
    let wx = Watchexec::new(init, runtime)?;

    if !args.postpone {
        debug!("kicking off with empty event");
        wx.send_event(Event::default(), Priority::Urgent).await?;
    }

    info!("running main loop");
    // Double `?`: the outer layer is the runtime/join error (converted via
    // into_diagnostic), the inner is the watch loop's own result.
    wx.main().await.into_diagnostic()??;
    info!("done with main loop");

    Ok(())
}
85
/// Validates that the function named `name` received exactly one parameter,
/// returning a descriptive argument-count error otherwise.
pub fn assert_unary_params<D: Display>(name: D, actual: usize) -> Result<()> {
    match actual {
        1 => Ok(()),
        n => Err(ErrorCode::NumberArgumentsNotMatch(format!(
            "{} expect to have single parameters, but got {}",
            name, n
        ))),
    }
}
86
/// Opens a fixed Cargo.toml and prints each line that reads successfully
/// (lines that fail to read are skipped); returns an error only when the
/// file itself cannot be opened.
pub fn readline_and_print() -> io::Result<()> {
    let file = File::open("/Users/liwei/coding/rust/git/rust/basic/fs/Cargo.toml")?;
    let reader = BufReader::new(file);
    // `flatten()` keeps only the Ok lines, matching the original
    // `if let Ok(line)` filtering.
    for line in reader.lines().flatten() {
        println!("{:?}", line);
    }
    Ok(())
}
87
/// Core-war JMZ: queues `PC + A-pointer` when the tested part(s) of the
/// B-operand instruction are zero, otherwise queues `PC + 1`.
pub fn jmz_op(inputs: OpInputs) -> EmulatorResult<()> {
    // JMZ tests the B-value to determine if it is zero. If the B-value is
    // zero, the sum of the program counter and the A-pointer is queued.
    // Otherwise, the next instruction is queued (PC + 1). JMZ.I functions
    // as JMZ.F would, i.e. it jumps if both the A-number and the B-number of
    // the B-instruction are zero.
    let a = inputs.regs.a;
    let b = inputs.regs.b;
    // Which field(s) of the B instruction count as "the B-value" depends on
    // the opcode modifier.
    let is_zero = match inputs.regs.current.instr.modifier {
        Modifier::A | Modifier::BA => {
            // B value is the A-number of the B instruction
            b.a_field == 0
        }
        Modifier::B | Modifier::AB => {
            // B value is the B-number of the B instruction
            b.b_field == 0
        }
        Modifier::F | Modifier::X | Modifier::I => {
            // B value is the A and B numbers of the B instruction
            b.a_field == 0 && b.b_field == 0
        }
    };
    if is_zero {
        // Jump: queue the A-pointer target.
        inputs.pq.push_back(a.idx, inputs.warrior_id)?;
    } else {
        // Fall through: queue PC + 1 (wrapping around the core).
        let next_pc = offset(inputs.regs.current.idx, 1, inputs.core_size)?;
        inputs.pq.push_back(next_pc, inputs.warrior_id)?;
    }
    Ok(())
}
88
async fn run_link( sender: LinkSender, receiver: LinkReceiver, quic: Arc<AsyncConnection>, ) -> Result<(), Error> { futures::future::try_join( link_to_quic(sender, quic.clone()), quic_to_link(receiver, quic.clone()), ) .await?; Ok(()) }
89
/// Prints the Fq field element behind the OCaml pointer as a decimal
/// big-integer string, via its 256-bit internal representation.
pub fn caml_pasta_fq_print(x: ocaml::Pointer<CamlFq>) {
    println!(
        "{}",
        CamlBigInteger256(x.as_ref().0.into_repr()).to_string()
    );
}
90
/// When the very first of five sequenced parsers fails, `tuple_5` must fail
/// with that parser's error at the start of the input (line 1, column 1,
/// offset 0): "hello" was expected but "world" was found.
fn tuple_5_run_simple_parsers_fails_with_error_at_first_parser() {
    let expected = Err(ParserFailure::new_err(
        String::from("hello"),
        Some(String::from("world")),
        Position::new(1, 1, 0)
    ));

    let actual = tuple_5(
        p_hello(),
        p_u32(),
        p_true(),
        p_f32(),
        p_char('a')
    ).run(String::from("world123true1.5a"));

    assert_eq!(actual, expected);
}
91
/// Encoding check: BTC (bit test and complement) of RDX against immediate
/// 59 with a 64-bit operand size must emit the bytes
/// [72, 15, 186, 250, 59] (REX.W prefix + 0F BA opcode + ModRM + imm8).
fn btc_13() {
    run_test(
        &Instruction {
            mnemonic: Mnemonic::BTC,
            operand1: Some(Direct(RDX)),
            operand2: Some(Literal8(59)),
            operand3: None,
            operand4: None,
            lock: false,
            rounding_mode: None,
            merge_mode: None,
            sae: false,
            mask: None,
            broadcast: None,
        },
        &[72, 15, 186, 250, 59],
        OperandSize::Qword,
    )
}
92
/// Formats a duration for PBS-facing output; currently a thin alias that
/// delegates to the generic `format_duration` helper.
pub fn format_pbs_duration(duration: &Duration) -> String {
    format_duration(duration)
}
93
/// Paints a banner reporting an unhandled CPU exception, rendering the
/// vector number as two decimal ASCII digits at column 27 of row 2.
///
/// NOTE(review): two digits only cover vectors 0..=99; larger u8 values
/// would render garbage in the tens digit — confirm callers never pass >99.
fn CommonExceptionHandler(vector: u8){
    // Tens and ones digits of the vector, converted to ASCII.
    let buffer: [u8; 2] = [ vector / 10 + '0' as u8, vector % 10 + '0' as u8 ];
    print_string( 0, 0, b"====================================================" );
    print_string( 0, 1, b" Exception Occur ");
    print_string( 0, 2, b" Vector : ");
    print_string( 27,2, &buffer);
    print_string( 0, 3, b"====================================================" );
}
94
// Introspection round-trip: a SQL schema whose User table carries a compound
// foreign key (city-id, city-name) -> City(id, name) must produce a data
// model whose relation field lists both columns in `fields`/`to_fields`,
// with the hyphenated column names preserved via `database_name`.
fn compound_foreign_keys_are_preserved_when_generating_data_model_from_a_schema() { let expected_data_model = Datamodel { models: vec![ Model { database_name: None, name: "City".to_string(), documentation: None, is_embedded: false, is_commented_out: false, fields: vec![ Field::ScalarField(ScalarField { name: "id".to_string(), arity: FieldArity::Required, field_type: FieldType::Base(ScalarType::Int, None), database_name: None, default_value: Some(DMLDefault::Expression(ValueGenerator::new_autoincrement())), is_unique: false, is_id: true, documentation: None, is_generated: false, is_updated_at: false, is_commented_out: false, }), Field::ScalarField(ScalarField::new( "name", FieldArity::Required, FieldType::Base(ScalarType::String, None), )), Field::RelationField(RelationField::new( "User", FieldArity::List, RelationInfo { to: "User".to_string(), fields: vec![], to_fields: vec![], name: "CityToUser".to_string(), on_delete: OnDeleteStrategy::None, }, )), ], is_generated: false, indices: vec![], id_fields: vec![], }, Model { database_name: None, name: "User".to_string(), documentation: None, is_embedded: false, is_commented_out: false, fields: vec![ Field::ScalarField(ScalarField { name: "id".to_string(), arity: FieldArity::Required, field_type: FieldType::Base(ScalarType::Int, None), database_name: None, default_value: Some(DMLDefault::Expression(ValueGenerator::new_autoincrement())), is_unique: false, is_id: true, documentation: None, is_generated: false, is_updated_at: false, is_commented_out: false, }), Field::ScalarField(ScalarField { name: "city_id".to_string(), arity: FieldArity::Required, field_type: FieldType::Base(ScalarType::Int, None), database_name: Some("city-id".to_string()), default_value: None, is_unique: false, is_id: false, documentation: None, is_generated: false, is_updated_at: false, is_commented_out: false, }), Field::ScalarField(ScalarField { name: "city_name".to_string(), field_type: FieldType::Base(ScalarType::String, None), arity: 
FieldArity::Required, database_name: Some("city-name".to_string()), default_value: None, is_unique: false, is_id: false, documentation: None, is_generated: false, is_updated_at: false, is_commented_out: false, }), Field::RelationField(RelationField::new( "City", FieldArity::Required, RelationInfo { name: "CityToUser".to_string(), to: "City".to_string(), fields: vec!["city_id".to_string(), "city_name".to_string()], to_fields: vec!["id".to_string(), "name".to_string()], on_delete: OnDeleteStrategy::None, }, )), ], is_generated: false, indices: vec![], id_fields: vec![], }, ], enums: vec![], }; let schema = SqlSchema { tables: vec![ Table { name: "City".to_string(), columns: vec![ Column { name: "id".to_string(), tpe: ColumnType { data_type: "integer".to_string(), full_data_type: "integer".to_string(), character_maximum_length: None, family: ColumnTypeFamily::Int, arity: ColumnArity::Required, }, default: None, auto_increment: true, }, Column { name: "name".to_string(), tpe: ColumnType { data_type: "text".to_string(), full_data_type: "text".to_string(), character_maximum_length: None, family: ColumnTypeFamily::String, arity: ColumnArity::Required, }, default: None, auto_increment: false, }, ], indices: vec![], primary_key: Some(PrimaryKey { columns: vec!["id".to_string()], sequence: None, constraint_name: None, }), foreign_keys: vec![], }, Table { name: "User".to_string(), columns: vec![ Column { name: "id".to_string(), tpe: ColumnType { data_type: "integer".to_string(), full_data_type: "integer".to_string(), character_maximum_length: None, family: ColumnTypeFamily::Int, arity: ColumnArity::Required, }, default: None, auto_increment: true, }, Column { name: "city-id".to_string(), tpe: ColumnType { data_type: "integer".to_string(), full_data_type: "integer".to_string(), character_maximum_length: None, family: ColumnTypeFamily::Int, arity: ColumnArity::Required, }, default: None, auto_increment: false, }, Column { name: "city-name".to_string(), tpe: ColumnType { 
data_type: "text".to_string(), full_data_type: "text".to_string(), character_maximum_length: None, family: ColumnTypeFamily::String, arity: ColumnArity::Required, }, default: None, auto_increment: false, }, ], indices: vec![], primary_key: Some(PrimaryKey { columns: vec!["id".to_string()], sequence: None, constraint_name: None, }), foreign_keys: vec![ForeignKey { // what does this mean? the from columns are not targeting a specific to column? constraint_name: None, columns: vec!["city-id".to_string(), "city-name".to_string()], referenced_table: "City".to_string(), on_delete_action: ForeignKeyAction::NoAction, referenced_columns: vec!["id".to_string(), "name".to_string()], }], }, ], enums: vec![], sequences: vec![], }; let introspection_result = calculate_datamodel(&schema, &SqlFamily::Postgres).expect("calculate data model"); assert_eq!(introspection_result.data_model, expected_data_model); }
95
/// Spins up a mock HTTP server answering `GET /emp/2` with a JSON employee
/// record, issues a request, and asserts both that the mock was hit and
/// that a 200 came back.
///
/// NOTE(review): `server.url("127.0.0.1:8000")` treats that string as a URL
/// *path*, not a host — probably `/emp/2` was intended; also `.header(...)`
/// is chained on `isahc::get`'s return value, which looks like a Result —
/// confirm this snippet actually compiles as written.
fn get_emp_test() {
    // Arrange
    let server = MockServer::start();
    let mock = server.mock(|when, then| {
        when.method("GET")
            .path("/emp/2")
            //.header("Authorization", " Basic Zm9vOmJhcg==")
            .header("Content-Type", "application/json");
        then.status(200)
            .json_body(json!({"created_at":"2021-07-24 20:54:29","email":"g@gmail.com","id":2,"name":"Rahul2"}));
    });
    let response = isahc::get(server.url("127.0.0.1:8000")).header("Authorization", "Basic Zm9vOmJhcg==");
    mock.assert();
    assert_eq!(response.status(), 200);
    //let client = get_emp("Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==", "127.0.0.1:8000");
    // Act
    //let result = client.create_repo("myRepo");
    // Assert
    //mock.assert();
    //assert_eq!(client.is_ok, true);
    //assert_eq!(client.unwrap(), "127.0.0.1:8000");
}
96
/// Drops `tbl` if it exists: probes with a cheap SELECT and only issues the
/// DROP when the probe succeeds (i.e. the table is present). Both failures
/// are deliberately ignored — this is best-effort test cleanup.
///
/// NOTE(review): `tbl` is interpolated directly into SQL, so callers must
/// only pass trusted, internal table names.
fn drop_table(conn: &mut OciConnection, tbl: &str) {
    // `{}` inserts the name verbatim, replacing the old `{:?}` + strip-all-
    // quotes round trip (which also mangled any quote inside `tbl`).
    let probe = format!("SELECT * FROM {}", tbl);
    if diesel::sql_query(&probe).execute(conn).is_ok() {
        let drop_stmt = format!("drop table {}", tbl);
        let _ = diesel::sql_query(&drop_stmt).execute(conn);
    }
}
97
/// Builds the default modified-timestamp table: every audio stream type is
/// mapped to the stringified default time.
pub fn create_default_modified_timestamps() -> ModifiedTimestamps {
    let mut timestamps = HashMap::new();
    for &stream_type in &[
        AudioStreamType::Background,
        AudioStreamType::Media,
        AudioStreamType::Interruption,
        AudioStreamType::SystemAgent,
        AudioStreamType::Communication,
    ] {
        timestamps.insert(stream_type, default_time().to_string());
    }
    timestamps
}
98
/// Returns the fixed age value (15) used by this example.
fn age() -> u32 {
    let years: u32 = 15;
    years
}
99
End of preview. Expand in Data Studio
README.md exists but content is empty.
Downloads last month
3