content
stringlengths 12
392k
| id
int64 0
1.08k
|
|---|---|
/// Returns the largest value in `list`.
///
/// # Panics
/// Panics if `list` is empty (the original indexed `list[0]` and panicked
/// the same way).
fn largest(list: &[i32]) -> i32 {
    // Iterator `max` replaces the manual scan; `copied` yields `i32` from `&i32`.
    list.iter().copied().max().expect("list must be non-empty")
}
| 300
|
/// Generates a binoxxo puzzle of the configured `BOARD_SIZE` at the given
/// difficulty `level`, rendered as a string.
pub fn create_puzzle(level: usize) -> String {
    binoxxo::bruteforce::create_puzzle_board(BOARD_SIZE, level).to_string()
}
| 301
|
/// Build-script entry point: generates the C wrapper file, runs the
/// bindings generator, then emits cargo linker directives.
fn main() {
create_wrapper_file();
generate_bindings();
config_linker();
}
| 302
|
/// Parses a (possibly signed) decimal string into an `i64`.
///
/// # Panics
/// Panics if `s` is not a valid base-10 integer.
fn atoi(s: &str) -> i64 {
    // `str::parse` is the idiomatic spelling of `from_str_radix(s, 10)`
    // (clippy: from_str_radix_10); behavior is identical.
    s.parse().expect("invalid decimal integer")
}
| 303
|
pub fn write_i16<W>(wr: &mut W, val: i16) -> Result<(), ValueWriteError>
where W: Write
{
try!(write_marker(wr, Marker::I16));
write_data_i16(wr, val)
}
| 304
|
/// Sums, over all blank-line-separated groups in `input`, the number of
/// distinct non-newline characters appearing anywhere in the group.
pub fn solve(input: &str) -> Option<Box<usize>> {
    let mut total: usize = 0;
    for group in input.trim_end().split("\n\n") {
        // Collect into a set to count each character once per group.
        let distinct: HashSet<char> = group.chars().filter(|&ch| ch != '\n').collect();
        total += distinct.len();
    }
    Some(Box::new(total))
}
| 305
|
/// Parser test: a `let … in` expression with an `if`/`elif` chain must
/// type-check to a function `(Int, Int) -> &'static str`.
fn test_let() {
let module = make_module(
indoc! {r#"
-- taken from http://learnyouahaskell.com/syntax-in-functions#pattern-matching
str_imc w h =
let bmi = w / h ^ 2
in if bmi <= 18.5 then "You're underweight, you emo, you!"
elif bmi <= 25.0 then "You're supposedly normal. Pffft, I bet you're ugly!"
elif bmi <= 30.0 then "You're fat! Lose some weight, fatty!"
else "You're a whale, congratulations!"
"#},
default_sym_table(),
);
// Parsing must succeed before we can inspect declarations.
assert!(module.is_ok());
let module = module.unwrap();
let decls = module.get_decls();
// The first (only) declaration carries the inferred function type.
assert_eq!(
decls[0].get_type(),
Some(FuncType::new_func_type(
Some(vec![BasicType::int(), BasicType::int()]),
BasicType::static_str(),
))
);
}
| 306
|
/// Runs the compiled `andi` test program in the emulator and expects a
/// successful run with exit code 0.
pub fn test_andi() {
let buffer = fs::read("tests/programs/andi").unwrap().into();
let result = run::<u32, SparseMemory<u32>>(&buffer, &vec!["andi".into()]);
assert!(result.is_ok());
assert_eq!(result.unwrap(), 0);
}
| 307
|
/// Starts the game loop at 240 updates/sec with a 0.1s max frame time,
/// delegating to the game's update and render callbacks.
pub fn main() {
let game = Game::new();
game_loop(game, 240, 0.1, |g| {
g.game.your_update_function();
}, |g| {
g.game.your_render_function();
});
}
| 308
|
/// Builds the ordered list of platform data directories, combining
/// `XDG_DATA_HOME`/`XDG_DATA_DIRS` with OS-specific defaults, and dropping
/// entries that could not be resolved.
fn get_platform_dependent_data_dirs() -> Vec<PathBuf> {
    // Per the XDG Base Directory spec the default for XDG_DATA_DIRS is
    // "/usr/local/share:/usr/share"; the previous fallback ended in
    // "/usr/local", which is not a data directory. `unwrap_or_else` also
    // avoids allocating the default string when the variable is set.
    let xdg_data_dirs_variable = var("XDG_DATA_DIRS")
        .unwrap_or_else(|_| String::from("/usr/local/share:/usr/share"));
    let xdg_dirs_iter = xdg_data_dirs_variable.split(':').map(|s| Some(PathBuf::from(s)));
    let dirs: Vec<Option<PathBuf>> = if cfg!(target_os = "macos") {
        vec![
            var_os("XDG_DATA_HOME").map(PathBuf::from),
            home_dir().map(|dir| dir.join("Library").join("Application Support")),
        ]
        .into_iter()
        .chain(xdg_dirs_iter)
        .chain(vec![Some(PathBuf::from("/").join("Library").join("Application Support"))])
        .collect()
    } else if cfg!(target_os = "linux") {
        vec![
            var_os("XDG_DATA_HOME").map(PathBuf::from),
            home_dir().map(|dir| dir.join(".local").join("share")),
        ]
        .into_iter()
        .chain(xdg_dirs_iter)
        .collect()
    } else if cfg!(target_os = "windows") {
        vec![var_os("APPDATA").map(PathBuf::from)]
    } else {
        Vec::new()
    };
    // Keep only the directories that actually resolved.
    dirs.into_iter().flatten().collect()
}
| 309
|
/// Computes a number-theoretic quantity mod 1000 by binary exponentiation.
/// NOTE(review): the coefficient pairs look like (a, b) representing
/// a + b*sqrt(5); squaring gives (a^2 + 5*b^2, 2ab) — confirm against the
/// problem statement this solves.
fn main() {
let n = read::<u64>();
// coefs[i] = base^(2^i), represented as the pair (a, b), all mod 1000.
let mut coefs = vec![(3_i64, 1_i64); 64];
for i in 1..64 {
let new_a = ((coefs[i - 1].0).pow(2) + (coefs[i - 1].1).pow(2) * 5) % 1000;
let new_b = (2 * coefs[i - 1].0 * coefs[i - 1].1) % 1000;
coefs[i] = (new_a, new_b);
}
// Multiply together the powers selected by the set bits of n.
let mut ans = (1, 0);
let mut n = n;
for i in 0..64 {
let bin = n % 2;
if bin == 1 {
ans = ((ans.0 * coefs[i].0 + ans.1 * coefs[i].1 * 5) % 1000, (ans.0 * coefs[i].1 + ans.1 * coefs[i].0) % 1000);
}
n >>= 1;
}
let int = 2 * ans.0 - 1;
// NOTE(review): this pads with a single leading zero when int < 100, but
// prints `int % 1000` — verify the intended 3-digit zero-padding (values
// below 10 would get only one leading zero).
if int < 100 {
print!("0");
}
println!("{}", int % 1000);
}
| 310
|
/// Applies a unary sign token ("+" or "-") to an integer literal node,
/// producing the (possibly negated) integer node.
///
/// # Panics
/// Panics if the node is not an `Int`, the token is not a unary sign, or
/// the sign is neither "+" nor "-".
pub fn unary_num(t_unary: Token, n_simple_numeric: Node) -> Node {
    let value = match n_simple_numeric {
        Node::Int(v) => v,
        _ => panic!(),
    };
    let signed = match t_unary {
        Token::T_UNARY_NUM(polarity) => match polarity.as_ref() {
            "+" => value,
            "-" => 0 - value,
            _ => panic!(),
        },
        _ => panic!(),
    };
    Node::Int(signed)
}
| 311
|
/// Wraps a built-in in a shared, mutable `Value` cell; marked `computed`
/// so it is never re-evaluated.
pub fn b(b: BuiltIn) -> Value {
Rc::new(RefCell::new(V {
val: Value_::BuiltIn(b),
computed: true,
}))
}
| 312
|
/// Creates the %ArrayIteratorPrototype% object: inherits from the generic
/// iterator prototype and exposes a `next` built-in function.
pub(crate) fn create_array_iterator_prototype(agent: &Agent) -> Value {
let proto = Value::new_object(agent.intrinsics.iterator_prototype.clone());
proto
.set(
agent,
ObjectKey::from("next"),
Value::new_builtin_function(agent, next, false),
)
// Setting a property on a fresh ordinary object is not expected to fail.
.unwrap();
proto
}
| 313
|
/// Thin wrapper over the platform `set_tid_address` syscall, returning the
/// caller's thread id.
///
/// # Safety
/// `data` is passed straight to the kernel; the caller must uphold whatever
/// validity/lifetime requirements the underlying syscall imposes.
pub unsafe fn set_tid_address(data: *mut c_void) -> Pid {
imp::syscalls::tls::set_tid_address(data)
}
| 314
|
/// IRQ handler for the first serial port; dispatches to the common handler as vector 36.
pub extern "x86-interrupt" fn serial_1() { CommonInterruptHandler(36); }
| 315
|
/// IRQ handler for the second parallel port; dispatches to the common handler as vector 37.
pub extern "x86-interrupt" fn parallel_2() { CommonInterruptHandler(37); }
| 316
|
/// Registers a history source in the persisted configuration.
///
/// # Errors
/// Returns an error message if `history_path` is neither an existing
/// directory nor an existing file.
pub fn add_history(history_type: HistoryType, history_path: String) -> Result<(), &'static str> {
    // `Path::new` only borrows; the old name `path_buf` was misleading.
    let path = Path::new(&history_path);
    if !(path.is_dir() || path.is_file()) {
        // Fixed wording (was "Not a validate dir or file path.").
        return Err("Not a valid dir or file path.");
    }
    let mut configurator = Configurator::new();
    configurator.add_history(history_type, path);
    configurator.save_config();
    Ok(())
}
| 317
|
/// Extracts every entry of an in-memory zip archive under `path`, creating
/// parent directories as needed.
///
/// # Errors
/// Propagates zip-parsing and I/O errors instead of panicking mid-extract.
fn extract_zip(data: &Vec<u8>, path: &Path) -> crate::Result<()> {
    let cursor = Cursor::new(data);
    let mut archive = ZipArchive::new(cursor)?;
    for i in 0..archive.len() {
        let mut file = archive.by_index(i)?;
        // NOTE(review): `file.name()` is attacker-controlled; a name containing
        // ".." can escape `path` (zip-slip). Consider `file.enclosed_name()`.
        let dest_path = path.join(file.name());
        if let Some(parent) = dest_path.parent() {
            if !parent.exists() {
                create_dir_all(parent)?;
            }
        }
        // Stream entry -> file directly instead of buffering the whole
        // entry in memory first.
        let mut fileout = File::create(&dest_path)?;
        std::io::copy(&mut file, &mut fileout)?;
    }
    Ok(())
}
| 318
|
/// Opens a WebSocket client connection over an existing stream, using the
/// given TLS connector and the default WebSocket configuration.
///
/// # Errors
/// Propagates handshake/TLS errors from the underlying implementation.
pub async fn client_async_tls_with_connector<R, S>(
request: R,
stream: S,
connector: Option<Connector>,
) -> Result<(WebSocketStream<ClientStream<S>>, Response), Error>
where
R: IntoClientRequest + Unpin,
S: 'static + tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin,
AutoStream<S>: Unpin,
{
// Delegate with `config = None` (library defaults).
client_async_tls_with_connector_and_config(request, stream, connector, None).await
}
| 319
|
/// Computes per-entry deltas between two instantaneous container metric
/// samples. Only entries present in both samples whose CPU usage increased
/// are kept; each pair is converted with the elapsed time between samples.
pub fn docker_metric_from_stats(first: &InstantDockerContainerMetric, second: &InstantDockerContainerMetric) -> DockerContainerMetric {
    use std::collections::HashMap;
    let time_diff = second.timestamp - first.timestamp;
    // Index the first sample by name once (O(n)). The original cloned the
    // whole iterator and re-scanned it for every entry of the second sample
    // (O(n^2)). `or_insert` keeps the FIRST entry for a duplicated name,
    // matching the old `find` semantics.
    let mut first_by_name: HashMap<String, DockerContainerMetricEntry> = HashMap::new();
    for entry in first.stat.iter().cloned() {
        first_by_name.entry(entry.name.clone()).or_insert(entry);
    }
    let stat: Vec<DockerContainerMetricEntry> = second.stat.iter().cloned()
        .filter_map(|new_entry| {
            first_by_name
                .get(&new_entry.name)
                .cloned()
                .map(|old_entry| (old_entry, new_entry))
        })
        .filter(|(old_entry, new_entry)| new_entry.cpu_usage > old_entry.cpu_usage)
        .map(|(old_entry, new_entry)| docker_metric_entry_from_two_stats(time_diff, old_entry, new_entry))
        .collect();
    DockerContainerMetric { stat, timestamp: second.timestamp }
}
| 320
|
/// Issues a `ChannelTake` system call against `target` and returns the
/// kernel-filled response message.
///
/// # Panics
/// Panics if the kernel replies with a different call variant, or if no
/// response was filled in.
fn channel_take_nonpayload(target: CAddr) -> ChannelMessage {
let result = system_call(SystemCall::ChannelTake {
request: target,
response: None
});
match result {
SystemCall::ChannelTake {
response, ..
} => {
return response.unwrap()
},
_ => panic!(),
};
}
| 321
|
/// NimBLE GATT registration callback: logs each registered service,
/// characteristic, or descriptor with its UUID and handle(s).
///
/// # Safety
/// `ctxt` must be a valid pointer supplied by the NimBLE stack; the unions
/// inside are read according to `(*ctxt).op`.
pub unsafe extern "C" fn gatt_svr_register_cb(
ctxt: *mut ble_gatt_register_ctxt,
_arg: *mut ::core::ffi::c_void,
) {
// Scratch buffer for the textual UUID rendering.
let mut buf_arr: [i8; BLE_UUID_STR_LEN as usize] = [0; BLE_UUID_STR_LEN as usize];
let buf = buf_arr.as_mut_ptr();
match (*ctxt).op as u32 {
BLE_GATT_REGISTER_OP_SVC => {
printf(
cstr!("registered service %s with handle=%d\n"),
ble_uuid_to_str((*(*ctxt).__bindgen_anon_1.svc.svc_def).uuid, buf),
(*ctxt).__bindgen_anon_1.svc.handle as i32,
);
}
BLE_GATT_REGISTER_OP_CHR => {
// Characteristics carry both a definition handle and a value handle.
printf(
cstr!("registering characteristic %s with def_handle=%d val_handle=%d\n"),
ble_uuid_to_str((*(*ctxt).__bindgen_anon_1.chr.chr_def).uuid, buf),
(*ctxt).__bindgen_anon_1.chr.def_handle as i32,
(*ctxt).__bindgen_anon_1.chr.val_handle as i32,
);
}
BLE_GATT_REGISTER_OP_DSC => {
printf(
cstr!("registering descriptor %s with handle=%d\n"),
ble_uuid_to_str((*(*ctxt).__bindgen_anon_1.dsc.dsc_def).uuid, buf),
(*ctxt).__bindgen_anon_1.dsc.handle as i32,
);
}
_ => {
printf(cstr!("unknown operation: %d\n"), (*ctxt).op as u32);
}
}
}
| 322
|
/// Converts a boolean check into a `Result`: `Ok(())` when `cond` holds,
/// otherwise `Err(err)`.
pub fn assert<E>(cond: bool, err: E) -> Result<(), E> {
    match cond {
        true => Ok(()),
        false => Err(err),
    }
}
| 323
|
/// Reads the next whitespace-delimited unsigned decimal number from the
/// cursor, leaving the cursor positioned on the terminating byte.
///
/// # Errors
/// Fails on I/O errors, on unexpected EOF before any digit, on a byte that
/// is neither a digit nor whitespace, or if the digits overflow `u32`.
fn read_num(cursor: &mut Cursor<Vec<u8>>) -> Result<u32, Box<dyn std::error::Error>> {
    let mut digits: Vec<u8> = vec![];
    let mut c: [u8; 1] = [0];
    // Consume leading whitespace. `read` returning Ok(0) means EOF; the
    // original ignored that and span forever re-processing the stale byte.
    loop {
        if cursor.read(&mut c)? == 0 {
            return Err("Unexpected EOF".into());
        }
        match &c {
            b" " | b"\t" | b"\n" => {}
            _ => {
                // Un-read the non-whitespace byte. The seek Result was
                // silently dropped before; propagate it.
                cursor.seek(std::io::SeekFrom::Current(-1))?;
                break;
            }
        }
    }
    // Accumulate digit bytes until whitespace or EOF.
    loop {
        if cursor.read(&mut c)? == 0 {
            break; // EOF cleanly terminates the number
        }
        match c[0] {
            // `..=` replaces the removed `...` inclusive-range syntax.
            b'0'..=b'9' => digits.push(c[0]),
            b' ' | b'\t' | b'\n' => {
                cursor.seek(std::io::SeekFrom::Current(-1))?;
                break;
            }
            _ => return Err("Parse error".into()),
        }
    }
    let num_str = std::str::from_utf8(&digits)?;
    Ok(num_str.parse::<u32>()?)
}
| 324
|
/// Parses any assignment statement, trying the single, abbreviated, and
/// multiple forms in that order.
pub(crate) fn assignment_statement(i: Input) -> NodeResult {
alt((
single::single_assignment_statement,
abbreviated::abbreviated_assignment_statement,
multiple_assignment_statement,
))(i)
}
| 325
|
/// Introspection round-trip test: a SQL schema with one table covering
/// every column-type family must produce the expected data model, with
/// unsupported families commented out and documented.
fn a_data_model_can_be_generated_from_a_schema() {
// One column per type family, including families the datamodel cannot
// represent (Geometric, LogSequenceNumber, TextSearch, TransactionId).
let col_types = &[
ColumnTypeFamily::Int,
ColumnTypeFamily::Float,
ColumnTypeFamily::Boolean,
ColumnTypeFamily::String,
ColumnTypeFamily::DateTime,
ColumnTypeFamily::Binary,
ColumnTypeFamily::Json,
ColumnTypeFamily::Uuid,
ColumnTypeFamily::Geometric,
ColumnTypeFamily::LogSequenceNumber,
ColumnTypeFamily::TextSearch,
ColumnTypeFamily::TransactionId,
];
// The expected output: a single commented-out model (no unique id) whose
// fields mirror `col_types`.
let ref_data_model = Datamodel {
models: vec![Model {
database_name: None,
name: "Table1".to_string(),
documentation: Some(
"The underlying table does not contain a valid unique identifier and can therefore currently not be handled."
.to_string(),
),
is_embedded: false,
is_generated: false,
is_commented_out: true,
indices: vec![],
id_fields: vec![],
fields: col_types
.iter()
.map(|col_type| {
// Supported families map to scalar types; everything else
// becomes an Unsupported field with a note.
let (field_type, is_commented_out, documentation) = match col_type {
ColumnTypeFamily::Boolean => (FieldType::Base(ScalarType::Boolean, None), false, None),
ColumnTypeFamily::DateTime => (FieldType::Base(ScalarType::DateTime, None), false, None),
ColumnTypeFamily::Float => (FieldType::Base(ScalarType::Float, None), false, None),
ColumnTypeFamily::Int => (FieldType::Base(ScalarType::Int, None), false, None),
ColumnTypeFamily::String => (FieldType::Base(ScalarType::String, None), false, None),
ColumnTypeFamily::Enum(name) => (FieldType::Enum(name.clone()), false, None),
ColumnTypeFamily::Uuid => (FieldType::Base(ScalarType::String, None), false, None),
ColumnTypeFamily::Json => (FieldType::Base(ScalarType::Json, None), false, None),
x => (
FieldType::Unsupported(x.to_string()),
true,
Some("This type is currently not supported.".to_string()),
),
};
Field::ScalarField(ScalarField {
name: col_type.to_string(),
arity: FieldArity::Optional,
field_type,
database_name: None,
default_value: None,
is_unique: false,
is_id: false,
documentation,
is_generated: false,
is_updated_at: false,
is_commented_out,
})
})
.collect(),
}],
enums: vec![],
};
// The input: one nullable column per family, no primary key.
let schema = SqlSchema {
tables: vec![Table {
name: "Table1".to_string(),
columns: col_types
.iter()
.map(|family| Column {
name: family.to_string(),
tpe: ColumnType::pure(family.to_owned(), ColumnArity::Nullable),
default: None,
auto_increment: false,
})
.collect(),
indices: vec![],
primary_key: None,
foreign_keys: vec![],
}],
enums: vec![],
sequences: vec![],
};
let introspection_result = calculate_datamodel(&schema, &SqlFamily::Postgres).expect("calculate data model");
assert_eq!(introspection_result.data_model, ref_data_model);
}
| 326
|
/// Writes a MessagePack positive fixint (a single byte encoding 0..=127).
///
/// # Panics
/// Panics if `val` is 128 or greater — `FixPos` only covers 7-bit values.
pub fn write_pfix<W>(wr: &mut W, val: u8) -> Result<(), FixedValueWriteError>
where W: Write
{
assert!(val < 128);
write_fixval(wr, Marker::FixPos(val))
}
| 327
|
/// Dumps a human-readable listing of every IR function: its name, its
/// parameter list, and each statement tree (via `print_nodes`).
pub fn pretty_print(ir: &std::collections::HashMap<String, IRFunction>) {
    // Only the functions are needed; keys duplicate `func.name`.
    for func in ir.values() {
        println!("Function-'{}':", func.name);
        println!(" Arguments:");
        for param in &func.parameters {
            println!(" {}: {:?}", param.name, param.param_type);
        }
        for statement in &func.statements {
            println!(" Statement:");
            print_nodes(" ", statement);
        }
    }
}
| 328
|
/// Introspection test: a two-column (compound) foreign key from `User` to
/// `City` must be preserved as a relation field with matching
/// `fields`/`to_fields` lists, plus the back-relation on `City`.
fn compound_foreign_keys_are_preserved_when_generating_data_model_from_a_schema() {
// Expected output model.
let expected_data_model = Datamodel {
models: vec![
Model {
database_name: None,
name: "City".to_string(),
documentation: None,
is_embedded: false,
is_commented_out: false,
fields: vec![
Field::ScalarField(ScalarField {
name: "id".to_string(),
arity: FieldArity::Required,
field_type: FieldType::Base(ScalarType::Int, None),
database_name: None,
default_value: Some(DMLDefault::Expression(ValueGenerator::new_autoincrement())),
is_unique: false,
is_id: true,
documentation: None,
is_generated: false,
is_updated_at: false,
is_commented_out: false,
}),
Field::ScalarField(ScalarField::new(
"name",
FieldArity::Required,
FieldType::Base(ScalarType::String, None),
)),
// Back-relation: a City lists its Users.
Field::RelationField(RelationField::new(
"User",
FieldArity::List,
RelationInfo {
to: "User".to_string(),
fields: vec![],
to_fields: vec![],
name: "CityToUser".to_string(),
on_delete: OnDeleteStrategy::None,
},
)),
],
is_generated: false,
indices: vec![],
id_fields: vec![],
},
Model {
database_name: None,
name: "User".to_string(),
documentation: None,
is_embedded: false,
is_commented_out: false,
fields: vec![
Field::ScalarField(ScalarField {
name: "id".to_string(),
arity: FieldArity::Required,
field_type: FieldType::Base(ScalarType::Int, None),
database_name: None,
default_value: Some(DMLDefault::Expression(ValueGenerator::new_autoincrement())),
is_unique: false,
is_id: true,
documentation: None,
is_generated: false,
is_updated_at: false,
is_commented_out: false,
}),
// Scalar halves of the compound FK; note the sanitized names with
// `database_name` preserving the original hyphenated columns.
Field::ScalarField(ScalarField {
name: "city_id".to_string(),
arity: FieldArity::Required,
field_type: FieldType::Base(ScalarType::Int, None),
database_name: Some("city-id".to_string()),
default_value: None,
is_unique: false,
is_id: false,
documentation: None,
is_generated: false,
is_updated_at: false,
is_commented_out: false,
}),
Field::ScalarField(ScalarField {
name: "city_name".to_string(),
field_type: FieldType::Base(ScalarType::String, None),
arity: FieldArity::Required,
database_name: Some("city-name".to_string()),
default_value: None,
is_unique: false,
is_id: false,
documentation: None,
is_generated: false,
is_updated_at: false,
is_commented_out: false,
}),
// The compound relation itself: both columns map to City(id, name).
Field::RelationField(RelationField::new(
"City",
FieldArity::Required,
RelationInfo {
name: "CityToUser".to_string(),
to: "City".to_string(),
fields: vec!["city_id".to_string(), "city_name".to_string()],
to_fields: vec!["id".to_string(), "name".to_string()],
on_delete: OnDeleteStrategy::None,
},
)),
],
is_generated: false,
indices: vec![],
id_fields: vec![],
},
],
enums: vec![],
};
// Input schema: City(id PK autoincrement, name) and User(id PK, city-id,
// city-name) with a two-column FK to City.
let schema = SqlSchema {
tables: vec![
Table {
name: "City".to_string(),
columns: vec![
Column {
name: "id".to_string(),
tpe: ColumnType {
data_type: "integer".to_string(),
full_data_type: "integer".to_string(),
character_maximum_length: None,
family: ColumnTypeFamily::Int,
arity: ColumnArity::Required,
},
default: None,
auto_increment: true,
},
Column {
name: "name".to_string(),
tpe: ColumnType {
data_type: "text".to_string(),
full_data_type: "text".to_string(),
character_maximum_length: None,
family: ColumnTypeFamily::String,
arity: ColumnArity::Required,
},
default: None,
auto_increment: false,
},
],
indices: vec![],
primary_key: Some(PrimaryKey {
columns: vec!["id".to_string()],
sequence: None,
constraint_name: None,
}),
foreign_keys: vec![],
},
Table {
name: "User".to_string(),
columns: vec![
Column {
name: "id".to_string(),
tpe: ColumnType {
data_type: "integer".to_string(),
full_data_type: "integer".to_string(),
character_maximum_length: None,
family: ColumnTypeFamily::Int,
arity: ColumnArity::Required,
},
default: None,
auto_increment: true,
},
Column {
name: "city-id".to_string(),
tpe: ColumnType {
data_type: "integer".to_string(),
full_data_type: "integer".to_string(),
character_maximum_length: None,
family: ColumnTypeFamily::Int,
arity: ColumnArity::Required,
},
default: None,
auto_increment: false,
},
Column {
name: "city-name".to_string(),
tpe: ColumnType {
data_type: "text".to_string(),
full_data_type: "text".to_string(),
character_maximum_length: None,
family: ColumnTypeFamily::String,
arity: ColumnArity::Required,
},
default: None,
auto_increment: false,
},
],
indices: vec![],
primary_key: Some(PrimaryKey {
columns: vec!["id".to_string()],
sequence: None,
constraint_name: None,
}),
foreign_keys: vec![ForeignKey {
// what does this mean? the from columns are not targeting a specific to column?
constraint_name: None,
columns: vec!["city-id".to_string(), "city-name".to_string()],
referenced_table: "City".to_string(),
on_delete_action: ForeignKeyAction::NoAction,
referenced_columns: vec!["id".to_string(), "name".to_string()],
}],
},
],
enums: vec![],
sequences: vec![],
};
let introspection_result = calculate_datamodel(&schema, &SqlFamily::Postgres).expect("calculate data model");
assert_eq!(introspection_result.data_model, expected_data_model);
}
| 329
|
/// Lists the given files/directories to stdout; with no arguments, lists
/// the current working directory.
///
/// # Errors
/// Propagates I/O errors from the underlying listing.
pub fn main(args: Vec<String>) -> io::Result<()> {
    // if no arguments are passed, use current working dir
    let cwd = vec![".".to_string()];
    // `is_empty` over `len() == 0` (clippy: len_zero); binding no longer
    // needs to be `mut`.
    let files: &Vec<String> = if args.is_empty() { &cwd } else { &args };
    _ls(files, &mut io::stdout())
}
| 330
|
/// PyO3 module initializer: registers the `Soda` class on the Python module.
fn soda(_py: Python, m: &PyModule) -> PyResult<()> {
m.add_class::<Soda>()?;
Ok(())
}
| 331
|
/// Builds a per-release map of contributors for rust-lang/rust: every
/// tagged version plus synthetic "Beta" and "Master" entries, each mapped
/// to the authors reachable only from that release.
fn generate_thanks() -> Result<BTreeMap<VersionTag, AuthorMap>, Box<dyn std::error::Error>> {
let path = update_repo("https://github.com/rust-lang/rust.git")?;
let repo = git2::Repository::open(&path)?;
let mailmap = mailmap_from_repo(&repo)?;
let reviewers = Reviewers::new()?;
let mut versions = get_versions(&repo)?;
// Latest x.y.0 release; beta/master version numbers are derived from it.
let last_full_stable = versions
.iter()
.rfind(|v| v.raw_tag.ends_with(".0"))
.unwrap()
.version
.clone();
// Beta is stable + 1 minor version.
versions.push(VersionTag {
name: String::from("Beta"),
version: {
let mut last = last_full_stable.clone();
last.minor += 1;
last
},
raw_tag: String::from("beta"),
commit: repo
.revparse_single("beta")
.unwrap()
.peel_to_commit()
.unwrap()
.id(),
in_progress: true,
});
versions.push(VersionTag {
name: String::from("Master"),
version: {
// master is plus 1 minor versions off of beta, which we just pushed
let mut last = last_full_stable.clone();
last.minor += 2;
last
},
raw_tag: String::from("master"),
commit: repo
.revparse_single("master")
.unwrap()
.peel_to_commit()
.unwrap()
.id(),
in_progress: true,
});
let mut version_map = BTreeMap::new();
// Cache of cumulative author maps so each release is computed once.
let mut cache = HashMap::new();
for (idx, version) in versions.iter().enumerate() {
let previous = if let Some(v) = idx.checked_sub(1).map(|idx| &versions[idx]) {
v
} else {
// First version: everything up to its tag counts.
let author_map = build_author_map(&repo, &reviewers, &mailmap, "", &version.raw_tag)?;
version_map.insert(version.clone(), author_map);
continue;
};
eprintln!("Processing {:?} to {:?}", previous, version);
cache.insert(
version,
up_to_release(&repo, &reviewers, &mailmap, &version)?,
);
// The previous release's cumulative map is consumed; recompute if it
// was never cached (should only happen for the first pair).
let previous = match cache.remove(&previous) {
Some(v) => v,
None => up_to_release(&repo, &reviewers, &mailmap, &previous)?,
};
let current = cache.get(&version).unwrap();
// Remove commits reachable from the previous release.
let only_current = current.difference(&previous);
version_map.insert(version.clone(), only_current);
}
Ok(version_map)
}
| 332
|
/// Resolves the request's host/port, opens a TCP connection, and completes
/// the WebSocket (and TLS, if applicable) handshake with the given config.
///
/// # Errors
/// Returns request-conversion, connection, and handshake errors.
pub async fn connect_async_with_config<R>(
request: R,
config: Option<WebSocketConfig>,
) -> Result<(WebSocketStream<ConnectStream>, Response), Error>
where
R: IntoClientRequest + Unpin,
{
let request: Request = request.into_client_request()?;
let domain = domain(&request)?;
let port = port(&request)?;
let try_socket = TcpStream::connect((domain.as_str(), port)).await;
let socket = try_socket.map_err(Error::Io)?;
// `None` connector: use the default TLS setup for wss URLs.
client_async_tls_with_connector_and_config(request, socket, None, config).await
}
| 333
|
/// Best-effort: writes the error's text back to the client, ignoring any
/// failure to do so.
fn send_err(stream: &mut TcpStream, err: Error) {
    // `write_all` replaces `write`, which could silently write only part of
    // the message. The original `let _ = ….expect(…)` both discarded AND
    // panicked on the result; `let _ =` alone matches the best-effort intent.
    let _ = stream.write_all(err.to_string().as_bytes());
}
| 334
|
/// Sift-down over a pointer-based heap layout of the given `order`,
/// swapping `root` toward its proper position.
///
/// # Safety
/// `root` must point into a valid heap region large enough for `order`;
/// `near_heap_ultimate_root_ptr` is assumed to return a pointer into the
/// same allocation (TODO confirm — pointer-difference arithmetic below
/// relies on it).
unsafe fn sift<T, F: Fn(&T, &T) -> bool>(mut root: *mut T, mut order: usize, f: F) {
while order > 1 {
let new_root = near_heap_ultimate_root_ptr(root, order, &f);
// Distance (in elements) between old and new root picks the next order:
// 0 = already in place, 1 = near child, otherwise far child.
order = match ((root as usize) - (new_root as usize))/mem::size_of::<T>() {
0 => return,
1 => order - 2,
_ => order - 1,
};
ptr::swap(root, new_root);
root = new_root;
}
}
| 335
|
/// Cryptopals challenge 7: base64-decode the file and decrypt it with
/// AES-128-ECB under the key "YELLOW SUBMARINE", printing the plaintext.
pub fn challenge_7() {
let mut file = File::open("data/7good.txt").unwrap();
let mut all_file = String::new();
file.read_to_string(&mut all_file).unwrap();
let data = base64::decode(&all_file).unwrap();
let key = "YELLOW SUBMARINE".as_bytes().to_vec();
let out = crypto::aes_decrypt_ecb(&data, &key);
let cleartext = String::from_utf8(out).unwrap();
println!("{}", cleartext);
}
| 336
|
/// Round-trip helper: encoding `enc` to 28 bits and decoding it back must
/// reproduce the original value.
fn test_encode_decode(enc: Intermediate) {
use wormcode_bits::{Decode, Encode};
let b: B<28> = enc.encode();
let dec = Intermediate::decode_option(b);
assert_eq!(Some(enc), dec);
}
| 337
|
/// End-to-end cargo-fuzz test: a fuzz target that deliberately panics must
/// fail even with `-sanitizer=none`, and the tool's crash report must
/// include the artifact path, the Debug output, and the reproduce/minimize
/// command hints.
fn run_without_sanitizer_with_crash() {
let project = project("run_without_sanitizer_with_crash")
.with_fuzz()
.fuzz_target(
"yes_crash",
r#"
#![no_main]
use libfuzzer_sys::fuzz_target;
fuzz_target!(|data: &[u8]| {
run_without_sanitizer_with_crash::fail_fuzzing(data);
});
"#,
)
.build();
project
.cargo_fuzz()
.arg("run")
.arg("yes_crash")
.arg("--")
.arg("-runs=1000")
.arg("-sanitizer=none")
.env("RUST_BACKTRACE", "1")
.assert()
// Every fragment below must appear in the tool's stderr.
.stderr(
predicate::str::contains("panicked at 'I'm afraid of number 7'")
.and(predicate::str::contains("ERROR: libFuzzer: deadly signal"))
.and(predicate::str::contains("run_without_sanitizer_with_crash::fail_fuzzing"))
.and(predicate::str::contains(
"ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ\n\
\n\
Failing input:\n\
\n\
\tfuzz/artifacts/yes_crash/crash-"
))
.and(predicate::str::contains("Output of `std::fmt::Debug`:"))
.and(predicate::str::contains(
"Reproduce with:\n\
\n\
\tcargo fuzz run yes_crash fuzz/artifacts/yes_crash/crash-"
))
.and(predicate::str::contains(
"Minimize test case with:\n\
\n\
\tcargo fuzz tmin yes_crash fuzz/artifacts/yes_crash/crash-"
)),
)
.failure();
}
| 338
|
/// Performs a system call by writing `message` into the per-task buffer,
/// trapping into the kernel, and taking the kernel's reply back out.
///
/// # Panics
/// Panics if the kernel clears the buffer without leaving a reply.
fn system_call(message: SystemCall) -> SystemCall {
let addr = task_buffer_addr();
unsafe {
// SAFETY assumption: `task_buffer_addr` returns the address of this
// task's valid, exclusively-owned TaskBuffer — TODO confirm.
let buffer = &mut *(addr as *mut TaskBuffer);
buffer.call = Some(message);
system_call_raw();
buffer.call.take().unwrap()
}
}
| 339
|
/// Encoder test: FIST with a word-sized displaced indirect operand must emit the expected bytes.
fn fist_4() {
run_test(&Instruction { mnemonic: Mnemonic::FIST, operand1: Some(IndirectDisplaced(BX, 1177, Some(OperandSize::Word), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[223, 151, 153, 4], OperandSize::Word)
}
| 340
|
/// Iterative post-order DFS from `val` over instruction results, calling
/// `visit` for each newly-seen value after all of its traced operands.
/// Uses an explicit two-phase stack (`Trace` expands children, `Visit`
/// fires the callback) stored in `allocs` to avoid per-call allocation.
fn post_order_dfs(
allocs: &mut Allocs,
dfg: &ir::DataFlowGraph,
val: ir::Value,
should_trace: impl Fn(ir::Value) -> bool,
mut visit: impl FnMut(&mut Allocs, ir::Value),
) {
allocs.dfs_stack.push(StackEntry::Trace(val));
while let Some(entry) = allocs.dfs_stack.pop() {
match entry {
StackEntry::Visit(val) => {
// `insert` returns false for values already visited.
let is_new = allocs.dfs_seen.insert(val);
if is_new {
visit(allocs, val);
}
}
StackEntry::Trace(val) => {
if allocs.dfs_seen.contains(&val) {
continue;
}
// Visit fires only after all children pushed below are done.
allocs.dfs_stack.push(StackEntry::Visit(val));
if should_trace(val) {
// Only trace through the first result of an instruction.
if let ir::ValueDef::Result(inst, 0) = dfg.value_def(val) {
let args = dfg.inst_args(inst);
// Reversed push so arguments are processed in order.
for v in args.iter().rev().copied() {
allocs.dfs_stack.push(StackEntry::Trace(v));
}
}
}
}
}
}
}
| 341
|
/// Spins until the global lock is acquired or `LOCK_TIMEOUT` attempts pass.
///
/// # Errors
/// Returns `ErrorKind::TimedOut` if the lock could not be taken in time.
pub(crate) fn lock() -> Result<(), ErrorKind> {
    let mut count = 1;
    loop {
        // `compare_exchange` returns Ok(previous) on success, and a
        // successful false->true swap yields Ok(false) — so the original
        // `if let Ok(true)` arm could never match and every call timed out.
        // Test for success itself instead.
        if LOCK
            .compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed)
            .is_ok()
        {
            break;
        }
        if count > LOCK_TIMEOUT {
            return Err(ErrorKind::TimedOut);
        }
        // Back off progressively between attempts.
        cpu_relax(count);
        count += 1;
    }
    Ok(())
}
| 342
|
/// Runs the WiX `candle.exe` compiler over `wxs_file_name` for the
/// configured architecture, from `build_path`.
///
/// # Errors
/// Fails for unsupported target architectures, a missing main binary, or a
/// non-zero candle exit.
fn run_candle(
settings: &Settings,
wix_toolset_path: &Path,
build_path: &Path,
wxs_file_name: &str,
) -> crate::Result<()> {
// WiX uses "x64"/"x86" arch names rather than Rust target triples.
let arch = match settings.binary_arch() {
"x86_64" => "x64",
"x86" => "x86",
target => {
return Err(crate::Error::ArchError(format!(
"unsupported target: {}",
target
)))
}
};
let main_binary = settings
.binaries()
.iter()
.find(|bin| bin.main())
.ok_or_else(|| anyhow::anyhow!("Failed to get main binary"))?;
let args = vec![
"-arch".to_string(),
arch.to_string(),
wxs_file_name.to_string(),
// SourceDir lets the .wxs reference the built binary's location.
format!(
"-dSourceDir={}",
settings.binary_path(main_binary).display()
),
];
let candle_exe = wix_toolset_path.join("candle.exe");
common::print_info(format!("running candle for {}", wxs_file_name).as_str())?;
let mut cmd = Command::new(&candle_exe);
cmd
.args(&args)
.stdout(Stdio::piped())
.current_dir(build_path);
common::print_info("running candle.exe")?;
common::execute_with_output(&mut cmd).map_err(|_| crate::Error::CandleError)
}
| 343
|
/// Expands one "pattern => result" line into all pattern variants
/// (flips and rotations) paired with the same result.
/// NOTE(review): flips combined with all four rotations generate the full
/// dihedral group with duplicates, so some entries repeat — harmless for
/// matching, but worth confirming if dedup matters downstream.
fn to_rules(line: &str) -> Vec<(Matrix<bool>, Matrix<bool>)> { // todo can I use a fixed-size array? it messes with flat_map below
let (left, right) = line.split(" => ").next_tuple().unwrap();
let pat = to_matrix(left);
let result = to_matrix(right);
vec![
(pat.flipped_lr(), result.clone()),
(pat.flipped_lr().rotated_cw(1), result.clone()),
(pat.flipped_lr().rotated_cw(2), result.clone()),
(pat.flipped_lr().rotated_cw(3), result.clone()),
(pat.flipped_ud(), result.clone()),
(pat.flipped_ud().rotated_cw(1), result.clone()),
(pat.flipped_ud().rotated_cw(2), result.clone()),
(pat.flipped_ud().rotated_cw(3), result.clone()),
(pat.rotated_cw(3), result.clone()),
(pat.rotated_cw(2), result.clone()),
(pat.rotated_cw(1), result.clone()),
// Last entry takes ownership, avoiding one final clone.
(pat, result),
]
}
| 344
|
/// CLI entry point: parses arguments, then dispatches to manpage output,
/// shell-completion generation, or the main watchexec run.
pub async fn run() -> Result<()> {
let args = init().await?;
debug!(?args, "arguments");
if args.manual {
run_manpage(args).await
} else if let Some(shell) = args.completions {
run_completions(shell).await
} else {
run_watchexec(args).await
}
}
| 345
|
/// Runs the shared compression test-suite against the lz-string UTF-16
/// codec (compress and decompress round trip).
fn lz_string_utf_16() {
compression_tests(
|s| ByteString::from(lz_str::compress_to_utf16(s)),
|s| {
lz_str::decompress_from_utf16(&s.to_utf8_string().expect("Valid UTF16 String"))
.map(ByteString::from)
},
// Third argument presumably toggles a suite option — see
// `compression_tests` for its meaning.
false,
);
}
| 346
|
/// Reads one module mass per line from the file "input" and prints the
/// summed fuel requirement.
fn main() {
    let input = File::open("input").unwrap();
    let reader = BufReader::new(input);
    // Accumulate per-line fuel requirements in a single fold.
    let total_fuel = reader
        .lines()
        .fold(0, |acc, line| acc + fuel_requirement(line.unwrap().parse::<i64>().unwrap()));
    println!("Total Fuel: {}", total_fuel);
}
| 347
|
/// Placeholder: challenge 12 is not implemented yet; prints a TODO marker.
pub fn challenge_12() {
println!("TODO");
}
| 348
|
/// Builds a `Curiosity` preloaded with the four DISTINCT queries over
/// quads (subject, predicate, object, graph).
fn curiosity() -> Curiosity {
Curiosity::create(
[
"SELECT DISTINCT ?s WHERE { GRAPH ?g { ?s ?p ?o } }",
"SELECT DISTINCT ?p WHERE { GRAPH ?g { ?s ?p ?o } }",
"SELECT DISTINCT ?o WHERE { GRAPH ?g { ?s ?p ?o } }",
"SELECT DISTINCT ?g WHERE { GRAPH ?g { ?s ?p ?o } }",
]
.iter()
// The query strings are static and known-valid, hence the unwraps.
.map(|a| a.parse().unwrap())
.collect(),
)
.unwrap()
}
| 349
|
/// A freshly-created, empty migrations folder must list zero directories.
fn list_migration_directories_with_an_empty_migrations_folder_works(api: TestApi) {
let migrations_directory = api.create_migrations_directory();
api.list_migration_directories(&migrations_directory)
.send()
.assert_listed_directories(&[]);
}
| 350
|
/// Thin wrapper setting the current thread's name via the platform syscall.
///
/// # Safety
/// Marked unsafe by the underlying implementation; see
/// `imp::syscalls::tls::set_thread_name` for the exact contract.
pub unsafe fn set_thread_name(name: &CStr) -> io::Result<()> {
imp::syscalls::tls::set_thread_name(name)
}
| 351
|
/// Stores the working folder in the global `WORKING_FOLDER`; always Ok.
pub fn set_working_folder(working_folder: String) -> Result<(), ()> {
unsafe {
// NOTE(review): writing a `static mut` is a data race if any other
// thread touches WORKING_FOLDER concurrently — confirm single-threaded
// use or migrate to a Mutex/OnceLock.
WORKING_FOLDER = working_folder;
}
Ok(())
}
| 352
|
/// CPU exception handler for #SS (stack-segment fault); dispatches to the common handler as vector 12.
pub extern "x86-interrupt" fn stack_segment_fault() { CommonExceptionHandler(12); }
| 353
|
/// AoC day 15 part 2: searches for the smallest elf attack-power boost such
/// that every elf survives, simulating boosts in parallel chunks of 8.
fn main() {
let input = include_str!("day_15.txt");
let initial_state = Cavern::parse(input);
let total_start_time = Instant::now();
let initial_elves = initial_state.elves().count();
let chunk_size: isize = 8;
// Each chunk simulates `chunk_size` consecutive boosts in parallel, then
// scans the chunk in order for the first no-elf-losses outcome.
let mut winning_outcomes = (0..).filter_map(|chunk| {
let chunk_outcomes: Vec<Outcome> = (0..chunk_size).into_par_iter()
.map(|i| {
let mut pathfinder = CavernPathfinder::new();
let attack_boost = (chunk_size * chunk + i) as isize;
let mut cavern = initial_state.clone();
cavern.elf_attack_power += attack_boost;
// Run rounds until one side wins.
let mut time = 0;
loop {
if let Some(winner) = cavern.tick(&mut pathfinder) {
break Outcome::new(&cavern, winner, time);
} else {
time += 1;
}
}
})
.collect();
chunk_outcomes.into_iter()
.inspect(|outcome| println!("{}", outcome))
.find(|outcome| outcome.elves_remaining.len() == initial_elves)
});
// The first chunk yielding Some contains the lowest qualifying boost.
let winning_outcome = winning_outcomes.next().unwrap();
println!("final outcome: {}", winning_outcome);
for elf in &winning_outcome.elves_remaining {
println!(" surviving elf with {} HP", elf.hp);
}
let total_elapsed = Instant::now() - total_start_time;
println!("elapsed time: {}.{}s", total_elapsed.as_secs(), total_elapsed.subsec_millis());
}
| 354
|
/// Sanitizes `s` into a valid Rust identifier: keywords get the raw
/// identifier prefix `r#`, `self` becomes `_self` (it cannot be raw),
/// non-ASCII-alphanumeric characters become `_`, and a leading digit gets
/// a `_` prefix.
fn fixup(s: String) -> String {
    const KEYWORDS: &[&str] = &[
        "as", "break", "const", "continue", "crate", "dyn", "else", "enum", "extern", "false",
        "fn", "for", "if", "impl", "in", "let", "loop", "match", "mod", "move", "mut", "pub",
        "ref", "return", "Self", "static", "struct", "super", "trait", "true", "type", "unsafe",
        "use", "where", "while", "abstract", "become", "box", "do", "final", "macro", "override",
        "priv", "typeof", "unsized", "virtual", "yield", "async", "await", "try",
    ];
    if KEYWORDS.contains(&s.as_str()) {
        return format!("r#{}", s);
    }
    // `r#self` is not a legal raw identifier, so rename instead.
    if s == "self" {
        return "_self".to_owned();
    }
    let s: String = s
        .chars()
        .map(|c| if c.is_ascii_alphanumeric() { c } else { '_' })
        .collect();
    // Identifiers cannot start with a digit.
    match s.chars().next() {
        Some(c) if c.is_ascii_digit() => "_".to_owned() + &s,
        _ => s,
    }
}
| 355
|
/// CLI test: a named query stored under `.items/queries` can be invoked
/// with `reduce -Q` and sees the reducer-produced state.
fn item_named_query() {
let dir = TestDir::new("sit", "named_query");
dir.cmd()
.arg("init")
.expect_success();
// Reducer that injects a constant `value` into the state.
dir.create_file(".sit/reducers/test.js",r#"
module.exports = function(state, record) {
return Object.assign(state, {value: "hello"});
}
"#);
let id = String::from_utf8(dir.cmd().arg("item").expect_success().stdout).unwrap();
// create a record
Repository::open(dir.path(".sit")).unwrap().item(id.trim()).unwrap().new_record(vec![("test", &b""[..])].into_iter(), true).unwrap();
dir.create_file(".sit/.items/queries/q1", "join(' ', ['item', id, value])");
let output = String::from_utf8(dir.cmd().args(&["reduce", id.trim(), "-Q", "q1"]).expect_success().stdout).unwrap();
assert_eq!(output.trim(), format!("item {} hello", id.trim()));
}
| 356
|
/// `reduce` removes one round of adjacent case-insensitive pairs per call;
/// a fully-reduced string is a fixed point.
fn test_reduce() {
assert_eq!(reduce("dabAcCaCBAcCcaDA"), "dabAaCBAcaDA");
assert_eq!(reduce("dabAaCBAcaDA"), "dabCBAcaDA");
assert_eq!(reduce("dabCBAcaDA"), "dabCBAcaDA");
}
| 357
|
/// Derives a deterministic package GUID from the bundle identifier.
fn generate_package_guid(settings: &Settings) -> Uuid {
generate_guid(settings.bundle_identifier().as_bytes())
}
| 358
|
/// Runs the simulation built from `config` for up to 100 iterations,
/// printing a statistics row (after the TSV header) for every generation.
fn run_with_config(config: Config) {
    let mut simulation = SimulationConfig::new_from_config(config)
        .create_simulation(Box::new(rand::thread_rng()));
    println!("iter\tbest_i\tbest_fit\tns_current\tns_total");
    let max_iterations = 100;
    // Print stats for the initial generation, then advance and print until
    // the iteration cap is reached — same call sequence as a loop/break.
    simulation.print_statistics();
    while simulation.iteration < max_iterations {
        simulation = simulation.next_generation();
        simulation.print_statistics();
    }
}
| 359
|
pub fn stp_op(inputs: OpInputs) -> EmulatorResult<()> {
// Reads a field of the A operand and writes it into the PSPACE slot
// selected by a field of the B operand. (The previous comment described
// the reverse direction — that is LDP, not STP.)
//
// LDP and STP are not defined in any ICWS standard. This implementation
// is based on pMARS's behavior.
let a = inputs.regs.a;
let b = inputs.regs.b;
// Which field of the A operand supplies the value depends on the modifier.
let source_value = match inputs.regs.current.instr.modifier {
Modifier::A | Modifier::AB => {
// A field of a operand
a.a_field
}
Modifier::B
| Modifier::BA
| Modifier::F
| Modifier::X
| Modifier::I => {
// B field of a operand
a.b_field
}
};
// Which field of the B operand supplies the PSPACE index, likewise.
let pspace_dest_index = match inputs.regs.current.instr.modifier {
Modifier::A | Modifier::BA => {
// a field of b operand
b.a_field
}
Modifier::B
| Modifier::AB
| Modifier::F
| Modifier::X
| Modifier::I => {
// b field of b operand
b.b_field
}
};
inputs
.pspace
.write(pspace_dest_index, source_value, inputs.warrior_id)?;
// Queue PC + 1
inputs.pq.push_back(
offset(inputs.regs.current.idx, 1, inputs.core_size)?,
inputs.warrior_id,
)?;
Ok(())
}
| 360
|
/// Emits schema tokens for a plain (unit-variant) enum, honoring serde's
/// enum representation: externally/internally/adjacently tagged or
/// untagged; without serde rules, the externally-tagged form is used.
fn regular_enum_to_tokens<T: self::enum_variant::Variant>(
tokens: &mut TokenStream,
container_rules: &Option<SerdeContainer>,
enum_variant_features: &Vec<Feature>,
get_variants_tokens_vec: impl FnOnce() -> Vec<T>,
) {
let enum_values = get_variants_tokens_vec();
tokens.extend(match container_rules {
Some(serde_container) => match &serde_container.enum_repr {
SerdeEnumRepr::ExternallyTagged => Enum::new(enum_values).to_token_stream(),
// Internally tagged: every variant is paired with the tag name.
SerdeEnumRepr::InternallyTagged { tag } => TaggedEnum::new(
enum_values
.into_iter()
.map(|variant| (Cow::Borrowed(tag.as_str()), variant)),
)
.to_token_stream(),
SerdeEnumRepr::Untagged => UntaggedEnum::new().to_token_stream(),
// Adjacently tagged: variants carry both tag and content names.
SerdeEnumRepr::AdjacentlyTagged { tag, content } => {
AdjacentlyTaggedEnum::new(enum_values.into_iter().map(|variant| {
(
Cow::Borrowed(tag.as_str()),
Cow::Borrowed(content.as_str()),
variant,
)
}))
.to_token_stream()
}
// This should not be possible as serde should not let that happen
SerdeEnumRepr::UnfinishedAdjacentlyTagged { .. } => panic!("Invalid serde enum repr"),
},
_ => Enum::new(enum_values).to_token_stream(),
});
// Append feature-derived tokens after the enum schema itself.
tokens.extend(enum_variant_features.to_token_stream());
}
| 361
|
// Inspects a union value: if its integer view equals 30, prints a message;
// otherwise reinterprets the same bits through the float field and prints it.
fn process_value(iof: IntOrFloat) {
    // SAFETY: reading union fields assumes every bit pattern is valid for the
    // field being read. Presumably `i` and `f` are plain int/float fields
    // (all bit patterns valid) — confirm against the `IntOrFloat` definition.
    unsafe {
        match iof {
            // Matches only when the integer interpretation of the bits is 30.
            IntOrFloat {i: 30} => println!("meaning of life value"),
            // Fallback: reinterpret the same bits as the float field.
            IntOrFloat {f} => println!("f = {}", f)
        }
    }
}
| 362
|
/// Terminates the calling thread with the given exit status; never returns.
///
/// # Safety
/// Thin wrapper over the platform TLS/thread exit syscall. The caller must
/// uphold whatever invariants `imp::syscalls::tls::exit_thread` requires —
/// e.g. that no borrows of this thread's stack or TLS data outlive the call
/// (NOTE(review): exact contract not visible here; confirm against `imp`).
pub unsafe fn exit_thread(status: i32) -> ! {
    imp::syscalls::tls::exit_thread(status)
}
| 363
|
/// Splits `text` into contiguous comment blocks, one `Vec<String>` of lines
/// per block.
///
/// Convenience wrapper over `do_extract_comment_blocks` with its flag set to
/// `false` (NOTE(review): flag semantics inferred — confirm against
/// `do_extract_comment_blocks`).
fn extract_comment_blocks(text: &str) -> Vec<Vec<String>> {
    do_extract_comment_blocks(text, false)
}
| 364
|
pub fn delete_project_confirmation(dir: StorageDir, search_terms:&[&str]) -> Result<()> {
let luigi = try!(setup_luigi());
for project in try!(luigi.search_projects_any(dir, search_terms)) {
try!(project.delete_project_dir_if(
|| util::really(&format!("you want me to delete {:?} [y/N]", project.dir())) && util::really("really? [y/N]")
))
}
Ok(())
}
| 365
|
// Emits the cargo link directives for the CEF library appropriate to the
// current target platform. macOS needs no explicit link configuration
// (CEF_PATH is not necessarily needed there), so it returns early.
fn config_linker() {
    let lib_name;
    match get_platform() {
        Platform::Mac => return,
        Platform::Windows => lib_name = "libcef",
        Platform::Linux => lib_name = "cef",
    }
    // Tell the linker which library to pull in and where to look for it.
    println!("cargo:rustc-link-lib={}", lib_name);
    let search_dir = cef_dir().join(get_build_type());
    println!(
        "cargo:rustc-link-search={}",
        search_dir.to_str().unwrap()
    );
}
| 366
|
// Reads the input file named by the single CLI argument and, for each column,
// tallies character frequencies; prints the message formed by the most common
// character per column and the one formed by the least common.
fn main() {
    if env::args().len() != 2 {
        panic!("Incorrect number of arguments provided\n");
    }
    let file_path = env::args().nth(1).unwrap();
    let reader = BufReader::new(File::open(file_path).unwrap());
    // One character-frequency histogram per column, grown lazily as longer
    // lines are encountered.
    let mut histograms: Vec<BTreeMap<char, i32>> = Vec::new();
    for line in reader.lines() {
        for (col, ch) in line.unwrap().chars().enumerate() {
            if histograms.len() == col {
                histograms.push(BTreeMap::new());
            }
            *histograms[col].entry(ch).or_insert(0) += 1;
        }
    }
    // Assemble both output messages from the per-column extremes.
    let mut most = String::new();
    let mut least = String::new();
    for histogram in histograms {
        let (common, rare) = most_least_common(histogram);
        most.push(common);
        least.push(rare);
    }
    println!("Most common message: {}", most);
    println!("Least common message: {}", least);
}
| 367
|
// Writes the single-byte wire representation of `marker` into `wr`,
// converting the underlying I/O failure into a `MarkerWriteError`.
fn write_marker<W>(wr: &mut W, marker: Marker) -> Result<(), MarkerWriteError>
    where W: Write
{
    match wr.write_u8(marker.to_u8()) {
        Ok(()) => Ok(()),
        Err(err) => Err(From::from(err)),
    }
}
| 368
|
/// Loads chapter `num` of the "word smart" vocabulary set.
///
/// Reads `word/word_smart/word{n}.txt` and `mean{n}.txt` (zero-indexed on
/// disk) and pairs their lines one-to-one into `Word`s.
///
/// # Panics
/// Panics if either file cannot be opened or any line cannot be read.
pub fn smart_to_words(num: usize) -> Words {
    // Chapters are one-indexed for callers but zero-indexed on disk.
    // NOTE(review): `num == 0` underflows here — assumes callers pass >= 1.
    let chap = num - 1;
    // Lazy `unwrap_or_else` so the panic message is only built on failure
    // (the old `expect(&format!(..))` allocated it on every call —
    // clippy: expect_fun_call).
    let word_file = File::open(format!("word/word_smart/word{}.txt", chap))
        .unwrap_or_else(|e| panic!("Can't open word{}.txt: {}", chap, e));
    let mean_file = File::open(format!("word/word_smart/mean{}.txt", chap))
        .unwrap_or_else(|e| panic!("Can't open mean{}.txt: {}", chap, e));
    let word_reader = BufReader::new(word_file);
    let mean_reader = BufReader::new(mean_file);
    let mut word_vec: Vec<Word> = Vec::new();
    for (word, mean) in word_reader.lines().zip(mean_reader.lines()) {
        match (word, mean) {
            (Ok(w), Ok(m)) => word_vec.push(Word::new(w, m)),
            // A read error on either file is unrecoverable for this loader;
            // `panic!` replaces the old `assert!(false, ..)` idiom.
            _ => panic!("Can't parse word & mean"),
        }
    }
    Words::new(num, word_vec)
}
| 369
|
// When a fuzz run using a custom --fuzz-dir hits a crash, the diagnostic
// output must echo reproduction (`cargo fuzz run ...`) and minimization
// (`cargo fuzz tmin ...`) commands that include that same fuzz dir.
fn run_diagnostic_contains_fuzz_dir() {
    let (fuzz_dir, mut project_builder) = project_with_fuzz_dir("run_with_crash", None);
    // A target guaranteed to crash so the diagnostic path is exercised.
    let project = project_builder
        .with_fuzz()
        .fuzz_target(
            "yes_crash",
            r#"
                #![no_main]
                use libfuzzer_sys::fuzz_target;
                fuzz_target!(|data: &[u8]| {
                    run_with_crash::fail_fuzzing(data);
                });
            "#,
        )
        .build();
    // The exact command strings the crash diagnostics are expected to print.
    let run = format!(
        "cargo fuzz run --fuzz-dir {} yes_crash custom_dir/artifacts/yes_crash",
        &fuzz_dir
    );
    let tmin = format!(
        "cargo fuzz tmin --fuzz-dir {} yes_crash custom_dir/artifacts/yes_crash",
        &fuzz_dir
    );
    // The run must fail (the target crashes) and stderr must contain both
    // suggested follow-up commands.
    project
        .cargo_fuzz()
        .arg("run")
        .arg("--fuzz-dir")
        .arg(fuzz_dir)
        .arg("yes_crash")
        .arg("--")
        .arg("-runs=1000")
        .assert()
        .stderr(predicates::str::contains(run).and(predicate::str::contains(tmin)))
        .failure();
}
| 370
|
/// Sets up global logging for the CLI.
///
/// Release builds install the crash reporter and log at `Info` to both the
/// terminal and `rpfm_cli.log` in the config directory; debug builds use
/// `simple_logger` instead (simplelog misbehaves in embedded terminals).
///
/// # Errors
/// Fails if the crash reporter, a logger backend, or the log file cannot be
/// initialized.
pub fn initialize_logs() -> Result<()> {
    // In Release Builds, initialize the logger, so we get messages in the terminal and recorded to disk.
    if !cfg!(debug_assertions) {
        CrashReport::init()?;
        CombinedLogger::init(
            vec![
                TermLogger::new(LevelFilter::Info, simplelog::Config::default(), TerminalMode::Mixed).ok_or_else(|| Error::from(ErrorKind::InitializingLoggerError))?,
                WriteLogger::new(LevelFilter::Info, simplelog::Config::default(), File::create(get_config_path()?.join("rpfm_cli.log"))?),
            ]
        )?;
    }
    // Simplelog does not work properly with custom terminals, like the one in Sublime Text.
    // So, for debug builds, we use simple_logger instead.
    else {
        simple_logger::init()?;
    }
    Ok(())
}
| 371
|
/// Writes `sequence` to `writer`, wrapped to at most `line_bases` bases per
/// line, each line newline-terminated.
///
/// # Errors
/// Propagates any I/O error from the underlying writer.
fn write_record_sequence<W>(
    writer: &mut W,
    sequence: &Sequence,
    line_bases: usize,
) -> io::Result<()>
where
    W: Write,
{
    sequence
        .as_ref()
        .chunks(line_bases)
        .try_for_each(|line| {
            writer.write_all(line)?;
            writeln!(writer)
        })
}
| 372
|
/// Cryptopals challenge 11: base64-decode `data/11.txt`, feed it to the
/// encryption oracle, and report whether the oracle used ECB or CBC mode.
///
/// # Panics
/// Panics if the data file is missing/unreadable or not valid base64.
pub fn challenge_11() {
    let mut file = File::open("data/11.txt").unwrap();
    let mut all_file = String::new();
    file.read_to_string(&mut all_file).unwrap();
    let data = base64::decode(&all_file).unwrap();
    // `data` is not used again, so move it into the oracle — the previous
    // `.clone()` here was a redundant full-buffer copy.
    let it_data = crypto::encryption_oracle(data);
    if crypto::is_ecb(it_data) {
        println!("ECB");
    } else {
        println!("CBC");
    }
}
| 373
|
/// IDT entry for #TS (vector 10, Invalid TSS); delegates to the shared
/// exception handler. NOTE(review): #TS pushes a CPU error code — assumes the
/// common handler/stub accounts for it; confirm against the IDT setup.
pub extern "x86-interrupt" fn invalid_tss() { CommonExceptionHandler(10); }
| 374
|
/// Exercises the `Unsigned` wrapper: raw constant values, `From<u32>`
/// round-trip, bitwise OR, and NOT.
///
/// Uses `assert_eq!` throughout (the original mixed `assert!(a == b)` with
/// `assert_eq!`), which also yields both operands in failure messages.
fn unsigned() {
    // Each named constant occupies a distinct nibble bit.
    assert_eq!(Unsigned::One.0, 0x001u32);
    assert_eq!(Unsigned::Two.0, 0x010u32);
    assert_eq!(Unsigned::Three.0, 0x100u32);
    // Raw value round-trips through `From<u32>`.
    let value: Unsigned = 0x010u32.into();
    assert_eq!(value, Unsigned::Two);
    // OR combines the underlying bits.
    assert_eq!((Unsigned::One | Unsigned::Three).0, 0x101u32);
    // NOT inverts every bit of the raw value.
    assert_eq!(!Unsigned::One, Unsigned(0xFFFFFFFEu32))
}
| 375
|
/// IDT entry for #UD (vector 6, Invalid Opcode); delegates to the shared
/// exception handler with its vector number.
pub extern "x86-interrupt" fn invalid_opcode() { CommonExceptionHandler( 6); }
| 376
|
// Encoding test: `KXORD k3, k4, k1` in 64-bit (Qword) mode must assemble to
// the exact VEX-encoded byte sequence [196, 225, 221, 71, 217]
// (0xC4 0xE1 0xDD 0x47 0xD9).
fn kxord_2() {
    run_test(&Instruction { mnemonic: Mnemonic::KXORD, operand1: Some(Direct(K3)), operand2: Some(Direct(K4)), operand3: Some(Direct(K1)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 225, 221, 71, 217], OperandSize::Qword)
}
| 377
|
/// Builds the shell prompt string: the current working directory followed by
/// the "β>" marker. Panics if the current directory cannot be determined.
fn build_prompt() -> String {
    let cwd = env::current_dir().unwrap();
    format!("{}β>", cwd.to_string_lossy())
}
| 378
|
pub fn jmn_op(inputs: OpInputs) -> EmulatorResult<()> {
    // JMN: jump if the B-value is non-zero, otherwise fall through to PC + 1.
    // The modifier picks which field(s) of the B operand form the B-value.
    // Per the spec, JMN.I behaves like JMN.F: it jumps when the A-number OR
    // the B-number of the B-instruction is non-zero — note this is not the
    // negation of JMZ.F's condition.
    let a = inputs.regs.a;
    let b = inputs.regs.b;
    let should_jump = match inputs.regs.current.instr.modifier {
        // B-value is the A-number of the B instruction.
        Modifier::A | Modifier::BA => b.a_field != 0,
        // B-value is the B-number of the B instruction.
        Modifier::B | Modifier::AB => b.b_field != 0,
        // B-value covers both numbers of the B instruction.
        Modifier::F | Modifier::X | Modifier::I => b.a_field != 0 || b.b_field != 0,
    };
    // Queue either the jump target (PC + A-pointer) or the next instruction.
    let target = if should_jump {
        a.idx
    } else {
        offset(inputs.regs.current.idx, 1, inputs.core_size)?
    };
    inputs.pq.push_back(target, inputs.warrior_id)?;
    Ok(())
}
| 379
|
/// Authenticates against the global `AUTH` backend, returning the logged-in
/// `User` on success; errors from the backend are propagated unchanged.
pub fn login(path: &str, password: &str) -> Result<User> {
    AUTH.login(path, password)
}
| 380
|
/// Returns the configured database connection-pool size, computed once and
/// cached in `POOL_NUMBER` for the process lifetime.
///
/// Reads `DATABASE_POOL_SIZE` from the environment (after loading `.env`),
/// defaulting to 10 when the variable is unset.
///
/// # Panics
/// Panics on first call if `DATABASE_POOL_SIZE` is set but is not a valid
/// unsigned integer (the bare `unwrap()` gave no hint which variable failed).
pub fn pool_connection_number() -> &'static usize {
    POOL_NUMBER.get_or_init(|| {
        // Load `.env` so the variable may come from a dotfile as well.
        dotenv().ok();
        env::var("DATABASE_POOL_SIZE")
            .unwrap_or_else(|_| "10".to_string())
            .parse()
            .expect("DATABASE_POOL_SIZE must be a valid unsigned integer")
    })
}
| 381
|
/// Converts a `file://` URI string into a filesystem `PathBuf`.
///
/// Non-`file` schemes are rejected. On Windows it additionally handles URIs
/// with a host component (`file://host/path`, i.e. UNC paths), which
/// `Url::to_file_path` alone does not, mirroring vscode-uri's behavior.
///
/// # Errors
/// `WorkspaceError` for unparsable URIs, non-`file` schemes, or paths that
/// cannot be converted.
fn uri_to_path(uri: &str) -> WorkspaceResult<PathBuf> {
    let url = Url::parse(uri).map_err(|_| WorkspaceError("invalid URI"))?;
    if url.scheme() != "file" {
        return Err(WorkspaceError("non-file URI"));
    }
    // Fast path: `to_file_path` succeeds for no-host or localhost URIs.
    if let Ok(path) = url.to_file_path() {
        return Ok(path);
    }
    #[cfg(windows)]
    {
        use std::ffi::OsString;
        use std::path::Component;
        use url::Host;
        // Url::to_file_path only handles no host or localhost, which is different from vscode-uri
        // we first try localhost then retry by temporarily setting the authority part on windows
        let host = match url.host() {
            Some(Host::Domain(name)) => name.to_string(),
            Some(Host::Ipv4(addr)) => addr.to_string(),
            Some(Host::Ipv6(addr)) => {
                // an "official" hack for UNC
                // https://msdn.microsoft.com/en-us/library/aa385353.aspx
                let s = &addr.segments();
                format!("{:x}-{:x}-{:x}-{:x}-{:x}-{:x}-{:x}-{:x}.ipv6-literal.net",
                    s[0], s[1], s[2], s[3], s[4], s[5], s[6], s[7])
            },
            None => return Err(WorkspaceError("non-file URI")),
        };
        // convert file://host/path to file:///z:/path (z: is required for rust-url)
        let url = Url::parse(&format!("file:///z:{}", url.path())).unwrap();
        if let Ok(path) = url.to_file_path() {
            // now path starts with z:\foo\bar, so replace z: by \\host to complete it
            let mut components = path.components();
            let _prefix = components.next();
            assert!(match _prefix { Some(Component::Prefix(..)) => true, _ => false });
            let mut pathstr = OsString::from("\\\\");
            pathstr.push(&host);
            pathstr.push(components.as_path());
            return Ok(PathBuf::from(pathstr));
        }
    }
    Err(WorkspaceError("non-file URI"))
}
| 382
|
/// IDT entry for #DB (vector 1, Debug); delegates to the shared exception
/// handler with its vector number.
pub extern "x86-interrupt" fn debug() { CommonExceptionHandler( 1); }
| 383
|
// Randomized regression run of the `figure5` example: 100 iterations.
fn figure5_random() {
    // Chance of missing the bug is (1 - 2^-20)^100 ~= 99.99%, so this should not trip the assert
    check_random(figure5, 100);
}
| 384
|
// Test fixture: builds a sandbox Nash exchange client from environment
// credentials. Panics if the env vars are missing.
async fn init() -> Nash {
    // Load `.env` so credentials may come from a dotfile.
    dotenv().ok();
    let parameters = NashParameters {
        credentials: Some(NashCredentials {
            // NOTE(review): `secret` is read from NASH_API_SECRET and
            // `session` from NASH_API_KEY — verify this mapping is intended
            // and not swapped.
            secret: env::var("NASH_API_SECRET").unwrap(),
            session: env::var("NASH_API_KEY").unwrap(),
        }),
        environment: Environment::Sandbox,
        client_id: 1,
        timeout: 1000,
    };
    OpenLimits::instantiate(parameters).await
}
| 385
|
/// Processes every entity that died this tick: records statistics, turns the
/// dead entity into a corpse (recolored glyph, renamed, flavored description
/// for colonists), reports blood color via `splatter`, and strips the
/// components that should not survive death. The player keeps its components;
/// set-decoration props additionally lose their glyph and description.
fn kill_things(
    ecs: &mut World,
    commands: &mut CommandBuffer,
    dead_entities: Vec<Entity>,
    splatter: &mut Option<RGB>,
) {
    dead_entities.iter().for_each(|entity| {
        crate::stats::record_death();
        let mut was_decor = false;
        let mut was_player = false;
        // First pass: inspect/mutate the entity in place while we hold it.
        if let Ok(mut er) = ecs.entry_mut(*entity) {
            let mut was_colonist = false;
            if let Ok(_colonist) = er.get_component_mut::<ColonistStatus>() {
                commands.add_component(*entity, ColonistStatus::DiedAfterStart);
                was_colonist = true;
            }
            // Corpse appearance: dark red background, dark gray foreground.
            if let Ok(g) = er.get_component_mut::<Glyph>() {
                g.color.bg = DARK_RED.into();
                g.color.fg = DARK_GRAY.into();
            }
            if let Ok(n) = er.get_component_mut::<Name>() {
                n.0 = format!("Corpse: {}", n.0);
            }
            // ~50% of colonist corpses get a family flavor line appended.
            if was_colonist {
                if let Ok(d) = er.get_component_mut::<Description>() {
                    let mut rng = RandomNumberGenerator::new();
                    if rng.range(0, 10) < 5 {
                        d.0 = format!(
                            "{} They left behind a spouse and {} children.",
                            d.0,
                            rng.range(1, 8)
                        );
                    }
                }
            }
            if er.get_component::<Hostile>().is_ok() {
                crate::stats::record_monster_death();
            }
            // Report the blood color so the caller can splatter the map.
            if let Ok(b) = er.get_component::<Blood>() {
                *splatter = Some(b.0);
            }
            if let Ok(_) = er.get_component::<SetDecoration>() {
                was_decor = true;
            }
            if let Ok(_) = er.get_component::<Player>() {
                was_player = true;
            }
        }
        // Second pass (deferred via commands): strip living-only components.
        if !was_player {
            commands.remove_component::<Health>(*entity);
            commands.remove_component::<Active>(*entity);
            commands.remove_component::<CanBeActivated>(*entity);
            commands.remove_component::<Blood>(*entity);
            commands.remove_component::<Targetable>(*entity);
            commands.remove_component::<Explosive>(*entity);
            commands.remove_component::<TimedEvent>(*entity);
        }
        // Destroyed props vanish visually rather than leaving a corpse.
        if was_decor {
            crate::stats::record_prop_death();
            commands.remove_component::<Glyph>(*entity);
            commands.remove_component::<Description>(*entity);
        }
    });
}
| 386
|
// Pretty-prints a single IR node at `prefix` indentation; nested expressions
// and child node lists are rendered one indent level deeper.
fn print_node(prefix: &str, node: &IRNode) {
    let next_prefix = get_next_prefix(prefix);
    match node {
        IRNode::Assignment(name, exp) => {
            println!("{}Assignment-'{}':", prefix, name);
            print_expression(&next_prefix, exp);
        }
        IRNode::DeclareVariable(name, exp) => {
            println!("{}DeclareVariable-'{}':", prefix, name);
            println!("{}{:?}", next_prefix, exp);
        }
        IRNode::Conditional(comparison, nodes) => {
            println!("{}Conditional:", prefix);
            println!("{}{:?}", next_prefix, comparison);
            // Body statements sit one more level below the condition.
            let body_prefix = get_next_prefix(&next_prefix);
            for child in nodes {
                print_nodes(&body_prefix, child);
            }
        }
        IRNode::Loop(comparison, nodes) => {
            println!("{}Loop:", prefix);
            println!("{}{:?}", next_prefix, comparison);
            let body_prefix = get_next_prefix(&next_prefix);
            for child in nodes {
                print_nodes(&body_prefix, child);
            }
        }
        IRNode::Return(raw_exp) => match raw_exp {
            Some(exp) => {
                println!("{}Return:", prefix);
                print_expression(&next_prefix, exp);
            }
            None => println!("{}Return", prefix),
        },
        IRNode::SingleExpression(exp) => {
            println!("{}Expression:", prefix);
            print_expression(&next_prefix, exp);
        }
    }
}
| 387
|
// Partitioner hook invoked when scanning a column as a boolean.
// NOTE(review): this implementation ignores `column_name` and `column_value`
// and returns the context unchanged — a pass-through/no-op. Confirm whether
// actual filtering logic was intended here.
pub fn causet_partitioner_scan_column_as_bool(
    context: Box<CausetPartitionerContext>,
    column_name: &[u8],
    column_value: &[u8],
) -> Box<CausetPartitionerContext> {
    context
}
| 388
|
/// Returns the local crate's incoherent impls for the given simplified self
/// type, interned in the arena as a slice of `DefId`s (empty when none).
pub fn crate_incoherent_impls(tcx: TyCtxt<'_>, simp: SimplifiedType) -> &[DefId] {
    let crate_map = tcx.crate_inherent_impls(());
    // `Option::into_iter().flatten()` expresses "absent key means empty"
    // directly, instead of the previous `unwrap_or(&Vec::new())` which built
    // a throwaway temporary just to borrow it (clippy: or_fun_call).
    tcx.arena.alloc_from_iter(
        crate_map
            .incoherent_impls
            .get(&simp)
            .into_iter()
            .flatten()
            .map(|d| d.to_def_id()),
    )
}
| 389
|
pub fn simple_with_projects<F>(dir:StorageDir, search_terms:&[&str], f:F)
where F:Fn(&Project)
{
match with_projects(dir, search_terms, |p| {f(p);Ok(())}){
Ok(_) => {},
Err(e) => error!("{}",e)
}
}
| 390
|
/// Accepts an incoming WebSocket handshake on `stream`, invoking `callback`
/// on the request/response headers and applying the optional protocol
/// `config`.
///
/// Thin Tokio front-end: wraps the stream in `TokioAdapter` and delegates to
/// the crate-level `accept_hdr_async_with_config`.
///
/// # Errors
/// Returns any handshake error produced by the underlying accept.
pub async fn accept_hdr_async_with_config<S, C>(
    stream: S,
    callback: C,
    config: Option<WebSocketConfig>,
) -> Result<WebSocketStream<TokioAdapter<S>>, Error>
where
    S: tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin,
    C: Callback + Unpin,
{
    crate::accept_hdr_async_with_config(TokioAdapter(stream), callback, config).await
}
| 391
|
// Introspection round-trip: a SQL schema whose columns carry defaults
// (int/bool/float/string, plus one column with no default) must yield a
// datamodel whose scalar fields preserve exactly those defaults, along with
// the unique index over (no_default, int_default).
fn defaults_are_preserved_when_generating_data_model_from_a_schema() {
    // Expected datamodel: one model per table, one field per column, with
    // `default_value` mirroring the column default.
    let ref_data_model = Datamodel {
        models: vec![Model {
            database_name: None,
            name: "Table1".to_string(),
            documentation: None,
            is_embedded: false,
            is_commented_out: false,
            fields: vec![
                Field::ScalarField(ScalarField::new(
                    "no_default",
                    FieldArity::Required,
                    FieldType::Base(ScalarType::Int, None),
                )),
                Field::ScalarField(ScalarField {
                    name: "int_default".to_string(),
                    arity: FieldArity::Required,
                    field_type: FieldType::Base(ScalarType::Int, None),
                    database_name: None,
                    default_value: Some(dml::DefaultValue::Single(PrismaValue::Int(1))),
                    is_unique: false,
                    is_id: false,
                    documentation: None,
                    is_generated: false,
                    is_updated_at: false,
                    is_commented_out: false,
                }),
                Field::ScalarField(ScalarField {
                    name: "bool_default".to_string(),
                    arity: FieldArity::Optional,
                    field_type: FieldType::Base(ScalarType::Boolean, None),
                    database_name: None,
                    default_value: Some(dml::DefaultValue::Single(PrismaValue::Boolean(true))),
                    is_unique: false,
                    is_id: false,
                    documentation: None,
                    is_generated: false,
                    is_updated_at: false,
                    is_commented_out: false,
                }),
                Field::ScalarField(ScalarField {
                    name: "float_default".to_string(),
                    arity: FieldArity::Optional,
                    field_type: FieldType::Base(ScalarType::Float, None),
                    database_name: None,
                    default_value: Some(dml::DefaultValue::Single(PrismaValue::Float(1.into()))),
                    is_unique: false,
                    is_id: false,
                    documentation: None,
                    is_generated: false,
                    is_updated_at: false,
                    is_commented_out: false,
                }),
                Field::ScalarField(ScalarField {
                    name: "string_default".to_string(),
                    arity: FieldArity::Optional,
                    field_type: FieldType::Base(ScalarType::String, None),
                    database_name: None,
                    default_value: Some(dml::DefaultValue::Single(PrismaValue::String("default".to_string()))),
                    is_unique: false,
                    is_id: false,
                    documentation: None,
                    is_generated: false,
                    is_updated_at: false,
                    is_commented_out: false,
                }),
            ],
            is_generated: false,
            indices: vec![IndexDefinition {
                name: Some("unique".into()),
                fields: vec!["no_default".into(), "int_default".into()],
                tpe: dml::IndexType::Unique,
            }],
            id_fields: vec![],
        }],
        enums: vec![],
    };
    // Input schema: the raw SQL-side description the introspector consumes.
    let schema = SqlSchema {
        tables: vec![Table {
            name: "Table1".to_string(),
            columns: vec![
                Column {
                    name: "no_default".to_string(),
                    tpe: ColumnType::pure(ColumnTypeFamily::Int, ColumnArity::Required),
                    default: None,
                    auto_increment: false,
                },
                Column {
                    name: "int_default".to_string(),
                    tpe: ColumnType::pure(ColumnTypeFamily::Int, ColumnArity::Required),
                    default: Some(DefaultValue::VALUE(PrismaValue::Int(1))),
                    auto_increment: false,
                },
                Column {
                    name: "bool_default".to_string(),
                    tpe: ColumnType::pure(ColumnTypeFamily::Boolean, ColumnArity::Nullable),
                    default: Some(DefaultValue::VALUE(PrismaValue::Boolean(true))),
                    auto_increment: false,
                },
                Column {
                    name: "float_default".to_string(),
                    tpe: ColumnType::pure(ColumnTypeFamily::Float, ColumnArity::Nullable),
                    default: Some(DefaultValue::VALUE(PrismaValue::new_float(1.0))),
                    auto_increment: false,
                },
                Column {
                    name: "string_default".to_string(),
                    tpe: ColumnType::pure(ColumnTypeFamily::String, ColumnArity::Nullable),
                    default: Some(DefaultValue::VALUE(PrismaValue::String("default".to_string()))),
                    auto_increment: false,
                },
            ],
            indices: vec![Index {
                name: "unique".to_string(),
                columns: vec!["no_default".into(), "int_default".into()],
                tpe: IndexType::Unique,
            }],
            primary_key: None,
            foreign_keys: vec![],
        }],
        enums: vec![],
        sequences: vec![],
    };
    // The introspected datamodel must match the expected one exactly.
    let introspection_result = calculate_datamodel(&schema, &SqlFamily::Postgres).expect("calculate data model");
    assert_eq!(introspection_result.data_model, ref_data_model);
}
| 392
|
// Loads the Default user's NTUSER.DAT registry hive under HKLM\example and
// prints its top-level key names. Loading a hive requires the process token
// to hold SeRestorePrivilege and SeBackupPrivilege.
fn main() -> Result<(), std::io::Error> {
    let mut token = std::ptr::null_mut();
    // SAFETY: GetCurrentProcess returns a pseudo-handle needing no cleanup,
    // and `token` is a valid out-pointer for OpenProcessToken.
    let r = unsafe {OpenProcessToken(GetCurrentProcess(), TOKEN_ADJUST_PRIVILEGES, &mut token) };
    if r == 0 {
        return Err(std::io::Error::last_os_error());
    }
    // Both privileges are needed for RegLoadKey-style hive loading.
    set_privilege(token, SE_RESTORE_NAME)?;
    set_privilege(token, SE_BACKUP_NAME)?;
    // NOTE(review): this `unwrap()` bypasses the fn's io::Error return path —
    // consider propagating with `?` if the error type allows it.
    let hive_key = Hive::LocalMachine.load("example", r"C:\Users\Default\NTUSER.DAT", Security::Read | Security::Write).unwrap();
    let keys: Vec<_> = hive_key
        .keys()
        .map(|k| k.unwrap().to_string())
        .collect();
    println!("{:?}", keys);
    Ok(())
}
| 393
|
/// Replaces the current process image with the program named by `args[0]`
/// (resolved via `find_realpath`), forwarding all of `args` and the current
/// environment. On success this never returns.
///
/// # Panics
/// Panics if any argument or environment entry contains an interior NUL
/// byte, or if `execve` itself fails.
fn execve_wrapper(args: Vec<&str>) {
    let path = CString::new(find_realpath(&args[0])).unwrap();
    let cargs: Vec<CString> = args
        .into_iter()
        .map(|arg| CString::new(arg).unwrap())
        .collect();
    let envs: Vec<CString> = env::vars()
        .map(|(k, v)| CString::new(format!("{}={}", k, v)).unwrap())
        .collect();
    // Build the panic message lazily — the old `expect(&format!(..))`
    // allocated it even on the (non-returning) success path
    // (clippy: expect_fun_call); also include the underlying error.
    execve(&path, &cargs[0..], &envs)
        .unwrap_or_else(|e| panic!("failed to execute {:?}: {}", &cargs[0], e));
}
| 394
|
/// Validates that a unary function received exactly one argument.
///
/// # Errors
/// Returns `ErrorCode::NumberArgumentsNotMatch` naming the function and the
/// actual count when `actual != 1`.
pub fn assert_unary_params<D: Display>(name: D, actual: usize) -> Result<()> {
    match actual {
        1 => Ok(()),
        n => Err(ErrorCode::NumberArgumentsNotMatch(format!(
            "{} expect to have single parameters, but got {}",
            name, n
        ))),
    }
}
| 395
|
// Whether adjacent string parts may be collapsed during formatting.
// NOTE(review): unconditionally `true` — `parts` is ignored, so this reads
// like a stub/placeholder; confirm whether real heuristics were intended.
fn is_collapse_string_parts(parts: &Node) -> bool {
    true
}
| 396
|
/// IDT entry for the parallel-port IRQ remapped to vector 39; delegates to
/// the shared interrupt handler with its vector number.
pub extern "x86-interrupt" fn parallel_1() { CommonInterruptHandler(39); }
| 397
|
/// Fallback handler: any interrupt without a dedicated handler is a bug, so
/// fail loudly with the offending IRQ number.
fn default_handler(irqn: i16) {
    panic!("Unhandled exception (IRQn = {})", irqn);
}
| 398
|
fn main()
{
let args: Vec<String> = std::env::args().collect();
if args.len() < 2 {
eprintln!("Syntax: {} <filename>", args[0]);
return;
}
let path = Path::new(&args[1]);
let display = path.display();
let mut file = match File::open(&path) {
Err(why) => panic!("Could not open file: {} (Reason: {})",
display, why.description()),
Ok(file) => file
};
// read the full file into memory. panic on failure
let mut raw_file = Vec::new();
file.read_to_end(&mut raw_file).unwrap();
// construct a cursor so we can seek in the raw buffer
let mut cursor = Cursor::new(raw_file);
let mut image = match decode_ppm_image(&mut cursor) {
Ok (img) => img,
Err(why) => panic!("Could not parse PPM file - Desc: {}", why.description()),
};
show_image(&image);
}
| 399
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.