file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
ed25519.rs | use digest::Digest;
use sha2::{Sha512};
use curve25519::{GeP2, GeP3, ge_scalarmult_base, sc_reduce, sc_muladd, curve25519, Fe};
use util::{fixed_time_eq};
use std::ops::{Add, Sub, Mul};
pub fn keypair(seed: &[u8]) -> ([u8; 64], [u8; 32]) {
let mut secret: [u8; 64] = {
let mut hash_output: [u8; 64] = [0; 64];
let mut hasher = Sha512::new();
hasher.input(seed);
hasher.result(&mut hash_output);
hash_output[0] &= 248;
hash_output[31] &= 63;
hash_output[31] |= 64;
hash_output
};
let a = ge_scalarmult_base(&secret[0..32]);
let public_key = a.to_bytes();
for (dest, src) in (&mut secret[32..64]).iter_mut().zip(public_key.iter()) {
*dest = *src;
}
for (dest, src) in (&mut secret[0..32]).iter_mut().zip(seed.iter()) {
*dest = *src;
}
(secret, public_key)
}
pub fn signature(message: &[u8], secret_key: &[u8]) -> [u8; 64] {
let seed = &secret_key[0..32];
let public_key = &secret_key[32..64];
let az: [u8; 64] = {
let mut hash_output: [u8; 64] = [0; 64];
let mut hasher = Sha512::new();
hasher.input(seed);
hasher.result(&mut hash_output);
hash_output[0] &= 248;
hash_output[31] &= 63;
hash_output[31] |= 64;
hash_output
};
let nonce = {
let mut hash_output: [u8; 64] = [0; 64];
let mut hasher = Sha512::new();
hasher.input(&az[32..64]);
hasher.input(message);
hasher.result(&mut hash_output);
sc_reduce(&mut hash_output[0..64]);
hash_output
};
let mut signature: [u8; 64] = [0; 64];
let r: GeP3 = ge_scalarmult_base(&nonce[0..32]);
for (result_byte, source_byte) in (&mut signature[0..32]).iter_mut().zip(r.to_bytes().iter()) {
*result_byte = *source_byte;
}
for (result_byte, source_byte) in (&mut signature[32..64]).iter_mut().zip(public_key.iter()) {
*result_byte = *source_byte;
}
{
let mut hasher = Sha512::new();
hasher.input(signature.as_ref());
hasher.input(message);
let mut hram: [u8; 64] = [0; 64];
hasher.result(&mut hram);
sc_reduce(&mut hram);
sc_muladd(&mut signature[32..64], &hram[0..32], &az[0..32], &nonce[0..32]);
}
signature
}
fn check_s_lt_l(s: &[u8]) -> bool
{
let l: [u8; 32] =
[ 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x14, 0xde, 0xf9, 0xde, 0xa2, 0xf7, 0x9c, 0xd6,
0x58, 0x12, 0x63, 0x1a, 0x5c, 0xf5, 0xd3, 0xed ];
let mut c: u8 = 0;
let mut n: u8 = 1;
let mut i = 31;
loop {
c |= ((((s[i] as i32) - (l[i] as i32)) >> 8) as u8) & n;
n &= (((((s[i] ^ l[i]) as i32)) - 1) >> 8) as u8;
if i == 0 {
break;
} else {
i -= 1;
}
}
c == 0
}
pub fn verify(message: &[u8], public_key: &[u8], signature: &[u8]) -> bool {
if check_s_lt_l(&signature[32..64]) {
return false;
}
let a = match GeP3::from_bytes_negate_vartime(public_key) {
Some(g) => g,
None => { return false; }
};
let mut d = 0;
for pk_byte in public_key.iter() {
d |= *pk_byte;
}
if d == 0 {
return false;
}
let mut hasher = Sha512::new();
hasher.input(&signature[0..32]);
hasher.input(public_key);
hasher.input(message);
let mut hash: [u8; 64] = [0; 64];
hasher.result(&mut hash);
sc_reduce(&mut hash);
let r = GeP2::double_scalarmult_vartime(hash.as_ref(), a, &signature[32..64]);
let rcheck = r.to_bytes();
fixed_time_eq(rcheck.as_ref(), &signature[0..32])
}
pub fn exchange(public_key: &[u8], private_key: &[u8]) -> [u8; 32] {
let ed_y = Fe::from_bytes(&public_key);
// Produce public key in Montgomery form.
let mont_x = edwards_to_montgomery_x(ed_y);
// Produce private key from seed component (bytes 0 to 32)
// of the Ed25519 extended private key (64 bytes).
let mut hasher = Sha512::new();
hasher.input(&private_key[0..32]);
let mut hash: [u8; 64] = [0; 64];
hasher.result(&mut hash);
// Clamp the hash such that it is a valid private key
hash[0] &= 248;
hash[31] &= 127;
hash[31] |= 64;
let shared_mont_x : [u8; 32] = curve25519(&hash, &mont_x.to_bytes()); // priv., pub.
shared_mont_x
}
fn edwards_to_montgomery_x(ed_y: Fe) -> Fe {
let ed_z = Fe([1,0,0,0,0,0,0,0,0,0]);
let temp_x = ed_z.add(ed_y);
let temp_z = ed_z.sub(ed_y);
let temp_z_inv = temp_z.invert();
let mont_x = temp_x.mul(temp_z_inv);
mont_x
}
#[cfg(test)]
mod tests {
use ed25519::{keypair, signature, verify, exchange};
use curve25519::{curve25519_base, curve25519};
use digest::Digest;
use sha2::{Sha512};
fn do_keypair_case(seed: [u8; 32], expected_secret: [u8; 64], expected_public: [u8; 32]) {
let (actual_secret, actual_public) = keypair(seed.as_ref());
assert_eq!(actual_secret.to_vec(), expected_secret.to_vec());
assert_eq!(actual_public.to_vec(), expected_public.to_vec());
}
#[test]
fn keypair_cases() {
do_keypair_case(
[0x26, 0x27, 0xf6, 0x85, 0x97, 0x15, 0xad, 0x1d, 0xd2, 0x94, 0xdd, 0xc4, 0x76, 0x19, 0x39, 0x31,
0xf1, 0xad, 0xb5, 0x58, 0xf0, 0x93, 0x97, 0x32, 0x19, 0x2b, 0xd1, 0xc0, 0xfd, 0x16, 0x8e, 0x4e],
[0x26, 0x27, 0xf6, 0x85, 0x97, 0x15, 0xad, 0x1d, 0xd2, 0x94, 0xdd, 0xc4, 0x76, 0x19, 0x39, 0x31,
0xf1, 0xad, 0xb5, 0x58, 0xf0, 0x93, 0x97, 0x32, 0x19, 0x2b, 0xd1, 0xc0, 0xfd, 0x16, 0x8e, 0x4e,
0x5d, 0x6d, 0x23, 0x6b, 0x52, 0xd1, 0x8e, 0x3a, 0xb6, 0xd6, 0x07, 0x2f, 0xb6, 0xe4, 0xc7, 0xd4,
0x6b, 0xd5, 0x9a, 0xd9, 0xcc, 0x19, 0x47, 0x26, 0x5f, 0x00, 0xb7, 0x20, 0xfa, 0x2c, 0x8f, 0x66],
[0x5d, 0x6d, 0x23, 0x6b, 0x52, 0xd1, 0x8e, 0x3a, 0xb6, 0xd6, 0x07, 0x2f, 0xb6, 0xe4, 0xc7, 0xd4,
0x6b, 0xd5, 0x9a, 0xd9, 0xcc, 0x19, 0x47, 0x26, 0x5f, 0x00, 0xb7, 0x20, 0xfa, 0x2c, 0x8f, 0x66]);
do_keypair_case(
[0x29, 0x23, 0xbe, 0x84, 0xe1, 0x6c, 0xd6, 0xae, 0x52, 0x90, 0x49, 0xf1, 0xf1, 0xbb, 0xe9, 0xeb,
0xb3, 0xa6, 0xdb, 0x3c, 0x87, 0x0c, 0x3e, 0x99, 0x24, 0x5e, 0x0d, 0x1c, 0x06, 0xb7, 0x47, 0xde],
[0x29, 0x23, 0xbe, 0x84, 0xe1, 0x6c, 0xd6, 0xae, 0x52, 0x90, 0x49, 0xf1, 0xf1, 0xbb, 0xe9, 0xeb,
0xb3, 0xa6, 0xdb, 0x3c, 0x87, 0x0c, 0x3e, 0x99, 0x24, 0x5e, 0x0d, 0x1c, 0x06, 0xb7, 0x47, 0xde,
0x5d, 0x83, 0x31, 0x26, 0x56, 0x0c, 0xb1, 0x9a, 0x14, 0x19, 0x37, 0x27, 0x78, 0x96, 0xf0, 0xfd,
0x43, 0x7b, 0xa6, 0x80, 0x1e, 0xb2, 0x10, 0xac, 0x4c, 0x39, 0xd9, 0x00, 0x72, 0xd7, 0x0d, 0xa8],
[0x5d, 0x83, 0x31, 0x26, 0x56, 0x0c, 0xb1, 0x9a, 0x14, 0x19, 0x37, 0x27, 0x78, 0x96, 0xf0, 0xfd,
0x43, 0x7b, 0xa6, 0x80, 0x1e, 0xb2, 0x10, 0xac, 0x4c, 0x39, 0xd9, 0x00, 0x72, 0xd7, 0x0d, 0xa8]);
}
#[test]
fn keypair_matches_mont() {
let seed = [0x26, 0x27, 0xf6, 0x85, 0x97, 0x15, 0xad, 0x1d, 0xd2, 0x94, 0xdd, 0xc4, 0x76, 0x19, 0x39, 0x31,
0xf1, 0xad, 0xb5, 0x58, 0xf0, 0x93, 0x97, 0x32, 0x19, 0x2b, 0xd1, 0xc0, 0xfd, 0x16, 0x8e, 0x4e];
let (ed_private, ed_public) = keypair(seed.as_ref());
let mut hasher = Sha512::new();
hasher.input(&ed_private[0..32]);
let mut hash: [u8; 64] = [0; 64];
hasher.result(&mut hash);
hash[0] &= 248;
hash[31] &= 127;
hash[31] |= 64;
let cv_public = curve25519_base(&hash);
let edx_ss = exchange(&ed_public, &ed_private);
let cv_ss = curve25519(&hash, &cv_public); |
fn do_sign_verify_case(seed: [u8; 32], message: &[u8], expected_signature: [u8; 64]) {
let (secret_key, public_key) = keypair(seed.as_ref());
let mut actual_signature = signature(message, secret_key.as_ref());
assert_eq!(expected_signature.to_vec(), actual_signature.to_vec());
assert!(verify(message, public_key.as_ref(), actual_signature.as_ref()));
for &(index, flip) in [(0, 1), (31, 0x80), (20, 0xff)].iter() {
actual_signature[index] ^= flip;
assert!(!verify(message, public_key.as_ref(), actual_signature.as_ref()));
actual_signature[index] ^= flip;
}
let mut public_key_corrupt = public_key;
public_key_corrupt[0] ^= 1;
assert!(!verify(message, public_key_corrupt.as_ref(), actual_signature.as_ref()));
}
#[test]
fn sign_verify_cases() {
do_sign_verify_case(
[0x2d, 0x20, 0x86, 0x83, 0x2c, 0xc2, 0xfe, 0x3f, 0xd1, 0x8c, 0xb5, 0x1d, 0x6c, 0x5e, 0x99, 0xa5,
0x75, 0x9f, 0x02, 0x21, 0x1f, 0x85, 0xe5, 0xff, 0x2f, 0x90, 0x4a, 0x78, 0x0f, 0x58, 0x00, 0x6f],
[0x89, 0x8f, 0x9c, 0x4b, 0x2c, 0x6e, 0xe9, 0xe2, 0x28, 0x76, 0x1c, 0xa5, 0x08, 0x97, 0xb7, 0x1f,
0xfe, 0xca, 0x1c, 0x35, 0x28, 0x46, 0xf5, 0xfe, 0x13, 0xf7, 0xd3, 0xd5, 0x7e, 0x2c, 0x15, 0xac,
0x60, 0x90, 0x0c, 0xa3, 0x2c, 0x5b, 0x5d, 0xd9, 0x53, 0xc9, 0xa6, 0x81, 0x0a, 0xcc, 0x64, 0x39,
0x4f, 0xfd, 0x14, 0x98, 0x26, 0xd9, 0x98, 0x06, 0x29, 0x2a, 0xdd, 0xd1, 0x3f, 0xc3, 0xbb, 0x7d,
0xac, 0x70, 0x1c, 0x5b, 0x4a, 0x2d, 0x61, 0x5d, 0x15, 0x96, 0x01, 0x28, 0xed, 0x9f, 0x73, 0x6b,
0x98, 0x85, 0x4f, 0x6f, 0x07, 0x05, 0xb0, 0xf0, 0xda, 0xcb, 0xdc, 0x2c, 0x26, 0x2d, 0x27, 0x39,
0x75, 0x19, 0x14, 0x9b, 0x0e, 0x4c, 0xbe, 0x16, 0x77, 0xc5, 0x76, 0xc1, 0x39, 0x7a, 0xae, 0x5c,
0xe3, 0x49, 0x16, 0xe3, 0x51, 0x31, 0x04, 0x63, 0x2e, 0xc2, 0x19, 0x0d, 0xb8, 0xd2, 0x22, 0x89,
0xc3, 0x72, 0x3c, 0x8d, 0x01, 0x21, 0x3c, 0xad, 0x80, 0x3f, 0x4d, 0x75, 0x74, 0xc4, 0xdb, 0xb5,
0x37, 0x31, 0xb0, 0x1c, 0x8e, 0xc7, 0x5d, 0x08, 0x2e, 0xf7, 0xdc, 0x9d, 0x7f, 0x1b, 0x73, 0x15,
0x9f, 0x63, 0xdb, 0x56, 0xaa, 0x12, 0xa2, 0xca, 0x39, 0xea, 0xce, 0x6b, 0x28, 0xe4, 0xc3, 0x1d,
0x9d, 0x25, 0x67, 0x41, 0x45, 0x2e, 0x83, 0x87, 0xe1, 0x53, 0x6d, 0x03, 0x02, 0x6e, 0xe4, 0x84,
0x10, 0xd4, 0x3b, 0x21, 0x91, 0x88, 0xba, 0x14, 0xa8, 0xaf].as_ref(),
[0x91, 0x20, 0x91, 0x66, 0x1e, 0xed, 0x18, 0xa4, 0x03, 0x4b, 0xc7, 0xdb, 0x4b, 0xd6, 0x0f, 0xe2,
0xde, 0xeb, 0xf3, 0xff, 0x3b, 0x6b, 0x99, 0x8d, 0xae, 0x20, 0x94, 0xb6, 0x09, 0x86, 0x5c, 0x20,
0x19, 0xec, 0x67, 0x22, 0xbf, 0xdc, 0x87, 0xbd, 0xa5, 0x40, 0x91, 0x92, 0x2e, 0x11, 0xe3, 0x93,
0xf5, 0xfd, 0xce, 0xea, 0x3e, 0x09, 0x1f, 0x2e, 0xe6, 0xbc, 0x62, 0xdf, 0x94, 0x8e, 0x99, 0x09]
);
do_sign_verify_case(
[0x33, 0x19, 0x17, 0x82, 0xc1, 0x70, 0x4f, 0x60, 0xd0, 0x84, 0x8d, 0x75, 0x62, 0xa2, 0xfa, 0x19,
0xf9, 0x92, 0x4f, 0xea, 0x4e, 0x77, 0x33, 0xcd, 0x45, 0xf6, 0xc3, 0x2f, 0x21, 0x9a, 0x72, 0x91],
[0x77, 0x13, 0x43, 0x5a, 0x0e, 0x34, 0x6f, 0x67, 0x71, 0xae, 0x5a, 0xde, 0xa8, 0x7a, 0xe7, 0xa4,
0x52, 0xc6, 0x5d, 0x74, 0x8f, 0x48, 0x69, 0xd3, 0x1e, 0xd3, 0x67, 0x47, 0xc3, 0x28, 0xdd, 0xc4,
0xec, 0x0e, 0x48, 0x67, 0x93, 0xa5, 0x1c, 0x67, 0x66, 0xf7, 0x06, 0x48, 0x26, 0xd0, 0x74, 0x51,
0x4d, 0xd0, 0x57, 0x41, 0xf3, 0xbe, 0x27, 0x3e, 0xf2, 0x1f, 0x28, 0x0e, 0x49, 0x07, 0xed, 0x89,
0xbe, 0x30, 0x1a, 0x4e, 0xc8, 0x49, 0x6e, 0xb6, 0xab, 0x90, 0x00, 0x06, 0xe5, 0xa3, 0xc8, 0xe9,
0xc9, 0x93, 0x62, 0x1d, 0x6a, 0x3b, 0x0f, 0x6c, 0xba, 0xd0, 0xfd, 0xde, 0xf3, 0xb9, 0xc8, 0x2d].as_ref(),
[0x4b, 0x8d, 0x9b, 0x1e, 0xca, 0x54, 0x00, 0xea, 0xc6, 0xf5, 0xcc, 0x0c, 0x94, 0x39, 0x63, 0x00,
0x52, 0xf7, 0x34, 0xce, 0x45, 0x3e, 0x94, 0x26, 0xf3, 0x19, 0xdd, 0x96, 0x03, 0xb6, 0xae, 0xae,
0xb9, 0xd2, 0x3a, 0x5f, 0x93, 0xf0, 0x6a, 0x46, 0x00, 0x18, 0xf0, 0x69, 0xdf, 0x19, 0x44, 0x48,
0xf5, 0x60, 0x51, 0xab, 0x9e, 0x6b, 0xfa, 0xeb, 0x64, 0x10, 0x16, 0xf7, 0xa9, 0x0b, 0xe2, 0x0c]
);
}
} |
assert_eq!(edx_ss.to_vec(), cv_ss.to_vec());
} | random_line_split |
ed25519.rs | use digest::Digest;
use sha2::{Sha512};
use curve25519::{GeP2, GeP3, ge_scalarmult_base, sc_reduce, sc_muladd, curve25519, Fe};
use util::{fixed_time_eq};
use std::ops::{Add, Sub, Mul};
pub fn keypair(seed: &[u8]) -> ([u8; 64], [u8; 32]) {
let mut secret: [u8; 64] = {
let mut hash_output: [u8; 64] = [0; 64];
let mut hasher = Sha512::new();
hasher.input(seed);
hasher.result(&mut hash_output);
hash_output[0] &= 248;
hash_output[31] &= 63;
hash_output[31] |= 64;
hash_output
};
let a = ge_scalarmult_base(&secret[0..32]);
let public_key = a.to_bytes();
for (dest, src) in (&mut secret[32..64]).iter_mut().zip(public_key.iter()) {
*dest = *src;
}
for (dest, src) in (&mut secret[0..32]).iter_mut().zip(seed.iter()) {
*dest = *src;
}
(secret, public_key)
}
pub fn signature(message: &[u8], secret_key: &[u8]) -> [u8; 64] {
let seed = &secret_key[0..32];
let public_key = &secret_key[32..64];
let az: [u8; 64] = {
let mut hash_output: [u8; 64] = [0; 64];
let mut hasher = Sha512::new();
hasher.input(seed);
hasher.result(&mut hash_output);
hash_output[0] &= 248;
hash_output[31] &= 63;
hash_output[31] |= 64;
hash_output
};
let nonce = {
let mut hash_output: [u8; 64] = [0; 64];
let mut hasher = Sha512::new();
hasher.input(&az[32..64]);
hasher.input(message);
hasher.result(&mut hash_output);
sc_reduce(&mut hash_output[0..64]);
hash_output
};
let mut signature: [u8; 64] = [0; 64];
let r: GeP3 = ge_scalarmult_base(&nonce[0..32]);
for (result_byte, source_byte) in (&mut signature[0..32]).iter_mut().zip(r.to_bytes().iter()) {
*result_byte = *source_byte;
}
for (result_byte, source_byte) in (&mut signature[32..64]).iter_mut().zip(public_key.iter()) {
*result_byte = *source_byte;
}
{
let mut hasher = Sha512::new();
hasher.input(signature.as_ref());
hasher.input(message);
let mut hram: [u8; 64] = [0; 64];
hasher.result(&mut hram);
sc_reduce(&mut hram);
sc_muladd(&mut signature[32..64], &hram[0..32], &az[0..32], &nonce[0..32]);
}
signature
}
fn check_s_lt_l(s: &[u8]) -> bool
{
let l: [u8; 32] =
[ 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x14, 0xde, 0xf9, 0xde, 0xa2, 0xf7, 0x9c, 0xd6,
0x58, 0x12, 0x63, 0x1a, 0x5c, 0xf5, 0xd3, 0xed ];
let mut c: u8 = 0;
let mut n: u8 = 1;
let mut i = 31;
loop {
c |= ((((s[i] as i32) - (l[i] as i32)) >> 8) as u8) & n;
n &= (((((s[i] ^ l[i]) as i32)) - 1) >> 8) as u8;
if i == 0 {
break;
} else {
i -= 1;
}
}
c == 0
}
pub fn verify(message: &[u8], public_key: &[u8], signature: &[u8]) -> bool {
if check_s_lt_l(&signature[32..64]) {
return false;
}
let a = match GeP3::from_bytes_negate_vartime(public_key) {
Some(g) => g,
None => { return false; }
};
let mut d = 0;
for pk_byte in public_key.iter() {
d |= *pk_byte;
}
if d == 0 {
return false;
}
let mut hasher = Sha512::new();
hasher.input(&signature[0..32]);
hasher.input(public_key);
hasher.input(message);
let mut hash: [u8; 64] = [0; 64];
hasher.result(&mut hash);
sc_reduce(&mut hash);
let r = GeP2::double_scalarmult_vartime(hash.as_ref(), a, &signature[32..64]);
let rcheck = r.to_bytes();
fixed_time_eq(rcheck.as_ref(), &signature[0..32])
}
pub fn exchange(public_key: &[u8], private_key: &[u8]) -> [u8; 32] {
let ed_y = Fe::from_bytes(&public_key);
// Produce public key in Montgomery form.
let mont_x = edwards_to_montgomery_x(ed_y);
// Produce private key from seed component (bytes 0 to 32)
// of the Ed25519 extended private key (64 bytes).
let mut hasher = Sha512::new();
hasher.input(&private_key[0..32]);
let mut hash: [u8; 64] = [0; 64];
hasher.result(&mut hash);
// Clamp the hash such that it is a valid private key
hash[0] &= 248;
hash[31] &= 127;
hash[31] |= 64;
let shared_mont_x : [u8; 32] = curve25519(&hash, &mont_x.to_bytes()); // priv., pub.
shared_mont_x
}
fn edwards_to_montgomery_x(ed_y: Fe) -> Fe {
let ed_z = Fe([1,0,0,0,0,0,0,0,0,0]);
let temp_x = ed_z.add(ed_y);
let temp_z = ed_z.sub(ed_y);
let temp_z_inv = temp_z.invert();
let mont_x = temp_x.mul(temp_z_inv);
mont_x
}
#[cfg(test)]
mod tests {
use ed25519::{keypair, signature, verify, exchange};
use curve25519::{curve25519_base, curve25519};
use digest::Digest;
use sha2::{Sha512};
fn do_keypair_case(seed: [u8; 32], expected_secret: [u8; 64], expected_public: [u8; 32]) {
let (actual_secret, actual_public) = keypair(seed.as_ref());
assert_eq!(actual_secret.to_vec(), expected_secret.to_vec());
assert_eq!(actual_public.to_vec(), expected_public.to_vec());
}
#[test]
fn keypair_cases() {
do_keypair_case(
[0x26, 0x27, 0xf6, 0x85, 0x97, 0x15, 0xad, 0x1d, 0xd2, 0x94, 0xdd, 0xc4, 0x76, 0x19, 0x39, 0x31,
0xf1, 0xad, 0xb5, 0x58, 0xf0, 0x93, 0x97, 0x32, 0x19, 0x2b, 0xd1, 0xc0, 0xfd, 0x16, 0x8e, 0x4e],
[0x26, 0x27, 0xf6, 0x85, 0x97, 0x15, 0xad, 0x1d, 0xd2, 0x94, 0xdd, 0xc4, 0x76, 0x19, 0x39, 0x31,
0xf1, 0xad, 0xb5, 0x58, 0xf0, 0x93, 0x97, 0x32, 0x19, 0x2b, 0xd1, 0xc0, 0xfd, 0x16, 0x8e, 0x4e,
0x5d, 0x6d, 0x23, 0x6b, 0x52, 0xd1, 0x8e, 0x3a, 0xb6, 0xd6, 0x07, 0x2f, 0xb6, 0xe4, 0xc7, 0xd4,
0x6b, 0xd5, 0x9a, 0xd9, 0xcc, 0x19, 0x47, 0x26, 0x5f, 0x00, 0xb7, 0x20, 0xfa, 0x2c, 0x8f, 0x66],
[0x5d, 0x6d, 0x23, 0x6b, 0x52, 0xd1, 0x8e, 0x3a, 0xb6, 0xd6, 0x07, 0x2f, 0xb6, 0xe4, 0xc7, 0xd4,
0x6b, 0xd5, 0x9a, 0xd9, 0xcc, 0x19, 0x47, 0x26, 0x5f, 0x00, 0xb7, 0x20, 0xfa, 0x2c, 0x8f, 0x66]);
do_keypair_case(
[0x29, 0x23, 0xbe, 0x84, 0xe1, 0x6c, 0xd6, 0xae, 0x52, 0x90, 0x49, 0xf1, 0xf1, 0xbb, 0xe9, 0xeb,
0xb3, 0xa6, 0xdb, 0x3c, 0x87, 0x0c, 0x3e, 0x99, 0x24, 0x5e, 0x0d, 0x1c, 0x06, 0xb7, 0x47, 0xde],
[0x29, 0x23, 0xbe, 0x84, 0xe1, 0x6c, 0xd6, 0xae, 0x52, 0x90, 0x49, 0xf1, 0xf1, 0xbb, 0xe9, 0xeb,
0xb3, 0xa6, 0xdb, 0x3c, 0x87, 0x0c, 0x3e, 0x99, 0x24, 0x5e, 0x0d, 0x1c, 0x06, 0xb7, 0x47, 0xde,
0x5d, 0x83, 0x31, 0x26, 0x56, 0x0c, 0xb1, 0x9a, 0x14, 0x19, 0x37, 0x27, 0x78, 0x96, 0xf0, 0xfd,
0x43, 0x7b, 0xa6, 0x80, 0x1e, 0xb2, 0x10, 0xac, 0x4c, 0x39, 0xd9, 0x00, 0x72, 0xd7, 0x0d, 0xa8],
[0x5d, 0x83, 0x31, 0x26, 0x56, 0x0c, 0xb1, 0x9a, 0x14, 0x19, 0x37, 0x27, 0x78, 0x96, 0xf0, 0xfd,
0x43, 0x7b, 0xa6, 0x80, 0x1e, 0xb2, 0x10, 0xac, 0x4c, 0x39, 0xd9, 0x00, 0x72, 0xd7, 0x0d, 0xa8]);
}
#[test]
fn | () {
let seed = [0x26, 0x27, 0xf6, 0x85, 0x97, 0x15, 0xad, 0x1d, 0xd2, 0x94, 0xdd, 0xc4, 0x76, 0x19, 0x39, 0x31,
0xf1, 0xad, 0xb5, 0x58, 0xf0, 0x93, 0x97, 0x32, 0x19, 0x2b, 0xd1, 0xc0, 0xfd, 0x16, 0x8e, 0x4e];
let (ed_private, ed_public) = keypair(seed.as_ref());
let mut hasher = Sha512::new();
hasher.input(&ed_private[0..32]);
let mut hash: [u8; 64] = [0; 64];
hasher.result(&mut hash);
hash[0] &= 248;
hash[31] &= 127;
hash[31] |= 64;
let cv_public = curve25519_base(&hash);
let edx_ss = exchange(&ed_public, &ed_private);
let cv_ss = curve25519(&hash, &cv_public);
assert_eq!(edx_ss.to_vec(), cv_ss.to_vec());
}
fn do_sign_verify_case(seed: [u8; 32], message: &[u8], expected_signature: [u8; 64]) {
let (secret_key, public_key) = keypair(seed.as_ref());
let mut actual_signature = signature(message, secret_key.as_ref());
assert_eq!(expected_signature.to_vec(), actual_signature.to_vec());
assert!(verify(message, public_key.as_ref(), actual_signature.as_ref()));
for &(index, flip) in [(0, 1), (31, 0x80), (20, 0xff)].iter() {
actual_signature[index] ^= flip;
assert!(!verify(message, public_key.as_ref(), actual_signature.as_ref()));
actual_signature[index] ^= flip;
}
let mut public_key_corrupt = public_key;
public_key_corrupt[0] ^= 1;
assert!(!verify(message, public_key_corrupt.as_ref(), actual_signature.as_ref()));
}
#[test]
fn sign_verify_cases() {
do_sign_verify_case(
[0x2d, 0x20, 0x86, 0x83, 0x2c, 0xc2, 0xfe, 0x3f, 0xd1, 0x8c, 0xb5, 0x1d, 0x6c, 0x5e, 0x99, 0xa5,
0x75, 0x9f, 0x02, 0x21, 0x1f, 0x85, 0xe5, 0xff, 0x2f, 0x90, 0x4a, 0x78, 0x0f, 0x58, 0x00, 0x6f],
[0x89, 0x8f, 0x9c, 0x4b, 0x2c, 0x6e, 0xe9, 0xe2, 0x28, 0x76, 0x1c, 0xa5, 0x08, 0x97, 0xb7, 0x1f,
0xfe, 0xca, 0x1c, 0x35, 0x28, 0x46, 0xf5, 0xfe, 0x13, 0xf7, 0xd3, 0xd5, 0x7e, 0x2c, 0x15, 0xac,
0x60, 0x90, 0x0c, 0xa3, 0x2c, 0x5b, 0x5d, 0xd9, 0x53, 0xc9, 0xa6, 0x81, 0x0a, 0xcc, 0x64, 0x39,
0x4f, 0xfd, 0x14, 0x98, 0x26, 0xd9, 0x98, 0x06, 0x29, 0x2a, 0xdd, 0xd1, 0x3f, 0xc3, 0xbb, 0x7d,
0xac, 0x70, 0x1c, 0x5b, 0x4a, 0x2d, 0x61, 0x5d, 0x15, 0x96, 0x01, 0x28, 0xed, 0x9f, 0x73, 0x6b,
0x98, 0x85, 0x4f, 0x6f, 0x07, 0x05, 0xb0, 0xf0, 0xda, 0xcb, 0xdc, 0x2c, 0x26, 0x2d, 0x27, 0x39,
0x75, 0x19, 0x14, 0x9b, 0x0e, 0x4c, 0xbe, 0x16, 0x77, 0xc5, 0x76, 0xc1, 0x39, 0x7a, 0xae, 0x5c,
0xe3, 0x49, 0x16, 0xe3, 0x51, 0x31, 0x04, 0x63, 0x2e, 0xc2, 0x19, 0x0d, 0xb8, 0xd2, 0x22, 0x89,
0xc3, 0x72, 0x3c, 0x8d, 0x01, 0x21, 0x3c, 0xad, 0x80, 0x3f, 0x4d, 0x75, 0x74, 0xc4, 0xdb, 0xb5,
0x37, 0x31, 0xb0, 0x1c, 0x8e, 0xc7, 0x5d, 0x08, 0x2e, 0xf7, 0xdc, 0x9d, 0x7f, 0x1b, 0x73, 0x15,
0x9f, 0x63, 0xdb, 0x56, 0xaa, 0x12, 0xa2, 0xca, 0x39, 0xea, 0xce, 0x6b, 0x28, 0xe4, 0xc3, 0x1d,
0x9d, 0x25, 0x67, 0x41, 0x45, 0x2e, 0x83, 0x87, 0xe1, 0x53, 0x6d, 0x03, 0x02, 0x6e, 0xe4, 0x84,
0x10, 0xd4, 0x3b, 0x21, 0x91, 0x88, 0xba, 0x14, 0xa8, 0xaf].as_ref(),
[0x91, 0x20, 0x91, 0x66, 0x1e, 0xed, 0x18, 0xa4, 0x03, 0x4b, 0xc7, 0xdb, 0x4b, 0xd6, 0x0f, 0xe2,
0xde, 0xeb, 0xf3, 0xff, 0x3b, 0x6b, 0x99, 0x8d, 0xae, 0x20, 0x94, 0xb6, 0x09, 0x86, 0x5c, 0x20,
0x19, 0xec, 0x67, 0x22, 0xbf, 0xdc, 0x87, 0xbd, 0xa5, 0x40, 0x91, 0x92, 0x2e, 0x11, 0xe3, 0x93,
0xf5, 0xfd, 0xce, 0xea, 0x3e, 0x09, 0x1f, 0x2e, 0xe6, 0xbc, 0x62, 0xdf, 0x94, 0x8e, 0x99, 0x09]
);
do_sign_verify_case(
[0x33, 0x19, 0x17, 0x82, 0xc1, 0x70, 0x4f, 0x60, 0xd0, 0x84, 0x8d, 0x75, 0x62, 0xa2, 0xfa, 0x19,
0xf9, 0x92, 0x4f, 0xea, 0x4e, 0x77, 0x33, 0xcd, 0x45, 0xf6, 0xc3, 0x2f, 0x21, 0x9a, 0x72, 0x91],
[0x77, 0x13, 0x43, 0x5a, 0x0e, 0x34, 0x6f, 0x67, 0x71, 0xae, 0x5a, 0xde, 0xa8, 0x7a, 0xe7, 0xa4,
0x52, 0xc6, 0x5d, 0x74, 0x8f, 0x48, 0x69, 0xd3, 0x1e, 0xd3, 0x67, 0x47, 0xc3, 0x28, 0xdd, 0xc4,
0xec, 0x0e, 0x48, 0x67, 0x93, 0xa5, 0x1c, 0x67, 0x66, 0xf7, 0x06, 0x48, 0x26, 0xd0, 0x74, 0x51,
0x4d, 0xd0, 0x57, 0x41, 0xf3, 0xbe, 0x27, 0x3e, 0xf2, 0x1f, 0x28, 0x0e, 0x49, 0x07, 0xed, 0x89,
0xbe, 0x30, 0x1a, 0x4e, 0xc8, 0x49, 0x6e, 0xb6, 0xab, 0x90, 0x00, 0x06, 0xe5, 0xa3, 0xc8, 0xe9,
0xc9, 0x93, 0x62, 0x1d, 0x6a, 0x3b, 0x0f, 0x6c, 0xba, 0xd0, 0xfd, 0xde, 0xf3, 0xb9, 0xc8, 0x2d].as_ref(),
[0x4b, 0x8d, 0x9b, 0x1e, 0xca, 0x54, 0x00, 0xea, 0xc6, 0xf5, 0xcc, 0x0c, 0x94, 0x39, 0x63, 0x00,
0x52, 0xf7, 0x34, 0xce, 0x45, 0x3e, 0x94, 0x26, 0xf3, 0x19, 0xdd, 0x96, 0x03, 0xb6, 0xae, 0xae,
0xb9, 0xd2, 0x3a, 0x5f, 0x93, 0xf0, 0x6a, 0x46, 0x00, 0x18, 0xf0, 0x69, 0xdf, 0x19, 0x44, 0x48,
0xf5, 0x60, 0x51, 0xab, 0x9e, 0x6b, 0xfa, 0xeb, 0x64, 0x10, 0x16, 0xf7, 0xa9, 0x0b, 0xe2, 0x0c]
);
}
}
| keypair_matches_mont | identifier_name |
ed25519.rs | use digest::Digest;
use sha2::{Sha512};
use curve25519::{GeP2, GeP3, ge_scalarmult_base, sc_reduce, sc_muladd, curve25519, Fe};
use util::{fixed_time_eq};
use std::ops::{Add, Sub, Mul};
pub fn keypair(seed: &[u8]) -> ([u8; 64], [u8; 32]) {
let mut secret: [u8; 64] = {
let mut hash_output: [u8; 64] = [0; 64];
let mut hasher = Sha512::new();
hasher.input(seed);
hasher.result(&mut hash_output);
hash_output[0] &= 248;
hash_output[31] &= 63;
hash_output[31] |= 64;
hash_output
};
let a = ge_scalarmult_base(&secret[0..32]);
let public_key = a.to_bytes();
for (dest, src) in (&mut secret[32..64]).iter_mut().zip(public_key.iter()) {
*dest = *src;
}
for (dest, src) in (&mut secret[0..32]).iter_mut().zip(seed.iter()) {
*dest = *src;
}
(secret, public_key)
}
pub fn signature(message: &[u8], secret_key: &[u8]) -> [u8; 64] {
let seed = &secret_key[0..32];
let public_key = &secret_key[32..64];
let az: [u8; 64] = {
let mut hash_output: [u8; 64] = [0; 64];
let mut hasher = Sha512::new();
hasher.input(seed);
hasher.result(&mut hash_output);
hash_output[0] &= 248;
hash_output[31] &= 63;
hash_output[31] |= 64;
hash_output
};
let nonce = {
let mut hash_output: [u8; 64] = [0; 64];
let mut hasher = Sha512::new();
hasher.input(&az[32..64]);
hasher.input(message);
hasher.result(&mut hash_output);
sc_reduce(&mut hash_output[0..64]);
hash_output
};
let mut signature: [u8; 64] = [0; 64];
let r: GeP3 = ge_scalarmult_base(&nonce[0..32]);
for (result_byte, source_byte) in (&mut signature[0..32]).iter_mut().zip(r.to_bytes().iter()) {
*result_byte = *source_byte;
}
for (result_byte, source_byte) in (&mut signature[32..64]).iter_mut().zip(public_key.iter()) {
*result_byte = *source_byte;
}
{
let mut hasher = Sha512::new();
hasher.input(signature.as_ref());
hasher.input(message);
let mut hram: [u8; 64] = [0; 64];
hasher.result(&mut hram);
sc_reduce(&mut hram);
sc_muladd(&mut signature[32..64], &hram[0..32], &az[0..32], &nonce[0..32]);
}
signature
}
fn check_s_lt_l(s: &[u8]) -> bool
{
let l: [u8; 32] =
[ 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x14, 0xde, 0xf9, 0xde, 0xa2, 0xf7, 0x9c, 0xd6,
0x58, 0x12, 0x63, 0x1a, 0x5c, 0xf5, 0xd3, 0xed ];
let mut c: u8 = 0;
let mut n: u8 = 1;
let mut i = 31;
loop {
c |= ((((s[i] as i32) - (l[i] as i32)) >> 8) as u8) & n;
n &= (((((s[i] ^ l[i]) as i32)) - 1) >> 8) as u8;
if i == 0 {
break;
} else {
i -= 1;
}
}
c == 0
}
pub fn verify(message: &[u8], public_key: &[u8], signature: &[u8]) -> bool {
if check_s_lt_l(&signature[32..64]) {
return false;
}
let a = match GeP3::from_bytes_negate_vartime(public_key) {
Some(g) => g,
None => { return false; }
};
let mut d = 0;
for pk_byte in public_key.iter() {
d |= *pk_byte;
}
if d == 0 {
return false;
}
let mut hasher = Sha512::new();
hasher.input(&signature[0..32]);
hasher.input(public_key);
hasher.input(message);
let mut hash: [u8; 64] = [0; 64];
hasher.result(&mut hash);
sc_reduce(&mut hash);
let r = GeP2::double_scalarmult_vartime(hash.as_ref(), a, &signature[32..64]);
let rcheck = r.to_bytes();
fixed_time_eq(rcheck.as_ref(), &signature[0..32])
}
pub fn exchange(public_key: &[u8], private_key: &[u8]) -> [u8; 32] {
let ed_y = Fe::from_bytes(&public_key);
// Produce public key in Montgomery form.
let mont_x = edwards_to_montgomery_x(ed_y);
// Produce private key from seed component (bytes 0 to 32)
// of the Ed25519 extended private key (64 bytes).
let mut hasher = Sha512::new();
hasher.input(&private_key[0..32]);
let mut hash: [u8; 64] = [0; 64];
hasher.result(&mut hash);
// Clamp the hash such that it is a valid private key
hash[0] &= 248;
hash[31] &= 127;
hash[31] |= 64;
let shared_mont_x : [u8; 32] = curve25519(&hash, &mont_x.to_bytes()); // priv., pub.
shared_mont_x
}
fn edwards_to_montgomery_x(ed_y: Fe) -> Fe {
let ed_z = Fe([1,0,0,0,0,0,0,0,0,0]);
let temp_x = ed_z.add(ed_y);
let temp_z = ed_z.sub(ed_y);
let temp_z_inv = temp_z.invert();
let mont_x = temp_x.mul(temp_z_inv);
mont_x
}
#[cfg(test)]
mod tests {
use ed25519::{keypair, signature, verify, exchange};
use curve25519::{curve25519_base, curve25519};
use digest::Digest;
use sha2::{Sha512};
fn do_keypair_case(seed: [u8; 32], expected_secret: [u8; 64], expected_public: [u8; 32]) {
let (actual_secret, actual_public) = keypair(seed.as_ref());
assert_eq!(actual_secret.to_vec(), expected_secret.to_vec());
assert_eq!(actual_public.to_vec(), expected_public.to_vec());
}
#[test]
fn keypair_cases() {
do_keypair_case(
[0x26, 0x27, 0xf6, 0x85, 0x97, 0x15, 0xad, 0x1d, 0xd2, 0x94, 0xdd, 0xc4, 0x76, 0x19, 0x39, 0x31,
0xf1, 0xad, 0xb5, 0x58, 0xf0, 0x93, 0x97, 0x32, 0x19, 0x2b, 0xd1, 0xc0, 0xfd, 0x16, 0x8e, 0x4e],
[0x26, 0x27, 0xf6, 0x85, 0x97, 0x15, 0xad, 0x1d, 0xd2, 0x94, 0xdd, 0xc4, 0x76, 0x19, 0x39, 0x31,
0xf1, 0xad, 0xb5, 0x58, 0xf0, 0x93, 0x97, 0x32, 0x19, 0x2b, 0xd1, 0xc0, 0xfd, 0x16, 0x8e, 0x4e,
0x5d, 0x6d, 0x23, 0x6b, 0x52, 0xd1, 0x8e, 0x3a, 0xb6, 0xd6, 0x07, 0x2f, 0xb6, 0xe4, 0xc7, 0xd4,
0x6b, 0xd5, 0x9a, 0xd9, 0xcc, 0x19, 0x47, 0x26, 0x5f, 0x00, 0xb7, 0x20, 0xfa, 0x2c, 0x8f, 0x66],
[0x5d, 0x6d, 0x23, 0x6b, 0x52, 0xd1, 0x8e, 0x3a, 0xb6, 0xd6, 0x07, 0x2f, 0xb6, 0xe4, 0xc7, 0xd4,
0x6b, 0xd5, 0x9a, 0xd9, 0xcc, 0x19, 0x47, 0x26, 0x5f, 0x00, 0xb7, 0x20, 0xfa, 0x2c, 0x8f, 0x66]);
do_keypair_case(
[0x29, 0x23, 0xbe, 0x84, 0xe1, 0x6c, 0xd6, 0xae, 0x52, 0x90, 0x49, 0xf1, 0xf1, 0xbb, 0xe9, 0xeb,
0xb3, 0xa6, 0xdb, 0x3c, 0x87, 0x0c, 0x3e, 0x99, 0x24, 0x5e, 0x0d, 0x1c, 0x06, 0xb7, 0x47, 0xde],
[0x29, 0x23, 0xbe, 0x84, 0xe1, 0x6c, 0xd6, 0xae, 0x52, 0x90, 0x49, 0xf1, 0xf1, 0xbb, 0xe9, 0xeb,
0xb3, 0xa6, 0xdb, 0x3c, 0x87, 0x0c, 0x3e, 0x99, 0x24, 0x5e, 0x0d, 0x1c, 0x06, 0xb7, 0x47, 0xde,
0x5d, 0x83, 0x31, 0x26, 0x56, 0x0c, 0xb1, 0x9a, 0x14, 0x19, 0x37, 0x27, 0x78, 0x96, 0xf0, 0xfd,
0x43, 0x7b, 0xa6, 0x80, 0x1e, 0xb2, 0x10, 0xac, 0x4c, 0x39, 0xd9, 0x00, 0x72, 0xd7, 0x0d, 0xa8],
[0x5d, 0x83, 0x31, 0x26, 0x56, 0x0c, 0xb1, 0x9a, 0x14, 0x19, 0x37, 0x27, 0x78, 0x96, 0xf0, 0xfd,
0x43, 0x7b, 0xa6, 0x80, 0x1e, 0xb2, 0x10, 0xac, 0x4c, 0x39, 0xd9, 0x00, 0x72, 0xd7, 0x0d, 0xa8]);
}
#[test]
fn keypair_matches_mont() {
let seed = [0x26, 0x27, 0xf6, 0x85, 0x97, 0x15, 0xad, 0x1d, 0xd2, 0x94, 0xdd, 0xc4, 0x76, 0x19, 0x39, 0x31,
0xf1, 0xad, 0xb5, 0x58, 0xf0, 0x93, 0x97, 0x32, 0x19, 0x2b, 0xd1, 0xc0, 0xfd, 0x16, 0x8e, 0x4e];
let (ed_private, ed_public) = keypair(seed.as_ref());
let mut hasher = Sha512::new();
hasher.input(&ed_private[0..32]);
let mut hash: [u8; 64] = [0; 64];
hasher.result(&mut hash);
hash[0] &= 248;
hash[31] &= 127;
hash[31] |= 64;
let cv_public = curve25519_base(&hash);
let edx_ss = exchange(&ed_public, &ed_private);
let cv_ss = curve25519(&hash, &cv_public);
assert_eq!(edx_ss.to_vec(), cv_ss.to_vec());
}
fn do_sign_verify_case(seed: [u8; 32], message: &[u8], expected_signature: [u8; 64]) |
#[test]
fn sign_verify_cases() {
do_sign_verify_case(
[0x2d, 0x20, 0x86, 0x83, 0x2c, 0xc2, 0xfe, 0x3f, 0xd1, 0x8c, 0xb5, 0x1d, 0x6c, 0x5e, 0x99, 0xa5,
0x75, 0x9f, 0x02, 0x21, 0x1f, 0x85, 0xe5, 0xff, 0x2f, 0x90, 0x4a, 0x78, 0x0f, 0x58, 0x00, 0x6f],
[0x89, 0x8f, 0x9c, 0x4b, 0x2c, 0x6e, 0xe9, 0xe2, 0x28, 0x76, 0x1c, 0xa5, 0x08, 0x97, 0xb7, 0x1f,
0xfe, 0xca, 0x1c, 0x35, 0x28, 0x46, 0xf5, 0xfe, 0x13, 0xf7, 0xd3, 0xd5, 0x7e, 0x2c, 0x15, 0xac,
0x60, 0x90, 0x0c, 0xa3, 0x2c, 0x5b, 0x5d, 0xd9, 0x53, 0xc9, 0xa6, 0x81, 0x0a, 0xcc, 0x64, 0x39,
0x4f, 0xfd, 0x14, 0x98, 0x26, 0xd9, 0x98, 0x06, 0x29, 0x2a, 0xdd, 0xd1, 0x3f, 0xc3, 0xbb, 0x7d,
0xac, 0x70, 0x1c, 0x5b, 0x4a, 0x2d, 0x61, 0x5d, 0x15, 0x96, 0x01, 0x28, 0xed, 0x9f, 0x73, 0x6b,
0x98, 0x85, 0x4f, 0x6f, 0x07, 0x05, 0xb0, 0xf0, 0xda, 0xcb, 0xdc, 0x2c, 0x26, 0x2d, 0x27, 0x39,
0x75, 0x19, 0x14, 0x9b, 0x0e, 0x4c, 0xbe, 0x16, 0x77, 0xc5, 0x76, 0xc1, 0x39, 0x7a, 0xae, 0x5c,
0xe3, 0x49, 0x16, 0xe3, 0x51, 0x31, 0x04, 0x63, 0x2e, 0xc2, 0x19, 0x0d, 0xb8, 0xd2, 0x22, 0x89,
0xc3, 0x72, 0x3c, 0x8d, 0x01, 0x21, 0x3c, 0xad, 0x80, 0x3f, 0x4d, 0x75, 0x74, 0xc4, 0xdb, 0xb5,
0x37, 0x31, 0xb0, 0x1c, 0x8e, 0xc7, 0x5d, 0x08, 0x2e, 0xf7, 0xdc, 0x9d, 0x7f, 0x1b, 0x73, 0x15,
0x9f, 0x63, 0xdb, 0x56, 0xaa, 0x12, 0xa2, 0xca, 0x39, 0xea, 0xce, 0x6b, 0x28, 0xe4, 0xc3, 0x1d,
0x9d, 0x25, 0x67, 0x41, 0x45, 0x2e, 0x83, 0x87, 0xe1, 0x53, 0x6d, 0x03, 0x02, 0x6e, 0xe4, 0x84,
0x10, 0xd4, 0x3b, 0x21, 0x91, 0x88, 0xba, 0x14, 0xa8, 0xaf].as_ref(),
[0x91, 0x20, 0x91, 0x66, 0x1e, 0xed, 0x18, 0xa4, 0x03, 0x4b, 0xc7, 0xdb, 0x4b, 0xd6, 0x0f, 0xe2,
0xde, 0xeb, 0xf3, 0xff, 0x3b, 0x6b, 0x99, 0x8d, 0xae, 0x20, 0x94, 0xb6, 0x09, 0x86, 0x5c, 0x20,
0x19, 0xec, 0x67, 0x22, 0xbf, 0xdc, 0x87, 0xbd, 0xa5, 0x40, 0x91, 0x92, 0x2e, 0x11, 0xe3, 0x93,
0xf5, 0xfd, 0xce, 0xea, 0x3e, 0x09, 0x1f, 0x2e, 0xe6, 0xbc, 0x62, 0xdf, 0x94, 0x8e, 0x99, 0x09]
);
do_sign_verify_case(
[0x33, 0x19, 0x17, 0x82, 0xc1, 0x70, 0x4f, 0x60, 0xd0, 0x84, 0x8d, 0x75, 0x62, 0xa2, 0xfa, 0x19,
0xf9, 0x92, 0x4f, 0xea, 0x4e, 0x77, 0x33, 0xcd, 0x45, 0xf6, 0xc3, 0x2f, 0x21, 0x9a, 0x72, 0x91],
[0x77, 0x13, 0x43, 0x5a, 0x0e, 0x34, 0x6f, 0x67, 0x71, 0xae, 0x5a, 0xde, 0xa8, 0x7a, 0xe7, 0xa4,
0x52, 0xc6, 0x5d, 0x74, 0x8f, 0x48, 0x69, 0xd3, 0x1e, 0xd3, 0x67, 0x47, 0xc3, 0x28, 0xdd, 0xc4,
0xec, 0x0e, 0x48, 0x67, 0x93, 0xa5, 0x1c, 0x67, 0x66, 0xf7, 0x06, 0x48, 0x26, 0xd0, 0x74, 0x51,
0x4d, 0xd0, 0x57, 0x41, 0xf3, 0xbe, 0x27, 0x3e, 0xf2, 0x1f, 0x28, 0x0e, 0x49, 0x07, 0xed, 0x89,
0xbe, 0x30, 0x1a, 0x4e, 0xc8, 0x49, 0x6e, 0xb6, 0xab, 0x90, 0x00, 0x06, 0xe5, 0xa3, 0xc8, 0xe9,
0xc9, 0x93, 0x62, 0x1d, 0x6a, 0x3b, 0x0f, 0x6c, 0xba, 0xd0, 0xfd, 0xde, 0xf3, 0xb9, 0xc8, 0x2d].as_ref(),
[0x4b, 0x8d, 0x9b, 0x1e, 0xca, 0x54, 0x00, 0xea, 0xc6, 0xf5, 0xcc, 0x0c, 0x94, 0x39, 0x63, 0x00,
0x52, 0xf7, 0x34, 0xce, 0x45, 0x3e, 0x94, 0x26, 0xf3, 0x19, 0xdd, 0x96, 0x03, 0xb6, 0xae, 0xae,
0xb9, 0xd2, 0x3a, 0x5f, 0x93, 0xf0, 0x6a, 0x46, 0x00, 0x18, 0xf0, 0x69, 0xdf, 0x19, 0x44, 0x48,
0xf5, 0x60, 0x51, 0xab, 0x9e, 0x6b, 0xfa, 0xeb, 0x64, 0x10, 0x16, 0xf7, 0xa9, 0x0b, 0xe2, 0x0c]
);
}
}
| {
let (secret_key, public_key) = keypair(seed.as_ref());
let mut actual_signature = signature(message, secret_key.as_ref());
assert_eq!(expected_signature.to_vec(), actual_signature.to_vec());
assert!(verify(message, public_key.as_ref(), actual_signature.as_ref()));
for &(index, flip) in [(0, 1), (31, 0x80), (20, 0xff)].iter() {
actual_signature[index] ^= flip;
assert!(!verify(message, public_key.as_ref(), actual_signature.as_ref()));
actual_signature[index] ^= flip;
}
let mut public_key_corrupt = public_key;
public_key_corrupt[0] ^= 1;
assert!(!verify(message, public_key_corrupt.as_ref(), actual_signature.as_ref()));
} | identifier_body |
index.d.ts | import {CamelCase, PascalCase} from 'type-fest';
// eslint-disable-next-line @typescript-eslint/ban-types
type EmptyTuple = [];
/**
Return a default type if input type is nil.
@template T - Input type.
@template U - Default type.
*/
type WithDefault<T, U extends T> = T extends undefined | void | null ? U : T;
/**
Check if an element is included in a tuple.
TODO: Remove this once https://github.com/sindresorhus/type-fest/pull/217 is merged.
*/
type IsInclude<List extends readonly unknown[], Target> = List extends undefined
? false | ? First extends Target
? true
: IsInclude<Rest, Target>
: boolean;
/**
Append a segment to dot-notation path.
*/
type AppendPath<S extends string, Last extends string> = S extends ''
? Last
: `${S}.${Last}`;
/**
Convert keys of an object to camelcase strings.
*/
type CamelCaseKeys<
T extends Record<string, any> | readonly any[],
Deep extends boolean,
IsPascalCase extends boolean,
Exclude extends readonly unknown[],
StopPaths extends readonly string[],
Path extends string = ''
> = T extends readonly any[]
// Handle arrays or tuples.
? {
[P in keyof T]: CamelCaseKeys<
T[P],
Deep,
IsPascalCase,
Exclude,
StopPaths
>;
}
: T extends Record<string, any>
// Handle objects.
? {
[P in keyof T & string as [IsInclude<Exclude, P>] extends [true]
? P
: [IsPascalCase] extends [true]
? PascalCase<P>
: CamelCase<P>]: [IsInclude<StopPaths, AppendPath<Path, P>>] extends [
true
]
? T[P]
: [Deep] extends [true]
? CamelCaseKeys<
T[P],
Deep,
IsPascalCase,
Exclude,
StopPaths,
AppendPath<Path, P>
>
: T[P];
}
// Return anything else as-is.
: T;
declare namespace camelcaseKeys {
interface Options {
/**
Recurse nested objects and objects in arrays.
@default false
*/
readonly deep?: boolean;
/**
Exclude keys from being camel-cased.
If this option can be statically determined, it's recommended to add `as const` to it.
@default []
*/
readonly exclude?: ReadonlyArray<string | RegExp>;
/**
Exclude children at the given object paths in dot-notation from being camel-cased. For example, with an object like `{a: {b: '🦄'}}`, the object path to reach the unicorn is `'a.b'`.
If this option can be statically determined, it's recommended to add `as const` to it.
@default []
@example
```
camelcaseKeys({
a_b: 1,
a_c: {
c_d: 1,
c_e: {
e_f: 1
}
}
}, {
deep: true,
stopPaths: [
'a_c.c_e'
]
}),
// {
// aB: 1,
// aC: {
// cD: 1,
// cE: {
// e_f: 1
// }
// }
// }
```
*/
readonly stopPaths?: readonly string[];
/**
Uppercase the first character as in `bye-bye` → `ByeBye`.
@default false
*/
readonly pascalCase?: boolean;
}
}
/**
Convert object keys to camel case using [`camelcase`](https://github.com/sindresorhus/camelcase).
@param input - Object or array of objects to camel-case.
@example
```
import camelcaseKeys = require('camelcase-keys');
// Convert an object
camelcaseKeys({'foo-bar': true});
//=> {fooBar: true}
// Convert an array of objects
camelcaseKeys([{'foo-bar': true}, {'bar-foo': false}]);
//=> [{fooBar: true}, {barFoo: false}]
camelcaseKeys({'foo-bar': true, nested: {unicorn_rainbow: true}}, {deep: true});
//=> {fooBar: true, nested: {unicornRainbow: true}}
// Convert object keys to pascal case
camelcaseKeys({'foo-bar': true, nested: {unicorn_rainbow: true}}, {deep: true, pascalCase: true});
//=> {FooBar: true, Nested: {UnicornRainbow: true}}
import minimist = require('minimist');
const argv = minimist(process.argv.slice(2));
//=> {_: [], 'foo-bar': true}
camelcaseKeys(argv);
//=> {_: [], fooBar: true}
```
*/
declare function camelcaseKeys<
T extends Record<string, any> | readonly any[],
Options extends camelcaseKeys.Options = camelcaseKeys.Options
>(
input: T,
options?: Options
): CamelCaseKeys<
T,
WithDefault<Options['deep'], false>,
WithDefault<Options['pascalCase'], false>,
WithDefault<Options['exclude'], EmptyTuple>,
WithDefault<Options['stopPaths'], EmptyTuple>
>;
export = camelcaseKeys; | : List extends Readonly<EmptyTuple>
? false
: List extends readonly [infer First, ...infer Rest] | random_line_split |
test_class.py | import pytest
from eos_data_distribution import DirTools
from gi.repository import GLib
ITER_COUNT = 10
class TestClass:
@pytest.mark.timeout(timeout=3, method='thread') | self.__called = 0
def cb_changed(M, p, m, f, o, evt, d=None, e=None):
print('signal', e, p, f, o, evt, d)
assert e == 'created'
self.__called += 1
d = tmpdir.mkdir("ndn")
m = DirTools.Monitor(str(d))
[m.connect(s, cb_changed, s) for s in ['created']]
[d.mkdir(str(i)) for i in range(ITER_COUNT)]
GLib.timeout_add_seconds(2, lambda: loop.quit())
loop.run()
assert self.__called == ITER_COUNT | def test_0(self, tmpdir):
loop = GLib.MainLoop() | random_line_split |
test_class.py | import pytest
from eos_data_distribution import DirTools
from gi.repository import GLib
ITER_COUNT = 10
class TestClass:
@pytest.mark.timeout(timeout=3, method='thread')
def test_0(self, tmpdir):
| loop = GLib.MainLoop()
self.__called = 0
def cb_changed(M, p, m, f, o, evt, d=None, e=None):
print('signal', e, p, f, o, evt, d)
assert e == 'created'
self.__called += 1
d = tmpdir.mkdir("ndn")
m = DirTools.Monitor(str(d))
[m.connect(s, cb_changed, s) for s in ['created']]
[d.mkdir(str(i)) for i in range(ITER_COUNT)]
GLib.timeout_add_seconds(2, lambda: loop.quit())
loop.run()
assert self.__called == ITER_COUNT | identifier_body | |
test_class.py | import pytest
from eos_data_distribution import DirTools
from gi.repository import GLib
ITER_COUNT = 10
class TestClass:
@pytest.mark.timeout(timeout=3, method='thread')
def | (self, tmpdir):
loop = GLib.MainLoop()
self.__called = 0
def cb_changed(M, p, m, f, o, evt, d=None, e=None):
print('signal', e, p, f, o, evt, d)
assert e == 'created'
self.__called += 1
d = tmpdir.mkdir("ndn")
m = DirTools.Monitor(str(d))
[m.connect(s, cb_changed, s) for s in ['created']]
[d.mkdir(str(i)) for i in range(ITER_COUNT)]
GLib.timeout_add_seconds(2, lambda: loop.quit())
loop.run()
assert self.__called == ITER_COUNT
| test_0 | identifier_name |
condition-list.component.spec.ts | // Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// tslint:disable:no-any
import {NO_ERRORS_SCHEMA} from '@angular/core';
import {ComponentFixture, TestBed, waitForAsync} from '@angular/core/testing';
import {MatSnackBar} from '@angular/material/snack-bar';
import {set} from 'lodash';
import {of} from 'rxjs';
import {createTestTravelQuestionnaireResponse} from '../../../test/travel-plan';
import {createTestScheduler} from '../../../test/util';
import {PredictionService} from '../../prediction.service';
import {TravelPlan} from '../travel-plan';
| import {ConditionListComponent, RiskProbability} from './condition-list.component';
describe('ConditionListComponent', () => {
const RISK_PROBABILITY_PATH = 'prediction[0].qualitativeRisk.coding[0].code';
let component: ConditionListComponent;
let fixture: ComponentFixture<ConditionListComponent>;
const scheduler = createTestScheduler();
const predictionServiceSpy = {
predictions$: of([] as fhir.RiskAssessment[]),
};
const snackbarSpy =
jasmine.createSpyObj<MatSnackBar>('MatSnackBar', ['open']);
beforeEach(waitForAsync(() => {
TestBed
.configureTestingModule({
schemas: [NO_ERRORS_SCHEMA],
declarations: [ConditionListComponent],
providers: [
{provide: PredictionService, useValue: predictionServiceSpy},
{provide: MatSnackBar, useValue: snackbarSpy},
],
})
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(ConditionListComponent);
component = fixture.componentInstance;
});
it('should create', () => {
expect(component).toBeTruthy();
});
it('should find the relevant risk assessments', () => {
const fhirTravelPlan = createTestTravelQuestionnaireResponse();
fhirTravelPlan.id = '1';
component.travelPlan = new TravelPlan(fhirTravelPlan);
const riskAssessment1: fhir.RiskAssessment = {
id: 'RA1',
basis: [
{reference: `QuestionnaireResponse/${fhirTravelPlan.id}`},
],
prediction: [{
outcome: {coding: [{display: 'Yellow fever'}]},
qualitativeRisk: {
coding:
[{code: 'low', system: 'http://hl7.org/fhir/risk-probability'}]
}
}]
} as any;
// Risk assessment for another trip
const riskAssessment2: fhir.RiskAssessment = {
id: 'RA2',
basis: [
{reference: 'QuestionnaireResponse/2'},
],
prediction: [{
outcome: {coding: [{display: 'Yellow fever'}]},
qualitativeRisk: {
coding:
[{code: 'high', system: 'http://hl7.org/fhir/risk-probability'}]
}
}]
} as any;
scheduler.run(({cold, expectObservable}) => {
const predictionService = TestBed.get(PredictionService);
predictionService.predictions$ =
cold('-r|', {r: [riskAssessment1, riskAssessment2]});
component.ngOnInit();
expectObservable(component.conditionAssessments$)
.toBe('-l|', {e: [], l: [riskAssessment1]});
});
});
it('should have the right icon classes', () => {
const a: fhir.RiskAssessment = {} as any;
set(a, RISK_PROBABILITY_PATH, RiskProbability.Certain);
expect(component.getIcon(a)).toEqual('report');
set(a, RISK_PROBABILITY_PATH, RiskProbability.Moderate);
expect(component.getIcon(a)).toEqual('warning');
set(a, RISK_PROBABILITY_PATH, RiskProbability.Low);
expect(component.getIcon(a)).toEqual('info');
});
it('should have the right icon colors', () => {
const a: fhir.RiskAssessment = {} as any;
set(a, RISK_PROBABILITY_PATH, RiskProbability.Certain);
expect(component.getIconColor(a)).toEqual('red');
set(a, RISK_PROBABILITY_PATH, RiskProbability.Moderate);
expect(component.getIconColor(a)).toEqual('yellow');
set(a, RISK_PROBABILITY_PATH, RiskProbability.Low);
expect(component.getIconColor(a)).toEqual('blue');
});
it('should display the correct severity', () => {
const a: fhir.RiskAssessment = {} as any;
set(a, RISK_PROBABILITY_PATH, RiskProbability.Certain);
expect(component.showOutcomeSeverity(a)).toEqual('High');
set(a, RISK_PROBABILITY_PATH, RiskProbability.Moderate);
expect(component.showOutcomeSeverity(a)).toEqual('Moderate');
set(a, RISK_PROBABILITY_PATH, RiskProbability.Low);
expect(component.showOutcomeSeverity(a)).toEqual('Low');
});
it('should retrieve the predicted condition', () => {
const a: fhir.RiskAssessment = {
prediction: [
{
outcome: {
coding: [{display: 'Malaria'}],
},
},
],
} as any;
expect(component.getPredictedCondition(a)).toEqual('Malaria');
});
}); | random_line_split | |
addServer.py | #!/usr/bin/python
#-*-encoding:utf-8-*-
#author: asher
#date: 20160429 on train D909
# this scripts useed for add server ip to webvirtmgr
# if not , each server must add by website,it's too slow, and very not interesting.
# use this , it's make you feel very happy
import sqlite3
try:
conn = sqlite3.connect('../webvirtmgr.sqlite3')
cur = conn.cursor()
print "Input the server ip address like:"
ips = raw_input("Ips 172.23.32:").strip() | while True:
if ips1 <= ips2:
ips1 = str(ips1)
newip = ips + "." + ips1
jifang1 = jifang + "_" + newip
print "Add %s into database\n" % jifang1
cur.execute('''insert into servers_compute (name,hostname,login,password,type) values('%s','%s','%s','%s','%d')''' % (jifang1,newip,login,password,1))
ips1 = int(ips1)
ips1 += 1
conn.commit()
else:
break
finally:
allservers = cur.execute("select id,name,hostname,login,type from servers_compute").fetchall()
for i in allservers:
print i
conn.close() | ips1 = int(raw_input("Input start last ip num: 1:>").strip())
ips2 = int(raw_input("Input end ip num: 100:>").strip())
jifang = str(raw_input("DataCenter like:jxq:>").strip())
login = str(raw_input("User:admin or others:>").strip())
password = str(raw_input("Password:>").strip()) | random_line_split |
addServer.py | #!/usr/bin/python
#-*-encoding:utf-8-*-
#author: asher
#date: 20160429 on train D909
# this scripts useed for add server ip to webvirtmgr
# if not , each server must add by website,it's too slow, and very not interesting.
# use this , it's make you feel very happy
import sqlite3
try:
conn = sqlite3.connect('../webvirtmgr.sqlite3')
cur = conn.cursor()
print "Input the server ip address like:"
ips = raw_input("Ips 172.23.32:").strip()
ips1 = int(raw_input("Input start last ip num: 1:>").strip())
ips2 = int(raw_input("Input end ip num: 100:>").strip())
jifang = str(raw_input("DataCenter like:jxq:>").strip())
login = str(raw_input("User:admin or others:>").strip())
password = str(raw_input("Password:>").strip())
while True:
|
finally:
allservers = cur.execute("select id,name,hostname,login,type from servers_compute").fetchall()
for i in allservers:
print i
conn.close()
| if ips1 <= ips2:
ips1 = str(ips1)
newip = ips + "." + ips1
jifang1 = jifang + "_" + newip
print "Add %s into database\n" % jifang1
cur.execute('''insert into servers_compute (name,hostname,login,password,type) values('%s','%s','%s','%s','%d')''' % (jifang1,newip,login,password,1))
ips1 = int(ips1)
ips1 += 1
conn.commit()
else:
break | conditional_block |
puppet_client.py | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from docker.errors import DockerException, NotFound
from oslo_log import log as logging
from oslo_config import cfg
from docker import Client as DC
from validator.common.exception import CookbookSyntaxException, \
CookbookDeploymentException, \
CookbookInstallException, \
DockerContainerException
from validator.common.i18n import _LW, _LE, _, _LI
LOG = logging.getLogger(__name__)
opts = [
cfg.StrOpt('url'),
cfg.StrOpt('image'),
]
CONF = cfg.CONF
CONF.register_opts(opts, group="clients_docker")
class PuppetClient(object):
"""
Wrapper for Docker client
"""
def __init__(self, url=CONF.clients_docker.url):
self._url = url
self.container = None
try:
self.dc = DC(base_url=self._url)
except DockerException as e:
LOG.error(_LE("Docker client error: %s") % e)
raise e
def cookbook_deployment_test(self, cookbook, image=CONF.clients_docker.image):
"""
Try to process a cookbook and return results
:param cookbook: cookbook to deploy
:param image: image to deploy to
:return: dictionary with results
"""
LOG.debug("Sending cookbook to docker server in %s" % self._url)
b_success = True
msg = {}
self.run_container(image)
# inject custom solo.json/solo.rb file
json_cont = CONF.clients_puppet.cmd_config % cookbook
cmd_inject = CONF.clients_puppet.cmd_inject.format(json_cont)
self.execute_command(cmd_inject)
msg['install'] = self.run_install(cookbook)
b_success &= msg['install']['success']
msg['test'] = self.run_test(cookbook)
b_success &= msg['test']['success']
msg['deploy'] = self.run_deploy(cookbook)
b_success &= msg['deploy']['success']
# check execution output
if b_success:
msg['result'] = {
'success': True,
'result': "Cookbook %s successfully deployed\n" % cookbook
}
else:
msg['result'] = {
'success': False,
'result': "Error deploying cookbook {}\n".format(cookbook)
}
LOG.error(_LW(msg))
self.remove_container()
return msg
def run_deploy(self, cookbook):
""" Run cookbook deployment
:param cookbook: cookbook to deploy
:return msg: dictionary with results and state
"""
try:
# launch execution
cmd_launch = CONF.clients_puppet.cmd_launch
resp_launch = self.execute_command(cmd_launch)
msg = {
'success': True,
'response': resp_launch
}
LOG.debug(_("Launch result: %s") % resp_launch)
if resp_launch is None or "FATAL" in resp_launch:
msg['success'] = False
except Exception as e:
self.remove_container(self.container)
LOG.error(_LW("Cookbook deployment exception %s" % e))
raise CookbookDeploymentException(cookbook=cookbook)
return msg
def run_test(self, cookbook):
""" Test cookbook syntax
:param cookbook: cookbook to test
:return msg: dictionary with results and state
"""
try:
cmd_test = CONF.clients_puppet.cmd_test.format(cookbook)
resp_test = self.execute_command(cmd_test)
msg = {
'success': True,
'response': resp_test
}
for line in resp_test.splitlines():
if "ERROR" in line:
msg['success'] = False
LOG.debug(_("Test result: %s") % resp_test)
except Exception as e:
self.remove_container(self.container)
LOG.error(_LW("Cookbook syntax exception %s" % e))
raise CookbookSyntaxException(cookbook=cookbook)
return msg
def run_install(self, cookbook):
"""Run download and install command
:param cookbook: cookbook to process
:return msg: operation result
"""
try:
cmd_install = CONF.clients_puppet.cmd_install.format(cookbook)
resp_install = self.execute_command(cmd_install)
msg = {
'success': True,
'response': resp_install
}
for line in resp_install.splitlines():
if "ERROR" in line:
msg['success'] = False
LOG.debug(_("Install result: %s") % resp_install)
except Exception as e:
self.remove_container(self.container)
LOG.error(_LW("Chef install exception: %s" % e))
raise CookbookInstallException(cookbook=cookbook)
return msg
def run_container(self, image):
"""Run and start a container based on the given image
:param image: image to run
:return:
"""
contname = "{}-validate".format(image).replace("/", "_")
try:
try:
self.dc.remove_container(contname, force=True)
LOG.info(_LI('Removing old %s container' % contname))
except NotFound:
pass
self.container = self.dc.create_container(
image,
tty=True,
name=contname
).get('Id')
self.dc.start(container=self.container)
except AttributeError as e:
LOG.error(_LW("Error creating container: %s" % e))
raise DockerContainerException(image=image)
def remove_container(self, kill=True):
"""destroy container on exit
:param kill: inhibits removal for testing purposes
"""
self.dc.stop(self.container)
if kill:
|
def execute_command(self, command):
""" Execute a command in the given container
:param command: bash command to run
:return: execution result
"""
bash_txt = "/bin/bash -c \"{}\"".format(command.replace('"', '\\"'))
exec_txt = self.dc.exec_create(
container=self.container,
cmd=bash_txt
)
return self.dc.exec_start(exec_txt)
| self.dc.remove_container(self.container) | conditional_block |
puppet_client.py | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from docker.errors import DockerException, NotFound
from oslo_log import log as logging
from oslo_config import cfg
from docker import Client as DC
from validator.common.exception import CookbookSyntaxException, \
CookbookDeploymentException, \
CookbookInstallException, \
DockerContainerException
from validator.common.i18n import _LW, _LE, _, _LI
LOG = logging.getLogger(__name__)
opts = [
cfg.StrOpt('url'),
cfg.StrOpt('image'),
]
CONF = cfg.CONF
CONF.register_opts(opts, group="clients_docker")
class PuppetClient(object):
"""
Wrapper for Docker client
"""
def __init__(self, url=CONF.clients_docker.url):
self._url = url
self.container = None
try:
self.dc = DC(base_url=self._url)
except DockerException as e:
LOG.error(_LE("Docker client error: %s") % e)
raise e
def cookbook_deployment_test(self, cookbook, image=CONF.clients_docker.image):
"""
Try to process a cookbook and return results
:param cookbook: cookbook to deploy
:param image: image to deploy to
:return: dictionary with results
"""
LOG.debug("Sending cookbook to docker server in %s" % self._url)
b_success = True
msg = {}
self.run_container(image)
# inject custom solo.json/solo.rb file
json_cont = CONF.clients_puppet.cmd_config % cookbook
cmd_inject = CONF.clients_puppet.cmd_inject.format(json_cont)
self.execute_command(cmd_inject)
msg['install'] = self.run_install(cookbook)
b_success &= msg['install']['success']
msg['test'] = self.run_test(cookbook)
b_success &= msg['test']['success']
msg['deploy'] = self.run_deploy(cookbook)
b_success &= msg['deploy']['success']
# check execution output
if b_success:
msg['result'] = {
'success': True,
'result': "Cookbook %s successfully deployed\n" % cookbook
}
else:
msg['result'] = {
'success': False,
'result': "Error deploying cookbook {}\n".format(cookbook)
}
LOG.error(_LW(msg))
self.remove_container()
return msg
def run_deploy(self, cookbook):
""" Run cookbook deployment
:param cookbook: cookbook to deploy
:return msg: dictionary with results and state
"""
try:
# launch execution
cmd_launch = CONF.clients_puppet.cmd_launch
resp_launch = self.execute_command(cmd_launch)
msg = {
'success': True,
'response': resp_launch
}
LOG.debug(_("Launch result: %s") % resp_launch)
if resp_launch is None or "FATAL" in resp_launch:
msg['success'] = False
except Exception as e:
self.remove_container(self.container)
LOG.error(_LW("Cookbook deployment exception %s" % e))
raise CookbookDeploymentException(cookbook=cookbook)
return msg
def run_test(self, cookbook):
""" Test cookbook syntax
:param cookbook: cookbook to test
:return msg: dictionary with results and state
"""
try:
cmd_test = CONF.clients_puppet.cmd_test.format(cookbook)
resp_test = self.execute_command(cmd_test)
msg = {
'success': True,
'response': resp_test
}
for line in resp_test.splitlines():
if "ERROR" in line:
msg['success'] = False
LOG.debug(_("Test result: %s") % resp_test)
except Exception as e:
self.remove_container(self.container)
LOG.error(_LW("Cookbook syntax exception %s" % e))
raise CookbookSyntaxException(cookbook=cookbook)
return msg
def run_install(self, cookbook):
"""Run download and install command
:param cookbook: cookbook to process
:return msg: operation result
"""
try:
cmd_install = CONF.clients_puppet.cmd_install.format(cookbook)
resp_install = self.execute_command(cmd_install)
msg = {
'success': True,
'response': resp_install
}
for line in resp_install.splitlines():
if "ERROR" in line:
msg['success'] = False
LOG.debug(_("Install result: %s") % resp_install)
except Exception as e:
self.remove_container(self.container)
LOG.error(_LW("Chef install exception: %s" % e))
raise CookbookInstallException(cookbook=cookbook)
return msg
def run_container(self, image):
|
def remove_container(self, kill=True):
"""destroy container on exit
:param kill: inhibits removal for testing purposes
"""
self.dc.stop(self.container)
if kill:
self.dc.remove_container(self.container)
def execute_command(self, command):
""" Execute a command in the given container
:param command: bash command to run
:return: execution result
"""
bash_txt = "/bin/bash -c \"{}\"".format(command.replace('"', '\\"'))
exec_txt = self.dc.exec_create(
container=self.container,
cmd=bash_txt
)
return self.dc.exec_start(exec_txt)
| """Run and start a container based on the given image
:param image: image to run
:return:
"""
contname = "{}-validate".format(image).replace("/", "_")
try:
try:
self.dc.remove_container(contname, force=True)
LOG.info(_LI('Removing old %s container' % contname))
except NotFound:
pass
self.container = self.dc.create_container(
image,
tty=True,
name=contname
).get('Id')
self.dc.start(container=self.container)
except AttributeError as e:
LOG.error(_LW("Error creating container: %s" % e))
raise DockerContainerException(image=image) | identifier_body |
puppet_client.py | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from docker.errors import DockerException, NotFound
from oslo_log import log as logging
from oslo_config import cfg
from docker import Client as DC
from validator.common.exception import CookbookSyntaxException, \
CookbookDeploymentException, \
CookbookInstallException, \
DockerContainerException
from validator.common.i18n import _LW, _LE, _, _LI
LOG = logging.getLogger(__name__)
opts = [
cfg.StrOpt('url'),
cfg.StrOpt('image'),
]
CONF = cfg.CONF
CONF.register_opts(opts, group="clients_docker")
class PuppetClient(object):
"""
Wrapper for Docker client
"""
def __init__(self, url=CONF.clients_docker.url):
self._url = url
self.container = None
try:
self.dc = DC(base_url=self._url)
except DockerException as e:
LOG.error(_LE("Docker client error: %s") % e)
raise e
def cookbook_deployment_test(self, cookbook, image=CONF.clients_docker.image):
"""
Try to process a cookbook and return results
:param cookbook: cookbook to deploy
:param image: image to deploy to
:return: dictionary with results
"""
LOG.debug("Sending cookbook to docker server in %s" % self._url)
b_success = True
msg = {}
self.run_container(image)
# inject custom solo.json/solo.rb file
json_cont = CONF.clients_puppet.cmd_config % cookbook
cmd_inject = CONF.clients_puppet.cmd_inject.format(json_cont)
self.execute_command(cmd_inject)
msg['install'] = self.run_install(cookbook)
b_success &= msg['install']['success']
msg['test'] = self.run_test(cookbook)
b_success &= msg['test']['success']
msg['deploy'] = self.run_deploy(cookbook)
b_success &= msg['deploy']['success']
# check execution output
if b_success:
msg['result'] = {
'success': True,
'result': "Cookbook %s successfully deployed\n" % cookbook
}
else:
msg['result'] = {
'success': False,
'result': "Error deploying cookbook {}\n".format(cookbook)
}
LOG.error(_LW(msg))
self.remove_container()
return msg
def run_deploy(self, cookbook):
""" Run cookbook deployment
:param cookbook: cookbook to deploy
:return msg: dictionary with results and state
"""
try:
# launch execution
cmd_launch = CONF.clients_puppet.cmd_launch
resp_launch = self.execute_command(cmd_launch)
msg = {
'success': True,
'response': resp_launch
}
LOG.debug(_("Launch result: %s") % resp_launch)
if resp_launch is None or "FATAL" in resp_launch:
msg['success'] = False
except Exception as e:
self.remove_container(self.container)
LOG.error(_LW("Cookbook deployment exception %s" % e))
raise CookbookDeploymentException(cookbook=cookbook)
return msg
def run_test(self, cookbook):
""" Test cookbook syntax
:param cookbook: cookbook to test
:return msg: dictionary with results and state
"""
try:
cmd_test = CONF.clients_puppet.cmd_test.format(cookbook)
resp_test = self.execute_command(cmd_test)
msg = {
'success': True,
'response': resp_test
}
for line in resp_test.splitlines():
if "ERROR" in line:
msg['success'] = False
LOG.debug(_("Test result: %s") % resp_test)
except Exception as e:
self.remove_container(self.container)
LOG.error(_LW("Cookbook syntax exception %s" % e))
raise CookbookSyntaxException(cookbook=cookbook)
return msg
def run_install(self, cookbook):
"""Run download and install command
:param cookbook: cookbook to process
:return msg: operation result
"""
try:
cmd_install = CONF.clients_puppet.cmd_install.format(cookbook)
resp_install = self.execute_command(cmd_install)
msg = {
'success': True,
'response': resp_install
}
for line in resp_install.splitlines():
if "ERROR" in line:
msg['success'] = False
LOG.debug(_("Install result: %s") % resp_install)
except Exception as e:
self.remove_container(self.container)
LOG.error(_LW("Chef install exception: %s" % e))
raise CookbookInstallException(cookbook=cookbook)
return msg
def run_container(self, image):
"""Run and start a container based on the given image
:param image: image to run
:return:
"""
contname = "{}-validate".format(image).replace("/", "_")
try:
try:
self.dc.remove_container(contname, force=True)
LOG.info(_LI('Removing old %s container' % contname))
except NotFound:
pass
self.container = self.dc.create_container(
image,
tty=True,
name=contname
).get('Id')
self.dc.start(container=self.container)
except AttributeError as e:
LOG.error(_LW("Error creating container: %s" % e))
raise DockerContainerException(image=image)
def | (self, kill=True):
"""destroy container on exit
:param kill: inhibits removal for testing purposes
"""
self.dc.stop(self.container)
if kill:
self.dc.remove_container(self.container)
def execute_command(self, command):
""" Execute a command in the given container
:param command: bash command to run
:return: execution result
"""
bash_txt = "/bin/bash -c \"{}\"".format(command.replace('"', '\\"'))
exec_txt = self.dc.exec_create(
container=self.container,
cmd=bash_txt
)
return self.dc.exec_start(exec_txt)
| remove_container | identifier_name |
puppet_client.py | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from docker.errors import DockerException, NotFound
from oslo_log import log as logging
from oslo_config import cfg
from docker import Client as DC
from validator.common.exception import CookbookSyntaxException, \
CookbookDeploymentException, \
CookbookInstallException, \
DockerContainerException
from validator.common.i18n import _LW, _LE, _, _LI
LOG = logging.getLogger(__name__)
opts = [
cfg.StrOpt('url'),
cfg.StrOpt('image'),
]
CONF = cfg.CONF
CONF.register_opts(opts, group="clients_docker")
class PuppetClient(object):
"""
Wrapper for Docker client
"""
def __init__(self, url=CONF.clients_docker.url):
self._url = url
self.container = None
try:
self.dc = DC(base_url=self._url)
except DockerException as e:
LOG.error(_LE("Docker client error: %s") % e)
raise e
def cookbook_deployment_test(self, cookbook, image=CONF.clients_docker.image):
"""
Try to process a cookbook and return results
:param cookbook: cookbook to deploy
:param image: image to deploy to
:return: dictionary with results
"""
LOG.debug("Sending cookbook to docker server in %s" % self._url)
b_success = True
msg = {}
self.run_container(image)
# inject custom solo.json/solo.rb file
json_cont = CONF.clients_puppet.cmd_config % cookbook
cmd_inject = CONF.clients_puppet.cmd_inject.format(json_cont)
self.execute_command(cmd_inject)
msg['install'] = self.run_install(cookbook)
b_success &= msg['install']['success']
msg['test'] = self.run_test(cookbook)
b_success &= msg['test']['success']
msg['deploy'] = self.run_deploy(cookbook)
b_success &= msg['deploy']['success']
# check execution output
if b_success:
msg['result'] = {
'success': True,
'result': "Cookbook %s successfully deployed\n" % cookbook
}
else:
msg['result'] = {
'success': False,
'result': "Error deploying cookbook {}\n".format(cookbook)
}
LOG.error(_LW(msg))
self.remove_container()
return msg
def run_deploy(self, cookbook):
""" Run cookbook deployment
:param cookbook: cookbook to deploy
:return msg: dictionary with results and state
"""
try:
# launch execution
cmd_launch = CONF.clients_puppet.cmd_launch
resp_launch = self.execute_command(cmd_launch)
msg = {
'success': True,
'response': resp_launch
}
LOG.debug(_("Launch result: %s") % resp_launch)
if resp_launch is None or "FATAL" in resp_launch:
msg['success'] = False
except Exception as e:
self.remove_container(self.container)
LOG.error(_LW("Cookbook deployment exception %s" % e))
raise CookbookDeploymentException(cookbook=cookbook)
return msg
def run_test(self, cookbook):
""" Test cookbook syntax
:param cookbook: cookbook to test
:return msg: dictionary with results and state
""" | 'success': True,
'response': resp_test
}
for line in resp_test.splitlines():
if "ERROR" in line:
msg['success'] = False
LOG.debug(_("Test result: %s") % resp_test)
except Exception as e:
self.remove_container(self.container)
LOG.error(_LW("Cookbook syntax exception %s" % e))
raise CookbookSyntaxException(cookbook=cookbook)
return msg
def run_install(self, cookbook):
"""Run download and install command
:param cookbook: cookbook to process
:return msg: operation result
"""
try:
cmd_install = CONF.clients_puppet.cmd_install.format(cookbook)
resp_install = self.execute_command(cmd_install)
msg = {
'success': True,
'response': resp_install
}
for line in resp_install.splitlines():
if "ERROR" in line:
msg['success'] = False
LOG.debug(_("Install result: %s") % resp_install)
except Exception as e:
self.remove_container(self.container)
LOG.error(_LW("Chef install exception: %s" % e))
raise CookbookInstallException(cookbook=cookbook)
return msg
def run_container(self, image):
"""Run and start a container based on the given image
:param image: image to run
:return:
"""
contname = "{}-validate".format(image).replace("/", "_")
try:
try:
self.dc.remove_container(contname, force=True)
LOG.info(_LI('Removing old %s container' % contname))
except NotFound:
pass
self.container = self.dc.create_container(
image,
tty=True,
name=contname
).get('Id')
self.dc.start(container=self.container)
except AttributeError as e:
LOG.error(_LW("Error creating container: %s" % e))
raise DockerContainerException(image=image)
def remove_container(self, kill=True):
"""destroy container on exit
:param kill: inhibits removal for testing purposes
"""
self.dc.stop(self.container)
if kill:
self.dc.remove_container(self.container)
def execute_command(self, command):
""" Execute a command in the given container
:param command: bash command to run
:return: execution result
"""
bash_txt = "/bin/bash -c \"{}\"".format(command.replace('"', '\\"'))
exec_txt = self.dc.exec_create(
container=self.container,
cmd=bash_txt
)
return self.dc.exec_start(exec_txt) | try:
cmd_test = CONF.clients_puppet.cmd_test.format(cookbook)
resp_test = self.execute_command(cmd_test)
msg = { | random_line_split |
magenta.rs | extern crate drm;
use std::io::Result as IoResult;
use std::thread::sleep;
use std::time::Duration;
fn main() -> IoResult<()>
{
let mut dev0 = drm::Device::first_card().unwrap();
let dev = dev0.set_master()
.map_err(|err| {
eprintln!("Failed to set master: {:?}", err);
err
})?;
let res = dev.get_resources()?;
let connector = res.connectors().iter()
.filter_map(|id| dev.get(*id).ok())
.find(|conn| conn.encoder_id().is_some())
.expect("No active connectors");
let encoder_id = connector.encoder_id().unwrap();
let encoder = dev.get(encoder_id)
.expect("failed get encoder");
let crtc_id = encoder.crtc_id().unwrap();
let crtc = dev.get(crtc_id)
.expect("failed get crtc");
let old_fbid = crtc.fb_id().expect("Currently no fb");
let mode = crtc.mode().expect("mode")
.clone();
| let mut buffer = drm::mode::DumbBuf::create_with_depth(
&dev,
mode.hdisplay as u32, mode.vdisplay as u32, 32, 32)
.expect("creating buffer");
dev.set_crtc(crtc.id(), Some(buffer.fb().id()),
0, 0,
&[ connector.id() ],
Some(&mode))
.expect("set_crtc 1");
fill_buffer(&mut buffer);
sleep(Duration::new(1, 0));
dev.set_crtc(crtc.id(), Some(old_fbid),
0, 0,
&[ connector.id() ],
Some(&mode))
.expect("set_crtc 1");
Ok(())
}
fn fill_buffer<B:AsMut<[u32]>>(mut buffer_ref: B) {
let mut buffer = buffer_ref.as_mut();
for p in buffer.iter_mut() {
*p = 0xffff00ff;
}
} | random_line_split | |
magenta.rs |
extern crate drm;
use std::io::Result as IoResult;
use std::thread::sleep;
use std::time::Duration;
fn main() -> IoResult<()>
|
fn fill_buffer<B:AsMut<[u32]>>(mut buffer_ref: B) {
let mut buffer = buffer_ref.as_mut();
for p in buffer.iter_mut() {
*p = 0xffff00ff;
}
}
| {
let mut dev0 = drm::Device::first_card().unwrap();
let dev = dev0.set_master()
.map_err(|err| {
eprintln!("Failed to set master: {:?}", err);
err
})?;
let res = dev.get_resources()?;
let connector = res.connectors().iter()
.filter_map(|id| dev.get(*id).ok())
.find(|conn| conn.encoder_id().is_some())
.expect("No active connectors");
let encoder_id = connector.encoder_id().unwrap();
let encoder = dev.get(encoder_id)
.expect("failed get encoder");
let crtc_id = encoder.crtc_id().unwrap();
let crtc = dev.get(crtc_id)
.expect("failed get crtc");
let old_fbid = crtc.fb_id().expect("Currently no fb");
let mode = crtc.mode().expect("mode")
.clone();
let mut buffer = drm::mode::DumbBuf::create_with_depth(
&dev,
mode.hdisplay as u32, mode.vdisplay as u32, 32, 32)
.expect("creating buffer");
dev.set_crtc(crtc.id(), Some(buffer.fb().id()),
0, 0,
&[ connector.id() ],
Some(&mode))
.expect("set_crtc 1");
fill_buffer(&mut buffer);
sleep(Duration::new(1, 0));
dev.set_crtc(crtc.id(), Some(old_fbid),
0, 0,
&[ connector.id() ],
Some(&mode))
.expect("set_crtc 1");
Ok(())
} | identifier_body |
magenta.rs |
extern crate drm;
use std::io::Result as IoResult;
use std::thread::sleep;
use std::time::Duration;
fn main() -> IoResult<()>
{
let mut dev0 = drm::Device::first_card().unwrap();
let dev = dev0.set_master()
.map_err(|err| {
eprintln!("Failed to set master: {:?}", err);
err
})?;
let res = dev.get_resources()?;
let connector = res.connectors().iter()
.filter_map(|id| dev.get(*id).ok())
.find(|conn| conn.encoder_id().is_some())
.expect("No active connectors");
let encoder_id = connector.encoder_id().unwrap();
let encoder = dev.get(encoder_id)
.expect("failed get encoder");
let crtc_id = encoder.crtc_id().unwrap();
let crtc = dev.get(crtc_id)
.expect("failed get crtc");
let old_fbid = crtc.fb_id().expect("Currently no fb");
let mode = crtc.mode().expect("mode")
.clone();
let mut buffer = drm::mode::DumbBuf::create_with_depth(
&dev,
mode.hdisplay as u32, mode.vdisplay as u32, 32, 32)
.expect("creating buffer");
dev.set_crtc(crtc.id(), Some(buffer.fb().id()),
0, 0,
&[ connector.id() ],
Some(&mode))
.expect("set_crtc 1");
fill_buffer(&mut buffer);
sleep(Duration::new(1, 0));
dev.set_crtc(crtc.id(), Some(old_fbid),
0, 0,
&[ connector.id() ],
Some(&mode))
.expect("set_crtc 1");
Ok(())
}
fn | <B:AsMut<[u32]>>(mut buffer_ref: B) {
let mut buffer = buffer_ref.as_mut();
for p in buffer.iter_mut() {
*p = 0xffff00ff;
}
}
| fill_buffer | identifier_name |
cli_ssh.py | '''
Created on Jun 16, 2014
@author: lwoydziak
'''
import pexpect
import sys
from dynamic_machine.cli_commands import assertResultNotEquals, Command
class SshCli(object):
LOGGED_IN = 0
def __init__(self, host, loginUser, debug = False, trace = False, log=None, port=22, pexpectObject=None):
self.pexpect = pexpect if not pexpectObject else pexpectObject
self.debug = debug
self.trace = trace
self.host = host
self._port = port
self._connection = None
self.modeList = []
self._log = log
self._bufferedCommands = None
self._bufferedMode = None
self._loginUser = loginUser
self._resetExpect()
def __del__(self):
self.closeCliConnectionTo()
def showOutputOnScreen(self): | self.debug = True
self.trace = True
self._log = None
self._setupLog()
def connectWithSsh(self):
self._debugLog("Establishing connection to " + self.host)
self._connection = self.pexpect.spawn(
'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no %s@%s -p %d' %
(self._loginUser.username, self.host, self._port))
if self._connection is None:
raise Exception("Unable to connect via SSH perhaps wrong IP!")
self._secure = True
self._setupLog()
self._loginUser.commandLine(self)
self.modeList = [self._loginUser]
def resetLoggingTo(self, log):
self._connection.logfile = log
def _setupLog(self):
if self.trace:
class Python3BytesToStdOut:
def write(self, s):
sys.stdout.buffer.write(s)
def flush(self):
sys.stdout.flush()
self._connection.logfile = Python3BytesToStdOut()
if self._log is not None:
self._connection.logfile = self._log
def loginSsh(self):
self._setupLog()
self._debugLog("Login in as "+self._loginUser.username)
try:
self._loginUser.sendPassword()
return True
except Exception as e:
self.forceCloseCliConnectionTo()
raise Exception('Exception ('+str(e)+') '+'Expected CLI response: "Password:"' + "\n Got: \n" + self._lastExpect())
def _exit_modes_beyond(self, thisMode):
if not self.modeList: return
while len(self.modeList) > thisMode + 1:
self.modeList.pop().exit()
def exitMode(self, mode):
if mode in self.modeList:
self.modeList.remove(mode)
def check_prereq(self, prereqMode = 0):
self._exit_modes_beyond(prereqMode)
if len(self.modeList) <= prereqMode:
raise Exception("Attempted to enter menu when prerequist mode was not entered, expected: %d" % prereqMode)
def execute_as(self, user):
self.check_prereq(self.LOGGED_IN)
self._exit_modes_beyond(self.LOGGED_IN)
user.commandLine(self)
user.login()
self.modeList.append(user)
return user
def closeCliConnectionTo(self):
if self._connection == None:
return
self._exit_modes_beyond(-1)
self.modeList = []
self._debugLog("Exited all modes.")
self.forceCloseCliConnectionTo()
def forceCloseCliConnectionTo(self):
self.modeList = None
if self._connection:
self._debugLog("Closing connection.")
self._connection.close()
self._connection = None
def _debugLog(self, message):
if self.debug:
print(message)
def _resetExpect(self):
self.previousExpectLine = ""
if self._connection is not None and isinstance(self._connection.buffer, str):
self.previousExpectLine = self._connection.buffer
self._connection.buffer = ""
def _lastExpect(self):
constructLine = self.previousExpectLine
if self._connection is not None and isinstance(self._connection.before, str):
constructLine += self._connection.before
if self._connection is not None and isinstance(self._connection.after, str):
constructLine += self._connection.after
return constructLine
def send(self, command):
if self._bufferedCommands is None:
self._bufferedCommands = command
else:
self._bufferedCommands += "\n" + command
if self._bufferedMode is None:
self.flush()
else:
self._debugLog("Buffering command " + command)
def flush(self):
if self._bufferedCommands is None:
return
self._connection.sendline(str(self._bufferedCommands))
self._bufferedCommands = None
def buffering(self):
return self._bufferedMode
def bufferedMode(self, mode = True):
if mode is None:
self.flush()
self._bufferedMode = mode
def compareReceivedAgainst(self, pattern, timeout=-1, searchwindowsize=None, indexOfSuccessfulResult=0):
if self._bufferedMode is None:
index = self._connection.expect(pattern, timeout, searchwindowsize)
self._debugLog("\nLooking for " + str(pattern) + " Found ("+str(index)+")")
self._debugLog(self._lastExpect())
return index
else:
return indexOfSuccessfulResult | random_line_split | |
cli_ssh.py | '''
Created on Jun 16, 2014
@author: lwoydziak
'''
import pexpect
import sys
from dynamic_machine.cli_commands import assertResultNotEquals, Command
class SshCli(object):
LOGGED_IN = 0
def __init__(self, host, loginUser, debug = False, trace = False, log=None, port=22, pexpectObject=None):
|
def __del__(self):
self.closeCliConnectionTo()
def showOutputOnScreen(self):
self.debug = True
self.trace = True
self._log = None
self._setupLog()
def connectWithSsh(self):
self._debugLog("Establishing connection to " + self.host)
self._connection = self.pexpect.spawn(
'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no %s@%s -p %d' %
(self._loginUser.username, self.host, self._port))
if self._connection is None:
raise Exception("Unable to connect via SSH perhaps wrong IP!")
self._secure = True
self._setupLog()
self._loginUser.commandLine(self)
self.modeList = [self._loginUser]
def resetLoggingTo(self, log):
self._connection.logfile = log
def _setupLog(self):
if self.trace:
class Python3BytesToStdOut:
def write(self, s):
sys.stdout.buffer.write(s)
def flush(self):
sys.stdout.flush()
self._connection.logfile = Python3BytesToStdOut()
if self._log is not None:
self._connection.logfile = self._log
def loginSsh(self):
self._setupLog()
self._debugLog("Login in as "+self._loginUser.username)
try:
self._loginUser.sendPassword()
return True
except Exception as e:
self.forceCloseCliConnectionTo()
raise Exception('Exception ('+str(e)+') '+'Expected CLI response: "Password:"' + "\n Got: \n" + self._lastExpect())
def _exit_modes_beyond(self, thisMode):
if not self.modeList: return
while len(self.modeList) > thisMode + 1:
self.modeList.pop().exit()
def exitMode(self, mode):
if mode in self.modeList:
self.modeList.remove(mode)
def check_prereq(self, prereqMode = 0):
self._exit_modes_beyond(prereqMode)
if len(self.modeList) <= prereqMode:
raise Exception("Attempted to enter menu when prerequist mode was not entered, expected: %d" % prereqMode)
def execute_as(self, user):
self.check_prereq(self.LOGGED_IN)
self._exit_modes_beyond(self.LOGGED_IN)
user.commandLine(self)
user.login()
self.modeList.append(user)
return user
def closeCliConnectionTo(self):
if self._connection == None:
return
self._exit_modes_beyond(-1)
self.modeList = []
self._debugLog("Exited all modes.")
self.forceCloseCliConnectionTo()
def forceCloseCliConnectionTo(self):
self.modeList = None
if self._connection:
self._debugLog("Closing connection.")
self._connection.close()
self._connection = None
def _debugLog(self, message):
if self.debug:
print(message)
def _resetExpect(self):
self.previousExpectLine = ""
if self._connection is not None and isinstance(self._connection.buffer, str):
self.previousExpectLine = self._connection.buffer
self._connection.buffer = ""
def _lastExpect(self):
constructLine = self.previousExpectLine
if self._connection is not None and isinstance(self._connection.before, str):
constructLine += self._connection.before
if self._connection is not None and isinstance(self._connection.after, str):
constructLine += self._connection.after
return constructLine
def send(self, command):
if self._bufferedCommands is None:
self._bufferedCommands = command
else:
self._bufferedCommands += "\n" + command
if self._bufferedMode is None:
self.flush()
else:
self._debugLog("Buffering command " + command)
def flush(self):
if self._bufferedCommands is None:
return
self._connection.sendline(str(self._bufferedCommands))
self._bufferedCommands = None
def buffering(self):
return self._bufferedMode
def bufferedMode(self, mode = True):
if mode is None:
self.flush()
self._bufferedMode = mode
def compareReceivedAgainst(self, pattern, timeout=-1, searchwindowsize=None, indexOfSuccessfulResult=0):
if self._bufferedMode is None:
index = self._connection.expect(pattern, timeout, searchwindowsize)
self._debugLog("\nLooking for " + str(pattern) + " Found ("+str(index)+")")
self._debugLog(self._lastExpect())
return index
else:
return indexOfSuccessfulResult | self.pexpect = pexpect if not pexpectObject else pexpectObject
self.debug = debug
self.trace = trace
self.host = host
self._port = port
self._connection = None
self.modeList = []
self._log = log
self._bufferedCommands = None
self._bufferedMode = None
self._loginUser = loginUser
self._resetExpect() | identifier_body |
cli_ssh.py | '''
Created on Jun 16, 2014
@author: lwoydziak
'''
import pexpect
import sys
from dynamic_machine.cli_commands import assertResultNotEquals, Command
class SshCli(object):
LOGGED_IN = 0
def __init__(self, host, loginUser, debug = False, trace = False, log=None, port=22, pexpectObject=None):
self.pexpect = pexpect if not pexpectObject else pexpectObject
self.debug = debug
self.trace = trace
self.host = host
self._port = port
self._connection = None
self.modeList = []
self._log = log
self._bufferedCommands = None
self._bufferedMode = None
self._loginUser = loginUser
self._resetExpect()
def __del__(self):
self.closeCliConnectionTo()
def showOutputOnScreen(self):
self.debug = True
self.trace = True
self._log = None
self._setupLog()
def connectWithSsh(self):
self._debugLog("Establishing connection to " + self.host)
self._connection = self.pexpect.spawn(
'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no %s@%s -p %d' %
(self._loginUser.username, self.host, self._port))
if self._connection is None:
raise Exception("Unable to connect via SSH perhaps wrong IP!")
self._secure = True
self._setupLog()
self._loginUser.commandLine(self)
self.modeList = [self._loginUser]
def resetLoggingTo(self, log):
self._connection.logfile = log
def _setupLog(self):
if self.trace:
class Python3BytesToStdOut:
def write(self, s):
sys.stdout.buffer.write(s)
def flush(self):
sys.stdout.flush()
self._connection.logfile = Python3BytesToStdOut()
if self._log is not None:
self._connection.logfile = self._log
def loginSsh(self):
self._setupLog()
self._debugLog("Login in as "+self._loginUser.username)
try:
self._loginUser.sendPassword()
return True
except Exception as e:
self.forceCloseCliConnectionTo()
raise Exception('Exception ('+str(e)+') '+'Expected CLI response: "Password:"' + "\n Got: \n" + self._lastExpect())
def _exit_modes_beyond(self, thisMode):
if not self.modeList: return
while len(self.modeList) > thisMode + 1:
self.modeList.pop().exit()
def exitMode(self, mode):
if mode in self.modeList:
self.modeList.remove(mode)
def check_prereq(self, prereqMode = 0):
self._exit_modes_beyond(prereqMode)
if len(self.modeList) <= prereqMode:
raise Exception("Attempted to enter menu when prerequist mode was not entered, expected: %d" % prereqMode)
def execute_as(self, user):
self.check_prereq(self.LOGGED_IN)
self._exit_modes_beyond(self.LOGGED_IN)
user.commandLine(self)
user.login()
self.modeList.append(user)
return user
def closeCliConnectionTo(self):
if self._connection == None:
return
self._exit_modes_beyond(-1)
self.modeList = []
self._debugLog("Exited all modes.")
self.forceCloseCliConnectionTo()
def forceCloseCliConnectionTo(self):
self.modeList = None
if self._connection:
self._debugLog("Closing connection.")
self._connection.close()
self._connection = None
def _debugLog(self, message):
if self.debug:
print(message)
def _resetExpect(self):
self.previousExpectLine = ""
if self._connection is not None and isinstance(self._connection.buffer, str):
self.previousExpectLine = self._connection.buffer
self._connection.buffer = ""
def _lastExpect(self):
constructLine = self.previousExpectLine
if self._connection is not None and isinstance(self._connection.before, str):
constructLine += self._connection.before
if self._connection is not None and isinstance(self._connection.after, str):
constructLine += self._connection.after
return constructLine
def send(self, command):
if self._bufferedCommands is None:
self._bufferedCommands = command
else:
self._bufferedCommands += "\n" + command
if self._bufferedMode is None:
|
else:
self._debugLog("Buffering command " + command)
def flush(self):
if self._bufferedCommands is None:
return
self._connection.sendline(str(self._bufferedCommands))
self._bufferedCommands = None
def buffering(self):
return self._bufferedMode
def bufferedMode(self, mode = True):
if mode is None:
self.flush()
self._bufferedMode = mode
def compareReceivedAgainst(self, pattern, timeout=-1, searchwindowsize=None, indexOfSuccessfulResult=0):
if self._bufferedMode is None:
index = self._connection.expect(pattern, timeout, searchwindowsize)
self._debugLog("\nLooking for " + str(pattern) + " Found ("+str(index)+")")
self._debugLog(self._lastExpect())
return index
else:
return indexOfSuccessfulResult | self.flush() | conditional_block |
cli_ssh.py | '''
Created on Jun 16, 2014
@author: lwoydziak
'''
import pexpect
import sys
from dynamic_machine.cli_commands import assertResultNotEquals, Command
class SshCli(object):
LOGGED_IN = 0
def __init__(self, host, loginUser, debug = False, trace = False, log=None, port=22, pexpectObject=None):
self.pexpect = pexpect if not pexpectObject else pexpectObject
self.debug = debug
self.trace = trace
self.host = host
self._port = port
self._connection = None
self.modeList = []
self._log = log
self._bufferedCommands = None
self._bufferedMode = None
self._loginUser = loginUser
self._resetExpect()
def __del__(self):
self.closeCliConnectionTo()
def showOutputOnScreen(self):
self.debug = True
self.trace = True
self._log = None
self._setupLog()
def connectWithSsh(self):
self._debugLog("Establishing connection to " + self.host)
self._connection = self.pexpect.spawn(
'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no %s@%s -p %d' %
(self._loginUser.username, self.host, self._port))
if self._connection is None:
raise Exception("Unable to connect via SSH perhaps wrong IP!")
self._secure = True
self._setupLog()
self._loginUser.commandLine(self)
self.modeList = [self._loginUser]
def resetLoggingTo(self, log):
self._connection.logfile = log
def _setupLog(self):
if self.trace:
class Python3BytesToStdOut:
def write(self, s):
sys.stdout.buffer.write(s)
def flush(self):
sys.stdout.flush()
self._connection.logfile = Python3BytesToStdOut()
if self._log is not None:
self._connection.logfile = self._log
def loginSsh(self):
self._setupLog()
self._debugLog("Login in as "+self._loginUser.username)
try:
self._loginUser.sendPassword()
return True
except Exception as e:
self.forceCloseCliConnectionTo()
raise Exception('Exception ('+str(e)+') '+'Expected CLI response: "Password:"' + "\n Got: \n" + self._lastExpect())
def _exit_modes_beyond(self, thisMode):
if not self.modeList: return
while len(self.modeList) > thisMode + 1:
self.modeList.pop().exit()
def exitMode(self, mode):
if mode in self.modeList:
self.modeList.remove(mode)
def check_prereq(self, prereqMode = 0):
self._exit_modes_beyond(prereqMode)
if len(self.modeList) <= prereqMode:
raise Exception("Attempted to enter menu when prerequist mode was not entered, expected: %d" % prereqMode)
def execute_as(self, user):
self.check_prereq(self.LOGGED_IN)
self._exit_modes_beyond(self.LOGGED_IN)
user.commandLine(self)
user.login()
self.modeList.append(user)
return user
def closeCliConnectionTo(self):
if self._connection == None:
return
self._exit_modes_beyond(-1)
self.modeList = []
self._debugLog("Exited all modes.")
self.forceCloseCliConnectionTo()
def forceCloseCliConnectionTo(self):
self.modeList = None
if self._connection:
self._debugLog("Closing connection.")
self._connection.close()
self._connection = None
def _debugLog(self, message):
if self.debug:
print(message)
def | (self):
self.previousExpectLine = ""
if self._connection is not None and isinstance(self._connection.buffer, str):
self.previousExpectLine = self._connection.buffer
self._connection.buffer = ""
def _lastExpect(self):
constructLine = self.previousExpectLine
if self._connection is not None and isinstance(self._connection.before, str):
constructLine += self._connection.before
if self._connection is not None and isinstance(self._connection.after, str):
constructLine += self._connection.after
return constructLine
def send(self, command):
if self._bufferedCommands is None:
self._bufferedCommands = command
else:
self._bufferedCommands += "\n" + command
if self._bufferedMode is None:
self.flush()
else:
self._debugLog("Buffering command " + command)
def flush(self):
if self._bufferedCommands is None:
return
self._connection.sendline(str(self._bufferedCommands))
self._bufferedCommands = None
def buffering(self):
return self._bufferedMode
def bufferedMode(self, mode = True):
if mode is None:
self.flush()
self._bufferedMode = mode
def compareReceivedAgainst(self, pattern, timeout=-1, searchwindowsize=None, indexOfSuccessfulResult=0):
if self._bufferedMode is None:
index = self._connection.expect(pattern, timeout, searchwindowsize)
self._debugLog("\nLooking for " + str(pattern) + " Found ("+str(index)+")")
self._debugLog(self._lastExpect())
return index
else:
return indexOfSuccessfulResult | _resetExpect | identifier_name |
analyze_dxp.py | """
Some helper functions to analyze the output of sys.getdxp() (which is
only available if Python was built with -DDYNAMIC_EXECUTION_PROFILE).
These will tell you which opcodes have been executed most frequently
in the current process, and, if Python was also built with -DDXPAIRS,
will tell you which instruction _pairs_ were executed most frequently,
which may help in choosing new instructions.
If Python was built without -DDYNAMIC_EXECUTION_PROFILE, importing
this module will raise a RuntimeError.
If you're running a script you want to profile, a simple way to get
the common pairs is:
$ PYTHONPATH=$PYTHONPATH:<python_srcdir>/Tools/scripts \
./python -i -O the_script.py --args
...
> from analyze_dxp import *
> s = render_common_pairs()
> open('/tmp/some_file', 'w').write(s)
"""
import copy
import opcode
import operator
import sys
import threading
if not hasattr(sys, "getdxp"):
raise RuntimeError("Can't import analyze_dxp: Python built without"
" -DDYNAMIC_EXECUTION_PROFILE.")
_profile_lock = threading.RLock()
_cumulative_profile = sys.getdxp()
# If Python was built with -DDXPAIRS, sys.getdxp() returns a list of
# lists of ints. Otherwise it returns just a list of ints.
def has_pairs(profile):
"""Returns True if the Python that produced the argument profile
was built with -DDXPAIRS."""
return len(profile) > 0 and isinstance(profile[0], list)
def reset_profile():
|
def merge_profile():
"""Reads sys.getdxp() and merges it into this module's cached copy.
We need this because sys.getdxp() 0s itself every time it's called."""
with _profile_lock:
new_profile = sys.getdxp()
if has_pairs(new_profile):
for first_inst in range(len(_cumulative_profile)):
for second_inst in range(len(_cumulative_profile[first_inst])):
_cumulative_profile[first_inst][second_inst] += (
new_profile[first_inst][second_inst])
else:
for inst in range(len(_cumulative_profile)):
_cumulative_profile[inst] += new_profile[inst]
def snapshot_profile():
"""Returns the cumulative execution profile until this call."""
with _profile_lock:
merge_profile()
return copy.deepcopy(_cumulative_profile)
def common_instructions(profile):
"""Returns the most common opcodes in order of descending frequency.
The result is a list of tuples of the form
(opcode, opname, # of occurrences)
"""
if has_pairs(profile) and profile:
inst_list = profile[-1]
else:
inst_list = profile
result = [(op, opcode.opname[op], count)
for op, count in enumerate(inst_list)
if count > 0]
result.sort(key=operator.itemgetter(2), reverse=True)
return result
def common_pairs(profile):
"""Returns the most common opcode pairs in order of descending frequency.
The result is a list of tuples of the form
((1st opcode, 2nd opcode),
(1st opname, 2nd opname),
# of occurrences of the pair)
"""
if not has_pairs(profile):
return []
result = [((op1, op2), (opcode.opname[op1], opcode.opname[op2]), count)
# Drop the row of single-op profiles with [:-1]
for op1, op1profile in enumerate(profile[:-1])
for op2, count in enumerate(op1profile)
if count > 0]
result.sort(key=operator.itemgetter(2), reverse=True)
return result
def render_common_pairs(profile=None):
"""Renders the most common opcode pairs to a string in order of
descending frequency.
The result is a series of lines of the form:
# of occurrences: ('1st opname', '2nd opname')
"""
if profile is None:
profile = snapshot_profile()
def seq():
for _, ops, count in common_pairs(profile):
yield "%s: %s\n" % (count, ops)
return ''.join(seq())
| """Forgets any execution profile that has been gathered so far."""
with _profile_lock:
sys.getdxp() # Resets the internal profile
global _cumulative_profile
_cumulative_profile = sys.getdxp() # 0s out our copy.
| identifier_body |
analyze_dxp.py | """
Some helper functions to analyze the output of sys.getdxp() (which is
only available if Python was built with -DDYNAMIC_EXECUTION_PROFILE).
These will tell you which opcodes have been executed most frequently
in the current process, and, if Python was also built with -DDXPAIRS,
will tell you which instruction _pairs_ were executed most frequently,
which may help in choosing new instructions.
If Python was built without -DDYNAMIC_EXECUTION_PROFILE, importing
this module will raise a RuntimeError.
If you're running a script you want to profile, a simple way to get
the common pairs is:
$ PYTHONPATH=$PYTHONPATH:<python_srcdir>/Tools/scripts \
./python -i -O the_script.py --args
...
> from analyze_dxp import *
> s = render_common_pairs()
> open('/tmp/some_file', 'w').write(s)
"""
import copy
import opcode
import operator
import sys
import threading
if not hasattr(sys, "getdxp"):
raise RuntimeError("Can't import analyze_dxp: Python built without"
" -DDYNAMIC_EXECUTION_PROFILE.")
_profile_lock = threading.RLock()
_cumulative_profile = sys.getdxp()
# If Python was built with -DDXPAIRS, sys.getdxp() returns a list of
# lists of ints. Otherwise it returns just a list of ints.
def has_pairs(profile):
"""Returns True if the Python that produced the argument profile
was built with -DDXPAIRS."""
return len(profile) > 0 and isinstance(profile[0], list)
def reset_profile():
"""Forgets any execution profile that has been gathered so far."""
with _profile_lock:
sys.getdxp() # Resets the internal profile
global _cumulative_profile
_cumulative_profile = sys.getdxp() # 0s out our copy.
def merge_profile():
"""Reads sys.getdxp() and merges it into this module's cached copy.
We need this because sys.getdxp() 0s itself every time it's called."""
with _profile_lock:
new_profile = sys.getdxp()
if has_pairs(new_profile):
for first_inst in range(len(_cumulative_profile)):
for second_inst in range(len(_cumulative_profile[first_inst])):
_cumulative_profile[first_inst][second_inst] += (
new_profile[first_inst][second_inst])
else:
for inst in range(len(_cumulative_profile)):
_cumulative_profile[inst] += new_profile[inst]
def snapshot_profile():
"""Returns the cumulative execution profile until this call."""
with _profile_lock:
merge_profile()
return copy.deepcopy(_cumulative_profile)
def common_instructions(profile):
"""Returns the most common opcodes in order of descending frequency.
The result is a list of tuples of the form
(opcode, opname, # of occurrences)
"""
if has_pairs(profile) and profile:
inst_list = profile[-1]
else:
inst_list = profile
result = [(op, opcode.opname[op], count)
for op, count in enumerate(inst_list)
if count > 0]
result.sort(key=operator.itemgetter(2), reverse=True)
return result
def common_pairs(profile):
"""Returns the most common opcode pairs in order of descending frequency.
The result is a list of tuples of the form
((1st opcode, 2nd opcode),
(1st opname, 2nd opname),
# of occurrences of the pair)
"""
if not has_pairs(profile):
return []
result = [((op1, op2), (opcode.opname[op1], opcode.opname[op2]), count)
# Drop the row of single-op profiles with [:-1]
for op1, op1profile in enumerate(profile[:-1])
for op2, count in enumerate(op1profile)
if count > 0]
result.sort(key=operator.itemgetter(2), reverse=True)
return result
def render_common_pairs(profile=None):
"""Renders the most common opcode pairs to a string in order of
descending frequency.
The result is a series of lines of the form:
# of occurrences: ('1st opname', '2nd opname')
"""
if profile is None:
|
def seq():
for _, ops, count in common_pairs(profile):
yield "%s: %s\n" % (count, ops)
return ''.join(seq())
| profile = snapshot_profile() | conditional_block |
analyze_dxp.py | """
Some helper functions to analyze the output of sys.getdxp() (which is
only available if Python was built with -DDYNAMIC_EXECUTION_PROFILE).
These will tell you which opcodes have been executed most frequently
in the current process, and, if Python was also built with -DDXPAIRS,
will tell you which instruction _pairs_ were executed most frequently,
which may help in choosing new instructions.
If Python was built without -DDYNAMIC_EXECUTION_PROFILE, importing
this module will raise a RuntimeError.
If you're running a script you want to profile, a simple way to get
the common pairs is:
$ PYTHONPATH=$PYTHONPATH:<python_srcdir>/Tools/scripts \
./python -i -O the_script.py --args
...
> from analyze_dxp import *
> s = render_common_pairs()
> open('/tmp/some_file', 'w').write(s)
"""
import copy
import opcode
import operator
import sys
import threading
if not hasattr(sys, "getdxp"):
raise RuntimeError("Can't import analyze_dxp: Python built without"
" -DDYNAMIC_EXECUTION_PROFILE.")
_profile_lock = threading.RLock()
_cumulative_profile = sys.getdxp()
# If Python was built with -DDXPAIRS, sys.getdxp() returns a list of
# lists of ints. Otherwise it returns just a list of ints.
def has_pairs(profile):
"""Returns True if the Python that produced the argument profile
was built with -DDXPAIRS."""
return len(profile) > 0 and isinstance(profile[0], list)
def reset_profile():
"""Forgets any execution profile that has been gathered so far."""
with _profile_lock:
sys.getdxp() # Resets the internal profile
global _cumulative_profile
_cumulative_profile = sys.getdxp() # 0s out our copy.
def merge_profile():
"""Reads sys.getdxp() and merges it into this module's cached copy.
We need this because sys.getdxp() 0s itself every time it's called."""
with _profile_lock:
new_profile = sys.getdxp()
if has_pairs(new_profile):
for first_inst in range(len(_cumulative_profile)):
for second_inst in range(len(_cumulative_profile[first_inst])):
_cumulative_profile[first_inst][second_inst] += (
new_profile[first_inst][second_inst])
else:
for inst in range(len(_cumulative_profile)):
_cumulative_profile[inst] += new_profile[inst]
def snapshot_profile():
"""Returns the cumulative execution profile until this call."""
with _profile_lock:
merge_profile()
return copy.deepcopy(_cumulative_profile)
def | (profile):
"""Returns the most common opcodes in order of descending frequency.
The result is a list of tuples of the form
(opcode, opname, # of occurrences)
"""
if has_pairs(profile) and profile:
inst_list = profile[-1]
else:
inst_list = profile
result = [(op, opcode.opname[op], count)
for op, count in enumerate(inst_list)
if count > 0]
result.sort(key=operator.itemgetter(2), reverse=True)
return result
def common_pairs(profile):
"""Returns the most common opcode pairs in order of descending frequency.
The result is a list of tuples of the form
((1st opcode, 2nd opcode),
(1st opname, 2nd opname),
# of occurrences of the pair)
"""
if not has_pairs(profile):
return []
result = [((op1, op2), (opcode.opname[op1], opcode.opname[op2]), count)
# Drop the row of single-op profiles with [:-1]
for op1, op1profile in enumerate(profile[:-1])
for op2, count in enumerate(op1profile)
if count > 0]
result.sort(key=operator.itemgetter(2), reverse=True)
return result
def render_common_pairs(profile=None):
"""Renders the most common opcode pairs to a string in order of
descending frequency.
The result is a series of lines of the form:
# of occurrences: ('1st opname', '2nd opname')
"""
if profile is None:
profile = snapshot_profile()
def seq():
for _, ops, count in common_pairs(profile):
yield "%s: %s\n" % (count, ops)
return ''.join(seq())
| common_instructions | identifier_name |
analyze_dxp.py | """
Some helper functions to analyze the output of sys.getdxp() (which is
only available if Python was built with -DDYNAMIC_EXECUTION_PROFILE).
These will tell you which opcodes have been executed most frequently
in the current process, and, if Python was also built with -DDXPAIRS,
will tell you which instruction _pairs_ were executed most frequently,
which may help in choosing new instructions.
If Python was built without -DDYNAMIC_EXECUTION_PROFILE, importing
this module will raise a RuntimeError.
If you're running a script you want to profile, a simple way to get
the common pairs is:
$ PYTHONPATH=$PYTHONPATH:<python_srcdir>/Tools/scripts \
./python -i -O the_script.py --args
...
> from analyze_dxp import *
> s = render_common_pairs()
> open('/tmp/some_file', 'w').write(s)
"""
import copy
import opcode
import operator
import sys
import threading
if not hasattr(sys, "getdxp"):
raise RuntimeError("Can't import analyze_dxp: Python built without"
" -DDYNAMIC_EXECUTION_PROFILE.")
_profile_lock = threading.RLock()
_cumulative_profile = sys.getdxp()
# If Python was built with -DDXPAIRS, sys.getdxp() returns a list of
# lists of ints. Otherwise it returns just a list of ints.
def has_pairs(profile):
"""Returns True if the Python that produced the argument profile
was built with -DDXPAIRS."""
return len(profile) > 0 and isinstance(profile[0], list)
def reset_profile():
"""Forgets any execution profile that has been gathered so far."""
with _profile_lock:
sys.getdxp() # Resets the internal profile
global _cumulative_profile
_cumulative_profile = sys.getdxp() # 0s out our copy.
def merge_profile():
"""Reads sys.getdxp() and merges it into this module's cached copy.
We need this because sys.getdxp() 0s itself every time it's called."""
with _profile_lock:
new_profile = sys.getdxp()
if has_pairs(new_profile):
for first_inst in range(len(_cumulative_profile)):
for second_inst in range(len(_cumulative_profile[first_inst])):
_cumulative_profile[first_inst][second_inst] += (
new_profile[first_inst][second_inst])
else:
for inst in range(len(_cumulative_profile)):
_cumulative_profile[inst] += new_profile[inst]
def snapshot_profile():
"""Returns the cumulative execution profile until this call."""
with _profile_lock:
merge_profile()
return copy.deepcopy(_cumulative_profile)
def common_instructions(profile):
"""Returns the most common opcodes in order of descending frequency.
The result is a list of tuples of the form
(opcode, opname, # of occurrences)
"""
if has_pairs(profile) and profile:
inst_list = profile[-1]
else:
inst_list = profile
result = [(op, opcode.opname[op], count)
for op, count in enumerate(inst_list)
if count > 0]
result.sort(key=operator.itemgetter(2), reverse=True)
return result
def common_pairs(profile):
"""Returns the most common opcode pairs in order of descending frequency.
The result is a list of tuples of the form
((1st opcode, 2nd opcode),
(1st opname, 2nd opname),
# of occurrences of the pair)
"""
if not has_pairs(profile):
return []
result = [((op1, op2), (opcode.opname[op1], opcode.opname[op2]), count)
# Drop the row of single-op profiles with [:-1]
for op1, op1profile in enumerate(profile[:-1])
for op2, count in enumerate(op1profile)
if count > 0]
result.sort(key=operator.itemgetter(2), reverse=True)
|
def render_common_pairs(profile=None):
"""Renders the most common opcode pairs to a string in order of
descending frequency.
The result is a series of lines of the form:
# of occurrences: ('1st opname', '2nd opname')
"""
if profile is None:
profile = snapshot_profile()
def seq():
for _, ops, count in common_pairs(profile):
yield "%s: %s\n" % (count, ops)
return ''.join(seq()) | return result
| random_line_split |
schema.js | /*!
* Module dependencies.
*/
var readPref = require('./drivers').ReadPreference;
var EventEmitter = require('events').EventEmitter;
var VirtualType = require('./virtualtype');
var utils = require('./utils');
var MongooseTypes;
var Kareem = require('kareem');
var async = require('async');
var PromiseProvider = require('./promise_provider');
var IS_QUERY_HOOK = {
count: true,
find: true,
findOne: true,
findOneAndUpdate: true,
findOneAndRemove: true,
update: true
};
/**
* Schema constructor.
*
* ####Example:
*
* var child = new Schema({ name: String });
* var schema = new Schema({ name: String, age: Number, children: [child] });
* var Tree = mongoose.model('Tree', schema);
*
* // setting schema options
* new Schema({ name: String }, { _id: false, autoIndex: false })
*
* ####Options:
*
* - [autoIndex](/docs/guide.html#autoIndex): bool - defaults to null (which means use the connection's autoIndex option)
* - [bufferCommands](/docs/guide.html#bufferCommands): bool - defaults to true
* - [capped](/docs/guide.html#capped): bool - defaults to false
* - [collection](/docs/guide.html#collection): string - no default
* - [emitIndexErrors](/docs/guide.html#emitIndexErrors): bool - defaults to false.
* - [id](/docs/guide.html#id): bool - defaults to true
* - [_id](/docs/guide.html#_id): bool - defaults to true
* - `minimize`: bool - controls [document#toObject](#document_Document-toObject) behavior when called manually - defaults to true
* - [read](/docs/guide.html#read): string
* - [safe](/docs/guide.html#safe): bool - defaults to true.
* - [shardKey](/docs/guide.html#shardKey): bool - defaults to `null`
* - [strict](/docs/guide.html#strict): bool - defaults to true
* - [toJSON](/docs/guide.html#toJSON) - object - no default
* - [toObject](/docs/guide.html#toObject) - object - no default
* - [typeKey](/docs/guide.html#typeKey) - string - defaults to 'type'
* - [validateBeforeSave](/docs/guide.html#validateBeforeSave) - bool - defaults to `true`
* - [versionKey](/docs/guide.html#versionKey): bool - defaults to "__v"
*
* ####Note:
*
* _When nesting schemas, (`children` in the example above), always declare the child schema first before passing it into its parent._
*
* @param {Object} definition
* @inherits NodeJS EventEmitter http://nodejs.org/api/events.html#events_class_events_eventemitter
* @event `init`: Emitted after the schema is compiled into a `Model`.
* @api public
*/
function | (obj, options) {
if (!(this instanceof Schema))
return new Schema(obj, options);
this.paths = {};
this.subpaths = {};
this.virtuals = {};
this.nested = {};
this.inherits = {};
this.callQueue = [];
this._indexes = [];
this.methods = {};
this.statics = {};
this.tree = {};
this._requiredpaths = undefined;
this.discriminatorMapping = undefined;
this._indexedpaths = undefined;
this.s = {
hooks: new Kareem(),
queryHooks: IS_QUERY_HOOK
};
this.options = this.defaultOptions(options);
// build paths
if (obj) {
this.add(obj);
}
// check if _id's value is a subdocument (gh-2276)
var _idSubDoc = obj && obj._id && utils.isObject(obj._id);
// ensure the documents get an auto _id unless disabled
var auto_id = !this.paths['_id'] &&
(!this.options.noId && this.options._id) && !_idSubDoc;
if (auto_id) {
obj = { _id: { auto: true } };
obj._id[this.options.typeKey] = Schema.ObjectId;
this.add(obj);
}
// ensure the documents receive an id getter unless disabled
var autoid = !this.paths['id'] &&
(!this.options.noVirtualId && this.options.id);
if (autoid) {
this.virtual('id').get(idGetter);
}
for (var i = 0; i < this._defaultMiddleware.length; ++i) {
var m = this._defaultMiddleware[i];
this[m.kind](m.hook, !!m.isAsync, m.fn);
}
// adds updatedAt and createdAt timestamps to documents if enabled
var timestamps = this.options.timestamps;
if (timestamps) {
var createdAt = timestamps.createdAt || 'createdAt',
updatedAt = timestamps.updatedAt || 'updatedAt',
schemaAdditions = {};
schemaAdditions[updatedAt] = Date;
if (!this.paths[createdAt]) {
schemaAdditions[createdAt] = Date;
}
this.add(schemaAdditions);
this.pre('save', function(next) {
var defaultTimestamp = new Date();
if (!this[createdAt]) {
this[createdAt] = auto_id ? this._id.getTimestamp() : defaultTimestamp;
}
this[updatedAt] = this.isNew ? this[createdAt] : defaultTimestamp;
next();
});
var genUpdates = function() {
var now = new Date();
var updates = {$set: {}, $setOnInsert: {}};
updates.$set[updatedAt] = now;
updates.$setOnInsert[createdAt] = now;
return updates;
};
this.pre('findOneAndUpdate', function(next) {
this.findOneAndUpdate({}, genUpdates());
next();
});
this.pre('update', function(next) {
this.update({}, genUpdates());
next();
});
}
}
/*!
* Returns this documents _id cast to a string.
*/
function idGetter() {
if (this.$__._id) {
return this.$__._id;
}
return this.$__._id = null == this._id
? null
: String(this._id);
}
/*!
* Inherit from EventEmitter.
*/
Schema.prototype = Object.create( EventEmitter.prototype );
Schema.prototype.constructor = Schema;
/**
* Default middleware attached to a schema. Cannot be changed.
*
* This field is used to make sure discriminators don't get multiple copies of
* built-in middleware. Declared as a constant because changing this at runtime
* may lead to instability with Model.prototype.discriminator().
*
* @api private
* @property _defaultMiddleware
*/
Object.defineProperty(Schema.prototype, '_defaultMiddleware', {
configurable: false,
enumerable: false,
writable: false,
value: [{
kind: 'pre',
hook: 'save',
fn: function(next, options) {
// Nested docs have their own presave
if (this.ownerDocument) {
return next();
}
var hasValidateBeforeSaveOption = options &&
(typeof options === 'object') &&
('validateBeforeSave' in options);
var shouldValidate;
if (hasValidateBeforeSaveOption) {
shouldValidate = !!options.validateBeforeSave;
} else {
shouldValidate = this.schema.options.validateBeforeSave;
}
// Validate
if (shouldValidate) {
// HACK: use $__original_validate to avoid promises so bluebird doesn't
// complain
if (this.$__original_validate) {
this.$__original_validate({ __noPromise: true }, function(error) {
next(error);
});
} else {
this.validate({ __noPromise: true }, function(error) {
next(error);
});
}
} else {
next();
}
}
}, {
kind: 'pre',
hook: 'save',
isAsync: true,
fn: function(next, done) {
var Promise = PromiseProvider.get(),
subdocs = this.$__getAllSubdocs();
if (!subdocs.length || this.$__preSavingFromParent) {
done();
next();
return;
}
new Promise.ES6(function(resolve, reject) {
async.each(subdocs, function(subdoc, cb) {
subdoc.$__preSavingFromParent = true;
subdoc.save(function(err) {
cb(err);
});
}, function(error) {
for (var i = 0; i < subdocs.length; ++i) {
delete subdocs[i].$__preSavingFromParent;
}
if (error) {
reject(error);
return;
}
resolve();
});
}).then(function() {
next();
done();
}, done);
}
}]
});
/**
* Schema as flat paths
*
* ####Example:
* {
* '_id' : SchemaType,
* , 'nested.key' : SchemaType,
* }
*
* @api private
* @property paths
*/
Schema.prototype.paths;
/**
* Schema as a tree
*
* ####Example:
* {
* '_id' : ObjectId
* , 'nested' : {
* 'key' : String
* }
* }
*
* @api private
* @property tree
*/
Schema.prototype.tree;
/**
* Returns default options for this schema, merged with `options`.
*
* @param {Object} options
* @return {Object}
* @api private
*/
Schema.prototype.defaultOptions = function(options) {
if (options && false === options.safe) {
options.safe = { w: 0 };
}
if (options && options.safe && 0 === options.safe.w) {
// if you turn off safe writes, then versioning goes off as well
options.versionKey = false;
}
options = utils.options({
strict: true,
bufferCommands: true,
capped: false, // { size, max, autoIndexId }
versionKey: '__v',
discriminatorKey: '__t',
minimize: true,
autoIndex: null,
shardKey: null,
read: null,
validateBeforeSave: true,
// the following are only applied at construction time
noId: false, // deprecated, use { _id: false }
_id: true,
noVirtualId: false, // deprecated, use { id: false }
id: true,
typeKey: 'type'
}, options);
if (options.read) {
options.read = readPref(options.read);
}
return options;
};
/**
* Adds key path / schema type pairs to this schema.
*
* ####Example:
*
* var ToySchema = new Schema;
* ToySchema.add({ name: 'string', color: 'string', price: 'number' });
*
* @param {Object} obj
* @param {String} prefix
* @api public
*/
Schema.prototype.add = function add(obj, prefix) {
prefix = prefix || '';
var keys = Object.keys(obj);
for (var i = 0; i < keys.length; ++i) {
var key = keys[i];
if (null == obj[key]) {
throw new TypeError('Invalid value for schema path `' + prefix + key + '`');
}
if (Array.isArray(obj[key]) && obj[key].length === 1 && null == obj[key][0]) {
throw new TypeError('Invalid value for schema Array path `' + prefix + key + '`');
}
if (utils.isObject(obj[key]) &&
(!obj[key].constructor || 'Object' == utils.getFunctionName(obj[key].constructor)) &&
(!obj[key][this.options.typeKey] || (this.options.typeKey === 'type' && obj[key].type.type))) {
if (Object.keys(obj[key]).length) {
// nested object { last: { name: String }}
this.nested[prefix + key] = true;
this.add(obj[key], prefix + key + '.');
} else {
this.path(prefix + key, obj[key]); // mixed type
}
} else {
this.path(prefix + key, obj[key]);
}
}
};
/**
* Reserved document keys.
*
* Keys in this object are names that are rejected in schema declarations b/c they conflict with mongoose functionality. Using these key name will throw an error.
*
* on, emit, _events, db, get, set, init, isNew, errors, schema, options, modelName, collection, _pres, _posts, toObject
*
* _NOTE:_ Use of these terms as method names is permitted, but play at your own risk, as they may be existing mongoose document methods you are stomping on.
*
* var schema = new Schema(..);
* schema.methods.init = function () {} // potentially breaking
*/
Schema.reserved = Object.create(null);
var reserved = Schema.reserved;
// EventEmitter
reserved.emit =
reserved.on =
reserved.once =
// document properties and functions
reserved.collection =
reserved.db =
reserved.errors =
reserved.init =
reserved.isModified =
reserved.isNew =
reserved.get =
reserved.modelName =
reserved.save =
reserved.schema =
reserved.set =
reserved.toObject =
reserved.validate =
// hooks.js
reserved._pres = reserved._posts = 1;
/**
* Document keys to print warnings for
*/
var warnings = {};
warnings.increment = '`increment` should not be used as a schema path name ' +
'unless you have disabled versioning.';
/**
* Gets/sets schema paths.
*
* Sets a path (if arity 2)
* Gets a path (if arity 1)
*
* ####Example
*
* schema.path('name') // returns a SchemaType
* schema.path('name', Number) // changes the schemaType of `name` to Number
*
* @param {String} path
* @param {Object} constructor
* @api public
*/
Schema.prototype.path = function(path, obj) {
if (obj == undefined) {
if (this.paths[path]) return this.paths[path];
if (this.subpaths[path]) return this.subpaths[path];
// subpaths?
return /\.\d+\.?.*$/.test(path)
? getPositionalPath(this, path)
: undefined;
}
// some path names conflict with document methods
if (reserved[path]) {
throw new Error("`" + path + "` may not be used as a schema pathname");
}
if (warnings[path]) {
console.log('WARN: ' + warnings[path]);
}
// update the tree
var subpaths = path.split(/\./),
last = subpaths.pop(),
branch = this.tree;
subpaths.forEach(function(sub, i) {
if (!branch[sub]) branch[sub] = {};
if ('object' != typeof branch[sub]) {
var msg = 'Cannot set nested path `' + path + '`. '
+ 'Parent path `'
+ subpaths.slice(0, i).concat([sub]).join('.')
+ '` already set to type ' + branch[sub].name
+ '.';
throw new Error(msg);
}
branch = branch[sub];
});
branch[last] = utils.clone(obj);
this.paths[path] = Schema.interpretAsType(path, obj, this.options);
return this;
};
/**
* Converts type arguments into Mongoose Types.
*
* @param {String} path
* @param {Object} obj constructor
* @api private
*/
Schema.interpretAsType = function(path, obj, options) {
if (obj.constructor) {
var constructorName = utils.getFunctionName(obj.constructor);
if (constructorName != 'Object') {
var oldObj = obj;
obj = {};
obj[options.typeKey] = oldObj;
}
}
// Get the type making sure to allow keys named "type"
// and default to mixed if not specified.
// { type: { type: String, default: 'freshcut' } }
var type = obj[options.typeKey] && (options.typeKey !== 'type' || !obj.type.type)
? obj[options.typeKey]
: {};
if ('Object' == utils.getFunctionName(type.constructor) || 'mixed' == type) {
return new MongooseTypes.Mixed(path, obj);
}
if (Array.isArray(type) || Array == type || 'array' == type) {
// if it was specified through { type } look for `cast`
var cast = (Array == type || 'array' == type)
? obj.cast
: type[0];
if (cast instanceof Schema) {
return new MongooseTypes.DocumentArray(path, cast, obj);
}
if ('string' == typeof cast) {
cast = MongooseTypes[cast.charAt(0).toUpperCase() + cast.substring(1)];
} else if (cast && (!cast[options.typeKey] || (options.typeKey === 'type' && cast.type.type))
&& 'Object' == utils.getFunctionName(cast.constructor)
&& Object.keys(cast).length) {
// The `minimize` and `typeKey` options propagate to child schemas
// declared inline, like `{ arr: [{ val: { $type: String } }] }`.
// See gh-3560
var childSchemaOptions = { minimize: options.minimize };
if (options.typeKey) {
childSchemaOptions.typeKey = options.typeKey;
}
var childSchema = new Schema(cast, childSchemaOptions);
return new MongooseTypes.DocumentArray(path, childSchema, obj);
}
return new MongooseTypes.Array(path, cast || MongooseTypes.Mixed, obj);
}
if (type instanceof Schema) {
return new MongooseTypes.Embedded(type, path, obj);
}
var name;
if (Buffer.isBuffer(type)) {
name = 'Buffer';
} else {
name = 'string' == typeof type
? type
// If not string, `type` is a function. Outside of IE, function.name
// gives you the function name. In IE, you need to compute it
: type.schemaName || utils.getFunctionName(type);
}
if (name) {
name = name.charAt(0).toUpperCase() + name.substring(1);
}
if (undefined == MongooseTypes[name]) {
throw new TypeError('Undefined type `' + name + '` at `' + path +
'`\n Did you try nesting Schemas? ' +
'You can only nest using refs or arrays.');
}
return new MongooseTypes[name](path, obj);
};
/**
* Iterates the schemas paths similar to Array#forEach.
*
* The callback is passed the pathname and schemaType as arguments on each iteration.
*
* @param {Function} fn callback function
* @return {Schema} this
* @api public
*/
Schema.prototype.eachPath = function(fn) {
var keys = Object.keys(this.paths),
len = keys.length;
for (var i = 0; i < len; ++i) {
fn(keys[i], this.paths[keys[i]]);
}
return this;
};
/**
* Returns an Array of path strings that are required by this schema.
*
* @api public
* @param {Boolean} invalidate refresh the cache
* @return {Array}
*/
Schema.prototype.requiredPaths = function requiredPaths(invalidate) {
if (this._requiredpaths && !invalidate) return this._requiredpaths;
var paths = Object.keys(this.paths),
i = paths.length,
ret = [];
while (i--) {
var path = paths[i];
if (this.paths[path].isRequired) ret.push(path);
}
return this._requiredpaths = ret;
};
/**
* Returns indexes from fields and schema-level indexes (cached).
*
* @api private
* @return {Array}
*/
Schema.prototype.indexedPaths = function indexedPaths() {
if (this._indexedpaths) return this._indexedpaths;
return this._indexedpaths = this.indexes();
};
/**
* Returns the pathType of `path` for this schema.
*
* Given a path, returns whether it is a real, virtual, nested, or ad-hoc/undefined path.
*
* @param {String} path
* @return {String}
* @api public
*/
Schema.prototype.pathType = function(path) {
if (path in this.paths) return 'real';
if (path in this.virtuals) return 'virtual';
if (path in this.nested) return 'nested';
if (path in this.subpaths) return 'real';
if (/\.\d+\.|\.\d+$/.test(path)) {
return getPositionalPathType(this, path);
} else {
return 'adhocOrUndefined';
}
};
/**
* Returns true iff this path is a child of a mixed schema.
*
* @param {String} path
* @return {Boolean}
* @api private
*/
Schema.prototype.hasMixedParent = function(path) {
var subpaths = path.split(/\./g);
path = '';
for (var i = 0; i < subpaths.length; ++i) {
path = i > 0 ? path + '.' + subpaths[i] : subpaths[i];
if (path in this.paths &&
this.paths[path] instanceof MongooseTypes.Mixed) {
return true;
}
}
return false;
};
/*!
* ignore
*/
function getPositionalPathType(self, path) {
var subpaths = path.split(/\.(\d+)\.|\.(\d+)$/).filter(Boolean);
if (subpaths.length < 2) {
return self.paths[subpaths[0]];
}
var val = self.path(subpaths[0]);
var isNested = false;
if (!val) return val;
var last = subpaths.length - 1,
subpath,
i = 1;
for (; i < subpaths.length; ++i) {
isNested = false;
subpath = subpaths[i];
if (i === last && val && !val.schema && !/\D/.test(subpath)) {
if (val instanceof MongooseTypes.Array) {
// StringSchema, NumberSchema, etc
val = val.caster;
} else {
val = undefined;
}
break;
}
// ignore if its just a position segment: path.0.subpath
if (!/\D/.test(subpath)) continue;
if (!(val && val.schema)) {
val = undefined;
break;
}
var type = val.schema.pathType(subpath);
isNested = (type === 'nested');
val = val.schema.path(subpath);
}
self.subpaths[path] = val;
if (val) {
return 'real';
}
if (isNested) {
return 'nested';
}
return 'adhocOrUndefined';
}
/*!
* ignore
*/
function getPositionalPath(self, path) {
getPositionalPathType(self, path);
return self.subpaths[path];
}
/**
* Adds a method call to the queue.
*
* @param {String} name name of the document method to call later
* @param {Array} args arguments to pass to the method
* @api public
*/
Schema.prototype.queue = function(name, args) {
this.callQueue.push([name, args]);
return this;
};
/**
* Defines a pre hook for the document.
*
* ####Example
*
* var toySchema = new Schema(..);
*
* toySchema.pre('save', function (next) {
* if (!this.created) this.created = new Date;
* next();
* })
*
* toySchema.pre('validate', function (next) {
* if (this.name != 'Woody') this.name = 'Woody';
* next();
* })
*
* @param {String} method
* @param {Function} callback
* @see hooks.js https://github.com/bnoguchi/hooks-js/tree/31ec571cef0332e21121ee7157e0cf9728572cc3
* @api public
*/
Schema.prototype.pre = function() {
var name = arguments[0];
if (IS_QUERY_HOOK[name]) {
this.s.hooks.pre.apply(this.s.hooks, arguments);
return this;
}
return this.queue('pre', arguments);
};
/**
* Defines a post hook for the document
*
* Post hooks fire `on` the event emitted from document instances of Models compiled from this schema.
*
* var schema = new Schema(..);
* schema.post('save', function (doc) {
* console.log('this fired after a document was saved');
* });
*
* var Model = mongoose.model('Model', schema);
*
* var m = new Model(..);
* m.save(function (err) {
* console.log('this fires after the `post` hook');
* });
*
* @param {String} method name of the method to hook
* @param {Function} fn callback
* @see hooks.js https://github.com/bnoguchi/hooks-js/tree/31ec571cef0332e21121ee7157e0cf9728572cc3
* @api public
*/
Schema.prototype.post = function(method, fn) {
if (IS_QUERY_HOOK[method]) {
this.s.hooks.post.apply(this.s.hooks, arguments);
return this;
}
// assuming that all callbacks with arity < 2 are synchronous post hooks
if (fn.length < 2) {
return this.queue('on', [arguments[0], function(doc) {
return fn.call(doc, doc);
}]);
}
return this.queue('post', [arguments[0], function(next) {
// wrap original function so that the callback goes last,
// for compatibility with old code that is using synchronous post hooks
var self = this;
var args = Array.prototype.slice.call(arguments, 1);
fn.call(this, this, function(err) {
return next.apply(self, [err].concat(args));
});
}]);
};
/**
* Registers a plugin for this schema.
*
* @param {Function} plugin callback
* @param {Object} [opts]
* @see plugins
* @api public
*/
Schema.prototype.plugin = function(fn, opts) {
fn(this, opts);
return this;
};
/**
* Adds an instance method to documents constructed from Models compiled from this schema.
*
* ####Example
*
* var schema = kittySchema = new Schema(..);
*
* schema.method('meow', function () {
* console.log('meeeeeoooooooooooow');
* })
*
* var Kitty = mongoose.model('Kitty', schema);
*
* var fizz = new Kitty;
* fizz.meow(); // meeeeeooooooooooooow
*
* If a hash of name/fn pairs is passed as the only argument, each name/fn pair will be added as methods.
*
* schema.method({
* purr: function () {}
* , scratch: function () {}
* });
*
* // later
* fizz.purr();
* fizz.scratch();
*
* @param {String|Object} method name
* @param {Function} [fn]
* @api public
*/
Schema.prototype.method = function(name, fn) {
if ('string' != typeof name)
for (var i in name)
this.methods[i] = name[i];
else
this.methods[name] = fn;
return this;
};
/**
* Adds static "class" methods to Models compiled from this schema.
*
* ####Example
*
* var schema = new Schema(..);
* schema.static('findByName', function (name, callback) {
* return this.find({ name: name }, callback);
* });
*
* var Drink = mongoose.model('Drink', schema);
* Drink.findByName('sanpellegrino', function (err, drinks) {
* //
* });
*
* If a hash of name/fn pairs is passed as the only argument, each name/fn pair will be added as statics.
*
* @param {String} name
* @param {Function} fn
* @api public
*/
Schema.prototype.static = function(name, fn) {
if ('string' != typeof name)
for (var i in name)
this.statics[i] = name[i];
else
this.statics[name] = fn;
return this;
};
/**
* Defines an index (most likely compound) for this schema.
*
* ####Example
*
* schema.index({ first: 1, last: -1 })
*
* @param {Object} fields
* @param {Object} [options] Options to pass to [MongoDB driver's `createIndex()` function](http://mongodb.github.io/node-mongodb-native/2.0/api/Collection.html#createIndex)
* @param {String} [options.expires=null] Mongoose-specific syntactic sugar, uses [ms](https://www.npmjs.com/package/ms) to convert `expires` option into seconds for the `expireAfterSeconds` in the above link.
* @api public
*/
Schema.prototype.index = function(fields, options) {
options || (options = {});
if (options.expires)
utils.expires(options);
this._indexes.push([fields, options]);
return this;
};
/**
* Sets/gets a schema option.
*
* ####Example
*
* schema.set('strict'); // 'true' by default
* schema.set('strict', false); // Sets 'strict' to false
* schema.set('strict'); // 'false'
*
* @param {String} key option name
* @param {Object} [value] if not passed, the current option value is returned
* @see Schema ./
* @api public
*/
Schema.prototype.set = function(key, value, _tags) {
if (1 === arguments.length) {
return this.options[key];
}
switch (key) {
case 'read':
this.options[key] = readPref(value, _tags);
break;
case 'safe':
this.options[key] = false === value
? { w: 0 }
: value;
break;
default:
this.options[key] = value;
}
return this;
};
/**
* Gets a schema option.
*
* @param {String} key option name
* @api public
*/
Schema.prototype.get = function(key) {
return this.options[key];
};
/**
* The allowed index types
*
* @static indexTypes
* @receiver Schema
* @api public
*/
var indexTypes = '2d 2dsphere hashed text'.split(' ');
Object.defineProperty(Schema, 'indexTypes', {
get: function() { return indexTypes; },
set: function() { throw new Error('Cannot overwrite Schema.indexTypes'); }
});
/**
* Compiles indexes from fields and schema-level indexes
*
* @api public
*/
Schema.prototype.indexes = function() {
'use strict';
var indexes = [];
var seenPrefix = {};
var collectIndexes = function(schema, prefix) {
if (seenPrefix[prefix]) {
return;
}
seenPrefix[prefix] = true;
prefix = prefix || '';
var key, path, index, field, isObject, options, type;
var keys = Object.keys(schema.paths);
for (var i = 0; i < keys.length; ++i) {
key = keys[i];
path = schema.paths[key];
if (path instanceof MongooseTypes.DocumentArray) {
collectIndexes(path.schema, key + '.');
} else if (path.$isSingleNested) {
collectIndexes(path.schema, key + '.');
} else {
index = path._index;
if (false !== index && null != index) {
field = {};
isObject = utils.isObject(index);
options = isObject ? index : {};
type = 'string' == typeof index ? index :
isObject ? index.type :
false;
if (type && ~Schema.indexTypes.indexOf(type)) {
field[prefix + key] = type;
} else {
field[prefix + key] = 1;
}
delete options.type;
if (!('background' in options)) {
options.background = true;
}
indexes.push([field, options]);
}
}
}
if (prefix) {
fixSubIndexPaths(schema, prefix);
} else {
schema._indexes.forEach(function(index) {
if (!('background' in index[1])) index[1].background = true;
});
indexes = indexes.concat(schema._indexes);
}
};
collectIndexes(this);
return indexes;
/*!
* Checks for indexes added to subdocs using Schema.index().
* These indexes need their paths prefixed properly.
*
* schema._indexes = [ [indexObj, options], [indexObj, options] ..]
*/
function fixSubIndexPaths(schema, prefix) {
var subindexes = schema._indexes,
len = subindexes.length,
indexObj,
newindex,
klen,
keys,
key,
i = 0,
j;
for (i = 0; i < len; ++i) {
indexObj = subindexes[i][0];
keys = Object.keys(indexObj);
klen = keys.length;
newindex = {};
// use forward iteration, order matters
for (j = 0; j < klen; ++j) {
key = keys[j];
newindex[prefix + key] = indexObj[key];
}
indexes.push([newindex, subindexes[i][1]]);
}
}
};
/**
* Creates a virtual type with the given name.
*
* @param {String} name
* @param {Object} [options]
* @return {VirtualType}
*/
Schema.prototype.virtual = function(name, options) {
  // Walk (and lazily build) the schema tree one dotted segment at a time;
  // the leaf segment becomes the VirtualType itself. The resulting node is
  // cached on `this.virtuals` and returned.
  var parts = name.split('.');
  var last = parts.length - 1;
  var branch = this.tree;
  for (var i = 0; i < parts.length; ++i) {
    var part = parts[i];
    if (!branch[part]) {
      branch[part] = (i === last)
          ? new VirtualType(options, name)
          : {};
    }
    branch = branch[part];
  }
  return this.virtuals[name] = branch;
};
/**
* Returns the virtual type with the given `name`.
*
* @param {String} name
* @return {VirtualType}
*/
Schema.prototype.virtualpath = function(name) {
  // Look up a previously declared VirtualType by its full dotted name.
  return this.virtuals[name];
};
/**
* Removes the given `path` (or [`paths`]).
*
* @param {String|Array} path
*
* @api public
*/
Schema.prototype.remove = function(path) {
  // Accept a single path name or an array of names; anything else is a
  // silent no-op (matching the historical behavior).
  var names = (typeof path === 'string') ? [path] : path;
  if (!Array.isArray(names)) {
    return;
  }
  for (var i = 0; i < names.length; ++i) {
    var name = names[i];
    // only delete paths that actually resolve to a SchemaType
    if (this.path(name)) {
      delete this.paths[name];
    }
  }
};
/*!
* ignore
*/
Schema.prototype._getSchema = function(path) {
  // Resolve the SchemaType for `path`, falling back to a manual descent
  // through arrays/subdocuments (including positional `arr.$.child`
  // selectors) when a direct `schema.path()` lookup fails.
  var schema = this;
  var pathschema = schema.path(path);
  if (pathschema) {
    return pathschema;
  }
  // look for arrays
  return (function search(parts, schema) {
    // Try progressively shorter prefixes of `parts` until one names a
    // real path on `schema`, then recurse into its child schema for the
    // remaining segments.
    var p = parts.length + 1,
        foundschema,
        trypath;
    while (p--) {
      trypath = parts.slice(0, p).join('.');
      foundschema = schema.path(trypath);
      if (foundschema) {
        if (foundschema.caster) {
          // array of Mixed?
          if (foundschema.caster instanceof MongooseTypes.Mixed) {
            return foundschema.caster;
          }
          // Now that we found the array, we need to check if there
          // are remaining document paths to look up for casting.
          // Also we need to handle array.$.path since schema.path
          // doesn't work for that.
          // If there is no foundschema.schema we are dealing with
          // a path like array.$
          if (p !== parts.length && foundschema.schema) {
            if ('$' === parts[p]) {
              // comments.$.comments.$.title
              return search(parts.slice(p + 1), foundschema.schema);
            } else {
              // this is the last path of the selector
              return search(parts.slice(p), foundschema.schema);
            }
          }
        }
        // may be undefined when no prefix matched
        return foundschema;
      }
    }
  })(path.split('.'), schema);
};
/*!
* Module exports.
*/
module.exports = exports = Schema;
// require down here because of reference issues
/**
* The various built-in Mongoose Schema Types.
*
* ####Example:
*
* var mongoose = require('mongoose');
* var ObjectId = mongoose.Schema.Types.ObjectId;
*
* ####Types:
*
* - [String](#schema-string-js)
* - [Number](#schema-number-js)
* - [Boolean](#schema-boolean-js) | Bool
* - [Array](#schema-array-js)
* - [Buffer](#schema-buffer-js)
* - [Date](#schema-date-js)
* - [ObjectId](#schema-objectid-js) | Oid
* - [Mixed](#schema-mixed-js)
*
* Using this exposed access to the `Mixed` SchemaType, we can use them in our schema.
*
* var Mixed = mongoose.Schema.Types.Mixed;
* new mongoose.Schema({ _user: Mixed })
*
* @api public
*/
Schema.Types = MongooseTypes = require('./schema/index');
/*!
* ignore
*/
exports.ObjectId = MongooseTypes.ObjectId;
| Schema | identifier_name |
schema.js | /*!
* Module dependencies.
*/
var readPref = require('./drivers').ReadPreference;
var EventEmitter = require('events').EventEmitter;
var VirtualType = require('./virtualtype');
var utils = require('./utils');
var MongooseTypes;
var Kareem = require('kareem');
var async = require('async');
var PromiseProvider = require('./promise_provider');
// Query methods whose pre/post hooks are dispatched through Kareem on the
// Query object, rather than queued for replay on document instances.
var IS_QUERY_HOOK = {
  count: true,
  find: true,
  findOne: true,
  findOneAndUpdate: true,
  findOneAndRemove: true,
  update: true
};
/**
* Schema constructor.
*
* ####Example:
*
* var child = new Schema({ name: String });
* var schema = new Schema({ name: String, age: Number, children: [child] });
* var Tree = mongoose.model('Tree', schema);
*
* // setting schema options
* new Schema({ name: String }, { _id: false, autoIndex: false })
*
* ####Options:
*
* - [autoIndex](/docs/guide.html#autoIndex): bool - defaults to null (which means use the connection's autoIndex option)
* - [bufferCommands](/docs/guide.html#bufferCommands): bool - defaults to true
* - [capped](/docs/guide.html#capped): bool - defaults to false
* - [collection](/docs/guide.html#collection): string - no default
* - [emitIndexErrors](/docs/guide.html#emitIndexErrors): bool - defaults to false.
* - [id](/docs/guide.html#id): bool - defaults to true
* - [_id](/docs/guide.html#_id): bool - defaults to true
* - `minimize`: bool - controls [document#toObject](#document_Document-toObject) behavior when called manually - defaults to true
* - [read](/docs/guide.html#read): string
* - [safe](/docs/guide.html#safe): bool - defaults to true.
* - [shardKey](/docs/guide.html#shardKey): bool - defaults to `null`
* - [strict](/docs/guide.html#strict): bool - defaults to true
* - [toJSON](/docs/guide.html#toJSON) - object - no default
* - [toObject](/docs/guide.html#toObject) - object - no default
* - [typeKey](/docs/guide.html#typeKey) - string - defaults to 'type'
* - [validateBeforeSave](/docs/guide.html#validateBeforeSave) - bool - defaults to `true`
* - [versionKey](/docs/guide.html#versionKey): bool - defaults to "__v"
*
* ####Note:
*
* _When nesting schemas, (`children` in the example above), always declare the child schema first before passing it into its parent._
*
* @param {Object} definition
* @inherits NodeJS EventEmitter http://nodejs.org/api/events.html#events_class_events_eventemitter
* @event `init`: Emitted after the schema is compiled into a `Model`.
* @api public
*/
function Schema(obj, options) {
  // allow calling without `new`
  if (!(this instanceof Schema))
    return new Schema(obj, options);
  this.paths = {};       // flat map: 'nested.key' -> SchemaType
  this.subpaths = {};    // cache for positional paths (e.g. 'arr.0.x')
  this.virtuals = {};    // name -> VirtualType
  this.nested = {};      // paths that are plain nested objects
  this.inherits = {};
  this.callQueue = [];   // queued hook/method calls replayed on compile
  this._indexes = [];
  this.methods = {};     // instance methods for compiled models
  this.statics = {};     // static methods for compiled models
  this.tree = {};        // nested (tree) view of the schema definition
  this._requiredpaths = undefined;      // lazy cache, see requiredPaths()
  this.discriminatorMapping = undefined;
  this._indexedpaths = undefined;       // lazy cache, see indexedPaths()
  this.s = {
    hooks: new Kareem(),
    queryHooks: IS_QUERY_HOOK
  };
  this.options = this.defaultOptions(options);
  // build paths
  if (obj) {
    this.add(obj);
  }
  // check if _id's value is a subdocument (gh-2276)
  var _idSubDoc = obj && obj._id && utils.isObject(obj._id);
  // ensure the documents get an auto _id unless disabled
  var auto_id = !this.paths['_id'] &&
      (!this.options.noId && this.options._id) && !_idSubDoc;
  if (auto_id) {
    obj = { _id: { auto: true } };
    obj._id[this.options.typeKey] = Schema.ObjectId;
    this.add(obj);
  }
  // ensure the documents receive an id getter unless disabled
  var autoid = !this.paths['id'] &&
      (!this.options.noVirtualId && this.options.id);
  if (autoid) {
    this.virtual('id').get(idGetter);
  }
  // attach built-in middleware (validation + subdocument pre-save)
  for (var i = 0; i < this._defaultMiddleware.length; ++i) {
    var m = this._defaultMiddleware[i];
    this[m.kind](m.hook, !!m.isAsync, m.fn);
  }
  // adds updatedAt and createdAt timestamps to documents if enabled
  var timestamps = this.options.timestamps;
  if (timestamps) {
    var createdAt = timestamps.createdAt || 'createdAt',
        updatedAt = timestamps.updatedAt || 'updatedAt',
        schemaAdditions = {};
    schemaAdditions[updatedAt] = Date;
    if (!this.paths[createdAt]) {
      schemaAdditions[createdAt] = Date;
    }
    this.add(schemaAdditions);
    this.pre('save', function(next) {
      var defaultTimestamp = new Date();
      if (!this[createdAt]) {
        // with an auto ObjectId _id, reuse the timestamp embedded in it
        this[createdAt] = auto_id ? this._id.getTimestamp() : defaultTimestamp;
      }
      // new documents share createdAt/updatedAt; updates get a fresh time
      this[updatedAt] = this.isNew ? this[createdAt] : defaultTimestamp;
      next();
    });
    // build the $set/$setOnInsert payload used by the query-level hooks
    var genUpdates = function() {
      var now = new Date();
      var updates = {$set: {}, $setOnInsert: {}};
      updates.$set[updatedAt] = now;
      updates.$setOnInsert[createdAt] = now;
      return updates;
    };
    this.pre('findOneAndUpdate', function(next) {
      this.findOneAndUpdate({}, genUpdates());
      next();
    });
    this.pre('update', function(next) {
      this.update({}, genUpdates());
      next();
    });
  }
}
/*!
* Returns this documents _id cast to a string.
*/
function idGetter() |
/*!
* Inherit from EventEmitter.
*/
Schema.prototype = Object.create( EventEmitter.prototype );
Schema.prototype.constructor = Schema;
/**
* Default middleware attached to a schema. Cannot be changed.
*
* This field is used to make sure discriminators don't get multiple copies of
* built-in middleware. Declared as a constant because changing this at runtime
* may lead to instability with Model.prototype.discriminator().
*
* @api private
* @property _defaultMiddleware
*/
Object.defineProperty(Schema.prototype, '_defaultMiddleware', {
  configurable: false,
  enumerable: false,
  writable: false,
  value: [{
    // built-in pre-save #1: run document validation unless disabled
    kind: 'pre',
    hook: 'save',
    fn: function(next, options) {
      // Nested docs have their own presave
      if (this.ownerDocument) {
        return next();
      }
      // a per-save `validateBeforeSave` option overrides the schema option
      var hasValidateBeforeSaveOption = options &&
          (typeof options === 'object') &&
          ('validateBeforeSave' in options);
      var shouldValidate;
      if (hasValidateBeforeSaveOption) {
        shouldValidate = !!options.validateBeforeSave;
      } else {
        shouldValidate = this.schema.options.validateBeforeSave;
      }
      // Validate
      if (shouldValidate) {
        // HACK: use $__original_validate to avoid promises so bluebird doesn't
        // complain
        if (this.$__original_validate) {
          this.$__original_validate({ __noPromise: true }, function(error) {
            next(error);
          });
        } else {
          this.validate({ __noPromise: true }, function(error) {
            next(error);
          });
        }
      } else {
        next();
      }
    }
  }, {
    // built-in pre-save #2 (async): save child subdocuments before parent
    kind: 'pre',
    hook: 'save',
    isAsync: true,
    fn: function(next, done) {
      var Promise = PromiseProvider.get(),
          subdocs = this.$__getAllSubdocs();
      // nothing to do when there are no subdocs, or when this save was
      // itself triggered by a parent (avoids infinite recursion)
      if (!subdocs.length || this.$__preSavingFromParent) {
        done();
        next();
        return;
      }
      new Promise.ES6(function(resolve, reject) {
        async.each(subdocs, function(subdoc, cb) {
          // flag keeps the subdoc's own hook from recursing back upward
          subdoc.$__preSavingFromParent = true;
          subdoc.save(function(err) {
            cb(err);
          });
        }, function(error) {
          // always clear the flags, even on error
          for (var i = 0; i < subdocs.length; ++i) {
            delete subdocs[i].$__preSavingFromParent;
          }
          if (error) {
            reject(error);
            return;
          }
          resolve();
        });
      }).then(function() {
        next();
        done();
      }, done);
    }
  }]
});
/**
* Schema as flat paths
*
* ####Example:
* {
* '_id' : SchemaType,
* , 'nested.key' : SchemaType,
* }
*
* @api private
* @property paths
*/
Schema.prototype.paths;
/**
* Schema as a tree
*
* ####Example:
* {
* '_id' : ObjectId
* , 'nested' : {
* 'key' : String
* }
* }
*
* @api private
* @property tree
*/
Schema.prototype.tree;
/**
* Returns default options for this schema, merged with `options`.
*
* @param {Object} options
* @return {Object}
* @api private
*/
Schema.prototype.defaultOptions = function(options) {
  // Normalize `safe: false` into an unacknowledged-write spec, and turn
  // versioning off whenever writes are unacknowledged (w: 0), since the
  // version key could never be reliably maintained.
  if (options) {
    if (false === options.safe) {
      options.safe = { w: 0 };
    }
    if (options.safe && 0 === options.safe.w) {
      // if you turn off safe writes, then versioning goes off as well
      options.versionKey = false;
    }
  }
  options = utils.options({
    strict: true,
    bufferCommands: true,
    capped: false, // { size, max, autoIndexId }
    versionKey: '__v',
    discriminatorKey: '__t',
    minimize: true,
    autoIndex: null,
    shardKey: null,
    read: null,
    validateBeforeSave: true,
    // the following are only applied at construction time
    noId: false, // deprecated, use { _id: false }
    _id: true,
    noVirtualId: false, // deprecated, use { id: false }
    id: true,
    typeKey: 'type'
  }, options);
  // normalize the read preference into a driver ReadPreference object
  if (options.read) {
    options.read = readPref(options.read);
  }
  return options;
};
/**
* Adds key path / schema type pairs to this schema.
*
* ####Example:
*
* var ToySchema = new Schema;
* ToySchema.add({ name: 'string', color: 'string', price: 'number' });
*
* @param {Object} obj
* @param {String} prefix
* @api public
*/
Schema.prototype.add = function add(obj, prefix) {
  prefix = prefix || '';
  var keys = Object.keys(obj);
  for (var i = 0; i < keys.length; ++i) {
    var key = keys[i];
    if (null == obj[key]) {
      throw new TypeError('Invalid value for schema path `' + prefix + key + '`');
    }
    // a one-element array holding null/undefined is also malformed
    if (Array.isArray(obj[key]) && obj[key].length === 1 && null == obj[key][0]) {
      throw new TypeError('Invalid value for schema Array path `' + prefix + key + '`');
    }
    // A plain object with no type declaration — or whose `type` key is
    // itself a typed path (obj.type.type), i.e. a real field named "type" —
    // describes nested structure rather than a single path.
    if (utils.isObject(obj[key]) &&
        (!obj[key].constructor || 'Object' == utils.getFunctionName(obj[key].constructor)) &&
        (!obj[key][this.options.typeKey] || (this.options.typeKey === 'type' && obj[key].type.type))) {
      if (Object.keys(obj[key]).length) {
        // nested object { last: { name: String }}
        this.nested[prefix + key] = true;
        this.add(obj[key], prefix + key + '.');
      } else {
        this.path(prefix + key, obj[key]); // mixed type
      }
    } else {
      this.path(prefix + key, obj[key]);
    }
  }
};
/**
* Reserved document keys.
*
* Keys in this object are names that are rejected in schema declarations b/c they conflict with mongoose functionality. Using these key name will throw an error.
*
* on, emit, _events, db, get, set, init, isNew, errors, schema, options, modelName, collection, _pres, _posts, toObject
*
* _NOTE:_ Use of these terms as method names is permitted, but play at your own risk, as they may be existing mongoose document methods you are stomping on.
*
* var schema = new Schema(..);
* schema.methods.init = function () {} // potentially breaking
*/
// prototype-less map so inherited Object keys can never collide
Schema.reserved = Object.create(null);
var reserved = Schema.reserved;
// Every name in this chained assignment ends up mapped to 1; path() throws
// when asked to create a path with any of these names.
// EventEmitter
reserved.emit =
reserved.on =
reserved.once =
// document properties and functions
reserved.collection =
reserved.db =
reserved.errors =
reserved.init =
reserved.isModified =
reserved.isNew =
reserved.get =
reserved.modelName =
reserved.save =
reserved.schema =
reserved.set =
reserved.toObject =
reserved.validate =
// hooks.js
reserved._pres = reserved._posts = 1;
/**
* Document keys to print warnings for
*/
var warnings = {};
warnings.increment = '`increment` should not be used as a schema path name ' +
'unless you have disabled versioning.';
/**
* Gets/sets schema paths.
*
* Sets a path (if arity 2)
* Gets a path (if arity 1)
*
* ####Example
*
* schema.path('name') // returns a SchemaType
* schema.path('name', Number) // changes the schemaType of `name` to Number
*
* @param {String} path
* @param {Object} constructor
* @api public
*/
Schema.prototype.path = function(path, obj) {
  // Getter form: with a single argument, return the SchemaType (if any).
  if (obj == undefined) {
    if (this.paths[path]) return this.paths[path];
    if (this.subpaths[path]) return this.subpaths[path];
    // subpaths? (positional selectors such as 'arr.0' or 'arr.0.child')
    return /\.\d+\.?.*$/.test(path)
        ? getPositionalPath(this, path)
        : undefined;
  }
  // some path names conflict with document methods
  if (reserved[path]) {
    throw new Error("`" + path + "` may not be used as a schema pathname");
  }
  if (warnings[path]) {
    console.log('WARN: ' + warnings[path]);
  }
  // update the tree
  var subpaths = path.split(/\./),
      last = subpaths.pop(),
      branch = this.tree;
  // create intermediate nested objects, refusing to overwrite a branch
  // that has already been set to a concrete (leaf) type
  subpaths.forEach(function(sub, i) {
    if (!branch[sub]) branch[sub] = {};
    if ('object' != typeof branch[sub]) {
      var msg = 'Cannot set nested path `' + path + '`. '
          + 'Parent path `'
          + subpaths.slice(0, i).concat([sub]).join('.')
          + '` already set to type ' + branch[sub].name
          + '.';
      throw new Error(msg);
    }
    branch = branch[sub];
  });
  // clone so later mutation of the caller's object can't corrupt the tree
  branch[last] = utils.clone(obj);
  this.paths[path] = Schema.interpretAsType(path, obj, this.options);
  return this;
};
/**
* Converts type arguments into Mongoose Types.
*
* @param {String} path
* @param {Object} obj constructor
* @api private
*/
Schema.interpretAsType = function(path, obj, options) {
  // A bare value (String, Number, a Schema instance, an array literal, ...)
  // is first wrapped into { <typeKey>: value } so the dispatch below only
  // has to deal with the object form.
  if (obj.constructor) {
    var constructorName = utils.getFunctionName(obj.constructor);
    if (constructorName != 'Object') {
      var oldObj = obj;
      obj = {};
      obj[options.typeKey] = oldObj;
    }
  }
  // Get the type making sure to allow keys named "type"
  // and default to mixed if not specified.
  // { type: { type: String, default: 'freshcut' } }
  var type = obj[options.typeKey] && (options.typeKey !== 'type' || !obj.type.type)
      ? obj[options.typeKey]
      : {};
  if ('Object' == utils.getFunctionName(type.constructor) || 'mixed' == type) {
    return new MongooseTypes.Mixed(path, obj);
  }
  if (Array.isArray(type) || Array == type || 'array' == type) {
    // if it was specified through { type } look for `cast`
    var cast = (Array == type || 'array' == type)
        ? obj.cast
        : type[0];
    if (cast instanceof Schema) {
      return new MongooseTypes.DocumentArray(path, cast, obj);
    }
    if ('string' == typeof cast) {
      // e.g. [ 'string' ] -> MongooseTypes.String
      cast = MongooseTypes[cast.charAt(0).toUpperCase() + cast.substring(1)];
    } else if (cast && (!cast[options.typeKey] || (options.typeKey === 'type' && cast.type.type))
        && 'Object' == utils.getFunctionName(cast.constructor)
        && Object.keys(cast).length) {
      // The `minimize` and `typeKey` options propagate to child schemas
      // declared inline, like `{ arr: [{ val: { $type: String } }] }`.
      // See gh-3560
      var childSchemaOptions = { minimize: options.minimize };
      if (options.typeKey) {
        childSchemaOptions.typeKey = options.typeKey;
      }
      var childSchema = new Schema(cast, childSchemaOptions);
      return new MongooseTypes.DocumentArray(path, childSchema, obj);
    }
    return new MongooseTypes.Array(path, cast || MongooseTypes.Mixed, obj);
  }
  if (type instanceof Schema) {
    return new MongooseTypes.Embedded(type, path, obj);
  }
  var name;
  if (Buffer.isBuffer(type)) {
    name = 'Buffer';
  } else {
    name = 'string' == typeof type
        ? type
        // If not string, `type` is a function. Outside of IE, function.name
        // gives you the function name. In IE, you need to compute it
        : type.schemaName || utils.getFunctionName(type);
  }
  // capitalize to match the exported SchemaType names ('string' -> 'String')
  if (name) {
    name = name.charAt(0).toUpperCase() + name.substring(1);
  }
  if (undefined == MongooseTypes[name]) {
    throw new TypeError('Undefined type `' + name + '` at `' + path +
        '`\n Did you try nesting Schemas? ' +
        'You can only nest using refs or arrays.');
  }
  return new MongooseTypes[name](path, obj);
};
/**
* Iterates the schemas paths similar to Array#forEach.
*
* The callback is passed the pathname and schemaType as arguments on each iteration.
*
* @param {Function} fn callback function
* @return {Schema} this
* @api public
*/
Schema.prototype.eachPath = function(fn) {
  // Invoke `fn(pathname, schemaType)` once per registered path.
  var paths = this.paths;
  var names = Object.keys(paths);
  for (var i = 0, n = names.length; i < n; ++i) {
    fn(names[i], paths[names[i]]);
  }
  return this;
};
/**
* Returns an Array of path strings that are required by this schema.
*
* @api public
* @param {Boolean} invalidate refresh the cache
* @return {Array}
*/
Schema.prototype.requiredPaths = function requiredPaths(invalidate) {
  // Serve from cache unless the caller explicitly asks for a refresh.
  if (this._requiredpaths && !invalidate) return this._requiredpaths;
  var names = Object.keys(this.paths);
  var ret = [];
  // iterate in reverse to preserve the historical result ordering
  for (var i = names.length - 1; i >= 0; --i) {
    if (this.paths[names[i]].isRequired) {
      ret.push(names[i]);
    }
  }
  return this._requiredpaths = ret;
};
/**
* Returns indexes from fields and schema-level indexes (cached).
*
* @api private
* @return {Array}
*/
Schema.prototype.indexedPaths = function indexedPaths() {
  // Compute the index list once, then serve the cached copy.
  if (!this._indexedpaths) {
    this._indexedpaths = this.indexes();
  }
  return this._indexedpaths;
};
/**
* Returns the pathType of `path` for this schema.
*
* Given a path, returns whether it is a real, virtual, nested, or ad-hoc/undefined path.
*
* @param {String} path
* @return {String}
* @api public
*/
Schema.prototype.pathType = function(path) {
  // Check the registries in priority order: concrete paths win over
  // virtuals, which win over nested objects and cached positional paths.
  if (path in this.paths) return 'real';
  if (path in this.virtuals) return 'virtual';
  if (path in this.nested) return 'nested';
  if (path in this.subpaths) return 'real';
  // positional selectors ('arr.0.x' / 'arr.0') need a deeper lookup
  return /\.\d+\.|\.\d+$/.test(path)
      ? getPositionalPathType(this, path)
      : 'adhocOrUndefined';
};
/**
* Returns true iff this path is a child of a mixed schema.
*
* @param {String} path
* @return {Boolean}
* @api private
*/
Schema.prototype.hasMixedParent = function(path) {
  // Rebuild the dotted path one segment at a time and check whether any
  // prefix resolves to a Mixed schema type.
  var parts = path.split(/\./g);
  var current = '';
  for (var i = 0; i < parts.length; ++i) {
    current = (i > 0) ? current + '.' + parts[i] : parts[i];
    if (current in this.paths &&
        this.paths[current] instanceof MongooseTypes.Mixed) {
      return true;
    }
  }
  return false;
};
/*!
* ignore
*/
function getPositionalPathType(self, path) {
  // Resolve the pathType of a path containing numeric positional segments
  // (e.g. 'comments.0.title'), caching the resolved SchemaType (or
  // undefined) on self.subpaths as a side effect.
  var subpaths = path.split(/\.(\d+)\.|\.(\d+)$/).filter(Boolean);
  if (subpaths.length < 2) {
    // NOTE(review): this branch returns a SchemaType while every other
    // branch returns a pathType string — confirm callers expect this.
    return self.paths[subpaths[0]];
  }
  var val = self.path(subpaths[0]);
  var isNested = false;
  if (!val) return val;
  var last = subpaths.length - 1,
      subpath,
      i = 1;
  for (; i < subpaths.length; ++i) {
    isNested = false;
    subpath = subpaths[i];
    if (i === last && val && !val.schema && !/\D/.test(subpath)) {
      // trailing numeric index into a primitive array -> element caster
      if (val instanceof MongooseTypes.Array) {
        // StringSchema, NumberSchema, etc
        val = val.caster;
      } else {
        val = undefined;
      }
      break;
    }
    // ignore if its just a position segment: path.0.subpath
    if (!/\D/.test(subpath)) continue;
    if (!(val && val.schema)) {
      val = undefined;
      break;
    }
    var type = val.schema.pathType(subpath);
    isNested = (type === 'nested');
    val = val.schema.path(subpath);
  }
  // cache the result (possibly undefined) for future lookups
  self.subpaths[path] = val;
  if (val) {
    return 'real';
  }
  if (isNested) {
    return 'nested';
  }
  return 'adhocOrUndefined';
}
/*!
* ignore
*/
function getPositionalPath(self, path) {
  // Resolving the type populates self.subpaths[path] as a side effect;
  // return that cached SchemaType (or undefined).
  getPositionalPathType(self, path);
  return self.subpaths[path];
}
/**
* Adds a method call to the queue.
*
* @param {String} name name of the document method to call later
* @param {Array} args arguments to pass to the method
* @api public
*/
Schema.prototype.queue = function(name, args) {
  // Record the call; the queue is replayed on documents once the schema
  // is compiled into a Model.
  var entry = [name, args];
  this.callQueue.push(entry);
  return this;
};
/**
* Defines a pre hook for the document.
*
* ####Example
*
* var toySchema = new Schema(..);
*
* toySchema.pre('save', function (next) {
* if (!this.created) this.created = new Date;
* next();
* })
*
* toySchema.pre('validate', function (next) {
* if (this.name != 'Woody') this.name = 'Woody';
* next();
* })
*
* @param {String} method
* @param {Function} callback
* @see hooks.js https://github.com/bnoguchi/hooks-js/tree/31ec571cef0332e21121ee7157e0cf9728572cc3
* @api public
*/
Schema.prototype.pre = function() {
  var name = arguments[0];
  // Document-level hooks are queued for replay on compiled models;
  // query-level hooks go straight through Kareem.
  if (!IS_QUERY_HOOK[name]) {
    return this.queue('pre', arguments);
  }
  this.s.hooks.pre.apply(this.s.hooks, arguments);
  return this;
};
/**
* Defines a post hook for the document
*
* Post hooks fire `on` the event emitted from document instances of Models compiled from this schema.
*
* var schema = new Schema(..);
* schema.post('save', function (doc) {
* console.log('this fired after a document was saved');
* });
*
* var Model = mongoose.model('Model', schema);
*
* var m = new Model(..);
* m.save(function (err) {
* console.log('this fires after the `post` hook');
* });
*
* @param {String} method name of the method to hook
* @param {Function} fn callback
* @see hooks.js https://github.com/bnoguchi/hooks-js/tree/31ec571cef0332e21121ee7157e0cf9728572cc3
* @api public
*/
Schema.prototype.post = function(method, fn) {
  // Query-level post hooks are registered through Kareem directly.
  if (IS_QUERY_HOOK[method]) {
    this.s.hooks.post.apply(this.s.hooks, arguments);
    return this;
  }
  // assuming that all callbacks with arity < 2 are synchronous post hooks
  if (fn.length < 2) {
    return this.queue('on', [arguments[0], function(doc) {
      return fn.call(doc, doc);
    }]);
  }
  // arity >= 2: treat as an async post hook taking (doc, callback)
  return this.queue('post', [arguments[0], function(next) {
    // wrap original function so that the callback goes last,
    // for compatibility with old code that is using synchronous post hooks
    var self = this;
    var args = Array.prototype.slice.call(arguments, 1);
    fn.call(this, this, function(err) {
      return next.apply(self, [err].concat(args));
    });
  }]);
};
/**
* Registers a plugin for this schema.
*
* @param {Function} plugin callback
* @param {Object} [opts]
* @see plugins
* @api public
*/
Schema.prototype.plugin = function(fn, opts) {
  // A plugin is just a function invoked with this schema and its options.
  fn(this, opts);
  return this;
};
/**
* Adds an instance method to documents constructed from Models compiled from this schema.
*
* ####Example
*
* var schema = kittySchema = new Schema(..);
*
* schema.method('meow', function () {
* console.log('meeeeeoooooooooooow');
* })
*
* var Kitty = mongoose.model('Kitty', schema);
*
* var fizz = new Kitty;
* fizz.meow(); // meeeeeooooooooooooow
*
* If a hash of name/fn pairs is passed as the only argument, each name/fn pair will be added as methods.
*
* schema.method({
* purr: function () {}
* , scratch: function () {}
* });
*
* // later
* fizz.purr();
* fizz.scratch();
*
* @param {String|Object} method name
* @param {Function} [fn]
* @api public
*/
Schema.prototype.method = function(name, fn) {
  // Accept either a (name, fn) pair or a map of name -> fn.
  if ('string' == typeof name) {
    this.methods[name] = fn;
  } else {
    for (var key in name) {
      this.methods[key] = name[key];
    }
  }
  return this;
};
/**
* Adds static "class" methods to Models compiled from this schema.
*
* ####Example
*
* var schema = new Schema(..);
* schema.static('findByName', function (name, callback) {
* return this.find({ name: name }, callback);
* });
*
* var Drink = mongoose.model('Drink', schema);
* Drink.findByName('sanpellegrino', function (err, drinks) {
* //
* });
*
* If a hash of name/fn pairs is passed as the only argument, each name/fn pair will be added as statics.
*
* @param {String} name
* @param {Function} fn
* @api public
*/
Schema.prototype.static = function(name, fn) {
  // Accept either a (name, fn) pair or a map of name -> fn.
  if ('string' == typeof name) {
    this.statics[name] = fn;
  } else {
    for (var key in name) {
      this.statics[key] = name[key];
    }
  }
  return this;
};
/**
* Defines an index (most likely compound) for this schema.
*
* ####Example
*
* schema.index({ first: 1, last: -1 })
*
* @param {Object} fields
* @param {Object} [options] Options to pass to [MongoDB driver's `createIndex()` function](http://mongodb.github.io/node-mongodb-native/2.0/api/Collection.html#createIndex)
* @param {String} [options.expires=null] Mongoose-specific syntactic sugar, uses [ms](https://www.npmjs.com/package/ms) to convert `expires` option into seconds for the `expireAfterSeconds` in the above link.
* @api public
*/
Schema.prototype.index = function(fields, options) {
  options = options || {};
  if (options.expires) {
    // convert the ms-style `expires` shorthand into `expireAfterSeconds`
    utils.expires(options);
  }
  this._indexes.push([fields, options]);
  return this;
};
/**
* Sets/gets a schema option.
*
* ####Example
*
* schema.set('strict'); // 'true' by default
* schema.set('strict', false); // Sets 'strict' to false
* schema.set('strict'); // 'false'
*
* @param {String} key option name
* @param {Object} [value] if not passed, the current option value is returned
* @see Schema ./
* @api public
*/
Schema.prototype.set = function(key, value, _tags) {
  // Single-argument form acts as a getter.
  if (1 === arguments.length) {
    return this.options[key];
  }
  if ('read' === key) {
    // normalize into a driver ReadPreference, honoring tag sets
    this.options[key] = readPref(value, _tags);
  } else if ('safe' === key) {
    // `safe: false` is shorthand for unacknowledged writes
    this.options[key] = false === value
        ? { w: 0 }
        : value;
  } else {
    this.options[key] = value;
  }
  return this;
};
/**
* Gets a schema option.
*
* @param {String} key option name
* @api public
*/
Schema.prototype.get = function(key) {
  // Read a single schema option by name.
  return this.options[key];
};
/**
* The allowed index types
*
* @static indexTypes
* @receiver Schema
* @api public
*/
var indexTypes = '2d 2dsphere hashed text'.split(' ');
Object.defineProperty(Schema, 'indexTypes', {
get: function() { return indexTypes; },
set: function() { throw new Error('Cannot overwrite Schema.indexTypes'); }
});
/**
* Compiles indexes from fields and schema-level indexes
*
* @api public
*/
Schema.prototype.indexes = function() {
  'use strict';
  // Aggregate index definitions from individual fields (path._index) and
  // from schema-level Schema.index() calls, recursing into document
  // arrays and single nested subdocuments.
  var indexes = [];
  var seenPrefix = {};
  var collectIndexes = function(schema, prefix) {
    // `seenPrefix` guards against infinite recursion on self-referencing
    // (recursive) schemas.
    if (seenPrefix[prefix]) {
      return;
    }
    seenPrefix[prefix] = true;
    prefix = prefix || '';
    var key, path, index, field, isObject, options, type;
    var keys = Object.keys(schema.paths);
    for (var i = 0; i < keys.length; ++i) {
      key = keys[i];
      path = schema.paths[key];
      if (path instanceof MongooseTypes.DocumentArray) {
        // recurse into embedded document arrays, prefixing child paths
        collectIndexes(path.schema, key + '.');
      } else if (path.$isSingleNested) {
        // recurse into single nested subdocuments
        collectIndexes(path.schema, key + '.');
      } else {
        index = path._index;
        if (false !== index && null != index) {
          field = {};
          isObject = utils.isObject(index);
          options = isObject ? index : {};
          type = 'string' == typeof index ? index :
              isObject ? index.type :
                  false;
          if (type && ~Schema.indexTypes.indexOf(type)) {
            // special index kind, e.g. '2dsphere', 'text', 'hashed'
            field[prefix + key] = type;
          } else {
            // plain ascending index
            field[prefix + key] = 1;
          }
          // NOTE(review): when `index` is an object, `options` aliases
          // `path._index`, so this delete mutates the stored definition —
          // confirm this is intended.
          delete options.type;
          if (!('background' in options)) {
            options.background = true;
          }
          indexes.push([field, options]);
        }
      }
    }
    if (prefix) {
      // child schema: re-key its schema-level indexes with this prefix
      fixSubIndexPaths(schema, prefix);
    } else {
      // top level: append schema-level indexes, defaulting background:true
      schema._indexes.forEach(function(index) {
        if (!('background' in index[1])) index[1].background = true;
      });
      indexes = indexes.concat(schema._indexes);
    }
  };
  collectIndexes(this);
  return indexes;
  /*!
   * Checks for indexes added to subdocs using Schema.index().
   * These indexes need their paths prefixed properly.
   *
   * schema._indexes = [ [indexObj, options], [indexObj, options] ..]
   */
  function fixSubIndexPaths(schema, prefix) {
    var subindexes = schema._indexes,
        len = subindexes.length,
        indexObj,
        newindex,
        klen,
        keys,
        key,
        i = 0,
        j;
    for (i = 0; i < len; ++i) {
      indexObj = subindexes[i][0];
      keys = Object.keys(indexObj);
      klen = keys.length;
      newindex = {};
      // use forward iteration, order matters
      for (j = 0; j < klen; ++j) {
        key = keys[j];
        newindex[prefix + key] = indexObj[key];
      }
      // keep the original options object (subindexes[i][1]) untouched
      indexes.push([newindex, subindexes[i][1]]);
    }
  }
};
/**
* Creates a virtual type with the given name.
*
* @param {String} name
* @param {Object} [options]
* @return {VirtualType}
*/
Schema.prototype.virtual = function(name, options) {
  // Walk (and lazily build) the schema tree one dotted segment at a time;
  // the leaf segment becomes the VirtualType itself. The resulting node is
  // cached on `this.virtuals` and returned.
  var parts = name.split('.');
  var last = parts.length - 1;
  var branch = this.tree;
  for (var i = 0; i < parts.length; ++i) {
    var part = parts[i];
    if (!branch[part]) {
      branch[part] = (i === last)
          ? new VirtualType(options, name)
          : {};
    }
    branch = branch[part];
  }
  return this.virtuals[name] = branch;
};
/**
* Returns the virtual type with the given `name`.
*
* @param {String} name
* @return {VirtualType}
*/
Schema.prototype.virtualpath = function(name) {
  // Look up a previously declared VirtualType by its full dotted name.
  return this.virtuals[name];
};
/**
* Removes the given `path` (or [`paths`]).
*
* @param {String|Array} path
*
* @api public
*/
Schema.prototype.remove = function(path) {
  // Accept a single path name or an array of names; anything else is a
  // silent no-op (matching the historical behavior).
  var names = (typeof path === 'string') ? [path] : path;
  if (!Array.isArray(names)) {
    return;
  }
  for (var i = 0; i < names.length; ++i) {
    var name = names[i];
    // only delete paths that actually resolve to a SchemaType
    if (this.path(name)) {
      delete this.paths[name];
    }
  }
};
/*!
* ignore
*/
Schema.prototype._getSchema = function(path) {
  // Resolve the SchemaType for `path`, falling back to a manual descent
  // through arrays/subdocuments (including positional `arr.$.child`
  // selectors) when a direct `schema.path()` lookup fails.
  var schema = this;
  var pathschema = schema.path(path);
  if (pathschema) {
    return pathschema;
  }
  // look for arrays
  return (function search(parts, schema) {
    // Try progressively shorter prefixes of `parts` until one names a
    // real path on `schema`, then recurse into its child schema for the
    // remaining segments.
    var p = parts.length + 1,
        foundschema,
        trypath;
    while (p--) {
      trypath = parts.slice(0, p).join('.');
      foundschema = schema.path(trypath);
      if (foundschema) {
        if (foundschema.caster) {
          // array of Mixed?
          if (foundschema.caster instanceof MongooseTypes.Mixed) {
            return foundschema.caster;
          }
          // Now that we found the array, we need to check if there
          // are remaining document paths to look up for casting.
          // Also we need to handle array.$.path since schema.path
          // doesn't work for that.
          // If there is no foundschema.schema we are dealing with
          // a path like array.$
          if (p !== parts.length && foundschema.schema) {
            if ('$' === parts[p]) {
              // comments.$.comments.$.title
              return search(parts.slice(p + 1), foundschema.schema);
            } else {
              // this is the last path of the selector
              return search(parts.slice(p), foundschema.schema);
            }
          }
        }
        // may be undefined when no prefix matched
        return foundschema;
      }
    }
  })(path.split('.'), schema);
};
/*!
* Module exports.
*/
module.exports = exports = Schema;
// require down here because of reference issues
/**
* The various built-in Mongoose Schema Types.
*
* ####Example:
*
* var mongoose = require('mongoose');
* var ObjectId = mongoose.Schema.Types.ObjectId;
*
* ####Types:
*
* - [String](#schema-string-js)
* - [Number](#schema-number-js)
* - [Boolean](#schema-boolean-js) | Bool
* - [Array](#schema-array-js)
* - [Buffer](#schema-buffer-js)
* - [Date](#schema-date-js)
* - [ObjectId](#schema-objectid-js) | Oid
* - [Mixed](#schema-mixed-js)
*
* Using this exposed access to the `Mixed` SchemaType, we can use them in our schema.
*
* var Mixed = mongoose.Schema.Types.Mixed;
* new mongoose.Schema({ _user: Mixed })
*
* @api public
*/
Schema.Types = MongooseTypes = require('./schema/index');
/*!
* ignore
*/
exports.ObjectId = MongooseTypes.ObjectId;
| {
if (this.$__._id) {
return this.$__._id;
}
return this.$__._id = null == this._id
? null
: String(this._id);
} | identifier_body |
schema.js | /*!
* Module dependencies.
*/
var readPref = require('./drivers').ReadPreference;
var EventEmitter = require('events').EventEmitter;
var VirtualType = require('./virtualtype');
var utils = require('./utils');
var MongooseTypes;
var Kareem = require('kareem');
var async = require('async');
var PromiseProvider = require('./promise_provider');
var IS_QUERY_HOOK = {
count: true,
find: true,
findOne: true,
findOneAndUpdate: true,
findOneAndRemove: true,
update: true
};
/**
* Schema constructor.
*
* ####Example:
*
* var child = new Schema({ name: String });
* var schema = new Schema({ name: String, age: Number, children: [child] });
* var Tree = mongoose.model('Tree', schema);
*
* // setting schema options
* new Schema({ name: String }, { _id: false, autoIndex: false })
*
* ####Options:
*
* - [autoIndex](/docs/guide.html#autoIndex): bool - defaults to null (which means use the connection's autoIndex option)
* - [bufferCommands](/docs/guide.html#bufferCommands): bool - defaults to true
* - [capped](/docs/guide.html#capped): bool - defaults to false
* - [collection](/docs/guide.html#collection): string - no default
* - [emitIndexErrors](/docs/guide.html#emitIndexErrors): bool - defaults to false.
* - [id](/docs/guide.html#id): bool - defaults to true
* - [_id](/docs/guide.html#_id): bool - defaults to true
* - `minimize`: bool - controls [document#toObject](#document_Document-toObject) behavior when called manually - defaults to true
* - [read](/docs/guide.html#read): string
* - [safe](/docs/guide.html#safe): bool - defaults to true.
* - [shardKey](/docs/guide.html#shardKey): bool - defaults to `null`
* - [strict](/docs/guide.html#strict): bool - defaults to true
* - [toJSON](/docs/guide.html#toJSON) - object - no default
* - [toObject](/docs/guide.html#toObject) - object - no default
* - [typeKey](/docs/guide.html#typeKey) - string - defaults to 'type'
* - [validateBeforeSave](/docs/guide.html#validateBeforeSave) - bool - defaults to `true`
* - [versionKey](/docs/guide.html#versionKey): bool - defaults to "__v"
*
* ####Note:
*
* _When nesting schemas, (`children` in the example above), always declare the child schema first before passing it into its parent._
*
* @param {Object} definition
* @inherits NodeJS EventEmitter http://nodejs.org/api/events.html#events_class_events_eventemitter
* @event `init`: Emitted after the schema is compiled into a `Model`.
* @api public
*/
function Schema(obj, options) {
  // allow calling `Schema(...)` without `new`
  if (!(this instanceof Schema))
    return new Schema(obj, options);
  this.paths = {};
  this.subpaths = {};
  this.virtuals = {};
  this.nested = {};
  this.inherits = {};
  this.callQueue = [];
  this._indexes = [];
  this.methods = {};
  this.statics = {};
  this.tree = {};
  this._requiredpaths = undefined;
  this.discriminatorMapping = undefined;
  this._indexedpaths = undefined;
  // internal state: kareem hook engine plus the set of hook names it owns
  this.s = {
    hooks: new Kareem(),
    queryHooks: IS_QUERY_HOOK
  };
  this.options = this.defaultOptions(options);
  // build paths
  if (obj) {
    this.add(obj);
  }
  // check if _id's value is a subdocument (gh-2276)
  var _idSubDoc = obj && obj._id && utils.isObject(obj._id);
  // ensure the documents get an auto _id unless disabled
  var auto_id = !this.paths['_id'] &&
    (!this.options.noId && this.options._id) && !_idSubDoc;
  if (auto_id) {
    // register an auto-generated ObjectId `_id` path, honoring a custom typeKey
    obj = { _id: { auto: true } };
    obj._id[this.options.typeKey] = Schema.ObjectId;
    this.add(obj);
  }
  // ensure the documents receive an id getter unless disabled
  var autoid = !this.paths['id'] &&
    (!this.options.noVirtualId && this.options.id);
  if (autoid) {
    this.virtual('id').get(idGetter);
  }
  // attach the built-in middleware declared in _defaultMiddleware
  for (var i = 0; i < this._defaultMiddleware.length; ++i) {
    var m = this._defaultMiddleware[i];
    this[m.kind](m.hook, !!m.isAsync, m.fn);
  }
  // adds updatedAt and createdAt timestamps to documents if enabled
  var timestamps = this.options.timestamps;
  if (timestamps) {
    var createdAt = timestamps.createdAt || 'createdAt',
        updatedAt = timestamps.updatedAt || 'updatedAt',
        schemaAdditions = {};
    schemaAdditions[updatedAt] = Date;
    if (!this.paths[createdAt]) {
      schemaAdditions[createdAt] = Date;
    }
    this.add(schemaAdditions);
    // document save: stamp createdAt once, refresh updatedAt on every save
    this.pre('save', function(next) {
      var defaultTimestamp = new Date();
      if (!this[createdAt]) {
        // with an auto ObjectId _id, reuse the timestamp embedded in the id
        this[createdAt] = auto_id ? this._id.getTimestamp() : defaultTimestamp;
      }
      this[updatedAt] = this.isNew ? this[createdAt] : defaultTimestamp;
      next();
    });
    // builds the $set/$setOnInsert clauses used by the query middleware below
    var genUpdates = function() {
      var now = new Date();
      var updates = {$set: {}, $setOnInsert: {}};
      updates.$set[updatedAt] = now;
      updates.$setOnInsert[createdAt] = now;
      return updates;
    };
    this.pre('findOneAndUpdate', function(next) {
      this.findOneAndUpdate({}, genUpdates());
      next();
    });
    this.pre('update', function(next) {
      this.update({}, genUpdates());
      next();
    });
  }
}
/*!
 * Returns this document's `_id` cast to a string, caching the result
 * on the internal `$__` state object so the cast happens at most once.
 */
function idGetter() {
  var cached = this.$__._id;
  if (cached) {
    return cached;
  }
  var id = this._id == null ? null : String(this._id);
  this.$__._id = id;
  return id;
}
/*!
 * Inherit from EventEmitter.
 */
Schema.prototype = Object.create( EventEmitter.prototype );
// Object.create above clobbered `constructor`; point it back at Schema
Schema.prototype.constructor = Schema;
/**
 * Default middleware attached to a schema. Cannot be changed.
 *
 * This field is used to make sure discriminators don't get multiple copies of
 * built-in middleware. Declared as a constant because changing this at runtime
 * may lead to instability with Model.prototype.discriminator().
 *
 * @api private
 * @property _defaultMiddleware
 */
Object.defineProperty(Schema.prototype, '_defaultMiddleware', {
  configurable: false,
  enumerable: false,
  writable: false,
  value: [{
    kind: 'pre',
    hook: 'save',
    fn: function(next, options) {
      // Nested docs have their own presave
      if (this.ownerDocument) {
        return next();
      }
      var hasValidateBeforeSaveOption = options &&
        (typeof options === 'object') &&
        ('validateBeforeSave' in options);
      var shouldValidate;
      if (hasValidateBeforeSaveOption) {
        // a per-save option overrides the schema-level setting
        shouldValidate = !!options.validateBeforeSave;
      } else {
        shouldValidate = this.schema.options.validateBeforeSave;
      }
      // Validate
      if (shouldValidate) {
        // HACK: use $__original_validate to avoid promises so bluebird doesn't
        // complain
        if (this.$__original_validate) {
          this.$__original_validate({ __noPromise: true }, function(error) {
            next(error);
          });
        } else {
          this.validate({ __noPromise: true }, function(error) {
            next(error);
          });
        }
      } else {
        next();
      }
    }
  }, {
    kind: 'pre',
    hook: 'save',
    isAsync: true,
    fn: function(next, done) {
      var Promise = PromiseProvider.get(),
          subdocs = this.$__getAllSubdocs();
      if (!subdocs.length || this.$__preSavingFromParent) {
        done();
        next();
        return;
      }
      new Promise.ES6(function(resolve, reject) {
        async.each(subdocs, function(subdoc, cb) {
          // flag so the subdoc's own pre-save middleware doesn't recurse
          subdoc.$__preSavingFromParent = true;
          subdoc.save(function(err) {
            cb(err);
          });
        }, function(error) {
          // FIX: the source contained a truncated `for` statement here
          // (`for (...) |`) with no body — a syntax error that also left
          // the recursion-guard flag set on every subdoc. Restore the
          // cleanup loop (matches the intact copy of this file).
          for (var i = 0; i < subdocs.length; ++i) {
            delete subdocs[i].$__preSavingFromParent;
          }
          if (error) {
            reject(error);
            return;
          }
          resolve();
        });
      }).then(function() {
        next();
        done();
      }, done);
    }
  }]
});
/**
 * Schema as flat paths: maps each dotted path name to its SchemaType.
 *
 * ####Example:
 *     {
 *         '_id'        : SchemaType,
 *       , 'nested.key' : SchemaType,
 *     }
 *
 * (Bare expression below exists only so rustdoc-style tooling picks up
 * the property; the value is assigned in the constructor.)
 *
 * @api private
 * @property paths
 */
Schema.prototype.paths;
/**
 * Schema as a tree: nested object mirroring the declared shape, with
 * type constructors (and VirtualTypes) at the leaves.
 *
 * ####Example:
 *     {
 *         '_id'     : ObjectId
 *       , 'nested'  : {
 *             'key' : String
 *         }
 *     }
 *
 * @api private
 * @property tree
 */
Schema.prototype.tree;
/**
 * Returns default options for this schema, merged with `options`.
 *
 * Note: `options` is mutated in place when `safe`/`versionKey`
 * normalization applies.
 *
 * @param {Object} options user-supplied schema options
 * @return {Object} merged options
 * @api private
 */
Schema.prototype.defaultOptions = function(options) {
  if (options) {
    // `safe: false` is sugar for unacknowledged writes
    if (options.safe === false) {
      options.safe = { w: 0 };
    }
    // if you turn off safe writes, then versioning goes off as well
    if (options.safe && options.safe.w === 0) {
      options.versionKey = false;
    }
  }
  var defaults = {
    strict: true,
    bufferCommands: true,
    capped: false, // { size, max, autoIndexId }
    versionKey: '__v',
    discriminatorKey: '__t',
    minimize: true,
    autoIndex: null,
    shardKey: null,
    read: null,
    validateBeforeSave: true,
    // the following are only applied at construction time
    noId: false, // deprecated, use { _id: false }
    _id: true,
    noVirtualId: false, // deprecated, use { id: false }
    id: true,
    typeKey: 'type'
  };
  options = utils.options(defaults, options);
  // normalize read preference strings into driver ReadPreference objects
  if (options.read) {
    options.read = readPref(options.read);
  }
  return options;
};
/**
 * Adds key path / schema type pairs to this schema.
 *
 * ####Example:
 *
 *     var ToySchema = new Schema;
 *     ToySchema.add({ name: 'string', color: 'string', price: 'number' });
 *
 * @param {Object} obj plain object of path name -> type definitions
 * @param {String} prefix dotted path prefix (used when recursing into nested objects)
 * @api public
 */
Schema.prototype.add = function add(obj, prefix) {
  prefix = prefix || '';
  var keys = Object.keys(obj);
  for (var i = 0; i < keys.length; ++i) {
    var key = keys[i];
    // null/undefined values are never a valid type declaration
    if (null == obj[key]) {
      throw new TypeError('Invalid value for schema path `' + prefix + key + '`');
    }
    // a one-element array holding null/undefined is also invalid: `[null]`
    if (Array.isArray(obj[key]) && obj[key].length === 1 && null == obj[key][0]) {
      throw new TypeError('Invalid value for schema Array path `' + prefix + key + '`');
    }
    // a plain object with no type key — or whose `type` key is itself a
    // nested path declaration ({ type: { type: ... } }) — is a nested object;
    // anything else is a concrete path declaration
    if (utils.isObject(obj[key]) &&
        (!obj[key].constructor || 'Object' == utils.getFunctionName(obj[key].constructor)) &&
        (!obj[key][this.options.typeKey] || (this.options.typeKey === 'type' && obj[key].type.type))) {
      if (Object.keys(obj[key]).length) {
        // nested object { last: { name: String }}
        this.nested[prefix + key] = true;
        this.add(obj[key], prefix + key + '.');
      } else {
        this.path(prefix + key, obj[key]); // mixed type
      }
    } else {
      this.path(prefix + key, obj[key]);
    }
  }
};
/**
 * Reserved document keys.
 *
 * Keys in this object are names that are rejected in schema declarations
 * b/c they conflict with mongoose functionality. Using these key name
 * will throw an error.
 *
 *      on, emit, _events, db, get, set, init, isNew, errors, schema,
 *      options, modelName, collection, _pres, _posts, toObject
 *
 * _NOTE:_ Use of these terms as method names is permitted, but play at
 * your own risk, as they may be existing mongoose document methods you
 * are stomping on.
 *
 *      var schema = new Schema(..);
 *      schema.methods.init = function () {} // potentially breaking
 */
Schema.reserved = Object.create(null);
var reserved = Schema.reserved;
[
  // EventEmitter
  'emit', 'on', 'once',
  // document properties and functions
  'collection', 'db', 'errors', 'init', 'isModified', 'isNew', 'get',
  'modelName', 'save', 'schema', 'set', 'toObject', 'validate',
  // hooks.js
  '_pres', '_posts'
].forEach(function(name) {
  reserved[name] = 1;
});
/**
 * Document keys to print warnings for
 */
var warnings = {};
warnings.increment = '`increment` should not be used as a schema path name ' +
    'unless you have disabled versioning.';
/**
 * Gets/sets schema paths.
 *
 * Sets a path (if arity 2)
 * Gets a path (if arity 1)
 *
 * ####Example
 *
 *     schema.path('name') // returns a SchemaType
 *     schema.path('name', Number) // changes the schemaType of `name` to Number
 *
 * @param {String} path
 * @param {Object} constructor
 * @api public
 */
Schema.prototype.path = function(path, obj) {
  // getter form: look up a declared path or cached subpath
  if (obj == undefined) {
    if (this.paths[path]) return this.paths[path];
    if (this.subpaths[path]) return this.subpaths[path];
    // subpaths? (paths with positional segments like `tags.0`)
    return /\.\d+\.?.*$/.test(path)
      ? getPositionalPath(this, path)
      : undefined;
  }
  // some path names conflict with document methods
  if (reserved[path]) {
    throw new Error("`" + path + "` may not be used as a schema pathname");
  }
  if (warnings[path]) {
    console.log('WARN: ' + warnings[path]);
  }
  // update the tree
  var subpaths = path.split(/\./),
      last = subpaths.pop(),
      branch = this.tree;
  // walk (creating as needed) each intermediate branch of the tree;
  // error out if an ancestor was already declared as a concrete type
  subpaths.forEach(function(sub, i) {
    if (!branch[sub]) branch[sub] = {};
    if ('object' != typeof branch[sub]) {
      var msg = 'Cannot set nested path `' + path + '`. '
        + 'Parent path `'
        + subpaths.slice(0, i).concat([sub]).join('.')
        + '` already set to type ' + branch[sub].name
        + '.';
      throw new Error(msg);
    }
    branch = branch[sub];
  });
  branch[last] = utils.clone(obj);
  this.paths[path] = Schema.interpretAsType(path, obj, this.options);
  return this;
};
/**
 * Converts type arguments into Mongoose Types.
 *
 * @param {String} path path being declared
 * @param {Object} obj type constructor, or an options object holding one under the typeKey
 * @param {Object} options schema options (`typeKey`, `minimize`, ...)
 * @return {SchemaType}
 * @api private
 */
Schema.interpretAsType = function(path, obj, options) {
  // a bare constructor (String, Number, a custom type...) is shorthand
  // for `{ <typeKey>: Ctor }`
  if (obj.constructor) {
    var constructorName = utils.getFunctionName(obj.constructor);
    if (constructorName != 'Object') {
      var oldObj = obj;
      obj = {};
      obj[options.typeKey] = oldObj;
    }
  }
  // Get the type making sure to allow keys named "type"
  // and default to mixed if not specified.
  // { type: { type: String, default: 'freshcut' } }
  var type = obj[options.typeKey] && (options.typeKey !== 'type' || !obj.type.type)
    ? obj[options.typeKey]
    : {};
  if ('Object' == utils.getFunctionName(type.constructor) || 'mixed' == type) {
    return new MongooseTypes.Mixed(path, obj);
  }
  if (Array.isArray(type) || Array == type || 'array' == type) {
    // if it was specified through { type } look for `cast`
    var cast = (Array == type || 'array' == type)
      ? obj.cast
      : type[0];
    if (cast instanceof Schema) {
      return new MongooseTypes.DocumentArray(path, cast, obj);
    }
    // cast given by name, e.g. ['string'] -> MongooseTypes.String
    if ('string' == typeof cast) {
      cast = MongooseTypes[cast.charAt(0).toUpperCase() + cast.substring(1)];
    } else if (cast && (!cast[options.typeKey] || (options.typeKey === 'type' && cast.type.type))
        && 'Object' == utils.getFunctionName(cast.constructor)
        && Object.keys(cast).length) {
      // The `minimize` and `typeKey` options propagate to child schemas
      // declared inline, like `{ arr: [{ val: { $type: String } }] }`.
      // See gh-3560
      var childSchemaOptions = { minimize: options.minimize };
      if (options.typeKey) {
        childSchemaOptions.typeKey = options.typeKey;
      }
      var childSchema = new Schema(cast, childSchemaOptions);
      return new MongooseTypes.DocumentArray(path, childSchema, obj);
    }
    return new MongooseTypes.Array(path, cast || MongooseTypes.Mixed, obj);
  }
  if (type instanceof Schema) {
    return new MongooseTypes.Embedded(type, path, obj);
  }
  var name;
  if (Buffer.isBuffer(type)) {
    name = 'Buffer';
  } else {
    name = 'string' == typeof type
      ? type
      // If not string, `type` is a function. Outside of IE, function.name
      // gives you the function name. In IE, you need to compute it
      : type.schemaName || utils.getFunctionName(type);
  }
  // capitalize so 'string' resolves the same as String
  if (name) {
    name = name.charAt(0).toUpperCase() + name.substring(1);
  }
  if (undefined == MongooseTypes[name]) {
    throw new TypeError('Undefined type `' + name + '` at `' + path +
      '`\n Did you try nesting Schemas? ' +
      'You can only nest using refs or arrays.');
  }
  return new MongooseTypes[name](path, obj);
};
/**
 * Iterates over this schema's paths, similar to Array#forEach.
 *
 * The callback is passed the pathname and schemaType as arguments on
 * each iteration.
 *
 * @param {Function} fn callback invoked as `fn(pathname, schemaType)`
 * @return {Schema} this
 * @api public
 */
Schema.prototype.eachPath = function(fn) {
  var self = this;
  Object.keys(this.paths).forEach(function(pathname) {
    fn(pathname, self.paths[pathname]);
  });
  return this;
};
/**
 * Returns an Array of path strings that are required by this schema.
 *
 * The result is cached on `_requiredpaths`; pass `invalidate` to rebuild.
 *
 * @param {Boolean} invalidate refresh the cache
 * @return {Array}
 * @api public
 */
Schema.prototype.requiredPaths = function requiredPaths(invalidate) {
  if (this._requiredpaths && !invalidate) {
    return this._requiredpaths;
  }
  var names = Object.keys(this.paths);
  var required = [];
  // walk keys in reverse, matching the original result ordering
  for (var idx = names.length - 1; idx >= 0; --idx) {
    var name = names[idx];
    if (this.paths[name].isRequired) {
      required.push(name);
    }
  }
  return this._requiredpaths = required;
};
/**
 * Returns indexes from fields and schema-level indexes (cached).
 *
 * @return {Array}
 * @api private
 */
Schema.prototype.indexedPaths = function indexedPaths() {
  if (!this._indexedpaths) {
    this._indexedpaths = this.indexes();
  }
  return this._indexedpaths;
};
/**
 * Returns the pathType of `path` for this schema.
 *
 * Given a path, returns whether it is a real, virtual, nested, or
 * ad-hoc/undefined path. Checks run in precedence order: declared
 * paths, then virtuals, then nested, then cached subpaths.
 *
 * @param {String} path
 * @return {String}
 * @api public
 */
Schema.prototype.pathType = function(path) {
  if (path in this.paths) {
    return 'real';
  }
  if (path in this.virtuals) {
    return 'virtual';
  }
  if (path in this.nested) {
    return 'nested';
  }
  if (path in this.subpaths) {
    return 'real';
  }
  // positional segments (`a.0` / `a.0.b`) need array-aware resolution
  return /\.\d+\.|\.\d+$/.test(path)
    ? getPositionalPathType(this, path)
    : 'adhocOrUndefined';
};
/**
 * Returns true iff this path is a child of a mixed schema.
 *
 * Walks each dotted prefix of `path` and checks whether any of them is
 * declared as a Mixed path.
 *
 * @param {String} path
 * @return {Boolean}
 * @api private
 */
Schema.prototype.hasMixedParent = function(path) {
  var segments = path.split(/\./g);
  var prefix = '';
  for (var i = 0; i < segments.length; ++i) {
    prefix = i > 0 ? prefix + '.' + segments[i] : segments[i];
    var schemaType = prefix in this.paths ? this.paths[prefix] : null;
    if (schemaType instanceof MongooseTypes.Mixed) {
      return true;
    }
  }
  return false;
};
/*!
 * Resolves the path type for a path containing positional (numeric)
 * segments, e.g. `tags.0` or `docs.1.name`, caching the resolved
 * SchemaType in `self.subpaths` as a side effect.
 */
function getPositionalPathType(self, path) {
  // split on `.N.` / trailing `.N`, keeping the numeric segments
  var subpaths = path.split(/\.(\d+)\.|\.(\d+)$/).filter(Boolean);
  if (subpaths.length < 2) {
    // NOTE(review): this branch returns a SchemaType rather than a type
    // string like the other exits — looks inconsistent with callers such
    // as pathType(); confirm before relying on the return value here.
    return self.paths[subpaths[0]];
  }
  var val = self.path(subpaths[0]);
  var isNested = false;
  if (!val) return val;
  var last = subpaths.length - 1,
      subpath,
      i = 1;
  for (; i < subpaths.length; ++i) {
    isNested = false;
    subpath = subpaths[i];
    // final segment is a bare array index: resolve to the array's caster
    if (i === last && val && !val.schema && !/\D/.test(subpath)) {
      if (val instanceof MongooseTypes.Array) {
        // StringSchema, NumberSchema, etc
        val = val.caster;
      } else {
        val = undefined;
      }
      break;
    }
    // ignore if its just a position segment: path.0.subpath
    if (!/\D/.test(subpath)) continue;
    // can't descend further without a subdocument schema
    if (!(val && val.schema)) {
      val = undefined;
      break;
    }
    var type = val.schema.pathType(subpath);
    isNested = (type === 'nested');
    val = val.schema.path(subpath);
  }
  self.subpaths[path] = val;
  if (val) {
    return 'real';
  }
  if (isNested) {
    return 'nested';
  }
  return 'adhocOrUndefined';
}
/*!
 * Resolves a positional path (e.g. `tags.0`) to its SchemaType,
 * populating `self.subpaths[path]` via getPositionalPathType.
 */
function getPositionalPath(self, path) {
  getPositionalPathType(self, path);
  return self.subpaths[path];
}
/**
 * Adds a method call to the queue.
 *
 * Queued calls are replayed against documents once this schema is
 * compiled into a Model.
 *
 * @param {String} name name of the document method to call later
 * @param {Array} args arguments to pass to the method
 * @return {Schema} this
 * @api public
 */
Schema.prototype.queue = function(name, args) {
  this.callQueue.push([name, args]);
  return this;
};
/**
 * Defines a pre hook for the document.
 *
 * ####Example
 *
 *     var toySchema = new Schema(..);
 *
 *     toySchema.pre('save', function (next) {
 *       if (!this.created) this.created = new Date;
 *       next();
 *     })
 *
 *     toySchema.pre('validate', function (next) {
 *       if (this.name != 'Woody') this.name = 'Woody';
 *       next();
 *     })
 *
 * @param {String} method
 * @param {Function} callback
 * @see hooks.js https://github.com/bnoguchi/hooks-js/tree/31ec571cef0332e21121ee7157e0cf9728572cc3
 * @api public
 */
Schema.prototype.pre = function() {
  var hookName = arguments[0];
  // document hooks are queued for replay at model-compile time;
  // query hooks go straight to the kareem hook engine
  if (!IS_QUERY_HOOK[hookName]) {
    return this.queue('pre', arguments);
  }
  this.s.hooks.pre.apply(this.s.hooks, arguments);
  return this;
};
/**
 * Defines a post hook for the document
 *
 * Post hooks fire `on` the event emitted from document instances of
 * Models compiled from this schema.
 *
 *     var schema = new Schema(..);
 *     schema.post('save', function (doc) {
 *       console.log('this fired after a document was saved');
 *     });
 *
 *     var Model = mongoose.model('Model', schema);
 *
 *     var m = new Model(..);
 *     m.save(function (err) {
 *       console.log('this fires after the `post` hook');
 *     });
 *
 * @param {String} method name of the method to hook
 * @param {Function} fn callback
 * @see hooks.js https://github.com/bnoguchi/hooks-js/tree/31ec571cef0332e21121ee7157e0cf9728572cc3
 * @api public
 */
Schema.prototype.post = function(method, fn) {
  // query hooks go through the kareem hook engine instead of the queue
  if (IS_QUERY_HOOK[method]) {
    this.s.hooks.post.apply(this.s.hooks, arguments);
    return this;
  }
  // assuming that all callbacks with arity < 2 are synchronous post hooks
  if (fn.length < 2) {
    return this.queue('on', [arguments[0], function(doc) {
      return fn.call(doc, doc);
    }]);
  }
  return this.queue('post', [arguments[0], function(next) {
    // wrap original function so that the callback goes last,
    // for compatibility with old code that is using synchronous post hooks
    var self = this;
    var args = Array.prototype.slice.call(arguments, 1);
    fn.call(this, this, function(err) {
      return next.apply(self, [err].concat(args));
    });
  }]);
};
/**
 * Registers a plugin for this schema.
 *
 * The plugin function is invoked immediately as `fn(schema, opts)`.
 *
 * @param {Function} fn plugin callback
 * @param {Object} [opts] options forwarded to the plugin
 * @return {Schema} this
 * @see plugins
 * @api public
 */
Schema.prototype.plugin = function(fn, opts) {
  fn(this, opts);
  return this;
};
/**
 * Adds an instance method to documents constructed from Models compiled
 * from this schema.
 *
 * ####Example
 *
 *     var schema = kittySchema = new Schema(..);
 *
 *     schema.method('meow', function () {
 *       console.log('meeeeeoooooooooooow');
 *     })
 *
 *     var Kitty = mongoose.model('Kitty', schema);
 *
 *     var fizz = new Kitty;
 *     fizz.meow(); // meeeeeooooooooooooow
 *
 * If a hash of name/fn pairs is passed as the only argument, each
 * name/fn pair will be added as methods.
 *
 *     schema.method({
 *         purr: function () {}
 *       , scratch: function () {}
 *     });
 *
 * @param {String|Object} name method name, or hash of name/fn pairs
 * @param {Function} [fn]
 * @return {Schema} this
 * @api public
 */
Schema.prototype.method = function(name, fn) {
  if (typeof name === 'string') {
    this.methods[name] = fn;
  } else {
    // hash form: register every name/fn pair
    for (var key in name) {
      this.methods[key] = name[key];
    }
  }
  return this;
};
/**
 * Adds static "class" methods to Models compiled from this schema.
 *
 * ####Example
 *
 *     var schema = new Schema(..);
 *     schema.static('findByName', function (name, callback) {
 *       return this.find({ name: name }, callback);
 *     });
 *
 *     var Drink = mongoose.model('Drink', schema);
 *     Drink.findByName('sanpellegrino', function (err, drinks) {
 *       //
 *     });
 *
 * If a hash of name/fn pairs is passed as the only argument, each
 * name/fn pair will be added as statics.
 *
 * @param {String|Object} name static name, or hash of name/fn pairs
 * @param {Function} [fn]
 * @return {Schema} this
 * @api public
 */
Schema.prototype.static = function(name, fn) {
  if (typeof name === 'string') {
    this.statics[name] = fn;
  } else {
    // hash form: register every name/fn pair
    for (var key in name) {
      this.statics[key] = name[key];
    }
  }
  return this;
};
/**
 * Defines an index (most likely compound) for this schema.
 *
 * ####Example
 *
 *     schema.index({ first: 1, last: -1 })
 *
 * @param {Object} fields
 * @param {Object} [options] Options to pass to [MongoDB driver's `createIndex()` function](http://mongodb.github.io/node-mongodb-native/2.0/api/Collection.html#createIndex)
 * @param {String} [options.expires=null] Mongoose-specific syntactic sugar, uses [ms](https://www.npmjs.com/package/ms) to convert `expires` option into seconds for the `expireAfterSeconds` in the above link.
 * @return {Schema} this
 * @api public
 */
Schema.prototype.index = function(fields, options) {
  options = options || {};
  if (options.expires) {
    // convert the `expires` sugar into driver-level TTL options
    utils.expires(options);
  }
  this._indexes.push([fields, options]);
  return this;
};
/**
 * Sets/gets a schema option.
 *
 * ####Example
 *
 *     schema.set('strict'); // 'true' by default
 *     schema.set('strict', false); // Sets 'strict' to false
 *     schema.set('strict'); // 'false'
 *
 * @param {String} key option name
 * @param {Object} [value] if not passed, the current option value is returned
 * @param {Object} [_tags] read-preference tags (only used for `read`)
 * @return {Schema|Object} this (setter form) or the option value (getter form)
 * @see Schema ./
 * @api public
 */
Schema.prototype.set = function(key, value, _tags) {
  // getter form
  if (arguments.length === 1) {
    return this.options[key];
  }
  if (key === 'read') {
    // normalize read preference values into driver ReadPreference objects
    this.options[key] = readPref(value, _tags);
  } else if (key === 'safe') {
    // `safe: false` is sugar for unacknowledged writes
    this.options[key] = value === false ? { w: 0 } : value;
  } else {
    this.options[key] = value;
  }
  return this;
};
/**
 * Gets a schema option.
 *
 * @param {String} key option name
 * @return {Object} the current value of the option
 * @api public
 */
Schema.prototype.get = function(key) {
  return this.options[key];
};
/**
 * The allowed index types
 *
 * @static indexTypes
 * @receiver Schema
 * @api public
 */
var indexTypes = '2d 2dsphere hashed text'.split(' ');
// exposed as a read-only static; assigning to it throws instead of
// silently failing
Object.defineProperty(Schema, 'indexTypes', {
  get: function() { return indexTypes; },
  set: function() { throw new Error('Cannot overwrite Schema.indexTypes'); }
});
/**
 * Compiles indexes from fields and schema-level indexes
 *
 * @return {Array} array of `[fieldSpec, options]` pairs
 * @api public
 */
Schema.prototype.indexes = function() {
  'use strict';
  var indexes = [];
  var seenPrefix = {};
  // walks this schema plus nested document-array / single-nested schemas,
  // collecting field-level `index` declarations
  var collectIndexes = function(schema, prefix) {
    // guard against revisiting the same subtree (e.g. recursive schemas)
    if (seenPrefix[prefix]) {
      return;
    }
    seenPrefix[prefix] = true;
    prefix = prefix || '';
    var key, path, index, field, isObject, options, type;
    var keys = Object.keys(schema.paths);
    for (var i = 0; i < keys.length; ++i) {
      key = keys[i];
      path = schema.paths[key];
      if (path instanceof MongooseTypes.DocumentArray) {
        collectIndexes(path.schema, key + '.');
      } else if (path.$isSingleNested) {
        collectIndexes(path.schema, key + '.');
      } else {
        index = path._index;
        if (false !== index && null != index) {
          field = {};
          isObject = utils.isObject(index);
          options = isObject ? index : {};
          // `index` can be truthy, an options object, or an index-type string
          type = 'string' == typeof index ? index :
            isObject ? index.type :
              false;
          if (type && ~Schema.indexTypes.indexOf(type)) {
            field[prefix + key] = type;
          } else {
            field[prefix + key] = 1;
          }
          delete options.type;
          // build in the background by default unless explicitly disabled
          if (!('background' in options)) {
            options.background = true;
          }
          indexes.push([field, options]);
        }
      }
    }
    if (prefix) {
      // sub-schema: re-prefix any Schema.index() declarations it carries
      fixSubIndexPaths(schema, prefix);
    } else {
      schema._indexes.forEach(function(index) {
        if (!('background' in index[1])) index[1].background = true;
      });
      indexes = indexes.concat(schema._indexes);
    }
  };
  collectIndexes(this);
  return indexes;
  /*!
   * Checks for indexes added to subdocs using Schema.index().
   * These indexes need their paths prefixed properly.
   *
   * schema._indexes = [ [indexObj, options], [indexObj, options] ..]
   */
  function fixSubIndexPaths(schema, prefix) {
    var subindexes = schema._indexes,
        len = subindexes.length,
        indexObj,
        newindex,
        klen,
        keys,
        key,
        i = 0,
        j;
    for (i = 0; i < len; ++i) {
      indexObj = subindexes[i][0];
      keys = Object.keys(indexObj);
      klen = keys.length;
      newindex = {};
      // use forward iteration, order matters
      for (j = 0; j < klen; ++j) {
        key = keys[j];
        newindex[prefix + key] = indexObj[key];
      }
      indexes.push([newindex, subindexes[i][1]]);
    }
  }
};
/**
 * Creates a virtual type with the given name.
 *
 * @param {String} name dotted virtual path
 * @param {Object} [options]
 * @return {VirtualType}
 */
Schema.prototype.virtual = function(name, options) {
  var virtuals = this.virtuals;
  var parts = name.split('.');
  // walk/create the tree one segment at a time so nested virtuals like
  // `a.b` hang off `tree.a`; the VirtualType sits at the final segment
  return virtuals[name] = parts.reduce(function(mem, part, i) {
    mem[part] || (mem[part] = (i === parts.length - 1)
      ? new VirtualType(options, name)
      : {});
    return mem[part];
  }, this.tree);
};
/**
 * Returns the virtual type with the given `name`.
 *
 * @param {String} name
 * @return {VirtualType} the virtual, or `undefined` if none is registered
 */
Schema.prototype.virtualpath = function(name) {
  return this.virtuals[name];
};
/**
 * Removes the given `path` (or [`paths`]).
 *
 * Only entries in `this.paths` are deleted; non-array, non-string
 * arguments are ignored.
 *
 * @param {String|Array} path
 * @api public
 */
Schema.prototype.remove = function(path) {
  var names = typeof path === 'string' ? [path] : path;
  if (Array.isArray(names)) {
    names.forEach(function(name) {
      if (this.path(name)) {
        delete this.paths[name];
      }
    }, this);
  }
};
/*!
 * Resolves the SchemaType for `path`, drilling into arrays and
 * subdocuments (including positional `$` segments) when a direct
 * lookup fails.
 */
Schema.prototype._getSchema = function(path) {
  var schema = this;
  var pathschema = schema.path(path);
  if (pathschema) {
    return pathschema;
  }
  // look for arrays
  return (function search(parts, schema) {
    var p = parts.length + 1,
        foundschema,
        trypath;
    // try progressively shorter dotted prefixes until one resolves
    while (p--) {
      trypath = parts.slice(0, p).join('.');
      foundschema = schema.path(trypath);
      if (foundschema) {
        if (foundschema.caster) {
          // array of Mixed?
          if (foundschema.caster instanceof MongooseTypes.Mixed) {
            return foundschema.caster;
          }
          // Now that we found the array, we need to check if there
          // are remaining document paths to look up for casting.
          // Also we need to handle array.$.path since schema.path
          // doesn't work for that.
          // If there is no foundschema.schema we are dealing with
          // a path like array.$
          if (p !== parts.length && foundschema.schema) {
            if ('$' === parts[p]) {
              // comments.$.comments.$.title
              return search(parts.slice(p + 1), foundschema.schema);
            } else {
              // this is the last path of the selector
              return search(parts.slice(p), foundschema.schema);
            }
          }
        }
        return foundschema;
      }
    }
  })(path.split('.'), schema);
};
/*!
* Module exports.
*/
module.exports = exports = Schema;
// require down here because of reference issues
/**
* The various built-in Mongoose Schema Types.
*
* ####Example:
*
* var mongoose = require('mongoose');
* var ObjectId = mongoose.Schema.Types.ObjectId;
*
* ####Types:
*
* - [String](#schema-string-js)
* - [Number](#schema-number-js)
* - [Boolean](#schema-boolean-js) | Bool
* - [Array](#schema-array-js)
* - [Buffer](#schema-buffer-js)
* - [Date](#schema-date-js)
* - [ObjectId](#schema-objectid-js) | Oid
* - [Mixed](#schema-mixed-js)
*
* Using this exposed access to the `Mixed` SchemaType, we can use them in our schema.
*
* var Mixed = mongoose.Schema.Types.Mixed;
* new mongoose.Schema({ _user: Mixed })
*
* @api public
*/
Schema.Types = MongooseTypes = require('./schema/index');
/*!
* ignore
*/
exports.ObjectId = MongooseTypes.ObjectId;
| {
delete subdocs[i].$__preSavingFromParent;
} | conditional_block |
schema.js | /*!
* Module dependencies.
*/
var readPref = require('./drivers').ReadPreference;
var EventEmitter = require('events').EventEmitter;
var VirtualType = require('./virtualtype');
var utils = require('./utils');
var MongooseTypes;
var Kareem = require('kareem');
var async = require('async');
var PromiseProvider = require('./promise_provider');
var IS_QUERY_HOOK = {
count: true,
find: true,
findOne: true,
findOneAndUpdate: true,
findOneAndRemove: true,
update: true
};
/**
* Schema constructor.
*
* ####Example:
*
* var child = new Schema({ name: String });
* var schema = new Schema({ name: String, age: Number, children: [child] });
* var Tree = mongoose.model('Tree', schema);
*
* // setting schema options
* new Schema({ name: String }, { _id: false, autoIndex: false })
*
* ####Options:
*
* - [autoIndex](/docs/guide.html#autoIndex): bool - defaults to null (which means use the connection's autoIndex option)
* - [bufferCommands](/docs/guide.html#bufferCommands): bool - defaults to true
* - [capped](/docs/guide.html#capped): bool - defaults to false
* - [collection](/docs/guide.html#collection): string - no default
* - [emitIndexErrors](/docs/guide.html#emitIndexErrors): bool - defaults to false.
* - [id](/docs/guide.html#id): bool - defaults to true
* - [_id](/docs/guide.html#_id): bool - defaults to true
* - `minimize`: bool - controls [document#toObject](#document_Document-toObject) behavior when called manually - defaults to true
* - [read](/docs/guide.html#read): string
* - [safe](/docs/guide.html#safe): bool - defaults to true.
* - [shardKey](/docs/guide.html#shardKey): bool - defaults to `null`
* - [strict](/docs/guide.html#strict): bool - defaults to true
* - [toJSON](/docs/guide.html#toJSON) - object - no default
* - [toObject](/docs/guide.html#toObject) - object - no default
* - [typeKey](/docs/guide.html#typeKey) - string - defaults to 'type'
* - [validateBeforeSave](/docs/guide.html#validateBeforeSave) - bool - defaults to `true`
* - [versionKey](/docs/guide.html#versionKey): bool - defaults to "__v"
*
* ####Note:
*
* _When nesting schemas, (`children` in the example above), always declare the child schema first before passing it into its parent._
*
* @param {Object} definition
* @inherits NodeJS EventEmitter http://nodejs.org/api/events.html#events_class_events_eventemitter
* @event `init`: Emitted after the schema is compiled into a `Model`.
* @api public
*/
function Schema(obj, options) {
  // allow calling `Schema(...)` without `new`
  if (!(this instanceof Schema))
    return new Schema(obj, options);
  this.paths = {};
  this.subpaths = {};
  this.virtuals = {};
  this.nested = {};
  this.inherits = {};
  this.callQueue = [];
  this._indexes = [];
  this.methods = {};
  this.statics = {};
  this.tree = {};
  this._requiredpaths = undefined;
  this.discriminatorMapping = undefined;
  this._indexedpaths = undefined;
  // internal state: kareem hook engine plus the set of hook names it owns
  this.s = {
    hooks: new Kareem(),
    queryHooks: IS_QUERY_HOOK
  };
  this.options = this.defaultOptions(options);
  // build paths
  if (obj) {
    this.add(obj);
  }
  // check if _id's value is a subdocument (gh-2276)
  var _idSubDoc = obj && obj._id && utils.isObject(obj._id);
  // ensure the documents get an auto _id unless disabled
  var auto_id = !this.paths['_id'] &&
    (!this.options.noId && this.options._id) && !_idSubDoc;
  if (auto_id) {
    // register an auto-generated ObjectId `_id` path, honoring a custom typeKey
    obj = { _id: { auto: true } };
    obj._id[this.options.typeKey] = Schema.ObjectId;
    this.add(obj);
  }
  // ensure the documents receive an id getter unless disabled
  var autoid = !this.paths['id'] &&
    (!this.options.noVirtualId && this.options.id);
  if (autoid) {
    this.virtual('id').get(idGetter);
  }
  // attach the built-in middleware declared in _defaultMiddleware
  for (var i = 0; i < this._defaultMiddleware.length; ++i) {
    var m = this._defaultMiddleware[i];
    this[m.kind](m.hook, !!m.isAsync, m.fn);
  }
  // adds updatedAt and createdAt timestamps to documents if enabled
  var timestamps = this.options.timestamps;
  if (timestamps) {
    var createdAt = timestamps.createdAt || 'createdAt',
        updatedAt = timestamps.updatedAt || 'updatedAt',
        schemaAdditions = {};
    schemaAdditions[updatedAt] = Date;
    if (!this.paths[createdAt]) {
      schemaAdditions[createdAt] = Date;
    }
    this.add(schemaAdditions);
    // document save: stamp createdAt once, refresh updatedAt on every save
    this.pre('save', function(next) {
      var defaultTimestamp = new Date();
      if (!this[createdAt]) {
        // with an auto ObjectId _id, reuse the timestamp embedded in the id
        this[createdAt] = auto_id ? this._id.getTimestamp() : defaultTimestamp;
      }
      this[updatedAt] = this.isNew ? this[createdAt] : defaultTimestamp;
      next();
    });
    // builds the $set/$setOnInsert clauses used by the query middleware below
    var genUpdates = function() {
      var now = new Date();
      var updates = {$set: {}, $setOnInsert: {}};
      updates.$set[updatedAt] = now;
      updates.$setOnInsert[createdAt] = now;
      return updates;
    };
    this.pre('findOneAndUpdate', function(next) {
      this.findOneAndUpdate({}, genUpdates());
      next();
    });
    this.pre('update', function(next) {
      this.update({}, genUpdates());
      next();
    });
  }
}
/*!
 * Returns this document's `_id` cast to a string, caching the result
 * on the internal `$__` state object so the cast happens at most once.
 */
function idGetter() {
  var cached = this.$__._id;
  if (cached) {
    return cached;
  }
  var id = this._id == null ? null : String(this._id);
  this.$__._id = id;
  return id;
}
/*!
 * Inherit from EventEmitter.
 */
Schema.prototype = Object.create( EventEmitter.prototype );
// Object.create above clobbered `constructor`; point it back at Schema
Schema.prototype.constructor = Schema;
/**
 * Default middleware attached to a schema. Cannot be changed.
 *
 * This field is used to make sure discriminators don't get multiple copies of
 * built-in middleware. Declared as a constant because changing this at runtime
 * may lead to instability with Model.prototype.discriminator().
 *
 * @api private
 * @property _defaultMiddleware
 */
Object.defineProperty(Schema.prototype, '_defaultMiddleware', {
  configurable: false,
  enumerable: false,
  writable: false,
  value: [{
    kind: 'pre',
    hook: 'save',
    fn: function(next, options) {
      // Nested docs have their own presave
      if (this.ownerDocument) {
        return next();
      }
      var hasValidateBeforeSaveOption = options &&
        (typeof options === 'object') &&
        ('validateBeforeSave' in options);
      var shouldValidate;
      if (hasValidateBeforeSaveOption) {
        // a per-save option overrides the schema-level setting
        shouldValidate = !!options.validateBeforeSave;
      } else {
        shouldValidate = this.schema.options.validateBeforeSave;
      }
      // Validate
      if (shouldValidate) {
        // HACK: use $__original_validate to avoid promises so bluebird doesn't
        // complain
        if (this.$__original_validate) {
          this.$__original_validate({ __noPromise: true }, function(error) {
            next(error);
          });
        } else {
          this.validate({ __noPromise: true }, function(error) {
            next(error);
          });
        }
      } else {
        next();
      }
    }
  }, {
    kind: 'pre',
    hook: 'save',
    isAsync: true,
    fn: function(next, done) {
      var Promise = PromiseProvider.get(),
          subdocs = this.$__getAllSubdocs();
      if (!subdocs.length || this.$__preSavingFromParent) {
        done();
        next();
        return;
      }
      new Promise.ES6(function(resolve, reject) {
        async.each(subdocs, function(subdoc, cb) {
          // flag so the subdoc's own pre-save middleware doesn't recurse
          subdoc.$__preSavingFromParent = true;
          subdoc.save(function(err) {
            cb(err);
          });
        }, function(error) {
          // clear the recursion-guard flag on every subdoc before settling
          for (var i = 0; i < subdocs.length; ++i) {
            delete subdocs[i].$__preSavingFromParent;
          }
          if (error) {
            reject(error);
            return;
          }
          resolve();
        });
      }).then(function() {
        next();
        done();
      }, done);
    }
  }]
});
/**
* Schema as flat paths
*
* ####Example:
* {
* '_id' : SchemaType,
* , 'nested.key' : SchemaType,
* }
*
* @api private
* @property paths
*/
Schema.prototype.paths;
/**
* Schema as a tree
*
* ####Example:
* {
* '_id' : ObjectId
* , 'nested' : {
* 'key' : String
* }
* }
*
* @api private
* @property tree
*/
Schema.prototype.tree;
/**
* Returns default options for this schema, merged with `options`.
*
* @param {Object} options
* @return {Object}
* @api private
*/
Schema.prototype.defaultOptions = function(options) {
if (options && false === options.safe) {
options.safe = { w: 0 };
}
if (options && options.safe && 0 === options.safe.w) {
// if you turn off safe writes, then versioning goes off as well
options.versionKey = false;
}
options = utils.options({
strict: true,
bufferCommands: true,
capped: false, // { size, max, autoIndexId }
versionKey: '__v',
discriminatorKey: '__t',
minimize: true,
autoIndex: null,
shardKey: null,
read: null,
validateBeforeSave: true,
// the following are only applied at construction time
noId: false, // deprecated, use { _id: false }
_id: true,
noVirtualId: false, // deprecated, use { id: false }
id: true,
typeKey: 'type'
}, options);
if (options.read) {
options.read = readPref(options.read);
}
return options;
};
/**
* Adds key path / schema type pairs to this schema.
*
* ####Example:
*
* var ToySchema = new Schema;
* ToySchema.add({ name: 'string', color: 'string', price: 'number' });
*
* @param {Object} obj
* @param {String} prefix
* @api public
*/
Schema.prototype.add = function add(obj, prefix) {
prefix = prefix || '';
var keys = Object.keys(obj);
for (var i = 0; i < keys.length; ++i) {
var key = keys[i];
if (null == obj[key]) {
throw new TypeError('Invalid value for schema path `' + prefix + key + '`');
}
if (Array.isArray(obj[key]) && obj[key].length === 1 && null == obj[key][0]) {
throw new TypeError('Invalid value for schema Array path `' + prefix + key + '`');
}
if (utils.isObject(obj[key]) &&
(!obj[key].constructor || 'Object' == utils.getFunctionName(obj[key].constructor)) &&
(!obj[key][this.options.typeKey] || (this.options.typeKey === 'type' && obj[key].type.type))) {
if (Object.keys(obj[key]).length) {
// nested object { last: { name: String }}
this.nested[prefix + key] = true;
this.add(obj[key], prefix + key + '.');
} else {
this.path(prefix + key, obj[key]); // mixed type
}
} else {
this.path(prefix + key, obj[key]);
}
}
};
/**
* Reserved document keys.
*
* Keys in this object are names that are rejected in schema declarations b/c they conflict with mongoose functionality. Using these key name will throw an error.
*
* on, emit, _events, db, get, set, init, isNew, errors, schema, options, modelName, collection, _pres, _posts, toObject
*
* _NOTE:_ Use of these terms as method names is permitted, but play at your own risk, as they may be existing mongoose document methods you are stomping on.
*
* var schema = new Schema(..);
* schema.methods.init = function () {} // potentially breaking
*/
Schema.reserved = Object.create(null);
var reserved = Schema.reserved;
// EventEmitter
reserved.emit =
reserved.on =
reserved.once =
// document properties and functions
reserved.collection =
reserved.db =
reserved.errors =
reserved.init =
reserved.isModified =
reserved.isNew =
reserved.get =
reserved.modelName =
reserved.save =
reserved.schema =
reserved.set =
reserved.toObject =
reserved.validate =
// hooks.js
reserved._pres = reserved._posts = 1;
/**
* Document keys to print warnings for
*/
var warnings = {};
warnings.increment = '`increment` should not be used as a schema path name ' +
'unless you have disabled versioning.';
/**
* Gets/sets schema paths.
*
* Sets a path (if arity 2)
* Gets a path (if arity 1)
*
* ####Example
*
* schema.path('name') // returns a SchemaType
* schema.path('name', Number) // changes the schemaType of `name` to Number
*
* @param {String} path
* @param {Object} constructor
* @api public
*/
Schema.prototype.path = function(path, obj) {
if (obj == undefined) {
if (this.paths[path]) return this.paths[path];
if (this.subpaths[path]) return this.subpaths[path];
// subpaths?
return /\.\d+\.?.*$/.test(path)
? getPositionalPath(this, path)
: undefined;
}
// some path names conflict with document methods
if (reserved[path]) {
throw new Error("`" + path + "` may not be used as a schema pathname");
}
if (warnings[path]) {
console.log('WARN: ' + warnings[path]);
}
// update the tree
var subpaths = path.split(/\./),
last = subpaths.pop(),
branch = this.tree;
subpaths.forEach(function(sub, i) {
if (!branch[sub]) branch[sub] = {};
if ('object' != typeof branch[sub]) {
var msg = 'Cannot set nested path `' + path + '`. '
+ 'Parent path `'
+ subpaths.slice(0, i).concat([sub]).join('.')
+ '` already set to type ' + branch[sub].name
+ '.';
throw new Error(msg);
}
branch = branch[sub];
});
branch[last] = utils.clone(obj);
this.paths[path] = Schema.interpretAsType(path, obj, this.options);
return this;
};
/**
* Converts type arguments into Mongoose Types.
*
* @param {String} path
* @param {Object} obj constructor
* @api private
*/
Schema.interpretAsType = function(path, obj, options) {
if (obj.constructor) {
var constructorName = utils.getFunctionName(obj.constructor);
if (constructorName != 'Object') { | var oldObj = obj;
obj = {};
obj[options.typeKey] = oldObj;
}
}
// Get the type making sure to allow keys named "type"
// and default to mixed if not specified.
// { type: { type: String, default: 'freshcut' } }
var type = obj[options.typeKey] && (options.typeKey !== 'type' || !obj.type.type)
? obj[options.typeKey]
: {};
if ('Object' == utils.getFunctionName(type.constructor) || 'mixed' == type) {
return new MongooseTypes.Mixed(path, obj);
}
if (Array.isArray(type) || Array == type || 'array' == type) {
// if it was specified through { type } look for `cast`
var cast = (Array == type || 'array' == type)
? obj.cast
: type[0];
if (cast instanceof Schema) {
return new MongooseTypes.DocumentArray(path, cast, obj);
}
if ('string' == typeof cast) {
cast = MongooseTypes[cast.charAt(0).toUpperCase() + cast.substring(1)];
} else if (cast && (!cast[options.typeKey] || (options.typeKey === 'type' && cast.type.type))
&& 'Object' == utils.getFunctionName(cast.constructor)
&& Object.keys(cast).length) {
// The `minimize` and `typeKey` options propagate to child schemas
// declared inline, like `{ arr: [{ val: { $type: String } }] }`.
// See gh-3560
var childSchemaOptions = { minimize: options.minimize };
if (options.typeKey) {
childSchemaOptions.typeKey = options.typeKey;
}
var childSchema = new Schema(cast, childSchemaOptions);
return new MongooseTypes.DocumentArray(path, childSchema, obj);
}
return new MongooseTypes.Array(path, cast || MongooseTypes.Mixed, obj);
}
if (type instanceof Schema) {
return new MongooseTypes.Embedded(type, path, obj);
}
var name;
if (Buffer.isBuffer(type)) {
name = 'Buffer';
} else {
name = 'string' == typeof type
? type
// If not string, `type` is a function. Outside of IE, function.name
// gives you the function name. In IE, you need to compute it
: type.schemaName || utils.getFunctionName(type);
}
if (name) {
name = name.charAt(0).toUpperCase() + name.substring(1);
}
if (undefined == MongooseTypes[name]) {
throw new TypeError('Undefined type `' + name + '` at `' + path +
'`\n Did you try nesting Schemas? ' +
'You can only nest using refs or arrays.');
}
return new MongooseTypes[name](path, obj);
};
/**
* Iterates the schemas paths similar to Array#forEach.
*
* The callback is passed the pathname and schemaType as arguments on each iteration.
*
* @param {Function} fn callback function
* @return {Schema} this
* @api public
*/
Schema.prototype.eachPath = function(fn) {
var keys = Object.keys(this.paths),
len = keys.length;
for (var i = 0; i < len; ++i) {
fn(keys[i], this.paths[keys[i]]);
}
return this;
};
/**
* Returns an Array of path strings that are required by this schema.
*
* @api public
* @param {Boolean} invalidate refresh the cache
* @return {Array}
*/
Schema.prototype.requiredPaths = function requiredPaths(invalidate) {
if (this._requiredpaths && !invalidate) return this._requiredpaths;
var paths = Object.keys(this.paths),
i = paths.length,
ret = [];
while (i--) {
var path = paths[i];
if (this.paths[path].isRequired) ret.push(path);
}
return this._requiredpaths = ret;
};
/**
* Returns indexes from fields and schema-level indexes (cached).
*
* @api private
* @return {Array}
*/
Schema.prototype.indexedPaths = function indexedPaths() {
if (this._indexedpaths) return this._indexedpaths;
return this._indexedpaths = this.indexes();
};
/**
* Returns the pathType of `path` for this schema.
*
* Given a path, returns whether it is a real, virtual, nested, or ad-hoc/undefined path.
*
* @param {String} path
* @return {String}
* @api public
*/
Schema.prototype.pathType = function(path) {
if (path in this.paths) return 'real';
if (path in this.virtuals) return 'virtual';
if (path in this.nested) return 'nested';
if (path in this.subpaths) return 'real';
if (/\.\d+\.|\.\d+$/.test(path)) {
return getPositionalPathType(this, path);
} else {
return 'adhocOrUndefined';
}
};
/**
* Returns true iff this path is a child of a mixed schema.
*
* @param {String} path
* @return {Boolean}
* @api private
*/
Schema.prototype.hasMixedParent = function(path) {
var subpaths = path.split(/\./g);
path = '';
for (var i = 0; i < subpaths.length; ++i) {
path = i > 0 ? path + '.' + subpaths[i] : subpaths[i];
if (path in this.paths &&
this.paths[path] instanceof MongooseTypes.Mixed) {
return true;
}
}
return false;
};
/*!
* ignore
*/
function getPositionalPathType(self, path) {
var subpaths = path.split(/\.(\d+)\.|\.(\d+)$/).filter(Boolean);
if (subpaths.length < 2) {
return self.paths[subpaths[0]];
}
var val = self.path(subpaths[0]);
var isNested = false;
if (!val) return val;
var last = subpaths.length - 1,
subpath,
i = 1;
for (; i < subpaths.length; ++i) {
isNested = false;
subpath = subpaths[i];
if (i === last && val && !val.schema && !/\D/.test(subpath)) {
if (val instanceof MongooseTypes.Array) {
// StringSchema, NumberSchema, etc
val = val.caster;
} else {
val = undefined;
}
break;
}
// ignore if its just a position segment: path.0.subpath
if (!/\D/.test(subpath)) continue;
if (!(val && val.schema)) {
val = undefined;
break;
}
var type = val.schema.pathType(subpath);
isNested = (type === 'nested');
val = val.schema.path(subpath);
}
self.subpaths[path] = val;
if (val) {
return 'real';
}
if (isNested) {
return 'nested';
}
return 'adhocOrUndefined';
}
/*!
* ignore
*/
function getPositionalPath(self, path) {
getPositionalPathType(self, path);
return self.subpaths[path];
}
/**
* Adds a method call to the queue.
*
* @param {String} name name of the document method to call later
* @param {Array} args arguments to pass to the method
* @api public
*/
Schema.prototype.queue = function(name, args) {
this.callQueue.push([name, args]);
return this;
};
/**
* Defines a pre hook for the document.
*
* ####Example
*
* var toySchema = new Schema(..);
*
* toySchema.pre('save', function (next) {
* if (!this.created) this.created = new Date;
* next();
* })
*
* toySchema.pre('validate', function (next) {
* if (this.name != 'Woody') this.name = 'Woody';
* next();
* })
*
* @param {String} method
* @param {Function} callback
* @see hooks.js https://github.com/bnoguchi/hooks-js/tree/31ec571cef0332e21121ee7157e0cf9728572cc3
* @api public
*/
Schema.prototype.pre = function() {
var name = arguments[0];
if (IS_QUERY_HOOK[name]) {
this.s.hooks.pre.apply(this.s.hooks, arguments);
return this;
}
return this.queue('pre', arguments);
};
/**
* Defines a post hook for the document
*
* Post hooks fire `on` the event emitted from document instances of Models compiled from this schema.
*
* var schema = new Schema(..);
* schema.post('save', function (doc) {
* console.log('this fired after a document was saved');
* });
*
* var Model = mongoose.model('Model', schema);
*
* var m = new Model(..);
* m.save(function (err) {
* console.log('this fires after the `post` hook');
* });
*
* @param {String} method name of the method to hook
* @param {Function} fn callback
* @see hooks.js https://github.com/bnoguchi/hooks-js/tree/31ec571cef0332e21121ee7157e0cf9728572cc3
* @api public
*/
Schema.prototype.post = function(method, fn) {
if (IS_QUERY_HOOK[method]) {
this.s.hooks.post.apply(this.s.hooks, arguments);
return this;
}
// assuming that all callbacks with arity < 2 are synchronous post hooks
if (fn.length < 2) {
return this.queue('on', [arguments[0], function(doc) {
return fn.call(doc, doc);
}]);
}
return this.queue('post', [arguments[0], function(next) {
// wrap original function so that the callback goes last,
// for compatibility with old code that is using synchronous post hooks
var self = this;
var args = Array.prototype.slice.call(arguments, 1);
fn.call(this, this, function(err) {
return next.apply(self, [err].concat(args));
});
}]);
};
/**
* Registers a plugin for this schema.
*
* @param {Function} plugin callback
* @param {Object} [opts]
* @see plugins
* @api public
*/
Schema.prototype.plugin = function(fn, opts) {
fn(this, opts);
return this;
};
/**
* Adds an instance method to documents constructed from Models compiled from this schema.
*
* ####Example
*
* var schema = kittySchema = new Schema(..);
*
* schema.method('meow', function () {
* console.log('meeeeeoooooooooooow');
* })
*
* var Kitty = mongoose.model('Kitty', schema);
*
* var fizz = new Kitty;
* fizz.meow(); // meeeeeooooooooooooow
*
* If a hash of name/fn pairs is passed as the only argument, each name/fn pair will be added as methods.
*
* schema.method({
* purr: function () {}
* , scratch: function () {}
* });
*
* // later
* fizz.purr();
* fizz.scratch();
*
* @param {String|Object} method name
* @param {Function} [fn]
* @api public
*/
Schema.prototype.method = function(name, fn) {
if ('string' != typeof name)
for (var i in name)
this.methods[i] = name[i];
else
this.methods[name] = fn;
return this;
};
/**
* Adds static "class" methods to Models compiled from this schema.
*
* ####Example
*
* var schema = new Schema(..);
* schema.static('findByName', function (name, callback) {
* return this.find({ name: name }, callback);
* });
*
* var Drink = mongoose.model('Drink', schema);
* Drink.findByName('sanpellegrino', function (err, drinks) {
* //
* });
*
* If a hash of name/fn pairs is passed as the only argument, each name/fn pair will be added as statics.
*
* @param {String} name
* @param {Function} fn
* @api public
*/
Schema.prototype.static = function(name, fn) {
if ('string' != typeof name)
for (var i in name)
this.statics[i] = name[i];
else
this.statics[name] = fn;
return this;
};
/**
* Defines an index (most likely compound) for this schema.
*
* ####Example
*
* schema.index({ first: 1, last: -1 })
*
* @param {Object} fields
* @param {Object} [options] Options to pass to [MongoDB driver's `createIndex()` function](http://mongodb.github.io/node-mongodb-native/2.0/api/Collection.html#createIndex)
* @param {String} [options.expires=null] Mongoose-specific syntactic sugar, uses [ms](https://www.npmjs.com/package/ms) to convert `expires` option into seconds for the `expireAfterSeconds` in the above link.
* @api public
*/
Schema.prototype.index = function(fields, options) {
options || (options = {});
if (options.expires)
utils.expires(options);
this._indexes.push([fields, options]);
return this;
};
/**
* Sets/gets a schema option.
*
* ####Example
*
* schema.set('strict'); // 'true' by default
* schema.set('strict', false); // Sets 'strict' to false
* schema.set('strict'); // 'false'
*
* @param {String} key option name
* @param {Object} [value] if not passed, the current option value is returned
* @see Schema ./
* @api public
*/
Schema.prototype.set = function(key, value, _tags) {
if (1 === arguments.length) {
return this.options[key];
}
switch (key) {
case 'read':
this.options[key] = readPref(value, _tags);
break;
case 'safe':
this.options[key] = false === value
? { w: 0 }
: value;
break;
default:
this.options[key] = value;
}
return this;
};
/**
* Gets a schema option.
*
* @param {String} key option name
* @api public
*/
Schema.prototype.get = function(key) {
return this.options[key];
};
/**
* The allowed index types
*
* @static indexTypes
* @receiver Schema
* @api public
*/
var indexTypes = '2d 2dsphere hashed text'.split(' ');
Object.defineProperty(Schema, 'indexTypes', {
get: function() { return indexTypes; },
set: function() { throw new Error('Cannot overwrite Schema.indexTypes'); }
});
/**
* Compiles indexes from fields and schema-level indexes
*
* @api public
*/
Schema.prototype.indexes = function() {
'use strict';
var indexes = [];
var seenPrefix = {};
var collectIndexes = function(schema, prefix) {
if (seenPrefix[prefix]) {
return;
}
seenPrefix[prefix] = true;
prefix = prefix || '';
var key, path, index, field, isObject, options, type;
var keys = Object.keys(schema.paths);
for (var i = 0; i < keys.length; ++i) {
key = keys[i];
path = schema.paths[key];
if (path instanceof MongooseTypes.DocumentArray) {
collectIndexes(path.schema, key + '.');
} else if (path.$isSingleNested) {
collectIndexes(path.schema, key + '.');
} else {
index = path._index;
if (false !== index && null != index) {
field = {};
isObject = utils.isObject(index);
options = isObject ? index : {};
type = 'string' == typeof index ? index :
isObject ? index.type :
false;
if (type && ~Schema.indexTypes.indexOf(type)) {
field[prefix + key] = type;
} else {
field[prefix + key] = 1;
}
delete options.type;
if (!('background' in options)) {
options.background = true;
}
indexes.push([field, options]);
}
}
}
if (prefix) {
fixSubIndexPaths(schema, prefix);
} else {
schema._indexes.forEach(function(index) {
if (!('background' in index[1])) index[1].background = true;
});
indexes = indexes.concat(schema._indexes);
}
};
collectIndexes(this);
return indexes;
/*!
* Checks for indexes added to subdocs using Schema.index().
* These indexes need their paths prefixed properly.
*
* schema._indexes = [ [indexObj, options], [indexObj, options] ..]
*/
function fixSubIndexPaths(schema, prefix) {
var subindexes = schema._indexes,
len = subindexes.length,
indexObj,
newindex,
klen,
keys,
key,
i = 0,
j;
for (i = 0; i < len; ++i) {
indexObj = subindexes[i][0];
keys = Object.keys(indexObj);
klen = keys.length;
newindex = {};
// use forward iteration, order matters
for (j = 0; j < klen; ++j) {
key = keys[j];
newindex[prefix + key] = indexObj[key];
}
indexes.push([newindex, subindexes[i][1]]);
}
}
};
/**
* Creates a virtual type with the given name.
*
* @param {String} name
* @param {Object} [options]
* @return {VirtualType}
*/
Schema.prototype.virtual = function(name, options) {
var virtuals = this.virtuals;
var parts = name.split('.');
return virtuals[name] = parts.reduce(function(mem, part, i) {
mem[part] || (mem[part] = (i === parts.length - 1)
? new VirtualType(options, name)
: {});
return mem[part];
}, this.tree);
};
/**
* Returns the virtual type with the given `name`.
*
* @param {String} name
* @return {VirtualType}
*/
Schema.prototype.virtualpath = function(name) {
return this.virtuals[name];
};
/**
* Removes the given `path` (or [`paths`]).
*
* @param {String|Array} path
*
* @api public
*/
Schema.prototype.remove = function(path) {
if (typeof path === 'string') {
path = [path];
}
if (Array.isArray(path)) {
path.forEach(function(name) {
if (this.path(name)) {
delete this.paths[name];
}
}, this);
}
};
/*!
* ignore
*/
Schema.prototype._getSchema = function(path) {
var schema = this;
var pathschema = schema.path(path);
if (pathschema) {
return pathschema;
}
// look for arrays
return (function search(parts, schema) {
var p = parts.length + 1,
foundschema,
trypath;
while (p--) {
trypath = parts.slice(0, p).join('.');
foundschema = schema.path(trypath);
if (foundschema) {
if (foundschema.caster) {
// array of Mixed?
if (foundschema.caster instanceof MongooseTypes.Mixed) {
return foundschema.caster;
}
// Now that we found the array, we need to check if there
// are remaining document paths to look up for casting.
// Also we need to handle array.$.path since schema.path
// doesn't work for that.
// If there is no foundschema.schema we are dealing with
// a path like array.$
if (p !== parts.length && foundschema.schema) {
if ('$' === parts[p]) {
// comments.$.comments.$.title
return search(parts.slice(p + 1), foundschema.schema);
} else {
// this is the last path of the selector
return search(parts.slice(p), foundschema.schema);
}
}
}
return foundschema;
}
}
})(path.split('.'), schema);
};
/*!
* Module exports.
*/
module.exports = exports = Schema;
// require down here because of reference issues
/**
* The various built-in Mongoose Schema Types.
*
* ####Example:
*
* var mongoose = require('mongoose');
* var ObjectId = mongoose.Schema.Types.ObjectId;
*
* ####Types:
*
* - [String](#schema-string-js)
* - [Number](#schema-number-js)
* - [Boolean](#schema-boolean-js) | Bool
* - [Array](#schema-array-js)
* - [Buffer](#schema-buffer-js)
* - [Date](#schema-date-js)
* - [ObjectId](#schema-objectid-js) | Oid
* - [Mixed](#schema-mixed-js)
*
* Using this exposed access to the `Mixed` SchemaType, we can use them in our schema.
*
* var Mixed = mongoose.Schema.Types.Mixed;
* new mongoose.Schema({ _user: Mixed })
*
* @api public
*/
Schema.Types = MongooseTypes = require('./schema/index');
/*!
* ignore
*/
exports.ObjectId = MongooseTypes.ObjectId; | random_line_split | |
contentRes.js | // import routes | import Routes from './../../client/routes/Routes';
import renderer from './renderer';
import template from './template';
import createStore from './createStore';
module.exports = (app, req, res) => {
// set up the redux store on the server side
const store = createStore(req);
// check all react routes vs req path and return a list of components to be rendered
const promises = matchRoutes(Routes, req.path).map(({ route }) => (
// if route.loadData exists then call loadData(), else do nothing
route.loadData ? route.loadData(store) : null)
).map(promise => {
// if null value from promise (something went wrong)
if (promise) {
return new Promise((resolve) => {
// return a resolved promise
promise.then(resolve).catch(resolve);
});
}
return null;
});
// once all our data requests have been completed
Promise.all(promises).then(() => {
const context = {};
const content = renderer(req, store, context);
const response = template(req, store, content);
// context.url = req.path;
// if a url is loaded into the context then redirect to the url in the context
if (context.url) return res.redirect(301, context.url);
// if context has not found stored then respond with 404 error
if (context.notFound) res.status(404);
// send initialised store to the renderer
res.send(response);
});
}; | import { matchRoutes } from 'react-router-config'; | random_line_split |
RoomItem.js | /* @flow */
import React, { Component, PropTypes } from 'react';
import ReactNative from 'react-native';
import shallowEqual from 'shallowequal';
import Colors from '../../../Colors';
import AppText from '../AppText';
import ListItem from '../ListItem';
import Icon from '../Icon';
import Time from '../Time';
import ActionSheet from '../ActionSheet';
import ActionSheetItem from '../ActionSheetItem';
import Share from '../../../modules/Share';
import { convertRouteToURL } from '../../../../lib/Route';
import { config } from '../../../../core-client';
const {
StyleSheet,
TouchableOpacity,
View,
} = ReactNative;
const styles = StyleSheet.create({
item: {
flex: 1,
justifyContent: 'center',
paddingHorizontal: 16,
},
title: {
color: Colors.darkGrey,
fontWeight: 'bold',
},
subtitle: {
flexDirection: 'row',
alignItems: 'center',
},
label: {
color: Colors.grey,
fontSize: 10,
lineHeight: 15,
},
dot: {
fontSize: 2,
lineHeight: 3,
marginHorizontal: 4,
},
badge: {
backgroundColor: Colors.accent,
height: 6,
width: 6,
marginRight: 8,
borderRadius: 3,
elevation: 1,
},
expand: {
margin: 20,
color: Colors.fadedBlack,
},
});
type Props = {
room: {
id: string;
name: string;
updateTime?: number;
};
unread?: boolean;
onSelect: Function;
}
type State = {
actionSheetVisible: boolean;
}
export default class RoomItem extends Component<void, Props, State> {
static propTypes = {
room: PropTypes.shape({
id: PropTypes.string.isRequired,
name: PropTypes.string,
}),
unread: PropTypes.bool,
onSelect: PropTypes.func,
};
state: State = {
actionSheetVisible: false,
};
shouldComponentUpdate(nextProps: Props, nextState: State): boolean {
return !shallowEqual(this.props, nextProps) || !shallowEqual(this.state, nextState);
}
_getRoomLink: Function = () => {
const { room } = this.props;
return config.server.protocol + '//' + config.server.host + convertRouteToURL({
name: 'room',
props: {
room: room.id,
},
});
};
_getShareText: Function = () => {
const { room } = this.props;
return `Hey! Join me in the ${room.name} group on ${config.app_name}.\n${this._getRoomLink()}`;
};
_handleInvite: Function = () => {
Share.shareItem('Share group', this._getShareText());
};
_handleShowMenu: Function = () => {
this.setState({
actionSheetVisible: true,
});
};
_handleRequestClose: Function = () => {
this.setState({
actionSheetVisible: false,
});
};
_handlePress: Function = () => {
if (this.props.onSelect) {
this.props.onSelect(this.props.room);
}
};
render() {
const {
room,
unread,
} = this.props;
const followers = room.counts && room.counts.follower ? room.counts.follower : 0;
let followersLabel;
switch (followers) {
case 1:
followersLabel = '1 person';
break;
default:
followersLabel = `${followers > 1000 ? Math.round(followers / 100) / 10 + 'k' : followers} people`;
}
return (
<ListItem {...this.props} onPress={this._handlePress}>
<View style={styles.item}>
<AppText numberOfLines={1} style={styles.title}>{room.name || 'Loading…'}</AppText>
{room.updateTime ?
<View style={styles.subtitle}>
{unread ?
<View style={styles.badge} /> :
null
}
<Time
style={styles.label}
time={room.updateTime}
type='long'
/>
<AppText style={styles.dot}>●</AppText>
<AppText style={styles.label}>{followersLabel}</AppText>
</View> :
null
}
</View>
<TouchableOpacity onPress={this._handleShowMenu}>
<Icon
name='expand-more'
style={styles.expand} | <ActionSheet visible={this.state.actionSheetVisible} onRequestClose={this._handleRequestClose}>
<ActionSheetItem onPress={this._handleInvite}>
Invite friends to group
</ActionSheetItem>
</ActionSheet>
</ListItem>
);
}
} | size={20}
/>
</TouchableOpacity>
| random_line_split |
react-imgix-bg.js | "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.__BackgroundImpl = exports.Background = undefined;
var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
var _react = require("react");
var _react2 = _interopRequireDefault(_react);
var _reactMeasure = require("react-measure");
var _reactMeasure2 = _interopRequireDefault(_reactMeasure);
var _constructUrl = require("./constructUrl");
var _constructUrl2 = _interopRequireDefault(_constructUrl);
var _targetWidths = require("./targetWidths");
var _targetWidths2 = _interopRequireDefault(_targetWidths);
var _findClosest = require("./findClosest");
var _findClosest2 = _interopRequireDefault(_findClosest);
function _interopRequireDefault(obj) |
var PACKAGE_VERSION = "8.5.0";
var noop = function noop() {};
var findNearestWidth = function findNearestWidth(actualWidth) {
return (0, _findClosest2.default)(actualWidth, _targetWidths2.default);
};
var toFixed = function toFixed(dp, value) {
return +value.toFixed(dp);
};
var BackgroundImpl = function BackgroundImpl(props) {
var measureRef = props.measureRef,
measure = props.measure,
contentRect = props.contentRect,
_props$imgixParams = props.imgixParams,
imgixParams = _props$imgixParams === undefined ? {} : _props$imgixParams,
onLoad = props.onLoad,
disableLibraryParam = props.disableLibraryParam,
src = props.src,
children = props.children,
_props$className = props.className,
className = _props$className === undefined ? "" : _props$className;
var forcedWidth = imgixParams.w,
forcedHeight = imgixParams.h;
var hasDOMDimensions = contentRect.bounds.top != null;
var htmlAttributes = props.htmlAttributes || {};
var dpr = toFixed(2, imgixParams.dpr || global.devicePixelRatio || 1);
var ref = htmlAttributes.ref;
var onRef = function onRef(el) {
measureRef(el);
if (typeof ref === "function") {
ref(el);
}
};
var _ref = function () {
var bothWidthAndHeightPassed = forcedWidth != null && forcedHeight != null;
if (bothWidthAndHeightPassed) {
return { width: forcedWidth, height: forcedHeight };
}
if (!hasDOMDimensions) {
return { width: undefined, height: undefined };
}
var ar = contentRect.bounds.width / contentRect.bounds.height;
var neitherWidthNorHeightPassed = forcedWidth == null && forcedHeight == null;
if (neitherWidthNorHeightPassed) {
var _width = findNearestWidth(contentRect.bounds.width);
var _height = Math.ceil(_width / ar);
return { width: _width, height: _height };
}
if (forcedWidth != null) {
var _height2 = Math.ceil(forcedWidth / ar);
return { width: forcedWidth, height: _height2 };
} else if (forcedHeight != null) {
var _width2 = Math.ceil(forcedHeight * ar);
return { width: _width2, height: forcedHeight };
}
}(),
width = _ref.width,
height = _ref.height;
var isReady = width != null && height != null;
var commonProps = _extends({}, htmlAttributes);
if (!isReady) {
return _react2.default.createElement(
"div",
_extends({}, commonProps, {
className: "react-imgix-bg-loading " + className,
ref: onRef
}),
children
);
}
var renderedSrc = function () {
var srcOptions = _extends({}, imgixParams, disableLibraryParam ? {} : { ixlib: "react-" + PACKAGE_VERSION }, {
width: width,
height: height,
fit: "crop",
dpr: dpr
});
return (0, _constructUrl2.default)(src, srcOptions);
}();
var style = _extends({}, htmlAttributes.style, {
backgroundImage: "url(" + renderedSrc + ")",
backgroundSize: (htmlAttributes.style || {}).backgroundSize !== undefined ? htmlAttributes.style.backgroundSize : "cover"
});
return _react2.default.createElement(
"div",
_extends({}, commonProps, { className: className, ref: onRef, style: style }),
children
);
};
var Background = (0, _reactMeasure.withContentRect)("bounds")(BackgroundImpl);
exports.Background = Background;
exports.__BackgroundImpl = BackgroundImpl;
//# sourceMappingURL=react-imgix-bg.js.map | { return obj && obj.__esModule ? obj : { default: obj }; } | identifier_body |
react-imgix-bg.js | "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.__BackgroundImpl = exports.Background = undefined;
var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
var _react = require("react");
var _react2 = _interopRequireDefault(_react);
var _reactMeasure = require("react-measure");
var _reactMeasure2 = _interopRequireDefault(_reactMeasure);
var _constructUrl = require("./constructUrl");
var _constructUrl2 = _interopRequireDefault(_constructUrl);
var _targetWidths = require("./targetWidths");
var _targetWidths2 = _interopRequireDefault(_targetWidths);
var _findClosest = require("./findClosest");
var _findClosest2 = _interopRequireDefault(_findClosest);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var PACKAGE_VERSION = "8.5.0";
var noop = function noop() {};
var findNearestWidth = function findNearestWidth(actualWidth) {
return (0, _findClosest2.default)(actualWidth, _targetWidths2.default);
};
var toFixed = function toFixed(dp, value) {
return +value.toFixed(dp);
};
var BackgroundImpl = function BackgroundImpl(props) {
var measureRef = props.measureRef,
measure = props.measure,
contentRect = props.contentRect,
_props$imgixParams = props.imgixParams,
imgixParams = _props$imgixParams === undefined ? {} : _props$imgixParams,
onLoad = props.onLoad,
disableLibraryParam = props.disableLibraryParam,
src = props.src,
children = props.children,
_props$className = props.className,
className = _props$className === undefined ? "" : _props$className;
var forcedWidth = imgixParams.w,
forcedHeight = imgixParams.h;
var hasDOMDimensions = contentRect.bounds.top != null;
var htmlAttributes = props.htmlAttributes || {};
var dpr = toFixed(2, imgixParams.dpr || global.devicePixelRatio || 1);
var ref = htmlAttributes.ref;
var onRef = function onRef(el) {
measureRef(el);
if (typeof ref === "function") {
ref(el);
}
};
var _ref = function () {
var bothWidthAndHeightPassed = forcedWidth != null && forcedHeight != null;
if (bothWidthAndHeightPassed) {
return { width: forcedWidth, height: forcedHeight };
}
if (!hasDOMDimensions) {
return { width: undefined, height: undefined };
}
var ar = contentRect.bounds.width / contentRect.bounds.height;
var neitherWidthNorHeightPassed = forcedWidth == null && forcedHeight == null;
if (neitherWidthNorHeightPassed) {
var _width = findNearestWidth(contentRect.bounds.width);
var _height = Math.ceil(_width / ar);
return { width: _width, height: _height };
}
if (forcedWidth != null) {
var _height2 = Math.ceil(forcedWidth / ar);
return { width: forcedWidth, height: _height2 };
} else if (forcedHeight != null) {
var _width2 = Math.ceil(forcedHeight * ar);
return { width: _width2, height: forcedHeight };
}
}(),
width = _ref.width,
height = _ref.height;
var isReady = width != null && height != null;
var commonProps = _extends({}, htmlAttributes);
if (!isReady) {
return _react2.default.createElement(
"div",
_extends({}, commonProps, {
className: "react-imgix-bg-loading " + className,
ref: onRef
}), |
var renderedSrc = function () {
var srcOptions = _extends({}, imgixParams, disableLibraryParam ? {} : { ixlib: "react-" + PACKAGE_VERSION }, {
width: width,
height: height,
fit: "crop",
dpr: dpr
});
return (0, _constructUrl2.default)(src, srcOptions);
}();
var style = _extends({}, htmlAttributes.style, {
backgroundImage: "url(" + renderedSrc + ")",
backgroundSize: (htmlAttributes.style || {}).backgroundSize !== undefined ? htmlAttributes.style.backgroundSize : "cover"
});
return _react2.default.createElement(
"div",
_extends({}, commonProps, { className: className, ref: onRef, style: style }),
children
);
};
var Background = (0, _reactMeasure.withContentRect)("bounds")(BackgroundImpl);
exports.Background = Background;
exports.__BackgroundImpl = BackgroundImpl;
//# sourceMappingURL=react-imgix-bg.js.map | children
);
} | random_line_split |
react-imgix-bg.js | "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.__BackgroundImpl = exports.Background = undefined;
var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
var _react = require("react");
var _react2 = _interopRequireDefault(_react);
var _reactMeasure = require("react-measure");
var _reactMeasure2 = _interopRequireDefault(_reactMeasure);
var _constructUrl = require("./constructUrl");
var _constructUrl2 = _interopRequireDefault(_constructUrl);
var _targetWidths = require("./targetWidths");
var _targetWidths2 = _interopRequireDefault(_targetWidths);
var _findClosest = require("./findClosest");
var _findClosest2 = _interopRequireDefault(_findClosest);
function | (obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var PACKAGE_VERSION = "8.5.0";
var noop = function noop() {};
var findNearestWidth = function findNearestWidth(actualWidth) {
return (0, _findClosest2.default)(actualWidth, _targetWidths2.default);
};
var toFixed = function toFixed(dp, value) {
return +value.toFixed(dp);
};
var BackgroundImpl = function BackgroundImpl(props) {
var measureRef = props.measureRef,
measure = props.measure,
contentRect = props.contentRect,
_props$imgixParams = props.imgixParams,
imgixParams = _props$imgixParams === undefined ? {} : _props$imgixParams,
onLoad = props.onLoad,
disableLibraryParam = props.disableLibraryParam,
src = props.src,
children = props.children,
_props$className = props.className,
className = _props$className === undefined ? "" : _props$className;
var forcedWidth = imgixParams.w,
forcedHeight = imgixParams.h;
var hasDOMDimensions = contentRect.bounds.top != null;
var htmlAttributes = props.htmlAttributes || {};
var dpr = toFixed(2, imgixParams.dpr || global.devicePixelRatio || 1);
var ref = htmlAttributes.ref;
var onRef = function onRef(el) {
measureRef(el);
if (typeof ref === "function") {
ref(el);
}
};
var _ref = function () {
var bothWidthAndHeightPassed = forcedWidth != null && forcedHeight != null;
if (bothWidthAndHeightPassed) {
return { width: forcedWidth, height: forcedHeight };
}
if (!hasDOMDimensions) {
return { width: undefined, height: undefined };
}
var ar = contentRect.bounds.width / contentRect.bounds.height;
var neitherWidthNorHeightPassed = forcedWidth == null && forcedHeight == null;
if (neitherWidthNorHeightPassed) {
var _width = findNearestWidth(contentRect.bounds.width);
var _height = Math.ceil(_width / ar);
return { width: _width, height: _height };
}
if (forcedWidth != null) {
var _height2 = Math.ceil(forcedWidth / ar);
return { width: forcedWidth, height: _height2 };
} else if (forcedHeight != null) {
var _width2 = Math.ceil(forcedHeight * ar);
return { width: _width2, height: forcedHeight };
}
}(),
width = _ref.width,
height = _ref.height;
var isReady = width != null && height != null;
var commonProps = _extends({}, htmlAttributes);
if (!isReady) {
return _react2.default.createElement(
"div",
_extends({}, commonProps, {
className: "react-imgix-bg-loading " + className,
ref: onRef
}),
children
);
}
var renderedSrc = function () {
var srcOptions = _extends({}, imgixParams, disableLibraryParam ? {} : { ixlib: "react-" + PACKAGE_VERSION }, {
width: width,
height: height,
fit: "crop",
dpr: dpr
});
return (0, _constructUrl2.default)(src, srcOptions);
}();
var style = _extends({}, htmlAttributes.style, {
backgroundImage: "url(" + renderedSrc + ")",
backgroundSize: (htmlAttributes.style || {}).backgroundSize !== undefined ? htmlAttributes.style.backgroundSize : "cover"
});
return _react2.default.createElement(
"div",
_extends({}, commonProps, { className: className, ref: onRef, style: style }),
children
);
};
var Background = (0, _reactMeasure.withContentRect)("bounds")(BackgroundImpl);
exports.Background = Background;
exports.__BackgroundImpl = BackgroundImpl;
//# sourceMappingURL=react-imgix-bg.js.map | _interopRequireDefault | identifier_name |
react-imgix-bg.js | "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.__BackgroundImpl = exports.Background = undefined;
var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
var _react = require("react");
var _react2 = _interopRequireDefault(_react);
var _reactMeasure = require("react-measure");
var _reactMeasure2 = _interopRequireDefault(_reactMeasure);
var _constructUrl = require("./constructUrl");
var _constructUrl2 = _interopRequireDefault(_constructUrl);
var _targetWidths = require("./targetWidths");
var _targetWidths2 = _interopRequireDefault(_targetWidths);
var _findClosest = require("./findClosest");
var _findClosest2 = _interopRequireDefault(_findClosest);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var PACKAGE_VERSION = "8.5.0";
var noop = function noop() {};
var findNearestWidth = function findNearestWidth(actualWidth) {
return (0, _findClosest2.default)(actualWidth, _targetWidths2.default);
};
var toFixed = function toFixed(dp, value) {
return +value.toFixed(dp);
};
var BackgroundImpl = function BackgroundImpl(props) {
var measureRef = props.measureRef,
measure = props.measure,
contentRect = props.contentRect,
_props$imgixParams = props.imgixParams,
imgixParams = _props$imgixParams === undefined ? {} : _props$imgixParams,
onLoad = props.onLoad,
disableLibraryParam = props.disableLibraryParam,
src = props.src,
children = props.children,
_props$className = props.className,
className = _props$className === undefined ? "" : _props$className;
var forcedWidth = imgixParams.w,
forcedHeight = imgixParams.h;
var hasDOMDimensions = contentRect.bounds.top != null;
var htmlAttributes = props.htmlAttributes || {};
var dpr = toFixed(2, imgixParams.dpr || global.devicePixelRatio || 1);
var ref = htmlAttributes.ref;
var onRef = function onRef(el) {
measureRef(el);
if (typeof ref === "function") |
};
var _ref = function () {
var bothWidthAndHeightPassed = forcedWidth != null && forcedHeight != null;
if (bothWidthAndHeightPassed) {
return { width: forcedWidth, height: forcedHeight };
}
if (!hasDOMDimensions) {
return { width: undefined, height: undefined };
}
var ar = contentRect.bounds.width / contentRect.bounds.height;
var neitherWidthNorHeightPassed = forcedWidth == null && forcedHeight == null;
if (neitherWidthNorHeightPassed) {
var _width = findNearestWidth(contentRect.bounds.width);
var _height = Math.ceil(_width / ar);
return { width: _width, height: _height };
}
if (forcedWidth != null) {
var _height2 = Math.ceil(forcedWidth / ar);
return { width: forcedWidth, height: _height2 };
} else if (forcedHeight != null) {
var _width2 = Math.ceil(forcedHeight * ar);
return { width: _width2, height: forcedHeight };
}
}(),
width = _ref.width,
height = _ref.height;
var isReady = width != null && height != null;
var commonProps = _extends({}, htmlAttributes);
if (!isReady) {
return _react2.default.createElement(
"div",
_extends({}, commonProps, {
className: "react-imgix-bg-loading " + className,
ref: onRef
}),
children
);
}
var renderedSrc = function () {
var srcOptions = _extends({}, imgixParams, disableLibraryParam ? {} : { ixlib: "react-" + PACKAGE_VERSION }, {
width: width,
height: height,
fit: "crop",
dpr: dpr
});
return (0, _constructUrl2.default)(src, srcOptions);
}();
var style = _extends({}, htmlAttributes.style, {
backgroundImage: "url(" + renderedSrc + ")",
backgroundSize: (htmlAttributes.style || {}).backgroundSize !== undefined ? htmlAttributes.style.backgroundSize : "cover"
});
return _react2.default.createElement(
"div",
_extends({}, commonProps, { className: className, ref: onRef, style: style }),
children
);
};
var Background = (0, _reactMeasure.withContentRect)("bounds")(BackgroundImpl);
exports.Background = Background;
exports.__BackgroundImpl = BackgroundImpl;
//# sourceMappingURL=react-imgix-bg.js.map | {
ref(el);
} | conditional_block |
parameter-editor.component.ts | import { Component, Injector, Inject } from '@angular/core';
import { IGraphNode } from '../../../base-classes/node/NodeModule';
import { InputPort, OutputPort, InputPortTypes, OutputPortTypes } from '../../../base-classes/port/PortModule';
import { Viewer } from '../../../base-classes/viz/Viewer';
import { FlowchartService } from '../../../global-services/flowchart.service';
import {MatDialog, MatDialogRef, MAT_DIALOG_DATA} from '@angular/material';
import {ParameterSettingsDialogComponent} from './parameter-settings-dialog.component';
import {IProcedure, ProcedureFactory, ProcedureTypes} from '../../../base-classes/procedure/ProcedureModule';
@Component({
selector: 'app-parameter-editor',
templateUrl: './parameter-editor.component.html',
styleUrls: ['./parameter-editor.component.scss']
})
export class ParameterEditorComponent extends Viewer{
isVisible: boolean = false;
_node: IGraphNode;
_inputs: InputPort[];
_outputs: OutputPort[];
// shift to iport
inputPortOpts: InputPortTypes[] = [
InputPortTypes.Input,
InputPortTypes.Slider
// InputPortTypes.ColorPicker,
// InputPortTypes.FilePicker,
// InputPortTypes.Dropdown
];
outputPortOpts: OutputPortTypes[] = [
OutputPortTypes.Three,
OutputPortTypes.Text,
OutputPortTypes.Code,
OutputPortTypes.Console
];
constructor(injector: Injector, public dialog: MatDialog){
super(injector, "parameter-editor");
}
reset(){
this._node = undefined;
this._inputs = undefined;
this._outputs = undefined;
this.isVisible = false;
}
portHasFunction(port: InputPort): boolean{
let value = port.getValue();
if(value && value.port !== undefined && value.port.length == 2){
return true;
}
else{
return false;
}
}
deletePort(event, type: string, portIndex: number): void{
event.stopPropagation();
this.flowchartService.deletePort(type, portIndex);
}
updatePortName($event, port: InputPort|OutputPort): void{
let name: string = $event.srcElement.innerText;
// check for validity
name = name.replace(/[^\w]/gi, '');
if(name.trim().length > 0){
// put a timeout on this update or something similar to solve jumpiness
port.setName(name);
this.flowchartService.update();
}
}
updateType(type: InputPortTypes|OutputPortTypes, port: InputPort|OutputPort): void{
port.setType(type);
//defaults
if(type == InputPortTypes.Slider){
port.setOpts({min: 0, max: 100, step: 1});
port.setDefaultValue(50);
}
}
// updateDefaultValue($event, port: InputPort|OutputPort): void{
// let value: string = $event.srcElement.innerText;
// if(value.trim().length > 0){
// port.setDefaultValue(value)
// // put a timeout on this update or something similar to solve jumpiness
// }
// else{
// port.setDefaultValue(undefined)
// }
// this.flowchartService.update();
// }
getInputTypeName(type: InputPortTypes): string{
if(type == InputPortTypes.ColorPicker){
return "Color";
}
else if(type == InputPortTypes.Input){
return "Simple Input";
}
else if(type == InputPortTypes.Dropdown){
return "Dropdown";
}
else if(type == InputPortTypes.FilePicker){
return "File";
}
else if(type == InputPortTypes.Slider){
return "Slider";
}
else{
return "Not Identifiable"
}
}
| (type: OutputPortTypes): string{
if(type == OutputPortTypes.Three){
return "Geometry";
}
else if(type == OutputPortTypes.Text){
return "Text Viewer";
}
else if(type == OutputPortTypes.Code){
return "Code Viewer";
}
else if(type == OutputPortTypes.Console){
return "Console";
}
else{
return "Not Identifiable"
}
}
//
// this update runs when there is a message from other viewers that something changed;
// beware of updating flowchart here - it will go into an unending loop :/
//
update(): void{
this._node = this.flowchartService.getSelectedNode();
if( this._node !== undefined ){
this.isVisible = true;
this._inputs = this._node.getInputs();
this._outputs = this._node.getOutputs();
this.isVisible = true;
}
else{
this.isVisible = false;
}
}
openSettingsDialog(input: InputPort): void{
let dialogRef = this.dialog.open(ParameterSettingsDialogComponent, {
height: '400px',
width: '600px',
data: {
inputPortTypes: this.inputPortOpts,
input: input
}
});
dialogRef.afterClosed().subscribe(result => {
console.log('The dialog was closed');
});
}
addFunctionToProcedure(inp: InputPort): void{
// get functional graph node
let value = inp.getValue().port;
if(value){
let fn_node: IGraphNode = this.flowchartService.getNodes()[value[0]];
let prod: IProcedure = ProcedureFactory.getProcedure(ProcedureTypes.Function, {node: fn_node, port: inp});
this.flowchartService.addProcedure(prod);
}
}
}
| getOutputTypeName | identifier_name |
parameter-editor.component.ts | import { Component, Injector, Inject } from '@angular/core';
import { IGraphNode } from '../../../base-classes/node/NodeModule';
import { InputPort, OutputPort, InputPortTypes, OutputPortTypes } from '../../../base-classes/port/PortModule';
import { Viewer } from '../../../base-classes/viz/Viewer';
import { FlowchartService } from '../../../global-services/flowchart.service';
import {MatDialog, MatDialogRef, MAT_DIALOG_DATA} from '@angular/material';
import {ParameterSettingsDialogComponent} from './parameter-settings-dialog.component';
import {IProcedure, ProcedureFactory, ProcedureTypes} from '../../../base-classes/procedure/ProcedureModule';
@Component({
selector: 'app-parameter-editor',
templateUrl: './parameter-editor.component.html',
styleUrls: ['./parameter-editor.component.scss']
})
export class ParameterEditorComponent extends Viewer{
isVisible: boolean = false;
_node: IGraphNode;
_inputs: InputPort[];
_outputs: OutputPort[];
// shift to iport
inputPortOpts: InputPortTypes[] = [
InputPortTypes.Input,
InputPortTypes.Slider
// InputPortTypes.ColorPicker,
// InputPortTypes.FilePicker,
// InputPortTypes.Dropdown
];
outputPortOpts: OutputPortTypes[] = [
OutputPortTypes.Three,
OutputPortTypes.Text,
OutputPortTypes.Code,
OutputPortTypes.Console
];
constructor(injector: Injector, public dialog: MatDialog){
super(injector, "parameter-editor");
}
reset(){
this._node = undefined;
this._inputs = undefined;
this._outputs = undefined;
this.isVisible = false;
}
portHasFunction(port: InputPort): boolean{
let value = port.getValue();
if(value && value.port !== undefined && value.port.length == 2){
return true;
}
else{
return false;
}
}
deletePort(event, type: string, portIndex: number): void |
updatePortName($event, port: InputPort|OutputPort): void{
let name: string = $event.srcElement.innerText;
// check for validity
name = name.replace(/[^\w]/gi, '');
if(name.trim().length > 0){
// put a timeout on this update or something similar to solve jumpiness
port.setName(name);
this.flowchartService.update();
}
}
updateType(type: InputPortTypes|OutputPortTypes, port: InputPort|OutputPort): void{
port.setType(type);
//defaults
if(type == InputPortTypes.Slider){
port.setOpts({min: 0, max: 100, step: 1});
port.setDefaultValue(50);
}
}
// updateDefaultValue($event, port: InputPort|OutputPort): void{
// let value: string = $event.srcElement.innerText;
// if(value.trim().length > 0){
// port.setDefaultValue(value)
// // put a timeout on this update or something similar to solve jumpiness
// }
// else{
// port.setDefaultValue(undefined)
// }
// this.flowchartService.update();
// }
getInputTypeName(type: InputPortTypes): string{
if(type == InputPortTypes.ColorPicker){
return "Color";
}
else if(type == InputPortTypes.Input){
return "Simple Input";
}
else if(type == InputPortTypes.Dropdown){
return "Dropdown";
}
else if(type == InputPortTypes.FilePicker){
return "File";
}
else if(type == InputPortTypes.Slider){
return "Slider";
}
else{
return "Not Identifiable"
}
}
getOutputTypeName(type: OutputPortTypes): string{
if(type == OutputPortTypes.Three){
return "Geometry";
}
else if(type == OutputPortTypes.Text){
return "Text Viewer";
}
else if(type == OutputPortTypes.Code){
return "Code Viewer";
}
else if(type == OutputPortTypes.Console){
return "Console";
}
else{
return "Not Identifiable"
}
}
//
// this update runs when there is a message from other viewers that something changed;
// beware of updating flowchart here - it will go into an unending loop :/
//
update(): void{
this._node = this.flowchartService.getSelectedNode();
if( this._node !== undefined ){
this.isVisible = true;
this._inputs = this._node.getInputs();
this._outputs = this._node.getOutputs();
this.isVisible = true;
}
else{
this.isVisible = false;
}
}
openSettingsDialog(input: InputPort): void{
let dialogRef = this.dialog.open(ParameterSettingsDialogComponent, {
height: '400px',
width: '600px',
data: {
inputPortTypes: this.inputPortOpts,
input: input
}
});
dialogRef.afterClosed().subscribe(result => {
console.log('The dialog was closed');
});
}
addFunctionToProcedure(inp: InputPort): void{
// get functional graph node
let value = inp.getValue().port;
if(value){
let fn_node: IGraphNode = this.flowchartService.getNodes()[value[0]];
let prod: IProcedure = ProcedureFactory.getProcedure(ProcedureTypes.Function, {node: fn_node, port: inp});
this.flowchartService.addProcedure(prod);
}
}
}
| {
event.stopPropagation();
this.flowchartService.deletePort(type, portIndex);
} | identifier_body |
parameter-editor.component.ts | import { Component, Injector, Inject } from '@angular/core';
import { IGraphNode } from '../../../base-classes/node/NodeModule';
import { InputPort, OutputPort, InputPortTypes, OutputPortTypes } from '../../../base-classes/port/PortModule';
import { Viewer } from '../../../base-classes/viz/Viewer';
import { FlowchartService } from '../../../global-services/flowchart.service';
import {MatDialog, MatDialogRef, MAT_DIALOG_DATA} from '@angular/material';
import {ParameterSettingsDialogComponent} from './parameter-settings-dialog.component';
import {IProcedure, ProcedureFactory, ProcedureTypes} from '../../../base-classes/procedure/ProcedureModule';
@Component({
selector: 'app-parameter-editor',
templateUrl: './parameter-editor.component.html',
styleUrls: ['./parameter-editor.component.scss']
})
export class ParameterEditorComponent extends Viewer{
isVisible: boolean = false;
_node: IGraphNode;
_inputs: InputPort[];
_outputs: OutputPort[];
// shift to iport
inputPortOpts: InputPortTypes[] = [
InputPortTypes.Input,
InputPortTypes.Slider
// InputPortTypes.ColorPicker,
// InputPortTypes.FilePicker,
// InputPortTypes.Dropdown
];
outputPortOpts: OutputPortTypes[] = [
OutputPortTypes.Three,
OutputPortTypes.Text,
OutputPortTypes.Code,
OutputPortTypes.Console
];
constructor(injector: Injector, public dialog: MatDialog){
super(injector, "parameter-editor");
}
reset(){
this._node = undefined;
this._inputs = undefined;
this._outputs = undefined;
this.isVisible = false;
}
portHasFunction(port: InputPort): boolean{
let value = port.getValue();
if(value && value.port !== undefined && value.port.length == 2){
return true;
}
else{
return false;
}
}
deletePort(event, type: string, portIndex: number): void{
event.stopPropagation();
this.flowchartService.deletePort(type, portIndex);
}
updatePortName($event, port: InputPort|OutputPort): void{
let name: string = $event.srcElement.innerText;
// check for validity
name = name.replace(/[^\w]/gi, '');
if(name.trim().length > 0){
// put a timeout on this update or something similar to solve jumpiness
port.setName(name);
this.flowchartService.update();
}
}
updateType(type: InputPortTypes|OutputPortTypes, port: InputPort|OutputPort): void{
port.setType(type);
//defaults
if(type == InputPortTypes.Slider){
port.setOpts({min: 0, max: 100, step: 1});
port.setDefaultValue(50);
}
}
// updateDefaultValue($event, port: InputPort|OutputPort): void{
// let value: string = $event.srcElement.innerText;
// if(value.trim().length > 0){
// port.setDefaultValue(value)
// // put a timeout on this update or something similar to solve jumpiness
// }
// else{
// port.setDefaultValue(undefined)
// }
// this.flowchartService.update();
// }
getInputTypeName(type: InputPortTypes): string{
if(type == InputPortTypes.ColorPicker){
return "Color";
}
else if(type == InputPortTypes.Input){
return "Simple Input";
}
else if(type == InputPortTypes.Dropdown){
return "Dropdown";
}
else if(type == InputPortTypes.FilePicker){
return "File";
}
else if(type == InputPortTypes.Slider) |
else{
return "Not Identifiable"
}
}
getOutputTypeName(type: OutputPortTypes): string{
if(type == OutputPortTypes.Three){
return "Geometry";
}
else if(type == OutputPortTypes.Text){
return "Text Viewer";
}
else if(type == OutputPortTypes.Code){
return "Code Viewer";
}
else if(type == OutputPortTypes.Console){
return "Console";
}
else{
return "Not Identifiable"
}
}
//
// this update runs when there is a message from other viewers that something changed;
// beware of updating flowchart here - it will go into an unending loop :/
//
update(): void{
this._node = this.flowchartService.getSelectedNode();
if( this._node !== undefined ){
this.isVisible = true;
this._inputs = this._node.getInputs();
this._outputs = this._node.getOutputs();
this.isVisible = true;
}
else{
this.isVisible = false;
}
}
openSettingsDialog(input: InputPort): void{
let dialogRef = this.dialog.open(ParameterSettingsDialogComponent, {
height: '400px',
width: '600px',
data: {
inputPortTypes: this.inputPortOpts,
input: input
}
});
dialogRef.afterClosed().subscribe(result => {
console.log('The dialog was closed');
});
}
addFunctionToProcedure(inp: InputPort): void{
// get functional graph node
let value = inp.getValue().port;
if(value){
let fn_node: IGraphNode = this.flowchartService.getNodes()[value[0]];
let prod: IProcedure = ProcedureFactory.getProcedure(ProcedureTypes.Function, {node: fn_node, port: inp});
this.flowchartService.addProcedure(prod);
}
}
}
| {
return "Slider";
} | conditional_block |
parameter-editor.component.ts | import { Component, Injector, Inject } from '@angular/core';
import { IGraphNode } from '../../../base-classes/node/NodeModule';
import { InputPort, OutputPort, InputPortTypes, OutputPortTypes } from '../../../base-classes/port/PortModule';
import { Viewer } from '../../../base-classes/viz/Viewer';
import { FlowchartService } from '../../../global-services/flowchart.service';
import {MatDialog, MatDialogRef, MAT_DIALOG_DATA} from '@angular/material';
import {ParameterSettingsDialogComponent} from './parameter-settings-dialog.component';
import {IProcedure, ProcedureFactory, ProcedureTypes} from '../../../base-classes/procedure/ProcedureModule';
@Component({
selector: 'app-parameter-editor',
templateUrl: './parameter-editor.component.html',
styleUrls: ['./parameter-editor.component.scss']
})
export class ParameterEditorComponent extends Viewer{
isVisible: boolean = false;
_node: IGraphNode;
_inputs: InputPort[];
_outputs: OutputPort[];
// shift to iport
inputPortOpts: InputPortTypes[] = [
InputPortTypes.Input,
InputPortTypes.Slider
// InputPortTypes.ColorPicker,
// InputPortTypes.FilePicker,
// InputPortTypes.Dropdown
]; | OutputPortTypes.Text,
OutputPortTypes.Code,
OutputPortTypes.Console
];
constructor(injector: Injector, public dialog: MatDialog){
super(injector, "parameter-editor");
}
reset(){
this._node = undefined;
this._inputs = undefined;
this._outputs = undefined;
this.isVisible = false;
}
portHasFunction(port: InputPort): boolean{
let value = port.getValue();
if(value && value.port !== undefined && value.port.length == 2){
return true;
}
else{
return false;
}
}
deletePort(event, type: string, portIndex: number): void{
event.stopPropagation();
this.flowchartService.deletePort(type, portIndex);
}
updatePortName($event, port: InputPort|OutputPort): void{
let name: string = $event.srcElement.innerText;
// check for validity
name = name.replace(/[^\w]/gi, '');
if(name.trim().length > 0){
// put a timeout on this update or something similar to solve jumpiness
port.setName(name);
this.flowchartService.update();
}
}
updateType(type: InputPortTypes|OutputPortTypes, port: InputPort|OutputPort): void{
port.setType(type);
//defaults
if(type == InputPortTypes.Slider){
port.setOpts({min: 0, max: 100, step: 1});
port.setDefaultValue(50);
}
}
// updateDefaultValue($event, port: InputPort|OutputPort): void{
// let value: string = $event.srcElement.innerText;
// if(value.trim().length > 0){
// port.setDefaultValue(value)
// // put a timeout on this update or something similar to solve jumpiness
// }
// else{
// port.setDefaultValue(undefined)
// }
// this.flowchartService.update();
// }
getInputTypeName(type: InputPortTypes): string{
if(type == InputPortTypes.ColorPicker){
return "Color";
}
else if(type == InputPortTypes.Input){
return "Simple Input";
}
else if(type == InputPortTypes.Dropdown){
return "Dropdown";
}
else if(type == InputPortTypes.FilePicker){
return "File";
}
else if(type == InputPortTypes.Slider){
return "Slider";
}
else{
return "Not Identifiable"
}
}
getOutputTypeName(type: OutputPortTypes): string{
if(type == OutputPortTypes.Three){
return "Geometry";
}
else if(type == OutputPortTypes.Text){
return "Text Viewer";
}
else if(type == OutputPortTypes.Code){
return "Code Viewer";
}
else if(type == OutputPortTypes.Console){
return "Console";
}
else{
return "Not Identifiable"
}
}
//
// this update runs when there is a message from other viewers that something changed;
// beware of updating flowchart here - it will go into an unending loop :/
//
update(): void{
this._node = this.flowchartService.getSelectedNode();
if( this._node !== undefined ){
this.isVisible = true;
this._inputs = this._node.getInputs();
this._outputs = this._node.getOutputs();
this.isVisible = true;
}
else{
this.isVisible = false;
}
}
openSettingsDialog(input: InputPort): void{
let dialogRef = this.dialog.open(ParameterSettingsDialogComponent, {
height: '400px',
width: '600px',
data: {
inputPortTypes: this.inputPortOpts,
input: input
}
});
dialogRef.afterClosed().subscribe(result => {
console.log('The dialog was closed');
});
}
addFunctionToProcedure(inp: InputPort): void{
// get functional graph node
let value = inp.getValue().port;
if(value){
let fn_node: IGraphNode = this.flowchartService.getNodes()[value[0]];
let prod: IProcedure = ProcedureFactory.getProcedure(ProcedureTypes.Function, {node: fn_node, port: inp});
this.flowchartService.addProcedure(prod);
}
}
} |
outputPortOpts: OutputPortTypes[] = [
OutputPortTypes.Three, | random_line_split |
check_const.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Verifies that the types and values of const and static items
// are safe. The rules enforced by this module are:
//
// - For each *mutable* static item, it checks that its **type**:
// - doesn't have a destructor
// - doesn't own an owned pointer
//
// - For each *immutable* static item, it checks that its **value**:
// - doesn't own owned, managed pointers
// - doesn't contain a struct literal or a call to an enum variant / struct constructor where
// - the type of the struct/enum has a dtor
//
// Rules Enforced Elsewhere:
// - It's not possible to take the address of a static item with unsafe interior. This is enforced
// by borrowck::gather_loans
use middle::const_eval;
use middle::def;
use middle::expr_use_visitor as euv;
use middle::infer;
use middle::mem_categorization as mc;
use middle::traits;
use middle::ty::{self, Ty};
use util::nodemap::NodeMap;
use util::ppaux;
use syntax::ast;
use syntax::codemap::Span;
use syntax::print::pprust;
use syntax::visit::{self, Visitor};
use std::collections::hash_map::Entry;
// Const qualification, from partial to completely promotable.
bitflags! {
    #[derive(RustcEncodable, RustcDecodable)]
    flags ConstQualif: u8 {
        // Const rvalue which can be placed behind a reference.
        const PURE_CONST = 0b000000,
        // Inner mutability (can not be placed behind a reference) or behind
        // &mut in a non-global expression. Can be copied from static memory.
        const MUTABLE_MEM = 0b000001,
        // Constant value with a type that implements Drop. Can be copied
        // from static memory, similar to MUTABLE_MEM.
        const NEEDS_DROP = 0b000010,
        // Even if the value can be placed in static memory, copying it from
        // there is more expensive than in-place instantiation, and/or it may
        // be too large. This applies to [T; N] and everything containing it.
        // N.B.: references need to clear this flag to not end up on the stack.
        const PREFER_IN_PLACE = 0b000100,
        // May use more than 0 bytes of memory, doesn't impact the constness
        // directly, but is not allowed to be borrowed mutably in a constant.
        const NON_ZERO_SIZED = 0b001000,
        // Actually borrowed, has to always be in static memory. Does not
        // propagate, and requires the expression to behave like a 'static
        // lvalue. The set of expressions with this flag is the minimum
        // that have to be promoted.
        const HAS_STATIC_BORROWS = 0b010000,
        // Invalid const for miscellaneous reasons (e.g. not implemented).
        const NOT_CONST = 0b100000,
        // Borrowing the expression won't produce &'static T if any of these
        // bits are set, though the value could be copied from static memory
        // if `NOT_CONST` isn't set.
        // (Derived mask — a union of other flags, not a distinct bit.)
        const NON_STATIC_BORROWS = MUTABLE_MEM.bits | NEEDS_DROP.bits | NOT_CONST.bits
    }
}
// Which kind of constant context an expression is being checked in.
#[derive(Copy, Eq, PartialEq)]
enum Mode {
    // Initializer of a `const` item.
    Const,
    // Initializer of an immutable `static` item.
    Static,
    // Initializer of a `static mut` item.
    StaticMut,
    // An expression that occurs outside of any constant context
    // (i.e. `const`, `static`, array lengths, etc.). The value
    // can be variable at runtime, but will be promotable to
    // static memory if we can prove it is actually constant.
    Var,
}
// AST visitor that performs const/static checking and computes, per
// expression, the ConstQualif flags cached in tcx.const_qualif_map.
struct CheckCrateVisitor<'a, 'tcx: 'a> {
    tcx: &'a ty::ctxt<'tcx>,
    // Constant context currently being checked.
    mode: Mode,
    // Qualification flags accumulated for the expression under visit.
    qualif: ConstQualif,
    // Borrows recorded against rvalue expression ids by the euv::Delegate
    // callbacks; consumed by visit_expr when the expression is reached.
    rvalue_borrows: NodeMap<ast::Mutability>
}
impl<'a, 'tcx> CheckCrateVisitor<'a, 'tcx> {
fn with_mode<F, R>(&mut self, mode: Mode, f: F) -> R where
F: FnOnce(&mut CheckCrateVisitor<'a, 'tcx>) -> R,
{
let (old_mode, old_qualif) = (self.mode, self.qualif);
self.mode = mode;
self.qualif = PURE_CONST;
let r = f(self);
self.mode = old_mode;
self.qualif = old_qualif;
r
}
fn with_euv<'b, F, R>(&'b mut self, item_id: Option<ast::NodeId>, f: F) -> R where
F: for<'t> FnOnce(&mut euv::ExprUseVisitor<'b, 't, 'tcx,
ty::ParameterEnvironment<'a, 'tcx>>) -> R,
{
let param_env = match item_id {
Some(item_id) => ty::ParameterEnvironment::for_item(self.tcx, item_id),
None => ty::empty_parameter_environment(self.tcx)
};
f(&mut euv::ExprUseVisitor::new(self, ¶m_env))
}
fn global_expr(&mut self, mode: Mode, expr: &ast::Expr) -> ConstQualif {
assert!(mode != Mode::Var);
match self.tcx.const_qualif_map.borrow_mut().entry(expr.id) {
Entry::Occupied(entry) => return *entry.get(),
Entry::Vacant(entry) => {
// Prevent infinite recursion on re-entry.
entry.insert(PURE_CONST);
}
}
self.with_mode(mode, |this| {
this.with_euv(None, |euv| euv.consume_expr(expr));
this.visit_expr(expr);
this.qualif
})
}
fn add_qualif(&mut self, qualif: ConstQualif) {
self.qualif = self.qualif | qualif;
}
fn record_borrow(&mut self, id: ast::NodeId, mutbl: ast::Mutability) {
match self.rvalue_borrows.entry(id) {
Entry::Occupied(mut entry) => {
// Merge the two borrows, taking the most demanding
// one, mutability-wise.
if mutbl == ast::MutMutable {
entry.insert(mutbl);
}
}
Entry::Vacant(entry) => {
entry.insert(mutbl);
}
}
}
fn msg(&self) -> &'static str {
match self.mode {
Mode::Const => "constant",
Mode::StaticMut | Mode::Static => "static",
Mode::Var => unreachable!(),
} | let tcontents = ty::type_contents(self.tcx, node_ty);
let suffix = if tcontents.has_dtor() {
"destructors"
} else if tcontents.owns_owned() {
"owned pointers"
} else {
return
};
self.tcx.sess.span_err(e.span, &format!("mutable statics are not allowed \
to have {}", suffix));
}
fn check_static_type(&self, e: &ast::Expr) {
let ty = ty::node_id_to_type(self.tcx, e.id);
let infcx = infer::new_infer_ctxt(self.tcx);
let mut fulfill_cx = traits::FulfillmentContext::new();
let cause = traits::ObligationCause::new(e.span, e.id, traits::SharedStatic);
fulfill_cx.register_builtin_bound(&infcx, ty, ty::BoundSync, cause);
let env = ty::empty_parameter_environment(self.tcx);
match fulfill_cx.select_all_or_error(&infcx, &env) {
Ok(()) => { },
Err(ref errors) => {
traits::report_fulfillment_errors(&infcx, errors);
}
}
}
}
impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> {
    // Dispatches each item kind to the matching constant-context check.
    fn visit_item(&mut self, i: &ast::Item) {
        debug!("visit_item(item={})", pprust::item_to_string(i));
        match i.node {
            ast::ItemStatic(_, ast::MutImmutable, ref expr) => {
                self.check_static_type(&**expr);
                self.global_expr(Mode::Static, &**expr);
            }
            ast::ItemStatic(_, ast::MutMutable, ref expr) => {
                self.check_static_mut_type(&**expr);
                self.global_expr(Mode::StaticMut, &**expr);
            }
            ast::ItemConst(_, ref expr) => {
                self.global_expr(Mode::Const, &**expr);
            }
            ast::ItemEnum(ref enum_definition, _) => {
                // Explicit enum discriminants are constant expressions too.
                for var in &enum_definition.variants {
                    if let Some(ref ex) = var.node.disr_expr {
                        self.global_expr(Mode::Const, &**ex);
                    }
                }
            }
            _ => {
                self.with_mode(Mode::Var, |v| visit::walk_item(v, i));
            }
        }
    }
    // Function bodies are always non-constant contexts (Mode::Var).
    fn visit_fn(&mut self,
                fk: visit::FnKind<'v>,
                fd: &'v ast::FnDecl,
                b: &'v ast::Block,
                s: Span,
                fn_id: ast::NodeId) {
        assert!(self.mode == Mode::Var);
        self.with_euv(Some(fn_id), |euv| euv.walk_fn(fd, b));
        visit::walk_fn(self, fk, fd, b, s);
    }
    // Literal and range patterns embed constant expressions.
    fn visit_pat(&mut self, p: &ast::Pat) {
        match p.node {
            ast::PatLit(ref lit) => {
                self.global_expr(Mode::Const, &**lit);
            }
            ast::PatRange(ref start, ref end) => {
                self.global_expr(Mode::Const, &**start);
                self.global_expr(Mode::Const, &**end);
            }
            _ => visit::walk_pat(self, p)
        }
    }
    // Computes the qualification of `ex`, caches it in const_qualif_map,
    // and merges it (minus non-propagating flags) into the parent's qualif.
    fn visit_expr(&mut self, ex: &ast::Expr) {
        let mut outer = self.qualif;
        self.qualif = PURE_CONST;
        let node_ty = ty::node_id_to_type(self.tcx, ex.id);
        check_expr(self, ex, node_ty);
        // Special-case some expressions to avoid certain flags bubbling up.
        match ex.node {
            ast::ExprCall(ref callee, ref args) => {
                for arg in args.iter() {
                    self.visit_expr(&**arg)
                }
                let inner = self.qualif;
                self.visit_expr(&**callee);
                // The callee's size doesn't count in the call.
                let added = self.qualif - inner;
                self.qualif = inner | (added - NON_ZERO_SIZED);
            }
            ast::ExprRepeat(ref element, _) => {
                self.visit_expr(&**element);
                // The count is checked elsewhere (typeck).
                let count = match node_ty.sty {
                    ty::ty_vec(_, Some(n)) => n,
                    _ => unreachable!()
                };
                // [element; 0] is always zero-sized.
                if count == 0 {
                    self.qualif = self.qualif - (NON_ZERO_SIZED | PREFER_IN_PLACE);
                }
            }
            ast::ExprMatch(ref discr, ref arms, _) => {
                // Compute the most demanding borrow from all the arms'
                // patterns and set that on the discriminator.
                let mut borrow = None;
                for pat in arms.iter().flat_map(|arm| arm.pats.iter()) {
                    let pat_borrow = self.rvalue_borrows.remove(&pat.id);
                    match (borrow, pat_borrow) {
                        (None, _) | (_, Some(ast::MutMutable)) => {
                            borrow = pat_borrow;
                        }
                        _ => {}
                    }
                }
                if let Some(mutbl) = borrow {
                    self.record_borrow(discr.id, mutbl);
                }
                visit::walk_expr(self, ex);
            }
            // Division by zero and overflow checking.
            ast::ExprBinary(op, _, _) => {
                visit::walk_expr(self, ex);
                let div_or_rem = op.node == ast::BiDiv || op.node == ast::BiRem;
                match node_ty.sty {
                    ty::ty_uint(_) | ty::ty_int(_) if div_or_rem => {
                        if !self.qualif.intersects(NOT_CONST) {
                            match const_eval::eval_const_expr_partial(self.tcx, ex, None) {
                                Ok(_) => {}
                                Err(msg) => {
                                    span_err!(self.tcx.sess, ex.span, E0020,
                                              "{} in a constant expression", msg)
                                }
                            }
                        }
                    }
                    _ => {}
                }
            }
            _ => visit::walk_expr(self, ex)
        }
        // Handle borrows on (or inside the autorefs of) this expression.
        match self.rvalue_borrows.remove(&ex.id) {
            Some(ast::MutImmutable) => {
                // Constants cannot be borrowed if they contain interior mutability as
                // it means that our "silent insertion of statics" could change
                // initializer values (very bad).
                // If the type doesn't have interior mutability, then `MUTABLE_MEM` has
                // propagated from another error, so erroring again would be just noise.
                let tc = ty::type_contents(self.tcx, node_ty);
                if self.qualif.intersects(MUTABLE_MEM) && tc.interior_unsafe() {
                    outer = outer | NOT_CONST;
                    if self.mode != Mode::Var {
                        self.tcx.sess.span_err(ex.span,
                            "cannot borrow a constant which contains \
                             interior mutability, create a static instead");
                    }
                }
                // If the reference has to be 'static, avoid in-place initialization
                // as that will end up pointing to the stack instead.
                if !self.qualif.intersects(NON_STATIC_BORROWS) {
                    self.qualif = self.qualif - PREFER_IN_PLACE;
                    self.add_qualif(HAS_STATIC_BORROWS);
                }
            }
            Some(ast::MutMutable) => {
                // `&mut expr` means expr could be mutated, unless it's zero-sized.
                if self.qualif.intersects(NON_ZERO_SIZED) {
                    if self.mode == Mode::Var {
                        outer = outer | NOT_CONST;
                        self.add_qualif(MUTABLE_MEM);
                    } else {
                        span_err!(self.tcx.sess, ex.span, E0017,
                                  "references in {}s may only refer \
                                   to immutable values", self.msg())
                    }
                }
                if !self.qualif.intersects(NON_STATIC_BORROWS) {
                    self.add_qualif(HAS_STATIC_BORROWS);
                }
            }
            None => {}
        }
        self.tcx.const_qualif_map.borrow_mut().insert(ex.id, self.qualif);
        // Don't propagate certain flags.
        self.qualif = outer | (self.qualif - HAS_STATIC_BORROWS);
    }
}
/// This function is used to enforce the constraints on
/// const/static items. It walks through the *value*
/// of the item walking down the expression and evaluating
/// every nested expression. If the expression is not part
/// of a const/static item, it is qualified for promotion
/// instead of producing errors.
fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>,
                        e: &ast::Expr, node_ty: Ty<'tcx>) {
    // Any value whose type has a destructor needs NEEDS_DROP, and is
    // an outright error inside const/static items.
    match node_ty.sty {
        ty::ty_struct(did, _) |
        ty::ty_enum(did, _) if ty::has_dtor(v.tcx, did) => {
            v.add_qualif(NEEDS_DROP);
            if v.mode != Mode::Var {
                v.tcx.sess.span_err(e.span,
                    &format!("{}s are not allowed to have destructors",
                             v.msg()));
            }
        }
        _ => {}
    }
    let method_call = ty::MethodCall::expr(e.id);
    match e.node {
        // Overloaded operators resolve to method calls: not const.
        ast::ExprUnary(..) |
        ast::ExprBinary(..) |
        ast::ExprIndex(..) if v.tcx.method_map.borrow().contains_key(&method_call) => {
            v.add_qualif(NOT_CONST);
            if v.mode != Mode::Var {
                span_err!(v.tcx.sess, e.span, E0011,
                          "user-defined operators are not allowed in {}s", v.msg());
            }
        }
        // Heap allocation is never const.
        ast::ExprBox(..) |
        ast::ExprUnary(ast::UnUniq, _) => {
            v.add_qualif(NOT_CONST);
            if v.mode != Mode::Var {
                span_err!(v.tcx.sess, e.span, E0010,
                          "allocations are not allowed in {}s", v.msg());
            }
        }
        ast::ExprUnary(ast::UnDeref, ref ptr) => {
            match ty::node_id_to_type(v.tcx, ptr.id).sty {
                ty::ty_ptr(_) => {
                    // This shouldn't be allowed in constants at all.
                    v.add_qualif(NOT_CONST);
                }
                _ => {}
            }
        }
        ast::ExprCast(ref from, _) => {
            // Only numeric casts, casts to raw pointers and fn-item to
            // fn-pointer casts are permitted in constants.
            let toty = ty::expr_ty(v.tcx, e);
            let fromty = ty::expr_ty(v.tcx, &**from);
            let is_legal_cast =
                ty::type_is_numeric(toty) ||
                ty::type_is_unsafe_ptr(toty) ||
                (ty::type_is_bare_fn(toty) && ty::type_is_bare_fn_item(fromty));
            if !is_legal_cast {
                v.add_qualif(NOT_CONST);
                if v.mode != Mode::Var {
                    span_err!(v.tcx.sess, e.span, E0012,
                              "can not cast to `{}` in {}s",
                              ppaux::ty_to_string(v.tcx, toty), v.msg());
                }
            }
            if ty::type_is_unsafe_ptr(fromty) && ty::type_is_numeric(toty) {
                v.add_qualif(NOT_CONST);
                if v.mode != Mode::Var {
                    span_err!(v.tcx.sess, e.span, E0018,
                              "can not cast a pointer to an integer in {}s", v.msg());
                }
            }
        }
        ast::ExprPath(_) | ast::ExprQPath(_) => {
            // Paths may only refer to constants, constructors or functions.
            let def = v.tcx.def_map.borrow().get(&e.id).cloned();
            match def {
                Some(def::DefVariant(_, _, _)) => {
                    // Count the discriminator or function pointer.
                    v.add_qualif(NON_ZERO_SIZED);
                }
                Some(def::DefStruct(_)) => {
                    if let ty::ty_bare_fn(..) = node_ty.sty {
                        // Count the function pointer.
                        v.add_qualif(NON_ZERO_SIZED);
                    }
                }
                Some(def::DefFn(..)) |
                Some(def::DefStaticMethod(..)) | Some(def::DefMethod(..)) => {
                    // Count the function pointer.
                    v.add_qualif(NON_ZERO_SIZED);
                }
                Some(def::DefStatic(..)) => {
                    match v.mode {
                        Mode::Static | Mode::StaticMut => {}
                        Mode::Const => {
                            span_err!(v.tcx.sess, e.span, E0013,
                                "constants cannot refer to other statics, \
                                 insert an intermediate constant instead");
                        }
                        Mode::Var => v.add_qualif(NOT_CONST)
                    }
                }
                Some(def::DefConst(did)) => {
                    // Referenced constants contribute their own qualif.
                    if let Some(expr) = const_eval::lookup_const_by_id(v.tcx, did) {
                        let inner = v.global_expr(Mode::Const, expr);
                        v.add_qualif(inner);
                    } else {
                        v.tcx.sess.span_bug(e.span, "DefConst doesn't point \
                                                     to an ItemConst");
                    }
                }
                def => {
                    v.add_qualif(NOT_CONST);
                    if v.mode != Mode::Var {
                        debug!("(checking const) found bad def: {:?}", def);
                        span_err!(v.tcx.sess, e.span, E0014,
                                  "paths in {}s may only refer to constants \
                                   or functions", v.msg());
                    }
                }
            }
        }
        ast::ExprCall(ref callee, _) => {
            // Peel parens and block tail expressions to find the real callee;
            // only struct/enum-variant constructors may be called.
            let mut callee = &**callee;
            loop {
                callee = match callee.node {
                    ast::ExprParen(ref inner) => &**inner,
                    ast::ExprBlock(ref block) => match block.expr {
                        Some(ref tail) => &**tail,
                        None => break
                    },
                    _ => break
                };
            }
            let def = v.tcx.def_map.borrow().get(&callee.id).cloned();
            match def {
                Some(def::DefStruct(..)) => {}
                Some(def::DefVariant(..)) => {
                    // Count the discriminator.
                    v.add_qualif(NON_ZERO_SIZED);
                }
                _ => {
                    v.add_qualif(NOT_CONST);
                    if v.mode != Mode::Var {
                        span_err!(v.tcx.sess, e.span, E0015,
                                  "function calls in {}s are limited to \
                                   struct and enum constructors", v.msg());
                    }
                }
            }
        }
        ast::ExprBlock(ref block) => {
            // Check all statements in the block
            let mut block_span_err = |span| {
                v.add_qualif(NOT_CONST);
                if v.mode != Mode::Var {
                    span_err!(v.tcx.sess, span, E0016,
                              "blocks in {}s are limited to items and \
                               tail expressions", v.msg());
                }
            };
            for stmt in &block.stmts {
                match stmt.node {
                    ast::StmtDecl(ref decl, _) => {
                        match decl.node {
                            ast::DeclLocal(_) => block_span_err(decl.span),
                            // Item statements are allowed
                            ast::DeclItem(_) => {}
                        }
                    }
                    ast::StmtExpr(ref expr, _) => block_span_err(expr.span),
                    ast::StmtSemi(ref semi, _) => block_span_err(semi.span),
                    ast::StmtMac(..) => {
                        v.tcx.sess.span_bug(e.span, "unexpanded statement \
                                                     macro in const?!")
                    }
                }
            }
        }
        ast::ExprStruct(..) => {
            // UnsafeCell is the interior-mutability marker type.
            let did = v.tcx.def_map.borrow().get(&e.id).map(|def| def.def_id());
            if did == v.tcx.lang_items.unsafe_cell_type() {
                v.add_qualif(MUTABLE_MEM);
            }
        }
        ast::ExprLit(_) |
        ast::ExprAddrOf(..) => {
            v.add_qualif(NON_ZERO_SIZED);
        }
        ast::ExprRepeat(..) => {
            v.add_qualif(PREFER_IN_PLACE);
        }
        ast::ExprClosure(..) => {
            // Paths in constant constexts cannot refer to local variables,
            // as there are none, and thus closures can't have upvars there.
            if ty::with_freevars(v.tcx, e.id, |fv| !fv.is_empty()) {
                assert!(v.mode == Mode::Var,
                        "global closures can't capture anything");
                v.add_qualif(NOT_CONST);
            }
        }
        // Expressions handled entirely by their subexpressions / the
        // operator checks above.
        ast::ExprUnary(..) |
        ast::ExprBinary(..) |
        ast::ExprIndex(..) |
        ast::ExprField(..) |
        ast::ExprTupField(..) |
        ast::ExprVec(_) |
        ast::ExprParen(..) |
        ast::ExprTup(..) => {}
        // Conditional control flow (possible to implement).
        ast::ExprMatch(..) |
        ast::ExprIf(..) |
        ast::ExprIfLet(..) |
        // Loops (not very meaningful in constants).
        ast::ExprWhile(..) |
        ast::ExprWhileLet(..) |
        ast::ExprForLoop(..) |
        ast::ExprLoop(..) |
        // More control flow (also not very meaningful).
        ast::ExprBreak(_) |
        ast::ExprAgain(_) |
        ast::ExprRet(_) |
        // Miscellaneous expressions that could be implemented.
        ast::ExprRange(..) |
        // Various other expressions.
        ast::ExprMethodCall(..) |
        ast::ExprAssign(..) |
        ast::ExprAssignOp(..) |
        ast::ExprInlineAsm(_) |
        ast::ExprMac(_) => {
            v.add_qualif(NOT_CONST);
            if v.mode != Mode::Var {
                span_err!(v.tcx.sess, e.span, E0019,
                          "{} contains unimplemented expression type", v.msg());
            }
        }
    }
}
/// Entry point: const-checks the whole crate and aborts compilation
/// if any errors were reported.
pub fn check_crate(tcx: &ty::ctxt) {
    // Start outside any constant context, with the most conservative qualif.
    let mut visitor = CheckCrateVisitor {
        tcx: tcx,
        mode: Mode::Var,
        qualif: NOT_CONST,
        rvalue_borrows: NodeMap()
    };
    visit::walk_crate(&mut visitor, tcx.map.krate());
    tcx.sess.abort_if_errors();
}
impl<'a, 'tcx> euv::Delegate<'tcx> for CheckCrateVisitor<'a, 'tcx> {
fn consume(&mut self,
_consume_id: ast::NodeId,
consume_span: Span,
cmt: mc::cmt,
_mode: euv::ConsumeMode) {
let mut cur = &cmt;
loop {
match cur.cat {
mc::cat_static_item => {
if self.mode != Mode::Var {
// statics cannot be consumed by value at any time, that would imply
// that they're an initializer (what a const is for) or kept in sync
// over time (not feasible), so deny it outright.
self.tcx.sess.span_err(consume_span,
"cannot refer to other statics by value, use the \
address-of operator or a constant instead");
}
break;
}
mc::cat_deref(ref cmt, _, _) |
mc::cat_downcast(ref cmt, _) |
mc::cat_interior(ref cmt, _) => cur = cmt,
mc::cat_rvalue(..) |
mc::cat_upvar(..) |
mc::cat_local(..) => break
}
}
}
fn borrow(&mut self,
borrow_id: ast::NodeId,
borrow_span: Span,
cmt: mc::cmt<'tcx>,
_loan_region: ty::Region,
bk: ty::BorrowKind,
loan_cause: euv::LoanCause) {
let mut cur = &cmt;
let mut is_interior = false;
loop {
match cur.cat {
mc::cat_rvalue(..) => {
if loan_cause == euv::MatchDiscriminant {
// Ignore the dummy immutable borrow created by EUV.
break;
}
let mutbl = bk.to_mutbl_lossy();
if mutbl == ast::MutMutable && self.mode == Mode::StaticMut {
// Mutable slices are the only `&mut` allowed in globals,
// but only in `static mut`, nowhere else.
match cmt.ty.sty {
ty::ty_vec(_, _) => break,
_ => {}
}
}
self.record_borrow(borrow_id, mutbl);
break;
}
mc::cat_static_item => {
if is_interior && self.mode != Mode::Var {
// Borrowed statics can specifically *only* have their address taken,
// not any number of other borrows such as borrowing fields, reading
// elements of an array, etc.
self.tcx.sess.span_err(borrow_span,
"cannot refer to the interior of another \
static, use a constant instead");
}
break;
}
mc::cat_deref(ref cmt, _, _) |
mc::cat_downcast(ref cmt, _) |
mc::cat_interior(ref cmt, _) => {
is_interior = true;
cur = cmt;
}
mc::cat_upvar(..) |
mc::cat_local(..) => break
}
}
}
fn decl_without_init(&mut self,
_id: ast::NodeId,
_span: Span) {}
fn mutate(&mut self,
_assignment_id: ast::NodeId,
_assignment_span: Span,
_assignee_cmt: mc::cmt,
_mode: euv::MutateMode) {}
fn matched_pat(&mut self,
_: &ast::Pat,
_: mc::cmt,
_: euv::MatchMode) {}
fn consume_pat(&mut self,
_consume_pat: &ast::Pat,
_cmt: mc::cmt,
_mode: euv::ConsumeMode) {}
} | }
fn check_static_mut_type(&self, e: &ast::Expr) {
let node_ty = ty::node_id_to_type(self.tcx, e.id); | random_line_split |
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Verifies that the types and values of const and static items
// are safe. The rules enforced by this module are:
//
// - For each *mutable* static item, it checks that its **type**:
// - doesn't have a destructor
// - doesn't own an owned pointer
//
// - For each *immutable* static item, it checks that its **value**:
// - doesn't own owned, managed pointers
// - doesn't contain a struct literal or a call to an enum variant / struct constructor where
// - the type of the struct/enum has a dtor
//
// Rules Enforced Elsewhere:
// - It's not possible to take the address of a static item with unsafe interior. This is enforced
// by borrowck::gather_loans
use middle::const_eval;
use middle::def;
use middle::expr_use_visitor as euv;
use middle::infer;
use middle::mem_categorization as mc;
use middle::traits;
use middle::ty::{self, Ty};
use util::nodemap::NodeMap;
use util::ppaux;
use syntax::ast;
use syntax::codemap::Span;
use syntax::print::pprust;
use syntax::visit::{self, Visitor};
use std::collections::hash_map::Entry;
// Const qualification, from partial to completely promotable.
bitflags! {
    #[derive(RustcEncodable, RustcDecodable)]
    flags ConstQualif: u8 {
        // Const rvalue which can be placed behind a reference.
        const PURE_CONST = 0b000000,
        // Inner mutability (can not be placed behind a reference) or behind
        // &mut in a non-global expression. Can be copied from static memory.
        const MUTABLE_MEM = 0b000001,
        // Constant value with a type that implements Drop. Can be copied
        // from static memory, similar to MUTABLE_MEM.
        const NEEDS_DROP = 0b000010,
        // Even if the value can be placed in static memory, copying it from
        // there is more expensive than in-place instantiation, and/or it may
        // be too large. This applies to [T; N] and everything containing it.
        // N.B.: references need to clear this flag to not end up on the stack.
        const PREFER_IN_PLACE = 0b000100,
        // May use more than 0 bytes of memory, doesn't impact the constness
        // directly, but is not allowed to be borrowed mutably in a constant.
        const NON_ZERO_SIZED = 0b001000,
        // Actually borrowed, has to always be in static memory. Does not
        // propagate, and requires the expression to behave like a 'static
        // lvalue. The set of expressions with this flag is the minimum
        // that have to be promoted.
        const HAS_STATIC_BORROWS = 0b010000,
        // Invalid const for miscellaneous reasons (e.g. not implemented).
        const NOT_CONST = 0b100000,
        // Borrowing the expression won't produce &'static T if any of these
        // bits are set, though the value could be copied from static memory
        // if `NOT_CONST` isn't set.
        // (Derived mask — a union of other flags, not a distinct bit.)
        const NON_STATIC_BORROWS = MUTABLE_MEM.bits | NEEDS_DROP.bits | NOT_CONST.bits
    }
}
// Which kind of constant context an expression is being checked in.
#[derive(Copy, Eq, PartialEq)]
enum Mode {
    // Initializer of a `const` item.
    Const,
    // Initializer of an immutable `static` item.
    Static,
    // Initializer of a `static mut` item.
    StaticMut,
    // An expression that occurs outside of any constant context
    // (i.e. `const`, `static`, array lengths, etc.). The value
    // can be variable at runtime, but will be promotable to
    // static memory if we can prove it is actually constant.
    Var,
}
// AST visitor that performs const/static checking and computes, per
// expression, the ConstQualif flags cached in tcx.const_qualif_map.
struct CheckCrateVisitor<'a, 'tcx: 'a> {
    tcx: &'a ty::ctxt<'tcx>,
    // Constant context currently being checked.
    mode: Mode,
    // Qualification flags accumulated for the expression under visit.
    qualif: ConstQualif,
    // Borrows recorded against rvalue expression ids by the euv::Delegate
    // callbacks; consumed by visit_expr.
    rvalue_borrows: NodeMap<ast::Mutability>
}
impl<'a, 'tcx> CheckCrateVisitor<'a, 'tcx> {
fn with_mode<F, R>(&mut self, mode: Mode, f: F) -> R where
F: FnOnce(&mut CheckCrateVisitor<'a, 'tcx>) -> R,
{
let (old_mode, old_qualif) = (self.mode, self.qualif);
self.mode = mode;
self.qualif = PURE_CONST;
let r = f(self);
self.mode = old_mode;
self.qualif = old_qualif;
r
}
fn with_euv<'b, F, R>(&'b mut self, item_id: Option<ast::NodeId>, f: F) -> R where
F: for<'t> FnOnce(&mut euv::ExprUseVisitor<'b, 't, 'tcx,
ty::ParameterEnvironment<'a, 'tcx>>) -> R,
{
let param_env = match item_id {
Some(item_id) => ty::ParameterEnvironment::for_item(self.tcx, item_id),
None => ty::empty_parameter_environment(self.tcx)
};
f(&mut euv::ExprUseVisitor::new(self, ¶m_env))
}
fn global_expr(&mut self, mode: Mode, expr: &ast::Expr) -> ConstQualif {
assert!(mode != Mode::Var);
match self.tcx.const_qualif_map.borrow_mut().entry(expr.id) {
Entry::Occupied(entry) => return *entry.get(),
Entry::Vacant(entry) => {
// Prevent infinite recursion on re-entry.
entry.insert(PURE_CONST);
}
}
self.with_mode(mode, |this| {
this.with_euv(None, |euv| euv.consume_expr(expr));
this.visit_expr(expr);
this.qualif
})
}
fn add_qualif(&mut self, qualif: ConstQualif) {
self.qualif = self.qualif | qualif;
}
fn record_borrow(&mut self, id: ast::NodeId, mutbl: ast::Mutability) {
match self.rvalue_borrows.entry(id) {
Entry::Occupied(mut entry) => {
// Merge the two borrows, taking the most demanding
// one, mutability-wise.
if mutbl == ast::MutMutable {
entry.insert(mutbl);
}
}
Entry::Vacant(entry) => {
entry.insert(mutbl);
}
}
}
fn msg(&self) -> &'static str {
match self.mode {
Mode::Const => "constant",
Mode::StaticMut | Mode::Static => "static",
Mode::Var => unreachable!(),
}
}
fn check_static_mut_type(&self, e: &ast::Expr) {
let node_ty = ty::node_id_to_type(self.tcx, e.id);
let tcontents = ty::type_contents(self.tcx, node_ty);
let suffix = if tcontents.has_dtor() {
"destructors"
} else if tcontents.owns_owned() {
"owned pointers"
} else {
return
};
self.tcx.sess.span_err(e.span, &format!("mutable statics are not allowed \
to have {}", suffix));
}
fn check_static_type(&self, e: &ast::Expr) {
let ty = ty::node_id_to_type(self.tcx, e.id);
let infcx = infer::new_infer_ctxt(self.tcx);
let mut fulfill_cx = traits::FulfillmentContext::new();
let cause = traits::ObligationCause::new(e.span, e.id, traits::SharedStatic);
fulfill_cx.register_builtin_bound(&infcx, ty, ty::BoundSync, cause);
let env = ty::empty_parameter_environment(self.tcx);
match fulfill_cx.select_all_or_error(&infcx, &env) {
Ok(()) => { },
Err(ref errors) => {
traits::report_fulfillment_errors(&infcx, errors);
}
}
}
}
impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> {
    // Dispatches each item kind to the matching constant-context check.
    fn visit_item(&mut self, i: &ast::Item) {
        debug!("visit_item(item={})", pprust::item_to_string(i));
        match i.node {
            ast::ItemStatic(_, ast::MutImmutable, ref expr) => {
                self.check_static_type(&**expr);
                self.global_expr(Mode::Static, &**expr);
            }
            ast::ItemStatic(_, ast::MutMutable, ref expr) => {
                self.check_static_mut_type(&**expr);
                self.global_expr(Mode::StaticMut, &**expr);
            }
            ast::ItemConst(_, ref expr) => {
                self.global_expr(Mode::Const, &**expr);
            }
            ast::ItemEnum(ref enum_definition, _) => {
                // Explicit enum discriminants are constant expressions too.
                for var in &enum_definition.variants {
                    if let Some(ref ex) = var.node.disr_expr {
                        self.global_expr(Mode::Const, &**ex);
                    }
                }
            }
            _ => {
                self.with_mode(Mode::Var, |v| visit::walk_item(v, i));
            }
        }
    }
    // Function bodies are always non-constant contexts (Mode::Var).
    fn visit_fn(&mut self,
                fk: visit::FnKind<'v>,
                fd: &'v ast::FnDecl,
                b: &'v ast::Block,
                s: Span,
                fn_id: ast::NodeId) {
        assert!(self.mode == Mode::Var);
        self.with_euv(Some(fn_id), |euv| euv.walk_fn(fd, b));
        visit::walk_fn(self, fk, fd, b, s);
    }
    // Literal and range patterns embed constant expressions.
    fn visit_pat(&mut self, p: &ast::Pat) {
        match p.node {
            ast::PatLit(ref lit) => {
                self.global_expr(Mode::Const, &**lit);
            }
            ast::PatRange(ref start, ref end) => {
                self.global_expr(Mode::Const, &**start);
                self.global_expr(Mode::Const, &**end);
            }
            _ => visit::walk_pat(self, p)
        }
    }
    // Computes the qualification of `ex`, caches it in const_qualif_map,
    // and merges it (minus non-propagating flags) into the parent's qualif.
    fn visit_expr(&mut self, ex: &ast::Expr) {
        let mut outer = self.qualif;
        self.qualif = PURE_CONST;
        let node_ty = ty::node_id_to_type(self.tcx, ex.id);
        check_expr(self, ex, node_ty);
        // Special-case some expressions to avoid certain flags bubbling up.
        match ex.node {
            ast::ExprCall(ref callee, ref args) => {
                for arg in args.iter() {
                    self.visit_expr(&**arg)
                }
                let inner = self.qualif;
                self.visit_expr(&**callee);
                // The callee's size doesn't count in the call.
                let added = self.qualif - inner;
                self.qualif = inner | (added - NON_ZERO_SIZED);
            }
            ast::ExprRepeat(ref element, _) => {
                self.visit_expr(&**element);
                // The count is checked elsewhere (typeck).
                let count = match node_ty.sty {
                    ty::ty_vec(_, Some(n)) => n,
                    _ => unreachable!()
                };
                // [element; 0] is always zero-sized.
                if count == 0 {
                    self.qualif = self.qualif - (NON_ZERO_SIZED | PREFER_IN_PLACE);
                }
            }
            ast::ExprMatch(ref discr, ref arms, _) => {
                // Compute the most demanding borrow from all the arms'
                // patterns and set that on the discriminator.
                let mut borrow = None;
                for pat in arms.iter().flat_map(|arm| arm.pats.iter()) {
                    let pat_borrow = self.rvalue_borrows.remove(&pat.id);
                    match (borrow, pat_borrow) {
                        (None, _) | (_, Some(ast::MutMutable)) => {
                            borrow = pat_borrow;
                        }
                        _ => {}
                    }
                }
                if let Some(mutbl) = borrow {
                    self.record_borrow(discr.id, mutbl);
                }
                visit::walk_expr(self, ex);
            }
            // Division by zero and overflow checking.
            ast::ExprBinary(op, _, _) => {
                visit::walk_expr(self, ex);
                let div_or_rem = op.node == ast::BiDiv || op.node == ast::BiRem;
                match node_ty.sty {
                    ty::ty_uint(_) | ty::ty_int(_) if div_or_rem => {
                        if !self.qualif.intersects(NOT_CONST) {
                            match const_eval::eval_const_expr_partial(self.tcx, ex, None) {
                                Ok(_) => {}
                                Err(msg) => {
                                    span_err!(self.tcx.sess, ex.span, E0020,
                                              "{} in a constant expression", msg)
                                }
                            }
                        }
                    }
                    _ => {}
                }
            }
            _ => visit::walk_expr(self, ex)
        }
        // Handle borrows on (or inside the autorefs of) this expression.
        match self.rvalue_borrows.remove(&ex.id) {
            Some(ast::MutImmutable) => {
                // Constants cannot be borrowed if they contain interior mutability as
                // it means that our "silent insertion of statics" could change
                // initializer values (very bad).
                // If the type doesn't have interior mutability, then `MUTABLE_MEM` has
                // propagated from another error, so erroring again would be just noise.
                let tc = ty::type_contents(self.tcx, node_ty);
                if self.qualif.intersects(MUTABLE_MEM) && tc.interior_unsafe() {
                    outer = outer | NOT_CONST;
                    if self.mode != Mode::Var {
                        self.tcx.sess.span_err(ex.span,
                            "cannot borrow a constant which contains \
                             interior mutability, create a static instead");
                    }
                }
                // If the reference has to be 'static, avoid in-place initialization
                // as that will end up pointing to the stack instead.
                if !self.qualif.intersects(NON_STATIC_BORROWS) {
                    self.qualif = self.qualif - PREFER_IN_PLACE;
                    self.add_qualif(HAS_STATIC_BORROWS);
                }
            }
            Some(ast::MutMutable) => {
                // `&mut expr` means expr could be mutated, unless it's zero-sized.
                if self.qualif.intersects(NON_ZERO_SIZED) {
                    if self.mode == Mode::Var {
                        outer = outer | NOT_CONST;
                        self.add_qualif(MUTABLE_MEM);
                    } else {
                        span_err!(self.tcx.sess, ex.span, E0017,
                                  "references in {}s may only refer \
                                   to immutable values", self.msg())
                    }
                }
                if !self.qualif.intersects(NON_STATIC_BORROWS) {
                    self.add_qualif(HAS_STATIC_BORROWS);
                }
            }
            None => {}
        }
        self.tcx.const_qualif_map.borrow_mut().insert(ex.id, self.qualif);
        // Don't propagate certain flags.
        self.qualif = outer | (self.qualif - HAS_STATIC_BORROWS);
    }
}
/// This function is used to enforce the constraints on
/// const/static items. It walks through the *value*
/// of the item walking down the expression and evaluating
/// every nested expression. If the expression is not part
/// of a const/static item, it is qualified for promotion
/// instead of producing errors.
fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>,
e: &ast::Expr, node_ty: Ty<'tcx>) {
match node_ty.sty {
ty::ty_struct(did, _) |
ty::ty_enum(did, _) if ty::has_dtor(v.tcx, did) => {
v.add_qualif(NEEDS_DROP);
if v.mode != Mode::Var {
v.tcx.sess.span_err(e.span,
&format!("{}s are not allowed to have destructors",
v.msg()));
}
}
_ => {}
}
let method_call = ty::MethodCall::expr(e.id);
match e.node {
ast::ExprUnary(..) |
ast::ExprBinary(..) |
ast::ExprIndex(..) if v.tcx.method_map.borrow().contains_key(&method_call) => {
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0011,
"user-defined operators are not allowed in {}s", v.msg());
}
}
ast::ExprBox(..) |
ast::ExprUnary(ast::UnUniq, _) => {
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0010,
"allocations are not allowed in {}s", v.msg());
}
}
ast::ExprUnary(ast::UnDeref, ref ptr) => {
match ty::node_id_to_type(v.tcx, ptr.id).sty {
ty::ty_ptr(_) => {
// This shouldn't be allowed in constants at all.
v.add_qualif(NOT_CONST);
}
_ => {}
}
}
ast::ExprCast(ref from, _) => {
let toty = ty::expr_ty(v.tcx, e);
let fromty = ty::expr_ty(v.tcx, &**from);
let is_legal_cast =
ty::type_is_numeric(toty) ||
ty::type_is_unsafe_ptr(toty) ||
(ty::type_is_bare_fn(toty) && ty::type_is_bare_fn_item(fromty));
if !is_legal_cast {
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0012,
"can not cast to `{}` in {}s",
ppaux::ty_to_string(v.tcx, toty), v.msg());
}
}
if ty::type_is_unsafe_ptr(fromty) && ty::type_is_numeric(toty) {
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0018,
"can not cast a pointer to an integer in {}s", v.msg());
}
}
}
ast::ExprPath(_) | ast::ExprQPath(_) => {
let def = v.tcx.def_map.borrow().get(&e.id).cloned();
match def {
Some(def::DefVariant(_, _, _)) => {
// Count the discriminator or function pointer.
v.add_qualif(NON_ZERO_SIZED);
}
Some(def::DefStruct(_)) => {
if let ty::ty_bare_fn(..) = node_ty.sty {
// Count the function pointer.
v.add_qualif(NON_ZERO_SIZED);
}
}
Some(def::DefFn(..)) |
Some(def::DefStaticMethod(..)) | Some(def::DefMethod(..)) => {
// Count the function pointer.
v.add_qualif(NON_ZERO_SIZED);
}
Some(def::DefStatic(..)) => {
match v.mode {
Mode::Static | Mode::StaticMut => {}
Mode::Const => {
span_err!(v.tcx.sess, e.span, E0013,
"constants cannot refer to other statics, \
insert an intermediate constant instead");
}
Mode::Var => v.add_qualif(NOT_CONST)
}
}
Some(def::DefConst(did)) => {
if let Some(expr) = const_eval::lookup_const_by_id(v.tcx, did) {
let inner = v.global_expr(Mode::Const, expr);
v.add_qualif(inner);
} else {
v.tcx.sess.span_bug(e.span, "DefConst doesn't point \
to an ItemConst");
}
}
def => {
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
debug!("(checking const) found bad def: {:?}", def);
span_err!(v.tcx.sess, e.span, E0014,
"paths in {}s may only refer to constants \
or functions", v.msg());
}
}
}
}
ast::ExprCall(ref callee, _) => {
let mut callee = &**callee;
loop {
callee = match callee.node {
ast::ExprParen(ref inner) => &**inner,
ast::ExprBlock(ref block) => match block.expr {
Some(ref tail) => &**tail,
None => break
},
_ => break
};
}
let def = v.tcx.def_map.borrow().get(&callee.id).cloned();
match def {
Some(def::DefStruct(..)) => {}
Some(def::DefVariant(..)) => {
// Count the discriminator.
v.add_qualif(NON_ZERO_SIZED);
}
_ => {
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0015,
"function calls in {}s are limited to \
struct and enum constructors", v.msg());
}
}
}
}
ast::ExprBlock(ref block) => {
// Check all statements in the block
let mut block_span_err = |span| {
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, span, E0016,
"blocks in {}s are limited to items and \
tail expressions", v.msg());
}
};
for stmt in &block.stmts {
match stmt.node {
ast::StmtDecl(ref decl, _) => {
match decl.node {
ast::DeclLocal(_) => block_span_err(decl.span),
// Item statements are allowed
ast::DeclItem(_) => {}
}
}
ast::StmtExpr(ref expr, _) => block_span_err(expr.span),
ast::StmtSemi(ref semi, _) => block_span_err(semi.span),
ast::StmtMac(..) => {
v.tcx.sess.span_bug(e.span, "unexpanded statement \
macro in const?!")
}
}
}
}
ast::ExprStruct(..) => {
let did = v.tcx.def_map.borrow().get(&e.id).map(|def| def.def_id());
if did == v.tcx.lang_items.unsafe_cell_type() {
v.add_qualif(MUTABLE_MEM);
}
}
ast::ExprLit(_) |
ast::ExprAddrOf(..) => {
v.add_qualif(NON_ZERO_SIZED);
}
ast::ExprRepeat(..) => {
v.add_qualif(PREFER_IN_PLACE);
}
ast::ExprClosure(..) => {
// Paths in constant constexts cannot refer to local variables,
// as there are none, and thus closures can't have upvars there.
if ty::with_freevars(v.tcx, e.id, |fv| !fv.is_empty()) {
assert!(v.mode == Mode::Var,
"global closures can't capture anything");
v.add_qualif(NOT_CONST);
}
}
ast::ExprUnary(..) |
ast::ExprBinary(..) |
ast::ExprIndex(..) |
ast::ExprField(..) |
ast::ExprTupField(..) |
ast::ExprVec(_) |
ast::ExprParen(..) |
ast::ExprTup(..) => {}
// Conditional control flow (possible to implement).
ast::ExprMatch(..) |
ast::ExprIf(..) |
ast::ExprIfLet(..) |
// Loops (not very meaningful in constants).
ast::ExprWhile(..) |
ast::ExprWhileLet(..) |
ast::ExprForLoop(..) |
ast::ExprLoop(..) |
// More control flow (also not very meaningful).
ast::ExprBreak(_) |
ast::ExprAgain(_) |
ast::ExprRet(_) |
// Miscellaneous expressions that could be implemented.
ast::ExprRange(..) |
// Various other expressions.
ast::ExprMethodCall(..) |
ast::ExprAssign(..) |
ast::ExprAssignOp(..) |
ast::ExprInlineAsm(_) |
ast::ExprMac(_) => {
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0019,
"{} contains unimplemented expression type", v.msg());
}
}
}
}
pub fn check_crate(tcx: &ty::ctxt) {
visit::walk_crate(&mut CheckCrateVisitor {
tcx: tcx,
mode: Mode::Var,
qualif: NOT_CONST,
rvalue_borrows: NodeMap()
}, tcx.map.krate());
tcx.sess.abort_if_errors();
}
impl<'a, 'tcx> euv::Delegate<'tcx> for CheckCrateVisitor<'a, 'tcx> {
fn consume(&mut self,
_consume_id: ast::NodeId,
consume_span: Span,
cmt: mc::cmt,
_mode: euv::ConsumeMode) {
let mut cur = &cmt;
loop {
match cur.cat {
mc::cat_static_item => {
if self.mode != Mode::Var {
// statics cannot be consumed by value at any time, that would imply
// that they're an initializer (what a const is for) or kept in sync
// over time (not feasible), so deny it outright.
self.tcx.sess.span_err(consume_span,
"cannot refer to other statics by value, use the \
address-of operator or a constant instead");
}
break;
}
mc::cat_deref(ref cmt, _, _) |
mc::cat_downcast(ref cmt, _) |
mc::cat_interior(ref cmt, _) => cur = cmt,
mc::cat_rvalue(..) |
mc::cat_upvar(..) |
mc::cat_local(..) => break
}
}
}
fn borrow(&mut self,
borrow_id: ast::NodeId,
borrow_span: Span,
cmt: mc::cmt<'tcx>,
_loan_region: ty::Region,
bk: ty::BorrowKind,
loan_cause: euv::LoanCause) {
let mut cur = &cmt;
let mut is_interior = false;
loop {
match cur.cat {
mc::cat_rvalue(..) => {
if loan_cause == euv::MatchDiscriminant {
// Ignore the dummy immutable borrow created by EUV.
break;
}
let mutbl = bk.to_mutbl_lossy();
if mutbl == ast::MutMutable && self.mode == Mode::StaticMut {
// Mutable slices are the only `&mut` allowed in globals,
// but only in `static mut`, nowhere else.
match cmt.ty.sty {
ty::ty_vec(_, _) => break,
_ => {}
}
}
self.record_borrow(borrow_id, mutbl);
break;
}
mc::cat_static_item => {
if is_interior && self.mode != Mode::Var {
// Borrowed statics can specifically *only* have their address taken,
// not any number of other borrows such as borrowing fields, reading
// elements of an array, etc.
self.tcx.sess.span_err(borrow_span,
"cannot refer to the interior of another \
static, use a constant instead");
}
break;
}
mc::cat_deref(ref cmt, _, _) |
mc::cat_downcast(ref cmt, _) |
mc::cat_interior(ref cmt, _) => {
is_interior = true;
cur = cmt;
}
mc::cat_upvar(..) |
mc::cat_local(..) => break
}
}
}
fn decl_without_init(&mut self,
_id: ast::NodeId,
_span: Span) {}
fn mutate(&mut self,
_assignment_id: ast::NodeId,
_assignment_span: Span,
_assignee_cmt: mc::cmt,
_mode: euv::MutateMode) {}
fn | (&mut self,
_: &ast::Pat,
_: mc::cmt,
_: euv::MatchMode) {}
fn consume_pat(&mut self,
_consume_pat: &ast::Pat,
_cmt: mc::cmt,
_mode: euv::ConsumeMode) {}
}
| matched_pat | identifier_name |
check_const.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Verifies that the types and values of const and static items
// are safe. The rules enforced by this module are:
//
// - For each *mutable* static item, it checks that its **type**:
// - doesn't have a destructor
// - doesn't own an owned pointer
//
// - For each *immutable* static item, it checks that its **value**:
// - doesn't own owned, managed pointers
// - doesn't contain a struct literal or a call to an enum variant / struct constructor where
// - the type of the struct/enum has a dtor
//
// Rules Enforced Elsewhere:
// - It's not possible to take the address of a static item with unsafe interior. This is enforced
// by borrowck::gather_loans
use middle::const_eval;
use middle::def;
use middle::expr_use_visitor as euv;
use middle::infer;
use middle::mem_categorization as mc;
use middle::traits;
use middle::ty::{self, Ty};
use util::nodemap::NodeMap;
use util::ppaux;
use syntax::ast;
use syntax::codemap::Span;
use syntax::print::pprust;
use syntax::visit::{self, Visitor};
use std::collections::hash_map::Entry;
// Const qualification, from partial to completely promotable.
bitflags! {
#[derive(RustcEncodable, RustcDecodable)]
flags ConstQualif: u8 {
// Const rvalue which can be placed behind a reference.
const PURE_CONST = 0b000000,
// Inner mutability (can not be placed behind a reference) or behind
// &mut in a non-global expression. Can be copied from static memory.
const MUTABLE_MEM = 0b000001,
// Constant value with a type that implements Drop. Can be copied
// from static memory, similar to MUTABLE_MEM.
const NEEDS_DROP = 0b000010,
// Even if the value can be placed in static memory, copying it from
// there is more expensive than in-place instantiation, and/or it may
// be too large. This applies to [T; N] and everything containing it.
// N.B.: references need to clear this flag to not end up on the stack.
const PREFER_IN_PLACE = 0b000100,
// May use more than 0 bytes of memory, doesn't impact the constness
// directly, but is not allowed to be borrowed mutably in a constant.
const NON_ZERO_SIZED = 0b001000,
// Actually borrowed, has to always be in static memory. Does not
// propagate, and requires the expression to behave like a 'static
// lvalue. The set of expressions with this flag is the minimum
// that have to be promoted.
const HAS_STATIC_BORROWS = 0b010000,
// Invalid const for miscellaneous reasons (e.g. not implemented).
const NOT_CONST = 0b100000,
// Borrowing the expression won't produce &'static T if any of these
// bits are set, though the value could be copied from static memory
// if `NOT_CONST` isn't set.
const NON_STATIC_BORROWS = MUTABLE_MEM.bits | NEEDS_DROP.bits | NOT_CONST.bits
}
}
#[derive(Copy, Eq, PartialEq)]
enum Mode {
Const,
Static,
StaticMut,
// An expression that occurs outside of any constant context
// (i.e. `const`, `static`, array lengths, etc.). The value
// can be variable at runtime, but will be promotable to
// static memory if we can prove it is actually constant.
Var,
}
struct CheckCrateVisitor<'a, 'tcx: 'a> {
tcx: &'a ty::ctxt<'tcx>,
mode: Mode,
qualif: ConstQualif,
rvalue_borrows: NodeMap<ast::Mutability>
}
impl<'a, 'tcx> CheckCrateVisitor<'a, 'tcx> {
fn with_mode<F, R>(&mut self, mode: Mode, f: F) -> R where
F: FnOnce(&mut CheckCrateVisitor<'a, 'tcx>) -> R,
{
let (old_mode, old_qualif) = (self.mode, self.qualif);
self.mode = mode;
self.qualif = PURE_CONST;
let r = f(self);
self.mode = old_mode;
self.qualif = old_qualif;
r
}
fn with_euv<'b, F, R>(&'b mut self, item_id: Option<ast::NodeId>, f: F) -> R where
F: for<'t> FnOnce(&mut euv::ExprUseVisitor<'b, 't, 'tcx,
ty::ParameterEnvironment<'a, 'tcx>>) -> R,
{
let param_env = match item_id {
Some(item_id) => ty::ParameterEnvironment::for_item(self.tcx, item_id),
None => ty::empty_parameter_environment(self.tcx)
};
f(&mut euv::ExprUseVisitor::new(self, ¶m_env))
}
fn global_expr(&mut self, mode: Mode, expr: &ast::Expr) -> ConstQualif {
assert!(mode != Mode::Var);
match self.tcx.const_qualif_map.borrow_mut().entry(expr.id) {
Entry::Occupied(entry) => return *entry.get(),
Entry::Vacant(entry) => {
// Prevent infinite recursion on re-entry.
entry.insert(PURE_CONST);
}
}
self.with_mode(mode, |this| {
this.with_euv(None, |euv| euv.consume_expr(expr));
this.visit_expr(expr);
this.qualif
})
}
fn add_qualif(&mut self, qualif: ConstQualif) {
self.qualif = self.qualif | qualif;
}
fn record_borrow(&mut self, id: ast::NodeId, mutbl: ast::Mutability) {
match self.rvalue_borrows.entry(id) {
Entry::Occupied(mut entry) => {
// Merge the two borrows, taking the most demanding
// one, mutability-wise.
if mutbl == ast::MutMutable {
entry.insert(mutbl);
}
}
Entry::Vacant(entry) => {
entry.insert(mutbl);
}
}
}
fn msg(&self) -> &'static str {
match self.mode {
Mode::Const => "constant",
Mode::StaticMut | Mode::Static => "static",
Mode::Var => unreachable!(),
}
}
fn check_static_mut_type(&self, e: &ast::Expr) {
let node_ty = ty::node_id_to_type(self.tcx, e.id);
let tcontents = ty::type_contents(self.tcx, node_ty);
let suffix = if tcontents.has_dtor() {
"destructors"
} else if tcontents.owns_owned() {
"owned pointers"
} else {
return
};
self.tcx.sess.span_err(e.span, &format!("mutable statics are not allowed \
to have {}", suffix));
}
fn check_static_type(&self, e: &ast::Expr) {
let ty = ty::node_id_to_type(self.tcx, e.id);
let infcx = infer::new_infer_ctxt(self.tcx);
let mut fulfill_cx = traits::FulfillmentContext::new();
let cause = traits::ObligationCause::new(e.span, e.id, traits::SharedStatic);
fulfill_cx.register_builtin_bound(&infcx, ty, ty::BoundSync, cause);
let env = ty::empty_parameter_environment(self.tcx);
match fulfill_cx.select_all_or_error(&infcx, &env) {
Ok(()) => { },
Err(ref errors) => {
traits::report_fulfillment_errors(&infcx, errors);
}
}
}
}
impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> {
fn visit_item(&mut self, i: &ast::Item) {
debug!("visit_item(item={})", pprust::item_to_string(i));
match i.node {
ast::ItemStatic(_, ast::MutImmutable, ref expr) => {
self.check_static_type(&**expr);
self.global_expr(Mode::Static, &**expr);
}
ast::ItemStatic(_, ast::MutMutable, ref expr) => {
self.check_static_mut_type(&**expr);
self.global_expr(Mode::StaticMut, &**expr);
}
ast::ItemConst(_, ref expr) => {
self.global_expr(Mode::Const, &**expr);
}
ast::ItemEnum(ref enum_definition, _) => {
for var in &enum_definition.variants {
if let Some(ref ex) = var.node.disr_expr {
self.global_expr(Mode::Const, &**ex);
}
}
}
_ => {
self.with_mode(Mode::Var, |v| visit::walk_item(v, i));
}
}
}
fn visit_fn(&mut self,
fk: visit::FnKind<'v>,
fd: &'v ast::FnDecl,
b: &'v ast::Block,
s: Span,
fn_id: ast::NodeId) |
fn visit_pat(&mut self, p: &ast::Pat) {
match p.node {
ast::PatLit(ref lit) => {
self.global_expr(Mode::Const, &**lit);
}
ast::PatRange(ref start, ref end) => {
self.global_expr(Mode::Const, &**start);
self.global_expr(Mode::Const, &**end);
}
_ => visit::walk_pat(self, p)
}
}
fn visit_expr(&mut self, ex: &ast::Expr) {
let mut outer = self.qualif;
self.qualif = PURE_CONST;
let node_ty = ty::node_id_to_type(self.tcx, ex.id);
check_expr(self, ex, node_ty);
// Special-case some expressions to avoid certain flags bubbling up.
match ex.node {
ast::ExprCall(ref callee, ref args) => {
for arg in args.iter() {
self.visit_expr(&**arg)
}
let inner = self.qualif;
self.visit_expr(&**callee);
// The callee's size doesn't count in the call.
let added = self.qualif - inner;
self.qualif = inner | (added - NON_ZERO_SIZED);
}
ast::ExprRepeat(ref element, _) => {
self.visit_expr(&**element);
// The count is checked elsewhere (typeck).
let count = match node_ty.sty {
ty::ty_vec(_, Some(n)) => n,
_ => unreachable!()
};
// [element; 0] is always zero-sized.
if count == 0 {
self.qualif = self.qualif - (NON_ZERO_SIZED | PREFER_IN_PLACE);
}
}
ast::ExprMatch(ref discr, ref arms, _) => {
// Compute the most demanding borrow from all the arms'
// patterns and set that on the discriminator.
let mut borrow = None;
for pat in arms.iter().flat_map(|arm| arm.pats.iter()) {
let pat_borrow = self.rvalue_borrows.remove(&pat.id);
match (borrow, pat_borrow) {
(None, _) | (_, Some(ast::MutMutable)) => {
borrow = pat_borrow;
}
_ => {}
}
}
if let Some(mutbl) = borrow {
self.record_borrow(discr.id, mutbl);
}
visit::walk_expr(self, ex);
}
// Division by zero and overflow checking.
ast::ExprBinary(op, _, _) => {
visit::walk_expr(self, ex);
let div_or_rem = op.node == ast::BiDiv || op.node == ast::BiRem;
match node_ty.sty {
ty::ty_uint(_) | ty::ty_int(_) if div_or_rem => {
if !self.qualif.intersects(NOT_CONST) {
match const_eval::eval_const_expr_partial(self.tcx, ex, None) {
Ok(_) => {}
Err(msg) => {
span_err!(self.tcx.sess, ex.span, E0020,
"{} in a constant expression", msg)
}
}
}
}
_ => {}
}
}
_ => visit::walk_expr(self, ex)
}
// Handle borrows on (or inside the autorefs of) this expression.
match self.rvalue_borrows.remove(&ex.id) {
Some(ast::MutImmutable) => {
// Constants cannot be borrowed if they contain interior mutability as
// it means that our "silent insertion of statics" could change
// initializer values (very bad).
// If the type doesn't have interior mutability, then `MUTABLE_MEM` has
// propagated from another error, so erroring again would be just noise.
let tc = ty::type_contents(self.tcx, node_ty);
if self.qualif.intersects(MUTABLE_MEM) && tc.interior_unsafe() {
outer = outer | NOT_CONST;
if self.mode != Mode::Var {
self.tcx.sess.span_err(ex.span,
"cannot borrow a constant which contains \
interior mutability, create a static instead");
}
}
// If the reference has to be 'static, avoid in-place initialization
// as that will end up pointing to the stack instead.
if !self.qualif.intersects(NON_STATIC_BORROWS) {
self.qualif = self.qualif - PREFER_IN_PLACE;
self.add_qualif(HAS_STATIC_BORROWS);
}
}
Some(ast::MutMutable) => {
// `&mut expr` means expr could be mutated, unless it's zero-sized.
if self.qualif.intersects(NON_ZERO_SIZED) {
if self.mode == Mode::Var {
outer = outer | NOT_CONST;
self.add_qualif(MUTABLE_MEM);
} else {
span_err!(self.tcx.sess, ex.span, E0017,
"references in {}s may only refer \
to immutable values", self.msg())
}
}
if !self.qualif.intersects(NON_STATIC_BORROWS) {
self.add_qualif(HAS_STATIC_BORROWS);
}
}
None => {}
}
self.tcx.const_qualif_map.borrow_mut().insert(ex.id, self.qualif);
// Don't propagate certain flags.
self.qualif = outer | (self.qualif - HAS_STATIC_BORROWS);
}
}
/// This function is used to enforce the constraints on
/// const/static items. It walks through the *value*
/// of the item walking down the expression and evaluating
/// every nested expression. If the expression is not part
/// of a const/static item, it is qualified for promotion
/// instead of producing errors.
fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>,
e: &ast::Expr, node_ty: Ty<'tcx>) {
match node_ty.sty {
ty::ty_struct(did, _) |
ty::ty_enum(did, _) if ty::has_dtor(v.tcx, did) => {
v.add_qualif(NEEDS_DROP);
if v.mode != Mode::Var {
v.tcx.sess.span_err(e.span,
&format!("{}s are not allowed to have destructors",
v.msg()));
}
}
_ => {}
}
let method_call = ty::MethodCall::expr(e.id);
match e.node {
ast::ExprUnary(..) |
ast::ExprBinary(..) |
ast::ExprIndex(..) if v.tcx.method_map.borrow().contains_key(&method_call) => {
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0011,
"user-defined operators are not allowed in {}s", v.msg());
}
}
ast::ExprBox(..) |
ast::ExprUnary(ast::UnUniq, _) => {
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0010,
"allocations are not allowed in {}s", v.msg());
}
}
ast::ExprUnary(ast::UnDeref, ref ptr) => {
match ty::node_id_to_type(v.tcx, ptr.id).sty {
ty::ty_ptr(_) => {
// This shouldn't be allowed in constants at all.
v.add_qualif(NOT_CONST);
}
_ => {}
}
}
ast::ExprCast(ref from, _) => {
let toty = ty::expr_ty(v.tcx, e);
let fromty = ty::expr_ty(v.tcx, &**from);
let is_legal_cast =
ty::type_is_numeric(toty) ||
ty::type_is_unsafe_ptr(toty) ||
(ty::type_is_bare_fn(toty) && ty::type_is_bare_fn_item(fromty));
if !is_legal_cast {
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0012,
"can not cast to `{}` in {}s",
ppaux::ty_to_string(v.tcx, toty), v.msg());
}
}
if ty::type_is_unsafe_ptr(fromty) && ty::type_is_numeric(toty) {
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0018,
"can not cast a pointer to an integer in {}s", v.msg());
}
}
}
ast::ExprPath(_) | ast::ExprQPath(_) => {
let def = v.tcx.def_map.borrow().get(&e.id).cloned();
match def {
Some(def::DefVariant(_, _, _)) => {
// Count the discriminator or function pointer.
v.add_qualif(NON_ZERO_SIZED);
}
Some(def::DefStruct(_)) => {
if let ty::ty_bare_fn(..) = node_ty.sty {
// Count the function pointer.
v.add_qualif(NON_ZERO_SIZED);
}
}
Some(def::DefFn(..)) |
Some(def::DefStaticMethod(..)) | Some(def::DefMethod(..)) => {
// Count the function pointer.
v.add_qualif(NON_ZERO_SIZED);
}
Some(def::DefStatic(..)) => {
match v.mode {
Mode::Static | Mode::StaticMut => {}
Mode::Const => {
span_err!(v.tcx.sess, e.span, E0013,
"constants cannot refer to other statics, \
insert an intermediate constant instead");
}
Mode::Var => v.add_qualif(NOT_CONST)
}
}
Some(def::DefConst(did)) => {
if let Some(expr) = const_eval::lookup_const_by_id(v.tcx, did) {
let inner = v.global_expr(Mode::Const, expr);
v.add_qualif(inner);
} else {
v.tcx.sess.span_bug(e.span, "DefConst doesn't point \
to an ItemConst");
}
}
def => {
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
debug!("(checking const) found bad def: {:?}", def);
span_err!(v.tcx.sess, e.span, E0014,
"paths in {}s may only refer to constants \
or functions", v.msg());
}
}
}
}
ast::ExprCall(ref callee, _) => {
let mut callee = &**callee;
loop {
callee = match callee.node {
ast::ExprParen(ref inner) => &**inner,
ast::ExprBlock(ref block) => match block.expr {
Some(ref tail) => &**tail,
None => break
},
_ => break
};
}
let def = v.tcx.def_map.borrow().get(&callee.id).cloned();
match def {
Some(def::DefStruct(..)) => {}
Some(def::DefVariant(..)) => {
// Count the discriminator.
v.add_qualif(NON_ZERO_SIZED);
}
_ => {
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0015,
"function calls in {}s are limited to \
struct and enum constructors", v.msg());
}
}
}
}
ast::ExprBlock(ref block) => {
// Check all statements in the block
let mut block_span_err = |span| {
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, span, E0016,
"blocks in {}s are limited to items and \
tail expressions", v.msg());
}
};
for stmt in &block.stmts {
match stmt.node {
ast::StmtDecl(ref decl, _) => {
match decl.node {
ast::DeclLocal(_) => block_span_err(decl.span),
// Item statements are allowed
ast::DeclItem(_) => {}
}
}
ast::StmtExpr(ref expr, _) => block_span_err(expr.span),
ast::StmtSemi(ref semi, _) => block_span_err(semi.span),
ast::StmtMac(..) => {
v.tcx.sess.span_bug(e.span, "unexpanded statement \
macro in const?!")
}
}
}
}
ast::ExprStruct(..) => {
let did = v.tcx.def_map.borrow().get(&e.id).map(|def| def.def_id());
if did == v.tcx.lang_items.unsafe_cell_type() {
v.add_qualif(MUTABLE_MEM);
}
}
ast::ExprLit(_) |
ast::ExprAddrOf(..) => {
v.add_qualif(NON_ZERO_SIZED);
}
ast::ExprRepeat(..) => {
v.add_qualif(PREFER_IN_PLACE);
}
ast::ExprClosure(..) => {
// Paths in constant constexts cannot refer to local variables,
// as there are none, and thus closures can't have upvars there.
if ty::with_freevars(v.tcx, e.id, |fv| !fv.is_empty()) {
assert!(v.mode == Mode::Var,
"global closures can't capture anything");
v.add_qualif(NOT_CONST);
}
}
ast::ExprUnary(..) |
ast::ExprBinary(..) |
ast::ExprIndex(..) |
ast::ExprField(..) |
ast::ExprTupField(..) |
ast::ExprVec(_) |
ast::ExprParen(..) |
ast::ExprTup(..) => {}
// Conditional control flow (possible to implement).
ast::ExprMatch(..) |
ast::ExprIf(..) |
ast::ExprIfLet(..) |
// Loops (not very meaningful in constants).
ast::ExprWhile(..) |
ast::ExprWhileLet(..) |
ast::ExprForLoop(..) |
ast::ExprLoop(..) |
// More control flow (also not very meaningful).
ast::ExprBreak(_) |
ast::ExprAgain(_) |
ast::ExprRet(_) |
// Miscellaneous expressions that could be implemented.
ast::ExprRange(..) |
// Various other expressions.
ast::ExprMethodCall(..) |
ast::ExprAssign(..) |
ast::ExprAssignOp(..) |
ast::ExprInlineAsm(_) |
ast::ExprMac(_) => {
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0019,
"{} contains unimplemented expression type", v.msg());
}
}
}
}
pub fn check_crate(tcx: &ty::ctxt) {
visit::walk_crate(&mut CheckCrateVisitor {
tcx: tcx,
mode: Mode::Var,
qualif: NOT_CONST,
rvalue_borrows: NodeMap()
}, tcx.map.krate());
tcx.sess.abort_if_errors();
}
impl<'a, 'tcx> euv::Delegate<'tcx> for CheckCrateVisitor<'a, 'tcx> {
fn consume(&mut self,
_consume_id: ast::NodeId,
consume_span: Span,
cmt: mc::cmt,
_mode: euv::ConsumeMode) {
let mut cur = &cmt;
loop {
match cur.cat {
mc::cat_static_item => {
if self.mode != Mode::Var {
// statics cannot be consumed by value at any time, that would imply
// that they're an initializer (what a const is for) or kept in sync
// over time (not feasible), so deny it outright.
self.tcx.sess.span_err(consume_span,
"cannot refer to other statics by value, use the \
address-of operator or a constant instead");
}
break;
}
mc::cat_deref(ref cmt, _, _) |
mc::cat_downcast(ref cmt, _) |
mc::cat_interior(ref cmt, _) => cur = cmt,
mc::cat_rvalue(..) |
mc::cat_upvar(..) |
mc::cat_local(..) => break
}
}
}
fn borrow(&mut self,
borrow_id: ast::NodeId,
borrow_span: Span,
cmt: mc::cmt<'tcx>,
_loan_region: ty::Region,
bk: ty::BorrowKind,
loan_cause: euv::LoanCause) {
let mut cur = &cmt;
let mut is_interior = false;
loop {
match cur.cat {
mc::cat_rvalue(..) => {
if loan_cause == euv::MatchDiscriminant {
// Ignore the dummy immutable borrow created by EUV.
break;
}
let mutbl = bk.to_mutbl_lossy();
if mutbl == ast::MutMutable && self.mode == Mode::StaticMut {
// Mutable slices are the only `&mut` allowed in globals,
// but only in `static mut`, nowhere else.
match cmt.ty.sty {
ty::ty_vec(_, _) => break,
_ => {}
}
}
self.record_borrow(borrow_id, mutbl);
break;
}
mc::cat_static_item => {
if is_interior && self.mode != Mode::Var {
// Borrowed statics can specifically *only* have their address taken,
// not any number of other borrows such as borrowing fields, reading
// elements of an array, etc.
self.tcx.sess.span_err(borrow_span,
"cannot refer to the interior of another \
static, use a constant instead");
}
break;
}
mc::cat_deref(ref cmt, _, _) |
mc::cat_downcast(ref cmt, _) |
mc::cat_interior(ref cmt, _) => {
is_interior = true;
cur = cmt;
}
mc::cat_upvar(..) |
mc::cat_local(..) => break
}
}
}
fn decl_without_init(&mut self,
_id: ast::NodeId,
_span: Span) {}
fn mutate(&mut self,
_assignment_id: ast::NodeId,
_assignment_span: Span,
_assignee_cmt: mc::cmt,
_mode: euv::MutateMode) {}
fn matched_pat(&mut self,
_: &ast::Pat,
_: mc::cmt,
_: euv::MatchMode) {}
fn consume_pat(&mut self,
_consume_pat: &ast::Pat,
_cmt: mc::cmt,
_mode: euv::ConsumeMode) {}
}
| {
assert!(self.mode == Mode::Var);
self.with_euv(Some(fn_id), |euv| euv.walk_fn(fd, b));
visit::walk_fn(self, fk, fd, b, s);
} | identifier_body |
PokerTableInfoDataSerializer.ts |
import { ArrayBufferBuilder } from "../../../../utils/ArrayBufferBuilder";
import { BitsReader } from "../../../../utils/BitsReader";
import { SerializerUtils } from "../../../../utils/SerializerUtils";
import { PokerTableInfoData } from "../data/PokerTableInfoData";
import { BinaryNetworkID } from "../../core/data/BinaryNetworkID";
import { BinaryNetworkIDSerializer } from "../../core/serializers/BinaryNetworkIDSerializer";
import { OptimizedBinaryNumberSerializer } from "../../core/serializers/OptimizedBinaryNumberSerializer";
export class PokerTableInfoDataSerializer {
public static serialize(buffer: ArrayBufferBuilder, data: PokerTableInfoData): void {
BinaryNetworkIDSerializer.serialize(buffer, data.gameNetworkIDVO);
buffer.setUint8( data.playersCount );
OptimizedBinaryNumberSerializer.serialize( buffer, data.mostChips );
OptimizedBinaryNumberSerializer.serialize( buffer, data.leastChips );
buffer.setUint16( data.tableIndex );
}
public static | (buffer: ArrayBufferBuilder, data: PokerTableInfoData): void {
data.gameNetworkIDVO = new BinaryNetworkID(data);
BinaryNetworkIDSerializer.deserialize(buffer, data.gameNetworkIDVO);
data.playersCount = buffer.getUint8();
data.mostChips = OptimizedBinaryNumberSerializer.deserialize( buffer );
data.leastChips = OptimizedBinaryNumberSerializer.deserialize( buffer );
data.tableIndex = buffer.getUint16();
}
}
| deserialize | identifier_name |
PokerTableInfoDataSerializer.ts | import { ArrayBufferBuilder } from "../../../../utils/ArrayBufferBuilder";
import { BitsReader } from "../../../../utils/BitsReader";
import { SerializerUtils } from "../../../../utils/SerializerUtils";
import { PokerTableInfoData } from "../data/PokerTableInfoData";
import { BinaryNetworkID } from "../../core/data/BinaryNetworkID";
import { BinaryNetworkIDSerializer } from "../../core/serializers/BinaryNetworkIDSerializer";
import { OptimizedBinaryNumberSerializer } from "../../core/serializers/OptimizedBinaryNumberSerializer";
export class PokerTableInfoDataSerializer {
public static serialize(buffer: ArrayBufferBuilder, data: PokerTableInfoData): void {
BinaryNetworkIDSerializer.serialize(buffer, data.gameNetworkIDVO);
buffer.setUint8( data.playersCount );
OptimizedBinaryNumberSerializer.serialize( buffer, data.mostChips );
OptimizedBinaryNumberSerializer.serialize( buffer, data.leastChips );
buffer.setUint16( data.tableIndex );
}
public static deserialize(buffer: ArrayBufferBuilder, data: PokerTableInfoData): void {
data.gameNetworkIDVO = new BinaryNetworkID(data);
BinaryNetworkIDSerializer.deserialize(buffer, data.gameNetworkIDVO); |
}
} | data.playersCount = buffer.getUint8();
data.mostChips = OptimizedBinaryNumberSerializer.deserialize( buffer );
data.leastChips = OptimizedBinaryNumberSerializer.deserialize( buffer );
data.tableIndex = buffer.getUint16(); | random_line_split |
conf.py | # -*- coding: utf-8 -*-
#
# agile-analytics documentation build configuration file, created by
# sphinx-quickstart on Fri Jun 17 13:58:53 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'agile-analytics'
copyright = u'2016, Chris Heisel'
author = u'Chris Heisel'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.1'
# The full version, including alpha/beta/rc tags.
release = u'0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'agile-analytics v0.1'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
| # base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'agile-analyticsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'agile-analytics.tex', u'agile-analytics Documentation',
u'Chris Heisel', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'agile-analytics', u'agile-analytics Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'agile-analytics', u'agile-analytics Documentation',
author, 'agile-analytics', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False | # If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the | random_line_split |
DateTimeInput-dbg.js | /*!
* UI development toolkit for HTML5 (OpenUI5)
* (c) Copyright 2009-2016 SAP SE or an SAP affiliate company.
* Licensed under the Apache License, Version 2.0 - see LICENSE.txt.
*/
// Provides control sap.m.DateTimeInput.
sap.ui.define(['jquery.sap.global', 'sap/ui/core/Control', './library', 'sap/ui/model/type/Date'],
function(jQuery, Control, library, Date1) {
"use strict";
/**
* Constructor for a new DateTimeInput.
*
* @param {string} [sId] ID for the new control, generated automatically if no ID is given
* @param {object} [mSettings] Initial settings for the new control
*
* @class
* Allows end users to interact with date and/or time and select from a date and/or time pad.
*
* <b>Note:</b> This control should not be used any longer, instead please use the dedicated <code>sap.m.DatePicker</code>, <code>sap.m.TimePicker</code> or <code>sap.m.DateTimePicker</code> control.
* @extends sap.ui.core.Control
*
* @author SAP SE
* @version 1.38.4
*
* @constructor
* @public
* @since 1.9.1
* @deprecated Since version 1.32.8. Instead, use the dedicated <code>sap.m.DatePicker</code>, <code>sap.m.TimePicker</code> or <code>sap.m.DateTimePicker</code> controls.
* @alias sap.m.DateTimeInput
* @ui5-metamodel This control/element also will be described in the UI5 (legacy) designtime metamodel
*/
var DateTimeInput = Control.extend("sap.m.DateTimeInput", /** @lends sap.m.DateTimeInput.prototype */ { metadata : {
library : "sap.m",
properties : {
/**
* Defines the value of the control.
*
* The new value must be in the format set by <code>valueFormat</code>.
*
* The "Now" literal can also be assigned as a parameter to show the current date and/or time.
*/
value: { type: "string", group: "Data", defaultValue: null, bindable: "bindable" },
/**
* Defines the width of the control.
*/
width: { type: "sap.ui.core.CSSSize", group: "Dimension", defaultValue: "100%" },
/**
* Indicates whether the user can interact with the control or not.
* <b>Note:</b> Disabled controls cannot be focused and they are out of the tab-chain.
*/
enabled: { type: "boolean", group: "Behavior", defaultValue: true },
/**
* Defines whether the control can be modified by the user or not.
* <b>Note:</b> A user can tab to non-editable control, highlight it, and copy the text from it.
* @since 1.12.0
*/
editable: { type: "boolean", group: "Behavior", defaultValue: true },
/**
* Visualizes the validation state of the control, e.g. <code>Error</code>, <code>Warning</code>, <code>Success</code>.
*/
valueState: { type: "sap.ui.core.ValueState", group: "Appearance", defaultValue: sap.ui.core.ValueState.None },
/**
* Defines the text that appears in the value state message pop-up. If this is not specified, a default text is shown from the resource bundle.
* @since 1.26.0
*/
valueStateText: { type: "string", group: "Misc", defaultValue: null },
/**
* Indicates whether the value state message should be shown or not.
* @since 1.26.0
*/
showValueStateMessage: { type: "boolean", group: "Misc", defaultValue: true },
/**
* Defines the name of the control for the purposes of form submission.
*/
name: { type: "string", group: "Misc", defaultValue: null },
/**
* Defines a short hint intended to aid the user with data entry when the control has no value.
*/
placeholder: { type: "string", group: "Misc", defaultValue: null },
/**
* Defines the horizontal alignment of the text that is shown inside the input field.
* @since 1.26.0
*/
textAlign: { type: "sap.ui.core.TextAlign", group: "Appearance", defaultValue: sap.ui.core.TextAlign.Initial },
/**
* Defines the text directionality of the input field, e.g. <code>RTL</code>, <code>LTR</code>
* @since 1.28.0
*/
textDirection: { type: "sap.ui.core.TextDirection", group: "Appearance", defaultValue: sap.ui.core.TextDirection.Inherit },
/**
* Type of DateTimeInput (e.g. Date, Time, DateTime)
*/
type : {type : "sap.m.DateTimeInputType", group : "Data", defaultValue : sap.m.DateTimeInputType.Date},
/**
* Displays date value in this given format in text field. Default value is taken from locale settings.
* If you use data-binding on value property with type sap.ui.model.type.Date then you can ignore this property or the latter wins.
* If the user's browser supports native picker then this property is overwritten by browser with locale settings.
*/
displayFormat : {type : "string", group : "Appearance", defaultValue : null},
/**
* Given value property should match with valueFormat to parse date. Default value is taken from locale settings.
* You can only set and get value in this format.
* If you use data-binding on value property with type sap.ui.model.type.Date you can ignore this property or the latter wins.
*/
valueFormat : {type : "string", group : "Data", defaultValue : null},
/**
* This property as JavaScript Date Object can be used to assign a new value which is independent from valueFormat.
*/
dateValue : {type : "object", group : "Data", defaultValue : null}
},
aggregations: {
_picker: {type: "sap.ui.core.Control", multiple: false, visibility: "hidden"}
},
associations: {
/**
* Association to controls / IDs that label this control (see WAI-ARIA attribute aria-labelledby).
* @since 1.27.0
*/
ariaLabelledBy: { type: "sap.ui.core.Control", multiple: true, singularName: "ariaLabelledBy" }
},
events : {
/**
* This event gets fired when the selection has finished and the value has changed.
*/
change : {
parameters : {
/**
* The string value of the control in given valueFormat (or locale format).
*/
value : {type : "string"},
/**
* The value of control as JavaScript Date Object or null if value is empty.
*/
dateValue : {type : "object"},
/**
* if set, the entered value is a valid date.
* If not set the entered value cannot be converted to a date.
* @since 1.38.0
*/
valid : {type : "boolean"}
}
}
}
}});
!(function(oPrototype, $, oDevice) {
var oi18n = sap.m.getLocaleData();
$.extend(oPrototype, {
_types : {
Date : {
valueFormat : oi18n.getDatePattern("short"),
displayFormat : oi18n.getDatePattern("medium")
},
Time : {
valueFormat : oi18n.getTimePattern("short"),
displayFormat : oi18n.getTimePattern("short")
},
DateTime : {
valueFormat : oi18n.getDateTimePattern("short"), // does not include pattern but e.g "{1} {0}"
displayFormat : oi18n.getDateTimePattern("short") // does not include pattern but e.g "{1} {0}"
}
}
});
// build DateTime formats from Date And Time values
["Time", "Date"].forEach(function(sType, nIndex) {
["valueFormat", "displayFormat"].forEach(function(sFormat) {
var oTypes = oPrototype._types;
oTypes.DateTime[sFormat] = oTypes.DateTime[sFormat].replace("{" + nIndex + "}", oTypes[sType][sFormat]);
});
});
}(DateTimeInput.prototype, jQuery, sap.ui.Device));
DateTimeInput.prototype.init = function(){
// as date is the default type - > initialize with DatePicker
this.setType(sap.m.DateTimeInputType.Date);
};
DateTimeInput.prototype.onBeforeRendering = function() {
_updateFormatFromBinding.call(this);
};
DateTimeInput.prototype.getFocusDomRef = function() {
var oPicker = _getPicker.call(this);
return oPicker.getFocusDomRef();
};
DateTimeInput.prototype.getIdForLabel = function() {
var oPicker = _getPicker.call(this);
return oPicker.getIdForLabel();
};
DateTimeInput.prototype.setType = function(sType){
if (sType == this.getType() && _getPicker.call(this)) {
return this;
}
this.destroyAggregation("_picker");
var oPicker;
switch (sType) {
case sap.m.DateTimeInputType.DateTime:
jQuery.sap.require("sap.m.DateTimePicker");
oPicker = new sap.m.DateTimePicker(this.getId() + "-Picker");
break;
case sap.m.DateTimeInputType.Time:
jQuery.sap.require("sap.m.TimePicker");
oPicker = new sap.m.TimePicker(this.getId() + "-Picker",
{localeId: sap.ui.getCore().getConfiguration().getFormatSettings().getFormatLocale().toString()});
break;
default: // default is date
jQuery.sap.require("sap.m.DatePicker");
oPicker = new sap.m.DatePicker(this.getId() + "-Picker");
break;
}
// forward properties (also set default, may be different)
oPicker.setDisplayFormat(this.getDisplayFormat() || this._types[sType].displayFormat);
oPicker.setValueFormat(this.getValueFormat() || this._types[sType].valueFormat);
if (this.getDateValue()) {
oPicker.setDateValue(this.getDateValue()); // don't set Value -> as by switching type information can be lost
}
oPicker.setEnabled(this.getEnabled());
oPicker.setEditable(this.getEditable());
oPicker.setValueState(this.getValueState());
oPicker.setValueStateText(this.getValueStateText());
oPicker.setShowValueStateMessage(this.getShowValueStateMessage());
oPicker.setName(this.getName());
oPicker.setPlaceholder(this.getPlaceholder());
oPicker.setTextAlign(this.getTextAlign());
oPicker.setTextDirection(this.getTextDirection());
oPicker.setWidth("100%");
oPicker.attachChange(_handleChange, this);
var aAriaLabelledBy = this.getAriaLabelledBy();
for (var i = 0; i < aAriaLabelledBy.length; i++) {
oPicker.addAriaLabelledBy(aAriaLabelledBy[i]);
}
this.setAggregation("_picker", oPicker);
this.setProperty("type", sType); // re-render because picker control changes
return this;
};
DateTimeInput.prototype.setWidth = function(sWidth) {
this.setProperty("width", sWidth);
if (this.getDomRef()) {
sWidth = this.getWidth(); // to use validator
this.$().css("width", sWidth);
}
return this;
};
DateTimeInput.prototype.setValue = function(sValue) {
_updateFormatFromBinding.call(this); // to be sure to have the right format
sValue = this.validateProperty("value", sValue);
if (sValue.toLowerCase() == "now") {
return this.setDateValue(new Date());
}
if (sValue === this.getValue()) {
return this;
}
this.setProperty("value", sValue, true);
var oPicker = _getPicker.call(this);
oPicker.setValue(sValue);
var oDate = oPicker.getDateValue();
this.setProperty("dateValue", oDate, true);
return this;
};
DateTimeInput.prototype.setDateValue = function(oDate) {
if (oDate && !(oDate instanceof Date)) {
throw new Error("Date must be a JavaScript date object; " + this);
}
_updateFormatFromBinding.call(this); // to be sure to have the right format
this.setProperty("dateValue", oDate, true);
var oPicker = _getPicker.call(this);
oPicker.setDateValue(oDate);
var sValue = oPicker.getValue();
this.setProperty("value", sValue, true);
return this;
};
DateTimeInput.prototype.setDisplayFormat = function(sDisplayFormat) {
this.setProperty("displayFormat", sDisplayFormat, true);
var oPicker = _getPicker.call(this);
oPicker.setDisplayFormat(sDisplayFormat || this._types[this.getType()].displayFormat);
return this;
};
DateTimeInput.prototype.setValueFormat = function(sValueFormat) {
this.setProperty("valueFormat", sValueFormat, true);
var oPicker = _getPicker.call(this);
oPicker.setValueFormat(sValueFormat || this._types[this.getType()].ValueFormat);
return this;
};
DateTimeInput.prototype.setEnabled = function(bEnabled) {
this.setProperty("enabled", bEnabled, true);
var oPicker = _getPicker.call(this);
oPicker.setEnabled(bEnabled);
return this;
};
DateTimeInput.prototype.setEditable = function(bEditable) {
this.setProperty("editable", bEditable, true);
var oPicker = _getPicker.call(this);
oPicker.setEditable(bEditable);
return this;
};
DateTimeInput.prototype.setValueState = function(sValueState) {
this.setProperty("valueState", sValueState, true);
var oPicker = _getPicker.call(this);
oPicker.setValueState(sValueState);
return this;
};
DateTimeInput.prototype.setValueStateText = function(sValueStateText) {
this.setProperty("valueStateText", sValueStateText, true);
var oPicker = _getPicker.call(this);
oPicker.setValueStateText(sValueStateText);
return this;
};
DateTimeInput.prototype.setShowValueStateMessage = function(bShowValueStateMessage) {
this.setProperty("showValueStateMessage", bShowValueStateMessage, true);
var oPicker = _getPicker.call(this);
oPicker.setShowValueStateMessage(bShowValueStateMessage);
return this;
};
DateTimeInput.prototype.setName = function(sName) {
this.setProperty("name", sName, true);
var oPicker = _getPicker.call(this);
oPicker.setName(sName);
return this;
};
DateTimeInput.prototype.setPlaceholder = function(sPlaceholder) {
this.setProperty("placeholder", sPlaceholder, true);
var oPicker = _getPicker.call(this);
oPicker.setPlaceholder(sPlaceholder);
return this;
};
DateTimeInput.prototype.setTextAlign = function(sTextAlign) {
this.setProperty("textAlign", sTextAlign, true);
var oPicker = _getPicker.call(this);
oPicker.setTextAlign(sTextAlign);
return this;
};
DateTimeInput.prototype.setTextDirection = function(sTextDirection) {
this.setProperty("textDirection", sTextDirection, true);
var oPicker = _getPicker.call(this);
oPicker.setTextDirection(sTextDirection);
return this;
};
DateTimeInput.prototype.addAriaLabelledBy = function(sID) {
this.addAssociation("ariaLabelledBy", sID, true);
var oPicker = _getPicker.call(this);
oPicker.addAriaLabelledBy(sID);
return this;
};
DateTimeInput.prototype.removeAriaLabelledBy = function(sID) {
this.removeAssociation("ariaLabelledBy", sID, true);
var oPicker = _getPicker.call(this);
oPicker.removeAriaLabelledBy(sID);
return this;
};
DateTimeInput.prototype.removeAllAriaLabelledBy = function() {
this.removeAssociation("ariaLabelledBy", true);
var oPicker = _getPicker.call(this);
oPicker.removeAllAriaLabelledBy();
return this;
};
/**
* @see {sap.ui.core.Control#getAccessibilityInfo}
* @protected
*/
DateTimeInput.prototype.getAccessibilityInfo = function() {
var oPicker = _getPicker.call(this);
return oPicker && oPicker.getAccessibilityInfo ? oPicker.getAccessibilityInfo() : null;
};
function _getPicker(){
return this.getAggregation("_picker");
}
function _updateFormatFromBinding(){
var oBinding = this.getBinding("value");
if (oBinding && oBinding.oType && (oBinding.oType instanceof Date1)) {
var sPattern = oBinding.oType.getOutputPattern();
var oPicker = _getPicker.call(this);
if (oPicker.getValueFormat() != sPattern) |
if (oPicker.getDisplayFormat() != sPattern) {
oPicker.setDisplayFormat(sPattern);
}
}
}
function _handleChange(oEvent) {
var sValue = oEvent.getParameter("value");
var oDateValue;
var bValid = oEvent.getParameter("valid");
this.setProperty("value", sValue, true);
if (bValid) {
oDateValue = oEvent.oSource.getDateValue();
this.setProperty("dateValue", oDateValue, true);
}
// newValue and newDateValue for compatibility reasons
this.fireChange({value: sValue, newValue: sValue, valid: bValid, dateValue: oDateValue, newDateValue: oDateValue});
}
return DateTimeInput;
}, /* bExport= */ true);
| {
oPicker.setValueFormat(sPattern);
} | conditional_block |
DateTimeInput-dbg.js | /*!
* UI development toolkit for HTML5 (OpenUI5)
* (c) Copyright 2009-2016 SAP SE or an SAP affiliate company.
* Licensed under the Apache License, Version 2.0 - see LICENSE.txt.
*/
// Provides control sap.m.DateTimeInput.
sap.ui.define(['jquery.sap.global', 'sap/ui/core/Control', './library', 'sap/ui/model/type/Date'],
function(jQuery, Control, library, Date1) {
"use strict";
/**
* Constructor for a new DateTimeInput.
*
* @param {string} [sId] ID for the new control, generated automatically if no ID is given
* @param {object} [mSettings] Initial settings for the new control
*
* @class
* Allows end users to interact with date and/or time and select from a date and/or time pad.
*
* <b>Note:</b> This control should not be used any longer, instead please use the dedicated <code>sap.m.DatePicker</code>, <code>sap.m.TimePicker</code> or <code>sap.m.DateTimePicker</code> control.
* @extends sap.ui.core.Control
*
* @author SAP SE
* @version 1.38.4
*
* @constructor
* @public
* @since 1.9.1
* @deprecated Since version 1.32.8. Instead, use the dedicated <code>sap.m.DatePicker</code>, <code>sap.m.TimePicker</code> or <code>sap.m.DateTimePicker</code> controls.
* @alias sap.m.DateTimeInput
* @ui5-metamodel This control/element also will be described in the UI5 (legacy) designtime metamodel
*/
var DateTimeInput = Control.extend("sap.m.DateTimeInput", /** @lends sap.m.DateTimeInput.prototype */ { metadata : {
library : "sap.m",
properties : {
/**
* Defines the value of the control.
*
* The new value must be in the format set by <code>valueFormat</code>.
*
* The "Now" literal can also be assigned as a parameter to show the current date and/or time.
*/
value: { type: "string", group: "Data", defaultValue: null, bindable: "bindable" },
/**
* Defines the width of the control.
*/
width: { type: "sap.ui.core.CSSSize", group: "Dimension", defaultValue: "100%" },
/**
* Indicates whether the user can interact with the control or not.
* <b>Note:</b> Disabled controls cannot be focused and they are out of the tab-chain.
*/
enabled: { type: "boolean", group: "Behavior", defaultValue: true },
/**
* Defines whether the control can be modified by the user or not.
* <b>Note:</b> A user can tab to non-editable control, highlight it, and copy the text from it.
* @since 1.12.0
*/
editable: { type: "boolean", group: "Behavior", defaultValue: true },
/**
* Visualizes the validation state of the control, e.g. <code>Error</code>, <code>Warning</code>, <code>Success</code>.
*/
valueState: { type: "sap.ui.core.ValueState", group: "Appearance", defaultValue: sap.ui.core.ValueState.None },
/**
* Defines the text that appears in the value state message pop-up. If this is not specified, a default text is shown from the resource bundle.
* @since 1.26.0
*/
valueStateText: { type: "string", group: "Misc", defaultValue: null },
/**
* Indicates whether the value state message should be shown or not.
* @since 1.26.0
*/
showValueStateMessage: { type: "boolean", group: "Misc", defaultValue: true },
/**
* Defines the name of the control for the purposes of form submission.
*/
name: { type: "string", group: "Misc", defaultValue: null },
/**
* Defines a short hint intended to aid the user with data entry when the control has no value.
*/
placeholder: { type: "string", group: "Misc", defaultValue: null },
/**
* Defines the horizontal alignment of the text that is shown inside the input field.
* @since 1.26.0
*/
textAlign: { type: "sap.ui.core.TextAlign", group: "Appearance", defaultValue: sap.ui.core.TextAlign.Initial },
/**
* Defines the text directionality of the input field, e.g. <code>RTL</code>, <code>LTR</code>
* @since 1.28.0
*/
textDirection: { type: "sap.ui.core.TextDirection", group: "Appearance", defaultValue: sap.ui.core.TextDirection.Inherit },
/**
* Type of DateTimeInput (e.g. Date, Time, DateTime)
*/
type : {type : "sap.m.DateTimeInputType", group : "Data", defaultValue : sap.m.DateTimeInputType.Date},
/**
* Displays date value in this given format in text field. Default value is taken from locale settings.
* If you use data-binding on value property with type sap.ui.model.type.Date then you can ignore this property or the latter wins.
* If the user's browser supports native picker then this property is overwritten by browser with locale settings.
*/
displayFormat : {type : "string", group : "Appearance", defaultValue : null},
/**
* Given value property should match with valueFormat to parse date. Default value is taken from locale settings.
* You can only set and get value in this format.
* If you use data-binding on value property with type sap.ui.model.type.Date you can ignore this property or the latter wins.
*/
valueFormat : {type : "string", group : "Data", defaultValue : null},
/**
* This property as JavaScript Date Object can be used to assign a new value which is independent from valueFormat.
*/
dateValue : {type : "object", group : "Data", defaultValue : null}
},
aggregations: {
_picker: {type: "sap.ui.core.Control", multiple: false, visibility: "hidden"}
},
associations: {
/**
* Association to controls / IDs that label this control (see WAI-ARIA attribute aria-labelledby).
* @since 1.27.0
*/
ariaLabelledBy: { type: "sap.ui.core.Control", multiple: true, singularName: "ariaLabelledBy" }
},
events : {
/**
* This event gets fired when the selection has finished and the value has changed.
*/
change : {
parameters : {
/**
* The string value of the control in given valueFormat (or locale format).
*/
value : {type : "string"},
/**
* The value of control as JavaScript Date Object or null if value is empty.
*/
dateValue : {type : "object"},
/**
* if set, the entered value is a valid date.
* If not set the entered value cannot be converted to a date.
* @since 1.38.0
*/
valid : {type : "boolean"}
}
}
}
}});
!(function(oPrototype, $, oDevice) {
var oi18n = sap.m.getLocaleData();
$.extend(oPrototype, {
_types : {
Date : {
valueFormat : oi18n.getDatePattern("short"),
displayFormat : oi18n.getDatePattern("medium")
},
Time : {
valueFormat : oi18n.getTimePattern("short"),
displayFormat : oi18n.getTimePattern("short")
},
DateTime : {
valueFormat : oi18n.getDateTimePattern("short"), // does not include pattern but e.g "{1} {0}"
displayFormat : oi18n.getDateTimePattern("short") // does not include pattern but e.g "{1} {0}"
}
}
});
// build DateTime formats from Date And Time values
["Time", "Date"].forEach(function(sType, nIndex) {
["valueFormat", "displayFormat"].forEach(function(sFormat) {
var oTypes = oPrototype._types;
oTypes.DateTime[sFormat] = oTypes.DateTime[sFormat].replace("{" + nIndex + "}", oTypes[sType][sFormat]);
});
});
}(DateTimeInput.prototype, jQuery, sap.ui.Device));
DateTimeInput.prototype.init = function(){
// as date is the default type - > initialize with DatePicker
this.setType(sap.m.DateTimeInputType.Date);
};
DateTimeInput.prototype.onBeforeRendering = function() {
_updateFormatFromBinding.call(this);
};
DateTimeInput.prototype.getFocusDomRef = function() {
var oPicker = _getPicker.call(this);
return oPicker.getFocusDomRef();
};
DateTimeInput.prototype.getIdForLabel = function() {
var oPicker = _getPicker.call(this);
return oPicker.getIdForLabel();
};
DateTimeInput.prototype.setType = function(sType){
if (sType == this.getType() && _getPicker.call(this)) {
return this;
}
this.destroyAggregation("_picker");
var oPicker;
switch (sType) {
case sap.m.DateTimeInputType.DateTime:
jQuery.sap.require("sap.m.DateTimePicker");
oPicker = new sap.m.DateTimePicker(this.getId() + "-Picker");
break;
case sap.m.DateTimeInputType.Time:
jQuery.sap.require("sap.m.TimePicker");
oPicker = new sap.m.TimePicker(this.getId() + "-Picker",
{localeId: sap.ui.getCore().getConfiguration().getFormatSettings().getFormatLocale().toString()});
break;
default: // default is date
jQuery.sap.require("sap.m.DatePicker");
oPicker = new sap.m.DatePicker(this.getId() + "-Picker");
break;
}
// forward properties (also set default, may be different)
oPicker.setDisplayFormat(this.getDisplayFormat() || this._types[sType].displayFormat);
oPicker.setValueFormat(this.getValueFormat() || this._types[sType].valueFormat);
if (this.getDateValue()) {
oPicker.setDateValue(this.getDateValue()); // don't set Value -> as by switching type information can be lost
}
oPicker.setEnabled(this.getEnabled());
oPicker.setEditable(this.getEditable());
oPicker.setValueState(this.getValueState());
oPicker.setValueStateText(this.getValueStateText());
oPicker.setShowValueStateMessage(this.getShowValueStateMessage());
oPicker.setName(this.getName());
oPicker.setPlaceholder(this.getPlaceholder());
oPicker.setTextAlign(this.getTextAlign());
oPicker.setTextDirection(this.getTextDirection());
oPicker.setWidth("100%");
oPicker.attachChange(_handleChange, this);
var aAriaLabelledBy = this.getAriaLabelledBy();
for (var i = 0; i < aAriaLabelledBy.length; i++) {
oPicker.addAriaLabelledBy(aAriaLabelledBy[i]);
}
this.setAggregation("_picker", oPicker);
this.setProperty("type", sType); // re-render because picker control changes
return this;
};
DateTimeInput.prototype.setWidth = function(sWidth) {
this.setProperty("width", sWidth);
if (this.getDomRef()) {
sWidth = this.getWidth(); // to use validator
this.$().css("width", sWidth);
}
return this;
};
DateTimeInput.prototype.setValue = function(sValue) {
_updateFormatFromBinding.call(this); // to be sure to have the right format
sValue = this.validateProperty("value", sValue);
if (sValue.toLowerCase() == "now") {
return this.setDateValue(new Date());
}
if (sValue === this.getValue()) {
return this;
}
this.setProperty("value", sValue, true);
var oPicker = _getPicker.call(this);
oPicker.setValue(sValue);
var oDate = oPicker.getDateValue();
this.setProperty("dateValue", oDate, true);
return this;
};
DateTimeInput.prototype.setDateValue = function(oDate) {
if (oDate && !(oDate instanceof Date)) {
throw new Error("Date must be a JavaScript date object; " + this);
}
_updateFormatFromBinding.call(this); // to be sure to have the right format
this.setProperty("dateValue", oDate, true);
var oPicker = _getPicker.call(this);
oPicker.setDateValue(oDate);
var sValue = oPicker.getValue();
this.setProperty("value", sValue, true);
return this;
};
DateTimeInput.prototype.setDisplayFormat = function(sDisplayFormat) {
this.setProperty("displayFormat", sDisplayFormat, true);
var oPicker = _getPicker.call(this);
oPicker.setDisplayFormat(sDisplayFormat || this._types[this.getType()].displayFormat);
return this;
};
DateTimeInput.prototype.setValueFormat = function(sValueFormat) {
this.setProperty("valueFormat", sValueFormat, true);
var oPicker = _getPicker.call(this);
oPicker.setValueFormat(sValueFormat || this._types[this.getType()].ValueFormat);
return this;
};
DateTimeInput.prototype.setEnabled = function(bEnabled) {
this.setProperty("enabled", bEnabled, true);
var oPicker = _getPicker.call(this);
oPicker.setEnabled(bEnabled);
return this;
};
DateTimeInput.prototype.setEditable = function(bEditable) {
this.setProperty("editable", bEditable, true);
var oPicker = _getPicker.call(this);
oPicker.setEditable(bEditable);
return this;
};
DateTimeInput.prototype.setValueState = function(sValueState) {
this.setProperty("valueState", sValueState, true);
var oPicker = _getPicker.call(this);
oPicker.setValueState(sValueState);
return this;
};
DateTimeInput.prototype.setValueStateText = function(sValueStateText) {
this.setProperty("valueStateText", sValueStateText, true);
var oPicker = _getPicker.call(this);
oPicker.setValueStateText(sValueStateText);
return this;
};
DateTimeInput.prototype.setShowValueStateMessage = function(bShowValueStateMessage) {
this.setProperty("showValueStateMessage", bShowValueStateMessage, true);
var oPicker = _getPicker.call(this);
oPicker.setShowValueStateMessage(bShowValueStateMessage);
return this;
};
DateTimeInput.prototype.setName = function(sName) {
this.setProperty("name", sName, true);
var oPicker = _getPicker.call(this);
oPicker.setName(sName);
return this;
};
DateTimeInput.prototype.setPlaceholder = function(sPlaceholder) {
this.setProperty("placeholder", sPlaceholder, true);
var oPicker = _getPicker.call(this);
oPicker.setPlaceholder(sPlaceholder);
return this;
};
DateTimeInput.prototype.setTextAlign = function(sTextAlign) {
this.setProperty("textAlign", sTextAlign, true);
var oPicker = _getPicker.call(this);
oPicker.setTextAlign(sTextAlign);
return this;
};
DateTimeInput.prototype.setTextDirection = function(sTextDirection) {
this.setProperty("textDirection", sTextDirection, true);
var oPicker = _getPicker.call(this);
oPicker.setTextDirection(sTextDirection);
return this;
};
DateTimeInput.prototype.addAriaLabelledBy = function(sID) {
this.addAssociation("ariaLabelledBy", sID, true);
var oPicker = _getPicker.call(this);
oPicker.addAriaLabelledBy(sID);
return this;
};
DateTimeInput.prototype.removeAriaLabelledBy = function(sID) {
this.removeAssociation("ariaLabelledBy", sID, true);
var oPicker = _getPicker.call(this);
oPicker.removeAriaLabelledBy(sID);
return this;
};
DateTimeInput.prototype.removeAllAriaLabelledBy = function() {
this.removeAssociation("ariaLabelledBy", true);
var oPicker = _getPicker.call(this);
oPicker.removeAllAriaLabelledBy();
return this;
};
/**
* @see {sap.ui.core.Control#getAccessibilityInfo}
* @protected
*/
DateTimeInput.prototype.getAccessibilityInfo = function() {
var oPicker = _getPicker.call(this);
return oPicker && oPicker.getAccessibilityInfo ? oPicker.getAccessibilityInfo() : null;
};
function _getPicker(){
return this.getAggregation("_picker");
}
function _updateFormatFromBinding() |
function _handleChange(oEvent) {
var sValue = oEvent.getParameter("value");
var oDateValue;
var bValid = oEvent.getParameter("valid");
this.setProperty("value", sValue, true);
if (bValid) {
oDateValue = oEvent.oSource.getDateValue();
this.setProperty("dateValue", oDateValue, true);
}
// newValue and newDateValue for compatibility reasons
this.fireChange({value: sValue, newValue: sValue, valid: bValid, dateValue: oDateValue, newDateValue: oDateValue});
}
return DateTimeInput;
}, /* bExport= */ true);
| {
var oBinding = this.getBinding("value");
if (oBinding && oBinding.oType && (oBinding.oType instanceof Date1)) {
var sPattern = oBinding.oType.getOutputPattern();
var oPicker = _getPicker.call(this);
if (oPicker.getValueFormat() != sPattern) {
oPicker.setValueFormat(sPattern);
}
if (oPicker.getDisplayFormat() != sPattern) {
oPicker.setDisplayFormat(sPattern);
}
}
} | identifier_body |
DateTimeInput-dbg.js | /*!
* UI development toolkit for HTML5 (OpenUI5)
* (c) Copyright 2009-2016 SAP SE or an SAP affiliate company.
* Licensed under the Apache License, Version 2.0 - see LICENSE.txt.
*/
// Provides control sap.m.DateTimeInput.
sap.ui.define(['jquery.sap.global', 'sap/ui/core/Control', './library', 'sap/ui/model/type/Date'],
function(jQuery, Control, library, Date1) {
"use strict";
/**
* Constructor for a new DateTimeInput.
*
* @param {string} [sId] ID for the new control, generated automatically if no ID is given
* @param {object} [mSettings] Initial settings for the new control
*
* @class
* Allows end users to interact with date and/or time and select from a date and/or time pad.
*
* <b>Note:</b> This control should not be used any longer, instead please use the dedicated <code>sap.m.DatePicker</code>, <code>sap.m.TimePicker</code> or <code>sap.m.DateTimePicker</code> control.
* @extends sap.ui.core.Control
*
* @author SAP SE
* @version 1.38.4
*
* @constructor
* @public
* @since 1.9.1
* @deprecated Since version 1.32.8. Instead, use the dedicated <code>sap.m.DatePicker</code>, <code>sap.m.TimePicker</code> or <code>sap.m.DateTimePicker</code> controls.
* @alias sap.m.DateTimeInput
* @ui5-metamodel This control/element also will be described in the UI5 (legacy) designtime metamodel
*/
var DateTimeInput = Control.extend("sap.m.DateTimeInput", /** @lends sap.m.DateTimeInput.prototype */ { metadata : {
library : "sap.m",
properties : {
/**
* Defines the value of the control.
*
* The new value must be in the format set by <code>valueFormat</code>.
*
* The "Now" literal can also be assigned as a parameter to show the current date and/or time.
*/
value: { type: "string", group: "Data", defaultValue: null, bindable: "bindable" },
/**
* Defines the width of the control.
*/
width: { type: "sap.ui.core.CSSSize", group: "Dimension", defaultValue: "100%" },
/**
* Indicates whether the user can interact with the control or not.
* <b>Note:</b> Disabled controls cannot be focused and they are out of the tab-chain.
*/
enabled: { type: "boolean", group: "Behavior", defaultValue: true },
/**
* Defines whether the control can be modified by the user or not.
* <b>Note:</b> A user can tab to non-editable control, highlight it, and copy the text from it.
* @since 1.12.0
*/
editable: { type: "boolean", group: "Behavior", defaultValue: true },
/**
* Visualizes the validation state of the control, e.g. <code>Error</code>, <code>Warning</code>, <code>Success</code>.
*/
valueState: { type: "sap.ui.core.ValueState", group: "Appearance", defaultValue: sap.ui.core.ValueState.None },
/**
* Defines the text that appears in the value state message pop-up. If this is not specified, a default text is shown from the resource bundle.
* @since 1.26.0
*/
valueStateText: { type: "string", group: "Misc", defaultValue: null },
/**
* Indicates whether the value state message should be shown or not.
* @since 1.26.0
*/
showValueStateMessage: { type: "boolean", group: "Misc", defaultValue: true },
/**
* Defines the name of the control for the purposes of form submission.
*/
name: { type: "string", group: "Misc", defaultValue: null },
/**
* Defines a short hint intended to aid the user with data entry when the control has no value.
*/
placeholder: { type: "string", group: "Misc", defaultValue: null },
/**
* Defines the horizontal alignment of the text that is shown inside the input field.
* @since 1.26.0
*/
textAlign: { type: "sap.ui.core.TextAlign", group: "Appearance", defaultValue: sap.ui.core.TextAlign.Initial },
/**
* Defines the text directionality of the input field, e.g. <code>RTL</code>, <code>LTR</code>
* @since 1.28.0
*/
textDirection: { type: "sap.ui.core.TextDirection", group: "Appearance", defaultValue: sap.ui.core.TextDirection.Inherit },
/**
* Type of DateTimeInput (e.g. Date, Time, DateTime)
*/
type : {type : "sap.m.DateTimeInputType", group : "Data", defaultValue : sap.m.DateTimeInputType.Date},
/**
* Displays date value in this given format in text field. Default value is taken from locale settings.
* If you use data-binding on value property with type sap.ui.model.type.Date then you can ignore this property or the latter wins.
* If the user's browser supports native picker then this property is overwritten by browser with locale settings.
*/
displayFormat : {type : "string", group : "Appearance", defaultValue : null},
/**
* Given value property should match with valueFormat to parse date. Default value is taken from locale settings.
* You can only set and get value in this format.
* If you use data-binding on value property with type sap.ui.model.type.Date you can ignore this property or the latter wins.
*/
valueFormat : {type : "string", group : "Data", defaultValue : null},
/**
* This property as JavaScript Date Object can be used to assign a new value which is independent from valueFormat.
*/
dateValue : {type : "object", group : "Data", defaultValue : null}
},
aggregations: {
_picker: {type: "sap.ui.core.Control", multiple: false, visibility: "hidden"}
},
associations: {
/**
* Association to controls / IDs that label this control (see WAI-ARIA attribute aria-labelledby).
* @since 1.27.0
*/
ariaLabelledBy: { type: "sap.ui.core.Control", multiple: true, singularName: "ariaLabelledBy" }
},
events : {
/**
* This event gets fired when the selection has finished and the value has changed.
*/
change : {
parameters : {
/**
* The string value of the control in given valueFormat (or locale format).
*/
value : {type : "string"},
/**
* The value of control as JavaScript Date Object or null if value is empty.
*/
dateValue : {type : "object"},
/**
* if set, the entered value is a valid date.
* If not set the entered value cannot be converted to a date.
* @since 1.38.0
*/
valid : {type : "boolean"}
}
}
}
}});
!(function(oPrototype, $, oDevice) {
var oi18n = sap.m.getLocaleData();
$.extend(oPrototype, {
_types : {
Date : {
valueFormat : oi18n.getDatePattern("short"),
displayFormat : oi18n.getDatePattern("medium")
},
Time : {
valueFormat : oi18n.getTimePattern("short"),
displayFormat : oi18n.getTimePattern("short")
},
DateTime : {
valueFormat : oi18n.getDateTimePattern("short"), // does not include pattern but e.g "{1} {0}"
displayFormat : oi18n.getDateTimePattern("short") // does not include pattern but e.g "{1} {0}"
}
}
});
// build DateTime formats from Date And Time values
["Time", "Date"].forEach(function(sType, nIndex) {
["valueFormat", "displayFormat"].forEach(function(sFormat) {
var oTypes = oPrototype._types;
oTypes.DateTime[sFormat] = oTypes.DateTime[sFormat].replace("{" + nIndex + "}", oTypes[sType][sFormat]);
});
});
}(DateTimeInput.prototype, jQuery, sap.ui.Device));
DateTimeInput.prototype.init = function(){
// as date is the default type - > initialize with DatePicker
this.setType(sap.m.DateTimeInputType.Date);
};
DateTimeInput.prototype.onBeforeRendering = function() {
_updateFormatFromBinding.call(this);
};
DateTimeInput.prototype.getFocusDomRef = function() {
var oPicker = _getPicker.call(this);
return oPicker.getFocusDomRef();
};
DateTimeInput.prototype.getIdForLabel = function() {
var oPicker = _getPicker.call(this);
return oPicker.getIdForLabel();
};
DateTimeInput.prototype.setType = function(sType){
if (sType == this.getType() && _getPicker.call(this)) {
return this;
}
this.destroyAggregation("_picker");
var oPicker;
switch (sType) {
case sap.m.DateTimeInputType.DateTime:
jQuery.sap.require("sap.m.DateTimePicker");
oPicker = new sap.m.DateTimePicker(this.getId() + "-Picker");
break;
case sap.m.DateTimeInputType.Time:
jQuery.sap.require("sap.m.TimePicker");
oPicker = new sap.m.TimePicker(this.getId() + "-Picker",
{localeId: sap.ui.getCore().getConfiguration().getFormatSettings().getFormatLocale().toString()});
break;
default: // default is date
jQuery.sap.require("sap.m.DatePicker");
oPicker = new sap.m.DatePicker(this.getId() + "-Picker");
break;
}
// forward properties (also set default, may be different)
oPicker.setDisplayFormat(this.getDisplayFormat() || this._types[sType].displayFormat);
oPicker.setValueFormat(this.getValueFormat() || this._types[sType].valueFormat);
if (this.getDateValue()) {
oPicker.setDateValue(this.getDateValue()); // don't set Value -> as by switching type information can be lost
}
oPicker.setEnabled(this.getEnabled());
oPicker.setEditable(this.getEditable());
oPicker.setValueState(this.getValueState());
oPicker.setValueStateText(this.getValueStateText());
oPicker.setShowValueStateMessage(this.getShowValueStateMessage());
oPicker.setName(this.getName());
oPicker.setPlaceholder(this.getPlaceholder());
oPicker.setTextAlign(this.getTextAlign());
oPicker.setTextDirection(this.getTextDirection());
oPicker.setWidth("100%");
oPicker.attachChange(_handleChange, this);
var aAriaLabelledBy = this.getAriaLabelledBy();
for (var i = 0; i < aAriaLabelledBy.length; i++) {
oPicker.addAriaLabelledBy(aAriaLabelledBy[i]);
}
this.setAggregation("_picker", oPicker);
this.setProperty("type", sType); // re-render because picker control changes
return this;
};
DateTimeInput.prototype.setWidth = function(sWidth) {
this.setProperty("width", sWidth);
if (this.getDomRef()) {
sWidth = this.getWidth(); // to use validator
this.$().css("width", sWidth);
}
return this;
};
DateTimeInput.prototype.setValue = function(sValue) {
_updateFormatFromBinding.call(this); // to be sure to have the right format
sValue = this.validateProperty("value", sValue);
if (sValue.toLowerCase() == "now") {
return this.setDateValue(new Date());
}
if (sValue === this.getValue()) {
return this;
}
this.setProperty("value", sValue, true);
var oPicker = _getPicker.call(this);
oPicker.setValue(sValue);
var oDate = oPicker.getDateValue();
this.setProperty("dateValue", oDate, true);
return this;
};
DateTimeInput.prototype.setDateValue = function(oDate) {
if (oDate && !(oDate instanceof Date)) {
throw new Error("Date must be a JavaScript date object; " + this);
}
_updateFormatFromBinding.call(this); // to be sure to have the right format
this.setProperty("dateValue", oDate, true);
var oPicker = _getPicker.call(this);
oPicker.setDateValue(oDate);
var sValue = oPicker.getValue();
this.setProperty("value", sValue, true);
return this;
};
DateTimeInput.prototype.setDisplayFormat = function(sDisplayFormat) {
this.setProperty("displayFormat", sDisplayFormat, true);
var oPicker = _getPicker.call(this);
oPicker.setDisplayFormat(sDisplayFormat || this._types[this.getType()].displayFormat);
return this;
};
DateTimeInput.prototype.setValueFormat = function(sValueFormat) {
this.setProperty("valueFormat", sValueFormat, true);
var oPicker = _getPicker.call(this);
oPicker.setValueFormat(sValueFormat || this._types[this.getType()].ValueFormat);
return this;
};
DateTimeInput.prototype.setEnabled = function(bEnabled) {
this.setProperty("enabled", bEnabled, true);
var oPicker = _getPicker.call(this);
oPicker.setEnabled(bEnabled);
return this;
};
DateTimeInput.prototype.setEditable = function(bEditable) {
this.setProperty("editable", bEditable, true);
var oPicker = _getPicker.call(this);
oPicker.setEditable(bEditable);
return this;
};
DateTimeInput.prototype.setValueState = function(sValueState) {
this.setProperty("valueState", sValueState, true);
var oPicker = _getPicker.call(this);
oPicker.setValueState(sValueState);
return this;
};
DateTimeInput.prototype.setValueStateText = function(sValueStateText) {
this.setProperty("valueStateText", sValueStateText, true);
var oPicker = _getPicker.call(this);
oPicker.setValueStateText(sValueStateText);
return this;
};
DateTimeInput.prototype.setShowValueStateMessage = function(bShowValueStateMessage) {
this.setProperty("showValueStateMessage", bShowValueStateMessage, true);
var oPicker = _getPicker.call(this);
oPicker.setShowValueStateMessage(bShowValueStateMessage);
return this;
};
DateTimeInput.prototype.setName = function(sName) {
this.setProperty("name", sName, true);
var oPicker = _getPicker.call(this);
oPicker.setName(sName);
return this;
};
DateTimeInput.prototype.setPlaceholder = function(sPlaceholder) {
this.setProperty("placeholder", sPlaceholder, true);
var oPicker = _getPicker.call(this);
oPicker.setPlaceholder(sPlaceholder);
return this;
};
DateTimeInput.prototype.setTextAlign = function(sTextAlign) {
this.setProperty("textAlign", sTextAlign, true);
var oPicker = _getPicker.call(this);
oPicker.setTextAlign(sTextAlign);
return this;
};
DateTimeInput.prototype.setTextDirection = function(sTextDirection) {
this.setProperty("textDirection", sTextDirection, true);
var oPicker = _getPicker.call(this);
oPicker.setTextDirection(sTextDirection);
return this;
};
DateTimeInput.prototype.addAriaLabelledBy = function(sID) {
this.addAssociation("ariaLabelledBy", sID, true);
var oPicker = _getPicker.call(this);
oPicker.addAriaLabelledBy(sID);
return this;
};
DateTimeInput.prototype.removeAriaLabelledBy = function(sID) {
this.removeAssociation("ariaLabelledBy", sID, true);
var oPicker = _getPicker.call(this);
oPicker.removeAriaLabelledBy(sID);
return this;
};
DateTimeInput.prototype.removeAllAriaLabelledBy = function() {
this.removeAssociation("ariaLabelledBy", true);
var oPicker = _getPicker.call(this);
oPicker.removeAllAriaLabelledBy();
return this;
};
/**
* @see {sap.ui.core.Control#getAccessibilityInfo}
* @protected
*/
DateTimeInput.prototype.getAccessibilityInfo = function() {
var oPicker = _getPicker.call(this);
return oPicker && oPicker.getAccessibilityInfo ? oPicker.getAccessibilityInfo() : null;
};
function _getPicker(){
return this.getAggregation("_picker");
}
function | (){
var oBinding = this.getBinding("value");
if (oBinding && oBinding.oType && (oBinding.oType instanceof Date1)) {
var sPattern = oBinding.oType.getOutputPattern();
var oPicker = _getPicker.call(this);
if (oPicker.getValueFormat() != sPattern) {
oPicker.setValueFormat(sPattern);
}
if (oPicker.getDisplayFormat() != sPattern) {
oPicker.setDisplayFormat(sPattern);
}
}
}
function _handleChange(oEvent) {
var sValue = oEvent.getParameter("value");
var oDateValue;
var bValid = oEvent.getParameter("valid");
this.setProperty("value", sValue, true);
if (bValid) {
oDateValue = oEvent.oSource.getDateValue();
this.setProperty("dateValue", oDateValue, true);
}
// newValue and newDateValue for compatibility reasons
this.fireChange({value: sValue, newValue: sValue, valid: bValid, dateValue: oDateValue, newDateValue: oDateValue});
}
return DateTimeInput;
}, /* bExport= */ true);
| _updateFormatFromBinding | identifier_name |
DateTimeInput-dbg.js | /*!
* UI development toolkit for HTML5 (OpenUI5)
* (c) Copyright 2009-2016 SAP SE or an SAP affiliate company.
* Licensed under the Apache License, Version 2.0 - see LICENSE.txt.
*/
// Provides control sap.m.DateTimeInput.
sap.ui.define(['jquery.sap.global', 'sap/ui/core/Control', './library', 'sap/ui/model/type/Date'],
function(jQuery, Control, library, Date1) {
"use strict";
/**
* Constructor for a new DateTimeInput.
*
* @param {string} [sId] ID for the new control, generated automatically if no ID is given
* @param {object} [mSettings] Initial settings for the new control
*
* @class
* Allows end users to interact with date and/or time and select from a date and/or time pad.
*
* <b>Note:</b> This control should not be used any longer, instead please use the dedicated <code>sap.m.DatePicker</code>, <code>sap.m.TimePicker</code> or <code>sap.m.DateTimePicker</code> control.
* @extends sap.ui.core.Control
*
* @author SAP SE
* @version 1.38.4
*
* @constructor
* @public
* @since 1.9.1
* @deprecated Since version 1.32.8. Instead, use the dedicated <code>sap.m.DatePicker</code>, <code>sap.m.TimePicker</code> or <code>sap.m.DateTimePicker</code> controls.
* @alias sap.m.DateTimeInput
* @ui5-metamodel This control/element also will be described in the UI5 (legacy) designtime metamodel
*/
var DateTimeInput = Control.extend("sap.m.DateTimeInput", /** @lends sap.m.DateTimeInput.prototype */ { metadata : {
library : "sap.m",
properties : {
/**
* Defines the value of the control.
*
* The new value must be in the format set by <code>valueFormat</code>.
*
* The "Now" literal can also be assigned as a parameter to show the current date and/or time.
*/
value: { type: "string", group: "Data", defaultValue: null, bindable: "bindable" },
/**
* Defines the width of the control.
*/
width: { type: "sap.ui.core.CSSSize", group: "Dimension", defaultValue: "100%" },
/**
* Indicates whether the user can interact with the control or not.
* <b>Note:</b> Disabled controls cannot be focused and they are out of the tab-chain.
*/
enabled: { type: "boolean", group: "Behavior", defaultValue: true },
/**
* Defines whether the control can be modified by the user or not.
* <b>Note:</b> A user can tab to non-editable control, highlight it, and copy the text from it.
* @since 1.12.0
*/
editable: { type: "boolean", group: "Behavior", defaultValue: true },
/**
* Visualizes the validation state of the control, e.g. <code>Error</code>, <code>Warning</code>, <code>Success</code>.
*/
valueState: { type: "sap.ui.core.ValueState", group: "Appearance", defaultValue: sap.ui.core.ValueState.None },
/**
* Defines the text that appears in the value state message pop-up. If this is not specified, a default text is shown from the resource bundle.
* @since 1.26.0
*/
valueStateText: { type: "string", group: "Misc", defaultValue: null },
/**
* Indicates whether the value state message should be shown or not.
* @since 1.26.0
*/
showValueStateMessage: { type: "boolean", group: "Misc", defaultValue: true },
/**
* Defines the name of the control for the purposes of form submission.
*/
name: { type: "string", group: "Misc", defaultValue: null },
/**
* Defines a short hint intended to aid the user with data entry when the control has no value.
*/
placeholder: { type: "string", group: "Misc", defaultValue: null },
/**
* Defines the horizontal alignment of the text that is shown inside the input field.
* @since 1.26.0
*/
textAlign: { type: "sap.ui.core.TextAlign", group: "Appearance", defaultValue: sap.ui.core.TextAlign.Initial },
/**
* Defines the text directionality of the input field, e.g. <code>RTL</code>, <code>LTR</code>
* @since 1.28.0
*/
textDirection: { type: "sap.ui.core.TextDirection", group: "Appearance", defaultValue: sap.ui.core.TextDirection.Inherit },
/**
* Type of DateTimeInput (e.g. Date, Time, DateTime)
*/
type : {type : "sap.m.DateTimeInputType", group : "Data", defaultValue : sap.m.DateTimeInputType.Date},
/**
* Displays date value in this given format in text field. Default value is taken from locale settings.
* If you use data-binding on value property with type sap.ui.model.type.Date then you can ignore this property or the latter wins.
* If the user's browser supports native picker then this property is overwritten by browser with locale settings.
*/
displayFormat : {type : "string", group : "Appearance", defaultValue : null},
/**
* Given value property should match with valueFormat to parse date. Default value is taken from locale settings.
* You can only set and get value in this format.
* If you use data-binding on value property with type sap.ui.model.type.Date you can ignore this property or the latter wins.
*/
valueFormat : {type : "string", group : "Data", defaultValue : null},
/**
* This property as JavaScript Date Object can be used to assign a new value which is independent from valueFormat.
*/
dateValue : {type : "object", group : "Data", defaultValue : null}
},
aggregations: {
_picker: {type: "sap.ui.core.Control", multiple: false, visibility: "hidden"}
},
associations: {
/**
* Association to controls / IDs that label this control (see WAI-ARIA attribute aria-labelledby).
* @since 1.27.0
*/
ariaLabelledBy: { type: "sap.ui.core.Control", multiple: true, singularName: "ariaLabelledBy" }
},
events : {
/**
* This event gets fired when the selection has finished and the value has changed.
*/
change : {
parameters : {
/**
* The string value of the control in given valueFormat (or locale format).
*/
value : {type : "string"},
/**
* The value of control as JavaScript Date Object or null if value is empty.
*/
dateValue : {type : "object"},
/**
* if set, the entered value is a valid date.
* If not set the entered value cannot be converted to a date.
* @since 1.38.0
*/
valid : {type : "boolean"}
}
}
}
}});
!(function(oPrototype, $, oDevice) {
var oi18n = sap.m.getLocaleData();
$.extend(oPrototype, {
_types : {
Date : {
valueFormat : oi18n.getDatePattern("short"),
displayFormat : oi18n.getDatePattern("medium")
},
Time : {
valueFormat : oi18n.getTimePattern("short"),
displayFormat : oi18n.getTimePattern("short")
},
DateTime : {
valueFormat : oi18n.getDateTimePattern("short"), // does not include pattern but e.g "{1} {0}"
displayFormat : oi18n.getDateTimePattern("short") // does not include pattern but e.g "{1} {0}"
}
}
});
// build DateTime formats from Date And Time values
["Time", "Date"].forEach(function(sType, nIndex) {
["valueFormat", "displayFormat"].forEach(function(sFormat) {
var oTypes = oPrototype._types;
oTypes.DateTime[sFormat] = oTypes.DateTime[sFormat].replace("{" + nIndex + "}", oTypes[sType][sFormat]);
});
});
}(DateTimeInput.prototype, jQuery, sap.ui.Device));
DateTimeInput.prototype.init = function(){
// as date is the default type - > initialize with DatePicker
this.setType(sap.m.DateTimeInputType.Date);
};
DateTimeInput.prototype.onBeforeRendering = function() {
_updateFormatFromBinding.call(this);
};
DateTimeInput.prototype.getFocusDomRef = function() {
var oPicker = _getPicker.call(this);
return oPicker.getFocusDomRef();
};
DateTimeInput.prototype.getIdForLabel = function() {
var oPicker = _getPicker.call(this);
return oPicker.getIdForLabel();
};
DateTimeInput.prototype.setType = function(sType){
if (sType == this.getType() && _getPicker.call(this)) {
return this;
}
this.destroyAggregation("_picker");
var oPicker;
switch (sType) {
case sap.m.DateTimeInputType.DateTime:
jQuery.sap.require("sap.m.DateTimePicker");
oPicker = new sap.m.DateTimePicker(this.getId() + "-Picker");
break;
case sap.m.DateTimeInputType.Time:
jQuery.sap.require("sap.m.TimePicker");
oPicker = new sap.m.TimePicker(this.getId() + "-Picker",
{localeId: sap.ui.getCore().getConfiguration().getFormatSettings().getFormatLocale().toString()});
break;
default: // default is date
jQuery.sap.require("sap.m.DatePicker");
oPicker = new sap.m.DatePicker(this.getId() + "-Picker");
break;
} |
// forward properties (also set default, may be different)
oPicker.setDisplayFormat(this.getDisplayFormat() || this._types[sType].displayFormat);
oPicker.setValueFormat(this.getValueFormat() || this._types[sType].valueFormat);
if (this.getDateValue()) {
oPicker.setDateValue(this.getDateValue()); // don't set Value -> as by switching type information can be lost
}
oPicker.setEnabled(this.getEnabled());
oPicker.setEditable(this.getEditable());
oPicker.setValueState(this.getValueState());
oPicker.setValueStateText(this.getValueStateText());
oPicker.setShowValueStateMessage(this.getShowValueStateMessage());
oPicker.setName(this.getName());
oPicker.setPlaceholder(this.getPlaceholder());
oPicker.setTextAlign(this.getTextAlign());
oPicker.setTextDirection(this.getTextDirection());
oPicker.setWidth("100%");
oPicker.attachChange(_handleChange, this);
var aAriaLabelledBy = this.getAriaLabelledBy();
for (var i = 0; i < aAriaLabelledBy.length; i++) {
oPicker.addAriaLabelledBy(aAriaLabelledBy[i]);
}
this.setAggregation("_picker", oPicker);
this.setProperty("type", sType); // re-render because picker control changes
return this;
};
DateTimeInput.prototype.setWidth = function(sWidth) {
this.setProperty("width", sWidth);
if (this.getDomRef()) {
sWidth = this.getWidth(); // to use validator
this.$().css("width", sWidth);
}
return this;
};
DateTimeInput.prototype.setValue = function(sValue) {
_updateFormatFromBinding.call(this); // to be sure to have the right format
sValue = this.validateProperty("value", sValue);
if (sValue.toLowerCase() == "now") {
return this.setDateValue(new Date());
}
if (sValue === this.getValue()) {
return this;
}
this.setProperty("value", sValue, true);
var oPicker = _getPicker.call(this);
oPicker.setValue(sValue);
var oDate = oPicker.getDateValue();
this.setProperty("dateValue", oDate, true);
return this;
};
DateTimeInput.prototype.setDateValue = function(oDate) {
if (oDate && !(oDate instanceof Date)) {
throw new Error("Date must be a JavaScript date object; " + this);
}
_updateFormatFromBinding.call(this); // to be sure to have the right format
this.setProperty("dateValue", oDate, true);
var oPicker = _getPicker.call(this);
oPicker.setDateValue(oDate);
var sValue = oPicker.getValue();
this.setProperty("value", sValue, true);
return this;
};
DateTimeInput.prototype.setDisplayFormat = function(sDisplayFormat) {
this.setProperty("displayFormat", sDisplayFormat, true);
var oPicker = _getPicker.call(this);
oPicker.setDisplayFormat(sDisplayFormat || this._types[this.getType()].displayFormat);
return this;
};
DateTimeInput.prototype.setValueFormat = function(sValueFormat) {
this.setProperty("valueFormat", sValueFormat, true);
var oPicker = _getPicker.call(this);
oPicker.setValueFormat(sValueFormat || this._types[this.getType()].ValueFormat);
return this;
};
DateTimeInput.prototype.setEnabled = function(bEnabled) {
this.setProperty("enabled", bEnabled, true);
var oPicker = _getPicker.call(this);
oPicker.setEnabled(bEnabled);
return this;
};
DateTimeInput.prototype.setEditable = function(bEditable) {
this.setProperty("editable", bEditable, true);
var oPicker = _getPicker.call(this);
oPicker.setEditable(bEditable);
return this;
};
DateTimeInput.prototype.setValueState = function(sValueState) {
this.setProperty("valueState", sValueState, true);
var oPicker = _getPicker.call(this);
oPicker.setValueState(sValueState);
return this;
};
DateTimeInput.prototype.setValueStateText = function(sValueStateText) {
this.setProperty("valueStateText", sValueStateText, true);
var oPicker = _getPicker.call(this);
oPicker.setValueStateText(sValueStateText);
return this;
};
DateTimeInput.prototype.setShowValueStateMessage = function(bShowValueStateMessage) {
this.setProperty("showValueStateMessage", bShowValueStateMessage, true);
var oPicker = _getPicker.call(this);
oPicker.setShowValueStateMessage(bShowValueStateMessage);
return this;
};
DateTimeInput.prototype.setName = function(sName) {
this.setProperty("name", sName, true);
var oPicker = _getPicker.call(this);
oPicker.setName(sName);
return this;
};
DateTimeInput.prototype.setPlaceholder = function(sPlaceholder) {
this.setProperty("placeholder", sPlaceholder, true);
var oPicker = _getPicker.call(this);
oPicker.setPlaceholder(sPlaceholder);
return this;
};
DateTimeInput.prototype.setTextAlign = function(sTextAlign) {
this.setProperty("textAlign", sTextAlign, true);
var oPicker = _getPicker.call(this);
oPicker.setTextAlign(sTextAlign);
return this;
};
DateTimeInput.prototype.setTextDirection = function(sTextDirection) {
this.setProperty("textDirection", sTextDirection, true);
var oPicker = _getPicker.call(this);
oPicker.setTextDirection(sTextDirection);
return this;
};
DateTimeInput.prototype.addAriaLabelledBy = function(sID) {
this.addAssociation("ariaLabelledBy", sID, true);
var oPicker = _getPicker.call(this);
oPicker.addAriaLabelledBy(sID);
return this;
};
DateTimeInput.prototype.removeAriaLabelledBy = function(sID) {
this.removeAssociation("ariaLabelledBy", sID, true);
var oPicker = _getPicker.call(this);
oPicker.removeAriaLabelledBy(sID);
return this;
};
DateTimeInput.prototype.removeAllAriaLabelledBy = function() {
this.removeAssociation("ariaLabelledBy", true);
var oPicker = _getPicker.call(this);
oPicker.removeAllAriaLabelledBy();
return this;
};
/**
* @see {sap.ui.core.Control#getAccessibilityInfo}
* @protected
*/
DateTimeInput.prototype.getAccessibilityInfo = function() {
var oPicker = _getPicker.call(this);
return oPicker && oPicker.getAccessibilityInfo ? oPicker.getAccessibilityInfo() : null;
};
function _getPicker(){
return this.getAggregation("_picker");
}
function _updateFormatFromBinding(){
var oBinding = this.getBinding("value");
if (oBinding && oBinding.oType && (oBinding.oType instanceof Date1)) {
var sPattern = oBinding.oType.getOutputPattern();
var oPicker = _getPicker.call(this);
if (oPicker.getValueFormat() != sPattern) {
oPicker.setValueFormat(sPattern);
}
if (oPicker.getDisplayFormat() != sPattern) {
oPicker.setDisplayFormat(sPattern);
}
}
}
function _handleChange(oEvent) {
var sValue = oEvent.getParameter("value");
var oDateValue;
var bValid = oEvent.getParameter("valid");
this.setProperty("value", sValue, true);
if (bValid) {
oDateValue = oEvent.oSource.getDateValue();
this.setProperty("dateValue", oDateValue, true);
}
// newValue and newDateValue for compatibility reasons
this.fireChange({value: sValue, newValue: sValue, valid: bValid, dateValue: oDateValue, newDateValue: oDateValue});
}
return DateTimeInput;
}, /* bExport= */ true); | random_line_split | |
project-requirements-change.py | #! /usr/bin/env python
# Copyright (C) 2011 OpenStack, LLC.
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import collections
import contextlib
import os
import shlex
import shutil
import subprocess
import sys
import tempfile
requirement = None
project = None
def run_command(cmd):
print(cmd)
cmd_list = shlex.split(str(cmd))
p = subprocess.Popen(cmd_list, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(out, err) = p.communicate()
if p.returncode != 0:
raise SystemError(err)
return (out.strip(), err.strip())
class RequirementsList(object):
def __init__(self, name, project):
self.name = name
self.reqs_by_file = {}
self.project = project
self.failed = False
@property
def reqs(self):
return {k: v for d in self.reqs_by_file.values()
for k, v in d.items()}
def extract_reqs(self, content):
reqs = collections.defaultdict(set)
parsed = requirement.parse(content)
for name, entries in parsed.items():
if not name:
# Comments and other unprocessed lines
continue
list_reqs = [r for (r, line) in entries]
# Strip the comments out before checking if there are duplicates
list_reqs_stripped = [r._replace(comment='') for r in list_reqs]
if len(list_reqs_stripped) != len(set(list_reqs_stripped)):
print("Requirements file has duplicate entries "
"for package %s : %r." % (name, list_reqs))
self.failed = True
reqs[name].update(list_reqs)
return reqs
def process(self, strict=True):
"""Convert the project into ready to use data.
- an iterable of requirement sets to check
- each set has the following rules:
- each has a list of Requirements objects
- duplicates are not permitted within that list
"""
print("Checking %(name)s" % {'name': self.name})
# First, parse.
for fname, content in self.project.get('requirements', {}).items():
print("Processing %(fname)s" % {'fname': fname})
if strict and not content.endswith('\n'):
print("Requirements file %s does not "
"end with a newline." % fname)
self.reqs_by_file[fname] = self.extract_reqs(content)
for name, content in project.extras(self.project).items():
print("Processing .[%(extra)s]" % {'extra': name})
self.reqs_by_file[name] = self.extract_reqs(content)
def grab_args():
"""Grab and return arguments"""
parser = argparse.ArgumentParser(
description="Check if project requirements have changed"
)
parser.add_argument('--local', action='store_true',
help='check local changes (not yet in git)')
parser.add_argument('branch', nargs='?', default='master',
help='target branch for diffs')
parser.add_argument('--zc', help='what zuul cloner to call')
parser.add_argument('--reqs', help='use a specified requirements tree')
return parser.parse_args()
@contextlib.contextmanager
def tempdir():
try:
reqroot = tempfile.mkdtemp()
yield reqroot
finally:
shutil.rmtree(reqroot)
def install_and_load_requirements(reqroot, reqdir):
sha = run_command("git --git-dir %s/.git rev-parse HEAD" % reqdir)[0]
print "requirements git sha: %s" % sha
req_venv = os.path.join(reqroot, 'venv')
req_pip = os.path.join(req_venv, 'bin/pip')
req_lib = os.path.join(req_venv, 'lib/python2.7/site-packages')
out, err = run_command("virtualenv " + req_venv)
out, err = run_command(req_pip + " install " + reqdir)
sys.path.append(req_lib)
global project
global requirement
from openstack_requirements import project # noqa
from openstack_requirements import requirement # noqa
def _is_requirement_in_global_reqs(req, global_reqs):
# Compare all fields except the extras field as the global
# requirements should not have any lines with the extras syntax
# example: oslo.db[xyz]<1.2.3
for req2 in global_reqs:
if (req.package == req2.package and
req.location == req2.location and
req.specifiers == req2.specifiers and
req.markers == req2.markers and
req.comment == req2.comment):
return True
return False
def main(): | # openstack/requirements project so we can match them to the changes
with tempdir() as reqroot:
# Only clone requirements repo if no local repo is specified
# on the command line.
if args.reqs is None:
reqdir = os.path.join(reqroot, "openstack/requirements")
if args.zc is not None:
zc = args.zc
else:
zc = '/usr/zuul-env/bin/zuul-cloner'
out, err = run_command("%(zc)s "
"--cache-dir /opt/git "
"--workspace %(root)s "
"git://git.openstack.org "
"openstack/requirements"
% dict(zc=zc, root=reqroot))
print out
print err
else:
reqdir = args.reqs
install_and_load_requirements(reqroot, reqdir)
global_reqs = requirement.parse(
open(reqdir + '/global-requirements.txt', 'rt').read())
for k, entries in global_reqs.items():
# Discard the lines: we don't need them.
global_reqs[k] = set(r for (r, line) in entries)
cwd = os.getcwd()
# build a list of requirements in the proposed change,
# and check them for style violations while doing so
head = run_command("git rev-parse HEAD")[0]
head_proj = project.read(cwd)
head_reqs = RequirementsList('HEAD', head_proj)
# Don't apply strict parsing rules to stable branches.
# Reasoning is:
# - devstack etc protect us from functional issues
# - we're backporting to stable, so guarding against
# aesthetics and DRY concerns is not our business anymore
# - if in future we have other not-functional linty style
# things to add, we don't want them to affect stable
# either.
head_strict = not branch.startswith('stable/')
head_reqs.process(strict=head_strict)
if not args.local:
# build a list of requirements already in the target branch,
# so that we can create a diff and identify what's being changed
run_command("git remote update")
run_command("git checkout remotes/origin/%s" % branch)
branch_proj = project.read(cwd)
# switch back to the proposed change now
run_command("git checkout %s" % head)
else:
branch_proj = {'root': cwd}
branch_reqs = RequirementsList(branch, branch_proj)
# Don't error on the target branch being broken.
branch_reqs.process(strict=False)
# iterate through the changing entries and see if they match the global
# equivalents we want enforced
for fname, freqs in head_reqs.reqs_by_file.items():
print("Validating %(fname)s" % {'fname': fname})
for name, reqs in freqs.items():
counts = {}
if (name in branch_reqs.reqs and
reqs == branch_reqs.reqs[name]):
# Unchanged [or a change that preserves a current value]
continue
if name not in global_reqs:
failed = True
print("Requirement %s not in openstack/requirements" %
str(reqs))
continue
if reqs == global_reqs[name]:
continue
for req in reqs:
if req.extras:
for extra in req.extras:
counts[extra] = counts.get(extra, 0) + 1
else:
counts[''] = counts.get('', 0) + 1
if not _is_requirement_in_global_reqs(
req, global_reqs[name]):
failed = True
print("Requirement for package %s : %s does "
"not match openstack/requirements value : %s" % (
name, str(req), str(global_reqs[name])))
for extra, count in counts.items():
if count != len(global_reqs[name]):
failed = True
print("Package %s%s requirement does not match "
"number of lines (%d) in "
"openstack/requirements" % (
name,
('[%s]' % extra) if extra else '',
len(global_reqs[name])))
# report the results
if failed or head_reqs.failed or branch_reqs.failed:
sys.exit(1)
print("Updated requirements match openstack/requirements.")
if __name__ == '__main__':
main() | args = grab_args()
branch = args.branch
failed = False
# build a list of requirements from the global list in the | random_line_split |
project-requirements-change.py | #! /usr/bin/env python
# Copyright (C) 2011 OpenStack, LLC.
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import collections
import contextlib
import os
import shlex
import shutil
import subprocess
import sys
import tempfile
requirement = None
project = None
def run_command(cmd):
print(cmd)
cmd_list = shlex.split(str(cmd))
p = subprocess.Popen(cmd_list, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(out, err) = p.communicate()
if p.returncode != 0:
raise SystemError(err)
return (out.strip(), err.strip())
class RequirementsList(object):
def __init__(self, name, project):
self.name = name
self.reqs_by_file = {}
self.project = project
self.failed = False
@property
def reqs(self):
return {k: v for d in self.reqs_by_file.values()
for k, v in d.items()}
def extract_reqs(self, content):
reqs = collections.defaultdict(set)
parsed = requirement.parse(content)
for name, entries in parsed.items():
if not name:
# Comments and other unprocessed lines
continue
list_reqs = [r for (r, line) in entries]
# Strip the comments out before checking if there are duplicates
list_reqs_stripped = [r._replace(comment='') for r in list_reqs]
if len(list_reqs_stripped) != len(set(list_reqs_stripped)):
print("Requirements file has duplicate entries "
"for package %s : %r." % (name, list_reqs))
self.failed = True
reqs[name].update(list_reqs)
return reqs
def process(self, strict=True):
"""Convert the project into ready to use data.
- an iterable of requirement sets to check
- each set has the following rules:
- each has a list of Requirements objects
- duplicates are not permitted within that list
"""
print("Checking %(name)s" % {'name': self.name})
# First, parse.
for fname, content in self.project.get('requirements', {}).items():
print("Processing %(fname)s" % {'fname': fname})
if strict and not content.endswith('\n'):
print("Requirements file %s does not "
"end with a newline." % fname)
self.reqs_by_file[fname] = self.extract_reqs(content)
for name, content in project.extras(self.project).items():
print("Processing .[%(extra)s]" % {'extra': name})
self.reqs_by_file[name] = self.extract_reqs(content)
def | ():
"""Grab and return arguments"""
parser = argparse.ArgumentParser(
description="Check if project requirements have changed"
)
parser.add_argument('--local', action='store_true',
help='check local changes (not yet in git)')
parser.add_argument('branch', nargs='?', default='master',
help='target branch for diffs')
parser.add_argument('--zc', help='what zuul cloner to call')
parser.add_argument('--reqs', help='use a specified requirements tree')
return parser.parse_args()
@contextlib.contextmanager
def tempdir():
try:
reqroot = tempfile.mkdtemp()
yield reqroot
finally:
shutil.rmtree(reqroot)
def install_and_load_requirements(reqroot, reqdir):
sha = run_command("git --git-dir %s/.git rev-parse HEAD" % reqdir)[0]
print "requirements git sha: %s" % sha
req_venv = os.path.join(reqroot, 'venv')
req_pip = os.path.join(req_venv, 'bin/pip')
req_lib = os.path.join(req_venv, 'lib/python2.7/site-packages')
out, err = run_command("virtualenv " + req_venv)
out, err = run_command(req_pip + " install " + reqdir)
sys.path.append(req_lib)
global project
global requirement
from openstack_requirements import project # noqa
from openstack_requirements import requirement # noqa
def _is_requirement_in_global_reqs(req, global_reqs):
# Compare all fields except the extras field as the global
# requirements should not have any lines with the extras syntax
# example: oslo.db[xyz]<1.2.3
for req2 in global_reqs:
if (req.package == req2.package and
req.location == req2.location and
req.specifiers == req2.specifiers and
req.markers == req2.markers and
req.comment == req2.comment):
return True
return False
def main():
args = grab_args()
branch = args.branch
failed = False
# build a list of requirements from the global list in the
# openstack/requirements project so we can match them to the changes
with tempdir() as reqroot:
# Only clone requirements repo if no local repo is specified
# on the command line.
if args.reqs is None:
reqdir = os.path.join(reqroot, "openstack/requirements")
if args.zc is not None:
zc = args.zc
else:
zc = '/usr/zuul-env/bin/zuul-cloner'
out, err = run_command("%(zc)s "
"--cache-dir /opt/git "
"--workspace %(root)s "
"git://git.openstack.org "
"openstack/requirements"
% dict(zc=zc, root=reqroot))
print out
print err
else:
reqdir = args.reqs
install_and_load_requirements(reqroot, reqdir)
global_reqs = requirement.parse(
open(reqdir + '/global-requirements.txt', 'rt').read())
for k, entries in global_reqs.items():
# Discard the lines: we don't need them.
global_reqs[k] = set(r for (r, line) in entries)
cwd = os.getcwd()
# build a list of requirements in the proposed change,
# and check them for style violations while doing so
head = run_command("git rev-parse HEAD")[0]
head_proj = project.read(cwd)
head_reqs = RequirementsList('HEAD', head_proj)
# Don't apply strict parsing rules to stable branches.
# Reasoning is:
# - devstack etc protect us from functional issues
# - we're backporting to stable, so guarding against
# aesthetics and DRY concerns is not our business anymore
# - if in future we have other not-functional linty style
# things to add, we don't want them to affect stable
# either.
head_strict = not branch.startswith('stable/')
head_reqs.process(strict=head_strict)
if not args.local:
# build a list of requirements already in the target branch,
# so that we can create a diff and identify what's being changed
run_command("git remote update")
run_command("git checkout remotes/origin/%s" % branch)
branch_proj = project.read(cwd)
# switch back to the proposed change now
run_command("git checkout %s" % head)
else:
branch_proj = {'root': cwd}
branch_reqs = RequirementsList(branch, branch_proj)
# Don't error on the target branch being broken.
branch_reqs.process(strict=False)
# iterate through the changing entries and see if they match the global
# equivalents we want enforced
for fname, freqs in head_reqs.reqs_by_file.items():
print("Validating %(fname)s" % {'fname': fname})
for name, reqs in freqs.items():
counts = {}
if (name in branch_reqs.reqs and
reqs == branch_reqs.reqs[name]):
# Unchanged [or a change that preserves a current value]
continue
if name not in global_reqs:
failed = True
print("Requirement %s not in openstack/requirements" %
str(reqs))
continue
if reqs == global_reqs[name]:
continue
for req in reqs:
if req.extras:
for extra in req.extras:
counts[extra] = counts.get(extra, 0) + 1
else:
counts[''] = counts.get('', 0) + 1
if not _is_requirement_in_global_reqs(
req, global_reqs[name]):
failed = True
print("Requirement for package %s : %s does "
"not match openstack/requirements value : %s" % (
name, str(req), str(global_reqs[name])))
for extra, count in counts.items():
if count != len(global_reqs[name]):
failed = True
print("Package %s%s requirement does not match "
"number of lines (%d) in "
"openstack/requirements" % (
name,
('[%s]' % extra) if extra else '',
len(global_reqs[name])))
# report the results
if failed or head_reqs.failed or branch_reqs.failed:
sys.exit(1)
print("Updated requirements match openstack/requirements.")
if __name__ == '__main__':
main()
| grab_args | identifier_name |
project-requirements-change.py | #! /usr/bin/env python
# Copyright (C) 2011 OpenStack, LLC.
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import collections
import contextlib
import os
import shlex
import shutil
import subprocess
import sys
import tempfile
requirement = None
project = None
def run_command(cmd):
print(cmd)
cmd_list = shlex.split(str(cmd))
p = subprocess.Popen(cmd_list, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(out, err) = p.communicate()
if p.returncode != 0:
raise SystemError(err)
return (out.strip(), err.strip())
class RequirementsList(object):
def __init__(self, name, project):
self.name = name
self.reqs_by_file = {}
self.project = project
self.failed = False
@property
def reqs(self):
return {k: v for d in self.reqs_by_file.values()
for k, v in d.items()}
def extract_reqs(self, content):
reqs = collections.defaultdict(set)
parsed = requirement.parse(content)
for name, entries in parsed.items():
if not name:
# Comments and other unprocessed lines
continue
list_reqs = [r for (r, line) in entries]
# Strip the comments out before checking if there are duplicates
list_reqs_stripped = [r._replace(comment='') for r in list_reqs]
if len(list_reqs_stripped) != len(set(list_reqs_stripped)):
print("Requirements file has duplicate entries "
"for package %s : %r." % (name, list_reqs))
self.failed = True
reqs[name].update(list_reqs)
return reqs
def process(self, strict=True):
"""Convert the project into ready to use data.
- an iterable of requirement sets to check
- each set has the following rules:
- each has a list of Requirements objects
- duplicates are not permitted within that list
"""
print("Checking %(name)s" % {'name': self.name})
# First, parse.
for fname, content in self.project.get('requirements', {}).items():
print("Processing %(fname)s" % {'fname': fname})
if strict and not content.endswith('\n'):
print("Requirements file %s does not "
"end with a newline." % fname)
self.reqs_by_file[fname] = self.extract_reqs(content)
for name, content in project.extras(self.project).items():
print("Processing .[%(extra)s]" % {'extra': name})
self.reqs_by_file[name] = self.extract_reqs(content)
def grab_args():
"""Grab and return arguments"""
parser = argparse.ArgumentParser(
description="Check if project requirements have changed"
)
parser.add_argument('--local', action='store_true',
help='check local changes (not yet in git)')
parser.add_argument('branch', nargs='?', default='master',
help='target branch for diffs')
parser.add_argument('--zc', help='what zuul cloner to call')
parser.add_argument('--reqs', help='use a specified requirements tree')
return parser.parse_args()
@contextlib.contextmanager
def tempdir():
try:
reqroot = tempfile.mkdtemp()
yield reqroot
finally:
shutil.rmtree(reqroot)
def install_and_load_requirements(reqroot, reqdir):
sha = run_command("git --git-dir %s/.git rev-parse HEAD" % reqdir)[0]
print "requirements git sha: %s" % sha
req_venv = os.path.join(reqroot, 'venv')
req_pip = os.path.join(req_venv, 'bin/pip')
req_lib = os.path.join(req_venv, 'lib/python2.7/site-packages')
out, err = run_command("virtualenv " + req_venv)
out, err = run_command(req_pip + " install " + reqdir)
sys.path.append(req_lib)
global project
global requirement
from openstack_requirements import project # noqa
from openstack_requirements import requirement # noqa
def _is_requirement_in_global_reqs(req, global_reqs):
# Compare all fields except the extras field as the global
# requirements should not have any lines with the extras syntax
# example: oslo.db[xyz]<1.2.3
for req2 in global_reqs:
if (req.package == req2.package and
req.location == req2.location and
req.specifiers == req2.specifiers and
req.markers == req2.markers and
req.comment == req2.comment):
return True
return False
def main():
args = grab_args()
branch = args.branch
failed = False
# build a list of requirements from the global list in the
# openstack/requirements project so we can match them to the changes
with tempdir() as reqroot:
# Only clone requirements repo if no local repo is specified
# on the command line.
if args.reqs is None:
reqdir = os.path.join(reqroot, "openstack/requirements")
if args.zc is not None:
zc = args.zc
else:
zc = '/usr/zuul-env/bin/zuul-cloner'
out, err = run_command("%(zc)s "
"--cache-dir /opt/git "
"--workspace %(root)s "
"git://git.openstack.org "
"openstack/requirements"
% dict(zc=zc, root=reqroot))
print out
print err
else:
reqdir = args.reqs
install_and_load_requirements(reqroot, reqdir)
global_reqs = requirement.parse(
open(reqdir + '/global-requirements.txt', 'rt').read())
for k, entries in global_reqs.items():
# Discard the lines: we don't need them.
global_reqs[k] = set(r for (r, line) in entries)
cwd = os.getcwd()
# build a list of requirements in the proposed change,
# and check them for style violations while doing so
head = run_command("git rev-parse HEAD")[0]
head_proj = project.read(cwd)
head_reqs = RequirementsList('HEAD', head_proj)
# Don't apply strict parsing rules to stable branches.
# Reasoning is:
# - devstack etc protect us from functional issues
# - we're backporting to stable, so guarding against
# aesthetics and DRY concerns is not our business anymore
# - if in future we have other not-functional linty style
# things to add, we don't want them to affect stable
# either.
head_strict = not branch.startswith('stable/')
head_reqs.process(strict=head_strict)
if not args.local:
# build a list of requirements already in the target branch,
# so that we can create a diff and identify what's being changed
run_command("git remote update")
run_command("git checkout remotes/origin/%s" % branch)
branch_proj = project.read(cwd)
# switch back to the proposed change now
run_command("git checkout %s" % head)
else:
branch_proj = {'root': cwd}
branch_reqs = RequirementsList(branch, branch_proj)
# Don't error on the target branch being broken.
branch_reqs.process(strict=False)
# iterate through the changing entries and see if they match the global
# equivalents we want enforced
for fname, freqs in head_reqs.reqs_by_file.items():
print("Validating %(fname)s" % {'fname': fname})
for name, reqs in freqs.items():
counts = {}
if (name in branch_reqs.reqs and
reqs == branch_reqs.reqs[name]):
# Unchanged [or a change that preserves a current value]
continue
if name not in global_reqs:
failed = True
print("Requirement %s not in openstack/requirements" %
str(reqs))
continue
if reqs == global_reqs[name]:
continue
for req in reqs:
if req.extras:
for extra in req.extras:
counts[extra] = counts.get(extra, 0) + 1
else:
counts[''] = counts.get('', 0) + 1
if not _is_requirement_in_global_reqs(
req, global_reqs[name]):
failed = True
print("Requirement for package %s : %s does "
"not match openstack/requirements value : %s" % (
name, str(req), str(global_reqs[name])))
for extra, count in counts.items():
|
# report the results
if failed or head_reqs.failed or branch_reqs.failed:
sys.exit(1)
print("Updated requirements match openstack/requirements.")
if __name__ == '__main__':
main()
| if count != len(global_reqs[name]):
failed = True
print("Package %s%s requirement does not match "
"number of lines (%d) in "
"openstack/requirements" % (
name,
('[%s]' % extra) if extra else '',
len(global_reqs[name]))) | conditional_block |
project-requirements-change.py | #! /usr/bin/env python
# Copyright (C) 2011 OpenStack, LLC.
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import argparse
import collections
import contextlib
import os
import shlex
import shutil
import subprocess
import sys
import tempfile
requirement = None
project = None
def run_command(cmd):
print(cmd)
cmd_list = shlex.split(str(cmd))
p = subprocess.Popen(cmd_list, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(out, err) = p.communicate()
if p.returncode != 0:
raise SystemError(err)
return (out.strip(), err.strip())
class RequirementsList(object):
|
def grab_args():
"""Grab and return arguments"""
parser = argparse.ArgumentParser(
description="Check if project requirements have changed"
)
parser.add_argument('--local', action='store_true',
help='check local changes (not yet in git)')
parser.add_argument('branch', nargs='?', default='master',
help='target branch for diffs')
parser.add_argument('--zc', help='what zuul cloner to call')
parser.add_argument('--reqs', help='use a specified requirements tree')
return parser.parse_args()
@contextlib.contextmanager
def tempdir():
try:
reqroot = tempfile.mkdtemp()
yield reqroot
finally:
shutil.rmtree(reqroot)
def install_and_load_requirements(reqroot, reqdir):
sha = run_command("git --git-dir %s/.git rev-parse HEAD" % reqdir)[0]
print "requirements git sha: %s" % sha
req_venv = os.path.join(reqroot, 'venv')
req_pip = os.path.join(req_venv, 'bin/pip')
req_lib = os.path.join(req_venv, 'lib/python2.7/site-packages')
out, err = run_command("virtualenv " + req_venv)
out, err = run_command(req_pip + " install " + reqdir)
sys.path.append(req_lib)
global project
global requirement
from openstack_requirements import project # noqa
from openstack_requirements import requirement # noqa
def _is_requirement_in_global_reqs(req, global_reqs):
# Compare all fields except the extras field as the global
# requirements should not have any lines with the extras syntax
# example: oslo.db[xyz]<1.2.3
for req2 in global_reqs:
if (req.package == req2.package and
req.location == req2.location and
req.specifiers == req2.specifiers and
req.markers == req2.markers and
req.comment == req2.comment):
return True
return False
def main():
args = grab_args()
branch = args.branch
failed = False
# build a list of requirements from the global list in the
# openstack/requirements project so we can match them to the changes
with tempdir() as reqroot:
# Only clone requirements repo if no local repo is specified
# on the command line.
if args.reqs is None:
reqdir = os.path.join(reqroot, "openstack/requirements")
if args.zc is not None:
zc = args.zc
else:
zc = '/usr/zuul-env/bin/zuul-cloner'
out, err = run_command("%(zc)s "
"--cache-dir /opt/git "
"--workspace %(root)s "
"git://git.openstack.org "
"openstack/requirements"
% dict(zc=zc, root=reqroot))
print out
print err
else:
reqdir = args.reqs
install_and_load_requirements(reqroot, reqdir)
global_reqs = requirement.parse(
open(reqdir + '/global-requirements.txt', 'rt').read())
for k, entries in global_reqs.items():
# Discard the lines: we don't need them.
global_reqs[k] = set(r for (r, line) in entries)
cwd = os.getcwd()
# build a list of requirements in the proposed change,
# and check them for style violations while doing so
head = run_command("git rev-parse HEAD")[0]
head_proj = project.read(cwd)
head_reqs = RequirementsList('HEAD', head_proj)
# Don't apply strict parsing rules to stable branches.
# Reasoning is:
# - devstack etc protect us from functional issues
# - we're backporting to stable, so guarding against
# aesthetics and DRY concerns is not our business anymore
# - if in future we have other not-functional linty style
# things to add, we don't want them to affect stable
# either.
head_strict = not branch.startswith('stable/')
head_reqs.process(strict=head_strict)
if not args.local:
# build a list of requirements already in the target branch,
# so that we can create a diff and identify what's being changed
run_command("git remote update")
run_command("git checkout remotes/origin/%s" % branch)
branch_proj = project.read(cwd)
# switch back to the proposed change now
run_command("git checkout %s" % head)
else:
branch_proj = {'root': cwd}
branch_reqs = RequirementsList(branch, branch_proj)
# Don't error on the target branch being broken.
branch_reqs.process(strict=False)
# iterate through the changing entries and see if they match the global
# equivalents we want enforced
for fname, freqs in head_reqs.reqs_by_file.items():
print("Validating %(fname)s" % {'fname': fname})
for name, reqs in freqs.items():
counts = {}
if (name in branch_reqs.reqs and
reqs == branch_reqs.reqs[name]):
# Unchanged [or a change that preserves a current value]
continue
if name not in global_reqs:
failed = True
print("Requirement %s not in openstack/requirements" %
str(reqs))
continue
if reqs == global_reqs[name]:
continue
for req in reqs:
if req.extras:
for extra in req.extras:
counts[extra] = counts.get(extra, 0) + 1
else:
counts[''] = counts.get('', 0) + 1
if not _is_requirement_in_global_reqs(
req, global_reqs[name]):
failed = True
print("Requirement for package %s : %s does "
"not match openstack/requirements value : %s" % (
name, str(req), str(global_reqs[name])))
for extra, count in counts.items():
if count != len(global_reqs[name]):
failed = True
print("Package %s%s requirement does not match "
"number of lines (%d) in "
"openstack/requirements" % (
name,
('[%s]' % extra) if extra else '',
len(global_reqs[name])))
# report the results
if failed or head_reqs.failed or branch_reqs.failed:
sys.exit(1)
print("Updated requirements match openstack/requirements.")
if __name__ == '__main__':
main()
| def __init__(self, name, project):
self.name = name
self.reqs_by_file = {}
self.project = project
self.failed = False
@property
def reqs(self):
return {k: v for d in self.reqs_by_file.values()
for k, v in d.items()}
def extract_reqs(self, content):
reqs = collections.defaultdict(set)
parsed = requirement.parse(content)
for name, entries in parsed.items():
if not name:
# Comments and other unprocessed lines
continue
list_reqs = [r for (r, line) in entries]
# Strip the comments out before checking if there are duplicates
list_reqs_stripped = [r._replace(comment='') for r in list_reqs]
if len(list_reqs_stripped) != len(set(list_reqs_stripped)):
print("Requirements file has duplicate entries "
"for package %s : %r." % (name, list_reqs))
self.failed = True
reqs[name].update(list_reqs)
return reqs
def process(self, strict=True):
"""Convert the project into ready to use data.
- an iterable of requirement sets to check
- each set has the following rules:
- each has a list of Requirements objects
- duplicates are not permitted within that list
"""
print("Checking %(name)s" % {'name': self.name})
# First, parse.
for fname, content in self.project.get('requirements', {}).items():
print("Processing %(fname)s" % {'fname': fname})
if strict and not content.endswith('\n'):
print("Requirements file %s does not "
"end with a newline." % fname)
self.reqs_by_file[fname] = self.extract_reqs(content)
for name, content in project.extras(self.project).items():
print("Processing .[%(extra)s]" % {'extra': name})
self.reqs_by_file[name] = self.extract_reqs(content) | identifier_body |
query-parameters-v2.spec.ts | import { asUrlQueryString } from './query-parameters-v2';
describe('asUrlQueryString', () => {
it('should create a empty query', () => {
expect(asUrlQueryString({})).toBe('');
}); | it('should create a query string with one argument', () => {
expect(asUrlQueryString({ foo: 'bar' })).toBe('?foo=bar');
});
it('should create a query string with multiple argument', () => {
expect(asUrlQueryString({ foo1: 'bar1', foo2: 'bar2' })).toBe('?foo1=bar1&foo2=bar2');
});
it('should expand any array argument', () => {
expect(asUrlQueryString({ foo: ['bar1', 'bar2'] })).toBe('?foo=bar1&foo=bar2');
});
it('should skip undefined values', () => {
expect(asUrlQueryString({ foo: 'bar', foo1: undefined })).toBe('?foo=bar');
});
it('should skip undefined values in array', () => {
expect(asUrlQueryString({ foo: ['bar1', undefined, 'bar2'] })).toBe('?foo=bar1&foo=bar2');
});
}); | random_line_split | |
sigint.py | #
# Allows GTK 3 python applications to exit when CTRL-C is raised
# From https://bugzilla.gnome.org/show_bug.cgi?id=622084
#
# Author: Simon Feltman
# License: Presume same as pygobject
#
import sys
import signal
from typing import ClassVar, List
from gi.repository import GLib
class InterruptibleLoopContext:
"""
Context Manager for GLib/Gtk based loops.
Usage of this context manager will install a single GLib unix signal handler
and allow for multiple context managers to be nested using this single handler.
"""
#: Global stack context loops. This is added to per InterruptibleLoopContext
#: instance and allows for context nesting using the same GLib signal handler.
_loop_contexts: ClassVar[List['InterruptibleLoopContext']] = []
#: Single source id for the unix signal handler.
_signal_source_id = None
@classmethod
def _glib_sigint_handler(cls, user_data):
context = cls._loop_contexts[-1]
context._quit_by_sigint = True
context._loop_exit_func()
# keep the handler around until we explicitly remove it
return True
def __init__(self, loop_exit_func):
self._loop_exit_func = loop_exit_func
self._quit_by_sigint = False
def __enter__(self):
# Only use unix_signal_add if this is not win32 and there has
# not already been one.
if sys.platform != 'win32' and not InterruptibleLoopContext._loop_contexts:
# Add a glib signal handler
source_id = GLib.unix_signal_add(
GLib.PRIORITY_DEFAULT, signal.SIGINT, self._glib_sigint_handler, None
)
InterruptibleLoopContext._signal_source_id = source_id
InterruptibleLoopContext._loop_contexts.append(self)
def __exit__(self, exc_type, exc_value, traceback):
context = InterruptibleLoopContext._loop_contexts.pop()
assert self == context
# if the context stack is empty and we have a GLib signal source,
# remove the source from GLib and clear out the variable.
if (
not InterruptibleLoopContext._loop_contexts
and InterruptibleLoopContext._signal_source_id is not None
):
GLib.source_remove(InterruptibleLoopContext._signal_source_id)
InterruptibleLoopContext._signal_source_id = None
if self._quit_by_sigint:
# caught by _glib_sigint_handler()
| raise KeyboardInterrupt | conditional_block | |
sigint.py | #
# Allows GTK 3 python applications to exit when CTRL-C is raised
# From https://bugzilla.gnome.org/show_bug.cgi?id=622084
#
# Author: Simon Feltman
# License: Presume same as pygobject
#
import sys
import signal
from typing import ClassVar, List
from gi.repository import GLib
class InterruptibleLoopContext:
"""
Context Manager for GLib/Gtk based loops.
Usage of this context manager will install a single GLib unix signal handler
and allow for multiple context managers to be nested using this single handler.
"""
#: Global stack context loops. This is added to per InterruptibleLoopContext
#: instance and allows for context nesting using the same GLib signal handler.
_loop_contexts: ClassVar[List['InterruptibleLoopContext']] = []
#: Single source id for the unix signal handler.
_signal_source_id = None
@classmethod
def | (cls, user_data):
context = cls._loop_contexts[-1]
context._quit_by_sigint = True
context._loop_exit_func()
# keep the handler around until we explicitly remove it
return True
def __init__(self, loop_exit_func):
self._loop_exit_func = loop_exit_func
self._quit_by_sigint = False
def __enter__(self):
# Only use unix_signal_add if this is not win32 and there has
# not already been one.
if sys.platform != 'win32' and not InterruptibleLoopContext._loop_contexts:
# Add a glib signal handler
source_id = GLib.unix_signal_add(
GLib.PRIORITY_DEFAULT, signal.SIGINT, self._glib_sigint_handler, None
)
InterruptibleLoopContext._signal_source_id = source_id
InterruptibleLoopContext._loop_contexts.append(self)
def __exit__(self, exc_type, exc_value, traceback):
context = InterruptibleLoopContext._loop_contexts.pop()
assert self == context
# if the context stack is empty and we have a GLib signal source,
# remove the source from GLib and clear out the variable.
if (
not InterruptibleLoopContext._loop_contexts
and InterruptibleLoopContext._signal_source_id is not None
):
GLib.source_remove(InterruptibleLoopContext._signal_source_id)
InterruptibleLoopContext._signal_source_id = None
if self._quit_by_sigint:
# caught by _glib_sigint_handler()
raise KeyboardInterrupt
| _glib_sigint_handler | identifier_name |
sigint.py | #
# Allows GTK 3 python applications to exit when CTRL-C is raised
# From https://bugzilla.gnome.org/show_bug.cgi?id=622084
#
# Author: Simon Feltman
# License: Presume same as pygobject
#
import sys
import signal
from typing import ClassVar, List
from gi.repository import GLib
class InterruptibleLoopContext: | """
Context Manager for GLib/Gtk based loops.
Usage of this context manager will install a single GLib unix signal handler
and allow for multiple context managers to be nested using this single handler.
"""
#: Global stack context loops. This is added to per InterruptibleLoopContext
#: instance and allows for context nesting using the same GLib signal handler.
_loop_contexts: ClassVar[List['InterruptibleLoopContext']] = []
#: Single source id for the unix signal handler.
_signal_source_id = None
@classmethod
def _glib_sigint_handler(cls, user_data):
context = cls._loop_contexts[-1]
context._quit_by_sigint = True
context._loop_exit_func()
# keep the handler around until we explicitly remove it
return True
def __init__(self, loop_exit_func):
self._loop_exit_func = loop_exit_func
self._quit_by_sigint = False
def __enter__(self):
# Only use unix_signal_add if this is not win32 and there has
# not already been one.
if sys.platform != 'win32' and not InterruptibleLoopContext._loop_contexts:
# Add a glib signal handler
source_id = GLib.unix_signal_add(
GLib.PRIORITY_DEFAULT, signal.SIGINT, self._glib_sigint_handler, None
)
InterruptibleLoopContext._signal_source_id = source_id
InterruptibleLoopContext._loop_contexts.append(self)
def __exit__(self, exc_type, exc_value, traceback):
context = InterruptibleLoopContext._loop_contexts.pop()
assert self == context
# if the context stack is empty and we have a GLib signal source,
# remove the source from GLib and clear out the variable.
if (
not InterruptibleLoopContext._loop_contexts
and InterruptibleLoopContext._signal_source_id is not None
):
GLib.source_remove(InterruptibleLoopContext._signal_source_id)
InterruptibleLoopContext._signal_source_id = None
if self._quit_by_sigint:
# caught by _glib_sigint_handler()
raise KeyboardInterrupt | random_line_split | |
sigint.py | #
# Allows GTK 3 python applications to exit when CTRL-C is raised
# From https://bugzilla.gnome.org/show_bug.cgi?id=622084
#
# Author: Simon Feltman
# License: Presume same as pygobject
#
import sys
import signal
from typing import ClassVar, List
from gi.repository import GLib
class InterruptibleLoopContext:
"""
Context Manager for GLib/Gtk based loops.
Usage of this context manager will install a single GLib unix signal handler
and allow for multiple context managers to be nested using this single handler.
"""
#: Global stack context loops. This is added to per InterruptibleLoopContext
#: instance and allows for context nesting using the same GLib signal handler.
_loop_contexts: ClassVar[List['InterruptibleLoopContext']] = []
#: Single source id for the unix signal handler.
_signal_source_id = None
@classmethod
def _glib_sigint_handler(cls, user_data):
context = cls._loop_contexts[-1]
context._quit_by_sigint = True
context._loop_exit_func()
# keep the handler around until we explicitly remove it
return True
def __init__(self, loop_exit_func):
self._loop_exit_func = loop_exit_func
self._quit_by_sigint = False
def __enter__(self):
# Only use unix_signal_add if this is not win32 and there has
# not already been one.
|
def __exit__(self, exc_type, exc_value, traceback):
context = InterruptibleLoopContext._loop_contexts.pop()
assert self == context
# if the context stack is empty and we have a GLib signal source,
# remove the source from GLib and clear out the variable.
if (
not InterruptibleLoopContext._loop_contexts
and InterruptibleLoopContext._signal_source_id is not None
):
GLib.source_remove(InterruptibleLoopContext._signal_source_id)
InterruptibleLoopContext._signal_source_id = None
if self._quit_by_sigint:
# caught by _glib_sigint_handler()
raise KeyboardInterrupt
| if sys.platform != 'win32' and not InterruptibleLoopContext._loop_contexts:
# Add a glib signal handler
source_id = GLib.unix_signal_add(
GLib.PRIORITY_DEFAULT, signal.SIGINT, self._glib_sigint_handler, None
)
InterruptibleLoopContext._signal_source_id = source_id
InterruptibleLoopContext._loop_contexts.append(self) | identifier_body |
get_exact_record.rs |
use uuid::Uuid;
use rustorm::dao::{Dao, IsDao};
use rustorm::pool::ManagedPool;
use rustorm::em::EntityManager;
use rustorm::table::{Table, Column};
use rustorm::table::IsTable;
#[derive(Debug, Clone)]
pub struct Product {
pub product_id: Uuid,
pub name: String,
pub description: Option<String>,
}
impl IsDao for Product{
fn from_dao(dao: &Dao) -> Self {
Product {
product_id: dao.get("product_id"),
name: dao.get("name"),
description: dao.get_opt("description"),
}
}
fn to_dao(&self) -> Dao {
let mut dao = Dao::new();
dao.set("product_id", &self.product_id);
dao.set("name", &self.name);
match self.description {
Some(ref _value) => dao.set("description", _value),
None => dao.set_null("description"),
}
dao
}
}
impl IsTable for Product{
fn table() -> Table {
Table {
schema: "bazaar".to_string(),
name: "product".to_string(),
parent_table: None,
sub_table: vec![],
comment: None,
columns: vec![
Column{
name:"product_id".to_string(),
data_type:"Uuid".to_string(),
db_data_type:"uuid".to_string(),
is_primary:true, is_unique:false, not_null:true, is_inherited:false,
default:Some("uuid_generate_v4()".to_string()),
comment:None,
foreign:None,
},
Column{
name:"name".to_string(),
data_type:"String".to_string(),
db_data_type:"character varying".to_string(),
is_primary:false, is_unique:false, not_null:true, is_inherited:false,
default:None,
comment:None,
foreign:None,
},
Column{
name:"description".to_string(),
data_type:"String".to_string(),
db_data_type:"character varying".to_string(),
is_primary:false, is_unique:false, not_null:false, is_inherited:true,
default:None,
comment:None,
foreign:None,
},
],
is_view: false,
}
}
}
fn main() {
let url = "postgres://postgres:p0stgr3s@localhost/bazaar_v6";
let pool = ManagedPool::init(&url, 1).unwrap();
let db = pool.connect().unwrap();
let em = EntityManager::new(db.as_ref());
let pid = Uuid::parse_str("6db712e6-cc50-4c3a-8269-451c98ace5ad").unwrap();
let prod: Product = em.get_exact(&pid).unwrap();
println!("{} {} {:?}", prod.product_id, prod.name, prod.description);
//pool.release(db);
} | extern crate rustorm;
extern crate uuid;
extern crate chrono;
extern crate rustc_serialize; | random_line_split | |
get_exact_record.rs | extern crate rustorm;
extern crate uuid;
extern crate chrono;
extern crate rustc_serialize;
use uuid::Uuid;
use rustorm::dao::{Dao, IsDao};
use rustorm::pool::ManagedPool;
use rustorm::em::EntityManager;
use rustorm::table::{Table, Column};
use rustorm::table::IsTable;
#[derive(Debug, Clone)]
pub struct Product {
pub product_id: Uuid,
pub name: String,
pub description: Option<String>,
}
impl IsDao for Product{
fn from_dao(dao: &Dao) -> Self {
Product {
product_id: dao.get("product_id"),
name: dao.get("name"),
description: dao.get_opt("description"),
}
}
fn to_dao(&self) -> Dao {
let mut dao = Dao::new();
dao.set("product_id", &self.product_id);
dao.set("name", &self.name);
match self.description {
Some(ref _value) => dao.set("description", _value),
None => dao.set_null("description"),
}
dao
}
}
impl IsTable for Product{
fn table() -> Table |
}
fn main() {
let url = "postgres://postgres:p0stgr3s@localhost/bazaar_v6";
let pool = ManagedPool::init(&url, 1).unwrap();
let db = pool.connect().unwrap();
let em = EntityManager::new(db.as_ref());
let pid = Uuid::parse_str("6db712e6-cc50-4c3a-8269-451c98ace5ad").unwrap();
let prod: Product = em.get_exact(&pid).unwrap();
println!("{} {} {:?}", prod.product_id, prod.name, prod.description);
//pool.release(db);
}
| {
Table {
schema: "bazaar".to_string(),
name: "product".to_string(),
parent_table: None,
sub_table: vec![],
comment: None,
columns: vec![
Column{
name:"product_id".to_string(),
data_type:"Uuid".to_string(),
db_data_type:"uuid".to_string(),
is_primary:true, is_unique:false, not_null:true, is_inherited:false,
default:Some("uuid_generate_v4()".to_string()),
comment:None,
foreign:None,
},
Column{
name:"name".to_string(),
data_type:"String".to_string(),
db_data_type:"character varying".to_string(),
is_primary:false, is_unique:false, not_null:true, is_inherited:false,
default:None,
comment:None,
foreign:None,
},
Column{
name:"description".to_string(),
data_type:"String".to_string(),
db_data_type:"character varying".to_string(),
is_primary:false, is_unique:false, not_null:false, is_inherited:true,
default:None,
comment:None,
foreign:None,
},
],
is_view: false,
}
} | identifier_body |
get_exact_record.rs | extern crate rustorm;
extern crate uuid;
extern crate chrono;
extern crate rustc_serialize;
use uuid::Uuid;
use rustorm::dao::{Dao, IsDao};
use rustorm::pool::ManagedPool;
use rustorm::em::EntityManager;
use rustorm::table::{Table, Column};
use rustorm::table::IsTable;
#[derive(Debug, Clone)]
pub struct Product {
pub product_id: Uuid,
pub name: String,
pub description: Option<String>,
}
impl IsDao for Product{
fn from_dao(dao: &Dao) -> Self {
Product {
product_id: dao.get("product_id"),
name: dao.get("name"),
description: dao.get_opt("description"),
}
}
fn to_dao(&self) -> Dao {
let mut dao = Dao::new();
dao.set("product_id", &self.product_id);
dao.set("name", &self.name);
match self.description {
Some(ref _value) => dao.set("description", _value),
None => dao.set_null("description"),
}
dao
}
}
impl IsTable for Product{
fn table() -> Table {
Table {
schema: "bazaar".to_string(),
name: "product".to_string(),
parent_table: None,
sub_table: vec![],
comment: None,
columns: vec![
Column{
name:"product_id".to_string(),
data_type:"Uuid".to_string(),
db_data_type:"uuid".to_string(),
is_primary:true, is_unique:false, not_null:true, is_inherited:false,
default:Some("uuid_generate_v4()".to_string()),
comment:None,
foreign:None,
},
Column{
name:"name".to_string(),
data_type:"String".to_string(),
db_data_type:"character varying".to_string(),
is_primary:false, is_unique:false, not_null:true, is_inherited:false,
default:None,
comment:None,
foreign:None,
},
Column{
name:"description".to_string(),
data_type:"String".to_string(),
db_data_type:"character varying".to_string(),
is_primary:false, is_unique:false, not_null:false, is_inherited:true,
default:None,
comment:None,
foreign:None,
},
],
is_view: false,
}
}
}
fn | () {
let url = "postgres://postgres:p0stgr3s@localhost/bazaar_v6";
let pool = ManagedPool::init(&url, 1).unwrap();
let db = pool.connect().unwrap();
let em = EntityManager::new(db.as_ref());
let pid = Uuid::parse_str("6db712e6-cc50-4c3a-8269-451c98ace5ad").unwrap();
let prod: Product = em.get_exact(&pid).unwrap();
println!("{} {} {:?}", prod.product_id, prod.name, prod.description);
//pool.release(db);
}
| main | identifier_name |
Gauge.tsx | // Libraries
import React, {Component} from 'react'
import _ from 'lodash'
// Components
import {ErrorHandling} from 'src/shared/decorators/errors'
// Utils
import {formatStatValue} from 'src/shared/utils/formatStatValue'
// Constants
import {GAUGE_THEME_DARK, GaugeTheme} from 'src/shared/constants/gaugeSpecs'
import {
COLOR_TYPE_MIN,
COLOR_TYPE_MAX,
DEFAULT_VALUE_MAX,
DEFAULT_VALUE_MIN,
MIN_THRESHOLDS,
} from 'src/shared/constants/thresholds'
// Types
import {Color} from 'src/types/colors'
import {DecimalPlaces} from 'src/types/dashboards'
interface Props {
width: number
height: number
gaugePosition: number
colors?: Color[]
prefix: string
tickPrefix: string
suffix: string
tickSuffix: string
decimalPlaces: DecimalPlaces
theme?: GaugeTheme
}
@ErrorHandling
class Gauge extends Component<Props> {
private canvasRef: React.RefObject<HTMLCanvasElement>
public static defaultProps = {
theme: GAUGE_THEME_DARK,
}
constructor(props: Props) {
super(props)
this.canvasRef = React.createRef()
}
public componentDidMount() {
this.updateCanvas()
}
public componentDidUpdate() {
this.updateCanvas()
}
public render() {
const {width, height} = this.props
return (
<canvas
className="gauge"
width={width}
height={height}
ref={this.canvasRef}
/>
)
}
private updateCanvas = () => {
this.resetCanvas()
const canvas = this.canvasRef.current
const ctx = canvas.getContext('2d')
const {width, height} = this.props
const centerX = width / 2
const centerY = (height / 2) * 1.13
const radius = (Math.min(width, height) / 2) * 0.5
const {minLineWidth, minFontSize} = this.props.theme
const gradientThickness = Math.max(minLineWidth, radius / 4)
const labelValueFontSize = Math.max(minFontSize, radius / 4)
const {colors} = this.props
if (!colors || colors.length === 0) {
return
}
// Distill out max and min values
const minValue = Number(
_.get(
colors.find(color => color.type === COLOR_TYPE_MIN),
'value',
DEFAULT_VALUE_MIN
)
)
const maxValue = Number(
_.get(
colors.find(color => color.type === COLOR_TYPE_MAX),
'value',
DEFAULT_VALUE_MAX
)
)
// The following functions must be called in the specified order
if (colors.length === MIN_THRESHOLDS) {
this.drawGradientGauge(ctx, centerX, centerY, radius, gradientThickness)
} else {
this.drawSegmentedGauge(
ctx,
centerX,
centerY,
radius,
minValue,
maxValue,
gradientThickness
)
}
this.drawGaugeLines(ctx, centerX, centerY, radius, gradientThickness)
this.drawGaugeLabels(ctx, radius, gradientThickness, minValue, maxValue)
this.drawGaugeValue(ctx, radius, labelValueFontSize)
this.drawNeedle(ctx, radius, minValue, maxValue)
}
private resetCanvas = () => {
const canvas = this.canvasRef.current
const ctx = canvas.getContext('2d')
const {width, height} = this.props
const dpRatio = window.devicePixelRatio || 1
// Set up canvas to draw on HiDPI / Retina screens correctly
canvas.width = width * dpRatio
canvas.height = height * dpRatio
canvas.style.width = `${width}px`
canvas.style.height = `${height}px`
ctx.scale(dpRatio, dpRatio)
// Clear the canvas
ctx.clearRect(0, 0, width, height)
}
private drawGradientGauge = (ctx, xc, yc, r, gradientThickness) => {
const {colors} = this.props
const sortedColors = _.sortBy(colors, color => Number(color.value))
const arcStart = Math.PI * 0.75
const arcEnd = arcStart + Math.PI * 1.5
// Determine coordinates for gradient
const xStart = xc + Math.cos(arcStart) * r
const yStart = yc + Math.sin(arcStart) * r
const xEnd = xc + Math.cos(arcEnd) * r
const yEnd = yc + Math.sin(arcEnd) * r
const gradient = ctx.createLinearGradient(xStart, yStart, xEnd, yEnd)
gradient.addColorStop(0, sortedColors[0].hex)
gradient.addColorStop(1.0, sortedColors[1].hex)
ctx.beginPath()
ctx.lineWidth = gradientThickness
ctx.strokeStyle = gradient
ctx.arc(xc, yc, r, arcStart, arcEnd)
ctx.stroke()
}
private drawSegmentedGauge = (
ctx,
xc,
yc,
r,
minValue,
maxValue,
gradientThickness
) => {
const {colors} = this.props
const sortedColors = _.sortBy(colors, color => Number(color.value))
const trueValueRange = Math.abs(maxValue - minValue)
const totalArcLength = Math.PI * 1.5
let startingPoint = Math.PI * 0.75
// Iterate through colors, draw arc for each
for (let c = 0; c < sortedColors.length - 1; c++) {
// Use this color and the next to determine arc length
const color = sortedColors[c]
const nextColor = sortedColors[c + 1]
// adjust values by subtracting minValue from them
const adjustedValue = Number(color.value) - minValue
const adjustedNextValue = Number(nextColor.value) - minValue
const thisArc = Math.abs(adjustedValue - adjustedNextValue)
// Multiply by arcLength to determine this arc's length
const arcLength = totalArcLength * (thisArc / trueValueRange)
// Draw arc
ctx.beginPath()
ctx.lineWidth = gradientThickness
ctx.strokeStyle = color.hex
ctx.arc(xc, yc, r, startingPoint, startingPoint + arcLength)
ctx.stroke()
// Add this arc's length to starting point
startingPoint += arcLength
}
}
private drawGaugeLines = (ctx, xc, yc, radius, gradientThickness) => {
const {
degree,
lineCount,
lineColor,
lineStrokeSmall,
lineStrokeLarge,
tickSizeSmall,
tickSizeLarge,
smallLineCount,
} = this.props.theme
const arcStart = Math.PI * 0.75
const arcLength = Math.PI * 1.5
const arcStop = arcStart + arcLength
const totalSmallLineCount = lineCount * smallLineCount
const startDegree = degree * 135
const arcLargeIncrement = arcLength / lineCount
const arcSmallIncrement = arcLength / totalSmallLineCount
// Semi-circle
const arcRadius = radius + gradientThickness * 0.8
ctx.beginPath()
ctx.arc(xc, yc, arcRadius, arcStart, arcStop)
ctx.lineWidth = 3
ctx.lineCap = 'round'
ctx.strokeStyle = lineColor
ctx.stroke()
ctx.closePath()
// Match center of canvas to center of gauge
ctx.translate(xc, yc)
// Draw Large ticks
for (let lt = 0; lt <= lineCount; lt++) {
// Rotation before drawing line
ctx.rotate(startDegree)
ctx.rotate(lt * arcLargeIncrement)
// Draw line
ctx.beginPath()
ctx.lineWidth = lineStrokeLarge
ctx.lineCap = 'round'
ctx.strokeStyle = lineColor
ctx.moveTo(arcRadius, 0)
ctx.lineTo(arcRadius + tickSizeLarge, 0)
ctx.stroke()
ctx.closePath()
// Return to starting rotation
ctx.rotate(-lt * arcLargeIncrement)
ctx.rotate(-startDegree)
}
// Draw Small ticks
for (let lt = 0; lt <= totalSmallLineCount; lt++) {
// Rotation before drawing line
ctx.rotate(startDegree)
ctx.rotate(lt * arcSmallIncrement)
// Draw line
ctx.beginPath()
ctx.lineWidth = lineStrokeSmall
ctx.lineCap = 'round'
ctx.strokeStyle = lineColor
ctx.moveTo(arcRadius, 0)
ctx.lineTo(arcRadius + tickSizeSmall, 0)
ctx.stroke()
ctx.closePath()
// Return to starting rotation
ctx.rotate(-lt * arcSmallIncrement)
ctx.rotate(-startDegree)
}
}
private drawGaugeLabels = (
ctx,
radius,
gradientThickness,
minValue,
maxValue
) => {
const {tickPrefix, tickSuffix, decimalPlaces} = this.props
let {prefix, suffix} = this.props
const {degree, lineCount, labelColor, labelFontSize} = this.props.theme
const tickValues = [
..._.range(minValue, maxValue, Math.abs(maxValue - minValue) / lineCount),
maxValue,
]
if (tickPrefix === 'true') {
prefix = ''
}
if (tickSuffix === 'true') {
suffix = ''
}
const labels = tickValues.map(tick =>
formatStatValue(tick, {decimalPlaces, prefix, suffix})
)
const startDegree = degree * 135
const arcLength = Math.PI * 1.5
const arcIncrement = arcLength / lineCount
// Format labels text
ctx.font = `bold ${labelFontSize}px Rubik`
ctx.fillStyle = labelColor
ctx.textBaseline = 'middle'
ctx.textAlign = 'right'
let labelRadius
for (let i = 0; i <= lineCount; i++) {
if (i === 3) {
ctx.textAlign = 'center'
labelRadius = radius + gradientThickness + 30
} else {
labelRadius = radius + gradientThickness + 23
}
if (i > 3) {
ctx.textAlign = 'left'
}
ctx.rotate(startDegree)
ctx.rotate(i * arcIncrement)
ctx.translate(labelRadius, 0)
ctx.rotate(i * -arcIncrement)
ctx.rotate(-startDegree)
ctx.fillText(labels[i], 0, 0)
ctx.rotate(startDegree)
ctx.rotate(i * arcIncrement)
ctx.translate(-labelRadius, 0)
ctx.rotate(i * -arcIncrement)
ctx.rotate(-startDegree)
}
}
private drawGaugeValue = (ctx, radius, labelValueFontSize) => {
const {gaugePosition, prefix, suffix, decimalPlaces} = this.props
const {valueColor} = this.props.theme
ctx.font = `${labelValueFontSize}px Rubik` | ctx.textAlign = 'center'
const textY = radius
const textContent = formatStatValue(gaugePosition, {
decimalPlaces,
prefix,
suffix,
})
ctx.fillText(textContent, 0, textY)
}
private drawNeedle = (ctx, radius, minValue, maxValue) => {
const {gaugePosition} = this.props
const {degree, needleColor0, needleColor1, overflowDelta} = this.props.theme
const arcDistance = Math.PI * 1.5
let needleRotation: number
if (gaugePosition <= minValue) {
needleRotation = 0 - overflowDelta
} else if (gaugePosition >= maxValue) {
needleRotation = 1 + overflowDelta
} else {
needleRotation = (gaugePosition - minValue) / (maxValue - minValue)
}
const needleGradient = ctx.createLinearGradient(0, -10, 0, radius)
needleGradient.addColorStop(0, needleColor0)
needleGradient.addColorStop(1, needleColor1)
// Starting position of needle is at minimum
ctx.rotate(degree * 45)
ctx.rotate(arcDistance * needleRotation)
ctx.beginPath()
ctx.fillStyle = needleGradient
ctx.arc(0, 0, 10, 0, Math.PI, true)
ctx.lineTo(0, radius)
ctx.lineTo(10, 0)
ctx.fill()
}
}
export default Gauge | ctx.fillStyle = valueColor
ctx.textBaseline = 'middle' | random_line_split |
Gauge.tsx | // Libraries
import React, {Component} from 'react'
import _ from 'lodash'
// Components
import {ErrorHandling} from 'src/shared/decorators/errors'
// Utils
import {formatStatValue} from 'src/shared/utils/formatStatValue'
// Constants
import {GAUGE_THEME_DARK, GaugeTheme} from 'src/shared/constants/gaugeSpecs'
import {
COLOR_TYPE_MIN,
COLOR_TYPE_MAX,
DEFAULT_VALUE_MAX,
DEFAULT_VALUE_MIN,
MIN_THRESHOLDS,
} from 'src/shared/constants/thresholds'
// Types
import {Color} from 'src/types/colors'
import {DecimalPlaces} from 'src/types/dashboards'
interface Props {
width: number
height: number
gaugePosition: number
colors?: Color[]
prefix: string
tickPrefix: string
suffix: string
tickSuffix: string
decimalPlaces: DecimalPlaces
theme?: GaugeTheme
}
@ErrorHandling
class Gauge extends Component<Props> {
private canvasRef: React.RefObject<HTMLCanvasElement>
public static defaultProps = {
theme: GAUGE_THEME_DARK,
}
constructor(props: Props) {
super(props)
this.canvasRef = React.createRef()
}
public componentDidMount() {
this.updateCanvas()
}
public | () {
this.updateCanvas()
}
public render() {
const {width, height} = this.props
return (
<canvas
className="gauge"
width={width}
height={height}
ref={this.canvasRef}
/>
)
}
private updateCanvas = () => {
this.resetCanvas()
const canvas = this.canvasRef.current
const ctx = canvas.getContext('2d')
const {width, height} = this.props
const centerX = width / 2
const centerY = (height / 2) * 1.13
const radius = (Math.min(width, height) / 2) * 0.5
const {minLineWidth, minFontSize} = this.props.theme
const gradientThickness = Math.max(minLineWidth, radius / 4)
const labelValueFontSize = Math.max(minFontSize, radius / 4)
const {colors} = this.props
if (!colors || colors.length === 0) {
return
}
// Distill out max and min values
const minValue = Number(
_.get(
colors.find(color => color.type === COLOR_TYPE_MIN),
'value',
DEFAULT_VALUE_MIN
)
)
const maxValue = Number(
_.get(
colors.find(color => color.type === COLOR_TYPE_MAX),
'value',
DEFAULT_VALUE_MAX
)
)
// The following functions must be called in the specified order
if (colors.length === MIN_THRESHOLDS) {
this.drawGradientGauge(ctx, centerX, centerY, radius, gradientThickness)
} else {
this.drawSegmentedGauge(
ctx,
centerX,
centerY,
radius,
minValue,
maxValue,
gradientThickness
)
}
this.drawGaugeLines(ctx, centerX, centerY, radius, gradientThickness)
this.drawGaugeLabels(ctx, radius, gradientThickness, minValue, maxValue)
this.drawGaugeValue(ctx, radius, labelValueFontSize)
this.drawNeedle(ctx, radius, minValue, maxValue)
}
private resetCanvas = () => {
const canvas = this.canvasRef.current
const ctx = canvas.getContext('2d')
const {width, height} = this.props
const dpRatio = window.devicePixelRatio || 1
// Set up canvas to draw on HiDPI / Retina screens correctly
canvas.width = width * dpRatio
canvas.height = height * dpRatio
canvas.style.width = `${width}px`
canvas.style.height = `${height}px`
ctx.scale(dpRatio, dpRatio)
// Clear the canvas
ctx.clearRect(0, 0, width, height)
}
private drawGradientGauge = (ctx, xc, yc, r, gradientThickness) => {
const {colors} = this.props
const sortedColors = _.sortBy(colors, color => Number(color.value))
const arcStart = Math.PI * 0.75
const arcEnd = arcStart + Math.PI * 1.5
// Determine coordinates for gradient
const xStart = xc + Math.cos(arcStart) * r
const yStart = yc + Math.sin(arcStart) * r
const xEnd = xc + Math.cos(arcEnd) * r
const yEnd = yc + Math.sin(arcEnd) * r
const gradient = ctx.createLinearGradient(xStart, yStart, xEnd, yEnd)
gradient.addColorStop(0, sortedColors[0].hex)
gradient.addColorStop(1.0, sortedColors[1].hex)
ctx.beginPath()
ctx.lineWidth = gradientThickness
ctx.strokeStyle = gradient
ctx.arc(xc, yc, r, arcStart, arcEnd)
ctx.stroke()
}
private drawSegmentedGauge = (
ctx,
xc,
yc,
r,
minValue,
maxValue,
gradientThickness
) => {
const {colors} = this.props
const sortedColors = _.sortBy(colors, color => Number(color.value))
const trueValueRange = Math.abs(maxValue - minValue)
const totalArcLength = Math.PI * 1.5
let startingPoint = Math.PI * 0.75
// Iterate through colors, draw arc for each
for (let c = 0; c < sortedColors.length - 1; c++) {
// Use this color and the next to determine arc length
const color = sortedColors[c]
const nextColor = sortedColors[c + 1]
// adjust values by subtracting minValue from them
const adjustedValue = Number(color.value) - minValue
const adjustedNextValue = Number(nextColor.value) - minValue
const thisArc = Math.abs(adjustedValue - adjustedNextValue)
// Multiply by arcLength to determine this arc's length
const arcLength = totalArcLength * (thisArc / trueValueRange)
// Draw arc
ctx.beginPath()
ctx.lineWidth = gradientThickness
ctx.strokeStyle = color.hex
ctx.arc(xc, yc, r, startingPoint, startingPoint + arcLength)
ctx.stroke()
// Add this arc's length to starting point
startingPoint += arcLength
}
}
private drawGaugeLines = (ctx, xc, yc, radius, gradientThickness) => {
const {
degree,
lineCount,
lineColor,
lineStrokeSmall,
lineStrokeLarge,
tickSizeSmall,
tickSizeLarge,
smallLineCount,
} = this.props.theme
const arcStart = Math.PI * 0.75
const arcLength = Math.PI * 1.5
const arcStop = arcStart + arcLength
const totalSmallLineCount = lineCount * smallLineCount
const startDegree = degree * 135
const arcLargeIncrement = arcLength / lineCount
const arcSmallIncrement = arcLength / totalSmallLineCount
// Semi-circle
const arcRadius = radius + gradientThickness * 0.8
ctx.beginPath()
ctx.arc(xc, yc, arcRadius, arcStart, arcStop)
ctx.lineWidth = 3
ctx.lineCap = 'round'
ctx.strokeStyle = lineColor
ctx.stroke()
ctx.closePath()
// Match center of canvas to center of gauge
ctx.translate(xc, yc)
// Draw Large ticks
for (let lt = 0; lt <= lineCount; lt++) {
// Rotation before drawing line
ctx.rotate(startDegree)
ctx.rotate(lt * arcLargeIncrement)
// Draw line
ctx.beginPath()
ctx.lineWidth = lineStrokeLarge
ctx.lineCap = 'round'
ctx.strokeStyle = lineColor
ctx.moveTo(arcRadius, 0)
ctx.lineTo(arcRadius + tickSizeLarge, 0)
ctx.stroke()
ctx.closePath()
// Return to starting rotation
ctx.rotate(-lt * arcLargeIncrement)
ctx.rotate(-startDegree)
}
// Draw Small ticks
for (let lt = 0; lt <= totalSmallLineCount; lt++) {
// Rotation before drawing line
ctx.rotate(startDegree)
ctx.rotate(lt * arcSmallIncrement)
// Draw line
ctx.beginPath()
ctx.lineWidth = lineStrokeSmall
ctx.lineCap = 'round'
ctx.strokeStyle = lineColor
ctx.moveTo(arcRadius, 0)
ctx.lineTo(arcRadius + tickSizeSmall, 0)
ctx.stroke()
ctx.closePath()
// Return to starting rotation
ctx.rotate(-lt * arcSmallIncrement)
ctx.rotate(-startDegree)
}
}
private drawGaugeLabels = (
ctx,
radius,
gradientThickness,
minValue,
maxValue
) => {
const {tickPrefix, tickSuffix, decimalPlaces} = this.props
let {prefix, suffix} = this.props
const {degree, lineCount, labelColor, labelFontSize} = this.props.theme
const tickValues = [
..._.range(minValue, maxValue, Math.abs(maxValue - minValue) / lineCount),
maxValue,
]
if (tickPrefix === 'true') {
prefix = ''
}
if (tickSuffix === 'true') {
suffix = ''
}
const labels = tickValues.map(tick =>
formatStatValue(tick, {decimalPlaces, prefix, suffix})
)
const startDegree = degree * 135
const arcLength = Math.PI * 1.5
const arcIncrement = arcLength / lineCount
// Format labels text
ctx.font = `bold ${labelFontSize}px Rubik`
ctx.fillStyle = labelColor
ctx.textBaseline = 'middle'
ctx.textAlign = 'right'
let labelRadius
for (let i = 0; i <= lineCount; i++) {
if (i === 3) {
ctx.textAlign = 'center'
labelRadius = radius + gradientThickness + 30
} else {
labelRadius = radius + gradientThickness + 23
}
if (i > 3) {
ctx.textAlign = 'left'
}
ctx.rotate(startDegree)
ctx.rotate(i * arcIncrement)
ctx.translate(labelRadius, 0)
ctx.rotate(i * -arcIncrement)
ctx.rotate(-startDegree)
ctx.fillText(labels[i], 0, 0)
ctx.rotate(startDegree)
ctx.rotate(i * arcIncrement)
ctx.translate(-labelRadius, 0)
ctx.rotate(i * -arcIncrement)
ctx.rotate(-startDegree)
}
}
private drawGaugeValue = (ctx, radius, labelValueFontSize) => {
const {gaugePosition, prefix, suffix, decimalPlaces} = this.props
const {valueColor} = this.props.theme
ctx.font = `${labelValueFontSize}px Rubik`
ctx.fillStyle = valueColor
ctx.textBaseline = 'middle'
ctx.textAlign = 'center'
const textY = radius
const textContent = formatStatValue(gaugePosition, {
decimalPlaces,
prefix,
suffix,
})
ctx.fillText(textContent, 0, textY)
}
private drawNeedle = (ctx, radius, minValue, maxValue) => {
const {gaugePosition} = this.props
const {degree, needleColor0, needleColor1, overflowDelta} = this.props.theme
const arcDistance = Math.PI * 1.5
let needleRotation: number
if (gaugePosition <= minValue) {
needleRotation = 0 - overflowDelta
} else if (gaugePosition >= maxValue) {
needleRotation = 1 + overflowDelta
} else {
needleRotation = (gaugePosition - minValue) / (maxValue - minValue)
}
const needleGradient = ctx.createLinearGradient(0, -10, 0, radius)
needleGradient.addColorStop(0, needleColor0)
needleGradient.addColorStop(1, needleColor1)
// Starting position of needle is at minimum
ctx.rotate(degree * 45)
ctx.rotate(arcDistance * needleRotation)
ctx.beginPath()
ctx.fillStyle = needleGradient
ctx.arc(0, 0, 10, 0, Math.PI, true)
ctx.lineTo(0, radius)
ctx.lineTo(10, 0)
ctx.fill()
}
}
export default Gauge
| componentDidUpdate | identifier_name |
Gauge.tsx | // Libraries
import React, {Component} from 'react'
import _ from 'lodash'
// Components
import {ErrorHandling} from 'src/shared/decorators/errors'
// Utils
import {formatStatValue} from 'src/shared/utils/formatStatValue'
// Constants
import {GAUGE_THEME_DARK, GaugeTheme} from 'src/shared/constants/gaugeSpecs'
import {
COLOR_TYPE_MIN,
COLOR_TYPE_MAX,
DEFAULT_VALUE_MAX,
DEFAULT_VALUE_MIN,
MIN_THRESHOLDS,
} from 'src/shared/constants/thresholds'
// Types
import {Color} from 'src/types/colors'
import {DecimalPlaces} from 'src/types/dashboards'
interface Props {
width: number
height: number
gaugePosition: number
colors?: Color[]
prefix: string
tickPrefix: string
suffix: string
tickSuffix: string
decimalPlaces: DecimalPlaces
theme?: GaugeTheme
}
@ErrorHandling
class Gauge extends Component<Props> {
private canvasRef: React.RefObject<HTMLCanvasElement>
public static defaultProps = {
theme: GAUGE_THEME_DARK,
}
constructor(props: Props) |
public componentDidMount() {
this.updateCanvas()
}
public componentDidUpdate() {
this.updateCanvas()
}
public render() {
const {width, height} = this.props
return (
<canvas
className="gauge"
width={width}
height={height}
ref={this.canvasRef}
/>
)
}
private updateCanvas = () => {
this.resetCanvas()
const canvas = this.canvasRef.current
const ctx = canvas.getContext('2d')
const {width, height} = this.props
const centerX = width / 2
const centerY = (height / 2) * 1.13
const radius = (Math.min(width, height) / 2) * 0.5
const {minLineWidth, minFontSize} = this.props.theme
const gradientThickness = Math.max(minLineWidth, radius / 4)
const labelValueFontSize = Math.max(minFontSize, radius / 4)
const {colors} = this.props
if (!colors || colors.length === 0) {
return
}
// Distill out max and min values
const minValue = Number(
_.get(
colors.find(color => color.type === COLOR_TYPE_MIN),
'value',
DEFAULT_VALUE_MIN
)
)
const maxValue = Number(
_.get(
colors.find(color => color.type === COLOR_TYPE_MAX),
'value',
DEFAULT_VALUE_MAX
)
)
// The following functions must be called in the specified order
if (colors.length === MIN_THRESHOLDS) {
this.drawGradientGauge(ctx, centerX, centerY, radius, gradientThickness)
} else {
this.drawSegmentedGauge(
ctx,
centerX,
centerY,
radius,
minValue,
maxValue,
gradientThickness
)
}
this.drawGaugeLines(ctx, centerX, centerY, radius, gradientThickness)
this.drawGaugeLabels(ctx, radius, gradientThickness, minValue, maxValue)
this.drawGaugeValue(ctx, radius, labelValueFontSize)
this.drawNeedle(ctx, radius, minValue, maxValue)
}
private resetCanvas = () => {
const canvas = this.canvasRef.current
const ctx = canvas.getContext('2d')
const {width, height} = this.props
const dpRatio = window.devicePixelRatio || 1
// Set up canvas to draw on HiDPI / Retina screens correctly
canvas.width = width * dpRatio
canvas.height = height * dpRatio
canvas.style.width = `${width}px`
canvas.style.height = `${height}px`
ctx.scale(dpRatio, dpRatio)
// Clear the canvas
ctx.clearRect(0, 0, width, height)
}
private drawGradientGauge = (ctx, xc, yc, r, gradientThickness) => {
const {colors} = this.props
const sortedColors = _.sortBy(colors, color => Number(color.value))
const arcStart = Math.PI * 0.75
const arcEnd = arcStart + Math.PI * 1.5
// Determine coordinates for gradient
const xStart = xc + Math.cos(arcStart) * r
const yStart = yc + Math.sin(arcStart) * r
const xEnd = xc + Math.cos(arcEnd) * r
const yEnd = yc + Math.sin(arcEnd) * r
const gradient = ctx.createLinearGradient(xStart, yStart, xEnd, yEnd)
gradient.addColorStop(0, sortedColors[0].hex)
gradient.addColorStop(1.0, sortedColors[1].hex)
ctx.beginPath()
ctx.lineWidth = gradientThickness
ctx.strokeStyle = gradient
ctx.arc(xc, yc, r, arcStart, arcEnd)
ctx.stroke()
}
private drawSegmentedGauge = (
ctx,
xc,
yc,
r,
minValue,
maxValue,
gradientThickness
) => {
const {colors} = this.props
const sortedColors = _.sortBy(colors, color => Number(color.value))
const trueValueRange = Math.abs(maxValue - minValue)
const totalArcLength = Math.PI * 1.5
let startingPoint = Math.PI * 0.75
// Iterate through colors, draw arc for each
for (let c = 0; c < sortedColors.length - 1; c++) {
// Use this color and the next to determine arc length
const color = sortedColors[c]
const nextColor = sortedColors[c + 1]
// adjust values by subtracting minValue from them
const adjustedValue = Number(color.value) - minValue
const adjustedNextValue = Number(nextColor.value) - minValue
const thisArc = Math.abs(adjustedValue - adjustedNextValue)
// Multiply by arcLength to determine this arc's length
const arcLength = totalArcLength * (thisArc / trueValueRange)
// Draw arc
ctx.beginPath()
ctx.lineWidth = gradientThickness
ctx.strokeStyle = color.hex
ctx.arc(xc, yc, r, startingPoint, startingPoint + arcLength)
ctx.stroke()
// Add this arc's length to starting point
startingPoint += arcLength
}
}
private drawGaugeLines = (ctx, xc, yc, radius, gradientThickness) => {
const {
degree,
lineCount,
lineColor,
lineStrokeSmall,
lineStrokeLarge,
tickSizeSmall,
tickSizeLarge,
smallLineCount,
} = this.props.theme
const arcStart = Math.PI * 0.75
const arcLength = Math.PI * 1.5
const arcStop = arcStart + arcLength
const totalSmallLineCount = lineCount * smallLineCount
const startDegree = degree * 135
const arcLargeIncrement = arcLength / lineCount
const arcSmallIncrement = arcLength / totalSmallLineCount
// Semi-circle
const arcRadius = radius + gradientThickness * 0.8
ctx.beginPath()
ctx.arc(xc, yc, arcRadius, arcStart, arcStop)
ctx.lineWidth = 3
ctx.lineCap = 'round'
ctx.strokeStyle = lineColor
ctx.stroke()
ctx.closePath()
// Match center of canvas to center of gauge
ctx.translate(xc, yc)
// Draw Large ticks
for (let lt = 0; lt <= lineCount; lt++) {
// Rotation before drawing line
ctx.rotate(startDegree)
ctx.rotate(lt * arcLargeIncrement)
// Draw line
ctx.beginPath()
ctx.lineWidth = lineStrokeLarge
ctx.lineCap = 'round'
ctx.strokeStyle = lineColor
ctx.moveTo(arcRadius, 0)
ctx.lineTo(arcRadius + tickSizeLarge, 0)
ctx.stroke()
ctx.closePath()
// Return to starting rotation
ctx.rotate(-lt * arcLargeIncrement)
ctx.rotate(-startDegree)
}
// Draw Small ticks
for (let lt = 0; lt <= totalSmallLineCount; lt++) {
// Rotation before drawing line
ctx.rotate(startDegree)
ctx.rotate(lt * arcSmallIncrement)
// Draw line
ctx.beginPath()
ctx.lineWidth = lineStrokeSmall
ctx.lineCap = 'round'
ctx.strokeStyle = lineColor
ctx.moveTo(arcRadius, 0)
ctx.lineTo(arcRadius + tickSizeSmall, 0)
ctx.stroke()
ctx.closePath()
// Return to starting rotation
ctx.rotate(-lt * arcSmallIncrement)
ctx.rotate(-startDegree)
}
}
private drawGaugeLabels = (
ctx,
radius,
gradientThickness,
minValue,
maxValue
) => {
const {tickPrefix, tickSuffix, decimalPlaces} = this.props
let {prefix, suffix} = this.props
const {degree, lineCount, labelColor, labelFontSize} = this.props.theme
const tickValues = [
..._.range(minValue, maxValue, Math.abs(maxValue - minValue) / lineCount),
maxValue,
]
if (tickPrefix === 'true') {
prefix = ''
}
if (tickSuffix === 'true') {
suffix = ''
}
const labels = tickValues.map(tick =>
formatStatValue(tick, {decimalPlaces, prefix, suffix})
)
const startDegree = degree * 135
const arcLength = Math.PI * 1.5
const arcIncrement = arcLength / lineCount
// Format labels text
ctx.font = `bold ${labelFontSize}px Rubik`
ctx.fillStyle = labelColor
ctx.textBaseline = 'middle'
ctx.textAlign = 'right'
let labelRadius
for (let i = 0; i <= lineCount; i++) {
if (i === 3) {
ctx.textAlign = 'center'
labelRadius = radius + gradientThickness + 30
} else {
labelRadius = radius + gradientThickness + 23
}
if (i > 3) {
ctx.textAlign = 'left'
}
ctx.rotate(startDegree)
ctx.rotate(i * arcIncrement)
ctx.translate(labelRadius, 0)
ctx.rotate(i * -arcIncrement)
ctx.rotate(-startDegree)
ctx.fillText(labels[i], 0, 0)
ctx.rotate(startDegree)
ctx.rotate(i * arcIncrement)
ctx.translate(-labelRadius, 0)
ctx.rotate(i * -arcIncrement)
ctx.rotate(-startDegree)
}
}
private drawGaugeValue = (ctx, radius, labelValueFontSize) => {
const {gaugePosition, prefix, suffix, decimalPlaces} = this.props
const {valueColor} = this.props.theme
ctx.font = `${labelValueFontSize}px Rubik`
ctx.fillStyle = valueColor
ctx.textBaseline = 'middle'
ctx.textAlign = 'center'
const textY = radius
const textContent = formatStatValue(gaugePosition, {
decimalPlaces,
prefix,
suffix,
})
ctx.fillText(textContent, 0, textY)
}
private drawNeedle = (ctx, radius, minValue, maxValue) => {
const {gaugePosition} = this.props
const {degree, needleColor0, needleColor1, overflowDelta} = this.props.theme
const arcDistance = Math.PI * 1.5
let needleRotation: number
if (gaugePosition <= minValue) {
needleRotation = 0 - overflowDelta
} else if (gaugePosition >= maxValue) {
needleRotation = 1 + overflowDelta
} else {
needleRotation = (gaugePosition - minValue) / (maxValue - minValue)
}
const needleGradient = ctx.createLinearGradient(0, -10, 0, radius)
needleGradient.addColorStop(0, needleColor0)
needleGradient.addColorStop(1, needleColor1)
// Starting position of needle is at minimum
ctx.rotate(degree * 45)
ctx.rotate(arcDistance * needleRotation)
ctx.beginPath()
ctx.fillStyle = needleGradient
ctx.arc(0, 0, 10, 0, Math.PI, true)
ctx.lineTo(0, radius)
ctx.lineTo(10, 0)
ctx.fill()
}
}
export default Gauge
| {
super(props)
this.canvasRef = React.createRef()
} | identifier_body |
Gauge.tsx | // Libraries
import React, {Component} from 'react'
import _ from 'lodash'
// Components
import {ErrorHandling} from 'src/shared/decorators/errors'
// Utils
import {formatStatValue} from 'src/shared/utils/formatStatValue'
// Constants
import {GAUGE_THEME_DARK, GaugeTheme} from 'src/shared/constants/gaugeSpecs'
import {
COLOR_TYPE_MIN,
COLOR_TYPE_MAX,
DEFAULT_VALUE_MAX,
DEFAULT_VALUE_MIN,
MIN_THRESHOLDS,
} from 'src/shared/constants/thresholds'
// Types
import {Color} from 'src/types/colors'
import {DecimalPlaces} from 'src/types/dashboards'
interface Props {
width: number
height: number
gaugePosition: number
colors?: Color[]
prefix: string
tickPrefix: string
suffix: string
tickSuffix: string
decimalPlaces: DecimalPlaces
theme?: GaugeTheme
}
@ErrorHandling
class Gauge extends Component<Props> {
private canvasRef: React.RefObject<HTMLCanvasElement>
public static defaultProps = {
theme: GAUGE_THEME_DARK,
}
constructor(props: Props) {
super(props)
this.canvasRef = React.createRef()
}
public componentDidMount() {
this.updateCanvas()
}
public componentDidUpdate() {
this.updateCanvas()
}
public render() {
const {width, height} = this.props
return (
<canvas
className="gauge"
width={width}
height={height}
ref={this.canvasRef}
/>
)
}
private updateCanvas = () => {
this.resetCanvas()
const canvas = this.canvasRef.current
const ctx = canvas.getContext('2d')
const {width, height} = this.props
const centerX = width / 2
const centerY = (height / 2) * 1.13
const radius = (Math.min(width, height) / 2) * 0.5
const {minLineWidth, minFontSize} = this.props.theme
const gradientThickness = Math.max(minLineWidth, radius / 4)
const labelValueFontSize = Math.max(minFontSize, radius / 4)
const {colors} = this.props
if (!colors || colors.length === 0) {
return
}
// Distill out max and min values
const minValue = Number(
_.get(
colors.find(color => color.type === COLOR_TYPE_MIN),
'value',
DEFAULT_VALUE_MIN
)
)
const maxValue = Number(
_.get(
colors.find(color => color.type === COLOR_TYPE_MAX),
'value',
DEFAULT_VALUE_MAX
)
)
// The following functions must be called in the specified order
if (colors.length === MIN_THRESHOLDS) {
this.drawGradientGauge(ctx, centerX, centerY, radius, gradientThickness)
} else {
this.drawSegmentedGauge(
ctx,
centerX,
centerY,
radius,
minValue,
maxValue,
gradientThickness
)
}
this.drawGaugeLines(ctx, centerX, centerY, radius, gradientThickness)
this.drawGaugeLabels(ctx, radius, gradientThickness, minValue, maxValue)
this.drawGaugeValue(ctx, radius, labelValueFontSize)
this.drawNeedle(ctx, radius, minValue, maxValue)
}
private resetCanvas = () => {
const canvas = this.canvasRef.current
const ctx = canvas.getContext('2d')
const {width, height} = this.props
const dpRatio = window.devicePixelRatio || 1
// Set up canvas to draw on HiDPI / Retina screens correctly
canvas.width = width * dpRatio
canvas.height = height * dpRatio
canvas.style.width = `${width}px`
canvas.style.height = `${height}px`
ctx.scale(dpRatio, dpRatio)
// Clear the canvas
ctx.clearRect(0, 0, width, height)
}
private drawGradientGauge = (ctx, xc, yc, r, gradientThickness) => {
const {colors} = this.props
const sortedColors = _.sortBy(colors, color => Number(color.value))
const arcStart = Math.PI * 0.75
const arcEnd = arcStart + Math.PI * 1.5
// Determine coordinates for gradient
const xStart = xc + Math.cos(arcStart) * r
const yStart = yc + Math.sin(arcStart) * r
const xEnd = xc + Math.cos(arcEnd) * r
const yEnd = yc + Math.sin(arcEnd) * r
const gradient = ctx.createLinearGradient(xStart, yStart, xEnd, yEnd)
gradient.addColorStop(0, sortedColors[0].hex)
gradient.addColorStop(1.0, sortedColors[1].hex)
ctx.beginPath()
ctx.lineWidth = gradientThickness
ctx.strokeStyle = gradient
ctx.arc(xc, yc, r, arcStart, arcEnd)
ctx.stroke()
}
private drawSegmentedGauge = (
ctx,
xc,
yc,
r,
minValue,
maxValue,
gradientThickness
) => {
const {colors} = this.props
const sortedColors = _.sortBy(colors, color => Number(color.value))
const trueValueRange = Math.abs(maxValue - minValue)
const totalArcLength = Math.PI * 1.5
let startingPoint = Math.PI * 0.75
// Iterate through colors, draw arc for each
for (let c = 0; c < sortedColors.length - 1; c++) |
}
private drawGaugeLines = (ctx, xc, yc, radius, gradientThickness) => {
const {
degree,
lineCount,
lineColor,
lineStrokeSmall,
lineStrokeLarge,
tickSizeSmall,
tickSizeLarge,
smallLineCount,
} = this.props.theme
const arcStart = Math.PI * 0.75
const arcLength = Math.PI * 1.5
const arcStop = arcStart + arcLength
const totalSmallLineCount = lineCount * smallLineCount
const startDegree = degree * 135
const arcLargeIncrement = arcLength / lineCount
const arcSmallIncrement = arcLength / totalSmallLineCount
// Semi-circle
const arcRadius = radius + gradientThickness * 0.8
ctx.beginPath()
ctx.arc(xc, yc, arcRadius, arcStart, arcStop)
ctx.lineWidth = 3
ctx.lineCap = 'round'
ctx.strokeStyle = lineColor
ctx.stroke()
ctx.closePath()
// Match center of canvas to center of gauge
ctx.translate(xc, yc)
// Draw Large ticks
for (let lt = 0; lt <= lineCount; lt++) {
// Rotation before drawing line
ctx.rotate(startDegree)
ctx.rotate(lt * arcLargeIncrement)
// Draw line
ctx.beginPath()
ctx.lineWidth = lineStrokeLarge
ctx.lineCap = 'round'
ctx.strokeStyle = lineColor
ctx.moveTo(arcRadius, 0)
ctx.lineTo(arcRadius + tickSizeLarge, 0)
ctx.stroke()
ctx.closePath()
// Return to starting rotation
ctx.rotate(-lt * arcLargeIncrement)
ctx.rotate(-startDegree)
}
// Draw Small ticks
for (let lt = 0; lt <= totalSmallLineCount; lt++) {
// Rotation before drawing line
ctx.rotate(startDegree)
ctx.rotate(lt * arcSmallIncrement)
// Draw line
ctx.beginPath()
ctx.lineWidth = lineStrokeSmall
ctx.lineCap = 'round'
ctx.strokeStyle = lineColor
ctx.moveTo(arcRadius, 0)
ctx.lineTo(arcRadius + tickSizeSmall, 0)
ctx.stroke()
ctx.closePath()
// Return to starting rotation
ctx.rotate(-lt * arcSmallIncrement)
ctx.rotate(-startDegree)
}
}
private drawGaugeLabels = (
ctx,
radius,
gradientThickness,
minValue,
maxValue
) => {
const {tickPrefix, tickSuffix, decimalPlaces} = this.props
let {prefix, suffix} = this.props
const {degree, lineCount, labelColor, labelFontSize} = this.props.theme
const tickValues = [
..._.range(minValue, maxValue, Math.abs(maxValue - minValue) / lineCount),
maxValue,
]
if (tickPrefix === 'true') {
prefix = ''
}
if (tickSuffix === 'true') {
suffix = ''
}
const labels = tickValues.map(tick =>
formatStatValue(tick, {decimalPlaces, prefix, suffix})
)
const startDegree = degree * 135
const arcLength = Math.PI * 1.5
const arcIncrement = arcLength / lineCount
// Format labels text
ctx.font = `bold ${labelFontSize}px Rubik`
ctx.fillStyle = labelColor
ctx.textBaseline = 'middle'
ctx.textAlign = 'right'
let labelRadius
for (let i = 0; i <= lineCount; i++) {
if (i === 3) {
ctx.textAlign = 'center'
labelRadius = radius + gradientThickness + 30
} else {
labelRadius = radius + gradientThickness + 23
}
if (i > 3) {
ctx.textAlign = 'left'
}
ctx.rotate(startDegree)
ctx.rotate(i * arcIncrement)
ctx.translate(labelRadius, 0)
ctx.rotate(i * -arcIncrement)
ctx.rotate(-startDegree)
ctx.fillText(labels[i], 0, 0)
ctx.rotate(startDegree)
ctx.rotate(i * arcIncrement)
ctx.translate(-labelRadius, 0)
ctx.rotate(i * -arcIncrement)
ctx.rotate(-startDegree)
}
}
private drawGaugeValue = (ctx, radius, labelValueFontSize) => {
const {gaugePosition, prefix, suffix, decimalPlaces} = this.props
const {valueColor} = this.props.theme
ctx.font = `${labelValueFontSize}px Rubik`
ctx.fillStyle = valueColor
ctx.textBaseline = 'middle'
ctx.textAlign = 'center'
const textY = radius
const textContent = formatStatValue(gaugePosition, {
decimalPlaces,
prefix,
suffix,
})
ctx.fillText(textContent, 0, textY)
}
private drawNeedle = (ctx, radius, minValue, maxValue) => {
const {gaugePosition} = this.props
const {degree, needleColor0, needleColor1, overflowDelta} = this.props.theme
const arcDistance = Math.PI * 1.5
let needleRotation: number
if (gaugePosition <= minValue) {
needleRotation = 0 - overflowDelta
} else if (gaugePosition >= maxValue) {
needleRotation = 1 + overflowDelta
} else {
needleRotation = (gaugePosition - minValue) / (maxValue - minValue)
}
const needleGradient = ctx.createLinearGradient(0, -10, 0, radius)
needleGradient.addColorStop(0, needleColor0)
needleGradient.addColorStop(1, needleColor1)
// Starting position of needle is at minimum
ctx.rotate(degree * 45)
ctx.rotate(arcDistance * needleRotation)
ctx.beginPath()
ctx.fillStyle = needleGradient
ctx.arc(0, 0, 10, 0, Math.PI, true)
ctx.lineTo(0, radius)
ctx.lineTo(10, 0)
ctx.fill()
}
}
export default Gauge
| {
// Use this color and the next to determine arc length
const color = sortedColors[c]
const nextColor = sortedColors[c + 1]
// adjust values by subtracting minValue from them
const adjustedValue = Number(color.value) - minValue
const adjustedNextValue = Number(nextColor.value) - minValue
const thisArc = Math.abs(adjustedValue - adjustedNextValue)
// Multiply by arcLength to determine this arc's length
const arcLength = totalArcLength * (thisArc / trueValueRange)
// Draw arc
ctx.beginPath()
ctx.lineWidth = gradientThickness
ctx.strokeStyle = color.hex
ctx.arc(xc, yc, r, startingPoint, startingPoint + arcLength)
ctx.stroke()
// Add this arc's length to starting point
startingPoint += arcLength
} | conditional_block |
integration.py | import cgi
import hashlib
import http.server
import io
import os
import posixpath
import ssl
import threading
import time
import urllib.parse
import pyftpdlib.authorizers
import pyftpdlib.handlers
import pyftpdlib.servers
class FTPServer:
def __init__(self, port, root, report_size):
class FTPHandlerNoSIZE(pyftpdlib.handlers.FTPHandler):
proto_cmds = {k: v for k, v in pyftpdlib.handlers.proto_cmds.items() if k != 'SIZE'}
authorizer = pyftpdlib.authorizers.DummyAuthorizer()
authorizer.add_anonymous(root)
handler = pyftpdlib.handlers.FTPHandler if report_size else FTPHandlerNoSIZE
handler.authorizer = authorizer
self.server = pyftpdlib.servers.FTPServer(('', port), handler)
def serve(self):
self.server.serve_forever()
class HTTPServer:
def __init__(self, port, cert, root, report_size):
class RequestHandler(http.server.BaseHTTPRequestHandler):
def do_GET(self):
path = self.path.split('?', 1)[0].split('#', 1)[0]
path = urllib.parse.unquote(path)
path = posixpath.normpath(path)
path = os.path.join(root, path.lstrip('/'))
try:
with open(path, 'rb') as f:
data = f.read()
self.send_response(200)
content_type = 'application/json' if 'versioneers' in path else 'application/octet-stream'
self.send_header('Content-Type', content_type)
self.send_header('Content-Transfer-Encoding', 'binary')
if report_size:
self.send_header('Content-Length', len(data))
self.end_headers()
self.wfile.write(data)
except FileNotFoundError:
self.send_error(404)
def do_POST(self):
def dechunk(f):
bio = io.BytesIO()
while True:
chunksize = bytearray()
while not chunksize.endswith(b'\r\n'):
chunksize += f.read(1)
chunksize = chunksize.decode().split(':')[0]
chunksize = int(chunksize, 16)
if chunksize == 0:
break
chunk = f.read(chunksize)
assert(f.read(2) == b'\r\n')
bio.write(chunk)
bio.seek(0)
return bio
def verify_hash(f, hashtype, hsh):
try:
chksum = hashlib.new(hashtype)
except ValueError:
return False
chksum.update(f.read())
return chksum.hexdigest() == hsh
if self.headers.get('Transfer-Encoding') == 'chunked':
fp = dechunk(self.rfile)
else:
fp = self.rfile
data = cgi.FieldStorage(fp=fp, headers=self.headers,
environ={'REQUEST_METHOD': 'POST'},
# accept maximum of 10MB of data
limit=10 * 1024 * 1024)
try:
if 'filename' in data:
resp = b'Missing'
self.send_response(200)
self.send_header('Content-Type', 'text/plain')
self.send_header('Content-Length', len(resp)) | hsh = data[hashtype].value
hashtype = hashtype.split('sum')[0]
if verify_hash(data['file'].file, hashtype, hsh):
self.send_response(204)
self.end_headers()
else:
self.send_error(500)
except (KeyError, IndexError):
self.send_error(400)
self.server = http.server.HTTPServer(('', port), RequestHandler)
if cert:
self.server.socket = ssl.wrap_socket(self.server.socket, certfile=cert, server_side=True)
def serve(self):
self.server.serve_forever()
def main():
servers = [
FTPServer(2100, '/srv', True),
FTPServer(2101, '/srv', False),
HTTPServer(8000, None, '/srv', True),
HTTPServer(8001, None, '/srv', False),
HTTPServer(4430, '/cert.pem', '/srv', True),
HTTPServer(4431, '/cert.pem', '/srv', False),
]
threads = [threading.Thread(target=s.serve) for s in servers[1:]]
for t in threads:
t.setDaemon(True)
t.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
pass
if __name__ == '__main__':
main() | self.end_headers()
self.wfile.write(resp)
else:
hashtype = [k for k in data.keys() if k.endswith('sum')][0] | random_line_split |
integration.py | import cgi
import hashlib
import http.server
import io
import os
import posixpath
import ssl
import threading
import time
import urllib.parse
import pyftpdlib.authorizers
import pyftpdlib.handlers
import pyftpdlib.servers
class FTPServer:
def __init__(self, port, root, report_size):
class FTPHandlerNoSIZE(pyftpdlib.handlers.FTPHandler):
proto_cmds = {k: v for k, v in pyftpdlib.handlers.proto_cmds.items() if k != 'SIZE'}
authorizer = pyftpdlib.authorizers.DummyAuthorizer()
authorizer.add_anonymous(root)
handler = pyftpdlib.handlers.FTPHandler if report_size else FTPHandlerNoSIZE
handler.authorizer = authorizer
self.server = pyftpdlib.servers.FTPServer(('', port), handler)
def serve(self):
self.server.serve_forever()
class HTTPServer:
def __init__(self, port, cert, root, report_size):
class RequestHandler(http.server.BaseHTTPRequestHandler):
def do_GET(self):
path = self.path.split('?', 1)[0].split('#', 1)[0]
path = urllib.parse.unquote(path)
path = posixpath.normpath(path)
path = os.path.join(root, path.lstrip('/'))
try:
with open(path, 'rb') as f:
data = f.read()
self.send_response(200)
content_type = 'application/json' if 'versioneers' in path else 'application/octet-stream'
self.send_header('Content-Type', content_type)
self.send_header('Content-Transfer-Encoding', 'binary')
if report_size:
self.send_header('Content-Length', len(data))
self.end_headers()
self.wfile.write(data)
except FileNotFoundError:
self.send_error(404)
def do_POST(self):
def dechunk(f):
bio = io.BytesIO()
while True:
chunksize = bytearray()
while not chunksize.endswith(b'\r\n'):
chunksize += f.read(1)
chunksize = chunksize.decode().split(':')[0]
chunksize = int(chunksize, 16)
if chunksize == 0:
break
chunk = f.read(chunksize)
assert(f.read(2) == b'\r\n')
bio.write(chunk)
bio.seek(0)
return bio
def verify_hash(f, hashtype, hsh):
try:
chksum = hashlib.new(hashtype)
except ValueError:
return False
chksum.update(f.read())
return chksum.hexdigest() == hsh
if self.headers.get('Transfer-Encoding') == 'chunked':
fp = dechunk(self.rfile)
else:
fp = self.rfile
data = cgi.FieldStorage(fp=fp, headers=self.headers,
environ={'REQUEST_METHOD': 'POST'},
# accept maximum of 10MB of data
limit=10 * 1024 * 1024)
try:
if 'filename' in data:
resp = b'Missing'
self.send_response(200)
self.send_header('Content-Type', 'text/plain')
self.send_header('Content-Length', len(resp))
self.end_headers()
self.wfile.write(resp)
else:
hashtype = [k for k in data.keys() if k.endswith('sum')][0]
hsh = data[hashtype].value
hashtype = hashtype.split('sum')[0]
if verify_hash(data['file'].file, hashtype, hsh):
self.send_response(204)
self.end_headers()
else:
self.send_error(500)
except (KeyError, IndexError):
self.send_error(400)
self.server = http.server.HTTPServer(('', port), RequestHandler)
if cert:
self.server.socket = ssl.wrap_socket(self.server.socket, certfile=cert, server_side=True)
def serve(self):
|
def main():
servers = [
FTPServer(2100, '/srv', True),
FTPServer(2101, '/srv', False),
HTTPServer(8000, None, '/srv', True),
HTTPServer(8001, None, '/srv', False),
HTTPServer(4430, '/cert.pem', '/srv', True),
HTTPServer(4431, '/cert.pem', '/srv', False),
]
threads = [threading.Thread(target=s.serve) for s in servers[1:]]
for t in threads:
t.setDaemon(True)
t.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
pass
if __name__ == '__main__':
main()
| self.server.serve_forever() | identifier_body |
integration.py | import cgi
import hashlib
import http.server
import io
import os
import posixpath
import ssl
import threading
import time
import urllib.parse
import pyftpdlib.authorizers
import pyftpdlib.handlers
import pyftpdlib.servers
class FTPServer:
def __init__(self, port, root, report_size):
class FTPHandlerNoSIZE(pyftpdlib.handlers.FTPHandler):
proto_cmds = {k: v for k, v in pyftpdlib.handlers.proto_cmds.items() if k != 'SIZE'}
authorizer = pyftpdlib.authorizers.DummyAuthorizer()
authorizer.add_anonymous(root)
handler = pyftpdlib.handlers.FTPHandler if report_size else FTPHandlerNoSIZE
handler.authorizer = authorizer
self.server = pyftpdlib.servers.FTPServer(('', port), handler)
def serve(self):
self.server.serve_forever()
class HTTPServer:
def __init__(self, port, cert, root, report_size):
class RequestHandler(http.server.BaseHTTPRequestHandler):
def do_GET(self):
path = self.path.split('?', 1)[0].split('#', 1)[0]
path = urllib.parse.unquote(path)
path = posixpath.normpath(path)
path = os.path.join(root, path.lstrip('/'))
try:
with open(path, 'rb') as f:
data = f.read()
self.send_response(200)
content_type = 'application/json' if 'versioneers' in path else 'application/octet-stream'
self.send_header('Content-Type', content_type)
self.send_header('Content-Transfer-Encoding', 'binary')
if report_size:
self.send_header('Content-Length', len(data))
self.end_headers()
self.wfile.write(data)
except FileNotFoundError:
self.send_error(404)
def do_POST(self):
def dechunk(f):
bio = io.BytesIO()
while True:
chunksize = bytearray()
while not chunksize.endswith(b'\r\n'):
chunksize += f.read(1)
chunksize = chunksize.decode().split(':')[0]
chunksize = int(chunksize, 16)
if chunksize == 0:
break
chunk = f.read(chunksize)
assert(f.read(2) == b'\r\n')
bio.write(chunk)
bio.seek(0)
return bio
def verify_hash(f, hashtype, hsh):
try:
chksum = hashlib.new(hashtype)
except ValueError:
return False
chksum.update(f.read())
return chksum.hexdigest() == hsh
if self.headers.get('Transfer-Encoding') == 'chunked':
fp = dechunk(self.rfile)
else:
fp = self.rfile
data = cgi.FieldStorage(fp=fp, headers=self.headers,
environ={'REQUEST_METHOD': 'POST'},
# accept maximum of 10MB of data
limit=10 * 1024 * 1024)
try:
if 'filename' in data:
resp = b'Missing'
self.send_response(200)
self.send_header('Content-Type', 'text/plain')
self.send_header('Content-Length', len(resp))
self.end_headers()
self.wfile.write(resp)
else:
hashtype = [k for k in data.keys() if k.endswith('sum')][0]
hsh = data[hashtype].value
hashtype = hashtype.split('sum')[0]
if verify_hash(data['file'].file, hashtype, hsh):
self.send_response(204)
self.end_headers()
else:
self.send_error(500)
except (KeyError, IndexError):
self.send_error(400)
self.server = http.server.HTTPServer(('', port), RequestHandler)
if cert:
self.server.socket = ssl.wrap_socket(self.server.socket, certfile=cert, server_side=True)
def serve(self):
self.server.serve_forever()
def main():
servers = [
FTPServer(2100, '/srv', True),
FTPServer(2101, '/srv', False),
HTTPServer(8000, None, '/srv', True),
HTTPServer(8001, None, '/srv', False),
HTTPServer(4430, '/cert.pem', '/srv', True),
HTTPServer(4431, '/cert.pem', '/srv', False),
]
threads = [threading.Thread(target=s.serve) for s in servers[1:]]
for t in threads:
t.setDaemon(True)
t.start()
try:
while True:
|
except KeyboardInterrupt:
pass
if __name__ == '__main__':
main()
| time.sleep(1) | conditional_block |
integration.py | import cgi
import hashlib
import http.server
import io
import os
import posixpath
import ssl
import threading
import time
import urllib.parse
import pyftpdlib.authorizers
import pyftpdlib.handlers
import pyftpdlib.servers
class FTPServer:
def __init__(self, port, root, report_size):
class FTPHandlerNoSIZE(pyftpdlib.handlers.FTPHandler):
proto_cmds = {k: v for k, v in pyftpdlib.handlers.proto_cmds.items() if k != 'SIZE'}
authorizer = pyftpdlib.authorizers.DummyAuthorizer()
authorizer.add_anonymous(root)
handler = pyftpdlib.handlers.FTPHandler if report_size else FTPHandlerNoSIZE
handler.authorizer = authorizer
self.server = pyftpdlib.servers.FTPServer(('', port), handler)
def serve(self):
self.server.serve_forever()
class HTTPServer:
def __init__(self, port, cert, root, report_size):
class | (http.server.BaseHTTPRequestHandler):
def do_GET(self):
path = self.path.split('?', 1)[0].split('#', 1)[0]
path = urllib.parse.unquote(path)
path = posixpath.normpath(path)
path = os.path.join(root, path.lstrip('/'))
try:
with open(path, 'rb') as f:
data = f.read()
self.send_response(200)
content_type = 'application/json' if 'versioneers' in path else 'application/octet-stream'
self.send_header('Content-Type', content_type)
self.send_header('Content-Transfer-Encoding', 'binary')
if report_size:
self.send_header('Content-Length', len(data))
self.end_headers()
self.wfile.write(data)
except FileNotFoundError:
self.send_error(404)
def do_POST(self):
def dechunk(f):
bio = io.BytesIO()
while True:
chunksize = bytearray()
while not chunksize.endswith(b'\r\n'):
chunksize += f.read(1)
chunksize = chunksize.decode().split(':')[0]
chunksize = int(chunksize, 16)
if chunksize == 0:
break
chunk = f.read(chunksize)
assert(f.read(2) == b'\r\n')
bio.write(chunk)
bio.seek(0)
return bio
def verify_hash(f, hashtype, hsh):
try:
chksum = hashlib.new(hashtype)
except ValueError:
return False
chksum.update(f.read())
return chksum.hexdigest() == hsh
if self.headers.get('Transfer-Encoding') == 'chunked':
fp = dechunk(self.rfile)
else:
fp = self.rfile
data = cgi.FieldStorage(fp=fp, headers=self.headers,
environ={'REQUEST_METHOD': 'POST'},
# accept maximum of 10MB of data
limit=10 * 1024 * 1024)
try:
if 'filename' in data:
resp = b'Missing'
self.send_response(200)
self.send_header('Content-Type', 'text/plain')
self.send_header('Content-Length', len(resp))
self.end_headers()
self.wfile.write(resp)
else:
hashtype = [k for k in data.keys() if k.endswith('sum')][0]
hsh = data[hashtype].value
hashtype = hashtype.split('sum')[0]
if verify_hash(data['file'].file, hashtype, hsh):
self.send_response(204)
self.end_headers()
else:
self.send_error(500)
except (KeyError, IndexError):
self.send_error(400)
self.server = http.server.HTTPServer(('', port), RequestHandler)
if cert:
self.server.socket = ssl.wrap_socket(self.server.socket, certfile=cert, server_side=True)
def serve(self):
self.server.serve_forever()
def main():
servers = [
FTPServer(2100, '/srv', True),
FTPServer(2101, '/srv', False),
HTTPServer(8000, None, '/srv', True),
HTTPServer(8001, None, '/srv', False),
HTTPServer(4430, '/cert.pem', '/srv', True),
HTTPServer(4431, '/cert.pem', '/srv', False),
]
threads = [threading.Thread(target=s.serve) for s in servers[1:]]
for t in threads:
t.setDaemon(True)
t.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
pass
if __name__ == '__main__':
main()
| RequestHandler | identifier_name |
im_export.py | # -* coding: utf-8 *-
from PyQt4 import QtGui
from collector.ui.gen.im_export import Ui_Dialog
from collector.ui.views import Dialog
from collector.ui.helpers.customtoolbar import CustomToolbar
from collector.core.controller import get_manager
from collector.core.plugin import PluginExporter, PluginImporter
import logging
class BaseDialog(QtGui.QDialog, Ui_Dialog):
"""
BaseDialog
----------
Common parts for ImportDialog and ExportDialog
"""
def __init__(self, parent=None):
super(BaseDialog, self).__init__(parent)
self.setupUi(self)
self.customize()
def customize(self):
self.label_noplugins.hide()
plugins = self.get_plugins()
man = get_manager('plugin')
items = []
for i in plugins:
plugin = man.get(i)
items.append(
{'class': 'link', 'name': plugin.get_name(),
'path': 'plugin/' + plugin.get_id(),
'image': plugin.icon}
)
# Toolbar
items.append({'class': 'spacer'})
CustomToolbar(self.toolbar, items, self.select_plugin)
if not len(plugins):
self.toolbar.hide()
self.label_noplugins.show()
def select_plugin(self, uri):
"""Select plugin callback"""
params = self.parent().collector_uri_call(uri)
if params is not None:
plugin = params.get('plugin', None)
if plugin is not None:
man = get_manager('plugin')
self.hide()
try:
man.get(plugin).run()
self.done(1)
except Exception as exc:
logging.exception(exc)
self.done(-1)
def get_plugins(self):
plugins = get_manager('plugin').filter(self.filter_)
return plugins
class | (BaseDialog):
"""
ExportDialog
------------
"""
# TODO
filter_ = PluginExporter
def customize(self):
super(ExportDialog, self).customize()
self.setWindowTitle(self.tr("Export"))
class ImportDialog(BaseDialog):
"""
ImportDialog
------------
"""
filter_ = PluginImporter
def customize(self):
super(ImportDialog, self).customize()
self.setWindowTitle(self.tr("Import"))
class ImportView(Dialog):
"""Properties view"""
def get_widget(self, params):
return ImportDialog(self.parent)
class ExportView(Dialog):
"""Properties view"""
def get_widget(self, params):
return ExportDialog(self.parent)
| ExportDialog | identifier_name |
im_export.py | # -* coding: utf-8 *-
from PyQt4 import QtGui
from collector.ui.gen.im_export import Ui_Dialog
from collector.ui.views import Dialog
from collector.ui.helpers.customtoolbar import CustomToolbar
from collector.core.controller import get_manager
from collector.core.plugin import PluginExporter, PluginImporter
import logging
class BaseDialog(QtGui.QDialog, Ui_Dialog):
"""
BaseDialog
----------
Common parts for ImportDialog and ExportDialog
"""
def __init__(self, parent=None):
super(BaseDialog, self).__init__(parent)
self.setupUi(self)
self.customize()
def customize(self):
self.label_noplugins.hide()
plugins = self.get_plugins()
man = get_manager('plugin')
items = []
for i in plugins:
plugin = man.get(i)
items.append(
{'class': 'link', 'name': plugin.get_name(),
'path': 'plugin/' + plugin.get_id(),
'image': plugin.icon}
)
# Toolbar
items.append({'class': 'spacer'})
CustomToolbar(self.toolbar, items, self.select_plugin)
if not len(plugins):
self.toolbar.hide()
self.label_noplugins.show()
def select_plugin(self, uri):
"""Select plugin callback"""
params = self.parent().collector_uri_call(uri)
if params is not None:
plugin = params.get('plugin', None)
if plugin is not None:
man = get_manager('plugin')
self.hide()
try:
man.get(plugin).run()
self.done(1)
except Exception as exc:
logging.exception(exc)
self.done(-1)
def get_plugins(self):
plugins = get_manager('plugin').filter(self.filter_)
return plugins
class ExportDialog(BaseDialog):
"""
ExportDialog
------------
"""
# TODO
filter_ = PluginExporter
def customize(self):
super(ExportDialog, self).customize()
self.setWindowTitle(self.tr("Export"))
class ImportDialog(BaseDialog):
"""
ImportDialog
------------
"""
filter_ = PluginImporter
def customize(self):
super(ImportDialog, self).customize()
self.setWindowTitle(self.tr("Import"))
class ImportView(Dialog):
|
class ExportView(Dialog):
"""Properties view"""
def get_widget(self, params):
return ExportDialog(self.parent)
| """Properties view"""
def get_widget(self, params):
return ImportDialog(self.parent) | identifier_body |
im_export.py | # -* coding: utf-8 *-
from PyQt4 import QtGui
from collector.ui.gen.im_export import Ui_Dialog
from collector.ui.views import Dialog
from collector.ui.helpers.customtoolbar import CustomToolbar
from collector.core.controller import get_manager
from collector.core.plugin import PluginExporter, PluginImporter
import logging
class BaseDialog(QtGui.QDialog, Ui_Dialog):
"""
BaseDialog
----------
Common parts for ImportDialog and ExportDialog
"""
def __init__(self, parent=None):
super(BaseDialog, self).__init__(parent)
self.setupUi(self)
self.customize()
def customize(self):
self.label_noplugins.hide()
plugins = self.get_plugins()
man = get_manager('plugin')
items = []
for i in plugins:
|
# Toolbar
items.append({'class': 'spacer'})
CustomToolbar(self.toolbar, items, self.select_plugin)
if not len(plugins):
self.toolbar.hide()
self.label_noplugins.show()
def select_plugin(self, uri):
"""Select plugin callback"""
params = self.parent().collector_uri_call(uri)
if params is not None:
plugin = params.get('plugin', None)
if plugin is not None:
man = get_manager('plugin')
self.hide()
try:
man.get(plugin).run()
self.done(1)
except Exception as exc:
logging.exception(exc)
self.done(-1)
def get_plugins(self):
plugins = get_manager('plugin').filter(self.filter_)
return plugins
class ExportDialog(BaseDialog):
"""
ExportDialog
------------
"""
# TODO
filter_ = PluginExporter
def customize(self):
super(ExportDialog, self).customize()
self.setWindowTitle(self.tr("Export"))
class ImportDialog(BaseDialog):
"""
ImportDialog
------------
"""
filter_ = PluginImporter
def customize(self):
super(ImportDialog, self).customize()
self.setWindowTitle(self.tr("Import"))
class ImportView(Dialog):
"""Properties view"""
def get_widget(self, params):
return ImportDialog(self.parent)
class ExportView(Dialog):
"""Properties view"""
def get_widget(self, params):
return ExportDialog(self.parent)
| plugin = man.get(i)
items.append(
{'class': 'link', 'name': plugin.get_name(),
'path': 'plugin/' + plugin.get_id(),
'image': plugin.icon}
) | conditional_block |
im_export.py | # -* coding: utf-8 *-
from PyQt4 import QtGui
from collector.ui.gen.im_export import Ui_Dialog
from collector.ui.views import Dialog
from collector.ui.helpers.customtoolbar import CustomToolbar
from collector.core.controller import get_manager
from collector.core.plugin import PluginExporter, PluginImporter
import logging
class BaseDialog(QtGui.QDialog, Ui_Dialog):
"""
BaseDialog
---------- | super(BaseDialog, self).__init__(parent)
self.setupUi(self)
self.customize()
def customize(self):
self.label_noplugins.hide()
plugins = self.get_plugins()
man = get_manager('plugin')
items = []
for i in plugins:
plugin = man.get(i)
items.append(
{'class': 'link', 'name': plugin.get_name(),
'path': 'plugin/' + plugin.get_id(),
'image': plugin.icon}
)
# Toolbar
items.append({'class': 'spacer'})
CustomToolbar(self.toolbar, items, self.select_plugin)
if not len(plugins):
self.toolbar.hide()
self.label_noplugins.show()
def select_plugin(self, uri):
"""Select plugin callback"""
params = self.parent().collector_uri_call(uri)
if params is not None:
plugin = params.get('plugin', None)
if plugin is not None:
man = get_manager('plugin')
self.hide()
try:
man.get(plugin).run()
self.done(1)
except Exception as exc:
logging.exception(exc)
self.done(-1)
def get_plugins(self):
plugins = get_manager('plugin').filter(self.filter_)
return plugins
class ExportDialog(BaseDialog):
"""
ExportDialog
------------
"""
# TODO
filter_ = PluginExporter
def customize(self):
super(ExportDialog, self).customize()
self.setWindowTitle(self.tr("Export"))
class ImportDialog(BaseDialog):
"""
ImportDialog
------------
"""
filter_ = PluginImporter
def customize(self):
super(ImportDialog, self).customize()
self.setWindowTitle(self.tr("Import"))
class ImportView(Dialog):
"""Properties view"""
def get_widget(self, params):
return ImportDialog(self.parent)
class ExportView(Dialog):
"""Properties view"""
def get_widget(self, params):
return ExportDialog(self.parent) | Common parts for ImportDialog and ExportDialog
"""
def __init__(self, parent=None): | random_line_split |
Root.test.tsx | import React from 'react';
import { RenderResult } from '@testing-library/react';
import { addDays } from 'date-fns';
import { focusDay } from 'test/actions';
import { getDayButton, queryMonthGrids } from 'test/po';
import { customRender } from 'test/render';
import { freezeBeforeAll } from 'test/utils';
import { defaultClassNames } from 'contexts/DayPicker/defaultClassNames';
import { DayPickerProps } from 'types/DayPicker';
import { ClassNames } from 'types/Styles';
import { Root } from './Root';
const today = new Date(2020, 10, 4);
freezeBeforeAll(today);
let container: HTMLElement;
let renderResult: RenderResult;
function | (dayPickerProps: DayPickerProps = {}) {
renderResult = customRender(<Root />, dayPickerProps);
container = renderResult.container;
}
describe('when the number of months is 1', () => {
const props: DayPickerProps = { numberOfMonths: 1 };
beforeEach(() => {
setup(props);
});
test('should display one month grid', () => {
expect(queryMonthGrids()).toHaveLength(1);
});
});
describe('when the number of months is greater than 1', () => {
const props: DayPickerProps = { numberOfMonths: 3 };
beforeEach(() => {
setup(props);
});
test('should display the specified number of month grids', () => {
expect(queryMonthGrids()).toHaveLength(3);
});
});
describe('when using the "classNames" prop', () => {
const classNames: ClassNames = {
root: 'foo'
};
beforeEach(() => {
setup({ classNames });
});
test('should display the specified number of month grids', () => {
expect(container.firstChild).toHaveClass('foo');
});
});
describe('when using the "className" prop', () => {
const props: DayPickerProps = { className: 'foo' };
beforeEach(() => {
setup(props);
});
test('should append the class name to the root element', () => {
expect(container.firstChild).toHaveClass('foo');
});
});
describe('when the "numberOfMonths" is greater than 1', () => {
const props: DayPickerProps = { numberOfMonths: 3 };
const expectedClassName = defaultClassNames.multiple_months;
beforeEach(() => {
setup(props);
});
test(`should have the ${expectedClassName} class name`, () => {
expect(container.firstChild).toHaveClass(expectedClassName);
});
});
describe('when showing the week numbers', () => {
const props: DayPickerProps = { showWeekNumber: true };
const expectedClassName = defaultClassNames.with_weeknumber;
beforeEach(() => {
setup(props);
});
test(`should have the ${expectedClassName} class name`, () => {
expect(container.firstChild).toHaveClass(expectedClassName);
});
});
describe('when "initialFocus" is set', () => {
const baseProps: DayPickerProps = {
initialFocus: true,
mode: 'single'
};
describe('when a day is not selected', () => {
beforeEach(() => {
setup(baseProps);
});
test('should focus today', () => {
expect(getDayButton(today)).toHaveFocus();
});
describe('when a new day is focused', () => {
beforeEach(() => {
focusDay(addDays(today, 1));
});
describe('and the calendar is rerendered', () => {
test.todo('should focus the new day');
});
});
});
describe('when a day is selected', () => {
const selected = addDays(today, 1);
const props: DayPickerProps = { ...baseProps, selected };
beforeEach(() => {
setup(props);
});
test('should focus the selected day', () => {
expect(getDayButton(selected)).toHaveFocus();
});
});
});
| setup | identifier_name |
Root.test.tsx | import React from 'react';
import { RenderResult } from '@testing-library/react';
import { addDays } from 'date-fns';
import { focusDay } from 'test/actions';
import { getDayButton, queryMonthGrids } from 'test/po';
import { customRender } from 'test/render';
import { freezeBeforeAll } from 'test/utils';
import { defaultClassNames } from 'contexts/DayPicker/defaultClassNames';
import { DayPickerProps } from 'types/DayPicker';
import { ClassNames } from 'types/Styles';
import { Root } from './Root';
const today = new Date(2020, 10, 4);
freezeBeforeAll(today);
let container: HTMLElement;
let renderResult: RenderResult;
function setup(dayPickerProps: DayPickerProps = {}) {
renderResult = customRender(<Root />, dayPickerProps); | describe('when the number of months is 1', () => {
const props: DayPickerProps = { numberOfMonths: 1 };
beforeEach(() => {
setup(props);
});
test('should display one month grid', () => {
expect(queryMonthGrids()).toHaveLength(1);
});
});
describe('when the number of months is greater than 1', () => {
const props: DayPickerProps = { numberOfMonths: 3 };
beforeEach(() => {
setup(props);
});
test('should display the specified number of month grids', () => {
expect(queryMonthGrids()).toHaveLength(3);
});
});
describe('when using the "classNames" prop', () => {
const classNames: ClassNames = {
root: 'foo'
};
beforeEach(() => {
setup({ classNames });
});
test('should display the specified number of month grids', () => {
expect(container.firstChild).toHaveClass('foo');
});
});
describe('when using the "className" prop', () => {
const props: DayPickerProps = { className: 'foo' };
beforeEach(() => {
setup(props);
});
test('should append the class name to the root element', () => {
expect(container.firstChild).toHaveClass('foo');
});
});
describe('when the "numberOfMonths" is greater than 1', () => {
const props: DayPickerProps = { numberOfMonths: 3 };
const expectedClassName = defaultClassNames.multiple_months;
beforeEach(() => {
setup(props);
});
test(`should have the ${expectedClassName} class name`, () => {
expect(container.firstChild).toHaveClass(expectedClassName);
});
});
describe('when showing the week numbers', () => {
const props: DayPickerProps = { showWeekNumber: true };
const expectedClassName = defaultClassNames.with_weeknumber;
beforeEach(() => {
setup(props);
});
test(`should have the ${expectedClassName} class name`, () => {
expect(container.firstChild).toHaveClass(expectedClassName);
});
});
describe('when "initialFocus" is set', () => {
const baseProps: DayPickerProps = {
initialFocus: true,
mode: 'single'
};
describe('when a day is not selected', () => {
beforeEach(() => {
setup(baseProps);
});
test('should focus today', () => {
expect(getDayButton(today)).toHaveFocus();
});
describe('when a new day is focused', () => {
beforeEach(() => {
focusDay(addDays(today, 1));
});
describe('and the calendar is rerendered', () => {
test.todo('should focus the new day');
});
});
});
describe('when a day is selected', () => {
const selected = addDays(today, 1);
const props: DayPickerProps = { ...baseProps, selected };
beforeEach(() => {
setup(props);
});
test('should focus the selected day', () => {
expect(getDayButton(selected)).toHaveFocus();
});
});
}); | container = renderResult.container;
}
| random_line_split |
Root.test.tsx | import React from 'react';
import { RenderResult } from '@testing-library/react';
import { addDays } from 'date-fns';
import { focusDay } from 'test/actions';
import { getDayButton, queryMonthGrids } from 'test/po';
import { customRender } from 'test/render';
import { freezeBeforeAll } from 'test/utils';
import { defaultClassNames } from 'contexts/DayPicker/defaultClassNames';
import { DayPickerProps } from 'types/DayPicker';
import { ClassNames } from 'types/Styles';
import { Root } from './Root';
const today = new Date(2020, 10, 4);
freezeBeforeAll(today);
let container: HTMLElement;
let renderResult: RenderResult;
function setup(dayPickerProps: DayPickerProps = {}) |
describe('when the number of months is 1', () => {
const props: DayPickerProps = { numberOfMonths: 1 };
beforeEach(() => {
setup(props);
});
test('should display one month grid', () => {
expect(queryMonthGrids()).toHaveLength(1);
});
});
describe('when the number of months is greater than 1', () => {
const props: DayPickerProps = { numberOfMonths: 3 };
beforeEach(() => {
setup(props);
});
test('should display the specified number of month grids', () => {
expect(queryMonthGrids()).toHaveLength(3);
});
});
describe('when using the "classNames" prop', () => {
const classNames: ClassNames = {
root: 'foo'
};
beforeEach(() => {
setup({ classNames });
});
test('should display the specified number of month grids', () => {
expect(container.firstChild).toHaveClass('foo');
});
});
describe('when using the "className" prop', () => {
const props: DayPickerProps = { className: 'foo' };
beforeEach(() => {
setup(props);
});
test('should append the class name to the root element', () => {
expect(container.firstChild).toHaveClass('foo');
});
});
describe('when the "numberOfMonths" is greater than 1', () => {
const props: DayPickerProps = { numberOfMonths: 3 };
const expectedClassName = defaultClassNames.multiple_months;
beforeEach(() => {
setup(props);
});
test(`should have the ${expectedClassName} class name`, () => {
expect(container.firstChild).toHaveClass(expectedClassName);
});
});
describe('when showing the week numbers', () => {
const props: DayPickerProps = { showWeekNumber: true };
const expectedClassName = defaultClassNames.with_weeknumber;
beforeEach(() => {
setup(props);
});
test(`should have the ${expectedClassName} class name`, () => {
expect(container.firstChild).toHaveClass(expectedClassName);
});
});
describe('when "initialFocus" is set', () => {
const baseProps: DayPickerProps = {
initialFocus: true,
mode: 'single'
};
describe('when a day is not selected', () => {
beforeEach(() => {
setup(baseProps);
});
test('should focus today', () => {
expect(getDayButton(today)).toHaveFocus();
});
describe('when a new day is focused', () => {
beforeEach(() => {
focusDay(addDays(today, 1));
});
describe('and the calendar is rerendered', () => {
test.todo('should focus the new day');
});
});
});
describe('when a day is selected', () => {
const selected = addDays(today, 1);
const props: DayPickerProps = { ...baseProps, selected };
beforeEach(() => {
setup(props);
});
test('should focus the selected day', () => {
expect(getDayButton(selected)).toHaveFocus();
});
});
});
| {
renderResult = customRender(<Root />, dayPickerProps);
container = renderResult.container;
} | identifier_body |
experiments_service.py | # PMR WebServices
# Copyright (C) 2016 Manhoi Hur, Belyaeva, Irina
# This file is part of PMR WebServices API.
#
# PMR API is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# PMR API is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with PMR API. If not, see <http://www.gnu.org/licenses/>.
"""
Experiments Service API. Provides search, and serialization sevices
"""
import logging
import request_handler as rh
import exception
import jsonpickle
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
# This function retrieves experiment record by experiment ID
def get_experiment_by_id(url, args):
""" Retrieve experiment record by experiment ID
from passed service url and parameters
validate parameters
perform experiment lookup
:type url: string
:param url: The service url
:type args: dict
:param args: The dictionary(map) of parameters submitted via query string
:rtype: json like string
:return: Returns Experiment as json-like string
"""
# retrieve experiment ID from request parameters
lookup_id = args['experimentID']
try:
int_lookup_id = int(lookup_id)
except ValueError:
raise Exception("Non integer experiment ID was submitted!")
log.debug("Experiment Lookup Id:" + str(lookup_id))
# get list of Experiment objects
response = get_experiment_as_objects(url, args)
if not response:
raise Exception ("Error ocurred. Cannot load experiments to search for experiment ID.")
# search for experiment by ID
lookup_object = find(lambda item: item.expId == int_lookup_id, response)
log.debug(lookup_object)
# raise not found exception if no such experiment
if not lookup_object:
raise exception.NotFound("No experiment found for experiment ID: " + str(lookup_id))
# transform to json like string
lookup_object_as_json_string = jsonpickle.encode(lookup_object, unpicklable=False)
log.debug("JSON deserialization:")
log.debug(lookup_object_as_json_string)
return lookup_object_as_json_string
# get all experiments as list of Experiment objects
def get_experiment_as_objects(url, args):
""" Retrieves all experiments as Experiment objects
return list of Experiment objects
:type url: string
:param url: request url
:type args: string
:param args: request parameters
:rtype: list
:return: Returns list of Experiment objects if success raises exception otherwise
"""
response = rh.loadExperiments(url, args, 'list')
if not response:
raise Exception ("Error ocurred. Cannot load list of experiments.")
return response
# This function get all experiments in json format
def get_experiments_as_json(url, args):
|
# This function performs an exact search by identifier
def find(f, seq):
""" Retrieves object by identifier
return experiment object
:type f: int
:param f: current value of identifier
:type seq: string
:param seq: value to search for
:rtype: Experiment
:return: Returns Experiment object if object found None otherwise
"""
for item in seq:
if f(item):
return item
| """ Retrieves all experiments in json format
return experiments in json format
:type url: string
:param url: request url
:type args: string
:param args: request parameters
:rtype: list
:return: Returns list of Experiment objects in json format if success raises exception otherwise
"""
response = rh.build_payload(url, args, 'list')
return response | identifier_body |
experiments_service.py | # PMR WebServices
# Copyright (C) 2016 Manhoi Hur, Belyaeva, Irina
# This file is part of PMR WebServices API.
#
# PMR API is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# PMR API is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with PMR API. If not, see <http://www.gnu.org/licenses/>.
"""
Experiments Service API. Provides search, and serialization sevices
"""
import logging
import request_handler as rh
import exception
import jsonpickle
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
# This function retrieves experiment record by experiment ID
def get_experiment_by_id(url, args):
""" Retrieve experiment record by experiment ID
from passed service url and parameters
validate parameters
perform experiment lookup
:type url: string
:param url: The service url
:type args: dict
:param args: The dictionary(map) of parameters submitted via query string
:rtype: json like string
:return: Returns Experiment as json-like string
"""
# retrieve experiment ID from request parameters
lookup_id = args['experimentID']
try:
int_lookup_id = int(lookup_id)
except ValueError:
raise Exception("Non integer experiment ID was submitted!")
log.debug("Experiment Lookup Id:" + str(lookup_id))
# get list of Experiment objects
response = get_experiment_as_objects(url, args)
if not response:
raise Exception ("Error ocurred. Cannot load experiments to search for experiment ID.")
# search for experiment by ID
lookup_object = find(lambda item: item.expId == int_lookup_id, response)
log.debug(lookup_object)
# raise not found exception if no such experiment
if not lookup_object:
raise exception.NotFound("No experiment found for experiment ID: " + str(lookup_id))
# transform to json like string
lookup_object_as_json_string = jsonpickle.encode(lookup_object, unpicklable=False) | # get all experiments as list of Experiment objects
def get_experiment_as_objects(url, args):
""" Retrieves all experiments as Experiment objects
return list of Experiment objects
:type url: string
:param url: request url
:type args: string
:param args: request parameters
:rtype: list
:return: Returns list of Experiment objects if success raises exception otherwise
"""
response = rh.loadExperiments(url, args, 'list')
if not response:
raise Exception ("Error ocurred. Cannot load list of experiments.")
return response
# This function get all experiments in json format
def get_experiments_as_json(url, args):
""" Retrieves all experiments in json format
return experiments in json format
:type url: string
:param url: request url
:type args: string
:param args: request parameters
:rtype: list
:return: Returns list of Experiment objects in json format if success raises exception otherwise
"""
response = rh.build_payload(url, args, 'list')
return response
# This function performs an exact search by identifier
def find(f, seq):
""" Retrieves object by identifier
return experiment object
:type f: int
:param f: current value of identifier
:type seq: string
:param seq: value to search for
:rtype: Experiment
:return: Returns Experiment object if object found None otherwise
"""
for item in seq:
if f(item):
return item | log.debug("JSON deserialization:")
log.debug(lookup_object_as_json_string)
return lookup_object_as_json_string
| random_line_split |
experiments_service.py | # PMR WebServices
# Copyright (C) 2016 Manhoi Hur, Belyaeva, Irina
# This file is part of PMR WebServices API.
#
# PMR API is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# PMR API is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with PMR API. If not, see <http://www.gnu.org/licenses/>.
"""
Experiments Service API. Provides search, and serialization sevices
"""
import logging
import request_handler as rh
import exception
import jsonpickle
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
# This function retrieves experiment record by experiment ID
def get_experiment_by_id(url, args):
""" Retrieve experiment record by experiment ID
from passed service url and parameters
validate parameters
perform experiment lookup
:type url: string
:param url: The service url
:type args: dict
:param args: The dictionary(map) of parameters submitted via query string
:rtype: json like string
:return: Returns Experiment as json-like string
"""
# retrieve experiment ID from request parameters
lookup_id = args['experimentID']
try:
int_lookup_id = int(lookup_id)
except ValueError:
raise Exception("Non integer experiment ID was submitted!")
log.debug("Experiment Lookup Id:" + str(lookup_id))
# get list of Experiment objects
response = get_experiment_as_objects(url, args)
if not response:
raise Exception ("Error ocurred. Cannot load experiments to search for experiment ID.")
# search for experiment by ID
lookup_object = find(lambda item: item.expId == int_lookup_id, response)
log.debug(lookup_object)
# raise not found exception if no such experiment
if not lookup_object:
raise exception.NotFound("No experiment found for experiment ID: " + str(lookup_id))
# transform to json like string
lookup_object_as_json_string = jsonpickle.encode(lookup_object, unpicklable=False)
log.debug("JSON deserialization:")
log.debug(lookup_object_as_json_string)
return lookup_object_as_json_string
# get all experiments as list of Experiment objects
def get_experiment_as_objects(url, args):
""" Retrieves all experiments as Experiment objects
return list of Experiment objects
:type url: string
:param url: request url
:type args: string
:param args: request parameters
:rtype: list
:return: Returns list of Experiment objects if success raises exception otherwise
"""
response = rh.loadExperiments(url, args, 'list')
if not response:
raise Exception ("Error ocurred. Cannot load list of experiments.")
return response
# This function get all experiments in json format
def get_experiments_as_json(url, args):
""" Retrieves all experiments in json format
return experiments in json format
:type url: string
:param url: request url
:type args: string
:param args: request parameters
:rtype: list
:return: Returns list of Experiment objects in json format if success raises exception otherwise
"""
response = rh.build_payload(url, args, 'list')
return response
# This function performs an exact search by identifier
def | (f, seq):
""" Retrieves object by identifier
return experiment object
:type f: int
:param f: current value of identifier
:type seq: string
:param seq: value to search for
:rtype: Experiment
:return: Returns Experiment object if object found None otherwise
"""
for item in seq:
if f(item):
return item
| find | identifier_name |
experiments_service.py | # PMR WebServices
# Copyright (C) 2016 Manhoi Hur, Belyaeva, Irina
# This file is part of PMR WebServices API.
#
# PMR API is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# PMR API is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with PMR API. If not, see <http://www.gnu.org/licenses/>.
"""
Experiments Service API. Provides search, and serialization sevices
"""
import logging
import request_handler as rh
import exception
import jsonpickle
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
# This function retrieves experiment record by experiment ID
def get_experiment_by_id(url, args):
""" Retrieve experiment record by experiment ID
from passed service url and parameters
validate parameters
perform experiment lookup
:type url: string
:param url: The service url
:type args: dict
:param args: The dictionary(map) of parameters submitted via query string
:rtype: json like string
:return: Returns Experiment as json-like string
"""
# retrieve experiment ID from request parameters
lookup_id = args['experimentID']
try:
int_lookup_id = int(lookup_id)
except ValueError:
raise Exception("Non integer experiment ID was submitted!")
log.debug("Experiment Lookup Id:" + str(lookup_id))
# get list of Experiment objects
response = get_experiment_as_objects(url, args)
if not response:
raise Exception ("Error ocurred. Cannot load experiments to search for experiment ID.")
# search for experiment by ID
lookup_object = find(lambda item: item.expId == int_lookup_id, response)
log.debug(lookup_object)
# raise not found exception if no such experiment
if not lookup_object:
raise exception.NotFound("No experiment found for experiment ID: " + str(lookup_id))
# transform to json like string
lookup_object_as_json_string = jsonpickle.encode(lookup_object, unpicklable=False)
log.debug("JSON deserialization:")
log.debug(lookup_object_as_json_string)
return lookup_object_as_json_string
# get all experiments as list of Experiment objects
def get_experiment_as_objects(url, args):
""" Retrieves all experiments as Experiment objects
return list of Experiment objects
:type url: string
:param url: request url
:type args: string
:param args: request parameters
:rtype: list
:return: Returns list of Experiment objects if success raises exception otherwise
"""
response = rh.loadExperiments(url, args, 'list')
if not response:
|
return response
# This function get all experiments in json format
def get_experiments_as_json(url, args):
""" Retrieves all experiments in json format
return experiments in json format
:type url: string
:param url: request url
:type args: string
:param args: request parameters
:rtype: list
:return: Returns list of Experiment objects in json format if success raises exception otherwise
"""
response = rh.build_payload(url, args, 'list')
return response
# This function performs an exact search by identifier
def find(f, seq):
""" Retrieves object by identifier
return experiment object
:type f: int
:param f: current value of identifier
:type seq: string
:param seq: value to search for
:rtype: Experiment
:return: Returns Experiment object if object found None otherwise
"""
for item in seq:
if f(item):
return item
| raise Exception ("Error ocurred. Cannot load list of experiments.") | conditional_block |
statics-and-consts.rs | // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-tidy-linelength
// compile-flags:-Zprint-mono-items=eager
#![deny(dead_code)]
#![feature(start)]
static STATIC1: i64 = {
const STATIC1_CONST1: i64 = 2;
1 + CONST1 as i64 + STATIC1_CONST1
};
const CONST1: i64 = {
const CONST1_1: i64 = {
const CONST1_1_1: i64 = 2;
CONST1_1_1 + 1
};
1 + CONST1_1 as i64
};
fn foo() {
let _ = {
const CONST2: i64 = 0;
static STATIC2: i64 = CONST2;
let x = {
const CONST2: i64 = 1;
static STATIC2: i64 = CONST2;
STATIC2
};
x + STATIC2
};
let _ = {
const CONST2: i64 = 0;
static STATIC2: i64 = CONST2;
STATIC2
};
}
//~ MONO_ITEM fn statics_and_consts::start[0]
#[start]
fn start(_: isize, _: *const *const u8) -> isize |
//~ MONO_ITEM static statics_and_consts::STATIC1[0]
//~ MONO_ITEM fn statics_and_consts::foo[0]
//~ MONO_ITEM static statics_and_consts::foo[0]::STATIC2[0]
//~ MONO_ITEM static statics_and_consts::foo[0]::STATIC2[1]
//~ MONO_ITEM static statics_and_consts::foo[0]::STATIC2[2]
| {
foo();
let _ = STATIC1;
0
} | identifier_body |
statics-and-consts.rs | // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-tidy-linelength
// compile-flags:-Zprint-mono-items=eager
#![deny(dead_code)]
#![feature(start)]
static STATIC1: i64 = {
const STATIC1_CONST1: i64 = 2;
1 + CONST1 as i64 + STATIC1_CONST1
};
const CONST1: i64 = {
const CONST1_1: i64 = {
const CONST1_1_1: i64 = 2;
CONST1_1_1 + 1
};
1 + CONST1_1 as i64
};
fn foo() {
let _ = {
const CONST2: i64 = 0;
static STATIC2: i64 = CONST2;
let x = {
const CONST2: i64 = 1;
static STATIC2: i64 = CONST2;
STATIC2
};
x + STATIC2
};
let _ = {
const CONST2: i64 = 0;
static STATIC2: i64 = CONST2;
STATIC2
};
}
//~ MONO_ITEM fn statics_and_consts::start[0]
#[start]
fn | (_: isize, _: *const *const u8) -> isize {
foo();
let _ = STATIC1;
0
}
//~ MONO_ITEM static statics_and_consts::STATIC1[0]
//~ MONO_ITEM fn statics_and_consts::foo[0]
//~ MONO_ITEM static statics_and_consts::foo[0]::STATIC2[0]
//~ MONO_ITEM static statics_and_consts::foo[0]::STATIC2[1]
//~ MONO_ITEM static statics_and_consts::foo[0]::STATIC2[2]
| start | identifier_name |
statics-and-consts.rs | // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
|
static STATIC1: i64 = {
const STATIC1_CONST1: i64 = 2;
1 + CONST1 as i64 + STATIC1_CONST1
};
const CONST1: i64 = {
const CONST1_1: i64 = {
const CONST1_1_1: i64 = 2;
CONST1_1_1 + 1
};
1 + CONST1_1 as i64
};
fn foo() {
let _ = {
const CONST2: i64 = 0;
static STATIC2: i64 = CONST2;
let x = {
const CONST2: i64 = 1;
static STATIC2: i64 = CONST2;
STATIC2
};
x + STATIC2
};
let _ = {
const CONST2: i64 = 0;
static STATIC2: i64 = CONST2;
STATIC2
};
}
//~ MONO_ITEM fn statics_and_consts::start[0]
#[start]
fn start(_: isize, _: *const *const u8) -> isize {
foo();
let _ = STATIC1;
0
}
//~ MONO_ITEM static statics_and_consts::STATIC1[0]
//~ MONO_ITEM fn statics_and_consts::foo[0]
//~ MONO_ITEM static statics_and_consts::foo[0]::STATIC2[0]
//~ MONO_ITEM static statics_and_consts::foo[0]::STATIC2[1]
//~ MONO_ITEM static statics_and_consts::foo[0]::STATIC2[2] | // ignore-tidy-linelength
// compile-flags:-Zprint-mono-items=eager
#![deny(dead_code)]
#![feature(start)] | random_line_split |
CivicDataFetcher.ts | import * as request from 'superagent';
import {
EvidenceLevel,
ICivicEvidenceSummary,
ICivicGeneSummary,
ICivicVariantSummary,
} from '../model/Civic';
type CivicAPIGene = {
id: number;
name: string;
description: string;
variants: Array<CivicAPIGeneVariant>;
};
type CivicAPIGeneVariant = {
id: number;
name: string;
evidence_items: Evidence[];
};
type Evidence = {
id: number;
name: string;
evidence_type: string;
clinical_significance: string;
evidence_direction: EvidenceDirection;
evidence_level: EvidenceLevel;
drugs: Drug[];
disease: Disease;
};
type Disease = {
id: number;
name: string;
display_name: string;
url: string;
};
type Drug = {
id: number;
name: string;
ncit_id: string;
aliases: string[];
};
enum EvidenceDirection {
Supports = 'Supports',
DoesNotSupport = 'Does Not Support',
}
enum ClinicalSignificance {
// Clinical Significance For Predictive Evidence
Sensitivity = 'Sensitivity/Response',
Resistance = 'Resistance',
AdverseResponse = 'Adverse Response',
ReducedSensitivity = 'Reduced Sensitivity',
NA = 'N/A',
}
/**
* Returns a map with the different types of evidence and the number of times that each evidence happens.
*/
function countEvidenceTypes(
evidenceItems: Evidence[]
): { [evidenceType: string]: number } {
const counts: { [evidenceType: string]: number } = {};
evidenceItems.forEach(function(evidenceItem: Evidence) {
const evidenceType = evidenceItem.evidence_type;
if (counts.hasOwnProperty(evidenceType)) {
counts[evidenceType] += 1;
} else {
counts[evidenceType] = 1;
}
});
return counts;
}
function findSupportingEvidences(
evidences: Evidence[],
filter: (evidence: Evidence) => boolean = () => true
): Evidence[] {
const filteredEvidences = evidences.filter(
evidence =>
evidence.evidence_direction === EvidenceDirection.Supports &&
filter(evidence)
);
filteredEvidences.sort((a, b) => {
const aLevel = a.evidence_level;
const bLevel = b.evidence_level;
if (aLevel === undefined && bLevel === undefined) {
return 0;
} else if (aLevel === undefined) {
return -1;
} else if (bLevel === undefined) {
return 1;
} else if (bLevel > aLevel) {
return -1;
} else if (aLevel > bLevel) {
return 1;
} else {
return 0;
}
});
return filteredEvidences;
}
function summarizeEvidence(evidence: Evidence): ICivicEvidenceSummary {
return {
id: evidence.id,
type: evidence.evidence_type,
clinicalSignificance: evidence.clinical_significance,
level: evidence.evidence_level,
drugs: (evidence.drugs || []).map(d => d.name),
disease: evidence.disease?.display_name || evidence.disease?.name,
};
}
/**
* Returns a map with the different variant names and their variant id.
*/
function | (
variantArray: CivicAPIGeneVariant[]
): { [variantName: string]: number } {
let variantMap: { [variantName: string]: number } = {};
if (variantArray && variantArray.length > 0) {
variantArray.forEach(function(variant) {
variantMap[variant.name] = variant.id;
});
}
return variantMap;
}
/**
* CIViC
*/
export class CivicAPI {
/**
* Retrieves the gene entries for the ids given, if they are in the Civic API.
*/
getCivicGenesBatch(ids: string): Promise<ICivicGeneSummary[]> {
return request
.get('https://civicdb.org/api/genes/' + ids)
.query({ identifier_type: 'entrez_id' })
.then(res => {
const response = res.body;
const result: CivicAPIGene[] =
response instanceof Array ? response : [response];
return result.map((record: CivicAPIGene) => ({
id: record.id,
name: record.name,
description: record.description,
url:
'https://civicdb.org/events/genes/' +
record.id +
'/summary',
variants: createVariantMap(record.variants),
}));
});
}
/**
* Returns a promise that resolves with the variants for the parameters given.
*/
getCivicVariantSummary(
id: number,
name: string,
geneId: number
): Promise<ICivicVariantSummary> {
return request
.get('https://civicdb.org/api/variants/' + id)
.then(response => {
const result = response.body;
const supportingEvidences = findSupportingEvidences(
result.evidence_items
);
return {
id,
name,
geneId,
description: result.description,
url:
'https://civicdb.org/events/genes/' +
geneId +
'/summary/variants/' +
id +
'/summary#variant',
evidenceCounts: countEvidenceTypes(result.evidence_items),
evidences: supportingEvidences.map(summarizeEvidence),
};
});
}
}
export default CivicAPI;
| createVariantMap | identifier_name |
CivicDataFetcher.ts | import * as request from 'superagent';
import {
EvidenceLevel,
ICivicEvidenceSummary,
ICivicGeneSummary,
ICivicVariantSummary,
} from '../model/Civic';
type CivicAPIGene = {
id: number;
name: string;
description: string;
variants: Array<CivicAPIGeneVariant>;
};
type CivicAPIGeneVariant = {
id: number;
name: string;
evidence_items: Evidence[];
};
type Evidence = {
id: number;
name: string;
evidence_type: string;
clinical_significance: string;
evidence_direction: EvidenceDirection;
evidence_level: EvidenceLevel;
drugs: Drug[];
disease: Disease;
};
type Disease = {
id: number;
name: string;
display_name: string;
url: string;
};
type Drug = {
id: number;
name: string;
ncit_id: string;
aliases: string[];
};
enum EvidenceDirection {
Supports = 'Supports',
DoesNotSupport = 'Does Not Support',
}
enum ClinicalSignificance {
// Clinical Significance For Predictive Evidence
Sensitivity = 'Sensitivity/Response',
Resistance = 'Resistance',
AdverseResponse = 'Adverse Response',
ReducedSensitivity = 'Reduced Sensitivity',
NA = 'N/A',
}
/**
* Returns a map with the different types of evidence and the number of times that each evidence happens.
*/
function countEvidenceTypes(
evidenceItems: Evidence[]
): { [evidenceType: string]: number } {
const counts: { [evidenceType: string]: number } = {};
evidenceItems.forEach(function(evidenceItem: Evidence) {
const evidenceType = evidenceItem.evidence_type;
if (counts.hasOwnProperty(evidenceType)) {
counts[evidenceType] += 1;
} else {
counts[evidenceType] = 1;
}
});
return counts;
}
| evidence =>
evidence.evidence_direction === EvidenceDirection.Supports &&
filter(evidence)
);
filteredEvidences.sort((a, b) => {
const aLevel = a.evidence_level;
const bLevel = b.evidence_level;
if (aLevel === undefined && bLevel === undefined) {
return 0;
} else if (aLevel === undefined) {
return -1;
} else if (bLevel === undefined) {
return 1;
} else if (bLevel > aLevel) {
return -1;
} else if (aLevel > bLevel) {
return 1;
} else {
return 0;
}
});
return filteredEvidences;
}
function summarizeEvidence(evidence: Evidence): ICivicEvidenceSummary {
return {
id: evidence.id,
type: evidence.evidence_type,
clinicalSignificance: evidence.clinical_significance,
level: evidence.evidence_level,
drugs: (evidence.drugs || []).map(d => d.name),
disease: evidence.disease?.display_name || evidence.disease?.name,
};
}
/**
* Returns a map with the different variant names and their variant id.
*/
function createVariantMap(
variantArray: CivicAPIGeneVariant[]
): { [variantName: string]: number } {
let variantMap: { [variantName: string]: number } = {};
if (variantArray && variantArray.length > 0) {
variantArray.forEach(function(variant) {
variantMap[variant.name] = variant.id;
});
}
return variantMap;
}
/**
* CIViC
*/
export class CivicAPI {
/**
* Retrieves the gene entries for the ids given, if they are in the Civic API.
*/
getCivicGenesBatch(ids: string): Promise<ICivicGeneSummary[]> {
return request
.get('https://civicdb.org/api/genes/' + ids)
.query({ identifier_type: 'entrez_id' })
.then(res => {
const response = res.body;
const result: CivicAPIGene[] =
response instanceof Array ? response : [response];
return result.map((record: CivicAPIGene) => ({
id: record.id,
name: record.name,
description: record.description,
url:
'https://civicdb.org/events/genes/' +
record.id +
'/summary',
variants: createVariantMap(record.variants),
}));
});
}
/**
* Returns a promise that resolves with the variants for the parameters given.
*/
getCivicVariantSummary(
id: number,
name: string,
geneId: number
): Promise<ICivicVariantSummary> {
return request
.get('https://civicdb.org/api/variants/' + id)
.then(response => {
const result = response.body;
const supportingEvidences = findSupportingEvidences(
result.evidence_items
);
return {
id,
name,
geneId,
description: result.description,
url:
'https://civicdb.org/events/genes/' +
geneId +
'/summary/variants/' +
id +
'/summary#variant',
evidenceCounts: countEvidenceTypes(result.evidence_items),
evidences: supportingEvidences.map(summarizeEvidence),
};
});
}
}
export default CivicAPI; | function findSupportingEvidences(
evidences: Evidence[],
filter: (evidence: Evidence) => boolean = () => true
): Evidence[] {
const filteredEvidences = evidences.filter( | random_line_split |
CivicDataFetcher.ts | import * as request from 'superagent';
import {
EvidenceLevel,
ICivicEvidenceSummary,
ICivicGeneSummary,
ICivicVariantSummary,
} from '../model/Civic';
type CivicAPIGene = {
id: number;
name: string;
description: string;
variants: Array<CivicAPIGeneVariant>;
};
type CivicAPIGeneVariant = {
id: number;
name: string;
evidence_items: Evidence[];
};
type Evidence = {
id: number;
name: string;
evidence_type: string;
clinical_significance: string;
evidence_direction: EvidenceDirection;
evidence_level: EvidenceLevel;
drugs: Drug[];
disease: Disease;
};
type Disease = {
id: number;
name: string;
display_name: string;
url: string;
};
type Drug = {
id: number;
name: string;
ncit_id: string;
aliases: string[];
};
enum EvidenceDirection {
Supports = 'Supports',
DoesNotSupport = 'Does Not Support',
}
enum ClinicalSignificance {
// Clinical Significance For Predictive Evidence
Sensitivity = 'Sensitivity/Response',
Resistance = 'Resistance',
AdverseResponse = 'Adverse Response',
ReducedSensitivity = 'Reduced Sensitivity',
NA = 'N/A',
}
/**
* Returns a map with the different types of evidence and the number of times that each evidence happens.
*/
function countEvidenceTypes(
evidenceItems: Evidence[]
): { [evidenceType: string]: number } {
const counts: { [evidenceType: string]: number } = {};
evidenceItems.forEach(function(evidenceItem: Evidence) {
const evidenceType = evidenceItem.evidence_type;
if (counts.hasOwnProperty(evidenceType)) {
counts[evidenceType] += 1;
} else {
counts[evidenceType] = 1;
}
});
return counts;
}
function findSupportingEvidences(
evidences: Evidence[],
filter: (evidence: Evidence) => boolean = () => true
): Evidence[] {
const filteredEvidences = evidences.filter(
evidence =>
evidence.evidence_direction === EvidenceDirection.Supports &&
filter(evidence)
);
filteredEvidences.sort((a, b) => {
const aLevel = a.evidence_level;
const bLevel = b.evidence_level;
if (aLevel === undefined && bLevel === undefined) {
return 0;
} else if (aLevel === undefined) {
return -1;
} else if (bLevel === undefined) {
return 1;
} else if (bLevel > aLevel) {
return -1;
} else if (aLevel > bLevel) {
return 1;
} else {
return 0;
}
});
return filteredEvidences;
}
function summarizeEvidence(evidence: Evidence): ICivicEvidenceSummary {
return {
id: evidence.id,
type: evidence.evidence_type,
clinicalSignificance: evidence.clinical_significance,
level: evidence.evidence_level,
drugs: (evidence.drugs || []).map(d => d.name),
disease: evidence.disease?.display_name || evidence.disease?.name,
};
}
/**
* Returns a map with the different variant names and their variant id.
*/
function createVariantMap(
variantArray: CivicAPIGeneVariant[]
): { [variantName: string]: number } {
let variantMap: { [variantName: string]: number } = {};
if (variantArray && variantArray.length > 0) {
variantArray.forEach(function(variant) {
variantMap[variant.name] = variant.id;
});
}
return variantMap;
}
/**
* CIViC
*/
export class CivicAPI {
/**
* Retrieves the gene entries for the ids given, if they are in the Civic API.
*/
getCivicGenesBatch(ids: string): Promise<ICivicGeneSummary[]> {
return request
.get('https://civicdb.org/api/genes/' + ids)
.query({ identifier_type: 'entrez_id' })
.then(res => {
const response = res.body;
const result: CivicAPIGene[] =
response instanceof Array ? response : [response];
return result.map((record: CivicAPIGene) => ({
id: record.id,
name: record.name,
description: record.description,
url:
'https://civicdb.org/events/genes/' +
record.id +
'/summary',
variants: createVariantMap(record.variants),
}));
});
}
/**
* Returns a promise that resolves with the variants for the parameters given.
*/
getCivicVariantSummary(
id: number,
name: string,
geneId: number
): Promise<ICivicVariantSummary> |
}
export default CivicAPI;
| {
return request
.get('https://civicdb.org/api/variants/' + id)
.then(response => {
const result = response.body;
const supportingEvidences = findSupportingEvidences(
result.evidence_items
);
return {
id,
name,
geneId,
description: result.description,
url:
'https://civicdb.org/events/genes/' +
geneId +
'/summary/variants/' +
id +
'/summary#variant',
evidenceCounts: countEvidenceTypes(result.evidence_items),
evidences: supportingEvidences.map(summarizeEvidence),
};
});
} | identifier_body |
CreateGroupData.ts | import {create} from "../../common/utils/EntityUtils.js"
import {TypeRef, downcast} from "@tutao/tutanota-utils"
import type {TypeModel} from "../../common/EntityTypes.js"
export const CreateGroupDataTypeRef: TypeRef<CreateGroupData> = new TypeRef("sys", "CreateGroupData")
export const _TypeModel: TypeModel = {
"name": "CreateGroupData",
"since": 1,
"type": "AGGREGATED_TYPE",
"id": 356,
"rootId": "A3N5cwABZA",
"versioned": false,
"encrypted": false,
"values": {
"_id": {
"id": 357,
"type": "CustomId",
"cardinality": "One",
"final": true,
"encrypted": false
},
"adminEncGKey": {
"id": 363,
"type": "Bytes",
"cardinality": "One",
"final": false,
"encrypted": false
},
"customerEncUserGroupInfoSessionKey": {
"id": 1040,
"type": "Bytes",
"cardinality": "ZeroOrOne",
"final": false,
"encrypted": false
},
"encryptedName": {
"id": 358,
"type": "Bytes",
"cardinality": "One",
"final": false,
"encrypted": false
},
"listEncSessionKey": {
"id": 364,
"type": "Bytes",
"cardinality": "One",
"final": false,
"encrypted": false
},
"mailAddress": {
"id": 359,
"type": "String",
"cardinality": "ZeroOrOne",
"final": false,
"encrypted": false
},
"pubKey": {
"id": 360,
"type": "Bytes",
"cardinality": "One",
"final": false,
"encrypted": false
}, | "id": 362,
"type": "Bytes",
"cardinality": "One",
"final": false,
"encrypted": false
},
"symEncPrivKey": {
"id": 361,
"type": "Bytes",
"cardinality": "One",
"final": false,
"encrypted": false
}
},
"associations": {},
"app": "sys",
"version": "72"
}
export function createCreateGroupData(values?: Partial<CreateGroupData>): CreateGroupData {
return Object.assign(create(_TypeModel, CreateGroupDataTypeRef), downcast<CreateGroupData>(values))
}
export type CreateGroupData = {
_type: TypeRef<CreateGroupData>;
_id: Id;
adminEncGKey: Uint8Array;
customerEncUserGroupInfoSessionKey: null | Uint8Array;
encryptedName: Uint8Array;
listEncSessionKey: Uint8Array;
mailAddress: null | string;
pubKey: Uint8Array;
symEncGKey: Uint8Array;
symEncPrivKey: Uint8Array;
} | "symEncGKey": { | random_line_split |
CreateGroupData.ts | import {create} from "../../common/utils/EntityUtils.js"
import {TypeRef, downcast} from "@tutao/tutanota-utils"
import type {TypeModel} from "../../common/EntityTypes.js"
export const CreateGroupDataTypeRef: TypeRef<CreateGroupData> = new TypeRef("sys", "CreateGroupData")
export const _TypeModel: TypeModel = {
"name": "CreateGroupData",
"since": 1,
"type": "AGGREGATED_TYPE",
"id": 356,
"rootId": "A3N5cwABZA",
"versioned": false,
"encrypted": false,
"values": {
"_id": {
"id": 357,
"type": "CustomId",
"cardinality": "One",
"final": true,
"encrypted": false
},
"adminEncGKey": {
"id": 363,
"type": "Bytes",
"cardinality": "One",
"final": false,
"encrypted": false
},
"customerEncUserGroupInfoSessionKey": {
"id": 1040,
"type": "Bytes",
"cardinality": "ZeroOrOne",
"final": false,
"encrypted": false
},
"encryptedName": {
"id": 358,
"type": "Bytes",
"cardinality": "One",
"final": false,
"encrypted": false
},
"listEncSessionKey": {
"id": 364,
"type": "Bytes",
"cardinality": "One",
"final": false,
"encrypted": false
},
"mailAddress": {
"id": 359,
"type": "String",
"cardinality": "ZeroOrOne",
"final": false,
"encrypted": false
},
"pubKey": {
"id": 360,
"type": "Bytes",
"cardinality": "One",
"final": false,
"encrypted": false
},
"symEncGKey": {
"id": 362,
"type": "Bytes",
"cardinality": "One",
"final": false,
"encrypted": false
},
"symEncPrivKey": {
"id": 361,
"type": "Bytes",
"cardinality": "One",
"final": false,
"encrypted": false
}
},
"associations": {},
"app": "sys",
"version": "72"
}
export function createCreateGroupData(values?: Partial<CreateGroupData>): CreateGroupData |
export type CreateGroupData = {
_type: TypeRef<CreateGroupData>;
_id: Id;
adminEncGKey: Uint8Array;
customerEncUserGroupInfoSessionKey: null | Uint8Array;
encryptedName: Uint8Array;
listEncSessionKey: Uint8Array;
mailAddress: null | string;
pubKey: Uint8Array;
symEncGKey: Uint8Array;
symEncPrivKey: Uint8Array;
} | {
return Object.assign(create(_TypeModel, CreateGroupDataTypeRef), downcast<CreateGroupData>(values))
} | identifier_body |
CreateGroupData.ts | import {create} from "../../common/utils/EntityUtils.js"
import {TypeRef, downcast} from "@tutao/tutanota-utils"
import type {TypeModel} from "../../common/EntityTypes.js"
export const CreateGroupDataTypeRef: TypeRef<CreateGroupData> = new TypeRef("sys", "CreateGroupData")
export const _TypeModel: TypeModel = {
"name": "CreateGroupData",
"since": 1,
"type": "AGGREGATED_TYPE",
"id": 356,
"rootId": "A3N5cwABZA",
"versioned": false,
"encrypted": false,
"values": {
"_id": {
"id": 357,
"type": "CustomId",
"cardinality": "One",
"final": true,
"encrypted": false
},
"adminEncGKey": {
"id": 363,
"type": "Bytes",
"cardinality": "One",
"final": false,
"encrypted": false
},
"customerEncUserGroupInfoSessionKey": {
"id": 1040,
"type": "Bytes",
"cardinality": "ZeroOrOne",
"final": false,
"encrypted": false
},
"encryptedName": {
"id": 358,
"type": "Bytes",
"cardinality": "One",
"final": false,
"encrypted": false
},
"listEncSessionKey": {
"id": 364,
"type": "Bytes",
"cardinality": "One",
"final": false,
"encrypted": false
},
"mailAddress": {
"id": 359,
"type": "String",
"cardinality": "ZeroOrOne",
"final": false,
"encrypted": false
},
"pubKey": {
"id": 360,
"type": "Bytes",
"cardinality": "One",
"final": false,
"encrypted": false
},
"symEncGKey": {
"id": 362,
"type": "Bytes",
"cardinality": "One",
"final": false,
"encrypted": false
},
"symEncPrivKey": {
"id": 361,
"type": "Bytes",
"cardinality": "One",
"final": false,
"encrypted": false
}
},
"associations": {},
"app": "sys",
"version": "72"
}
export function | (values?: Partial<CreateGroupData>): CreateGroupData {
return Object.assign(create(_TypeModel, CreateGroupDataTypeRef), downcast<CreateGroupData>(values))
}
export type CreateGroupData = {
_type: TypeRef<CreateGroupData>;
_id: Id;
adminEncGKey: Uint8Array;
customerEncUserGroupInfoSessionKey: null | Uint8Array;
encryptedName: Uint8Array;
listEncSessionKey: Uint8Array;
mailAddress: null | string;
pubKey: Uint8Array;
symEncGKey: Uint8Array;
symEncPrivKey: Uint8Array;
} | createCreateGroupData | identifier_name |
win32.py | #!/usr/bin/python
# $Id:$
from base import Display, Screen, ScreenMode, Canvas
from pyglet.libs.win32 import _kernel32, _user32, types, constants
from pyglet.libs.win32.constants import *
from pyglet.libs.win32.types import *
class Win32Display(Display):
def get_screens(self):
screens = []
def enum_proc(hMonitor, hdcMonitor, lprcMonitor, dwData):
r = lprcMonitor.contents
width = r.right - r.left
height = r.bottom - r.top
screens.append(
Win32Screen(self, hMonitor, r.left, r.top, width, height))
return True
enum_proc_type = WINFUNCTYPE(BOOL, HMONITOR, HDC, POINTER(RECT), LPARAM)
enum_proc_ptr = enum_proc_type(enum_proc)
_user32.EnumDisplayMonitors(NULL, NULL, enum_proc_ptr, 0)
return screens
class Win32Screen(Screen):
_initial_mode = None
def __init__(self, display, handle, x, y, width, height):
super(Win32Screen, self).__init__(display, x, y, width, height)
self._handle = handle
def get_matching_configs(self, template):
canvas = Win32Canvas(self.display, 0, _user32.GetDC(0))
configs = template.match(canvas)
# XXX deprecate config's being screen-specific
for config in configs:
config.screen = self
return configs
def get_device_name(self):
info = MONITORINFOEX()
info.cbSize = sizeof(MONITORINFOEX)
_user32.GetMonitorInfoW(self._handle, byref(info))
return info.szDevice
def get_modes(self):
device_name = self.get_device_name()
i = 0
modes = []
while True:
mode = DEVMODE()
mode.dmSize = sizeof(DEVMODE)
r = _user32.EnumDisplaySettingsW(device_name, i, byref(mode))
if not r:
break
modes.append(Win32ScreenMode(self, mode))
i += 1
return modes
def get_mode(self):
mode = DEVMODE()
mode.dmSize = sizeof(DEVMODE)
_user32.EnumDisplaySettingsW(self.get_device_name(),
ENUM_CURRENT_SETTINGS,
byref(mode))
return Win32ScreenMode(self, mode)
def | (self, mode):
assert mode.screen is self
if not self._initial_mode:
self._initial_mode = self.get_mode()
r = _user32.ChangeDisplaySettingsExW(self.get_device_name(),
byref(mode._mode),
None,
CDS_FULLSCREEN,
None)
if r == DISP_CHANGE_SUCCESSFUL:
self.width = mode.width
self.height = mode.height
def restore_mode(self):
if self._initial_mode:
self.set_mode(self._initial_mode)
class Win32ScreenMode(ScreenMode):
def __init__(self, screen, mode):
super(Win32ScreenMode, self).__init__(screen)
self._mode = mode
self.width = mode.dmPelsWidth
self.height = mode.dmPelsHeight
self.depth = mode.dmBitsPerPel
self.rate = mode.dmDisplayFrequency
class Win32Canvas(Canvas):
def __init__(self, display, hwnd, hdc):
super(Win32Canvas, self).__init__(display)
self.hwnd = hwnd
self.hdc = hdc
| set_mode | identifier_name |
win32.py | #!/usr/bin/python
# $Id:$
from base import Display, Screen, ScreenMode, Canvas
from pyglet.libs.win32 import _kernel32, _user32, types, constants
from pyglet.libs.win32.constants import *
from pyglet.libs.win32.types import *
class Win32Display(Display):
def get_screens(self):
screens = []
def enum_proc(hMonitor, hdcMonitor, lprcMonitor, dwData):
r = lprcMonitor.contents
width = r.right - r.left
height = r.bottom - r.top
screens.append(
Win32Screen(self, hMonitor, r.left, r.top, width, height))
return True
enum_proc_type = WINFUNCTYPE(BOOL, HMONITOR, HDC, POINTER(RECT), LPARAM)
enum_proc_ptr = enum_proc_type(enum_proc)
_user32.EnumDisplayMonitors(NULL, NULL, enum_proc_ptr, 0)
return screens
class Win32Screen(Screen):
_initial_mode = None
def __init__(self, display, handle, x, y, width, height):
super(Win32Screen, self).__init__(display, x, y, width, height)
self._handle = handle
def get_matching_configs(self, template):
canvas = Win32Canvas(self.display, 0, _user32.GetDC(0))
configs = template.match(canvas)
# XXX deprecate config's being screen-specific
for config in configs:
config.screen = self
return configs
def get_device_name(self):
info = MONITORINFOEX()
info.cbSize = sizeof(MONITORINFOEX)
_user32.GetMonitorInfoW(self._handle, byref(info))
return info.szDevice
def get_modes(self):
device_name = self.get_device_name()
i = 0
modes = []
while True:
mode = DEVMODE()
mode.dmSize = sizeof(DEVMODE)
r = _user32.EnumDisplaySettingsW(device_name, i, byref(mode))
if not r:
break
modes.append(Win32ScreenMode(self, mode))
i += 1
return modes
def get_mode(self):
mode = DEVMODE()
mode.dmSize = sizeof(DEVMODE)
_user32.EnumDisplaySettingsW(self.get_device_name(),
ENUM_CURRENT_SETTINGS,
byref(mode))
return Win32ScreenMode(self, mode)
def set_mode(self, mode):
assert mode.screen is self
if not self._initial_mode:
self._initial_mode = self.get_mode()
r = _user32.ChangeDisplaySettingsExW(self.get_device_name(),
byref(mode._mode),
None,
CDS_FULLSCREEN,
None)
if r == DISP_CHANGE_SUCCESSFUL:
self.width = mode.width
self.height = mode.height
def restore_mode(self):
if self._initial_mode:
self.set_mode(self._initial_mode)
class Win32ScreenMode(ScreenMode):
def __init__(self, screen, mode):
super(Win32ScreenMode, self).__init__(screen)
self._mode = mode
self.width = mode.dmPelsWidth
self.height = mode.dmPelsHeight
self.depth = mode.dmBitsPerPel
self.rate = mode.dmDisplayFrequency
class Win32Canvas(Canvas):
| def __init__(self, display, hwnd, hdc):
super(Win32Canvas, self).__init__(display)
self.hwnd = hwnd
self.hdc = hdc | identifier_body | |
win32.py | #!/usr/bin/python
# $Id:$
from base import Display, Screen, ScreenMode, Canvas
from pyglet.libs.win32 import _kernel32, _user32, types, constants
from pyglet.libs.win32.constants import *
from pyglet.libs.win32.types import *
| def enum_proc(hMonitor, hdcMonitor, lprcMonitor, dwData):
r = lprcMonitor.contents
width = r.right - r.left
height = r.bottom - r.top
screens.append(
Win32Screen(self, hMonitor, r.left, r.top, width, height))
return True
enum_proc_type = WINFUNCTYPE(BOOL, HMONITOR, HDC, POINTER(RECT), LPARAM)
enum_proc_ptr = enum_proc_type(enum_proc)
_user32.EnumDisplayMonitors(NULL, NULL, enum_proc_ptr, 0)
return screens
class Win32Screen(Screen):
_initial_mode = None
def __init__(self, display, handle, x, y, width, height):
super(Win32Screen, self).__init__(display, x, y, width, height)
self._handle = handle
def get_matching_configs(self, template):
canvas = Win32Canvas(self.display, 0, _user32.GetDC(0))
configs = template.match(canvas)
# XXX deprecate config's being screen-specific
for config in configs:
config.screen = self
return configs
def get_device_name(self):
info = MONITORINFOEX()
info.cbSize = sizeof(MONITORINFOEX)
_user32.GetMonitorInfoW(self._handle, byref(info))
return info.szDevice
def get_modes(self):
device_name = self.get_device_name()
i = 0
modes = []
while True:
mode = DEVMODE()
mode.dmSize = sizeof(DEVMODE)
r = _user32.EnumDisplaySettingsW(device_name, i, byref(mode))
if not r:
break
modes.append(Win32ScreenMode(self, mode))
i += 1
return modes
def get_mode(self):
mode = DEVMODE()
mode.dmSize = sizeof(DEVMODE)
_user32.EnumDisplaySettingsW(self.get_device_name(),
ENUM_CURRENT_SETTINGS,
byref(mode))
return Win32ScreenMode(self, mode)
def set_mode(self, mode):
assert mode.screen is self
if not self._initial_mode:
self._initial_mode = self.get_mode()
r = _user32.ChangeDisplaySettingsExW(self.get_device_name(),
byref(mode._mode),
None,
CDS_FULLSCREEN,
None)
if r == DISP_CHANGE_SUCCESSFUL:
self.width = mode.width
self.height = mode.height
def restore_mode(self):
if self._initial_mode:
self.set_mode(self._initial_mode)
class Win32ScreenMode(ScreenMode):
def __init__(self, screen, mode):
super(Win32ScreenMode, self).__init__(screen)
self._mode = mode
self.width = mode.dmPelsWidth
self.height = mode.dmPelsHeight
self.depth = mode.dmBitsPerPel
self.rate = mode.dmDisplayFrequency
class Win32Canvas(Canvas):
def __init__(self, display, hwnd, hdc):
super(Win32Canvas, self).__init__(display)
self.hwnd = hwnd
self.hdc = hdc | class Win32Display(Display):
def get_screens(self):
screens = [] | random_line_split |
win32.py | #!/usr/bin/python
# $Id:$
from base import Display, Screen, ScreenMode, Canvas
from pyglet.libs.win32 import _kernel32, _user32, types, constants
from pyglet.libs.win32.constants import *
from pyglet.libs.win32.types import *
class Win32Display(Display):
def get_screens(self):
screens = []
def enum_proc(hMonitor, hdcMonitor, lprcMonitor, dwData):
r = lprcMonitor.contents
width = r.right - r.left
height = r.bottom - r.top
screens.append(
Win32Screen(self, hMonitor, r.left, r.top, width, height))
return True
enum_proc_type = WINFUNCTYPE(BOOL, HMONITOR, HDC, POINTER(RECT), LPARAM)
enum_proc_ptr = enum_proc_type(enum_proc)
_user32.EnumDisplayMonitors(NULL, NULL, enum_proc_ptr, 0)
return screens
class Win32Screen(Screen):
_initial_mode = None
def __init__(self, display, handle, x, y, width, height):
super(Win32Screen, self).__init__(display, x, y, width, height)
self._handle = handle
def get_matching_configs(self, template):
canvas = Win32Canvas(self.display, 0, _user32.GetDC(0))
configs = template.match(canvas)
# XXX deprecate config's being screen-specific
for config in configs:
config.screen = self
return configs
def get_device_name(self):
info = MONITORINFOEX()
info.cbSize = sizeof(MONITORINFOEX)
_user32.GetMonitorInfoW(self._handle, byref(info))
return info.szDevice
def get_modes(self):
device_name = self.get_device_name()
i = 0
modes = []
while True:
mode = DEVMODE()
mode.dmSize = sizeof(DEVMODE)
r = _user32.EnumDisplaySettingsW(device_name, i, byref(mode))
if not r:
|
modes.append(Win32ScreenMode(self, mode))
i += 1
return modes
def get_mode(self):
mode = DEVMODE()
mode.dmSize = sizeof(DEVMODE)
_user32.EnumDisplaySettingsW(self.get_device_name(),
ENUM_CURRENT_SETTINGS,
byref(mode))
return Win32ScreenMode(self, mode)
def set_mode(self, mode):
assert mode.screen is self
if not self._initial_mode:
self._initial_mode = self.get_mode()
r = _user32.ChangeDisplaySettingsExW(self.get_device_name(),
byref(mode._mode),
None,
CDS_FULLSCREEN,
None)
if r == DISP_CHANGE_SUCCESSFUL:
self.width = mode.width
self.height = mode.height
def restore_mode(self):
if self._initial_mode:
self.set_mode(self._initial_mode)
class Win32ScreenMode(ScreenMode):
def __init__(self, screen, mode):
super(Win32ScreenMode, self).__init__(screen)
self._mode = mode
self.width = mode.dmPelsWidth
self.height = mode.dmPelsHeight
self.depth = mode.dmBitsPerPel
self.rate = mode.dmDisplayFrequency
class Win32Canvas(Canvas):
def __init__(self, display, hwnd, hdc):
super(Win32Canvas, self).__init__(display)
self.hwnd = hwnd
self.hdc = hdc
| break | conditional_block |
app.spec.ts | import {
it,
inject,
injectAsync,
beforeEachProviders
} from '@angular/core/testing';
// to use Translate Service, we need Http, and to test Http we need to mock the backend
import {
BaseRequestOptions,
Http,
Response,
ResponseOptions
} from '@angular/http';
import {MockBackend} from '@angular/http/testing';
import {provide} from "@angular/core";
// Load the implementations that should be tested
// import {Api} from './services/api/api';
import { AppComponent } from './app';
describe('AppComponent', () => {
// provide our implementations or mocks to the dependency injector
beforeEachProviders(() => [
// App,
// Api,
BaseRequestOptions,
MockBackend,
// Provide a mocked (fake) backend for Http
provide(Http, {
useFactory: function useFactory(backend, defaultOptions) {
return new Http(backend, defaultOptions);
},
deps: [MockBackend, BaseRequestOptions]
})
]); | }));
}); |
it('should have a non-empty appHeaderMenuModel', inject([AppComponent], (app: AppComponent) => {
expect(app.appHeaderMenuModel.length).toBeGreaterThan(0); | random_line_split |
variables_0.js | var searchData=
[
['addra',['addrA',['../a00028.html#a3cae9d5d9bef65c080151c7a068aba59',1,'ip_addr']]],
['addrb',['addrB',['../a00028.html#affc4950defbac0add14648c302d1cca3',1,'ip_addr']]],
['addrc',['addrC',['../a00028.html#a2965c835c1e5f0a593e0ce78a9e22596',1,'ip_addr']]],
['addrd',['addrD',['../a00028.html#a46e67573cf0c4c6cc8fcaf4573a441c5',1,'ip_addr']]],
['adv',['adv',['../a00043.html#aa0b33ec0b236628dd65628a7aa238b03',1,'utils_temperature']]],
['aitc',['aitc',['../a00046.html#a4ef06d5c9fc774f79cf065ec4d971518',1,'aitc.c']]],
['all_5fships',['all_ships',['../a00101.html#af4ba9abb0410a1ddc659f565de7a0e00',1,'model_battleship.c']]],
['apprunning',['appRunning',['../a00099.html#a2c9ae8a4de631e631a531446605ee47d',1,'model.c']]],
['apptext',['appText',['../a00099.html#acb5bd952a87114eb0c59dcba58e5f782',1,'model.c']]], | ['arpcache',['arpcache',['../a00108.html#a7489612cbdbbec7ea6dc20811cafd90f',1,'network.h']]]
]; | random_line_split | |
index.ts | /* eslint-disable @typescript-eslint/ban-ts-ignore */
import crypto from 'crypto';
// @ts-ignore
import cryptoAsync from '@ronomon/crypto-async';
import {
CreateDigest,
CreateRandomBytes,
HashAlgorithms,
HexString,
KeyEncodings
} from '@otplib/core';
export const createDigest: CreateDigest<Promise<string>> = async (
algorithm: HashAlgorithms,
hmacKey: HexString,
counter: HexString
): Promise<HexString> => { | algorithm,
Buffer.from(hmacKey, 'hex'),
Buffer.from(counter, 'hex'),
(error: string, hmac: Buffer): void => {
if (error) {
reject(error);
return;
}
resolve(hmac);
}
);
});
return digest.toString('hex');
};
export const createRandomBytes: CreateRandomBytes<Promise<string>> = async (
size: number,
encoding: KeyEncodings
): Promise<string> => {
return crypto.randomBytes(size).toString(encoding);
}; | const digest = await new Promise<Buffer>((resolve, reject): void => {
cryptoAsync.hmac( | random_line_split |
index.ts | /* eslint-disable @typescript-eslint/ban-ts-ignore */
import crypto from 'crypto';
// @ts-ignore
import cryptoAsync from '@ronomon/crypto-async';
import {
CreateDigest,
CreateRandomBytes,
HashAlgorithms,
HexString,
KeyEncodings
} from '@otplib/core';
export const createDigest: CreateDigest<Promise<string>> = async (
algorithm: HashAlgorithms,
hmacKey: HexString,
counter: HexString
): Promise<HexString> => {
const digest = await new Promise<Buffer>((resolve, reject): void => {
cryptoAsync.hmac(
algorithm,
Buffer.from(hmacKey, 'hex'),
Buffer.from(counter, 'hex'),
(error: string, hmac: Buffer): void => {
if (error) |
resolve(hmac);
}
);
});
return digest.toString('hex');
};
export const createRandomBytes: CreateRandomBytes<Promise<string>> = async (
size: number,
encoding: KeyEncodings
): Promise<string> => {
return crypto.randomBytes(size).toString(encoding);
};
| {
reject(error);
return;
} | conditional_block |
issue-23442.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-pass
#![allow(dead_code)]
use std::marker::PhantomData;
pub struct UnionedKeys<'a,K>
where K: UnifyKey + 'a
{
table: &'a mut UnificationTable<K>,
root_key: K,
stack: Vec<K>,
}
pub trait UnifyKey {
type Value;
}
pub struct | <K:UnifyKey> {
values: Delegate<K>,
}
pub struct Delegate<K>(PhantomData<K>);
fn main() {}
| UnificationTable | identifier_name |
issue-23442.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT | //
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-pass
#![allow(dead_code)]
use std::marker::PhantomData;
pub struct UnionedKeys<'a,K>
where K: UnifyKey + 'a
{
table: &'a mut UnificationTable<K>,
root_key: K,
stack: Vec<K>,
}
pub trait UnifyKey {
type Value;
}
pub struct UnificationTable<K:UnifyKey> {
values: Delegate<K>,
}
pub struct Delegate<K>(PhantomData<K>);
fn main() {} | // file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT. | random_line_split |
issue-23442.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-pass
#![allow(dead_code)]
use std::marker::PhantomData;
pub struct UnionedKeys<'a,K>
where K: UnifyKey + 'a
{
table: &'a mut UnificationTable<K>,
root_key: K,
stack: Vec<K>,
}
pub trait UnifyKey {
type Value;
}
pub struct UnificationTable<K:UnifyKey> {
values: Delegate<K>,
}
pub struct Delegate<K>(PhantomData<K>);
fn main() | {} | identifier_body |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.