file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
verify.js | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
var fs = require('fs');
var jsdom = require('jsdom');
function cleanTag (tree) {
cleanAttributes(tree);
}
function cleanAttributes (tree, acceptableAttribtues) {
var attributesToRemove = [];
for (var i = 0, l = tree.attributes.length; i < l; ++i) {
var name = tree.attributes[i].name;
if ( (acceptableAttribtues && acceptableAttribtues.indexOf(name) === -1)
|| ( name !== 'on' &&
( name.indexOf('on') === 0 || name.indexOf('On') === 0 || name.indexOf('oN') === 0
|| name.indexOf('ON') === 0) ) ) {
attributesToRemove.push(name);
}
}
while (attributesToRemove.length > 0) {
tree.removeAttribute(attributesToRemove.shift());
}
}
function cleanContentSubTree (tree) {
var output = '';
var childOutput = '';
if (tree._nodeName.indexOf('app-') === 0) {
for (var i = 0, l = tree._childNodes.length; i < l; ++i) {
childOutput += cleanContentSubTree(tree._childNodes[i]);
}
output += '<' + tree._nodeName + createAttributeString(tree) + '>' + childOutput + '</' + tree._nodeName + '>';
}
else if (tree._nodeName === '#text') {
output += tree._nodeValue.replace(/[^\s]/g, '');
}
return output;
}
function checkTag (tree, tagName, attributes) {
if (!tree || tree._nodeName !== tagName) {
throw 'Invalid tag.';
}
var attr;
attributes = attributes || [];
Object.keys(attributes).forEach(function (name) {
var expectedValue = attributes[name];
var inputAttribute = tree.attributes[name];
if (name === 'id') {
if (expectedValue !== inputAttribute.value) {
throw 'Attribute "' + 'id' + '" does not match: ' + attributes[name];
}
}
else {
if (inputAttribute.value.indexOf(expectedValue) === -1) {
throw 'Attribute "' + name + '" does not match: ' + expectedValue + ', ' + inputAttribute.value;
}
}
});
}
function siftThroughChildren (childNodes, childFunction) {
var childrenToRemove = [];
Array.prototype.forEach.call(childNodes, function (child) {
if (child._nodeName === '#text') {
child._nodeValue.replace(/[^\s]/g, '');
}
else {
if (!childFunction(child)) {
childrenToRemove.push(child);
}
}
});
while (childrenToRemove.length > 0) {
var child = childrenToRemove.shift();
child.parentNode.removeChild(child);
}
}
function filterApp (tree) {
checkTag(tree, 'div', { id: 'flathead-app' });
cleanTag(tree);
if (tree._childNodes) {
siftThroughChildren(tree._childNodes, filterCard);
}
else {
throw "No children!";
}
return tree;
}
function filterCard (tree) {
checkTag(tree, 'div', { class: 'ceci-card' });
cleanTag(tree);
var childClasses = ['fixed-top', 'phone-canvas', 'fixed-bottom'];
if (tree._childNodes) {
siftThroughChildren(tree._childNodes, function (child) {
return filterSection(child, childClasses.shift());
});
return true;
}
else {
throw "No children!";
}
}
function filterSection (tree, name) {
checkTag(tree, 'div', { class: name });
cleanTag(tree);
if (tree._childNodes) {
siftThroughChildren(tree._childNodes, filterComponent);
}
return true;
}
function filterSubscription (tree) {
cleanAttributes(tree, ['on', 'for']);
siftThroughChildren(tree._childNodes, function (child) {
cleanAttributes(child, ['color']);
});
return true;
}
function filterComponent (tree) {
if (tree._nodeName.indexOf('app-') === 0) {
if (tree._childNodes) {
siftThroughChildren(tree._childNodes, filterComponent);
return true;
}
}
else if (['broadcast', 'listen'].indexOf(tree._nodeName) > -1) {
return filterSubscription(tree);
}
return false;
}
module.exports = {
filter: function (html, callback) {
html = html.replace(/<script>[\s.]*<\/script>/g, '');
jsdom.env(html, {
done: function (errors, window) {
var inputDocument = window.document;
var inputNode = inputDocument.firstChild;
var output = null;
var appDiv = inputNode.firstChild.firstChild;
try {
output = filterApp(appDiv).outerHTML;
}
catch (e) {
console.error(e);
throw e;
}
callback(output);
}
}); | }
}; | random_line_split | |
verify.js | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
var fs = require('fs');
var jsdom = require('jsdom');
function cleanTag (tree) {
cleanAttributes(tree);
}
function cleanAttributes (tree, acceptableAttribtues) {
var attributesToRemove = [];
for (var i = 0, l = tree.attributes.length; i < l; ++i) {
var name = tree.attributes[i].name;
if ( (acceptableAttribtues && acceptableAttribtues.indexOf(name) === -1)
|| ( name !== 'on' &&
( name.indexOf('on') === 0 || name.indexOf('On') === 0 || name.indexOf('oN') === 0
|| name.indexOf('ON') === 0) ) ) {
attributesToRemove.push(name);
}
}
while (attributesToRemove.length > 0) {
tree.removeAttribute(attributesToRemove.shift());
}
}
function cleanContentSubTree (tree) {
var output = '';
var childOutput = '';
if (tree._nodeName.indexOf('app-') === 0) {
for (var i = 0, l = tree._childNodes.length; i < l; ++i) {
childOutput += cleanContentSubTree(tree._childNodes[i]);
}
output += '<' + tree._nodeName + createAttributeString(tree) + '>' + childOutput + '</' + tree._nodeName + '>';
}
else if (tree._nodeName === '#text') {
output += tree._nodeValue.replace(/[^\s]/g, '');
}
return output;
}
function checkTag (tree, tagName, attributes) {
if (!tree || tree._nodeName !== tagName) {
throw 'Invalid tag.';
}
var attr;
attributes = attributes || [];
Object.keys(attributes).forEach(function (name) {
var expectedValue = attributes[name];
var inputAttribute = tree.attributes[name];
if (name === 'id') {
if (expectedValue !== inputAttribute.value) {
throw 'Attribute "' + 'id' + '" does not match: ' + attributes[name];
}
}
else {
if (inputAttribute.value.indexOf(expectedValue) === -1) {
throw 'Attribute "' + name + '" does not match: ' + expectedValue + ', ' + inputAttribute.value;
}
}
});
}
function siftThroughChildren (childNodes, childFunction) {
var childrenToRemove = [];
Array.prototype.forEach.call(childNodes, function (child) {
if (child._nodeName === '#text') {
child._nodeValue.replace(/[^\s]/g, '');
}
else {
if (!childFunction(child)) {
childrenToRemove.push(child);
}
}
});
while (childrenToRemove.length > 0) {
var child = childrenToRemove.shift();
child.parentNode.removeChild(child);
}
}
function | (tree) {
checkTag(tree, 'div', { id: 'flathead-app' });
cleanTag(tree);
if (tree._childNodes) {
siftThroughChildren(tree._childNodes, filterCard);
}
else {
throw "No children!";
}
return tree;
}
function filterCard (tree) {
checkTag(tree, 'div', { class: 'ceci-card' });
cleanTag(tree);
var childClasses = ['fixed-top', 'phone-canvas', 'fixed-bottom'];
if (tree._childNodes) {
siftThroughChildren(tree._childNodes, function (child) {
return filterSection(child, childClasses.shift());
});
return true;
}
else {
throw "No children!";
}
}
function filterSection (tree, name) {
checkTag(tree, 'div', { class: name });
cleanTag(tree);
if (tree._childNodes) {
siftThroughChildren(tree._childNodes, filterComponent);
}
return true;
}
function filterSubscription (tree) {
cleanAttributes(tree, ['on', 'for']);
siftThroughChildren(tree._childNodes, function (child) {
cleanAttributes(child, ['color']);
});
return true;
}
function filterComponent (tree) {
if (tree._nodeName.indexOf('app-') === 0) {
if (tree._childNodes) {
siftThroughChildren(tree._childNodes, filterComponent);
return true;
}
}
else if (['broadcast', 'listen'].indexOf(tree._nodeName) > -1) {
return filterSubscription(tree);
}
return false;
}
module.exports = {
filter: function (html, callback) {
html = html.replace(/<script>[\s.]*<\/script>/g, '');
jsdom.env(html, {
done: function (errors, window) {
var inputDocument = window.document;
var inputNode = inputDocument.firstChild;
var output = null;
var appDiv = inputNode.firstChild.firstChild;
try {
output = filterApp(appDiv).outerHTML;
}
catch (e) {
console.error(e);
throw e;
}
callback(output);
}
});
}
}; | filterApp | identifier_name |
verify.js | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
var fs = require('fs');
var jsdom = require('jsdom');
function cleanTag (tree) {
cleanAttributes(tree);
}
function cleanAttributes (tree, acceptableAttribtues) {
var attributesToRemove = [];
for (var i = 0, l = tree.attributes.length; i < l; ++i) {
var name = tree.attributes[i].name;
if ( (acceptableAttribtues && acceptableAttribtues.indexOf(name) === -1)
|| ( name !== 'on' &&
( name.indexOf('on') === 0 || name.indexOf('On') === 0 || name.indexOf('oN') === 0
|| name.indexOf('ON') === 0) ) ) {
attributesToRemove.push(name);
}
}
while (attributesToRemove.length > 0) {
tree.removeAttribute(attributesToRemove.shift());
}
}
function cleanContentSubTree (tree) {
var output = '';
var childOutput = '';
if (tree._nodeName.indexOf('app-') === 0) {
for (var i = 0, l = tree._childNodes.length; i < l; ++i) {
childOutput += cleanContentSubTree(tree._childNodes[i]);
}
output += '<' + tree._nodeName + createAttributeString(tree) + '>' + childOutput + '</' + tree._nodeName + '>';
}
else if (tree._nodeName === '#text') {
output += tree._nodeValue.replace(/[^\s]/g, '');
}
return output;
}
function checkTag (tree, tagName, attributes) {
if (!tree || tree._nodeName !== tagName) {
throw 'Invalid tag.';
}
var attr;
attributes = attributes || [];
Object.keys(attributes).forEach(function (name) {
var expectedValue = attributes[name];
var inputAttribute = tree.attributes[name];
if (name === 'id') {
if (expectedValue !== inputAttribute.value) {
throw 'Attribute "' + 'id' + '" does not match: ' + attributes[name];
}
}
else {
if (inputAttribute.value.indexOf(expectedValue) === -1) {
throw 'Attribute "' + name + '" does not match: ' + expectedValue + ', ' + inputAttribute.value;
}
}
});
}
function siftThroughChildren (childNodes, childFunction) {
var childrenToRemove = [];
Array.prototype.forEach.call(childNodes, function (child) {
if (child._nodeName === '#text') {
child._nodeValue.replace(/[^\s]/g, '');
}
else {
if (!childFunction(child)) {
childrenToRemove.push(child);
}
}
});
while (childrenToRemove.length > 0) {
var child = childrenToRemove.shift();
child.parentNode.removeChild(child);
}
}
function filterApp (tree) {
checkTag(tree, 'div', { id: 'flathead-app' });
cleanTag(tree);
if (tree._childNodes) {
siftThroughChildren(tree._childNodes, filterCard);
}
else {
throw "No children!";
}
return tree;
}
function filterCard (tree) {
checkTag(tree, 'div', { class: 'ceci-card' });
cleanTag(tree);
var childClasses = ['fixed-top', 'phone-canvas', 'fixed-bottom'];
if (tree._childNodes) {
siftThroughChildren(tree._childNodes, function (child) {
return filterSection(child, childClasses.shift());
});
return true;
}
else |
}
function filterSection (tree, name) {
checkTag(tree, 'div', { class: name });
cleanTag(tree);
if (tree._childNodes) {
siftThroughChildren(tree._childNodes, filterComponent);
}
return true;
}
function filterSubscription (tree) {
cleanAttributes(tree, ['on', 'for']);
siftThroughChildren(tree._childNodes, function (child) {
cleanAttributes(child, ['color']);
});
return true;
}
function filterComponent (tree) {
if (tree._nodeName.indexOf('app-') === 0) {
if (tree._childNodes) {
siftThroughChildren(tree._childNodes, filterComponent);
return true;
}
}
else if (['broadcast', 'listen'].indexOf(tree._nodeName) > -1) {
return filterSubscription(tree);
}
return false;
}
module.exports = {
filter: function (html, callback) {
html = html.replace(/<script>[\s.]*<\/script>/g, '');
jsdom.env(html, {
done: function (errors, window) {
var inputDocument = window.document;
var inputNode = inputDocument.firstChild;
var output = null;
var appDiv = inputNode.firstChild.firstChild;
try {
output = filterApp(appDiv).outerHTML;
}
catch (e) {
console.error(e);
throw e;
}
callback(output);
}
});
}
}; | {
throw "No children!";
} | conditional_block |
rustc.rs | //! Shim which is passed to Cargo as "rustc" when running the bootstrap.
//!
//! This shim will take care of some various tasks that our build process
//! requires that Cargo can't quite do through normal configuration:
//!
//! 1. When compiling build scripts and build dependencies, we need a guaranteed
//! full standard library available. The only compiler which actually has
//! this is the snapshot, so we detect this situation and always compile with
//! the snapshot compiler.
//! 2. We pass a bunch of `--cfg` and other flags based on what we're compiling
//! (and this slightly differs based on a whether we're using a snapshot or
//! not), so we do that all here.
//!
//! This may one day be replaced by RUSTFLAGS, but the dynamic nature of
//! switching compilers for the bootstrap and for build scripts will probably
//! never get replaced.
use std::env;
use std::path::PathBuf;
use std::process::{Child, Command};
use std::str::FromStr;
use std::time::Instant;
fn main() {
let args = env::args_os().skip(1).collect::<Vec<_>>();
// Detect whether or not we're a build script depending on whether --target
// is passed (a bit janky...)
let target = args.windows(2).find(|w| &*w[0] == "--target").and_then(|w| w[1].to_str());
let version = args.iter().find(|w| &**w == "-vV");
let verbose = match env::var("RUSTC_VERBOSE") {
Ok(s) => usize::from_str(&s).expect("RUSTC_VERBOSE should be an integer"),
Err(_) => 0,
};
// Use a different compiler for build scripts, since there may not yet be a
// libstd for the real compiler to use. However, if Cargo is attempting to
// determine the version of the compiler, the real compiler needs to be
// used. Currently, these two states are differentiated based on whether
// --target and -vV is/isn't passed.
let (rustc, libdir) = if target.is_none() && version.is_none() {
("RUSTC_SNAPSHOT", "RUSTC_SNAPSHOT_LIBDIR")
} else {
("RUSTC_REAL", "RUSTC_LIBDIR")
};
let stage = env::var("RUSTC_STAGE").expect("RUSTC_STAGE was not set");
let sysroot = env::var_os("RUSTC_SYSROOT").expect("RUSTC_SYSROOT was not set");
let on_fail = env::var_os("RUSTC_ON_FAIL").map(Command::new);
let rustc = env::var_os(rustc).unwrap_or_else(|| panic!("{:?} was not set", rustc));
let libdir = env::var_os(libdir).unwrap_or_else(|| panic!("{:?} was not set", libdir));
let mut dylib_path = bootstrap::util::dylib_path();
dylib_path.insert(0, PathBuf::from(&libdir));
let mut cmd = Command::new(rustc);
cmd.args(&args).env(bootstrap::util::dylib_path_var(), env::join_paths(&dylib_path).unwrap());
// Get the name of the crate we're compiling, if any.
let crate_name =
args.windows(2).find(|args| args[0] == "--crate-name").and_then(|args| args[1].to_str());
if let Some(crate_name) = crate_name {
if let Some(target) = env::var_os("RUSTC_TIME") {
if target == "all"
|| target.into_string().unwrap().split(',').any(|c| c.trim() == crate_name)
{
cmd.arg("-Ztime");
}
}
}
// Print backtrace in case of ICE
if env::var("RUSTC_BACKTRACE_ON_ICE").is_ok() && env::var("RUST_BACKTRACE").is_err() {
cmd.env("RUST_BACKTRACE", "1");
}
if let Ok(lint_flags) = env::var("RUSTC_LINT_FLAGS") {
cmd.args(lint_flags.split_whitespace());
}
if target.is_some() {
// The stage0 compiler has a special sysroot distinct from what we
// actually downloaded, so we just always pass the `--sysroot` option,
// unless one is already set.
if !args.iter().any(|arg| arg == "--sysroot") |
// If we're compiling specifically the `panic_abort` crate then we pass
// the `-C panic=abort` option. Note that we do not do this for any
// other crate intentionally as this is the only crate for now that we
// ship with panic=abort.
//
// This... is a bit of a hack how we detect this. Ideally this
// information should be encoded in the crate I guess? Would likely
// require an RFC amendment to RFC 1513, however.
//
// `compiler_builtins` are unconditionally compiled with panic=abort to
// workaround undefined references to `rust_eh_unwind_resume` generated
// otherwise, see issue https://github.com/rust-lang/rust/issues/43095.
if crate_name == Some("panic_abort")
|| crate_name == Some("compiler_builtins") && stage != "0"
{
cmd.arg("-C").arg("panic=abort");
}
} else {
// FIXME(rust-lang/cargo#5754) we shouldn't be using special env vars
// here, but rather Cargo should know what flags to pass rustc itself.
// Override linker if necessary.
if let Ok(host_linker) = env::var("RUSTC_HOST_LINKER") {
cmd.arg(format!("-Clinker={}", host_linker));
}
if env::var_os("RUSTC_HOST_FUSE_LD_LLD").is_some() {
cmd.arg("-Clink-args=-fuse-ld=lld");
}
if let Ok(s) = env::var("RUSTC_HOST_CRT_STATIC") {
if s == "true" {
cmd.arg("-C").arg("target-feature=+crt-static");
}
if s == "false" {
cmd.arg("-C").arg("target-feature=-crt-static");
}
}
if stage == "0" {
// Cargo doesn't pass RUSTFLAGS to proc_macros:
// https://github.com/rust-lang/cargo/issues/4423
// Set `--cfg=bootstrap` explicitly instead.
cmd.arg("--cfg=bootstrap");
}
}
if let Ok(map) = env::var("RUSTC_DEBUGINFO_MAP") {
cmd.arg("--remap-path-prefix").arg(&map);
}
// Force all crates compiled by this compiler to (a) be unstable and (b)
// allow the `rustc_private` feature to link to other unstable crates
// also in the sysroot. We also do this for host crates, since those
// may be proc macros, in which case we might ship them.
if env::var_os("RUSTC_FORCE_UNSTABLE").is_some() && (stage != "0" || target.is_some()) {
cmd.arg("-Z").arg("force-unstable-if-unmarked");
}
let is_test = args.iter().any(|a| a == "--test");
if verbose > 1 {
let rust_env_vars =
env::vars().filter(|(k, _)| k.starts_with("RUST") || k.starts_with("CARGO"));
let prefix = if is_test { "[RUSTC-SHIM] rustc --test" } else { "[RUSTC-SHIM] rustc" };
let prefix = match crate_name {
Some(crate_name) => format!("{} {}", prefix, crate_name),
None => prefix.to_string(),
};
for (i, (k, v)) in rust_env_vars.enumerate() {
eprintln!("{} env[{}]: {:?}={:?}", prefix, i, k, v);
}
eprintln!("{} working directory: {}", prefix, env::current_dir().unwrap().display());
eprintln!(
"{} command: {:?}={:?} {:?}",
prefix,
bootstrap::util::dylib_path_var(),
env::join_paths(&dylib_path).unwrap(),
cmd,
);
eprintln!("{} sysroot: {:?}", prefix, sysroot);
eprintln!("{} libdir: {:?}", prefix, libdir);
}
let start = Instant::now();
let (child, status) = {
let errmsg = format!("\nFailed to run:\n{:?}\n-------------", cmd);
let mut child = cmd.spawn().expect(&errmsg);
let status = child.wait().expect(&errmsg);
(child, status)
};
if env::var_os("RUSTC_PRINT_STEP_TIMINGS").is_some()
|| env::var_os("RUSTC_PRINT_STEP_RUSAGE").is_some()
{
if let Some(crate_name) = crate_name {
let dur = start.elapsed();
// If the user requested resource usage data, then
// include that in addition to the timing output.
let rusage_data =
env::var_os("RUSTC_PRINT_STEP_RUSAGE").and_then(|_| format_rusage_data(child));
eprintln!(
"[RUSTC-TIMING] {} test:{} {}.{:03}{}{}",
crate_name,
is_test,
dur.as_secs(),
dur.subsec_millis(),
if rusage_data.is_some() { " " } else { "" },
rusage_data.unwrap_or(String::new()),
);
}
}
if status.success() {
std::process::exit(0);
// note: everything below here is unreachable. do not put code that
// should run on success, after this block.
}
if verbose > 0 {
println!("\nDid not run successfully: {}\n{:?}\n-------------", status, cmd);
}
if let Some(mut on_fail) = on_fail {
on_fail.status().expect("Could not run the on_fail command");
}
// Preserve the exit code. In case of signal, exit with 0xfe since it's
// awkward to preserve this status in a cross-platform way.
match status.code() {
Some(i) => std::process::exit(i),
None => {
eprintln!("rustc exited with {}", status);
std::process::exit(0xfe);
}
}
}
#[cfg(all(not(unix), not(windows)))]
// In the future we can add this for more platforms
fn format_rusage_data(_child: Child) -> Option<String> {
None
}
#[cfg(windows)]
fn format_rusage_data(child: Child) -> Option<String> {
use std::os::windows::io::AsRawHandle;
use winapi::um::{processthreadsapi, psapi, timezoneapi};
let handle = child.as_raw_handle();
macro_rules! try_bool {
($e:expr) => {
if $e != 1 {
return None;
}
};
}
let mut user_filetime = Default::default();
let mut user_time = Default::default();
let mut kernel_filetime = Default::default();
let mut kernel_time = Default::default();
let mut memory_counters = psapi::PROCESS_MEMORY_COUNTERS::default();
unsafe {
try_bool!(processthreadsapi::GetProcessTimes(
handle,
&mut Default::default(),
&mut Default::default(),
&mut kernel_filetime,
&mut user_filetime,
));
try_bool!(timezoneapi::FileTimeToSystemTime(&user_filetime, &mut user_time));
try_bool!(timezoneapi::FileTimeToSystemTime(&kernel_filetime, &mut kernel_time));
// Unlike on Linux with RUSAGE_CHILDREN, this will only return memory information for the process
// with the given handle and none of that process's children.
try_bool!(psapi::GetProcessMemoryInfo(
handle as _,
&mut memory_counters as *mut _ as _,
std::mem::size_of::<psapi::PROCESS_MEMORY_COUNTERS_EX>() as u32,
));
}
// Guide on interpreting these numbers:
// https://docs.microsoft.com/en-us/windows/win32/psapi/process-memory-usage-information
let peak_working_set = memory_counters.PeakWorkingSetSize / 1024;
let peak_page_file = memory_counters.PeakPagefileUsage / 1024;
let peak_paged_pool = memory_counters.QuotaPeakPagedPoolUsage / 1024;
let peak_nonpaged_pool = memory_counters.QuotaPeakNonPagedPoolUsage / 1024;
Some(format!(
"user: {USER_SEC}.{USER_USEC:03} \
sys: {SYS_SEC}.{SYS_USEC:03} \
peak working set (kb): {PEAK_WORKING_SET} \
peak page file usage (kb): {PEAK_PAGE_FILE} \
peak paged pool usage (kb): {PEAK_PAGED_POOL} \
peak non-paged pool usage (kb): {PEAK_NONPAGED_POOL} \
page faults: {PAGE_FAULTS}",
USER_SEC = user_time.wSecond + (user_time.wMinute * 60),
USER_USEC = user_time.wMilliseconds,
SYS_SEC = kernel_time.wSecond + (kernel_time.wMinute * 60),
SYS_USEC = kernel_time.wMilliseconds,
PEAK_WORKING_SET = peak_working_set,
PEAK_PAGE_FILE = peak_page_file,
PEAK_PAGED_POOL = peak_paged_pool,
PEAK_NONPAGED_POOL = peak_nonpaged_pool,
PAGE_FAULTS = memory_counters.PageFaultCount,
))
}
#[cfg(unix)]
/// Tries to build a string with human readable data for several of the rusage
/// fields. Note that we are focusing mainly on data that we believe to be
/// supplied on Linux (the `rusage` struct has other fields in it but they are
/// currently unsupported by Linux).
fn format_rusage_data(_child: Child) -> Option<String> {
let rusage: libc::rusage = unsafe {
let mut recv = std::mem::zeroed();
// -1 is RUSAGE_CHILDREN, which means to get the rusage for all children
// (and grandchildren, etc) processes that have respectively terminated
// and been waited for.
let retval = libc::getrusage(-1, &mut recv);
if retval != 0 {
return None;
}
recv
};
// Mac OS X reports the maxrss in bytes, not kb.
let divisor = if env::consts::OS == "macos" { 1024 } else { 1 };
let maxrss = (rusage.ru_maxrss + (divisor - 1)) / divisor;
let mut init_str = format!(
"user: {USER_SEC}.{USER_USEC:03} \
sys: {SYS_SEC}.{SYS_USEC:03} \
max rss (kb): {MAXRSS}",
USER_SEC = rusage.ru_utime.tv_sec,
USER_USEC = rusage.ru_utime.tv_usec,
SYS_SEC = rusage.ru_stime.tv_sec,
SYS_USEC = rusage.ru_stime.tv_usec,
MAXRSS = maxrss
);
// The remaining rusage stats vary in platform support. So we treat
// uniformly zero values in each category as "not worth printing", since it
// either means no events of that type occurred, or that the platform
// does not support it.
let minflt = rusage.ru_minflt;
let majflt = rusage.ru_majflt;
if minflt != 0 || majflt != 0 {
init_str.push_str(&format!(" page reclaims: {} page faults: {}", minflt, majflt));
}
let inblock = rusage.ru_inblock;
let oublock = rusage.ru_oublock;
if inblock != 0 || oublock != 0 {
init_str.push_str(&format!(" fs block inputs: {} fs block outputs: {}", inblock, oublock));
}
let nvcsw = rusage.ru_nvcsw;
let nivcsw = rusage.ru_nivcsw;
if nvcsw != 0 || nivcsw != 0 {
init_str.push_str(&format!(
" voluntary ctxt switches: {} involuntary ctxt switches: {}",
nvcsw, nivcsw
));
}
return Some(init_str);
}
| {
cmd.arg("--sysroot").arg(&sysroot);
} | conditional_block |
rustc.rs | //! Shim which is passed to Cargo as "rustc" when running the bootstrap.
//!
//! This shim will take care of some various tasks that our build process
//! requires that Cargo can't quite do through normal configuration:
//!
//! 1. When compiling build scripts and build dependencies, we need a guaranteed
//! full standard library available. The only compiler which actually has
//! this is the snapshot, so we detect this situation and always compile with
//! the snapshot compiler.
//! 2. We pass a bunch of `--cfg` and other flags based on what we're compiling
//! (and this slightly differs based on a whether we're using a snapshot or
//! not), so we do that all here.
//!
//! This may one day be replaced by RUSTFLAGS, but the dynamic nature of
//! switching compilers for the bootstrap and for build scripts will probably
//! never get replaced.
use std::env;
use std::path::PathBuf;
use std::process::{Child, Command};
use std::str::FromStr;
use std::time::Instant;
fn main() {
let args = env::args_os().skip(1).collect::<Vec<_>>();
// Detect whether or not we're a build script depending on whether --target
// is passed (a bit janky...)
let target = args.windows(2).find(|w| &*w[0] == "--target").and_then(|w| w[1].to_str());
let version = args.iter().find(|w| &**w == "-vV");
let verbose = match env::var("RUSTC_VERBOSE") {
Ok(s) => usize::from_str(&s).expect("RUSTC_VERBOSE should be an integer"),
Err(_) => 0,
};
// Use a different compiler for build scripts, since there may not yet be a
// libstd for the real compiler to use. However, if Cargo is attempting to
// determine the version of the compiler, the real compiler needs to be
// used. Currently, these two states are differentiated based on whether
// --target and -vV is/isn't passed.
let (rustc, libdir) = if target.is_none() && version.is_none() {
("RUSTC_SNAPSHOT", "RUSTC_SNAPSHOT_LIBDIR")
} else {
("RUSTC_REAL", "RUSTC_LIBDIR")
};
let stage = env::var("RUSTC_STAGE").expect("RUSTC_STAGE was not set");
let sysroot = env::var_os("RUSTC_SYSROOT").expect("RUSTC_SYSROOT was not set");
let on_fail = env::var_os("RUSTC_ON_FAIL").map(Command::new);
let rustc = env::var_os(rustc).unwrap_or_else(|| panic!("{:?} was not set", rustc));
let libdir = env::var_os(libdir).unwrap_or_else(|| panic!("{:?} was not set", libdir));
let mut dylib_path = bootstrap::util::dylib_path();
dylib_path.insert(0, PathBuf::from(&libdir));
let mut cmd = Command::new(rustc);
cmd.args(&args).env(bootstrap::util::dylib_path_var(), env::join_paths(&dylib_path).unwrap());
// Get the name of the crate we're compiling, if any.
let crate_name =
args.windows(2).find(|args| args[0] == "--crate-name").and_then(|args| args[1].to_str());
if let Some(crate_name) = crate_name {
if let Some(target) = env::var_os("RUSTC_TIME") {
if target == "all"
|| target.into_string().unwrap().split(',').any(|c| c.trim() == crate_name)
{
cmd.arg("-Ztime");
}
}
}
// Print backtrace in case of ICE
if env::var("RUSTC_BACKTRACE_ON_ICE").is_ok() && env::var("RUST_BACKTRACE").is_err() {
cmd.env("RUST_BACKTRACE", "1");
}
if let Ok(lint_flags) = env::var("RUSTC_LINT_FLAGS") {
cmd.args(lint_flags.split_whitespace());
}
if target.is_some() {
// The stage0 compiler has a special sysroot distinct from what we
// actually downloaded, so we just always pass the `--sysroot` option,
// unless one is already set.
if !args.iter().any(|arg| arg == "--sysroot") {
cmd.arg("--sysroot").arg(&sysroot);
}
// If we're compiling specifically the `panic_abort` crate then we pass
// the `-C panic=abort` option. Note that we do not do this for any
// other crate intentionally as this is the only crate for now that we
// ship with panic=abort.
//
// This... is a bit of a hack how we detect this. Ideally this
// information should be encoded in the crate I guess? Would likely
// require an RFC amendment to RFC 1513, however.
//
// `compiler_builtins` are unconditionally compiled with panic=abort to
// workaround undefined references to `rust_eh_unwind_resume` generated
// otherwise, see issue https://github.com/rust-lang/rust/issues/43095.
if crate_name == Some("panic_abort")
|| crate_name == Some("compiler_builtins") && stage != "0"
{
cmd.arg("-C").arg("panic=abort");
}
} else {
// FIXME(rust-lang/cargo#5754) we shouldn't be using special env vars
// here, but rather Cargo should know what flags to pass rustc itself.
// Override linker if necessary.
if let Ok(host_linker) = env::var("RUSTC_HOST_LINKER") {
cmd.arg(format!("-Clinker={}", host_linker));
}
if env::var_os("RUSTC_HOST_FUSE_LD_LLD").is_some() {
cmd.arg("-Clink-args=-fuse-ld=lld");
}
if let Ok(s) = env::var("RUSTC_HOST_CRT_STATIC") {
if s == "true" {
cmd.arg("-C").arg("target-feature=+crt-static");
}
if s == "false" {
cmd.arg("-C").arg("target-feature=-crt-static");
}
}
if stage == "0" {
// Cargo doesn't pass RUSTFLAGS to proc_macros:
// https://github.com/rust-lang/cargo/issues/4423
// Set `--cfg=bootstrap` explicitly instead.
cmd.arg("--cfg=bootstrap");
}
}
if let Ok(map) = env::var("RUSTC_DEBUGINFO_MAP") {
cmd.arg("--remap-path-prefix").arg(&map);
}
// Force all crates compiled by this compiler to (a) be unstable and (b)
// allow the `rustc_private` feature to link to other unstable crates
// also in the sysroot. We also do this for host crates, since those
// may be proc macros, in which case we might ship them.
if env::var_os("RUSTC_FORCE_UNSTABLE").is_some() && (stage != "0" || target.is_some()) {
cmd.arg("-Z").arg("force-unstable-if-unmarked");
}
let is_test = args.iter().any(|a| a == "--test");
if verbose > 1 {
let rust_env_vars =
env::vars().filter(|(k, _)| k.starts_with("RUST") || k.starts_with("CARGO"));
let prefix = if is_test { "[RUSTC-SHIM] rustc --test" } else { "[RUSTC-SHIM] rustc" };
let prefix = match crate_name {
Some(crate_name) => format!("{} {}", prefix, crate_name),
None => prefix.to_string(),
};
for (i, (k, v)) in rust_env_vars.enumerate() {
eprintln!("{} env[{}]: {:?}={:?}", prefix, i, k, v);
}
eprintln!("{} working directory: {}", prefix, env::current_dir().unwrap().display());
eprintln!(
"{} command: {:?}={:?} {:?}",
prefix,
bootstrap::util::dylib_path_var(),
env::join_paths(&dylib_path).unwrap(),
cmd,
);
eprintln!("{} sysroot: {:?}", prefix, sysroot);
eprintln!("{} libdir: {:?}", prefix, libdir);
}
let start = Instant::now();
let (child, status) = {
let errmsg = format!("\nFailed to run:\n{:?}\n-------------", cmd);
let mut child = cmd.spawn().expect(&errmsg);
let status = child.wait().expect(&errmsg);
(child, status)
};
if env::var_os("RUSTC_PRINT_STEP_TIMINGS").is_some()
|| env::var_os("RUSTC_PRINT_STEP_RUSAGE").is_some()
{
if let Some(crate_name) = crate_name {
let dur = start.elapsed();
// If the user requested resource usage data, then
// include that in addition to the timing output.
let rusage_data =
env::var_os("RUSTC_PRINT_STEP_RUSAGE").and_then(|_| format_rusage_data(child));
eprintln!(
"[RUSTC-TIMING] {} test:{} {}.{:03}{}{}",
crate_name,
is_test,
dur.as_secs(),
dur.subsec_millis(),
if rusage_data.is_some() { " " } else { "" },
rusage_data.unwrap_or(String::new()),
);
}
}
if status.success() {
std::process::exit(0);
// note: everything below here is unreachable. do not put code that
// should run on success, after this block.
}
if verbose > 0 {
println!("\nDid not run successfully: {}\n{:?}\n-------------", status, cmd);
}
if let Some(mut on_fail) = on_fail {
on_fail.status().expect("Could not run the on_fail command");
}
// Preserve the exit code. In case of signal, exit with 0xfe since it's
// awkward to preserve this status in a cross-platform way.
match status.code() {
Some(i) => std::process::exit(i),
None => {
eprintln!("rustc exited with {}", status);
std::process::exit(0xfe);
}
}
}
#[cfg(all(not(unix), not(windows)))]
// In the future we can add this for more platforms
fn format_rusage_data(_child: Child) -> Option<String> {
None
}
#[cfg(windows)]
fn format_rusage_data(child: Child) -> Option<String> {
use std::os::windows::io::AsRawHandle;
use winapi::um::{processthreadsapi, psapi, timezoneapi};
let handle = child.as_raw_handle();
macro_rules! try_bool {
($e:expr) => {
if $e != 1 {
return None;
}
};
}
let mut user_filetime = Default::default();
let mut user_time = Default::default();
let mut kernel_filetime = Default::default();
let mut kernel_time = Default::default();
let mut memory_counters = psapi::PROCESS_MEMORY_COUNTERS::default();
unsafe {
try_bool!(processthreadsapi::GetProcessTimes(
handle,
&mut Default::default(),
&mut Default::default(),
&mut kernel_filetime,
&mut user_filetime,
));
try_bool!(timezoneapi::FileTimeToSystemTime(&user_filetime, &mut user_time));
try_bool!(timezoneapi::FileTimeToSystemTime(&kernel_filetime, &mut kernel_time));
// Unlike on Linux with RUSAGE_CHILDREN, this will only return memory information for the process
// with the given handle and none of that process's children.
try_bool!(psapi::GetProcessMemoryInfo(
handle as _,
&mut memory_counters as *mut _ as _,
std::mem::size_of::<psapi::PROCESS_MEMORY_COUNTERS_EX>() as u32,
));
}
// Guide on interpreting these numbers:
// https://docs.microsoft.com/en-us/windows/win32/psapi/process-memory-usage-information
let peak_working_set = memory_counters.PeakWorkingSetSize / 1024;
let peak_page_file = memory_counters.PeakPagefileUsage / 1024;
let peak_paged_pool = memory_counters.QuotaPeakPagedPoolUsage / 1024;
let peak_nonpaged_pool = memory_counters.QuotaPeakNonPagedPoolUsage / 1024;
Some(format!(
"user: {USER_SEC}.{USER_USEC:03} \
sys: {SYS_SEC}.{SYS_USEC:03} \
peak working set (kb): {PEAK_WORKING_SET} \
peak page file usage (kb): {PEAK_PAGE_FILE} \
peak paged pool usage (kb): {PEAK_PAGED_POOL} \
peak non-paged pool usage (kb): {PEAK_NONPAGED_POOL} \
page faults: {PAGE_FAULTS}",
USER_SEC = user_time.wSecond + (user_time.wMinute * 60),
USER_USEC = user_time.wMilliseconds,
SYS_SEC = kernel_time.wSecond + (kernel_time.wMinute * 60),
SYS_USEC = kernel_time.wMilliseconds,
PEAK_WORKING_SET = peak_working_set,
PEAK_PAGE_FILE = peak_page_file,
PEAK_PAGED_POOL = peak_paged_pool,
PEAK_NONPAGED_POOL = peak_nonpaged_pool,
PAGE_FAULTS = memory_counters.PageFaultCount,
))
}
#[cfg(unix)]
/// Tries to build a string with human readable data for several of the rusage
/// fields. Note that we are focusing mainly on data that we believe to be
/// supplied on Linux (the `rusage` struct has other fields in it but they are
/// currently unsupported by Linux).
fn format_rusage_data(_child: Child) -> Option<String> | {
let rusage: libc::rusage = unsafe {
let mut recv = std::mem::zeroed();
// -1 is RUSAGE_CHILDREN, which means to get the rusage for all children
// (and grandchildren, etc) processes that have respectively terminated
// and been waited for.
let retval = libc::getrusage(-1, &mut recv);
if retval != 0 {
return None;
}
recv
};
// Mac OS X reports the maxrss in bytes, not kb.
let divisor = if env::consts::OS == "macos" { 1024 } else { 1 };
let maxrss = (rusage.ru_maxrss + (divisor - 1)) / divisor;
let mut init_str = format!(
"user: {USER_SEC}.{USER_USEC:03} \
sys: {SYS_SEC}.{SYS_USEC:03} \
max rss (kb): {MAXRSS}",
USER_SEC = rusage.ru_utime.tv_sec,
USER_USEC = rusage.ru_utime.tv_usec,
SYS_SEC = rusage.ru_stime.tv_sec,
SYS_USEC = rusage.ru_stime.tv_usec,
MAXRSS = maxrss
);
// The remaining rusage stats vary in platform support. So we treat
// uniformly zero values in each category as "not worth printing", since it
// either means no events of that type occurred, or that the platform
// does not support it.
let minflt = rusage.ru_minflt;
let majflt = rusage.ru_majflt;
if minflt != 0 || majflt != 0 {
init_str.push_str(&format!(" page reclaims: {} page faults: {}", minflt, majflt));
}
let inblock = rusage.ru_inblock;
let oublock = rusage.ru_oublock;
if inblock != 0 || oublock != 0 {
init_str.push_str(&format!(" fs block inputs: {} fs block outputs: {}", inblock, oublock));
}
let nvcsw = rusage.ru_nvcsw;
let nivcsw = rusage.ru_nivcsw;
if nvcsw != 0 || nivcsw != 0 {
init_str.push_str(&format!(
" voluntary ctxt switches: {} involuntary ctxt switches: {}",
nvcsw, nivcsw
));
}
return Some(init_str);
} | identifier_body | |
rustc.rs | //! Shim which is passed to Cargo as "rustc" when running the bootstrap.
//!
//! This shim will take care of some various tasks that our build process
//! requires that Cargo can't quite do through normal configuration:
//!
//! 1. When compiling build scripts and build dependencies, we need a guaranteed
//! full standard library available. The only compiler which actually has
//! this is the snapshot, so we detect this situation and always compile with
//! the snapshot compiler.
//! 2. We pass a bunch of `--cfg` and other flags based on what we're compiling
//! (and this slightly differs based on a whether we're using a snapshot or
//! not), so we do that all here.
//!
//! This may one day be replaced by RUSTFLAGS, but the dynamic nature of
//! switching compilers for the bootstrap and for build scripts will probably
//! never get replaced.
use std::env;
use std::path::PathBuf;
use std::process::{Child, Command};
use std::str::FromStr;
use std::time::Instant;
fn main() {
let args = env::args_os().skip(1).collect::<Vec<_>>();
// Detect whether or not we're a build script depending on whether --target
// is passed (a bit janky...)
let target = args.windows(2).find(|w| &*w[0] == "--target").and_then(|w| w[1].to_str());
let version = args.iter().find(|w| &**w == "-vV");
let verbose = match env::var("RUSTC_VERBOSE") {
Ok(s) => usize::from_str(&s).expect("RUSTC_VERBOSE should be an integer"),
Err(_) => 0,
};
// Use a different compiler for build scripts, since there may not yet be a
// libstd for the real compiler to use. However, if Cargo is attempting to
// determine the version of the compiler, the real compiler needs to be
// used. Currently, these two states are differentiated based on whether
// --target and -vV is/isn't passed.
let (rustc, libdir) = if target.is_none() && version.is_none() {
("RUSTC_SNAPSHOT", "RUSTC_SNAPSHOT_LIBDIR")
} else {
("RUSTC_REAL", "RUSTC_LIBDIR")
};
let stage = env::var("RUSTC_STAGE").expect("RUSTC_STAGE was not set");
let sysroot = env::var_os("RUSTC_SYSROOT").expect("RUSTC_SYSROOT was not set");
let on_fail = env::var_os("RUSTC_ON_FAIL").map(Command::new);
let rustc = env::var_os(rustc).unwrap_or_else(|| panic!("{:?} was not set", rustc));
let libdir = env::var_os(libdir).unwrap_or_else(|| panic!("{:?} was not set", libdir));
let mut dylib_path = bootstrap::util::dylib_path();
dylib_path.insert(0, PathBuf::from(&libdir));
let mut cmd = Command::new(rustc);
cmd.args(&args).env(bootstrap::util::dylib_path_var(), env::join_paths(&dylib_path).unwrap());
// Get the name of the crate we're compiling, if any.
let crate_name =
args.windows(2).find(|args| args[0] == "--crate-name").and_then(|args| args[1].to_str());
if let Some(crate_name) = crate_name {
if let Some(target) = env::var_os("RUSTC_TIME") {
if target == "all"
|| target.into_string().unwrap().split(',').any(|c| c.trim() == crate_name)
{
cmd.arg("-Ztime");
}
}
}
// Print backtrace in case of ICE
if env::var("RUSTC_BACKTRACE_ON_ICE").is_ok() && env::var("RUST_BACKTRACE").is_err() {
cmd.env("RUST_BACKTRACE", "1");
}
if let Ok(lint_flags) = env::var("RUSTC_LINT_FLAGS") {
cmd.args(lint_flags.split_whitespace());
}
if target.is_some() {
// The stage0 compiler has a special sysroot distinct from what we
// actually downloaded, so we just always pass the `--sysroot` option,
// unless one is already set.
if !args.iter().any(|arg| arg == "--sysroot") {
cmd.arg("--sysroot").arg(&sysroot);
}
// If we're compiling specifically the `panic_abort` crate then we pass
// the `-C panic=abort` option. Note that we do not do this for any
// other crate intentionally as this is the only crate for now that we
// ship with panic=abort.
//
// This... is a bit of a hack how we detect this. Ideally this
// information should be encoded in the crate I guess? Would likely
// require an RFC amendment to RFC 1513, however.
//
// `compiler_builtins` are unconditionally compiled with panic=abort to
// workaround undefined references to `rust_eh_unwind_resume` generated
// otherwise, see issue https://github.com/rust-lang/rust/issues/43095.
if crate_name == Some("panic_abort")
|| crate_name == Some("compiler_builtins") && stage != "0"
{
cmd.arg("-C").arg("panic=abort");
}
} else {
// FIXME(rust-lang/cargo#5754) we shouldn't be using special env vars
// here, but rather Cargo should know what flags to pass rustc itself.
// Override linker if necessary.
if let Ok(host_linker) = env::var("RUSTC_HOST_LINKER") {
cmd.arg(format!("-Clinker={}", host_linker));
}
if env::var_os("RUSTC_HOST_FUSE_LD_LLD").is_some() {
cmd.arg("-Clink-args=-fuse-ld=lld");
}
if let Ok(s) = env::var("RUSTC_HOST_CRT_STATIC") {
if s == "true" {
cmd.arg("-C").arg("target-feature=+crt-static");
}
if s == "false" {
cmd.arg("-C").arg("target-feature=-crt-static");
}
}
if stage == "0" {
// Cargo doesn't pass RUSTFLAGS to proc_macros:
// https://github.com/rust-lang/cargo/issues/4423
// Set `--cfg=bootstrap` explicitly instead.
cmd.arg("--cfg=bootstrap");
}
}
if let Ok(map) = env::var("RUSTC_DEBUGINFO_MAP") {
cmd.arg("--remap-path-prefix").arg(&map);
}
// Force all crates compiled by this compiler to (a) be unstable and (b)
// allow the `rustc_private` feature to link to other unstable crates
// also in the sysroot. We also do this for host crates, since those
// may be proc macros, in which case we might ship them.
if env::var_os("RUSTC_FORCE_UNSTABLE").is_some() && (stage != "0" || target.is_some()) {
cmd.arg("-Z").arg("force-unstable-if-unmarked");
}
let is_test = args.iter().any(|a| a == "--test");
if verbose > 1 {
let rust_env_vars =
env::vars().filter(|(k, _)| k.starts_with("RUST") || k.starts_with("CARGO"));
let prefix = if is_test { "[RUSTC-SHIM] rustc --test" } else { "[RUSTC-SHIM] rustc" };
let prefix = match crate_name {
Some(crate_name) => format!("{} {}", prefix, crate_name),
None => prefix.to_string(),
};
for (i, (k, v)) in rust_env_vars.enumerate() {
eprintln!("{} env[{}]: {:?}={:?}", prefix, i, k, v);
}
eprintln!("{} working directory: {}", prefix, env::current_dir().unwrap().display());
eprintln!(
"{} command: {:?}={:?} {:?}",
prefix,
bootstrap::util::dylib_path_var(),
env::join_paths(&dylib_path).unwrap(),
cmd,
);
eprintln!("{} sysroot: {:?}", prefix, sysroot);
eprintln!("{} libdir: {:?}", prefix, libdir);
}
let start = Instant::now();
let (child, status) = {
let errmsg = format!("\nFailed to run:\n{:?}\n-------------", cmd);
let mut child = cmd.spawn().expect(&errmsg);
let status = child.wait().expect(&errmsg);
(child, status)
};
if env::var_os("RUSTC_PRINT_STEP_TIMINGS").is_some()
|| env::var_os("RUSTC_PRINT_STEP_RUSAGE").is_some()
{
if let Some(crate_name) = crate_name {
let dur = start.elapsed();
// If the user requested resource usage data, then
// include that in addition to the timing output.
let rusage_data =
env::var_os("RUSTC_PRINT_STEP_RUSAGE").and_then(|_| format_rusage_data(child));
eprintln!(
"[RUSTC-TIMING] {} test:{} {}.{:03}{}{}",
crate_name,
is_test,
dur.as_secs(),
dur.subsec_millis(),
if rusage_data.is_some() { " " } else { "" },
rusage_data.unwrap_or(String::new()),
);
}
}
if status.success() {
std::process::exit(0);
// note: everything below here is unreachable. do not put code that
// should run on success, after this block.
}
if verbose > 0 {
println!("\nDid not run successfully: {}\n{:?}\n-------------", status, cmd);
}
if let Some(mut on_fail) = on_fail {
on_fail.status().expect("Could not run the on_fail command");
}
// Preserve the exit code. In case of signal, exit with 0xfe since it's
// awkward to preserve this status in a cross-platform way.
match status.code() {
Some(i) => std::process::exit(i),
None => {
eprintln!("rustc exited with {}", status);
std::process::exit(0xfe);
}
}
}
#[cfg(all(not(unix), not(windows)))]
// In the future we can add this for more platforms
fn format_rusage_data(_child: Child) -> Option<String> {
None
}
#[cfg(windows)]
fn | (child: Child) -> Option<String> {
use std::os::windows::io::AsRawHandle;
use winapi::um::{processthreadsapi, psapi, timezoneapi};
let handle = child.as_raw_handle();
macro_rules! try_bool {
($e:expr) => {
if $e != 1 {
return None;
}
};
}
let mut user_filetime = Default::default();
let mut user_time = Default::default();
let mut kernel_filetime = Default::default();
let mut kernel_time = Default::default();
let mut memory_counters = psapi::PROCESS_MEMORY_COUNTERS::default();
unsafe {
try_bool!(processthreadsapi::GetProcessTimes(
handle,
&mut Default::default(),
&mut Default::default(),
&mut kernel_filetime,
&mut user_filetime,
));
try_bool!(timezoneapi::FileTimeToSystemTime(&user_filetime, &mut user_time));
try_bool!(timezoneapi::FileTimeToSystemTime(&kernel_filetime, &mut kernel_time));
// Unlike on Linux with RUSAGE_CHILDREN, this will only return memory information for the process
// with the given handle and none of that process's children.
try_bool!(psapi::GetProcessMemoryInfo(
handle as _,
&mut memory_counters as *mut _ as _,
std::mem::size_of::<psapi::PROCESS_MEMORY_COUNTERS_EX>() as u32,
));
}
// Guide on interpreting these numbers:
// https://docs.microsoft.com/en-us/windows/win32/psapi/process-memory-usage-information
let peak_working_set = memory_counters.PeakWorkingSetSize / 1024;
let peak_page_file = memory_counters.PeakPagefileUsage / 1024;
let peak_paged_pool = memory_counters.QuotaPeakPagedPoolUsage / 1024;
let peak_nonpaged_pool = memory_counters.QuotaPeakNonPagedPoolUsage / 1024;
Some(format!(
"user: {USER_SEC}.{USER_USEC:03} \
sys: {SYS_SEC}.{SYS_USEC:03} \
peak working set (kb): {PEAK_WORKING_SET} \
peak page file usage (kb): {PEAK_PAGE_FILE} \
peak paged pool usage (kb): {PEAK_PAGED_POOL} \
peak non-paged pool usage (kb): {PEAK_NONPAGED_POOL} \
page faults: {PAGE_FAULTS}",
USER_SEC = user_time.wSecond + (user_time.wMinute * 60),
USER_USEC = user_time.wMilliseconds,
SYS_SEC = kernel_time.wSecond + (kernel_time.wMinute * 60),
SYS_USEC = kernel_time.wMilliseconds,
PEAK_WORKING_SET = peak_working_set,
PEAK_PAGE_FILE = peak_page_file,
PEAK_PAGED_POOL = peak_paged_pool,
PEAK_NONPAGED_POOL = peak_nonpaged_pool,
PAGE_FAULTS = memory_counters.PageFaultCount,
))
}
#[cfg(unix)]
/// Tries to build a string with human readable data for several of the rusage
/// fields. Note that we are focusing mainly on data that we believe to be
/// supplied on Linux (the `rusage` struct has other fields in it but they are
/// currently unsupported by Linux).
fn format_rusage_data(_child: Child) -> Option<String> {
let rusage: libc::rusage = unsafe {
let mut recv = std::mem::zeroed();
// -1 is RUSAGE_CHILDREN, which means to get the rusage for all children
// (and grandchildren, etc) processes that have respectively terminated
// and been waited for.
let retval = libc::getrusage(-1, &mut recv);
if retval != 0 {
return None;
}
recv
};
// Mac OS X reports the maxrss in bytes, not kb.
let divisor = if env::consts::OS == "macos" { 1024 } else { 1 };
let maxrss = (rusage.ru_maxrss + (divisor - 1)) / divisor;
let mut init_str = format!(
"user: {USER_SEC}.{USER_USEC:03} \
sys: {SYS_SEC}.{SYS_USEC:03} \
max rss (kb): {MAXRSS}",
USER_SEC = rusage.ru_utime.tv_sec,
USER_USEC = rusage.ru_utime.tv_usec,
SYS_SEC = rusage.ru_stime.tv_sec,
SYS_USEC = rusage.ru_stime.tv_usec,
MAXRSS = maxrss
);
// The remaining rusage stats vary in platform support. So we treat
// uniformly zero values in each category as "not worth printing", since it
// either means no events of that type occurred, or that the platform
// does not support it.
let minflt = rusage.ru_minflt;
let majflt = rusage.ru_majflt;
if minflt != 0 || majflt != 0 {
init_str.push_str(&format!(" page reclaims: {} page faults: {}", minflt, majflt));
}
let inblock = rusage.ru_inblock;
let oublock = rusage.ru_oublock;
if inblock != 0 || oublock != 0 {
init_str.push_str(&format!(" fs block inputs: {} fs block outputs: {}", inblock, oublock));
}
let nvcsw = rusage.ru_nvcsw;
let nivcsw = rusage.ru_nivcsw;
if nvcsw != 0 || nivcsw != 0 {
init_str.push_str(&format!(
" voluntary ctxt switches: {} involuntary ctxt switches: {}",
nvcsw, nivcsw
));
}
return Some(init_str);
}
| format_rusage_data | identifier_name |
rustc.rs | //! Shim which is passed to Cargo as "rustc" when running the bootstrap.
//!
//! This shim will take care of some various tasks that our build process
//! requires that Cargo can't quite do through normal configuration:
//!
//! 1. When compiling build scripts and build dependencies, we need a guaranteed
//! full standard library available. The only compiler which actually has
//! this is the snapshot, so we detect this situation and always compile with
//! the snapshot compiler.
//! 2. We pass a bunch of `--cfg` and other flags based on what we're compiling
//! (and this slightly differs based on a whether we're using a snapshot or
//! not), so we do that all here.
//!
//! This may one day be replaced by RUSTFLAGS, but the dynamic nature of
//! switching compilers for the bootstrap and for build scripts will probably
//! never get replaced.
use std::env;
use std::path::PathBuf;
use std::process::{Child, Command};
use std::str::FromStr;
use std::time::Instant;
fn main() {
let args = env::args_os().skip(1).collect::<Vec<_>>();
// Detect whether or not we're a build script depending on whether --target
// is passed (a bit janky...)
let target = args.windows(2).find(|w| &*w[0] == "--target").and_then(|w| w[1].to_str());
let version = args.iter().find(|w| &**w == "-vV");
let verbose = match env::var("RUSTC_VERBOSE") {
Ok(s) => usize::from_str(&s).expect("RUSTC_VERBOSE should be an integer"),
Err(_) => 0,
};
// Use a different compiler for build scripts, since there may not yet be a
// libstd for the real compiler to use. However, if Cargo is attempting to
// determine the version of the compiler, the real compiler needs to be
// used. Currently, these two states are differentiated based on whether
// --target and -vV is/isn't passed.
let (rustc, libdir) = if target.is_none() && version.is_none() {
("RUSTC_SNAPSHOT", "RUSTC_SNAPSHOT_LIBDIR")
} else {
("RUSTC_REAL", "RUSTC_LIBDIR")
};
let stage = env::var("RUSTC_STAGE").expect("RUSTC_STAGE was not set"); | let rustc = env::var_os(rustc).unwrap_or_else(|| panic!("{:?} was not set", rustc));
let libdir = env::var_os(libdir).unwrap_or_else(|| panic!("{:?} was not set", libdir));
let mut dylib_path = bootstrap::util::dylib_path();
dylib_path.insert(0, PathBuf::from(&libdir));
let mut cmd = Command::new(rustc);
cmd.args(&args).env(bootstrap::util::dylib_path_var(), env::join_paths(&dylib_path).unwrap());
// Get the name of the crate we're compiling, if any.
let crate_name =
args.windows(2).find(|args| args[0] == "--crate-name").and_then(|args| args[1].to_str());
if let Some(crate_name) = crate_name {
if let Some(target) = env::var_os("RUSTC_TIME") {
if target == "all"
|| target.into_string().unwrap().split(',').any(|c| c.trim() == crate_name)
{
cmd.arg("-Ztime");
}
}
}
// Print backtrace in case of ICE
if env::var("RUSTC_BACKTRACE_ON_ICE").is_ok() && env::var("RUST_BACKTRACE").is_err() {
cmd.env("RUST_BACKTRACE", "1");
}
if let Ok(lint_flags) = env::var("RUSTC_LINT_FLAGS") {
cmd.args(lint_flags.split_whitespace());
}
if target.is_some() {
// The stage0 compiler has a special sysroot distinct from what we
// actually downloaded, so we just always pass the `--sysroot` option,
// unless one is already set.
if !args.iter().any(|arg| arg == "--sysroot") {
cmd.arg("--sysroot").arg(&sysroot);
}
// If we're compiling specifically the `panic_abort` crate then we pass
// the `-C panic=abort` option. Note that we do not do this for any
// other crate intentionally as this is the only crate for now that we
// ship with panic=abort.
//
// This... is a bit of a hack how we detect this. Ideally this
// information should be encoded in the crate I guess? Would likely
// require an RFC amendment to RFC 1513, however.
//
// `compiler_builtins` are unconditionally compiled with panic=abort to
// workaround undefined references to `rust_eh_unwind_resume` generated
// otherwise, see issue https://github.com/rust-lang/rust/issues/43095.
if crate_name == Some("panic_abort")
|| crate_name == Some("compiler_builtins") && stage != "0"
{
cmd.arg("-C").arg("panic=abort");
}
} else {
// FIXME(rust-lang/cargo#5754) we shouldn't be using special env vars
// here, but rather Cargo should know what flags to pass rustc itself.
// Override linker if necessary.
if let Ok(host_linker) = env::var("RUSTC_HOST_LINKER") {
cmd.arg(format!("-Clinker={}", host_linker));
}
if env::var_os("RUSTC_HOST_FUSE_LD_LLD").is_some() {
cmd.arg("-Clink-args=-fuse-ld=lld");
}
if let Ok(s) = env::var("RUSTC_HOST_CRT_STATIC") {
if s == "true" {
cmd.arg("-C").arg("target-feature=+crt-static");
}
if s == "false" {
cmd.arg("-C").arg("target-feature=-crt-static");
}
}
if stage == "0" {
// Cargo doesn't pass RUSTFLAGS to proc_macros:
// https://github.com/rust-lang/cargo/issues/4423
// Set `--cfg=bootstrap` explicitly instead.
cmd.arg("--cfg=bootstrap");
}
}
if let Ok(map) = env::var("RUSTC_DEBUGINFO_MAP") {
cmd.arg("--remap-path-prefix").arg(&map);
}
// Force all crates compiled by this compiler to (a) be unstable and (b)
// allow the `rustc_private` feature to link to other unstable crates
// also in the sysroot. We also do this for host crates, since those
// may be proc macros, in which case we might ship them.
if env::var_os("RUSTC_FORCE_UNSTABLE").is_some() && (stage != "0" || target.is_some()) {
cmd.arg("-Z").arg("force-unstable-if-unmarked");
}
let is_test = args.iter().any(|a| a == "--test");
if verbose > 1 {
let rust_env_vars =
env::vars().filter(|(k, _)| k.starts_with("RUST") || k.starts_with("CARGO"));
let prefix = if is_test { "[RUSTC-SHIM] rustc --test" } else { "[RUSTC-SHIM] rustc" };
let prefix = match crate_name {
Some(crate_name) => format!("{} {}", prefix, crate_name),
None => prefix.to_string(),
};
for (i, (k, v)) in rust_env_vars.enumerate() {
eprintln!("{} env[{}]: {:?}={:?}", prefix, i, k, v);
}
eprintln!("{} working directory: {}", prefix, env::current_dir().unwrap().display());
eprintln!(
"{} command: {:?}={:?} {:?}",
prefix,
bootstrap::util::dylib_path_var(),
env::join_paths(&dylib_path).unwrap(),
cmd,
);
eprintln!("{} sysroot: {:?}", prefix, sysroot);
eprintln!("{} libdir: {:?}", prefix, libdir);
}
let start = Instant::now();
let (child, status) = {
let errmsg = format!("\nFailed to run:\n{:?}\n-------------", cmd);
let mut child = cmd.spawn().expect(&errmsg);
let status = child.wait().expect(&errmsg);
(child, status)
};
if env::var_os("RUSTC_PRINT_STEP_TIMINGS").is_some()
|| env::var_os("RUSTC_PRINT_STEP_RUSAGE").is_some()
{
if let Some(crate_name) = crate_name {
let dur = start.elapsed();
// If the user requested resource usage data, then
// include that in addition to the timing output.
let rusage_data =
env::var_os("RUSTC_PRINT_STEP_RUSAGE").and_then(|_| format_rusage_data(child));
eprintln!(
"[RUSTC-TIMING] {} test:{} {}.{:03}{}{}",
crate_name,
is_test,
dur.as_secs(),
dur.subsec_millis(),
if rusage_data.is_some() { " " } else { "" },
rusage_data.unwrap_or(String::new()),
);
}
}
if status.success() {
std::process::exit(0);
// note: everything below here is unreachable. do not put code that
// should run on success, after this block.
}
if verbose > 0 {
println!("\nDid not run successfully: {}\n{:?}\n-------------", status, cmd);
}
if let Some(mut on_fail) = on_fail {
on_fail.status().expect("Could not run the on_fail command");
}
// Preserve the exit code. In case of signal, exit with 0xfe since it's
// awkward to preserve this status in a cross-platform way.
match status.code() {
Some(i) => std::process::exit(i),
None => {
eprintln!("rustc exited with {}", status);
std::process::exit(0xfe);
}
}
}
#[cfg(all(not(unix), not(windows)))]
// In the future we can add this for more platforms
fn format_rusage_data(_child: Child) -> Option<String> {
None
}
#[cfg(windows)]
fn format_rusage_data(child: Child) -> Option<String> {
use std::os::windows::io::AsRawHandle;
use winapi::um::{processthreadsapi, psapi, timezoneapi};
let handle = child.as_raw_handle();
macro_rules! try_bool {
($e:expr) => {
if $e != 1 {
return None;
}
};
}
let mut user_filetime = Default::default();
let mut user_time = Default::default();
let mut kernel_filetime = Default::default();
let mut kernel_time = Default::default();
let mut memory_counters = psapi::PROCESS_MEMORY_COUNTERS::default();
unsafe {
try_bool!(processthreadsapi::GetProcessTimes(
handle,
&mut Default::default(),
&mut Default::default(),
&mut kernel_filetime,
&mut user_filetime,
));
try_bool!(timezoneapi::FileTimeToSystemTime(&user_filetime, &mut user_time));
try_bool!(timezoneapi::FileTimeToSystemTime(&kernel_filetime, &mut kernel_time));
// Unlike on Linux with RUSAGE_CHILDREN, this will only return memory information for the process
// with the given handle and none of that process's children.
try_bool!(psapi::GetProcessMemoryInfo(
handle as _,
&mut memory_counters as *mut _ as _,
std::mem::size_of::<psapi::PROCESS_MEMORY_COUNTERS_EX>() as u32,
));
}
// Guide on interpreting these numbers:
// https://docs.microsoft.com/en-us/windows/win32/psapi/process-memory-usage-information
let peak_working_set = memory_counters.PeakWorkingSetSize / 1024;
let peak_page_file = memory_counters.PeakPagefileUsage / 1024;
let peak_paged_pool = memory_counters.QuotaPeakPagedPoolUsage / 1024;
let peak_nonpaged_pool = memory_counters.QuotaPeakNonPagedPoolUsage / 1024;
Some(format!(
"user: {USER_SEC}.{USER_USEC:03} \
sys: {SYS_SEC}.{SYS_USEC:03} \
peak working set (kb): {PEAK_WORKING_SET} \
peak page file usage (kb): {PEAK_PAGE_FILE} \
peak paged pool usage (kb): {PEAK_PAGED_POOL} \
peak non-paged pool usage (kb): {PEAK_NONPAGED_POOL} \
page faults: {PAGE_FAULTS}",
USER_SEC = user_time.wSecond + (user_time.wMinute * 60),
USER_USEC = user_time.wMilliseconds,
SYS_SEC = kernel_time.wSecond + (kernel_time.wMinute * 60),
SYS_USEC = kernel_time.wMilliseconds,
PEAK_WORKING_SET = peak_working_set,
PEAK_PAGE_FILE = peak_page_file,
PEAK_PAGED_POOL = peak_paged_pool,
PEAK_NONPAGED_POOL = peak_nonpaged_pool,
PAGE_FAULTS = memory_counters.PageFaultCount,
))
}
#[cfg(unix)]
/// Tries to build a string with human readable data for several of the rusage
/// fields. Note that we are focusing mainly on data that we believe to be
/// supplied on Linux (the `rusage` struct has other fields in it but they are
/// currently unsupported by Linux).
fn format_rusage_data(_child: Child) -> Option<String> {
let rusage: libc::rusage = unsafe {
let mut recv = std::mem::zeroed();
// -1 is RUSAGE_CHILDREN, which means to get the rusage for all children
// (and grandchildren, etc) processes that have respectively terminated
// and been waited for.
let retval = libc::getrusage(-1, &mut recv);
if retval != 0 {
return None;
}
recv
};
// Mac OS X reports the maxrss in bytes, not kb.
let divisor = if env::consts::OS == "macos" { 1024 } else { 1 };
let maxrss = (rusage.ru_maxrss + (divisor - 1)) / divisor;
let mut init_str = format!(
"user: {USER_SEC}.{USER_USEC:03} \
sys: {SYS_SEC}.{SYS_USEC:03} \
max rss (kb): {MAXRSS}",
USER_SEC = rusage.ru_utime.tv_sec,
USER_USEC = rusage.ru_utime.tv_usec,
SYS_SEC = rusage.ru_stime.tv_sec,
SYS_USEC = rusage.ru_stime.tv_usec,
MAXRSS = maxrss
);
// The remaining rusage stats vary in platform support. So we treat
// uniformly zero values in each category as "not worth printing", since it
// either means no events of that type occurred, or that the platform
// does not support it.
let minflt = rusage.ru_minflt;
let majflt = rusage.ru_majflt;
if minflt != 0 || majflt != 0 {
init_str.push_str(&format!(" page reclaims: {} page faults: {}", minflt, majflt));
}
let inblock = rusage.ru_inblock;
let oublock = rusage.ru_oublock;
if inblock != 0 || oublock != 0 {
init_str.push_str(&format!(" fs block inputs: {} fs block outputs: {}", inblock, oublock));
}
let nvcsw = rusage.ru_nvcsw;
let nivcsw = rusage.ru_nivcsw;
if nvcsw != 0 || nivcsw != 0 {
init_str.push_str(&format!(
" voluntary ctxt switches: {} involuntary ctxt switches: {}",
nvcsw, nivcsw
));
}
return Some(init_str);
} | let sysroot = env::var_os("RUSTC_SYSROOT").expect("RUSTC_SYSROOT was not set");
let on_fail = env::var_os("RUSTC_ON_FAIL").map(Command::new);
| random_line_split |
scroll_viewer_mode.rs | /// The `ScrollMode` defines the mode of a scroll direction.
#[derive(Copy, Debug, Clone, PartialEq)]
pub enum ScrollMode {
/// Scrolling will process by `ScrollViewer` logic
Auto,
/// Scrolling could be handled from outside. It will not be
/// process by `ScrollViewer` logic.
Custom,
/// Scrolling will be disabled.
Disabled,
}
impl Default for ScrollMode {
fn default() -> Self {
ScrollMode::Auto
}
}
impl From<&str> for ScrollMode {
fn | (s: &str) -> ScrollMode {
match s {
"Custom" | "custom" => ScrollMode::Custom,
"Disabled" | "disabled" => ScrollMode::Disabled,
_ => ScrollMode::Auto,
}
}
}
/// `ScrollViewerMode` describes the vertical and horizontal scroll
/// behavior of the `ScrollViewer`.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct ScrollViewerMode {
/// Vertical scroll mode.
pub vertical: ScrollMode,
/// Horizontal scroll mode.
pub horizontal: ScrollMode,
}
// --- Conversions ---
impl From<(&str, &str)> for ScrollViewerMode {
fn from(s: (&str, &str)) -> ScrollViewerMode {
ScrollViewerMode {
horizontal: ScrollMode::from(s.0),
vertical: ScrollMode::from(s.1),
}
}
}
impl Default for ScrollViewerMode {
fn default() -> ScrollViewerMode {
ScrollViewerMode {
vertical: ScrollMode::Auto,
horizontal: ScrollMode::Auto,
}
}
}
| from | identifier_name |
scroll_viewer_mode.rs | /// The `ScrollMode` defines the mode of a scroll direction.
#[derive(Copy, Debug, Clone, PartialEq)]
pub enum ScrollMode {
/// Scrolling will process by `ScrollViewer` logic
Auto,
/// Scrolling could be handled from outside. It will not be
/// process by `ScrollViewer` logic.
Custom,
/// Scrolling will be disabled.
Disabled,
}
impl Default for ScrollMode {
fn default() -> Self {
ScrollMode::Auto
}
}
impl From<&str> for ScrollMode {
fn from(s: &str) -> ScrollMode {
match s {
"Custom" | "custom" => ScrollMode::Custom,
"Disabled" | "disabled" => ScrollMode::Disabled,
_ => ScrollMode::Auto,
}
}
}
/// `ScrollViewerMode` describes the vertical and horizontal scroll
/// behavior of the `ScrollViewer`.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct ScrollViewerMode {
/// Vertical scroll mode.
pub vertical: ScrollMode,
/// Horizontal scroll mode.
pub horizontal: ScrollMode,
}
// --- Conversions ---
impl From<(&str, &str)> for ScrollViewerMode {
fn from(s: (&str, &str)) -> ScrollViewerMode {
ScrollViewerMode {
horizontal: ScrollMode::from(s.0), | }
}
}
impl Default for ScrollViewerMode {
fn default() -> ScrollViewerMode {
ScrollViewerMode {
vertical: ScrollMode::Auto,
horizontal: ScrollMode::Auto,
}
}
} | vertical: ScrollMode::from(s.1), | random_line_split |
ember-weakmap-polyfill.js | /* globals Ember, require */
(function() {
var _Ember;
var id = 0;
var dateKey = new Date().getTime();
if (typeof Ember !== 'undefined') {
_Ember = Ember;
} else {
_Ember = require('ember').default;
}
function symbol() {
return '__ember' + dateKey + id++;
}
function UNDEFINED() {}
function FakeWeakMap(iterable) {
this._id = symbol();
if (iterable === null || iterable === undefined) {
return;
} else if (Array.isArray(iterable)) {
for (var i = 0; i < iterable.length; i++) {
var key = iterable[i][0];
var value = iterable[i][1];
this.set(key, value);
}
} else {
throw new TypeError('The weak map constructor polyfill only supports an array argument');
}
}
if (!_Ember.WeakMap) {
var meta = _Ember.meta;
var metaKey = symbol();
/*
* @method get
* @param key {Object}
* @return {*} stored value
*/
FakeWeakMap.prototype.get = function(obj) {
var metaInfo = meta(obj);
var metaObject = metaInfo[metaKey];
if (metaInfo && metaObject) {
if (metaObject[this._id] === UNDEFINED) {
return undefined;
}
return metaObject[this._id];
}
}
/*
* @method set
* @param key {Object}
* @param value {Any}
* @return {Any} stored value
*/
FakeWeakMap.prototype.set = function(obj, value) {
var type = typeof obj;
if (!obj || (type !== 'object' && type !== 'function')) {
throw new TypeError('Invalid value used as weak map key');
}
var metaInfo = meta(obj);
if (value === undefined) {
value = UNDEFINED;
}
if (!metaInfo[metaKey]) {
metaInfo[metaKey] = {};
}
metaInfo[metaKey][this._id] = value;
return this;
}
/*
* @method has
* @param key {Object}
* @return {Boolean} if the key exists
*/
FakeWeakMap.prototype.has = function(obj) {
var metaInfo = meta(obj);
var metaObject = metaInfo[metaKey];
return (metaObject && metaObject[this._id] !== undefined);
}
/*
* @method delete
* @param key {Object} |
if (this.has(obj)) {
delete metaInfo[metaKey][this._id];
return true;
}
return false;
}
if (typeof WeakMap === 'function' && typeof window !== 'undefined' && window.OVERRIDE_WEAKMAP !== true) {
_Ember.WeakMap = WeakMap;
} else {
_Ember.WeakMap = FakeWeakMap;
}
}
})(); | */
FakeWeakMap.prototype.delete = function(obj) {
var metaInfo = meta(obj); | random_line_split |
ember-weakmap-polyfill.js | /* globals Ember, require */
(function() {
var _Ember;
var id = 0;
var dateKey = new Date().getTime();
if (typeof Ember !== 'undefined') | else {
_Ember = require('ember').default;
}
function symbol() {
return '__ember' + dateKey + id++;
}
function UNDEFINED() {}
function FakeWeakMap(iterable) {
this._id = symbol();
if (iterable === null || iterable === undefined) {
return;
} else if (Array.isArray(iterable)) {
for (var i = 0; i < iterable.length; i++) {
var key = iterable[i][0];
var value = iterable[i][1];
this.set(key, value);
}
} else {
throw new TypeError('The weak map constructor polyfill only supports an array argument');
}
}
if (!_Ember.WeakMap) {
var meta = _Ember.meta;
var metaKey = symbol();
/*
* @method get
* @param key {Object}
* @return {*} stored value
*/
FakeWeakMap.prototype.get = function(obj) {
var metaInfo = meta(obj);
var metaObject = metaInfo[metaKey];
if (metaInfo && metaObject) {
if (metaObject[this._id] === UNDEFINED) {
return undefined;
}
return metaObject[this._id];
}
}
/*
* @method set
* @param key {Object}
* @param value {Any}
* @return {Any} stored value
*/
FakeWeakMap.prototype.set = function(obj, value) {
var type = typeof obj;
if (!obj || (type !== 'object' && type !== 'function')) {
throw new TypeError('Invalid value used as weak map key');
}
var metaInfo = meta(obj);
if (value === undefined) {
value = UNDEFINED;
}
if (!metaInfo[metaKey]) {
metaInfo[metaKey] = {};
}
metaInfo[metaKey][this._id] = value;
return this;
}
/*
* @method has
* @param key {Object}
* @return {Boolean} if the key exists
*/
FakeWeakMap.prototype.has = function(obj) {
var metaInfo = meta(obj);
var metaObject = metaInfo[metaKey];
return (metaObject && metaObject[this._id] !== undefined);
}
/*
* @method delete
* @param key {Object}
*/
FakeWeakMap.prototype.delete = function(obj) {
var metaInfo = meta(obj);
if (this.has(obj)) {
delete metaInfo[metaKey][this._id];
return true;
}
return false;
}
if (typeof WeakMap === 'function' && typeof window !== 'undefined' && window.OVERRIDE_WEAKMAP !== true) {
_Ember.WeakMap = WeakMap;
} else {
_Ember.WeakMap = FakeWeakMap;
}
}
})();
| {
_Ember = Ember;
} | conditional_block |
ember-weakmap-polyfill.js | /* globals Ember, require */
(function() {
var _Ember;
var id = 0;
var dateKey = new Date().getTime();
if (typeof Ember !== 'undefined') {
_Ember = Ember;
} else {
_Ember = require('ember').default;
}
function symbol() {
return '__ember' + dateKey + id++;
}
function | () {}
function FakeWeakMap(iterable) {
this._id = symbol();
if (iterable === null || iterable === undefined) {
return;
} else if (Array.isArray(iterable)) {
for (var i = 0; i < iterable.length; i++) {
var key = iterable[i][0];
var value = iterable[i][1];
this.set(key, value);
}
} else {
throw new TypeError('The weak map constructor polyfill only supports an array argument');
}
}
if (!_Ember.WeakMap) {
var meta = _Ember.meta;
var metaKey = symbol();
/*
* @method get
* @param key {Object}
* @return {*} stored value
*/
FakeWeakMap.prototype.get = function(obj) {
var metaInfo = meta(obj);
var metaObject = metaInfo[metaKey];
if (metaInfo && metaObject) {
if (metaObject[this._id] === UNDEFINED) {
return undefined;
}
return metaObject[this._id];
}
}
/*
* @method set
* @param key {Object}
* @param value {Any}
* @return {Any} stored value
*/
FakeWeakMap.prototype.set = function(obj, value) {
var type = typeof obj;
if (!obj || (type !== 'object' && type !== 'function')) {
throw new TypeError('Invalid value used as weak map key');
}
var metaInfo = meta(obj);
if (value === undefined) {
value = UNDEFINED;
}
if (!metaInfo[metaKey]) {
metaInfo[metaKey] = {};
}
metaInfo[metaKey][this._id] = value;
return this;
}
/*
* @method has
* @param key {Object}
* @return {Boolean} if the key exists
*/
FakeWeakMap.prototype.has = function(obj) {
var metaInfo = meta(obj);
var metaObject = metaInfo[metaKey];
return (metaObject && metaObject[this._id] !== undefined);
}
/*
* @method delete
* @param key {Object}
*/
FakeWeakMap.prototype.delete = function(obj) {
var metaInfo = meta(obj);
if (this.has(obj)) {
delete metaInfo[metaKey][this._id];
return true;
}
return false;
}
if (typeof WeakMap === 'function' && typeof window !== 'undefined' && window.OVERRIDE_WEAKMAP !== true) {
_Ember.WeakMap = WeakMap;
} else {
_Ember.WeakMap = FakeWeakMap;
}
}
})();
| UNDEFINED | identifier_name |
ember-weakmap-polyfill.js | /* globals Ember, require */
(function() {
var _Ember;
var id = 0;
var dateKey = new Date().getTime();
if (typeof Ember !== 'undefined') {
_Ember = Ember;
} else {
_Ember = require('ember').default;
}
function symbol() |
function UNDEFINED() {}
function FakeWeakMap(iterable) {
this._id = symbol();
if (iterable === null || iterable === undefined) {
return;
} else if (Array.isArray(iterable)) {
for (var i = 0; i < iterable.length; i++) {
var key = iterable[i][0];
var value = iterable[i][1];
this.set(key, value);
}
} else {
throw new TypeError('The weak map constructor polyfill only supports an array argument');
}
}
if (!_Ember.WeakMap) {
var meta = _Ember.meta;
var metaKey = symbol();
/*
* @method get
* @param key {Object}
* @return {*} stored value
*/
FakeWeakMap.prototype.get = function(obj) {
var metaInfo = meta(obj);
var metaObject = metaInfo[metaKey];
if (metaInfo && metaObject) {
if (metaObject[this._id] === UNDEFINED) {
return undefined;
}
return metaObject[this._id];
}
}
/*
* @method set
* @param key {Object}
* @param value {Any}
* @return {Any} stored value
*/
FakeWeakMap.prototype.set = function(obj, value) {
var type = typeof obj;
if (!obj || (type !== 'object' && type !== 'function')) {
throw new TypeError('Invalid value used as weak map key');
}
var metaInfo = meta(obj);
if (value === undefined) {
value = UNDEFINED;
}
if (!metaInfo[metaKey]) {
metaInfo[metaKey] = {};
}
metaInfo[metaKey][this._id] = value;
return this;
}
/*
* @method has
* @param key {Object}
* @return {Boolean} if the key exists
*/
FakeWeakMap.prototype.has = function(obj) {
var metaInfo = meta(obj);
var metaObject = metaInfo[metaKey];
return (metaObject && metaObject[this._id] !== undefined);
}
/*
* @method delete
* @param key {Object}
*/
FakeWeakMap.prototype.delete = function(obj) {
var metaInfo = meta(obj);
if (this.has(obj)) {
delete metaInfo[metaKey][this._id];
return true;
}
return false;
}
if (typeof WeakMap === 'function' && typeof window !== 'undefined' && window.OVERRIDE_WEAKMAP !== true) {
_Ember.WeakMap = WeakMap;
} else {
_Ember.WeakMap = FakeWeakMap;
}
}
})();
| {
return '__ember' + dateKey + id++;
} | identifier_body |
index.ts | import styled from 'styled-components' |
import Img from '@/Img'
import { theme } from '@/utils/themes'
import css from '@/utils/css'
export const Wrapper = styled.div<{ center: boolean }>`
${css.flex()};
justify-content: ${({ center }) => (center ? 'center' : 'flex-start')};
flex-wrap: wrap;
color: ${theme('thread.articleDigest')};
width: 100%;
${css.media.mobile`
padding: 0 30px;
overflow-x: hidden;
`};
`
type TBlock = { level: string }
export const Block = styled.div<TBlock>`
${css.flexColumn('justify-between')};
width: ${({ level }) => (level === 'gold' ? '20%' : '25%')};
height: ${({ level }) => (level === 'gold' ? '280px' : '130px')};
padding: ${({ level }) => (level === 'gold' ? '25px 25px' : '18px 25px')};
border: 1px solid transparent;
margin-bottom: ${({ level }) => (level === 'gold' ? '20px' : '10px')};
:last-child {
border-right: none;
}
&:hover {
background: #04313e;
border-color: #074c61;
border: 1px solid #044c5f;
cursor: pointer;
padding-top: 12px;
}
transition: all 0.2s;
transition-delay: 0.2s;
${css.media.mobile`
width: 50%;
padding: 0;
`};
`
export const Header = styled.div`
${css.flexColumn()};
`
export const IntroHead = styled.div`
${css.flex('align-center', 'justify-between')};
width: 100%;
&:hover {
cursor: pointer;
}
${css.media.mobile`
${css.flex('align-both')};
padding-right: 20%;
margin-top: 45px;
`};
`
export const Icon = styled.div`
${css.size(20)};
background: #024b59;
border-radius: 4px;
`
export const Title = styled.div<{ level: string }>`
color: ${theme('thread.articleTitle')};
border-top: 1px solid;
border-color: ${theme('thread.articleTitle')};
font-size: ${({ level }) => (level === 'gold' ? '18px' : '16px')};
cursor: pointer;
padding-top: 5px;
${Block}:hover & {
padding-top: 0;
border-color: transparent;
}
transition: all 0.2s;
transition-delay: 0.2s;
`
export const IntroGoldHolder = styled.div`
background: #003a47;
width: 100%;
height: 100px;
`
export const IntroImg = styled(Img)`
width: 100%;
height: 100px;
object-fit: cover;
display: block;
`
export const Desc = styled.div<{ level: string }>`
color: ${theme('thread.articleDigest')};
font-size: ${({ level }) => (level === 'gold' ? '14px' : '13px')};
cursor: pointer;
height: 45px;
margin-top: ${({ level }) => (level === 'gold' ? '3px' : '5px')};
position: relative;
display: -webkit-box;
-webkit-line-clamp: 2;
overflow: hidden;
text-overflow: ellipsis;
-webkit-box-orient: vertical;
${css.media.mobile`
padding-right: 20px;
height: auto;
`};
${Block}:hover & {
color: ${theme('thread.articleTitle')};
opacity: 1;
}
transition: all 0.3s;
transition-delay: 0.2s;
`
export const LinkWrapper = styled.div`
opacity: 0;
${Block}:hover & {
opacity: 1;
}
transition: all 0.2s;
transition-delay: 0.2s;
` | random_line_split | |
process_mz_query.py | from __future__ import print_function
import numpy as np
import sys
import bisect
import datetime
import gzip
def | (s):
print("[" + str(datetime.datetime.now()) + "] " + s, file=sys.stderr)
if len(sys.argv) < 3:
print("Usage: process_mz_query.py dump_file[.gz] query_file")
exit(0)
my_print("Reading dump file from %s..." % sys.argv[1])
if sys.argv[1][-2:] == 'gz':
f = gzip.open(sys.argv[1], 'rb')
else:
f = open(sys.argv[1])
spectra = []
arr = []
for line in f:
arr = line.strip().split("|")
if len(arr) < 3:
continue
spectra.append( ( arr[0], np.array([ float(x) for x in arr[2].split(" ") ]), np.array([ float(x) for x in arr[1].split(" ") ]) ) )
f.close()
## at this point, spectra array contains triples of the form
## (group_id, list of mzs, list of intensities)
my_print("Reading and processing queries from %s..." % sys.argv[2])
def get_one_group_total(mz_lower, mz_upper, mzs, intensities):
return np.sum(intensities[ bisect.bisect_left(mzs, mz_lower) : bisect.bisect_right(mzs, mz_upper) ])
def get_all_totals(mz, tol, spectra):
mz_lower = mz - tol
mz_upper = mz + tol
return [ (x[0], get_one_group_total(mz_lower, mz_upper, x[1], x[2])) for x in spectra ]
with open(sys.argv[2]) as f:
for line in f:
arr = line.strip().split(",")
print(" ".join([ "%s:%.3f" % x for x in get_all_totals(float(arr[0]), float(arr[1]), spectra)]))
my_print("All done!")
exit(0)
| my_print | identifier_name |
process_mz_query.py | from __future__ import print_function
import numpy as np
import sys
import bisect
import datetime
import gzip
def my_print(s):
print("[" + str(datetime.datetime.now()) + "] " + s, file=sys.stderr)
if len(sys.argv) < 3:
print("Usage: process_mz_query.py dump_file[.gz] query_file")
exit(0)
my_print("Reading dump file from %s..." % sys.argv[1])
if sys.argv[1][-2:] == 'gz':
f = gzip.open(sys.argv[1], 'rb')
else:
f = open(sys.argv[1])
spectra = []
arr = []
for line in f:
arr = line.strip().split("|")
if len(arr) < 3:
continue
spectra.append( ( arr[0], np.array([ float(x) for x in arr[2].split(" ") ]), np.array([ float(x) for x in arr[1].split(" ") ]) ) )
f.close()
## at this point, spectra array contains triples of the form
## (group_id, list of mzs, list of intensities) |
my_print("Reading and processing queries from %s..." % sys.argv[2])
def get_one_group_total(mz_lower, mz_upper, mzs, intensities):
return np.sum(intensities[ bisect.bisect_left(mzs, mz_lower) : bisect.bisect_right(mzs, mz_upper) ])
def get_all_totals(mz, tol, spectra):
mz_lower = mz - tol
mz_upper = mz + tol
return [ (x[0], get_one_group_total(mz_lower, mz_upper, x[1], x[2])) for x in spectra ]
with open(sys.argv[2]) as f:
for line in f:
arr = line.strip().split(",")
print(" ".join([ "%s:%.3f" % x for x in get_all_totals(float(arr[0]), float(arr[1]), spectra)]))
my_print("All done!")
exit(0) | random_line_split | |
process_mz_query.py | from __future__ import print_function
import numpy as np
import sys
import bisect
import datetime
import gzip
def my_print(s):
|
if len(sys.argv) < 3:
print("Usage: process_mz_query.py dump_file[.gz] query_file")
exit(0)
my_print("Reading dump file from %s..." % sys.argv[1])
if sys.argv[1][-2:] == 'gz':
f = gzip.open(sys.argv[1], 'rb')
else:
f = open(sys.argv[1])
spectra = []
arr = []
for line in f:
arr = line.strip().split("|")
if len(arr) < 3:
continue
spectra.append( ( arr[0], np.array([ float(x) for x in arr[2].split(" ") ]), np.array([ float(x) for x in arr[1].split(" ") ]) ) )
f.close()
## at this point, spectra array contains triples of the form
## (group_id, list of mzs, list of intensities)
my_print("Reading and processing queries from %s..." % sys.argv[2])
def get_one_group_total(mz_lower, mz_upper, mzs, intensities):
return np.sum(intensities[ bisect.bisect_left(mzs, mz_lower) : bisect.bisect_right(mzs, mz_upper) ])
def get_all_totals(mz, tol, spectra):
mz_lower = mz - tol
mz_upper = mz + tol
return [ (x[0], get_one_group_total(mz_lower, mz_upper, x[1], x[2])) for x in spectra ]
with open(sys.argv[2]) as f:
for line in f:
arr = line.strip().split(",")
print(" ".join([ "%s:%.3f" % x for x in get_all_totals(float(arr[0]), float(arr[1]), spectra)]))
my_print("All done!")
exit(0)
| print("[" + str(datetime.datetime.now()) + "] " + s, file=sys.stderr) | identifier_body |
process_mz_query.py | from __future__ import print_function
import numpy as np
import sys
import bisect
import datetime
import gzip
def my_print(s):
print("[" + str(datetime.datetime.now()) + "] " + s, file=sys.stderr)
if len(sys.argv) < 3:
print("Usage: process_mz_query.py dump_file[.gz] query_file")
exit(0)
my_print("Reading dump file from %s..." % sys.argv[1])
if sys.argv[1][-2:] == 'gz':
f = gzip.open(sys.argv[1], 'rb')
else:
f = open(sys.argv[1])
spectra = []
arr = []
for line in f:
arr = line.strip().split("|")
if len(arr) < 3:
|
spectra.append( ( arr[0], np.array([ float(x) for x in arr[2].split(" ") ]), np.array([ float(x) for x in arr[1].split(" ") ]) ) )
f.close()
## at this point, spectra array contains triples of the form
## (group_id, list of mzs, list of intensities)
my_print("Reading and processing queries from %s..." % sys.argv[2])
def get_one_group_total(mz_lower, mz_upper, mzs, intensities):
return np.sum(intensities[ bisect.bisect_left(mzs, mz_lower) : bisect.bisect_right(mzs, mz_upper) ])
def get_all_totals(mz, tol, spectra):
mz_lower = mz - tol
mz_upper = mz + tol
return [ (x[0], get_one_group_total(mz_lower, mz_upper, x[1], x[2])) for x in spectra ]
with open(sys.argv[2]) as f:
for line in f:
arr = line.strip().split(",")
print(" ".join([ "%s:%.3f" % x for x in get_all_totals(float(arr[0]), float(arr[1]), spectra)]))
my_print("All done!")
exit(0)
| continue | conditional_block |
Draggable.ts | import { Pair } from '@/types'
import { assert, normalize, pair, zip } from '@/utils'
import CanvasZoom from '@/view/CanvasZoom'
import Element from '@/view/Element'
interface Placeable {
position: Pair<number>
}
interface DragOptions {
type: DragType,
position: MousePosition,
element: HTMLElement,
object: Placeable,
zoom: CanvasZoom,
callback?: () => void
}
export interface MousePosition {
clientX: number,
clientY: number
}
export enum DragType {
DRAG, STICK
}
export class Draggable {
public element: HTMLElement
public object: Placeable
public zoom: CanvasZoom
private offset: Pair<number>
public callback: () => void
constructor({ type, position, element, object, zoom, callback }: DragOptions) | [DragType.STICK](position: MousePosition): void {
assert(this.element.parentElement)
const parentProp = this.element.parentElement.getBoundingClientRect() //? use custom Element
this.offset = [ parentProp.x + 50, parentProp.y + 10 ] as Pair<number>
this.dragging(position)
document.addEventListener('mousemove', this.dragging)
document.addEventListener('mousedown', this.endDrag, { once: true })
}
[DragType.DRAG]({ clientX, clientY }: MousePosition): void {
const selfPos = this.element.getBoundingClientRect()
assert(this.element.parentElement)
const parentPos = this.element.parentElement.getBoundingClientRect()
this.offset = [ clientX - selfPos.x + parentPos.x, clientY - selfPos.y + parentPos.y ] //# zip map normalize
// const offset = zip([clientX, clientY], [selfPos.x, selfPos.y], [], [parentPos.x, parentPos.y]).map(normalize)
document.addEventListener('mousemove', this.dragging)
document.addEventListener('mouseup', this.endDrag, { once: true })
}
dragging = ({ clientX, clientY }: MousePosition): void => {
// $.Draggable.log(`├──> client=[${e.clientX}, ${e.clientY}], offset=${this.offset}`)
const pos = zip([clientX, clientY], this.offset, pair(this.zoom.level), []).map(normalize)
// $.Draggable.pipe()
// $.Draggable.log(`└──> new position = [${targetPosition}]`)
// $.Draggable.indent()
this.object.position = pos as Pair<number>
// $.Draggable.unindent()
// $.Draggable.unindent()
this.callback()
}
endDrag = (): void => {
// $.Draggable.log('└──/ dragging ended')
document.removeEventListener('mousemove', this.dragging)
}
} | {
// console.log('Draggable', type, element, object, zoom)
// <? just setter position
// $.Draggable.log(`┌── Starting dragging`, element)
this.element = element
this.object = object
this.callback = callback || (() => void 0)
this.zoom = zoom
this[type](position)
}
| identifier_body |
Draggable.ts | import { Pair } from '@/types'
import { assert, normalize, pair, zip } from '@/utils'
import CanvasZoom from '@/view/CanvasZoom'
import Element from '@/view/Element'
interface Placeable {
position: Pair<number> | element: HTMLElement,
object: Placeable,
zoom: CanvasZoom,
callback?: () => void
}
export interface MousePosition {
clientX: number,
clientY: number
}
export enum DragType {
DRAG, STICK
}
export class Draggable {
public element: HTMLElement
public object: Placeable
public zoom: CanvasZoom
private offset: Pair<number>
public callback: () => void
constructor({ type, position, element, object, zoom, callback }: DragOptions) {
// console.log('Draggable', type, element, object, zoom)
// <? just setter position
// $.Draggable.log(`┌── Starting dragging`, element)
this.element = element
this.object = object
this.callback = callback || (() => void 0)
this.zoom = zoom
this[type](position)
}
[DragType.STICK](position: MousePosition): void {
assert(this.element.parentElement)
const parentProp = this.element.parentElement.getBoundingClientRect() //? use custom Element
this.offset = [ parentProp.x + 50, parentProp.y + 10 ] as Pair<number>
this.dragging(position)
document.addEventListener('mousemove', this.dragging)
document.addEventListener('mousedown', this.endDrag, { once: true })
}
[DragType.DRAG]({ clientX, clientY }: MousePosition): void {
const selfPos = this.element.getBoundingClientRect()
assert(this.element.parentElement)
const parentPos = this.element.parentElement.getBoundingClientRect()
this.offset = [ clientX - selfPos.x + parentPos.x, clientY - selfPos.y + parentPos.y ] //# zip map normalize
// const offset = zip([clientX, clientY], [selfPos.x, selfPos.y], [], [parentPos.x, parentPos.y]).map(normalize)
document.addEventListener('mousemove', this.dragging)
document.addEventListener('mouseup', this.endDrag, { once: true })
}
dragging = ({ clientX, clientY }: MousePosition): void => {
// $.Draggable.log(`├──> client=[${e.clientX}, ${e.clientY}], offset=${this.offset}`)
const pos = zip([clientX, clientY], this.offset, pair(this.zoom.level), []).map(normalize)
// $.Draggable.pipe()
// $.Draggable.log(`└──> new position = [${targetPosition}]`)
// $.Draggable.indent()
this.object.position = pos as Pair<number>
// $.Draggable.unindent()
// $.Draggable.unindent()
this.callback()
}
endDrag = (): void => {
// $.Draggable.log('└──/ dragging ended')
document.removeEventListener('mousemove', this.dragging)
}
} | }
interface DragOptions {
type: DragType,
position: MousePosition, | random_line_split |
Draggable.ts | import { Pair } from '@/types'
import { assert, normalize, pair, zip } from '@/utils'
import CanvasZoom from '@/view/CanvasZoom'
import Element from '@/view/Element'
interface Placeable {
position: Pair<number>
}
interface DragOptions {
type: DragType,
position: MousePosition,
element: HTMLElement,
object: Placeable,
zoom: CanvasZoom,
callback?: () => void
}
export interface MousePosition {
clientX: number,
clientY: number
}
export enum DragType {
DRAG, STICK
}
export class Draggable {
public element: HTMLElement
public object: Placeable
public zoom: CanvasZoom
private offset: Pair<number>
public callback: () => void
constructor({ type, position, element, object, zoom, callback }: DragOptions) {
// console.log('Draggable', type, element, object, zoom)
// <? just setter position
// $.Draggable.log(`┌── Starting dragging`, element)
this.element = element
this.object = object
this.callback = callback || (() => void 0)
this.zoom = zoom
this[type](position)
}
[DragT | ion: MousePosition): void {
assert(this.element.parentElement)
const parentProp = this.element.parentElement.getBoundingClientRect() //? use custom Element
this.offset = [ parentProp.x + 50, parentProp.y + 10 ] as Pair<number>
this.dragging(position)
document.addEventListener('mousemove', this.dragging)
document.addEventListener('mousedown', this.endDrag, { once: true })
}
[DragType.DRAG]({ clientX, clientY }: MousePosition): void {
const selfPos = this.element.getBoundingClientRect()
assert(this.element.parentElement)
const parentPos = this.element.parentElement.getBoundingClientRect()
this.offset = [ clientX - selfPos.x + parentPos.x, clientY - selfPos.y + parentPos.y ] //# zip map normalize
// const offset = zip([clientX, clientY], [selfPos.x, selfPos.y], [], [parentPos.x, parentPos.y]).map(normalize)
document.addEventListener('mousemove', this.dragging)
document.addEventListener('mouseup', this.endDrag, { once: true })
}
dragging = ({ clientX, clientY }: MousePosition): void => {
// $.Draggable.log(`├──> client=[${e.clientX}, ${e.clientY}], offset=${this.offset}`)
const pos = zip([clientX, clientY], this.offset, pair(this.zoom.level), []).map(normalize)
// $.Draggable.pipe()
// $.Draggable.log(`└──> new position = [${targetPosition}]`)
// $.Draggable.indent()
this.object.position = pos as Pair<number>
// $.Draggable.unindent()
// $.Draggable.unindent()
this.callback()
}
endDrag = (): void => {
// $.Draggable.log('└──/ dragging ended')
document.removeEventListener('mousemove', this.dragging)
}
} | ype.STICK](posit | identifier_name |
db_test_lib_test.py | #!/usr/bin/env python
from absl.testing import absltest
from typing import Text
from grr_response_core.lib import rdfvalue
from grr_response_server import blob_store
from grr_response_server.databases import db as abstract_db
from grr.test_lib import db_test_lib
class WithDatabaseTest(absltest.TestCase):
def | (self):
@db_test_lib.WithDatabase
def TestMethod(db: abstract_db.Database):
self.assertIsInstance(db, abstract_db.Database)
TestMethod() # pylint: disable=no-value-for-parameter
def testDatabaseWorks(self):
now = rdfvalue.RDFDatetime.Now()
@db_test_lib.WithDatabase
def TestMethod(self, db: abstract_db.Database):
client_id = "C.0123456789abcdef"
db.WriteClientMetadata(client_id, first_seen=now)
client = db.ReadClientFullInfo(client_id)
self.assertEqual(client.metadata.first_seen, now)
TestMethod(self) # pylint: disable=no-value-for-parameter
def testDatabaseIsFresh(self):
@db_test_lib.WithDatabase
def TestMethod(db: abstract_db.Database):
self.assertEqual(db.CountGRRUsers(), 0)
db.WriteGRRUser("foo")
self.assertEqual(db.CountGRRUsers(), 1)
# We execute test method twice to ensure that each time the database is
# really empty.
TestMethod() # pylint: disable=no-value-for-parameter
TestMethod() # pylint: disable=no-value-for-parameter
def testPassesArguments(self):
@db_test_lib.WithDatabase
def TestMethod(self, username: Text, db: abstract_db.Database):
db.WriteGRRUser(username)
user = db.ReadGRRUser(username)
self.assertEqual(user.username, username)
TestMethod(self, "foo") # pylint: disable=no-value-for-parameter
TestMethod(self, "bar") # pylint: disable=no-value-for-parameter
class WithDatabaseBlobstore(absltest.TestCase):
@db_test_lib.WithDatabase
def testBlobstoreIsProvided(self, db: abstract_db.Database):
del db # Unused.
@db_test_lib.WithDatabaseBlobstore
def TestMethod(bs: blob_store.BlobStore):
self.assertIsInstance(bs, blob_store.BlobStore)
TestMethod() # pylint: disable=no-value-for-parameter
@db_test_lib.WithDatabase
def testBlobstoreWorks(self, db: abstract_db.Database):
del db # Unused.
@db_test_lib.WithDatabaseBlobstore
def TestMethod(bs: blob_store.BlobStore):
blob_id = bs.WriteBlobWithUnknownHash(b"foobarbaz")
self.assertEqual(bs.ReadBlob(blob_id), b"foobarbaz")
TestMethod() # pylint: disable=no-value-for-parameter
@db_test_lib.WithDatabase
def testPassesArguments(self, db: abstract_db.Database):
del db # Unused.
@db_test_lib.WithDatabaseBlobstore
def TestMethod(self, data: bytes, bs: blob_store.BlobStore):
blob_id = bs.WriteBlobWithUnknownHash(data)
self.assertEqual(bs.ReadBlob(blob_id), data)
TestMethod(self, b"quux") # pylint: disable=no-value-for-parameter
TestMethod(self, b"norf") # pylint: disable=no-value-for-parameter
if __name__ == "__main__":
absltest.main()
| testDatabaseIsProvided | identifier_name |
db_test_lib_test.py | #!/usr/bin/env python
from absl.testing import absltest
from typing import Text
from grr_response_core.lib import rdfvalue
from grr_response_server import blob_store
from grr_response_server.databases import db as abstract_db
from grr.test_lib import db_test_lib
class WithDatabaseTest(absltest.TestCase):
def testDatabaseIsProvided(self):
@db_test_lib.WithDatabase
def TestMethod(db: abstract_db.Database):
self.assertIsInstance(db, abstract_db.Database)
TestMethod() # pylint: disable=no-value-for-parameter
def testDatabaseWorks(self):
|
def testDatabaseIsFresh(self):
@db_test_lib.WithDatabase
def TestMethod(db: abstract_db.Database):
self.assertEqual(db.CountGRRUsers(), 0)
db.WriteGRRUser("foo")
self.assertEqual(db.CountGRRUsers(), 1)
# We execute test method twice to ensure that each time the database is
# really empty.
TestMethod() # pylint: disable=no-value-for-parameter
TestMethod() # pylint: disable=no-value-for-parameter
def testPassesArguments(self):
@db_test_lib.WithDatabase
def TestMethod(self, username: Text, db: abstract_db.Database):
db.WriteGRRUser(username)
user = db.ReadGRRUser(username)
self.assertEqual(user.username, username)
TestMethod(self, "foo") # pylint: disable=no-value-for-parameter
TestMethod(self, "bar") # pylint: disable=no-value-for-parameter
class WithDatabaseBlobstore(absltest.TestCase):
@db_test_lib.WithDatabase
def testBlobstoreIsProvided(self, db: abstract_db.Database):
del db # Unused.
@db_test_lib.WithDatabaseBlobstore
def TestMethod(bs: blob_store.BlobStore):
self.assertIsInstance(bs, blob_store.BlobStore)
TestMethod() # pylint: disable=no-value-for-parameter
@db_test_lib.WithDatabase
def testBlobstoreWorks(self, db: abstract_db.Database):
del db # Unused.
@db_test_lib.WithDatabaseBlobstore
def TestMethod(bs: blob_store.BlobStore):
blob_id = bs.WriteBlobWithUnknownHash(b"foobarbaz")
self.assertEqual(bs.ReadBlob(blob_id), b"foobarbaz")
TestMethod() # pylint: disable=no-value-for-parameter
@db_test_lib.WithDatabase
def testPassesArguments(self, db: abstract_db.Database):
del db # Unused.
@db_test_lib.WithDatabaseBlobstore
def TestMethod(self, data: bytes, bs: blob_store.BlobStore):
blob_id = bs.WriteBlobWithUnknownHash(data)
self.assertEqual(bs.ReadBlob(blob_id), data)
TestMethod(self, b"quux") # pylint: disable=no-value-for-parameter
TestMethod(self, b"norf") # pylint: disable=no-value-for-parameter
if __name__ == "__main__":
absltest.main()
| now = rdfvalue.RDFDatetime.Now()
@db_test_lib.WithDatabase
def TestMethod(self, db: abstract_db.Database):
client_id = "C.0123456789abcdef"
db.WriteClientMetadata(client_id, first_seen=now)
client = db.ReadClientFullInfo(client_id)
self.assertEqual(client.metadata.first_seen, now)
TestMethod(self) # pylint: disable=no-value-for-parameter | identifier_body |
db_test_lib_test.py | #!/usr/bin/env python
from absl.testing import absltest
from typing import Text
from grr_response_core.lib import rdfvalue
from grr_response_server import blob_store
from grr_response_server.databases import db as abstract_db
from grr.test_lib import db_test_lib
class WithDatabaseTest(absltest.TestCase):
def testDatabaseIsProvided(self):
@db_test_lib.WithDatabase
def TestMethod(db: abstract_db.Database):
self.assertIsInstance(db, abstract_db.Database)
TestMethod() # pylint: disable=no-value-for-parameter
def testDatabaseWorks(self):
now = rdfvalue.RDFDatetime.Now()
@db_test_lib.WithDatabase
def TestMethod(self, db: abstract_db.Database):
client_id = "C.0123456789abcdef"
db.WriteClientMetadata(client_id, first_seen=now)
client = db.ReadClientFullInfo(client_id)
self.assertEqual(client.metadata.first_seen, now)
TestMethod(self) # pylint: disable=no-value-for-parameter
def testDatabaseIsFresh(self):
@db_test_lib.WithDatabase
def TestMethod(db: abstract_db.Database):
self.assertEqual(db.CountGRRUsers(), 0)
db.WriteGRRUser("foo")
self.assertEqual(db.CountGRRUsers(), 1)
# We execute test method twice to ensure that each time the database is
# really empty.
TestMethod() # pylint: disable=no-value-for-parameter
TestMethod() # pylint: disable=no-value-for-parameter
def testPassesArguments(self):
@db_test_lib.WithDatabase
def TestMethod(self, username: Text, db: abstract_db.Database):
db.WriteGRRUser(username)
user = db.ReadGRRUser(username)
self.assertEqual(user.username, username)
TestMethod(self, "foo") # pylint: disable=no-value-for-parameter
TestMethod(self, "bar") # pylint: disable=no-value-for-parameter
class WithDatabaseBlobstore(absltest.TestCase):
@db_test_lib.WithDatabase
def testBlobstoreIsProvided(self, db: abstract_db.Database):
del db # Unused.
@db_test_lib.WithDatabaseBlobstore
def TestMethod(bs: blob_store.BlobStore):
self.assertIsInstance(bs, blob_store.BlobStore)
TestMethod() # pylint: disable=no-value-for-parameter
@db_test_lib.WithDatabase
def testBlobstoreWorks(self, db: abstract_db.Database):
del db # Unused.
@db_test_lib.WithDatabaseBlobstore
def TestMethod(bs: blob_store.BlobStore):
blob_id = bs.WriteBlobWithUnknownHash(b"foobarbaz")
self.assertEqual(bs.ReadBlob(blob_id), b"foobarbaz")
TestMethod() # pylint: disable=no-value-for-parameter
@db_test_lib.WithDatabase
def testPassesArguments(self, db: abstract_db.Database):
del db # Unused.
@db_test_lib.WithDatabaseBlobstore
def TestMethod(self, data: bytes, bs: blob_store.BlobStore):
blob_id = bs.WriteBlobWithUnknownHash(data)
self.assertEqual(bs.ReadBlob(blob_id), data)
TestMethod(self, b"quux") # pylint: disable=no-value-for-parameter
TestMethod(self, b"norf") # pylint: disable=no-value-for-parameter
if __name__ == "__main__": | absltest.main() | random_line_split | |
db_test_lib_test.py | #!/usr/bin/env python
from absl.testing import absltest
from typing import Text
from grr_response_core.lib import rdfvalue
from grr_response_server import blob_store
from grr_response_server.databases import db as abstract_db
from grr.test_lib import db_test_lib
class WithDatabaseTest(absltest.TestCase):
def testDatabaseIsProvided(self):
@db_test_lib.WithDatabase
def TestMethod(db: abstract_db.Database):
self.assertIsInstance(db, abstract_db.Database)
TestMethod() # pylint: disable=no-value-for-parameter
def testDatabaseWorks(self):
now = rdfvalue.RDFDatetime.Now()
@db_test_lib.WithDatabase
def TestMethod(self, db: abstract_db.Database):
client_id = "C.0123456789abcdef"
db.WriteClientMetadata(client_id, first_seen=now)
client = db.ReadClientFullInfo(client_id)
self.assertEqual(client.metadata.first_seen, now)
TestMethod(self) # pylint: disable=no-value-for-parameter
def testDatabaseIsFresh(self):
@db_test_lib.WithDatabase
def TestMethod(db: abstract_db.Database):
self.assertEqual(db.CountGRRUsers(), 0)
db.WriteGRRUser("foo")
self.assertEqual(db.CountGRRUsers(), 1)
# We execute test method twice to ensure that each time the database is
# really empty.
TestMethod() # pylint: disable=no-value-for-parameter
TestMethod() # pylint: disable=no-value-for-parameter
def testPassesArguments(self):
@db_test_lib.WithDatabase
def TestMethod(self, username: Text, db: abstract_db.Database):
db.WriteGRRUser(username)
user = db.ReadGRRUser(username)
self.assertEqual(user.username, username)
TestMethod(self, "foo") # pylint: disable=no-value-for-parameter
TestMethod(self, "bar") # pylint: disable=no-value-for-parameter
class WithDatabaseBlobstore(absltest.TestCase):
@db_test_lib.WithDatabase
def testBlobstoreIsProvided(self, db: abstract_db.Database):
del db # Unused.
@db_test_lib.WithDatabaseBlobstore
def TestMethod(bs: blob_store.BlobStore):
self.assertIsInstance(bs, blob_store.BlobStore)
TestMethod() # pylint: disable=no-value-for-parameter
@db_test_lib.WithDatabase
def testBlobstoreWorks(self, db: abstract_db.Database):
del db # Unused.
@db_test_lib.WithDatabaseBlobstore
def TestMethod(bs: blob_store.BlobStore):
blob_id = bs.WriteBlobWithUnknownHash(b"foobarbaz")
self.assertEqual(bs.ReadBlob(blob_id), b"foobarbaz")
TestMethod() # pylint: disable=no-value-for-parameter
@db_test_lib.WithDatabase
def testPassesArguments(self, db: abstract_db.Database):
del db # Unused.
@db_test_lib.WithDatabaseBlobstore
def TestMethod(self, data: bytes, bs: blob_store.BlobStore):
blob_id = bs.WriteBlobWithUnknownHash(data)
self.assertEqual(bs.ReadBlob(blob_id), data)
TestMethod(self, b"quux") # pylint: disable=no-value-for-parameter
TestMethod(self, b"norf") # pylint: disable=no-value-for-parameter
if __name__ == "__main__":
| absltest.main() | conditional_block | |
self_stats.js | $(document).ready(function() {
/* Done setting the chart up? Time to render it!*/
var data = [
{
values: [],
key: 'Event queue length',
color: '#000000'
},
]
/*These lines are all chart setup. Pick and choose which chart features you want to utilize. */
nv.addGraph(function() {
var chart = nv.models.lineChart()
.margin({right: 80}) //Adjust chart margins to give the x-axis some breathing room.
.useInteractiveGuideline(true) //We want nice looking tooltips and a guideline!
.transitionDuration(350) //how fast do you want the lines to transition?
.showLegend(false) //Show the legend, allowing users to turn on/off line series.
.showYAxis(true) //Show the y-axis
.showXAxis(true) //Show the x-axis
.noData("Waiting for queue length data...")
;
// Chart x-axis settings
chart.xAxis
.tickPadding(9)
.tickFormat(function(d) {
return d3.time.format('%H:%M:%S')(new Date(d))
});
// Chart y-axis settings
chart.yAxis
.tickPadding(7)
.tickFormat(d3.format(',d'));
d3.select('#chart svg')
.datum(data)
.call(chart);
updateData();
chart.update();
// Poll every 5 seconds
setInterval(function() {
updateData()
d3.select('#chart svg')
.datum(data)
.transition();
d3.select("#chart svg rect")
.style("opacity", 1)
.style("fill", '#fff')
chart.update()
}, 3000);
// Update the chart when window resizes.
nv.utils.windowResize(function() { chart.update() });
return chart;
});
function updateData() {
var api_url = $('div#data-api-url').data('api-url');
$.get(api_url + 'metrics?fields=event_queue_length', function(json) {
var d = new Date().getTime();
var value = {x: d, y: json.event_queue_length}
data[0].values.push(value);
// Remove old data, to keep the graph performant
if (data[0].values.length > 100) |
}, 'json')
}
});
| {
data[0].values.shift()
} | conditional_block |
self_stats.js | $(document).ready(function() {
/* Done setting the chart up? Time to render it!*/
var data = [
{
values: [],
key: 'Event queue length',
color: '#000000'
},
]
/*These lines are all chart setup. Pick and choose which chart features you want to utilize. */
nv.addGraph(function() {
var chart = nv.models.lineChart()
.margin({right: 80}) //Adjust chart margins to give the x-axis some breathing room.
.useInteractiveGuideline(true) //We want nice looking tooltips and a guideline!
.transitionDuration(350) //how fast do you want the lines to transition?
.showLegend(false) //Show the legend, allowing users to turn on/off line series.
.showYAxis(true) //Show the y-axis
.showXAxis(true) //Show the x-axis
.noData("Waiting for queue length data...")
;
// Chart x-axis settings
chart.xAxis
.tickPadding(9)
.tickFormat(function(d) {
return d3.time.format('%H:%M:%S')(new Date(d))
});
// Chart y-axis settings
chart.yAxis
.tickPadding(7)
.tickFormat(d3.format(',d'));
d3.select('#chart svg')
.datum(data)
.call(chart);
updateData();
chart.update();
// Poll every 5 seconds
setInterval(function() {
updateData()
d3.select('#chart svg')
.datum(data)
.transition();
d3.select("#chart svg rect")
.style("opacity", 1)
.style("fill", '#fff')
chart.update()
}, 3000);
// Update the chart when window resizes.
nv.utils.windowResize(function() { chart.update() });
return chart;
});
function updateData() |
});
| {
var api_url = $('div#data-api-url').data('api-url');
$.get(api_url + 'metrics?fields=event_queue_length', function(json) {
var d = new Date().getTime();
var value = {x: d, y: json.event_queue_length}
data[0].values.push(value);
// Remove old data, to keep the graph performant
if (data[0].values.length > 100) {
data[0].values.shift()
}
}, 'json')
} | identifier_body |
self_stats.js | $(document).ready(function() {
/* Done setting the chart up? Time to render it!*/
var data = [
{
values: [],
key: 'Event queue length',
color: '#000000'
},
]
/*These lines are all chart setup. Pick and choose which chart features you want to utilize. */
nv.addGraph(function() {
var chart = nv.models.lineChart()
.margin({right: 80}) //Adjust chart margins to give the x-axis some breathing room.
.useInteractiveGuideline(true) //We want nice looking tooltips and a guideline!
.transitionDuration(350) //how fast do you want the lines to transition?
.showLegend(false) //Show the legend, allowing users to turn on/off line series.
.showYAxis(true) //Show the y-axis
.showXAxis(true) //Show the x-axis
.noData("Waiting for queue length data...")
;
// Chart x-axis settings
chart.xAxis
.tickPadding(9)
.tickFormat(function(d) {
return d3.time.format('%H:%M:%S')(new Date(d))
});
// Chart y-axis settings
chart.yAxis
.tickPadding(7)
.tickFormat(d3.format(',d'));
d3.select('#chart svg')
.datum(data)
.call(chart);
updateData();
chart.update();
// Poll every 5 seconds
setInterval(function() {
updateData()
d3.select('#chart svg')
.datum(data)
.transition();
d3.select("#chart svg rect")
.style("opacity", 1)
.style("fill", '#fff')
chart.update()
}, 3000);
// Update the chart when window resizes.
nv.utils.windowResize(function() { chart.update() });
return chart;
});
function | () {
var api_url = $('div#data-api-url').data('api-url');
$.get(api_url + 'metrics?fields=event_queue_length', function(json) {
var d = new Date().getTime();
var value = {x: d, y: json.event_queue_length}
data[0].values.push(value);
// Remove old data, to keep the graph performant
if (data[0].values.length > 100) {
data[0].values.shift()
}
}, 'json')
}
});
| updateData | identifier_name |
self_stats.js | $(document).ready(function() {
/* Done setting the chart up? Time to render it!*/
var data = [
{
values: [],
key: 'Event queue length',
color: '#000000'
},
]
/*These lines are all chart setup. Pick and choose which chart features you want to utilize. */
nv.addGraph(function() {
var chart = nv.models.lineChart()
.margin({right: 80}) //Adjust chart margins to give the x-axis some breathing room.
.useInteractiveGuideline(true) //We want nice looking tooltips and a guideline!
.transitionDuration(350) //how fast do you want the lines to transition?
.showLegend(false) //Show the legend, allowing users to turn on/off line series.
.showYAxis(true) //Show the y-axis
.showXAxis(true) //Show the x-axis
.noData("Waiting for queue length data...")
;
// Chart x-axis settings
chart.xAxis
.tickPadding(9)
.tickFormat(function(d) {
return d3.time.format('%H:%M:%S')(new Date(d))
});
// Chart y-axis settings
chart.yAxis
.tickPadding(7)
.tickFormat(d3.format(',d'));
d3.select('#chart svg')
.datum(data)
.call(chart);
updateData();
chart.update();
// Poll every 5 seconds
setInterval(function() {
updateData()
d3.select('#chart svg')
.datum(data)
.transition();
d3.select("#chart svg rect")
.style("opacity", 1)
.style("fill", '#fff')
chart.update() | // Update the chart when window resizes.
nv.utils.windowResize(function() { chart.update() });
return chart;
});
function updateData() {
var api_url = $('div#data-api-url').data('api-url');
$.get(api_url + 'metrics?fields=event_queue_length', function(json) {
var d = new Date().getTime();
var value = {x: d, y: json.event_queue_length}
data[0].values.push(value);
// Remove old data, to keep the graph performant
if (data[0].values.length > 100) {
data[0].values.shift()
}
}, 'json')
}
}); | }, 3000);
| random_line_split |
macro_crate_test.rs | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// force-host
#![feature(globs, plugin_registrar, macro_rules, quote, managed_boxes)]
extern crate syntax;
extern crate rustc;
use syntax::ast::{TokenTree, Item, MetaItem};
use syntax::codemap::Span;
use syntax::ext::base::*;
use syntax::parse::token;
use rustc::plugin::Registry;
use std::gc::{Gc, GC};
#[macro_export]
macro_rules! exported_macro (() => (2i))
macro_rules! unexported_macro (() => (3i))
#[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) {
reg.register_macro("make_a_1", expand_make_a_1);
reg.register_syntax_extension(
token::intern("into_foo"),
ItemModifier(expand_into_foo));
}
fn expand_make_a_1(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree])
-> Box<MacResult> {
if !tts.is_empty() |
MacExpr::new(quote_expr!(cx, 1i))
}
fn expand_into_foo(cx: &mut ExtCtxt, sp: Span, attr: Gc<MetaItem>, it: Gc<Item>)
-> Gc<Item> {
box(GC) Item {
attrs: it.attrs.clone(),
..(*quote_item!(cx, enum Foo { Bar, Baz }).unwrap()).clone()
}
}
pub fn foo() {}
| {
cx.span_fatal(sp, "make_a_1 takes no arguments");
} | conditional_block |
macro_crate_test.rs | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// force-host
#![feature(globs, plugin_registrar, macro_rules, quote, managed_boxes)]
extern crate syntax;
extern crate rustc;
use syntax::ast::{TokenTree, Item, MetaItem};
use syntax::codemap::Span;
use syntax::ext::base::*;
use syntax::parse::token;
use rustc::plugin::Registry;
use std::gc::{Gc, GC};
#[macro_export]
macro_rules! exported_macro (() => (2i))
macro_rules! unexported_macro (() => (3i))
#[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) |
fn expand_make_a_1(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree])
-> Box<MacResult> {
if !tts.is_empty() {
cx.span_fatal(sp, "make_a_1 takes no arguments");
}
MacExpr::new(quote_expr!(cx, 1i))
}
fn expand_into_foo(cx: &mut ExtCtxt, sp: Span, attr: Gc<MetaItem>, it: Gc<Item>)
-> Gc<Item> {
box(GC) Item {
attrs: it.attrs.clone(),
..(*quote_item!(cx, enum Foo { Bar, Baz }).unwrap()).clone()
}
}
pub fn foo() {}
| {
reg.register_macro("make_a_1", expand_make_a_1);
reg.register_syntax_extension(
token::intern("into_foo"),
ItemModifier(expand_into_foo));
} | identifier_body |
macro_crate_test.rs | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// force-host
#![feature(globs, plugin_registrar, macro_rules, quote, managed_boxes)]
extern crate syntax;
extern crate rustc;
use syntax::ast::{TokenTree, Item, MetaItem};
use syntax::codemap::Span;
use syntax::ext::base::*;
use syntax::parse::token;
use rustc::plugin::Registry;
use std::gc::{Gc, GC};
#[macro_export]
macro_rules! exported_macro (() => (2i))
macro_rules! unexported_macro (() => (3i))
#[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) {
reg.register_macro("make_a_1", expand_make_a_1);
reg.register_syntax_extension(
token::intern("into_foo"),
ItemModifier(expand_into_foo));
}
fn | (cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree])
-> Box<MacResult> {
if !tts.is_empty() {
cx.span_fatal(sp, "make_a_1 takes no arguments");
}
MacExpr::new(quote_expr!(cx, 1i))
}
fn expand_into_foo(cx: &mut ExtCtxt, sp: Span, attr: Gc<MetaItem>, it: Gc<Item>)
-> Gc<Item> {
box(GC) Item {
attrs: it.attrs.clone(),
..(*quote_item!(cx, enum Foo { Bar, Baz }).unwrap()).clone()
}
}
pub fn foo() {}
| expand_make_a_1 | identifier_name |
macro_crate_test.rs | // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// force-host
#![feature(globs, plugin_registrar, macro_rules, quote, managed_boxes)]
extern crate syntax;
extern crate rustc;
use syntax::ast::{TokenTree, Item, MetaItem};
use syntax::codemap::Span;
use syntax::ext::base::*;
use syntax::parse::token;
use rustc::plugin::Registry;
use std::gc::{Gc, GC};
#[macro_export]
macro_rules! exported_macro (() => (2i))
macro_rules! unexported_macro (() => (3i))
#[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) {
reg.register_macro("make_a_1", expand_make_a_1);
reg.register_syntax_extension(
token::intern("into_foo"),
ItemModifier(expand_into_foo));
}
fn expand_make_a_1(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree])
-> Box<MacResult> {
if !tts.is_empty() {
cx.span_fatal(sp, "make_a_1 takes no arguments");
}
MacExpr::new(quote_expr!(cx, 1i))
} | -> Gc<Item> {
box(GC) Item {
attrs: it.attrs.clone(),
..(*quote_item!(cx, enum Foo { Bar, Baz }).unwrap()).clone()
}
}
pub fn foo() {} |
fn expand_into_foo(cx: &mut ExtCtxt, sp: Span, attr: Gc<MetaItem>, it: Gc<Item>) | random_line_split |
stylesheeteditor.py | #############################################################################
##
## Copyright (C) 2010 Hans-Peter Jansen <hpj@urpla.net>.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
###########################################################################
from PyQt5.QtCore import pyqtSlot, QFile, QRegExp, Qt, QTextStream
from PyQt5.QtWidgets import (QApplication, QDialog, QFileDialog, QMessageBox,
QStyleFactory)
from ui_stylesheeteditor import Ui_StyleSheetEditor
class StyleSheetEditor(QDialog):
def __init__(self, parent=None):
super(StyleSheetEditor, self).__init__(parent)
self.ui = Ui_StyleSheetEditor()
self.ui.setupUi(self)
regExp = QRegExp(r'.(.*)\+?Style')
defaultStyle = QApplication.style().metaObject().className()
if regExp.exactMatch(defaultStyle):
defaultStyle = regExp.cap(1)
self.ui.styleCombo.addItems(QStyleFactory.keys())
self.ui.styleCombo.setCurrentIndex(
self.ui.styleCombo.findText(defaultStyle, Qt.MatchContains))
self.ui.styleSheetCombo.setCurrentIndex(
self.ui.styleSheetCombo.findText('Coffee'))
self.loadStyleSheet('Coffee')
@pyqtSlot(str)
def on_styleCombo_activated(self, styleName):
QApplication.setStyle(styleName)
self.ui.applyButton.setEnabled(False)
@pyqtSlot(str)
def | (self, sheetName):
self.loadStyleSheet(sheetName)
def on_styleTextEdit_textChanged(self):
self.ui.applyButton.setEnabled(True)
def on_applyButton_clicked(self):
QApplication.instance().setStyleSheet(
self.ui.styleTextEdit.toPlainText())
self.ui.applyButton.setEnabled(False)
def on_saveButton_clicked(self):
fileName, _ = QFileDialog.getSaveFileName(self)
if fileName:
self.saveStyleSheet(fileName)
def loadStyleSheet(self, sheetName):
file = QFile(':/qss/%s.qss' % sheetName.lower())
file.open(QFile.ReadOnly)
styleSheet = file.readAll()
try:
# Python v2.
styleSheet = unicode(styleSheet, encoding='utf8')
except NameError:
# Python v3.
styleSheet = str(styleSheet, encoding='utf8')
self.ui.styleTextEdit.setPlainText(styleSheet)
QApplication.instance().setStyleSheet(styleSheet)
self.ui.applyButton.setEnabled(False)
def saveStyleSheet(self, fileName):
styleSheet = self.ui.styleTextEdit.toPlainText()
file = QFile(fileName)
if file.open(QFile.WriteOnly):
QTextStream(file) << styleSheet
else:
QMessageBox.information(self, "Unable to open file",
file.errorString())
| on_styleSheetCombo_activated | identifier_name |
stylesheeteditor.py | #############################################################################
##
## Copyright (C) 2010 Hans-Peter Jansen <hpj@urpla.net>.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
###########################################################################
from PyQt5.QtCore import pyqtSlot, QFile, QRegExp, Qt, QTextStream
from PyQt5.QtWidgets import (QApplication, QDialog, QFileDialog, QMessageBox,
QStyleFactory)
from ui_stylesheeteditor import Ui_StyleSheetEditor
class StyleSheetEditor(QDialog):
def __init__(self, parent=None):
super(StyleSheetEditor, self).__init__(parent)
self.ui = Ui_StyleSheetEditor()
self.ui.setupUi(self)
regExp = QRegExp(r'.(.*)\+?Style')
defaultStyle = QApplication.style().metaObject().className()
if regExp.exactMatch(defaultStyle):
defaultStyle = regExp.cap(1)
self.ui.styleCombo.addItems(QStyleFactory.keys())
self.ui.styleCombo.setCurrentIndex(
self.ui.styleCombo.findText(defaultStyle, Qt.MatchContains))
self.ui.styleSheetCombo.setCurrentIndex(
self.ui.styleSheetCombo.findText('Coffee'))
self.loadStyleSheet('Coffee')
@pyqtSlot(str)
def on_styleCombo_activated(self, styleName):
QApplication.setStyle(styleName) | self.loadStyleSheet(sheetName)
def on_styleTextEdit_textChanged(self):
self.ui.applyButton.setEnabled(True)
def on_applyButton_clicked(self):
QApplication.instance().setStyleSheet(
self.ui.styleTextEdit.toPlainText())
self.ui.applyButton.setEnabled(False)
def on_saveButton_clicked(self):
fileName, _ = QFileDialog.getSaveFileName(self)
if fileName:
self.saveStyleSheet(fileName)
def loadStyleSheet(self, sheetName):
file = QFile(':/qss/%s.qss' % sheetName.lower())
file.open(QFile.ReadOnly)
styleSheet = file.readAll()
try:
# Python v2.
styleSheet = unicode(styleSheet, encoding='utf8')
except NameError:
# Python v3.
styleSheet = str(styleSheet, encoding='utf8')
self.ui.styleTextEdit.setPlainText(styleSheet)
QApplication.instance().setStyleSheet(styleSheet)
self.ui.applyButton.setEnabled(False)
def saveStyleSheet(self, fileName):
styleSheet = self.ui.styleTextEdit.toPlainText()
file = QFile(fileName)
if file.open(QFile.WriteOnly):
QTextStream(file) << styleSheet
else:
QMessageBox.information(self, "Unable to open file",
file.errorString()) | self.ui.applyButton.setEnabled(False)
@pyqtSlot(str)
def on_styleSheetCombo_activated(self, sheetName): | random_line_split |
stylesheeteditor.py | #############################################################################
##
## Copyright (C) 2010 Hans-Peter Jansen <hpj@urpla.net>.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
###########################################################################
from PyQt5.QtCore import pyqtSlot, QFile, QRegExp, Qt, QTextStream
from PyQt5.QtWidgets import (QApplication, QDialog, QFileDialog, QMessageBox,
QStyleFactory)
from ui_stylesheeteditor import Ui_StyleSheetEditor
class StyleSheetEditor(QDialog):
def __init__(self, parent=None):
super(StyleSheetEditor, self).__init__(parent)
self.ui = Ui_StyleSheetEditor()
self.ui.setupUi(self)
regExp = QRegExp(r'.(.*)\+?Style')
defaultStyle = QApplication.style().metaObject().className()
if regExp.exactMatch(defaultStyle):
|
self.ui.styleCombo.addItems(QStyleFactory.keys())
self.ui.styleCombo.setCurrentIndex(
self.ui.styleCombo.findText(defaultStyle, Qt.MatchContains))
self.ui.styleSheetCombo.setCurrentIndex(
self.ui.styleSheetCombo.findText('Coffee'))
self.loadStyleSheet('Coffee')
@pyqtSlot(str)
def on_styleCombo_activated(self, styleName):
QApplication.setStyle(styleName)
self.ui.applyButton.setEnabled(False)
@pyqtSlot(str)
def on_styleSheetCombo_activated(self, sheetName):
self.loadStyleSheet(sheetName)
def on_styleTextEdit_textChanged(self):
self.ui.applyButton.setEnabled(True)
def on_applyButton_clicked(self):
QApplication.instance().setStyleSheet(
self.ui.styleTextEdit.toPlainText())
self.ui.applyButton.setEnabled(False)
def on_saveButton_clicked(self):
fileName, _ = QFileDialog.getSaveFileName(self)
if fileName:
self.saveStyleSheet(fileName)
def loadStyleSheet(self, sheetName):
file = QFile(':/qss/%s.qss' % sheetName.lower())
file.open(QFile.ReadOnly)
styleSheet = file.readAll()
try:
# Python v2.
styleSheet = unicode(styleSheet, encoding='utf8')
except NameError:
# Python v3.
styleSheet = str(styleSheet, encoding='utf8')
self.ui.styleTextEdit.setPlainText(styleSheet)
QApplication.instance().setStyleSheet(styleSheet)
self.ui.applyButton.setEnabled(False)
def saveStyleSheet(self, fileName):
styleSheet = self.ui.styleTextEdit.toPlainText()
file = QFile(fileName)
if file.open(QFile.WriteOnly):
QTextStream(file) << styleSheet
else:
QMessageBox.information(self, "Unable to open file",
file.errorString())
| defaultStyle = regExp.cap(1) | conditional_block |
stylesheeteditor.py | #############################################################################
##
## Copyright (C) 2010 Hans-Peter Jansen <hpj@urpla.net>.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
###########################################################################
from PyQt5.QtCore import pyqtSlot, QFile, QRegExp, Qt, QTextStream
from PyQt5.QtWidgets import (QApplication, QDialog, QFileDialog, QMessageBox,
QStyleFactory)
from ui_stylesheeteditor import Ui_StyleSheetEditor
class StyleSheetEditor(QDialog):
def __init__(self, parent=None):
super(StyleSheetEditor, self).__init__(parent)
self.ui = Ui_StyleSheetEditor()
self.ui.setupUi(self)
regExp = QRegExp(r'.(.*)\+?Style')
defaultStyle = QApplication.style().metaObject().className()
if regExp.exactMatch(defaultStyle):
defaultStyle = regExp.cap(1)
self.ui.styleCombo.addItems(QStyleFactory.keys())
self.ui.styleCombo.setCurrentIndex(
self.ui.styleCombo.findText(defaultStyle, Qt.MatchContains))
self.ui.styleSheetCombo.setCurrentIndex(
self.ui.styleSheetCombo.findText('Coffee'))
self.loadStyleSheet('Coffee')
@pyqtSlot(str)
def on_styleCombo_activated(self, styleName):
QApplication.setStyle(styleName)
self.ui.applyButton.setEnabled(False)
@pyqtSlot(str)
def on_styleSheetCombo_activated(self, sheetName):
self.loadStyleSheet(sheetName)
def on_styleTextEdit_textChanged(self):
self.ui.applyButton.setEnabled(True)
def on_applyButton_clicked(self):
QApplication.instance().setStyleSheet(
self.ui.styleTextEdit.toPlainText())
self.ui.applyButton.setEnabled(False)
def on_saveButton_clicked(self):
fileName, _ = QFileDialog.getSaveFileName(self)
if fileName:
self.saveStyleSheet(fileName)
def loadStyleSheet(self, sheetName):
|
def saveStyleSheet(self, fileName):
styleSheet = self.ui.styleTextEdit.toPlainText()
file = QFile(fileName)
if file.open(QFile.WriteOnly):
QTextStream(file) << styleSheet
else:
QMessageBox.information(self, "Unable to open file",
file.errorString())
| file = QFile(':/qss/%s.qss' % sheetName.lower())
file.open(QFile.ReadOnly)
styleSheet = file.readAll()
try:
# Python v2.
styleSheet = unicode(styleSheet, encoding='utf8')
except NameError:
# Python v3.
styleSheet = str(styleSheet, encoding='utf8')
self.ui.styleTextEdit.setPlainText(styleSheet)
QApplication.instance().setStyleSheet(styleSheet)
self.ui.applyButton.setEnabled(False) | identifier_body |
string.rs | use std::fmt;
use std::ops;
#[derive(Clone, Eq)]
pub enum StringOrStatic {
String(String),
Static(&'static str),
}
impl From<&str> for StringOrStatic {
fn from(s: &str) -> Self {
StringOrStatic::String(s.to_owned())
}
}
impl From<String> for StringOrStatic {
fn from(s: String) -> Self {
StringOrStatic::String(s)
}
}
impl StringOrStatic {
pub fn | (&self) -> &str {
match self {
StringOrStatic::String(s) => &s,
StringOrStatic::Static(s) => s,
}
}
pub fn to_string(&self) -> String {
format!("{}", self)
}
}
impl ops::Deref for StringOrStatic {
type Target = str;
fn deref(&self) -> &str {
self.as_str()
}
}
impl PartialEq<StringOrStatic> for StringOrStatic {
fn eq(&self, other: &StringOrStatic) -> bool {
self.as_str() == other.as_str()
}
}
impl PartialEq<str> for StringOrStatic {
fn eq(&self, other: &str) -> bool {
self.as_str() == other
}
}
impl PartialEq<&str> for StringOrStatic {
fn eq(&self, other: &&str) -> bool {
self.as_str() == *other
}
}
impl PartialEq<StringOrStatic> for str {
fn eq(&self, other: &StringOrStatic) -> bool {
self == other.as_str()
}
}
impl PartialEq<StringOrStatic> for &str {
fn eq(&self, other: &StringOrStatic) -> bool {
*self == other.as_str()
}
}
impl fmt::Display for StringOrStatic {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
StringOrStatic::String(s) => fmt::Display::fmt(s, f),
StringOrStatic::Static(s) => fmt::Display::fmt(s, f),
}
}
}
| as_str | identifier_name |
string.rs | use std::fmt;
use std::ops;
#[derive(Clone, Eq)]
pub enum StringOrStatic {
String(String),
Static(&'static str),
}
impl From<&str> for StringOrStatic {
fn from(s: &str) -> Self {
StringOrStatic::String(s.to_owned())
} | }
impl From<String> for StringOrStatic {
fn from(s: String) -> Self {
StringOrStatic::String(s)
}
}
impl StringOrStatic {
pub fn as_str(&self) -> &str {
match self {
StringOrStatic::String(s) => &s,
StringOrStatic::Static(s) => s,
}
}
pub fn to_string(&self) -> String {
format!("{}", self)
}
}
impl ops::Deref for StringOrStatic {
type Target = str;
fn deref(&self) -> &str {
self.as_str()
}
}
impl PartialEq<StringOrStatic> for StringOrStatic {
fn eq(&self, other: &StringOrStatic) -> bool {
self.as_str() == other.as_str()
}
}
impl PartialEq<str> for StringOrStatic {
fn eq(&self, other: &str) -> bool {
self.as_str() == other
}
}
impl PartialEq<&str> for StringOrStatic {
fn eq(&self, other: &&str) -> bool {
self.as_str() == *other
}
}
impl PartialEq<StringOrStatic> for str {
fn eq(&self, other: &StringOrStatic) -> bool {
self == other.as_str()
}
}
impl PartialEq<StringOrStatic> for &str {
fn eq(&self, other: &StringOrStatic) -> bool {
*self == other.as_str()
}
}
impl fmt::Display for StringOrStatic {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
StringOrStatic::String(s) => fmt::Display::fmt(s, f),
StringOrStatic::Static(s) => fmt::Display::fmt(s, f),
}
}
} | random_line_split | |
Item.js | var virt = require("@nathanfaucett/virt"),
virtDOM = require("@nathanfaucett/virt-dom"),
css = require("@nathanfaucett/css"),
propTypes = require("@nathanfaucett/prop_types"),
domDimensions = require("@nathanfaucett/dom_dimensions"),
getImageDimensions = require("../../utils/getImageDimensions");
var ItemPrototype;
module.exports = Item;
function | (props, children, context) {
var _this = this;
virt.Component.call(this, props, children, context);
this.state = {
loaded: false,
ratio: 1,
width: 0,
height: 0,
top: 0,
left: 0
};
this.onMouseOver = function(e) {
return _this.__onMouseOver(e);
};
this.onMouseOut = function(e) {
return _this.__onMouseOut(e);
};
this.onClick = function(e) {
return _this.__onClick(e);
};
this.onLoad = function(e) {
return _this.__onLoad(e);
};
}
virt.Component.extend(Item, "Item");
ItemPrototype = Item.prototype;
Item.propTypes = {
item: propTypes.object.isRequired,
height: propTypes.number.isRequired
};
Item.contextTypes = {
theme: propTypes.object.isRequired
};
ItemPrototype.__onLoad = function() {
if (!this.state.loaded) {
this.getImageDimensions();
}
};
ItemPrototype.__onMouseOver = function() {
this.setState({
hover: true
});
};
ItemPrototype.__onMouseOut = function() {
this.setState({
hover: false
});
};
ItemPrototype.getImageDimensions = function() {
var node = virtDOM.findDOMNode(this),
dims = getImageDimensions(
virtDOM.findDOMNode(this.refs.img),
domDimensions.width(node),
domDimensions.height(node)
);
this.setState({
loaded: true,
width: dims.width,
height: dims.height,
top: -dims.top,
left: -dims.left
});
};
ItemPrototype.getStyles = function() {
var context = this.context,
theme = context.theme,
state = this.state,
props = this.props,
styles = {
root: {
position: "relative",
height: props.height + "px",
overflow: "hidden"
},
hover: {
zIndex: 1,
display: "block",
cursor: "pointer",
position: "absolute",
width: "100%",
height: props.height + "px",
background: theme.palette.accent2Color // theme.palette.canvasColor
},
imgWrap: {
zIndex: 0,
position: "relative"
},
img: {
position: "absolute",
maxWidth: "inherit",
top: state.top + "px",
left: state.left + "px",
width: state.loaded ? state.width + "px" : "inherit",
height: state.loaded ? state.height + "px" : "inherit"
}
};
css.transition(styles.hover, "opacity 300ms cubic-bezier(.25,.8,.25,1)");
if (state.hover) {
css.opacity(styles.hover, 0.5);
} else {
css.opacity(styles.hover, 0);
}
return styles;
};
ItemPrototype.render = function() {
var styles = this.getStyles(),
item = this.props.item;
return (
virt.createView("div", {
className: "Item",
style: styles.root
},
virt.createView("a", {
onMouseOver: this.onMouseOver,
onMouseOut: this.onMouseOut,
href: "/residential_gallery/" + this.props.item.id,
style: styles.hover
}),
virt.createView("div", {
style: styles.imgWrap
},
virt.createView("img", {
onLoad: this.onLoad,
style: styles.img,
ref: "img",
src: item.thumbnail
})
)
)
);
};
| Item | identifier_name |
Item.js | var virt = require("@nathanfaucett/virt"),
virtDOM = require("@nathanfaucett/virt-dom"),
css = require("@nathanfaucett/css"),
propTypes = require("@nathanfaucett/prop_types"),
domDimensions = require("@nathanfaucett/dom_dimensions"),
getImageDimensions = require("../../utils/getImageDimensions");
var ItemPrototype;
module.exports = Item;
function Item(props, children, context) |
virt.Component.extend(Item, "Item");
ItemPrototype = Item.prototype;
Item.propTypes = {
item: propTypes.object.isRequired,
height: propTypes.number.isRequired
};
Item.contextTypes = {
theme: propTypes.object.isRequired
};
ItemPrototype.__onLoad = function() {
if (!this.state.loaded) {
this.getImageDimensions();
}
};
ItemPrototype.__onMouseOver = function() {
this.setState({
hover: true
});
};
ItemPrototype.__onMouseOut = function() {
this.setState({
hover: false
});
};
ItemPrototype.getImageDimensions = function() {
var node = virtDOM.findDOMNode(this),
dims = getImageDimensions(
virtDOM.findDOMNode(this.refs.img),
domDimensions.width(node),
domDimensions.height(node)
);
this.setState({
loaded: true,
width: dims.width,
height: dims.height,
top: -dims.top,
left: -dims.left
});
};
ItemPrototype.getStyles = function() {
var context = this.context,
theme = context.theme,
state = this.state,
props = this.props,
styles = {
root: {
position: "relative",
height: props.height + "px",
overflow: "hidden"
},
hover: {
zIndex: 1,
display: "block",
cursor: "pointer",
position: "absolute",
width: "100%",
height: props.height + "px",
background: theme.palette.accent2Color // theme.palette.canvasColor
},
imgWrap: {
zIndex: 0,
position: "relative"
},
img: {
position: "absolute",
maxWidth: "inherit",
top: state.top + "px",
left: state.left + "px",
width: state.loaded ? state.width + "px" : "inherit",
height: state.loaded ? state.height + "px" : "inherit"
}
};
css.transition(styles.hover, "opacity 300ms cubic-bezier(.25,.8,.25,1)");
if (state.hover) {
css.opacity(styles.hover, 0.5);
} else {
css.opacity(styles.hover, 0);
}
return styles;
};
ItemPrototype.render = function() {
var styles = this.getStyles(),
item = this.props.item;
return (
virt.createView("div", {
className: "Item",
style: styles.root
},
virt.createView("a", {
onMouseOver: this.onMouseOver,
onMouseOut: this.onMouseOut,
href: "/residential_gallery/" + this.props.item.id,
style: styles.hover
}),
virt.createView("div", {
style: styles.imgWrap
},
virt.createView("img", {
onLoad: this.onLoad,
style: styles.img,
ref: "img",
src: item.thumbnail
})
)
)
);
};
| {
var _this = this;
virt.Component.call(this, props, children, context);
this.state = {
loaded: false,
ratio: 1,
width: 0,
height: 0,
top: 0,
left: 0
};
this.onMouseOver = function(e) {
return _this.__onMouseOver(e);
};
this.onMouseOut = function(e) {
return _this.__onMouseOut(e);
};
this.onClick = function(e) {
return _this.__onClick(e);
};
this.onLoad = function(e) {
return _this.__onLoad(e);
};
} | identifier_body |
Item.js | var virt = require("@nathanfaucett/virt"),
virtDOM = require("@nathanfaucett/virt-dom"),
css = require("@nathanfaucett/css"),
propTypes = require("@nathanfaucett/prop_types"),
domDimensions = require("@nathanfaucett/dom_dimensions"),
getImageDimensions = require("../../utils/getImageDimensions");
var ItemPrototype;
module.exports = Item;
function Item(props, children, context) {
var _this = this;
virt.Component.call(this, props, children, context);
this.state = {
loaded: false,
ratio: 1,
width: 0,
height: 0,
top: 0,
left: 0
};
this.onMouseOver = function(e) {
return _this.__onMouseOver(e);
};
this.onMouseOut = function(e) {
return _this.__onMouseOut(e);
};
this.onClick = function(e) {
return _this.__onClick(e);
};
this.onLoad = function(e) {
return _this.__onLoad(e);
};
}
virt.Component.extend(Item, "Item");
ItemPrototype = Item.prototype;
Item.propTypes = {
item: propTypes.object.isRequired,
height: propTypes.number.isRequired
};
Item.contextTypes = {
theme: propTypes.object.isRequired
};
ItemPrototype.__onLoad = function() {
if (!this.state.loaded) {
this.getImageDimensions();
}
};
ItemPrototype.__onMouseOver = function() {
this.setState({
hover: true
});
};
ItemPrototype.__onMouseOut = function() {
this.setState({
hover: false
});
};
ItemPrototype.getImageDimensions = function() {
var node = virtDOM.findDOMNode(this),
dims = getImageDimensions(
virtDOM.findDOMNode(this.refs.img),
domDimensions.width(node),
domDimensions.height(node)
);
this.setState({
loaded: true,
width: dims.width,
height: dims.height,
top: -dims.top,
left: -dims.left
});
};
ItemPrototype.getStyles = function() {
var context = this.context,
theme = context.theme,
state = this.state,
props = this.props,
styles = {
root: {
position: "relative",
height: props.height + "px",
overflow: "hidden"
},
hover: {
zIndex: 1,
display: "block",
cursor: "pointer",
position: "absolute", | zIndex: 0,
position: "relative"
},
img: {
position: "absolute",
maxWidth: "inherit",
top: state.top + "px",
left: state.left + "px",
width: state.loaded ? state.width + "px" : "inherit",
height: state.loaded ? state.height + "px" : "inherit"
}
};
css.transition(styles.hover, "opacity 300ms cubic-bezier(.25,.8,.25,1)");
if (state.hover) {
css.opacity(styles.hover, 0.5);
} else {
css.opacity(styles.hover, 0);
}
return styles;
};
ItemPrototype.render = function() {
var styles = this.getStyles(),
item = this.props.item;
return (
virt.createView("div", {
className: "Item",
style: styles.root
},
virt.createView("a", {
onMouseOver: this.onMouseOver,
onMouseOut: this.onMouseOut,
href: "/residential_gallery/" + this.props.item.id,
style: styles.hover
}),
virt.createView("div", {
style: styles.imgWrap
},
virt.createView("img", {
onLoad: this.onLoad,
style: styles.img,
ref: "img",
src: item.thumbnail
})
)
)
);
}; | width: "100%",
height: props.height + "px",
background: theme.palette.accent2Color // theme.palette.canvasColor
},
imgWrap: { | random_line_split |
Item.js | var virt = require("@nathanfaucett/virt"),
virtDOM = require("@nathanfaucett/virt-dom"),
css = require("@nathanfaucett/css"),
propTypes = require("@nathanfaucett/prop_types"),
domDimensions = require("@nathanfaucett/dom_dimensions"),
getImageDimensions = require("../../utils/getImageDimensions");
var ItemPrototype;
module.exports = Item;
function Item(props, children, context) {
var _this = this;
virt.Component.call(this, props, children, context);
this.state = {
loaded: false,
ratio: 1,
width: 0,
height: 0,
top: 0,
left: 0
};
this.onMouseOver = function(e) {
return _this.__onMouseOver(e);
};
this.onMouseOut = function(e) {
return _this.__onMouseOut(e);
};
this.onClick = function(e) {
return _this.__onClick(e);
};
this.onLoad = function(e) {
return _this.__onLoad(e);
};
}
virt.Component.extend(Item, "Item");
ItemPrototype = Item.prototype;
Item.propTypes = {
item: propTypes.object.isRequired,
height: propTypes.number.isRequired
};
Item.contextTypes = {
theme: propTypes.object.isRequired
};
ItemPrototype.__onLoad = function() {
if (!this.state.loaded) {
this.getImageDimensions();
}
};
ItemPrototype.__onMouseOver = function() {
this.setState({
hover: true
});
};
ItemPrototype.__onMouseOut = function() {
this.setState({
hover: false
});
};
ItemPrototype.getImageDimensions = function() {
var node = virtDOM.findDOMNode(this),
dims = getImageDimensions(
virtDOM.findDOMNode(this.refs.img),
domDimensions.width(node),
domDimensions.height(node)
);
this.setState({
loaded: true,
width: dims.width,
height: dims.height,
top: -dims.top,
left: -dims.left
});
};
ItemPrototype.getStyles = function() {
var context = this.context,
theme = context.theme,
state = this.state,
props = this.props,
styles = {
root: {
position: "relative",
height: props.height + "px",
overflow: "hidden"
},
hover: {
zIndex: 1,
display: "block",
cursor: "pointer",
position: "absolute",
width: "100%",
height: props.height + "px",
background: theme.palette.accent2Color // theme.palette.canvasColor
},
imgWrap: {
zIndex: 0,
position: "relative"
},
img: {
position: "absolute",
maxWidth: "inherit",
top: state.top + "px",
left: state.left + "px",
width: state.loaded ? state.width + "px" : "inherit",
height: state.loaded ? state.height + "px" : "inherit"
}
};
css.transition(styles.hover, "opacity 300ms cubic-bezier(.25,.8,.25,1)");
if (state.hover) {
css.opacity(styles.hover, 0.5);
} else |
return styles;
};
ItemPrototype.render = function() {
var styles = this.getStyles(),
item = this.props.item;
return (
virt.createView("div", {
className: "Item",
style: styles.root
},
virt.createView("a", {
onMouseOver: this.onMouseOver,
onMouseOut: this.onMouseOut,
href: "/residential_gallery/" + this.props.item.id,
style: styles.hover
}),
virt.createView("div", {
style: styles.imgWrap
},
virt.createView("img", {
onLoad: this.onLoad,
style: styles.img,
ref: "img",
src: item.thumbnail
})
)
)
);
};
| {
css.opacity(styles.hover, 0);
} | conditional_block |
package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Simul(Package):
"""simul is an MPI coordinated test of parallel
filesystem system calls and library functions. """
homepage = "https://github.com/LLNL/simul"
url = "https://github.com/LLNL/simul/archive/1.16.tar.gz"
version('1.16', 'd616c1046a170c1e1b7956c402d23a95')
version('1.15', 'a5744673c094a87c05c6f0799d1f496f')
version('1.14', 'f8c14f0bac15741e2af354e3f9a0e30f')
version('1.13', '8a80a62d569557715d6c9c326e39a8ef')
depends_on('mpi')
def install(self, spec, prefix):
| filter_file('mpicc', '$(MPICC)', 'Makefile', string=True)
filter_file('inline void', 'void', 'simul.c', string=True)
make('simul')
mkdirp(prefix.bin)
install('simul', prefix.bin) | identifier_body | |
package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and | ##############################################################################
from spack import *
class Simul(Package):
"""simul is an MPI coordinated test of parallel
filesystem system calls and library functions. """
homepage = "https://github.com/LLNL/simul"
url = "https://github.com/LLNL/simul/archive/1.16.tar.gz"
version('1.16', 'd616c1046a170c1e1b7956c402d23a95')
version('1.15', 'a5744673c094a87c05c6f0799d1f496f')
version('1.14', 'f8c14f0bac15741e2af354e3f9a0e30f')
version('1.13', '8a80a62d569557715d6c9c326e39a8ef')
depends_on('mpi')
def install(self, spec, prefix):
filter_file('mpicc', '$(MPICC)', 'Makefile', string=True)
filter_file('inline void', 'void', 'simul.c', string=True)
make('simul')
mkdirp(prefix.bin)
install('simul', prefix.bin) | # conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA | random_line_split |
package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class | (Package):
"""simul is an MPI coordinated test of parallel
filesystem system calls and library functions. """
homepage = "https://github.com/LLNL/simul"
url = "https://github.com/LLNL/simul/archive/1.16.tar.gz"
version('1.16', 'd616c1046a170c1e1b7956c402d23a95')
version('1.15', 'a5744673c094a87c05c6f0799d1f496f')
version('1.14', 'f8c14f0bac15741e2af354e3f9a0e30f')
version('1.13', '8a80a62d569557715d6c9c326e39a8ef')
depends_on('mpi')
def install(self, spec, prefix):
filter_file('mpicc', '$(MPICC)', 'Makefile', string=True)
filter_file('inline void', 'void', 'simul.c', string=True)
make('simul')
mkdirp(prefix.bin)
install('simul', prefix.bin)
| Simul | identifier_name |
socket.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use std::convert::From;
use std::io;
use std::io::{ErrorKind, Read, Write};
use std::net::{Shutdown, TcpStream};
use {TransportErrorKind, new_transport_error};
use super::{ReadHalf, TIoChannel, WriteHalf};
/// Bidirectional TCP/IP channel.
///
/// # Examples
///
/// Create a `TTcpChannel`.
///
/// ```no_run
/// use std::io::{Read, Write};
/// use thrift::transport::TTcpChannel;
///
/// let mut c = TTcpChannel::new();
/// c.open("localhost:9090").unwrap();
///
/// let mut buf = vec![0u8; 4];
/// c.read(&mut buf).unwrap();
/// c.write(&vec![0, 1, 2]).unwrap();
/// ```
///
/// Create a `TTcpChannel` by wrapping an existing `TcpStream`.
///
/// ```no_run
/// use std::io::{Read, Write};
/// use std::net::TcpStream;
/// use thrift::transport::TTcpChannel;
///
/// let stream = TcpStream::connect("127.0.0.1:9189").unwrap();
///
/// // no need to call c.open() since we've already connected above
/// let mut c = TTcpChannel::with_stream(stream);
///
/// let mut buf = vec![0u8; 4];
/// c.read(&mut buf).unwrap();
/// c.write(&vec![0, 1, 2]).unwrap();
/// ```
#[derive(Debug, Default)]
pub struct TTcpChannel {
stream: Option<TcpStream>,
}
impl TTcpChannel {
/// Create an uninitialized `TTcpChannel`.
///
/// The returned instance must be opened using `TTcpChannel::open(...)`
/// before it can be used.
pub fn new() -> TTcpChannel {
TTcpChannel { stream: None }
}
/// Create a `TTcpChannel` that wraps an existing `TcpStream`.
///
/// The passed-in stream is assumed to have been opened before being wrapped
/// by the created `TTcpChannel` instance.
pub fn with_stream(stream: TcpStream) -> TTcpChannel {
TTcpChannel { stream: Some(stream) }
}
/// Connect to `remote_address`, which should have the form `host:port`.
pub fn open(&mut self, remote_address: &str) -> ::Result<()> {
if self.stream.is_some() {
Err(
new_transport_error(
TransportErrorKind::AlreadyOpen,
"tcp connection previously opened",
),
)
} else {
match TcpStream::connect(&remote_address) {
Ok(s) => {
self.stream = Some(s);
Ok(())
}
Err(e) => Err(From::from(e)),
}
}
}
/// Shut down this channel.
///
/// Both send and receive halves are closed, and this instance can no
/// longer be used to communicate with another endpoint.
pub fn close(&mut self) -> ::Result<()> {
self.if_set(|s| s.shutdown(Shutdown::Both))
.map_err(From::from)
}
fn if_set<F, T>(&mut self, mut stream_operation: F) -> io::Result<T>
where
F: FnMut(&mut TcpStream) -> io::Result<T>,
|
}
impl TIoChannel for TTcpChannel {
fn split(self) -> ::Result<(ReadHalf<Self>, WriteHalf<Self>)>
where
Self: Sized,
{
let mut s = self;
s.stream
.as_mut()
.and_then(|s| s.try_clone().ok())
.map(
|cloned| {
(ReadHalf { handle: TTcpChannel { stream: s.stream.take() } },
WriteHalf { handle: TTcpChannel { stream: Some(cloned) } })
},
)
.ok_or_else(
|| {
new_transport_error(
TransportErrorKind::Unknown,
"cannot clone underlying tcp stream",
)
},
)
}
}
impl Read for TTcpChannel {
fn read(&mut self, b: &mut [u8]) -> io::Result<usize> {
self.if_set(|s| s.read(b))
}
}
impl Write for TTcpChannel {
fn write(&mut self, b: &[u8]) -> io::Result<usize> {
self.if_set(|s| s.write_all(b)).map(|_| b.len())
}
fn flush(&mut self) -> io::Result<()> {
self.if_set(|s| s.flush())
}
}
| {
if let Some(ref mut s) = self.stream {
stream_operation(s)
} else {
Err(io::Error::new(ErrorKind::NotConnected, "tcp endpoint not connected"),)
}
} | identifier_body |
socket.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use std::convert::From;
use std::io;
use std::io::{ErrorKind, Read, Write};
use std::net::{Shutdown, TcpStream};
use {TransportErrorKind, new_transport_error};
use super::{ReadHalf, TIoChannel, WriteHalf};
/// Bidirectional TCP/IP channel.
///
/// # Examples
///
/// Create a `TTcpChannel`.
///
/// ```no_run
/// use std::io::{Read, Write};
/// use thrift::transport::TTcpChannel;
///
/// let mut c = TTcpChannel::new();
/// c.open("localhost:9090").unwrap();
///
/// let mut buf = vec![0u8; 4];
/// c.read(&mut buf).unwrap();
/// c.write(&vec![0, 1, 2]).unwrap();
/// ```
///
/// Create a `TTcpChannel` by wrapping an existing `TcpStream`.
///
/// ```no_run
/// use std::io::{Read, Write};
/// use std::net::TcpStream;
/// use thrift::transport::TTcpChannel;
///
/// let stream = TcpStream::connect("127.0.0.1:9189").unwrap();
///
/// // no need to call c.open() since we've already connected above
/// let mut c = TTcpChannel::with_stream(stream);
///
/// let mut buf = vec![0u8; 4];
/// c.read(&mut buf).unwrap();
/// c.write(&vec![0, 1, 2]).unwrap();
/// ```
#[derive(Debug, Default)]
pub struct TTcpChannel {
stream: Option<TcpStream>,
}
impl TTcpChannel {
/// Create an uninitialized `TTcpChannel`.
///
/// The returned instance must be opened using `TTcpChannel::open(...)`
/// before it can be used.
pub fn new() -> TTcpChannel {
TTcpChannel { stream: None }
}
/// Create a `TTcpChannel` that wraps an existing `TcpStream`.
///
/// The passed-in stream is assumed to have been opened before being wrapped
/// by the created `TTcpChannel` instance.
pub fn with_stream(stream: TcpStream) -> TTcpChannel {
TTcpChannel { stream: Some(stream) }
}
/// Connect to `remote_address`, which should have the form `host:port`.
pub fn open(&mut self, remote_address: &str) -> ::Result<()> {
if self.stream.is_some() {
Err(
new_transport_error(
TransportErrorKind::AlreadyOpen,
"tcp connection previously opened",
),
)
} else {
match TcpStream::connect(&remote_address) {
Ok(s) => {
self.stream = Some(s);
Ok(())
}
Err(e) => Err(From::from(e)),
}
}
}
/// Shut down this channel.
///
/// Both send and receive halves are closed, and this instance can no
/// longer be used to communicate with another endpoint.
pub fn close(&mut self) -> ::Result<()> {
self.if_set(|s| s.shutdown(Shutdown::Both))
.map_err(From::from)
}
fn if_set<F, T>(&mut self, mut stream_operation: F) -> io::Result<T>
where
F: FnMut(&mut TcpStream) -> io::Result<T>,
{
if let Some(ref mut s) = self.stream | else {
Err(io::Error::new(ErrorKind::NotConnected, "tcp endpoint not connected"),)
}
}
}
impl TIoChannel for TTcpChannel {
fn split(self) -> ::Result<(ReadHalf<Self>, WriteHalf<Self>)>
where
Self: Sized,
{
let mut s = self;
s.stream
.as_mut()
.and_then(|s| s.try_clone().ok())
.map(
|cloned| {
(ReadHalf { handle: TTcpChannel { stream: s.stream.take() } },
WriteHalf { handle: TTcpChannel { stream: Some(cloned) } })
},
)
.ok_or_else(
|| {
new_transport_error(
TransportErrorKind::Unknown,
"cannot clone underlying tcp stream",
)
},
)
}
}
impl Read for TTcpChannel {
fn read(&mut self, b: &mut [u8]) -> io::Result<usize> {
self.if_set(|s| s.read(b))
}
}
impl Write for TTcpChannel {
fn write(&mut self, b: &[u8]) -> io::Result<usize> {
self.if_set(|s| s.write_all(b)).map(|_| b.len())
}
fn flush(&mut self) -> io::Result<()> {
self.if_set(|s| s.flush())
}
}
| {
stream_operation(s)
} | conditional_block |
socket.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use std::convert::From;
use std::io;
use std::io::{ErrorKind, Read, Write};
use std::net::{Shutdown, TcpStream};
use {TransportErrorKind, new_transport_error};
use super::{ReadHalf, TIoChannel, WriteHalf};
/// Bidirectional TCP/IP channel.
///
/// # Examples
///
/// Create a `TTcpChannel`.
///
/// ```no_run
/// use std::io::{Read, Write};
/// use thrift::transport::TTcpChannel;
///
/// let mut c = TTcpChannel::new();
/// c.open("localhost:9090").unwrap();
///
/// let mut buf = vec![0u8; 4];
/// c.read(&mut buf).unwrap();
/// c.write(&vec![0, 1, 2]).unwrap();
/// ```
///
/// Create a `TTcpChannel` by wrapping an existing `TcpStream`.
///
/// ```no_run
/// use std::io::{Read, Write};
/// use std::net::TcpStream;
/// use thrift::transport::TTcpChannel;
///
/// let stream = TcpStream::connect("127.0.0.1:9189").unwrap();
///
/// // no need to call c.open() since we've already connected above
/// let mut c = TTcpChannel::with_stream(stream);
///
/// let mut buf = vec![0u8; 4];
/// c.read(&mut buf).unwrap();
/// c.write(&vec![0, 1, 2]).unwrap();
/// ```
#[derive(Debug, Default)]
pub struct TTcpChannel {
stream: Option<TcpStream>,
}
impl TTcpChannel {
/// Create an uninitialized `TTcpChannel`.
///
/// The returned instance must be opened using `TTcpChannel::open(...)`
/// before it can be used.
pub fn new() -> TTcpChannel {
TTcpChannel { stream: None }
}
/// Create a `TTcpChannel` that wraps an existing `TcpStream`.
///
/// The passed-in stream is assumed to have been opened before being wrapped
/// by the created `TTcpChannel` instance.
pub fn with_stream(stream: TcpStream) -> TTcpChannel {
TTcpChannel { stream: Some(stream) }
}
/// Connect to `remote_address`, which should have the form `host:port`.
pub fn open(&mut self, remote_address: &str) -> ::Result<()> {
if self.stream.is_some() {
Err(
new_transport_error( | )
} else {
match TcpStream::connect(&remote_address) {
Ok(s) => {
self.stream = Some(s);
Ok(())
}
Err(e) => Err(From::from(e)),
}
}
}
/// Shut down this channel.
///
/// Both send and receive halves are closed, and this instance can no
/// longer be used to communicate with another endpoint.
pub fn close(&mut self) -> ::Result<()> {
self.if_set(|s| s.shutdown(Shutdown::Both))
.map_err(From::from)
}
fn if_set<F, T>(&mut self, mut stream_operation: F) -> io::Result<T>
where
F: FnMut(&mut TcpStream) -> io::Result<T>,
{
if let Some(ref mut s) = self.stream {
stream_operation(s)
} else {
Err(io::Error::new(ErrorKind::NotConnected, "tcp endpoint not connected"),)
}
}
}
impl TIoChannel for TTcpChannel {
fn split(self) -> ::Result<(ReadHalf<Self>, WriteHalf<Self>)>
where
Self: Sized,
{
let mut s = self;
s.stream
.as_mut()
.and_then(|s| s.try_clone().ok())
.map(
|cloned| {
(ReadHalf { handle: TTcpChannel { stream: s.stream.take() } },
WriteHalf { handle: TTcpChannel { stream: Some(cloned) } })
},
)
.ok_or_else(
|| {
new_transport_error(
TransportErrorKind::Unknown,
"cannot clone underlying tcp stream",
)
},
)
}
}
impl Read for TTcpChannel {
fn read(&mut self, b: &mut [u8]) -> io::Result<usize> {
self.if_set(|s| s.read(b))
}
}
impl Write for TTcpChannel {
fn write(&mut self, b: &[u8]) -> io::Result<usize> {
self.if_set(|s| s.write_all(b)).map(|_| b.len())
}
fn flush(&mut self) -> io::Result<()> {
self.if_set(|s| s.flush())
}
} | TransportErrorKind::AlreadyOpen,
"tcp connection previously opened",
), | random_line_split |
socket.rs | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use std::convert::From;
use std::io;
use std::io::{ErrorKind, Read, Write};
use std::net::{Shutdown, TcpStream};
use {TransportErrorKind, new_transport_error};
use super::{ReadHalf, TIoChannel, WriteHalf};
/// Bidirectional TCP/IP channel.
///
/// # Examples
///
/// Create a `TTcpChannel`.
///
/// ```no_run
/// use std::io::{Read, Write};
/// use thrift::transport::TTcpChannel;
///
/// let mut c = TTcpChannel::new();
/// c.open("localhost:9090").unwrap();
///
/// let mut buf = vec![0u8; 4];
/// c.read(&mut buf).unwrap();
/// c.write(&vec![0, 1, 2]).unwrap();
/// ```
///
/// Create a `TTcpChannel` by wrapping an existing `TcpStream`.
///
/// ```no_run
/// use std::io::{Read, Write};
/// use std::net::TcpStream;
/// use thrift::transport::TTcpChannel;
///
/// let stream = TcpStream::connect("127.0.0.1:9189").unwrap();
///
/// // no need to call c.open() since we've already connected above
/// let mut c = TTcpChannel::with_stream(stream);
///
/// let mut buf = vec![0u8; 4];
/// c.read(&mut buf).unwrap();
/// c.write(&vec![0, 1, 2]).unwrap();
/// ```
#[derive(Debug, Default)]
pub struct TTcpChannel {
stream: Option<TcpStream>,
}
impl TTcpChannel {
/// Create an uninitialized `TTcpChannel`.
///
/// The returned instance must be opened using `TTcpChannel::open(...)`
/// before it can be used.
pub fn new() -> TTcpChannel {
TTcpChannel { stream: None }
}
/// Create a `TTcpChannel` that wraps an existing `TcpStream`.
///
/// The passed-in stream is assumed to have been opened before being wrapped
/// by the created `TTcpChannel` instance.
pub fn with_stream(stream: TcpStream) -> TTcpChannel {
TTcpChannel { stream: Some(stream) }
}
/// Connect to `remote_address`, which should have the form `host:port`.
pub fn open(&mut self, remote_address: &str) -> ::Result<()> {
if self.stream.is_some() {
Err(
new_transport_error(
TransportErrorKind::AlreadyOpen,
"tcp connection previously opened",
),
)
} else {
match TcpStream::connect(&remote_address) {
Ok(s) => {
self.stream = Some(s);
Ok(())
}
Err(e) => Err(From::from(e)),
}
}
}
/// Shut down this channel.
///
/// Both send and receive halves are closed, and this instance can no
/// longer be used to communicate with another endpoint.
pub fn close(&mut self) -> ::Result<()> {
self.if_set(|s| s.shutdown(Shutdown::Both))
.map_err(From::from)
}
fn | <F, T>(&mut self, mut stream_operation: F) -> io::Result<T>
where
F: FnMut(&mut TcpStream) -> io::Result<T>,
{
if let Some(ref mut s) = self.stream {
stream_operation(s)
} else {
Err(io::Error::new(ErrorKind::NotConnected, "tcp endpoint not connected"),)
}
}
}
impl TIoChannel for TTcpChannel {
fn split(self) -> ::Result<(ReadHalf<Self>, WriteHalf<Self>)>
where
Self: Sized,
{
let mut s = self;
s.stream
.as_mut()
.and_then(|s| s.try_clone().ok())
.map(
|cloned| {
(ReadHalf { handle: TTcpChannel { stream: s.stream.take() } },
WriteHalf { handle: TTcpChannel { stream: Some(cloned) } })
},
)
.ok_or_else(
|| {
new_transport_error(
TransportErrorKind::Unknown,
"cannot clone underlying tcp stream",
)
},
)
}
}
impl Read for TTcpChannel {
fn read(&mut self, b: &mut [u8]) -> io::Result<usize> {
self.if_set(|s| s.read(b))
}
}
impl Write for TTcpChannel {
fn write(&mut self, b: &[u8]) -> io::Result<usize> {
self.if_set(|s| s.write_all(b)).map(|_| b.len())
}
fn flush(&mut self) -> io::Result<()> {
self.if_set(|s| s.flush())
}
}
| if_set | identifier_name |
thread_local.rs | use std::cell::RefCell;
use std::mem;
use thread::Thread;
pub struct Data {
pub current_thread: Thread,
pub parkable: bool, // i.e. for frame stack lock
}
pub unsafe fn init(data: Data) {
set_data_ptr(Box::new(RefCell::new(data)));
}
pub fn data() -> &'static RefCell<Data> {
unsafe{&*get_data_ptr()}
} | let mut data: *const RefCell<Data>;
asm!("movq %fs:0, $0" : "=r"(data) ::: "volatile");
data
}
#[cfg(target_arch = "x86_64")]
unsafe fn set_data_ptr(data: Box<RefCell<Data>>) {
let data_ptr: *const RefCell<Data> = mem::transmute(data);
asm!("movq $0, %fs:0" :: "r"(data_ptr) :: "volatile");
} |
#[cfg(target_arch = "x86_64")]
unsafe fn get_data_ptr() -> *const RefCell<Data> { | random_line_split |
thread_local.rs | use std::cell::RefCell;
use std::mem;
use thread::Thread;
pub struct Data {
pub current_thread: Thread,
pub parkable: bool, // i.e. for frame stack lock
}
pub unsafe fn init(data: Data) {
set_data_ptr(Box::new(RefCell::new(data)));
}
pub fn data() -> &'static RefCell<Data> {
unsafe{&*get_data_ptr()}
}
#[cfg(target_arch = "x86_64")]
unsafe fn get_data_ptr() -> *const RefCell<Data> {
let mut data: *const RefCell<Data>;
asm!("movq %fs:0, $0" : "=r"(data) ::: "volatile");
data
}
#[cfg(target_arch = "x86_64")]
unsafe fn set_data_ptr(data: Box<RefCell<Data>>) | {
let data_ptr: *const RefCell<Data> = mem::transmute(data);
asm!("movq $0, %fs:0" :: "r"(data_ptr) :: "volatile");
} | identifier_body | |
thread_local.rs | use std::cell::RefCell;
use std::mem;
use thread::Thread;
pub struct Data {
pub current_thread: Thread,
pub parkable: bool, // i.e. for frame stack lock
}
pub unsafe fn init(data: Data) {
set_data_ptr(Box::new(RefCell::new(data)));
}
pub fn data() -> &'static RefCell<Data> {
unsafe{&*get_data_ptr()}
}
#[cfg(target_arch = "x86_64")]
unsafe fn | () -> *const RefCell<Data> {
let mut data: *const RefCell<Data>;
asm!("movq %fs:0, $0" : "=r"(data) ::: "volatile");
data
}
#[cfg(target_arch = "x86_64")]
unsafe fn set_data_ptr(data: Box<RefCell<Data>>) {
let data_ptr: *const RefCell<Data> = mem::transmute(data);
asm!("movq $0, %fs:0" :: "r"(data_ptr) :: "volatile");
} | get_data_ptr | identifier_name |
hyperparams_builder.py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Builder function to construct tf-slim arg_scope for convolution, fc ops."""
import tensorflow as tf
from object_detection.protos import hyperparams_pb2
slim = tf.contrib.slim
def build(hyperparams_config, is_training):
"""Builds tf-slim arg_scope for convolution ops based on the config.
Returns an arg_scope to use for convolution ops containing weights
initializer, weights regularizer, activation function, batch norm function
and batch norm parameters based on the configuration.
Note that if the batch_norm parameteres are not specified in the config
(i.e. left to default) then batch norm is excluded from the arg_scope.
The batch norm parameters are set for updates based on `is_training` argument
and conv_hyperparams_config.batch_norm.train parameter. During training, they
are updated only if batch_norm.train parameter is true. However, during eval,
no updates are made to the batch norm variables. In both cases, their current
values are used during forward pass.
Args:
hyperparams_config: hyperparams.proto object containing
hyperparameters.
is_training: Whether the network is in training mode.
Returns:
arg_scope: tf-slim arg_scope containing hyperparameters for ops.
Raises:
ValueError: if hyperparams_config is not of type hyperparams.Hyperparams.
"""
if not isinstance(hyperparams_config,
hyperparams_pb2.Hyperparams):
raise ValueError('hyperparams_config not of type '
'hyperparams_pb.Hyperparams.')
batch_norm = None
batch_norm_params = None
if hyperparams_config.HasField('batch_norm'):
batch_norm = slim.batch_norm
batch_norm_params = _build_batch_norm_params(
hyperparams_config.batch_norm, is_training)
affected_ops = [slim.conv2d, slim.separable_conv2d, slim.conv2d_transpose]
if hyperparams_config.HasField('op') and (
hyperparams_config.op == hyperparams_pb2.Hyperparams.FC):
affected_ops = [slim.fully_connected]
with slim.arg_scope(
affected_ops,
weights_regularizer=_build_regularizer(
hyperparams_config.regularizer),
weights_initializer=_build_initializer(
hyperparams_config.initializer),
activation_fn=_build_activation_fn(hyperparams_config.activation),
normalizer_fn=batch_norm,
normalizer_params=batch_norm_params) as sc:
return sc
def _build_activation_fn(activation_fn):
"""Builds a callable activation from config.
Args:
activation_fn: hyperparams_pb2.Hyperparams.activation
Returns:
Callable activation function.
Raises:
ValueError: On unknown activation function.
"""
if activation_fn == hyperparams_pb2.Hyperparams.NONE:
return None
if activation_fn == hyperparams_pb2.Hyperparams.RELU:
return tf.nn.relu
if activation_fn == hyperparams_pb2.Hyperparams.RELU_6:
return tf.nn.relu6
raise ValueError('Unknown activation function: {}'.format(activation_fn))
def | (regularizer):
"""Builds a tf-slim regularizer from config.
Args:
regularizer: hyperparams_pb2.Hyperparams.regularizer proto.
Returns:
tf-slim regularizer.
Raises:
ValueError: On unknown regularizer.
"""
regularizer_oneof = regularizer.WhichOneof('regularizer_oneof')
if regularizer_oneof == 'l1_regularizer':
return slim.l1_regularizer(scale=float(regularizer.l1_regularizer.weight))
if regularizer_oneof == 'l2_regularizer':
return slim.l2_regularizer(scale=float(regularizer.l2_regularizer.weight))
raise ValueError('Unknown regularizer function: {}'.format(regularizer_oneof))
def _build_initializer(initializer):
"""Build a tf initializer from config.
Args:
initializer: hyperparams_pb2.Hyperparams.regularizer proto.
Returns:
tf initializer.
Raises:
ValueError: On unknown initializer.
"""
initializer_oneof = initializer.WhichOneof('initializer_oneof')
if initializer_oneof == 'truncated_normal_initializer':
return tf.truncated_normal_initializer(
mean=initializer.truncated_normal_initializer.mean,
stddev=initializer.truncated_normal_initializer.stddev)
if initializer_oneof == 'random_normal_initializer':
return tf.random_normal_initializer(
mean=initializer.random_normal_initializer.mean,
stddev=initializer.random_normal_initializer.stddev)
if initializer_oneof == 'variance_scaling_initializer':
enum_descriptor = (hyperparams_pb2.VarianceScalingInitializer.
DESCRIPTOR.enum_types_by_name['Mode'])
mode = enum_descriptor.values_by_number[initializer.
variance_scaling_initializer.
mode].name
return slim.variance_scaling_initializer(
factor=initializer.variance_scaling_initializer.factor,
mode=mode,
uniform=initializer.variance_scaling_initializer.uniform)
raise ValueError('Unknown initializer function: {}'.format(
initializer_oneof))
def _build_batch_norm_params(batch_norm, is_training):
"""Build a dictionary of batch_norm params from config.
Args:
batch_norm: hyperparams_pb2.ConvHyperparams.batch_norm proto.
is_training: Whether the models is in training mode.
Returns:
A dictionary containing batch_norm parameters.
"""
batch_norm_params = {
'decay': batch_norm.decay,
'center': batch_norm.center,
'scale': batch_norm.scale,
'epsilon': batch_norm.epsilon,
'is_training': is_training and batch_norm.train,
}
return batch_norm_params
| _build_regularizer | identifier_name |
hyperparams_builder.py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Builder function to construct tf-slim arg_scope for convolution, fc ops."""
import tensorflow as tf
from object_detection.protos import hyperparams_pb2
slim = tf.contrib.slim
def build(hyperparams_config, is_training):
"""Builds tf-slim arg_scope for convolution ops based on the config.
Returns an arg_scope to use for convolution ops containing weights
initializer, weights regularizer, activation function, batch norm function
and batch norm parameters based on the configuration.
Note that if the batch_norm parameteres are not specified in the config
(i.e. left to default) then batch norm is excluded from the arg_scope.
The batch norm parameters are set for updates based on `is_training` argument
and conv_hyperparams_config.batch_norm.train parameter. During training, they
are updated only if batch_norm.train parameter is true. However, during eval,
no updates are made to the batch norm variables. In both cases, their current
values are used during forward pass.
Args:
hyperparams_config: hyperparams.proto object containing
hyperparameters.
is_training: Whether the network is in training mode.
Returns:
arg_scope: tf-slim arg_scope containing hyperparameters for ops.
Raises:
ValueError: if hyperparams_config is not of type hyperparams.Hyperparams.
"""
if not isinstance(hyperparams_config,
hyperparams_pb2.Hyperparams):
raise ValueError('hyperparams_config not of type '
'hyperparams_pb.Hyperparams.')
batch_norm = None
batch_norm_params = None
if hyperparams_config.HasField('batch_norm'):
batch_norm = slim.batch_norm
batch_norm_params = _build_batch_norm_params(
hyperparams_config.batch_norm, is_training)
affected_ops = [slim.conv2d, slim.separable_conv2d, slim.conv2d_transpose]
if hyperparams_config.HasField('op') and (
hyperparams_config.op == hyperparams_pb2.Hyperparams.FC):
affected_ops = [slim.fully_connected]
with slim.arg_scope(
affected_ops,
weights_regularizer=_build_regularizer(
hyperparams_config.regularizer),
weights_initializer=_build_initializer(
hyperparams_config.initializer),
activation_fn=_build_activation_fn(hyperparams_config.activation),
normalizer_fn=batch_norm,
normalizer_params=batch_norm_params) as sc:
return sc
def _build_activation_fn(activation_fn):
"""Builds a callable activation from config.
Args:
activation_fn: hyperparams_pb2.Hyperparams.activation
Returns:
Callable activation function.
Raises:
ValueError: On unknown activation function.
"""
if activation_fn == hyperparams_pb2.Hyperparams.NONE:
return None
if activation_fn == hyperparams_pb2.Hyperparams.RELU:
return tf.nn.relu
if activation_fn == hyperparams_pb2.Hyperparams.RELU_6:
return tf.nn.relu6
raise ValueError('Unknown activation function: {}'.format(activation_fn))
def _build_regularizer(regularizer):
"""Builds a tf-slim regularizer from config.
Args:
regularizer: hyperparams_pb2.Hyperparams.regularizer proto.
Returns:
tf-slim regularizer.
Raises:
ValueError: On unknown regularizer.
"""
regularizer_oneof = regularizer.WhichOneof('regularizer_oneof')
if regularizer_oneof == 'l1_regularizer':
return slim.l1_regularizer(scale=float(regularizer.l1_regularizer.weight))
if regularizer_oneof == 'l2_regularizer':
|
raise ValueError('Unknown regularizer function: {}'.format(regularizer_oneof))
def _build_initializer(initializer):
"""Build a tf initializer from config.
Args:
initializer: hyperparams_pb2.Hyperparams.regularizer proto.
Returns:
tf initializer.
Raises:
ValueError: On unknown initializer.
"""
initializer_oneof = initializer.WhichOneof('initializer_oneof')
if initializer_oneof == 'truncated_normal_initializer':
return tf.truncated_normal_initializer(
mean=initializer.truncated_normal_initializer.mean,
stddev=initializer.truncated_normal_initializer.stddev)
if initializer_oneof == 'random_normal_initializer':
return tf.random_normal_initializer(
mean=initializer.random_normal_initializer.mean,
stddev=initializer.random_normal_initializer.stddev)
if initializer_oneof == 'variance_scaling_initializer':
enum_descriptor = (hyperparams_pb2.VarianceScalingInitializer.
DESCRIPTOR.enum_types_by_name['Mode'])
mode = enum_descriptor.values_by_number[initializer.
variance_scaling_initializer.
mode].name
return slim.variance_scaling_initializer(
factor=initializer.variance_scaling_initializer.factor,
mode=mode,
uniform=initializer.variance_scaling_initializer.uniform)
raise ValueError('Unknown initializer function: {}'.format(
initializer_oneof))
def _build_batch_norm_params(batch_norm, is_training):
"""Build a dictionary of batch_norm params from config.
Args:
batch_norm: hyperparams_pb2.ConvHyperparams.batch_norm proto.
is_training: Whether the models is in training mode.
Returns:
A dictionary containing batch_norm parameters.
"""
batch_norm_params = {
'decay': batch_norm.decay,
'center': batch_norm.center,
'scale': batch_norm.scale,
'epsilon': batch_norm.epsilon,
'is_training': is_training and batch_norm.train,
}
return batch_norm_params
| return slim.l2_regularizer(scale=float(regularizer.l2_regularizer.weight)) | conditional_block |
hyperparams_builder.py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Builder function to construct tf-slim arg_scope for convolution, fc ops."""
import tensorflow as tf
from object_detection.protos import hyperparams_pb2
slim = tf.contrib.slim
def build(hyperparams_config, is_training):
"""Builds tf-slim arg_scope for convolution ops based on the config.
Returns an arg_scope to use for convolution ops containing weights
initializer, weights regularizer, activation function, batch norm function
and batch norm parameters based on the configuration.
Note that if the batch_norm parameteres are not specified in the config
(i.e. left to default) then batch norm is excluded from the arg_scope.
The batch norm parameters are set for updates based on `is_training` argument
and conv_hyperparams_config.batch_norm.train parameter. During training, they
are updated only if batch_norm.train parameter is true. However, during eval,
no updates are made to the batch norm variables. In both cases, their current
values are used during forward pass.
Args:
hyperparams_config: hyperparams.proto object containing
hyperparameters.
is_training: Whether the network is in training mode.
Returns:
arg_scope: tf-slim arg_scope containing hyperparameters for ops.
Raises:
ValueError: if hyperparams_config is not of type hyperparams.Hyperparams.
"""
if not isinstance(hyperparams_config, | batch_norm_params = None
if hyperparams_config.HasField('batch_norm'):
batch_norm = slim.batch_norm
batch_norm_params = _build_batch_norm_params(
hyperparams_config.batch_norm, is_training)
affected_ops = [slim.conv2d, slim.separable_conv2d, slim.conv2d_transpose]
if hyperparams_config.HasField('op') and (
hyperparams_config.op == hyperparams_pb2.Hyperparams.FC):
affected_ops = [slim.fully_connected]
with slim.arg_scope(
affected_ops,
weights_regularizer=_build_regularizer(
hyperparams_config.regularizer),
weights_initializer=_build_initializer(
hyperparams_config.initializer),
activation_fn=_build_activation_fn(hyperparams_config.activation),
normalizer_fn=batch_norm,
normalizer_params=batch_norm_params) as sc:
return sc
def _build_activation_fn(activation_fn):
"""Builds a callable activation from config.
Args:
activation_fn: hyperparams_pb2.Hyperparams.activation
Returns:
Callable activation function.
Raises:
ValueError: On unknown activation function.
"""
if activation_fn == hyperparams_pb2.Hyperparams.NONE:
return None
if activation_fn == hyperparams_pb2.Hyperparams.RELU:
return tf.nn.relu
if activation_fn == hyperparams_pb2.Hyperparams.RELU_6:
return tf.nn.relu6
raise ValueError('Unknown activation function: {}'.format(activation_fn))
def _build_regularizer(regularizer):
"""Builds a tf-slim regularizer from config.
Args:
regularizer: hyperparams_pb2.Hyperparams.regularizer proto.
Returns:
tf-slim regularizer.
Raises:
ValueError: On unknown regularizer.
"""
regularizer_oneof = regularizer.WhichOneof('regularizer_oneof')
if regularizer_oneof == 'l1_regularizer':
return slim.l1_regularizer(scale=float(regularizer.l1_regularizer.weight))
if regularizer_oneof == 'l2_regularizer':
return slim.l2_regularizer(scale=float(regularizer.l2_regularizer.weight))
raise ValueError('Unknown regularizer function: {}'.format(regularizer_oneof))
def _build_initializer(initializer):
"""Build a tf initializer from config.
Args:
initializer: hyperparams_pb2.Hyperparams.regularizer proto.
Returns:
tf initializer.
Raises:
ValueError: On unknown initializer.
"""
initializer_oneof = initializer.WhichOneof('initializer_oneof')
if initializer_oneof == 'truncated_normal_initializer':
return tf.truncated_normal_initializer(
mean=initializer.truncated_normal_initializer.mean,
stddev=initializer.truncated_normal_initializer.stddev)
if initializer_oneof == 'random_normal_initializer':
return tf.random_normal_initializer(
mean=initializer.random_normal_initializer.mean,
stddev=initializer.random_normal_initializer.stddev)
if initializer_oneof == 'variance_scaling_initializer':
enum_descriptor = (hyperparams_pb2.VarianceScalingInitializer.
DESCRIPTOR.enum_types_by_name['Mode'])
mode = enum_descriptor.values_by_number[initializer.
variance_scaling_initializer.
mode].name
return slim.variance_scaling_initializer(
factor=initializer.variance_scaling_initializer.factor,
mode=mode,
uniform=initializer.variance_scaling_initializer.uniform)
raise ValueError('Unknown initializer function: {}'.format(
initializer_oneof))
def _build_batch_norm_params(batch_norm, is_training):
"""Build a dictionary of batch_norm params from config.
Args:
batch_norm: hyperparams_pb2.ConvHyperparams.batch_norm proto.
is_training: Whether the models is in training mode.
Returns:
A dictionary containing batch_norm parameters.
"""
batch_norm_params = {
'decay': batch_norm.decay,
'center': batch_norm.center,
'scale': batch_norm.scale,
'epsilon': batch_norm.epsilon,
'is_training': is_training and batch_norm.train,
}
return batch_norm_params | hyperparams_pb2.Hyperparams):
raise ValueError('hyperparams_config not of type '
'hyperparams_pb.Hyperparams.')
batch_norm = None | random_line_split |
hyperparams_builder.py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Builder function to construct tf-slim arg_scope for convolution, fc ops."""
import tensorflow as tf
from object_detection.protos import hyperparams_pb2
slim = tf.contrib.slim
def build(hyperparams_config, is_training):
|
def _build_activation_fn(activation_fn):
"""Builds a callable activation from config.
Args:
activation_fn: hyperparams_pb2.Hyperparams.activation
Returns:
Callable activation function.
Raises:
ValueError: On unknown activation function.
"""
if activation_fn == hyperparams_pb2.Hyperparams.NONE:
return None
if activation_fn == hyperparams_pb2.Hyperparams.RELU:
return tf.nn.relu
if activation_fn == hyperparams_pb2.Hyperparams.RELU_6:
return tf.nn.relu6
raise ValueError('Unknown activation function: {}'.format(activation_fn))
def _build_regularizer(regularizer):
"""Builds a tf-slim regularizer from config.
Args:
regularizer: hyperparams_pb2.Hyperparams.regularizer proto.
Returns:
tf-slim regularizer.
Raises:
ValueError: On unknown regularizer.
"""
regularizer_oneof = regularizer.WhichOneof('regularizer_oneof')
if regularizer_oneof == 'l1_regularizer':
return slim.l1_regularizer(scale=float(regularizer.l1_regularizer.weight))
if regularizer_oneof == 'l2_regularizer':
return slim.l2_regularizer(scale=float(regularizer.l2_regularizer.weight))
raise ValueError('Unknown regularizer function: {}'.format(regularizer_oneof))
def _build_initializer(initializer):
"""Build a tf initializer from config.
Args:
initializer: hyperparams_pb2.Hyperparams.regularizer proto.
Returns:
tf initializer.
Raises:
ValueError: On unknown initializer.
"""
initializer_oneof = initializer.WhichOneof('initializer_oneof')
if initializer_oneof == 'truncated_normal_initializer':
return tf.truncated_normal_initializer(
mean=initializer.truncated_normal_initializer.mean,
stddev=initializer.truncated_normal_initializer.stddev)
if initializer_oneof == 'random_normal_initializer':
return tf.random_normal_initializer(
mean=initializer.random_normal_initializer.mean,
stddev=initializer.random_normal_initializer.stddev)
if initializer_oneof == 'variance_scaling_initializer':
enum_descriptor = (hyperparams_pb2.VarianceScalingInitializer.
DESCRIPTOR.enum_types_by_name['Mode'])
mode = enum_descriptor.values_by_number[initializer.
variance_scaling_initializer.
mode].name
return slim.variance_scaling_initializer(
factor=initializer.variance_scaling_initializer.factor,
mode=mode,
uniform=initializer.variance_scaling_initializer.uniform)
raise ValueError('Unknown initializer function: {}'.format(
initializer_oneof))
def _build_batch_norm_params(batch_norm, is_training):
"""Build a dictionary of batch_norm params from config.
Args:
batch_norm: hyperparams_pb2.ConvHyperparams.batch_norm proto.
is_training: Whether the models is in training mode.
Returns:
A dictionary containing batch_norm parameters.
"""
batch_norm_params = {
'decay': batch_norm.decay,
'center': batch_norm.center,
'scale': batch_norm.scale,
'epsilon': batch_norm.epsilon,
'is_training': is_training and batch_norm.train,
}
return batch_norm_params
| """Builds tf-slim arg_scope for convolution ops based on the config.
Returns an arg_scope to use for convolution ops containing weights
initializer, weights regularizer, activation function, batch norm function
and batch norm parameters based on the configuration.
Note that if the batch_norm parameteres are not specified in the config
(i.e. left to default) then batch norm is excluded from the arg_scope.
The batch norm parameters are set for updates based on `is_training` argument
and conv_hyperparams_config.batch_norm.train parameter. During training, they
are updated only if batch_norm.train parameter is true. However, during eval,
no updates are made to the batch norm variables. In both cases, their current
values are used during forward pass.
Args:
hyperparams_config: hyperparams.proto object containing
hyperparameters.
is_training: Whether the network is in training mode.
Returns:
arg_scope: tf-slim arg_scope containing hyperparameters for ops.
Raises:
ValueError: if hyperparams_config is not of type hyperparams.Hyperparams.
"""
if not isinstance(hyperparams_config,
hyperparams_pb2.Hyperparams):
raise ValueError('hyperparams_config not of type '
'hyperparams_pb.Hyperparams.')
batch_norm = None
batch_norm_params = None
if hyperparams_config.HasField('batch_norm'):
batch_norm = slim.batch_norm
batch_norm_params = _build_batch_norm_params(
hyperparams_config.batch_norm, is_training)
affected_ops = [slim.conv2d, slim.separable_conv2d, slim.conv2d_transpose]
if hyperparams_config.HasField('op') and (
hyperparams_config.op == hyperparams_pb2.Hyperparams.FC):
affected_ops = [slim.fully_connected]
with slim.arg_scope(
affected_ops,
weights_regularizer=_build_regularizer(
hyperparams_config.regularizer),
weights_initializer=_build_initializer(
hyperparams_config.initializer),
activation_fn=_build_activation_fn(hyperparams_config.activation),
normalizer_fn=batch_norm,
normalizer_params=batch_norm_params) as sc:
return sc | identifier_body |
test_main.py | import sys
import pytest
from flask import current_app
from flask_wtf import Form
from wtforms import TextField
from faker import Faker
import arrow
import uuid
from unifispot.core.models import Wifisite,Device,Guesttrack
from tests.helpers import get_guestentry_url,randomMAC,loggin_as_admin
def test_guest_portal(session,client):
#test unifi entry point
site1 = Wifisite.query.get(1)
#with emptry MAC/APMAC
res = client.get(get_guestentry_url(site1)).status
assert '404 NOT FOUND' == res , 'Getting :%s instead 404 for\
with empty MAC/APMAC' %res
|
#with invalid MAC/APMAC
res = client.get(get_guestentry_url(site1,mac='11:22:33:44',apmac='22:44:55')).status
assert '404 NOT FOUND' == res , 'Getting :%s instead 404 for\
with empty MAC/APMAC' %res
#with invalid sitekey
site2 = Wifisite(sitekey = 'test',backend_type='unifi')
res = client.get(get_guestentry_url(site2,mac=randomMAC(),apmac=randomMAC())).status
assert '404 NOT FOUND' == res , 'Getting :%s instead 404 for\
with invalid sitekey' %res
#with everything valid
res = client.get(get_guestentry_url(site1,mac=randomMAC(),apmac=randomMAC())).status
assert '302 FOUND' == res , 'Getting :%s instead 302 FOUND for\
with valid data' %res
assert 1 == Guesttrack.query.count(),'More than one guesttrack '
#check demo is not set with no-auth visit
mac=randomMAC()
res = client.get(get_guestentry_url(site1,mac=mac,apmac=randomMAC(),demo=1)).status
assert '302 FOUND' == res , 'Getting :%s instead 302 FOUND for\
with valid data' %res
assert 0 == Guesttrack.query.filter_by(devicemac=mac).first().demo,\
'Demo is not rejected for non auth visits '
#check demo is not set with auth visit
mac=randomMAC()
admin = loggin_as_admin()
res = admin.get(get_guestentry_url(site1,mac=mac,apmac=randomMAC(),demo=1)).status
assert '302 FOUND' == res , 'Getting :%s instead 302 FOUND for\
with valid data' %res
assert 1 == Guesttrack.query.filter_by(devicemac=mac).first().demo,\
'Demo is rejected for auth visits ' | random_line_split | |
test_main.py | import sys
import pytest
from flask import current_app
from flask_wtf import Form
from wtforms import TextField
from faker import Faker
import arrow
import uuid
from unifispot.core.models import Wifisite,Device,Guesttrack
from tests.helpers import get_guestentry_url,randomMAC,loggin_as_admin
def | (session,client):
#test unifi entry point
site1 = Wifisite.query.get(1)
#with emptry MAC/APMAC
res = client.get(get_guestentry_url(site1)).status
assert '404 NOT FOUND' == res , 'Getting :%s instead 404 for\
with empty MAC/APMAC' %res
#with invalid MAC/APMAC
res = client.get(get_guestentry_url(site1,mac='11:22:33:44',apmac='22:44:55')).status
assert '404 NOT FOUND' == res , 'Getting :%s instead 404 for\
with empty MAC/APMAC' %res
#with invalid sitekey
site2 = Wifisite(sitekey = 'test',backend_type='unifi')
res = client.get(get_guestentry_url(site2,mac=randomMAC(),apmac=randomMAC())).status
assert '404 NOT FOUND' == res , 'Getting :%s instead 404 for\
with invalid sitekey' %res
#with everything valid
res = client.get(get_guestentry_url(site1,mac=randomMAC(),apmac=randomMAC())).status
assert '302 FOUND' == res , 'Getting :%s instead 302 FOUND for\
with valid data' %res
assert 1 == Guesttrack.query.count(),'More than one guesttrack '
#check demo is not set with no-auth visit
mac=randomMAC()
res = client.get(get_guestentry_url(site1,mac=mac,apmac=randomMAC(),demo=1)).status
assert '302 FOUND' == res , 'Getting :%s instead 302 FOUND for\
with valid data' %res
assert 0 == Guesttrack.query.filter_by(devicemac=mac).first().demo,\
'Demo is not rejected for non auth visits '
#check demo is not set with auth visit
mac=randomMAC()
admin = loggin_as_admin()
res = admin.get(get_guestentry_url(site1,mac=mac,apmac=randomMAC(),demo=1)).status
assert '302 FOUND' == res , 'Getting :%s instead 302 FOUND for\
with valid data' %res
assert 1 == Guesttrack.query.filter_by(devicemac=mac).first().demo,\
'Demo is rejected for auth visits '
| test_guest_portal | identifier_name |
test_main.py | import sys
import pytest
from flask import current_app
from flask_wtf import Form
from wtforms import TextField
from faker import Faker
import arrow
import uuid
from unifispot.core.models import Wifisite,Device,Guesttrack
from tests.helpers import get_guestentry_url,randomMAC,loggin_as_admin
def test_guest_portal(session,client):
#test unifi entry point
| site1 = Wifisite.query.get(1)
#with emptry MAC/APMAC
res = client.get(get_guestentry_url(site1)).status
assert '404 NOT FOUND' == res , 'Getting :%s instead 404 for\
with empty MAC/APMAC' %res
#with invalid MAC/APMAC
res = client.get(get_guestentry_url(site1,mac='11:22:33:44',apmac='22:44:55')).status
assert '404 NOT FOUND' == res , 'Getting :%s instead 404 for\
with empty MAC/APMAC' %res
#with invalid sitekey
site2 = Wifisite(sitekey = 'test',backend_type='unifi')
res = client.get(get_guestentry_url(site2,mac=randomMAC(),apmac=randomMAC())).status
assert '404 NOT FOUND' == res , 'Getting :%s instead 404 for\
with invalid sitekey' %res
#with everything valid
res = client.get(get_guestentry_url(site1,mac=randomMAC(),apmac=randomMAC())).status
assert '302 FOUND' == res , 'Getting :%s instead 302 FOUND for\
with valid data' %res
assert 1 == Guesttrack.query.count(),'More than one guesttrack '
#check demo is not set with no-auth visit
mac=randomMAC()
res = client.get(get_guestentry_url(site1,mac=mac,apmac=randomMAC(),demo=1)).status
assert '302 FOUND' == res , 'Getting :%s instead 302 FOUND for\
with valid data' %res
assert 0 == Guesttrack.query.filter_by(devicemac=mac).first().demo,\
'Demo is not rejected for non auth visits '
#check demo is not set with auth visit
mac=randomMAC()
admin = loggin_as_admin()
res = admin.get(get_guestentry_url(site1,mac=mac,apmac=randomMAC(),demo=1)).status
assert '302 FOUND' == res , 'Getting :%s instead 302 FOUND for\
with valid data' %res
assert 1 == Guesttrack.query.filter_by(devicemac=mac).first().demo,\
'Demo is rejected for auth visits ' | identifier_body | |
glob.js | // Approach:
//
// 1. Get the minimatch set
// 2. For each pattern in the set, PROCESS(pattern, false)
// 3. Store matches per-set, then uniq them
//
// PROCESS(pattern, inGlobStar)
// Get the first [n] items from pattern that are all strings
// Join these together. This is PREFIX.
// If there is no more remaining, then stat(PREFIX) and
// add to matches if it succeeds. END.
//
// If inGlobStar and PREFIX is symlink and points to dir
// set ENTRIES = []
// else readdir(PREFIX) as ENTRIES
// If fail, END
//
// with ENTRIES
// If pattern[n] is GLOBSTAR
// // handle the case where the globstar match is empty
// // by pruning it out, and testing the resulting pattern
// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
// // handle other cases.
// for ENTRY in ENTRIES (not dotfiles)
// // attach globstar + tail onto the entry
// // Mark that this entry is a globstar match
// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
//
// else // not globstar
// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
// Test ENTRY against pattern[n]
// If fails, continue
// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
//
// Caveat:
// Cache all stats and readdirs results to minimize syscall. Since all
// we ever care about is existence and directory-ness, we can just keep
// `true` for files, and [children,...] for directories, or `false` for
// things that don't exist.
module.exports = glob
var rp = require('fs.realpath')
var minimatch = require('minimatch')
var Minimatch = minimatch.Minimatch
var inherits = require('inherits')
var EE = require('events').EventEmitter
var path = require('path')
var assert = require('assert')
var isAbsolute = require('path-is-absolute')
var globSync = require('./sync.js')
var common = require('./common.js')
var setopts = common.setopts
var ownProp = common.ownProp
var inflight = require('inflight')
var util = require('util')
var childrenIgnored = common.childrenIgnored
var isIgnored = common.isIgnored
var once = require('once')
function glob (pattern, options, cb) {
if (typeof options === 'function') cb = options, options = {}
if (!options) options = {}
if (options.sync) {
if (cb)
throw new TypeError('callback provided to sync glob')
return globSync(pattern, options)
}
return new Glob(pattern, options, cb)
}
glob.sync = globSync
var GlobSync = glob.GlobSync = globSync.GlobSync
// old api surface
glob.glob = glob
function extend (origin, add) {
if (add === null || typeof add !== 'object') {
return origin
}
var keys = Object.keys(add)
var i = keys.length
while (i--) {
origin[keys[i]] = add[keys[i]]
}
return origin
}
glob.hasMagic = function (pattern, options_) {
var options = extend({}, options_)
options.noprocess = true
var g = new Glob(pattern, options)
var set = g.minimatch.set
if (!pattern)
return false
if (set.length > 1)
return true
for (var j = 0; j < set[0].length; j++) {
if (typeof set[0][j] !== 'string')
return true
}
return false
}
glob.Glob = Glob
inherits(Glob, EE)
function Glob (pattern, options, cb) {
if (typeof options === 'function') {
cb = options
options = null
}
if (options && options.sync) {
if (cb)
throw new TypeError('callback provided to sync glob')
return new GlobSync(pattern, options)
}
if (!(this instanceof Glob))
return new Glob(pattern, options, cb)
setopts(this, pattern, options)
this._didRealPath = false
// process each pattern in the minimatch set
var n = this.minimatch.set.length
// The matches are stored as {<filename>: true,...} so that
// duplicates are automagically pruned.
// Later, we do an Object.keys() on these.
// Keep them as a list so we can fill in when nonull is set.
this.matches = new Array(n)
if (typeof cb === 'function') {
cb = once(cb)
this.on('error', cb)
this.on('end', function (matches) {
cb(null, matches)
})
}
var self = this
this._processing = 0
this._emitQueue = []
this._processQueue = []
this.paused = false
if (this.noprocess)
return this
if (n === 0)
return done()
var sync = true
for (var i = 0; i < n; i ++) {
this._process(this.minimatch.set[i], i, false, done)
}
sync = false
function done () {
--self._processing
if (self._processing <= 0) {
if (sync) {
process.nextTick(function () {
self._finish()
})
} else {
self._finish()
}
}
}
}
Glob.prototype._finish = function () {
assert(this instanceof Glob)
if (this.aborted)
return
if (this.realpath && !this._didRealpath)
return this._realpath()
common.finish(this)
this.emit('end', this.found)
}
Glob.prototype._realpath = function () {
if (this._didRealpath)
return
this._didRealpath = true
var n = this.matches.length
if (n === 0)
return this._finish()
var self = this
for (var i = 0; i < this.matches.length; i++)
this._realpathSet(i, next)
function | () {
if (--n === 0)
self._finish()
}
}
Glob.prototype._realpathSet = function (index, cb) {
var matchset = this.matches[index]
if (!matchset)
return cb()
var found = Object.keys(matchset)
var self = this
var n = found.length
if (n === 0)
return cb()
var set = this.matches[index] = Object.create(null)
found.forEach(function (p, i) {
// If there's a problem with the stat, then it means that
// one or more of the links in the realpath couldn't be
// resolved. just return the abs value in that case.
p = self._makeAbs(p)
rp.realpath(p, self.realpathCache, function (er, real) {
if (!er)
set[real] = true
else if (er.syscall === 'stat')
set[p] = true
else
self.emit('error', er) // srsly wtf right here
if (--n === 0) {
self.matches[index] = set
cb()
}
})
})
}
Glob.prototype._mark = function (p) {
return common.mark(this, p)
}
Glob.prototype._makeAbs = function (f) {
return common.makeAbs(this, f)
}
Glob.prototype.abort = function () {
this.aborted = true
this.emit('abort')
}
Glob.prototype.pause = function () {
if (!this.paused) {
this.paused = true
this.emit('pause')
}
}
Glob.prototype.resume = function () {
if (this.paused) {
this.emit('resume')
this.paused = false
if (this._emitQueue.length) {
var eq = this._emitQueue.slice(0)
this._emitQueue.length = 0
for (var i = 0; i < eq.length; i ++) {
var e = eq[i]
this._emitMatch(e[0], e[1])
}
}
if (this._processQueue.length) {
var pq = this._processQueue.slice(0)
this._processQueue.length = 0
for (var i = 0; i < pq.length; i ++) {
var p = pq[i]
this._processing--
this._process(p[0], p[1], p[2], p[3])
}
}
}
}
Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
assert(this instanceof Glob)
assert(typeof cb === 'function')
if (this.aborted)
return
this._processing++
if (this.paused) {
this._processQueue.push([pattern, index, inGlobStar, cb])
return
}
//console.error('PROCESS %d', this._processing, pattern)
// Get the first [n] parts of pattern that are all strings.
var n = 0
while (typeof pattern[n] === 'string') {
n ++
}
// now n is the index of the first one that is *not* a string.
// see if there's anything else
var prefix
switch (n) {
// if not, then this is rather simple
case pattern.length:
this._processSimple(pattern.join('/'), index, cb)
return
case 0:
// pattern *starts* with some non-trivial item.
// going to readdir(cwd), but not include the prefix in matches.
prefix = null
break
default:
// pattern has some string bits in the front.
// whatever it starts with, whether that's 'absolute' like /foo/bar,
// or 'relative' like '../baz'
prefix = pattern.slice(0, n).join('/')
break
}
var remain = pattern.slice(n)
// get the list of entries.
var read
if (prefix === null)
read = '.'
else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
if (!prefix || !isAbsolute(prefix))
prefix = '/' + prefix
read = prefix
} else
read = prefix
var abs = this._makeAbs(read)
//if ignored, skip _processing
if (childrenIgnored(this, read))
return cb()
var isGlobStar = remain[0] === minimatch.GLOBSTAR
if (isGlobStar)
this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)
else
this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)
}
Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
var self = this
this._readdir(abs, inGlobStar, function (er, entries) {
return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
})
}
Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
// if the abs isn't a dir, then nothing can match!
if (!entries)
return cb()
// It will only match dot entries if it starts with a dot, or if
// dot is set. Stuff like @(.foo|.bar) isn't allowed.
var pn = remain[0]
var negate = !!this.minimatch.negate
var rawGlob = pn._glob
var dotOk = this.dot || rawGlob.charAt(0) === '.'
var matchedEntries = []
for (var i = 0; i < entries.length; i++) {
var e = entries[i]
if (e.charAt(0) !== '.' || dotOk) {
var m
if (negate && !prefix) {
m = !e.match(pn)
} else {
m = e.match(pn)
}
if (m)
matchedEntries.push(e)
}
}
//console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)
var len = matchedEntries.length
// If there are no matched entries, then nothing matches.
if (len === 0)
return cb()
// if this is the last remaining pattern bit, then no need for
// an additional stat *unless* the user has specified mark or
// stat explicitly. We know they exist, since readdir returned
// them.
if (remain.length === 1 && !this.mark && !this.stat) {
if (!this.matches[index])
this.matches[index] = Object.create(null)
for (var i = 0; i < len; i ++) {
var e = matchedEntries[i]
if (prefix) {
if (prefix !== '/')
e = prefix + '/' + e
else
e = prefix + e
}
if (e.charAt(0) === '/' && !this.nomount) {
e = path.join(this.root, e)
}
this._emitMatch(index, e)
}
// This was the last one, and no stats were needed
return cb()
}
// now test all matched entries as stand-ins for that part
// of the pattern.
remain.shift()
for (var i = 0; i < len; i ++) {
var e = matchedEntries[i]
var newPattern
if (prefix) {
if (prefix !== '/')
e = prefix + '/' + e
else
e = prefix + e
}
this._process([e].concat(remain), index, inGlobStar, cb)
}
cb()
}
Glob.prototype._emitMatch = function (index, e) {
if (this.aborted)
return
if (isIgnored(this, e))
return
if (this.paused) {
this._emitQueue.push([index, e])
return
}
var abs = isAbsolute(e) ? e : this._makeAbs(e)
if (this.mark)
e = this._mark(e)
if (this.absolute)
e = abs
if (this.matches[index][e])
return
if (this.nodir) {
var c = this.cache[abs]
if (c === 'DIR' || Array.isArray(c))
return
}
this.matches[index][e] = true
var st = this.statCache[abs]
if (st)
this.emit('stat', e, st)
this.emit('match', e)
}
Glob.prototype._readdirInGlobStar = function (abs, cb) {
if (this.aborted)
return
// follow all symlinked directories forever
// just proceed as if this is a non-globstar situation
if (this.follow)
return this._readdir(abs, false, cb)
var lstatkey = 'lstat\0' + abs
var self = this
var lstatcb = inflight(lstatkey, lstatcb_)
if (lstatcb)
self.fs.lstat(abs, lstatcb)
function lstatcb_ (er, lstat) {
if (er && er.code === 'ENOENT')
return cb()
var isSym = lstat && lstat.isSymbolicLink()
self.symlinks[abs] = isSym
// If it's not a symlink or a dir, then it's definitely a regular file.
// don't bother doing a readdir in that case.
if (!isSym && lstat && !lstat.isDirectory()) {
self.cache[abs] = 'FILE'
cb()
} else
self._readdir(abs, false, cb)
}
}
Glob.prototype._readdir = function (abs, inGlobStar, cb) {
if (this.aborted)
return
cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb)
if (!cb)
return
//console.error('RD %j %j', +inGlobStar, abs)
if (inGlobStar && !ownProp(this.symlinks, abs))
return this._readdirInGlobStar(abs, cb)
if (ownProp(this.cache, abs)) {
var c = this.cache[abs]
if (!c || c === 'FILE')
return cb()
if (Array.isArray(c))
return cb(null, c)
}
var self = this
self.fs.readdir(abs, readdirCb(this, abs, cb))
}
function readdirCb (self, abs, cb) {
return function (er, entries) {
if (er)
self._readdirError(abs, er, cb)
else
self._readdirEntries(abs, entries, cb)
}
}
Glob.prototype._readdirEntries = function (abs, entries, cb) {
if (this.aborted)
return
// if we haven't asked to stat everything, then just
// assume that everything in there exists, so we can avoid
// having to stat it a second time.
if (!this.mark && !this.stat) {
for (var i = 0; i < entries.length; i ++) {
var e = entries[i]
if (abs === '/')
e = abs + e
else
e = abs + '/' + e
this.cache[e] = true
}
}
this.cache[abs] = entries
return cb(null, entries)
}
Glob.prototype._readdirError = function (f, er, cb) {
if (this.aborted)
return
// handle errors, and cache the information
switch (er.code) {
case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
case 'ENOTDIR': // totally normal. means it *does* exist.
var abs = this._makeAbs(f)
this.cache[abs] = 'FILE'
if (abs === this.cwdAbs) {
var error = new Error(er.code + ' invalid cwd ' + this.cwd)
error.path = this.cwd
error.code = er.code
this.emit('error', error)
this.abort()
}
break
case 'ENOENT': // not terribly unusual
case 'ELOOP':
case 'ENAMETOOLONG':
case 'UNKNOWN':
this.cache[this._makeAbs(f)] = false
break
default: // some unusual error. Treat as failure.
this.cache[this._makeAbs(f)] = false
if (this.strict) {
this.emit('error', er)
// If the error is handled, then we abort
// if not, we threw out of here
this.abort()
}
if (!this.silent)
console.error('glob error', er)
break
}
return cb()
}
Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
var self = this
this._readdir(abs, inGlobStar, function (er, entries) {
self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
})
}
Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
//console.error('pgs2', prefix, remain[0], entries)
// no entries means not a dir, so it can never have matches
// foo.txt/** doesn't match foo.txt
if (!entries)
return cb()
// test without the globstar, and with every child both below
// and replacing the globstar.
var remainWithoutGlobStar = remain.slice(1)
var gspref = prefix ? [ prefix ] : []
var noGlobStar = gspref.concat(remainWithoutGlobStar)
// the noGlobStar pattern exits the inGlobStar state
this._process(noGlobStar, index, false, cb)
var isSym = this.symlinks[abs]
var len = entries.length
// If it's a symlink, and we're in a globstar, then stop
if (isSym && inGlobStar)
return cb()
for (var i = 0; i < len; i++) {
var e = entries[i]
if (e.charAt(0) === '.' && !this.dot)
continue
// these two cases enter the inGlobStar state
var instead = gspref.concat(entries[i], remainWithoutGlobStar)
this._process(instead, index, true, cb)
var below = gspref.concat(entries[i], remain)
this._process(below, index, true, cb)
}
cb()
}
Glob.prototype._processSimple = function (prefix, index, cb) {
// XXX review this. Shouldn't it be doing the mounting etc
// before doing stat? kinda weird?
var self = this
this._stat(prefix, function (er, exists) {
self._processSimple2(prefix, index, er, exists, cb)
})
}
Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {
//console.error('ps2', prefix, exists)
if (!this.matches[index])
this.matches[index] = Object.create(null)
// If it doesn't exist, then just mark the lack of results
if (!exists)
return cb()
if (prefix && isAbsolute(prefix) && !this.nomount) {
var trail = /[\/\\]$/.test(prefix)
if (prefix.charAt(0) === '/') {
prefix = path.join(this.root, prefix)
} else {
prefix = path.resolve(this.root, prefix)
if (trail)
prefix += '/'
}
}
if (process.platform === 'win32')
prefix = prefix.replace(/\\/g, '/')
// Mark this as a match
this._emitMatch(index, prefix)
cb()
}
// Returns either 'DIR', 'FILE', or false
Glob.prototype._stat = function (f, cb) {
var abs = this._makeAbs(f)
var needDir = f.slice(-1) === '/'
if (f.length > this.maxLength)
return cb()
if (!this.stat && ownProp(this.cache, abs)) {
var c = this.cache[abs]
if (Array.isArray(c))
c = 'DIR'
// It exists, but maybe not how we need it
if (!needDir || c === 'DIR')
return cb(null, c)
if (needDir && c === 'FILE')
return cb()
// otherwise we have to stat, because maybe c=true
// if we know it exists, but not what it is.
}
var exists
var stat = this.statCache[abs]
if (stat !== undefined) {
if (stat === false)
return cb(null, stat)
else {
var type = stat.isDirectory() ? 'DIR' : 'FILE'
if (needDir && type === 'FILE')
return cb()
else
return cb(null, type, stat)
}
}
var self = this
var statcb = inflight('stat\0' + abs, lstatcb_)
if (statcb)
self.fs.lstat(abs, statcb)
function lstatcb_ (er, lstat) {
if (lstat && lstat.isSymbolicLink()) {
// If it's a symlink, then treat it as the target, unless
// the target does not exist, then treat it as a file.
return self.fs.stat(abs, function (er, stat) {
if (er)
self._stat2(f, abs, null, lstat, cb)
else
self._stat2(f, abs, er, stat, cb)
})
} else {
self._stat2(f, abs, er, lstat, cb)
}
}
}
Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
this.statCache[abs] = false
return cb()
}
var needDir = f.slice(-1) === '/'
this.statCache[abs] = stat
if (abs.slice(-1) === '/' && stat && !stat.isDirectory())
return cb(null, false, stat)
var c = true
if (stat)
c = stat.isDirectory() ? 'DIR' : 'FILE'
this.cache[abs] = this.cache[abs] || c
if (needDir && c === 'FILE')
return cb()
return cb(null, c, stat)
}
| next | identifier_name |
glob.js | // Approach:
//
// 1. Get the minimatch set
// 2. For each pattern in the set, PROCESS(pattern, false)
// 3. Store matches per-set, then uniq them
//
// PROCESS(pattern, inGlobStar)
// Get the first [n] items from pattern that are all strings
// Join these together. This is PREFIX.
// If there is no more remaining, then stat(PREFIX) and
// add to matches if it succeeds. END.
//
// If inGlobStar and PREFIX is symlink and points to dir
// set ENTRIES = []
// else readdir(PREFIX) as ENTRIES
// If fail, END
//
// with ENTRIES
// If pattern[n] is GLOBSTAR
// // handle the case where the globstar match is empty
// // by pruning it out, and testing the resulting pattern
// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
// // handle other cases.
// for ENTRY in ENTRIES (not dotfiles)
// // attach globstar + tail onto the entry
// // Mark that this entry is a globstar match
// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
//
// else // not globstar
// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
// Test ENTRY against pattern[n]
// If fails, continue
// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
//
// Caveat:
// Cache all stats and readdirs results to minimize syscall. Since all
// we ever care about is existence and directory-ness, we can just keep
// `true` for files, and [children,...] for directories, or `false` for
// things that don't exist.
module.exports = glob
var rp = require('fs.realpath')
var minimatch = require('minimatch')
var Minimatch = minimatch.Minimatch
var inherits = require('inherits')
var EE = require('events').EventEmitter
var path = require('path')
var assert = require('assert')
var isAbsolute = require('path-is-absolute')
var globSync = require('./sync.js')
var common = require('./common.js')
var setopts = common.setopts
var ownProp = common.ownProp
var inflight = require('inflight')
var util = require('util')
var childrenIgnored = common.childrenIgnored
var isIgnored = common.isIgnored
var once = require('once')
function glob (pattern, options, cb) |
glob.sync = globSync
var GlobSync = glob.GlobSync = globSync.GlobSync
// old api surface
glob.glob = glob
function extend (origin, add) {
if (add === null || typeof add !== 'object') {
return origin
}
var keys = Object.keys(add)
var i = keys.length
while (i--) {
origin[keys[i]] = add[keys[i]]
}
return origin
}
glob.hasMagic = function (pattern, options_) {
var options = extend({}, options_)
options.noprocess = true
var g = new Glob(pattern, options)
var set = g.minimatch.set
if (!pattern)
return false
if (set.length > 1)
return true
for (var j = 0; j < set[0].length; j++) {
if (typeof set[0][j] !== 'string')
return true
}
return false
}
glob.Glob = Glob
inherits(Glob, EE)
function Glob (pattern, options, cb) {
if (typeof options === 'function') {
cb = options
options = null
}
if (options && options.sync) {
if (cb)
throw new TypeError('callback provided to sync glob')
return new GlobSync(pattern, options)
}
if (!(this instanceof Glob))
return new Glob(pattern, options, cb)
setopts(this, pattern, options)
this._didRealPath = false
// process each pattern in the minimatch set
var n = this.minimatch.set.length
// The matches are stored as {<filename>: true,...} so that
// duplicates are automagically pruned.
// Later, we do an Object.keys() on these.
// Keep them as a list so we can fill in when nonull is set.
this.matches = new Array(n)
if (typeof cb === 'function') {
cb = once(cb)
this.on('error', cb)
this.on('end', function (matches) {
cb(null, matches)
})
}
var self = this
this._processing = 0
this._emitQueue = []
this._processQueue = []
this.paused = false
if (this.noprocess)
return this
if (n === 0)
return done()
var sync = true
for (var i = 0; i < n; i ++) {
this._process(this.minimatch.set[i], i, false, done)
}
sync = false
function done () {
--self._processing
if (self._processing <= 0) {
if (sync) {
process.nextTick(function () {
self._finish()
})
} else {
self._finish()
}
}
}
}
Glob.prototype._finish = function () {
assert(this instanceof Glob)
if (this.aborted)
return
if (this.realpath && !this._didRealpath)
return this._realpath()
common.finish(this)
this.emit('end', this.found)
}
Glob.prototype._realpath = function () {
if (this._didRealpath)
return
this._didRealpath = true
var n = this.matches.length
if (n === 0)
return this._finish()
var self = this
for (var i = 0; i < this.matches.length; i++)
this._realpathSet(i, next)
function next () {
if (--n === 0)
self._finish()
}
}
Glob.prototype._realpathSet = function (index, cb) {
var matchset = this.matches[index]
if (!matchset)
return cb()
var found = Object.keys(matchset)
var self = this
var n = found.length
if (n === 0)
return cb()
var set = this.matches[index] = Object.create(null)
found.forEach(function (p, i) {
// If there's a problem with the stat, then it means that
// one or more of the links in the realpath couldn't be
// resolved. just return the abs value in that case.
p = self._makeAbs(p)
rp.realpath(p, self.realpathCache, function (er, real) {
if (!er)
set[real] = true
else if (er.syscall === 'stat')
set[p] = true
else
self.emit('error', er) // srsly wtf right here
if (--n === 0) {
self.matches[index] = set
cb()
}
})
})
}
Glob.prototype._mark = function (p) {
return common.mark(this, p)
}
Glob.prototype._makeAbs = function (f) {
return common.makeAbs(this, f)
}
Glob.prototype.abort = function () {
this.aborted = true
this.emit('abort')
}
Glob.prototype.pause = function () {
if (!this.paused) {
this.paused = true
this.emit('pause')
}
}
Glob.prototype.resume = function () {
if (this.paused) {
this.emit('resume')
this.paused = false
if (this._emitQueue.length) {
var eq = this._emitQueue.slice(0)
this._emitQueue.length = 0
for (var i = 0; i < eq.length; i ++) {
var e = eq[i]
this._emitMatch(e[0], e[1])
}
}
if (this._processQueue.length) {
var pq = this._processQueue.slice(0)
this._processQueue.length = 0
for (var i = 0; i < pq.length; i ++) {
var p = pq[i]
this._processing--
this._process(p[0], p[1], p[2], p[3])
}
}
}
}
Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
assert(this instanceof Glob)
assert(typeof cb === 'function')
if (this.aborted)
return
this._processing++
if (this.paused) {
this._processQueue.push([pattern, index, inGlobStar, cb])
return
}
//console.error('PROCESS %d', this._processing, pattern)
// Get the first [n] parts of pattern that are all strings.
var n = 0
while (typeof pattern[n] === 'string') {
n ++
}
// now n is the index of the first one that is *not* a string.
// see if there's anything else
var prefix
switch (n) {
// if not, then this is rather simple
case pattern.length:
this._processSimple(pattern.join('/'), index, cb)
return
case 0:
// pattern *starts* with some non-trivial item.
// going to readdir(cwd), but not include the prefix in matches.
prefix = null
break
default:
// pattern has some string bits in the front.
// whatever it starts with, whether that's 'absolute' like /foo/bar,
// or 'relative' like '../baz'
prefix = pattern.slice(0, n).join('/')
break
}
var remain = pattern.slice(n)
// get the list of entries.
var read
if (prefix === null)
read = '.'
else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
if (!prefix || !isAbsolute(prefix))
prefix = '/' + prefix
read = prefix
} else
read = prefix
var abs = this._makeAbs(read)
//if ignored, skip _processing
if (childrenIgnored(this, read))
return cb()
var isGlobStar = remain[0] === minimatch.GLOBSTAR
if (isGlobStar)
this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)
else
this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)
}
Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
var self = this
this._readdir(abs, inGlobStar, function (er, entries) {
return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
})
}
Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
// if the abs isn't a dir, then nothing can match!
if (!entries)
return cb()
// It will only match dot entries if it starts with a dot, or if
// dot is set. Stuff like @(.foo|.bar) isn't allowed.
var pn = remain[0]
var negate = !!this.minimatch.negate
var rawGlob = pn._glob
var dotOk = this.dot || rawGlob.charAt(0) === '.'
var matchedEntries = []
for (var i = 0; i < entries.length; i++) {
var e = entries[i]
if (e.charAt(0) !== '.' || dotOk) {
var m
if (negate && !prefix) {
m = !e.match(pn)
} else {
m = e.match(pn)
}
if (m)
matchedEntries.push(e)
}
}
//console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)
var len = matchedEntries.length
// If there are no matched entries, then nothing matches.
if (len === 0)
return cb()
// if this is the last remaining pattern bit, then no need for
// an additional stat *unless* the user has specified mark or
// stat explicitly. We know they exist, since readdir returned
// them.
if (remain.length === 1 && !this.mark && !this.stat) {
if (!this.matches[index])
this.matches[index] = Object.create(null)
for (var i = 0; i < len; i ++) {
var e = matchedEntries[i]
if (prefix) {
if (prefix !== '/')
e = prefix + '/' + e
else
e = prefix + e
}
if (e.charAt(0) === '/' && !this.nomount) {
e = path.join(this.root, e)
}
this._emitMatch(index, e)
}
// This was the last one, and no stats were needed
return cb()
}
// now test all matched entries as stand-ins for that part
// of the pattern.
remain.shift()
for (var i = 0; i < len; i ++) {
var e = matchedEntries[i]
var newPattern
if (prefix) {
if (prefix !== '/')
e = prefix + '/' + e
else
e = prefix + e
}
this._process([e].concat(remain), index, inGlobStar, cb)
}
cb()
}
Glob.prototype._emitMatch = function (index, e) {
if (this.aborted)
return
if (isIgnored(this, e))
return
if (this.paused) {
this._emitQueue.push([index, e])
return
}
var abs = isAbsolute(e) ? e : this._makeAbs(e)
if (this.mark)
e = this._mark(e)
if (this.absolute)
e = abs
if (this.matches[index][e])
return
if (this.nodir) {
var c = this.cache[abs]
if (c === 'DIR' || Array.isArray(c))
return
}
this.matches[index][e] = true
var st = this.statCache[abs]
if (st)
this.emit('stat', e, st)
this.emit('match', e)
}
Glob.prototype._readdirInGlobStar = function (abs, cb) {
if (this.aborted)
return
// follow all symlinked directories forever
// just proceed as if this is a non-globstar situation
if (this.follow)
return this._readdir(abs, false, cb)
var lstatkey = 'lstat\0' + abs
var self = this
var lstatcb = inflight(lstatkey, lstatcb_)
if (lstatcb)
self.fs.lstat(abs, lstatcb)
function lstatcb_ (er, lstat) {
if (er && er.code === 'ENOENT')
return cb()
var isSym = lstat && lstat.isSymbolicLink()
self.symlinks[abs] = isSym
// If it's not a symlink or a dir, then it's definitely a regular file.
// don't bother doing a readdir in that case.
if (!isSym && lstat && !lstat.isDirectory()) {
self.cache[abs] = 'FILE'
cb()
} else
self._readdir(abs, false, cb)
}
}
Glob.prototype._readdir = function (abs, inGlobStar, cb) {
if (this.aborted)
return
cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb)
if (!cb)
return
//console.error('RD %j %j', +inGlobStar, abs)
if (inGlobStar && !ownProp(this.symlinks, abs))
return this._readdirInGlobStar(abs, cb)
if (ownProp(this.cache, abs)) {
var c = this.cache[abs]
if (!c || c === 'FILE')
return cb()
if (Array.isArray(c))
return cb(null, c)
}
var self = this
self.fs.readdir(abs, readdirCb(this, abs, cb))
}
function readdirCb (self, abs, cb) {
return function (er, entries) {
if (er)
self._readdirError(abs, er, cb)
else
self._readdirEntries(abs, entries, cb)
}
}
Glob.prototype._readdirEntries = function (abs, entries, cb) {
if (this.aborted)
return
// if we haven't asked to stat everything, then just
// assume that everything in there exists, so we can avoid
// having to stat it a second time.
if (!this.mark && !this.stat) {
for (var i = 0; i < entries.length; i ++) {
var e = entries[i]
if (abs === '/')
e = abs + e
else
e = abs + '/' + e
this.cache[e] = true
}
}
this.cache[abs] = entries
return cb(null, entries)
}
Glob.prototype._readdirError = function (f, er, cb) {
if (this.aborted)
return
// handle errors, and cache the information
switch (er.code) {
case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
case 'ENOTDIR': // totally normal. means it *does* exist.
var abs = this._makeAbs(f)
this.cache[abs] = 'FILE'
if (abs === this.cwdAbs) {
var error = new Error(er.code + ' invalid cwd ' + this.cwd)
error.path = this.cwd
error.code = er.code
this.emit('error', error)
this.abort()
}
break
case 'ENOENT': // not terribly unusual
case 'ELOOP':
case 'ENAMETOOLONG':
case 'UNKNOWN':
this.cache[this._makeAbs(f)] = false
break
default: // some unusual error. Treat as failure.
this.cache[this._makeAbs(f)] = false
if (this.strict) {
this.emit('error', er)
// If the error is handled, then we abort
// if not, we threw out of here
this.abort()
}
if (!this.silent)
console.error('glob error', er)
break
}
return cb()
}
Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
var self = this
this._readdir(abs, inGlobStar, function (er, entries) {
self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
})
}
Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
//console.error('pgs2', prefix, remain[0], entries)
// no entries means not a dir, so it can never have matches
// foo.txt/** doesn't match foo.txt
if (!entries)
return cb()
// test without the globstar, and with every child both below
// and replacing the globstar.
var remainWithoutGlobStar = remain.slice(1)
var gspref = prefix ? [ prefix ] : []
var noGlobStar = gspref.concat(remainWithoutGlobStar)
// the noGlobStar pattern exits the inGlobStar state
this._process(noGlobStar, index, false, cb)
var isSym = this.symlinks[abs]
var len = entries.length
// If it's a symlink, and we're in a globstar, then stop
if (isSym && inGlobStar)
return cb()
for (var i = 0; i < len; i++) {
var e = entries[i]
if (e.charAt(0) === '.' && !this.dot)
continue
// these two cases enter the inGlobStar state
var instead = gspref.concat(entries[i], remainWithoutGlobStar)
this._process(instead, index, true, cb)
var below = gspref.concat(entries[i], remain)
this._process(below, index, true, cb)
}
cb()
}
Glob.prototype._processSimple = function (prefix, index, cb) {
// XXX review this. Shouldn't it be doing the mounting etc
// before doing stat? kinda weird?
var self = this
this._stat(prefix, function (er, exists) {
self._processSimple2(prefix, index, er, exists, cb)
})
}
Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {
//console.error('ps2', prefix, exists)
if (!this.matches[index])
this.matches[index] = Object.create(null)
// If it doesn't exist, then just mark the lack of results
if (!exists)
return cb()
if (prefix && isAbsolute(prefix) && !this.nomount) {
var trail = /[\/\\]$/.test(prefix)
if (prefix.charAt(0) === '/') {
prefix = path.join(this.root, prefix)
} else {
prefix = path.resolve(this.root, prefix)
if (trail)
prefix += '/'
}
}
if (process.platform === 'win32')
prefix = prefix.replace(/\\/g, '/')
// Mark this as a match
this._emitMatch(index, prefix)
cb()
}
// Returns either 'DIR', 'FILE', or false
Glob.prototype._stat = function (f, cb) {
var abs = this._makeAbs(f)
var needDir = f.slice(-1) === '/'
if (f.length > this.maxLength)
return cb()
if (!this.stat && ownProp(this.cache, abs)) {
var c = this.cache[abs]
if (Array.isArray(c))
c = 'DIR'
// It exists, but maybe not how we need it
if (!needDir || c === 'DIR')
return cb(null, c)
if (needDir && c === 'FILE')
return cb()
// otherwise we have to stat, because maybe c=true
// if we know it exists, but not what it is.
}
var exists
var stat = this.statCache[abs]
if (stat !== undefined) {
if (stat === false)
return cb(null, stat)
else {
var type = stat.isDirectory() ? 'DIR' : 'FILE'
if (needDir && type === 'FILE')
return cb()
else
return cb(null, type, stat)
}
}
var self = this
var statcb = inflight('stat\0' + abs, lstatcb_)
if (statcb)
self.fs.lstat(abs, statcb)
function lstatcb_ (er, lstat) {
if (lstat && lstat.isSymbolicLink()) {
// If it's a symlink, then treat it as the target, unless
// the target does not exist, then treat it as a file.
return self.fs.stat(abs, function (er, stat) {
if (er)
self._stat2(f, abs, null, lstat, cb)
else
self._stat2(f, abs, er, stat, cb)
})
} else {
self._stat2(f, abs, er, lstat, cb)
}
}
}
Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
this.statCache[abs] = false
return cb()
}
var needDir = f.slice(-1) === '/'
this.statCache[abs] = stat
if (abs.slice(-1) === '/' && stat && !stat.isDirectory())
return cb(null, false, stat)
var c = true
if (stat)
c = stat.isDirectory() ? 'DIR' : 'FILE'
this.cache[abs] = this.cache[abs] || c
if (needDir && c === 'FILE')
return cb()
return cb(null, c, stat)
}
| {
if (typeof options === 'function') cb = options, options = {}
if (!options) options = {}
if (options.sync) {
if (cb)
throw new TypeError('callback provided to sync glob')
return globSync(pattern, options)
}
return new Glob(pattern, options, cb)
} | identifier_body |
glob.js | // Approach:
//
// 1. Get the minimatch set
// 2. For each pattern in the set, PROCESS(pattern, false)
// 3. Store matches per-set, then uniq them
//
// PROCESS(pattern, inGlobStar)
// Get the first [n] items from pattern that are all strings
// Join these together. This is PREFIX.
// If there is no more remaining, then stat(PREFIX) and
// add to matches if it succeeds. END.
//
// If inGlobStar and PREFIX is symlink and points to dir
// set ENTRIES = []
// else readdir(PREFIX) as ENTRIES
// If fail, END
//
// with ENTRIES
// If pattern[n] is GLOBSTAR
// // handle the case where the globstar match is empty
// // by pruning it out, and testing the resulting pattern
// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
// // handle other cases.
// for ENTRY in ENTRIES (not dotfiles)
// // attach globstar + tail onto the entry
// // Mark that this entry is a globstar match
// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
//
// else // not globstar
// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
// Test ENTRY against pattern[n]
// If fails, continue
// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
//
// Caveat:
// Cache all stats and readdirs results to minimize syscall. Since all
// we ever care about is existence and directory-ness, we can just keep
// `true` for files, and [children,...] for directories, or `false` for
// things that don't exist.
module.exports = glob
var rp = require('fs.realpath')
var minimatch = require('minimatch')
var Minimatch = minimatch.Minimatch
var inherits = require('inherits')
var EE = require('events').EventEmitter
var path = require('path')
var assert = require('assert')
var isAbsolute = require('path-is-absolute')
var globSync = require('./sync.js')
var common = require('./common.js')
var setopts = common.setopts
var ownProp = common.ownProp
var inflight = require('inflight')
var util = require('util')
var childrenIgnored = common.childrenIgnored
var isIgnored = common.isIgnored
var once = require('once')
function glob (pattern, options, cb) {
if (typeof options === 'function') cb = options, options = {}
if (!options) options = {}
if (options.sync) {
if (cb)
throw new TypeError('callback provided to sync glob')
return globSync(pattern, options)
}
return new Glob(pattern, options, cb)
}
glob.sync = globSync
var GlobSync = glob.GlobSync = globSync.GlobSync
// old api surface
glob.glob = glob
function extend (origin, add) {
if (add === null || typeof add !== 'object') {
return origin
}
var keys = Object.keys(add)
var i = keys.length
while (i--) {
origin[keys[i]] = add[keys[i]]
}
return origin
}
glob.hasMagic = function (pattern, options_) {
var options = extend({}, options_)
options.noprocess = true
var g = new Glob(pattern, options)
var set = g.minimatch.set
if (!pattern)
return false
if (set.length > 1)
return true
for (var j = 0; j < set[0].length; j++) {
if (typeof set[0][j] !== 'string')
return true
}
return false
}
glob.Glob = Glob
inherits(Glob, EE)
function Glob (pattern, options, cb) {
if (typeof options === 'function') {
cb = options
options = null
}
if (options && options.sync) {
if (cb)
throw new TypeError('callback provided to sync glob')
return new GlobSync(pattern, options)
}
if (!(this instanceof Glob))
return new Glob(pattern, options, cb)
setopts(this, pattern, options)
this._didRealPath = false
// process each pattern in the minimatch set
var n = this.minimatch.set.length
// The matches are stored as {<filename>: true,...} so that
// duplicates are automagically pruned.
// Later, we do an Object.keys() on these.
// Keep them as a list so we can fill in when nonull is set.
this.matches = new Array(n)
if (typeof cb === 'function') {
cb = once(cb)
this.on('error', cb)
this.on('end', function (matches) {
cb(null, matches)
})
}
var self = this
this._processing = 0
this._emitQueue = []
this._processQueue = []
this.paused = false
if (this.noprocess)
return this
if (n === 0)
return done()
var sync = true
for (var i = 0; i < n; i ++) {
this._process(this.minimatch.set[i], i, false, done)
}
sync = false
function done () {
--self._processing
if (self._processing <= 0) {
if (sync) {
process.nextTick(function () {
self._finish()
})
} else {
self._finish()
}
}
}
}
Glob.prototype._finish = function () {
assert(this instanceof Glob)
if (this.aborted)
return
if (this.realpath && !this._didRealpath)
return this._realpath()
common.finish(this)
this.emit('end', this.found)
}
Glob.prototype._realpath = function () {
if (this._didRealpath)
return
this._didRealpath = true
var n = this.matches.length
if (n === 0)
return this._finish()
var self = this
for (var i = 0; i < this.matches.length; i++)
this._realpathSet(i, next)
function next () {
if (--n === 0)
self._finish()
}
}
Glob.prototype._realpathSet = function (index, cb) {
var matchset = this.matches[index]
if (!matchset)
return cb()
var found = Object.keys(matchset)
var self = this
var n = found.length
if (n === 0)
return cb()
var set = this.matches[index] = Object.create(null)
found.forEach(function (p, i) {
// If there's a problem with the stat, then it means that
// one or more of the links in the realpath couldn't be
// resolved. just return the abs value in that case.
p = self._makeAbs(p)
rp.realpath(p, self.realpathCache, function (er, real) {
if (!er)
set[real] = true
else if (er.syscall === 'stat')
set[p] = true
else
self.emit('error', er) // srsly wtf right here
if (--n === 0) {
self.matches[index] = set
cb()
}
})
})
}
Glob.prototype._mark = function (p) {
return common.mark(this, p)
}
Glob.prototype._makeAbs = function (f) {
return common.makeAbs(this, f)
}
Glob.prototype.abort = function () {
this.aborted = true
this.emit('abort')
}
Glob.prototype.pause = function () {
if (!this.paused) {
this.paused = true
this.emit('pause')
}
}
Glob.prototype.resume = function () {
if (this.paused) {
this.emit('resume')
this.paused = false
if (this._emitQueue.length) {
var eq = this._emitQueue.slice(0)
this._emitQueue.length = 0
for (var i = 0; i < eq.length; i ++) { | var e = eq[i]
this._emitMatch(e[0], e[1])
}
}
if (this._processQueue.length) {
var pq = this._processQueue.slice(0)
this._processQueue.length = 0
for (var i = 0; i < pq.length; i ++) {
var p = pq[i]
this._processing--
this._process(p[0], p[1], p[2], p[3])
}
}
}
}
Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
assert(this instanceof Glob)
assert(typeof cb === 'function')
if (this.aborted)
return
this._processing++
if (this.paused) {
this._processQueue.push([pattern, index, inGlobStar, cb])
return
}
//console.error('PROCESS %d', this._processing, pattern)
// Get the first [n] parts of pattern that are all strings.
var n = 0
while (typeof pattern[n] === 'string') {
n ++
}
// now n is the index of the first one that is *not* a string.
// see if there's anything else
var prefix
switch (n) {
// if not, then this is rather simple
case pattern.length:
this._processSimple(pattern.join('/'), index, cb)
return
case 0:
// pattern *starts* with some non-trivial item.
// going to readdir(cwd), but not include the prefix in matches.
prefix = null
break
default:
// pattern has some string bits in the front.
// whatever it starts with, whether that's 'absolute' like /foo/bar,
// or 'relative' like '../baz'
prefix = pattern.slice(0, n).join('/')
break
}
var remain = pattern.slice(n)
// get the list of entries.
var read
if (prefix === null)
read = '.'
else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) {
if (!prefix || !isAbsolute(prefix))
prefix = '/' + prefix
read = prefix
} else
read = prefix
var abs = this._makeAbs(read)
//if ignored, skip _processing
if (childrenIgnored(this, read))
return cb()
var isGlobStar = remain[0] === minimatch.GLOBSTAR
if (isGlobStar)
this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)
else
this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)
}
Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
var self = this
this._readdir(abs, inGlobStar, function (er, entries) {
return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
})
}
Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
// if the abs isn't a dir, then nothing can match!
if (!entries)
return cb()
// It will only match dot entries if it starts with a dot, or if
// dot is set. Stuff like @(.foo|.bar) isn't allowed.
var pn = remain[0]
var negate = !!this.minimatch.negate
var rawGlob = pn._glob
var dotOk = this.dot || rawGlob.charAt(0) === '.'
var matchedEntries = []
for (var i = 0; i < entries.length; i++) {
var e = entries[i]
if (e.charAt(0) !== '.' || dotOk) {
var m
if (negate && !prefix) {
m = !e.match(pn)
} else {
m = e.match(pn)
}
if (m)
matchedEntries.push(e)
}
}
//console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)
var len = matchedEntries.length
// If there are no matched entries, then nothing matches.
if (len === 0)
return cb()
// if this is the last remaining pattern bit, then no need for
// an additional stat *unless* the user has specified mark or
// stat explicitly. We know they exist, since readdir returned
// them.
if (remain.length === 1 && !this.mark && !this.stat) {
if (!this.matches[index])
this.matches[index] = Object.create(null)
for (var i = 0; i < len; i ++) {
var e = matchedEntries[i]
if (prefix) {
if (prefix !== '/')
e = prefix + '/' + e
else
e = prefix + e
}
if (e.charAt(0) === '/' && !this.nomount) {
e = path.join(this.root, e)
}
this._emitMatch(index, e)
}
// This was the last one, and no stats were needed
return cb()
}
// now test all matched entries as stand-ins for that part
// of the pattern.
remain.shift()
for (var i = 0; i < len; i ++) {
var e = matchedEntries[i]
var newPattern
if (prefix) {
if (prefix !== '/')
e = prefix + '/' + e
else
e = prefix + e
}
this._process([e].concat(remain), index, inGlobStar, cb)
}
cb()
}
Glob.prototype._emitMatch = function (index, e) {
if (this.aborted)
return
if (isIgnored(this, e))
return
if (this.paused) {
this._emitQueue.push([index, e])
return
}
var abs = isAbsolute(e) ? e : this._makeAbs(e)
if (this.mark)
e = this._mark(e)
if (this.absolute)
e = abs
if (this.matches[index][e])
return
if (this.nodir) {
var c = this.cache[abs]
if (c === 'DIR' || Array.isArray(c))
return
}
this.matches[index][e] = true
var st = this.statCache[abs]
if (st)
this.emit('stat', e, st)
this.emit('match', e)
}
Glob.prototype._readdirInGlobStar = function (abs, cb) {
if (this.aborted)
return
// follow all symlinked directories forever
// just proceed as if this is a non-globstar situation
if (this.follow)
return this._readdir(abs, false, cb)
var lstatkey = 'lstat\0' + abs
var self = this
var lstatcb = inflight(lstatkey, lstatcb_)
if (lstatcb)
self.fs.lstat(abs, lstatcb)
function lstatcb_ (er, lstat) {
if (er && er.code === 'ENOENT')
return cb()
var isSym = lstat && lstat.isSymbolicLink()
self.symlinks[abs] = isSym
// If it's not a symlink or a dir, then it's definitely a regular file.
// don't bother doing a readdir in that case.
if (!isSym && lstat && !lstat.isDirectory()) {
self.cache[abs] = 'FILE'
cb()
} else
self._readdir(abs, false, cb)
}
}
Glob.prototype._readdir = function (abs, inGlobStar, cb) {
if (this.aborted)
return
cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb)
if (!cb)
return
//console.error('RD %j %j', +inGlobStar, abs)
if (inGlobStar && !ownProp(this.symlinks, abs))
return this._readdirInGlobStar(abs, cb)
if (ownProp(this.cache, abs)) {
var c = this.cache[abs]
if (!c || c === 'FILE')
return cb()
if (Array.isArray(c))
return cb(null, c)
}
var self = this
self.fs.readdir(abs, readdirCb(this, abs, cb))
}
function readdirCb (self, abs, cb) {
return function (er, entries) {
if (er)
self._readdirError(abs, er, cb)
else
self._readdirEntries(abs, entries, cb)
}
}
Glob.prototype._readdirEntries = function (abs, entries, cb) {
if (this.aborted)
return
// if we haven't asked to stat everything, then just
// assume that everything in there exists, so we can avoid
// having to stat it a second time.
if (!this.mark && !this.stat) {
for (var i = 0; i < entries.length; i ++) {
var e = entries[i]
if (abs === '/')
e = abs + e
else
e = abs + '/' + e
this.cache[e] = true
}
}
this.cache[abs] = entries
return cb(null, entries)
}
Glob.prototype._readdirError = function (f, er, cb) {
if (this.aborted)
return
// handle errors, and cache the information
switch (er.code) {
case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
case 'ENOTDIR': // totally normal. means it *does* exist.
var abs = this._makeAbs(f)
this.cache[abs] = 'FILE'
if (abs === this.cwdAbs) {
var error = new Error(er.code + ' invalid cwd ' + this.cwd)
error.path = this.cwd
error.code = er.code
this.emit('error', error)
this.abort()
}
break
case 'ENOENT': // not terribly unusual
case 'ELOOP':
case 'ENAMETOOLONG':
case 'UNKNOWN':
this.cache[this._makeAbs(f)] = false
break
default: // some unusual error. Treat as failure.
this.cache[this._makeAbs(f)] = false
if (this.strict) {
this.emit('error', er)
// If the error is handled, then we abort
// if not, we threw out of here
this.abort()
}
if (!this.silent)
console.error('glob error', er)
break
}
return cb()
}
Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
var self = this
this._readdir(abs, inGlobStar, function (er, entries) {
self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
})
}
Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
//console.error('pgs2', prefix, remain[0], entries)
// no entries means not a dir, so it can never have matches
// foo.txt/** doesn't match foo.txt
if (!entries)
return cb()
// test without the globstar, and with every child both below
// and replacing the globstar.
var remainWithoutGlobStar = remain.slice(1)
var gspref = prefix ? [ prefix ] : []
var noGlobStar = gspref.concat(remainWithoutGlobStar)
// the noGlobStar pattern exits the inGlobStar state
this._process(noGlobStar, index, false, cb)
var isSym = this.symlinks[abs]
var len = entries.length
// If it's a symlink, and we're in a globstar, then stop
if (isSym && inGlobStar)
return cb()
for (var i = 0; i < len; i++) {
var e = entries[i]
if (e.charAt(0) === '.' && !this.dot)
continue
// these two cases enter the inGlobStar state
var instead = gspref.concat(entries[i], remainWithoutGlobStar)
this._process(instead, index, true, cb)
var below = gspref.concat(entries[i], remain)
this._process(below, index, true, cb)
}
cb()
}
Glob.prototype._processSimple = function (prefix, index, cb) {
// XXX review this. Shouldn't it be doing the mounting etc
// before doing stat? kinda weird?
var self = this
this._stat(prefix, function (er, exists) {
self._processSimple2(prefix, index, er, exists, cb)
})
}
Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {
//console.error('ps2', prefix, exists)
if (!this.matches[index])
this.matches[index] = Object.create(null)
// If it doesn't exist, then just mark the lack of results
if (!exists)
return cb()
if (prefix && isAbsolute(prefix) && !this.nomount) {
var trail = /[\/\\]$/.test(prefix)
if (prefix.charAt(0) === '/') {
prefix = path.join(this.root, prefix)
} else {
prefix = path.resolve(this.root, prefix)
if (trail)
prefix += '/'
}
}
if (process.platform === 'win32')
prefix = prefix.replace(/\\/g, '/')
// Mark this as a match
this._emitMatch(index, prefix)
cb()
}
// Returns either 'DIR', 'FILE', or false
Glob.prototype._stat = function (f, cb) {
var abs = this._makeAbs(f)
var needDir = f.slice(-1) === '/'
if (f.length > this.maxLength)
return cb()
if (!this.stat && ownProp(this.cache, abs)) {
var c = this.cache[abs]
if (Array.isArray(c))
c = 'DIR'
// It exists, but maybe not how we need it
if (!needDir || c === 'DIR')
return cb(null, c)
if (needDir && c === 'FILE')
return cb()
// otherwise we have to stat, because maybe c=true
// if we know it exists, but not what it is.
}
var exists
var stat = this.statCache[abs]
if (stat !== undefined) {
if (stat === false)
return cb(null, stat)
else {
var type = stat.isDirectory() ? 'DIR' : 'FILE'
if (needDir && type === 'FILE')
return cb()
else
return cb(null, type, stat)
}
}
var self = this
var statcb = inflight('stat\0' + abs, lstatcb_)
if (statcb)
self.fs.lstat(abs, statcb)
function lstatcb_ (er, lstat) {
if (lstat && lstat.isSymbolicLink()) {
// If it's a symlink, then treat it as the target, unless
// the target does not exist, then treat it as a file.
return self.fs.stat(abs, function (er, stat) {
if (er)
self._stat2(f, abs, null, lstat, cb)
else
self._stat2(f, abs, er, stat, cb)
})
} else {
self._stat2(f, abs, er, lstat, cb)
}
}
}
Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
this.statCache[abs] = false
return cb()
}
var needDir = f.slice(-1) === '/'
this.statCache[abs] = stat
if (abs.slice(-1) === '/' && stat && !stat.isDirectory())
return cb(null, false, stat)
var c = true
if (stat)
c = stat.isDirectory() ? 'DIR' : 'FILE'
this.cache[abs] = this.cache[abs] || c
if (needDir && c === 'FILE')
return cb()
return cb(null, c, stat)
} | random_line_split | |
storage.ts | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
/* tslint:disable:no-namespace variable-name */
/**
* The Storage Module provides storage for URL parameters, and an API for
* getting and setting TensorBoard's stateful URI.
*
* It generates URI components like: events&runPrefix=train*
* which TensorBoard uses after like localhost:8000/#events&runPrefix=train*
* to store state in the URI.
*/
module TF.URIStorage {
type StringDict = {[key: string]: string};
/**
* A key that users cannot use, since TensorBoard uses this to store info
* about the active tab.
*/
export let TAB = '__tab__';
/**
* The name of the property for users to set on a Polymer component
* in order for its stored properties to be stored in the URI unambiguously.
* (No need to set this if you want mutliple instances of the component to
* share URI state)
*
* Example:
* <my-component disambiguator="0"></my-component>
*
* The disambiguator should be set to any unique value so that multiple
* instances of the component can store properties in URI storage.
*
* Because it's hard to dereference this variable in HTML property bindings,
* it is NOT safe to change the disambiguator string without find+replace
* across the codebase.
*/
export let DISAMBIGUATOR = 'disambiguator';
/**
* Return a boolean stored in the URI, given a corresponding key.
* Undefined if not found.
*/
export function getBoolean(key: string): boolean {
let items = _componentToDict(_readComponent());
let item = items[key];
return item === 'true' ? true : item === 'false' ? false : undefined;
}
/**
* Store a boolean in the URI, with a corresponding key.
*/
export function setBoolean(key: string, value: boolean) {
let items = _componentToDict(_readComponent());
items[key] = value.toString();
_writeComponent(_dictToComponent(items));
}
/**
* Return a string stored in the URI, given a corresponding key.
* Undefined if not found.
*/
export function getString(key: string): string {
let items = _componentToDict(_readComponent());
return items[key];
}
/**
* Store a string in the URI, with a corresponding key.
*/
export function setString(key: string, value: string) {
let items = _componentToDict(_readComponent());
items[key] = value;
_writeComponent(_dictToComponent(items));
}
/**
* Return a number stored in the URI, given a corresponding key.
* Undefined if not found.
*/
export function getNumber(key: string): number {
let items = _componentToDict(_readComponent());
return items[key] === undefined ? undefined : +items[key];
}
/**
* Store a number in the URI, with a corresponding key.
*/
export function setNumber(key: string, value: number) {
let items = _componentToDict(_readComponent());
items[key] = '' + value;
_writeComponent(_dictToComponent(items));
}
/**
* Return an object stored in the URI, given a corresponding key.
* Undefined if not found.
*/
export function getObject(key: string): Object {
let items = _componentToDict(_readComponent());
return items[key] === undefined ? undefined : JSON.parse(atob(items[key]));
}
/**
* Store an object in the URI, with a corresponding key.
*/
export function setObject(key: string, value: Object) {
let items = _componentToDict(_readComponent());
items[key] = btoa(JSON.stringify(value));
_writeComponent(_dictToComponent(items));
}
/**
* Get a unique storage name for a (Polymer component, propertyName) tuple.
*
* DISAMBIGUATOR must be set on the component, if other components use the
* same propertyName.
*/
export function getURIStorageName(
component: Object, propertyName: string): string {
let d = component[DISAMBIGUATOR];
let components = d == null ? [propertyName] : [d, propertyName];
return components.join('.');
}
/**
* Return a function that:
* (1) Initializes a Polymer boolean property with a default value, if its
* value is not already set
* (2) Sets up listener that updates Polymer property on hash change.
*/
export function getBooleanInitializer(
propertyName: string, defaultVal: boolean): Function {
return _getInitializer(getBoolean, propertyName, defaultVal);
}
/**
* Return a function that:
* (1) Initializes a Polymer string property with a default value, if its
* value is not already set
* (2) Sets up listener that updates Polymer property on hash change.
*/
export function getStringInitializer(
propertyName: string, defaultVal: string): Function {
return _getInitializer(getString, propertyName, defaultVal);
}
/**
* Return a function that:
* (1) Initializes a Polymer number property with a default value, if its
* value is not already set
* (2) Sets up listener that updates Polymer property on hash change.
*/
export function getNumberInitializer(
propertyName: string, defaultVal: number): Function {
return _getInitializer(getNumber, propertyName, defaultVal);
}
/**
* Return a function that:
* (1) Initializes a Polymer Object property with a default value, if its
* value is not already set
* (2) Sets up listener that updates Polymer property on hash change.
*
* Generates a deep clone of the defaultVal to avoid mutation issues.
*/
export function getObjectInitializer(
propertyName: string, defaultVal: Object): Function {
return _getInitializer(getObject, propertyName, defaultVal);
}
/**
* Return a function that updates URIStorage when a string property changes.
*/
export function getBooleanObserver(
propertyName: string, defaultVal: boolean): Function {
return _getObserver(getBoolean, setBoolean, propertyName, defaultVal);
}
/**
* Return a function that updates URIStorage when a string property changes.
*/
export function getStringObserver(
propertyName: string, defaultVal: string): Function {
return _getObserver(getString, setString, propertyName, defaultVal);
}
/**
* Return a function that updates URIStorage when a number property changes.
*/
export function getNumberObserver(
propertyName: string, defaultVal: number): Function {
return _getObserver(getNumber, setNumber, propertyName, defaultVal);
}
/**
* Return a function that updates URIStorage when an object property changes.
* Generates a deep clone of the defaultVal to avoid mutation issues.
*/
export function getObjectObserver(
propertyName: string, defaultVal: Object): Function {
let clone = _.cloneDeep(defaultVal);
return _getObserver(getObject, setObject, propertyName, clone);
}
/**
* Read component from URI (e.g. returns "events&runPrefix=train*").
*/
function _readComponent(): string {
return TF.Globals.USE_HASH ? window.location.hash.slice(1) :
TF.Globals.FAKE_HASH;
}
/**
* Write component to URI.
*/
function _writeComponent(component: string) {
if (TF.Globals.USE_HASH) {
window.location.hash = component;
} else {
TF.Globals.FAKE_HASH = component;
}
}
/**
* Convert dictionary of strings into a URI Component.
* All key value entries get added as key value pairs in the component,
* with the exception of a key with the TAB value, which if present
* gets prepended to the URI Component string for backwards comptability
* reasons.
*/
function _dictToComponent(items: StringDict): string {
let component = '';
// Add the tab name e.g. 'events', 'images', 'histograms' as a prefix
// for backwards compatbility.
if (items[TAB] !== undefined) {
component += items[TAB];
}
// Join other strings with &key=value notation
let nonTab = _.pairs(items)
.filter(function(pair) { return pair[0] !== TAB; })
.map(function(pair) {
return encodeURIComponent(pair[0]) + '=' +
encodeURIComponent(pair[1]);
})
.join('&');
return nonTab.length > 0 ? (component + '&' + nonTab) : component;
}
/**
* Convert a URI Component into a dictionary of strings.
* Component should consist of key-value pairs joined by a delimiter
* with the exception of the tabName.
* Returns dict consisting of all key-value pairs and
* dict[TAB] = tabName
*/
function _componentToDict(component: string): StringDict {
let items = {} as StringDict;
let tokens = component.split('&');
tokens.forEach(function(token) {
let kv = token.split('=');
// Special backwards compatibility for URI components like #events
if (kv.length === 1 && _.contains(TF.Globals.TABS, kv[0])) {
items[TAB] = kv[0];
} else if (kv.length === 2) {
items[decodeURIComponent(kv[0])] = decodeURIComponent(kv[1]);
}
});
return items;
}
/**
* Return a function that:
* (1) Initializes a Polymer property with a default value, if its
* value is not already set
* (2) Sets up listener that updates Polymer property on hash change.
*/
function _getInitializer<T>(
get: (name: string) => T, propertyName: string, defaultVal: T): Function {
return function() {
let URIStorageName = getURIStorageName(this, propertyName);
// setComponentValue will be called every time the hash changes, and is
// responsible for ensuring that new state in the hash will be propagated
// to the component with that property.
// It is important that this function does not re-assign needlessly,
// to avoid Polymer observer churn.
let setComponentValue = () => {
let uriValue = get(URIStorageName);
let currentValue = this[propertyName];
// if uriValue is undefined, we will ensure that the property has the
// default value
if (uriValue === undefined) {
if (!_.isEqual(currentValue, defaultVal)) {
// If we don't have an explicit URI value, then we need to ensure
// the property value is equal to the default value.
// We will assign a clone rather than the canonical default, because
// the component receiving this property may mutate it, and we need
// to keep a pristine copy of the default.
this[propertyName] = _.clone(defaultVal);
}
// In this case, we have an explicit URI value, so we will ensure that
// the component has an equivalent value.
} else if (!_.isEqual(uriValue, currentValue)) {
this[propertyName] = uriValue;
}
};
// Set the value on the property.
setComponentValue();
// Update it when the hashchanges.
window.addEventListener('hashchange', setComponentValue);
};
}
/**
* Return a function that updates URIStorage when a property changes.
*/
function _getObserver<T>(
get: (name: string) => T, set: (name: string, newVal: T) => void,
propertyName: string, defaultVal: T): Function {
return function() {
let URIStorageName = getURIStorageName(this, propertyName);
let newVal = this[propertyName];
if (!_.isEqual(newVal, get(URIStorageName))) |
};
}
/**
* Delete a key from the URI.
*/
function _unset(key) {
let items = _componentToDict(_readComponent());
delete items[key];
_writeComponent(_dictToComponent(items));
}
}
| {
if (_.isEqual(newVal, defaultVal)) {
_unset(URIStorageName);
} else {
set(URIStorageName, newVal);
}
} | conditional_block |
storage.ts | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
/* tslint:disable:no-namespace variable-name */
/**
* The Storage Module provides storage for URL parameters, and an API for
* getting and setting TensorBoard's stateful URI.
*
* It generates URI components like: events&runPrefix=train*
* which TensorBoard uses after like localhost:8000/#events&runPrefix=train*
* to store state in the URI.
*/
module TF.URIStorage {
type StringDict = {[key: string]: string};
/**
* A key that users cannot use, since TensorBoard uses this to store info
* about the active tab.
*/
export let TAB = '__tab__';
/**
* The name of the property for users to set on a Polymer component
* in order for its stored properties to be stored in the URI unambiguously.
* (No need to set this if you want mutliple instances of the component to
* share URI state)
*
* Example:
* <my-component disambiguator="0"></my-component>
*
* The disambiguator should be set to any unique value so that multiple
* instances of the component can store properties in URI storage.
*
* Because it's hard to dereference this variable in HTML property bindings,
* it is NOT safe to change the disambiguator string without find+replace
* across the codebase.
*/
export let DISAMBIGUATOR = 'disambiguator';
/**
* Return a boolean stored in the URI, given a corresponding key.
* Undefined if not found.
*/
export function getBoolean(key: string): boolean {
let items = _componentToDict(_readComponent());
let item = items[key];
return item === 'true' ? true : item === 'false' ? false : undefined;
}
/**
* Store a boolean in the URI, with a corresponding key.
*/
export function setBoolean(key: string, value: boolean) {
let items = _componentToDict(_readComponent());
items[key] = value.toString();
_writeComponent(_dictToComponent(items));
}
/**
* Return a string stored in the URI, given a corresponding key.
* Undefined if not found.
*/
export function getString(key: string): string |
/**
* Store a string in the URI, with a corresponding key.
*/
export function setString(key: string, value: string) {
let items = _componentToDict(_readComponent());
items[key] = value;
_writeComponent(_dictToComponent(items));
}
/**
* Return a number stored in the URI, given a corresponding key.
* Undefined if not found.
*/
export function getNumber(key: string): number {
let items = _componentToDict(_readComponent());
return items[key] === undefined ? undefined : +items[key];
}
/**
* Store a number in the URI, with a corresponding key.
*/
export function setNumber(key: string, value: number) {
let items = _componentToDict(_readComponent());
items[key] = '' + value;
_writeComponent(_dictToComponent(items));
}
/**
* Return an object stored in the URI, given a corresponding key.
* Undefined if not found.
*/
export function getObject(key: string): Object {
let items = _componentToDict(_readComponent());
return items[key] === undefined ? undefined : JSON.parse(atob(items[key]));
}
/**
* Store an object in the URI, with a corresponding key.
*/
export function setObject(key: string, value: Object) {
let items = _componentToDict(_readComponent());
items[key] = btoa(JSON.stringify(value));
_writeComponent(_dictToComponent(items));
}
/**
* Get a unique storage name for a (Polymer component, propertyName) tuple.
*
* DISAMBIGUATOR must be set on the component, if other components use the
* same propertyName.
*/
export function getURIStorageName(
component: Object, propertyName: string): string {
let d = component[DISAMBIGUATOR];
let components = d == null ? [propertyName] : [d, propertyName];
return components.join('.');
}
/**
* Return a function that:
* (1) Initializes a Polymer boolean property with a default value, if its
* value is not already set
* (2) Sets up listener that updates Polymer property on hash change.
*/
export function getBooleanInitializer(
propertyName: string, defaultVal: boolean): Function {
return _getInitializer(getBoolean, propertyName, defaultVal);
}
/**
* Return a function that:
* (1) Initializes a Polymer string property with a default value, if its
* value is not already set
* (2) Sets up listener that updates Polymer property on hash change.
*/
export function getStringInitializer(
propertyName: string, defaultVal: string): Function {
return _getInitializer(getString, propertyName, defaultVal);
}
/**
* Return a function that:
* (1) Initializes a Polymer number property with a default value, if its
* value is not already set
* (2) Sets up listener that updates Polymer property on hash change.
*/
export function getNumberInitializer(
propertyName: string, defaultVal: number): Function {
return _getInitializer(getNumber, propertyName, defaultVal);
}
/**
* Return a function that:
* (1) Initializes a Polymer Object property with a default value, if its
* value is not already set
* (2) Sets up listener that updates Polymer property on hash change.
*
* Generates a deep clone of the defaultVal to avoid mutation issues.
*/
export function getObjectInitializer(
propertyName: string, defaultVal: Object): Function {
return _getInitializer(getObject, propertyName, defaultVal);
}
/**
* Return a function that updates URIStorage when a string property changes.
*/
export function getBooleanObserver(
propertyName: string, defaultVal: boolean): Function {
return _getObserver(getBoolean, setBoolean, propertyName, defaultVal);
}
/**
* Return a function that updates URIStorage when a string property changes.
*/
export function getStringObserver(
propertyName: string, defaultVal: string): Function {
return _getObserver(getString, setString, propertyName, defaultVal);
}
/**
* Return a function that updates URIStorage when a number property changes.
*/
export function getNumberObserver(
propertyName: string, defaultVal: number): Function {
return _getObserver(getNumber, setNumber, propertyName, defaultVal);
}
/**
* Return a function that updates URIStorage when an object property changes.
* Generates a deep clone of the defaultVal to avoid mutation issues.
*/
export function getObjectObserver(
propertyName: string, defaultVal: Object): Function {
let clone = _.cloneDeep(defaultVal);
return _getObserver(getObject, setObject, propertyName, clone);
}
/**
* Read component from URI (e.g. returns "events&runPrefix=train*").
*/
function _readComponent(): string {
return TF.Globals.USE_HASH ? window.location.hash.slice(1) :
TF.Globals.FAKE_HASH;
}
/**
* Write component to URI.
*/
function _writeComponent(component: string) {
if (TF.Globals.USE_HASH) {
window.location.hash = component;
} else {
TF.Globals.FAKE_HASH = component;
}
}
/**
* Convert dictionary of strings into a URI Component.
* All key value entries get added as key value pairs in the component,
* with the exception of a key with the TAB value, which if present
* gets prepended to the URI Component string for backwards comptability
* reasons.
*/
function _dictToComponent(items: StringDict): string {
let component = '';
// Add the tab name e.g. 'events', 'images', 'histograms' as a prefix
// for backwards compatbility.
if (items[TAB] !== undefined) {
component += items[TAB];
}
// Join other strings with &key=value notation
let nonTab = _.pairs(items)
.filter(function(pair) { return pair[0] !== TAB; })
.map(function(pair) {
return encodeURIComponent(pair[0]) + '=' +
encodeURIComponent(pair[1]);
})
.join('&');
return nonTab.length > 0 ? (component + '&' + nonTab) : component;
}
/**
* Convert a URI Component into a dictionary of strings.
* Component should consist of key-value pairs joined by a delimiter
* with the exception of the tabName.
* Returns dict consisting of all key-value pairs and
* dict[TAB] = tabName
*/
function _componentToDict(component: string): StringDict {
let items = {} as StringDict;
let tokens = component.split('&');
tokens.forEach(function(token) {
let kv = token.split('=');
// Special backwards compatibility for URI components like #events
if (kv.length === 1 && _.contains(TF.Globals.TABS, kv[0])) {
items[TAB] = kv[0];
} else if (kv.length === 2) {
items[decodeURIComponent(kv[0])] = decodeURIComponent(kv[1]);
}
});
return items;
}
/**
* Return a function that:
* (1) Initializes a Polymer property with a default value, if its
* value is not already set
* (2) Sets up listener that updates Polymer property on hash change.
*/
function _getInitializer<T>(
get: (name: string) => T, propertyName: string, defaultVal: T): Function {
return function() {
let URIStorageName = getURIStorageName(this, propertyName);
// setComponentValue will be called every time the hash changes, and is
// responsible for ensuring that new state in the hash will be propagated
// to the component with that property.
// It is important that this function does not re-assign needlessly,
// to avoid Polymer observer churn.
let setComponentValue = () => {
let uriValue = get(URIStorageName);
let currentValue = this[propertyName];
// if uriValue is undefined, we will ensure that the property has the
// default value
if (uriValue === undefined) {
if (!_.isEqual(currentValue, defaultVal)) {
// If we don't have an explicit URI value, then we need to ensure
// the property value is equal to the default value.
// We will assign a clone rather than the canonical default, because
// the component receiving this property may mutate it, and we need
// to keep a pristine copy of the default.
this[propertyName] = _.clone(defaultVal);
}
// In this case, we have an explicit URI value, so we will ensure that
// the component has an equivalent value.
} else if (!_.isEqual(uriValue, currentValue)) {
this[propertyName] = uriValue;
}
};
// Set the value on the property.
setComponentValue();
// Update it when the hashchanges.
window.addEventListener('hashchange', setComponentValue);
};
}
/**
* Return a function that updates URIStorage when a property changes.
*/
function _getObserver<T>(
get: (name: string) => T, set: (name: string, newVal: T) => void,
propertyName: string, defaultVal: T): Function {
return function() {
let URIStorageName = getURIStorageName(this, propertyName);
let newVal = this[propertyName];
if (!_.isEqual(newVal, get(URIStorageName))) {
if (_.isEqual(newVal, defaultVal)) {
_unset(URIStorageName);
} else {
set(URIStorageName, newVal);
}
}
};
}
/**
* Delete a key from the URI.
*/
function _unset(key) {
let items = _componentToDict(_readComponent());
delete items[key];
_writeComponent(_dictToComponent(items));
}
}
| {
let items = _componentToDict(_readComponent());
return items[key];
} | identifier_body |
storage.ts | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
/* tslint:disable:no-namespace variable-name */
/**
* The Storage Module provides storage for URL parameters, and an API for
* getting and setting TensorBoard's stateful URI.
*
* It generates URI components like: events&runPrefix=train*
* which TensorBoard uses after like localhost:8000/#events&runPrefix=train*
* to store state in the URI.
*/
module TF.URIStorage {
type StringDict = {[key: string]: string};
/**
* A key that users cannot use, since TensorBoard uses this to store info
* about the active tab.
*/
export let TAB = '__tab__';
/**
* The name of the property for users to set on a Polymer component
* in order for its stored properties to be stored in the URI unambiguously.
* (No need to set this if you want mutliple instances of the component to
* share URI state)
*
* Example:
* <my-component disambiguator="0"></my-component>
*
* The disambiguator should be set to any unique value so that multiple
* instances of the component can store properties in URI storage.
*
* Because it's hard to dereference this variable in HTML property bindings,
* it is NOT safe to change the disambiguator string without find+replace
* across the codebase.
*/
export let DISAMBIGUATOR = 'disambiguator';
/**
* Return a boolean stored in the URI, given a corresponding key.
* Undefined if not found.
*/
export function getBoolean(key: string): boolean {
let items = _componentToDict(_readComponent());
let item = items[key];
return item === 'true' ? true : item === 'false' ? false : undefined;
}
/**
* Store a boolean in the URI, with a corresponding key.
*/
export function setBoolean(key: string, value: boolean) {
let items = _componentToDict(_readComponent());
items[key] = value.toString();
_writeComponent(_dictToComponent(items));
}
/**
* Return a string stored in the URI, given a corresponding key.
* Undefined if not found.
*/
export function getString(key: string): string {
let items = _componentToDict(_readComponent());
return items[key];
}
/**
* Store a string in the URI, with a corresponding key.
*/
export function setString(key: string, value: string) {
let items = _componentToDict(_readComponent());
items[key] = value;
_writeComponent(_dictToComponent(items));
}
/**
* Return a number stored in the URI, given a corresponding key.
* Undefined if not found.
*/
export function getNumber(key: string): number {
let items = _componentToDict(_readComponent());
return items[key] === undefined ? undefined : +items[key];
}
/**
* Store a number in the URI, with a corresponding key.
*/
export function setNumber(key: string, value: number) {
let items = _componentToDict(_readComponent());
items[key] = '' + value;
_writeComponent(_dictToComponent(items));
}
/**
* Return an object stored in the URI, given a corresponding key.
* Undefined if not found.
*/
export function getObject(key: string): Object {
let items = _componentToDict(_readComponent());
return items[key] === undefined ? undefined : JSON.parse(atob(items[key]));
}
/**
* Store an object in the URI, with a corresponding key.
*/
export function setObject(key: string, value: Object) {
let items = _componentToDict(_readComponent());
items[key] = btoa(JSON.stringify(value));
_writeComponent(_dictToComponent(items));
}
/**
* Get a unique storage name for a (Polymer component, propertyName) tuple.
*
* DISAMBIGUATOR must be set on the component, if other components use the
* same propertyName.
*/
export function getURIStorageName(
component: Object, propertyName: string): string {
let d = component[DISAMBIGUATOR];
let components = d == null ? [propertyName] : [d, propertyName];
return components.join('.');
}
/**
* Return a function that:
* (1) Initializes a Polymer boolean property with a default value, if its
* value is not already set
* (2) Sets up listener that updates Polymer property on hash change.
*/
export function getBooleanInitializer(
propertyName: string, defaultVal: boolean): Function {
return _getInitializer(getBoolean, propertyName, defaultVal);
}
/**
* Return a function that:
* (1) Initializes a Polymer string property with a default value, if its
* value is not already set
* (2) Sets up listener that updates Polymer property on hash change.
*/
export function getStringInitializer(
propertyName: string, defaultVal: string): Function {
return _getInitializer(getString, propertyName, defaultVal);
}
/**
* Return a function that:
* (1) Initializes a Polymer number property with a default value, if its
* value is not already set
* (2) Sets up listener that updates Polymer property on hash change.
*/
export function getNumberInitializer(
propertyName: string, defaultVal: number): Function {
return _getInitializer(getNumber, propertyName, defaultVal);
}
/**
* Return a function that:
* (1) Initializes a Polymer Object property with a default value, if its
* value is not already set
* (2) Sets up listener that updates Polymer property on hash change.
*
* Generates a deep clone of the defaultVal to avoid mutation issues.
*/
export function getObjectInitializer(
propertyName: string, defaultVal: Object): Function {
return _getInitializer(getObject, propertyName, defaultVal);
}
/**
* Return a function that updates URIStorage when a string property changes.
*/
export function | (
propertyName: string, defaultVal: boolean): Function {
return _getObserver(getBoolean, setBoolean, propertyName, defaultVal);
}
/**
* Return a function that updates URIStorage when a string property changes.
*/
export function getStringObserver(
propertyName: string, defaultVal: string): Function {
return _getObserver(getString, setString, propertyName, defaultVal);
}
/**
* Return a function that updates URIStorage when a number property changes.
*/
export function getNumberObserver(
propertyName: string, defaultVal: number): Function {
return _getObserver(getNumber, setNumber, propertyName, defaultVal);
}
/**
* Return a function that updates URIStorage when an object property changes.
* Generates a deep clone of the defaultVal to avoid mutation issues.
*/
export function getObjectObserver(
propertyName: string, defaultVal: Object): Function {
let clone = _.cloneDeep(defaultVal);
return _getObserver(getObject, setObject, propertyName, clone);
}
/**
* Read component from URI (e.g. returns "events&runPrefix=train*").
*/
function _readComponent(): string {
return TF.Globals.USE_HASH ? window.location.hash.slice(1) :
TF.Globals.FAKE_HASH;
}
/**
* Write component to URI.
*/
function _writeComponent(component: string) {
if (TF.Globals.USE_HASH) {
window.location.hash = component;
} else {
TF.Globals.FAKE_HASH = component;
}
}
/**
* Convert dictionary of strings into a URI Component.
* All key value entries get added as key value pairs in the component,
* with the exception of a key with the TAB value, which if present
* gets prepended to the URI Component string for backwards comptability
* reasons.
*/
function _dictToComponent(items: StringDict): string {
let component = '';
// Add the tab name e.g. 'events', 'images', 'histograms' as a prefix
// for backwards compatbility.
if (items[TAB] !== undefined) {
component += items[TAB];
}
// Join other strings with &key=value notation
let nonTab = _.pairs(items)
.filter(function(pair) { return pair[0] !== TAB; })
.map(function(pair) {
return encodeURIComponent(pair[0]) + '=' +
encodeURIComponent(pair[1]);
})
.join('&');
return nonTab.length > 0 ? (component + '&' + nonTab) : component;
}
/**
* Convert a URI Component into a dictionary of strings.
* Component should consist of key-value pairs joined by a delimiter
* with the exception of the tabName.
* Returns dict consisting of all key-value pairs and
* dict[TAB] = tabName
*/
function _componentToDict(component: string): StringDict {
let items = {} as StringDict;
let tokens = component.split('&');
tokens.forEach(function(token) {
let kv = token.split('=');
// Special backwards compatibility for URI components like #events
if (kv.length === 1 && _.contains(TF.Globals.TABS, kv[0])) {
items[TAB] = kv[0];
} else if (kv.length === 2) {
items[decodeURIComponent(kv[0])] = decodeURIComponent(kv[1]);
}
});
return items;
}
/**
* Return a function that:
* (1) Initializes a Polymer property with a default value, if its
* value is not already set
* (2) Sets up listener that updates Polymer property on hash change.
*/
function _getInitializer<T>(
get: (name: string) => T, propertyName: string, defaultVal: T): Function {
return function() {
let URIStorageName = getURIStorageName(this, propertyName);
// setComponentValue will be called every time the hash changes, and is
// responsible for ensuring that new state in the hash will be propagated
// to the component with that property.
// It is important that this function does not re-assign needlessly,
// to avoid Polymer observer churn.
let setComponentValue = () => {
let uriValue = get(URIStorageName);
let currentValue = this[propertyName];
// if uriValue is undefined, we will ensure that the property has the
// default value
if (uriValue === undefined) {
if (!_.isEqual(currentValue, defaultVal)) {
// If we don't have an explicit URI value, then we need to ensure
// the property value is equal to the default value.
// We will assign a clone rather than the canonical default, because
// the component receiving this property may mutate it, and we need
// to keep a pristine copy of the default.
this[propertyName] = _.clone(defaultVal);
}
// In this case, we have an explicit URI value, so we will ensure that
// the component has an equivalent value.
} else if (!_.isEqual(uriValue, currentValue)) {
this[propertyName] = uriValue;
}
};
// Set the value on the property.
setComponentValue();
// Update it when the hashchanges.
window.addEventListener('hashchange', setComponentValue);
};
}
/**
* Return a function that updates URIStorage when a property changes.
*/
function _getObserver<T>(
get: (name: string) => T, set: (name: string, newVal: T) => void,
propertyName: string, defaultVal: T): Function {
return function() {
let URIStorageName = getURIStorageName(this, propertyName);
let newVal = this[propertyName];
if (!_.isEqual(newVal, get(URIStorageName))) {
if (_.isEqual(newVal, defaultVal)) {
_unset(URIStorageName);
} else {
set(URIStorageName, newVal);
}
}
};
}
/**
* Delete a key from the URI.
*/
function _unset(key) {
let items = _componentToDict(_readComponent());
delete items[key];
_writeComponent(_dictToComponent(items));
}
}
| getBooleanObserver | identifier_name |
storage.ts | /* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
/* tslint:disable:no-namespace variable-name */
/**
* The Storage Module provides storage for URL parameters, and an API for
* getting and setting TensorBoard's stateful URI.
*
* It generates URI components like: events&runPrefix=train*
* which TensorBoard uses after like localhost:8000/#events&runPrefix=train*
* to store state in the URI.
*/
module TF.URIStorage {
type StringDict = {[key: string]: string};
/**
* A key that users cannot use, since TensorBoard uses this to store info
* about the active tab.
*/
export let TAB = '__tab__';
/**
* The name of the property for users to set on a Polymer component
* in order for its stored properties to be stored in the URI unambiguously.
* (No need to set this if you want mutliple instances of the component to
* share URI state)
*
* Example:
* <my-component disambiguator="0"></my-component>
*
* The disambiguator should be set to any unique value so that multiple
* instances of the component can store properties in URI storage.
*
* Because it's hard to dereference this variable in HTML property bindings,
* it is NOT safe to change the disambiguator string without find+replace
* across the codebase.
*/
export let DISAMBIGUATOR = 'disambiguator';
/**
* Return a boolean stored in the URI, given a corresponding key.
* Undefined if not found.
*/
export function getBoolean(key: string): boolean {
let items = _componentToDict(_readComponent());
let item = items[key];
return item === 'true' ? true : item === 'false' ? false : undefined;
}
/**
* Store a boolean in the URI, with a corresponding key.
*/
export function setBoolean(key: string, value: boolean) {
let items = _componentToDict(_readComponent());
items[key] = value.toString();
_writeComponent(_dictToComponent(items));
}
/**
* Return a string stored in the URI, given a corresponding key.
* Undefined if not found.
*/
export function getString(key: string): string {
let items = _componentToDict(_readComponent());
return items[key];
}
/**
* Store a string in the URI, with a corresponding key.
*/
export function setString(key: string, value: string) {
let items = _componentToDict(_readComponent());
items[key] = value;
_writeComponent(_dictToComponent(items));
}
/**
* Return a number stored in the URI, given a corresponding key.
* Undefined if not found.
*/
export function getNumber(key: string): number {
let items = _componentToDict(_readComponent());
return items[key] === undefined ? undefined : +items[key];
}
/**
* Store a number in the URI, with a corresponding key.
*/
export function setNumber(key: string, value: number) {
let items = _componentToDict(_readComponent());
items[key] = '' + value;
_writeComponent(_dictToComponent(items));
}
| export function getObject(key: string): Object {
let items = _componentToDict(_readComponent());
return items[key] === undefined ? undefined : JSON.parse(atob(items[key]));
}
/**
* Store an object in the URI, with a corresponding key.
*/
export function setObject(key: string, value: Object) {
let items = _componentToDict(_readComponent());
items[key] = btoa(JSON.stringify(value));
_writeComponent(_dictToComponent(items));
}
/**
* Get a unique storage name for a (Polymer component, propertyName) tuple.
*
* DISAMBIGUATOR must be set on the component, if other components use the
* same propertyName.
*/
export function getURIStorageName(
component: Object, propertyName: string): string {
let d = component[DISAMBIGUATOR];
let components = d == null ? [propertyName] : [d, propertyName];
return components.join('.');
}
/**
* Return a function that:
* (1) Initializes a Polymer boolean property with a default value, if its
* value is not already set
* (2) Sets up listener that updates Polymer property on hash change.
*/
export function getBooleanInitializer(
propertyName: string, defaultVal: boolean): Function {
return _getInitializer(getBoolean, propertyName, defaultVal);
}
/**
* Return a function that:
* (1) Initializes a Polymer string property with a default value, if its
* value is not already set
* (2) Sets up listener that updates Polymer property on hash change.
*/
export function getStringInitializer(
propertyName: string, defaultVal: string): Function {
return _getInitializer(getString, propertyName, defaultVal);
}
/**
* Return a function that:
* (1) Initializes a Polymer number property with a default value, if its
* value is not already set
* (2) Sets up listener that updates Polymer property on hash change.
*/
export function getNumberInitializer(
propertyName: string, defaultVal: number): Function {
return _getInitializer(getNumber, propertyName, defaultVal);
}
/**
* Return a function that:
* (1) Initializes a Polymer Object property with a default value, if its
* value is not already set
* (2) Sets up listener that updates Polymer property on hash change.
*
* Generates a deep clone of the defaultVal to avoid mutation issues.
*/
export function getObjectInitializer(
propertyName: string, defaultVal: Object): Function {
return _getInitializer(getObject, propertyName, defaultVal);
}
/**
* Return a function that updates URIStorage when a string property changes.
*/
export function getBooleanObserver(
propertyName: string, defaultVal: boolean): Function {
return _getObserver(getBoolean, setBoolean, propertyName, defaultVal);
}
/**
* Return a function that updates URIStorage when a string property changes.
*/
export function getStringObserver(
propertyName: string, defaultVal: string): Function {
return _getObserver(getString, setString, propertyName, defaultVal);
}
/**
* Return a function that updates URIStorage when a number property changes.
*/
export function getNumberObserver(
propertyName: string, defaultVal: number): Function {
return _getObserver(getNumber, setNumber, propertyName, defaultVal);
}
/**
* Return a function that updates URIStorage when an object property changes.
* Generates a deep clone of the defaultVal to avoid mutation issues.
*/
export function getObjectObserver(
propertyName: string, defaultVal: Object): Function {
let clone = _.cloneDeep(defaultVal);
return _getObserver(getObject, setObject, propertyName, clone);
}
/**
* Read component from URI (e.g. returns "events&runPrefix=train*").
*/
function _readComponent(): string {
return TF.Globals.USE_HASH ? window.location.hash.slice(1) :
TF.Globals.FAKE_HASH;
}
/**
* Write component to URI.
*/
function _writeComponent(component: string) {
if (TF.Globals.USE_HASH) {
window.location.hash = component;
} else {
TF.Globals.FAKE_HASH = component;
}
}
/**
* Convert dictionary of strings into a URI Component.
* All key value entries get added as key value pairs in the component,
* with the exception of a key with the TAB value, which if present
* gets prepended to the URI Component string for backwards comptability
* reasons.
*/
function _dictToComponent(items: StringDict): string {
let component = '';
// Add the tab name e.g. 'events', 'images', 'histograms' as a prefix
// for backwards compatbility.
if (items[TAB] !== undefined) {
component += items[TAB];
}
// Join other strings with &key=value notation
let nonTab = _.pairs(items)
.filter(function(pair) { return pair[0] !== TAB; })
.map(function(pair) {
return encodeURIComponent(pair[0]) + '=' +
encodeURIComponent(pair[1]);
})
.join('&');
return nonTab.length > 0 ? (component + '&' + nonTab) : component;
}
/**
* Convert a URI Component into a dictionary of strings.
* Component should consist of key-value pairs joined by a delimiter
* with the exception of the tabName.
* Returns dict consisting of all key-value pairs and
* dict[TAB] = tabName
*/
function _componentToDict(component: string): StringDict {
let items = {} as StringDict;
let tokens = component.split('&');
tokens.forEach(function(token) {
let kv = token.split('=');
// Special backwards compatibility for URI components like #events
if (kv.length === 1 && _.contains(TF.Globals.TABS, kv[0])) {
items[TAB] = kv[0];
} else if (kv.length === 2) {
items[decodeURIComponent(kv[0])] = decodeURIComponent(kv[1]);
}
});
return items;
}
/**
* Return a function that:
* (1) Initializes a Polymer property with a default value, if its
* value is not already set
* (2) Sets up listener that updates Polymer property on hash change.
*/
function _getInitializer<T>(
get: (name: string) => T, propertyName: string, defaultVal: T): Function {
return function() {
let URIStorageName = getURIStorageName(this, propertyName);
// setComponentValue will be called every time the hash changes, and is
// responsible for ensuring that new state in the hash will be propagated
// to the component with that property.
// It is important that this function does not re-assign needlessly,
// to avoid Polymer observer churn.
let setComponentValue = () => {
let uriValue = get(URIStorageName);
let currentValue = this[propertyName];
// if uriValue is undefined, we will ensure that the property has the
// default value
if (uriValue === undefined) {
if (!_.isEqual(currentValue, defaultVal)) {
// If we don't have an explicit URI value, then we need to ensure
// the property value is equal to the default value.
// We will assign a clone rather than the canonical default, because
// the component receiving this property may mutate it, and we need
// to keep a pristine copy of the default.
this[propertyName] = _.clone(defaultVal);
}
// In this case, we have an explicit URI value, so we will ensure that
// the component has an equivalent value.
} else if (!_.isEqual(uriValue, currentValue)) {
this[propertyName] = uriValue;
}
};
// Set the value on the property.
setComponentValue();
// Update it when the hashchanges.
window.addEventListener('hashchange', setComponentValue);
};
}
/**
* Return a function that updates URIStorage when a property changes.
*/
function _getObserver<T>(
get: (name: string) => T, set: (name: string, newVal: T) => void,
propertyName: string, defaultVal: T): Function {
return function() {
let URIStorageName = getURIStorageName(this, propertyName);
let newVal = this[propertyName];
if (!_.isEqual(newVal, get(URIStorageName))) {
if (_.isEqual(newVal, defaultVal)) {
_unset(URIStorageName);
} else {
set(URIStorageName, newVal);
}
}
};
}
/**
* Delete a key from the URI.
*/
function _unset(key) {
let items = _componentToDict(_readComponent());
delete items[key];
_writeComponent(_dictToComponent(items));
}
} | /**
* Return an object stored in the URI, given a corresponding key.
* Undefined if not found.
*/ | random_line_split |
runUnitTests.py | #!/usr/bin/env python
#
# =========================================================================
# This file is part of six.sicd-python
# =========================================================================
#
# (C) Copyright 2004 - 2016, MDA Information Systems LLC
#
# six.sicd-python is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; If not,
# see <http://www.gnu.org/licenses/>.
#
import os
from subprocess import call
import sys
import utils
def run():
|
if __name__ == '__main__':
utils.setPaths()
run()
| install = utils.installPath()
unitTestDir = os.path.join(install, 'unittests')
childDirs = os.listdir(unitTestDir)
for childDir in childDirs:
for test in os.listdir(os.path.join(unitTestDir, childDir)):
print(os.path.join(unitTestDir, childDir, test))
testPathname = os.path.join(unitTestDir, childDir, test)
if test.endswith('.py'):
command = ['python', testPathname]
else:
command = [utils.executableName(testPathname)]
if call(command) != 0:
print('{} failed'.format(testPathname))
return False
return True | identifier_body |
runUnitTests.py | #!/usr/bin/env python
#
# =========================================================================
# This file is part of six.sicd-python
# =========================================================================
#
# (C) Copyright 2004 - 2016, MDA Information Systems LLC
#
# six.sicd-python is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of | # You should have received a copy of the GNU Lesser General Public
# License along with this program; If not,
# see <http://www.gnu.org/licenses/>.
#
import os
from subprocess import call
import sys
import utils
def run():
install = utils.installPath()
unitTestDir = os.path.join(install, 'unittests')
childDirs = os.listdir(unitTestDir)
for childDir in childDirs:
for test in os.listdir(os.path.join(unitTestDir, childDir)):
print(os.path.join(unitTestDir, childDir, test))
testPathname = os.path.join(unitTestDir, childDir, test)
if test.endswith('.py'):
command = ['python', testPathname]
else:
command = [utils.executableName(testPathname)]
if call(command) != 0:
print('{} failed'.format(testPathname))
return False
return True
if __name__ == '__main__':
utils.setPaths()
run() | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# | random_line_split |
runUnitTests.py | #!/usr/bin/env python
#
# =========================================================================
# This file is part of six.sicd-python
# =========================================================================
#
# (C) Copyright 2004 - 2016, MDA Information Systems LLC
#
# six.sicd-python is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; If not,
# see <http://www.gnu.org/licenses/>.
#
import os
from subprocess import call
import sys
import utils
def | ():
install = utils.installPath()
unitTestDir = os.path.join(install, 'unittests')
childDirs = os.listdir(unitTestDir)
for childDir in childDirs:
for test in os.listdir(os.path.join(unitTestDir, childDir)):
print(os.path.join(unitTestDir, childDir, test))
testPathname = os.path.join(unitTestDir, childDir, test)
if test.endswith('.py'):
command = ['python', testPathname]
else:
command = [utils.executableName(testPathname)]
if call(command) != 0:
print('{} failed'.format(testPathname))
return False
return True
if __name__ == '__main__':
utils.setPaths()
run()
| run | identifier_name |
runUnitTests.py | #!/usr/bin/env python
#
# =========================================================================
# This file is part of six.sicd-python
# =========================================================================
#
# (C) Copyright 2004 - 2016, MDA Information Systems LLC
#
# six.sicd-python is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; If not,
# see <http://www.gnu.org/licenses/>.
#
import os
from subprocess import call
import sys
import utils
def run():
install = utils.installPath()
unitTestDir = os.path.join(install, 'unittests')
childDirs = os.listdir(unitTestDir)
for childDir in childDirs:
for test in os.listdir(os.path.join(unitTestDir, childDir)):
print(os.path.join(unitTestDir, childDir, test))
testPathname = os.path.join(unitTestDir, childDir, test)
if test.endswith('.py'):
command = ['python', testPathname]
else:
command = [utils.executableName(testPathname)]
if call(command) != 0:
print('{} failed'.format(testPathname))
return False
return True
if __name__ == '__main__':
| utils.setPaths()
run() | conditional_block | |
label.rs | use Scalar;
use color::{Color, Colorable};
use elmesque::Element;
use graphics::character::CharacterCache;
use label::FontSize;
use theme::Theme;
use ui::GlyphCache;
use widget::{self, Widget, WidgetId};
/// Displays some given text centred within a rectangle.
#[derive(Clone, Debug)]
pub struct Label<'a> {
common: widget::CommonBuilder,
text: &'a str,
style: Style,
maybe_parent_id: Option<WidgetId>,
}
/// The styling for a Label's renderable Element.
#[allow(missing_docs, missing_copy_implementations)]
#[derive(Clone, Debug, PartialEq, RustcEncodable, RustcDecodable)]
pub struct Style {
maybe_font_size: Option<FontSize>,
maybe_color: Option<Color>,
}
/// The state to be stored between updates for the Label.
#[derive(Clone, Debug, PartialEq)]
pub struct State(String);
impl<'a> Label<'a> {
/// Construct a new Label widget.
pub fn new(text: &'a str) -> Label<'a> {
Label {
common: widget::CommonBuilder::new(),
text: text,
style: Style::new(),
maybe_parent_id: None,
}
}
/// Set the font size for the label.
#[inline]
pub fn font_size(mut self, size: FontSize) -> Label<'a> {
self.style.maybe_font_size = Some(size);
self
}
}
impl<'a> Widget for Label<'a> {
type State = State;
type Style = Style;
fn common(&self) -> &widget::CommonBuilder { &self.common }
fn common_mut(&mut self) -> &mut widget::CommonBuilder { &mut self.common }
fn unique_kind(&self) -> &'static str { "Label" }
fn init_state(&self) -> State { State(String::new()) }
fn style(&self) -> Style { self.style.clone() }
fn default_width<C: CharacterCache>(&self, theme: &Theme, glyph_cache: &GlyphCache<C>) -> Scalar {
glyph_cache.width(self.style.font_size(theme), self.text)
}
fn default_height(&self, theme: &Theme) -> Scalar {
self.style.font_size(theme) as Scalar
}
/// Update the state of the Label.
fn update<'b, 'c, C>(self, args: widget::UpdateArgs<'b, 'c, Self, C>) -> Option<State> | let widget::UpdateArgs { prev_state, .. } = args;
let widget::State { state: State(ref string), .. } = *prev_state;
if &string[..] != self.text { Some(State(self.text.to_string())) } else { None }
}
/// Construct an Element for the Label.
fn draw<'b, C>(args: widget::DrawArgs<'b, Self, C>) -> Element
where C: CharacterCache,
{
use elmesque::form::{text, collage};
use elmesque::text::Text;
let widget::DrawArgs { state, style, theme, .. } = args;
let widget::State { state: State(ref string), dim, xy, .. } = *state;
let size = style.font_size(theme);
let color = style.color(theme);
let form = text(Text::from_string(string.clone())
.color(color)
.height(size as f64)).shift(xy[0].floor(), xy[1].floor());
collage(dim[0] as i32, dim[1] as i32, vec![form])
}
}
impl Style {
/// Construct the default Style.
pub fn new() -> Style {
Style {
maybe_color: None,
maybe_font_size: None,
}
}
/// Get the Color for an Element.
pub fn color(&self, theme: &Theme) -> Color {
self.maybe_color.unwrap_or(theme.label_color)
}
/// Get the label font size for an Element.
pub fn font_size(&self, theme: &Theme) -> FontSize {
self.maybe_font_size.unwrap_or(theme.font_size_medium)
}
}
impl<'a> Colorable for Label<'a> {
fn color(mut self, color: Color) -> Self {
self.style.maybe_color = Some(color);
self
}
} | where C: CharacterCache,
{ | random_line_split |
label.rs |
use Scalar;
use color::{Color, Colorable};
use elmesque::Element;
use graphics::character::CharacterCache;
use label::FontSize;
use theme::Theme;
use ui::GlyphCache;
use widget::{self, Widget, WidgetId};
/// Displays some given text centred within a rectangle.
#[derive(Clone, Debug)]
pub struct Label<'a> {
common: widget::CommonBuilder,
text: &'a str,
style: Style,
maybe_parent_id: Option<WidgetId>,
}
/// The styling for a Label's renderable Element.
#[allow(missing_docs, missing_copy_implementations)]
#[derive(Clone, Debug, PartialEq, RustcEncodable, RustcDecodable)]
pub struct Style {
maybe_font_size: Option<FontSize>,
maybe_color: Option<Color>,
}
/// The state to be stored between updates for the Label.
#[derive(Clone, Debug, PartialEq)]
pub struct State(String);
impl<'a> Label<'a> {
/// Construct a new Label widget.
pub fn new(text: &'a str) -> Label<'a> {
Label {
common: widget::CommonBuilder::new(),
text: text,
style: Style::new(),
maybe_parent_id: None,
}
}
/// Set the font size for the label.
#[inline]
pub fn font_size(mut self, size: FontSize) -> Label<'a> {
self.style.maybe_font_size = Some(size);
self
}
}
impl<'a> Widget for Label<'a> {
type State = State;
type Style = Style;
fn common(&self) -> &widget::CommonBuilder { &self.common }
fn | (&mut self) -> &mut widget::CommonBuilder { &mut self.common }
fn unique_kind(&self) -> &'static str { "Label" }
fn init_state(&self) -> State { State(String::new()) }
fn style(&self) -> Style { self.style.clone() }
fn default_width<C: CharacterCache>(&self, theme: &Theme, glyph_cache: &GlyphCache<C>) -> Scalar {
glyph_cache.width(self.style.font_size(theme), self.text)
}
fn default_height(&self, theme: &Theme) -> Scalar {
self.style.font_size(theme) as Scalar
}
/// Update the state of the Label.
fn update<'b, 'c, C>(self, args: widget::UpdateArgs<'b, 'c, Self, C>) -> Option<State>
where C: CharacterCache,
{
let widget::UpdateArgs { prev_state, .. } = args;
let widget::State { state: State(ref string), .. } = *prev_state;
if &string[..] != self.text { Some(State(self.text.to_string())) } else { None }
}
/// Construct an Element for the Label.
fn draw<'b, C>(args: widget::DrawArgs<'b, Self, C>) -> Element
where C: CharacterCache,
{
use elmesque::form::{text, collage};
use elmesque::text::Text;
let widget::DrawArgs { state, style, theme, .. } = args;
let widget::State { state: State(ref string), dim, xy, .. } = *state;
let size = style.font_size(theme);
let color = style.color(theme);
let form = text(Text::from_string(string.clone())
.color(color)
.height(size as f64)).shift(xy[0].floor(), xy[1].floor());
collage(dim[0] as i32, dim[1] as i32, vec![form])
}
}
impl Style {
/// Construct the default Style.
pub fn new() -> Style {
Style {
maybe_color: None,
maybe_font_size: None,
}
}
/// Get the Color for an Element.
pub fn color(&self, theme: &Theme) -> Color {
self.maybe_color.unwrap_or(theme.label_color)
}
/// Get the label font size for an Element.
pub fn font_size(&self, theme: &Theme) -> FontSize {
self.maybe_font_size.unwrap_or(theme.font_size_medium)
}
}
impl<'a> Colorable for Label<'a> {
fn color(mut self, color: Color) -> Self {
self.style.maybe_color = Some(color);
self
}
}
| common_mut | identifier_name |
label.rs |
use Scalar;
use color::{Color, Colorable};
use elmesque::Element;
use graphics::character::CharacterCache;
use label::FontSize;
use theme::Theme;
use ui::GlyphCache;
use widget::{self, Widget, WidgetId};
/// Displays some given text centred within a rectangle.
#[derive(Clone, Debug)]
pub struct Label<'a> {
common: widget::CommonBuilder,
text: &'a str,
style: Style,
maybe_parent_id: Option<WidgetId>,
}
/// The styling for a Label's renderable Element.
#[allow(missing_docs, missing_copy_implementations)]
#[derive(Clone, Debug, PartialEq, RustcEncodable, RustcDecodable)]
pub struct Style {
maybe_font_size: Option<FontSize>,
maybe_color: Option<Color>,
}
/// The state to be stored between updates for the Label.
#[derive(Clone, Debug, PartialEq)]
pub struct State(String);
impl<'a> Label<'a> {
/// Construct a new Label widget.
pub fn new(text: &'a str) -> Label<'a> {
Label {
common: widget::CommonBuilder::new(),
text: text,
style: Style::new(),
maybe_parent_id: None,
}
}
/// Set the font size for the label.
#[inline]
pub fn font_size(mut self, size: FontSize) -> Label<'a> {
self.style.maybe_font_size = Some(size);
self
}
}
impl<'a> Widget for Label<'a> {
type State = State;
type Style = Style;
fn common(&self) -> &widget::CommonBuilder { &self.common }
fn common_mut(&mut self) -> &mut widget::CommonBuilder { &mut self.common }
fn unique_kind(&self) -> &'static str { "Label" }
fn init_state(&self) -> State { State(String::new()) }
fn style(&self) -> Style { self.style.clone() }
fn default_width<C: CharacterCache>(&self, theme: &Theme, glyph_cache: &GlyphCache<C>) -> Scalar {
glyph_cache.width(self.style.font_size(theme), self.text)
}
fn default_height(&self, theme: &Theme) -> Scalar {
self.style.font_size(theme) as Scalar
}
/// Update the state of the Label.
fn update<'b, 'c, C>(self, args: widget::UpdateArgs<'b, 'c, Self, C>) -> Option<State>
where C: CharacterCache,
{
let widget::UpdateArgs { prev_state, .. } = args;
let widget::State { state: State(ref string), .. } = *prev_state;
if &string[..] != self.text | else { None }
}
/// Construct an Element for the Label.
fn draw<'b, C>(args: widget::DrawArgs<'b, Self, C>) -> Element
where C: CharacterCache,
{
use elmesque::form::{text, collage};
use elmesque::text::Text;
let widget::DrawArgs { state, style, theme, .. } = args;
let widget::State { state: State(ref string), dim, xy, .. } = *state;
let size = style.font_size(theme);
let color = style.color(theme);
let form = text(Text::from_string(string.clone())
.color(color)
.height(size as f64)).shift(xy[0].floor(), xy[1].floor());
collage(dim[0] as i32, dim[1] as i32, vec![form])
}
}
impl Style {
/// Construct the default Style.
pub fn new() -> Style {
Style {
maybe_color: None,
maybe_font_size: None,
}
}
/// Get the Color for an Element.
pub fn color(&self, theme: &Theme) -> Color {
self.maybe_color.unwrap_or(theme.label_color)
}
/// Get the label font size for an Element.
pub fn font_size(&self, theme: &Theme) -> FontSize {
self.maybe_font_size.unwrap_or(theme.font_size_medium)
}
}
impl<'a> Colorable for Label<'a> {
fn color(mut self, color: Color) -> Self {
self.style.maybe_color = Some(color);
self
}
}
| { Some(State(self.text.to_string())) } | conditional_block |
io.ts | /**
* Wechaty Chatbot SDK - https://github.com/wechaty/wechaty
*
* @copyright 2016 Huan LI (李卓桓) <https://github.com/huan>, and
* Wechaty Contributors <https://github.com/wechaty>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
import { StateSwitch } from 'state-switch'
import WebSocket from 'ws'
import {
Message,
} from './user/mod'
import {
EventScanPayload,
} from 'wechaty-puppet'
import Peer, {
JsonRpcPayload,
JsonRpcPayloadResponse,
parse,
} from 'json-rpc-peer'
import {
config,
log,
} from './config'
import {
AnyFunction,
} from './types'
import {
Wechaty,
} from './wechaty'
import {
getPeer,
isJsonRpcRequest,
} from './io-peer/io-peer'
export interface IoOptions {
wechaty : Wechaty,
token : string,
apihost? : string,
protocol? : string,
servicePort? : number,
}
export const IO_EVENT_DICT = {
botie : 'tbw',
error : 'tbw',
heartbeat : 'tbw',
jsonrpc : 'JSON RPC',
login : 'tbw',
logout : 'tbw',
message : 'tbw',
raw : 'tbw',
reset : 'tbw',
scan : 'tbw',
shutdown : 'tbw',
sys : 'tbw',
update : 'tbw',
}
type IoEventName = keyof typeof IO_EVENT_DICT
interface IoEventScan {
name : 'scan',
payload : EventScanPayload,
}
interface IoEventJsonRpc {
name: 'jsonrpc',
payload: JsonRpcPayload,
}
interface IoEventAny {
name: IoEventName,
payload: any,
}
type IoEvent = IoEventScan | IoEventJsonRpc | IoEventAny
/**
* https://github.com/Chatie/botie/issues/2
* https://github.com/actions/github-script/blob/f035cea4677903b153fa754aa8c2bba66f8dc3eb/src/async-function.ts#L6
*/
const AsyncFunction = Object.getPrototypeOf(async () => null).constructor
// function callAsyncFunction<U extends {} = {}, V = unknown> (
// args: U,
// source: string
// ): Promise<V> {
// const fn = new AsyncFunction(...Object.keys(args), source)
// return fn(...Object.values(args))
// }
export class Io {
private readonly id : string
private readonly protocol : string
private eventBuffer : IoEvent[] = []
private ws : undefined | WebSocket
private readonly state = new StateSwitch('Io', { log })
private reconnectTimer? : NodeJS.Timer
private reconnectTimeout? : number
private lifeTimer? : NodeJS.Timer
private onMessage: undefined | AnyFunction
private scanPayload?: EventScanPayload
protected jsonRpc?: Peer
constructor (
private options: IoOptions,
) {
| blic toString () {
return `Io<${this.options.token}>`
}
private connected () {
return this.ws && this.ws.readyState === WebSocket.OPEN
}
public async start (): Promise<void> {
log.verbose('Io', 'start()')
if (this.lifeTimer) {
throw new Error('lifeTimer exist')
}
this.state.on('pending')
try {
this.initEventHook()
this.ws = await this.initWebSocket()
this.options.wechaty.on('login', () => { this.scanPayload = undefined })
this.options.wechaty.on('scan', (qrcode, status) => {
this.scanPayload = {
...this.scanPayload,
qrcode,
status,
}
})
this.lifeTimer = setInterval(() => {
if (this.ws && this.connected()) {
log.silly('Io', 'start() setInterval() ws.ping()')
// TODO: check 'pong' event on ws
this.ws.ping()
}
}, 1000 * 10)
this.state.on(true)
} catch (e) {
log.warn('Io', 'start() exception: %s', e.message)
this.state.off(true)
throw e
}
}
private initEventHook () {
log.verbose('Io', 'initEventHook()')
const wechaty = this.options.wechaty
wechaty.on('error', error => this.send({ name: 'error', payload: error }))
wechaty.on('heartbeat', data => this.send({ name: 'heartbeat', payload: { cuid: this.id, data } }))
wechaty.on('login', user => this.send({ name: 'login', payload: user.payload }))
wechaty.on('logout', user => this.send({ name: 'logout', payload: user.payload }))
wechaty.on('message', message => this.ioMessage(message))
// FIXME: payload schema need to be defined universal
// wechaty.on('scan', (url, code) => this.send({ name: 'scan', payload: { url, code } }))
wechaty.on('scan', (qrcode, status) => this.send({ name: 'scan', payload: { qrcode, status } } as IoEventScan))
}
private async initWebSocket (): Promise<WebSocket> {
log.verbose('Io', 'initWebSocket()')
// this.state.current('on', false)
// const auth = 'Basic ' + new Buffer(this.setting.token + ':X').toString('base64')
const auth = 'Token ' + this.options.token
const headers = { Authorization: auth }
if (!this.options.apihost) {
throw new Error('no apihost')
}
let endpoint = 'wss://' + this.options.apihost + '/v0/websocket'
// XXX quick and dirty: use no ssl for API_HOST other than official
// FIXME: use a configurable VARIABLE for the domain name at here:
if (!/api\.chatie\.io/.test(this.options.apihost)) {
endpoint = 'ws://' + this.options.apihost + '/v0/websocket'
}
const ws = this.ws = new WebSocket(endpoint, this.protocol, { headers })
ws.on('open', () => this.wsOnOpen(ws))
ws.on('message', data => this.wsOnMessage(data))
ws.on('error', e => this.wsOnError(e))
ws.on('close', (code, reason) => this.wsOnClose(ws, code, reason))
await new Promise((resolve, reject) => {
ws.once('open', resolve)
ws.once('error', reject)
ws.once('close', reject)
})
return ws
}
private async wsOnOpen (ws: WebSocket): Promise<void> {
if (this.protocol !== ws.protocol) {
log.error('Io', 'initWebSocket() require protocol[%s] failed', this.protocol)
// XXX deal with error?
}
log.verbose('Io', 'initWebSocket() connected with protocol [%s]', ws.protocol)
// this.currentState('connected')
// this.state.current('on')
// FIXME: how to keep alive???
// ws._socket.setKeepAlive(true, 100)
this.reconnectTimeout = undefined
const name = 'sys'
const payload = 'Wechaty version ' + this.options.wechaty.version() + ` with CUID: ${this.id}`
const initEvent: IoEvent = {
name,
payload,
}
await this.send(initEvent)
}
private async wsOnMessage (data: WebSocket.Data) {
log.silly('Io', 'initWebSocket() ws.on(message): %s', data)
// flags.binary will be set if a binary data is received.
// flags.masked will be set if the data was masked.
if (typeof data !== 'string') {
throw new Error('data should be string...')
}
const ioEvent: IoEvent = {
name : 'raw',
payload : data,
}
try {
const obj = JSON.parse(data)
ioEvent.name = obj.name
ioEvent.payload = obj.payload
} catch (e) {
log.verbose('Io', 'on(message) recv a non IoEvent data[%s]', data)
}
switch (ioEvent.name) {
case 'botie':
{
const payload = ioEvent.payload
const args = payload.args
const source = payload.source
try {
if (args[0] === 'message' && args.length === 1) {
const fn = new AsyncFunction(...args, source)
this.onMessage = fn
} else {
log.warn('Io', 'server pushed function is invalid. args: %s', JSON.stringify(args))
}
} catch (e) {
log.warn('Io', 'server pushed function exception: %s', e)
this.options.wechaty.emit('error', e)
}
}
break
case 'reset':
log.verbose('Io', 'on(reset): %s', ioEvent.payload)
await this.options.wechaty.reset(ioEvent.payload)
break
case 'shutdown':
log.info('Io', 'on(shutdown): %s', ioEvent.payload)
process.exit(0)
// eslint-disable-next-line
break
case 'update':
log.verbose('Io', 'on(update): %s', ioEvent.payload)
{
const wechaty = this.options.wechaty
if (wechaty.logonoff()) {
const loginEvent: IoEvent = {
name : 'login',
payload : (wechaty.userSelf() as any).payload,
}
await this.send(loginEvent)
}
if (this.scanPayload) {
const scanEvent: IoEventScan = {
name: 'scan',
payload: this.scanPayload,
}
await this.send(scanEvent)
}
}
break
case 'sys':
// do nothing
break
case 'logout':
log.info('Io', 'on(logout): %s', ioEvent.payload)
await this.options.wechaty.logout()
break
case 'jsonrpc':
log.info('Io', 'on(jsonrpc): %s', ioEvent.payload)
try {
const request = (ioEvent as IoEventJsonRpc).payload
if (!isJsonRpcRequest(request)) {
log.warn('Io', 'on(jsonrpc) payload is not a jsonrpc request: %s', JSON.stringify(request))
return
}
if (!this.jsonRpc) {
throw new Error('jsonRpc not initialized!')
}
const response = await this.jsonRpc.exec(request)
if (!response) {
log.warn('Io', 'on(jsonrpc) response is undefined.')
return
}
const payload = parse(response) as JsonRpcPayloadResponse
const jsonrpcEvent: IoEventJsonRpc = {
name: 'jsonrpc',
payload,
}
log.verbose('Io', 'on(jsonrpc) send(%s)', response)
await this.send(jsonrpcEvent)
} catch (e) {
log.error('Io', 'on(jsonrpc): %s', e)
}
break
default:
log.warn('Io', 'UNKNOWN on(%s): %s', ioEvent.name, ioEvent.payload)
break
}
}
// FIXME: it seems the parameter `e` might be `undefined`.
// @types/ws might has bug for `ws.on('error', e => this.wsOnError(e))`
private wsOnError (e?: Error) {
log.warn('Io', 'initWebSocket() error event[%s]', e && e.message)
if (!e) {
return
}
this.options.wechaty.emit('error', e)
// when `error`, there must have already a `close` event
// we should not call this.reconnect() again
//
// this.close()
// this.reconnect()
}
private wsOnClose (
ws : WebSocket,
code : number,
message : string,
): void {
if (this.state.on()) {
log.warn('Io', 'initWebSocket() close event[%d: %s]', code, message)
ws.close()
this.reconnect()
}
}
private reconnect () {
log.verbose('Io', 'reconnect()')
if (this.state.off()) {
log.warn('Io', 'reconnect() canceled because state.target() === offline')
return
}
if (this.connected()) {
log.warn('Io', 'reconnect() on a already connected io')
return
}
if (this.reconnectTimer) {
log.warn('Io', 'reconnect() on a already re-connecting io')
return
}
if (!this.reconnectTimeout) {
this.reconnectTimeout = 1
} else if (this.reconnectTimeout < 10 * 1000) {
this.reconnectTimeout *= 3
}
log.warn('Io', 'reconnect() will reconnect after %d s', Math.floor(this.reconnectTimeout / 1000))
this.reconnectTimer = setTimeout(async () => {
this.reconnectTimer = undefined
await this.initWebSocket()
}, this.reconnectTimeout)// as any as NodeJS.Timer
}
private async send (ioEvent?: IoEvent): Promise<void> {
if (!this.ws) {
throw new Error('no ws')
}
const ws = this.ws
if (ioEvent) {
log.silly('Io', 'send(%s)', JSON.stringify(ioEvent))
this.eventBuffer.push(ioEvent)
} else { log.silly('Io', 'send()') }
if (!this.connected()) {
log.verbose('Io', 'send() without a connected websocket, eventBuffer.length = %d', this.eventBuffer.length)
return
}
const list: Array<Promise<any>> = []
while (this.eventBuffer.length) {
const data = JSON.stringify(
this.eventBuffer.shift(),
)
const p = new Promise<void>((resolve, reject) => ws.send(
data,
(err: undefined | Error) => {
if (err) {
reject(err)
} else {
resolve()
}
},
))
list.push(p)
}
try {
await Promise.all(list)
} catch (e) {
log.error('Io', 'send() exception: %s', e.stack)
throw e
}
}
public async stop (): Promise<void> {
log.verbose('Io', 'stop()')
if (!this.ws) {
throw new Error('no ws')
}
this.state.off('pending')
// try to send IoEvents in buffer
await this.send()
this.eventBuffer = []
if (this.reconnectTimer) {
clearTimeout(this.reconnectTimer)
this.reconnectTimer = undefined
}
if (this.lifeTimer) {
clearInterval(this.lifeTimer)
this.lifeTimer = undefined
}
this.ws.close()
await new Promise<void>(resolve => {
if (this.ws) {
this.ws.once('close', resolve)
} else {
resolve()
}
})
this.ws = undefined
this.state.off(true)
}
/**
*
* Prepare to be overwritten by server setting
*
*/
private async ioMessage (m: Message): Promise<void> {
log.silly('Io', 'ioMessage() is a nop function before be overwritten from cloud')
if (typeof this.onMessage === 'function') {
await this.onMessage(m)
}
}
protected async syncMessage (m: Message): Promise<void> {
log.silly('Io', 'syncMessage(%s)', m)
const messageEvent: IoEvent = {
name : 'message',
payload : (m as any).payload,
}
await this.send(messageEvent)
}
}
| options.apihost = options.apihost || config.apihost
options.protocol = options.protocol || config.default.DEFAULT_PROTOCOL
this.id = options.wechaty.id
this.protocol = options.protocol + '|' + options.wechaty.id + '|' + config.serviceIp + '|' + options.servicePort
log.verbose('Io', 'instantiated with apihost[%s], token[%s], protocol[%s], cuid[%s]',
options.apihost,
options.token,
options.protocol,
this.id,
)
if (options.servicePort) {
this.jsonRpc = getPeer({
serviceGrpcPort: this.options.servicePort!,
})
}
}
pu | identifier_body |
io.ts | /**
* Wechaty Chatbot SDK - https://github.com/wechaty/wechaty
*
* @copyright 2016 Huan LI (李卓桓) <https://github.com/huan>, and
* Wechaty Contributors <https://github.com/wechaty>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
import { StateSwitch } from 'state-switch'
import WebSocket from 'ws'
import {
Message,
} from './user/mod'
import {
EventScanPayload,
} from 'wechaty-puppet'
import Peer, {
JsonRpcPayload,
JsonRpcPayloadResponse,
parse,
} from 'json-rpc-peer'
import {
config,
log,
} from './config'
import {
AnyFunction,
} from './types'
import {
Wechaty,
} from './wechaty'
import {
getPeer,
isJsonRpcRequest,
} from './io-peer/io-peer'
export interface IoOptions {
wechaty : Wechaty,
token : string,
apihost? : string,
protocol? : string,
servicePort? : number,
}
export const IO_EVENT_DICT = {
botie : 'tbw',
error : 'tbw',
heartbeat : 'tbw',
jsonrpc : 'JSON RPC',
login : 'tbw',
logout : 'tbw',
message : 'tbw',
raw : 'tbw',
reset : 'tbw',
scan : 'tbw',
shutdown : 'tbw',
sys : 'tbw',
update : 'tbw',
}
type IoEventName = keyof typeof IO_EVENT_DICT
interface IoEventScan {
name : 'scan',
payload : EventScanPayload,
}
interface IoEventJsonRpc {
name: 'jsonrpc',
payload: JsonRpcPayload,
}
interface IoEventAny {
name: IoEventName,
payload: any,
}
type IoEvent = IoEventScan | IoEventJsonRpc | IoEventAny
/**
* https://github.com/Chatie/botie/issues/2
* https://github.com/actions/github-script/blob/f035cea4677903b153fa754aa8c2bba66f8dc3eb/src/async-function.ts#L6
*/
const AsyncFunction = Object.getPrototypeOf(async () => null).constructor
// function callAsyncFunction<U extends {} = {}, V = unknown> (
// args: U,
// source: string
// ): Promise<V> {
// const fn = new AsyncFunction(...Object.keys(args), source)
// return fn(...Object.values(args))
// }
export class Io {
private readonly id : string
private readonly protocol : string
private eventBuffer : IoEvent[] = []
private ws : undefined | WebSocket
private readonly state = new StateSwitch('Io', { log })
private reconnectTimer? : NodeJS.Timer
private reconnectTimeout? : number
private lifeTimer? : NodeJS.Timer
private onMessage: undefined | AnyFunction
private scanPayload?: EventScanPayload
protected jsonRpc?: Peer
constructor (
private options: IoOptions,
) {
options.apihost = options.apihost || config.apihost
options.protocol = options.protocol || config.default.DEFAULT_PROTOCOL
this.id = options.wechaty.id
this.protocol = options.protocol + '|' + options.wechaty.id + '|' + config.serviceIp + '|' + options.servicePort
log.verbose('Io', 'instantiated with apihost[%s], token[%s], protocol[%s], cuid[%s]',
options.apihost,
options.token,
options.protocol,
this.id,
)
if (options.servicePort) {
this.jsonRpc = getPeer({
serviceGrpcPort: this.options.servicePort!,
})
}
}
public toString () {
return `Io<${this.options.token}>`
}
private connected () {
return this.ws && this.ws.readyState === WebSocket.OPEN
}
public async start (): Promise<void> {
log.verbose('Io', 'start()')
if (this.lifeTimer) {
throw new Error('lifeTimer exist')
}
this.state.on('pending')
try {
this.initEventHook()
this.ws = await this.initWebSocket()
this.options.wechaty.on('login', () => { this.scanPayload = undefined })
this.options.wechaty.on('scan', (qrcode, status) => {
this.scanPayload = {
...this.scanPayload,
qrcode,
status,
}
})
this.lifeTimer = setInterval(() => {
if (this.ws && this.connected()) {
log.silly('Io', 'start() setInterval() ws.ping()')
// TODO: check 'pong' event on ws
this.ws.ping()
}
}, 1000 * 10)
this.state.on(true)
} catch (e) {
log.warn('Io', 'start() exception: %s', e.message)
this.state.off(true)
throw e
}
}
private initEventHook () {
log.verbose('Io', 'initEventHook()')
const wechaty = this.options.wechaty
wechaty.on('error', error => this.send({ name: 'error', payload: error }))
wechaty.on('heartbeat', data => this.send({ name: 'heartbeat', payload: { cuid: this.id, data } }))
wechaty.on('login', user => this.send({ name: 'login', payload: user.payload }))
wechaty.on('logout', user => this.send({ name: 'logout', payload: user.payload }))
wechaty.on('message', message => this.ioMessage(message))
// FIXME: payload schema need to be defined universal
// wechaty.on('scan', (url, code) => this.send({ name: 'scan', payload: { url, code } }))
wechaty.on('scan', (qrcode, status) => this.send({ name: 'scan', payload: { qrcode, status } } as IoEventScan))
}
private async initWebSocket (): Promise<WebSocket> {
log.verbose('Io', 'initWebSocket()')
// this.state.current('on', false)
// const auth = 'Basic ' + new Buffer(this.setting.token + ':X').toString('base64')
const auth = 'Token ' + this.options.token
const headers = { Authorization: auth }
if (!this.options.apihost) {
throw new Error('no apihost')
}
let endpoint = 'wss://' + this.options.apihost + '/v0/websocket'
// XXX quick and dirty: use no ssl for API_HOST other than official
// FIXME: use a configurable VARIABLE for the domain name at here:
if (!/api\.chatie\.io/.test(this.options.apihost)) {
endpoint = 'ws://' + this.options.apihost + '/v0/websocket'
}
const ws = this.ws = new WebSocket(endpoint, this.protocol, { headers })
ws.on('open', () => this.wsOnOpen(ws))
ws.on('message', data => this.wsOnMessage(data))
ws.on('error', e => this.wsOnError(e))
ws.on('close', (code, reason) => this.wsOnClose(ws, code, reason))
await new Promise((resolve, reject) => {
ws.once('open', resolve)
ws.once('error', reject)
ws.once('close', reject)
})
return ws
}
private async wsOnOpen (ws: WebSocket): Promise<void> {
if (this.protocol !== ws.protocol) {
log.error('Io', 'initWebSocket() require protocol[%s] failed', this.protocol)
// XXX deal with error?
}
log.verbose('Io', 'initWebSocket() connected with protocol [%s]', ws.protocol)
// this.currentState('connected')
// this.state.current('on')
// FIXME: how to keep alive???
// ws._socket.setKeepAlive(true, 100)
this.reconnectTimeout = undefined
const name = 'sys'
const payload = 'Wechaty version ' + this.options.wechaty.version() + ` with CUID: ${this.id}`
const initEvent: IoEvent = {
name,
payload,
}
await this.send(initEvent)
}
private async wsOnMe | : WebSocket.Data) {
log.silly('Io', 'initWebSocket() ws.on(message): %s', data)
// flags.binary will be set if a binary data is received.
// flags.masked will be set if the data was masked.
if (typeof data !== 'string') {
throw new Error('data should be string...')
}
const ioEvent: IoEvent = {
name : 'raw',
payload : data,
}
try {
const obj = JSON.parse(data)
ioEvent.name = obj.name
ioEvent.payload = obj.payload
} catch (e) {
log.verbose('Io', 'on(message) recv a non IoEvent data[%s]', data)
}
switch (ioEvent.name) {
case 'botie':
{
const payload = ioEvent.payload
const args = payload.args
const source = payload.source
try {
if (args[0] === 'message' && args.length === 1) {
const fn = new AsyncFunction(...args, source)
this.onMessage = fn
} else {
log.warn('Io', 'server pushed function is invalid. args: %s', JSON.stringify(args))
}
} catch (e) {
log.warn('Io', 'server pushed function exception: %s', e)
this.options.wechaty.emit('error', e)
}
}
break
case 'reset':
log.verbose('Io', 'on(reset): %s', ioEvent.payload)
await this.options.wechaty.reset(ioEvent.payload)
break
case 'shutdown':
log.info('Io', 'on(shutdown): %s', ioEvent.payload)
process.exit(0)
// eslint-disable-next-line
break
case 'update':
log.verbose('Io', 'on(update): %s', ioEvent.payload)
{
const wechaty = this.options.wechaty
if (wechaty.logonoff()) {
const loginEvent: IoEvent = {
name : 'login',
payload : (wechaty.userSelf() as any).payload,
}
await this.send(loginEvent)
}
if (this.scanPayload) {
const scanEvent: IoEventScan = {
name: 'scan',
payload: this.scanPayload,
}
await this.send(scanEvent)
}
}
break
case 'sys':
// do nothing
break
case 'logout':
log.info('Io', 'on(logout): %s', ioEvent.payload)
await this.options.wechaty.logout()
break
case 'jsonrpc':
log.info('Io', 'on(jsonrpc): %s', ioEvent.payload)
try {
const request = (ioEvent as IoEventJsonRpc).payload
if (!isJsonRpcRequest(request)) {
log.warn('Io', 'on(jsonrpc) payload is not a jsonrpc request: %s', JSON.stringify(request))
return
}
if (!this.jsonRpc) {
throw new Error('jsonRpc not initialized!')
}
const response = await this.jsonRpc.exec(request)
if (!response) {
log.warn('Io', 'on(jsonrpc) response is undefined.')
return
}
const payload = parse(response) as JsonRpcPayloadResponse
const jsonrpcEvent: IoEventJsonRpc = {
name: 'jsonrpc',
payload,
}
log.verbose('Io', 'on(jsonrpc) send(%s)', response)
await this.send(jsonrpcEvent)
} catch (e) {
log.error('Io', 'on(jsonrpc): %s', e)
}
break
default:
log.warn('Io', 'UNKNOWN on(%s): %s', ioEvent.name, ioEvent.payload)
break
}
}
// FIXME: it seems the parameter `e` might be `undefined`.
// @types/ws might has bug for `ws.on('error', e => this.wsOnError(e))`
private wsOnError (e?: Error) {
log.warn('Io', 'initWebSocket() error event[%s]', e && e.message)
if (!e) {
return
}
this.options.wechaty.emit('error', e)
// when `error`, there must have already a `close` event
// we should not call this.reconnect() again
//
// this.close()
// this.reconnect()
}
private wsOnClose (
ws : WebSocket,
code : number,
message : string,
): void {
if (this.state.on()) {
log.warn('Io', 'initWebSocket() close event[%d: %s]', code, message)
ws.close()
this.reconnect()
}
}
private reconnect () {
log.verbose('Io', 'reconnect()')
if (this.state.off()) {
log.warn('Io', 'reconnect() canceled because state.target() === offline')
return
}
if (this.connected()) {
log.warn('Io', 'reconnect() on a already connected io')
return
}
if (this.reconnectTimer) {
log.warn('Io', 'reconnect() on a already re-connecting io')
return
}
if (!this.reconnectTimeout) {
this.reconnectTimeout = 1
} else if (this.reconnectTimeout < 10 * 1000) {
this.reconnectTimeout *= 3
}
log.warn('Io', 'reconnect() will reconnect after %d s', Math.floor(this.reconnectTimeout / 1000))
this.reconnectTimer = setTimeout(async () => {
this.reconnectTimer = undefined
await this.initWebSocket()
}, this.reconnectTimeout)// as any as NodeJS.Timer
}
private async send (ioEvent?: IoEvent): Promise<void> {
if (!this.ws) {
throw new Error('no ws')
}
const ws = this.ws
if (ioEvent) {
log.silly('Io', 'send(%s)', JSON.stringify(ioEvent))
this.eventBuffer.push(ioEvent)
} else { log.silly('Io', 'send()') }
if (!this.connected()) {
log.verbose('Io', 'send() without a connected websocket, eventBuffer.length = %d', this.eventBuffer.length)
return
}
const list: Array<Promise<any>> = []
while (this.eventBuffer.length) {
const data = JSON.stringify(
this.eventBuffer.shift(),
)
const p = new Promise<void>((resolve, reject) => ws.send(
data,
(err: undefined | Error) => {
if (err) {
reject(err)
} else {
resolve()
}
},
))
list.push(p)
}
try {
await Promise.all(list)
} catch (e) {
log.error('Io', 'send() exception: %s', e.stack)
throw e
}
}
public async stop (): Promise<void> {
log.verbose('Io', 'stop()')
if (!this.ws) {
throw new Error('no ws')
}
this.state.off('pending')
// try to send IoEvents in buffer
await this.send()
this.eventBuffer = []
if (this.reconnectTimer) {
clearTimeout(this.reconnectTimer)
this.reconnectTimer = undefined
}
if (this.lifeTimer) {
clearInterval(this.lifeTimer)
this.lifeTimer = undefined
}
this.ws.close()
await new Promise<void>(resolve => {
if (this.ws) {
this.ws.once('close', resolve)
} else {
resolve()
}
})
this.ws = undefined
this.state.off(true)
}
/**
*
* Prepare to be overwritten by server setting
*
*/
private async ioMessage (m: Message): Promise<void> {
log.silly('Io', 'ioMessage() is a nop function before be overwritten from cloud')
if (typeof this.onMessage === 'function') {
await this.onMessage(m)
}
}
protected async syncMessage (m: Message): Promise<void> {
log.silly('Io', 'syncMessage(%s)', m)
const messageEvent: IoEvent = {
name : 'message',
payload : (m as any).payload,
}
await this.send(messageEvent)
}
}
| ssage (data | identifier_name |
io.ts | /**
* Wechaty Chatbot SDK - https://github.com/wechaty/wechaty
*
* @copyright 2016 Huan LI (李卓桓) <https://github.com/huan>, and
* Wechaty Contributors <https://github.com/wechaty>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
import { StateSwitch } from 'state-switch'
import WebSocket from 'ws'
import {
Message,
} from './user/mod'
import {
EventScanPayload,
} from 'wechaty-puppet'
import Peer, {
JsonRpcPayload,
JsonRpcPayloadResponse,
parse,
} from 'json-rpc-peer'
import {
config,
log,
} from './config'
import {
AnyFunction,
} from './types'
import {
Wechaty,
} from './wechaty'
import {
getPeer,
isJsonRpcRequest,
} from './io-peer/io-peer'
export interface IoOptions {
wechaty : Wechaty,
token : string,
apihost? : string, | protocol? : string,
servicePort? : number,
}
export const IO_EVENT_DICT = {
botie : 'tbw',
error : 'tbw',
heartbeat : 'tbw',
jsonrpc : 'JSON RPC',
login : 'tbw',
logout : 'tbw',
message : 'tbw',
raw : 'tbw',
reset : 'tbw',
scan : 'tbw',
shutdown : 'tbw',
sys : 'tbw',
update : 'tbw',
}
type IoEventName = keyof typeof IO_EVENT_DICT
interface IoEventScan {
name : 'scan',
payload : EventScanPayload,
}
interface IoEventJsonRpc {
name: 'jsonrpc',
payload: JsonRpcPayload,
}
interface IoEventAny {
name: IoEventName,
payload: any,
}
type IoEvent = IoEventScan | IoEventJsonRpc | IoEventAny
/**
* https://github.com/Chatie/botie/issues/2
* https://github.com/actions/github-script/blob/f035cea4677903b153fa754aa8c2bba66f8dc3eb/src/async-function.ts#L6
*/
const AsyncFunction = Object.getPrototypeOf(async () => null).constructor
// function callAsyncFunction<U extends {} = {}, V = unknown> (
// args: U,
// source: string
// ): Promise<V> {
// const fn = new AsyncFunction(...Object.keys(args), source)
// return fn(...Object.values(args))
// }
export class Io {
private readonly id : string
private readonly protocol : string
private eventBuffer : IoEvent[] = []
private ws : undefined | WebSocket
private readonly state = new StateSwitch('Io', { log })
private reconnectTimer? : NodeJS.Timer
private reconnectTimeout? : number
private lifeTimer? : NodeJS.Timer
private onMessage: undefined | AnyFunction
private scanPayload?: EventScanPayload
protected jsonRpc?: Peer
constructor (
private options: IoOptions,
) {
options.apihost = options.apihost || config.apihost
options.protocol = options.protocol || config.default.DEFAULT_PROTOCOL
this.id = options.wechaty.id
this.protocol = options.protocol + '|' + options.wechaty.id + '|' + config.serviceIp + '|' + options.servicePort
log.verbose('Io', 'instantiated with apihost[%s], token[%s], protocol[%s], cuid[%s]',
options.apihost,
options.token,
options.protocol,
this.id,
)
if (options.servicePort) {
this.jsonRpc = getPeer({
serviceGrpcPort: this.options.servicePort!,
})
}
}
public toString () {
return `Io<${this.options.token}>`
}
private connected () {
return this.ws && this.ws.readyState === WebSocket.OPEN
}
public async start (): Promise<void> {
log.verbose('Io', 'start()')
if (this.lifeTimer) {
throw new Error('lifeTimer exist')
}
this.state.on('pending')
try {
this.initEventHook()
this.ws = await this.initWebSocket()
this.options.wechaty.on('login', () => { this.scanPayload = undefined })
this.options.wechaty.on('scan', (qrcode, status) => {
this.scanPayload = {
...this.scanPayload,
qrcode,
status,
}
})
this.lifeTimer = setInterval(() => {
if (this.ws && this.connected()) {
log.silly('Io', 'start() setInterval() ws.ping()')
// TODO: check 'pong' event on ws
this.ws.ping()
}
}, 1000 * 10)
this.state.on(true)
} catch (e) {
log.warn('Io', 'start() exception: %s', e.message)
this.state.off(true)
throw e
}
}
private initEventHook () {
log.verbose('Io', 'initEventHook()')
const wechaty = this.options.wechaty
wechaty.on('error', error => this.send({ name: 'error', payload: error }))
wechaty.on('heartbeat', data => this.send({ name: 'heartbeat', payload: { cuid: this.id, data } }))
wechaty.on('login', user => this.send({ name: 'login', payload: user.payload }))
wechaty.on('logout', user => this.send({ name: 'logout', payload: user.payload }))
wechaty.on('message', message => this.ioMessage(message))
// FIXME: payload schema need to be defined universal
// wechaty.on('scan', (url, code) => this.send({ name: 'scan', payload: { url, code } }))
wechaty.on('scan', (qrcode, status) => this.send({ name: 'scan', payload: { qrcode, status } } as IoEventScan))
}
private async initWebSocket (): Promise<WebSocket> {
log.verbose('Io', 'initWebSocket()')
// this.state.current('on', false)
// const auth = 'Basic ' + new Buffer(this.setting.token + ':X').toString('base64')
const auth = 'Token ' + this.options.token
const headers = { Authorization: auth }
if (!this.options.apihost) {
throw new Error('no apihost')
}
let endpoint = 'wss://' + this.options.apihost + '/v0/websocket'
// XXX quick and dirty: use no ssl for API_HOST other than official
// FIXME: use a configurable VARIABLE for the domain name at here:
if (!/api\.chatie\.io/.test(this.options.apihost)) {
endpoint = 'ws://' + this.options.apihost + '/v0/websocket'
}
const ws = this.ws = new WebSocket(endpoint, this.protocol, { headers })
ws.on('open', () => this.wsOnOpen(ws))
ws.on('message', data => this.wsOnMessage(data))
ws.on('error', e => this.wsOnError(e))
ws.on('close', (code, reason) => this.wsOnClose(ws, code, reason))
await new Promise((resolve, reject) => {
ws.once('open', resolve)
ws.once('error', reject)
ws.once('close', reject)
})
return ws
}
private async wsOnOpen (ws: WebSocket): Promise<void> {
if (this.protocol !== ws.protocol) {
log.error('Io', 'initWebSocket() require protocol[%s] failed', this.protocol)
// XXX deal with error?
}
log.verbose('Io', 'initWebSocket() connected with protocol [%s]', ws.protocol)
// this.currentState('connected')
// this.state.current('on')
// FIXME: how to keep alive???
// ws._socket.setKeepAlive(true, 100)
this.reconnectTimeout = undefined
const name = 'sys'
const payload = 'Wechaty version ' + this.options.wechaty.version() + ` with CUID: ${this.id}`
const initEvent: IoEvent = {
name,
payload,
}
await this.send(initEvent)
}
private async wsOnMessage (data: WebSocket.Data) {
log.silly('Io', 'initWebSocket() ws.on(message): %s', data)
// flags.binary will be set if a binary data is received.
// flags.masked will be set if the data was masked.
if (typeof data !== 'string') {
throw new Error('data should be string...')
}
const ioEvent: IoEvent = {
name : 'raw',
payload : data,
}
try {
const obj = JSON.parse(data)
ioEvent.name = obj.name
ioEvent.payload = obj.payload
} catch (e) {
log.verbose('Io', 'on(message) recv a non IoEvent data[%s]', data)
}
switch (ioEvent.name) {
case 'botie':
{
const payload = ioEvent.payload
const args = payload.args
const source = payload.source
try {
if (args[0] === 'message' && args.length === 1) {
const fn = new AsyncFunction(...args, source)
this.onMessage = fn
} else {
log.warn('Io', 'server pushed function is invalid. args: %s', JSON.stringify(args))
}
} catch (e) {
log.warn('Io', 'server pushed function exception: %s', e)
this.options.wechaty.emit('error', e)
}
}
break
case 'reset':
log.verbose('Io', 'on(reset): %s', ioEvent.payload)
await this.options.wechaty.reset(ioEvent.payload)
break
case 'shutdown':
log.info('Io', 'on(shutdown): %s', ioEvent.payload)
process.exit(0)
// eslint-disable-next-line
break
case 'update':
log.verbose('Io', 'on(update): %s', ioEvent.payload)
{
const wechaty = this.options.wechaty
if (wechaty.logonoff()) {
const loginEvent: IoEvent = {
name : 'login',
payload : (wechaty.userSelf() as any).payload,
}
await this.send(loginEvent)
}
if (this.scanPayload) {
const scanEvent: IoEventScan = {
name: 'scan',
payload: this.scanPayload,
}
await this.send(scanEvent)
}
}
break
case 'sys':
// do nothing
break
case 'logout':
log.info('Io', 'on(logout): %s', ioEvent.payload)
await this.options.wechaty.logout()
break
case 'jsonrpc':
log.info('Io', 'on(jsonrpc): %s', ioEvent.payload)
try {
const request = (ioEvent as IoEventJsonRpc).payload
if (!isJsonRpcRequest(request)) {
log.warn('Io', 'on(jsonrpc) payload is not a jsonrpc request: %s', JSON.stringify(request))
return
}
if (!this.jsonRpc) {
throw new Error('jsonRpc not initialized!')
}
const response = await this.jsonRpc.exec(request)
if (!response) {
log.warn('Io', 'on(jsonrpc) response is undefined.')
return
}
const payload = parse(response) as JsonRpcPayloadResponse
const jsonrpcEvent: IoEventJsonRpc = {
name: 'jsonrpc',
payload,
}
log.verbose('Io', 'on(jsonrpc) send(%s)', response)
await this.send(jsonrpcEvent)
} catch (e) {
log.error('Io', 'on(jsonrpc): %s', e)
}
break
default:
log.warn('Io', 'UNKNOWN on(%s): %s', ioEvent.name, ioEvent.payload)
break
}
}
// FIXME: it seems the parameter `e` might be `undefined`.
// @types/ws might has bug for `ws.on('error', e => this.wsOnError(e))`
private wsOnError (e?: Error) {
log.warn('Io', 'initWebSocket() error event[%s]', e && e.message)
if (!e) {
return
}
this.options.wechaty.emit('error', e)
// when `error`, there must have already a `close` event
// we should not call this.reconnect() again
//
// this.close()
// this.reconnect()
}
private wsOnClose (
ws : WebSocket,
code : number,
message : string,
): void {
if (this.state.on()) {
log.warn('Io', 'initWebSocket() close event[%d: %s]', code, message)
ws.close()
this.reconnect()
}
}
private reconnect () {
log.verbose('Io', 'reconnect()')
if (this.state.off()) {
log.warn('Io', 'reconnect() canceled because state.target() === offline')
return
}
if (this.connected()) {
log.warn('Io', 'reconnect() on a already connected io')
return
}
if (this.reconnectTimer) {
log.warn('Io', 'reconnect() on a already re-connecting io')
return
}
if (!this.reconnectTimeout) {
this.reconnectTimeout = 1
} else if (this.reconnectTimeout < 10 * 1000) {
this.reconnectTimeout *= 3
}
log.warn('Io', 'reconnect() will reconnect after %d s', Math.floor(this.reconnectTimeout / 1000))
this.reconnectTimer = setTimeout(async () => {
this.reconnectTimer = undefined
await this.initWebSocket()
}, this.reconnectTimeout)// as any as NodeJS.Timer
}
private async send (ioEvent?: IoEvent): Promise<void> {
if (!this.ws) {
throw new Error('no ws')
}
const ws = this.ws
if (ioEvent) {
log.silly('Io', 'send(%s)', JSON.stringify(ioEvent))
this.eventBuffer.push(ioEvent)
} else { log.silly('Io', 'send()') }
if (!this.connected()) {
log.verbose('Io', 'send() without a connected websocket, eventBuffer.length = %d', this.eventBuffer.length)
return
}
const list: Array<Promise<any>> = []
while (this.eventBuffer.length) {
const data = JSON.stringify(
this.eventBuffer.shift(),
)
const p = new Promise<void>((resolve, reject) => ws.send(
data,
(err: undefined | Error) => {
if (err) {
reject(err)
} else {
resolve()
}
},
))
list.push(p)
}
try {
await Promise.all(list)
} catch (e) {
log.error('Io', 'send() exception: %s', e.stack)
throw e
}
}
public async stop (): Promise<void> {
log.verbose('Io', 'stop()')
if (!this.ws) {
throw new Error('no ws')
}
this.state.off('pending')
// try to send IoEvents in buffer
await this.send()
this.eventBuffer = []
if (this.reconnectTimer) {
clearTimeout(this.reconnectTimer)
this.reconnectTimer = undefined
}
if (this.lifeTimer) {
clearInterval(this.lifeTimer)
this.lifeTimer = undefined
}
this.ws.close()
await new Promise<void>(resolve => {
if (this.ws) {
this.ws.once('close', resolve)
} else {
resolve()
}
})
this.ws = undefined
this.state.off(true)
}
/**
*
* Prepare to be overwritten by server setting
*
*/
private async ioMessage (m: Message): Promise<void> {
log.silly('Io', 'ioMessage() is a nop function before be overwritten from cloud')
if (typeof this.onMessage === 'function') {
await this.onMessage(m)
}
}
protected async syncMessage (m: Message): Promise<void> {
log.silly('Io', 'syncMessage(%s)', m)
const messageEvent: IoEvent = {
name : 'message',
payload : (m as any).payload,
}
await this.send(messageEvent)
}
} | random_line_split | |
formant.py | ###################################
## SPADE formant analysis script ##
###################################
## Processes and extracts 'static' (single point) formant values, along with linguistic
## and acoustic information from corpora collected as part of the SPeech Across Dialects
## of English (SPADE) project.
## Input:
## - corpus name (e.g., Buckeye, SOTC)
## - corpus metadata (stored in a YAML file)
## this file should specify the path to the
## audio, transcripts, metadata files (e.g.,
## speaker, lexicon), and the a datafile containing
## prototype formant values to be used for formant
## estimation
## Output:
## - CSV of single-point vowel measurements (1 row per token),
## with columns for the linguistic, acoustic, and speaker information
## associated with that token
import sys
import os
import argparse
base_dir = os.path.dirname(os.path.abspath(__file__))
script_dir = os.path.join(base_dir, 'Common')
sys.path.insert(0, script_dir)
drop_formant = True
import common
from polyglotdb.utils import ensure_local_database_running
from polyglotdb import CorpusConfig
## Define and process command line arguments
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('corpus_name', help='Name of the corpus')
parser.add_argument('-r', '--reset', help="Reset the corpus", action='store_true')
parser.add_argument('-f', '--formant_reset', help="Reset formant measures", action = 'store_true', default=False)
parser.add_argument('-d', '--docker', help="This script is being called from Docker", action='store_true')
args = parser.parse_args()
corpus_name = args.corpus_name
reset = args.reset
docker = args.docker
reset_formants = args.formant_reset
directories = [x for x in os.listdir(base_dir) if os.path.isdir(x) and x != 'Common'] | 'The corpus {0} does not have a directory (available: {1}). Please make it with a {0}.yaml file inside.'.format(
args.corpus_name, ', '.join(directories)))
sys.exit(1)
corpus_conf = common.load_config(corpus_name)
print('Processing...')
## apply corpus reset or docker application
## if flags are used
if reset:
common.reset(corpus_name)
ip = common.server_ip
if docker:
ip = common.docker_ip
with ensure_local_database_running(corpus_name, port=common.server_port, ip=ip, token=common.load_token()) as params:
print(params)
config = CorpusConfig(corpus_name, **params)
config.formant_source = 'praat'
## Common set up: see commony.py for details of these functions ##
## Check if the corpus already has an associated graph object; if not,
## perform importing and parsing of the corpus files
common.loading(config, corpus_conf['corpus_directory'], corpus_conf['input_format'])
## Perform linguistic, speaker, and acoustic enrichment
common.lexicon_enrichment(config, corpus_conf['unisyn_spade_directory'], corpus_conf['dialect_code'])
common.speaker_enrichment(config, corpus_conf['speaker_enrichment_file'])
common.basic_enrichment(config, corpus_conf['vowel_inventory'] + corpus_conf['extra_syllabic_segments'], corpus_conf['pauses'])
## Check if the YAML specifies the path to the YAML file
## if not, load the prototypes file from the default location
## (within the SPADE corpus directory)
vowel_prototypes_path = corpus_conf.get('vowel_prototypes_path','')
if not vowel_prototypes_path:
vowel_prototypes_path = os.path.join(base_dir, corpus_name, '{}_prototypes.csv'.format(corpus_name))
## Determine the class of phone labels to be used for formant analysis
## based on lists provided in the YAML file.
if corpus_conf['stressed_vowels']:
vowels_to_analyze = corpus_conf['stressed_vowels']
else:
vowels_to_analyze = corpus_conf['vowel_inventory']
## Perform formant estimation and analysis
## see common.py for the details of this implementation
common.formant_acoustic_analysis(config, vowels_to_analyze, vowel_prototypes_path, drop_formant=drop_formant, reset_formants=reset_formants)
## Output the query (determined in common.py) as a CSV file
common.formant_export(config, corpus_name, corpus_conf['dialect_code'],
corpus_conf['speakers'], vowels_to_analyze, output_tracks=False)
print('Finishing up!') |
## sanity-check the corpus name (i.e., that it directs to a YAML file)
if args.corpus_name not in directories:
print( | random_line_split |
formant.py | ###################################
## SPADE formant analysis script ##
###################################
## Processes and extracts 'static' (single point) formant values, along with linguistic
## and acoustic information from corpora collected as part of the SPeech Across Dialects
## of English (SPADE) project.
## Input:
## - corpus name (e.g., Buckeye, SOTC)
## - corpus metadata (stored in a YAML file)
## this file should specify the path to the
## audio, transcripts, metadata files (e.g.,
## speaker, lexicon), and the a datafile containing
## prototype formant values to be used for formant
## estimation
## Output:
## - CSV of single-point vowel measurements (1 row per token),
## with columns for the linguistic, acoustic, and speaker information
## associated with that token
import sys
import os
import argparse
base_dir = os.path.dirname(os.path.abspath(__file__))
script_dir = os.path.join(base_dir, 'Common')
sys.path.insert(0, script_dir)
drop_formant = True
import common
from polyglotdb.utils import ensure_local_database_running
from polyglotdb import CorpusConfig
## Define and process command line arguments
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('corpus_name', help='Name of the corpus')
parser.add_argument('-r', '--reset', help="Reset the corpus", action='store_true')
parser.add_argument('-f', '--formant_reset', help="Reset formant measures", action = 'store_true', default=False)
parser.add_argument('-d', '--docker', help="This script is being called from Docker", action='store_true')
args = parser.parse_args()
corpus_name = args.corpus_name
reset = args.reset
docker = args.docker
reset_formants = args.formant_reset
directories = [x for x in os.listdir(base_dir) if os.path.isdir(x) and x != 'Common']
## sanity-check the corpus name (i.e., that it directs to a YAML file)
if args.corpus_name not in directories:
print(
'The corpus {0} does not have a directory (available: {1}). Please make it with a {0}.yaml file inside.'.format(
args.corpus_name, ', '.join(directories)))
sys.exit(1)
corpus_conf = common.load_config(corpus_name)
print('Processing...')
## apply corpus reset or docker application
## if flags are used
if reset:
common.reset(corpus_name)
ip = common.server_ip
if docker:
ip = common.docker_ip
with ensure_local_database_running(corpus_name, port=common.server_port, ip=ip, token=common.load_token()) as params:
print(params)
config = CorpusConfig(corpus_name, **params)
config.formant_source = 'praat'
## Common set up: see commony.py for details of these functions ##
## Check if the corpus already has an associated graph object; if not,
## perform importing and parsing of the corpus files
common.loading(config, corpus_conf['corpus_directory'], corpus_conf['input_format'])
## Perform linguistic, speaker, and acoustic enrichment
common.lexicon_enrichment(config, corpus_conf['unisyn_spade_directory'], corpus_conf['dialect_code'])
common.speaker_enrichment(config, corpus_conf['speaker_enrichment_file'])
common.basic_enrichment(config, corpus_conf['vowel_inventory'] + corpus_conf['extra_syllabic_segments'], corpus_conf['pauses'])
## Check if the YAML specifies the path to the YAML file
## if not, load the prototypes file from the default location
## (within the SPADE corpus directory)
vowel_prototypes_path = corpus_conf.get('vowel_prototypes_path','')
if not vowel_prototypes_path:
|
## Determine the class of phone labels to be used for formant analysis
## based on lists provided in the YAML file.
if corpus_conf['stressed_vowels']:
vowels_to_analyze = corpus_conf['stressed_vowels']
else:
vowels_to_analyze = corpus_conf['vowel_inventory']
## Perform formant estimation and analysis
## see common.py for the details of this implementation
common.formant_acoustic_analysis(config, vowels_to_analyze, vowel_prototypes_path, drop_formant=drop_formant, reset_formants=reset_formants)
## Output the query (determined in common.py) as a CSV file
common.formant_export(config, corpus_name, corpus_conf['dialect_code'],
corpus_conf['speakers'], vowels_to_analyze, output_tracks=False)
print('Finishing up!')
| vowel_prototypes_path = os.path.join(base_dir, corpus_name, '{}_prototypes.csv'.format(corpus_name)) | conditional_block |
config_validation.py | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
# Note: this module is tested by a unit test config_validation_test.py,
# rather than recipe simulation tests.
_BISECT_CONFIG_SCHEMA = {
'command': {'type': 'string', 'required': True},
'good_revision': {'type': 'revision', 'required': True},
'bad_revision': {'type': 'revision', 'required': True},
'bisect_bot': {'type': 'string'},
'metric': {'type': 'string'},
'bug_id': {'type': 'integer'},
'repeat_count': {'type': 'integer'},
'max_time_minutes': {'type': 'integer'},
'bisect_mode': {'type': 'string',
'choices': ['mean', 'return_code', 'std_dev']},
'gs_bucket': {'type': 'string'},
'builder_host': {'type': 'string'},
'builder_port': {'type': 'integer'},
'test_type': {'type': 'string'},
'improvement_direction': {'type': 'integer'},
'recipe_tester_name': {'type': 'string'},
'try_job_id': {'type': 'integer'},
}
class ValidationFail(Exception):
"""An exception class that represents a failure to validate."""
def validate_bisect_config(config, schema=None):
"""Checks the correctness of the given bisect job config."""
schema = _BISECT_CONFIG_SCHEMA if schema is None else schema
for key in set(schema):
validate_key(config, schema, key)
if 'good_revision' in schema and 'bad_revision' in schema:
_validate_revisions(config.get('good_revision'), config.get('bad_revision'))
if 'bisect_mode' in schema and 'metric' in schema:
_validate_metric(config.get('bisect_mode'), config.get('metric'))
def | (config, schema, key): # pragma: no cover
"""Checks the correctness of the given field in a config."""
if schema[key].get('required') and config.get(key) is None:
raise ValidationFail('Required key "%s" missing.' % key)
if config.get(key) is None:
return # Optional field.
value = config[key]
field_type = schema[key].get('type')
if field_type == 'string':
_validate_string(value, key)
elif field_type == 'integer':
_validate_integer(value, key)
elif field_type == 'revision':
_validate_revision(value, key)
elif field_type == 'boolean':
_validate_boolean(value, key)
if 'choices' in schema[key] and value not in schema[key]['choices']:
_fail(value, key)
def _fail(value, key):
raise ValidationFail('Invalid value %r for "%s".' % (value, key))
def _validate_string(value, key): # pragma: no cover
if not isinstance(value, basestring):
_fail(value, key)
def _validate_revision(value, key): # pragma: no cover
s = str(value)
if not (s.isdigit() or re.match('^[0-9A-Fa-f]{40}$', s)):
_fail(value, key)
def _validate_integer(value, key): # pragma: no cover
try:
int(value)
except ValueError:
_fail(value, key)
def _validate_boolean(value, key): # pragma: no cover
if value not in (True, False):
_fail(value, key)
def _validate_revisions(good_revision, bad_revision): # pragma: no cover
try:
earlier = int(good_revision)
later = int(bad_revision)
except ValueError:
return # The revisions could be sha1 hashes.
if earlier >= later:
raise ValidationFail('Order of good_revision (%d) and bad_revision(%d) '
'is reversed.' % (earlier, later))
def _validate_metric(bisect_mode, metric): # pragma: no cover
if bisect_mode not in ('mean', 'std_dev'):
return
if not (isinstance(metric, basestring) and metric.count('/') == 1):
raise ValidationFail('Invalid value for "metric": %s' % metric)
| validate_key | identifier_name |
config_validation.py | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
# Note: this module is tested by a unit test config_validation_test.py,
# rather than recipe simulation tests.
_BISECT_CONFIG_SCHEMA = {
'command': {'type': 'string', 'required': True},
'good_revision': {'type': 'revision', 'required': True},
'bad_revision': {'type': 'revision', 'required': True},
'bisect_bot': {'type': 'string'},
'metric': {'type': 'string'},
'bug_id': {'type': 'integer'},
'repeat_count': {'type': 'integer'},
'max_time_minutes': {'type': 'integer'},
'bisect_mode': {'type': 'string',
'choices': ['mean', 'return_code', 'std_dev']},
'gs_bucket': {'type': 'string'},
'builder_host': {'type': 'string'},
'builder_port': {'type': 'integer'},
'test_type': {'type': 'string'},
'improvement_direction': {'type': 'integer'},
'recipe_tester_name': {'type': 'string'},
'try_job_id': {'type': 'integer'},
}
class ValidationFail(Exception):
"""An exception class that represents a failure to validate."""
def validate_bisect_config(config, schema=None):
"""Checks the correctness of the given bisect job config."""
schema = _BISECT_CONFIG_SCHEMA if schema is None else schema
for key in set(schema):
validate_key(config, schema, key)
if 'good_revision' in schema and 'bad_revision' in schema:
_validate_revisions(config.get('good_revision'), config.get('bad_revision'))
if 'bisect_mode' in schema and 'metric' in schema:
_validate_metric(config.get('bisect_mode'), config.get('metric'))
def validate_key(config, schema, key): # pragma: no cover
"""Checks the correctness of the given field in a config."""
if schema[key].get('required') and config.get(key) is None:
raise ValidationFail('Required key "%s" missing.' % key)
if config.get(key) is None:
return # Optional field.
value = config[key]
field_type = schema[key].get('type')
if field_type == 'string':
_validate_string(value, key)
elif field_type == 'integer':
_validate_integer(value, key)
elif field_type == 'revision':
_validate_revision(value, key)
elif field_type == 'boolean':
_validate_boolean(value, key)
if 'choices' in schema[key] and value not in schema[key]['choices']:
_fail(value, key)
def _fail(value, key):
raise ValidationFail('Invalid value %r for "%s".' % (value, key))
def _validate_string(value, key): # pragma: no cover
if not isinstance(value, basestring):
_fail(value, key)
def _validate_revision(value, key): # pragma: no cover
s = str(value)
if not (s.isdigit() or re.match('^[0-9A-Fa-f]{40}$', s)):
_fail(value, key)
def _validate_integer(value, key): # pragma: no cover
try:
int(value)
except ValueError:
_fail(value, key)
def _validate_boolean(value, key): # pragma: no cover
if value not in (True, False):
_fail(value, key)
def _validate_revisions(good_revision, bad_revision): # pragma: no cover
|
def _validate_metric(bisect_mode, metric): # pragma: no cover
if bisect_mode not in ('mean', 'std_dev'):
return
if not (isinstance(metric, basestring) and metric.count('/') == 1):
raise ValidationFail('Invalid value for "metric": %s' % metric)
| try:
earlier = int(good_revision)
later = int(bad_revision)
except ValueError:
return # The revisions could be sha1 hashes.
if earlier >= later:
raise ValidationFail('Order of good_revision (%d) and bad_revision(%d) '
'is reversed.' % (earlier, later)) | identifier_body |
config_validation.py | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
# Note: this module is tested by a unit test config_validation_test.py,
# rather than recipe simulation tests.
_BISECT_CONFIG_SCHEMA = {
'command': {'type': 'string', 'required': True},
'good_revision': {'type': 'revision', 'required': True},
'bad_revision': {'type': 'revision', 'required': True},
'bisect_bot': {'type': 'string'},
'metric': {'type': 'string'},
'bug_id': {'type': 'integer'},
'repeat_count': {'type': 'integer'},
'max_time_minutes': {'type': 'integer'},
'bisect_mode': {'type': 'string',
'choices': ['mean', 'return_code', 'std_dev']},
'gs_bucket': {'type': 'string'},
'builder_host': {'type': 'string'},
'builder_port': {'type': 'integer'},
'test_type': {'type': 'string'},
'improvement_direction': {'type': 'integer'},
'recipe_tester_name': {'type': 'string'},
'try_job_id': {'type': 'integer'},
}
class ValidationFail(Exception):
"""An exception class that represents a failure to validate."""
def validate_bisect_config(config, schema=None):
"""Checks the correctness of the given bisect job config.""" |
if 'good_revision' in schema and 'bad_revision' in schema:
_validate_revisions(config.get('good_revision'), config.get('bad_revision'))
if 'bisect_mode' in schema and 'metric' in schema:
_validate_metric(config.get('bisect_mode'), config.get('metric'))
def validate_key(config, schema, key): # pragma: no cover
"""Checks the correctness of the given field in a config."""
if schema[key].get('required') and config.get(key) is None:
raise ValidationFail('Required key "%s" missing.' % key)
if config.get(key) is None:
return # Optional field.
value = config[key]
field_type = schema[key].get('type')
if field_type == 'string':
_validate_string(value, key)
elif field_type == 'integer':
_validate_integer(value, key)
elif field_type == 'revision':
_validate_revision(value, key)
elif field_type == 'boolean':
_validate_boolean(value, key)
if 'choices' in schema[key] and value not in schema[key]['choices']:
_fail(value, key)
def _fail(value, key):
raise ValidationFail('Invalid value %r for "%s".' % (value, key))
def _validate_string(value, key): # pragma: no cover
if not isinstance(value, basestring):
_fail(value, key)
def _validate_revision(value, key): # pragma: no cover
s = str(value)
if not (s.isdigit() or re.match('^[0-9A-Fa-f]{40}$', s)):
_fail(value, key)
def _validate_integer(value, key): # pragma: no cover
try:
int(value)
except ValueError:
_fail(value, key)
def _validate_boolean(value, key): # pragma: no cover
if value not in (True, False):
_fail(value, key)
def _validate_revisions(good_revision, bad_revision): # pragma: no cover
try:
earlier = int(good_revision)
later = int(bad_revision)
except ValueError:
return # The revisions could be sha1 hashes.
if earlier >= later:
raise ValidationFail('Order of good_revision (%d) and bad_revision(%d) '
'is reversed.' % (earlier, later))
def _validate_metric(bisect_mode, metric): # pragma: no cover
if bisect_mode not in ('mean', 'std_dev'):
return
if not (isinstance(metric, basestring) and metric.count('/') == 1):
raise ValidationFail('Invalid value for "metric": %s' % metric) | schema = _BISECT_CONFIG_SCHEMA if schema is None else schema
for key in set(schema):
validate_key(config, schema, key) | random_line_split |
config_validation.py | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
# Note: this module is tested by a unit test config_validation_test.py,
# rather than recipe simulation tests.
_BISECT_CONFIG_SCHEMA = {
'command': {'type': 'string', 'required': True},
'good_revision': {'type': 'revision', 'required': True},
'bad_revision': {'type': 'revision', 'required': True},
'bisect_bot': {'type': 'string'},
'metric': {'type': 'string'},
'bug_id': {'type': 'integer'},
'repeat_count': {'type': 'integer'},
'max_time_minutes': {'type': 'integer'},
'bisect_mode': {'type': 'string',
'choices': ['mean', 'return_code', 'std_dev']},
'gs_bucket': {'type': 'string'},
'builder_host': {'type': 'string'},
'builder_port': {'type': 'integer'},
'test_type': {'type': 'string'},
'improvement_direction': {'type': 'integer'},
'recipe_tester_name': {'type': 'string'},
'try_job_id': {'type': 'integer'},
}
class ValidationFail(Exception):
"""An exception class that represents a failure to validate."""
def validate_bisect_config(config, schema=None):
"""Checks the correctness of the given bisect job config."""
schema = _BISECT_CONFIG_SCHEMA if schema is None else schema
for key in set(schema):
validate_key(config, schema, key)
if 'good_revision' in schema and 'bad_revision' in schema:
_validate_revisions(config.get('good_revision'), config.get('bad_revision'))
if 'bisect_mode' in schema and 'metric' in schema:
_validate_metric(config.get('bisect_mode'), config.get('metric'))
def validate_key(config, schema, key): # pragma: no cover
"""Checks the correctness of the given field in a config."""
if schema[key].get('required') and config.get(key) is None:
raise ValidationFail('Required key "%s" missing.' % key)
if config.get(key) is None:
return # Optional field.
value = config[key]
field_type = schema[key].get('type')
if field_type == 'string':
_validate_string(value, key)
elif field_type == 'integer':
_validate_integer(value, key)
elif field_type == 'revision':
_validate_revision(value, key)
elif field_type == 'boolean':
_validate_boolean(value, key)
if 'choices' in schema[key] and value not in schema[key]['choices']:
_fail(value, key)
def _fail(value, key):
raise ValidationFail('Invalid value %r for "%s".' % (value, key))
def _validate_string(value, key): # pragma: no cover
if not isinstance(value, basestring):
_fail(value, key)
def _validate_revision(value, key): # pragma: no cover
s = str(value)
if not (s.isdigit() or re.match('^[0-9A-Fa-f]{40}$', s)):
|
def _validate_integer(value, key): # pragma: no cover
try:
int(value)
except ValueError:
_fail(value, key)
def _validate_boolean(value, key): # pragma: no cover
if value not in (True, False):
_fail(value, key)
def _validate_revisions(good_revision, bad_revision): # pragma: no cover
try:
earlier = int(good_revision)
later = int(bad_revision)
except ValueError:
return # The revisions could be sha1 hashes.
if earlier >= later:
raise ValidationFail('Order of good_revision (%d) and bad_revision(%d) '
'is reversed.' % (earlier, later))
def _validate_metric(bisect_mode, metric): # pragma: no cover
if bisect_mode not in ('mean', 'std_dev'):
return
if not (isinstance(metric, basestring) and metric.count('/') == 1):
raise ValidationFail('Invalid value for "metric": %s' % metric)
| _fail(value, key) | conditional_block |
validator.schema.test.ts | import * as fs from 'fs';
import * as mocha from 'mocha'
import {expect} from 'chai'
import {Validator} from '../src/validator/validator'
describe('Validate Schemas', () => {
var validator: Validator; | .then(() => done())
.catch((err: any) => done(err));
})
it('BasicValidation', (done) => {
var fileContent = fs.readFileSync('./test/baseFiles/validParam.json', 'utf8').toString();
let paramSchema: any = JSON.parse(fileContent);
_self.validator.validateSchema('./test/baseFiles/validTemplate.json', paramSchema.parameters)
.then((result: Error[]) => {
if(result) expect(result).to.be.empty;
done();
})
.catch((err: any) => done(err));
});
it('Invalid Json', () => {
var errors = _self.validator.validateJson('./test/baseFiles/invalidJson.json');
expect(errors).to.be.not.undefined;
});
}); | var _self = this;
before((done) => {
_self.validator = new Validator();
_self.validator.Initialize() | random_line_split |
shift.ts | import { Data } from '../models';
export function shift(data: Data): Data | {
const placement = data.placement;
const basePlacement = placement.split(' ')[0];
const shiftVariation = placement.split(' ')[1];
if (shiftVariation) {
const { host, target } = data.offsets;
const isVertical = ['bottom', 'top'].indexOf(basePlacement) !== -1;
const side = isVertical ? 'left' : 'top';
const measurement = isVertical ? 'width' : 'height';
const shiftOffsets = {
start: { [side]: host[side] },
end: {
[side]: (host[side] ?? 0) + host[measurement] - target[measurement]
}
};
data.offsets.target = {
...target, ...{
[side]: (side === shiftVariation ? shiftOffsets.start[side] : shiftOffsets.end[side])
}
};
}
return data;
} | identifier_body | |
shift.ts | import { Data } from '../models';
export function | (data: Data): Data {
const placement = data.placement;
const basePlacement = placement.split(' ')[0];
const shiftVariation = placement.split(' ')[1];
if (shiftVariation) {
const { host, target } = data.offsets;
const isVertical = ['bottom', 'top'].indexOf(basePlacement) !== -1;
const side = isVertical ? 'left' : 'top';
const measurement = isVertical ? 'width' : 'height';
const shiftOffsets = {
start: { [side]: host[side] },
end: {
[side]: (host[side] ?? 0) + host[measurement] - target[measurement]
}
};
data.offsets.target = {
...target, ...{
[side]: (side === shiftVariation ? shiftOffsets.start[side] : shiftOffsets.end[side])
}
};
}
return data;
}
| shift | identifier_name |
shift.ts | import { Data } from '../models';
export function shift(data: Data): Data {
const placement = data.placement;
const basePlacement = placement.split(' ')[0];
const shiftVariation = placement.split(' ')[1];
if (shiftVariation) {
const { host, target } = data.offsets; | const shiftOffsets = {
start: { [side]: host[side] },
end: {
[side]: (host[side] ?? 0) + host[measurement] - target[measurement]
}
};
data.offsets.target = {
...target, ...{
[side]: (side === shiftVariation ? shiftOffsets.start[side] : shiftOffsets.end[side])
}
};
}
return data;
} | const isVertical = ['bottom', 'top'].indexOf(basePlacement) !== -1;
const side = isVertical ? 'left' : 'top';
const measurement = isVertical ? 'width' : 'height';
| random_line_split |
shift.ts | import { Data } from '../models';
export function shift(data: Data): Data {
const placement = data.placement;
const basePlacement = placement.split(' ')[0];
const shiftVariation = placement.split(' ')[1];
if (shiftVariation) |
return data;
}
| {
const { host, target } = data.offsets;
const isVertical = ['bottom', 'top'].indexOf(basePlacement) !== -1;
const side = isVertical ? 'left' : 'top';
const measurement = isVertical ? 'width' : 'height';
const shiftOffsets = {
start: { [side]: host[side] },
end: {
[side]: (host[side] ?? 0) + host[measurement] - target[measurement]
}
};
data.offsets.target = {
...target, ...{
[side]: (side === shiftVariation ? shiftOffsets.start[side] : shiftOffsets.end[side])
}
};
} | conditional_block |
bootstrap-table-en-US.js | (function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(require('jquery')) :
typeof define === 'function' && define.amd ? define(['jquery'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.jQuery));
}(this, (function ($) { 'use strict';
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
var $__default = /*#__PURE__*/_interopDefaultLegacy($);
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
function createCommonjsModule(fn, module) {
return module = { exports: {} }, fn(module, module.exports), module.exports;
}
var check = function (it) {
return it && it.Math == Math && it;
};
// https://github.com/zloirock/core-js/issues/86#issuecomment-115759028
var global_1 =
/* global globalThis -- safe */
check(typeof globalThis == 'object' && globalThis) ||
check(typeof window == 'object' && window) ||
check(typeof self == 'object' && self) ||
check(typeof commonjsGlobal == 'object' && commonjsGlobal) ||
// eslint-disable-next-line no-new-func -- fallback
(function () { return this; })() || Function('return this')();
var fails = function (exec) {
try {
return !!exec();
} catch (error) {
return true;
}
};
// Detect IE8's incomplete defineProperty implementation
var descriptors = !fails(function () {
return Object.defineProperty({}, 1, { get: function () { return 7; } })[1] != 7;
});
var nativePropertyIsEnumerable = {}.propertyIsEnumerable;
var getOwnPropertyDescriptor$1 = Object.getOwnPropertyDescriptor;
// Nashorn ~ JDK8 bug
var NASHORN_BUG = getOwnPropertyDescriptor$1 && !nativePropertyIsEnumerable.call({ 1: 2 }, 1);
// `Object.prototype.propertyIsEnumerable` method implementation
// https://tc39.es/ecma262/#sec-object.prototype.propertyisenumerable
var f$4 = NASHORN_BUG ? function propertyIsEnumerable(V) {
var descriptor = getOwnPropertyDescriptor$1(this, V);
return !!descriptor && descriptor.enumerable;
} : nativePropertyIsEnumerable;
var objectPropertyIsEnumerable = {
f: f$4
};
var createPropertyDescriptor = function (bitmap, value) {
return {
enumerable: !(bitmap & 1),
configurable: !(bitmap & 2),
writable: !(bitmap & 4),
value: value
};
};
var toString = {}.toString;
var classofRaw = function (it) {
return toString.call(it).slice(8, -1);
};
var split = ''.split;
// fallback for non-array-like ES3 and non-enumerable old V8 strings
var indexedObject = fails(function () {
// throws an error in rhino, see https://github.com/mozilla/rhino/issues/346
// eslint-disable-next-line no-prototype-builtins -- safe
return !Object('z').propertyIsEnumerable(0);
}) ? function (it) {
return classofRaw(it) == 'String' ? split.call(it, '') : Object(it);
} : Object;
// `RequireObjectCoercible` abstract operation
// https://tc39.es/ecma262/#sec-requireobjectcoercible
var requireObjectCoercible = function (it) {
if (it == undefined) throw TypeError("Can't call method on " + it);
return it;
};
// toObject with fallback for non-array-like ES3 strings
var toIndexedObject = function (it) {
return indexedObject(requireObjectCoercible(it));
};
var isObject = function (it) {
return typeof it === 'object' ? it !== null : typeof it === 'function';
};
// `ToPrimitive` abstract operation
// https://tc39.es/ecma262/#sec-toprimitive
// instead of the ES6 spec version, we didn't implement @@toPrimitive case
// and the second argument - flag - preferred type is a string
var toPrimitive = function (input, PREFERRED_STRING) {
if (!isObject(input)) return input;
var fn, val;
if (PREFERRED_STRING && typeof (fn = input.toString) == 'function' && !isObject(val = fn.call(input))) return val;
if (typeof (fn = input.valueOf) == 'function' && !isObject(val = fn.call(input))) return val;
if (!PREFERRED_STRING && typeof (fn = input.toString) == 'function' && !isObject(val = fn.call(input))) return val;
throw TypeError("Can't convert object to primitive value");
};
var hasOwnProperty = {}.hasOwnProperty;
var has$1 = function (it, key) {
return hasOwnProperty.call(it, key);
};
var document = global_1.document;
// typeof document.createElement is 'object' in old IE
var EXISTS = isObject(document) && isObject(document.createElement);
var documentCreateElement = function (it) {
return EXISTS ? document.createElement(it) : {};
};
// Thank's IE8 for his funny defineProperty
var ie8DomDefine = !descriptors && !fails(function () {
return Object.defineProperty(documentCreateElement('div'), 'a', {
get: function () { return 7; }
}).a != 7;
});
var nativeGetOwnPropertyDescriptor = Object.getOwnPropertyDescriptor;
// `Object.getOwnPropertyDescriptor` method
// https://tc39.es/ecma262/#sec-object.getownpropertydescriptor
var f$3 = descriptors ? nativeGetOwnPropertyDescriptor : function getOwnPropertyDescriptor(O, P) {
O = toIndexedObject(O);
P = toPrimitive(P, true);
if (ie8DomDefine) try {
return nativeGetOwnPropertyDescriptor(O, P);
} catch (error) { /* empty */ }
if (has$1(O, P)) return createPropertyDescriptor(!objectPropertyIsEnumerable.f.call(O, P), O[P]);
};
var objectGetOwnPropertyDescriptor = {
f: f$3
};
var anObject = function (it) {
if (!isObject(it)) {
throw TypeError(String(it) + ' is not an object');
} return it;
};
var nativeDefineProperty = Object.defineProperty;
// `Object.defineProperty` method
// https://tc39.es/ecma262/#sec-object.defineproperty
var f$2 = descriptors ? nativeDefineProperty : function defineProperty(O, P, Attributes) {
anObject(O);
P = toPrimitive(P, true);
anObject(Attributes);
if (ie8DomDefine) try {
return nativeDefineProperty(O, P, Attributes);
} catch (error) { /* empty */ }
if ('get' in Attributes || 'set' in Attributes) throw TypeError('Accessors not supported');
if ('value' in Attributes) O[P] = Attributes.value;
return O;
};
var objectDefineProperty = {
f: f$2
};
var createNonEnumerableProperty = descriptors ? function (object, key, value) {
return objectDefineProperty.f(object, key, createPropertyDescriptor(1, value));
} : function (object, key, value) {
object[key] = value;
return object;
};
var setGlobal = function (key, value) {
try {
createNonEnumerableProperty(global_1, key, value);
} catch (error) {
global_1[key] = value;
} return value;
};
var SHARED = '__core-js_shared__';
var store$1 = global_1[SHARED] || setGlobal(SHARED, {});
var sharedStore = store$1;
var functionToString = Function.toString;
// this helper broken in `3.4.1-3.4.4`, so we can't use `shared` helper
if (typeof sharedStore.inspectSource != 'function') {
sharedStore.inspectSource = function (it) {
return functionToString.call(it);
};
}
var inspectSource = sharedStore.inspectSource;
var WeakMap$1 = global_1.WeakMap;
var nativeWeakMap = typeof WeakMap$1 === 'function' && /native code/.test(inspectSource(WeakMap$1));
var shared = createCommonjsModule(function (module) {
(module.exports = function (key, value) {
return sharedStore[key] || (sharedStore[key] = value !== undefined ? value : {});
})('versions', []).push({
version: '3.9.1',
mode: 'global',
copyright: '© 2021 Denis Pushkarev (zloirock.ru)'
});
});
var id = 0;
var postfix = Math.random();
var uid = function (key) {
return 'Symbol(' + String(key === undefined ? '' : key) + ')_' + (++id + postfix).toString(36);
};
var keys = shared('keys');
var sharedKey = function (key) {
return keys[key] || (keys[key] = uid(key));
};
var hiddenKeys$1 = {};
var WeakMap = global_1.WeakMap;
var set, get, has;
var enforce = function (it) {
return has(it) ? get(it) : set(it, {});
};
var getterFor = function (TYPE) {
return function (it) {
var state;
if (!isObject(it) || (state = get(it)).type !== TYPE) {
throw TypeError('Incompatible receiver, ' + TYPE + ' required');
} return state;
};
};
if (nativeWeakMap) {
var store = sharedStore.state || (sharedStore.state = new WeakMap());
var wmget = store.get;
var wmhas = store.has;
var wmset = store.set;
set = function (it, metadata) {
metadata.facade = it;
wmset.call(store, it, metadata);
return metadata;
};
get = function (it) {
return wmget.call(store, it) || {};
};
has = function (it) {
return wmhas.call(store, it);
};
} else {
var STATE = sharedKey('state');
hiddenKeys$1[STATE] = true;
set = function (it, metadata) {
metadata.facade = it;
createNonEnumerableProperty(it, STATE, metadata);
return metadata;
};
get = function (it) {
return has$1(it, STATE) ? it[STATE] : {};
};
has = function (it) {
return has$1(it, STATE);
};
}
var internalState = {
set: set,
get: get,
has: has,
enforce: enforce,
getterFor: getterFor
};
var redefine = createCommonjsModule(function (module) {
var getInternalState = internalState.get;
var enforceInternalState = internalState.enforce;
var TEMPLATE = String(String).split('String');
(module.exports = function (O, key, value, options) {
var unsafe = options ? !!options.unsafe : false;
var simple = options ? !!options.enumerable : false;
var noTargetGet = options ? !!options.noTargetGet : false;
var state;
if (typeof value == 'function') {
if (typeof key == 'string' && !has$1(value, 'name')) {
createNonEnumerableProperty(value, 'name', key);
}
state = enforceInternalState(value);
if (!state.source) {
state.source = TEMPLATE.join(typeof key == 'string' ? key : '');
}
}
if (O === global_1) {
if (simple) O[key] = value;
else setGlobal(key, value);
return;
} else if (!unsafe) {
delete O[key];
} else if (!noTargetGet && O[key]) {
simple = true;
}
if (simple) O[key] = value;
else createNonEnumerableProperty(O, key, value);
// add fake Function#toString for correct work wrapped methods / constructors with methods like LoDash isNative
})(Function.prototype, 'toString', function toString() {
return typeof this == 'function' && getInternalState(this).source || inspectSource(this);
});
});
var path = global_1;
var aFunction = function (variable) {
return typeof variable == 'function' ? variable : undefined;
};
var getBuiltIn = function (namespace, method) {
return arguments.length < 2 ? aFunction(path[namespace]) || aFunction(global_1[namespace])
: path[namespace] && path[namespace][method] || global_1[namespace] && global_1[namespace][method];
};
var ceil = Math.ceil;
var floor = Math.floor;
// `ToInteger` abstract operation
// https://tc39.es/ecma262/#sec-tointeger
var toInteger = function (argument) {
return isNaN(argument = +argument) ? 0 : (argument > 0 ? floor : ceil)(argument);
};
var min$1 = Math.min;
// `ToLength` abstract operation
// https://tc39.es/ecma262/#sec-tolength
var toLength = function (argument) {
return argument > 0 ? min$1(toInteger(argument), 0x1FFFFFFFFFFFFF) : 0; // 2 ** 53 - 1 == 9007199254740991
};
var max = Math.max;
var min = Math.min;
// Helper for a popular repeating case of the spec:
// Let integer be ? ToInteger(index).
// If integer < 0, let result be max((length + integer), 0); else let result be min(integer, length).
var toAbsoluteIndex = function (index, length) {
var integer = toInteger(index);
return integer < 0 ? max(integer + length, 0) : min(integer, length);
};
// `Array.prototype.{ indexOf, includes }` methods implementation
var createMethod = function (IS_INCLUDES) {
return function ($this, el, fromIndex) {
var O = toIndexedObject($this);
var length = toLength(O.length);
var index = toAbsoluteIndex(fromIndex, length);
var value;
// Array#includes uses SameValueZero equality algorithm
// eslint-disable-next-line no-self-compare -- NaN check
if (IS_INCLUDES && el != el) while (length > index) {
value = O[index++];
// eslint-disable-next-line no-self-compare -- NaN check
if (value != value) return true;
// Array#indexOf ignores holes, Array#includes - not
} else for (;length > index; index++) {
if ((IS_INCLUDES || index in O) && O[index] === el) return IS_INCLUDES || index || 0;
} return !IS_INCLUDES && -1;
};
};
var arrayIncludes = {
// `Array.prototype.includes` method
// https://tc39.es/ecma262/#sec-array.prototype.includes
includes: createMethod(true),
// `Array.prototype.indexOf` method
// https://tc39.es/ecma262/#sec-array.prototype.indexof
indexOf: createMethod(false)
};
var indexOf = arrayIncludes.indexOf;
var objectKeysInternal = function (object, names) {
var O = toIndexedObject(object);
var i = 0;
var result = [];
var key;
for (key in O) !has$1(hiddenKeys$1, key) && has$1(O, key) && result.push(key);
// Don't enum bug & hidden keys
while (names.length > i) if (has$1(O, key = names[i++])) {
~indexOf(result, key) || result.push(key);
}
return result;
};
// IE8- don't enum bug keys
var enumBugKeys = [
'constructor',
'hasOwnProperty',
'isPrototypeOf',
'propertyIsEnumerable',
'toLocaleString',
'toString',
'valueOf'
];
var hiddenKeys = enumBugKeys.concat('length', 'prototype');
// `Object.getOwnPropertyNames` method
// https://tc39.es/ecma262/#sec-object.getownpropertynames
var f$1 = Object.getOwnPropertyNames || function getOwnPropertyNames(O) {
return objectKeysInternal(O, hiddenKeys);
};
var objectGetOwnPropertyNames = {
f: f$1
};
var f = Object.getOwnPropertySymbols;
var objectGetOwnPropertySymbols = {
f: f
};
// all object keys, includes non-enumerable and symbols
var ownKeys = getBuiltIn('Reflect', 'ownKeys') || function ownKeys(it) {
var keys = objectGetOwnPropertyNames.f(anObject(it));
var getOwnPropertySymbols = objectGetOwnPropertySymbols.f;
return getOwnPropertySymbols ? keys.concat(getOwnPropertySymbols(it)) : keys;
};
var copyConstructorProperties = function (target, source) {
var keys = ownKeys(source);
var defineProperty = objectDefineProperty.f;
var getOwnPropertyDescriptor = objectGetOwnPropertyDescriptor.f;
for (var i = 0; i < keys.length; i++) {
var key = keys[i];
if (!has$1(target, key)) defineProperty(target, key, getOwnPropertyDescriptor(source, key));
}
};
var replacement = /#|\.prototype\./;
var isForced = function (feature, detection) {
var value = data[normalize(feature)];
return value == POLYFILL ? true
: value == NATIVE ? false
: typeof detection == 'function' ? fails(detection)
: !!detection;
};
var normalize = isForced.normalize = function (string) {
return String(string).replace(replacement, '.').toLowerCase();
};
var data = isForced.data = {};
var NATIVE = isForced.NATIVE = 'N';
var POLYFILL = isForced.POLYFILL = 'P';
var isForced_1 = isForced;
var getOwnPropertyDescriptor = objectGetOwnPropertyDescriptor.f;
/*
options.target - name of the target object
options.global - target is the global object
options.stat - export as static methods of target
options.proto - export as prototype methods of target
options.real - real prototype method for the `pure` version
options.forced - export even if the native feature is available
options.bind - bind methods to the target, required for the `pure` version
options.wrap - wrap constructors to preventing global pollution, required for the `pure` version
options.unsafe - use the simple assignment of property instead of delete + defineProperty
options.sham - add a flag to not completely full polyfills
options.enumerable - export as enumerable property
options.noTargetGet - prevent calling a getter on target
*/
var _export = function (options, source) {
var TARGET = options.target;
var GLOBAL = options.global;
var STATIC = options.stat;
var FORCED, target, key, targetProperty, sourceProperty, descriptor;
if (GLOBAL) {
target = global_1;
} else if (STATIC) {
target = global_1[TARGET] || setGlobal(TARGET, {});
} else {
target = (global_1[TARGET] || {}).prototype;
}
if (target) for (key in source) {
sourceProperty = source[key];
if (options.noTargetGet) {
descriptor = getOwnPropertyDescriptor(target, key);
targetProperty = descriptor && descriptor.value;
} else targetProperty = target[key];
FORCED = isForced_1(GLOBAL ? key : TARGET + (STATIC ? '.' : '#') + key, options.forced);
// contained in target
if (!FORCED && targetProperty !== undefined) {
if (typeof sourceProperty === typeof targetProperty) continue;
copyConstructorProperties(sourceProperty, targetProperty);
}
// add a flag to not completely full polyfills
if (options.sham || (targetProperty && targetProperty.sham)) {
createNonEnumerableProperty(sourceProperty, 'sham', true);
}
// extend global
redefine(target, key, sourceProperty, options);
}
};
// `IsArray` abstract operation
// https://tc39.es/ecma262/#sec-isarray
var isArray = Array.isArray || function isArray(arg) {
return classofRaw(arg) == 'Array';
};
// `ToObject` abstract operation
// https://tc39.es/ecma262/#sec-toobject
var toObject = function (argument) {
return Object(requireObjectCoercible(argument));
};
var createProperty = function (object, key, value) {
var propertyKey = toPrimitive(key);
if (propertyKey in object) objectDefineProperty.f(object, propertyKey, createPropertyDescriptor(0, value));
else object[propertyKey] = value;
};
var engineIsNode = classofRaw(global_1.process) == 'process';
var engineUserAgent = getBuiltIn('navigator', 'userAgent') || '';
var process = global_1.process;
var versions = process && process.versions;
var v8 = versions && versions.v8;
var match, version;
if (v8) {
match = v8.split('.');
version = match[0] + match[1];
} else if (engineUserAgent) {
match = engineUserAgent.match(/Edge\/(\d+)/);
if (!match || match[1] >= 74) {
match = engineUserAgent.match(/Chrome\/(\d+)/);
if (match) version = match[1];
}
}
var engineV8Version = version && +version;
var nativeSymbol = !!Object.getOwnPropertySymbols && !fails(function () {
/* global Symbol -- required for testing */
return !Symbol.sham &&
// Chrome 38 Symbol has incorrect toString conversion
// Chrome 38-40 symbols are not inherited from DOM collections prototypes to instances
(engineIsNode ? engineV8Version === 38 : engineV8Version > 37 && engineV8Version < 41);
});
var useSymbolAsUid = nativeSymbol
/* global Symbol -- safe */
&& !Symbol.sham
&& typeof Symbol.iterator == 'symbol';
var WellKnownSymbolsStore = shared('wks');
var Symbol$1 = global_1.Symbol;
var createWellKnownSymbol = useSymbolAsUid ? Symbol$1 : Symbol$1 && Symbol$1.withoutSetter || uid;
var wellKnownSymbol = function (name) {
if (!has$1(WellKnownSymbolsStore, name) || !(nativeSymbol || typeof WellKnownSymbolsStore[name] == 'string')) {
if (nativeSymbol && has$1(Symbol$1, name)) {
WellKnownSymbolsStore[name] = Symbol$1[name];
} else {
WellKnownSymbolsStore[name] = createWellKnownSymbol('Symbol.' + name);
}
} return WellKnownSymbolsStore[name];
};
var SPECIES$1 = wellKnownSymbol('species');
// `ArraySpeciesCreate` abstract operation
// https://tc39.es/ecma262/#sec-arrayspeciescreate
var arraySpeciesCreate = function (originalArray, length) {
var C;
if (isArray(originalArray)) { | if (C === null) C = undefined;
}
} return new (C === undefined ? Array : C)(length === 0 ? 0 : length);
};
var SPECIES = wellKnownSymbol('species');
var arrayMethodHasSpeciesSupport = function (METHOD_NAME) {
// We can't use this feature detection in V8 since it causes
// deoptimization and serious performance degradation
// https://github.com/zloirock/core-js/issues/677
return engineV8Version >= 51 || !fails(function () {
var array = [];
var constructor = array.constructor = {};
constructor[SPECIES] = function () {
return { foo: 1 };
};
return array[METHOD_NAME](Boolean).foo !== 1;
});
};
var IS_CONCAT_SPREADABLE = wellKnownSymbol('isConcatSpreadable');
var MAX_SAFE_INTEGER = 0x1FFFFFFFFFFFFF;
var MAXIMUM_ALLOWED_INDEX_EXCEEDED = 'Maximum allowed index exceeded';
// We can't use this feature detection in V8 since it causes
// deoptimization and serious performance degradation
// https://github.com/zloirock/core-js/issues/679
var IS_CONCAT_SPREADABLE_SUPPORT = engineV8Version >= 51 || !fails(function () {
var array = [];
array[IS_CONCAT_SPREADABLE] = false;
return array.concat()[0] !== array;
});
var SPECIES_SUPPORT = arrayMethodHasSpeciesSupport('concat');
var isConcatSpreadable = function (O) {
if (!isObject(O)) return false;
var spreadable = O[IS_CONCAT_SPREADABLE];
return spreadable !== undefined ? !!spreadable : isArray(O);
};
var FORCED = !IS_CONCAT_SPREADABLE_SUPPORT || !SPECIES_SUPPORT;
// `Array.prototype.concat` method
// https://tc39.es/ecma262/#sec-array.prototype.concat
// with adding support of @@isConcatSpreadable and @@species
_export({ target: 'Array', proto: true, forced: FORCED }, {
// eslint-disable-next-line no-unused-vars -- required for `.length`
concat: function concat(arg) {
var O = toObject(this);
var A = arraySpeciesCreate(O, 0);
var n = 0;
var i, k, length, len, E;
for (i = -1, length = arguments.length; i < length; i++) {
E = i === -1 ? O : arguments[i];
if (isConcatSpreadable(E)) {
len = toLength(E.length);
if (n + len > MAX_SAFE_INTEGER) throw TypeError(MAXIMUM_ALLOWED_INDEX_EXCEEDED);
for (k = 0; k < len; k++, n++) if (k in E) createProperty(A, n, E[k]);
} else {
if (n >= MAX_SAFE_INTEGER) throw TypeError(MAXIMUM_ALLOWED_INDEX_EXCEEDED);
createProperty(A, n++, E);
}
}
A.length = n;
return A;
}
});
/**
* Bootstrap Table English translation
* Author: Zhixin Wen<wenzhixin2010@gmail.com>
*/
$__default['default'].fn.bootstrapTable.locales['en-US'] = $__default['default'].fn.bootstrapTable.locales['en'] = {
formatCopyRows: function formatCopyRows() {
return 'Copy Rows';
},
formatPrint: function formatPrint() {
return 'Print';
},
formatLoadingMessage: function formatLoadingMessage() {
return 'Loading, please wait';
},
formatRecordsPerPage: function formatRecordsPerPage(pageNumber) {
return "".concat(pageNumber, " rows per page");
},
formatShowingRows: function formatShowingRows(pageFrom, pageTo, totalRows, totalNotFiltered) {
if (totalNotFiltered !== undefined && totalNotFiltered > 0 && totalNotFiltered > totalRows) {
return "Showing ".concat(pageFrom, " to ").concat(pageTo, " of ").concat(totalRows, " rows (filtered from ").concat(totalNotFiltered, " total rows)");
}
return "Showing ".concat(pageFrom, " to ").concat(pageTo, " of ").concat(totalRows, " rows");
},
formatSRPaginationPreText: function formatSRPaginationPreText() {
return 'previous page';
},
formatSRPaginationPageText: function formatSRPaginationPageText(page) {
return "to page ".concat(page);
},
formatSRPaginationNextText: function formatSRPaginationNextText() {
return 'next page';
},
formatDetailPagination: function formatDetailPagination(totalRows) {
return "Showing ".concat(totalRows, " rows");
},
formatClearSearch: function formatClearSearch() {
return 'Clear Search';
},
formatSearch: function formatSearch() {
return 'Search';
},
formatNoMatches: function formatNoMatches() {
return 'No matching records found';
},
formatPaginationSwitch: function formatPaginationSwitch() {
return 'Hide/Show pagination';
},
formatPaginationSwitchDown: function formatPaginationSwitchDown() {
return 'Show pagination';
},
formatPaginationSwitchUp: function formatPaginationSwitchUp() {
return 'Hide pagination';
},
formatRefresh: function formatRefresh() {
return 'Refresh';
},
formatToggle: function formatToggle() {
return 'Toggle';
},
formatToggleOn: function formatToggleOn() {
return 'Show card view';
},
formatToggleOff: function formatToggleOff() {
return 'Hide card view';
},
formatColumns: function formatColumns() {
return 'Columns';
},
formatColumnsToggleAll: function formatColumnsToggleAll() {
return 'Toggle all';
},
formatFullscreen: function formatFullscreen() {
return 'Fullscreen';
},
formatAllRows: function formatAllRows() {
return 'All';
},
formatAutoRefresh: function formatAutoRefresh() {
return 'Auto Refresh';
},
formatExport: function formatExport() {
return 'Export data';
},
formatJumpTo: function formatJumpTo() {
return 'GO';
},
formatAdvancedSearch: function formatAdvancedSearch() {
return 'Advanced search';
},
formatAdvancedCloseButton: function formatAdvancedCloseButton() {
return 'Close';
},
formatFilterControlSwitch: function formatFilterControlSwitch() {
return 'Hide/Show controls';
},
formatFilterControlSwitchHide: function formatFilterControlSwitchHide() {
return 'Hide controls';
},
formatFilterControlSwitchShow: function formatFilterControlSwitchShow() {
return 'Show controls';
}
};
$__default['default'].extend($__default['default'].fn.bootstrapTable.defaults, $__default['default'].fn.bootstrapTable.locales['en-US']);
}))); | C = originalArray.constructor;
// cross-realm fallback
if (typeof C == 'function' && (C === Array || isArray(C.prototype))) C = undefined;
else if (isObject(C)) {
C = C[SPECIES$1]; | random_line_split |
bootstrap-table-en-US.js | (function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(require('jquery')) :
typeof define === 'function' && define.amd ? define(['jquery'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.jQuery));
}(this, (function ($) { 'use strict';
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
var $__default = /*#__PURE__*/_interopDefaultLegacy($);
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
function | (fn, module) {
return module = { exports: {} }, fn(module, module.exports), module.exports;
}
var check = function (it) {
return it && it.Math == Math && it;
};
// https://github.com/zloirock/core-js/issues/86#issuecomment-115759028
var global_1 =
/* global globalThis -- safe */
check(typeof globalThis == 'object' && globalThis) ||
check(typeof window == 'object' && window) ||
check(typeof self == 'object' && self) ||
check(typeof commonjsGlobal == 'object' && commonjsGlobal) ||
// eslint-disable-next-line no-new-func -- fallback
(function () { return this; })() || Function('return this')();
var fails = function (exec) {
try {
return !!exec();
} catch (error) {
return true;
}
};
// Detect IE8's incomplete defineProperty implementation
var descriptors = !fails(function () {
return Object.defineProperty({}, 1, { get: function () { return 7; } })[1] != 7;
});
var nativePropertyIsEnumerable = {}.propertyIsEnumerable;
var getOwnPropertyDescriptor$1 = Object.getOwnPropertyDescriptor;
// Nashorn ~ JDK8 bug
var NASHORN_BUG = getOwnPropertyDescriptor$1 && !nativePropertyIsEnumerable.call({ 1: 2 }, 1);
// `Object.prototype.propertyIsEnumerable` method implementation
// https://tc39.es/ecma262/#sec-object.prototype.propertyisenumerable
var f$4 = NASHORN_BUG ? function propertyIsEnumerable(V) {
var descriptor = getOwnPropertyDescriptor$1(this, V);
return !!descriptor && descriptor.enumerable;
} : nativePropertyIsEnumerable;
var objectPropertyIsEnumerable = {
f: f$4
};
var createPropertyDescriptor = function (bitmap, value) {
return {
enumerable: !(bitmap & 1),
configurable: !(bitmap & 2),
writable: !(bitmap & 4),
value: value
};
};
var toString = {}.toString;
var classofRaw = function (it) {
return toString.call(it).slice(8, -1);
};
var split = ''.split;
// fallback for non-array-like ES3 and non-enumerable old V8 strings
var indexedObject = fails(function () {
// throws an error in rhino, see https://github.com/mozilla/rhino/issues/346
// eslint-disable-next-line no-prototype-builtins -- safe
return !Object('z').propertyIsEnumerable(0);
}) ? function (it) {
return classofRaw(it) == 'String' ? split.call(it, '') : Object(it);
} : Object;
// `RequireObjectCoercible` abstract operation
// https://tc39.es/ecma262/#sec-requireobjectcoercible
var requireObjectCoercible = function (it) {
if (it == undefined) throw TypeError("Can't call method on " + it);
return it;
};
// toObject with fallback for non-array-like ES3 strings
var toIndexedObject = function (it) {
return indexedObject(requireObjectCoercible(it));
};
var isObject = function (it) {
return typeof it === 'object' ? it !== null : typeof it === 'function';
};
// `ToPrimitive` abstract operation
// https://tc39.es/ecma262/#sec-toprimitive
// instead of the ES6 spec version, we didn't implement @@toPrimitive case
// and the second argument - flag - preferred type is a string
var toPrimitive = function (input, PREFERRED_STRING) {
if (!isObject(input)) return input;
var fn, val;
if (PREFERRED_STRING && typeof (fn = input.toString) == 'function' && !isObject(val = fn.call(input))) return val;
if (typeof (fn = input.valueOf) == 'function' && !isObject(val = fn.call(input))) return val;
if (!PREFERRED_STRING && typeof (fn = input.toString) == 'function' && !isObject(val = fn.call(input))) return val;
throw TypeError("Can't convert object to primitive value");
};
var hasOwnProperty = {}.hasOwnProperty;
var has$1 = function (it, key) {
return hasOwnProperty.call(it, key);
};
var document = global_1.document;
// typeof document.createElement is 'object' in old IE
var EXISTS = isObject(document) && isObject(document.createElement);
var documentCreateElement = function (it) {
return EXISTS ? document.createElement(it) : {};
};
// Thank's IE8 for his funny defineProperty
var ie8DomDefine = !descriptors && !fails(function () {
return Object.defineProperty(documentCreateElement('div'), 'a', {
get: function () { return 7; }
}).a != 7;
});
var nativeGetOwnPropertyDescriptor = Object.getOwnPropertyDescriptor;
// `Object.getOwnPropertyDescriptor` method
// https://tc39.es/ecma262/#sec-object.getownpropertydescriptor
var f$3 = descriptors ? nativeGetOwnPropertyDescriptor : function getOwnPropertyDescriptor(O, P) {
O = toIndexedObject(O);
P = toPrimitive(P, true);
if (ie8DomDefine) try {
return nativeGetOwnPropertyDescriptor(O, P);
} catch (error) { /* empty */ }
if (has$1(O, P)) return createPropertyDescriptor(!objectPropertyIsEnumerable.f.call(O, P), O[P]);
};
var objectGetOwnPropertyDescriptor = {
f: f$3
};
var anObject = function (it) {
if (!isObject(it)) {
throw TypeError(String(it) + ' is not an object');
} return it;
};
var nativeDefineProperty = Object.defineProperty;
// `Object.defineProperty` method
// https://tc39.es/ecma262/#sec-object.defineproperty
var f$2 = descriptors ? nativeDefineProperty : function defineProperty(O, P, Attributes) {
anObject(O);
P = toPrimitive(P, true);
anObject(Attributes);
if (ie8DomDefine) try {
return nativeDefineProperty(O, P, Attributes);
} catch (error) { /* empty */ }
if ('get' in Attributes || 'set' in Attributes) throw TypeError('Accessors not supported');
if ('value' in Attributes) O[P] = Attributes.value;
return O;
};
var objectDefineProperty = {
f: f$2
};
var createNonEnumerableProperty = descriptors ? function (object, key, value) {
return objectDefineProperty.f(object, key, createPropertyDescriptor(1, value));
} : function (object, key, value) {
object[key] = value;
return object;
};
var setGlobal = function (key, value) {
try {
createNonEnumerableProperty(global_1, key, value);
} catch (error) {
global_1[key] = value;
} return value;
};
var SHARED = '__core-js_shared__';
var store$1 = global_1[SHARED] || setGlobal(SHARED, {});
var sharedStore = store$1;
var functionToString = Function.toString;
// this helper broken in `3.4.1-3.4.4`, so we can't use `shared` helper
if (typeof sharedStore.inspectSource != 'function') {
sharedStore.inspectSource = function (it) {
return functionToString.call(it);
};
}
var inspectSource = sharedStore.inspectSource;
var WeakMap$1 = global_1.WeakMap;
var nativeWeakMap = typeof WeakMap$1 === 'function' && /native code/.test(inspectSource(WeakMap$1));
var shared = createCommonjsModule(function (module) {
(module.exports = function (key, value) {
return sharedStore[key] || (sharedStore[key] = value !== undefined ? value : {});
})('versions', []).push({
version: '3.9.1',
mode: 'global',
copyright: '© 2021 Denis Pushkarev (zloirock.ru)'
});
});
var id = 0;
var postfix = Math.random();
var uid = function (key) {
return 'Symbol(' + String(key === undefined ? '' : key) + ')_' + (++id + postfix).toString(36);
};
var keys = shared('keys');
var sharedKey = function (key) {
return keys[key] || (keys[key] = uid(key));
};
var hiddenKeys$1 = {};
var WeakMap = global_1.WeakMap;
var set, get, has;
var enforce = function (it) {
return has(it) ? get(it) : set(it, {});
};
var getterFor = function (TYPE) {
return function (it) {
var state;
if (!isObject(it) || (state = get(it)).type !== TYPE) {
throw TypeError('Incompatible receiver, ' + TYPE + ' required');
} return state;
};
};
if (nativeWeakMap) {
var store = sharedStore.state || (sharedStore.state = new WeakMap());
var wmget = store.get;
var wmhas = store.has;
var wmset = store.set;
set = function (it, metadata) {
metadata.facade = it;
wmset.call(store, it, metadata);
return metadata;
};
get = function (it) {
return wmget.call(store, it) || {};
};
has = function (it) {
return wmhas.call(store, it);
};
} else {
var STATE = sharedKey('state');
hiddenKeys$1[STATE] = true;
set = function (it, metadata) {
metadata.facade = it;
createNonEnumerableProperty(it, STATE, metadata);
return metadata;
};
get = function (it) {
return has$1(it, STATE) ? it[STATE] : {};
};
has = function (it) {
return has$1(it, STATE);
};
}
var internalState = {
set: set,
get: get,
has: has,
enforce: enforce,
getterFor: getterFor
};
var redefine = createCommonjsModule(function (module) {
var getInternalState = internalState.get;
var enforceInternalState = internalState.enforce;
var TEMPLATE = String(String).split('String');
(module.exports = function (O, key, value, options) {
var unsafe = options ? !!options.unsafe : false;
var simple = options ? !!options.enumerable : false;
var noTargetGet = options ? !!options.noTargetGet : false;
var state;
if (typeof value == 'function') {
if (typeof key == 'string' && !has$1(value, 'name')) {
createNonEnumerableProperty(value, 'name', key);
}
state = enforceInternalState(value);
if (!state.source) {
state.source = TEMPLATE.join(typeof key == 'string' ? key : '');
}
}
if (O === global_1) {
if (simple) O[key] = value;
else setGlobal(key, value);
return;
} else if (!unsafe) {
delete O[key];
} else if (!noTargetGet && O[key]) {
simple = true;
}
if (simple) O[key] = value;
else createNonEnumerableProperty(O, key, value);
// add fake Function#toString for correct work wrapped methods / constructors with methods like LoDash isNative
})(Function.prototype, 'toString', function toString() {
return typeof this == 'function' && getInternalState(this).source || inspectSource(this);
});
});
var path = global_1;
var aFunction = function (variable) {
return typeof variable == 'function' ? variable : undefined;
};
var getBuiltIn = function (namespace, method) {
return arguments.length < 2 ? aFunction(path[namespace]) || aFunction(global_1[namespace])
: path[namespace] && path[namespace][method] || global_1[namespace] && global_1[namespace][method];
};
var ceil = Math.ceil;
var floor = Math.floor;
// `ToInteger` abstract operation
// https://tc39.es/ecma262/#sec-tointeger
var toInteger = function (argument) {
return isNaN(argument = +argument) ? 0 : (argument > 0 ? floor : ceil)(argument);
};
var min$1 = Math.min;
// `ToLength` abstract operation
// https://tc39.es/ecma262/#sec-tolength
var toLength = function (argument) {
return argument > 0 ? min$1(toInteger(argument), 0x1FFFFFFFFFFFFF) : 0; // 2 ** 53 - 1 == 9007199254740991
};
var max = Math.max;
var min = Math.min;
// Helper for a popular repeating case of the spec:
// Let integer be ? ToInteger(index).
// If integer < 0, let result be max((length + integer), 0); else let result be min(integer, length).
var toAbsoluteIndex = function (index, length) {
var integer = toInteger(index);
return integer < 0 ? max(integer + length, 0) : min(integer, length);
};
// `Array.prototype.{ indexOf, includes }` methods implementation
var createMethod = function (IS_INCLUDES) {
return function ($this, el, fromIndex) {
var O = toIndexedObject($this);
var length = toLength(O.length);
var index = toAbsoluteIndex(fromIndex, length);
var value;
// Array#includes uses SameValueZero equality algorithm
// eslint-disable-next-line no-self-compare -- NaN check
if (IS_INCLUDES && el != el) while (length > index) {
value = O[index++];
// eslint-disable-next-line no-self-compare -- NaN check
if (value != value) return true;
// Array#indexOf ignores holes, Array#includes - not
} else for (;length > index; index++) {
if ((IS_INCLUDES || index in O) && O[index] === el) return IS_INCLUDES || index || 0;
} return !IS_INCLUDES && -1;
};
};
var arrayIncludes = {
// `Array.prototype.includes` method
// https://tc39.es/ecma262/#sec-array.prototype.includes
includes: createMethod(true),
// `Array.prototype.indexOf` method
// https://tc39.es/ecma262/#sec-array.prototype.indexof
indexOf: createMethod(false)
};
var indexOf = arrayIncludes.indexOf;
var objectKeysInternal = function (object, names) {
var O = toIndexedObject(object);
var i = 0;
var result = [];
var key;
for (key in O) !has$1(hiddenKeys$1, key) && has$1(O, key) && result.push(key);
// Don't enum bug & hidden keys
while (names.length > i) if (has$1(O, key = names[i++])) {
~indexOf(result, key) || result.push(key);
}
return result;
};
// IE8- don't enum bug keys
var enumBugKeys = [
'constructor',
'hasOwnProperty',
'isPrototypeOf',
'propertyIsEnumerable',
'toLocaleString',
'toString',
'valueOf'
];
var hiddenKeys = enumBugKeys.concat('length', 'prototype');
// `Object.getOwnPropertyNames` method
// https://tc39.es/ecma262/#sec-object.getownpropertynames
var f$1 = Object.getOwnPropertyNames || function getOwnPropertyNames(O) {
return objectKeysInternal(O, hiddenKeys);
};
var objectGetOwnPropertyNames = {
f: f$1
};
var f = Object.getOwnPropertySymbols;
var objectGetOwnPropertySymbols = {
f: f
};
// all object keys, includes non-enumerable and symbols
var ownKeys = getBuiltIn('Reflect', 'ownKeys') || function ownKeys(it) {
var keys = objectGetOwnPropertyNames.f(anObject(it));
var getOwnPropertySymbols = objectGetOwnPropertySymbols.f;
return getOwnPropertySymbols ? keys.concat(getOwnPropertySymbols(it)) : keys;
};
var copyConstructorProperties = function (target, source) {
var keys = ownKeys(source);
var defineProperty = objectDefineProperty.f;
var getOwnPropertyDescriptor = objectGetOwnPropertyDescriptor.f;
for (var i = 0; i < keys.length; i++) {
var key = keys[i];
if (!has$1(target, key)) defineProperty(target, key, getOwnPropertyDescriptor(source, key));
}
};
var replacement = /#|\.prototype\./;
var isForced = function (feature, detection) {
var value = data[normalize(feature)];
return value == POLYFILL ? true
: value == NATIVE ? false
: typeof detection == 'function' ? fails(detection)
: !!detection;
};
var normalize = isForced.normalize = function (string) {
return String(string).replace(replacement, '.').toLowerCase();
};
var data = isForced.data = {};
var NATIVE = isForced.NATIVE = 'N';
var POLYFILL = isForced.POLYFILL = 'P';
var isForced_1 = isForced;
var getOwnPropertyDescriptor = objectGetOwnPropertyDescriptor.f;
/*
options.target - name of the target object
options.global - target is the global object
options.stat - export as static methods of target
options.proto - export as prototype methods of target
options.real - real prototype method for the `pure` version
options.forced - export even if the native feature is available
options.bind - bind methods to the target, required for the `pure` version
options.wrap - wrap constructors to preventing global pollution, required for the `pure` version
options.unsafe - use the simple assignment of property instead of delete + defineProperty
options.sham - add a flag to not completely full polyfills
options.enumerable - export as enumerable property
options.noTargetGet - prevent calling a getter on target
*/
var _export = function (options, source) {
var TARGET = options.target;
var GLOBAL = options.global;
var STATIC = options.stat;
var FORCED, target, key, targetProperty, sourceProperty, descriptor;
if (GLOBAL) {
target = global_1;
} else if (STATIC) {
target = global_1[TARGET] || setGlobal(TARGET, {});
} else {
target = (global_1[TARGET] || {}).prototype;
}
if (target) for (key in source) {
sourceProperty = source[key];
if (options.noTargetGet) {
descriptor = getOwnPropertyDescriptor(target, key);
targetProperty = descriptor && descriptor.value;
} else targetProperty = target[key];
FORCED = isForced_1(GLOBAL ? key : TARGET + (STATIC ? '.' : '#') + key, options.forced);
// contained in target
if (!FORCED && targetProperty !== undefined) {
if (typeof sourceProperty === typeof targetProperty) continue;
copyConstructorProperties(sourceProperty, targetProperty);
}
// add a flag to not completely full polyfills
if (options.sham || (targetProperty && targetProperty.sham)) {
createNonEnumerableProperty(sourceProperty, 'sham', true);
}
// extend global
redefine(target, key, sourceProperty, options);
}
};
// `IsArray` abstract operation
// https://tc39.es/ecma262/#sec-isarray
var isArray = Array.isArray || function isArray(arg) {
return classofRaw(arg) == 'Array';
};
// `ToObject` abstract operation
// https://tc39.es/ecma262/#sec-toobject
var toObject = function (argument) {
return Object(requireObjectCoercible(argument));
};
var createProperty = function (object, key, value) {
var propertyKey = toPrimitive(key);
if (propertyKey in object) objectDefineProperty.f(object, propertyKey, createPropertyDescriptor(0, value));
else object[propertyKey] = value;
};
var engineIsNode = classofRaw(global_1.process) == 'process';
var engineUserAgent = getBuiltIn('navigator', 'userAgent') || '';
var process = global_1.process;
var versions = process && process.versions;
var v8 = versions && versions.v8;
var match, version;
if (v8) {
match = v8.split('.');
version = match[0] + match[1];
} else if (engineUserAgent) {
match = engineUserAgent.match(/Edge\/(\d+)/);
if (!match || match[1] >= 74) {
match = engineUserAgent.match(/Chrome\/(\d+)/);
if (match) version = match[1];
}
}
var engineV8Version = version && +version;
var nativeSymbol = !!Object.getOwnPropertySymbols && !fails(function () {
/* global Symbol -- required for testing */
return !Symbol.sham &&
// Chrome 38 Symbol has incorrect toString conversion
// Chrome 38-40 symbols are not inherited from DOM collections prototypes to instances
(engineIsNode ? engineV8Version === 38 : engineV8Version > 37 && engineV8Version < 41);
});
var useSymbolAsUid = nativeSymbol
/* global Symbol -- safe */
&& !Symbol.sham
&& typeof Symbol.iterator == 'symbol';
var WellKnownSymbolsStore = shared('wks');
var Symbol$1 = global_1.Symbol;
var createWellKnownSymbol = useSymbolAsUid ? Symbol$1 : Symbol$1 && Symbol$1.withoutSetter || uid;
var wellKnownSymbol = function (name) {
if (!has$1(WellKnownSymbolsStore, name) || !(nativeSymbol || typeof WellKnownSymbolsStore[name] == 'string')) {
if (nativeSymbol && has$1(Symbol$1, name)) {
WellKnownSymbolsStore[name] = Symbol$1[name];
} else {
WellKnownSymbolsStore[name] = createWellKnownSymbol('Symbol.' + name);
}
} return WellKnownSymbolsStore[name];
};
var SPECIES$1 = wellKnownSymbol('species');
// `ArraySpeciesCreate` abstract operation
// https://tc39.es/ecma262/#sec-arrayspeciescreate
var arraySpeciesCreate = function (originalArray, length) {
var C;
if (isArray(originalArray)) {
C = originalArray.constructor;
// cross-realm fallback
if (typeof C == 'function' && (C === Array || isArray(C.prototype))) C = undefined;
else if (isObject(C)) {
C = C[SPECIES$1];
if (C === null) C = undefined;
}
} return new (C === undefined ? Array : C)(length === 0 ? 0 : length);
};
var SPECIES = wellKnownSymbol('species');
var arrayMethodHasSpeciesSupport = function (METHOD_NAME) {
// We can't use this feature detection in V8 since it causes
// deoptimization and serious performance degradation
// https://github.com/zloirock/core-js/issues/677
return engineV8Version >= 51 || !fails(function () {
var array = [];
var constructor = array.constructor = {};
constructor[SPECIES] = function () {
return { foo: 1 };
};
return array[METHOD_NAME](Boolean).foo !== 1;
});
};
var IS_CONCAT_SPREADABLE = wellKnownSymbol('isConcatSpreadable');
var MAX_SAFE_INTEGER = 0x1FFFFFFFFFFFFF;
var MAXIMUM_ALLOWED_INDEX_EXCEEDED = 'Maximum allowed index exceeded';
// We can't use this feature detection in V8 since it causes
// deoptimization and serious performance degradation
// https://github.com/zloirock/core-js/issues/679
var IS_CONCAT_SPREADABLE_SUPPORT = engineV8Version >= 51 || !fails(function () {
var array = [];
array[IS_CONCAT_SPREADABLE] = false;
return array.concat()[0] !== array;
});
var SPECIES_SUPPORT = arrayMethodHasSpeciesSupport('concat');
var isConcatSpreadable = function (O) {
if (!isObject(O)) return false;
var spreadable = O[IS_CONCAT_SPREADABLE];
return spreadable !== undefined ? !!spreadable : isArray(O);
};
var FORCED = !IS_CONCAT_SPREADABLE_SUPPORT || !SPECIES_SUPPORT;
// `Array.prototype.concat` method
// https://tc39.es/ecma262/#sec-array.prototype.concat
// with adding support of @@isConcatSpreadable and @@species
_export({ target: 'Array', proto: true, forced: FORCED }, {
// eslint-disable-next-line no-unused-vars -- required for `.length`
concat: function concat(arg) {
var O = toObject(this);
var A = arraySpeciesCreate(O, 0);
var n = 0;
var i, k, length, len, E;
for (i = -1, length = arguments.length; i < length; i++) {
E = i === -1 ? O : arguments[i];
if (isConcatSpreadable(E)) {
len = toLength(E.length);
if (n + len > MAX_SAFE_INTEGER) throw TypeError(MAXIMUM_ALLOWED_INDEX_EXCEEDED);
for (k = 0; k < len; k++, n++) if (k in E) createProperty(A, n, E[k]);
} else {
if (n >= MAX_SAFE_INTEGER) throw TypeError(MAXIMUM_ALLOWED_INDEX_EXCEEDED);
createProperty(A, n++, E);
}
}
A.length = n;
return A;
}
});
/**
* Bootstrap Table English translation
* Author: Zhixin Wen<wenzhixin2010@gmail.com>
*/
$__default['default'].fn.bootstrapTable.locales['en-US'] = $__default['default'].fn.bootstrapTable.locales['en'] = {
formatCopyRows: function formatCopyRows() {
return 'Copy Rows';
},
formatPrint: function formatPrint() {
return 'Print';
},
formatLoadingMessage: function formatLoadingMessage() {
return 'Loading, please wait';
},
formatRecordsPerPage: function formatRecordsPerPage(pageNumber) {
return "".concat(pageNumber, " rows per page");
},
formatShowingRows: function formatShowingRows(pageFrom, pageTo, totalRows, totalNotFiltered) {
if (totalNotFiltered !== undefined && totalNotFiltered > 0 && totalNotFiltered > totalRows) {
return "Showing ".concat(pageFrom, " to ").concat(pageTo, " of ").concat(totalRows, " rows (filtered from ").concat(totalNotFiltered, " total rows)");
}
return "Showing ".concat(pageFrom, " to ").concat(pageTo, " of ").concat(totalRows, " rows");
},
formatSRPaginationPreText: function formatSRPaginationPreText() {
return 'previous page';
},
formatSRPaginationPageText: function formatSRPaginationPageText(page) {
return "to page ".concat(page);
},
formatSRPaginationNextText: function formatSRPaginationNextText() {
return 'next page';
},
formatDetailPagination: function formatDetailPagination(totalRows) {
return "Showing ".concat(totalRows, " rows");
},
formatClearSearch: function formatClearSearch() {
return 'Clear Search';
},
formatSearch: function formatSearch() {
return 'Search';
},
formatNoMatches: function formatNoMatches() {
return 'No matching records found';
},
formatPaginationSwitch: function formatPaginationSwitch() {
return 'Hide/Show pagination';
},
formatPaginationSwitchDown: function formatPaginationSwitchDown() {
return 'Show pagination';
},
formatPaginationSwitchUp: function formatPaginationSwitchUp() {
return 'Hide pagination';
},
formatRefresh: function formatRefresh() {
return 'Refresh';
},
formatToggle: function formatToggle() {
return 'Toggle';
},
formatToggleOn: function formatToggleOn() {
return 'Show card view';
},
formatToggleOff: function formatToggleOff() {
return 'Hide card view';
},
formatColumns: function formatColumns() {
return 'Columns';
},
formatColumnsToggleAll: function formatColumnsToggleAll() {
return 'Toggle all';
},
formatFullscreen: function formatFullscreen() {
return 'Fullscreen';
},
formatAllRows: function formatAllRows() {
return 'All';
},
formatAutoRefresh: function formatAutoRefresh() {
return 'Auto Refresh';
},
formatExport: function formatExport() {
return 'Export data';
},
formatJumpTo: function formatJumpTo() {
return 'GO';
},
formatAdvancedSearch: function formatAdvancedSearch() {
return 'Advanced search';
},
formatAdvancedCloseButton: function formatAdvancedCloseButton() {
return 'Close';
},
formatFilterControlSwitch: function formatFilterControlSwitch() {
return 'Hide/Show controls';
},
formatFilterControlSwitchHide: function formatFilterControlSwitchHide() {
return 'Hide controls';
},
formatFilterControlSwitchShow: function formatFilterControlSwitchShow() {
return 'Show controls';
}
};
$__default['default'].extend($__default['default'].fn.bootstrapTable.defaults, $__default['default'].fn.bootstrapTable.locales['en-US']);
})));
| createCommonjsModule | identifier_name |
bootstrap-table-en-US.js | (function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(require('jquery')) :
typeof define === 'function' && define.amd ? define(['jquery'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.jQuery));
}(this, (function ($) { 'use strict';
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
var $__default = /*#__PURE__*/_interopDefaultLegacy($);
var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
function createCommonjsModule(fn, module) |
var check = function (it) {
return it && it.Math == Math && it;
};
// https://github.com/zloirock/core-js/issues/86#issuecomment-115759028
var global_1 =
/* global globalThis -- safe */
check(typeof globalThis == 'object' && globalThis) ||
check(typeof window == 'object' && window) ||
check(typeof self == 'object' && self) ||
check(typeof commonjsGlobal == 'object' && commonjsGlobal) ||
// eslint-disable-next-line no-new-func -- fallback
(function () { return this; })() || Function('return this')();
// Runs a feature-detection callback and reports whether the test "fails":
// true when the callback throws OR returns a truthy value, false otherwise.
var fails = function (exec) {
  try {
    return Boolean(exec());
  } catch (error) {
    return true;
  }
};
// Detect IE8's incomplete defineProperty implementation
var descriptors = !fails(function () {
return Object.defineProperty({}, 1, { get: function () { return 7; } })[1] != 7;
});
var nativePropertyIsEnumerable = {}.propertyIsEnumerable;
var getOwnPropertyDescriptor$1 = Object.getOwnPropertyDescriptor;
// Nashorn ~ JDK8 bug
var NASHORN_BUG = getOwnPropertyDescriptor$1 && !nativePropertyIsEnumerable.call({ 1: 2 }, 1);
// `Object.prototype.propertyIsEnumerable` method implementation
// https://tc39.es/ecma262/#sec-object.prototype.propertyisenumerable
var f$4 = NASHORN_BUG ? function propertyIsEnumerable(V) {
var descriptor = getOwnPropertyDescriptor$1(this, V);
return !!descriptor && descriptor.enumerable;
} : nativePropertyIsEnumerable;
var objectPropertyIsEnumerable = {
f: f$4
};
// Builds a property descriptor from a bitmap of cleared capabilities:
// bit 1 set => non-enumerable, bit 2 set => non-configurable,
// bit 4 set => non-writable. `value` is carried through unchanged.
var createPropertyDescriptor = function (bitmap, value) {
  var descriptor = {
    enumerable: (bitmap & 1) === 0,
    configurable: (bitmap & 2) === 0,
    writable: (bitmap & 4) === 0
  };
  descriptor.value = value;
  return descriptor;
};
// Cached Object.prototype.toString, used to read the internal class tag.
var toString = {}.toString;

// Returns the bracketed tag name of `it`, e.g. 'Array', 'Null', 'String'
// (extracted from the '[object <Tag>]' form).
var classofRaw = function (it) {
  var tag = toString.call(it);
  return tag.slice(8, tag.length - 1);
};
var split = ''.split;
// fallback for non-array-like ES3 and non-enumerable old V8 strings
var indexedObject = fails(function () {
// throws an error in rhino, see https://github.com/mozilla/rhino/issues/346
// eslint-disable-next-line no-prototype-builtins -- safe
return !Object('z').propertyIsEnumerable(0);
}) ? function (it) {
return classofRaw(it) == 'String' ? split.call(it, '') : Object(it);
} : Object;
// `RequireObjectCoercible` abstract operation
// https://tc39.es/ecma262/#sec-requireobjectcoercible
// Rejects null and undefined; every other value is returned unchanged.
var requireObjectCoercible = function (it) {
  if (it === undefined || it === null) throw TypeError("Can't call method on " + it);
  return it;
};
// toObject with fallback for non-array-like ES3 strings
var toIndexedObject = function (it) {
return indexedObject(requireObjectCoercible(it));
};
// Loose object check used throughout the bundle: callable values and
// non-null 'object'-typed values both count as objects.
var isObject = function (it) {
  if (it === null) return false;
  var type = typeof it;
  return type === 'object' || type === 'function';
};
// `ToPrimitive` abstract operation
// https://tc39.es/ecma262/#sec-toprimitive
// instead of the ES6 spec version, we didn't implement @@toPrimitive case
// and the second argument - flag - preferred type is a string
var toPrimitive = function (input, PREFERRED_STRING) {
// Primitives pass through untouched.
if (!isObject(input)) return input;
var fn, val;
// Method order depends on the hint: toString first when a string is
// preferred, otherwise valueOf first; a result only counts if it is itself
// a primitive (non-object).
if (PREFERRED_STRING && typeof (fn = input.toString) == 'function' && !isObject(val = fn.call(input))) return val;
if (typeof (fn = input.valueOf) == 'function' && !isObject(val = fn.call(input))) return val;
if (!PREFERRED_STRING && typeof (fn = input.toString) == 'function' && !isObject(val = fn.call(input))) return val;
throw TypeError("Can't convert object to primitive value");
};
var hasOwnProperty = {}.hasOwnProperty;
var has$1 = function (it, key) {
return hasOwnProperty.call(it, key);
};
var document = global_1.document;
// typeof document.createElement is 'object' in old IE
var EXISTS = isObject(document) && isObject(document.createElement);
var documentCreateElement = function (it) {
return EXISTS ? document.createElement(it) : {};
};
// Thank's IE8 for his funny defineProperty
var ie8DomDefine = !descriptors && !fails(function () {
return Object.defineProperty(documentCreateElement('div'), 'a', {
get: function () { return 7; }
}).a != 7;
});
var nativeGetOwnPropertyDescriptor = Object.getOwnPropertyDescriptor;
// `Object.getOwnPropertyDescriptor` method
// https://tc39.es/ecma262/#sec-object.getownpropertydescriptor
var f$3 = descriptors ? nativeGetOwnPropertyDescriptor : function getOwnPropertyDescriptor(O, P) {
O = toIndexedObject(O);
P = toPrimitive(P, true);
if (ie8DomDefine) try {
return nativeGetOwnPropertyDescriptor(O, P);
} catch (error) { /* empty */ }
if (has$1(O, P)) return createPropertyDescriptor(!objectPropertyIsEnumerable.f.call(O, P), O[P]);
};
var objectGetOwnPropertyDescriptor = {
f: f$3
};
var anObject = function (it) {
if (!isObject(it)) {
throw TypeError(String(it) + ' is not an object');
} return it;
};
var nativeDefineProperty = Object.defineProperty;
// `Object.defineProperty` method
// https://tc39.es/ecma262/#sec-object.defineproperty
var f$2 = descriptors ? nativeDefineProperty : function defineProperty(O, P, Attributes) {
anObject(O);
P = toPrimitive(P, true);
anObject(Attributes);
if (ie8DomDefine) try {
return nativeDefineProperty(O, P, Attributes);
} catch (error) { /* empty */ }
if ('get' in Attributes || 'set' in Attributes) throw TypeError('Accessors not supported');
if ('value' in Attributes) O[P] = Attributes.value;
return O;
};
var objectDefineProperty = {
f: f$2
};
var createNonEnumerableProperty = descriptors ? function (object, key, value) {
return objectDefineProperty.f(object, key, createPropertyDescriptor(1, value));
} : function (object, key, value) {
object[key] = value;
return object;
};
var setGlobal = function (key, value) {
try {
createNonEnumerableProperty(global_1, key, value);
} catch (error) {
global_1[key] = value;
} return value;
};
var SHARED = '__core-js_shared__';
var store$1 = global_1[SHARED] || setGlobal(SHARED, {});
var sharedStore = store$1;
var functionToString = Function.toString;
// this helper broken in `3.4.1-3.4.4`, so we can't use `shared` helper
if (typeof sharedStore.inspectSource != 'function') {
sharedStore.inspectSource = function (it) {
return functionToString.call(it);
};
}
var inspectSource = sharedStore.inspectSource;
var WeakMap$1 = global_1.WeakMap;
var nativeWeakMap = typeof WeakMap$1 === 'function' && /native code/.test(inspectSource(WeakMap$1));
var shared = createCommonjsModule(function (module) {
(module.exports = function (key, value) {
return sharedStore[key] || (sharedStore[key] = value !== undefined ? value : {});
})('versions', []).push({
version: '3.9.1',
mode: 'global',
copyright: '© 2021 Denis Pushkarev (zloirock.ru)'
});
});
// A monotonic counter plus a per-load random suffix keep generated keys
// unique across calls and very unlikely to collide between library copies.
var id = 0;
var postfix = Math.random();

// Generates a unique string key of the form 'Symbol(<key>)_<base36>'.
var uid = function (key) {
  var label = key === undefined ? '' : key;
  return 'Symbol(' + String(label) + ')_' + (++id + postfix).toString(36);
};
var keys = shared('keys');
var sharedKey = function (key) {
return keys[key] || (keys[key] = uid(key));
};
var hiddenKeys$1 = {};
var WeakMap = global_1.WeakMap;
var set, get, has;
var enforce = function (it) {
return has(it) ? get(it) : set(it, {});
};
var getterFor = function (TYPE) {
return function (it) {
var state;
if (!isObject(it) || (state = get(it)).type !== TYPE) {
throw TypeError('Incompatible receiver, ' + TYPE + ' required');
} return state;
};
};
if (nativeWeakMap) {
var store = sharedStore.state || (sharedStore.state = new WeakMap());
var wmget = store.get;
var wmhas = store.has;
var wmset = store.set;
set = function (it, metadata) {
metadata.facade = it;
wmset.call(store, it, metadata);
return metadata;
};
get = function (it) {
return wmget.call(store, it) || {};
};
has = function (it) {
return wmhas.call(store, it);
};
} else {
var STATE = sharedKey('state');
hiddenKeys$1[STATE] = true;
set = function (it, metadata) {
metadata.facade = it;
createNonEnumerableProperty(it, STATE, metadata);
return metadata;
};
get = function (it) {
return has$1(it, STATE) ? it[STATE] : {};
};
has = function (it) {
return has$1(it, STATE);
};
}
var internalState = {
set: set,
get: get,
has: has,
enforce: enforce,
getterFor: getterFor
};
var redefine = createCommonjsModule(function (module) {
var getInternalState = internalState.get;
var enforceInternalState = internalState.enforce;
var TEMPLATE = String(String).split('String');
(module.exports = function (O, key, value, options) {
var unsafe = options ? !!options.unsafe : false;
var simple = options ? !!options.enumerable : false;
var noTargetGet = options ? !!options.noTargetGet : false;
var state;
if (typeof value == 'function') {
if (typeof key == 'string' && !has$1(value, 'name')) {
createNonEnumerableProperty(value, 'name', key);
}
state = enforceInternalState(value);
if (!state.source) {
state.source = TEMPLATE.join(typeof key == 'string' ? key : '');
}
}
if (O === global_1) {
if (simple) O[key] = value;
else setGlobal(key, value);
return;
} else if (!unsafe) {
delete O[key];
} else if (!noTargetGet && O[key]) {
simple = true;
}
if (simple) O[key] = value;
else createNonEnumerableProperty(O, key, value);
// add fake Function#toString for correct work wrapped methods / constructors with methods like LoDash isNative
})(Function.prototype, 'toString', function toString() {
return typeof this == 'function' && getInternalState(this).source || inspectSource(this);
});
});
var path = global_1;
var aFunction = function (variable) {
return typeof variable == 'function' ? variable : undefined;
};
var getBuiltIn = function (namespace, method) {
return arguments.length < 2 ? aFunction(path[namespace]) || aFunction(global_1[namespace])
: path[namespace] && path[namespace][method] || global_1[namespace] && global_1[namespace][method];
};
var ceil = Math.ceil;
var floor = Math.floor;

// `ToInteger` abstract operation
// https://tc39.es/ecma262/#sec-tointeger
// Coerces the argument to a number and truncates toward zero; NaN maps to 0.
var toInteger = function (argument) {
  var number = +argument;
  if (isNaN(number)) return 0;
  return number > 0 ? floor(number) : ceil(number);
};
var min$1 = Math.min;
// `ToLength` abstract operation
// https://tc39.es/ecma262/#sec-tolength
// Clamps the integer-converted argument into [0, 2^53 - 1]; negative and
// NaN inputs fall through to 0 (NaN > 0 is false).
var toLength = function (argument) {
return argument > 0 ? min$1(toInteger(argument), 0x1FFFFFFFFFFFFF) : 0; // 2 ** 53 - 1 == 9007199254740991
};
var max = Math.max;
var min = Math.min;
// Helper for a popular repeating case of the spec:
// Let integer be ? ToInteger(index).
// If integer < 0, let result be max((length + integer), 0); else let result be min(integer, length).
// i.e. negative indices count back from `length`, and the result is always
// clamped into the valid range [0, length].
var toAbsoluteIndex = function (index, length) {
var integer = toInteger(index);
return integer < 0 ? max(integer + length, 0) : min(integer, length);
};
// `Array.prototype.{ indexOf, includes }` methods implementation
// Factory: IS_INCLUDES=true builds `includes` (returns a boolean, uses
// SameValueZero so NaN is findable, visits holes); IS_INCLUDES=false builds
// `indexOf` (returns an index or -1, skips holes, never matches NaN).
var createMethod = function (IS_INCLUDES) {
return function ($this, el, fromIndex) {
var O = toIndexedObject($this);
var length = toLength(O.length);
var index = toAbsoluteIndex(fromIndex, length);
var value;
// Array#includes uses SameValueZero equality algorithm
// eslint-disable-next-line no-self-compare -- NaN check
if (IS_INCLUDES && el != el) while (length > index) {
value = O[index++];
// eslint-disable-next-line no-self-compare -- NaN check
if (value != value) return true;
// Array#indexOf ignores holes, Array#includes - not
} else for (;length > index; index++) {
// For indexOf, `index in O` skips holes; includes visits them as undefined.
// `IS_INCLUDES || index || 0` yields true for includes, else the index
// (the trailing `|| 0` normalizes a potential -0).
if ((IS_INCLUDES || index in O) && O[index] === el) return IS_INCLUDES || index || 0;
} return !IS_INCLUDES && -1;
};
};
var arrayIncludes = {
// `Array.prototype.includes` method
// https://tc39.es/ecma262/#sec-array.prototype.includes
includes: createMethod(true),
// `Array.prototype.indexOf` method
// https://tc39.es/ecma262/#sec-array.prototype.indexof
indexOf: createMethod(false)
};
var indexOf = arrayIncludes.indexOf;
var objectKeysInternal = function (object, names) {
var O = toIndexedObject(object);
var i = 0;
var result = [];
var key;
for (key in O) !has$1(hiddenKeys$1, key) && has$1(O, key) && result.push(key);
// Don't enum bug & hidden keys
while (names.length > i) if (has$1(O, key = names[i++])) {
~indexOf(result, key) || result.push(key);
}
return result;
};
// IE8- don't enum bug keys
var enumBugKeys = [
'constructor',
'hasOwnProperty',
'isPrototypeOf',
'propertyIsEnumerable',
'toLocaleString',
'toString',
'valueOf'
];
var hiddenKeys = enumBugKeys.concat('length', 'prototype');
// `Object.getOwnPropertyNames` method
// https://tc39.es/ecma262/#sec-object.getownpropertynames
var f$1 = Object.getOwnPropertyNames || function getOwnPropertyNames(O) {
return objectKeysInternal(O, hiddenKeys);
};
var objectGetOwnPropertyNames = {
f: f$1
};
var f = Object.getOwnPropertySymbols;
var objectGetOwnPropertySymbols = {
f: f
};
// all object keys, includes non-enumerable and symbols
var ownKeys = getBuiltIn('Reflect', 'ownKeys') || function ownKeys(it) {
var keys = objectGetOwnPropertyNames.f(anObject(it));
var getOwnPropertySymbols = objectGetOwnPropertySymbols.f;
return getOwnPropertySymbols ? keys.concat(getOwnPropertySymbols(it)) : keys;
};
var copyConstructorProperties = function (target, source) {
var keys = ownKeys(source);
var defineProperty = objectDefineProperty.f;
var getOwnPropertyDescriptor = objectGetOwnPropertyDescriptor.f;
for (var i = 0; i < keys.length; i++) {
var key = keys[i];
if (!has$1(target, key)) defineProperty(target, key, getOwnPropertyDescriptor(source, key));
}
};
var replacement = /#|\.prototype\./;
// Decides whether a polyfill must be force-installed for `feature`
// (e.g. 'Array.prototype.concat' or 'Array#concat'). `data`, `normalize`,
// `NATIVE` and `POLYFILL` are defined below but visible here thanks to
// `var` hoisting — they are assigned before `isForced` is ever called.
var isForced = function (feature, detection) {
var value = data[normalize(feature)];
return value == POLYFILL ? true // explicitly marked: always polyfill
: value == NATIVE ? false // explicitly marked: always native
: typeof detection == 'function' ? fails(detection) // run the feature test
: !!detection;
};
// Canonical cache key: '#' and '.prototype.' both become '.', lowercased.
var normalize = isForced.normalize = function (string) {
return String(string).replace(replacement, '.').toLowerCase();
};
// Per-feature overrides, attached to the function itself for external access.
var data = isForced.data = {};
var NATIVE = isForced.NATIVE = 'N';
var POLYFILL = isForced.POLYFILL = 'P';
var isForced_1 = isForced;
var getOwnPropertyDescriptor = objectGetOwnPropertyDescriptor.f;
/*
options.target - name of the target object
options.global - target is the global object
options.stat - export as static methods of target
options.proto - export as prototype methods of target
options.real - real prototype method for the `pure` version
options.forced - export even if the native feature is available
options.bind - bind methods to the target, required for the `pure` version
options.wrap - wrap constructors to preventing global pollution, required for the `pure` version
options.unsafe - use the simple assignment of property instead of delete + defineProperty
options.sham - add a flag to not completely full polyfills
options.enumerable - export as enumerable property
options.noTargetGet - prevent calling a getter on target
*/
var _export = function (options, source) {
var TARGET = options.target;
var GLOBAL = options.global;
var STATIC = options.stat;
var FORCED, target, key, targetProperty, sourceProperty, descriptor;
if (GLOBAL) {
target = global_1;
} else if (STATIC) {
target = global_1[TARGET] || setGlobal(TARGET, {});
} else {
target = (global_1[TARGET] || {}).prototype;
}
if (target) for (key in source) {
sourceProperty = source[key];
if (options.noTargetGet) {
descriptor = getOwnPropertyDescriptor(target, key);
targetProperty = descriptor && descriptor.value;
} else targetProperty = target[key];
FORCED = isForced_1(GLOBAL ? key : TARGET + (STATIC ? '.' : '#') + key, options.forced);
// contained in target
if (!FORCED && targetProperty !== undefined) {
if (typeof sourceProperty === typeof targetProperty) continue;
copyConstructorProperties(sourceProperty, targetProperty);
}
// add a flag to not completely full polyfills
if (options.sham || (targetProperty && targetProperty.sham)) {
createNonEnumerableProperty(sourceProperty, 'sham', true);
}
// extend global
redefine(target, key, sourceProperty, options);
}
};
// `IsArray` abstract operation
// https://tc39.es/ecma262/#sec-isarray
var isArray = Array.isArray || function isArray(arg) {
return classofRaw(arg) == 'Array';
};
// `ToObject` abstract operation
// https://tc39.es/ecma262/#sec-toobject
var toObject = function (argument) {
return Object(requireObjectCoercible(argument));
};
var createProperty = function (object, key, value) {
var propertyKey = toPrimitive(key);
if (propertyKey in object) objectDefineProperty.f(object, propertyKey, createPropertyDescriptor(0, value));
else object[propertyKey] = value;
};
var engineIsNode = classofRaw(global_1.process) == 'process';
var engineUserAgent = getBuiltIn('navigator', 'userAgent') || '';
var process = global_1.process;
var versions = process && process.versions;
var v8 = versions && versions.v8;
var match, version;
if (v8) {
match = v8.split('.');
version = match[0] + match[1];
} else if (engineUserAgent) {
match = engineUserAgent.match(/Edge\/(\d+)/);
if (!match || match[1] >= 74) {
match = engineUserAgent.match(/Chrome\/(\d+)/);
if (match) version = match[1];
}
}
var engineV8Version = version && +version;
var nativeSymbol = !!Object.getOwnPropertySymbols && !fails(function () {
/* global Symbol -- required for testing */
return !Symbol.sham &&
// Chrome 38 Symbol has incorrect toString conversion
// Chrome 38-40 symbols are not inherited from DOM collections prototypes to instances
(engineIsNode ? engineV8Version === 38 : engineV8Version > 37 && engineV8Version < 41);
});
var useSymbolAsUid = nativeSymbol
/* global Symbol -- safe */
&& !Symbol.sham
&& typeof Symbol.iterator == 'symbol';
var WellKnownSymbolsStore = shared('wks');
var Symbol$1 = global_1.Symbol;
var createWellKnownSymbol = useSymbolAsUid ? Symbol$1 : Symbol$1 && Symbol$1.withoutSetter || uid;
var wellKnownSymbol = function (name) {
if (!has$1(WellKnownSymbolsStore, name) || !(nativeSymbol || typeof WellKnownSymbolsStore[name] == 'string')) {
if (nativeSymbol && has$1(Symbol$1, name)) {
WellKnownSymbolsStore[name] = Symbol$1[name];
} else {
WellKnownSymbolsStore[name] = createWellKnownSymbol('Symbol.' + name);
}
} return WellKnownSymbolsStore[name];
};
var SPECIES$1 = wellKnownSymbol('species');
// `ArraySpeciesCreate` abstract operation
// https://tc39.es/ecma262/#sec-arrayspeciescreate
var arraySpeciesCreate = function (originalArray, length) {
var C;
if (isArray(originalArray)) {
C = originalArray.constructor;
// cross-realm fallback
if (typeof C == 'function' && (C === Array || isArray(C.prototype))) C = undefined;
else if (isObject(C)) {
C = C[SPECIES$1];
if (C === null) C = undefined;
}
} return new (C === undefined ? Array : C)(length === 0 ? 0 : length);
};
var SPECIES = wellKnownSymbol('species');
var arrayMethodHasSpeciesSupport = function (METHOD_NAME) {
// We can't use this feature detection in V8 since it causes
// deoptimization and serious performance degradation
// https://github.com/zloirock/core-js/issues/677
return engineV8Version >= 51 || !fails(function () {
var array = [];
var constructor = array.constructor = {};
constructor[SPECIES] = function () {
return { foo: 1 };
};
return array[METHOD_NAME](Boolean).foo !== 1;
});
};
var IS_CONCAT_SPREADABLE = wellKnownSymbol('isConcatSpreadable');
var MAX_SAFE_INTEGER = 0x1FFFFFFFFFFFFF;
var MAXIMUM_ALLOWED_INDEX_EXCEEDED = 'Maximum allowed index exceeded';
// We can't use this feature detection in V8 since it causes
// deoptimization and serious performance degradation
// https://github.com/zloirock/core-js/issues/679
var IS_CONCAT_SPREADABLE_SUPPORT = engineV8Version >= 51 || !fails(function () {
var array = [];
array[IS_CONCAT_SPREADABLE] = false;
return array.concat()[0] !== array;
});
var SPECIES_SUPPORT = arrayMethodHasSpeciesSupport('concat');
var isConcatSpreadable = function (O) {
if (!isObject(O)) return false;
var spreadable = O[IS_CONCAT_SPREADABLE];
return spreadable !== undefined ? !!spreadable : isArray(O);
};
var FORCED = !IS_CONCAT_SPREADABLE_SUPPORT || !SPECIES_SUPPORT;
// `Array.prototype.concat` method
// https://tc39.es/ecma262/#sec-array.prototype.concat
// with adding support of @@isConcatSpreadable and @@species
_export({ target: 'Array', proto: true, forced: FORCED }, {
// eslint-disable-next-line no-unused-vars -- required for `.length`
concat: function concat(arg) {
var O = toObject(this);
var A = arraySpeciesCreate(O, 0);
var n = 0;
var i, k, length, len, E;
for (i = -1, length = arguments.length; i < length; i++) {
E = i === -1 ? O : arguments[i];
if (isConcatSpreadable(E)) {
len = toLength(E.length);
if (n + len > MAX_SAFE_INTEGER) throw TypeError(MAXIMUM_ALLOWED_INDEX_EXCEEDED);
for (k = 0; k < len; k++, n++) if (k in E) createProperty(A, n, E[k]);
} else {
if (n >= MAX_SAFE_INTEGER) throw TypeError(MAXIMUM_ALLOWED_INDEX_EXCEEDED);
createProperty(A, n++, E);
}
}
A.length = n;
return A;
}
});
/**
* Bootstrap Table English translation
* Author: Zhixin Wen<wenzhixin2010@gmail.com>
*/
$__default['default'].fn.bootstrapTable.locales['en-US'] = $__default['default'].fn.bootstrapTable.locales['en'] = {
formatCopyRows: function formatCopyRows() {
return 'Copy Rows';
},
formatPrint: function formatPrint() {
return 'Print';
},
formatLoadingMessage: function formatLoadingMessage() {
return 'Loading, please wait';
},
formatRecordsPerPage: function formatRecordsPerPage(pageNumber) {
return "".concat(pageNumber, " rows per page");
},
formatShowingRows: function formatShowingRows(pageFrom, pageTo, totalRows, totalNotFiltered) {
if (totalNotFiltered !== undefined && totalNotFiltered > 0 && totalNotFiltered > totalRows) {
return "Showing ".concat(pageFrom, " to ").concat(pageTo, " of ").concat(totalRows, " rows (filtered from ").concat(totalNotFiltered, " total rows)");
}
return "Showing ".concat(pageFrom, " to ").concat(pageTo, " of ").concat(totalRows, " rows");
},
formatSRPaginationPreText: function formatSRPaginationPreText() {
return 'previous page';
},
formatSRPaginationPageText: function formatSRPaginationPageText(page) {
return "to page ".concat(page);
},
formatSRPaginationNextText: function formatSRPaginationNextText() {
return 'next page';
},
formatDetailPagination: function formatDetailPagination(totalRows) {
return "Showing ".concat(totalRows, " rows");
},
formatClearSearch: function formatClearSearch() {
return 'Clear Search';
},
formatSearch: function formatSearch() {
return 'Search';
},
formatNoMatches: function formatNoMatches() {
return 'No matching records found';
},
formatPaginationSwitch: function formatPaginationSwitch() {
return 'Hide/Show pagination';
},
formatPaginationSwitchDown: function formatPaginationSwitchDown() {
return 'Show pagination';
},
formatPaginationSwitchUp: function formatPaginationSwitchUp() {
return 'Hide pagination';
},
formatRefresh: function formatRefresh() {
return 'Refresh';
},
formatToggle: function formatToggle() {
return 'Toggle';
},
formatToggleOn: function formatToggleOn() {
return 'Show card view';
},
formatToggleOff: function formatToggleOff() {
return 'Hide card view';
},
formatColumns: function formatColumns() {
return 'Columns';
},
formatColumnsToggleAll: function formatColumnsToggleAll() {
return 'Toggle all';
},
formatFullscreen: function formatFullscreen() {
return 'Fullscreen';
},
formatAllRows: function formatAllRows() {
return 'All';
},
formatAutoRefresh: function formatAutoRefresh() {
return 'Auto Refresh';
},
formatExport: function formatExport() {
return 'Export data';
},
formatJumpTo: function formatJumpTo() {
return 'GO';
},
formatAdvancedSearch: function formatAdvancedSearch() {
return 'Advanced search';
},
formatAdvancedCloseButton: function formatAdvancedCloseButton() {
return 'Close';
},
formatFilterControlSwitch: function formatFilterControlSwitch() {
return 'Hide/Show controls';
},
formatFilterControlSwitchHide: function formatFilterControlSwitchHide() {
return 'Hide controls';
},
formatFilterControlSwitchShow: function formatFilterControlSwitchShow() {
return 'Show controls';
}
};
$__default['default'].extend($__default['default'].fn.bootstrapTable.defaults, $__default['default'].fn.bootstrapTable.locales['en-US']);
})));
| {
return module = { exports: {} }, fn(module, module.exports), module.exports;
} | identifier_body |
cell-static.ts | import { Gutters } from './gutters';
import { Gutters as DefaultGutters, GuttersProps } from '../components/gutters';
import { CellProperties } from './cell-properties';
/**
* Creates a single breakpoint sized grid.
*
* @param {string | number} size The size of your cell. Can be `full` (default) for 100% width, `auto` to use up available space and `shrink` to use up only required space.
* @param {boolean} outputGutter Whether or not to output gutters.
* @param {GuttersProps} gutters Array of gutter values.
* @param {string} gutterType The gutter type padding or margin.
* @param {string} breakpoint The name of the breakpoint size in your gutters array to get the size from.
* @param {boolean} vertical Set to true to output vertical (height) styles rather than widths.
*
* @return {Array<string>}
*/
export const CellStatic = (
size: string | number = 'full',
outputGutter: boolean = true,
gutters: number | GuttersProps = DefaultGutters,
gutterType: string = 'margin',
breakpoint: string = 'small',
vertical: boolean = false,
): string[] => {
let gutter;
let gutterPosition = ['left', 'right'];
if (typeof gutters === 'object' && breakpoint in gutters) {
gutter = gutters[breakpoint];
} else if (typeof gutters === 'number') {
gutter = gutters;
} else {
const value = typeof gutters === 'object' ? JSON.stringify(gutters) : gutters;
throw new Error(
`No gutters were found in "${value}" for "breakpoint: ${breakpoint}", cell was not generated.`,
);
}
if (vertical) {
gutterPosition = ['top', 'bottom'];
}
let css: string[] = [];
if (gutterType === 'margin') {
css.push(CellProperties(size, gutter, vertical));
} else {
css.push(CellProperties(size, 0, vertical));
}
if (outputGutter) |
return css;
};
| {
css = css.concat(Gutters(gutter, gutterType, gutterPosition));
} | conditional_block |
cell-static.ts | import { Gutters } from './gutters';
import { Gutters as DefaultGutters, GuttersProps } from '../components/gutters';
import { CellProperties } from './cell-properties';
/**
* Creates a single breakpoint sized grid.
*
* @param {string | number} size The size of your cell. Can be `full` (default) for 100% width, `auto` to use up available space and `shrink` to use up only required space.
* @param {boolean} outputGutter Whether or not to output gutters.
* @param {GuttersProps} gutters Array of gutter values.
* @param {string} gutterType The gutter type padding or margin.
* @param {string} breakpoint The name of the breakpoint size in your gutters array to get the size from.
* @param {boolean} vertical Set to true to output vertical (height) styles rather than widths.
*
* @return {Array<string>}
*/
export const CellStatic = (
size: string | number = 'full',
outputGutter: boolean = true,
gutters: number | GuttersProps = DefaultGutters,
gutterType: string = 'margin',
breakpoint: string = 'small',
vertical: boolean = false,
): string[] => {
let gutter;
let gutterPosition = ['left', 'right'];
if (typeof gutters === 'object' && breakpoint in gutters) {
gutter = gutters[breakpoint];
} else if (typeof gutters === 'number') {
gutter = gutters;
} else {
const value = typeof gutters === 'object' ? JSON.stringify(gutters) : gutters;
throw new Error(
`No gutters were found in "${value}" for "breakpoint: ${breakpoint}", cell was not generated.`,
);
}
if (vertical) {
gutterPosition = ['top', 'bottom'];
}
let css: string[] = [];
if (gutterType === 'margin') {
css.push(CellProperties(size, gutter, vertical));
} else {
css.push(CellProperties(size, 0, vertical));
}
if (outputGutter) {
css = css.concat(Gutters(gutter, gutterType, gutterPosition));
} |
return css;
}; | random_line_split | |
index.tsx | import React, { FC, ReactElement } from 'react';
import styled from 'styled-components';
import theme from '../../theme';
import { isMac } from '../../utils';
type P = {
title?: string;
children?: ReactElement | ReactElement[] | string;
};
const Layout: FC<P> = ({ title = '', children }) => {
return (
<Win>
{isMac && <Frame>{title}</Frame>}
<>{children}</>
</Win>
);
};
export default Layout;
const { colors } = theme;
const Win = styled.div`
display: flex;
flex-direction: column;
user-select: none;
cursor: default;
`;
const Frame = styled.div`
color: ${colors.text.soft};
background: ${colors.frame};
display: flex;
align-items: center;
justify-content: center;
height: 28px;
font-weight: bold;
font-size: 12px; | text-align: center;
-webkit-app-region: drag;
-webkit-user-select: none;
`; | random_line_split | |
licenseck.py | # Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
# These licenses are valid for use in Servo
licenses = [
"""\
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
""",
"""\
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
""",
"""\
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
""",
"""\
// Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
""",
"""\
# Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
""", | ] | random_line_split | |
fitadd.run.js | var options;
chrome.runtime.sendMessage({method: "getSettings"}, function(response) {
options = response;
var readyStateCheckInterval = setInterval(function() {
if (document.readyState === "complete") {
WireUp();
clearInterval(readyStateCheckInterval);
}
}, 10);
});
function WireUp(){
if (fitadd.fitnessepage.IsFitnesse()){
if (fitadd.fitnessepage.AreAdding()){
if (options.cloneShortcut){
fitadd.fitnesseui.LoadClonedContentIfNeeded();
}
}
if (options.spEnabled){
fitadd.fixtures.search.RegisterHandlers(options);
}
if (fitadd.fitnessepage.AreEditing()) | else{
if (options.editShortcut){
fitadd.fitnesseui.RegisterEditShortcut();
if (options.cloneShortcut){
fitadd.fitnesseui.RegisterCloneLink();
}
}
}
}
}
| {
if (options.saveShortcut){
fitadd.fitnesseui.RegisterSaveShortcut();
}
if (options.autoFormat){
fitadd.fitnesseui.RegisterAutoFormatOnSave();
}
} | conditional_block |
fitadd.run.js | var options;
chrome.runtime.sendMessage({method: "getSettings"}, function(response) {
options = response;
var readyStateCheckInterval = setInterval(function() {
if (document.readyState === "complete") {
WireUp();
clearInterval(readyStateCheckInterval);
}
}, 10);
});
function WireUp(){
if (fitadd.fitnessepage.IsFitnesse()){
if (fitadd.fitnessepage.AreAdding()){
if (options.cloneShortcut){
fitadd.fitnesseui.LoadClonedContentIfNeeded();
}
}
if (options.spEnabled){
fitadd.fixtures.search.RegisterHandlers(options);
}
if (fitadd.fitnessepage.AreEditing()){
if (options.saveShortcut){
fitadd.fitnesseui.RegisterSaveShortcut();
}
if (options.autoFormat){
fitadd.fitnesseui.RegisterAutoFormatOnSave();
}
}else{
if (options.editShortcut){
| fitadd.fitnesseui.RegisterEditShortcut();
if (options.cloneShortcut){
fitadd.fitnesseui.RegisterCloneLink();
}
}
}
}
} | random_line_split | |
fitadd.run.js | var options;
chrome.runtime.sendMessage({method: "getSettings"}, function(response) {
options = response;
var readyStateCheckInterval = setInterval(function() {
if (document.readyState === "complete") {
WireUp();
clearInterval(readyStateCheckInterval);
}
}, 10);
});
function | (){
if (fitadd.fitnessepage.IsFitnesse()){
if (fitadd.fitnessepage.AreAdding()){
if (options.cloneShortcut){
fitadd.fitnesseui.LoadClonedContentIfNeeded();
}
}
if (options.spEnabled){
fitadd.fixtures.search.RegisterHandlers(options);
}
if (fitadd.fitnessepage.AreEditing()){
if (options.saveShortcut){
fitadd.fitnesseui.RegisterSaveShortcut();
}
if (options.autoFormat){
fitadd.fitnesseui.RegisterAutoFormatOnSave();
}
}else{
if (options.editShortcut){
fitadd.fitnesseui.RegisterEditShortcut();
if (options.cloneShortcut){
fitadd.fitnesseui.RegisterCloneLink();
}
}
}
}
}
| WireUp | identifier_name |
fitadd.run.js | var options;
chrome.runtime.sendMessage({method: "getSettings"}, function(response) {
options = response;
var readyStateCheckInterval = setInterval(function() {
if (document.readyState === "complete") {
WireUp();
clearInterval(readyStateCheckInterval);
}
}, 10);
});
function WireUp() | {
if (fitadd.fitnessepage.IsFitnesse()){
if (fitadd.fitnessepage.AreAdding()){
if (options.cloneShortcut){
fitadd.fitnesseui.LoadClonedContentIfNeeded();
}
}
if (options.spEnabled){
fitadd.fixtures.search.RegisterHandlers(options);
}
if (fitadd.fitnessepage.AreEditing()){
if (options.saveShortcut){
fitadd.fitnesseui.RegisterSaveShortcut();
}
if (options.autoFormat){
fitadd.fitnesseui.RegisterAutoFormatOnSave();
}
}else{
if (options.editShortcut){
fitadd.fitnesseui.RegisterEditShortcut();
if (options.cloneShortcut){
fitadd.fitnesseui.RegisterCloneLink();
}
}
}
}
} | identifier_body | |
Gruntfile.js | 'use strict';
module.exports = function (grunt) {
// Time how long tasks take. Can help when optimizing build times
require('time-grunt')(grunt);
// Automatically load required grunt tasks
require('jit-grunt')(grunt, {
lockfile: 'grunt-lock'
});
// Configurable paths
var config = {
sass_dir: 'bundle/Resources/sass/admin',
public_dir: 'bundle/Resources/public/admin'
};
// Define the configuration for all the tasks
grunt.initConfig({
// Project settings
config: config,
//Prevent multiple grunt instances
lockfile: {
grunt: {
path: 'grunt.lock'
}
},
// Watches files for changes and runs tasks based on the changed files
watch: {
gruntfile: {
files: ['Gruntfile.js'],
options: {
reload: true
}
},
sass: {
files: ['<%= config.sass_dir %>/{,*/}*.{scss,sass}'],
tasks: ['sass', 'postcss']
}
},
// Compiles Sass to CSS and generates necessary files if requested
sass: {
options: {
sourceMap: true,
sourceMapEmbed: true,
sourceMapContents: true,
includePaths: ['.']
},
dist: {
files: [{
expand: true,
cwd: '<%= config.sass_dir %>',
src: ['*.{scss,sass}'],
dest: '.tmp/css',
ext: '.css'
}]
}
},
postcss: {
options: {
map: true,
processors: [
// Add vendor prefixed styles
require('autoprefixer')({
browsers: ['> 1%', 'last 3 versions', 'Firefox ESR', 'Opera 12.1']
})
]
},
dist: { | files: [{
expand: true,
cwd: '.tmp/css/',
src: '{,*/}*.css',
dest: '<%= config.public_dir %>/css'
}]
}
}
});
grunt.registerTask('serve', 'Start the server and preview your app', function () {
grunt.task.run([
'lockfile',
'sass:dist',
'postcss',
'watch'
]);
});
grunt.registerTask('default', [
'serve'
]);
}; | random_line_split |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.