prompt large_stringlengths 70 991k | completion large_stringlengths 0 1.02k |
|---|---|
/******/ (function(modules) { // webpackBootstrap
/******/ // Standard webpack (v1) runtime: module cache + require, then the
/******/ // bundled module map is applied and entry module 0 is loaded.
/******/ // The module cache
/******/ var installedModules = {};
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ exports: {},
/******/ id: moduleId,
/******/ loaded: false
/******/ };
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/ // Flag the module as loaded
/******/ module.loaded = true;
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/ // Load entry module and return exports
/******/ return __webpack_require__(0);
/******/ })
/************************************************************************/
/******/ ({
/***/ 0:
/***/ function(module, exports, __webpack_require__) {
// Entry module: pulls in the <banners> component (249), then defines and
// bootstraps the root brand component from template (250), style (251) and
// script (252).
__webpack_require__(249)
var __weex_template__ = __webpack_require__(250)
var __weex_style__ = __webpack_require__(251)
var __weex_script__ = __webpack_require__(252)
__weex_define__('@weex-component/81fdd9b8b8bce1b304791aba10e15462', [], function(__weex_require__, __weex_exports__, __weex_module__) {
__weex_script__(__weex_module__, __weex_exports__, __weex_require__)
if (__weex_exports__.__esModule && __weex_exports__.default) {
__weex_module__.exports = __weex_exports__.default
}
__weex_module__.exports.template = __weex_template__
__weex_module__.exports.style = __weex_style__
})
__weex_bootstrap__('@weex-component/81fdd9b8b8bce1b304791aba10e15462',undefined,undefined)
/***/ },
/***/ 244:
/***/ function(module, exports) {
// Template for the <banner> component: a single image whose size, source and
// quality are bound to the component's data; taps are routed to _clickHandler.
module.exports = {
"type": "image",
"style": {
"width": function () {return this.width},
"height": function () {return this.height}
},
"attr": {
"src": function () {return this.src},
"imageQuality": function () {return this.quality}
},
"events": {
"click": "_clickHandler"
}
}
/***/ },
/***/ 245:
/***/ function(module, exports) {
// Script for the <banner> component. Exposes default data fields and a click
// handler that shows a short toast via the weex "modal" module.
module.exports = function(module, exports, __weex_require__){'use strict';
module.exports = {
data: function () {return {
quality: 'normal',
width: 0,
height: 0,
src: '',
href: '',
spmc: 0,
spmd: 0
}},
methods: {
ready: function ready() {},
_clickHandler: function _clickHandler() {
// duration is in seconds for the weex modal.toast API
this.$call('modal', 'toast', {
message: 'click',
duration: 1
});
}
}
};}
/* generated by weex-loader */
/***/ },
/***/ 246:
/***/ function(module, exports, __webpack_require__) {
// Registers the <banner> component from its template (244) and script (245).
var __weex_template__ = __webpack_require__(244)
var __weex_script__ = __webpack_require__(245)
__weex_define__('@weex-component/banner', [], function(__weex_require__, __weex_exports__, __weex_module__) {
__weex_script__(__weex_module__, __weex_exports__, __weex_require__)
if (__weex_exports__.__esModule && __weex_exports__.default) {
__weex_module__.exports = __weex_exports__.default
}
__weex_module__.exports.template = __weex_template__
})
/***/ },
/***/ 247:
/***/ function(module, exports) {
// Template for the <banners> component: renders the `ds` items as a row or a
// column of <banner>s depending on the `direction` attribute. The two branches
// are identical except for flexDirection and which margin carries `space`.
module.exports = {
"type": "container",
"children": [
{
"type": "container",
"shown": function () {return this.direction==='row'},
"style": {
"flexDirection": "row"
},
"children": [
{
"type": "container",
"repeat": function () {return this.ds},
"style": {
"width": function () {return this.width},
"height": function () {return this.height},
"marginLeft": function () {return this.space}
},
"children": [
{
"type": "banner",
"attr": {
"width": function () {return this.width},
"height": function () {return this.height},
"src": function () {return this.img},
"href": function () {return this.url}
}
}
]
}
]
},
{
"type": "container",
"shown": function () {return this.direction==='column'},
"children": [
{
"type": "container",
"repeat": function () {return this.ds},
"style": {
"width": function () {return this.width},
"height": function () {return this.height},
"marginTop": function () {return this.space}
},
"children": [
{
"type": "banner",
"attr": {
"width": function () {return this.width},
"height": function () {return this.height},
"src": function () {return this.img},
"href": function () {return this.url}
}
}
]
}
]
}
]
}
/***/ },
/***/ 248:
/***/ function(module, exports) {
// Script for the <banners> component: indexes each item of `ds` and assigns
// the inter-item spacing (first item gets 0, the rest get `space`).
module.exports = function(module, exports, __weex_require__){'use strict';
module.exports = {
data: function () {return {
space: 0,
width: 0,
height: 0,
spmc: 0,
spmdprefix: '',
ds: []
}},
methods: {
ready: function ready() {
var self = this;
var ds = self.ds;
var length = ds.length;
for (var i = 0; i < length; i++) {
var item = ds[i];
item.index = i;
// Since 0 <= i < length, `i % length === 0` is equivalent to `i === 0`:
// only the first item has no leading space.
item.space = i % length === 0 ? 0 : self.space;
}
}
}
};}
/* generated by weex-loader */
/***/ },
/***/ 249:
/***/ function(module, exports, __webpack_require__) {
// Registers the <banners> component from its template (247) and script (248),
// after ensuring the nested <banner> component (246) is registered first.
__webpack_require__(246)
var __weex_template__ = __webpack_require__(247)
var __weex_script__ = __webpack_require__(248)
__weex_define__('@weex-component/banners', [], function(__weex_require__, __weex_exports__, __weex_module__) {
__weex_script__(__weex_module__, __weex_exports__, __weex_require__)
if (__weex_exports__.__esModule && __weex_exports__.default) {
__weex_module__.exports = __weex_exports__.default
}
__weex_module__.exports.template = __weex_template__
})
/***/ },
/***/ 250:
/***/ function(module, exports) {
module.exports = {
"type": "container",
"classList": [
"container"<|fim▁hole|> "children": [
{
"type": "image",
"shown": function () {return this.ds.floorTitle},
"classList": [
"title"
],
"attr": {
"src": function () {return this.ds.floorTitle}
}
},
{
"type": "container",
"style": {
"marginLeft": 4,
"marginRight": 4
},
"children": [
{
"type": "banners",
"attr": {
"ds": function () {return this.bannerItems},
"direction": "column",
"width": function () {return this.NUMBER_742},
"height": function () {return this.NUMBER_230},
"space": function () {return this.NUMBER_4}
}
}
]
}
]
}
/***/ },
/***/ 251:
/***/ function(module, exports) {
// Stylesheet for the brand component (sizes in weex's 750-wide viewport units).
module.exports = {
"title": {
"width": 750,
"height": 100
},
"container": {
"marginBottom": 4,
"backgroundColor": "#C0BABC"
}
}
/***/ },
/***/ 252:
/***/ function(module, exports) {
module.exports = function(module, exports, __weex_require__){"use strict";
module.exports = {
data: function () {return {
NUMBER_742: 742,
NUMBER_230: 230,
NUMBER_4: 4
}},
methods: {
ready: function ready() {
var self = this;
self._randomBrand();
},
_randomBrand: function _randomBrand() {
var self = this;
var bannerItems = self.ds.bannerItems;
bannerItems = bannerItems.sort(function () {
return Math.random() - 0.5;
});
self.bannerItems = bannerItems.slice(0, 8);
for (var i = 0; i < bannerItems.length; i++) {
var item = bannerItems[i];
if (i % 2 === 0) {
item.img = item.leftImg;
item.url = item.rightUrl;
} else {
item.img = item.rightImg;
item.url = item.rightUrl;
}
}
}
}
};}
/* generated by weex-loader */
/***/ }
/******/ });
use std::fs::{ create_dir, read_dir };
use std::mem::{ MaybeUninit };
use std::path::{ Path, PathBuf };
use std::ptr::{ null_mut };
use std::sync::mpsc::{ channel, Sender, Receiver, TryRecvError };
use std::thread::{ sleep, spawn, JoinHandle };
use std::time::{ Duration };
use std::vec::{ Vec };
use libc::{ sigwait, sigemptyset, sigaddset, SIGTERM, SIGINT, c_int, size_t, sigprocmask, SIG_SETMASK };
use rand::{ random };
use crate::node::client::{ Client };
use crate::node::common::{ NodeId, FileDescriptor, OpenMode, ADMIN_GROUP, Timestamp, FileType, FileRevision, log_crypto_context_error, utc_timestamp };
use crate::node::tls_connection::{ TlsServer };
use crate::node::crypto::{ Crypto };
use crate::node::file_handle::{ FileAccess, FileProperties };
use crate::node::filesystem::{ Filesystem, FilesystemError, Node as FsNode };
use crate::node::directory::{ Child };
use crate::node::user_authority::{ UserAuthority, Id };
use crate::node::serialize::{ SerializedNode };
/// Node-level (non-filesystem) error conditions reported to clients.
pub enum NodeError {
InvalidUsernamePassword,
ParentIsNotDirectory,
UnknownAuthority,
AuthorityError,
UnauthorizedOperation,
InternalCommunicationError,
InternalError,
UnknownFile,
InvalidPageSize,
FailedToResolveAuthority,
FailedToAllocateAuthenticationToken,
FailedToConsumeAuthenticationToken,
}
/// Error payload sent back to a client: either a node error or a filesystem error.
pub enum ErrorResponse {
NodeError { error: NodeError },
FilesystemError { error: FilesystemError },
}
fn fs_error_to_rsp(error: FilesystemError) -> ErrorResponse {
ErrorResponse::FilesystemError {
error: error,
}
}
fn node_error_to_rsp(error: NodeError) -> ErrorResponse {
ErrorResponse::NodeError {
error: error,
}
}
/// Kind of a filesystem element.
pub enum FilesystemElementType {
File,
Directory,
}
/// Human-readable owner of a permission: a named user or a named group.
pub enum Authority {
User(String),
Group(String),
}
/// Full description of a filesystem element, with resolved authority names.
pub enum FilesystemElement {
File {
properties: FileProperties,
created_by: Authority,
modified_by: Authority,
read: Authority,
write: Authority,
node_id: NodeId,
},
Directory {
created_at: Timestamp,
modified_at: Timestamp,
read: Authority,
write: Authority,
node_id: NodeId,
},
}
/// Lightweight properties of an element, as seen from its parent directory.
pub enum FilesystemElementProperties {
File {
name: String,
node_id: NodeId,
revision: FileRevision,
file_type: FileType,
size: u64,
},
Directory {
name: String,
node_id: NodeId,
},
}
/// One entry in a directory listing returned to clients.
pub enum FileSystemListElement {
File {
name: String,
node_id: NodeId,
revision: FileRevision,
file_type: FileType,
size: u64,
is_open: bool,
},
Directory {
name: String,
node_id: NodeId,
read: Authority,
write: Authority,
},
}
/// Runtime counters exposed to administrators.
pub struct Counters {
pub active_connections: u32,
pub number_of_open_files: u32,
pub number_of_files: u32,
}
/// Extra system details shown only to members of the admin group.
pub struct AdminSystemInformation {
pub certificate_expiration: Timestamp,
}
/// System information returned by QuerySystemRequest; admin fields are optional.
pub struct SystemInformation {
pub started_at: Timestamp,
pub server_id: u64,
pub admin_system_information: Option<AdminSystemInformation>,
}
/// Why the node told a client to shut down.
pub enum ShutdownReason {
NodeClosing,
}
/// Messages sent from the node to client handler threads (responses + shutdown).
pub enum ClientProtocol {
AuthenticateResponse { result: Result<Id, ErrorResponse> },
AllocateAuthenticationTokenResponse { result: Result<String, ErrorResponse> },
CreateFileResponse { result: Result<(NodeId, FileProperties), ErrorResponse> },
CreateDirectoryResponse { result: Result<NodeId, ErrorResponse> },
OpenFileResponse { result: Result<(FileAccess, NodeId, FileProperties), ErrorResponse> },
Shutdown { reason: ShutdownReason },
CountersResponse { result: Result<Counters, ErrorResponse> },
QuerySystemResponse { result: Result<SystemInformation, ErrorResponse> },
QueryFsChildrenResponse { result: Result<Vec<FileSystemListElement>, ErrorResponse> },
QueryFsElementResponse { result: Result<FilesystemElement, ErrorResponse> },
QueryFsElementPropertiesResponse { result: Result<FilesystemElementProperties, ErrorResponse> },
DeleteResponse { result: Result<(), ErrorResponse> },
AddUserGroupResponse { result: Result<(), ErrorResponse> },
ModifyUserGroupResponse { result: Result<(), ErrorResponse> },
}
/// Messages sent from client handler threads to the node's main loop.
/// NOTE: `CreateDirecotryRequest` is misspelled, but it is part of the public
/// protocol enum, so renaming it would break callers.
pub enum NodeProtocol {
AuthenticateWithPasswordRequest { username: String, password: String },
AuthenticateWithTokenRequest { token: String },
AllocateAuthenticationTokenRequest { user: Id },
CreateFileRequest { parent: FileDescriptor, type_of_file: FileType, name: String, user: Id, page_size: Option<u64> },
CreateDirecotryRequest { parent: FileDescriptor, name: String, user: Id },
OpenFileRequest { mode: OpenMode, file_descriptor: FileDescriptor, user: Id },
CountersRequest { user: Id, },
QuerySystemRequest { user: Id, },
QueryFsChildrenRequest { user: Id, fd: FileDescriptor, },
QueryFsElementRequest { user: Id, fd: FileDescriptor, },
QueryFsElementPropertiesRequest { user: Id, fd: FileDescriptor, fd_parent: FileDescriptor, },
DeleteRequest { user: Id, fd: FileDescriptor },
AddUserRequest { user: Id, name: String },
ModifyUser { user: Id, name: String, password: Option<String>, expiration: Option<Option<Timestamp>> },
AddGroupRequest { user: Id, name: String },
ModifyGroup { user: Id, name: String, expiration: Option<Option<Timestamp>> },
Quit,
}
/// Block SIGTERM/SIGINT on the calling thread and spawn a thread that waits
/// for either signal, signalling the returned channel once when one arrives.
/// Returns Err(()) if any of the libc signal calls fail.
fn start_signal_listener() -> Result<Receiver<()>, ()> {
    let (sender, receiver) = channel::<()>();
    // Fix: `MaybeUninit::uninit().assume_init()` on an integer array is
    // undefined behaviour. Zero-initialise instead; sigemptyset() overwrites
    // the contents anyway. (The MaybeUninit import is now unused but is left
    // in place since the rest of the file is not visible here.)
    let mut signal_set: [size_t; 32] = [0; 32];
    if unsafe { sigemptyset(signal_set.as_mut_ptr() as _) } != 0 {
        return Err(())
    }
    if unsafe { sigaddset(signal_set.as_mut_ptr() as _, SIGTERM) } != 0 {
        return Err(())
    }
    if unsafe { sigaddset(signal_set.as_mut_ptr() as _, SIGINT) } != 0 {
        return Err(())
    }
    // Mask the signals on this (main) thread so sigwait in the helper thread
    // is the only consumer.
    if unsafe { sigprocmask(SIG_SETMASK, signal_set.as_ptr() as _, null_mut()) } != 0 {
        return Err(());
    }
    spawn(move || {
        let mut sig: c_int = 0;
        unsafe { sigwait(signal_set.as_ptr() as _, & mut sig) };
        sender.send(()).unwrap();
    });
    Ok(receiver)
}
/// Read/write authority ids attached to a filesystem element.
struct FilesystemElementAuthorityId {
read: Id,
write: Id,
}
/// Bookkeeping for one connected client: its channels and handler thread.
struct ClientInfo {
transmit: Sender<ClientProtocol>,
receive: Receiver<NodeProtocol>,
thread_handle: JoinHandle<()>,
}
/// Tunable limits used when creating a new node working directory.
pub struct NodeSettings {
pub max_page_size_random_access_file: usize,
pub max_page_size_blob_file: usize,
pub max_number_of_files_per_directory: usize,
pub filesystem_capacity: u64,
pub socket_buffer_size: u64,
}
/// The server node: TLS acceptor, connected clients, filesystem, user
/// authority and persisted settings.
pub struct Node {
server: TlsServer,
clients: Vec<ClientInfo>,
filesystem: Filesystem,
auth: UserAuthority,
path_workdir: PathBuf,
crypto: Crypto,
started_at: Timestamp,
server_id: u64,
max_page_size_random_access_file: usize,
max_page_size_blob_file: usize,
client_socket_buffer_size: usize,
max_inactivity_duration_secs: i64,
authentication_token_duration_secs: i64,
}
impl Node {
/// Location of the serialized user authority inside the work directory.
fn path_user_authority(path_workdir: & Path) -> PathBuf {
    let mut path = path_workdir.to_path_buf();
    path.push("users");
    path
}
/// Location of the file-data directory inside the work directory.
fn path_data(path_workdir: & Path) -> PathBuf {
    let mut path = path_workdir.to_path_buf();
    path.push("data");
    path
}
/// Location of the serialized filesystem inside the work directory.
fn path_filesystem(path_workdir: & Path) -> PathBuf {
    let mut path = path_workdir.to_path_buf();
    path.push("fs");
    path
}
/// Location of the serialized node settings inside the work directory.
fn path_node(path_workdir: & Path) -> PathBuf {
    let mut path = path_workdir.to_path_buf();
    path.push("node");
    path
}
/// Initialise a brand-new node working directory: stores the user authority,
/// creates the data dir, and persists an empty filesystem plus node settings.
/// Fails if the working directory is not empty.
pub fn create(
crypto: Crypto,
auth: UserAuthority,
path_workdir: & Path,
settings: NodeSettings,
) -> Result<(), ()> {
info!("Creating node, path_workdir={}", path_workdir.display());
let workdir_it = read_dir(& path_workdir)
.map_err(| error | error!("Failed to read workdir content, error=\"{}\"", error))
? ;
// Refuse to overwrite an existing installation.
if workdir_it.count() != 0 {
error!("Working directory is not empty");
return Err(());
}
let context_auth = crypto.create_context()
.map_err(| () | log_crypto_context_error())
?;
let context_node_settings = crypto.create_context()
.map_err(| () | log_crypto_context_error())
? ;
auth.store(context_auth, & Node::path_user_authority(path_workdir))
.map_err(| () | error!("Failed to store user authority"))
? ;
let path_data_dir = Node::path_data(path_workdir);
create_dir(& path_data_dir)
.map_err(| error | error!("Failed to create data dir, error=\"{}\"", error))
? ;
let mut fs = Filesystem::new_with_capacity(
crypto,
& path_data_dir,
settings.filesystem_capacity as usize,
settings.max_number_of_files_per_directory as usize
);
fs.store(& Node::path_filesystem(path_workdir))
.map_err(| () | error!("Failed to store filesystem"))
? ;
// Persist the settings that must survive restarts.
let serialized_node_settings = SerializedNode {
client_input_buffer_size: settings.socket_buffer_size as u64,
page_size_for_random_access_files: settings.max_page_size_random_access_file as u64,
page_size_for_blob_files: settings.max_page_size_blob_file as u64,
};
serialized_node_settings.write(context_node_settings, & Node::path_node(path_workdir))
.map_err(| () | error!("Failed to store node settings"))
? ;
Ok(())
}
/// Persist the node's mutable state (user authority, filesystem, settings)
/// back into the working directory. Called on shutdown.
fn store(& mut self) -> Result<(), ()> {
info!("Storing node, path_workdir={}", self.path_workdir.display());
let context_auth = self.crypto.create_context()
.map_err(| () | log_crypto_context_error())
?;
let context_node_settings = self.crypto.create_context()
.map_err(| () | log_crypto_context_error())
? ;
self.auth.store(context_auth, & Node::path_user_authority(& self.path_workdir))
.map_err(| () | error!("Failed to store user authority"))
? ;
self.filesystem.store(& Node::path_filesystem(& self.path_workdir))
.map_err(| () | error!("Failed to store filesystem"))
? ;
let serialized_node_settings = SerializedNode {
client_input_buffer_size: self.client_socket_buffer_size as u64,
page_size_for_random_access_files: self.max_page_size_random_access_file as u64,
page_size_for_blob_files: self.max_page_size_blob_file as u64,
};
serialized_node_settings.write(context_node_settings, & Node::path_node(& self.path_workdir))
.map_err(| () | error!("Failed to store node settings"))
? ;
Ok(())
}
/// Load a previously created node from the working directory and prepare it
/// to run: reads the user authority, filesystem and settings, and assigns a
/// fresh random server id for this run.
pub fn load(
crypto: Crypto,
server: TlsServer,
path_workdir: & Path,
max_inactivity_duration_secs: i64,
authentication_token_duration_secs: i64,
) -> Result<Node, ()> {
info!("Loading node, path_workdir={}", path_workdir.display());
let context_auth = crypto.create_context()
.map_err(| () | log_crypto_context_error())
?;
let context_node_settings = crypto.create_context()
.map_err(| () | log_crypto_context_error())
? ;
let auth = UserAuthority::load(context_auth, & Node::path_user_authority(path_workdir))
// NOTE(review): message says "store" but this is a load failure.
.map_err(| () | error!("Failed to store users"))
? ;
let path_data_dir = Node::path_data(path_workdir);
let fs = Filesystem::load(crypto.clone(), & path_data_dir, & Node::path_filesystem(path_workdir))
.map_err(| () | error!("Failed to load filesystem"))
? ;
let settings = SerializedNode::read(context_node_settings, & Node::path_node(path_workdir))
.map_err(| () | error!("Failed to load node settings"))
? ;
Ok(Node {
server: server,
clients: Vec::new(),
filesystem: fs,
auth: auth,
path_workdir: path_workdir.to_path_buf(),
crypto: crypto,
max_page_size_random_access_file: settings.page_size_for_random_access_files as usize,
max_page_size_blob_file: settings.page_size_for_blob_files as usize,
client_socket_buffer_size: settings.client_input_buffer_size as usize,
started_at: utc_timestamp(),
server_id: random::<u64>(),
max_inactivity_duration_secs: max_inactivity_duration_secs,
authentication_token_duration_secs: authentication_token_duration_secs,
})
}
/// Main server loop. Each iteration: (1) check for SIGTERM/SIGINT, (2) poll
/// every client channel and dispatch at most one message per client,
/// (3) remove at most one failed client, (4) accept at most one new TLS
/// connection, (5) sleep briefly when nothing happened. On exit, tells all
/// clients to shut down, joins their threads, and persists state via store().
pub fn run(& mut self) -> Result<(), ()> {
let signal_channel = start_signal_listener()
.map_err(| () | error!("Failed to register interrupt signals"))
? ;
info!("Node ready and waiting for connections");
// Scratch buffer reused by file-descriptor resolution in the handlers.
let mut node_id_buffer: [NodeId; 20] = [0; 20];
loop {
let mut is_processing: bool = false;
if let Ok(()) = signal_channel.try_recv() {
info!("Interrupt signal received, shutting down");
break ;
}
let mut communication_to_client_failed: Option<usize> = None;
for (client_index, client) in self.clients.iter().enumerate() {
match client.receive.try_recv() {
Err(TryRecvError::Disconnected) => {
warn!("Failed to receive from client, removing");
communication_to_client_failed = Some(client_index);
break ;
}
Err(TryRecvError::Empty) => (),
Ok(message) => {
is_processing = true;
let mut send_failed: bool = false;
match message {
// Quit carries no response; the client is dropped when its
// channel disconnects.
NodeProtocol::Quit => {
},
NodeProtocol::AuthenticateWithPasswordRequest {
username,
password,
} => {
trace!("Authenticate request, username=\"{}\"", username);
let result = self.auth.validate_user(
& username,
& password,
utc_timestamp()
).map_err(| () | node_error_to_rsp(NodeError::InvalidUsernamePassword))
;
send_failed = client.transmit.send(
ClientProtocol::AuthenticateResponse {
result: result,
},
).is_err();
},
NodeProtocol::AuthenticateWithTokenRequest {
token,
} => {
trace!("Authenticate request, token=\"{}\"", token);
let result = self.auth.consume_link_to_id(
& token,
utc_timestamp(),
).map_err(| () | node_error_to_rsp(NodeError::FailedToConsumeAuthenticationToken))
;
send_failed = client.transmit.send(
ClientProtocol::AuthenticateResponse {
result: result
},
).is_err();
},
NodeProtocol::AllocateAuthenticationTokenRequest {
user,
} => {
trace!("Allocate authnetication token request, user={}", user);
let result = self.auth.generate_temporary_link_for_id(
& user,
utc_timestamp() + self.authentication_token_duration_secs,
).map_err(| () | node_error_to_rsp(NodeError::FailedToAllocateAuthenticationToken))
;
send_failed = client.transmit.send(
ClientProtocol::AllocateAuthenticationTokenResponse {
result: result
},
).is_err();
},
NodeProtocol::CreateFileRequest {
parent,
type_of_file,
name,
user,
page_size,
} => {
trace!("Create file request, user={}", user);
let result = Node::handle_create_file_req(
& mut node_id_buffer,
& mut self.filesystem,
& mut self.auth,
self.max_page_size_random_access_file,
self.max_page_size_blob_file,
parent,
type_of_file,
name,
user,
page_size
);
send_failed = client.transmit.send(
ClientProtocol::CreateFileResponse {
result: result
},
).is_err();
}
NodeProtocol::CreateDirecotryRequest {
parent,
name,
user,
} => {
trace!("Create directory request, user={}", user);
let result = Node::handle_create_directory_req(
& mut node_id_buffer,
& mut self.filesystem,
& mut self.auth,
parent,
name,
user
);
send_failed = client.transmit.send(
ClientProtocol::CreateDirectoryResponse {
result: result
},
).is_err();
}
NodeProtocol::OpenFileRequest { mode, file_descriptor, user } => {
trace!("Open file request, user={}", user);
let result = Node::handle_open_file_request(
& mut node_id_buffer,
& mut self.filesystem,
& mut self.auth,
& mut self.crypto,
mode,
file_descriptor,
user,
);
send_failed = client.transmit.send(
ClientProtocol::OpenFileResponse {
result: result
},
).is_err();
},
NodeProtocol::CountersRequest { user } => {
trace!("Counters request, user={}", user);
let result = Node::handle_counters_request(
& self.clients,
& mut self.filesystem,
& mut self.auth,
user,
);
send_failed = client.transmit.send(
ClientProtocol::CountersResponse {
result: result
},
).is_err();
},
NodeProtocol::QuerySystemRequest { user } => {
trace!("Query system request, user={}", user);
let result = Node::handle_query_system_request(
& mut self.auth,
& self.server,
self.started_at,
self.server_id,
user,
);
send_failed = client.transmit.send(
ClientProtocol::QuerySystemResponse {
result: result
},
).is_err();
},
NodeProtocol::QueryFsChildrenRequest {
user,
fd,
} => {
trace!("Query fs children request, user={}", user);
let result = Node::handle_query_fs_children_request(
& mut node_id_buffer,
& mut self.filesystem,
& mut self.auth,
user,
fd,
);
send_failed = client.transmit.send(
ClientProtocol::QueryFsChildrenResponse {
result: result
},
).is_err();
},
NodeProtocol::QueryFsElementPropertiesRequest {
user,
fd,
fd_parent,
} => {
trace!("Query fs element properties, user={}", user);
let result = Node::handle_query_fs_element_properties_request(
& mut node_id_buffer,
& mut self.filesystem,
& mut self.auth,
& mut self.crypto,
user,
fd,
fd_parent,
);
send_failed = client.transmit.send(
ClientProtocol::QueryFsElementPropertiesResponse {
result: result,
},
).is_err();
},
NodeProtocol::QueryFsElementRequest {
user,
fd,
} => {
trace!("Query fs element, user={}", user);
let result = Node::handle_query_fs_element_request(
& mut node_id_buffer,
& mut self.filesystem,
& mut self.auth,
& mut self.crypto,
user,
fd,
);
send_failed = client.transmit.send(
ClientProtocol::QueryFsElementResponse {
result: result,
},
).is_err();
},
NodeProtocol::DeleteRequest {
user,
fd,
} => {
trace!("Delete, user={}, fd={}", user, fd);
let result = Node::handle_delete_request(
& mut node_id_buffer,
& mut self.filesystem,
& mut self.auth,
& mut self.crypto,
user,
fd,
);
send_failed = client.transmit.send(
ClientProtocol::DeleteResponse {
result: result,
},
).is_err();
},
NodeProtocol::AddUserRequest {
user,
name,
} => {
trace!("Create user, user={}, name={}", user, name);
let result = Node::handle_create_user(
& mut self.auth,
user,
name,
);
send_failed = client.transmit.send(
ClientProtocol::AddUserGroupResponse {
result: result,
},
).is_err();
},
NodeProtocol::ModifyUser {
user,
name,
password,
expiration,
} => {
trace!("Modify user, user={}, name={}", user, name);
let result = Node::handle_modify_user(
& mut self.auth,
user,
name,
password,
expiration,
);
send_failed = client.transmit.send(
ClientProtocol::AddUserGroupResponse {
result: result,
},
).is_err();
},
NodeProtocol::AddGroupRequest {
user,
name
} => {
trace!("Create group, user={}, name={}", user, name);
let result = Node::handle_create_group(
& mut self.auth,
user,
name,
);
send_failed = client.transmit.send(
ClientProtocol::AddUserGroupResponse {
result: result,
},
).is_err();
},
NodeProtocol::ModifyGroup {
user,
name,
expiration,
} => {
trace!("Modify group, user={}, name={}", user, name);
let result = Node::handle_modify_group(
& mut self.auth,
user,
name,
expiration,
);
send_failed = client.transmit.send(
ClientProtocol::AddUserGroupResponse {
result: result,
},
).is_err();
},
}
if send_failed {
communication_to_client_failed = Some(client_index);
break ;
}
}
}
}
// Remove at most one failed client per loop iteration.
if let Some(index) = communication_to_client_failed {
info!("Removing index={}", index);
let client = self.clients.remove(index);
let _ = client.thread_handle.join();
}
match self.server.accept() {
Ok(None) => (),
Ok(Some(connection)) => {
is_processing = true;
let (tx_node, rx_node) = channel::<ClientProtocol>();
let (tx_client, rx_client) = channel::<NodeProtocol>();
let buffer_size = self.client_socket_buffer_size;
let max_inactivity_duration_secs = self.max_inactivity_duration_secs;
// Each client runs in its own thread and talks to the node
// exclusively over the two channels created above.
let handle = spawn( move || {
match Client::new(
connection,
rx_node,
tx_client,
buffer_size,
max_inactivity_duration_secs,
) {
Ok(mut client) => {
client.process();
},
Err(()) => {
error!("Failed to create client");
}
}
});
self.clients.push(ClientInfo {
transmit: tx_node,
receive: rx_client,
thread_handle: handle,
});
},
Err(()) => {
error!("Failed to accept new connection, closing");
break
},
};
// Throttle the loop when there was nothing to do.
if ! is_processing {
sleep(Duration::from_millis(100));
}
}
// Graceful shutdown: notify every client, join their threads, persist state.
for client in self.clients.iter() {
let _ = client.transmit.send(ClientProtocol::Shutdown {
reason: ShutdownReason::NodeClosing,
});
}
for client in self.clients.drain(..) {
if let Err(_) = client.thread_handle.join() {
error!("Failed to join client");
}
}
self.store()
.map_err(| () | error!("Failed to store node"))
? ;
Ok(())
}
/// Create a file under `parent_fd` on behalf of `user`.
/// Verifies the parent is a directory the user may write to, clamps/validates
/// the requested page size against the per-file-type maximum (the maximum is
/// used when no size is requested), then delegates to the filesystem.
fn handle_create_file_req(
node_id_buffer: & mut [NodeId],
filesystem: & mut Filesystem,
auth: & mut UserAuthority,
max_page_size_random_access_file: usize,
max_page_size_blob_file: usize,
parent_fd: FileDescriptor,
type_of_file: FileType,
name: String,
user: Id,
requested_page_size: Option<u64>,
) -> Result<(NodeId, FileProperties), ErrorResponse> {
let parent_id = Node::resolve_file_descriptor(
node_id_buffer,
filesystem,
parent_fd
) ? ;
// Scope the parent borrow so `filesystem` is free for create_file below.
{
let ref parent = filesystem.node(& parent_id).unwrap();
let parent = parent.to_directory()
.map_err(| _ | node_error_to_rsp(NodeError::ParentIsNotDirectory))
? ;
auth.is_authorized(parent.write(), & user, utc_timestamp())
.map_err(| () | node_error_to_rsp(NodeError::UnauthorizedOperation))
? ;
}
let page_size = match requested_page_size {
Some(value) => {
let value = value as usize;
let max_page_size = match type_of_file {
FileType::RandomAccess => max_page_size_random_access_file,
FileType::Blob => max_page_size_blob_file,
};
if value > max_page_size {
return Err(node_error_to_rsp(NodeError::InvalidPageSize));
}
value
},
None => {
match type_of_file {
FileType::RandomAccess => max_page_size_random_access_file,
FileType::Blob => max_page_size_blob_file,
}
}
};
filesystem.create_file(
& parent_id,
& name,
user,
type_of_file,
page_size,
).map_err(fs_error_to_rsp)
}
/// Create a directory under `parent_fd` on behalf of `user`, after checking
/// the parent is a directory the user may write to.
fn handle_create_directory_req(
node_id_buffer: & mut [NodeId],
filesystem: & mut Filesystem,
auth: & mut UserAuthority,
parent_fd: FileDescriptor,
name: String,
user: Id
) -> Result<NodeId, ErrorResponse> {
let parent_id = Node::resolve_file_descriptor(
node_id_buffer,
filesystem,
parent_fd
) ? ;
{
let ref parent = filesystem.node(& parent_id).unwrap();
let parent = parent.to_directory()
.map_err(| _ | node_error_to_rsp(NodeError::ParentIsNotDirectory))
? ;
auth.is_authorized(parent.write(), & user, utc_timestamp())
.map_err(| () | node_error_to_rsp(NodeError::UnauthorizedOperation))
? ;
}
// NOTE(review): `Filesystem::to_directory` reads like a conversion, not a
// creation; the file path above uses `create_file`. Confirm this is the
// intended creation API (e.g. vs. a `create_directory`) against Filesystem.
filesystem.to_directory(
& parent_id,
& name,
user
).map_err(fs_error_to_rsp)
}
/// Open a file for reading or read-write on behalf of `user`.
/// Picks the read or write authority based on `mode`, checks authorization,
/// and returns the FileAccess handle plus resolved id/properties.
fn handle_open_file_request(
node_id_buffer: & mut [NodeId],
fs: & mut Filesystem,
auth: & mut UserAuthority,
crypto: & mut Crypto,
mode: OpenMode,
file_descriptor: FileDescriptor,
user: Id,
) -> Result<(FileAccess, NodeId, FileProperties), ErrorResponse> {
let node_id = Node::resolve_file_descriptor(
node_id_buffer,
fs,
file_descriptor
) ? ;
let (properties, file_auth) = Node::resolve_file_properties(& node_id, fs, crypto) ? ;
let file = fs.mut_file(& node_id)
.map_err(fs_error_to_rsp)
? ;
// Read mode requires the read authority; read-write requires write.
let access = match mode {
OpenMode::Read => file_auth.read,
OpenMode::ReadWrite => file_auth.write,
};
auth.is_authorized(& access, & user, utc_timestamp())
.map_err(| () | node_error_to_rsp(NodeError::UnauthorizedOperation))
? ;
let access = file.open(& crypto, user)
.map_err(| () | node_error_to_rsp(NodeError::InternalError))
? ;
Ok((access, node_id, properties))
}
/// Return runtime counters. Admin-only: requires membership in ADMIN_GROUP.
fn handle_counters_request(
clients: & Vec<ClientInfo>,
fs: & mut Filesystem,
auth: & mut UserAuthority,
user: Id,
) -> Result<Counters, ErrorResponse> {
auth.is_authorized(& ADMIN_GROUP, & user, utc_timestamp())
.map_err(| () | node_error_to_rsp(NodeError::UnauthorizedOperation))
? ;
Ok(Counters {
active_connections: clients.len() as u32,
number_of_files: fs.number_of_files() as u32,
number_of_open_files: fs.number_of_open_files() as u32,
})
}
fn handle_query_system_request(
auth: & mut UserAuthority,
server: & TlsServer,
started_at: Timestamp,
server_id: u64,<|fim▁hole|> if auth.is_authorized(& ADMIN_GROUP, & user, utc_timestamp()).is_ok() {
Some(AdminSystemInformation {
certificate_expiration: server.certificate_expiration(),
})
} else {
None
}
};
Ok(SystemInformation {
started_at: started_at,
server_id: server_id,
admin_system_information: admin_system_information,
})
}
/// List the children of the directory referenced by `file_descriptor`.
/// Requires read authority on the directory; resolves read/write authority
/// names for child directories, and open/size/revision info for child files.
fn handle_query_fs_children_request(
node_id_buffer: & mut [NodeId],
fs: & mut Filesystem,
auth: & mut UserAuthority,
user: Id,
file_descriptor: FileDescriptor,
) -> Result<Vec<FileSystemListElement>, ErrorResponse> {
let node_id = Node::resolve_file_descriptor(
node_id_buffer,
fs,
file_descriptor
) ? ;
// Clone the child list so the directory borrow ends before we walk nodes.
let children: Vec<Child> = {
let node = fs.node(& node_id)
.map_err(fs_error_to_rsp)
? ;
let directory = node.to_directory()
.map_err(fs_error_to_rsp)
? ;
auth.is_authorized(& directory.read(), & user, utc_timestamp())
.map_err(| () | node_error_to_rsp(NodeError::UnauthorizedOperation))
? ;
directory.clone_children()
};
let mut result = Vec::with_capacity(children.len());
for ref child in children {
// Invariant: every child entry references a node that is set; a violation
// means filesystem corruption, hence the hard panic.
if fs.node(& child.node_id).unwrap().is_not_set() {
panic!();
}
let is_file = fs.node(& child.node_id).unwrap().is_file();
if is_file {
let file = fs.mut_file(& child.node_id).unwrap();
let is_open = file.is_open();
let properties = file.cached_properties().unwrap();
result.push(
FileSystemListElement::File {
name: child.name.clone(),
node_id: child.node_id.clone(),
revision: properties.revision,
file_type: properties.file_type,
size: properties.size,
is_open: is_open,
})
} else {
let dir = fs.node(& child.node_id)
.unwrap()
.to_directory()
.unwrap()
;
let read = Node::resolve_id(auth, dir.read())
.map_err(| () | node_error_to_rsp(NodeError::FailedToResolveAuthority))
? ;
let write = Node::resolve_id(auth, dir.write())
.map_err(| () | node_error_to_rsp(NodeError::FailedToResolveAuthority))
? ;
result.push(
FileSystemListElement::Directory {
name: child.name.clone(),
node_id: child.node_id.clone(),
read: read,
write: write,
})
}
}
Ok(result)
}
/// Return lightweight properties (name, id, and for files type/revision/size)
/// of the element `file_descriptor`, located via its parent directory.
/// Authorization is checked against the PARENT directory's read authority.
fn handle_query_fs_element_properties_request(
node_id_buffer: & mut [NodeId],
fs: & mut Filesystem,
auth: & mut UserAuthority,
crypto: & mut Crypto,
user: Id,
file_descriptor: FileDescriptor,
parent_file_descriptor: FileDescriptor,
) -> Result<FilesystemElementProperties, ErrorResponse> {
let node_id = Node::resolve_file_descriptor(
node_id_buffer,
fs,
file_descriptor
) ? ;
let parent_node_id = Node::resolve_file_descriptor(
node_id_buffer,
fs,
parent_file_descriptor
) ? ;
// Look the element up by id in its parent to recover its name and the
// parent's read authority; UnknownFile if it is not actually a child.
let (name, auth_read) = {
let parent_node = fs.node(& parent_node_id)
.map_err(fs_error_to_rsp)
? ;
let parent = parent_node.to_directory()
.map_err(| _ | node_error_to_rsp(NodeError::ParentIsNotDirectory))
? ;
let node_index = parent.child_with_node_id(& node_id)
.map_err(| () | node_error_to_rsp(NodeError::UnknownFile))
? ;
let name = & parent.children().nth(node_index).unwrap().name;
(name.clone(), parent.read().clone())
};
auth.is_authorized(& auth_read, & user, utc_timestamp())
.map_err(| () | node_error_to_rsp(NodeError::UnauthorizedOperation))
? ;
let is_file = fs.node(& node_id).unwrap().is_file();
if is_file {
let file = fs.mut_file(& node_id).unwrap();
let properties = file.properties(crypto).unwrap();
Ok(FilesystemElementProperties::File {
name: name,
node_id: node_id,
file_type: properties.file_type,
revision: properties.revision,
size: properties.size,
})
} else {
Ok(FilesystemElementProperties::Directory {
name: name,
node_id: node_id,
})
}
}
fn handle_query_fs_element_request(
node_id_buffer: & mut [NodeId],
fs: & mut Filesystem,
auth: & mut UserAuthority,
crypto: & mut Crypto,
user: Id,
file_descriptor: FileDescriptor,
) -> Result<FilesystemElement, ErrorResponse> {
let node_id = Node::resolve_file_descriptor(
node_id_buffer,
fs,
file_descriptor
) ? ;
let is_file = fs.node(& node_id).unwrap().is_file();
if is_file {
let (properties, file_auth) = Node::resolve_file_properties(& node_id, fs, crypto) ? ;
let read = Node::resolve_id(auth, & file_auth.read)
.map_err(| () | node_error_to_rsp(NodeError::FailedToResolveAuthority))
? ;
let write = Node::resolve_id(auth, & file_auth.write)
.map_err(| () | node_error_to_rsp(NodeError::FailedToResolveAuthority))
? ;
let created_by = Node::resolve_id(auth, & properties.created_by)
.map_err(| () | node_error_to_rsp(NodeError::FailedToResolveAuthority))
? ;
let modified_by = Node::resolve_id(auth, & properties.modified_by)
.map_err(| () | node_error_to_rsp(NodeError::FailedToResolveAuthority))
? ;
let desc = FilesystemElement::File {
properties: properties,
created_by: created_by,
modified_by: modified_by,
read: read,
write: write,
node_id: node_id,
};
auth.is_authorized(& file_auth.read, & user, utc_timestamp())
.map_err(| () | node_error_to_rsp(NodeError::UnauthorizedOperation))
? ;
Ok(desc)
} else {
let node = fs.node(& node_id)
.map_err(fs_error_to_rsp)
? ;
match *node {
FsNode::Directory { ref directory } => {
let read = Node::resolve_id(auth, directory.read())
.map_err(| () | node_error_to_rsp(NodeError::FailedToResolveAuthority))
? ;
let write = Node::resolve_id(auth, directory.write())
.map_err(| () | node_error_to_rsp(NodeError::FailedToResolveAuthority))
? ;
let desc = FilesystemElement::Directory {
created_at: directory.created(),
modified_at: directory.modified(),
read: read,
write: write,
node_id: node_id,
};
auth.is_authorized(& directory.read(), & user, utc_timestamp())
.map_err(| () | node_error_to_rsp(NodeError::UnauthorizedOperation))
? ;
return Ok(desc);
},
FsNode::File { .. } => panic!(),
FsNode::NotSet { } => panic!(),
}
}
}
fn handle_delete_request(
node_id_buffer: & mut [NodeId],
fs: & mut Filesystem,
auth: & mut UserAuthority,
crypto: & mut Crypto,
user: Id,
fd: FileDescriptor,
) -> Result<(), ErrorResponse> {
let node_id = Node::resolve_file_descriptor(
node_id_buffer,
fs,
fd
) ? ;
let parent_node_id = {
let is_file = fs.node(& node_id).unwrap().is_file();
if ! is_file {
fs.node(& node_id)
.unwrap()
.to_directory()
.unwrap()
.parent()
} else {
let file = fs.mut_file(& node_id).unwrap();
let properties = file.properties(& crypto)
.map_err(| () | node_error_to_rsp(NodeError::InternalCommunicationError))
? ;
properties.parent
}
};
let index = {
let node = fs.node(& parent_node_id)
.map_err(fs_error_to_rsp)
? ;
let directory = node.to_directory()
.map_err(| _ | node_error_to_rsp(NodeError::ParentIsNotDirectory))
? ;
auth.is_authorized(& directory.write(), & user, utc_timestamp())
.map_err(| () | node_error_to_rsp(NodeError::UnauthorizedOperation))
? ;
let index = directory.child_with_node_id(& node_id)
.map_err(| () | node_error_to_rsp(NodeError::UnknownFile))
? ;
index
};
fs.delete(& parent_node_id, index, node_id)
.map_err(fs_error_to_rsp)
? ;
Ok(())
}
fn handle_create_user(
auth: & mut UserAuthority,
user: Id,
name: String,
) -> Result<(), ErrorResponse> {
let current_time = utc_timestamp();
let default_password = "";
auth.is_authorized(& ADMIN_GROUP, & user, current_time)
.map_err(| () | node_error_to_rsp(NodeError::UnauthorizedOperation))
? ;
auth.add_user(
& name,
default_password,
Some(current_time - 1)
)
.map_err(| () | node_error_to_rsp(NodeError::AuthorityError))
? ;
Ok(())
}
fn handle_modify_user(
auth: & mut UserAuthority,
user: Id,
name: String,
password: Option<String>,
expiration: Option<Option<Timestamp>>,
) -> Result<(), ErrorResponse> {
let current_time = utc_timestamp();
let target_user = auth.resolve_user_id(& name)
.map_err(| () | node_error_to_rsp(NodeError::UnknownAuthority))
? ;
if user != target_user && auth.is_authorized(& ADMIN_GROUP, & user, current_time).is_err() {
return Err(node_error_to_rsp(NodeError::UnauthorizedOperation));
}
if let Some(pw) = password {
auth.modify_user_password(& target_user, & pw)
.map_err(| () | node_error_to_rsp(NodeError::AuthorityError))
? ;
}
if let Some(ex) = expiration {
auth.modify_user_expiration(& target_user, ex)
.map_err(| () | node_error_to_rsp(NodeError::AuthorityError))
? ;
}
Ok(())
}
fn handle_create_group(
auth: & mut UserAuthority,
user: Id,
name: String,
) -> Result<(), ErrorResponse> {
let current_time = utc_timestamp();
auth.is_authorized(& ADMIN_GROUP, & user, current_time)
.map_err(| () | node_error_to_rsp(NodeError::UnauthorizedOperation))
? ;
auth.add_group(
& name,
Some(current_time - 1)
)
.map_err(| () | node_error_to_rsp(NodeError::AuthorityError))
? ;
Ok(())
}
fn handle_modify_group(
auth: & mut UserAuthority,
user: Id,
name: String,
expiration: Option<Option<Timestamp>>,
) -> Result<(), ErrorResponse> {
let current_time = utc_timestamp();
let target_group = auth.resolve_group_id(& name)
.map_err(| () | node_error_to_rsp(NodeError::UnknownAuthority))
? ;
auth.is_authorized(& ADMIN_GROUP, & user, current_time)
.map_err(| () | node_error_to_rsp(NodeError::UnauthorizedOperation))
? ;
if let Some(ex) = expiration {
auth.modify_group_expiration(& target_group, ex)
.map_err(| () | node_error_to_rsp(NodeError::AuthorityError))
? ;
}
Ok(())
}
fn resolve_file_properties(
file_node_id: & NodeId,
fs: & mut Filesystem,
crypto: & mut Crypto,
) -> Result<(FileProperties, FilesystemElementAuthorityId), ErrorResponse> {
let properties = fs.mut_file(& file_node_id)
.map_err(fs_error_to_rsp)
.map(| file |
file.properties(& crypto)
.map_err(| () | node_error_to_rsp(NodeError::InternalCommunicationError))
)
? ? ;
let parent = fs.node(& properties.parent)
.map_err(fs_error_to_rsp)
? ;
let directory = parent.to_directory()
.map_err(| _ | node_error_to_rsp(NodeError::ParentIsNotDirectory))
? ;
Ok((
properties,
FilesystemElementAuthorityId {
read: directory.read().clone(),
write: directory.write().clone(),
}
))
}
fn resolve_id(
auth: & UserAuthority,
id: & Id,
) -> Result<Authority, ()> {
let name = auth.resolve_id_name(id) ? ;
match *id {
Id::User(_) => Ok(Authority::User(name)),
Id::Group(_) => Ok(Authority::Group(name)),
}
}
fn resolve_file_descriptor(
node_id_buffer: & mut [NodeId],
filesystem: & Filesystem,
file_descriptor: FileDescriptor
) -> Result<NodeId, ErrorResponse> {
match file_descriptor {
FileDescriptor::NodeId(id) => {
let _ = filesystem.node(& id)
.map_err(fs_error_to_rsp)
? ;
Ok(id)
},
FileDescriptor::Path(path) => {
let size = filesystem.resolve_path_from_root(
& path,
node_id_buffer
).map_err(fs_error_to_rsp)
? ;
Ok(node_id_buffer[size - 1])
},
}
}
}<|fim▁end|> | user: Id,
) -> Result<SystemInformation, ErrorResponse> {
let admin_system_information = { |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>##########################################################################
#
# Copyright (c) 2012-2014, John Haddon. All rights reserved.
# Copyright (c) 2013-2014, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import GafferScene
from _GafferSceneTest import *
from SceneTestCase import SceneTestCase
from ScenePlugTest import ScenePlugTest
from GroupTest import GroupTest
from SceneTimeWarpTest import SceneTimeWarpTest
from SceneProceduralTest import SceneProceduralTest
from CubeTest import CubeTest
from PlaneTest import PlaneTest
from SphereTest import SphereTest
from InstancerTest import InstancerTest
from ObjectToSceneTest import ObjectToSceneTest
from CameraTest import CameraTest
from OutputsTest import OutputsTest
from CustomOptionsTest import CustomOptionsTest
from DeleteOptionsTest import DeleteOptionsTest
from CopyOptionsTest import CopyOptionsTest
from SceneNodeTest import SceneNodeTest
from PathMatcherTest import PathMatcherTest
from PathFilterTest import PathFilterTest
from ShaderAssignmentTest import ShaderAssignmentTest
from CustomAttributesTest import CustomAttributesTest
from AlembicSourceTest import AlembicSourceTest
from DeletePrimitiveVariablesTest import DeletePrimitiveVariablesTest
from SeedsTest import SeedsTest
from SceneContextVariablesTest import SceneContextVariablesTest
from SubTreeTest import SubTreeTest
from OpenGLAttributesTest import OpenGLAttributesTest
from StandardOptionsTest import StandardOptionsTest
from ScenePathTest import ScenePathTest
from PathMatcherDataTest import PathMatcherDataTest
from LightTest import LightTest
from TestRender import TestRender
from RenderTest import RenderTest
from OpenGLShaderTest import OpenGLShaderTest
from OpenGLRenderTest import OpenGLRenderTest
from TransformTest import TransformTest
from AimConstraintTest import AimConstraintTest
from PruneTest import PruneTest
from ShaderTest import ShaderTest<|fim▁hole|>from PointConstraintTest import PointConstraintTest
from SceneReaderTest import SceneReaderTest
from SceneWriterTest import SceneWriterTest
from IsolateTest import IsolateTest
from DeleteAttributesTest import DeleteAttributesTest
from UnionFilterTest import UnionFilterTest
from SceneSwitchTest import SceneSwitchTest
from ShaderSwitchTest import ShaderSwitchTest
from ParentConstraintTest import ParentConstraintTest
from ParentTest import ParentTest
from StandardAttributesTest import StandardAttributesTest
from PrimitiveVariablesTest import PrimitiveVariablesTest
from DuplicateTest import DuplicateTest
from ModuleTest import ModuleTest
from GridTest import GridTest
from SetTest import SetTest
from FreezeTransformTest import FreezeTransformTest
from SetFilterTest import SetFilterTest
from FilterTest import FilterTest
from SceneAlgoTest import SceneAlgoTest
from CoordinateSystemTest import CoordinateSystemTest
from DeleteOutputsTest import DeleteOutputsTest
from ExternalProceduralTest import ExternalProceduralTest
from ClippingPlaneTest import ClippingPlaneTest
from FilterSwitchTest import FilterSwitchTest
from PointsTypeTest import PointsTypeTest
from ParametersTest import ParametersTest
from SceneFilterPathFilterTest import SceneFilterPathFilterTest
from AttributeVisualiserTest import AttributeVisualiserTest
from SceneLoopTest import SceneLoopTest
from SceneProcessorTest import SceneProcessorTest
from MeshToPointsTest import MeshToPointsTest
from InteractiveRenderTest import InteractiveRenderTest
from FilteredSceneProcessorTest import FilteredSceneProcessorTest
from ShaderBallTest import ShaderBallTest
from LightTweaksTest import LightTweaksTest
from FilterResultsTest import FilterResultsTest
if __name__ == "__main__":
import unittest
unittest.main()<|fim▁end|> | from TextTest import TextTest
from MapProjectionTest import MapProjectionTest
from MapOffsetTest import MapOffsetTest |
<|file_name|>ccdetect.rs<|end_file_name|><|fim▁begin|>// Copyright (C) 2020 Matthew Waters <matthew@centricular.com>
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Library General Public
// License as published by the Free Software Foundation; either
// version 2 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Library General Public License for more details.
//
// You should have received a copy of the GNU Library General Public
// License along with this library; if not, write to the
// Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
// Boston, MA 02110-1335, USA.
use gst::prelude::*;
use std::sync::{Arc, Mutex};
use pretty_assertions::assert_eq;
fn init() {
use std::sync::Once;
static INIT: Once = Once::new();
INIT.call_once(|| {
gst::init().unwrap();
gstrsclosedcaption::plugin_register_static().unwrap();
});
}
struct NotifyState {
cc608_count: u32,
cc708_count: u32,
}
impl Default for NotifyState {
fn default() -> Self {
NotifyState {
cc608_count: 0,
cc708_count: 0,
}
}
}
macro_rules! assert_push_data {
($h:expr, $state:expr, $data:expr, $ts:expr, $cc608_count:expr, $cc708_count:expr) => {
let mut buf = gst::Buffer::from_mut_slice($data);
buf.get_mut().unwrap().set_pts($ts);
assert_eq!($h.push(buf), Ok(gst::FlowSuccess::Ok));
{
let state_guard = $state.lock().unwrap();
assert_eq!(state_guard.cc608_count, $cc608_count);
assert_eq!(state_guard.cc708_count, $cc708_count);
}
};
}
#[test]
fn test_have_cc_data_notify() {
init();
let valid_cc608_data = vec![0xfc, 0x80, 0x81];
let invalid_cc608_data = vec![0xf8, 0x80, 0x81];
let valid_cc708_data = vec![0xfe, 0x80, 0x81];
let invalid_cc708_data = vec![0xfa, 0x80, 0x81];
let mut h = gst_check::Harness::new("ccdetect");
h.set_src_caps_str("closedcaption/x-cea-708,format=cc_data");
h.set_sink_caps_str("closedcaption/x-cea-708,format=cc_data");
h.get_element()
.unwrap()
.set_property("window", &(500_000_000 as u64))
.unwrap();
let state = Arc::new(Mutex::new(NotifyState::default()));
let state_c = state.clone();
h.get_element()
.unwrap()
.connect_notify(Some("cc608"), move |o, _pspec| {
let mut state_guard = state_c.lock().unwrap();
state_guard.cc608_count += 1;
o.get_property("cc608").unwrap();
});
let state_c = state.clone();
h.get_element()
.unwrap()
.connect_notify(Some("cc708"), move |o, _pspec| {
let mut state_guard = state_c.lock().unwrap();
state_guard.cc708_count += 1;
o.get_property("cc708").unwrap();
});
/* valid cc608 data moves cc608 property to true */
assert_push_data!(h, state, valid_cc608_data, 0.into(), 1, 0);
/* invalid cc608 data moves cc608 property to false */
assert_push_data!(h, state, invalid_cc608_data, 1_000_000_000.into(), 2, 0);
/* valid cc708 data moves cc708 property to true */
assert_push_data!(h, state, valid_cc708_data, 2_000_000_000.into(), 2, 1);
/* invalid cc708 data moves cc708 property to false */
assert_push_data!(h, state, invalid_cc708_data, 3_000_000_000.into(), 2, 2);
}
#[test]
fn test_cc_data_window() {
init();
let valid_cc608_data = vec![0xfc, 0x80, 0x81];
let invalid_cc608_data = vec![0xf8, 0x80, 0x81];
let mut h = gst_check::Harness::new("ccdetect");
h.set_src_caps_str("closedcaption/x-cea-708,format=cc_data");
h.set_sink_caps_str("closedcaption/x-cea-708,format=cc_data");
h.get_element()
.unwrap()
.set_property("window", &500_000_000u64)
.unwrap();
let state = Arc::new(Mutex::new(NotifyState::default()));
let state_c = state.clone();
h.get_element()
.unwrap()
.connect_notify(Some("cc608"), move |_o, _pspec| {
let mut state_guard = state_c.lock().unwrap();
state_guard.cc608_count += 1;
});
let state_c = state.clone();
h.get_element()
.unwrap()
.connect_notify(Some("cc708"), move |_o, _pspec| {
let mut state_guard = state_c.lock().unwrap();
state_guard.cc708_count += 1;
});
/* valid cc608 data moves cc608 property to true */
assert_push_data!(h, state, valid_cc608_data.clone(), 0.into(), 1, 0);
/* valid cc608 data moves within window */
assert_push_data!(h, state, valid_cc608_data.clone(), 300_000_000.into(), 1, 0);
/* invalid cc608 data before window expires, no change */
assert_push_data!(
h,
state,
invalid_cc608_data.clone(),
600_000_000.into(),
1,
0
);
/* invalid cc608 data after window expires, cc608 changes to false */
assert_push_data!(h, state, invalid_cc608_data, 1_000_000_000.into(), 2, 0);
/* valid cc608 data before window expires, no change */
assert_push_data!(
h,
state,
valid_cc608_data.clone(),
1_300_000_000.into(),
2,
0
);
/* valid cc608 data after window expires, property changes */
assert_push_data!(h, state, valid_cc608_data, 1_600_000_000.into(), 3, 0);<|fim▁hole|> init();
let valid_cc608_data = vec![
0x96, 0x69, /* cdp magic bytes */
0x10, /* length of cdp packet */
0x8f, /* framerate */
0x43, /* flags */
0x00, 0x00, /* sequence counter */
0x72, /* cc_data byte header */
0xe1, /* n cc_data triples with 0xe0 as reserved bits */
0xfc, 0x80, 0x81, /* cc_data triple */
0x74, /* cdp end of frame byte header */
0x00, 0x00, /* sequence counter */
0x60, /* checksum */
];
let invalid_cc608_data = vec![
0x96, 0x69, 0x10, 0x8f, 0x43, 0x00, 0x00, 0x72, 0xe1, 0xf8, 0x81, 0x82, 0x74, 0x00, 0x00,
0x60,
];
let mut h = gst_check::Harness::new("ccdetect");
h.set_src_caps_str("closedcaption/x-cea-708,format=cdp");
h.set_sink_caps_str("closedcaption/x-cea-708,format=cdp");
h.get_element()
.unwrap()
.set_property("window", &500_000_000u64)
.unwrap();
let state = Arc::new(Mutex::new(NotifyState::default()));
let state_c = state.clone();
h.get_element()
.unwrap()
.connect_notify(Some("cc608"), move |_o, _pspec| {
let mut state_guard = state_c.lock().unwrap();
state_guard.cc608_count += 1;
});
let state_c = state.clone();
h.get_element()
.unwrap()
.connect_notify(Some("cc708"), move |_o, _pspec| {
let mut state_guard = state_c.lock().unwrap();
state_guard.cc708_count += 1;
});
/* valid cc608 data moves cc608 property to true */
assert_push_data!(h, state, valid_cc608_data, 0.into(), 1, 0);
/* invalid cc608 data moves cc608 property to false */
assert_push_data!(h, state, invalid_cc608_data, 1_000_000_000.into(), 2, 0);
}
#[test]
fn test_malformed_cdp_notify() {
init();
let too_short = vec![0x96, 0x69];
let wrong_magic = vec![
0x00, 0x00, 0x10, 0x8f, 0x43, 0x00, 0x00, 0x72, 0xe1, 0xfc, 0x81, 0x82, 0x74, 0x00, 0x00,
0x60,
];
let length_too_long = vec![
0x96, 0x69, 0x20, 0x8f, 0x43, 0x00, 0x00, 0x72, 0xe1, 0xfc, 0x81, 0x82, 0x74, 0x00, 0x00,
0x60,
];
let length_too_short = vec![
0x96, 0x69, 0x00, 0x8f, 0x43, 0x00, 0x00, 0x72, 0xe1, 0xfc, 0x81, 0x82, 0x74, 0x00, 0x00,
0x60,
];
let wrong_cc_data_header_byte = vec![
0x96, 0x69, 0x10, 0x8f, 0x43, 0x00, 0x00, 0xff, 0xe1, 0xfc, 0x81, 0x82, 0x74, 0x00, 0x00,
0x60,
];
let big_cc_count = vec![
0x96, 0x69, 0x10, 0x8f, 0x43, 0x00, 0x00, 0x72, 0xef, 0xfc, 0x81, 0x82, 0x74, 0x00, 0x00,
0x60,
];
let wrong_cc_count_reserved_bits = vec![
0x96, 0x69, 0x10, 0x8f, 0x43, 0x00, 0x00, 0x72, 0x01, 0xfc, 0x81, 0x82, 0x74, 0x00, 0x00,
0x60,
];
let cc608_after_cc708 = vec![
0x96, 0x69, 0x13, 0x8f, 0x43, 0x00, 0x00, 0x72, 0xe2, 0xfe, 0x81, 0x82, 0xfc, 0x83, 0x84,
0x74, 0x00, 0x00, 0x60,
];
let mut h = gst_check::Harness::new("ccdetect");
h.set_src_caps_str("closedcaption/x-cea-708,format=cdp");
h.set_sink_caps_str("closedcaption/x-cea-708,format=cdp");
h.get_element()
.unwrap()
.set_property("window", &0u64)
.unwrap();
let state = Arc::new(Mutex::new(NotifyState::default()));
let state_c = state.clone();
h.get_element()
.unwrap()
.connect_notify(Some("cc608"), move |_o, _pspec| {
let mut state_guard = state_c.lock().unwrap();
state_guard.cc608_count += 1;
});
let state_c = state.clone();
h.get_element()
.unwrap()
.connect_notify(Some("cc708"), move |_o, _pspec| {
let mut state_guard = state_c.lock().unwrap();
state_guard.cc708_count += 1;
});
/* all invalid data does not change properties */
assert_push_data!(h, state, too_short, 0.into(), 0, 0);
assert_push_data!(h, state, wrong_magic, 1_000.into(), 0, 0);
assert_push_data!(h, state, length_too_long, 2_000.into(), 0, 0);
assert_push_data!(h, state, length_too_short, 3_000.into(), 0, 0);
assert_push_data!(h, state, wrong_cc_data_header_byte, 4_000.into(), 0, 0);
assert_push_data!(h, state, big_cc_count, 5_000.into(), 0, 0);
assert_push_data!(h, state, wrong_cc_count_reserved_bits, 6_000.into(), 0, 0);
assert_push_data!(h, state, cc608_after_cc708, 7_000.into(), 0, 0);
}
#[test]
fn test_gap_events() {
init();
let valid_cc608_data = vec![0xfc, 0x80, 0x81];
let mut h = gst_check::Harness::new("ccdetect");
h.set_src_caps_str("closedcaption/x-cea-708,format=cc_data");
h.set_sink_caps_str("closedcaption/x-cea-708,format=cc_data");
h.get_element()
.unwrap()
.set_property("window", &500_000_000u64)
.unwrap();
let state = Arc::new(Mutex::new(NotifyState::default()));
let state_c = state.clone();
h.get_element()
.unwrap()
.connect_notify(Some("cc608"), move |_o, _pspec| {
let mut state_guard = state_c.lock().unwrap();
state_guard.cc608_count += 1;
});
let state_c = state.clone();
h.get_element()
.unwrap()
.connect_notify(Some("cc708"), move |_o, _pspec| {
let mut state_guard = state_c.lock().unwrap();
state_guard.cc708_count += 1;
});
/* valid cc608 data moves cc608 property to true */
assert_push_data!(h, state, valid_cc608_data, 0.into(), 1, 0);
/* pushing gap event within the window changes nothing */
assert_eq!(
h.push_event(gst::event::Gap::builder(100_000_000.into(), 1.into()).build()),
true
);
{
let state_guard = state.lock().unwrap();
assert_eq!(state_guard.cc608_count, 1);
assert_eq!(state_guard.cc708_count, 0);
}
/* pushing gap event outside the window moves cc608 property to false */
assert_eq!(
h.push_event(gst::event::Gap::builder(1_000_000_000.into(), 1.into()).build()),
true
);
{
let state_guard = state.lock().unwrap();
assert_eq!(state_guard.cc608_count, 2);
assert_eq!(state_guard.cc708_count, 0);
}
}<|fim▁end|> | }
#[test]
fn test_have_cdp_notify() { |
<|file_name|>maShape.cc<|end_file_name|><|fim▁begin|>/******************************************************************************
Copyright 2013 Scientific Computation Research Center,
Rensselaer Polytechnic Institute. All rights reserved.
The LICENSE file included with this distribution describes the terms
of the SCOREC Non-Commercial License this program is distributed under.
*******************************************************************************/
#include <PCU.h>
#include "maShape.h"
#include "maSize.h"
#include "maAdapt.h"
#include "maSnap.h"
#include "maOperator.h"
#include "maEdgeSwap.h"
#include "maDoubleSplitCollapse.h"
#include "maSingleSplitCollapse.h"
#include "maFaceSplitCollapse.h"
#include "maShortEdgeRemover.h"
#include "maShapeHandler.h"
#include "maBalance.h"
#include "maDBG.h"
#include <pcu_util.h>
namespace ma {
/* projects vertex 3 onto the plane
of the bottom triangle and returns
the zone in which it lands as a bit code.
Each bit indicates whether the area coordinate
of that vertex is positive.
*/
int getSliverCode(
Adapt* a,
Entity* tet)
{
SizeField* sf = a->sizeField;
Mesh* m = a->mesh;
Matrix J,Q;
apf::MeshElement* me = apf::createMeshElement(m,tet);
Vector center(.25,.25,.25);
apf::getJacobian(me,center,J);
sf->getTransform(me,center,Q);
J = J*Q; //Jacobian in metric space
apf::destroyMeshElement(me);
int code = 0;
// check first face
Entity* fs[4];
m->getDownward(tet, 2, fs);
double f0Qual = a->shape->getQuality(fs[0]);
if ((f0Qual*f0Qual*f0Qual > a->input->goodQuality*a->input->goodQuality)) {
// if its okay, use it for projection
Vector v03 = J[2];
J[2] = apf::cross(J[0],J[1]); //face normal towards v[3]
Vector projected = v03 - apf::project(v03,J[2]); //v[3] projected to face
Matrix inverseMap = apf::invert(apf::transpose(J));
Vector basisPoint = inverseMap * projected;
Vector areaPoint(1-basisPoint[0]-basisPoint[1],
basisPoint[0],
basisPoint[1]);
for (int i=0; i < 3; ++i)
if (areaPoint[i] > 0)
code |= (1<<i);
for (int i=0; i < 3; ++i)
if (areaPoint[i] > -0.10 && areaPoint[i] < 0.10)
code |= ((1<<i) << 3);
} else {
// else, project second edge on first and use different code
// one bit to tell that use of the different code is suggested
code |= (1<<6);
Vector v02 = J[1];
Vector projected = apf::project(v02, J[0]);
J[2] = apf::cross(J[0],J[1]); //face normal towards v[3]
Matrix inverseMap = apf::invert(apf::transpose(J));
Vector basisPoint = inverseMap * projected;
Vector areaPoint(1-basisPoint[0]-basisPoint[1],
basisPoint[0],
basisPoint[1]);
for (int i=0; i < 2; ++i)
if (areaPoint[i] > 0)
code |= ((1<<i) << 7);
for (int i=0; i < 3; ++i)
if (areaPoint[i] > -0.20 && areaPoint[i] < 0.20)
code |= ((1<<i) << 9);
}
PCU_ALWAYS_ASSERT(code);
return code;
}
CodeMatch matchSliver(
Adapt* a,
Entity* tet)
{
/* TODO: make table auto-generated by the sliverCodeMatch program */
CodeMatch const table2d[4][4] =
{{{-1,-1}, {-1,-1}, {-1,-1}, {-1,-1}},
{{ 7, 2}, {-1,-1}, { 3, 3}, {-1,-1}},
{{ 1, 2}, { 2, 3}, {-1,-1}, {-1,-1}},
{{ 3, 2}, { 2, 3}, { 3, 3}, {-1,-1}}
};
CodeMatch const table[8][8] =
{{{-1,-1}, {-1,-1}, {-1,-1}, {-1,-1}, {-1,-1}, {-1,-1}, {-1,-1}, {-1,-1}},
{{ 4, 1}, {-1,-1}, {10, 2}, { 6, 3}, { 4, 2}, { 5, 3}, { 0, 3}, {-1,-1}},
{{ 1, 1}, { 8, 2}, {-1,-1}, { 6, 3}, { 9, 2}, { 5, 3}, { 0, 3}, {-1,-1}},
{{ 2, 0}, { 8, 2}, {10, 2}, {-1,-1}, { 0, 2}, { 5, 3}, { 0, 3}, {-1,-1}},
{{ 2, 1}, {11, 2}, { 2, 2}, { 6, 3}, {-1,-1}, { 5, 3}, { 0, 3}, {-1,-1}},
{{ 0, 0}, {11, 2}, { 6, 2}, { 6, 3}, { 4, 2}, {-1,-1}, { 0, 3}, {-1,-1}},
{{ 1, 0}, { 5, 2}, { 2, 2}, { 6, 3}, { 9, 2}, { 5, 3}, {-1,-1}, {-1,-1}},
{{ 0, 1}, { 5, 2}, { 6, 2}, { 6, 3}, { 0, 2}, { 5, 3}, { 0, 3}, {-1,-1}}
};
int code = getSliverCode(a,tet);
if ((code >> 6) & 1)
return table2d[(code >> 7) & 3][(code >> 9) & 3];
else
return table[code & 7][(code >> 3) & 7];
}
struct IsBadQuality : public Predicate
{
IsBadQuality(Adapt* a_):a(a_) {}
bool operator()(Entity* e)
{
return a->shape->getQuality(e) < a->input->goodQuality;
}
Adapt* a;
};
int markBadQuality(Adapt* a)
{
IsBadQuality p(a);
return markEntities(a, a->mesh->getDimension(), p, BAD_QUALITY, OK_QUALITY);
}
void unMarkBadQuality(Adapt* a)
{
Mesh* m = a->mesh;
Iterator* it;
Entity* e;
int count = 0;
it = m->begin(m->getDimension());
while ((e = m->iterate(it))) {
if (getFlag(a, e, ma::BAD_QUALITY))
clearFlag(a, e, ma::BAD_QUALITY);
count++;
}
m->end(it);
}
double getMinQuality(Adapt* a)
{
PCU_ALWAYS_ASSERT(a);
Mesh* m;
m = a->mesh;
PCU_ALWAYS_ASSERT(m);
Iterator* it = m->begin(m->getDimension());
Entity* e;
double minqual = 1;
while ((e = m->iterate(it))) {
if (!apf::isSimplex(m->getType(e)))
continue;
double qual = a->shape->getQuality(e);
if (qual < minqual)
minqual = qual;
}
m->end(it);
return PCU_Min_Double(minqual);
}
class ShortEdgeFixer : public Operator
{
public:
ShortEdgeFixer(Adapt* a):
remover(a)
{
adapter = a;
mesh = a->mesh;
sizeField = a->sizeField;
shortEdgeRatio = a->input->maximumEdgeRatio;
nr = nf = 0;
element = 0;
}
virtual ~ShortEdgeFixer()
{
}
virtual int getTargetDimension() {return mesh->getDimension();}
virtual bool shouldApply(Entity* e)
{
if ( ! getFlag(adapter,e,BAD_QUALITY))
return false;
element = e;
Downward edges;
int n = mesh->getDownward(element,1,edges);
double l[6] = {};
for (int i=0; i < n; ++i)
l[i] = sizeField->measure(edges[i]);
double maxLength;
double minLength;
Entity* shortEdge;
maxLength = minLength = l[0];
shortEdge = edges[0];
for (int i=1; i < n; ++i)
{
if (l[i] > maxLength) maxLength = l[i];
if (l[i] < minLength)
{
minLength = l[i];
shortEdge = edges[i];
}
}
if ((maxLength/minLength) < shortEdgeRatio)
{
clearFlag(adapter,element,BAD_QUALITY);
return false;
}
remover.setEdge(shortEdge);
return true;
}
virtual bool requestLocality(apf::CavityOp* o)
{
return remover.requestLocality(o);
}
virtual void apply()
{
if (remover.run())
++nr;
else
{
++nf;
clearFlag(adapter,element,BAD_QUALITY);
}
}
private:
Adapt* adapter;
Mesh* mesh;
Entity* element;
SizeField* sizeField;
ShortEdgeRemover remover;
double shortEdgeRatio;
public:
int nr;
int nf;
};
class TetFixerBase
{
public:
virtual void setTet(Entity** v) = 0;
virtual bool requestLocality(apf::CavityOp* o) = 0;
virtual bool run() = 0;
};
class FixBySwap : public TetFixerBase
{
public:
FixBySwap(Adapt* a):
adapter(a)
{
mesh = a->mesh;
edgeSwap = makeEdgeSwap(a);
nes = nf = numToTry = 0;
edges[0] = 0;
edges[1] = 0;
edges[2] = 0;
}
~FixBySwap()
{
delete edgeSwap;
}
virtual void setTet(Entity** v)
{
Entity* tet = apf::findElement(mesh, apf::Mesh::TET, v);
PCU_ALWAYS_ASSERT(tet);
match = matchSliver(adapter, tet);
Entity* dv[4];
mesh->getDownward(tet, 0, dv);
Entity* rv[4];
rotateTet(dv,match.rotation,rv);
enum { EDGE_EDGE, FACE_VERT };
if (match.code_index==EDGE_EDGE) {
Entity* ev[2];
ev[0] = rv[0]; ev[1] = rv[2];
edges[0] = findUpward(mesh, apf::Mesh::EDGE, ev);
ev[0] = rv[1]; ev[1] = rv[3];
edges[1] = findUpward(mesh, apf::Mesh::EDGE, ev);
numToTry = 2;
}
else
{
PCU_ALWAYS_ASSERT(match.code_index==FACE_VERT);
apf::findTriDown(mesh,rv,edges);
numToTry = 3;
}
}
virtual bool requestLocality(apf::CavityOp* o)
{
return o->requestLocality(edges, numToTry);
}
virtual bool run()
{
for (int i=0; i < numToTry; ++i)
if (edgeSwap->run(edges[i]))
{
++nes;
return true;
}
++nf;
return false;
}
private:
Adapt* adapter;
Mesh* mesh;
Entity* edges[3];
EdgeSwap* edgeSwap;
CodeMatch match;
int numToTry;
int nes;
int nf;
};
class EdgeVertFixer : public TetFixerBase
{
public:
EdgeVertFixer(Adapt* a):
singleSplitCollapse(a)
{
mesh = a->mesh;
edgeSwap = makeEdgeSwap(a);
nes = nssc = nf = 0;
edge = 0;
oppVert = 0;
}
~EdgeVertFixer()
{
delete edgeSwap;
}
virtual void setTet(Entity** v)
{
/* In this template, the edge v[0]--v[1] and vert v[3]
are too close*/
edge = apf::findElement(mesh, apf::Mesh::EDGE, v);
oppVert = v[3];
verts[0] = v[0];
verts[1] = v[1];
verts[2] = v[3];
}
virtual bool requestLocality(apf::CavityOp* o)
{
/* by requesting locality for all the verts we can be sure
* that all the desired entities for this operator are local */
return o->requestLocality(verts,3);
}
virtual bool run() {
if (edgeSwap->run(edge)) {
++nes;
return true;
}
if (singleSplitCollapse.run(edge, oppVert))
{
++nssc;
return true;
}
++nf;
return false;
}
private:
Mesh* mesh;
Entity* verts[3];
Entity *edge, *oppVert;
SingleSplitCollapse singleSplitCollapse;
EdgeSwap* edgeSwap;
public:
int nes; /* number of edge swaps done */
int nssc; /* number of SSCs done */
int nf; /* number of failures */
};
class FaceVertFixer : public TetFixerBase
{
public:
FaceVertFixer(Adapt* a):
faceSplitCollapse(a)
{
mesh = a->mesh;
edgeSwap = makeEdgeSwap(a);
nes = nf = nfsc = 0;
edges[0] = 0;
edges[1] = 0;
edges[2] = 0;
verts[0] = 0;
verts[1] = 0;
verts[2] = 0;
verts[3] = 0;
face = 0;
oppVert = 0;
tet = 0;
}
~FaceVertFixer()
{
delete edgeSwap;
}
virtual void setTet(Entity** v)
{
/* in this template, the bottom face and v[3]
are too close, the key edges are those that bound
face v(0,1,2) */
apf::findTriDown(mesh,v,edges);
tet = apf::findElement(mesh, apf::Mesh::TET, v);
oppVert = v[3];
verts[0] = v[0];
verts[1] = v[1];
verts[2] = v[2];
verts[3] = v[3];
}
virtual bool requestLocality(apf::CavityOp* o)
{
/* by requesting locality for all the verts we can be sure
* that all the desired entities for this operator are local */
return o->requestLocality(verts,4);
}
virtual bool run()
{
for (int i=0; i < 3; ++i)
if (edgeSwap->run(edges[i]))
{
++nes;
return true;
}
face = apf::findUpward(mesh, apf::Mesh::TRIANGLE, edges);
if (faceSplitCollapse.run(face, tet))
{
++nfsc;
return true;
}
++nf;
return false;
}
private:
Mesh* mesh;
Entity* edges[3];
Entity* verts[4];
Entity *face, *oppVert;
Entity* tet;
FaceSplitCollapse faceSplitCollapse;
EdgeSwap* edgeSwap;
public:
int nes; /* number of edge swaps done */
int nfsc; /* number of FSCs done */
int nf; /* number of failures */
};
class EdgeEdgeFixer : public TetFixerBase
{
public:
EdgeEdgeFixer(Adapt* a):
doubleSplitCollapse(a)
{
mesh = a->mesh;
edgeSwap = makeEdgeSwap(a);
nes = ndsc = nf = 0;
sf = a->sizeField;
edges[0] = 0;
edges[1] = 0;
}
~EdgeEdgeFixer()
{
delete edgeSwap;
}
virtual void setTet(Entity** v)
{
/* in this template, the v[0]-v[2] amd v[1]-v[3]
edges are too close. */
Entity* ev[2];
ev[0] = v[0]; ev[1] = v[2];
edges[0] = findUpward(mesh, apf::Mesh::EDGE, ev);
ev[0] = v[1]; ev[1] = v[3];
edges[1] = findUpward(mesh, apf::Mesh::EDGE, ev);
}
virtual bool requestLocality(apf::CavityOp* o)
{
return o->requestLocality(edges,2);
}
virtual bool run()
{
for (int i=0; i < 2; ++i)
if (edgeSwap->run(edges[i]))
{
++nes;
return true;
}
if (doubleSplitCollapse.run(edges))
{
++ndsc;
return true;
}
++nf;
return false;
}
private:
Mesh* mesh;
Entity* edges[2];
EdgeSwap* edgeSwap;
DoubleSplitCollapse doubleSplitCollapse;
SizeField* sf;
public:
int nes;
int ndsc;
int nf;
};
class LargeAngleTetFixer : public Operator
{
public:
LargeAngleTetFixer(Adapt* a):
edgeEdgeFixer(a),
edgeVertFixer(a),
faceVertFixer(a)
{
adapter = a;
mesh = a->mesh;<|fim▁hole|> virtual ~LargeAngleTetFixer()
{
}
virtual int getTargetDimension() {return 3;}
enum { EDGE_EDGE, FACE_VERT, EDGE_VERT, VERT_VERT };
virtual bool shouldApply(Entity* e)
{
if ( ! getFlag(adapter,e,BAD_QUALITY))
return false;
tet = e;
CodeMatch match = matchSliver(adapter,e);
if (match.code_index==EDGE_EDGE) {
fixer = &edgeEdgeFixer;
} else if (match.code_index==FACE_VERT) {
fixer = &faceVertFixer;
} else if (match.code_index==EDGE_VERT) {
fixer = &edgeVertFixer;
} else if (match.code_index==VERT_VERT) {
fixer = &faceVertFixer;
}
Entity* v[4];
mesh->getDownward(e,0,v);
Entity* rv[4];
rotateTet(v,match.rotation,rv);
fixer->setTet(rv);
return true;
}
virtual bool requestLocality(apf::CavityOp* o)
{
return fixer->requestLocality(o);
}
virtual void apply()
{
if ( ! fixer->run())
clearFlag(adapter,tet,BAD_QUALITY);
}
private:
Adapt* adapter;
Mesh* mesh;
Entity* tet;
TetFixerBase* fixer;
public:
EdgeEdgeFixer edgeEdgeFixer;
EdgeVertFixer edgeVertFixer;
FaceVertFixer faceVertFixer;
};
class LargeAngleTetAligner : public Operator
{
public:
LargeAngleTetAligner(Adapt* a):
fixer(a)
{
adapter = a;
mesh = a->mesh;
tet = 0;
}
virtual ~LargeAngleTetAligner()
{
}
virtual int getTargetDimension() {return 3;}
virtual bool shouldApply(Entity* e)
{
if ( ! getFlag(adapter,e,BAD_QUALITY))
return false;
tet = e;
/* PCU_ALWAYS_ASSERT(mesh->getType(e) == apf::Mesh::TET); */
enum { EDGE_EDGE, FACE_VERT };
CodeMatch match = matchSliver(adapter,e);
if (match.code_index==EDGE_EDGE) {
clearFlag(adapter,tet,BAD_QUALITY);
return false;
}
/* else */
/* { PCU_ALWAYS_ASSERT(match.code_index==FACE_VERT); */
/* fixer = &faceVertFixer; */
/* } */
Entity* v[4];
mesh->getDownward(e,0,v);
fixer.setTet(v);
return true;
}
virtual bool requestLocality(apf::CavityOp* o)
{
return fixer.requestLocality(o);
}
virtual void apply()
{
if ( ! fixer.run())
clearFlag(adapter,tet,BAD_QUALITY);
}
private:
Adapt* adapter;
Mesh* mesh;
Entity* tet;
FixBySwap fixer;
};
class LargeAngleTriFixer : public Operator
{
public:
LargeAngleTriFixer(Adapt* a)
{
adapter = a;
mesh = a->mesh;
edgeSwap = makeEdgeSwap(a);
ns = nf = 0;
tri = 0;
edge = 0;
}
virtual ~LargeAngleTriFixer()
{
delete edgeSwap;
}
virtual int getTargetDimension() {return 2;}
virtual bool shouldApply(Entity* e)
{
if ( ! getFlag(adapter,e,BAD_QUALITY))
return false;
tri = e;
// get the metric Q for angle computations
SizeField* sf = adapter->sizeField;
Matrix Q;
apf::MeshElement* me = apf::createMeshElement(mesh, tri);
Vector center(1./3.,1./3.,1./3.);
sf->getTransform(me,center,Q);
apf::destroyMeshElement(me);
// pick the edge opposite to the largest angle (in metric) for swap
Entity* edges[3];
mesh->getDownward(e,1,edges);
double minCos = 1.0;
for (int i = 0; i < 3; i++) {
Entity* current = edges[i%3];
Entity* next = edges[(i+1)%3];
double cosAngle = apf::computeCosAngle(mesh, tri, current, next, Q);
if (cosAngle < minCos) {
minCos = cosAngle;
edge = edges[(i+2)%3];
}
}
return true;
}
virtual bool requestLocality(apf::CavityOp* o)
{
return o->requestLocality(&edge,1);
}
virtual void apply()
{
if (edgeSwap->run(edge))
{
++ns;
return;
}
++nf;
clearFlag(adapter,tri,BAD_QUALITY);
}
private:
Adapt* adapter;
Mesh* mesh;
Entity* tri;
Entity* edge;
EdgeSwap* edgeSwap;
int ns;
int nf;
};
class QualityImprover2D : public Operator
{
public:
QualityImprover2D(Adapt* a)
{
adapter = a;
mesh = a->mesh;
edgeSwap = makeEdgeSwap(a);
ns = nf = 0;
edge = 0;
}
virtual ~QualityImprover2D()
{
delete edgeSwap;
}
virtual int getTargetDimension() {return 1;}
virtual bool shouldApply(Entity* e)
{
if ( getFlag(adapter,e,DONT_SWAP))
return false;
if ( mesh->isShared(e) )
return false;
edge = e;
return true;
}
virtual bool requestLocality(apf::CavityOp* o)
{
return o->requestLocality(&edge,1);
}
virtual void apply()
{
if (edgeSwap->run(edge))
{
++ns;
return;
}
++nf;
}
private:
Adapt* adapter;
Mesh* mesh;
Entity* edge;
EdgeSwap* edgeSwap;
int ns;
int nf;
};
static double fixShortEdgeElements(Adapt* a)
{
double t0 = PCU_Time();
ShortEdgeFixer fixer(a);
applyOperator(a,&fixer);
double t1 = PCU_Time();
return t1 - t0;
}
static void fixLargeAngleTets(Adapt* a)
{
LargeAngleTetFixer fixer(a);
applyOperator(a,&fixer);
}
static void fixLargeAngleTris(Adapt* a)
{
LargeAngleTriFixer fixer(a);
applyOperator(a,&fixer);
}
static void alignLargeAngleTets(Adapt* a)
{
LargeAngleTetAligner aligner(a);
applyOperator(a,&aligner);
}
static void alignLargeAngleTris(Adapt* a)
{
LargeAngleTriFixer aligner(a);
applyOperator(a,&aligner);
}
static void improveQualities2D(Adapt* a)
{
QualityImprover2D improver(a);
applyOperator(a, &improver);
}
static double fixLargeAngles(Adapt* a)
{
double t0 = PCU_Time();
if (a->mesh->getDimension()==3)
fixLargeAngleTets(a);
else
fixLargeAngleTris(a);
double t1 = PCU_Time();
return t1 - t0;
}
static void alignLargeAngles(Adapt* a)
{
if (a->mesh->getDimension()==3)
alignLargeAngleTets(a);
else
alignLargeAngleTris(a);
}
double improveQualities(Adapt* a)
{
double t0 = PCU_Time();
if (a->mesh->getDimension() == 3)
return 0; // TODO: implement this for 3D
else
improveQualities2D(a);
double t1 = PCU_Time();
return t1 - t0;
}
void fixElementShapes(Adapt* a)
{
if ( ! a->input->shouldFixShape)
return;
double t0 = PCU_Time();
int count = markBadQuality(a);
int originalCount = count;
int prev_count;
double time;
int iter = 0;
do {
if ( ! count)
break;
prev_count = count;
print("--iter %d of shape correction loop: #bad elements %d", iter, count);
time = fixLargeAngles(a);
/* We need to snap the new verts as soon as they are
* created (to avoid future problems). At the moment
* new verts are created only during 3D mesh adapt, so
* we only run a bulk snap operation if the mesh is 3D.
*/
if (a->mesh->getDimension() == 3)
snap(a);
count = markBadQuality(a);
print("--fixLargeAngles in %f seconds: #bad elements %d",time,count);
time = fixShortEdgeElements(a);
count = markBadQuality(a);
print("--fixShortEdgeElements in %f seconds: #bad elements %d",time,count);
if (count >= prev_count)
unMarkBadQuality(a); // to make sure markEntities does not complain!
// balance the mesh to avoid empty parts
midBalance(a);
print("--percent change in number of bad elements %f",
((double) prev_count - (double) count) / (double) prev_count);
iter++;
} while(count < prev_count);
double t1 = PCU_Time();
print("bad shapes down from %d to %d in %f seconds",
originalCount,count,t1-t0);
}
void alignElements(Adapt* a)
{
int max_iter = 5;
if ( ! a->input->shouldFixShape)
return;
double t0 = PCU_Time();
int count = markBadQuality(a);
int originalCount = count;
int prev_count;
int i = 0;
do {
if ( ! count)
break;
prev_count = count;
alignLargeAngles(a);
count = markBadQuality(a);
++i;
if (count >= prev_count || i >= max_iter)
unMarkBadQuality(a);
} while(count < prev_count && i < max_iter);
double t1 = PCU_Time();
print("non-aligned elements down from %d to %d in %f seconds",
originalCount,count,t1-t0);
}
void printQuality(Adapt* a)
{
if ( ! a->input->shouldPrintQuality)
return;
double minqual = getMinQuality(a);
print("worst element quality is %e", minqual);
}
}<|fim▁end|> | tet = 0;
fixer = 0;
} |
<|file_name|>interface.d.ts<|end_file_name|><|fim▁begin|>/* eslint-disable max-len */
declare global {
namespace Kloudless.filePicker {
function picker(options: ChooserOptions | SaverOptions): Picker;
function dropzone(options: DropzoneOptions): Dropzone;
function getGlobalOptions(): BuildOptions;
function setGlobalOptions(buildOptions: Partial<BuildOptions>);
/**
* See https://github.com/kloudless/file-picker#methods
*/
interface Picker extends Events {
choose(): void;
choosify(element: HTMLElement): void;
save(files: File[]): void;
savify(element: HTMLElement, files: File[]): void;
close(): void;
destroy(): void;
update(options: UpdateOptions): void;
logout(deleteAccount?: boolean): void;
}
/**
* See https://github.com/kloudless/file-picker#methods-1
*/
interface Dropzone extends DropzoneEvents {
close(): void;
destroy(): void;
update(options: DropzoneUpdateOptions): void;
}
type PersistMode = 'none' | 'local' | 'session';
type ServiceGroup = 'file_store' | 'object_store' | 'construction' | 'all';
/**
* Please refer to https://developers.kloudless.com/docs/latest/storage
* for the full list of suppoerted services.
*/
type ServiceName = string;
type ServiceCategory = 'all' | 'folders' | 'files' | 'text' | 'documents'
| 'images' | 'videos' | 'audio';
type FileExtension = string;
type NotUpdatableOptions = 'app_id' | 'custom_css' | 'types' | 'services'
| 'persist' | 'create_folder' | 'account_key' | 'elementId' | 'element';
type UpdateOptions = Partial<Omit<(ChooserOptions & SaverOptions), NotUpdatableOptions>>;
type DropzoneUpdateOptions = Partial<Omit<DropzoneOptions, NotUpdatableOptions>>;
interface File {
name: string;
url: string;
}
interface LinkOptions {
password?: string;
expiration?: string;
direct?: boolean;
}
/**
* See https://github.com/kloudless/file-picker#chooser-and-saver
*/
interface BaseOptions {
app_id: string;
retrieve_token?: boolean;
computer?: boolean;
persist?: PersistMode;
services?: (ServiceName | ServiceGroup)[];
account_management?: boolean;
display_backdrop?: boolean;
element?: string | HTMLElement;
// TODO: deprecate in v2
custom_css?: string;
/**
* Please refer to https://github.com/kloudless/file-picker#chooser-and-saver
* for the usage of `locale` option.
*/
locale?: string;
/**
* Please refer to https://github.com/kloudless/file-picker#chooser-and-saver
* for the usage of `translations` option.
*/
translations?: string | object;
dateTimeFormat?: string;
create_folder?: boolean;
/**
* @deprecated Please use the `retrieve_token` option instead.
*/
account_key?: boolean;
/**
* @deprecated Please use the `retrieve_token` option instead.
*/
keys?: string[];
tokens?: string[];
enable_logout?: boolean;
delete_accounts_on_logout?: boolean;
oauth?(service: ServiceName): OAuthQueryParams;
root_folder_id?: { [key: number]: string };
}
/**
* See https://github.com/kloudless/file-picker#chooser-and-saver
*/
interface ChooserOptions extends BaseOptions {
multiselect?: boolean;
link?: boolean;
link_options?: LinkOptions;
copy_to_upload_location?: 'async' | 'sync';
upload_location_account?: string;
upload_location_folder?: string;
uploads_pause_on_error?: boolean;
types?: (FileExtension | ServiceCategory)[];
}
/**
* See https://github.com/kloudless/file-picker#chooser-and-saver
*/
interface SaverOptions extends BaseOptions {
files: File[];
}
interface DropzoneDeprecatedOptions {
/**
* @deprecated Please use the `element` option instead.
*/
elementId: string;
}
interface DropzoneNewOptions {
element: string | HTMLElement;
}
type DropzoneOptions = ChooserOptions & {
copy_to_upload_location: 'async' | 'sync';
computer?: true;
} & (DropzoneDeprecatedOptions | DropzoneNewOptions);
interface BuildOptions {
pickerUrl?: string;
}
/**
* See https://developers.kloudless.com/docs/v1/authentication#oauth-2.0.
*/
interface OAuthQueryParams {
scope?: string;
oob_loading_delay?: number;
custom_properties?: object;
raw?: object;
/**
* @deprecated Please use the `form_data` option instead.
*/
extra_data?: object;
form_data?: object;
}
type SuccessEvent = Array<FileMetadata | FolderMetadata | Task>;
type ErrorEvent = Array<FileMetadata | FolderMetadata | Task | SaverError>;
type SelectedEvent = FileMetadata[] | FolderMetadata[];
type SaverStartFileUploadEvent = SaverFile;
type StartFileUploadEvent = ChooserStartFileUploadEvent | SaverStartFileUploadEvent;
type FinishFileUploadEvent = ChooserFinishFileUploadEvent | SaverFinishFileUploadEvent;
/**
* See https://github.com/kloudless/file-picker#events
*/
interface Events {
on(event: 'success', callback: (event: SuccessEvent) => void): void;
on(event: 'cancel', callback: () => void): void;
on(event: 'error', callback: (event: ErrorEvent) => void): void;
on(event: 'open', callback: () => void): void;
on(event: 'close', callback: () => void): void;
on(event: 'selected', callback: (event: SelectedEvent) => void): void;
on(event: 'addAccount', callback: (account: Account) => void): void;
on(event: 'deleteAccount', callback: (accountId: number) => void): void;
on(event: 'startFileUpload', callback: (event: StartFileUploadEvent) => void): void;
on(event: 'finishFileUpload', callback: (event: FinishFileUploadEvent) => void): void;
on(event: 'logout', callback: () => void): void;
}
/**
* See https://github.com/kloudless/file-picker#events
*/
type DropzoneEvents = Events & {
on(event: 'drop', callback: () => void): void;
}
interface BaseMetadata {
id: string;
name: string;
size: number | null;
created: string | null;
modified: string | null;
type: string;
account: number;
parent: ParentMetadata;
ancestors: ParentMetadata[] | null;
path: string | null;
/**
* @deprecated
*/
raw_id: string;
owner?: UserMetadata;
creator?: UserMetadata;
last_modifier?: UserMetadata;
api: 'storage';
ids?: IdsMetadata;
id_type?: keyof IdsMetadata;
bearer_token?: {
key: string;
};
error?: Error;
/**
* In case Kloudless API introduces new properties but hasn't updated here.
* Please notify us by creating GitHub issues: https://github.com/kloudless/file-picker/issues.
*/
[x: string]: any; // eslint-disable-line @typescript-eslint/no-explicit-any
}
/**
* See https://developers.kloudless.com/docs/latest/storage#files
*/
interface FileMetadata extends BaseMetadata {
type: 'file';
mime_type: string;
downloadable: boolean;
link?: string;
}
/**
* See https://developers.kloudless.com/docs/latest/storage#folders
*/
interface FolderMetadata extends BaseMetadata {
type: 'folder';
can_create_folders: boolean;
can_upload_files: boolean;
}
interface UserMetadata {
id: string;
}
interface ParentMetadata {
id: string;
name: string;
id_type?: keyof IdsMetadata;
}
interface IdsMetadata {
default?: string;
shared?: string;
path?: string;
version?: string;
}
interface Error {
status_code: number;
message: string;
error_code: string;
id: string;
}
interface Account {
id: string;
name: string;
service: ServiceName;
bearer_token?: {
key: string;
};
}
interface ChooserStartFileUploadEvent {
id: string;<|fim▁hole|> }
interface SaverFile {
url: string;
name: string;
}
interface SaverError extends SaverFile {
error: Error;
}
interface ChooserFinishFileUploadEvent extends ChooserStartFileUploadEvent {
metadata: FileMetadata;
}
interface SaverFinishFileUploadEvent extends SaverFile {
metadata: FileMetadata;
}
/**
* See https://developers.kloudless.com/docs/latest/core#asynchronous-requests-and-the-task-api
*/
interface Task {
id: string;
state: 'PENDING' | 'RECEIVED' | 'STARTED';
account?: number;
bearer_token?: {
key: string;
};
}
}
}
export default Kloudless.filePicker;
export type PersistMode = Kloudless.filePicker.PersistMode;
export type ServiceGroup = Kloudless.filePicker.ServiceGroup;
export type ServiceName = Kloudless.filePicker.ServiceName;
export type ServiceCategory = Kloudless.filePicker.ServiceCategory;
export type FileExtension = Kloudless.filePicker.FileExtension;
export type UpdateOptions = Kloudless.filePicker.UpdateOptions;
export type DropzoneUpdateOptions = Kloudless.filePicker.DropzoneUpdateOptions;
export type SuccessEvent = Kloudless.filePicker.SuccessEvent;
export type ErrorEvent = Kloudless.filePicker.ErrorEvent;
export type SelectedEvent = Kloudless.filePicker.SelectedEvent;
export type StartFileUploadEvent = Kloudless.filePicker.StartFileUploadEvent;
export type FinishFileUploadEvent = Kloudless.filePicker.FinishFileUploadEvent;
export type ChooserStartFileUploadEvent = Kloudless.filePicker.ChooserStartFileUploadEvent;
export type SaverStartFileUploadEvent = Kloudless.filePicker.SaverStartFileUploadEvent;
export type ChooserFinishFileUploadEvent = Kloudless.filePicker.ChooserFinishFileUploadEvent;
export type SaverFinishFileUploadEvent = Kloudless.filePicker.SaverFinishFileUploadEvent;
export type Picker = Kloudless.filePicker.Picker;
export type Dropzone = Kloudless.filePicker.Dropzone;
export type File = Kloudless.filePicker.File;
export type LinkOptions = Kloudless.filePicker.LinkOptions;
export type BaseOptions = Kloudless.filePicker.BaseOptions;
export type ChooserOptions = Kloudless.filePicker.ChooserOptions;
export type SaverOptions = Kloudless.filePicker.SaverOptions;
export type DropzoneOptions = Kloudless.filePicker.DropzoneOptions;
export type BuildOptions = Kloudless.filePicker.BuildOptions;
export type OAuthQueryParams = Kloudless.filePicker.OAuthQueryParams;
export type Events = Kloudless.filePicker.Events;
export type BaseMetadata = Kloudless.filePicker.BaseMetadata;
export type FileMetadata = Kloudless.filePicker.FileMetadata;
export type FolderMetadata = Kloudless.filePicker.FolderMetadata;
export type UserMetadata = Kloudless.filePicker.UserMetadata;
export type ParentMetadata = Kloudless.filePicker.ParentMetadata;
export type IdsMetadata = Kloudless.filePicker.IdsMetadata;
export type Error = Kloudless.filePicker.Error;
export type SaverError = Kloudless.filePicker.SaverError;
export type Account = Kloudless.filePicker.Account;
export type Task = Kloudless.filePicker.Task;<|fim▁end|> | name: string;
size: number;
mime_type: string; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from share.transform.chain.exceptions import * # noqa
from share.transform.chain.links import * # noqa
from share.transform.chain.parsers import * # noqa<|fim▁hole|>
# Context singleton to be used for parser definitions
# Class SHOULD be thread safe
# Accessing subattribtues will result in a new copy of the context
# to avoid leaking data between chains
ctx = Context()<|fim▁end|> | from share.transform.chain.transformer import ChainTransformer # noqa
from share.transform.chain.links import Context
|
<|file_name|>account.py<|end_file_name|><|fim▁begin|>from collections import ChainMap
from django.conf import settings
from django.core.urlresolvers import reverse_lazy
from django.shortcuts import render, redirect, resolve_url
from django.contrib import messages
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.tokens import default_token_generator
from django.contrib.auth.decorators import login_required
from django.contrib.sites.models import get_current_site
from django.views.generic import FormView, TemplateView
from django.contrib.auth import login, logout
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode, is_safe_url
from django.utils.encoding import force_bytes
from django.utils.decorators import method_decorator
from app.models.account import *
from app.forms.account import *
from app.views.edit import FormsetView
class SignUpView(FormsetView):
template_name = 'account/signup.html'
form_class = SignUpUserForm
formset_class = SignUpAddressFormSet
http_method_names = ['get', 'post']
success_url = reverse_lazy('account:signin')
success_message = 'We have sent you an activation email at <strong>%(email)s</strong>. Please follow the ' \
'instructions in the mail to <strong>activate</strong> your account.'
def form_valid(self, request, form, formset):
cleaned_data = dict()
for k, v in ChainMap(formset.cleaned_data, form.cleaned_data).items():
cleaned_data[k] = v
self.sign_up(request, **cleaned_data)
messages.success(request, self.success_message % {'email': cleaned_data['email']}, extra_tags='safe')
return super(SignUpView, self).form_valid(request, form, formset)
def sign_up(self, request, **cleaned_data):
cd = cleaned_data
# Attibutes of the User
email, password, first_name, last_name, phone_num, pickup_arrangements = cd['email'], cd[
'password1'], cd['first_name'], cd['last_name'], cd['phone_num'], cd['pickup_arrangements']
# Attributes of user's Address
apt_num, street, city, county, state, zip = cd['apt_num'], cd['street'], cd['city'], cd['county'], \
cd['state'], cd['zip']
user = RegistrationProfile.objects.create_inactive_user(get_current_site(request), **cleaned_data)
class SignInView(FormView):
template_name = 'account/signin.html'
form_class = SignInForm
http_method_names = ['get', 'post']
success_url = reverse_lazy('home')
def get(self, request, *args, **kwargs):
redirect_to = self.request.REQUEST.get(REDIRECT_FIELD_NAME, '')
form_class = self.get_form_class()
form = self.get_form(form_class)
kwargs = {'form': form, 'REDIRECT_FIELD_NAME': REDIRECT_FIELD_NAME, 'redirect_to': redirect_to}
return self.render_to_response(self.get_context_data(**kwargs))
def form_valid(self, form):
redirect_to = self.request.REQUEST.get(REDIRECT_FIELD_NAME, '')
# If redirection URL is un-safe redirect to default home
if is_safe_url(url=redirect_to, host=self.request.get_host()):
self.success_url = redirect_to
login(self.request, form.get_user())
return super(SignInView, self).form_valid(form)
def form_invalid(self, form):
return super(SignInView, self).form_invalid(form)
class SignOutView(FormView):
http_method_names = ['post']
success_url = reverse_lazy('account:signin')
success_message = 'You have signed out of your ToolShare account.'
def post(self, request):
logout(request)
messages.success(request, self.success_message)
return redirect(self.success_url)
class ActivateAccountView(TemplateView):
http_method_names = ['get']
success_url = reverse_lazy('account:signin')
success_message = 'Your ToolShare account is now ready to use. Please <strong>sign in</strong> to continue.'
failure_url = reverse_lazy('account:signup')
failure_message = 'It appears that the activation link is no longer valid. Please <strong>sign up</strong> for a ' \
'new account.'
def get(self, request, activation_key):
if RegistrationProfile.objects.activate_user(activation_key):
messages.success(request, self.success_message, extra_tags='safe')
return redirect(self.success_url)
messages.error(request, self.failure_message, extra_tags='safe')
return redirect(self.failure_url)
class RecoverAccountView(FormView):
template_name = 'account/recover.html'
success_url = reverse_lazy('account:signin')
success_message = 'We have sent an email with instructions to <strong>reset</strong> the password on your ' \
'ToolShare account.'
form_class = RecoverAccountForm
http_method_names = ['get', 'post']
def form_valid(self, form):
user = User.objects.get(email=form.cleaned_data['email'])
uidb64 = urlsafe_base64_encode(force_bytes(user.pk))
token = default_token_generator.make_token(user)
form.save(get_current_site(self.request), user, uidb64, token)
messages.success(self.request, self.success_message, extra_tags='safe')
return super(RecoverAccountView, self).form_valid(form)
class ResetAccountView(FormView):
template_name = 'account/reset.html'
success_url = reverse_lazy('account:signin')
success_message = 'The password on your ToolShare account was successfully reset. Please <strong>sign in' \
'</strong> with your new password.'
failure_url = reverse_lazy('account:recover')
failure_message = 'It appears that the URL you used to recover the account is no longer valid. Please try to ' \
'reset your password again.'
form_class = ResetAccountForm
http_method_names = ['get', 'post']
def get(self, request, uidb64, token):
uid = urlsafe_base64_decode(uidb64)
user = User.objects.get(pk=uid)
if not default_token_generator.check_token(user, token):
messages.error(self.request, self.failure_message, extra_tags='safe')
return redirect(self.failure_url)
return super(ResetAccountView, self).get(request)
def post(self, request, uidb64, token):
uid = urlsafe_base64_decode(uidb64)
user = User.objects.get(pk=uid)
form_class = self.get_form_class()
form = self.get_form(form_class)
if form.is_valid():
form.save(user)
messages.success(self.request, self.success_message, extra_tags='safe')
return super(ResetAccountView, self).form_valid(form)
return super(ResetAccountView, self).form_invalid(form)
class UpdateAccountView(FormsetView):
template_name = 'account/update.html'
success_url = reverse_lazy('account:update')
success_message = 'Your ToolShare account was successfully updated.'
failure_message = \
'''
One or more reasons listed below is <strong>preventing</strong> the change from being saved:
<ul>
<li>You have <strong>borrowed tools</strong> in your possession that need to be returned.</li>
<li>You have <strong>tools in the community shed</strong> that you need to collect.</li>
<li>You have <strong>unresolved future reservations</strong>.</li>
</ul>
'''
form_class = UpdateUserForm<|fim▁hole|>
def get(self, request, *args, **kwargs):
form_class = self.get_form_class()
form = form_class(instance=request.user)
formset_class = self.get_formset_class()
formset = formset_class(instance=request.user.address)
return render(request, self.template_name, {'form': form, 'formset': formset})
def post(self, request, *args, **kwargs):
prev_sz = request.user.share_zone
form_class = self.get_form_class()
form = form_class(request.POST, instance=request.user)
formset_class = self.get_formset_class()
formset = formset_class(request.POST, instance=request.user.address)
if form.is_valid() and formset.is_valid():
if self.is_relocation_allowed(prev_sz, formset.instance.share_zone, request.user):
return self.form_valid(request, form, formset)
else:
messages.error(request, self.failure_message % {'r': reverse_lazy('reservation')}, extra_tags='safe')
return self.form_invalid(request, form, formset)
else:
return self.form_invalid(request, form, formset)
def form_valid(self, request, form, formset):
form.save()
formset.save()
messages.success(request, self.success_message)
return super(UpdateAccountView, self).form_valid(request, form, formset)
@method_decorator(login_required)
def dispatch(self, request, *args, **kwargs):
return super(UpdateAccountView, self).dispatch(request, *args, **kwargs)
def is_relocation_allowed(self, prev_sz, curr_sz, user):
if prev_sz != curr_sz and not user.is_ready_to_move():
return False
return True
class ChangePasswordView(FormView):
template_name = 'account/change_password.html'
success_url = reverse_lazy('account:update')
success_message = 'Password updated successfully.'
form_class = ChangePasswordForm
def get_form(self, form_class):
return form_class(user=self.request.user, **self.get_form_kwargs())
def form_valid(self, form):
form.save()
messages.success(self.request, self.success_message)
return super(ChangePasswordView, self).form_valid(form)
@method_decorator(login_required)
def dispatch(self, request, *args, **kwargs):
return super(ChangePasswordView, self).dispatch(request, *args, **kwargs)<|fim▁end|> | formset_class = UpdateAddressFormSet |
<|file_name|>ChangeDirectionExecute.java<|end_file_name|><|fim▁begin|>package commands.executes;
import agents.Agent;
import agents.Robot;
import agents.Speed;
import agents.Vacuum;
import commands.AgentCommand;
import commands.AgentCommandVisitor;
import commands.FieldCommand;
import commands.NoFieldCommandException;
import commands.transmits.ChangeDirectionTransmit;
import field.Direction;
public class ChangeDirectionExecute extends AgentCommand {
private Direction direction;
public ChangeDirectionExecute(ChangeDirectionTransmit parent) {
super(parent);
this.direction = parent.getDirection();
}
public Direction getDirection() {
return direction;
}
public void setDirection(Direction direction) {
this.direction = direction;
}
@Override<|fim▁hole|> @Override
public void accept(AgentCommandVisitor modifier) {
modifier.visit(this);
}
@Override
public void visit(Robot element) {
visitCommon(element);
}
@Override
public void visit(Vacuum element) {
visitCommon(element);
}
private void visitCommon(Agent element) {
if (!canExecute) {
result.pushNormal("irvalt 1 " + element);
return;
}
Speed newSpeed = element.getSpeed();
newSpeed.setDirection(direction);
element.setSpeed(newSpeed);
result.pushNormal("irvalt 0 " + element + " " + direction);
}
}<|fim▁end|> | public FieldCommand getFieldCommand() throws NoFieldCommandException {
throw new NoFieldCommandException();
}
|
<|file_name|>optimize.py<|end_file_name|><|fim▁begin|>#__docformat__ = "restructuredtext en"
# ******NOTICE***************
# optimize.py module by Travis E. Oliphant
#
# You may copy and use this module as you see fit with no
# guarantee implied provided you keep this notice in all copies.
# *****END NOTICE************
# A collection of optimization algorithms. Version 0.5
# CHANGES
# Added fminbound (July 2001)
# Added brute (Aug. 2002)
# Finished line search satisfying strong Wolfe conditions (Mar. 2004)
# Updated strong Wolfe conditions line search to use
# cubic-interpolation (Mar. 2004)
from __future__ import division, print_function, absolute_import
# Minimization routines
__all__ = ['fmin', 'fmin_powell', 'fmin_bfgs', 'fmin_ncg', 'fmin_cg',
'fminbound', 'brent', 'golden', 'bracket', 'rosen', 'rosen_der',
'rosen_hess', 'rosen_hess_prod', 'brute', 'approx_fprime',
'line_search', 'check_grad', 'OptimizeResult', 'show_options',
'OptimizeWarning']
__docformat__ = "restructuredtext en"
import warnings
import sys
import numpy
from scipy._lib.six import callable, xrange
from numpy import (atleast_1d, eye, mgrid, argmin, zeros, shape, squeeze,
vectorize, asarray, sqrt, Inf, asfarray, isinf)
import numpy as np
from .linesearch import (line_search_wolfe1, line_search_wolfe2,
line_search_wolfe2 as line_search,
LineSearchWarning)
from scipy._lib._util import getargspec_no_self as _getargspec
from scipy.linalg import get_blas_funcs
# standard status messages of optimizers
_status_message = {'success': 'Optimization terminated successfully.',
'maxfev': 'Maximum number of function evaluations has '
'been exceeded.',
'maxiter': 'Maximum number of iterations has been '
'exceeded.',
'pr_loss': 'Desired error not necessarily achieved due '
'to precision loss.'}
class MemoizeJac(object):
    """Decorator that caches the value and gradient of a function that
    returns a ``(fval, grad)`` pair, so the gradient computed alongside
    the most recent function evaluation can be reused by `derivative`.
    """

    def __init__(self, fun):
        # fun(x, *args) must return a (value, gradient) pair.
        self.fun = fun
        self.jac = None   # gradient from the most recent evaluation
        self.x = None     # point of the most recent evaluation

    def __call__(self, x, *args):
        """Evaluate the function at `x`, caching its gradient."""
        self.x = numpy.asarray(x).copy()
        fg = self.fun(x, *args)
        self.jac = fg[1]
        return fg[0]

    def derivative(self, x, *args):
        """Return the gradient at `x`, reusing the cached value when `x`
        matches the last evaluation point."""
        # numpy.alltrue was deprecated and removed in NumPy 2.0;
        # numpy.all is the supported equivalent.
        if self.jac is not None and numpy.all(x == self.x):
            return self.jac
        else:
            # Cache miss: evaluate at x (which refreshes the cache).
            self(x, *args)
            return self.jac
class OptimizeResult(dict):
    """Represents the result of an optimization.

    Behaves like a dict whose entries are also reachable as attributes.

    Attributes
    ----------
    x : ndarray
        The solution of the optimization.
    success : bool
        Whether or not the optimizer exited successfully.
    status : int
        Termination status of the optimizer; solver dependent. Refer to
        `message` for details.
    message : str
        Description of the cause of the termination.
    fun, jac, hess: ndarray
        Values of the objective function, its Jacobian and its Hessian
        (if available). The Hessians may be approximations; see the
        documentation of the function in question.
    hess_inv : object
        Inverse of the objective function's Hessian; may be an
        approximation and is not available for all solvers. May be an
        np.ndarray or a scipy.sparse.linalg.LinearOperator.
    nfev, njev, nhev : int
        Number of evaluations of the objective function and of its
        Jacobian and Hessian.
    nit : int
        Number of iterations performed by the optimizer.
    maxcv : float
        The maximum constraint violation.

    Notes
    -----
    Solvers may attach additional attributes beyond the ones above.
    Since this class is essentially a dict with attribute accessors,
    ``keys()`` shows which attributes are present.
    """

    def __getattr__(self, name):
        # Expose dict entries as attributes; unknown names must raise
        # AttributeError (not KeyError) to honour the attribute protocol.
        if name in self:
            return self[name]
        raise AttributeError(name)

    # Attribute assignment/deletion map straight onto the dict entries.
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__

    def __repr__(self):
        """One aligned ``key: value`` line per entry, sorted by key."""
        if not self.keys():
            return self.__class__.__name__ + "()"
        width = max(len(key) for key in self.keys()) + 1
        rows = [key.rjust(width) + ': ' + repr(value)
                for key, value in sorted(self.items())]
        return '\n'.join(rows)

    def __dir__(self):
        return list(self.keys())
class OptimizeWarning(UserWarning):
    # Warning category emitted by the optimizers (e.g. for unknown solver
    # options); users can filter on it with the warnings module.
    pass
def _check_unknown_options(unknown_options):
    """Warn about solver options the selected solver does not accept."""
    if not unknown_options:
        return
    names = ", ".join(map(str, unknown_options.keys()))
    # Stack level 4 attributes the warning to user code: this helper is
    # called from a _minimize_* routine, which is itself called from a
    # public SciPy entry point.
    warnings.warn("Unknown solver options: %s" % names, OptimizeWarning, 4)
def is_array_scalar(x):
    """Return True when `x` holds exactly one element, i.e. it is a
    scalar or an array scalar."""
    n_elements = np.size(x)
    return n_elements == 1
# Default finite-difference step: sqrt(machine epsilon), about 1.49e-8.
_epsilon = sqrt(numpy.finfo(float).eps)
def vecnorm(x, ord=2):
    """Vector norm of `x` of the given order.

    Parameters
    ----------
    x : array_like
        Input vector.
    ord : float, optional
        Norm order: ``inf`` gives the maximum absolute value, ``-inf``
        the minimum absolute value, any other value the usual p-norm
        ``sum(|x|**ord)**(1/ord)``. Default is 2 (Euclidean norm).

    Returns
    -------
    float or ndarray
        The requested norm of `x`.
    """
    # The bare ``Inf`` alias (numpy.Inf) was removed in NumPy 2.0;
    # compare against numpy.inf instead (same value, supported name).
    if ord == numpy.inf:
        return numpy.amax(numpy.abs(x))
    elif ord == -numpy.inf:
        return numpy.amin(numpy.abs(x))
    else:
        return numpy.sum(numpy.abs(x)**ord, axis=0)**(1.0 / ord)
def rosen(x):
    """
    The Rosenbrock function.

    Computes::

        sum(100.0*(x[1:] - x[:-1]**2.0)**2.0 + (1 - x[:-1])**2.0)

    Parameters
    ----------
    x : array_like
        1-D array of points at which to evaluate the function.

    Returns
    -------
    f : float
        The value of the Rosenbrock function at `x`.

    See Also
    --------
    rosen_der, rosen_hess, rosen_hess_prod
    """
    x = asarray(x)
    head = x[:-1]
    tail = x[1:]
    # Sum of per-pair terms; empty for a single-element input (gives 0).
    total = numpy.sum(100.0 * (tail - head**2.0)**2.0 + (1 - head)**2.0,
                      axis=0)
    return total
def rosen_der(x):
    """
    The gradient of the Rosenbrock function.

    Parameters
    ----------
    x : array_like
        1-D array of points at which to evaluate the derivative.

    Returns
    -------
    rosen_der : (N,) ndarray
        The gradient of the Rosenbrock function at `x`.

    See Also
    --------
    rosen, rosen_hess, rosen_hess_prod
    """
    x = asarray(x)
    grad = numpy.zeros_like(x)
    # Interior points couple to both neighbours.
    mid = x[1:-1]
    left = x[:-2]
    right = x[2:]
    grad[1:-1] = (200 * (mid - left**2) -
                  400 * (right - mid**2) * mid - 2 * (1 - mid))
    # Endpoints have only one neighbour each.
    grad[0] = -400 * x[0] * (x[1] - x[0]**2) - 2 * (1 - x[0])
    grad[-1] = 200 * (x[-1] - x[-2]**2)
    return grad
def rosen_hess(x):
    """
    The Hessian matrix of the Rosenbrock function.

    Parameters
    ----------
    x : array_like
        1-D array of points at which to evaluate the Hessian.

    Returns
    -------
    rosen_hess : ndarray
        The Hessian matrix of the Rosenbrock function at `x`.

    See Also
    --------
    rosen, rosen_der, rosen_hess_prod
    """
    x = atleast_1d(x)
    # Off-diagonal coupling between consecutive coordinates: -400*x_i.
    H = numpy.diag(-400 * x[:-1], 1) - numpy.diag(400 * x[:-1], -1)
    # Main diagonal: endpoints differ from interior points.
    d = numpy.zeros(len(x), dtype=x.dtype)
    d[0] = 1200 * x[0]**2 - 400 * x[1] + 2
    d[-1] = 200
    d[1:-1] = 202 + 1200 * x[1:-1]**2 - 400 * x[2:]
    return H + numpy.diag(d)
def rosen_hess_prod(x, p):
    """
    Product of the Rosenbrock Hessian with a vector, without forming
    the Hessian explicitly.

    Parameters
    ----------
    x : array_like
        1-D array of points at which the Hessian is evaluated.
    p : array_like
        1-D vector to multiply by the Hessian.

    Returns
    -------
    rosen_hess_prod : ndarray
        The Hessian of the Rosenbrock function at `x` times `p`.

    See Also
    --------
    rosen, rosen_der, rosen_hess
    """
    x = atleast_1d(x)
    result = numpy.zeros(len(x), dtype=x.dtype)
    # First row: diagonal term times p[0] plus the superdiagonal term.
    result[0] = (1200 * x[0]**2 - 400 * x[1] + 2) * p[0] - 400 * x[0] * p[1]
    # Interior rows: sub-, main and super-diagonal contributions.
    result[1:-1] = (-400 * x[:-2] * p[:-2] +
                    (202 + 1200 * x[1:-1]**2 - 400 * x[2:]) * p[1:-1] -
                    400 * x[1:-1] * p[2:])
    # Last row: subdiagonal term plus the constant diagonal 200.
    result[-1] = -400 * x[-2] * p[-2] + 200 * p[-1]
    return result
def wrap_function(function, args):
    """Return ``(ncalls, wrapped)`` where ``wrapped(x)`` evaluates
    ``function(x, *args)`` and increments the call counter ``ncalls[0]``.

    If `function` is None, the wrapped callable is None as well (the
    counter is still returned, stuck at zero).
    """
    counter = [0]
    if function is None:
        return counter, None

    def counted(*call_args):
        counter[0] += 1
        return function(*(call_args + args))

    return counter, counted
def fmin(func, x0, args=(), xtol=1e-4, ftol=1e-4, maxiter=None, maxfun=None,
         full_output=0, disp=1, retall=0, callback=None, initial_simplex=None):
    """
    Minimize a function using the downhill simplex (Nelder-Mead)
    algorithm.

    Only function values are used; no derivatives are required.

    Parameters
    ----------
    func : callable func(x,*args)
        The objective function to be minimized.
    x0 : ndarray
        Initial guess.
    args : tuple, optional
        Extra arguments passed to func, i.e. ``f(x,*args)``.
    xtol : float, optional
        Absolute error in xopt between iterations that is acceptable for
        convergence.
    ftol : number, optional
        Absolute error in func(xopt) between iterations that is
        acceptable for convergence.
    maxiter : int, optional
        Maximum number of iterations to perform.
    maxfun : number, optional
        Maximum number of function evaluations to make.
    full_output : bool, optional
        If True, also return fopt, iter, funcalls and warnflag.
    disp : bool, optional
        If True, print convergence messages.
    retall : bool, optional
        If True, also return the list of solutions at each iteration.
    callback : callable, optional
        Called after each iteration as ``callback(xk)``, where xk is the
        current parameter vector.
    initial_simplex : array_like of shape (N + 1, N), optional
        Initial simplex. If given, overrides `x0`;
        ``initial_simplex[j,:]`` holds the coordinates of the j-th of
        the ``N+1`` vertices, ``N`` being the dimension.

    Returns
    -------
    xopt : ndarray
        Parameter vector that minimizes `func`.
    fopt : float
        Value of the function at the minimum (with ``full_output``).
    iter : int
        Number of iterations performed (with ``full_output``).
    funcalls : int
        Number of function calls made (with ``full_output``).
    warnflag : int
        1 if maximum function evaluations were made, 2 if the maximum
        iteration count was reached (with ``full_output``).
    allvecs : list
        Solution at each iteration (with ``retall``).

    See also
    --------
    minimize: Interface to minimization algorithms for multivariate
        functions. See the 'Nelder-Mead' `method` in particular.

    Notes
    -----
    Uses a Nelder-Mead simplex algorithm; robustly simple but usually
    slower than derivative-based methods. Both the ftol and xtol
    criteria must be met for convergence.

    References
    ----------
    .. [1] Nelder, J.A. and Mead, R. (1965), "A simplex method for
           function minimization", The Computer Journal, 7, pp. 308-313
    .. [2] Wright, M.H. (1996), "Direct Search Methods: Once Scorned,
           Now Respectable", in Numerical Analysis 1995, D.F. Griffiths
           and G.A. Watson (Eds.), Addison Wesley Longman, Harlow, UK,
           pp. 191-208.
    """
    # Translate legacy keyword names onto the solver's option names.
    options = {'xatol': xtol,
               'fatol': ftol,
               'maxiter': maxiter,
               'maxfev': maxfun,
               'disp': disp,
               'return_all': retall,
               'initial_simplex': initial_simplex}
    res = _minimize_neldermead(func, x0, args, callback=callback, **options)
    if not full_output:
        return (res['x'], res['allvecs']) if retall else res['x']
    out = (res['x'], res['fun'], res['nit'], res['nfev'], res['status'])
    if retall:
        out += (res['allvecs'],)
    return out
def _minimize_neldermead(func, x0, args=(), callback=None,
                         maxiter=None, maxfev=None, disp=False,
                         return_all=False, initial_simplex=None,
                         xatol=1e-4, fatol=1e-4, **unknown_options):
    """
    Minimization of scalar function of one or more variables using the
    Nelder-Mead algorithm.

    Options
    -------
    disp : bool
        Set to True to print convergence messages.
    maxiter, maxfev : int
        Maximum allowed number of iterations and function evaluations.
        Will default to ``N*200``, where ``N`` is the number of
        variables, if neither `maxiter` or `maxfev` is set. If both
        `maxiter` and `maxfev` are set, minimization will stop at the
        first reached.
    initial_simplex : array_like of shape (N + 1, N)
        Initial simplex. If given, overrides `x0`.
        ``initial_simplex[j,:]`` should contain the coordinates of
        the j-th vertex of the ``N+1`` vertices in the simplex, where
        ``N`` is the dimension.
    xatol : float, optional
        Absolute error in xopt between iterations that is acceptable for
        convergence.
    fatol : number, optional
        Absolute error in func(xopt) between iterations that is acceptable for
        convergence.
    """
    # Backwards compatibility: accept the deprecated 'ftol'/'xtol' names
    # and map them onto 'fatol'/'xatol' unless the caller already moved
    # the new option away from its default.
    if 'ftol' in unknown_options:
        warnings.warn("ftol is deprecated for Nelder-Mead,"
                      " use fatol instead. If you specified both, only"
                      " fatol is used.",
                      DeprecationWarning)
        if (np.isclose(fatol, 1e-4) and
                not np.isclose(unknown_options['ftol'], 1e-4)):
            # only ftol was probably specified, use it.
            fatol = unknown_options['ftol']
        unknown_options.pop('ftol')
    if 'xtol' in unknown_options:
        warnings.warn("xtol is deprecated for Nelder-Mead,"
                      " use xatol instead. If you specified both, only"
                      " xatol is used.",
                      DeprecationWarning)
        if (np.isclose(xatol, 1e-4) and
                not np.isclose(unknown_options['xtol'], 1e-4)):
            # only xtol was probably specified, use it.
            xatol = unknown_options['xtol']
        unknown_options.pop('xtol')
    _check_unknown_options(unknown_options)
    maxfun = maxfev
    retall = return_all
    # Wrap func so the number of evaluations is tracked in fcalls[0].
    fcalls, func = wrap_function(func, args)
    # Standard Nelder-Mead coefficients: reflection (rho), expansion
    # (chi), contraction (psi) and shrink (sigma).
    rho = 1
    chi = 2
    psi = 0.5
    sigma = 0.5
    # Relative / absolute perturbations used to build the default simplex.
    nonzdelt = 0.05
    zdelt = 0.00025
    x0 = asfarray(x0).flatten()
    if initial_simplex is None:
        # Default simplex: x0 plus N vertices obtained by perturbing one
        # coordinate each (relative step for nonzero coordinates,
        # absolute step zdelt otherwise).
        N = len(x0)
        sim = numpy.zeros((N + 1, N), dtype=x0.dtype)
        sim[0] = x0
        for k in range(N):
            y = numpy.array(x0, copy=True)
            if y[k] != 0:
                y[k] = (1 + nonzdelt)*y[k]
            else:
                y[k] = zdelt
            sim[k + 1] = y
    else:
        sim = np.asfarray(initial_simplex).copy()
        if sim.ndim != 2 or sim.shape[0] != sim.shape[1] + 1:
            raise ValueError("`initial_simplex` should be an array of shape (N+1,N)")
        if len(x0) != sim.shape[1]:
            raise ValueError("Size of `initial_simplex` is not consistent with `x0`")
        N = sim.shape[1]
    if retall:
        allvecs = [sim[0]]
    # If neither are set, then set both to default
    if maxiter is None and maxfun is None:
        maxiter = N * 200
        maxfun = N * 200
    elif maxiter is None:
        # Convert remaining Nones, to np.inf, unless the other is np.inf, in
        # which case use the default to avoid unbounded iteration
        if maxfun == np.inf:
            maxiter = N * 200
        else:
            maxiter = np.inf
    elif maxfun is None:
        if maxiter == np.inf:
            maxfun = N * 200
        else:
            maxfun = np.inf
    one2np1 = list(range(1, N + 1))
    # Evaluate the function at every vertex of the simplex.
    fsim = numpy.zeros((N + 1,), float)
    for k in range(N + 1):
        fsim[k] = func(sim[k])
    ind = numpy.argsort(fsim)
    fsim = numpy.take(fsim, ind, 0)
    # sort so sim[0,:] has the lowest function value
    sim = numpy.take(sim, ind, 0)
    iterations = 1
    while (fcalls[0] < maxfun and iterations < maxiter):
        # Converged when the simplex is small (xatol) and the function
        # values at its vertices agree (fatol).
        if (numpy.max(numpy.ravel(numpy.abs(sim[1:] - sim[0]))) <= xatol and
                numpy.max(numpy.abs(fsim[0] - fsim[1:])) <= fatol):
            break
        # Centroid of every vertex except the worst one.
        xbar = numpy.add.reduce(sim[:-1], 0) / N
        # Reflect the worst vertex through the centroid.
        xr = (1 + rho) * xbar - rho * sim[-1]
        fxr = func(xr)
        doshrink = 0
        if fxr < fsim[0]:
            # Reflection beat the current best point: try expanding.
            xe = (1 + rho * chi) * xbar - rho * chi * sim[-1]
            fxe = func(xe)
            if fxe < fxr:
                sim[-1] = xe
                fsim[-1] = fxe
            else:
                sim[-1] = xr
                fsim[-1] = fxr
        else:  # fsim[0] <= fxr
            if fxr < fsim[-2]:
                sim[-1] = xr
                fsim[-1] = fxr
            else:  # fxr >= fsim[-2]
                # Perform contraction
                if fxr < fsim[-1]:
                    # Outside contraction, between centroid and xr.
                    xc = (1 + psi * rho) * xbar - psi * rho * sim[-1]
                    fxc = func(xc)
                    if fxc <= fxr:
                        sim[-1] = xc
                        fsim[-1] = fxc
                    else:
                        doshrink = 1
                else:
                    # Perform an inside contraction
                    xcc = (1 - psi) * xbar + psi * sim[-1]
                    fxcc = func(xcc)
                    if fxcc < fsim[-1]:
                        sim[-1] = xcc
                        fsim[-1] = fxcc
                    else:
                        doshrink = 1
            if doshrink:
                # Shrink all vertices towards the best one.
                for j in one2np1:
                    sim[j] = sim[0] + sigma * (sim[j] - sim[0])
                    fsim[j] = func(sim[j])
        # Re-sort so sim[0] is again the best vertex.
        ind = numpy.argsort(fsim)
        sim = numpy.take(sim, ind, 0)
        fsim = numpy.take(fsim, ind, 0)
        if callback is not None:
            callback(sim[0])
        iterations += 1
        if retall:
            allvecs.append(sim[0])
    x = sim[0]
    fval = numpy.min(fsim)
    warnflag = 0
    # Diagnose why the loop ended and pick the matching status message.
    if fcalls[0] >= maxfun:
        warnflag = 1
        msg = _status_message['maxfev']
        if disp:
            print('Warning: ' + msg)
    elif iterations >= maxiter:
        warnflag = 2
        msg = _status_message['maxiter']
        if disp:
            print('Warning: ' + msg)
    else:
        msg = _status_message['success']
        if disp:
            print(msg)
            print(" Current function value: %f" % fval)
            print(" Iterations: %d" % iterations)
            print(" Function evaluations: %d" % fcalls[0])
    result = OptimizeResult(fun=fval, nit=iterations, nfev=fcalls[0],
                            status=warnflag, success=(warnflag == 0),
                            message=msg, x=x, final_simplex=(sim, fsim))
    if retall:
        result['allvecs'] = allvecs
    return result
def _approx_fprime_helper(xk, f, epsilon, args=(), f0=None):
"""
See ``approx_fprime``. An optional initial function value arg is added.
"""
if f0 is None:
f0 = f(*((xk,) + args))
grad = numpy.zeros((len(xk),), float)
ei = numpy.zeros((len(xk),), float)
for k in range(len(xk)):
ei[k] = 1.0
d = epsilon * ei
grad[k] = (f(*((xk + d,) + args)) - f0) / d[k]
ei[k] = 0.0
return grad
def approx_fprime(xk, f, epsilon, *args):
    """Finite-difference approximation of the gradient of a scalar
    function.

    Parameters
    ----------
    xk : array_like
        Point at which to estimate the gradient of `f`.
    f : callable
        Scalar-valued function taking `xk` as its first argument; any
        extra arguments come from ``*args``.
    epsilon : array_like
        Forward-difference increment; a scalar applies the same step to
        every coordinate, an array gives one step per element of `xk`.
    \\*args : args, optional
        Additional arguments forwarded to `f`.

    Returns
    -------
    grad : ndarray
        Partial derivatives of `f` at `xk`, estimated one coordinate at
        a time with the forward difference
        ``(f(xk[i] + epsilon[i]) - f(xk[i])) / epsilon[i]``.

    See Also
    --------
    check_grad : Check correctness of gradient function against
        approx_fprime.

    Examples
    --------
    >>> from scipy import optimize
    >>> def func(x, c0, c1):
    ...     "Coordinate vector `x` should be an array of size two."
    ...     return c0 * x[0]**2 + c1*x[1]**2
    >>> x = np.ones(2)
    >>> c0, c1 = (1, 200)
    >>> eps = np.sqrt(np.finfo(float).eps)
    >>> optimize.approx_fprime(x, func, [eps, np.sqrt(200) * eps], c0, c1)
    array([   2.        ,  400.00004198])
    """
    # All of the work happens in the shared helper (which also powers
    # the optimizers' internal gradient approximations).
    grad = _approx_fprime_helper(xk, f, epsilon, args=args)
    return grad
def check_grad(func, grad, x0, *args, **kwargs):
    """Check the correctness of a gradient function by comparing it
    against a (forward) finite-difference approximation.

    Parameters
    ----------
    func : callable ``func(x0, *args)``
        Function whose derivative is to be checked.
    grad : callable ``grad(x0, *args)``
        Gradient of `func`.
    x0 : ndarray
        Point at which to compare `grad` against the finite-difference
        approximation built from `func`.
    args : \\*args, optional
        Extra arguments passed to `func` and `grad`.
    epsilon : float, optional
        Step size for the finite-difference approximation; defaults to
        ``sqrt(numpy.finfo(float).eps)``, about 1.49e-08.

    Returns
    -------
    err : float
        The 2-norm of the difference between ``grad(x0, *args)`` and
        the finite-difference approximation at `x0`.

    See Also
    --------
    approx_fprime

    Examples
    --------
    >>> def func(x):
    ...     return x[0]**2 - 0.5 * x[1]**3
    >>> def grad(x):
    ...     return [2 * x[0], -1.5 * x[1]**2]
    >>> from scipy.optimize import check_grad
    >>> check_grad(func, grad, [1.5, -1.5])
    2.9802322387695312e-08
    """
    step = kwargs.pop('epsilon', _epsilon)
    # epsilon is the only keyword this function understands.
    if kwargs:
        raise ValueError("Unknown keyword arguments: %r" %
                         (list(kwargs.keys()),))
    difference = grad(x0, *args) - approx_fprime(x0, func, step, *args)
    return sqrt(sum(difference**2))
def approx_fhess_p(x0, p, fprime, epsilon, *args):
    """Finite-difference approximation of the Hessian-vector product
    ``H(x0) @ p``, using a forward difference of the gradient `fprime`
    along the direction `p` with step `epsilon`."""
    grad_shifted = fprime(*((x0 + epsilon*p,) + args))
    grad_base = fprime(*((x0,) + args))
    return (grad_shifted - grad_base) / epsilon
class _LineSearchError(RuntimeError):
    # Raised internally when neither Wolfe line search finds an
    # acceptable step length; callers treat it as precision loss.
    pass
def _line_search_wolfe12(f, fprime, xk, pk, gfk, old_fval, old_old_fval,
                         **kwargs):
    """
    Same as line_search_wolfe1, but fall back to line_search_wolfe2 if
    suitable step length is not found, and raise an exception if a
    suitable step length is not found.

    Raises
    ------
    _LineSearchError
        If no suitable step size is found.
    """
    # extra_condition (if given) must accept (alpha, x_new, f_new, g_new)
    # and return True for an acceptable step; it is popped here because
    # line_search_wolfe1 does not understand it.
    extra_condition = kwargs.pop('extra_condition', None)
    ret = line_search_wolfe1(f, fprime, xk, pk, gfk,
                             old_fval, old_old_fval,
                             **kwargs)
    if ret[0] is not None and extra_condition is not None:
        xp1 = xk + ret[0] * pk
        # ret = (alpha, fc, gc, new_fval, old_fval, new_grad).
        if not extra_condition(ret[0], xp1, ret[3], ret[5]):
            # Reject step if extra_condition fails
            ret = (None,)
    if ret[0] is None:
        # line search failed: try different one.
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', LineSearchWarning)
            # Forward only the keyword options wolfe2 understands.
            kwargs2 = {}
            for key in ('c1', 'c2', 'amax'):
                if key in kwargs:
                    kwargs2[key] = kwargs[key]
            ret = line_search_wolfe2(f, fprime, xk, pk, gfk,
                                     old_fval, old_old_fval,
                                     extra_condition=extra_condition,
                                     **kwargs2)
    if ret[0] is None:
        raise _LineSearchError()
    return ret
def fmin_bfgs(f, x0, fprime=None, args=(), gtol=1e-5, norm=Inf,
              epsilon=_epsilon, maxiter=None, full_output=0, disp=1,
              retall=0, callback=None):
    """
    Minimize a function using the BFGS quasi-Newton algorithm of
    Broyden, Fletcher, Goldfarb, and Shanno.

    Parameters
    ----------
    f : callable f(x,*args)
        Objective function to be minimized.
    x0 : ndarray
        Initial guess.
    fprime : callable f'(x,*args), optional
        Gradient of f; approximated numerically when omitted.
    args : tuple, optional
        Extra arguments passed to f and fprime.
    gtol : float, optional
        Gradient norm must be less than gtol before successful
        termination.
    norm : float, optional
        Order of norm (Inf is max, -Inf is min).
    epsilon : int or ndarray, optional
        Step size used when fprime is approximated.
    maxiter : int, optional
        Maximum number of iterations to perform.
    full_output : bool, optional
        If True, also return fopt, gopt, Bopt, func_calls, grad_calls
        and warnflag.
    disp : bool, optional
        Print convergence message if True.
    retall : bool, optional
        Return a list of results at each iteration if True.
    callback : callable, optional
        Called after each iteration as ``callback(xk)``, where xk is
        the current parameter vector.

    Returns
    -------
    xopt : ndarray
        Parameters which minimize f, i.e. f(xopt) == fopt.
    fopt : float
        Minimum value (with ``full_output``).
    gopt : ndarray
        Gradient at the minimum, should be near 0 (with
        ``full_output``).
    Bopt : ndarray
        Estimated inverse Hessian at the minimum (with
        ``full_output``).
    func_calls : int
        Number of function calls made (with ``full_output``).
    grad_calls : int
        Number of gradient calls made (with ``full_output``).
    warnflag : integer
        1: maximum number of iterations exceeded; 2: gradient and/or
        function calls not changing (with ``full_output``).
    allvecs : list
        Iterates at each step; only returned if retall is True.

    See also
    --------
    minimize: Interface to minimization algorithms for multivariate
        functions. See the 'BFGS' `method` in particular.

    References
    ----------
    Wright, and Nocedal 'Numerical Optimization', 1999, pg. 198.
    """
    # Translate legacy keyword names onto the solver's option names.
    options = {'gtol': gtol,
               'norm': norm,
               'eps': epsilon,
               'disp': disp,
               'maxiter': maxiter,
               'return_all': retall}
    res = _minimize_bfgs(f, x0, args, fprime, callback=callback, **options)
    if not full_output:
        return (res['x'], res['allvecs']) if retall else res['x']
    out = (res['x'], res['fun'], res['jac'], res['hess_inv'],
           res['nfev'], res['njev'], res['status'])
    if retall:
        out += (res['allvecs'],)
    return out
def _minimize_bfgs(fun, x0, args=(), jac=None, callback=None,
                   gtol=1e-5, norm=Inf, eps=_epsilon, maxiter=None,
                   disp=False, return_all=False,
                   **unknown_options):
    """
    Minimization of scalar function of one or more variables using the
    BFGS algorithm.

    Options
    -------
    disp : bool
        Set to True to print convergence messages.
    maxiter : int
        Maximum number of iterations to perform.
    gtol : float
        Gradient norm must be less than `gtol` before successful
        termination.
    norm : float
        Order of norm (Inf is max, -Inf is min).
    eps : float or ndarray
        If `jac` is approximated, use this value for the step size.
    """
    _check_unknown_options(unknown_options)
    f = fun
    fprime = jac
    epsilon = eps
    retall = return_all
    x0 = asarray(x0).flatten()
    if x0.ndim == 0:
        x0.shape = (1,)
    if maxiter is None:
        # Default iteration budget scales with the problem dimension.
        maxiter = len(x0) * 200
    # Wrap the objective and gradient so call counts are recorded.
    func_calls, f = wrap_function(f, args)
    if fprime is None:
        grad_calls, myfprime = wrap_function(approx_fprime, (f, epsilon))
    else:
        grad_calls, myfprime = wrap_function(fprime, args)
    gfk = myfprime(x0)
    k = 0
    N = len(x0)
    # Hk approximates the INVERSE Hessian; start from the identity.
    I = numpy.eye(N, dtype=int)
    Hk = I
    # get needed blas functions
    syr = get_blas_funcs('syr', dtype='d')  # Symmetric rank-1 update
    syr2 = get_blas_funcs('syr2', dtype='d')  # Symmetric rank-2 update
    symv = get_blas_funcs('symv', dtype='d')  # Symmetric matrix-vector product
    # Sets the initial step guess to dx ~ 1
    old_fval = f(x0)
    old_old_fval = old_fval + np.linalg.norm(gfk) / 2
    xk = x0
    if retall:
        allvecs = [x0]
    sk = [2 * gtol]
    warnflag = 0
    gnorm = vecnorm(gfk, ord=norm)
    while (gnorm > gtol) and (k < maxiter):
        # Quasi-Newton search direction: pk = -Hk @ gfk.
        pk = symv(-1, Hk, gfk)
        try:
            alpha_k, fc, gc, old_fval, old_old_fval, gfkp1 = \
                 _line_search_wolfe12(f, myfprime, xk, pk, gfk,
                                      old_fval, old_old_fval, amin=1e-100, amax=1e100)
        except _LineSearchError:
            # Line search failed to find a better solution.
            warnflag = 2
            break
        xkp1 = xk + alpha_k * pk
        if retall:
            allvecs.append(xkp1)
        sk = xkp1 - xk
        xk = xkp1
        if gfkp1 is None:
            gfkp1 = myfprime(xkp1)
        yk = gfkp1 - gfk
        gfk = gfkp1
        if callback is not None:
            callback(xk)
        gnorm = vecnorm(gfk, ord=norm)
        if (gnorm <= gtol):
            break
        if not numpy.isfinite(old_fval):
            # We correctly found +-Inf as optimal value, or something went
            # wrong.
            warnflag = 2
            break
        yk_sk = np.dot(yk, sk)
        try:  # historical guard (from Numeric); kept for extra safety
            rhok = 1.0 / yk_sk
        except ZeroDivisionError:
            rhok = 1000.0
            if disp:
                print("Divide-by-zero encountered: rhok assumed large")
        if isinf(rhok):  # numpy yields inf instead of raising; patch it too
            rhok = 1000.0
            if disp:
                print("Divide-by-zero encountered: rhok assumed large")
        # Heuristic to adjust Hk for k == 0,
        # described at Nocedal/Wright "Numerical Optimization"
        # p.143 formula (6.20)
        if k == 0:
            Hk = yk_sk / np.dot(yk, yk)*I
        # Implement BFGS update using the formula:
        # Hk <- Hk + ((Hk yk).T yk+sk.T yk)*(rhok**2)*sk sk.T -rhok*[(Hk yk)sk.T +sk(Hk yk).T]
        # This formula is equivalent to (6.17) from
        # Nocedal/Wright "Numerical Optimization"
        # written in a more efficient way for implementation.
        # Note: syr/syr2 only maintain one triangle of Hk; it is
        # symmetrized after the loop.
        Hk_yk = symv(1, Hk, yk)
        c = rhok**2 * (yk_sk+Hk_yk.dot(yk))
        Hk = syr2(-rhok, sk, Hk_yk, a=Hk)
        Hk = syr(c, sk, a=Hk)
        k += 1
    # The matrix Hk is recovered from the triangular (symmetric BLAS)
    # representation that was being used to store it.
    Hk_triu = numpy.triu(Hk)
    Hk_diag = numpy.diag(Hk)
    Hk = Hk_triu + Hk_triu.T - numpy.diag(Hk_diag)
    fval = old_fval
    if np.isnan(fval):
        # This can happen if the first call to f returned NaN;
        # the loop is then never entered.
        warnflag = 2
    # Diagnose why the loop ended and pick the matching status message.
    if warnflag == 2:
        msg = _status_message['pr_loss']
        if disp:
            print("Warning: " + msg)
            print(" Current function value: %f" % fval)
            print(" Iterations: %d" % k)
            print(" Function evaluations: %d" % func_calls[0])
            print(" Gradient evaluations: %d" % grad_calls[0])
    elif k >= maxiter:
        warnflag = 1
        msg = _status_message['maxiter']
        if disp:
            print("Warning: " + msg)
            print(" Current function value: %f" % fval)
            print(" Iterations: %d" % k)
            print(" Function evaluations: %d" % func_calls[0])
            print(" Gradient evaluations: %d" % grad_calls[0])
    else:
        msg = _status_message['success']
        if disp:
            print(msg)
            print(" Current function value: %f" % fval)
            print(" Iterations: %d" % k)
            print(" Function evaluations: %d" % func_calls[0])
            print(" Gradient evaluations: %d" % grad_calls[0])
    result = OptimizeResult(fun=fval, jac=gfk, hess_inv=Hk, nfev=func_calls[0],
                            njev=grad_calls[0], status=warnflag,
                            success=(warnflag == 0), message=msg, x=xk,
                            nit=k)
    if retall:
        result['allvecs'] = allvecs
    return result
def fmin_cg(f, x0, fprime=None, args=(), gtol=1e-5, norm=Inf, epsilon=_epsilon,
            maxiter=None, full_output=0, disp=1, retall=0, callback=None):
    """
    Minimize a function using a nonlinear conjugate gradient algorithm
    (the Polak-Ribiere variant).

    Parameters
    ----------
    f : callable, ``f(x, *args)``
        Objective function to be minimized; `x` must be a 1-D array of
        the variables being optimized, `args` holds the other (fixed)
        parameters of `f`.
    x0 : ndarray
        1-D initial estimate of the optimum.
    fprime : callable, ``fprime(x, *args)``, optional
        Function returning the gradient of `f` at `x` as a 1-D array.
        When omitted the gradient is approximated numerically (see
        `epsilon`).
    args : tuple, optional
        Extra fixed parameters passed to `f` and `fprime`.
    gtol : float, optional
        Stop when the norm of the gradient is less than `gtol`.
    norm : float, optional
        Order to use for the norm of the gradient
        (``-np.Inf`` is min, ``np.Inf`` is max).
    epsilon : float or ndarray, optional
        Step size(s) for the numerical gradient approximation; scalar
        or 1-D array. Defaults to ``sqrt(eps)``, about 1.5e-8.
    maxiter : int, optional
        Maximum number of iterations. Default is ``200 * len(x0)``.
    full_output : bool, optional
        If True, also return `fopt`, `func_calls`, `grad_calls` and
        `warnflag`.
    disp : bool, optional
        If True, print a convergence message.
    retall : bool, optional
        If True, also return the results of each iteration.
    callback : callable, optional
        Called after each iteration as ``callback(xk)``, with `xk` the
        current value of the parameter vector.

    Returns
    -------
    xopt : ndarray
        Parameters which minimize f, i.e. ``f(xopt) == fopt``.
    fopt : float, optional
        Minimum value found. Only returned if `full_output` is True.
    func_calls : int, optional
        Number of function calls made (with `full_output`).
    grad_calls : int, optional
        Number of gradient calls made (with `full_output`).
    warnflag : int, optional
        Status flag (with `full_output`): 0 success, 1 maximum number
        of iterations exceeded, 2 gradient and/or function calls not
        changing (possible precision loss).
    allvecs : list of ndarray, optional
        Results at each iteration. Only returned if `retall` is True.

    See Also
    --------
    minimize : common interface to all `scipy.optimize` algorithms; it
        provides an alternative way to call ``fmin_cg`` via
        ``method='CG'``.

    Notes
    -----
    Conjugate gradient methods tend to work best when `f` is smooth,
    roughly quadratic near the optimum, has a single minimizer, and the
    initial guess is reasonably close to it.

    References
    ----------
    .. [1] Wright & Nocedal, "Numerical Optimization", 1999, pp. 120-122.
    """
    # Translate legacy keyword names onto the solver's option names.
    options = {'gtol': gtol,
               'norm': norm,
               'eps': epsilon,
               'disp': disp,
               'maxiter': maxiter,
               'return_all': retall}
    res = _minimize_cg(f, x0, args, fprime, callback=callback, **options)
    if not full_output:
        return (res['x'], res['allvecs']) if retall else res['x']
    out = (res['x'], res['fun'], res['nfev'], res['njev'], res['status'])
    if retall:
        out += (res['allvecs'],)
    return out
def _minimize_cg(fun, x0, args=(), jac=None, callback=None,
                 gtol=1e-5, norm=Inf, eps=_epsilon, maxiter=None,
                 disp=False, return_all=False,
                 **unknown_options):
    """
    Minimization of scalar function of one or more variables using the
    conjugate gradient algorithm.
    Options
    -------
    disp : bool
        Set to True to print convergence messages.
    maxiter : int
        Maximum number of iterations to perform.
    gtol : float
        Gradient norm must be less than `gtol` before successful
        termination.
    norm : float
        Order of norm (Inf is max, -Inf is min).
    eps : float or ndarray
        If `jac` is approximated, use this value for the step size.

    Notes
    -----
    Uses the Polak-Ribiere+ variant of nonlinear CG with a strong-Wolfe
    line search augmented by an explicit sufficient-descent check (see
    the reference cited inside ``descent_condition``).  Returns an
    OptimizeResult whose ``status`` is 0 on success, 1 if `maxiter` was
    reached, and 2 if the line search failed.
    """
    _check_unknown_options(unknown_options)
    # Rebind public option names to the historical local names used below.
    f = fun
    fprime = jac
    epsilon = eps
    retall = return_all
    x0 = asarray(x0).flatten()
    if maxiter is None:
        # Default iteration budget scales with the problem dimension.
        maxiter = len(x0) * 200
    # wrap_function returns (call-counter-list, wrapped-callable).
    func_calls, f = wrap_function(f, args)
    if fprime is None:
        # No analytic gradient: fall back to finite differences with step eps.
        grad_calls, myfprime = wrap_function(approx_fprime, (f, epsilon))
    else:
        grad_calls, myfprime = wrap_function(fprime, args)
    gfk = myfprime(x0)
    k = 0
    xk = x0
    # Sets the initial step guess to dx ~ 1
    old_fval = f(xk)
    old_old_fval = old_fval + np.linalg.norm(gfk) / 2
    if retall:
        allvecs = [xk]
    warnflag = 0
    # Initial search direction is steepest descent.
    pk = -gfk
    gnorm = vecnorm(gfk, ord=norm)
    # sigma_3: sufficient-descent constant used in descent_condition below.
    sigma_3 = 0.01
    while (gnorm > gtol) and (k < maxiter):
        deltak = numpy.dot(gfk, gfk)
        # cached_step lets descent_condition hand its computed update back
        # to the outer loop so the work is not redone after the line search.
        cached_step = [None]
        def polak_ribiere_powell_step(alpha, gfkp1=None):
            # One PR+ update: new point, new direction, new gradient norm.
            xkp1 = xk + alpha * pk
            if gfkp1 is None:
                gfkp1 = myfprime(xkp1)
            yk = gfkp1 - gfk
            # max(0, ...) is the "+" restart that keeps beta_k nonnegative.
            beta_k = max(0, numpy.dot(yk, gfkp1) / deltak)
            pkp1 = -gfkp1 + beta_k * pk
            gnorm = vecnorm(gfkp1, ord=norm)
            return (alpha, xkp1, pkp1, gfkp1, gnorm)
        def descent_condition(alpha, xkp1, fp1, gfkp1):
            # Polak-Ribiere+ needs an explicit check of a sufficient
            # descent condition, which is not guaranteed by strong Wolfe.
            #
            # See Gilbert & Nocedal, "Global convergence properties of
            # conjugate gradient methods for optimization",
            # SIAM J. Optimization 2, 21 (1992).
            cached_step[:] = polak_ribiere_powell_step(alpha, gfkp1)
            alpha, xk, pk, gfk, gnorm = cached_step
            # Accept step if it leads to convergence.
            if gnorm <= gtol:
                return True
            # Accept step if sufficient descent condition applies.
            return numpy.dot(pk, gfk) <= -sigma_3 * numpy.dot(gfk, gfk)
        try:
            alpha_k, fc, gc, old_fval, old_old_fval, gfkp1 = \
                     _line_search_wolfe12(f, myfprime, xk, pk, gfk, old_fval,
                                          old_old_fval, c2=0.4, amin=1e-100, amax=1e100,
                                          extra_condition=descent_condition)
        except _LineSearchError:
            # Line search failed to find a better solution.
            warnflag = 2
            break
        # Reuse already computed results if possible
        if alpha_k == cached_step[0]:
            alpha_k, xk, pk, gfk, gnorm = cached_step
        else:
            alpha_k, xk, pk, gfk, gnorm = polak_ribiere_powell_step(alpha_k, gfkp1)
        if retall:
            allvecs.append(xk)
        if callback is not None:
            callback(xk)
        k += 1
    fval = old_fval
    if warnflag == 2:
        # Line-search failure ("precision loss") path.
        msg = _status_message['pr_loss']
        if disp:
            print("Warning: " + msg)
            print(" Current function value: %f" % fval)
            print(" Iterations: %d" % k)
            print(" Function evaluations: %d" % func_calls[0])
            print(" Gradient evaluations: %d" % grad_calls[0])
    elif k >= maxiter:
        warnflag = 1
        msg = _status_message['maxiter']
        if disp:
            print("Warning: " + msg)
            print(" Current function value: %f" % fval)
            print(" Iterations: %d" % k)
            print(" Function evaluations: %d" % func_calls[0])
            print(" Gradient evaluations: %d" % grad_calls[0])
    else:
        msg = _status_message['success']
        if disp:
            print(msg)
            print(" Current function value: %f" % fval)
            print(" Iterations: %d" % k)
            print(" Function evaluations: %d" % func_calls[0])
            print(" Gradient evaluations: %d" % grad_calls[0])
    result = OptimizeResult(fun=fval, jac=gfk, nfev=func_calls[0],
                            njev=grad_calls[0], status=warnflag,
                            success=(warnflag == 0), message=msg, x=xk,
                            nit=k)
    if retall:
        result['allvecs'] = allvecs
    return result
def fmin_ncg(f, x0, fprime, fhess_p=None, fhess=None, args=(), avextol=1e-5,
             epsilon=_epsilon, maxiter=None, full_output=0, disp=1, retall=0,
             callback=None):
    """
    Unconstrained minimization of a function using the Newton-CG method.
    Parameters
    ----------
    f : callable ``f(x, *args)``
        Objective function to be minimized.
    x0 : ndarray
        Initial guess.
    fprime : callable ``f'(x, *args)``
        Gradient of f.
    fhess_p : callable ``fhess_p(x, p, *args)``, optional
        Function which computes the Hessian of f times an
        arbitrary vector, p.
    fhess : callable ``fhess(x, *args)``, optional
        Function to compute the Hessian matrix of f.
    args : tuple, optional
        Extra arguments passed to f, fprime, fhess_p, and fhess
        (the same set of extra arguments is supplied to all of
        these functions).
    epsilon : float or ndarray, optional
        If fhess is approximated, use this value for the step size.
    callback : callable, optional
        An optional user-supplied function which is called after
        each iteration. Called as callback(xk), where xk is the
        current parameter vector.
    avextol : float, optional
        Convergence is assumed when the average relative error in
        the minimizer falls below this amount.
    maxiter : int, optional
        Maximum number of iterations to perform.
    full_output : bool, optional
        If True, return the optional outputs.
    disp : bool, optional
        If True, print convergence message.
    retall : bool, optional
        If True, return a list of results at each iteration.
    Returns
    -------
    xopt : ndarray
        Parameters which minimize f, i.e. ``f(xopt) == fopt``.
    fopt : float
        Value of the function at xopt, i.e. ``fopt = f(xopt)``.
    fcalls : int
        Number of function calls made.
    gcalls : int
        Number of gradient calls made.
    hcalls : int
        Number of hessian calls made.
    warnflag : int
        Warnings generated by the algorithm.
        1 : Maximum number of iterations exceeded.
    allvecs : list
        The result at each iteration, if retall is True (see below).
    See also
    --------
    minimize: Interface to minimization algorithms for multivariate
        functions. See the 'Newton-CG' `method` in particular.
    Notes
    -----
    Only one of `fhess_p` or `fhess` need to be given.  If `fhess`
    is provided, then `fhess_p` will be ignored.  If neither `fhess`
    nor `fhess_p` is provided, then the hessian product will be
    approximated using finite differences on `fprime`. `fhess_p`
    must compute the hessian times an arbitrary vector.  If it is not
    given, finite-differences on `fprime` are used to compute
    it.
    Newton-CG methods are also called truncated Newton methods. This
    function differs from scipy.optimize.fmin_tnc because
    1. scipy.optimize.fmin_ncg is written purely in python using numpy
        and scipy while scipy.optimize.fmin_tnc calls a C function.
    2. scipy.optimize.fmin_ncg is only for unconstrained minimization
        while scipy.optimize.fmin_tnc is for unconstrained minimization
        or box constrained minimization. (Box constraints give
        lower and upper bounds for each variable separately.)
    References
    ----------
    Wright & Nocedal, 'Numerical Optimization', 1999, pg. 140.
    """
    # Translate the legacy keyword names onto the solver's option names
    # and delegate all of the actual work to _minimize_newtoncg.
    res = _minimize_newtoncg(f, x0, args, fprime, fhess, fhess_p,
                             callback=callback, xtol=avextol, eps=epsilon,
                             maxiter=maxiter, disp=disp, return_all=retall)
    if not full_output:
        # Legacy short form: just the minimizer (plus the iterate history
        # when requested).
        return (res['x'], res['allvecs']) if retall else res['x']
    retlist = (res['x'], res['fun'], res['nfev'], res['njev'],
               res['nhev'], res['status'])
    if retall:
        retlist += (res['allvecs'],)
    return retlist
def _minimize_newtoncg(fun, x0, args=(), jac=None, hess=None, hessp=None,
                       callback=None, xtol=1e-5, eps=_epsilon, maxiter=None,
                       disp=False, return_all=False,
                       **unknown_options):
    """
    Minimization of scalar function of one or more variables using the
    Newton-CG algorithm.
    Note that the `jac` parameter (Jacobian) is required.
    Options
    -------
    disp : bool
        Set to True to print convergence messages.
    xtol : float
        Average relative error in solution `xopt` acceptable for
        convergence.
    maxiter : int
        Maximum number of iterations to perform.
    eps : float or ndarray
        If `jac` is approximated, use this value for the step size.

    Notes
    -----
    Outer loop: Newton iterations with a Wolfe line search.  Inner loop:
    (truncated) conjugate gradient on the Newton system
    ``hess(xk) p = -jac(xk)``.  Status codes in the returned
    OptimizeResult: 0 success, 1 maxiter, 2 line-search failure,
    3 CG failure (Hessian not positive definite).
    """
    _check_unknown_options(unknown_options)
    if jac is None:
        raise ValueError('Jacobian is required for Newton-CG method')
    # Rebind public option names to the historical local names used below.
    f = fun
    fprime = jac
    fhess_p = hessp
    fhess = hess
    avextol = xtol
    epsilon = eps
    retall = return_all
    def terminate(warnflag, msg):
        # Shared exit path: optionally prints a summary, then builds the
        # OptimizeResult from the enclosing scope's current state.
        if disp:
            print(msg)
            print(" Current function value: %f" % old_fval)
            print(" Iterations: %d" % k)
            print(" Function evaluations: %d" % fcalls[0])
            print(" Gradient evaluations: %d" % gcalls[0])
            print(" Hessian evaluations: %d" % hcalls)
        fval = old_fval
        result = OptimizeResult(fun=fval, jac=gfk, nfev=fcalls[0],
                                njev=gcalls[0], nhev=hcalls, status=warnflag,
                                success=(warnflag == 0), message=msg, x=xk,
                                nit=k)
        if retall:
            result['allvecs'] = allvecs
        return result
    x0 = asarray(x0).flatten()
    # wrap_function returns (call-counter-list, wrapped-callable).
    fcalls, f = wrap_function(f, args)
    gcalls, fprime = wrap_function(fprime, args)
    hcalls = 0
    if maxiter is None:
        maxiter = len(x0)*200
    # Budget for the inner CG solve; scales with dimension.
    cg_maxiter = 20*len(x0)
    xtol = len(x0) * avextol
    # update holds the last Newton step; seeded > xtol so the loop runs.
    update = [2 * xtol]
    xk = x0
    if retall:
        allvecs = [xk]
    k = 0
    old_fval = f(x0)
    old_old_fval = None
    float64eps = numpy.finfo(numpy.float64).eps
    while numpy.add.reduce(numpy.abs(update)) > xtol:
        if k >= maxiter:
            msg = "Warning: " + _status_message['maxiter']
            return terminate(1, msg)
        # Compute a search direction pk by applying the CG method to
        # del2 f(xk) p = - grad f(xk) starting from 0.
        b = -fprime(xk)
        maggrad = numpy.add.reduce(numpy.abs(b))
        # Forcing sequence: terminate CG when the residual drops below
        # eta * ||grad|| (truncated-Newton inexactness rule).
        eta = numpy.min([0.5, numpy.sqrt(maggrad)])
        termcond = eta * maggrad
        xsupi = zeros(len(x0), dtype=x0.dtype)
        ri = -b
        psupi = -ri
        i = 0
        dri0 = numpy.dot(ri, ri)
        if fhess is not None: # you want to compute hessian once.
            A = fhess(*(xk,) + args)
            hcalls = hcalls + 1
        for k2 in xrange(cg_maxiter):
            if numpy.add.reduce(numpy.abs(ri)) <= termcond:
                break
            if fhess is None:
                if fhess_p is None:
                    # Approximate the Hessian-vector product by finite
                    # differencing the gradient.
                    Ap = approx_fhess_p(xk, psupi, fprime, epsilon)
                else:
                    Ap = fhess_p(xk, psupi, *args)
                    hcalls = hcalls + 1
            else:
                Ap = numpy.dot(A, psupi)
            # check curvature
            Ap = asarray(Ap).squeeze() # get rid of matrices...
            curv = numpy.dot(psupi, Ap)
            if 0 <= curv <= 3 * float64eps:
                break
            elif curv < 0:
                if (i > 0):
                    break
                else:
                    # fall back to steepest descent direction
                    xsupi = dri0 / (-curv) * b
                    break
            alphai = dri0 / curv
            xsupi = xsupi + alphai * psupi
            ri = ri + alphai * Ap
            dri1 = numpy.dot(ri, ri)
            betai = dri1 / dri0
            psupi = -ri + betai * psupi
            i = i + 1
            dri0 = dri1 # update numpy.dot(ri,ri) for next time.
        else:
            # curvature keeps increasing, bail out
            msg = ("Warning: CG iterations didn't converge. The Hessian is not "
                   "positive definite.")
            return terminate(3, msg)
        pk = xsupi # search direction is solution to system.
        gfk = -b # gradient at xk
        try:
            alphak, fc, gc, old_fval, old_old_fval, gfkp1 = \
                     _line_search_wolfe12(f, fprime, xk, pk, gfk,
                                          old_fval, old_old_fval)
        except _LineSearchError:
            # Line search failed to find a better solution.
            msg = "Warning: " + _status_message['pr_loss']
            return terminate(2, msg)
        update = alphak * pk
        xk = xk + update # upcast if necessary
        if callback is not None:
            callback(xk)
        if retall:
            allvecs.append(xk)
        k += 1
    else:
        # while-else: loop condition went False => step small enough.
        msg = _status_message['success']
        return terminate(0, msg)
def fminbound(func, x1, x2, args=(), xtol=1e-5, maxfun=500,
              full_output=0, disp=1):
    """Bounded minimization for scalar functions.
    Parameters
    ----------
    func : callable f(x,*args)
        Objective function to be minimized (must accept and return scalars).
    x1, x2 : float or array scalar
        The optimization bounds.
    args : tuple, optional
        Extra arguments passed to function.
    xtol : float, optional
        The convergence tolerance.
    maxfun : int, optional
        Maximum number of function evaluations allowed.
    full_output : bool, optional
        If True, return optional outputs.
    disp : int, optional
        If non-zero, print messages.
            0 : no message printing.
            1 : non-convergence notification messages only.
            2 : print a message on convergence too.
            3 : print iteration results.
    Returns
    -------
    xopt : ndarray
        Parameters (over given interval) which minimize the
        objective function.
    fval : number
        The function value at the minimum point.
    ierr : int
        An error flag (0 if converged, 1 if maximum number of
        function calls reached).
    numfunc : int
      The number of function calls made.
    See also
    --------
    minimize_scalar: Interface to minimization algorithms for scalar
        univariate functions. See the 'Bounded' `method` in particular.
    Notes
    -----
    Finds a local minimizer of the scalar function `func` in the
    interval x1 < xopt < x2 using Brent's method.  (See `brent`
    for auto-bracketing).
    Examples
    --------
    `fminbound` finds the minimum of the function in the given range.
    The following examples illustrate the same
    >>> def f(x):
    ...     return x**2
    >>> from scipy import optimize
    >>> minimum = optimize.fminbound(f, -1, 2)
    >>> minimum
    0.0
    >>> minimum = optimize.fminbound(f, 1, 2)
    >>> minimum
    1.0000059608609866
    """
    # Thin legacy wrapper: forward to the bounded scalar minimizer,
    # mapping xtol/maxfun onto its xatol/maxiter options.
    res = _minimize_scalar_bounded(func, (x1, x2), args,
                                   xatol=xtol, maxiter=maxfun, disp=disp)
    if not full_output:
        return res['x']
    return res['x'], res['fun'], res['status'], res['nfev']
def _minimize_scalar_bounded(func, bounds, args=(),
                             xatol=1e-5, maxiter=500, disp=0,
                             **unknown_options):
    """
    Bounded scalar minimization via Brent's method (golden-section
    steps with opportunistic parabolic interpolation) on ``bounds``.
    Options
    -------
    maxiter : int
        Maximum number of iterations to perform.
    disp : bool
        Set to True to print convergence messages.
    xatol : float
        Absolute error in solution `xopt` acceptable for convergence.
    """
    _check_unknown_options(unknown_options)
    # Historically `maxiter` here bounds the *function evaluation* count.
    maxfun = maxiter
    # Test bounds are of correct form
    if len(bounds) != 2:
        raise ValueError('bounds must have two elements.')
    x1, x2 = bounds
    if not (is_array_scalar(x1) and is_array_scalar(x2)):
        raise ValueError("Optimisation bounds must be scalars"
                         " or array scalars.")
    if x1 > x2:
        raise ValueError("The lower bound exceeds the upper bound.")
    flag = 0
    header = ' Func-count x f(x) Procedure'
    step = ' initial'
    sqrt_eps = sqrt(2.2e-16)
    # golden_mean = 2 - golden ratio, the classic section fraction.
    golden_mean = 0.5 * (3.0 - sqrt(5.0))
    a, b = x1, x2
    # fulc/nfc/xf: worst, second-best, and best points tracked so far.
    fulc = a + golden_mean * (b - a)
    nfc, xf = fulc, fulc
    # rat: current trial step; e: step before last (parabolic memory).
    rat = e = 0.0
    x = xf
    fx = func(x, *args)
    num = 1
    fmin_data = (1, xf, fx)
    ffulc = fnfc = fx
    xm = 0.5 * (a + b)
    tol1 = sqrt_eps * numpy.abs(xf) + xatol / 3.0
    tol2 = 2.0 * tol1
    if disp > 2:
        print(" ")
        print(header)
        print("%5.0f %12.6g %12.6g %s" % (fmin_data + (step,)))
    while (numpy.abs(xf - xm) > (tol2 - 0.5 * (b - a))):
        golden = 1
        # Check for parabolic fit
        if numpy.abs(e) > tol1:
            golden = 0
            r = (xf - nfc) * (fx - ffulc)
            q = (xf - fulc) * (fx - fnfc)
            p = (xf - fulc) * q - (xf - nfc) * r
            q = 2.0 * (q - r)
            if q > 0.0:
                p = -p
            q = numpy.abs(q)
            r = e
            e = rat
            # Check for acceptability of parabola
            if ((numpy.abs(p) < numpy.abs(0.5*q*r)) and (p > q*(a - xf)) and
                    (p < q * (b - xf))):
                rat = (p + 0.0) / q
                x = xf + rat
                step = ' parabolic'
                # Keep the trial point away from the interval endpoints.
                if ((x - a) < tol2) or ((b - x) < tol2):
                    si = numpy.sign(xm - xf) + ((xm - xf) == 0)
                    rat = tol1 * si
            else: # do a golden section step
                golden = 1
        if golden: # Do a golden-section step
            if xf >= xm:
                e = a - xf
            else:
                e = b - xf
            rat = golden_mean*e
            step = ' golden'
        # Never step by less than tol1; si preserves the step's sign.
        si = numpy.sign(rat) + (rat == 0)
        x = xf + si * numpy.max([numpy.abs(rat), tol1])
        fu = func(x, *args)
        num += 1
        fmin_data = (num, x, fu)
        if disp > 2:
            print("%5.0f %12.6g %12.6g %s" % (fmin_data + (step,)))
        if fu <= fx:
            # New point is the best seen: shrink the far side, demote the
            # previous best/second-best down the bookkeeping chain.
            if x >= xf:
                a = xf
            else:
                b = xf
            fulc, ffulc = nfc, fnfc
            nfc, fnfc = xf, fx
            xf, fx = x, fu
        else:
            # New point is worse: it still tightens one bound and may
            # replace the second-best/worst tracked points.
            if x < xf:
                a = x
            else:
                b = x
            if (fu <= fnfc) or (nfc == xf):
                fulc, ffulc = nfc, fnfc
                nfc, fnfc = x, fu
            elif (fu <= ffulc) or (fulc == xf) or (fulc == nfc):
                fulc, ffulc = x, fu
        xm = 0.5 * (a + b)
        tol1 = sqrt_eps * numpy.abs(xf) + xatol / 3.0
        tol2 = 2.0 * tol1
        if num >= maxfun:
            flag = 1
            break
    fval = fx
    if disp > 0:
        _endprint(x, flag, fval, maxfun, xatol, disp)
    result = OptimizeResult(fun=fval, status=flag, success=(flag == 0),
                            message={0: 'Solution found.',
                                     1: 'Maximum number of function calls '
                                        'reached.'}.get(flag, ''),
                            x=xf, nfev=num)
    return result
class Brent:
    """Stateful implementation of Brent's method for 1-D minimization.

    Usage: construct, optionally ``set_bracket``, call ``optimize()``,
    then read results with ``get_result()``.  Combines golden-section
    steps with inverse parabolic interpolation.
    """
    #need to rethink design of __init__
    def __init__(self, func, args=(), tol=1.48e-8, maxiter=500,
                 full_output=0):
        self.func = func
        self.args = args
        self.tol = tol
        self.maxiter = maxiter
        # _mintol: absolute floor added to the relative tolerance.
        self._mintol = 1.0e-11
        # _cg: golden-section fraction (2 - golden ratio).
        self._cg = 0.3819660
        # Result fields, populated by optimize().
        self.xmin = None
        self.fval = None
        self.iter = 0
        self.funcalls = 0
    # need to rethink design of set_bracket (new options, etc)
    def set_bracket(self, brack=None):
        # brack may be None (auto), a pair (xa, xb), or a full triple.
        self.brack = brack
    def get_bracket_info(self):
        """Resolve ``self.brack`` into a validated bracketing triple.

        Returns (xa, xb, xc, fa, fb, fc, funcalls) with xa < xb < xc and
        fb < fa, fb < fc.  Raises ValueError for malformed brackets.
        """
        #set up
        func = self.func
        args = self.args
        brack = self.brack
        ### BEGIN core bracket_info code ###
        ### carefully DOCUMENT any CHANGES in core ##
        if brack is None:
            xa, xb, xc, fa, fb, fc, funcalls = bracket(func, args=args)
        elif len(brack) == 2:
            xa, xb, xc, fa, fb, fc, funcalls = bracket(func, xa=brack[0],
                                                       xb=brack[1], args=args)
        elif len(brack) == 3:
            xa, xb, xc = brack
            if (xa > xc): # swap so xa < xc can be assumed
                xc, xa = xa, xc
            if not ((xa < xb) and (xb < xc)):
                raise ValueError("Not a bracketing interval.")
            fa = func(*((xa,) + args))
            fb = func(*((xb,) + args))
            fc = func(*((xc,) + args))
            if not ((fb < fa) and (fb < fc)):
                raise ValueError("Not a bracketing interval.")
            funcalls = 3
        else:
            raise ValueError("Bracketing interval must be "
                             "length 2 or 3 sequence.")
        ### END core bracket_info code ###
        return xa, xb, xc, fa, fb, fc, funcalls
    def optimize(self):
        """Run Brent's method; stores xmin/fval/iter/funcalls on self."""
        # set up for optimization
        func = self.func
        xa, xb, xc, fa, fb, fc, funcalls = self.get_bracket_info()
        _mintol = self._mintol
        _cg = self._cg
        #################################
        #BEGIN CORE ALGORITHM
        #################################
        # x: best point; w: second best; v: previous w.
        x = w = v = xb
        fw = fv = fx = func(*((x,) + self.args))
        if (xa < xc):
            a = xa
            b = xc
        else:
            a = xc
            b = xa
        deltax = 0.0
        # NOTE(review): funcalls from get_bracket_info is discarded here;
        # the reported count covers only this loop — confirm intended.
        funcalls = 1
        iter = 0
        while (iter < self.maxiter):
            tol1 = self.tol * numpy.abs(x) + _mintol
            tol2 = 2.0 * tol1
            xmid = 0.5 * (a + b)
            # check for convergence
            if numpy.abs(x - xmid) < (tol2 - 0.5 * (b - a)):
                break
            # XXX In the first iteration, rat is only bound in the true case
            # of this conditional. This used to cause an UnboundLocalError
            # (gh-4140). It should be set before the if (but to what?).
            if (numpy.abs(deltax) <= tol1):
                if (x >= xmid):
                    deltax = a - x # do a golden section step
                else:
                    deltax = b - x
                rat = _cg * deltax
            else: # do a parabolic step
                tmp1 = (x - w) * (fx - fv)
                tmp2 = (x - v) * (fx - fw)
                p = (x - v) * tmp2 - (x - w) * tmp1
                tmp2 = 2.0 * (tmp2 - tmp1)
                if (tmp2 > 0.0):
                    p = -p
                tmp2 = numpy.abs(tmp2)
                dx_temp = deltax
                deltax = rat
                # check parabolic fit
                if ((p > tmp2 * (a - x)) and (p < tmp2 * (b - x)) and
                        (numpy.abs(p) < numpy.abs(0.5 * tmp2 * dx_temp))):
                    rat = p * 1.0 / tmp2 # if parabolic step is useful.
                    u = x + rat
                    if ((u - a) < tol2 or (b - u) < tol2):
                        if xmid - x >= 0:
                            rat = tol1
                        else:
                            rat = -tol1
                else:
                    if (x >= xmid):
                        deltax = a - x # if it's not do a golden section step
                    else:
                        deltax = b - x
                    rat = _cg * deltax
            if (numpy.abs(rat) < tol1): # update by at least tol1
                if rat >= 0:
                    u = x + tol1
                else:
                    u = x - tol1
            else:
                u = x + rat
            fu = func(*((u,) + self.args)) # calculate new output value
            funcalls += 1
            if (fu > fx): # if it's bigger than current
                # Trial failed: tighten one bound, possibly demote w/v.
                if (u < x):
                    a = u
                else:
                    b = u
                if (fu <= fw) or (w == x):
                    v = w
                    w = u
                    fv = fw
                    fw = fu
                elif (fu <= fv) or (v == x) or (v == w):
                    v = u
                    fv = fu
            else:
                # Trial improved on x: shift the bound and promote u to x.
                if (u >= x):
                    a = x
                else:
                    b = x
                v = w
                w = x
                x = u
                fv = fw
                fw = fx
                fx = fu
            iter += 1
        #################################
        #END CORE ALGORITHM
        #################################
        self.xmin = x
        self.fval = fx
        self.iter = iter
        self.funcalls = funcalls
    def get_result(self, full_output=False):
        # full_output: also return fval, iteration and call counts.
        if full_output:
            return self.xmin, self.fval, self.iter, self.funcalls
        else:
            return self.xmin
def brent(func, args=(), brack=None, tol=1.48e-8, full_output=0, maxiter=500):
    """
    Given a function of one-variable and a possible bracket, return
    the local minimum of the function isolated to a fractional precision
    of tol.
    Parameters
    ----------
    func : callable f(x,*args)
        Objective function.
    args : tuple, optional
        Additional arguments (if present).
    brack : tuple, optional
        Either a triple (xa,xb,xc) where xa<xb<xc and func(xb) <
        func(xa), func(xc) or a pair (xa,xb) which are used as a
        starting interval for a downhill bracket search (see
        `bracket`). Providing the pair (xa,xb) does not always mean
        the obtained solution will satisfy xa<=x<=xb.
    tol : float, optional
        Stop if between iteration change is less than `tol`.
    full_output : bool, optional
        If True, return all output args (xmin, fval, iter,
        funcalls).
    maxiter : int, optional
        Maximum number of iterations in solution.
    Returns
    -------
    xmin : ndarray
        Optimum point.
    fval : float
        Optimum value.
    iter : int
        Number of iterations.
    funcalls : int
        Number of objective function evaluations made.
    See also
    --------
    minimize_scalar: Interface to minimization algorithms for scalar
        univariate functions. See the 'Brent' `method` in particular.
    Notes
    -----
    Uses inverse parabolic interpolation when possible to speed up
    convergence of golden section method.
    Does not ensure that the minimum lies in the range specified by
    `brack`. See `fminbound`.
    Examples
    --------
    We illustrate the behaviour of the function when `brack` is of
    size 2 and 3 respectively. In the case where `brack` is of the
    form (xa,xb), we can see for the given values, the output need
    not necessarily lie in the range (xa,xb).
    >>> def f(x):
    ...     return x**2
    >>> from scipy import optimize
    >>> minimum = optimize.brent(f,brack=(1,2))
    >>> minimum
    0.0
    >>> minimum = optimize.brent(f,brack=(-1,0.5,2))
    >>> minimum
    -2.7755575615628914e-17
    """
    # Thin legacy wrapper around the 'brent' scalar-minimization driver.
    res = _minimize_scalar_brent(func, brack, args, xtol=tol,
                                 maxiter=maxiter)
    if not full_output:
        return res['x']
    return res['x'], res['fun'], res['nit'], res['nfev']
def _minimize_scalar_brent(func, brack=None, args=(),
                           xtol=1.48e-8, maxiter=500,
                           **unknown_options):
    """
    Options
    -------
    maxiter : int
        Maximum number of iterations to perform.
    xtol : float
        Relative error in solution `xopt` acceptable for convergence.
    Notes
    -----
    Uses inverse parabolic interpolation when possible to speed up
    convergence of golden section method.
    """
    _check_unknown_options(unknown_options)
    if xtol < 0:
        raise ValueError('tolerance should be >= 0, got %r' % xtol)
    # Drive the stateful Brent helper class and repackage its outputs.
    optimizer = Brent(func=func, args=args, tol=xtol,
                      full_output=True, maxiter=maxiter)
    optimizer.set_bracket(brack)
    optimizer.optimize()
    x, fval, nit, nfev = optimizer.get_result(full_output=True)
    return OptimizeResult(fun=fval, x=x, nit=nit, nfev=nfev,
                          success=nit < maxiter)
def golden(func, args=(), brack=None, tol=_epsilon,
           full_output=0, maxiter=5000):
    """
    Return the minimum of a function of one variable using golden section
    method.
    Given a function of one variable and a possible bracketing interval,
    return the minimum of the function isolated to a fractional precision of
    tol.
    Parameters
    ----------
    func : callable func(x,*args)
        Objective function to minimize.
    args : tuple, optional
        Additional arguments (if present), passed to func.
    brack : tuple, optional
        Triple (a,b,c), where (a<b<c) and func(b) <
        func(a),func(c).  If bracket consists of two numbers (a,
        c), then they are assumed to be a starting interval for a
        downhill bracket search (see `bracket`); it doesn't always
        mean that obtained solution will satisfy a<=x<=c.
    tol : float, optional
        x tolerance stop criterion
    full_output : bool, optional
        If True, return optional outputs.
    maxiter : int
        Maximum number of iterations to perform.
    See also
    --------
    minimize_scalar: Interface to minimization algorithms for scalar
        univariate functions. See the 'Golden' `method` in particular.
    Notes
    -----
    Uses analog of bisection method to decrease the bracketed
    interval.
    Examples
    --------
    We illustrate the behaviour of the function when `brack` is of
    size 2 and 3 respectively. In the case where `brack` is of the
    form (xa,xb), we can see for the given values, the output need
    not necessarily lie in the range ``(xa, xb)``.
    >>> def f(x):
    ...     return x**2
    >>> from scipy import optimize
    >>> minimum = optimize.golden(f, brack=(1, 2))
    >>> minimum
    1.5717277788484873e-162
    >>> minimum = optimize.golden(f, brack=(-1, 0.5, 2))
    >>> minimum
    -1.5717277788484873e-162
    """
    # Thin legacy wrapper around the golden-section scalar minimizer.
    res = _minimize_scalar_golden(func, brack, args, xtol=tol,
                                  maxiter=maxiter)
    if not full_output:
        return res['x']
    return res['x'], res['fun'], res['nfev']
def _minimize_scalar_golden(func, brack=None, args=(),
                            xtol=_epsilon, maxiter=5000, **unknown_options):
    """
    Golden-section search for a scalar minimum inside a bracket.
    Options
    -------
    maxiter : int
        Maximum number of iterations to perform.
    xtol : float
        Relative error in solution `xopt` acceptable for convergence.
    """
    _check_unknown_options(unknown_options)
    tol = xtol
    # Resolve the bracket: auto-search, pair, or validated triple
    # (same logic as Brent.get_bracket_info).
    if brack is None:
        xa, xb, xc, fa, fb, fc, funcalls = bracket(func, args=args)
    elif len(brack) == 2:
        xa, xb, xc, fa, fb, fc, funcalls = bracket(func, xa=brack[0],
                                                   xb=brack[1], args=args)
    elif len(brack) == 3:
        xa, xb, xc = brack
        if (xa > xc): # swap so xa < xc can be assumed
            xc, xa = xa, xc
        if not ((xa < xb) and (xb < xc)):
            raise ValueError("Not a bracketing interval.")
        fa = func(*((xa,) + args))
        fb = func(*((xb,) + args))
        fc = func(*((xc,) + args))
        if not ((fb < fa) and (fb < fc)):
            raise ValueError("Not a bracketing interval.")
        funcalls = 3
    else:
        raise ValueError("Bracketing interval must be length 2 or 3 sequence.")
    _gR = 0.61803399 # golden ratio conjugate: 2.0/(1.0+sqrt(5.0))
    _gC = 1.0 - _gR
    x3 = xc
    x0 = xa
    # Place the two interior probes so the larger sub-interval is cut.
    if (numpy.abs(xc - xb) > numpy.abs(xb - xa)):
        x1 = xb
        x2 = xb + _gC * (xc - xb)
    else:
        x2 = xb
        x1 = xb - _gC * (xb - xa)
    f1 = func(*((x1,) + args))
    f2 = func(*((x2,) + args))
    funcalls += 2
    nit = 0
    for i in xrange(maxiter):
        # Stop when the bracket width is small relative to the probes.
        if numpy.abs(x3 - x0) <= tol * (numpy.abs(x1) + numpy.abs(x2)):
            break
        if (f2 < f1):
            # Minimum is in (x1, x3): shift the window right, reusing f2.
            x0 = x1
            x1 = x2
            x2 = _gR * x1 + _gC * x3
            f1 = f2
            f2 = func(*((x2,) + args))
        else:
            # Minimum is in (x0, x2): shift the window left, reusing f1.
            x3 = x2
            x2 = x1
            x1 = _gR * x2 + _gC * x0
            f2 = f1
            f1 = func(*((x1,) + args))
        funcalls += 1
        nit += 1
    if (f1 < f2):
        xmin = x1
        fval = f1
    else:
        xmin = x2
        fval = f2
    return OptimizeResult(fun=fval, nfev=funcalls, x=xmin, nit=nit,
                          success=nit < maxiter)
def bracket(func, xa=0.0, xb=1.0, args=(), grow_limit=110.0, maxiter=1000):
    """
    Bracket the minimum of the function.
    Given a function and distinct initial points, search in the
    downhill direction (as defined by the initital points) and return
    new points xa, xb, xc that bracket the minimum of the function
    f(xa) > f(xb) < f(xc). It doesn't always mean that obtained
    solution will satisfy xa<=x<=xb
    Parameters
    ----------
    func : callable f(x,*args)
        Objective function to minimize.
    xa, xb : float, optional
        Bracketing interval. Defaults `xa` to 0.0, and `xb` to 1.0.
    args : tuple, optional
        Additional arguments (if present), passed to `func`.
    grow_limit : float, optional
        Maximum grow limit.  Defaults to 110.0
    maxiter : int, optional
        Maximum number of iterations to perform. Defaults to 1000.
    Returns
    -------
    xa, xb, xc : float
        Bracket.
    fa, fb, fc : float
        Objective function values in bracket.
    funcalls : int
        Number of function evaluations made.
    Raises
    ------
    RuntimeError
        If more than `maxiter` downhill steps are taken.
    """
    _gold = 1.618034 # golden ratio: (1.0+sqrt(5.0))/2.0
    # Guard against division by a (near-)zero parabolic denominator.
    _verysmall_num = 1e-21
    fa = func(*(xa,) + args)
    fb = func(*(xb,) + args)
    if (fa < fb): # Switch so fa > fb
        xa, xb = xb, xa
        fa, fb = fb, fa
    # First probe: golden-ratio step downhill from xb.
    xc = xb + _gold * (xb - xa)
    fc = func(*((xc,) + args))
    funcalls = 3
    iter = 0
    while (fc < fb):
        # Still going downhill: try the parabolic-extrapolation minimum w
        # of the three current points, clamped by wlim.
        tmp1 = (xb - xa) * (fb - fc)
        tmp2 = (xb - xc) * (fb - fa)
        val = tmp2 - tmp1
        if numpy.abs(val) < _verysmall_num:
            denom = 2.0 * _verysmall_num
        else:
            denom = 2.0 * val
        w = xb - ((xb - xc) * tmp2 - (xb - xa) * tmp1) / denom
        wlim = xb + grow_limit * (xc - xb)
        if iter > maxiter:
            raise RuntimeError("Too many iterations.")
        iter += 1
        if (w - xc) * (xb - w) > 0.0:
            # w lies between xb and xc.
            fw = func(*((w,) + args))
            funcalls += 1
            if (fw < fc):
                # Minimum bracketed by (xb, w, xc).
                xa = xb
                xb = w
                fa = fb
                fb = fw
                return xa, xb, xc, fa, fb, fc, funcalls
            elif (fw > fb):
                # Minimum bracketed by (xa, xb, w).
                xc = w
                fc = fw
                return xa, xb, xc, fa, fb, fc, funcalls
            # Parabolic step didn't help; take a default golden step.
            w = xc + _gold * (xc - xb)
            fw = func(*((w,) + args))
            funcalls += 1
        elif (w - wlim)*(wlim - xc) >= 0.0:
            # w overshot the growth limit: clamp to wlim.
            w = wlim
            fw = func(*((w,) + args))
            funcalls += 1
        elif (w - wlim)*(xc - w) > 0.0:
            # w is beyond xc but within the limit.
            fw = func(*((w,) + args))
            funcalls += 1
            if (fw < fc):
                # Still downhill at w: shift forward and take another
                # golden step past the new xc.
                xb = xc
                xc = w
                w = xc + _gold * (xc - xb)
                fb = fc
                fc = fw
                fw = func(*((w,) + args))
                funcalls += 1
        else:
            # Reject the parabolic w entirely; default golden step.
            w = xc + _gold * (xc - xb)
            fw = func(*((w,) + args))
            funcalls += 1
        # Slide the triple one step downhill and loop.
        xa = xb
        xb = xc
        xc = w
        fa = fb
        fb = fc
        fc = fw
    return xa, xb, xc, fa, fb, fc, funcalls
def _linesearch_powell(func, p, xi, tol=1e-3):
    """Line-search helper for Powell's method.

    Minimizes ``func(p + alpha*xi)`` over the scalar ``alpha`` with
    Brent's method and returns ``(fmin, new_point, step_taken)``.
    """
    alpha_min, fret, iter, num = brent(lambda alpha: func(p + alpha * xi),
                                       full_output=1, tol=tol)
    step = alpha_min * xi
    return squeeze(fret), p + step, step
def fmin_powell(func, x0, args=(), xtol=1e-4, ftol=1e-4, maxiter=None,
                maxfun=None, full_output=0, disp=1, retall=0, callback=None,
                direc=None):
    """
    Minimize a function using modified Powell's method. This method
    only uses function values, not derivatives.
    Parameters
    ----------
    func : callable f(x,*args)
        Objective function to be minimized.
    x0 : ndarray
        Initial guess.
    args : tuple, optional
        Extra arguments passed to func.
    callback : callable, optional
        An optional user-supplied function, called after each
        iteration.  Called as ``callback(xk)``, where ``xk`` is the
        current parameter vector.
    direc : ndarray, optional
        Initial direction set.
    xtol : float, optional
        Line-search error tolerance.
    ftol : float, optional
        Relative error in ``func(xopt)`` acceptable for convergence.
    maxiter : int, optional
        Maximum number of iterations to perform.
    maxfun : int, optional
        Maximum number of function evaluations to make.
    full_output : bool, optional
        If True, fopt, xi, direc, iter, funcalls, and
        warnflag are returned.
    disp : bool, optional
        If True, print convergence messages.
    retall : bool, optional
        If True, return a list of the solution at each iteration.
    Returns
    -------
    xopt : ndarray
        Parameter which minimizes `func`.
    fopt : number
        Value of function at minimum: ``fopt = func(xopt)``.
    direc : ndarray
        Current direction set.
    iter : int
        Number of iterations.
    funcalls : int
        Number of function calls made.
    warnflag : int
        Integer warning flag:
            1 : Maximum number of function evaluations.
            2 : Maximum number of iterations.
    allvecs : list
        List of solutions at each iteration.
    See also
    --------
    minimize: Interface to unconstrained minimization algorithms for
        multivariate functions. See the 'Powell' `method` in particular.
    Notes
    -----
    Uses a modification of Powell's method to find the minimum of
    a function of N variables. Powell's method is a conjugate
    direction method.
    The algorithm has two loops. The outer loop
    merely iterates over the inner loop. The inner loop minimizes
    over each current direction in the direction set. At the end
    of the inner loop, if certain conditions are met, the direction
    that gave the largest decrease is dropped and replaced with
    the difference between the current estimated x and the estimated
    x from the beginning of the inner-loop.
    The technical conditions for replacing the direction of greatest
    increase amount to checking that
    1. No further gain can be made along the direction of greatest increase
       from that iteration.
    2. The direction of greatest increase accounted for a large sufficient
       fraction of the decrease in the function value from that iteration of
       the inner loop.
    Examples
    --------
    >>> def f(x):
    ...     return x**2
    >>> from scipy import optimize
    >>> minimum = optimize.fmin_powell(f, -1)
    Optimization terminated successfully.
             Current function value: 0.000000
             Iterations: 2
             Function evaluations: 18
    >>> minimum
    array(0.0)
    References
    ----------
    Powell M.J.D. (1964) An efficient method for finding the minimum of a
    function of several variables without calculating derivatives,
    Computer Journal, 7 (2):155-162.
    Press W., Teukolsky S.A., Vetterling W.T., and Flannery B.P.:
    Numerical Recipes (any edition), Cambridge University Press
    """
    # Translate the legacy keyword names onto the solver's option names
    # and delegate all of the actual work to _minimize_powell.
    res = _minimize_powell(func, x0, args, callback=callback,
                           xtol=xtol, ftol=ftol, maxiter=maxiter,
                           maxfev=maxfun, disp=disp, direc=direc,
                           return_all=retall)
    if not full_output:
        return (res['x'], res['allvecs']) if retall else res['x']
    retlist = (res['x'], res['fun'], res['direc'], res['nit'],
               res['nfev'], res['status'])
    if retall:
        retlist += (res['allvecs'],)
    return retlist
def _minimize_powell(func, x0, args=(), callback=None,
                     xtol=1e-4, ftol=1e-4, maxiter=None, maxfev=None,
                     disp=False, direc=None, return_all=False,
                     **unknown_options):
    """
    Minimization of scalar function of one or more variables using the
    modified Powell algorithm.
    Options
    -------
    disp : bool
        Set to True to print convergence messages.
    xtol : float
        Relative error in solution `xopt` acceptable for convergence.
    ftol : float
        Relative error in ``fun(xopt)`` acceptable for convergence.
    maxiter, maxfev : int
        Maximum allowed number of iterations and function evaluations.
        Will default to ``N*1000``, where ``N`` is the number of
        variables, if neither `maxiter` or `maxfev` is set. If both
        `maxiter` and `maxfev` are set, minimization will stop at the
        first reached.
    direc : ndarray
        Initial set of direction vectors for the Powell method.

    Notes
    -----
    Derivative-free: each outer iteration line-searches along every
    direction in `direc`, then may replace the direction of largest
    decrease with the net displacement (Powell's update).  Status codes
    in the returned OptimizeResult: 0 success, 1 maxfev, 2 maxiter.
    """
    _check_unknown_options(unknown_options)
    maxfun = maxfev
    retall = return_all
    # we need to use a mutable object here that we can update in the
    # wrapper function
    fcalls, func = wrap_function(func, args)
    x = asarray(x0).flatten()
    if retall:
        allvecs = [x]
    N = len(x)
    # If neither are set, then set both to default
    if maxiter is None and maxfun is None:
        maxiter = N * 1000
        maxfun = N * 1000
    elif maxiter is None:
        # Convert remaining Nones, to np.inf, unless the other is np.inf, in
        # which case use the default to avoid unbounded iteration
        if maxfun == np.inf:
            maxiter = N * 1000
        else:
            maxiter = np.inf
    elif maxfun is None:
        if maxiter == np.inf:
            maxfun = N * 1000
        else:
            maxfun = np.inf
    if direc is None:
        # Default direction set: the coordinate axes.
        direc = eye(N, dtype=float)
    else:
        direc = asarray(direc, dtype=float)
    fval = squeeze(func(x))
    # x1 remembers the point at the start of each outer iteration.
    x1 = x.copy()
    iter = 0
    ilist = list(range(N))
    while True:
        fx = fval
        # bigind/delta track the direction with the largest decrease.
        bigind = 0
        delta = 0.0
        for i in ilist:
            direc1 = direc[i]
            fx2 = fval
            fval, x, direc1 = _linesearch_powell(func, x, direc1,
                                                 tol=xtol * 100)
            if (fx2 - fval) > delta:
                delta = fx2 - fval
                bigind = i
        iter += 1
        if callback is not None:
            callback(x)
        if retall:
            allvecs.append(x)
        # Relative decrease over the whole sweep; 1e-20 avoids a zero bound.
        bnd = ftol * (numpy.abs(fx) + numpy.abs(fval)) + 1e-20
        if 2.0 * (fx - fval) <= bnd:
            break
        if fcalls[0] >= maxfun:
            break
        if iter >= maxiter:
            break
        # Construct the extrapolated point
        direc1 = x - x1
        x2 = 2*x - x1
        x1 = x.copy()
        fx2 = squeeze(func(x2))
        if (fx > fx2):
            # Powell's criterion: replace the direction of greatest
            # decrease with the net displacement only if it pays off.
            t = 2.0*(fx + fx2 - 2.0*fval)
            temp = (fx - fval - delta)
            t *= temp*temp
            temp = fx - fx2
            t -= delta*temp*temp
            if t < 0.0:
                fval, x, direc1 = _linesearch_powell(func, x, direc1,
                                                     tol=xtol*100)
                direc[bigind] = direc[-1]
                direc[-1] = direc1
    warnflag = 0
    if fcalls[0] >= maxfun:
        warnflag = 1
        msg = _status_message['maxfev']
        if disp:
            print("Warning: " + msg)
    elif iter >= maxiter:
        warnflag = 2
        msg = _status_message['maxiter']
        if disp:
            print("Warning: " + msg)
    else:
        msg = _status_message['success']
        if disp:
            print(msg)
            print(" Current function value: %f" % fval)
            print(" Iterations: %d" % iter)
            print(" Function evaluations: %d" % fcalls[0])
    x = squeeze(x)
    result = OptimizeResult(fun=fval, direc=direc, nit=iter, nfev=fcalls[0],
                            status=warnflag, success=(warnflag == 0),
                            message=msg, x=x)
    if retall:
        result['allvecs'] = allvecs
    return result
def _endprint(x, flag, fval, maxfun, xtol, disp):
if flag == 0:
if disp > 1:
print("\nOptimization terminated successfully;\n"
"The returned value satisfies the termination criteria\n"
"(using xtol = ", xtol, ")")
if flag == 1:
if disp:
print("\nMaximum number of function evaluations exceeded --- "
"increase maxfun argument.\n")
return
def brute(func, ranges, args=(), Ns=20, full_output=0, finish=fmin,
disp=False):
"""Minimize a function over a given range by brute force.
Uses the "brute force" method, i.e. computes the function's value
at each point of a multidimensional grid of points, to find the global
minimum of the function.
The function is evaluated everywhere in the range with the datatype of the
first call to the function, as enforced by the ``vectorize`` NumPy
function. The value and type of the function evaluation returned when
``full_output=True`` are affected in addition by the ``finish`` argument
(see Notes).
Parameters
----------
func : callable
The objective function to be minimized. Must be in the
form ``f(x, *args)``, where ``x`` is the argument in
the form of a 1-D array and ``args`` is a tuple of any
additional fixed parameters needed to completely specify
the function.
ranges : tuple
Each component of the `ranges` tuple must be either a
"slice object" or a range tuple of the form ``(low, high)``.
The program uses these to create the grid of points on which
the objective function will be computed. See `Note 2` for
more detail.
args : tuple, optional
Any additional fixed parameters needed to completely specify
the function.
Ns : int, optional
Number of grid points along the axes, if not otherwise
specified. See `Note2`.
full_output : bool, optional
If True, return the evaluation grid and the objective function's
values on it.
finish : callable, optional
An optimization function that is called with the result of brute force
minimization as initial guess. `finish` should take `func` and
the initial guess as positional arguments, and take `args` as
keyword arguments. It may additionally take `full_output`
and/or `disp` as keyword arguments. Use None if no "polishing"
function is to be used. See Notes for more details.
disp : bool, optional
Set to True to print convergence messages.
Returns
-------
x0 : ndarray
A 1-D array containing the coordinates of a point at which the
objective function had its minimum value. (See `Note 1` for
which point is returned.)
fval : float
Function value at the point `x0`. (Returned when `full_output` is
True.)
grid : tuple
Representation of the evaluation grid. It has the same
length as `x0`. (Returned when `full_output` is True.)
Jout : ndarray
Function values at each point of the evaluation
grid, `i.e.`, ``Jout = func(*grid)``. (Returned
when `full_output` is True.)
See Also
--------
basinhopping, differential_evolution
Notes
-----
*Note 1*: The program finds the gridpoint at which the lowest value
of the objective function occurs. If `finish` is None, that is the
point returned. When the global minimum occurs within (or not very far
outside) the grid's boundaries, and the grid is fine enough, that
point will be in the neighborhood of the global minimum.
However, users often employ some other optimization program to
"polish" the gridpoint values, `i.e.`, to seek a more precise
(local) minimum near `brute's` best gridpoint.
The `brute` function's `finish` option provides a convenient way to do
that. Any polishing program used must take `brute's` output as its
initial guess as a positional argument, and take `brute's` input values
for `args` as keyword arguments, otherwise an error will be raised.
It may additionally take `full_output` and/or `disp` as keyword arguments.
`brute` assumes that the `finish` function returns either an
`OptimizeResult` object or a tuple in the form:
``(xmin, Jmin, ... , statuscode)``, where ``xmin`` is the minimizing
value of the argument, ``Jmin`` is the minimum value of the objective
function, "..." may be some other returned values (which are not used
by `brute`), and ``statuscode`` is the status code of the `finish` program.
Note that when `finish` is not None, the values returned are those
of the `finish` program, *not* the gridpoint ones. Consequently,
while `brute` confines its search to the input grid points,
the `finish` program's results usually will not coincide with any
gridpoint, and may fall outside the grid's boundary. Thus, if a
minimum only needs to be found over the provided grid points, make
sure to pass in `finish=None`.
*Note 2*: The grid of points is a `numpy.mgrid` object.
For `brute` the `ranges` and `Ns` inputs have the following effect.
Each component of the `ranges` tuple can be either a slice object or a
two-tuple giving a range of values, such as (0, 5). If the component is a
slice object, `brute` uses it directly. If the component is a two-tuple
range, `brute` internally converts it to a slice object that interpolates
`Ns` points from its low-value to its high-value, inclusive.
Examples
--------
We illustrate the use of `brute` to seek the global minimum of a function
of two variables that is given as the sum of a positive-definite
quadratic and two deep "Gaussian-shaped" craters. Specifically, define
the objective function `f` as the sum of three other functions,
``f = f1 + f2 + f3``. We suppose each of these has a signature
``(z, *params)``, where ``z = (x, y)``, and ``params`` and the functions
are as defined below.
>>> params = (2, 3, 7, 8, 9, 10, 44, -1, 2, 26, 1, -2, 0.5)
>>> def f1(z, *params):
... x, y = z
... a, b, c, d, e, f, g, h, i, j, k, l, scale = params
... return (a * x**2 + b * x * y + c * y**2 + d*x + e*y + f)
>>> def f2(z, *params):
... x, y = z
... a, b, c, d, e, f, g, h, i, j, k, l, scale = params
... return (-g*np.exp(-((x-h)**2 + (y-i)**2) / scale))
>>> def f3(z, *params):
... x, y = z
... a, b, c, d, e, f, g, h, i, j, k, l, scale = params
... return (-j*np.exp(-((x-k)**2 + (y-l)**2) / scale))
>>> def f(z, *params):
... return f1(z, *params) + f2(z, *params) + f3(z, *params)
Thus, the objective function may have local minima near the minimum
of each of the three functions of which it is composed. To
use `fmin` to polish its gridpoint result, we may then continue as
follows:
>>> rranges = (slice(-4, 4, 0.25), slice(-4, 4, 0.25))
>>> from scipy import optimize
>>> resbrute = optimize.brute(f, rranges, args=params, full_output=True,
... finish=optimize.fmin)
>>> resbrute[0] # global minimum
array([-1.05665192, 1.80834843])
>>> resbrute[1] # function value at global minimum
-3.4085818767
Note that if `finish` had been set to None, we would have gotten the
gridpoint [-1.0 1.75] where the rounded function value is -2.892.
"""
N = len(ranges)
if N > 40:
raise ValueError("Brute Force not possible with more "
"than 40 variables.")
lrange = list(ranges)
for k in range(N):
if type(lrange[k]) is not type(slice(None)):
if len(lrange[k]) < 3:
lrange[k] = tuple(lrange[k]) + (complex(Ns),)
lrange[k] = slice(*lrange[k])
if (N == 1):
lrange = lrange[0]
def _scalarfunc(*params):
params = squeeze(asarray(params))
return func(params, *args)
vecfunc = vectorize(_scalarfunc)
grid = mgrid[lrange]
if (N == 1):
grid = (grid,)
Jout = vecfunc(*grid)
Nshape = shape(Jout)
indx = argmin(Jout.ravel(), axis=-1)
Nindx = zeros(N, int)
xmin = zeros(N, float)
for k in range(N - 1, -1, -1):
thisN = Nshape[k]
Nindx[k] = indx % Nshape[k]
indx = indx // thisN
for k in range(N):
xmin[k] = grid[k][tuple(Nindx)]
Jmin = Jout[tuple(Nindx)]
if (N == 1):
grid = grid[0]
xmin = xmin[0]
if callable(finish):
# set up kwargs for `finish` function
finish_args = _getargspec(finish).args
finish_kwargs = dict()
if 'full_output' in finish_args:
finish_kwargs['full_output'] = 1
if 'disp' in finish_args:
finish_kwargs['disp'] = disp
elif 'options' in finish_args:
# pass 'disp' as `options`
# (e.g. if `finish` is `minimize`)
finish_kwargs['options'] = {'disp': disp}
# run minimizer
res = finish(func, xmin, args=args, **finish_kwargs)
if isinstance(res, OptimizeResult):
xmin = res.x
Jmin = res.fun
success = res.success
else:
xmin = res[0]
Jmin = res[1]
success = res[-1] == 0
if not success:
if disp:
print("Warning: Either final optimization did not succeed "
"or `finish` does not return `statuscode` as its last "
"argument.")
if full_output:
return xmin, Jmin, grid, Jout
else:
return xmin
def show_options(solver=None, method=None, disp=True):
"""
Show documentation for additional options of optimization solvers.
These are method-specific options that can be supplied through the
``options`` dict.
Parameters
----------
solver : str
Type of optimization solver. One of 'minimize', 'minimize_scalar',
'root', or 'linprog'.
method : str, optional
If not given, shows all methods of the specified solver. Otherwise,
show only the options for the specified method. Valid values
corresponds to methods' names of respective solver (e.g. 'BFGS' for
'minimize').
disp : bool, optional
Whether to print the result rather than returning it.
Returns
-------
text
Either None (for disp=False) or the text string (disp=True)
Notes
-----
The solver-specific methods are:
`scipy.optimize.minimize`
- :ref:`Nelder-Mead <optimize.minimize-neldermead>`
- :ref:`Powell <optimize.minimize-powell>`
- :ref:`CG <optimize.minimize-cg>`
- :ref:`BFGS <optimize.minimize-bfgs>`
- :ref:`Newton-CG <optimize.minimize-newtoncg>`
- :ref:`L-BFGS-B <optimize.minimize-lbfgsb>`
- :ref:`TNC <optimize.minimize-tnc>`
- :ref:`COBYLA <optimize.minimize-cobyla>`
- :ref:`SLSQP <optimize.minimize-slsqp>`
- :ref:`dogleg <optimize.minimize-dogleg>`
- :ref:`trust-ncg <optimize.minimize-trustncg>`
`scipy.optimize.root`
- :ref:`hybr <optimize.root-hybr>`
- :ref:`lm <optimize.root-lm>`
- :ref:`broyden1 <optimize.root-broyden1>`
- :ref:`broyden2 <optimize.root-broyden2>`
- :ref:`anderson <optimize.root-anderson>`
- :ref:`linearmixing <optimize.root-linearmixing>`
- :ref:`diagbroyden <optimize.root-diagbroyden>`
- :ref:`excitingmixing <optimize.root-excitingmixing>`
- :ref:`krylov <optimize.root-krylov>`
- :ref:`df-sane <optimize.root-dfsane>`
`scipy.optimize.minimize_scalar`
- :ref:`brent <optimize.minimize_scalar-brent>`
- :ref:`golden <optimize.minimize_scalar-golden>`
- :ref:`bounded <optimize.minimize_scalar-bounded>`
`scipy.optimize.linprog`
- :ref:`simplex <optimize.linprog-simplex>`
"""
import textwrap
doc_routines = {
'minimize': (
('bfgs', 'scipy.optimize.optimize._minimize_bfgs'),
('cg', 'scipy.optimize.optimize._minimize_cg'),
('cobyla', 'scipy.optimize.cobyla._minimize_cobyla'),
('dogleg', 'scipy.optimize._trustregion_dogleg._minimize_dogleg'),
('l-bfgs-b', 'scipy.optimize.lbfgsb._minimize_lbfgsb'),
('nelder-mead', 'scipy.optimize.optimize._minimize_neldermead'),
('newtoncg', 'scipy.optimize.optimize._minimize_newtoncg'),
('powell', 'scipy.optimize.optimize._minimize_powell'),
('slsqp', 'scipy.optimize.slsqp._minimize_slsqp'),
('tnc', 'scipy.optimize.tnc._minimize_tnc'),
('trust-ncg', 'scipy.optimize._trustregion_ncg._minimize_trust_ncg'),
),
'root': (
('hybr', 'scipy.optimize.minpack._root_hybr'),
('lm', 'scipy.optimize._root._root_leastsq'),
('broyden1', 'scipy.optimize._root._root_broyden1_doc'),
('broyden2', 'scipy.optimize._root._root_broyden2_doc'),
('anderson', 'scipy.optimize._root._root_anderson_doc'),
('diagbroyden', 'scipy.optimize._root._root_diagbroyden_doc'),
('excitingmixing', 'scipy.optimize._root._root_excitingmixing_doc'),
('linearmixing', 'scipy.optimize._root._root_linearmixing_doc'),
('krylov', 'scipy.optimize._root._root_krylov_doc'),
('df-sane', 'scipy.optimize._spectral._root_df_sane'),
),
'linprog': (
('simplex', 'scipy.optimize._linprog._linprog_simplex'),
),
'minimize_scalar': (
('brent', 'scipy.optimize.optimize._minimize_scalar_brent'),
('bounded', 'scipy.optimize.optimize._minimize_scalar_bounded'),
('golden', 'scipy.optimize.optimize._minimize_scalar_golden'),
),
}
if solver is None:
text = ["\n\n\n========\n", "minimize\n", "========\n"]
text.append(show_options('minimize', disp=False))
text.extend(["\n\n===============\n", "minimize_scalar\n",
"===============\n"])
text.append(show_options('minimize_scalar', disp=False))
text.extend(["\n\n\n====\n", "root\n",
"====\n"])
text.append(show_options('root', disp=False))
text.extend(['\n\n\n=======\n', 'linprog\n',
'=======\n'])
text.append(show_options('linprog', disp=False))
text = "".join(text)
else:
solver = solver.lower()
if solver not in doc_routines:
raise ValueError('Unknown solver %r' % (solver,))
if method is None:
text = []
for name, _ in doc_routines[solver]:
text.extend(["\n\n" + name, "\n" + "="*len(name) + "\n\n"])
text.append(show_options(solver, name, disp=False))
text = "".join(text)
else:
methods = dict(doc_routines[solver])
if method not in methods:
raise ValueError("Unknown method %r" % (method,))
name = methods[method]
# Import function object
parts = name.split('.')
mod_name = ".".join(parts[:-1])
__import__(mod_name)
obj = getattr(sys.modules[mod_name], parts[-1])
# Get doc
doc = obj.__doc__
if doc is not None:
text = textwrap.dedent(doc).strip()
else:
text = ""
if disp:
print(text)
return
else:
return text
def main():
import time
times = []
algor = []
x0 = [0.8, 1.2, 0.7]
print("Nelder-Mead Simplex")
print("===================")
start = time.time()
x = fmin(rosen, x0)
print(x)
times.append(time.time() - start)
algor.append('Nelder-Mead Simplex\t')
print()
print("Powell Direction Set Method")
print("===========================")
start = time.time()
x = fmin_powell(rosen, x0)
print(x)
times.append(time.time() - start)
algor.append('Powell Direction Set Method.')
print()
print("Nonlinear CG")
print("============")
start = time.time()
x = fmin_cg(rosen, x0, fprime=rosen_der, maxiter=200)
print(x)
times.append(time.time() - start)
algor.append('Nonlinear CG \t')
print()
print("BFGS Quasi-Newton")
print("=================")
start = time.time()
x = fmin_bfgs(rosen, x0, fprime=rosen_der, maxiter=80)
print(x)
times.append(time.time() - start)
algor.append('BFGS Quasi-Newton\t')
print()
print("BFGS approximate gradient")
print("=========================")
start = time.time()
x = fmin_bfgs(rosen, x0, gtol=1e-4, maxiter=100)
print(x)
times.append(time.time() - start)
algor.append('BFGS without gradient\t')
print()
print("Newton-CG with Hessian product")
print("==============================")
start = time.time()
x = fmin_ncg(rosen, x0, rosen_der, fhess_p=rosen_hess_prod, maxiter=80)
print(x)
times.append(time.time() - start)
algor.append('Newton-CG with hessian product')
print()
print("Newton-CG with full Hessian")
print("===========================")
start = time.time()
x = fmin_ncg(rosen, x0, rosen_der, fhess=rosen_hess, maxiter=80)
print(x)
times.append(time.time() - start)
algor.append('Newton-CG with full hessian')
print()
print("\nMinimizing the Rosenbrock function of order 3\n")
print(" Algorithm \t\t\t Seconds")
print("===========\t\t\t =========")
for k in range(len(algor)):
print(algor[k], "\t -- ", times[k])
if __name__ == "__main__":
main()<|fim▁end|> | |
<|file_name|>GraphService.java<|end_file_name|><|fim▁begin|>package com.github.weeniearms.graffiti;
import com.github.weeniearms.graffiti.config.CacheConfiguration;
import com.github.weeniearms.graffiti.generator.GraphGenerator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Service;
import java.io.IOException;
import java.util.Arrays;
@Service
public class GraphService {
private final GraphGenerator[] generators;
@Autowired
public GraphService(GraphGenerator[] generators) {
this.generators = generators;
}<|fim▁hole|> GraphGenerator generator =
Arrays.stream(generators)
.filter(g -> g.isSourceSupported(source))
.findFirst()
.orElseThrow(() -> new IllegalArgumentException("No matching generator found for source"));
return generator.generateGraph(source, format);
}
}<|fim▁end|> |
@Cacheable(CacheConfiguration.GRAPH)
public byte[] generateGraph(String source, GraphGenerator.OutputFormat format) throws IOException { |
<|file_name|>Settings.js<|end_file_name|><|fim▁begin|>import React, { Component } from 'react';
import List from 'react-toolbox/lib/list/List';
import ListSubHeader from 'react-toolbox/lib/list/ListSubHeader';
import ListCheckbox from 'react-toolbox/lib/list/ListCheckbox';
import ListItem from 'react-toolbox/lib/list/ListItem';
import Dropdown from 'react-toolbox/lib/dropdown/Dropdown';<|fim▁hole|>const NEW_PHOTO_INTERVAL_OPTIONS = [
{ value: NEW_PHOTO_DURATIONS.ALWAYS, label: 'Always' },
{ value: NEW_PHOTO_DURATIONS.HOURLY, label: 'Hourly' },
{ value: NEW_PHOTO_DURATIONS.DAILY, label: 'Daily' },
];
const handleFetchFromServerChange = (value, ev) =>
Actions.setSetting({ fetchFromServer: value });
const handleNewPhotoIntervalChange = (value, ev) =>
Actions.setSetting({ newPhotoDuration: parseInt(value, 10) });
const NewPhotoIntervalDropdown = ({ refreshInterval, className }) => (
<Dropdown
label="Duration"
className={className}
value={refreshInterval}
source={NEW_PHOTO_INTERVAL_OPTIONS}
onChange={handleNewPhotoIntervalChange} />
);
class SettingsContainer extends Component {
componentDidMount() {
// lazy initialize the state object
setTimeout(() => Actions.refresh(false), 0);
}
render() {
const { fetchFromServer, newPhotoDuration } = this.props;
return (
<List selectable ripple>
<ListSubHeader caption="Background Photos" />
<ListCheckbox
caption="Load Fresh"
legend="If disabled, it will cycle through a list of locally stored wallpapers only."
checked={fetchFromServer}
onChange={handleFetchFromServerChange} />
<ListItem
itemContent={
<div>
<p className="settings__inlineItem">Show new photo</p>
<NewPhotoIntervalDropdown
className="settings__inlineItem"
refreshInterval={newPhotoDuration} />
</div>
}
ripple={false}
selectable={false} />
</List>);
}
}
export default ConnectedStoreHOC(SettingsContainer);<|fim▁end|> | import ConnectedStoreHOC from '../utils/connect.store.hoc';
import * as Actions from '../utils/actions';
import { NEW_PHOTO_DURATIONS } from '../configs/constants';
|
<|file_name|>test_admin_actions.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|># Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
import mock
from oslo_utils.fixture import uuidsentinel as uuids
from oslo_utils import timeutils
from nova.api.openstack.compute import admin_actions
from nova.compute import vm_states
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_instance
from nova.tests.unit.policies import base
class AdminActionsPolicyTest(base.BasePolicyTest):
"""Test Admin Actions APIs policies with all possible context.
This class defines the set of context with different roles
which are allowed and not allowed to pass the policy checks.
With those set of context, it will call the API operation and
verify the expected behaviour.
"""
def setUp(self):
super(AdminActionsPolicyTest, self).setUp()
self.controller = admin_actions.AdminActionsController()
self.req = fakes.HTTPRequest.blank('')
self.mock_get = self.useFixture(
fixtures.MockPatch('nova.compute.api.API.get')).mock
uuid = uuids.fake_id
self.instance = fake_instance.fake_instance_obj(
self.project_member_context,
id=1, uuid=uuid, vm_state=vm_states.ACTIVE,
task_state=None, launched_at=timeutils.utcnow())
self.mock_get.return_value = self.instance
# Check that admin is able to change the service
self.admin_authorized_contexts = [
self.legacy_admin_context, self.system_admin_context,
self.project_admin_context]
# Check that non-admin is not able to change the service
self.admin_unauthorized_contexts = [
self.system_member_context, self.system_reader_context,
self.system_foo_context, self.project_member_context,
self.other_project_member_context,
self.project_foo_context, self.project_reader_context
]
@mock.patch('nova.objects.Instance.save')
def test_reset_state_policy(self, mock_save):
rule_name = "os_compute_api:os-admin-actions:reset_state"
self.common_policy_check(self.admin_authorized_contexts,
self.admin_unauthorized_contexts,
rule_name, self.controller._reset_state,
self.req, self.instance.uuid,
body={'os-resetState': {'state': 'active'}})
def test_inject_network_info_policy(self):
rule_name = "os_compute_api:os-admin-actions:inject_network_info"
with mock.patch.object(self.controller.compute_api,
"inject_network_info"):
self.common_policy_check(self.admin_authorized_contexts,
self.admin_unauthorized_contexts,
rule_name,
self.controller._inject_network_info,
self.req, self.instance.uuid, body={})
def test_reset_network_policy(self):
rule_name = "os_compute_api:os-admin-actions:reset_network"
with mock.patch.object(self.controller.compute_api, "reset_network"):
self.common_policy_check(self.admin_authorized_contexts,
self.admin_unauthorized_contexts,
rule_name, self.controller._reset_network,
self.req, self.instance.uuid, body={})
class AdminActionsScopeTypePolicyTest(AdminActionsPolicyTest):
"""Test Admin Actions APIs policies with system scope enabled.
This class set the nova.conf [oslo_policy] enforce_scope to True
so that we can switch on the scope checking on oslo policy side.
It defines the set of context with scopped token
which are allowed and not allowed to pass the policy checks.
With those set of context, it will run the API operation and
verify the expected behaviour.
"""
def setUp(self):
super(AdminActionsScopeTypePolicyTest, self).setUp()
self.flags(enforce_scope=True, group="oslo_policy")
# Check that system admin is able to perform the system level actions
# on server.
self.admin_authorized_contexts = [
self.system_admin_context]
# Check that non-system or non-admin is not able to perform the system
# level actions on server.
self.admin_unauthorized_contexts = [
self.legacy_admin_context, self.system_member_context,
self.system_reader_context, self.system_foo_context,
self.project_admin_context, self.project_member_context,
self.other_project_member_context,
self.project_foo_context, self.project_reader_context
]<|fim▁end|> | # |
<|file_name|>test_weather.py<|end_file_name|><|fim▁begin|>"""Tests for Climacell weather entity."""
from __future__ import annotations
from datetime import datetime
import logging
from typing import Any
from unittest.mock import patch
import pytest
from homeassistant.components.climacell.config_flow import (
_get_config_schema,
_get_unique_id,
)
from homeassistant.components.climacell.const import (
ATTR_CLOUD_COVER,
ATTR_PRECIPITATION_TYPE,
ATTR_WIND_GUST,
ATTRIBUTION,
DOMAIN,
)
from homeassistant.components.weather import (
ATTR_CONDITION_CLOUDY,
ATTR_CONDITION_RAINY,
ATTR_CONDITION_SNOWY,
ATTR_CONDITION_SUNNY,
ATTR_FORECAST,
ATTR_FORECAST_CONDITION,
ATTR_FORECAST_PRECIPITATION,
ATTR_FORECAST_PRECIPITATION_PROBABILITY,
ATTR_FORECAST_TEMP,
ATTR_FORECAST_TEMP_LOW,
ATTR_FORECAST_TIME,
ATTR_FORECAST_WIND_BEARING,
ATTR_FORECAST_WIND_SPEED,
ATTR_WEATHER_HUMIDITY,
ATTR_WEATHER_OZONE,
ATTR_WEATHER_PRESSURE,
ATTR_WEATHER_TEMPERATURE,
ATTR_WEATHER_VISIBILITY,
ATTR_WEATHER_WIND_BEARING,
ATTR_WEATHER_WIND_SPEED,
DOMAIN as WEATHER_DOMAIN,
)
from homeassistant.const import ATTR_ATTRIBUTION, ATTR_FRIENDLY_NAME
from homeassistant.core import HomeAssistant, State, callback
from homeassistant.helpers.entity_registry import async_get
from homeassistant.util import dt as dt_util
from .const import API_V3_ENTRY_DATA, API_V4_ENTRY_DATA
from tests.common import MockConfigEntry
_LOGGER = logging.getLogger(__name__)
@callback
def _enable_entity(hass: HomeAssistant, entity_name: str) -> None:
"""Enable disabled entity."""
ent_reg = async_get(hass)
entry = ent_reg.async_get(entity_name)
updated_entry = ent_reg.async_update_entity(
entry.entity_id, **{"disabled_by": None}
)
assert updated_entry != entry
assert updated_entry.disabled is False
async def _setup(hass: HomeAssistant, config: dict[str, Any]) -> State:
"""Set up entry and return entity state."""
with patch(
"homeassistant.util.dt.utcnow",
return_value=datetime(2021, 3, 6, 23, 59, 59, tzinfo=dt_util.UTC),
):
data = _get_config_schema(hass)(config)
config_entry = MockConfigEntry(
domain=DOMAIN,
data=data,
unique_id=_get_unique_id(hass, data),
version=1,
)
config_entry.add_to_hass(hass)
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
for entity_name in ("hourly", "nowcast"):
_enable_entity(hass, f"weather.climacell_{entity_name}")
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids(WEATHER_DOMAIN)) == 3
return hass.states.get("weather.climacell_daily")
async def test_v3_weather(
hass: HomeAssistant,
climacell_config_entry_update: pytest.fixture,
) -> None:
"""Test v3 weather data."""
weather_state = await _setup(hass, API_V3_ENTRY_DATA)
assert weather_state.state == ATTR_CONDITION_SUNNY
assert weather_state.attributes[ATTR_ATTRIBUTION] == ATTRIBUTION
assert weather_state.attributes[ATTR_FORECAST] == [
{
ATTR_FORECAST_CONDITION: ATTR_CONDITION_SUNNY,
ATTR_FORECAST_TIME: "2021-03-07T00:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 0,
ATTR_FORECAST_TEMP: 7,
ATTR_FORECAST_TEMP_LOW: -5,
},<|fim▁hole|> {
ATTR_FORECAST_CONDITION: ATTR_CONDITION_CLOUDY,
ATTR_FORECAST_TIME: "2021-03-08T00:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 0,
ATTR_FORECAST_TEMP: 10,
ATTR_FORECAST_TEMP_LOW: -4,
},
{
ATTR_FORECAST_CONDITION: ATTR_CONDITION_CLOUDY,
ATTR_FORECAST_TIME: "2021-03-09T00:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 0,
ATTR_FORECAST_TEMP: 19,
ATTR_FORECAST_TEMP_LOW: 0,
},
{
ATTR_FORECAST_CONDITION: ATTR_CONDITION_CLOUDY,
ATTR_FORECAST_TIME: "2021-03-10T00:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 0,
ATTR_FORECAST_TEMP: 18,
ATTR_FORECAST_TEMP_LOW: 3,
},
{
ATTR_FORECAST_CONDITION: ATTR_CONDITION_CLOUDY,
ATTR_FORECAST_TIME: "2021-03-11T00:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 5,
ATTR_FORECAST_TEMP: 20,
ATTR_FORECAST_TEMP_LOW: 9,
},
{
ATTR_FORECAST_CONDITION: ATTR_CONDITION_CLOUDY,
ATTR_FORECAST_TIME: "2021-03-12T00:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0.0457,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 25,
ATTR_FORECAST_TEMP: 20,
ATTR_FORECAST_TEMP_LOW: 12,
},
{
ATTR_FORECAST_CONDITION: ATTR_CONDITION_CLOUDY,
ATTR_FORECAST_TIME: "2021-03-13T00:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 25,
ATTR_FORECAST_TEMP: 16,
ATTR_FORECAST_TEMP_LOW: 7,
},
{
ATTR_FORECAST_CONDITION: ATTR_CONDITION_RAINY,
ATTR_FORECAST_TIME: "2021-03-14T00:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 1.0744,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 75,
ATTR_FORECAST_TEMP: 6,
ATTR_FORECAST_TEMP_LOW: 3,
},
{
ATTR_FORECAST_CONDITION: ATTR_CONDITION_SNOWY,
ATTR_FORECAST_TIME: "2021-03-15T00:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 7.3050,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 95,
ATTR_FORECAST_TEMP: 1,
ATTR_FORECAST_TEMP_LOW: 0,
},
{
ATTR_FORECAST_CONDITION: ATTR_CONDITION_CLOUDY,
ATTR_FORECAST_TIME: "2021-03-16T00:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0.0051,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 5,
ATTR_FORECAST_TEMP: 6,
ATTR_FORECAST_TEMP_LOW: -2,
},
{
ATTR_FORECAST_CONDITION: ATTR_CONDITION_CLOUDY,
ATTR_FORECAST_TIME: "2021-03-17T00:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 0,
ATTR_FORECAST_TEMP: 11,
ATTR_FORECAST_TEMP_LOW: 1,
},
{
ATTR_FORECAST_CONDITION: ATTR_CONDITION_CLOUDY,
ATTR_FORECAST_TIME: "2021-03-18T00:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 5,
ATTR_FORECAST_TEMP: 12,
ATTR_FORECAST_TEMP_LOW: 6,
},
{
ATTR_FORECAST_CONDITION: ATTR_CONDITION_CLOUDY,
ATTR_FORECAST_TIME: "2021-03-19T00:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0.1778,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 45,
ATTR_FORECAST_TEMP: 9,
ATTR_FORECAST_TEMP_LOW: 5,
},
{
ATTR_FORECAST_CONDITION: ATTR_CONDITION_RAINY,
ATTR_FORECAST_TIME: "2021-03-20T00:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 1.2319,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 55,
ATTR_FORECAST_TEMP: 5,
ATTR_FORECAST_TEMP_LOW: 3,
},
{
ATTR_FORECAST_CONDITION: ATTR_CONDITION_CLOUDY,
ATTR_FORECAST_TIME: "2021-03-21T00:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0.0432,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 20,
ATTR_FORECAST_TEMP: 7,
ATTR_FORECAST_TEMP_LOW: 1,
},
]
assert weather_state.attributes[ATTR_FRIENDLY_NAME] == "ClimaCell - Daily"
assert weather_state.attributes[ATTR_WEATHER_HUMIDITY] == 24
assert weather_state.attributes[ATTR_WEATHER_OZONE] == 52.625
assert weather_state.attributes[ATTR_WEATHER_PRESSURE] == 1028.1246
assert weather_state.attributes[ATTR_WEATHER_TEMPERATURE] == 7
assert weather_state.attributes[ATTR_WEATHER_VISIBILITY] == 9.9940
assert weather_state.attributes[ATTR_WEATHER_WIND_BEARING] == 320.31
assert weather_state.attributes[ATTR_WEATHER_WIND_SPEED] == 14.6289
assert weather_state.attributes[ATTR_CLOUD_COVER] == 1
assert weather_state.attributes[ATTR_WIND_GUST] == 24.0758
assert weather_state.attributes[ATTR_PRECIPITATION_TYPE] == "rain"
async def test_v4_weather(
hass: HomeAssistant,
climacell_config_entry_update: pytest.fixture,
) -> None:
"""Test v4 weather data."""
weather_state = await _setup(hass, API_V4_ENTRY_DATA)
assert weather_state.state == ATTR_CONDITION_SUNNY
assert weather_state.attributes[ATTR_ATTRIBUTION] == ATTRIBUTION
assert weather_state.attributes[ATTR_FORECAST] == [
{
ATTR_FORECAST_CONDITION: ATTR_CONDITION_SUNNY,
ATTR_FORECAST_TIME: "2021-03-07T11:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 0,
ATTR_FORECAST_TEMP: 8,
ATTR_FORECAST_TEMP_LOW: -3,
ATTR_FORECAST_WIND_BEARING: 239.6,
ATTR_FORECAST_WIND_SPEED: 15.2727,
},
{
ATTR_FORECAST_CONDITION: "cloudy",
ATTR_FORECAST_TIME: "2021-03-08T11:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 0,
ATTR_FORECAST_TEMP: 10,
ATTR_FORECAST_TEMP_LOW: -3,
ATTR_FORECAST_WIND_BEARING: 262.82,
ATTR_FORECAST_WIND_SPEED: 11.6517,
},
{
ATTR_FORECAST_CONDITION: "cloudy",
ATTR_FORECAST_TIME: "2021-03-09T11:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 0,
ATTR_FORECAST_TEMP: 19,
ATTR_FORECAST_TEMP_LOW: 0,
ATTR_FORECAST_WIND_BEARING: 229.3,
ATTR_FORECAST_WIND_SPEED: 11.3459,
},
{
ATTR_FORECAST_CONDITION: "cloudy",
ATTR_FORECAST_TIME: "2021-03-10T11:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 0,
ATTR_FORECAST_TEMP: 18,
ATTR_FORECAST_TEMP_LOW: 3,
ATTR_FORECAST_WIND_BEARING: 149.91,
ATTR_FORECAST_WIND_SPEED: 17.1234,
},
{
ATTR_FORECAST_CONDITION: "cloudy",
ATTR_FORECAST_TIME: "2021-03-11T11:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 0,
ATTR_FORECAST_TEMP: 19,
ATTR_FORECAST_TEMP_LOW: 9,
ATTR_FORECAST_WIND_BEARING: 210.45,
ATTR_FORECAST_WIND_SPEED: 25.2506,
},
{
ATTR_FORECAST_CONDITION: "rainy",
ATTR_FORECAST_TIME: "2021-03-12T11:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0.1219,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 25,
ATTR_FORECAST_TEMP: 20,
ATTR_FORECAST_TEMP_LOW: 12,
ATTR_FORECAST_WIND_BEARING: 217.98,
ATTR_FORECAST_WIND_SPEED: 19.7949,
},
{
ATTR_FORECAST_CONDITION: "cloudy",
ATTR_FORECAST_TIME: "2021-03-13T11:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 25,
ATTR_FORECAST_TEMP: 12,
ATTR_FORECAST_TEMP_LOW: 6,
ATTR_FORECAST_WIND_BEARING: 58.79,
ATTR_FORECAST_WIND_SPEED: 15.6428,
},
{
ATTR_FORECAST_CONDITION: "snowy",
ATTR_FORECAST_TIME: "2021-03-14T10:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 23.9573,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 95,
ATTR_FORECAST_TEMP: 6,
ATTR_FORECAST_TEMP_LOW: 1,
ATTR_FORECAST_WIND_BEARING: 70.25,
ATTR_FORECAST_WIND_SPEED: 26.1518,
},
{
ATTR_FORECAST_CONDITION: "snowy",
ATTR_FORECAST_TIME: "2021-03-15T10:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 1.4630,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 55,
ATTR_FORECAST_TEMP: 6,
ATTR_FORECAST_TEMP_LOW: -1,
ATTR_FORECAST_WIND_BEARING: 84.47,
ATTR_FORECAST_WIND_SPEED: 25.5725,
},
{
ATTR_FORECAST_CONDITION: "cloudy",
ATTR_FORECAST_TIME: "2021-03-16T10:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 0,
ATTR_FORECAST_TEMP: 6,
ATTR_FORECAST_TEMP_LOW: -2,
ATTR_FORECAST_WIND_BEARING: 103.85,
ATTR_FORECAST_WIND_SPEED: 10.7987,
},
{
ATTR_FORECAST_CONDITION: "cloudy",
ATTR_FORECAST_TIME: "2021-03-17T10:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 0,
ATTR_FORECAST_TEMP: 11,
ATTR_FORECAST_TEMP_LOW: 1,
ATTR_FORECAST_WIND_BEARING: 145.41,
ATTR_FORECAST_WIND_SPEED: 11.6999,
},
{
ATTR_FORECAST_CONDITION: "cloudy",
ATTR_FORECAST_TIME: "2021-03-18T10:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 0,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 10,
ATTR_FORECAST_TEMP: 12,
ATTR_FORECAST_TEMP_LOW: 5,
ATTR_FORECAST_WIND_BEARING: 62.99,
ATTR_FORECAST_WIND_SPEED: 10.5895,
},
{
ATTR_FORECAST_CONDITION: "rainy",
ATTR_FORECAST_TIME: "2021-03-19T10:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 2.9261,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 55,
ATTR_FORECAST_TEMP: 9,
ATTR_FORECAST_TEMP_LOW: 4,
ATTR_FORECAST_WIND_BEARING: 68.54,
ATTR_FORECAST_WIND_SPEED: 22.3860,
},
{
ATTR_FORECAST_CONDITION: "snowy",
ATTR_FORECAST_TIME: "2021-03-20T10:00:00+00:00",
ATTR_FORECAST_PRECIPITATION: 1.2192,
ATTR_FORECAST_PRECIPITATION_PROBABILITY: 33.3,
ATTR_FORECAST_TEMP: 5,
ATTR_FORECAST_TEMP_LOW: 2,
ATTR_FORECAST_WIND_BEARING: 56.98,
ATTR_FORECAST_WIND_SPEED: 27.9221,
},
]
assert weather_state.attributes[ATTR_FRIENDLY_NAME] == "ClimaCell - Daily"
assert weather_state.attributes[ATTR_WEATHER_HUMIDITY] == 23
assert weather_state.attributes[ATTR_WEATHER_OZONE] == 46.53
assert weather_state.attributes[ATTR_WEATHER_PRESSURE] == 1027.7691
assert weather_state.attributes[ATTR_WEATHER_TEMPERATURE] == 7
assert weather_state.attributes[ATTR_WEATHER_VISIBILITY] == 13.1162
assert weather_state.attributes[ATTR_WEATHER_WIND_BEARING] == 315.14
assert weather_state.attributes[ATTR_WEATHER_WIND_SPEED] == 15.0152
assert weather_state.attributes[ATTR_CLOUD_COVER] == 1
assert weather_state.attributes[ATTR_WIND_GUST] == 20.3421
assert weather_state.attributes[ATTR_PRECIPITATION_TYPE] == "rain"<|fim▁end|> | |
<|file_name|>gallery.component.spec.ts<|end_file_name|><|fim▁begin|>/* tslint:disable:no-unused-variable */
<|fim▁hole|>import { RouterTestingModule } from '@angular/router/testing';
import { SharedModule } from './../../shared/shared.module';
import { StoreService } from './../../core/store.service';
import { ScoreService } from './../../core/score.service';
import { rootReducer } from '../../state/root-reducer';
import { AdminGalleryComponent } from './gallery.component';
import { TimerComponent, NextComponent } from './../components';
describe('AdminGalleryComponent', () => {
let component: AdminGalleryComponent;
let fixture: ComponentFixture<AdminGalleryComponent>;
beforeEach(async(() => {
TestBed.configureTestingModule({
declarations: [ AdminGalleryComponent, TimerComponent, NextComponent ],
imports: [
RouterTestingModule,
StoreModule.provideStore(rootReducer),
SharedModule
],
providers: [ScoreService, StoreService]
})
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(AdminGalleryComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
});<|fim▁end|> | import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { By } from '@angular/platform-browser';
import { DebugElement } from '@angular/core';
import { StoreModule } from '@ngrx/store'; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- encoding: utf-8 -*-
###########################################################################
# Module Writen to OpenERP, Open Source Management Solution
# Copyright (C) OpenERP Venezuela (<http://openerp.com.ve>).
# All Rights Reserved
###############Credits######################################################
# Coded by: María Gabriela Quilarque <gabrielaquilarque97@gmail.com><|fim▁hole|>#############################################################################
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
import account<|fim▁end|> | # Luis Escobar <luis@vauxoo.com>
# Planified by: Nhomar Hernandez
# Finance by: Vauxoo, C.A. http://vauxoo.com
# Audited by: Humberto Arocha humberto@openerp.com.ve |
<|file_name|>poincare.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Author: Jayant Jain <jayantjain1992@gmail.com>
# Copyright (C) 2017 Radim Rehurek <me@radimrehurek.com>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""Python implementation of Poincaré Embeddings.
These embeddings are better at capturing latent hierarchical information than traditional Euclidean embeddings.
The method is described in detail in `Maximilian Nickel, Douwe Kiela -
"Poincaré Embeddings for Learning Hierarchical Representations" <https://arxiv.org/abs/1705.08039>`_.
The main use-case is to automatically learn hierarchical representations of nodes from a tree-like structure,
such as a Directed Acyclic Graph (DAG), using a transitive closure of the relations. Representations of nodes in a
symmetric graph can also be learned.
This module allows training Poincaré Embeddings from a training file containing relations of graph in a
csv-like format, or from a Python iterable of relations.
Examples
--------
Initialize and train a model from a list
.. sourcecode:: pycon
>>> from gensim.models.poincare import PoincareModel
>>> relations = [('kangaroo', 'marsupial'), ('kangaroo', 'mammal'), ('gib', 'cat')]
>>> model = PoincareModel(relations, negative=2)
>>> model.train(epochs=50)
Initialize and train a model from a file containing one relation per line
.. sourcecode:: pycon
>>> from gensim.models.poincare import PoincareModel, PoincareRelations
>>> from gensim.test.utils import datapath
>>> file_path = datapath('poincare_hypernyms.tsv')
>>> model = PoincareModel(PoincareRelations(file_path), negative=2)
>>> model.train(epochs=50)
"""
import csv
import logging
from numbers import Integral
import sys
import time
import numpy as np
from collections import defaultdict, Counter
from numpy import random as np_random
from scipy.stats import spearmanr
from six import string_types
from six.moves import zip, range
from gensim import utils, matutils
from gensim.models.keyedvectors import Vocab, BaseKeyedVectors
from gensim.models.utils_any2vec import _save_word2vec_format, _load_word2vec_format
from numpy import float32 as REAL
try:
from autograd import grad # Only required for optionally verifying gradients while training
from autograd import numpy as grad_np
AUTOGRAD_PRESENT = True
except ImportError:
AUTOGRAD_PRESENT = False
logger = logging.getLogger(__name__)
class PoincareModel(utils.SaveLoad):
"""Train, use and evaluate Poincare Embeddings.
The model can be stored/loaded via its :meth:`~gensim.models.poincare.PoincareModel.save`
and :meth:`~gensim.models.poincare.PoincareModel.load` methods, or stored/loaded in the word2vec format
via `model.kv.save_word2vec_format` and :meth:`~gensim.models.poincare.PoincareKeyedVectors.load_word2vec_format`.
Notes
-----
Training cannot be resumed from a model loaded via `load_word2vec_format`, if you wish to train further,
use :meth:`~gensim.models.poincare.PoincareModel.save` and :meth:`~gensim.models.poincare.PoincareModel.load`
methods instead.
An important attribute (that provides a lot of additional functionality when directly accessed) are the
keyed vectors:
self.kv : :class:`~gensim.models.poincare.PoincareKeyedVectors`
This object essentially contains the mapping between nodes and embeddings, as well the vocabulary of the model
(set of unique nodes seen by the model). After training, it can be used to perform operations on the vectors
such as vector lookup, distance and similarity calculations etc.
See the documentation of its class for usage examples.
"""
def __init__(self, train_data, size=50, alpha=0.1, negative=10, workers=1, epsilon=1e-5, regularization_coeff=1.0,
burn_in=10, burn_in_alpha=0.01, init_range=(-0.001, 0.001), dtype=np.float64, seed=0):
"""Initialize and train a Poincare embedding model from an iterable of relations.
Parameters
----------
train_data : {iterable of (str, str), :class:`gensim.models.poincare.PoincareRelations`}
Iterable of relations, e.g. a list of tuples, or a :class:`gensim.models.poincare.PoincareRelations`
instance streaming from a file. Note that the relations are treated as ordered pairs,
i.e. a relation (a, b) does not imply the opposite relation (b, a). In case the relations are symmetric,
the data should contain both relations (a, b) and (b, a).
size : int, optional
Number of dimensions of the trained model.
alpha : float, optional
Learning rate for training.
negative : int, optional
Number of negative samples to use.
workers : int, optional
Number of threads to use for training the model.
epsilon : float, optional
Constant used for clipping embeddings below a norm of one.
regularization_coeff : float, optional
Coefficient used for l2-regularization while training (0 effectively disables regularization).
burn_in : int, optional
Number of epochs to use for burn-in initialization (0 means no burn-in).
burn_in_alpha : float, optional
Learning rate for burn-in initialization, ignored if `burn_in` is 0.
init_range : 2-tuple (float, float)
Range within which the vectors are randomly initialized.
dtype : numpy.dtype
The numpy dtype to use for the vectors in the model (numpy.float64, numpy.float32 etc).
Using lower precision floats may be useful in increasing training speed and reducing memory usage.
seed : int, optional
Seed for random to ensure reproducibility.
Examples
--------
Initialize a model from a list:
.. sourcecode:: pycon
>>> from gensim.models.poincare import PoincareModel
>>> relations = [('kangaroo', 'marsupial'), ('kangaroo', 'mammal'), ('gib', 'cat')]
>>> model = PoincareModel(relations, negative=2)
Initialize a model from a file containing one relation per line:
.. sourcecode:: pycon
>>> from gensim.models.poincare import PoincareModel, PoincareRelations
>>> from gensim.test.utils import datapath
>>> file_path = datapath('poincare_hypernyms.tsv')
>>> model = PoincareModel(PoincareRelations(file_path), negative=2)
See :class:`~gensim.models.poincare.PoincareRelations` for more options.
"""
self.train_data = train_data
self.kv = PoincareKeyedVectors(size)
self.all_relations = []
self.node_relations = defaultdict(set)
self._negatives_buffer = NegativesBuffer([])
self._negatives_buffer_size = 2000
self.size = size
self.train_alpha = alpha # Learning rate for training
self.burn_in_alpha = burn_in_alpha # Learning rate for burn-in
self.alpha = alpha # Current learning rate
self.negative = negative
self.workers = workers
self.epsilon = epsilon
self.regularization_coeff = regularization_coeff
self.burn_in = burn_in
self._burn_in_done = False
self.dtype = dtype
self.seed = seed
self._np_random = np_random.RandomState(seed)
self.init_range = init_range
self._loss_grad = None
self.build_vocab(train_data)
def build_vocab(self, relations, update=False):
"""Build the model's vocabulary from known relations.
Parameters
----------
relations : {iterable of (str, str), :class:`gensim.models.poincare.PoincareRelations`}
Iterable of relations, e.g. a list of tuples, or a :class:`gensim.models.poincare.PoincareRelations`
instance streaming from a file. Note that the relations are treated as ordered pairs,
i.e. a relation (a, b) does not imply the opposite relation (b, a). In case the relations are symmetric,
the data should contain both relations (a, b) and (b, a).
update : bool, optional
If true, only new nodes's embeddings are initialized.
Use this when the model already has an existing vocabulary and you want to update it.
If false, all node's embeddings are initialized.
Use this when you're creating a new vocabulary from scratch.
Examples
--------
Train a model and update vocab for online training:
.. sourcecode:: pycon
>>> from gensim.models.poincare import PoincareModel
>>>
>>> # train a new model from initial data
>>> initial_relations = [('kangaroo', 'marsupial'), ('kangaroo', 'mammal')]
>>> model = PoincareModel(initial_relations, negative=1)
>>> model.train(epochs=50)
>>>
>>> # online training: update the vocabulary and continue training
>>> online_relations = [('striped_skunk', 'mammal')]
>>> model.build_vocab(online_relations, update=True)
>>> model.train(epochs=50)
"""
old_index2word_len = len(self.kv.index2word)
logger.info("loading relations from train data..")
for relation in relations:
if len(relation) != 2:
raise ValueError('Relation pair "%s" should have exactly two items' % repr(relation))
for item in relation:
if item in self.kv.vocab:
self.kv.vocab[item].count += 1
else:
self.kv.vocab[item] = Vocab(count=1, index=len(self.kv.index2word))
self.kv.index2word.append(item)
node_1, node_2 = relation
node_1_index, node_2_index = self.kv.vocab[node_1].index, self.kv.vocab[node_2].index
self.node_relations[node_1_index].add(node_2_index)
relation = (node_1_index, node_2_index)
self.all_relations.append(relation)
logger.info("loaded %d relations from train data, %d nodes", len(self.all_relations), len(self.kv.vocab))
self.indices_set = set(range(len(self.kv.index2word))) # Set of all node indices
self.indices_array = np.fromiter(range(len(self.kv.index2word)), dtype=int) # Numpy array of all node indices
self._init_node_probabilities()
if not update:
self._init_embeddings()
else:
self._update_embeddings(old_index2word_len)
def _init_embeddings(self):
"""Randomly initialize vectors for the items in the vocab."""
shape = (len(self.kv.index2word), self.size)
self.kv.syn0 = self._np_random.uniform(self.init_range[0], self.init_range[1], shape).astype(self.dtype)
def _update_embeddings(self, old_index2word_len):
"""Randomly initialize vectors for the items in the additional vocab."""
shape = (len(self.kv.index2word) - old_index2word_len, self.size)
v = self._np_random.uniform(self.init_range[0], self.init_range[1], shape).astype(self.dtype)
self.kv.syn0 = np.concatenate([self.kv.syn0, v])
def _init_node_probabilities(self):
"""Initialize a-priori probabilities."""
counts = np.fromiter((
self.kv.vocab[self.kv.index2word[i]].count
for i in range(len(self.kv.index2word))
),
dtype=np.float64, count=len(self.kv.index2word))
self._node_counts_cumsum = np.cumsum(counts)
self._node_probabilities = counts / counts.sum()
def _get_candidate_negatives(self):
"""Get candidate negatives of size `self.negative` from the negative examples buffer.
Returns
-------
numpy.array
Array of shape (`self.negative`,) containing indices of negative nodes.
"""
if self._negatives_buffer.num_items() < self.negative:
# cumsum table of counts used instead of the standard approach of a probability cumsum table
# this is to avoid floating point errors that result when the number of nodes is very high
# for reference: https://github.com/RaRe-Technologies/gensim/issues/1917
max_cumsum_value = self._node_counts_cumsum[-1]
uniform_numbers = self._np_random.randint(1, max_cumsum_value + 1, self._negatives_buffer_size)
cumsum_table_indices = np.searchsorted(self._node_counts_cumsum, uniform_numbers)
self._negatives_buffer = NegativesBuffer(cumsum_table_indices)
return self._negatives_buffer.get_items(self.negative)
def _sample_negatives(self, node_index):
"""Get a sample of negatives for the given node.
Parameters
----------
node_index : int
Index of the positive node for which negative samples are to be returned.
Returns
-------
numpy.array
Array of shape (self.negative,) containing indices of negative nodes for the given node index.
"""
node_relations = self.node_relations[node_index]
num_remaining_nodes = len(self.kv.vocab) - len(node_relations)
if num_remaining_nodes < self.negative:
raise ValueError(
'Cannot sample %d negative nodes from a set of %d negative nodes for %s' %
(self.negative, num_remaining_nodes, self.kv.index2word[node_index])
)
positive_fraction = float(len(node_relations)) / len(self.kv.vocab)
if positive_fraction < 0.01:
# If number of positive relations is a small fraction of total nodes
# re-sample till no positively connected nodes are chosen
indices = self._get_candidate_negatives()
unique_indices = set(indices)
times_sampled = 1
while (len(indices) != len(unique_indices)) or (unique_indices & node_relations):
times_sampled += 1
indices = self._get_candidate_negatives()
unique_indices = set(indices)
if times_sampled > 1:
logger.debug('sampled %d times, positive fraction %.5f', times_sampled, positive_fraction)
else:
# If number of positive relations is a significant fraction of total nodes
# subtract positively connected nodes from set of choices and sample from the remaining
valid_negatives = np.array(list(self.indices_set - node_relations))
probs = self._node_probabilities[valid_negatives]
probs /= probs.sum()
indices = self._np_random.choice(valid_negatives, size=self.negative, p=probs, replace=False)
return list(indices)
@staticmethod
def _loss_fn(matrix, regularization_coeff=1.0):
"""Computes loss value.
Parameters
----------
matrix : numpy.array
Array containing vectors for u, v and negative samples, of shape (2 + negative_size, dim).
regularization_coeff : float, optional
Coefficient to use for l2-regularization
Returns
-------
float
Computed loss value.
Warnings
--------
Only used for autograd gradients, since autograd requires a specific function signature.
"""
vector_u = matrix[0]
vectors_v = matrix[1:]
euclidean_dists = grad_np.linalg.norm(vector_u - vectors_v, axis=1)
norm = grad_np.linalg.norm(vector_u)
all_norms = grad_np.linalg.norm(vectors_v, axis=1)
poincare_dists = grad_np.arccosh(
1 + 2 * (
(euclidean_dists ** 2) / ((1 - norm ** 2) * (1 - all_norms ** 2))
)
)
exp_negative_distances = grad_np.exp(-poincare_dists)
regularization_term = regularization_coeff * grad_np.linalg.norm(vectors_v[0]) ** 2
return -grad_np.log(exp_negative_distances[0] / (exp_negative_distances.sum())) + regularization_term
@staticmethod
def _clip_vectors(vectors, epsilon):
"""Clip vectors to have a norm of less than one.
Parameters
----------
vectors : numpy.array
Can be 1-D, or 2-D (in which case the norm for each row is checked).
epsilon : float
Parameter for numerical stability, each dimension of the vector is reduced by `epsilon`
if the norm of the vector is greater than or equal to 1.
Returns
-------
numpy.array
Array with norms clipped below 1.
"""
one_d = len(vectors.shape) == 1
threshold = 1 - epsilon
if one_d:<|fim▁hole|> norm = np.linalg.norm(vectors)
if norm < threshold:
return vectors
else:
return vectors / norm - (np.sign(vectors) * epsilon)
else:
norms = np.linalg.norm(vectors, axis=1)
if (norms < threshold).all():
return vectors
else:
vectors[norms >= threshold] *= (threshold / norms[norms >= threshold])[:, np.newaxis]
vectors[norms >= threshold] -= np.sign(vectors[norms >= threshold]) * epsilon
return vectors
def save(self, *args, **kwargs):
"""Save complete model to disk, inherited from :class:`~gensim.utils.SaveLoad`.
See also
--------
:meth:`~gensim.models.poincare.PoincareModel.load`
Parameters
----------
*args
Positional arguments passed to :meth:`~gensim.utils.SaveLoad.save`.
**kwargs
Keyword arguments passed to :meth:`~gensim.utils.SaveLoad.save`.
"""
self._loss_grad = None # Can't pickle autograd fn to disk
attrs_to_ignore = ['_node_probabilities', '_node_counts_cumsum']
kwargs['ignore'] = set(list(kwargs.get('ignore', [])) + attrs_to_ignore)
super(PoincareModel, self).save(*args, **kwargs)
@classmethod
def load(cls, *args, **kwargs):
"""Load model from disk, inherited from :class:`~gensim.utils.SaveLoad`.
See also
--------
:meth:`~gensim.models.poincare.PoincareModel.save`
Parameters
----------
*args
Positional arguments passed to :meth:`~gensim.utils.SaveLoad.load`.
**kwargs
Keyword arguments passed to :meth:`~gensim.utils.SaveLoad.load`.
Returns
-------
:class:`~gensim.models.poincare.PoincareModel`
The loaded model.
"""
model = super(PoincareModel, cls).load(*args, **kwargs)
model._init_node_probabilities()
return model
def _prepare_training_batch(self, relations, all_negatives, check_gradients=False):
"""Create a training batch and compute gradients and loss for the batch.
Parameters
----------
relations : list of tuples
List of tuples of positive examples of the form (node_1_index, node_2_index).
all_negatives : list of lists
List of lists of negative samples for each node_1 in the positive examples.
check_gradients : bool, optional
Whether to compare the computed gradients to autograd gradients for this batch.
Returns
-------
:class:`~gensim.models.poincare.PoincareBatch`
Node indices, computed gradients and loss for the batch.
"""
batch_size = len(relations)
indices_u, indices_v = [], []
for relation, negatives in zip(relations, all_negatives):
u, v = relation
indices_u.append(u)
indices_v.append(v)
indices_v.extend(negatives)
vectors_u = self.kv.syn0[indices_u]
vectors_v = self.kv.syn0[indices_v].reshape((batch_size, 1 + self.negative, self.size))
vectors_v = vectors_v.swapaxes(0, 1).swapaxes(1, 2)
batch = PoincareBatch(vectors_u, vectors_v, indices_u, indices_v, self.regularization_coeff)
batch.compute_all()
if check_gradients:
self._check_gradients(relations, all_negatives, batch)
return batch
def _check_gradients(self, relations, all_negatives, batch, tol=1e-8):
"""Compare computed gradients for batch to autograd gradients.
Parameters
----------
relations : list of tuples
List of tuples of positive examples of the form (node_1_index, node_2_index).
all_negatives : list of lists
List of lists of negative samples for each node_1 in the positive examples.
batch : :class:`~gensim.models.poincare.PoincareBatch`
Batch for which computed gradients are to be checked.
tol : float, optional
The maximum error between our computed gradients and the reference ones from autograd.
"""
if not AUTOGRAD_PRESENT:
logger.warning('autograd could not be imported, cannot do gradient checking')
logger.warning('please install autograd to enable gradient checking')
return
if self._loss_grad is None:
self._loss_grad = grad(PoincareModel._loss_fn)
max_diff = 0.0
for i, (relation, negatives) in enumerate(zip(relations, all_negatives)):
u, v = relation
auto_gradients = self._loss_grad(
np.vstack((self.kv.syn0[u], self.kv.syn0[[v] + negatives])), self.regularization_coeff)
computed_gradients = np.vstack((batch.gradients_u[:, i], batch.gradients_v[:, :, i]))
diff = np.abs(auto_gradients - computed_gradients).max()
if diff > max_diff:
max_diff = diff
logger.info('max difference between computed gradients and autograd gradients: %.10f', max_diff)
assert max_diff < tol, (
'Max difference between computed gradients and autograd gradients %.10f, '
'greater than tolerance %.10f' % (max_diff, tol))
def _sample_negatives_batch(self, nodes):
"""Get negative examples for each node.
Parameters
----------
nodes : iterable of int
Iterable of node indices for which negative samples are to be returned.
Returns
-------
list of lists
Each inner list is a list of negative samples for a single node in the input list.
"""
all_indices = [self._sample_negatives(node) for node in nodes]
return all_indices
def _train_on_batch(self, relations, check_gradients=False):
"""Perform training for a single training batch.
Parameters
----------
relations : list of tuples of (int, int)
List of tuples of positive examples of the form (node_1_index, node_2_index).
check_gradients : bool, optional
Whether to compare the computed gradients to autograd gradients for this batch.
Returns
-------
:class:`~gensim.models.poincare.PoincareBatch`
The batch that was just trained on, contains computed loss for the batch.
"""
all_negatives = self._sample_negatives_batch(relation[0] for relation in relations)
batch = self._prepare_training_batch(relations, all_negatives, check_gradients)
self._update_vectors_batch(batch)
return batch
@staticmethod
def _handle_duplicates(vector_updates, node_indices):
"""Handle occurrences of multiple updates to the same node in a batch of vector updates.
Parameters
----------
vector_updates : numpy.array
Array with each row containing updates to be performed on a certain node.
node_indices : list of int
Node indices on which the above updates are to be performed on.
Notes
-----
Mutates the `vector_updates` array.
Required because vectors[[2, 1, 2]] += np.array([-0.5, 1.0, 0.5]) performs only the last update
on the row at index 2.
"""
counts = Counter(node_indices)
node_dict = defaultdict(list)
for i, node_index in enumerate(node_indices):
node_dict[node_index].append(i)
for node_index, count in counts.items():
if count == 1:
continue
positions = node_dict[node_index]
# Move all updates to the same node to the last such update, zeroing all the others
vector_updates[positions[-1]] = vector_updates[positions].sum(axis=0)
vector_updates[positions[:-1]] = 0
def _update_vectors_batch(self, batch):
"""Update vectors for nodes in the given batch.
Parameters
----------
batch : :class:`~gensim.models.poincare.PoincareBatch`
Batch containing computed gradients and node indices of the batch for which updates are to be done.
"""
grad_u, grad_v = batch.gradients_u, batch.gradients_v
indices_u, indices_v = batch.indices_u, batch.indices_v
batch_size = len(indices_u)
u_updates = (self.alpha * (batch.alpha ** 2) / 4 * grad_u).T
self._handle_duplicates(u_updates, indices_u)
self.kv.syn0[indices_u] -= u_updates
self.kv.syn0[indices_u] = self._clip_vectors(self.kv.syn0[indices_u], self.epsilon)
v_updates = self.alpha * (batch.beta ** 2)[:, np.newaxis] / 4 * grad_v
v_updates = v_updates.swapaxes(1, 2).swapaxes(0, 1)
v_updates = v_updates.reshape(((1 + self.negative) * batch_size, self.size))
self._handle_duplicates(v_updates, indices_v)
self.kv.syn0[indices_v] -= v_updates
self.kv.syn0[indices_v] = self._clip_vectors(self.kv.syn0[indices_v], self.epsilon)
def train(self, epochs, batch_size=10, print_every=1000, check_gradients_every=None):
"""Train Poincare embeddings using loaded data and model parameters.
Parameters
----------
epochs : int
Number of iterations (epochs) over the corpus.
batch_size : int, optional
Number of examples to train on in a single batch.
print_every : int, optional
Prints progress and average loss after every `print_every` batches.
check_gradients_every : int or None, optional
Compares computed gradients and autograd gradients after every `check_gradients_every` batches.
Useful for debugging, doesn't compare by default.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.models.poincare import PoincareModel
>>> relations = [('kangaroo', 'marsupial'), ('kangaroo', 'mammal'), ('gib', 'cat')]
>>> model = PoincareModel(relations, negative=2)
>>> model.train(epochs=50)
"""
if self.workers > 1:
raise NotImplementedError("Multi-threaded version not implemented yet")
# Some divide-by-zero results are handled explicitly
old_settings = np.seterr(divide='ignore', invalid='ignore')
logger.info(
"training model of size %d with %d workers on %d relations for %d epochs and %d burn-in epochs, "
"using lr=%.5f burn-in lr=%.5f negative=%d",
self.size, self.workers, len(self.all_relations), epochs, self.burn_in,
self.alpha, self.burn_in_alpha, self.negative
)
if self.burn_in > 0 and not self._burn_in_done:
logger.info("starting burn-in (%d epochs)----------------------------------------", self.burn_in)
self.alpha = self.burn_in_alpha
self._train_batchwise(
epochs=self.burn_in, batch_size=batch_size, print_every=print_every,
check_gradients_every=check_gradients_every)
self._burn_in_done = True
logger.info("burn-in finished")
self.alpha = self.train_alpha
logger.info("starting training (%d epochs)----------------------------------------", epochs)
self._train_batchwise(
epochs=epochs, batch_size=batch_size, print_every=print_every,
check_gradients_every=check_gradients_every)
logger.info("training finished")
np.seterr(**old_settings)
def _train_batchwise(self, epochs, batch_size=10, print_every=1000, check_gradients_every=None):
"""Train Poincare embeddings using specified parameters.
Parameters
----------
epochs : int
Number of iterations (epochs) over the corpus.
batch_size : int, optional
Number of examples to train on in a single batch.
print_every : int, optional
Prints progress and average loss after every `print_every` batches.
check_gradients_every : int or None, optional
Compares computed gradients and autograd gradients after every `check_gradients_every` batches.
Useful for debugging, doesn't compare by default.
"""
if self.workers > 1:
raise NotImplementedError("Multi-threaded version not implemented yet")
for epoch in range(1, epochs + 1):
indices = list(range(len(self.all_relations)))
self._np_random.shuffle(indices)
avg_loss = 0.0
last_time = time.time()
for batch_num, i in enumerate(range(0, len(indices), batch_size), start=1):
should_print = not (batch_num % print_every)
check_gradients = bool(check_gradients_every) and (batch_num % check_gradients_every) == 0
batch_indices = indices[i:i + batch_size]
relations = [self.all_relations[idx] for idx in batch_indices]
result = self._train_on_batch(relations, check_gradients=check_gradients)
avg_loss += result.loss
if should_print:
avg_loss /= print_every
time_taken = time.time() - last_time
speed = print_every * batch_size / time_taken
logger.info(
'training on epoch %d, examples #%d-#%d, loss: %.2f'
% (epoch, i, i + batch_size, avg_loss))
logger.info(
'time taken for %d examples: %.2f s, %.2f examples / s'
% (print_every * batch_size, time_taken, speed))
last_time = time.time()
avg_loss = 0.0
class PoincareBatch(object):
"""Compute Poincare distances, gradients and loss for a training batch.
Store intermediate state to avoid recomputing multiple times.
"""
def __init__(self, vectors_u, vectors_v, indices_u, indices_v, regularization_coeff=1.0):
"""
Initialize instance with sets of vectors for which distances are to be computed.
Parameters
----------
vectors_u : numpy.array
Vectors of all nodes `u` in the batch. Expected shape (batch_size, dim).
vectors_v : numpy.array
Vectors of all positively related nodes `v` and negatively sampled nodes `v'`,
for each node `u` in the batch. Expected shape (1 + neg_size, dim, batch_size).
indices_u : list of int
List of node indices for each of the vectors in `vectors_u`.
indices_v : list of lists of int
Nested list of lists, each of which is a list of node indices
for each of the vectors in `vectors_v` for a specific node `u`.
regularization_coeff : float, optional
Coefficient to use for l2-regularization
"""
self.vectors_u = vectors_u.T[np.newaxis, :, :] # (1, dim, batch_size)
self.vectors_v = vectors_v # (1 + neg_size, dim, batch_size)
self.indices_u = indices_u
self.indices_v = indices_v
self.regularization_coeff = regularization_coeff
self.poincare_dists = None
self.euclidean_dists = None
self.norms_u = None
self.norms_v = None
self.alpha = None
self.beta = None
self.gamma = None
self.gradients_u = None
self.distance_gradients_u = None
self.gradients_v = None
self.distance_gradients_v = None
self.loss = None
self._distances_computed = False
self._gradients_computed = False
self._distance_gradients_computed = False
self._loss_computed = False
def compute_all(self):
"""Convenience method to perform all computations."""
self.compute_distances()
self.compute_distance_gradients()
self.compute_gradients()
self.compute_loss()
def compute_distances(self):
"""Compute and store norms, euclidean distances and poincare distances between input vectors."""
if self._distances_computed:
return
euclidean_dists = np.linalg.norm(self.vectors_u - self.vectors_v, axis=1) # (1 + neg_size, batch_size)
norms_u = np.linalg.norm(self.vectors_u, axis=1) # (1, batch_size)
norms_v = np.linalg.norm(self.vectors_v, axis=1) # (1 + neg_size, batch_size)
alpha = 1 - norms_u ** 2 # (1, batch_size)
beta = 1 - norms_v ** 2 # (1 + neg_size, batch_size)
gamma = 1 + 2 * (
(euclidean_dists ** 2) / (alpha * beta)
) # (1 + neg_size, batch_size)
poincare_dists = np.arccosh(gamma) # (1 + neg_size, batch_size)
exp_negative_distances = np.exp(-poincare_dists) # (1 + neg_size, batch_size)
Z = exp_negative_distances.sum(axis=0) # (batch_size)
self.euclidean_dists = euclidean_dists
self.poincare_dists = poincare_dists
self.exp_negative_distances = exp_negative_distances
self.Z = Z
self.gamma = gamma
self.norms_u = norms_u
self.norms_v = norms_v
self.alpha = alpha
self.beta = beta
self.gamma = gamma
self._distances_computed = True
def compute_gradients(self):
"""Compute and store gradients of loss function for all input vectors."""
if self._gradients_computed:
return
self.compute_distances()
self.compute_distance_gradients()
# (1 + neg_size, dim, batch_size)
gradients_v = -self.exp_negative_distances[:, np.newaxis, :] * self.distance_gradients_v
gradients_v /= self.Z # (1 + neg_size, dim, batch_size)
gradients_v[0] += self.distance_gradients_v[0]
gradients_v[0] += self.regularization_coeff * 2 * self.vectors_v[0]
# (1 + neg_size, dim, batch_size)
gradients_u = -self.exp_negative_distances[:, np.newaxis, :] * self.distance_gradients_u
gradients_u /= self.Z # (1 + neg_size, dim, batch_size)
gradients_u = gradients_u.sum(axis=0) # (dim, batch_size)
gradients_u += self.distance_gradients_u[0]
assert not np.isnan(gradients_u).any()
assert not np.isnan(gradients_v).any()
self.gradients_u = gradients_u
self.gradients_v = gradients_v
self._gradients_computed = True
    def compute_distance_gradients(self):
        """Compute and store partial derivatives of poincare distance d(u, v) w.r.t all u and all v.

        Requires distances (computed on demand). Results are cached on the
        instance; repeated calls are no-ops (guarded by
        `self._distance_gradients_computed`).
        """
        if self._distance_gradients_computed:
            return
        self.compute_distances()
        euclidean_dists_squared = self.euclidean_dists ** 2  # (1 + neg_size, batch_size)
        # Common scalar factor of both gradients; np.newaxis inserts the dim
        # axis so it broadcasts against (1 + neg_size, dim, batch_size).
        # (1 + neg_size, 1, batch_size)
        c_ = (4 / (self.alpha * self.beta * np.sqrt(self.gamma ** 2 - 1)))[:, np.newaxis, :]
        # (1 + neg_size, 1, batch_size)
        u_coeffs = ((euclidean_dists_squared + self.alpha) / self.alpha)[:, np.newaxis, :]
        distance_gradients_u = u_coeffs * self.vectors_u - self.vectors_v  # (1 + neg_size, dim, batch_size)
        distance_gradients_u *= c_  # (1 + neg_size, dim, batch_size)
        # gamma == 1 means distance 0 (u == v): sqrt(gamma**2 - 1) is 0, so c_
        # divides by zero and produces nan/inf entries that must be zeroed.
        nan_gradients = self.gamma == 1  # (1 + neg_size, batch_size)
        if nan_gradients.any():
            # swapaxes returns a view, so assigning through it zeroes the
            # affected (node, batch) slices of distance_gradients_u in place.
            distance_gradients_u.swapaxes(1, 2)[nan_gradients] = 0
        self.distance_gradients_u = distance_gradients_u
        # Same computation for the v-side, with alpha and beta swapping roles.
        # (1 + neg_size, 1, batch_size)
        v_coeffs = ((euclidean_dists_squared + self.beta) / self.beta)[:, np.newaxis, :]
        distance_gradients_v = v_coeffs * self.vectors_v - self.vectors_u  # (1 + neg_size, dim, batch_size)
        distance_gradients_v *= c_  # (1 + neg_size, dim, batch_size)
        if nan_gradients.any():
            distance_gradients_v.swapaxes(1, 2)[nan_gradients] = 0
        self.distance_gradients_v = distance_gradients_v
        self._distance_gradients_computed = True
def compute_loss(self):
"""Compute and store loss value for the given batch of examples."""
if self._loss_computed:
return
self.compute_distances()
self.loss = -np.log(self.exp_negative_distances[0] / self.Z).sum() # scalar
self._loss_computed = True
class PoincareKeyedVectors(BaseKeyedVectors):
"""Vectors and vocab for the :class:`~gensim.models.poincare.PoincareModel` training class.
Used to perform operations on the vectors such as vector lookup, distance calculations etc.
"""
    def __init__(self, vector_size):
        """Initialize empty keyed vectors of dimensionality `vector_size`.

        Parameters
        ----------
        vector_size : int
            Dimensionality of the node vectors.
        """
        super(PoincareKeyedVectors, self).__init__(vector_size)
        # NOTE(review): max_distance is initialized here but never updated in
        # this class — presumably maintained by the training code; confirm.
        self.max_distance = 0
        # index2word maps vector index -> node key; vocab maps key -> metadata.
        self.index2word = []
        self.vocab = {}
    @property
    def vectors(self):
        # Alias for the underlying `syn0` matrix (legacy word2vec-era name).
        return self.syn0
    @vectors.setter
    def vectors(self, value):
        # Setting `vectors` replaces the underlying `syn0` matrix.
        self.syn0 = value
    @property
    def index2entity(self):
        # Alias for `index2word`; the "entities" here are graph nodes.
        return self.index2word
    @index2entity.setter
    def index2entity(self, value):
        self.index2word = value
    def word_vec(self, word):
        """Get the word's representations in vector space, as a 1D numpy array.

        Parameters
        ----------
        word : str
            Key of the node whose vector is requested.

        Returns
        -------
        numpy.array
            Vector for `word`, as returned by the base class `get_vector`.

        Examples
        --------
        .. sourcecode:: pycon

            >>> from gensim.test.utils import datapath
            >>>
            >>> # Read the sample relations file and train the model
            >>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv'))
            >>> model = PoincareModel(train_data=relations)
            >>> model.train(epochs=50)
            >>>
            >>> # Query the trained model.
            >>> wv = model.kv.word_vec('kangaroo.n.01')

        """
        return super(PoincareKeyedVectors, self).get_vector(word)
    def words_closer_than(self, w1, w2):
        """Get all words that are closer to `w1` than `w2` is to `w1`.

        Delegates to the base class `closer_than`, using Poincare distances.

        Parameters
        ----------
        w1 : str
            Input word.
        w2 : str
            Input word.

        Returns
        -------
        list (str)
            List of words that are closer to `w1` than `w2` is to `w1`.

        Examples
        --------
        .. sourcecode:: pycon

            >>> from gensim.test.utils import datapath
            >>>
            >>> # Read the sample relations file and train the model
            >>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv'))
            >>> model = PoincareModel(train_data=relations)
            >>> model.train(epochs=50)
            >>>
            >>> # Which term is closer to 'kangaroo' than 'metatherian' is to 'kangaroo'?
            >>> model.kv.words_closer_than('kangaroo.n.01', 'metatherian.n.01')
            [u'marsupial.n.01', u'phalanger.n.01']

        """
        return super(PoincareKeyedVectors, self).closer_than(w1, w2)
    def save_word2vec_format(self, fname, fvocab=None, binary=False, total_vec=None):
        """Store the input-hidden weight matrix in the same format used by the original
        C word2vec-tool, for compatibility, using :func:`~gensim.models.utils_any2vec._save_word2vec_format`.

        Parameters
        ----------
        fname : str
            Path to file that will be used for storing.
        fvocab : str, optional
            File path used to save the vocabulary.
        binary : bool, optional
            If True, the data will be saved in binary word2vec format, else it will be saved in plain text.
        total_vec : int, optional
            Explicitly specify total number of vectors
            (in case word vectors are appended with document vectors afterwards).

        """
        _save_word2vec_format(fname, self.vocab, self.syn0, fvocab=fvocab, binary=binary, total_vec=total_vec)
    @classmethod
    def load_word2vec_format(cls, fname, fvocab=None, binary=False, encoding='utf8', unicode_errors='strict',
                             limit=None, datatype=REAL):
        """Load the input-hidden weight matrix from the original C word2vec-tool format.

        Use :func:`~gensim.models.utils_any2vec._load_word2vec_format`.
        Note that the information stored in the file is incomplete (the binary tree is missing),
        so while you can query for word similarity etc., you cannot continue training
        with a model loaded this way.

        Parameters
        ----------
        fname : str
            The file path to the saved word2vec-format file.
        fvocab : str, optional
            File path to the vocabulary. Word counts are read from `fvocab` filename, if set
            (this is the file generated by `-save-vocab` flag of the original C tool).
        binary : bool, optional
            If True, indicates whether the data is in binary word2vec format.
        encoding : str, optional
            If you trained the C model using non-utf8 encoding for words, specify that encoding in `encoding`.
        unicode_errors : str, optional
            default 'strict', is a string suitable to be passed as the `errors`
            argument to the unicode() (Python 2.x) or str() (Python 3.x) function. If your source
            file may include word tokens truncated in the middle of a multibyte unicode character
            (as is common from the original word2vec.c tool), 'ignore' or 'replace' may help.
        limit : int, optional
            Sets a maximum number of word-vectors to read from the file. The default,
            None, means read all.
        datatype : type, optional
            (Experimental) Can coerce dimensions to a non-default float type (such as `np.float16`) to save memory.
            Such types may result in much slower bulk operations or incompatibility with optimized routines.

        Returns
        -------
        :class:`~gensim.models.poincare.PoincareKeyedVectors`
            Loaded Poincare keyed vectors (an instance of `cls`, not a full model).

        """
        return _load_word2vec_format(
            cls, fname, fvocab=fvocab, binary=binary, encoding=encoding, unicode_errors=unicode_errors,
            limit=limit, datatype=datatype)
@staticmethod
def vector_distance(vector_1, vector_2):
"""Compute poincare distance between two input vectors. Convenience method over `vector_distance_batch`.
Parameters
----------
vector_1 : numpy.array
Input vector.
vector_2 : numpy.array
Input vector.
Returns
-------
numpy.float
Poincare distance between `vector_1` and `vector_2`.
"""
return PoincareKeyedVectors.vector_distance_batch(vector_1, vector_2[np.newaxis, :])[0]
@staticmethod
def vector_distance_batch(vector_1, vectors_all):
"""Compute poincare distances between one vector and a set of other vectors.
Parameters
----------
vector_1 : numpy.array
vector from which Poincare distances are to be computed, expected shape (dim,).
vectors_all : numpy.array
for each row in vectors_all, distance from vector_1 is computed, expected shape (num_vectors, dim).
Returns
-------
numpy.array
Poincare distance between `vector_1` and each row in `vectors_all`, shape (num_vectors,).
"""
euclidean_dists = np.linalg.norm(vector_1 - vectors_all, axis=1)
norm = np.linalg.norm(vector_1)
all_norms = np.linalg.norm(vectors_all, axis=1)
return np.arccosh(
1 + 2 * (
(euclidean_dists ** 2) / ((1 - norm ** 2) * (1 - all_norms ** 2))
)
)
def closest_child(self, node):
"""Get the node closest to `node` that is lower in the hierarchy than `node`.
Parameters
----------
node : {str, int}
Key for node for which closest child is to be found.
Returns
-------
{str, None}
Node closest to `node` that is lower in the hierarchy than `node`.
If there are no nodes lower in the hierarchy, None is returned.
"""
all_distances = self.distances(node)
all_norms = np.linalg.norm(self.syn0, axis=1)
node_norm = all_norms[self.vocab[node].index]
mask = node_norm >= all_norms
if mask.all(): # No nodes lower in the hierarchy
return None
all_distances = np.ma.array(all_distances, mask=mask)
closest_child_index = np.ma.argmin(all_distances)
return self.index2word[closest_child_index]
def closest_parent(self, node):
"""Get the node closest to `node` that is higher in the hierarchy than `node`.
Parameters
----------
node : {str, int}
Key for node for which closest parent is to be found.
Returns
-------
{str, None}
Node closest to `node` that is higher in the hierarchy than `node`.
If there are no nodes higher in the hierarchy, None is returned.
"""
all_distances = self.distances(node)
all_norms = np.linalg.norm(self.syn0, axis=1)
node_norm = all_norms[self.vocab[node].index]
mask = node_norm <= all_norms
if mask.all(): # No nodes higher in the hierarchy
return None
all_distances = np.ma.array(all_distances, mask=mask)
closest_child_index = np.ma.argmin(all_distances)
return self.index2word[closest_child_index]
def descendants(self, node, max_depth=5):
"""Get the list of recursively closest children from the given node, up to a max depth of `max_depth`.
Parameters
----------
node : {str, int}
Key for node for which descendants are to be found.
max_depth : int
Maximum number of descendants to return.
Returns
-------
list of str
Descendant nodes from the node `node`.
"""
depth = 0
descendants = []
current_node = node
while depth < max_depth:
descendants.append(self.closest_child(current_node))
current_node = descendants[-1]
depth += 1
return descendants
def ancestors(self, node):
"""Get the list of recursively closest parents from the given node.
Parameters
----------
node : {str, int}
Key for node for which ancestors are to be found.
Returns
-------
list of str
Ancestor nodes of the node `node`.
"""
ancestors = []
current_node = node
ancestor = self.closest_parent(current_node)
while ancestor is not None:
ancestors.append(ancestor)
ancestor = self.closest_parent(ancestors[-1])
return ancestors
def distance(self, w1, w2):
"""Calculate Poincare distance between vectors for nodes `w1` and `w2`.
Parameters
----------
w1 : {str, int}
Key for first node.
w2 : {str, int}
Key for second node.
Returns
-------
float
Poincare distance between the vectors for nodes `w1` and `w2`.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.test.utils import datapath
>>>
>>> # Read the sample relations file and train the model
>>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv'))
>>> model = PoincareModel(train_data=relations)
>>> model.train(epochs=50)
>>>
>>> # What is the distance between the words 'mammal' and 'carnivore'?
>>> model.kv.distance('mammal.n.01', 'carnivore.n.01')
2.9742298803339304
Raises
------
KeyError
If either of `w1` and `w2` is absent from vocab.
"""
vector_1 = self.word_vec(w1)
vector_2 = self.word_vec(w2)
return self.vector_distance(vector_1, vector_2)
def similarity(self, w1, w2):
"""Compute similarity based on Poincare distance between vectors for nodes `w1` and `w2`.
Parameters
----------
w1 : {str, int}
Key for first node.
w2 : {str, int}
Key for second node.
Returns
-------
float
Similarity between the between the vectors for nodes `w1` and `w2` (between 0 and 1).
Examples
--------
.. sourcecode:: pycon
>>> from gensim.test.utils import datapath
>>>
>>> # Read the sample relations file and train the model
>>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv'))
>>> model = PoincareModel(train_data=relations)
>>> model.train(epochs=50)
>>>
>>> # What is the similarity between the words 'mammal' and 'carnivore'?
>>> model.kv.similarity('mammal.n.01', 'carnivore.n.01')
0.25162107631176484
Raises
------
KeyError
If either of `w1` and `w2` is absent from vocab.
"""
return 1 / (1 + self.distance(w1, w2))
def most_similar(self, node_or_vector, topn=10, restrict_vocab=None):
"""Find the top-N most similar nodes to the given node or vector, sorted in increasing order of distance.
Parameters
----------
node_or_vector : {str, int, numpy.array}
node key or vector for which similar nodes are to be found.
topn : int or None, optional
Number of top-N similar nodes to return, when `topn` is int. When `topn` is None,
then distance for all nodes are returned.
restrict_vocab : int or None, optional
Optional integer which limits the range of vectors which are searched for most-similar values.
For example, restrict_vocab=10000 would only check the first 10000 node vectors in the vocabulary order.
This may be meaningful if vocabulary is sorted by descending frequency.
Returns
--------
list of (str, float) or numpy.array
When `topn` is int, a sequence of (node, distance) is returned in increasing order of distance.
When `topn` is None, then similarities for all words are returned as a one-dimensional numpy array with the
size of the vocabulary.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.test.utils import datapath
>>>
>>> # Read the sample relations file and train the model
>>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv'))
>>> model = PoincareModel(train_data=relations)
>>> model.train(epochs=50)
>>>
>>> # Which words are most similar to 'kangaroo'?
>>> model.kv.most_similar('kangaroo.n.01', topn=2)
[(u'kangaroo.n.01', 0.0), (u'marsupial.n.01', 0.26524229460827725)]
"""
if isinstance(topn, Integral) and topn < 1:
return []
if not restrict_vocab:
all_distances = self.distances(node_or_vector)
else:
nodes_to_use = self.index2word[:restrict_vocab]
all_distances = self.distances(node_or_vector, nodes_to_use)
if isinstance(node_or_vector, string_types + (int,)):
node_index = self.vocab[node_or_vector].index
else:
node_index = None
if not topn:
closest_indices = matutils.argsort(all_distances)
else:
closest_indices = matutils.argsort(all_distances, topn=1 + topn)
result = [
(self.index2word[index], float(all_distances[index]))
for index in closest_indices if (not node_index or index != node_index) # ignore the input node
]
if topn:
result = result[:topn]
return result
def distances(self, node_or_vector, other_nodes=()):
"""Compute Poincare distances from given `node_or_vector` to all nodes in `other_nodes`.
If `other_nodes` is empty, return distance between `node_or_vector` and all nodes in vocab.
Parameters
----------
node_or_vector : {str, int, numpy.array}
Node key or vector from which distances are to be computed.
other_nodes : {iterable of str, iterable of int, None}, optional
For each node in `other_nodes` distance from `node_or_vector` is computed.
If None or empty, distance of `node_or_vector` from all nodes in vocab is computed (including itself).
Returns
-------
numpy.array
Array containing distances to all nodes in `other_nodes` from input `node_or_vector`,
in the same order as `other_nodes`.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.test.utils import datapath
>>>
>>> # Read the sample relations file and train the model
>>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv'))
>>> model = PoincareModel(train_data=relations)
>>> model.train(epochs=50)
>>>
>>> # Check the distances between a word and a list of other words.
>>> model.kv.distances('mammal.n.01', ['carnivore.n.01', 'dog.n.01'])
array([2.97422988, 2.83007402])
>>> # Check the distances between a word and every other word in the vocab.
>>> all_distances = model.kv.distances('mammal.n.01')
Raises
------
KeyError
If either `node_or_vector` or any node in `other_nodes` is absent from vocab.
"""
if isinstance(node_or_vector, string_types):
input_vector = self.word_vec(node_or_vector)
else:
input_vector = node_or_vector
if not other_nodes:
other_vectors = self.syn0
else:
other_indices = [self.vocab[node].index for node in other_nodes]
other_vectors = self.syn0[other_indices]
return self.vector_distance_batch(input_vector, other_vectors)
def norm(self, node_or_vector):
"""Compute absolute position in hierarchy of input node or vector.
Values range between 0 and 1. A lower value indicates the input node or vector is higher in the hierarchy.
Parameters
----------
node_or_vector : {str, int, numpy.array}
Input node key or vector for which position in hierarchy is to be returned.
Returns
-------
float
Absolute position in the hierarchy of the input vector or node.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.test.utils import datapath
>>>
>>> # Read the sample relations file and train the model
>>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv'))
>>> model = PoincareModel(train_data=relations)
>>> model.train(epochs=50)
>>>
>>> # Get the norm of the embedding of the word `mammal`.
>>> model.kv.norm('mammal.n.01')
0.6423008703542398
Notes
-----
The position in hierarchy is based on the norm of the vector for the node.
"""
if isinstance(node_or_vector, string_types):
input_vector = self.word_vec(node_or_vector)
else:
input_vector = node_or_vector
return np.linalg.norm(input_vector)
def difference_in_hierarchy(self, node_or_vector_1, node_or_vector_2):
"""Compute relative position in hierarchy of `node_or_vector_1` relative to `node_or_vector_2`.
A positive value indicates `node_or_vector_1` is higher in the hierarchy than `node_or_vector_2`.
Parameters
----------
node_or_vector_1 : {str, int, numpy.array}
Input node key or vector.
node_or_vector_2 : {str, int, numpy.array}
Input node key or vector.
Returns
-------
float
Relative position in hierarchy of `node_or_vector_1` relative to `node_or_vector_2`.
Examples
--------
.. sourcecode:: pycon
>>> from gensim.test.utils import datapath
>>>
>>> # Read the sample relations file and train the model
>>> relations = PoincareRelations(file_path=datapath('poincare_hypernyms_large.tsv'))
>>> model = PoincareModel(train_data=relations)
>>> model.train(epochs=50)
>>>
>>> model.kv.difference_in_hierarchy('mammal.n.01', 'dog.n.01')
0.05382517902410999
>>> model.kv.difference_in_hierarchy('dog.n.01', 'mammal.n.01')
-0.05382517902410999
Notes
-----
The returned value can be positive or negative, depending on whether `node_or_vector_1` is higher
or lower in the hierarchy than `node_or_vector_2`.
"""
return self.norm(node_or_vector_2) - self.norm(node_or_vector_1)
class PoincareRelations(object):
    """Stream relations for `PoincareModel` from a tsv-like file."""
    def __init__(self, file_path, encoding='utf8', delimiter='\t'):
        """Initialize instance from file containing a pair of nodes (a relation) per line.

        Parameters
        ----------
        file_path : str
            Path to file containing a pair of nodes (a relation) per line, separated by `delimiter`.
            Since the relations are asymmetric, the order of `u` and `v` nodes in each pair matters.
            To express a "u is v" relation, the lines should take the form `u delimiter v`.
            e.g: `kangaroo mammal` is a tab-delimited line expressing a "`kangaroo is a mammal`" relation.

            For a full input file example, see `gensim/test/test_data/poincare_hypernyms.tsv
            <https://github.com/RaRe-Technologies/gensim/blob/master/gensim/test/test_data/poincare_hypernyms.tsv>`_.
        encoding : str, optional
            Character encoding of the input file.
        delimiter : str, optional
            Delimiter character for each relation.

        """
        self.file_path = file_path
        self.encoding = encoding
        self.delimiter = delimiter
    def __iter__(self):
        """Stream relations from self.file_path decoded into unicode strings.

        Yields
        -------
        (unicode, unicode)
            Relation from input file.

        """
        with utils.open(self.file_path, 'rb') as file_obj:
            # csv.reader requires bytestring input in Python 2, unicode input
            # in Python 3, so decoding happens before or after parsing
            # depending on the interpreter version.
            if sys.version_info[0] < 3:
                lines = file_obj
            else:
                lines = (l.decode(self.encoding) for l in file_obj)
            # csv.reader requires bytestring input in python2, unicode input in python3
            reader = csv.reader(lines, delimiter=self.delimiter)
            for row in reader:
                if sys.version_info[0] < 3:
                    row = [value.decode(self.encoding) for value in row]
                yield tuple(row)
class NegativesBuffer(object):
    """Buffer and return negative samples."""
    def __init__(self, items):
        """Initialize instance from list or numpy array of samples.

        Parameters
        ----------
        items : list/numpy.array
            List or array containing negative samples.

        """
        self._items = items
        # Index of the first item that has not been handed out yet.
        self._position = 0
    def num_items(self):
        """Get the number of items remaining in the buffer.

        Returns
        -------
        int
            Number of items in the buffer that haven't been consumed yet.

        """
        remaining = len(self._items) - self._position
        return remaining
    def get_items(self, num_items):
        """Get the next `num_items` from buffer.

        Parameters
        ----------
        num_items : int
            Number of items to fetch.

        Returns
        -------
        numpy.array or list
            Slice containing `num_items` items from the original data.

        Notes
        -----
        No error is raised if less than `num_items` items are remaining,
        simply all the remaining items are returned.

        """
        begin = self._position
        self._position = begin + num_items
        # Slicing past the end of a list/array simply truncates, which gives
        # the documented "return whatever remains" behaviour.
        return self._items[begin:self._position]
class ReconstructionEvaluation(object):
    """Evaluate reconstruction on given network for given embedding."""
    def __init__(self, file_path, embedding):
        """Initialize evaluation instance with tsv file containing relation pairs and embedding to be evaluated.

        Parameters
        ----------
        file_path : str
            Path to tsv file containing relation pairs.
        embedding : :class:`~gensim.models.poincare.PoincareKeyedVectors`
            Embedding to be evaluated.

        """
        items = set()
        embedding_vocab = embedding.vocab
        relations = defaultdict(set)
        with utils.open(file_path, 'r') as f:
            reader = csv.reader(f, delimiter='\t')
            for row in reader:
                assert len(row) == 2, 'Hypernym pair has more than two items'
                item_1_index = embedding_vocab[row[0]].index
                item_2_index = embedding_vocab[row[1]].index
                relations[item_1_index].add(item_2_index)
                items.update([item_1_index, item_2_index])
        self.items = items
        self.relations = relations
        self.embedding = embedding
    @staticmethod
    def get_positive_relation_ranks_and_avg_prec(all_distances, positive_relations):
        """Compute ranks and Average Precision of positive relations.

        Parameters
        ----------
        all_distances : numpy.array of float
            Array of all distances (floats) for a specific item.
        positive_relations : list
            List of indices of positive relations for the item.

        Returns
        -------
        (list of int, float)
            The list contains ranks of positive relations in the same order as `positive_relations`.
            The float is the Average Precision of the ranking, e.g. ([1, 2, 3, 20], 0.610).

        """
        positive_relation_distances = all_distances[positive_relations]
        negative_relation_distances = np.ma.array(all_distances, mask=False)
        # Mask out the positives so they don't compete with each other for rank.
        negative_relation_distances.mask[positive_relations] = True
        # Compute how many negative relation distances are less than each positive relation distance, plus 1 for rank
        ranks = (negative_relation_distances < positive_relation_distances[:, np.newaxis]).sum(axis=1) + 1
        # Offsetting the sorted ranks-vs-negatives by their position gives the
        # ranks in the full candidate list; the sum of a sorted array and a
        # strictly increasing offset is already sorted, so the second
        # `np.sort` in the original code was redundant and has been removed.
        map_ranks = np.sort(ranks) + np.arange(len(ranks))
        avg_precision = (np.arange(1, len(map_ranks) + 1) / map_ranks).mean()
        return list(ranks), avg_precision
    def evaluate(self, max_n=None):
        """Evaluate all defined metrics for the reconstruction task.

        Parameters
        ----------
        max_n : int, optional
            Maximum number of positive relations to evaluate, all if `max_n` is None.

        Returns
        -------
        dict of (str, float)
            (metric_name, metric_value) pairs, e.g. {'mean_rank': 50.3, 'MAP': 0.31}.

        """
        mean_rank, map_ = self.evaluate_mean_rank_and_map(max_n)
        return {'mean_rank': mean_rank, 'MAP': map_}
    def evaluate_mean_rank_and_map(self, max_n=None):
        """Evaluate mean rank and MAP for reconstruction.

        Parameters
        ----------
        max_n : int, optional
            Maximum number of positive relations to evaluate, all if `max_n` is None.

        Returns
        -------
        (float, float)
            (mean_rank, MAP), e.g (50.3, 0.31).

        """
        ranks = []
        avg_precision_scores = []
        for i, item in enumerate(self.items, start=1):
            if item not in self.relations:
                continue
            item_relations = list(self.relations[item])
            item_term = self.embedding.index2word[item]
            item_distances = self.embedding.distances(item_term)
            positive_relation_ranks, avg_precision = \
                self.get_positive_relation_ranks_and_avg_prec(item_distances, item_relations)
            ranks += positive_relation_ranks
            avg_precision_scores.append(avg_precision)
            if max_n is not None and i > max_n:
                break
        return np.mean(ranks), np.mean(avg_precision_scores)
class LinkPredictionEvaluation(object):
    """Evaluate link prediction on given network for given embedding.

    The original class docstring incorrectly described this as reconstruction;
    it ranks held-out ("unknown") relations against negatives.
    """
    def __init__(self, train_path, test_path, embedding):
        """Initialize evaluation instance with tsv files containing relation pairs and embedding to be evaluated.

        Parameters
        ----------
        train_path : str
            Path to tsv file containing relation pairs used for training.
        test_path : str
            Path to tsv file containing relation pairs to evaluate.
        embedding : :class:`~gensim.models.poincare.PoincareKeyedVectors`
            Embedding to be evaluated.

        """
        items = set()
        embedding_vocab = embedding.vocab
        relations = {'known': defaultdict(set), 'unknown': defaultdict(set)}
        data_files = {'known': train_path, 'unknown': test_path}
        for relation_type, data_file in data_files.items():
            with utils.open(data_file, 'r') as f:
                reader = csv.reader(f, delimiter='\t')
                for row in reader:
                    assert len(row) == 2, 'Hypernym pair has more than two items'
                    item_1_index = embedding_vocab[row[0]].index
                    item_2_index = embedding_vocab[row[1]].index
                    relations[relation_type][item_1_index].add(item_2_index)
                    items.update([item_1_index, item_2_index])
        self.items = items
        self.relations = relations
        self.embedding = embedding
    @staticmethod
    def get_unknown_relation_ranks_and_avg_prec(all_distances, unknown_relations, known_relations):
        """Compute ranks and Average Precision of unknown positive relations.

        Parameters
        ----------
        all_distances : numpy.array of float
            Array of all distances for a specific item.
        unknown_relations : list of int
            List of indices of unknown positive relations.
        known_relations : list of int
            List of indices of known positive relations.

        Returns
        -------
        tuple (list of int, float)
            The list contains ranks of unknown relations in the same order as `unknown_relations`.
            The float is the Average Precision of the ranking, e.g. ([1, 2, 3, 20], 0.610).

        """
        unknown_relation_distances = all_distances[unknown_relations]
        negative_relation_distances = np.ma.array(all_distances, mask=False)
        # Both known and unknown positives are masked out so that only true
        # negatives compete for rank.
        negative_relation_distances.mask[unknown_relations] = True
        negative_relation_distances.mask[known_relations] = True
        # Compute how many negative relation distances are less than each unknown relation distance, plus 1 for rank
        ranks = (negative_relation_distances < unknown_relation_distances[:, np.newaxis]).sum(axis=1) + 1
        # Sorted ranks plus a strictly increasing offset is already sorted, so
        # the second `np.sort` in the original code was redundant (removed).
        map_ranks = np.sort(ranks) + np.arange(len(ranks))
        avg_precision = (np.arange(1, len(map_ranks) + 1) / map_ranks).mean()
        return list(ranks), avg_precision
    def evaluate(self, max_n=None):
        """Evaluate all defined metrics for the link prediction task.

        Parameters
        ----------
        max_n : int, optional
            Maximum number of positive relations to evaluate, all if `max_n` is None.

        Returns
        -------
        dict of (str, float)
            (metric_name, metric_value) pairs, e.g. {'mean_rank': 50.3, 'MAP': 0.31}.

        """
        mean_rank, map_ = self.evaluate_mean_rank_and_map(max_n)
        return {'mean_rank': mean_rank, 'MAP': map_}
    def evaluate_mean_rank_and_map(self, max_n=None):
        """Evaluate mean rank and MAP for link prediction.

        Parameters
        ----------
        max_n : int, optional
            Maximum number of positive relations to evaluate, all if `max_n` is None.

        Returns
        -------
        tuple (float, float)
            (mean_rank, MAP), e.g (50.3, 0.31).

        """
        ranks = []
        avg_precision_scores = []
        for i, item in enumerate(self.items, start=1):
            if item not in self.relations['unknown']:  # No positive relations to predict for this node
                continue
            unknown_relations = list(self.relations['unknown'][item])
            known_relations = list(self.relations['known'][item])
            item_term = self.embedding.index2word[item]
            item_distances = self.embedding.distances(item_term)
            unknown_relation_ranks, avg_precision = \
                self.get_unknown_relation_ranks_and_avg_prec(item_distances, unknown_relations, known_relations)
            ranks += unknown_relation_ranks
            avg_precision_scores.append(avg_precision)
            if max_n is not None and i > max_n:
                break
        return np.mean(ranks), np.mean(avg_precision_scores)
class LexicalEntailmentEvaluation(object):
    """Evaluate lexical entailment on the HyperLex dataset for any embedding.

    The original class docstring incorrectly described this as reconstruction.
    """
    def __init__(self, filepath):
        """Initialize evaluation instance with HyperLex text file containing relation pairs.

        Parameters
        ----------
        filepath : str
            Path to HyperLex text file.

        """
        expected_scores = {}
        with utils.open(filepath, 'r') as f:
            reader = csv.DictReader(f, delimiter=' ')
            for row in reader:
                word_1, word_2 = row['WORD1'], row['WORD2']
                expected_scores[(word_1, word_2)] = float(row['AVG_SCORE'])
        self.scores = expected_scores
        # Weight of the norm-difference penalty in `score_function`.
        # NOTE(review): presumably the alpha constant from the Poincare
        # embedding paper's entailment score — confirm against the paper.
        self.alpha = 1000
    def score_function(self, embedding, trie, term_1, term_2):
        """Compute predicted score - extent to which `term_1` is a type of `term_2`.

        Parameters
        ----------
        embedding : :class:`~gensim.models.poincare.PoincareKeyedVectors`
            Embedding to use for computing predicted score.
        trie : :class:`pygtrie.Trie`
            Trie to use for finding matching vocab terms for input terms.
        term_1 : str
            Input term.
        term_2 : str
            Input term.

        Returns
        -------
        float
            Predicted score (the extent to which `term_1` is a type of `term_2`).

        Raises
        ------
        ValueError
            If no matching vocab terms are found for either input term.

        """
        try:
            word_1_terms = self.find_matching_terms(trie, term_1)
            word_2_terms = self.find_matching_terms(trie, term_2)
        except KeyError:
            raise ValueError("No matching terms found for either %s or %s" % (term_1, term_2))
        min_distance = np.inf
        min_term_1, min_term_2 = None, None
        # Distinct loop variables so the input parameters are not shadowed
        # (the original reused `term_1`/`term_2` as loop variables here).
        for candidate_1 in word_1_terms:
            for candidate_2 in word_2_terms:
                distance = embedding.distance(candidate_1, candidate_2)
                if distance < min_distance:
                    min_term_1, min_term_2 = candidate_1, candidate_2
                    min_distance = distance
        assert min_term_1 is not None and min_term_2 is not None
        vector_1, vector_2 = embedding.word_vec(min_term_1), embedding.word_vec(min_term_2)
        norm_1, norm_2 = np.linalg.norm(vector_1), np.linalg.norm(vector_2)
        # Higher score when terms are close and term_1 has the larger norm
        # (i.e. sits lower in the hierarchy) than term_2.
        return -1 * (1 + self.alpha * (norm_2 - norm_1)) * min_distance
    @staticmethod
    def find_matching_terms(trie, word):
        """Find terms in the `trie` beginning with the `word`.

        Parameters
        ----------
        trie : :class:`pygtrie.Trie`
            Trie to use for finding matching terms.
        word : str
            Input word to use for prefix search.

        Returns
        -------
        list of str
            List of matching terms.

        """
        # Vocab keys look like 'word.pos.sense', so search on 'word.'.
        matches = trie.items('%s.' % word)
        matching_terms = [''.join(key_chars) for key_chars, value in matches]
        return matching_terms
    @staticmethod
    def create_vocab_trie(embedding):
        """Create trie with vocab terms of the given embedding to enable quick prefix searches.

        Parameters
        ----------
        embedding : :class:`~gensim.models.poincare.PoincareKeyedVectors`
            Embedding for which trie is to be created.

        Returns
        -------
        :class:`pygtrie.Trie`
            Trie containing vocab terms of the input embedding.

        """
        try:
            from pygtrie import Trie
        except ImportError:
            raise ImportError(
                'pygtrie could not be imported, please install pygtrie in order to use LexicalEntailmentEvaluation')
        vocab_trie = Trie()
        for key in embedding.vocab:
            vocab_trie[key] = True
        return vocab_trie
    def evaluate_spearman(self, embedding):
        """Evaluate spearman scores for lexical entailment for given embedding.

        Parameters
        ----------
        embedding : :class:`~gensim.models.poincare.PoincareKeyedVectors`
            Embedding for which evaluation is to be done.

        Returns
        -------
        float
            Spearman correlation score for the task for input embedding.

        """
        predicted_scores = []
        expected_scores = []
        skipped = 0
        vocab_trie = self.create_vocab_trie(embedding)
        for (word_1, word_2), expected_score in self.scores.items():
            try:
                predicted_score = self.score_function(embedding, vocab_trie, word_1, word_2)
            except ValueError:
                # Pair has no matching vocab terms; skip it (counted below).
                skipped += 1
                continue
            predicted_scores.append(predicted_score)
            expected_scores.append(expected_score)
        # Lazy %-formatting: let the logging framework interpolate only when
        # the record is actually emitted (original formatted eagerly with %).
        logger.info('skipped pairs: %d out of %d', skipped, len(self.scores))
        spearman = spearmanr(expected_scores, predicted_scores)
        return spearman.correlation
<|file_name|>classifier.py<|end_file_name|><|fim▁begin|>"""
Module containing MPG Ranch NFC coarse classifier, version 3.1.
An NFC coarse classifier classifies an unclassified clip as a `'Call'`
if it appears to be a nocturnal flight call, or as a `'Noise'` otherwise.
It does not classify a clip that has already been classified, whether
manually or automatically.
This classifier uses the same model as version 3.0, which was developed
for TensorFlow 1, but updated for TensorFlow 2.
The two classifiers were compared by running both on 16429 clips created
by the Old Bird Thrush Detector Redux 1.1 and 22505 clips created by the
Old Bird Tseep Detector Redux 1.1 from 17 nights of recordings made in
Ithaca, NY from 2021-04-03 through 2021-04-19. The older classifier ran
with TensorFlow 1.15.5 and the newer one with TensorFlow 2.5.0rc1. The
two classifiers labeled the clips exactly the same. 1711 thrush clips
were labeled as calls and the others as noises, and 2636 tseep clips
were labeled as calls and the others as noises.
"""
from collections import defaultdict
import logging
import numpy as np
import resampy
import tensorflow as tf
from vesper.command.annotator import Annotator
from vesper.django.app.models import AnnotationInfo
from vesper.singleton.clip_manager import clip_manager
from vesper.util.settings import Settings
import vesper.django.app.model_utils as model_utils
import vesper.mpg_ranch.nfc_coarse_classifier_3_1.classifier_utils as \
classifier_utils
import vesper.mpg_ranch.nfc_coarse_classifier_3_1.dataset_utils as \
dataset_utils
import vesper.util.open_mp_utils as open_mp_utils
import vesper.util.signal_utils as signal_utils
import vesper.util.yaml_utils as yaml_utils
_EVALUATION_MODE_ENABLED = False
'''
This classifier can run in one of two modes, *normal mode* and
*evaluation mode*. In normal mode, it annotates only unclassified clips,
assigning to each a "Classification" annotation value or either "Call"
or "Noise".
In evaluation mode, the classifier classifies every clip whose clip type
(e.g. "Tseep" or "Thrush") it recognizes and that already has a
classification that is "Noise" or starts with "Call" or "XCall".
The new classification is a function of both the existing classification
and the *normal classification* that the classifier would assign to the
clip in normal mode if it had no existing classification. The new
classifications are as follows (where the classification pairs are
(existing classification, normal classification)):
(Noise, Noise) -> Noise (i.e. no change)
(Noise, Call) -> FP
(Call*, Call) -> Call* (i.e. no change)
(Call*, Noise) -> FN* (i.e. only coarse part changes)
(XCall*, Call) -> XCallP* (i.e. only coarse part changes)
(XCall*, Noise) -> XCallN* (i.e. only coarse part changes)
This reclassifies clips for which the normal classification differs from
the existing classification in such a way that important sets of clips
(i.e. false positives, false negatives, excluded call positives, and
excluded call negatives) can subsequently be viewed in clip albums.
'''
class Classifier(Annotator):
extension_name = 'MPG Ranch NFC Coarse Classifier 3.1'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
open_mp_utils.work_around_multiple_copies_issue()
# Suppress TensorFlow INFO and DEBUG log messages.
logging.getLogger('tensorflow').setLevel(logging.WARN)
self._classifiers = dict(
(t, _Classifier(t)) for t in ('Tseep', 'Thrush'))
if _EVALUATION_MODE_ENABLED:
self._score_annotation_info = \
AnnotationInfo.objects.get(name='Score')
def annotate_clips(self, clips):
"""Annotates the specified clips with the appropriate classifiers."""
clip_lists = self._get_clip_lists(clips)
num_clips_classified = 0
for clip_type, clips in clip_lists.items():
classifier = self._classifiers.get(clip_type)
if classifier is not None:
# have classifier for this clip type
num_clips_classified += self._annotate_clips(clips, classifier)
return num_clips_classified
def _get_clip_lists(self, clips):
"""Gets a mapping from clip types to lists of clips to classify."""
clip_lists = defaultdict(list)
for clip in clips:
if _EVALUATION_MODE_ENABLED or \
self._get_annotation_value(clip) is None:
# clip should be classified
clip_type = model_utils.get_clip_type(clip)
clip_lists[clip_type].append(clip)
return clip_lists
def _annotate_clips(self, clips, classifier):
"""Annotates the specified clips with the specified classifier."""
num_clips_classified = 0
triples = classifier.classify_clips(clips)
# if _EVALUATION_MODE_ENABLED and len(triples) > 0:
# self._show_classification_errors(triples)
for clip, auto_classification, score in triples:
if auto_classification is not None:
if _EVALUATION_MODE_ENABLED:
old_classification = self._get_annotation_value(clip)
new_classification = self._get_new_classification(
old_classification, auto_classification)
if new_classification is not None:
self._annotate(clip, new_classification)
num_clips_classified += 1
self._set_clip_score(clip, score)
else:
# normal mode
self._annotate(clip, auto_classification)
num_clips_classified += 1
return num_clips_classified
def _get_new_classification(self, old_classification, auto_classification):
old = old_classification
auto = auto_classification
if old is None:
return None
elif old.startswith('Call') and auto == 'Noise':
return 'FN' + old[len('Call'):]
elif old == 'Noise' and auto == 'Call':
return 'FP'
elif old.startswith('XCall') and auto == 'Noise':
return 'XCallN' + old_classification[len('XCall'):]
elif old.startswith('XCall') and auto == 'Call':
return 'XCallP' + old_classification[len('XCall'):]
else:
return None
def _set_clip_score(self, clip, score):
value = '{:.3f}'.format(score)
model_utils.annotate_clip(
clip, self._score_annotation_info, value,
creating_user=self._creating_user,
creating_job=self._creating_job,
creating_processor=self._creating_processor)
def _show_classification_errors(self, triples):
num_positives = 0
num_negatives = 0
false_positives = []
false_negatives = []
for i, (clip, new_classification, score) in enumerate(triples):
old_classification = self._get_annotation_value(clip)
if old_classification.startswith('Call'):
num_positives += 1
if new_classification == 'Noise':
false_negatives.append(
(i, old_classification, new_classification, score))
else:
# old classification does not start with 'Call'
num_negatives += 1
if new_classification == 'Call':
false_positives.append(
(i, old_classification, new_classification, score))
num_clips = len(triples)
logging.info('Classified {} clips.'.format(num_clips))
self._show_classification_errors_aux(
'calls', false_negatives, num_positives)
self._show_classification_errors_aux(
'non-calls', false_positives, num_negatives)
num_errors = len(false_positives) + len(false_negatives)
accuracy = 100 * (1 - num_errors / num_clips)
logging.info(
'The overall accuracy was {:.1f} percent.'.format(accuracy))
def _show_classification_errors_aux(self, category, errors, num_clips):
num_errors = len(errors)
percent = 100 * num_errors / num_clips
logging.info((
'{} of {} {} ({:.1f} percent) where incorrectly '
'classified:').format(num_errors, num_clips, category, percent))
for i, old_classification, new_classification, score in errors:
logging.info(
' {} {} -> {} {}'.format(
i, old_classification, new_classification, score))
class _Classifier:
def __init__(self, clip_type):
self.clip_type = clip_type
self._model = self._load_model()
self._settings = self._load_settings()
# Configure waveform slicing.
s = self._settings
fs = s.waveform_sample_rate
s2f = signal_utils.seconds_to_frames
self._waveform_start_time = \
s.waveform_start_time + s.inference_waveform_start_time_offset
self._waveform_duration = s.waveform_duration
self._waveform_length = s2f(self._waveform_duration, fs)
self._classification_threshold = \
self._settings.classification_threshold
def _load_model(self):
path = classifier_utils.get_keras_model_file_path(self.clip_type)
logging.info(f'Loading classifier model from "{path}"...')
return tf.keras.models.load_model(path)
def _load_settings(self):
path = classifier_utils.get_settings_file_path(self.clip_type)
logging.info(f'Loading classifier settings from "{path}"...')
text = path.read_text()
d = yaml_utils.load(text)
return Settings.create_from_dict(d)
def classify_clips(self, clips):
# logging.info('Collecting clip waveforms for scoring...')
waveforms, indices = self._slice_clip_waveforms(clips)
if len(waveforms) == 0:
return []
else:
# have at least one waveform slice to classify
# Stack waveform slices to make 2-D NumPy array.
self._waveforms = np.stack(waveforms)
# logging.info('Scoring clip waveforms...')
dataset = \
dataset_utils.create_spectrogram_dataset_from_waveforms_array(
self._waveforms, dataset_utils.DATASET_MODE_INFERENCE,
self._settings, batch_size=64,
feature_name=self._settings.model_input_name)
scores = self._model.predict(dataset).flatten()
# logging.info('Classifying clips...')
triples = [
self._classify_clip(i, score, clips)
for i, score in zip(indices, scores)]
return triples
def _slice_clip_waveforms(self, clips):
waveforms = []
indices = []
for i, clip in enumerate(clips):
try:
waveform = self._get_clip_samples(clip)
except Exception as e:
logging.warning((
'Could not classify clip "{}", since its '
'samples could not be obtained. Error message was: '
'{}').format(str(clip), str(e)))
else:
# got clip samples
waveforms.append(waveform)
indices.append(i)
return waveforms, indices
def _get_clip_samples(self, clip):
clip_sample_rate = clip.sample_rate
classifier_sample_rate = self._settings.waveform_sample_rate
s2f = signal_utils.seconds_to_frames
start_offset = s2f(self._waveform_start_time, clip_sample_rate)
if clip_sample_rate != classifier_sample_rate:
# need to resample
# Get clip samples, including a millisecond of padding at
# the end. I don't know what if any guarantees the
# `resampy.resample` function offers about the relationship
# between its input and output lengths, so we add the padding
# to try to ensure that we don't wind up with too few samples
# after resampling.
length = s2f(self._waveform_duration + .001, clip_sample_rate)
samples = clip_manager.get_samples(
clip, start_offset=start_offset, length=length)
# Resample clip samples to classifier sample rate.
samples = resampy.resample(
samples, clip_sample_rate, classifier_sample_rate)<|fim▁hole|> # Discard any extra trailing samples we wound up with.
samples = samples[:self._waveform_length]
if len(samples) < self._waveform_length:
raise ValueError('Resampling produced too few samples.')
else:
# don't need to resample
samples = clip_manager.get_samples(
clip, start_offset=start_offset, length=self._waveform_length)
return samples
def _classify_clip(self, index, score, clips):
if score >= self._classification_threshold:
classification = 'Call'
else:
classification = 'Noise'
return clips[index], classification, score<|fim▁end|> | |
<|file_name|>unique-decl-init-copy.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![feature(box_syntax)]
pub fn main() {
let mut i: Box<_> = box 1;<|fim▁hole|> let mut j = i.clone();
*i = 2;
*j = 3;
assert_eq!(*i, 2);
assert_eq!(*j, 3);
}<|fim▁end|> | // Should be a copy |
<|file_name|>sourceMap-InterfacePrecedingVariableDeclaration1.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>// @sourcemap: true
interface I {}
var x = 0;<|fim▁end|> | // @target: ES3
|
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for React Router 4.0
// Project: https://github.com/ReactTraining/react-router
// Definitions by: Tanguy Krotoff <https://github.com/tkrotoff>
// Huy Nguyen <https://github.com/huy-nguyen>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.3
declare module 'react-router-dom' {
import {
Prompt,
MemoryRouter,
Redirect,
RouteComponentProps,
RouteProps,
Route,
Router,
StaticRouter,
Switch,
match,
matchPath,
withRouter,
RouterChildContext
} from 'react-router';
import * as React from 'react';
import * as H from 'history';
interface BrowserRouterProps {
basename?: string;
getUserConfirmation?(): void;
forceRefresh?: boolean;
keyLength?: number;
}
class BrowserRouter extends React.Component<BrowserRouterProps> {}
interface HashRouterProps {
basename?: string;
getUserConfirmation?(): void;
hashType?: 'slash' | 'noslash' | 'hashbang';
}
class HashRouter extends React.Component<HashRouterProps> {}
interface LinkProps extends React.AnchorHTMLAttributes<HTMLAnchorElement> {
to: H.LocationDescriptor;
replace?: boolean;
}
class Link extends React.Component<LinkProps> {}
interface NavLinkProps extends LinkProps {
activeClassName?: string;
activeStyle?: React.CSSProperties;
exact?: boolean;
strict?: boolean;
isActive?<P>(match: match<P>, location: H.Location): boolean;
}
class NavLink extends React.Component<NavLinkProps> {}
export {
BrowserRouter,
BrowserRouterProps, // TypeScript specific, not from React Router itself
HashRouter,
HashRouterProps, // TypeScript specific, not from React Router itself
LinkProps, // TypeScript specific, not from React Router itself
NavLinkProps, // TypeScript specific, not from React Router itself
Link,
NavLink,
Prompt,
MemoryRouter,
Redirect,
RouteComponentProps, // TypeScript specific, not from React Router itself<|fim▁hole|> RouteProps, // TypeScript specific, not from React Router itself
Route,
Router,
StaticRouter,
Switch,
match, // TypeScript specific, not from React Router itself
matchPath,
withRouter,
RouterChildContext
};
}<|fim▁end|> | |
<|file_name|>fix_ssa_opcalls.rs<|end_file_name|><|fim▁begin|>//! Fixes the call target for all call sites in the SSA
//! For every [`OpCall`] SSA node in every function, try to find that call
//! site's corresponding edge in [the callgraph] and replace the "target"
//! operand of the SSA node with a constant value for the address of the actual
//! call target.
//!
//! [`OpCall`]: ir::MOpcode::OpCall
//! [the callgraph]: RadecoModule::callgraph
use analysis::analyzer::{
Action, Analyzer, AnalyzerInfo, AnalyzerKind, AnalyzerResult, Change, ModuleAnalyzer,
};
use frontend::radeco_containers::*;
use middle::ir;
use middle::ssa::ssa_traits::*;
use middle::ssa::ssastorage::SSAStorage;
use std::any::Any;
use std::collections::HashMap;
const NAME: &str = "call_size_fixer";
const REQUIRES: &[AnalyzerKind] = &[];
pub const INFO: AnalyzerInfo = AnalyzerInfo {
name: NAME,
kind: AnalyzerKind::CallSiteFixer,
requires: REQUIRES,
uses_policy: false,
};
#[derive(Debug)]
pub struct CallSiteFixer;
impl CallSiteFixer {
pub fn new() -> Self {
CallSiteFixer
}
}
impl Analyzer for CallSiteFixer {
fn info(&self) -> &'static AnalyzerInfo {
&INFO
}
fn as_any(&self) -> &dyn Any {
self
}
}
impl ModuleAnalyzer for CallSiteFixer {
fn analyze<T: FnMut(Box<Change>) -> Action>(
&mut self,
rmod: &mut RadecoModule,
_policy: Option<T>,
) -> Option<Box<AnalyzerResult>> {
for rfun in rmod.functions.values_mut() {
go_fn(rfun, &rmod.callgraph);
}
None
}
}
fn go_fn(rfun: &mut RadecoFunction, callgraph: &CallGraph) -> () {
let _fn_addr = rfun.offset;
let call_site_addr_to_target_addr: HashMap<u64, u64> = callgraph
.callees(rfun.cgid())
.map(|(cs_a, tgt_idx)| (cs_a, callgraph[tgt_idx]))
.collect();
let ssa = rfun.ssa_mut();
for node in ssa.inorder_walk() {
if let Ok(NodeType::Op(ir::MOpcode::OpCall)) = ssa.node_data(node).map(|x| x.nt) {
fix_call_site(ssa, node, &call_site_addr_to_target_addr).unwrap_or_else(|| {
radeco_err!(
"failed to fix call site {:?} in function at {:#X}",
node,
_fn_addr
)
});
}
}
}
fn fix_call_site(
ssa: &mut SSAStorage,
call_node: <SSAStorage as SSA>::ValueRef,
fn_call_map: &HashMap<u64, u64>,
) -> Option<()> {
let call_site_addr = ssa.address(call_node)?.address;<|fim▁hole|> if let Some(&call_target_addr) = fn_call_map.get(&call_site_addr) {
let old_opcall_tgt_node = ssa
.sparse_operands_of(call_node)
.iter()
.find(|x| x.0 == 0)?
.1;
let new_opcall_tgt_node = ssa.insert_const(call_target_addr, None)?;
ssa.op_unuse(call_node, old_opcall_tgt_node);
ssa.op_use(call_node, 0, new_opcall_tgt_node);
} else {
radeco_trace!(
"call site at {:#X} isn't in call graph; perhaps the call is indirect?",
call_site_addr
);
}
Some(())
}<|fim▁end|> | |
<|file_name|>hook-sysconfig.py<|end_file_name|><|fim▁begin|>#-----------------------------------------------------------------------------
# Copyright (c) 2005-2016, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
# The 'sysconfig' module requires Makefile and pyconfig.h files from
# Python installation. 'sysconfig' parses these files to get some
# information from them.
# TODO Verify that bundling Makefile and pyconfig.h is still required for Python 3.
import sysconfig
import os
from PyInstaller.utils.hooks import relpath_to_config_or_make
_CONFIG_H = sysconfig.get_config_h_filename()<|fim▁hole|>else:
_MAKEFILE = sysconfig._get_makefile_filename()
datas = [(_CONFIG_H, relpath_to_config_or_make(_CONFIG_H))]
# The Makefile does not exist on all platforms, eg. on Windows
if os.path.exists(_MAKEFILE):
datas.append((_MAKEFILE, relpath_to_config_or_make(_MAKEFILE)))<|fim▁end|> | if hasattr(sysconfig, 'get_makefile_filename'):
# sysconfig.get_makefile_filename is missing in Python < 2.7.9
_MAKEFILE = sysconfig.get_makefile_filename() |
<|file_name|>fstree.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
from fs.opener import opener
from fs.commands.runner import Command
from fs.utils import print_fs
class FSTree(Command):
usage = """fstree [OPTION]... [PATH]
Recursively display the contents of PATH in an ascii tree"""
def get_optparse(self):
optparse = super(FSTree, self).get_optparse()
optparse.add_option('-l', '--level', dest='depth', type="int", default=5,
help="Descend only LEVEL directories deep (-1 for infinite)", metavar="LEVEL")
optparse.add_option('-g', '--gui', dest='gui', action='store_true', default=False,
help="browse the tree with a gui")
optparse.add_option('-a', '--all', dest='all', action='store_true', default=False,
help="do not hide dot files")
optparse.add_option('-d', '--dirsfirst', dest='dirsfirst', action='store_true', default=False,
help="List directories before files")
return optparse
def do_run(self, options, args):
if not args:<|fim▁hole|> self.error(u"'%s' is not a dir\n" % path)
return 1
fs.cache_hint(True)
if options.gui:
from fs.browsewin import browse
if path:
fs = fs.opendir(path)
browse(fs, hide_dotfiles=not options.all)
else:
if options.depth < 0:
max_levels = None
else:
max_levels = options.depth
print_fs(fs, path or '',
file_out=self.output_file,
max_levels=max_levels,
terminal_colors=self.terminal_colors,
hide_dotfiles=not options.all,
dirs_first=options.dirsfirst)
def run():
return FSTree().run()
if __name__ == "__main__":
sys.exit(run())<|fim▁end|> | args = ['.']
for fs, path, is_dir in self.get_resources(args, single=True):
if not is_dir: |
<|file_name|>base.js<|end_file_name|><|fim▁begin|>/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
$(document).ready(function(){
var div = document.getElementById('content');
var div1 = document.getElementById('leftbox');
div.style.height = document.body.clientHeight + 'px';
div1.style.height = div.style.height;
var contentToRemove = document.querySelectorAll(".collapsed-navbox");
$(contentToRemove).hide();
var oritop = -100;
$(window).scroll(function() {
var scrollt = window.scrollY;
var elm = $("#leftbox");
if(oritop < 0) {
oritop= elm.offset().top;
}
if(scrollt >= oritop) {
elm.css({"position": "fixed", "top": 0, "left": 0});
}
else {
elm.css("position", "static");
}
});
/*$(window).resize(function() {
var wi = $(window).width();
$("p.testp").text('Screen width is currently: ' + wi + 'px.');
});
$(window).resize(function() {
var wi = $(window).width();
if (wi <= 767){
var contentToRemove = document.querySelectorAll(".fullscreen-navbox");
$(contentToRemove).hide();
var contentToRemove = document.querySelectorAll(".collapsed-navbox");
$(contentToRemove).show();
$("#leftbox").css("width","30px");
$("#content").css("width","90%");
}else if (wi > 800){
var contentToRemove = document.querySelectorAll(".fullscreen-navbox");
$(contentToRemove).show();
var contentToRemove = document.querySelectorAll(".collapsed-navbox");
$(contentToRemove).hide();
$("#leftbox").css("width","15%");
$("#content").css("width","85%");
}
});*/
<|fim▁hole|><|fim▁end|> | }); |
<|file_name|>uk.js<|end_file_name|><|fim▁begin|>(function($) {
$.Redactor.opts.langs['uk'] = {
html: 'Код',
video: 'Відео',
image: 'Зображення',
table: 'Таблиця',
link: 'Посилання',
link_insert: 'Вставити посилання ...',
link_edit: 'Edit link',
unlink: 'Видалити посилання',
formatting: 'Стилі',
paragraph: 'Звичайний текст',
quote: 'Цитата',
code: 'Код',
header1: 'Заголовок 1',
header2: 'Заголовок 2',
header3: 'Заголовок 3',
header4: 'Заголовок 4',
header5: 'Заголовок 5',
bold: 'Жирний',
italic: 'Похилий',
fontcolor: 'Колір тексту',
backcolor: 'Заливка тексту',
unorderedlist: 'Звичайний список',
orderedlist: 'Нумерований список',
outdent: 'Зменшити відступ',
indent: 'Збільшити відступ',
cancel: 'Скасувати',
insert: 'Вставити',
save: 'Зберегти',
_delete: 'Видалити',
insert_table: 'Вставити таблицю',
insert_row_above: 'Додати рядок зверху',
insert_row_below: 'Додати рядок знизу',
insert_column_left: 'Додати стовпець ліворуч',
insert_column_right: 'Додати стовпець праворуч',
delete_column: 'Видалити стовпець',
delete_row: 'Видалити рядок',
delete_table: 'Видалити таблицю',
rows: 'Рядки',
columns: 'Стовпці',
add_head: 'Додати заголовок',
delete_head: 'Видалити заголовок',
title: 'Підказка',
image_view: 'Завантажити зображення',
image_position: 'Обтікання текстом',
none: 'ні',
left: 'ліворуч',
right: 'праворуч',
image_web_link: 'Посилання на зображення',
text: 'Текст',
mailto: 'Ел. пошта',
web: 'URL',
video_html_code: 'Код відео ролика',
file: 'Файл',
upload: 'Завантажити',
download: 'Завантажити',
choose: 'Вибрати',
or_choose: 'Або виберіть',
drop_file_here: 'Перетягніть файл сюди',
align_left: 'По лівому краю',
align_center: 'По центру',
align_right: 'По правому краю',
align_justify: 'Вирівняти текст по ширині',
horizontalrule: 'Горизонтальная лінійка',
fullscreen: 'На весь екран',
deleted: 'Закреслений',<|fim▁hole|> link_new_tab: 'Open link in new tab',
underline: 'Underline',
alignment: 'Alignment',
filename: 'Name (optional)',
edit: 'Edit',
center: 'Center'
};
})(jQuery);<|fim▁end|> | anchor: 'Anchor', |
<|file_name|>test_storage.py<|end_file_name|><|fim▁begin|># coding: utf-8
# Copyright 2015 rpaas authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
<|fim▁hole|>
from rpaas import plan, storage
class MongoDBStorageTestCase(unittest.TestCase):
def setUp(self):
self.storage = storage.MongoDBStorage()
self.storage.db[self.storage.quota_collection].remove()
self.storage.db[self.storage.plans_collection].remove()
self.storage.db[self.storage.plans_collection].insert(
{"_id": "small",
"description": "some cool plan",
"config": {"serviceofferingid": "abcdef123456"}}
)
self.storage.db[self.storage.plans_collection].insert(
{"_id": "huge",
"description": "some cool huge plan",
"config": {"serviceofferingid": "abcdef123459"}}
)
def test_set_team_quota(self):
q = self.storage.set_team_quota("myteam", 8)
used, quota = self.storage.find_team_quota("myteam")
self.assertEqual([], used)
self.assertEqual(8, quota)
self.assertEqual(used, q["used"])
self.assertEqual(quota, q["quota"])
def test_list_plans(self):
plans = self.storage.list_plans()
expected = [
{"name": "small", "description": "some cool plan",
"config": {"serviceofferingid": "abcdef123456"}},
{"name": "huge", "description": "some cool huge plan",
"config": {"serviceofferingid": "abcdef123459"}},
]
self.assertEqual(expected, [p.to_dict() for p in plans])
def test_find_plan(self):
plan = self.storage.find_plan("small")
expected = {"name": "small", "description": "some cool plan",
"config": {"serviceofferingid": "abcdef123456"}}
self.assertEqual(expected, plan.to_dict())
with self.assertRaises(storage.PlanNotFoundError):
self.storage.find_plan("something that doesn't exist")
def test_store_plan(self):
p = plan.Plan(name="super_huge", description="very huge thing",
config={"serviceofferingid": "abcdef123"})
self.storage.store_plan(p)
got_plan = self.storage.find_plan(p.name)
self.assertEqual(p.to_dict(), got_plan.to_dict())
def test_store_plan_duplicate(self):
p = plan.Plan(name="small", description="small thing",
config={"serviceofferingid": "abcdef123"})
with self.assertRaises(storage.DuplicateError):
self.storage.store_plan(p)
def test_update_plan(self):
p = plan.Plan(name="super_huge", description="very huge thing",
config={"serviceofferingid": "abcdef123"})
self.storage.store_plan(p)
self.storage.update_plan(p.name, description="wat?",
config={"serviceofferingid": "abcdef123459"})
p = self.storage.find_plan(p.name)
self.assertEqual("super_huge", p.name)
self.assertEqual("wat?", p.description)
self.assertEqual({"serviceofferingid": "abcdef123459"}, p.config)
def test_update_plan_partial(self):
p = plan.Plan(name="super_huge", description="very huge thing",
config={"serviceofferingid": "abcdef123"})
self.storage.store_plan(p)
self.storage.update_plan(p.name, config={"serviceofferingid": "abcdef123459"})
p = self.storage.find_plan(p.name)
self.assertEqual("super_huge", p.name)
self.assertEqual("very huge thing", p.description)
self.assertEqual({"serviceofferingid": "abcdef123459"}, p.config)
def test_update_plan_not_found(self):
with self.assertRaises(storage.PlanNotFoundError):
self.storage.update_plan("my_plan", description="woot")
def test_delete_plan(self):
p = plan.Plan(name="super_huge", description="very huge thing",
config={"serviceofferingid": "abcdef123"})
self.storage.store_plan(p)
self.storage.delete_plan(p.name)
with self.assertRaises(storage.PlanNotFoundError):
self.storage.find_plan(p.name)
def test_delete_plan_not_found(self):
with self.assertRaises(storage.PlanNotFoundError):
self.storage.delete_plan("super_huge")
def test_instance_metadata_storage(self):
self.storage.store_instance_metadata("myinstance", plan="small")
inst_metadata = self.storage.find_instance_metadata("myinstance")
self.assertEqual({"_id": "myinstance",
"plan": "small"}, inst_metadata)
self.storage.store_instance_metadata("myinstance", plan="medium")
inst_metadata = self.storage.find_instance_metadata("myinstance")
self.assertEqual({"_id": "myinstance", "plan": "medium"}, inst_metadata)
self.storage.remove_instance_metadata("myinstance")
inst_metadata = self.storage.find_instance_metadata("myinstance")
self.assertIsNone(inst_metadata)<|fim▁end|> | import unittest |
<|file_name|>picker.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
# This code is available for use under CC0 (Creative Commons 0 - universal).
# You can copy, modify, distribute and perform the work, even for commercial
# purposes, all without asking permission. For more information, see LICENSE.md or
# https://creativecommons.org/publicdomain/zero/1.0/
# usage:
# opts = Picker(
# title = 'Delete all files',
# options = ["Yes", "No"]
# ).getSelected()
# returns a simple list
# cancel returns False
import curses
import shutil
import signal
from curses import wrapper
class Picker:
"""Allows you to select from a list with curses"""
stdscr = None
win = None
title = ""
arrow = ""
footer = ""
more = ""
c_selected = ""
c_empty = ""
cursor = 0
offset = 0
selected = 0
selcount = 0
aborted = False
window_height = shutil.get_terminal_size().lines - 10
window_width = shutil.get_terminal_size().columns - 20
all_options = []
length = 0
def curses_start(self):
self.stdscr = curses.initscr()
curses.noecho()
curses.cbreak()
self.win = curses.newwin(
5 + self.window_height,
self.window_width,
2,
4
)
def sigwinch_handler(self, n, frame):
self.window_height = shutil.get_terminal_size().lines - 10
self.window_width = shutil.get_terminal_size().columns - 20
curses.endwin()
self.stdscr.clear()
self.stdscr = curses.initscr()
self.win = curses.newwin(
5 + self.window_height,
self.window_width,
2,
4
)
def curses_stop(self):
curses.nocbreak()
self.stdscr.keypad(0)
curses.echo()
curses.endwin()
def getSelected(self):
if self.aborted == True:
return( False )
ret_s = [x for x in self.all_options if x["selected"]]
ret = [x["label"] for x in ret_s]
return(ret)
def redraw(self):
self.win.clear()
self.win.border(
self.border[0], self.border[1],
self.border[2], self.border[3],
self.border[4], self.border[5],
self.border[6], self.border[7]
)
self.win.addstr(
self.window_height + 4, 5, " " + self.footer + " "
)
position = 0
range = self.all_options[self.offset:self.offset+self.window_height+1]
for option in range:
if option["selected"] == True:
line_label = self.c_selected + " "
else:
line_label = self.c_empty + " "
if len(option["label"]) > (self.window_width - 20):
reduced = option["label"][:self.window_width - 20] + "..."
else:
reduced = option["label"]
self.win.addstr(position + 2, 5, line_label + reduced)
position = position + 1
# hint for more content above
if self.offset > 0:
self.win.addstr(1, 5, self.more)
# hint for more content below
if self.offset + self.window_height <= self.length - 2:
self.win.addstr(self.window_height + 3, 5, self.more)
self.win.addstr(0, 5, " " + self.title + " ")
self.win.addstr(
0, self.window_width - 8,
" " + str(self.selcount) + "/" + str(self.length) + " "
)
self.win.addstr(self.cursor + 2,1, self.arrow)<|fim▁hole|> self.win.refresh()
def check_cursor_up(self):
if self.cursor < 0:
self.cursor = 0
if self.offset > 0:
self.offset = self.offset - 1
def check_cursor_down(self):
if self.cursor >= self.length:
self.cursor = self.cursor - 1
if self.cursor > self.window_height:
self.cursor = self.window_height
self.offset = self.offset + 1
if self.offset + self.cursor >= self.length:
self.offset = self.offset - 1
def curses_loop(self, stdscr):
while 1:
self.redraw()
c = stdscr.getch()
if c == ord('q') or c == ord('Q'):
self.aborted = True
break
elif c == curses.KEY_UP:
self.cursor = self.cursor - 1
elif c == curses.KEY_DOWN:
self.cursor = self.cursor + 1
#elif c == curses.KEY_PPAGE:
#elif c == curses.KEY_NPAGE:
elif c == ord(' '):
self.all_options[self.selected]["selected"] = \
not self.all_options[self.selected]["selected"]
elif c == 10:
break
# deal with interaction limits
self.check_cursor_up()
self.check_cursor_down()
# compute selected position only after dealing with limits
self.selected = self.cursor + self.offset
temp = self.getSelected()
self.selcount = len(temp)
def __init__(self, options, title='Select', arrow="-->",
footer="Space = toggle, Enter = accept, q = cancel",
more="...", border="||--++++", c_selected="[X]", c_empty="[ ]", checked="[ ]"):
self.title = title
self.arrow = arrow
self.footer = footer
self.more = more
self.border = border
self.c_selected = c_selected
self.c_empty = c_empty
self.all_options = []
for option in options:
self.all_options.append({
"label": option,
"selected": True if (option in checked) else False
})
self.length = len(self.all_options)
self.curses_start()
signal.signal(signal.SIGWINCH, self.sigwinch_handler)
curses.wrapper( self.curses_loop )
self.curses_stop()<|fim▁end|> | |
<|file_name|>node.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this<|fim▁hole|> * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Traits that nodes must implement. Breaks the otherwise-cyclic dependency between layout and
//! style.
use cssparser::RGBA;
use legacy::{IntegerAttribute, LengthAttribute, SimpleColorAttribute, UnsignedIntegerAttribute};
use selectors::AttrSelector;
use servo_util::str::LengthOrPercentageOrAuto;
use string_cache::{Atom, Namespace};
pub trait TNode<'a, E: TElement<'a>> : Clone + Copy {
fn parent_node(self) -> Option<Self>;
fn first_child(self) -> Option<Self>;
fn last_child(self) -> Option<Self>;
fn prev_sibling(self) -> Option<Self>;
fn next_sibling(self) -> Option<Self>;
fn is_document(self) -> bool;
fn is_element(self) -> bool;
fn as_element(self) -> E;
fn match_attr(self, attr: &AttrSelector, test: |&str| -> bool) -> bool;
fn is_html_element_in_html_document(self) -> bool;
fn has_changed(self) -> bool;
unsafe fn set_changed(self, value: bool);
fn is_dirty(self) -> bool;
unsafe fn set_dirty(self, value: bool);
fn has_dirty_siblings(self) -> bool;
unsafe fn set_dirty_siblings(self, value: bool);
fn has_dirty_descendants(self) -> bool;
unsafe fn set_dirty_descendants(self, value: bool);
}
pub trait TElement<'a> : Copy {
fn get_attr(self, namespace: &Namespace, attr: &Atom) -> Option<&'a str>;
fn get_attrs(self, attr: &Atom) -> Vec<&'a str>;
fn get_link(self) -> Option<&'a str>;
fn get_local_name(self) -> &'a Atom;
fn get_namespace(self) -> &'a Namespace;
fn get_hover_state(self) -> bool;
fn get_id(self) -> Option<Atom>;
fn get_disabled_state(self) -> bool;
fn get_enabled_state(self) -> bool;
fn get_checked_state(self) -> bool;
fn get_indeterminate_state(self) -> bool;
fn has_class(self, name: &Atom) -> bool;
fn has_nonzero_border(self) -> bool;
// Ordinarily I wouldn't use callbacks like this, but the alternative is
// really messy, since there is a `JSRef` and a `RefCell` involved. Maybe
// in the future when we have associated types and/or a more convenient
// JS GC story... --pcwalton
fn each_class(self, callback: |&Atom|);
}
pub trait TElementAttributes : Copy {
fn get_length_attribute(self, attribute: LengthAttribute) -> LengthOrPercentageOrAuto;
fn get_integer_attribute(self, attribute: IntegerAttribute) -> Option<i32>;
fn get_unsigned_integer_attribute(self, attribute: UnsignedIntegerAttribute) -> Option<u32>;
fn get_simple_color_attribute(self, attribute: SimpleColorAttribute) -> Option<RGBA>;
}<|fim▁end|> | |
<|file_name|>edsl.rs<|end_file_name|><|fim▁begin|>//! Holmes EDSL
//!
//! This module provides a set of macros for more easily writing Holmes
//! programs, avoiding manual construction of all tye types required.
/// Converts an EDSL type specification into a Holmes type object
/// Takes the name of a variable containing a holmes object as the first
/// parameter, and a type description as the second.
///
/// [type] -> list of that type
/// (type0, type1, type2) -> tuple of those types
/// type -> look up type by name in the registry
#[macro_export]
macro_rules! htype {
($holmes:ident, [$t:tt]) => {
::holmes::pg::dyn::types::List::new(htype!($holmes, $t))
};
($holmes:ident, ($($t:tt),*)) => {
::holmes::pg::dyn::types::Tuple::new(vec![$(htype!($holmes, $t)),*])
};
($holmes:ident, $i:ident) => {
$holmes.get_type(stringify!($i))
.expect(&format!("Type not present in database: {}", stringify!($i)))
};
}
/// Shorthand notation for performing many actions with the same holmes context
/// Analogous to a weaker version of the `Reader` monad which cannot return
/// values.
///
/// The first parameter is the holmes object to be used, and the second is
/// a list of the actions to be performed, e.g.
///
/// ```c
/// holmes_exec!(holmes, {
/// predicate!(foo(string, uint64));
/// fact!(foo("bar", 3));
/// });
/// ```
///
/// If any of the actions would error, the *enclosing function* will error out.
/// This is due to a limitation in how the `try!` macro works. (It uses return
/// to error out, rather than a bind-like mechanism).
///
/// This only works because the other macros have both an explicit ident form,
/// and one which generates a function taking a `holmes` parameter instead.
#[macro_export]
macro_rules! holmes_exec {
($holmes:ident, { $( $action:expr );* }) => {{
$( try!($action($holmes)); );*
$holmes.nop()
}};
}
#[macro_export]
macro_rules! field {
($holmes:ident, [$name:ident $t:tt $descr:expr]) => {{::holmes::engine::types::Field {
name: Some(stringify!($name).to_string()),
description: Some($descr.to_string()),
type_: htype!($holmes, $t)
}}};
($holmes:ident, [$name:ident $t:tt]) => {{::holmes::engine::types::Field {
name: Some(stringify!($name).to_string()),
description: None,
type_: htype!($holmes, $t)
}}};
($holmes:ident, $t:tt) => {{::holmes::engine::types::Field {
name: None,
description: None,
type_: htype!($holmes, $t)
}}};
}
/// Registers a predicate with the `Holmes` context.
///
/// ```c
/// predicate!(holmes, foo(string, uint64))
/// ```
///
/// will register a predicate named foo, with a `string` slot and a `uint64`
/// slot, to the provided `holmes` context object.
///
/// If the `holmes` parameter is omitted, it will generate a function taking
/// a `holmes` parameter in its stead.
#[macro_export]
macro_rules! predicate {
($holmes:ident, $pred_name:ident($($t:tt),*), $descr:expr) => {{
let fields = vec![$(field!($holmes, $t),)*];
$holmes.new_predicate(&::holmes::engine::types::Predicate {
name: stringify!($pred_name).to_string(),
description: Some($descr.to_string()),
fields: fields<|fim▁hole|> }};
($holmes:ident, $pred_name:ident($($t:tt),*)) => {{
let fields = vec![$(field!($holmes, $t),)*];
$holmes.new_predicate(&::holmes::engine::types::Predicate {
name: stringify!($pred_name).to_string(),
description: None,
fields: fields
})
}};
($pred_name:ident($($t:tt),*) : $descr:expr) => { |holmes: &mut ::holmes::Engine| {
predicate!(holmes, $pred_name($($t),*), $descr)
}};
($pred_name:ident($($t:tt),*)) => { |holmes: &mut ::holmes::Engine| {
predicate!(holmes, $pred_name($($t),*))
}};
}
/// Stores a fact with the `Holmes` context.
///
/// ```c
/// fact!(holmes, foo("bar", 3))
/// ```
///
/// will store a true instance of the predicate foo with "bar" in the first
/// slot and 3 in the second.
///
/// If the `holmes` parameter is omitted, it will generate a function taking
/// a `holmes` parameter in its stead.
#[macro_export]
macro_rules! fact {
($holmes:ident, $pred_name:ident($($a:expr),*)) => {
$holmes.new_fact(&::holmes::engine::types::Fact {
pred_name : stringify!($pred_name).to_string(),
args : vec![$(::holmes::pg::dyn::values::ToValue::to_value($a)),*]
})
};
($pred_name:ident($($a:expr),*)) => { |holmes: &mut ::holmes::Engine| {
fact!(holmes, $pred_name($($a),*))
}};
}
#[macro_export]
macro_rules! clause {
($holmes:ident, $vars:ident, $next:ident, $pred_name:ident($($m:tt),*)) => {{
::holmes::engine::types::Clause {
pred_name: stringify!($pred_name).to_string(),
args: vec![$(clause_match!($vars, $next, $m)),*]
}
}};
($holmes:ident, $vars:ident, $next:ident, $pred_name:ident{$($field:ident = $m:tt),*}) => {{
use std::collections::HashMap;
let pred_name = stringify!($pred_name).to_string();
let pred = $holmes.get_predicate(&pred_name).unwrap().unwrap();
let mut matches = HashMap::new();
let _ = {
$(matches.insert(stringify!($field).to_string(), clause_match!($vars, $next, $m)));*
};
let args: Vec<_> = pred.fields.iter().map(|field| {
match field.name {
Some(ref name) => match matches.remove(name) {
Some(cm) => cm,
None => ::holmes::engine::types::MatchExpr::Unbound
},
None => ::holmes::engine::types::MatchExpr::Unbound,
}
}).collect();
::holmes::engine::types::Clause {
pred_name: pred_name,
args: args
}
}};
}
/// Runs a datalog query against the `Holmes` context
///
/// Matches as per the right hand side of a datalog rule, then returns
/// a list of possible assignments to variables.
///
/// Clauses are separated by `&`, slots follow the rules in `match_expr!`
///
/// ```c
/// query!(holmes, foo((3), [_]) & bar([_], x))
/// ```
#[macro_export]
macro_rules! query {
($holmes:ident, $($pred_name:ident $inner:tt)&*) => {{
use std::collections::HashMap;
let mut _vars : HashMap<String, ::holmes::engine::types::Var> = HashMap::new();
let mut _n : ::holmes::engine::types::Var = 0;
let query = vec![$(clause!($holmes, _vars, _n, $pred_name $inner)),*];
$holmes.derive(&query)
}}
}
/// Adds a Holmes rule to the system
///
/// # Datalog Rules
///
/// ```c
/// rule!(holmes, baz([x], (7)) <= foo((3), [_]) & bar([_], x))
/// ```
///
/// will work as per a normal datalog rule, matching on foo and bar, and
/// generating a baz using any solutions found.
///
/// # Extended Rules
///
/// Holmes rules can also have "where clauses" which call out to native code
/// in the event of a match. For example,
///
/// ```c
/// rule!(holmes, baz([y], (8)) <= foo((3), [_]) & bar([_], x), {
/// let y = {f(x)}
/// })
/// ```
///
/// would call the Holmes registered function `f` on each output of `x`, bind
/// the result to `y`, and output it in the first slot of `baz`.
///
/// For more information on the expression and bind syntax, see the `hexpr!`
/// and `bind_match!` macro docs.
#[macro_export]
macro_rules! rule {
($holmes:ident, $rule_name:ident : $head_name:ident $head_inner:tt <= $($body_name:ident $body_inner:tt)&*,
{$(let $bind:tt = $hexpr:tt);*}) => {{
use std::collections::HashMap;
let mut _vars : HashMap<String, ::holmes::engine::types::Var> = HashMap::new();
let mut _n : ::holmes::engine::types::Var = 0;
let body = vec![$(clause!($holmes, _vars, _n, $body_name $body_inner)),*];
let wheres = vec![$(::holmes::engine::types::WhereClause {
lhs: bind_match!(_vars, _n, $bind),
rhs: hexpr!(_vars, _n, $hexpr)
}),*];
let head = clause!($holmes, _vars, _n, $head_name $head_inner);
$holmes.new_rule(&::holmes::engine::types::Rule {
name: stringify!($rule_name).to_string(),
body: body,
head: head,
wheres: wheres,
})
}};
($holmes:ident, $rule_name:ident : $($head_name:ident $head_inner:tt),* <= $($body_name:ident $inner:tt)&*) => {
rule!($holmes, $rule_name : $($head_name $head_inner),* <= $($body_name $inner)&*, {})
};
($rule_name:ident : $($head_name:ident $head_inner:tt),* <= $($body_name:ident $inner:tt)&*) => {
|holmes: &mut ::holmes::Engine| {
rule!(holmes, $rule_name : $($head_name $head_inner),* <= $($body_name $inner)&*, {})
}
};
($rule_name:ident : $($head_name:ident $head_inner:tt),* <=
$($body_name:ident $inner:tt)&*, {$(let $bind:tt = $hexpr:tt);*}) => {
|holmes: &mut ::holmes::Engine| {
rule!(holmes, $rule_name : $($head_name $head_inner),* <=
$($body_name $inner)&*, {$(let $bind = $hexpr);*})
}
};
}
/// Registers a native rust function with the `Holmes` object for use in rules.
///
/// ```c
/// func!(holmes, let f : uint64 -> string = |x : &u64| {
/// format!("{}", x)
/// })
/// ```
///
/// If your function input has more than one parameter, they will be tupled
/// and packed into a value.
/// To describe such a function, just use a tuple type on the left of the
/// arrow.
#[macro_export]
macro_rules! func {
($holmes:ident, let $name:ident : $src:tt -> $dst:tt = $body:expr) => {{
let src = htype!($holmes, $src);
let dst = htype!($holmes, $dst);
$holmes.reg_func(stringify!($name).to_string(),
::holmes::engine::types::Func {
input_type: src,
output_type: dst,
run: Box::new(move |v : ::holmes::pg::dyn::Value| {
::holmes::pg::dyn::values::ToValue::to_value($body(typed_unpack!(v, $src)))
})})
}};
(let $name:ident : $src:tt -> $dst:tt = $body:expr) => {
|holmes: &mut ::holmes::Engine| {
func!(holmes, let $name : $src -> $dst = $body)
}
};
}
pub mod internal {
//! EDSL Support Code
//! This module contains support code for the other macros which is not
//! intended to be user facing, but which must be exported for the macros
//! to work properly.
//!
//! Until more complete example code is provided at the top of the module,
//! the documentation in here may be useful for understanding the EDSL
//! structure.
/// Given a value and a type it is believed to be, unpack it to the greatest
/// extent possible (e.g. unpack through tupling and lists)
#[macro_export]
macro_rules! typed_unpack {
($val:expr, [$typ:tt]) => {
$val.get().downcast_ref::<Vec<::holmes::pg::dyn::Value>>()
.expect("Dynamic list unpack failed")
.into_iter().map(|v| {
typed_unpack!(v, $typ)
}).collect::<Vec<_>>()
};
($val:expr, ($($typ:tt),*)) => {{
let mut pack = $val.get().downcast_ref::<Vec<::holmes::pg::dyn::Value>>()
.expect("Dynamic tuple unpack failed").into_iter();
($(typed_unpack!(pack.next().expect("Dynamic tuple too short"), $typ)),*)
}};
($val:expr, $name:ident) => {
$val.get().downcast_ref()
.expect(concat!("Dynamic base type unpack failed for ",
stringify!($name)))
};
}
/// Constructs a bind match outer object.
///
/// Args:
///
/// * `$vars:ident` is a mutable `HashMap` from variable name to
/// variable number, to be updated as more variables are created, or
/// referenced to re-use existing variable numberings.
/// * `$n:ident` is a mutable Var, intended to be used as an allocator for
/// the next unused variable. It should have a value equal to the next
/// unallocated variable
/// * The last parameter is the bind expression, it can be structured as:
/// * `[bind_expression]` -> do a list destructure/iteration, similar to
/// the list monad
/// * {bind_expression0, bind_expression1} -> do a tuple destructure
/// * a `clause_match!` compatible expression (see `clause_match` docs)
#[macro_export]
macro_rules! bind_match {
($vars:ident, $n:ident, [ $bm:tt ]) => {
::holmes::engine::types::BindExpr::Iterate(
Box::new(bind_match!($vars, $n, $bm)))
};
($vars:ident, $n:ident, {$($bm:tt),*}) => {
::holmes::engine::types::BindExpr::Destructure(
vec![$(bind_match!($vars, $n, $bm)),*])
};
($vars:ident, $n:ident, $cm:tt) => {{
::holmes::engine::types::BindExpr::Normal(
clause_match!($vars, $n, $cm))
}};
}
/// Generates an expression structure
///
/// Args:
///
/// * `$vars:ident` is a mutable `HashMap` from variable name to
/// variable number, to be updated as more variables are created, or
/// referenced to re-use existing variable numberings.
/// * `$n:ident` is a mutable Var, intended to be used as an allocator for
/// the next unused variable. It should have a value equal to the next
/// unallocated variable
/// * the expression to convert
/// * `[var]`
/// * `(val)`
/// * `{f(expr, expr, expr)}`
#[macro_export]
macro_rules! hexpr {
($vars:ident, $n:ident, [$hexpr_name:ident]) => {{
match clause_match!($vars, $n, $hexpr_name) {
::holmes::engine::types::MatchExpr::Var(var_no) =>
::holmes::engine::types::Expr::Var(var_no),
_ => panic!("clause_match! returned non-var for var input")
}
}};
($vars:ident, $n:ident, ($hexpr:expr)) => {
::holmes::engine::types::Expr::Val(
::holmes::pg::dyn::values::ToValue::to_value($hexpr))
};
($vars:ident, $n:ident, {$hexpr_func:ident($($hexpr_arg:tt),*)}) => {
::holmes::engine::types::Expr::App(
stringify!($hexpr_func).to_string(),
vec![$(hexpr!($vars, $n, $hexpr_arg)),*])
};
}
/// Generates a `MatchExpr` from a representation
///
/// Args:
///
/// * `$vars:ident` is a mutable `HashMap` from variable name to
/// variable number, to be updated as more variables are created, or
/// referenced to re-use existing variable numberings.
/// * `$n:ident` is a mutable Var, intended to be used as an allocator for
/// the next unused variable. It should have a value equal to the next
/// unallocated variable
/// * Clause representation:
/// * `[_]` -> unbound
/// * `(val)` -> constant match
/// * `x` -> variable bind
#[macro_export]
macro_rules! clause_match {
($vars:ident, $n:ident, [_]) => {{
::holmes::engine::types::MatchExpr::Unbound
}};
($vars:ident, $n:ident, ($v:expr)) => {{
::holmes::engine::types::MatchExpr::Const(
::holmes::pg::dyn::values::ToValue::to_value($v))
}};
($vars:ident, $n:ident, $m:ident) => {{
use std::collections::hash_map::Entry::*;
use ::holmes::engine::types::MatchExpr::*;
match $vars.entry(stringify!($m).to_string()) {
Occupied(entry) => Var(*entry.get()),
Vacant(entry) => {
$n = $n + 1;
entry.insert($n - 1);
Var($n - 1)
}
}
}};
}
}<|fim▁end|> | }) |
<|file_name|>LinkedHashMapFactory.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2009 Aleksandar Seovic
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.seovic.core.factory;
import com.seovic.core.Factory;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* {@link Factory} implementation that creates a <tt>java.util.LinkedHashMap</tt><|fim▁hole|> */
public class LinkedHashMapFactory<K, V>
extends AbstractFactory<Map<K, V>> {
private static final long serialVersionUID = -2766923385818267291L;
/**
* {@inheritDoc}
*/
@Override
public Map<K, V> create() {
return new LinkedHashMap<K, V>();
}
}<|fim▁end|> | * instance.
*
* @author Aleksandar Seovic 2010.11.08 |
<|file_name|>subsetOf.ts<|end_file_name|><|fim▁begin|>import { Collection, CollectionConstructor } from '../collection';
import { define, tools } from '../object-plus';
import { AggregatedType, ChainableAttributeSpec, Record, type } from '../record';
import { ItemsBehavior, transactionApi } from '../transactions';
import { CollectionReference, parseReference } from './commons';
type RecordsIds = ( string | number )[];
export function subsetOf<X extends CollectionConstructor<R>, R extends Record>( this : void, masterCollection : CollectionReference, T? : X ) : ChainableAttributeSpec<X>{
const CollectionClass = T || Collection,
// Lazily define class for subset collection, if it's not defined already...
SubsetOf = CollectionClass._SubsetOf || ( CollectionClass._SubsetOf = defineSubsetCollection( CollectionClass as any ) as any ),
getMasterCollection = parseReference( masterCollection );
return type( SubsetOf ).get(
function( refs ){
!refs || refs.resolvedWith || refs.resolve( getMasterCollection( this ) );
return refs;
}
);
}
Collection.prototype.createSubset = function( models : any, options ) : Collection {
const SubsetOf = subsetOf( this, this.constructor ).options.type,
subset = new SubsetOf( models, options );
subset.resolve( this );
return subset;
}
const subsetOfBehavior = ItemsBehavior.share | ItemsBehavior.persistent;
function defineSubsetCollection( CollectionClass : typeof Collection ) {
@define class SubsetOfCollection extends CollectionClass {
refs : any[];
resolvedWith : Collection = null;<|fim▁hole|>
constructor( recordsOrIds?, options? ){
super( [], options, subsetOfBehavior );
this.refs = toArray( recordsOrIds );
}
// Remove should work fine as it already accepts ids. Add won't...
add( a_elements, options = {} ){
const { resolvedWith } = this,
toAdd = toArray( a_elements );
if( resolvedWith ){
// If the collection is resolved already, everything is simple.
return super.add( resolveRefs( resolvedWith, toAdd ), options );
}
else{
// Collection is not resolved yet. So, we prepare the delayed computation.
if( toAdd.length ){
const isRoot = transactionApi.begin( this );
// Save elements to resolve in future...
this.refs = this.refs ? this.refs.concat( toAdd ) : toAdd.slice();
transactionApi.markAsDirty( this, options );
// And throw the 'changes' event.
isRoot && transactionApi.commit( this );
}
}
}
reset( a_elements?, options = {} ){
const { resolvedWith } = this,
elements = toArray( a_elements );
return resolvedWith ?
// Collection is resolved, so parse ids and forward the call to set.
super.reset( resolveRefs( resolvedWith, elements ), options ) :
// Collection is not resolved yet. So, we prepare the delayed computation.
delaySet( this, elements, options ) as any || [];
}
_createTransaction( a_elements, options? ){
const { resolvedWith } = this,
elements = toArray( a_elements );
return resolvedWith ?
// Collection is resolved, so parse ids and forward the call to set.
super._createTransaction( resolveRefs( resolvedWith, elements ), options ) :
// Collection is not resolved yet. So, we prepare the delayed computation.
delaySet( this, elements, options );
}
// Serialized as an array of model ids.
toJSON() : RecordsIds {
return this.refs ?
this.refs.map( objOrId => objOrId.id || objOrId ) :
this.models.map( model => model.id );
}
// Subset is always valid.
_validateNested(){ return 0; }
get length() : number {
return this.models.length || ( this.refs ? this.refs.length : 0 );
}
// Must be shallow copied on clone.
clone( owner? ){
var Ctor = (<any>this).constructor,
copy = new Ctor( [], {
model : this.model,
comparator : this.comparator
});
if( this.resolvedWith ){
// TODO: bug here.
copy.resolvedWith = this.resolvedWith;
copy.refs = null;
copy.reset( this.models, { silent : true } );
}
else{
copy.refs = this.refs.slice();
}
return copy;
}
// Clean up the custom parse method possibly defined in the base class.
parse( raw : any ) : Record[] {
return raw;
}
resolve( collection : Collection ) : this {
if( collection && collection.length ){
this.resolvedWith = collection;
if( this.refs ){
this.reset( this.refs, { silent : true } );
this.refs = null;
}
}
return this;
}
getModelIds() : RecordsIds { return this.toJSON(); }
toggle( modelOrId : any, val : boolean ) : boolean {
return super.toggle( this.resolvedWith.get( modelOrId ), val );
}
addAll() : Record[] {
if( this.resolvedWith ){
this.set( this.resolvedWith.models );
return this.models;
}
throw new Error( "Cannot add elemens because the subset collection is not resolved yet." );
}
toggleAll() : Record[] {
return this.length ? this.reset() : this.addAll();
}
}
// Clean up all custom item events to prevent memory leaks.
SubsetOfCollection.prototype._itemEvents = void 0;
return SubsetOfCollection;
}
function resolveRefs( master, elements ){
const records = [];
for( let el of elements ){
const record = master.get( el );
if( record ) records.push( record );
}
return records;
}
function delaySet( collection, elements, options ) : void {
if( tools.notEqual( collection.refs, elements ) ){
const isRoot = transactionApi.begin( collection );
// Save elements to resolve in future...
collection.refs = elements.slice();
transactionApi.markAsDirty( collection, options );
// And throw the 'changes' event.
isRoot && transactionApi.commit( collection );
}
}
function toArray( elements ){
return elements ? (
Array.isArray( elements ) ? elements : [ elements ]
) : [];
}<|fim▁end|> |
_metatype : AggregatedType
get __inner_state__(){ return this.refs || this.models; } |
<|file_name|>cli.py<|end_file_name|><|fim▁begin|>import json
from prismriver import util, main
class SongJsonEncoder(json.JSONEncoder):
def default(self, o):
return o.__dict__
def format_output(songs, output_format, txt_template=None):
if output_format == 'txt':
formatted_songs = []
for song in songs:
lyrics_txt = ''
if song.lyrics:
index = 0
for lyric in song.lyrics:
lyrics_txt += lyric
if index < len(song.lyrics) - 1:
lyrics_txt += '\n\n<<< --- --- --- >>>\n\n'
index += 1
result = txt_template
result = result.replace('%TITLE%', song.title)
result = result.replace('%ARTIST%', song.artist)
result = result.replace('%PLUGIN_ID%', song.plugin_name)
result = result.replace('%PLUGIN_NAME%', song.plugin_name)
result = result.replace('%LYRICS%', lyrics_txt)
formatted_songs.append(result)
result = ''
index = 0
for formatted_song in formatted_songs:
result += formatted_song
if index < len(formatted_songs) - 1:
result += '\n\n<<< --- --- --- --- --- >>>\n\n'
index += 1
return result
elif output_format == 'json':
return json.dumps(songs, cls=SongJsonEncoder, sort_keys=True, indent=4, ensure_ascii=False)
elif output_format == 'json_ascii':
return json.dumps(songs, cls=SongJsonEncoder, sort_keys=True, indent=4)
else:
pass
def list_plugins():
plugins = main.get_plugins()
plugins.sort(key=lambda x: x.plugin_name.lower())
for plugin in plugins:
print('{:<20} [id: {}]'.format(plugin.plugin_name, plugin.ID))
<|fim▁hole|>def run():
parser = util.init_args_parser()
parser.add_argument('--list', action='store_true', help='list available search plugins')
parser.add_argument('--song', action='store_true',
help='search for song information by artist and title (default action)')
parser.add_argument('--cleanup', action='store_true', help='remove outdated files from cache')
parser.add_argument("-f", "--format", type=str, default='txt',
help="lyrics output format (txt (default), json, json_ascii)")
parser.add_argument("-o", "--output", type=str, default='%ARTIST% - %TITLE%\nSource: %PLUGIN_NAME%\n\n%LYRICS%',
help="output template for txt format. Available parameters: "
"%%TITLE%% - song title, "
"%%ARTIST%% - song artist, "
"%%LYRICS%% - song lyrics, "
"%%PLUGIN_ID%% - plugin id, "
"%%PLUGIN_NAME%% - plugin name "
"(default value: %%ARTIST%% - %%TITLE%%\\nSource: %%PLUGIN_NAME%%\\n\\n%%LYRICS%%)"
)
params = parser.parse_args()
util.init_logging(params.quiet, params.verbose, params.log)
util.log_debug_info(params)
config = util.init_search_config(params)
util.log_config_info(config)
if params.list:
list_plugins()
elif params.cleanup:
main.cleanup_cache(config)
else:
result = main.search(params.artist, params.title, config)
if result:
print(format_output(result, params.format, params.output))<|fim▁end|> | |
<|file_name|>npo.py<|end_file_name|><|fim▁begin|>import logging
import json
import os
import re
#from pprint import pprint
#from itertools import count
from urlparse import urljoin
from lxml import html
from thready import threaded
import requests
from scrapekit.util import collapse_whitespace
from connectedafrica.scrapers.util import MultiCSV
from connectedafrica.scrapers.util import make_path
log = logging.getLogger('npo')
URL_PATTERN = "http://www.npo.gov.za/PublicNpo/Npo/DetailsAllDocs/%s"
def make_cache(i):
return make_path('.cache/npo/%s/%s/%s/%s/%s.json' % (
i % 10, i % 100, i % 1000, i % 10000, i))
def make_urls():
for i in xrange(1, 16000000):
yield i
def scrape_npo(csv, i):
url = URL_PATTERN % i
cache_path = make_cache(i)
if not os.path.exists(cache_path):
res = requests.get(url)
page = {
'url': url,
'http_status': res.status_code,
'content': res.content.decode('utf-8')
}
with open(cache_path, 'wb') as fh:
json.dump(page, fh)
else:
with open(cache_path, 'rb') as fh:
page = json.load(fh)
if 'internal server error' in page['content']:
return
data = {}
doc = html.fromstring(page['content'])
data = {
'source_url': url,
'name': doc.find('.//h1').find('.//span').text.strip(),
'status': doc.find('.//h1').find('.//span[@class="npo-status"]').text,
'email': None
}
log.info("Scraping: %s", data['name'])
sub_titles = doc.findall('.//h5')
next_heading = None
for sub_title in sub_titles:
text = collapse_whitespace(sub_title.text)
if 'Registration No' in text:
data['reg_no'] = sub_title.find('./span').text.strip()
next_heading = 'category'
elif 'Your Name' in text:
next_heading = None
elif next_heading == 'category':<|fim▁hole|> next_heading = None
for span in doc.findall('.//span'):
text = collapse_whitespace(span.text)
if text is not None and 'Registered on' in text:
match = re.search(r'\d+.\d+.\d+', text)
if match:
data['reg_date'] = match.group(0)
for addr in doc.findall('.//div[@class="address"]'):
addr_type = collapse_whitespace(addr.find('./h4').text)
addrs = [collapse_whitespace(a) for a in
addr.xpath('string()').split('\n')]
addrs = '\n'.join([a for a in addrs if len(a)][1:])
if 'Physical' in addr_type:
data['physical_address'] = addrs
elif 'Postal' in addr_type:
data['postal_address'] = addrs
elif 'Contact' in addr_type:
data['contact_name'] = collapse_whitespace(addr.find('./p').text)
for li in addr.findall('.//li'):
contact = collapse_whitespace(li.xpath('string()'))
contact_type = {
'phone': 'phone',
'mailinfo': 'email',
'fax': 'fax'
}.get(li.get('class'))
data[contact_type] = contact
off_div = './/li[@data-sha-context-enttype="Npo.AppointedOfficeBearer"]'
csv.write('npo/npo_organisations.csv', data)
for li in doc.findall(off_div):
s = li.find('.//strong')
a = s.find('./a')
id_number = li.find('.//div/span')
if id_number is not None:
id_number = id_number.text
id_number = id_number.replace('(', '')
id_number = id_number.replace(')', '')
id_number = id_number.strip()
if 'Neither ID or Passport' in id_number:
id_number = None
officer = {
'role': collapse_whitespace(s.text).replace(' :', ''),
'npo_name': data['name'],
'source_url': url,
'officer_id': urljoin(url, a.get('href')),
'officer_name': collapse_whitespace(a.text),
'officer_id_number': id_number
}
csv.write('npo/npo_officers.csv', officer)
def scrape_npos():
csv = MultiCSV()
threaded(make_urls(), lambda i: scrape_npo(csv, i), num_threads=30)
csv.close()
if __name__ == '__main__':
scrape_npos()<|fim▁end|> | data['category'] = text
next_heading = 'legal_form'
elif next_heading == 'legal_form':
data['legal_form'] = text |
<|file_name|>web.py<|end_file_name|><|fim▁begin|>"""
Nodes that use web services to do something.
"""
import json
import httplib2
import urllib
from BeautifulSoup import BeautifulSoup
from nodetree import node, exceptions
from . import base
from .. import stages
class WebServiceNodeError(exceptions.NodeError):
pass
class BaseWebService(node.Node, base.TextWriterMixin):
"""Base class for web service nodes."""
abstract = True
stage = stages.POST
intypes = [unicode]
outtype = unicode
<|fim▁hole|> parameters = [
dict(name="extract", value="phrases", choices=["phrases", "sentiment"]),
]
def process(self, input):
http = httplib2.Http()
headers = {}
body = dict(text=input[:10000].encode("utf8", "replace"))
url = "%s/%s/" % (self.baseurl, self._params.get("extract", "phrases"))
request, content = http.request(url, "POST", headers=headers, body=urllib.urlencode(body))
if request["status"] == "503":
raise WebServiceNodeError("Daily limit exceeded", self)
elif request["status"] == "400":
raise WebServiceNodeError("No text, limit exceeded, or incorrect language", self)
out = u""
try:
data = json.loads(content)
except ValueError:
return content
for key in ["GPE", "VP", "LOCATION", "NP", "DATE"]:
keydata = data.get(key)
if keydata is not None:
out += "%s\n" % key
for entity in keydata:
out += " %s\n" % entity
return out
class DBPediaAnnotate(BaseWebService):
"""Mashape entity extraction."""
stage = stages.POST
baseurl = "http://spotlight.dbpedia.org/rest/annotate/"
parameters = [
dict(name="confident", value=0.2),
dict(name="support", value=20),
]
def process(self, input):
http = httplib2.Http()
headers = {}
body = dict(
text=input.encode("utf8", "replace"),
confidence=self._params.get("confident"),
support=self._params.get("support"),
)
url = "%s?%s" % (self.baseurl, urllib.urlencode(body))
request, content = http.request(url, "GET", headers=headers)
if request["status"] != "200":
raise WebServiceNodeError("A web service error occured. Status: %s" % request["status"], self)
out = u""
soup = BeautifulSoup(content)
for ref in soup.findAll("a"):
out += "%s\n" % ref.text
out += " %s\n\n" % ref.get("href")
return out
class OpenCalais(BaseWebService):
"""OpenCalias sematic markup."""
stage = stages.POST
baseurl = "http://api.opencalais.com/tag/rs/enrich"
parameters = [
]
def process(self, input):
http = httplib2.Http()
headers = {
"x-calais-licenseID": "dsza6q6zwa9nzvz9wbz7f6y5",
"content-type": "text/raw",
"Accept": "xml/rdf",
"enableMetadataType": "GenericRelations",
}
request, content = http.request(
self.baseurl,
"POST",
headers=headers,
body=input.encode("utf8")
)
if request["status"] != "200":
raise WebServiceNodeError("A web service error occured. Status: %s" % request["status"], self)
return content.decode("utf8")<|fim▁end|> | class MashapeProcessing(BaseWebService):
"""Mashape entity extraction."""
stage = stages.POST
baseurl = "http://text-processing.com/api/" |
<|file_name|>SwiftIntTypes.py<|end_file_name|><|fim▁begin|># ===--- SwiftIntTypes.py ----------------------------*- coding: utf-8 -*-===//
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
# Bit counts for all int types
_all_integer_type_bitwidths = [8, 16, 32, 64]
# Number of bits in the biggest int type
int_max_bits = max(_all_integer_type_bitwidths)
def int_max(bits, signed):
bits = bits - 1 if signed else bits
bits = max(bits, 0)
return (1 << bits) - 1
def int_min(bits, signed):
return (-1 * int_max(bits, signed) - 1) if signed else 0
class SwiftIntegerType(object):
def __init__(self, is_word, bits, is_signed):
self.is_word = is_word
self.bits = bits
self.is_signed = is_signed
if is_word:
self.possible_bitwidths = [32, 64]
else:
self.possible_bitwidths = [bits]
self.min = int_min(bits, is_signed)
self.max = int_max(bits, is_signed)
# Derived properties
self.stdlib_name = \
('' if is_signed else 'U') + \
'Int' + \
('' if is_word else str(bits))
self.builtin_name = 'Int' + str(bits)
def get_opposite_signedness(self):
return SwiftIntegerType(self.is_word, self.bits, not self.is_signed)
def __eq__(self, other):
return self.is_word == other.is_word and \
self.bits == other.bits and \
self.is_signed == other.is_signed
def __ne__(self, other):
return not self.__eq__(other)
def all_integer_types(word_bits):
for bitwidth in _all_integer_type_bitwidths:
for is_signed in [False, True]:
yield SwiftIntegerType(
is_word=False, bits=bitwidth,
is_signed=is_signed)
<|fim▁hole|> for is_signed in [False, True]:
yield SwiftIntegerType(
is_word=True, bits=word_bits,
is_signed=is_signed)
# 'truncatingBitPattern' initializer is defined if the conversion is truncating
# on any platform that Swift supports.
def should_define_truncating_bit_pattern_init(src_ty, dst_ty):
# Don't define a truncating conversion between a type and itself.
if src_ty == dst_ty:
return False
# Conversion to opposite signedness is never truncating.
if src_ty == dst_ty.get_opposite_signedness():
return False
for src_ty_bits in src_ty.possible_bitwidths:
for dst_ty_bits in dst_ty.possible_bitwidths:
if src_ty_bits > dst_ty_bits:
return True
return False
def all_integer_type_names():
return [self_ty.stdlib_name for self_ty in all_integer_types(0)]
def all_real_number_type_names():
# FIXME , 'Float80' Revert until I figure out a test failure # Float80
# for i386 & x86_64
return ['Float', 'Double']
def all_numeric_type_names():
return all_integer_type_names() + all_real_number_type_names()
def numeric_type_names_macintosh_only():
return ['Float80']
# Swift_Programming_Language/Expressions.html
def all_integer_binary_operator_names():
return ['%', '<<', '>>', '&*', '&', '&+', '&-', '|', '^']
def all_integer_or_real_binary_operator_names():
return ['*', '/', '+', '-', '..<', '...']
def all_integer_assignment_operator_names():
return ['%=', '<<=', '>>=', '&=', '^=', '|=']
def all_integer_or_real_assignment_operator_names():
return ['=', '*=', '/=', '+=', '-=']<|fim▁end|> | |
<|file_name|>builder.rs<|end_file_name|><|fim▁begin|>use rustc_index::vec::IndexVec;
use rustc_middle::mir::tcx::RvalueInitializationState;
use rustc_middle::mir::*;
use rustc_middle::ty::{self, TyCtxt};
use smallvec::{smallvec, SmallVec};
use std::iter;
use std::mem;
use super::abs_domain::Lift;
use super::IllegalMoveOriginKind::*;
use super::{Init, InitIndex, InitKind, InitLocation, LookupResult, MoveError};
use super::{
LocationMap, MoveData, MoveOut, MoveOutIndex, MovePath, MovePathIndex, MovePathLookup,
};
struct MoveDataBuilder<'a, 'tcx> {
body: &'a Body<'tcx>,
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
data: MoveData<'tcx>,
errors: Vec<(Place<'tcx>, MoveError<'tcx>)>,
}
impl<'a, 'tcx> MoveDataBuilder<'a, 'tcx> {
fn new(body: &'a Body<'tcx>, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> Self {
let mut move_paths = IndexVec::new();
let mut path_map = IndexVec::new();
let mut init_path_map = IndexVec::new();
MoveDataBuilder {
body,
tcx,
param_env,
errors: Vec::new(),
data: MoveData {
moves: IndexVec::new(),
loc_map: LocationMap::new(body),
rev_lookup: MovePathLookup {
locals: body
.local_decls
.indices()
.map(|i| {
Self::new_move_path(
&mut move_paths,
&mut path_map,
&mut init_path_map,
None,
Place::from(i),
)
})
.collect(),
projections: Default::default(),
},
move_paths,
path_map,
inits: IndexVec::new(),
init_loc_map: LocationMap::new(body),
init_path_map,
},
}
}
fn new_move_path(
move_paths: &mut IndexVec<MovePathIndex, MovePath<'tcx>>,
path_map: &mut IndexVec<MovePathIndex, SmallVec<[MoveOutIndex; 4]>>,
init_path_map: &mut IndexVec<MovePathIndex, SmallVec<[InitIndex; 4]>>,
parent: Option<MovePathIndex>,
place: Place<'tcx>,
) -> MovePathIndex {
let move_path =
move_paths.push(MovePath { next_sibling: None, first_child: None, parent, place });
if let Some(parent) = parent {
let next_sibling = mem::replace(&mut move_paths[parent].first_child, Some(move_path));
move_paths[move_path].next_sibling = next_sibling;
}
let path_map_ent = path_map.push(smallvec![]);
assert_eq!(path_map_ent, move_path);
let init_path_map_ent = init_path_map.push(smallvec![]);
assert_eq!(init_path_map_ent, move_path);
move_path
}
}
impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> {
/// This creates a MovePath for a given place, returning an `MovePathError`
/// if that place can't be moved from.
///
/// NOTE: places behind references *do not* get a move path, which is
/// problematic for borrowck.
///
/// Maybe we should have separate "borrowck" and "moveck" modes.
fn move_path_for(&mut self, place: Place<'tcx>) -> Result<MovePathIndex, MoveError<'tcx>> {
debug!("lookup({:?})", place);
let mut base = self.builder.data.rev_lookup.locals[place.local];
// The move path index of the first union that we find. Once this is
// some we stop creating child move paths, since moves from unions
// move the whole thing.
// We continue looking for other move errors though so that moving
// from `*(u.f: &_)` isn't allowed.
let mut union_path = None;
for (i, elem) in place.projection.iter().enumerate() {
let proj_base = &place.projection[..i];
let body = self.builder.body;
let tcx = self.builder.tcx;
let place_ty = Place::ty_from(place.local, proj_base, body, tcx).ty;
match place_ty.kind() {
ty::Ref(..) | ty::RawPtr(..) => {
let proj = &place.projection[..i + 1];
return Err(MoveError::cannot_move_out_of(
self.loc,
BorrowedContent {
target_place: Place {
local: place.local,
projection: tcx.intern_place_elems(proj),
},
},
));
}
ty::Adt(adt, _) if adt.has_dtor(tcx) && !adt.is_box() => {
return Err(MoveError::cannot_move_out_of(
self.loc,
InteriorOfTypeWithDestructor { container_ty: place_ty },
));
}
ty::Adt(adt, _) if adt.is_union() => {
union_path.get_or_insert(base);
}
ty::Slice(_) => {
return Err(MoveError::cannot_move_out_of(
self.loc,
InteriorOfSliceOrArray {
ty: place_ty,
is_index: matches!(elem, ProjectionElem::Index(..)),
},
));
}
ty::Array(..) => {
if let ProjectionElem::Index(..) = elem {
return Err(MoveError::cannot_move_out_of(
self.loc,
InteriorOfSliceOrArray { ty: place_ty, is_index: true },
));
}
}
_ => {}
};
if union_path.is_none() {
base = self.add_move_path(base, elem, |tcx| Place {
local: place.local,
projection: tcx.intern_place_elems(&place.projection[..i + 1]),
});
}
}
if let Some(base) = union_path {
// Move out of union - always move the entire union.
Err(MoveError::UnionMove { path: base })
} else {
Ok(base)
}
}
fn add_move_path(
&mut self,
base: MovePathIndex,
elem: PlaceElem<'tcx>,
mk_place: impl FnOnce(TyCtxt<'tcx>) -> Place<'tcx>,
) -> MovePathIndex {
let MoveDataBuilder {
data: MoveData { rev_lookup, move_paths, path_map, init_path_map, .. },
tcx,
..
} = self.builder;
*rev_lookup.projections.entry((base, elem.lift())).or_insert_with(move || {
MoveDataBuilder::new_move_path(
move_paths,
path_map,
init_path_map,
Some(base),
mk_place(*tcx),
)
})
}
fn create_move_path(&mut self, place: Place<'tcx>) {
// This is an non-moving access (such as an overwrite or
// drop), so this not being a valid move path is OK.
let _ = self.move_path_for(place);
}
}
impl<'a, 'tcx> MoveDataBuilder<'a, 'tcx> {
fn finalize(
self,
) -> Result<MoveData<'tcx>, (MoveData<'tcx>, Vec<(Place<'tcx>, MoveError<'tcx>)>)> {
debug!("{}", {
debug!("moves for {:?}:", self.body.span);
for (j, mo) in self.data.moves.iter_enumerated() {
debug!(" {:?} = {:?}", j, mo);
}
debug!("move paths for {:?}:", self.body.span);
for (j, path) in self.data.move_paths.iter_enumerated() {
debug!(" {:?} = {:?}", j, path);
}
"done dumping moves"
});
if !self.errors.is_empty() { Err((self.data, self.errors)) } else { Ok(self.data) }
}
}
pub(super) fn gather_moves<'tcx>(
body: &Body<'tcx>,
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
) -> Result<MoveData<'tcx>, (MoveData<'tcx>, Vec<(Place<'tcx>, MoveError<'tcx>)>)> {
let mut builder = MoveDataBuilder::new(body, tcx, param_env);
builder.gather_args();
for (bb, block) in body.basic_blocks().iter_enumerated() {
for (i, stmt) in block.statements.iter().enumerate() {
let source = Location { block: bb, statement_index: i };
builder.gather_statement(source, stmt);
}
let terminator_loc = Location { block: bb, statement_index: block.statements.len() };
builder.gather_terminator(terminator_loc, block.terminator());
}
builder.finalize()
}
impl<'a, 'tcx> MoveDataBuilder<'a, 'tcx> {
fn gather_args(&mut self) {
for arg in self.body.args_iter() {
let path = self.data.rev_lookup.locals[arg];
let init = self.data.inits.push(Init {
path,
kind: InitKind::Deep,
location: InitLocation::Argument(arg),
});
debug!("gather_args: adding init {:?} of {:?} for argument {:?}", init, path, arg);
self.data.init_path_map[path].push(init);
}
}
fn gather_statement(&mut self, loc: Location, stmt: &Statement<'tcx>) {
debug!("gather_statement({:?}, {:?})", loc, stmt);
(Gatherer { builder: self, loc }).gather_statement(stmt);
}
fn gather_terminator(&mut self, loc: Location, term: &Terminator<'tcx>) {
debug!("gather_terminator({:?}, {:?})", loc, term);
(Gatherer { builder: self, loc }).gather_terminator(term);
}
}
struct Gatherer<'b, 'a, 'tcx> {
builder: &'b mut MoveDataBuilder<'a, 'tcx>,
loc: Location,
}
impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> {
fn gather_statement(&mut self, stmt: &Statement<'tcx>) {
match &stmt.kind {
StatementKind::Assign(box (place, rval)) => {
self.create_move_path(*place);
if let RvalueInitializationState::Shallow = rval.initialization_state() {
// Box starts out uninitialized - need to create a separate
// move-path for the interior so it will be separate from
// the exterior.
self.create_move_path(self.builder.tcx.mk_place_deref(*place));
self.gather_init(place.as_ref(), InitKind::Shallow);
} else {
self.gather_init(place.as_ref(), InitKind::Deep);
}
self.gather_rvalue(rval);
}
StatementKind::FakeRead(box (_, place)) => {
self.create_move_path(*place);
}
StatementKind::LlvmInlineAsm(ref asm) => {
for (output, kind) in iter::zip(&*asm.outputs, &asm.asm.outputs) {
if !kind.is_indirect {
self.gather_init(output.as_ref(), InitKind::Deep);
}
}
for (_, input) in asm.inputs.iter() {
self.gather_operand(input);
}
}
StatementKind::StorageLive(_) => {}
StatementKind::StorageDead(local) => {
self.gather_move(Place::from(*local));
}
StatementKind::SetDiscriminant { .. } => {
span_bug!(
stmt.source_info.span,
"SetDiscriminant should not exist during borrowck"
);
}
StatementKind::Retag { .. }
| StatementKind::AscribeUserType(..)
| StatementKind::Coverage(..)
| StatementKind::CopyNonOverlapping(..)
| StatementKind::Nop => {}
}
}
fn gather_rvalue(&mut self, rvalue: &Rvalue<'tcx>) {
match *rvalue {
Rvalue::ThreadLocalRef(_) => {} // not-a-move
Rvalue::Use(ref operand)
| Rvalue::Repeat(ref operand, _)
| Rvalue::Cast(_, ref operand, _)
| Rvalue::UnaryOp(_, ref operand) => self.gather_operand(operand),
Rvalue::BinaryOp(ref _binop, box (ref lhs, ref rhs))
| Rvalue::CheckedBinaryOp(ref _binop, box (ref lhs, ref rhs)) => {
self.gather_operand(lhs);
self.gather_operand(rhs);
}
Rvalue::Aggregate(ref _kind, ref operands) => {
for operand in operands {
self.gather_operand(operand);
}
}
Rvalue::Ref(..)
| Rvalue::AddressOf(..)
| Rvalue::Discriminant(..)
| Rvalue::Len(..)
| Rvalue::NullaryOp(NullOp::SizeOf, _)
| Rvalue::NullaryOp(NullOp::Box, _) => {
// This returns an rvalue with uninitialized contents. We can't
// move out of it here because it is an rvalue - assignments always
// completely initialize their place.
//
// However, this does not matter - MIR building is careful to
// only emit a shallow free for the partially-initialized
// temporary.
//
// In any case, if we want to fix this, we have to register a
// special move and change the `statement_effect` functions.
}
}
}
fn gather_terminator(&mut self, term: &Terminator<'tcx>) {
match term.kind {
TerminatorKind::Goto { target: _ }
| TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. }
// In some sense returning moves the return place into the current
// call's destination, however, since there are no statements after
// this that could possibly access the return place, this doesn't
// need recording.
| TerminatorKind::Return
| TerminatorKind::Resume
| TerminatorKind::Abort<|fim▁hole|> self.gather_operand(cond);
}
TerminatorKind::SwitchInt { ref discr, .. } => {
self.gather_operand(discr);
}
TerminatorKind::Yield { ref value, resume_arg: place, .. } => {
self.gather_operand(value);
self.create_move_path(place);
self.gather_init(place.as_ref(), InitKind::Deep);
}
TerminatorKind::Drop { place, target: _, unwind: _ } => {
self.gather_move(place);
}
TerminatorKind::DropAndReplace { place, ref value, .. } => {
self.create_move_path(place);
self.gather_operand(value);
self.gather_init(place.as_ref(), InitKind::Deep);
}
TerminatorKind::Call {
ref func,
ref args,
ref destination,
cleanup: _,
from_hir_call: _,
fn_span: _,
} => {
self.gather_operand(func);
for arg in args {
self.gather_operand(arg);
}
if let Some((destination, _bb)) = *destination {
self.create_move_path(destination);
self.gather_init(destination.as_ref(), InitKind::NonPanicPathOnly);
}
}
TerminatorKind::InlineAsm {
template: _,
ref operands,
options: _,
line_spans: _,
destination: _,
} => {
for op in operands {
match *op {
InlineAsmOperand::In { reg: _, ref value }
=> {
self.gather_operand(value);
}
InlineAsmOperand::Out { reg: _, late: _, place, .. } => {
if let Some(place) = place {
self.create_move_path(place);
self.gather_init(place.as_ref(), InitKind::Deep);
}
}
InlineAsmOperand::InOut { reg: _, late: _, ref in_value, out_place } => {
self.gather_operand(in_value);
if let Some(out_place) = out_place {
self.create_move_path(out_place);
self.gather_init(out_place.as_ref(), InitKind::Deep);
}
}
InlineAsmOperand::Const { value: _ }
| InlineAsmOperand::SymFn { value: _ }
| InlineAsmOperand::SymStatic { def_id: _ } => {}
}
}
}
}
}
fn gather_operand(&mut self, operand: &Operand<'tcx>) {
match *operand {
Operand::Constant(..) | Operand::Copy(..) => {} // not-a-move
Operand::Move(place) => {
// a move
self.gather_move(place);
}
}
}
fn gather_move(&mut self, place: Place<'tcx>) {
debug!("gather_move({:?}, {:?})", self.loc, place);
if let [ref base @ .., ProjectionElem::Subslice { from, to, from_end: false }] =
**place.projection
{
// Split `Subslice` patterns into the corresponding list of
// `ConstIndex` patterns. This is done to ensure that all move paths
// are disjoint, which is expected by drop elaboration.
let base_place =
Place { local: place.local, projection: self.builder.tcx.intern_place_elems(base) };
let base_path = match self.move_path_for(base_place) {
Ok(path) => path,
Err(MoveError::UnionMove { path }) => {
self.record_move(place, path);
return;
}
Err(error @ MoveError::IllegalMove { .. }) => {
self.builder.errors.push((base_place, error));
return;
}
};
let base_ty = base_place.ty(self.builder.body, self.builder.tcx).ty;
let len: u64 = match base_ty.kind() {
ty::Array(_, size) => size.eval_usize(self.builder.tcx, self.builder.param_env),
_ => bug!("from_end: false slice pattern of non-array type"),
};
for offset in from..to {
let elem =
ProjectionElem::ConstantIndex { offset, min_length: len, from_end: false };
let path =
self.add_move_path(base_path, elem, |tcx| tcx.mk_place_elem(base_place, elem));
self.record_move(place, path);
}
} else {
match self.move_path_for(place) {
Ok(path) | Err(MoveError::UnionMove { path }) => self.record_move(place, path),
Err(error @ MoveError::IllegalMove { .. }) => {
self.builder.errors.push((place, error));
}
};
}
}
fn record_move(&mut self, place: Place<'tcx>, path: MovePathIndex) {
let move_out = self.builder.data.moves.push(MoveOut { path, source: self.loc });
debug!(
"gather_move({:?}, {:?}): adding move {:?} of {:?}",
self.loc, place, move_out, path
);
self.builder.data.path_map[path].push(move_out);
self.builder.data.loc_map[self.loc].push(move_out);
}
fn gather_init(&mut self, place: PlaceRef<'tcx>, kind: InitKind) {
debug!("gather_init({:?}, {:?})", self.loc, place);
let mut place = place;
// Check if we are assigning into a field of a union, if so, lookup the place
// of the union so it is marked as initialized again.
if let Some((place_base, ProjectionElem::Field(_, _))) = place.last_projection() {
if place_base.ty(self.builder.body, self.builder.tcx).ty.is_union() {
place = place_base;
}
}
if let LookupResult::Exact(path) = self.builder.data.rev_lookup.find(place) {
let init = self.builder.data.inits.push(Init {
location: InitLocation::Statement(self.loc),
path,
kind,
});
debug!(
"gather_init({:?}, {:?}): adding init {:?} of {:?}",
self.loc, place, init, path
);
self.builder.data.init_path_map[path].push(init);
self.builder.data.init_loc_map[self.loc].push(init);
}
}
}<|fim▁end|> | | TerminatorKind::GeneratorDrop
| TerminatorKind::Unreachable => {}
TerminatorKind::Assert { ref cond, .. } => { |
<|file_name|>angularfire-tests.ts<|end_file_name|><|fim▁begin|>/// <reference path="angularfire.d.ts"/>
var myapp = angular.module("myapp", ["firebase"]);
interface AngularFireScope extends ng.IScope {
data: any;
}
var url = "https://myapp.firebaseio.com";
myapp.controller("MyController", ["$scope", "$firebase", '$FirebaseObject', '$FirebaseArray',
function ($scope: AngularFireScope, $firebase: AngularFireService, $FirebaseObject: AngularFireObjectService, $FirebaseArray: AngularFireArrayService) {
var ref = new Firebase(url);
var sync = $firebase(ref);
// AngularFire
{
sync.$asArray();
sync.$asObject();
sync.$ref();
sync.$remove();
sync.$push({ foo: "foo data" });
sync.$set("foo", 1);
sync.$set({ foo: 2 });
sync.$update({ foo: 3 });
sync.$update("foo", { bar: 1 });
// Increment the message count by 1
sync.$transaction('count', function (currentCount) {
if (!currentCount) return 1; // Initial value for counter.
if (currentCount < 0) return; // Return undefined to abort transaction.
return currentCount + 1; // Increment the count by 1.
}).then(function (snapshot) {
if (!snapshot) {
// Handle aborted transaction.
} else {
// Do something.
console.log(snapshot.val());
}
}, function (err) {
// Handle the error condition.
console.log(err.stack);
});
}
// AngularFireObject
{
var obj = $FirebaseObject(ref);
// $id
if (obj.$id !== ref.name()) throw "error";
// $loaded()
obj.$loaded().then((data) => {
if (data !== obj) throw "error";
// $priority
obj.$priority;
// $value, $save()
obj.$value = "foobar";
obj.$save();
});
// $ref()
if (obj.$ref() !== ref) throw "error";
// $bindTo()
obj.$bindTo($scope, "data").then(function () {
console.log($scope.data);
$scope.data.foo = "baz"; // will be saved to Firebase
sync.$set({ foo: "baz" }); // this would update Firebase and $scope.data
});
// $watch()
var unwatch = obj.$watch(function () {
console.log("data changed!");
});
unwatch();
// $destroy()
obj.$destroy();
// $extend()
var NewFactory = $FirebaseObject.$extend({
getMyFavoriteColor: function () {
return this.favoriteColor + ", no green!"; // obscure Monty Python reference
}
});
var customObj = $firebase(ref, { objectFactory: NewFactory }).$asObject();
}
// AngularFireArray
{
var list = $FirebaseArray(ref);
// $ref()
if (list.$ref() !== ref) throw "error";
// $add()
list.$add({ foo: "foo value" });
// $keyAt()
var key = list.$keyAt(0);
// $indexFor()
var index = list.$indexFor(key);
// $getRecord()
var item = list.$getRecord(key);
// $save()
item["bar"] = "bar value";
list.$save(item);
// $remove()
list.$remove(item);
// $loaded()
list.$loaded().then(data => {
if (data !== list) throw "error";
});
// $watch()
var unwatch = list.$watch((event, key, prevChild) => {
switch (event) {
case "child_added":
console.log(key + " added");
break;
case "child_changed":
console.log(key + " changed");
break;
case "child_moved":
console.log(key + " moved");
break;
case "child_removed":
console.log(key + " removed");
break;
default:
throw "error";
}
});
unwatch();
// $destroy()
list.$destroy();
// $extend()
var ArrayWithSum = $FirebaseArray.$extend({
sum: function () {
var total = 0;
angular.forEach(this.$list, function (rec) {
total += rec.x;
});
return total;
}
});
var list = $firebase(ref, { arrayFactory: ArrayWithSum }).$asArray();
list.$loaded().then(function () {
console.log("List has " + (<any>list).sum() + " items");
});
}
}
]);
interface AngularFireAuthScope extends ng.IScope {
loginObj: AngularFireAuth;
}
myapp.controller("MyAuthController", ["$scope", "$firebaseAuth",
function($scope: AngularFireAuthScope, $firebaseAuth: AngularFireAuthService) {
var dataRef = new Firebase(url);
$scope.loginObj = $firebaseAuth(dataRef);
$scope.loginObj.$getAuth();
var credentials = {<|fim▁hole|> };
var resetPasswordCredentials = {
email: 'my@email.com'
};
var changePasswordCredentials = {
email: 'my@email.com',
oldPassword: 'mypassword',
newPassword: 'mypassword'
};
var changeUserCredentials = {
oldEmail: 'my@email.com',
newEmail: 'my@email.com',
password: 'mypassword'
};
$scope.loginObj.$authWithCustomToken("token").then(_ => {});
$scope.loginObj.$authAnonymously().then(_ => {});
$scope.loginObj.$authWithPassword(credentials).then(_ => {});
$scope.loginObj.$authWithOAuthPopup("github").then(_ => {});
$scope.loginObj.$authWithOAuthRedirect("google").then(_ => {});
$scope.loginObj.$authWithOAuthToken("twitter", "token").then(_ => {});
$scope.loginObj.$getAuth();
$scope.loginObj.$onAuth(() => {});
$scope.loginObj.$unauth();
$scope.loginObj.$waitForAuth();
$scope.loginObj.$requireAuth();
$scope.loginObj.$createUser(credentials).then(_ => {});
$scope.loginObj.$removeUser(credentials).then(_ => {});
$scope.loginObj.$changeEmail(changeUserCredentials).then(_ => {});
$scope.loginObj.$changePassword(changePasswordCredentials).then(_ => {});
$scope.loginObj.$resetPassword(resetPasswordCredentials).then(_ => {});
}
]);<|fim▁end|> | email: 'my@email.com',
password: 'mypassword' |
<|file_name|>procurement_pull.py<|end_file_name|><|fim▁begin|>##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from openerp import netsvc
from openerp.tools.translate import _
class procurement_order(osv.osv):
_inherit = 'procurement.order'
def check_buy(self, cr, uid, ids, context=None):<|fim▁hole|> for procurement in self.browse(cr, uid, ids, context=context):
for line in procurement.product_id.flow_pull_ids:
if line.location_id==procurement.location_id:
return line.type_proc=='buy'
return super(procurement_order, self).check_buy(cr, uid, ids)
def check_produce(self, cr, uid, ids, context=None):
for procurement in self.browse(cr, uid, ids, context=context):
for line in procurement.product_id.flow_pull_ids:
if line.location_id==procurement.location_id:
return line.type_proc=='produce'
return super(procurement_order, self).check_produce(cr, uid, ids)
def check_move(self, cr, uid, ids, context=None):
for procurement in self.browse(cr, uid, ids, context=context):
for line in procurement.product_id.flow_pull_ids:
if line.location_id==procurement.location_id:
return (line.type_proc=='move') and (line.location_src_id)
return False
def action_move_create(self, cr, uid, ids, context=None):
proc_obj = self.pool.get('procurement.order')
move_obj = self.pool.get('stock.move')
picking_obj=self.pool.get('stock.picking')
wf_service = netsvc.LocalService("workflow")
for proc in proc_obj.browse(cr, uid, ids, context=context):
line = None
for line in proc.product_id.flow_pull_ids:
if line.location_id == proc.location_id:
break
assert line, 'Line cannot be False if we are on this state of the workflow'
origin = (proc.origin or proc.name or '').split(':')[0] +':'+line.name
picking_id = picking_obj.create(cr, uid, {
'origin': origin,
'company_id': line.company_id and line.company_id.id or False,
'type': line.picking_type,
'stock_journal_id': line.journal_id and line.journal_id.id or False,
'move_type': 'one',
'partner_id': line.partner_address_id.id,
'note': _('Picking for pulled procurement coming from original location %s, pull rule %s, via original Procurement %s (#%d)') % (proc.location_id.name, line.name, proc.name, proc.id),
'invoice_state': line.invoice_state,
})
move_id = move_obj.create(cr, uid, {
'name': line.name,
'picking_id': picking_id,
'company_id': line.company_id and line.company_id.id or False,
'product_id': proc.product_id.id,
'date': proc.date_planned,
'product_qty': proc.product_qty,
'product_uom': proc.product_uom.id,
'product_uos_qty': (proc.product_uos and proc.product_uos_qty)\
or proc.product_qty,
'product_uos': (proc.product_uos and proc.product_uos.id)\
or proc.product_uom.id,
'partner_id': line.partner_address_id.id,
'location_id': line.location_src_id.id,
'location_dest_id': line.location_id.id,
'move_dest_id': proc.move_id and proc.move_id.id or False, # to verif, about history ?
'tracking_id': False,
'cancel_cascade': line.cancel_cascade,
'state': 'confirmed',
'note': _('Move for pulled procurement coming from original location %s, pull rule %s, via original Procurement %s (#%d)') % (proc.location_id.name, line.name, proc.name, proc.id),
})
if proc.move_id and proc.move_id.state in ('confirmed'):
move_obj.write(cr,uid, [proc.move_id.id], {
'state':'waiting'
}, context=context)
proc_id = proc_obj.create(cr, uid, {
'name': line.name,
'origin': origin,
'note': _('Pulled procurement coming from original location %s, pull rule %s, via original Procurement %s (#%d)') % (proc.location_id.name, line.name, proc.name, proc.id),
'company_id': line.company_id and line.company_id.id or False,
'date_planned': proc.date_planned,
'product_id': proc.product_id.id,
'product_qty': proc.product_qty,
'product_uom': proc.product_uom.id,
'product_uos_qty': (proc.product_uos and proc.product_uos_qty)\
or proc.product_qty,
'product_uos': (proc.product_uos and proc.product_uos.id)\
or proc.product_uom.id,
'location_id': line.location_src_id.id,
'procure_method': line.procure_method,
'move_id': move_id,
})
wf_service = netsvc.LocalService("workflow")
wf_service.trg_validate(uid, 'stock.picking', picking_id, 'button_confirm', cr)
wf_service.trg_validate(uid, 'procurement.order', proc_id, 'button_confirm', cr)
if proc.move_id:
move_obj.write(cr, uid, [proc.move_id.id],
{'location_id':proc.location_id.id})
msg = _('Pulled from another location.')
self.write(cr, uid, [proc.id], {'state':'running', 'message': msg})
self.message_post(cr, uid, [proc.id], body=msg, context=context)
# trigger direct processing (the new procurement shares the same planned date as the original one, which is already being processed)
wf_service.trg_validate(uid, 'procurement.order', proc_id, 'button_check', cr)
return False
procurement_order()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|> | |
<|file_name|>setcounter.go<|end_file_name|><|fim▁begin|>package core
import (
"github.com/MG-RAST/AWE/lib/core/cwl"
"github.com/MG-RAST/AWE/lib/logger"
)
type SetCounter struct {
Counter []int
Max []int
NumberOfSets int
Scatter_type string
//position_in_counter int
}<|fim▁hole|>func NewSetCounter(numberOfSets int, array []cwl.Array, scatter_type string) (sc *SetCounter) {
logger.Debug(3, "(NewSetCounter) numberOfSets: %d", numberOfSets)
logger.Debug(3, "(NewSetCounter) array: %d", len(array))
logger.Debug(3, "(NewSetCounter) scatter_type: %s", scatter_type)
sc = &SetCounter{}
sc.NumberOfSets = numberOfSets
//sc.position_in_counter = sc.NumberOfSets
sc.Counter = make([]int, sc.NumberOfSets)
sc.Max = make([]int, sc.NumberOfSets)
for i := 0; i < sc.NumberOfSets; i++ {
sc.Counter[i] = 0
sc.Max[i] = array[i].Len() - 1 // indicates last valid position. Needed for carry-over, e.g. 9+1 = 0
}
sc.Scatter_type = scatter_type
return
}
func (sc *SetCounter) Increment() (ok bool) {
if sc.Scatter_type == "cross" {
//fmt.Printf("(SetCounter/Increment) cross\n")
for position_in_counter := sc.NumberOfSets - 1; position_in_counter >= 0; position_in_counter-- {
//fmt.Printf("(SetCounter/Increment) position_in_counter: %d\n", position_in_counter)
//fmt.Printf("(SetCounter/Increment) sc.Counter[position_in_counter]: %d\n", sc.Counter[position_in_counter])
if sc.Counter[position_in_counter] < sc.Max[position_in_counter] {
sc.Counter[position_in_counter] += 1
ok = true
return
}
// sc.Counter[position_in_counter] == sc.Max[position_in_counter]
sc.Counter[position_in_counter] = 0
// carry over - continue
}
} else {
//fmt.Printf("(SetCounter/Increment) dot\n")
// "dot" dotproduct
// this is not very efficient but keeps the code simpler, as only one counter is used
if sc.Counter[0] >= sc.Max[0] {
//end of counter
ok = false
return
}
// increment position in each array
for position_in_counter := sc.NumberOfSets - 1; position_in_counter >= 0; position_in_counter-- {
sc.Counter[position_in_counter] += 1
//fmt.Printf("(SetCounter/Increment) sc.Counter[position_in_counter]: %d \n", sc.Counter[position_in_counter])
}
ok = true
return
}
// carry over not possible, done.
ok = false
return
}<|fim▁end|> | |
<|file_name|>witness.go<|end_file_name|><|fim▁begin|>package client
import (<|fim▁hole|> "github.com/docker/notary/client/changelist"
"github.com/docker/notary/tuf"
"github.com/docker/notary/tuf/data"
)
// Witness creates change objects to witness (i.e. re-sign) the given
// roles on the next publish. One change is created per role
func (r *NotaryRepository) Witness(roles ...data.RoleName) ([]data.RoleName, error) {
var err error
successful := make([]data.RoleName, 0, len(roles))
for _, role := range roles {
// scope is role
c := changelist.NewTUFChange(
changelist.ActionUpdate,
role,
changelist.TypeWitness,
"",
nil,
)
err = r.changelist.Add(c)
if err != nil {
break
}
successful = append(successful, role)
}
return successful, err
}
func witnessTargets(repo *tuf.Repo, invalid *tuf.Repo, role data.RoleName) error {
if r, ok := repo.Targets[role]; ok {
// role is already valid, mark for re-signing/updating
r.Dirty = true
return nil
}
if roleObj, err := repo.GetDelegationRole(role); err == nil && invalid != nil {
// A role with a threshold > len(keys) is technically invalid, but we let it build in the builder because
// we want to be able to download the role (which may still have targets on it), add more keys, and then
// witness the role, thus bringing it back to valid. However, if no keys have been added before witnessing,
// then it is still an invalid role, and can't be witnessed because nothing can bring it back to valid.
if roleObj.Threshold > len(roleObj.Keys) {
return data.ErrInvalidRole{
Role: role,
Reason: "role does not specify enough valid signing keys to meet its required threshold",
}
}
if r, ok := invalid.Targets[role]; ok {
// role is recognized but invalid, move to valid data and mark for re-signing
repo.Targets[role] = r
r.Dirty = true
return nil
}
}
// role isn't recognized, even as invalid
return data.ErrInvalidRole{
Role: role,
Reason: "this role is not known",
}
}<|fim▁end|> | |
<|file_name|>cmd_from_callable.py<|end_file_name|><|fim▁begin|>from doit.action import CmdAction
def task_hello():
"""hello cmd """
def create_cmd_string():
return "echo hi"
return {<|fim▁hole|><|fim▁end|> | 'actions': [CmdAction(create_cmd_string)],
'verbosity': 2,
} |
<|file_name|>MeshComponent.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 2014 UT-Battelle, LLC.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Initial API and implementation and/or initial documentation - Jay Jay Billings,
* Jordan H. Deyton, Dasha Gorin, Alexander J. McCaskey, Taylor Patterson,
* Claire Saunders, Matthew Wang, Anna Wojtowicz
*******************************************************************************/
package org.eclipse.ice.datastructures.form;
import java.util.ArrayList;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import org.eclipse.ice.datastructures.ICEObject.Component;
import org.eclipse.ice.datastructures.ICEObject.ICEObject;
import org.eclipse.ice.datastructures.componentVisitor.IComponentVisitor;
import org.eclipse.ice.viz.service.datastructures.IVizUpdateable;
import org.eclipse.ice.viz.service.datastructures.IVizUpdateableListener;
import org.eclipse.ice.viz.service.mesh.datastructures.Edge;
import org.eclipse.ice.viz.service.mesh.datastructures.IMeshPart;
import org.eclipse.ice.viz.service.mesh.datastructures.IMeshPartVisitor;
import org.eclipse.ice.viz.service.mesh.datastructures.Polygon;
import org.eclipse.ice.viz.service.mesh.datastructures.Vertex;
import org.eclipse.ice.viz.service.mesh.datastructures.VizMeshComponent;
/**
* <p>
* A wrapper class for a VizMeshComponent. It provides all the functionality of
* a VizMeshComponent, but delegates to a wrapped VizMeshComponent for all
* actual implementations.
* </p>
*
* @author Jordan H. Deyton
* @author Robert Smith
*/
@XmlRootElement(name = "MeshComponent")
@XmlAccessorType(XmlAccessType.FIELD)
public class MeshComponent extends ICEObject implements Component, IMeshPart,
IVizUpdateableListener {
/**
* The wrapped VizMeshComponent.
*/
private VizMeshComponent mesh;
/**
* <p>
* The default constructor for a MeshComponent. Initializes the list of
* polygons and any associated bookkeeping structures.
* </p>
*
*/
public MeshComponent() {
super();
mesh = new VizMeshComponent();
mesh.register(this);
return;
}
/**
* Getter method for the wrapped VizMeshComponent
*
* @return The wrapped VizMeshComponent
*/
public VizMeshComponent getMesh() {
return mesh;
}
/**
* Setter method for the wrapped VizMeshComponent
*
* @param newMesh
* The new mesh to hold
*/
public void setMesh(VizMeshComponent newMesh) {
mesh = newMesh;
}
/**
* <p>
* Adds a polygon to the MeshComponent. The polygon is expected to have a
* unique polygon ID. If the polygon can be added, a notification is sent to
* listeners. If the polygon uses equivalent vertices or edges with
* different references, then a new polygon is created with references to
* the vertices and edges already known by this MeshComponent.
* </p>
*
* @param polygon
* <p>
* The new polygon to add to the existing list.
* </p>
*/
public void addPolygon(Polygon polygon) {
mesh.addPolygon(polygon);
notifyListeners();
return;
}
/**
* <p>
* Removes a polygon from the MeshComponent. This will also remove any
* vertices and edges used only by this polygon. If a polygon was removed, a
* notification is sent to listeners.
* </p>
*
* @param id
* <p>
* The ID of the polygon to remove from the existing list.
* </p>
*/
public void removePolygon(int id) {
mesh.removePolygon(id);
notifyListeners();
return;
}
/**
* <p>
* Removes a list polygons from the MeshComponent. This will also remove any
* vertices and edges used by these polygons. If a polygon was removed, a
* notification is sent to listeners.
* </p>
*
* @param ids
* <p>
* An ArrayList containing the IDs of the polygons to remove from
* the MeshComponent.
* </p>
*/
public void removePolygons(ArrayList<Integer> ids) {
mesh.removePolygons(ids);
notifyListeners();
return;
}
/**
* <p>
* Gets a list of all polygons stored in the MeshComponent ordered by their
* IDs.
* </p>
*
* @return <p>
* A list of polygons contained in this MeshComponent.
* </p>
*/
public ArrayList<Polygon> getPolygons() {
return mesh.getPolygons();
}
/**
* <p>
* Gets a Polygon instance corresponding to an ID.
* </p>
*
* @param id
* <p>
* The ID of the polygon.
* </p>
* @return <p>
* The polygon referred to by the ID, or null if there is no polygon
* with the ID.
* </p>
*/
public Polygon getPolygon(int id) {
return mesh.getPolygon(id);
}
/**
* <p>
* Returns the next available ID for polygons.
* </p>
*
* @return <p>
* The greatest polygon ID (or zero) plus one.
* </p>
*/
public int getNextPolygonId() {
return mesh.getNextPolygonId();
}
/**
* <p>
* Sets the list of all polygons stored in the MeshComponent.
* </p>
*
* @param polygons
* <p>
* The list of polygons to replace the existing list of polygons
* in the MeshComponent.
* </p>
*/
public void setPolygons(ArrayList<Polygon> polygons) {
mesh.setPolygons(polygons);
}
/**
* <p>
* Gets a list of all vertices associated with this MeshComponent.
* </p>
*
* @return <p>
* All vertices managed by this MeshComponent.
* </p>
*/
public ArrayList<Vertex> getVertices() {
return mesh.getVertices();
}
/**
* <p>
* Gets a Vertex instance corresponding to an ID.
* </p>
*
* @param id
* <p>
* The ID of the vertex.
* </p>
* @return <p>
* The vertex referred to by the ID, or null if the ID is invalid.
* </p>
*/
public Vertex getVertex(int id) {
return mesh.getVertex(id);
}
/**
* <p>
* Returns the next available ID for vertices.
* </p>
*
* @return <p>
* The greatest vertex ID (or zero) plus one.
* </p>
*/
public int getNextVertexId() {
return mesh.getNextVertexId();
}
/**
* <p>
* Gets a list of all edges associated with this MeshComponent.
* </p>
*
* @return <p>
* All edges managed by this MeshComponent.
* </p>
*/
public ArrayList<Edge> getEdges() {
return mesh.getEdges();
}
/**
* <p>
* Gets an Edge instance corresponding to an ID.
* </p>
*
* @param id
* <p>
* The ID of the edge.
* </p>
* @return <p>
* The edge referred to by the ID, or null if the ID is invalid.
* </p>
*/
public Edge getEdge(int id) {
return mesh.getEdge(id);
}
/**
* <p>
* Returns the next available ID for edges.
* </p>
*
* @return <p>
* The greatest edge ID (or zero) plus one.
* </p>
*/
public int getNextEdgeId() {
return mesh.getNextEdgeId();
}
/**
* <p>
* Returns a list of Edges attached to the Vertex with the specified ID.
* </p>
*
* @param id
* <p>
* The ID of the vertex.
* </p>
* @return <p>
* An ArrayList of Edges that are attached to the vertex with the
* specified ID. If there are no such edges, e.g., if the vertex ID
* is invalid, the list will be empty.
* </p>
*/
public ArrayList<Edge> getEdgesFromVertex(int id) {
return getEdgesFromVertex(id);
}
/**
* <p>
* Returns a list of Polygons containing the Vertex with the specified ID.
* </p>
*
* @param id
* <p>
* The ID of the vertex.
* </p>
* @return <p>
* An ArrayList of Polygons that contain the vertex with the
* specified ID. If there are no such polygons, e.g., if the vertex
* ID is invalid, the list will be empty.
* </p>
*/
public ArrayList<Polygon> getPolygonsFromVertex(int id) {
return mesh.getPolygonsFromVertex(id);
}
/**
* <p>
* Returns a list of Polygons containing the Edge with the specified ID.
* </p>
*
* @param id
* <p>
* The ID of the edge.
* </p>
* @return <p>
* An ArrayList of Polygons that contain the edge with the specified
* ID. If there are no such polygons, e.g., if the edge ID is
* invalid, the list will be empty.
* </p>
*/
public ArrayList<Polygon> getPolygonsFromEdge(int id) {
return mesh.getPolygonsFromEdge(id);
}
/**
* <p>
* Returns an Edge that connects two specified vertices if one exists.
* </p>
*
* @param firstId
* <p>
* The ID of the first vertex.
* </p>
* @param secondId
* <p>
* The ID of the second vertex.
* </p>
*
* @return <p>
* An Edge instance that connects the first and second vertices, or
* null if no such edge exists.
* </p>
*/
public Edge getEdgeFromVertices(int firstId, int secondId) {
return mesh.getEdgeFromVertices(firstId, secondId);
}
/**
* <p>
* Returns a list containing all Polygons in the MeshComponent whose
* vertices are a subset of the supplied list of vertices.
* </p>
*
* @param vertices
* <p>
* A collection of vertices.
* </p><|fim▁hole|> * composed of some subset of the specified vertices.
* </p>
*/
public ArrayList<Polygon> getPolygonsFromVertices(ArrayList<Vertex> vertices) {
return mesh.getPolygonsFromVertices(vertices);
}
/**
* <p>
* This operation returns the hash value of the MeshComponent.
* </p>
*
* @return <p>
* The hashcode of the ICEObject.
* </p>
*/
@Override
public int hashCode() {
return mesh.hashCode();
}
/**
* <p>
* This operation is used to check equality between this MeshComponent and
* another MeshComponent. It returns true if the MeshComponents are equal
* and false if they are not.
* </p>
*
* @param otherObject
* <p>
* The other ICEObject that should be compared with this one.
* </p>
* @return <p>
* True if the ICEObjects are equal, false otherwise.
* </p>
*/
@Override
public boolean equals(Object otherObject) {
// By default, the objects are not equivalent.
boolean equals = false;
// Check the reference.
if (this == otherObject) {
equals = true;
}
// Check the information stored in the other object.
else if (otherObject != null && otherObject instanceof MeshComponent) {
// We can now cast the other object.
MeshComponent component = (MeshComponent) otherObject;
// Compare the values between the two objects.
equals = (super.equals(otherObject) && mesh.equals(component.mesh));
// The polygons are the only defining feature of the MeshComponent
// (aside from the super properties). If the polygon lists are
// equivalent, we can safely expect the other bookkeeping structures
// are identical.
}
return equals;
}
/**
* <p>
* This operation copies the contents of a MeshComponent into the current
* object using a deep copy.
* </p>
*
* @param component
* <p>
* The ICEObject from which the values should be copied
* </p>
*/
public void copy(MeshComponent component) {
// Check the parameters.
if (component != null) {
super.copy(component);
mesh.copy(component.mesh);
notifyListeners();
}
return;
}
/**
* <p>
* This operation returns a clone of the MeshComponent using a deep copy.
* </p>
*
* @return <p>
* The new clone
* </p>
*/
@Override
public Object clone() {
// Initialize a new object.
MeshComponent object = new MeshComponent();
// Copy the contents from this one.
object.copy(this);
// Return the newly instantiated object.
return object;
}
/**
* (non-Javadoc)
*
* @see Component#accept(IComponentVisitor visitor)
*/
@Override
public void accept(IComponentVisitor visitor) {
// Call the visitor's visit(MeshComponent) method.
if (visitor != null) {
visitor.visit(this);
}
return;
}
/**
* <p>
* This method calls the {@link IMeshPartVisitor}'s visit method.
* </p>
*
* @param visitor
* <p>
* The {@link IMeshPartVisitor} that is visiting this
* {@link IMeshPart}.
* </p>
*/
@Override
public void acceptMeshVisitor(IMeshPartVisitor visitor) {
if (visitor != null) {
visitor.visit(this);
}
return;
}
@Override
public void update(IVizUpdateable component) {
notifyListeners();
}
}<|fim▁end|> | * @return <p>
* An ArrayList of all Polygons in the MeshComponent that are |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>"""
Views for contract feature
"""
import logging
from edxmako.shortcuts import render_to_response
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login
from django.shortcuts import redirect
from django.core.urlresolvers import reverse
from biz.djangoapps.ga_manager.models import Manager
log = logging.getLogger(__name__)
LOGIN_ADMIN = 1
LOGIN_ERROR = -1
LOGIN_DEFAULT = 0
LOGIN_ERROR_AUTH = -2
def index(request):
"""
lists content of Login
"""
next_url = request.GET.get('next', '')
if request.user.is_active:
if request.user.is_authenticated():
if next_url == '':
return redirect(reverse('biz:index'))
else:
return redirect(next_url)
account_check = LOGIN_DEFAULT
post_email = request.POST.get('email', '')
post_password = request.POST.get("password")
post_remember = False
if request.method == 'POST':
next_url = request.POST.get("next", '')
if "remember" in request.POST:
post_remember = True
if not 0 < len(post_email) <= 255:
log.info('Login failed - email length over')
account_check = LOGIN_ERROR
if not 0 < len(post_password) <= 255:
log.info('Login failed - password length over')
account_check = LOGIN_ERROR
if User.objects.filter(email=post_email, is_active=True).exists():
user = User.objects.get(email=post_email, is_active=True)
else:
log.info("Login failed - password for {0} is invalid".format(post_email))
account_check = LOGIN_ERROR
if account_check == LOGIN_ERROR:
return render_to_response('gx_login/login.html', {'account_check': account_check, 'next_url': next_url, 'email': post_email})
if user.check_password(post_password):
mgs = Manager.get_managers(user)
if any([mg.is_aggregator() for mg in mgs]):
account_check = LOGIN_ADMIN
if any([mg.is_director() for mg in mgs]):
account_check = LOGIN_ADMIN
if any([mg.is_manager() for mg in mgs]):
account_check = LOGIN_ADMIN
if any([mg.is_platformer() for mg in mgs]):
account_check = LOGIN_ADMIN
if account_check == LOGIN_ADMIN:
# Auto Updating Last Login Datetime
user = authenticate(username=user.username, password=post_password)
login(request, user)
if post_remember:
# Session Retention 7 days
request.session.set_expiry(604800)
else:
request.session.set_expiry(0)
if next_url == '':
return redirect(reverse('biz:index'))
else:
return redirect(next_url)
else:<|fim▁hole|> account_check = LOGIN_ERROR_AUTH
else:
log.info('Login failed - password mismatch')
account_check = LOGIN_ERROR
return render_to_response('gx_login/login.html', {'account_check': account_check, 'next_url': next_url, 'email': post_email})<|fim▁end|> | |
<|file_name|>strip_icc.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/globocom/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com timehome@corp.globo.com
from thumbor.filters import BaseFilter, filter_method
<|fim▁hole|> def strip_icc(self):
self.engine.strip_icc()<|fim▁end|> |
class Filter(BaseFilter):
@filter_method() |
<|file_name|>uint_macros.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![macro_escape]
#![doc(hidden)]
#![allow(unsigned_negate)]
macro_rules! uint_module (($T:ty) => (
// String conversion functions and impl str -> num
/// Parse a byte slice as a number in the given base
///
/// Yields an `Option` because `buf` may or may not actually be parseable.
///
/// # Examples
///
/// ```
/// let num = std::uint::parse_bytes([49,50,51,52,53,54,55,56,57], 10);
/// assert!(num == Some(123456789));
/// ```
#[inline]
pub fn parse_bytes(buf: &[u8], radix: uint) -> Option<$T> {
strconv::from_str_bytes_common(buf, radix, false, false, false,
strconv::ExpNone, false, false)
}
impl FromStr for $T {
#[inline]
fn from_str(s: &str) -> Option<$T> {
strconv::from_str_common(s, 10u, false, false, false,
strconv::ExpNone, false, false)
}
}
impl FromStrRadix for $T {
#[inline]
fn from_str_radix(s: &str, radix: uint) -> Option<$T> {
strconv::from_str_common(s, radix, false, false, false,
strconv::ExpNone, false, false)
}
}
// String conversion functions and impl num -> str
/// Convert to a string as a byte slice in a given base.
///
/// Use in place of x.to_str() when you do not need to store the string permanently
///
/// # Examples
///
/// ```
/// std::uint::to_str_bytes(123, 10, |v| {
/// assert!(v == "123".as_bytes());
/// });
/// ```
#[inline]
pub fn to_str_bytes<U>(n: $T, radix: uint, f: |v: &[u8]| -> U) -> U {
// The radix can be as low as 2, so we need at least 64 characters for a
// base 2 number.
let mut buf = [0u8, ..64];
let mut cur = 0;
strconv::int_to_str_bytes_common(n, radix, strconv::SignNone, |i| {
buf[cur] = i;
cur += 1;
});
f(buf.slice(0, cur))
}
impl ToStrRadix for $T {
/// Convert to a string in a given base.
#[inline]
fn to_str_radix(&self, radix: uint) -> ~str {
use slice::Vector;
use str::StrAllocating;
let mut buf = ::vec::Vec::new();
strconv::int_to_str_bytes_common(*self, radix, strconv::SignNone, |i| {
buf.push(i);
});
// We know we generated valid utf-8, so we don't need to go through that
// check.
unsafe { str::raw::from_utf8(buf.as_slice()).to_owned() }
}
}
#[cfg(test)]
mod tests {
use prelude::*;
use super::*;
use num::ToStrRadix;
use str::StrSlice;
use u16;<|fim▁hole|> assert_eq!((1 as $T).to_str_radix(10u), "1".to_owned());
assert_eq!((2 as $T).to_str_radix(10u), "2".to_owned());
assert_eq!((11 as $T).to_str_radix(10u), "11".to_owned());
assert_eq!((11 as $T).to_str_radix(16u), "b".to_owned());
assert_eq!((255 as $T).to_str_radix(16u), "ff".to_owned());
assert_eq!((0xff as $T).to_str_radix(10u), "255".to_owned());
}
#[test]
pub fn test_from_str() {
assert_eq!(from_str::<$T>("0"), Some(0u as $T));
assert_eq!(from_str::<$T>("3"), Some(3u as $T));
assert_eq!(from_str::<$T>("10"), Some(10u as $T));
assert_eq!(from_str::<u32>("123456789"), Some(123456789 as u32));
assert_eq!(from_str::<$T>("00100"), Some(100u as $T));
assert!(from_str::<$T>("").is_none());
assert!(from_str::<$T>(" ").is_none());
assert!(from_str::<$T>("x").is_none());
}
#[test]
pub fn test_parse_bytes() {
use str::StrSlice;
assert_eq!(parse_bytes("123".as_bytes(), 10u), Some(123u as $T));
assert_eq!(parse_bytes("1001".as_bytes(), 2u), Some(9u as $T));
assert_eq!(parse_bytes("123".as_bytes(), 8u), Some(83u as $T));
assert_eq!(u16::parse_bytes("123".as_bytes(), 16u), Some(291u as u16));
assert_eq!(u16::parse_bytes("ffff".as_bytes(), 16u), Some(65535u as u16));
assert_eq!(parse_bytes("z".as_bytes(), 36u), Some(35u as $T));
assert!(parse_bytes("Z".as_bytes(), 10u).is_none());
assert!(parse_bytes("_".as_bytes(), 2u).is_none());
}
#[test]
fn test_uint_to_str_overflow() {
let mut u8_val: u8 = 255_u8;
assert_eq!(u8_val.to_str(), "255".to_owned());
u8_val += 1 as u8;
assert_eq!(u8_val.to_str(), "0".to_owned());
let mut u16_val: u16 = 65_535_u16;
assert_eq!(u16_val.to_str(), "65535".to_owned());
u16_val += 1 as u16;
assert_eq!(u16_val.to_str(), "0".to_owned());
let mut u32_val: u32 = 4_294_967_295_u32;
assert_eq!(u32_val.to_str(), "4294967295".to_owned());
u32_val += 1 as u32;
assert_eq!(u32_val.to_str(), "0".to_owned());
let mut u64_val: u64 = 18_446_744_073_709_551_615_u64;
assert_eq!(u64_val.to_str(), "18446744073709551615".to_owned());
u64_val += 1 as u64;
assert_eq!(u64_val.to_str(), "0".to_owned());
}
#[test]
fn test_uint_from_str_overflow() {
let mut u8_val: u8 = 255_u8;
assert_eq!(from_str::<u8>("255"), Some(u8_val));
assert!(from_str::<u8>("256").is_none());
u8_val += 1 as u8;
assert_eq!(from_str::<u8>("0"), Some(u8_val));
assert!(from_str::<u8>("-1").is_none());
let mut u16_val: u16 = 65_535_u16;
assert_eq!(from_str::<u16>("65535"), Some(u16_val));
assert!(from_str::<u16>("65536").is_none());
u16_val += 1 as u16;
assert_eq!(from_str::<u16>("0"), Some(u16_val));
assert!(from_str::<u16>("-1").is_none());
let mut u32_val: u32 = 4_294_967_295_u32;
assert_eq!(from_str::<u32>("4294967295"), Some(u32_val));
assert!(from_str::<u32>("4294967296").is_none());
u32_val += 1 as u32;
assert_eq!(from_str::<u32>("0"), Some(u32_val));
assert!(from_str::<u32>("-1").is_none());
let mut u64_val: u64 = 18_446_744_073_709_551_615_u64;
assert_eq!(from_str::<u64>("18446744073709551615"), Some(u64_val));
assert!(from_str::<u64>("18446744073709551616").is_none());
u64_val += 1 as u64;
assert_eq!(from_str::<u64>("0"), Some(u64_val));
assert!(from_str::<u64>("-1").is_none());
}
#[test]
#[should_fail]
pub fn to_str_radix1() {
100u.to_str_radix(1u);
}
#[test]
#[should_fail]
pub fn to_str_radix37() {
100u.to_str_radix(37u);
}
}
))<|fim▁end|> |
#[test]
pub fn test_to_str() {
assert_eq!((0 as $T).to_str_radix(10u), "0".to_owned()); |
<|file_name|>pathfinders.py<|end_file_name|><|fim▁begin|>from molpher.algorithms.functions import find_path
from molpher.core import ExplorationTree as ETree
class BasicPathfinder:
"""
:param settings: settings to use in the search
:type settings: `Settings`
A very basic pathfinder class that can be used to run exploration with
any combination of operations.
"""
<|fim▁hole|> super(BasicPathfinder.MaxItersReachedException, self).__init__(
"Maximum number of iterations reached while searching "
"for a path\n\t source: {0}\n\t target: {1}".format(tree.source, tree.target))
def __init__(self, settings, operations):
self.settings = settings
"""a settings class (should be a subclass of `Settings`)"""
self.tree = ETree.create(source=self.settings.source, target=self.settings.target)
""":class:`~molpher.core.ExplorationTree.ExplorationTree` used in the search"""
if self.settings.tree_params:
self.tree.params = self.settings.tree_params
self.tree.thread_count = self.settings.max_threads
self._iteration = operations
self.path = None
"""a list of SMILES strings if a path was found, `None` otherwise"""
def __call__(self):
"""
Executes the search
:return: discovered path
:rtype: `list` of `str`
"""
counter = 0
while not self.tree.path_found:
counter+=1
if counter > self.settings.max_iters:
raise BasicPathfinder.MaxItersReachedException(self.tree)
print('Iteration {0}'.format(counter))
for oper in self._iteration:
self.tree.runOperation(oper)
self.path = find_path(self.tree, self.tree.params['target'])
print('Path found:', self.path)
return self.path<|fim▁end|> | class MaxItersReachedException(Exception):
def __init__(self, tree): |
<|file_name|>toast.d.ts<|end_file_name|><|fim▁begin|>import * as React from "react";
import { IActionProps, IIntentProps, IProps } from "../../common/props";
export interface IToastProps extends IProps, IIntentProps {
/**
* Action to display in a minimal button. The toast is dismissed automatically when the
* user clicks the action button. Note that the `intent` prop is ignored (the action button
* cannot have its own intent color that might conflict with the toast's intent). Omit this
* prop to omit the action button.
*/
action?: IActionProps;
/** Name of icon to appear before message. Specify only the part of the name after `pt-icon-`. */
iconName?: string;
/** Message to display in the body of the toast. */
message: string | JSX.Element;
/**
* Callback invoked when the toast is dismissed, either by the user or by the timeout.
* The value of the argument indicates whether the toast was closed because the timeout expired.
*/
onDismiss?: (didTimeoutExpire: boolean) => void;
/**
* Milliseconds to wait before automatically dismissing toast.
* Providing a value <= 0 will disable the timeout (this is discouraged).
* @default 5000
*/
timeout?: number;
}
export declare class Toast extends React.Component<IToastProps, {}> {
static defaultProps: IToastProps;
displayName: string;
private timeoutId;<|fim▁hole|> componentDidUpdate(prevProps: IToastProps): void;
componentWillUnmount(): void;
private maybeRenderActionButton();
private maybeRenderIcon();
private handleActionClick;
private handleCloseClick;
private triggerDismiss(didTimeoutExpire);
private startTimeout;
private clearTimeout;
}
export declare const ToastFactory: React.ComponentFactory<IToastProps & {
children?: React.ReactNode;
}, Toast>;<|fim▁end|> | render(): JSX.Element;
componentDidMount(): void; |
<|file_name|>compute_squared_distance.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::animate::{AnimationFieldAttrs, AnimationInputAttrs, AnimationVariantAttrs};
use derive_common::cg;
use proc_macro2::TokenStream;
use quote::TokenStreamExt;
use syn::{DeriveInput, WhereClause};
use synstructure;
pub fn derive(mut input: DeriveInput) -> TokenStream {
let animation_input_attrs = cg::parse_input_attrs::<AnimationInputAttrs>(&input);
let no_bound = animation_input_attrs.no_bound.unwrap_or_default();
let mut where_clause = input.generics.where_clause.take();
for param in input.generics.type_params() {
if !no_bound.iter().any(|name| name.is_ident(¶m.ident)) {
cg::add_predicate(
&mut where_clause,
parse_quote!(#param: crate::values::distance::ComputeSquaredDistance),
);
}
}
let (mut match_body, needs_catchall_branch) = {
let s = synstructure::Structure::new(&input);
let needs_catchall_branch = s.variants().len() > 1;
let match_body = s.variants().iter().fold(quote!(), |body, variant| {
let arm = derive_variant_arm(variant, &mut where_clause);
quote! { #body #arm }
});
(match_body, needs_catchall_branch)
};
input.generics.where_clause = where_clause;
if needs_catchall_branch {
// This ideally shouldn't be needed, but see:
// https://github.com/rust-lang/rust/issues/68867
match_body.append_all(quote! { _ => unsafe { debug_unreachable!() } });
}
let name = &input.ident;
let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
quote! {
impl #impl_generics crate::values::distance::ComputeSquaredDistance for #name #ty_generics #where_clause {
#[allow(unused_variables, unused_imports)]
#[inline]
fn compute_squared_distance(
&self,
other: &Self,
) -> Result<crate::values::distance::SquaredDistance, ()> {
if std::mem::discriminant(self) != std::mem::discriminant(other) {
return Err(());
}
match (self, other) {
#match_body
}
}
}
}
}
fn derive_variant_arm(
variant: &synstructure::VariantInfo,
mut where_clause: &mut Option<WhereClause>,
) -> TokenStream {
let variant_attrs = cg::parse_variant_attrs_from_ast::<AnimationVariantAttrs>(&variant.ast());
let (this_pattern, this_info) = cg::ref_pattern(&variant, "this");
let (other_pattern, other_info) = cg::ref_pattern(&variant, "other");
if variant_attrs.error {
return quote! {
(&#this_pattern, &#other_pattern) => Err(()),
};
}
let sum = if this_info.is_empty() {
quote! { crate::values::distance::SquaredDistance::from_sqrt(0.) }
} else {
let mut sum = quote!();
sum.append_separated(this_info.iter().zip(&other_info).map(|(this, other)| {
let field_attrs = cg::parse_field_attrs::<DistanceFieldAttrs>(&this.ast());
if field_attrs.field_bound {
let ty = &this.ast().ty;
cg::add_predicate(
&mut where_clause,
parse_quote!(#ty: crate::values::distance::ComputeSquaredDistance),
);
}
let animation_field_attrs =
cg::parse_field_attrs::<AnimationFieldAttrs>(&this.ast());
if animation_field_attrs.constant {
quote! {
{
if #this != #other {
return Err(());
}
crate::values::distance::SquaredDistance::from_sqrt(0.)
}
}
} else {
quote! {<|fim▁hole|> crate::values::distance::ComputeSquaredDistance::compute_squared_distance(#this, #other)?
}
}
}), quote!(+));
sum
};
return quote! {
(&#this_pattern, &#other_pattern) => Ok(#sum),
};
}
#[derive(Default, FromField)]
#[darling(attributes(distance), default)]
struct DistanceFieldAttrs {
field_bound: bool,
}<|fim▁end|> | |
<|file_name|>0030_auto__add_field_agency_alias.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Agency.alias'
db.add_column('lobbyingph_agency', 'alias',
self.gf('django.db.models.fields.CharField')(default='', max_length=100),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Agency.alias'
db.delete_column('lobbyingph_agency', 'alias')
models = {
'lobbyingph.agency': {
'Meta': {'ordering': "['name']", 'object_name': 'Agency'},
'alias': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'lobbyingph.article': {
'Meta': {'ordering': "['-date']", 'object_name': 'Article'},
'date': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2012, 10, 19, 0, 0)'}),
'headline': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issue': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Issue']", 'null': 'True', 'blank': 'True'}),
'publisher': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True'}),
'quote': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'lobbyingph.bill': {
'Meta': {'ordering': "['number']", 'object_name': 'Bill'},
'bill_type': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'number': ('django.db.models.fields.CharField', [], {'default': '0', 'max_length': '10'}),
'url': ('django.db.models.fields.URLField', [], {'default': "'http://legislation.phila.gov/detailreport/?key='", 'max_length': '200'})
},
'lobbyingph.category': {
'Meta': {'ordering': "['name']", 'object_name': 'Category'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'lobbyingph.communication_method': {
'Meta': {'ordering': "['name']", 'object_name': 'Communication_Method'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'lobbyingph.exp_direct_comm': {
'Meta': {'object_name': 'Exp_Direct_Comm'},
'agencies': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lobbyingph.Agency']", 'null': 'True', 'blank': 'True'}),
'bill': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Bill']", 'null': 'True', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Category']"}),
'filing': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Filing']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issue': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Issue']", 'null': 'True', 'blank': 'True'}),
'officials': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lobbyingph.Official']", 'null': 'True', 'blank': 'True'}),
'other_desc': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),<|fim▁hole|> 'Meta': {'object_name': 'Exp_Indirect_Comm'},
'agency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Agency']", 'null': 'True', 'blank': 'True'}),
'bill': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Bill']", 'null': 'True', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Category']"}),
'filing': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Filing']"}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lobbyingph.Receipent_Group']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'issue': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Issue']", 'null': 'True', 'blank': 'True'}),
'methods': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lobbyingph.Communication_Method']", 'null': 'True', 'blank': 'True'}),
'officials': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lobbyingph.Official']", 'null': 'True', 'blank': 'True'}),
'other_desc': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.SmallIntegerField', [], {})
},
'lobbyingph.exp_other': {
'Meta': {'object_name': 'Exp_Other'},
'agency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Agency']", 'null': 'True'}),
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'filing': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Filing']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'official': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Official']", 'null': 'True'}),
'place': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Principal']", 'null': 'True'}),
'value': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '12', 'decimal_places': '2'})
},
'lobbyingph.filing': {
'Meta': {'object_name': 'Filing'},
'corrected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'error_description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'errors': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'firms': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lobbyingph.Firm']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lobbyists': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lobbyingph.Lobbyist']", 'null': 'True', 'blank': 'True'}),
'principal': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Principal']", 'null': 'True', 'blank': 'True'}),
'quarter': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'total_exp_direct_comm': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '12', 'decimal_places': '2'}),
'total_exp_indirect_comm': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '12', 'decimal_places': '2'}),
'total_exp_other': ('django.db.models.fields.DecimalField', [], {'default': '0.0', 'max_digits': '12', 'decimal_places': '2'}),
'year': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today'})
},
'lobbyingph.firm': {
'Meta': {'ordering': "['name']", 'object_name': 'Firm'},
'address1': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'address2': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'address3': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'email': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '10'})
},
'lobbyingph.issue': {
'Meta': {'ordering': "['description']", 'object_name': 'Issue'},
'bill': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Bill']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'detail_view': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'summary': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'lobbyingph.lobbyist': {
'Meta': {'ordering': "['name']", 'object_name': 'Lobbyist'},
'address1': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'address2': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'address3': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'email': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'firm': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Firm']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'principals': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lobbyingph.Principal']", 'null': 'True', 'blank': 'True'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '10'})
},
'lobbyingph.official': {
'Meta': {'ordering': "['last_name']", 'object_name': 'Official'},
'agency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Agency']", 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
},
'lobbyingph.principal': {
'Meta': {'ordering': "['name']", 'object_name': 'Principal'},
'address1': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'address2': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'address3': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'email': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '12'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '10'})
},
'lobbyingph.receipent_group': {
'Meta': {'ordering': "['name']", 'object_name': 'Receipent_Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'lobbyingph.source': {
'Meta': {'ordering': "['name']", 'object_name': 'Source'},
'filing': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lobbyingph.Filing']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
}
}
complete_apps = ['lobbyingph']<|fim▁end|> | 'position': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'})
},
'lobbyingph.exp_indirect_comm': { |
<|file_name|>version.py<|end_file_name|><|fim▁begin|>import sys
import platform
<|fim▁hole|>import twisted
import scrapy
from scrapy.command import ScrapyCommand
class Command(ScrapyCommand):
def syntax(self):
return "[-v]"
def short_desc(self):
return "Print Scrapy version"
def add_options(self, parser):
ScrapyCommand.add_options(self, parser)
parser.add_option("--verbose", "-v", dest="verbose", action="store_true",
help="also display twisted/python/platform info (useful for bug reports)")
def run(self, args, opts):
if opts.verbose:
print "Scrapy : %s" % scrapy.__version__
print "Twisted : %s" % twisted.version.short()
print "Python : %s" % sys.version.replace("\n", "- ")
print "Platform: %s" % platform.platform()
else:
print "Scrapy %s" % scrapy.__version__<|fim▁end|> | |
<|file_name|>LoadApp.py<|end_file_name|><|fim▁begin|>from .SlackIntegration import slackIntegration
import logging
from aiohttp import ClientSession
import asyncio
from time import perf_counter
class LoadApp():
def __init__(self):
self.logger = logging.getLogger('reliability')
self.app_visit_succeeded = 0
self.app_visit_failed = 0
self.tasks = []<|fim▁hole|> async with ClientSession() as session:
async with session.get(url) as response:
code = response.status
result = await response.text()
self.logger.info(f"{str(code)} : load: {url}")
#print (f"{str(code)} : load: {url}")
if code == 200:
self.app_visit_succeeded += 1
else:
self.app_visit_failed += 1
# send slack message if response code is not 200
slackIntegration.post_message_in_slack(f"Access to {url} failed. Response code: {str(code)}")
def set_tasks(self, urls, num):
for url in urls:
for i in range(num):
task = asyncio.ensure_future(self.get(url))
self.tasks.append(task)
if __name__ == "__main__":
loadApp = LoadApp()
urls = ["https://www.google.com",]
concurrency = 10
loadApp.set_tasks(urls, concurrency)
loop = asyncio.get_event_loop()
start = perf_counter()
loop.run_until_complete(asyncio.wait(loadApp.tasks))
end = perf_counter()
print(f"Perf of {concurrency} visits is: {end - start} second.")<|fim▁end|> |
async def get(self, url):
# To simulate differnt users accessing same app, do not reuse session |
<|file_name|>db.rs<|end_file_name|><|fim▁begin|>use sled;
use std::path::PathBuf;
pub struct DB {
tree: sled::Tree,
}
impl DB {
pub fn open(path: String) -> Self {
let db_path = PathBuf::from(&path);
let cfg = sled::Config::default()
.path(path.to_owned())
.use_compression(true);
if db_path.is_file() {
return DB { tree: cfg.tree() };
} else {
return DB { tree: sled::Tree::new(cfg) };<|fim▁hole|>}<|fim▁end|> |
}
} |
<|file_name|>PanoramioPlugin.cpp<|end_file_name|><|fim▁begin|>//
// This file is part of the Marble Virtual Globe.
//
// This program is free software licensed under the GNU LGPL. You can
// find a copy of this license in LICENSE.txt in the top directory of
// the source code.
//
// Copyright 2009 Bastian Holst <bastianholst@gmx.de>
//
// Self
#include "PanoramioPlugin.h"
#include "PanoramioModel.h"
#include "MarbleWidget.h"
using namespace Marble;
PanoramioPlugin::PanoramioPlugin( const MarbleModel *marbleModel ) :
AbstractDataPlugin( marbleModel )
{
}
QString Marble::PanoramioPlugin::nameId() const
{
return "panoramio";
}<|fim▁hole|> setNumberOfItems( numberOfImagesPerFetch );
}
QString PanoramioPlugin::name() const
{
return tr( "Panoramio Photos" );
}
QString PanoramioPlugin::guiString() const
{
return tr( "&Panoramio" );
}
QString PanoramioPlugin::description() const
{
return tr( "Automatically downloads images from around the world in preference to their popularity" );
}
QIcon PanoramioPlugin::icon() const
{
return QIcon( ":/icons/panoramio.png" );
}
QString Marble::PanoramioPlugin::version() const
{
return "0.1";
}
QString PanoramioPlugin::copyrightYears() const
{
return "2009, 2014";
}
QList<PluginAuthor> PanoramioPlugin::pluginAuthors() const
{
return QList<PluginAuthor>() << PluginAuthor( "Bastian Holst", "bastianholst@gmx.de" );
}
bool PanoramioPlugin::eventFilter(QObject *object, QEvent *event)
{
if ( isInitialized() ) {
Q_ASSERT( dynamic_cast<PanoramioModel *>( model() ) != 0 );
PanoramioModel *photoPluginModel = static_cast<PanoramioModel *>( model() );
MarbleWidget *widget = dynamic_cast<MarbleWidget *>( object );
if ( widget ) {
photoPluginModel->setMarbleWidget( widget );
}
}
return AbstractDataPlugin::eventFilter( object, event );
}
Q_EXPORT_PLUGIN2(PanoramioPlugin, Marble::PanoramioPlugin)
#include "moc_PanoramioPlugin.cpp"<|fim▁end|> |
void PanoramioPlugin::initialize()
{
setModel( new PanoramioModel( marbleModel(), this ) ); |
<|file_name|>test.rs<|end_file_name|><|fim▁begin|>// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use super::*;
// Ensure serialization of MessagingProtocolVersion enum takes 1 byte.
#[test]
fn net_protocol() -> bcs::Result<()> {
let protocol = MessagingProtocolVersion::V1;
assert_eq!(bcs::to_bytes(&protocol)?, vec![0x00]);
Ok(())
}
#[test]
fn protocols_to_from_vec() {
let supported_protocols: SupportedProtocols =<|fim▁hole|> [ProtocolId::ConsensusRpc, ProtocolId::MempoolDirectSend]
.iter()
.into();
assert_eq!(
SupportedProtocols::from(
(supported_protocols.clone().try_into() as Result<Vec<ProtocolId>, _>)
.unwrap()
.iter()
),
supported_protocols
);
}
#[test]
fn represents_same_network() {
let mut handshake_msg = HandshakeMsg::new_for_testing();
handshake_msg.network_id = NetworkId::vfn_network();
// succeeds: Positive case
let h1 = handshake_msg.clone();
let h2 = handshake_msg.clone();
h1.perform_handshake(&h2).unwrap();
// fails: another private network
let mut h2 = handshake_msg.clone();
h2.network_id = NetworkId::Private("h2".to_string());
h1.perform_handshake(&h2).unwrap_err();
// fails: different network
let mut h2 = handshake_msg.clone();
h2.network_id = NetworkId::Public;
h1.perform_handshake(&h2).unwrap_err();
// fails: different chain
let mut h2 = handshake_msg;
h2.chain_id = ChainId::new(h1.chain_id.id() + 1);
h1.perform_handshake(&h2).unwrap_err();
}
#[test]
fn common_protocols() {
let network_id = NetworkId::default();
let chain_id = ChainId::default();
let mut supported_protocols = BTreeMap::new();
supported_protocols.insert(
MessagingProtocolVersion::V1,
[ProtocolId::ConsensusRpc, ProtocolId::DiscoveryDirectSend]
.iter()
.into(),
);
let h1 = HandshakeMsg {
chain_id,
network_id: network_id.clone(),
supported_protocols,
};
// Case 1: One intersecting protocol is found for common messaging protocol version.
let mut supported_protocols = BTreeMap::new();
supported_protocols.insert(
MessagingProtocolVersion::V1,
[ProtocolId::ConsensusRpc, ProtocolId::MempoolDirectSend]
.iter()
.into(),
);
let h2 = HandshakeMsg {
chain_id,
network_id: network_id.clone(),
supported_protocols,
};
assert_eq!(
(
MessagingProtocolVersion::V1,
[ProtocolId::ConsensusRpc].iter().into()
),
h1.perform_handshake(&h2).unwrap()
);
// Case 2: No intersecting messaging protocol version.
let h2 = HandshakeMsg {
chain_id,
network_id: network_id.clone(),
supported_protocols: BTreeMap::new(),
};
h1.perform_handshake(&h2).unwrap_err();
// Case 3: Intersecting messaging protocol version is present, but no intersecting protocols.
let mut supported_protocols = BTreeMap::new();
supported_protocols.insert(MessagingProtocolVersion::V1, SupportedProtocols::default());
let h2 = HandshakeMsg {
chain_id,
network_id,
supported_protocols,
};
assert_eq!(
(MessagingProtocolVersion::V1, [].iter().into()),
h1.perform_handshake(&h2).unwrap()
);
}<|fim▁end|> | |
<|file_name|>TableCustomConfig.java<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2014-2015 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.pinot.common.config;
import java.lang.reflect.Field;
import java.util.Map;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@JsonIgnoreProperties(ignoreUnknown = true)
public class TableCustomConfig {
private static final Logger LOGGER = LoggerFactory.getLogger(SegmentsValidationAndRetentionConfig.class);
public static final String MESSAGE_BASED_REFRESH_KEY = "messageBasedRefresh";
private Map<String, String> customConfigs;
public Map<String, String> getCustomConfigs() {
return customConfigs;
}
public void setCustomConfigs(Map<String, String> customConfigs) {
this.customConfigs = customConfigs;
}
@Override
public String toString() {
final StringBuilder result = new StringBuilder();
final String newLine = System.getProperty("line.separator");
result.append(this.getClass().getName());
result.append(" Object {");
result.append(newLine);
//determine fields declared in this class only (no fields of superclass)
final Field[] fields = this.getClass().getDeclaredFields();
//print field names paired with their values
for (final Field field : fields) {
result.append(" ");
try {
result.append(field.getName());
result.append(": ");
//requires access to private field:
result.append(field.get(this));
} catch (final IllegalAccessException ex) {<|fim▁hole|> result.append(newLine);
}
result.append("}");
return result.toString();
}
}<|fim▁end|> | if (LOGGER.isWarnEnabled()) {
LOGGER.warn("Caught exception while processing field " + field, ex);
}
} |
<|file_name|>enrich_intersection_points.hpp<|end_file_name|><|fim▁begin|>// Boost.Geometry (aka GGL, Generic Geometry Library)
// Copyright (c) 2007-2012 Barend Gehrels, Amsterdam, the Netherlands.
// Use, modification and distribution is subject to the Boost Software License,
// Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_GEOMETRY_ALGORITHMS_DETAIL_OVERLAY_ENRICH_HPP
#define BOOST_GEOMETRY_ALGORITHMS_DETAIL_OVERLAY_ENRICH_HPP
#include <cstddef>
#include <algorithm>
#include <map>
#include <set>
#include <vector>
#ifdef BOOST_GEOMETRY_DEBUG_ENRICH
# include <iostream>
# include <boost/geometry/algorithms/detail/overlay/debug_turn_info.hpp>
# include <boost/geometry/io/wkt/wkt.hpp>
# define BOOST_GEOMETRY_DEBUG_IDENTIFIER
#endif
#include <boost/range.hpp>
#include <boost/geometry/iterators/ever_circling_iterator.hpp>
#include <boost/geometry/algorithms/detail/ring_identifier.hpp>
#include <boost/geometry/algorithms/detail/overlay/copy_segment_point.hpp>
#include <boost/geometry/algorithms/detail/overlay/handle_colocations.hpp>
#include <boost/geometry/algorithms/detail/overlay/less_by_segment_ratio.hpp>
#include <boost/geometry/algorithms/detail/overlay/overlay_type.hpp>
#include <boost/geometry/algorithms/detail/overlay/sort_by_side.hpp>
#include <boost/geometry/policies/robustness/robust_type.hpp>
#include <boost/geometry/strategies/side.hpp>
#ifdef BOOST_GEOMETRY_DEBUG_ENRICH
# include <boost/geometry/algorithms/detail/overlay/check_enrich.hpp>
#endif
namespace boost { namespace geometry
{
#ifndef DOXYGEN_NO_DETAIL
namespace detail { namespace overlay
{
// Sorts IP-s of this ring on segment-identifier, and if on same segment,
// on distance.
// Then assigns for each IP which is the next IP on this segment,
// plus the vertex-index to travel to, plus the next IP
// (might be on another segment)
template
<
bool Reverse1, bool Reverse2,
typename Operations,
typename Turns,
typename Geometry1, typename Geometry2,
typename RobustPolicy,
typename Strategy
>
inline void enrich_sort(Operations& operations,
Turns const& turns,
operation_type for_operation,
Geometry1 const& geometry1,
Geometry2 const& geometry2,
RobustPolicy const& robust_policy,
Strategy const& /*strategy*/)
{
std::sort(boost::begin(operations),
boost::end(operations),
less_by_segment_ratio
<
Turns,
typename boost::range_value<Operations>::type,
Geometry1, Geometry2,
RobustPolicy,
Reverse1, Reverse2
>(turns, for_operation, geometry1, geometry2, robust_policy));
}
template <typename Operations, typename Turns>
inline void enrich_assign(Operations& operations, Turns& turns)
{
typedef typename boost::range_value<Turns>::type turn_type;
typedef typename turn_type::turn_operation_type op_type;
typedef typename boost::range_iterator<Operations>::type iterator_type;
if (operations.size() > 0)
{
// Assign travel-to-vertex/ip index for each turning point.
// Iterator "next" is circular
geometry::ever_circling_range_iterator<Operations const> next(operations);
++next;
for (iterator_type it = boost::begin(operations);
it != boost::end(operations); ++it)
{
turn_type& turn = turns[it->turn_index];
op_type& op = turn.operations[it->operation_index];
// Normal behaviour: next should point at next turn:
if (it->turn_index == next->turn_index)
{
++next;
}
// Cluster behaviour: next should point after cluster, unless
// their seg_ids are not the same
while (turn.cluster_id != -1
&& it->turn_index != next->turn_index
&& turn.cluster_id == turns[next->turn_index].cluster_id
&& op.seg_id == turns[next->turn_index].operations[next->operation_index].seg_id)
{
++next;
}
turn_type const& next_turn = turns[next->turn_index];
op_type const& next_op = next_turn.operations[next->operation_index];
op.enriched.travels_to_ip_index
= static_cast<signed_size_type>(next->turn_index);
op.enriched.travels_to_vertex_index
= next->subject->seg_id.segment_index;
if (op.seg_id.segment_index == next_op.seg_id.segment_index
&& op.fraction < next_op.fraction)
{
// Next turn is located further on same segment
// assign next_ip_index
// (this is one not circular therefore fraction is considered)
op.enriched.next_ip_index = static_cast<signed_size_type>(next->turn_index);
}
}
}
// DEBUG
#ifdef BOOST_GEOMETRY_DEBUG_ENRICH
{
for (iterator_type it = boost::begin(operations);
it != boost::end(operations);
++it)
{
op_type& op = turns[it->turn_index]
.operations[it->operation_index];
std::cout << it->turn_index
<< " cl=" << turns[it->turn_index].cluster_id
<< " meth=" << method_char(turns[it->turn_index].method)
<< " seg=" << op.seg_id
<< " dst=" << op.fraction // needs define
<< " op=" << operation_char(turns[it->turn_index].operations[0].operation)
<< operation_char(turns[it->turn_index].operations[1].operation)
<< " (" << operation_char(op.operation) << ")"
<< " nxt=" << op.enriched.next_ip_index
<< " / " << op.enriched.travels_to_ip_index
<< " [vx " << op.enriched.travels_to_vertex_index << "]"
<< std::boolalpha << turns[it->turn_index].discarded
<< std::endl;
;
}
}
#endif
// END DEBUG
}
template <typename Turns, typename MappedVector>
inline void create_map(Turns const& turns,
detail::overlay::operation_type for_operation,
MappedVector& mapped_vector)
{
typedef typename boost::range_value<Turns>::type turn_type;
typedef typename turn_type::container_type container_type;
typedef typename MappedVector::mapped_type mapped_type;
typedef typename boost::range_value<mapped_type>::type indexed_type;
std::size_t index = 0;
for (typename boost::range_iterator<Turns const>::type
it = boost::begin(turns);
it != boost::end(turns);
++it, ++index)
{
// Add all (non discarded) operations on this ring
// Blocked operations or uu on clusters (for intersection)
// should be included, to block potential paths in clusters
turn_type const& turn = *it;
if (turn.discarded)
{
continue;
}
if (for_operation == operation_intersection
&& turn.cluster_id == -1
&& turn.both(operation_union))
{
// Only include uu turns if part of cluster (to block potential paths),
// otherwise they can block possibly viable paths
continue;<|fim▁hole|> }
std::size_t op_index = 0;
for (typename boost::range_iterator<container_type const>::type
op_it = boost::begin(turn.operations);
op_it != boost::end(turn.operations);
++op_it, ++op_index)
{
ring_identifier const ring_id
(
op_it->seg_id.source_index,
op_it->seg_id.multi_index,
op_it->seg_id.ring_index
);
mapped_vector[ring_id].push_back
(
indexed_type(index, op_index, *op_it,
it->operations[1 - op_index].seg_id)
);
}
}
}
}} // namespace detail::overlay
#endif //DOXYGEN_NO_DETAIL
/*!
\brief All intersection points are enriched with successor information
\ingroup overlay
\tparam Turns type of intersection container
(e.g. vector of "intersection/turn point"'s)
\tparam Clusters type of cluster container
\tparam Geometry1 \tparam_geometry
\tparam Geometry2 \tparam_geometry
\tparam Strategy side strategy type
\param turns container containing intersection points
\param clusters container containing clusters
\param geometry1 \param_geometry
\param geometry2 \param_geometry
\param robust_policy policy to handle robustness issues
\param strategy strategy
*/
template
<
bool Reverse1, bool Reverse2,
overlay_type OverlayType,
typename Turns,
typename Clusters,
typename Geometry1, typename Geometry2,
typename RobustPolicy,
typename Strategy
>
inline void enrich_intersection_points(Turns& turns,
Clusters& clusters,
Geometry1 const& geometry1, Geometry2 const& geometry2,
RobustPolicy const& robust_policy,
Strategy const& strategy)
{
static const detail::overlay::operation_type for_operation
= detail::overlay::operation_from_overlay<OverlayType>::value;
typedef typename boost::range_value<Turns>::type turn_type;
typedef typename turn_type::turn_operation_type op_type;
typedef detail::overlay::indexed_turn_operation
<
op_type
> indexed_turn_operation;
typedef std::map
<
ring_identifier,
std::vector<indexed_turn_operation>
> mapped_vector_type;
bool const has_colocations
= detail::overlay::handle_colocations<Reverse1, Reverse2>(turns,
clusters, geometry1, geometry2);
// Discard none turns, if any
for (typename boost::range_iterator<Turns>::type
it = boost::begin(turns);
it != boost::end(turns);
++it)
{
if (it->both(detail::overlay::operation_none))
{
it->discarded = true;
}
}
// Create a map of vectors of indexed operation-types to be able
// to sort intersection points PER RING
mapped_vector_type mapped_vector;
detail::overlay::create_map(turns, for_operation, mapped_vector);
// No const-iterator; contents of mapped copy is temporary,
// and changed by enrich
for (typename mapped_vector_type::iterator mit
= mapped_vector.begin();
mit != mapped_vector.end();
++mit)
{
#ifdef BOOST_GEOMETRY_DEBUG_ENRICH
std::cout << "ENRICH-sort Ring "
<< mit->first << std::endl;
#endif
detail::overlay::enrich_sort<Reverse1, Reverse2>(
mit->second, turns, for_operation,
geometry1, geometry2,
robust_policy, strategy);
}
for (typename mapped_vector_type::iterator mit
= mapped_vector.begin();
mit != mapped_vector.end();
++mit)
{
#ifdef BOOST_GEOMETRY_DEBUG_ENRICH
std::cout << "ENRICH-assign Ring "
<< mit->first << std::endl;
#endif
detail::overlay::enrich_assign(mit->second, turns);
}
if (has_colocations)
{
detail::overlay::gather_cluster_properties<Reverse1, Reverse2>(
clusters, turns, for_operation, geometry1, geometry2);
}
#ifdef BOOST_GEOMETRY_DEBUG_ENRICH
//detail::overlay::check_graph(turns, for_operation);
#endif
}
}} // namespace boost::geometry
#endif // BOOST_GEOMETRY_ALGORITHMS_DETAIL_OVERLAY_ENRICH_HPP<|fim▁end|> | |
<|file_name|>main_test.go<|end_file_name|><|fim▁begin|>package main
import (
"bytes"
"context"
"log"
"net/http"
"net/http/httptest"
"strings"
"testing"
"time"
strings_app "github.com/utrack/clay/integration/client_cancel_request/app/strings"
strings_pb "github.com/utrack/clay/integration/client_cancel_request/pkg/strings"
)
func TestCancelRequest(t *testing.T) {
ts := testServer()
errlog := bytes.NewBuffer([]byte{})
ts.Config.ErrorLog = log.New(errlog, "", 0)
defer func() {
ts.Close()
if errlog.Len() > 0 {
t.Fatalf("expected no errors, got: %s", errlog.Bytes())
}
}()
httpClient := ts.Client()
client := strings_pb.NewStringsHTTPClient(httpClient, ts.URL)
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Millisecond)
defer cancel()
client.ToUpper(ctx, &strings_pb.String{Str: strings.Repeat("s", 10*1024)})<|fim▁hole|>}
func testServer() *httptest.Server {
mux := http.NewServeMux()
desc := strings_app.NewStrings().GetDescription()
desc.RegisterHTTP(mux)
mux.Handle("/swagger.json", http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
w.Header().Set("Content-Type", "application/javascript")
w.Write(desc.SwaggerDef())
}))
ts := httptest.NewServer(mux)
return ts
}<|fim▁end|> | |
<|file_name|>Router.js<|end_file_name|><|fim▁begin|>var express = require('../')
, Router = express.Router
, request = require('./support/http')
, methods = require('methods')
, assert = require('assert');
describe('Router', function(){
var router, app;
beforeEach(function(){
router = new Router;
app = express();
})
describe('.match(method, url, i)', function(){
it('should match based on index', function(){
router.route('get', '/foo', function(){});
router.route('get', '/foob?', function(){});
router.route('get', '/bar', function(){});
var method = 'GET';
var url = '/foo?bar=baz';
var route = router.match(method, url, 0);
route.constructor.name.should.equal('Route');
route.method.should.equal('get');
route.path.should.equal('/foo');
var route = router.match(method, url, 1);
route.path.should.equal('/foob?');
var route = router.match(method, url, 2);
assert(!route);
url = '/bar';
var route = router.match(method, url);
route.path.should.equal('/bar');
})
})
<|fim▁hole|> describe('.matchRequest(req, i)', function(){
it('should match based on index', function(){
router.route('get', '/foo', function(){});
router.route('get', '/foob?', function(){});
router.route('get', '/bar', function(){});
var req = { method: 'GET', url: '/foo?bar=baz' };
var route = router.matchRequest(req, 0);
route.constructor.name.should.equal('Route');
route.method.should.equal('get');
route.path.should.equal('/foo');
var route = router.matchRequest(req, 1);
req._route_index.should.equal(1);
route.path.should.equal('/foob?');
var route = router.matchRequest(req, 2);
assert(!route);
req.url = '/bar';
var route = router.matchRequest(req);
route.path.should.equal('/bar');
})
})
describe('.middleware', function(){
it('should dispatch', function(done){
router.route('get', '/foo', function(req, res){
res.send('foo');
});
app.use(router.middleware);
request(app)
.get('/foo')
.expect('foo', done);
})
})
describe('.multiple callbacks', function(){
it('should throw if a callback is null', function(){
assert.throws(function () {
router.route('get', '/foo', null, function(){});
})
})
it('should throw if a callback is undefined', function(){
assert.throws(function () {
router.route('get', '/foo', undefined, function(){});
})
})
it('should throw if a callback is not a function', function(){
assert.throws(function () {
router.route('get', '/foo', 'not a function', function(){});
})
})
it('should not throw if all callbacks are functions', function(){
router.route('get', '/foo', function(){}, function(){});
})
})
describe('.all', function() {
it('should support using .all to capture all http verbs', function() {
var router = new Router();
router.all('/foo', function(){});
var url = '/foo?bar=baz';
methods.forEach(function testMethod(method) {
var route = router.match(method, url);
route.constructor.name.should.equal('Route');
route.method.should.equal(method);
route.path.should.equal('/foo');
});
})
})
})<|fim▁end|> | |
<|file_name|>SiltEffect.cpp<|end_file_name|><|fim▁begin|>/* -*-c++-*- OpenSceneGraph - Copyright (C) 1998-2006 Robert Osfield
*
* This library is open source and may be redistributed and/or modified under
* the terms of the OpenSceneGraph Public License (OSGPL) version 0.0 or
* (at your option) any later version. The full license is in LICENSE file
* included with this distribution, and on the openscenegraph.org website.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* OpenSceneGraph Public License for more details.
*/
#include <osgOcean/SiltEffect>
#include <osgOcean/ShaderManager>
#include <stdlib.h>
#include <OpenThreads/ScopedLock>
#include <osg/Texture2D>
#include <osg/PointSprite>
#include <osgUtil/CullVisitor>
#include <osgUtil/GLObjectsVisitor>
#include <osg/Notify>
#include <osg/io_utils>
#include <osg/Timer>
#include <osg/Version>
using namespace osgOcean;
static float random(float min,float max) { return min + (max-min)*(float)rand()/(float)RAND_MAX; }
static void fillSpotLightImage(unsigned char* ptr, const osg::Vec4& centerColour, const osg::Vec4& backgroudColour, unsigned int size, float power)
{
if (size==1)
{
float r = 0.5f;
osg::Vec4 color = centerColour*r+backgroudColour*(1.0f-r);
*ptr++ = (unsigned char)((color[0])*255.0f);
*ptr++ = (unsigned char)((color[1])*255.0f);
*ptr++ = (unsigned char)((color[2])*255.0f);
*ptr++ = (unsigned char)((color[3])*255.0f);
return;
}
float mid = (float(size)-1.0f)*0.5f;
float div = 2.0f/float(size);
for(unsigned int r=0;r<size;++r)
{
//unsigned char* ptr = image->data(0,r,0);
for(unsigned int c=0;c<size;++c)
{
float dx = (float(c) - mid)*div;
float dy = (float(r) - mid)*div;
float r = powf(1.0f-sqrtf(dx*dx+dy*dy),power);
if (r<0.0f) r=0.0f;
osg::Vec4 color = centerColour*r+backgroudColour*(1.0f-r);
*ptr++ = (unsigned char)((color[0])*255.0f);
*ptr++ = (unsigned char)((color[1])*255.0f);
*ptr++ = (unsigned char)((color[2])*255.0f);
*ptr++ = (unsigned char)((color[3])*255.0f);
}
}
}
static osg::Image* createSpotLightImage(const osg::Vec4& centerColour, const osg::Vec4& backgroudColour, unsigned int size, float power)
{
#if 0
osg::Image* image = new osg::Image;
unsigned char* ptr = image->data(0,0,0);
fillSpotLightImage(ptr, centerColour, backgroudColour, size, power);
return image;
#else
osg::Image* image = new osg::Image;
osg::Image::MipmapDataType mipmapData;
unsigned int s = size;
unsigned int totalSize = 0;
unsigned i;
for(i=0; s>0; s>>=1, ++i)
{
if (i>0) mipmapData.push_back(totalSize);
totalSize += s*s*4;
}
unsigned char* ptr = new unsigned char[totalSize];
image->setImage(size, size, size, GL_RGBA, GL_RGBA, GL_UNSIGNED_BYTE, ptr, osg::Image::USE_NEW_DELETE,1);
image->setMipmapLevels(mipmapData);
s = size;
for(i=0; s>0; s>>=1, ++i)
{
fillSpotLightImage(ptr, centerColour, backgroudColour, s, power);
ptr += s*s*4;
}
return image;
#endif
}
SiltEffect::SiltEffect()
{
setNumChildrenRequiringUpdateTraversal(1);
setUpGeometries(1024);
setIntensity(0.5);
}
void SiltEffect::setIntensity(float intensity)
{
_wind.set(0.0f,0.0f,0.0f);
_particleSpeed = -0.75f - 0.25f*intensity;
_particleSize = 0.02f + 0.03f*intensity;
_particleColor = osg::Vec4(0.85f, 0.85f, 0.85f, 1.0f) - osg::Vec4(0.1f, 0.1f, 0.1f, 1.0f)* intensity;
_maximumParticleDensity = intensity * 8.2f;
_cellSize.set(5.0f / (0.25f+intensity), 5.0f / (0.25f+intensity), 5.0f);
_nearTransition = 25.f;
_farTransition = 100.0f - 60.0f*sqrtf(intensity);
if (!_fog) _fog = new osg::Fog;
_fog->setMode(osg::Fog::EXP);
_fog->setDensity(0.01f*intensity);
_fog->setColor(osg::Vec4(0.6, 0.6, 0.6, 1.0));
_dirty = true;
update();
}
SiltEffect::SiltEffect(const SiltEffect& copy, const osg::CopyOp& copyop):
osg::Node(copy,copyop)
{
setNumChildrenRequiringUpdateTraversal(getNumChildrenRequiringUpdateTraversal()+1);
_dirty = true;
update();
}
void SiltEffect::compileGLObjects(osg::RenderInfo& renderInfo) const
{
if (_quadGeometry.valid())
{
_quadGeometry->compileGLObjects(renderInfo);
if (_quadGeometry->getStateSet()) _quadGeometry->getStateSet()->compileGLObjects(*renderInfo.getState());
}
if (_pointGeometry.valid())
{
_pointGeometry->compileGLObjects(renderInfo);
if (_pointGeometry->getStateSet()) _pointGeometry->getStateSet()->compileGLObjects(*renderInfo.getState());
}
}
void SiltEffect::traverse(osg::NodeVisitor& nv)
{
if (nv.getVisitorType() == osg::NodeVisitor::UPDATE_VISITOR)
{
if (_dirty) update();
if (nv.getFrameStamp())
{
double currentTime = nv.getFrameStamp()->getSimulationTime();
static double previousTime = currentTime;
double delta = currentTime - previousTime;
_origin += _wind * delta;
previousTime = currentTime;
}
<|fim▁hole|> {
if (_dirty) update();
osgUtil::GLObjectsVisitor* globjVisitor = dynamic_cast<osgUtil::GLObjectsVisitor*>(&nv);
if (globjVisitor)
{
if (globjVisitor->getMode() & osgUtil::GLObjectsVisitor::COMPILE_STATE_ATTRIBUTES)
{
compileGLObjects(globjVisitor->getRenderInfo());
}
}
return;
}
if (nv.getVisitorType() != osg::NodeVisitor::CULL_VISITOR)
{
return;
}
osgUtil::CullVisitor* cv = dynamic_cast<osgUtil::CullVisitor*>(&nv);
if (!cv)
{
return;
}
ViewIdentifier viewIndentifier(cv, nv.getNodePath());
{
SiltDrawableSet* SiltDrawableSet = 0;
{
OpenThreads::ScopedLock<OpenThreads::Mutex> lock(_mutex);
SiltDrawableSet = &(_viewDrawableMap[viewIndentifier]);
if (!SiltDrawableSet->_quadSiltDrawable)
{
SiltDrawableSet->_quadSiltDrawable = new SiltDrawable;
SiltDrawableSet->_quadSiltDrawable->setGeometry(_quadGeometry.get());
SiltDrawableSet->_quadSiltDrawable->setStateSet(_quadStateSet.get());
SiltDrawableSet->_quadSiltDrawable->setDrawType(GL_QUADS);
SiltDrawableSet->_pointSiltDrawable = new SiltDrawable;
SiltDrawableSet->_pointSiltDrawable->setGeometry(_pointGeometry.get());
SiltDrawableSet->_pointSiltDrawable->setStateSet(_pointStateSet.get());
SiltDrawableSet->_pointSiltDrawable->setDrawType(GL_POINTS);
}
}
cull(*SiltDrawableSet, cv);
cv->pushStateSet(_stateset.get());
float depth = 0.0f;
if (!SiltDrawableSet->_quadSiltDrawable->getCurrentCellMatrixMap().empty())
{
cv->pushStateSet(SiltDrawableSet->_quadSiltDrawable->getStateSet());
cv->addDrawableAndDepth(SiltDrawableSet->_quadSiltDrawable.get(),cv->getModelViewMatrix(),depth);
cv->popStateSet();
}
if (!SiltDrawableSet->_pointSiltDrawable->getCurrentCellMatrixMap().empty())
{
cv->pushStateSet(SiltDrawableSet->_pointSiltDrawable->getStateSet());
cv->addDrawableAndDepth(SiltDrawableSet->_pointSiltDrawable.get(),cv->getModelViewMatrix(),depth);
cv->popStateSet();
}
cv->popStateSet();
}
}
void SiltEffect::update()
{
_dirty = false;
osg::notify(osg::INFO)<<"SiltEffect::update()"<<std::endl;
float length_u = _cellSize.x();
float length_v = _cellSize.y();
float length_w = _cellSize.z();
// time taken to get from start to the end of cycle
_period = fabsf(_cellSize.z() / _particleSpeed);
_du.set(length_u, 0.0f, 0.0f);
_dv.set(0.0f, length_v, 0.0f);
_dw.set(0.0f, 0.0f, length_w);
_inverse_du.set(1.0f/length_u, 0.0f, 0.0f);
_inverse_dv.set(0.0f, 1.0f/length_v, 0.0f);
_inverse_dw.set(0.0f, 0.0f, 1.0f/length_w);
osg::notify(osg::INFO)<<"Cell size X="<<length_u<<std::endl;
osg::notify(osg::INFO)<<"Cell size Y="<<length_v<<std::endl;
osg::notify(osg::INFO)<<"Cell size Z="<<length_w<<std::endl;
{
OpenThreads::ScopedLock<OpenThreads::Mutex> lock(_mutex);
_viewDrawableMap.clear();
}
// set up state/
{
if (!_stateset)
{
_stateset = new osg::StateSet;
_stateset->addUniform(new osg::Uniform("osgOcean_BaseTexture",0));
_stateset->setMode(GL_LIGHTING, osg::StateAttribute::OFF);
_stateset->setMode(GL_BLEND, osg::StateAttribute::ON);
osg::Texture2D* texture = new osg::Texture2D(createSpotLightImage(osg::Vec4(0.55f,0.55f,0.55f,0.65f),osg::Vec4(0.55f,0.55f,0.55f,0.0f),32,1.0));
_stateset->setTextureAttribute(0, texture);
}
if (!_inversePeriodUniform)
{
_inversePeriodUniform = new osg::Uniform("osgOcean_InversePeriod",1.0f/_period);
_stateset->addUniform(_inversePeriodUniform.get());
}
else _inversePeriodUniform->set(1.0f/_period);
if (!_particleColorUniform)
{
_particleColorUniform = new osg::Uniform("osgOcean_ParticleColour", _particleColor);
_stateset->addUniform(_particleColorUniform.get());
}
else _particleColorUniform->set(_particleColor);
if (!_particleSizeUniform)
{
_particleSizeUniform = new osg::Uniform("osgOcean_ParticleSize", _particleSize);
_stateset->addUniform(_particleSizeUniform.get());
}
else
_particleSizeUniform->set(_particleSize);
}
}
void SiltEffect::createGeometry(unsigned int numParticles,
osg::Geometry* quad_geometry,
osg::Geometry* point_geometry )
{
// particle corner offsets
osg::Vec2 offset00(0.0f,0.0f);
osg::Vec2 offset10(1.0f,0.0f);
osg::Vec2 offset01(0.0f,1.0f);
osg::Vec2 offset11(1.0f,1.0f);
osg::Vec2 offset0(0.5f,0.0f);
osg::Vec2 offset1(0.5f,1.0f);
osg::Vec2 offset(0.5f,0.5f);
// configure quad_geometry;
osg::Vec3Array* quad_vertices = 0;
osg::Vec2Array* quad_offsets = 0;
osg::Vec3Array* quad_vectors = 0;
if (quad_geometry)
{
quad_geometry->setName("quad");
quad_vertices = new osg::Vec3Array(numParticles*4);
quad_offsets = new osg::Vec2Array(numParticles*4);
quad_vectors = new osg::Vec3Array(numParticles*4);
quad_geometry->setVertexArray(quad_vertices);
quad_geometry->setTexCoordArray(0, quad_offsets);
quad_geometry->setNormalArray(quad_vectors);
quad_geometry->setNormalBinding(osg::Geometry::BIND_PER_VERTEX);
}
// configure point_geometry;
osg::Vec3Array* point_vertices = 0;
osg::Vec2Array* point_offsets = 0;
osg::Vec3Array* point_vectors = 0;
if (point_geometry)
{
point_geometry->setName("point");
point_vertices = new osg::Vec3Array(numParticles);
point_offsets = new osg::Vec2Array(numParticles);
point_vectors = new osg::Vec3Array(numParticles);
point_geometry->setVertexArray(point_vertices);
point_geometry->setTexCoordArray(0, point_offsets);
point_geometry->setNormalArray(point_vectors);
point_geometry->setNormalBinding(osg::Geometry::BIND_PER_VERTEX);
}
// set up vertex attribute data.
for(unsigned int i=0; i< numParticles; ++i)
{
osg::Vec3 pos( random(0.0f, 1.0f), random(0.0f, 1.0f), random(0.0f, 1.0f));
osg::Vec3 dir( random(-1.f, 1.f), random(-1.f,1.f), random(-1.f,1.f) );
// quad particles
if (quad_vertices)
{
(*quad_vertices)[i*4] = pos;
(*quad_vertices)[i*4+1] = pos;
(*quad_vertices)[i*4+2] = pos;
(*quad_vertices)[i*4+3] = pos;
(*quad_offsets)[i*4] = offset00;
(*quad_offsets)[i*4+1] = offset01;
(*quad_offsets)[i*4+2] = offset11;
(*quad_offsets)[i*4+3] = offset10;
(*quad_vectors)[i*4] = dir;
(*quad_vectors)[i*4+1] = dir;
(*quad_vectors)[i*4+2] = dir;
(*quad_vectors)[i*4+3] = dir;
}
// point particles
if (point_vertices)
{
(*point_vertices)[i] = pos;
(*point_offsets)[i] = offset;
(*point_vectors)[i] = dir;
}
}
}
#include <osgOcean/shaders/osgOcean_silt_quads_vert.inl>
#include <osgOcean/shaders/osgOcean_silt_quads_frag.inl>
#include <osgOcean/shaders/osgOcean_silt_points_vert.inl>
#include <osgOcean/shaders/osgOcean_silt_points_frag.inl>
void SiltEffect::setUpGeometries(unsigned int numParticles)
{
unsigned int quadRenderBin = 12;
unsigned int pointRenderBin = 11;
osg::notify(osg::INFO)<<"SiltEffect::setUpGeometries("<<numParticles<<")"<<std::endl;
bool needGeometryRebuild = false;
if (!_quadGeometry || _quadGeometry->getVertexArray()->getNumElements() != 4*numParticles)
{
_quadGeometry = new osg::Geometry;
_quadGeometry->setUseVertexBufferObjects(true);
needGeometryRebuild = true;
}
if (!_pointGeometry || _pointGeometry->getVertexArray()->getNumElements() != numParticles)
{
_pointGeometry = new osg::Geometry;
_pointGeometry->setUseVertexBufferObjects(true);
needGeometryRebuild = true;
}
if (needGeometryRebuild)
{
createGeometry(numParticles, _quadGeometry.get(), _pointGeometry.get());
}
if (!_quadStateSet)
{
_quadStateSet = new osg::StateSet;
_quadStateSet->setRenderBinDetails(quadRenderBin,"DepthSortedBin");
static const char osgOcean_silt_quads_vert_file[] = "osgOcean/shaders/osgOcean_silt_quads.vert";
static const char osgOcean_silt_quads_frag_file[] = "osgOcean/shaders/osgOcean_silt_quads.frag";
osg::Program* program =
ShaderManager::instance().createProgram("silt_quads",
osgOcean_silt_quads_vert_file, osgOcean_silt_quads_frag_file,
osgOcean_silt_quads_vert, osgOcean_silt_quads_frag );
_quadStateSet->setAttribute(program);
}
if (!_pointStateSet)
{
_pointStateSet = new osg::StateSet;
static const char osgOcean_silt_points_vert_file[] = "osgOcean/shaders/osgOcean_silt_points.vert";
static const char osgOcean_silt_points_frag_file[] = "osgOcean/shaders/osgOcean_silt_points.frag";
osg::Program* program =
ShaderManager::instance().createProgram("silt_point",
osgOcean_silt_points_vert_file, osgOcean_silt_points_frag_file,
osgOcean_silt_points_vert, osgOcean_silt_points_frag );
_pointStateSet->setAttribute(program);
/// Setup the point sprites
osg::PointSprite *sprite = new osg::PointSprite();
_pointStateSet->setTextureAttributeAndModes(0, sprite, osg::StateAttribute::ON);
_pointStateSet->setMode(GL_VERTEX_PROGRAM_POINT_SIZE, osg::StateAttribute::ON);
_pointStateSet->setRenderBinDetails(pointRenderBin,"DepthSortedBin");
}
}
void SiltEffect::cull(SiltDrawableSet& pds, osgUtil::CullVisitor* cv) const
{
#ifdef DO_TIMING
osg::Timer_t startTick = osg::Timer::instance()->tick();
#endif
float cellVolume = _cellSize.x() * _cellSize.y() * _cellSize.z();
int numberOfParticles = (int)(_maximumParticleDensity * cellVolume);
if (numberOfParticles==0)
return;
pds._quadSiltDrawable->setNumberOfVertices(numberOfParticles*4);
pds._pointSiltDrawable->setNumberOfVertices(numberOfParticles);
pds._quadSiltDrawable->newFrame();
pds._pointSiltDrawable->newFrame();
osg::Matrix inverse_modelview;
inverse_modelview.invert(*(cv->getModelViewMatrix()));
osg::Vec3 eyeLocal = osg::Vec3(0.0f,0.0f,0.0f) * inverse_modelview;
//osg::notify(osg::NOTICE)<<" eyeLocal "<<eyeLocal<<std::endl;
float eye_k = (eyeLocal-_origin)*_inverse_dw;
osg::Vec3 eye_kPlane = eyeLocal-_dw*eye_k-_origin;
// osg::notify(osg::NOTICE)<<" eye_kPlane "<<eye_kPlane<<std::endl;
float eye_i = eye_kPlane*_inverse_du;
float eye_j = eye_kPlane*_inverse_dv;
osg::Polytope frustum;
frustum.setToUnitFrustum(false,false);
frustum.transformProvidingInverse(*(cv->getProjectionMatrix()));
frustum.transformProvidingInverse(*(cv->getModelViewMatrix()));
float i_delta = _farTransition * _inverse_du.x();
float j_delta = _farTransition * _inverse_dv.y();
float k_delta = 1;//_nearTransition * _inverse_dw.z();
int i_min = (int)floor(eye_i - i_delta);
int j_min = (int)floor(eye_j - j_delta);
int k_min = (int)floor(eye_k - k_delta);
int i_max = (int)ceil(eye_i + i_delta);
int j_max = (int)ceil(eye_j + j_delta);
int k_max = (int)ceil(eye_k + k_delta);
//osg::notify(osg::NOTICE)<<"i_delta="<<i_delta<<" j_delta="<<j_delta<<" k_delta="<<k_delta<<std::endl;
unsigned int numTested=0;
unsigned int numInFrustum=0;
float iCyle = 0.43;
float jCyle = 0.64;
for(int i = i_min; i<=i_max; ++i)
{
for(int j = j_min; j<=j_max; ++j)
{
for(int k = k_min; k<=k_max; ++k)
{
float startTime = (float)(i)*iCyle + (float)(j)*jCyle;
startTime = (startTime-floor(startTime))*_period;
if (build(eyeLocal, i,j,k, startTime, pds, frustum, cv))
++numInFrustum;
++numTested;
}
}
}
#ifdef DO_TIMING
osg::Timer_t endTick = osg::Timer::instance()->tick();
osg::notify(osg::NOTICE)<<"time for cull "<<osg::Timer::instance()->delta_m(startTick,endTick)<<"ms numTested= "<<numTested<<" numInFrustum= "<<numInFrustum<<std::endl;
osg::notify(osg::NOTICE)<<" quads "<<pds._quadSiltDrawable->getCurrentCellMatrixMap().size()<<" points "<<pds._pointSiltDrawable->getCurrentCellMatrixMap().size()<<std::endl;
#endif
}
bool SiltEffect::build(const osg::Vec3 eyeLocal, int i, int j, int k, float startTime, SiltDrawableSet& pds, osg::Polytope& frustum, osgUtil::CullVisitor* cv) const
{
osg::Vec3 position = _origin + osg::Vec3(float(i)*_du.x(), float(j)*_dv.y(), float(k+1)*_dw.z());
osg::Vec3 scale(_du.x(), _dv.y(), -_dw.z());
osg::BoundingBox bb(position.x(), position.y(), position.z()+scale.z(),
position.x()+scale.x(), position.y()+scale.y(), position.z());
if ( !frustum.contains(bb) )
return false;
osg::Vec3 center = position + scale*0.5f;
float distance = (center-eyeLocal).length();
osg::Matrix* mymodelview = 0;
if (distance < _nearTransition)
{
SiltDrawable::DepthMatrixStartTime& mstp
= pds._quadSiltDrawable->getCurrentCellMatrixMap()[SiltDrawable::Cell(i,k,j)];
mstp.depth = distance;
mstp.startTime = startTime;
mymodelview = &mstp.modelview;
}
else if (distance <= _farTransition)
{
SiltDrawable::DepthMatrixStartTime& mstp
= pds._pointSiltDrawable->getCurrentCellMatrixMap()[SiltDrawable::Cell(i,k,j)];
mstp.depth = distance;
mstp.startTime = startTime;
mymodelview = &mstp.modelview;
}
else
{
return false;
}
*mymodelview = *(cv->getModelViewMatrix());
#if OPENSCENEGRAPH_MAJOR_VERSION > 2 || \
(OPENSCENEGRAPH_MAJOR_VERSION == 2 && OPENSCENEGRAPH_MINOR_VERSION > 7) || \
(OPENSCENEGRAPH_MAJOR_VERSION == 2 && OPENSCENEGRAPH_MINOR_VERSION == 7 && OPENSCENEGRAPH_PATCH_VERSION >= 3)
// preMultTranslate and preMultScale introduced in rev 8868, which was
// before OSG 2.7.3.
mymodelview->preMultTranslate(position);
mymodelview->preMultScale(scale);
#else
// Otherwise use unoptimized versions
mymodelview->preMult(osg::Matrix::translate(position));
mymodelview->preMult(osg::Matrix::scale(scale));
#endif
cv->updateCalculatedNearFar(*(cv->getModelViewMatrix()),bb);
return true;
}
/////////////////////////////////////////////////////////////////////////////////////////////////////
//
// Precipitation Drawable
//
////////////////////////////////////////////////////////////////////////////////////////////////////
SiltEffect::SiltDrawable::SiltDrawable():
_drawType(GL_QUADS),
_numberOfVertices(0)
{
setSupportsDisplayList(false);
}
SiltEffect::SiltDrawable::SiltDrawable(const SiltDrawable& copy, const osg::CopyOp& copyop):
osg::Drawable(copy,copyop),
_geometry(copy._geometry),
_drawType(copy._drawType),
_numberOfVertices(copy._numberOfVertices)
{
}
void SiltEffect::SiltDrawable::drawImplementation(osg::RenderInfo& renderInfo) const
{
if (!_geometry) return;
const osg::Geometry::Extensions* extensions = osg::Geometry::getExtensions(renderInfo.getContextID(),true);
glPushMatrix();
typedef std::vector<const CellMatrixMap::value_type*> DepthMatrixStartTimeVector;
DepthMatrixStartTimeVector orderedEntries;
orderedEntries.reserve(_currentCellMatrixMap.size());
for(CellMatrixMap::const_iterator citr = _currentCellMatrixMap.begin();
citr != _currentCellMatrixMap.end();
++citr)
{
orderedEntries.push_back(&(*citr));
}
std::sort(orderedEntries.begin(),orderedEntries.end(),LessFunctor());
for(DepthMatrixStartTimeVector::reverse_iterator itr = orderedEntries.rbegin();
itr != orderedEntries.rend();
++itr)
{
extensions->glMultiTexCoord1f(GL_TEXTURE0+1, (*itr)->second.startTime);
glMatrixMode( GL_MODELVIEW );
glLoadMatrix((*itr)->second.modelview.ptr());
_geometry->draw(renderInfo);
unsigned int numVertices = osg::minimum(_geometry->getVertexArray()->getNumElements(), _numberOfVertices);
glDrawArrays(_drawType, 0, numVertices);
}
glPopMatrix();
}<|fim▁end|> | return;
}
if (nv.getVisitorType() == osg::NodeVisitor::NODE_VISITOR)
|
<|file_name|>test_fastq.py<|end_file_name|><|fim▁begin|># ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
from future.builtins import zip
from six import StringIO
import unittest
import warnings
from functools import partial
from skbio import (read, write, Sequence, DNA, RNA, Protein,
SequenceCollection, Alignment)
from skbio.io import FASTQFormatError
from skbio.io.fastq import (
_fastq_sniffer, _fastq_to_generator, _fastq_to_sequence_collection,
_fastq_to_alignment, _generator_to_fastq, _sequence_collection_to_fastq,
_alignment_to_fastq)
from skbio.util import get_data_path
import numpy as np
# Note: the example FASTQ files with file extension .fastq are taken from the
# following open-access publication's supplementary data:
#
# P.J.A. Cock, C.J. Fields, N. Goto, M.L. Heuer and P.M. Rice (2009). The
# Sanger FASTQ file format for sequences with quality scores, and the
# Solexa/Illumina FASTQ variants.
#
# See licenses/fastq-example-files-readme.txt for the original README that
# accompanied these files, which includes the terms of use and detailed
# description of the files.
#
# The example files bearing the original filenames have not been modified from
# their original form.
def _drop_kwargs(kwargs, *args):
for arg in args:
if arg in kwargs:
kwargs.pop(arg)
class TestSniffer(unittest.TestCase):
def setUp(self):
self.positives = [get_data_path(e) for e in [
'fastq_multi_seq_sanger',
'fastq_multi_blank_between_records',
'fastq_multi_ws_lines_between_records',
'fastq_multi_blank_end_of_file',
'fastq_multi_ws_lines_end_of_file',
'fastq_multi_whitespace_stripping',
'fastq_blank_lines',
'fastq_whitespace_only_lines',
'fastq_single_seq_illumina1.3',
'fastq_wrapping_as_illumina_no_description',
'fastq_wrapping_as_sanger_no_description',
'fastq_wrapping_original_sanger_no_description',
'fastq_writer_illumina1.3_defaults',
'fastq_writer_sanger_defaults',
'fastq_writer_sanger_non_defaults',
'fastq_5_blanks_start_of_file',
'fastq_5_ws_lines_start_of_file',
'illumina_full_range_as_illumina.fastq',
'illumina_full_range_as_sanger.fastq',
'illumina_full_range_original_illumina.fastq',
'longreads_as_illumina.fastq',
'longreads_as_sanger.fastq',
'longreads_original_sanger.fastq',
'misc_dna_as_illumina.fastq',
'misc_dna_as_sanger.fastq',
'misc_dna_original_sanger.fastq',
'misc_rna_as_illumina.fastq',
'misc_rna_as_sanger.fastq',
'misc_rna_original_sanger.fastq',
'sanger_full_range_as_illumina.fastq',
'sanger_full_range_as_sanger.fastq',
'sanger_full_range_original_sanger.fastq',
'solexa_full_range_original_solexa.fastq',
'wrapping_as_illumina.fastq',
'wrapping_as_sanger.fastq',
'wrapping_original_sanger.fastq'
]]
self.negatives = [get_data_path(e) for e in [
'empty',
'whitespace_only',
'fastq_multi_blank_start_of_file',
'fastq_multi_ws_lines_start_of_file',
'fastq_invalid_blank_after_header',
'fastq_invalid_blank_after_seq',
'fastq_invalid_blank_after_plus',
'fastq_invalid_blank_within_seq',
'fastq_invalid_blank_within_qual',
'fastq_invalid_ws_line_after_header',
'fastq_invalid_ws_line_after_seq',
'fastq_invalid_ws_line_after_plus',
'fastq_invalid_ws_line_within_seq',
'fastq_invalid_ws_line_within_qual',
'fastq_invalid_missing_header',
'fastq_invalid_missing_seq_data',
'error_diff_ids.fastq',
'error_double_qual.fastq',
'error_double_seq.fastq',
'error_long_qual.fastq',
'error_no_qual.fastq',
'error_qual_del.fastq',
'error_qual_escape.fastq',
'error_qual_null.fastq',
'error_qual_space.fastq',
'error_qual_tab.fastq',
'error_qual_unit_sep.fastq',
'error_qual_vtab.fastq',
'error_short_qual.fastq',
'error_spaces.fastq',
'error_tabs.fastq',
'error_trunc_at_seq.fastq',
'error_trunc_at_plus.fastq',
'error_trunc_at_qual.fastq',
'error_trunc_in_title.fastq',
'error_trunc_in_seq.fastq',
'error_trunc_in_plus.fastq',
'error_trunc_in_qual.fastq',
]]
def test_positives(self):
for fp in self.positives:
self.assertEqual(_fastq_sniffer(fp), (True, {}))
def test_negatives(self):
for fp in self.negatives:
self.assertEqual(_fastq_sniffer(fp), (False, {}))
class TestReaders(unittest.TestCase):
def setUp(self):
self.valid_configurations = [
([get_data_path('empty'),
get_data_path('whitespace_only')],
[{},
{'variant': 'illumina1.8'},
{'phred_offset': 33,
'constructor': DNA}],
[]),
([get_data_path('fastq_single_seq_illumina1.3')], [
{'variant': 'illumina1.3'},
{'phred_offset': 64},
{'variant': 'illumina1.3',
'constructor': Protein},
], [
('', 'bar\t baz', 'aCGT', [33, 34, 35, 36])
]),
([get_data_path('fastq_multi_seq_sanger'),
get_data_path('fastq_whitespace_only_lines'),
get_data_path('fastq_blank_lines'),
get_data_path('fastq_multi_blank_between_records'),
get_data_path('fastq_multi_ws_lines_between_records'),
get_data_path('fastq_multi_blank_end_of_file'),
get_data_path('fastq_multi_ws_lines_end_of_file'),
get_data_path('fastq_multi_blank_start_of_file'),
get_data_path('fastq_multi_ws_lines_start_of_file'),
get_data_path('fastq_multi_whitespace_stripping')], [
{'variant': 'sanger'},
{'phred_offset': 33, 'seq_num': 2},
{'variant': 'sanger',
'constructor': partial(RNA, validate=False),
'seq_num': 3},
], [
('foo', 'bar baz', 'AACCGG',
[16, 17, 18, 19, 20, 21]),
('bar', 'baz foo', 'TTGGCC',
[23, 22, 21, 20, 19, 18]),
('baz', 'foo bar', 'GATTTC',
[20, 21, 22, 23, 24, 18])
]),
]
self.invalid_files = [(get_data_path(e[0]), e[1], e[2]) for e in [
('fastq_invalid_blank_after_header', FASTQFormatError,
'blank or whitespace-only line.*after header.*in FASTQ'),
('fastq_invalid_blank_after_seq', FASTQFormatError,
"blank or whitespace-only line.*before '\+' in FASTQ"),
('fastq_invalid_blank_after_plus', FASTQFormatError,
"blank or whitespace-only line.*after '\+'.*in FASTQ"),
('fastq_invalid_blank_within_seq', FASTQFormatError,
'blank or whitespace-only line.*within sequence.*FASTQ'),
('fastq_invalid_blank_within_qual', FASTQFormatError,
"blank or whitespace-only line.*within quality scores.*in FASTQ"),
('fastq_invalid_ws_line_after_header', FASTQFormatError,
'blank or whitespace-only line.*after header.*in FASTQ'),
('fastq_invalid_ws_line_after_seq', FASTQFormatError,
"blank or whitespace-only line.*before '\+' in FASTQ"),
('fastq_invalid_ws_line_after_plus', FASTQFormatError,
"blank or whitespace-only line.*after '\+'.*in FASTQ"),
('fastq_invalid_ws_line_within_seq', FASTQFormatError,
'blank or whitespace-only line.*within sequence.*FASTQ'),
('fastq_invalid_ws_line_within_qual', FASTQFormatError,
"blank or whitespace-only line.*within quality scores.*in FASTQ"),
('fastq_invalid_missing_header', FASTQFormatError,
"sequence.*header.*start of file: 'seq1 desc1'"),
('fastq_invalid_missing_seq_data', FASTQFormatError,
'without sequence data'),
('error_diff_ids.fastq', FASTQFormatError,
"header lines do not match: "
"'SLXA-B3_649_FC8437_R1_1_1_850_123' != "
"'SLXA-B3_649_FC8437_R1_1_1_850_124'"),
('error_double_qual.fastq', FASTQFormatError,
"Extra quality.*'\+SLXA-B3_649_FC8437_R1_1_1_850_123'"),
('error_double_seq.fastq', FASTQFormatError,
'FASTQ record that is missing a quality \(\+\) header line'),
('error_long_qual.fastq', FASTQFormatError, "Extra quality.*'Y'"),
('error_no_qual.fastq', FASTQFormatError,
"blank or whitespace-only line.*after '\+'.*in FASTQ"),
('error_qual_del.fastq', ValueError,
'Decoded Phred score.*out of range'),
('error_qual_escape.fastq', ValueError,
'Decoded Phred score.*out of range'),
('error_qual_null.fastq', ValueError,
'Decoded Phred score.*out of range'),
('error_qual_space.fastq', ValueError,
'Decoded Phred score.*out of range'),
('error_qual_tab.fastq', ValueError,
'Decoded Phred score.*out of range'),
('error_qual_unit_sep.fastq', ValueError,
'Decoded Phred score.*out of range'),
('error_qual_vtab.fastq', ValueError,
'Decoded Phred score.*out of range'),
('error_short_qual.fastq', FASTQFormatError,
"Extra quality.*'SLXA-B3_649_FC8437_R1_1_1_362_549'"),
('error_spaces.fastq', FASTQFormatError,
"whitespace.*sequence data: 'GATGTGCAA TACCTTTGTA GAGGAA'"),
('error_tabs.fastq', FASTQFormatError,
r"whitespace.*sequence data: 'GATGTGCAA\\tTACCTTTGTA\\tGAGGAA'"),
('error_trunc_at_seq.fastq', FASTQFormatError,
'incomplete/truncated.*FASTQ'),
('error_trunc_at_plus.fastq', FASTQFormatError,
'incomplete/truncated.*FASTQ'),
('error_trunc_at_qual.fastq', FASTQFormatError,
'incomplete/truncated.*end of file'),
('error_trunc_in_title.fastq', FASTQFormatError,
'incomplete/truncated.*end of file'),
('error_trunc_in_seq.fastq', FASTQFormatError,
'incomplete/truncated.*end of file'),
('error_trunc_in_plus.fastq', FASTQFormatError,
"header lines do not match: "
"'SLXA-B3_649_FC8437_R1_1_1_183_714' != 'SLXA-B3_649_FC'"),
('error_trunc_in_qual.fastq', FASTQFormatError,
'incomplete/truncated.*end of file')
]]
def test_fastq_to_generator_valid_files(self):
for valid_files, kwargs, components in self.valid_configurations:
for valid in valid_files:
for observed_kwargs in kwargs:
_drop_kwargs(observed_kwargs, 'seq_num')
constructor = observed_kwargs.get('constructor', Sequence)
# Can't use partials for this because the read
# function below can't operate on partials
expected_kwargs = {}
if hasattr(constructor, 'lowercase'):
expected_kwargs['lowercase'] = 'introns'
observed_kwargs['lowercase'] = 'introns'
expected = [constructor(c[2],
metadata={'id': c[0],
'description': c[1]},
positional_metadata={'quality': np.array(c[3],
dtype=np.uint8)},
**expected_kwargs)
for c in components]
observed = list(_fastq_to_generator(valid,
**observed_kwargs))
self.assertEqual(len(expected), len(observed))
for o, e in zip(observed, expected):
self.assertEqual(o, e)
def test_fastq_to_generator_invalid_files_all_variants(self):
# files that should be invalid for all variants, as well as custom
# phred offsets
for fp, error_type, error_msg_regex in self.invalid_files:
for variant in 'sanger', 'illumina1.3', 'illumina1.8':
with self.assertRaisesRegexp(error_type, error_msg_regex):
list(_fastq_to_generator(fp, variant=variant))
for offset in 33, 64, 40, 77:
with self.assertRaisesRegexp(error_type, error_msg_regex):
list(_fastq_to_generator(fp, phred_offset=offset))
def test_fastq_to_generator_invalid_files_illumina(self):
# files that should be invalid for illumina1.3 and illumina1.8 variants
fps = [get_data_path(fp) for fp in
['sanger_full_range_original_sanger.fastq',
'solexa_full_range_original_solexa.fastq']]
for fp in fps:
with self.assertRaisesRegexp(ValueError, 'out of range \[0, 62\]'):
list(_fastq_to_generator(fp, variant='illumina1.3'))
with self.assertRaisesRegexp(ValueError, 'out of range \[0, 62\]'):
list(_fastq_to_generator(fp, variant='illumina1.8'))
def test_fastq_to_generator_solexa(self):
# solexa support isn't implemented yet. should raise error even with
# valid solexa file
with self.assertRaises(NotImplementedError):
list(_fastq_to_generator(
get_data_path('solexa_full_range_original_solexa.fastq'),
variant='solexa'))
def test_fastq_to_sequence(self):
for constructor in [Sequence, DNA, RNA, Protein]:
for valid_files, kwargs, components in self.valid_configurations:
for valid in valid_files:
# skip empty file case since we cannot read a specific
# sequencefrom an empty file
if len(components) == 0:
continue
for observed_kwargs in kwargs:
expected_kwargs = {}
# TODO:
# some of the test files contain characters which are
# invalid for RNA, so don't validate for now. Need to
# fix this
if constructor is RNA:
observed_kwargs['validate'] = False
expected_kwargs['validate'] = False
_drop_kwargs(observed_kwargs, 'constructor')
# Can't use partials for this because the read
# function below can't operate on partials
if hasattr(constructor, 'lowercase'):
expected_kwargs['lowercase'] = 'introns'
observed_kwargs['lowercase'] = 'introns'
seq_num = observed_kwargs.get('seq_num', 1)
c = components[seq_num - 1]
expected = \
constructor(
c[2], metadata={'id': c[0],
'description': c[1]},
positional_metadata={'quality': np.array(c[3],
dtype=np.uint8)},
**expected_kwargs)
observed = read(valid, into=constructor,
format='fastq', verify=False,
**observed_kwargs)
self.assertEqual(observed, expected)
def test_fastq_to_sequence_collection(self):
for valid_files, kwargs, components in self.valid_configurations:
for valid in valid_files:
for observed_kwargs in kwargs:
_drop_kwargs(observed_kwargs, 'seq_num')
constructor = observed_kwargs.get('constructor', Sequence)
# Can't use partials for this because the read
# function below can't operate on partials
expected_kwargs = {}
if hasattr(constructor, 'lowercase'):
expected_kwargs['lowercase'] = 'introns'
observed_kwargs['lowercase'] = 'introns'
expected = SequenceCollection(
[constructor(
c[2], metadata={'id': c[0], 'description': c[1]},
positional_metadata={'quality': np.array(c[3],
np.uint8)},
**expected_kwargs)
for c in components])
observed = _fastq_to_sequence_collection(valid,
**observed_kwargs)
self.assertEqual(observed, expected)
def test_fastq_to_alignment(self):
for valid_files, kwargs, components in self.valid_configurations:
for valid in valid_files:
for observed_kwargs in kwargs:
_drop_kwargs(observed_kwargs, 'seq_num')
constructor = observed_kwargs.get('constructor', Sequence)
# Can't use partials for this because the read
# function below can't operate on partials
expected_kwargs = {}
if hasattr(constructor, 'lowercase'):
expected_kwargs['lowercase'] = 'introns'
observed_kwargs['lowercase'] = 'introns'
expected = Alignment(
[constructor(
c[2], metadata={'id': c[0],
'description': c[1]},
positional_metadata={'quality': np.array(c[3],
dtype=np.uint8)},
**expected_kwargs)
for c in components])
observed = _fastq_to_alignment(valid, **observed_kwargs)
self.assertEqual(observed, expected)
class TestWriters(unittest.TestCase):
def setUp(self):
self.valid_files = [
([
('f o o', 'bar\n\nbaz', 'AaCcGg',
[16, 17, 18, 19, 20, 21]),
('bar', 'baz foo', 'TtGgCc',
[23, 22, 21, 20, 19, 18]),
('ba\n\t\tz', 'foo bar', 'gAtTtC',
[20, 21, 22, 23, 24, 18])
], [
({'variant': 'sanger'},
get_data_path('fastq_writer_sanger_defaults')),
({'phred_offset': 33},
get_data_path('fastq_writer_sanger_defaults')),
({'variant': 'illumina1.8'},
get_data_path('fastq_writer_sanger_defaults')),
({'variant': 'illumina1.3'},
get_data_path('fastq_writer_illumina1.3_defaults')),
({'variant': 'sanger', 'id_whitespace_replacement': '%',
'description_newline_replacement': '^'},
get_data_path('fastq_writer_sanger_non_defaults'))
]),
]
def test_generator_to_fastq_kwargs_passed(self):
for components, kwargs_expected_fp in self.valid_files:
for kwargs, expected_fp in kwargs_expected_fp:
def gen():
for c in components:
yield Sequence(
c[2], metadata={'id': c[0], 'description': c[1]},
positional_metadata={'quality': c[3]})
fh = StringIO()
_generator_to_fastq(gen(), fh, **kwargs)
observed = fh.getvalue()
fh.close()
with open(expected_fp, 'U') as f:
expected = f.read()
self.assertEqual(observed, expected)
def test_sequence_to_fastq_kwargs_passed(self):
for constructor in [Sequence, DNA, RNA, Protein]:
for components, kwargs_expected_fp in self.valid_files:
for expected_kwargs, expected_fp in kwargs_expected_fp:
observed_kwargs = {}
# TODO:
# some of the test files contain characters which are
# invalid for RNA, so don't validate for now. Need to
# fix this
if constructor is RNA:
observed_kwargs['validate'] = False
# Can't use partials for this because the read
# function below can't operate on partials
if hasattr(constructor, 'lowercase'):
expected_kwargs['lowercase'] = 'introns'
observed_kwargs['lowercase'] = 'introns'
fh = StringIO()
for c in components:
obj = constructor(
c[2],
metadata={'id': c[0], 'description': c[1]},
positional_metadata={'quality': c[3]},
**observed_kwargs)
write(obj, into=fh, format='fastq', **expected_kwargs)
observed = fh.getvalue()
fh.close()
with open(expected_fp, 'U') as f:
expected = f.read()
self.assertEqual(observed, expected)
def test_sequence_collection_to_fastq_kwargs_passed(self):
for components, kwargs_expected_fp in self.valid_files:
for kwargs, expected_fp in kwargs_expected_fp:
obj = SequenceCollection([
DNA(c[2], metadata={'id': c[0], 'description': c[1]},
positional_metadata={'quality': c[3]},
lowercase='introns')
for c in components])
fh = StringIO()
kwargs['lowercase'] = 'introns'
_sequence_collection_to_fastq(obj, fh, **kwargs)
observed = fh.getvalue()
fh.close()
with open(expected_fp, 'U') as f:
expected = f.read()
self.assertEqual(observed, expected)
def test_alignment_to_fastq_kwargs_passed(self):
for components, kwargs_expected_fp in self.valid_files:
for kwargs, expected_fp in kwargs_expected_fp:
obj = Alignment([
Protein(c[2], metadata={'id': c[0], 'description': c[1]},
positional_metadata={'quality': c[3]},
lowercase='introns')
for c in components])
fh = StringIO()
kwargs['lowercase'] = 'introns'
_alignment_to_fastq(obj, fh, **kwargs)
observed = fh.getvalue()
fh.close()
with open(expected_fp, 'U') as f:
expected = f.read()
self.assertEqual(observed, expected)
def test_generator_to_fastq_no_qual(self):
def gen():
yield Sequence('ACGT',
metadata={'id': 'foo', 'description': 'bar'},
positional_metadata={'quality': range(4)})
yield Sequence('ACG', metadata={'id': 'foo', 'description': 'bar'})
with self.assertRaisesRegexp(ValueError, '2nd.*quality scores'):
_generator_to_fastq(gen(), StringIO(), variant='illumina1.8')
class TestConversions(unittest.TestCase):
def setUp(self):
self.conversions = [
(get_data_path('empty'),
get_data_path('empty'), [
({'variant': 'sanger'}, {'phred_offset': 42}),
]),
<|fim▁hole|> get_data_path('longreads_as_sanger.fastq'), [
({'variant': 'sanger'}, {'variant': 'sanger'}),
({'phred_offset': 33}, {'variant': 'sanger'}),
({'variant': 'sanger'}, {'phred_offset': 33})
]),
(get_data_path('longreads_original_sanger.fastq'),
get_data_path('longreads_as_illumina.fastq'), [
({'variant': 'sanger'}, {'variant': 'illumina1.3'}),
({'phred_offset': 33}, {'variant': 'illumina1.3'}),
({'variant': 'sanger'}, {'phred_offset': 64})
]),
(get_data_path('wrapping_original_sanger.fastq'),
get_data_path('wrapping_as_sanger.fastq'), [
({'variant': 'sanger'}, {'variant': 'sanger'}),
({'phred_offset': 33}, {'variant': 'sanger'}),
({'variant': 'sanger'}, {'phred_offset': 33})
]),
(get_data_path('wrapping_original_sanger.fastq'),
get_data_path('wrapping_as_illumina.fastq'), [
({'variant': 'sanger'}, {'variant': 'illumina1.3'}),
({'phred_offset': 33}, {'variant': 'illumina1.3'}),
({'variant': 'sanger'}, {'phred_offset': 64})
]),
(get_data_path('sanger_full_range_original_sanger.fastq'),
get_data_path('sanger_full_range_as_sanger.fastq'), [
({'variant': 'sanger'}, {'variant': 'sanger'}),
({'phred_offset': 33}, {'variant': 'sanger'}),
({'variant': 'sanger'}, {'phred_offset': 33})
]),
(get_data_path('sanger_full_range_original_sanger.fastq'),
get_data_path('sanger_full_range_as_illumina.fastq'), [
({'variant': 'sanger'}, {'variant': 'illumina1.3'}),
({'phred_offset': 33}, {'variant': 'illumina1.3'}),
({'variant': 'sanger'}, {'phred_offset': 64})
]),
(get_data_path('illumina_full_range_original_illumina.fastq'),
get_data_path('illumina_full_range_as_illumina.fastq'), [
({'variant': 'illumina1.3'}, {'variant': 'illumina1.3'}),
({'phred_offset': 64}, {'variant': 'illumina1.3'}),
({'variant': 'illumina1.3'}, {'phred_offset': 64})
]),
(get_data_path('illumina_full_range_original_illumina.fastq'),
get_data_path('illumina_full_range_as_sanger.fastq'), [
({'variant': 'illumina1.3'}, {'variant': 'sanger'}),
({'phred_offset': 64}, {'variant': 'sanger'}),
({'variant': 'illumina1.3'}, {'phred_offset': 33})
]),
(get_data_path('misc_dna_original_sanger.fastq'),
get_data_path('misc_dna_as_sanger.fastq'), [
({'variant': 'sanger'}, {'variant': 'sanger'}),
({'phred_offset': 33}, {'variant': 'sanger'}),
({'variant': 'sanger'}, {'phred_offset': 33})
]),
(get_data_path('misc_dna_original_sanger.fastq'),
get_data_path('misc_dna_as_illumina.fastq'), [
({'variant': 'sanger'}, {'variant': 'illumina1.3'}),
({'phred_offset': 33}, {'variant': 'illumina1.3'}),
({'variant': 'sanger'}, {'phred_offset': 64})
]),
(get_data_path('misc_rna_original_sanger.fastq'),
get_data_path('misc_rna_as_sanger.fastq'), [
({'variant': 'sanger'}, {'variant': 'sanger'}),
({'phred_offset': 33}, {'variant': 'sanger'}),
({'variant': 'sanger'}, {'phred_offset': 33})
]),
(get_data_path('misc_rna_original_sanger.fastq'),
get_data_path('misc_rna_as_illumina.fastq'), [
({'variant': 'sanger'}, {'variant': 'illumina1.3'}),
({'phred_offset': 33}, {'variant': 'illumina1.3'}),
({'variant': 'sanger'}, {'phred_offset': 64})
]),
(get_data_path('fastq_wrapping_original_sanger_no_description'),
get_data_path('fastq_wrapping_as_sanger_no_description'), [
({'variant': 'sanger'}, {'variant': 'sanger'}),
({'phred_offset': 33}, {'variant': 'sanger'}),
({'variant': 'sanger'}, {'phred_offset': 33})
]),
(get_data_path('fastq_wrapping_original_sanger_no_description'),
get_data_path('fastq_wrapping_as_illumina_no_description'), [
({'variant': 'sanger'}, {'variant': 'illumina1.3'}),
({'phred_offset': 33}, {'variant': 'illumina1.3'}),
({'variant': 'sanger'}, {'phred_offset': 64})
]),
]
def test_conversion(self):
for from_fp, to_fp, kwargs in self.conversions:
for from_kwargs, to_kwargs in kwargs:
read_gen = _fastq_to_generator(from_fp, **from_kwargs)
fh = StringIO()
# will issue warning when truncating quality scores
with warnings.catch_warnings(record=True):
warnings.simplefilter("ignore")
_generator_to_fastq(read_gen, fh, **to_kwargs)
obs = fh.getvalue()
fh.close()
with open(to_fp, 'U') as fh:
exp = fh.read()
self.assertEqual(obs, exp)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | (get_data_path('longreads_original_sanger.fastq'), |
<|file_name|>csssupportsrule.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::{Parser, ParserInput};<|fim▁hole|>use dom::bindings::codegen::Bindings::WindowBinding::WindowBinding::WindowMethods;
use dom::bindings::reflector::{DomObject, reflect_dom_object};
use dom::bindings::root::DomRoot;
use dom::bindings::str::DOMString;
use dom::cssconditionrule::CSSConditionRule;
use dom::cssrule::SpecificCSSRule;
use dom::cssstylesheet::CSSStyleSheet;
use dom::window::Window;
use dom_struct::dom_struct;
use servo_arc::Arc;
use style::parser::ParserContext;
use style::shared_lock::{Locked, ToCssWithGuard};
use style::stylesheets::{CssRuleType, SupportsRule};
use style::stylesheets::supports_rule::SupportsCondition;
use style_traits::{ParsingMode, ToCss};
#[dom_struct]
pub struct CSSSupportsRule {
cssconditionrule: CSSConditionRule,
#[ignore_malloc_size_of = "Arc"]
supportsrule: Arc<Locked<SupportsRule>>,
}
impl CSSSupportsRule {
fn new_inherited(
parent_stylesheet: &CSSStyleSheet,
supportsrule: Arc<Locked<SupportsRule>>,
) -> CSSSupportsRule {
let guard = parent_stylesheet.shared_lock().read();
let list = supportsrule.read_with(&guard).rules.clone();
CSSSupportsRule {
cssconditionrule: CSSConditionRule::new_inherited(parent_stylesheet, list),
supportsrule: supportsrule,
}
}
#[allow(unrooted_must_root)]
pub fn new(
window: &Window,
parent_stylesheet: &CSSStyleSheet,
supportsrule: Arc<Locked<SupportsRule>>,
) -> DomRoot<CSSSupportsRule> {
reflect_dom_object(
Box::new(CSSSupportsRule::new_inherited(
parent_stylesheet,
supportsrule,
)),
window,
CSSSupportsRuleBinding::Wrap,
)
}
/// <https://drafts.csswg.org/css-conditional-3/#the-csssupportsrule-interface>
pub fn get_condition_text(&self) -> DOMString {
let guard = self.cssconditionrule.shared_lock().read();
let rule = self.supportsrule.read_with(&guard);
rule.condition.to_css_string().into()
}
/// <https://drafts.csswg.org/css-conditional-3/#the-csssupportsrule-interface>
pub fn set_condition_text(&self, text: DOMString) {
let mut input = ParserInput::new(&text);
let mut input = Parser::new(&mut input);
let cond = SupportsCondition::parse(&mut input);
if let Ok(cond) = cond {
let global = self.global();
let win = global.as_window();
let url = win.Document().url();
let quirks_mode = win.Document().quirks_mode();
let context = ParserContext::new_for_cssom(
&url,
Some(CssRuleType::Supports),
ParsingMode::DEFAULT,
quirks_mode,
None,
None,
);
let enabled = {
let namespaces =
self
.cssconditionrule
.parent_stylesheet()
.style_stylesheet()
.contents
.namespaces
.read();
cond.eval(&context, &namespaces)
};
let mut guard = self.cssconditionrule.shared_lock().write();
let rule = self.supportsrule.write_with(&mut guard);
rule.condition = cond;
rule.enabled = enabled;
}
}
}
impl SpecificCSSRule for CSSSupportsRule {
fn ty(&self) -> u16 {
use dom::bindings::codegen::Bindings::CSSRuleBinding::CSSRuleConstants;
CSSRuleConstants::SUPPORTS_RULE
}
fn get_css(&self) -> DOMString {
let guard = self.cssconditionrule.shared_lock().read();
self.supportsrule
.read_with(&guard)
.to_css_string(&guard)
.into()
}
}<|fim▁end|> | use dom::bindings::codegen::Bindings::CSSSupportsRuleBinding; |
<|file_name|>refcounted.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A generic, safe mechanism by which DOM objects can be pinned and transferred
//! between threads (or intra-thread for asynchronous events). Akin to Gecko's
//! nsMainThreadPtrHandle, this uses thread-safe reference counting and ensures
//! that the actual SpiderMonkey GC integration occurs on the script thread via
//! weak refcounts. Ownership of a `Trusted<T>` object means the DOM object of
//! type T to which it points remains alive. Any other behaviour is undefined.
//! To guarantee the lifetime of a DOM object when performing asynchronous operations,
//! obtain a `Trusted<T>` from that object and pass it along with each operation.
//! A usable pointer to the original DOM object can be obtained on the script thread
//! from a `Trusted<T>` via the `root` method.
//!
//! The implementation of `Trusted<T>` is as follows:
//! The `Trusted<T>` object contains an atomic reference counted pointer to the Rust DOM object.
//! A hashtable resides in the script thread, keyed on the pointer.
//! The values in this hashtable are weak reference counts. When a `Trusted<T>` object is
//! created or cloned, the reference count is increased. When a `Trusted<T>` is dropped, the count
//! decreases. If the count hits zero, the weak reference is emptied, and is removed from
//! its hash table during the next GC. During GC, the entries of the hash table are counted
//! as JS roots.
use dom::bindings::conversions::ToJSValConvertible;
use dom::bindings::error::Error;
use dom::bindings::reflector::{DomObject, Reflector};
use dom::bindings::root::DomRoot;
use dom::bindings::trace::trace_reflector;
use dom::promise::Promise;
use js::jsapi::JSTracer;
use libc;
use std::cell::RefCell;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::collections::hash_map::HashMap;
use std::hash::Hash;
use std::marker::PhantomData;
use std::os;
use std::rc::Rc;
use std::sync::{Arc, Weak};
use task::TaskOnce;
#[allow(missing_docs)] // FIXME
mod dummy { // Attributes don’t apply through the macro.
use std::cell::RefCell;
use std::rc::Rc;
use super::LiveDOMReferences;
thread_local!(pub static LIVE_REFERENCES: Rc<RefCell<Option<LiveDOMReferences>>> =
Rc::new(RefCell::new(None)));
}
pub use self::dummy::LIVE_REFERENCES;
/// A pointer to a Rust DOM object that needs to be destroyed.
pub struct TrustedReference(*const libc::c_void);
unsafe impl Send for TrustedReference {}
impl TrustedReference {
fn new<T: DomObject>(ptr: *const T) -> TrustedReference {
TrustedReference(ptr as *const libc::c_void)
}
}
/// A safe wrapper around a DOM Promise object that can be shared among threads for use
/// in asynchronous operations. The underlying DOM object is guaranteed to live at least
/// as long as the last outstanding `TrustedPromise` instance. These values cannot be cloned,
/// only created from existing Rc<Promise> values.
pub struct TrustedPromise {
dom_object: *const Promise,
owner_thread: *const libc::c_void,
}
unsafe impl Send for TrustedPromise {}
impl TrustedPromise {
/// Create a new `TrustedPromise` instance from an existing DOM object. The object will
/// be prevented from being GCed for the duration of the resulting `TrustedPromise` object's
/// lifetime.
#[allow(unrooted_must_root)]
pub fn new(promise: Rc<Promise>) -> TrustedPromise {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let ptr = &*promise as *const Promise;
live_references.addref_promise(promise);
TrustedPromise {
dom_object: ptr,
owner_thread: (&*live_references) as *const _ as *const libc::c_void,
}
})
}
/// Obtain a usable DOM Promise from a pinned `TrustedPromise` value. Fails if used on
/// a different thread than the original value from which this `TrustedPromise` was
/// obtained.
#[allow(unrooted_must_root)]
pub fn root(self) -> Rc<Promise> {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
assert!(self.owner_thread == (&*live_references) as *const _ as *const libc::c_void);
// Borrow-check error requires the redundant `let promise = ...; promise` here.
let promise = match live_references.promise_table.borrow_mut().entry(self.dom_object) {
Occupied(mut entry) => {
let promise = {
let promises = entry.get_mut();
promises.pop().expect("rooted promise list unexpectedly empty")
};
if entry.get().is_empty() {
entry.remove();
}
promise
}
Vacant(_) => unreachable!(),
};
promise
})
}
/// A task which will reject the promise.
#[allow(unrooted_must_root)]
pub fn reject_task(self, error: Error) -> impl TaskOnce {
let this = self;
task!(reject_promise: move || {
debug!("Rejecting promise.");
this.root().reject_error(error);
})
}
/// A task which will resolve the promise.
#[allow(unrooted_must_root)]
pub fn resolve_task<T>(self, value: T) -> impl TaskOnce
where
T: ToJSValConvertible + Send,
{
let this = self;
task!(resolve_promise: move || {
debug!("Resolving promise.");
this.root().resolve_native(&value);
})
}
}
/// A safe wrapper around a raw pointer to a DOM object that can be
/// shared among threads for use in asynchronous operations. The underlying
/// DOM object is guaranteed to live at least as long as the last outstanding
/// `Trusted<T>` instance.
#[allow_unrooted_interior]
pub struct Trusted<T: DomObject> {
/// A pointer to the Rust DOM object of type T, but void to allow
/// sending `Trusted<T>` between threads, regardless of T's sendability.
refcount: Arc<TrustedReference>,
owner_thread: *const libc::c_void,
phantom: PhantomData<T>,
}
unsafe impl<T: DomObject> Send for Trusted<T> {}
impl<T: DomObject> Trusted<T> {
/// Create a new `Trusted<T>` instance from an existing DOM pointer. The DOM object will
/// be prevented from being GCed for the duration of the resulting `Trusted<T>` object's
/// lifetime.
pub fn new(ptr: &T) -> Trusted<T> {
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
let refcount = live_references.addref(&*ptr as *const T);
Trusted {
refcount: refcount,
owner_thread: (&*live_references) as *const _ as *const libc::c_void,
phantom: PhantomData,
}
})
}
/// Obtain a usable DOM pointer from a pinned `Trusted<T>` value. Fails if used on
/// a different thread than the original value from which this `Trusted<T>` was
/// obtained.
pub fn root(&self) -> DomRoot<T> {
assert!(LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
self.owner_thread == (&*live_references) as *const _ as *const libc::c_void
}));
unsafe {
DomRoot::from_ref(&*(self.refcount.0 as *const T))
}
}
}
impl<T: DomObject> Clone for Trusted<T> {
fn clone(&self) -> Trusted<T> {
Trusted {
refcount: self.refcount.clone(),
owner_thread: self.owner_thread,
phantom: PhantomData,
}
}
}
/// The set of live, pinned DOM objects that are currently prevented
/// from being garbage collected due to outstanding references.
#[allow(unrooted_must_root)]
pub struct LiveDOMReferences {
// keyed on pointer to Rust DOM object
reflectable_table: RefCell<HashMap<*const libc::c_void, Weak<TrustedReference>>>,
promise_table: RefCell<HashMap<*const Promise, Vec<Rc<Promise>>>>,
}
impl LiveDOMReferences {
/// Set up the thread-local data required for storing the outstanding DOM references.
pub fn initialize() {
LIVE_REFERENCES.with(|ref r| {
*r.borrow_mut() = Some(LiveDOMReferences {
reflectable_table: RefCell::new(HashMap::new()),
promise_table: RefCell::new(HashMap::new()),
})
});
}
#[allow(unrooted_must_root)]
fn addref_promise(&self, promise: Rc<Promise>) {
let mut table = self.promise_table.borrow_mut();
table.entry(&*promise).or_insert(vec![]).push(promise)
}
fn addref<T: DomObject>(&self, ptr: *const T) -> Arc<TrustedReference> {
let mut table = self.reflectable_table.borrow_mut();
let capacity = table.capacity();
let len = table.len();
if (0 < capacity) && (capacity <= len) {
info!("growing refcounted references by {}", len);
remove_nulls(&mut table);
table.reserve(len);
}
match table.entry(ptr as *const libc::c_void) {
Occupied(mut entry) => match entry.get().upgrade() {
Some(refcount) => refcount,
None => {
let refcount = Arc::new(TrustedReference::new(ptr));
entry.insert(Arc::downgrade(&refcount));
refcount
},
},
Vacant(entry) => {
let refcount = Arc::new(TrustedReference::new(ptr));
entry.insert(Arc::downgrade(&refcount));
refcount
}
}
}
}
/// Remove null entries from the live references table
fn remove_nulls<K: Eq + Hash + Clone, V> (table: &mut HashMap<K, Weak<V>>) {
let to_remove: Vec<K> =
table.iter()
.filter(|&(_, value)| Weak::upgrade(value).is_none())
.map(|(key, _)| key.clone())
.collect();
info!("removing {} refcounted references", to_remove.len());
for key in to_remove {
table.remove(&key);
}
}
/// A JSTraceDataOp for tracing reflectors held in LIVE_REFERENCES
#[allow(unrooted_must_root)]
pub unsafe extern "C" fn trace_refcounted_objects(tracer: *mut JSTracer,
_data: *mut os::raw::c_void) {
info!("tracing live refcounted references");
LIVE_REFERENCES.with(|ref r| {
let r = r.borrow();
let live_references = r.as_ref().unwrap();
{
let mut table = live_references.reflectable_table.borrow_mut();
remove_nulls(&mut table);
for obj in table.keys() {
let reflectable = &*(*obj as *const Reflector);
trace_reflector(tracer, "refcounted", reflectable);
}
}
<|fim▁hole|> for promise in table.keys() {
trace_reflector(tracer, "refcounted", (**promise).reflector());
}
}
});
}<|fim▁end|> | {
let table = live_references.promise_table.borrow_mut(); |
<|file_name|>replication_lib.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2019 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""An implementation of the ReplicationConfig proto interface."""
from __future__ import print_function
import json
import os
import shutil
import sys
from chromite.api.gen.config import replication_config_pb2
from chromite.lib import constants
from chromite.lib import cros_logging as logging
from chromite.lib import osutils
from chromite.utils import field_mask_util
assert sys.version_info >= (3, 6), 'This module requires Python 3.6+'
def _ValidateFileReplicationRule(rule):
"""Raises an error if a FileReplicationRule is invalid.
For example, checks that if REPLICATION_TYPE_FILTER, destination_fields
are specified.
Args:
rule: (FileReplicationRule) The rule to validate.
"""
if rule.file_type == replication_config_pb2.FILE_TYPE_JSON:
if rule.replication_type != replication_config_pb2.REPLICATION_TYPE_FILTER:
raise ValueError(
'Rule for JSON source %s must use REPLICATION_TYPE_FILTER.' %
rule.source_path)
elif rule.file_type == replication_config_pb2.FILE_TYPE_OTHER:
if rule.replication_type != replication_config_pb2.REPLICATION_TYPE_COPY:
raise ValueError('Rule for source %s must use REPLICATION_TYPE_COPY.' %
rule.source_path)
else:
raise NotImplementedError('Replicate not implemented for file type %s' %
rule.file_type)
if rule.replication_type == replication_config_pb2.REPLICATION_TYPE_COPY:
if rule.destination_fields.paths:
raise ValueError(
'Rule with REPLICATION_TYPE_COPY cannot use destination_fields.')
elif rule.replication_type == replication_config_pb2.REPLICATION_TYPE_FILTER:
if not rule.destination_fields.paths:<|fim▁hole|> raise ValueError(
'Rule with REPLICATION_TYPE_FILTER must use destination_fields.')
else:
raise NotImplementedError(
'Replicate not implemented for replication type %s' %
rule.replication_type)
if os.path.isabs(rule.source_path) or os.path.isabs(rule.destination_path):
raise ValueError(
'Only paths relative to the source root are allowed. In rule: %s' %
rule)
def _ApplyStringReplacementRules(destination_path, rules):
"""Read the file at destination path, apply rules, and write a new file.
Args:
destination_path: (str) Path to the destination file to read. The new file
will also be written at this path.
rules: (list[StringReplacementRule]) Rules to apply. Must not be empty.
"""
assert rules
with open(destination_path, 'r') as f:
dst_data = f.read()
for string_replacement_rule in rules:
dst_data = dst_data.replace(string_replacement_rule.before,
string_replacement_rule.after)
with open(destination_path, 'w') as f:
f.write(dst_data)
def Replicate(replication_config):
"""Run the replication described in replication_config.
Args:
replication_config: (ReplicationConfig) Describes the replication to run.
"""
# Validate all rules before any of them are run, to decrease chance of ending
# with a partial replication.
for rule in replication_config.file_replication_rules:
_ValidateFileReplicationRule(rule)
for rule in replication_config.file_replication_rules:
logging.info('Processing FileReplicationRule: %s', rule)
src = os.path.join(constants.SOURCE_ROOT, rule.source_path)
dst = os.path.join(constants.SOURCE_ROOT, rule.destination_path)
osutils.SafeMakedirs(os.path.dirname(dst))
if rule.file_type == replication_config_pb2.FILE_TYPE_JSON:
assert (rule.replication_type ==
replication_config_pb2.REPLICATION_TYPE_FILTER)
assert rule.destination_fields.paths
with open(src, 'r') as f:
source_json = json.load(f)
try:
source_device_configs = source_json['chromeos']['configs']
except KeyError:
raise NotImplementedError(
('Currently only ChromeOS Configs are supported (expected file %s '
'to have a list at "$.chromeos.configs")') % src)
destination_device_configs = []
for source_device_config in source_device_configs:
destination_device_configs.append(
field_mask_util.CreateFilteredDict(rule.destination_fields,
source_device_config))
destination_json = {'chromeos': {'configs': destination_device_configs}}
logging.info('Writing filtered JSON source to %s', dst)
with open(dst, 'w') as f:
# Use the print function, so the file ends in a newline.
print(
json.dumps(
destination_json,
sort_keys=True,
indent=2,
separators=(',', ': ')),
file=f)
else:
assert rule.file_type == replication_config_pb2.FILE_TYPE_OTHER
assert (
rule.replication_type == replication_config_pb2.REPLICATION_TYPE_COPY)
assert not rule.destination_fields.paths
logging.info('Copying full file from %s to %s', src, dst)
shutil.copy2(src, dst)
if rule.string_replacement_rules:
_ApplyStringReplacementRules(dst, rule.string_replacement_rules)<|fim▁end|> | |
<|file_name|>JdkHelper.ts<|end_file_name|><|fim▁begin|>/*
* Copyright 2019 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
import {existsSync, promises as fsPromises} from 'fs';
import {Config} from '../Config';
import * as path from 'path';
import {executeFile} from '../util';
import {Result} from '../Result';
import {ValidatePathError} from '../errors/ValidatePathError';
type JoinPathFunction = (...paths: string[]) => string;
/**
* Helps getting information relevant to the JDK installed, including
* the approprite environment needed to run Java commands on the JDK
*/
export class JdkHelper {
private process: NodeJS.Process;
private config: Config;
private joinPath: JoinPathFunction;
private pathSeparator: string;
private pathEnvironmentKey: string;
/**
* Constructs a new instance of JdkHelper.
*
* @param {NodeJS.Process} process information from the OS process
* @param {Config} config the bubblewrap general configuration
*/
constructor(process: NodeJS.Process, config: Config) {
this.process = process;
this.config = config;
if (process.platform === 'win32') {
this.joinPath = path.win32.join;
this.pathSeparator = ';';
this.pathEnvironmentKey = 'Path';
} else {
this.joinPath = path.posix.join;
this.pathSeparator = ':';
this.pathEnvironmentKey = 'PATH';
}
}
/**
* Runs the `java` command, passing args as parameters.
*/
async runJava(args: string[]): Promise<{stdout: string; stderr: string}> {
const java = this.process.platform === 'win32' ? '/bin/java.exe' : '/bin/java';
const runJavaCmd = this.joinPath(this.getJavaHome(),
java);
return await executeFile(runJavaCmd, args, this.getEnv());
}
/**
* Returns information from the JAVA_HOME, based on the config and platform.
*/
getJavaHome(): string {
return JdkHelper.getJavaHome(this.config.jdkPath, this.process);
}
/**
* Returns information from the JAVA_HOME, based on the config and platform.
* @param {Config} config The bubblewrap general configuration
* @param {NodeJS.Process} process Information from the OS process
*/
static getJavaHome(jdkPath: string, process: NodeJS.Process): string {
const joinPath = (process.platform === 'win32') ? path.win32.join : path.posix.join;
if (process.platform === 'darwin') {
return joinPath(jdkPath, '/Contents/Home/');
} else if (process.platform === 'linux' || process.platform === 'win32') {
return joinPath(jdkPath, '/');
}
throw new Error(`Unsupported Platform: ${process.platform}`);
}
private static getJoinPath(process: NodeJS.Process): JoinPathFunction {
switch (process.platform) {
case 'win32': return path.win32.join;
default: return path.posix.join;
}
}
/**
* Checks if the given jdkPath is valid.
* @param {string} jdkPath the path to the jdk.
*/
static async validatePath(jdkPath: string, currentProcess: NodeJS.Process = process):<|fim▁hole|> const join = JdkHelper.getJoinPath(currentProcess);
if (!existsSync(jdkPath)) {
return Result.error(new ValidatePathError(
`jdkPath "${jdkPath}" does not exist.`, 'PathIsNotCorrect'));
};
const javaHome = JdkHelper.getJavaHome(jdkPath, currentProcess);
try {
const releaseFilePath = join(javaHome, 'release');
const file = await fsPromises.readFile(releaseFilePath, 'utf-8');
if (file.indexOf('JAVA_VERSION="1.8') < 0) { // Checks if the jdk's version is 8 as needed
return Result.error(new ValidatePathError(
'JDK version not supported. JDK version 1.8 is required.', 'PathIsNotSupported'));
}
} catch (e) {
return Result.error(new ValidatePathError(
`Error reading the "release" file for the JDK at "${jdkPath}", with error: ${e} `,
'PathIsNotCorrect'));
}
return Result.ok(jdkPath);
}
/**
* Returns information from the Java executable location, based on the config and platform.
* @returns {string} the value where the Java executables can be found
*/
getJavaBin(): string {
return this.joinPath(this.getJavaHome(), 'bin/');
}
/**
* Returns a copy of process.env, customized with the correct JAVA_HOME and PATH.
* @returns {NodeJS.ProcessEnv} an env object configure to run JDK commands
*/
getEnv(): NodeJS.ProcessEnv {
const env: NodeJS.ProcessEnv = Object.assign({}, this.process.env);
env['JAVA_HOME'] = this.getJavaHome();
// Concatenates the Java binary path to the existing PATH environment variable.
env[this.pathEnvironmentKey] =
this.getJavaBin() + this.pathSeparator + env[this.pathEnvironmentKey];
return env;
}
}<|fim▁end|> | Promise<Result<string, ValidatePathError>> { |
<|file_name|>proxyhandler.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Utilities for the implementation of JSAPI proxy handlers.
#![deny(missing_docs)]
use dom::bindings::conversions::is_dom_proxy;
use dom::bindings::utils::delete_property_by_id;
use js::glue::GetProxyExtra;
use js::glue::InvokeGetOwnPropertyDescriptor;<|fim▁hole|>use js::jsapi::{JSContext, JSObject, JSPropertyDescriptor};
use js::jsapi::{JSErrNum, JS_StrictPropertyStub};
use js::jsapi::{JS_DefinePropertyById6, JS_NewObjectWithGivenProto};
use js::jsapi::{JS_GetPropertyDescriptorById};
use js::jsval::ObjectValue;
use js::{JSFalse, JSTrue};
use js::{JSPROP_ENUMERATE, JSPROP_GETTER, JSPROP_READONLY};
use libc;
use std::{mem, ptr};
static JSPROXYSLOT_EXPANDO: u32 = 0;
/// Invoke the [[GetOwnProperty]] trap (`getOwnPropertyDescriptor`) on `proxy`,
/// with argument `id` and return the result, if it is not `undefined`.
/// Otherwise, walk along the prototype chain to find a property with that
/// name.
pub unsafe extern fn get_property_descriptor(cx: *mut JSContext,
proxy: HandleObject,
id: HandleId,
desc: MutableHandle<JSPropertyDescriptor>)
-> u8 {
let handler = GetProxyHandler(proxy.get());
if InvokeGetOwnPropertyDescriptor(handler, cx, proxy, id, desc) == 0 {
return JSFalse;
}
if !desc.get().obj.is_null() {
return JSTrue;
}
let mut proto = RootedObject::new(cx, ptr::null_mut());
if GetObjectProto(cx, proxy, proto.handle_mut()) == 0 {
desc.get().obj = ptr::null_mut();
return JSTrue;
}
JS_GetPropertyDescriptorById(cx, proto.handle(), id, desc)
}
/// Defines an expando on the given `proxy`.
pub unsafe extern fn define_property(cx: *mut JSContext, proxy: HandleObject,
id: HandleId, desc: Handle<JSPropertyDescriptor>,
result: *mut ObjectOpResult)
-> u8 {
//FIXME: Workaround for https://github.com/mozilla/rust/issues/13385
let setter: *const libc::c_void = mem::transmute(desc.get().setter);
let setter_stub: *const libc::c_void = mem::transmute(JS_StrictPropertyStub);
if (desc.get().attrs & JSPROP_GETTER) != 0 && setter == setter_stub {
(*result).code_ = JSErrNum::JSMSG_GETTER_ONLY as u32;
return JSTrue;
}
let expando = RootedObject::new(cx, ensure_expando_object(cx, proxy));
JS_DefinePropertyById6(cx, expando.handle(), id, desc, result)
}
/// Deletes an expando off the given `proxy`.
pub unsafe extern fn delete(cx: *mut JSContext, proxy: HandleObject, id: HandleId,
bp: *mut ObjectOpResult) -> u8 {
let expando = RootedObject::new(cx, get_expando_object(proxy));
if expando.ptr.is_null() {
(*bp).code_ = 0 /* OkCode */;
return JSTrue;
}
delete_property_by_id(cx, expando.handle(), id, bp)
}
/// Controls whether the Extensible bit can be changed
pub unsafe extern fn prevent_extensions(_cx: *mut JSContext,
_proxy: HandleObject,
result: *mut ObjectOpResult) -> u8 {
(*result).code_ = JSErrNum::JSMSG_CANT_PREVENT_EXTENSIONS as u32;
JSTrue
}
/// Reports whether the object is Extensible
pub unsafe extern fn is_extensible(_cx: *mut JSContext, _proxy: HandleObject,
succeeded: *mut u8) -> u8 {
*succeeded = JSTrue;
JSTrue
}
/// Get the expando object, or null if there is none.
pub fn get_expando_object(obj: HandleObject) -> *mut JSObject {
unsafe {
assert!(is_dom_proxy(obj.get()));
let val = GetProxyExtra(obj.get(), JSPROXYSLOT_EXPANDO);
if val.is_undefined() {
ptr::null_mut()
} else {
val.to_object()
}
}
}
/// Get the expando object, or create it if it doesn't exist yet.
/// Fails on JSAPI failure.
pub fn ensure_expando_object(cx: *mut JSContext, obj: HandleObject)
-> *mut JSObject {
unsafe {
assert!(is_dom_proxy(obj.get()));
let mut expando = get_expando_object(obj);
if expando.is_null() {
expando = JS_NewObjectWithGivenProto(cx, ptr::null_mut(), HandleObject::null());
assert!(!expando.is_null());
SetProxyExtra(obj.get(), JSPROXYSLOT_EXPANDO, ObjectValue(&*expando));
}
expando
}
}
/// Set the property descriptor's object to `obj` and set it to enumerable,
/// and writable if `readonly` is true.
pub fn fill_property_descriptor(desc: &mut JSPropertyDescriptor,
obj: *mut JSObject, readonly: bool) {
desc.obj = obj;
desc.attrs = if readonly { JSPROP_READONLY } else { 0 } | JSPROP_ENUMERATE;
desc.getter = None;
desc.setter = None;
}<|fim▁end|> | use js::glue::{GetProxyHandler, SetProxyExtra};
use js::jsapi::GetObjectProto;
use js::jsapi::{Handle, HandleId, HandleObject, MutableHandle, ObjectOpResult, RootedObject}; |
<|file_name|>archive.js<|end_file_name|><|fim▁begin|>import React from "react"
import { Link, graphql } from "gatsby"
import Bio from "../components/bio"
import Layout from "../components/layout"
import SEO from "../components/seo"
import { rhythm } from "../utils/typography"
class BlogIndex extends React.Component {
render() {
const { data } = this.props
const siteTitle = data.site.siteMetadata.title
const posts = data.allMarkdownRemark.edges
return (
<Layout location={this.props.location} title={siteTitle}>
<SEO
title="All posts"
keywords={[`blog`, `gatsby`, `javascript`, `react`]}
/>
<Bio />
{posts.map(({ node }) => {
const title = node.frontmatter.title || node.fields.slug;
var desc = node.frontmatter.excerpt || node.frontmatter.description;
if (!desc || desc.length === 0)
{
desc = node.excerpt;
}
return (
<div key={node.fields.slug}>
<h3
style={{
marginBottom: rhythm(1 / 4),
}}
>
<Link style={{ boxShadow: `none` }} to={node.fields.slug}>
{title}
</Link>
</h3>
<small>{node.frontmatter.date}</small>
<p
dangerouslySetInnerHTML={{
__html: desc
}}
/>
</div>
)
})}
</Layout>
)
}
}<|fim▁hole|>export default BlogIndex
export const pageQuery = graphql`
query {
site {
siteMetadata {
title
}
}
allMarkdownRemark(sort: { fields: [frontmatter___date], order: DESC }) {
edges {
node {
excerpt
fields {
slug
}
frontmatter {
date(formatString: "MMMM DD, YYYY")
title
description
excerpt
}
}
}
}
}
`<|fim▁end|> | |
<|file_name|>zepto.js<|end_file_name|><|fim▁begin|>/*!
* CanJS - 1.1.4 (2013-02-05)
* http://canjs.us/
* Copyright (c) 2013 Bitovi
* Licensed MIT
*/
define(['can/util/can', 'zepto', 'can/util/object/isplain', 'can/util/event', 'can/util/fragment', 'can/util/deferred', 'can/util/array/each'], function (can) {
var $ = Zepto;
// data.js
// ---------
// _jQuery-like data methods._
var data = {},
dataAttr = $.fn.data,
uuid = $.uuid = +new Date(),
exp = $.expando = 'Zepto' + uuid;
function getData(node, name) {
var id = node[exp],
store = id && data[id];
return name === undefined ? store || setData(node) : (store && store[name]) || dataAttr.call($(node), name);
}
<|fim▁hole|> if (name !== undefined) store[name] = value;
return store;
};
$.fn.data = function (name, value) {
return value === undefined ? this.length == 0 ? undefined : getData(this[0], name) : this.each(function (idx) {
setData(this, name, $.isFunction(value) ? value.call(this, idx, getData(this, name)) : value);
});
};
$.cleanData = function (elems) {
for (var i = 0, elem;
(elem = elems[i]) !== undefined; i++) {
can.trigger(elem, "destroyed", [], false)
var id = elem[exp]
delete data[id];
}
}
// zepto.js
// ---------
// _Zepto node list._
var oldEach = can.each;
// Extend what you can out of Zepto.
$.extend(can, Zepto);
can.each = oldEach;
var arrHas = function (obj, name) {
return obj[0] && obj[0][name] || obj[name]
}
// Do what's similar for jQuery.
can.trigger = function (obj, event, args, bubble) {
if (obj.trigger) {
obj.trigger(event, args)
} else if (arrHas(obj, "dispatchEvent")) {
if (bubble === false) {
$([obj]).triggerHandler(event, args)
} else {
$([obj]).trigger(event, args)
}
} else {
if (typeof event == "string") {
event = {
type: event
}
}
event.target = event.target || obj;
event.data = args;
can.dispatch.call(obj, event)
}
}
can.$ = Zepto;
can.bind = function (ev, cb) {
// If we can bind to it...
if (this.bind) {
this.bind(ev, cb)
} else if (arrHas(this, "addEventListener")) {
$([this]).bind(ev, cb)
} else {
can.addEvent.call(this, ev, cb)
}
return this;
}
can.unbind = function (ev, cb) {
// If we can bind to it...
if (this.unbind) {
this.unbind(ev, cb)
} else if (arrHas(this, "addEventListener")) {
$([this]).unbind(ev, cb)
} else {
can.removeEvent.call(this, ev, cb)
}
return this;
}
can.delegate = function (selector, ev, cb) {
if (this.delegate) {
this.delegate(selector, ev, cb)
} else {
$([this]).delegate(selector, ev, cb)
}
}
can.undelegate = function (selector, ev, cb) {
if (this.undelegate) {
this.undelegate(selector, ev, cb)
} else {
$([this]).undelegate(selector, ev, cb)
}
}
$.each(["append", "filter", "addClass", "remove", "data"], function (i, name) {
can[name] = function (wrapped) {
return wrapped[name].apply(wrapped, can.makeArray(arguments).slice(1))
}
})
can.makeArray = function (arr) {
var ret = []
can.each(arr, function (a, i) {
ret[i] = a
})
return ret;
};
can.proxy = function (f, ctx) {
return function () {
return f.apply(ctx, arguments)
}
}
// Make ajax.
var XHR = $.ajaxSettings.xhr;
$.ajaxSettings.xhr = function () {
var xhr = XHR()
var open = xhr.open;
xhr.open = function (type, url, async) {
open.call(this, type, url, ASYNC === undefined ? true : ASYNC)
}
return xhr;
}
var ASYNC;
var AJAX = $.ajax;
var updateDeferred = function (xhr, d) {
for (var prop in xhr) {
if (typeof d[prop] == 'function') {
d[prop] = function () {
xhr[prop].apply(xhr, arguments)
}
} else {
d[prop] = prop[xhr]
}
}
}
can.ajax = function (options) {
var success = options.success,
error = options.error;
var d = can.Deferred();
options.success = function (data) {
updateDeferred(xhr, d);
d.resolve.call(d, data);
success && success.apply(this, arguments);
}
options.error = function () {
updateDeferred(xhr, d);
d.reject.apply(d, arguments);
error && error.apply(this, arguments);
}
if (options.async === false) {
ASYNC = false
}
var xhr = AJAX(options);
ASYNC = undefined;
updateDeferred(xhr, d);
return d;
};
// Make destroyed and empty work.
$.fn.empty = function () {
return this.each(function () {
$.cleanData(this.getElementsByTagName('*'))
this.innerHTML = ''
})
}
$.fn.remove = function () {
$.cleanData(this);
this.each(function () {
if (this.parentNode != null) {
// might be a text node
this.getElementsByTagName && $.cleanData(this.getElementsByTagName('*'))
this.parentNode.removeChild(this);
}
});
return this;
}
can.trim = function (str) {
return str.trim();
}
can.isEmptyObject = function (object) {
var name;
for (name in object) {};
return name === undefined;
}
// Make extend handle `true` for deep.
can.extend = function (first) {
if (first === true) {
var args = can.makeArray(arguments);
args.shift();
return $.extend.apply($, args)
}
return $.extend.apply($, arguments)
}
can.get = function (wrapped, index) {
return wrapped[index];
}
return can;
});<|fim▁end|> | function setData(node, name, value) {
var id = node[exp] || (node[exp] = ++uuid),
store = data[id] || (data[id] = {}); |
<|file_name|>BackendTarget.java<|end_file_name|><|fim▁begin|>/* This file is part of VoltDB.
* Copyright (C) 2008-2013 VoltDB Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with VoltDB. If not, see <http://www.gnu.org/licenses/>.
*/
package org.voltdb;
/**
* Specifies whether the system should be run on the native C++
* backend for VoltDB, or if the system should use a JDBC
* wrapper around HSQLDB.
*
* HSQLDB is pure java, making the system very portable, and it
* supports a wide range of SQL. On the other hand, it's not as
* fast and only supports a single partition. It's best used
* for testing.
*
*/
public enum BackendTarget {
NATIVE_EE_JNI("jni", false),<|fim▁hole|> NATIVE_EE_VALGRIND_IPC("valgrind_ipc", true),
HSQLDB_BACKEND("hsqldb", false),
NONE("none", false);
private BackendTarget(String display, boolean isIPC) { this.display = display; this.isIPC = isIPC; }
public final String display;
public final boolean isIPC;
}<|fim▁end|> | NATIVE_EE_IPC("ipc", true), |
<|file_name|>recover.py<|end_file_name|><|fim▁begin|>import sys
import zmq
import tnetstring
command_uri = sys.argv[1]
sock = zmq.Context.instance().socket(zmq.REQ)
sock.connect(command_uri)
req = {'method': 'recover'}
sock.send(tnetstring.dumps(req))<|fim▁hole|>if not resp.get('success'):
raise ValueError('request failed: %s' % resp)<|fim▁end|> |
resp = tnetstring.loads(sock.recv()) |
<|file_name|>sqlstring_test.py<|end_file_name|><|fim▁begin|>import unittest
import wire
<|fim▁hole|> '''Sets up the test case'''
self.sql = wire.SQLString
def test_pragma(self):
'''Tests the PRAGMA SQL generation'''
self.assertEqual(self.sql.pragma("INTEGRITY_CHECK(10)"), "PRAGMA INTEGRITY_CHECK(10)")
self.assertEqual(self.sql.checkIntegrity(5), "PRAGMA INTEGRITY_CHECK(5)")
def test_createTable(self):
'''Tests the CREATE TABLE SQL generation'''
table_outputs = ["CREATE TABLE test (id INT NOT NULL,username VARCHAR(255) DEFAULT 'default_user')",
"CREATE TABLE test (username VARCHAR(255) DEFAULT 'default_user',id INT NOT NULL)"]
temp_table_outputs = ["CREATE TEMPORARY TABLE test_temp (value REAL DEFAULT 0.0,time TIMESTAMP DEFAULT CURRENT_TIMESTAMP)",
"CREATE TEMPORARY TABLE test_temp (time TIMESTAMP DEFAULT CURRENT_TIMESTAMP,value REAL DEFAULT 0.0)"]
self.assertIn(self.sql.createTable("test", False, id = "INT", username = ["VARCHAR(255)", "'default_user'"]), table_outputs)
self.assertIn(self.sql.createTable("test_temp", True, value = ["REAL", 0.0], time = ["TIMESTAMP", "CURRENT_TIMESTAMP"]), temp_table_outputs)
# include a Temp table test (False --> True)
def test_dropTable(self):
'''Tests the DROP TABLE SQL generation'''
self.assertEqual(self.sql.dropTable("table_drop"), "DROP TABLE table_drop")
self.assertEqual(self.sql.dropTable("some_other_table"), "DROP TABLE some_other_table")
def test_renameTable(self):
'''Tests the ALTER TABLE RENAME SQL generation'''
self.assertEqual(self.sql.rename("orig_table", "new_table"), "ALTER TABLE orig_table RENAME TO new_table")
if __name__ == '__main__':
unittest.main()<|fim▁end|> | class TestSQLString(unittest.TestCase):
def setUp(self): |
<|file_name|>axe.config.js<|end_file_name|><|fim▁begin|>/*jshint node: true*/
'use strict';
// Defaults derived from: https://github.com/dequelabs/axe-core
const defaults = {
rules: {
'area-alt': { 'enabled': true },
'audio-caption': { 'enabled': true },
'button-name': { 'enabled': true },
'document-title': { 'enabled': true },
'empty-heading': { 'enabled': true },
'frame-title': { 'enabled': true },
'frame-title-unique': { 'enabled': true },
'image-alt': { 'enabled': true },
'image-redundant-alt': { 'enabled': true },
'input-image-alt': { 'enabled': true },
'link-name': { 'enabled': true },
'object-alt': { 'enabled': true },
'server-side-image-map': { 'enabled': true },
'video-caption': { 'enabled': true },
'video-description': { 'enabled': true },
'definition-list': { 'enabled': true },
'dlitem': { 'enabled': true },
'heading-order': { 'enabled': true },
'href-no-hash': { 'enabled': true },
'layout-table': { 'enabled': true },
'list': { 'enabled': true },
'listitem': { 'enabled': true },
'p-as-heading': { 'enabled': true },
'scope-attr-valid': { 'enabled': true },
'table-duplicate-name': { 'enabled': true },
'table-fake-caption': { 'enabled': true },
'td-has-header': { 'enabled': true },
'td-headers-attr': { 'enabled': true },
'th-has-data-cells': { 'enabled': true },
'duplicate-id': { 'enabled': true },
'html-has-lang': { 'enabled': true },
'html-lang-valid': { 'enabled': true },
'meta-refresh': { 'enabled': true },
'valid-lang': { 'enabled': true },
'checkboxgroup': { 'enabled': true },
'label': { 'enabled': true },
'radiogroup': { 'enabled': true },
'accesskeys': { 'enabled': true },
'bypass': { 'enabled': true },
'tabindex': { 'enabled': true },
// TODO: this should be re-enabled when we upgrade to axe-core ^3.1.1 (https://github.com/dequelabs/axe-core/issues/961)
'aria-allowed-attr': { 'enabled': false },
'aria-required-attr': { 'enabled': true },
'aria-required-children': { 'enabled': true },
'aria-required-parent': { 'enabled': true },
'aria-roles': { 'enabled': true },
'aria-valid-attr': { 'enabled': true },
'aria-valid-attr-value': { 'enabled': true },
'blink': { 'enabled': true },
'color-contrast': { 'enabled': true },
'link-in-text-block': { 'enabled': true },<|fim▁hole|> }
};
module.exports = {
getConfig: () => {
const skyPagesConfigUtil = require('../sky-pages/sky-pages.config');
const skyPagesConfig = skyPagesConfigUtil.getSkyPagesConfig();
let config = {};
// Merge rules from skyux config.
if (skyPagesConfig.skyux.a11y && skyPagesConfig.skyux.a11y.rules) {
config.rules = Object.assign({}, defaults.rules, skyPagesConfig.skyux.a11y.rules);
}
// The consuming SPA wishes to disable all rules.
if (skyPagesConfig.skyux.a11y === false) {
config.rules = Object.assign({}, defaults.rules);
Object.keys(config.rules).forEach((key) => {
config.rules[key].enabled = false;
});
}
if (!config.rules) {
return defaults;
}
return config;
}
};<|fim▁end|> | 'marquee': { 'enabled': true },
'meta-viewport': { 'enabled': true },
'meta-viewport-large': { 'enabled': true } |
<|file_name|>view.js<|end_file_name|><|fim▁begin|>function BxTimelineView(oOptions) {
this._sActionsUri = oOptions.sActionUri;
this._sActionsUrl = oOptions.sActionUrl;
this._sObjName = oOptions.sObjName == undefined ? 'oTimelineView' : oOptions.sObjName;
this._iOwnerId = oOptions.iOwnerId == undefined ? 0 : oOptions.iOwnerId;
this._sAnimationEffect = oOptions.sAnimationEffect == undefined ? 'slide' : oOptions.sAnimationEffect;
this._iAnimationSpeed = oOptions.iAnimationSpeed == undefined ? 'slow' : oOptions.iAnimationSpeed;
this._aHtmlIds = oOptions.aHtmlIds == undefined ? {} : oOptions.aHtmlIds;
this._oRequestParams = oOptions.oRequestParams == undefined ? {} : oOptions.oRequestParams;
var $this = this;
$(document).ready(function() {
$this.initMasonry();
$('.bx-tl-item').resize(function() {
$this.reloadMasonry();
});
$('img.bx-tl-item-image').load(function() {
$this.reloadMasonry();
});
});
}
BxTimelineView.prototype = new BxTimelineMain();
BxTimelineView.prototype.changePage = function(oElement, iStart, iPerPage) {
this._oRequestParams.start = iStart;
this._oRequestParams.per_page = iPerPage;
this._getPosts(oElement, 'page');
};
BxTimelineView.prototype.changeFilter = function(oLink) {
var sId = $(oLink).attr('id');
this._oRequestParams.start = 0;
this._oRequestParams.filter = sId.substr(sId.lastIndexOf('-') + 1, sId.length);
this._getPosts(oLink, 'filter');
};
BxTimelineView.prototype.changeTimeline = function(oLink, iYear) {
this._oRequestParams.start = 0;
this._oRequestParams.timeline = iYear;
this._getPosts(oLink, 'timeline');
};
BxTimelineView.prototype.deletePost = function(oLink, iId) {
var $this = this;
var oView = $(this.sIdView);
var oData = this._getDefaultData();
oData['id'] = iId;
this.loadingInBlock(oLink, true);
$.post(
this._sActionsUrl + 'delete/',
oData,
function(oData) {
$this.loadingInBlock(oLink, false);
if(oData && oData.msg != undefined)
alert(oData.msg);
if(oData && oData.code == 0)
$(oLink).parents('.bx-popup-applied:first:visible').dolPopupHide();
$($this.sIdItem + oData.id).bx_anim('hide', $this._sAnimationEffect, $this._iAnimationSpeed, function() {
$(this).remove();
if(oView.find('.bx-tl-item').length != 0) {
$this.reloadMasonry();
return;
}
$this.destroyMasonry();
oView.find('.bx-tl-load-more').hide();
oView.find('.bx-tl-empty').show();
}); <|fim▁hole|> },
'json'
);
};
BxTimelineView.prototype.showMoreContent = function(oLink) {
$(oLink).parent('span').next('span').show().prev('span').remove();
this.reloadMasonry();
};
BxTimelineView.prototype.showPhoto = function(oLink, sUrl) {
$('#' + this._aHtmlIds['photo_popup']).dolPopupImage(sUrl, $(oLink).parent());
};
BxTimelineView.prototype.commentItem = function(oLink, sSystem, iId) {
var $this = this;
var oData = this._getDefaultData();
oData['system'] = sSystem;
oData['id'] = iId;
var oComments = $(oLink).parents('.' + this.sClassItem + ':first').find('.' + this.sClassItemComments);
if(oComments.children().length > 0) {
oComments.bx_anim('toggle', this._sAnimationEffect, this._iAnimationSpeed);
return;
}
if(oLink)
this.loadingInItem(oLink, true);
jQuery.get (
this._sActionsUrl + 'get_comments',
oData,
function(oData) {
if(oLink)
$this.loadingInItem(oLink, false);
if(!oData.content)
return;
oComments.html($(oData.content).hide()).children(':hidden').bxTime().bx_anim('show', $this._sAnimationEffect, $this._iAnimationSpeed);
},
'json'
);
};
BxTimelineView.prototype._getPosts = function(oElement, sAction) {
var $this = this;
var oView = $(this.sIdView);
switch(sAction) {
case 'page':
this.loadingInButton(oElement, true);
break;
default:
this.loadingInBlock(oElement, true);
break;
}
jQuery.get(
this._sActionsUrl + 'get_posts/',
this._getDefaultData(),
function(oData) {
if(oData && oData.items != undefined) {
var sItems = $.trim(oData.items);
switch(sAction) {
case 'page':
$this.loadingInButton(oElement, false);
$this.appendMasonry($(sItems).bxTime());
break;
default:
$this.loadingInBlock(oElement, false);
oView.find('.' + $this.sClassItems).bx_anim('hide', $this._sAnimationEffect, $this._iAnimationSpeed, function() {
$(this).html(sItems).show().bxTime();
if($this.isMasonryEmpty()) {
$this.destroyMasonry();
return;
}
if($this.isMasonry())
$this.reloadMasonry();
else
$this.initMasonry();
});
break;
}
}
if(oData && oData.load_more != undefined)
oView.find('.' + $this.sSP + '-load-more-holder').html($.trim(oData.load_more));
if(oData && oData.back != undefined)
oView.find('.' + $this.sSP + '-back-holder').html($.trim(oData.back));
},
'json'
);
};<|fim▁end|> | |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use std::io::{self, Read};
mod smtp_parser;<|fim▁hole|>
let mut buffer = String::new();
io::stdin()
.read_to_string(&mut buffer)
.expect("Could not read input");
match smtp_parser::session(&buffer) {
Ok(r) => println!("Parsed as: {:?}", r),
Err(e) => println!("Parse error: {}", e),
}
}<|fim▁end|> |
fn main() {
println!("Hello, type some SMTP and press ctrl+d"); |
<|file_name|>queues.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""Queue definitions.
This module defines the queues where a worker may look for work.
"""
from grr_response_core.lib import rdfvalue
# Queues that a standard worker should work from, highest priority first.
#
# "W" and "CA" are deprecated, but included until we are sure that they are
# empty.<|fim▁hole|>
# The normal queue for flows. Must be kept synchronized with the default value
# of FlowRunnerArgs.queue.
FLOWS = rdfvalue.RDFURN("F")
# The normal queue for hunts. Must be kept synchronized with the default value
# of HuntRunnerArgs.queue.
HUNTS = rdfvalue.RDFURN("H")
# The normal queue for statistics processing.
STATS = rdfvalue.RDFURN("S")<|fim▁end|> | WORKER_LIST = list(map(rdfvalue.RDFURN, ["CA", "W", "E", "F", "H", "S"]))
# The normal queue for enrollment messages.
ENROLLMENT = rdfvalue.RDFURN("E") |
<|file_name|>de.py<|end_file_name|><|fim▁begin|># Authors: David Goodger; Gunnar Schwant
# Contact: goodger@users.sourceforge.net
# Revision: $Revision: 21817 $
# Date: $Date: 2005-07-21 13:39:57 -0700 (Thu, 21 Jul 2005) $
# Copyright: This module has been placed in the public domain.
# New language mappings are welcome. Before doing a new translation, please
# read <http://docutils.sf.net/docs/howto/i18n.html>. Two files must be
# translated for each language: one in docutils/languages, the other in
# docutils/parsers/rst/languages.
"""
German language mappings for language-dependent features of Docutils.
"""
__docformat__ = 'reStructuredText'
labels = {
'author': 'Autor',
'authors': 'Autoren',
'organization': 'Organisation',
'address': 'Adresse',
'contact': 'Kontakt',
'version': 'Version',
'revision': 'Revision',
'status': 'Status',
'date': 'Datum',
'dedication': 'Widmung',
'copyright': 'Copyright',
'abstract': 'Zusammenfassung',
'attention': 'Achtung!',
'caution': 'Vorsicht!',
'danger': '!GEFAHR!',
'error': 'Fehler',
'hint': 'Hinweis',
'important': 'Wichtig',
'note': 'Bemerkung',
'tip': 'Tipp',
'warning': 'Warnung',
'contents': 'Inhalt'}
"""Mapping of node class name to label text."""
bibliographic_fields = {
'autor': 'author',
'autoren': 'authors',
'organisation': 'organization',
'adresse': 'address',
'kontakt': 'contact',
'version': 'version',
'revision': 'revision',
'status': 'status',
'datum': 'date',
'copyright': 'copyright',<|fim▁hole|>
author_separators = [';', ',']
"""List of separator strings for the 'Authors' bibliographic field. Tried in
order."""<|fim▁end|> | 'widmung': 'dedication',
'zusammenfassung': 'abstract'}
"""German (lowcased) to canonical name mapping for bibliographic fields.""" |
<|file_name|>dockets.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
import logging
from uuid import uuid1
import dockets.queue
import dockets.error_queue
from .base import Queue
class DocketsQueue(Queue):
def __init__(self, redis_client, queue_name, wait_time, timeout):
self.queue = dockets.queue.Queue(redis_client,
queue_name,
use_error_queue=True,
wait_time=wait_time,
timeout=timeout)
def make_error_queue(self):
return DocketsErrorQueue(self.queue)
def _push(self, item):
push_kwargs = {}
if 'delay' in item:
push_kwargs['delay'] = item['delay'] or None
return self.queue.push(item, **push_kwargs)
def _push_batch(self, items):
result = []
for item in items:
try:
self._push(item)
result.append((item, True))
except Exception:
logging.exception("Error pushing item {}".format(item))
result.append((item, False))
return result
def _pop(self):
envelope = self.queue.pop()
if envelope:
return envelope, envelope.get('item')
return None, None
def _pop_batch(self, batch_size):
batch = []
for _ in range(batch_size):
envelope, item = self._pop()
if envelope:
batch.append((envelope, item))
else:
break
return batch
def _touch(self, envelope, seconds):
"""Dockets heartbeat is consumer-level and does not
utilize the envelope or seconds arguments."""
return self.queue._heartbeat()
def _complete(self, envelope):
return self.queue.complete(envelope)
def _complete_batch(self, envelopes):
# Dockets doesn't return any information from complete, so here we go...
for envelope in envelopes:
self._complete(envelope)<|fim▁hole|> def _flush(self):
while True:
envelope, item = self._pop()
if envelope is None:
break
self._complete(envelope)
def _stats(self):
return {'available': self.queue.queued(),
'in_flight': self.queue.working(),
'delayed': self.queue.delayed()}
class DocketsErrorQueue(Queue):
FIFO = False
SUPPORTS_DELAY = False
RECLAIMS_TO_BACK_OF_QUEUE = False
def __init__(self, parent_dockets_queue):
self.queue = dockets.error_queue.ErrorQueue(parent_dockets_queue)
def _push(self, item):
"""This error ID dance is Dockets-specific, since we need the ID
to interface with the hash error queue. Other backends shouldn't
need to do this and should use the envelope properly instead."""
try:
error_id = item['error']['id']
except KeyError:
logging.warn('No error ID found for item, will generate and add one: {}'.format(item))
error_id = str(uuid1())
item.setdefault('error', {})['id'] = error_id
return self.queue.queue_error_item(error_id, item)
def _push_batch(self, items):
result = []
for item in items:
try:
self._push(item)
result.append((item, True))
except Exception:
logging.exception("Error pushing item {}".format(item))
result.append((item, False))
return result
def _pop(self):
"""Dockets Error Queues are not actually queues, they're hashes. There's no way
for us to implement a pure pop that doesn't expose us to the risk of dropping
data. As such, we're going to return the first error in that hash but not actually
remove it until we call `_complete` later on. This keeps our data safe but may
deliver errors multiple times. That should be okay."""
error_ids = self.queue.error_ids()
if error_ids:
error_id = error_ids[0]
error = self.queue.error(error_id)
return error, error
return None, None
def _pop_batch(self, batch_size):
"""Similar to _pop, but returns a list of tuples containing batch_size pops
from our queue.
Again, this does not actually pop from the queue until we call _complete on
each queued item"""
error_ids = self.queue.error_ids()
batch = []
if error_ids:
for error_id in error_ids[:batch_size]:
error = self.queue.error(error_id)
batch.append((error, error))
return batch
def _touch(self, envelope, seconds):
return None
def _complete(self, envelope):
error_id = envelope['error']['id']
if not error_id:
raise AttributeError('Error item has no id field: {}'.format(envelope))
return self.queue.delete_error(error_id)
def _complete_batch(self, envelopes):
return [(envelope, bool(self._complete(envelope))) for envelope in envelopes]
def _flush(self):
for error_id in self.queue.error_ids():
self.queue.delete_error(error_id)
def _stats(self):
return {'available': self.queue.length()}<|fim▁end|> | return [(envelope, True) for envelope in envelopes]
|
<|file_name|>test_monitors.py<|end_file_name|><|fim▁begin|># Copyright 2015 Huawei Technologies Co.,LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from oslo_serialization import jsonutils
from magnum.conductor import k8s_monitor
from magnum.conductor import mesos_monitor
from magnum.conductor import monitors
from magnum.conductor import swarm_monitor
from magnum import objects
from magnum.tests import base
from magnum.tests.unit.db import utils
class MonitorsTestCase(base.TestCase):
test_metrics_spec = {
'metric1': {
'unit': 'metric1_unit',
'func': 'metric1_func',
},
'metric2': {
'unit': 'metric2_unit',
'func': 'metric2_func',
},
}
def setUp(self):
super(MonitorsTestCase, self).setUp()
bay = utils.get_test_bay(node_addresses=['1.2.3.4'],
api_address='https://5.6.7.8:2376')
self.bay = objects.Bay(self.context, **bay)
self.monitor = swarm_monitor.SwarmMonitor(self.context, self.bay)
self.k8s_monitor = k8s_monitor.K8sMonitor(self.context, self.bay)
self.mesos_monitor = mesos_monitor.MesosMonitor(self.context,
self.bay)
p = mock.patch('magnum.conductor.swarm_monitor.SwarmMonitor.'
'metrics_spec', new_callable=mock.PropertyMock)
self.mock_metrics_spec = p.start()
self.mock_metrics_spec.return_value = self.test_metrics_spec
self.addCleanup(p.stop)
@mock.patch('magnum.objects.BayModel.get_by_uuid')
def test_create_monitor_success(self, mock_baymodel_get_by_uuid):
baymodel = mock.MagicMock()
baymodel.coe = 'swarm'
mock_baymodel_get_by_uuid.return_value = baymodel
monitor = monitors.create_monitor(self.context, self.bay)
self.assertIsInstance(monitor, swarm_monitor.SwarmMonitor)
@mock.patch('magnum.objects.BayModel.get_by_uuid')
def test_create_monitor_k8s_bay(self, mock_baymodel_get_by_uuid):
baymodel = mock.MagicMock()
baymodel.coe = 'kubernetes'
mock_baymodel_get_by_uuid.return_value = baymodel
monitor = monitors.create_monitor(self.context, self.bay)
self.assertIsInstance(monitor, k8s_monitor.K8sMonitor)
@mock.patch('magnum.objects.BayModel.get_by_uuid')
def test_create_monitor_mesos_bay(self, mock_baymodel_get_by_uuid):<|fim▁hole|> baymodel = mock.MagicMock()
baymodel.coe = 'mesos'
mock_baymodel_get_by_uuid.return_value = baymodel
monitor = monitors.create_monitor(self.context, self.bay)
self.assertIsInstance(monitor, mesos_monitor.MesosMonitor)
@mock.patch('magnum.objects.BayModel.get_by_uuid')
def test_create_monitor_unsupported_coe(self, mock_baymodel_get_by_uuid):
baymodel = mock.MagicMock()
baymodel.coe = 'unsupported'
mock_baymodel_get_by_uuid.return_value = baymodel
monitor = monitors.create_monitor(self.context, self.bay)
self.assertIsNone(monitor)
@mock.patch('magnum.common.docker_utils.docker_for_bay')
def test_swarm_monitor_pull_data_success(self, mock_docker_for_bay):
mock_docker = mock.MagicMock()
mock_docker.info.return_value = {'DriverStatus': [[
u' \u2514 Reserved Memory', u'0 B / 1 GiB']]}
mock_docker.containers.return_value = [mock.MagicMock()]
mock_docker.inspect_container.return_value = 'test_container'
mock_docker_for_bay.return_value.__enter__.return_value = mock_docker
self.monitor.pull_data()
self.assertEqual([{'MemTotal': 1073741824.0}],
self.monitor.data['nodes'])
self.assertEqual(['test_container'], self.monitor.data['containers'])
@mock.patch('magnum.common.docker_utils.docker_for_bay')
def test_swarm_monitor_pull_data_raise(self, mock_docker_for_bay):
mock_container = mock.MagicMock()
mock_docker = mock.MagicMock()
mock_docker.info.return_value = {'DriverStatus': [[
u' \u2514 Reserved Memory', u'0 B / 1 GiB']]}
mock_docker.containers.return_value = [mock_container]
mock_docker.inspect_container.side_effect = Exception("inspect error")
mock_docker_for_bay.return_value.__enter__.return_value = mock_docker
self.monitor.pull_data()
self.assertEqual([{'MemTotal': 1073741824.0}],
self.monitor.data['nodes'])
self.assertEqual([mock_container], self.monitor.data['containers'])
def test_swarm_monitor_get_metric_names(self):
names = self.monitor.get_metric_names()
self.assertEqual(sorted(['metric1', 'metric2']), sorted(names))
def test_swarm_monitor_get_metric_unit(self):
unit = self.monitor.get_metric_unit('metric1')
self.assertEqual('metric1_unit', unit)
def test_swarm_monitor_compute_metric_value(self):
mock_func = mock.MagicMock()
mock_func.return_value = 'metric1_value'
self.monitor.metric1_func = mock_func
value = self.monitor.compute_metric_value('metric1')
self.assertEqual('metric1_value', value)
def test_swarm_monitor_compute_memory_util(self):
test_data = {
'nodes': [
{
'Name': 'node',
'MemTotal': 20,
},
],
'containers': [
{
'Name': 'container',
'HostConfig': {
'Memory': 10,
},
},
],
}
self.monitor.data = test_data
mem_util = self.monitor.compute_memory_util()
self.assertEqual(50, mem_util)
test_data = {
'nodes': [],
'containers': [],
}
self.monitor.data = test_data
mem_util = self.monitor.compute_memory_util()
self.assertEqual(0, mem_util)
@mock.patch('magnum.conductor.k8s_api.create_k8s_api')
def test_k8s_monitor_pull_data_success(self, mock_k8s_api):
mock_nodes = mock.MagicMock()
mock_node = mock.MagicMock()
mock_node.status = mock.MagicMock()
mock_node.status.capacity = "{'memory': '2000Ki'}"
mock_nodes.items = [mock_node]
mock_k8s_api.return_value.list_namespaced_node.return_value = (
mock_nodes)
mock_pods = mock.MagicMock()
mock_pod = mock.MagicMock()
mock_pod.spec = mock.MagicMock()
mock_container = mock.MagicMock()
mock_container.resources = mock.MagicMock()
mock_container.resources.limits = "{'memory':'100Mi'}"
mock_pod.spec.containers = [mock_container]
mock_pods.items = [mock_pod]
mock_k8s_api.return_value.list_namespaced_pod.return_value = mock_pods
self.k8s_monitor.pull_data()
self.assertEqual(self.k8s_monitor.data['nodes'],
[{'Memory': 2048000.0}])
self.assertEqual(self.k8s_monitor.data['pods'],
[{'Memory': 104857600.0}])
def test_k8s_monitor_get_metric_names(self):
k8s_metric_spec = 'magnum.conductor.k8s_monitor.K8sMonitor.'\
'metrics_spec'
with mock.patch(k8s_metric_spec,
new_callable=mock.PropertyMock) as mock_k8s_metric:
mock_k8s_metric.return_value = self.test_metrics_spec
names = self.k8s_monitor.get_metric_names()
self.assertEqual(sorted(['metric1', 'metric2']), sorted(names))
def test_k8s_monitor_get_metric_unit(self):
k8s_metric_spec = 'magnum.conductor.k8s_monitor.K8sMonitor.' \
'metrics_spec'
with mock.patch(k8s_metric_spec,
new_callable=mock.PropertyMock) as mock_k8s_metric:
mock_k8s_metric.return_value = self.test_metrics_spec
unit = self.k8s_monitor.get_metric_unit('metric1')
self.assertEqual('metric1_unit', unit)
def test_k8s_monitor_compute_memory_util(self):
test_data = {
'nodes': [
{
'Memory': 20,
},
],
'pods': [
{
'Memory': 10,
},
],
}
self.k8s_monitor.data = test_data
mem_util = self.k8s_monitor.compute_memory_util()
self.assertEqual(50, mem_util)
test_data = {
'nodes': [],
'pods': [],
}
self.k8s_monitor.data = test_data
mem_util = self.k8s_monitor.compute_memory_util()
self.assertEqual(0, mem_util)
@mock.patch('magnum.common.urlfetch.get')
def test_mesos_monitor_pull_data_success(self, mock_url_get):
state_json = {
'slaves': [{
'resources': {
'mem': 100
},
'used_resources': {
'mem': 50
}
}]
}
state_json = jsonutils.dumps(state_json)
mock_url_get.return_value = state_json
self.mesos_monitor.pull_data()
self.assertEqual(self.mesos_monitor.data['mem_total'],
100)
self.assertEqual(self.mesos_monitor.data['mem_used'],
50)
def test_mesos_monitor_get_metric_names(self):
mesos_metric_spec = 'magnum.conductor.mesos_monitor.MesosMonitor.'\
'metrics_spec'
with mock.patch(mesos_metric_spec,
new_callable=mock.PropertyMock) as mock_mesos_metric:
mock_mesos_metric.return_value = self.test_metrics_spec
names = self.mesos_monitor.get_metric_names()
self.assertEqual(sorted(['metric1', 'metric2']), sorted(names))
def test_mesos_monitor_get_metric_unit(self):
mesos_metric_spec = 'magnum.conductor.mesos_monitor.MesosMonitor.' \
'metrics_spec'
with mock.patch(mesos_metric_spec,
new_callable=mock.PropertyMock) as mock_mesos_metric:
mock_mesos_metric.return_value = self.test_metrics_spec
unit = self.mesos_monitor.get_metric_unit('metric1')
self.assertEqual('metric1_unit', unit)
def test_mesos_monitor_compute_memory_util(self):
test_data = {
'mem_total': 100,
'mem_used': 50
}
self.mesos_monitor.data = test_data
mem_util = self.mesos_monitor.compute_memory_util()
self.assertEqual(50, mem_util)
test_data = {
'mem_total': 0,
'pods': 0,
}
self.mesos_monitor.data = test_data
mem_util = self.mesos_monitor.compute_memory_util()
self.assertEqual(0, mem_util)<|fim▁end|> | |
<|file_name|>battery_models.py<|end_file_name|><|fim▁begin|>import numpy as np
def min_max_model(power, use, battery_capacity):
"""
Minimal maximum battery model, obsoleted
:param power: Pandas TimeSeries, total power from renewable system
:param use: float, unit W fixed load of the power system
:param battery_capacity: float, unit Wh battery capacity
:return: list, energy history in battery
"""
power = power.tolist()
energy = 0
energy_history = []
for p in power:
energy = min(battery_capacity, max(0, energy + (p - use) * 1))
energy_history.append(energy)
return energy_history
def soc_model_fixed_load(
power,
use,
battery_capacity,
depth_of_discharge=1,
discharge_rate=0.005,
battery_eff=0.9,
discharge_eff=0.8,
):
"""
Battery state of charge model with fixed load. (Obsolete)
:param power: Pandas TimeSeries of total power from renewable system
:param use: float unit W fixed load of the power system
:param battery_capacity: float unit Wh battery capacity
:param depth_of_discharge: float 0 to 1 maximum allowed discharge depth
:param discharge_rate: self discharge rate
:param battery_eff: optional 0 to 1 battery energy store efficiency default 0.9
:param discharge_eff: battery discharge efficiency 0 to 1 default 0.8
:return: tuple SOC: state of charge, energy history: E in battery,
unmet_history: unmet energy history, waste_history: waste energy history
"""
DOD = depth_of_discharge
power = power.tolist()
use_history = []
waste_history = []
unmet_history = []
energy_history = []
energy = 0
for p in power:
if p >= use:
use_history.append(use)
unmet_history.append(0)
energy_new = energy * (1 - discharge_rate) + (p - use) * battery_eff
if energy_new < battery_capacity:
energy = energy_new # battery energy got update
waste_history.append(0)
else:
waste_history.append(p - use)
energy = energy
elif p < use:
energy_new = energy * (1 - discharge_rate) + (p - use) / discharge_eff
if energy_new > (1 - DOD) * battery_capacity:
energy = energy_new
unmet_history.append(0)
waste_history.append(0)
use_history.append(use)
elif energy * (1 - discharge_rate) + p * battery_eff < battery_capacity:
energy = energy * (1 - discharge_rate) + p * battery_eff
unmet_history.append(use - p)
use_history.append(0)
waste_history.append(0)
else:
unmet_history.append(use - p)
use_history.append(0)
waste_history.append(p)
energy = energy
energy_history.append(energy)
if battery_capacity == 0:
SOC = np.array(energy_history)
else:
SOC = np.array(energy_history) / battery_capacity
return SOC, energy_history, unmet_history, waste_history, use_history
class Battery:
"""
A simple finite state based energy flow battery model.
"""
def __init__(self, capacity, config={}):
"""
Initialise the battery with a given capacity and configuration.
:param capacity: float, unit Wh
:param config: options including DOD, depth of discharge; sigma, self-discharge rate; eta_in, charge efficiency;
eta_out, discharge efficiency; init_charge, percentage of the battery pre-charge; where all values shall between 0
and 1
"""
self.capacity = capacity
self.config = config
self.set_parameters()
def set_parameters(self):
"""
Setup the parameters using the config file, options including DOD, depth of discharge; sigma, self-discharge rate;
eta_in, charge efficiency; eta_out, discharge efficiency; init_charge, percentage of the battery pre-charge;
where all values shall between 0 and 1.
"""
try:
self.depth_of_discharge = self.config['simulation']['battery']['DOD']
self.discharge_rate = self.config['simulation']['battery']['sigma']
self.battery_eff = self.config['simulation']['battery']['eta_in']
self.discharge_eff = self.config['simulation']['battery']['eta_out']
self.init_charge = self.config['simulation']['battery']['B0']
except KeyError:
print('Parameter is not found in config file, default values are used.')
self.depth_of_discharge = 1
self.discharge_rate = 0.005
self.battery_eff = 0.9
self.discharge_eff = 0.8
self.init_charge = 1
def run(self, power, use):
"""
Run the battery model with a list of power generation and usage.
:param power: list, power generation unit in W
:param use: list, power usage unit in W
:return: None
"""
DOD = self.depth_of_discharge
battery_capacity = self.capacity
discharge_rate = self.discharge_rate
discharge_eff = self.discharge_eff
battery_eff = self.battery_eff
use_history = []
waste_history = []
unmet_history = []
energy_history = []
SOC = []
energy = self.init_charge * self.capacity
for p, u in zip(power, use):
if p >= u:
use_history.append(u)
unmet_history.append(0)
energy_new = energy * (1 - discharge_rate) + (p - u) * battery_eff
if energy_new < battery_capacity:
energy = energy_new # battery energy got update
waste_history.append(0)
else:
waste_history.append(p - u)
energy = energy
elif p < u:
energy_new = energy * (1 - discharge_rate) + (p - u) / discharge_eff
if energy_new > (1 - DOD) * battery_capacity:
energy = energy_new
unmet_history.append(0)
waste_history.append(0)
use_history.append(u)
elif energy * (1 - discharge_rate) + p * battery_eff < battery_capacity:
energy = energy * (1 - discharge_rate) + p * battery_eff
unmet_history.append(u - p)
use_history.append(0)
waste_history.append(0)
else:
unmet_history.append(u - p)
use_history.append(0)
waste_history.append(p)
energy = energy
energy_history.append(energy)
SOC.append(energy / battery_capacity)
self.SOC = SOC
self.energy_history = energy_history
self.unmet_history = unmet_history
self.waste_history = waste_history
self.use_history = use_history
def battery_history(self):
"""
Return the history of the battery.
:return: np array, the SOC, energy in the battery, unmet power supply, wasted power and the supplied power unit in W
"""
history = np.vstack(
(
np.array(self.SOC),
np.array(self.energy_history),
np.array(self.unmet_history),
np.array(self.waste_history),
np.array(self.use_history),
)
)
return history
def lost_power_supply_probability(self):
"""
Return the lost power supply probability (LPSP) using the battery history.
:return: float, LPSP
"""
LPSP = 1 - self.unmet_history.count(0) / len(self.energy_history)<|fim▁hole|>
class Battery_managed:
"""
Battery managed is a the basic class for the demand load controllable battery model.
"""
def __init__(self, capacity, config={}):
"""
:param capacity: float, unit Wh
:param config: options including DOD, depth of discharge; sigma, self-discharge rate; eta_in, charge efficiency;
eta_out, discharge efficiency; init_charge, percentage of the battery pre-charge; where all values shall between 0
and 1
"""
self.capacity = capacity
self.config = config
self.set_parameters()
self.init_history()
self.init_simulation()
self.status = []
self.states_list = []
def set_parameters(self):
"""
Setup the parameters using the config file, options including DOD, depth of discharge; sigma, self-discharge rate;
eta_in, charge efficiency; eta_out, discharge efficiency; init_charge, percentage of the battery pre-charge;
where all values shall between 0 and 1.
"""
try:
self.depth_of_discharge = self.config['simulation']['battery']['DOD']
self.discharge_rate = self.config['simulation']['battery']['sigma']
self.battery_eff = self.config['simulation']['battery']['eta_in']
self.discharge_eff = self.config['simulation']['battery']['eta_out']
self.init_charge = self.config['simulation']['battery']['B0']
self.DOD = self.depth_of_discharge
except KeyError:
print('Parameter is not found in config file, default values are used.')
self.depth_of_discharge = 1
self.discharge_rate = 0.005
self.battery_eff = 0.9
self.discharge_eff = 0.8
self.init_charge = 1
self.DOD = self.depth_of_discharge
def reset(self):
"""
Reset the battery state to the start of simulation.
:return:
"""
self.init_history()
self.init_simulation()
def init_simulation(self):
self.energy = self.init_charge * self.capacity
def init_history(self):
self.supply_history = []
self.waste_history = []
self.unmet_history = []
self.battery_energy_history = []
self.SOC = []
def step(self, plan, generated, gym = False):
"""
Run the finite state battery model on one time step.
:param plan: float, planned power usage in W
:param generated: float, power generation unit in W
:param gym: optional, set True to using in OpenAI gym mode
:return: float, the supplied power in W
"""
if gym == True:
plan = plan[0][0]
if generated >= plan:
self.supply_history.append(plan)
self.unmet_history.append(0)
energy_new = self.energy * (1 - self.discharge_rate) + (generated - plan) * self.battery_eff
if energy_new < self.capacity:
self.energy = energy_new # battery energy got update
self.waste_history.append(0)
self.status.append("""Demand can be meet by generation, also battery is not full.
Supply {demand}, charge {diff}.""".format(demand=plan, diff=generated - plan)
)
self.state = 'charge'
else:
self.waste_history.append(generated - plan - (self.capacity - self.energy))
self.energy = self.capacity
self.status.append("""Demand can be meet by generation, but battery is already full.
Supply {demand}, charge battery to full waste {diff}.""".format(
demand=plan, diff=generated - plan)
)
self.state = 'float'
elif generated < plan:
energy_new = self.energy * (1 - self.discharge_rate) + (generated - plan) / self.discharge_eff
if energy_new > (1 - self.DOD) * self.capacity:
self.energy = energy_new
self.unmet_history.append(0)
self.waste_history.append(0)
self.supply_history.append(plan)
self.status.append("""Demand can not meet by generation, power in battery can make up difference.
Supply {demand} by discharge from battery""".format(demand=plan))
self.state = 'discharge'
elif self.energy * (1 - self.discharge_rate) + generated * self.battery_eff < self.capacity:
self.energy = self.energy * (1 - self.discharge_rate) + generated * self.battery_eff
self.unmet_history.append(plan - generated)
self.supply_history.append(0)
self.waste_history.append(0)
self.status.append("""Demand can not meet by generation, also power in battery can not make up difference.
Charge {diff} to battery to avoid waste""".format(diff=generated))
self.state = 'unmet'
else:
self.unmet_history.append(plan - generated)
self.supply_history.append(0)
self.waste_history.append(generated - (self.capacity - self.energy))
self.energy = self.capacity
self.status.append("""Demand can not meet by generation, also power in battery can not make up difference.
Charge {diff} to make battery full""".format(
diff=self.capacity-self.energy))
self.state = 'unmet'
self.states_list.append(self.state)
self.battery_energy_history.append(self.energy)
self.SOC.append(self.energy / self.capacity)
self.supply = self.supply_history[-1]
return self.supply
def history(self):
"""
Get the history of the managed battery.
:return: np array including the history of the battery: SOC, battery energy, unmet and wasted energy, supplied power
"""
battery_history = np.vstack(
(
np.array(self.SOC),
np.array(self.battery_energy_history),
np.array(self.unmet_history),
np.array(self.waste_history),
np.array(self.supply_history),
)
)
return battery_history
def observation(self):
"""
Observation
:return:
"""
battery_state = {
'current_energy': self.energy,
'usable_capacity': self.DOD * self.capacity,
}
return battery_state
def story_board(self):
"""
For the use of explainable AI in power management system.
:return: the status of battery
"""
return self.status
def lost_power_supply_probability(self):
"""
Get the lost power supply probability of the managed battery after run.
:return: float, LPSP
"""
LPSP = 1 - self.unmet_history.count(0) / len(self.SOC)
return LPSP
def copy(self):
"""
Make a copy of battery model.
:return: Copied version of battery with same capacity and configuration
"""
return Battery_managed(self.capacity, self.config)
class Soc_model_variable_load:
"""
Obsolete basic class.
"""
def __init__(self, battery, power, load):
self.battery = battery
self.battery.run(power, load)
def get_lost_power_supply_probability(self):
return self.battery.lost_power_supply_probability()
def get_battery_history(self):
return self.battery.battery_history()
def get_quality_performance_index(self):
pass
def soc_model_variable_load(
power,
use,
battery_capacity,
depth_of_discharge=1,
discharge_rate=0.005,
battery_eff=0.9,
discharge_eff=0.8,
):
"""
Battery state of charge model with fixed load.
:param power: Pandas TimeSeries of total power from renewable system
:param use: float unit W fixed load of the power system
:param battery_capacity: float unit Wh battery capacity
:param depth_of_discharge: float 0 to 1 maximum allowed discharge depth
:param discharge_rate: self discharge rate
:param battery_eff: optional 0 to 1 battery energy store efficiency default 0.9
:param discharge_eff: battery discharge efficiency 0 to 1 default 0.8
:return: tuple SOC: state of charge, energy history: E in battery,
unmet_history: unmet energy history, waste_history: waste energy history
"""
DOD = depth_of_discharge
power = power.tolist()
use = use.tolist()
use_history = []
waste_history = []
unmet_history = []
energy_history = []
energy = 0
for p, u in zip(power, use):
if p >= u:
use_history.append(u)
unmet_history.append(0)
energy_new = energy * (1 - discharge_rate) + (p - u) * battery_eff
if energy_new < battery_capacity:
energy = energy_new # battery energy got update
waste_history.append(0)
else:
waste_history.append(p - u)
energy = energy
elif p < u:
energy_new = energy * (1 - discharge_rate) + (p - u) / discharge_eff
if energy_new > (1 - DOD) * battery_capacity:
energy = energy_new
unmet_history.append(0)
waste_history.append(0)
use_history.append(use)
elif energy * (1 - discharge_rate) + p * battery_eff < battery_capacity:
energy = energy * (1 - discharge_rate) + p * battery_eff
unmet_history.append(u - p)
use_history.append(0)
waste_history.append(0)
else:
unmet_history.append(u - p)
use_history.append(0)
waste_history.append(p)
energy = energy
energy_history.append(energy)
if battery_capacity == 0:
SOC = np.array(energy_history)
else:
SOC = np.array(energy_history) / battery_capacity
return SOC, energy_history, unmet_history, waste_history, use_history
if __name__ == '__main__':
b1 = Battery(10)
b1.run([1, 1, 1], [1, 1, 1])
b1.run([1, 1, 1], [10, 10, 10])
print(b1.lost_power_supply_probability())<|fim▁end|> | return LPSP |
<|file_name|>used_before_assignment_nonlocal.py<|end_file_name|><|fim▁begin|>"""Check for nonlocal and used-before-assignment"""
# pylint: disable=missing-docstring, unused-variable, no-init, too-few-public-methods
__revision__ = 0
def test_ok():
""" uses nonlocal """
cnt = 1
def wrap():
nonlocal cnt
cnt = cnt + 1
<|fim▁hole|> wrap()
def test_fail():
""" doesn't use nonlocal """
cnt = 1
def wrap():
cnt = cnt + 1 # [used-before-assignment]
wrap()
def test_fail2():
""" use nonlocal, but for other variable """
cnt = 1
count = 1
def wrap():
nonlocal count
cnt = cnt + 1 # [used-before-assignment]
wrap()
def test_fail3(arg: test_fail4): # [used-before-assignment]
""" Depends on `test_fail4`, in argument annotation. """
return arg
# +1: [used-before-assignment, used-before-assignment]
def test_fail4(*args: test_fail5, **kwargs: undefined):
""" Depends on `test_fail5` and `undefined` in
variable and named arguments annotations.
"""
return args, kwargs
def test_fail5()->undefined1: # [used-before-assignment]
""" Depends on `undefined1` in function return annotation. """
def undefined():
""" no op """
def undefined1():
""" no op """<|fim▁end|> | |
<|file_name|>authReducer.js<|end_file_name|><|fim▁begin|>import * as ActionTypes from '../constants/constants';
const initialState = {
isAuthenticated: false,
isFetching: false,
loaded: false,
message: ''
};
const authReducer = (state = initialState, action) => {
switch (action.type) {
case ActionTypes.REQUEST_CHECK_TOKEN:
return {
...state,
isAuthenticated: action.isAuthenticated,
isFetching: action.isFetching,
loaded: false,
};
case ActionTypes.TOKEN_VALID:
return {
...state,
isAuthenticated: action.isAuthenticated,
isFetching: action.isFetching,
loaded: true,
};
case ActionTypes.TOKEN_INVALID:
return {
...state,
isAuthenticated: action.isAuthenticated,
isFetching: action.isFetching,
loaded: true,
};
case ActionTypes.REQUEST_LOGIN:
return {
...state,
isAuthenticated: action.isAuthenticated,
isFetching: action.isFetching,
user: action.creds,
message: '',
};
case ActionTypes.LOGIN_SUCCESS:
return {
...state,<|fim▁hole|> isAuthenticated: true,
isFetching: false,
message: '',
};
case ActionTypes.LOGIN_FAILURE:
return {
...state,
isAuthenticated: false,
isFetching: false,
message: action.message,
};
default:
return state;
}
};
export default authReducer;<|fim▁end|> | |
<|file_name|>commands.go<|end_file_name|><|fim▁begin|>package docker
import (
"archive/tar"
"bufio"
"bytes"
"encoding/base64"
"encoding/json"
"errors"
"flag"
"fmt"
"github.com/dotcloud/docker/archive"
"github.com/dotcloud/docker/auth"
"github.com/dotcloud/docker/registry"
"github.com/dotcloud/docker/term"
"github.com/dotcloud/docker/utils"
"io"
"io/ioutil"
"net"
"net/http"
"net/http/httputil"
"net/url"
"os"
"os/signal"
"path"
"reflect"
"regexp"
"runtime"
"sort"
"strconv"
"strings"
"syscall"
"text/tabwriter"
"text/template"
"time"
)
var (
GITCOMMIT string
VERSION string
)
var (
ErrConnectionRefused = errors.New("Can't connect to docker daemon. Is 'docker -d' running on this host?")
)
func (cli *DockerCli) getMethod(name string) (func(...string) error, bool) {
methodName := "Cmd" + strings.ToUpper(name[:1]) + strings.ToLower(name[1:])
method := reflect.ValueOf(cli).MethodByName(methodName)
if !method.IsValid() {
return nil, false
}
return method.Interface().(func(...string) error), true
}
func ParseCommands(proto, addr string, args ...string) error {
cli := NewDockerCli(os.Stdin, os.Stdout, os.Stderr, proto, addr)
if len(args) > 0 {
method, exists := cli.getMethod(args[0])
if !exists {
fmt.Println("Error: Command not found:", args[0])
return cli.CmdHelp(args[1:]...)
}
return method(args[1:]...)
}
return cli.CmdHelp(args...)
}
func (cli *DockerCli) CmdHelp(args ...string) error {
if len(args) > 0 {
method, exists := cli.getMethod(args[0])
if !exists {
fmt.Fprintf(cli.err, "Error: Command not found: %s\n", args[0])
} else {
method("--help")
return nil
}
}
help := fmt.Sprintf("Usage: docker [OPTIONS] COMMAND [arg...]\n -H=[unix://%s]: tcp://host:port to bind/connect to or unix://path/to/socket to use\n\nA self-sufficient runtime for linux containers.\n\nCommands:\n", DEFAULTUNIXSOCKET)
for _, command := range [][]string{
{"attach", "Attach to a running container"},
{"build", "Build a container from a Dockerfile"},
{"commit", "Create a new image from a container's changes"},
{"cp", "Copy files/folders from the containers filesystem to the host path"},
{"diff", "Inspect changes on a container's filesystem"},
{"events", "Get real time events from the server"},
{"export", "Stream the contents of a container as a tar archive"},
{"history", "Show the history of an image"},
{"images", "List images"},
{"import", "Create a new filesystem image from the contents of a tarball"},
{"info", "Display system-wide information"},
{"insert", "Insert a file in an image"},
{"inspect", "Return low-level information on a container"},
{"kill", "Kill a running container"},
{"load", "Load an image from a tar archive"},
{"login", "Register or Login to the docker registry server"},
{"logs", "Fetch the logs of a container"},
{"port", "Lookup the public-facing port which is NAT-ed to PRIVATE_PORT"},
{"ps", "List containers"},
{"pull", "Pull an image or a repository from the docker registry server"},
{"push", "Push an image or a repository to the docker registry server"},
{"restart", "Restart a running container"},
{"rm", "Remove one or more containers"},
{"rmi", "Remove one or more images"},
{"run", "Run a command in a new container"},
{"save", "Save an image to a tar archive"},
{"search", "Search for an image in the docker index"},
{"start", "Start a stopped container"},
{"stop", "Stop a running container"},
{"tag", "Tag an image into a repository"},
{"top", "Lookup the running processes of a container"},
{"version", "Show the docker version information"},
{"wait", "Block until a container stops, then print its exit code"},
} {
help += fmt.Sprintf(" %-10.10s%s\n", command[0], command[1])
}
fmt.Fprintf(cli.err, "%s\n", help)
return nil
}
func (cli *DockerCli) CmdInsert(args ...string) error {
cmd := cli.Subcmd("insert", "IMAGE URL PATH", "Insert a file from URL in the IMAGE at PATH")
if err := cmd.Parse(args); err != nil {
return nil
}
if cmd.NArg() != 3 {
cmd.Usage()
return nil
}
v := url.Values{}
v.Set("url", cmd.Arg(1))
v.Set("path", cmd.Arg(2))
return cli.stream("POST", "/images/"+cmd.Arg(0)+"/insert?"+v.Encode(), nil, cli.out, nil)
}
// mkBuildContext returns an archive of an empty context with the contents
// of `dockerfile` at the path ./Dockerfile
func MkBuildContext(dockerfile string, files [][2]string) (archive.Archive, error) {
buf := new(bytes.Buffer)
tw := tar.NewWriter(buf)
files = append(files, [2]string{"Dockerfile", dockerfile})
for _, file := range files {
name, content := file[0], file[1]
hdr := &tar.Header{
Name: name,
Size: int64(len(content)),
}
if err := tw.WriteHeader(hdr); err != nil {
return nil, err
}
if _, err := tw.Write([]byte(content)); err != nil {
return nil, err
}
}
if err := tw.Close(); err != nil {
return nil, err
}
return buf, nil
}
func (cli *DockerCli) CmdBuild(args ...string) error {
cmd := cli.Subcmd("build", "[OPTIONS] PATH | URL | -", "Build a new container image from the source code at PATH")
tag := cmd.String("t", "", "Repository name (and optionally a tag) to be applied to the resulting image in case of success")
suppressOutput := cmd.Bool("q", false, "Suppress verbose build output")
noCache := cmd.Bool("no-cache", false, "Do not use cache when building the image")
rm := cmd.Bool("rm", false, "Remove intermediate containers after a successful build")
if err := cmd.Parse(args); err != nil {
return nil
}
if cmd.NArg() != 1 {
cmd.Usage()
return nil
}
var (
context archive.Archive
isRemote bool
err error
)
if cmd.Arg(0) == "-" {
// As a special case, 'docker build -' will build from an empty context with the
// contents of stdin as a Dockerfile
dockerfile, err := ioutil.ReadAll(cli.in)
if err != nil {
return err
}
context, err = MkBuildContext(string(dockerfile), nil)
} else if utils.IsURL(cmd.Arg(0)) || utils.IsGIT(cmd.Arg(0)) {
isRemote = true
} else {
if _, err := os.Stat(cmd.Arg(0)); err != nil {
return err
}
filename := path.Join(cmd.Arg(0), "Dockerfile")
if _, err = os.Stat(filename); os.IsNotExist(err) {
return fmt.Errorf("no Dockerfile found in %s", cmd.Arg(0))
}
context, err = archive.Tar(cmd.Arg(0), archive.Uncompressed)
}
var body io.Reader
// Setup an upload progress bar
// FIXME: ProgressReader shouldn't be this annoying to use
if context != nil {
sf := utils.NewStreamFormatter(false)
body = utils.ProgressReader(ioutil.NopCloser(context), 0, cli.err, sf, true, "", "Uploading context")
}
// Upload the build context
v := &url.Values{}
v.Set("t", *tag)
if *suppressOutput {
v.Set("q", "1")
}
if isRemote {
v.Set("remote", cmd.Arg(0))
}
if *noCache {
v.Set("nocache", "1")
}
if *rm {
v.Set("rm", "1")
}
headers := http.Header(make(map[string][]string))
if context != nil {
headers.Set("Content-Type", "application/tar")
}
err = cli.stream("POST", fmt.Sprintf("/build?%s", v.Encode()), body, cli.out, headers)
if jerr, ok := err.(*utils.JSONError); ok {
return &utils.StatusError{Status: jerr.Message, StatusCode: jerr.Code}
}
return err
}
// 'docker login': login / register a user to registry service.
//
// Prompts interactively for any of username/password/email not supplied
// via flags, POSTs the resulting AuthConfig to /auth, and persists it in
// the config file on success (or removes it again on a 401).
func (cli *DockerCli) CmdLogin(args ...string) error {
	cmd := cli.Subcmd("login", "[OPTIONS] [SERVER]", "Register or Login to a docker registry server, if no server is specified \""+auth.IndexServerAddress()+"\" is the default.")
	var username, password, email string
	cmd.StringVar(&username, "u", "", "username")
	cmd.StringVar(&password, "p", "", "password")
	cmd.StringVar(&email, "e", "", "email")
	err := cmd.Parse(args)
	if err != nil {
		// Parse already printed usage; a flag error is not a command failure.
		return nil
	}
	// Default to the public index unless an explicit server was given.
	serverAddress := auth.IndexServerAddress()
	if len(cmd.Args()) > 0 {
		serverAddress, err = registry.ExpandAndVerifyRegistryUrl(cmd.Arg(0))
		if err != nil {
			return err
		}
		fmt.Fprintf(cli.out, "Login against server at %s\n", serverAddress)
	}
	// promptDefault prints "Prompt: " or "Prompt (default): " without a newline.
	promptDefault := func(prompt string, configDefault string) {
		if configDefault == "" {
			fmt.Fprintf(cli.out, "%s: ", prompt)
		} else {
			fmt.Fprintf(cli.out, "%s (%s): ", prompt, configDefault)
		}
	}
	// readInput reads one line from in; any read error is fatal (exits the process).
	readInput := func(in io.Reader, out io.Writer) string {
		reader := bufio.NewReader(in)
		line, _, err := reader.ReadLine()
		if err != nil {
			fmt.Fprintln(out, err.Error())
			os.Exit(1)
		}
		return string(line)
	}
	cli.LoadConfigFile()
	// Start from any credentials already stored for this server.
	authconfig, ok := cli.configFile.Configs[serverAddress]
	if !ok {
		authconfig = auth.AuthConfig{}
	}
	if username == "" {
		promptDefault("Username", authconfig.Username)
		username = readInput(cli.in, cli.out)
		if username == "" {
			// Empty input keeps the stored username.
			username = authconfig.Username
		}
	}
	if username != authconfig.Username {
		// New user for this server: we must collect a password (echo disabled)
		// and an email; stored values for a different user are not reused.
		if password == "" {
			oldState, _ := term.SaveState(cli.terminalFd)
			fmt.Fprintf(cli.out, "Password: ")
			term.DisableEcho(cli.terminalFd, oldState)
			password = readInput(cli.in, cli.out)
			fmt.Fprint(cli.out, "\n")
			term.RestoreTerminal(cli.terminalFd, oldState)
			if password == "" {
				return fmt.Errorf("Error : Password Required")
			}
		}
		if email == "" {
			promptDefault("Email", authconfig.Email)
			email = readInput(cli.in, cli.out)
			if email == "" {
				email = authconfig.Email
			}
		}
	} else {
		// Same user as stored: reuse the saved password and email.
		password = authconfig.Password
		email = authconfig.Email
	}
	authconfig.Username = username
	authconfig.Password = password
	authconfig.Email = email
	authconfig.ServerAddress = serverAddress
	cli.configFile.Configs[serverAddress] = authconfig
	body, statusCode, err := cli.call("POST", "/auth", cli.configFile.Configs[serverAddress])
	if statusCode == 401 {
		// Rejected credentials: drop them from the config before saving.
		delete(cli.configFile.Configs, serverAddress)
		auth.SaveConfig(cli.configFile)
		return err
	}
	if err != nil {
		return err
	}
	var out2 APIAuth
	err = json.Unmarshal(body, &out2)
	if err != nil {
		// Unparseable response: reload the on-disk config so the in-memory
		// state is not left half-updated.
		cli.configFile, _ = auth.LoadConfig(os.Getenv("HOME"))
		return err
	}
	auth.SaveConfig(cli.configFile)
	if out2.Status != "" {
		fmt.Fprintf(cli.out, "%s\n", out2.Status)
	}
	return nil
}
// 'docker wait': block until a container stops
//
// Waits on each named container in order, printing its exit code to
// stdout. Per-container failures are reported on stderr and the command
// returns a single aggregate error if any wait failed.
func (cli *DockerCli) CmdWait(args ...string) error {
	cmd := cli.Subcmd("wait", "CONTAINER [CONTAINER...]", "Block until a container stops, then print its exit code.")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() < 1 {
		cmd.Usage()
		return nil
	}
	var aggregateErr error
	for _, containerName := range cmd.Args() {
		exitCode, waitErr := waitForExit(cli, containerName)
		if waitErr != nil {
			// Report the failure but keep waiting on the remaining containers.
			fmt.Fprintf(cli.err, "%s\n", waitErr)
			aggregateErr = fmt.Errorf("Error: failed to wait one or more containers")
			continue
		}
		fmt.Fprintf(cli.out, "%d\n", exitCode)
	}
	return aggregateErr
}
// 'docker version': show version information
//
// Prints client version/commit, queries the server for its version via
// GET /version, and finally compares both against the latest released
// version, suggesting an update when either side is behind.
func (cli *DockerCli) CmdVersion(args ...string) error {
	cmd := cli.Subcmd("version", "", "Show the docker version information.")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() > 0 {
		cmd.Usage()
		return nil
	}
	if VERSION != "" {
		fmt.Fprintf(cli.out, "Client version: %s\n", VERSION)
	}
	fmt.Fprintf(cli.out, "Go version (client): %s\n", runtime.Version())
	if GITCOMMIT != "" {
		fmt.Fprintf(cli.out, "Git commit (client): %s\n", GITCOMMIT)
	}
	body, _, err := cli.call("GET", "/version", nil)
	if err != nil {
		return err
	}
	var out APIVersion
	err = json.Unmarshal(body, &out)
	if err != nil {
		utils.Errorf("Error unmarshal: body: %s, err: %s\n", body, err)
		return err
	}
	if out.Version != "" {
		fmt.Fprintf(cli.out, "Server version: %s\n", out.Version)
	}
	if out.GitCommit != "" {
		fmt.Fprintf(cli.out, "Git commit (server): %s\n", out.GitCommit)
	}
	if out.GoVersion != "" {
		fmt.Fprintf(cli.out, "Go version (server): %s\n", out.GoVersion)
	}
	release := utils.GetReleaseVersion()
	if release != "" {
		fmt.Fprintf(cli.out, "Last stable version: %s", release)
		// FIX: strings.Trim takes a *cutset* (it would strip any leading or
		// trailing '-', 'd', 'e', 'v' characters individually), which corrupts
		// version strings such as "0.7.0-dev" or any version ending in those
		// letters. TrimSuffix removes exactly the "-dev" suffix as intended.
		if (VERSION != "" || out.Version != "") && (strings.TrimSuffix(VERSION, "-dev") != release || strings.TrimSuffix(out.Version, "-dev") != release) {
			fmt.Fprintf(cli.out, ", please update docker")
		}
		fmt.Fprintf(cli.out, "\n")
	}
	return nil
}
// 'docker info': display system-wide information.
//
// Fetches GET /info and pretty-prints counters, driver status, optional
// debug details, registry credentials, and kernel-capability warnings.
func (cli *DockerCli) CmdInfo(args ...string) error {
	cmd := cli.Subcmd("info", "", "Display system-wide information")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() > 0 {
		cmd.Usage()
		return nil
	}
	body, _, err := cli.call("GET", "/info", nil)
	if err != nil {
		return err
	}
	var out APIInfo
	if err := json.Unmarshal(body, &out); err != nil {
		return err
	}
	fmt.Fprintf(cli.out, "Containers: %d\n", out.Containers)
	fmt.Fprintf(cli.out, "Images: %d\n", out.Images)
	fmt.Fprintf(cli.out, "Driver: %s\n", out.Driver)
	// DriverStatus is a list of [key, value] pairs supplied by the server.
	for _, pair := range out.DriverStatus {
		fmt.Fprintf(cli.out, " %s: %s\n", pair[0], pair[1])
	}
	// Extra diagnostics when either side is in debug mode.
	if out.Debug || os.Getenv("DEBUG") != "" {
		fmt.Fprintf(cli.out, "Debug mode (server): %v\n", out.Debug)
		fmt.Fprintf(cli.out, "Debug mode (client): %v\n", os.Getenv("DEBUG") != "")
		fmt.Fprintf(cli.out, "Fds: %d\n", out.NFd)
		fmt.Fprintf(cli.out, "Goroutines: %d\n", out.NGoroutines)
		fmt.Fprintf(cli.out, "LXC Version: %s\n", out.LXCVersion)
		fmt.Fprintf(cli.out, "EventsListeners: %d\n", out.NEventsListener)
		fmt.Fprintf(cli.out, "Kernel Version: %s\n", out.KernelVersion)
	}
	// Show which user (if any) is logged in against the server's index.
	if len(out.IndexServerAddress) != 0 {
		cli.LoadConfigFile()
		u := cli.configFile.Configs[out.IndexServerAddress].Username
		if len(u) > 0 {
			fmt.Fprintf(cli.out, "Username: %v\n", u)
			fmt.Fprintf(cli.out, "Registry: %v\n", out.IndexServerAddress)
		}
	}
	// Warnings go to stderr so they don't pollute parseable output.
	if !out.MemoryLimit {
		fmt.Fprintf(cli.err, "WARNING: No memory limit support\n")
	}
	if !out.SwapLimit {
		fmt.Fprintf(cli.err, "WARNING: No swap limit support\n")
	}
	if !out.IPv4Forwarding {
		fmt.Fprintf(cli.err, "WARNING: IPv4 forwarding is disabled.\n")
	}
	return nil
}
// CmdStop implements 'docker stop': stop each named container, giving it
// -t seconds to exit after SIGTERM before it is killed. Per-container
// failures go to stderr; one aggregate error is returned if any stop failed.
func (cli *DockerCli) CmdStop(args ...string) error {
	cmd := cli.Subcmd("stop", "[OPTIONS] CONTAINER [CONTAINER...]", "Stop a running container (Send SIGTERM, and then SIGKILL after grace period)")
	nSeconds := cmd.Int("t", 10, "Number of seconds to wait for the container to stop before killing it.")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() < 1 {
		cmd.Usage()
		return nil
	}
	// The grace period is identical for every container, so encode it once.
	query := url.Values{}
	query.Set("t", strconv.Itoa(*nSeconds))
	var aggregateErr error
	for _, containerName := range cmd.Args() {
		if _, _, callErr := cli.call("POST", "/containers/"+containerName+"/stop?"+query.Encode(), nil); callErr != nil {
			fmt.Fprintf(cli.err, "%s\n", callErr)
			aggregateErr = fmt.Errorf("Error: failed to stop one or more containers")
			continue
		}
		fmt.Fprintf(cli.out, "%s\n", containerName)
	}
	return aggregateErr
}
// CmdRestart implements 'docker restart': restart each named container,
// passing the -t grace period to the server. Failures are printed to
// stderr and summarized in a single returned error.
func (cli *DockerCli) CmdRestart(args ...string) error {
	cmd := cli.Subcmd("restart", "[OPTIONS] CONTAINER [CONTAINER...]", "Restart a running container")
	nSeconds := cmd.Int("t", 10, "Number of seconds to try to stop for before killing the container. Once killed it will then be restarted. Default=10")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() < 1 {
		cmd.Usage()
		return nil
	}
	v := url.Values{}
	v.Set("t", strconv.Itoa(*nSeconds))
	var encounteredError error
	for _, name := range cmd.Args() {
		// Keep going on error so the remaining containers still restart.
		_, _, err := cli.call("POST", "/containers/"+name+"/restart?"+v.Encode(), nil)
		if err != nil {
			fmt.Fprintf(cli.err, "%s\n", err)
			encounteredError = fmt.Errorf("Error: failed to restart one or more containers")
		} else {
			fmt.Fprintf(cli.out, "%s\n", name)
		}
	}
	return encounteredError
}
// forwardAllSignals relays every signal received by the CLI process to the
// container cid via the kill API. It returns the signal channel so callers
// can stop the relay (see utils.StopCatch usage at call sites).
func (cli *DockerCli) forwardAllSignals(cid string) chan os.Signal {
	sigc := make(chan os.Signal, 1)
	utils.CatchAll(sigc)
	// Relay runs until the channel is closed by the caller.
	go func() {
		for s := range sigc {
			// SIGCHLD is process-management noise, not meant for the container.
			if s == syscall.SIGCHLD {
				continue
			}
			if _, _, err := cli.call("POST", fmt.Sprintf("/containers/%s/kill?signal=%d", cid, s), nil); err != nil {
				// Best-effort: a failed forward is logged, not fatal.
				utils.Debugf("Error sending signal: %s", err)
			}
		}
	}()
	return sigc
}
// CmdStart implements 'docker start': start one or more stopped containers.
// With -a/-i only a single container may be given; the CLI then attaches
// to its streams (hijacked connection) before issuing the start call, and
// forwards signals for non-tty containers.
func (cli *DockerCli) CmdStart(args ...string) error {
	cmd := cli.Subcmd("start", "CONTAINER [CONTAINER...]", "Restart a stopped container")
	attach := cmd.Bool("a", false, "Attach container's stdout/stderr and forward all signals to the process")
	openStdin := cmd.Bool("i", false, "Attach container's stdin")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() < 1 {
		cmd.Usage()
		return nil
	}
	var cErr chan error
	var tty bool
	if *attach || *openStdin {
		if cmd.NArg() > 1 {
			return fmt.Errorf("Impossible to start and attach multiple containers at once.")
		}
		// Inspect first: we need Tty/OpenStdin config before attaching.
		body, _, err := cli.call("GET", "/containers/"+cmd.Arg(0)+"/json", nil)
		if err != nil {
			return err
		}
		container := &Container{}
		err = json.Unmarshal(body, container)
		if err != nil {
			return err
		}
		tty = container.Config.Tty
		// Without a tty, forward our signals to the container process.
		if !container.Config.Tty {
			sigc := cli.forwardAllSignals(cmd.Arg(0))
			defer utils.StopCatch(sigc)
		}
		var in io.ReadCloser
		v := url.Values{}
		v.Set("stream", "1")
		// Only wire up stdin if both the flag and the container config allow it.
		if *openStdin && container.Config.OpenStdin {
			v.Set("stdin", "1")
			in = cli.in
		}
		v.Set("stdout", "1")
		v.Set("stderr", "1")
		// Attach asynchronously so the start call below can proceed; the
		// hijack result is collected from cErr at the end.
		cErr = utils.Go(func() error {
			return cli.hijack("POST", "/containers/"+cmd.Arg(0)+"/attach?"+v.Encode(), container.Config.Tty, in, cli.out, cli.err, nil)
		})
	}
	var encounteredError error
	for _, name := range cmd.Args() {
		_, _, err := cli.call("POST", "/containers/"+name+"/start", nil)
		if err != nil {
			// In attach mode the error surfaces via the attach stream instead.
			if !*attach || !*openStdin {
				fmt.Fprintf(cli.err, "%s\n", err)
				encounteredError = fmt.Errorf("Error: failed to start one or more containers")
			}
		} else {
			if !*attach || !*openStdin {
				fmt.Fprintf(cli.out, "%s\n", name)
			}
		}
	}
	if encounteredError != nil {
		if *openStdin || *attach {
			// Unblock and drain the attach goroutine before returning.
			cli.in.Close()
			<-cErr
		}
		return encounteredError
	}
	if *openStdin || *attach {
		// Keep the local terminal size in sync with the container's tty.
		if tty && cli.isTerminal {
			if err := cli.monitorTtySize(cmd.Arg(0)); err != nil {
				utils.Errorf("Error monitoring TTY size: %s\n", err)
			}
		}
		return <-cErr
	}
	return nil
}
// CmdInspect implements 'docker inspect': print low-level JSON for each
// named container or image. Without -format the results are emitted as a
// single indented JSON array; with -format each object is rendered
// through the supplied Go template instead.
func (cli *DockerCli) CmdInspect(args ...string) error {
	cmd := cli.Subcmd("inspect", "CONTAINER|IMAGE [CONTAINER|IMAGE...]", "Return low-level information on a container/image")
	tmplStr := cmd.String("format", "", "Format the output using the given go template.")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() < 1 {
		cmd.Usage()
		return nil
	}
	var tmpl *template.Template
	if *tmplStr != "" {
		var err error
		if tmpl, err = template.New("").Parse(*tmplStr); err != nil {
			fmt.Fprintf(cli.err, "Template parsing error: %v\n", err)
			// Exit code 64: usage error (bad template supplied by the user).
			return &utils.StatusError{StatusCode: 64,
				Status: "Template parsing error: " + err.Error()}
		}
	}
	// JSON array is assembled in this buffer; entries are comma-joined and
	// the trailing comma is trimmed before the closing bracket.
	indented := new(bytes.Buffer)
	indented.WriteByte('[')
	status := 0
	for _, name := range cmd.Args() {
		// Try the name as a container first, then fall back to an image.
		obj, _, err := cli.call("GET", "/containers/"+name+"/json", nil)
		if err != nil {
			obj, _, err = cli.call("GET", "/images/"+name+"/json", nil)
			if err != nil {
				if strings.Contains(err.Error(), "No such") {
					fmt.Fprintf(cli.err, "Error: No such image or container: %s\n", name)
				} else {
					fmt.Fprintf(cli.err, "%s", err)
				}
				status = 1
				continue
			}
		}
		if tmpl == nil {
			if err = json.Indent(indented, obj, "", " "); err != nil {
				fmt.Fprintf(cli.err, "%s\n", err)
				status = 1
				continue
			}
		} else {
			// Has template, will render
			var value interface{}
			if err := json.Unmarshal(obj, &value); err != nil {
				fmt.Fprintf(cli.err, "%s\n", err)
				status = 1
				continue
			}
			if err := tmpl.Execute(cli.out, value); err != nil {
				return err
			}
			cli.out.Write([]byte{'\n'})
		}
		indented.WriteString(",")
	}
	if indented.Len() > 1 {
		// Remove trailing ','
		indented.Truncate(indented.Len() - 1)
	}
	indented.WriteByte(']')
	// The buffered array is only printed in non-template mode.
	if tmpl == nil {
		if _, err := io.Copy(cli.out, indented); err != nil {
			return err
		}
	}
	if status != 0 {
		return &utils.StatusError{StatusCode: status}
	}
	return nil
}
// CmdTop implements 'docker top': show the processes running inside a
// container. Any arguments after the container name are forwarded to the
// server as ps options, and the result is rendered as an aligned table.
func (cli *DockerCli) CmdTop(args ...string) error {
	cmd := cli.Subcmd("top", "CONTAINER [ps OPTIONS]", "Lookup the running processes of a container")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() == 0 {
		cmd.Usage()
		return nil
	}
	query := url.Values{}
	if cmd.NArg() > 1 {
		// Everything after the container name is passed through to ps.
		query.Set("ps_args", strings.Join(cmd.Args()[1:], " "))
	}
	body, _, err := cli.call("GET", "/containers/"+cmd.Arg(0)+"/top?"+query.Encode(), nil)
	if err != nil {
		return err
	}
	procList := APITop{}
	if err := json.Unmarshal(body, &procList); err != nil {
		return err
	}
	// Tab-separated columns aligned by tabwriter.
	writer := tabwriter.NewWriter(cli.out, 20, 1, 3, ' ', 0)
	fmt.Fprintln(writer, strings.Join(procList.Titles, "\t"))
	for _, process := range procList.Processes {
		fmt.Fprintln(writer, strings.Join(process, "\t"))
	}
	writer.Flush()
	return nil
}
// CmdPort implements 'docker port': print the host-side address(es) that
// are NAT-ed to the given private port of a container. The port argument
// may be "PORT" or "PORT/PROTO"; the protocol defaults to tcp.
func (cli *DockerCli) CmdPort(args ...string) error {
	cmd := cli.Subcmd("port", "CONTAINER PRIVATE_PORT", "Lookup the public-facing port which is NAT-ed to PRIVATE_PORT")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() != 2 {
		cmd.Usage()
		return nil
	}
	port := cmd.Arg(1)
	proto := "tcp"
	// Split an optional "/proto" suffix off the port argument.
	parts := strings.SplitN(port, "/", 2)
	if len(parts) == 2 && len(parts[1]) != 0 {
		port = parts[0]
		proto = parts[1]
	}
	body, _, err := cli.call("GET", "/containers/"+cmd.Arg(0)+"/json", nil)
	if err != nil {
		return err
	}
	var out Container
	err = json.Unmarshal(body, &out)
	if err != nil {
		return err
	}
	// One private port may be published to several host frontends.
	if frontends, exists := out.NetworkSettings.Ports[Port(port+"/"+proto)]; exists && frontends != nil {
		for _, frontend := range frontends {
			fmt.Fprintf(cli.out, "%s:%s\n", frontend.HostIp, frontend.HostPort)
		}
	} else {
		return fmt.Errorf("Error: No public port '%s' published for %s", cmd.Arg(1), cmd.Arg(0))
	}
	return nil
}
// 'docker rmi IMAGE' removes all images with the name IMAGE
//
// Each deletion's server response lists what was actually deleted or
// merely untagged; both are echoed to stdout. Failures are reported per
// image on stderr and summarized in one returned error.
func (cli *DockerCli) CmdRmi(args ...string) error {
	cmd := cli.Subcmd("rmi", "IMAGE [IMAGE...]", "Remove one or more images")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() < 1 {
		cmd.Usage()
		return nil
	}
	var encounteredError error
	for _, name := range cmd.Args() {
		body, _, err := cli.call("DELETE", "/images/"+name, nil)
		if err != nil {
			fmt.Fprintf(cli.err, "%s\n", err)
			encounteredError = fmt.Errorf("Error: failed to remove one or more images")
		} else {
			var outs []APIRmi
			err = json.Unmarshal(body, &outs)
			if err != nil {
				fmt.Fprintf(cli.err, "%s\n", err)
				encounteredError = fmt.Errorf("Error: failed to remove one or more images")
				continue
			}
			// Each entry is either a deleted layer or an untagged reference.
			for _, out := range outs {
				if out.Deleted != "" {
					fmt.Fprintf(cli.out, "Deleted: %s\n", out.Deleted)
				} else {
					fmt.Fprintf(cli.out, "Untagged: %s\n", out.Untagged)
				}
			}
		}
	}
	return encounteredError
}
// CmdHistory implements 'docker history': show the layer history of an
// image as a table (or bare IDs with -q). -notrunc disables ID and
// CREATED BY truncation.
func (cli *DockerCli) CmdHistory(args ...string) error {
	cmd := cli.Subcmd("history", "[OPTIONS] IMAGE", "Show the history of an image")
	quiet := cmd.Bool("q", false, "only show numeric IDs")
	noTrunc := cmd.Bool("notrunc", false, "Don't truncate output")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() != 1 {
		cmd.Usage()
		return nil
	}
	body, _, err := cli.call("GET", "/images/"+cmd.Arg(0)+"/history", nil)
	if err != nil {
		return err
	}
	var outs []APIHistory
	err = json.Unmarshal(body, &outs)
	if err != nil {
		return err
	}
	w := tabwriter.NewWriter(cli.out, 20, 1, 3, ' ', 0)
	if !*quiet {
		fmt.Fprintln(w, "IMAGE\tCREATED\tCREATED BY\tSIZE")
	}
	for _, out := range outs {
		if !*quiet {
			if *noTrunc {
				fmt.Fprintf(w, "%s\t", out.ID)
			} else {
				fmt.Fprintf(w, "%s\t", utils.TruncateID(out.ID))
			}
			// Created is a Unix timestamp; render as a relative age.
			fmt.Fprintf(w, "%s ago\t", utils.HumanDuration(time.Now().UTC().Sub(time.Unix(out.Created, 0))))
			if *noTrunc {
				fmt.Fprintf(w, "%s\t", out.CreatedBy)
			} else {
				// Command column is capped at 45 chars unless -notrunc.
				fmt.Fprintf(w, "%s\t", utils.Trunc(out.CreatedBy, 45))
			}
			fmt.Fprintf(w, "%s\n", utils.HumanSize(out.Size))
		} else {
			if *noTrunc {
				fmt.Fprintln(w, out.ID)
			} else {
				fmt.Fprintln(w, utils.TruncateID(out.ID))
			}
		}
	}
	w.Flush()
	return nil
}
// CmdRm implements 'docker rm': delete one or more containers.
// -v also removes the container's volumes; -link removes only the named
// link, not the underlying container. Per-container failures go to
// stderr and are summarized in a single returned error.
func (cli *DockerCli) CmdRm(args ...string) error {
	cmd := cli.Subcmd("rm", "[OPTIONS] CONTAINER [CONTAINER...]", "Remove one or more containers")
	v := cmd.Bool("v", false, "Remove the volumes associated to the container")
	link := cmd.Bool("link", false, "Remove the specified link and not the underlying container")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() < 1 {
		cmd.Usage()
		return nil
	}
	// The same query applies to every container, so build it once up front.
	query := url.Values{}
	if *v {
		query.Set("v", "1")
	}
	if *link {
		query.Set("link", "1")
	}
	var aggregateErr error
	for _, containerName := range cmd.Args() {
		if _, _, callErr := cli.call("DELETE", "/containers/"+containerName+"?"+query.Encode(), nil); callErr != nil {
			fmt.Fprintf(cli.err, "%s\n", callErr)
			aggregateErr = fmt.Errorf("Error: failed to remove one or more containers")
			continue
		}
		fmt.Fprintf(cli.out, "%s\n", containerName)
	}
	return aggregateErr
}
// 'docker kill NAME' kills a running container
//
// Sends SIGKILL to each named container. Per-container failures are
// printed to stderr and summarized in a single returned error.
func (cli *DockerCli) CmdKill(args ...string) error {
	cmd := cli.Subcmd("kill", "CONTAINER [CONTAINER...]", "Kill a running container (send SIGKILL)")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() < 1 {
		cmd.Usage()
		return nil
	}
	var encounteredError error
	// FIX: iterate over the parsed positional arguments (cmd.Args()), not
	// the raw args slice. Ranging over args would treat any flag tokens as
	// container names; every sibling command (stop, restart, rm, wait)
	// already uses cmd.Args() here.
	for _, name := range cmd.Args() {
		if _, _, err := cli.call("POST", "/containers/"+name+"/kill", nil); err != nil {
			fmt.Fprintf(cli.err, "%s\n", err)
			encounteredError = fmt.Errorf("Error: failed to kill one or more containers")
		} else {
			fmt.Fprintf(cli.out, "%s\n", name)
		}
	}
	return encounteredError
}
// CmdImport implements 'docker import': create an image from a tarball.
// The source may be a URL or "-" for stdin; an optional REPOSITORY[:TAG]
// names the result. The legacy 3-argument form (URL REPO TAG) is still
// accepted but warns that it is deprecated.
func (cli *DockerCli) CmdImport(args ...string) error {
	cmd := cli.Subcmd("import", "URL|- [REPOSITORY[:TAG]]", "Create an empty filesystem image and import the contents of the tarball (.tar, .tar.gz, .tgz, .bzip, .tar.xz, .txz) into it, then optionally tag it.")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() < 1 {
		cmd.Usage()
		return nil
	}
	var src, repository, tag string
	if cmd.NArg() == 3 {
		fmt.Fprintf(cli.err, "[DEPRECATED] The format 'URL|- [REPOSITORY [TAG]]' as been deprecated. Please use URL|- [REPOSITORY[:TAG]]\n")
		src, repository, tag = cmd.Arg(0), cmd.Arg(1), cmd.Arg(2)
	} else {
		src = cmd.Arg(0)
		// "repo:tag" is split here; tag may come back empty.
		repository, tag = utils.ParseRepositoryTag(cmd.Arg(1))
	}
	v := url.Values{}
	v.Set("repo", repository)
	v.Set("tag", tag)
	v.Set("fromSrc", src)
	var in io.Reader
	// "-" means the tarball is streamed from the CLI's stdin.
	if src == "-" {
		in = cli.in
	}
	return cli.stream("POST", "/images/create?"+v.Encode(), in, cli.out, nil)
}
// CmdPush implements 'docker push': push an image or repository to a
// registry. Credentials for the resolved endpoint are sent in the
// X-Registry-Auth header; if the registry demands a login the user is
// prompted once and the push is retried.
func (cli *DockerCli) CmdPush(args ...string) error {
	cmd := cli.Subcmd("push", "NAME", "Push an image or a repository to the registry")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	name := cmd.Arg(0)
	if name == "" {
		cmd.Usage()
		return nil
	}
	cli.LoadConfigFile()
	// Resolve the Repository name from fqn to endpoint + name
	endpoint, _, err := registry.ResolveRepositoryName(name)
	if err != nil {
		return err
	}
	// Resolve the Auth config relevant for this server
	authConfig := cli.configFile.ResolveAuthConfig(endpoint)
	// If we're not using a custom registry, we know the restrictions
	// applied to repository names and can warn the user in advance.
	// Custom repositories can have different rules, and we must also
	// allow pushing by image ID.
	if len(strings.SplitN(name, "/", 2)) == 1 {
		username := cli.configFile.Configs[auth.IndexServerAddress()].Username
		if username == "" {
			username = "<user>"
		}
		return fmt.Errorf("Impossible to push a \"root\" repository. Please rename your repository in <user>/<repo> (ex: %s/%s)", username, name)
	}
	v := url.Values{}
	// push performs one push attempt with the given credentials, passed
	// base64-encoded in the X-Registry-Auth header.
	push := func(authConfig auth.AuthConfig) error {
		buf, err := json.Marshal(authConfig)
		if err != nil {
			return err
		}
		registryAuthHeader := []string{
			base64.URLEncoding.EncodeToString(buf),
		}
		return cli.stream("POST", "/images/"+name+"/push?"+v.Encode(), nil, cli.out, map[string][]string{
			"X-Registry-Auth": registryAuthHeader,
		})
	}
	if err := push(authConfig); err != nil {
		// Registry requires login: prompt interactively, then retry once
		// with the freshly stored credentials.
		if err.Error() == registry.ErrLoginRequired.Error() {
			fmt.Fprintln(cli.out, "\nPlease login prior to push:")
			if err := cli.CmdLogin(endpoint); err != nil {
				return err
			}
			authConfig := cli.configFile.ResolveAuthConfig(endpoint)
			return push(authConfig)
		}
		return err
	}
	return nil
}
// CmdPull implements 'docker pull': pull an image or repository from a
// registry. A tag may be given via -t or embedded in the name as
// NAME:TAG. Credentials for the resolved endpoint are sent in the
// X-Registry-Auth header; if the registry demands a login the user is
// prompted once and the pull is retried.
func (cli *DockerCli) CmdPull(args ...string) error {
	cmd := cli.Subcmd("pull", "NAME", "Pull an image or a repository from the registry")
	tag := cmd.String("t", "", "Download tagged image in repository")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() != 1 {
		cmd.Usage()
		return nil
	}
	remote, parsedTag := utils.ParseRepositoryTag(cmd.Arg(0))
	// The -t flag wins; otherwise use any tag embedded in the name.
	if *tag == "" {
		*tag = parsedTag
	}
	// Resolve the Repository name from fqn to endpoint + name
	endpoint, _, err := registry.ResolveRepositoryName(remote)
	if err != nil {
		return err
	}
	cli.LoadConfigFile()
	// Resolve the Auth config relevant for this server
	authConfig := cli.configFile.ResolveAuthConfig(endpoint)
	v := url.Values{}
	v.Set("fromImage", remote)
	v.Set("tag", *tag)
	// pull performs one pull attempt with the given credentials, passed
	// base64-encoded in the X-Registry-Auth header.
	pull := func(authConfig auth.AuthConfig) error {
		buf, err := json.Marshal(authConfig)
		if err != nil {
			return err
		}
		registryAuthHeader := []string{
			base64.URLEncoding.EncodeToString(buf),
		}
		return cli.stream("POST", "/images/create?"+v.Encode(), nil, cli.out, map[string][]string{
			"X-Registry-Auth": registryAuthHeader,
		})
	}
	if err := pull(authConfig); err != nil {
		if err.Error() == registry.ErrLoginRequired.Error() {
			// FIX: message previously said "push" — copy/paste from CmdPush.
			fmt.Fprintln(cli.out, "\nPlease login prior to pull:")
			if err := cli.CmdLogin(endpoint); err != nil {
				return err
			}
			authConfig := cli.configFile.ResolveAuthConfig(endpoint)
			return pull(authConfig)
		}
		return err
	}
	return nil
}
// CmdImages implements 'docker images': list images in one of three
// modes — graphviz digraph (-viz), ASCII parent/child tree (-tree), or
// the default table. -a includes intermediate layers, -q prints bare
// IDs, -notrunc disables ID truncation, and an optional NAME filters
// the table view.
func (cli *DockerCli) CmdImages(args ...string) error {
	cmd := cli.Subcmd("images", "[OPTIONS] [NAME]", "List images")
	quiet := cmd.Bool("q", false, "only show numeric IDs")
	all := cmd.Bool("a", false, "show all images (by default filter out the intermediate images used to build)")
	noTrunc := cmd.Bool("notrunc", false, "Don't truncate output")
	flViz := cmd.Bool("viz", false, "output graph in graphviz format")
	flTree := cmd.Bool("tree", false, "output graph in tree format")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() > 1 {
		cmd.Usage()
		return nil
	}
	if *flViz {
		// Graphviz mode: every image (all=1) becomes a node, parent links
		// become edges; tagged images get a labeled box node.
		body, _, err := cli.call("GET", "/images/json?all=1", nil)
		if err != nil {
			return err
		}
		var outs []APIImages
		err = json.Unmarshal(body, &outs)
		if err != nil {
			return err
		}
		fmt.Fprintf(cli.out, "digraph docker {\n")
		for _, image := range outs {
			if image.ParentId == "" {
				// Root layers hang off an invisible "base" node.
				fmt.Fprintf(cli.out, " base -> \"%s\" [style=invis]\n", utils.TruncateID(image.ID))
			} else {
				fmt.Fprintf(cli.out, " \"%s\" -> \"%s\"\n", utils.TruncateID(image.ParentId), utils.TruncateID(image.ID))
			}
			if image.RepoTags[0] != "<none>:<none>" {
				fmt.Fprintf(cli.out, " \"%s\" [label=\"%s\\n%s\",shape=box,fillcolor=\"paleturquoise\",style=\"filled,rounded\"];\n", utils.TruncateID(image.ID), utils.TruncateID(image.ID), strings.Join(image.RepoTags, "\\n"))
			}
		}
		fmt.Fprintf(cli.out, " base [style=invisible]\n}\n")
	} else if *flTree {
		// Tree mode: index all images by parent, then walk from either the
		// named start image or all roots.
		body, _, err := cli.call("GET", "/images/json?all=1", nil)
		if err != nil {
			return err
		}
		var outs []APIImages
		if err := json.Unmarshal(body, &outs); err != nil {
			return err
		}
		var (
			startImageArg = cmd.Arg(0)
			startImage APIImages
			roots []APIImages
			byParent = make(map[string][]APIImages)
		)
		for _, image := range outs {
			if image.ParentId == "" {
				roots = append(roots, image)
			} else {
				if children, exists := byParent[image.ParentId]; exists {
					byParent[image.ParentId] = append(children, image)
				} else {
					byParent[image.ParentId] = []APIImages{image}
				}
			}
			// The start image may be named by full ID, short ID, or repo:tag.
			if startImageArg != "" {
				if startImageArg == image.ID || startImageArg == utils.TruncateID(image.ID) {
					startImage = image
				}
				for _, repotag := range image.RepoTags {
					if repotag == startImageArg {
						startImage = image
					}
				}
			}
		}
		if startImageArg != "" {
			WalkTree(cli, noTrunc, []APIImages{startImage}, byParent, "")
		} else {
			WalkTree(cli, noTrunc, roots, byParent, "")
		}
	} else {
		// Table mode: one row per repo:tag of each image.
		v := url.Values{}
		if cmd.NArg() == 1 {
			v.Set("filter", cmd.Arg(0))
		}
		if *all {
			v.Set("all", "1")
		}
		body, _, err := cli.call("GET", "/images/json?"+v.Encode(), nil)
		if err != nil {
			return err
		}
		var outs []APIImages
		err = json.Unmarshal(body, &outs)
		if err != nil {
			return err
		}
		w := tabwriter.NewWriter(cli.out, 20, 1, 3, ' ', 0)
		if !*quiet {
			fmt.Fprintln(w, "REPOSITORY\tTAG\tIMAGE ID\tCREATED\tVIRTUAL SIZE")
		}
		for _, out := range outs {
			for _, repotag := range out.RepoTags {
				repo, tag := utils.ParseRepositoryTag(repotag)
				if !*noTrunc {
					out.ID = utils.TruncateID(out.ID)
				}
				if !*quiet {
					fmt.Fprintf(w, "%s\t%s\t%s\t%s ago\t%s\n", repo, tag, out.ID, utils.HumanDuration(time.Now().UTC().Sub(time.Unix(out.Created, 0))), utils.HumanSize(out.VirtualSize))
				} else {
					fmt.Fprintln(w, out.ID)
				}
			}
		}
		if !*quiet {
			w.Flush()
		}
	}
	return nil
}
// WalkTree recursively prints images as an ASCII tree. The last sibling
// (and a lone child) gets the "└─" connector and a blank continuation
// prefix; other siblings get "|─" and a "| " continuation so the trunk
// line is drawn through them.
func WalkTree(cli *DockerCli, noTrunc *bool, images []APIImages, byParent map[string][]APIImages, prefix string) {
	if len(images) > 1 {
		length := len(images)
		for index, image := range images {
			if index+1 == length {
				// Last sibling: closing connector, no trunk below it.
				PrintTreeNode(cli, noTrunc, image, prefix+"└─")
				if subimages, exists := byParent[image.ID]; exists {
					WalkTree(cli, noTrunc, subimages, byParent, prefix+" ")
				}
			} else {
				PrintTreeNode(cli, noTrunc, image, prefix+"|─")
				if subimages, exists := byParent[image.ID]; exists {
					WalkTree(cli, noTrunc, subimages, byParent, prefix+"| ")
				}
			}
		}
	} else {
		// Zero or one image: a single child is rendered as a last sibling.
		for _, image := range images {
			PrintTreeNode(cli, noTrunc, image, prefix+"└─")
			if subimages, exists := byParent[image.ID]; exists {
				WalkTree(cli, noTrunc, subimages, byParent, prefix+" ")
			}
		}
	}
}
// PrintTreeNode prints a single tree row: prefix, (possibly truncated)
// image ID, sizes, and — for tagged images — the list of repo tags.
func PrintTreeNode(cli *DockerCli, noTrunc *bool, image APIImages, prefix string) {
	displayID := image.ID
	if !*noTrunc {
		displayID = utils.TruncateID(image.ID)
	}
	fmt.Fprintf(cli.out, "%s%s Size: %s (virtual %s)", prefix, displayID, utils.HumanSize(image.Size), utils.HumanSize(image.VirtualSize))
	// Untagged images end the line immediately; tagged ones append their tags.
	if image.RepoTags[0] == "<none>:<none>" {
		fmt.Fprint(cli.out, "\n")
		return
	}
	fmt.Fprintf(cli.out, " Tags: %s\n", strings.Join(image.RepoTags, ", "))
}
// displayablePorts renders a container's port list for 'docker ps':
// unpublished ports as "PRIVATE/proto", published ones as
// "IP:PUBLIC->PRIVATE/proto", sorted and comma-joined.
func displayablePorts(ports []APIPort) string {
	result := []string{}
	for _, port := range ports {
		if port.IP == "" {
			// FIX: an empty IP means the port is not published to the host,
			// so there is no public port — PublicPort here is just the zero
			// value and printing it shows "0/tcp" for every exposed-only
			// port. The container-side (private) port is the meaningful one.
			result = append(result, fmt.Sprintf("%d/%s", port.PrivatePort, port.Type))
		} else {
			result = append(result, fmt.Sprintf("%s:%d->%d/%s", port.IP, port.PublicPort, port.PrivatePort, port.Type))
		}
	}
	sort.Strings(result)
	return strings.Join(result, ", ")
}
// CmdPs implements 'docker ps': list containers as a table (or bare IDs
// with -q). Flags map directly onto /containers/json query parameters:
// -a → all, -n/-l → limit, -sinceId → since, -beforeId → before,
// -s → size.
func (cli *DockerCli) CmdPs(args ...string) error {
	cmd := cli.Subcmd("ps", "[OPTIONS]", "List containers")
	quiet := cmd.Bool("q", false, "Only display numeric IDs")
	size := cmd.Bool("s", false, "Display sizes")
	all := cmd.Bool("a", false, "Show all containers. Only running containers are shown by default.")
	noTrunc := cmd.Bool("notrunc", false, "Don't truncate output")
	nLatest := cmd.Bool("l", false, "Show only the latest created container, include non-running ones.")
	since := cmd.String("sinceId", "", "Show only containers created since Id, include non-running ones.")
	before := cmd.String("beforeId", "", "Show only container created before Id, include non-running ones.")
	last := cmd.Int("n", -1, "Show n last created containers, include non-running ones.")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	v := url.Values{}
	// -l is shorthand for -n 1, but an explicit -n wins.
	if *last == -1 && *nLatest {
		*last = 1
	}
	if *all {
		v.Set("all", "1")
	}
	if *last != -1 {
		v.Set("limit", strconv.Itoa(*last))
	}
	if *since != "" {
		v.Set("since", *since)
	}
	if *before != "" {
		v.Set("before", *before)
	}
	if *size {
		v.Set("size", "1")
	}
	body, _, err := cli.call("GET", "/containers/json?"+v.Encode(), nil)
	if err != nil {
		return err
	}
	var outs []APIContainers
	err = json.Unmarshal(body, &outs)
	if err != nil {
		return err
	}
	w := tabwriter.NewWriter(cli.out, 20, 1, 3, ' ', 0)
	if !*quiet {
		fmt.Fprint(w, "CONTAINER ID\tIMAGE\tCOMMAND\tCREATED\tSTATUS\tPORTS\tNAMES")
		// The SIZE column only exists when -s was given.
		if *size {
			fmt.Fprintln(w, "\tSIZE")
		} else {
			fmt.Fprint(w, "\n")
		}
	}
	for _, out := range outs {
		if !*noTrunc {
			out.ID = utils.TruncateID(out.ID)
		}
		// Remove the leading / from the names
		for i := 0; i < len(out.Names); i++ {
			out.Names[i] = out.Names[i][1:]
		}
		if !*quiet {
			if !*noTrunc {
				out.Command = utils.Trunc(out.Command, 20)
			}
			fmt.Fprintf(w, "%s\t%s\t%s\t%s ago\t%s\t%s\t%s\t", out.ID, out.Image, out.Command, utils.HumanDuration(time.Now().UTC().Sub(time.Unix(out.Created, 0))), out.Status, displayablePorts(out.Ports), strings.Join(out.Names, ","))
			if *size {
				// SizeRootFs > 0 means the server reported the full rootfs size.
				if out.SizeRootFs > 0 {
					fmt.Fprintf(w, "%s (virtual %s)\n", utils.HumanSize(out.SizeRw), utils.HumanSize(out.SizeRootFs))
				} else {
					fmt.Fprintf(w, "%s\n", utils.HumanSize(out.SizeRw))
				}
			} else {
				fmt.Fprint(w, "\n")
			}
		} else {
			fmt.Fprintln(w, out.ID)
		}
	}
	if !*quiet {
		w.Flush()
	}
	return nil
}
// CmdCommit implements 'docker commit': create a new image from a
// container's changes, optionally tagged and with a run-config override
// (-run, JSON). The legacy 3-argument form (CONTAINER REPO TAG) is still
// accepted but warns that it is deprecated. Prints the new image ID.
func (cli *DockerCli) CmdCommit(args ...string) error {
	cmd := cli.Subcmd("commit", "[OPTIONS] CONTAINER [REPOSITORY[:TAG]]", "Create a new image from a container's changes")
	flComment := cmd.String("m", "", "Commit message")
	flAuthor := cmd.String("author", "", "Author (eg. \"John Hannibal Smith <hannibal@a-team.com>\"")
	flConfig := cmd.String("run", "", "Config automatically applied when the image is run. "+`(ex: -run='{"Cmd": ["cat", "/world"], "PortSpecs": ["22"]}')`)
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	var name, repository, tag string
	if cmd.NArg() == 3 {
		fmt.Fprintf(cli.err, "[DEPRECATED] The format 'CONTAINER [REPOSITORY [TAG]]' as been deprecated. Please use CONTAINER [REPOSITORY[:TAG]]\n")
		name, repository, tag = cmd.Arg(0), cmd.Arg(1), cmd.Arg(2)
	} else {
		name = cmd.Arg(0)
		repository, tag = utils.ParseRepositoryTag(cmd.Arg(1))
	}
	if name == "" {
		cmd.Usage()
		return nil
	}
	v := url.Values{}
	v.Set("container", name)
	v.Set("repo", repository)
	v.Set("tag", tag)
	v.Set("comment", *flComment)
	v.Set("author", *flAuthor)
	// -run supplies an optional Config sent as the request body.
	var config *Config
	if *flConfig != "" {
		config = &Config{}
		if err := json.Unmarshal([]byte(*flConfig), config); err != nil {
			return err
		}
	}
	body, _, err := cli.call("POST", "/commit?"+v.Encode(), config)
	if err != nil {
		return err
	}
	apiID := &APIID{}
	err = json.Unmarshal(body, apiID)
	if err != nil {
		return err
	}
	fmt.Fprintf(cli.out, "%s\n", apiID.ID)
	return nil
}
// CmdEvents implements 'docker events': stream server events to stdout.
// -since accepts either a (possibly partial) local timestamp, which is
// converted to a Unix epoch, or any other string passed through verbatim.
func (cli *DockerCli) CmdEvents(args ...string) error {
	cmd := cli.Subcmd("events", "[OPTIONS]", "Get real time events from the server")
	since := cmd.String("since", "", "Show previously created events and then stream.")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() != 0 {
		cmd.Usage()
		return nil
	}
	v := url.Values{}
	if *since != "" {
		// Parse in the machine's current zone so local timestamps work.
		loc := time.FixedZone(time.Now().Zone())
		format := "2006-01-02 15:04:05 -0700 MST"
		// Truncate the reference layout so partial inputs (e.g. just a
		// date) still parse against a matching prefix.
		if len(*since) < len(format) {
			format = format[:len(*since)]
		}
		if t, err := time.ParseInLocation(format, *since, loc); err == nil {
			v.Set("since", strconv.FormatInt(t.Unix(), 10))
		} else {
			// Not a timestamp: forward the raw value to the server.
			v.Set("since", *since)
		}
	}
	if err := cli.stream("GET", "/events?"+v.Encode(), nil, cli.out, nil); err != nil {
		return err
	}
	return nil
}
// CmdExport implements 'docker export': stream a container's filesystem
// to stdout as a tar archive.
func (cli *DockerCli) CmdExport(args ...string) error {
	cmd := cli.Subcmd("export", "CONTAINER", "Export the contents of a filesystem as a tar archive to STDOUT")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() != 1 {
		cmd.Usage()
		return nil
	}
	// The server streams the tarball directly to cli.out.
	return cli.stream("GET", "/containers/"+cmd.Arg(0)+"/export", nil, cli.out, nil)
}
// CmdDiff implements 'docker diff': print the filesystem changes of a
// container, one change per line.
func (cli *DockerCli) CmdDiff(args ...string) error {
	cmd := cli.Subcmd("diff", "CONTAINER", "Inspect changes on a container's filesystem")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() != 1 {
		cmd.Usage()
		return nil
	}
	body, _, err := cli.call("GET", "/containers/"+cmd.Arg(0)+"/changes", nil)
	if err != nil {
		return err
	}
	var changeList []Change
	if err := json.Unmarshal(body, &changeList); err != nil {
		return err
	}
	for _, entry := range changeList {
		fmt.Fprintf(cli.out, "%s\n", entry.String())
	}
	return nil
}
// CmdLogs implements 'docker logs': fetch a container's stdout/stderr
// history over the attach endpoint (logs=1), optionally following live
// output (-f) when the container is still running.
func (cli *DockerCli) CmdLogs(args ...string) error {
	cmd := cli.Subcmd("logs", "CONTAINER", "Fetch the logs of a container")
	follow := cmd.Bool("f", false, "Follow log output")
	if err := cmd.Parse(args); err != nil {
		return nil
	}
	if cmd.NArg() != 1 {
		cmd.Usage()
		return nil
	}
	name := cmd.Arg(0)
	// Inspect first: Tty and Running state shape the attach request.
	body, _, err := cli.call("GET", "/containers/"+name+"/json", nil)
	if err != nil {
		return err
	}
	container := &Container{}
	err = json.Unmarshal(body, container)
	if err != nil {
		return err
	}
	v := url.Values{}
	v.Set("logs", "1")
	v.Set("stdout", "1")
	v.Set("stderr", "1")
	// Streaming only makes sense while the container is running.
	if *follow && container.State.Running {
		v.Set("stream", "1")
	}
	// stdin is nil: logs is read-only attach.
	if err := cli.hijack("POST", "/containers/"+name+"/attach?"+v.Encode(), container.Config.Tty, nil, cli.out, cli.err, nil); err != nil {
		return err
	}
	return nil
}
func (cli *DockerCli) CmdAttach(args ...string) error {
cmd := cli.Subcmd("attach", "[OPTIONS] CONTAINER", "Attach to a running container")
noStdin := cmd.Bool("nostdin", false, "Do not attach stdin")
proxy := cmd.Bool("sig-proxy", true, "Proxify all received signal to the process (even in non-tty mode)")
if err := cmd.Parse(args); err != nil {
return nil
}
if cmd.NArg() != 1 {
cmd.Usage()
return nil
}
name := cmd.Arg(0)
body, _, err := cli.call("GET", "/containers/"+name+"/json", nil)
if err != nil {
return err
}
container := &Container{}
err = json.Unmarshal(body, container)
if err != nil {
return err
}
if !container.State.IsRunning() {
return fmt.Errorf("Impossible to attach to a stopped container, start it first")
}
if container.Config.Tty && cli.isTerminal {
if err := cli.monitorTtySize(cmd.Arg(0)); err != nil {
utils.Debugf("Error monitoring TTY size: %s", err)
}
}
var in io.ReadCloser
v := url.Values{}
v.Set("stream", "1")
if !*noStdin && container.Config.OpenStdin {
v.Set("stdin", "1")
in = cli.in
}
v.Set("stdout", "1")
v.Set("stderr", "1")
if *proxy && !container.Config.Tty {
sigc := cli.forwardAllSignals(cmd.Arg(0))
defer utils.StopCatch(sigc)
}
if err := cli.hijack("POST", "/containers/"+cmd.Arg(0)+"/attach?"+v.Encode(), container.Config.Tty, in, cli.out, cli.err, nil); err != nil {
return err
}
_, status, err := getExitCode(cli, cmd.Arg(0))
if err != nil {
return err
}
if status != 0 {
return &utils.StatusError{StatusCode: status}
}
return nil
}
func (cli *DockerCli) CmdSearch(args ...string) error {
cmd := cli.Subcmd("search", "TERM", "Search the docker index for images")
noTrunc := cmd.Bool("notrunc", false, "Don't truncate output")
trusted := cmd.Bool("trusted", false, "Only show trusted builds")
stars := cmd.Int("stars", 0, "Only displays with at least xxx stars")
if err := cmd.Parse(args); err != nil {
return nil
}
if cmd.NArg() != 1 {
cmd.Usage()
return nil
}
v := url.Values{}
v.Set("term", cmd.Arg(0))
body, _, err := cli.call("GET", "/images/search?"+v.Encode(), nil)
if err != nil {
return err
}
outs := []registry.SearchResult{}
err = json.Unmarshal(body, &outs)
if err != nil {
return err
}
w := tabwriter.NewWriter(cli.out, 10, 1, 3, ' ', 0)
fmt.Fprintf(w, "NAME\tDESCRIPTION\tSTARS\tOFFICIAL\tTRUSTED\n")
for _, out := range outs {
if (*trusted && !out.IsTrusted) || (*stars > out.StarCount) {
continue
}
desc := strings.Replace(out.Description, "\n", " ", -1)
desc = strings.Replace(desc, "\r", " ", -1)
if !*noTrunc && len(desc) > 45 {
desc = utils.Trunc(desc, 42) + "..."
}
fmt.Fprintf(w, "%s\t%s\t%d\t", out.Name, desc, out.StarCount)
if out.IsOfficial {
fmt.Fprint(w, "[OK]")
}
fmt.Fprint(w, "\t")
if out.IsTrusted {
fmt.Fprint(w, "[OK]")
}
fmt.Fprint(w, "\n")
}
w.Flush()
return nil
}
// Ports type - Used to parse multiple -p flags
type ports []int
func (cli *DockerCli) CmdTag(args ...string) error {
cmd := cli.Subcmd("tag", "[OPTIONS] IMAGE REPOSITORY[:TAG]", "Tag an image into a repository")
force := cmd.Bool("f", false, "Force")
if err := cmd.Parse(args); err != nil {
return nil
}
if cmd.NArg() != 2 && cmd.NArg() != 3 {
cmd.Usage()
return nil
}
var repository, tag string
if cmd.NArg() == 3 {
fmt.Fprintf(cli.err, "[DEPRECATED] The format 'IMAGE [REPOSITORY [TAG]]' as been deprecated. Please use IMAGE [REPOSITORY[:TAG]]\n")
repository, tag = cmd.Arg(1), cmd.Arg(2)
} else {
repository, tag = utils.ParseRepositoryTag(cmd.Arg(1))
}
v := url.Values{}
v.Set("repo", repository)
v.Set("tag", tag)
if *force {
v.Set("force", "1")
}
if _, _, err := cli.call("POST", "/images/"+cmd.Arg(0)+"/tag?"+v.Encode(), nil); err != nil {
return err
}
return nil
}
//FIXME Only used in tests
func ParseRun(args []string, capabilities *Capabilities) (*Config, *HostConfig, *flag.FlagSet, error) {
cmd := flag.NewFlagSet("run", flag.ContinueOnError)
cmd.SetOutput(ioutil.Discard)
cmd.Usage = nil
return parseRun(cmd, args, capabilities)
}
func parseRun(cmd *flag.FlagSet, args []string, capabilities *Capabilities) (*Config, *HostConfig, *flag.FlagSet, error) {
var (
// FIXME: use utils.ListOpts for attach and volumes?
flAttach = NewListOpts(ValidateAttach)
flVolumes = NewListOpts(ValidatePath)
flLinks = NewListOpts(ValidateLink)
flEnv = NewListOpts(ValidateEnv)
flPublish ListOpts
flExpose ListOpts
flDns ListOpts
flVolumesFrom ListOpts
flLxcOpts ListOpts
flAutoRemove = cmd.Bool("rm", false, "Automatically remove the container when it exits (incompatible with -d)")
flDetach = cmd.Bool("d", false, "Detached mode: Run container in the background, print new container id")
flNetwork = cmd.Bool("n", true, "Enable networking for this container")
flPrivileged = cmd.Bool("privileged", false, "Give extended privileges to this container")
flPublishAll = cmd.Bool("P", false, "Publish all exposed ports to the host interfaces")
flStdin = cmd.Bool("i", false, "Keep stdin open even if not attached")
flTty = cmd.Bool("t", false, "Allocate a pseudo-tty")
flContainerIDFile = cmd.String("cidfile", "", "Write the container ID to the file")
flEntrypoint = cmd.String("entrypoint", "", "Overwrite the default entrypoint of the image")
flHostname = cmd.String("h", "", "Container host name")
flMemoryString = cmd.String("m", "", "Memory limit (format: <number><optional unit>, where unit = b, k, m or g)")
flUser = cmd.String("u", "", "Username or UID")
flWorkingDir = cmd.String("w", "", "Working directory inside the container")
flCpuShares = cmd.Int64("c", 0, "CPU shares (relative weight)")
// For documentation purpose
_ = cmd.Bool("sig-proxy", true, "Proxify all received signal to the process (even in non-tty mode)")
_ = cmd.String("name", "", "Assign a name to the container")
)
cmd.Var(&flAttach, "a", "Attach to stdin, stdout or stderr.")
cmd.Var(&flVolumes, "v", "Bind mount a volume (e.g. from the host: -v /host:/container, from docker: -v /container)")
cmd.Var(&flLinks, "link", "Add link to another container (name:alias)")
cmd.Var(&flEnv, "e", "Set environment variables")
cmd.Var(&flPublish, "p", fmt.Sprintf("Publish a container's port to the host (format: %s) (use 'docker port' to see the actual mapping)", PortSpecTemplateFormat))
cmd.Var(&flExpose, "expose", "Expose a port from the container without publishing it to your host")
cmd.Var(&flDns, "dns", "Set custom dns servers")
cmd.Var(&flVolumesFrom, "volumes-from", "Mount volumes from the specified container(s)")
cmd.Var(&flLxcOpts, "lxc-conf", "Add custom lxc options -lxc-conf=\"lxc.cgroup.cpuset.cpus = 0,1\"")
if err := cmd.Parse(args); err != nil {
return nil, nil, cmd, err
}
// Check if the kernel supports memory limit cgroup.
if capabilities != nil && *flMemoryString != "" && !capabilities.MemoryLimit {
*flMemoryString = ""
}
// Validate input params
if *flDetach && flAttach.Len() > 0 {
return nil, nil, cmd, ErrConflictAttachDetach
}
if *flWorkingDir != "" && !path.IsAbs(*flWorkingDir) {
return nil, nil, cmd, ErrInvalidWorikingDirectory
}
if *flDetach && *flAutoRemove {
return nil, nil, cmd, ErrConflictDetachAutoRemove
}
// If neither -d or -a are set, attach to everything by default
if flAttach.Len() == 0 && !*flDetach {
if !*flDetach {
flAttach.Set("stdout")
flAttach.Set("stderr")
if *flStdin {
flAttach.Set("stdin")
}
}
}
var flMemory int64
if *flMemoryString != "" {
parsedMemory, err := utils.RAMInBytes(*flMemoryString)
if err != nil {
return nil, nil, cmd, err
}
flMemory = parsedMemory
}
var binds []string
// add any bind targets to the list of container volumes
for bind := range flVolumes.GetMap() {
if arr := strings.Split(bind, ":"); len(arr) > 1 {
if arr[0] == "/" {
return nil, nil, cmd, fmt.Errorf("Invalid bind mount: source can't be '/'")
}
dstDir := arr[1]
flVolumes.Set(dstDir)
binds = append(binds, bind)
flVolumes.Delete(bind)
}
}
var (
parsedArgs = cmd.Args()
runCmd []string
entrypoint []string
image string
)
if len(parsedArgs) >= 1 {
image = cmd.Arg(0)
}
if len(parsedArgs) > 1 {
runCmd = parsedArgs[1:]
}
if *flEntrypoint != "" {
entrypoint = []string{*flEntrypoint}
}
lxcConf, err := parseLxcConfOpts(flLxcOpts)
if err != nil {
return nil, nil, cmd, err
}
var (
domainname string
hostname = *flHostname
parts = strings.SplitN(hostname, ".", 2)
)
if len(parts) > 1 {
hostname = parts[0]
domainname = parts[1]
}
ports, portBindings, err := parsePortSpecs(flPublish.GetAll())
if err != nil {
return nil, nil, cmd, err
}
// Merge in exposed ports to the map of published ports
for _, e := range flExpose.GetAll() {
if strings.Contains(e, ":") {
return nil, nil, cmd, fmt.Errorf("Invalid port format for -expose: %s", e)
}
p := NewPort(splitProtoPort(e))
if _, exists := ports[p]; !exists {
ports[p] = struct{}{}
}
}
config := &Config{
Hostname: hostname,
Domainname: domainname,
PortSpecs: nil, // Deprecated
ExposedPorts: ports,
User: *flUser,
Tty: *flTty,
NetworkDisabled: !*flNetwork,
OpenStdin: *flStdin,
Memory: flMemory,
CpuShares: *flCpuShares,
AttachStdin: flAttach.Get("stdin"),
AttachStdout: flAttach.Get("stdout"),
AttachStderr: flAttach.Get("stderr"),
Env: flEnv.GetAll(),
Cmd: runCmd,
Dns: flDns.GetAll(),
Image: image,
Volumes: flVolumes.GetMap(),
VolumesFrom: strings.Join(flVolumesFrom.GetAll(), ","),
Entrypoint: entrypoint,
WorkingDir: *flWorkingDir,
}
hostConfig := &HostConfig{
Binds: binds,
ContainerIDFile: *flContainerIDFile,
LxcConf: lxcConf,
Privileged: *flPrivileged,
PortBindings: portBindings,
Links: flLinks.GetAll(),
PublishAllPorts: *flPublishAll,
}
if capabilities != nil && flMemory > 0 && !capabilities.SwapLimit {
//fmt.Fprintf(stdout, "WARNING: Your kernel does not support swap limit capabilities. Limitation discarded.\n")
config.MemorySwap = -1
}
// When allocating stdin in attached mode, close stdin at client disconnect
if config.OpenStdin && config.AttachStdin {
config.StdinOnce = true
}
return config, hostConfig, cmd, nil
}
func (cli *DockerCli) CmdRun(args ...string) error {
config, hostConfig, cmd, err := parseRun(cli.Subcmd("run", "[OPTIONS] IMAGE [COMMAND] [ARG...]", "Run a command in a new container"), args, nil)
if err != nil {
return err
}
if config.Image == "" {
cmd.Usage()
return nil
}
// Retrieve relevant client-side config
var (
flName = cmd.Lookup("name")
flRm = cmd.Lookup("rm")
flSigProxy = cmd.Lookup("sig-proxy")
autoRemove, _ = strconv.ParseBool(flRm.Value.String())
sigProxy, _ = strconv.ParseBool(flSigProxy.Value.String())
)
// Disable sigProxy in case on TTY
if config.Tty {
sigProxy = false
}
var containerIDFile io.WriteCloser
if len(hostConfig.ContainerIDFile) > 0 {
if _, err := os.Stat(hostConfig.ContainerIDFile); err == nil {
return fmt.Errorf("cid file found, make sure the other container isn't running or delete %s", hostConfig.ContainerIDFile)
}
if containerIDFile, err = os.Create(hostConfig.ContainerIDFile); err != nil {
return fmt.Errorf("failed to create the container ID file: %s", err)
}
defer containerIDFile.Close()
}
containerValues := url.Values{}
if name := flName.Value.String(); name != "" {
containerValues.Set("name", name)
}
//create the container
body, statusCode, err := cli.call("POST", "/containers/create?"+containerValues.Encode(), config)
//if image not found try to pull it
if statusCode == 404 {
_, tag := utils.ParseRepositoryTag(config.Image)
if tag == "" {
tag = DEFAULTTAG
}
fmt.Fprintf(cli.err, "Unable to find image '%s' (tag: %s) locally\n", config.Image, tag)
v := url.Values{}
repos, tag := utils.ParseRepositoryTag(config.Image)
v.Set("fromImage", repos)
v.Set("tag", tag)
// Resolve the Repository name from fqn to endpoint + name
endpoint, _, err := registry.ResolveRepositoryName(repos)
if err != nil {
return err
}
// Load the auth config file, to be able to pull the image
cli.LoadConfigFile()
// Resolve the Auth config relevant for this server
authConfig := cli.configFile.ResolveAuthConfig(endpoint)
buf, err := json.Marshal(authConfig)
if err != nil {
return err
}
registryAuthHeader := []string{
base64.URLEncoding.EncodeToString(buf),
}
if err = cli.stream("POST", "/images/create?"+v.Encode(), nil, cli.err, map[string][]string{"X-Registry-Auth": registryAuthHeader}); err != nil {
return err
}
if body, _, err = cli.call("POST", "/containers/create?"+containerValues.Encode(), config); err != nil {
return err
}
} else if err != nil {
return err
}
var runResult APIRun
if err := json.Unmarshal(body, &runResult); err != nil {
return err
}
for _, warning := range runResult.Warnings {
fmt.Fprintf(cli.err, "WARNING: %s\n", warning)
}
if len(hostConfig.ContainerIDFile) > 0 {
if _, err = containerIDFile.Write([]byte(runResult.ID)); err != nil {
return fmt.Errorf("failed to write the container ID to the file: %s", err)
}
}
if sigProxy {
sigc := cli.forwardAllSignals(runResult.ID)
defer utils.StopCatch(sigc)
}
var (
waitDisplayId chan struct{}
errCh chan error
)
if !config.AttachStdout && !config.AttachStderr {
// Make this asynchrone in order to let the client write to stdin before having to read the ID
waitDisplayId = make(chan struct{})
go func() {
defer close(waitDisplayId)
fmt.Fprintf(cli.out, "%s\n", runResult.ID)
}()
}
// We need to instanciate the chan because the select needs it. It can
// be closed but can't be uninitialized.
hijacked := make(chan io.Closer)
// Block the return until the chan gets closed
defer func() {
utils.Debugf("End of CmdRun(), Waiting for hijack to finish.")
if _, ok := <-hijacked; ok {
utils.Errorf("Hijack did not finish (chan still open)")
}
}()
if config.AttachStdin || config.AttachStdout || config.AttachStderr {
var (
out, stderr io.Writer
in io.ReadCloser
v = url.Values{}
)
v.Set("stream", "1")
if config.AttachStdin {
v.Set("stdin", "1")
in = cli.in
}
if config.AttachStdout {
v.Set("stdout", "1")
out = cli.out
}
if config.AttachStderr {
v.Set("stderr", "1")
if config.Tty {
stderr = cli.out
} else {
stderr = cli.err
}
}
errCh = utils.Go(func() error {
return cli.hijack("POST", "/containers/"+runResult.ID+"/attach?"+v.Encode(), config.Tty, in, out, stderr, hijacked)
})
} else {
close(hijacked)
}
// Acknowledge the hijack before starting
select {
case closer := <-hijacked:
// Make sure that hijack gets closed when returning. (result
// in closing hijack chan and freeing server's goroutines.
if closer != nil {
defer closer.Close()
}
case err := <-errCh:
if err != nil {
utils.Debugf("Error hijack: %s", err)
return err
}
}
//start the container
if _, _, err = cli.call("POST", "/containers/"+runResult.ID+"/start", hostConfig); err != nil {
return err
}
if (config.AttachStdin || config.AttachStdout || config.AttachStderr) && config.Tty && cli.isTerminal {
if err := cli.monitorTtySize(runResult.ID); err != nil {
utils.Errorf("Error monitoring TTY size: %s\n", err)
}
}
if errCh != nil {
if err := <-errCh; err != nil {
utils.Debugf("Error hijack: %s", err)
return err
}
}
// Detached mode: wait for the id to be displayed and return.
if !config.AttachStdout && !config.AttachStderr {
// Detached mode
<-waitDisplayId
return nil
}
var status int
// Attached mode
if autoRemove {
// Autoremove: wait for the container to finish, retrieve
// the exit code and remove the container
if _, _, err := cli.call("POST", "/containers/"+runResult.ID+"/wait", nil); err != nil {
return err
}
if _, status, err = getExitCode(cli, runResult.ID); err != nil {
return err
}
if _, _, err := cli.call("DELETE", "/containers/"+runResult.ID, nil); err != nil {
return err
}
} else {
// No Autoremove: Simply retrieve the exit code
if _, status, err = getExitCode(cli, runResult.ID); err != nil {
return err
}
}
if status != 0 {
return &utils.StatusError{StatusCode: status}
}
return nil
}
func (cli *DockerCli) CmdCp(args ...string) error {
cmd := cli.Subcmd("cp", "CONTAINER:PATH HOSTPATH", "Copy files/folders from the PATH to the HOSTPATH")
if err := cmd.Parse(args); err != nil {
return nil
}
if cmd.NArg() != 2 {
cmd.Usage()
return nil
}
var copyData APICopy
info := strings.Split(cmd.Arg(0), ":")
if len(info) != 2 {
return fmt.Errorf("Error: Path not specified")
}
copyData.Resource = info[1]
copyData.HostPath = cmd.Arg(1)
data, statusCode, err := cli.call("POST", "/containers/"+info[0]+"/copy", copyData)
if err != nil {
return err
}
if statusCode == 200 {
r := bytes.NewReader(data)
if err := archive.Untar(r, copyData.HostPath, nil); err != nil {
return err
}
}
return nil
}
func (cli *DockerCli) CmdSave(args ...string) error {
cmd := cli.Subcmd("save", "IMAGE", "Save an image to a tar archive (streamed to stdout)")
if err := cmd.Parse(args); err != nil {
return err
}
if cmd.NArg() != 1 {
cmd.Usage()
return nil
}
image := cmd.Arg(0)
if err := cli.stream("GET", "/images/"+image+"/get", nil, cli.out, nil); err != nil {
return err
}
return nil
}
func (cli *DockerCli) CmdLoad(args ...string) error {
cmd := cli.Subcmd("load", "SOURCE", "Load an image from a tar archive")
if err := cmd.Parse(args); err != nil {
return err
}
if cmd.NArg() != 0 {
cmd.Usage()
return nil
}
if err := cli.stream("POST", "/images/load", cli.in, cli.out, nil); err != nil {
return err
}
return nil
}
func (cli *DockerCli) call(method, path string, data interface{}) ([]byte, int, error) {
var params io.Reader
if data != nil {
buf, err := json.Marshal(data)
if err != nil {
return nil, -1, err
}
params = bytes.NewBuffer(buf)
}
// fixme: refactor client to support redirect
re := regexp.MustCompile("/+")
path = re.ReplaceAllString(path, "/")
req, err := http.NewRequest(method, fmt.Sprintf("/v%g%s", APIVERSION, path), params)
if err != nil {
return nil, -1, err
}
req.Header.Set("User-Agent", "Docker-Client/"+VERSION)
req.Host = cli.addr
if data != nil {
req.Header.Set("Content-Type", "application/json")
} else if method == "POST" {
req.Header.Set("Content-Type", "plain/text")
}
dial, err := net.Dial(cli.proto, cli.addr)
if err != nil {
if strings.Contains(err.Error(), "connection refused") {
return nil, -1, ErrConnectionRefused
}
return nil, -1, err
}
clientconn := httputil.NewClientConn(dial, nil)
resp, err := clientconn.Do(req)
defer clientconn.Close()
if err != nil {
if strings.Contains(err.Error(), "connection refused") {
return nil, -1, ErrConnectionRefused
}
return nil, -1, err
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return nil, -1, err
}
if resp.StatusCode < 200 || resp.StatusCode >= 400 {
if len(body) == 0 {
return nil, resp.StatusCode, fmt.Errorf("Error: %s", http.StatusText(resp.StatusCode))
}
return nil, resp.StatusCode, fmt.Errorf("Error: %s", bytes.TrimSpace(body))
}
return body, resp.StatusCode, nil
}
func (cli *DockerCli) stream(method, path string, in io.Reader, out io.Writer, headers map[string][]string) error {
if (method == "POST" || method == "PUT") && in == nil {
in = bytes.NewReader([]byte{})
}
// fixme: refactor client to support redirect
re := regexp.MustCompile("/+")
path = re.ReplaceAllString(path, "/")
req, err := http.NewRequest(method, fmt.Sprintf("/v%g%s", APIVERSION, path), in)
if err != nil {
return err
}
req.Header.Set("User-Agent", "Docker-Client/"+VERSION)
req.Host = cli.addr
if method == "POST" {
req.Header.Set("Content-Type", "plain/text")
}
if headers != nil {
for k, v := range headers {
req.Header[k] = v
}
}
dial, err := net.Dial(cli.proto, cli.addr)
if err != nil {
if strings.Contains(err.Error(), "connection refused") {
return fmt.Errorf("Can't connect to docker daemon. Is 'docker -d' running on this host?")
}
return err
}
clientconn := httputil.NewClientConn(dial, nil)
resp, err := clientconn.Do(req)
defer clientconn.Close()
if err != nil {
if strings.Contains(err.Error(), "connection refused") {
return fmt.Errorf("Can't connect to docker daemon. Is 'docker -d' running on this host?")
}
return err
}
defer resp.Body.Close()
if resp.StatusCode < 200 || resp.StatusCode >= 400 {
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return err
}
if len(body) == 0 {
return fmt.Errorf("Error :%s", http.StatusText(resp.StatusCode))
}
return fmt.Errorf("Error: %s", bytes.TrimSpace(body))
}
if matchesContentType(resp.Header.Get("Content-Type"), "application/json") {
return utils.DisplayJSONMessagesStream(resp.Body, out, cli.terminalFd, cli.isTerminal)
}
if _, err := io.Copy(out, resp.Body); err != nil {
return err
}
return nil
}
func (cli *DockerCli) hijack(method, path string, setRawTerminal bool, in io.ReadCloser, stdout, stderr io.Writer, started chan io.Closer) error {
defer func() {
if started != nil {
close(started)
}
}()
// fixme: refactor client to support redirect
re := regexp.MustCompile("/+")
path = re.ReplaceAllString(path, "/")
req, err := http.NewRequest(method, fmt.Sprintf("/v%g%s", APIVERSION, path), nil)
if err != nil {
return err
}
req.Header.Set("User-Agent", "Docker-Client/"+VERSION)
req.Header.Set("Content-Type", "plain/text")
req.Host = cli.addr
dial, err := net.Dial(cli.proto, cli.addr)
if err != nil {
if strings.Contains(err.Error(), "connection refused") {
return fmt.Errorf("Can't connect to docker daemon. Is 'docker -d' running on this host?")
}
return err
}
clientconn := httputil.NewClientConn(dial, nil)
defer clientconn.Close()
// Server hijacks the connection, error 'connection closed' expected
clientconn.Do(req)
rwc, br := clientconn.Hijack()
defer rwc.Close()
if started != nil {
started <- rwc
}
var receiveStdout chan error
var oldState *term.State
if in != nil && setRawTerminal && cli.isTerminal && os.Getenv("NORAW") == "" {
oldState, err = term.SetRawTerminal(cli.terminalFd)
if err != nil {
return err
}
defer term.RestoreTerminal(cli.terminalFd, oldState)
}
if stdout != nil || stderr != nil {<|fim▁hole|> if in != nil {
if setRawTerminal && cli.isTerminal {
term.RestoreTerminal(cli.terminalFd, oldState)
}
in.Close()
}
}()
// When TTY is ON, use regular copy
if setRawTerminal {
_, err = io.Copy(stdout, br)
} else {
_, err = utils.StdCopy(stdout, stderr, br)
}
utils.Debugf("[hijack] End of stdout")
return err
})
}
sendStdin := utils.Go(func() error {
if in != nil {
io.Copy(rwc, in)
utils.Debugf("[hijack] End of stdin")
}
if tcpc, ok := rwc.(*net.TCPConn); ok {
if err := tcpc.CloseWrite(); err != nil {
utils.Errorf("Couldn't send EOF: %s\n", err)
}
} else if unixc, ok := rwc.(*net.UnixConn); ok {
if err := unixc.CloseWrite(); err != nil {
utils.Errorf("Couldn't send EOF: %s\n", err)
}
}
// Discard errors due to pipe interruption
return nil
})
if stdout != nil || stderr != nil {
if err := <-receiveStdout; err != nil {
utils.Errorf("Error receiveStdout: %s", err)
return err
}
}
if !cli.isTerminal {
if err := <-sendStdin; err != nil {
utils.Errorf("Error sendStdin: %s", err)
return err
}
}
return nil
}
func (cli *DockerCli) getTtySize() (int, int) {
if !cli.isTerminal {
return 0, 0
}
ws, err := term.GetWinsize(cli.terminalFd)
if err != nil {
utils.Errorf("Error getting size: %s", err)
if ws == nil {
return 0, 0
}
}
return int(ws.Height), int(ws.Width)
}
func (cli *DockerCli) resizeTty(id string) {
height, width := cli.getTtySize()
if height == 0 && width == 0 {
return
}
v := url.Values{}
v.Set("h", strconv.Itoa(height))
v.Set("w", strconv.Itoa(width))
if _, _, err := cli.call("POST", "/containers/"+id+"/resize?"+v.Encode(), nil); err != nil {
utils.Errorf("Error resize: %s", err)
}
}
func (cli *DockerCli) monitorTtySize(id string) error {
cli.resizeTty(id)
sigchan := make(chan os.Signal, 1)
signal.Notify(sigchan, syscall.SIGWINCH)
go func() {
for _ = range sigchan {
cli.resizeTty(id)
}
}()
return nil
}
func (cli *DockerCli) Subcmd(name, signature, description string) *flag.FlagSet {
flags := flag.NewFlagSet(name, flag.ContinueOnError)
flags.Usage = func() {
fmt.Fprintf(cli.err, "\nUsage: docker %s %s\n\n%s\n\n", name, signature, description)
flags.PrintDefaults()
os.Exit(2)
}
return flags
}
func (cli *DockerCli) LoadConfigFile() (err error) {
cli.configFile, err = auth.LoadConfig(os.Getenv("HOME"))
if err != nil {
fmt.Fprintf(cli.err, "WARNING: %s\n", err)
}
return err
}
func waitForExit(cli *DockerCli, containerId string) (int, error) {
body, _, err := cli.call("POST", "/containers/"+containerId+"/wait", nil)
if err != nil {
// If we can't connect, then the daemon probably died.
if err != ErrConnectionRefused {
return -1, err
}
return -1, nil
}
var out APIWait
if err := json.Unmarshal(body, &out); err != nil {
return -1, err
}
return out.StatusCode, nil
}
// getExitCode perform an inspect on the container. It returns
// the running state and the exit code.
func getExitCode(cli *DockerCli, containerId string) (bool, int, error) {
body, _, err := cli.call("GET", "/containers/"+containerId+"/json", nil)
if err != nil {
// If we can't connect, then the daemon probably died.
if err != ErrConnectionRefused {
return false, -1, err
}
return false, -1, nil
}
c := &Container{}
if err := json.Unmarshal(body, c); err != nil {
return false, -1, err
}
return c.State.IsRunning(), c.State.GetExitCode(), nil
}
func NewDockerCli(in io.ReadCloser, out, err io.Writer, proto, addr string) *DockerCli {
var (
isTerminal = false
terminalFd uintptr
)
if in != nil {
if file, ok := in.(*os.File); ok {
terminalFd = file.Fd()
isTerminal = term.IsTerminal(terminalFd)
}
}
if err == nil {
err = out
}
return &DockerCli{
proto: proto,
addr: addr,
in: in,
out: out,
err: err,
isTerminal: isTerminal,
terminalFd: terminalFd,
}
}
type DockerCli struct {
proto string
addr string
configFile *auth.ConfigFile
in io.ReadCloser
out io.Writer
err io.Writer
isTerminal bool
terminalFd uintptr
}<|fim▁end|> | receiveStdout = utils.Go(func() (err error) {
defer func() { |
<|file_name|>13287_cake.cc<|end_file_name|><|fim▁begin|>#include <bits/stdc++.h>
using namespace std;
#define DEBUG // comment this line to pull out print statements
#ifdef DEBUG
// completely copied from http://saadahmad.ca/cc-preprocessor-metaprogramming-2/
const char NEWLINE[] = "\n";
const char TAB[] = "\t";
#define EMPTY()
#define DEFER(...) __VA_ARGS__ EMPTY()
#define DEFER2(...) __VA_ARGS__ DEFER(EMPTY) ()
#define DEFER3(...) __VA_ARGS__ DEFER2(EMPTY) ()
#define EVAL_1(...) __VA_ARGS__
#define EVAL_2(...) EVAL_1(EVAL_1(__VA_ARGS__))
#define EVAL_3(...) EVAL_2(EVAL_2(__VA_ARGS__))
#define EVAL_4(...) EVAL_3(EVAL_3(__VA_ARGS__))
#define EVAL_5(...) EVAL_4(EVAL_4(__VA_ARGS__))
#define EVAL_6(...) EVAL_5(EVAL_5(__VA_ARGS__))
#define EVAL_7(...) EVAL_6(EVAL_6(__VA_ARGS__))
#define EVAL_8(...) EVAL_7(EVAL_7(__VA_ARGS__))
#define EVAL(...) EVAL_8(__VA_ARGS__)
#define NOT_0 EXISTS(1)
#define NOT(x) TRY_EXTRACT_EXISTS ( CAT(NOT_, x), 0 )
#define IS_ENCLOSED(x, ...) TRY_EXTRACT_EXISTS ( IS_ENCLOSED_TEST x, 0 )
#define ENCLOSE_EXPAND(...) EXPANDED, ENCLOSED, (__VA_ARGS__) ) EAT (
#define GET_CAT_EXP(a, b) (a, ENCLOSE_EXPAND b, DEFAULT, b )
#define CAT_WITH_ENCLOSED(a, b) a b
#define CAT_WITH_DEFAULT(a, b) a ## b
#define CAT_WITH(a, _, f, b) CAT_WITH_ ## f (a, b)
#define EVAL_CAT_WITH(...) CAT_WITH __VA_ARGS__
#define CAT(a, b) EVAL_CAT_WITH ( GET_CAT_EXP(a, b) )
#define IF_1(true, ...) true
#define IF_0(true, ...) __VA_ARGS__
#define IF(value) CAT(IF_, value)
#define HEAD(x, ...) x
#define TAIL(x, ...) __VA_ARGS__
#define TEST_LAST EXISTS(1)
#define IS_LIST_EMPTY(...) TRY_EXTRACT_EXISTS( DEFER(HEAD) (__VA_ARGS__ EXISTS(1)) , 0)
#define IS_LIST_NOT_EMPTY(...) NOT(IS_LIST_EMPTY(__VA_ARGS__))
#define DOES_VALUE_EXIST_EXISTS(...) 1
#define DOES_VALUE_EXIST_DOESNT_EXIST 0
#define DOES_VALUE_EXIST(x) CAT(DOES_VALUE_EXIST_, x)
#define TRY_EXTRACT_EXISTS(value, ...) IF ( DOES_VALUE_EXIST(TEST_EXISTS(value)) ) \
( EXTRACT_VALUE(value), __VA_ARGS__ )
#define EXTRACT_VALUE_EXISTS(...) __VA_ARGS__
#define EXTRACT_VALUE(value) CAT(EXTRACT_VALUE_, value)
#define EAT(...)
#define EXPAND_TEST_EXISTS(...) EXPANDED, EXISTS(__VA_ARGS__) ) EAT (
#define GET_TEST_EXISTS_RESULT(x) ( CAT(EXPAND_TEST_, x), DOESNT_EXIST )
#define GET_TEST_EXIST_VALUE_(expansion, existValue) existValue
#define GET_TEST_EXIST_VALUE(x) GET_TEST_EXIST_VALUE_ x
#define TEST_EXISTS(x) GET_TEST_EXIST_VALUE ( GET_TEST_EXISTS_RESULT(x) )
#define ENCLOSE(...) ( __VA_ARGS__ )
#define REM_ENCLOSE_(...) __VA_ARGS__
#define REM_ENCLOSE(...) REM_ENCLOSE_ __VA_ARGS__
#define IF_ENCLOSED_1(true, ...) true
#define IF_ENCLOSED_0(true, ...) __VA_ARGS__
#define IF_ENCLOSED(...) CAT(IF_ENCLOSED_, IS_ENCLOSED(__VA_ARGS__))
#define OPT_REM_ENCLOSE(...) \
IF_ENCLOSED (__VA_ARGS__) ( REM_ENCLOSE(__VA_ARGS__), __VA_ARGS__ )
#define FOR_EACH_INDIRECT() FOR_EACH_NO_EVAL
#define FOR_EACH_NO_EVAL(fVisitor, ...) \
IF ( IS_LIST_NOT_EMPTY( __VA_ARGS__ ) ) \
( \
fVisitor( OPT_REM_ENCLOSE(HEAD(__VA_ARGS__)) ) \
DEFER2 ( FOR_EACH_INDIRECT )() (fVisitor, TAIL(__VA_ARGS__)) \
)
#define FOR_EACH(fVisitor, ...) \
EVAL(FOR_EACH_NO_EVAL(fVisitor, __VA_ARGS__))
#define STRINGIFY(x) #x
#define DUMP_VAR(x) std::cout << STRINGIFY(x) << ": " << x << TAB;
#define debug(...) FOR_EACH(DUMP_VAR, __VA_ARGS__); std::cout << NEWLINE;
#define dbg(block) block
#else
#define debug(...)
#define dbg(block)
#endif
const double EPS = 1E-9;
// --- GEOMETRY ---
// --- points, lines, functions for lines and points, triangles,
// --- circles.
// -- insert geometry.hh here for geometric functions
// --- END GEOMETRY ---
typedef vector<int> vi;
typedef vector<pair<int,int>> vii;
#define UN(v) SORT(v),v.erase(unique(v.begin(),v.end()),v.end())
#define SORT(c) sort((c).begin(),(c).end())
#define FOR(i,a,b) for (int i=(a); i < (b); i++)
#define REP(i,n) FOR(i,0,(int)n)
#define CL(a,b) memset(a,b,sizeof(a))
#define CL2d(a,b,x,y) memset(a, b, sizeof(a[0][0])*x*y)
/* global variables */
int W, N;
int l, w;
int i, j, k;
char Ws[20], Ns[20];
int total_area;
char line[100];
//char answers[10000][21];
char answers[100000];
int ans, ansb;
int tlen, clen;
/* global variables */
void dump()
{
// dump data
}
bool getInput()<|fim▁hole|>
return true;
}
void process()
{
// int a = 2, b = 5, c = 3;
// debug(a, b, c); // debugging example
// printf("%d\n", total_area/W);
}
int main()
{
ios_base::sync_with_stdio(false);
char digits[10];
fgets_unlocked(line, 99, stdin);
do
{
for (i = 0; line[i] != '\n'; ++i) {
Ws[i] = line[i];
}
Ws[i] = 0;
for (k = 0; k < i-1; ++k) {
W += Ws[k]-'0';
W *= 10;
}
W += Ws[k]-'0';
fgets_unlocked(line, 99, stdin);
for (j = 0; line[j] != '\n'; ++j) {
Ns[j] = line[j];
}
for (k = 0; k < j-1; ++k) {
N += Ns[k]-'0';
N *= 10;
}
N += Ns[k]-'0';
REP(counter, N) {
l = w = 0;
fgets_unlocked(line, 99, stdin);
for (i = 0; line[i] != ' '; ++i) {
Ws[i] = line[i];
}
Ws[i+1] = 0;
for (k = 0; k < i-1; ++k) {
l += Ws[k]-'0';
l *= 10;
}
l += Ws[k]-'0';
while (line[i] == ' ') {
++i;
}
for (j = 0; line[i] != '\n' && line[i] != 0; ++i, ++j) {
Ns[j] = line[i];
}
Ns[j+1] = 0;
for (k = 0; k < j-1; ++k) {
w += Ns[k]-'0';
w *= 10;
}
w += Ns[k]-'0';
total_area += l*w;
}
ans = total_area/W, ansb = ans;
digits[9] = '\n';
clen = 9;
while (ans != 0) {
digits[--clen] = (ans%10)+'0';
ans /= 10;
}
memcpy(&answers[tlen], &digits[clen], 10-clen);
tlen += 10-clen;
/* CLEAR GLOBAL VARIABLES! */
total_area = 0;
W = 0;
N = 0;
l = 0;
w = 0;
/* CLEAR GLOBAL VARIABLES! */
fgets_unlocked(line, 99, stdin);
} while (!feof_unlocked(stdin));
fwrite_unlocked(&answers, sizeof(char), tlen, stdout);
return 0;
}<|fim▁end|> | {
if (feof(stdin)) return false; |
<|file_name|>string_template_engine.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""Provide the standard Python string.Template engine."""
from __future__ import absolute_import
from __future__ import print_function
from string import Template
from . import Engine
class StringTemplate(Engine):
"""String.Template engine."""
handle = 'string.Template'
def __init__(self, template, tolerant=False, **kwargs):
"""Initialize string.Template."""
super(StringTemplate, self).__init__(**kwargs)
self.template = Template(template)
self.tolerant = tolerant
def apply(self, mapping):
"""Apply a mapping of name-value-pairs to a template."""
mapping = {name: self.str(value, tolerant=self.tolerant)
for name, value in mapping.items()
if value is not None or self.tolerant}
if self.tolerant:<|fim▁hole|><|fim▁end|> | return self.template.safe_substitute(mapping)
return self.template.substitute(mapping) |
<|file_name|>test_environment.py<|end_file_name|><|fim▁begin|>from __future__ import with_statement
import pytest
from django.core import mail
from django.db import connection
from pytest_django_test.app.models import Item
# It doesn't matter which order all the _again methods are run, we just need
# to check the environment remains constant.
# This is possible with some of the testdir magic, but this is the lazy way
# to do it.<|fim▁hole|> mail.send_mail('subject', 'body', 'from@example.com', ['to@example.com'])
assert len(mail.outbox) == 1
m = mail.outbox[0]
assert m.subject == 'subject'
assert m.body == 'body'
assert m.from_email == 'from@example.com'
assert list(m.to) == ['to@example.com']
def test_mail_again():
test_mail()
@pytest.mark.django_project(extra_settings="""
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
ROOT_URLCONF = 'tpkg.app.urls'
""")
def test_invalid_template_variable(django_testdir):
django_testdir.create_app_file("""
from django.conf.urls import url
from pytest_django_test.compat import patterns
from tpkg.app import views
urlpatterns = patterns(
'',
url(r'invalid_template/', views.invalid_template),
)
""", 'urls.py')
django_testdir.create_app_file("""
from django.shortcuts import render
def invalid_template(request):
return render(request, 'invalid_template.html', {})
""", 'views.py')
django_testdir.create_app_file(
"<div>{{ invalid_var }}</div>",
'templates/invalid_template.html'
)
django_testdir.create_test_module('''
import pytest
def test_for_invalid_template(client):
client.get('/invalid_template/')
@pytest.mark.ignore_template_errors
def test_ignore(client):
client.get('/invalid_template/')
''')
result = django_testdir.runpytest_subprocess('-s', '--fail-on-template-vars')
result.stdout.fnmatch_lines_random([
"tpkg/test_the_test.py F.",
"Undefined template variable 'invalid_var' in 'invalid_template.html'",
])
@pytest.mark.django_project(extra_settings="""
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
ROOT_URLCONF = 'tpkg.app.urls'
""")
def test_invalid_template_variable_opt_in(django_testdir):
django_testdir.create_app_file("""
from django.conf.urls import url
from pytest_django_test.compat import patterns
from tpkg.app import views
urlpatterns = patterns(
'',
url(r'invalid_template/', views.invalid_template),
)
""", 'urls.py')
django_testdir.create_app_file("""
from django.shortcuts import render
def invalid_template(request):
return render(request, 'invalid_template.html', {})
""", 'views.py')
django_testdir.create_app_file(
"<div>{{ invalid_var }}</div>",
'templates/invalid_template.html'
)
django_testdir.create_test_module('''
import pytest
def test_for_invalid_template(client):
client.get('/invalid_template/')
@pytest.mark.ignore_template_errors
def test_ignore(client):
client.get('/invalid_template/')
''')
result = django_testdir.runpytest_subprocess('-s')
result.stdout.fnmatch_lines_random([
"tpkg/test_the_test.py ..",
])
@pytest.mark.django_db
def test_database_rollback():
assert Item.objects.count() == 0
Item.objects.create(name='blah')
assert Item.objects.count() == 1
@pytest.mark.django_db
def test_database_rollback_again():
test_database_rollback()
def test_database_name():
name = connection.settings_dict['NAME']
assert name == ':memory:' or name.startswith('test_')
def test_database_noaccess():
with pytest.raises(pytest.fail.Exception):
Item.objects.count()
class TestrunnerVerbosity:
"""Test that Django's code to setup and teardown the databases uses
pytest's verbosity level."""
@pytest.fixture
def testdir(self, django_testdir):
print("testdir")
django_testdir.create_test_module('''
import pytest
@pytest.mark.django_db
def test_inner_testrunner():
pass
''')
return django_testdir
def test_default(self, testdir):
"""Not verbose by default."""
result = testdir.runpytest_subprocess('-s')
result.stdout.fnmatch_lines([
"tpkg/test_the_test.py ."])
def test_vq_verbosity_0(self, testdir):
"""-v and -q results in verbosity 0."""
result = testdir.runpytest_subprocess('-s', '-v', '-q')
result.stdout.fnmatch_lines([
"tpkg/test_the_test.py ."])
def test_verbose_with_v(self, testdir):
"""Verbose output with '-v'."""
result = testdir.runpytest_subprocess('-s', '-v')
result.stdout.fnmatch_lines_random([
"tpkg/test_the_test.py:*",
"*PASSED*",
"*Destroying test database for alias 'default'...*"])
def test_more_verbose_with_vv(self, testdir):
"""More verbose output with '-v -v'."""
result = testdir.runpytest_subprocess('-s', '-v', '-v')
result.stdout.fnmatch_lines([
"tpkg/test_the_test.py:*Creating test database for alias*",
"*Creating table app_item*",
"*PASSED*Destroying test database for alias 'default' ('*')...*"])
def test_more_verbose_with_vv_and_reusedb(self, testdir):
"""More verbose output with '-v -v', and --create-db."""
result = testdir.runpytest_subprocess('-s', '-v', '-v', '--create-db')
result.stdout.fnmatch_lines([
"tpkg/test_the_test.py:*Creating test database for alias*",
"*PASSED*"])
assert ("*Destroying test database for alias 'default' ('*')...*"
not in result.stdout.str())<|fim▁end|> |
def test_mail():
assert len(mail.outbox) == 0 |
<|file_name|>srv.rs<|end_file_name|><|fim▁begin|>extern crate anvil_server;
<|fim▁hole|><|fim▁end|> | fn main() {
anvil_server::server("\0anvil_uds").unwrap();
} |
<|file_name|>parseCrimeDataCoords.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import sys
import csv
import json
#test to make sure pyproj exists
try:
import pyproj
except ImportError:
sys.stderr.write("Please install the pyproj python module!\n")
sys.exit(3)
try:
from pymongo import MongoClient
except ImportError:
sys.stderr.write("Please install the pymongo python module!\n")<|fim▁hole|>isNAD83 = True
coordsList = []
outputFile = ""
latCol = 8
lonCol = 7
offenseCol = -1
if len(sys.argv) != 3:
print 'Supply crimedata CSV and the year!'
sys.exit(2)
csvFilename = sys.argv[1]
crimeYear = sys.argv[2]
if not crimeYear.isdigit():
print 'Please supply a valid year!'
sys.exit(2)
crimeYear = int(crimeYear)
client = MongoClient()
db = client.dc_crime
incidents = db.incidents
#set up the source and destination coordinate system
nad83=pyproj.Proj("+init=esri:102285") # Maryland State Plane for NAD 83
wgs84=pyproj.Proj("+init=EPSG:4326") # WGS84 datum
with open(csvFilename, 'r') as csvFile:
reader = csv.reader(csvFile, delimiter=',')
curLine = 1
wasSkipped = 0
for row in reader:
#we want to skip the first line
if not wasSkipped:
#check if it is LAT/LON data which seems
#to be the format for data <= 2010
if "LATITUDE" in row:
isNAD83 = False
#set the lat and lon columns
latCol = row.index("LATITUDE")
lonCol = row.index("LONGITUDE")
offenseCol = row.index("OFFENSE")
wasSkipped = 1
continue
if isNAD83:
#data is in NAD83 coordinates
#lets grab them an convert to WGS84
try:
curEastCoord = float(row[lonCol])
curNorthCoord = float(row[latCol])
#print curNorthCoord, curEastCoord
curCoords = pyproj.transform(nad83, wgs84, curEastCoord, curNorthCoord)
except ValueError:
sys.stderr.write("\nCould not parse line number %d for %s. Continuing ...\n" % (curLine, csvFilename))
continue
else:
#data is already in Lat/Lon so we are golden
#just make sure to pull from the correct columns
try:
curCoords = [ float(row[lonCol]), float(row[latCol]) ]
except ValueError:
sys.stderr.write("\nCould not parse line number %d for %s. Continuing ...\n" % (curLine, csvFilename))
continue
#for now we are just dumping everything into arrays
#coordsList.append({ "latitude" : curCoords[1], "longitude": curCoords[0]})
coordsList.append([ round(curCoords[1], 6), round(curCoords[0], 6), row[offenseCol] ])
curIncident = {
"offense": row[offenseCol],
"year": crimeYear,
"lat": round(curCoords[1], 6),
"lon": round(curCoords[0], 6)
}
incidents.insert_one(curIncident)
curLine = curLine + 1
#print json.dumps(coordsList)<|fim▁end|> | sys.exit(3)
|
<|file_name|>plugin.py<|end_file_name|><|fim▁begin|># Copyright (C) 2017 Xavier Lucas
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#<|fim▁hole|>import keyring
def build_option_parser(parser):
parser.add_argument(
"--keyring-service",
metavar="<service>",
default="confluence-cli",
help="Service entry",
)
parser.add_argument(
"--keyring-username",
metavar="<username>",
help="User name",
)
def after_command(app, cmd, result, error):
pass
def before_command(app, cmd):
pass
def initialize(app):
_load_credentials(app, app.options)
def _load_credentials(app, options):
app.username = options.keyring_username
app.password = _get_or_save(options.keyring_service, app.username)
def _get_or_save(service, entry):
value = keyring.get_password(service, entry)
if value is None:
value = getpass.getpass("Password to store: ")
keyring.set_password(service, entry, value)
return value<|fim▁end|> | # You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import getpass |
<|file_name|>IntegerColumnReferenceTest.java<|end_file_name|><|fim▁begin|>/*
* Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.operation.reference.doc;
import io.crate.operation.reference.doc.lucene.IntegerColumnReference;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.IntField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.FieldMapper;
import org.junit.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
public class IntegerColumnReferenceTest extends DocLevelExpressionsTest {
@Override
protected void insertValues(IndexWriter writer) throws Exception {
for (int i = -10; i<10; i++) {
Document doc = new Document();
doc.add(new StringField("_id", Integer.toString(i), Field.Store.NO));
doc.add(new IntField(fieldName().name(), i, Field.Store.NO));
writer.addDocument(doc);
}
}
@Override
protected FieldMapper.Names fieldName() {
return new FieldMapper.Names("i");
}<|fim▁hole|> protected FieldDataType fieldType() {
return new FieldDataType("int");
}
@Test
public void testFieldCacheExpression() throws Exception {
IntegerColumnReference integerColumn = new IntegerColumnReference(fieldName().name());
integerColumn.startCollect(ctx);
integerColumn.setNextReader(readerContext);
IndexSearcher searcher = new IndexSearcher(readerContext.reader());
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 20);
int i = -10;
for (ScoreDoc doc : topDocs.scoreDocs) {
integerColumn.setNextDocId(doc.doc);
assertThat(integerColumn.value(), is(i));
i++;
}
}
}<|fim▁end|> |
@Override |
<|file_name|>_lonsrc.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
class LonsrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(self, plotly_name="lonsrc", parent_name="scattermapbox", **kwargs):
super(LonsrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "info"),
**kwargs<|fim▁hole|><|fim▁end|> | ) |
<|file_name|>UserRoleService.java<|end_file_name|><|fim▁begin|>package com.svcet.cashportal.service;
import com.svcet.cashportal.web.beans.UserRequest;
import com.svcet.cashportal.web.beans.UserRolesScreenRequest;
import com.svcet.cashportal.web.beans.UserRolesScreenResponse;
public interface UserRoleService {
UserRolesScreenResponse editUserRoles(UserRequest userRequest);
void updateUserRoles(UserRolesScreenRequest userRolesScreenRequest);<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>alive_squids_csv.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# IkaLog
# ======
# Copyright (C) 2015 Takeshi HASEGAWA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|>#
#
# This module is still in proof of concept, and subject to change.
#
from datetime import datetime
# IkaLog Output Plugin: Write 'Alive Squids' CSV data
#
class AliveSquidsCSV(object):
##
# Write a line to text file.
# @param self The Object Pointer.
# @param record Record (text)
#
def write_record(self, file, record):
try:
csv_file = open(file, "a")
csv_file.write(record)
csv_file.close
except:
print("CSV: Failed to write CSV File")
def write_alive_squids_csv(self, context, basename="ikabattle_log", debug=False):
csv = ["tick,y\n", "tick,y\n"]
for sample in context['game']['livesTrack']:
if debug:
print('lives sample = %s', sample)
time = sample[0]
del sample[0]
num_team = 0
for team in sample:
num_squid = 0
for alive in team:
num_squid = num_squid + 1
if alive:
csv[num_team] = "%s%d, %d\n" % (
csv[num_team], time, num_squid)
num_team = num_team + 1
num_team = 0
t = datetime.now()
t_str = t.strftime("%Y%m%d_%H%M")
for f in csv:
self.write_record('%s/%s_team%d.csv' %
(self.dest_dir, basename, num_team), f)
num_team = num_team + 1
def write_flags_csv(self, context, basename="ikabattle_log", debug=False):
# データがない場合は書かない
if len(context['game']['towerTrack']) == 0:
return
csv = "tick,pos,max,min\n"
for sample in context['game']['towerTrack']:
if debug:
print('tower sample = %s', sample)
time = sample[0]
sample = sample[1]
csv = "%s%d, %d, %d, %d\n" % (
csv, time, sample['pos'], sample['max'], sample['min'])
self.write_record('%s/%s_tower.csv' % (self.dest_dir, basename), csv)
##
# on_game_individual_result Hook
# @param self The Object Pointer
# @param context IkaLog context
#
def on_game_individual_result(self, context):
t = datetime.now()
basename = t.strftime("ikabattle_log_%Y%m%d_%H%M")
self.write_alive_squids_csv(context, basename=basename, debug=self.debug)
self.write_flags_csv(context, basename=basename, debug=self.debug)
##
# Constructor
# @param self The Object Pointer.
# @param dest_dir Destionation directory (Relative path, or absolute path)
def __init__(self, dir='./log/', debug=False):
self.dest_dir = dir
self.debug = debug<|fim▁end|> | # See the License for the specific language governing permissions and
# limitations under the License. |
<|file_name|>vc.py<|end_file_name|><|fim▁begin|>#
# Copyright (c) 2001 - 2015 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# TODO:
# * supported arch for versions: for old versions of batch file without
# argument, giving bogus argument cannot be detected, so we have to hardcode
# this here
# * print warning when msvc version specified but not found
# * find out why warning do not print
# * test on 64 bits XP + VS 2005 (and VS 6 if possible)
# * SDK
# * Assembly
__revision__ = "src/engine/SCons/Tool/MSCommon/vc.py rel_2.3.5:3329:275e75118ad4 2015/06/20 11:18:26 bdbaddog"
__doc__ = """Module for Visual C/C++ detection and configuration.
"""
import SCons.compat
import os
import platform
from string import digits as string_digits
import SCons.Warnings
import common
debug = common.debug
import sdk
get_installed_sdks = sdk.get_installed_sdks
class VisualCException(Exception):
pass
class UnsupportedVersion(VisualCException):
pass
class UnsupportedArch(VisualCException):
pass
class MissingConfiguration(VisualCException):
pass
class NoVersionFound(VisualCException):
pass
class BatchFileExecutionError(VisualCException):
pass
# Dict to 'canonalize' the arch
_ARCH_TO_CANONICAL = {
"amd64" : "amd64",
"emt64" : "amd64",
"i386" : "x86",
"i486" : "x86",
"i586" : "x86",
"i686" : "x86",
"ia64" : "ia64",
"itanium" : "ia64",
"x86" : "x86",
"x86_64" : "amd64",
"x86_amd64" : "x86_amd64", # Cross compile to 64 bit from 32bits
}
# Given a (host, target) tuple, return the argument for the bat file. Both host
# and targets should be canonalized.
_HOST_TARGET_ARCH_TO_BAT_ARCH = {
("x86", "x86"): "x86",
("x86", "amd64"): "x86_amd64",
("x86", "x86_amd64"): "x86_amd64",
("amd64", "x86_amd64"): "x86_amd64", # This is present in (at least) VS2012 express
("amd64", "amd64"): "amd64",
("amd64", "x86"): "x86",
("x86", "ia64"): "x86_ia64"
}
def get_host_target(env):
debug('vc.py:get_host_target()')
host_platform = env.get('HOST_ARCH')
if not host_platform:
host_platform = platform.machine()
# TODO(2.5): the native Python platform.machine() function returns
# '' on all Python versions before 2.6, after which it also uses
# PROCESSOR_ARCHITECTURE.
if not host_platform:
host_platform = os.environ.get('PROCESSOR_ARCHITECTURE', '')
# Retain user requested TARGET_ARCH
req_target_platform = env.get('TARGET_ARCH')
debug('vc.py:get_host_target() req_target_platform:%s'%req_target_platform)
if req_target_platform:
# If user requested a specific platform then only try that one.
target_platform = req_target_platform
else:
target_platform = host_platform
try:
host = _ARCH_TO_CANONICAL[host_platform.lower()]<|fim▁hole|> msg = "Unrecognized host architecture %s"
raise ValueError(msg % repr(host_platform))
try:
target = _ARCH_TO_CANONICAL[target_platform.lower()]
except KeyError, e:
all_archs = str(_ARCH_TO_CANONICAL.keys())
raise ValueError("Unrecognized target architecture %s\n\tValid architectures: %s" % (target_platform, all_archs))
return (host, target,req_target_platform)
# If you update this, update SupportedVSList in Tool/MSCommon/vs.py, and the
# MSVC_VERSION documentation in Tool/msvc.xml.
_VCVER = ["15.0", "14.0", "14.0Exp", "12.0", "12.0Exp", "11.0", "11.0Exp", "10.0", "10.0Exp", "9.0", "9.0Exp","8.0", "8.0Exp","7.1", "7.0", "6.0"]
_VCVER_TO_PRODUCT_DIR = {
'15.0' : [
r'Microsoft\VisualStudio\SxS\VS7\15.0'],
'14.0' : [
r'Microsoft\VisualStudio\14.0\Setup\VC\ProductDir'],
'14.0' : [
r'Microsoft\VisualStudio\14.0\Setup\VC\ProductDir'],
'12.0' : [
r'Microsoft\VisualStudio\12.0\Setup\VC\ProductDir'],
'12.0Exp' : [
r'Microsoft\VCExpress\12.0\Setup\VC\ProductDir'],
'11.0': [
r'Microsoft\VisualStudio\11.0\Setup\VC\ProductDir'],
'11.0Exp' : [
r'Microsoft\VCExpress\11.0\Setup\VC\ProductDir'],
'10.0': [
r'Microsoft\VisualStudio\10.0\Setup\VC\ProductDir'],
'10.0Exp' : [
r'Microsoft\VCExpress\10.0\Setup\VC\ProductDir'],
'9.0': [
r'Microsoft\VisualStudio\9.0\Setup\VC\ProductDir'],
'9.0Exp' : [
r'Microsoft\VCExpress\9.0\Setup\VC\ProductDir'],
'8.0': [
r'Microsoft\VisualStudio\8.0\Setup\VC\ProductDir'],
'8.0Exp': [
r'Microsoft\VCExpress\8.0\Setup\VC\ProductDir'],
'7.1': [
r'Microsoft\VisualStudio\7.1\Setup\VC\ProductDir'],
'7.0': [
r'Microsoft\VisualStudio\7.0\Setup\VC\ProductDir'],
'6.0': [
r'Microsoft\VisualStudio\6.0\Setup\Microsoft Visual C++\ProductDir']
}
def msvc_version_to_maj_min(msvc_version):
msvc_version_numeric = ''.join([x for x in msvc_version if x in string_digits + '.'])
t = msvc_version_numeric.split(".")
if not len(t) == 2:
raise ValueError("Unrecognized version %s (%s)" % (msvc_version,msvc_version_numeric))
try:
maj = int(t[0])
min = int(t[1])
return maj, min
except ValueError, e:
raise ValueError("Unrecognized version %s (%s)" % (msvc_version,msvc_version_numeric))
def is_host_target_supported(host_target, msvc_version):
"""Return True if the given (host, target) tuple is supported given the
msvc version.
Parameters
----------
host_target: tuple
tuple of (canonalized) host-target, e.g. ("x86", "amd64") for cross
compilation from 32 bits windows to 64 bits.
msvc_version: str
msvc version (major.minor, e.g. 10.0)
Note
----
This only check whether a given version *may* support the given (host,
target), not that the toolchain is actually present on the machine.
"""
# We assume that any Visual Studio version supports x86 as a target
if host_target[1] != "x86":
maj, min = msvc_version_to_maj_min(msvc_version)
if maj < 8:
return False
return True
def find_vc_pdir(msvc_version):
"""Try to find the product directory for the given
version.
Note
----
If for some reason the requested version could not be found, an
exception which inherits from VisualCException will be raised."""
root = 'Software\\'
if common.is_win64():
root = root + 'Wow6432Node\\'
try:
hkeys = _VCVER_TO_PRODUCT_DIR[msvc_version]
except KeyError:
debug("Unknown version of MSVC: %s" % msvc_version)
raise UnsupportedVersion("Unknown version %s" % msvc_version)
for key in hkeys:
key = root + key
try:
comps = common.read_reg(key)
except WindowsError, e:
debug('find_vc_dir(): no VC registry key %s' % repr(key))
else:
debug('find_vc_dir(): found VC in registry: %s' % comps)
if msvc_version == "15.0":
comps = os.path.join(comps, "VC")
if os.path.exists(comps):
return comps
else:
debug('find_vc_dir(): reg says dir is %s, but it does not exist. (ignoring)'\
% comps)
raise MissingConfiguration("registry dir %s not found on the filesystem" % comps)
return None
def find_batch_file(env,msvc_version,host_arch,target_arch):
"""
Find the location of the batch script which should set up the compiler
for any TARGET_ARCH whose compilers were installed by Visual Studio/VCExpress
"""
pdir = find_vc_pdir(msvc_version)
if pdir is None:
raise NoVersionFound("No version of Visual Studio found")
debug('vc.py: find_batch_file() pdir:%s'%pdir)
# filter out e.g. "Exp" from the version name
msvc_ver_numeric = ''.join([x for x in msvc_version if x in string_digits + "."])
vernum = float(msvc_ver_numeric)
if 7 <= vernum < 8:
pdir = os.path.join(pdir, os.pardir, "Common7", "Tools")
batfilename = os.path.join(pdir, "vsvars32.bat")
elif vernum < 7:
pdir = os.path.join(pdir, "Bin")
batfilename = os.path.join(pdir, "vcvars32.bat")
elif vernum >= 15:
pdir = os.path.join(pdir, "Auxiliary", "Build")
batfilename = os.path.join(pdir, "vcvarsall.bat")
else: # >= 8
batfilename = os.path.join(pdir, "vcvarsall.bat")
if not os.path.exists(batfilename):
debug("Not found: %s" % batfilename)
batfilename = None
installed_sdks=get_installed_sdks()
for _sdk in installed_sdks:
sdk_bat_file = _sdk.get_sdk_vc_script(host_arch,target_arch)
if not sdk_bat_file:
debug("vc.py:find_batch_file() not found:%s"%_sdk)
else:
sdk_bat_file_path = os.path.join(pdir,sdk_bat_file)
if os.path.exists(sdk_bat_file_path):
debug('vc.py:find_batch_file() sdk_bat_file_path:%s'%sdk_bat_file_path)
return (batfilename,sdk_bat_file_path)
return (batfilename,None)
__INSTALLED_VCS_RUN = None
def cached_get_installed_vcs():
global __INSTALLED_VCS_RUN
if __INSTALLED_VCS_RUN is None:
ret = get_installed_vcs()
__INSTALLED_VCS_RUN = ret
return __INSTALLED_VCS_RUN
def get_installed_vcs():
installed_versions = []
for ver in _VCVER:
debug('trying to find VC %s' % ver)
try:
if find_vc_pdir(ver):
debug('found VC %s' % ver)
installed_versions.append(ver)
else:
debug('find_vc_pdir return None for ver %s' % ver)
except VisualCException, e:
debug('did not find VC %s: caught exception %s' % (ver, str(e)))
return installed_versions
def reset_installed_vcs():
"""Make it try again to find VC. This is just for the tests."""
__INSTALLED_VCS_RUN = None
# Running these batch files isn't cheap: most of the time spent in
# msvs.generate() is due to vcvars*.bat. In a build that uses "tools='msvs'"
# in multiple environments, for example:
# env1 = Environment(tools='msvs')
# env2 = Environment(tools='msvs')
# we can greatly improve the speed of the second and subsequent Environment
# (or Clone) calls by memoizing the environment variables set by vcvars*.bat.
script_env_stdout_cache = {}
def script_env(script, args=None):
cache_key = (script, args)
stdout = script_env_stdout_cache.get(cache_key, None)
if stdout is None:
stdout = common.get_output(script, args)
script_env_stdout_cache[cache_key] = stdout
# Stupid batch files do not set return code: we take a look at the
# beginning of the output for an error message instead
olines = stdout.splitlines()
if olines[0].startswith("The specified configuration type is missing"):
raise BatchFileExecutionError("\n".join(olines[:2]))
return common.parse_output(stdout)
def get_default_version(env):
debug('get_default_version()')
msvc_version = env.get('MSVC_VERSION')
msvs_version = env.get('MSVS_VERSION')
debug('get_default_version(): msvc_version:%s msvs_version:%s'%(msvc_version,msvs_version))
if msvs_version and not msvc_version:
SCons.Warnings.warn(
SCons.Warnings.DeprecatedWarning,
"MSVS_VERSION is deprecated: please use MSVC_VERSION instead ")
return msvs_version
elif msvc_version and msvs_version:
if not msvc_version == msvs_version:
SCons.Warnings.warn(
SCons.Warnings.VisualVersionMismatch,
"Requested msvc version (%s) and msvs version (%s) do " \
"not match: please use MSVC_VERSION only to request a " \
"visual studio version, MSVS_VERSION is deprecated" \
% (msvc_version, msvs_version))
return msvs_version
if not msvc_version:
installed_vcs = cached_get_installed_vcs()
debug('installed_vcs:%s' % installed_vcs)
if not installed_vcs:
#msg = 'No installed VCs'
#debug('msv %s\n' % repr(msg))
#SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, msg)
debug('msvc_setup_env: No installed VCs')
return None
msvc_version = installed_vcs[0]
debug('msvc_setup_env: using default installed MSVC version %s\n' % repr(msvc_version))
return msvc_version
def msvc_setup_env_once(env):
try:
has_run = env["MSVC_SETUP_RUN"]
except KeyError:
has_run = False
if not has_run:
msvc_setup_env(env)
env["MSVC_SETUP_RUN"] = True
def msvc_find_valid_batch_script(env,version):
debug('vc.py:msvc_find_valid_batch_script()')
# Find the host platform, target platform, and if present the requested
# target platform
(host_platform, target_platform,req_target_platform) = get_host_target(env)
try_target_archs = [target_platform]
debug("msvs_find_valid_batch_script(): req_target_platform %s target_platform:%s"%(req_target_platform,target_platform))
# VS2012 has a "cross compile" environment to build 64 bit
# with x86_amd64 as the argument to the batch setup script
if req_target_platform in ('amd64','x86_64'):
try_target_archs.append('x86_amd64')
elif not req_target_platform and target_platform in ['amd64','x86_64']:
# There may not be "native" amd64, but maybe "cross" x86_amd64 tools
try_target_archs.append('x86_amd64')
# If the user hasn't specifically requested a TARGET_ARCH, and
# The TARGET_ARCH is amd64 then also try 32 bits if there are no viable
# 64 bit tools installed
try_target_archs.append('x86')
debug("msvs_find_valid_batch_script(): host_platform: %s try_target_archs:%s"%(host_platform, try_target_archs))
d = None
for tp in try_target_archs:
# Set to current arch.
env['TARGET_ARCH']=tp
debug("vc.py:msvc_find_valid_batch_script() trying target_platform:%s"%tp)
host_target = (host_platform, tp)
if not is_host_target_supported(host_target, version):
warn_msg = "host, target = %s not supported for MSVC version %s" % \
(host_target, version)
SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg)
arg = _HOST_TARGET_ARCH_TO_BAT_ARCH[host_target]
# Try to locate a batch file for this host/target platform combo
try:
(vc_script,sdk_script) = find_batch_file(env,version,host_platform,tp)
debug('vc.py:msvc_find_valid_batch_script() vc_script:%s sdk_script:%s'%(vc_script,sdk_script))
except VisualCException, e:
msg = str(e)
debug('Caught exception while looking for batch file (%s)' % msg)
warn_msg = "VC version %s not installed. " + \
"C/C++ compilers are most likely not set correctly.\n" + \
" Installed versions are: %s"
warn_msg = warn_msg % (version, cached_get_installed_vcs())
SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg)
continue
# Try to use the located batch file for this host/target platform combo
debug('vc.py:msvc_find_valid_batch_script() use_script 2 %s, args:%s\n' % (repr(vc_script), arg))
if vc_script:
try:
d = script_env(vc_script, args=arg)
except BatchFileExecutionError, e:
debug('vc.py:msvc_find_valid_batch_script() use_script 3: failed running VC script %s: %s: Error:%s'%(repr(vc_script),arg,e))
vc_script=None
continue
if not vc_script and sdk_script:
debug('vc.py:msvc_find_valid_batch_script() use_script 4: trying sdk script: %s'%(sdk_script))
try:
d = script_env(sdk_script)
except BatchFileExecutionError,e:
debug('vc.py:msvc_find_valid_batch_script() use_script 5: failed running SDK script %s: Error:%s'%(repr(sdk_script),e))
continue
elif not vc_script and not sdk_script:
debug('vc.py:msvc_find_valid_batch_script() use_script 6: Neither VC script nor SDK script found')
continue
debug("vc.py:msvc_find_valid_batch_script() Found a working script/target: %s %s"%(repr(sdk_script),arg))
break # We've found a working target_platform, so stop looking
# If we cannot find a viable installed compiler, reset the TARGET_ARCH
# To it's initial value
if not d:
env['TARGET_ARCH']=req_target_platform
return d
def msvc_setup_env(env):
debug('msvc_setup_env()')
version = get_default_version(env)
if version is None:
warn_msg = "No version of Visual Studio compiler found - C/C++ " \
"compilers most likely not set correctly"
# Nuitka: Useless warning for us.
# SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg)
return None
debug('msvc_setup_env: using specified MSVC version %s\n' % repr(version))
# XXX: we set-up both MSVS version for backward
# compatibility with the msvs tool
env['MSVC_VERSION'] = version
env['MSVS_VERSION'] = version
env['MSVS'] = {}
use_script = env.get('MSVC_USE_SCRIPT', True)
if SCons.Util.is_String(use_script):
debug('vc.py:msvc_setup_env() use_script 1 %s\n' % repr(use_script))
d = script_env(use_script)
elif use_script:
d = msvc_find_valid_batch_script(env,version)
debug('vc.py:msvc_setup_env() use_script 2 %s\n' % d)
if not d:
return d
else:
debug('MSVC_USE_SCRIPT set to False')
warn_msg = "MSVC_USE_SCRIPT set to False, assuming environment " \
"set correctly."
# Nuitka: We use this on purpose.
# SCons.Warnings.warn(SCons.Warnings.VisualCMissingWarning, warn_msg)
return None
for k, v in d.items():
debug('vc.py:msvc_setup_env() env:%s -> %s'%(k,v))
env.PrependENVPath(k, v, delete_existing=True)
def msvc_exists(version=None):
vcs = cached_get_installed_vcs()
if version is None:
return len(vcs) > 0
return version in vcs<|fim▁end|> | except KeyError, e: |
<|file_name|>ldcmp1.rs<|end_file_name|><|fim▁begin|>#[doc = "Register `LDCMP1` reader"]
pub struct R(crate::R<LDCMP1_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<LDCMP1_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<LDCMP1_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<LDCMP1_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `LDCMP1` writer"]
pub struct W(crate::W<LDCMP1_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<LDCMP1_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<LDCMP1_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<LDCMP1_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `CMP_LD4` reader - Compare Value for LED COL\\[x\\]"]
pub struct CMP_LD4_R(crate::FieldReader<u8, u8>);
impl CMP_LD4_R {
pub(crate) fn new(bits: u8) -> Self {
CMP_LD4_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CMP_LD4_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `CMP_LD4` writer - Compare Value for LED COL\\[x\\]"]
pub struct CMP_LD4_W<'a> {
w: &'a mut W,
}
impl<'a> CMP_LD4_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0xff) | (value as u32 & 0xff);
self.w
}
}
#[doc = "Field `CMP_LD5` reader - Compare Value for LED COL\\[x\\]"]
pub struct CMP_LD5_R(crate::FieldReader<u8, u8>);
impl CMP_LD5_R {
pub(crate) fn new(bits: u8) -> Self {
CMP_LD5_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CMP_LD5_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `CMP_LD5` writer - Compare Value for LED COL\\[x\\]"]
pub struct CMP_LD5_W<'a> {
w: &'a mut W,
}
impl<'a> CMP_LD5_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0xff << 8)) | ((value as u32 & 0xff) << 8);
self.w
}
}
#[doc = "Field `CMP_LD6` reader - Compare Value for LED COL\\[x\\]"]
pub struct CMP_LD6_R(crate::FieldReader<u8, u8>);
impl CMP_LD6_R {
pub(crate) fn new(bits: u8) -> Self {
CMP_LD6_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CMP_LD6_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `CMP_LD6` writer - Compare Value for LED COL\\[x\\]"]
pub struct CMP_LD6_W<'a> {
w: &'a mut W,
}
impl<'a> CMP_LD6_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0xff << 16)) | ((value as u32 & 0xff) << 16);
self.w
}
}
#[doc = "Field `CMP_LDA_TSCOM` reader - Compare Value for LED COLA / Common Compare Value for Touch-sense Pad Turns"]
pub struct CMP_LDA_TSCOM_R(crate::FieldReader<u8, u8>);
impl CMP_LDA_TSCOM_R {
pub(crate) fn new(bits: u8) -> Self {
CMP_LDA_TSCOM_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CMP_LDA_TSCOM_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `CMP_LDA_TSCOM` writer - Compare Value for LED COLA / Common Compare Value for Touch-sense Pad Turns"]
pub struct CMP_LDA_TSCOM_W<'a> {
w: &'a mut W,
}
impl<'a> CMP_LDA_TSCOM_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0xff << 24)) | ((value as u32 & 0xff) << 24);
self.w
}
}
impl R {
#[doc = "Bits 0:7 - Compare Value for LED COL\\[x\\]"]
#[inline(always)]
pub fn cmp_ld4(&self) -> CMP_LD4_R {
CMP_LD4_R::new((self.bits & 0xff) as u8)
}
#[doc = "Bits 8:15 - Compare Value for LED COL\\[x\\]"]
#[inline(always)]
pub fn cmp_ld5(&self) -> CMP_LD5_R {
CMP_LD5_R::new(((self.bits >> 8) & 0xff) as u8)
}
#[doc = "Bits 16:23 - Compare Value for LED COL\\[x\\]"]
#[inline(always)]
pub fn cmp_ld6(&self) -> CMP_LD6_R {
CMP_LD6_R::new(((self.bits >> 16) & 0xff) as u8)
}
#[doc = "Bits 24:31 - Compare Value for LED COLA / Common Compare Value for Touch-sense Pad Turns"]
#[inline(always)]
pub fn cmp_lda_tscom(&self) -> CMP_LDA_TSCOM_R {
CMP_LDA_TSCOM_R::new(((self.bits >> 24) & 0xff) as u8)
}
}
impl W {
#[doc = "Bits 0:7 - Compare Value for LED COL\\[x\\]"]
#[inline(always)]
pub fn cmp_ld4(&mut self) -> CMP_LD4_W {
CMP_LD4_W { w: self }
}
#[doc = "Bits 8:15 - Compare Value for LED COL\\[x\\]"]
#[inline(always)]
pub fn cmp_ld5(&mut self) -> CMP_LD5_W {
CMP_LD5_W { w: self }
}
#[doc = "Bits 16:23 - Compare Value for LED COL\\[x\\]"]
#[inline(always)]
pub fn cmp_ld6(&mut self) -> CMP_LD6_W {
CMP_LD6_W { w: self }
}
#[doc = "Bits 24:31 - Compare Value for LED COLA / Common Compare Value for Touch-sense Pad Turns"]
#[inline(always)]
pub fn cmp_lda_tscom(&mut self) -> CMP_LDA_TSCOM_W {
CMP_LDA_TSCOM_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "LED Compare Register 1\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ldcmp1](index.html) module"]
pub struct LDCMP1_SPEC;
impl crate::RegisterSpec for LDCMP1_SPEC {<|fim▁hole|>#[doc = "`read()` method returns [ldcmp1::R](R) reader structure"]
impl crate::Readable for LDCMP1_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [ldcmp1::W](W) writer structure"]
impl crate::Writable for LDCMP1_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets LDCMP1 to value 0"]
impl crate::Resettable for LDCMP1_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}<|fim▁end|> | type Ux = u32;
} |
<|file_name|>CWE122_Heap_Based_Buffer_Overflow__c_CWE193_char_memcpy_74b.cpp<|end_file_name|><|fim▁begin|>/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE122_Heap_Based_Buffer_Overflow__c_CWE193_char_memcpy_74b.cpp
Label Definition File: CWE122_Heap_Based_Buffer_Overflow__c_CWE193.label.xml
Template File: sources-sink-74b.tmpl.cpp
*/
/*
* @description
* CWE: 122 Heap Based Buffer Overflow
* BadSource: Allocate memory for a string, but do not allocate space for NULL terminator
* GoodSource: Allocate enough memory for a string and the NULL terminator
* Sinks: memcpy
* BadSink : Copy string to data using memcpy()
* Flow Variant: 74 Data flow: data passed in a map from one function to another in different source files
*
* */
#include "std_testcase.h"
#include <map>
#ifndef _WIN32
#include <wchar.h>
#endif
/* MAINTENANCE NOTE: The length of this string should equal the 10 */
#define SRC_STRING "AAAAAAAAAA"
using namespace std;
namespace CWE122_Heap_Based_Buffer_Overflow__c_CWE193_char_memcpy_74
{
#ifndef OMITBAD
void badSink(map<int, char *> dataMap)
{
/* copy data out of dataMap */
char * data = dataMap[2];
{
char source[10+1] = SRC_STRING;
/* Copy length + 1 to include NUL terminator from source */
/* POTENTIAL FLAW: data may not have enough space to hold source */
memcpy(data, source, (strlen(source) + 1) * sizeof(char));
printLine(data);
free(data);
}
}
<|fim▁hole|>#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodG2B uses the GoodSource with the BadSink */
void goodG2BSink(map<int, char *> dataMap)
{
char * data = dataMap[2];
{
char source[10+1] = SRC_STRING;
/* Copy length + 1 to include NUL terminator from source */
/* POTENTIAL FLAW: data may not have enough space to hold source */
memcpy(data, source, (strlen(source) + 1) * sizeof(char));
printLine(data);
free(data);
}
}
#endif /* OMITGOOD */
} /* close namespace */<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>'''Brainfuck interpreter'''
VERSION = '0.1.2.1103'
def __static_vars():
'''Decorate, add static attr'''
def decorate(func):
'''The decorate'''
setattr(func, 'stdin_buffer', [])
return func
return decorate
@__static_vars()
def __getchar() -> int:
'''Return one char from stdin'''
buffer_len = len(__getchar.stdin_buffer)
if buffer_len == 0:
__getchar.stdin_buffer = list(input().encode('ascii'))
__getchar.stdin_buffer.append(10) # We need this enter to compact getchar from libc.
ret_c, __getchar.stdin_buffer = __getchar.stdin_buffer[0], __getchar.stdin_buffer[1:]
return ret_c
def __pre_execute(raw_code: str) -> list:
'''Replace the [] with paired code pointer'''
iptr = 0
bracket = list()
code = list(raw_code)
code_len = len(code)
while iptr < code_len:
code[iptr] = [code[iptr], '']
if code[iptr][0] == '[':
bracket.append(iptr)
elif code[iptr][0] == ']':
piptr = bracket.pop()
code[piptr][1], code[iptr][1] = iptr, piptr
iptr += 1
bracket_len = len(bracket)
if bracket_len != 0:
code = []
return code
def __execute(code: list, stack_size: int) -> list:
'''Run bf code'''
iptr = 0
sptr = 0
stack = list(0 for _ in range(stack_size))
code_len = len(code)
while iptr < code_len:
instruction = code[iptr][0]
if instruction == '>':
sptr += 1
elif instruction == '<':
sptr -= 1
elif instruction == '+':
stack[sptr] += 1
if stack[sptr] == 256:
stack[sptr] = 0<|fim▁hole|> stack[sptr] -= 1
if stack[sptr] == -1:
stack[sptr] = 255
elif instruction == '.':
print(chr(stack[sptr]), end='')
elif instruction == ',':
stack[sptr] = __getchar()
elif instruction == '[' and stack[sptr] == 0:
iptr = code[iptr][1]
elif instruction == ']' and stack[sptr] != 0:
iptr = code[iptr][1]
iptr += 1
# Clean the buffer, otherwise it will affect next round result.
__getchar.stdin_buffer = []
return stack
def run(raw_code: str = '', stack_size: int = 128) -> list:
'''Interpreter the raw_code.
Input:
- raw_code: the string of brainfuck code.
if this is empty, program will wait for user input.
- stack_size: the size of stack, default is 128Bytes.
Return value:
- The whole stack.
'''
if raw_code == '':
raw_code = input('% ')
code = __pre_execute(raw_code)
return __execute(code, stack_size)<|fim▁end|> | elif instruction == '-': |
<|file_name|>messages.py<|end_file_name|><|fim▁begin|># Copyright (C) 2007-2012 by the Free Software Foundation, Inc.
#
# This file is part of GNU Mailman.
#
# GNU Mailman is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# GNU Mailman is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# GNU Mailman. If not, see <http://www.gnu.org/licenses/>.
"""The message storage service."""
from __future__ import absolute_import, unicode_literals
__metaclass__ = type
__all__ = [
'IMessage',
'IMessageStore',
]
from zope.interface import Interface, Attribute
class IMessageStore(Interface):
"""The interface of the global message storage service.
All messages that are stored in the system live in the message storage
service. A message stored in this service must have a Message-ID header.
The store writes an X-Message-ID-Hash header which contains the Base32
encoded SHA1 hash of the message's Message-ID header. Any existing
X-Message-ID-Hash header is overwritten.
Either the Message-ID or the X-Message-ID-Hash header can be used to
uniquely identify this message in the storage service. While it is
possible to see duplicate Message-IDs, this is never correct and the
service is allowed to drop any subsequent colliding messages, or overwrite
earlier messages with later ones.
The combination of the List-Archive header and either the Message-ID or
X-Message-ID-Hash header can be used to retrieve the message from the
internet facing interface for the message store. This can be considered a
globally unique URI to the message.
For example, a message with the following headers:
Message-ID: <87myycy5eh.fsf@uwakimon.sk.tsukuba.ac.jp>
Date: Wed, 04 Jul 2007 16:49:58 +0900
List-Archive: http://archive.example.com/
X-Message-ID-Hash: RXTJ357KFOTJP3NFJA6KMO65X7VQOHJI
the globally unique URI would be:
http://archive.example.com/RXTJ357KFOTJP3NFJA6KMO65X7VQOHJI
"""
def add(message):
"""Add the message to the store.
:param message: An email.message.Message instance containing at least
a unique Message-ID header. The message will be given an
X-Message-ID-Hash header, overriding any existing such header.
:returns: The calculated X-Message-ID-Hash header.
:raises ValueError: if the message is missing a Message-ID header.
The storage service is also allowed to raise this exception if it
find, but disallows collisions.
"""
def get_message_by_id(message_id):
"""Return the message with a matching Message-ID.
:param message_id: The Message-ID header contents to search for.
:returns: The message, or None if no matching message was found.
"""
def get_message_by_hash(message_id_hash):
"""Return the message with the matching X-Message-ID-Hash.
:param message_id_hash: The X-Message-ID-Hash header contents to
search for.
:returns: The message, or None if no matching message was found.
"""
def delete_message(message_id):
"""Remove the given message from the store.<|fim▁hole|> :raises LookupError: if there is no such message.
"""
messages = Attribute(
"""An iterator over all messages in this message store.""")
class IMessage(Interface):
"""The representation of an email message."""
message_id = Attribute("""The message's Message-ID header.""")
message_id_hash = Attribute("""The unique SHA1 hash of the message.""")
path = Attribute("""The filesystem path to the message object.""")<|fim▁end|> |
:param message: The Message-ID of the mesage to delete from the store. |
<|file_name|>benchmark_base.rs<|end_file_name|><|fim▁begin|>// These functions are re-exported as public from lib.rs
// this makes them available to the benchmark crates in the workspace
use super::criterion::Criterion;
use super::rand::Rng;
use super::{<|fim▁hole|> DecodedEntity, Entity, ASCII_TEXT, UNICODE_TEXT,
};
// Benchmark functions
pub fn generate_entities() -> Vec<Vec<Entity<String>>> {
let mut rng = rand::thread_rng();
let mut entities_list: Vec<Vec<Entity<String>>> = Vec::with_capacity(1000);
for _ in 0..1000 {
let total = rng.gen::<usize>() % 10;
let mut indices = Vec::with_capacity(total);
for _ in 0..(total * 2) {
loop {
let index = rng.gen::<usize>() % ASCII_TEXT.len();
if !indices.contains(&index) {
indices.push(index);
break;
}
}
}
indices.sort();
let entities = indices.chunks(2).map(|chunk| {
let (start, end) = (chunk[0], chunk[1]);
let length = end - start;
Entity {
start,
end,
html: (0..length).map(|_| "X").collect(),
}
});
entities_list.push(entities.collect());
}
entities_list
}
pub fn generate_decoded_entities() -> Vec<Vec<DecodedEntity>> {
generate_entities()
.into_iter()
.map(decoded_entities)
.collect()
}
pub fn bench_replacement(c: &mut Criterion) {
c.bench_function("replacement", |b| {
let entities_list = generate_entities();
let mut index_iter = (0..1000).cycle();
b.iter(|| render(UNICODE_TEXT, &entities_list[index_iter.next().unwrap()]))
});
}
pub fn bench_replacement_chars(c: &mut Criterion) {
c.bench_function("replacement chars", |b| {
let entities_list = generate_decoded_entities();
let mut index_iter = (0..1000).cycle();
let decoded_text = UNICODE_TEXT.chars().collect();
b.iter(|| {
let option = index_iter.next();
render_chars(&decoded_text, &entities_list[option.unwrap()])
})
});
}
pub fn bench_replacement_chars2(c: &mut Criterion) {
c.bench_function("replacement chars 2", |b| {
let entities_list = generate_entities();
let mut index_iter = (0..1000).cycle();
let decoded_text = UNICODE_TEXT.chars().collect();
b.iter(|| {
let option = index_iter.next();
render_chars2(&decoded_text, &entities_list[option.unwrap()])
})
});
}
pub fn bench_replacement_chars_entity_references(c: &mut Criterion) {
c.bench_function("replacement chars entity references", |b| {
let entities_list = generate_entities();
let mut refs = Vec::with_capacity(1000);
for (i, _) in entities_list.iter().enumerate() {
refs.push(entity_refs(&entities_list[i]));
}
let mut index_iter = (0..1000).cycle();
let decoded_text = UNICODE_TEXT.chars().collect();
b.iter(|| {
let option = index_iter.next();
render_chars_entity_references(&decoded_text, &refs[option.unwrap()])
})
});
}
pub fn bench_replacement_chars_entity_references_to_chars(c: &mut Criterion) {
c.bench_function("replacement chars entity references to chars", |b| {
let entities_list = generate_decoded_entities();
let mut refs = Vec::with_capacity(1000);
let mut index_iter = (0..1000).cycle();
let decoded_text = UNICODE_TEXT.chars().collect();
for (i, _) in entities_list.iter().enumerate() {
refs.push(entity_refs(&entities_list[i]));
}
b.iter(|| {
let option = index_iter.next();
render_chars_entity_references_to_chars(&decoded_text, &refs[option.unwrap()])
})
});
}
// Benchmark only sorting entities and determining substitutions.
pub fn bench_render_coords(c: &mut Criterion) {
c.bench_function("render coords", |b| {
let entities_list = generate_decoded_entities();
let mut refs = Vec::with_capacity(1000);
for (i, _) in entities_list.iter().enumerate() {
refs.push(entity_refs(&entities_list[i]));
}
let mut index_iter = (0..1000).cycle();
let decoded_text = UNICODE_TEXT.chars().collect();
let mut ht = Vec::with_capacity(64);
b.iter(|| {
let option = index_iter.next();
ht.clear();
// Sort entities
let refs = &refs[option.unwrap()];
let mut sorted: Vec<&DecodedEntity> = Vec::with_capacity(refs.len());
for e in refs {
sorted.push(e);
}
sorted.sort_unstable();
render_coords(&mut ht, &decoded_text, &sorted);
})
});
}<|fim▁end|> | decoded_entities, entity_refs, render, render_chars, render_chars2,
render_chars_entity_references, render_chars_entity_references_to_chars, render_coords, |
<|file_name|>release.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from __future__ import print_function
import re
import ast
import subprocess
import sys
from optparse import OptionParser
DEBUG = False
CONFIRM_STEPS = False
DRY_RUN = False
def skip_step():
"""
Asks for user's response whether to run a step. Default is yes.
:return: boolean
"""
global CONFIRM_STEPS
if CONFIRM_STEPS:
choice = raw_input("--- Confirm step? (y/N) [y] ")
if choice.lower() == 'n':
return True
return False
def run_step(*args):
"""
Prints out the command and asks if it should be run.
If yes (default), runs it.
:param args: list of strings (command and args)
"""
global DRY_RUN
cmd = args
print(' '.join(cmd))
if skip_step():
print('--- Skipping...')
elif DRY_RUN:
print('--- Pretending to run...')
else:
subprocess.check_output(cmd)
def version(version_file):
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open(version_file, 'rb') as f:
ver = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
return ver
def commit_for_release(version_file, ver):
run_step('git', 'reset')
run_step('git', 'add', version_file)
run_step('git', 'commit', '--message', 'Releasing version %s' % ver)
def create_git_tag(tag_name):
run_step('git', 'tag', '-s', '-m', tag_name, tag_name)
def create_source_tarball():
run_step('python', 'setup.py', 'sdist')
def upload_source_tarball():
run_step('python', 'setup.py', 'sdist', 'upload')
def push_to_github():
run_step('git', 'push', 'origin', 'master')
def push_tags_to_github():
run_step('git', 'push', '--tags', 'origin')
if __name__ == '__main__':
if DEBUG:
subprocess.check_output = lambda x: x
ver = version('pgcli/__init__.py')
print('Releasing Version:', ver)
parser = OptionParser()
parser.add_option(
"-c", "--confirm-steps", action="store_true", dest="confirm_steps",
default=False, help=("Confirm every step. If the step is not "
"confirmed, it will be skipped.")
)
parser.add_option(
"-d", "--dry-run", action="store_true", dest="dry_run",
default=False, help="Print out, but not actually run any steps."
)
popts, pargs = parser.parse_args()
CONFIRM_STEPS = popts.confirm_steps
DRY_RUN = popts.dry_run
choice = raw_input('Are you sure? (y/N) [n] ')
if choice.lower() != 'y':
sys.exit(1)
commit_for_release('pgcli/__init__.py', ver)
create_git_tag('v%s' % ver)
create_source_tarball()
push_to_github()<|fim▁hole|><|fim▁end|> | push_tags_to_github()
upload_source_tarball() |
<|file_name|>tree_view_column.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2015, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
//! A widget that emits a signal when clicked on
use glib;
use ffi;
use cast;
use glib::translate::{from_glib_none, ToGlibPtr};
use glib::{to_bool, to_gboolean};
pub struct TreeViewColumn {
pointer: *mut ffi::GtkTreeViewColumn
}
impl TreeViewColumn {
pub fn new() -> Option<TreeViewColumn> {
let tmp_pointer = unsafe { ffi::gtk_tree_view_column_new() };
check_pointer!(tmp_pointer, TreeViewColumn, G_OBJECT_FROM_TREE_VIEW_COLUMN)
}
pub fn clear(&self) {
unsafe {
ffi::gtk_tree_view_column_clear(self.pointer)
}
}
pub fn set_spacing(&self, spacing: i32) {
unsafe {
ffi::gtk_tree_view_column_set_spacing(self.pointer, spacing)
}
}
pub fn get_spacing(&self) -> i32 {
unsafe {
ffi::gtk_tree_view_column_get_spacing(self.pointer)
}
}
pub fn set_visible(&self, visible: bool) {
unsafe {
ffi::gtk_tree_view_column_set_visible(self.pointer, to_gboolean(visible))
}
}
pub fn get_visible(&self) -> bool {
unsafe {
to_bool(ffi::gtk_tree_view_column_get_visible(self.pointer))
}
}
pub fn set_resizable(&self, resizable: bool) {
unsafe {
ffi::gtk_tree_view_column_set_resizable(self.pointer, to_gboolean(resizable))
}
}
pub fn get_resizable(&self) -> bool {
unsafe {
to_bool(ffi::gtk_tree_view_column_get_resizable(self.pointer))
}
}
pub fn set_sizing(&self, ty: ::TreeViewColumnSizing) {
unsafe {
ffi::gtk_tree_view_column_set_sizing(self.pointer, ty)
}
}
pub fn get_sizing(&self) -> ::TreeViewColumnSizing {
unsafe {
ffi::gtk_tree_view_column_get_sizing(self.pointer)
}
}
pub fn get_x_offset(&self) -> i32 {
unsafe {
ffi::gtk_tree_view_column_get_x_offset(self.pointer)
}
}
pub fn get_width(&self) -> i32 {
unsafe {
ffi::gtk_tree_view_column_get_width(self.pointer)
}
}
pub fn get_fixed_width(&self) -> i32 {
unsafe {
ffi::gtk_tree_view_column_get_fixed_width(self.pointer)
}
}
pub fn set_fixed_width(&self, fixed_width: i32) {
unsafe {
ffi::gtk_tree_view_column_set_fixed_width(self.pointer, fixed_width)
}
}
pub fn set_min_width(&self, min_width: i32) {
unsafe {
ffi::gtk_tree_view_column_set_min_width(self.pointer, min_width)
}
}
pub fn set_max_width(&self, max_width: i32) {
unsafe {
ffi::gtk_tree_view_column_set_max_width(self.pointer, max_width)
}
}
pub fn get_min_width(&self) -> i32 {
unsafe {
ffi::gtk_tree_view_column_get_min_width(self.pointer)
}
}
pub fn get_max_width(&self) -> i32 {
unsafe {
ffi::gtk_tree_view_column_get_max_width(self.pointer)
}
}
pub fn clicked(&self) {
unsafe {
ffi::gtk_tree_view_column_clicked(self.pointer)
}
}
pub fn set_title(&self, title: &str) {
unsafe {
ffi::gtk_tree_view_column_set_title(self.pointer,
title.to_glib_none().0);
}
}
pub fn get_title(&self) -> Option<String> {
unsafe {
from_glib_none(ffi::gtk_tree_view_column_get_title(self.pointer))
}
}
pub fn set_expand(&self, expand: bool) {
unsafe {
ffi::gtk_tree_view_column_set_expand(self.pointer, to_gboolean(expand))
}
}
pub fn get_expand(&self) -> bool {
unsafe {
to_bool(ffi::gtk_tree_view_column_get_expand(self.pointer))
}
}
pub fn set_clickable(&self, clickable: bool) {
unsafe {
ffi::gtk_tree_view_column_set_clickable(self.pointer, to_gboolean(clickable))
}
}
pub fn get_clickable(&self) -> bool {
unsafe {
to_bool(ffi::gtk_tree_view_column_get_clickable(self.pointer))
}
}
pub fn set_widget<T: ::WidgetTrait>(&self, widget: &T) {
unsafe {
ffi::gtk_tree_view_column_set_widget(self.pointer, widget.unwrap_widget())
}
}
pub fn get_widget<T: ::WidgetTrait>(&self) -> T {
unsafe {
::FFIWidget::wrap_widget(ffi::gtk_tree_view_column_get_widget(self.pointer))
}
}<|fim▁hole|> ffi::gtk_tree_view_column_set_alignment(self.pointer, x_align)
}
}
pub fn get_alignment(&self) -> f32 {
unsafe {
ffi::gtk_tree_view_column_get_alignment(self.pointer)
}
}
pub fn set_reorderable(&self, reorderable: bool) {
unsafe {
ffi::gtk_tree_view_column_set_reorderable(self.pointer, to_gboolean(reorderable))
}
}
pub fn get_reorderable(&self) -> bool {
unsafe {
to_bool(ffi::gtk_tree_view_column_get_reorderable(self.pointer))
}
}
pub fn get_sort_column_id(&self) -> i32 {
unsafe {
ffi::gtk_tree_view_column_get_sort_column_id(self.pointer)
}
}
pub fn set_sort_column_id(&self, sort_column_id: i32) {
unsafe {
ffi::gtk_tree_view_column_set_sort_column_id(self.pointer, sort_column_id)
}
}
pub fn set_sort_indicator(&self, setting: bool) {
unsafe {
ffi::gtk_tree_view_column_set_sort_indicator(self.pointer, to_gboolean(setting))
}
}
pub fn get_sort_indicator(&self) -> bool {
unsafe {
to_bool(ffi::gtk_tree_view_column_get_sort_indicator(self.pointer))
}
}
pub fn set_sort_order(&self, order: ::SortType) {
unsafe {
ffi::gtk_tree_view_column_set_sort_order(self.pointer, order)
}
}
pub fn get_sort_order(&self) -> ::SortType {
unsafe {
ffi::gtk_tree_view_column_get_sort_order(self.pointer)
}
}
pub fn column_cell_is_visible(&self) -> bool {
unsafe {
to_bool(ffi::gtk_tree_view_column_cell_is_visible(self.pointer))
}
}
pub fn queue_resize(&self) {
unsafe {
ffi::gtk_tree_view_column_queue_resize(self.pointer)
}
}
pub fn get_tree_view(&self) -> ::TreeView {
unsafe {
::FFIWidget::wrap_widget(ffi::gtk_tree_view_column_get_tree_view(self.pointer))
}
}
pub fn get_button<T: ::WidgetTrait + ::ButtonTrait>(&self) -> T {
unsafe {
::FFIWidget::wrap_widget(ffi::gtk_tree_view_column_get_button(self.pointer))
}
}
pub fn add_attribute<T: ::FFIWidget + ::CellRendererTrait>(&self, cell: &T, attribute: &str, column: i32) {
unsafe {
ffi::gtk_tree_view_column_add_attribute(
self.pointer,
cast::GTK_CELL_RENDERER(cell.unwrap_widget()),
attribute.to_glib_none().0,
column)
}
}
pub fn clear_attributes<T: ::FFIWidget + ::CellRendererTrait>(&self, cell: &T) {
unsafe { ffi::gtk_tree_view_column_clear_attributes(self.pointer,
cast::GTK_CELL_RENDERER(cell.unwrap_widget())) }
}
pub fn pack_start<T: ::FFIWidget + ::CellRendererTrait>(&self, cell: &T, expand: bool) {
unsafe { ffi::gtk_tree_view_column_pack_start(self.pointer,
cast::GTK_CELL_RENDERER(cell.unwrap_widget()),
to_gboolean(expand)) }
}
pub fn pack_end<T: ::FFIWidget + ::CellRendererTrait>(&self, cell: &T, expand: bool) {
unsafe { ffi::gtk_tree_view_column_pack_end(self.pointer,
cast::GTK_CELL_RENDERER(cell.unwrap_widget()),
to_gboolean(expand)) }
}
#[doc(hidden)]
pub fn unwrap_pointer(&self) -> *mut ffi::GtkTreeViewColumn {
self.pointer
}
#[doc(hidden)]
pub fn wrap_pointer(treeview_column: *mut ffi::GtkTreeViewColumn) -> TreeViewColumn {
unsafe{
::gobject_ffi::g_object_ref(treeview_column as *mut ::libc::c_void);
}
TreeViewColumn {
pointer: treeview_column
}
}
}
impl glib::traits::FFIGObject for TreeViewColumn {
fn unwrap_gobject(&self) -> *mut ::gobject_ffi::GObject {
::cast::G_OBJECT_FROM_TREE_VIEW_COLUMN(self.pointer)
}
fn wrap_object(object: *mut ::gobject_ffi::GObject) -> TreeViewColumn {
TreeViewColumn { pointer: object as *mut ffi::GtkTreeViewColumn }
}
}
impl Drop for TreeViewColumn {
fn drop(&mut self) {
unsafe {
::gobject_ffi::g_object_unref(self.pointer as *mut ::libc::c_void);
}
}
}
impl Clone for TreeViewColumn {
fn clone(&self) -> TreeViewColumn {
let pointer = unsafe {
::gobject_ffi::g_object_ref(self.pointer as *mut ::libc::c_void)
};
TreeViewColumn {
pointer: pointer as *mut ffi::GtkTreeViewColumn
}
}
}<|fim▁end|> |
pub fn set_alignment(&self, x_align: f32) {
unsafe { |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.