file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
recipes.py | from collections import defaultdict, namedtuple
from minecraft_data.v1_8 import recipes as raw_recipes
RecipeItem = namedtuple('RecipeItem', 'id meta amount')
class Recipe(object):
def __init__(self, raw):
self.result = reformat_item(raw['result'], None)
if 'ingredients' in raw:
|
else:
self.in_shape = reformat_shape(raw['inShape'])
self.out_shape = reformat_shape(raw['outShape']) \
if 'outShape' in raw else None
self.ingredients = [item for row in self.in_shape for item in row]
@property
def total_ingredient_amounts(self):
"""
Returns:
dict: In the form { (item_id, metadata) -> amount }
"""
totals = defaultdict(int)
for id, meta, amount in self.ingredients:
totals[(id, meta)] += amount
return totals
@property
def ingredient_positions(self):
"""
Returns:
dict: In the form { (item_id, metadata) -> [(x, y, amount), ...] }
"""
positions = defaultdict(list)
for y, row in enumerate(self.in_shape):
for x, (item_id, metadata, amount) in enumerate(row):
positions[(item_id, metadata)].append((x, y, amount))
return positions
def reformat_item(raw, default_meta=None):
if isinstance(raw, dict):
raw = raw.copy() # do not modify arg
if 'metadata' not in raw:
raw['metadata'] = default_meta
if 'count' not in raw:
raw['count'] = 1
return RecipeItem(raw['id'], raw['metadata'], raw['count'])
elif isinstance(raw, list):
return RecipeItem(raw[0], raw[1], 1)
else: # single ID or None
return RecipeItem(raw or None, default_meta, 1)
def reformat_shape(shape):
return [[reformat_item(item, None) for item in row] for row in shape]
def iter_recipes(item_id, meta=None):
item_id = str(item_id)
meta = meta and int(meta)
try:
recipes_for_item = raw_recipes[item_id]
except KeyError:
return # no recipe found, do not yield anything
else:
for raw in recipes_for_item:
recipe = Recipe(raw)
if meta is None or meta == recipe.result.meta:
yield recipe
def get_any_recipe(item, meta=None):
# TODO return small recipes if present
for matching in iter_recipes(item, meta):
return matching
return None
| self.ingredients = [reformat_item(item, 0)
for item in raw['ingredients']]
self.in_shape = None
self.out_shape = None | conditional_block |
recipes.py | from collections import defaultdict, namedtuple
from minecraft_data.v1_8 import recipes as raw_recipes
RecipeItem = namedtuple('RecipeItem', 'id meta amount')
class Recipe(object):
def __init__(self, raw):
self.result = reformat_item(raw['result'], None)
if 'ingredients' in raw:
self.ingredients = [reformat_item(item, 0)
for item in raw['ingredients']]
self.in_shape = None
self.out_shape = None
else:
self.in_shape = reformat_shape(raw['inShape'])
self.out_shape = reformat_shape(raw['outShape']) \
if 'outShape' in raw else None
self.ingredients = [item for row in self.in_shape for item in row]
@property
def total_ingredient_amounts(self):
"""
Returns:
dict: In the form { (item_id, metadata) -> amount }
"""
totals = defaultdict(int)
for id, meta, amount in self.ingredients:
totals[(id, meta)] += amount
return totals
@property
def | (self):
"""
Returns:
dict: In the form { (item_id, metadata) -> [(x, y, amount), ...] }
"""
positions = defaultdict(list)
for y, row in enumerate(self.in_shape):
for x, (item_id, metadata, amount) in enumerate(row):
positions[(item_id, metadata)].append((x, y, amount))
return positions
def reformat_item(raw, default_meta=None):
if isinstance(raw, dict):
raw = raw.copy() # do not modify arg
if 'metadata' not in raw:
raw['metadata'] = default_meta
if 'count' not in raw:
raw['count'] = 1
return RecipeItem(raw['id'], raw['metadata'], raw['count'])
elif isinstance(raw, list):
return RecipeItem(raw[0], raw[1], 1)
else: # single ID or None
return RecipeItem(raw or None, default_meta, 1)
def reformat_shape(shape):
return [[reformat_item(item, None) for item in row] for row in shape]
def iter_recipes(item_id, meta=None):
item_id = str(item_id)
meta = meta and int(meta)
try:
recipes_for_item = raw_recipes[item_id]
except KeyError:
return # no recipe found, do not yield anything
else:
for raw in recipes_for_item:
recipe = Recipe(raw)
if meta is None or meta == recipe.result.meta:
yield recipe
def get_any_recipe(item, meta=None):
# TODO return small recipes if present
for matching in iter_recipes(item, meta):
return matching
return None
| ingredient_positions | identifier_name |
serve_localhost.py | #! /usr/bin/env python3
# Invoke http.server to host a basic webserver on localhost /without/ caching.
# Files served by http.server are usually cached by browsers, which makes testing and debugging
# buggy.
import http.server
import os
from functools import partial
class NoCacheRequestHandler(http.server.SimpleHTTPRequestHandler):
def | (self):
self.send_header("Cache-Control", "no-cache, no-store, must-revalidate")
self.send_header("Pragma", "no-cache")
self.send_header("Expires", "0")
super().end_headers()
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--bind', '-b', default='localhost', metavar='ADDRESS',
help='Specify alternate bind address '
'[default: localhost - pass \'\' if you want to serve remote clients]')
parser.add_argument('--directory', '-d', default=os.getcwd(),
help='Specify alternative directory '
'[default:current directory]')
parser.add_argument('port', action='store',
default=8000, type=int,
nargs='?',
help='Specify alternate port [default: 8000]')
args = parser.parse_args()
handler_class = partial(NoCacheRequestHandler, directory=args.directory)
http.server.test(HandlerClass=handler_class, port=args.port, bind=args.bind)
| end_headers | identifier_name |
serve_localhost.py | #! /usr/bin/env python3
# Invoke http.server to host a basic webserver on localhost /without/ caching.
# Files served by http.server are usually cached by browsers, which makes testing and debugging
# buggy.
import http.server
import os
from functools import partial
class NoCacheRequestHandler(http.server.SimpleHTTPRequestHandler):
def end_headers(self):
self.send_header("Cache-Control", "no-cache, no-store, must-revalidate")
self.send_header("Pragma", "no-cache")
self.send_header("Expires", "0")
super().end_headers()
if __name__ == '__main__':
| import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--bind', '-b', default='localhost', metavar='ADDRESS',
help='Specify alternate bind address '
'[default: localhost - pass \'\' if you want to serve remote clients]')
parser.add_argument('--directory', '-d', default=os.getcwd(),
help='Specify alternative directory '
'[default:current directory]')
parser.add_argument('port', action='store',
default=8000, type=int,
nargs='?',
help='Specify alternate port [default: 8000]')
args = parser.parse_args()
handler_class = partial(NoCacheRequestHandler, directory=args.directory)
http.server.test(HandlerClass=handler_class, port=args.port, bind=args.bind) | conditional_block | |
serve_localhost.py | #! /usr/bin/env python3
# Invoke http.server to host a basic webserver on localhost /without/ caching.
# Files served by http.server are usually cached by browsers, which makes testing and debugging
# buggy.
import http.server
import os
from functools import partial
class NoCacheRequestHandler(http.server.SimpleHTTPRequestHandler):
|
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--bind', '-b', default='localhost', metavar='ADDRESS',
help='Specify alternate bind address '
'[default: localhost - pass \'\' if you want to serve remote clients]')
parser.add_argument('--directory', '-d', default=os.getcwd(),
help='Specify alternative directory '
'[default:current directory]')
parser.add_argument('port', action='store',
default=8000, type=int,
nargs='?',
help='Specify alternate port [default: 8000]')
args = parser.parse_args()
handler_class = partial(NoCacheRequestHandler, directory=args.directory)
http.server.test(HandlerClass=handler_class, port=args.port, bind=args.bind)
| def end_headers(self):
self.send_header("Cache-Control", "no-cache, no-store, must-revalidate")
self.send_header("Pragma", "no-cache")
self.send_header("Expires", "0")
super().end_headers() | identifier_body |
serve_localhost.py | #! /usr/bin/env python3
# Invoke http.server to host a basic webserver on localhost /without/ caching.
# Files served by http.server are usually cached by browsers, which makes testing and debugging
# buggy.
import http.server
import os
from functools import partial
class NoCacheRequestHandler(http.server.SimpleHTTPRequestHandler): | super().end_headers()
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--bind', '-b', default='localhost', metavar='ADDRESS',
help='Specify alternate bind address '
'[default: localhost - pass \'\' if you want to serve remote clients]')
parser.add_argument('--directory', '-d', default=os.getcwd(),
help='Specify alternative directory '
'[default:current directory]')
parser.add_argument('port', action='store',
default=8000, type=int,
nargs='?',
help='Specify alternate port [default: 8000]')
args = parser.parse_args()
handler_class = partial(NoCacheRequestHandler, directory=args.directory)
http.server.test(HandlerClass=handler_class, port=args.port, bind=args.bind) | def end_headers(self):
self.send_header("Cache-Control", "no-cache, no-store, must-revalidate")
self.send_header("Pragma", "no-cache")
self.send_header("Expires", "0") | random_line_split |
raw.rs | extern crate libsqlite3_sys as ffi;
extern crate libc;
use std::ffi::{CString, CStr};
use std::io::{stderr, Write};
use std::{ptr, str};
use result::*;
use result::Error::DatabaseError;
#[allow(missing_debug_implementations, missing_copy_implementations)]
pub struct RawConnection {
pub internal_connection: *mut ffi::sqlite3,
}
impl RawConnection {
pub fn establish(database_url: &str) -> ConnectionResult<Self> {
let mut conn_pointer = ptr::null_mut();
let database_url = try!(CString::new(database_url));
let connection_status = unsafe {
ffi::sqlite3_open(database_url.as_ptr(), &mut conn_pointer)
};
match connection_status {
ffi::SQLITE_OK => Ok(RawConnection {
internal_connection: conn_pointer,
}),
err_code => {
let message = super::error_message(err_code);
Err(ConnectionError::BadConnection(message.into()))
}
}
}
pub fn exec(&self, query: &str) -> QueryResult<()> {
let mut err_msg = ptr::null_mut();
let query = try!(CString::new(query));
let callback_fn = None;
let callback_arg = ptr::null_mut();
unsafe {
ffi::sqlite3_exec(
self.internal_connection,
query.as_ptr(),
callback_fn,
callback_arg,
&mut err_msg,
);
}
if !err_msg.is_null() {
let msg = convert_to_string_and_free(err_msg);
let error_kind = DatabaseErrorKind::__Unknown;
Err(DatabaseError(error_kind, Box::new(msg)))
} else {
Ok(())
}
}
pub fn rows_affected_by_last_query(&self) -> usize {
unsafe { ffi::sqlite3_changes(self.internal_connection) as usize }
}
pub fn last_error_message(&self) -> String {
let c_str = unsafe { CStr::from_ptr(ffi::sqlite3_errmsg(self.internal_connection)) };
c_str.to_string_lossy().into_owned()
}
pub fn last_error_code(&self) -> libc::c_int {
unsafe { ffi::sqlite3_extended_errcode(self.internal_connection) }
}
}
impl Drop for RawConnection {
fn drop(&mut self) {
use std::thread::panicking;
let close_result = unsafe { ffi::sqlite3_close(self.internal_connection) };
if close_result != ffi::SQLITE_OK {
let error_message = super::error_message(close_result);
if panicking() {
write!(stderr(), "Error closing SQLite connection: {}", error_message).unwrap();
} else {
panic!("Error closing SQLite connection: {}", error_message);
}
}
}
}
fn | (err_msg: *const libc::c_char) -> String {
let msg = unsafe {
let bytes = CStr::from_ptr(err_msg).to_bytes();
str::from_utf8_unchecked(bytes).into()
};
unsafe { ffi::sqlite3_free(err_msg as *mut libc::c_void) };
msg
}
| convert_to_string_and_free | identifier_name |
raw.rs | extern crate libsqlite3_sys as ffi;
extern crate libc;
use std::ffi::{CString, CStr};
use std::io::{stderr, Write};
use std::{ptr, str};
use result::*;
use result::Error::DatabaseError;
#[allow(missing_debug_implementations, missing_copy_implementations)]
pub struct RawConnection {
pub internal_connection: *mut ffi::sqlite3,
}
impl RawConnection {
pub fn establish(database_url: &str) -> ConnectionResult<Self> {
let mut conn_pointer = ptr::null_mut();
let database_url = try!(CString::new(database_url));
let connection_status = unsafe {
ffi::sqlite3_open(database_url.as_ptr(), &mut conn_pointer)
};
match connection_status {
ffi::SQLITE_OK => Ok(RawConnection {
internal_connection: conn_pointer,
}),
err_code => {
let message = super::error_message(err_code);
Err(ConnectionError::BadConnection(message.into()))
}
}
}
pub fn exec(&self, query: &str) -> QueryResult<()> {
let mut err_msg = ptr::null_mut();
let query = try!(CString::new(query));
let callback_fn = None;
let callback_arg = ptr::null_mut();
unsafe {
ffi::sqlite3_exec(
self.internal_connection,
query.as_ptr(),
callback_fn,
callback_arg,
&mut err_msg,
);
}
if !err_msg.is_null() | else {
Ok(())
}
}
pub fn rows_affected_by_last_query(&self) -> usize {
unsafe { ffi::sqlite3_changes(self.internal_connection) as usize }
}
pub fn last_error_message(&self) -> String {
let c_str = unsafe { CStr::from_ptr(ffi::sqlite3_errmsg(self.internal_connection)) };
c_str.to_string_lossy().into_owned()
}
pub fn last_error_code(&self) -> libc::c_int {
unsafe { ffi::sqlite3_extended_errcode(self.internal_connection) }
}
}
impl Drop for RawConnection {
fn drop(&mut self) {
use std::thread::panicking;
let close_result = unsafe { ffi::sqlite3_close(self.internal_connection) };
if close_result != ffi::SQLITE_OK {
let error_message = super::error_message(close_result);
if panicking() {
write!(stderr(), "Error closing SQLite connection: {}", error_message).unwrap();
} else {
panic!("Error closing SQLite connection: {}", error_message);
}
}
}
}
fn convert_to_string_and_free(err_msg: *const libc::c_char) -> String {
let msg = unsafe {
let bytes = CStr::from_ptr(err_msg).to_bytes();
str::from_utf8_unchecked(bytes).into()
};
unsafe { ffi::sqlite3_free(err_msg as *mut libc::c_void) };
msg
}
| {
let msg = convert_to_string_and_free(err_msg);
let error_kind = DatabaseErrorKind::__Unknown;
Err(DatabaseError(error_kind, Box::new(msg)))
} | conditional_block |
raw.rs | extern crate libsqlite3_sys as ffi;
extern crate libc;
use std::ffi::{CString, CStr};
use std::io::{stderr, Write};
use std::{ptr, str};
use result::*;
use result::Error::DatabaseError;
#[allow(missing_debug_implementations, missing_copy_implementations)]
pub struct RawConnection {
pub internal_connection: *mut ffi::sqlite3,
}
impl RawConnection {
pub fn establish(database_url: &str) -> ConnectionResult<Self> {
let mut conn_pointer = ptr::null_mut();
let database_url = try!(CString::new(database_url));
let connection_status = unsafe {
ffi::sqlite3_open(database_url.as_ptr(), &mut conn_pointer)
};
match connection_status {
ffi::SQLITE_OK => Ok(RawConnection {
internal_connection: conn_pointer,
}),
err_code => {
let message = super::error_message(err_code);
Err(ConnectionError::BadConnection(message.into()))
}
}
}
pub fn exec(&self, query: &str) -> QueryResult<()> |
pub fn rows_affected_by_last_query(&self) -> usize {
unsafe { ffi::sqlite3_changes(self.internal_connection) as usize }
}
pub fn last_error_message(&self) -> String {
let c_str = unsafe { CStr::from_ptr(ffi::sqlite3_errmsg(self.internal_connection)) };
c_str.to_string_lossy().into_owned()
}
pub fn last_error_code(&self) -> libc::c_int {
unsafe { ffi::sqlite3_extended_errcode(self.internal_connection) }
}
}
impl Drop for RawConnection {
fn drop(&mut self) {
use std::thread::panicking;
let close_result = unsafe { ffi::sqlite3_close(self.internal_connection) };
if close_result != ffi::SQLITE_OK {
let error_message = super::error_message(close_result);
if panicking() {
write!(stderr(), "Error closing SQLite connection: {}", error_message).unwrap();
} else {
panic!("Error closing SQLite connection: {}", error_message);
}
}
}
}
fn convert_to_string_and_free(err_msg: *const libc::c_char) -> String {
let msg = unsafe {
let bytes = CStr::from_ptr(err_msg).to_bytes();
str::from_utf8_unchecked(bytes).into()
};
unsafe { ffi::sqlite3_free(err_msg as *mut libc::c_void) };
msg
}
| {
let mut err_msg = ptr::null_mut();
let query = try!(CString::new(query));
let callback_fn = None;
let callback_arg = ptr::null_mut();
unsafe {
ffi::sqlite3_exec(
self.internal_connection,
query.as_ptr(),
callback_fn,
callback_arg,
&mut err_msg,
);
}
if !err_msg.is_null() {
let msg = convert_to_string_and_free(err_msg);
let error_kind = DatabaseErrorKind::__Unknown;
Err(DatabaseError(error_kind, Box::new(msg)))
} else {
Ok(())
}
} | identifier_body |
raw.rs | extern crate libsqlite3_sys as ffi;
extern crate libc;
use std::ffi::{CString, CStr};
use std::io::{stderr, Write};
use std::{ptr, str};
use result::*;
use result::Error::DatabaseError;
#[allow(missing_debug_implementations, missing_copy_implementations)]
pub struct RawConnection {
pub internal_connection: *mut ffi::sqlite3,
}
impl RawConnection {
pub fn establish(database_url: &str) -> ConnectionResult<Self> {
let mut conn_pointer = ptr::null_mut(); | };
match connection_status {
ffi::SQLITE_OK => Ok(RawConnection {
internal_connection: conn_pointer,
}),
err_code => {
let message = super::error_message(err_code);
Err(ConnectionError::BadConnection(message.into()))
}
}
}
pub fn exec(&self, query: &str) -> QueryResult<()> {
let mut err_msg = ptr::null_mut();
let query = try!(CString::new(query));
let callback_fn = None;
let callback_arg = ptr::null_mut();
unsafe {
ffi::sqlite3_exec(
self.internal_connection,
query.as_ptr(),
callback_fn,
callback_arg,
&mut err_msg,
);
}
if !err_msg.is_null() {
let msg = convert_to_string_and_free(err_msg);
let error_kind = DatabaseErrorKind::__Unknown;
Err(DatabaseError(error_kind, Box::new(msg)))
} else {
Ok(())
}
}
pub fn rows_affected_by_last_query(&self) -> usize {
unsafe { ffi::sqlite3_changes(self.internal_connection) as usize }
}
pub fn last_error_message(&self) -> String {
let c_str = unsafe { CStr::from_ptr(ffi::sqlite3_errmsg(self.internal_connection)) };
c_str.to_string_lossy().into_owned()
}
pub fn last_error_code(&self) -> libc::c_int {
unsafe { ffi::sqlite3_extended_errcode(self.internal_connection) }
}
}
impl Drop for RawConnection {
fn drop(&mut self) {
use std::thread::panicking;
let close_result = unsafe { ffi::sqlite3_close(self.internal_connection) };
if close_result != ffi::SQLITE_OK {
let error_message = super::error_message(close_result);
if panicking() {
write!(stderr(), "Error closing SQLite connection: {}", error_message).unwrap();
} else {
panic!("Error closing SQLite connection: {}", error_message);
}
}
}
}
fn convert_to_string_and_free(err_msg: *const libc::c_char) -> String {
let msg = unsafe {
let bytes = CStr::from_ptr(err_msg).to_bytes();
str::from_utf8_unchecked(bytes).into()
};
unsafe { ffi::sqlite3_free(err_msg as *mut libc::c_void) };
msg
} | let database_url = try!(CString::new(database_url));
let connection_status = unsafe {
ffi::sqlite3_open(database_url.as_ptr(), &mut conn_pointer) | random_line_split |
lib.rs | use std::path::{Path, PathBuf};
use std::fs::{self, ReadDir, DirEntry};
use std::io::Error;
pub struct DeepWalk {
root: PathBuf,
}
impl DeepWalk {
pub fn new<P: AsRef<Path>>(root: P) -> Self {
DeepWalk { root: root.as_ref().to_path_buf() }
}
} | type IntoIter = Iter;
fn into_iter(self) -> Iter {
Iter { root: Some(self.root), dirs: Vec::new() }
}
}
pub struct Iter {
root: Option<PathBuf>,
dirs: Vec<ReadDir>,
}
// TODO: Remove and implement Iterator for DeepWalk.
impl Iterator for Iter {
type Item = Result<DirEntry, Error>;
fn next(&mut self) -> Option<Result<DirEntry, Error>> {
if let Some(path) = self.root.take() {
match fs::read_dir(path) {
Ok(dir) => self.dirs.push(dir),
Err(err) => return Some(Err(err)),
}
}
while !self.dirs.is_empty() {
// TODO: FIXME.
break;
}
None
}
}
#[cfg(test)]
mod tests {
use super::DeepWalk;
use std::path::{Path, PathBuf};
fn get_test_roots() -> &'static[&'static str] {
const DATA: &'static[&'static str] = &["", "a", "test", "eee/aaa", "some/long/path"];
DATA
}
#[test]
fn deep_walk_new() {
for val in get_test_roots() {
assert_eq!(DeepWalk::new(val).root, Path::new(val));
}
}
#[test]
fn deep_walk_into_iterator() {
for val in get_test_roots() {
assert_eq!(DeepWalk::new(val).into_iter().root, Some(PathBuf::from(val)));
}
}
} |
impl IntoIterator for DeepWalk {
type Item = Result<DirEntry, Error>; | random_line_split |
lib.rs | use std::path::{Path, PathBuf};
use std::fs::{self, ReadDir, DirEntry};
use std::io::Error;
pub struct DeepWalk {
root: PathBuf,
}
impl DeepWalk {
pub fn new<P: AsRef<Path>>(root: P) -> Self {
DeepWalk { root: root.as_ref().to_path_buf() }
}
}
impl IntoIterator for DeepWalk {
type Item = Result<DirEntry, Error>;
type IntoIter = Iter;
fn into_iter(self) -> Iter {
Iter { root: Some(self.root), dirs: Vec::new() }
}
}
pub struct Iter {
root: Option<PathBuf>,
dirs: Vec<ReadDir>,
}
// TODO: Remove and implement Iterator for DeepWalk.
impl Iterator for Iter {
type Item = Result<DirEntry, Error>;
fn next(&mut self) -> Option<Result<DirEntry, Error>> {
if let Some(path) = self.root.take() {
match fs::read_dir(path) {
Ok(dir) => self.dirs.push(dir),
Err(err) => return Some(Err(err)),
}
}
while !self.dirs.is_empty() {
// TODO: FIXME.
break;
}
None
}
}
#[cfg(test)]
mod tests {
use super::DeepWalk;
use std::path::{Path, PathBuf};
fn get_test_roots() -> &'static[&'static str] {
const DATA: &'static[&'static str] = &["", "a", "test", "eee/aaa", "some/long/path"];
DATA
}
#[test]
fn | () {
for val in get_test_roots() {
assert_eq!(DeepWalk::new(val).root, Path::new(val));
}
}
#[test]
fn deep_walk_into_iterator() {
for val in get_test_roots() {
assert_eq!(DeepWalk::new(val).into_iter().root, Some(PathBuf::from(val)));
}
}
}
| deep_walk_new | identifier_name |
lib.rs | use std::path::{Path, PathBuf};
use std::fs::{self, ReadDir, DirEntry};
use std::io::Error;
pub struct DeepWalk {
root: PathBuf,
}
impl DeepWalk {
pub fn new<P: AsRef<Path>>(root: P) -> Self {
DeepWalk { root: root.as_ref().to_path_buf() }
}
}
impl IntoIterator for DeepWalk {
type Item = Result<DirEntry, Error>;
type IntoIter = Iter;
fn into_iter(self) -> Iter {
Iter { root: Some(self.root), dirs: Vec::new() }
}
}
pub struct Iter {
root: Option<PathBuf>,
dirs: Vec<ReadDir>,
}
// TODO: Remove and implement Iterator for DeepWalk.
impl Iterator for Iter {
type Item = Result<DirEntry, Error>;
fn next(&mut self) -> Option<Result<DirEntry, Error>> {
if let Some(path) = self.root.take() {
match fs::read_dir(path) {
Ok(dir) => self.dirs.push(dir),
Err(err) => return Some(Err(err)),
}
}
while !self.dirs.is_empty() {
// TODO: FIXME.
break;
}
None
}
}
#[cfg(test)]
mod tests {
use super::DeepWalk;
use std::path::{Path, PathBuf};
fn get_test_roots() -> &'static[&'static str] {
const DATA: &'static[&'static str] = &["", "a", "test", "eee/aaa", "some/long/path"];
DATA
}
#[test]
fn deep_walk_new() |
#[test]
fn deep_walk_into_iterator() {
for val in get_test_roots() {
assert_eq!(DeepWalk::new(val).into_iter().root, Some(PathBuf::from(val)));
}
}
}
| {
for val in get_test_roots() {
assert_eq!(DeepWalk::new(val).root, Path::new(val));
}
} | identifier_body |
udptransport.rs | #[feature(struct_variant)];
#[feature(macro_rules)];
use osc::{OscType, OscMessage, OscWriter, OscReader};
use rpc::{ServerId, LogEntry, AppendEntriesRpc, AppendEntriesResponseRpc,
RequestVoteRpc, RequestVoteResponseRpc, RaftRpc, AppendEntries,
AppendEntriesResponse, RequestVote, RequestVoteResponse};
use std::io::net::ip::{Ipv4Addr, SocketAddr};
use std::io::net::udp::{UdpSocket, UdpStream};
use std::io::timer;
use std::os;
use std::vec;
use std::rand;
use transport::RaftRpcTransport;
mod rpc;
mod transport;
static raftPort : u16 = 9000;
pub struct UdpTransport {
socket: UdpSocket,
incomingRpcsChan: Chan<RaftRpc>,
incomingRpcsPort: Port<RaftRpc>,
outgoingRpcsChan: Chan<RaftRpc>,
outgoingRpcsPort: Port<RaftRpc>,
}
impl UdpTransport {
pub fn new() -> UdpTransport {
let socket = UdpSocket::bind(SocketAddr {ip: Ipv4Addr(127,0,0,1), port:raftPort}).unwrap();
let (incomingRpcsPort, incomingRpcsChan) = Chan::new();
let (outgoingRpcsPort, outgoingRpcsChan) = Chan::new();
return UdpTransport {socket: socket, incomingRpcsChan: incomingRpcsChan,
incomingRpcsPort: incomingRpcsPort, outgoingRpcsChan: outgoingRpcsChan,
outgoingRpcsPort: outgoingRpcsPort};
}
pub fn run(&self) {
let readSocket = self.socket.clone();
let writeSocket = self.socket.clone();
//spawn(proc() {
// let mut udpStream = readSocket.connect(remoteAddr);
// loop {
// let msg = OscMessage::from_reader(&mut udpStream).unwrap();
// println!("recv {}: {:?}", msg.address, msg.arguments);
// let msgRpc = self.parseRpcMessage(msg);
// self.incomingRpcsChan.send(msgRpc);
// }
//});
//spawn(proc() {
// let mut udpStream = writeSocket.connect(remoteAddr);
// loop {
// let msgRpc = self.outgoingRpcsPort.recv();
// let msg = self.createRpcMessage(msgRpc);
// println!("send {}: {:?}", msg.address, msg.arguments);
// msg.write_to(&mut udpStream).unwrap();
// }
//});
}
fn parseRpcMessage(&self, sender: ServerId, msg: OscMessage) -> RaftRpc {
return match msg.address {
~"/appendEntries" => AppendEntries(self.parseAppendEntries(sender, msg.arguments)),
~"/requestVote" => RequestVote(self.parseRequestVote(sender, msg.arguments)),
_ => fail!("woops no implementation for {}", msg.address)
};
}
// AppendEntries {term: int, leaderId: ServerId, prevLogIndex: int,
// entries: ~[LogEntry], leaderCommitIndex: int},
fn parseAppendEntries(&self, sender: ServerId, argsVec: ~[OscType]) -> AppendEntriesRpc {
let mut args = argsVec.move_iter();
let term = args.next().unwrap().unwrap_int() as int;
let leaderId: ServerId = from_str::<ServerId>(args.next().unwrap().unwrap_string()).unwrap();
let prevLogIndex = args.next().unwrap().unwrap_int() as int;
let prevLogTerm = args.next().unwrap().unwrap_int() as int;
let entryCount = (args.len()-5)/2;
let mut entries: ~[LogEntry] = vec::with_capacity(entryCount);
for i in range(0,entryCount) {
let term = args.next().unwrap().unwrap_int() as int;
let entry = args.next().unwrap().unwrap_string();
entries[i] = LogEntry {entry: entry, term: term};
}
let leaderCommitIndex = args.next().unwrap().unwrap_int() as int;
return AppendEntriesRpc {sender: sender, term: term, leaderId: leaderId,
prevLogIndex: prevLogIndex, prevLogTerm: prevLogTerm,
entries: entries, leaderCommitIndex: leaderCommitIndex};
}
// RequestVote {term: int, candidateId: ServerId, lastLogIndex: int,
// lastLogTerm: int}
fn | (&self, sender: ServerId, argsVec: ~[OscType]) -> RequestVoteRpc {
let mut args = argsVec.move_iter();
let term: int = args.next().unwrap().unwrap_int() as int;
let candidateId: ServerId = from_str::<ServerId>(args.next().unwrap().unwrap_string()).unwrap();
let lastLogIndex: int = args.next().unwrap().unwrap_int() as int;
let lastLogTerm: int = args.next().unwrap().unwrap_int() as int;
return RequestVoteRpc {sender: sender, term: term, candidateId: candidateId,
lastLogIndex: lastLogIndex, lastLogTerm: lastLogTerm};
}
}
impl RaftRpcTransport for UdpTransport {
fn readIncoming(&self) -> Option<RaftRpc> {
return Some(self.incomingRpcsPort.recv());
}
fn sendRpc(&self, recipient: ServerId, rpc: &RaftRpc) {
}
}
| parseRequestVote | identifier_name |
udptransport.rs | #[feature(struct_variant)];
#[feature(macro_rules)];
use osc::{OscType, OscMessage, OscWriter, OscReader};
use rpc::{ServerId, LogEntry, AppendEntriesRpc, AppendEntriesResponseRpc,
RequestVoteRpc, RequestVoteResponseRpc, RaftRpc, AppendEntries,
AppendEntriesResponse, RequestVote, RequestVoteResponse};
use std::io::net::ip::{Ipv4Addr, SocketAddr};
use std::io::net::udp::{UdpSocket, UdpStream};
use std::io::timer;
use std::os;
use std::vec;
use std::rand;
use transport::RaftRpcTransport;
mod rpc;
mod transport;
static raftPort : u16 = 9000;
pub struct UdpTransport {
socket: UdpSocket,
incomingRpcsChan: Chan<RaftRpc>,
incomingRpcsPort: Port<RaftRpc>,
outgoingRpcsChan: Chan<RaftRpc>,
outgoingRpcsPort: Port<RaftRpc>,
}
impl UdpTransport {
pub fn new() -> UdpTransport {
let socket = UdpSocket::bind(SocketAddr {ip: Ipv4Addr(127,0,0,1), port:raftPort}).unwrap();
let (incomingRpcsPort, incomingRpcsChan) = Chan::new();
let (outgoingRpcsPort, outgoingRpcsChan) = Chan::new();
return UdpTransport {socket: socket, incomingRpcsChan: incomingRpcsChan,
incomingRpcsPort: incomingRpcsPort, outgoingRpcsChan: outgoingRpcsChan,
outgoingRpcsPort: outgoingRpcsPort};
}
pub fn run(&self) {
let readSocket = self.socket.clone();
let writeSocket = self.socket.clone();
//spawn(proc() {
// let mut udpStream = readSocket.connect(remoteAddr);
// loop {
// let msg = OscMessage::from_reader(&mut udpStream).unwrap();
// println!("recv {}: {:?}", msg.address, msg.arguments);
// let msgRpc = self.parseRpcMessage(msg);
// self.incomingRpcsChan.send(msgRpc);
// }
//});
//spawn(proc() {
// let mut udpStream = writeSocket.connect(remoteAddr);
// loop {
// let msgRpc = self.outgoingRpcsPort.recv();
// let msg = self.createRpcMessage(msgRpc);
// println!("send {}: {:?}", msg.address, msg.arguments);
// msg.write_to(&mut udpStream).unwrap();
// }
//});
}
fn parseRpcMessage(&self, sender: ServerId, msg: OscMessage) -> RaftRpc |
// AppendEntries {term: int, leaderId: ServerId, prevLogIndex: int,
// entries: ~[LogEntry], leaderCommitIndex: int},
fn parseAppendEntries(&self, sender: ServerId, argsVec: ~[OscType]) -> AppendEntriesRpc {
let mut args = argsVec.move_iter();
let term = args.next().unwrap().unwrap_int() as int;
let leaderId: ServerId = from_str::<ServerId>(args.next().unwrap().unwrap_string()).unwrap();
let prevLogIndex = args.next().unwrap().unwrap_int() as int;
let prevLogTerm = args.next().unwrap().unwrap_int() as int;
let entryCount = (args.len()-5)/2;
let mut entries: ~[LogEntry] = vec::with_capacity(entryCount);
for i in range(0,entryCount) {
let term = args.next().unwrap().unwrap_int() as int;
let entry = args.next().unwrap().unwrap_string();
entries[i] = LogEntry {entry: entry, term: term};
}
let leaderCommitIndex = args.next().unwrap().unwrap_int() as int;
return AppendEntriesRpc {sender: sender, term: term, leaderId: leaderId,
prevLogIndex: prevLogIndex, prevLogTerm: prevLogTerm,
entries: entries, leaderCommitIndex: leaderCommitIndex};
}
// RequestVote {term: int, candidateId: ServerId, lastLogIndex: int,
// lastLogTerm: int}
fn parseRequestVote(&self, sender: ServerId, argsVec: ~[OscType]) -> RequestVoteRpc {
let mut args = argsVec.move_iter();
let term: int = args.next().unwrap().unwrap_int() as int;
let candidateId: ServerId = from_str::<ServerId>(args.next().unwrap().unwrap_string()).unwrap();
let lastLogIndex: int = args.next().unwrap().unwrap_int() as int;
let lastLogTerm: int = args.next().unwrap().unwrap_int() as int;
return RequestVoteRpc {sender: sender, term: term, candidateId: candidateId,
lastLogIndex: lastLogIndex, lastLogTerm: lastLogTerm};
}
}
impl RaftRpcTransport for UdpTransport {
fn readIncoming(&self) -> Option<RaftRpc> {
return Some(self.incomingRpcsPort.recv());
}
fn sendRpc(&self, recipient: ServerId, rpc: &RaftRpc) {
}
}
| {
return match msg.address {
~"/appendEntries" => AppendEntries(self.parseAppendEntries(sender, msg.arguments)),
~"/requestVote" => RequestVote(self.parseRequestVote(sender, msg.arguments)),
_ => fail!("woops no implementation for {}", msg.address)
};
} | identifier_body |
udptransport.rs | #[feature(struct_variant)];
#[feature(macro_rules)];
use osc::{OscType, OscMessage, OscWriter, OscReader};
use rpc::{ServerId, LogEntry, AppendEntriesRpc, AppendEntriesResponseRpc,
RequestVoteRpc, RequestVoteResponseRpc, RaftRpc, AppendEntries,
AppendEntriesResponse, RequestVote, RequestVoteResponse};
use std::io::net::ip::{Ipv4Addr, SocketAddr};
use std::io::net::udp::{UdpSocket, UdpStream};
use std::io::timer;
use std::os;
use std::vec;
use std::rand;
use transport::RaftRpcTransport;
mod rpc;
mod transport;
static raftPort : u16 = 9000;
pub struct UdpTransport {
socket: UdpSocket,
incomingRpcsChan: Chan<RaftRpc>,
incomingRpcsPort: Port<RaftRpc>,
outgoingRpcsChan: Chan<RaftRpc>,
outgoingRpcsPort: Port<RaftRpc>,
}
impl UdpTransport {
pub fn new() -> UdpTransport {
let socket = UdpSocket::bind(SocketAddr {ip: Ipv4Addr(127,0,0,1), port:raftPort}).unwrap();
let (incomingRpcsPort, incomingRpcsChan) = Chan::new();
let (outgoingRpcsPort, outgoingRpcsChan) = Chan::new();
return UdpTransport {socket: socket, incomingRpcsChan: incomingRpcsChan,
incomingRpcsPort: incomingRpcsPort, outgoingRpcsChan: outgoingRpcsChan,
outgoingRpcsPort: outgoingRpcsPort};
}
pub fn run(&self) {
let readSocket = self.socket.clone();
let writeSocket = self.socket.clone();
//spawn(proc() {
// let mut udpStream = readSocket.connect(remoteAddr);
// loop {
// let msg = OscMessage::from_reader(&mut udpStream).unwrap();
// println!("recv {}: {:?}", msg.address, msg.arguments);
// let msgRpc = self.parseRpcMessage(msg);
// self.incomingRpcsChan.send(msgRpc);
// }
//});
//spawn(proc() {
// let mut udpStream = writeSocket.connect(remoteAddr);
// loop {
// let msgRpc = self.outgoingRpcsPort.recv();
// let msg = self.createRpcMessage(msgRpc);
// println!("send {}: {:?}", msg.address, msg.arguments);
// msg.write_to(&mut udpStream).unwrap();
// }
//});
}
fn parseRpcMessage(&self, sender: ServerId, msg: OscMessage) -> RaftRpc {
return match msg.address {
~"/appendEntries" => AppendEntries(self.parseAppendEntries(sender, msg.arguments)),
~"/requestVote" => RequestVote(self.parseRequestVote(sender, msg.arguments)),
_ => fail!("woops no implementation for {}", msg.address)
};
}
// AppendEntries {term: int, leaderId: ServerId, prevLogIndex: int,
// entries: ~[LogEntry], leaderCommitIndex: int},
fn parseAppendEntries(&self, sender: ServerId, argsVec: ~[OscType]) -> AppendEntriesRpc {
let mut args = argsVec.move_iter();
let term = args.next().unwrap().unwrap_int() as int;
let leaderId: ServerId = from_str::<ServerId>(args.next().unwrap().unwrap_string()).unwrap();
let prevLogIndex = args.next().unwrap().unwrap_int() as int;
let prevLogTerm = args.next().unwrap().unwrap_int() as int;
let entryCount = (args.len()-5)/2;
let mut entries: ~[LogEntry] = vec::with_capacity(entryCount);
for i in range(0,entryCount) {
let term = args.next().unwrap().unwrap_int() as int;
let entry = args.next().unwrap().unwrap_string();
entries[i] = LogEntry {entry: entry, term: term};
}
let leaderCommitIndex = args.next().unwrap().unwrap_int() as int;
return AppendEntriesRpc {sender: sender, term: term, leaderId: leaderId, | prevLogIndex: prevLogIndex, prevLogTerm: prevLogTerm,
entries: entries, leaderCommitIndex: leaderCommitIndex};
}
// RequestVote {term: int, candidateId: ServerId, lastLogIndex: int,
// lastLogTerm: int}
fn parseRequestVote(&self, sender: ServerId, argsVec: ~[OscType]) -> RequestVoteRpc {
let mut args = argsVec.move_iter();
let term: int = args.next().unwrap().unwrap_int() as int;
let candidateId: ServerId = from_str::<ServerId>(args.next().unwrap().unwrap_string()).unwrap();
let lastLogIndex: int = args.next().unwrap().unwrap_int() as int;
let lastLogTerm: int = args.next().unwrap().unwrap_int() as int;
return RequestVoteRpc {sender: sender, term: term, candidateId: candidateId,
lastLogIndex: lastLogIndex, lastLogTerm: lastLogTerm};
}
}
impl RaftRpcTransport for UdpTransport {
fn readIncoming(&self) -> Option<RaftRpc> {
return Some(self.incomingRpcsPort.recv());
}
fn sendRpc(&self, recipient: ServerId, rpc: &RaftRpc) {
}
} | random_line_split | |
balancedColumnTreeBuilder.js | /**
* ag-grid - Advanced Data Grid / Data Table supporting Javascript / React / AngularJS / Web Components
* @version v4.0.2
* @link http://www.ag-grid.com/
* @license MIT
*/
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
var __param = (this && this.__param) || function (paramIndex, decorator) {
return function (target, key) { decorator(target, key, paramIndex); }
};
var gridOptionsWrapper_1 = require('../gridOptionsWrapper');
var logger_1 = require('../logger');
var columnUtils_1 = require('../columnController/columnUtils');
var columnKeyCreator_1 = require("./columnKeyCreator");
var originalColumnGroup_1 = require("../entities/originalColumnGroup");
var column_1 = require("../entities/column");
var context_1 = require("../context/context");
var context_2 = require("../context/context");
var context_3 = require("../context/context");
var context_4 = require("../context/context");
// takes in a list of columns, as specified by the column definitions, and returns column groups
var BalancedColumnTreeBuilder = (function () {
function | () {
}
BalancedColumnTreeBuilder.prototype.agWire = function (loggerFactory) {
this.logger = loggerFactory.create('BalancedColumnTreeBuilder');
};
BalancedColumnTreeBuilder.prototype.createBalancedColumnGroups = function (abstractColDefs) {
// column key creator dishes out unique column id's in a deterministic way,
// so if we have two grids (that cold be master/slave) with same column definitions,
// then this ensures the two grids use identical id's.
var columnKeyCreator = new columnKeyCreator_1.ColumnKeyCreator();
// create am unbalanced tree that maps the provided definitions
var unbalancedTree = this.recursivelyCreateColumns(abstractColDefs, 0, columnKeyCreator);
var treeDept = this.findMaxDept(unbalancedTree, 0);
this.logger.log('Number of levels for grouped columns is ' + treeDept);
var balancedTree = this.balanceColumnTree(unbalancedTree, 0, treeDept, columnKeyCreator);
this.columnUtils.deptFirstOriginalTreeSearch(balancedTree, function (child) {
if (child instanceof originalColumnGroup_1.OriginalColumnGroup) {
child.calculateExpandable();
}
});
return {
balancedTree: balancedTree,
treeDept: treeDept
};
};
BalancedColumnTreeBuilder.prototype.balanceColumnTree = function (unbalancedTree, currentDept, columnDept, columnKeyCreator) {
var _this = this;
var result = [];
// go through each child, for groups, recurse a level deeper,
// for columns we need to pad
unbalancedTree.forEach(function (child) {
if (child instanceof originalColumnGroup_1.OriginalColumnGroup) {
var originalGroup = child;
var newChildren = _this.balanceColumnTree(originalGroup.getChildren(), currentDept + 1, columnDept, columnKeyCreator);
originalGroup.setChildren(newChildren);
result.push(originalGroup);
}
else {
var newChild = child;
for (var i = columnDept - 1; i >= currentDept; i--) {
var newColId = columnKeyCreator.getUniqueKey(null, null);
var paddedGroup = new originalColumnGroup_1.OriginalColumnGroup(null, newColId);
paddedGroup.setChildren([newChild]);
newChild = paddedGroup;
}
result.push(newChild);
}
});
return result;
};
BalancedColumnTreeBuilder.prototype.findMaxDept = function (treeChildren, dept) {
var maxDeptThisLevel = dept;
for (var i = 0; i < treeChildren.length; i++) {
var abstractColumn = treeChildren[i];
if (abstractColumn instanceof originalColumnGroup_1.OriginalColumnGroup) {
var originalGroup = abstractColumn;
var newDept = this.findMaxDept(originalGroup.getChildren(), dept + 1);
if (maxDeptThisLevel < newDept) {
maxDeptThisLevel = newDept;
}
}
}
return maxDeptThisLevel;
};
BalancedColumnTreeBuilder.prototype.recursivelyCreateColumns = function (abstractColDefs, level, columnKeyCreator) {
var _this = this;
var result = [];
if (!abstractColDefs) {
return result;
}
abstractColDefs.forEach(function (abstractColDef) {
_this.checkForDeprecatedItems(abstractColDef);
if (_this.isColumnGroup(abstractColDef)) {
var groupColDef = abstractColDef;
var groupId = columnKeyCreator.getUniqueKey(groupColDef.groupId, null);
var originalGroup = new originalColumnGroup_1.OriginalColumnGroup(groupColDef, groupId);
var children = _this.recursivelyCreateColumns(groupColDef.children, level + 1, columnKeyCreator);
originalGroup.setChildren(children);
result.push(originalGroup);
}
else {
var colDef = abstractColDef;
var colId = columnKeyCreator.getUniqueKey(colDef.colId, colDef.field);
var column = new column_1.Column(colDef, colId);
_this.context.wireBean(column);
result.push(column);
}
});
return result;
};
BalancedColumnTreeBuilder.prototype.checkForDeprecatedItems = function (colDef) {
if (colDef) {
var colDefNoType = colDef; // take out the type, so we can access attributes not defined in the type
if (colDefNoType.group !== undefined) {
console.warn('ag-grid: colDef.group is invalid, please check documentation on how to do grouping as it changed in version 3');
}
if (colDefNoType.headerGroup !== undefined) {
console.warn('ag-grid: colDef.headerGroup is invalid, please check documentation on how to do grouping as it changed in version 3');
}
if (colDefNoType.headerGroupShow !== undefined) {
console.warn('ag-grid: colDef.headerGroupShow is invalid, should be columnGroupShow, please check documentation on how to do grouping as it changed in version 3');
}
}
};
// if object has children, we assume it's a group
BalancedColumnTreeBuilder.prototype.isColumnGroup = function (abstractColDef) {
return abstractColDef.children !== undefined;
};
__decorate([
context_3.Autowired('gridOptionsWrapper'),
__metadata('design:type', gridOptionsWrapper_1.GridOptionsWrapper)
], BalancedColumnTreeBuilder.prototype, "gridOptionsWrapper", void 0);
__decorate([
context_3.Autowired('columnUtils'),
__metadata('design:type', columnUtils_1.ColumnUtils)
], BalancedColumnTreeBuilder.prototype, "columnUtils", void 0);
__decorate([
context_3.Autowired('context'),
__metadata('design:type', context_4.Context)
], BalancedColumnTreeBuilder.prototype, "context", void 0);
__decorate([
__param(0, context_2.Qualifier('loggerFactory')),
__metadata('design:type', Function),
__metadata('design:paramtypes', [logger_1.LoggerFactory]),
__metadata('design:returntype', void 0)
], BalancedColumnTreeBuilder.prototype, "agWire", null);
BalancedColumnTreeBuilder = __decorate([
context_1.Bean('balancedColumnTreeBuilder'),
__metadata('design:paramtypes', [])
], BalancedColumnTreeBuilder);
return BalancedColumnTreeBuilder;
})();
exports.BalancedColumnTreeBuilder = BalancedColumnTreeBuilder;
| BalancedColumnTreeBuilder | identifier_name |
balancedColumnTreeBuilder.js | /**
* ag-grid - Advanced Data Grid / Data Table supporting Javascript / React / AngularJS / Web Components
* @version v4.0.2
* @link http://www.ag-grid.com/
* @license MIT
*/
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
var __param = (this && this.__param) || function (paramIndex, decorator) {
return function (target, key) { decorator(target, key, paramIndex); }
};
var gridOptionsWrapper_1 = require('../gridOptionsWrapper');
var logger_1 = require('../logger');
var columnUtils_1 = require('../columnController/columnUtils');
var columnKeyCreator_1 = require("./columnKeyCreator");
var originalColumnGroup_1 = require("../entities/originalColumnGroup");
var column_1 = require("../entities/column");
var context_1 = require("../context/context");
var context_2 = require("../context/context");
var context_3 = require("../context/context");
var context_4 = require("../context/context");
// takes in a list of columns, as specified by the column definitions, and returns column groups
var BalancedColumnTreeBuilder = (function () {
function BalancedColumnTreeBuilder() |
BalancedColumnTreeBuilder.prototype.agWire = function (loggerFactory) {
this.logger = loggerFactory.create('BalancedColumnTreeBuilder');
};
BalancedColumnTreeBuilder.prototype.createBalancedColumnGroups = function (abstractColDefs) {
// column key creator dishes out unique column id's in a deterministic way,
// so if we have two grids (that cold be master/slave) with same column definitions,
// then this ensures the two grids use identical id's.
var columnKeyCreator = new columnKeyCreator_1.ColumnKeyCreator();
// create am unbalanced tree that maps the provided definitions
var unbalancedTree = this.recursivelyCreateColumns(abstractColDefs, 0, columnKeyCreator);
var treeDept = this.findMaxDept(unbalancedTree, 0);
this.logger.log('Number of levels for grouped columns is ' + treeDept);
var balancedTree = this.balanceColumnTree(unbalancedTree, 0, treeDept, columnKeyCreator);
this.columnUtils.deptFirstOriginalTreeSearch(balancedTree, function (child) {
if (child instanceof originalColumnGroup_1.OriginalColumnGroup) {
child.calculateExpandable();
}
});
return {
balancedTree: balancedTree,
treeDept: treeDept
};
};
BalancedColumnTreeBuilder.prototype.balanceColumnTree = function (unbalancedTree, currentDept, columnDept, columnKeyCreator) {
var _this = this;
var result = [];
// go through each child, for groups, recurse a level deeper,
// for columns we need to pad
unbalancedTree.forEach(function (child) {
if (child instanceof originalColumnGroup_1.OriginalColumnGroup) {
var originalGroup = child;
var newChildren = _this.balanceColumnTree(originalGroup.getChildren(), currentDept + 1, columnDept, columnKeyCreator);
originalGroup.setChildren(newChildren);
result.push(originalGroup);
}
else {
var newChild = child;
for (var i = columnDept - 1; i >= currentDept; i--) {
var newColId = columnKeyCreator.getUniqueKey(null, null);
var paddedGroup = new originalColumnGroup_1.OriginalColumnGroup(null, newColId);
paddedGroup.setChildren([newChild]);
newChild = paddedGroup;
}
result.push(newChild);
}
});
return result;
};
BalancedColumnTreeBuilder.prototype.findMaxDept = function (treeChildren, dept) {
var maxDeptThisLevel = dept;
for (var i = 0; i < treeChildren.length; i++) {
var abstractColumn = treeChildren[i];
if (abstractColumn instanceof originalColumnGroup_1.OriginalColumnGroup) {
var originalGroup = abstractColumn;
var newDept = this.findMaxDept(originalGroup.getChildren(), dept + 1);
if (maxDeptThisLevel < newDept) {
maxDeptThisLevel = newDept;
}
}
}
return maxDeptThisLevel;
};
BalancedColumnTreeBuilder.prototype.recursivelyCreateColumns = function (abstractColDefs, level, columnKeyCreator) {
var _this = this;
var result = [];
if (!abstractColDefs) {
return result;
}
abstractColDefs.forEach(function (abstractColDef) {
_this.checkForDeprecatedItems(abstractColDef);
if (_this.isColumnGroup(abstractColDef)) {
var groupColDef = abstractColDef;
var groupId = columnKeyCreator.getUniqueKey(groupColDef.groupId, null);
var originalGroup = new originalColumnGroup_1.OriginalColumnGroup(groupColDef, groupId);
var children = _this.recursivelyCreateColumns(groupColDef.children, level + 1, columnKeyCreator);
originalGroup.setChildren(children);
result.push(originalGroup);
}
else {
var colDef = abstractColDef;
var colId = columnKeyCreator.getUniqueKey(colDef.colId, colDef.field);
var column = new column_1.Column(colDef, colId);
_this.context.wireBean(column);
result.push(column);
}
});
return result;
};
BalancedColumnTreeBuilder.prototype.checkForDeprecatedItems = function (colDef) {
if (colDef) {
var colDefNoType = colDef; // take out the type, so we can access attributes not defined in the type
if (colDefNoType.group !== undefined) {
console.warn('ag-grid: colDef.group is invalid, please check documentation on how to do grouping as it changed in version 3');
}
if (colDefNoType.headerGroup !== undefined) {
console.warn('ag-grid: colDef.headerGroup is invalid, please check documentation on how to do grouping as it changed in version 3');
}
if (colDefNoType.headerGroupShow !== undefined) {
console.warn('ag-grid: colDef.headerGroupShow is invalid, should be columnGroupShow, please check documentation on how to do grouping as it changed in version 3');
}
}
};
// if object has children, we assume it's a group
BalancedColumnTreeBuilder.prototype.isColumnGroup = function (abstractColDef) {
return abstractColDef.children !== undefined;
};
__decorate([
context_3.Autowired('gridOptionsWrapper'),
__metadata('design:type', gridOptionsWrapper_1.GridOptionsWrapper)
], BalancedColumnTreeBuilder.prototype, "gridOptionsWrapper", void 0);
__decorate([
context_3.Autowired('columnUtils'),
__metadata('design:type', columnUtils_1.ColumnUtils)
], BalancedColumnTreeBuilder.prototype, "columnUtils", void 0);
__decorate([
context_3.Autowired('context'),
__metadata('design:type', context_4.Context)
], BalancedColumnTreeBuilder.prototype, "context", void 0);
__decorate([
__param(0, context_2.Qualifier('loggerFactory')),
__metadata('design:type', Function),
__metadata('design:paramtypes', [logger_1.LoggerFactory]),
__metadata('design:returntype', void 0)
], BalancedColumnTreeBuilder.prototype, "agWire", null);
BalancedColumnTreeBuilder = __decorate([
context_1.Bean('balancedColumnTreeBuilder'),
__metadata('design:paramtypes', [])
], BalancedColumnTreeBuilder);
return BalancedColumnTreeBuilder;
})();
exports.BalancedColumnTreeBuilder = BalancedColumnTreeBuilder;
| {
} | identifier_body |
balancedColumnTreeBuilder.js | /**
* ag-grid - Advanced Data Grid / Data Table supporting Javascript / React / AngularJS / Web Components
* @version v4.0.2
* @link http://www.ag-grid.com/
* @license MIT
*/
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var __metadata = (this && this.__metadata) || function (k, v) {
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
};
var __param = (this && this.__param) || function (paramIndex, decorator) {
return function (target, key) { decorator(target, key, paramIndex); }
};
var gridOptionsWrapper_1 = require('../gridOptionsWrapper');
var logger_1 = require('../logger');
var columnUtils_1 = require('../columnController/columnUtils');
var columnKeyCreator_1 = require("./columnKeyCreator");
var originalColumnGroup_1 = require("../entities/originalColumnGroup");
var column_1 = require("../entities/column");
var context_1 = require("../context/context");
var context_2 = require("../context/context");
var context_3 = require("../context/context");
var context_4 = require("../context/context");
// takes in a list of columns, as specified by the column definitions, and returns column groups
var BalancedColumnTreeBuilder = (function () {
function BalancedColumnTreeBuilder() {
}
BalancedColumnTreeBuilder.prototype.agWire = function (loggerFactory) {
this.logger = loggerFactory.create('BalancedColumnTreeBuilder');
};
BalancedColumnTreeBuilder.prototype.createBalancedColumnGroups = function (abstractColDefs) {
// column key creator dishes out unique column id's in a deterministic way,
// so if we have two grids (that cold be master/slave) with same column definitions,
// then this ensures the two grids use identical id's.
var columnKeyCreator = new columnKeyCreator_1.ColumnKeyCreator();
// create am unbalanced tree that maps the provided definitions
var unbalancedTree = this.recursivelyCreateColumns(abstractColDefs, 0, columnKeyCreator);
var treeDept = this.findMaxDept(unbalancedTree, 0);
this.logger.log('Number of levels for grouped columns is ' + treeDept);
var balancedTree = this.balanceColumnTree(unbalancedTree, 0, treeDept, columnKeyCreator);
this.columnUtils.deptFirstOriginalTreeSearch(balancedTree, function (child) {
if (child instanceof originalColumnGroup_1.OriginalColumnGroup) {
child.calculateExpandable();
}
});
return {
balancedTree: balancedTree,
treeDept: treeDept
};
};
BalancedColumnTreeBuilder.prototype.balanceColumnTree = function (unbalancedTree, currentDept, columnDept, columnKeyCreator) {
var _this = this;
var result = [];
// go through each child, for groups, recurse a level deeper,
// for columns we need to pad
unbalancedTree.forEach(function (child) {
if (child instanceof originalColumnGroup_1.OriginalColumnGroup) {
var originalGroup = child;
var newChildren = _this.balanceColumnTree(originalGroup.getChildren(), currentDept + 1, columnDept, columnKeyCreator);
originalGroup.setChildren(newChildren);
result.push(originalGroup);
}
else {
var newChild = child;
for (var i = columnDept - 1; i >= currentDept; i--) {
var newColId = columnKeyCreator.getUniqueKey(null, null);
var paddedGroup = new originalColumnGroup_1.OriginalColumnGroup(null, newColId);
paddedGroup.setChildren([newChild]);
newChild = paddedGroup;
}
result.push(newChild);
}
});
return result;
};
BalancedColumnTreeBuilder.prototype.findMaxDept = function (treeChildren, dept) {
var maxDeptThisLevel = dept;
for (var i = 0; i < treeChildren.length; i++) {
var abstractColumn = treeChildren[i];
if (abstractColumn instanceof originalColumnGroup_1.OriginalColumnGroup) {
var originalGroup = abstractColumn;
var newDept = this.findMaxDept(originalGroup.getChildren(), dept + 1);
if (maxDeptThisLevel < newDept) {
maxDeptThisLevel = newDept;
}
}
}
return maxDeptThisLevel;
};
BalancedColumnTreeBuilder.prototype.recursivelyCreateColumns = function (abstractColDefs, level, columnKeyCreator) {
var _this = this;
var result = [];
if (!abstractColDefs) {
return result;
}
abstractColDefs.forEach(function (abstractColDef) {
_this.checkForDeprecatedItems(abstractColDef);
if (_this.isColumnGroup(abstractColDef)) {
var groupColDef = abstractColDef;
var groupId = columnKeyCreator.getUniqueKey(groupColDef.groupId, null);
var originalGroup = new originalColumnGroup_1.OriginalColumnGroup(groupColDef, groupId);
var children = _this.recursivelyCreateColumns(groupColDef.children, level + 1, columnKeyCreator);
originalGroup.setChildren(children);
result.push(originalGroup);
}
else {
var colDef = abstractColDef;
var colId = columnKeyCreator.getUniqueKey(colDef.colId, colDef.field);
var column = new column_1.Column(colDef, colId);
_this.context.wireBean(column);
result.push(column);
}
});
return result;
};
BalancedColumnTreeBuilder.prototype.checkForDeprecatedItems = function (colDef) {
if (colDef) {
var colDefNoType = colDef; // take out the type, so we can access attributes not defined in the type
if (colDefNoType.group !== undefined) {
console.warn('ag-grid: colDef.group is invalid, please check documentation on how to do grouping as it changed in version 3'); | if (colDefNoType.headerGroupShow !== undefined) {
console.warn('ag-grid: colDef.headerGroupShow is invalid, should be columnGroupShow, please check documentation on how to do grouping as it changed in version 3');
}
}
};
// if object has children, we assume it's a group
BalancedColumnTreeBuilder.prototype.isColumnGroup = function (abstractColDef) {
return abstractColDef.children !== undefined;
};
__decorate([
context_3.Autowired('gridOptionsWrapper'),
__metadata('design:type', gridOptionsWrapper_1.GridOptionsWrapper)
], BalancedColumnTreeBuilder.prototype, "gridOptionsWrapper", void 0);
__decorate([
context_3.Autowired('columnUtils'),
__metadata('design:type', columnUtils_1.ColumnUtils)
], BalancedColumnTreeBuilder.prototype, "columnUtils", void 0);
__decorate([
context_3.Autowired('context'),
__metadata('design:type', context_4.Context)
], BalancedColumnTreeBuilder.prototype, "context", void 0);
__decorate([
__param(0, context_2.Qualifier('loggerFactory')),
__metadata('design:type', Function),
__metadata('design:paramtypes', [logger_1.LoggerFactory]),
__metadata('design:returntype', void 0)
], BalancedColumnTreeBuilder.prototype, "agWire", null);
BalancedColumnTreeBuilder = __decorate([
context_1.Bean('balancedColumnTreeBuilder'),
__metadata('design:paramtypes', [])
], BalancedColumnTreeBuilder);
return BalancedColumnTreeBuilder;
})();
exports.BalancedColumnTreeBuilder = BalancedColumnTreeBuilder; | }
if (colDefNoType.headerGroup !== undefined) {
console.warn('ag-grid: colDef.headerGroup is invalid, please check documentation on how to do grouping as it changed in version 3');
} | random_line_split |
professional-email-card.tsx | import { Gridicon } from '@automattic/components';
import { useShoppingCart } from '@automattic/shopping-cart';
import { translate } from 'i18n-calypso';
import { useState } from 'react';
import { useSelector } from 'react-redux';
import poweredByTitanLogo from 'calypso/assets/images/email-providers/titan/powered-by-titan-caps.svg';
import {
titanMailMonthly,
titanMailYearly,
TitanProductProps,
} from 'calypso/lib/cart-values/cart-items';
import {
getSelectedDomain,
canCurrentUserAddEmail,
getCurrentUserCannotAddEmailReason,
} from 'calypso/lib/domains';
import { getTitanProductName } from 'calypso/lib/titan';
import { TITAN_PROVIDER_NAME } from 'calypso/lib/titan/constants';
import {
areAllMailboxesValid,
buildNewTitanMailbox,
transformMailboxForCart,
validateMailboxes as validateTitanMailboxes,
} from 'calypso/lib/titan/new-mailbox';
import useCartKey from 'calypso/my-sites/checkout/use-cart-key';
import { IntervalLength } from 'calypso/my-sites/email/email-providers-comparison/interval-length';
import ProfessionalEmailPrice from 'calypso/my-sites/email/email-providers-comparison/price/professional-email';
import EmailProvidersStackedCard from 'calypso/my-sites/email/email-providers-stacked-comparison/email-provider-stacked-card';
import {
addToCartAndCheckout,
recordTracksEventAddToCartClick,
} from 'calypso/my-sites/email/email-providers-stacked-comparison/provider-cards/utils';
import {
TITAN_PASSWORD_RESET_FIELD,
TITAN_FULL_NAME_FIELD,
} from 'calypso/my-sites/email/titan-new-mailbox';
import TitanNewMailboxList from 'calypso/my-sites/email/titan-new-mailbox-list';
import { FullWidthButton } from 'calypso/my-sites/marketplace/components';
import { getDomainsBySiteId } from 'calypso/state/sites/domains/selectors';
import { getSelectedSite } from 'calypso/state/ui/selectors';
import type { EmailProvidersStackedCardProps, ProviderCardProps } from './provider-card-props';
import type { ReactElement } from 'react';
import './professional-email-card.scss';
// eslint-disable-next-line @typescript-eslint/no-empty-function
const noop = () => {};
const logo = <Gridicon className="professional-email-card__logo" icon="my-sites" />;
const badge = (
<img src={ poweredByTitanLogo } alt={ translate( 'Powered by Titan', { textOnly: true } ) } />
);
const getTitanFeatures = () => {
return [
translate( 'Send and receive from your custom domain' ),
translate( '30GB storage' ),
translate( 'Email, calendars, and contacts' ),
translate( '24/7 support via email' ),
];
};
const professionalEmailCardInformation: ProviderCardProps = {
className: 'professional-email-card',
expandButtonLabel: translate( 'Select' ),
providerKey: 'titan',
showExpandButton: true,
description: translate(
'Integrated email solution with powerful features. Manage your email and more on any device.'
),
logo,
productName: getTitanProductName(),
footerBadge: badge,
features: getTitanFeatures(),
};
const ProfessionalEmailCard = ( {
cartDomainName,
comparisonContext,
detailsExpanded,
intervalLength,
onExpandedChange,
selectedDomainName,
source,
}: EmailProvidersStackedCardProps ): ReactElement => {
const selectedSite = useSelector( getSelectedSite );
const domains = useSelector( ( state ) => getDomainsBySiteId( state, selectedSite?.ID ) );
const domain = getSelectedDomain( {
domains,
selectedDomainName: selectedDomainName,
} );
const cartKey = useCartKey();
const shoppingCartManager = useShoppingCart( cartKey );
const [ titanMailbox, setTitanMailbox ] = useState( [
buildNewTitanMailbox( selectedDomainName, false ),
] );
const [ addingToCart, setAddingToCart ] = useState( false );
const [ validatedTitanMailboxUuids, setValidatedTitanMailboxUuids ] = useState( [ '' ] );
const optionalFields = [ TITAN_PASSWORD_RESET_FIELD, TITAN_FULL_NAME_FIELD ];
const professionalEmail: ProviderCardProps = { ...professionalEmailCardInformation };
professionalEmail.detailsExpanded = detailsExpanded;
const hasCartDomain = Boolean( cartDomainName );
const onTitanConfirmNewMailboxes = () => {
const validatedTitanMailboxes = validateTitanMailboxes( titanMailbox, optionalFields );
const mailboxesAreValid = areAllMailboxesValid( validatedTitanMailboxes, optionalFields );
const userCanAddEmail = hasCartDomain || canCurrentUserAddEmail( domain );
const userCannotAddEmailReason = userCanAddEmail
? null
: getCurrentUserCannotAddEmailReason( domain );
const validatedMailboxUuids = validatedTitanMailboxes.map( ( mailbox ) => mailbox.uuid );
recordTracksEventAddToCartClick(
comparisonContext,
validatedMailboxUuids,
mailboxesAreValid,
TITAN_PROVIDER_NAME,
source ?? '',
userCanAddEmail,
userCannotAddEmailReason
);
setTitanMailbox( titanMailbox );
setValidatedTitanMailboxUuids( validatedMailboxUuids );
if ( ! mailboxesAreValid || ! userCanAddEmail ) |
const props: TitanProductProps = {
domain: selectedDomainName,
quantity: validatedTitanMailboxes.length,
extra: {
email_users: validatedTitanMailboxes.map( transformMailboxForCart ),
new_quantity: validatedTitanMailboxes.length,
},
};
const cartItem =
intervalLength === IntervalLength.MONTHLY
? titanMailMonthly( props )
: titanMailYearly( props );
addToCartAndCheckout(
shoppingCartManager,
cartItem,
setAddingToCart,
selectedSite?.slug ?? ''
);
};
const onTitanFormReturnKeyPress = noop;
professionalEmail.onExpandedChange = onExpandedChange;
professionalEmail.priceBadge = (
<ProfessionalEmailPrice domain={ domain } intervalLength={ intervalLength } />
);
professionalEmail.formFields = (
<TitanNewMailboxList
onMailboxesChange={ setTitanMailbox }
mailboxes={ titanMailbox }
selectedDomainName={ selectedDomainName }
onReturnKeyPress={ onTitanFormReturnKeyPress }
validatedMailboxUuids={ validatedTitanMailboxUuids }
showAddAnotherMailboxButton={ false }
hiddenFieldNames={ [ TITAN_FULL_NAME_FIELD, TITAN_PASSWORD_RESET_FIELD ] }
>
<FullWidthButton
className="professional-email-card__continue"
primary
busy={ addingToCart }
onClick={ onTitanConfirmNewMailboxes }
>
{ translate( 'Create your mailbox' ) }
</FullWidthButton>
</TitanNewMailboxList>
);
return <EmailProvidersStackedCard { ...professionalEmail } />;
};
export default ProfessionalEmailCard;
| {
return;
} | conditional_block |
professional-email-card.tsx | import { Gridicon } from '@automattic/components';
import { useShoppingCart } from '@automattic/shopping-cart';
import { translate } from 'i18n-calypso';
import { useState } from 'react';
import { useSelector } from 'react-redux';
import poweredByTitanLogo from 'calypso/assets/images/email-providers/titan/powered-by-titan-caps.svg';
import {
titanMailMonthly,
titanMailYearly,
TitanProductProps,
} from 'calypso/lib/cart-values/cart-items';
import {
getSelectedDomain,
canCurrentUserAddEmail,
getCurrentUserCannotAddEmailReason,
} from 'calypso/lib/domains';
import { getTitanProductName } from 'calypso/lib/titan';
import { TITAN_PROVIDER_NAME } from 'calypso/lib/titan/constants';
import {
areAllMailboxesValid,
buildNewTitanMailbox,
transformMailboxForCart,
validateMailboxes as validateTitanMailboxes,
} from 'calypso/lib/titan/new-mailbox';
import useCartKey from 'calypso/my-sites/checkout/use-cart-key';
import { IntervalLength } from 'calypso/my-sites/email/email-providers-comparison/interval-length';
import ProfessionalEmailPrice from 'calypso/my-sites/email/email-providers-comparison/price/professional-email';
import EmailProvidersStackedCard from 'calypso/my-sites/email/email-providers-stacked-comparison/email-provider-stacked-card';
import {
addToCartAndCheckout,
recordTracksEventAddToCartClick,
} from 'calypso/my-sites/email/email-providers-stacked-comparison/provider-cards/utils';
import {
TITAN_PASSWORD_RESET_FIELD,
TITAN_FULL_NAME_FIELD,
} from 'calypso/my-sites/email/titan-new-mailbox';
import TitanNewMailboxList from 'calypso/my-sites/email/titan-new-mailbox-list';
import { FullWidthButton } from 'calypso/my-sites/marketplace/components';
import { getDomainsBySiteId } from 'calypso/state/sites/domains/selectors';
import { getSelectedSite } from 'calypso/state/ui/selectors';
import type { EmailProvidersStackedCardProps, ProviderCardProps } from './provider-card-props';
import type { ReactElement } from 'react';
import './professional-email-card.scss';
// eslint-disable-next-line @typescript-eslint/no-empty-function
const noop = () => {};
const logo = <Gridicon className="professional-email-card__logo" icon="my-sites" />;
const badge = (
<img src={ poweredByTitanLogo } alt={ translate( 'Powered by Titan', { textOnly: true } ) } />
);
const getTitanFeatures = () => {
return [
translate( 'Send and receive from your custom domain' ),
translate( '30GB storage' ),
translate( 'Email, calendars, and contacts' ),
translate( '24/7 support via email' ),
];
};
const professionalEmailCardInformation: ProviderCardProps = {
className: 'professional-email-card',
expandButtonLabel: translate( 'Select' ),
providerKey: 'titan',
showExpandButton: true,
description: translate(
'Integrated email solution with powerful features. Manage your email and more on any device.'
),
logo,
productName: getTitanProductName(),
footerBadge: badge,
features: getTitanFeatures(),
};
const ProfessionalEmailCard = ( {
cartDomainName,
comparisonContext,
detailsExpanded,
intervalLength,
onExpandedChange,
selectedDomainName,
source,
}: EmailProvidersStackedCardProps ): ReactElement => {
const selectedSite = useSelector( getSelectedSite );
const domains = useSelector( ( state ) => getDomainsBySiteId( state, selectedSite?.ID ) );
const domain = getSelectedDomain( {
domains,
selectedDomainName: selectedDomainName,
} );
const cartKey = useCartKey();
const shoppingCartManager = useShoppingCart( cartKey );
const [ titanMailbox, setTitanMailbox ] = useState( [
buildNewTitanMailbox( selectedDomainName, false ),
] );
const [ addingToCart, setAddingToCart ] = useState( false );
const [ validatedTitanMailboxUuids, setValidatedTitanMailboxUuids ] = useState( [ '' ] );
const optionalFields = [ TITAN_PASSWORD_RESET_FIELD, TITAN_FULL_NAME_FIELD ];
const professionalEmail: ProviderCardProps = { ...professionalEmailCardInformation };
professionalEmail.detailsExpanded = detailsExpanded;
const hasCartDomain = Boolean( cartDomainName );
const onTitanConfirmNewMailboxes = () => {
const validatedTitanMailboxes = validateTitanMailboxes( titanMailbox, optionalFields );
const mailboxesAreValid = areAllMailboxesValid( validatedTitanMailboxes, optionalFields );
const userCanAddEmail = hasCartDomain || canCurrentUserAddEmail( domain );
const userCannotAddEmailReason = userCanAddEmail
? null
: getCurrentUserCannotAddEmailReason( domain );
const validatedMailboxUuids = validatedTitanMailboxes.map( ( mailbox ) => mailbox.uuid );
recordTracksEventAddToCartClick(
comparisonContext,
validatedMailboxUuids,
mailboxesAreValid,
TITAN_PROVIDER_NAME,
source ?? '',
userCanAddEmail,
userCannotAddEmailReason
);
setTitanMailbox( titanMailbox );
setValidatedTitanMailboxUuids( validatedMailboxUuids );
if ( ! mailboxesAreValid || ! userCanAddEmail ) {
return;
}
const props: TitanProductProps = {
domain: selectedDomainName, | };
const cartItem =
intervalLength === IntervalLength.MONTHLY
? titanMailMonthly( props )
: titanMailYearly( props );
addToCartAndCheckout(
shoppingCartManager,
cartItem,
setAddingToCart,
selectedSite?.slug ?? ''
);
};
const onTitanFormReturnKeyPress = noop;
professionalEmail.onExpandedChange = onExpandedChange;
professionalEmail.priceBadge = (
<ProfessionalEmailPrice domain={ domain } intervalLength={ intervalLength } />
);
professionalEmail.formFields = (
<TitanNewMailboxList
onMailboxesChange={ setTitanMailbox }
mailboxes={ titanMailbox }
selectedDomainName={ selectedDomainName }
onReturnKeyPress={ onTitanFormReturnKeyPress }
validatedMailboxUuids={ validatedTitanMailboxUuids }
showAddAnotherMailboxButton={ false }
hiddenFieldNames={ [ TITAN_FULL_NAME_FIELD, TITAN_PASSWORD_RESET_FIELD ] }
>
<FullWidthButton
className="professional-email-card__continue"
primary
busy={ addingToCart }
onClick={ onTitanConfirmNewMailboxes }
>
{ translate( 'Create your mailbox' ) }
</FullWidthButton>
</TitanNewMailboxList>
);
return <EmailProvidersStackedCard { ...professionalEmail } />;
};
export default ProfessionalEmailCard; | quantity: validatedTitanMailboxes.length,
extra: {
email_users: validatedTitanMailboxes.map( transformMailboxForCart ),
new_quantity: validatedTitanMailboxes.length,
}, | random_line_split |
save.py | """
Simple utils to save and load from disk.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
# TODO(rbharath): Use standard joblib once old-data has been regenerated.
import joblib
from sklearn.externals import joblib as old_joblib
import gzip
import pickle
import pandas as pd
import numpy as np
import os
from rdkit import Chem
| if verbose:
print(string)
def save_to_disk(dataset, filename, compress=3):
"""Save a dataset to file."""
joblib.dump(dataset, filename, compress=compress)
def get_input_type(input_file):
"""Get type of input file. Must be csv/pkl.gz/sdf file."""
filename, file_extension = os.path.splitext(input_file)
# If gzipped, need to compute extension again
if file_extension == ".gz":
filename, file_extension = os.path.splitext(filename)
if file_extension == ".csv":
return "csv"
elif file_extension == ".pkl":
return "pandas-pickle"
elif file_extension == ".joblib":
return "pandas-joblib"
elif file_extension == ".sdf":
return "sdf"
else:
raise ValueError("Unrecognized extension %s" % file_extension)
def load_data(input_files, shard_size=None, verbose=True):
"""Loads data from disk.
For CSV files, supports sharded loading for large files.
"""
if not len(input_files):
return
input_type = get_input_type(input_files[0])
if input_type == "sdf":
if shard_size is not None:
log("Ignoring shard_size for sdf input.", verbose)
for value in load_sdf_files(input_files):
yield value
elif input_type == "csv":
for value in load_csv_files(input_files, shard_size, verbose=verbose):
yield value
elif input_type == "pandas-pickle":
for input_file in input_files:
yield load_pickle_from_disk(input_file)
def load_sdf_files(input_files):
"""Load SDF file into dataframe."""
dataframes = []
for input_file in input_files:
# Tasks are stored in .sdf.csv file
raw_df = next(load_csv_files([input_file+".csv"], shard_size=None))
# Structures are stored in .sdf file
print("Reading structures from %s." % input_file)
suppl = Chem.SDMolSupplier(str(input_file), False, False, False)
df_rows = []
for ind, mol in enumerate(suppl):
if mol is not None:
smiles = Chem.MolToSmiles(mol)
df_rows.append([ind,smiles,mol])
mol_df = pd.DataFrame(df_rows, columns=('mol_id', 'smiles', 'mol'))
dataframes.append(pd.concat([mol_df, raw_df], axis=1, join='inner'))
return dataframes
def load_csv_files(filenames, shard_size=None, verbose=True):
"""Load data as pandas dataframe."""
# First line of user-specified CSV *must* be header.
shard_num = 1
for filename in filenames:
if shard_size is None:
yield pd.read_csv(filename)
else:
log("About to start loading CSV from %s" % filename, verbose)
for df in pd.read_csv(filename, chunksize=shard_size):
log("Loading shard %d of size %s." % (shard_num, str(shard_size)),
verbose)
df = df.replace(np.nan, str(""), regex=True)
shard_num += 1
yield df
def load_from_disk(filename):
"""Load a dataset from file."""
name = filename
if os.path.splitext(name)[1] == ".gz":
name = os.path.splitext(name)[0]
if os.path.splitext(name)[1] == ".pkl":
return load_pickle_from_disk(filename)
elif os.path.splitext(name)[1] == ".joblib":
try:
return joblib.load(filename)
except KeyError:
# Try older joblib version for legacy files.
return old_joblib.load(filename)
except ValueError:
return old_joblib.load(filename)
elif os.path.splitext(name)[1] == ".csv":
# First line of user-specified CSV *must* be header.
df = pd.read_csv(filename, header=0)
df = df.replace(np.nan, str(""), regex=True)
return df
else:
raise ValueError("Unrecognized filetype for %s" % filename)
def load_sharded_csv(filenames):
"""Load a dataset from multiple files. Each file MUST have same column headers"""
dataframes = []
for name in filenames:
placeholder_name = name
if os.path.splitext(name)[1] == ".gz":
name = os.path.splitext(name)[0]
if os.path.splitext(name)[1] == ".csv":
# First line of user-specified CSV *must* be header.
df = pd.read_csv(placeholder_name, header=0)
df = df.replace(np.nan, str(""), regex=True)
dataframes.append(df)
else:
raise ValueError("Unrecognized filetype for %s" % filename)
#combine dataframes
combined_df = dataframes[0]
for i in range(0, len(dataframes) - 1):
combined_df = combined_df.append(dataframes[i+1])
combined_df = combined_df.reset_index(drop=True)
return combined_df
def load_pickle_from_disk(filename):
"""Load dataset from pickle file."""
if ".gz" in filename:
with gzip.open(filename, "rb") as f:
df = pickle.load(f)
else:
with open(filename, "rb") as f:
df = pickle.load(f)
return df | def log(string, verbose=True):
"""Print string if verbose.""" | random_line_split |
save.py | """
Simple utils to save and load from disk.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
# TODO(rbharath): Use standard joblib once old-data has been regenerated.
import joblib
from sklearn.externals import joblib as old_joblib
import gzip
import pickle
import pandas as pd
import numpy as np
import os
from rdkit import Chem
def log(string, verbose=True):
"""Print string if verbose."""
if verbose:
print(string)
def save_to_disk(dataset, filename, compress=3):
"""Save a dataset to file."""
joblib.dump(dataset, filename, compress=compress)
def get_input_type(input_file):
"""Get type of input file. Must be csv/pkl.gz/sdf file."""
filename, file_extension = os.path.splitext(input_file)
# If gzipped, need to compute extension again
if file_extension == ".gz":
filename, file_extension = os.path.splitext(filename)
if file_extension == ".csv":
return "csv"
elif file_extension == ".pkl":
return "pandas-pickle"
elif file_extension == ".joblib":
return "pandas-joblib"
elif file_extension == ".sdf":
return "sdf"
else:
raise ValueError("Unrecognized extension %s" % file_extension)
def load_data(input_files, shard_size=None, verbose=True):
"""Loads data from disk.
For CSV files, supports sharded loading for large files.
"""
if not len(input_files):
return
input_type = get_input_type(input_files[0])
if input_type == "sdf":
if shard_size is not None:
log("Ignoring shard_size for sdf input.", verbose)
for value in load_sdf_files(input_files):
yield value
elif input_type == "csv":
for value in load_csv_files(input_files, shard_size, verbose=verbose):
yield value
elif input_type == "pandas-pickle":
for input_file in input_files:
yield load_pickle_from_disk(input_file)
def load_sdf_files(input_files):
"""Load SDF file into dataframe."""
dataframes = []
for input_file in input_files:
# Tasks are stored in .sdf.csv file
raw_df = next(load_csv_files([input_file+".csv"], shard_size=None))
# Structures are stored in .sdf file
print("Reading structures from %s." % input_file)
suppl = Chem.SDMolSupplier(str(input_file), False, False, False)
df_rows = []
for ind, mol in enumerate(suppl):
if mol is not None:
smiles = Chem.MolToSmiles(mol)
df_rows.append([ind,smiles,mol])
mol_df = pd.DataFrame(df_rows, columns=('mol_id', 'smiles', 'mol'))
dataframes.append(pd.concat([mol_df, raw_df], axis=1, join='inner'))
return dataframes
def load_csv_files(filenames, shard_size=None, verbose=True):
"""Load data as pandas dataframe."""
# First line of user-specified CSV *must* be header.
shard_num = 1
for filename in filenames:
if shard_size is None:
yield pd.read_csv(filename)
else:
|
def load_from_disk(filename):
"""Load a dataset from file."""
name = filename
if os.path.splitext(name)[1] == ".gz":
name = os.path.splitext(name)[0]
if os.path.splitext(name)[1] == ".pkl":
return load_pickle_from_disk(filename)
elif os.path.splitext(name)[1] == ".joblib":
try:
return joblib.load(filename)
except KeyError:
# Try older joblib version for legacy files.
return old_joblib.load(filename)
except ValueError:
return old_joblib.load(filename)
elif os.path.splitext(name)[1] == ".csv":
# First line of user-specified CSV *must* be header.
df = pd.read_csv(filename, header=0)
df = df.replace(np.nan, str(""), regex=True)
return df
else:
raise ValueError("Unrecognized filetype for %s" % filename)
def load_sharded_csv(filenames):
"""Load a dataset from multiple files. Each file MUST have same column headers"""
dataframes = []
for name in filenames:
placeholder_name = name
if os.path.splitext(name)[1] == ".gz":
name = os.path.splitext(name)[0]
if os.path.splitext(name)[1] == ".csv":
# First line of user-specified CSV *must* be header.
df = pd.read_csv(placeholder_name, header=0)
df = df.replace(np.nan, str(""), regex=True)
dataframes.append(df)
else:
raise ValueError("Unrecognized filetype for %s" % filename)
#combine dataframes
combined_df = dataframes[0]
for i in range(0, len(dataframes) - 1):
combined_df = combined_df.append(dataframes[i+1])
combined_df = combined_df.reset_index(drop=True)
return combined_df
def load_pickle_from_disk(filename):
"""Load dataset from pickle file."""
if ".gz" in filename:
with gzip.open(filename, "rb") as f:
df = pickle.load(f)
else:
with open(filename, "rb") as f:
df = pickle.load(f)
return df
| log("About to start loading CSV from %s" % filename, verbose)
for df in pd.read_csv(filename, chunksize=shard_size):
log("Loading shard %d of size %s." % (shard_num, str(shard_size)),
verbose)
df = df.replace(np.nan, str(""), regex=True)
shard_num += 1
yield df | conditional_block |
save.py | """
Simple utils to save and load from disk.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
# TODO(rbharath): Use standard joblib once old-data has been regenerated.
import joblib
from sklearn.externals import joblib as old_joblib
import gzip
import pickle
import pandas as pd
import numpy as np
import os
from rdkit import Chem
def log(string, verbose=True):
"""Print string if verbose."""
if verbose:
print(string)
def save_to_disk(dataset, filename, compress=3):
"""Save a dataset to file."""
joblib.dump(dataset, filename, compress=compress)
def get_input_type(input_file):
|
def load_data(input_files, shard_size=None, verbose=True):
"""Loads data from disk.
For CSV files, supports sharded loading for large files.
"""
if not len(input_files):
return
input_type = get_input_type(input_files[0])
if input_type == "sdf":
if shard_size is not None:
log("Ignoring shard_size for sdf input.", verbose)
for value in load_sdf_files(input_files):
yield value
elif input_type == "csv":
for value in load_csv_files(input_files, shard_size, verbose=verbose):
yield value
elif input_type == "pandas-pickle":
for input_file in input_files:
yield load_pickle_from_disk(input_file)
def load_sdf_files(input_files):
"""Load SDF file into dataframe."""
dataframes = []
for input_file in input_files:
# Tasks are stored in .sdf.csv file
raw_df = next(load_csv_files([input_file+".csv"], shard_size=None))
# Structures are stored in .sdf file
print("Reading structures from %s." % input_file)
suppl = Chem.SDMolSupplier(str(input_file), False, False, False)
df_rows = []
for ind, mol in enumerate(suppl):
if mol is not None:
smiles = Chem.MolToSmiles(mol)
df_rows.append([ind,smiles,mol])
mol_df = pd.DataFrame(df_rows, columns=('mol_id', 'smiles', 'mol'))
dataframes.append(pd.concat([mol_df, raw_df], axis=1, join='inner'))
return dataframes
def load_csv_files(filenames, shard_size=None, verbose=True):
"""Load data as pandas dataframe."""
# First line of user-specified CSV *must* be header.
shard_num = 1
for filename in filenames:
if shard_size is None:
yield pd.read_csv(filename)
else:
log("About to start loading CSV from %s" % filename, verbose)
for df in pd.read_csv(filename, chunksize=shard_size):
log("Loading shard %d of size %s." % (shard_num, str(shard_size)),
verbose)
df = df.replace(np.nan, str(""), regex=True)
shard_num += 1
yield df
def load_from_disk(filename):
"""Load a dataset from file."""
name = filename
if os.path.splitext(name)[1] == ".gz":
name = os.path.splitext(name)[0]
if os.path.splitext(name)[1] == ".pkl":
return load_pickle_from_disk(filename)
elif os.path.splitext(name)[1] == ".joblib":
try:
return joblib.load(filename)
except KeyError:
# Try older joblib version for legacy files.
return old_joblib.load(filename)
except ValueError:
return old_joblib.load(filename)
elif os.path.splitext(name)[1] == ".csv":
# First line of user-specified CSV *must* be header.
df = pd.read_csv(filename, header=0)
df = df.replace(np.nan, str(""), regex=True)
return df
else:
raise ValueError("Unrecognized filetype for %s" % filename)
def load_sharded_csv(filenames):
"""Load a dataset from multiple files. Each file MUST have same column headers"""
dataframes = []
for name in filenames:
placeholder_name = name
if os.path.splitext(name)[1] == ".gz":
name = os.path.splitext(name)[0]
if os.path.splitext(name)[1] == ".csv":
# First line of user-specified CSV *must* be header.
df = pd.read_csv(placeholder_name, header=0)
df = df.replace(np.nan, str(""), regex=True)
dataframes.append(df)
else:
raise ValueError("Unrecognized filetype for %s" % filename)
#combine dataframes
combined_df = dataframes[0]
for i in range(0, len(dataframes) - 1):
combined_df = combined_df.append(dataframes[i+1])
combined_df = combined_df.reset_index(drop=True)
return combined_df
def load_pickle_from_disk(filename):
"""Load dataset from pickle file."""
if ".gz" in filename:
with gzip.open(filename, "rb") as f:
df = pickle.load(f)
else:
with open(filename, "rb") as f:
df = pickle.load(f)
return df
| """Get type of input file. Must be csv/pkl.gz/sdf file."""
filename, file_extension = os.path.splitext(input_file)
# If gzipped, need to compute extension again
if file_extension == ".gz":
filename, file_extension = os.path.splitext(filename)
if file_extension == ".csv":
return "csv"
elif file_extension == ".pkl":
return "pandas-pickle"
elif file_extension == ".joblib":
return "pandas-joblib"
elif file_extension == ".sdf":
return "sdf"
else:
raise ValueError("Unrecognized extension %s" % file_extension) | identifier_body |
save.py | """
Simple utils to save and load from disk.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
# TODO(rbharath): Use standard joblib once old-data has been regenerated.
import joblib
from sklearn.externals import joblib as old_joblib
import gzip
import pickle
import pandas as pd
import numpy as np
import os
from rdkit import Chem
def log(string, verbose=True):
"""Print string if verbose."""
if verbose:
print(string)
def | (dataset, filename, compress=3):
"""Save a dataset to file."""
joblib.dump(dataset, filename, compress=compress)
def get_input_type(input_file):
"""Get type of input file. Must be csv/pkl.gz/sdf file."""
filename, file_extension = os.path.splitext(input_file)
# If gzipped, need to compute extension again
if file_extension == ".gz":
filename, file_extension = os.path.splitext(filename)
if file_extension == ".csv":
return "csv"
elif file_extension == ".pkl":
return "pandas-pickle"
elif file_extension == ".joblib":
return "pandas-joblib"
elif file_extension == ".sdf":
return "sdf"
else:
raise ValueError("Unrecognized extension %s" % file_extension)
def load_data(input_files, shard_size=None, verbose=True):
"""Loads data from disk.
For CSV files, supports sharded loading for large files.
"""
if not len(input_files):
return
input_type = get_input_type(input_files[0])
if input_type == "sdf":
if shard_size is not None:
log("Ignoring shard_size for sdf input.", verbose)
for value in load_sdf_files(input_files):
yield value
elif input_type == "csv":
for value in load_csv_files(input_files, shard_size, verbose=verbose):
yield value
elif input_type == "pandas-pickle":
for input_file in input_files:
yield load_pickle_from_disk(input_file)
def load_sdf_files(input_files):
"""Load SDF file into dataframe."""
dataframes = []
for input_file in input_files:
# Tasks are stored in .sdf.csv file
raw_df = next(load_csv_files([input_file+".csv"], shard_size=None))
# Structures are stored in .sdf file
print("Reading structures from %s." % input_file)
suppl = Chem.SDMolSupplier(str(input_file), False, False, False)
df_rows = []
for ind, mol in enumerate(suppl):
if mol is not None:
smiles = Chem.MolToSmiles(mol)
df_rows.append([ind,smiles,mol])
mol_df = pd.DataFrame(df_rows, columns=('mol_id', 'smiles', 'mol'))
dataframes.append(pd.concat([mol_df, raw_df], axis=1, join='inner'))
return dataframes
def load_csv_files(filenames, shard_size=None, verbose=True):
"""Load data as pandas dataframe."""
# First line of user-specified CSV *must* be header.
shard_num = 1
for filename in filenames:
if shard_size is None:
yield pd.read_csv(filename)
else:
log("About to start loading CSV from %s" % filename, verbose)
for df in pd.read_csv(filename, chunksize=shard_size):
log("Loading shard %d of size %s." % (shard_num, str(shard_size)),
verbose)
df = df.replace(np.nan, str(""), regex=True)
shard_num += 1
yield df
def load_from_disk(filename):
"""Load a dataset from file."""
name = filename
if os.path.splitext(name)[1] == ".gz":
name = os.path.splitext(name)[0]
if os.path.splitext(name)[1] == ".pkl":
return load_pickle_from_disk(filename)
elif os.path.splitext(name)[1] == ".joblib":
try:
return joblib.load(filename)
except KeyError:
# Try older joblib version for legacy files.
return old_joblib.load(filename)
except ValueError:
return old_joblib.load(filename)
elif os.path.splitext(name)[1] == ".csv":
# First line of user-specified CSV *must* be header.
df = pd.read_csv(filename, header=0)
df = df.replace(np.nan, str(""), regex=True)
return df
else:
raise ValueError("Unrecognized filetype for %s" % filename)
def load_sharded_csv(filenames):
"""Load a dataset from multiple files. Each file MUST have same column headers"""
dataframes = []
for name in filenames:
placeholder_name = name
if os.path.splitext(name)[1] == ".gz":
name = os.path.splitext(name)[0]
if os.path.splitext(name)[1] == ".csv":
# First line of user-specified CSV *must* be header.
df = pd.read_csv(placeholder_name, header=0)
df = df.replace(np.nan, str(""), regex=True)
dataframes.append(df)
else:
raise ValueError("Unrecognized filetype for %s" % filename)
#combine dataframes
combined_df = dataframes[0]
for i in range(0, len(dataframes) - 1):
combined_df = combined_df.append(dataframes[i+1])
combined_df = combined_df.reset_index(drop=True)
return combined_df
def load_pickle_from_disk(filename):
"""Load dataset from pickle file."""
if ".gz" in filename:
with gzip.open(filename, "rb") as f:
df = pickle.load(f)
else:
with open(filename, "rb") as f:
df = pickle.load(f)
return df
| save_to_disk | identifier_name |
app.component.ts | import { Component,OnChanges,HostListener } from '@angular/core';
import '../js/test.js';
const possible = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
@Component({
selector: 'app-root',
templateUrl: './app.component.html'
})
export class AppComponent {
text = '';
rText = '';
key;
oldLength = 10;
length = 10;
constructor(){ | handleKeyboardEvent(event: KeyboardEvent) {
this.key = event.key;
if ((event.keyCode ? event.keyCode : event.which) == 13) { //Enter keycode
this.generate();
}
}
ngDoCheck(){
if(this.oldLength!=this.length){
this.oldLength = this.length;
if(this.length==42){
console.log(':)');
}
}
this.rText = this.text.split("").reverse().join("").substring(0,this.length)+(this.text.length>this.length?"...":"");
}
generate(){
this.text = '';
for( var i=0; i < this.length; i++ )
this.text += possible.charAt(Math.floor(Math.random() * possible.length));
}
} | this.generate();
}
@HostListener('document:keypress', ['$event']) | random_line_split |
app.component.ts | import { Component,OnChanges,HostListener } from '@angular/core';
import '../js/test.js';
const possible = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
@Component({
selector: 'app-root',
templateUrl: './app.component.html'
})
export class AppComponent {
text = '';
rText = '';
key;
oldLength = 10;
length = 10;
constructor(){
this.generate();
}
@HostListener('document:keypress', ['$event'])
handleKeyboardEvent(event: KeyboardEvent) |
ngDoCheck(){
if(this.oldLength!=this.length){
this.oldLength = this.length;
if(this.length==42){
console.log(':)');
}
}
this.rText = this.text.split("").reverse().join("").substring(0,this.length)+(this.text.length>this.length?"...":"");
}
generate(){
this.text = '';
for( var i=0; i < this.length; i++ )
this.text += possible.charAt(Math.floor(Math.random() * possible.length));
}
} | {
this.key = event.key;
if ((event.keyCode ? event.keyCode : event.which) == 13) { //Enter keycode
this.generate();
}
} | identifier_body |
app.component.ts | import { Component,OnChanges,HostListener } from '@angular/core';
import '../js/test.js';
const possible = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
@Component({
selector: 'app-root',
templateUrl: './app.component.html'
})
export class AppComponent {
text = '';
rText = '';
key;
oldLength = 10;
length = 10;
constructor(){
this.generate();
}
@HostListener('document:keypress', ['$event'])
handleKeyboardEvent(event: KeyboardEvent) {
this.key = event.key;
if ((event.keyCode ? event.keyCode : event.which) == 13) { //Enter keycode
this.generate();
}
}
ngDoCheck(){
if(this.oldLength!=this.length){
this.oldLength = this.length;
if(this.length==42){
console.log(':)');
}
}
this.rText = this.text.split("").reverse().join("").substring(0,this.length)+(this.text.length>this.length?"...":"");
}
| (){
this.text = '';
for( var i=0; i < this.length; i++ )
this.text += possible.charAt(Math.floor(Math.random() * possible.length));
}
} | generate | identifier_name |
configure.rs | // Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Configuration interface used by the supervisor to change service configuration.
use exonum::runtime::{CommonError, ExecutionContext, ExecutionError, InstanceId, MethodId};
use exonum_merkledb::BinaryValue;
use exonum_rust_runtime::{GenericCallMut, Interface, MethodDescriptor};
/// Fully qualified name of the [`Configure`] interface.
///
/// [`Configure`]: trait.Configure.html
pub const CONFIGURE_INTERFACE_NAME: &str = "exonum.Configure";
/// Identifier of the [`Configure::verify_config`] method.
///
/// [`Configure::verify_config`]: trait.Configure.html#tymethod.verify_config
const VERIFY_CONFIG_METHOD_ID: MethodId = 0;
/// Identifier of the [`Configure::apply_config`] method.
///
/// [`Configure::apply_config`]: trait.Configure.html#tymethod.apply_config
const APPLY_CONFIG_METHOD_ID: MethodId = 1;
/// Describes a procedure for updating the configuration of a service instance.
pub trait Configure {
/// The specific type of parameters passed during the service instance configuration.
type Params: BinaryValue;
/// Verify a new configuration parameters before their actual application.
///
/// This method is called by the new configuration change proposal. If the proposed
/// parameters do not fit for this service instance, it should return a corresponding
/// error to discard this proposal. Thus only a configuration change proposal in which all
/// changes are correct can be applied later.
///
/// The proposal approval process details, and even the configuration proposal format, depends
/// on the particular runtime implementation.
///
/// # Execution policy
///
/// At the moment, this method can only be called on behalf of the supervisor service instance.
/// In other words, only a method with numeric ID 0 can call this method.
fn verify_config(
&self,
context: ExecutionContext<'_>,
params: Self::Params,
) -> Result<(), ExecutionError>;
/// Update service configuration with the given parameters.
///
/// The configuration parameters passed to the method are discarded immediately.
/// So the service instance should save them by itself if it is important for
/// the service business logic.
///
/// This method can be triggered at any time and does not follow the general transaction
/// execution workflow, so the errors returned might be ignored.
///
/// # Execution policy
///
/// At the moment, this method can only be called on behalf of the supervisor service instance.
/// In other words, only a method with numeric ID 0 can call this method.
fn apply_config(
&self,
context: ExecutionContext<'_>,
params: Self::Params,
) -> Result<(), ExecutionError>;
}
impl<'a, T: BinaryValue> Interface<'a> for dyn Configure<Params = T> {
const INTERFACE_NAME: &'static str = CONFIGURE_INTERFACE_NAME;
fn dispatch(
&self,
context: ExecutionContext<'a>,
method: MethodId,
payload: &[u8],
) -> Result<(), ExecutionError> {
match method {
VERIFY_CONFIG_METHOD_ID => {
let params =
T::from_bytes(payload.into()).map_err(CommonError::malformed_arguments)?;
self.verify_config(context, params)
}
APPLY_CONFIG_METHOD_ID => {
let params =
T::from_bytes(payload.into()).map_err(CommonError::malformed_arguments)?;
self.apply_config(context, params)
}
_ => Err(CommonError::NoSuchMethod.into()),
}
}
}
// Makeshift replacement for generic stubbing, which is made difficult by the existence
// of the type param.
pub(crate) trait ConfigureMut<Ctx> {
type Output;
fn verify_config(&mut self, context: Ctx, params: Vec<u8>) -> Self::Output;
fn apply_config(&mut self, context: Ctx, params: Vec<u8>) -> Self::Output;
}
impl ConfigureMut<InstanceId> for ExecutionContext<'_> {
type Output = Result<(), ExecutionError>;
fn | (&mut self, instance_id: InstanceId, params: Vec<u8>) -> Self::Output {
const METHOD_DESCRIPTOR: MethodDescriptor<'static> =
MethodDescriptor::new(CONFIGURE_INTERFACE_NAME, 0);
self.generic_call_mut(instance_id, METHOD_DESCRIPTOR, params)
}
fn apply_config(&mut self, instance_id: InstanceId, params: Vec<u8>) -> Self::Output {
const METHOD_DESCRIPTOR: MethodDescriptor<'static> =
MethodDescriptor::new(CONFIGURE_INTERFACE_NAME, 1);
self.generic_call_mut(instance_id, METHOD_DESCRIPTOR, params)
}
}
| verify_config | identifier_name |
configure.rs | // Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Configuration interface used by the supervisor to change service configuration.
use exonum::runtime::{CommonError, ExecutionContext, ExecutionError, InstanceId, MethodId};
use exonum_merkledb::BinaryValue;
use exonum_rust_runtime::{GenericCallMut, Interface, MethodDescriptor};
/// Fully qualified name of the [`Configure`] interface.
///
/// [`Configure`]: trait.Configure.html
pub const CONFIGURE_INTERFACE_NAME: &str = "exonum.Configure";
/// Identifier of the [`Configure::verify_config`] method.
///
/// [`Configure::verify_config`]: trait.Configure.html#tymethod.verify_config
const VERIFY_CONFIG_METHOD_ID: MethodId = 0;
/// Identifier of the [`Configure::apply_config`] method.
///
/// [`Configure::apply_config`]: trait.Configure.html#tymethod.apply_config
const APPLY_CONFIG_METHOD_ID: MethodId = 1;
/// Describes a procedure for updating the configuration of a service instance.
pub trait Configure {
/// The specific type of parameters passed during the service instance configuration.
type Params: BinaryValue;
/// Verify a new configuration parameters before their actual application.
///
/// This method is called by the new configuration change proposal. If the proposed
/// parameters do not fit for this service instance, it should return a corresponding
/// error to discard this proposal. Thus only a configuration change proposal in which all
/// changes are correct can be applied later.
///
/// The proposal approval process details, and even the configuration proposal format, depends
/// on the particular runtime implementation.
///
/// # Execution policy
///
/// At the moment, this method can only be called on behalf of the supervisor service instance.
/// In other words, only a method with numeric ID 0 can call this method.
fn verify_config(
&self,
context: ExecutionContext<'_>,
params: Self::Params,
) -> Result<(), ExecutionError>;
/// Update service configuration with the given parameters.
///
/// The configuration parameters passed to the method are discarded immediately.
/// So the service instance should save them by itself if it is important for
/// the service business logic.
///
/// This method can be triggered at any time and does not follow the general transaction
/// execution workflow, so the errors returned might be ignored.
///
/// # Execution policy
///
/// At the moment, this method can only be called on behalf of the supervisor service instance.
/// In other words, only a method with numeric ID 0 can call this method.
fn apply_config(
&self,
context: ExecutionContext<'_>,
params: Self::Params,
) -> Result<(), ExecutionError>;
}
impl<'a, T: BinaryValue> Interface<'a> for dyn Configure<Params = T> {
const INTERFACE_NAME: &'static str = CONFIGURE_INTERFACE_NAME;
fn dispatch(
&self,
context: ExecutionContext<'a>,
method: MethodId,
payload: &[u8],
) -> Result<(), ExecutionError> {
match method {
VERIFY_CONFIG_METHOD_ID => {
let params =
T::from_bytes(payload.into()).map_err(CommonError::malformed_arguments)?;
self.verify_config(context, params)
}
APPLY_CONFIG_METHOD_ID => {
let params =
T::from_bytes(payload.into()).map_err(CommonError::malformed_arguments)?;
self.apply_config(context, params)
}
_ => Err(CommonError::NoSuchMethod.into()),
}
}
}
// Makeshift replacement for generic stubbing, which is made difficult by the existence
// of the type param.
pub(crate) trait ConfigureMut<Ctx> {
type Output;
fn verify_config(&mut self, context: Ctx, params: Vec<u8>) -> Self::Output;
fn apply_config(&mut self, context: Ctx, params: Vec<u8>) -> Self::Output;
}
impl ConfigureMut<InstanceId> for ExecutionContext<'_> {
type Output = Result<(), ExecutionError>;
fn verify_config(&mut self, instance_id: InstanceId, params: Vec<u8>) -> Self::Output {
const METHOD_DESCRIPTOR: MethodDescriptor<'static> =
MethodDescriptor::new(CONFIGURE_INTERFACE_NAME, 0);
self.generic_call_mut(instance_id, METHOD_DESCRIPTOR, params)
}
fn apply_config(&mut self, instance_id: InstanceId, params: Vec<u8>) -> Self::Output |
}
| {
const METHOD_DESCRIPTOR: MethodDescriptor<'static> =
MethodDescriptor::new(CONFIGURE_INTERFACE_NAME, 1);
self.generic_call_mut(instance_id, METHOD_DESCRIPTOR, params)
} | identifier_body |
configure.rs | // Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Configuration interface used by the supervisor to change service configuration.
use exonum::runtime::{CommonError, ExecutionContext, ExecutionError, InstanceId, MethodId};
use exonum_merkledb::BinaryValue;
use exonum_rust_runtime::{GenericCallMut, Interface, MethodDescriptor};
/// Fully qualified name of the [`Configure`] interface.
///
/// [`Configure`]: trait.Configure.html
pub const CONFIGURE_INTERFACE_NAME: &str = "exonum.Configure";
/// Identifier of the [`Configure::verify_config`] method.
///
/// [`Configure::verify_config`]: trait.Configure.html#tymethod.verify_config
const VERIFY_CONFIG_METHOD_ID: MethodId = 0;
/// Identifier of the [`Configure::apply_config`] method.
///
/// [`Configure::apply_config`]: trait.Configure.html#tymethod.apply_config
const APPLY_CONFIG_METHOD_ID: MethodId = 1;
/// Describes a procedure for updating the configuration of a service instance.
pub trait Configure {
/// The specific type of parameters passed during the service instance configuration.
type Params: BinaryValue;
/// Verify a new configuration parameters before their actual application.
///
/// This method is called by the new configuration change proposal. If the proposed
/// parameters do not fit for this service instance, it should return a corresponding
/// error to discard this proposal. Thus only a configuration change proposal in which all
/// changes are correct can be applied later.
///
/// The proposal approval process details, and even the configuration proposal format, depends
/// on the particular runtime implementation.
///
/// # Execution policy
///
/// At the moment, this method can only be called on behalf of the supervisor service instance.
/// In other words, only a method with numeric ID 0 can call this method.
fn verify_config(
&self,
context: ExecutionContext<'_>,
params: Self::Params,
) -> Result<(), ExecutionError>;
/// Update service configuration with the given parameters.
///
/// The configuration parameters passed to the method are discarded immediately. | /// So the service instance should save them by itself if it is important for
/// the service business logic.
///
/// This method can be triggered at any time and does not follow the general transaction
/// execution workflow, so the errors returned might be ignored.
///
/// # Execution policy
///
/// At the moment, this method can only be called on behalf of the supervisor service instance.
/// In other words, only a method with numeric ID 0 can call this method.
fn apply_config(
&self,
context: ExecutionContext<'_>,
params: Self::Params,
) -> Result<(), ExecutionError>;
}
impl<'a, T: BinaryValue> Interface<'a> for dyn Configure<Params = T> {
const INTERFACE_NAME: &'static str = CONFIGURE_INTERFACE_NAME;
fn dispatch(
&self,
context: ExecutionContext<'a>,
method: MethodId,
payload: &[u8],
) -> Result<(), ExecutionError> {
match method {
VERIFY_CONFIG_METHOD_ID => {
let params =
T::from_bytes(payload.into()).map_err(CommonError::malformed_arguments)?;
self.verify_config(context, params)
}
APPLY_CONFIG_METHOD_ID => {
let params =
T::from_bytes(payload.into()).map_err(CommonError::malformed_arguments)?;
self.apply_config(context, params)
}
_ => Err(CommonError::NoSuchMethod.into()),
}
}
}
// Makeshift replacement for generic stubbing, which is made difficult by the existence
// of the type param.
pub(crate) trait ConfigureMut<Ctx> {
type Output;
fn verify_config(&mut self, context: Ctx, params: Vec<u8>) -> Self::Output;
fn apply_config(&mut self, context: Ctx, params: Vec<u8>) -> Self::Output;
}
impl ConfigureMut<InstanceId> for ExecutionContext<'_> {
type Output = Result<(), ExecutionError>;
fn verify_config(&mut self, instance_id: InstanceId, params: Vec<u8>) -> Self::Output {
const METHOD_DESCRIPTOR: MethodDescriptor<'static> =
MethodDescriptor::new(CONFIGURE_INTERFACE_NAME, 0);
self.generic_call_mut(instance_id, METHOD_DESCRIPTOR, params)
}
fn apply_config(&mut self, instance_id: InstanceId, params: Vec<u8>) -> Self::Output {
const METHOD_DESCRIPTOR: MethodDescriptor<'static> =
MethodDescriptor::new(CONFIGURE_INTERFACE_NAME, 1);
self.generic_call_mut(instance_id, METHOD_DESCRIPTOR, params)
}
} | random_line_split | |
empty-state.component.ts | import {
Component,
DoCheck,
EventEmitter,
Input,
OnInit,
Output,
ViewEncapsulation
} from '@angular/core';
import { cloneDeep, defaults, isEqual } from 'lodash';
import { Action } from '../action/action';
import { EmptyStateConfig } from './empty-state-config';
/**
* Component for rendering an empty state.
*
* Usage:
* <code><pre>
* // Individual module import
* import { EmptyStateModule } from 'patternfly-ng/empty-state';
* // Or
* import { EmptyStateModule } from 'patternfly-ng';
*
* @NgModule({
* imports: [EmptyStateModule,...]
* })
* export class AppModule(){}
* </pre></code>
*
* Optional:
* <code><pre>
* import { EmptyStateConfig } from 'patternfly-ng/empty-state';
* </pre></code>
*/
@Component({
encapsulation: ViewEncapsulation.None,
selector: 'pfng-empty-state',
templateUrl: './empty-state.component.html'
})
export class EmptyStateComponent implements DoCheck, OnInit {
/**
* The empty state config containing component properties
*/
@Input() config: EmptyStateConfig;
/**
* The event emitted when an action is selected
*/
@Output('onActionSelect') onActionSelect = new EventEmitter();
private defaultConfig = {
title: 'No Items Available'
} as EmptyStateConfig;
private prevConfig: EmptyStateConfig;
/**
* The default constructor
*/
constructor() {
}
// Initialization
/**
* Setup component configuration upon initialization
*/
ngOnInit(): void {
this.setupConfig();
}
/** | // Do a deep compare on config
if (!isEqual(this.config, this.prevConfig)) {
this.setupConfig();
}
}
/**
* Set up default config
*/
protected setupConfig(): void {
if (this.config !== undefined) {
defaults(this.config, this.defaultConfig);
} else {
this.config = cloneDeep(this.defaultConfig);
}
this.prevConfig = cloneDeep(this.config);
}
// Private
private handleAction(action: Action): void {
if (action && action.disabled !== true) {
this.onActionSelect.emit(action);
}
}
} | * Check if the component config has changed
*/
ngDoCheck(): void { | random_line_split |
empty-state.component.ts | import {
Component,
DoCheck,
EventEmitter,
Input,
OnInit,
Output,
ViewEncapsulation
} from '@angular/core';
import { cloneDeep, defaults, isEqual } from 'lodash';
import { Action } from '../action/action';
import { EmptyStateConfig } from './empty-state-config';
/**
* Component for rendering an empty state.
*
* Usage:
* <code><pre>
* // Individual module import
* import { EmptyStateModule } from 'patternfly-ng/empty-state';
* // Or
* import { EmptyStateModule } from 'patternfly-ng';
*
* @NgModule({
* imports: [EmptyStateModule,...]
* })
* export class AppModule(){}
* </pre></code>
*
* Optional:
* <code><pre>
* import { EmptyStateConfig } from 'patternfly-ng/empty-state';
* </pre></code>
*/
@Component({
encapsulation: ViewEncapsulation.None,
selector: 'pfng-empty-state',
templateUrl: './empty-state.component.html'
})
export class EmptyStateComponent implements DoCheck, OnInit {
/**
* The empty state config containing component properties
*/
@Input() config: EmptyStateConfig;
/**
* The event emitted when an action is selected
*/
@Output('onActionSelect') onActionSelect = new EventEmitter();
private defaultConfig = {
title: 'No Items Available'
} as EmptyStateConfig;
private prevConfig: EmptyStateConfig;
/**
* The default constructor
*/
constructor() {
}
// Initialization
/**
* Setup component configuration upon initialization
*/
ngOnInit(): void {
this.setupConfig();
}
/**
* Check if the component config has changed
*/
ngDoCheck(): void |
/**
* Set up default config
*/
protected setupConfig(): void {
if (this.config !== undefined) {
defaults(this.config, this.defaultConfig);
} else {
this.config = cloneDeep(this.defaultConfig);
}
this.prevConfig = cloneDeep(this.config);
}
// Private
private handleAction(action: Action): void {
if (action && action.disabled !== true) {
this.onActionSelect.emit(action);
}
}
}
| {
// Do a deep compare on config
if (!isEqual(this.config, this.prevConfig)) {
this.setupConfig();
}
} | identifier_body |
empty-state.component.ts | import {
Component,
DoCheck,
EventEmitter,
Input,
OnInit,
Output,
ViewEncapsulation
} from '@angular/core';
import { cloneDeep, defaults, isEqual } from 'lodash';
import { Action } from '../action/action';
import { EmptyStateConfig } from './empty-state-config';
/**
* Component for rendering an empty state.
*
* Usage:
* <code><pre>
* // Individual module import
* import { EmptyStateModule } from 'patternfly-ng/empty-state';
* // Or
* import { EmptyStateModule } from 'patternfly-ng';
*
* @NgModule({
* imports: [EmptyStateModule,...]
* })
* export class AppModule(){}
* </pre></code>
*
* Optional:
* <code><pre>
* import { EmptyStateConfig } from 'patternfly-ng/empty-state';
* </pre></code>
*/
@Component({
encapsulation: ViewEncapsulation.None,
selector: 'pfng-empty-state',
templateUrl: './empty-state.component.html'
})
export class EmptyStateComponent implements DoCheck, OnInit {
/**
* The empty state config containing component properties
*/
@Input() config: EmptyStateConfig;
/**
* The event emitted when an action is selected
*/
@Output('onActionSelect') onActionSelect = new EventEmitter();
private defaultConfig = {
title: 'No Items Available'
} as EmptyStateConfig;
private prevConfig: EmptyStateConfig;
/**
* The default constructor
*/
constructor() {
}
// Initialization
/**
* Setup component configuration upon initialization
*/
ngOnInit(): void {
this.setupConfig();
}
/**
* Check if the component config has changed
*/
| (): void {
// Do a deep compare on config
if (!isEqual(this.config, this.prevConfig)) {
this.setupConfig();
}
}
/**
* Set up default config
*/
protected setupConfig(): void {
if (this.config !== undefined) {
defaults(this.config, this.defaultConfig);
} else {
this.config = cloneDeep(this.defaultConfig);
}
this.prevConfig = cloneDeep(this.config);
}
// Private
private handleAction(action: Action): void {
if (action && action.disabled !== true) {
this.onActionSelect.emit(action);
}
}
}
| ngDoCheck | identifier_name |
empty-state.component.ts | import {
Component,
DoCheck,
EventEmitter,
Input,
OnInit,
Output,
ViewEncapsulation
} from '@angular/core';
import { cloneDeep, defaults, isEqual } from 'lodash';
import { Action } from '../action/action';
import { EmptyStateConfig } from './empty-state-config';
/**
* Component for rendering an empty state.
*
* Usage:
* <code><pre>
* // Individual module import
* import { EmptyStateModule } from 'patternfly-ng/empty-state';
* // Or
* import { EmptyStateModule } from 'patternfly-ng';
*
* @NgModule({
* imports: [EmptyStateModule,...]
* })
* export class AppModule(){}
* </pre></code>
*
* Optional:
* <code><pre>
* import { EmptyStateConfig } from 'patternfly-ng/empty-state';
* </pre></code>
*/
@Component({
encapsulation: ViewEncapsulation.None,
selector: 'pfng-empty-state',
templateUrl: './empty-state.component.html'
})
export class EmptyStateComponent implements DoCheck, OnInit {
/**
* The empty state config containing component properties
*/
@Input() config: EmptyStateConfig;
/**
* The event emitted when an action is selected
*/
@Output('onActionSelect') onActionSelect = new EventEmitter();
private defaultConfig = {
title: 'No Items Available'
} as EmptyStateConfig;
private prevConfig: EmptyStateConfig;
/**
* The default constructor
*/
constructor() {
}
// Initialization
/**
* Setup component configuration upon initialization
*/
ngOnInit(): void {
this.setupConfig();
}
/**
* Check if the component config has changed
*/
ngDoCheck(): void {
// Do a deep compare on config
if (!isEqual(this.config, this.prevConfig)) |
}
/**
* Set up default config
*/
protected setupConfig(): void {
if (this.config !== undefined) {
defaults(this.config, this.defaultConfig);
} else {
this.config = cloneDeep(this.defaultConfig);
}
this.prevConfig = cloneDeep(this.config);
}
// Private
private handleAction(action: Action): void {
if (action && action.disabled !== true) {
this.onActionSelect.emit(action);
}
}
}
| {
this.setupConfig();
} | conditional_block |
abstract-const-as-cast-4.rs | // check-pass
#![feature(generic_const_exprs)]
#![allow(incomplete_features)]
trait Trait {}
pub struct EvaluatableU128<const N: u128>;
struct HasCastInTraitImpl<const N: usize, const M: u128>;
impl<const O: usize> Trait for HasCastInTraitImpl<O, { O as u128 }> {}
pub fn use_trait_impl<const N: usize>() where EvaluatableU128<{N as u128}>:, {
fn | <T: Trait>() {}
assert_impl::<HasCastInTraitImpl<N, { N as u128 }>>();
assert_impl::<HasCastInTraitImpl<N, { N as _ }>>();
assert_impl::<HasCastInTraitImpl<12, { 12 as u128 }>>();
assert_impl::<HasCastInTraitImpl<13, 13>>();
}
pub fn use_trait_impl_2<const N: usize>() where EvaluatableU128<{N as _}>:, {
fn assert_impl<T: Trait>() {}
assert_impl::<HasCastInTraitImpl<N, { N as u128 }>>();
assert_impl::<HasCastInTraitImpl<N, { N as _ }>>();
assert_impl::<HasCastInTraitImpl<12, { 12 as u128 }>>();
assert_impl::<HasCastInTraitImpl<13, 13>>();
}
fn main() {}
| assert_impl | identifier_name |
abstract-const-as-cast-4.rs | // check-pass
#![feature(generic_const_exprs)]
#![allow(incomplete_features)]
trait Trait {}
pub struct EvaluatableU128<const N: u128>;
struct HasCastInTraitImpl<const N: usize, const M: u128>;
impl<const O: usize> Trait for HasCastInTraitImpl<O, { O as u128 }> {}
pub fn use_trait_impl<const N: usize>() where EvaluatableU128<{N as u128}>:, {
fn assert_impl<T: Trait>() {}
assert_impl::<HasCastInTraitImpl<N, { N as u128 }>>();
assert_impl::<HasCastInTraitImpl<N, { N as _ }>>();
assert_impl::<HasCastInTraitImpl<12, { 12 as u128 }>>();
assert_impl::<HasCastInTraitImpl<13, 13>>();
}
pub fn use_trait_impl_2<const N: usize>() where EvaluatableU128<{N as _}>:, {
fn assert_impl<T: Trait>() {}
assert_impl::<HasCastInTraitImpl<N, { N as u128 }>>(); |
fn main() {} | assert_impl::<HasCastInTraitImpl<N, { N as _ }>>();
assert_impl::<HasCastInTraitImpl<12, { 12 as u128 }>>();
assert_impl::<HasCastInTraitImpl<13, 13>>();
} | random_line_split |
abstract-const-as-cast-4.rs | // check-pass
#![feature(generic_const_exprs)]
#![allow(incomplete_features)]
trait Trait {}
pub struct EvaluatableU128<const N: u128>;
struct HasCastInTraitImpl<const N: usize, const M: u128>;
impl<const O: usize> Trait for HasCastInTraitImpl<O, { O as u128 }> {}
pub fn use_trait_impl<const N: usize>() where EvaluatableU128<{N as u128}>:, {
fn assert_impl<T: Trait>() {}
assert_impl::<HasCastInTraitImpl<N, { N as u128 }>>();
assert_impl::<HasCastInTraitImpl<N, { N as _ }>>();
assert_impl::<HasCastInTraitImpl<12, { 12 as u128 }>>();
assert_impl::<HasCastInTraitImpl<13, 13>>();
}
pub fn use_trait_impl_2<const N: usize>() where EvaluatableU128<{N as _}>:, |
fn main() {}
| {
fn assert_impl<T: Trait>() {}
assert_impl::<HasCastInTraitImpl<N, { N as u128 }>>();
assert_impl::<HasCastInTraitImpl<N, { N as _ }>>();
assert_impl::<HasCastInTraitImpl<12, { 12 as u128 }>>();
assert_impl::<HasCastInTraitImpl<13, 13>>();
} | identifier_body |
static-reference-to-fn-2.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct StateMachineIter<'a> {
statefn: &'a fn(&mut StateMachineIter<'a>) -> Option<&'static str>
}
impl<'a> Iterator<&'static str> for StateMachineIter<'a> {
fn next(&mut self) -> Option<&'static str> {
return (*self.statefn)(self);
}
}
fn state1(self_: &mut StateMachineIter) -> Option<&'static str> {
self_.statefn = &state2;
//~^ ERROR borrowed value does not live long enough
return Some("state1");
}
fn state2(self_: &mut StateMachineIter) -> Option<(&'static str)> {
self_.statefn = &state3;
//~^ ERROR borrowed value does not live long enough
return Some("state2");
}
fn state3(self_: &mut StateMachineIter) -> Option<(&'static str)> |
fn finished(_: &mut StateMachineIter) -> Option<(&'static str)> {
return None;
}
fn state_iter() -> StateMachineIter<'static> {
StateMachineIter {
statefn: &state1 //~ ERROR borrowed value does not live long enough
}
}
fn main() {
let mut it = state_iter();
println!("{}",it.next());
println!("{}",it.next());
println!("{}",it.next());
println!("{}",it.next());
println!("{}",it.next());
}
| {
self_.statefn = &finished;
//~^ ERROR borrowed value does not live long enough
return Some("state3");
} | identifier_body |
static-reference-to-fn-2.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct StateMachineIter<'a> {
statefn: &'a fn(&mut StateMachineIter<'a>) -> Option<&'static str>
}
impl<'a> Iterator<&'static str> for StateMachineIter<'a> {
fn next(&mut self) -> Option<&'static str> {
return (*self.statefn)(self);
}
}
fn state1(self_: &mut StateMachineIter) -> Option<&'static str> {
self_.statefn = &state2;
//~^ ERROR borrowed value does not live long enough
return Some("state1");
}
fn state2(self_: &mut StateMachineIter) -> Option<(&'static str)> {
self_.statefn = &state3;
//~^ ERROR borrowed value does not live long enough
return Some("state2");
}
fn state3(self_: &mut StateMachineIter) -> Option<(&'static str)> {
self_.statefn = &finished;
//~^ ERROR borrowed value does not live long enough
return Some("state3");
}
fn finished(_: &mut StateMachineIter) -> Option<(&'static str)> {
return None;
}
fn state_iter() -> StateMachineIter<'static> {
StateMachineIter {
statefn: &state1 //~ ERROR borrowed value does not live long enough
}
}
fn main() {
let mut it = state_iter();
println!("{}",it.next());
println!("{}",it.next());
println!("{}",it.next());
println!("{}",it.next());
println!("{}",it.next());
} | // http://rust-lang.org/COPYRIGHT.
// | random_line_split |
static-reference-to-fn-2.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct StateMachineIter<'a> {
statefn: &'a fn(&mut StateMachineIter<'a>) -> Option<&'static str>
}
impl<'a> Iterator<&'static str> for StateMachineIter<'a> {
fn next(&mut self) -> Option<&'static str> {
return (*self.statefn)(self);
}
}
fn | (self_: &mut StateMachineIter) -> Option<&'static str> {
self_.statefn = &state2;
//~^ ERROR borrowed value does not live long enough
return Some("state1");
}
fn state2(self_: &mut StateMachineIter) -> Option<(&'static str)> {
self_.statefn = &state3;
//~^ ERROR borrowed value does not live long enough
return Some("state2");
}
fn state3(self_: &mut StateMachineIter) -> Option<(&'static str)> {
self_.statefn = &finished;
//~^ ERROR borrowed value does not live long enough
return Some("state3");
}
fn finished(_: &mut StateMachineIter) -> Option<(&'static str)> {
return None;
}
fn state_iter() -> StateMachineIter<'static> {
StateMachineIter {
statefn: &state1 //~ ERROR borrowed value does not live long enough
}
}
fn main() {
let mut it = state_iter();
println!("{}",it.next());
println!("{}",it.next());
println!("{}",it.next());
println!("{}",it.next());
println!("{}",it.next());
}
| state1 | identifier_name |
xinput.py | # -*- coding: utf-8 -*-
# Copyright (C) 2010 Holoscopio Tecnologia
# Author: Marcelo Jorge Vieira <metal@holoscopio.com>
# Author: Thadeu Lima de Souza Cascardo <cascardo@holoscopio.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import gobject
import pygst
pygst.require("0.10")
import gst
from core import Input, INPUT_TYPE_VIDEO
from sltv.utils import Fract
CAPABILITIES = INPUT_TYPE_VIDEO
class XInput(Input):
def __init__(self):
|
def config(self, dict):
num, den = Fract.fromdecimal(dict["framerate"])
caps = gst.caps_from_string(
"video/x-raw-rgb, framerate=%d/%d" % (num, den)
)
self.capsfilter.set_property("caps", caps)
| Input.__init__(self, CAPABILITIES)
self.video_src = gst.element_factory_make("ximagesrc", "video_src")
# Setting format to time, to work with input-selector, since they're
# were not working together in version 0.10.18-1 from Debian.
# This should be fixed in ximagesrc's code and input-selector should
# also be fixed to work with byte format.
self.video_src.set_format(gst.FORMAT_TIME)
self.video_src.set_property("use-damage", False)
self.add(self.video_src)
self.capsfilter = gst.element_factory_make("capsfilter", "capsfilter")
self.add(self.capsfilter)
gst.element_link_many(self.video_src, self.capsfilter)
self.video_pad.set_target(self.capsfilter.src_pads().next()) | identifier_body |
xinput.py | # -*- coding: utf-8 -*-
# Copyright (C) 2010 Holoscopio Tecnologia
# Author: Marcelo Jorge Vieira <metal@holoscopio.com>
# Author: Thadeu Lima de Souza Cascardo <cascardo@holoscopio.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# | # with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import gobject
import pygst
pygst.require("0.10")
import gst
from core import Input, INPUT_TYPE_VIDEO
from sltv.utils import Fract
CAPABILITIES = INPUT_TYPE_VIDEO
class XInput(Input):
def __init__(self):
Input.__init__(self, CAPABILITIES)
self.video_src = gst.element_factory_make("ximagesrc", "video_src")
# Setting format to time, to work with input-selector, since they're
# were not working together in version 0.10.18-1 from Debian.
# This should be fixed in ximagesrc's code and input-selector should
# also be fixed to work with byte format.
self.video_src.set_format(gst.FORMAT_TIME)
self.video_src.set_property("use-damage", False)
self.add(self.video_src)
self.capsfilter = gst.element_factory_make("capsfilter", "capsfilter")
self.add(self.capsfilter)
gst.element_link_many(self.video_src, self.capsfilter)
self.video_pad.set_target(self.capsfilter.src_pads().next())
def config(self, dict):
num, den = Fract.fromdecimal(dict["framerate"])
caps = gst.caps_from_string(
"video/x-raw-rgb, framerate=%d/%d" % (num, den)
)
self.capsfilter.set_property("caps", caps) | # You should have received a copy of the GNU General Public License along | random_line_split |
xinput.py | # -*- coding: utf-8 -*-
# Copyright (C) 2010 Holoscopio Tecnologia
# Author: Marcelo Jorge Vieira <metal@holoscopio.com>
# Author: Thadeu Lima de Souza Cascardo <cascardo@holoscopio.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import gobject
import pygst
pygst.require("0.10")
import gst
from core import Input, INPUT_TYPE_VIDEO
from sltv.utils import Fract
CAPABILITIES = INPUT_TYPE_VIDEO
class | (Input):
def __init__(self):
Input.__init__(self, CAPABILITIES)
self.video_src = gst.element_factory_make("ximagesrc", "video_src")
# Setting format to time, to work with input-selector, since they're
# were not working together in version 0.10.18-1 from Debian.
# This should be fixed in ximagesrc's code and input-selector should
# also be fixed to work with byte format.
self.video_src.set_format(gst.FORMAT_TIME)
self.video_src.set_property("use-damage", False)
self.add(self.video_src)
self.capsfilter = gst.element_factory_make("capsfilter", "capsfilter")
self.add(self.capsfilter)
gst.element_link_many(self.video_src, self.capsfilter)
self.video_pad.set_target(self.capsfilter.src_pads().next())
def config(self, dict):
num, den = Fract.fromdecimal(dict["framerate"])
caps = gst.caps_from_string(
"video/x-raw-rgb, framerate=%d/%d" % (num, den)
)
self.capsfilter.set_property("caps", caps)
| XInput | identifier_name |
settings.py | """
Django settings for magnet project.
Generated by 'django-admin startproject' using Django 1.10.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
from django.utils.translation import ugettext_lazy as _
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 's**osz#c22#hn13(@0++r+2eq4^7$$7qafa%3$f#g^b_&4$7zv'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
| # Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# Our apps
'magnet.apps.users',
# 3rd Party apps
'crispy_forms',
'django_extensions',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'magnet.core.middleware.ForceDefaultLanguageMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'magnet.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'magnet', 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'magnet.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'id'
LANGUAGES = (
('id', _('Indonesian')),
('en', _('English')),
)
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Set custom user model
AUTH_USER_MODEL = 'users.User'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'magnet', 'static'),
os.path.join(BASE_DIR, 'node_modules'), # packages installed by yarn
]
# Auth with mobile phone
AUTHENTICATION_BACKENDS = [
'magnet.apps.users.backends.MagnetBackend',
]
# Translation files
LOCALE_PATHS = [
os.path.join(BASE_DIR, 'magnet', 'locales')
]
# Crispy form template pack
CRISPY_TEMPLATE_PACK = 'bootstrap4'
# Security settings
if not DEBUG:
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECURE_SSL_REDIRECT = True
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
try:
from .local_settings import *
except:
pass | random_line_split | |
settings.py | """
Django settings for magnet project.
Generated by 'django-admin startproject' using Django 1.10.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
from django.utils.translation import ugettext_lazy as _
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 's**osz#c22#hn13(@0++r+2eq4^7$$7qafa%3$f#g^b_&4$7zv'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# Our apps
'magnet.apps.users',
# 3rd Party apps
'crispy_forms',
'django_extensions',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'magnet.core.middleware.ForceDefaultLanguageMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'magnet.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'magnet', 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'magnet.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'id'
LANGUAGES = (
('id', _('Indonesian')),
('en', _('English')),
)
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Set custom user model
AUTH_USER_MODEL = 'users.User'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'magnet', 'static'),
os.path.join(BASE_DIR, 'node_modules'), # packages installed by yarn
]
# Auth with mobile phone
AUTHENTICATION_BACKENDS = [
'magnet.apps.users.backends.MagnetBackend',
]
# Translation files
LOCALE_PATHS = [
os.path.join(BASE_DIR, 'magnet', 'locales')
]
# Crispy form template pack
CRISPY_TEMPLATE_PACK = 'bootstrap4'
# Security settings
if not DEBUG:
|
try:
from .local_settings import *
except:
pass
| SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECURE_SSL_REDIRECT = True
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True | conditional_block |
_load_balancer_network_interfaces_operations.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class LoadBalancerNetworkInterfacesOperations(object):
"""LoadBalancerNetworkInterfacesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def | (self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name, # type: str
load_balancer_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.NetworkInterfaceListResult"]
"""Gets associated load balancer network interfaces.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_07_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/networkInterfaces'} # type: ignore
| __init__ | identifier_name |
_load_balancer_network_interfaces_operations.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class LoadBalancerNetworkInterfacesOperations(object):
"""LoadBalancerNetworkInterfacesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
|
def list(
self,
resource_group_name, # type: str
load_balancer_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.NetworkInterfaceListResult"]
"""Gets associated load balancer network interfaces.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_07_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/networkInterfaces'} # type: ignore
| self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config | identifier_body |
_load_balancer_network_interfaces_operations.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class LoadBalancerNetworkInterfacesOperations(object):
"""LoadBalancerNetworkInterfacesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name, # type: str
load_balancer_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.NetworkInterfaceListResult"]
"""Gets associated load balancer network interfaces.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_07_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json" | def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/networkInterfaces'} # type: ignore | random_line_split | |
_load_balancer_network_interfaces_operations.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class LoadBalancerNetworkInterfacesOperations(object):
"""LoadBalancerNetworkInterfacesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name, # type: str
load_balancer_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.NetworkInterfaceListResult"]
"""Gets associated load balancer network interfaces.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_07_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
|
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/networkInterfaces'} # type: ignore
| url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters) | conditional_block |
create.py | # coding: utf-8
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import sqlalchemy
import ckan.plugins.toolkit as toolkit
import ckan.lib.dictization.model_dictize as model_dictize
from ckan.logic import NotAuthorized
import logging
log = logging.getLogger(__name__)
from sqlalchemy import Column, String, DateTime, Sequence, Integer, types, desc
from sqlalchemy.ext.declarative import declarative_base
from datetime import datetime
from ckan.model.domain_object import DomainObject
from ckan.model.meta import metadata, mapper, Session
from ckan import model
import logging
log = logging.getLogger(__name__)
import json
import uuid
base = declarative_base()
def _get_datetime():
|
def make_uuid():
return unicode(uuid.uuid4())
class Article(base):
__tablename__ = 'articles'
id = Column(types.UnicodeText, primary_key=True, default=make_uuid)
title = Column(types.UnicodeText, nullable=False, unique=True)
content = Column(types.UnicodeText, nullable=False)
author = Column(types.UnicodeText, nullable=False)
created_date = Column(DateTime, default = _get_datetime)
update_date = Column(DateTime, onupdate = _get_datetime)
def article_create(context, data_dict):
model = context['model']
model.Session.add(Article(title= data_dict['title'], author="ckan", content= data_dict['content']))
model.Session.commit()
return True
| return datetime.now() | identifier_body |
create.py | # coding: utf-8
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import sqlalchemy
import ckan.plugins.toolkit as toolkit
import ckan.lib.dictization.model_dictize as model_dictize
from ckan.logic import NotAuthorized
import logging
log = logging.getLogger(__name__)
from sqlalchemy import Column, String, DateTime, Sequence, Integer, types, desc
from sqlalchemy.ext.declarative import declarative_base
from datetime import datetime
from ckan.model.domain_object import DomainObject
from ckan.model.meta import metadata, mapper, Session
from ckan import model
import logging
log = logging.getLogger(__name__)
import json
import uuid
base = declarative_base()
def _get_datetime():
return datetime.now()
def make_uuid():
return unicode(uuid.uuid4())
class Article(base):
__tablename__ = 'articles'
id = Column(types.UnicodeText, primary_key=True, default=make_uuid)
title = Column(types.UnicodeText, nullable=False, unique=True)
content = Column(types.UnicodeText, nullable=False)
author = Column(types.UnicodeText, nullable=False)
created_date = Column(DateTime, default = _get_datetime)
update_date = Column(DateTime, onupdate = _get_datetime)
def | (context, data_dict):
model = context['model']
model.Session.add(Article(title= data_dict['title'], author="ckan", content= data_dict['content']))
model.Session.commit()
return True
| article_create | identifier_name |
create.py | # coding: utf-8
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import sqlalchemy
import ckan.plugins.toolkit as toolkit
import ckan.lib.dictization.model_dictize as model_dictize
from ckan.logic import NotAuthorized
import logging
log = logging.getLogger(__name__)
from sqlalchemy import Column, String, DateTime, Sequence, Integer, types, desc
from sqlalchemy.ext.declarative import declarative_base
from datetime import datetime
from ckan.model.domain_object import DomainObject
from ckan.model.meta import metadata, mapper, Session
from ckan import model
import logging
log = logging.getLogger(__name__)
import json
import uuid
base = declarative_base()
def _get_datetime():
return datetime.now()
|
id = Column(types.UnicodeText, primary_key=True, default=make_uuid)
title = Column(types.UnicodeText, nullable=False, unique=True)
content = Column(types.UnicodeText, nullable=False)
author = Column(types.UnicodeText, nullable=False)
created_date = Column(DateTime, default = _get_datetime)
update_date = Column(DateTime, onupdate = _get_datetime)
def article_create(context, data_dict):
model = context['model']
model.Session.add(Article(title= data_dict['title'], author="ckan", content= data_dict['content']))
model.Session.commit()
return True | def make_uuid():
return unicode(uuid.uuid4())
class Article(base):
__tablename__ = 'articles' | random_line_split |
test_data_root.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
from twisted.internet import defer
from twisted.trial import unittest
from buildbot.data import connector
from buildbot.data import root
from buildbot.test.util import endpoint
class RootEndpoint(endpoint.EndpointMixin, unittest.TestCase):
endpointClass = root.RootEndpoint
resourceTypeClass = root.Root
def setUp(self):
self.setUpEndpoint()
self.master.data.rootLinks = [
{'name': u'abc'},
]
def tearDown(self):
self.tearDownEndpoint()
@defer.inlineCallbacks
def test_get(self):
rootlinks = yield self.callGet(('',))
[self.validateData(root) for root in rootlinks]
self.assertEqual(rootlinks, [
{'name': u'abc'},
])
class SpecEndpoint(endpoint.EndpointMixin, unittest.TestCase):
endpointClass = root.SpecEndpoint
resourceTypeClass = root.Spec
def setUp(self):
self.setUpEndpoint()
# replace fakeConnector with real DataConnector
self.master.data.disownServiceParent()
self.master.data = connector.DataConnector() | def tearDown(self):
self.tearDownEndpoint()
@defer.inlineCallbacks
def test_get(self):
specs = yield self.callGet(('application.spec',))
[self.validateData(s) for s in specs]
for s in specs:
# only test an endpoint that is reasonably stable
if s['path'] != "master":
continue
self.assertEqual(s,
{'path': 'master',
'type': 'master',
'type_spec': {'fields': [{'name': 'active',
'type': 'boolean',
'type_spec': {'name': 'boolean'}},
{'name': 'masterid',
'type': 'integer',
'type_spec': {'name': 'integer'}},
{'name': 'link',
'type': 'link',
'type_spec': {'name': 'link'}},
{'name': 'name',
'type': 'string',
'type_spec': {'name': 'string'}},
{'name': 'last_active',
'type': 'datetime',
'type_spec': {'name': 'datetime'}}],
'type': 'master'},
'plural': 'masters'}) | self.master.data.setServiceParent(self.master)
| random_line_split |
test_data_root.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
from twisted.internet import defer
from twisted.trial import unittest
from buildbot.data import connector
from buildbot.data import root
from buildbot.test.util import endpoint
class RootEndpoint(endpoint.EndpointMixin, unittest.TestCase):
endpointClass = root.RootEndpoint
resourceTypeClass = root.Root
def setUp(self):
self.setUpEndpoint()
self.master.data.rootLinks = [
{'name': u'abc'},
]
def tearDown(self):
self.tearDownEndpoint()
@defer.inlineCallbacks
def test_get(self):
rootlinks = yield self.callGet(('',))
[self.validateData(root) for root in rootlinks]
self.assertEqual(rootlinks, [
{'name': u'abc'},
])
class | (endpoint.EndpointMixin, unittest.TestCase):
endpointClass = root.SpecEndpoint
resourceTypeClass = root.Spec
def setUp(self):
self.setUpEndpoint()
# replace fakeConnector with real DataConnector
self.master.data.disownServiceParent()
self.master.data = connector.DataConnector()
self.master.data.setServiceParent(self.master)
def tearDown(self):
self.tearDownEndpoint()
@defer.inlineCallbacks
def test_get(self):
specs = yield self.callGet(('application.spec',))
[self.validateData(s) for s in specs]
for s in specs:
# only test an endpoint that is reasonably stable
if s['path'] != "master":
continue
self.assertEqual(s,
{'path': 'master',
'type': 'master',
'type_spec': {'fields': [{'name': 'active',
'type': 'boolean',
'type_spec': {'name': 'boolean'}},
{'name': 'masterid',
'type': 'integer',
'type_spec': {'name': 'integer'}},
{'name': 'link',
'type': 'link',
'type_spec': {'name': 'link'}},
{'name': 'name',
'type': 'string',
'type_spec': {'name': 'string'}},
{'name': 'last_active',
'type': 'datetime',
'type_spec': {'name': 'datetime'}}],
'type': 'master'},
'plural': 'masters'})
| SpecEndpoint | identifier_name |
test_data_root.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
from twisted.internet import defer
from twisted.trial import unittest
from buildbot.data import connector
from buildbot.data import root
from buildbot.test.util import endpoint
class RootEndpoint(endpoint.EndpointMixin, unittest.TestCase):
endpointClass = root.RootEndpoint
resourceTypeClass = root.Root
def setUp(self):
self.setUpEndpoint()
self.master.data.rootLinks = [
{'name': u'abc'},
]
def tearDown(self):
self.tearDownEndpoint()
@defer.inlineCallbacks
def test_get(self):
rootlinks = yield self.callGet(('',))
[self.validateData(root) for root in rootlinks]
self.assertEqual(rootlinks, [
{'name': u'abc'},
])
class SpecEndpoint(endpoint.EndpointMixin, unittest.TestCase):
endpointClass = root.SpecEndpoint
resourceTypeClass = root.Spec
def setUp(self):
|
def tearDown(self):
self.tearDownEndpoint()
@defer.inlineCallbacks
def test_get(self):
specs = yield self.callGet(('application.spec',))
[self.validateData(s) for s in specs]
for s in specs:
# only test an endpoint that is reasonably stable
if s['path'] != "master":
continue
self.assertEqual(s,
{'path': 'master',
'type': 'master',
'type_spec': {'fields': [{'name': 'active',
'type': 'boolean',
'type_spec': {'name': 'boolean'}},
{'name': 'masterid',
'type': 'integer',
'type_spec': {'name': 'integer'}},
{'name': 'link',
'type': 'link',
'type_spec': {'name': 'link'}},
{'name': 'name',
'type': 'string',
'type_spec': {'name': 'string'}},
{'name': 'last_active',
'type': 'datetime',
'type_spec': {'name': 'datetime'}}],
'type': 'master'},
'plural': 'masters'})
| self.setUpEndpoint()
# replace fakeConnector with real DataConnector
self.master.data.disownServiceParent()
self.master.data = connector.DataConnector()
self.master.data.setServiceParent(self.master) | identifier_body |
test_data_root.py | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
from twisted.internet import defer
from twisted.trial import unittest
from buildbot.data import connector
from buildbot.data import root
from buildbot.test.util import endpoint
class RootEndpoint(endpoint.EndpointMixin, unittest.TestCase):
endpointClass = root.RootEndpoint
resourceTypeClass = root.Root
def setUp(self):
self.setUpEndpoint()
self.master.data.rootLinks = [
{'name': u'abc'},
]
def tearDown(self):
self.tearDownEndpoint()
@defer.inlineCallbacks
def test_get(self):
rootlinks = yield self.callGet(('',))
[self.validateData(root) for root in rootlinks]
self.assertEqual(rootlinks, [
{'name': u'abc'},
])
class SpecEndpoint(endpoint.EndpointMixin, unittest.TestCase):
endpointClass = root.SpecEndpoint
resourceTypeClass = root.Spec
def setUp(self):
self.setUpEndpoint()
# replace fakeConnector with real DataConnector
self.master.data.disownServiceParent()
self.master.data = connector.DataConnector()
self.master.data.setServiceParent(self.master)
def tearDown(self):
self.tearDownEndpoint()
@defer.inlineCallbacks
def test_get(self):
specs = yield self.callGet(('application.spec',))
[self.validateData(s) for s in specs]
for s in specs:
# only test an endpoint that is reasonably stable
if s['path'] != "master":
|
self.assertEqual(s,
{'path': 'master',
'type': 'master',
'type_spec': {'fields': [{'name': 'active',
'type': 'boolean',
'type_spec': {'name': 'boolean'}},
{'name': 'masterid',
'type': 'integer',
'type_spec': {'name': 'integer'}},
{'name': 'link',
'type': 'link',
'type_spec': {'name': 'link'}},
{'name': 'name',
'type': 'string',
'type_spec': {'name': 'string'}},
{'name': 'last_active',
'type': 'datetime',
'type_spec': {'name': 'datetime'}}],
'type': 'master'},
'plural': 'masters'})
| continue | conditional_block |
linesearch_step.py | # Copyright 2020 Tensorforce Team. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import tensorflow as tf
from tensorforce import TensorforceError
from tensorforce.core import TensorDict, TensorSpec, TensorsSpec, tf_function, tf_util
from tensorforce.core.optimizers import UpdateModifier
from tensorforce.core.optimizers.solvers import solver_modules
class LinesearchStep(UpdateModifier):
| """
Line-search-step update modifier, which performs a line search on the update step returned by
the given optimizer to find a potentially superior smaller step size
(specification key: `linesearch_step`).
Args:
optimizer (specification): Optimizer configuration
(<span style="color:#C00000"><b>required</b></span>).
max_iterations (parameter, int >= 1): Maximum number of line search iterations
(<span style="color:#C00000"><b>required</b></span>).
backtracking_factor (parameter, 0.0 < float < 1.0): Line search backtracking factor
(<span style="color:#00C000"><b>default</b></span>: 0.75).
name (string): (<span style="color:#0000C0"><b>internal use</b></span>).
arguments_spec (specification): <span style="color:#0000C0"><b>internal use</b></span>.
"""
def __init__(
self, *, optimizer, max_iterations, backtracking_factor=0.75, name=None, arguments_spec=None
):
super().__init__(optimizer=optimizer, name=name, arguments_spec=arguments_spec)
self.line_search = self.submodule(
name='line_search', module='line_search', modules=solver_modules,
max_iterations=max_iterations, backtracking_factor=backtracking_factor
)
def initialize_given_variables(self, *, variables):
super().initialize_given_variables(variables=variables)
self.line_search.complete_initialize(
arguments_spec=self.arguments_spec, values_spec=self.variables_spec
)
@tf_function(num_args=1)
def step(self, *, arguments, variables, fn_loss, **kwargs):
loss_before = fn_loss(**arguments.to_kwargs())
with tf.control_dependencies(control_inputs=(loss_before,)):
deltas = self.optimizer.step(
arguments=arguments, variables=variables, fn_loss=fn_loss, **kwargs
)
with tf.control_dependencies(control_inputs=deltas):
def linesearch():
loss_after = fn_loss(**arguments.to_kwargs())
with tf.control_dependencies(control_inputs=(loss_after,)):
# Replace "/" with "_" to ensure TensorDict is flat
_deltas = TensorDict((
(var.name[:-2].replace('/', '_'), delta)
for var, delta in zip(variables, deltas)
))
# TODO: should be moved to initialize_given_variables, but fn_loss...
def evaluate_step(arguments, deltas):
assignments = list()
for variable, delta in zip(variables, deltas.values()):
assignments.append(variable.assign_add(delta=delta, read_value=False))
with tf.control_dependencies(control_inputs=assignments):
return fn_loss(**arguments.to_kwargs())
_deltas = self.line_search.solve(
arguments=arguments, x_init=_deltas, base_value=loss_before,
zero_value=loss_after, fn_x=evaluate_step
)
return tuple(_deltas.values())
num_nonzero = list()
for delta in deltas:
num_nonzero.append(tf.math.count_nonzero(input=delta))
num_nonzero = tf.math.add_n(inputs=num_nonzero)
return tf.cond(pred=(num_nonzero == 0), true_fn=(lambda: deltas), false_fn=linesearch) | identifier_body | |
linesearch_step.py | # Copyright 2020 Tensorforce Team. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import tensorflow as tf
from tensorforce import TensorforceError
from tensorforce.core import TensorDict, TensorSpec, TensorsSpec, tf_function, tf_util
from tensorforce.core.optimizers import UpdateModifier
from tensorforce.core.optimizers.solvers import solver_modules
class LinesearchStep(UpdateModifier):
"""
Line-search-step update modifier, which performs a line search on the update step returned by
the given optimizer to find a potentially superior smaller step size
(specification key: `linesearch_step`).
Args:
optimizer (specification): Optimizer configuration
(<span style="color:#C00000"><b>required</b></span>).
max_iterations (parameter, int >= 1): Maximum number of line search iterations
(<span style="color:#C00000"><b>required</b></span>).
backtracking_factor (parameter, 0.0 < float < 1.0): Line search backtracking factor
(<span style="color:#00C000"><b>default</b></span>: 0.75).
name (string): (<span style="color:#0000C0"><b>internal use</b></span>).
arguments_spec (specification): <span style="color:#0000C0"><b>internal use</b></span>.
""" | ):
super().__init__(optimizer=optimizer, name=name, arguments_spec=arguments_spec)
self.line_search = self.submodule(
name='line_search', module='line_search', modules=solver_modules,
max_iterations=max_iterations, backtracking_factor=backtracking_factor
)
def initialize_given_variables(self, *, variables):
super().initialize_given_variables(variables=variables)
self.line_search.complete_initialize(
arguments_spec=self.arguments_spec, values_spec=self.variables_spec
)
@tf_function(num_args=1)
def step(self, *, arguments, variables, fn_loss, **kwargs):
loss_before = fn_loss(**arguments.to_kwargs())
with tf.control_dependencies(control_inputs=(loss_before,)):
deltas = self.optimizer.step(
arguments=arguments, variables=variables, fn_loss=fn_loss, **kwargs
)
with tf.control_dependencies(control_inputs=deltas):
def linesearch():
loss_after = fn_loss(**arguments.to_kwargs())
with tf.control_dependencies(control_inputs=(loss_after,)):
# Replace "/" with "_" to ensure TensorDict is flat
_deltas = TensorDict((
(var.name[:-2].replace('/', '_'), delta)
for var, delta in zip(variables, deltas)
))
# TODO: should be moved to initialize_given_variables, but fn_loss...
def evaluate_step(arguments, deltas):
assignments = list()
for variable, delta in zip(variables, deltas.values()):
assignments.append(variable.assign_add(delta=delta, read_value=False))
with tf.control_dependencies(control_inputs=assignments):
return fn_loss(**arguments.to_kwargs())
_deltas = self.line_search.solve(
arguments=arguments, x_init=_deltas, base_value=loss_before,
zero_value=loss_after, fn_x=evaluate_step
)
return tuple(_deltas.values())
num_nonzero = list()
for delta in deltas:
num_nonzero.append(tf.math.count_nonzero(input=delta))
num_nonzero = tf.math.add_n(inputs=num_nonzero)
return tf.cond(pred=(num_nonzero == 0), true_fn=(lambda: deltas), false_fn=linesearch) |
def __init__(
self, *, optimizer, max_iterations, backtracking_factor=0.75, name=None, arguments_spec=None | random_line_split |
linesearch_step.py | # Copyright 2020 Tensorforce Team. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import tensorflow as tf
from tensorforce import TensorforceError
from tensorforce.core import TensorDict, TensorSpec, TensorsSpec, tf_function, tf_util
from tensorforce.core.optimizers import UpdateModifier
from tensorforce.core.optimizers.solvers import solver_modules
class LinesearchStep(UpdateModifier):
"""
Line-search-step update modifier, which performs a line search on the update step returned by
the given optimizer to find a potentially superior smaller step size
(specification key: `linesearch_step`).
Args:
optimizer (specification): Optimizer configuration
(<span style="color:#C00000"><b>required</b></span>).
max_iterations (parameter, int >= 1): Maximum number of line search iterations
(<span style="color:#C00000"><b>required</b></span>).
backtracking_factor (parameter, 0.0 < float < 1.0): Line search backtracking factor
(<span style="color:#00C000"><b>default</b></span>: 0.75).
name (string): (<span style="color:#0000C0"><b>internal use</b></span>).
arguments_spec (specification): <span style="color:#0000C0"><b>internal use</b></span>.
"""
def __init__(
self, *, optimizer, max_iterations, backtracking_factor=0.75, name=None, arguments_spec=None
):
super().__init__(optimizer=optimizer, name=name, arguments_spec=arguments_spec)
self.line_search = self.submodule(
name='line_search', module='line_search', modules=solver_modules,
max_iterations=max_iterations, backtracking_factor=backtracking_factor
)
def initialize_given_variables(self, *, variables):
super().initialize_given_variables(variables=variables)
self.line_search.complete_initialize(
arguments_spec=self.arguments_spec, values_spec=self.variables_spec
)
@tf_function(num_args=1)
def step(self, *, arguments, variables, fn_loss, **kwargs):
loss_before = fn_loss(**arguments.to_kwargs())
with tf.control_dependencies(control_inputs=(loss_before,)):
deltas = self.optimizer.step(
arguments=arguments, variables=variables, fn_loss=fn_loss, **kwargs
)
with tf.control_dependencies(control_inputs=deltas):
def linesearch():
loss_after = fn_loss(**arguments.to_kwargs())
with tf.control_dependencies(control_inputs=(loss_after,)):
# Replace "/" with "_" to ensure TensorDict is flat
_deltas = TensorDict((
(var.name[:-2].replace('/', '_'), delta)
for var, delta in zip(variables, deltas)
))
# TODO: should be moved to initialize_given_variables, but fn_loss...
def evaluate_step(arguments, deltas):
assignments = list()
for variable, delta in zip(variables, deltas.values()):
|
with tf.control_dependencies(control_inputs=assignments):
return fn_loss(**arguments.to_kwargs())
_deltas = self.line_search.solve(
arguments=arguments, x_init=_deltas, base_value=loss_before,
zero_value=loss_after, fn_x=evaluate_step
)
return tuple(_deltas.values())
num_nonzero = list()
for delta in deltas:
num_nonzero.append(tf.math.count_nonzero(input=delta))
num_nonzero = tf.math.add_n(inputs=num_nonzero)
return tf.cond(pred=(num_nonzero == 0), true_fn=(lambda: deltas), false_fn=linesearch)
| assignments.append(variable.assign_add(delta=delta, read_value=False)) | conditional_block |
linesearch_step.py | # Copyright 2020 Tensorforce Team. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import tensorflow as tf
from tensorforce import TensorforceError
from tensorforce.core import TensorDict, TensorSpec, TensorsSpec, tf_function, tf_util
from tensorforce.core.optimizers import UpdateModifier
from tensorforce.core.optimizers.solvers import solver_modules
class LinesearchStep(UpdateModifier):
"""
Line-search-step update modifier, which performs a line search on the update step returned by
the given optimizer to find a potentially superior smaller step size
(specification key: `linesearch_step`).
Args:
optimizer (specification): Optimizer configuration
(<span style="color:#C00000"><b>required</b></span>).
max_iterations (parameter, int >= 1): Maximum number of line search iterations
(<span style="color:#C00000"><b>required</b></span>).
backtracking_factor (parameter, 0.0 < float < 1.0): Line search backtracking factor
(<span style="color:#00C000"><b>default</b></span>: 0.75).
name (string): (<span style="color:#0000C0"><b>internal use</b></span>).
arguments_spec (specification): <span style="color:#0000C0"><b>internal use</b></span>.
"""
def __init__(
self, *, optimizer, max_iterations, backtracking_factor=0.75, name=None, arguments_spec=None
):
super().__init__(optimizer=optimizer, name=name, arguments_spec=arguments_spec)
self.line_search = self.submodule(
name='line_search', module='line_search', modules=solver_modules,
max_iterations=max_iterations, backtracking_factor=backtracking_factor
)
def initialize_given_variables(self, *, variables):
super().initialize_given_variables(variables=variables)
self.line_search.complete_initialize(
arguments_spec=self.arguments_spec, values_spec=self.variables_spec
)
@tf_function(num_args=1)
def | (self, *, arguments, variables, fn_loss, **kwargs):
loss_before = fn_loss(**arguments.to_kwargs())
with tf.control_dependencies(control_inputs=(loss_before,)):
deltas = self.optimizer.step(
arguments=arguments, variables=variables, fn_loss=fn_loss, **kwargs
)
with tf.control_dependencies(control_inputs=deltas):
def linesearch():
loss_after = fn_loss(**arguments.to_kwargs())
with tf.control_dependencies(control_inputs=(loss_after,)):
# Replace "/" with "_" to ensure TensorDict is flat
_deltas = TensorDict((
(var.name[:-2].replace('/', '_'), delta)
for var, delta in zip(variables, deltas)
))
# TODO: should be moved to initialize_given_variables, but fn_loss...
def evaluate_step(arguments, deltas):
assignments = list()
for variable, delta in zip(variables, deltas.values()):
assignments.append(variable.assign_add(delta=delta, read_value=False))
with tf.control_dependencies(control_inputs=assignments):
return fn_loss(**arguments.to_kwargs())
_deltas = self.line_search.solve(
arguments=arguments, x_init=_deltas, base_value=loss_before,
zero_value=loss_after, fn_x=evaluate_step
)
return tuple(_deltas.values())
num_nonzero = list()
for delta in deltas:
num_nonzero.append(tf.math.count_nonzero(input=delta))
num_nonzero = tf.math.add_n(inputs=num_nonzero)
return tf.cond(pred=(num_nonzero == 0), true_fn=(lambda: deltas), false_fn=linesearch)
| step | identifier_name |
scene.rs | // This is a part of Sonorous.
// Copyright (c) 2005, 2007, 2009, 2012, 2013, 2014, Kang Seonghoon.
// See README.md and LICENSE.txt for details.
//! Scene management.
use std::io::timer::sleep;
use std::time::Duration;
use sdl::get_ticks;
use ui::common::Ticker;
/// Options used by the scene to customize the scene loop.
#[deriving(Clone)]
pub struct | {
/// If specified, limits the number of `Scene::tick` calls per second to this value.
/// `run_scene` ensures this limitation by sleeping after each tick as needed.
pub tpslimit: Option<uint>,
/// If specified, limits the number of `Scene::render` calls per second to this value.
/// Due to the implementation strategy `tpslimit` takes precedence over this if specified.
pub fpslimit: Option<uint>,
}
impl SceneOptions {
/// Creates default options for the scene.
pub fn new() -> SceneOptions {
SceneOptions { tpslimit: None, fpslimit: None }
}
/// Replaces `tpslimit` field with given value.
pub fn tpslimit(self, tps: uint) -> SceneOptions {
SceneOptions { tpslimit: Some(tps), ..self }
}
/// Replaces `fpslimit` field with given value.
pub fn fpslimit(self, fps: uint) -> SceneOptions {
SceneOptions { fpslimit: Some(fps), ..self }
}
}
/// A command returned by `Scene`'s `tick` method.
pub enum SceneCommand {
/// Continues displaying this scene.
Continue,
/// Pushes a new `Scene` to the scene stack, making it the active scene. The current scene is
/// stopped (after calling `deactivate`) until the new scene returns `PopScene` command.
Push(Box<Scene+'static>),
/// Replaces the current scene with a new `Scene` that will be returned by `consume` method.
/// The command itself does not have a `Scene` argument since new scene may have to be
/// constructured out of the existing scene. Therefore the scene should be prepared for
/// multiple next scenes possible if any.
Replace,
/// Pops the current scene from the scene stack. The program exits if it was the only remaining
/// scene in the stack.
Pop,
/// Clears the scene stack, effectively ending the program.
Exit,
}
/// Scene interface.
pub trait Scene {
/// Called when the scene is to be activated, prior to the first `tick` call. May return
/// a non-`Continue` command to immediately deactivate the scene.
fn activate(&mut self) -> SceneCommand;
/// Returns the options for this scene. It is called *after* the `activate` call.
fn scene_options(&self) -> SceneOptions;
/// Does the event handling and internal logics, and returns a command to instruct the caller.
fn tick(&mut self) -> SceneCommand;
/// Does the rendering jobs. It may get called once after the `tick` call (but not mandatory,
/// for example, due to the frame drop).
fn render(&self);
/// Called when the scene is to be deactivated by the latest `tick` call. It is not called
/// when `activate` returns a non-`Continue` command and the scene becomes deactivated.
fn deactivate(&mut self);
/// Called when the scene is to be replaced by a new `Scene` due to the `ReplaceScene` command.
/// When called due to the `tick` call, this is called after `deactivate` call.
fn consume(self: Box<Self>) -> Box<Scene+'static>;
}
/// Runs given scene and other additionally spawned scenes.
pub fn run_scene(scene: Box<Scene+'static>) {
let mut current = scene;
let mut stack = Vec::new();
loop {
let mut result = current.activate();
match result {
SceneCommand::Continue => {
let opts = current.scene_options();
let mintickdelay = opts.tpslimit.map_or(0, |tps| 1000 / tps);
let interval = opts.fpslimit.map_or(0, |fps| 1000 / fps);
let mut ticker = Ticker::with_interval(interval);
loop {
let ticklimit = get_ticks() + mintickdelay;
result = current.tick();
match result {
SceneCommand::Continue => {
ticker.on_tick(get_ticks(), || { current.render(); });
}
_ => { break; }
}
let now = get_ticks();
if now < ticklimit { sleep(Duration::milliseconds((ticklimit - now) as i64)); }
}
current.deactivate();
}
_ => {}
}
match result {
SceneCommand::Continue => {
panic!("impossible");
}
SceneCommand::Push(newscene) => {
stack.push(current);
current = newscene;
}
SceneCommand::Replace => {
current = current.consume();
}
SceneCommand::Pop => {
if stack.is_empty() { break; }
current = stack.pop().unwrap();
}
SceneCommand::Exit => {
break;
}
}
}
}
| SceneOptions | identifier_name |
scene.rs | // This is a part of Sonorous.
// Copyright (c) 2005, 2007, 2009, 2012, 2013, 2014, Kang Seonghoon.
// See README.md and LICENSE.txt for details.
//! Scene management.
use std::io::timer::sleep;
use std::time::Duration;
use sdl::get_ticks;
use ui::common::Ticker;
/// Options used by the scene to customize the scene loop.
#[deriving(Clone)]
pub struct SceneOptions {
/// If specified, limits the number of `Scene::tick` calls per second to this value.
/// `run_scene` ensures this limitation by sleeping after each tick as needed.
pub tpslimit: Option<uint>,
/// If specified, limits the number of `Scene::render` calls per second to this value.
/// Due to the implementation strategy `tpslimit` takes precedence over this if specified.
pub fpslimit: Option<uint>,
}
impl SceneOptions {
/// Creates default options for the scene.
pub fn new() -> SceneOptions {
SceneOptions { tpslimit: None, fpslimit: None }
}
/// Replaces `tpslimit` field with given value.
pub fn tpslimit(self, tps: uint) -> SceneOptions {
SceneOptions { tpslimit: Some(tps), ..self }
}
/// Replaces `fpslimit` field with given value.
pub fn fpslimit(self, fps: uint) -> SceneOptions |
}
/// A command returned by `Scene`'s `tick` method.
pub enum SceneCommand {
/// Continues displaying this scene.
Continue,
/// Pushes a new `Scene` to the scene stack, making it the active scene. The current scene is
/// stopped (after calling `deactivate`) until the new scene returns `PopScene` command.
Push(Box<Scene+'static>),
/// Replaces the current scene with a new `Scene` that will be returned by `consume` method.
/// The command itself does not have a `Scene` argument since new scene may have to be
/// constructured out of the existing scene. Therefore the scene should be prepared for
/// multiple next scenes possible if any.
Replace,
/// Pops the current scene from the scene stack. The program exits if it was the only remaining
/// scene in the stack.
Pop,
/// Clears the scene stack, effectively ending the program.
Exit,
}
/// Scene interface.
pub trait Scene {
/// Called when the scene is to be activated, prior to the first `tick` call. May return
/// a non-`Continue` command to immediately deactivate the scene.
fn activate(&mut self) -> SceneCommand;
/// Returns the options for this scene. It is called *after* the `activate` call.
fn scene_options(&self) -> SceneOptions;
/// Does the event handling and internal logics, and returns a command to instruct the caller.
fn tick(&mut self) -> SceneCommand;
/// Does the rendering jobs. It may get called once after the `tick` call (but not mandatory,
/// for example, due to the frame drop).
fn render(&self);
/// Called when the scene is to be deactivated by the latest `tick` call. It is not called
/// when `activate` returns a non-`Continue` command and the scene becomes deactivated.
fn deactivate(&mut self);
/// Called when the scene is to be replaced by a new `Scene` due to the `ReplaceScene` command.
/// When called due to the `tick` call, this is called after `deactivate` call.
fn consume(self: Box<Self>) -> Box<Scene+'static>;
}
/// Runs given scene and other additionally spawned scenes.
pub fn run_scene(scene: Box<Scene+'static>) {
let mut current = scene;
let mut stack = Vec::new();
loop {
let mut result = current.activate();
match result {
SceneCommand::Continue => {
let opts = current.scene_options();
let mintickdelay = opts.tpslimit.map_or(0, |tps| 1000 / tps);
let interval = opts.fpslimit.map_or(0, |fps| 1000 / fps);
let mut ticker = Ticker::with_interval(interval);
loop {
let ticklimit = get_ticks() + mintickdelay;
result = current.tick();
match result {
SceneCommand::Continue => {
ticker.on_tick(get_ticks(), || { current.render(); });
}
_ => { break; }
}
let now = get_ticks();
if now < ticklimit { sleep(Duration::milliseconds((ticklimit - now) as i64)); }
}
current.deactivate();
}
_ => {}
}
match result {
SceneCommand::Continue => {
panic!("impossible");
}
SceneCommand::Push(newscene) => {
stack.push(current);
current = newscene;
}
SceneCommand::Replace => {
current = current.consume();
}
SceneCommand::Pop => {
if stack.is_empty() { break; }
current = stack.pop().unwrap();
}
SceneCommand::Exit => {
break;
}
}
}
}
| {
SceneOptions { fpslimit: Some(fps), ..self }
} | identifier_body |
scene.rs | // This is a part of Sonorous.
// Copyright (c) 2005, 2007, 2009, 2012, 2013, 2014, Kang Seonghoon.
// See README.md and LICENSE.txt for details.
//! Scene management.
use std::io::timer::sleep;
use std::time::Duration;
use sdl::get_ticks;
use ui::common::Ticker;
/// Options used by the scene to customize the scene loop.
#[deriving(Clone)]
pub struct SceneOptions {
/// If specified, limits the number of `Scene::tick` calls per second to this value.
/// `run_scene` ensures this limitation by sleeping after each tick as needed.
pub tpslimit: Option<uint>,
/// If specified, limits the number of `Scene::render` calls per second to this value.
/// Due to the implementation strategy `tpslimit` takes precedence over this if specified.
pub fpslimit: Option<uint>, | impl SceneOptions {
/// Creates default options for the scene.
pub fn new() -> SceneOptions {
SceneOptions { tpslimit: None, fpslimit: None }
}
/// Replaces `tpslimit` field with given value.
pub fn tpslimit(self, tps: uint) -> SceneOptions {
SceneOptions { tpslimit: Some(tps), ..self }
}
/// Replaces `fpslimit` field with given value.
pub fn fpslimit(self, fps: uint) -> SceneOptions {
SceneOptions { fpslimit: Some(fps), ..self }
}
}
/// A command returned by `Scene`'s `tick` method.
pub enum SceneCommand {
/// Continues displaying this scene.
Continue,
/// Pushes a new `Scene` to the scene stack, making it the active scene. The current scene is
/// stopped (after calling `deactivate`) until the new scene returns `PopScene` command.
Push(Box<Scene+'static>),
/// Replaces the current scene with a new `Scene` that will be returned by `consume` method.
/// The command itself does not have a `Scene` argument since new scene may have to be
/// constructured out of the existing scene. Therefore the scene should be prepared for
/// multiple next scenes possible if any.
Replace,
/// Pops the current scene from the scene stack. The program exits if it was the only remaining
/// scene in the stack.
Pop,
/// Clears the scene stack, effectively ending the program.
Exit,
}
/// Scene interface.
pub trait Scene {
/// Called when the scene is to be activated, prior to the first `tick` call. May return
/// a non-`Continue` command to immediately deactivate the scene.
fn activate(&mut self) -> SceneCommand;
/// Returns the options for this scene. It is called *after* the `activate` call.
fn scene_options(&self) -> SceneOptions;
/// Does the event handling and internal logics, and returns a command to instruct the caller.
fn tick(&mut self) -> SceneCommand;
/// Does the rendering jobs. It may get called once after the `tick` call (but not mandatory,
/// for example, due to the frame drop).
fn render(&self);
/// Called when the scene is to be deactivated by the latest `tick` call. It is not called
/// when `activate` returns a non-`Continue` command and the scene becomes deactivated.
fn deactivate(&mut self);
/// Called when the scene is to be replaced by a new `Scene` due to the `ReplaceScene` command.
/// When called due to the `tick` call, this is called after `deactivate` call.
fn consume(self: Box<Self>) -> Box<Scene+'static>;
}
/// Runs given scene and other additionally spawned scenes.
pub fn run_scene(scene: Box<Scene+'static>) {
let mut current = scene;
let mut stack = Vec::new();
loop {
let mut result = current.activate();
match result {
SceneCommand::Continue => {
let opts = current.scene_options();
let mintickdelay = opts.tpslimit.map_or(0, |tps| 1000 / tps);
let interval = opts.fpslimit.map_or(0, |fps| 1000 / fps);
let mut ticker = Ticker::with_interval(interval);
loop {
let ticklimit = get_ticks() + mintickdelay;
result = current.tick();
match result {
SceneCommand::Continue => {
ticker.on_tick(get_ticks(), || { current.render(); });
}
_ => { break; }
}
let now = get_ticks();
if now < ticklimit { sleep(Duration::milliseconds((ticklimit - now) as i64)); }
}
current.deactivate();
}
_ => {}
}
match result {
SceneCommand::Continue => {
panic!("impossible");
}
SceneCommand::Push(newscene) => {
stack.push(current);
current = newscene;
}
SceneCommand::Replace => {
current = current.consume();
}
SceneCommand::Pop => {
if stack.is_empty() { break; }
current = stack.pop().unwrap();
}
SceneCommand::Exit => {
break;
}
}
}
} | }
| random_line_split |
scene.rs | // This is a part of Sonorous.
// Copyright (c) 2005, 2007, 2009, 2012, 2013, 2014, Kang Seonghoon.
// See README.md and LICENSE.txt for details.
//! Scene management.
use std::io::timer::sleep;
use std::time::Duration;
use sdl::get_ticks;
use ui::common::Ticker;
/// Options used by the scene to customize the scene loop.
#[deriving(Clone)]
pub struct SceneOptions {
/// If specified, limits the number of `Scene::tick` calls per second to this value.
/// `run_scene` ensures this limitation by sleeping after each tick as needed.
pub tpslimit: Option<uint>,
/// If specified, limits the number of `Scene::render` calls per second to this value.
/// Due to the implementation strategy `tpslimit` takes precedence over this if specified.
pub fpslimit: Option<uint>,
}
impl SceneOptions {
/// Creates default options for the scene.
pub fn new() -> SceneOptions {
SceneOptions { tpslimit: None, fpslimit: None }
}
/// Replaces `tpslimit` field with given value.
pub fn tpslimit(self, tps: uint) -> SceneOptions {
SceneOptions { tpslimit: Some(tps), ..self }
}
/// Replaces `fpslimit` field with given value.
pub fn fpslimit(self, fps: uint) -> SceneOptions {
SceneOptions { fpslimit: Some(fps), ..self }
}
}
/// A command returned by `Scene`'s `tick` method.
pub enum SceneCommand {
/// Continues displaying this scene.
Continue,
/// Pushes a new `Scene` to the scene stack, making it the active scene. The current scene is
/// stopped (after calling `deactivate`) until the new scene returns `PopScene` command.
Push(Box<Scene+'static>),
/// Replaces the current scene with a new `Scene` that will be returned by `consume` method.
/// The command itself does not have a `Scene` argument since new scene may have to be
/// constructured out of the existing scene. Therefore the scene should be prepared for
/// multiple next scenes possible if any.
Replace,
/// Pops the current scene from the scene stack. The program exits if it was the only remaining
/// scene in the stack.
Pop,
/// Clears the scene stack, effectively ending the program.
Exit,
}
/// Scene interface.
pub trait Scene {
/// Called when the scene is to be activated, prior to the first `tick` call. May return
/// a non-`Continue` command to immediately deactivate the scene.
fn activate(&mut self) -> SceneCommand;
/// Returns the options for this scene. It is called *after* the `activate` call.
fn scene_options(&self) -> SceneOptions;
/// Does the event handling and internal logics, and returns a command to instruct the caller.
fn tick(&mut self) -> SceneCommand;
/// Does the rendering jobs. It may get called once after the `tick` call (but not mandatory,
/// for example, due to the frame drop).
fn render(&self);
/// Called when the scene is to be deactivated by the latest `tick` call. It is not called
/// when `activate` returns a non-`Continue` command and the scene becomes deactivated.
fn deactivate(&mut self);
/// Called when the scene is to be replaced by a new `Scene` due to the `ReplaceScene` command.
/// When called due to the `tick` call, this is called after `deactivate` call.
fn consume(self: Box<Self>) -> Box<Scene+'static>;
}
/// Runs given scene and other additionally spawned scenes.
pub fn run_scene(scene: Box<Scene+'static>) {
let mut current = scene;
let mut stack = Vec::new();
loop {
let mut result = current.activate();
match result {
SceneCommand::Continue => |
_ => {}
}
match result {
SceneCommand::Continue => {
panic!("impossible");
}
SceneCommand::Push(newscene) => {
stack.push(current);
current = newscene;
}
SceneCommand::Replace => {
current = current.consume();
}
SceneCommand::Pop => {
if stack.is_empty() { break; }
current = stack.pop().unwrap();
}
SceneCommand::Exit => {
break;
}
}
}
}
| {
let opts = current.scene_options();
let mintickdelay = opts.tpslimit.map_or(0, |tps| 1000 / tps);
let interval = opts.fpslimit.map_or(0, |fps| 1000 / fps);
let mut ticker = Ticker::with_interval(interval);
loop {
let ticklimit = get_ticks() + mintickdelay;
result = current.tick();
match result {
SceneCommand::Continue => {
ticker.on_tick(get_ticks(), || { current.render(); });
}
_ => { break; }
}
let now = get_ticks();
if now < ticklimit { sleep(Duration::milliseconds((ticklimit - now) as i64)); }
}
current.deactivate();
} | conditional_block |
price.service.ts | import * as memotyCache from 'memory-cache';
import * as moment from 'moment';
import * as Bluebird from 'bluebird';
import * as _ from 'lodash';
import axios from 'axios';
import { HubData } from '../../eve-client/api/id-names-mapper';
const PRICE_ENDPOINT = 'https://esi.tech.ccp.is/latest/markets/{regionId}/orders/?type_id={itemId}';
export class PriceServiceResponse {
sell: PriceResponse;
buy: PriceResponse;
}
export function getPriceForItemOnStation(itemId: number, regionId: number, stationId: number) {
let priceSearchKey = '' + itemId + regionId;
let pricesOrError: PriceResponse[] & { code: number } = memotyCache.get(priceSearchKey);
if (pricesOrError) {
console.info(`price for ${priceSearchKey} has been found in cache, skipping CCP call`);
if (pricesOrError.code && pricesOrError.code === 404) {
return Bluebird.reject(pricesOrError);
}
return Bluebird.resolve(filterPrices(pricesOrError, stationId));
}
console.info(`price for ${priceSearchKey} not found in cache, executing CCP call`);
return new Bluebird<PriceServiceResponse>((resolve, reject) => {
axios.get(PRICE_ENDPOINT.replace('{regionId}', regionId.toString()).replace('{itemId}', itemId.toString()))
.then(result => {
let expires = moment(result.headers['expires'] + '+0000', 'ddd, DD MMM YYYY HH:mm:ss Z');
let diff = expires.diff(moment());
if (result.data.length === 0) {
memotyCache.put(priceSearchKey, { code: 404 }, diff);
reject({ code: 404 });
return;
}
memotyCache.put(priceSearchKey, result.data, diff);
console.info(`cache key ${priceSearchKey} has been added with ${(diff / 1000).toFixed(0)}s TTL`);
resolve(filterPrices(result.data, stationId));
})
.catch(err => {
console.error(err);
reject(err);
});
});
}
function filterPrices(prices: PriceResponse[], stationId: number): PriceServiceResponse {
let result = new PriceServiceResponse(); | result.sell = _.minBy(_.filter(prices, (order) => {
return order.location_id === stationId && !order.is_buy_order;
}), record => record.price) || prices[0];
return result;
}
export interface PriceResponse {
order_id: number;
type_id: number;
location_id: number;
volume_total: number;
volume_remain: number;
min_volume: number;
price: number;
is_buy_order: number;
duration: number;
issued: string;
range: string;
} | result.buy = _.maxBy(_.filter(prices, (order) => {
return order.location_id === stationId && order.is_buy_order;
}), record => record.price) || prices[0]; | random_line_split |
price.service.ts | import * as memotyCache from 'memory-cache';
import * as moment from 'moment';
import * as Bluebird from 'bluebird';
import * as _ from 'lodash';
import axios from 'axios';
import { HubData } from '../../eve-client/api/id-names-mapper';
const PRICE_ENDPOINT = 'https://esi.tech.ccp.is/latest/markets/{regionId}/orders/?type_id={itemId}';
export class PriceServiceResponse {
sell: PriceResponse;
buy: PriceResponse;
}
export function getPriceForItemOnStation(itemId: number, regionId: number, stationId: number) {
let priceSearchKey = '' + itemId + regionId;
let pricesOrError: PriceResponse[] & { code: number } = memotyCache.get(priceSearchKey);
if (pricesOrError) {
console.info(`price for ${priceSearchKey} has been found in cache, skipping CCP call`);
if (pricesOrError.code && pricesOrError.code === 404) {
return Bluebird.reject(pricesOrError);
}
return Bluebird.resolve(filterPrices(pricesOrError, stationId));
}
console.info(`price for ${priceSearchKey} not found in cache, executing CCP call`);
return new Bluebird<PriceServiceResponse>((resolve, reject) => {
axios.get(PRICE_ENDPOINT.replace('{regionId}', regionId.toString()).replace('{itemId}', itemId.toString()))
.then(result => {
let expires = moment(result.headers['expires'] + '+0000', 'ddd, DD MMM YYYY HH:mm:ss Z');
let diff = expires.diff(moment());
if (result.data.length === 0) {
memotyCache.put(priceSearchKey, { code: 404 }, diff);
reject({ code: 404 });
return;
}
memotyCache.put(priceSearchKey, result.data, diff);
console.info(`cache key ${priceSearchKey} has been added with ${(diff / 1000).toFixed(0)}s TTL`);
resolve(filterPrices(result.data, stationId));
})
.catch(err => {
console.error(err);
reject(err);
});
});
}
function filterPrices(prices: PriceResponse[], stationId: number): PriceServiceResponse |
export interface PriceResponse {
order_id: number;
type_id: number;
location_id: number;
volume_total: number;
volume_remain: number;
min_volume: number;
price: number;
is_buy_order: number;
duration: number;
issued: string;
range: string;
} | {
let result = new PriceServiceResponse();
result.buy = _.maxBy(_.filter(prices, (order) => {
return order.location_id === stationId && order.is_buy_order;
}), record => record.price) || prices[0];
result.sell = _.minBy(_.filter(prices, (order) => {
return order.location_id === stationId && !order.is_buy_order;
}), record => record.price) || prices[0];
return result;
} | identifier_body |
price.service.ts | import * as memotyCache from 'memory-cache';
import * as moment from 'moment';
import * as Bluebird from 'bluebird';
import * as _ from 'lodash';
import axios from 'axios';
import { HubData } from '../../eve-client/api/id-names-mapper';
const PRICE_ENDPOINT = 'https://esi.tech.ccp.is/latest/markets/{regionId}/orders/?type_id={itemId}';
export class PriceServiceResponse {
sell: PriceResponse;
buy: PriceResponse;
}
export function getPriceForItemOnStation(itemId: number, regionId: number, stationId: number) {
let priceSearchKey = '' + itemId + regionId;
let pricesOrError: PriceResponse[] & { code: number } = memotyCache.get(priceSearchKey);
if (pricesOrError) |
console.info(`price for ${priceSearchKey} not found in cache, executing CCP call`);
return new Bluebird<PriceServiceResponse>((resolve, reject) => {
axios.get(PRICE_ENDPOINT.replace('{regionId}', regionId.toString()).replace('{itemId}', itemId.toString()))
.then(result => {
let expires = moment(result.headers['expires'] + '+0000', 'ddd, DD MMM YYYY HH:mm:ss Z');
let diff = expires.diff(moment());
if (result.data.length === 0) {
memotyCache.put(priceSearchKey, { code: 404 }, diff);
reject({ code: 404 });
return;
}
memotyCache.put(priceSearchKey, result.data, diff);
console.info(`cache key ${priceSearchKey} has been added with ${(diff / 1000).toFixed(0)}s TTL`);
resolve(filterPrices(result.data, stationId));
})
.catch(err => {
console.error(err);
reject(err);
});
});
}
function filterPrices(prices: PriceResponse[], stationId: number): PriceServiceResponse {
let result = new PriceServiceResponse();
result.buy = _.maxBy(_.filter(prices, (order) => {
return order.location_id === stationId && order.is_buy_order;
}), record => record.price) || prices[0];
result.sell = _.minBy(_.filter(prices, (order) => {
return order.location_id === stationId && !order.is_buy_order;
}), record => record.price) || prices[0];
return result;
}
export interface PriceResponse {
order_id: number;
type_id: number;
location_id: number;
volume_total: number;
volume_remain: number;
min_volume: number;
price: number;
is_buy_order: number;
duration: number;
issued: string;
range: string;
} | {
console.info(`price for ${priceSearchKey} has been found in cache, skipping CCP call`);
if (pricesOrError.code && pricesOrError.code === 404) {
return Bluebird.reject(pricesOrError);
}
return Bluebird.resolve(filterPrices(pricesOrError, stationId));
} | conditional_block |
price.service.ts | import * as memotyCache from 'memory-cache';
import * as moment from 'moment';
import * as Bluebird from 'bluebird';
import * as _ from 'lodash';
import axios from 'axios';
import { HubData } from '../../eve-client/api/id-names-mapper';
const PRICE_ENDPOINT = 'https://esi.tech.ccp.is/latest/markets/{regionId}/orders/?type_id={itemId}';
export class | {
sell: PriceResponse;
buy: PriceResponse;
}
export function getPriceForItemOnStation(itemId: number, regionId: number, stationId: number) {
let priceSearchKey = '' + itemId + regionId;
let pricesOrError: PriceResponse[] & { code: number } = memotyCache.get(priceSearchKey);
if (pricesOrError) {
console.info(`price for ${priceSearchKey} has been found in cache, skipping CCP call`);
if (pricesOrError.code && pricesOrError.code === 404) {
return Bluebird.reject(pricesOrError);
}
return Bluebird.resolve(filterPrices(pricesOrError, stationId));
}
console.info(`price for ${priceSearchKey} not found in cache, executing CCP call`);
return new Bluebird<PriceServiceResponse>((resolve, reject) => {
axios.get(PRICE_ENDPOINT.replace('{regionId}', regionId.toString()).replace('{itemId}', itemId.toString()))
.then(result => {
let expires = moment(result.headers['expires'] + '+0000', 'ddd, DD MMM YYYY HH:mm:ss Z');
let diff = expires.diff(moment());
if (result.data.length === 0) {
memotyCache.put(priceSearchKey, { code: 404 }, diff);
reject({ code: 404 });
return;
}
memotyCache.put(priceSearchKey, result.data, diff);
console.info(`cache key ${priceSearchKey} has been added with ${(diff / 1000).toFixed(0)}s TTL`);
resolve(filterPrices(result.data, stationId));
})
.catch(err => {
console.error(err);
reject(err);
});
});
}
function filterPrices(prices: PriceResponse[], stationId: number): PriceServiceResponse {
let result = new PriceServiceResponse();
result.buy = _.maxBy(_.filter(prices, (order) => {
return order.location_id === stationId && order.is_buy_order;
}), record => record.price) || prices[0];
result.sell = _.minBy(_.filter(prices, (order) => {
return order.location_id === stationId && !order.is_buy_order;
}), record => record.price) || prices[0];
return result;
}
export interface PriceResponse {
order_id: number;
type_id: number;
location_id: number;
volume_total: number;
volume_remain: number;
min_volume: number;
price: number;
is_buy_order: number;
duration: number;
issued: string;
range: string;
} | PriceServiceResponse | identifier_name |
cssbag.ts | /** The MIT License (MIT) Copyright(c) 2016 Maxim V.Tsapov */
import {
Utils, BasePropBag
} from "jriapp_shared";
import { DomUtils } from "jriapp/utils/dom";
const utils = Utils, checks = utils.check, dom = DomUtils;
// wraps HTMLElement to add or remove classNames using data binding
export class CSSBag extends BasePropBag {
private _el: Element;
constructor(el: Element) {
super();
this._el = el;
}
//implement IPropertyBag
setProp(name: string, val: any): void {
if (val === checks.undefined)
return;
if (name === "*") {
if (!val) {
//remove all classes
dom.removeClass([this._el], null);
}
else if (checks.isArray(val)) {
| else if (checks.isString(val)) {
dom.setClasses([this._el], val.split(" "));
}
return;
}
//set individual classes
dom.setClass([this._el], name, !val);
}
toString() {
return "CSSBag";
}
} | dom.setClasses([this._el], <string[]>val);
}
| conditional_block |
cssbag.ts | /** The MIT License (MIT) Copyright(c) 2016 Maxim V.Tsapov */
import {
Utils, BasePropBag
} from "jriapp_shared";
import { DomUtils } from "jriapp/utils/dom";
const utils = Utils, checks = utils.check, dom = DomUtils;
// wraps HTMLElement to add or remove classNames using data binding
export class CSSBag extends BasePropBag {
private _el: Element;
constructor(el: Element) {
super();
this._el = el;
}
//implement IPropertyBag
setProp(name: string, val: any): void {
if (val === checks.undefined)
return;
| if (name === "*") {
if (!val) {
//remove all classes
dom.removeClass([this._el], null);
}
else if (checks.isArray(val)) {
dom.setClasses([this._el], <string[]>val);
}
else if (checks.isString(val)) {
dom.setClasses([this._el], val.split(" "));
}
return;
}
//set individual classes
dom.setClass([this._el], name, !val);
}
toString() {
return "CSSBag";
}
} | random_line_split | |
cssbag.ts | /** The MIT License (MIT) Copyright(c) 2016 Maxim V.Tsapov */
import {
Utils, BasePropBag
} from "jriapp_shared";
import { DomUtils } from "jriapp/utils/dom";
const utils = Utils, checks = utils.check, dom = DomUtils;
// wraps HTMLElement to add or remove classNames using data binding
export class CSSBag extends BasePropBag {
private _el: Element;
constructor(el: Element) {
| //implement IPropertyBag
setProp(name: string, val: any): void {
if (val === checks.undefined)
return;
if (name === "*") {
if (!val) {
//remove all classes
dom.removeClass([this._el], null);
}
else if (checks.isArray(val)) {
dom.setClasses([this._el], <string[]>val);
}
else if (checks.isString(val)) {
dom.setClasses([this._el], val.split(" "));
}
return;
}
//set individual classes
dom.setClass([this._el], name, !val);
}
toString() {
return "CSSBag";
}
} | super();
this._el = el;
}
| identifier_body |
cssbag.ts | /** The MIT License (MIT) Copyright(c) 2016 Maxim V.Tsapov */
import {
Utils, BasePropBag
} from "jriapp_shared";
import { DomUtils } from "jriapp/utils/dom";
const utils = Utils, checks = utils.check, dom = DomUtils;
// wraps HTMLElement to add or remove classNames using data binding
export class CSSBag extends BasePropBag {
private _el: Element;
constructor(el: Element) {
super();
this._el = el;
}
//implement IPropertyBag
setProp(name: string, val: any): void {
if (val === checks.undefined)
return;
if (name === "*") {
if (!val) {
//remove all classes
dom.removeClass([this._el], null);
}
else if (checks.isArray(val)) {
dom.setClasses([this._el], <string[]>val);
}
else if (checks.isString(val)) {
dom.setClasses([this._el], val.split(" "));
}
return;
}
//set individual classes
dom.setClass([this._el], name, !val);
}
to | {
return "CSSBag";
}
} | String() | identifier_name |
setup.py | from __future__ import unicode_literals
import re
from setuptools import find_packages, setup
def get_version(filename):
content = open(filename).read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content))
return metadata['version']
setup(
name='Mopidy-Lcdplate',
version=get_version('mopidy_lcdplate/__init__.py'),
url='https://github.com/gimunu/mopidy-lcdplate',
license='Apache License, Version 2.0',
author='Umberto De Giovannini',
author_email='umberto.degiovannini@gmail.com',
description='Modipy extension for Adafruit lcd plate',
long_description=open('README.rst').read(),
packages=find_packages(exclude=['tests', 'tests.*']),
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'Mopidy >= 0.18',
'Pykka >= 1.1',
],
test_suite='nose.collector',
tests_require=[
'nose', | ],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
) | 'mock >= 1.0',
],
entry_points={
'mopidy.ext': [
'lcdplate = mopidy_lcdplate:Extension', | random_line_split |
setup.py | from __future__ import unicode_literals
import re
from setuptools import find_packages, setup
def get_version(filename):
|
setup(
name='Mopidy-Lcdplate',
version=get_version('mopidy_lcdplate/__init__.py'),
url='https://github.com/gimunu/mopidy-lcdplate',
license='Apache License, Version 2.0',
author='Umberto De Giovannini',
author_email='umberto.degiovannini@gmail.com',
description='Modipy extension for Adafruit lcd plate',
long_description=open('README.rst').read(),
packages=find_packages(exclude=['tests', 'tests.*']),
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'Mopidy >= 0.18',
'Pykka >= 1.1',
],
test_suite='nose.collector',
tests_require=[
'nose',
'mock >= 1.0',
],
entry_points={
'mopidy.ext': [
'lcdplate = mopidy_lcdplate:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
) | content = open(filename).read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content))
return metadata['version'] | identifier_body |
setup.py | from __future__ import unicode_literals
import re
from setuptools import find_packages, setup
def | (filename):
content = open(filename).read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content))
return metadata['version']
setup(
name='Mopidy-Lcdplate',
version=get_version('mopidy_lcdplate/__init__.py'),
url='https://github.com/gimunu/mopidy-lcdplate',
license='Apache License, Version 2.0',
author='Umberto De Giovannini',
author_email='umberto.degiovannini@gmail.com',
description='Modipy extension for Adafruit lcd plate',
long_description=open('README.rst').read(),
packages=find_packages(exclude=['tests', 'tests.*']),
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'Mopidy >= 0.18',
'Pykka >= 1.1',
],
test_suite='nose.collector',
tests_require=[
'nose',
'mock >= 1.0',
],
entry_points={
'mopidy.ext': [
'lcdplate = mopidy_lcdplate:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
) | get_version | identifier_name |
animatedText.ts | /*
* Power BI Visualizations
*
* Copyright (c) Microsoft Corporation
* All rights reserved.
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the ""Software""), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/// <reference path="../_references.ts"/>
module powerbi.visuals {
export interface AnimatedTextConfigurationSettings {
align?: string;
maxFontSize?: number;
}
/**
* Base class for values that are animated when resized.
*/
export class AnimatedText {
/** Note: Public for testability */
public static formatStringProp: DataViewObjectPropertyIdentifier = {
objectName: 'general',
propertyName: 'formatString',
};
protected animator: IGenericAnimator;
private name: string;
/** Note: Public for testability */
public svg: D3.Selection;
public currentViewport: IViewport;
public value: any;
public hostServices: IVisualHostServices;
public style: IVisualStyle;
public visualConfiguration: AnimatedTextConfigurationSettings;
public metaDataColumn: DataViewMetadataColumn;
private mainText: ClassAndSelector = {
class: 'mainText',
selector: '.mainText'
};
public constructor(name: string) {
this.name = name;
this.visualConfiguration = { maxFontSize: 60 };
}
public getMetaDataColumn(dataView: DataView) {
if (dataView && dataView.metadata && dataView.metadata.columns) {
for (let i = 0, ilen = dataView.metadata.columns.length; i < ilen; i++) {
let column = dataView.metadata.columns[i];
if (column.isMeasure) {
this.metaDataColumn = column;
break;
}
}
}
}
public getAdjustedFontHeight(
availableWidth: number,
textToMeasure: string,
seedFontHeight: number): number {
// set up the node so we don't keep appending/removing it during the computation
let nodeSelection = this.svg.append('text').text(textToMeasure);
let fontHeight = this.getAdjustedFontHeightCore(
nodeSelection,
availableWidth,
seedFontHeight,
0);
nodeSelection.remove();
return fontHeight;
}
private ge | nodeToMeasure: D3.Selection,
availableWidth: number,
seedFontHeight: number,
iteration: number): number {
// Too many attempts - just return what we have so we don't sacrifice perf
if (iteration > 10)
return seedFontHeight;
nodeToMeasure.attr('font-size', seedFontHeight);
let candidateLength = TextMeasurementService.measureSvgTextElementWidth(nodeToMeasure[0][0]);
if (candidateLength < availableWidth)
return seedFontHeight;
return this.getAdjustedFontHeightCore(nodeToMeasure, availableWidth, seedFontHeight * 0.9, iteration + 1);
}
public clear() {
this.svg.select(this.mainText.selector).text('');
}
public doValueTransition(
startValue: any,
endValue: any,
displayUnitSystemType: DisplayUnitSystemType,
animationOptions: AnimationOptions,
duration: number,
forceUpdate: boolean,
formatter?: IValueFormatter): void {
if (!forceUpdate && startValue === endValue && endValue != null)
return;
if (!startValue)
startValue = 0;
let svg = this.svg,
viewport = this.currentViewport,
height = viewport.height,
width = viewport.width,
endValueArr = [endValue],
seedFontHeight = this.getSeedFontHeight(width, height),
translateX = this.getTranslateX(width),
translateY = this.getTranslateY(seedFontHeight),
metaDataColumn = this.metaDataColumn;
// Respect the formatter default value
if (!formatter) {
formatter = valueFormatter.create({
format: this.getFormatString(metaDataColumn),
value: endValue,
displayUnitSystemType: displayUnitSystemType,
formatSingleValues: true,
allowFormatBeautification: true,
columnType: metaDataColumn ? metaDataColumn.type : undefined
});
}
let startText = formatter.format(startValue),
endText = formatter.format(endValue);
svg.attr('class', this.name);
let textElement = svg
.selectAll('text')
.data(endValueArr);
textElement
.enter()
.append('text')
.attr('class', this.mainText.class);
let fontHeight = this.getAdjustedFontHeight(width, endText, seedFontHeight);
translateY = this.getTranslateY(fontHeight + (height - fontHeight) / 2);
let textElementUpdate = textElement
.text(startText)
.attr({
'text-anchor': this.getTextAnchor(),
'font-size': fontHeight,
'transform': SVGUtil.translate(translateX, translateY)
});
if (endValue == null) {
textElementUpdate.text(endText);
}
else if (metaDataColumn && AxisHelper.isDateTime(metaDataColumn.type)) {
textElementUpdate.text(endText);
}
else {
let interpolatedValue = startValue;
textElementUpdate
.transition()
.duration(duration)
.tween('text', function (d) {
let i = d3.interpolate(interpolatedValue, d);
return function (t) {
let num = i(t);
this.textContent = formatter.format(num);
};
});
}
SVGUtil.flushAllD3TransitionsIfNeeded(animationOptions);
}
public getSeedFontHeight(boundingWidth: number, boundingHeight: number) {
// Simply an estimate - it should eventually be modified based on the actual text length
let estimatedSize = Math.floor(Math.min(boundingWidth, boundingHeight) * 0.75);
let maxFontSize = this.visualConfiguration.maxFontSize;
if (maxFontSize)
return Math.min(maxFontSize, estimatedSize);
return estimatedSize;
}
public getTranslateX(width: number): number {
if (this.visualConfiguration) {
switch (this.visualConfiguration.align) {
case 'left':
return 0;
case 'right':
return width;
}
}
return width / 2;
}
public getTranslateY(height: number): number {
return height;
}
public getTextAnchor(): string {
if (this.visualConfiguration) {
switch (this.visualConfiguration.align) {
case 'left':
return 'start';
case 'right':
return 'end';
}
}
return 'middle';
}
protected getFormatString(column: DataViewMetadataColumn): string {
debug.assertAnyValue(column, 'column');
return valueFormatter.getFormatString(column, AnimatedText.formatStringProp);
}
}
} | tAdjustedFontHeightCore(
| identifier_name |
animatedText.ts | /*
* Power BI Visualizations
*
* Copyright (c) Microsoft Corporation
* All rights reserved.
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the ""Software""), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/// <reference path="../_references.ts"/>
module powerbi.visuals {
export interface AnimatedTextConfigurationSettings {
align?: string;
maxFontSize?: number;
}
/**
* Base class for values that are animated when resized.
*/
export class AnimatedText {
/** Note: Public for testability */
public static formatStringProp: DataViewObjectPropertyIdentifier = {
objectName: 'general',
propertyName: 'formatString',
};
protected animator: IGenericAnimator;
private name: string;
/** Note: Public for testability */
public svg: D3.Selection;
public currentViewport: IViewport;
public value: any;
public hostServices: IVisualHostServices;
public style: IVisualStyle;
public visualConfiguration: AnimatedTextConfigurationSettings;
public metaDataColumn: DataViewMetadataColumn;
private mainText: ClassAndSelector = {
class: 'mainText',
selector: '.mainText'
};
public constructor(name: string) {
this.name = name;
this.visualConfiguration = { maxFontSize: 60 };
}
public getMetaDataColumn(dataView: DataView) {
if (dataView && dataView.metadata && dataView.metadata.columns) {
for (let i = 0, ilen = dataView.metadata.columns.length; i < ilen; i++) {
let column = dataView.metadata.columns[i];
if (column.isMeasure) {
this.metaDataColumn = column;
break;
}
}
}
}
public getAdjustedFontHeight(
availableWidth: number,
textToMeasure: string,
seedFontHeight: number): number {
// set up the node so we don't keep appending/removing it during the computation
let nodeSelection = this.svg.append('text').text(textToMeasure);
let fontHeight = this.getAdjustedFontHeightCore(
nodeSelection,
availableWidth,
seedFontHeight,
0);
nodeSelection.remove();
return fontHeight;
}
private getAdjustedFontHeightCore(
nodeToMeasure: D3.Selection,
availableWidth: number,
seedFontHeight: number,
iteration: number): number {
// Too many attempts - just return what we have so we don't sacrifice perf
if (iteration > 10)
return seedFontHeight;
nodeToMeasure.attr('font-size', seedFontHeight);
let candidateLength = TextMeasurementService.measureSvgTextElementWidth(nodeToMeasure[0][0]);
if (candidateLength < availableWidth)
return seedFontHeight;
return this.getAdjustedFontHeightCore(nodeToMeasure, availableWidth, seedFontHeight * 0.9, iteration + 1);
}
public clear() {
this.svg.select(this.mainText.selector).text('');
}
public doValueTransition(
startValue: any,
endValue: any,
displayUnitSystemType: DisplayUnitSystemType,
animationOptions: AnimationOptions,
duration: number,
forceUpdate: boolean,
formatter?: IValueFormatter): void {
if (!forceUpdate && startValue === endValue && endValue != null)
return;
if (!startValue)
startValue = 0;
let svg = this.svg,
viewport = this.currentViewport,
height = viewport.height,
width = viewport.width,
endValueArr = [endValue],
seedFontHeight = this.getSeedFontHeight(width, height),
translateX = this.getTranslateX(width),
translateY = this.getTranslateY(seedFontHeight),
metaDataColumn = this.metaDataColumn;
// Respect the formatter default value
if (!formatter) {
formatter = valueFormatter.create({
format: this.getFormatString(metaDataColumn),
value: endValue,
displayUnitSystemType: displayUnitSystemType,
formatSingleValues: true,
allowFormatBeautification: true,
columnType: metaDataColumn ? metaDataColumn.type : undefined
});
}
let startText = formatter.format(startValue),
endText = formatter.format(endValue);
svg.attr('class', this.name);
let textElement = svg
.selectAll('text')
.data(endValueArr);
textElement
.enter()
.append('text')
.attr('class', this.mainText.class);
let fontHeight = this.getAdjustedFontHeight(width, endText, seedFontHeight);
translateY = this.getTranslateY(fontHeight + (height - fontHeight) / 2);
let textElementUpdate = textElement
.text(startText)
.attr({
'text-anchor': this.getTextAnchor(),
'font-size': fontHeight,
'transform': SVGUtil.translate(translateX, translateY)
});
if (endValue == null) {
textElementUpdate.text(endText);
}
else if (metaDataColumn && AxisHelper.isDateTime(metaDataColumn.type)) {
textElementUpdate.text(endText);
}
else { | let i = d3.interpolate(interpolatedValue, d);
return function (t) {
let num = i(t);
this.textContent = formatter.format(num);
};
});
}
SVGUtil.flushAllD3TransitionsIfNeeded(animationOptions);
}
public getSeedFontHeight(boundingWidth: number, boundingHeight: number) {
// Simply an estimate - it should eventually be modified based on the actual text length
let estimatedSize = Math.floor(Math.min(boundingWidth, boundingHeight) * 0.75);
let maxFontSize = this.visualConfiguration.maxFontSize;
if (maxFontSize)
return Math.min(maxFontSize, estimatedSize);
return estimatedSize;
}
public getTranslateX(width: number): number {
if (this.visualConfiguration) {
switch (this.visualConfiguration.align) {
case 'left':
return 0;
case 'right':
return width;
}
}
return width / 2;
}
public getTranslateY(height: number): number {
return height;
}
public getTextAnchor(): string {
if (this.visualConfiguration) {
switch (this.visualConfiguration.align) {
case 'left':
return 'start';
case 'right':
return 'end';
}
}
return 'middle';
}
protected getFormatString(column: DataViewMetadataColumn): string {
debug.assertAnyValue(column, 'column');
return valueFormatter.getFormatString(column, AnimatedText.formatStringProp);
}
}
} | let interpolatedValue = startValue;
textElementUpdate
.transition()
.duration(duration)
.tween('text', function (d) { | random_line_split |
animatedText.ts | /*
* Power BI Visualizations
*
* Copyright (c) Microsoft Corporation
* All rights reserved.
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the ""Software""), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/// <reference path="../_references.ts"/>
module powerbi.visuals {
export interface AnimatedTextConfigurationSettings {
align?: string;
maxFontSize?: number;
}
/**
* Base class for values that are animated when resized.
*/
export class AnimatedText {
/** Note: Public for testability */
public static formatStringProp: DataViewObjectPropertyIdentifier = {
objectName: 'general',
propertyName: 'formatString',
};
protected animator: IGenericAnimator;
private name: string;
/** Note: Public for testability */
public svg: D3.Selection;
public currentViewport: IViewport;
public value: any;
public hostServices: IVisualHostServices;
public style: IVisualStyle;
public visualConfiguration: AnimatedTextConfigurationSettings;
public metaDataColumn: DataViewMetadataColumn;
private mainText: ClassAndSelector = {
class: 'mainText',
selector: '.mainText'
};
public constructor(name: string) {
this.name = name;
this.visualConfiguration = { maxFontSize: 60 };
}
public getMetaDataColumn(dataView: DataView) {
if (dataView && dataView.metadata && dataView.metadata.columns) {
for (let i = 0, ilen = dataView.metadata.columns.length; i < ilen; i++) {
let column = dataView.metadata.columns[i];
if (column.isMeasure) {
this.metaDataColumn = column;
break;
}
}
}
}
public getAdjustedFontHeight(
availableWidth: number,
textToMeasure: string,
seedFontHeight: number): number {
| private getAdjustedFontHeightCore(
nodeToMeasure: D3.Selection,
availableWidth: number,
seedFontHeight: number,
iteration: number): number {
// Too many attempts - just return what we have so we don't sacrifice perf
if (iteration > 10)
return seedFontHeight;
nodeToMeasure.attr('font-size', seedFontHeight);
let candidateLength = TextMeasurementService.measureSvgTextElementWidth(nodeToMeasure[0][0]);
if (candidateLength < availableWidth)
return seedFontHeight;
return this.getAdjustedFontHeightCore(nodeToMeasure, availableWidth, seedFontHeight * 0.9, iteration + 1);
}
public clear() {
this.svg.select(this.mainText.selector).text('');
}
public doValueTransition(
startValue: any,
endValue: any,
displayUnitSystemType: DisplayUnitSystemType,
animationOptions: AnimationOptions,
duration: number,
forceUpdate: boolean,
formatter?: IValueFormatter): void {
if (!forceUpdate && startValue === endValue && endValue != null)
return;
if (!startValue)
startValue = 0;
let svg = this.svg,
viewport = this.currentViewport,
height = viewport.height,
width = viewport.width,
endValueArr = [endValue],
seedFontHeight = this.getSeedFontHeight(width, height),
translateX = this.getTranslateX(width),
translateY = this.getTranslateY(seedFontHeight),
metaDataColumn = this.metaDataColumn;
// Respect the formatter default value
if (!formatter) {
formatter = valueFormatter.create({
format: this.getFormatString(metaDataColumn),
value: endValue,
displayUnitSystemType: displayUnitSystemType,
formatSingleValues: true,
allowFormatBeautification: true,
columnType: metaDataColumn ? metaDataColumn.type : undefined
});
}
let startText = formatter.format(startValue),
endText = formatter.format(endValue);
svg.attr('class', this.name);
let textElement = svg
.selectAll('text')
.data(endValueArr);
textElement
.enter()
.append('text')
.attr('class', this.mainText.class);
let fontHeight = this.getAdjustedFontHeight(width, endText, seedFontHeight);
translateY = this.getTranslateY(fontHeight + (height - fontHeight) / 2);
let textElementUpdate = textElement
.text(startText)
.attr({
'text-anchor': this.getTextAnchor(),
'font-size': fontHeight,
'transform': SVGUtil.translate(translateX, translateY)
});
if (endValue == null) {
textElementUpdate.text(endText);
}
else if (metaDataColumn && AxisHelper.isDateTime(metaDataColumn.type)) {
textElementUpdate.text(endText);
}
else {
let interpolatedValue = startValue;
textElementUpdate
.transition()
.duration(duration)
.tween('text', function (d) {
let i = d3.interpolate(interpolatedValue, d);
return function (t) {
let num = i(t);
this.textContent = formatter.format(num);
};
});
}
SVGUtil.flushAllD3TransitionsIfNeeded(animationOptions);
}
public getSeedFontHeight(boundingWidth: number, boundingHeight: number) {
// Simply an estimate - it should eventually be modified based on the actual text length
let estimatedSize = Math.floor(Math.min(boundingWidth, boundingHeight) * 0.75);
let maxFontSize = this.visualConfiguration.maxFontSize;
if (maxFontSize)
return Math.min(maxFontSize, estimatedSize);
return estimatedSize;
}
public getTranslateX(width: number): number {
if (this.visualConfiguration) {
switch (this.visualConfiguration.align) {
case 'left':
return 0;
case 'right':
return width;
}
}
return width / 2;
}
public getTranslateY(height: number): number {
return height;
}
public getTextAnchor(): string {
if (this.visualConfiguration) {
switch (this.visualConfiguration.align) {
case 'left':
return 'start';
case 'right':
return 'end';
}
}
return 'middle';
}
protected getFormatString(column: DataViewMetadataColumn): string {
debug.assertAnyValue(column, 'column');
return valueFormatter.getFormatString(column, AnimatedText.formatStringProp);
}
}
} |
// set up the node so we don't keep appending/removing it during the computation
let nodeSelection = this.svg.append('text').text(textToMeasure);
let fontHeight = this.getAdjustedFontHeightCore(
nodeSelection,
availableWidth,
seedFontHeight,
0);
nodeSelection.remove();
return fontHeight;
}
| identifier_body |
controller.py | #
# controller.py
#
# Copyright (C) 2013-2014 Ashwin Menon <ashwin.menon@gmail.com>
# Copyright (C) 2015-2018 Track Master Steve <trackmastersteve@gmail.com>
#
# Alienfx is free software.
#
# You may redistribute it and/or modify it under the terms of the
# GNU General Public License, as published by the Free Software
# Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# Alienfx is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with alienfx. If not, write to:
# The Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor
# Boston, MA 02110-1301, USA.
#
""" Base classes for AlienFX controller chips. These must be subclassed for
specific controllers.
This module provides the following classes:
AlienFXController: base class for AlienFX controller chips
"""
from builtins import hex
from builtins import object
import logging
import alienfx.core.usbdriver as alienfx_usbdriver
import alienfx.core.cmdpacket as alienfx_cmdpacket
from alienfx.core.themefile import AlienFXThemeFile
from functools import reduce
class AlienFXController(object):
""" Provides facilities to communicate with an AlienFX controller.
This class provides methods to send commands to an AlienFX controller, and
receive status from the controller. It must be overridden to provide
behaviour specific to a particular AlienFX controller.
"""
# List of all subclasses of this class. Subclasses must add instances of
# themselves to this list. See README for details.
supported_controllers = []
# Zone names
ZONE_LEFT_KEYBOARD = "Left Keyboard"
ZONE_MIDDLE_LEFT_KEYBOARD = "Middle-left Keyboard"
ZONE_MIDDLE_RIGHT_KEYBOARD = "Middle-right Keyboard"
ZONE_RIGHT_KEYBOARD = "Right Keyboard"
ZONE_RIGHT_SPEAKER = "Right Speaker"
ZONE_LEFT_SPEAKER = "Left Speaker"
ZONE_ALIEN_HEAD = "Alien Head"
ZONE_LOGO = "Logo"
ZONE_TOUCH_PAD = "Touchpad"
ZONE_MEDIA_BAR = "Media Bar"
ZONE_STATUS_LEDS = "Status LEDs"
ZONE_POWER_BUTTON = "Power Button"
ZONE_HDD_LEDS = "HDD LEDs"
ZONE_RIGHT_DISPLAY = "Right Display" # LED-bar display right side, as built in the AW17R4
ZONE_LEFT_DISPLAY = "Left Display" # LED-bar display left side, as built in the AW17R4
# State names
STATE_BOOT = "Boot"
STATE_AC_SLEEP = "AC Sleep"
STATE_AC_CHARGED = "AC Charged"
STATE_AC_CHARGING = "AC Charging"
STATE_BATTERY_SLEEP = "Battery Sleep"
STATE_BATTERY_ON = "Battery On"
STATE_BATTERY_CRITICAL = "Battery Critical"
ALIENFX_CONTROLLER_TYPE = "old" # Default controllertype=old. Note that modern controllers are using 8 bits per color. older ones just 4
def __init__(self, conrev=1): # conrev defaulting to 1 to maintain compatibility with old definitions
# conrev=1 -> old controllers (DEFAULT)
# conrev=2 -> newer controllers (17R4 ...)
self.zone_map = {}
self.power_zones = []
self.reset_types = {}
self.state_map = {}
self.vendor_id = 0
self.product_id = 0
self.cmd_packet = alienfx_cmdpacket.AlienFXCmdPacket(conrev) # Loads the cmdpacket.
self._driver = alienfx_usbdriver.AlienFXUSBDriver(self)
def get_zone_name(self, pkt):
""" Given 3 bytes of a command packet, return a string zone
name corresponding to it
"""
zone_mask = (pkt[0] << 16) + (pkt[1] << 8) + pkt[2]
zone_name = ""
for zone in self.zone_map:
bit_mask = self.zone_map[zone]
if zone_mask & bit_mask:
if zone_name != "":
zone_name += ","
zone_name += zone
zone_mask &= ~bit_mask
if zone_mask != 0:
if zone_name != "":
zone_name += ","
zone_name += "UNKNOWN({})".format(hex(zone_mask))
return zone_name
def get_state_name(self, state):
""" Given a state number, return a string state name """
for state_name in self.state_map:
if self.state_map[state_name] == state:
return state_name
return "UNKNOWN"
def get_reset_type_name(self, num):
""" Given a reset number, return a string reset name """
if num in list(self.reset_types.keys()):
return self.reset_types[num]
else:
return "UNKNOWN"
def _ping(self):
""" Send a get-status command to the controller."""
pkt = self.cmd_packet.make_cmd_get_status()
logging.debug("SENDING: {}".format(self.pkt_to_string(pkt)))
self._driver.write_packet(pkt)
self._driver.read_packet()
def _reset(self, reset_type):
""" Send a "reset" packet to the AlienFX controller."""
reset_code = self._get_reset_code(reset_type)
pkt = self.cmd_packet.make_cmd_reset(reset_code)
logging.debug("SENDING: {}".format(self.pkt_to_string(pkt)))
self._driver.write_packet(pkt)
def _wait_controller_ready(self):
""" Keep sending a "get status" packet to the AlienFX controller and
return only when the controller is ready
"""
ready = False
errcount=0
while not ready:
pkt = self.cmd_packet.make_cmd_get_status()
logging.debug("SENDING: {}".format(self.pkt_to_string(pkt)))
self._driver.write_packet(pkt)
try:
resp = self._driver.read_packet()
ready = (resp[0] == self.cmd_packet.STATUS_READY)
except TypeError:
errcount += 1
logging.debug("No Status received yet... Failed tries=" + str(errcount))
if errcount > 50:
logging.error("Controller status could not be retrieved. Is the device already in use?")
quit(-99)
def pkt_to_string(self, pkt_bytes):
""" Return a human readable string representation of an AlienFX
command packet.
"""
return self.cmd_packet.pkt_to_string(pkt_bytes, self)
def _get_no_zone_code(self):
""" Return a zone code corresponding to all non-visible zones."""
zone_codes = [self.zone_map[x] for x in self.zone_map]
return ~reduce(lambda x,y: x|y, zone_codes, 0)
def _get_zone_codes(self, zone_names):
""" Given zone names, return the zone codes they refer to.
"""
zones = 0
for zone in zone_names:
if zone in self.zone_map:
zones |= self.zone_map[zone]
return zones
def _get_reset_code(self, reset_name):
""" Given the name of a reset action, return its code. """
for reset in self.reset_types:
if reset_name == self.reset_types[reset]:
return reset
logging.warning("Unknown reset type: {}".format(reset_name))
return 0
def _make_loop_cmds(self, themefile, zones, block, loop_items):
""" Given loop-items from the theme file, return a list of loop
commands.
"""
loop_cmds = []
pkt = self.cmd_packet
for item in loop_items:
item_type = themefile.get_action_type(item)
item_colours = themefile.get_action_colours(item)
if item_type == AlienFXThemeFile.KW_ACTION_TYPE_FIXED:
if len(item_colours) != 1:
logging.warning("fixed must have exactly one colour value")
continue
loop_cmds.append(
pkt.make_cmd_set_colour(block, zones, item_colours[0]))
elif item_type == AlienFXThemeFile.KW_ACTION_TYPE_BLINK:
if len(item_colours) != 1:
logging.warning("blink must have exactly one colour value")
continue
loop_cmds.append(
pkt.make_cmd_set_blink_colour(block, zones, item_colours[0]))
elif item_type == AlienFXThemeFile.KW_ACTION_TYPE_MORPH:
if len(item_colours) != 2:
logging.warning("morph must have exactly two colour values")
continue
loop_cmds.append(
pkt.make_cmd_set_morph_colour(
block, zones, item_colours[0], item_colours[1]))
else:
logging.warning("unknown loop item type: {}".format(item_type))
return loop_cmds
def _make_zone_cmds(self, themefile, state_name, boot=False):
""" Given a theme file, return a list of zone commands.
If 'boot' is True, then the colour commands created are not saved with
SAVE_NEXT commands. Also, the final command is one to set the colour
of all non-visible zones to black.
"""
zone_cmds = []
block = 1
pkt = self.cmd_packet
state = self.state_map[state_name]
state_items = themefile.get_state_items(state_name)
for item in state_items:
|
if zone_cmds:
if not boot:
zone_cmds.append(pkt.make_cmd_save())
if boot:
zone_cmds.append(
pkt.make_cmd_set_colour(
block, self._get_no_zone_code(), (0,0,0)))
zone_cmds.append(pkt.make_cmd_loop_block_end())
return zone_cmds
def _send_cmds(self, cmds):
""" Send the given commands to the controller. """
for cmd in cmds:
logging.debug("SENDING: {}".format(self.pkt_to_string(cmd)))
self._driver.write_packet(cmd)
def set_theme(self, themefile):
""" Send the given theme settings to the controller. This should result
in the lights changing to the theme settings immediately.
"""
try:
self._driver.acquire()
cmds_boot = []
pkt = self.cmd_packet
# prepare the controller
self._ping()
self._reset("all-lights-on")
self._wait_controller_ready()
for state_name in self.state_map:
cmds = []
cmds = self._make_zone_cmds(themefile, state_name)
# Boot block commands are saved for sending again later.
# The second time, they are sent without SAVE_NEXT commands.
if (state_name == self.STATE_BOOT):
cmds_boot = self._make_zone_cmds(
themefile, state_name, boot=True)
self._send_cmds(cmds)
cmd = pkt.make_cmd_set_speed(themefile.get_speed())
self._send_cmds([cmd])
# send the boot block commands again
self._send_cmds(cmds_boot)
cmd = pkt.make_cmd_transmit_execute()
self._send_cmds([cmd])
finally:
self._driver.release()
| zone_codes = self._get_zone_codes(themefile.get_zone_names(item))
loop_items = themefile.get_loop_items(item)
loop_cmds = self._make_loop_cmds(
themefile, zone_codes, block, loop_items)
if (loop_cmds):
block += 1
for loop_cmd in loop_cmds:
if not boot:
zone_cmds.append(pkt.make_cmd_save_next(state))
zone_cmds.append(loop_cmd)
if not boot:
zone_cmds.append(pkt.make_cmd_save_next(state))
zone_cmds.append(pkt.make_cmd_loop_block_end()) | conditional_block |
controller.py | #
# controller.py
#
# Copyright (C) 2013-2014 Ashwin Menon <ashwin.menon@gmail.com>
# Copyright (C) 2015-2018 Track Master Steve <trackmastersteve@gmail.com>
#
# Alienfx is free software.
#
# You may redistribute it and/or modify it under the terms of the
# GNU General Public License, as published by the Free Software
# Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# Alienfx is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with alienfx. If not, write to:
# The Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor
# Boston, MA 02110-1301, USA.
#
""" Base classes for AlienFX controller chips. These must be subclassed for
specific controllers.
This module provides the following classes:
AlienFXController: base class for AlienFX controller chips
"""
from builtins import hex
from builtins import object
import logging
import alienfx.core.usbdriver as alienfx_usbdriver
import alienfx.core.cmdpacket as alienfx_cmdpacket
from alienfx.core.themefile import AlienFXThemeFile
from functools import reduce
class AlienFXController(object):
""" Provides facilities to communicate with an AlienFX controller.
This class provides methods to send commands to an AlienFX controller, and
receive status from the controller. It must be overridden to provide
behaviour specific to a particular AlienFX controller.
"""
# List of all subclasses of this class. Subclasses must add instances of
# themselves to this list. See README for details.
supported_controllers = []
# Zone names
ZONE_LEFT_KEYBOARD = "Left Keyboard"
ZONE_MIDDLE_LEFT_KEYBOARD = "Middle-left Keyboard"
ZONE_MIDDLE_RIGHT_KEYBOARD = "Middle-right Keyboard"
ZONE_RIGHT_KEYBOARD = "Right Keyboard"
ZONE_RIGHT_SPEAKER = "Right Speaker"
ZONE_LEFT_SPEAKER = "Left Speaker"
ZONE_ALIEN_HEAD = "Alien Head"
ZONE_LOGO = "Logo"
ZONE_TOUCH_PAD = "Touchpad"
ZONE_MEDIA_BAR = "Media Bar"
ZONE_STATUS_LEDS = "Status LEDs"
ZONE_POWER_BUTTON = "Power Button"
ZONE_HDD_LEDS = "HDD LEDs"
ZONE_RIGHT_DISPLAY = "Right Display" # LED-bar display right side, as built in the AW17R4
ZONE_LEFT_DISPLAY = "Left Display" # LED-bar display left side, as built in the AW17R4
# State names
STATE_BOOT = "Boot"
STATE_AC_SLEEP = "AC Sleep"
STATE_AC_CHARGED = "AC Charged"
STATE_AC_CHARGING = "AC Charging"
STATE_BATTERY_SLEEP = "Battery Sleep"
STATE_BATTERY_ON = "Battery On"
STATE_BATTERY_CRITICAL = "Battery Critical"
ALIENFX_CONTROLLER_TYPE = "old" # Default controllertype=old. Note that modern controllers are using 8 bits per color. older ones just 4
def __init__(self, conrev=1): # conrev defaulting to 1 to maintain compatibility with old definitions
# conrev=1 -> old controllers (DEFAULT)
# conrev=2 -> newer controllers (17R4 ...)
self.zone_map = {}
self.power_zones = []
self.reset_types = {}
self.state_map = {}
self.vendor_id = 0
self.product_id = 0
self.cmd_packet = alienfx_cmdpacket.AlienFXCmdPacket(conrev) # Loads the cmdpacket.
self._driver = alienfx_usbdriver.AlienFXUSBDriver(self)
def get_zone_name(self, pkt):
""" Given 3 bytes of a command packet, return a string zone
name corresponding to it
"""
zone_mask = (pkt[0] << 16) + (pkt[1] << 8) + pkt[2]
zone_name = ""
for zone in self.zone_map:
bit_mask = self.zone_map[zone]
if zone_mask & bit_mask:
if zone_name != "":
zone_name += ","
zone_name += zone
zone_mask &= ~bit_mask
if zone_mask != 0:
if zone_name != "":
zone_name += ","
zone_name += "UNKNOWN({})".format(hex(zone_mask))
return zone_name
def get_state_name(self, state):
""" Given a state number, return a string state name """
for state_name in self.state_map:
if self.state_map[state_name] == state:
return state_name
return "UNKNOWN"
def get_reset_type_name(self, num):
""" Given a reset number, return a string reset name """
if num in list(self.reset_types.keys()):
return self.reset_types[num]
else:
return "UNKNOWN"
def _ping(self):
""" Send a get-status command to the controller."""
pkt = self.cmd_packet.make_cmd_get_status()
logging.debug("SENDING: {}".format(self.pkt_to_string(pkt)))
self._driver.write_packet(pkt)
self._driver.read_packet()
def _reset(self, reset_type):
""" Send a "reset" packet to the AlienFX controller."""
reset_code = self._get_reset_code(reset_type)
pkt = self.cmd_packet.make_cmd_reset(reset_code)
logging.debug("SENDING: {}".format(self.pkt_to_string(pkt)))
self._driver.write_packet(pkt)
def _wait_controller_ready(self):
""" Keep sending a "get status" packet to the AlienFX controller and
return only when the controller is ready
"""
ready = False
errcount=0
while not ready:
pkt = self.cmd_packet.make_cmd_get_status()
logging.debug("SENDING: {}".format(self.pkt_to_string(pkt)))
self._driver.write_packet(pkt)
try:
resp = self._driver.read_packet()
ready = (resp[0] == self.cmd_packet.STATUS_READY)
except TypeError:
errcount += 1
logging.debug("No Status received yet... Failed tries=" + str(errcount))
if errcount > 50:
logging.error("Controller status could not be retrieved. Is the device already in use?")
quit(-99)
def pkt_to_string(self, pkt_bytes):
""" Return a human readable string representation of an AlienFX
command packet.
"""
return self.cmd_packet.pkt_to_string(pkt_bytes, self)
def _get_no_zone_code(self):
""" Return a zone code corresponding to all non-visible zones."""
zone_codes = [self.zone_map[x] for x in self.zone_map]
return ~reduce(lambda x,y: x|y, zone_codes, 0)
def _get_zone_codes(self, zone_names):
""" Given zone names, return the zone codes they refer to.
"""
zones = 0
for zone in zone_names:
if zone in self.zone_map:
zones |= self.zone_map[zone]
return zones
def | (self, reset_name):
""" Given the name of a reset action, return its code. """
for reset in self.reset_types:
if reset_name == self.reset_types[reset]:
return reset
logging.warning("Unknown reset type: {}".format(reset_name))
return 0
def _make_loop_cmds(self, themefile, zones, block, loop_items):
""" Given loop-items from the theme file, return a list of loop
commands.
"""
loop_cmds = []
pkt = self.cmd_packet
for item in loop_items:
item_type = themefile.get_action_type(item)
item_colours = themefile.get_action_colours(item)
if item_type == AlienFXThemeFile.KW_ACTION_TYPE_FIXED:
if len(item_colours) != 1:
logging.warning("fixed must have exactly one colour value")
continue
loop_cmds.append(
pkt.make_cmd_set_colour(block, zones, item_colours[0]))
elif item_type == AlienFXThemeFile.KW_ACTION_TYPE_BLINK:
if len(item_colours) != 1:
logging.warning("blink must have exactly one colour value")
continue
loop_cmds.append(
pkt.make_cmd_set_blink_colour(block, zones, item_colours[0]))
elif item_type == AlienFXThemeFile.KW_ACTION_TYPE_MORPH:
if len(item_colours) != 2:
logging.warning("morph must have exactly two colour values")
continue
loop_cmds.append(
pkt.make_cmd_set_morph_colour(
block, zones, item_colours[0], item_colours[1]))
else:
logging.warning("unknown loop item type: {}".format(item_type))
return loop_cmds
def _make_zone_cmds(self, themefile, state_name, boot=False):
""" Given a theme file, return a list of zone commands.
If 'boot' is True, then the colour commands created are not saved with
SAVE_NEXT commands. Also, the final command is one to set the colour
of all non-visible zones to black.
"""
zone_cmds = []
block = 1
pkt = self.cmd_packet
state = self.state_map[state_name]
state_items = themefile.get_state_items(state_name)
for item in state_items:
zone_codes = self._get_zone_codes(themefile.get_zone_names(item))
loop_items = themefile.get_loop_items(item)
loop_cmds = self._make_loop_cmds(
themefile, zone_codes, block, loop_items)
if (loop_cmds):
block += 1
for loop_cmd in loop_cmds:
if not boot:
zone_cmds.append(pkt.make_cmd_save_next(state))
zone_cmds.append(loop_cmd)
if not boot:
zone_cmds.append(pkt.make_cmd_save_next(state))
zone_cmds.append(pkt.make_cmd_loop_block_end())
if zone_cmds:
if not boot:
zone_cmds.append(pkt.make_cmd_save())
if boot:
zone_cmds.append(
pkt.make_cmd_set_colour(
block, self._get_no_zone_code(), (0,0,0)))
zone_cmds.append(pkt.make_cmd_loop_block_end())
return zone_cmds
def _send_cmds(self, cmds):
""" Send the given commands to the controller. """
for cmd in cmds:
logging.debug("SENDING: {}".format(self.pkt_to_string(cmd)))
self._driver.write_packet(cmd)
def set_theme(self, themefile):
    """ Send the given theme settings to the controller. This should result
    in the lights changing to the theme settings immediately.

    Args:
        themefile: AlienFXThemeFile providing the per-state zone/loop
            items and the animation speed.
    """
    try:
        # Exclusive access to the USB device for the whole transaction.
        self._driver.acquire()
        cmds_boot = []
        pkt = self.cmd_packet
        # prepare the controller
        self._ping()
        self._reset("all-lights-on")
        self._wait_controller_ready()
        for state_name in self.state_map:
            cmds = []  # NOTE(review): immediately overwritten below; kept as-is
            cmds = self._make_zone_cmds(themefile, state_name)
            # Boot block commands are saved for sending again later.
            # The second time, they are sent without SAVE_NEXT commands.
            if (state_name == self.STATE_BOOT):
                cmds_boot = self._make_zone_cmds(
                    themefile, state_name, boot=True)
            self._send_cmds(cmds)
        cmd = pkt.make_cmd_set_speed(themefile.get_speed())
        self._send_cmds([cmd])
        # send the boot block commands again
        self._send_cmds(cmds_boot)
        cmd = pkt.make_cmd_transmit_execute()
        self._send_cmds([cmd])
    finally:
        # Always release the device, even if any step above raised.
        self._driver.release()
| _get_reset_code | identifier_name |
controller.py | #
# controller.py
#
# Copyright (C) 2013-2014 Ashwin Menon <ashwin.menon@gmail.com>
# Copyright (C) 2015-2018 Track Master Steve <trackmastersteve@gmail.com>
#
# Alienfx is free software.
#
# You may redistribute it and/or modify it under the terms of the
# GNU General Public License, as published by the Free Software
# Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# Alienfx is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with alienfx. If not, write to:
# The Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor
# Boston, MA 02110-1301, USA.
#
""" Base classes for AlienFX controller chips. These must be subclassed for
specific controllers.
This module provides the following classes:
AlienFXController: base class for AlienFX controller chips
"""
from builtins import hex
from builtins import object
import logging
import alienfx.core.usbdriver as alienfx_usbdriver
import alienfx.core.cmdpacket as alienfx_cmdpacket
from alienfx.core.themefile import AlienFXThemeFile
from functools import reduce
class AlienFXController(object):
""" Provides facilities to communicate with an AlienFX controller.
This class provides methods to send commands to an AlienFX controller, and
receive status from the controller. It must be overridden to provide
behaviour specific to a particular AlienFX controller.
"""
# List of all subclasses of this class. Subclasses must add instances of
# themselves to this list. See README for details.
supported_controllers = []
# Zone names
ZONE_LEFT_KEYBOARD = "Left Keyboard"
ZONE_MIDDLE_LEFT_KEYBOARD = "Middle-left Keyboard"
ZONE_MIDDLE_RIGHT_KEYBOARD = "Middle-right Keyboard"
ZONE_RIGHT_KEYBOARD = "Right Keyboard"
ZONE_RIGHT_SPEAKER = "Right Speaker"
ZONE_LEFT_SPEAKER = "Left Speaker"
ZONE_ALIEN_HEAD = "Alien Head"
ZONE_LOGO = "Logo"
ZONE_TOUCH_PAD = "Touchpad"
ZONE_MEDIA_BAR = "Media Bar"
ZONE_STATUS_LEDS = "Status LEDs"
ZONE_POWER_BUTTON = "Power Button"
ZONE_HDD_LEDS = "HDD LEDs"
ZONE_RIGHT_DISPLAY = "Right Display" # LED-bar display right side, as built in the AW17R4
ZONE_LEFT_DISPLAY = "Left Display" # LED-bar display left side, as built in the AW17R4
# State names
STATE_BOOT = "Boot"
STATE_AC_SLEEP = "AC Sleep"
STATE_AC_CHARGED = "AC Charged"
STATE_AC_CHARGING = "AC Charging"
STATE_BATTERY_SLEEP = "Battery Sleep"
STATE_BATTERY_ON = "Battery On"
STATE_BATTERY_CRITICAL = "Battery Critical"
ALIENFX_CONTROLLER_TYPE = "old" # Default controllertype=old. Note that modern controllers are using 8 bits per color. older ones just 4
def __init__(self, conrev=1): # conrev defaulting to 1 to maintain compatibility with old definitions
    # conrev=1 -> old controllers (DEFAULT)
    # conrev=2 -> newer controllers (17R4 ...)
    self.zone_map = {}      # zone name -> zone bit mask (see get_zone_name)
    self.power_zones = []   # presumably zones tied to power states; unused in this base class — populated by subclasses (TODO confirm)
    self.reset_types = {}   # reset code -> reset name (see get_reset_type_name / _get_reset_code)
    self.state_map = {}     # state name -> state number (see get_state_name)
    self.vendor_id = 0      # USB vendor id; 0 here, overridden per device
    self.product_id = 0     # USB product id; 0 here, overridden per device
    self.cmd_packet = alienfx_cmdpacket.AlienFXCmdPacket(conrev) # Loads the cmdpacket.
    self._driver = alienfx_usbdriver.AlienFXUSBDriver(self)
def get_zone_name(self, pkt):
""" Given 3 bytes of a command packet, return a string zone
name corresponding to it
"""
zone_mask = (pkt[0] << 16) + (pkt[1] << 8) + pkt[2]
zone_name = ""
for zone in self.zone_map:
bit_mask = self.zone_map[zone]
if zone_mask & bit_mask:
if zone_name != "":
zone_name += ","
zone_name += zone
zone_mask &= ~bit_mask
if zone_mask != 0:
if zone_name != "":
zone_name += ","
zone_name += "UNKNOWN({})".format(hex(zone_mask))
return zone_name
def get_state_name(self, state):
""" Given a state number, return a string state name """
for state_name in self.state_map:
if self.state_map[state_name] == state:
return state_name
return "UNKNOWN"
def get_reset_type_name(self, num):
""" Given a reset number, return a string reset name """
if num in list(self.reset_types.keys()):
return self.reset_types[num]
else:
return "UNKNOWN"
def _ping(self):
    """ Send a get-status command to the controller."""
    pkt = self.cmd_packet.make_cmd_get_status()
    logging.debug("SENDING: {}".format(self.pkt_to_string(pkt)))
    self._driver.write_packet(pkt)
    # Response is read but intentionally discarded; this call only
    # checks that the controller answers at all.
    self._driver.read_packet()
def _reset(self, reset_type):
    """ Send a "reset" packet to the AlienFX controller.

    Args:
        reset_type: reset action name, mapped to its code via
            self.reset_types (unknown names fall back to code 0,
            see _get_reset_code).
    """
    reset_code = self._get_reset_code(reset_type)
    pkt = self.cmd_packet.make_cmd_reset(reset_code)
    logging.debug("SENDING: {}".format(self.pkt_to_string(pkt)))
    self._driver.write_packet(pkt)
def _wait_controller_ready(self):
""" Keep sending a "get status" packet to the AlienFX controller and
return only when the controller is ready
"""
ready = False
errcount=0
while not ready:
pkt = self.cmd_packet.make_cmd_get_status()
logging.debug("SENDING: {}".format(self.pkt_to_string(pkt)))
self._driver.write_packet(pkt)
try:
resp = self._driver.read_packet()
ready = (resp[0] == self.cmd_packet.STATUS_READY)
except TypeError:
errcount += 1
logging.debug("No Status received yet... Failed tries=" + str(errcount)) |
def pkt_to_string(self, pkt_bytes):
    """ Return a human readable string representation of an AlienFX
    command packet.

    Args:
        pkt_bytes: raw command packet bytes.

    Returns:
        str: formatting is delegated to self.cmd_packet.pkt_to_string(),
        which is passed this controller for zone/state name lookups.
    """
    return self.cmd_packet.pkt_to_string(pkt_bytes, self)
def _get_no_zone_code(self):
""" Return a zone code corresponding to all non-visible zones."""
zone_codes = [self.zone_map[x] for x in self.zone_map]
return ~reduce(lambda x,y: x|y, zone_codes, 0)
def _get_zone_codes(self, zone_names):
""" Given zone names, return the zone codes they refer to.
"""
zones = 0
for zone in zone_names:
if zone in self.zone_map:
zones |= self.zone_map[zone]
return zones
def _get_reset_code(self, reset_name):
""" Given the name of a reset action, return its code. """
for reset in self.reset_types:
if reset_name == self.reset_types[reset]:
return reset
logging.warning("Unknown reset type: {}".format(reset_name))
return 0
def _make_loop_cmds(self, themefile, zones, block, loop_items):
    """ Given loop-items from the theme file, return a list of loop
    commands.

    Args:
        themefile: AlienFXThemeFile the items were read from.
        zones: OR'd zone bit-mask the commands apply to.
        block: loop block number embedded in each command.
        loop_items: sequence of theme-file loop items to convert.

    Returns:
        list: one colour command per valid item. Items with the wrong
        number of colours (or an unknown type) are skipped with a
        warning instead of aborting.
    """
    loop_cmds = []
    pkt = self.cmd_packet
    for item in loop_items:
        item_type = themefile.get_action_type(item)
        item_colours = themefile.get_action_colours(item)
        if item_type == AlienFXThemeFile.KW_ACTION_TYPE_FIXED:
            # "fixed" takes exactly one colour.
            if len(item_colours) != 1:
                logging.warning("fixed must have exactly one colour value")
                continue
            loop_cmds.append(
                pkt.make_cmd_set_colour(block, zones, item_colours[0]))
        elif item_type == AlienFXThemeFile.KW_ACTION_TYPE_BLINK:
            # "blink" takes exactly one colour.
            if len(item_colours) != 1:
                logging.warning("blink must have exactly one colour value")
                continue
            loop_cmds.append(
                pkt.make_cmd_set_blink_colour(block, zones, item_colours[0]))
        elif item_type == AlienFXThemeFile.KW_ACTION_TYPE_MORPH:
            # "morph" takes a (from, to) colour pair.
            if len(item_colours) != 2:
                logging.warning("morph must have exactly two colour values")
                continue
            loop_cmds.append(
                pkt.make_cmd_set_morph_colour(
                    block, zones, item_colours[0], item_colours[1]))
        else:
            logging.warning("unknown loop item type: {}".format(item_type))
    return loop_cmds
def _make_zone_cmds(self, themefile, state_name, boot=False):
    """ Given a theme file, return a list of zone commands.
    If 'boot' is True, then the colour commands created are not saved with
    SAVE_NEXT commands. Also, the final command is one to set the colour
    of all non-visible zones to black.

    Args:
        themefile: AlienFXThemeFile to read the state's items from.
        state_name: key into self.state_map naming the power state.
        boot: build the unsaved "boot block" variant of the commands.

    Returns:
        list: command packets in the exact order they must be sent.
    """
    zone_cmds = []
    block = 1
    pkt = self.cmd_packet
    state = self.state_map[state_name]
    state_items = themefile.get_state_items(state_name)
    for item in state_items:
        zone_codes = self._get_zone_codes(themefile.get_zone_names(item))
        loop_items = themefile.get_loop_items(item)
        loop_cmds = self._make_loop_cmds(
            themefile, zone_codes, block, loop_items)
        if (loop_cmds):
            # Each item that produced commands consumes one block number.
            block += 1
            for loop_cmd in loop_cmds:
                if not boot:
                    # A SAVE_NEXT precedes each command so it persists.
                    zone_cmds.append(pkt.make_cmd_save_next(state))
                zone_cmds.append(loop_cmd)
            if not boot:
                zone_cmds.append(pkt.make_cmd_save_next(state))
            zone_cmds.append(pkt.make_cmd_loop_block_end())
    if zone_cmds:
        if not boot:
            zone_cmds.append(pkt.make_cmd_save())
    if boot:
        # Paint every non-visible zone black in the boot variant.
        zone_cmds.append(
            pkt.make_cmd_set_colour(
                block, self._get_no_zone_code(), (0,0,0)))
        zone_cmds.append(pkt.make_cmd_loop_block_end())
    return zone_cmds
def _send_cmds(self, cmds):
    """ Send the given commands to the controller. """
    write = self._driver.write_packet
    for packet in cmds:
        logging.debug("SENDING: {}".format(self.pkt_to_string(packet)))
        write(packet)
def set_theme(self, themefile):
""" Send the given theme settings to the controller. This should result
in the lights changing to the theme settings immediately.
"""
try:
self._driver.acquire()
cmds_boot = []
pkt = self.cmd_packet
# prepare the controller
self._ping()
self._reset("all-lights-on")
self._wait_controller_ready()
for state_name in self.state_map:
cmds = []
cmds = self._make_zone_cmds(themefile, state_name)
# Boot block commands are saved for sending again later.
# The second time, they are sent without SAVE_NEXT commands.
if (state_name == self.STATE_BOOT):
cmds_boot = self._make_zone_cmds(
themefile, state_name, boot=True)
self._send_cmds(cmds)
cmd = pkt.make_cmd_set_speed(themefile.get_speed())
self._send_cmds([cmd])
# send the boot block commands again
self._send_cmds(cmds_boot)
cmd = pkt.make_cmd_transmit_execute()
self._send_cmds([cmd])
finally:
self._driver.release() | if errcount > 50:
logging.error("Controller status could not be retrieved. Is the device already in use?")
quit(-99) | random_line_split |
controller.py | #
# controller.py
#
# Copyright (C) 2013-2014 Ashwin Menon <ashwin.menon@gmail.com>
# Copyright (C) 2015-2018 Track Master Steve <trackmastersteve@gmail.com>
#
# Alienfx is free software.
#
# You may redistribute it and/or modify it under the terms of the
# GNU General Public License, as published by the Free Software
# Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# Alienfx is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with alienfx. If not, write to:
# The Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor
# Boston, MA 02110-1301, USA.
#
""" Base classes for AlienFX controller chips. These must be subclassed for
specific controllers.
This module provides the following classes:
AlienFXController: base class for AlienFX controller chips
"""
import logging
import sys
from functools import reduce

from builtins import hex
from builtins import object

import alienfx.core.usbdriver as alienfx_usbdriver
import alienfx.core.cmdpacket as alienfx_cmdpacket
from alienfx.core.themefile import AlienFXThemeFile
class AlienFXController(object):
""" Provides facilities to communicate with an AlienFX controller.
This class provides methods to send commands to an AlienFX controller, and
receive status from the controller. It must be overridden to provide
behaviour specific to a particular AlienFX controller.
"""
# List of all subclasses of this class. Subclasses must add instances of
# themselves to this list. See README for details.
supported_controllers = []
# Zone names
ZONE_LEFT_KEYBOARD = "Left Keyboard"
ZONE_MIDDLE_LEFT_KEYBOARD = "Middle-left Keyboard"
ZONE_MIDDLE_RIGHT_KEYBOARD = "Middle-right Keyboard"
ZONE_RIGHT_KEYBOARD = "Right Keyboard"
ZONE_RIGHT_SPEAKER = "Right Speaker"
ZONE_LEFT_SPEAKER = "Left Speaker"
ZONE_ALIEN_HEAD = "Alien Head"
ZONE_LOGO = "Logo"
ZONE_TOUCH_PAD = "Touchpad"
ZONE_MEDIA_BAR = "Media Bar"
ZONE_STATUS_LEDS = "Status LEDs"
ZONE_POWER_BUTTON = "Power Button"
ZONE_HDD_LEDS = "HDD LEDs"
ZONE_RIGHT_DISPLAY = "Right Display" # LED-bar display right side, as built in the AW17R4
ZONE_LEFT_DISPLAY = "Left Display" # LED-bar display left side, as built in the AW17R4
# State names
STATE_BOOT = "Boot"
STATE_AC_SLEEP = "AC Sleep"
STATE_AC_CHARGED = "AC Charged"
STATE_AC_CHARGING = "AC Charging"
STATE_BATTERY_SLEEP = "Battery Sleep"
STATE_BATTERY_ON = "Battery On"
STATE_BATTERY_CRITICAL = "Battery Critical"
ALIENFX_CONTROLLER_TYPE = "old" # Default controllertype=old. Note that modern controllers are using 8 bits per color. older ones just 4
def __init__(self, conrev=1): # conrev defaulting to 1 to maintain compatibility with old definitions
# conrev=1 -> old controllers (DEFAULT)
# conrev=2 -> newer controllers (17R4 ...)
self.zone_map = {}
self.power_zones = []
self.reset_types = {}
self.state_map = {}
self.vendor_id = 0
self.product_id = 0
self.cmd_packet = alienfx_cmdpacket.AlienFXCmdPacket(conrev) # Loads the cmdpacket.
self._driver = alienfx_usbdriver.AlienFXUSBDriver(self)
def get_zone_name(self, pkt):
|
def get_state_name(self, state):
""" Given a state number, return a string state name """
for state_name in self.state_map:
if self.state_map[state_name] == state:
return state_name
return "UNKNOWN"
def get_reset_type_name(self, num):
""" Given a reset number, return a string reset name """
if num in list(self.reset_types.keys()):
return self.reset_types[num]
else:
return "UNKNOWN"
def _ping(self):
""" Send a get-status command to the controller."""
pkt = self.cmd_packet.make_cmd_get_status()
logging.debug("SENDING: {}".format(self.pkt_to_string(pkt)))
self._driver.write_packet(pkt)
self._driver.read_packet()
def _reset(self, reset_type):
""" Send a "reset" packet to the AlienFX controller."""
reset_code = self._get_reset_code(reset_type)
pkt = self.cmd_packet.make_cmd_reset(reset_code)
logging.debug("SENDING: {}".format(self.pkt_to_string(pkt)))
self._driver.write_packet(pkt)
def _wait_controller_ready(self):
    """ Keep sending a "get status" packet to the AlienFX controller and
    return only when the controller is ready.

    Raises:
        SystemExit: after more than 50 consecutive failed reads, which
            usually means another process already holds the device.
    """
    ready = False
    errcount = 0
    while not ready:
        pkt = self.cmd_packet.make_cmd_get_status()
        logging.debug("SENDING: {}".format(self.pkt_to_string(pkt)))
        self._driver.write_packet(pkt)
        try:
            resp = self._driver.read_packet()
            ready = (resp[0] == self.cmd_packet.STATUS_READY)
        except TypeError:
            # read_packet() yielded nothing subscriptable: count and retry.
            errcount += 1
            # Lazy %-formatting avoids building the string when DEBUG is off.
            logging.debug("No Status received yet... Failed tries=%s", errcount)
            if errcount > 50:
                logging.error("Controller status could not be retrieved. Is the device already in use?")
                # sys.exit() instead of quit(): quit() is injected by the
                # 'site' module and is absent under python -S / frozen apps.
                sys.exit(-99)
def pkt_to_string(self, pkt_bytes):
""" Return a human readable string representation of an AlienFX
command packet.
"""
return self.cmd_packet.pkt_to_string(pkt_bytes, self)
def _get_no_zone_code(self):
""" Return a zone code corresponding to all non-visible zones."""
zone_codes = [self.zone_map[x] for x in self.zone_map]
return ~reduce(lambda x,y: x|y, zone_codes, 0)
def _get_zone_codes(self, zone_names):
""" Given zone names, return the zone codes they refer to.
"""
zones = 0
for zone in zone_names:
if zone in self.zone_map:
zones |= self.zone_map[zone]
return zones
def _get_reset_code(self, reset_name):
""" Given the name of a reset action, return its code. """
for reset in self.reset_types:
if reset_name == self.reset_types[reset]:
return reset
logging.warning("Unknown reset type: {}".format(reset_name))
return 0
def _make_loop_cmds(self, themefile, zones, block, loop_items):
""" Given loop-items from the theme file, return a list of loop
commands.
"""
loop_cmds = []
pkt = self.cmd_packet
for item in loop_items:
item_type = themefile.get_action_type(item)
item_colours = themefile.get_action_colours(item)
if item_type == AlienFXThemeFile.KW_ACTION_TYPE_FIXED:
if len(item_colours) != 1:
logging.warning("fixed must have exactly one colour value")
continue
loop_cmds.append(
pkt.make_cmd_set_colour(block, zones, item_colours[0]))
elif item_type == AlienFXThemeFile.KW_ACTION_TYPE_BLINK:
if len(item_colours) != 1:
logging.warning("blink must have exactly one colour value")
continue
loop_cmds.append(
pkt.make_cmd_set_blink_colour(block, zones, item_colours[0]))
elif item_type == AlienFXThemeFile.KW_ACTION_TYPE_MORPH:
if len(item_colours) != 2:
logging.warning("morph must have exactly two colour values")
continue
loop_cmds.append(
pkt.make_cmd_set_morph_colour(
block, zones, item_colours[0], item_colours[1]))
else:
logging.warning("unknown loop item type: {}".format(item_type))
return loop_cmds
def _make_zone_cmds(self, themefile, state_name, boot=False):
""" Given a theme file, return a list of zone commands.
If 'boot' is True, then the colour commands created are not saved with
SAVE_NEXT commands. Also, the final command is one to set the colour
of all non-visible zones to black.
"""
zone_cmds = []
block = 1
pkt = self.cmd_packet
state = self.state_map[state_name]
state_items = themefile.get_state_items(state_name)
for item in state_items:
zone_codes = self._get_zone_codes(themefile.get_zone_names(item))
loop_items = themefile.get_loop_items(item)
loop_cmds = self._make_loop_cmds(
themefile, zone_codes, block, loop_items)
if (loop_cmds):
block += 1
for loop_cmd in loop_cmds:
if not boot:
zone_cmds.append(pkt.make_cmd_save_next(state))
zone_cmds.append(loop_cmd)
if not boot:
zone_cmds.append(pkt.make_cmd_save_next(state))
zone_cmds.append(pkt.make_cmd_loop_block_end())
if zone_cmds:
if not boot:
zone_cmds.append(pkt.make_cmd_save())
if boot:
zone_cmds.append(
pkt.make_cmd_set_colour(
block, self._get_no_zone_code(), (0,0,0)))
zone_cmds.append(pkt.make_cmd_loop_block_end())
return zone_cmds
def _send_cmds(self, cmds):
""" Send the given commands to the controller. """
for cmd in cmds:
logging.debug("SENDING: {}".format(self.pkt_to_string(cmd)))
self._driver.write_packet(cmd)
def set_theme(self, themefile):
""" Send the given theme settings to the controller. This should result
in the lights changing to the theme settings immediately.
"""
try:
self._driver.acquire()
cmds_boot = []
pkt = self.cmd_packet
# prepare the controller
self._ping()
self._reset("all-lights-on")
self._wait_controller_ready()
for state_name in self.state_map:
cmds = []
cmds = self._make_zone_cmds(themefile, state_name)
# Boot block commands are saved for sending again later.
# The second time, they are sent without SAVE_NEXT commands.
if (state_name == self.STATE_BOOT):
cmds_boot = self._make_zone_cmds(
themefile, state_name, boot=True)
self._send_cmds(cmds)
cmd = pkt.make_cmd_set_speed(themefile.get_speed())
self._send_cmds([cmd])
# send the boot block commands again
self._send_cmds(cmds_boot)
cmd = pkt.make_cmd_transmit_execute()
self._send_cmds([cmd])
finally:
self._driver.release()
| """ Given 3 bytes of a command packet, return a string zone
name corresponding to it
"""
zone_mask = (pkt[0] << 16) + (pkt[1] << 8) + pkt[2]
zone_name = ""
for zone in self.zone_map:
bit_mask = self.zone_map[zone]
if zone_mask & bit_mask:
if zone_name != "":
zone_name += ","
zone_name += zone
zone_mask &= ~bit_mask
if zone_mask != 0:
if zone_name != "":
zone_name += ","
zone_name += "UNKNOWN({})".format(hex(zone_mask))
return zone_name | identifier_body |
asciidoc2html.py | #!/usr/bin/env python3
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2021 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <https://www.gnu.org/licenses/>.
"""Generate the html documentation based on the asciidoc files."""
from typing import List, Optional
import re
import os
import sys
import subprocess
import shutil
import tempfile
import argparse
import io
import pathlib
REPO_ROOT = pathlib.Path(__file__).resolve().parents[1]
DOC_DIR = REPO_ROOT / 'qutebrowser' / 'html' / 'doc'
sys.path.insert(0, str(REPO_ROOT))
from scripts import utils
class AsciiDoc:
"""Abstraction of an asciidoc subprocess."""
FILES = ['faq', 'changelog', 'contributing', 'quickstart', 'userscripts']
def __init__(self,
             asciidoc: Optional[str],
             asciidoc_python: Optional[str],
             website: Optional[str]) -> None:
    """Store invocation options; directories are created later in prepare().

    Args:
        asciidoc: Path to asciidoc.py, or None to search it in PATH.
        asciidoc_python: Python interpreter used to run asciidoc, or
            None to use sys.executable.
        website: Output directory for the website build, or None to
            build the docs instead.
    """
    self._cmd: Optional[List[str]] = None           # asciidoc command line, set in prepare()
    self._asciidoc = asciidoc
    self._asciidoc_python = asciidoc_python
    self._website = website
    self._homedir: Optional[pathlib.Path] = None    # temporary $HOME for asciidoc
    self._themedir: Optional[pathlib.Path] = None   # qute theme dir inside homedir
    self._tempdir: Optional[pathlib.Path] = None    # scratch dir for modified sources
    self._failed = False                            # True after a failed build; keeps homedir for debugging
def prepare(self) -> None:
    """Get the asciidoc command and create the homedir to use.

    Raises:
        FileNotFoundError: if no asciidoc executable can be found.
    """
    self._cmd = self._get_asciidoc_cmd()
    # Fresh temporary $HOME so asciidoc picks up only our theme/config.
    self._homedir = pathlib.Path(tempfile.mkdtemp())
    self._themedir = self._homedir / '.asciidoc' / 'themes' / 'qute'
    self._tempdir = self._homedir / 'tmp'
    self._tempdir.mkdir(parents=True)
    self._themedir.mkdir(parents=True)
def cleanup(self) -> None:
"""Clean up the temporary home directory for asciidoc."""
if self._homedir is not None and not self._failed:
shutil.rmtree(str(self._homedir))
def build(self) -> None:
"""Build either the website or the docs."""
if self._website:
self._build_website()
else:
self._build_docs()
self._copy_images()
def _build_docs(self) -> None:
    """Render .asciidoc files to .html sites.

    Processes the fixed FILES list from doc/ plus everything under
    doc/help/, rewriting remote image URLs to qute://help/ links in a
    temp copy before invoking asciidoc on it.
    """
    files = [((REPO_ROOT / 'doc' / '{}.asciidoc'.format(f)),
              DOC_DIR / (f + ".html")) for f in self.FILES]
    for src in (REPO_ROOT / 'doc' / 'help').glob('*.asciidoc'):
        dst = DOC_DIR / (src.stem + ".html")
        files.append((src, dst))
    # patch image links to use local copy
    replacements = [
        ("https://raw.githubusercontent.com/qutebrowser/qutebrowser/master/doc/img/cheatsheet-big.png",
         "qute://help/img/cheatsheet-big.png"),
        ("https://raw.githubusercontent.com/qutebrowser/qutebrowser/master/doc/img/cheatsheet-small.png",
         "qute://help/img/cheatsheet-small.png")
    ]
    asciidoc_args = ['-a', 'source-highlighter=pygments']
    for src, dst in files:
        assert self._tempdir is not None  # for mypy
        # Write a patched copy of the source into the temp dir; asciidoc
        # runs on the copy so the repo files stay untouched.
        modified_src = self._tempdir / src.name
        with modified_src.open('w', encoding='utf-8') as moded_f, \
                src.open('r', encoding='utf-8') as f:
            for line in f:
                for orig, repl in replacements:
                    line = line.replace(orig, repl)
                moded_f.write(line)
        self.call(modified_src, dst, *asciidoc_args)
def _copy_images(self) -> None:
    """Copy image files to qutebrowser/html/doc."""
    print("Copying files...")
    dest_dir = DOC_DIR / 'img'
    dest_dir.mkdir(exist_ok=True)
    source_dir = REPO_ROOT / 'doc' / 'img'
    for name in ('cheatsheet-big.png', 'cheatsheet-small.png'):
        shutil.copy(str(source_dir / name), str(dest_dir / name))
def _build_website_file(self, root: pathlib.Path, filename: str) -> None:
    """Build a single website file.

    Rewrites the asciidoc source in a temp copy (header prepended,
    QUTE_WEB_HIDE sections stripped, title suffixed with
    " | qutebrowser", .asciidoc links pointed at .html), then runs
    asciidoc on the copy.

    Args:
        root: directory containing the source file.
        filename: the .asciidoc file name inside root.
    """
    src = root / filename
    assert self._website is not None  # for mypy
    dst = pathlib.Path(self._website)
    dst = dst / src.parent.relative_to(REPO_ROOT) / (src.stem + ".html")
    dst.parent.mkdir(exist_ok=True)
    assert self._tempdir is not None  # for mypy
    modified_src = self._tempdir / src.name
    # Start the temp copy from the shared website header.
    shutil.copy(str(REPO_ROOT / 'www' / 'header.asciidoc'), modified_src)
    outfp = io.StringIO()
    header = modified_src.read_text(encoding='utf-8')
    header += "\n\n"
    with src.open('r', encoding='utf-8') as infp:
        outfp.write("\n\n")
        hidden = False          # inside a QUTE_WEB_HIDE ... HIDE_END span
        found_title = False     # first title line already rewritten
        title = ""
        last_line = ""          # needed because the title text precedes its '=' underline
        for line in infp:
            line = line.rstrip()
            if line == '// QUTE_WEB_HIDE':
                assert not hidden
                hidden = True
            elif line == '// QUTE_WEB_HIDE_END':
                assert hidden
                hidden = False
            elif line == "The Compiler <mail@qutebrowser.org>":
                # Author line is dropped on the website.
                continue
            elif re.fullmatch(r':\w+:.*', line):
                # asciidoc field
                continue
            if not found_title:
                if re.fullmatch(r'=+', line):
                    # Underline-style title: demote '=' underline to '-'.
                    line = line.replace('=', '-')
                    found_title = True
                    title = last_line + " | qutebrowser\n"
                    title += "=" * (len(title) - 1)
                elif re.fullmatch(r'= .+', line):
                    # Prefix-style title: demote one heading level.
                    line = '==' + line[1:]
                    found_title = True
                    title = last_line + " | qutebrowser\n"
                    title += "=" * (len(title) - 1)
            if not hidden:
                outfp.write(line.replace(".asciidoc[", ".html[") + '\n')
            last_line = line
    current_lines = outfp.getvalue()
    outfp.close()
    modified_str = title + "\n\n" + header + current_lines
    modified_src.write_text(modified_str, encoding='utf-8')
    asciidoc_args = ['--theme=qute', '-a toc', '-a toc-placement=manual',
                     '-a', 'source-highlighter=pygments']
    self.call(modified_src, dst, *asciidoc_args)
def _build_website(self) -> None:
"""Prepare and build the website."""
theme_file = REPO_ROOT / 'www' / 'qute.css' |
for item_path in pathlib.Path(REPO_ROOT).rglob('*.asciidoc'):
if item_path.stem in ['header', 'OpenSans-License']:
continue
self._build_website_file(item_path.parent, item_path.name)
copy = {'icons': 'icons', 'doc/img': 'doc/img', 'www/media': 'media/'}
for src, dest in copy.items():
full_src = REPO_ROOT / src
full_dest = outdir / dest
try:
shutil.rmtree(full_dest)
except FileNotFoundError:
pass
shutil.copytree(full_src, full_dest)
for dst, link_name in [
('README.html', 'index.html'),
((pathlib.Path('doc') / 'quickstart.html'), 'quickstart.html'),
]:
assert isinstance(dst, (str, pathlib.Path)) # for mypy
try:
(outdir / link_name).symlink_to(dst)
except FileExistsError:
pass
def _get_asciidoc_cmd(self) -> List[str]:
"""Try to find out what commandline to use to invoke asciidoc."""
if self._asciidoc is not None:
python = (sys.executable if self._asciidoc_python is None
else self._asciidoc_python)
return [python, self._asciidoc]
for executable in ['asciidoc', 'asciidoc.py']:
try:
subprocess.run([executable, '--version'],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
check=True)
except OSError:
pass
else:
return [executable]
raise FileNotFoundError
def call(self, src: pathlib.Path, dst: pathlib.Path, *args):
    """Call asciidoc for the given files.
    Args:
        src: The source .asciidoc file.
        dst: The destination .html file, or None to auto-guess.
        *args: Additional arguments passed to asciidoc.

    Exits the process (status 1) if asciidoc fails, keeping the
    temporary sources on disk for inspection.
    """
    print("Calling asciidoc for {}...".format(src.name))
    assert self._cmd is not None  # for mypy
    cmdline = self._cmd[:]
    if dst is not None:
        cmdline += ['--out-file', str(dst)]
    cmdline += args
    cmdline.append(str(src))
    # So the virtualenv's Pygments is found
    bin_path = pathlib.Path(sys.executable).parent
    try:
        env = os.environ.copy()
        # Point $HOME at our temp dir so asciidoc loads the qute theme.
        env['HOME'] = str(self._homedir)
        env['PATH'] = str(bin_path) + os.pathsep + env['PATH']
        subprocess.run(cmdline, check=True, env=env)
    except (subprocess.CalledProcessError, OSError) as e:
        # Mark failed so cleanup() leaves the homedir for debugging.
        self._failed = True
        utils.print_error(str(e))
        print("Keeping modified sources in {}.".format(self._homedir),
              file=sys.stderr)
        sys.exit(1)
def parse_args() -> argparse.Namespace:
    """Parse command-line arguments.

    Returns:
        argparse.Namespace with 'website', 'asciidoc' and
        'asciidoc_python' attributes (each None when not given).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--website', help="Build website into a given "
                        "directory.")
    parser.add_argument('--asciidoc', help="Full path to asciidoc.py. "
                        "If not given, it's searched in PATH.",
                        nargs='?')
    parser.add_argument('--asciidoc-python', help="Python to use for asciidoc."
                        "If not given, the current Python interpreter is used.",
                        nargs='?')
    return parser.parse_args()
def run(**kwargs) -> None:
    """Regenerate documentation.

    Args:
        **kwargs: forwarded to AsciiDoc() — asciidoc, asciidoc_python,
            website.
    """
    DOC_DIR.mkdir(exist_ok=True)
    asciidoc = AsciiDoc(**kwargs)
    try:
        asciidoc.prepare()
    except FileNotFoundError:
        utils.print_error("Could not find asciidoc! Please install it, or use "
                          "the --asciidoc argument to point this script to "
                          "the correct python/asciidoc.py location!")
        sys.exit(1)
    try:
        asciidoc.build()
    finally:
        # cleanup() itself keeps the temp dir when the build failed.
        asciidoc.cleanup()
def main(colors: bool = False) -> None:
    """Generate html files for the online documentation.

    Args:
        colors: enable coloured output in the scripts.utils helpers.
    """
    utils.change_cwd()
    utils.use_color = colors
    args = parse_args()
    run(asciidoc=args.asciidoc, asciidoc_python=args.asciidoc_python,
        website=args.website)
if __name__ == '__main__':
main(colors=True) | assert self._themedir is not None # for mypy
shutil.copy(theme_file, self._themedir)
assert self._website is not None # for mypy
outdir = pathlib.Path(self._website) | random_line_split |
asciidoc2html.py | #!/usr/bin/env python3
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2021 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <https://www.gnu.org/licenses/>.
"""Generate the html documentation based on the asciidoc files."""
from typing import List, Optional
import re
import os
import sys
import subprocess
import shutil
import tempfile
import argparse
import io
import pathlib
REPO_ROOT = pathlib.Path(__file__).resolve().parents[1]
DOC_DIR = REPO_ROOT / 'qutebrowser' / 'html' / 'doc'
sys.path.insert(0, str(REPO_ROOT))
from scripts import utils
class AsciiDoc:
"""Abstraction of an asciidoc subprocess."""
FILES = ['faq', 'changelog', 'contributing', 'quickstart', 'userscripts']
def __init__(self,
asciidoc: Optional[str],
asciidoc_python: Optional[str],
website: Optional[str]) -> None:
self._cmd: Optional[List[str]] = None
self._asciidoc = asciidoc
self._asciidoc_python = asciidoc_python
self._website = website
self._homedir: Optional[pathlib.Path] = None
self._themedir: Optional[pathlib.Path] = None
self._tempdir: Optional[pathlib.Path] = None
self._failed = False
def prepare(self) -> None:
"""Get the asciidoc command and create the homedir to use."""
self._cmd = self._get_asciidoc_cmd()
self._homedir = pathlib.Path(tempfile.mkdtemp())
self._themedir = self._homedir / '.asciidoc' / 'themes' / 'qute'
self._tempdir = self._homedir / 'tmp'
self._tempdir.mkdir(parents=True)
self._themedir.mkdir(parents=True)
def cleanup(self) -> None:
"""Clean up the temporary home directory for asciidoc."""
if self._homedir is not None and not self._failed:
shutil.rmtree(str(self._homedir))
def build(self) -> None:
"""Build either the website or the docs."""
if self._website:
self._build_website()
else:
self._build_docs()
self._copy_images()
def _build_docs(self) -> None:
"""Render .asciidoc files to .html sites."""
files = [((REPO_ROOT / 'doc' / '{}.asciidoc'.format(f)),
DOC_DIR / (f + ".html")) for f in self.FILES]
for src in (REPO_ROOT / 'doc' / 'help').glob('*.asciidoc'):
dst = DOC_DIR / (src.stem + ".html")
files.append((src, dst))
# patch image links to use local copy
replacements = [
("https://raw.githubusercontent.com/qutebrowser/qutebrowser/master/doc/img/cheatsheet-big.png",
"qute://help/img/cheatsheet-big.png"),
("https://raw.githubusercontent.com/qutebrowser/qutebrowser/master/doc/img/cheatsheet-small.png",
"qute://help/img/cheatsheet-small.png")
]
asciidoc_args = ['-a', 'source-highlighter=pygments']
for src, dst in files:
assert self._tempdir is not None # for mypy
modified_src = self._tempdir / src.name
with modified_src.open('w', encoding='utf-8') as moded_f, \
src.open('r', encoding='utf-8') as f:
for line in f:
for orig, repl in replacements:
line = line.replace(orig, repl)
moded_f.write(line)
self.call(modified_src, dst, *asciidoc_args)
def _copy_images(self) -> None:
"""Copy image files to qutebrowser/html/doc."""
print("Copying files...")
dst_path = DOC_DIR / 'img'
dst_path.mkdir(exist_ok=True)
for filename in ['cheatsheet-big.png', 'cheatsheet-small.png']:
src = REPO_ROOT / 'doc' / 'img' / filename
dst = dst_path / filename
shutil.copy(str(src), str(dst))
def _build_website_file(self, root: pathlib.Path, filename: str) -> None:
"""Build a single website file."""
src = root / filename
assert self._website is not None # for mypy
dst = pathlib.Path(self._website)
dst = dst / src.parent.relative_to(REPO_ROOT) / (src.stem + ".html")
dst.parent.mkdir(exist_ok=True)
assert self._tempdir is not None # for mypy
modified_src = self._tempdir / src.name
shutil.copy(str(REPO_ROOT / 'www' / 'header.asciidoc'), modified_src)
outfp = io.StringIO()
header = modified_src.read_text(encoding='utf-8')
header += "\n\n"
with src.open('r', encoding='utf-8') as infp:
outfp.write("\n\n")
hidden = False
found_title = False
title = ""
last_line = ""
for line in infp:
line = line.rstrip()
if line == '// QUTE_WEB_HIDE':
assert not hidden
hidden = True
elif line == '// QUTE_WEB_HIDE_END':
assert hidden
hidden = False
elif line == "The Compiler <mail@qutebrowser.org>":
continue
elif re.fullmatch(r':\w+:.*', line):
# asciidoc field
continue
if not found_title:
if re.fullmatch(r'=+', line):
line = line.replace('=', '-')
found_title = True
title = last_line + " | qutebrowser\n"
title += "=" * (len(title) - 1)
elif re.fullmatch(r'= .+', line):
line = '==' + line[1:]
found_title = True
title = last_line + " | qutebrowser\n"
title += "=" * (len(title) - 1)
if not hidden:
outfp.write(line.replace(".asciidoc[", ".html[") + '\n')
last_line = line
current_lines = outfp.getvalue()
outfp.close()
modified_str = title + "\n\n" + header + current_lines
modified_src.write_text(modified_str, encoding='utf-8')
asciidoc_args = ['--theme=qute', '-a toc', '-a toc-placement=manual',
'-a', 'source-highlighter=pygments']
self.call(modified_src, dst, *asciidoc_args)
def _build_website(self) -> None:
"""Prepare and build the website."""
theme_file = REPO_ROOT / 'www' / 'qute.css'
assert self._themedir is not None # for mypy
shutil.copy(theme_file, self._themedir)
assert self._website is not None # for mypy
outdir = pathlib.Path(self._website)
for item_path in pathlib.Path(REPO_ROOT).rglob('*.asciidoc'):
if item_path.stem in ['header', 'OpenSans-License']:
continue
self._build_website_file(item_path.parent, item_path.name)
copy = {'icons': 'icons', 'doc/img': 'doc/img', 'www/media': 'media/'}
for src, dest in copy.items():
full_src = REPO_ROOT / src
full_dest = outdir / dest
try:
shutil.rmtree(full_dest)
except FileNotFoundError:
pass
shutil.copytree(full_src, full_dest)
for dst, link_name in [
('README.html', 'index.html'),
((pathlib.Path('doc') / 'quickstart.html'), 'quickstart.html'),
]:
assert isinstance(dst, (str, pathlib.Path)) # for mypy
try:
(outdir / link_name).symlink_to(dst)
except FileExistsError:
pass
def _get_asciidoc_cmd(self) -> List[str]:
"""Try to find out what commandline to use to invoke asciidoc."""
if self._asciidoc is not None:
python = (sys.executable if self._asciidoc_python is None
else self._asciidoc_python)
return [python, self._asciidoc]
for executable in ['asciidoc', 'asciidoc.py']:
try:
subprocess.run([executable, '--version'],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
check=True)
except OSError:
pass
else:
return [executable]
raise FileNotFoundError
def call(self, src: pathlib.Path, dst: pathlib.Path, *args):
"""Call asciidoc for the given files.
Args:
src: The source .asciidoc file.
dst: The destination .html file, or None to auto-guess.
*args: Additional arguments passed to asciidoc.
"""
print("Calling asciidoc for {}...".format(src.name))
assert self._cmd is not None # for mypy
cmdline = self._cmd[:]
if dst is not None:
cmdline += ['--out-file', str(dst)]
cmdline += args
cmdline.append(str(src))
# So the virtualenv's Pygments is found
bin_path = pathlib.Path(sys.executable).parent
try:
env = os.environ.copy()
env['HOME'] = str(self._homedir)
env['PATH'] = str(bin_path) + os.pathsep + env['PATH']
subprocess.run(cmdline, check=True, env=env)
except (subprocess.CalledProcessError, OSError) as e:
self._failed = True
utils.print_error(str(e))
print("Keeping modified sources in {}.".format(self._homedir),
file=sys.stderr)
sys.exit(1)
def parse_args() -> argparse.Namespace:
"""Parse command-line arguments."""
parser = argparse.ArgumentParser()
parser.add_argument('--website', help="Build website into a given "
"directory.")
parser.add_argument('--asciidoc', help="Full path to asciidoc.py. "
"If not given, it's searched in PATH.",
nargs='?')
parser.add_argument('--asciidoc-python', help="Python to use for asciidoc."
"If not given, the current Python interpreter is used.",
nargs='?')
return parser.parse_args()
def run(**kwargs) -> None:
"""Regenerate documentation."""
DOC_DIR.mkdir(exist_ok=True)
asciidoc = AsciiDoc(**kwargs)
try:
asciidoc.prepare()
except FileNotFoundError:
utils.print_error("Could not find asciidoc! Please install it, or use "
"the --asciidoc argument to point this script to "
"the correct python/asciidoc.py location!")
sys.exit(1)
try:
asciidoc.build()
finally:
asciidoc.cleanup()
def | (colors: bool = False) -> None:
"""Generate html files for the online documentation."""
utils.change_cwd()
utils.use_color = colors
args = parse_args()
run(asciidoc=args.asciidoc, asciidoc_python=args.asciidoc_python,
website=args.website)
if __name__ == '__main__':
main(colors=True)
| main | identifier_name |
asciidoc2html.py | #!/usr/bin/env python3
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2021 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <https://www.gnu.org/licenses/>.
"""Generate the html documentation based on the asciidoc files."""
from typing import List, Optional
import re
import os
import sys
import subprocess
import shutil
import tempfile
import argparse
import io
import pathlib
REPO_ROOT = pathlib.Path(__file__).resolve().parents[1]
DOC_DIR = REPO_ROOT / 'qutebrowser' / 'html' / 'doc'
sys.path.insert(0, str(REPO_ROOT))
from scripts import utils
class AsciiDoc:
"""Abstraction of an asciidoc subprocess."""
FILES = ['faq', 'changelog', 'contributing', 'quickstart', 'userscripts']
def __init__(self,
asciidoc: Optional[str],
asciidoc_python: Optional[str],
website: Optional[str]) -> None:
self._cmd: Optional[List[str]] = None
self._asciidoc = asciidoc
self._asciidoc_python = asciidoc_python
self._website = website
self._homedir: Optional[pathlib.Path] = None
self._themedir: Optional[pathlib.Path] = None
self._tempdir: Optional[pathlib.Path] = None
self._failed = False
def prepare(self) -> None:
"""Get the asciidoc command and create the homedir to use."""
self._cmd = self._get_asciidoc_cmd()
self._homedir = pathlib.Path(tempfile.mkdtemp())
self._themedir = self._homedir / '.asciidoc' / 'themes' / 'qute'
self._tempdir = self._homedir / 'tmp'
self._tempdir.mkdir(parents=True)
self._themedir.mkdir(parents=True)
def cleanup(self) -> None:
"""Clean up the temporary home directory for asciidoc."""
if self._homedir is not None and not self._failed:
|
def build(self) -> None:
"""Build either the website or the docs."""
if self._website:
self._build_website()
else:
self._build_docs()
self._copy_images()
def _build_docs(self) -> None:
"""Render .asciidoc files to .html sites."""
files = [((REPO_ROOT / 'doc' / '{}.asciidoc'.format(f)),
DOC_DIR / (f + ".html")) for f in self.FILES]
for src in (REPO_ROOT / 'doc' / 'help').glob('*.asciidoc'):
dst = DOC_DIR / (src.stem + ".html")
files.append((src, dst))
# patch image links to use local copy
replacements = [
("https://raw.githubusercontent.com/qutebrowser/qutebrowser/master/doc/img/cheatsheet-big.png",
"qute://help/img/cheatsheet-big.png"),
("https://raw.githubusercontent.com/qutebrowser/qutebrowser/master/doc/img/cheatsheet-small.png",
"qute://help/img/cheatsheet-small.png")
]
asciidoc_args = ['-a', 'source-highlighter=pygments']
for src, dst in files:
assert self._tempdir is not None # for mypy
modified_src = self._tempdir / src.name
with modified_src.open('w', encoding='utf-8') as moded_f, \
src.open('r', encoding='utf-8') as f:
for line in f:
for orig, repl in replacements:
line = line.replace(orig, repl)
moded_f.write(line)
self.call(modified_src, dst, *asciidoc_args)
def _copy_images(self) -> None:
"""Copy image files to qutebrowser/html/doc."""
print("Copying files...")
dst_path = DOC_DIR / 'img'
dst_path.mkdir(exist_ok=True)
for filename in ['cheatsheet-big.png', 'cheatsheet-small.png']:
src = REPO_ROOT / 'doc' / 'img' / filename
dst = dst_path / filename
shutil.copy(str(src), str(dst))
def _build_website_file(self, root: pathlib.Path, filename: str) -> None:
"""Build a single website file."""
src = root / filename
assert self._website is not None # for mypy
dst = pathlib.Path(self._website)
dst = dst / src.parent.relative_to(REPO_ROOT) / (src.stem + ".html")
dst.parent.mkdir(exist_ok=True)
assert self._tempdir is not None # for mypy
modified_src = self._tempdir / src.name
shutil.copy(str(REPO_ROOT / 'www' / 'header.asciidoc'), modified_src)
outfp = io.StringIO()
header = modified_src.read_text(encoding='utf-8')
header += "\n\n"
with src.open('r', encoding='utf-8') as infp:
outfp.write("\n\n")
hidden = False
found_title = False
title = ""
last_line = ""
for line in infp:
line = line.rstrip()
if line == '// QUTE_WEB_HIDE':
assert not hidden
hidden = True
elif line == '// QUTE_WEB_HIDE_END':
assert hidden
hidden = False
elif line == "The Compiler <mail@qutebrowser.org>":
continue
elif re.fullmatch(r':\w+:.*', line):
# asciidoc field
continue
if not found_title:
if re.fullmatch(r'=+', line):
line = line.replace('=', '-')
found_title = True
title = last_line + " | qutebrowser\n"
title += "=" * (len(title) - 1)
elif re.fullmatch(r'= .+', line):
line = '==' + line[1:]
found_title = True
title = last_line + " | qutebrowser\n"
title += "=" * (len(title) - 1)
if not hidden:
outfp.write(line.replace(".asciidoc[", ".html[") + '\n')
last_line = line
current_lines = outfp.getvalue()
outfp.close()
modified_str = title + "\n\n" + header + current_lines
modified_src.write_text(modified_str, encoding='utf-8')
asciidoc_args = ['--theme=qute', '-a toc', '-a toc-placement=manual',
'-a', 'source-highlighter=pygments']
self.call(modified_src, dst, *asciidoc_args)
def _build_website(self) -> None:
"""Prepare and build the website."""
theme_file = REPO_ROOT / 'www' / 'qute.css'
assert self._themedir is not None # for mypy
shutil.copy(theme_file, self._themedir)
assert self._website is not None # for mypy
outdir = pathlib.Path(self._website)
for item_path in pathlib.Path(REPO_ROOT).rglob('*.asciidoc'):
if item_path.stem in ['header', 'OpenSans-License']:
continue
self._build_website_file(item_path.parent, item_path.name)
copy = {'icons': 'icons', 'doc/img': 'doc/img', 'www/media': 'media/'}
for src, dest in copy.items():
full_src = REPO_ROOT / src
full_dest = outdir / dest
try:
shutil.rmtree(full_dest)
except FileNotFoundError:
pass
shutil.copytree(full_src, full_dest)
for dst, link_name in [
('README.html', 'index.html'),
((pathlib.Path('doc') / 'quickstart.html'), 'quickstart.html'),
]:
assert isinstance(dst, (str, pathlib.Path)) # for mypy
try:
(outdir / link_name).symlink_to(dst)
except FileExistsError:
pass
def _get_asciidoc_cmd(self) -> List[str]:
"""Try to find out what commandline to use to invoke asciidoc."""
if self._asciidoc is not None:
python = (sys.executable if self._asciidoc_python is None
else self._asciidoc_python)
return [python, self._asciidoc]
for executable in ['asciidoc', 'asciidoc.py']:
try:
subprocess.run([executable, '--version'],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
check=True)
except OSError:
pass
else:
return [executable]
raise FileNotFoundError
def call(self, src: pathlib.Path, dst: pathlib.Path, *args):
"""Call asciidoc for the given files.
Args:
src: The source .asciidoc file.
dst: The destination .html file, or None to auto-guess.
*args: Additional arguments passed to asciidoc.
"""
print("Calling asciidoc for {}...".format(src.name))
assert self._cmd is not None # for mypy
cmdline = self._cmd[:]
if dst is not None:
cmdline += ['--out-file', str(dst)]
cmdline += args
cmdline.append(str(src))
# So the virtualenv's Pygments is found
bin_path = pathlib.Path(sys.executable).parent
try:
env = os.environ.copy()
env['HOME'] = str(self._homedir)
env['PATH'] = str(bin_path) + os.pathsep + env['PATH']
subprocess.run(cmdline, check=True, env=env)
except (subprocess.CalledProcessError, OSError) as e:
self._failed = True
utils.print_error(str(e))
print("Keeping modified sources in {}.".format(self._homedir),
file=sys.stderr)
sys.exit(1)
def parse_args() -> argparse.Namespace:
"""Parse command-line arguments."""
parser = argparse.ArgumentParser()
parser.add_argument('--website', help="Build website into a given "
"directory.")
parser.add_argument('--asciidoc', help="Full path to asciidoc.py. "
"If not given, it's searched in PATH.",
nargs='?')
parser.add_argument('--asciidoc-python', help="Python to use for asciidoc."
"If not given, the current Python interpreter is used.",
nargs='?')
return parser.parse_args()
def run(**kwargs) -> None:
"""Regenerate documentation."""
DOC_DIR.mkdir(exist_ok=True)
asciidoc = AsciiDoc(**kwargs)
try:
asciidoc.prepare()
except FileNotFoundError:
utils.print_error("Could not find asciidoc! Please install it, or use "
"the --asciidoc argument to point this script to "
"the correct python/asciidoc.py location!")
sys.exit(1)
try:
asciidoc.build()
finally:
asciidoc.cleanup()
def main(colors: bool = False) -> None:
"""Generate html files for the online documentation."""
utils.change_cwd()
utils.use_color = colors
args = parse_args()
run(asciidoc=args.asciidoc, asciidoc_python=args.asciidoc_python,
website=args.website)
if __name__ == '__main__':
main(colors=True)
| shutil.rmtree(str(self._homedir)) | conditional_block |
asciidoc2html.py | #!/usr/bin/env python3
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2021 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <https://www.gnu.org/licenses/>.
"""Generate the html documentation based on the asciidoc files."""
from typing import List, Optional
import re
import os
import sys
import subprocess
import shutil
import tempfile
import argparse
import io
import pathlib
REPO_ROOT = pathlib.Path(__file__).resolve().parents[1]
DOC_DIR = REPO_ROOT / 'qutebrowser' / 'html' / 'doc'
sys.path.insert(0, str(REPO_ROOT))
from scripts import utils
class AsciiDoc:
"""Abstraction of an asciidoc subprocess."""
FILES = ['faq', 'changelog', 'contributing', 'quickstart', 'userscripts']
def __init__(self,
asciidoc: Optional[str],
asciidoc_python: Optional[str],
website: Optional[str]) -> None:
self._cmd: Optional[List[str]] = None
self._asciidoc = asciidoc
self._asciidoc_python = asciidoc_python
self._website = website
self._homedir: Optional[pathlib.Path] = None
self._themedir: Optional[pathlib.Path] = None
self._tempdir: Optional[pathlib.Path] = None
self._failed = False
def prepare(self) -> None:
"""Get the asciidoc command and create the homedir to use."""
self._cmd = self._get_asciidoc_cmd()
self._homedir = pathlib.Path(tempfile.mkdtemp())
self._themedir = self._homedir / '.asciidoc' / 'themes' / 'qute'
self._tempdir = self._homedir / 'tmp'
self._tempdir.mkdir(parents=True)
self._themedir.mkdir(parents=True)
def cleanup(self) -> None:
"""Clean up the temporary home directory for asciidoc."""
if self._homedir is not None and not self._failed:
shutil.rmtree(str(self._homedir))
def build(self) -> None:
"""Build either the website or the docs."""
if self._website:
self._build_website()
else:
self._build_docs()
self._copy_images()
def _build_docs(self) -> None:
"""Render .asciidoc files to .html sites."""
files = [((REPO_ROOT / 'doc' / '{}.asciidoc'.format(f)),
DOC_DIR / (f + ".html")) for f in self.FILES]
for src in (REPO_ROOT / 'doc' / 'help').glob('*.asciidoc'):
dst = DOC_DIR / (src.stem + ".html")
files.append((src, dst))
# patch image links to use local copy
replacements = [
("https://raw.githubusercontent.com/qutebrowser/qutebrowser/master/doc/img/cheatsheet-big.png",
"qute://help/img/cheatsheet-big.png"),
("https://raw.githubusercontent.com/qutebrowser/qutebrowser/master/doc/img/cheatsheet-small.png",
"qute://help/img/cheatsheet-small.png")
]
asciidoc_args = ['-a', 'source-highlighter=pygments']
for src, dst in files:
assert self._tempdir is not None # for mypy
modified_src = self._tempdir / src.name
with modified_src.open('w', encoding='utf-8') as moded_f, \
src.open('r', encoding='utf-8') as f:
for line in f:
for orig, repl in replacements:
line = line.replace(orig, repl)
moded_f.write(line)
self.call(modified_src, dst, *asciidoc_args)
def _copy_images(self) -> None:
"""Copy image files to qutebrowser/html/doc."""
print("Copying files...")
dst_path = DOC_DIR / 'img'
dst_path.mkdir(exist_ok=True)
for filename in ['cheatsheet-big.png', 'cheatsheet-small.png']:
src = REPO_ROOT / 'doc' / 'img' / filename
dst = dst_path / filename
shutil.copy(str(src), str(dst))
def _build_website_file(self, root: pathlib.Path, filename: str) -> None:
"""Build a single website file."""
src = root / filename
assert self._website is not None # for mypy
dst = pathlib.Path(self._website)
dst = dst / src.parent.relative_to(REPO_ROOT) / (src.stem + ".html")
dst.parent.mkdir(exist_ok=True)
assert self._tempdir is not None # for mypy
modified_src = self._tempdir / src.name
shutil.copy(str(REPO_ROOT / 'www' / 'header.asciidoc'), modified_src)
outfp = io.StringIO()
header = modified_src.read_text(encoding='utf-8')
header += "\n\n"
with src.open('r', encoding='utf-8') as infp:
outfp.write("\n\n")
hidden = False
found_title = False
title = ""
last_line = ""
for line in infp:
line = line.rstrip()
if line == '// QUTE_WEB_HIDE':
assert not hidden
hidden = True
elif line == '// QUTE_WEB_HIDE_END':
assert hidden
hidden = False
elif line == "The Compiler <mail@qutebrowser.org>":
continue
elif re.fullmatch(r':\w+:.*', line):
# asciidoc field
continue
if not found_title:
if re.fullmatch(r'=+', line):
line = line.replace('=', '-')
found_title = True
title = last_line + " | qutebrowser\n"
title += "=" * (len(title) - 1)
elif re.fullmatch(r'= .+', line):
line = '==' + line[1:]
found_title = True
title = last_line + " | qutebrowser\n"
title += "=" * (len(title) - 1)
if not hidden:
outfp.write(line.replace(".asciidoc[", ".html[") + '\n')
last_line = line
current_lines = outfp.getvalue()
outfp.close()
modified_str = title + "\n\n" + header + current_lines
modified_src.write_text(modified_str, encoding='utf-8')
asciidoc_args = ['--theme=qute', '-a toc', '-a toc-placement=manual',
'-a', 'source-highlighter=pygments']
self.call(modified_src, dst, *asciidoc_args)
def _build_website(self) -> None:
"""Prepare and build the website."""
theme_file = REPO_ROOT / 'www' / 'qute.css'
assert self._themedir is not None # for mypy
shutil.copy(theme_file, self._themedir)
assert self._website is not None # for mypy
outdir = pathlib.Path(self._website)
for item_path in pathlib.Path(REPO_ROOT).rglob('*.asciidoc'):
if item_path.stem in ['header', 'OpenSans-License']:
continue
self._build_website_file(item_path.parent, item_path.name)
copy = {'icons': 'icons', 'doc/img': 'doc/img', 'www/media': 'media/'}
for src, dest in copy.items():
full_src = REPO_ROOT / src
full_dest = outdir / dest
try:
shutil.rmtree(full_dest)
except FileNotFoundError:
pass
shutil.copytree(full_src, full_dest)
for dst, link_name in [
('README.html', 'index.html'),
((pathlib.Path('doc') / 'quickstart.html'), 'quickstart.html'),
]:
assert isinstance(dst, (str, pathlib.Path)) # for mypy
try:
(outdir / link_name).symlink_to(dst)
except FileExistsError:
pass
def _get_asciidoc_cmd(self) -> List[str]:
"""Try to find out what commandline to use to invoke asciidoc."""
if self._asciidoc is not None:
python = (sys.executable if self._asciidoc_python is None
else self._asciidoc_python)
return [python, self._asciidoc]
for executable in ['asciidoc', 'asciidoc.py']:
try:
subprocess.run([executable, '--version'],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
check=True)
except OSError:
pass
else:
return [executable]
raise FileNotFoundError
def call(self, src: pathlib.Path, dst: pathlib.Path, *args):
|
def parse_args() -> argparse.Namespace:
"""Parse command-line arguments."""
parser = argparse.ArgumentParser()
parser.add_argument('--website', help="Build website into a given "
"directory.")
parser.add_argument('--asciidoc', help="Full path to asciidoc.py. "
"If not given, it's searched in PATH.",
nargs='?')
parser.add_argument('--asciidoc-python', help="Python to use for asciidoc."
"If not given, the current Python interpreter is used.",
nargs='?')
return parser.parse_args()
def run(**kwargs) -> None:
"""Regenerate documentation."""
DOC_DIR.mkdir(exist_ok=True)
asciidoc = AsciiDoc(**kwargs)
try:
asciidoc.prepare()
except FileNotFoundError:
utils.print_error("Could not find asciidoc! Please install it, or use "
"the --asciidoc argument to point this script to "
"the correct python/asciidoc.py location!")
sys.exit(1)
try:
asciidoc.build()
finally:
asciidoc.cleanup()
def main(colors: bool = False) -> None:
"""Generate html files for the online documentation."""
utils.change_cwd()
utils.use_color = colors
args = parse_args()
run(asciidoc=args.asciidoc, asciidoc_python=args.asciidoc_python,
website=args.website)
if __name__ == '__main__':
main(colors=True)
| """Call asciidoc for the given files.
Args:
src: The source .asciidoc file.
dst: The destination .html file, or None to auto-guess.
*args: Additional arguments passed to asciidoc.
"""
print("Calling asciidoc for {}...".format(src.name))
assert self._cmd is not None # for mypy
cmdline = self._cmd[:]
if dst is not None:
cmdline += ['--out-file', str(dst)]
cmdline += args
cmdline.append(str(src))
# So the virtualenv's Pygments is found
bin_path = pathlib.Path(sys.executable).parent
try:
env = os.environ.copy()
env['HOME'] = str(self._homedir)
env['PATH'] = str(bin_path) + os.pathsep + env['PATH']
subprocess.run(cmdline, check=True, env=env)
except (subprocess.CalledProcessError, OSError) as e:
self._failed = True
utils.print_error(str(e))
print("Keeping modified sources in {}.".format(self._homedir),
file=sys.stderr)
sys.exit(1) | identifier_body |
lda_testing.py | __author__ = 'fpena'
import numpy as np
import lda
import lda.datasets
def | ():
# document-term matrix
X = lda.datasets.load_reuters()
print("type(X): {}".format(type(X)))
print("shape: {}\n".format(X.shape))
# the vocab
vocab = lda.datasets.load_reuters_vocab()
print("type(vocab): {}".format(type(vocab)))
print("len(vocab): {}\n".format(len(vocab)))
# titles for each story
titles = lda.datasets.load_reuters_titles()
print("type(titles): {}".format(type(titles)))
print("len(titles): {}\n".format(len(titles)))
doc_id = 0
word_id = 3117
print("doc id: {} word id: {}".format(doc_id, word_id))
print("-- count: {}".format(X[doc_id, word_id]))
print("-- word : {}".format(vocab[word_id]))
print("-- doc : {}".format(titles[doc_id]))
model = lda.LDA(n_topics=20, n_iter=500, random_state=1)
model.fit(X)
topic_word = model.topic_word_
print("type(topic_word): {}".format(type(topic_word)))
print("shape: {}".format(topic_word.shape))
for n in range(5):
sum_pr = sum(topic_word[n,:])
print("topic: {} sum: {}".format(n, sum_pr))
n = 5
for i, topic_dist in enumerate(topic_word):
topic_words = np.array(vocab)[np.argsort(topic_dist)][:-(n+1):-1]
print('*Topic {}\n- {}'.format(i, ' '.join(topic_words)))
doc_topic = model.doc_topic_
print("type(doc_topic): {}".format(type(doc_topic)))
print("shape: {}".format(doc_topic.shape))
for n in range(5):
sum_pr = sum(doc_topic[n,:])
print("document: {} sum: {}".format(n, sum_pr))
for n in range(10):
topic_most_pr = doc_topic[n].argmax()
print("doc: {} topic: {}\n{}...".format(n,
topic_most_pr,
titles[n][:50]))
reuters_dataset = lda.datasets.load_reuters()
vocab = lda.datasets.load_reuters_vocab()
titles = lda.datasets.load_reuters_titles()
print('Dataset shape', reuters_dataset.shape)
print(reuters_dataset[0].shape)
print('Vocab shape', len(vocab))
print(vocab[0])
print('Titles shape', len(titles))
print(titles[0])
print(titles[1])
print(titles[100])
for word in reuters_dataset[0]:
if word > 1:
print(word)
| run | identifier_name |
lda_testing.py | __author__ = 'fpena'
import numpy as np
import lda
import lda.datasets
def run():
# document-term matrix
|
reuters_dataset = lda.datasets.load_reuters()
vocab = lda.datasets.load_reuters_vocab()
titles = lda.datasets.load_reuters_titles()
print('Dataset shape', reuters_dataset.shape)
print(reuters_dataset[0].shape)
print('Vocab shape', len(vocab))
print(vocab[0])
print('Titles shape', len(titles))
print(titles[0])
print(titles[1])
print(titles[100])
for word in reuters_dataset[0]:
if word > 1:
print(word)
| X = lda.datasets.load_reuters()
print("type(X): {}".format(type(X)))
print("shape: {}\n".format(X.shape))
# the vocab
vocab = lda.datasets.load_reuters_vocab()
print("type(vocab): {}".format(type(vocab)))
print("len(vocab): {}\n".format(len(vocab)))
# titles for each story
titles = lda.datasets.load_reuters_titles()
print("type(titles): {}".format(type(titles)))
print("len(titles): {}\n".format(len(titles)))
doc_id = 0
word_id = 3117
print("doc id: {} word id: {}".format(doc_id, word_id))
print("-- count: {}".format(X[doc_id, word_id]))
print("-- word : {}".format(vocab[word_id]))
print("-- doc : {}".format(titles[doc_id]))
model = lda.LDA(n_topics=20, n_iter=500, random_state=1)
model.fit(X)
topic_word = model.topic_word_
print("type(topic_word): {}".format(type(topic_word)))
print("shape: {}".format(topic_word.shape))
for n in range(5):
sum_pr = sum(topic_word[n,:])
print("topic: {} sum: {}".format(n, sum_pr))
n = 5
for i, topic_dist in enumerate(topic_word):
topic_words = np.array(vocab)[np.argsort(topic_dist)][:-(n+1):-1]
print('*Topic {}\n- {}'.format(i, ' '.join(topic_words)))
doc_topic = model.doc_topic_
print("type(doc_topic): {}".format(type(doc_topic)))
print("shape: {}".format(doc_topic.shape))
for n in range(5):
sum_pr = sum(doc_topic[n,:])
print("document: {} sum: {}".format(n, sum_pr))
for n in range(10):
topic_most_pr = doc_topic[n].argmax()
print("doc: {} topic: {}\n{}...".format(n,
topic_most_pr,
titles[n][:50])) | identifier_body |
lda_testing.py | __author__ = 'fpena'
import numpy as np
import lda
import lda.datasets
def run():
# document-term matrix
X = lda.datasets.load_reuters()
print("type(X): {}".format(type(X)))
print("shape: {}\n".format(X.shape))
# the vocab
vocab = lda.datasets.load_reuters_vocab()
print("type(vocab): {}".format(type(vocab)))
print("len(vocab): {}\n".format(len(vocab)))
# titles for each story
titles = lda.datasets.load_reuters_titles()
print("type(titles): {}".format(type(titles)))
print("len(titles): {}\n".format(len(titles)))
doc_id = 0
word_id = 3117
print("doc id: {} word id: {}".format(doc_id, word_id))
print("-- count: {}".format(X[doc_id, word_id]))
print("-- word : {}".format(vocab[word_id]))
print("-- doc : {}".format(titles[doc_id]))
model = lda.LDA(n_topics=20, n_iter=500, random_state=1)
model.fit(X)
topic_word = model.topic_word_
print("type(topic_word): {}".format(type(topic_word)))
print("shape: {}".format(topic_word.shape))
for n in range(5):
sum_pr = sum(topic_word[n,:])
print("topic: {} sum: {}".format(n, sum_pr))
n = 5
for i, topic_dist in enumerate(topic_word):
topic_words = np.array(vocab)[np.argsort(topic_dist)][:-(n+1):-1]
print('*Topic {}\n- {}'.format(i, ' '.join(topic_words)))
doc_topic = model.doc_topic_
print("type(doc_topic): {}".format(type(doc_topic)))
print("shape: {}".format(doc_topic.shape))
for n in range(5):
sum_pr = sum(doc_topic[n,:])
print("document: {} sum: {}".format(n, sum_pr))
for n in range(10):
topic_most_pr = doc_topic[n].argmax()
print("doc: {} topic: {}\n{}...".format(n,
topic_most_pr,
titles[n][:50]))
reuters_dataset = lda.datasets.load_reuters()
vocab = lda.datasets.load_reuters_vocab()
titles = lda.datasets.load_reuters_titles()
print('Dataset shape', reuters_dataset.shape)
print(reuters_dataset[0].shape)
print('Vocab shape', len(vocab))
print(vocab[0])
print('Titles shape', len(titles))
print(titles[0])
print(titles[1])
print(titles[100])
for word in reuters_dataset[0]:
| if word > 1:
print(word) | conditional_block | |
lda_testing.py | __author__ = 'fpena'
import numpy as np
import lda
import lda.datasets
def run():
# document-term matrix
X = lda.datasets.load_reuters()
print("type(X): {}".format(type(X))) | vocab = lda.datasets.load_reuters_vocab()
print("type(vocab): {}".format(type(vocab)))
print("len(vocab): {}\n".format(len(vocab)))
# titles for each story
titles = lda.datasets.load_reuters_titles()
print("type(titles): {}".format(type(titles)))
print("len(titles): {}\n".format(len(titles)))
doc_id = 0
word_id = 3117
print("doc id: {} word id: {}".format(doc_id, word_id))
print("-- count: {}".format(X[doc_id, word_id]))
print("-- word : {}".format(vocab[word_id]))
print("-- doc : {}".format(titles[doc_id]))
model = lda.LDA(n_topics=20, n_iter=500, random_state=1)
model.fit(X)
topic_word = model.topic_word_
print("type(topic_word): {}".format(type(topic_word)))
print("shape: {}".format(topic_word.shape))
for n in range(5):
sum_pr = sum(topic_word[n,:])
print("topic: {} sum: {}".format(n, sum_pr))
n = 5
for i, topic_dist in enumerate(topic_word):
topic_words = np.array(vocab)[np.argsort(topic_dist)][:-(n+1):-1]
print('*Topic {}\n- {}'.format(i, ' '.join(topic_words)))
doc_topic = model.doc_topic_
print("type(doc_topic): {}".format(type(doc_topic)))
print("shape: {}".format(doc_topic.shape))
for n in range(5):
sum_pr = sum(doc_topic[n,:])
print("document: {} sum: {}".format(n, sum_pr))
for n in range(10):
topic_most_pr = doc_topic[n].argmax()
print("doc: {} topic: {}\n{}...".format(n,
topic_most_pr,
titles[n][:50]))
reuters_dataset = lda.datasets.load_reuters()
vocab = lda.datasets.load_reuters_vocab()
titles = lda.datasets.load_reuters_titles()
print('Dataset shape', reuters_dataset.shape)
print(reuters_dataset[0].shape)
print('Vocab shape', len(vocab))
print(vocab[0])
print('Titles shape', len(titles))
print(titles[0])
print(titles[1])
print(titles[100])
for word in reuters_dataset[0]:
if word > 1:
print(word) | print("shape: {}\n".format(X.shape))
# the vocab | random_line_split |
es5.demo.config.ts | import * as fs from 'fs';
import * as minimist from 'minimist';
import * as path from 'path';
import {baseConfig} from './es5.base.config';
| // Allow for specific demos to built with a --demos=<someName>,<someOtherName>
// CLI format.
const args = minimist(process.argv.slice(2));
const specified: string[] = args.demos ? args.demos.split(',') : [];
const getDemos = source => {
return fs.readdirSync(source)
.filter(name => path.extname(name) === '.html' && name !== 'index.html')
.map(name => path.basename(name, '.html'))
.filter(demo => specified.length ? specified.includes(demo) : true);
};
const entries = getDemos('./demos').reduce((obj, name) => {
obj[name] = `./demos/${name}.ts`;
return obj;
}, {});
module.exports = {
...baseConfig,
devtool: 'inline-source-map',
mode: 'development',
entry: {
...entries,
},
output: {
filename: '[name]_bundle.js',
path: path.resolve(__dirname, '../demos'),
},
devServer: {
contentBase: path.join(__dirname, '../demos'),
port: 8080,
},
}; | random_line_split | |
getting_started.py | from dcgpy import expression_gdual_double as expression
from dcgpy import kernel_set_gdual_double as kernel_set
from pyaudi import gdual_double as gdual
# 1- Instantiate a random expression using the 4 basic arithmetic operations
ks = kernel_set(["sum", "diff", "div", "mul"])
ex = expression(inputs = 1,
outputs = 1,
rows = 1,
cols = 6,
levels_back = 6,
arity = 2, | n_eph = 0,
seed = 4232123212)
# 2 - Define the symbol set to be used in visualizing the expression
# (in our case, 1 input variable named "x") and visualize the expression
in_sym = ["x"]
print("Expression:", ex(in_sym)[0])
# 3 - Print the simplified expression
print("Simplified expression:", ex.simplify(in_sym))
# 4 - Visualize the dCGP graph
ex.visualize(in_sym)
# 5 - Define a gdual number of value 1.2 and truncation order 2
x = gdual(1.2, "x", 2)
# 6 - Compute the output of the expression and its second derivative in x = 1.2 and print
print("Expression in x=1.2:", ex([x])[0])
print("Second derivative:", ex([x])[0].get_derivative([2]))
# 5 - Mutate the expression with 2 random mutations of active genes and print
ex.mutate_active(2)
print("Mutated expression:", ex(in_sym)[0]) | kernels = ks(), | random_line_split |
controller.js | import angular from 'angular';
class PhotosController {
/** @ngInject */
constructor($scope, $stateParams, $state, photosGallery) {
this.$scope = $scope;
this.$stateParams = $stateParams;
this.$state = $state;
this.photosGallery = photosGallery;
this.photosByMonth = {};
this.initWatchers();
this.showPhotos();
}
initWatchers() {
this.$scope.$watch(() => this.photosGallery.photos, this.groupPhotosByMonth.bind(this));
}
showPhoto(id) {
this.$state.go('photo-detail', { id });
}
showPage(page) {
this.$state.go(
'photos',
{ page, search: this.photosGallery.search },
{ location: 'replace' }
);
}
showPhotos() {
const page = parseInt(this.$stateParams.page, 10) || undefined
const search = this.$stateParams.search
this.photosGallery.showPhotos({ page, search });
}
pageButtonClass(page) {
if (page === this.photosGallery.currentPage) {
return 'md-raised md-primary';
}
return 'md-raised custom';
}
groupPhotosByMonth(photos) {
const res = {};
photos.forEach((photo) => {
const month = this.monthLabel(photo);
if (!res[month]) {
res[month] = [];
}
res[month].push(photo);
});
this.photosByMonth = res;
console.log("PhotosController.groupPhotosByMonth: \n", this.photosByMonth);
}
| (photo) {
const date = new Date(photo.metadata.createDate);
const month = date.toLocaleString('en', { month: 'short' });
return `${month} ${date.getFullYear()} `;
}
}
export default angular.module('photos.controller', [])
.controller('photosController', PhotosController);
| monthLabel | identifier_name |
controller.js | import angular from 'angular';
class PhotosController {
/** @ngInject */
constructor($scope, $stateParams, $state, photosGallery) |
initWatchers() {
this.$scope.$watch(() => this.photosGallery.photos, this.groupPhotosByMonth.bind(this));
}
showPhoto(id) {
this.$state.go('photo-detail', { id });
}
showPage(page) {
this.$state.go(
'photos',
{ page, search: this.photosGallery.search },
{ location: 'replace' }
);
}
showPhotos() {
const page = parseInt(this.$stateParams.page, 10) || undefined
const search = this.$stateParams.search
this.photosGallery.showPhotos({ page, search });
}
pageButtonClass(page) {
if (page === this.photosGallery.currentPage) {
return 'md-raised md-primary';
}
return 'md-raised custom';
}
groupPhotosByMonth(photos) {
const res = {};
photos.forEach((photo) => {
const month = this.monthLabel(photo);
if (!res[month]) {
res[month] = [];
}
res[month].push(photo);
});
this.photosByMonth = res;
console.log("PhotosController.groupPhotosByMonth: \n", this.photosByMonth);
}
monthLabel(photo) {
const date = new Date(photo.metadata.createDate);
const month = date.toLocaleString('en', { month: 'short' });
return `${month} ${date.getFullYear()} `;
}
}
export default angular.module('photos.controller', [])
.controller('photosController', PhotosController);
| {
this.$scope = $scope;
this.$stateParams = $stateParams;
this.$state = $state;
this.photosGallery = photosGallery;
this.photosByMonth = {};
this.initWatchers();
this.showPhotos();
} | identifier_body |
controller.js | import angular from 'angular';
class PhotosController {
/** @ngInject */
constructor($scope, $stateParams, $state, photosGallery) {
this.$scope = $scope;
this.$stateParams = $stateParams;
this.$state = $state;
this.photosGallery = photosGallery;
this.photosByMonth = {};
this.initWatchers();
this.showPhotos();
}
initWatchers() {
this.$scope.$watch(() => this.photosGallery.photos, this.groupPhotosByMonth.bind(this));
}
showPhoto(id) {
this.$state.go('photo-detail', { id });
}
showPage(page) {
this.$state.go(
'photos',
{ page, search: this.photosGallery.search },
{ location: 'replace' }
);
}
showPhotos() {
const page = parseInt(this.$stateParams.page, 10) || undefined
const search = this.$stateParams.search
| this.photosGallery.showPhotos({ page, search });
}
pageButtonClass(page) {
if (page === this.photosGallery.currentPage) {
return 'md-raised md-primary';
}
return 'md-raised custom';
}
groupPhotosByMonth(photos) {
const res = {};
photos.forEach((photo) => {
const month = this.monthLabel(photo);
if (!res[month]) {
res[month] = [];
}
res[month].push(photo);
});
this.photosByMonth = res;
console.log("PhotosController.groupPhotosByMonth: \n", this.photosByMonth);
}
monthLabel(photo) {
const date = new Date(photo.metadata.createDate);
const month = date.toLocaleString('en', { month: 'short' });
return `${month} ${date.getFullYear()} `;
}
}
export default angular.module('photos.controller', [])
.controller('photosController', PhotosController); | random_line_split | |
controller.js | import angular from 'angular';
class PhotosController {
/** @ngInject */
constructor($scope, $stateParams, $state, photosGallery) {
this.$scope = $scope;
this.$stateParams = $stateParams;
this.$state = $state;
this.photosGallery = photosGallery;
this.photosByMonth = {};
this.initWatchers();
this.showPhotos();
}
initWatchers() {
this.$scope.$watch(() => this.photosGallery.photos, this.groupPhotosByMonth.bind(this));
}
showPhoto(id) {
this.$state.go('photo-detail', { id });
}
showPage(page) {
this.$state.go(
'photos',
{ page, search: this.photosGallery.search },
{ location: 'replace' }
);
}
showPhotos() {
const page = parseInt(this.$stateParams.page, 10) || undefined
const search = this.$stateParams.search
this.photosGallery.showPhotos({ page, search });
}
pageButtonClass(page) {
if (page === this.photosGallery.currentPage) |
return 'md-raised custom';
}
groupPhotosByMonth(photos) {
const res = {};
photos.forEach((photo) => {
const month = this.monthLabel(photo);
if (!res[month]) {
res[month] = [];
}
res[month].push(photo);
});
this.photosByMonth = res;
console.log("PhotosController.groupPhotosByMonth: \n", this.photosByMonth);
}
monthLabel(photo) {
const date = new Date(photo.metadata.createDate);
const month = date.toLocaleString('en', { month: 'short' });
return `${month} ${date.getFullYear()} `;
}
}
export default angular.module('photos.controller', [])
.controller('photosController', PhotosController);
| {
return 'md-raised md-primary';
} | conditional_block |
app.py | import sys
sys.path.append('../..')
import web
from web.contrib.template import render_jinja
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from social.utils import setting_name
from social.apps.webpy_app.utils import psa, backends
from social.apps.webpy_app import app as social_app
import local_settings
web.config.debug = False
web.config[setting_name('USER_MODEL')] = 'models.User'
web.config[setting_name('AUTHENTICATION_BACKENDS')] = (
'social.backends.open_id.OpenIdAuth',
'social.backends.google.GoogleOpenId',
'social.backends.google.GoogleOAuth2',
'social.backends.google.GoogleOAuth',
'social.backends.twitter.TwitterOAuth',
'social.backends.yahoo.YahooOpenId',
'social.backends.stripe.StripeOAuth2',
'social.backends.persona.PersonaAuth',
'social.backends.facebook.FacebookOAuth2',
'social.backends.facebook.FacebookAppOAuth2',
'social.backends.yahoo.YahooOAuth',
'social.backends.angel.AngelOAuth2',
'social.backends.behance.BehanceOAuth2',
'social.backends.bitbucket.BitbucketOAuth',
'social.backends.box.BoxOAuth2',
'social.backends.linkedin.LinkedinOAuth',
'social.backends.github.GithubOAuth2',
'social.backends.foursquare.FoursquareOAuth2',
'social.backends.instagram.InstagramOAuth2',
'social.backends.live.LiveOAuth2',
'social.backends.vk.VKOAuth2',
'social.backends.dailymotion.DailymotionOAuth2',
'social.backends.disqus.DisqusOAuth2',
'social.backends.dropbox.DropboxOAuth',
'social.backends.eveonline.EVEOnlineOAuth2',
'social.backends.evernote.EvernoteSandboxOAuth',
'social.backends.fitbit.FitbitOAuth2',
'social.backends.flickr.FlickrOAuth',
'social.backends.livejournal.LiveJournalOpenId',
'social.backends.soundcloud.SoundcloudOAuth2',
'social.backends.thisismyjam.ThisIsMyJamOAuth1',
'social.backends.stocktwits.StocktwitsOAuth2',
'social.backends.tripit.TripItOAuth',
'social.backends.clef.ClefOAuth2',
'social.backends.twilio.TwilioAuth',
'social.backends.xing.XingOAuth',
'social.backends.yandex.YandexOAuth2',
'social.backends.podio.PodioOAuth2',
'social.backends.mineid.MineIDOAuth2',
'social.backends.wunderlist.WunderlistOAuth2',
'social.backends.upwork.UpworkOAuth',
)
web.config[setting_name('LOGIN_REDIRECT_URL')] = '/done/'
urls = (
'^/$', 'main',
'^/done/$', 'done',
'', social_app.app_social
)
render = render_jinja('templates/')
class main(object):
|
class done(social_app.BaseViewClass):
def GET(self):
user = self.get_current_user()
return render.done(user=user, backends=backends(user))
engine = create_engine('sqlite:///test.db', echo=True)
def load_sqla(handler):
web.ctx.orm = scoped_session(sessionmaker(bind=engine))
try:
return handler()
except web.HTTPError:
web.ctx.orm.commit()
raise
except:
web.ctx.orm.rollback()
raise
finally:
web.ctx.orm.commit()
# web.ctx.orm.expunge_all()
Session = sessionmaker(bind=engine)
Session.configure(bind=engine)
app = web.application(urls, locals())
app.add_processor(load_sqla)
session = web.session.Session(app, web.session.DiskStore('sessions'))
web.db_session = Session()
web.web_session = session
if __name__ == "__main__":
app.run()
| def GET(self):
return render.home() | identifier_body |
app.py | import sys
sys.path.append('../..')
import web
from web.contrib.template import render_jinja
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from social.utils import setting_name
from social.apps.webpy_app.utils import psa, backends
from social.apps.webpy_app import app as social_app
import local_settings
web.config.debug = False
web.config[setting_name('USER_MODEL')] = 'models.User'
web.config[setting_name('AUTHENTICATION_BACKENDS')] = (
'social.backends.open_id.OpenIdAuth',
'social.backends.google.GoogleOpenId',
'social.backends.google.GoogleOAuth2',
'social.backends.google.GoogleOAuth',
'social.backends.twitter.TwitterOAuth',
'social.backends.yahoo.YahooOpenId',
'social.backends.stripe.StripeOAuth2',
'social.backends.persona.PersonaAuth',
'social.backends.facebook.FacebookOAuth2',
'social.backends.facebook.FacebookAppOAuth2',
'social.backends.yahoo.YahooOAuth',
'social.backends.angel.AngelOAuth2',
'social.backends.behance.BehanceOAuth2',
'social.backends.bitbucket.BitbucketOAuth',
'social.backends.box.BoxOAuth2',
'social.backends.linkedin.LinkedinOAuth',
'social.backends.github.GithubOAuth2',
'social.backends.foursquare.FoursquareOAuth2',
'social.backends.instagram.InstagramOAuth2',
'social.backends.live.LiveOAuth2',
'social.backends.vk.VKOAuth2',
'social.backends.dailymotion.DailymotionOAuth2',
'social.backends.disqus.DisqusOAuth2',
'social.backends.dropbox.DropboxOAuth',
'social.backends.eveonline.EVEOnlineOAuth2',
'social.backends.evernote.EvernoteSandboxOAuth',
'social.backends.fitbit.FitbitOAuth2',
'social.backends.flickr.FlickrOAuth',
'social.backends.livejournal.LiveJournalOpenId',
'social.backends.soundcloud.SoundcloudOAuth2',
'social.backends.thisismyjam.ThisIsMyJamOAuth1',
'social.backends.stocktwits.StocktwitsOAuth2',
'social.backends.tripit.TripItOAuth',
'social.backends.clef.ClefOAuth2',
'social.backends.twilio.TwilioAuth',
'social.backends.xing.XingOAuth',
'social.backends.yandex.YandexOAuth2',
'social.backends.podio.PodioOAuth2',
'social.backends.mineid.MineIDOAuth2',
'social.backends.wunderlist.WunderlistOAuth2',
'social.backends.upwork.UpworkOAuth',
)
web.config[setting_name('LOGIN_REDIRECT_URL')] = '/done/'
urls = (
'^/$', 'main',
'^/done/$', 'done',
'', social_app.app_social
)
render = render_jinja('templates/')
class main(object):
def | (self):
return render.home()
class done(social_app.BaseViewClass):
def GET(self):
user = self.get_current_user()
return render.done(user=user, backends=backends(user))
engine = create_engine('sqlite:///test.db', echo=True)
def load_sqla(handler):
web.ctx.orm = scoped_session(sessionmaker(bind=engine))
try:
return handler()
except web.HTTPError:
web.ctx.orm.commit()
raise
except:
web.ctx.orm.rollback()
raise
finally:
web.ctx.orm.commit()
# web.ctx.orm.expunge_all()
Session = sessionmaker(bind=engine)
Session.configure(bind=engine)
app = web.application(urls, locals())
app.add_processor(load_sqla)
session = web.session.Session(app, web.session.DiskStore('sessions'))
web.db_session = Session()
web.web_session = session
if __name__ == "__main__":
app.run()
| GET | identifier_name |
app.py | import sys
sys.path.append('../..')
import web
from web.contrib.template import render_jinja
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from social.utils import setting_name
from social.apps.webpy_app.utils import psa, backends
from social.apps.webpy_app import app as social_app
import local_settings
web.config.debug = False
web.config[setting_name('USER_MODEL')] = 'models.User'
web.config[setting_name('AUTHENTICATION_BACKENDS')] = (
'social.backends.open_id.OpenIdAuth',
'social.backends.google.GoogleOpenId',
'social.backends.google.GoogleOAuth2',
'social.backends.google.GoogleOAuth',
'social.backends.twitter.TwitterOAuth',
'social.backends.yahoo.YahooOpenId',
'social.backends.stripe.StripeOAuth2',
'social.backends.persona.PersonaAuth',
'social.backends.facebook.FacebookOAuth2',
'social.backends.facebook.FacebookAppOAuth2',
'social.backends.yahoo.YahooOAuth',
'social.backends.angel.AngelOAuth2',
'social.backends.behance.BehanceOAuth2',
'social.backends.bitbucket.BitbucketOAuth',
'social.backends.box.BoxOAuth2',
'social.backends.linkedin.LinkedinOAuth',
'social.backends.github.GithubOAuth2',
'social.backends.foursquare.FoursquareOAuth2',
'social.backends.instagram.InstagramOAuth2',
'social.backends.live.LiveOAuth2',
'social.backends.vk.VKOAuth2',
'social.backends.dailymotion.DailymotionOAuth2',
'social.backends.disqus.DisqusOAuth2',
'social.backends.dropbox.DropboxOAuth',
'social.backends.eveonline.EVEOnlineOAuth2',
'social.backends.evernote.EvernoteSandboxOAuth',
'social.backends.fitbit.FitbitOAuth2',
'social.backends.flickr.FlickrOAuth',
'social.backends.livejournal.LiveJournalOpenId',
'social.backends.soundcloud.SoundcloudOAuth2',
'social.backends.thisismyjam.ThisIsMyJamOAuth1',
'social.backends.stocktwits.StocktwitsOAuth2',
'social.backends.tripit.TripItOAuth',
'social.backends.clef.ClefOAuth2',
'social.backends.twilio.TwilioAuth',
'social.backends.xing.XingOAuth',
'social.backends.yandex.YandexOAuth2',
'social.backends.podio.PodioOAuth2',
'social.backends.mineid.MineIDOAuth2',
'social.backends.wunderlist.WunderlistOAuth2',
'social.backends.upwork.UpworkOAuth',
)
web.config[setting_name('LOGIN_REDIRECT_URL')] = '/done/'
urls = (
'^/$', 'main',
'^/done/$', 'done',
'', social_app.app_social
)
render = render_jinja('templates/')
class main(object):
def GET(self):
return render.home()
class done(social_app.BaseViewClass):
def GET(self):
user = self.get_current_user()
return render.done(user=user, backends=backends(user))
engine = create_engine('sqlite:///test.db', echo=True)
def load_sqla(handler):
web.ctx.orm = scoped_session(sessionmaker(bind=engine))
try:
return handler()
except web.HTTPError:
web.ctx.orm.commit()
raise
except:
web.ctx.orm.rollback()
raise
finally:
web.ctx.orm.commit()
# web.ctx.orm.expunge_all()
Session = sessionmaker(bind=engine)
Session.configure(bind=engine)
app = web.application(urls, locals())
app.add_processor(load_sqla)
session = web.session.Session(app, web.session.DiskStore('sessions'))
web.db_session = Session()
web.web_session = session
if __name__ == "__main__":
| app.run() | conditional_block |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.