file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
main.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// aux-build:crate_with_invalid_spans.rs
// pretty-expanded FIXME #23616
extern crate crate_with_invalid_spans;
fn | () {
// The AST of `exported_generic` stored in crate_with_invalid_spans's
// metadata should contain an invalid span where span.lo() > span.hi().
// Let's make sure the compiler doesn't crash when encountering this.
let _ = crate_with_invalid_spans::exported_generic(32u32, 7u32);
}
| main | identifier_name |
main.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// aux-build:crate_with_invalid_spans.rs
// pretty-expanded FIXME #23616
extern crate crate_with_invalid_spans;
fn main() | {
// The AST of `exported_generic` stored in crate_with_invalid_spans's
// metadata should contain an invalid span where span.lo() > span.hi().
// Let's make sure the compiler doesn't crash when encountering this.
let _ = crate_with_invalid_spans::exported_generic(32u32, 7u32);
} | identifier_body | |
20require.js | /*xlib.20require 资源加载函数*/
(function(x){
x.define=function(id,deps,result){/*定义一个资源 id是可选项 rs可以是data也可以是函数 资源的加载*/
for(var i=0;i<rsList.length;i++){
var rs=rsList[i];
if(rs.id==id){/*如果id一致,且对象处于load状态*/
rs.state="complete";/*加载完毕*/ | rs.callBack(result);
break;
}
}
}
/*每个rs拥有src属性,加载状态属性,加载结果*/
var seed_id=1;
x.require=function(id,src,callBack,flag){/*按顺序加载资源 异步执行 执行完调用回调函数 flag是调试信息是否输出*/
/*每次require都会产生一条记录 相当于每次require只能执行一个加载请求*/
var obj={};
if(id==null) id=seed_id++;
obj.id=id; /*关联id*/
obj.src=src;
obj.state="ready";/*3种状态 ready load complete*/
obj.callBack=callBack;
rsList.push(obj);
var t = document.createElement("script");
t.setAttribute("type","text/javascript");
t.defer =t.async= true;/*非阻塞加载*/
t.onreadystatechange = t.onload = function(){
if(!t.readyState || t.readyState == 'loaded' || t.readyState == 'complete'){/*装载完成*/
t.onreadystatechange = t.onload = t.onerror = null;
}
};
t.src = src;
document.body.appendChild(t);
}
})(window); | random_line_split | |
20require.js | /*xlib.20require 资源加载函数*/
(function(x){
x.define=function(id,deps,result){/*定义一个资源 id是可选项 rs可以是data也可以是函数 资源的加载*/
for(var i=0;i<rsList.length;i++){
var rs=rsList[i];
if(rs.id==id){/*如果id一致,且对象处于load状态*/
rs.state="complete";/*加载完毕*/
rs.callBack(result);
break;
}
}
}
/*每个rs拥有src属性,加载状态属性,加载结果*/
var seed_id=1;
x.require=function(id,src,callBack,flag){/*按顺序加载资源 异步执行 执行完调用回调函数 flag是调试信息是否输出*/
/*每次require都会产生一条记录 相当于每次require只能执行一个加载请求*/
var obj={};
if(id==null) id=seed_id++;
obj.id=id; /*关联id*/
obj.src=src;
obj.state="ready";/*3种状态 ready load complete*/
obj.callBack=callBack;
rsList.push(obj);
var t = document.createElement("script");
t.setAttribute("type","text/javascript");
t.defer =t.async= true;/*非阻塞加载*/
t.onreadystatechange = t.onload = function(){
if(!t.readyState || t.readyState == 'loaded' || t.readyState == 'complete'){/*装载完成*/
t.onreadystatechange = t.onload = t.onerror = null;
}
};
t.src = src;
document.body.appendChild(t);
}
})(window);
| conditional_block | ||
vga.rs | use extra::prelude::*;
use cpu;
use cpu::io;
#[packed]
pub struct | {
char: u8,
attr: u8,
}
static SCREEN_ROWS: uint = 25;
static SCREEN_COLS: uint = 80;
static SCREEN_SIZE: uint = SCREEN_ROWS*SCREEN_COLS;
type screen_buf = [character, ..SCREEN_SIZE];
static screen: *mut screen_buf = 0xB8000 as *mut screen_buf;
static mut cur_pos: uint = 0;
pub fn init() {
unsafe {
cur_pos = cursor_pos();
}
}
pub fn puts(string: &str, attr: term::color::Color) {
stdio::puts(string, attr, putc, new_line);
}
pub fn putc(c: char, attr: term::color::Color) {
unsafe {
put_char(cur_pos, character{char: c as u8, attr: attr as u8});
cursor_move(1);
}
}
pub fn new_line() {
unsafe {
cursor_move(SCREEN_COLS - cur_pos % SCREEN_COLS);
}
}
#[inline]
unsafe fn cursor_move(delta: uint) {
cur_pos += delta;
if cur_pos >= SCREEN_SIZE {
cpu::memmove(mem_ptr_of(0, 0), mem_ptr_of(1, 0),
(SCREEN_SIZE - SCREEN_COLS) * mem::size_of::<character>());
let mut i = SCREEN_SIZE - SCREEN_COLS;
while i < SCREEN_SIZE {
put_char(i, character{char: ' ' as u8, attr: term::color::BLACK as u8});
i += 1;
};
cur_pos -= SCREEN_COLS;
}
cursor_to(cur_pos);
}
#[inline]
unsafe fn put_char(pos: uint, c: character) {
(*screen)[pos] = c;
}
#[inline]
unsafe fn cursor_pos() -> uint {
let mut pos: uint;
io::outb(0x3D4, 14);
pos = (io::inb(0x3D5) as uint) << 8;
io::outb(0x3D4, 15);
pos |= io::inb(0x3D5) as uint;
pos
}
#[inline]
unsafe fn cursor_to(pos: uint) {
io::outb(0x3D4, 14);
io::outb(0x3D5, (pos >> 8) as u8);
io::outb(0x3D4, 15);
io::outb(0x3D5, pos as u8);
}
#[inline]
unsafe fn mem_ptr_of(row: uint, col: uint) -> uint {
screen as uint +
row * SCREEN_COLS * mem::size_of::<character>() +
col * mem::size_of::<character>()
}
| character | identifier_name |
vga.rs | use extra::prelude::*;
use cpu;
use cpu::io;
#[packed]
pub struct character {
char: u8,
attr: u8,
}
static SCREEN_ROWS: uint = 25;
static SCREEN_COLS: uint = 80;
static SCREEN_SIZE: uint = SCREEN_ROWS*SCREEN_COLS;
type screen_buf = [character, ..SCREEN_SIZE];
static screen: *mut screen_buf = 0xB8000 as *mut screen_buf;
static mut cur_pos: uint = 0;
pub fn init() {
unsafe {
cur_pos = cursor_pos();
}
}
pub fn puts(string: &str, attr: term::color::Color) {
stdio::puts(string, attr, putc, new_line);
}
pub fn putc(c: char, attr: term::color::Color) {
unsafe {
put_char(cur_pos, character{char: c as u8, attr: attr as u8});
cursor_move(1);
}
}
pub fn new_line() {
unsafe {
cursor_move(SCREEN_COLS - cur_pos % SCREEN_COLS);
}
}
#[inline]
unsafe fn cursor_move(delta: uint) {
cur_pos += delta;
if cur_pos >= SCREEN_SIZE {
cpu::memmove(mem_ptr_of(0, 0), mem_ptr_of(1, 0),
(SCREEN_SIZE - SCREEN_COLS) * mem::size_of::<character>());
let mut i = SCREEN_SIZE - SCREEN_COLS;
while i < SCREEN_SIZE {
put_char(i, character{char: ' ' as u8, attr: term::color::BLACK as u8});
i += 1;
};
cur_pos -= SCREEN_COLS;
}
cursor_to(cur_pos);
}
#[inline]
unsafe fn put_char(pos: uint, c: character) {
(*screen)[pos] = c;
}
#[inline]
unsafe fn cursor_pos() -> uint {
let mut pos: uint;
io::outb(0x3D4, 14);
pos = (io::inb(0x3D5) as uint) << 8;
io::outb(0x3D4, 15);
pos |= io::inb(0x3D5) as uint;
pos
}
#[inline] | }
#[inline]
unsafe fn mem_ptr_of(row: uint, col: uint) -> uint {
screen as uint +
row * SCREEN_COLS * mem::size_of::<character>() +
col * mem::size_of::<character>()
} | unsafe fn cursor_to(pos: uint) {
io::outb(0x3D4, 14);
io::outb(0x3D5, (pos >> 8) as u8);
io::outb(0x3D4, 15);
io::outb(0x3D5, pos as u8); | random_line_split |
vga.rs | use extra::prelude::*;
use cpu;
use cpu::io;
#[packed]
pub struct character {
char: u8,
attr: u8,
}
static SCREEN_ROWS: uint = 25;
static SCREEN_COLS: uint = 80;
static SCREEN_SIZE: uint = SCREEN_ROWS*SCREEN_COLS;
type screen_buf = [character, ..SCREEN_SIZE];
static screen: *mut screen_buf = 0xB8000 as *mut screen_buf;
static mut cur_pos: uint = 0;
pub fn init() |
pub fn puts(string: &str, attr: term::color::Color) {
stdio::puts(string, attr, putc, new_line);
}
pub fn putc(c: char, attr: term::color::Color) {
unsafe {
put_char(cur_pos, character{char: c as u8, attr: attr as u8});
cursor_move(1);
}
}
pub fn new_line() {
unsafe {
cursor_move(SCREEN_COLS - cur_pos % SCREEN_COLS);
}
}
#[inline]
unsafe fn cursor_move(delta: uint) {
cur_pos += delta;
if cur_pos >= SCREEN_SIZE {
cpu::memmove(mem_ptr_of(0, 0), mem_ptr_of(1, 0),
(SCREEN_SIZE - SCREEN_COLS) * mem::size_of::<character>());
let mut i = SCREEN_SIZE - SCREEN_COLS;
while i < SCREEN_SIZE {
put_char(i, character{char: ' ' as u8, attr: term::color::BLACK as u8});
i += 1;
};
cur_pos -= SCREEN_COLS;
}
cursor_to(cur_pos);
}
#[inline]
unsafe fn put_char(pos: uint, c: character) {
(*screen)[pos] = c;
}
#[inline]
unsafe fn cursor_pos() -> uint {
let mut pos: uint;
io::outb(0x3D4, 14);
pos = (io::inb(0x3D5) as uint) << 8;
io::outb(0x3D4, 15);
pos |= io::inb(0x3D5) as uint;
pos
}
#[inline]
unsafe fn cursor_to(pos: uint) {
io::outb(0x3D4, 14);
io::outb(0x3D5, (pos >> 8) as u8);
io::outb(0x3D4, 15);
io::outb(0x3D5, pos as u8);
}
#[inline]
unsafe fn mem_ptr_of(row: uint, col: uint) -> uint {
screen as uint +
row * SCREEN_COLS * mem::size_of::<character>() +
col * mem::size_of::<character>()
}
| {
unsafe {
cur_pos = cursor_pos();
}
} | identifier_body |
vga.rs | use extra::prelude::*;
use cpu;
use cpu::io;
#[packed]
pub struct character {
char: u8,
attr: u8,
}
static SCREEN_ROWS: uint = 25;
static SCREEN_COLS: uint = 80;
static SCREEN_SIZE: uint = SCREEN_ROWS*SCREEN_COLS;
type screen_buf = [character, ..SCREEN_SIZE];
static screen: *mut screen_buf = 0xB8000 as *mut screen_buf;
static mut cur_pos: uint = 0;
pub fn init() {
unsafe {
cur_pos = cursor_pos();
}
}
pub fn puts(string: &str, attr: term::color::Color) {
stdio::puts(string, attr, putc, new_line);
}
pub fn putc(c: char, attr: term::color::Color) {
unsafe {
put_char(cur_pos, character{char: c as u8, attr: attr as u8});
cursor_move(1);
}
}
pub fn new_line() {
unsafe {
cursor_move(SCREEN_COLS - cur_pos % SCREEN_COLS);
}
}
#[inline]
unsafe fn cursor_move(delta: uint) {
cur_pos += delta;
if cur_pos >= SCREEN_SIZE |
cursor_to(cur_pos);
}
#[inline]
unsafe fn put_char(pos: uint, c: character) {
(*screen)[pos] = c;
}
#[inline]
unsafe fn cursor_pos() -> uint {
let mut pos: uint;
io::outb(0x3D4, 14);
pos = (io::inb(0x3D5) as uint) << 8;
io::outb(0x3D4, 15);
pos |= io::inb(0x3D5) as uint;
pos
}
#[inline]
unsafe fn cursor_to(pos: uint) {
io::outb(0x3D4, 14);
io::outb(0x3D5, (pos >> 8) as u8);
io::outb(0x3D4, 15);
io::outb(0x3D5, pos as u8);
}
#[inline]
unsafe fn mem_ptr_of(row: uint, col: uint) -> uint {
screen as uint +
row * SCREEN_COLS * mem::size_of::<character>() +
col * mem::size_of::<character>()
}
| {
cpu::memmove(mem_ptr_of(0, 0), mem_ptr_of(1, 0),
(SCREEN_SIZE - SCREEN_COLS) * mem::size_of::<character>());
let mut i = SCREEN_SIZE - SCREEN_COLS;
while i < SCREEN_SIZE {
put_char(i, character{char: ' ' as u8, attr: term::color::BLACK as u8});
i += 1;
};
cur_pos -= SCREEN_COLS;
} | conditional_block |
debug.py | # -*- coding: utf-8 -*-
"""
jinja2.debug
~~~~~~~~~~~~
Implements the debug interface for Jinja. This module does some pretty
ugly stuff with the Python traceback system in order to achieve tracebacks
with correct line numbers, locals and contents.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import sys
import traceback
from jinja2.utils import CodeType, missing, internal_code
from jinja2.exceptions import TemplateSyntaxError
# how does the raise helper look like?
try:
exec "raise TypeError, 'foo'"
except SyntaxError:
raise_helper = 'raise __jinja_exception__[1]'
except TypeError:
raise_helper = 'raise __jinja_exception__[0], __jinja_exception__[1]'
class TracebackFrameProxy(object):
"""Proxies a traceback frame."""
def __init__(self, tb):
self.tb = tb
def _set_tb_next(self, next):
if tb_set_next is not None:
tb_set_next(self.tb, next and next.tb or None)
self._tb_next = next
def _get_tb_next(self):
return self._tb_next
tb_next = property(_get_tb_next, _set_tb_next)
del _get_tb_next, _set_tb_next
@property
def is_jinja_frame(self):
return '__jinja_template__' in self.tb.tb_frame.f_globals
def __getattr__(self, name):
return getattr(self.tb, name)
class ProcessedTraceback(object):
"""Holds a Jinja preprocessed traceback for priting or reraising."""
def __init__(self, exc_type, exc_value, frames):
assert frames, 'no frames for this traceback?'
self.exc_type = exc_type
self.exc_value = exc_value
self.frames = frames
def chain_frames(self):
"""Chains the frames. Requires ctypes or the speedups extension."""
prev_tb = None
for tb in self.frames:
if prev_tb is not None:
prev_tb.tb_next = tb
prev_tb = tb
prev_tb.tb_next = None
def render_as_text(self, limit=None):
"""Return a string with the traceback."""
lines = traceback.format_exception(self.exc_type, self.exc_value,
self.frames[0], limit=limit)
return ''.join(lines).rstrip()
def render_as_html(self, full=False):
"""Return a unicode string with the traceback as rendered HTML."""
from jinja2.debugrenderer import render_traceback
return u'%s\n\n<!--\n%s\n-->' % (
render_traceback(self, full=full),
self.render_as_text().decode('utf-8', 'replace')
)
@property
def is_template_syntax_error(self):
"""`True` if this is a template syntax error."""
return isinstance(self.exc_value, TemplateSyntaxError)
@property
def exc_info(self):
"""Exception info tuple with a proxy around the frame objects."""
return self.exc_type, self.exc_value, self.frames[0]
@property
def standard_exc_info(self):
"""Standard python exc_info for re-raising"""
return self.exc_type, self.exc_value, self.frames[0].tb
def make_traceback(exc_info, source_hint=None):
"""Creates a processed traceback object from the exc_info."""
exc_type, exc_value, tb = exc_info
if isinstance(exc_value, TemplateSyntaxError):
exc_info = translate_syntax_error(exc_value, source_hint)
initial_skip = 0
else:
initial_skip = 1
return translate_exception(exc_info, initial_skip)
def translate_syntax_error(error, source=None):
"""Rewrites a syntax error to please traceback systems."""
error.source = source
error.translated = True
exc_info = (error.__class__, error, None)
filename = error.filename
if filename is None:
filename = '<unknown>'
return fake_exc_info(exc_info, filename, error.lineno)
def translate_exception(exc_info, initial_skip=0):
"""If passed an exc_info it will automatically rewrite the exceptions
all the way down to the correct line numbers and frames.
"""
tb = exc_info[2]
frames = []
# skip some internal frames if wanted
for x in xrange(initial_skip):
if tb is not None:
tb = tb.tb_next
initial_tb = tb
while tb is not None:
# skip frames decorated with @internalcode. These are internal
# calls we can't avoid and that are useless in template debugging
# output.
if tb.tb_frame.f_code in internal_code:
tb = tb.tb_next
continue
# save a reference to the next frame if we override the current
# one with a faked one.
next = tb.tb_next
# fake template exceptions
template = tb.tb_frame.f_globals.get('__jinja_template__')
if template is not None:
lineno = template.get_corresponding_lineno(tb.tb_lineno)
tb = fake_exc_info(exc_info[:2] + (tb,), template.filename,
lineno)[2]
frames.append(TracebackFrameProxy(tb))
tb = next
# if we don't have any exceptions in the frames left, we have to
# reraise it unchanged.
# XXX: can we backup here? when could this happen?
if not frames:
raise exc_info[0], exc_info[1], exc_info[2]
traceback = ProcessedTraceback(exc_info[0], exc_info[1], frames)
if tb_set_next is not None:
traceback.chain_frames()
return traceback
def fake_exc_info(exc_info, filename, lineno):
"""Helper for `translate_exception`."""
exc_type, exc_value, tb = exc_info
# figure the real context out
if tb is not None:
real_locals = tb.tb_frame.f_locals.copy()
ctx = real_locals.get('context')
if ctx:
locals = ctx.get_all()
else:
locals = {}
for name, value in real_locals.iteritems():
if name.startswith('l_') and value is not missing:
locals[name[2:]] = value
# if there is a local called __jinja_exception__, we get
# rid of it to not break the debug functionality.
locals.pop('__jinja_exception__', None)
else:
locals = {}
# assamble fake globals we need
globals = {
'__name__': filename,
'__file__': filename,
'__jinja_exception__': exc_info[:2],
# we don't want to keep the reference to the template around
# to not cause circular dependencies, but we mark it as Jinja
# frame for the ProcessedTraceback
'__jinja_template__': None
}
# and fake the exception
code = compile('\n' * (lineno - 1) + raise_helper, filename, 'exec')
# if it's possible, change the name of the code. This won't work
# on some python environments such as google appengine
try:
if tb is None:
location = 'template'
else:
function = tb.tb_frame.f_code.co_name
if function == 'root':
location = 'top-level template code'
elif function.startswith('block_'):
location = 'block "%s"' % function[6:]
else:
location = 'template'
code = CodeType(0, code.co_nlocals, code.co_stacksize,
code.co_flags, code.co_code, code.co_consts,
code.co_names, code.co_varnames, filename,
location, code.co_firstlineno,
code.co_lnotab, (), ())
except:
pass
# execute the code and catch the new traceback
try:
exec code in globals, locals
except:
exc_info = sys.exc_info()
new_tb = exc_info[2].tb_next
# return without this frame
return exc_info[:2] + (new_tb,)
def _init_ugly_crap():
"""This function implements a few ugly things so that we can patch the
traceback objects. The function returned allows resetting `tb_next` on
any python traceback object.
"""
import ctypes
from types import TracebackType
# figure out side of _Py_ssize_t
if hasattr(ctypes.pythonapi, 'Py_InitModule4_64'):
_Py_ssize_t = ctypes.c_int64
else:
_Py_ssize_t = ctypes.c_int
# regular python
class _PyObject(ctypes.Structure):
pass
_PyObject._fields_ = [
('ob_refcnt', _Py_ssize_t),
('ob_type', ctypes.POINTER(_PyObject))
]
# python with trace
if object.__basicsize__ != ctypes.sizeof(_PyObject):
|
class _Traceback(_PyObject):
pass
_Traceback._fields_ = [
('tb_next', ctypes.POINTER(_Traceback)),
('tb_frame', ctypes.POINTER(_PyObject)),
('tb_lasti', ctypes.c_int),
('tb_lineno', ctypes.c_int)
]
def tb_set_next(tb, next):
"""Set the tb_next attribute of a traceback object."""
if not (isinstance(tb, TracebackType) and
(next is None or isinstance(next, TracebackType))):
raise TypeError('tb_set_next arguments must be traceback objects')
obj = _Traceback.from_address(id(tb))
if tb.tb_next is not None:
old = _Traceback.from_address(id(tb.tb_next))
old.ob_refcnt -= 1
if next is None:
obj.tb_next = ctypes.POINTER(_Traceback)()
else:
next = _Traceback.from_address(id(next))
next.ob_refcnt += 1
obj.tb_next = ctypes.pointer(next)
return tb_set_next
# try to get a tb_set_next implementation
try:
from jinja2._speedups import tb_set_next
except ImportError:
try:
tb_set_next = _init_ugly_crap()
except:
tb_set_next = None
del _init_ugly_crap
| class _PyObject(ctypes.Structure):
pass
_PyObject._fields_ = [
('_ob_next', ctypes.POINTER(_PyObject)),
('_ob_prev', ctypes.POINTER(_PyObject)),
('ob_refcnt', _Py_ssize_t),
('ob_type', ctypes.POINTER(_PyObject))
] | conditional_block |
debug.py | # -*- coding: utf-8 -*-
"""
jinja2.debug
~~~~~~~~~~~~
Implements the debug interface for Jinja. This module does some pretty
ugly stuff with the Python traceback system in order to achieve tracebacks
with correct line numbers, locals and contents.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import sys
import traceback
from jinja2.utils import CodeType, missing, internal_code
from jinja2.exceptions import TemplateSyntaxError
# how does the raise helper look like?
try:
exec "raise TypeError, 'foo'"
except SyntaxError:
raise_helper = 'raise __jinja_exception__[1]'
except TypeError:
raise_helper = 'raise __jinja_exception__[0], __jinja_exception__[1]'
class TracebackFrameProxy(object):
"""Proxies a traceback frame."""
def __init__(self, tb):
self.tb = tb
def _set_tb_next(self, next):
if tb_set_next is not None:
tb_set_next(self.tb, next and next.tb or None)
self._tb_next = next
def _get_tb_next(self):
return self._tb_next
tb_next = property(_get_tb_next, _set_tb_next)
del _get_tb_next, _set_tb_next
@property
def is_jinja_frame(self):
return '__jinja_template__' in self.tb.tb_frame.f_globals
def __getattr__(self, name):
return getattr(self.tb, name)
class ProcessedTraceback(object):
"""Holds a Jinja preprocessed traceback for priting or reraising."""
def __init__(self, exc_type, exc_value, frames):
assert frames, 'no frames for this traceback?'
self.exc_type = exc_type
self.exc_value = exc_value
self.frames = frames
def chain_frames(self):
"""Chains the frames. Requires ctypes or the speedups extension."""
prev_tb = None
for tb in self.frames:
if prev_tb is not None:
prev_tb.tb_next = tb
prev_tb = tb
prev_tb.tb_next = None
def render_as_text(self, limit=None):
"""Return a string with the traceback."""
lines = traceback.format_exception(self.exc_type, self.exc_value,
self.frames[0], limit=limit)
return ''.join(lines).rstrip()
def render_as_html(self, full=False):
"""Return a unicode string with the traceback as rendered HTML."""
from jinja2.debugrenderer import render_traceback
return u'%s\n\n<!--\n%s\n-->' % (
render_traceback(self, full=full),
self.render_as_text().decode('utf-8', 'replace')
)
@property
def is_template_syntax_error(self):
"""`True` if this is a template syntax error."""
return isinstance(self.exc_value, TemplateSyntaxError)
@property
def exc_info(self):
"""Exception info tuple with a proxy around the frame objects."""
return self.exc_type, self.exc_value, self.frames[0]
@property
def standard_exc_info(self):
"""Standard python exc_info for re-raising"""
return self.exc_type, self.exc_value, self.frames[0].tb
def make_traceback(exc_info, source_hint=None):
"""Creates a processed traceback object from the exc_info."""
exc_type, exc_value, tb = exc_info
if isinstance(exc_value, TemplateSyntaxError):
exc_info = translate_syntax_error(exc_value, source_hint)
initial_skip = 0
else:
initial_skip = 1
return translate_exception(exc_info, initial_skip)
def translate_syntax_error(error, source=None):
"""Rewrites a syntax error to please traceback systems."""
error.source = source
error.translated = True
exc_info = (error.__class__, error, None)
filename = error.filename
if filename is None:
filename = '<unknown>'
return fake_exc_info(exc_info, filename, error.lineno)
def translate_exception(exc_info, initial_skip=0):
"""If passed an exc_info it will automatically rewrite the exceptions
all the way down to the correct line numbers and frames.
"""
tb = exc_info[2]
frames = []
# skip some internal frames if wanted
for x in xrange(initial_skip):
if tb is not None:
tb = tb.tb_next
initial_tb = tb
while tb is not None:
# skip frames decorated with @internalcode. These are internal
# calls we can't avoid and that are useless in template debugging
# output.
if tb.tb_frame.f_code in internal_code:
tb = tb.tb_next
continue
# save a reference to the next frame if we override the current
# one with a faked one.
next = tb.tb_next
# fake template exceptions
template = tb.tb_frame.f_globals.get('__jinja_template__')
if template is not None:
lineno = template.get_corresponding_lineno(tb.tb_lineno)
tb = fake_exc_info(exc_info[:2] + (tb,), template.filename,
lineno)[2]
frames.append(TracebackFrameProxy(tb))
tb = next
# if we don't have any exceptions in the frames left, we have to
# reraise it unchanged.
# XXX: can we backup here? when could this happen?
if not frames:
raise exc_info[0], exc_info[1], exc_info[2]
traceback = ProcessedTraceback(exc_info[0], exc_info[1], frames)
if tb_set_next is not None:
traceback.chain_frames()
return traceback
def fake_exc_info(exc_info, filename, lineno):
"""Helper for `translate_exception`."""
exc_type, exc_value, tb = exc_info
# figure the real context out
if tb is not None:
real_locals = tb.tb_frame.f_locals.copy()
ctx = real_locals.get('context')
if ctx:
locals = ctx.get_all()
else:
locals = {}
for name, value in real_locals.iteritems():
if name.startswith('l_') and value is not missing:
locals[name[2:]] = value
# if there is a local called __jinja_exception__, we get
# rid of it to not break the debug functionality.
locals.pop('__jinja_exception__', None)
else:
locals = {}
# assamble fake globals we need
globals = {
'__name__': filename,
'__file__': filename,
'__jinja_exception__': exc_info[:2],
# we don't want to keep the reference to the template around
# to not cause circular dependencies, but we mark it as Jinja
# frame for the ProcessedTraceback
'__jinja_template__': None
}
# and fake the exception
code = compile('\n' * (lineno - 1) + raise_helper, filename, 'exec')
# if it's possible, change the name of the code. This won't work
# on some python environments such as google appengine
try:
if tb is None:
location = 'template'
else:
function = tb.tb_frame.f_code.co_name
if function == 'root':
location = 'top-level template code'
elif function.startswith('block_'):
location = 'block "%s"' % function[6:]
else:
location = 'template'
code = CodeType(0, code.co_nlocals, code.co_stacksize,
code.co_flags, code.co_code, code.co_consts,
code.co_names, code.co_varnames, filename,
location, code.co_firstlineno,
code.co_lnotab, (), ())
except:
pass
# execute the code and catch the new traceback
try:
exec code in globals, locals
except:
exc_info = sys.exc_info()
new_tb = exc_info[2].tb_next
# return without this frame
return exc_info[:2] + (new_tb,)
def _init_ugly_crap():
"""This function implements a few ugly things so that we can patch the
traceback objects. The function returned allows resetting `tb_next` on
any python traceback object.
"""
import ctypes
from types import TracebackType
# figure out side of _Py_ssize_t
if hasattr(ctypes.pythonapi, 'Py_InitModule4_64'):
_Py_ssize_t = ctypes.c_int64
else:
_Py_ssize_t = ctypes.c_int
# regular python
class _PyObject(ctypes.Structure):
pass
_PyObject._fields_ = [
('ob_refcnt', _Py_ssize_t),
('ob_type', ctypes.POINTER(_PyObject))
]
# python with trace
if object.__basicsize__ != ctypes.sizeof(_PyObject):
class _PyObject(ctypes.Structure):
pass
_PyObject._fields_ = [
('_ob_next', ctypes.POINTER(_PyObject)),
('_ob_prev', ctypes.POINTER(_PyObject)),
('ob_refcnt', _Py_ssize_t),
('ob_type', ctypes.POINTER(_PyObject))
]
class | (_PyObject):
pass
_Traceback._fields_ = [
('tb_next', ctypes.POINTER(_Traceback)),
('tb_frame', ctypes.POINTER(_PyObject)),
('tb_lasti', ctypes.c_int),
('tb_lineno', ctypes.c_int)
]
def tb_set_next(tb, next):
"""Set the tb_next attribute of a traceback object."""
if not (isinstance(tb, TracebackType) and
(next is None or isinstance(next, TracebackType))):
raise TypeError('tb_set_next arguments must be traceback objects')
obj = _Traceback.from_address(id(tb))
if tb.tb_next is not None:
old = _Traceback.from_address(id(tb.tb_next))
old.ob_refcnt -= 1
if next is None:
obj.tb_next = ctypes.POINTER(_Traceback)()
else:
next = _Traceback.from_address(id(next))
next.ob_refcnt += 1
obj.tb_next = ctypes.pointer(next)
return tb_set_next
# try to get a tb_set_next implementation
try:
from jinja2._speedups import tb_set_next
except ImportError:
try:
tb_set_next = _init_ugly_crap()
except:
tb_set_next = None
del _init_ugly_crap
| _Traceback | identifier_name |
debug.py | # -*- coding: utf-8 -*-
"""
jinja2.debug
~~~~~~~~~~~~
Implements the debug interface for Jinja. This module does some pretty
ugly stuff with the Python traceback system in order to achieve tracebacks
with correct line numbers, locals and contents.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import sys
import traceback
from jinja2.utils import CodeType, missing, internal_code
from jinja2.exceptions import TemplateSyntaxError
# how does the raise helper look like?
try:
exec "raise TypeError, 'foo'"
except SyntaxError:
raise_helper = 'raise __jinja_exception__[1]'
except TypeError:
raise_helper = 'raise __jinja_exception__[0], __jinja_exception__[1]'
class TracebackFrameProxy(object):
"""Proxies a traceback frame."""
def __init__(self, tb):
self.tb = tb
def _set_tb_next(self, next):
if tb_set_next is not None:
tb_set_next(self.tb, next and next.tb or None)
self._tb_next = next
def _get_tb_next(self):
return self._tb_next
tb_next = property(_get_tb_next, _set_tb_next)
del _get_tb_next, _set_tb_next
@property
def is_jinja_frame(self):
return '__jinja_template__' in self.tb.tb_frame.f_globals
def __getattr__(self, name):
return getattr(self.tb, name)
class ProcessedTraceback(object):
"""Holds a Jinja preprocessed traceback for priting or reraising."""
def __init__(self, exc_type, exc_value, frames):
assert frames, 'no frames for this traceback?'
self.exc_type = exc_type
self.exc_value = exc_value
self.frames = frames
def chain_frames(self):
"""Chains the frames. Requires ctypes or the speedups extension."""
prev_tb = None
for tb in self.frames:
if prev_tb is not None:
prev_tb.tb_next = tb
prev_tb = tb
prev_tb.tb_next = None
def render_as_text(self, limit=None):
"""Return a string with the traceback."""
lines = traceback.format_exception(self.exc_type, self.exc_value,
self.frames[0], limit=limit)
return ''.join(lines).rstrip()
def render_as_html(self, full=False):
"""Return a unicode string with the traceback as rendered HTML."""
from jinja2.debugrenderer import render_traceback
return u'%s\n\n<!--\n%s\n-->' % (
render_traceback(self, full=full),
self.render_as_text().decode('utf-8', 'replace')
)
@property
def is_template_syntax_error(self):
"""`True` if this is a template syntax error."""
return isinstance(self.exc_value, TemplateSyntaxError)
@property
def exc_info(self):
"""Exception info tuple with a proxy around the frame objects."""
return self.exc_type, self.exc_value, self.frames[0]
@property
def standard_exc_info(self):
"""Standard python exc_info for re-raising"""
return self.exc_type, self.exc_value, self.frames[0].tb
def make_traceback(exc_info, source_hint=None):
"""Creates a processed traceback object from the exc_info."""
exc_type, exc_value, tb = exc_info
if isinstance(exc_value, TemplateSyntaxError):
exc_info = translate_syntax_error(exc_value, source_hint)
initial_skip = 0
else:
initial_skip = 1
return translate_exception(exc_info, initial_skip)
def translate_syntax_error(error, source=None):
|
def translate_exception(exc_info, initial_skip=0):
"""If passed an exc_info it will automatically rewrite the exceptions
all the way down to the correct line numbers and frames.
"""
tb = exc_info[2]
frames = []
# skip some internal frames if wanted
for x in xrange(initial_skip):
if tb is not None:
tb = tb.tb_next
initial_tb = tb
while tb is not None:
# skip frames decorated with @internalcode. These are internal
# calls we can't avoid and that are useless in template debugging
# output.
if tb.tb_frame.f_code in internal_code:
tb = tb.tb_next
continue
# save a reference to the next frame if we override the current
# one with a faked one.
next = tb.tb_next
# fake template exceptions
template = tb.tb_frame.f_globals.get('__jinja_template__')
if template is not None:
lineno = template.get_corresponding_lineno(tb.tb_lineno)
tb = fake_exc_info(exc_info[:2] + (tb,), template.filename,
lineno)[2]
frames.append(TracebackFrameProxy(tb))
tb = next
# if we don't have any exceptions in the frames left, we have to
# reraise it unchanged.
# XXX: can we backup here? when could this happen?
if not frames:
raise exc_info[0], exc_info[1], exc_info[2]
traceback = ProcessedTraceback(exc_info[0], exc_info[1], frames)
if tb_set_next is not None:
traceback.chain_frames()
return traceback
def fake_exc_info(exc_info, filename, lineno):
"""Helper for `translate_exception`."""
exc_type, exc_value, tb = exc_info
# figure the real context out
if tb is not None:
real_locals = tb.tb_frame.f_locals.copy()
ctx = real_locals.get('context')
if ctx:
locals = ctx.get_all()
else:
locals = {}
for name, value in real_locals.iteritems():
if name.startswith('l_') and value is not missing:
locals[name[2:]] = value
# if there is a local called __jinja_exception__, we get
# rid of it to not break the debug functionality.
locals.pop('__jinja_exception__', None)
else:
locals = {}
# assamble fake globals we need
globals = {
'__name__': filename,
'__file__': filename,
'__jinja_exception__': exc_info[:2],
# we don't want to keep the reference to the template around
# to not cause circular dependencies, but we mark it as Jinja
# frame for the ProcessedTraceback
'__jinja_template__': None
}
# and fake the exception
code = compile('\n' * (lineno - 1) + raise_helper, filename, 'exec')
# if it's possible, change the name of the code. This won't work
# on some python environments such as google appengine
try:
if tb is None:
location = 'template'
else:
function = tb.tb_frame.f_code.co_name
if function == 'root':
location = 'top-level template code'
elif function.startswith('block_'):
location = 'block "%s"' % function[6:]
else:
location = 'template'
code = CodeType(0, code.co_nlocals, code.co_stacksize,
code.co_flags, code.co_code, code.co_consts,
code.co_names, code.co_varnames, filename,
location, code.co_firstlineno,
code.co_lnotab, (), ())
except:
pass
# execute the code and catch the new traceback
try:
exec code in globals, locals
except:
exc_info = sys.exc_info()
new_tb = exc_info[2].tb_next
# return without this frame
return exc_info[:2] + (new_tb,)
def _init_ugly_crap():
"""This function implements a few ugly things so that we can patch the
traceback objects. The function returned allows resetting `tb_next` on
any python traceback object.
"""
import ctypes
from types import TracebackType
# figure out side of _Py_ssize_t
if hasattr(ctypes.pythonapi, 'Py_InitModule4_64'):
_Py_ssize_t = ctypes.c_int64
else:
_Py_ssize_t = ctypes.c_int
# regular python
class _PyObject(ctypes.Structure):
pass
_PyObject._fields_ = [
('ob_refcnt', _Py_ssize_t),
('ob_type', ctypes.POINTER(_PyObject))
]
# python with trace
if object.__basicsize__ != ctypes.sizeof(_PyObject):
class _PyObject(ctypes.Structure):
pass
_PyObject._fields_ = [
('_ob_next', ctypes.POINTER(_PyObject)),
('_ob_prev', ctypes.POINTER(_PyObject)),
('ob_refcnt', _Py_ssize_t),
('ob_type', ctypes.POINTER(_PyObject))
]
class _Traceback(_PyObject):
pass
_Traceback._fields_ = [
('tb_next', ctypes.POINTER(_Traceback)),
('tb_frame', ctypes.POINTER(_PyObject)),
('tb_lasti', ctypes.c_int),
('tb_lineno', ctypes.c_int)
]
def tb_set_next(tb, next):
"""Set the tb_next attribute of a traceback object."""
if not (isinstance(tb, TracebackType) and
(next is None or isinstance(next, TracebackType))):
raise TypeError('tb_set_next arguments must be traceback objects')
obj = _Traceback.from_address(id(tb))
if tb.tb_next is not None:
old = _Traceback.from_address(id(tb.tb_next))
old.ob_refcnt -= 1
if next is None:
obj.tb_next = ctypes.POINTER(_Traceback)()
else:
next = _Traceback.from_address(id(next))
next.ob_refcnt += 1
obj.tb_next = ctypes.pointer(next)
return tb_set_next
# try to get a tb_set_next implementation
try:
from jinja2._speedups import tb_set_next
except ImportError:
try:
tb_set_next = _init_ugly_crap()
except:
tb_set_next = None
del _init_ugly_crap
| """Rewrites a syntax error to please traceback systems."""
error.source = source
error.translated = True
exc_info = (error.__class__, error, None)
filename = error.filename
if filename is None:
filename = '<unknown>'
return fake_exc_info(exc_info, filename, error.lineno) | identifier_body |
debug.py | # -*- coding: utf-8 -*-
"""
jinja2.debug
~~~~~~~~~~~~
Implements the debug interface for Jinja. This module does some pretty
ugly stuff with the Python traceback system in order to achieve tracebacks
with correct line numbers, locals and contents.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import sys
import traceback
from jinja2.utils import CodeType, missing, internal_code
from jinja2.exceptions import TemplateSyntaxError
# how does the raise helper look like?
try:
exec "raise TypeError, 'foo'"
except SyntaxError:
raise_helper = 'raise __jinja_exception__[1]'
except TypeError:
raise_helper = 'raise __jinja_exception__[0], __jinja_exception__[1]'
class TracebackFrameProxy(object):
"""Proxies a traceback frame."""
def __init__(self, tb):
self.tb = tb
def _set_tb_next(self, next):
if tb_set_next is not None: |
tb_next = property(_get_tb_next, _set_tb_next)
del _get_tb_next, _set_tb_next
@property
def is_jinja_frame(self):
return '__jinja_template__' in self.tb.tb_frame.f_globals
def __getattr__(self, name):
return getattr(self.tb, name)
class ProcessedTraceback(object):
"""Holds a Jinja preprocessed traceback for priting or reraising."""
def __init__(self, exc_type, exc_value, frames):
assert frames, 'no frames for this traceback?'
self.exc_type = exc_type
self.exc_value = exc_value
self.frames = frames
def chain_frames(self):
"""Chains the frames. Requires ctypes or the speedups extension."""
prev_tb = None
for tb in self.frames:
if prev_tb is not None:
prev_tb.tb_next = tb
prev_tb = tb
prev_tb.tb_next = None
def render_as_text(self, limit=None):
"""Return a string with the traceback."""
lines = traceback.format_exception(self.exc_type, self.exc_value,
self.frames[0], limit=limit)
return ''.join(lines).rstrip()
def render_as_html(self, full=False):
"""Return a unicode string with the traceback as rendered HTML."""
from jinja2.debugrenderer import render_traceback
return u'%s\n\n<!--\n%s\n-->' % (
render_traceback(self, full=full),
self.render_as_text().decode('utf-8', 'replace')
)
@property
def is_template_syntax_error(self):
"""`True` if this is a template syntax error."""
return isinstance(self.exc_value, TemplateSyntaxError)
@property
def exc_info(self):
"""Exception info tuple with a proxy around the frame objects."""
return self.exc_type, self.exc_value, self.frames[0]
@property
def standard_exc_info(self):
"""Standard python exc_info for re-raising"""
return self.exc_type, self.exc_value, self.frames[0].tb
def make_traceback(exc_info, source_hint=None):
"""Creates a processed traceback object from the exc_info."""
exc_type, exc_value, tb = exc_info
if isinstance(exc_value, TemplateSyntaxError):
exc_info = translate_syntax_error(exc_value, source_hint)
initial_skip = 0
else:
initial_skip = 1
return translate_exception(exc_info, initial_skip)
def translate_syntax_error(error, source=None):
"""Rewrites a syntax error to please traceback systems."""
error.source = source
error.translated = True
exc_info = (error.__class__, error, None)
filename = error.filename
if filename is None:
filename = '<unknown>'
return fake_exc_info(exc_info, filename, error.lineno)
def translate_exception(exc_info, initial_skip=0):
"""If passed an exc_info it will automatically rewrite the exceptions
all the way down to the correct line numbers and frames.
"""
tb = exc_info[2]
frames = []
# skip some internal frames if wanted
for x in xrange(initial_skip):
if tb is not None:
tb = tb.tb_next
initial_tb = tb
while tb is not None:
# skip frames decorated with @internalcode. These are internal
# calls we can't avoid and that are useless in template debugging
# output.
if tb.tb_frame.f_code in internal_code:
tb = tb.tb_next
continue
# save a reference to the next frame if we override the current
# one with a faked one.
next = tb.tb_next
# fake template exceptions
template = tb.tb_frame.f_globals.get('__jinja_template__')
if template is not None:
lineno = template.get_corresponding_lineno(tb.tb_lineno)
tb = fake_exc_info(exc_info[:2] + (tb,), template.filename,
lineno)[2]
frames.append(TracebackFrameProxy(tb))
tb = next
# if we don't have any exceptions in the frames left, we have to
# reraise it unchanged.
# XXX: can we backup here? when could this happen?
if not frames:
raise exc_info[0], exc_info[1], exc_info[2]
traceback = ProcessedTraceback(exc_info[0], exc_info[1], frames)
if tb_set_next is not None:
traceback.chain_frames()
return traceback
def fake_exc_info(exc_info, filename, lineno):
"""Helper for `translate_exception`."""
exc_type, exc_value, tb = exc_info
# figure the real context out
if tb is not None:
real_locals = tb.tb_frame.f_locals.copy()
ctx = real_locals.get('context')
if ctx:
locals = ctx.get_all()
else:
locals = {}
for name, value in real_locals.iteritems():
if name.startswith('l_') and value is not missing:
locals[name[2:]] = value
# if there is a local called __jinja_exception__, we get
# rid of it to not break the debug functionality.
locals.pop('__jinja_exception__', None)
else:
locals = {}
# assamble fake globals we need
globals = {
'__name__': filename,
'__file__': filename,
'__jinja_exception__': exc_info[:2],
# we don't want to keep the reference to the template around
# to not cause circular dependencies, but we mark it as Jinja
# frame for the ProcessedTraceback
'__jinja_template__': None
}
# and fake the exception
code = compile('\n' * (lineno - 1) + raise_helper, filename, 'exec')
# if it's possible, change the name of the code. This won't work
# on some python environments such as google appengine
try:
if tb is None:
location = 'template'
else:
function = tb.tb_frame.f_code.co_name
if function == 'root':
location = 'top-level template code'
elif function.startswith('block_'):
location = 'block "%s"' % function[6:]
else:
location = 'template'
code = CodeType(0, code.co_nlocals, code.co_stacksize,
code.co_flags, code.co_code, code.co_consts,
code.co_names, code.co_varnames, filename,
location, code.co_firstlineno,
code.co_lnotab, (), ())
except:
pass
# execute the code and catch the new traceback
try:
exec code in globals, locals
except:
exc_info = sys.exc_info()
new_tb = exc_info[2].tb_next
# return without this frame
return exc_info[:2] + (new_tb,)
def _init_ugly_crap():
"""This function implements a few ugly things so that we can patch the
traceback objects. The function returned allows resetting `tb_next` on
any python traceback object.
"""
import ctypes
from types import TracebackType
# figure out side of _Py_ssize_t
if hasattr(ctypes.pythonapi, 'Py_InitModule4_64'):
_Py_ssize_t = ctypes.c_int64
else:
_Py_ssize_t = ctypes.c_int
# regular python
class _PyObject(ctypes.Structure):
pass
_PyObject._fields_ = [
('ob_refcnt', _Py_ssize_t),
('ob_type', ctypes.POINTER(_PyObject))
]
# python with trace
if object.__basicsize__ != ctypes.sizeof(_PyObject):
class _PyObject(ctypes.Structure):
pass
_PyObject._fields_ = [
('_ob_next', ctypes.POINTER(_PyObject)),
('_ob_prev', ctypes.POINTER(_PyObject)),
('ob_refcnt', _Py_ssize_t),
('ob_type', ctypes.POINTER(_PyObject))
]
class _Traceback(_PyObject):
pass
_Traceback._fields_ = [
('tb_next', ctypes.POINTER(_Traceback)),
('tb_frame', ctypes.POINTER(_PyObject)),
('tb_lasti', ctypes.c_int),
('tb_lineno', ctypes.c_int)
]
def tb_set_next(tb, next):
"""Set the tb_next attribute of a traceback object."""
if not (isinstance(tb, TracebackType) and
(next is None or isinstance(next, TracebackType))):
raise TypeError('tb_set_next arguments must be traceback objects')
obj = _Traceback.from_address(id(tb))
if tb.tb_next is not None:
old = _Traceback.from_address(id(tb.tb_next))
old.ob_refcnt -= 1
if next is None:
obj.tb_next = ctypes.POINTER(_Traceback)()
else:
next = _Traceback.from_address(id(next))
next.ob_refcnt += 1
obj.tb_next = ctypes.pointer(next)
return tb_set_next
# try to get a tb_set_next implementation
try:
from jinja2._speedups import tb_set_next
except ImportError:
try:
tb_set_next = _init_ugly_crap()
except:
tb_set_next = None
del _init_ugly_crap | tb_set_next(self.tb, next and next.tb or None)
self._tb_next = next
def _get_tb_next(self):
return self._tb_next | random_line_split |
test.js |
function main() {
var N = 10000;
var lines = generateLines(N);
//timeCanvas2D(lines, N);
timeBatchDraw(lines, N);
}
function generateLines(N) {
var lines = new Array(N);
let canvas = document.getElementById("canvas");
let w = canvas.width;
let h = canvas.height;
// Create funky lines:
for (i=0; i<N; i++) {
lines[i] = {
fromX: (1.3*i/N) * w,
fromY: 0.5/(2*(i/N) + 1) * h,
toX: (0.1*i-1)/(N - i) * w,
toY: (0.3*N)/(5*(i*i)/N) * 0.5 * h
};
}
//console.log(lines);
return lines;
}
function | (lines, N) {
let canvas = document.getElementById("canvas");
let params = {
maxLines: N,
clearColor: {r: 1, g: 1, b: 1, a: 1}
};
let batchDrawer = new BatchDrawer(canvas, params);
if (batchDrawer.error != null) {
console.log(batchDrawer.error);
return;
}
console.time("BatchDraw");
for (i=0; i<N; i++) {
batchDrawer.addLine(lines[i].fromX, lines[i].fromY, lines[i].toX, lines[i].toY, 0.001, 1, 0.5, 0.1, 1);
}
batchDrawer.draw(false);
console.timeEnd("BatchDraw");
}
function timeCanvas2D(lines, N) {
let canvas = document.getElementById("canvas");
let ctx = canvas.getContext("2d");
ctx.lineWidth = 0.01;
ctx.strokeStyle = '#ffa500';
ctx.fillStyle="#FFFFFF";
console.time("Canvas2D");
ctx.clearRect(0, 0, canvas.width, canvas.height);
for (i=0; i<N; i++) {
ctx.beginPath();
ctx.moveTo(lines[i].fromX, lines[i].fromY);
ctx.lineTo(lines[i].toX, lines[i].toY);
ctx.stroke();
}
console.timeEnd("Canvas2D");
}
| timeBatchDraw | identifier_name |
test.js |
function main() {
var N = 10000;
var lines = generateLines(N);
//timeCanvas2D(lines, N);
timeBatchDraw(lines, N);
}
function generateLines(N) {
var lines = new Array(N);
let canvas = document.getElementById("canvas");
let w = canvas.width;
let h = canvas.height;
// Create funky lines:
for (i=0; i<N; i++) {
lines[i] = {
fromX: (1.3*i/N) * w,
fromY: 0.5/(2*(i/N) + 1) * h,
toX: (0.1*i-1)/(N - i) * w,
toY: (0.3*N)/(5*(i*i)/N) * 0.5 * h
};
}
//console.log(lines);
return lines;
}
function timeBatchDraw(lines, N) |
function timeCanvas2D(lines, N) {
let canvas = document.getElementById("canvas");
let ctx = canvas.getContext("2d");
ctx.lineWidth = 0.01;
ctx.strokeStyle = '#ffa500';
ctx.fillStyle="#FFFFFF";
console.time("Canvas2D");
ctx.clearRect(0, 0, canvas.width, canvas.height);
for (i=0; i<N; i++) {
ctx.beginPath();
ctx.moveTo(lines[i].fromX, lines[i].fromY);
ctx.lineTo(lines[i].toX, lines[i].toY);
ctx.stroke();
}
console.timeEnd("Canvas2D");
}
| {
let canvas = document.getElementById("canvas");
let params = {
maxLines: N,
clearColor: {r: 1, g: 1, b: 1, a: 1}
};
let batchDrawer = new BatchDrawer(canvas, params);
if (batchDrawer.error != null) {
console.log(batchDrawer.error);
return;
}
console.time("BatchDraw");
for (i=0; i<N; i++) {
batchDrawer.addLine(lines[i].fromX, lines[i].fromY, lines[i].toX, lines[i].toY, 0.001, 1, 0.5, 0.1, 1);
}
batchDrawer.draw(false);
console.timeEnd("BatchDraw");
} | identifier_body |
test.js |
function main() {
var N = 10000;
var lines = generateLines(N);
//timeCanvas2D(lines, N);
timeBatchDraw(lines, N);
}
function generateLines(N) {
var lines = new Array(N);
let canvas = document.getElementById("canvas");
let w = canvas.width;
let h = canvas.height;
// Create funky lines:
for (i=0; i<N; i++) |
//console.log(lines);
return lines;
}
function timeBatchDraw(lines, N) {
let canvas = document.getElementById("canvas");
let params = {
maxLines: N,
clearColor: {r: 1, g: 1, b: 1, a: 1}
};
let batchDrawer = new BatchDrawer(canvas, params);
if (batchDrawer.error != null) {
console.log(batchDrawer.error);
return;
}
console.time("BatchDraw");
for (i=0; i<N; i++) {
batchDrawer.addLine(lines[i].fromX, lines[i].fromY, lines[i].toX, lines[i].toY, 0.001, 1, 0.5, 0.1, 1);
}
batchDrawer.draw(false);
console.timeEnd("BatchDraw");
}
function timeCanvas2D(lines, N) {
let canvas = document.getElementById("canvas");
let ctx = canvas.getContext("2d");
ctx.lineWidth = 0.01;
ctx.strokeStyle = '#ffa500';
ctx.fillStyle="#FFFFFF";
console.time("Canvas2D");
ctx.clearRect(0, 0, canvas.width, canvas.height);
for (i=0; i<N; i++) {
ctx.beginPath();
ctx.moveTo(lines[i].fromX, lines[i].fromY);
ctx.lineTo(lines[i].toX, lines[i].toY);
ctx.stroke();
}
console.timeEnd("Canvas2D");
}
| {
lines[i] = {
fromX: (1.3*i/N) * w,
fromY: 0.5/(2*(i/N) + 1) * h,
toX: (0.1*i-1)/(N - i) * w,
toY: (0.3*N)/(5*(i*i)/N) * 0.5 * h
};
} | conditional_block |
test.js | function main() {
var N = 10000;
var lines = generateLines(N);
//timeCanvas2D(lines, N);
timeBatchDraw(lines, N);
}
function generateLines(N) {
var lines = new Array(N);
let canvas = document.getElementById("canvas");
let w = canvas.width;
let h = canvas.height;
// Create funky lines:
for (i=0; i<N; i++) {
lines[i] = {
fromX: (1.3*i/N) * w,
fromY: 0.5/(2*(i/N) + 1) * h,
toX: (0.1*i-1)/(N - i) * w,
toY: (0.3*N)/(5*(i*i)/N) * 0.5 * h
};
}
//console.log(lines);
return lines;
}
| let params = {
maxLines: N,
clearColor: {r: 1, g: 1, b: 1, a: 1}
};
let batchDrawer = new BatchDrawer(canvas, params);
if (batchDrawer.error != null) {
console.log(batchDrawer.error);
return;
}
console.time("BatchDraw");
for (i=0; i<N; i++) {
batchDrawer.addLine(lines[i].fromX, lines[i].fromY, lines[i].toX, lines[i].toY, 0.001, 1, 0.5, 0.1, 1);
}
batchDrawer.draw(false);
console.timeEnd("BatchDraw");
}
function timeCanvas2D(lines, N) {
let canvas = document.getElementById("canvas");
let ctx = canvas.getContext("2d");
ctx.lineWidth = 0.01;
ctx.strokeStyle = '#ffa500';
ctx.fillStyle="#FFFFFF";
console.time("Canvas2D");
ctx.clearRect(0, 0, canvas.width, canvas.height);
for (i=0; i<N; i++) {
ctx.beginPath();
ctx.moveTo(lines[i].fromX, lines[i].fromY);
ctx.lineTo(lines[i].toX, lines[i].toY);
ctx.stroke();
}
console.timeEnd("Canvas2D");
} |
function timeBatchDraw(lines, N) {
let canvas = document.getElementById("canvas"); | random_line_split |
example-pane-trigger.component.ts | import { Component, ChangeDetectionStrategy, ViewChild, AfterViewInit } from '@angular/core';
import { DataService, Human } from '../data.service';
import { Observable } from 'rxjs';
import { Command, GridComponent, PaneComponent } from 'ng2-qgrid';
const EXAMPLE_TAGS = [
'pane-trigger',
'Pane for selected row can be opened by clicking on the triangle button'
];
@Component({
selector: 'example-pane-trigger',
templateUrl: 'example-pane-trigger.component.html',
styleUrls: ['example-pane-trigger.component.scss'],
providers: [DataService],
changeDetection: ChangeDetectionStrategy.OnPush
})
export class ExamplePaneTriggerComponent implements AfterViewInit {
static tags = EXAMPLE_TAGS;
title = EXAMPLE_TAGS[1];
@ViewChild(GridComponent) grid: GridComponent;
@ViewChild(PaneComponent) pane: PaneComponent;
rows$: Observable<Human[]>;
selectedRow: Human;
openPane = new Command({
execute: () => this.pane.open('right'),
canExecute: () => !!this.selectedRow,
});
constructor(dataService: DataService) {
this.rows$ = dataService.getPeople();
}
ngAfterViewInit() {
const { model } = this.grid;
model.selectionChanged.watch(e => {
if (e.hasChanges('items')) |
});
}
}
| {
this.selectedRow = e.state.items[0];
this.openPane.canExecuteCheck.next();
} | conditional_block |
example-pane-trigger.component.ts | import { Component, ChangeDetectionStrategy, ViewChild, AfterViewInit } from '@angular/core';
import { DataService, Human } from '../data.service';
import { Observable } from 'rxjs';
import { Command, GridComponent, PaneComponent } from 'ng2-qgrid';
const EXAMPLE_TAGS = [
'pane-trigger',
'Pane for selected row can be opened by clicking on the triangle button'
];
@Component({
selector: 'example-pane-trigger',
templateUrl: 'example-pane-trigger.component.html',
styleUrls: ['example-pane-trigger.component.scss'],
providers: [DataService],
changeDetection: ChangeDetectionStrategy.OnPush
})
export class ExamplePaneTriggerComponent implements AfterViewInit {
static tags = EXAMPLE_TAGS;
title = EXAMPLE_TAGS[1];
@ViewChild(GridComponent) grid: GridComponent;
@ViewChild(PaneComponent) pane: PaneComponent;
rows$: Observable<Human[]>;
selectedRow: Human;
openPane = new Command({
execute: () => this.pane.open('right'),
canExecute: () => !!this.selectedRow,
});
constructor(dataService: DataService) {
this.rows$ = dataService.getPeople();
} | this.selectedRow = e.state.items[0];
this.openPane.canExecuteCheck.next();
}
});
}
} |
ngAfterViewInit() {
const { model } = this.grid;
model.selectionChanged.watch(e => {
if (e.hasChanges('items')) { | random_line_split |
example-pane-trigger.component.ts | import { Component, ChangeDetectionStrategy, ViewChild, AfterViewInit } from '@angular/core';
import { DataService, Human } from '../data.service';
import { Observable } from 'rxjs';
import { Command, GridComponent, PaneComponent } from 'ng2-qgrid';
const EXAMPLE_TAGS = [
'pane-trigger',
'Pane for selected row can be opened by clicking on the triangle button'
];
@Component({
selector: 'example-pane-trigger',
templateUrl: 'example-pane-trigger.component.html',
styleUrls: ['example-pane-trigger.component.scss'],
providers: [DataService],
changeDetection: ChangeDetectionStrategy.OnPush
})
export class ExamplePaneTriggerComponent implements AfterViewInit {
static tags = EXAMPLE_TAGS;
title = EXAMPLE_TAGS[1];
@ViewChild(GridComponent) grid: GridComponent;
@ViewChild(PaneComponent) pane: PaneComponent;
rows$: Observable<Human[]>;
selectedRow: Human;
openPane = new Command({
execute: () => this.pane.open('right'),
canExecute: () => !!this.selectedRow,
});
constructor(dataService: DataService) {
this.rows$ = dataService.getPeople();
}
ngAfterViewInit() |
}
| {
const { model } = this.grid;
model.selectionChanged.watch(e => {
if (e.hasChanges('items')) {
this.selectedRow = e.state.items[0];
this.openPane.canExecuteCheck.next();
}
});
} | identifier_body |
example-pane-trigger.component.ts | import { Component, ChangeDetectionStrategy, ViewChild, AfterViewInit } from '@angular/core';
import { DataService, Human } from '../data.service';
import { Observable } from 'rxjs';
import { Command, GridComponent, PaneComponent } from 'ng2-qgrid';
const EXAMPLE_TAGS = [
'pane-trigger',
'Pane for selected row can be opened by clicking on the triangle button'
];
@Component({
selector: 'example-pane-trigger',
templateUrl: 'example-pane-trigger.component.html',
styleUrls: ['example-pane-trigger.component.scss'],
providers: [DataService],
changeDetection: ChangeDetectionStrategy.OnPush
})
export class ExamplePaneTriggerComponent implements AfterViewInit {
static tags = EXAMPLE_TAGS;
title = EXAMPLE_TAGS[1];
@ViewChild(GridComponent) grid: GridComponent;
@ViewChild(PaneComponent) pane: PaneComponent;
rows$: Observable<Human[]>;
selectedRow: Human;
openPane = new Command({
execute: () => this.pane.open('right'),
canExecute: () => !!this.selectedRow,
});
constructor(dataService: DataService) {
this.rows$ = dataService.getPeople();
}
| () {
const { model } = this.grid;
model.selectionChanged.watch(e => {
if (e.hasChanges('items')) {
this.selectedRow = e.state.items[0];
this.openPane.canExecuteCheck.next();
}
});
}
}
| ngAfterViewInit | identifier_name |
lib.rs | pub fn score(word: &str) -> usize | {
// lowercase for case insensitivity
// use map to convert to numbers
// sum them
word.to_lowercase()
.chars()
.map(|c| match c {
'a' | 'e' | 'i' | 'o' | 'u' | 'l' | 'n' | 'r' | 's' | 't' => 1,
'd' | 'g' => 2,
'b' | 'c' | 'm' | 'p' => 3,
'f' | 'h' | 'v' | 'w' | 'y' => 4,
'k' => 5,
'j' | 'x' => 8,
'q' | 'z' => 10,
_ => 0,
})
.fold(0, |accu, x| accu + x)
} | identifier_body | |
lib.rs | pub fn | (word: &str) -> usize {
// lowercase for case insensitivity
// use map to convert to numbers
// sum them
word.to_lowercase()
.chars()
.map(|c| match c {
'a' | 'e' | 'i' | 'o' | 'u' | 'l' | 'n' | 'r' | 's' | 't' => 1,
'd' | 'g' => 2,
'b' | 'c' | 'm' | 'p' => 3,
'f' | 'h' | 'v' | 'w' | 'y' => 4,
'k' => 5,
'j' | 'x' => 8,
'q' | 'z' => 10,
_ => 0,
})
.fold(0, |accu, x| accu + x)
}
| score | identifier_name |
lib.rs | pub fn score(word: &str) -> usize {
// lowercase for case insensitivity
// use map to convert to numbers | 'd' | 'g' => 2,
'b' | 'c' | 'm' | 'p' => 3,
'f' | 'h' | 'v' | 'w' | 'y' => 4,
'k' => 5,
'j' | 'x' => 8,
'q' | 'z' => 10,
_ => 0,
})
.fold(0, |accu, x| accu + x)
} | // sum them
word.to_lowercase()
.chars()
.map(|c| match c {
'a' | 'e' | 'i' | 'o' | 'u' | 'l' | 'n' | 'r' | 's' | 't' => 1, | random_line_split |
hash_matcher.py | import hashlib
import re
import os
import pickle
from functools import partial
from externals.lib.misc import file_scan, update_dict
import logging
log = logging.getLogger(__name__)
VERSION = "0.0"
# Constants --------------------------------------------------------------------
DEFAULT_DESTINATION = './files/'
DEFAULT_CACHE_FILENAME = 'hash_cache.pickle'
DEFAULT_FILE_EXTS = {'mp4', 'avi', 'rm', 'mkv', 'ogm', 'ssa', 'srt', 'ass'}
# Utils ------------------------------------------------------------------------
def hash_files(folder, file_regex=None, hasher=hashlib.sha256):
return {
f.hash: f
for f in file_scan(folder, file_regex=file_regex, hasher=hasher)
}
# ------------------------------------------------------------------------------
def hash_source_dest(source_folder=None, destination_folder=None, hasher=hashlib.sha256, file_exts=DEFAULT_FILE_EXTS, **kwargs):
file_regex = re.compile(r'.*\.({})$'.format('|'.join(file_exts)))
gen_hashs_folder = partial(hash_files, **dict(hasher=hasher, file_regex=file_regex))
return {
'source_files': gen_hashs_folder(source_folder),
'destination_files': gen_hashs_folder(destination_folder),
}
def symlink_matched_files(source_files=None, destination_files=None, destination_folder=None, dry_run=False, **kwargs):
for key in sorted(set(source_files.keys()).difference(set(destination_files.keys())), key=lambda key: source_files[key].file):
f = source_files[key]
log.debug(f.file)
if not dry_run:
try:
os.symlink(f.absolute, os.path.join(destination_folder, f.file))
except OSError:
log.info('unable to symlink {0}'.format(f.file))
# ------------------------------------------------------------------------------
def move_files():
pass
# Command Line -----------------------------------------------------------------
def get_args():
import argparse
parser = argparse.ArgumentParser(
description="""
Find the duplicates
""",
epilog=""" """
)
# Folders
parser.add_argument('-d', '--destination_folder', action='store', help='', default=DEFAULT_DESTINATION)
parser.add_argument('-s', '--source_folder', action='store', help='', required=True)
parser.add_argument('-e', '--file_exts', nargs='*', help='file exts to find', default=DEFAULT_FILE_EXTS)
# Operation
#parser.add_argument('-c', '--copy', action='store_true', help='copy files to destination (to be ready for importing)', default=False)
# Cache
parser.add_argument('--cache_filename', action='store', help='', default=DEFAULT_CACHE_FILENAME)
# Common
parser.add_argument('--dry_run', action='store_true', help='', default=False)
parser.add_argument('-v', '--verbose', action='store_true', help='', default=False)
parser.add_argument('--version', action='version', version=VERSION)
args = vars(parser.parse_args())
return args
def main():
args = get_args()
logging.basicConfig(level=logging.DEBUG if args['verbose'] else logging.INFO)
try:
with open(args['cache_filename'], 'rb') as f:
data = pickle.load(f)
except IOError:
with open(args['cache_filename'], 'wb') as f:
data = hash_source_dest(**args)
pickle.dump(data, f)
symlink_matched_files(**update_dict(args.copy(), data))
| if __name__ == "__main__":
main() | random_line_split | |
hash_matcher.py | import hashlib
import re
import os
import pickle
from functools import partial
from externals.lib.misc import file_scan, update_dict
import logging
log = logging.getLogger(__name__)
VERSION = "0.0"
# Constants --------------------------------------------------------------------
DEFAULT_DESTINATION = './files/'
DEFAULT_CACHE_FILENAME = 'hash_cache.pickle'
DEFAULT_FILE_EXTS = {'mp4', 'avi', 'rm', 'mkv', 'ogm', 'ssa', 'srt', 'ass'}
# Utils ------------------------------------------------------------------------
def hash_files(folder, file_regex=None, hasher=hashlib.sha256):
return {
f.hash: f
for f in file_scan(folder, file_regex=file_regex, hasher=hasher)
}
# ------------------------------------------------------------------------------
def hash_source_dest(source_folder=None, destination_folder=None, hasher=hashlib.sha256, file_exts=DEFAULT_FILE_EXTS, **kwargs):
file_regex = re.compile(r'.*\.({})$'.format('|'.join(file_exts)))
gen_hashs_folder = partial(hash_files, **dict(hasher=hasher, file_regex=file_regex))
return {
'source_files': gen_hashs_folder(source_folder),
'destination_files': gen_hashs_folder(destination_folder),
}
def symlink_matched_files(source_files=None, destination_files=None, destination_folder=None, dry_run=False, **kwargs):
for key in sorted(set(source_files.keys()).difference(set(destination_files.keys())), key=lambda key: source_files[key].file):
|
# ------------------------------------------------------------------------------
def move_files():
pass
# Command Line -----------------------------------------------------------------
def get_args():
import argparse
parser = argparse.ArgumentParser(
description="""
Find the duplicates
""",
epilog=""" """
)
# Folders
parser.add_argument('-d', '--destination_folder', action='store', help='', default=DEFAULT_DESTINATION)
parser.add_argument('-s', '--source_folder', action='store', help='', required=True)
parser.add_argument('-e', '--file_exts', nargs='*', help='file exts to find', default=DEFAULT_FILE_EXTS)
# Operation
#parser.add_argument('-c', '--copy', action='store_true', help='copy files to destination (to be ready for importing)', default=False)
# Cache
parser.add_argument('--cache_filename', action='store', help='', default=DEFAULT_CACHE_FILENAME)
# Common
parser.add_argument('--dry_run', action='store_true', help='', default=False)
parser.add_argument('-v', '--verbose', action='store_true', help='', default=False)
parser.add_argument('--version', action='version', version=VERSION)
args = vars(parser.parse_args())
return args
def main():
args = get_args()
logging.basicConfig(level=logging.DEBUG if args['verbose'] else logging.INFO)
try:
with open(args['cache_filename'], 'rb') as f:
data = pickle.load(f)
except IOError:
with open(args['cache_filename'], 'wb') as f:
data = hash_source_dest(**args)
pickle.dump(data, f)
symlink_matched_files(**update_dict(args.copy(), data))
if __name__ == "__main__":
main()
| f = source_files[key]
log.debug(f.file)
if not dry_run:
try:
os.symlink(f.absolute, os.path.join(destination_folder, f.file))
except OSError:
log.info('unable to symlink {0}'.format(f.file)) | conditional_block |
hash_matcher.py | import hashlib
import re
import os
import pickle
from functools import partial
from externals.lib.misc import file_scan, update_dict
import logging
log = logging.getLogger(__name__)
VERSION = "0.0"
# Constants --------------------------------------------------------------------
DEFAULT_DESTINATION = './files/'
DEFAULT_CACHE_FILENAME = 'hash_cache.pickle'
DEFAULT_FILE_EXTS = {'mp4', 'avi', 'rm', 'mkv', 'ogm', 'ssa', 'srt', 'ass'}
# Utils ------------------------------------------------------------------------
def hash_files(folder, file_regex=None, hasher=hashlib.sha256):
return {
f.hash: f
for f in file_scan(folder, file_regex=file_regex, hasher=hasher)
}
# ------------------------------------------------------------------------------
def hash_source_dest(source_folder=None, destination_folder=None, hasher=hashlib.sha256, file_exts=DEFAULT_FILE_EXTS, **kwargs):
file_regex = re.compile(r'.*\.({})$'.format('|'.join(file_exts)))
gen_hashs_folder = partial(hash_files, **dict(hasher=hasher, file_regex=file_regex))
return {
'source_files': gen_hashs_folder(source_folder),
'destination_files': gen_hashs_folder(destination_folder),
}
def symlink_matched_files(source_files=None, destination_files=None, destination_folder=None, dry_run=False, **kwargs):
for key in sorted(set(source_files.keys()).difference(set(destination_files.keys())), key=lambda key: source_files[key].file):
f = source_files[key]
log.debug(f.file)
if not dry_run:
try:
os.symlink(f.absolute, os.path.join(destination_folder, f.file))
except OSError:
log.info('unable to symlink {0}'.format(f.file))
# ------------------------------------------------------------------------------
def move_files():
pass
# Command Line -----------------------------------------------------------------
def get_args():
import argparse
parser = argparse.ArgumentParser(
description="""
Find the duplicates
""",
epilog=""" """
)
# Folders
parser.add_argument('-d', '--destination_folder', action='store', help='', default=DEFAULT_DESTINATION)
parser.add_argument('-s', '--source_folder', action='store', help='', required=True)
parser.add_argument('-e', '--file_exts', nargs='*', help='file exts to find', default=DEFAULT_FILE_EXTS)
# Operation
#parser.add_argument('-c', '--copy', action='store_true', help='copy files to destination (to be ready for importing)', default=False)
# Cache
parser.add_argument('--cache_filename', action='store', help='', default=DEFAULT_CACHE_FILENAME)
# Common
parser.add_argument('--dry_run', action='store_true', help='', default=False)
parser.add_argument('-v', '--verbose', action='store_true', help='', default=False)
parser.add_argument('--version', action='version', version=VERSION)
args = vars(parser.parse_args())
return args
def main():
|
if __name__ == "__main__":
main()
| args = get_args()
logging.basicConfig(level=logging.DEBUG if args['verbose'] else logging.INFO)
try:
with open(args['cache_filename'], 'rb') as f:
data = pickle.load(f)
except IOError:
with open(args['cache_filename'], 'wb') as f:
data = hash_source_dest(**args)
pickle.dump(data, f)
symlink_matched_files(**update_dict(args.copy(), data)) | identifier_body |
hash_matcher.py | import hashlib
import re
import os
import pickle
from functools import partial
from externals.lib.misc import file_scan, update_dict
import logging
log = logging.getLogger(__name__)
VERSION = "0.0"
# Constants --------------------------------------------------------------------
DEFAULT_DESTINATION = './files/'
DEFAULT_CACHE_FILENAME = 'hash_cache.pickle'
DEFAULT_FILE_EXTS = {'mp4', 'avi', 'rm', 'mkv', 'ogm', 'ssa', 'srt', 'ass'}
# Utils ------------------------------------------------------------------------
def hash_files(folder, file_regex=None, hasher=hashlib.sha256):
return {
f.hash: f
for f in file_scan(folder, file_regex=file_regex, hasher=hasher)
}
# ------------------------------------------------------------------------------
def hash_source_dest(source_folder=None, destination_folder=None, hasher=hashlib.sha256, file_exts=DEFAULT_FILE_EXTS, **kwargs):
file_regex = re.compile(r'.*\.({})$'.format('|'.join(file_exts)))
gen_hashs_folder = partial(hash_files, **dict(hasher=hasher, file_regex=file_regex))
return {
'source_files': gen_hashs_folder(source_folder),
'destination_files': gen_hashs_folder(destination_folder),
}
def symlink_matched_files(source_files=None, destination_files=None, destination_folder=None, dry_run=False, **kwargs):
for key in sorted(set(source_files.keys()).difference(set(destination_files.keys())), key=lambda key: source_files[key].file):
f = source_files[key]
log.debug(f.file)
if not dry_run:
try:
os.symlink(f.absolute, os.path.join(destination_folder, f.file))
except OSError:
log.info('unable to symlink {0}'.format(f.file))
# ------------------------------------------------------------------------------
def move_files():
pass
# Command Line -----------------------------------------------------------------
def | ():
import argparse
parser = argparse.ArgumentParser(
description="""
Find the duplicates
""",
epilog=""" """
)
# Folders
parser.add_argument('-d', '--destination_folder', action='store', help='', default=DEFAULT_DESTINATION)
parser.add_argument('-s', '--source_folder', action='store', help='', required=True)
parser.add_argument('-e', '--file_exts', nargs='*', help='file exts to find', default=DEFAULT_FILE_EXTS)
# Operation
#parser.add_argument('-c', '--copy', action='store_true', help='copy files to destination (to be ready for importing)', default=False)
# Cache
parser.add_argument('--cache_filename', action='store', help='', default=DEFAULT_CACHE_FILENAME)
# Common
parser.add_argument('--dry_run', action='store_true', help='', default=False)
parser.add_argument('-v', '--verbose', action='store_true', help='', default=False)
parser.add_argument('--version', action='version', version=VERSION)
args = vars(parser.parse_args())
return args
def main():
args = get_args()
logging.basicConfig(level=logging.DEBUG if args['verbose'] else logging.INFO)
try:
with open(args['cache_filename'], 'rb') as f:
data = pickle.load(f)
except IOError:
with open(args['cache_filename'], 'wb') as f:
data = hash_source_dest(**args)
pickle.dump(data, f)
symlink_matched_files(**update_dict(args.copy(), data))
if __name__ == "__main__":
main()
| get_args | identifier_name |
parser.py | from parso.python import tree
from parso.python.token import PythonTokenTypes
from parso.parser import BaseParser
NAME = PythonTokenTypes.NAME
INDENT = PythonTokenTypes.INDENT
DEDENT = PythonTokenTypes.DEDENT
class Parser(BaseParser):
"""
This class is used to parse a Python file, it then divides them into a
class structure of different scopes.
:param pgen_grammar: The grammar object of pgen2. Loaded by load_grammar.
"""
node_map = {
'expr_stmt': tree.ExprStmt,
'classdef': tree.Class,
'funcdef': tree.Function,
'file_input': tree.Module,
'import_name': tree.ImportName,
'import_from': tree.ImportFrom,
'break_stmt': tree.KeywordStatement,
'continue_stmt': tree.KeywordStatement,
'return_stmt': tree.ReturnStmt,
'raise_stmt': tree.KeywordStatement,
'yield_expr': tree.YieldExpr,
'del_stmt': tree.KeywordStatement,
'pass_stmt': tree.KeywordStatement,
'global_stmt': tree.GlobalStmt,
'nonlocal_stmt': tree.KeywordStatement,
'print_stmt': tree.KeywordStatement,
'assert_stmt': tree.AssertStmt,
'if_stmt': tree.IfStmt,
'with_stmt': tree.WithStmt,
'for_stmt': tree.ForStmt,
'while_stmt': tree.WhileStmt,
'try_stmt': tree.TryStmt,
'sync_comp_for': tree.SyncCompFor,
# Not sure if this is the best idea, but IMO it's the easiest way to
# avoid extreme amounts of work around the subtle difference of 2/3
# grammar in list comoprehensions.
'decorator': tree.Decorator,
'lambdef': tree.Lambda,
'lambdef_nocond': tree.Lambda,
'namedexpr_test': tree.NamedExpr,
}
default_node = tree.PythonNode
# Names/Keywords are handled separately
_leaf_map = {
PythonTokenTypes.STRING: tree.String,
PythonTokenTypes.NUMBER: tree.Number,
PythonTokenTypes.NEWLINE: tree.Newline,
PythonTokenTypes.ENDMARKER: tree.EndMarker,
PythonTokenTypes.FSTRING_STRING: tree.FStringString,
PythonTokenTypes.FSTRING_START: tree.FStringStart,
PythonTokenTypes.FSTRING_END: tree.FStringEnd,
}
def __init__(self, pgen_grammar, error_recovery=True, start_nonterminal='file_input'):
super().__init__(pgen_grammar, start_nonterminal,
error_recovery=error_recovery)
self.syntax_errors = []
self._omit_dedent_list = []
self._indent_counter = 0
def parse(self, tokens):
if self._error_recovery:
|
return super().parse(tokens)
def convert_node(self, nonterminal, children):
"""
Convert raw node information to a PythonBaseNode instance.
This is passed to the parser driver which calls it whenever a reduction of a
grammar rule produces a new complete node, so that the tree is build
strictly bottom-up.
"""
try:
node = self.node_map[nonterminal](children)
except KeyError:
if nonterminal == 'suite':
# We don't want the INDENT/DEDENT in our parser tree. Those
# leaves are just cancer. They are virtual leaves and not real
# ones and therefore have pseudo start/end positions and no
# prefixes. Just ignore them.
children = [children[0]] + children[2:-1]
node = self.default_node(nonterminal, children)
for c in children:
c.parent = node
return node
def convert_leaf(self, type, value, prefix, start_pos):
# print('leaf', repr(value), token.tok_name[type])
if type == NAME:
if value in self._pgen_grammar.reserved_syntax_strings:
return tree.Keyword(value, start_pos, prefix)
else:
return tree.Name(value, start_pos, prefix)
return self._leaf_map.get(type, tree.Operator)(value, start_pos, prefix)
def error_recovery(self, token):
tos_nodes = self.stack[-1].nodes
if tos_nodes:
last_leaf = tos_nodes[-1].get_last_leaf()
else:
last_leaf = None
if self._start_nonterminal == 'file_input' and \
(token.type == PythonTokenTypes.ENDMARKER
or token.type == DEDENT and not last_leaf.value.endswith('\n')
and not last_leaf.value.endswith('\r')):
# In Python statements need to end with a newline. But since it's
# possible (and valid in Python) that there's no newline at the
# end of a file, we have to recover even if the user doesn't want
# error recovery.
if self.stack[-1].dfa.from_rule == 'simple_stmt':
try:
plan = self.stack[-1].dfa.transitions[PythonTokenTypes.NEWLINE]
except KeyError:
pass
else:
if plan.next_dfa.is_final and not plan.dfa_pushes:
# We are ignoring here that the newline would be
# required for a simple_stmt.
self.stack[-1].dfa = plan.next_dfa
self._add_token(token)
return
if not self._error_recovery:
return super().error_recovery(token)
def current_suite(stack):
# For now just discard everything that is not a suite or
# file_input, if we detect an error.
for until_index, stack_node in reversed(list(enumerate(stack))):
# `suite` can sometimes be only simple_stmt, not stmt.
if stack_node.nonterminal == 'file_input':
break
elif stack_node.nonterminal == 'suite':
# In the case where we just have a newline we don't want to
# do error recovery here. In all other cases, we want to do
# error recovery.
if len(stack_node.nodes) != 1:
break
return until_index
until_index = current_suite(self.stack)
if self._stack_removal(until_index + 1):
self._add_token(token)
else:
typ, value, start_pos, prefix = token
if typ == INDENT:
# For every deleted INDENT we have to delete a DEDENT as well.
# Otherwise the parser will get into trouble and DEDENT too early.
self._omit_dedent_list.append(self._indent_counter)
error_leaf = tree.PythonErrorLeaf(typ.name, value, start_pos, prefix)
self.stack[-1].nodes.append(error_leaf)
tos = self.stack[-1]
if tos.nonterminal == 'suite':
# Need at least one statement in the suite. This happend with the
# error recovery above.
try:
tos.dfa = tos.dfa.arcs['stmt']
except KeyError:
# We're already in a final state.
pass
def _stack_removal(self, start_index):
all_nodes = [node for stack_node in self.stack[start_index:] for node in stack_node.nodes]
if all_nodes:
node = tree.PythonErrorNode(all_nodes)
for n in all_nodes:
n.parent = node
self.stack[start_index - 1].nodes.append(node)
self.stack[start_index:] = []
return bool(all_nodes)
def _recovery_tokenize(self, tokens):
for token in tokens:
typ = token[0]
if typ == DEDENT:
# We need to count indents, because if we just omit any DEDENT,
# we might omit them in the wrong place.
o = self._omit_dedent_list
if o and o[-1] == self._indent_counter:
o.pop()
self._indent_counter -= 1
continue
self._indent_counter -= 1
elif typ == INDENT:
self._indent_counter += 1
yield token
| if self._start_nonterminal != 'file_input':
raise NotImplementedError
tokens = self._recovery_tokenize(tokens) | conditional_block |
parser.py | from parso.python import tree
from parso.python.token import PythonTokenTypes
from parso.parser import BaseParser
NAME = PythonTokenTypes.NAME
INDENT = PythonTokenTypes.INDENT
DEDENT = PythonTokenTypes.DEDENT
class Parser(BaseParser):
"""
This class is used to parse a Python file, it then divides them into a
class structure of different scopes.
:param pgen_grammar: The grammar object of pgen2. Loaded by load_grammar.
"""
node_map = {
'expr_stmt': tree.ExprStmt,
'classdef': tree.Class,
'funcdef': tree.Function,
'file_input': tree.Module,
'import_name': tree.ImportName,
'import_from': tree.ImportFrom,
'break_stmt': tree.KeywordStatement,
'continue_stmt': tree.KeywordStatement,
'return_stmt': tree.ReturnStmt,
'raise_stmt': tree.KeywordStatement,
'yield_expr': tree.YieldExpr,
'del_stmt': tree.KeywordStatement,
'pass_stmt': tree.KeywordStatement,
'global_stmt': tree.GlobalStmt,
'nonlocal_stmt': tree.KeywordStatement,
'print_stmt': tree.KeywordStatement,
'assert_stmt': tree.AssertStmt,
'if_stmt': tree.IfStmt,
'with_stmt': tree.WithStmt,
'for_stmt': tree.ForStmt,
'while_stmt': tree.WhileStmt,
'try_stmt': tree.TryStmt,
'sync_comp_for': tree.SyncCompFor,
# Not sure if this is the best idea, but IMO it's the easiest way to
# avoid extreme amounts of work around the subtle difference of 2/3
# grammar in list comoprehensions.
'decorator': tree.Decorator,
'lambdef': tree.Lambda,
'lambdef_nocond': tree.Lambda,
'namedexpr_test': tree.NamedExpr,
}
default_node = tree.PythonNode
# Names/Keywords are handled separately
_leaf_map = {
PythonTokenTypes.STRING: tree.String,
PythonTokenTypes.NUMBER: tree.Number,
PythonTokenTypes.NEWLINE: tree.Newline,
PythonTokenTypes.ENDMARKER: tree.EndMarker,
PythonTokenTypes.FSTRING_STRING: tree.FStringString,
PythonTokenTypes.FSTRING_START: tree.FStringStart,
PythonTokenTypes.FSTRING_END: tree.FStringEnd,
}
def __init__(self, pgen_grammar, error_recovery=True, start_nonterminal='file_input'):
super().__init__(pgen_grammar, start_nonterminal,
error_recovery=error_recovery)
self.syntax_errors = []
self._omit_dedent_list = []
self._indent_counter = 0
def parse(self, tokens):
if self._error_recovery:
if self._start_nonterminal != 'file_input':
raise NotImplementedError
tokens = self._recovery_tokenize(tokens)
return super().parse(tokens)
def convert_node(self, nonterminal, children):
"""
Convert raw node information to a PythonBaseNode instance.
This is passed to the parser driver which calls it whenever a reduction of a
grammar rule produces a new complete node, so that the tree is build
strictly bottom-up.
"""
try:
node = self.node_map[nonterminal](children)
except KeyError:
if nonterminal == 'suite':
# We don't want the INDENT/DEDENT in our parser tree. Those
# leaves are just cancer. They are virtual leaves and not real
# ones and therefore have pseudo start/end positions and no
# prefixes. Just ignore them.
children = [children[0]] + children[2:-1]
node = self.default_node(nonterminal, children)
for c in children:
c.parent = node
return node
def convert_leaf(self, type, value, prefix, start_pos):
# print('leaf', repr(value), token.tok_name[type])
if type == NAME:
if value in self._pgen_grammar.reserved_syntax_strings:
return tree.Keyword(value, start_pos, prefix)
else:
return tree.Name(value, start_pos, prefix)
return self._leaf_map.get(type, tree.Operator)(value, start_pos, prefix)
def error_recovery(self, token):
tos_nodes = self.stack[-1].nodes
if tos_nodes:
last_leaf = tos_nodes[-1].get_last_leaf()
else:
last_leaf = None
if self._start_nonterminal == 'file_input' and \
(token.type == PythonTokenTypes.ENDMARKER
or token.type == DEDENT and not last_leaf.value.endswith('\n')
and not last_leaf.value.endswith('\r')):
# In Python statements need to end with a newline. But since it's
# possible (and valid in Python) that there's no newline at the
# end of a file, we have to recover even if the user doesn't want
# error recovery.
if self.stack[-1].dfa.from_rule == 'simple_stmt':
try:
plan = self.stack[-1].dfa.transitions[PythonTokenTypes.NEWLINE]
except KeyError:
pass
else:
if plan.next_dfa.is_final and not plan.dfa_pushes:
# We are ignoring here that the newline would be
# required for a simple_stmt.
self.stack[-1].dfa = plan.next_dfa
self._add_token(token)
return
if not self._error_recovery:
return super().error_recovery(token)
def current_suite(stack):
# For now just discard everything that is not a suite or
# file_input, if we detect an error.
for until_index, stack_node in reversed(list(enumerate(stack))):
# `suite` can sometimes be only simple_stmt, not stmt.
if stack_node.nonterminal == 'file_input':
break
elif stack_node.nonterminal == 'suite':
# In the case where we just have a newline we don't want to
# do error recovery here. In all other cases, we want to do
# error recovery.
if len(stack_node.nodes) != 1:
break
return until_index
until_index = current_suite(self.stack)
if self._stack_removal(until_index + 1):
self._add_token(token)
else:
typ, value, start_pos, prefix = token
if typ == INDENT:
# For every deleted INDENT we have to delete a DEDENT as well.
# Otherwise the parser will get into trouble and DEDENT too early.
self._omit_dedent_list.append(self._indent_counter)
error_leaf = tree.PythonErrorLeaf(typ.name, value, start_pos, prefix)
self.stack[-1].nodes.append(error_leaf)
tos = self.stack[-1]
if tos.nonterminal == 'suite':
# Need at least one statement in the suite. This happend with the
# error recovery above.
try:
tos.dfa = tos.dfa.arcs['stmt']
except KeyError:
# We're already in a final state.
pass
def _stack_removal(self, start_index):
all_nodes = [node for stack_node in self.stack[start_index:] for node in stack_node.nodes]
if all_nodes:
node = tree.PythonErrorNode(all_nodes)
for n in all_nodes:
n.parent = node
self.stack[start_index - 1].nodes.append(node)
self.stack[start_index:] = []
return bool(all_nodes)
def _recovery_tokenize(self, tokens):
for token in tokens:
typ = token[0]
if typ == DEDENT:
# We need to count indents, because if we just omit any DEDENT,
# we might omit them in the wrong place.
o = self._omit_dedent_list | self._indent_counter -= 1
continue
self._indent_counter -= 1
elif typ == INDENT:
self._indent_counter += 1
yield token | if o and o[-1] == self._indent_counter:
o.pop() | random_line_split |
parser.py | from parso.python import tree
from parso.python.token import PythonTokenTypes
from parso.parser import BaseParser
NAME = PythonTokenTypes.NAME
INDENT = PythonTokenTypes.INDENT
DEDENT = PythonTokenTypes.DEDENT
class Parser(BaseParser):
"""
This class is used to parse a Python file, it then divides them into a
class structure of different scopes.
:param pgen_grammar: The grammar object of pgen2. Loaded by load_grammar.
"""
node_map = {
'expr_stmt': tree.ExprStmt,
'classdef': tree.Class,
'funcdef': tree.Function,
'file_input': tree.Module,
'import_name': tree.ImportName,
'import_from': tree.ImportFrom,
'break_stmt': tree.KeywordStatement,
'continue_stmt': tree.KeywordStatement,
'return_stmt': tree.ReturnStmt,
'raise_stmt': tree.KeywordStatement,
'yield_expr': tree.YieldExpr,
'del_stmt': tree.KeywordStatement,
'pass_stmt': tree.KeywordStatement,
'global_stmt': tree.GlobalStmt,
'nonlocal_stmt': tree.KeywordStatement,
'print_stmt': tree.KeywordStatement,
'assert_stmt': tree.AssertStmt,
'if_stmt': tree.IfStmt,
'with_stmt': tree.WithStmt,
'for_stmt': tree.ForStmt,
'while_stmt': tree.WhileStmt,
'try_stmt': tree.TryStmt,
'sync_comp_for': tree.SyncCompFor,
# Not sure if this is the best idea, but IMO it's the easiest way to
# avoid extreme amounts of work around the subtle difference of 2/3
# grammar in list comoprehensions.
'decorator': tree.Decorator,
'lambdef': tree.Lambda,
'lambdef_nocond': tree.Lambda,
'namedexpr_test': tree.NamedExpr,
}
default_node = tree.PythonNode
# Names/Keywords are handled separately
_leaf_map = {
PythonTokenTypes.STRING: tree.String,
PythonTokenTypes.NUMBER: tree.Number,
PythonTokenTypes.NEWLINE: tree.Newline,
PythonTokenTypes.ENDMARKER: tree.EndMarker,
PythonTokenTypes.FSTRING_STRING: tree.FStringString,
PythonTokenTypes.FSTRING_START: tree.FStringStart,
PythonTokenTypes.FSTRING_END: tree.FStringEnd,
}
def __init__(self, pgen_grammar, error_recovery=True, start_nonterminal='file_input'):
super().__init__(pgen_grammar, start_nonterminal,
error_recovery=error_recovery)
self.syntax_errors = []
self._omit_dedent_list = []
self._indent_counter = 0
def parse(self, tokens):
if self._error_recovery:
if self._start_nonterminal != 'file_input':
raise NotImplementedError
tokens = self._recovery_tokenize(tokens)
return super().parse(tokens)
def convert_node(self, nonterminal, children):
"""
Convert raw node information to a PythonBaseNode instance.
This is passed to the parser driver which calls it whenever a reduction of a
grammar rule produces a new complete node, so that the tree is build
strictly bottom-up.
"""
try:
node = self.node_map[nonterminal](children)
except KeyError:
if nonterminal == 'suite':
# We don't want the INDENT/DEDENT in our parser tree. Those
# leaves are just cancer. They are virtual leaves and not real
# ones and therefore have pseudo start/end positions and no
# prefixes. Just ignore them.
children = [children[0]] + children[2:-1]
node = self.default_node(nonterminal, children)
for c in children:
c.parent = node
return node
def | (self, type, value, prefix, start_pos):
# print('leaf', repr(value), token.tok_name[type])
if type == NAME:
if value in self._pgen_grammar.reserved_syntax_strings:
return tree.Keyword(value, start_pos, prefix)
else:
return tree.Name(value, start_pos, prefix)
return self._leaf_map.get(type, tree.Operator)(value, start_pos, prefix)
def error_recovery(self, token):
tos_nodes = self.stack[-1].nodes
if tos_nodes:
last_leaf = tos_nodes[-1].get_last_leaf()
else:
last_leaf = None
if self._start_nonterminal == 'file_input' and \
(token.type == PythonTokenTypes.ENDMARKER
or token.type == DEDENT and not last_leaf.value.endswith('\n')
and not last_leaf.value.endswith('\r')):
# In Python statements need to end with a newline. But since it's
# possible (and valid in Python) that there's no newline at the
# end of a file, we have to recover even if the user doesn't want
# error recovery.
if self.stack[-1].dfa.from_rule == 'simple_stmt':
try:
plan = self.stack[-1].dfa.transitions[PythonTokenTypes.NEWLINE]
except KeyError:
pass
else:
if plan.next_dfa.is_final and not plan.dfa_pushes:
# We are ignoring here that the newline would be
# required for a simple_stmt.
self.stack[-1].dfa = plan.next_dfa
self._add_token(token)
return
if not self._error_recovery:
return super().error_recovery(token)
def current_suite(stack):
# For now just discard everything that is not a suite or
# file_input, if we detect an error.
for until_index, stack_node in reversed(list(enumerate(stack))):
# `suite` can sometimes be only simple_stmt, not stmt.
if stack_node.nonterminal == 'file_input':
break
elif stack_node.nonterminal == 'suite':
# In the case where we just have a newline we don't want to
# do error recovery here. In all other cases, we want to do
# error recovery.
if len(stack_node.nodes) != 1:
break
return until_index
until_index = current_suite(self.stack)
if self._stack_removal(until_index + 1):
self._add_token(token)
else:
typ, value, start_pos, prefix = token
if typ == INDENT:
# For every deleted INDENT we have to delete a DEDENT as well.
# Otherwise the parser will get into trouble and DEDENT too early.
self._omit_dedent_list.append(self._indent_counter)
error_leaf = tree.PythonErrorLeaf(typ.name, value, start_pos, prefix)
self.stack[-1].nodes.append(error_leaf)
tos = self.stack[-1]
if tos.nonterminal == 'suite':
# Need at least one statement in the suite. This happend with the
# error recovery above.
try:
tos.dfa = tos.dfa.arcs['stmt']
except KeyError:
# We're already in a final state.
pass
def _stack_removal(self, start_index):
all_nodes = [node for stack_node in self.stack[start_index:] for node in stack_node.nodes]
if all_nodes:
node = tree.PythonErrorNode(all_nodes)
for n in all_nodes:
n.parent = node
self.stack[start_index - 1].nodes.append(node)
self.stack[start_index:] = []
return bool(all_nodes)
def _recovery_tokenize(self, tokens):
for token in tokens:
typ = token[0]
if typ == DEDENT:
# We need to count indents, because if we just omit any DEDENT,
# we might omit them in the wrong place.
o = self._omit_dedent_list
if o and o[-1] == self._indent_counter:
o.pop()
self._indent_counter -= 1
continue
self._indent_counter -= 1
elif typ == INDENT:
self._indent_counter += 1
yield token
| convert_leaf | identifier_name |
parser.py | from parso.python import tree
from parso.python.token import PythonTokenTypes
from parso.parser import BaseParser
NAME = PythonTokenTypes.NAME
INDENT = PythonTokenTypes.INDENT
DEDENT = PythonTokenTypes.DEDENT
class Parser(BaseParser):
"""
This class is used to parse a Python file, it then divides them into a
class structure of different scopes.
:param pgen_grammar: The grammar object of pgen2. Loaded by load_grammar.
"""
node_map = {
'expr_stmt': tree.ExprStmt,
'classdef': tree.Class,
'funcdef': tree.Function,
'file_input': tree.Module,
'import_name': tree.ImportName,
'import_from': tree.ImportFrom,
'break_stmt': tree.KeywordStatement,
'continue_stmt': tree.KeywordStatement,
'return_stmt': tree.ReturnStmt,
'raise_stmt': tree.KeywordStatement,
'yield_expr': tree.YieldExpr,
'del_stmt': tree.KeywordStatement,
'pass_stmt': tree.KeywordStatement,
'global_stmt': tree.GlobalStmt,
'nonlocal_stmt': tree.KeywordStatement,
'print_stmt': tree.KeywordStatement,
'assert_stmt': tree.AssertStmt,
'if_stmt': tree.IfStmt,
'with_stmt': tree.WithStmt,
'for_stmt': tree.ForStmt,
'while_stmt': tree.WhileStmt,
'try_stmt': tree.TryStmt,
'sync_comp_for': tree.SyncCompFor,
# Not sure if this is the best idea, but IMO it's the easiest way to
# avoid extreme amounts of work around the subtle difference of 2/3
# grammar in list comoprehensions.
'decorator': tree.Decorator,
'lambdef': tree.Lambda,
'lambdef_nocond': tree.Lambda,
'namedexpr_test': tree.NamedExpr,
}
default_node = tree.PythonNode
# Names/Keywords are handled separately
_leaf_map = {
PythonTokenTypes.STRING: tree.String,
PythonTokenTypes.NUMBER: tree.Number,
PythonTokenTypes.NEWLINE: tree.Newline,
PythonTokenTypes.ENDMARKER: tree.EndMarker,
PythonTokenTypes.FSTRING_STRING: tree.FStringString,
PythonTokenTypes.FSTRING_START: tree.FStringStart,
PythonTokenTypes.FSTRING_END: tree.FStringEnd,
}
def __init__(self, pgen_grammar, error_recovery=True, start_nonterminal='file_input'):
super().__init__(pgen_grammar, start_nonterminal,
error_recovery=error_recovery)
self.syntax_errors = []
self._omit_dedent_list = []
self._indent_counter = 0
def parse(self, tokens):
if self._error_recovery:
if self._start_nonterminal != 'file_input':
raise NotImplementedError
tokens = self._recovery_tokenize(tokens)
return super().parse(tokens)
def convert_node(self, nonterminal, children):
"""
Convert raw node information to a PythonBaseNode instance.
This is passed to the parser driver which calls it whenever a reduction of a
grammar rule produces a new complete node, so that the tree is build
strictly bottom-up.
"""
try:
node = self.node_map[nonterminal](children)
except KeyError:
if nonterminal == 'suite':
# We don't want the INDENT/DEDENT in our parser tree. Those
# leaves are just cancer. They are virtual leaves and not real
# ones and therefore have pseudo start/end positions and no
# prefixes. Just ignore them.
children = [children[0]] + children[2:-1]
node = self.default_node(nonterminal, children)
for c in children:
c.parent = node
return node
def convert_leaf(self, type, value, prefix, start_pos):
# print('leaf', repr(value), token.tok_name[type])
if type == NAME:
if value in self._pgen_grammar.reserved_syntax_strings:
return tree.Keyword(value, start_pos, prefix)
else:
return tree.Name(value, start_pos, prefix)
return self._leaf_map.get(type, tree.Operator)(value, start_pos, prefix)
def error_recovery(self, token):
tos_nodes = self.stack[-1].nodes
if tos_nodes:
last_leaf = tos_nodes[-1].get_last_leaf()
else:
last_leaf = None
if self._start_nonterminal == 'file_input' and \
(token.type == PythonTokenTypes.ENDMARKER
or token.type == DEDENT and not last_leaf.value.endswith('\n')
and not last_leaf.value.endswith('\r')):
# In Python statements need to end with a newline. But since it's
# possible (and valid in Python) that there's no newline at the
# end of a file, we have to recover even if the user doesn't want
# error recovery.
if self.stack[-1].dfa.from_rule == 'simple_stmt':
try:
plan = self.stack[-1].dfa.transitions[PythonTokenTypes.NEWLINE]
except KeyError:
pass
else:
if plan.next_dfa.is_final and not plan.dfa_pushes:
# We are ignoring here that the newline would be
# required for a simple_stmt.
self.stack[-1].dfa = plan.next_dfa
self._add_token(token)
return
if not self._error_recovery:
return super().error_recovery(token)
def current_suite(stack):
# For now just discard everything that is not a suite or
# file_input, if we detect an error.
|
until_index = current_suite(self.stack)
if self._stack_removal(until_index + 1):
self._add_token(token)
else:
typ, value, start_pos, prefix = token
if typ == INDENT:
# For every deleted INDENT we have to delete a DEDENT as well.
# Otherwise the parser will get into trouble and DEDENT too early.
self._omit_dedent_list.append(self._indent_counter)
error_leaf = tree.PythonErrorLeaf(typ.name, value, start_pos, prefix)
self.stack[-1].nodes.append(error_leaf)
tos = self.stack[-1]
if tos.nonterminal == 'suite':
# Need at least one statement in the suite. This happend with the
# error recovery above.
try:
tos.dfa = tos.dfa.arcs['stmt']
except KeyError:
# We're already in a final state.
pass
def _stack_removal(self, start_index):
all_nodes = [node for stack_node in self.stack[start_index:] for node in stack_node.nodes]
if all_nodes:
node = tree.PythonErrorNode(all_nodes)
for n in all_nodes:
n.parent = node
self.stack[start_index - 1].nodes.append(node)
self.stack[start_index:] = []
return bool(all_nodes)
def _recovery_tokenize(self, tokens):
for token in tokens:
typ = token[0]
if typ == DEDENT:
# We need to count indents, because if we just omit any DEDENT,
# we might omit them in the wrong place.
o = self._omit_dedent_list
if o and o[-1] == self._indent_counter:
o.pop()
self._indent_counter -= 1
continue
self._indent_counter -= 1
elif typ == INDENT:
self._indent_counter += 1
yield token
| for until_index, stack_node in reversed(list(enumerate(stack))):
# `suite` can sometimes be only simple_stmt, not stmt.
if stack_node.nonterminal == 'file_input':
break
elif stack_node.nonterminal == 'suite':
# In the case where we just have a newline we don't want to
# do error recovery here. In all other cases, we want to do
# error recovery.
if len(stack_node.nodes) != 1:
break
return until_index | identifier_body |
advance.rs | // Copyright 2021 TiKV Project Authors. Licensed under Apache-2.0.
use std::sync::{Arc, Mutex};
use std::time::Duration;
use collections::HashMap;
use concurrency_manager::ConcurrencyManager;
use engine_traits::KvEngine;
use futures::compat::Future01CompatExt;
use grpcio::{ChannelBuilder, Environment};
use kvproto::kvrpcpb::{CheckLeaderRequest, LeaderInfo};
use kvproto::metapb::{Peer, PeerRole};
use kvproto::tikvpb::TikvClient;
use pd_client::PdClient;
use protobuf::Message;
use raftstore::store::fsm::StoreMeta;
use raftstore::store::util::RegionReadProgressRegistry;
use security::SecurityManager;
use tikv_util::timer::SteadyTimer;
use tikv_util::worker::Scheduler;
use tokio::runtime::{Builder, Runtime};
use txn_types::TimeStamp;
use crate::endpoint::Task;
use crate::errors::Result;
use crate::metrics::{CHECK_LEADER_REQ_ITEM_COUNT_HISTOGRAM, CHECK_LEADER_REQ_SIZE_HISTOGRAM};
const DEFAULT_CHECK_LEADER_TIMEOUT_MILLISECONDS: u64 = 5_000; // 5s
/// Background worker that periodically advances the resolved-ts of regions.
///
/// It fetches timestamps from PD, confirms region leadership against other
/// stores over gRPC, and reports advance events back to the endpoint through
/// `scheduler`.
pub struct AdvanceTsWorker<E: KvEngine> {
    // Shared store metadata; consulted when resolving leadership per region.
    store_meta: Arc<Mutex<StoreMeta>>,
    // Registry of per-region read progress used to build `LeaderInfo`s.
    region_read_progress: RegionReadProgressRegistry,
    // PD client used to allocate fresh TSOs.
    pd_client: Arc<dyn PdClient>,
    // Timer driving the periodic `RegisterAdvanceEvent` ticks.
    timer: SteadyTimer,
    // Dedicated tokio runtime on which the advance futures are spawned.
    worker: Runtime,
    // Channel back to the resolved-ts endpoint.
    scheduler: Scheduler<Task<E::Snapshot>>,
    /// The concurrency manager for transactions. It's needed for CDC to check locks when
    /// calculating resolved_ts.
    concurrency_manager: ConcurrencyManager,
    // Cached gRPC clients for peer stores: store_id -> client.
    tikv_clients: Arc<Mutex<HashMap<u64, TikvClient>>>,
    // gRPC environment and TLS settings used when creating new clients.
    env: Arc<Environment>,
    security_mgr: Arc<SecurityManager>,
}
impl<E: KvEngine> AdvanceTsWorker<E> {
    /// Creates a worker backed by a dedicated tokio runtime named
    /// "advance-ts". The runtime has timers enabled and a single worker
    /// thread, which is enough to drive the periodic advance futures.
    pub fn new(
        pd_client: Arc<dyn PdClient>,
        scheduler: Scheduler<Task<E::Snapshot>>,
        store_meta: Arc<Mutex<StoreMeta>>,
        region_read_progress: RegionReadProgressRegistry,
        concurrency_manager: ConcurrencyManager,
        env: Arc<Environment>,
        security_mgr: Arc<SecurityManager>,
    ) -> Self {
        let runtime = Builder::new_multi_thread()
            .thread_name("advance-ts")
            .worker_threads(1)
            .enable_time()
            .build()
            .unwrap();
        Self {
            pd_client,
            scheduler,
            store_meta,
            region_read_progress,
            concurrency_manager,
            env,
            security_mgr,
            worker: runtime,
            timer: SteadyTimer::default(),
            // Client cache starts empty; connections are established lazily.
            tikv_clients: Arc::new(Mutex::new(HashMap::default())),
        }
    }
}
impl<E: KvEngine> AdvanceTsWorker<E> {
pub fn advance_ts_for_regions(&self, regions: Vec<u64>) {
let pd_client = self.pd_client.clone();
let scheduler = self.scheduler.clone();
let cm: ConcurrencyManager = self.concurrency_manager.clone();
let env = self.env.clone();
let security_mgr = self.security_mgr.clone();
let store_meta = self.store_meta.clone();
let tikv_clients = self.tikv_clients.clone();
let region_read_progress = self.region_read_progress.clone();
let fut = async move {
// Ignore get tso errors since we will retry every `advance_ts_interval`.
let mut min_ts = pd_client.get_tso().await.unwrap_or_default();
// Sync with concurrency manager so that it can work correctly when optimizations
// like async commit is enabled.
// Note: This step must be done before scheduling `Task::MinTS` task, and the
// resolver must be checked in or after `Task::MinTS`' execution.
cm.update_max_ts(min_ts);
if let Some(min_mem_lock_ts) = cm.global_min_lock_ts() {
if min_mem_lock_ts < min_ts {
min_ts = min_mem_lock_ts;
}
}
let regions = Self::region_resolved_ts_store(
regions,
store_meta,
region_read_progress,
pd_client,
security_mgr,
env,
tikv_clients,
min_ts,
)
.await;
if !regions.is_empty() {
if let Err(e) = scheduler.schedule(Task::AdvanceResolvedTs {
regions,
ts: min_ts,
}) {
info!("failed to schedule advance event"; "err" => ?e);
}
}
};
self.worker.spawn(fut);
}
pub fn register_next_event(&self, advance_ts_interval: Duration, cfg_version: usize) {
let scheduler = self.scheduler.clone();
let timeout = self.timer.delay(advance_ts_interval);
let fut = async move {
let _ = timeout.compat().await;
if let Err(e) = scheduler.schedule(Task::RegisterAdvanceEvent { cfg_version }) {
info!("failed to schedule register advance event"; "err" => ?e);
}
};
self.worker.spawn(fut);
}
// Confirms leadership of region peer before trying to advance resolved ts.
// This function broadcasts a special message to all stores, get the leader id of them to confirm whether
// current peer has a quorum which accept its leadership.
async fn region_resolved_ts_store(
regions: Vec<u64>,
store_meta: Arc<Mutex<StoreMeta>>,
region_read_progress: RegionReadProgressRegistry,
pd_client: Arc<dyn PdClient>,
security_mgr: Arc<SecurityManager>,
env: Arc<Environment>,
cdc_clients: Arc<Mutex<HashMap<u64, TikvClient>>>,
min_ts: TimeStamp,
) -> Vec<u64> {
#[cfg(feature = "failpoint")]
(|| fail_point!("before_sync_replica_read_state", |_| regions))();
let store_id = match store_meta.lock().unwrap().store_id {
Some(id) => id,
None => return vec![],
};
// store_id -> leaders info, record the request to each stores
let mut store_map: HashMap<u64, Vec<LeaderInfo>> = HashMap::default();
// region_id -> region, cache the information of regions
let mut region_map: HashMap<u64, Vec<Peer>> = HashMap::default();
// region_id -> peers id, record the responses
let mut resp_map: HashMap<u64, Vec<u64>> = HashMap::default();
// region_id -> `(Vec<Peer>, LeaderInfo)`
let info_map = region_read_progress.dump_leader_infos(®ions);
for (region_id, (peer_list, leader_info)) in info_map {
let leader_id = leader_info.get_peer_id();
// Check if the leader in this store
if find_store_id(&peer_list, leader_id) != Some(store_id) {
continue;
}
for peer in &peer_list {
if peer.store_id == store_id && peer.id == leader_id {
resp_map.entry(region_id).or_default().push(store_id);
continue;
}
store_map
.entry(peer.store_id)
.or_default()
.push(leader_info.clone());
}
region_map.insert(region_id, peer_list);
}
// Approximate `LeaderInfo` size
let leader_info_size = store_map
.values()
.next()
.map_or(0, |regions| regions[0].compute_size());
let stores = store_map.into_iter().map(|(store_id, regions)| {
let cdc_clients = cdc_clients.clone();
let env = env.clone();
let pd_client = pd_client.clone();
let security_mgr = security_mgr.clone();
let region_num = regions.len() as u32;
CHECK_LEADER_REQ_SIZE_HISTOGRAM.observe((leader_info_size * region_num) as f64);
CHECK_LEADER_REQ_ITEM_COUNT_HISTOGRAM.observe(region_num as f64);
async move {
if cdc_clients.lock().unwrap().get(&store_id).is_none() {
let store = box_try!(pd_client.get_store_async(store_id).await);
let cb = ChannelBuilder::new(env.clone());
let channel = security_mgr.connect(cb, &store.address);
cdc_clients
.lock()
.unwrap()
.insert(store_id, TikvClient::new(channel));
}
let client = cdc_clients.lock().unwrap().get(&store_id).unwrap().clone();
let mut req = CheckLeaderRequest::default();
req.set_regions(regions.into());
req.set_ts(min_ts.into_inner());
let res = box_try!(
tokio::time::timeout(
Duration::from_millis(DEFAULT_CHECK_LEADER_TIMEOUT_MILLISECONDS),
box_try!(client.check_leader_async(&req))
)
.await
);
let resp = box_try!(res);
Result::Ok((store_id, resp))
} | .filter_map(|resp| match resp {
Ok(resp) => Some(resp),
Err(e) => {
debug!("resolved-ts check leader error"; "err" =>?e);
None
}
})
.map(|(store_id, resp)| {
resp.regions
.into_iter()
.map(move |region_id| (store_id, region_id))
})
.flatten()
.for_each(|(store_id, region_id)| {
resp_map.entry(region_id).or_default().push(store_id);
});
resp_map
.into_iter()
.filter_map(|(region_id, stores)| {
if region_has_quorum(®ion_map[®ion_id], &stores) {
Some(region_id)
} else {
debug!(
"resolved-ts cannot get quorum for resolved ts";
"region_id" => region_id,
"stores" => ?stores,
"region" => ?®ion_map[®ion_id]
);
None
}
})
.collect()
}
}
fn region_has_quorum(peers: &[Peer], stores: &[u64]) -> bool {
let mut voters = 0;
let mut incoming_voters = 0;
let mut demoting_voters = 0;
let mut resp_voters = 0;
let mut resp_incoming_voters = 0;
let mut resp_demoting_voters = 0;
peers.iter().for_each(|peer| {
let mut in_resp = false;
for store_id in stores {
if *store_id == peer.store_id {
in_resp = true;
break;
}
}
match peer.get_role() {
PeerRole::Voter => {
voters += 1;
if in_resp {
resp_voters += 1;
}
}
PeerRole::IncomingVoter => {
incoming_voters += 1;
if in_resp {
resp_incoming_voters += 1;
}
}
PeerRole::DemotingVoter => {
demoting_voters += 1;
if in_resp {
resp_demoting_voters += 1;
}
}
PeerRole::Learner => (),
}
});
let has_incoming_majority =
(resp_voters + resp_incoming_voters) >= ((voters + incoming_voters) / 2 + 1);
let has_demoting_majority =
(resp_voters + resp_demoting_voters) >= ((voters + demoting_voters) / 2 + 1);
has_incoming_majority && has_demoting_majority
}
fn find_store_id(peer_list: &[Peer], peer_id: u64) -> Option<u64> {
for peer in peer_list {
if peer.id == peer_id {
return Some(peer.store_id);
}
}
None
} | });
let resps = futures::future::join_all(stores).await;
resps
.into_iter() | random_line_split |
advance.rs | // Copyright 2021 TiKV Project Authors. Licensed under Apache-2.0.
use std::sync::{Arc, Mutex};
use std::time::Duration;
use collections::HashMap;
use concurrency_manager::ConcurrencyManager;
use engine_traits::KvEngine;
use futures::compat::Future01CompatExt;
use grpcio::{ChannelBuilder, Environment};
use kvproto::kvrpcpb::{CheckLeaderRequest, LeaderInfo};
use kvproto::metapb::{Peer, PeerRole};
use kvproto::tikvpb::TikvClient;
use pd_client::PdClient;
use protobuf::Message;
use raftstore::store::fsm::StoreMeta;
use raftstore::store::util::RegionReadProgressRegistry;
use security::SecurityManager;
use tikv_util::timer::SteadyTimer;
use tikv_util::worker::Scheduler;
use tokio::runtime::{Builder, Runtime};
use txn_types::TimeStamp;
use crate::endpoint::Task;
use crate::errors::Result;
use crate::metrics::{CHECK_LEADER_REQ_ITEM_COUNT_HISTOGRAM, CHECK_LEADER_REQ_SIZE_HISTOGRAM};
const DEFAULT_CHECK_LEADER_TIMEOUT_MILLISECONDS: u64 = 5_000; // 5s
pub struct AdvanceTsWorker<E: KvEngine> {
store_meta: Arc<Mutex<StoreMeta>>,
region_read_progress: RegionReadProgressRegistry,
pd_client: Arc<dyn PdClient>,
timer: SteadyTimer,
worker: Runtime,
scheduler: Scheduler<Task<E::Snapshot>>,
/// The concurrency manager for transactions. It's needed for CDC to check locks when
/// calculating resolved_ts.
concurrency_manager: ConcurrencyManager,
// store_id -> client
tikv_clients: Arc<Mutex<HashMap<u64, TikvClient>>>,
env: Arc<Environment>,
security_mgr: Arc<SecurityManager>,
}
impl<E: KvEngine> AdvanceTsWorker<E> {
pub fn new(
pd_client: Arc<dyn PdClient>,
scheduler: Scheduler<Task<E::Snapshot>>,
store_meta: Arc<Mutex<StoreMeta>>,
region_read_progress: RegionReadProgressRegistry,
concurrency_manager: ConcurrencyManager,
env: Arc<Environment>,
security_mgr: Arc<SecurityManager>,
) -> Self |
}
impl<E: KvEngine> AdvanceTsWorker<E> {
pub fn advance_ts_for_regions(&self, regions: Vec<u64>) {
let pd_client = self.pd_client.clone();
let scheduler = self.scheduler.clone();
let cm: ConcurrencyManager = self.concurrency_manager.clone();
let env = self.env.clone();
let security_mgr = self.security_mgr.clone();
let store_meta = self.store_meta.clone();
let tikv_clients = self.tikv_clients.clone();
let region_read_progress = self.region_read_progress.clone();
let fut = async move {
// Ignore get tso errors since we will retry every `advance_ts_interval`.
let mut min_ts = pd_client.get_tso().await.unwrap_or_default();
// Sync with concurrency manager so that it can work correctly when optimizations
// like async commit is enabled.
// Note: This step must be done before scheduling `Task::MinTS` task, and the
// resolver must be checked in or after `Task::MinTS`' execution.
cm.update_max_ts(min_ts);
if let Some(min_mem_lock_ts) = cm.global_min_lock_ts() {
if min_mem_lock_ts < min_ts {
min_ts = min_mem_lock_ts;
}
}
let regions = Self::region_resolved_ts_store(
regions,
store_meta,
region_read_progress,
pd_client,
security_mgr,
env,
tikv_clients,
min_ts,
)
.await;
if !regions.is_empty() {
if let Err(e) = scheduler.schedule(Task::AdvanceResolvedTs {
regions,
ts: min_ts,
}) {
info!("failed to schedule advance event"; "err" => ?e);
}
}
};
self.worker.spawn(fut);
}
pub fn register_next_event(&self, advance_ts_interval: Duration, cfg_version: usize) {
let scheduler = self.scheduler.clone();
let timeout = self.timer.delay(advance_ts_interval);
let fut = async move {
let _ = timeout.compat().await;
if let Err(e) = scheduler.schedule(Task::RegisterAdvanceEvent { cfg_version }) {
info!("failed to schedule register advance event"; "err" => ?e);
}
};
self.worker.spawn(fut);
}
// Confirms leadership of region peer before trying to advance resolved ts.
// This function broadcasts a special message to all stores, get the leader id of them to confirm whether
// current peer has a quorum which accept its leadership.
async fn region_resolved_ts_store(
regions: Vec<u64>,
store_meta: Arc<Mutex<StoreMeta>>,
region_read_progress: RegionReadProgressRegistry,
pd_client: Arc<dyn PdClient>,
security_mgr: Arc<SecurityManager>,
env: Arc<Environment>,
cdc_clients: Arc<Mutex<HashMap<u64, TikvClient>>>,
min_ts: TimeStamp,
) -> Vec<u64> {
#[cfg(feature = "failpoint")]
(|| fail_point!("before_sync_replica_read_state", |_| regions))();
let store_id = match store_meta.lock().unwrap().store_id {
Some(id) => id,
None => return vec![],
};
// store_id -> leaders info, record the request to each stores
let mut store_map: HashMap<u64, Vec<LeaderInfo>> = HashMap::default();
// region_id -> region, cache the information of regions
let mut region_map: HashMap<u64, Vec<Peer>> = HashMap::default();
// region_id -> peers id, record the responses
let mut resp_map: HashMap<u64, Vec<u64>> = HashMap::default();
// region_id -> `(Vec<Peer>, LeaderInfo)`
let info_map = region_read_progress.dump_leader_infos(®ions);
for (region_id, (peer_list, leader_info)) in info_map {
let leader_id = leader_info.get_peer_id();
// Check if the leader in this store
if find_store_id(&peer_list, leader_id) != Some(store_id) {
continue;
}
for peer in &peer_list {
if peer.store_id == store_id && peer.id == leader_id {
resp_map.entry(region_id).or_default().push(store_id);
continue;
}
store_map
.entry(peer.store_id)
.or_default()
.push(leader_info.clone());
}
region_map.insert(region_id, peer_list);
}
// Approximate `LeaderInfo` size
let leader_info_size = store_map
.values()
.next()
.map_or(0, |regions| regions[0].compute_size());
let stores = store_map.into_iter().map(|(store_id, regions)| {
let cdc_clients = cdc_clients.clone();
let env = env.clone();
let pd_client = pd_client.clone();
let security_mgr = security_mgr.clone();
let region_num = regions.len() as u32;
CHECK_LEADER_REQ_SIZE_HISTOGRAM.observe((leader_info_size * region_num) as f64);
CHECK_LEADER_REQ_ITEM_COUNT_HISTOGRAM.observe(region_num as f64);
async move {
if cdc_clients.lock().unwrap().get(&store_id).is_none() {
let store = box_try!(pd_client.get_store_async(store_id).await);
let cb = ChannelBuilder::new(env.clone());
let channel = security_mgr.connect(cb, &store.address);
cdc_clients
.lock()
.unwrap()
.insert(store_id, TikvClient::new(channel));
}
let client = cdc_clients.lock().unwrap().get(&store_id).unwrap().clone();
let mut req = CheckLeaderRequest::default();
req.set_regions(regions.into());
req.set_ts(min_ts.into_inner());
let res = box_try!(
tokio::time::timeout(
Duration::from_millis(DEFAULT_CHECK_LEADER_TIMEOUT_MILLISECONDS),
box_try!(client.check_leader_async(&req))
)
.await
);
let resp = box_try!(res);
Result::Ok((store_id, resp))
}
});
let resps = futures::future::join_all(stores).await;
resps
.into_iter()
.filter_map(|resp| match resp {
Ok(resp) => Some(resp),
Err(e) => {
debug!("resolved-ts check leader error"; "err" =>?e);
None
}
})
.map(|(store_id, resp)| {
resp.regions
.into_iter()
.map(move |region_id| (store_id, region_id))
})
.flatten()
.for_each(|(store_id, region_id)| {
resp_map.entry(region_id).or_default().push(store_id);
});
resp_map
.into_iter()
.filter_map(|(region_id, stores)| {
if region_has_quorum(®ion_map[®ion_id], &stores) {
Some(region_id)
} else {
debug!(
"resolved-ts cannot get quorum for resolved ts";
"region_id" => region_id,
"stores" => ?stores,
"region" => ?®ion_map[®ion_id]
);
None
}
})
.collect()
}
}
fn region_has_quorum(peers: &[Peer], stores: &[u64]) -> bool {
let mut voters = 0;
let mut incoming_voters = 0;
let mut demoting_voters = 0;
let mut resp_voters = 0;
let mut resp_incoming_voters = 0;
let mut resp_demoting_voters = 0;
peers.iter().for_each(|peer| {
let mut in_resp = false;
for store_id in stores {
if *store_id == peer.store_id {
in_resp = true;
break;
}
}
match peer.get_role() {
PeerRole::Voter => {
voters += 1;
if in_resp {
resp_voters += 1;
}
}
PeerRole::IncomingVoter => {
incoming_voters += 1;
if in_resp {
resp_incoming_voters += 1;
}
}
PeerRole::DemotingVoter => {
demoting_voters += 1;
if in_resp {
resp_demoting_voters += 1;
}
}
PeerRole::Learner => (),
}
});
let has_incoming_majority =
(resp_voters + resp_incoming_voters) >= ((voters + incoming_voters) / 2 + 1);
let has_demoting_majority =
(resp_voters + resp_demoting_voters) >= ((voters + demoting_voters) / 2 + 1);
has_incoming_majority && has_demoting_majority
}
fn find_store_id(peer_list: &[Peer], peer_id: u64) -> Option<u64> {
for peer in peer_list {
if peer.id == peer_id {
return Some(peer.store_id);
}
}
None
}
| {
let worker = Builder::new_multi_thread()
.thread_name("advance-ts")
.worker_threads(1)
.enable_time()
.build()
.unwrap();
Self {
env,
security_mgr,
scheduler,
pd_client,
worker,
timer: SteadyTimer::default(),
store_meta,
region_read_progress,
concurrency_manager,
tikv_clients: Arc::new(Mutex::new(HashMap::default())),
}
} | identifier_body |
advance.rs | // Copyright 2021 TiKV Project Authors. Licensed under Apache-2.0.
use std::sync::{Arc, Mutex};
use std::time::Duration;
use collections::HashMap;
use concurrency_manager::ConcurrencyManager;
use engine_traits::KvEngine;
use futures::compat::Future01CompatExt;
use grpcio::{ChannelBuilder, Environment};
use kvproto::kvrpcpb::{CheckLeaderRequest, LeaderInfo};
use kvproto::metapb::{Peer, PeerRole};
use kvproto::tikvpb::TikvClient;
use pd_client::PdClient;
use protobuf::Message;
use raftstore::store::fsm::StoreMeta;
use raftstore::store::util::RegionReadProgressRegistry;
use security::SecurityManager;
use tikv_util::timer::SteadyTimer;
use tikv_util::worker::Scheduler;
use tokio::runtime::{Builder, Runtime};
use txn_types::TimeStamp;
use crate::endpoint::Task;
use crate::errors::Result;
use crate::metrics::{CHECK_LEADER_REQ_ITEM_COUNT_HISTOGRAM, CHECK_LEADER_REQ_SIZE_HISTOGRAM};
const DEFAULT_CHECK_LEADER_TIMEOUT_MILLISECONDS: u64 = 5_000; // 5s
pub struct | <E: KvEngine> {
store_meta: Arc<Mutex<StoreMeta>>,
region_read_progress: RegionReadProgressRegistry,
pd_client: Arc<dyn PdClient>,
timer: SteadyTimer,
worker: Runtime,
scheduler: Scheduler<Task<E::Snapshot>>,
/// The concurrency manager for transactions. It's needed for CDC to check locks when
/// calculating resolved_ts.
concurrency_manager: ConcurrencyManager,
// store_id -> client
tikv_clients: Arc<Mutex<HashMap<u64, TikvClient>>>,
env: Arc<Environment>,
security_mgr: Arc<SecurityManager>,
}
impl<E: KvEngine> AdvanceTsWorker<E> {
pub fn new(
pd_client: Arc<dyn PdClient>,
scheduler: Scheduler<Task<E::Snapshot>>,
store_meta: Arc<Mutex<StoreMeta>>,
region_read_progress: RegionReadProgressRegistry,
concurrency_manager: ConcurrencyManager,
env: Arc<Environment>,
security_mgr: Arc<SecurityManager>,
) -> Self {
let worker = Builder::new_multi_thread()
.thread_name("advance-ts")
.worker_threads(1)
.enable_time()
.build()
.unwrap();
Self {
env,
security_mgr,
scheduler,
pd_client,
worker,
timer: SteadyTimer::default(),
store_meta,
region_read_progress,
concurrency_manager,
tikv_clients: Arc::new(Mutex::new(HashMap::default())),
}
}
}
impl<E: KvEngine> AdvanceTsWorker<E> {
pub fn advance_ts_for_regions(&self, regions: Vec<u64>) {
let pd_client = self.pd_client.clone();
let scheduler = self.scheduler.clone();
let cm: ConcurrencyManager = self.concurrency_manager.clone();
let env = self.env.clone();
let security_mgr = self.security_mgr.clone();
let store_meta = self.store_meta.clone();
let tikv_clients = self.tikv_clients.clone();
let region_read_progress = self.region_read_progress.clone();
let fut = async move {
// Ignore get tso errors since we will retry every `advance_ts_interval`.
let mut min_ts = pd_client.get_tso().await.unwrap_or_default();
// Sync with concurrency manager so that it can work correctly when optimizations
// like async commit is enabled.
// Note: This step must be done before scheduling `Task::MinTS` task, and the
// resolver must be checked in or after `Task::MinTS`' execution.
cm.update_max_ts(min_ts);
if let Some(min_mem_lock_ts) = cm.global_min_lock_ts() {
if min_mem_lock_ts < min_ts {
min_ts = min_mem_lock_ts;
}
}
let regions = Self::region_resolved_ts_store(
regions,
store_meta,
region_read_progress,
pd_client,
security_mgr,
env,
tikv_clients,
min_ts,
)
.await;
if !regions.is_empty() {
if let Err(e) = scheduler.schedule(Task::AdvanceResolvedTs {
regions,
ts: min_ts,
}) {
info!("failed to schedule advance event"; "err" => ?e);
}
}
};
self.worker.spawn(fut);
}
pub fn register_next_event(&self, advance_ts_interval: Duration, cfg_version: usize) {
let scheduler = self.scheduler.clone();
let timeout = self.timer.delay(advance_ts_interval);
let fut = async move {
let _ = timeout.compat().await;
if let Err(e) = scheduler.schedule(Task::RegisterAdvanceEvent { cfg_version }) {
info!("failed to schedule register advance event"; "err" => ?e);
}
};
self.worker.spawn(fut);
}
// Confirms leadership of region peer before trying to advance resolved ts.
// This function broadcasts a special message to all stores, get the leader id of them to confirm whether
// current peer has a quorum which accept its leadership.
async fn region_resolved_ts_store(
regions: Vec<u64>,
store_meta: Arc<Mutex<StoreMeta>>,
region_read_progress: RegionReadProgressRegistry,
pd_client: Arc<dyn PdClient>,
security_mgr: Arc<SecurityManager>,
env: Arc<Environment>,
cdc_clients: Arc<Mutex<HashMap<u64, TikvClient>>>,
min_ts: TimeStamp,
) -> Vec<u64> {
#[cfg(feature = "failpoint")]
(|| fail_point!("before_sync_replica_read_state", |_| regions))();
let store_id = match store_meta.lock().unwrap().store_id {
Some(id) => id,
None => return vec![],
};
// store_id -> leaders info, record the request to each stores
let mut store_map: HashMap<u64, Vec<LeaderInfo>> = HashMap::default();
// region_id -> region, cache the information of regions
let mut region_map: HashMap<u64, Vec<Peer>> = HashMap::default();
// region_id -> peers id, record the responses
let mut resp_map: HashMap<u64, Vec<u64>> = HashMap::default();
// region_id -> `(Vec<Peer>, LeaderInfo)`
let info_map = region_read_progress.dump_leader_infos(®ions);
for (region_id, (peer_list, leader_info)) in info_map {
let leader_id = leader_info.get_peer_id();
// Check if the leader in this store
if find_store_id(&peer_list, leader_id) != Some(store_id) {
continue;
}
for peer in &peer_list {
if peer.store_id == store_id && peer.id == leader_id {
resp_map.entry(region_id).or_default().push(store_id);
continue;
}
store_map
.entry(peer.store_id)
.or_default()
.push(leader_info.clone());
}
region_map.insert(region_id, peer_list);
}
// Approximate `LeaderInfo` size
let leader_info_size = store_map
.values()
.next()
.map_or(0, |regions| regions[0].compute_size());
let stores = store_map.into_iter().map(|(store_id, regions)| {
let cdc_clients = cdc_clients.clone();
let env = env.clone();
let pd_client = pd_client.clone();
let security_mgr = security_mgr.clone();
let region_num = regions.len() as u32;
CHECK_LEADER_REQ_SIZE_HISTOGRAM.observe((leader_info_size * region_num) as f64);
CHECK_LEADER_REQ_ITEM_COUNT_HISTOGRAM.observe(region_num as f64);
async move {
if cdc_clients.lock().unwrap().get(&store_id).is_none() {
let store = box_try!(pd_client.get_store_async(store_id).await);
let cb = ChannelBuilder::new(env.clone());
let channel = security_mgr.connect(cb, &store.address);
cdc_clients
.lock()
.unwrap()
.insert(store_id, TikvClient::new(channel));
}
let client = cdc_clients.lock().unwrap().get(&store_id).unwrap().clone();
let mut req = CheckLeaderRequest::default();
req.set_regions(regions.into());
req.set_ts(min_ts.into_inner());
let res = box_try!(
tokio::time::timeout(
Duration::from_millis(DEFAULT_CHECK_LEADER_TIMEOUT_MILLISECONDS),
box_try!(client.check_leader_async(&req))
)
.await
);
let resp = box_try!(res);
Result::Ok((store_id, resp))
}
});
let resps = futures::future::join_all(stores).await;
resps
.into_iter()
.filter_map(|resp| match resp {
Ok(resp) => Some(resp),
Err(e) => {
debug!("resolved-ts check leader error"; "err" =>?e);
None
}
})
.map(|(store_id, resp)| {
resp.regions
.into_iter()
.map(move |region_id| (store_id, region_id))
})
.flatten()
.for_each(|(store_id, region_id)| {
resp_map.entry(region_id).or_default().push(store_id);
});
resp_map
.into_iter()
.filter_map(|(region_id, stores)| {
if region_has_quorum(®ion_map[®ion_id], &stores) {
Some(region_id)
} else {
debug!(
"resolved-ts cannot get quorum for resolved ts";
"region_id" => region_id,
"stores" => ?stores,
"region" => ?®ion_map[®ion_id]
);
None
}
})
.collect()
}
}
fn region_has_quorum(peers: &[Peer], stores: &[u64]) -> bool {
let mut voters = 0;
let mut incoming_voters = 0;
let mut demoting_voters = 0;
let mut resp_voters = 0;
let mut resp_incoming_voters = 0;
let mut resp_demoting_voters = 0;
peers.iter().for_each(|peer| {
let mut in_resp = false;
for store_id in stores {
if *store_id == peer.store_id {
in_resp = true;
break;
}
}
match peer.get_role() {
PeerRole::Voter => {
voters += 1;
if in_resp {
resp_voters += 1;
}
}
PeerRole::IncomingVoter => {
incoming_voters += 1;
if in_resp {
resp_incoming_voters += 1;
}
}
PeerRole::DemotingVoter => {
demoting_voters += 1;
if in_resp {
resp_demoting_voters += 1;
}
}
PeerRole::Learner => (),
}
});
let has_incoming_majority =
(resp_voters + resp_incoming_voters) >= ((voters + incoming_voters) / 2 + 1);
let has_demoting_majority =
(resp_voters + resp_demoting_voters) >= ((voters + demoting_voters) / 2 + 1);
has_incoming_majority && has_demoting_majority
}
fn find_store_id(peer_list: &[Peer], peer_id: u64) -> Option<u64> {
for peer in peer_list {
if peer.id == peer_id {
return Some(peer.store_id);
}
}
None
}
| AdvanceTsWorker | identifier_name |
25-status.js | /**
* Copyright JS Foundation and other contributors, http://js.foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, | * limitations under the License.
**/
module.exports = function(RED) {
"use strict";
function StatusNode(n) {
RED.nodes.createNode(this,n);
var node = this;
this.scope = n.scope;
this.on("input", function(msg) {
this.send(msg);
});
}
RED.nodes.registerType("status",StatusNode);
} | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and | random_line_split |
25-status.js | /**
* Copyright JS Foundation and other contributors, http://js.foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
module.exports = function(RED) {
"use strict";
function | (n) {
RED.nodes.createNode(this,n);
var node = this;
this.scope = n.scope;
this.on("input", function(msg) {
this.send(msg);
});
}
RED.nodes.registerType("status",StatusNode);
}
| StatusNode | identifier_name |
25-status.js | /**
* Copyright JS Foundation and other contributors, http://js.foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
module.exports = function(RED) {
"use strict";
function StatusNode(n) |
RED.nodes.registerType("status",StatusNode);
}
| {
RED.nodes.createNode(this,n);
var node = this;
this.scope = n.scope;
this.on("input", function(msg) {
this.send(msg);
});
} | identifier_body |
SWIGOUTDIR.py | #!/usr/bin/env python
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION |
"""
Verify that use of the $SWIGOUTDIR variable causes SCons to recognize
that Java files are created in the specified output directory.
"""
import TestSCons
test = TestSCons.TestSCons()
swig = test.where_is('swig')
if not swig:
test.skip_test('Can not find installed "swig", skipping test.\n')
where_java_include=test.java_where_includes()
if not where_java_include:
test.skip_test('Can not find installed Java include files, skipping test.\n')
test.write(['SConstruct'], """\
env = Environment(tools = ['default', 'swig'],
CPPPATH=%(where_java_include)s,
)
Java_foo_interface = env.SharedLibrary(
'Java_foo_interface',
'Java_foo_interface.i',
SWIGOUTDIR = 'java/build dir',
SWIGFLAGS = '-c++ -java -Wall',
SWIGCXXFILESUFFIX = "_wrap.cpp")
""" % locals())
test.write('Java_foo_interface.i', """\
%module foopack
""")
# SCons should realize that it needs to create the "java/build dir"
# subdirectory to hold the generated .java files.
test.run(arguments = '.')
test.must_exist('java/build dir/foopackJNI.java')
test.must_exist('java/build dir/foopack.java')
# SCons should remove the built .java files.
test.run(arguments = '-c')
test.must_not_exist('java/build dir/foopackJNI.java')
test.must_not_exist('java/build dir/foopack.java')
# SCons should realize it needs to rebuild the removed .java files.
test.not_up_to_date(arguments = '.')
test.must_exist('java/build dir/foopackJNI.java')
test.must_exist('java/build dir/foopack.java')
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4: | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "test/SWIG/SWIGOUTDIR.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog" | random_line_split |
SWIGOUTDIR.py | #!/usr/bin/env python
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "test/SWIG/SWIGOUTDIR.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
"""
Verify that use of the $SWIGOUTDIR variable causes SCons to recognize
that Java files are created in the specified output directory.
"""
import TestSCons
test = TestSCons.TestSCons()
swig = test.where_is('swig')
if not swig:
test.skip_test('Can not find installed "swig", skipping test.\n')
where_java_include=test.java_where_includes()
if not where_java_include:
|
test.write(['SConstruct'], """\
env = Environment(tools = ['default', 'swig'],
CPPPATH=%(where_java_include)s,
)
Java_foo_interface = env.SharedLibrary(
'Java_foo_interface',
'Java_foo_interface.i',
SWIGOUTDIR = 'java/build dir',
SWIGFLAGS = '-c++ -java -Wall',
SWIGCXXFILESUFFIX = "_wrap.cpp")
""" % locals())
test.write('Java_foo_interface.i', """\
%module foopack
""")
# SCons should realize that it needs to create the "java/build dir"
# subdirectory to hold the generated .java files.
test.run(arguments = '.')
test.must_exist('java/build dir/foopackJNI.java')
test.must_exist('java/build dir/foopack.java')
# SCons should remove the built .java files.
test.run(arguments = '-c')
test.must_not_exist('java/build dir/foopackJNI.java')
test.must_not_exist('java/build dir/foopack.java')
# SCons should realize it needs to rebuild the removed .java files.
test.not_up_to_date(arguments = '.')
test.must_exist('java/build dir/foopackJNI.java')
test.must_exist('java/build dir/foopack.java')
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| test.skip_test('Can not find installed Java include files, skipping test.\n') | conditional_block |
break-points.spec.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// RxJS Operators used by the classes...
import 'rxjs/add/operator/filter';
import 'rxjs/add/operator/map';
import {TestBed, inject, async} from '@angular/core/testing';
import {BreakPoint} from '../break-point';
import {BREAKPOINTS} from '../break-points-token';
import {DEFAULT_BREAKPOINTS} from './break-points';
describe('break-point-provider', () => {
let breakPoints: BreakPoint[ ];
describe('with default configuration', () => {
beforeEach(() => {
// Configure testbed to prepare services
TestBed.configureTestingModule({
providers: [{provide: BREAKPOINTS, useValue: DEFAULT_BREAKPOINTS}] | })));
it('has the standard breakpoints', () => {
expect(breakPoints.length).toEqual(DEFAULT_BREAKPOINTS.length);
expect(breakPoints[0].alias).toEqual('xs');
expect(breakPoints[breakPoints.length - 1].alias).toEqual('xl');
});
});
describe('with custom configuration', () => {
let bpList;
const CUSTOM_BPS: BreakPoint[] = [
{
alias: 'ab',
suffix: 'Ab',
mediaQuery: '(max-width: 297px)',
overlapping: false
},
{
alias: 'cd',
suffix: 'Cd',
mediaQuery: '(min-width: 298px) and (max-width:414px',
overlapping: false
}
];
beforeEach(() => {
// Configure testbed to prepare services
TestBed.configureTestingModule({
providers: [{provide: BREAKPOINTS, useValue: CUSTOM_BPS}]
});
});
// tslint:disable-next-line:no-shadowed-variable
beforeEach(async(inject([BREAKPOINTS], (breakPoints) => {
bpList = breakPoints;
})));
it('has the custom breakpoints', () => {
expect(bpList.length).toEqual(CUSTOM_BPS.length);
expect(bpList[0].alias).toEqual('ab');
expect(bpList[bpList.length - 1].suffix).toEqual('Cd');
});
});
}); | });
});
beforeEach(async(inject([BREAKPOINTS], (_breakPoints_) => {
breakPoints = _breakPoints_; | random_line_split |
conftest.py | import json
import os
from textwrap import dedent
import boto3
import moto
import pytest
from moto.ec2 import ec2_backend
from moto.ec2 import utils as ec2_utils
from ecs_deplojo.connection import Connection
from ecs_deplojo.task_definitions import TaskDefinition
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
@pytest.yield_fixture(scope="function")
def cluster():
with moto.mock_ecs(), moto.mock_ec2():
boto3.setup_default_session(region_name="eu-west-1")
ec2 = boto3.resource("ec2", region_name="eu-west-1")
ecs = boto3.client("ecs", region_name="eu-west-1")
known_amis = list(ec2_backend.describe_images())
test_instance = ec2.create_instances(
ImageId=known_amis[0].id, MinCount=1, MaxCount=1
)[0]
instance_id_document = json.dumps(
ec2_utils.generate_instance_identity_document(test_instance)
)
cluster = ecs.create_cluster(clusterName="default")
ecs.register_container_instance(
cluster="default", instanceIdentityDocument=instance_id_document
)
yield cluster
@pytest.fixture
def connection(cluster):
return Connection()
@pytest.fixture
def definition():
path = os.path.join(BASE_DIR, "files/default_taskdef.json")
with open(path, "r") as json_file:
return TaskDefinition(json.load(json_file))
@pytest.fixture
def | ():
path = os.path.join(BASE_DIR, "files/default_config.yml")
with open(path, "r") as fh:
yield fh
@pytest.fixture
def example_project(tmpdir):
data = """
{
"family": "default",
"volumes": [],
"containerDefinitions": [
{
"name": "web-1",
"image": "${image}",
"essential": true,
"command": ["hello", "world"],
"memory": 256,
"cpu": 0,
"portMappings": [
{
"containerPort": 8080,
"hostPort": 0
}
]
},
{
"name": "web-2",
"image": "${image}",
"essential": true,
"command": ["hello", "world"],
"memory": 256,
"cpu": 0,
"portMappings": [
{
"containerPort": 8080,
"hostPort": 0
}
]
}
]
}
""".strip()
filename = tmpdir.join("task_definition.json")
filename.write(data)
data = dedent(
"""
---
cluster_name: default
environment:
DATABASE_URL: postgresql://
environment_groups:
group-1:
ENV_CODE: 12345
task_definitions:
web:
template: %(template_filename)s
environment_group: group-1
task_role_arn: my-test
overrides:
web-1:
memory: 512
portMappings:
- hostPort: 0
containerPort: 8080
protocol: tcp
services:
web:
task_definition: web
before_deploy:
- task_definition: web
container: web-1
command: manage.py migrate --noinput
after_deploy:
- task_definition: web
container: web-1
command: manage.py clearsessions
"""
% {"template_filename": filename.strpath}
)
filename = tmpdir.join("config.yml")
filename.write(data)
return filename
| default_config | identifier_name |
conftest.py | import json
import os
from textwrap import dedent
import boto3
import moto
import pytest
from moto.ec2 import ec2_backend
from moto.ec2 import utils as ec2_utils
from ecs_deplojo.connection import Connection
from ecs_deplojo.task_definitions import TaskDefinition
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
@pytest.yield_fixture(scope="function")
def cluster():
with moto.mock_ecs(), moto.mock_ec2():
boto3.setup_default_session(region_name="eu-west-1")
ec2 = boto3.resource("ec2", region_name="eu-west-1")
ecs = boto3.client("ecs", region_name="eu-west-1")
known_amis = list(ec2_backend.describe_images())
test_instance = ec2.create_instances(
ImageId=known_amis[0].id, MinCount=1, MaxCount=1
)[0]
instance_id_document = json.dumps(
ec2_utils.generate_instance_identity_document(test_instance)
)
cluster = ecs.create_cluster(clusterName="default")
ecs.register_container_instance(
cluster="default", instanceIdentityDocument=instance_id_document
)
yield cluster
@pytest.fixture
def connection(cluster):
return Connection()
@pytest.fixture
def definition():
path = os.path.join(BASE_DIR, "files/default_taskdef.json")
with open(path, "r") as json_file:
return TaskDefinition(json.load(json_file))
@pytest.fixture
def default_config():
|
@pytest.fixture
def example_project(tmpdir):
data = """
{
"family": "default",
"volumes": [],
"containerDefinitions": [
{
"name": "web-1",
"image": "${image}",
"essential": true,
"command": ["hello", "world"],
"memory": 256,
"cpu": 0,
"portMappings": [
{
"containerPort": 8080,
"hostPort": 0
}
]
},
{
"name": "web-2",
"image": "${image}",
"essential": true,
"command": ["hello", "world"],
"memory": 256,
"cpu": 0,
"portMappings": [
{
"containerPort": 8080,
"hostPort": 0
}
]
}
]
}
""".strip()
filename = tmpdir.join("task_definition.json")
filename.write(data)
data = dedent(
"""
---
cluster_name: default
environment:
DATABASE_URL: postgresql://
environment_groups:
group-1:
ENV_CODE: 12345
task_definitions:
web:
template: %(template_filename)s
environment_group: group-1
task_role_arn: my-test
overrides:
web-1:
memory: 512
portMappings:
- hostPort: 0
containerPort: 8080
protocol: tcp
services:
web:
task_definition: web
before_deploy:
- task_definition: web
container: web-1
command: manage.py migrate --noinput
after_deploy:
- task_definition: web
container: web-1
command: manage.py clearsessions
"""
% {"template_filename": filename.strpath}
)
filename = tmpdir.join("config.yml")
filename.write(data)
return filename
| path = os.path.join(BASE_DIR, "files/default_config.yml")
with open(path, "r") as fh:
yield fh | identifier_body |
conftest.py | import json
import os
from textwrap import dedent
import boto3
import moto
import pytest
from moto.ec2 import ec2_backend
from moto.ec2 import utils as ec2_utils
from ecs_deplojo.connection import Connection
from ecs_deplojo.task_definitions import TaskDefinition
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
@pytest.yield_fixture(scope="function")
def cluster():
with moto.mock_ecs(), moto.mock_ec2():
boto3.setup_default_session(region_name="eu-west-1")
ec2 = boto3.resource("ec2", region_name="eu-west-1")
ecs = boto3.client("ecs", region_name="eu-west-1")
known_amis = list(ec2_backend.describe_images())
test_instance = ec2.create_instances(
ImageId=known_amis[0].id, MinCount=1, MaxCount=1
)[0]
instance_id_document = json.dumps(
ec2_utils.generate_instance_identity_document(test_instance)
)
cluster = ecs.create_cluster(clusterName="default")
ecs.register_container_instance(
cluster="default", instanceIdentityDocument=instance_id_document
)
yield cluster
@pytest.fixture
def connection(cluster):
return Connection()
@pytest.fixture
def definition():
path = os.path.join(BASE_DIR, "files/default_taskdef.json")
with open(path, "r") as json_file:
return TaskDefinition(json.load(json_file))
@pytest.fixture
def default_config():
path = os.path.join(BASE_DIR, "files/default_config.yml")
with open(path, "r") as fh:
yield fh
@pytest.fixture
def example_project(tmpdir):
data = """
{
"family": "default",
"volumes": [],
"containerDefinitions": [
{
"name": "web-1",
"image": "${image}",
"essential": true,
"command": ["hello", "world"],
"memory": 256,
"cpu": 0,
"portMappings": [
{
"containerPort": 8080,
"hostPort": 0
}
]
},
{
"name": "web-2",
"image": "${image}",
"essential": true,
"command": ["hello", "world"],
"memory": 256,
"cpu": 0,
"portMappings": [
{
"containerPort": 8080,
"hostPort": 0
}
]
}
]
}
""".strip()
filename = tmpdir.join("task_definition.json")
filename.write(data)
data = dedent(
"""
---
cluster_name: default
environment:
DATABASE_URL: postgresql://
environment_groups:
group-1: | ENV_CODE: 12345
task_definitions:
web:
template: %(template_filename)s
environment_group: group-1
task_role_arn: my-test
overrides:
web-1:
memory: 512
portMappings:
- hostPort: 0
containerPort: 8080
protocol: tcp
services:
web:
task_definition: web
before_deploy:
- task_definition: web
container: web-1
command: manage.py migrate --noinput
after_deploy:
- task_definition: web
container: web-1
command: manage.py clearsessions
"""
% {"template_filename": filename.strpath}
)
filename = tmpdir.join("config.yml")
filename.write(data)
return filename | random_line_split | |
day_5.rs | pub use tdd_kata::lcd_kata::day_5::Display;
pub use tdd_kata::lcd_kata::day_5::Number::{One, Two, Three, Four, Five, Six, Seven, Eight, Nine, Zero};
pub use tdd_kata::lcd_kata::day_5::Data::{NotANumber, Output};
pub use expectest::prelude::be_equal_to;
describe! lcd_tests {
before_each {
let mut display = Display::new();
}
it "should output nothing on new display " {
expect!(display.output()).to(be_equal_to(Output(vec![])));
}
it "should output nothing with empty input" {
display.input("");
expect!(display.output()).to(be_equal_to(Output(vec![])));
}
it "should output one" {
display.input("1");
expect!(display.output()).to(be_equal_to(Output(vec![One])));
}
| it "should output all numbers" {
display.input("1234567890");
expect!(display.output()).to(be_equal_to(Output(vec![One, Two, Three, Four, Five, Six, Seven, Eight, Nine, Zero])));
}
it "should show error when input is not a number" {
display.input("abc");
expect!(display.output()).to(be_equal_to(NotANumber));
}
} | random_line_split | |
sagas.js | import { takeLatest, call, put } from 'redux-saga/effects';
import { gql } from 'react-apollo';
import { push } from 'react-router-redux';
import jwtDecode from 'jwt-decode';
import { setJwtToken } from '../../utils/auth';
import { bootstrap } from '../../utils/sagas';
import { registerError, registerSuccess } from './actions';
import { REGISTER } from './constants';
import { loginSuccess } from '../Login/actions';
import { client } from '../../graphql';
import { homePage } from '../../local-urls';
const RegisterMutation = gql`
mutation RegisterMutation($nick: String!, $password: String!, $name: String!, $email: String!){
register(nick: $nick, password: $password, name: $name, email: $email)
}
`;
function sendRegister(user) |
function* register({ user }) {
try {
const response = yield call(sendRegister, user);
const token = response.data.register;
const userInfo = jwtDecode(token);
setJwtToken(token);
yield put(registerSuccess());
yield put(loginSuccess(userInfo));
yield put(push(homePage()));
} catch (e) {
yield put(registerError());
}
}
function* registerSaga() {
yield takeLatest(REGISTER, register);
}
export default bootstrap([
registerSaga,
]);
| {
return client.mutate({ mutation: RegisterMutation, variables: user });
} | identifier_body |
sagas.js | import { takeLatest, call, put } from 'redux-saga/effects';
import { gql } from 'react-apollo';
import { push } from 'react-router-redux';
import jwtDecode from 'jwt-decode';
import { setJwtToken } from '../../utils/auth';
import { bootstrap } from '../../utils/sagas';
import { registerError, registerSuccess } from './actions';
import { REGISTER } from './constants';
import { loginSuccess } from '../Login/actions';
import { client } from '../../graphql';
import { homePage } from '../../local-urls';
const RegisterMutation = gql`
mutation RegisterMutation($nick: String!, $password: String!, $name: String!, $email: String!){
register(nick: $nick, password: $password, name: $name, email: $email)
}
`;
function | (user) {
return client.mutate({ mutation: RegisterMutation, variables: user });
}
function* register({ user }) {
try {
const response = yield call(sendRegister, user);
const token = response.data.register;
const userInfo = jwtDecode(token);
setJwtToken(token);
yield put(registerSuccess());
yield put(loginSuccess(userInfo));
yield put(push(homePage()));
} catch (e) {
yield put(registerError());
}
}
function* registerSaga() {
yield takeLatest(REGISTER, register);
}
export default bootstrap([
registerSaga,
]);
| sendRegister | identifier_name |
print_with_newline.rs | // FIXME: Ideally these suggestions would be fixed via rustfix. Blocked by rust-lang/rust#53934
// // run-rustfix
#![allow(clippy::print_literal)]
#![warn(clippy::print_with_newline)]
fn | () {
print!("Hello\n");
print!("Hello {}\n", "world");
print!("Hello {} {}\n", "world", "#2");
print!("{}\n", 1265);
print!("\n");
// these are all fine
print!("");
print!("Hello");
println!("Hello");
println!("Hello\n");
println!("Hello {}\n", "world");
print!("Issue\n{}", 1265);
print!("{}", 1265);
print!("\n{}", 1275);
print!("\n\n");
print!("like eof\n\n");
print!("Hello {} {}\n\n", "world", "#2");
println!("\ndon't\nwarn\nfor\nmultiple\nnewlines\n"); // #3126
println!("\nbla\n\n"); // #3126
// Escaping
print!("\\n"); // #3514
print!("\\\n"); // should fail
print!("\\\\n");
// Raw strings
print!(r"\n"); // #3778
// Literal newlines should also fail
print!(
"
"
);
print!(
r"
"
);
// Don't warn on CRLF (#4208)
print!("\r\n");
print!("foo\r\n");
print!("\\r\n"); //~ ERROR
print!("foo\rbar\n") // ~ ERROR
}
| main | identifier_name |
print_with_newline.rs | // FIXME: Ideally these suggestions would be fixed via rustfix. Blocked by rust-lang/rust#53934
// // run-rustfix
#![allow(clippy::print_literal)]
#![warn(clippy::print_with_newline)]
fn main() {
print!("Hello\n");
print!("Hello {}\n", "world");
print!("Hello {} {}\n", "world", "#2");
print!("{}\n", 1265);
print!("\n");
// these are all fine
print!("");
print!("Hello");
println!("Hello");
println!("Hello\n");
println!("Hello {}\n", "world");
print!("Issue\n{}", 1265);
print!("{}", 1265);
print!("\n{}", 1275);
print!("\n\n");
print!("like eof\n\n");
print!("Hello {} {}\n\n", "world", "#2");
println!("\ndon't\nwarn\nfor\nmultiple\nnewlines\n"); // #3126
println!("\nbla\n\n"); // #3126
// Escaping
print!("\\n"); // #3514
print!("\\\n"); // should fail
print!("\\\\n");
// Raw strings
print!(r"\n"); // #3778
// Literal newlines should also fail | );
print!(
r"
"
);
// Don't warn on CRLF (#4208)
print!("\r\n");
print!("foo\r\n");
print!("\\r\n"); //~ ERROR
print!("foo\rbar\n") // ~ ERROR
} | print!(
"
" | random_line_split |
print_with_newline.rs | // FIXME: Ideally these suggestions would be fixed via rustfix. Blocked by rust-lang/rust#53934
// // run-rustfix
#![allow(clippy::print_literal)]
#![warn(clippy::print_with_newline)]
fn main() | {
print!("Hello\n");
print!("Hello {}\n", "world");
print!("Hello {} {}\n", "world", "#2");
print!("{}\n", 1265);
print!("\n");
// these are all fine
print!("");
print!("Hello");
println!("Hello");
println!("Hello\n");
println!("Hello {}\n", "world");
print!("Issue\n{}", 1265);
print!("{}", 1265);
print!("\n{}", 1275);
print!("\n\n");
print!("like eof\n\n");
print!("Hello {} {}\n\n", "world", "#2");
println!("\ndon't\nwarn\nfor\nmultiple\nnewlines\n"); // #3126
println!("\nbla\n\n"); // #3126
// Escaping
print!("\\n"); // #3514
print!("\\\n"); // should fail
print!("\\\\n");
// Raw strings
print!(r"\n"); // #3778
// Literal newlines should also fail
print!(
"
"
);
print!(
r"
"
);
// Don't warn on CRLF (#4208)
print!("\r\n");
print!("foo\r\n");
print!("\\r\n"); //~ ERROR
print!("foo\rbar\n") // ~ ERROR
} | identifier_body | |
GW_SCENE_INFORMATION_CHANGED_NTF-test.ts | "use strict";
import { expect } from "chai";
import 'mocha';
import { GW_SCENE_INFORMATION_CHANGED_NTF, SceneChangeType } from "../../src";
describe("KLF200-API", function() {
describe("GW_SCENE_INFORMATION_CHANGED_NTF", function() {
describe("Constructor", function() {
const data = Buffer.from([5, 0x04, 0x19, 0x01, 42]);
it("should create without error", function() {
expect(() => new GW_SCENE_INFORMATION_CHANGED_NTF(data)).not.to.throw();
});
it("should return the scenes ID", function() { | const result = new GW_SCENE_INFORMATION_CHANGED_NTF(data);
expect(result.SceneChangeType).to.equal(SceneChangeType.Modified);
});
});
});
}); | const result = new GW_SCENE_INFORMATION_CHANGED_NTF(data);
expect(result.SceneID).to.equal(42);
});
it("should return the scenes change type", function() { | random_line_split |
app.js | (function () {
var app = angular.module('app', ['firebase', 'ui.router'])
.constant('firebaseUrl', "https://popping-torch-4767.firebaseio.com/");
app.config(function ($stateProvider, $urlRouterProvider, firebaseUrl) {
$urlRouterProvider.otherwise('/home');
$stateProvider
.state('home', {
url: '/home',
templateUrl: 'partial-home.html'
})
.state('authenticated', {
url: '/authenticated',
templateUrl: 'partial-authenticated.html',
resolve: { | var ref = new Firebase(firebaseUrl);
var authObj = $firebaseAuth(ref);
return authObj.$requireAuth();
}]
}
});
});
})(); | // controller will not be loaded until $waitForAuth resolves
// Auth refers to our $firebaseAuth wrapper in the example above
"currentAuth": ["$firebaseAuth", function ($firebaseAuth) {
// $waitForAuth returns a promise so the resolve waits for it to complete | random_line_split |
cmd_interface_02_server_clients.py | #!/usr/bin/env python
# coding: utf-8
#
# StreamBuddy - a video and data streaming serviweng zieleinfahrtce.
# Copyright (c) 2015, Tobias Bleiker & Dumeni Manatschal
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
# Source on github:
# https://github.com/tbleiker/StreamBug
#
import multiprocessing
import time
import zmq
from streambug import cmd_interface
from streambug import mplogger
# set up logging
mplogger.setup(debug=True)
log = mplogger.getLogger()
def server_thread(zeromq_context, address, port_pull, port_pub):
server = cmd_interface.Server(zeromq_context, address, port_pull, port_pub)
server.start()
server.join()
def | (name, role, zeromq_context, address, port_pub, port_pull):
client = cmd_interface.Client(name, role, zeromq_context, address,
port_pub, port_pull)
def test():
return 'test successful'
client.add_command('test', test, 'simple test')
client.start()
# send an update and join client
time.sleep(0.5)
log.info('### Test 1: Send an update.')
client.send_update('This is an update message.')
client.join()
def c1_thread(name, role, zeromq_context, address, port_pub, port_pull):
client = cmd_interface.Client(name, role, zeromq_context, address,
port_pub, port_pull)
def update_func(msg):
log.info('Got update message: {msg}'.format(msg=msg))
client.set_update_func(update_func)
client.start()
time.sleep(2)
log.info('### Test 2: Get server status.')
client.get_server_status()
time.sleep(0.5)
log.info('### Test 3: Request help.')
client.get_help('F1')
time.sleep(0.5)
log.info('### Test 4: Send command test1.')
ret = client.send_cmd('F1', 'test')
log.info('Got: {ret}'.format(ret=ret))
if __name__ == '__main__':
zeromq_context = zmq.Context()
client_c1 = multiprocessing.Process(name='Client-c1', target=c1_thread,
args=('c1', 'commander',
zeromq_context, '0.0.0.0', 7001,
7000))
client_f1 = multiprocessing.Process(name='Client-f1', target=f1_thread,
args=('f1', 'follower', zeromq_context,
'0.0.0.0', 7001, 7000))
server = multiprocessing.Process(name='Server', target=server_thread,
args=(zeromq_context, '0.0.0.0', 7001,
7000))
server.start()
time.sleep(0.5)
log.info('### Starting clients...')
client_f1.start()
time.sleep(0.1)
client_c1.start()
time.sleep(0.1)
client_c1.join()
client_f1.terminate()
server.terminate()
| f1_thread | identifier_name |
cmd_interface_02_server_clients.py | #!/usr/bin/env python
# coding: utf-8
#
# StreamBuddy - a video and data streaming serviweng zieleinfahrtce.
# Copyright (c) 2015, Tobias Bleiker & Dumeni Manatschal
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
# Source on github:
# https://github.com/tbleiker/StreamBug
#
import multiprocessing
import time
import zmq
from streambug import cmd_interface
from streambug import mplogger
# set up logging
mplogger.setup(debug=True)
log = mplogger.getLogger()
def server_thread(zeromq_context, address, port_pull, port_pub):
server = cmd_interface.Server(zeromq_context, address, port_pull, port_pub)
server.start()
server.join()
def f1_thread(name, role, zeromq_context, address, port_pub, port_pull):
client = cmd_interface.Client(name, role, zeromq_context, address,
port_pub, port_pull)
def test():
return 'test successful'
client.add_command('test', test, 'simple test')
client.start()
# send an update and join client
time.sleep(0.5)
log.info('### Test 1: Send an update.')
client.send_update('This is an update message.')
client.join()
def c1_thread(name, role, zeromq_context, address, port_pub, port_pull):
client = cmd_interface.Client(name, role, zeromq_context, address,
port_pub, port_pull)
def update_func(msg):
log.info('Got update message: {msg}'.format(msg=msg))
client.set_update_func(update_func)
client.start()
time.sleep(2)
log.info('### Test 2: Get server status.')
client.get_server_status() | log.info('### Test 3: Request help.')
client.get_help('F1')
time.sleep(0.5)
log.info('### Test 4: Send command test1.')
ret = client.send_cmd('F1', 'test')
log.info('Got: {ret}'.format(ret=ret))
if __name__ == '__main__':
zeromq_context = zmq.Context()
client_c1 = multiprocessing.Process(name='Client-c1', target=c1_thread,
args=('c1', 'commander',
zeromq_context, '0.0.0.0', 7001,
7000))
client_f1 = multiprocessing.Process(name='Client-f1', target=f1_thread,
args=('f1', 'follower', zeromq_context,
'0.0.0.0', 7001, 7000))
server = multiprocessing.Process(name='Server', target=server_thread,
args=(zeromq_context, '0.0.0.0', 7001,
7000))
server.start()
time.sleep(0.5)
log.info('### Starting clients...')
client_f1.start()
time.sleep(0.1)
client_c1.start()
time.sleep(0.1)
client_c1.join()
client_f1.terminate()
server.terminate() |
time.sleep(0.5)
| random_line_split |
cmd_interface_02_server_clients.py | #!/usr/bin/env python
# coding: utf-8
#
# StreamBuddy - a video and data streaming serviweng zieleinfahrtce.
# Copyright (c) 2015, Tobias Bleiker & Dumeni Manatschal
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
# Source on github:
# https://github.com/tbleiker/StreamBug
#
import multiprocessing
import time
import zmq
from streambug import cmd_interface
from streambug import mplogger
# set up logging
mplogger.setup(debug=True)
log = mplogger.getLogger()
def server_thread(zeromq_context, address, port_pull, port_pub):
server = cmd_interface.Server(zeromq_context, address, port_pull, port_pub)
server.start()
server.join()
def f1_thread(name, role, zeromq_context, address, port_pub, port_pull):
client = cmd_interface.Client(name, role, zeromq_context, address,
port_pub, port_pull)
def test():
return 'test successful'
client.add_command('test', test, 'simple test')
client.start()
# send an update and join client
time.sleep(0.5)
log.info('### Test 1: Send an update.')
client.send_update('This is an update message.')
client.join()
def c1_thread(name, role, zeromq_context, address, port_pub, port_pull):
client = cmd_interface.Client(name, role, zeromq_context, address,
port_pub, port_pull)
def update_func(msg):
log.info('Got update message: {msg}'.format(msg=msg))
client.set_update_func(update_func)
client.start()
time.sleep(2)
log.info('### Test 2: Get server status.')
client.get_server_status()
time.sleep(0.5)
log.info('### Test 3: Request help.')
client.get_help('F1')
time.sleep(0.5)
log.info('### Test 4: Send command test1.')
ret = client.send_cmd('F1', 'test')
log.info('Got: {ret}'.format(ret=ret))
if __name__ == '__main__':
| zeromq_context = zmq.Context()
client_c1 = multiprocessing.Process(name='Client-c1', target=c1_thread,
args=('c1', 'commander',
zeromq_context, '0.0.0.0', 7001,
7000))
client_f1 = multiprocessing.Process(name='Client-f1', target=f1_thread,
args=('f1', 'follower', zeromq_context,
'0.0.0.0', 7001, 7000))
server = multiprocessing.Process(name='Server', target=server_thread,
args=(zeromq_context, '0.0.0.0', 7001,
7000))
server.start()
time.sleep(0.5)
log.info('### Starting clients...')
client_f1.start()
time.sleep(0.1)
client_c1.start()
time.sleep(0.1)
client_c1.join()
client_f1.terminate()
server.terminate() | conditional_block | |
cmd_interface_02_server_clients.py | #!/usr/bin/env python
# coding: utf-8
#
# StreamBuddy - a video and data streaming serviweng zieleinfahrtce.
# Copyright (c) 2015, Tobias Bleiker & Dumeni Manatschal
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
# Source on github:
# https://github.com/tbleiker/StreamBug
#
import multiprocessing
import time
import zmq
from streambug import cmd_interface
from streambug import mplogger
# set up logging
mplogger.setup(debug=True)
log = mplogger.getLogger()
def server_thread(zeromq_context, address, port_pull, port_pub):
server = cmd_interface.Server(zeromq_context, address, port_pull, port_pub)
server.start()
server.join()
def f1_thread(name, role, zeromq_context, address, port_pub, port_pull):
|
def c1_thread(name, role, zeromq_context, address, port_pub, port_pull):
client = cmd_interface.Client(name, role, zeromq_context, address,
port_pub, port_pull)
def update_func(msg):
log.info('Got update message: {msg}'.format(msg=msg))
client.set_update_func(update_func)
client.start()
time.sleep(2)
log.info('### Test 2: Get server status.')
client.get_server_status()
time.sleep(0.5)
log.info('### Test 3: Request help.')
client.get_help('F1')
time.sleep(0.5)
log.info('### Test 4: Send command test1.')
ret = client.send_cmd('F1', 'test')
log.info('Got: {ret}'.format(ret=ret))
if __name__ == '__main__':
zeromq_context = zmq.Context()
client_c1 = multiprocessing.Process(name='Client-c1', target=c1_thread,
args=('c1', 'commander',
zeromq_context, '0.0.0.0', 7001,
7000))
client_f1 = multiprocessing.Process(name='Client-f1', target=f1_thread,
args=('f1', 'follower', zeromq_context,
'0.0.0.0', 7001, 7000))
server = multiprocessing.Process(name='Server', target=server_thread,
args=(zeromq_context, '0.0.0.0', 7001,
7000))
server.start()
time.sleep(0.5)
log.info('### Starting clients...')
client_f1.start()
time.sleep(0.1)
client_c1.start()
time.sleep(0.1)
client_c1.join()
client_f1.terminate()
server.terminate()
| client = cmd_interface.Client(name, role, zeromq_context, address,
port_pub, port_pull)
def test():
return 'test successful'
client.add_command('test', test, 'simple test')
client.start()
# send an update and join client
time.sleep(0.5)
log.info('### Test 1: Send an update.')
client.send_update('This is an update message.')
client.join() | identifier_body |
os.js | /*
* These service routines are provided by Brian J. Hartman
* under the following license:
*
* This software is provided "AS IS". No warranty is expressed or implied.
* Permission to copy without fee all or part of this material is granted
* provided that the copies are not made or distributed for direct commercial
* advantage, this copyright notice is included, and notice that copying is
* by permission of the author of this software. To copy otherwise, or to
* republish, requires a fee and/or specific written permission from the author.
* Copyright (c) 1998 - 2003 Brian J. Hartman
* brianhartman@hotmail.com
*/
const symbolTable = {
"TRAP_GETC": 0x0400,
"TRAP_OUT": 0x0430,
"TRAP_PUTS": 0x0450,
"TRAP_IN": 0x04A0,
"TRAP_PUTSP": 0x04E0,
"TRAP_HALT": 0xFD70,
};
const memory = (() => {
let memory = {
//
// Trap vector table (valid entries)
0x0020: 0x0400,
0x0021: 0x0430,
0x0022: 0x0450,
0x0023: 0x04A0,
0x0024: 0x04E0,
0x0025: 0xFD70,
//
// Implementation of GETC
0x0400: 0x3E07,
0x0401: 0xA004,
0x0402: 0x07FE,
0x0403: 0xA003,
0x0404: 0x2E03,
0x0405: 0xC1C0,
0x0406: 0xFE00,
0x0407: 0xFE02,
//
// Implementation of OUT
0x0430: 0x3E0A,
0x0431: 0x3208,
0x0432: 0xA205,
0x0433: 0x07FE,
0x0434: 0xB004,
0x0435: 0x2204,
0x0436: 0x2E04,
0x0437: 0xC1C0,
0x0438: 0xFE04,
0x0439: 0xFE06,
//
// Implementation of PUTS
0x0450: 0x3E16,
0x0451: 0x3012,
0x0452: 0x3212,
0x0453: 0x3412,
0x0454: 0x6200,
0x0455: 0x0405,
0x0456: 0xA409,
0x0457: 0x07FE,
0x0458: 0xB208,
0x0459: 0x1021,
0x045A: 0x0FF9,
0x045B: 0x2008,
0x045C: 0x2208,
0x045D: 0x2408,
0x045E: 0x2E08,
0x045F: 0xC1C0,
0x0460: 0xFE04,
0x0461: 0xFE06,
0x0462: 0xF3FD,
0x0463: 0xF3FE,
//
// Implementation of IN
0x04A0: 0x3E27,
0x04A1: 0x3625,
0x04A2: 0x3423,
0x04A3: 0x3221,
0x04A4: 0x201F,
0x04A5: 0x4813,
0x04A6: 0xE222,
0x04A7: 0x6040,
0x04A8: 0x0403,
0x04A9: 0x480F,
0x04AA: 0x1261,
0x04AB: 0x0FFB,
0x04AC: 0xA616,
0x04AD: 0x07FE,
0x04AE: 0xA013,
0x04AF: 0x1420,
0x04B0: 0x4808,
0x04B1: 0x2012,
0x04B2: 0x4806,
0x04B3: 0x10A0,
0x04B4: 0x2210,
0x04B5: 0x2410,
0x04B6: 0x2610,
0x04B7: 0x2E10,
0x04B8: 0xC1C0,
0x04B9: 0x3E05,
0x04BA: 0xA606,
0x04BB: 0x07FD,
0x04BC: 0xB003,
0x04BD: 0x2E01,
0x04BE: 0xC1C0,
0x04C0: 0xFE06,
0x04C1: 0xFE04,
0x04C2: 0xFE02,
0x04C3: 0xFE00,
//
// Implementation of PUTSP
0x04E0: 0x3E27,
0x04E1: 0x3022,
0x04E2: 0x3222,
0x04E3: 0x3422,
0x04E4: 0x3622,
0x04E5: 0x1220,
0x04E6: 0x6040,
0x04E7: 0x0406,
0x04E8: 0x480D,
0x04E9: 0x2418,
0x04EA: 0x5002,
0x04EB: 0x0402,
0x04EC: 0x1261,
0x04ED: 0x0FF8,
0x04EE: 0x2014,
0x04EF: 0x4806,
0x04F0: 0x2013,
0x04F1: 0x2213,
0x04F2: 0x2413,
0x04F3: 0x2613,
0x04F4: 0x2E13,
0x04F5: 0xC1C0,
0x04F6: 0x3E06,
0x04F7: 0xA607,
0x04F8: 0x0801,
0x04F9: 0x0FFC,
0x04FA: 0xB003,
0x04FB: 0x2E01,
0x04FC: 0xC1C0,
0x04FE: 0xFE06,
0x04FF: 0xFE04,
0x0500: 0xF3FD,
0x0501: 0xF3FE,
0x0502: 0xFF00,
//
// Implementation of HALT
0xFD00: 0x3E3E,
0xFD01: 0x303C,
0xFD02: 0x2007,
0xFD03: 0xF021,
0xFD04: 0xE006,
0xFD05: 0xF022,
0xFD06: 0xF025,
0xFD07: 0x2036,
0xFD08: 0x2E36,
0xFD09: 0xC1C0,
0xFD70: 0x3E0E,
0xFD71: 0x320C,
0xFD72: 0x300A,
0xFD73: 0xE00C,
0xFD74: 0xF022,
0xFD75: 0xA22F,
0xFD76: 0x202F,
0xFD77: 0x5040,
0xFD78: 0xB02C,
0xFD79: 0x2003,
0xFD7A: 0x2203,
0xFD7B: 0x2E03, | //
0xFDA5: 0xFFFE,
0xFDA6: 0x7FFF,
//
// Display status register
0xFE04: 0x8000,
//
// Machine control register
0xFFFE: 0xFFFF,
};
// Fill in the bad trap vectors.
for (let i = 0; i < 0xFF; i++) {
if (memory[i] === undefined) {
memory[i] = 0xFD00;
}
}
// Fill in the halting message.
const haltMessage = "\n----- Halting the processor ----- \n\0";
for (let i = 0; i < haltMessage.length; i++) {
memory[0xFD80 + i] = haltMessage.charCodeAt(i);
}
return memory;
})();
export default {symbolTable, memory}; | 0xFD7C: 0xC1C0,
//
// 0xFD7D, 0xFD7E, and 0xFD7F are callee-saved register locations.
// The "halting the processor" message
// occupies addresses 0xFD80 through 0xFDA4, inclusive. | random_line_split |
os.js | /*
* These service routines are provided by Brian J. Hartman
* under the following license:
*
* This software is provided "AS IS". No warranty is expressed or implied.
* Permission to copy without fee all or part of this material is granted
* provided that the copies are not made or distributed for direct commercial
* advantage, this copyright notice is included, and notice that copying is
* by permission of the author of this software. To copy otherwise, or to
* republish, requires a fee and/or specific written permission from the author.
* Copyright (c) 1998 - 2003 Brian J. Hartman
* brianhartman@hotmail.com
*/
const symbolTable = {
"TRAP_GETC": 0x0400,
"TRAP_OUT": 0x0430,
"TRAP_PUTS": 0x0450,
"TRAP_IN": 0x04A0,
"TRAP_PUTSP": 0x04E0,
"TRAP_HALT": 0xFD70,
};
const memory = (() => {
let memory = {
//
// Trap vector table (valid entries)
0x0020: 0x0400,
0x0021: 0x0430,
0x0022: 0x0450,
0x0023: 0x04A0,
0x0024: 0x04E0,
0x0025: 0xFD70,
//
// Implementation of GETC
0x0400: 0x3E07,
0x0401: 0xA004,
0x0402: 0x07FE,
0x0403: 0xA003,
0x0404: 0x2E03,
0x0405: 0xC1C0,
0x0406: 0xFE00,
0x0407: 0xFE02,
//
// Implementation of OUT
0x0430: 0x3E0A,
0x0431: 0x3208,
0x0432: 0xA205,
0x0433: 0x07FE,
0x0434: 0xB004,
0x0435: 0x2204,
0x0436: 0x2E04,
0x0437: 0xC1C0,
0x0438: 0xFE04,
0x0439: 0xFE06,
//
// Implementation of PUTS
0x0450: 0x3E16,
0x0451: 0x3012,
0x0452: 0x3212,
0x0453: 0x3412,
0x0454: 0x6200,
0x0455: 0x0405,
0x0456: 0xA409,
0x0457: 0x07FE,
0x0458: 0xB208,
0x0459: 0x1021,
0x045A: 0x0FF9,
0x045B: 0x2008,
0x045C: 0x2208,
0x045D: 0x2408,
0x045E: 0x2E08,
0x045F: 0xC1C0,
0x0460: 0xFE04,
0x0461: 0xFE06,
0x0462: 0xF3FD,
0x0463: 0xF3FE,
//
// Implementation of IN
0x04A0: 0x3E27,
0x04A1: 0x3625,
0x04A2: 0x3423,
0x04A3: 0x3221,
0x04A4: 0x201F,
0x04A5: 0x4813,
0x04A6: 0xE222,
0x04A7: 0x6040,
0x04A8: 0x0403,
0x04A9: 0x480F,
0x04AA: 0x1261,
0x04AB: 0x0FFB,
0x04AC: 0xA616,
0x04AD: 0x07FE,
0x04AE: 0xA013,
0x04AF: 0x1420,
0x04B0: 0x4808,
0x04B1: 0x2012,
0x04B2: 0x4806,
0x04B3: 0x10A0,
0x04B4: 0x2210,
0x04B5: 0x2410,
0x04B6: 0x2610,
0x04B7: 0x2E10,
0x04B8: 0xC1C0,
0x04B9: 0x3E05,
0x04BA: 0xA606,
0x04BB: 0x07FD,
0x04BC: 0xB003,
0x04BD: 0x2E01,
0x04BE: 0xC1C0,
0x04C0: 0xFE06,
0x04C1: 0xFE04,
0x04C2: 0xFE02,
0x04C3: 0xFE00,
//
// Implementation of PUTSP
0x04E0: 0x3E27,
0x04E1: 0x3022,
0x04E2: 0x3222,
0x04E3: 0x3422,
0x04E4: 0x3622,
0x04E5: 0x1220,
0x04E6: 0x6040,
0x04E7: 0x0406,
0x04E8: 0x480D,
0x04E9: 0x2418,
0x04EA: 0x5002,
0x04EB: 0x0402,
0x04EC: 0x1261,
0x04ED: 0x0FF8,
0x04EE: 0x2014,
0x04EF: 0x4806,
0x04F0: 0x2013,
0x04F1: 0x2213,
0x04F2: 0x2413,
0x04F3: 0x2613,
0x04F4: 0x2E13,
0x04F5: 0xC1C0,
0x04F6: 0x3E06,
0x04F7: 0xA607,
0x04F8: 0x0801,
0x04F9: 0x0FFC,
0x04FA: 0xB003,
0x04FB: 0x2E01,
0x04FC: 0xC1C0,
0x04FE: 0xFE06,
0x04FF: 0xFE04,
0x0500: 0xF3FD,
0x0501: 0xF3FE,
0x0502: 0xFF00,
//
// Implementation of HALT
0xFD00: 0x3E3E,
0xFD01: 0x303C,
0xFD02: 0x2007,
0xFD03: 0xF021,
0xFD04: 0xE006,
0xFD05: 0xF022,
0xFD06: 0xF025,
0xFD07: 0x2036,
0xFD08: 0x2E36,
0xFD09: 0xC1C0,
0xFD70: 0x3E0E,
0xFD71: 0x320C,
0xFD72: 0x300A,
0xFD73: 0xE00C,
0xFD74: 0xF022,
0xFD75: 0xA22F,
0xFD76: 0x202F,
0xFD77: 0x5040,
0xFD78: 0xB02C,
0xFD79: 0x2003,
0xFD7A: 0x2203,
0xFD7B: 0x2E03,
0xFD7C: 0xC1C0,
//
// 0xFD7D, 0xFD7E, and 0xFD7F are callee-saved register locations.
// The "halting the processor" message
// occupies addresses 0xFD80 through 0xFDA4, inclusive.
//
0xFDA5: 0xFFFE,
0xFDA6: 0x7FFF,
//
// Display status register
0xFE04: 0x8000,
//
// Machine control register
0xFFFE: 0xFFFF,
};
// Fill in the bad trap vectors.
for (let i = 0; i < 0xFF; i++) |
// Fill in the halting message.
const haltMessage = "\n----- Halting the processor ----- \n\0";
for (let i = 0; i < haltMessage.length; i++) {
memory[0xFD80 + i] = haltMessage.charCodeAt(i);
}
return memory;
})();
export default {symbolTable, memory};
| {
if (memory[i] === undefined) {
memory[i] = 0xFD00;
}
} | conditional_block |
urls.py | from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from rest_framework_swagger.views import get_swagger_view
from . import views, views_api
# Django REST framework
router = DefaultRouter()
router.register(r'election', views_api.ElectionInterface)
router.register(r'district', views_api.DistrictInterface)
router.register(r'municipality', views_api.MunicipalityInterface)
router.register(r'party', views_api.PartyInterface)
router.register(r'polling_station', views_api.PollingStationInterface)
router.register(r'list', views_api.ListInterface)
router.register(r'result', views_api.PollingStationResultInterface)
router.register(r'regional_electoral_district', views_api.RegionalElectoralDistrictInterface)
# Django OpenAPI Swagger
schema_view = get_swagger_view(title='Offene Wahlen API')
| url(r'^api/docs$', schema_view)
] | urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^loaderio-eac9628bcae9be5601e1f3c62594d162.txt$', views.load_test, name='load_test'),
url(r'^api/', include(router.urls)), | random_line_split |
GroupInfo.tsx | // TODO: Chance the databse schema to include: creator, current members, lat, long, and radius | import * as React from "react";
import {
BackHandler,
DeviceEventEmitter,
Dimensions,
Platform,
StatusBar,
StyleSheet,
Text,
TextInput,
TouchableOpacity,
View,
ScrollView
} from "react-native";
import {SocialIcon, Button} from "react-native-elements";
var MapView = require('react-native-maps');
import Icon from 'react-native-vector-icons/FontAwesome';
const {width} = Dimensions.get("window");
import * as firebase from "firebase";
import {GooglePlacesAutocomplete} from "react-native-google-places-autocomplete";
interface IProps {
navigation: any;
}
interface IState {
desc: string;
name: string;
region: any
}
const rootRef = firebase.database().ref();
const crowdsRef = rootRef.child("crowds");
class GroupInfo extends React.Component<IProps, IState> {
constructor(props: any) {
super(props);
this.state = {
desc: '',
name: '',
region: {
latitude: 42.405804,
longitude: -71.11956,
latitudeDelta: 0.02,
longitudeDelta: 0.01,
}
};
}
componentDidMount() {
this.getGroupInfo();
}
map: any;
static navigationOptions = ({navigation}) => {
return {
title: 'Crowd Information',
headerTintColor: "#FFFFFF",
gesturesEnabled: false,
headerStyle: {
backgroundColor: "#003EFF",
marginTop: (Platform.OS === 'ios') ? -20 : 0,
},
};
};
public getGroupInfo = () => {
crowdsRef.child(this.props.navigation.state.params.groupID).once("value", (snapshot) => {
let returnObj = snapshot.val();
let region = {
latitude: returnObj.lat,
longitude: returnObj.lng,
latitudeDelta: 0.02,
longitudeDelta: 0.01,
};
this.setState({
region: region,
name: returnObj.name,
desc: returnObj.desc
});
this.map.animateToRegion(region, 2);
})
};
public render() {
return (
<View style={styles.container}>
<StatusBar hidden={true}/>
<View style={styles.mapView}>
<MapView
style={{
height: Dimensions.get('window').height,
width: Dimensions.get('window').width,
margin: 0
}}
initialRegion={this.state.region}
showsUserLocation={true}
ref={ref => {
this.map = ref;
}}
>
{this.state.name !== "" && <MapView.Marker
coordinate={this.state.region}
title={this.state.name}
description={this.state.desc}
/>}
</MapView>
</View>
</View>
);
}
}
const styles = StyleSheet.create({
container: {
flex: 1,
backgroundColor: 'white'
},
image: {
flex: 1,
},
slide: {
backgroundColor: "transparent",
flex: 1,
justifyContent: "center",
},
submitView: {
bottom: 0,
position: "absolute",
},
text: {
color: "#fff",
fontSize: 30,
fontWeight: "bold",
},
textView: {
marginLeft: 40,
marginRight: 40,
},
wrapper: {},
iconText: {
flex: 1,
flexDirection: "row",
marginLeft: 20
},
mapView: {
flex: 1,
}
});
export default GroupInfo; | //
//
//
// | random_line_split |
GroupInfo.tsx | // TODO: Chance the databse schema to include: creator, current members, lat, long, and radius
//
//
//
//
import * as React from "react";
import {
BackHandler,
DeviceEventEmitter,
Dimensions,
Platform,
StatusBar,
StyleSheet,
Text,
TextInput,
TouchableOpacity,
View,
ScrollView
} from "react-native";
import {SocialIcon, Button} from "react-native-elements";
var MapView = require('react-native-maps');
import Icon from 'react-native-vector-icons/FontAwesome';
const {width} = Dimensions.get("window");
import * as firebase from "firebase";
import {GooglePlacesAutocomplete} from "react-native-google-places-autocomplete";
interface IProps {
navigation: any;
}
interface IState {
desc: string;
name: string;
region: any
}
const rootRef = firebase.database().ref();
const crowdsRef = rootRef.child("crowds");
class GroupInfo extends React.Component<IProps, IState> {
constructor(props: any) |
componentDidMount() {
this.getGroupInfo();
}
map: any;
static navigationOptions = ({navigation}) => {
return {
title: 'Crowd Information',
headerTintColor: "#FFFFFF",
gesturesEnabled: false,
headerStyle: {
backgroundColor: "#003EFF",
marginTop: (Platform.OS === 'ios') ? -20 : 0,
},
};
};
public getGroupInfo = () => {
crowdsRef.child(this.props.navigation.state.params.groupID).once("value", (snapshot) => {
let returnObj = snapshot.val();
let region = {
latitude: returnObj.lat,
longitude: returnObj.lng,
latitudeDelta: 0.02,
longitudeDelta: 0.01,
};
this.setState({
region: region,
name: returnObj.name,
desc: returnObj.desc
});
this.map.animateToRegion(region, 2);
})
};
public render() {
return (
<View style={styles.container}>
<StatusBar hidden={true}/>
<View style={styles.mapView}>
<MapView
style={{
height: Dimensions.get('window').height,
width: Dimensions.get('window').width,
margin: 0
}}
initialRegion={this.state.region}
showsUserLocation={true}
ref={ref => {
this.map = ref;
}}
>
{this.state.name !== "" && <MapView.Marker
coordinate={this.state.region}
title={this.state.name}
description={this.state.desc}
/>}
</MapView>
</View>
</View>
);
}
}
const styles = StyleSheet.create({
container: {
flex: 1,
backgroundColor: 'white'
},
image: {
flex: 1,
},
slide: {
backgroundColor: "transparent",
flex: 1,
justifyContent: "center",
},
submitView: {
bottom: 0,
position: "absolute",
},
text: {
color: "#fff",
fontSize: 30,
fontWeight: "bold",
},
textView: {
marginLeft: 40,
marginRight: 40,
},
wrapper: {},
iconText: {
flex: 1,
flexDirection: "row",
marginLeft: 20
},
mapView: {
flex: 1,
}
});
export default GroupInfo;
| {
super(props);
this.state = {
desc: '',
name: '',
region: {
latitude: 42.405804,
longitude: -71.11956,
latitudeDelta: 0.02,
longitudeDelta: 0.01,
}
};
} | identifier_body |
GroupInfo.tsx | // TODO: Chance the databse schema to include: creator, current members, lat, long, and radius
//
//
//
//
import * as React from "react";
import {
BackHandler,
DeviceEventEmitter,
Dimensions,
Platform,
StatusBar,
StyleSheet,
Text,
TextInput,
TouchableOpacity,
View,
ScrollView
} from "react-native";
import {SocialIcon, Button} from "react-native-elements";
var MapView = require('react-native-maps');
import Icon from 'react-native-vector-icons/FontAwesome';
const {width} = Dimensions.get("window");
import * as firebase from "firebase";
import {GooglePlacesAutocomplete} from "react-native-google-places-autocomplete";
interface IProps {
navigation: any;
}
interface IState {
desc: string;
name: string;
region: any
}
const rootRef = firebase.database().ref();
const crowdsRef = rootRef.child("crowds");
class GroupInfo extends React.Component<IProps, IState> {
constructor(props: any) {
super(props);
this.state = {
desc: '',
name: '',
region: {
latitude: 42.405804,
longitude: -71.11956,
latitudeDelta: 0.02,
longitudeDelta: 0.01,
}
};
}
| () {
this.getGroupInfo();
}
map: any;
static navigationOptions = ({navigation}) => {
return {
title: 'Crowd Information',
headerTintColor: "#FFFFFF",
gesturesEnabled: false,
headerStyle: {
backgroundColor: "#003EFF",
marginTop: (Platform.OS === 'ios') ? -20 : 0,
},
};
};
public getGroupInfo = () => {
crowdsRef.child(this.props.navigation.state.params.groupID).once("value", (snapshot) => {
let returnObj = snapshot.val();
let region = {
latitude: returnObj.lat,
longitude: returnObj.lng,
latitudeDelta: 0.02,
longitudeDelta: 0.01,
};
this.setState({
region: region,
name: returnObj.name,
desc: returnObj.desc
});
this.map.animateToRegion(region, 2);
})
};
public render() {
return (
<View style={styles.container}>
<StatusBar hidden={true}/>
<View style={styles.mapView}>
<MapView
style={{
height: Dimensions.get('window').height,
width: Dimensions.get('window').width,
margin: 0
}}
initialRegion={this.state.region}
showsUserLocation={true}
ref={ref => {
this.map = ref;
}}
>
{this.state.name !== "" && <MapView.Marker
coordinate={this.state.region}
title={this.state.name}
description={this.state.desc}
/>}
</MapView>
</View>
</View>
);
}
}
const styles = StyleSheet.create({
container: {
flex: 1,
backgroundColor: 'white'
},
image: {
flex: 1,
},
slide: {
backgroundColor: "transparent",
flex: 1,
justifyContent: "center",
},
submitView: {
bottom: 0,
position: "absolute",
},
text: {
color: "#fff",
fontSize: 30,
fontWeight: "bold",
},
textView: {
marginLeft: 40,
marginRight: 40,
},
wrapper: {},
iconText: {
flex: 1,
flexDirection: "row",
marginLeft: 20
},
mapView: {
flex: 1,
}
});
export default GroupInfo;
| componentDidMount | identifier_name |
test_settings.py | from django.test import override_settings, SimpleTestCase
from arcutils.settings import NO_DEFAULT, PrefixedSettings, get_setting
@override_settings(ARC={
'a': 'a',
'b': [0, 1],
'c': [{'c': 'c'}],
'd': 'd',
})
class TestGetSettings(SimpleTestCase):
def get_setting(self, key, default=NO_DEFAULT):
return get_setting(key, default=default)
def test_can_traverse_into_dict(self):
self.assertEqual(self.get_setting('ARC.a'), 'a')
def test_can_traverse_into_dict_then_list(self):
self.assertEqual(self.get_setting('ARC.b.0'), 0)
def test_can_traverse_into_list_then_dict(self):
self.assertEqual(self.get_setting('ARC.c.0.c'), 'c')
def test_returns_default_for_non_existent_root(self):
default = object()
self.assertIs(self.get_setting('NOPE', default), default)
def test_returns_default_for_non_existent_nested_setting(self):
default = object()
self.assertIs(self.get_setting('ARC.nope', default), default)
def test_raises_when_not_found_and_no_default(self):
self.assertRaises(KeyError, self.get_setting, 'NOPE')
def test_can_traverse_into_string_setting(self):
self.assertEqual(self.get_setting('ARC.d.0'), 'd')
def test_bad_index_causes_type_error(self):
self.assertRaises(TypeError, self.get_setting, 'ARC.b.nope')
@override_settings(CAS={
'extra': 'extra',
'overridden': 'overridden',
})
class TestGetPrefixedSettings(SimpleTestCase): | super().setUp()
defaults = {
'base_url': 'http://example.com/cas/',
'parent': {
'child': 'child',
},
'overridden': 'default',
}
self.settings = PrefixedSettings('CAS', defaults)
def test_get_from_defaults(self):
self.assertEqual(self.settings.get('base_url'), 'http://example.com/cas/')
def test_get_nested_from_defaults(self):
self.assertEqual(self.settings.get('parent.child'), 'child')
def test_get_from_project_settings(self):
self.assertEqual(self.settings.get('extra'), 'extra')
def test_get_setting_overridden_in_project_settings(self):
self.assertEqual(self.settings.get('overridden'), 'overridden')
def test_defaults_trump_passed_default(self):
self.assertEqual(
self.settings.get('base_url', 'http://example.com/other/'),
'http://example.com/cas/')
def test_passed_default_does_not_trump_project_setting(self):
self.assertEqual(self.settings.get('extra', 'default'), 'extra')
def test_get_default_for_nonexistent(self):
self.assertEqual(self.settings.get('pants', 'jeans'), 'jeans') |
def setUp(self): | random_line_split |
test_settings.py | from django.test import override_settings, SimpleTestCase
from arcutils.settings import NO_DEFAULT, PrefixedSettings, get_setting
@override_settings(ARC={
'a': 'a',
'b': [0, 1],
'c': [{'c': 'c'}],
'd': 'd',
})
class TestGetSettings(SimpleTestCase):
def get_setting(self, key, default=NO_DEFAULT):
return get_setting(key, default=default)
def test_can_traverse_into_dict(self):
self.assertEqual(self.get_setting('ARC.a'), 'a')
def test_can_traverse_into_dict_then_list(self):
self.assertEqual(self.get_setting('ARC.b.0'), 0)
def test_can_traverse_into_list_then_dict(self):
self.assertEqual(self.get_setting('ARC.c.0.c'), 'c')
def test_returns_default_for_non_existent_root(self):
default = object()
self.assertIs(self.get_setting('NOPE', default), default)
def test_returns_default_for_non_existent_nested_setting(self):
|
def test_raises_when_not_found_and_no_default(self):
self.assertRaises(KeyError, self.get_setting, 'NOPE')
def test_can_traverse_into_string_setting(self):
self.assertEqual(self.get_setting('ARC.d.0'), 'd')
def test_bad_index_causes_type_error(self):
self.assertRaises(TypeError, self.get_setting, 'ARC.b.nope')
@override_settings(CAS={
'extra': 'extra',
'overridden': 'overridden',
})
class TestGetPrefixedSettings(SimpleTestCase):
def setUp(self):
super().setUp()
defaults = {
'base_url': 'http://example.com/cas/',
'parent': {
'child': 'child',
},
'overridden': 'default',
}
self.settings = PrefixedSettings('CAS', defaults)
def test_get_from_defaults(self):
self.assertEqual(self.settings.get('base_url'), 'http://example.com/cas/')
def test_get_nested_from_defaults(self):
self.assertEqual(self.settings.get('parent.child'), 'child')
def test_get_from_project_settings(self):
self.assertEqual(self.settings.get('extra'), 'extra')
def test_get_setting_overridden_in_project_settings(self):
self.assertEqual(self.settings.get('overridden'), 'overridden')
def test_defaults_trump_passed_default(self):
self.assertEqual(
self.settings.get('base_url', 'http://example.com/other/'),
'http://example.com/cas/')
def test_passed_default_does_not_trump_project_setting(self):
self.assertEqual(self.settings.get('extra', 'default'), 'extra')
def test_get_default_for_nonexistent(self):
self.assertEqual(self.settings.get('pants', 'jeans'), 'jeans')
| default = object()
self.assertIs(self.get_setting('ARC.nope', default), default) | identifier_body |
test_settings.py | from django.test import override_settings, SimpleTestCase
from arcutils.settings import NO_DEFAULT, PrefixedSettings, get_setting
@override_settings(ARC={
'a': 'a',
'b': [0, 1],
'c': [{'c': 'c'}],
'd': 'd',
})
class | (SimpleTestCase):
def get_setting(self, key, default=NO_DEFAULT):
return get_setting(key, default=default)
def test_can_traverse_into_dict(self):
self.assertEqual(self.get_setting('ARC.a'), 'a')
def test_can_traverse_into_dict_then_list(self):
self.assertEqual(self.get_setting('ARC.b.0'), 0)
def test_can_traverse_into_list_then_dict(self):
self.assertEqual(self.get_setting('ARC.c.0.c'), 'c')
def test_returns_default_for_non_existent_root(self):
default = object()
self.assertIs(self.get_setting('NOPE', default), default)
def test_returns_default_for_non_existent_nested_setting(self):
default = object()
self.assertIs(self.get_setting('ARC.nope', default), default)
def test_raises_when_not_found_and_no_default(self):
self.assertRaises(KeyError, self.get_setting, 'NOPE')
def test_can_traverse_into_string_setting(self):
self.assertEqual(self.get_setting('ARC.d.0'), 'd')
def test_bad_index_causes_type_error(self):
self.assertRaises(TypeError, self.get_setting, 'ARC.b.nope')
@override_settings(CAS={
'extra': 'extra',
'overridden': 'overridden',
})
class TestGetPrefixedSettings(SimpleTestCase):
def setUp(self):
super().setUp()
defaults = {
'base_url': 'http://example.com/cas/',
'parent': {
'child': 'child',
},
'overridden': 'default',
}
self.settings = PrefixedSettings('CAS', defaults)
def test_get_from_defaults(self):
self.assertEqual(self.settings.get('base_url'), 'http://example.com/cas/')
def test_get_nested_from_defaults(self):
self.assertEqual(self.settings.get('parent.child'), 'child')
def test_get_from_project_settings(self):
self.assertEqual(self.settings.get('extra'), 'extra')
def test_get_setting_overridden_in_project_settings(self):
self.assertEqual(self.settings.get('overridden'), 'overridden')
def test_defaults_trump_passed_default(self):
self.assertEqual(
self.settings.get('base_url', 'http://example.com/other/'),
'http://example.com/cas/')
def test_passed_default_does_not_trump_project_setting(self):
self.assertEqual(self.settings.get('extra', 'default'), 'extra')
def test_get_default_for_nonexistent(self):
self.assertEqual(self.settings.get('pants', 'jeans'), 'jeans')
| TestGetSettings | identifier_name |
cache.rs | // Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.
use async_trait::async_trait;
use kvproto::coprocessor::Response;
use crate::coprocessor::RequestHandler;
use crate::coprocessor::*;
use crate::storage::Snapshot;
pub struct CachedRequestHandler {
data_version: Option<u64>,
}
impl CachedRequestHandler {
pub fn new<S: Snapshot>(snap: S) -> Self {
Self {
data_version: snap.get_data_version(),
}
}
pub fn builder<S: Snapshot>() -> RequestHandlerBuilder<S> {
Box::new(|snap, _req_ctx: &ReqContext| Ok(CachedRequestHandler::new(snap).into_boxed()))
}
}
#[async_trait]
impl RequestHandler for CachedRequestHandler {
async fn handle_request(&mut self) -> Result<Response> |
}
| {
let mut resp = Response::default();
resp.set_is_cache_hit(true);
if let Some(v) = self.data_version {
resp.set_cache_last_version(v);
}
Ok(resp)
} | identifier_body |
cache.rs | // Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.
use async_trait::async_trait;
use kvproto::coprocessor::Response;
use crate::coprocessor::RequestHandler;
use crate::coprocessor::*;
use crate::storage::Snapshot;
pub struct CachedRequestHandler {
data_version: Option<u64>,
}
impl CachedRequestHandler {
pub fn new<S: Snapshot>(snap: S) -> Self {
Self {
data_version: snap.get_data_version(),
}
}
pub fn builder<S: Snapshot>() -> RequestHandlerBuilder<S> {
Box::new(|snap, _req_ctx: &ReqContext| Ok(CachedRequestHandler::new(snap).into_boxed()))
}
}
#[async_trait]
impl RequestHandler for CachedRequestHandler {
async fn handle_request(&mut self) -> Result<Response> {
let mut resp = Response::default(); | resp.set_cache_last_version(v);
}
Ok(resp)
}
} | resp.set_is_cache_hit(true);
if let Some(v) = self.data_version { | random_line_split |
cache.rs | // Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.
use async_trait::async_trait;
use kvproto::coprocessor::Response;
use crate::coprocessor::RequestHandler;
use crate::coprocessor::*;
use crate::storage::Snapshot;
pub struct CachedRequestHandler {
data_version: Option<u64>,
}
impl CachedRequestHandler {
pub fn | <S: Snapshot>(snap: S) -> Self {
Self {
data_version: snap.get_data_version(),
}
}
pub fn builder<S: Snapshot>() -> RequestHandlerBuilder<S> {
Box::new(|snap, _req_ctx: &ReqContext| Ok(CachedRequestHandler::new(snap).into_boxed()))
}
}
#[async_trait]
impl RequestHandler for CachedRequestHandler {
async fn handle_request(&mut self) -> Result<Response> {
let mut resp = Response::default();
resp.set_is_cache_hit(true);
if let Some(v) = self.data_version {
resp.set_cache_last_version(v);
}
Ok(resp)
}
}
| new | identifier_name |
cache.rs | // Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.
use async_trait::async_trait;
use kvproto::coprocessor::Response;
use crate::coprocessor::RequestHandler;
use crate::coprocessor::*;
use crate::storage::Snapshot;
pub struct CachedRequestHandler {
data_version: Option<u64>,
}
impl CachedRequestHandler {
pub fn new<S: Snapshot>(snap: S) -> Self {
Self {
data_version: snap.get_data_version(),
}
}
pub fn builder<S: Snapshot>() -> RequestHandlerBuilder<S> {
Box::new(|snap, _req_ctx: &ReqContext| Ok(CachedRequestHandler::new(snap).into_boxed()))
}
}
#[async_trait]
impl RequestHandler for CachedRequestHandler {
async fn handle_request(&mut self) -> Result<Response> {
let mut resp = Response::default();
resp.set_is_cache_hit(true);
if let Some(v) = self.data_version |
Ok(resp)
}
}
| {
resp.set_cache_last_version(v);
} | conditional_block |
igcformat.js | goog.provide('ol.format.IGC');
goog.provide('ol.format.IGCZ');
goog.require('goog.asserts');
goog.require('goog.string');
goog.require('goog.string.newlines');
goog.require('ol.Feature');
goog.require('ol.format.Feature');
goog.require('ol.format.TextFeature');
goog.require('ol.geom.LineString');
goog.require('ol.proj');
/**
* IGC altitude/z. One of 'barometric', 'gps', 'none'.
* @enum {string}
* @api
*/
ol.format.IGCZ = {
BAROMETRIC: 'barometric',
GPS: 'gps',
NONE: 'none'
};
/**
* @classdesc
* Feature format for `*.igc` flight recording files. | * @api
*/
ol.format.IGC = function(opt_options) {
var options = goog.isDef(opt_options) ? opt_options : {};
goog.base(this);
/**
* @inheritDoc
*/
this.defaultDataProjection = ol.proj.get('EPSG:4326');
/**
* @private
* @type {ol.format.IGCZ}
*/
this.altitudeMode_ = goog.isDef(options.altitudeMode) ?
options.altitudeMode : ol.format.IGCZ.NONE;
};
goog.inherits(ol.format.IGC, ol.format.TextFeature);
/**
* @const
* @type {Array.<string>}
* @private
*/
ol.format.IGC.EXTENSIONS_ = ['.igc'];
/**
* @const
* @type {RegExp}
* @private
*/
ol.format.IGC.B_RECORD_RE_ =
/^B(\d{2})(\d{2})(\d{2})(\d{2})(\d{5})([NS])(\d{3})(\d{5})([EW])([AV])(\d{5})(\d{5})/;
/**
* @const
* @type {RegExp}
* @private
*/
ol.format.IGC.H_RECORD_RE_ = /^H.([A-Z]{3}).*?:(.*)/;
/**
* @const
* @type {RegExp}
* @private
*/
ol.format.IGC.HFDTE_RECORD_RE_ = /^HFDTE(\d{2})(\d{2})(\d{2})/;
/**
* @inheritDoc
*/
ol.format.IGC.prototype.getExtensions = function() {
return ol.format.IGC.EXTENSIONS_;
};
/**
* Read the feature from the IGC source.
*
* @function
* @param {ArrayBuffer|Document|Node|Object|string} source Source.
* @param {olx.format.ReadOptions=} opt_options Read options.
* @return {ol.Feature} Feature.
* @api
*/
ol.format.IGC.prototype.readFeature;
/**
* @inheritDoc
*/
ol.format.IGC.prototype.readFeatureFromText = function(text, opt_options) {
var altitudeMode = this.altitudeMode_;
var lines = goog.string.newlines.splitLines(text);
/** @type {Object.<string, string>} */
var properties = {};
var flatCoordinates = [];
var year = 2000;
var month = 0;
var day = 1;
var i, ii;
for (i = 0, ii = lines.length; i < ii; ++i) {
var line = lines[i];
var m;
if (line.charAt(0) == 'B') {
m = ol.format.IGC.B_RECORD_RE_.exec(line);
if (m) {
var hour = parseInt(m[1], 10);
var minute = parseInt(m[2], 10);
var second = parseInt(m[3], 10);
var y = parseInt(m[4], 10) + parseInt(m[5], 10) / 60000;
if (m[6] == 'S') {
y = -y;
}
var x = parseInt(m[7], 10) + parseInt(m[8], 10) / 60000;
if (m[9] == 'W') {
x = -x;
}
flatCoordinates.push(x, y);
if (altitudeMode != ol.format.IGCZ.NONE) {
var z;
if (altitudeMode == ol.format.IGCZ.GPS) {
z = parseInt(m[11], 10);
} else if (altitudeMode == ol.format.IGCZ.BAROMETRIC) {
z = parseInt(m[12], 10);
} else {
goog.asserts.fail();
z = 0;
}
flatCoordinates.push(z);
}
var dateTime = Date.UTC(year, month, day, hour, minute, second);
flatCoordinates.push(dateTime / 1000);
}
} else if (line.charAt(0) == 'H') {
m = ol.format.IGC.HFDTE_RECORD_RE_.exec(line);
if (m) {
day = parseInt(m[1], 10);
month = parseInt(m[2], 10) - 1;
year = 2000 + parseInt(m[3], 10);
} else {
m = ol.format.IGC.H_RECORD_RE_.exec(line);
if (m) {
properties[m[1]] = goog.string.trim(m[2]);
m = ol.format.IGC.HFDTE_RECORD_RE_.exec(line);
}
}
}
}
if (flatCoordinates.length === 0) {
return null;
}
var lineString = new ol.geom.LineString(null);
var layout = altitudeMode == ol.format.IGCZ.NONE ?
ol.geom.GeometryLayout.XYM : ol.geom.GeometryLayout.XYZM;
lineString.setFlatCoordinates(layout, flatCoordinates);
var feature = new ol.Feature(ol.format.Feature.transformWithOptions(
lineString, false, opt_options));
feature.setProperties(properties);
return feature;
};
/**
* Read the feature from the source. As IGC sources contain a single
* feature, this will return the feature in an array.
*
* @function
* @param {ArrayBuffer|Document|Node|Object|string} source Source.
* @param {olx.format.ReadOptions=} opt_options Read options.
* @return {Array.<ol.Feature>} Features.
* @api
*/
ol.format.IGC.prototype.readFeatures;
/**
* @inheritDoc
*/
ol.format.IGC.prototype.readFeaturesFromText = function(text, opt_options) {
var feature = this.readFeatureFromText(text, opt_options);
if (!goog.isNull(feature)) {
return [feature];
} else {
return [];
}
};
/**
* Read the projection from the IGC source.
*
* @function
* @param {ArrayBuffer|Document|Node|Object|string} source Source.
* @return {ol.proj.Projection} Projection.
* @api
*/
ol.format.IGC.prototype.readProjection;
/**
* @inheritDoc
*/
ol.format.IGC.prototype.readProjectionFromText = function(text) {
return this.defaultDataProjection;
}; | *
* @constructor
* @extends {ol.format.TextFeature}
* @param {olx.format.IGCOptions=} opt_options Options. | random_line_split |
igcformat.js | goog.provide('ol.format.IGC');
goog.provide('ol.format.IGCZ');
goog.require('goog.asserts');
goog.require('goog.string');
goog.require('goog.string.newlines');
goog.require('ol.Feature');
goog.require('ol.format.Feature');
goog.require('ol.format.TextFeature');
goog.require('ol.geom.LineString');
goog.require('ol.proj');
/**
* IGC altitude/z. One of 'barometric', 'gps', 'none'.
* @enum {string}
* @api
*/
ol.format.IGCZ = {
BAROMETRIC: 'barometric',
GPS: 'gps',
NONE: 'none'
};
/**
* @classdesc
* Feature format for `*.igc` flight recording files.
*
* @constructor
* @extends {ol.format.TextFeature}
* @param {olx.format.IGCOptions=} opt_options Options.
* @api
*/
ol.format.IGC = function(opt_options) {
var options = goog.isDef(opt_options) ? opt_options : {};
goog.base(this);
/**
* @inheritDoc
*/
this.defaultDataProjection = ol.proj.get('EPSG:4326');
/**
* @private
* @type {ol.format.IGCZ}
*/
this.altitudeMode_ = goog.isDef(options.altitudeMode) ?
options.altitudeMode : ol.format.IGCZ.NONE;
};
goog.inherits(ol.format.IGC, ol.format.TextFeature);
/**
* @const
* @type {Array.<string>}
* @private
*/
ol.format.IGC.EXTENSIONS_ = ['.igc'];
/**
* @const
* @type {RegExp}
* @private
*/
ol.format.IGC.B_RECORD_RE_ =
/^B(\d{2})(\d{2})(\d{2})(\d{2})(\d{5})([NS])(\d{3})(\d{5})([EW])([AV])(\d{5})(\d{5})/;
/**
* @const
* @type {RegExp}
* @private
*/
ol.format.IGC.H_RECORD_RE_ = /^H.([A-Z]{3}).*?:(.*)/;
/**
* @const
* @type {RegExp}
* @private
*/
ol.format.IGC.HFDTE_RECORD_RE_ = /^HFDTE(\d{2})(\d{2})(\d{2})/;
/**
* @inheritDoc
*/
ol.format.IGC.prototype.getExtensions = function() {
return ol.format.IGC.EXTENSIONS_;
};
/**
* Read the feature from the IGC source.
*
* @function
* @param {ArrayBuffer|Document|Node|Object|string} source Source.
* @param {olx.format.ReadOptions=} opt_options Read options.
* @return {ol.Feature} Feature.
* @api
*/
ol.format.IGC.prototype.readFeature;
/**
* @inheritDoc
*/
ol.format.IGC.prototype.readFeatureFromText = function(text, opt_options) {
var altitudeMode = this.altitudeMode_;
var lines = goog.string.newlines.splitLines(text);
/** @type {Object.<string, string>} */
var properties = {};
var flatCoordinates = [];
var year = 2000;
var month = 0;
var day = 1;
var i, ii;
for (i = 0, ii = lines.length; i < ii; ++i) {
var line = lines[i];
var m;
if (line.charAt(0) == 'B') {
m = ol.format.IGC.B_RECORD_RE_.exec(line);
if (m) {
var hour = parseInt(m[1], 10);
var minute = parseInt(m[2], 10);
var second = parseInt(m[3], 10);
var y = parseInt(m[4], 10) + parseInt(m[5], 10) / 60000;
if (m[6] == 'S') {
y = -y;
}
var x = parseInt(m[7], 10) + parseInt(m[8], 10) / 60000;
if (m[9] == 'W') {
x = -x;
}
flatCoordinates.push(x, y);
if (altitudeMode != ol.format.IGCZ.NONE) {
var z;
if (altitudeMode == ol.format.IGCZ.GPS) {
z = parseInt(m[11], 10);
} else if (altitudeMode == ol.format.IGCZ.BAROMETRIC) | else {
goog.asserts.fail();
z = 0;
}
flatCoordinates.push(z);
}
var dateTime = Date.UTC(year, month, day, hour, minute, second);
flatCoordinates.push(dateTime / 1000);
}
} else if (line.charAt(0) == 'H') {
m = ol.format.IGC.HFDTE_RECORD_RE_.exec(line);
if (m) {
day = parseInt(m[1], 10);
month = parseInt(m[2], 10) - 1;
year = 2000 + parseInt(m[3], 10);
} else {
m = ol.format.IGC.H_RECORD_RE_.exec(line);
if (m) {
properties[m[1]] = goog.string.trim(m[2]);
m = ol.format.IGC.HFDTE_RECORD_RE_.exec(line);
}
}
}
}
if (flatCoordinates.length === 0) {
return null;
}
var lineString = new ol.geom.LineString(null);
var layout = altitudeMode == ol.format.IGCZ.NONE ?
ol.geom.GeometryLayout.XYM : ol.geom.GeometryLayout.XYZM;
lineString.setFlatCoordinates(layout, flatCoordinates);
var feature = new ol.Feature(ol.format.Feature.transformWithOptions(
lineString, false, opt_options));
feature.setProperties(properties);
return feature;
};
/**
* Read the feature from the source. As IGC sources contain a single
* feature, this will return the feature in an array.
*
* @function
* @param {ArrayBuffer|Document|Node|Object|string} source Source.
* @param {olx.format.ReadOptions=} opt_options Read options.
* @return {Array.<ol.Feature>} Features.
* @api
*/
ol.format.IGC.prototype.readFeatures;
/**
* @inheritDoc
*/
ol.format.IGC.prototype.readFeaturesFromText = function(text, opt_options) {
var feature = this.readFeatureFromText(text, opt_options);
if (!goog.isNull(feature)) {
return [feature];
} else {
return [];
}
};
/**
* Read the projection from the IGC source.
*
* @function
* @param {ArrayBuffer|Document|Node|Object|string} source Source.
* @return {ol.proj.Projection} Projection.
* @api
*/
ol.format.IGC.prototype.readProjection;
/**
* @inheritDoc
*/
ol.format.IGC.prototype.readProjectionFromText = function(text) {
return this.defaultDataProjection;
};
| {
z = parseInt(m[12], 10);
} | conditional_block |
language.ts | <message>
<source>English</source>
<translation>Angļu</translation>
</message>
<message>
<source>default</source>
<translation>noklusētā</translation>
</message>
<message>
<source>Language</source>
<translation>Valoda</translation>
</message>
<message>
<source><qt>Attention, all windows will be closed by changing the language
without saving the Data.<br><br>Go on?</qt></source>
<translation><qt>Uzmanību, mainot valodu visi logi tiks aizvērti
nesaglabājot datus.<br><br>Turpināt?</qt></translation>
</message>
</context>
<context>
<name>LanguageSettingsBase</name>
<message>
<source>Language Settings</source>
<translation>Valodas Uzstādījumi</translation>
</message>
<message>
<source>Select language</source>
<translation>Izvēlaties valodu</translation>
</message>
</context>
</TS> | <!DOCTYPE TS><TS>
<context>
<name>LanguageSettings</name> | random_line_split | |
error.rs | /*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use crate::config::ValidationError;
use prometheus::Error as MetricsError;
#[cfg(doc)]
use crate::filters::{Filter, FilterFactory};
/// An error that occurred when attempting to create a [`Filter`] from
/// a [`FilterFactory`].
#[derive(Debug, PartialEq, thiserror::Error)]
pub enum Error {
#[error("filter `{}` not found", .0)]
NotFound(String),
#[error("filter `{}` requires configuration, but none provided", .0)]
MissingConfig(&'static str),
#[error("field `{}` is invalid, reason: {}", field, reason)]
FieldInvalid { field: String, reason: String },
#[error("Deserialization failed: {}", .0)]
DeserializeFailed(String),
#[error("Failed to initialize metrics: {}", .0)]
InitializeMetricsFailed(String),
#[error("Protobuf error: {}", .0)]
ConvertProtoConfig(ConvertProtoConfigError),
}
impl From<Error> for ValidationError {
fn from(error: Error) -> Self {
Self::FilterInvalid(error)
}
}
impl From<MetricsError> for Error {
fn from(error: MetricsError) -> Self |
}
/// An error representing failure to convert a filter's protobuf configuration
/// to its static representation.
#[derive(Debug, PartialEq, thiserror::Error)]
#[error(
"{}failed to convert protobuf config: {}",
self.field.as_ref().map(|f| format!("Field `{f}`")).unwrap_or_default(),
reason
)]
pub struct ConvertProtoConfigError {
/// Reason for the failure.
reason: String,
/// Set if the failure is specific to a single field in the config.
field: Option<String>,
}
impl ConvertProtoConfigError {
pub fn new(reason: impl std::fmt::Display, field: Option<String>) -> Self {
Self {
reason: reason.to_string(),
field,
}
}
}
/// Returns a [`ConvertProtoConfigError`] with an error message when
/// an invalid proto enum value was provided in a filter's proto config.
#[macro_export]
macro_rules! enum_no_match_error {
(
field = $field:literal,
invalid_value = $invalid_value:ident,
enum_type = $enum_type:ty,
allowed_values = [ $( $allowed_value:tt ),+ ]
) => {
Err($crate::filters::error::ConvertProtoConfigError::new(
format!(
"invalid value `{}` provided: allowed values are {}",
$invalid_value,
vec![
$( (stringify!($allowed_value), <$enum_type>::$allowed_value as i32) ),+
]
.into_iter()
.map(|(a, b)| format!("{a} => {}", b as i32))
.collect::<Vec<_>>()
.join(", ")
),
Some($field.into()),
))
};
}
/// Maps an integer from a protobuf enum value to a target enum variant.
/// Both protobuf and target enum must have similar variants.
/// The protobuf enum variant should be cast-able to an i32
/// Returns an `OK` Result with the target enum variant otherwise [`ConvertProtoConfigError`]
/// if the provided value does not map to any enum variant.
#[macro_export]
macro_rules! map_proto_enum {
(
value = $value:expr,
field = $field:literal,
proto_enum_type = $proto_enum_type:ty,
target_enum_type = $target_enum_type:ty,
variants = [ $( $variant:tt ),+ ]
) => {
match $value {
$( v if v == <$proto_enum_type>::$variant as i32 => Ok(<$target_enum_type>::$variant) ),+,
invalid => $crate::enum_no_match_error!(
field = $field,
invalid_value = invalid,
enum_type = $proto_enum_type,
allowed_values = [ $( $variant ),+ ]
)
}
}
}
| {
Error::InitializeMetricsFailed(error.to_string())
} | identifier_body |
error.rs | /*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use crate::config::ValidationError;
use prometheus::Error as MetricsError;
#[cfg(doc)]
use crate::filters::{Filter, FilterFactory};
/// An error that occurred when attempting to create a [`Filter`] from | #[derive(Debug, PartialEq, thiserror::Error)]
pub enum Error {
#[error("filter `{}` not found", .0)]
NotFound(String),
#[error("filter `{}` requires configuration, but none provided", .0)]
MissingConfig(&'static str),
#[error("field `{}` is invalid, reason: {}", field, reason)]
FieldInvalid { field: String, reason: String },
#[error("Deserialization failed: {}", .0)]
DeserializeFailed(String),
#[error("Failed to initialize metrics: {}", .0)]
InitializeMetricsFailed(String),
#[error("Protobuf error: {}", .0)]
ConvertProtoConfig(ConvertProtoConfigError),
}
impl From<Error> for ValidationError {
fn from(error: Error) -> Self {
Self::FilterInvalid(error)
}
}
impl From<MetricsError> for Error {
fn from(error: MetricsError) -> Self {
Error::InitializeMetricsFailed(error.to_string())
}
}
/// An error representing failure to convert a filter's protobuf configuration
/// to its static representation.
#[derive(Debug, PartialEq, thiserror::Error)]
#[error(
"{}failed to convert protobuf config: {}",
self.field.as_ref().map(|f| format!("Field `{f}`")).unwrap_or_default(),
reason
)]
pub struct ConvertProtoConfigError {
/// Reason for the failure.
reason: String,
/// Set if the failure is specific to a single field in the config.
field: Option<String>,
}
impl ConvertProtoConfigError {
pub fn new(reason: impl std::fmt::Display, field: Option<String>) -> Self {
Self {
reason: reason.to_string(),
field,
}
}
}
/// Returns a [`ConvertProtoConfigError`] with an error message when
/// an invalid proto enum value was provided in a filter's proto config.
#[macro_export]
macro_rules! enum_no_match_error {
(
field = $field:literal,
invalid_value = $invalid_value:ident,
enum_type = $enum_type:ty,
allowed_values = [ $( $allowed_value:tt ),+ ]
) => {
Err($crate::filters::error::ConvertProtoConfigError::new(
format!(
"invalid value `{}` provided: allowed values are {}",
$invalid_value,
vec![
$( (stringify!($allowed_value), <$enum_type>::$allowed_value as i32) ),+
]
.into_iter()
.map(|(a, b)| format!("{a} => {}", b as i32))
.collect::<Vec<_>>()
.join(", ")
),
Some($field.into()),
))
};
}
/// Maps an integer from a protobuf enum value to a target enum variant.
/// Both protobuf and target enum must have similar variants.
/// The protobuf enum variant should be cast-able to an i32
/// Returns an `OK` Result with the target enum variant otherwise [`ConvertProtoConfigError`]
/// if the provided value does not map to any enum variant.
#[macro_export]
macro_rules! map_proto_enum {
(
value = $value:expr,
field = $field:literal,
proto_enum_type = $proto_enum_type:ty,
target_enum_type = $target_enum_type:ty,
variants = [ $( $variant:tt ),+ ]
) => {
match $value {
$( v if v == <$proto_enum_type>::$variant as i32 => Ok(<$target_enum_type>::$variant) ),+,
invalid => $crate::enum_no_match_error!(
field = $field,
invalid_value = invalid,
enum_type = $proto_enum_type,
allowed_values = [ $( $variant ),+ ]
)
}
}
} | /// a [`FilterFactory`]. | random_line_split |
error.rs | /*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use crate::config::ValidationError;
use prometheus::Error as MetricsError;
#[cfg(doc)]
use crate::filters::{Filter, FilterFactory};
/// An error that occurred when attempting to create a [`Filter`] from
/// a [`FilterFactory`].
#[derive(Debug, PartialEq, thiserror::Error)]
pub enum Error {
#[error("filter `{}` not found", .0)]
NotFound(String),
#[error("filter `{}` requires configuration, but none provided", .0)]
MissingConfig(&'static str),
#[error("field `{}` is invalid, reason: {}", field, reason)]
FieldInvalid { field: String, reason: String },
#[error("Deserialization failed: {}", .0)]
DeserializeFailed(String),
#[error("Failed to initialize metrics: {}", .0)]
InitializeMetricsFailed(String),
#[error("Protobuf error: {}", .0)]
ConvertProtoConfig(ConvertProtoConfigError),
}
impl From<Error> for ValidationError {
fn from(error: Error) -> Self {
Self::FilterInvalid(error)
}
}
impl From<MetricsError> for Error {
fn from(error: MetricsError) -> Self {
Error::InitializeMetricsFailed(error.to_string())
}
}
/// An error representing failure to convert a filter's protobuf configuration
/// to its static representation.
#[derive(Debug, PartialEq, thiserror::Error)]
#[error(
"{}failed to convert protobuf config: {}",
self.field.as_ref().map(|f| format!("Field `{f}`")).unwrap_or_default(),
reason
)]
pub struct | {
/// Reason for the failure.
reason: String,
/// Set if the failure is specific to a single field in the config.
field: Option<String>,
}
impl ConvertProtoConfigError {
pub fn new(reason: impl std::fmt::Display, field: Option<String>) -> Self {
Self {
reason: reason.to_string(),
field,
}
}
}
/// Returns a [`ConvertProtoConfigError`] with an error message when
/// an invalid proto enum value was provided in a filter's proto config.
#[macro_export]
macro_rules! enum_no_match_error {
(
field = $field:literal,
invalid_value = $invalid_value:ident,
enum_type = $enum_type:ty,
allowed_values = [ $( $allowed_value:tt ),+ ]
) => {
Err($crate::filters::error::ConvertProtoConfigError::new(
format!(
"invalid value `{}` provided: allowed values are {}",
$invalid_value,
vec![
$( (stringify!($allowed_value), <$enum_type>::$allowed_value as i32) ),+
]
.into_iter()
.map(|(a, b)| format!("{a} => {}", b as i32))
.collect::<Vec<_>>()
.join(", ")
),
Some($field.into()),
))
};
}
/// Maps an integer from a protobuf enum value to a target enum variant.
/// Both protobuf and target enum must have similar variants.
/// The protobuf enum variant should be cast-able to an i32
/// Returns an `OK` Result with the target enum variant otherwise [`ConvertProtoConfigError`]
/// if the provided value does not map to any enum variant.
#[macro_export]
macro_rules! map_proto_enum {
(
value = $value:expr,
field = $field:literal,
proto_enum_type = $proto_enum_type:ty,
target_enum_type = $target_enum_type:ty,
variants = [ $( $variant:tt ),+ ]
) => {
match $value {
$( v if v == <$proto_enum_type>::$variant as i32 => Ok(<$target_enum_type>::$variant) ),+,
invalid => $crate::enum_no_match_error!(
field = $field,
invalid_value = invalid,
enum_type = $proto_enum_type,
allowed_values = [ $( $variant ),+ ]
)
}
}
}
| ConvertProtoConfigError | identifier_name |
periodo.module.ts | import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { FormsModule, ReactiveFormsModule } from '@angular/forms';
import { NgaModule } from '../../theme/nga.module';
import { HttpModule, JsonpModule } from '@angular/http';
import { Ng2Bs3ModalModule } from 'ng2-bs3-modal/ng2-bs3-modal';
import { Periodo } from './periodo.component';
import { routing } from './periodo.routing';
import { PeriodosService } from '../periodos/periodos.service'; | imports: [
CommonModule,
ReactiveFormsModule,
FormsModule,
NgaModule,
HttpModule,
JsonpModule,
Ng2Bs3ModalModule,
routing
],
declarations: [
Periodo
],
providers: [
PeriodosService,
Constants
]
})
export default class PeriodoModule {} | import { Constants } from '../../app.constants';
@NgModule({ | random_line_split |
periodo.module.ts | import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { FormsModule, ReactiveFormsModule } from '@angular/forms';
import { NgaModule } from '../../theme/nga.module';
import { HttpModule, JsonpModule } from '@angular/http';
import { Ng2Bs3ModalModule } from 'ng2-bs3-modal/ng2-bs3-modal';
import { Periodo } from './periodo.component';
import { routing } from './periodo.routing';
import { PeriodosService } from '../periodos/periodos.service';
import { Constants } from '../../app.constants';
@NgModule({
imports: [
CommonModule,
ReactiveFormsModule,
FormsModule,
NgaModule,
HttpModule,
JsonpModule,
Ng2Bs3ModalModule,
routing
],
declarations: [
Periodo
],
providers: [
PeriodosService,
Constants
]
})
export default class | {}
| PeriodoModule | identifier_name |
mod.rs | use std::rc::Rc;
use std::cell::RefCell;
use rand;
use rand::XorShiftRng;
use rand::Rng;
use gmath::vectors::Vec2;
use game::entity::{Object, Physics};
use game::entity::creature::Creature;
use keyboard::KeyboardState;
use sdl2::keycode;
pub trait Controller<A> {
/// Update the controller
/// # Arguments
/// `object` - The object to control
/// `secs` - The time elapsed sinced last update
fn update(&mut self, _object: &mut A, _secs: f32) {
}
}
pub struct NoneController<A>;
impl<A: Object> NoneController<A> {
pub fn new() -> NoneController<A> {
NoneController
}
}
impl<A: Object> Controller<A> for NoneController<A> {
// Just use default trait implementations
}
/// A controller that controls objects using the keyboard
pub struct KeyboardController {
keyboard: Rc<RefCell<KeyboardState>>,
}
impl KeyboardController {
pub fn new(keyboard: Rc<RefCell<KeyboardState>>) -> KeyboardController {
KeyboardController {
keyboard: keyboard,
}
}
}
impl Controller<Creature> for KeyboardController {
fn | (&mut self, object: &mut Creature, _: f32) {
let keyboard = self.keyboard.borrow();
let move_accel = object.move_accel;
let x_accel =
if keyboard.is_keydown(keycode::LeftKey) {
-move_accel * if object.is_on_ground() { 1.0 } else { 0.6 }
}
else if keyboard.is_keydown(keycode::RightKey) {
move_accel * if object.is_on_ground() { 1.0 } else { 0.6 }
}
else {
0.0
};
let new_accel = Vec2::new(x_accel, object.acceleration().y);
object.set_acceleration(new_accel);
let jump_accel = object.jump_accel;
if object.is_on_ground() && keyboard.is_keydown(keycode::UpKey) {
let new_velocity = object.velocity() + Vec2::new(0.0, -jump_accel);
object.set_velocity(new_velocity);
}
}
}
/// A controller that controls objects using randomness
pub struct RandomController {
rng: XorShiftRng,
move_time: f32,
wait_time: f32,
}
impl RandomController {
pub fn new(move_time: f32) -> RandomController {
RandomController {
rng: rand::weak_rng(),
move_time: move_time,
wait_time: 0.0,
}
}
}
impl Controller<Creature> for RandomController {
fn update(&mut self, object: &mut Creature, secs: f32) {
self.wait_time += secs;
if self.wait_time > self.move_time {
let move_accel = object.move_accel;
let x_accel = match self.rng.gen::<f32>() {
dir if dir < 0.5 => 0.0,
dir if dir < 0.75 => move_accel,
_ => -move_accel,
};
let new_accel = Vec2::new(x_accel, object.acceleration().y);
object.set_acceleration(new_accel);
self.wait_time -= self.move_time;
}
}
}
| update | identifier_name |
mod.rs | use std::rc::Rc;
use std::cell::RefCell;
use rand;
use rand::XorShiftRng;
use rand::Rng;
use gmath::vectors::Vec2;
use game::entity::{Object, Physics};
use game::entity::creature::Creature;
use keyboard::KeyboardState;
use sdl2::keycode;
pub trait Controller<A> {
/// Update the controller
/// # Arguments
/// `object` - The object to control
/// `secs` - The time elapsed sinced last update
fn update(&mut self, _object: &mut A, _secs: f32) {
}
}
pub struct NoneController<A>;
impl<A: Object> NoneController<A> {
pub fn new() -> NoneController<A> {
NoneController
}
}
impl<A: Object> Controller<A> for NoneController<A> {
// Just use default trait implementations
}
/// A controller that controls objects using the keyboard
pub struct KeyboardController {
keyboard: Rc<RefCell<KeyboardState>>,
}
impl KeyboardController {
pub fn new(keyboard: Rc<RefCell<KeyboardState>>) -> KeyboardController {
KeyboardController {
keyboard: keyboard,
}
}
}
impl Controller<Creature> for KeyboardController {
fn update(&mut self, object: &mut Creature, _: f32) {
let keyboard = self.keyboard.borrow();
let move_accel = object.move_accel;
let x_accel =
if keyboard.is_keydown(keycode::LeftKey) |
else if keyboard.is_keydown(keycode::RightKey) {
move_accel * if object.is_on_ground() { 1.0 } else { 0.6 }
}
else {
0.0
};
let new_accel = Vec2::new(x_accel, object.acceleration().y);
object.set_acceleration(new_accel);
let jump_accel = object.jump_accel;
if object.is_on_ground() && keyboard.is_keydown(keycode::UpKey) {
let new_velocity = object.velocity() + Vec2::new(0.0, -jump_accel);
object.set_velocity(new_velocity);
}
}
}
/// A controller that controls objects using randomness
pub struct RandomController {
rng: XorShiftRng,
move_time: f32,
wait_time: f32,
}
impl RandomController {
pub fn new(move_time: f32) -> RandomController {
RandomController {
rng: rand::weak_rng(),
move_time: move_time,
wait_time: 0.0,
}
}
}
impl Controller<Creature> for RandomController {
fn update(&mut self, object: &mut Creature, secs: f32) {
self.wait_time += secs;
if self.wait_time > self.move_time {
let move_accel = object.move_accel;
let x_accel = match self.rng.gen::<f32>() {
dir if dir < 0.5 => 0.0,
dir if dir < 0.75 => move_accel,
_ => -move_accel,
};
let new_accel = Vec2::new(x_accel, object.acceleration().y);
object.set_acceleration(new_accel);
self.wait_time -= self.move_time;
}
}
}
| {
-move_accel * if object.is_on_ground() { 1.0 } else { 0.6 }
} | conditional_block |
mod.rs | use std::rc::Rc;
use std::cell::RefCell;
use rand;
use rand::XorShiftRng;
use rand::Rng;
use gmath::vectors::Vec2;
use game::entity::{Object, Physics};
use game::entity::creature::Creature;
use keyboard::KeyboardState;
use sdl2::keycode;
pub trait Controller<A> {
/// Update the controller
/// # Arguments
/// `object` - The object to control
/// `secs` - The time elapsed sinced last update
fn update(&mut self, _object: &mut A, _secs: f32) {
}
}
pub struct NoneController<A>;
impl<A: Object> NoneController<A> {
pub fn new() -> NoneController<A> {
NoneController
}
}
impl<A: Object> Controller<A> for NoneController<A> {
// Just use default trait implementations
}
/// A controller that controls objects using the keyboard
pub struct KeyboardController {
keyboard: Rc<RefCell<KeyboardState>>,
}
impl KeyboardController {
pub fn new(keyboard: Rc<RefCell<KeyboardState>>) -> KeyboardController {
KeyboardController {
keyboard: keyboard,
}
} | impl Controller<Creature> for KeyboardController {
fn update(&mut self, object: &mut Creature, _: f32) {
let keyboard = self.keyboard.borrow();
let move_accel = object.move_accel;
let x_accel =
if keyboard.is_keydown(keycode::LeftKey) {
-move_accel * if object.is_on_ground() { 1.0 } else { 0.6 }
}
else if keyboard.is_keydown(keycode::RightKey) {
move_accel * if object.is_on_ground() { 1.0 } else { 0.6 }
}
else {
0.0
};
let new_accel = Vec2::new(x_accel, object.acceleration().y);
object.set_acceleration(new_accel);
let jump_accel = object.jump_accel;
if object.is_on_ground() && keyboard.is_keydown(keycode::UpKey) {
let new_velocity = object.velocity() + Vec2::new(0.0, -jump_accel);
object.set_velocity(new_velocity);
}
}
}
/// A controller that controls objects using randomness
pub struct RandomController {
rng: XorShiftRng,
move_time: f32,
wait_time: f32,
}
impl RandomController {
pub fn new(move_time: f32) -> RandomController {
RandomController {
rng: rand::weak_rng(),
move_time: move_time,
wait_time: 0.0,
}
}
}
impl Controller<Creature> for RandomController {
fn update(&mut self, object: &mut Creature, secs: f32) {
self.wait_time += secs;
if self.wait_time > self.move_time {
let move_accel = object.move_accel;
let x_accel = match self.rng.gen::<f32>() {
dir if dir < 0.5 => 0.0,
dir if dir < 0.75 => move_accel,
_ => -move_accel,
};
let new_accel = Vec2::new(x_accel, object.acceleration().y);
object.set_acceleration(new_accel);
self.wait_time -= self.move_time;
}
}
} | }
| random_line_split |
mod.rs | use std::rc::Rc;
use std::cell::RefCell;
use rand;
use rand::XorShiftRng;
use rand::Rng;
use gmath::vectors::Vec2;
use game::entity::{Object, Physics};
use game::entity::creature::Creature;
use keyboard::KeyboardState;
use sdl2::keycode;
pub trait Controller<A> {
/// Update the controller
/// # Arguments
/// `object` - The object to control
/// `secs` - The time elapsed sinced last update
fn update(&mut self, _object: &mut A, _secs: f32) |
}
pub struct NoneController<A>;
impl<A: Object> NoneController<A> {
pub fn new() -> NoneController<A> {
NoneController
}
}
impl<A: Object> Controller<A> for NoneController<A> {
// Just use default trait implementations
}
/// A controller that controls objects using the keyboard
pub struct KeyboardController {
keyboard: Rc<RefCell<KeyboardState>>,
}
impl KeyboardController {
pub fn new(keyboard: Rc<RefCell<KeyboardState>>) -> KeyboardController {
KeyboardController {
keyboard: keyboard,
}
}
}
impl Controller<Creature> for KeyboardController {
fn update(&mut self, object: &mut Creature, _: f32) {
let keyboard = self.keyboard.borrow();
let move_accel = object.move_accel;
let x_accel =
if keyboard.is_keydown(keycode::LeftKey) {
-move_accel * if object.is_on_ground() { 1.0 } else { 0.6 }
}
else if keyboard.is_keydown(keycode::RightKey) {
move_accel * if object.is_on_ground() { 1.0 } else { 0.6 }
}
else {
0.0
};
let new_accel = Vec2::new(x_accel, object.acceleration().y);
object.set_acceleration(new_accel);
let jump_accel = object.jump_accel;
if object.is_on_ground() && keyboard.is_keydown(keycode::UpKey) {
let new_velocity = object.velocity() + Vec2::new(0.0, -jump_accel);
object.set_velocity(new_velocity);
}
}
}
/// A controller that controls objects using randomness
pub struct RandomController {
rng: XorShiftRng,
move_time: f32,
wait_time: f32,
}
impl RandomController {
pub fn new(move_time: f32) -> RandomController {
RandomController {
rng: rand::weak_rng(),
move_time: move_time,
wait_time: 0.0,
}
}
}
impl Controller<Creature> for RandomController {
fn update(&mut self, object: &mut Creature, secs: f32) {
self.wait_time += secs;
if self.wait_time > self.move_time {
let move_accel = object.move_accel;
let x_accel = match self.rng.gen::<f32>() {
dir if dir < 0.5 => 0.0,
dir if dir < 0.75 => move_accel,
_ => -move_accel,
};
let new_accel = Vec2::new(x_accel, object.acceleration().y);
object.set_acceleration(new_accel);
self.wait_time -= self.move_time;
}
}
}
| {
} | identifier_body |
dojox.widget._CalendarYearView.d.ts | /// <reference path="Object.d.ts" />
/// <reference path="dojox.widget._CalendarView.d.ts" />
/// <reference path="dijit._Templated.d.ts" />
module dojox.widget{
export class | extends dojox.widget._CalendarView {
templateString : String;
templatePath : String;
widgetsInTemplate : bool;
_skipNodeCache : bool;
_earlyTemplatedStartup : bool;
_attachPoints : any;
_attachEvents : any[];
declaredClass : any;
_startupWidgets : Object;
_supportingWidgets : Object;
_templateCache : Object;
_stringRepl (tmpl:any) : any;
_fillContent (source:HTMLElement) : any;
_attachTemplateNodes (rootNode:HTMLElement,getAttrFunc?:Function) : any;
getCachedTemplate (templatePath:String,templateString?:String,alwaysUseString?:any) : any;
displayedYears : number;
_populateYears : any;
_setValueAttr (value:any) : any;
}
}
| _CalendarYearView | identifier_name |
dojox.widget._CalendarYearView.d.ts | /// <reference path="Object.d.ts" />
/// <reference path="dojox.widget._CalendarView.d.ts" />
/// <reference path="dijit._Templated.d.ts" />
module dojox.widget{
export class _CalendarYearView extends dojox.widget._CalendarView { | widgetsInTemplate : bool;
_skipNodeCache : bool;
_earlyTemplatedStartup : bool;
_attachPoints : any;
_attachEvents : any[];
declaredClass : any;
_startupWidgets : Object;
_supportingWidgets : Object;
_templateCache : Object;
_stringRepl (tmpl:any) : any;
_fillContent (source:HTMLElement) : any;
_attachTemplateNodes (rootNode:HTMLElement,getAttrFunc?:Function) : any;
getCachedTemplate (templatePath:String,templateString?:String,alwaysUseString?:any) : any;
displayedYears : number;
_populateYears : any;
_setValueAttr (value:any) : any;
}
} | templateString : String;
templatePath : String; | random_line_split |
getaddons.py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'designer/getaddons.ui'
#
# Created: Fri Aug 22 00:57:31 2014
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName(_fromUtf8("Dialog"))
Dialog.resize(367, 204)
self.verticalLayout = QtGui.QVBoxLayout(Dialog)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.label = QtGui.QLabel(Dialog)
self.label.setWordWrap(True)
self.label.setObjectName(_fromUtf8("label"))
self.verticalLayout.addWidget(self.label)
spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout.addItem(spacerItem)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.label_2 = QtGui.QLabel(Dialog)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.horizontalLayout.addWidget(self.label_2)
self.code = QtGui.QLineEdit(Dialog)
self.code.setObjectName(_fromUtf8("code"))
self.horizontalLayout.addWidget(self.code)
self.verticalLayout.addLayout(self.horizontalLayout)
self.buttonBox = QtGui.QDialogButtonBox(Dialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
self.verticalLayout.addWidget(self.buttonBox)
self.retranslateUi(Dialog)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), Dialog.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def | (self, Dialog):
Dialog.setWindowTitle(_("Install Add-on"))
self.label.setText(_("To browse add-ons, please click the browse button below.<br><br>When you\'ve found an add-on you like, please paste its code below."))
self.label_2.setText(_("Code:"))
| retranslateUi | identifier_name |
getaddons.py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'designer/getaddons.ui'
#
# Created: Fri Aug 22 00:57:31 2014
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
|
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName(_fromUtf8("Dialog"))
Dialog.resize(367, 204)
self.verticalLayout = QtGui.QVBoxLayout(Dialog)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.label = QtGui.QLabel(Dialog)
self.label.setWordWrap(True)
self.label.setObjectName(_fromUtf8("label"))
self.verticalLayout.addWidget(self.label)
spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout.addItem(spacerItem)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.label_2 = QtGui.QLabel(Dialog)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.horizontalLayout.addWidget(self.label_2)
self.code = QtGui.QLineEdit(Dialog)
self.code.setObjectName(_fromUtf8("code"))
self.horizontalLayout.addWidget(self.code)
self.verticalLayout.addLayout(self.horizontalLayout)
self.buttonBox = QtGui.QDialogButtonBox(Dialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
self.verticalLayout.addWidget(self.buttonBox)
self.retranslateUi(Dialog)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), Dialog.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(_("Install Add-on"))
self.label.setText(_("To browse add-ons, please click the browse button below.<br><br>When you\'ve found an add-on you like, please paste its code below."))
self.label_2.setText(_("Code:"))
| return QtGui.QApplication.translate(context, text, disambig, _encoding) | identifier_body |
getaddons.py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'designer/getaddons.ui'
#
# Created: Fri Aug 22 00:57:31 2014
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName(_fromUtf8("Dialog"))
Dialog.resize(367, 204)
self.verticalLayout = QtGui.QVBoxLayout(Dialog)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.label = QtGui.QLabel(Dialog) | self.verticalLayout.addItem(spacerItem)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.label_2 = QtGui.QLabel(Dialog)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.horizontalLayout.addWidget(self.label_2)
self.code = QtGui.QLineEdit(Dialog)
self.code.setObjectName(_fromUtf8("code"))
self.horizontalLayout.addWidget(self.code)
self.verticalLayout.addLayout(self.horizontalLayout)
self.buttonBox = QtGui.QDialogButtonBox(Dialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
self.verticalLayout.addWidget(self.buttonBox)
self.retranslateUi(Dialog)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), Dialog.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(_("Install Add-on"))
self.label.setText(_("To browse add-ons, please click the browse button below.<br><br>When you\'ve found an add-on you like, please paste its code below."))
self.label_2.setText(_("Code:")) | self.label.setWordWrap(True)
self.label.setObjectName(_fromUtf8("label"))
self.verticalLayout.addWidget(self.label)
spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding) | random_line_split |
nav.d.ts | import { AfterViewInit, ComponentFactoryResolver, ElementRef, NgZone, Renderer, ViewContainerRef } from '@angular/core';
import { App } from '../app/app';
import { Config } from '../../config/config';
import { DeepLinker } from '../../navigation/deep-linker';
import { GestureController } from '../../gestures/gesture-controller';
import { Keyboard } from '../../util/keyboard';
import { NavController } from '../../navigation/nav-controller';
import { NavControllerBase } from '../../navigation/nav-controller-base';
import { NavOptions } from '../../navigation/nav-util';
import { TransitionController } from '../../transitions/transition-controller';
import { ViewController } from '../../navigation/view-controller';
import { DomController } from '../../util/dom-controller';
/**
* @name Nav
* @description
*
* `ion-nav` is the declarative component for a [NavController](../../../navigation/NavController/).
*
* For more information on using nav controllers like Nav or [Tab](../../Tabs/Tab/),
* take a look at the [NavController API Docs](../../../navigation/NavController/).
*
*
* @usage
* You must set a root page to be loaded initially by any Nav you create, using
* the 'root' property:
*
* ```ts | * })
* class MyApp {
* root = GettingStartedPage;
*
* constructor(){
* }
* }
* ```
*
* @demo /docs/demos/src/navigation/
* @see {@link /docs/components#navigation Navigation Component Docs}
*/
export declare class Nav extends NavControllerBase implements AfterViewInit {
private _root;
private _hasInit;
constructor(viewCtrl: ViewController, parent: NavController, app: App, config: Config, keyboard: Keyboard, elementRef: ElementRef, zone: NgZone, renderer: Renderer, cfr: ComponentFactoryResolver, gestureCtrl: GestureController, transCtrl: TransitionController, linker: DeepLinker, domCtrl: DomController);
/**
* @private
*/
_vp: ViewContainerRef;
ngAfterViewInit(): void;
goToRoot(opts: NavOptions): void;
/**
* @input {Page} The Page component to load as the root page within this nav.
*/
root: any;
/**
* @input {object} Any nav-params to pass to the root page of this nav.
*/
rootParams: any;
/**
* @input {boolean} Whether it's possible to swipe-to-go-back on this nav controller or not.
*/
swipeBackEnabled: boolean;
/**
* @private
*/
destroy(): void;
} | * import { Component } from '@angular/core';
* import { GettingStartedPage } from './getting-started';
*
* @Component({
* template: `<ion-nav [root]="root"></ion-nav>` | random_line_split |
nav.d.ts | import { AfterViewInit, ComponentFactoryResolver, ElementRef, NgZone, Renderer, ViewContainerRef } from '@angular/core';
import { App } from '../app/app';
import { Config } from '../../config/config';
import { DeepLinker } from '../../navigation/deep-linker';
import { GestureController } from '../../gestures/gesture-controller';
import { Keyboard } from '../../util/keyboard';
import { NavController } from '../../navigation/nav-controller';
import { NavControllerBase } from '../../navigation/nav-controller-base';
import { NavOptions } from '../../navigation/nav-util';
import { TransitionController } from '../../transitions/transition-controller';
import { ViewController } from '../../navigation/view-controller';
import { DomController } from '../../util/dom-controller';
/**
* @name Nav
* @description
*
* `ion-nav` is the declarative component for a [NavController](../../../navigation/NavController/).
*
* For more information on using nav controllers like Nav or [Tab](../../Tabs/Tab/),
* take a look at the [NavController API Docs](../../../navigation/NavController/).
*
*
* @usage
* You must set a root page to be loaded initially by any Nav you create, using
* the 'root' property:
*
* ```ts
* import { Component } from '@angular/core';
* import { GettingStartedPage } from './getting-started';
*
* @Component({
* template: `<ion-nav [root]="root"></ion-nav>`
* })
* class MyApp {
* root = GettingStartedPage;
*
* constructor(){
* }
* }
* ```
*
* @demo /docs/demos/src/navigation/
* @see {@link /docs/components#navigation Navigation Component Docs}
*/
export declare class | extends NavControllerBase implements AfterViewInit {
private _root;
private _hasInit;
constructor(viewCtrl: ViewController, parent: NavController, app: App, config: Config, keyboard: Keyboard, elementRef: ElementRef, zone: NgZone, renderer: Renderer, cfr: ComponentFactoryResolver, gestureCtrl: GestureController, transCtrl: TransitionController, linker: DeepLinker, domCtrl: DomController);
/**
* @private
*/
_vp: ViewContainerRef;
ngAfterViewInit(): void;
goToRoot(opts: NavOptions): void;
/**
* @input {Page} The Page component to load as the root page within this nav.
*/
root: any;
/**
* @input {object} Any nav-params to pass to the root page of this nav.
*/
rootParams: any;
/**
* @input {boolean} Whether it's possible to swipe-to-go-back on this nav controller or not.
*/
swipeBackEnabled: boolean;
/**
* @private
*/
destroy(): void;
}
| Nav | identifier_name |
p_2_0_01.rs | // P_2_0_01
//
// Generative Gestaltung – Creative Coding im Web
// ISBN: 978-3-87439-902-9, First Edition, Hermann Schmidt, Mainz, 2018
// Benedikt Groß, Hartmut Bohnacker, Julia Laub, Claudius Lazzeroni
// with contributions by Joey Lee and Niels Poldervaart
// Copyright 2018
//
// http://www.generative-gestaltung.de
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* drawing a filled circle with lines.
*
* MOUSE
* position x : length
* position y : thickness and number of lines
*
* KEYS
* s : save png
*/
extern crate nannou;
use nannou::prelude::*;
fn main() {
nannou::sketch(view).size(550, 550).run();
}
fn view(app: &App, frame: Frame) {
// Prepare to draw.
let draw = app.draw();
let win = app.window_rect();
let circle_resolution = map_range(app.mouse.y, win.top(), win.bottom(), 2, 80);
let radius = app.mouse.x - win.left();
let angle = TAU / circle_resolution as f32;
draw.background().color(BLACK);
for i in 0..circle_resolution {
let x = (angle * i as f32).cos() * radius;
let y = (angle * i as f32).sin() * radius;
draw.line()
.start(pt2(0.0, 0.0))
.end(pt2(x, y))
.stroke_weight(app.mouse.y / 20.0)
.caps_round()
.color(WHITE);
}
// Write to the window frame.
draw.to_frame(app, &frame).unwrap();
if app.keys.down.contains(&Key::S) {
| app.main_window()
.capture_frame(app.exe_name().unwrap() + ".png");
}
}
| conditional_block | |
p_2_0_01.rs | // P_2_0_01
//
// Generative Gestaltung – Creative Coding im Web
// ISBN: 978-3-87439-902-9, First Edition, Hermann Schmidt, Mainz, 2018
// Benedikt Groß, Hartmut Bohnacker, Julia Laub, Claudius Lazzeroni
// with contributions by Joey Lee and Niels Poldervaart
// Copyright 2018
//
// http://www.generative-gestaltung.de
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* drawing a filled circle with lines.
*
* MOUSE
* position x : length
* position y : thickness and number of lines
*
* KEYS
* s : save png
*/
extern crate nannou;
use nannou::prelude::*;
fn main() {
nannou::sketch(view).size(550, 550).run();
}
fn view(app: &App, frame: Frame) {
| // Prepare to draw.
let draw = app.draw();
let win = app.window_rect();
let circle_resolution = map_range(app.mouse.y, win.top(), win.bottom(), 2, 80);
let radius = app.mouse.x - win.left();
let angle = TAU / circle_resolution as f32;
draw.background().color(BLACK);
for i in 0..circle_resolution {
let x = (angle * i as f32).cos() * radius;
let y = (angle * i as f32).sin() * radius;
draw.line()
.start(pt2(0.0, 0.0))
.end(pt2(x, y))
.stroke_weight(app.mouse.y / 20.0)
.caps_round()
.color(WHITE);
}
// Write to the window frame.
draw.to_frame(app, &frame).unwrap();
if app.keys.down.contains(&Key::S) {
app.main_window()
.capture_frame(app.exe_name().unwrap() + ".png");
}
}
| identifier_body | |
p_2_0_01.rs | // P_2_0_01
//
// Generative Gestaltung – Creative Coding im Web
// ISBN: 978-3-87439-902-9, First Edition, Hermann Schmidt, Mainz, 2018
// Benedikt Groß, Hartmut Bohnacker, Julia Laub, Claudius Lazzeroni
// with contributions by Joey Lee and Niels Poldervaart
// Copyright 2018
//
// http://www.generative-gestaltung.de
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* drawing a filled circle with lines.
*
* MOUSE
* position x : length
* position y : thickness and number of lines
*
* KEYS
* s : save png
*/
extern crate nannou;
use nannou::prelude::*;
fn main() {
nannou::sketch(view).size(550, 550).run();
}
fn vie | p: &App, frame: Frame) {
// Prepare to draw.
let draw = app.draw();
let win = app.window_rect();
let circle_resolution = map_range(app.mouse.y, win.top(), win.bottom(), 2, 80);
let radius = app.mouse.x - win.left();
let angle = TAU / circle_resolution as f32;
draw.background().color(BLACK);
for i in 0..circle_resolution {
let x = (angle * i as f32).cos() * radius;
let y = (angle * i as f32).sin() * radius;
draw.line()
.start(pt2(0.0, 0.0))
.end(pt2(x, y))
.stroke_weight(app.mouse.y / 20.0)
.caps_round()
.color(WHITE);
}
// Write to the window frame.
draw.to_frame(app, &frame).unwrap();
if app.keys.down.contains(&Key::S) {
app.main_window()
.capture_frame(app.exe_name().unwrap() + ".png");
}
}
| w(ap | identifier_name |
p_2_0_01.rs | // P_2_0_01
//
// Generative Gestaltung – Creative Coding im Web
// ISBN: 978-3-87439-902-9, First Edition, Hermann Schmidt, Mainz, 2018
// Benedikt Groß, Hartmut Bohnacker, Julia Laub, Claudius Lazzeroni
// with contributions by Joey Lee and Niels Poldervaart
// Copyright 2018
//
// http://www.generative-gestaltung.de
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* drawing a filled circle with lines.
*
* MOUSE
* position x : length
* position y : thickness and number of lines
*
* KEYS
* s : save png
*/
extern crate nannou;
use nannou::prelude::*;
fn main() {
nannou::sketch(view).size(550, 550).run();
}
fn view(app: &App, frame: Frame) {
// Prepare to draw.
let draw = app.draw();
let win = app.window_rect();
let circle_resolution = map_range(app.mouse.y, win.top(), win.bottom(), 2, 80);
let radius = app.mouse.x - win.left();
let angle = TAU / circle_resolution as f32;
draw.background().color(BLACK);
for i in 0..circle_resolution {
let x = (angle * i as f32).cos() * radius;
let y = (angle * i as f32).sin() * radius;
draw.line()
.start(pt2(0.0, 0.0))
.end(pt2(x, y)) | .caps_round()
.color(WHITE);
}
// Write to the window frame.
draw.to_frame(app, &frame).unwrap();
if app.keys.down.contains(&Key::S) {
app.main_window()
.capture_frame(app.exe_name().unwrap() + ".png");
}
} | .stroke_weight(app.mouse.y / 20.0) | random_line_split |
goal_lists.js | /*
* This file is part of Saladay <https://www.crimx.com/rn-saladay/>.
* Copyright (C) 2017 CRIMX <straybugs@gmail.com>
*
* Saladay is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 3 as
* published by the Free Software Foundation.
*
* Saladay is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Saladay. If not, see <http://www.gnu.org/licenses/>.
*/
import { SQLite } from 'expo'
import { pickle } from './helpers'
export default class GoalLists {
constructor (db) {
this.db = db || SQLite.openDatabase('saladay.db')
}
/**
* @param {object|object[]} data
* @return Promse
*/
insert (data) {
let arr = Array.isArray(data) ? data : [data]
const keys = ['list_id', 'list_title', 'list_color', 'list_order']
const sql = (
`INSERT INTO goal_lists
(${keys.join(',')})
VALUES
(${Array(keys.length).fill('?').join(',')});`
)
return new Promise((resolve, reject) => {
this.db.transaction(tx => {
pickle(arr, keys).forEach(d => {
tx.executeSql(
sql,
keys.map(k => d[k])
)
})
}, reject, resolve)
})
}
/**
* @return Promse<object[]>
*/
getAll () {
return new Promise((resolve, reject) => {
this.db.transaction(tx => {
tx.executeSql(
`SELECT * FROM goal_lists ORDER BY list_order`,
[],
(_, {rows}) => resolve(rows._array)
)
}, reject)
})
}
| (listItem) {
return new Promise((resolve, reject) => {
if (!listItem.list_id) { return reject('updateListItem: Missing PK list_id') }
let keys = [
'list_title',
'list_color',
'list_order'
].filter(k => listItem[k] !== undefined)
this.db.transaction(tx => {
tx.executeSql(
`UPDATE goal_lists
SET ${keys.map(k => k + ' = ?').join(',')}
WHERE list_id = ?`,
[...keys.map(k => listItem[k]), listItem.list_id]
)
}, reject, resolve)
})
}
}
| update | identifier_name |
goal_lists.js | /*
* This file is part of Saladay <https://www.crimx.com/rn-saladay/>.
* Copyright (C) 2017 CRIMX <straybugs@gmail.com>
*
* Saladay is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 3 as
* published by the Free Software Foundation.
*
* Saladay is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Saladay. If not, see <http://www.gnu.org/licenses/>.
*/
import { SQLite } from 'expo'
import { pickle } from './helpers'
export default class GoalLists {
constructor (db) {
this.db = db || SQLite.openDatabase('saladay.db')
}
/**
* @param {object|object[]} data
* @return Promse
*/
insert (data) {
let arr = Array.isArray(data) ? data : [data]
const keys = ['list_id', 'list_title', 'list_color', 'list_order']
const sql = (
`INSERT INTO goal_lists
(${keys.join(',')})
VALUES
(${Array(keys.length).fill('?').join(',')});`
)
return new Promise((resolve, reject) => {
this.db.transaction(tx => {
pickle(arr, keys).forEach(d => {
tx.executeSql(
sql,
keys.map(k => d[k])
)
})
}, reject, resolve)
})
}
/**
* @return Promse<object[]>
*/
getAll () {
return new Promise((resolve, reject) => {
this.db.transaction(tx => {
tx.executeSql(
`SELECT * FROM goal_lists ORDER BY list_order`,
[],
(_, {rows}) => resolve(rows._array)
)
}, reject)
})
}
update (listItem) {
return new Promise((resolve, reject) => {
if (!listItem.list_id) |
let keys = [
'list_title',
'list_color',
'list_order'
].filter(k => listItem[k] !== undefined)
this.db.transaction(tx => {
tx.executeSql(
`UPDATE goal_lists
SET ${keys.map(k => k + ' = ?').join(',')}
WHERE list_id = ?`,
[...keys.map(k => listItem[k]), listItem.list_id]
)
}, reject, resolve)
})
}
}
| { return reject('updateListItem: Missing PK list_id') } | conditional_block |
goal_lists.js | /*
* This file is part of Saladay <https://www.crimx.com/rn-saladay/>.
* Copyright (C) 2017 CRIMX <straybugs@gmail.com>
*
* Saladay is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 3 as | * GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Saladay. If not, see <http://www.gnu.org/licenses/>.
*/
import { SQLite } from 'expo'
import { pickle } from './helpers'
export default class GoalLists {
constructor (db) {
this.db = db || SQLite.openDatabase('saladay.db')
}
/**
* @param {object|object[]} data
* @return Promse
*/
insert (data) {
let arr = Array.isArray(data) ? data : [data]
const keys = ['list_id', 'list_title', 'list_color', 'list_order']
const sql = (
`INSERT INTO goal_lists
(${keys.join(',')})
VALUES
(${Array(keys.length).fill('?').join(',')});`
)
return new Promise((resolve, reject) => {
this.db.transaction(tx => {
pickle(arr, keys).forEach(d => {
tx.executeSql(
sql,
keys.map(k => d[k])
)
})
}, reject, resolve)
})
}
/**
* @return Promse<object[]>
*/
getAll () {
return new Promise((resolve, reject) => {
this.db.transaction(tx => {
tx.executeSql(
`SELECT * FROM goal_lists ORDER BY list_order`,
[],
(_, {rows}) => resolve(rows._array)
)
}, reject)
})
}
update (listItem) {
return new Promise((resolve, reject) => {
if (!listItem.list_id) { return reject('updateListItem: Missing PK list_id') }
let keys = [
'list_title',
'list_color',
'list_order'
].filter(k => listItem[k] !== undefined)
this.db.transaction(tx => {
tx.executeSql(
`UPDATE goal_lists
SET ${keys.map(k => k + ' = ?').join(',')}
WHERE list_id = ?`,
[...keys.map(k => listItem[k]), listItem.list_id]
)
}, reject, resolve)
})
}
} | * published by the Free Software Foundation.
*
* Saladay is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | random_line_split |
parameter.ts | import { Type, ReflectionType } from '../types/index';
import { Reflection, DefaultValueContainer, TypeContainer, TraverseCallback, TraverseProperty } from './abstract';
import { SignatureReflection } from './signature';
export class ParameterReflection extends Reflection implements DefaultValueContainer, TypeContainer {
parent?: SignatureReflection;
defaultValue?: string;
type?: Type;
/**
* Traverse all potential child reflections of this reflection.
*
* The given callback will be invoked for all children, signatures and type parameters
* attached to this reflection.
*
* @param callback The callback function that should be applied for each child reflection.
*/
traverse(callback: TraverseCallback) {
if (this.type instanceof ReflectionType) {
callback(this.type.declaration, TraverseProperty.TypeLiteral);
}
super.traverse(callback);
}
/**
* Return a raw object representation of this reflection.
* @deprecated Use serializers instead
*/
toObject(): any {
const result = super.toObject();
if (this.type) |
if (this.defaultValue) {
result.defaultValue = this.defaultValue;
}
return result;
}
/**
* Return a string representation of this reflection.
*/
toString() {
return super.toString() + (this.type ? ':' + this.type.toString() : '');
}
}
| {
result.type = this.type.toObject();
} | conditional_block |
parameter.ts | import { Type, ReflectionType } from '../types/index';
import { Reflection, DefaultValueContainer, TypeContainer, TraverseCallback, TraverseProperty } from './abstract';
import { SignatureReflection } from './signature';
export class | extends Reflection implements DefaultValueContainer, TypeContainer {
parent?: SignatureReflection;
defaultValue?: string;
type?: Type;
/**
* Traverse all potential child reflections of this reflection.
*
* The given callback will be invoked for all children, signatures and type parameters
* attached to this reflection.
*
* @param callback The callback function that should be applied for each child reflection.
*/
traverse(callback: TraverseCallback) {
if (this.type instanceof ReflectionType) {
callback(this.type.declaration, TraverseProperty.TypeLiteral);
}
super.traverse(callback);
}
/**
* Return a raw object representation of this reflection.
* @deprecated Use serializers instead
*/
toObject(): any {
const result = super.toObject();
if (this.type) {
result.type = this.type.toObject();
}
if (this.defaultValue) {
result.defaultValue = this.defaultValue;
}
return result;
}
/**
* Return a string representation of this reflection.
*/
toString() {
return super.toString() + (this.type ? ':' + this.type.toString() : '');
}
}
| ParameterReflection | identifier_name |
parameter.ts | import { Type, ReflectionType } from '../types/index';
import { Reflection, DefaultValueContainer, TypeContainer, TraverseCallback, TraverseProperty } from './abstract';
import { SignatureReflection } from './signature';
export class ParameterReflection extends Reflection implements DefaultValueContainer, TypeContainer {
parent?: SignatureReflection;
defaultValue?: string;
type?: Type;
/**
* Traverse all potential child reflections of this reflection.
*
* The given callback will be invoked for all children, signatures and type parameters
* attached to this reflection.
*
* @param callback The callback function that should be applied for each child reflection.
*/
traverse(callback: TraverseCallback) {
if (this.type instanceof ReflectionType) {
callback(this.type.declaration, TraverseProperty.TypeLiteral);
}
super.traverse(callback);
}
/**
* Return a raw object representation of this reflection.
* @deprecated Use serializers instead
*/
toObject(): any |
/**
* Return a string representation of this reflection.
*/
toString() {
return super.toString() + (this.type ? ':' + this.type.toString() : '');
}
}
| {
const result = super.toObject();
if (this.type) {
result.type = this.type.toObject();
}
if (this.defaultValue) {
result.defaultValue = this.defaultValue;
}
return result;
} | identifier_body |
parameter.ts | import { Type, ReflectionType } from '../types/index';
import { Reflection, DefaultValueContainer, TypeContainer, TraverseCallback, TraverseProperty } from './abstract';
import { SignatureReflection } from './signature';
export class ParameterReflection extends Reflection implements DefaultValueContainer, TypeContainer {
parent?: SignatureReflection;
defaultValue?: string;
type?: Type;
/**
* Traverse all potential child reflections of this reflection.
*
* The given callback will be invoked for all children, signatures and type parameters
* attached to this reflection.
*
* @param callback The callback function that should be applied for each child reflection.
*/
traverse(callback: TraverseCallback) {
if (this.type instanceof ReflectionType) {
callback(this.type.declaration, TraverseProperty.TypeLiteral);
}
super.traverse(callback);
}
/**
* Return a raw object representation of this reflection.
* @deprecated Use serializers instead
*/
toObject(): any {
const result = super.toObject();
if (this.type) { | if (this.defaultValue) {
result.defaultValue = this.defaultValue;
}
return result;
}
/**
* Return a string representation of this reflection.
*/
toString() {
return super.toString() + (this.type ? ':' + this.type.toString() : '');
}
} | result.type = this.type.toObject();
}
| random_line_split |
SearchUtility.js | /** @flow */
import { INDEX_MODES } from "./constants";
import SearchIndex from "./SearchIndex";
import type { IndexMode } from "./constants";
import type { SearchApiIndex } from "../types";
type UidMap = {
[uid: string]: boolean
};
/**
* Synchronous client-side full-text search utility.
* Forked from JS search (github.com/bvaughn/js-search).
*/
export default class SearchUtility implements SearchApiIndex {
_caseSensitive: boolean;
_indexMode: IndexMode;
_matchAnyToken: boolean;
_searchIndex: SearchIndex;
_tokenizePattern: RegExp;
_uids: UidMap;
/**
* Constructor.
*
* @param indexMode See #setIndexMode
* @param tokenizePattern See #setTokenizePattern
* @param caseSensitive See #setCaseSensitive
* @param matchAnyToken See #setMatchAnyToken
*/
constructor(
{
caseSensitive = false,
indexMode = INDEX_MODES.ALL_SUBSTRINGS,
matchAnyToken = false,
tokenizePattern = /\s+/
}: {
caseSensitive?: boolean,
indexMode?: IndexMode,
matchAnyToken?: boolean,
tokenizePattern?: RegExp
} = {}
) {
this._caseSensitive = caseSensitive;
this._indexMode = indexMode;
this._matchAnyToken = matchAnyToken;
this._tokenizePattern = tokenizePattern;
this._searchIndex = new SearchIndex();
this._uids = {};
}
/**
* Returns a constant representing the current case-sensitive bit.
*/
getCaseSensitive(): boolean {
return this._caseSensitive;
}
/**
* Returns a constant representing the current index mode.
*/
getIndexMode(): string {
return this._indexMode;
}
/**
* Returns a constant representing the current match-any-token bit.
*/
getMatchAnyToken(): boolean {
return this._matchAnyToken;
}
/**
* Returns a constant representing the current tokenize pattern.
*/
getTokenizePattern(): RegExp {
return this._tokenizePattern;
}
/**
* Adds or updates a uid in the search index and associates it with the specified text.
* Note that at this time uids can only be added or updated in the index, not removed.
*
* @param uid Uniquely identifies a searchable object
* @param text Text to associate with uid
*/
indexDocument = (uid: any, text: string): SearchApiIndex => {
this._uids[uid] = true;
var fieldTokens: Array<string> = this._tokenize(this._sanitize(text));
fieldTokens.forEach(fieldToken => {
var expandedTokens: Array<string> = this._expandToken(fieldToken);
expandedTokens.forEach(expandedToken => {
this._searchIndex.indexDocument(expandedToken, uid);
});
});
return this;
};
/**
* Searches the current index for the specified query text.
* Only uids matching all of the words within the text will be accepted,
* unless matchAny is set to true.
* If an empty query string is provided all indexed uids will be returned.
*
* Document searches are case-insensitive by default (e.g. "search" will match "Search").
* Document searches use substring matching by default (e.g. "na" and "me" will both match "name").
*
* @param query Searchable query text
* @return Array of uids
*/
search = (query: string): Array<any> => {
if (!query) | else {
var tokens: Array<string> = this._tokenize(this._sanitize(query));
return this._searchIndex.search(tokens, this._matchAnyToken);
}
};
/**
* Sets a new case-sensitive bit
*/
setCaseSensitive(caseSensitive: boolean): void {
this._caseSensitive = caseSensitive;
}
/**
* Sets a new index mode.
* See util/constants/INDEX_MODES
*/
setIndexMode(indexMode: IndexMode): void {
if (Object.keys(this._uids).length > 0) {
throw Error(
"indexMode cannot be changed once documents have been indexed"
);
}
this._indexMode = indexMode;
}
/**
* Sets a new match-any-token bit
*/
setMatchAnyToken(matchAnyToken: boolean): void {
this._matchAnyToken = matchAnyToken;
}
/**
* Sets a new tokenize pattern (regular expression)
*/
setTokenizePattern(pattern: RegExp): void {
this._tokenizePattern = pattern;
}
/**
* Added to make class adhere to interface. Add cleanup code as needed.
*/
terminate = () => {};
/**
* Index strategy based on 'all-substrings-index-strategy.ts' in github.com/bvaughn/js-search/
*
* @private
*/
_expandToken(token: string): Array<string> {
switch (this._indexMode) {
case INDEX_MODES.EXACT_WORDS:
return [token];
case INDEX_MODES.PREFIXES:
return this._expandPrefixTokens(token);
case INDEX_MODES.ALL_SUBSTRINGS:
default:
return this._expandAllSubstringTokens(token);
}
}
_expandAllSubstringTokens(token: string): Array<string> {
const expandedTokens = [];
// String.prototype.charAt() may return surrogate halves instead of whole characters.
// When this happens in the context of a web-worker it can cause Chrome to crash.
// Catching the error is a simple solution for now; in the future I may try to better support non-BMP characters.
// Resources:
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/charAt
// https://mathiasbynens.be/notes/javascript-unicode
try {
for (let i = 0, length = token.length; i < length; ++i) {
let substring: string = "";
for (let j = i; j < length; ++j) {
substring += token.charAt(j);
expandedTokens.push(substring);
}
}
} catch (error) {
console.error(`Unable to parse token "${token}" ${error}`);
}
return expandedTokens;
}
_expandPrefixTokens(token: string): Array<string> {
const expandedTokens = [];
// String.prototype.charAt() may return surrogate halves instead of whole characters.
// When this happens in the context of a web-worker it can cause Chrome to crash.
// Catching the error is a simple solution for now; in the future I may try to better support non-BMP characters.
// Resources:
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/charAt
// https://mathiasbynens.be/notes/javascript-unicode
try {
for (let i = 0, length = token.length; i < length; ++i) {
expandedTokens.push(token.substr(0, i + 1));
}
} catch (error) {
console.error(`Unable to parse token "${token}" ${error}`);
}
return expandedTokens;
}
/**
* @private
*/
_sanitize(string: string): string {
return this._caseSensitive
? string.trim()
: string.trim().toLocaleLowerCase();
}
/**
* @private
*/
_tokenize(text: string): Array<string> {
return text.split(this._tokenizePattern).filter(text => text); // Remove empty tokens
}
}
| {
return Object.keys(this._uids);
} | identifier_body |
SearchUtility.js | /** @flow */
import { INDEX_MODES } from "./constants";
import SearchIndex from "./SearchIndex";
import type { IndexMode } from "./constants";
import type { SearchApiIndex } from "../types";
type UidMap = {
[uid: string]: boolean
};
/**
* Synchronous client-side full-text search utility.
* Forked from JS search (github.com/bvaughn/js-search).
*/
export default class SearchUtility implements SearchApiIndex {
_caseSensitive: boolean;
_indexMode: IndexMode;
_matchAnyToken: boolean;
_searchIndex: SearchIndex;
_tokenizePattern: RegExp;
_uids: UidMap;
/**
* Constructor.
*
* @param indexMode See #setIndexMode
* @param tokenizePattern See #setTokenizePattern
* @param caseSensitive See #setCaseSensitive
* @param matchAnyToken See #setMatchAnyToken
*/
constructor(
{
caseSensitive = false,
indexMode = INDEX_MODES.ALL_SUBSTRINGS,
matchAnyToken = false,
tokenizePattern = /\s+/
}: {
caseSensitive?: boolean,
indexMode?: IndexMode,
matchAnyToken?: boolean,
tokenizePattern?: RegExp
} = {}
) {
this._caseSensitive = caseSensitive;
this._indexMode = indexMode;
this._matchAnyToken = matchAnyToken;
this._tokenizePattern = tokenizePattern;
this._searchIndex = new SearchIndex();
this._uids = {};
}
/**
* Returns a constant representing the current case-sensitive bit.
*/
getCaseSensitive(): boolean {
return this._caseSensitive;
}
/**
* Returns a constant representing the current index mode.
*/
getIndexMode(): string {
return this._indexMode;
}
/**
* Returns a constant representing the current match-any-token bit.
*/
getMatchAnyToken(): boolean {
return this._matchAnyToken;
}
/**
* Returns a constant representing the current tokenize pattern.
*/
getTokenizePattern(): RegExp {
return this._tokenizePattern;
}
/**
* Adds or updates a uid in the search index and associates it with the specified text.
* Note that at this time uids can only be added or updated in the index, not removed.
*
* @param uid Uniquely identifies a searchable object
* @param text Text to associate with uid
*/
indexDocument = (uid: any, text: string): SearchApiIndex => {
this._uids[uid] = true;
var fieldTokens: Array<string> = this._tokenize(this._sanitize(text));
fieldTokens.forEach(fieldToken => {
var expandedTokens: Array<string> = this._expandToken(fieldToken);
expandedTokens.forEach(expandedToken => {
this._searchIndex.indexDocument(expandedToken, uid);
});
});
return this;
};
/**
* Searches the current index for the specified query text.
* Only uids matching all of the words within the text will be accepted,
* unless matchAny is set to true.
* If an empty query string is provided all indexed uids will be returned.
*
* Document searches are case-insensitive by default (e.g. "search" will match "Search").
* Document searches use substring matching by default (e.g. "na" and "me" will both match "name").
*
* @param query Searchable query text
* @return Array of uids
*/
search = (query: string): Array<any> => {
if (!query) {
return Object.keys(this._uids);
} else {
var tokens: Array<string> = this._tokenize(this._sanitize(query));
return this._searchIndex.search(tokens, this._matchAnyToken);
}
};
/**
* Sets a new case-sensitive bit
*/
setCaseSensitive(caseSensitive: boolean): void {
this._caseSensitive = caseSensitive;
}
/**
* Sets a new index mode.
* See util/constants/INDEX_MODES
*/
setIndexMode(indexMode: IndexMode): void {
if (Object.keys(this._uids).length > 0) {
throw Error(
"indexMode cannot be changed once documents have been indexed"
);
}
this._indexMode = indexMode;
}
/**
* Sets a new match-any-token bit
*/
setMatchAnyToken(matchAnyToken: boolean): void {
this._matchAnyToken = matchAnyToken;
}
/**
* Sets a new tokenize pattern (regular expression)
*/
setTokenizePattern(pattern: RegExp): void {
this._tokenizePattern = pattern;
}
/**
* Added to make class adhere to interface. Add cleanup code as needed.
*/
terminate = () => {};
/**
* Index strategy based on 'all-substrings-index-strategy.ts' in github.com/bvaughn/js-search/
*
* @private
*/
_expandToken(token: string): Array<string> {
switch (this._indexMode) {
case INDEX_MODES.EXACT_WORDS:
return [token];
case INDEX_MODES.PREFIXES:
return this._expandPrefixTokens(token);
case INDEX_MODES.ALL_SUBSTRINGS:
default:
return this._expandAllSubstringTokens(token);
}
}
_expandAllSubstringTokens(token: string): Array<string> {
const expandedTokens = [];
// String.prototype.charAt() may return surrogate halves instead of whole characters.
// When this happens in the context of a web-worker it can cause Chrome to crash.
// Catching the error is a simple solution for now; in the future I may try to better support non-BMP characters.
// Resources:
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/charAt
// https://mathiasbynens.be/notes/javascript-unicode
try {
for (let i = 0, length = token.length; i < length; ++i) |
} catch (error) {
console.error(`Unable to parse token "${token}" ${error}`);
}
return expandedTokens;
}
_expandPrefixTokens(token: string): Array<string> {
const expandedTokens = [];
// String.prototype.charAt() may return surrogate halves instead of whole characters.
// When this happens in the context of a web-worker it can cause Chrome to crash.
// Catching the error is a simple solution for now; in the future I may try to better support non-BMP characters.
// Resources:
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/charAt
// https://mathiasbynens.be/notes/javascript-unicode
try {
for (let i = 0, length = token.length; i < length; ++i) {
expandedTokens.push(token.substr(0, i + 1));
}
} catch (error) {
console.error(`Unable to parse token "${token}" ${error}`);
}
return expandedTokens;
}
/**
* @private
*/
_sanitize(string: string): string {
return this._caseSensitive
? string.trim()
: string.trim().toLocaleLowerCase();
}
/**
* @private
*/
_tokenize(text: string): Array<string> {
return text.split(this._tokenizePattern).filter(text => text); // Remove empty tokens
}
}
| {
let substring: string = "";
for (let j = i; j < length; ++j) {
substring += token.charAt(j);
expandedTokens.push(substring);
}
} | conditional_block |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.