prompt large_stringlengths 70 991k | completion large_stringlengths 0 1.02k |
|---|---|
<|file_name|>test.py<|end_file_name|><|fim▁begin|>import sys, unittest, os
sys.path.append(os.path.realpath(os.path.dirname(__file__))+'/lib')
tests_folder = os.path.realpath(os.path.dirname(__file__))+'/tests'
from tests import ConsoleTestRunner
def load_tests(loader, tests, pattern):
suite = unittest.TestSuite()
pattern='test_*.py'
for dirname, dirnames, filenames in os.walk(tests_folder):
for path in dirnames:
path=dirname+'/'+path
for all_test_suite in unittest.defaultTestLoader.discover(path, pattern=pattern, top_level_dir=path):<|fim▁hole|> return suite
if __name__ == '__main__':
os.environ['ENVIRONMENT'] = 'test'
unittest.main(verbosity=2, exit=False, testRunner=ConsoleTestRunner)<|fim▁end|> | for test_suite in all_test_suite:
suite.addTest(test_suite)
|
<|file_name|>wagtailimages_tags.py<|end_file_name|><|fim▁begin|>import re
from django import template<|fim▁hole|>from wagtail.images.models import Filter
from wagtail.images.shortcuts import get_rendition_or_not_found
from wagtail.images.views.serve import generate_image_url
register = template.Library()
allowed_filter_pattern = re.compile(r"^[A-Za-z0-9_\-\.]+$")
@register.tag(name="image")
def image(parser, token):
bits = token.split_contents()[1:]
image_expr = parser.compile_filter(bits[0])
bits = bits[1:]
filter_specs = []
attrs = {}
output_var_name = None
as_context = False # if True, the next bit to be read is the output variable name
is_valid = True
for bit in bits:
if bit == 'as':
# token is of the form {% image self.photo max-320x200 as img %}
as_context = True
elif as_context:
if output_var_name is None:
output_var_name = bit
else:
# more than one item exists after 'as' - reject as invalid
is_valid = False
else:
try:
name, value = bit.split('=')
attrs[name] = parser.compile_filter(value) # setup to resolve context variables as value
except ValueError:
if allowed_filter_pattern.match(bit):
filter_specs.append(bit)
else:
raise template.TemplateSyntaxError(
"filter specs in 'image' tag may only contain A-Z, a-z, 0-9, dots, hyphens and underscores. "
"(given filter: {})".format(bit)
)
if as_context and output_var_name is None:
# context was introduced but no variable given ...
is_valid = False
if output_var_name and attrs:
# attributes are not valid when using the 'as img' form of the tag
is_valid = False
if len(filter_specs) == 0:
# there must always be at least one filter spec provided
is_valid = False
if len(bits) == 0:
# no resize rule provided eg. {% image page.image %}
raise template.TemplateSyntaxError(
"no resize rule provided. "
"'image' tag should be of the form {% image self.photo max-320x200 [ custom-attr=\"value\" ... ] %} "
"or {% image self.photo max-320x200 as img %}"
)
if is_valid:
return ImageNode(image_expr, '|'.join(filter_specs), attrs=attrs, output_var_name=output_var_name)
else:
raise template.TemplateSyntaxError(
"'image' tag should be of the form {% image self.photo max-320x200 [ custom-attr=\"value\" ... ] %} "
"or {% image self.photo max-320x200 as img %}"
)
class ImageNode(template.Node):
def __init__(self, image_expr, filter_spec, output_var_name=None, attrs={}):
self.image_expr = image_expr
self.output_var_name = output_var_name
self.attrs = attrs
self.filter_spec = filter_spec
@cached_property
def filter(self):
return Filter(spec=self.filter_spec)
def render(self, context):
try:
image = self.image_expr.resolve(context)
except template.VariableDoesNotExist:
return ''
if not image:
if self.output_var_name:
context[self.output_var_name] = None
return ''
if not hasattr(image, 'get_rendition'):
raise ValueError("image tag expected an Image object, got %r" % image)
rendition = get_rendition_or_not_found(image, self.filter)
if self.output_var_name:
# return the rendition object in the given variable
context[self.output_var_name] = rendition
return ''
else:
# render the rendition's image tag now
resolved_attrs = {}
for key in self.attrs:
resolved_attrs[key] = self.attrs[key].resolve(context)
return rendition.img_tag(resolved_attrs)
@register.simple_tag()
def image_url(image, filter_spec, viewname='wagtailimages_serve'):
try:
return generate_image_url(image, filter_spec, viewname)
except NoReverseMatch:
raise ImproperlyConfigured(
"'image_url' tag requires the " + viewname + " view to be configured. Please see "
"https://docs.wagtail.org/en/stable/advanced_topics/images/image_serve_view.html#setup for instructions."
)<|fim▁end|> | from django.core.exceptions import ImproperlyConfigured
from django.urls import NoReverseMatch
from django.utils.functional import cached_property
|
<|file_name|>TextInputExample.android.js<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2013-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* The examples provided by Facebook are for non-commercial testing and
* evaluation purposes only.
*
* Facebook reserves all rights not expressly granted.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL
* FACEBOOK BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
* AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
* @flow
*/
'use strict';
var React = require('react');
var ReactNative = require('react-native');
var {
Text,
TextInput,
View,
StyleSheet,
} = ReactNative;
var TextEventsExample = React.createClass({
getInitialState: function() {
return {
curText: '<No Event>',
prevText: '<No Event>',
prev2Text: '<No Event>',
};
},
updateText: function(text) {
this.setState((state) => {
return {
curText: text,
prevText: state.curText,
prev2Text: state.prevText,
};
});
},
render: function() {
return (
<View>
<TextInput
autoCapitalize="none"
placeholder="Enter text to see events"
autoCorrect={false}
onFocus={() => this.updateText('onFocus')}
onBlur={() => this.updateText('onBlur')}
onChange={(event) => this.updateText(
'onChange text: ' + event.nativeEvent.text
)}
onEndEditing={(event) => this.updateText(
'onEndEditing text: ' + event.nativeEvent.text
)}
onSubmitEditing={(event) => this.updateText(
'onSubmitEditing text: ' + event.nativeEvent.text
)}
style={styles.singleLine}
/>
<Text style={styles.eventLabel}>
{this.state.curText}{'\n'}
(prev: {this.state.prevText}){'\n'}
(prev2: {this.state.prev2Text})
</Text>
</View>
);
}
});
class AutoExpandingTextInput extends React.Component {
constructor(props) {
super(props);
this.state = {
text: 'React Native enables you to build world-class application experiences on native platforms using a consistent developer experience based on JavaScript and React. The focus of React Native is on developer efficiency across all the platforms you care about — learn once, write anywhere. Facebook uses React Native in multiple production apps and will continue investing in React Native.',
height: 0,
};
}
render() {
return (
<TextInput
{...this.props}
multiline={true}
onContentSizeChange={(event) => {
this.setState({height: event.nativeEvent.contentSize.height});
}}
onChangeText={(text) => {
this.setState({text});
}}
style={[styles.default, {height: Math.max(35, this.state.height)}]}
value={this.state.text}
/>
);
}
}
class RewriteExample extends React.Component {
constructor(props) {
super(props);
this.state = {text: ''};
}
render() {
var limit = 20;
var remainder = limit - this.state.text.length;
var remainderColor = remainder > 5 ? 'blue' : 'red';
return (
<View style={styles.rewriteContainer}>
<TextInput
multiline={false}
maxLength={limit}
onChangeText={(text) => {
text = text.replace(/ /g, '_');
this.setState({text});
}}
style={styles.default}
value={this.state.text}
/>
<Text style={[styles.remainder, {color: remainderColor}]}>
{remainder}
</Text>
</View>
);
}
}
class TokenizedTextExample extends React.Component {
constructor(props) {
super(props);
this.state = {text: 'Hello #World'};
}
render() {
//define delimiter
let delimiter = /\s+/;
//split string
let _text = this.state.text;
let token, index, parts = [];
while (_text) {
delimiter.lastIndex = 0;
token = delimiter.exec(_text);
if (token === null) {
break;
}
index = token.index;
if (token[0].length === 0) {
index = 1;
}
parts.push(_text.substr(0, index));
parts.push(token[0]);
index = index + token[0].length;
_text = _text.slice(index);
}
parts.push(_text);
//highlight hashtags
parts = parts.map((text) => {
if (/^#/.test(text)) {
return <Text key={text} style={styles.hashtag}>{text}</Text>;
} else {
return text;
}
});
return (
<View>
<TextInput
multiline={true}
style={styles.multiline}
onChangeText={(text) => {
this.setState({text});
}}>
<Text>{parts}</Text>
</TextInput>
</View>
);
}
}
var BlurOnSubmitExample = React.createClass({
focusNextField(nextField) {
this.refs[nextField].focus();
},
render: function() {
return (
<View>
<TextInput
ref="1"
style={styles.singleLine}
placeholder="blurOnSubmit = false"
returnKeyType="next"
blurOnSubmit={false}
onSubmitEditing={() => this.focusNextField('2')}
/>
<TextInput
ref="2"
style={styles.singleLine}
keyboardType="email-address"
placeholder="blurOnSubmit = false"
returnKeyType="next"
blurOnSubmit={false}
onSubmitEditing={() => this.focusNextField('3')}
/>
<TextInput
ref="3"
style={styles.singleLine}
keyboardType="url"
placeholder="blurOnSubmit = false"
returnKeyType="next"
blurOnSubmit={false}
onSubmitEditing={() => this.focusNextField('4')}
/>
<TextInput
ref="4"
style={styles.singleLine}
keyboardType="numeric"
placeholder="blurOnSubmit = false"
blurOnSubmit={false}
onSubmitEditing={() => this.focusNextField('5')}
/>
<TextInput
ref="5"
style={styles.singleLine}
keyboardType="numbers-and-punctuation"
placeholder="blurOnSubmit = true"
returnKeyType="done"
/>
</View>
);
}
});
var styles = StyleSheet.create({
multiline: {
height: 60,
fontSize: 16,
padding: 4,
marginBottom: 10,
},
eventLabel: {
margin: 3,
fontSize: 12,
},
singleLine: {
fontSize: 16,
padding: 4,
},
singleLineWithHeightTextInput: {
height: 30,
},
hashtag: {
color: 'blue',
fontWeight: 'bold',
},
});
exports.title = '<TextInput>';
exports.description = 'Single and multi-line text inputs.';
exports.examples = [
{
title: 'Auto-focus',
render: function() {
return (
<TextInput
autoFocus={true}
style={styles.singleLine}
accessibilityLabel="I am the accessibility label for text input"
/>
);
}
},
{
title: "Live Re-Write (<sp> -> '_')",
render: function() {
return <RewriteExample />;
}
},
{
title: 'Auto-capitalize',
render: function() {
var autoCapitalizeTypes = [
'none',
'sentences',
'words',
'characters',
];
var examples = autoCapitalizeTypes.map((type) => {
return (
<TextInput
key={type}
autoCapitalize={type}
placeholder={'autoCapitalize: ' + type}
style={styles.singleLine}
/>
);
});
return <View>{examples}</View>;
}
},
{
title: 'Auto-correct',
render: function() {
return (
<View><|fim▁hole|> autoCorrect={true}
placeholder="This has autoCorrect"
style={styles.singleLine}
/>
<TextInput
autoCorrect={false}
placeholder="This does not have autoCorrect"
style={styles.singleLine}
/>
</View>
);
}
},
{
title: 'Keyboard types',
render: function() {
var keyboardTypes = [
'default',
'email-address',
'numeric',
'phone-pad',
];
var examples = keyboardTypes.map((type) => {
return (
<TextInput
key={type}
keyboardType={type}
placeholder={'keyboardType: ' + type}
style={styles.singleLine}
/>
);
});
return <View>{examples}</View>;
}
},
{
title: 'Blur on submit',
render: function(): ReactElement { return <BlurOnSubmitExample />; },
},
{
title: 'Event handling',
render: function(): ReactElement { return <TextEventsExample />; },
},
{
title: 'Colors and text inputs',
render: function() {
return (
<View>
<TextInput
style={[styles.singleLine]}
defaultValue="Default color text"
/>
<TextInput
style={[styles.singleLine, {color: 'green'}]}
defaultValue="Green Text"
/>
<TextInput
placeholder="Default placeholder text color"
style={styles.singleLine}
/>
<TextInput
placeholder="Red placeholder text color"
placeholderTextColor="red"
style={styles.singleLine}
/>
<TextInput
placeholder="Default underline color"
style={styles.singleLine}
/>
<TextInput
placeholder="Blue underline color"
style={styles.singleLine}
underlineColorAndroid="blue"
/>
<TextInput
defaultValue="Same BackgroundColor as View "
style={[styles.singleLine, {backgroundColor: 'rgba(100, 100, 100, 0.3)'}]}>
<Text style={{backgroundColor: 'rgba(100, 100, 100, 0.3)'}}>
Darker backgroundColor
</Text>
</TextInput>
<TextInput
defaultValue="Highlight Color is red"
selectionColor={'red'}
style={styles.singleLine}>
</TextInput>
</View>
);
}
},
{
title: 'Text input, themes and heights',
render: function() {
return (
<TextInput
placeholder="If you set height, beware of padding set from themes"
style={[styles.singleLineWithHeightTextInput]}
/>
);
}
},
{
title: 'fontFamily, fontWeight and fontStyle',
render: function() {
return (
<View>
<TextInput
style={[styles.singleLine, {fontFamily: 'sans-serif'}]}
placeholder="Custom fonts like Sans-Serif are supported"
/>
<TextInput
style={[styles.singleLine, {fontFamily: 'sans-serif', fontWeight: 'bold'}]}
placeholder="Sans-Serif bold"
/>
<TextInput
style={[styles.singleLine, {fontFamily: 'sans-serif', fontStyle: 'italic'}]}
placeholder="Sans-Serif italic"
/>
<TextInput
style={[styles.singleLine, {fontFamily: 'serif'}]}
placeholder="Serif"
/>
</View>
);
}
},
{
title: 'Passwords',
render: function() {
return (
<View>
<TextInput
defaultValue="iloveturtles"
secureTextEntry={true}
style={styles.singleLine}
/>
<TextInput
secureTextEntry={true}
style={[styles.singleLine, {color: 'red'}]}
placeholder="color is supported too"
placeholderTextColor="red"
/>
</View>
);
}
},
{
title: 'Editable',
render: function() {
return (
<TextInput
defaultValue="Can't touch this! (>'-')> ^(' - ')^ <('-'<) (>'-')> ^(' - ')^"
editable={false}
style={styles.singleLine}
/>
);
}
},
{
title: 'Multiline',
render: function() {
return (
<View>
<TextInput
autoCorrect={true}
placeholder="multiline, aligned top-left"
placeholderTextColor="red"
multiline={true}
style={[styles.multiline, {textAlign: "left", textAlignVertical: "top"}]}
/>
<TextInput
autoCorrect={true}
placeholder="multiline, aligned center"
placeholderTextColor="green"
multiline={true}
style={[styles.multiline, {textAlign: "center", textAlignVertical: "center"}]}
/>
<TextInput
autoCorrect={true}
multiline={true}
style={[styles.multiline, {color: 'blue'}, {textAlign: "right", textAlignVertical: "bottom"}]}>
<Text style={styles.multiline}>multiline with children, aligned bottom-right</Text>
</TextInput>
</View>
);
}
},
{
title: 'Fixed number of lines',
platform: 'android',
render: function() {
return (
<View>
<TextInput numberOfLines={2}
multiline={true}
placeholder="Two line input"
/>
<TextInput numberOfLines={5}
multiline={true}
placeholder="Five line input"
/>
</View>
);
}
},
{
title: 'Auto-expanding',
render: function() {
return (
<View>
<AutoExpandingTextInput
placeholder="height increases with content"
enablesReturnKeyAutomatically={true}
returnKeyType="done"
/>
</View>
);
}
},
{
title: 'Attributed text',
render: function() {
return <TokenizedTextExample />;
}
},
{
title: 'Return key',
render: function() {
var returnKeyTypes = [
'none',
'go',
'search',
'send',
'done',
'previous',
'next',
];
var returnKeyLabels = [
'Compile',
'React Native',
];
var examples = returnKeyTypes.map((type) => {
return (
<TextInput
key={type}
returnKeyType={type}
placeholder={'returnKeyType: ' + type}
style={styles.singleLine}
/>
);
});
var types = returnKeyLabels.map((type) => {
return (
<TextInput
key={type}
returnKeyLabel={type}
placeholder={'returnKeyLabel: ' + type}
style={styles.singleLine}
/>
);
});
return <View>{examples}{types}</View>;
}
},
{
title: 'Inline Images',
render: function() {
return (
<View>
<TextInput
inlineImageLeft="ic_menu_black_24dp"
placeholder="This has drawableLeft set"
style={styles.singleLine}
/>
<TextInput
inlineImageLeft="ic_menu_black_24dp"
inlineImagePadding={30}
placeholder="This has drawableLeft and drawablePadding set"
style={styles.singleLine}
/>
<TextInput
placeholder="This does not have drawable props set"
style={styles.singleLine}
/>
</View>
);
}
},
];<|fim▁end|> | <TextInput |
<|file_name|>drawing.rs<|end_file_name|><|fim▁begin|>use std::char;
use std::cmp::{max, min};
use std::collections::VecDeque;
use num::{Complex, Float};
use rustty::{Attr, Color, Terminal, Cell, CellAccessor, HasSize};
use rustty::ui::{Alignable, Widget, VerticalAlign, HorizontalAlign};
use itertools::{Itertools, EitherOrBoth};
use std::io;
pub struct Canvas {
term: Terminal,
spectrum: Widget,
waterfall: Widget,
history: VecDeque<Vec<f32>>,
}
impl Canvas {
pub fn new() -> Result<Self, io::Error> {
let term = try!(Terminal::new());
let mut canvas = Canvas {
term: term,
spectrum: Widget::new(0, 0),
waterfall: Widget::new(0, 0),
history: VecDeque::new(),
};
canvas.resize();
Ok(canvas)
}
fn resize(&mut self) {
let (cols, rows) = self.term.size();
let spectrum_height = rows / 2;
let waterfall_height = if rows % 2 == 0 { rows / 2 } else { rows / 2 + 1 };
self.spectrum = Widget::new(cols, spectrum_height);
self.spectrum.align(&self.term, HorizontalAlign::Middle, VerticalAlign::Top, 0);
self.waterfall = Widget::new(cols, waterfall_height);
self.waterfall.align(&self.term, HorizontalAlign::Middle, VerticalAlign::Bottom, 0);
self.history.reserve(waterfall_height * 2);
}
fn check_and_resize(&mut self) {
let (cols, rows) = self.term.size();
let (spectrum_cols, spectrum_rows) = self.spectrum.size();
let (waterfall_cols, waterfall_rows) = self.waterfall.size();
// if the terminal size has changed...
if cols != spectrum_cols || cols != waterfall_cols ||
rows != (spectrum_rows + waterfall_rows) {
self.resize();
}
}
/// Adds a spectrum to the history and draws it on the waterfall
/// and the spectrum view.
pub fn add_spectrum(&mut self, spec: Vec<Complex<f32>>) {
let normalized = normalize_spectrum(&spec, 50.0);
draw_spectrum(&mut self.spectrum, &normalized);
// Since the waterfall has half the horizontal resolution of the spectrum view,
// average every two values and store the averaged spectrum.
let averaged = normalized.chunks(2).map(|v| (v[0] + v[1]) / 2.0).collect();
// push spectrum onto the history
self.history.push_front(averaged);
let (_, rows) = self.waterfall.size();
if self.history.len() >= rows * 2 {
self.history.pop_back();
}
draw_waterfall(&mut self.waterfall, &self.history);
self.spectrum.draw_into(&mut self.term);
self.waterfall.draw_into(&mut self.term);
self.term.swap_buffers().unwrap();
self.check_and_resize();
}
pub fn get_term(&mut self) -> &mut Terminal {
&mut self.term
}
pub fn get_spectrum_width(&self) -> usize {
2 * self.term.cols()
}
}
fn draw_waterfall<T: CellAccessor + HasSize>(canvas: &mut T, spectra: &VecDeque<Vec<f32>>) {
let (cols, rows) = canvas.size();
for (row, mut specs) in (0..rows).zip(&spectra.iter().chunks_lazy(2)) {
let upper_heights = specs.next().into_iter().flat_map(|x| x);
let lower_heights = specs.next().into_iter().flat_map(|x| x);
for (c, heights) in (0..cols).zip(upper_heights.zip_longest(lower_heights)) {
let (u, l) = match heights {
EitherOrBoth::Both(&upper, &lower) => (upper, lower),
EitherOrBoth::Left(&upper) => (upper, 0.0),
EitherOrBoth::Right(&lower) => (0.0, lower),
};
*canvas.get_mut(c, row).unwrap() = spectrum_heights_to_waterfall_cell(u, l);
}
}
}
fn spectrum_heights_to_waterfall_cell(upper: f32, lower: f32) -> Cell {
Cell::new('▀',
Color::Byte(color_mapping(upper)),
Color::Byte(color_mapping(lower)),
Attr::Default)
}
/// Assumes `f` is between 0 and 1. Anything outside of this range
/// will be clamped.
fn color_mapping(f: f32) -> u8 {
let mapping = [16, 17, 18, 19, 21, 27, 33, 39, 45, 51,
50, 49, 48, 47, 46, 82, 118, 154, 190, 226];
let idx = (f * (mapping.len() as f32)) as i32;
if idx < 0 {
mapping[0]
} else if idx >= mapping.len() as i32 {
mapping[mapping.len() - 1]
} else {
mapping[idx as usize]
}
}
fn normalize_spectrum(spec: &[Complex<f32>], max_db: f32) -> Vec<f32> {
// FFT shift
let (first_half, last_half) = spec.split_at((spec.len() + 1) / 2);
let shifted_spec = last_half.iter().chain(first_half.iter());
// normalize and take the log
shifted_spec.map(Complex::norm)
.map(Float::log10)
.map(|x| 10.0 * x)<|fim▁hole|>// indexing is from the top of the cell
fn pixel_nums_to_braille(p1: Option<u8>, p2: Option<u8>) -> char {
let pixel_map = [[0x01, 0x08],
[0x02, 0x10],
[0x04, 0x20],
[0x40, 0x80]];
let mut c = 0;
if let Some(p) = p1 {
for i in p..4 {
c |= pixel_map[i as usize][0];
}
}
if let Some(p) = p2 {
for i in p..4 {
c |= pixel_map[i as usize][1];
}
}
char::from_u32((0x2800 + c) as u32).unwrap()
}
fn char_to_cell(c: char) -> Cell {
let mut cell = Cell::with_char(c);
cell.set_attrs(Attr::Bold);
cell
}
fn draw_pixel_pair<T>(canvas: &mut T, col_idx: usize, p1: usize, p2: usize)
where T: CellAccessor + HasSize
{
let (_, rows) = canvas.size();
let max_pixel_height = 4 * rows;
// clamp heights
let p1 = if p1 >= max_pixel_height { max_pixel_height - 1} else { p1 };
let p2 = if p2 >= max_pixel_height { max_pixel_height - 1} else { p2 };
// Reverse it, since the terminal indexing is from the top
let p1 = max_pixel_height - p1 - 1;
let p2 = max_pixel_height - p2 - 1;
// cell indices
let c1 = p1 / 4;
let c2 = p2 / 4;
// Fill in full height cells.
let full_cell_char = pixel_nums_to_braille(Some(0), Some(0));
for row_idx in max(c1, c2)..rows {
*canvas.get_mut(col_idx, row_idx).unwrap() = char_to_cell(full_cell_char);
}
let left_fill_cell_char = pixel_nums_to_braille(Some(0), None);
for row_idx in min(c1, c2)..c2 {
*canvas.get_mut(col_idx, row_idx).unwrap() = char_to_cell(left_fill_cell_char);
}
let right_fill_cell_char = pixel_nums_to_braille(None, Some(0));
for row_idx in min(c1, c2)..c1 {
*canvas.get_mut(col_idx, row_idx).unwrap() = char_to_cell(right_fill_cell_char);
}
// Now fill in partial height cells.
if c1 == c2 {
// top pixels are in the same cell
*canvas.get_mut(col_idx, c1).unwrap() = char_to_cell(
pixel_nums_to_braille(Some((p1 % 4) as u8), Some((p2 % 4) as u8)));
} else if c1 > c2 {
// right pixel is in a higher cell.
*canvas.get_mut(col_idx, c1).unwrap() = char_to_cell(
pixel_nums_to_braille(Some((p1 % 4) as u8), Some(0)));
*canvas.get_mut(col_idx, c2).unwrap() = char_to_cell(
pixel_nums_to_braille(None, Some((p2 % 4) as u8)));
} else {
// left pixel is in a higher cell.
*canvas.get_mut(col_idx, c1).unwrap() = char_to_cell(
pixel_nums_to_braille(Some((p1 % 4) as u8), None));
*canvas.get_mut(col_idx, c2).unwrap() = char_to_cell(
pixel_nums_to_braille(Some(0), Some((p2 % 4) as u8)));
}
}
fn draw_spectrum<T: CellAccessor + HasSize>(canvas: &mut T, spec: &[f32]) {
canvas.clear(Cell::default());
let (num_cols, num_rows) = canvas.size();
let pixel_height = num_rows * 4;
for (col_idx, chunk) in (0..num_cols).zip(spec.chunks(2)) {
// height in float between 0 and 1.
let h1 = chunk[0];
let h2 = chunk[1];
// The "pixel" height of each point.
let p1 = (h1 * pixel_height as f32).floor().max(0.0) as usize;
let p2 = (h2 * pixel_height as f32).floor().max(0.0) as usize;
draw_pixel_pair(canvas, col_idx, p1, p2);
}
}
#[cfg(test)]
mod tests {
use super::{pixel_nums_to_braille, draw_pixel_pair};
use rustty::Terminal;
#[test]
fn test_pixel_nums() {
assert_eq!(pixel_nums_to_braille(Some(0), Some(0)), '⣿');
assert_eq!(pixel_nums_to_braille(Some(1), Some(2)), '⣦');
assert_eq!(pixel_nums_to_braille(None, Some(3)), '⢀');
assert_eq!(pixel_nums_to_braille(Some(2), None), '⡄');
assert_eq!(pixel_nums_to_braille(None, None), '⠀');
}
#[test]
fn test_draw_pixel_pair() {
let mut term = Terminal::new().unwrap();
// Test drawing with the same top cell
draw_pixel_pair(&mut term, 0, 4, 6);
assert_eq!(term[(0, term.rows() - 3)].ch(), ' ');
assert_eq!(term[(0, term.rows() - 2)].ch(), '⣰');
assert_eq!(term[(0, term.rows() - 1)].ch(), '⣿');
term.clear().unwrap();
// Test drawing with the top pixel in each column being in
// different cells
draw_pixel_pair(&mut term, 0, 4, 8);
assert_eq!(term[(0, term.rows() - 4)].ch(), ' ');
assert_eq!(term[(0, term.rows() - 3)].ch(), '⢀');
assert_eq!(term[(0, term.rows() - 2)].ch(), '⣸');
assert_eq!(term[(0, term.rows() - 1)].ch(), '⣿');
term.clear().unwrap();
draw_pixel_pair(&mut term, 1, 13, 2);
assert_eq!(term[(1, term.rows() - 5)].ch(), ' ');
assert_eq!(term[(1, term.rows() - 4)].ch(), '⡄');
assert_eq!(term[(1, term.rows() - 3)].ch(), '⡇');
assert_eq!(term[(1, term.rows() - 2)].ch(), '⡇');
assert_eq!(term[(1, term.rows() - 1)].ch(), '⣷');
term.clear().unwrap();
}
}<|fim▁end|> | .map(|x| x / max_db)
.collect()
}
|
<|file_name|>test_windowwithcount.py<|end_file_name|><|fim▁begin|>import unittest
from rx.observable import Observable
from rx.testing import TestScheduler, ReactiveTest<|fim▁hole|>on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
class TestWindowWithCount(unittest.TestCase):
def test_window_with_count_basic(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(100, 1), on_next(210, 2), on_next(240, 3), on_next(280, 4), on_next(320, 5), on_next(350, 6), on_next(380, 7), on_next(420, 8), on_next(470, 9), on_completed(600))
def create():
def proj(w, i):
return w.map(lambda x: str(i) + ' ' + str(x))
return xs.window_with_count(3, 2).map(proj).merge_observable()
results = scheduler.start(create)
results.messages.assert_equal(on_next(210, "0 2"), on_next(240, "0 3"), on_next(280, "0 4"), on_next(280, "1 4"), on_next(320, "1 5"), on_next(350, "1 6"), on_next(350, "2 6"), on_next(380, "2 7"), on_next(420, "2 8"), on_next(420, "3 8"), on_next(470, "3 9"), on_completed(600))
xs.subscriptions.assert_equal(subscribe(200, 600))
def test_window_with_count_disposed(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(100, 1), on_next(210, 2), on_next(240, 3), on_next(280, 4), on_next(320, 5), on_next(350, 6), on_next(380, 7), on_next(420, 8), on_next(470, 9), on_completed(600))
def create():
def proj(w, i):
return w.map(lambda x: str(i) + ' ' + str(x))
return xs.window_with_count(3, 2).map(proj).merge_observable()
results = scheduler.start(create, disposed=370)
results.messages.assert_equal(on_next(210, "0 2"), on_next(240, "0 3"), on_next(280, "0 4"), on_next(280, "1 4"), on_next(320, "1 5"), on_next(350, "1 6"), on_next(350, "2 6"))
xs.subscriptions.assert_equal(subscribe(200, 370))
def test_window_with_count_error(self):
ex = 'ex'
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(100, 1), on_next(210, 2), on_next(240, 3), on_next(280, 4), on_next(320, 5), on_next(350, 6), on_next(380, 7), on_next(420, 8), on_next(470, 9), on_error(600, ex))
def create():
def selector(w, i):
def mapping(x):
return "%s %s" % (i, x)
return w.map(mapping)
return xs.window_with_count(3, 2).map(selector).merge_observable()
results = scheduler.start(create)
results.messages.assert_equal(on_next(210, "0 2"), on_next(240, "0 3"), on_next(280, "0 4"), on_next(280, "1 4"), on_next(320, "1 5"), on_next(350, "1 6"), on_next(350, "2 6"), on_next(380, "2 7"), on_next(420, "2 8"), on_next(420, "3 8"), on_next(470, "3 9"), on_error(600, ex))
xs.subscriptions.assert_equal(subscribe(200, 600))<|fim▁end|> | from rx.disposables import Disposable, SerialDisposable
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed |
<|file_name|>nsis.js<|end_file_name|><|fim▁begin|>/* ***** BEGIN LICENSE BLOCK *****
* Distributed under the BSD license:
*
* Copyright (c) 2012, Ajax.org B.V.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Ajax.org B.V. nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL AJAX.ORG B.V. BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES<|fim▁hole|> * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* ***** END LICENSE BLOCK ***** */
/*
THIS FILE WAS AUTOGENERATED BY mode.tmpl.js
*/
define(function(require, exports, module) {
"use strict";
var oop = require("../lib/oop");
var TextMode = require("./text").Mode;
var NSISHighlightRules = require("./nsis_highlight_rules").NSISHighlightRules;
// TODO: pick appropriate fold mode
var FoldMode = require("./folding/cstyle").FoldMode;
var Mode = function() {
this.HighlightRules = NSISHighlightRules;
this.foldingRules = new FoldMode();
};
oop.inherits(Mode, TextMode);
(function() {
this.lineCommentStart = [";", "#"];
this.blockComment = {start: "/*", end: "*/"};
this.$id = "ace/mode/nsis";
}).call(Mode.prototype);
exports.Mode = Mode;
});<|fim▁end|> | * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND |
<|file_name|>algorithm_base.py<|end_file_name|><|fim▁begin|>__author__ = 'jdaniel'
import copy
import random
import itertools
import operator
import math
import struct
import os
import sys
import json
from collections import defaultdict
class AlgorithmBase(object):
def __init__(self, objective_function):
"""
Base Algorithm class which contains utility functionality
common to all other algorithms and acts as the standalone
API for Algorithm usage.
:param objective_function: <function> The model function to be used
def my_objective(x):
f = list_of_objective_values
h = list_of_equality_constraint_values
g = list_of_inequality_constraint_values
return [f,h,g]
:return: None
"""
self._objective_function = objective_function
self._variables = []
self._equality_constraints = []
self._inequality_constraints = []
self._objectives = []
# Algorithm Options
self._pop_size = None
self._generations = None
self._conv_tol = None
self._eqcon_tol = None
self._seed = None
self._eta_c = None
self._eta_m = None
self._p_cross = None
self._p_mut = None
self._islands = None
self._epoch = None
self._migrants = None
self._spheres = None
# Problem information
self._ndim = None
self._neqcon = None
self._nneqcon = None
self._lower_bound = []
self._upper_bound = []
# Data objects
self._history = History()
self._archive = Archive()
self._metadata = Metadata()
# Random number generator
self._rnd = random.Random()
def register_variable(self, name, lower, upper):
"""
Register a decision variable with the algorithm
:param name: <string> Reference name of the decision variable
:param lower: <float> Lower bound for the variable
:param upper: <float> Upper bound for the variable
:return: None
"""
var = Variable(name, lower, upper)
self._variables.append(var)
def register_constraint(self, name, ctype):
"""
Register a constraint variable with the algorithm
:param name: <string> Reference name of the constraint variable
:param ctype: <string> Set constraint type, 'e': equality constraint; 'i': inequality constraint
:return: None
"""
con = Constraint(name)
if ctype == 'e':
self._equality_constraints.append(con)
elif ctype == 'i':
self._inequality_constraints.append(con)
else:
err_msg = 'Unrecognized constraint type ' + repr(ctype)
raise AlgorithmException(err_msg)
def register_objective(self, name):
"""
Register an objective variable with the algorithm
:param name: <string> Reference name of the objective variable
:return: None
"""
obj = Objective(name)
self._objectives.append(obj)
def set_options(self, option, value):
"""
Set an algorithm option value
:param option: <string> Name of the option to set
:param value: <int, float> Value of the option to set
:return: None
"""
if option == 'population_size':
self.check_population_size(value)
self._pop_size = value
elif option == 'generations':
self.check_generations(value)
self._generations = value
elif option == 'conv_tol':
self.check_conv_tol(value)
self._conv_tol = value
elif option == 'eqcon_tol':
self.check_eqcon_tol(value)
self._eqcon_tol = value
elif option == 'eta_c':
self.check_eta_c(value)
self._eta_c = value
elif option == 'eta_m':
self.check_eta_m(value)
self._eta_m = value
elif option == 'p_cross':
self.check_p_cross(value)
self._p_cross = value
elif option == 'p_mut':
self.check_p_mut(value)
self._p_mut = value
elif option == 'islands':
self.check_islands(value)
self._islands = value
elif option == 'epoch':
self.check_epoch(value)
self._epoch = value
elif option == 'migrants':<|fim▁hole|> self.check_spheres(value)
self._spheres = value
elif option == 'seed':
self.set_seed(value)
else:
err_msg = 'Unrecognized option ' + repr(option)
raise AlgorithmException(err_msg)
def set_seed(self, value):
"""
Set the seed value for the optimisation
:param value: Value to set
:return: None
"""
if value == 0:
self._seed = struct.unpack("<L", os.urandom(4))[0]
else:
self._seed = value
self._rnd.seed(self._seed)
@staticmethod
def check_population_size(value):
"""
Check the population value
:param value: Value to set
:return:
"""
# Check if integer
if not isinstance(value, (int, long)):
err_msg = 'Population is not an integer'
raise AlgorithmException(err_msg)
# Check if greater than zero
if value <= 0:
err_msg = 'Population size must be greater than zero'
raise AlgorithmException(err_msg)
# Check if divisible by 4
if value % 4 != 0:
err_msg = 'Population size must be evenly divisible by four'
raise AlgorithmException(err_msg)
@staticmethod
def check_generations(value):
"""
Check the generations value
:param value: Value to set
:return: None
"""
if value <= 0:
err_msg = 'The generations value but be an integer greater than 0'
raise AlgorithmException(err_msg)
@staticmethod
def check_conv_tol(value):
"""
Check the convergence tolerance value
:param value: Value to set
:return: None
"""
# Check if between (0.0, 1.0)
if value >= 1.0 or value <= 0.0:
err_msg = 'The convergence tolerance value conv_tol must be between (0.0, 1.0)'
raise AlgorithmException(err_msg)
@staticmethod
def check_eqcon_tol(value):
"""
Check the equality constraint tolerance value
:param value: Value to set
:return: None
"""
# Check if greater than 0
if value <= 0.0:
err_msg = 'The equality constraint tolerance value eqcon_tol must be greater than 0'
raise AlgorithmException(err_msg)
@staticmethod
def check_eta_c(value):
"""
Check the crossover distribution index value
:param value: Value to set
:return: None
"""
# Check if greater than zero
if value <= 0:
err_msg = 'The crossover distribution index eta_c must be greater than zero'
raise AlgorithmException(err_msg)
@staticmethod
def check_eta_m(value):
"""
Check the mutation distribution index value
:param value: Value to set
:return: None
"""
# Check if greater than zero
if value <= 0:
err_msg = 'The mutation distribution index eta_m must be greater than zero'
raise AlgorithmException(err_msg)
@staticmethod
def check_p_cross(value):
"""
Check the crossover probability value
:param value: Value to set
:return: None
"""
# Check if between (0.0, 1.0)
if value < 0.0 or value > 1.0:
err_msg = 'The crossover probability p_cross must be between 0.0 and 1.0'
raise AlgorithmException(err_msg)
@staticmethod
def check_p_mut(value):
"""
Check the mutation probability value
:param value: Value to set
:return: None
"""
# Check if between (0.0, 1.0)
if value < 0.0 or value > 1.0:
err_msg = 'The mutation probability p_mut must be between 0.0 and 1.0'
raise AlgorithmException(err_msg)
@staticmethod
def check_islands(value):
"""
Check the number of islands
:param value: Value to set
:return: None
"""
# Check greater than zero
if value <= 0:
err_msg = 'Number of islands must be a positive integer greater than zero'
raise AlgorithmException(err_msg)
@staticmethod
def check_spheres(value):
"""
Check the number of spheres
:param value: Value to set
:return: None
"""
if value <= 0:
err_msg = 'Number of spheres must be a positive integer greater than zero'
raise AlgorithmException(err_msg)
@staticmethod
def check_epoch(value):
"""
Check the epoch rate
:param value: Value to set
:return: None
"""
if value <= 0:
err_msg = 'The epoch rate must be a positive integer greater than zero'
raise AlgorithmException(err_msg)
@staticmethod
def check_migrants(value):
"""
Check the migrants value
:param value: Value to set
:return: None
"""
if value < 0:
err_msg = 'The number of migrants must be zero or greater'
raise AlgorithmException(err_msg)
def setup_variables(self):
"""
Get information about the model once all the variables
have been added.
:return: None
"""
self._ndim = len(self._variables)
self._neqcon = len(self._equality_constraints)
self._nneqcon = len(self._inequality_constraints)
for var in self._variables:
self._lower_bound.append(var.lower)
self._upper_bound.append(var.upper)
def evaluate_population(self, population):
"""
Evaluate a population
:param population: <Population> Population to evaluate
:return: None
"""
for ind in population:
self.evaluate(ind)
def evaluate(self, individual):
"""
Evaluate an individual
:param individual: <Individual> Individual to evaluate
:return: None
"""
f, h, g = self._objective_function(individual.x)
individual.f = f
individual.h = h
individual.g = g
# Calculate the constraint violation
s = 0.0
for i in xrange(self._neqcon):
s += math.fabs(h[i]) - self._eqcon_tol
for i in xrange(self._nneqcon):
s += max(0.0, g[i])
self._history.add_point(individual)
class Individual(object):
def __init__(self):
"""
Class for holding information and methods related to the concept
of an individual.
:return: None
"""
# Decision variables
self.x = None
# Objective variables
self.f = None
# Equality constraint variables
self.h = None
# Inequality constraint variables
self.g = None
# Distance metric
self.d = None
# Constraint violation
self.s = None
# Domination count
self.n = None
# Rank
self.r = None
# ID
self.id = None
def dominates(self, other):
"""
Method to determine if the individual dominates another individual using
the constraint dominance approach.
:param other: <Individual> Other individual to test against
:return: <bool> True if self dominates other, False if other dominates self or both are equal
"""
not_equal = False
flag1 = 0
flag2 = 0
if self.s > 0:
flag1 = 1
if other.s > 0:
flag2 = 1
# Both individuals are invalid
if flag1 == 1 and flag2 == 1:
if self.s < other.s:
return True
else:
return False
# One of the individuals is invalid
elif flag1 ^ flag2:
if flag1:
return False
else:
return True
# Both individuals are valid
else:
for self_fit, other_fit in zip(self.f, other.f):
if self_fit > other_fit:
return False
elif self_fit < other_fit:
not_equal = True
return not_equal
def __hash__(self):
"""
Return a hash based on the objective values of the individuals decision values
:return: Hash for the individual
"""
return hash(repr(self.x))
def __repr__(self):
s = 'ID: ' + repr(self.id) + '\n'
s += 'x: ' + repr(self.x) + '\n'
s += 'f: ' + repr(self.f) + '\n'
if self.h is not None:
s += 'h: ' + repr(self.h) + '\n'
if self.g is not None:
s += 'g: ' + repr(self.g) + '\n'
if self.d is not None:
s += 'd: ' + repr(self.d) + '\n'
if self.s is not None:
s += 's: ' + repr(self.s) + '\n'
if self.r is not None:
s += 'r: ' + repr(self.r) + '\n'
return s
def __eq__(self, other):
for self_x, other_x in zip(self.x, other.x):
if self_x != other_x:
return False
return True
class Population(list):
def __init__(self):
super(Population, self).__init__()
def populate(self, individuals):
"""
Populate the population with a list of individuals
:param individuals: <List<Individual>> List of individuals to use
:return: None
"""
for ind in individuals:
self.append(copy.deepcopy(ind))
def to_json(self):
return json.dumps(self, default=lambda o: o.__dict__)
def __repr__(self):
s = ''
for idx, ind in enumerate(self):
s += repr(ind) + '\n'
return s
class SubPopulation(list):
def __init__(self):
super(SubPopulation, self).__init__()
def populate(self, individuals):
"""
Populate the sub-population with a list of individuals
:param individuals: <List<Individual>> List of individuals to use
:return: None
"""
for ind in individuals:
self.append(copy.deepcopy(ind))
class Archive(object):
def __init__(self):
"""
Optimization run archive of non-dominated solutions at each generation
which is used to predict convergence of the algorithm.
:return: None
"""
# Tracks the archive of non-dominated solutions
self._archive = []
# Tracks the size of the non-dominated archive
self._idx = 0
# Tracks the consolidation ratio
self._consolidation_ratio = []
# Population size
self._population_size = None
def initialize(self, population):
"""
Initialize the archive
:param population: <Population> Individuals to initialize the population with
:return: None
"""
self._archive.append(nondominated_sort(population, len(population), first_front_only=True)[0])
self._consolidation_ratio.append(0)
self._population_size = len(population)
def update(self, population):
"""
Update the archive
:param population: <Population> Population to update the archive with
:return: None
"""
nondominated_solutions = nondominated_sort(copy.deepcopy(population), len(population), first_front_only=True)[0]
archive_copy = copy.deepcopy(self._archive[self._idx])
archive_copy = archive_copy + nondominated_solutions
nondominated_solutions = nondominated_sort(archive_copy, len(archive_copy), first_front_only=True)[0]
# Remove copies
nondominated_solutions = list(set(nondominated_solutions))
# Update the archive
self._archive.append(nondominated_solutions)
self._idx += 1
self._consolidation_ratio.append(len(self._archive[self._idx])/float(2*self._population_size))
def get_consolidation_ratio(self):
"""
Return the most recent calculated consolidation ratio
:return: <float> Current consolidation ratio value
"""
return self._consolidation_ratio[self._idx]
def get_consolidation_ratio_history(self):
"""
Return the consolidation ratio history
:return: <List<float>> Consolidation ratio history
"""
return self._consolidation_ratio
def get_archive(self):
"""
Get the saved archive at each update
:return: <List<Population>> archive
"""
return self._archive
class Metadata:
def __init__(self):
self.end_msg = None
self.fevals = None
self.gen = None
def __repr__(self):
s = '\n' + self.end_msg + '\n'
s += 'fevals: ' + repr(self.fevals) + '\n'
s += 'gen: ' + repr(self.gen) + '\n'
return s
class History(list):
def __init__(self):
super(History, self).__init__()
def add_point(self, individual):
"""
Add a design point to the history
:param individual: <Individual> Individual to add to the history
:return: None
"""
self.append(individual)
class Variable(object):
def __init__(self, name, lower, upper):
"""
Data structure that contains decision variable information.
:param name: <string> Reference name for the decision variable
:param lower: <float> Lower bound of the decision variable
:param upper: <float> Upper bound of the decision variable
:return: None
"""
self.name = name
self.lower = lower
self.upper = upper
class Constraint(object):
def __init__(self, name):
"""
Data structure that contains constraint variable information.
:param name: <string> Reference name for the constraint variable
:return: None
"""
self.name = name
class Objective(object):
def __init__(self, name):
"""
Data structure that contains objective variable information.
:param name: <string> Reference name for the objective variable
:return: None
"""
self.name = name
class AlgorithmException(Exception):
def __init__(self, message):
"""
Exception class that gets raised when an error occurs with the algorithm.
:param message: Error message to display
:return: None
"""
Exception.__init__(self, message)
# Utility functions for dealing with algorithms containing sub-populations
def flatten_population(population_list):
"""
Combine each of the sub-populations into a single global population
:param population_list: <List<Population>>
:return: <List<Individual>>
"""
global_pop = Population()
for pop in population_list:
global_pop.append(pop[:])
return global_pop
# Genetic Operators
def mutation(population, n_dim, lower, upper, eta_m, p_mut):
"""
Performs bounded polynomial mutation on the population.
:param population: <Population> Population to perform mutation on
:param n_dim: <int> Number of decision variable dimensions
:param lower: <list<float>> List of decision variable lower bound values
:param upper: <list<float>> List of upper bound decision variable values
:param eta_m: <float> Mutation index
:param p_mut: <float> Mutation probability
:return: None
"""
for ind in population:
mutate(ind, n_dim, lower, upper, eta_m, p_mut)
def mutate(individual, n_dim, lower, upper, eta_m, p_mut):
"""
Performs bounded polynomial mutation on an individual.
:param individual: <Individual> Individual to perform mutation on
:param n_dim: <int> Number of decision variable dimension
:param lower: <list<float>> List of decision variable lower bound values.
:param upper: <list<float>> List of decision variable upper bound values.
:param eta_m: <float> Mutation index
:param p_mut: <float> Mutation probability
:return: None
"""
for i, xl, xu in zip(xrange(n_dim), lower, upper):
if random.random() <= p_mut:
x = copy.deepcopy(individual.x[i])
delta_1 = (x - xl) / (xu - xl)
delta_2 = (xu - x) / (xu - xl)
rand = random.random()
mut_pow = 1.0 / (eta_m + 1.0)
if rand < 0.5:
xy = 1.0 - delta_1
val = 2.0 * rand + (1.0 - 2.0*rand)*(xy**(eta_m + 1))
delta_q = val**mut_pow - 1.0
else:
xy = 1.0 - delta_2
val = 2.0 * (1.0 - rand) + 2.0 * (rand - 0.5)*(xy**(eta_m + 1))
delta_q = 1.0 - val**mut_pow
x += delta_q * (xu - xl)
x = min(max(x, xl), xu)
individual.x[i] = x
def crossover(population, n_dim, lower, upper, eta_c, p_cross):
"""
Perform simulated binary crossover on the population.
:param population: <Population> Population to perform crossover on.
:param n_dim: <int> Number of decision variable dimensions.
:param lower: <list<float>> List of decision variable lower bound values.
:param upper: <list<float>> List of decision variable upper bound values.
:param eta_c: <float> Crossover index.
:param p_cross: <float> Crossover probability.
:return: <Population> Child population
"""
child_pop = Population()
child_pop.populate(population)
for ind1, ind2 in zip(child_pop[::2], child_pop[1::2]):
if random.random() <= p_cross:
mate(ind1, ind2, n_dim, lower, upper, eta_c)
return child_pop
def mate(ind1, ind2, n_dim, lower, upper, eta_c):
"""
Performs simulated binary crossover between two individuals to produce
two offspring.
:param ind1: <Individual> First individual involved in crossover
:param ind2: <Individual> Second individual involved in crossover
:param n_dim: <int> Number of decision variable dimensions.
:param lower: <list<float>> List of decision variable lower bound values.
:param upper: <list<float>> List of decision variable upper bound values.
:param eta_c: <float> Crossover index.
:return: None
"""
for i, xl, xu in zip(xrange(n_dim), lower, upper):
if random.random() <= 0.5:
if abs(ind1.x[i] - ind2.x[i]) > 1e-14:
x1 = min(ind1.x[i], ind2.x[i])
x2 = max(ind1.x[i], ind2.x[i])
rand = random.random()
beta = 1.0 + (2.0*(x1 - xl) / (x2 - x1))
alpha = 2.0 - beta**-(eta_c + 1)
if rand <= 1.0 / alpha:
beta_q = (rand*alpha)**(1.0 / (eta_c + 1))
else:
beta_q = (1.0 / (2.0 - rand*alpha))**(1.0 / (eta_c + 1))
c1 = 0.5 * (x1 + x2 - beta_q * (x2 - x1))
beta = 1.0 + (2.0*(xu - x2) / (x2 - x1))
alpha = 2.0 - beta**-(eta_c + 1)
if rand <= 1.0 / alpha:
beta_q = (rand*alpha)**(1.0 / (eta_c + 1))
else:
beta_q = (1.0 / (2.0 - rand * alpha))**(1.0 / (eta_c + 1))
c2 = 0.5 * (x1 + x2 + beta_q*(x2 - x1))
c1 = min(max(c1, xl), xu)
c2 = min(max(c2, xl), xu)
if random.random() <= 0.5:
ind1.x[i] = c2
ind2.x[i] = c1
else:
ind1.x[i] = c1
ind2.x[i] = c2
def selection(population, k):
"""
Apply the NSGA-II selection operator on a population of individuals.
:param population: <Population> Population of individuals to select from
:param k: <int> The number of individuals to select
:return: <Population> Selected population of individuals
"""
pareto_fronts = nondominated_sort(population, k)
for front in pareto_fronts:
assign_crowding_distance(front)
chosen = list(itertools.chain(*pareto_fronts[:-1]))
k -= len(chosen)
if k > 0:
sorted_front = sorted(pareto_fronts[-1], key=operator.attrgetter("d"), reverse=True)
chosen.extend(sorted_front[:k])
return copy.deepcopy(chosen)
def nondominated_sort(population, k, first_front_only=False):
"""
Sort the first k individuals from the population into different nondomination
levels using the Fast Nondominated Sorting Approach proposed by Deb et al.
Function structure and implementation adapted from the DEAP package.
:param first_front_only:
:param population: <Population> Population of individuals to sort
:param k: The number of individuals to select
:return: <List<Individual>> A list of ordered Pareto fronts
"""
if k == 0:
return []
map_fit_ind = defaultdict(list)
for ind in population:
map_fit_ind[(tuple(ind.f))] = ind
fits = map_fit_ind.keys()
current_front = []
next_front = []
dominating_fits = defaultdict(int)
dominated_fits = defaultdict(list)
# Rank first Pareto front
for i, fit_i in enumerate(fits):
for fit_j in fits[i+1:]:
if map_fit_ind[tuple(fit_i)].dominates(map_fit_ind[tuple(fit_j)]):
dominating_fits[fit_j] += 1
dominated_fits[fit_i].append(fit_j)
elif map_fit_ind[tuple(fit_j)].dominates(map_fit_ind[tuple(fit_i)]):
dominating_fits[fit_i] += 1
dominated_fits[fit_j].append(fit_i)
if dominating_fits[fit_i] == 0:
map_fit_ind[tuple(fit_i)].r = 1
current_front.append(fit_i)
fronts = [[]]
for fit in current_front:
fronts[-1].append(map_fit_ind[tuple(fit)])
pareto_sorted = len(fronts[-1])
# Rank the next front until all individuals are sorted or
# the given number of individual are sorted.
if not first_front_only:
N = min(len(population), k)
while pareto_sorted < N:
fronts.append([])
for fit_p in current_front:
for fit_d in dominated_fits[fit_p]:
dominating_fits[fit_d] -= 1
if dominating_fits[fit_d] == 0:
next_front.append(fit_d)
pareto_sorted += 1
fronts[-1].append(map_fit_ind[tuple(fit_d)])
map_fit_ind[tuple(fit_d)].r = len(fronts)
current_front = next_front
next_front = []
return copy.deepcopy(fronts)
def assign_crowding_distance(individuals):
"""
Assign the crowding distance to each individual.
:param individuals: <Population, List> Individuals to assign crowding distance to.
:return: None
"""
if len(individuals) == 0:
return
distances = [0.0] * len(individuals)
crowd = [(ind.f, i) for i, ind in enumerate(individuals)]
nobj = len(individuals[0].f)
for i in xrange(nobj):
crowd.sort(key=lambda element: element[0][i])
distances[crowd[0][1]] = float("inf")
distances[crowd[-1][1]] = float("inf")
if crowd[-1][0][i] == crowd[0][0][i]:
continue
norm = nobj * float(crowd[-1][0][i] - crowd[0][0][i])
for prev, cur, nexxt in zip(crowd[:-2], crowd[1:-1], crowd[2:]):
distances[cur[1]] += (nexxt[0][i] - prev[0][i]) / norm
for i, dist in enumerate(distances):
individuals[i].d = dist
def tournament_select(population, k):
"""
Tournament selection based on the constraint dominance principle and the
crowding distance.
:param population: <Population, List> Individuals to select from
:param k: <int> The number of individuals to select.
:return: <List<Individual>> The list of selected individuals
"""
def tournament(ind1, ind2):
if ind1.dominates(ind2):
return copy.deepcopy(ind1)
elif ind2.dominates(ind1):
return copy.deepcopy(ind2)
if ind1.d < ind2.d:
return copy.deepcopy(ind2)
elif ind1.d > ind2.d:
return copy.deepcopy(ind1)
if random.random() <= 0.5:
return copy.deepcopy(ind1)
return copy.deepcopy(ind2)
population_1 = random.sample(population, len(population))
population_2 = random.sample(population, len(population))
chosen = []
for i in xrange(0, k, 4):
chosen.append(tournament(population_1[i], population_1[i+1]))
chosen.append(tournament(population_1[i+1], population_1[i+3]))
chosen.append(tournament(population_2[i], population_2[i+1]))
chosen.append(tournament(population_2[i+2], population_2[i+3]))
return chosen
def update_progress(progress):
bar_length = 20 # Modify this to change the length of the progress bar
status = ""
if isinstance(progress, int):
progress = float(progress)
if not isinstance(progress, float):
progress = 0
status = "error: progress var must be float\r\n"
if progress < 0:
progress = 0
status = "Halt...\r\n"
if progress >= 1:
progress = 1
status = "Done...\r\n"
block = int(round(bar_length*progress))
text = "\rPercent: [{0}] {1}% {2}".format("="*block + " "*(bar_length-block), progress*100, status)
sys.stdout.write(text)
sys.stdout.flush()<|fim▁end|> | self.check_migrants(value)
self._migrants = value
elif option == 'spheres': |
<|file_name|>shutter.py<|end_file_name|><|fim▁begin|>import serial
import struct
import time
# j = 2 means open, j = 1 means close shutter
def command_shutter(port, j):
# first, start the serial port to communicate with the arduino
if port.isOpen():
print "port open"
port.write(struct.pack('>B', j))<|fim▁hole|> else:
return 0
#while(1 == 1):
#cover_or_not = int(input('Enter a number. 1 will cover the Lenses of the NDI, while 2 will open the blinds.'))
#data.write(struct.pack('>B',cover_or_not))<|fim▁end|> | return 1 |
<|file_name|>10394.js<|end_file_name|><|fim▁begin|>{
if (Array.isArray(t) && c(e))<|fim▁hole|> return (t.length = Math.max(t.length, e)), t.splice(e, 1, n), n;
if (d(t, e)) return (t[e] = n), n;
var r = t.__ob__;
return t._isVue || (r && r.vmCount)
? n
: r ? (D(r.value, e, n), r.dep.notify(), n) : ((t[e] = n), n);
}<|fim▁end|> | |
<|file_name|>glue.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use atomic_refcell::AtomicRefMut;
use cssparser::Parser;
use cssparser::ToCss as ParserToCss;
use env_logger::LogBuilder;
use num_cpus;
use parking_lot::RwLock;
use rayon;
use selectors::Element;
use servo_url::ServoUrl;
use std::borrow::Cow;
use std::cmp;
use std::env;
use std::fmt::Write;
use std::ptr;
use std::sync::{Arc, Mutex};
use style::arc_ptr_eq;
use style::context::{QuirksMode, SharedStyleContext, StyleContext};
use style::context::{ThreadLocalStyleContext, ThreadLocalStyleContextCreationInfo};
use style::data::{ElementData, ElementStyles, RestyleData};
use style::dom::{ShowSubtreeData, TElement, TNode};
use style::error_reporting::StdoutErrorReporter;
use style::gecko::data::{PerDocumentStyleData, PerDocumentStyleDataImpl};
use style::gecko::restyle_damage::GeckoRestyleDamage;
use style::gecko::selector_parser::{SelectorImpl, PseudoElement};
use style::gecko::traversal::RecalcStyleOnly;
use style::gecko::wrapper::DUMMY_BASE_URL;
use style::gecko::wrapper::GeckoElement;
use style::gecko_bindings::bindings;
use style::gecko_bindings::bindings::{RawGeckoKeyframeListBorrowed, RawGeckoKeyframeListBorrowedMut};
use style::gecko_bindings::bindings::{RawServoDeclarationBlockBorrowed, RawServoDeclarationBlockStrong};
use style::gecko_bindings::bindings::{RawServoMediaListBorrowed, RawServoMediaListStrong};
use style::gecko_bindings::bindings::{RawServoMediaRule, RawServoMediaRuleBorrowed};
use style::gecko_bindings::bindings::{RawServoNamespaceRule, RawServoNamespaceRuleBorrowed};
use style::gecko_bindings::bindings::{RawServoStyleRule, RawServoStyleRuleBorrowed};
use style::gecko_bindings::bindings::{RawServoStyleSetBorrowed, RawServoStyleSetOwned};
use style::gecko_bindings::bindings::{RawServoStyleSheetBorrowed, ServoComputedValuesBorrowed};
use style::gecko_bindings::bindings::{RawServoStyleSheetStrong, ServoComputedValuesStrong};
use style::gecko_bindings::bindings::{ServoCssRulesBorrowed, ServoCssRulesStrong};
use style::gecko_bindings::bindings::{nsACString, nsAString};
use style::gecko_bindings::bindings::Gecko_AnimationAppendKeyframe;
use style::gecko_bindings::bindings::RawGeckoComputedKeyframeValuesListBorrowedMut;
use style::gecko_bindings::bindings::RawGeckoElementBorrowed;
use style::gecko_bindings::bindings::RawServoAnimationValueBorrowed;
use style::gecko_bindings::bindings::RawServoAnimationValueStrong;
use style::gecko_bindings::bindings::RawServoImportRuleBorrowed;
use style::gecko_bindings::bindings::ServoComputedValuesBorrowedOrNull;
use style::gecko_bindings::bindings::nsTArrayBorrowed_uintptr_t;
use style::gecko_bindings::structs;
use style::gecko_bindings::structs::{SheetParsingMode, nsIAtom, nsCSSPropertyID};
use style::gecko_bindings::structs::{ThreadSafePrincipalHolder, ThreadSafeURIHolder};
use style::gecko_bindings::structs::{nsRestyleHint, nsChangeHint};
use style::gecko_bindings::structs::Loader;
use style::gecko_bindings::structs::RawGeckoPresContextOwned;
use style::gecko_bindings::structs::RawServoAnimationValueBorrowedListBorrowed;
use style::gecko_bindings::structs::ServoStyleSheet;
use style::gecko_bindings::structs::nsCSSValueSharedList;
use style::gecko_bindings::structs::nsTimingFunction;
use style::gecko_bindings::structs::nsresult;
use style::gecko_bindings::sugar::ownership::{FFIArcHelpers, HasArcFFI, HasBoxFFI};
use style::gecko_bindings::sugar::ownership::{HasSimpleFFI, Strong};
use style::gecko_bindings::sugar::refptr::{GeckoArcPrincipal, GeckoArcURI};
use style::gecko_properties::{self, style_structs};
use style::keyframes::KeyframesStepValue;
use style::media_queries::{MediaList, parse_media_query_list};
use style::parallel;
use style::parser::{ParserContext, ParserContextExtraData};
use style::properties::{CascadeFlags, ComputedValues, Importance, ParsedDeclaration};
use style::properties::{PropertyDeclarationBlock, PropertyId};
use style::properties::SKIP_ROOT_AND_ITEM_BASED_DISPLAY_FIXUP;
use style::properties::animated_properties::{AnimationValue, Interpolate, TransitionProperty};
use style::properties::parse_one_declaration;
use style::restyle_hints::{self, RestyleHint};
use style::selector_parser::PseudoElementCascadeType;
use style::sequential;
use style::string_cache::Atom;
use style::stylesheets::{CssRule, CssRules, ImportRule, MediaRule, NamespaceRule};
use style::stylesheets::{Origin, Stylesheet, StyleRule};
use style::stylesheets::StylesheetLoader as StyleStylesheetLoader;
use style::supports::parse_condition_or_declaration;
use style::thread_state;
use style::timer::Timer;
use style::traversal::{resolve_style, DomTraversal, TraversalDriver};
use style_traits::ToCss;
use stylesheet_loader::StylesheetLoader;
/*
* For Gecko->Servo function calls, we need to redeclare the same signature that was declared in
* the C header in Gecko. In order to catch accidental mismatches, we run rust-bindgen against
* those signatures as well, giving us a second declaration of all the Servo_* functions in this
* crate. If there's a mismatch, LLVM will assert and abort, which is a rather awful thing to
* depend on but good enough for our purposes.
*/
struct GlobalStyleData {
// How many threads parallel styling can use.
pub num_threads: usize,
// The parallel styling thread pool.
pub style_thread_pool: Option<rayon::ThreadPool>,
}
impl GlobalStyleData {
pub fn new() -> Self {
let stylo_threads = env::var("STYLO_THREADS")
.map(|s| s.parse::<usize>().expect("invalid STYLO_THREADS value"));
let num_threads = match stylo_threads {
Ok(num) => num,
_ => cmp::max(num_cpus::get() * 3 / 4, 1),
};
let pool = if num_threads <= 1 {
None
} else {
let configuration =
rayon::Configuration::new().set_num_threads(num_threads);
let pool = rayon::ThreadPool::new(configuration).ok();
pool
};
GlobalStyleData {
num_threads: num_threads,
style_thread_pool: pool,
}
}
}
lazy_static! {
static ref GLOBAL_STYLE_DATA: GlobalStyleData = {
GlobalStyleData::new()
};
}
#[no_mangle]
pub extern "C" fn Servo_Initialize() {
// Initialize logging.
let mut builder = LogBuilder::new();
let default_level = if cfg!(debug_assertions) { "warn" } else { "error" };
match env::var("RUST_LOG") {
Ok(v) => builder.parse(&v).init().unwrap(),
_ => builder.parse(default_level).init().unwrap(),
};
// Pretend that we're a Servo Layout thread, to make some assertions happy.
thread_state::initialize(thread_state::LAYOUT);
// Perform some debug-only runtime assertions.
restyle_hints::assert_restyle_hints_match();
// Initialize some static data.
gecko_properties::initialize();
}
#[no_mangle]
pub extern "C" fn Servo_Shutdown() {
// Clear some static data to avoid shutdown leaks.
gecko_properties::shutdown();
}
fn create_shared_context(per_doc_data: &PerDocumentStyleDataImpl) -> SharedStyleContext {
let local_context_data =
ThreadLocalStyleContextCreationInfo::new(per_doc_data.new_animations_sender.clone());
SharedStyleContext {
stylist: per_doc_data.stylist.clone(),
running_animations: per_doc_data.running_animations.clone(),
expired_animations: per_doc_data.expired_animations.clone(),
// FIXME(emilio): Stop boxing here.
error_reporter: Box::new(StdoutErrorReporter),
local_context_creation_data: Mutex::new(local_context_data),
timer: Timer::new(),
// FIXME Find the real QuirksMode information for this document
quirks_mode: QuirksMode::NoQuirks,
}
}
fn traverse_subtree(element: GeckoElement, raw_data: RawServoStyleSetBorrowed,
unstyled_children_only: bool) {
// When new content is inserted in a display:none subtree, we will call into
// servo to try to style it. Detect that here and bail out.
if let Some(parent) = element.parent_element() {
if parent.borrow_data().map_or(true, |d| d.styles().is_display_none()) {
debug!("{:?} has unstyled parent - ignoring call to traverse_subtree", parent);
return;
}
}
let per_doc_data = PerDocumentStyleData::from_ffi(raw_data).borrow();
let token = RecalcStyleOnly::pre_traverse(element, &per_doc_data.stylist, unstyled_children_only);
if !token.should_traverse() {
return;
}
debug!("Traversing subtree:");
debug!("{:?}", ShowSubtreeData(element.as_node()));
let shared_style_context = create_shared_context(&per_doc_data);
let ref global_style_data = *GLOBAL_STYLE_DATA;
let traversal_driver = if global_style_data.style_thread_pool.is_none() {
TraversalDriver::Sequential
} else {
TraversalDriver::Parallel
};
let traversal = RecalcStyleOnly::new(shared_style_context, traversal_driver);
let known_depth = None;
if traversal_driver.is_parallel() {
parallel::traverse_dom(&traversal, element, known_depth, token,
global_style_data.style_thread_pool.as_ref().unwrap());
} else {
sequential::traverse_dom(&traversal, element, token);
}
}
/// Traverses the subtree rooted at `root` for restyling. Returns whether a
/// Gecko post-traversal (to perform lazy frame construction, or consume any
/// RestyleData, or drop any ElementData) is required.
#[no_mangle]
pub extern "C" fn Servo_TraverseSubtree(root: RawGeckoElementBorrowed,
raw_data: RawServoStyleSetBorrowed,
behavior: structs::TraversalRootBehavior) -> bool {
let element = GeckoElement(root);
debug!("Servo_TraverseSubtree: {:?}", element);
traverse_subtree(element, raw_data,
behavior == structs::TraversalRootBehavior::UnstyledChildrenOnly);
element.has_dirty_descendants() || element.mutate_data().unwrap().has_restyle()
}
/// Interpolates between two animation values at the given progress,
/// returning a null strong reference when the two values cannot be
/// interpolated (e.g. mismatched property types).
#[no_mangle]
pub extern "C" fn Servo_AnimationValues_Interpolate(from: RawServoAnimationValueBorrowed,
                                                    to: RawServoAnimationValueBorrowed,
                                                    progress: f64)
                                                    -> RawServoAnimationValueStrong
{
    let from_value = AnimationValue::as_arc(&from);
    let to_value = AnimationValue::as_arc(&to);
    if let Ok(value) = from_value.interpolate(to_value, progress) {
        Arc::new(value).into_strong()
    } else {
        // Interpolation failure is signalled to the caller as a null pointer.
        RawServoAnimationValueStrong::null()
    }
}
/// Converts a list of computed animation values back into a specified-value
/// declaration block (each entry pushed with normal importance).
#[no_mangle]
pub extern "C" fn Servo_AnimationValues_Uncompute(value: RawServoAnimationValueBorrowedListBorrowed)
                                                  -> RawServoDeclarationBlockStrong
{
    // Safety: the caller guarantees the list pointer and each element pointer
    // are valid for the duration of the call.
    let value = unsafe { value.as_ref().unwrap() };
    let mut block = PropertyDeclarationBlock::new();
    for v in value.iter() {
        let raw_anim = unsafe { v.as_ref().unwrap() };
        let anim = AnimationValue::as_arc(&raw_anim);
        block.push(anim.uncompute(), Importance::Normal);
    }
    Arc::new(RwLock::new(block)).into_strong()
}
// Converts an nsCSSPropertyID into a servo PropertyId, early-returning `$ret`
// from the enclosing function when the id is unknown to servo.
macro_rules! get_property_id_from_nscsspropertyid {
    ($property_id: ident, $ret: expr) => {{
        match PropertyId::from_nscsspropertyid($property_id) {
            Ok(property_id) => property_id,
            Err(()) => { return $ret; }
        }
    }}
}
/// Serializes a single animation value as CSS text into the Gecko string
/// `buffer` for the given property.
#[no_mangle]
pub extern "C" fn Servo_AnimationValue_Serialize(value: RawServoAnimationValueBorrowed,
                                                 property: nsCSSPropertyID,
                                                 buffer: *mut nsAString)
{
    let uncomputed_value = AnimationValue::as_arc(&value).uncompute();
    let mut string = String::new();
    // Serialize via a temporary one-declaration block so we reuse the normal
    // single-value serialization path.
    let rv = PropertyDeclarationBlock::with_one(uncomputed_value, Importance::Normal)
        .single_value_to_css(&get_property_id_from_nscsspropertyid!(property, ()), &mut string);
    debug_assert!(rv.is_ok());
    write!(unsafe { &mut *buffer }, "{}", string).expect("Failed to copy string");
}
/// Extracts the opacity from an animation value; panics if the value is not
/// an opacity (the caller is expected to only pass opacity values).
#[no_mangle]
pub extern "C" fn Servo_AnimationValue_GetOpacity(value: RawServoAnimationValueBorrowed)
                                                  -> f32
{
    let value = AnimationValue::as_arc(&value);
    if let AnimationValue::Opacity(opacity) = **value {
        opacity
    } else {
        panic!("The AnimationValue should be Opacity");
    }
}
/// Converts a transform animation value into a Gecko nsCSSValueSharedList,
/// writing through the out-parameter `list`; panics on non-transform values.
#[no_mangle]
pub extern "C" fn Servo_AnimationValue_GetTransform(value: RawServoAnimationValueBorrowed,
                                                    list: *mut structs::RefPtr<nsCSSValueSharedList>)
{
    let value = AnimationValue::as_arc(&value);
    if let AnimationValue::Transform(ref servo_list) = **value {
        style_structs::Box::convert_transform(servo_list.0.clone().unwrap(), unsafe { &mut *list });
    } else {
        panic!("The AnimationValue should be transform");
    }
}
/// Structural (deep) equality between two animation values.
#[no_mangle]
pub extern "C" fn Servo_AnimationValue_DeepEqual(this: RawServoAnimationValueBorrowed,
                                                 other: RawServoAnimationValueBorrowed)
                                                 -> bool
{
    let this_value = AnimationValue::as_arc(&this);
    let other_value = AnimationValue::as_arc(&other);
    this_value == other_value
}
/// Reports how many worker threads the global style system uses.
#[no_mangle]
pub extern "C" fn Servo_StyleWorkerThreadCount() -> u32 {
    let thread_count = GLOBAL_STYLE_DATA.num_threads;
    thread_count as u32
}
/// Drops any servo-side ElementData attached to the given element.
#[no_mangle]
pub extern "C" fn Servo_Element_ClearData(element: RawGeckoElementBorrowed) {
    GeckoElement(element).clear_data();
}
/// Creates an empty stylesheet with an `about:blank` URL and an origin
/// derived from the given parsing mode.
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_Empty(mode: SheetParsingMode) -> RawServoStyleSheetStrong {
    let url = ServoUrl::parse("about:blank").unwrap();
    let extra_data = ParserContextExtraData::default();
    let origin = match mode {
        SheetParsingMode::eAuthorSheetFeatures => Origin::Author,
        SheetParsingMode::eUserSheetFeatures => Origin::User,
        SheetParsingMode::eAgentSheetFeatures => Origin::UserAgent,
    };
    // Parsing the empty string yields a sheet with no rules.
    Arc::new(Stylesheet::from_str(
        "", url, origin, Default::default(), None,
        &StdoutErrorReporter, extra_data)
    ).into_strong()
}
/// Parses a stylesheet from UTF-8 bytes supplied by Gecko.
///
/// `loader`/`stylesheet` (when non-null) enable @import loading through
/// Gecko's CSS loader; `base`/`referrer`/`principal` carry the security
/// context used while parsing URLs in the sheet.
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_FromUTF8Bytes(loader: *mut Loader,
                                                 stylesheet: *mut ServoStyleSheet,
                                                 data: *const nsACString,
                                                 mode: SheetParsingMode,
                                                 base_url: *const nsACString,
                                                 base: *mut ThreadSafeURIHolder,
                                                 referrer: *mut ThreadSafeURIHolder,
                                                 principal: *mut ThreadSafePrincipalHolder)
                                                 -> RawServoStyleSheetStrong {
    // Safety: Gecko guarantees `data` is valid UTF-8 for the call duration.
    let input = unsafe { data.as_ref().unwrap().as_str_unchecked() };

    let origin = match mode {
        SheetParsingMode::eAuthorSheetFeatures => Origin::Author,
        SheetParsingMode::eUserSheetFeatures => Origin::User,
        SheetParsingMode::eAgentSheetFeatures => Origin::UserAgent,
    };

    let base_str = unsafe { base_url.as_ref().unwrap().as_str_unchecked() };
    let url = ServoUrl::parse(base_str).unwrap();
    let extra_data = unsafe { ParserContextExtraData {
        base: Some(GeckoArcURI::new(base)),
        referrer: Some(GeckoArcURI::new(referrer)),
        principal: Some(GeckoArcPrincipal::new(principal)),
    }};
    // A null loader means @import rules will not be loaded.
    let loader = if loader.is_null() {
        None
    } else {
        Some(StylesheetLoader::new(loader, stylesheet))
    };

    // FIXME(emilio): loader.as_ref() doesn't typecheck for some reason?
    let loader: Option<&StyleStylesheetLoader> = match loader {
        None => None,
        Some(ref s) => Some(s),
    };

    Arc::new(Stylesheet::from_str(
        input, url, origin, Default::default(), loader,
        &StdoutErrorReporter, extra_data)
    ).into_strong()
}
/// Clears an existing stylesheet's rules and re-parses it in place from the
/// given UTF-8 source, reusing the sheet's original URL/origin.
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_ClearAndUpdate(stylesheet: RawServoStyleSheetBorrowed,
                                                  loader: *mut Loader,
                                                  gecko_stylesheet: *mut ServoStyleSheet,
                                                  data: *const nsACString,
                                                  base: *mut ThreadSafeURIHolder,
                                                  referrer: *mut ThreadSafeURIHolder,
                                                  principal: *mut ThreadSafePrincipalHolder)
{
    let input = unsafe { data.as_ref().unwrap().as_str_unchecked() };
    let extra_data = unsafe { ParserContextExtraData {
        base: Some(GeckoArcURI::new(base)),
        referrer: Some(GeckoArcURI::new(referrer)),
        principal: Some(GeckoArcPrincipal::new(principal)),
    }};

    let loader = if loader.is_null() {
        None
    } else {
        Some(StylesheetLoader::new(loader, gecko_stylesheet))
    };

    // FIXME(emilio): loader.as_ref() doesn't typecheck for some reason?
    let loader: Option<&StyleStylesheetLoader> = match loader {
        None => None,
        Some(ref s) => Some(s),
    };

    let sheet = Stylesheet::as_arc(&stylesheet);
    // Drop the old rules before re-parsing so the sheet never holds both sets.
    sheet.rules.write().0.clear();
    Stylesheet::update_from_str(&sheet, input, loader,
                                &StdoutErrorReporter, extra_data);
}
/// Appends a stylesheet to the document's list (moving it to the end if it
/// was already present), optionally flushing the stylist immediately.
#[no_mangle]
pub extern "C" fn Servo_StyleSet_AppendStyleSheet(raw_data: RawServoStyleSetBorrowed,
                                                  raw_sheet: RawServoStyleSheetBorrowed,
                                                  flush: bool) {
    let mut data = PerDocumentStyleData::from_ffi(raw_data).borrow_mut();
    let sheet = HasArcFFI::as_arc(&raw_sheet);
    // Remove any existing occurrence first so the sheet appears exactly once.
    data.stylesheets.retain(|x| !arc_ptr_eq(x, sheet));
    data.stylesheets.push(sheet.clone());
    data.stylesheets_changed = true;
    if flush {
        data.flush_stylesheets();
    }
}
/// Prepends a stylesheet to the document's list (moving it to the front if it
/// was already present), optionally flushing the stylist immediately.
#[no_mangle]
pub extern "C" fn Servo_StyleSet_PrependStyleSheet(raw_data: RawServoStyleSetBorrowed,
                                                   raw_sheet: RawServoStyleSheetBorrowed,
                                                   flush: bool) {
    let mut data = PerDocumentStyleData::from_ffi(raw_data).borrow_mut();
    let sheet = HasArcFFI::as_arc(&raw_sheet);
    // Remove any existing occurrence first so the sheet appears exactly once.
    data.stylesheets.retain(|x| !arc_ptr_eq(x, sheet));
    data.stylesheets.insert(0, sheet.clone());
    data.stylesheets_changed = true;
    if flush {
        data.flush_stylesheets();
    }
}
/// Inserts a stylesheet immediately before `raw_reference` in the document's
/// list, optionally flushing the stylist immediately.
#[no_mangle]
pub extern "C" fn Servo_StyleSet_InsertStyleSheetBefore(raw_data: RawServoStyleSetBorrowed,
                                                        raw_sheet: RawServoStyleSheetBorrowed,
                                                        raw_reference: RawServoStyleSheetBorrowed,
                                                        flush: bool) {
    let mut data = PerDocumentStyleData::from_ffi(raw_data).borrow_mut();
    let sheet = HasArcFFI::as_arc(&raw_sheet);
    let reference = HasArcFFI::as_arc(&raw_reference);
    // Remove any existing occurrence of `sheet` first; NOTE(review): if the
    // caller passes the same sheet as both arguments, or a reference sheet
    // that is not in the list, the unwrap below will panic — this relies on
    // the Gecko caller upholding that contract.
    data.stylesheets.retain(|x| !arc_ptr_eq(x, sheet));
    let index = data.stylesheets.iter().position(|x| arc_ptr_eq(x, reference)).unwrap();
    data.stylesheets.insert(index, sheet.clone());
    data.stylesheets_changed = true;
    if flush {
        data.flush_stylesheets();
    }
}
/// Removes a stylesheet from the document's list (no-op if absent),
/// optionally flushing the stylist immediately.
#[no_mangle]
pub extern "C" fn Servo_StyleSet_RemoveStyleSheet(raw_data: RawServoStyleSetBorrowed,
                                                  raw_sheet: RawServoStyleSheetBorrowed,
                                                  flush: bool) {
    let mut data = PerDocumentStyleData::from_ffi(raw_data).borrow_mut();
    let sheet = HasArcFFI::as_arc(&raw_sheet);
    data.stylesheets.retain(|x| !arc_ptr_eq(x, sheet));
    data.stylesheets_changed = true;
    if flush {
        data.flush_stylesheets();
    }
}
/// Rebuilds the stylist from the current stylesheet list if needed.
#[no_mangle]
pub extern "C" fn Servo_StyleSet_FlushStyleSheets(raw_data: RawServoStyleSetBorrowed) {
    let mut data = PerDocumentStyleData::from_ffi(raw_data).borrow_mut();
    data.flush_stylesheets();
}
/// Marks the stylesheet list as dirty without flushing; the next flush will
/// rebuild the stylist.
#[no_mangle]
pub extern "C" fn Servo_StyleSet_NoteStyleSheetsChanged(raw_data: RawServoStyleSetBorrowed) {
    let mut data = PerDocumentStyleData::from_ffi(raw_data).borrow_mut();
    data.stylesheets_changed = true;
}
/// Returns true when the given stylesheet contains at least one rule.
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_HasRules(raw_sheet: RawServoStyleSheetBorrowed) -> bool {
    let sheet = Stylesheet::as_arc(&raw_sheet);
    let rule_count = sheet.rules.read().0.len();
    rule_count != 0
}
/// Returns a strong reference to the stylesheet's rule list.
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_GetRules(sheet: RawServoStyleSheetBorrowed) -> ServoCssRulesStrong {
    Stylesheet::as_arc(&sheet).rules.clone().into_strong()
}
/// Fills `result` with the CSSOM rule-type code of each rule in the list.
#[no_mangle]
pub extern "C" fn Servo_CssRules_ListTypes(rules: ServoCssRulesBorrowed,
                                           result: nsTArrayBorrowed_uintptr_t) {
    let rules = RwLock::<CssRules>::as_arc(&rules).read();
    let iter = rules.0.iter().map(|rule| rule.rule_type() as usize);
    let (size, upper) = iter.size_hint();
    debug_assert_eq!(size, upper.unwrap());
    // Safety: we size the Gecko array to exactly the iterator length and then
    // write every slot below.
    unsafe { result.set_len(size as u32) };
    result.iter_mut().zip(iter).fold((), |_, (r, v)| *r = v);
}
/// Parses and inserts a rule at `index`, writing the new rule's CSSOM type
/// through `rule_type` on success; returns an nsresult error code on failure.
#[no_mangle]
pub extern "C" fn Servo_CssRules_InsertRule(rules: ServoCssRulesBorrowed, sheet: RawServoStyleSheetBorrowed,
                                            rule: *const nsACString, index: u32, nested: bool,
                                            rule_type: *mut u16) -> nsresult {
    let rules = RwLock::<CssRules>::as_arc(&rules);
    let sheet = Stylesheet::as_arc(&sheet);
    let rule = unsafe { rule.as_ref().unwrap().as_str_unchecked() };
    match rules.write().insert_rule(rule, sheet, index as usize, nested) {
        Ok(new_rule) => {
            *unsafe { rule_type.as_mut().unwrap() } = new_rule.rule_type() as u16;
            nsresult::NS_OK
        }
        Err(err) => err.into()
    }
}
/// Removes the rule at `index`; returns an nsresult error code if the index
/// is invalid.
#[no_mangle]
pub extern "C" fn Servo_CssRules_DeleteRule(rules: ServoCssRulesBorrowed, index: u32) -> nsresult {
    let rules = RwLock::<CssRules>::as_arc(&rules);
    match rules.write().remove_rule(index as usize) {
        Ok(_) => nsresult::NS_OK,
        Err(err) => err.into()
    }
}
// Generates the three FFI entry points every basic rule type needs:
//   $getter — fetch the rule at an index (panics if the type doesn't match),
//   $debug  — write the Debug representation into a Gecko C string,
//   $to_css — serialize the rule as CSS text into a Gecko string.
macro_rules! impl_basic_rule_funcs {
    { ($name:ident, $rule_type:ty, $raw_type:ty),
        getter: $getter:ident,
        debug: $debug:ident,
        to_css: $to_css:ident,
    } => {
        #[no_mangle]
        pub extern "C" fn $getter(rules: ServoCssRulesBorrowed, index: u32) -> Strong<$raw_type> {
            let rules = RwLock::<CssRules>::as_arc(&rules).read();
            match rules.0[index as usize] {
                CssRule::$name(ref rule) => rule.clone().into_strong(),
                _ => {
                    // The caller must already have checked the rule type
                    // (e.g. via Servo_CssRules_ListTypes).
                    unreachable!(concat!(stringify!($getter), "should only be called ",
                                         "on a ", stringify!($name), " rule"));
                }
            }
        }

        #[no_mangle]
        pub extern "C" fn $debug(rule: &$raw_type, result: *mut nsACString) {
            let rule = RwLock::<$rule_type>::as_arc(&rule);
            let result = unsafe { result.as_mut().unwrap() };
            write!(result, "{:?}", *rule.read()).unwrap();
        }

        #[no_mangle]
        pub extern "C" fn $to_css(rule: &$raw_type, result: *mut nsAString) {
            let rule = RwLock::<$rule_type>::as_arc(&rule);
            rule.read().to_css(unsafe { result.as_mut().unwrap() }).unwrap();
        }
    }
}
// Instantiate the getter/debug/to_css trio for each supported rule kind.
impl_basic_rule_funcs! { (Style, StyleRule, RawServoStyleRule),
    getter: Servo_CssRules_GetStyleRuleAt,
    debug: Servo_StyleRule_Debug,
    to_css: Servo_StyleRule_GetCssText,
}

impl_basic_rule_funcs! { (Media, MediaRule, RawServoMediaRule),
    getter: Servo_CssRules_GetMediaRuleAt,
    debug: Servo_MediaRule_Debug,
    to_css: Servo_MediaRule_GetCssText,
}

impl_basic_rule_funcs! { (Namespace, NamespaceRule, RawServoNamespaceRule),
    getter: Servo_CssRules_GetNamespaceRuleAt,
    debug: Servo_NamespaceRule_Debug,
    to_css: Servo_NamespaceRule_GetCssText,
}
/// Returns a strong reference to the style rule's declaration block.
#[no_mangle]
pub extern "C" fn Servo_StyleRule_GetStyle(rule: RawServoStyleRuleBorrowed) -> RawServoDeclarationBlockStrong {
    let rule = RwLock::<StyleRule>::as_arc(&rule);
    rule.read().block.clone().into_strong()
}
/// Replaces the style rule's declaration block with the given one.
#[no_mangle]
pub extern "C" fn Servo_StyleRule_SetStyle(rule: RawServoStyleRuleBorrowed,
                                           declarations: RawServoDeclarationBlockBorrowed) {
    let rule = RwLock::<StyleRule>::as_arc(&rule);
    let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
    rule.write().block = declarations.clone();
}
/// Serializes the rule's selector list into the Gecko string `result`.
#[no_mangle]
pub extern "C" fn Servo_StyleRule_GetSelectorText(rule: RawServoStyleRuleBorrowed, result: *mut nsAString) {
    let rule = RwLock::<StyleRule>::as_arc(&rule);
    rule.read().selectors.to_css(unsafe { result.as_mut().unwrap() }).unwrap();
}
/// Returns a strong reference to the @media rule's media query list.
#[no_mangle]
pub extern "C" fn Servo_MediaRule_GetMedia(rule: RawServoMediaRuleBorrowed) -> RawServoMediaListStrong {
    let rule = RwLock::<MediaRule>::as_arc(&rule);
    rule.read().media_queries.clone().into_strong()
}
/// Returns a strong reference to the @media rule's nested rule list.
#[no_mangle]
pub extern "C" fn Servo_MediaRule_GetRules(rule: RawServoMediaRuleBorrowed) -> ServoCssRulesStrong {
    let rule = RwLock::<MediaRule>::as_arc(&rule);
    rule.read().rules.clone().into_strong()
}
/// Returns the @namespace rule's prefix atom (the empty atom when the rule
/// declares a default namespace).
#[no_mangle]
pub extern "C" fn Servo_NamespaceRule_GetPrefix(rule: RawServoNamespaceRuleBorrowed) -> *mut nsIAtom {
    let rule = RwLock::<NamespaceRule>::as_arc(&rule);
    rule.read().prefix.as_ref().unwrap_or(&atom!("")).as_ptr()
}
/// Returns the @namespace rule's namespace URI as an atom pointer.
#[no_mangle]
pub extern "C" fn Servo_NamespaceRule_GetURI(rule: RawServoNamespaceRuleBorrowed) -> *mut nsIAtom {
    let rule = RwLock::<NamespaceRule>::as_arc(&rule);
    rule.read().url.0.as_ptr()
}
/// Computes the style for an anonymous box pseudo-element identified by
/// `pseudo_tag`, inheriting from `parent_style_or_null` when provided.
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_GetForAnonymousBox(parent_style_or_null: ServoComputedValuesBorrowedOrNull,
                                                          pseudo_tag: *mut nsIAtom,
                                                          skip_display_fixup: bool,
                                                          raw_data: RawServoStyleSetBorrowed)
     -> ServoComputedValuesStrong {
    let data = PerDocumentStyleData::from_ffi(raw_data).borrow_mut();
    let atom = Atom::from(pseudo_tag);
    let pseudo = PseudoElement::from_atom_unchecked(atom, /* anon_box = */ true);

    let maybe_parent = ComputedValues::arc_from_borrowed(&parent_style_or_null);
    let mut cascade_flags = CascadeFlags::empty();
    if skip_display_fixup {
        // The caller doesn't want the root/flex-item display fixup applied.
        cascade_flags.insert(SKIP_ROOT_AND_ITEM_BASED_DISPLAY_FIXUP);
    }
    data.stylist.precomputed_values_for_pseudo(&pseudo, maybe_parent,
                                               cascade_flags)
        .values.unwrap()
        .into_strong()
}
/// Resolves the style for a pseudo-element of `element`. When `is_probe` is
/// true, a null strong reference is returned instead of a fallback style if
/// the pseudo has no style.
#[no_mangle]
pub extern "C" fn Servo_ResolvePseudoStyle(element: RawGeckoElementBorrowed,
                                           pseudo_tag: *mut nsIAtom, is_probe: bool,
                                           raw_data: RawServoStyleSetBorrowed)
     -> ServoComputedValuesStrong
{
    let element = GeckoElement(element);
    let data = unsafe { element.ensure_data() }.borrow_mut();
    let doc_data = PerDocumentStyleData::from_ffi(raw_data);

    // FIXME(bholley): Assert against this.
    if data.get_styles().is_none() {
        warn!("Calling Servo_ResolvePseudoStyle on unstyled element");
        // Unstyled element: probes report "no style"; otherwise fall back to
        // the document's default computed values.
        return if is_probe {
            Strong::null()
        } else {
            doc_data.borrow().default_computed_values().clone().into_strong()
        };
    }

    match get_pseudo_style(element, pseudo_tag, data.styles(), doc_data) {
        Some(values) => values.into_strong(),
        // Non-probe callers get the element's primary style as a fallback.
        None if !is_probe => data.styles().primary.values().clone().into_strong(),
        None => Strong::null(),
    }
}
/// Looks up (eager) or lazily computes the style for a pseudo-element of
/// `element`, returning `None` when the pseudo doesn't apply.
fn get_pseudo_style(element: GeckoElement, pseudo_tag: *mut nsIAtom,
                    styles: &ElementStyles, doc_data: &PerDocumentStyleData)
                    -> Option<Arc<ComputedValues>>
{
    let pseudo = PseudoElement::from_atom_unchecked(Atom::from(pseudo_tag), false);
    match SelectorImpl::pseudo_element_cascade_type(&pseudo) {
        // Eagerly-cascaded pseudos were computed alongside the primary style.
        PseudoElementCascadeType::Eager => styles.pseudos.get(&pseudo).map(|s| s.values().clone()),
        // Anonymous boxes go through Servo_ComputedValues_GetForAnonymousBox.
        PseudoElementCascadeType::Precomputed => unreachable!("No anonymous boxes"),
        PseudoElementCascadeType::Lazy => {
            let d = doc_data.borrow_mut();
            let base = styles.primary.values();
            d.stylist.lazily_compute_pseudo_element_style(&element,
                                                          &pseudo,
                                                          base)
             .map(|s| s.values().clone())
        },
    }
}
/// Creates a style that inherits everything from `parent_style`, or a clone
/// of the document's default computed values when no parent is given.
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_Inherit(
  raw_data: RawServoStyleSetBorrowed,
  parent_style: ServoComputedValuesBorrowedOrNull)
     -> ServoComputedValuesStrong {
    let data = PerDocumentStyleData::from_ffi(raw_data).borrow();
    let maybe_arc = ComputedValues::arc_from_borrowed(&parent_style);
    let style = if let Some(reference) = maybe_arc.as_ref() {
        ComputedValues::inherit_from(reference, &data.default_computed_values())
    } else {
        data.default_computed_values().clone()
    };
    style.into_strong()
}
/// See the comment in `Device` to see why it's ok to pass an owned reference to
/// the pres context (hint: the context outlives the StyleSet, that holds the
/// device alive).
#[no_mangle]
pub extern "C" fn Servo_StyleSet_Init(pres_context: RawGeckoPresContextOwned)
  -> RawServoStyleSetOwned {
    // Ownership of the per-document data transfers to Gecko via the FFI box.
    let data = Box::new(PerDocumentStyleData::new(pres_context));
    data.into_ffi()
}
/// Rebuilds the per-document device data (e.g. after viewport or media
/// changes on the Gecko side).
#[no_mangle]
pub extern "C" fn Servo_StyleSet_RebuildData(raw_data: RawServoStyleSetBorrowed) {
    let mut data = PerDocumentStyleData::from_ffi(raw_data).borrow_mut();
    data.reset_device();
}
/// Reclaims and drops the per-document style data handed out by
/// Servo_StyleSet_Init.
#[no_mangle]
pub extern "C" fn Servo_StyleSet_Drop(data: RawServoStyleSetOwned) {
    let _ = data.into_box::<PerDocumentStyleData>();
}
// Must be a macro since we need to store the base_url on the stack somewhere
/// Initializes the data needed for constructing a ParserContext from
/// Gecko-side values
macro_rules! make_context {
    (($base:ident, $data:ident) => ($base_url:ident, $extra_data:ident)) => {
        // Safety: the caller passes a valid UTF-8 Gecko string and a valid
        // GeckoParserExtraData pointer.
        let base_str = unsafe { $base.as_ref().unwrap().as_str_unchecked() };
        let $base_url = ServoUrl::parse(base_str).unwrap();
        let $extra_data = unsafe { ParserContextExtraData::new($data) };
    }
}
/// Parses a single `property: value` pair into a one-declaration block;
/// returns a null strong reference when the property name or value fails to
/// parse.
#[no_mangle]
pub extern "C" fn Servo_ParseProperty(property: *const nsACString, value: *const nsACString,
                                      base: *const nsACString,
                                      data: *const structs::GeckoParserExtraData)
                                      -> RawServoDeclarationBlockStrong {
    let name = unsafe { property.as_ref().unwrap().as_str_unchecked() };
    let id = if let Ok(id) = PropertyId::parse(name.into()) {
        id
    } else {
        return RawServoDeclarationBlockStrong::null()
    };
    let value = unsafe { value.as_ref().unwrap().as_str_unchecked() };

    make_context!((base, data) => (base_url, extra_data));

    let reporter = StdoutErrorReporter;
    let context = ParserContext::new_with_extra_data(Origin::Author,
                                                     &base_url,
                                                     &reporter,
                                                     extra_data);

    match ParsedDeclaration::parse(id, &context, &mut Parser::new(value), false) {
        Ok(parsed) => {
            let mut block = PropertyDeclarationBlock::new();
            // A shorthand may expand to several longhand declarations.
            parsed.expand(|d| block.push(d, Importance::Normal));
            Arc::new(RwLock::new(block)).into_strong()
        }
        Err(_) => RawServoDeclarationBlockStrong::null()
    }
}
/// Parses an element's `style` attribute into a declaration block.
#[no_mangle]
pub extern "C" fn Servo_ParseStyleAttribute(data: *const nsACString,
                                            base: *const nsACString,
                                            raw_extra_data: *const structs::GeckoParserExtraData)
                                            -> RawServoDeclarationBlockStrong {
    let value = unsafe { data.as_ref().unwrap().as_str_unchecked() };
    make_context!((base, raw_extra_data) => (base_url, extra_data));
    Arc::new(RwLock::new(GeckoElement::parse_style_attribute(value, &base_url, extra_data))).into_strong()
}
/// Creates an empty declaration block.
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_CreateEmpty() -> RawServoDeclarationBlockStrong {
    Arc::new(RwLock::new(PropertyDeclarationBlock::new())).into_strong()
}
/// Deep-copies a declaration block into a new, independent block.
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_Clone(declarations: RawServoDeclarationBlockBorrowed)
                                               -> RawServoDeclarationBlockStrong {
    let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
    Arc::new(RwLock::new(declarations.read().clone())).into_strong()
}
/// Structural equality of the declarations in two blocks.
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_Equals(a: RawServoDeclarationBlockBorrowed,
                                                b: RawServoDeclarationBlockBorrowed)
                                                -> bool {
    let lhs = RwLock::<PropertyDeclarationBlock>::as_arc(&a);
    let rhs = RwLock::<PropertyDeclarationBlock>::as_arc(&b);
    *lhs.read().declarations() == *rhs.read().declarations()
}
/// Serializes the whole declaration block as CSS text into `result`.
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_GetCssText(declarations: RawServoDeclarationBlockBorrowed,
                                                    result: *mut nsAString) {
    let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
    declarations.read().to_css(unsafe { result.as_mut().unwrap() }).unwrap();
}
/// Serializes the value of one property from the block into `buffer`.
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_SerializeOneValue(
    declarations: RawServoDeclarationBlockBorrowed,
    property_id: nsCSSPropertyID, buffer: *mut nsAString)
{
    let property_id = get_property_id_from_nscsspropertyid!(property_id, ());

    let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
    let mut string = String::new();
    let rv = declarations.read().single_value_to_css(&property_id, &mut string);
    debug_assert!(rv.is_ok());

    write!(unsafe { &mut *buffer }, "{}", string).expect("Failed to copy string");
}
/// Number of declarations in the block.
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_Count(declarations: RawServoDeclarationBlockBorrowed) -> u32 {
    let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
    declarations.read().declarations().len() as u32
}
/// Writes the name of the declaration at `index` into `result`; returns
/// false when the index is out of bounds.
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_GetNthProperty(declarations: RawServoDeclarationBlockBorrowed,
                                                        index: u32, result: *mut nsAString) -> bool {
    let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
    if let Some(&(ref decl, _)) = declarations.read().declarations().get(index as usize) {
        let result = unsafe { result.as_mut().unwrap() };
        decl.id().to_css(result).unwrap();
        true
    } else {
        false
    }
}
// Parses a property name from a Gecko C string into a PropertyId,
// early-returning `$ret` from the enclosing function on parse failure.
macro_rules! get_property_id_from_property {
    ($property: ident, $ret: expr) => {{
        let property = unsafe { $property.as_ref().unwrap().as_str_unchecked() };
        match PropertyId::parse(Cow::Borrowed(property)) {
            Ok(property_id) => property_id,
            Err(()) => { return $ret; }
        }
    }}
}
/// Shared helper: serializes the value of `property_id` from the block into
/// the Gecko string `value`.
fn get_property_value(declarations: RawServoDeclarationBlockBorrowed,
                      property_id: PropertyId, value: *mut nsAString) {
    let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
    declarations.read().property_value_to_css(&property_id, unsafe { value.as_mut().unwrap() }).unwrap();
}
/// Gets a property value by name (string); silently no-ops on unknown names.
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_GetPropertyValue(declarations: RawServoDeclarationBlockBorrowed,
                                                          property: *const nsACString, value: *mut nsAString) {
    get_property_value(declarations, get_property_id_from_property!(property, ()), value)
}
/// Gets a property value by nsCSSPropertyID; silently no-ops on unknown ids.
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_GetPropertyValueById(declarations: RawServoDeclarationBlockBorrowed,
                                                              property: nsCSSPropertyID, value: *mut nsAString) {
    get_property_value(declarations, get_property_id_from_nscsspropertyid!(property, ()), value)
}
/// Returns whether the given property is declared `!important` in the block
/// (false for unknown property names).
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_GetPropertyIsImportant(declarations: RawServoDeclarationBlockBorrowed,
                                                                property: *const nsACString) -> bool {
    let property_id = get_property_id_from_property!(property, false);
    let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
    declarations.read().property_priority(&property_id).important()
}
/// Shared helper: parses `value` for `property_id` and stores the resulting
/// declaration(s) in the block. Returns true when the block actually changed.
fn set_property(declarations: RawServoDeclarationBlockBorrowed, property_id: PropertyId,
                value: *const nsACString, is_important: bool,
                base: *const nsACString, data: *const structs::GeckoParserExtraData) -> bool {
    let value = unsafe { value.as_ref().unwrap().as_str_unchecked() };
    make_context!((base, data) => (base_url, extra_data));
    if let Ok(parsed) = parse_one_declaration(property_id, value, &base_url,
                                              &StdoutErrorReporter, extra_data) {
        let mut declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations).write();
        let importance = if is_important { Importance::Important } else { Importance::Normal };
        let mut changed = false;
        // Shorthands expand to multiple longhands; the block changed if any
        // individual longhand changed.
        parsed.expand(|decl| {
            changed |= declarations.set_parsed_declaration(decl, importance);
        });
        changed
    } else {
        false
    }
}
/// Sets a property by name (string); returns false for unknown names or
/// unparsable values.
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_SetProperty(declarations: RawServoDeclarationBlockBorrowed,
                                                     property: *const nsACString, value: *const nsACString,
                                                     is_important: bool,
                                                     base: *const nsACString,
                                                     data: *const structs::GeckoParserExtraData) -> bool {
    set_property(declarations, get_property_id_from_property!(property, false),
                 value, is_important, base, data)
}
/// Sets a property by nsCSSPropertyID; returns false for unknown ids or
/// unparsable values.
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_SetPropertyById(declarations: RawServoDeclarationBlockBorrowed,
                                                         property: nsCSSPropertyID, value: *const nsACString,
                                                         is_important: bool,
                                                         base: *const nsACString,
                                                         data: *const structs::GeckoParserExtraData) -> bool {
    set_property(declarations, get_property_id_from_nscsspropertyid!(property, false),
                 value, is_important, base, data)
}
/// Shared helper: removes all declarations for `property_id` from the block.
fn remove_property(declarations: RawServoDeclarationBlockBorrowed, property_id: PropertyId) {
    let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
    declarations.write().remove_property(&property_id);
}
/// Removes a property by name (string); silently no-ops on unknown names.
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_RemoveProperty(declarations: RawServoDeclarationBlockBorrowed,
                                                        property: *const nsACString) {
    remove_property(declarations, get_property_id_from_property!(property, ()))
}
/// Removes a property by nsCSSPropertyID; silently no-ops on unknown ids.
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_RemovePropertyById(declarations: RawServoDeclarationBlockBorrowed,
                                                            property: nsCSSPropertyID) {
    remove_property(declarations, get_property_id_from_nscsspropertyid!(property, ()))
}
/// Serializes the media query list as text into `result`.
#[no_mangle]
pub extern "C" fn Servo_MediaList_GetText(list: RawServoMediaListBorrowed, result: *mut nsAString) {
    let list = RwLock::<MediaList>::as_arc(&list);
    list.read().to_css(unsafe { result.as_mut().unwrap() }).unwrap();
}
/// Replaces the media query list by re-parsing it from `text`.
#[no_mangle]
pub extern "C" fn Servo_MediaList_SetText(list: RawServoMediaListBorrowed, text: *const nsACString) {
    let list = RwLock::<MediaList>::as_arc(&list);
    let text = unsafe { text.as_ref().unwrap().as_str_unchecked() };
    let mut parser = Parser::new(&text);
    *list.write() = parse_media_query_list(&mut parser);
}
/// Number of media queries in the list.
#[no_mangle]
pub extern "C" fn Servo_MediaList_GetLength(list: RawServoMediaListBorrowed) -> u32 {
    let media_list = RwLock::<MediaList>::as_arc(&list);
    let query_count = media_list.read().media_queries.len();
    query_count as u32
}
/// Serializes the media query at `index` into `result`; returns false when
/// the index is out of bounds.
#[no_mangle]
pub extern "C" fn Servo_MediaList_GetMediumAt(list: RawServoMediaListBorrowed, index: u32,
                                              result: *mut nsAString) -> bool {
    let list = RwLock::<MediaList>::as_arc(&list);
    if let Some(media_query) = list.read().media_queries.get(index as usize) {
        media_query.to_css(unsafe { result.as_mut().unwrap() }).unwrap();
        true
    } else {
        false
    }
}
/// Appends a medium (parsed from text) to the media query list.
#[no_mangle]
pub extern "C" fn Servo_MediaList_AppendMedium(list: RawServoMediaListBorrowed,
                                               new_medium: *const nsACString) {
    let list = RwLock::<MediaList>::as_arc(&list);
    let new_medium = unsafe { new_medium.as_ref().unwrap().as_str_unchecked() };
    list.write().append_medium(new_medium);
}
/// Deletes a medium (matched by text) from the media query list; returns
/// whether anything was removed.
#[no_mangle]
pub extern "C" fn Servo_MediaList_DeleteMedium(list: RawServoMediaListBorrowed,
                                               old_medium: *const nsACString) -> bool {
    let list = RwLock::<MediaList>::as_arc(&list);
    let old_medium = unsafe { old_medium.as_ref().unwrap().as_str_unchecked() };
    list.write().delete_medium(old_medium)
}
// Converts an nsCSSPropertyID into a LonghandId, early-returning `$retval`
// (or unit when omitted) and logging an error for shorthands/unknown ids.
macro_rules! get_longhand_from_id {
    ($id:expr, $retval:expr) => {
        match PropertyId::from_nscsspropertyid($id) {
            Ok(PropertyId::Longhand(long)) => long,
            _ => {
                error!("stylo: unknown presentation property with id {:?}", $id);
                return $retval
            }
        }
    };
    ($id:expr) => {
        get_longhand_from_id!($id, ())
    }
}
// Wraps a specified value `$inner` in the PropertyDeclaration variant
// matching `$longhand`; logs and early-returns for unhandled longhands.
macro_rules! match_wrap_declared {
    ($longhand:ident, $($property:ident => $inner:expr,)*) => (
        match $longhand {
            $(
                LonghandId::$property => PropertyDeclaration::$property($inner),
            )*
            _ => {
                error!("stylo: Don't know how to handle presentation property {:?}", $longhand);
                return
            }
        }
    )
}
/// Returns whether the block contains a declaration for the given longhand
/// (false for shorthands/unknown ids).
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_PropertyIsSet(declarations:
                                                       RawServoDeclarationBlockBorrowed,
                                                       property: nsCSSPropertyID)
        -> bool {
    use style::properties::PropertyDeclarationId;
    let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
    let long = get_longhand_from_id!(property, false);
    declarations.read().get(PropertyDeclarationId::Longhand(long)).is_some()
}
/// Presentation-attribute helper: stores an atom-valued identifier
/// (currently only `-x-lang`) into the block.
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_SetIdentStringValue(declarations:
                                                             RawServoDeclarationBlockBorrowed,
                                                             property:
                                                             nsCSSPropertyID,
                                                             value:
                                                             *mut nsIAtom) {
    use style::properties::{PropertyDeclaration, LonghandId};
    use style::properties::longhands::_x_lang::computed_value::T as Lang;

    let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
    let long = get_longhand_from_id!(property);
    let prop = match_wrap_declared! { long,
        XLang => Lang(Atom::from(value)),
    };
    declarations.write().push(prop, Importance::Normal);
}
/// Presentation-attribute helper: stores a keyword value (given as a Gecko
/// keyword integer) for one of the supported longhands.
#[no_mangle]
#[allow(unreachable_code)]
pub extern "C" fn Servo_DeclarationBlock_SetKeywordValue(declarations:
                                                         RawServoDeclarationBlockBorrowed,
                                                         property: nsCSSPropertyID,
                                                         value: i32) {
    use style::properties::{PropertyDeclaration, LonghandId};
    use style::properties::longhands;
    use style::values::specified::{BorderStyle, NoCalcLength};

    let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
    let long = get_longhand_from_id!(property);
    let value = value as u32;

    let prop = match_wrap_declared! { long,
        MozUserModify => longhands::_moz_user_modify::SpecifiedValue::from_gecko_keyword(value),
        // TextEmphasisPosition => FIXME implement text-emphasis-position
        Display => longhands::display::SpecifiedValue::from_gecko_keyword(value),
        Float => longhands::float::SpecifiedValue::from_gecko_keyword(value),
        VerticalAlign => longhands::vertical_align::SpecifiedValue::from_gecko_keyword(value),
        TextAlign => longhands::text_align::SpecifiedValue::from_gecko_keyword(value),
        Clear => longhands::clear::SpecifiedValue::from_gecko_keyword(value),
        FontSize => {
            // We rely on Gecko passing in font-size values (0...7) here.
            longhands::font_size::SpecifiedValue(NoCalcLength::from_font_size_int(value as u8).into())
        },
        ListStyleType => longhands::list_style_type::SpecifiedValue::from_gecko_keyword(value),
        WhiteSpace => longhands::white_space::SpecifiedValue::from_gecko_keyword(value),
        CaptionSide => longhands::caption_side::SpecifiedValue::from_gecko_keyword(value),
        BorderTopStyle => BorderStyle::from_gecko_keyword(value),
        BorderRightStyle => BorderStyle::from_gecko_keyword(value),
        BorderBottomStyle => BorderStyle::from_gecko_keyword(value),
        BorderLeftStyle => BorderStyle::from_gecko_keyword(value),
    };
    declarations.write().push(prop, Importance::Normal);
}
/// Presentation-attribute helper: stores a raw integer value (currently only
/// `-x-span`) into the block.
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_SetIntValue(declarations: RawServoDeclarationBlockBorrowed,
                                                     property: nsCSSPropertyID,
                                                     value: i32) {
    use style::properties::{PropertyDeclaration, LonghandId};
    use style::properties::longhands::_x_span::computed_value::T as Span;

    let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
    let long = get_longhand_from_id!(property);
    let prop = match_wrap_declared! { long,
        XSpan => Span(value),
    };
    declarations.write().push(prop, Importance::Normal);
}
/// Presentation-attribute helper: stores a pixel length for one of the
/// supported dimension/margin/padding/border longhands.
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_SetPixelValue(declarations:
                                                       RawServoDeclarationBlockBorrowed,
                                                       property: nsCSSPropertyID,
                                                       value: f32) {
    use style::properties::{PropertyDeclaration, LonghandId};
    use style::properties::longhands::border_spacing::SpecifiedValue as BorderSpacing;
    use style::values::specified::BorderWidth;
    use style::values::specified::length::NoCalcLength;

    let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
    let long = get_longhand_from_id!(property);
    let nocalc = NoCalcLength::from_px(value);

    let prop = match_wrap_declared! { long,
        Height => nocalc.into(),
        Width => nocalc.into(),
        BorderTopWidth => BorderWidth::Width(nocalc.into()),
        BorderRightWidth => BorderWidth::Width(nocalc.into()),
        BorderBottomWidth => BorderWidth::Width(nocalc.into()),
        BorderLeftWidth => BorderWidth::Width(nocalc.into()),
        MarginTop => nocalc.into(),
        MarginRight => nocalc.into(),
        MarginBottom => nocalc.into(),
        MarginLeft => nocalc.into(),
        PaddingTop => nocalc.into(),
        PaddingRight => nocalc.into(),
        PaddingBottom => nocalc.into(),
        PaddingLeft => nocalc.into(),
        // border-spacing applies the same length to both axes here.
        BorderSpacing => Box::new(
            BorderSpacing {
                horizontal: nocalc.into(),
                vertical: nocalc.into(),
            }
        ),
    };
    declarations.write().push(prop, Importance::Normal);
}
/// Presentation-attribute helper: stores a percentage value for one of the
/// supported dimension/margin longhands.
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_SetPercentValue(declarations:
                                                         RawServoDeclarationBlockBorrowed,
                                                         property: nsCSSPropertyID,
                                                         value: f32) {
    use style::properties::{PropertyDeclaration, LonghandId};
    use style::values::specified::length::Percentage;

    let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
    let long = get_longhand_from_id!(property);
    let pc = Percentage(value);

    let prop = match_wrap_declared! { long,
        Height => pc.into(),
        Width => pc.into(),
        MarginTop => pc.into(),
        MarginRight => pc.into(),
        MarginBottom => pc.into(),
        MarginLeft => pc.into(),
    };
    declarations.write().push(prop, Importance::Normal);
}
/// Presentation-attribute helper: stores `auto` for one of the supported
/// dimension/margin longhands.
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_SetAutoValue(declarations:
                                                      RawServoDeclarationBlockBorrowed,
                                                      property: nsCSSPropertyID) {
    use style::properties::{PropertyDeclaration, LonghandId};
    use style::values::specified::LengthOrPercentageOrAuto;

    let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
    let long = get_longhand_from_id!(property);
    let auto = LengthOrPercentageOrAuto::Auto;

    let prop = match_wrap_declared! { long,
        Height => auto,
        Width => auto,
        MarginTop => auto,
        MarginRight => auto,
        MarginBottom => auto,
        MarginLeft => auto,
    };
    declarations.write().push(prop, Importance::Normal);
}
/// Presentation-attribute helper: stores `currentColor` for one of the
/// border color longhands.
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_SetCurrentColor(declarations:
                                                         RawServoDeclarationBlockBorrowed,
                                                         property: nsCSSPropertyID) {
    use style::properties::{PropertyDeclaration, LonghandId};
    use style::values::specified::{Color, CSSColor};

    let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
    let long = get_longhand_from_id!(property);
    let cc = CSSColor { parsed: Color::CurrentColor, authored: None };

    let prop = match_wrap_declared! { long,
        BorderTopColor => cc,
        BorderRightColor => cc,
        BorderBottomColor => cc,
        BorderLeftColor => cc,
    };
    declarations.write().push(prop, Importance::Normal);
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_SetColorValue(declarations:
RawServoDeclarationBlockBorrowed,
property: nsCSSPropertyID,
value: structs::nscolor) {
use style::gecko::values::convert_nscolor_to_rgba;
use style::properties::{PropertyDeclaration, LonghandId};
use style::properties::longhands;
use style::values::specified::{Color, CSSColor};
let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
let long = get_longhand_from_id!(property);
let rgba = convert_nscolor_to_rgba(value);
let color = CSSColor { parsed: Color::RGBA(rgba), authored: None };
let prop = match_wrap_declared! { long,
BorderTopColor => color,
BorderRightColor => color,
BorderBottomColor => color,
BorderLeftColor => color,
Color => longhands::color::SpecifiedValue(color),
BackgroundColor => color,
};
declarations.write().push(prop, Importance::Normal);
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_SetFontFamily(declarations:
RawServoDeclarationBlockBorrowed,
value: *const nsAString) {
use cssparser::Parser;
use style::properties::PropertyDeclaration;
use style::properties::longhands::font_family::SpecifiedValue as FontFamily;
let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
let string = unsafe { (*value).to_string() };
let mut parser = Parser::new(&string);
if let Ok(family) = FontFamily::parse(&mut parser) {
if parser.is_exhausted() {
let decl = PropertyDeclaration::FontFamily(family);
declarations.write().push(decl, Importance::Normal);
}
}
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_SetTextDecorationColorOverride(declarations:
RawServoDeclarationBlockBorrowed) {
use style::properties::PropertyDeclaration;
use style::properties::longhands::text_decoration_line;
let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
let mut decoration = text_decoration_line::computed_value::none;
decoration |= text_decoration_line::COLOR_OVERRIDE;
let decl = PropertyDeclaration::TextDecorationLine(decoration);
declarations.write().push(decl, Importance::Normal);
}
#[no_mangle]
pub extern "C" fn Servo_CSSSupports2(property: *const nsACString, value: *const nsACString) -> bool {
let property = unsafe { property.as_ref().unwrap().as_str_unchecked() };
let id = if let Ok(id) = PropertyId::parse(property.into()) {
id
} else {
return false
};
let value = unsafe { value.as_ref().unwrap().as_str_unchecked() };
let base_url = &*DUMMY_BASE_URL;
let extra_data = ParserContextExtraData::default();
parse_one_declaration(id, &value, &base_url, &StdoutErrorReporter, extra_data).is_ok()
}
#[no_mangle]
pub extern "C" fn Servo_CSSSupports(cond: *const nsACString) -> bool {
let condition = unsafe { cond.as_ref().unwrap().as_str_unchecked() };
let mut input = Parser::new(&condition);
let cond = parse_condition_or_declaration(&mut input);
if let Ok(cond) = cond {
let url = ServoUrl::parse("about:blank").unwrap();
let reporter = StdoutErrorReporter;
let context = ParserContext::new_for_cssom(&url, &reporter);
cond.eval(&context)
} else {
false
}
}
/// Only safe to call on the main thread, with exclusive access to the element and
/// its ancestors.
unsafe fn maybe_restyle<'a>(data: &'a mut AtomicRefMut<ElementData>, element: GeckoElement)
-> Option<&'a mut RestyleData>
{
// Don't generate a useless RestyleData if the element hasn't been styled.
if !data.has_styles() {
return None;
}
// Propagate the bit up the chain.
let mut curr = element;
while let Some(parent) = curr.parent_element() {
curr = parent;
if curr.has_dirty_descendants() { break; }
curr.set_dirty_descendants();
}
bindings::Gecko_SetOwnerDocumentNeedsStyleFlush(element.0);
// Ensure and return the RestyleData.
Some(data.ensure_restyle())
}
#[no_mangle]
pub extern "C" fn Servo_Element_GetSnapshot(element: RawGeckoElementBorrowed) -> *mut structs::ServoElementSnapshot
{
let element = GeckoElement(element);
let snapshot = match element.mutate_data() {
None => ptr::null_mut(),
Some(mut data) => {
if let Some(restyle_data) = unsafe { maybe_restyle(&mut data, element) } {
restyle_data.snapshot.ensure(|| element.create_snapshot()).borrow_mut_raw()
} else {
ptr::null_mut()
}
},
};
debug!("Servo_Element_GetSnapshot: {:?}: {:?}", element, snapshot);
snapshot
}
#[no_mangle]
pub extern "C" fn Servo_NoteExplicitHints(element: RawGeckoElementBorrowed,
restyle_hint: nsRestyleHint,
change_hint: nsChangeHint) {
let element = GeckoElement(element);
let damage = GeckoRestyleDamage::new(change_hint);
debug!("Servo_NoteExplicitHints: {:?}, restyle_hint={:?}, change_hint={:?}",
element, restyle_hint, change_hint);
let mut maybe_data = element.mutate_data();
let maybe_restyle_data =
maybe_data.as_mut().and_then(|d| unsafe { maybe_restyle(d, element) });
if let Some(restyle_data) = maybe_restyle_data {
let restyle_hint: RestyleHint = restyle_hint.into();
restyle_data.hint.insert(&restyle_hint.into());
restyle_data.damage |= damage;
} else {
debug!("(Element not styled, discarding hints)");
}
}
#[no_mangle]
pub extern "C" fn Servo_ImportRule_GetSheet(import_rule:
RawServoImportRuleBorrowed)
-> RawServoStyleSheetStrong {
let import_rule = RwLock::<ImportRule>::as_arc(&import_rule);
import_rule.read().stylesheet.clone().into_strong()
}
#[no_mangle]
pub extern "C" fn Servo_TakeChangeHint(element: RawGeckoElementBorrowed) -> nsChangeHint
{
let element = GeckoElement(element);
let damage = if let Some(mut data) = element.mutate_data() {
let d = data.get_restyle().map_or(GeckoRestyleDamage::empty(), |r| r.damage);
data.clear_restyle();
d
} else {
warn!("Trying to get change hint from unstyled element");
GeckoRestyleDamage::empty()
};
debug!("Servo_TakeChangeHint: {:?}, damage={:?}", element, damage);
damage.as_change_hint()
}
#[no_mangle]
pub extern "C" fn Servo_ResolveStyle(element: RawGeckoElementBorrowed,
raw_data: RawServoStyleSetBorrowed)
-> ServoComputedValuesStrong
{
let element = GeckoElement(element);
debug!("Servo_ResolveStyle: {:?}", element);
let data = unsafe { element.ensure_data() }.borrow_mut();
if !data.has_current_styles() {
warn!("Resolving style on unstyled element with lazy computation forbidden.");
let per_doc_data = PerDocumentStyleData::from_ffi(raw_data).borrow();
return per_doc_data.default_computed_values().clone().into_strong();
}
data.styles().primary.values().clone().into_strong()
}
#[no_mangle]
pub extern "C" fn Servo_ResolveStyleLazily(element: RawGeckoElementBorrowed,
pseudo_tag: *mut nsIAtom,
raw_data: RawServoStyleSetBorrowed)
-> ServoComputedValuesStrong
{
let element = GeckoElement(element);
let doc_data = PerDocumentStyleData::from_ffi(raw_data);
let finish = |styles: &ElementStyles| -> Arc<ComputedValues> {
let maybe_pseudo = if !pseudo_tag.is_null() {
get_pseudo_style(element, pseudo_tag, styles, doc_data)
} else {
None
};
maybe_pseudo.unwrap_or_else(|| styles.primary.values().clone())
};
// In the common case we already have the style. Check that before setting
// up all the computation machinery.
let mut result = element.mutate_data()
.and_then(|d| d.get_styles().map(&finish));
if result.is_some() {
return result.unwrap().into_strong();
}
// We don't have the style ready. Go ahead and compute it as necessary.
let shared = create_shared_context(&mut doc_data.borrow_mut());
let mut tlc = ThreadLocalStyleContext::new(&shared);
let mut context = StyleContext {
shared: &shared,
thread_local: &mut tlc,
};
let ensure = |el: GeckoElement| { unsafe { el.ensure_data(); } };
let clear = |el: GeckoElement| el.clear_data();
resolve_style(&mut context, element, &ensure, &clear,
|styles| result = Some(finish(styles)));
result.unwrap().into_strong()
}
#[no_mangle]
pub extern "C" fn Servo_GetComputedKeyframeValues(keyframes: RawGeckoKeyframeListBorrowed,
style: ServoComputedValuesBorrowed,
parent_style: ServoComputedValuesBorrowedOrNull,
raw_data: RawServoStyleSetBorrowed,
computed_keyframes: RawGeckoComputedKeyframeValuesListBorrowedMut)
{
use style::properties::LonghandIdSet;
use style::properties::declaration_block::Importance;
use style::values::computed::Context;
let data = PerDocumentStyleData::from_ffi(raw_data).borrow();
let style = ComputedValues::as_arc(&style);
let parent_style = parent_style.as_ref().map(|r| &**ComputedValues::as_arc(&r));
let default_values = data.default_computed_values();
let context = Context {
is_root_element: false,
device: &data.stylist.device,
inherited_style: parent_style.unwrap_or(default_values),
layout_parent_style: parent_style.unwrap_or(default_values),
style: (**style).clone(),
font_metrics_provider: None,
};
for (index, keyframe) in keyframes.iter().enumerate() {
let ref mut animation_values = computed_keyframes[index];
let mut seen = LonghandIdSet::new();
// mServoDeclarationBlock is null in the case where we have an invalid css property.
let iter = keyframe.mPropertyValues.iter()
.filter(|&property| !property.mServoDeclarationBlock.mRawPtr.is_null());
for property in iter {
let declarations = unsafe { &*property.mServoDeclarationBlock.mRawPtr.clone() };
let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
let guard = declarations.read();
let anim_iter = guard.declarations()
.iter()
.filter_map(|&(ref decl, imp)| {
if imp == Importance::Normal {
let property = TransitionProperty::from_declaration(decl);
let animation = AnimationValue::from_declaration(decl, &context, default_values);
debug_assert!(property.is_none() == animation.is_none(),
"The failure condition of TransitionProperty::from_declaration \
and AnimationValue::from_declaration should be the same");
// Skip the property if either ::from_declaration fails.
if property.is_none() || animation.is_none() {
None
} else {
Some((property.unwrap(), animation.unwrap()))
}
} else {
None
}
});
for (i, anim) in anim_iter.enumerate() {
if !seen.has_transition_property_bit(&anim.0) {
// This is safe since we immediately write to the uninitialized values.
unsafe { animation_values.set_len((i + 1) as u32) };
seen.set_transition_property_bit(&anim.0);
animation_values[i].mProperty = anim.0.into();
animation_values[i].mValue.mServo.set_arc_leaky(Arc::new(anim.1));
}
}
}
}
}
#[no_mangle]
pub extern "C" fn Servo_AssertTreeIsClean(root: RawGeckoElementBorrowed) {
if !cfg!(debug_assertions) {
panic!("Calling Servo_AssertTreeIsClean in release build");
}
let root = GeckoElement(root);
fn assert_subtree_is_clean<'le>(el: GeckoElement<'le>) {
debug_assert!(!el.has_dirty_descendants());
for child in el.as_node().children() {
if let Some(child) = child.as_element() {
assert_subtree_is_clean(child);
}
}
}
assert_subtree_is_clean(root);
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_FillKeyframesForName(raw_data: RawServoStyleSetBorrowed,
name: *const nsACString,
timing_function: *const nsTimingFunction,
style: ServoComputedValuesBorrowed,
keyframes: RawGeckoKeyframeListBorrowedMut) -> bool {
use style::gecko_bindings::structs::Keyframe;
use style::properties::LonghandIdSet;
let data = PerDocumentStyleData::from_ffi(raw_data).borrow();
let name = unsafe { Atom::from(name.as_ref().unwrap().as_str_unchecked()) };
let style_timing_function = unsafe { timing_function.as_ref().unwrap() };
let style = ComputedValues::as_arc(&style);
if let Some(ref animation) = data.stylist.animations().get(&name) {
for step in &animation.steps {
// Override timing_function if the keyframe has animation-timing-function.
let timing_function = if let Some(val) = step.get_animation_timing_function() {
val.into()
} else {
*style_timing_function
};
let keyframe = unsafe {
Gecko_AnimationAppendKeyframe(keyframes,
step.start_percentage.0 as f32,
&timing_function)
};
fn add_computed_property_value(keyframe: *mut Keyframe,
index: usize,
style: &ComputedValues,
property: &TransitionProperty) {
let block = style.to_declaration_block(property.clone().into());
unsafe {
(*keyframe).mPropertyValues.set_len((index + 1) as u32);
(*keyframe).mPropertyValues[index].mProperty = property.clone().into();
// FIXME. Do not set computed values once we handles missing keyframes
// with additive composition.
(*keyframe).mPropertyValues[index].mServoDeclarationBlock.set_arc_leaky(
Arc::new(RwLock::new(block)));
}
}
match step.value {
KeyframesStepValue::ComputedValues => {
for (index, property) in animation.properties_changed.iter().enumerate() {
add_computed_property_value(keyframe, index, style, property);
}
},
KeyframesStepValue::Declarations { ref block } => {
let guard = block.read();
// Filter out non-animatable properties.
let animatable =
guard.declarations()
.iter()
.filter(|&&(ref declaration, _)| {
declaration.is_animatable()
});
let mut seen = LonghandIdSet::new();
for (index, &(ref declaration, _)) in animatable.enumerate() {
unsafe {
let property = TransitionProperty::from_declaration(declaration).unwrap();
(*keyframe).mPropertyValues.set_len((index + 1) as u32);
(*keyframe).mPropertyValues[index].mProperty = property.into();
(*keyframe).mPropertyValues[index].mServoDeclarationBlock.set_arc_leaky(
Arc::new(RwLock::new(PropertyDeclarationBlock::with_one(
declaration.clone(), Importance::Normal
))));
if step.start_percentage.0 == 0. ||
step.start_percentage.0 == 1. {
seen.set_transition_property_bit(&property);
}
}
}
// Append missing property values in the initial or the finial keyframes.
if step.start_percentage.0 == 0. ||
step.start_percentage.0 == 1. {
let mut index = unsafe { (*keyframe).mPropertyValues.len() };
for property in animation.properties_changed.iter() {
if !seen.has_transition_property_bit(&property) {
add_computed_property_value(keyframe, index, style, property);
index += 1;
}
}
}
},
}
}
return true
}
false
}<|fim▁end|> | |
<|file_name|>kindck-nonsendable-1.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
<|fim▁hole|>fn foo(_x: @uint) {}
fn main() {
let x = @3u;
let _: ~fn() = || foo(x); //~ ERROR does not fulfill `Send`
let _: ~fn() = || foo(x); //~ ERROR does not fulfill `Send`
let _: ~fn() = || foo(x); //~ ERROR does not fulfill `Send`
}<|fim▁end|> | |
<|file_name|>windows.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | ../../../../../../share/pyshared/ubuntu-sso-client/ubuntu_sso/xdg_base_directory/windows.py |
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>"""
WSGI config for hpt project.
<|fim▁hole|>https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "hpt.settings")
application = get_wsgi_application()<|fim▁end|> | It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see |
<|file_name|>macro_crate_test.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// force-host
#![feature(plugin_registrar, quote, rustc_private)]
extern crate syntax;
extern crate rustc;
use syntax::ast::{self, TokenTree, Item, MetaItem, ImplItem, TraitItem};
use syntax::codemap::Span;
use syntax::ext::base::*;
use syntax::parse::{self, token};
use syntax::ptr::P;
use rustc::plugin::Registry;
#[macro_export]
macro_rules! exported_macro { () => (2) }
macro_rules! unexported_macro { () => (3) }
#[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) {
reg.register_macro("make_a_1", expand_make_a_1);
reg.register_macro("identity", expand_identity);
reg.register_syntax_extension(
token::intern("into_foo"),
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
Modifier(Box::new(expand_into_foo)));
reg.register_syntax_extension(
token::intern("into_multi_foo"),
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
MultiModifier(Box::new(expand_into_foo_multi)));
reg.register_syntax_extension(
token::intern("duplicate"),
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
MultiDecorator(Box::new(expand_duplicate)));
}
fn expand_make_a_1(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree])
-> Box<MacResult+'static> {
if !tts.is_empty() {
cx.span_fatal(sp, "make_a_1 takes no arguments");
}
MacEager::expr(quote_expr!(cx, 1))
}
// See Issue #15750
fn expand_identity(cx: &mut ExtCtxt, _span: Span, tts: &[TokenTree])
-> Box<MacResult+'static> {
// Parse an expression and emit it unchanged.
let mut parser = parse::new_parser_from_tts(cx.parse_sess(),
cx.cfg(), tts.to_vec());
let expr = parser.parse_expr();
MacEager::expr(quote_expr!(&mut *cx, $expr))
}
fn expand_into_foo(cx: &mut ExtCtxt, sp: Span, attr: &MetaItem, it: P<Item>)
-> P<Item> {
P(Item {
attrs: it.attrs.clone(),
..(*quote_item!(cx, enum Foo { Bar, Baz }).unwrap()).clone()
})
}
fn expand_into_foo_multi(cx: &mut ExtCtxt,
sp: Span,
attr: &MetaItem,
it: Annotatable) -> Annotatable {
match it {
Annotatable::Item(it) => {
Annotatable::Item(P(Item {
attrs: it.attrs.clone(),
..(*quote_item!(cx, enum Foo2 { Bar2, Baz2 }).unwrap()).clone()
}))
}
Annotatable::ImplItem(it) => {
quote_item!(cx, impl X { fn foo(&self) -> i32 { 42 } }).unwrap().and_then(|i| {
match i.node {
ast::ItemImpl(_, _, _, _, _, mut items) => {
Annotatable::ImplItem(items.pop().expect("impl method not found"))
}
_ => unreachable!("impl parsed to something other than impl")
}
})
}
Annotatable::TraitItem(it) => {
quote_item!(cx, trait X { fn foo(&self) -> i32 { 0 } }).unwrap().and_then(|i| {
match i.node {
ast::ItemTrait(_, _, _, mut items) => {
Annotatable::TraitItem(items.pop().expect("trait method not found"))
}
_ => unreachable!("trait parsed to something other than trait")
}
})
}
}
}
// Create a duplicate of the annotatable, based on the MetaItem
fn expand_duplicate(cx: &mut ExtCtxt,
sp: Span,
mi: &MetaItem,
it: &Annotatable,
push: &mut FnMut(Annotatable))
{
let copy_name = match mi.node {
ast::MetaItem_::MetaList(_, ref xs) => {<|fim▁hole|> return;
}
}
_ => {
cx.span_err(mi.span, "Expected list");
return;
}
};
// Duplicate the item but replace its ident by the MetaItem
match it.clone() {
Annotatable::Item(it) => {
let mut new_it = (*it).clone();
new_it.attrs.clear();
new_it.ident = copy_name;
push(Annotatable::Item(P(new_it)));
}
Annotatable::ImplItem(it) => {
let mut new_it = (*it).clone();
new_it.attrs.clear();
new_it.ident = copy_name;
push(Annotatable::ImplItem(P(new_it)));
}
Annotatable::TraitItem(tt) => {
let mut new_it = (*tt).clone();
new_it.attrs.clear();
new_it.ident = copy_name;
push(Annotatable::TraitItem(P(new_it)));
}
}
}
pub fn foo() {}<|fim▁end|> | if let ast::MetaItem_::MetaWord(ref w) = xs[0].node {
token::str_to_ident(&w)
} else {
cx.span_err(mi.span, "Expected word"); |
<|file_name|>StiRadarAreaSeries.java<|end_file_name|><|fim▁begin|><|fim▁hole|> * Could not load the following classes:
* com.stimulsoft.base.drawing.StiBrush
* com.stimulsoft.base.drawing.StiColor
* com.stimulsoft.base.drawing.StiSolidBrush
* com.stimulsoft.base.drawing.enums.StiPenStyle
* com.stimulsoft.base.serializing.annotations.StiDefaulValue
* com.stimulsoft.base.serializing.annotations.StiSerializable
*/
package com.stimulsoft.report.chart.view.series.radar;
import com.stimulsoft.base.drawing.StiBrush;
import com.stimulsoft.base.drawing.StiColor;
import com.stimulsoft.base.drawing.StiSolidBrush;
import com.stimulsoft.base.drawing.enums.StiPenStyle;
import com.stimulsoft.base.serializing.annotations.StiDefaulValue;
import com.stimulsoft.base.serializing.annotations.StiSerializable;
import com.stimulsoft.report.chart.core.series.StiSeriesCoreXF;
import com.stimulsoft.report.chart.core.series.radar.StiRadarAreaSeriesCoreXF;
import com.stimulsoft.report.chart.interfaces.series.IStiSeries;
import com.stimulsoft.report.chart.interfaces.series.radar.IStiRadarAreaSeries;
import com.stimulsoft.report.chart.view.areas.radar.StiRadarAreaArea;
import com.stimulsoft.report.chart.view.series.radar.StiRadarSeries;
public class StiRadarAreaSeries
extends StiRadarSeries
implements IStiRadarAreaSeries {
private StiColor lineColor = StiColor.Black;
private StiPenStyle lineStyle = StiPenStyle.Solid;
private boolean lighting = true;
private float lineWidth = 2.0f;
private StiBrush brush = new StiSolidBrush(StiColor.Gainsboro);
@StiSerializable
public StiColor getLineColor() {
return this.lineColor;
}
public void setLineColor(StiColor stiColor) {
this.lineColor = stiColor;
}
@StiDefaulValue(value="Solid")
@StiSerializable
public StiPenStyle getLineStyle() {
return this.lineStyle;
}
public void setLineStyle(StiPenStyle stiPenStyle) {
this.lineStyle = stiPenStyle;
}
@StiDefaulValue(value="true")
@StiSerializable
public boolean getLighting() {
return this.lighting;
}
public void setLighting(boolean bl) {
this.lighting = bl;
}
@StiDefaulValue(value="2.0")
@StiSerializable
public float getLineWidth() {
return this.lineWidth;
}
public void setLineWidth(float f) {
if (f > 0.0f) {
this.lineWidth = f;
}
}
@StiSerializable(shortName="bh")
public final StiBrush getBrush() {
return this.brush;
}
public final void setBrush(StiBrush stiBrush) {
this.brush = stiBrush;
}
public Class GetDefaultAreaType() {
return StiRadarAreaArea.class;
}
public StiRadarAreaSeries() {
this.setCore(new StiRadarAreaSeriesCoreXF(this));
}
}<|fim▁end|> | /*
* Decompiled with CFR 0_114.
* |
<|file_name|>run.py<|end_file_name|><|fim▁begin|>import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import logging.config
from flask import Flask, Blueprint
from werkzeug.contrib.fixers import ProxyFix
from ceep_api import settings
from ceep_api.api import restplus
from ceep_api.api.endpoints.adbmonitors import ns as adbmonitors_namespace
from ceep_api.api.restplus import api
from ceep_api.database import db
def configure_app(flask_app):
flask_app.config['SQLALCHEMY_DATABASE_URI'] = settings.SQLALCHEMY_DATABASE_URI
flask_app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = settings.SQLALCHEMY_TRACK_MODIFICATIONS
flask_app.config['SWAGGER_UI_DOC_EXPANSION'] = settings.RESTPLUS_SWAGGER_UI_DOC_EXPANSION
flask_app.config['RESTPLUS_VALIDATE'] = settings.RESTPLUS_VALIDATE
flask_app.config['RESTPLUS_MASK_SWAGGER'] = settings.RESTPLUS_MASK_SWAGGER
flask_app.config['ERROR_404_HELP'] = settings.RESTPLUS_ERROR_404_HELP
<|fim▁hole|> configure_app(flask_app)
blueprint = Blueprint('api', __name__, url_prefix='/api/1.0')
api.init_app(blueprint)
api.add_namespace(adbmonitors_namespace)
flask_app.register_blueprint(blueprint)
db.init_app(flask_app)
logging.config.fileConfig('logging.conf')
log = logging.getLogger(__name__)
app = Flask(__name__)
initialize_app(app)
app.wsgi_app = ProxyFix(app.wsgi_app)<|fim▁end|> |
def initialize_app(flask_app):
log.debug('Initialize APP...') |
<|file_name|>cfg-attr-crate.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// https://github.com/rust-lang/rust/issues/21833#issuecomment-72353044
// pretty-expanded FIXME #23616
<|fim▁hole|><|fim▁end|> | #![cfg_attr(not_used, no_core)]
fn main() { } |
<|file_name|>gradient_optimizer.py<|end_file_name|><|fim▁begin|>import numpy as np
import theano
import theano.tensor as T
class GradientOptimizer:
def __init__(self, lr):
self.lr = lr
def __call__(self, cost, params):
pass
@property
def learningRate(self):
return self.lr
@learningRate.setter
def learningRate(self, i):
self.lr = i
class RMSprop(GradientOptimizer):
def __init__(self, lr=0.01, rho=0.9, epsilon=1e-6):
super(RMSprop, self).__init__(lr)
self.rho = rho
self.epsilon = epsilon
def __call__(self, cost, params):
grads = T.grad(cost=cost, wrt=params)
updates = []
for p, g in zip(params, grads):
acc = theano.shared(p.get_value() * 0.)
acc_new = self.rho * acc + (1 - self.rho) * g ** 2
gradient_scaling = T.sqrt(acc_new + self.epsilon)
g = g / gradient_scaling
updates.append((acc, acc_new))
updates.append((p, p - self.lr * g))
return updates
class Adam(GradientOptimizer):
def __init__(self, lr=0.01, beta1=0.9, beta2=0.999, epsilon=1e-7):
super(Adam, self).__init__(lr)
self.beta1 = beta1
self.beta2 = beta2
self.epsilon = epsilon
def __call__(self, cost, params):
grads = T.grad(cost=cost ,wrt=params)
updates = []
exp = theano.shared(np.float32(1.0),name='exp',borrow=True)
updates.append((exp, exp+1))
for p, g in zip(params, grads):
m = theano.shared(p.get_value() * 0.)
v = theano.shared(p.get_value() * 0.)
m_new = self.beta1 * m + (1 - self.beta1) * g
v_new = self.beta2 * v + (1 - self.beta2) * g**2
mt = m_new / (1 - self.beta1**exp)
vt = v_new / (1 - self.beta2**exp)
updates.append((m, m_new))
updates.append((v, v_new))
updates.append((p, p - self.lr * mt / (T.sqrt(vt) + self.epsilon)))
return updates
class Momentum(GradientOptimizer):
def __init__(self, lr=0.01, mu=0.5):
super(Momentum, self).__init__(lr)
self.mu = mu
def __call__(self, cost, params):
grads = T.grad(cost=cost ,wrt=params)
updates = []
for p, g in zip(params, grads):
v = theano.shared(p.get_value() * 0.)
new_v = self.mu * v + self.lr * g
updates.append((v, new_v))
updates.append((p, p - new_v))
return updates
class Nesterov(GradientOptimizer):
def __init__(self, lr=0.01, mu=0.5):
super(Nesterov, self).__init__(lr)<|fim▁hole|>
def __call__(self, cost, params):
grads = T.grad(cost=cost ,wrt=params)
updates = []
for p, g in zip(params, grads):
v = theano.shared(p.get_value() * 0.)
new_v = self.mu * v + self.lr * theano.clone(g, replace = {p: p - self.mu * v})
updates.append((v, new_v))
updates.append((p, p - new_v))
return updates
class Adagrad(GradientOptimizer):
def __init__(self, lr=0.01, epsilon=1e-7):
super(Adagrad, self).__init__(lr)
self.epsilon = epsilon
def __call__(self, cost, params):
grads = T.grad(cost=cost ,wrt=params)
updates = []
for p, g in zip(params, grads):
acc = theano.shared(p.get_value() * 0.)
acc_new = acc + g**2
updates.append((acc, acc_new))
updates.append((p, p - self.lr * g / T.sqrt(acc_new + self.epsilon)))
return updates<|fim▁end|> | self.mu = mu |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
__author__ = 'Michael Joseph'
__email__ = 'michaeljoseph@gmail.com'
__url__ = 'https://github.com/michaeljoseph/sealeyes'
__version__ = '0.0.1'
def sealeyes():
return 'Hello World!'<|fim▁end|> | """Python Library Boilerplate contains all the boilerplate you need to create a Python package.""" |
<|file_name|>row.go<|end_file_name|><|fim▁begin|>package godependencies
import (
"github.com/goatcms/goatcli/cliapp/common/config"
)
// SetRow is single record in Set
type SetRow struct {
Dependency *config.Dependency
Imported bool
}
// SetImported update imported value
func (row *SetRow) SetImported(value bool) {<|fim▁hole|>}<|fim▁end|> | row.Imported = value |
<|file_name|>arkanoid.cpp<|end_file_name|><|fim▁begin|>// license:BSD-3-Clause
// copyright-holders:Brad Oliver
/***************************************************************************
arkanoid.cpp
Functions to emulate the video hardware of the machine.
***************************************************************************/
#include "emu.h"
#include "includes/arkanoid.h"
void arkanoid_state::arkanoid_videoram_w(offs_t offset, uint8_t data)
{
m_videoram[offset] = data;
m_bg_tilemap->mark_tile_dirty(offset / 2);
}
void arkanoid_state::arkanoid_d008_w(uint8_t data)
{
int bank;
/* bits 0 and 1 flip X and Y */
flip_screen_x_set(data & 0x01);
flip_screen_y_set(data & 0x02);
/* bit 2 selects the input paddle */
m_paddle_select = data & 0x04;
/* bit 3 is coin lockout (but not the service coin) */
machine().bookkeeping().coin_lockout_w(0, !(data & 0x08));
machine().bookkeeping().coin_lockout_w(1, !(data & 0x08));
/* bit 4 is unknown */
/* bit 5 controls the graphics rom bank */
bank = (data & 0x20) >> 5;
if (m_gfxbank != bank)
{
m_gfxbank = bank;
m_bg_tilemap->mark_all_dirty();
}
/* bit 6 controls the palette bank */
bank = (data & 0x40) >> 6;
if (m_palettebank != bank)
{
m_palettebank = bank;
m_bg_tilemap->mark_all_dirty();
}
// bit 7 resets the MCU and semaphore flipflops
// This bit is flipped early in bootup just prior to accessing the MCU for the first time.
if (m_mcuintf.found()) // Bootlegs don't have the MCU but still set this bit
m_mcuintf->reset_w(BIT(data, 7) ? CLEAR_LINE : ASSERT_LINE);
}
void arkanoid_state::brixian_d008_w(uint8_t data)
{
int bank;
/* bits 0 and 1 flip X and Y */
flip_screen_x_set(data & 0x01);
flip_screen_y_set(data & 0x02);
/* bit 2 selects the input paddle */
/* - not relevant to brixian */
/* bit 3 is coin lockout (but not the service coin) */
/* - not here, means you can only play 1 game */
/* bit 4 is unknown */
/* bit 5 controls the graphics rom bank */
bank = (data & 0x20) >> 5;
if (m_gfxbank != bank)
{
m_gfxbank = bank;
m_bg_tilemap->mark_all_dirty();
}
/* bit 6 controls the palette bank */
bank = (data & 0x40) >> 6;
if (m_palettebank != bank)
{
m_palettebank = bank;
m_bg_tilemap->mark_all_dirty();
}
/* bit 7 is MCU reset on Arkanoid */
/* - does it reset the Brixian MCU too? */
}
/* different hook-up, everything except for bits 0-1 and 7 aren't tested afaik. */
void arkanoid_state::tetrsark_d008_w(uint8_t data)
{
int bank;
/* bits 0 and 1 flip X and Y */
flip_screen_x_set(data & 0x01);
flip_screen_y_set(data & 0x02);
/* bit 2 selects the input paddle? */
m_paddle_select = data & 0x04;
/* bit 3-4 is unknown? */
/* bit 5 controls the graphics rom bank */
bank = (data & 0x20) >> 5;
if (m_gfxbank != bank)
{
m_gfxbank = bank;
m_bg_tilemap->mark_all_dirty();
}
/* bit 6 controls the palette bank */
bank = (data & 0x40) >> 6;
if (m_palettebank != bank)
{
m_palettebank = bank;
m_bg_tilemap->mark_all_dirty();
}
/* bit 7 is coin lockout (but not the service coin) */
machine().bookkeeping().coin_lockout_w(0, !(data & 0x80));
machine().bookkeeping().coin_lockout_w(1, !(data & 0x80));
}
void arkanoid_state::hexa_d008_w(uint8_t data)
{
/* bits 0 and 1 flip X and Y */
flip_screen_x_set(data & 0x01);
flip_screen_y_set(data & 0x02);
/* bit 2 - 3 unknown */
/* bit 4 could be the ROM bank selector for 8000-bfff (not sure) */
membank("bank1")->set_entry(((data & 0x10) >> 4));
/* bit 5 controls the graphics rom bank */
if (m_gfxbank != ((data & 0x20) >> 5))
{
m_gfxbank = (data & 0x20) >> 5;
m_bg_tilemap->mark_all_dirty();
}
/* bit 6 - 7 unknown */
}
TILE_GET_INFO_MEMBER(arkanoid_state::get_bg_tile_info)
{
int offs = tile_index * 2;
int code = m_videoram[offs + 1] + ((m_videoram[offs] & 0x07) << 8) + 2048 * m_gfxbank;
int color = ((m_videoram[offs] & 0xf8) >> 3) + 32 * m_palettebank;
tileinfo.set(0, code, color, 0);
}
void arkanoid_state::video_start()
{
m_bg_tilemap = &machine().tilemap().create(*m_gfxdecode, tilemap_get_info_delegate(*this, FUNC(arkanoid_state::get_bg_tile_info)), TILEMAP_SCAN_ROWS, 8, 8, 32, 32);
}
void arkanoid_state::draw_sprites( bitmap_ind16 &bitmap, const rectangle &cliprect )
{
int offs;
for (offs = 0; offs < m_spriteram.bytes(); offs += 4)
{
int sx, sy, code;
sx = m_spriteram[offs];
sy = 248 - m_spriteram[offs + 1];
if (flip_screen_x())
sx = 248 - sx;
if (flip_screen_y())
sy = 248 - sy;
code = m_spriteram[offs + 3] + ((m_spriteram[offs + 2] & 0x03) << 8) + 1024 * m_gfxbank;
m_gfxdecode->gfx(0)->transpen(bitmap,cliprect,
2 * code,
((m_spriteram[offs + 2] & 0xf8) >> 3) + 32 * m_palettebank,
flip_screen_x(),flip_screen_y(),
sx,sy + (flip_screen_y() ? 8 : -8),0);<|fim▁hole|> flip_screen_x(),flip_screen_y(),
sx,sy,0);
}
}
uint32_t arkanoid_state::screen_update_arkanoid(screen_device &screen, bitmap_ind16 &bitmap, const rectangle &cliprect)
{
m_bg_tilemap->draw(screen, bitmap, cliprect, 0, 0);
draw_sprites(bitmap, cliprect);
return 0;
}
uint32_t arkanoid_state::screen_update_hexa(screen_device &screen, bitmap_ind16 &bitmap, const rectangle &cliprect)
{
m_bg_tilemap->draw(screen, bitmap, cliprect, 0, 0);
return 0;
}<|fim▁end|> | m_gfxdecode->gfx(0)->transpen(bitmap,cliprect,
2 * code + 1,
((m_spriteram[offs + 2] & 0xf8) >> 3) + 32 * m_palettebank, |
<|file_name|>telnetFlowMeasure.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import pexpect
import traceback
import time
import os
import sys
import re
addr = 'telnet 192.168.99.1 10000'
uname = ['a', 'd', 'm', 'i', 'n']
passwd = ['p', 'a', 's', 's', 'w', 'd']
cmdline = "show statistics traffic 5/1/0-1\n"
qq = ["e", "x", "i", "t", "\n"]
logName = 'Traffic_' + time.strftime("%Y-%m-%d", time.localtime())
<|fim▁hole|>if __name__ == '__main__':
with open(logName, 'w') as fd:
fd.writelines(["start at: ", time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
"\ndate,time, 5/1/0(Mbit/s), 5/1/1(Mbit/s)\n"])
while True:
try:
fd = open('Telnet_raw.log', 'w')
child = pexpect.spawn(addr)
child.logfile_read = fd
index = child.expect(['username:', pexpect.EOF, pexpect.TIMEOUT])
if index == 0:
for i in uname:
child.send(i)
child.send('\n.\n')
for i in passwd:
child.send(i)
child.send('\n.\n')
child.sendline(cmdline)
for i in qq:
child.send(i)
child.expect(['#', pexpect.EOF, pexpect.TIMEOUT])
child.close()
timeFin = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
else:
print index
except:
traceback.print_exc()
finally:
fd.close()
with open('Telnet_raw.log', 'r') as fs, open(logName, 'a') as fd:
temp = []
content = fs.readlines()
fd.write(time.strftime("%Y-%m-%d,%H:%M:%S,", time.localtime()))
for line in content:
temp.append(re.split(r'\s+', line))
fd.write(" %s,%s\n" % (temp[11][2], temp[13][2]))
time.sleep(1)<|fim▁end|> | |
<|file_name|>issue-55394.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(nll)]
struct Bar;<|fim▁hole|>
struct Foo<'s> {
bar: &'s mut Bar,
}
impl Foo<'_> {
fn new(bar: &mut Bar) -> Self {
Foo { bar } //~ ERROR unsatisfied lifetime constraints
}
}
fn main() { }<|fim▁end|> | |
<|file_name|>pushFile.go<|end_file_name|><|fim▁begin|>package main
import (
"fmt"
"os"
"github.com/Cloud-Foundations/Dominator/lib/filesystem"
"github.com/Cloud-Foundations/Dominator/lib/log"
objclient "github.com/Cloud-Foundations/Dominator/lib/objectserver/client"
"github.com/Cloud-Foundations/Dominator/lib/srpc"
"github.com/Cloud-Foundations/Dominator/lib/triggers"
"github.com/Cloud-Foundations/Dominator/lib/wsyscall"
"github.com/Cloud-Foundations/Dominator/proto/sub"
"github.com/Cloud-Foundations/Dominator/sub/client"
)
func pushFileSubcommand(args []string, logger log.DebugLogger) error {
srpcClient := getSubClient(logger)
defer srpcClient.Close()<|fim▁hole|> if err := pushFile(srpcClient, args[0], args[1]); err != nil {
return fmt.Errorf("Error pushing file: %s", err)
}
return nil
}
func pushFile(srpcClient *srpc.Client, source, dest string) error {
var sourceStat wsyscall.Stat_t
if err := wsyscall.Stat(source, &sourceStat); err != nil {
return err
}
sourceFile, err := os.Open(source)
if err != nil {
return err
}
defer sourceFile.Close()
objClient := objclient.AttachObjectClient(srpcClient)
defer objClient.Close()
hashVal, _, err := objClient.AddObject(sourceFile, uint64(sourceStat.Size),
nil)
if err != nil {
return err
}
newRegularInode := &filesystem.RegularInode{
Mode: filesystem.FileMode(sourceStat.Mode),
Uid: sourceStat.Uid,
Gid: sourceStat.Gid,
MtimeNanoSeconds: int32(sourceStat.Mtim.Nsec),
MtimeSeconds: sourceStat.Mtim.Sec,
Size: uint64(sourceStat.Size),
Hash: hashVal}
newInode := sub.Inode{Name: dest, GenericInode: newRegularInode}
var updateRequest sub.UpdateRequest
var updateReply sub.UpdateResponse
updateRequest.Wait = true
updateRequest.InodesToMake = append(updateRequest.InodesToMake, newInode)
if *triggersFile != "" {
updateRequest.Triggers, err = triggers.Load(*triggersFile)
if err != nil {
return err
}
} else if *triggersString != "" {
updateRequest.Triggers, err = triggers.Decode([]byte(*triggersString))
if err != nil {
return err
}
}
startTime := showStart("Subd.Update()")
err = client.CallUpdate(srpcClient, updateRequest, &updateReply)
showTimeTaken(startTime)
return err
}<|fim▁end|> | |
<|file_name|>ws_BinaryClass_10_LogisticRegression_db2_code_gen.py<|end_file_name|><|fim▁begin|>from sklearn2sql_heroku.tests.classification import generic as class_gen
<|fim▁hole|><|fim▁end|> | class_gen.test_model("LogisticRegression" , "BinaryClass_10" , "db2") |
<|file_name|>mock_readelf.py<|end_file_name|><|fim▁begin|># Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
_HEADERS = """ELF Header:
Magic: 7f 45 4c 46 01 01 01 00 00 00 00 00 00 00 00 00
Class: ELF32
Data: 2's complement, little endian
Version: 1 (current)
OS/ABI: UNIX - System V
ABI Version: 0
Type: DYN (Shared object file)
Machine: ARM
Version: 0x1
Entry point address: 0x0
Start of program headers: 52 (bytes into file)
Start of section headers: 628588000 (bytes into file)
Flags: 0x5000200, Version5 EABI, soft-float ABI
Size of this header: 52 (bytes)
Size of program headers: 32 (bytes)
Number of program headers: 9
Size of section headers: 40 (bytes)
Number of section headers: 40
Section header string table index: 39
"""
_SECTIONS = """There are 40 section headers, starting at offset 0x25777de0:
Section Headers:
[Nr] Name Type Addr Off Size ES Flg Lk Inf Al
[ 0] NULL 00000000 000000 000000 00 0 0 0
[ 1] .interp PROGBITS 00000154 000154 000013 00 A 0 0 1
[ 2] .note.gnu.build-id NOTE 00000168 000168 000024 00 A 0 0 4
[ 3] .dynsym DYNSYM 0000018c 00018c 001960 10 A 4 1 4
[ 4] .dynstr STRTAB 00001b0c 001b0c 000fb9 00 A 0 0 1
[ 5] .hash HASH 00002ad4 002ad4 000a7c 04 A 3 0 4
[ 6] .gnu.version VERSYM 00003558 003558 00032c 02 A 3 0 2
[ 7] .gnu.version_d VERDEF 00003888 003888 00001c 00 A 4 1 4
[ 8] .gnu.version_r VERNEED 000038a4 0038a4 000060 00 A 4 3 4<|fim▁hole|> [10] .rel.plt REL 0029fbec 29fbec 000b00 08 A 3 0 4
[11] .plt PROGBITS 002a06ec 2a06ec 001094 00 AX 0 0 4
[12] .text PROGBITS 0028d900 28d900 2250ba8 00 AX 0 0 64
[13] .rodata PROGBITS 0266e5f0 000084 5a72e4 00 A 0 0 256
[14] .ARM.exidx ARM_EXIDX 02bd3d10 2bd3d10 1771c8 08 AL 12 0 4
[15] .ARM.extab PROGBITS 02bd5858 2bd5858 02cd50 00 A 0 0 4
[16] .data.rel.ro.local PROGBITS 02c176f0 2c166f0 0c0e08 00 WA 0 0 16
[17] .data.rel.ro PROGBITS 02cd8500 2cd8500 104108 00 WA 0 0 16
[18] .init_array INIT_ARRAY 02ddc608 2ddc608 000008 00 WA 0 0 4
[19] .fini_array FINI_ARRAY 02ddc6f4 2ddc6f4 000008 00 WA 0 0 4
[20] .dynamic DYNAMIC 02ddc6fc 2ddc6fc 000130 08 WA 4 0 4
[21] .got PROGBITS 02ddc834 2ddc834 00a7cc 00 WA 0 0 4
[22] .data PROGBITS 02de7000 2de7000 018d88 00 WA 0 0 32
[23] .bss NOBITS 02dffda0 2dffda0 13d7e8 00 WA 0 0 32
[35] .note.gnu.gold-version NOTE 00000000 22700c98 00001c 00 0 0 4
[36] .ARM.attributes ARM_ATTRIBUTES 00000000 22700cb4 00003c 00 0 0 1
[37] .symtab SYMTAB 00000000 22700cf0 105ef20 10 38 901679 4
[38] .strtab STRTAB 00000000 234c4950 213a4fe 00 0 0 1
[39] .shstrtab STRTAB 00000000 257b46da 0001b4 00 0 0 1
Key to Flags:
W (write), A (alloc), X (execute), M (merge), S (strings)
I (info), L (link order), G (group), T (TLS), E (exclude), x (unknown)
O (extra OS processing required) o (OS specific), p (processor specific)
"""
_NOTES = """
Displaying notes found at file offset 0x00000168 with length 0x00000024:
Owner Data size\tDescription
GNU 0x00000014\tNT_GNU_BUILD_ID (unique build ID bitstring)
Build ID: WhatAnAmazingBuildId
Displaying notes found at file offset 0x226c41e8 with length 0x0000001c:
Owner Data size\tDescription
GNU 0x00000009\tNT_GNU_GOLD_VERSION (gold version)
"""
_OBJECT_OUTPUTS = {
'obj/third_party/icu/icuuc/ucnv_ext.o': """\
There are 71 section headers, starting at offset 0x3114:
Section Headers:
[Nr] Name Type Addr Off Size ES Flg Lk Inf Al
[ 0] NULL 00000000 000000 000000 00 0 0 0
[ 1] .strtab STRTAB 00000000 0029ac 000765 00 0 0 1
[ 2] .text PROGBITS 00000000 000034 000000 00 AX 0 0 4
[ 3] .text.ucnv_extIni PROGBITS 00000000 000034 0000c6 00 AX 0 0 2
[ 4] .rel.text.ucnv_ex REL 00000000 0023f4 000010 08 70 3 4
[ 5] .ARM.exidx.text.u ARM_EXIDX 00000000 0000fc 000008 00 AL 3 0 4
[60] .rodata.str1.1 PROGBITS 00000000 000015 000015 01 AMS 0 0 1
[56] .debug_str PROGBITS 00000000 000c50 0003c5 01 MS 0 0 1
[57] .debug_abbrev PROGBITS 00000000 001015 0000a1 00 0 0 1
[58] .debug_info PROGBITS 00000000 0010b6 000151 00 0 0 1
[59] .rel.debug_info REL 00000000 002544 0001e8 08 70 58 4
[60] .debug_ranges PROGBITS 00000000 001207 0000b0 00 0 0 1
[61] .rel.debug_ranges REL 00000000 00272c 000130 08 70 60 4
[62] .debug_macinfo PROGBITS 00000000 0012b7 000001 00 0 0 1
[63] .comment PROGBITS 00000000 0012b8 000024 01 MS 0 0 1
[64] .note.GNU-stack PROGBITS 00000000 0012dc 000000 00 0 0 1
[65] .ARM.attributes ARM_ATTRIBUTES 00000000 0012dc 00003c 00 0 0 1
[66] .debug_frame PROGBITS 00000000 001318 0001e4 00 0 0 4
[67] .rel.debug_frame REL 00000000 00285c 0000e0 08 70 66 4
[68] .debug_line PROGBITS 00000000 0014fc 000965 00 0 0 1
[69] .rel.debug_line REL 00000000 00293c 000070 08 70 68 4
[70] .symtab SYMTAB 00000000 001e64 000590 10 1 74 4
Key to Flags:
W (write), A (alloc), X (execute), M (merge), S (strings)
I (info), L (link order), G (group), T (TLS), E (exclude), x (unknown)
O (extra OS processing required) o (OS specific), p (processor specific)
""",
'obj/third_party/WebKit.a': """\
File: obj/third_party/WebKit.a(PaintChunker.o)
There are 68 section headers, starting at offset 0x5650:
Section Headers:
[Nr] Name Type Addr Off Size ES Flg Lk Inf Al
[ 0] NULL 00000000 000000 000000 00 0 0 0
Key to Flags:
W (write), A (alloc), X (execute), M (merge), S (strings)
I (info), L (link order), G (group), T (TLS), E (exclude), x (unknown)
O (extra OS processing required) o (OS specific), p (processor specific)
File: obj/third_party/WebKit.a(ContiguousContainer.o)
There are 68 section headers, starting at offset 0x5650:
Section Headers:
[Nr] Name Type Addr Off Size ES Flg Lk Inf Al
[ 0] NULL 00000000 000000 000000 00 0 0 0
Key to Flags:
W (write), A (alloc), X (execute), M (merge), S (strings)
I (info), L (link order), G (group), T (TLS), E (exclude), x (unknown)
O (extra OS processing required) o (OS specific), p (processor specific)
""",
'obj/base/base/page_allocator.o': """\
There are 68 section headers, starting at offset 0x5650:
Section Headers:
[Nr] Name Type Addr Off Size ES Flg Lk Inf Al
[ 0] NULL 00000000 000000 000000 00 0 0 0
[ 1] .rodata.str1.1 PROGBITS 00000000 000015 000005 01 AMS 0 0 1
""",
'obj/third_party/ffmpeg/libffmpeg_internal.a': """\
File: obj/third_party/ffmpeg/libffmpeg_internal.a(fft_float.o)
There are 68 section headers, starting at offset 0x5650:
Section Headers:
[Nr] Name Type Addr Off Size ES Flg Lk Inf Al
[ 0] NULL 00000000 000000 000000 00 0 0 0
[ 1] .rodata.str1.1 PROGBITS 00000000 000015 000005 01 AMS 0 0 1
Key to Flags:
W (write), A (alloc), X (execute), M (merge), S (strings)
I (info), L (link order), G (group), T (TLS), E (exclude), x (unknown)
O (extra OS processing required) o (OS specific), p (processor specific)
File: obj/third_party/ffmpeg/libffmpeg_internal.a(fft_fixed.o)
There are 68 section headers, starting at offset 0x5650:
Section Headers:
[Nr] Name Type Addr Off Size ES Flg Lk Inf Al
[ 0] NULL 00000000 000000 000000 00 0 0 0
Key to Flags:
W (write), A (alloc), X (execute), M (merge), S (strings)
I (info), L (link order), G (group), T (TLS), E (exclude), x (unknown)
O (extra OS processing required) o (OS specific), p (processor specific)
""",
'../../third_party/gvr-android-sdk/libgvr_shim_static_arm.a': """\
File: ../../third_party/gvr-android-sdk/libgvr_shim_static_arm.a(\
libcontroller_api_impl.a_controller_api_impl.o)
There are 68 section headers, starting at offset 0x5650:
Section Headers:
[Nr] Name Type Addr Off Size ES Flg Lk Inf Al
[ 0] NULL 00000000 000000 000000 00 0 0 0
Key to Flags:
W (write), A (alloc), X (execute), M (merge), S (strings)
I (info), L (link order), G (group), T (TLS), E (exclude), x (unknown)
O (extra OS processing required) o (OS specific), p (processor specific)
File: ../../third_party/gvr-android-sdk/libgvr_shim_static_arm.a(\
libport_android_jni.a_jni_utils.o)
There are 68 section headers, starting at offset 0x5650:
Section Headers:
[Nr] Name Type Addr Off Size ES Flg Lk Inf Al
[ 0] NULL 00000000 000000 000000 00 0 0 0
Key to Flags:
W (write), A (alloc), X (execute), M (merge), S (strings)
I (info), L (link order), G (group), T (TLS), E (exclude), x (unknown)
O (extra OS processing required) o (OS specific), p (processor specific)
""",
}
def _PrintHeader(path):
sys.stdout.write('\n')
sys.stdout.write('File: ' + path + '\n')
def _PrintOutput(path):
payload = _OBJECT_OUTPUTS.get(os.path.normpath(path))
assert payload, 'No mock_nm.py entry for: ' + path
sys.stdout.write(payload)
sys.stdout.write('\n')
def main():
paths = [p for p in sys.argv[1:] if not p.startswith('-')]
if paths[0].endswith('.o') or paths[0].endswith('.a'):
if len(paths) > 1:
for path in paths:
_PrintHeader(path)
_PrintOutput(path)
else:
_PrintOutput(paths[0])
elif sys.argv[1] == '-h':
sys.stdout.write(_HEADERS)
elif sys.argv[1] == '-S':
sys.stdout.write(_SECTIONS)
elif sys.argv[1] == '-n':
sys.stdout.write(_NOTES)
else:
assert False, 'Invalid args: %s' % sys.argv
if __name__ == '__main__':
main()<|fim▁end|> | [ 9] .rel.dyn REL 00003904 003904 288498 08 A 3 0 4 |
<|file_name|>format_tree_and_trait_table.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# File created on 15 Jul 2011
from __future__ import division
__author__ = "Jesse Zaneveld"
__copyright__ = "Copyright 2011-2013, The PICRUSt Project"
__credits__ = ["Jesse Zaneveld","Morgan Langille"]
__license__ = "GPL"
__version__ = "1.0.0-dev"
__maintainer__ = "Jesse Zaneveld"
__email__ = "zaneveld@gmail.com"
__status__ = "Development"
from os.path import splitext
from string import maketrans
from sys import getrecursionlimit,setrecursionlimit
import re
from cogent.parse.tree import DndParser
from cogent.util.option_parsing import parse_command_line_parameters,\
make_option
from picrust.parse import parse_trait_table,yield_trait_table_fields
from util import PicrustNode
def reformat_tree_and_trait_table(tree,trait_table_lines,trait_to_tree_mapping,\
input_trait_table_delimiter="\t", output_trait_table_delimiter="\t",\
filter_table_by_tree_tips=True, convert_trait_floats_to_ints=False,\
filter_tree_by_table_entries=True,convert_to_bifurcating=False,\
add_branch_length_to_root=False, name_unnamed_nodes=True,\
remove_whitespace_from_labels = True,replace_ambiguous_states=True,\
replace_problematic_label_characters = True,min_branch_length=0.0001,\
verbose=True):
"""Return a full reformatted tree,pruned reformatted tree and set of trait table lines
tree - a PyCogent PhyloNode tree object
trait_table_lines -- the lines of a trait table, where
the rows are organisms and the columns are traits (e.g. gene counts).
trait_id_to_tree_mapping -- a dict keyed by trait table ids, with
values of tree ids. If provided, trait table ids will be mapped to
tree ids
filter_table_by_tree_tips -- if True, remove trait table rows that don't map to ids on the
tree
convert_trait_floats_to_ints -- if True, convert floating point values in trait table cells to integers.
filter_tree_by_table_entries -- if True, save only the subtree that encompasses organisms in the trait table.
(equivalent to removing all tips in the tree that don't map to the trait table)
convert_to_bifurcating -- if True, ensure that the tree is fully bifurcating by resolving polytomies with very short
branches.
add_branch_length_to_root -- if True, ensure that the root node has a minimum branch length
name_unnamed_nodes -- if True, name unnamed nodes in the tree. (Useful for ensuring internal nodes can be
consistently identified in both the reference and pruned trees)
remove_whitespace_from_labels -- if True, replace whitespace in organism labels with underscores
replace_ambiguous_states -- if True, replace various strings representing ambiguous character states,
as well as '-1' or -1 (used by IMG to represent a lack of data) with 0 values.
replace_problematic_table_chars -- if True, replace ':' and ';' in the results with '_', and remove double quotes.
(AncSR methods like ace can't handle these characters in organism labels)
min_branch_length -- set the minimum branch length for all edges in the tree.
This function combines the various reformatting functions in the
library into a catch-all reformatter.
TODO: This function is monolithic, so despite the individual
parts being tested seperately, it probably needs to be broken
down into several modular parts. This would need to be done
with care however, as the order of steps matters quite a bit.
"""
input_tree = tree
#Parse lines to fields once
if trait_table_lines:
if verbose:
print "Parsing trait table...."
header_line,trait_table_fields =\
parse_trait_table(trait_table_lines,delimiter = input_trait_table_delimiter)
else:
if verbose:
print "Found no trait table lines. Setting data and header to empty"
trait_table_fields = []
header_line = ''
# Tree reformatting
if convert_to_bifurcating:
if verbose:
print "Converting tree to bifurcating...."
#maximum recursion depth on large trees
#Try working around this issue with a large
#recursion depth limit
old_recursion_limit = getrecursionlimit()
setrecursionlimit(50000)
input_tree = input_tree.bifurcating() # Required by most ancSR programs
setrecursionlimit(old_recursion_limit)
#input_tree = ensure_root_is_bifurcating(input_tree)
# The below nutty-looking re-filtering step is necessary
# When ensuring the root is bifurcating, internal nodes can
#get moved to the tips so without additional filtering we
#get unannotated tip nodes
#if filter_tree_by_table_entries:
# input_tree = filter_tree_tips_by_presence_in_table(input_tree,\
# trait_table_fields,delimiter=input_trait_table_delimiter)
#Name unnamed nodes
if name_unnamed_nodes:
if verbose:
print "Naming unnamed nodes in the reference tree...."
input_tree=make_internal_nodes_unique(input_tree)
#input_tree.nameUnnamedNodes()
check_node_labels(input_tree,verbose=verbose)
#Paranoid check for missing names:
#if verbose:
# print "Checking that all nodes were named..."
#for i,n in enumerate(input_tree.preorder()):
# if n.Name is None:
# raise ValueError('Node #%s (in tree.preorder()) was not named!'%str(i))
#map trait table ids to tree ids
if trait_to_tree_mapping:
#if verbose:
# print "Validating that trait --> tree mappings match tree ids..."
# good,bad = validate_trait_table_to_tree_mappings(input_tree,\
# trait_to_tree_mapping.values(), verbose = True)
# print "Found %i valid ids." %(len(good))
# print "Found %i invalid ids." %(len(bad))
# #if bad:
# # raise RuntimeError("The following putative tree ids in mapping file aren't actually in the input tree: %s" % bad)
if verbose:
print "Remapping trait table ids to match tree ids...."
trait_table_fields =\
remap_trait_table_organisms(trait_table_fields,trait_to_tree_mapping,\
verbose = verbose)
label_conversion_fns =\
set_label_conversion_fns(remove_whitespace_from_labels=remove_whitespace_from_labels,\
replace_problematic_label_characters=replace_problematic_label_characters)
value_conversion_fns = set_value_conversion_fns(replace_ambiguous_states=replace_ambiguous_states,\
convert_trait_floats_to_ints=convert_trait_floats_to_ints)
#Apply both label and value converters to the trait table
trait_table_fields = convert_trait_table_entries(\
trait_table_fields,\
value_conversion_fns = value_conversion_fns,\
label_conversion_fns = label_conversion_fns)
#We now need to apply any formatting functions to the tree nodes as well, to ensure
#that names are consistent between the two.
if label_conversion_fns:
input_tree = fix_tree_labels(input_tree, label_conversion_fns)
#Then filter the trait table to include only tree tips
if filter_table_by_tree_tips:
if verbose:
print "Filtering trait table ids to include only those that match tree ids...."
trait_table_fields = filter_table_by_presence_in_tree(input_tree,\
trait_table_fields,delimiter=input_trait_table_delimiter)
#if verbose:
# print "Verifying that new trait table ids match tree:"
# print "# of trait_table_lines: %i" %len(trait_table_lines)
# all_tip_ids = [tip.Name for tip in input_tree.iterTips()]
# print "example tree tip ids:",all_tip_ids[0:10]
if filter_tree_by_table_entries:
if verbose:
print "filtering tree tips to match entries in trait table...."
input_tree = filter_tree_tips_by_presence_in_table(input_tree,\
trait_table_fields,delimiter=input_trait_table_delimiter,\
verbose=verbose)
if min_branch_length:
if verbose:
print "Setting a min branch length of %f throughout tree...." \
% min_branch_length
input_tree = set_min_branch_length(input_tree,min_length = min_branch_length)
if add_branch_length_to_root:
if vebose:
print "Adding a min branch length of %f to the root node...." \
% min_branch_length
input_tree = add_branch_length_to_root(input_tree,root_name=input_tree.Name,\
root_length=min_branch_length)
if verbose:
print "Performing a final round of tree pruning to remove internal nodes with only one child...."
input_tree.prune()
#Format resulting trait table lines
result_trait_table_lines = [header_line]
result_trait_table_lines.extend([output_trait_table_delimiter.join(f) for f in trait_table_fields])
if verbose:
print "Final reprocessing of trait table lines to remove trailing whitespace..."
result_trait_table_lines =\
[line.strip() for line in result_trait_table_lines if line.strip()]
if verbose:
print "Done reformatting tree and trait table"
return input_tree, result_trait_table_lines
def check_node_labels(input_tree,verbose=False):
"""Check that all nodes are named!"""
if verbose:
print "Checking that all nodes were named..."
for i,n in enumerate(input_tree.preorder()):
print i,n.Name, n.NameLoaded
if n.Name is None:
err_text = 'WARNING: Node #%s (in tree.preorder()) was not named!. Node properties: %s'%(str(i),str(dir(n)))
print err_text
def set_label_conversion_fns(remove_whitespace_from_labels=True,\
replace_problematic_label_characters=True,verbose=False):
"""Return a list of functions for formatting tree node or trait table labels"""
#Set the functions that will be applied to trait table labels
label_conversion_fns = []
if remove_whitespace_from_labels:
if verbose:
print "Removing whitespace from trait table organism labels..."
label_conversion_fns.append(remove_spaces)
if replace_problematic_label_characters:
# Replace ambiguous characters with
replacement_dict ={":":"_",";":"_"}
if verbose:
print "Replacing problematic labels in organism labels:"
for k,v in replacement_dict.items():
print k,'-->',v
chars_to_delete = """'"'"""
replace_problematic_chars_fn =\
make_char_translation_fn(replacement_dict,chars_to_delete)
label_conversion_fns.append(replace_problematic_chars_fn)
return label_conversion_fns
def set_value_conversion_fns(replace_ambiguous_states=True,\
convert_trait_floats_to_ints=False,verbose=False):
"""Return a list of value conversion functions for trait table values
replace_ambiguous_states -- if True, replace values of -,
-1,'-1','NULL' or None to 0
convert_trait_floats_to_ints -- if True convert floats to ints
verbose -- print verbose output describing the conversion fns
"""
#Set the functions that will be applied to trait table values
value_conversion_fns = []
if replace_ambiguous_states:
# Replace ambiguous characters with 0's
replacement_dict ={'-':0,'-1':0,-1:0,'NULL':0,None:0}
if verbose:
print "Replacing ambiguous characters:"
for k,v in replacement_dict.items():
print k,'-->',v
replace_ambig_fn = make_translate_conversion_fn(replacement_dict)
value_conversion_fns.append(replace_ambig_fn)
if convert_trait_floats_to_ints:
value_conversion_fns.append(lambda x: str(int(float(x))))
if verbose:
print "Converting floating point trait table values to integers...."
return value_conversion_fns
def fix_tree_labels(tree,label_conversion_fns,verbose=False):
"""Fix tree labels by removing problematic characters"""
if verbose:
print "reformatting tree node names..."
tree = format_tree_node_names(tree,label_conversion_fns)
#print "Number of tree tips with single quotes:",len([t.Name for t in tree if "'" in t.Name])
return tree
def make_internal_nodes_unique(tree,base_name='internal_node_%i'):
""" Removes names that are not unique for internal nodes.
First occurence of non-unique node is kept and subsequence ones are set to None"""
#make a list of the names that are already in the tree
names_in_use = set()
for i,node in enumerate(tree.preorder(include_self=True)):
if node.Name is not None:
if node.Name in names_in_use:
node.Name=None
else:
names_in_use.add(node.Name)
if node.Name is None:
while node.Name is None:
#Find a unique name by adding integers
proposed_name = base_name % i
if proposed_name not in names_in_use:
node.Name = proposed_name
names_in_use.add(proposed_name)
break
else:
i += 1
#Set this so that the PhyloNode *actually* outputs the Name
node.NameLoaded = True
return tree
def format_tree_node_names(tree,label_formatting_fns=[]):
"""Return tree with node names formatted using specified fns
tree -- a PyCogent PhyloNode tree object
formatting_fns -- a list of formatting functions that are to
be called on each node name in the tree, and which each return
a new node name.
"""
for n in tree.preorder():
if n.Name is None:
continue
new_node_name = n.Name
for formatting_fn in label_formatting_fns:
new_node_name = formatting_fn(new_node_name)
n.Name = new_node_name
return tree
def nexus_lines_from_tree(tree):
"""Return NEXUS formatted lines from a PyCogent PhyloNode tree"""
lines = ["#NEXUS"]
lines.extend(make_nexus_trees_block(tree))
return lines
def add_branch_length_to_root(tree, root_name ="root",root_length=0.0001):
"""Add branch length to the root of a tree if it's shorter than root_length
tree -- A PyCogent PhyloNode object
root_name -- the name of the root node
root_length -- the desired minimum root length
This is required by some programs such as BayesTraits"""
root = tree.getNodeMatchingName(root_name)
root.Length = max(root.Length,root_length)
return tree
def set_min_branch_length(tree,min_length= 0.0001):
"""Return tree modified so that all branchlengths are >= min_length.<|fim▁hole|>
for node in tree.preorder():
if not node.Parent:
continue
node.Length = max(node.Length,min_length)
return tree
def make_nexus_trees_block(tree):
"""Generate a NEXUS format 'trees' block for a given tree
WARNING: Removes names from internal nodes, as these cause problems
downstream
"""
# First generate the mappings for the NEXUS translate command
trees_block_template =\
["begin trees;",\
"\ttranslate"]
name_mappings = {}
line = None
for i,node in enumerate(tree.iterTips()):
name_mappings[node.Name] = i
if line:
trees_block_template.append(line)
line = "\t\t%i %s," %(i,node.Name)
# The last line needs a semicolon rather than a comma
line = "\t\t%i %s;" %(i,node.Name)
trees_block_template.append(line)
# Reformat tree newick such that names match NEXUS translation table
for name_to_fix in name_mappings.keys():
node_to_rename = tree.getNodeMatchingName(name_to_fix)
node_to_rename.Name=name_mappings[name_to_fix]
for nonTipNode in tree.iterNontips():
nonTipNode.Name=''
tree_newick = tree.getNewick(with_distances=True)
#for name_to_fix in name_mappings.keys():
# tree_newick = tree_newick.replace(name_to_fix+":",str(name_mappings[name_to_fix])+":")
#for nonTipNode in tree.iterNontips():
# tree_newick = tree_newick.replace(nonTipNode.Name+":","")
#tree_newick = tree_newick.replace(root_name,"")
tree_template = "\t\ttree %s = %s" # tree name then newick string
line = tree_template % ("PyCogent_tree",tree_newick)
trees_block_template.append(line)
trees_block_template.append("end;")
return trees_block_template
def validate_trait_table_to_tree_mappings(tree,trait_table_ids,verbose=True):
"""Report whether tree ids are even in mapping file"""
good = []
bad = []
nodes = [n.Name for n in tree.iterTips()]
for tt_id in trait_table_ids:
if tt_id in nodes:
good.append(tt_id)
else:
bad.append(tt_id)
if verbose:
print "Of %i ids, %i were OK (mapped to tree)" %(len(trait_table_ids),len(good))
print "Example good ids",good[0:min(len(good),10)]
print "Example bad ids",bad[0:min(len(bad),10)]
print "Example tip ids",nodes[0:min(len(nodes),10)]
return good,bad
def filter_table_by_presence_in_tree(tree,trait_table_fields,name_field_index = 0,delimiter="\t"):
"""yield lines of a trait table lacking organisms missing from the tree"""
tree_tips = [str(node.Name.strip()) for node in tree.preorder()]
#print tree_tips
result_fields = []
for fields in trait_table_fields:
curr_name = fields[name_field_index].strip()
if curr_name not in tree_tips:
#print curr_name,"could not be found in tree nodes"
#print curr_name in tree_tips
#try:
# print int(curr_name) in tree_tips
#except:
# pass
#print curr_name.strip() in tree_tips
continue
result_fields.append(fields)
return result_fields
def make_translate_conversion_fn(translation_dict):
"""Return a new function that replaces values in input values with output_value
translation_dict -- a dict that maps inputs that should be translated to
their appropriate output
"""
def translate_conversion_fn(trait_value_field):
# Return translation, or the original value if no translation
# is available
try:
trait_value_field = trait_value_field.strip()
except AttributeError:
trait_value_field = str(trait_value_field).strip()
result = translation_dict.get(trait_value_field,trait_value_field)
#print trait_value_field
#print translation_dict.keys()
if result in translation_dict.keys():
raise RuntimeError("failed to translate value: %s" % result)
return str(result)
return translate_conversion_fn
def make_char_translation_fn(translation_dict,deletion_chars=''):
"""Return a new function that replaces values in input values with output_value
translation_dict -- a dict that maps inputs that should be translated to
their appropriate output
"""
def translate_conversion_fn(trait_value_field):
# Return translation, or the original value if no translation
# is available
trait_value_field = str(trait_value_field).strip()
from_chars = ''
to_chars = ''
for k,v in translation_dict.items():
from_chars += k
to_chars += v
translation_table = maketrans(from_chars,to_chars)
#print trait_value_field
#print translation_dict.keys()
result = trait_value_field.translate(translation_table,deletion_chars)
if result in translation_dict.keys():
raise RuntimeError("failed to translate value: %s" % result)
return str(result)
return translate_conversion_fn
def remove_spaces(trait_label_field):
    """A conversion function that replaces whitespace runs with underscores.

    trait_label_field -- the label to clean; non-string inputs are
    converted with str() first.
    """
    # str.split() with no argument splits on (and discards) any leading,
    # trailing, or repeated whitespace, so no separate strip is needed.
    # The original also bound an unused `label` local; removed.
    return "_".join(str(trait_label_field).split())
def convert_trait_table_entries(trait_table_fields,
        label_conversion_fns=None, value_conversion_fns=None):
    """Convert trait values by running conversion fns on labels and values.

    trait_table_fields -- iterable of lists of strings (from trait table
      lines).  The first field of each is assumed to be an organism name
      (label); the remaining fields are trait values.
    label_conversion_fns -- a list of functions to be run, in order, on
      each organism name label (default: [str]).
    value_conversion_fns -- a list of functions to be run, in order, on
      each trait value (default: [float]).

    Yields each converted field list; every converted field is re-cast
    to str after each conversion function.
    """
    # Resolve the mutable list defaults here rather than in the signature,
    # so a shared default list can never leak between calls.
    if label_conversion_fns is None:
        label_conversion_fns = [str]
    if value_conversion_fns is None:
        value_conversion_fns = [float]
    name_field_index = 0
    for fields in trait_table_fields:
        new_fields = []
        for i, field in enumerate(fields):
            if i != name_field_index:
                converters_to_use = value_conversion_fns
            else:
                converters_to_use = label_conversion_fns
            # Run the appropriate converters, in order, on this field.
            new_val = field
            for curr_conv_fn in converters_to_use:
                new_val = str(curr_conv_fn(new_val))
            new_fields.append(new_val)
        yield new_fields
def ensure_root_is_bifurcating(tree,root_name='root',verbose=False):
    """Remove child node of root if it is a single child

    tree -- a tree object exposing getNodeMatchingName/rootedAt/prune
      (presumably a PyCogent PhyloNode -- confirm against callers).
    root_name -- name of the node treated as the root.
    verbose -- if True, report when the tree is rerooted.

    Returns the (possibly rerooted and pruned) tree.
    """
    root_node = tree.getNodeMatchingName(root_name)
    if len(root_node.Children) == 1:
        if verbose:
            print "Rerooting to avoid monotomy at root"
        # Reroot at the root's only child so the root is no longer monotomic.
        tree = tree.rootedAt(root_node.Children[0].Name)
        #tree.remove(root_node)
    # prune() collapses any remaining single-child internal nodes.
    tree.prune()
    return tree
def filter_tree_tips_by_presence_in_table(tree,trait_table_fields,name_field_index = 0,\
delimiter="\t",verbose=True):
"""yield a tree lacking organisms missing from the trait table
trait_table_fields -- a list of lists, containing the results of parsing the data
lines of the trait table. Each set of fields in the list should contain the organism name
at index 0, and data values for the various traits at other positions
"""
org_ids_in_trait_table = []
new_tree = tree.deepcopy()
for fields in trait_table_fields:
curr_org = fields[name_field_index].strip()
org_ids_in_trait_table.append(curr_org)
# Build up a list of tips to prune
tips_to_prune = []
tips_not_to_prune = []
n_tips_not_to_prune = 0
for tip in tree.iterTips():
if tip.Name.strip() not in org_ids_in_trait_table:
tips_to_prune.append(tip.Name)
else:
n_tips_not_to_prune += 1
tips_not_to_prune.append(tip.Name)
if verbose and tips_to_prune:
print "Found %i tips to prune." %(len(tips_to_prune))
print "Example pruned tree tip names:",tips_to_prune[0:min(len(tips_to_prune),10)]
print "Example valid org ids:",org_ids_in_trait_table[0:min(len(org_ids_in_trait_table),10)]
if not n_tips_not_to_prune:
raise RuntimeError(\
"filter_tree_tips_by_presence_in_table: operation would remove all tips. Is this due to a formatting error in inputs?")
if verbose:
print "%i of %i tips will be removed (leaving %i)" %(len(tips_to_prune),\
n_tips_not_to_prune + len(tips_to_prune), n_tips_not_to_prune)
print "Example tips that will be removed (first 10):\n\n%s" % \
tips_to_prune[0:min(len(tips_to_prune),10)]
new_tree = get_sub_tree(tree,tips_not_to_prune)
return new_tree
def get_sub_tree(tree,tips_not_to_prune):
    """Get sub tree, modifying the recursion limit if necessary.

    tree -- a tree object exposing getSubTree().
    tips_not_to_prune -- tip names to keep in the subtree.
    """
    try:
        new_tree = tree.getSubTree(tips_not_to_prune)
    except RuntimeError:
        # NOTE: getSubTree recurses and can hit the maximum recursion depth
        # on large trees.  Retry with a much larger limit, and ALWAYS
        # restore the previous limit -- even if the retry raises too
        # (the original leaked the raised limit on failure).
        old_recursion_limit = getrecursionlimit()
        setrecursionlimit(50000)
        try:
            new_tree = tree.getSubTree(tips_not_to_prune)
        finally:
            setrecursionlimit(old_recursion_limit)
    return new_tree
def print_node_summary_table(input_tree):
    """Yield a tab-delimited summary line for each node, in postorder.

    Each line holds: node name, number of children, branch length, and the
    parent's name (None for the root).
    """
    for node in input_tree.postorder():
        parent_name = node.Parent.Name if node.Parent else None
        summary = [node.Name, len(node.Children), node.Length, parent_name]
        yield "\t".join([str(item) for item in summary])
def add_to_filename(filename, new_suffix, delimiter="_"):
    """Append a suffix to a filename while preserving its extension."""
    base, ext = splitext(filename)
    # Rebuild as <base><delimiter><suffix><ext>.
    return "%s%s%s%s" % (base, delimiter, new_suffix, ext)
def make_id_mapping_dict(tree_to_trait_mappings):
    """Build a trait_id -> tree_id dict from (tree_id, trait_id) tuples.

    tree_to_trait_mappings -- iterable of (tree_id, trait_id) pairs.
    """
    return dict((trait_id, tree_id)
                for tree_id, trait_id in tree_to_trait_mappings)
def parse_id_mapping_file(file_lines, delimiter="\t"):
    """Lazily produce the split fields of each line of an id mapping file."""
    return (line.strip().split(delimiter) for line in file_lines)
def remap_trait_table_organisms(trait_table_fields, trait_to_tree_mapping_dict, verbose=False):
    """Return trait table fields with organism ids substituted via the mapping dict.

    trait_table_fields -- an iterable containing lists for each trait.  The
      first field in each list should be the organism id, and the rest
      should be trait values.
    trait_to_tree_mapping_dict -- dict mapping trait table organism ids to
      tree ids.
    verbose -- if True, report ids that could not be mapped.

    Rows whose organism id has no entry in the mapping dict are dropped.
    """
    remapped_fields = []
    bad_ids = []
    for fields in trait_table_fields:
        try:
            fields[0] = trait_to_tree_mapping_dict[fields[0]]
        except KeyError:
            # No mapping for this organism -- record it and drop the row.
            bad_ids.append(fields[0])
            continue
        remapped_fields.append(fields)
    if verbose and bad_ids:
        # Report bad ids against the true total, not just the mapped count.
        print("%i of %i trait table ids could not be mapped to tree"
              % (len(bad_ids), len(bad_ids) + len(remapped_fields)))
        # Bug fix: the original format string had no %s placeholder, so
        # applying "%" here raised TypeError whenever this branch ran.
        print("Example trait table ids that could not be mapped to tree: %s"
              % bad_ids[:min(len(bad_ids), 10)])
    return remapped_fields
def load_picrust_tree(tree_fp, verbose=False):
    """Safely load a tree for picrust.

    tree_fp -- path to a newick tree file.
    verbose -- passed through to set_label_conversion_fns.
    """
    #PicrustNode seems to run into very slow/memory intentsive perfromance...
    #tree = DndParser(open(opts.input_tree),constructor=PicrustNode)
    # Close the file handle deterministically instead of leaking it.
    tree_file = open(tree_fp)
    try:
        tree = DndParser(tree_file, constructor=PicrustNode)
    finally:
        tree_file.close()
    label_conversion_fns = set_label_conversion_fns(verbose=verbose)
    tree = fix_tree_labels(tree, label_conversion_fns)
    return tree
def load_tab_delimited_trait_table(trait_table_fp,verbose=False):
"""Load a tab delimited trait table for picrust"""
input_trait_table = open(trait_table_fp,"U")
if verbose:
print "Parsing trait table..."
#Find which taxa are to be used in tests
#(by default trait table taxa)
trait_table_header,trait_table_fields = \
parse_trait_table(input_trait_table)
label_conversion_fns = set_label_conversion_fns(verbose=verbose)
trait_table_fields = convert_trait_table_entries(trait_table_fields,\
value_conversion_fns = [],\
label_conversion_fns = label_conversion_fns)
trait_table_fields = [t for t in trait_table_fields]
if verbose:
print "Number of trait table fields with single quotes:",\
len([t for t in trait_table_fields if "'" in t[0]])
return trait_table_header,trait_table_fields<|fim▁end|> |
tree -- a PyCogent PhyloNode object""" |
<|file_name|>auth_token.py<|end_file_name|><|fim▁begin|>import re
from keystonemiddleware import auth_token
from oslo_log import log
from nca47.common import exception
from nca47.common.i18n import _
from nca47.common import utils
LOG = log.getLogger(__name__)
<|fim▁hole|>class AuthTokenMiddleware(auth_token.AuthProtocol):
"""A wrapper on Keystone auth_token middleware.
Does not perform verification of authentication tokens
for public routes in the API.
"""
def __init__(self, app, conf, public_api_routes=[]):
self._nca_app = app
# TODO(mrda): Remove .xml and ensure that doesn't result in a
# 401 Authentication Required instead of 404 Not Found
route_pattern_tpl = '%s(\.json|\.xml)?$'
try:
self.public_api_routes = [re.compile(route_pattern_tpl % route_tpl)
for route_tpl in public_api_routes]
except re.error as e:
msg = _('Cannot compile public API routes: %s') % e
LOG.error(msg)
raise exception.ConfigInvalid(error_msg=msg)
super(AuthTokenMiddleware, self).__init__(app, conf)
def __call__(self, env, start_response):
path = utils.safe_rstrip(env.get('PATH_INFO'), '/')
# The information whether the API call is being performed against the
# public API is required for some other components. Saving it to the
# WSGI environment is reasonable thereby.
env['is_public_api'] = any(map(lambda pattern: re.match(pattern, path),
self.public_api_routes))
if env['is_public_api']:
return self._nca_app(env, start_response)
return super(AuthTokenMiddleware, self).__call__(env, start_response)<|fim▁end|> | |
<|file_name|>collision_objects_dispatcher.rs<|end_file_name|><|fim▁begin|>use utils::data::hash_map::HashMap;
use utils::data::pair::{Pair, PairTWHash};
use utils::data::uid_remap::{UidRemap, FastKey};
use queries::geometry::Contact;
use narrow_phase::{CollisionDispatcher, CollisionAlgorithm, ContactSignal, ContactSignalHandler};
use world::CollisionObject;
use math::Point;
// FIXME: move this to the `narrow_phase` module.
/// Collision detector dispatcher for collision objects.
pub struct CollisionObjectsDispatcher<P, M, T> {
    /// Signal used to notify subscribers when a pair starts/stops touching.
    signal:           ContactSignal<T>,
    /// Selects the narrow-phase algorithm to use for a given shape pair.
    shape_dispatcher: Box<CollisionDispatcher<P, M> + 'static>,
    /// Persistent narrow-phase state, one entry per proximate object pair.
    pairs:            HashMap<Pair, CollisionAlgorithm<P, M>, PairTWHash>
}
impl<P: Point, M, T> CollisionObjectsDispatcher<P, M, T> {
    /// Creates a new `CollisionObjectsDispatcher`.
    pub fn new(shape_dispatcher: Box<CollisionDispatcher<P, M> + 'static>)
               -> CollisionObjectsDispatcher<P, M, T> {
        CollisionObjectsDispatcher {
            signal:           ContactSignal::new(),
            pairs:            HashMap::new(PairTWHash::new()),
            shape_dispatcher: shape_dispatcher
        }
    }

    /// Updates the contact pairs.
    ///
    /// Narrow-phase detectors are re-run only for pairs where at least one
    /// object's `timestamp` equals the current `timestamp`.  Contact
    /// started/stopped events are emitted when a pair's contact count
    /// transitions between zero and non-zero.
    pub fn update(&mut self, objects: &UidRemap<CollisionObject<P, M, T>>, timestamp: usize) {
        for e in self.pairs.elements_mut().iter_mut() {
            let co1 = &objects[e.key.first];
            let co2 = &objects[e.key.second];

            if co1.timestamp == timestamp || co2.timestamp == timestamp {
                // Remember whether the pair was in contact before the update
                // so start/stop transitions can be detected afterwards.
                let had_colls = e.value.num_colls() != 0;

                e.value.update(&*self.shape_dispatcher,
                               &co1.position, &**co1.shape,
                               &co2.position, &**co2.shape);

                if e.value.num_colls() == 0 {
                    if had_colls {
                        // Transition: in contact -> separated.
                        self.signal.trigger_contact_signal(&co1.data, &co2.data, false);
                    }
                }
                else {
                    if !had_colls {
                        // Transition: separated -> in contact.
                        self.signal.trigger_contact_signal(&co1.data, &co2.data, true)
                    }
                }
            }
        }
    }

    /// Iterates through all the contact pairs.
    ///
    /// Calls `f` once per tracked pair with both objects' user data and the
    /// pair's narrow-phase algorithm (even for pairs with no active contact).
    #[inline(always)]
    pub fn contact_pairs<F>(&self,
                            objects: &UidRemap<CollisionObject<P, M, T>>,
                            mut f: F)
        where F: FnMut(&T, &T, &CollisionAlgorithm<P, M>) {
        for e in self.pairs.elements().iter() {
            let co1 = &objects[e.key.first];
            let co2 = &objects[e.key.second];

            f(&co1.data, &co2.data, &e.value)
        }
    }

    /// Calls a closures on each contact between two objects.
    #[inline(always)]
    pub fn contacts<F>(&self,
                       objects: &UidRemap<CollisionObject<P, M, T>>,
                       mut f: F)
        where F: FnMut(&T, &T, &Contact<P>) {
        // FIXME: avoid allocation.
        // A single scratch buffer is reused (and cleared) for every pair.
        let mut collector = Vec::new();

        for e in self.pairs.elements().iter() {
            let co1 = &objects[e.key.first];
            let co2 = &objects[e.key.second];

            e.value.colls(&mut collector);

            for c in collector[..].iter() {
                f(&co1.data, &co2.data, c)
            }

            collector.clear();
        }
    }

    /// Registers a handler for contact start/stop events.
    pub fn register_contact_signal_handler(&mut self,
                                           name: &str,
                                           handler: Box<ContactSignalHandler<T> + 'static>) {
        self.signal.register_contact_signal_handler(name, handler)
    }

    /// Unregisters a handler for contact start/stop events.
    pub fn unregister_contact_signal_handler(&mut self, name: &str) {
        self.signal.unregister_contact_signal_handler(name)
    }

    /// Creates/removes the persistant collision detector associated to a given pair of objects.
    ///
    /// Called by the broad phase: `started == true` when the two objects'
    /// bounding volumes began overlapping, `false` when they stopped.
    pub fn handle_proximity(&mut self,
                            objects: &UidRemap<CollisionObject<P, M, T>>,
                            fk1:     &FastKey,
                            fk2:     &FastKey,
                            started: bool) {
        let key = Pair::new(*fk1, *fk2);
        if started {
            let cd;

            {
                let co1 = &objects[*fk1];
                let co2 = &objects[*fk2];
                cd = self.shape_dispatcher.get_collision_algorithm(&co1.shape.repr(), &co2.shape.repr());
            }

            // Only track the pair if an algorithm exists for this shape pair.
            if let Some(cd) = cd {
                let _ = self.pairs.insert(key, cd);
            }
        }
        else {
            // Proximity stopped.
            match self.pairs.get_and_remove(&key) {
                Some(detector) => {
                    // Trigger the collision lost signal if there was a contact.
                    if detector.value.num_colls() != 0 {
                        let co1 = &objects[*fk1];
                        let co2 = &objects[*fk2];
                        self.signal.trigger_contact_signal(&co1.data, &co2.data, false);
                    }
                },
                None => { }
            }
        }
    }

    /// Tests if two objects can be tested for mutual collision.
    ///
    /// Same-object pairs are only allowed when the object's collision group
    /// permits self-collision; distinct pairs must have compatible groups.
    pub fn is_proximity_allowed(objects: &UidRemap<CollisionObject<P, M, T>>,
                                fk1:     &FastKey,
                                fk2:     &FastKey) -> bool {
        let co1 = &objects[*fk1];
        let co2 = &objects[*fk2];

        let can_move_ok = true; // XXX: ba.can_move() || bb.can_move();
        let groups_ok = co1.collision_groups.can_collide_with_groups(&co2.collision_groups);

        if *fk1 == *fk2 {
            can_move_ok && co1.collision_groups.can_collide_with_self()
        }
        else {
            can_move_ok && groups_ok
        }
    }
}
<|file_name|>test_descriptor.py<|end_file_name|><|fim▁begin|># Copyright 2014 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests in OpenHTF.
Tests are main entry point for OpenHTF tests. In its simplest form a
test is a series of Phases that are executed by the OpenHTF framework.
"""
import argparse
import collections
import logging
import os
import sys
import textwrap
import threading
from types import LambdaType
import uuid
import weakref
import colorama
import mutablerecords
from openhtf import util
from openhtf.core import phase_descriptor
from openhtf.core import phase_executor
from openhtf.core import phase_group
from openhtf.core import test_executor
from openhtf.core import test_record
from openhtf.util import conf
from openhtf.util import console_output
from openhtf.util import logs
import six
_LOG = logging.getLogger(__name__)
conf.declare('capture_source', description=textwrap.dedent(
'''Whether to capture the source of phases and the test module. This
defaults to False since this potentially reads many files and makes large
string copies.
Set to 'true' if you want to capture your test's source.'''),
default_value=False)
# TODO(arsharma): Deprecate this configuration after removing the old teardown
# specification.
conf.declare('teardown_timeout_s', default_value=30, description=
'Default timeout (in seconds) for test teardown functions; '
'this option is deprecated and only applies to the deprecated '
'Test level teardown function.')
<|fim▁hole|>
class UnrecognizedTestUidError(Exception):
  """Raised when information is requested about an unknown Test UID.

  See Test.from_uid(), which raises this when the given UID is not present
  in Test.TEST_INSTANCES.
  """
class InvalidTestPhaseError(Exception):
  """Raised when an invalid method is decorated."""
class InvalidTestStateError(Exception):
  """Raised when an operation is attempted in an invalid state.

  For example, Test.execute() raises this if the test is already running.
  """
def create_arg_parser(add_help=False):
  """Build the argparse.ArgumentParser used for OpenHTF command line flags.

  To add your own arguments, create a parser with this one as a parent:

  >>> parser = argparse.ArgumentParser(
          'My args title', parents=[openhtf.create_arg_parser()])
  >>> parser.parse_args()

  Args:
    add_help: boolean option passed through to arg parser.

  Returns:
    an `argparse.ArgumentParser`
  """
  # Inherit the flags declared by each framework component.
  parent_parsers = [
      conf.ARG_PARSER,
      console_output.ARG_PARSER,
      logs.ARG_PARSER,
      phase_executor.ARG_PARSER,
  ]
  parser = argparse.ArgumentParser(
      'OpenHTF-based testing',
      parents=parent_parsers,
      add_help=add_help)
  parser.add_argument(
      '--config-help', action='store_true',
      help='Instead of executing the test, simply print all available config '
      'keys and their description strings.')
  return parser
class Test(object):
  """An object that represents an OpenHTF test.

  Example:

    def PhaseOne(test):
      # Integrate more widgets

    def PhaseTwo(test):
      # Analyze widget integration status

    Test(PhaseOne, PhaseTwo).execute()

  Note that Test() objects *must* be created in the main thread, but can be
  .execute()'d in a separate thread.
  """
  # Registry of in-flight Test instances keyed by execution UID; weak values
  # so finished tests can be garbage collected.
  TEST_INSTANCES = weakref.WeakValueDictionary()
  HANDLED_SIGINT_ONCE = False

  def __init__(self, *phases, **metadata):
    # Some sanity checks on special metadata keys we automatically fill in.
    if 'config' in metadata:
      raise KeyError(
          'Invalid metadata key "config", it will be automatically populated.')

    self.created_time_millis = util.time_millis()
    self.last_run_time_millis = None
    self._test_options = TestOptions()
    self._lock = threading.Lock()
    self._executor = None
    self._test_desc = TestDescriptor(
        phases, test_record.CodeInfo.uncaptured(), metadata)

    if conf.capture_source:
      # First, we copy the phases with the real CodeInfo for them.
      group = self._test_desc.phase_group.load_code_info()

      # Then we replace the TestDescriptor with one that stores the test
      # module's CodeInfo as well as our newly copied phases.
      code_info = test_record.CodeInfo.for_module_from_stack(levels_up=2)
      self._test_desc = self._test_desc._replace(
          code_info=code_info, phase_group=group)

    # Make sure configure() gets called at least once before Execute(). The
    # user might call configure() again to override options, but we don't want
    # to force them to if they want to use defaults. For default values, see
    # the class definition of TestOptions.
    if 'test_name' in metadata:
      # Allow legacy metadata key for specifying test name.
      self.configure(name=metadata['test_name'])
    else:
      self.configure()

  @classmethod
  def from_uid(cls, test_uid):
    """Get Test by UID.

    Args:
      test_uid:  uuid for desired test.

    Returns:
      Test object for given by UID.

    Raises:
      UnrecognizedTestUidError: If the test_uid is not recognized.
    """
    test = cls.TEST_INSTANCES.get(test_uid)
    if not test:
      raise UnrecognizedTestUidError('Test UID %s not recognized' % test_uid)
    return test

  @property
  def uid(self):
    """UID of the current execution, or None if not currently executing."""
    if self._executor is not None:
      return self._executor.uid

  def make_uid(self):
    """Returns the next test execution's UID.

    This identifier must be unique but trackable across invocations of
    execute(). Therefore, it's made of four parts separated by ':'
    * Process-specific (decided on process start up)
    * Test descriptor-specific (decided on descriptor creation)
    * Execution-specific (decided on test start)
    """
    return '%s:%s:%s:%s' % (os.getpid(), self.descriptor.uid,
                            uuid.uuid4().hex[:16], util.time_millis())

  @property
  def descriptor(self):
    """Static data about this test, does not change across Execute() calls."""
    return self._test_desc

  @property
  def state(self):
    """Transient state info about the currently executing test, or None."""
    with self._lock:
      if self._executor:
        return self._executor.test_state

  def get_option(self, option):
    """Return the current value of the named TestOptions attribute."""
    return getattr(self._test_options, option)

  def add_output_callbacks(self, *callbacks):
    """Add the given function as an output module to this test."""
    self._test_options.output_callbacks.extend(callbacks)

  def configure(self, **kwargs):
    """Update test-wide configuration options. See TestOptions for docs."""
    # These internally ensure they are safe to call multiple times with no weird
    # side effects.
    known_args, _ = create_arg_parser(add_help=True).parse_known_args()
    if known_args.config_help:
      sys.stdout.write(conf.help_text)
      sys.exit(0)
    logs.configure_logging()
    for key, value in six.iteritems(kwargs):
      setattr(self._test_options, key, value)

  @classmethod
  def handle_sig_int(cls, *_):
    """SIGINT handler: abort all in-flight tests (at most one re-raise)."""
    if cls.TEST_INSTANCES:
      _LOG.error('Received SIGINT, stopping all tests.')
      for test in cls.TEST_INSTANCES.values():
        test.abort_from_sig_int()
    if not cls.HANDLED_SIGINT_ONCE:
      cls.HANDLED_SIGINT_ONCE = True
      # Emilio 2018-09-21: Raising this KeyboardInterrupt caused a traceback to be shown on-screen after posting the
      # test to the database. There's no point.
      # raise KeyboardInterrupt
    # Otherwise, does not raise KeyboardInterrupt to ensure that the tests are
    # cleaned up.

  def abort_from_sig_int(self):
    """Abort test execution abruptly, only in response to SIGINT."""
    with self._lock:
      _LOG.error('Aborting %s due to SIGINT', self)
      if self._executor:
        # TestState str()'s nicely to a descriptive string, so let's log that
        # just for good measure.
        _LOG.error('Test state: %s', self._executor.test_state)
        self._executor.abort()

  # TODO(arsharma): teardown_function test option is deprecated; remove this.
  def _get_running_test_descriptor(self):
    """If there is a teardown_function, wrap current descriptor with it."""
    if not self._test_options.teardown_function:
      return self._test_desc

    teardown_phase = phase_descriptor.PhaseDescriptor.wrap_or_copy(
        self._test_options.teardown_function)
    if not teardown_phase.options.timeout_s:
      teardown_phase.options.timeout_s = conf.teardown_timeout_s
    return TestDescriptor(
        phase_group.PhaseGroup(main=[self._test_desc.phase_group],
                               teardown=[teardown_phase]),
        self._test_desc.code_info, self._test_desc.metadata)

  def execute(self, test_start=None):
    """Starts the framework and executes the given test.

    Args:
      test_start: Either a trigger phase for starting the test, or a function
                  that returns a DUT ID. If neither is provided, defaults to not
                  setting the DUT ID.

    Returns:
      Boolean indicating whether the test failed (False) or passed (True).

    Raises:
      InvalidTestStateError: if this test is already being executed.
    """
    # Lock this section so we don't .stop() the executor between instantiating
    # it and .Start()'ing it, doing so does weird things to the executor state.
    with self._lock:
      # Sanity check to make sure someone isn't doing something weird like
      # trying to Execute() the same test twice in two separate threads.  We
      # hold the lock between here and Start()'ing the executor to guarantee
      # that only one thread is successfully executing the test.
      if self._executor:
        raise InvalidTestStateError('Test already running', self._executor)

      # Snapshot some things we care about and store them.
      self._test_desc.metadata['test_name'] = self._test_options.name
      self._test_desc.metadata['config'] = conf._asdict()
      self.last_run_time_millis = util.time_millis()

      if isinstance(test_start, LambdaType):
        # A bare callable is wrapped into a trigger phase that uses its
        # return value as the DUT ID.
        @phase_descriptor.PhaseOptions()
        def trigger_phase(test):
          test.test_record.dut_id = test_start()
        trigger = trigger_phase
      else:
        trigger = test_start

      if conf.capture_source:
        trigger.code_info = test_record.CodeInfo.for_function(trigger.func)

      test_desc = self._get_running_test_descriptor()
      self._executor = test_executor.TestExecutor(
          test_desc, self.make_uid(), trigger, self._test_options)

      _LOG.info('Executing test: %s', self.descriptor.code_info.name)
      self.TEST_INSTANCES[self.uid] = self
      self._test_desc.metadata['openhtf_uid'] = self.uid
      # Lazy %-style logging args (matches the other _LOG calls and skips
      # formatting entirely when DEBUG is disabled).
      _LOG.debug('OpenHTF test instance uid "%s" recorded in metadata["openhtf_uid"]', self.uid)
      self._executor.start()

    try:
      self._executor.wait()
    except KeyboardInterrupt:
      # The SIGINT handler only raises the KeyboardInterrupt once, so only retry
      # that once.
      self._executor.wait()
      raise
    finally:
      try:
        final_state = self._executor.finalize()

        _LOG.debug('Test completed for %s, outputting now.',
                   final_state.test_record.metadata['test_name'])
        for output_cb in self._test_options.output_callbacks:
          try:
            output_cb(final_state.test_record)
          except Exception:  # pylint: disable=broad-except
            _LOG.exception(
                'Output callback %s raised; continuing anyway', output_cb)
        # Make sure the final outcome of the test is printed last and in a
        # noticeable color so it doesn't get scrolled off the screen or missed.
        if final_state.test_record.outcome == test_record.Outcome.ERROR:
          for detail in final_state.test_record.outcome_details:
            console_output.error_print(detail.description)
        else:
          colors = collections.defaultdict(lambda: colorama.Style.BRIGHT)
          colors[test_record.Outcome.PASS] = ''.join((colorama.Style.BRIGHT,
                                                      colorama.Fore.GREEN))
          colors[test_record.Outcome.FAIL] = ''.join((colorama.Style.BRIGHT,
                                                      colorama.Fore.RED))
          msg_template = 'test: {name} outcome: {color}{outcome}{rst}'
          console_output.banner_print(msg_template.format(
              name=final_state.test_record.metadata['test_name'],
              color=colors[final_state.test_record.outcome],
              outcome=final_state.test_record.outcome.name,
              rst=colorama.Style.RESET_ALL))
      finally:
        del self.TEST_INSTANCES[self.uid]
        self._executor = None

    return final_state.test_record.outcome == test_record.Outcome.PASS
# TODO(arsharma): Deprecate the teardown_function in favor of PhaseGroups.
# The dict below supplies the record's field defaults; Test.configure()
# overwrites fields via setattr.
class TestOptions(mutablerecords.Record('TestOptions', [], {
    'name': 'openhtf_test',
    'output_callbacks': list,
    'teardown_function': None,
    'failure_exceptions': list,
    'default_dut_id': 'UNKNOWN_DUT',
})):
  """Class encapsulating various tunable knobs for Tests and their defaults.

  name: The name of the test to be put into the metadata.
  output_callbacks: List of output callbacks to run, typically it's better to
      use add_output_callbacks(), but you can pass [] here to reset them.
  teardown_function: Function to run at teardown.  We pass the same arguments
      to it as a phase.
  failure_exceptions: Exceptions to cause a test FAIL instead of ERROR. When a
      test run exits early due to an exception, the run will be marked as a
      FAIL if the raised exception matches one of the types in this list.
      Otherwise, the run is marked as ERROR.
  default_dut_id: The DUT ID that will be used if the start trigger and all
      subsequent phases fail to set one.
  """
class TestDescriptor(collections.namedtuple(
    'TestDescriptor', ['phase_group', 'code_info', 'metadata', 'uid'])):
  """An object that represents the reusable portions of an OpenHTF test.

  This object encapsulates the static test information that is set once and
  used by the framework along the way.

  Attributes:
    phase_group: The top level phase group to execute for this Test.
    metadata: Any metadata that should be associated with test records.
    code_info: Information about the module that created the Test.
    uid: UID for this test.
  """

  def __new__(cls, phases, code_info, metadata):
    """Create a TestDescriptor, converting `phases` to a PhaseGroup.

    A fresh 16-hex-character uid is generated for every descriptor.
    """
    group = phase_group.PhaseGroup.convert_if_not(phases)
    return super(TestDescriptor, cls).__new__(
        cls, group, code_info, metadata, uid=uuid.uuid4().hex[:16])

  @property
  def plug_types(self):
    """Returns set of plug types required by this test."""
    return {plug.cls
            for phase in self.phase_group
            for plug in phase.plugs}
class TestApi(collections.namedtuple('TestApi', [
    'logger', 'state', 'test_record', 'measurements', 'attachments',
    'attach', 'attach_from_file', 'get_measurement', 'get_attachment',
    'notify_update'])):
  """Class passed to test phases as the first argument.

  Attributes:
    dut_id: This attribute provides getter and setter access to the DUT ID
        of the device under test by the currently running openhtf.Test.  A
        non-empty DUT ID *must* be set by the end of a test, or no output
        will be produced.  It may be set via return value from a callable
        test_start argument to openhtf.Test.Execute(), or may be set in a
        test phase via this attribute.

    logger: A Python Logger instance that can be used to log to the resulting
        TestRecord.  This object supports all the usual log levels, and
        outputs to stdout (configurable) and the frontend via the Station
        API, if it's enabled, in addition to the 'log_records' attribute
        of the final TestRecord output by the running test.

    measurements: A measurements.Collection object used to get/set
        measurement values.  See util/measurements.py for more implementation
        details, but in the simple case, set measurements directly as
        attributes on this object (see examples/measurements.py for examples).

    state: A dict (initially empty) that is persisted across test phases (but
        resets for every invocation of Execute() on an openhtf.Test).  This
        can be used for any test-wide state you need to persist across phases.
        Use this with caution, however, as it is not persisted in the output
        TestRecord or displayed on the web frontend in any way.

    test_record: A reference to the output TestRecord for the currently
        running openhtf.Test.  Direct access to this attribute is *strongly*
        discouraged, but provided as a catch-all for interfaces not otherwise
        provided by TestApi.  If you find yourself using this, please file a
        feature request for an alternative at:
        https://github.com/google/openhtf/issues/new

  Callable Attributes:
    attach: Attach binary data to the test, see TestState.attach().

    attach_from_file: Attach binary data from a file, see
        TestState.attach_from_file().

    get_attachment:  Get copy of attachment contents from current or previous
        phase, see TestState.get_attachement.

    get_measurement: Get copy of a measurement from a current or previous
        phase, see TestState.get_measurement().

    notify_update: Notify any frontends of an interesting update. Typically
        this is automatically called internally when interesting things
        happen, but it can be called by the user (takes no args), for
        instance if modifying test_record directly.

  Read-only Attributes:
    attachments: Dict mapping attachment name to test_record.Attachment
        instance containing the data that was attached (and the MIME type
        that was assumed based on extension, if any).  Only attachments
        that have been attached in the current phase show up here, and this
        attribute should not be modified directly; use TestApi.attach() or
        TestApi.attach_from_file() instead.
  """

  @property
  def dut_id(self):
    # Convenience passthrough to the underlying TestRecord's DUT ID.
    return self.test_record.dut_id

  @dut_id.setter
  def dut_id(self, dut_id):
    # Overwriting a previously-set DUT ID is allowed, but warned about.
    if self.test_record.dut_id:
      self.logger.warning('Overriding previous DUT ID "%s" with "%s".',
                          self.test_record.dut_id, dut_id)
    self.test_record.dut_id = dut_id
    self.notify_update()
<|file_name|>good.js<|end_file_name|><|fim▁begin|>// Modules
const Config = require('../config/main'),
Good = require('good');
module.exports = {
options: Config.HAPI.GOOD_OPTIONS,
register: Good<|fim▁hole|>};<|fim▁end|> | |
<|file_name|>sprites.py<|end_file_name|><|fim▁begin|>class Sprite(object):
def __init__(self, xPos, yPos):
self.x = xPos
self.y = yPos
self.th = 32
self.tw = 32
def checkCollision(self, otherSprite):
if (self.x < otherSprite.x + otherSprite.tw and otherSprite.x < self.x + self.tw
and self.y < otherSprite.y + otherSprite.th and otherSprite.y < self.y + self.th):
return True
else:
return False
class Actor(Sprite):
    """A walking/falling character sprite driven by dir + state flags.

    Relies on globals (loadImage, image, frameCount) that are presumably
    provided by the Processing.py runtime -- confirm.
    """

    def __init__(self, xPos, yPos):
        super(Actor, self).__init__(xPos, yPos)
        self.speed = 5            # horizontal pixels per frame while walking
        self.dy = 0               # vertical velocity
        self.d = 3                # tolerance margin used by checkWall
        self.dir = "right"        # facing: "right" or "left"
#        self.newdir = "right"
        self.state = "standing"   # "standing", "walking", or "falling"
        self.walkR = []           # right-facing walk animation frames
        self.walkL = []           # left-facing walk animation frames

    def loadPics(self):
        # Load the standing/falling stills and the 8-frame walk cycles.
        self.standing = loadImage("gripe_stand.png")
        self.falling = loadImage("grfalling.png")

        for i in range(8):
            imageName = "gr" + str(i) + ".png"
            self.walkR.append(loadImage(imageName))
        for i in range(8):
            imageName = "gl" + str(i) + ".png"
            self.walkL.append(loadImage(imageName))

    def checkWall(self, wall):
        # NOTE(review): returns False only for a hidden wall whose x-range
        # overlaps within the `d` margin; all other paths implicitly return
        # None.  Callers presumably treat both as falsy -- confirm.
        if wall.state == "hidden":
            if (self.x >= wall.x - self.d and
                (self.x + 32 <= wall.x + 32 + self.d)):
                return False

    def move(self):
        # Pick the animation frame and velocity for the current direction
        # and state; frameCount (global) advances the 8-frame walk cycle.
        if self.dir == "right":
            if self.state == "walking":
                self.im = self.walkR[frameCount % 8]
                self.dx = self.speed
            elif self.state == "standing":
                self.im = self.standing
                self.dx = 0
            elif self.state == "falling":
                self.im = self.falling
                self.dx = 0
                self.dy = 5
        elif self.dir == "left":
            if self.state == "walking":
                self.im = self.walkL[frameCount % 8]
                self.dx = -self.speed
            elif self.state == "standing":
                self.im = self.standing
                self.dx = 0
            elif self.state == "falling":
                self.im = self.falling
                self.dx = 0
                self.dy = 5
        else:
            self.dx = 0

        self.x += self.dx
        self.y += self.dy

        # Clamp to the 640-pixel-wide playfield.
        if self.x <= 0:
            self.x = 0
        if self.x >= 640 - self.tw:
            self.x = 640 -self.tw

    def display(self):
        image(self.im, self.x, self.y)
class Block(Sprite):
def __init__(self, xPos, yPos):
super(Block, self).__init__(xPos, yPos)
self.state = "visible"
def loadPics(self):
self.im = loadImage("block.png")
def display(self):
if self.state == "visible":
image(self.im, self.x, self.y)<|fim▁end|> | self.dx = self.speed |
<|file_name|>tempus_winter.js<|end_file_name|><|fim▁begin|>black = '#202427';
red = '#EB6A58'; // red
green = '#49A61D'; // green
yellow = '#959721'; // yellow
blue = '#798FB7'; // blue
magenta = '#CD7B7E'; // pink
cyan = '#4FA090'; // cyan
white = '#909294'; // light gray
lightBlack = '#292B35'; // medium gray
lightRed = '#DB7824'; // red
lightGreen = '#09A854'; // green<|fim▁hole|>lightBlue = '#309DC1'; // blue
lightMagenta= '#C874C2'; // pink
lightCyan = '#1BA2A0'; // cyan
lightWhite = '#8DA3B8'; // white
t.prefs_.set('color-palette-overrides',
[ black , red , green , yellow,
blue , magenta , cyan , white,
lightBlack , lightRed , lightGreen , lightYellow,
lightBlue , lightMagenta , lightCyan , lightWhite ]);
t.prefs_.set('cursor-color', lightWhite);
t.prefs_.set('foreground-color', lightWhite);
t.prefs_.set('background-color', black);<|fim▁end|> | lightYellow = '#AD8E4B'; // yellow |
<|file_name|>completions.rs<|end_file_name|><|fim▁begin|>extern crate regex;
extern crate clap;
use clap::{App, Arg, SubCommand, Shell};
use regex::Regex;
static BASH: &'static str = r#"_myapp() {
local i cur prev opts cmds
COMPREPLY=()
cur="${COMP_WORDS[COMP_CWORD]}"
prev="${COMP_WORDS[COMP_CWORD-1]}"
cmd=""
opts=""
for i in ${COMP_WORDS[@]}
do
case "${i}" in
myapp)
cmd="myapp"
;;
help)
cmd+="__help"
;;
test)
cmd+="__test"
;;
*)
;;
esac
done
case "${cmd}" in
myapp)
opts=" -h -V --help --version <file> test help"
if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
return 0
fi
case "${prev}" in
*)
COMPREPLY=()
;;
esac
COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
return 0
;;
myapp__help)
opts=" -h -V --help --version "
if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
return 0
fi
case "${prev}" in
*)
COMPREPLY=()
;;
esac
COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
return 0
;;
myapp__test)
opts=" -h -V --help --version --case "
if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
return 0
fi
case "${prev}" in
--case)
COMPREPLY=("<case>")
return 0
;;
*)
COMPREPLY=()
;;
esac
COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
return 0
;;
esac
}
complete -F _myapp -o bashdefault -o default myapp
"#;
static ZSH: &'static str = r#"#compdef myapp
_myapp() {
typeset -A opt_args
local ret=1
local context curcontext="$curcontext" state line
_arguments -s -S -C \
'-h[Prints help information]' \
'--help[Prints help information]' \
'-V[Prints version information]' \
'--version[Prints version information]' \
"1:: :_myapp_commands" \
"*:: :->myapp" \
&& ret=0
case $state in
(myapp)
curcontext="${curcontext%:*:*}:myapp-command-$words[1]:"
case $line[1] in
(test)
_arguments -s -S -C \
'--case+[the case to test]' \
'-h[Prints help information]' \
'--help[Prints help information]' \
'-V[Prints version information]' \
'--version[Prints version information]' \
&& ret=0
;;
(help)
_arguments -s -S -C \
'-h[Prints help information]' \
'--help[Prints help information]' \
'-V[Prints version information]' \
'--version[Prints version information]' \
&& ret=0
;;
esac
;;
esac
}
(( $+functions[_myapp_commands] )) ||
_myapp_commands() {
local commands; commands=(
"test:tests things" \
"help:Prints this message or the help of the given subcommand(s)" \
"FILE:some input file" \
)
_describe -t commands 'myapp commands' commands "$@"
}
(( $+functions[_myapp__help_commands] )) ||
_myapp__help_commands() {
local commands; commands=(
)
_describe -t commands 'myapp help commands' commands "$@"
}
(( $+functions[_myapp__test_commands] )) ||
_myapp__test_commands() {
local commands; commands=(
)
_describe -t commands 'myapp test commands' commands "$@"
}
_myapp "$@""#;
static FISH: &'static str = r#"function __fish_using_command
set cmd (commandline -opc)
if [ (count $cmd) -eq (count $argv) ]
for i in (seq (count $argv))
if [ $cmd[$i] != $argv[$i] ]
return 1
end
end
return 0
end
return 1
end
complete -c myapp -n "__fish_using_command myapp" -s h -l help -d 'Prints help information'
complete -c myapp -n "__fish_using_command myapp" -s V -l version -d 'Prints version information'
complete -c myapp -n "__fish_using_command myapp" -f -a "test" -d 'tests things'
complete -c myapp -n "__fish_using_command myapp" -f -a "help" -d 'Prints this message or the help of the given subcommand(s)'
complete -c myapp -n "__fish_using_command myapp test" -l case -d 'the case to test'
complete -c myapp -n "__fish_using_command myapp test" -s h -l help -d 'Prints help information'
complete -c myapp -n "__fish_using_command myapp test" -s V -l version -d 'Prints version information'
complete -c myapp -n "__fish_using_command myapp help" -s h -l help -d 'Prints help information'
complete -c myapp -n "__fish_using_command myapp help" -s V -l version -d 'Prints version information'
"#;
#[cfg(not(target_os="windows"))]
static POWERSHELL: &'static str = r#"
@('myapp', './myapp') | %{
Register-ArgumentCompleter -Native -CommandName $_ -ScriptBlock {
param($wordToComplete, $commandAst, $cursorPosition)
$command = '_myapp'
$commandAst.CommandElements |
Select-Object -Skip 1 |
%{
switch ($_.ToString()) {
'test' {
$command += '_test'
break
}
'help' {
$command += '_help'
break
}
}
}
$completions = @()
switch ($command) {
'_myapp' {
$completions = @('test', 'help', '-h', '-V', '--help', '--version')
}
'_myapp_test' {
$completions = @('-h', '-V', '--case', '--help', '--version')
}
'_myapp_help' {
$completions = @('-h', '-V', '--help', '--version')
}
}
$completions |
?{ $_ -like "$wordToComplete*" } |
Sort-Object |
%{ New-Object System.Management.Automation.CompletionResult $_, $_, 'ParameterValue', $_ }
}
}
"#;
#[cfg(target_os="windows")]
static POWERSHELL: &'static str = r#"
@('myapp', './myapp', 'myapp.exe', '.\myapp', '.\myapp.exe', './myapp.exe') | %{
Register-ArgumentCompleter -Native -CommandName $_ -ScriptBlock {
param($wordToComplete, $commandAst, $cursorPosition)
$command = '_myapp'
$commandAst.CommandElements |
Select-Object -Skip 1 |
%{
switch ($_.ToString()) {
'test' {
$command += '_test'
break
}
'help' {
$command += '_help'
break
}
}
}
$completions = @()
switch ($command) {
'_myapp' {
$completions = @('test', 'help', '-h', '-V', '--help', '--version')
}
'_myapp_test' {
$completions = @('-h', '-V', '--case', '--help', '--version')
}
'_myapp_help' {
$completions = @('-h', '-V', '--help', '--version')
}
}
$completions |
?{ $_ -like "$wordToComplete*" } |
Sort-Object |
%{ New-Object System.Management.Automation.CompletionResult $_, $_, 'ParameterValue', $_ }
}
}
"#;
#[cfg(not(target_os="windows"))]
static POWERSHELL_WUS: &'static str = r#"
@('my_app', './my_app') | %{
Register-ArgumentCompleter -Native -CommandName $_ -ScriptBlock {
param($wordToComplete, $commandAst, $cursorPosition)
$command = '_my_app'
$commandAst.CommandElements |
Select-Object -Skip 1 |
%{
switch ($_.ToString()) {
<|fim▁hole|> break
}
'some_cmd' {
$command += '_some_cmd'
break
}
'help' {
$command += '_help'
break
}
}
}
$completions = @()
switch ($command) {
'_my_app' {
$completions = @('test', 'some_cmd', 'help', '-h', '-V', '--help', '--version')
}
'_my_app_test' {
$completions = @('-h', '-V', '--case', '--help', '--version')
}
'_my_app_some_cmd' {
$completions = @('-h', '-V', '--config', '--help', '--version')
}
'_my_app_help' {
$completions = @('-h', '-V', '--help', '--version')
}
}
$completions |
?{ $_ -like "$wordToComplete*" } |
Sort-Object |
%{ New-Object System.Management.Automation.CompletionResult $_, $_, 'ParameterValue', $_ }
}
}
"#;
#[cfg(target_os="windows")]
static POWERSHELL_WUS: &'static str = r#"
@('my_app', './my_app', 'my_app.exe', '.\my_app', '.\my_app.exe', './my_app.exe') | %{
Register-ArgumentCompleter -Native -CommandName $_ -ScriptBlock {
param($wordToComplete, $commandAst, $cursorPosition)
$command = '_my_app'
$commandAst.CommandElements |
Select-Object -Skip 1 |
%{
switch ($_.ToString()) {
'test' {
$command += '_test'
break
}
'some_cmd' {
$command += '_some_cmd'
break
}
'help' {
$command += '_help'
break
}
}
}
$completions = @()
switch ($command) {
'_my_app' {
$completions = @('test', 'some_cmd', 'help', '-h', '-V', '--help', '--version')
}
'_my_app_test' {
$completions = @('-h', '-V', '--case', '--help', '--version')
}
'_my_app_some_cmd' {
$completions = @('-h', '-V', '--config', '--help', '--version')
}
'_my_app_help' {
$completions = @('-h', '-V', '--help', '--version')
}
}
$completions |
?{ $_ -like "$wordToComplete*" } |
Sort-Object |
%{ New-Object System.Management.Automation.CompletionResult $_, $_, 'ParameterValue', $_ }
}
}
"#;
static ZSH_WUS: &'static str = r#"#compdef my_app
_my_app() {
typeset -A opt_args
local ret=1
local context curcontext="$curcontext" state line
_arguments -s -S -C \
'-h[Prints help information]' \
'--help[Prints help information]' \
'-V[Prints version information]' \
'--version[Prints version information]' \
"1:: :_my_app_commands" \
"*:: :->my_app" \
&& ret=0
case $state in
(my_app)
curcontext="${curcontext%:*:*}:my_app-command-$words[1]:"
case $line[1] in
(test)
_arguments -s -S -C \
'--case+[the case to test]' \
'-h[Prints help information]' \
'--help[Prints help information]' \
'-V[Prints version information]' \
'--version[Prints version information]' \
&& ret=0
;;
(some_cmd)
_arguments -s -S -C \
'--config+[the other case to test]' \
'-h[Prints help information]' \
'--help[Prints help information]' \
'-V[Prints version information]' \
'--version[Prints version information]' \
&& ret=0
;;
(help)
_arguments -s -S -C \
'-h[Prints help information]' \
'--help[Prints help information]' \
'-V[Prints version information]' \
'--version[Prints version information]' \
&& ret=0
;;
esac
;;
esac
}
(( $+functions[_my_app_commands] )) ||
_my_app_commands() {
local commands; commands=(
"test:tests things" \
"some_cmd:tests other things" \
"help:Prints this message or the help of the given subcommand(s)" \
"FILE:some input file" \
)
_describe -t commands 'my_app commands' commands "$@"
}
(( $+functions[_my_app__help_commands] )) ||
_my_app__help_commands() {
local commands; commands=(
)
_describe -t commands 'my_app help commands' commands "$@"
}
(( $+functions[_my_app__some_cmd_commands] )) ||
_my_app__some_cmd_commands() {
local commands; commands=(
)
_describe -t commands 'my_app some_cmd commands' commands "$@"
}
(( $+functions[_my_app__test_commands] )) ||
_my_app__test_commands() {
local commands; commands=(
)
_describe -t commands 'my_app test commands' commands "$@"
}
_my_app "$@""#;
static FISH_WUS: &'static str = r#"function __fish_using_command
set cmd (commandline -opc)
if [ (count $cmd) -eq (count $argv) ]
for i in (seq (count $argv))
if [ $cmd[$i] != $argv[$i] ]
return 1
end
end
return 0
end
return 1
end
complete -c my_app -n "__fish_using_command my_app" -s h -l help -d 'Prints help information'
complete -c my_app -n "__fish_using_command my_app" -s V -l version -d 'Prints version information'
complete -c my_app -n "__fish_using_command my_app" -f -a "test" -d 'tests things'
complete -c my_app -n "__fish_using_command my_app" -f -a "some_cmd" -d 'tests other things'
complete -c my_app -n "__fish_using_command my_app" -f -a "help" -d 'Prints this message or the help of the given subcommand(s)'
complete -c my_app -n "__fish_using_command my_app test" -l case -d 'the case to test'
complete -c my_app -n "__fish_using_command my_app test" -s h -l help -d 'Prints help information'
complete -c my_app -n "__fish_using_command my_app test" -s V -l version -d 'Prints version information'
complete -c my_app -n "__fish_using_command my_app some_cmd" -l config -d 'the other case to test'
complete -c my_app -n "__fish_using_command my_app some_cmd" -s h -l help -d 'Prints help information'
complete -c my_app -n "__fish_using_command my_app some_cmd" -s V -l version -d 'Prints version information'
complete -c my_app -n "__fish_using_command my_app help" -s h -l help -d 'Prints help information'
complete -c my_app -n "__fish_using_command my_app help" -s V -l version -d 'Prints version information'
"#;
static BASH_WUS: &'static str = r#"_my_app() {
local i cur prev opts cmds
COMPREPLY=()
cur="${COMP_WORDS[COMP_CWORD]}"
prev="${COMP_WORDS[COMP_CWORD-1]}"
cmd=""
opts=""
for i in ${COMP_WORDS[@]}
do
case "${i}" in
my_app)
cmd="my_app"
;;
help)
cmd+="__help"
;;
some_cmd)
cmd+="__some_cmd"
;;
test)
cmd+="__test"
;;
*)
;;
esac
done
case "${cmd}" in
my_app)
opts=" -h -V --help --version <file> test some_cmd help"
if [[ ${cur} == -* || ${COMP_CWORD} -eq 1 ]] ; then
COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
return 0
fi
case "${prev}" in
*)
COMPREPLY=()
;;
esac
COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
return 0
;;
my_app__help)
opts=" -h -V --help --version "
if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
return 0
fi
case "${prev}" in
*)
COMPREPLY=()
;;
esac
COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
return 0
;;
my_app__some_cmd)
opts=" -h -V --help --version --config "
if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
return 0
fi
case "${prev}" in
--config)
COMPREPLY=("<config>")
return 0
;;
*)
COMPREPLY=()
;;
esac
COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
return 0
;;
my_app__test)
opts=" -h -V --help --version --case "
if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
return 0
fi
case "${prev}" in
--case)
COMPREPLY=("<case>")
return 0
;;
*)
COMPREPLY=()
;;
esac
COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
return 0
;;
esac
}
complete -F _my_app -o bashdefault -o default my_app
"#;
static FISH_SPECIAL: &'static str = r#"function __fish_using_command
set cmd (commandline -opc)
if [ (count $cmd) -eq (count $argv) ]
for i in (seq (count $argv))
if [ $cmd[$i] != $argv[$i] ]
return 1
end
end
return 0
end
return 1
end
complete -c my_app -n "__fish_using_command my_app" -l single-quotes -d 'Can be \'always\', \'auto\', or \'never\''
complete -c my_app -n "__fish_using_command my_app" -l double-quotes -d 'Can be "always", "auto", or "never"'
complete -c my_app -n "__fish_using_command my_app" -l backticks -d 'For more information see `echo test`'
complete -c my_app -n "__fish_using_command my_app" -l backslash -d 'Avoid \'\\n\''
complete -c my_app -n "__fish_using_command my_app" -l brackets -d 'List packages [filter]'
complete -c my_app -n "__fish_using_command my_app" -l expansions -d 'Execute the shell command with $SHELL'
complete -c my_app -n "__fish_using_command my_app" -s h -l help -d 'Prints help information'
complete -c my_app -n "__fish_using_command my_app" -s V -l version -d 'Prints version information'
"#;
static ZSH_SPECIAL: &'static str = r#"#compdef my_app
_my_app() {
typeset -A opt_args
local ret=1
local context curcontext="$curcontext" state line
_arguments -s -S -C \
'--single-quotes[Can be '\''always'\'', '\''auto'\'', or '\''never'\'']' \
'--double-quotes[Can be "always", "auto", or "never"]' \
'--backticks[For more information see `echo test`]' \
'--backslash[Avoid '\''\\n'\'']' \
'--brackets[List packages \[filter\]]' \
'--expansions[Execute the shell command with $SHELL]' \
'-h[Prints help information]' \
'--help[Prints help information]' \
'-V[Prints version information]' \
'--version[Prints version information]' \
&& ret=0
}
(( $+functions[_my_app_commands] )) ||
_my_app_commands() {
local commands; commands=(
)
_describe -t commands 'my_app commands' commands "$@"
}
(( $+functions[_my_app_commands] )) ||
_my_app_commands() {
local commands; commands=(
)
_describe -t commands 'my_app commands' commands "$@"
}
_my_app "$@""#;
fn compare(left: &str, right: &str) -> bool {
let b = left == right;
if !b {
let re = Regex::new(" ").unwrap();
println!("");
println!("--> left");
// println!("{}", left);
println!("{}", re.replace_all(left, "\u{2022}"));
println!("--> right");
println!("{}", re.replace_all(right, "\u{2022}"));
// println!("{}", right);
println!("--")
}
b
}
fn build_app() -> App<'static, 'static> { build_app_with_name("myapp") }
fn build_app_with_name(s: &'static str) -> App<'static, 'static> {
App::new(s)
.about("Tests completions")
.arg(Arg::with_name("file").help("some input file"))
.subcommand(SubCommand::with_name("test")
.about("tests things")
.arg(Arg::with_name("case")
.long("case")
.takes_value(true)
.help("the case to test")))
}
fn build_app_with_underscore() -> App<'static, 'static> {
build_app_with_name("my_app").subcommand(SubCommand::with_name("some_cmd")
.about("tests other things")
.arg(Arg::with_name("config")
.long("--config")
.takes_value(true)
.help("the other case to test")))
}
fn build_app_special() -> App<'static, 'static> {
App::new("my_app")
.arg(Arg::with_name("single-quotes")
.long("single-quotes")
.help("Can be 'always', 'auto', or 'never'"))
.arg(Arg::with_name("double-quotes")
.long("double-quotes")
.help("Can be \"always\", \"auto\", or \"never\""))
.arg(Arg::with_name("backticks")
.long("backticks")
.help("For more information see `echo test`"))
.arg(Arg::with_name("backslash")
.long("backslash")
.help("Avoid '\\n'"))
.arg(Arg::with_name("brackets")
.long("brackets")
.help("List packages [filter]"))
.arg(Arg::with_name("expansions")
.long("expansions")
.help("Execute the shell command with $SHELL"))
}
#[test]
fn bash() {
let mut app = build_app();
let mut buf = vec![];
app.gen_completions_to("myapp", Shell::Bash, &mut buf);
let string = String::from_utf8(buf).unwrap();
assert!(compare(&*string, BASH));
}
#[test]
fn zsh() {
let mut app = build_app();
let mut buf = vec![];
app.gen_completions_to("myapp", Shell::Zsh, &mut buf);
let string = String::from_utf8(buf).unwrap();
assert!(compare(&*string, ZSH));
}
#[test]
fn fish() {
let mut app = build_app();
let mut buf = vec![];
app.gen_completions_to("myapp", Shell::Fish, &mut buf);
let string = String::from_utf8(buf).unwrap();
assert!(compare(&*string, FISH));
}
// Disabled until I figure out this windows line ending and AppVeyor issues
//#[test]
// fn powershell() {
// let mut app = build_app();
// let mut buf = vec![];
// app.gen_completions_to("myapp", Shell::PowerShell, &mut buf);
// let string = String::from_utf8(buf).unwrap();
//
// assert!(compare(&*string, POWERSHELL));
// }
// Disabled until I figure out this windows line ending and AppVeyor issues
//#[test]
// fn powershell_with_underscore() {
// let mut app = build_app_with_underscore();
// let mut buf = vec![];
// app.gen_completions_to("my_app", Shell::PowerShell, &mut buf);
// let string = String::from_utf8(buf).unwrap();
//
// assert!(compare(&*string, POWERSHELL_WUS));
// }
#[test]
fn bash_with_underscore() {
let mut app = build_app_with_underscore();
let mut buf = vec![];
app.gen_completions_to("my_app", Shell::Bash, &mut buf);
let string = String::from_utf8(buf).unwrap();
assert!(compare(&*string, BASH_WUS));
}
#[test]
fn fish_with_underscore() {
let mut app = build_app_with_underscore();
let mut buf = vec![];
app.gen_completions_to("my_app", Shell::Fish, &mut buf);
let string = String::from_utf8(buf).unwrap();
assert!(compare(&*string, FISH_WUS));
}
#[test]
fn zsh_with_underscore() {
let mut app = build_app_with_underscore();
let mut buf = vec![];
app.gen_completions_to("my_app", Shell::Zsh, &mut buf);
let string = String::from_utf8(buf).unwrap();
assert!(compare(&*string, ZSH_WUS));
}
#[test]
fn fish_special() {
let mut app = build_app_special();
let mut buf = vec![];
app.gen_completions_to("my_app", Shell::Fish, &mut buf);
let string = String::from_utf8(buf).unwrap();
assert!(compare(&*string, FISH_SPECIAL));
}
#[test]
fn zsh_special() {
let mut app = build_app_special();
let mut buf = vec![];
app.gen_completions_to("my_app", Shell::Zsh, &mut buf);
let string = String::from_utf8(buf).unwrap();
assert!(compare(&*string, ZSH_SPECIAL));
}<|fim▁end|> | 'test' {
$command += '_test' |
<|file_name|>permissions.py<|end_file_name|><|fim▁begin|>from rest_framework import permissions<|fim▁hole|> if request.method == 'POST':
return True
return super(IsAuthenticatedOrCreate, self).has_permission(request, view)
class IsOwner(BasePermission):
message = "You must be the owner of this object."
def has_object_permission(self, request, view, obj):
my_safe_methods = []
if request.method in my_safe_methods:
return True
return obj.owner == request.user<|fim▁end|> | from rest_framework.permissions import BasePermission
class IsAuthenticatedOrCreate(permissions.IsAuthenticated):
def has_permission(self, request, view): |
<|file_name|>test_reference_resolver.py<|end_file_name|><|fim▁begin|># Copyright 2017 AT&T Intellectual Property. All other rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test the data reference resolver."""
import base64
import responses
from drydock_provisioner.statemgmt.design.resolver import ReferenceResolver
class TestClass(object):
def test_resolve_file_url(self, input_files):
"""Test that the resolver will resolve file URLs."""
input_file = input_files.join("fullsite.yaml")
url = 'file://%s' % str(input_file)
content = ReferenceResolver.resolve_reference(url)
assert len(content) > 0
@responses.activate
def test_resolve_http_url(self):
"""Test that the resolver will resolve http URLs."""
url = 'http://foo.com/test.yaml'
responses.add(responses.GET, url)
ReferenceResolver.resolve_reference(url)
assert len(responses.calls) == 1
assert responses.calls[0].request.url == url
@responses.activate
def test_resolve_http_basicauth_url(self):
"""Test the resolver will resolve http URLs w/ basic auth."""
url = 'http://user:pass@foo.com/test.yaml'
auth_header = "Basic %s" % base64.b64encode(
"user:pass".encode('utf-8')).decode('utf-8')
responses.add(responses.GET, url)
ReferenceResolver.resolve_reference(url)
assert len(responses.calls) == 1
assert 'Authorization' in responses.calls[0].request.headers
assert responses.calls[0].request.headers.get(<|fim▁hole|><|fim▁end|> | 'Authorization') == auth_header |
<|file_name|>raygun_send_test.js<|end_file_name|><|fim▁begin|>"use strict";
var test = require("tap").test;
var semver = require("semver");
var VError = require("verror");
var nock = require("nock");
var Raygun = require("../lib/raygun.ts");
nock(/.*/)
.post(/.*/, function () {
return true;
})
.reply(202, {})
.persist();
var API_KEY = "apikey";
test("send basic", {}, function (t) {
t.plan(1);
if (semver.satisfies(process.version, "=0.10")) {
t.pass("Ignored on node 0.10");
t.end();
return;
}
var client = new Raygun.Client().init({
apiKey: API_KEY,
});
client.send(new Error(), {}, function (response) {
t.equal(response.statusCode, 202);
t.end();
});
});
test("send complex", {}, function (t) {
t.plan(1);
if (semver.satisfies(process.version, "=0.10")) {
t.pass("Ignored on node 0.10");
t.end();
return;
}
var client = new Raygun.Client()
.init({ apiKey: API_KEY })
.setUser("callum@mindscape.co.nz")
.setVersion("1.0.0.0");
client.send(new Error(), {}, function (response) {
t.equal(response.statusCode, 202);
t.end();
});
});
test("send with inner error", {}, function (t) {
t.plan(1);
if (semver.satisfies(process.version, "=0.10")) {
t.pass("Ignored on node 0.10");
t.end();
return;
}
var error = new Error("Outer");
var innerError = new Error("Inner");
error.cause = function () {
return innerError;
};
var client = new Raygun.Client().init({
apiKey: API_KEY,
});
client.send(error, {}, function (response) {
t.equal(response.statusCode, 202);
t.end();
});
});
test("send with verror", {}, function (t) {
t.plan(1);
if (semver.satisfies(process.version, "=0.10")) {
t.pass("Ignored on node 0.10");
t.end();
return;
}
var error = new VError(
new VError(new VError("Deep Error"), "Inner Error"),
"Outer Error"
);
var client = new Raygun.Client().init({
apiKey: API_KEY,
});
client.send(error, {}, function (response) {
t.equal(response.statusCode, 202);
t.end();
});
});
test("send with OnBeforeSend", {}, function (t) {
t.plan(1);
if (semver.satisfies(process.version, "=0.10")) {
t.pass("Ignored on node 0.10");
t.end();
return;
}
var client = new Raygun.Client().init({
apiKey: API_KEY,
});
var onBeforeSendCalled = false;
client.onBeforeSend(function (payload) {
onBeforeSendCalled = true;
return payload;
});
client.send(new Error(), {}, function () {
t.equal(onBeforeSendCalled, true);
t.end();
});
});
test("send with expressHandler custom data", function (t) {
t.plan(1);
var client = new Raygun.Client().init({
apiKey: API_KEY,
});
client.expressCustomData = function () {
return { test: "data" };
};
client._send = client.send;
client.send = function (err, data) {
client.send = client._send;
t.equal(data.test, "data");
t.end();
};
client.expressHandler(new Error(), {}, {}, function () {});
});
test("check that tags get passed through", {}, function (t) {
var tag = ["Test"];
var client = new Raygun.Client().init({ apiKey: "TEST" });
client.setTags(tag);
client.onBeforeSend(function (payload) {
t.same(payload.details.tags, tag);
return payload;
});
client.send(new Error(), {}, function () {
t.end();
});
});
test("check that tags get merged", {}, function (t) {
var client = new Raygun.Client().init({ apiKey: "TEST" });
client.setTags(["Tag1"]);
client.onBeforeSend(function (payload) {
t.same(payload.details.tags, ["Tag1", "Tag2"]);
return payload;
});
client.send(
new Error(),<|fim▁hole|> {},
function () {
t.end();
},
null,
["Tag2"]
);
});<|fim▁end|> | |
<|file_name|>BusScheduleServlet.java<|end_file_name|><|fim▁begin|>/*
* *************************************************************************
* Copyright (C) FRS Belgium NV ("FRSGlobal"). All rights reserved.
*
* This computer program is protected by copyright law and international
* treaties. Unauthorized reproduction or distribution of this program,
* or any portion of it, may result in severe civil and criminal penalties,
* and will be prosecuted to the maximum extent possible under the law.
* *************************************************************************
*/
package org.cluj.bus.servlet;
import com.google.gson.Gson;
import org.cluj.bus.model.BusSchedule;
import org.cluj.bus.model.BusScheduleDTO;
import org.cluj.bus.model.CategorySchedule;
import org.cluj.bus.services.JPARepository;
import org.cluj.bus.util.ScheduleUtilities;
<|fim▁hole|>import java.io.IOException;
import java.text.ParseException;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
public class BusScheduleServlet extends HttpServlet
{
private static final Logger LOGGER = Logger.getLogger(BusScheduleServlet.class.getName());
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException
{
doPost(req, resp);
}
@Override
protected void doPost(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse) throws ServletException, IOException
{
String busId = httpServletRequest.getParameter(ServletUtils.BUS_ID_PARAMETER_KEY);
ServletUtils.sendResponse(httpServletResponse, getResponseString(busId));
}
private String getResponseString(String busId)
{
List<BusSchedule> busSchedules = new JPARepository<>(BusSchedule.class).findAll("busId", busId);
Map<String, CategorySchedule> categorySchedules = new HashMap<>();
for (BusSchedule busSchedule : busSchedules)
{
String days = busSchedule.getDays();
CategorySchedule categorySchedule = categorySchedules.get(days);
if (categorySchedule == null)
{
categorySchedule = new CategorySchedule();
categorySchedules.put(days, categorySchedule);
categorySchedule.setDisplayName(busSchedule.getCategory());
categorySchedule.setApplicableDays(getApplicableDays(days));
}
Collection<Date> startTimes = categorySchedule.getStartTimes();
if (startTimes == null)
{
startTimes = new ArrayList<>();
categorySchedule.setStartTimes(startTimes);
}
try
{
startTimes.add(ScheduleUtilities.getStartTime(busSchedule.getStartTime()));
}
catch (ParseException e)
{
LOGGER.log(Level.SEVERE, "Error parsing start time", e);
}
}
BusScheduleDTO schedule = new BusScheduleDTO();
schedule.setSchedules(categorySchedules.values());
return new Gson().toJson(schedule);
}
private Collection<Integer> getApplicableDays(String days)
{
List<Integer> applicableDays = new ArrayList<>();
for (char aChar : days.toCharArray())
{
int day = Integer.parseInt(String.valueOf(aChar));
applicableDays.add(day);
}
return applicableDays;
}
}<|fim▁end|> | import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse; |
<|file_name|>account_invoice_replace.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2018, Esther Cisneros
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from datetime import timedelta
from openerp import _, api, fields, models
from openerp.exceptions import UserError
class AccountInvoice(models.Model):
_name = 'account.invoice'
_inherit = 'account.invoice'
invoice_replaced = fields.Many2one(
'account.invoice',
string=_("Invoice that replaces"),
)
date_cancelled = fields.Date(
string=_("Cancellation Date"),
)
number_cancel = fields.Char(
string=("Nombre de la factura cancelada"),
)
@api.multi
def action_cancel(self):
for inv in self:
if inv.id == inv.invoice_replaced.id:
raise UserError(_("Please select an invoice to substitute different to the invoice to be canceled"))
inv.date_cancelled = fields.Date.today()
inv.number_cancel = inv.number
return super(AccountInvoice, self).action_cancel()
@api.model
def send_email_invoice_canceled(self):
limit_date = timedelta(days=1)
date_today_ = fields.Date.today()
dd = fields.Datetime.from_string(date_today_)
date_cancel = dd - limit_date
inv_ids = self.search([
('state', '=', ['cancel']),
('company_id', '=', 1),
('type', '=', 'out_invoice'),
('date_cancelled', '=', date_cancel)])
table = ''
remp_date = ''
remp_rep = ''
for inve in inv_ids:
if not inve.date_cancelled:
remp_date = '---'
else:
remp_date = inve.date_cancelled
if not inve.invoice_replaced:
remp_rep = '---'
else:
remp_rep = inve.invoice_replaced.number
table += """
<tr><td style="border-bottom: 1px solid silver;">%s</td>
<td style="border-bottom: 1px solid silver;">%s</td>
<td style="border-bottom: 1px solid silver;">%s</td>
<td align="right" style="border-bottom: 1px solid silver;">
%s</td></tr>
""" % (remp_date, inve.partner_id.name, inve.number_cancel, remp_rep)
mail_obj = self.env['mail.mail']
body_mail = u"""
<div summary="o_mail_notification" style="padding:0px; width:700px;
margin:0 auto; background: #FFFFFF repeat top /100%%; color:#77777
7">
<table cellspacing="0" cellpadding="0" style="width:700px;
border-collapse:collapse; background:inherit; color:inherit">
<tbody><tr>
<td valign="center" width="270" style="padding:5px 10px
5px 5px;font-size: 18px">
<p>Las siguientes facturas ya fueron canceladas</p>
</td>
<td valign="center" align="right" width="270"
style="padding:5px 15px 5px 10px; font-size: 12px;">
<p>
<strong>Sent by</strong>
<a href="http://erp.portalgebesa.com" style="text-
decoration:none; color: #a24689;">
<strong>%s</strong>
</a>
<strong>using</strong>
<a href="https://www.odoo.com" style="text-
decoration:none; color: #a24689;"><strong>Odoo
</strong></a>
</p>
</td>
</tr>
</tbody></table>
</div>
<div style="padding:0px; width:700px; margin:0 auto; background:
#FFFFFF repeat top /100%%; color:#777777">
<table cellspacing="0" cellpadding="0" style="vertical-align:
top; padding:0px; border-collapse:collapse; background:inherit;
color:inherit">
<tbody><tr>
<td valign="top" style="width:700px; padding:5px 10px
5px 5px; ">
<div>
<hr width="100%%" style="background-color:
rgb(204,204,204);border:medium none;clear:both;
display:block;font-size:0px;min-height:1px;
line-height:0;margin:15px auto;padding:0">
</div>
</td>
</tr></tbody>
</table>
</div>
<div style="padding:0px; width:700px; margin:0 auto; background:
#FFFFFF repeat top /100%%;color:#777777">
<table style="border-collapse:collapse; margin: 0 auto; width:
700px; background:inherit; color:inherit">
<tbody><tr>
<th width="16%%" style="padding:5px 10px 5px 5px;font-
size: 14px; border-bottom: 2px solid silver;"><strong>
Fecha de Cancelacion</strong></th>
<th width="54%%" style="padding:5px 10px 5px 5px;font-
size: 14px; border-bottom: 2px solid silver;"><strong>
Cliente</strong></th>
<th width="15%%" style="padding:5px 10px 5px 5px;font-
size: 14px; border-bottom: 2px solid silver;"><strong>
Factura Cancelada</strong></th>
<th width="15%%" style="padding:5px 10px 5px 5px;font-
size: 14px; border-bottom: 2px solid silver;"><strong>
Factura que Sustituye</strong></th>
</tr>
%s
</tbody>
</table>
</div>
""" % (self.env.user.company_id.name, table)
mail = mail_obj.create({
'subject': 'Facturas Canceladas',
'email_to': 'sergio.hernandez@gebesa.com,pedro.acosta@gebesa.com,andrea.mejia@gebesa.com,monica.sanchez@gebesa.com,jesus.castrellon@gebesa.com,christiansen.duenez@gebesa.com,esmeralda.gutierrez@gebesa.com,sistemas@gebesa.com',
'headers': "{'Return-Path': u'odoo@gebesa.com'}",
'body_html': body_mail,<|fim▁hole|> 'model': 'account.invoice',
#'res_id': inv_ids[0].id,
})
mail.send()
@api.model
def send_email_invoice_canceled_tgalbo(self):
limit_date = timedelta(days=1)
date_today_ = fields.Date.today()
dd = fields.Datetime.from_string(date_today_)
date_cancel = dd - limit_date
inv_ids = self.search([
('state', '=', ['cancel']),
('company_id', '=', 4),
('type', '=', 'out_invoice'),
('date_cancelled', '=', date_cancel)])
table = ''
remp_date = ''
remp_rep = ''
for inve in inv_ids:
if not inve.date_cancelled:
remp_date = '---'
else:
remp_date = inve.date_cancelled
if not inve.invoice_replaced:
remp_rep = '---'
else:
remp_rep = inve.invoice_replaced.number
table += """
<tr><td style="border-bottom: 1px solid silver;">%s</td>
<td style="border-bottom: 1px solid silver;">%s</td>
<td style="border-bottom: 1px solid silver;">%s</td>
<td align="right" style="border-bottom: 1px solid silver;">
%s</td></tr>
""" % (remp_date, inve.partner_id.name, inve.number_cancel, remp_rep)
mail_obj = self.env['mail.mail']
body_mail = u"""
<div summary="o_mail_notification" style="padding:0px; width:700px;
margin:0 auto; background: #FFFFFF repeat top /100%%; color:#77777
7">
<table cellspacing="0" cellpadding="0" style="width:700px;
border-collapse:collapse; background:inherit; color:inherit">
<tbody><tr>
<td valign="center" width="270" style="padding:5px 10px
5px 5px;font-size: 18px">
<p>Las siguientes facturas ya fueron canceladas</p>
</td>
<td valign="center" align="right" width="270"
style="padding:5px 15px 5px 10px; font-size: 12px;">
<p>
<strong>Sent by</strong>
<a href="http://erp.portalgebesa.com" style="text-
decoration:none; color: #a24689;">
<strong>%s</strong>
</a>
<strong>using</strong>
<a href="https://www.odoo.com" style="text-
decoration:none; color: #a24689;"><strong>Odoo
</strong></a>
</p>
</td>
</tr>
</tbody></table>
</div>
<div style="padding:0px; width:700px; margin:0 auto; background:
#FFFFFF repeat top /100%%; color:#777777">
<table cellspacing="0" cellpadding="0" style="vertical-align:
top; padding:0px; border-collapse:collapse; background:inherit;
color:inherit">
<tbody><tr>
<td valign="top" style="width:700px; padding:5px 10px
5px 5px; ">
<div>
<hr width="100%%" style="background-color:
rgb(204,204,204);border:medium none;clear:both;
display:block;font-size:0px;min-height:1px;
line-height:0;margin:15px auto;padding:0">
</div>
</td>
</tr></tbody>
</table>
</div>
<div style="padding:0px; width:700px; margin:0 auto; background:
#FFFFFF repeat top /100%%;color:#777777">
<table style="border-collapse:collapse; margin: 0 auto; width:
700px; background:inherit; color:inherit">
<tbody><tr>
<th width="16%%" style="padding:5px 10px 5px 5px;font-
size: 14px; border-bottom: 2px solid silver;"><strong>
Fecha de Cancelacion</strong></th>
<th width="54%%" style="padding:5px 10px 5px 5px;font-
size: 14px; border-bottom: 2px solid silver;"><strong>
Cliente</strong></th>
<th width="15%%" style="padding:5px 10px 5px 5px;font-
size: 14px; border-bottom: 2px solid silver;"><strong>
Factura Cancelada</strong></th>
<th width="15%%" style="padding:5px 10px 5px 5px;font-
size: 14px; border-bottom: 2px solid silver;"><strong>
Factura que Sustituye</strong></th>
</tr>
%s
</tbody>
</table>
</div>
""" % (self.env.user.company_id.name, table)
mail = mail_obj.create({
'subject': 'Facturas Canceladas Transportes Galbo del Norte',
'email_to': 'soporte.odoo@gebesa.com,salmon@gebesa.com,contabilidad@tansportesgalbo.com,gabriel.oviedo@transportesgalbo.com',
'headers': "{'Return-Path': u'odoo@gebesa.com'}",
'body_html': body_mail,
'auto_delete': True,
'message_type': 'comment',
'model': 'account.invoice',
#'res_id': inv_ids[0].id,
})
mail.send()<|fim▁end|> | 'auto_delete': True,
'message_type': 'comment', |
<|file_name|>traits-default-method-macro.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or<|fim▁hole|>
trait Foo {
fn bar(&self) -> String {
format!("test")
}
}
enum Baz {
Quux
}
impl Foo for Baz {
}
pub fn main() {
let q = Baz::Quux;
assert_eq!(q.bar(), "test".to_string());
}<|fim▁end|> | // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms. |
<|file_name|>msgarea.py<|end_file_name|><|fim▁begin|># This file is part of the Hotwire Shell user interface.
#
# Copyright (C) 2007,2008 Colin Walters <walters@verbum.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
# USA.
import logging
import gobject
import gtk
from .wraplabel import WrapLabel
_logger = logging.getLogger("hotwire.ui.MsgArea")
# This file is a Python translation of gedit/gedit/gedit-message-area.c
class MsgArea(gtk.HBox):
__gtype_name__ = "MsgArea"
__gsignals__ = {
"response" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, (gobject.TYPE_INT,)),
"close" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, [])
}
def __init__(self, buttons, **kwargs):
super(MsgArea, self).__init__(**kwargs)
self.__contents = None
self.__labels = []
self.__changing_style = False
self.__main_hbox = gtk.HBox(False, 16) # FIXME: use style properties
self.__main_hbox.show()
self.__main_hbox.set_border_width(8) # FIXME: use style properties
self.__action_area = gtk.VBox(True, 4); # FIXME: use style properties
self.__action_area.show()
self.__main_hbox.pack_end (self.__action_area, False, True, 0)
self.pack_start(self.__main_hbox, True, True, 0)
self.set_app_paintable(True)
self.connect("expose-event", self.__paint)
# Note that we connect to style-set on one of the internal
# widgets, not on the message area itself, since gtk does
# not deliver any further style-set signals for a widget on
# which the style has been forced with gtk_widget_set_style()
self.__main_hbox.connect("style-set", self.__on_style_set)
self.add_buttons(buttons)
def __get_response_data(self, w, create):
d = w.get_data('hotwire-msg-area-data')
if (d is None) and create:
d = {'respid': None}
w.set_data('hotwire-msg-area-data', d)
return d
def __find_button(self, respid):
children = self.__actionarea.get_children()
for child in children:
rd = self.__get_response_data(child, False)
if rd is not None and rd['respid'] == respid:
return child
def __close(self):
cancel = self.__find_button(gtk.RESPONSE_CANCEL)
if cancel is None:
return
self.response(gtk.RESPONSE_CANCEL)
def __paint(self, w, event):
gtk.Style.paint_flat_box(w.style,<|fim▁hole|> gtk.SHADOW_OUT,
None,
w,
"tooltip",
w.allocation.x + 1,
w.allocation.y + 1,
w.allocation.width - 2,
w.allocation.height - 2)
return False
def __on_style_set(self, w, style):
if self.__changing_style:
return
# This is a hack needed to use the tooltip background color
window = gtk.Window(gtk.WINDOW_POPUP);
window.set_name("gtk-tooltip")
window.ensure_style()
style = window.get_style()
self.__changing_style = True
self.set_style(style)
for label in self.__labels:
label.set_style(style)
self.__changing_style = False
window.destroy()
self.queue_draw()
def __get_response_for_widget(self, w):
rd = self.__get_response_data(w, False)
if rd is None:
return gtk.RESPONSE_NONE
return rd['respid']
def __on_action_widget_activated(self, w):
response_id = self.__get_response_for_widget(w)
self.response(response_id)
def add_action_widget(self, child, respid):
rd = self.__get_response_data(child, True)
rd['respid'] = respid
if not isinstance(child, gtk.Button):
raise ValueError("Can only pack buttons as action widgets")
child.connect('clicked', self.__on_action_widget_activated)
if respid != gtk.RESPONSE_HELP:
self.__action_area.pack_start(child, False, False, 0)
else:
self.__action_area.pack_end(child, False, False, 0)
def set_contents(self, contents):
self.__contents = contents
self.__main_hbox.pack_start(contents, True, True, 0)
def add_button(self, btext, respid):
button = gtk.Button(stock=btext)
button.set_focus_on_click(False)
button.set_flags(gtk.CAN_DEFAULT)
button.show()
self.add_action_widget(button, respid)
return button
def add_buttons(self, args):
_logger.debug("init buttons: %r", args)
for (btext, respid) in args:
self.add_button(btext, respid)
def set_response_sensitive(self, respid, setting):
for child in self.__action_area.get_children():
rd = self.__get_response_data(child, False)
if rd is not None and rd['respid'] == respid:
child.set_sensitive(setting)
break
def set_default_response(self, respid):
for child in self.__action_area.get_children():
rd = self.__get_response_data(child, False)
if rd is not None and rd['respid'] == respid:
child.grab_default()
break
def response(self, respid):
self.emit('response', respid)
def add_stock_button_with_text(self, text, stockid, respid):
b = gtk.Button(label=text)
b.set_focus_on_click(False)
img = gtk.Image()
img.set_from_stock(stockid, gtk.ICON_SIZE_BUTTON)
b.set_image(img)
b.show_all()
self.add_action_widget(b, respid)
return b
def set_text_and_icon(self, stockid, primary_text, secondary_text=None):
hbox_content = gtk.HBox(False, 8)
hbox_content.show()
image = gtk.Image()
image.set_from_stock(stockid, gtk.ICON_SIZE_DIALOG)
image.show()
hbox_content.pack_start(image, False, False, 0)
image.set_alignment(0.5, 0.5)
vbox = gtk.VBox(False, 6)
vbox.show()
hbox_content.pack_start (vbox, True, True, 0)
self.__labels = []
primary_markup = "<b>%s</b>" % (primary_text,)
primary_label = WrapLabel(primary_markup)
primary_label.show()
vbox.pack_start(primary_label, True, True, 0)
primary_label.set_use_markup(True)
primary_label.set_line_wrap(True)
primary_label.set_alignment(0, 0.5)
primary_label.set_flags(gtk.CAN_FOCUS)
primary_label.set_selectable(True)
self.__labels.append(primary_label)
if secondary_text:
secondary_markup = "<small>%s</small>" % (secondary_text,)
secondary_label = WrapLabel(secondary_markup)
secondary_label.show()
vbox.pack_start(secondary_label, True, True, 0)
secondary_label.set_flags(gtk.CAN_FOCUS)
secondary_label.set_use_markup(True)
secondary_label.set_line_wrap(True)
secondary_label.set_selectable(True)
secondary_label.set_alignment(0, 0.5)
self.__labels.append(secondary_label)
self.set_contents(hbox_content)
class MsgAreaController(gtk.HBox):
__gtype_name__ = "MsgAreaController"
def __init__(self):
super(MsgAreaController, self).__init__()
self.__msgarea = None
self.__msgid = None
def has_message(self):
return self.__msgarea is not None
def get_msg_id(self):
return self.__msgid
def set_msg_id(self, msgid):
self.__msgid = msgid
def clear(self):
if self.__msgarea is not None:
self.remove(self.__msgarea)
self.__msgarea.destroy()
self.__msgarea = None
self.__msgid = None
def new_from_text_and_icon(self, stockid, primary, secondary=None, buttons=[]):
self.clear()
msgarea = self.__msgarea = MsgArea(buttons)
msgarea.set_text_and_icon(stockid, primary, secondary)
self.pack_start(msgarea, expand=True)
return msgarea<|fim▁end|> | w.window,
gtk.STATE_NORMAL, |
<|file_name|>test_raw_bltzall_lt_back.py<|end_file_name|><|fim▁begin|>#-
# Copyright (c) 2011 Steven J. Murdoch
# All rights reserved.
#
# This software was developed by SRI International and the University of
# Cambridge Computer Laboratory under DARPA/AFRL contract FA8750-10-C-0237
# ("CTSRD"), as part of the DARPA CRASH research programme.
#
# @BERI_LICENSE_HEADER_START@
#<|fim▁hole|># additional information regarding copyright ownership. BERI licenses this
# file to you under the BERI Hardware-Software License, Version 1.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
#
# http://www.beri-open-systems.org/legal/license-1-0.txt
#
# Unless required by applicable law or agreed to in writing, Work distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# @BERI_LICENSE_HEADER_END@
#
from beritest_tools import BaseBERITestCase
class test_raw_bltzall_lt_back(BaseBERITestCase):
def test_before_bltzall(self):
self.assertRegisterNotEqual(self.MIPS.a0, 0, "instruction before bltzall missed")
def test_bltzall_branch_delay(self):
self.assertRegisterEqual(self.MIPS.a1, 2, "instruction in brach-delay slot missed")
def test_bltzall_skipped(self):
self.assertRegisterNotEqual(self.MIPS.a2, 3, "bltzall didn't branch")
def test_bltzall_target(self):
self.assertRegisterEqual(self.MIPS.a3, 4, "instruction at branch target didn't run")
def test_bltzall_ra(self):
self.assertRegisterEqual(self.MIPS.a4, self.MIPS.ra, "bltzall ra incorrect")<|fim▁end|> | # Licensed to BERI Open Systems C.I.C. (BERI) under one or more contributor
# license agreements. See the NOTICE file distributed with this work for |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>'''
This file holds globally useful utility classes and functions, i.e., classes and
functions that are generic enough not to be specific to one app.
'''
import logging
import os
import re<|fim▁hole|>
from django.conf import settings
# Setup logging support.
LOGGER = logging.getLogger(__name__)
LOGGER.addHandler(settings.LOG_HANDLER)
# try to import the `fcntl` module for locking support through the `Lock` class
# below
try:
import fcntl
except ImportError:
LOGGER.warn("Locking support is not available for your (non-Unix?) system. "
"Using multiple processes might not be safe.")
def get_class_by_name(module_name, class_name):
'''
Given the name of a module (e.g., 'metashare.resedit.admin')
and the name of a class (e.g., 'ContactSMI'),
return the class type object (in the example, the class ContactSMI).
If no such class exists, throws an AttributeError
'''
try:
class_type = getattr(sys.modules[module_name], class_name)
return class_type
except AttributeError:
raise AttributeError("Module '{0}' has no class '{1}'".format(module_name, class_name))
def verify_subclass(subclass, superclass):
'''
Verify that subclass is indeed a subclass of superclass.
If that is not the case, a TypeError is raised.
'''
if not issubclass(subclass, superclass):
raise TypeError('class {0} is not a subclass of class {1}'.format(subclass, superclass))
def prettify_camel_case_string(cc_str):
'''
Prettifies the given camelCase string so that it is better readable.
For example, "speechAnnotation-soundToTextAlignment" is converted to "Speech
Annotation - Sound To Text Alignment". N.B.: The conversion currently only
recognizes boundaries with ASCII letters.
'''
result = cc_str
if len(result) > 1:
# result = result.replace('-', ' - ') AtA
result = result.replace('_', ' ')
result = result.replace('AtA', 'At a')
result = re.sub(r'(..)(?=[A-Z][a-z])', r'\1 ', result)
result = ' '.join([(len(token) > 1 and (token[0].upper() + token[1:]))
or token[0].upper() for token in result.split()])
return result
def create_breadcrumb_template_params(model, action):
'''
Create a dictionary for breadcrumb templates.
'''
opts = model._meta
dictionary = {
'app_label': opts.app_label,
'verbose_name': opts.verbose_name,
'action': action,
}
return dictionary
class Lock():
"""
Each instance of this class can be used to acquire an exclusive, system-wide
(multi-process) lock on a particular name.
This class will only work on Unix systems viz. systems that provide the
`fcntl` module. On other systems the class will silently do nothing.
"""
def __init__(self, lock_name):
"""
Create a `Lock` object which can create an exclusive lock on the given
name.
"""
if 'fcntl' in sys.modules:
self.handle = open(os.path.join(settings.LOCK_DIR, lock_name), 'w')
else:
self.handle = None
def acquire(self):
"""
Acquire a lock on the name for which this `Lock` was created.
"""
if self.handle:
fcntl.flock(self.handle, fcntl.LOCK_EX)
def release(self):
"""
Release any lock on the name for which this `Lock` was created.
"""
if self.handle:
fcntl.flock(self.handle, fcntl.LOCK_UN)
def __del__(self):
if self.handle:
self.handle.close()
class SimpleTimezone(tzinfo):
"""
A fixed offset timezone with an unknown name and an unknown DST adjustment.
"""
def __init__(self, offset):
self.__offset = timedelta(minutes=offset)
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return None
def dst(self, dt):
return None<|fim▁end|> | import sys
from datetime import tzinfo, timedelta |
<|file_name|>debug.go<|end_file_name|><|fim▁begin|>package http
import (
bm "go-common/library/net/http/blademaster"
)
func debugCache(c *bm.Context) {
opt := new(struct {
Keys string `form:"keys" validate:"required"`
})<|fim▁hole|> return
}
c.JSONMap(srv.DebugCache(opt.Keys), nil)
}<|fim▁end|> | if err := c.Bind(opt); err != nil { |
<|file_name|>apitest_args.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright 2006-2014 The FLWOR Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// standard
#include <algorithm>
#include <cctype>
#include <cstdlib>
#include <iostream>
// Zorba
#include <zorba/zorba.h>
#include <zorba/properties.h>
// local
#include "apitest_props.h"
using namespace std;
using namespace zorba;
static char const* check_args();
///////////////////////////////////////////////////////////////////////////////
#define HELP_OPT(ARG) ARG "\n"
char const* get_help_msg() {
return
////////// a //////////////////////////////////////////////////////////////
#ifndef NDEBUG
HELP_OPT( "--abort" )
"Call abort(3) when a ZorbaException is thrown.\n\n"
#endif /* NDEBUG */
////////// c //////////////////////////////////////////////////////////////
HELP_OPT( "--compile-only" )
"Only compile (don't execute).\n\n"
////////// d //////////////////////////////////////////////////////////////
#ifdef ZORBA_WITH_DEBUGGER
HELP_OPT( "--debug, -d" )
"Launch the Zorba debugger server and connect to a DBGP-enabled debugger client.\n\n"
#endif /* ZORBA_WITH_DEBUGGER */
HELP_OPT( "--debug-file <file>" )
"Sets the file to write developer debugging information to.\n\n"
HELP_OPT( "--debug-stream {1|cout|stdout|2|cerr|stderr}" )
"Sets the stream to write developer debugging information to.\n\n"
#ifdef ZORBA_WITH_DEBUGGER
HELP_OPT( "--debug-host, -h <host>" )
"The host where the DBGP-enabled debugger client listens for connections; default: 127.0.0.1.\n\n"
HELP_OPT( "--debug-port, -p <port>" )
"The port on which the DBGP-enabled debugger client listens for connections; default: 28028.\n\n"
#endif /* ZORBA_WITH_DEBUGGER */
////////// e //////////////////////////////////////////////////////////////
HELP_OPT( "--execute-plan" )
"Do not compile the query; instead load the execution plan from the file specified by the -f -q options (or by any file specified without any other argument), and execute the loaded plan.\n\n"
HELP_OPT( "--external-var, -x <name>{=<file>|:=<value>}" )
"Sets the value of an external variable.\n\n"
////////// i //////////////////////////////////////////////////////////////
HELP_OPT( "--infer-joins" )
"Infer joins.\n\n"
HELP_OPT( "--inline-udf" )
"Inline user-defined functions.\n\n"
////////// j //////////////////////////////////////////////////////////////
HELP_OPT( "--jsoniq, -j" )
"Force queries to be considered JSONiq.\n\n"
////////// l //////////////////////////////////////////////////////////////
HELP_OPT( "--lib-module, -l" )
"Query compiler option to treat the query as a library module. If this is set --compile-only option is also set to true.\n\n"
HELP_OPT( "--lib-path <path>" )
"Library path (list of directories) where Zorba will look for dynamic libraries (e.g., module external function implementations.\n\n"
HELP_OPT( "--loop-hosting" )
"Hoist expressions out of loops.\n\n"
////////// m //////////////////////////////////////////////////////////////
HELP_OPT( "--max-udf-call-depth <depth>" )
"Maximum stack depth of user-defined function calls.\n\n"
HELP_OPT( "--module-path <path>" )
"Path (list of directories) to add to both the URI and Library paths.\n\n"
////////// n //////////////////////////////////////////////////////////////
HELP_OPT( "--no-copy-optim" )
"Apply the no-copy optimization.\n\n"
HELP_OPT( "--no-serializer" )
"Do not serialize (discard) result.\n\n"
HELP_OPT( "--no-tree-ids" )
"Suppress IDs and locations from compiler tree dumps.\n\n"
////////// o //////////////////////////////////////////////////////////////
HELP_OPT( "--optimization-level, -O {0|1|2}" )
"Optimization level for the query compiler; default: 1.\n\n"
#ifdef ZORBA_WITH_FILE_ACCESS
HELP_OPT( "--output-file, -o <file>" )
"Write the result to the given file.\n\n"
#endif /* ZORBA_WITH_FILE_ACCESS */
////////// p //////////////////////////////////////////////////////////////
HELP_OPT( "--parse-only" )
"Stop after parsing the query.\n\n"
HELP_OPT( "--plan" )
"Test plan serialization.\n\n"
HELP_OPT( "--print-ast" )
"Print the abstract syntax tree.\n\n"
HELP_OPT( "--print-intermediate-opt" )
"Print intermediate optimizations.\n\n"
HELP_OPT( "--print-item-flow" )
"Print items produced by all iterators.\n\n"
HELP_OPT( "--print-iterator-tree" )
"Print the iterator tree.\n\n"
HELP_OPT( "--print-locations" )
"Print parser locations for compiler expressions.\n\n"
HELP_OPT( "--print-optimized" )
"Print the optimized expression tree.\n\n"
HELP_OPT( "--print-query" )
"Print the queries.\n\n"
HELP_OPT( "--print-static-types" )
"Print static type inference.\n\n"
HELP_OPT( "--print-translated" )
"Print the normalized expression tree.\n\n"
////////// q //////////////////////////////////////////////////////////////
HELP_OPT( "--query, -q <query>" )
"Query test or file URI (file://...).\n\n"
////////// s //////////////////////////////////////////////////////////////
HELP_OPT( "--serialization-parameter, -z <name>=<value>" )
"Set serialization parameter (see http://www.w3.org/TR/xslt-xquery-serialization/#serparam, e.g.: -z method=xhtml -z doctype-system=DTD/xhtml1-strict.dtd -z indent=yes).\n\n"
////////// t //////////////////////////////////////////////////////////////
HELP_OPT( "--trace-parsing" )
"Trace parsing.\n\n"
HELP_OPT( "--trace-scanning" )
"Trace scanning.\n\n"
#ifndef NDEBUG
HELP_OPT( "--trace-codegen" )
"Trace code generatio.\n\n"
HELP_OPT( "--trace-fulltext" )
"Trace full-text.\n\n"
HELP_OPT( "--trace-translator" )
"Trace the translator.\n\n"
#endif /* NDEBUG */
HELP_OPT( "--tz <minutes>" )
"Set implicit time zone (in minutes).\n\n"
////////// u //////////////////////////////////////////////////////////////
HELP_OPT( "--use-serializer" )
"Use serializer.\n\n"
////////// v //////////////////////////////////////////////////////////////
HELP_OPT( "--version, -v" )
"Print the version number and exit.\n\n"
;
}
///////////////////////////////////////////////////////////////////////////////
static bool split_key_value( char const *s, char const *split,
string *key, string *value ) {
char const *const p = strstr( s, split );
size_t const split_len = strlen( split );
if ( !p || p == s /* =xxx */ || !p[ split_len ] /* xxx= */ )
return false;
key->assign( s, 0, p - s );
value->assign( p + split_len );
return true;
}
inline char to_lower( char c ) {
return tolower( c );
}
template<class StringType> inline
typename enable_if<ZORBA_IS_STRING(StringType),void>::type
to_lower( StringType &s ) {
transform(
s.begin(), s.end(), s.begin(), static_cast<char (*)(char)>( to_lower )
);
}
static bool bool_of( char const *s ) {
string b( s );
to_lower( b );
if ( b == "t" || b == "true" || b == "y" || b == "yes" || b == "1" )
return true;
if ( b == "f" || b == "false" || b == "n" || b == "no" || b == "0" )
return false;
cerr << '"' << s << "\": invalid boolean value" << endl;
exit( 1 );
}
///////////////////////////////////////////////////////////////////////////////
#define IS_LONG_OPT(OPT) (strcmp( *argv, (OPT) ) == 0)
#define IS_SHORT_OPT(OPT) (strncmp( *argv, (OPT), 2 ) == 0)
#define IS_OPT(LOPT,SOPT) (IS_LONG_OPT(LOPT) || IS_SHORT_OPT(SOPT))
#define PARSE_ARG(ARG) \
int offset = 2; \
if ( (*argv)[1] == '-' || !(*argv)[2] ) { offset = 0; ++argv; } \
if ( !*argv ) { error = "No value given for " #ARG " option"; break; }
#define ARG_VAL (*argv + offset)
template<typename T> inline
void atoi( char const *s, T *result, int offset = 0 ) {
istringstream iss( s + offset );
iss >> *result;
}
#define SET_ATPROP(PROP) \
atoi( *argv, &at_props.PROP, offset )
template<class C,typename T> inline
void atoi( char const *s, C &c, void (C::*f)( T ), int offset = 0 ) {
T temp;
atoi( s, &temp, offset );
(c.*f)( temp );
}
#define SET_ZPROP(PROP) \
atoi( *argv, z_props, &Properties::set##PROP, offset )
int parse_args( int argc, char const *argv[] ) {
char const *error = 0;
Properties &z_props = Properties::instance();
APITestProperties &at_props = APITestProperties::instance();
char const **const argv_orig = argv;
for ( ++argv; *argv; ++argv ) {
////////// a //////////////////////////////////////////////////////////////
#ifndef NDEBUG
if ( IS_LONG_OPT( "--abort" ) )
z_props.setAbort( true );
else
#endif /* NDEBUG */
////////// c //////////////////////////////////////////////////////////////
if ( IS_LONG_OPT( "--compile-only" ) )
at_props.compile_only_ = true;
////////// d //////////////////////////////////////////////////////////////
#ifdef ZORBA_WITH_DEBUGGER
else if ( IS_OPT( "--debug", "-d" ) )
at_props.debug_ = true;
else if ( IS_OPT( "--debug-host", "-h" ) ) {
PARSE_ARG( "--debug-host" );
at_props.debug_host_ = ARG_VAL;
}
#endif /* ZORBA_WITH_DEBUGGER */
else if ( IS_LONG_OPT( "--debug-file" ) ) {
PARSE_ARG( "--debug-file" );
z_props.setDebugFile( ARG_VAL );
}
else if ( IS_LONG_OPT( "--debug-stream" ) ) {
PARSE_ARG( "--debug-stream" );
string val( ARG_VAL );
to_lower( val );
if ( val == "1" || val == "stdout" || val == "cout" )
z_props.setDebugStream( cout );
else if ( val == "2" || val == "stderr" || val == "cerr" )
z_props.setDebugStream( cerr );
else {
error = "--debug-stream argument must be one of: 1, stdout, cout, 2, stderr, or cerr.\n";
break;
}
}
#ifdef ZORBA_WITH_DEBUGGER
else if ( IS_OPT( "--debug-port", "-p" ) ) {
PARSE_ARG( "--debug-port" );
SET_ATPROP( debug_port_ );
}
#endif /* ZORBA_WITH_DEBUGGER */
else if ( IS_LONG_OPT( "--dot-plan-file" ) )
at_props.dot_plan_file_ = true;
else if ( IS_LONG_OPT( "--dump-lib" ) )
z_props.setDumpLib( true );
////////// e //////////////////////////////////////////////////////////////
else if ( IS_LONG_OPT( "--execute-plan" ) )
at_props.load_plan_ = true;
else if ( IS_OPT( "--external-var", "-x" ) ) {<|fim▁hole|> error = "--external-var argument must be of the form name=file or name:=value";
break;
}
external_var ev;
ev.var_value = value;
if ( key[0] == ':' ) {
error = "--external-var argument must be of the form name=file or name:=value";
break;
}
if ( key[ key.size() - 1 ] == ':' )
key.erase( key.size() - 1 );
else
ev.inline_file = true;
ev.var_name = key;
at_props.external_vars_.push_back( ev );
}
////////// f //////////////////////////////////////////////////////////////
else if ( IS_LONG_OPT( "--force-gflwor" ) )
z_props.setForceGFLWOR( true );
////////// h //////////////////////////////////////////////////////////////
else if ( IS_LONG_OPT( "--help" ) ) {
cout << "Zorba NoSQL Query Processor, Version: "
<< Zorba::version() << '\n'
<< "Available options:\n\n"
<< get_help_msg();
exit( 0 );
}
////////// i //////////////////////////////////////////////////////////////
else if ( IS_LONG_OPT( "--infer-joins" ) ) {
PARSE_ARG( "--infer-joins" );
z_props.setInferJoins( bool_of( ARG_VAL ) );
}
else if ( IS_LONG_OPT( "--inline-udf" ) ) {
PARSE_ARG( "--inline-udf" );
z_props.setInlineUDF( bool_of( ARG_VAL ) );
}
else if ( IS_LONG_OPT( "--iter-plan-test" ) )
at_props.iter_plan_test_ = true;
////////// j //////////////////////////////////////////////////////////////
else if ( IS_OPT( "--jsoniq", "-j" ) )
at_props.jsoniq_ = true;
////////// l //////////////////////////////////////////////////////////////
else if ( IS_OPT( "--lib-module", "-l" ) )
at_props.lib_module_ = true;
else if ( IS_LONG_OPT( "--lib-path" ) ) {
PARSE_ARG( "--lib-path" );
at_props.lib_path_ = ARG_VAL;
}
else if ( IS_LONG_OPT( "--loop-hoisting" ) ) {
PARSE_ARG( "--loop-hoisting" );
z_props.setLoopHoisting( bool_of( ARG_VAL ) );
}
////////// n //////////////////////////////////////////////////////////////
else if ( IS_LONG_OPT( "--no-copy-optim" ) ) {
PARSE_ARG( "--no-copy-optim" );
z_props.setNoCopyOptim( bool_of( ARG_VAL ) );
}
else if ( IS_LONG_OPT( "--no-tree-ids" ) )
z_props.setNoTreeIDs( true );
////////// o //////////////////////////////////////////////////////////////
else if ( IS_OPT( "--optimization-level", "-O" ) ||
IS_LONG_OPT( "--optimizer" ) ) {
PARSE_ARG( "--optimization-level" );
unsigned opt_level;
atoi( ARG_VAL, &opt_level );
if ( opt_level > 2 ) {
error = "Only 0, 1 and 2 are allowed as values for --optimization-level";
break;
}
z_props.setOptimizationLevel( opt_level );
}
#ifdef ZORBA_WITH_FILE_ACCESS
else if ( IS_OPT( "--output-file", "-o" ) ) {
PARSE_ARG( "--output-file" );
at_props.output_file_ = ARG_VAL;
}
#endif /* ZORBA_WITH_FILE_ACCESS */
////////// p //////////////////////////////////////////////////////////////
else if ( IS_LONG_OPT( "--parse-only" ) )
at_props.parse_only_ = true;
else if ( IS_LONG_OPT( "--plan" ) )
at_props.test_plan_serialization_ = true;
else if ( IS_OPT( "--print-ast", "-a" ) )
z_props.setPrintAST( true );
else if ( IS_LONG_OPT( "--print-intermediate-opt" ) )
z_props.setPrintIntermediateOpt( true );
else if ( IS_LONG_OPT( "--print-item-flow" ) )
z_props.setPrintItemFlow( true );
else if ( IS_LONG_OPT( "--print-iterator-tree" ) )
z_props.setPlanFormat( PLAN_FORMAT_XML );
else if ( IS_LONG_OPT( "--print-locations" ) )
z_props.setPrintLocations( true );
else if ( IS_OPT( "--print-optimized", "-P" ) )
z_props.setPrintOptimized( true );
else if ( IS_LONG_OPT( "--print-query" ) )
at_props.print_query_ = true;
else if ( IS_LONG_OPT( "--print-static-types" ) )
z_props.setPrintStaticTypes( true );
else if ( IS_LONG_OPT( "--print-translated" ) )
z_props.setPrintTranslated( true );
////////// q //////////////////////////////////////////////////////////////
else if ( IS_OPT( "--query", "-e" ) ) {
PARSE_ARG( "--query" );
at_props.query_ = ARG_VAL;
}
////////// s //////////////////////////////////////////////////////////////
else if ( IS_LONG_OPT( "--serialize-only-query" ) ) {
PARSE_ARG( "--serialize-only-query" );
at_props.serialize_only_query_ = bool_of( ARG_VAL );
}
else if ( IS_OPT( "--serialization-parameter", "-z" ) ) {
PARSE_ARG( "--serialization-parameter" );
string key, value;
serialization_param sp;
if ( split_key_value( ARG_VAL, "=", &key, &value ) ) {
sp.first = key;
sp.second = value;
} else
sp.first = ARG_VAL;
at_props.serialization_params_.push_back( sp );
}
else if ( IS_LONG_OPT( "--stable-iterator-ids" ) )
z_props.setStableIteratorIDs( true );
////////// t //////////////////////////////////////////////////////////////
else if ( IS_LONG_OPT( "--trace-parsing" ) )
z_props.setTraceParsing( true );
else if ( IS_LONG_OPT( "--trace-scanning" ) )
z_props.setTraceScanning( true );
#ifndef NDEBUG
else if ( IS_LONG_OPT( "--trace-codegen" ) )
z_props.setTraceCodegen( true );
else if ( IS_LONG_OPT( "--trace-fulltext" ) )
z_props.setTraceFulltext( true );
else if ( IS_LONG_OPT( "--trace-translator" ) )
z_props.setTraceTranslator( true );
#endif /* NDEBUG */
else if ( IS_LONG_OPT( "--tz" ) ) {
PARSE_ARG( "--tz" );
SET_ATPROP( tz_ );
}
////////// u //////////////////////////////////////////////////////////////
else if ( IS_LONG_OPT( "--use-indexes" ) )
z_props.setUseIndexes( true );
else if ( IS_LONG_OPT( "--use-serializer" ) )
at_props.use_serializer_ = true;
////////// v //////////////////////////////////////////////////////////////
else if ( IS_OPT( "--version", "-v" ) ) {
cout << "Zorba NoSQL Query Processor, Version: "
<< zorba::Zorba::version() << endl;
exit( 1 ); // should be 0, but the old code returned 1
}
////////// END ////////////////////////////////////////////////////////////
else if ( IS_LONG_OPT( "--" ) )
break;
else if ( (*argv)[0] == '-' ) {
cerr << "unknown command line option " << *argv << endl;
exit( 1 );
} else {
if ( !at_props.query_.empty() ) {
error = "exactly one inline query or query file must be specified";
break;
}
at_props.as_file_ = true;
at_props.query_ = *argv;
}
} // for
if ( !error )
error = check_args();
if ( error ) {
cout << "Error: " << error << "\nUse --help for help." << endl;
exit( 1 );
}
return argv - argv_orig;
}
///////////////////////////////////////////////////////////////////////////////
static char const* check_args() {
APITestProperties const &at_props = APITestProperties::instance();
if ( at_props.optimization_level_ > 2 )
return "only 0, 1 and 2 are allowed as values for the option --optimization-level";
return 0;
}
///////////////////////////////////////////////////////////////////////////////
/* vim:set et sw=2 ts=2: */<|fim▁end|> | PARSE_ARG( "--external-var" );
string key, value;
if ( !split_key_value( ARG_VAL, "=", &key, &value ) ) { |
<|file_name|>creation.py<|end_file_name|><|fim▁begin|>from django.conf import settings
from django.db.backends.creation import BaseDatabaseCreation
class DatabaseCreation(BaseDatabaseCreation):
# This dictionary maps Field objects to their associated MySQL column
# types, as strings. Column-type strings can contain format strings; they'll
# be interpolated against the values of Field.__dict__ before being output.
# If a column type is set to None, it won't be included in the output.
data_types = {
'AutoField': 'integer AUTO_INCREMENT',
'BooleanField': 'bool',
'CharField': 'varchar(%(max_length)s)',
'CommaSeparatedIntegerField': 'varchar(%(max_length)s)',
'DateField': 'date',
'DateTimeField': 'datetime',
'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)',
'FileField': 'varchar(%(max_length)s)',
'FilePathField': 'varchar(%(max_length)s)',
'FloatField': 'double precision',
'IntegerField': 'integer',
'IPAddressField': 'char(15)',
'NullBooleanField': 'bool',
'OneToOneField': 'integer',
'PositiveIntegerField': 'integer UNSIGNED',
'PositiveSmallIntegerField': 'smallint UNSIGNED',
'SlugField': 'varchar(%(max_length)s)',
'SmallIntegerField': 'smallint',
'TextField': 'longtext',
'TimeField': 'time',
}
def sql_table_creation_suffix(self):
suffix = []
if settings.TEST_DATABASE_CHARSET:
suffix.append('CHARACTER SET %s' % settings.TEST_DATABASE_CHARSET)
if settings.TEST_DATABASE_COLLATION:
suffix.append('COLLATE %s' % settings.TEST_DATABASE_COLLATION)
return ' '.join(suffix)
def sql_for_inline_foreign_key_references(self, field, known_models, style):
"All inline references are pending under MySQL"<|fim▁hole|> def sql_for_inline_many_to_many_references(self, model, field, style):
from django.db import models
opts = model._meta
qn = self.connection.ops.quote_name
table_output = [
' %s %s %s,' %
(style.SQL_FIELD(qn(field.m2m_column_name())),
style.SQL_COLTYPE(models.ForeignKey(model).db_type()),
style.SQL_KEYWORD('NOT NULL')),
' %s %s %s,' %
(style.SQL_FIELD(qn(field.m2m_reverse_name())),
style.SQL_COLTYPE(models.ForeignKey(field.rel.to).db_type()),
style.SQL_KEYWORD('NOT NULL'))
]
deferred = [
(field.m2m_db_table(), field.m2m_column_name(), opts.db_table,
opts.pk.column),
(field.m2m_db_table(), field.m2m_reverse_name(),
field.rel.to._meta.db_table, field.rel.to._meta.pk.column)
]
return table_output, deferred<|fim▁end|> | return [], True
|
<|file_name|>sensor_list.tsx<|end_file_name|><|fim▁begin|>import * as React from "react";
import { readPin } from "../../devices/actions";
import { SensorListProps } from "./interfaces";
import { sortResourcesById } from "../../util";
import { Row, Col } from "../../ui";
import { isNumber } from "lodash";
import { ALLOWED_PIN_MODES } from "farmbot";
import { t } from "../../i18next_wrapper";
interface SensorReadingDisplayProps {
label: string;
value: number | undefined;
mode: number;
}
interface CalcStyleProps {
value: number;
mode: number;
}
const calcIndicatorStyle = ({ value, mode }: CalcStyleProps) => ({
left: `calc(${
(mode
? value / 1024 * 0.95 // analog
: value / 2) // digital
* 100}%)`,
width: `${mode ? 5 : 50}%`
});
const calcValueStyle = ({ value, mode }: CalcStyleProps) => ({
marginLeft: `${mode
? `${value > 500 ? -3.5 : 1.5}rem` // analog
: "7rem"}`, // digital
color: `${mode ? "" : "white"}`
});
const SensorReadingDisplay =
({ label, value, mode }: SensorReadingDisplayProps) => {
const moistureSensor = label.toLowerCase().includes("moisture") ?
"moisture-sensor" : "";
return <div className={`sensor-reading-display ${moistureSensor}`}>
{isNumber(value) && value >= 0 &&
<div className="indicator" style={calcIndicatorStyle({ value, mode })}>
<span style={calcValueStyle({ value, mode })}>
{value}
</span>
</div>}
</div>;
};
export const SensorList = (props: SensorListProps) =>
<div className="sensor-list">
{sortResourcesById(props.sensors).map(sensor => {
const { label, mode, pin } = sensor.body;
const pinNumber = (isNumber(pin) && isFinite(pin)) ? pin : -1;
const value = (props.pins[pinNumber] || { value: undefined }).value;
return <Row key={sensor.uuid}>
<Col xs={3}>
<label>{label}</label>
</Col>
<Col xs={1}>
<p>{pinNumber}</p>
</Col>
<Col xs={6}>
<SensorReadingDisplay label={label} value={value} mode={mode} />
</Col>
<Col xs={2}>
<ReadSensorButton
disabled={!!props.disabled}
sensorLabel={label}
pinNumber={pinNumber}
mode={mode} /><|fim▁hole|> </Col>
</Row>;
})}
</div>;
interface ReadSensorButtonProps {
disabled: boolean;
sensorLabel: string;
pinNumber: number;
mode: number;
}
const ReadSensorButton = (props: ReadSensorButtonProps) => {
const { disabled, sensorLabel, pinNumber, mode } = props;
return <button
className={"fb-button gray"}
disabled={disabled}
title={t(`read ${sensorLabel} sensor`)}
onClick={() =>
readPin(pinNumber, `pin${pinNumber}`, mode as ALLOWED_PIN_MODES)}>
{t("read sensor")}
</button>;
};<|fim▁end|> | |
<|file_name|>apptest.go<|end_file_name|><|fim▁begin|>package tests
import (
log "github.com/janekolszak/revfluent"
"github.com/revel/revel/testing"
)
type AppTest struct {
testing.TestSuite
}
func (t *AppTest) TestError() {
data := map[string]string{"message": "Error"}
log.Error(data)
}<|fim▁hole|> data := map[string]string{"message": "Debug"}
log.Debug(data)
}
func (t *AppTest) TestInfo() {
data := map[string]string{"message": "Info"}
log.Info(data)
}
func (t *AppTest) TestLog() {
data := map[string]string{"message": "Log"}
log.Log("tag", data)
}
func (t *AppTest) TestLogger() {
data := map[string]string{"message": "Logger"}
log.Logger.Post("tag", data)
}<|fim▁end|> |
func (t *AppTest) TestDebug() { |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from .iqr_session import IqrSession<|fim▁hole|> 'IqrController',
'IqrSession',
]<|fim▁end|> | from .iqr_controller import IqrController
__all__ = [ |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-
from datetime import datetime
from threading import Timer
from queue import Queue
import uuid
import logging
#Fallbacl for python < 3.3
try:
from time import perf_counter
except ImportError:
from time import clock as perf_counter
log = logging.getLogger(__name__)
class _Task:
_processing_time = 10
_scheduler = None
def __init__(self, function, due=None, interval=None, repeat=0):
self._function = function
if hasattr(due, '__iter__'):
self._due_iter = iter(due)
self._due = self._due_iter.__next__()
else:
self._due_iter = None
self._due = due
self._interval = interval
self._repeat = repeat
if not (self._due or self._interval):
raise ValueError
def __call__(self, *args, job_uuid=None, **kwargs):
start = perf_counter()
result = self._function(*args, **kwargs)
self._processing_time = perf_counter() - start
if self._scheduler:
del self._scheduler._scheduled[job_uuid]
if self._interval and self._repeat != 1:
if self._repeat > 0:
self._repeat -= 1
self._scheduler.schedule(self, *args, job_uuid=job_uuid, **kwargs)
if self._due_iter:
self._due = self._due_iter.__next__()
if self._due:
self._scheduler.schedule(self, *args, job_uuid=job_uuid, **kwargs)
return result
def __get__(self, obj, type=None):<|fim▁hole|> new_func = self._function.__get__(obj, type)
return self.__class__(new_func, self._due_iter or self._due, self._interval, self._repeat)
class Task:
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def __call__(self, function):
return _Task(function, *self.args, **self.kwargs)
class Scheduler:
_queue = Queue()
_scheduled = dict()
def __init__(self):
pass
def schedule(self, function, *args, job_uuid=None, **kwargs):
if isinstance(function, _Task):
if not job_uuid:
job_uuid = uuid.uuid4()
kwargs['job_uuid'] = job_uuid
function._scheduler = self
if function._interval:
timer = Timer(function._interval, function, args, kwargs)
else:
remainder = (function._due - datetime.now()).total_seconds()
timer = Timer(remainder - function._processing_time, function, args, kwargs)
self._scheduled[job_uuid] = timer
timer.start()
return job_uuid
else:
self.queue.put((function, args, kwargs))
def cancel(self, job_uuid=None):
if job_uuid:
self._scheduled[job_uuid].cancel()
del self._scheduled[job_uuid]
else:
for job_uuid in self._scheduled:
self._scheduled[job_uuid].cancel()
del self._scheduled[job_uuid]<|fim▁end|> | if obj is None:
return self |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>"""
Views to support bulk email functionalities like opt-out.
"""
import logging
from six import text_type
from django.contrib.auth.models import User
from django.http import Http404
from bulk_email.models import Optout
from courseware.courses import get_course_by_id
from edxmako.shortcuts import render_to_response
from lms.djangoapps.discussion.notification_prefs.views import (
UsernameCipher,
UsernameDecryptionException,
)
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
log = logging.getLogger(__name__)
def opt_out_email_updates(request, token, course_id):
"""
A view that let users opt out of any email updates.
This meant is meant to be the target of an opt-out link or button.
The `token` parameter must decrypt to a valid username.
The `course_id` is the string course key of any course.
Raises a 404 if there are any errors parsing the input.
"""
try:
username = UsernameCipher().decrypt(token)
user = User.objects.get(username=username)<|fim▁hole|> course = get_course_by_id(course_key, depth=0)
except UnicodeDecodeError:
raise Http404("base64url")
except UsernameDecryptionException as exn:
raise Http404(text_type(exn))
except User.DoesNotExist:
raise Http404("username")
except InvalidKeyError:
raise Http404("course")
unsub_check = request.POST.get('unsubscribe', False)
context = {
'course': course,
'unsubscribe': unsub_check
}
if request.method == 'GET':
return render_to_response('bulk_email/confirm_unsubscribe.html', context)
if request.method == 'POST' and unsub_check:
Optout.objects.get_or_create(user=user, course_id=course_key)
log.info(
u"User %s (%s) opted out of receiving emails from course %s",
user.username,
user.email,
course_id,
)
return render_to_response('bulk_email/unsubscribe_success.html', context)<|fim▁end|> | course_key = CourseKey.from_string(course_id) |
<|file_name|>mdsViewer.js<|end_file_name|><|fim▁begin|>/*
Chronic Disease Report Generator - Web based reports on quality of care standards
Copyright (C) 2015 Brice Wong, Tom Sitter, Kevin Lin - Hamilton Family Health Team
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
*/
/*
* Generates and displays chart and user controls
* Handles user interaction
*/
var mdsViewer = (function() {
//Member variables to store data and state
var mCanvas = d3.select("#canvasContainer").select("#canvasSVG");
var mCanvasSnapshot = d3.select("#canvasContainer_snapshot").select("#canvasSVG");;
var mMode = ""; //either "snapshot" or "tracking"
var mDataLabels = true; //either true or false (not currently used)
var mShowAverages = true; //LHIN Averages
var mShowHFHTAverages = true; //HFHT Averages
var mShowTargets = true; //HFHT Targets
var mReportTitle = ""; //Main chart title - used when saving images or PDFs
var mCalculatedData = null; // indicator result data set from mdsIndicators
var mSelectedPhysicians = {}; // selected physicians object [{docnumber: true/false}, ...]
var mArrayDates = null; //array of dates [date, date, ...]
var mTotalPatients = null; //# of patient records in file
var mCurrentIndSetIndex = 0; // current rule set index
var mCurrentIndSetName = ""; // current rule set name
var mCurrentIndicator = 0; // current indicator
var mCurrentDateIndex = 0; //current selected date when in tracking mode
var xScaleSnapshot, yScaleSnapshot, xAxisSnapshot, yAxisSnapshot;
var xScaleTracking, yScaleTracking, xAxisTracking, yAxisTracking;
//Static variables to handle graph dimensions and colors
var DEFAULT_CANVAS_WIDTH = 940;
var IMAGE_CANVAS_WIDTH = 752;
var mCanvasScale = 1;
var mCanvasWidth = DEFAULT_CANVAS_WIDTH * mCanvasScale; // pixels
var DEFAULT_CANVAS_HEIGHT = 480; // pixels
var mCanvasHeight = DEFAULT_CANVAS_HEIGHT;
var DEFAULT_PADDING_LEFT_SNAPSHOT = 250;
var mSnapshotPaddingLeft = DEFAULT_PADDING_LEFT_SNAPSHOT * mCanvasScale;
var DEFAULT_PADDING_TOP_SNAPSHOT = 50;
var DEFAULT_GRAPH_WIDTH_SNAPSHOT = DEFAULT_CANVAS_WIDTH - DEFAULT_PADDING_LEFT_SNAPSHOT - 25;
var DEFAULT_BAR_WIDTH = 50;
var mGraphWidthSnapshot = DEFAULT_GRAPH_WIDTH_SNAPSHOT * mCanvasScale;
var DEFAULT_GRAPH_HEIGHT_SNAPSHOT = DEFAULT_CANVAS_HEIGHT - (2 * DEFAULT_PADDING_TOP_SNAPSHOT);
var DEFAULT_PADDING_LEFT_TRACKING = 75;
var mTrackingPaddingLeft = DEFAULT_PADDING_LEFT_TRACKING * mCanvasScale;
var DEFAULT_PADDING_TOP_TRACKING = 50;
var DEFAULT_GRAPH_WIDTH_TRACKING = DEFAULT_CANVAS_WIDTH - (2 * DEFAULT_PADDING_LEFT_TRACKING);
var mGraphWidthTracking = DEFAULT_GRAPH_WIDTH_TRACKING * mCanvasScale;
var DEFAULT_GRAPH_HEIGHT_TRACKING = DEFAULT_CANVAS_HEIGHT - (2 * DEFAULT_PADDING_TOP_TRACKING);
var DEFAULT_YAXIS_CHAR_LENGTH = 25;
var DEFAULT_XAXIS_CHAR_LENGTH = 8;
var mYAxisCharLength = DEFAULT_YAXIS_CHAR_LENGTH * mCanvasScale;
var mXAxisCharLength = DEFAULT_XAXIS_CHAR_LENGTH;
var DEFAULT_COLOURS = ["firebrick", "steelblue", "yellowgreen", "mediumpurple", "cadetblue",
"sandybrown", "forestgreen", "goldenrod", "darkslateblue", "firebrick",
"palevioletred", "sienna", "bisque"];
var HIGHLIGHT_COLOURS = ["lightcoral", "#90B4D2", "#CCE698", "#DFD4F4", "#AFCED0",
"#FAD2B0", "#90C590", "lightcoral","steelblue" , "lightcoral"];
var MONTH_NAMES = [ "January", "February", "March", "April", "May", "June",
"July", "August", "September", "October", "November", "December" ];
var MONTH_NAMES_SHORT = [ "Jan", "Feb", "Mar", "Apr", "May", "June",
"July", "Aug", "Sept", "Oct", "Nov", "Dec" ];
//Whether the file has a "Rostered" field,
//used to check whether to make a "Rostered Patients Only" checkbox
var hasRosteredField = false;
var mRosteredOnly = true;
var resizeTimer;
//First time an extra canvas is generated, automatically scroll to it to show user the location
//then disable the feature
var mFirstScrollView = true;
//Scroll until element is completely in view
$.fn.scrollView = function () {
return this.each(function () {
$('html, body').animate({
scrollTop: $(this).offset().top
}, 1000);
});
};
//If element is less than 1/3 (approximately) in view then return true
//(only works if element is below current viewing window)
$.fn.inViewport = function () {
return $(this).position().top + $(this).height()/3
< (window.innerHeight || document.documentElement.clientHeight) + $(window).scrollTop();
};
//Check if two objects have the same keys and values
function isEquivalent(a, b) {
// Create arrays of property names
var aProps = Object.keys(a);
var bProps = Object.keys(b);
// If number of properties is different,
// objects are not equivalent
if (aProps.length != bProps.length) {
return false;
}
for (var i = 0; i < aProps.length; i++) {
var propName = aProps[i];
// If b does not have property
// objects are not equivalent
if (!b.hasOwnProperty(propName)) {
return false;
}
if (!(b[propName] === a[propName])) {
return false;
}
}
// If we made it this far, objects
// are considered equivalent
return true;
}
/*
* Called by mdsReader
* Removes and reinitializes UI elements and chart
* Calls appropriate graphing function based on mode
*/
function generateCharts(currentRuleSetIndex, calculatedData, selectedPhysicians, arrayDates, totalPatients) {
//mMode = mMode || (arrayDates.length > 1 ? "tracking" : "snapshot");
mMode = (arrayDates.length > 1 ? "tracking" : "snapshot");
mCurrentIndSetIndex = currentRuleSetIndex;
mCalculatedData = calculatedData;
//If the selected phyisicians objects have different keys (i.e. docs) then this is
//a new file and we have to update the physician list in the action bar
var isNewFile = !isEquivalent(mSelectedPhysicians, selectedPhysicians);
mSelectedPhysicians = selectedPhysicians;
mArrayDates = arrayDates;
mTotalPatients = totalPatients;
mCurrentIndSetName = mdsIndicators.ruleList[currentRuleSetIndex].name;
//mCurrentIndicator = 0;
$("#canvasContainer_snapshot").empty();
$("#canvasContainer_histogram").empty();
if (mCalculatedData == undefined) {
console.log("no calculated data!");
return;
}
if ($("#settings").children().length === 0) {
//addUserInterface(); TPS
} else if (isNewFile) {
addPhysicianList();
mCurrentDateIndex = 0;
}
clearCanvas();
updateCanvasSize();
addUserInterface();
if ($('#indicatorEditor').is(':empty')) {
addIndicatorEditor();
}
if (mMode === "snapshot") {
//calculatedData = calculatedData[0];
//$("#dropdownIndicators").hide();
generateSnapshot(0);
histogram();
} else {
var isEmpty = true;
for (var i = 0; i < mCalculatedData.length; i++) {
if (mCalculatedData[i].length>0) {
isEmpty = false;
} else {
mCalculatedData.splice(i, 1);
mArrayDates.splice(i, 1);
}
}
if (!isEmpty) {
//By default, select first item in dropdown
addIndicatorEditor();
generateTracking();
} else {
alert("No data found in these files for the " + $("#dropdownRules").val() + " rule set");
}
}
$("#dropdownRules").val(getCurrentIndSetName());
//Turn on canvas resizing
window.onresize = function(){
if (resizeTimer){
clearTimeout(resizeTimer);
}
resizeTimer = setTimeout(function(){
updateCanvasSize(true);
}, 250);
};
};
/*
* Remove graph and user interface elements
* Called when chart needs to be refreshed or cleared
*/
function clearCanvas() {
//Only applies to snapshot mode
mCanvasHeight = Math.floor(DEFAULT_BAR_WIDTH * mCalculatedData[mCurrentDateIndex].length + (2*DEFAULT_PADDING_TOP_SNAPSHOT))
$("#canvasContainer").empty();
mCanvas = d3.select("#canvasContainer").append("svg")
.attr("id", "canvasSVG")
.attr("width", mCanvasWidth)
.attr("height", mMode == 'snapshot' ? mCanvasHeight : DEFAULT_CANVAS_HEIGHT)
.style("border", "1px solid lightgray")
.append("g")
.attr("class", "g_main")
.attr("transform", function() {
switch (mMode) {
case "snapshot":
return "translate(" + mSnapshotPaddingLeft + ", " + DEFAULT_PADDING_TOP_SNAPSHOT + ")";
break;
case "tracking":
return "translate(" + DEFAULT_PADDING_LEFT_TRACKING + ", " + DEFAULT_PADDING_TOP_TRACKING + ")";
break;
}
});
};
function allEqual(val, obj){
for (k in obj) {
if (obj[k] != val) {
return false;
}
}
return true;
}
function getCurrentIndSetName(){
return mdsIndicators.ruleList[mCurrentIndSetIndex].name;
}
function getIndicator(){
if (arguments.length === 0) {
return mdsIndicators.ruleList[mCurrentIndSetIndex].rules[getInternalRuleIndex()];
} else {
return mdsIndicators.ruleList[mCurrentIndSetIndex].rules[arguments[0]];
}
}
function getIndicatorSet(){
if (arguments.length === 0) {
return mdsIndicators.ruleList[mCurrentIndSetIndex].rules;
} else {
return mdsIndicators.ruleList[arguments[0]].rules;
}
}
/*
* Adds and initializes user interface elements, namely
* Physician Selection
* Indicator Set dropdown
* Individual indicator dropdown (in tracking mode)
* Download buttons
*/
function addUserInterface() {
// If uploading new files, remove old side panels and recreate the panels with new filters based on the new imported data
// physicianSection, measuresSection, settingsSection
$("#settings").empty();
// Adding a panel section for selecting physicians
$("#settings").append('<ul id="selectPhysicians"></ul>' +
'<div id="dropdowns"></div>' +
'<div id="actionButtons"></div>');
addPhysicianList();
// Save to PNG
var btnSaveImage = '<button class="pure-button actionButton" id="btnSaveImage"><i class="fa fa-file-image-o"></i> Save as image</button>';
$("#actionButtons").append(btnSaveImage);
$("#btnSaveImage").unbind();<|fim▁hole|> $("#btnSaveImage").click(function() { saveFile('image'); });
var btnSavePDF = '<button class="pure-button actionButton" id="btnSavePDF"><i class="fa fa-file-pdf-o"></i> Save as PDF</button>';
$("#actionButtons").append(btnSavePDF);
$("#btnSavePDF").unbind();
$("#btnSavePDF").click(function() { saveFile('pdf'); });
var btnDownloadPatients = '<button class="pure-button actionButton" id="btnDownloadPatients"><i class="fa fa-file-text"></i> Download Patient Info</button>'
$("#actionButtons").append(btnDownloadPatients);
$("#btnDownloadPatients").unbind();
$("#btnDownloadPatients").click(function() {
var indicator = getIndicator();
var cols = indicator.col.slice();
//Remove current date from indicator columns
var hasCurrentDate = $.inArray("Current Date", cols);
if (hasCurrentDate >= 0) {
cols.splice(hasCurrentDate, 1 );
}
//get the data
var data = mdsReader.getData()[mCurrentDateIndex];
//store it once in a variable
var currentDate = data["Current Date"][0];
//Add patient ID to patient list
var patientList = {}
patientList['PatientID'] = data['Patient #'];
for (var i in cols) {
patientList[cols[i]] = data[cols[i]];
}
var patientsIndex = mCalculatedData[mCurrentDateIndex][mCurrentIndicator].passedIndex;
var csvPatientList = [];
for (var r=0; r < patientsIndex.length; r++) {
//Skip patients who are meeting criteria
if (patientsIndex[r] === true)
continue
//Store information for patients not meeting criteria
var row = [];
row.push(patientList["PatientID"][r]);
for (var i in cols) {
// Remove any commas in text such as dates
row.push(patientList[cols[i]][r].replace(",", ""));
}
csvPatientList.push([row.join(", ")]);
}
var message = [];
message.push(indicator.desc());
message.push("Data Extracted On: " + currentDate);
var header = ["Patient ID"];
for (var h in cols) {
header.push(cols[h]);
}
message.push(header.join(", "));
for (var p in csvPatientList) {
message.push(csvPatientList[p].toString());
}
var text = new Blob([message.join("\n")], {type:'text/plain'});
saveAs(text, 'patientList.csv');
});
// Toggle data labels
var btnToggleLabels = '<button class="pure-button actionButton" id="btnToggleLabels"><i class="fa fa-check-square-o"></i> Toggle data labels</button>';
$("#actionButtons").append(btnToggleLabels);
$("#btnToggleLabels").unbind();
$("#btnToggleLabels").click(function() {
toggleDataLabels();
$(this).find("i").toggleClass("fa-check-square-o fa-square-o");
return false;
});
/*
* Indicator set dropdown (e.g. diabetes, hypertension, immus, ...)
*/
var dropdownRules = ['<select id="dropdownRules" class="settingsDropdown">'];
// Add dropdown to switch between rule sets
for (var i=0; i<mdsIndicators.ruleList.length;i++) {
dropdownRules.push('<option>' + mdsIndicators.ruleList[i].name + '</option>');
}
dropdownRules.push('</select>');
$("#dropdowns").append(dropdownRules.join('\n'));
$("#dropdownRules").change(function() {
mCurrentIndSetIndex = this.selectedIndex;
mCurrentIndSetName = this.value;
mCurrentIndicator = 0;
mdsReader.reCalculate(mCurrentIndSetIndex, mSelectedPhysicians);
addIndicatorEditor();
});
$("#dropdownRules").val(getCurrentIndSetName());
/*
* EMR choice dropdown (PSS or Oscar)
*/
var dropdownEMR = '<select id="dropdownEMR">' +
'<option value="PSS">PSS</option>' +
'<option value="Oscar">Oscar</option>' +
'</select>';
$("#dropdowns").append(dropdownEMR);
// Create change function
$("#dropdownEMR").change(function() {
mdsIndicators.setEMR(this.value);
mdsReader.reCalculate(mCurrentIndSetIndex, mSelectedPhysicians);
});
//Set the selected EMR in the dropdown based on which is selected
$("#dropdownEMR").val(mdsIndicators.getEMR());
/*
Rostered checkbox. Only visible if a Rostered field is in the report
*/
//Add a checkbox to allow user to filter only rostered patients if that column exists.
//mdsReader has public variable that records whether this column exists
//If checked, tell mdsIndicators that the user only wants rostered patients
if (hasRosteredField) {
var rostered = ' <input type="checkbox" id="rostered">' +
'Rostered Patients Only' +
'</input>';
$("#dropdowns").append(rostered);
$("#rostered").prop("checked", mRosteredOnly);
$("#rostered").change(function() {
mRosteredOnly = $(this).is(':checked');
mdsReader.reCalculate(mCurrentIndSetIndex, mSelectedPhysicians);
});
}
};
function addPhysicianList() {
$("#selectPhysicians").empty();
var selected = allEqual(true, mSelectedPhysicians) ? "selected" : "notSelected";
$("#selectPhysicians").append('<li id="mainSelector" class="physicianListItem ' + selected + '"><span class="checkmark">\u2714</span>Select All</li>');
// Loop through 'arrayUniquePhysicians' and create a list item for each element. These will be the physician filters that will appear in the side
// panel. There will also be a filter for "All Selected Physicians"
//for (var i = 0; i < Object.keys(mSelectedPhysicians).length; i++) {
for (doc in mSelectedPhysicians) {
var selected = mSelectedPhysicians[doc] == true ? "selected" : "notSelected";
$("#selectPhysicians").append('<li class="physicianListItem ' + selected + '" data-docnumber="'+doc+'"><span class="checkmark">\u2714</span> Doctor Number ' + doc + '</li>');
}
//}
$(".physicianListItem").click( function(){
if (mCalculatedData == undefined) {
console.log("Calculated data undefined");
return false;
}
var isSelected = $(this).hasClass("selected");
if (isSelected === true) {
var className = 'notSelected';
} else {
var className = 'selected';
}
// If clicked on "Select All"
if (this.id === "mainSelector") {
// If class has 'selected', it currently is selected and must be unselected
for (doc in mSelectedPhysicians) {
if (mSelectedPhysicians.hasOwnProperty(doc)) {
//negate the isSelected status to select/deselect the option
mSelectedPhysicians[doc] = !isSelected;
}
}
$(".physicianListItem").removeClass("selected notSelected").addClass(className);
}
// Otherwise, clicked on an individual doctor
else {
var doc = $(this).data('docnumber');
mSelectedPhysicians[doc] = !isSelected;
$(this).toggleClass('selected notSelected');
if(allEqual(true, mSelectedPhysicians)) {
$("#mainSelector").removeClass("selected notSelected").addClass("selected");
} else {
$("#mainSelector").removeClass("selected notSelected").addClass("notSelected");
}
}
mdsReader.reCalculate(mCurrentIndSetIndex, mSelectedPhysicians);
return false;
});
}
/**
 * Saves current chart to either PDF or PNG
 * @param {String} fileType Either 'pdf' or 'image'
 * @return {boolean} Always false - required for jQuery callback
 */
function saveFile(fileType) {
//Second true means that we are forcing it to be "MEDIUM" sized
updateCanvasSize(true, "MEDIUM");
// Append canvas to the document
//Snapshot mode uses the dynamic canvas height; tracking mode the default
var canvasHeight = (mMode == 'snapshot' ? mCanvasHeight : DEFAULT_CANVAS_HEIGHT)
var canvasString = '<canvas id="outputCanvas" width="' + IMAGE_CANVAS_WIDTH + '" height="' + canvasHeight +
'" style="border: 1px solid black; display:none;"></canvas>';
$("#outputCanvas").remove();
$("body").append(canvasString);
// Retrieve output canvas and copy the current visualization into the canvas
var output = $("#outputCanvas")[0];
var svgXML = (new XMLSerializer()).serializeToString($("#canvasSVG")[0]);
canvg(output, svgXML, { ignoreDimensions: true });
var ctx = output.getContext('2d');
ctx.save();
//destination-over paints the white fill BEHIND the already-rendered SVG
ctx.globalCompositeOperation = "destination-over";
ctx.fillStyle = 'white';
ctx.fillRect(0, 0, output.width, output.height);
if (fileType === 'pdf') {
// Retrieve data URL of the graph
var outputURL = output.toDataURL('image/jpeg');
// Create portrait PDF object
var doc = new jsPDF();
// Title
doc.setFontSize(20);
doc.setFont('times');
var splitTitle = doc.splitTextToSize(mReportTitle, 180);
var titleSpacing;
//Long (wrapped) titles start near the left margin; short ones are
//roughly centered by shifting right based on their length
if (splitTitle[0].length >= 55) {
titleSpacing = 10;
} else {
titleSpacing = 60-(splitTitle[0].length/2);
}
doc.text(titleSpacing, 20, splitTitle);
//doc.addImage(outputURL, 'JPEG', 15, 60, 180, 100);
doc.addImage(outputURL, 'JPEG', 15, 60, mCanvasWidth*0.2, canvasHeight*0.2);
// save() to download automatically, output() to open in a new tab
//doc.save(mReportTitle.concat('.pdf'));
doc.output('save', mReportTitle.concat('.pdf'));
} else {
// Retrieve data string of the canvas and append to the hidden img element
var outputURL = output.toDataURL();
//NOTE(review): the next four lines assign plain properties ('src',
//'download', 'href') on the jQuery wrapper object, NOT on the DOM
//elements, and .click() only fires jQuery handlers — so they likely do
//nothing. The PNG the user receives comes from the toBlob/saveAs call
//below. Confirm these lines are dead code before removing.
$("#outputImg").src = outputURL;
// Modify attributes of hidden elements and simulate file download
$("#outputA").download = mReportTitle;
$("#outputA").href = outputURL;
$("#outputA").click();
output.toBlob(function(blob) {
saveAs(blob, mReportTitle.concat('.png'));
});
}
//NOTE(review): toBlob is asynchronous, so restore() may run before the
//PNG callback fires; harmless now because the pixels are final, but
//confirm if drawing is ever added after this point.
ctx.restore();
//Restore canvas to previous size (if it changed)
updateCanvasSize(true);
//For jQuery callback
return false;
}
/**
 * Builds the indicator editor bar for the currently selected indicator:
 * one input per modifiable target plus Apply / Reset / Reset All buttons.
 * @return {boolean|undefined} false when the indicator has no modifiable
 *         targets (editor stays hidden); otherwise undefined.
 */
function addIndicatorEditor() {
    //Title-cases a raw variable name, e.g. "target rate" -> "Target Rate"
    function capitalize(s){
        return s.toLowerCase().replace( /\b./g, function(a){ return a.toUpperCase(); } );
    };
    //Reset indicator editor bar
    removeIndicatorEditor();
    //BUG FIX: 'currentIndicator' was assigned without 'var', leaking an
    //implicit global variable; it is now scoped to this function.
    var currentIndicator = getIndicator();
    if (!currentIndicator.hasOwnProperty("modifiable")) {
        return false;
    }
    var items = [];
    //items.push('<div id="indicatorEditor" class="pure-g">');
    items.push('<div class="pure-u-1 indicatorTitle">Modify Indicator Targets</div>');
    //One labelled input per modifiable target of the indicator
    $.each(currentIndicator.modifiable, function(i, item) {
        //Prefer the human-readable name from the lookup table; fall back
        //to a capitalized version of the raw variable name
        var itemName = mdsIndicators.lookupVarNameTable[item];
        if (typeof itemName === 'undefined') {
            itemName = capitalize(item);
        }
        items.push('<div class="pure-u-1"><label for="' + item + '">' + itemName + '</label>');
        items.push('<br/><input id="' + item + '" class="indicatorValue" value="' + currentIndicator[item] + '"></div>');
    });
    items.push('<div style="padding-top:15px;" class="pure-u-1-2"><button id="applybtn" class="pure-button actionButton">Apply Changes</button></div>');
    items.push('<div class="pure-u-1-2" style="padding-top:15px;"><button style="float:right" id="resetbtn" class="pure-button actionButton">Reset</button></div>');
    items.push('<div class="pure-u-1"><button id="resetallbtn" class="pure-button actionButton">Reset All</button></div>');
    $("#indicatorEditor").append(items.join(''));
    //Pressing Enter inside any input applies the changes immediately
    $("#indicatorEditor .indicatorValue").bind('keypress', function(e) {
        var code = e.keyCode || e.which;
        if(code == 13) {
            updateIndicator();
        }
    });
    //unbind first so repeated rebuilds don't stack duplicate handlers
    $("#applybtn").unbind("click")
        .click( function() { updateIndicator();} );
    $("#resetbtn").unbind("click")
        .click( function() { resetIndicator();} );
    $("#resetallbtn").unbind("click")
        .click( function() { resetAllIndicators();} );
    $("#indicatorEditor").css("display", "block");
    updateDropdownIndicators();
}
//Rebuilds the indicator <select> dropdown from the indicators currently
//present in mCalculatedData; options for non-modifiable indicators are
//disabled. No-op when the indicator editor bar is not on the page.
function updateDropdownIndicators() {
    if ($("#indicatorEditor").length === 0) {
        return false;
    }
    $("#dropdownIndicators").remove();
    var markup = ['<select id="dropdownIndicators">'];
    var indicators = mCalculatedData[0];
    for (var idx = 0; idx < indicators.length; idx++) {
        //Only indicators with modifiable targets can be chosen for editing
        var editable = getIndicator(getInternalRuleIndex(idx)).hasOwnProperty('modifiable');
        var openTag = editable ? '<option>' : '<option disabled>';
        markup.push(openTag + indicators[idx]["desc"] + '</option>');
    }
    markup.push('</select>');
    $("#indicatorEditor").prepend(markup.join('\n'));
    //Keep the dropdown in sync with the currently selected indicator
    $("#dropdownIndicators")[0].selectedIndex = mCurrentIndicator;
    $("#dropdownIndicators").change(function() {
        mCurrentIndicator = this.selectedIndex;
        updateCharts();
    });
}
//Pushes the values typed into the indicator editor inputs back into the
//active rule, then recalculates the report.
function updateIndicator() {
    var ruleIdx = getInternalRuleIndex();
    var rule = mdsIndicators.ruleList[mCurrentIndSetIndex].rules[ruleIdx];
    var applied = 0;
    $('.indicatorValue').each(function() {
        //Non-numeric input falls back to 0; otherwise keep the raw field value
        rule[this.id] = isNaN(Number(this.value)) ? 0 : this.value;
        applied++;
    });
    //Recalculate only after every input field has been applied
    if (applied === $('.indicatorValue').length) {
        recalculateIndicators();
    }
}
//Restores the currently selected indicator to its default targets and
//recalculates the report.
function resetIndicator() {
    mdsIndicators.resetToDefault(getIndicator());
    recalculateIndicators();
}
//Restores defaults for every indicator in the current set that declares a
//'defaults' property, then recalculates the report once.
function resetAllIndicators() {
    var indicatorSet = getIndicatorSet();
    for (var idx = 0; idx < indicatorSet.length; idx++) {
        //Indicators without a 'defaults' property have nothing to reset
        if (indicatorSet[idx].hasOwnProperty('defaults')) {
            mdsIndicators.resetToDefault(indicatorSet[idx]);
        }
    }
    recalculateIndicators();
}
//Recomputes the report data while preserving the currently selected
//indicator, then rebuilds the indicator editor (which also re-adds the
//indicator dropdown).
function recalculateIndicators(){
    var savedIndicator = mCurrentIndicator;
    mdsReader.reCalculate(mCurrentIndSetIndex, mSelectedPhysicians);
    //reCalculate resets the selection; restore it afterwards
    mCurrentIndicator = savedIndicator;
    addIndicatorEditor();
}
//Clears the indicator editor bar's contents and hides it.
function removeIndicatorEditor() {
    $("#indicatorEditor").empty().css("display", "none");
}
//Redraws the visualization for the current mode (snapshot bar chart +
//histogram, or tracking line chart) and refreshes the editor bar.
function updateCharts() {
    clearCanvas();
    $("#canvasContainer_snapshot").empty();
    $("#canvasContainer_histogram").empty();
    if (mMode !== "tracking") {
        generateSnapshot(0);
        histogram();
    } else {
        generateTracking();
    }
    addIndicatorEditor();
}
/**
 * Maps a display position in the current report to the internal rule index
 * used by mdsIndicators.
 * @param {number} [indIndex] Position in mCalculatedData[0]; defaults to the
 *        currently selected indicator (mCurrentIndicator) for backward
 *        compatibility with existing no-argument callers.
 * @return {number} The internal rule index, or 0 when out of range.
 */
function getInternalRuleIndex(indIndex) {
    //BUG FIX: this function previously took no parameter and always used
    //mCurrentIndicator, so callers passing an explicit position (e.g.
    //updateDropdownIndicators' per-option check) silently got the rule for
    //the wrong indicator.
    var pos = (typeof indIndex === 'undefined') ? mCurrentIndicator : indIndex;
    if (mCalculatedData[0].length > 0 && pos < mCalculatedData[0].length) {
        return mCalculatedData[0][pos].index;
    } else {
        return 0;
    }
}
/**
 * Updates dimensions of drawing canvas based on window size or
 * input parameters and redraws it
 * @param {boolean} redraw Clears and redraws canvas if true (only takes
 *        effect when the scale actually changed)
 * @param {String} canvasSize One of ["SMALL", "MEDIUM", "LARGE"] to set canvas size manually;
 *        omit entirely to size responsively from window.innerWidth
 * @return Nothing
 */
function updateCanvasSize(redraw, canvasSize) {
var prevScale = mCanvasScale;
//Scale factors applied to all DEFAULT_* layout constants
var small = 0.6;
var medium = 0.8;
var large = 1;
//Manual sizing only when both arguments were explicitly supplied
if (arguments.length === 2) {
switch(canvasSize){
case "LARGE":
mCanvasScale = large;
break;
case "MEDIUM":
mCanvasScale = medium;
break;
case "SMALL":
mCanvasScale = small;
break;
default:
mCanvasScale = medium;
}
}
else {
//Responsive sizing from the current window width
if (window.innerWidth >= 960) {
mCanvasScale = large;
} else if (window.innerWidth < 960 && window.innerWidth >= 780) {
mCanvasScale = medium;
} else if (window.innerWidth < 780) {
mCanvasScale = small;
}
}
//Recompute layout constants (and optionally redraw) only when the scale changed
if (prevScale != mCanvasScale) {
mCanvasWidth = Math.floor(DEFAULT_CANVAS_WIDTH*mCanvasScale);
mGraphWidthSnapshot = Math.floor(DEFAULT_GRAPH_WIDTH_SNAPSHOT*mCanvasScale);
mGraphWidthTracking = Math.floor(DEFAULT_GRAPH_WIDTH_TRACKING*mCanvasScale);
mSnapshotPaddingLeft = Math.floor(DEFAULT_PADDING_LEFT_SNAPSHOT*mCanvasScale);
mYAxisCharLength = Math.floor(DEFAULT_YAXIS_CHAR_LENGTH*mCanvasScale);
if (redraw) {
clearCanvas();
if (mMode === 'snapshot') {
generateSnapshot(0);
histogram();
} else {
generateTracking();
}
}
}
}
//Wraps the text of an SVG <text> element into <tspan> lines of roughly
//lineLength characters each. When 'title' is supplied the lines are
//centered horizontally for use as a graph title; otherwise they are
//offset left/up-down for use as y-axis labels (special spacing for
//2- and 3-line labels).
function splitText(textElement, lineLength, title) {
    var isTitle = (typeof title !== 'undefined');
    //Mark each wrapped chunk with '@' and split on the markers
    var wrapRegex = new RegExp(".{" + lineLength + "}\\S*\\s+", "g");
    var lines = textElement.text().replace(wrapRegex, "$&@").split(/\s+@/);
    textElement.text('').attr('y', '0');
    for (var n = 0; n < lines.length; n++) {
        var tspan = textElement.append('tspan').text(lines[n]);
        if (isTitle) {
            //Shift the whole title block up and center each line
            textElement.attr('y', -25);
            tspan.attr('y', '-8').attr('x',mCanvasWidth/2 - lines[n].length).attr("style","text-anchor:middle");
        } else if (lines.length === 2) {
            //Two lines straddle the tick: one above, one below
            tspan.attr('x', 0).attr('y', n === 0 ? -6 : 12).attr('dx', '-10');
        } else if (lines.length === 3) {
            //Three lines: above, centered, below
            var lineY = (n === 0) ? -14 : (n === 1 ? 4 : 18);
            tspan.attr('x', 0).attr('y', lineY).attr('dx', '-10');
        } else {
            tspan.attr('x', 0).attr('dx', '-10');
        }
    }
}
//d3 .each() helper: wraps an x-axis tick label onto multiple <tspan>
//lines of roughly mXAxisCharLength characters each. Invoked with 'this'
//bound to the tick's <text> element.
var insertLinebreaks = function (d) {
    var el = d3.select(this);
    //Mark each wrapped chunk with '@' and split on the markers
    var splitRegex = new RegExp(".{" + mXAxisCharLength + "}\\S*\\s+", "g");
    //FIX: reuse 'el' instead of re-selecting 'this', fold the redundant
    //second 'var words' declaration into one, and drop the unused
    //'length'/'line' locals from the original.
    var words = el.text().replace(splitRegex, "$&@").split(/\s+@/);
    el.text('');
    for (var i = 0; i < words.length; i++) {
        var tspan = el.append('tspan').text(words[i]);
        //Each line after the first drops 15px below the previous one
        if (i > 0)
            tspan.attr('x', 0).attr('dy', '15');
    }
};
/**
 * Creates a bar chart for a report file: one horizontal bar per indicator
 * showing the % of patients meeting the target, with optional overlay
 * lines for HFHT averages, LHIN 4 averages, and HFHT target goals.
 * Side effects: sets mReportTitle (used later by saveFile).
 * @param {numeric} selectedDate Index into array of file dates, used to select which file to create the barchart. 0 for one file.
 * @param {boolean} extraCanvas true if the bar chart should go in the secondary canvas, false or undefined to go into the main canvas
 */
function generateSnapshot(selectedDate, extraCanvas){
//NOTE(review): 'var selectedDate' re-declares the parameter; harmless in
//JS (same binding) but worth cleaning up.
var selectedDate = selectedDate || 0;
var canvas = (typeof extraCanvas === "undefined" ? mCanvas : mCanvasSnapshot);
var data = mCalculatedData[selectedDate];
if (data.length === 0) {
removeIndicatorEditor();
return;
}
//Bar chart height grows with the number of indicators
var mGraphHeight = DEFAULT_BAR_WIDTH * data.length;
// Add rectangles for percentage of patients within criteria
var arrayData = [];
var arrayDesc = [];
var arrayTooltip = [];
var arrayLabels = [];
//NOTE(review): 'typeof(data) === undefined' compares a string against the
//undefined value and is always false; the length check is also redundant
//with the guard above, so this whole branch is dead code.
if (typeof(data) === undefined || data.length == 0) {
return;
}
//Build per-indicator percentage values, "% (passed/total)" labels,
//descriptions, and tooltips
for (var i=0; i < data.length; i++) {
if (data[i]["total"] == 0) {
arrayLabels.push("0% (0/0)");
arrayData.push(0)
} else {
var percent = data[i]["passed"] / data[i]["total"] * 100;
var label = Math.round(percent) + "% (" + data[i]["passed"] + "/" + data[i]["total"]+ ")";
arrayData.push(percent);
arrayLabels.push(label);
}
//If the description is really long then insert a newline.
var desc = data[i]["desc"];
var tooltip = data[i]["tooltip"] || "";
arrayDesc.push(desc);
arrayTooltip.push(tooltip);
}
//x: 0-100% across the graph width; y: one ordinal band per indicator
xScaleSnapshot = d3.scale.linear()
.domain([0, 100])
.range([0, mGraphWidthSnapshot]);
xAxisSnapshot = d3.svg.axis()
.scale(xScaleSnapshot)
.orient("bottom")
.tickFormat(function(d) { return d + "%"; });
yScaleSnapshot = d3.scale.ordinal()
.domain(arrayDesc)
.rangeRoundBands([0, mGraphHeight], 0.1);
yAxisSnapshot = d3.svg.axis()
.scale(yScaleSnapshot)
.orient("left");
//Light vertical gridlines at each x tick
canvas.selectAll(".tickline")
.data(xScaleSnapshot.ticks(10))
.enter().append("line")
.attr("x1", xScaleSnapshot)
.attr("x2", xScaleSnapshot)
.attr("y1", 0)
.attr("y2", mGraphHeight)
.style("stroke", "#ccc")
.style("stroke-width", 1)
.style("opacity", 0.7);
// Add x axis label
canvas.append("text")
.attr("class", "xAxisLabel")
.attr("x", mGraphWidthSnapshot / 2)
.attr("y", mGraphHeight + 40)
.attr("text-anchor", "middle")
.style("font-weight", "bold")
.style("font-size", "14px")
.style("font-family", "Arial")
.text("% of Patients");
// Graph title text: "<set> Report for Doctor(s) <names> as of <date>";
// also stored in mReportTitle for use when saving to file
canvas.append("text")
.attr("class", "graphTitle")
.attr("x", mGraphWidthSnapshot / 2)
.attr("y", -DEFAULT_PADDING_TOP_SNAPSHOT / 2 + 10)
.attr("text-anchor", "middle")
.style("font-size", "14px")
.style("font-weight", "bold")
.text(function() {
var arraySelectedOnly = [];
for (var doc in mSelectedPhysicians) {
if (mSelectedPhysicians[doc] == true)
arraySelectedOnly.push(doc);
}
if (arraySelectedOnly.length == 0) {
return "No Doctors Selected";
}
var title = mCurrentIndSetName + " Report for Doctor";
if (arraySelectedOnly.length > 1) title += "s ";
else title += " ";
//Oxford-free list: "A, B and C"
for (var i = 0; i < arraySelectedOnly.length; i++) {
if (i == arraySelectedOnly.length - 2)
title += arraySelectedOnly[i] + " and ";
else if (i == arraySelectedOnly.length - 1)
title += arraySelectedOnly[i];
else title += arraySelectedOnly[i] + ", ";
}
var date = mArrayDates[selectedDate];
title += " as of " + MONTH_NAMES_SHORT[date.getMonth()] + " " + date.getDate() + " " + date.getFullYear();
//title += " (n = " + mTotalPatients[selectedDate] + ")";
//store for when saving file
mReportTitle = title;
return title;
});
//Translate graph into center of page
canvas.append("g")
.attr("transform", "translate(0, " + mGraphHeight + ")")
.style("font-family", "Arial")
.style("font-size", "14px")
.call(xAxisSnapshot);
//Y axis labels
canvas.append("g")
.attr("class", "indicatorLabel")
.style("font-family", "Arial")
.style("font-size", "14px")
.attr("id", "yaxis")
.call(yAxisSnapshot);
//Wrap long indicator descriptions onto multiple lines
canvas.selectAll('g#yaxis g text').each(function () { splitText(d3.select(this), mYAxisCharLength); });
// Add styling and attributes for major ticks in axes
var majorTicks = document.getElementsByClassName("tick major");
for (var i = 0; i < majorTicks.length; i++) {
majorTicks[i].childNodes[0].setAttribute("style", "fill:none; stroke:black");
majorTicks[i].childNodes[0].setAttribute("shape-rendering", "crispEdges");
}
// Add styling and attributes for axes paths
var paths = document.getElementsByClassName("domain");
for (var i = 0; i < paths.length; i++) {
paths[i].setAttribute("style", "fill:none; stroke:black");
paths[i].setAttribute("shape-rendering", "crispEdges");
}
if (arrayData.length == 0) {
return;
}
// Add bars for patients within criteria
//(selector matches nothing by design — standard d3 enter() pattern)
canvas.selectAll("onTargetBar")
.data(arrayData)
.enter().append("rect")
.attr("class", "onTargetBar")
.attr("width", function(d) { return xScaleSnapshot(d); })
.attr("height", yScaleSnapshot.rangeBand())
.attr("y", function (d, i) { return yScaleSnapshot(arrayDesc[i]); })
.attr("fill", DEFAULT_COLOURS[mCurrentIndSetIndex])
.attr("data-ruleindex", function (d, i) { return i.toString(); }) //used to select/modify current rule
.on("click", function(d, i) {
handleBarClick(i, this.getAttribute("y"));
return false;
})
.style("stroke", "black")
.style("stroke-width", "1px")
.attr("shape-rendering", "crispEdges")
.append("svg:title")
.text(function(d, i) { return arrayTooltip[i]; });
// Add bars for patients not within criteria
canvas.selectAll("offTargetBar")
.data(arrayData)
.enter().append("rect")
.attr("class", "offTargetBar")
.attr("width", function(d) { return xScaleSnapshot(100 - d); })
.attr("height", yScaleSnapshot.rangeBand())
.attr("x", function(d) { return xScaleSnapshot(d); })
.attr("y", function(d, i) { return yScaleSnapshot(arrayDesc[i]); })
.attr("fill", "white")
.style("stroke", "black")
.style("stroke-width", "1px")
.attr("shape-rendering", "crispEdges")
.on("click", function(d, i) {
handleBarClick(i, this.getAttribute("y"));
return false;
})
.append("svg:title")
.text(function(d, i) { return arrayTooltip[i]; });
//Display HFHT Average for this indicator (if available)
//(silver vertical line across the indicator's bar)
if (mShowHFHTAverages) {
var yScaleAverages = d3.scale.linear()
.domain([0, 100])
.range([0, mGraphHeight]);
var indexes = [];
for (var i in data){
indexes.push(data[i].index);
}
var indicatorSet = getIndicatorSet();
var hfhtaverages = [];
for (var i in indexes) {
if (indicatorSet[indexes[i]].hasOwnProperty("hfhtaverage")) {
hfhtaverages.push({"index": +i,
"hfhtavg": +indicatorSet[indexes[i]].hfhtaverage });
}
}
canvas.selectAll("HFHTaverageLine")
.data(hfhtaverages)
.enter().append("line")
.attr("class", "HFHTaverageLine")
.attr("x1", function(d) { return xScaleSnapshot(100*d.hfhtavg); })
.attr("x2", function(d) { return xScaleSnapshot(100*d.hfhtavg); })
.attr("y1", function (d, i) { return yScaleSnapshot(arrayDesc[d.index]); })
.attr("y2", function (d, i) { return yScaleSnapshot(arrayDesc[d.index])+yScaleSnapshot.rangeBand(); })
.attr("stroke-width", 2)
.attr("stroke", "silver");
/* Continued after labels are inserted!! */
}
//Display LHIN 4 Average for this indicator (if available)
//(gold vertical line across the indicator's bar)
if (mShowAverages) {
var yScaleAverages = d3.scale.linear()
.domain([0, 100])
.range([0, mGraphHeight]);
var indexes = [];
for (var i in data){
indexes.push(data[i].index);
}
var indicatorSet = getIndicatorSet();
var averages = [];
for (var i in indexes) {
if (indicatorSet[indexes[i]].hasOwnProperty("average")) {
averages.push({"index": +i,
"avg": +indicatorSet[indexes[i]].average });
}
}
canvas.selectAll("averageLine")
.data(averages)
.enter().append("line")
.attr("class", "averageLine")
.attr("x1", function(d) { return xScaleSnapshot(100*d.avg); })
.attr("x2", function(d) { return xScaleSnapshot(100*d.avg); })
.attr("y1", function (d, i) { return yScaleSnapshot(arrayDesc[d.index]); })
.attr("y2", function (d, i) { return yScaleSnapshot(arrayDesc[d.index])+yScaleSnapshot.rangeBand(); })
.attr("stroke-width", 2)
.attr("stroke", "gold");
/* Continued after labels are inserted!! */
}
//Display HFHT Targets for this indicator (if available)
//(bronze vertical line across the indicator's bar)
//NOTE(review): unlike the two branches above, "index" here stores the raw
//internal rule index (indexes[i]) rather than the display position (+i) —
//confirm whether that asymmetry is intentional.
if (mShowTargets) {
var yScaleAverages = d3.scale.linear()
.domain([0, 100])
.range([0, mGraphHeight]);
var indexes = [];
for (var i in data){
indexes.push(data[i].index);
}
var indicatorSet = getIndicatorSet();
var targets = [];
for (var i in indexes) {
if (indicatorSet[indexes[i]].hasOwnProperty("goal")) {
targets.push({"index": indexes[i],
"goal": +indicatorSet[indexes[i]].goal });
}
}
canvas.selectAll("targetLine")
.data(targets)
.enter().append("line")
.attr("class", "targetLine")
.attr("x1", function(d) { return xScaleSnapshot(100*d.goal); })
.attr("x2", function(d) { return xScaleSnapshot(100*d.goal); })
.attr("y1", function (d, i) { return yScaleSnapshot(arrayDesc[d.index]); })
.attr("y2", function (d, i) { return yScaleSnapshot(arrayDesc[d.index])+yScaleSnapshot.rangeBand(); })
.attr("stroke-width", 2)
.attr("stroke", "#CD7F32");
/* Continued after labels are inserted!! */
}
//Labels for each bar: inside the bar (white) when it is wide enough,
//just right of it (black) for narrow bars (< 20%)
canvas.selectAll("onTargetLabel")
.data(arrayData)
.enter().append("text")
.attr("class", "dataLabelSnapshot")
.attr("x", function(d, i) {
if (d<20) { return xScaleSnapshot(d+2); }
else { return xScaleSnapshot(d/2); }
})
.attr("y", function(d, i) { return yScaleSnapshot(arrayDesc[i]) + (yScaleSnapshot.rangeBand()/2); })
.attr("text-anchor", function(d) {
if (d<20) { return "start"; }
else { return "middle"; }
})
.style("font-family", "Arial")
.style("font-size", "13px")
.attr("dy", ".35em")
.style("fill", function(d, i) {
if (d<20) { return "black"; }
else { return "white"; }
})
.text(function(d, i) { return arrayLabels[i]; });
//Rectangles are added here so that they lay on top of the labels
//(invisible hit-areas that give the overlay lines hover tooltips)
if (mShowAverages) {
//For tooltip
canvas.selectAll("averageRect")
.data(averages)
.enter().append("rect")
.attr("class", "averageRect")
.attr("width", xScaleSnapshot(5))
.attr("height", yScaleSnapshot.rangeBand())
.attr("x", function (d, i) {
return xScaleSnapshot(100*d.avg - 2.5); })
.attr("y", function (d, i) {
return yScaleSnapshot(arrayDesc[d.index]); })
.attr("fill", "rgba(0, 0, 0, 0)")
.append("svg:title")
.text(function(d) { return "LHIN 4 Average (" + (d.avg*100).toFixed(1) + "%)" });
}
//Rectangles are added here so that they lay on top of the labels
if (mShowHFHTAverages) {
//For tooltip
canvas.selectAll("HFHTaverageRect")
.data(hfhtaverages)
.enter().append("rect")
.attr("class", "HFHTaverageRect")
.attr("width", xScaleSnapshot(5))
.attr("height", yScaleSnapshot.rangeBand())
.attr("x", function (d, i) {
return xScaleSnapshot(100*d.hfhtavg - 2.5); })
.attr("y", function (d, i) {
return yScaleSnapshot(arrayDesc[d.index]); })
.attr("fill", "rgba(0, 0, 0, 0)")
.append("svg:title")
.text(function(d) { return "HFHT Average (" + (d.hfhtavg*100).toFixed(1) + "%)" });
}
//Rectangles are added here so that they lay on top of the labels
if (mShowTargets) {
//For tooltip
canvas.selectAll("targetRect")
.data(targets)
.enter().append("rect")
.attr("class", "targetRect")
.attr("width", xScaleSnapshot(5))
.attr("height", yScaleSnapshot.rangeBand())
.attr("x", function (d, i) {
return xScaleSnapshot(100*d.goal - 2.5); })
.attr("y", function (d, i) {
return yScaleSnapshot(arrayDesc[d.index]); })
.attr("fill", "rgba(0, 0, 0, 0)")
.append("svg:title")
.text(function(d) { return "HFHT Target (" + (d.goal*100).toFixed(1) + "%)" });
}
}; // End of generateSnapshot
//Toggles selection highlighting on a clicked snapshot bar, makes the
//clicked indicator current, refreshes the histogram, and shows or hides
//the indicator editor depending on whether the indicator is modifiable.
function handleBarClick(i, y) {
    //Bars are identified by their y attribute (one band per indicator)
    var clickedBar = $(".onTargetBar[y="+y+"]");
    var wasSelected = (clickedBar.attr("data-selected") == "true");
    //Clear selection state and highlight on every bar first
    $(".onTargetBar")
        .attr("fill", DEFAULT_COLOURS[mCurrentIndSetIndex])
        .attr("data-selected", "false");
    if (wasSelected) {
        //Clicking an already-selected bar deselects it
        clickedBar.attr("fill", DEFAULT_COLOURS[mCurrentIndSetIndex])
            .attr("data-selected", "false");
    } else {
        clickedBar.attr("fill", HIGHLIGHT_COLOURS[mCurrentIndSetIndex])
            .attr("data-selected", "true");
    }
    mCurrentIndicator = i;
    histogram();
    //Only indicators with modifiable targets get the editor bar
    if (getIndicator().hasOwnProperty("modifiable")) {
        addIndicatorEditor();
    } else {
        removeIndicatorEditor();
    }
}
function generateTracking() {
var arrayDates = mArrayDates;
var arrayData = [];
var arrayDesc = [];
var arrayLabels = [];
for (var i=0; i < mCalculatedData.length; i++) {
arrayData.push([]);
arrayDesc.push([]);
arrayLabels.push([]);
for (var j=0; j < mCalculatedData[i].length; j++) {
if (mCalculatedData[i][j]["total"] == 0) {
arrayLabels[i].push("0% (0/0)");
continue;
}
var percent = mCalculatedData[i][j]["passed"] / mCalculatedData[i][j]["total"] * 100;
var label = Math.round(percent) + "% (" + mCalculatedData[i][j]["passed"] + "/" + mCalculatedData[i][j]["total"]+ ")";
arrayData[i].push(percent);
arrayLabels[i].push(label);
arrayDesc[i].push(mCalculatedData[i][j]["desc"]);
}
}
if (arrayData.length == 0) {
return;
}
// Create min and max dates for the time scale - 1 week before and after
var minDate = new Date(arrayDates[0]);
minDate.setDate(minDate.getDate()-30);
var maxDate = new Date(arrayDates[arrayDates.length - 1]);
maxDate.setDate(maxDate.getDate()+30);
// Create the scale for the X axis
xScaleTracking = d3.time.scale()
.domain([minDate, maxDate])
.range([0, mGraphWidthTracking]);
// To do: better date format
xAxisTracking = d3.svg.axis()
.scale(xScaleTracking)
.orient("bottom")
.tickFormat(d3.time.format("%b %Y"));
// Create Y Axis scale
yScaleTracking = d3.scale.linear()
.domain([0, 100])
.range([DEFAULT_GRAPH_HEIGHT_TRACKING, 0]);
yAxisTracking = d3.svg.axis()
.scale(yScaleTracking)
.orient("left");
// Create and append ticklines for the xAxis
mCanvas.selectAll(".xTickLine")
.data(arrayData)
.enter().append("line")
.attr("class", "tickLine xTickLine")
.attr("x1", function (d, i) { return xScaleTracking(arrayDates[i]); })
.attr("x2", function (d, i) { return xScaleTracking(arrayDates[i]); })
.attr("y1", 0)
.attr("y2", DEFAULT_GRAPH_HEIGHT_TRACKING)
.style("opacity", 0.7)
.style("stroke", "#cccccc")
.style("stroke-width", "1px");
// Create and append ticklines for the yAxis
mCanvas.selectAll(".yTickLine")
.data(yScaleTracking.ticks(10))
.enter().append("line")
.attr("class", "tickLine yTickLine")
.attr("x1", 0)
.attr("x2", mGraphWidthTracking)
.attr("y1", yScaleTracking)
.attr("y2", yScaleTracking)
.style("opacity", 0.7)
.style("stroke", "#cccccc")
.style("stroke-width", "1px");
// Append xAxis to the mCanvas
mCanvas.append("g")
.attr("class", "xAxis")
.attr("transform", "translate(0, " + DEFAULT_GRAPH_HEIGHT_TRACKING + ")")
.style("font-size", "14px")
.style("font-family", "Arial")
.call(xAxisTracking);
mCanvas.selectAll('g.xAxis g text').each(insertLinebreaks);
// Append yAxis to the mCanvas
mCanvas.append("g")
.attr("class", "yAxis")
.style("font-size", "14px")
.style("font-family", "Arial")
.call(yAxisTracking);
// Add styling and attributes for major ticks
var majorTicks = document.getElementsByClassName("tick major");
for (var i = 0; i < majorTicks.length; i++) {
// Get 'line' child nodes
majorTicks[i].childNodes[0].setAttribute("style", "fill:none; stroke:black");
majorTicks[i].childNodes[0].setAttribute("shape-rendering", "crispEdges");
}
// // Add styling and attributes for axes paths
var paths = document.getElementsByClassName("domain");
for (var i = 0; i < paths.length; i++) {
// Get child nodes within group
paths[i].setAttribute("style", "fill:none; stroke:black");
paths[i].setAttribute("shape-rendering", "crispEdges");
paths[i].setAttribute("vector-effect", "non-scaling-stroke");
}
// Append lines between data points
mCanvas.selectAll(".dataPointConnector")
.data(new Array(arrayData.length - 1))
.enter().append("line")
.attr("class", "dataPointConnector")
.attr("x1", function (d, i) { return xScaleTracking(arrayDates[i]); })
.attr("x2", function (d, i) { return xScaleTracking(arrayDates[i + 1]); })
.attr("y1", function (d, i) { return yScaleTracking(arrayData[i][mCurrentIndicator]); })
.attr("y2", function (d, i) { return yScaleTracking(arrayData[i + 1][mCurrentIndicator]); })
.attr("stroke", DEFAULT_COLOURS[mCurrentIndSetIndex])
.attr("stroke-width", 2);
// Append data points
mCanvas.selectAll(".dataPoint")
.data(arrayData)
.enter().append("circle")
.attr("class", "dataPoint")
.attr("cx", function (d, i) { return xScaleTracking(arrayDates[i]); })
.attr("cy", function(d, i) { return yScaleTracking(arrayData[i][mCurrentIndicator]); })
.attr("r", 5)
.attr("fill", DEFAULT_COLOURS[mCurrentIndSetIndex])
.on("mouseover", function(d) {
d3.select(this)
.attr("r", 7)
.style("fill", HIGHLIGHT_COLOURS[mCurrentIndSetIndex]);
})
.on("mouseout", function(d) {
d3.select(this)
.attr("r", 5)
.style("fill", DEFAULT_COLOURS[mCurrentIndSetIndex]);
})
.on("click", function(d, i) {
d3.selectAll(".dataPoint")
.attr("class", "dataPoint")
.attr("r", 5)
.style("fill", DEFAULT_COLOURS[mCurrentIndSetIndex])
.on("mouseout", function(d) {
d3.select(this)
.attr("r", 5)
.style("fill", DEFAULT_COLOURS[mCurrentIndSetIndex]);
});
d3.select(this).attr("class", "dataPoint selected")
.attr("r", 7)
.style("fill", HIGHLIGHT_COLOURS[mCurrentIndSetIndex])
.on("mouseout", function() {});
mCurrentDateIndex = i;
var scroll = $(window).scrollTop();
generateExtraCanvas();
histogram();
$(window).scrollTop(scroll);
});
// Add x axis label
mCanvas.append("text")
.attr("class", "xAxisLabel")
.attr("x", mGraphWidthTracking / 2)
.attr("y", DEFAULT_GRAPH_HEIGHT_TRACKING + 40)
.attr("text-anchor", "middle")
.style("font-weight", "bold")
.style("font-size", "14px")
.style("font-family", "Arial")
.text("Date");
// Add y axis label
mCanvas.append("text")
.attr("class", "yAxisLabel")
.attr("transform", "rotate(-90)")
.attr("x", -DEFAULT_GRAPH_HEIGHT_TRACKING / 2)
.attr("y", -DEFAULT_PADDING_LEFT_TRACKING / 2)
.attr("text-anchor", "middle")
.style("font-weight", "bold")
.style("font-size", "14px")
.style("font-family", "Arial")
.text("% of patients");
// Add graph title
mCanvas.append("text")
.attr("class", "graphTitle")
.attr("x", mGraphWidthTracking / 2)
.attr("y", -DEFAULT_PADDING_TOP_TRACKING / 2)
.attr("text-anchor", "middle")
.style("font-size", "14px")
.style("font-family", "sans-serif")
.style("font-weight", "bold")
.text(function() {
var indicator = mCalculatedData[0][mCurrentIndicator].desc;
var title = indicator + " for Doctor";
var arraySelectedOnly = [];
for (var doc in mSelectedPhysicians) {
if (mSelectedPhysicians[doc] == true)
arraySelectedOnly.push(doc);
}
if (arraySelectedOnly.length > 1) title += "s ";
else title += " ";
for (var i = 0; i < arraySelectedOnly.length; i++) {
if (i == arraySelectedOnly.length - 2)
title += arraySelectedOnly[i] + " and ";
else if (i == arraySelectedOnly.length - 1)
title += arraySelectedOnly[i];
else title += arraySelectedOnly[i] + ", ";
}
mReportTitle = title;
return title;
});
mCanvas.selectAll('.graphTitle').each(function () { splitText(d3.select(this), 180, true); });
var m = mCurrentIndicator;
// Add labels for data points
mCanvas.selectAll(".dataLabelTracking")
.data(arrayData)
.enter().append("text")
.attr("class", "dataLabelTracking")
.attr("x", function(d, i) { return xScaleTracking(arrayDates[i]); })
.attr("y", function(d, i) {
// If small value, place label above point
if ((arrayData[i][m]) < 10)
return yScaleTracking(arrayData[i][m]) - 15;
// Else
else {
// For first data point
if (i == 0) {
// If adjacent point is above, place label below, vice versa
if (arrayData[1][m] >= arrayData[i][m])
return yScaleTracking(arrayData[i][m]) + 25;
else return yScaleTracking(arrayData[i][m]) - 15;
}
// For last point, compare with second last point
else if (i == arrayData.length - 1) {
if (arrayData[arrayData.length - 2][m] >= arrayData[i][m])
return yScaleTracking(arrayData[i][m]) + 25;
else return yScaleTracking(arrayData[i][m]) - 15;
}
// Else all points in between, check both sides
else {
// If both adjacent points are above, place below
if (arrayData[i - 1][m] >= arrayData[i][m] && arrayData[i + 1][m] >= arrayData[i][m])
return yScaleTracking(arrayData[i][m]) + 25;
// Else if both are below, place above
else if (arrayData[i - 1][m] < arrayData[i][m] && arrayData[i + 1][m] < arrayData[i][m])
return yScaleTracking(arrayData[i][m]) - 15;
// Else just place above
else return yScaleTracking(arrayData[i][m]) - 15;
}
}
})
.attr("text-anchor", "middle")
.style("fill", "black")
.style("font-size", "13px")
.style("font-family", "Arial")
.text(function(d, i) {
return arrayLabels[i][m];
});
if (mCanvasSnapshot != null) {
generateExtraCanvas();
}
};
//Rebuilds the secondary (snapshot) canvas. In tracking mode it is filled
//with the bar chart for the currently selected date plus its histogram,
//and scrolled into view the first time it appears.
function generateExtraCanvas() {
    //Clear both secondary containers before rebuilding
    $("#canvasContainer_histogram").empty();
    $("#canvasContainer_snapshot").empty();
    //Recreate the extra canvas
    mCanvasSnapshot = d3.select("#canvasContainer_snapshot").append("svg")
        .attr("id", "canvasSVGExtra")
        .attr("width", mCanvasWidth)
        .attr("height", mCanvasHeight)
        .style("border", "1px solid lightgray")
        .append("g")
        .attr("class", "g_main")
        .attr("transform", "translate(" + mSnapshotPaddingLeft + ", " + DEFAULT_PADDING_TOP_SNAPSHOT + ")");
    //Populate it with the selected date's snapshot (tracking mode only)
    if (mMode == "tracking") {
        generateSnapshot(mCurrentDateIndex, true);
        histogram();
    }
    //Scroll to the new canvas, but only the first time it appears off-screen
    if (!$("#canvasContainer_snapshot").inViewport() && mFirstScrollView) {
        $("#canvasContainer_snapshot").scrollView();
        mFirstScrollView = false;
    }
}
/**
 * Toggles the "% (passed/total)" data labels on the current chart: removes
 * them if present, otherwise rebuilds and re-appends them.
 * NOTE(review): the label-building code here duplicates the logic in
 * generateSnapshot / generateTracking — keep the three in sync, or better,
 * extract a shared helper.
 */
function toggleDataLabels() {
var arrayData = [];
var arrayDesc = [];
var arrayLabels = [];
if (mMode === "snapshot") {
//Labels already present -> remove them (toggle off)
if (d3.selectAll(".dataLabelSnapshot")[0].length > 0) {
d3.selectAll(".dataLabelSnapshot").remove();
return;
} else {
//Rebuild percentage/label/description arrays from the report data
var data = mCalculatedData[0];
for (var i=0; i < data.length; i++) {
if (data[i]["total"] == 0) {
arrayLabels.push("0% (0/0)");
arrayData.push(0);
arrayDesc.push(data[i]["desc"]);
continue;
}
var percent = data[i]["passed"] / data[i]["total"] * 100;
arrayData.push(percent);
var label = Math.round(percent) + "% (" + data[i]["passed"] + "/" + data[i]["total"]+ ")";
arrayLabels.push(label);
arrayDesc.push(data[i]["desc"]);
}
if (arrayData.length == 0) {
return;
}
//Same placement rules as generateSnapshot: inside the bar (white)
//when wide enough, right of the bar (black) when narrow (< 20%)
mCanvas.selectAll("onTargetLabel")
.data(arrayData)
.enter().append("text")
.attr("class", "dataLabelSnapshot")
.attr("x", function(d, i) {
if (d<20) { return xScaleSnapshot(d+2); }
else { return xScaleSnapshot(d/2); }
})
.attr("y", function(d, i) { return yScaleSnapshot(arrayDesc[i]) + (yScaleSnapshot.rangeBand()/2); })
.attr("text-anchor", function(d) {
if (d<20) { return "start"; }
else { return "middle"; }
})
.style("font-family", "Arial")
.style("font-size", "13px")
.attr("dy", ".35em")
.style("fill", function(d, i) {
if (d<20) { return "black"; }
else { return "white"; }
})
.text(function(d, i) { return arrayLabels[i]; });
}
} else {
//Tracking mode: labels already present -> remove them (toggle off)
if (d3.selectAll(".dataLabelTracking")[0].length > 0) {
d3.selectAll(".dataLabelTracking").remove();
return;
} else {
//Rebuild per-date label arrays (outer index = date, inner = indicator)
for (var i=0; i < mCalculatedData.length; i++) {
arrayData.push([]);
arrayDesc.push([]);
arrayLabels.push([]);
for (var j=0; j < mCalculatedData[i].length; j++) {
if (mCalculatedData[i][j]["total"] == 0) {
arrayLabels[i].push("0% (0/0)");
continue;
}
var percent = mCalculatedData[i][j]["passed"] / mCalculatedData[i][j]["total"] * 100;
arrayData[i].push(percent);
var label = Math.round(percent) + "% (" + mCalculatedData[i][j]["passed"] + "/" + mCalculatedData[i][j]["total"]+ ")";
arrayLabels[i].push(label);
arrayDesc[i].push(mCalculatedData[i][j]["desc"]);
}
}
if (arrayData.length == 0) {
return;
}
var m = mCurrentIndicator;
mCanvas.selectAll(".dataLabelTracking")
.data(arrayData)
.enter().append("text")
.attr("class", "dataLabelTracking")
.attr("x", function(d, i) { return xScaleTracking(mArrayDates[i]); })
.attr("y", function(d, i) {
//Algorithm to decide whether to place the labels above or below the point
//Essentially, if they point is less than the previous one, place the label below
//Otherwise place it above (unless the point is very small -- ie not enough room below for label)
// If small value, place label above point
if ((arrayData[i][m]) < 10)
return yScaleTracking(arrayData[i][m]) - 15;
// Else
else {
// For first data point
if (i == 0) {
// If adjacent point is above, place label below, vice versa
if (arrayData[1][m] >= arrayData[i][m])
return yScaleTracking(arrayData[i][m]) + 25;
else return yScaleTracking(arrayData[i][m]) - 15;
}
// For last point, compare with second last point
else if (i == arrayData.length - 1) {
if (arrayData[arrayData.length - 2][m] >= arrayData[i][m])
return yScaleTracking(arrayData[i][m]) + 25;
else return yScaleTracking(arrayData[i][m]) - 15;
}
// Else all points in between, check both sides
else {
// If both adjacent points are above, place below
if (arrayData[i - 1][m] >= arrayData[i][m] && arrayData[i + 1][m] >= arrayData[i][m])
return yScaleTracking(arrayData[i][m]) + 25;
// Else if both are below, place above
else if (arrayData[i - 1][m] < arrayData[i][m] && arrayData[i + 1][m] < arrayData[i][m])
return yScaleTracking(arrayData[i][m]) - 15;
// Else just place above
else return yScaleTracking(arrayData[i][m]) - 15;
}
}
})
.attr("text-anchor", "middle")
.style("fill", "black")
.style("font-size", "13px")
.style("font-family", "Arial")
.text(function(d, i) {
return arrayLabels[i][m];
});
}
}
}; //end toggleDataLabels
function histogram() {
//data contains an array of values and axis label(s)
//[ [values], label]
//label can be an array of [x-label, y-label] or just a x-label for histograms
//values can be 1d for histogram or 2d for a scatter plot [ [x-values], [y-values]]
var data = mdsIndicators.getPlotData(getIndicator(), mCurrentDateIndex);
if (data === null) {
$("#canvasContainer_histogram").empty();
return;
}
var values = data[0];
var label = data[1];
var svg = $("#canvasContainer_histogram");
//Empty the extra canvas
svg.empty();
//Recreate the extra canvas
svg = d3.select("#canvasContainer_histogram").append("svg")
.attr("id", "canvasSVGExtra")
.attr("width", mCanvasWidth)
.attr("height", DEFAULT_CANVAS_HEIGHT)
.style("border", "1px solid lightgray")
.append("g")
.attr("class", "g_main")
.attr("transform", "translate(" + DEFAULT_PADDING_LEFT_TRACKING + ", " + DEFAULT_PADDING_TOP_TRACKING + ")");
// A formatter for counts.
var formatCount = d3.format(",.0f");
var xScale = d3.scale.linear()
.domain(d3.extent(values))
.range([0, mGraphWidthTracking])
.nice();
var xAxis = d3.svg.axis()
.scale(xScale)
.orient("bottom");
// Generate a histogram using twenty uniformly-spaced bins.
var histdata = d3.layout.histogram()
.bins(xScale.ticks(20))
(values);
var yScale = d3.scale.linear()
.domain([0, d3.max(histdata, function(d) { return d.y; })])
.range([DEFAULT_GRAPH_HEIGHT_TRACKING, 0]);
var yAxis = d3.svg.axis()
.scale(yScale)
.orient("left")
.ticks(10)
.tickFormat(d3.format("d"))
.tickSubdivide(0);
// Add x axis label
svg.append("text")
.attr("class", "xaxis xAxisLabel")
.attr("x", mGraphWidthTracking / 2)
.attr("y", DEFAULT_GRAPH_HEIGHT_TRACKING + 40)
.attr("text-anchor", "middle")
.style("font-weight", "bold")
.style("font-size", "14px")
.style("font-family", "Arial")
.text(label);
// Add y axis label
svg.append("text")
.attr("class", "yAxisLabel")
.attr("transform", "rotate(-90)")
.attr("x", -DEFAULT_GRAPH_HEIGHT_TRACKING / 2)
.attr("y", -DEFAULT_PADDING_LEFT_TRACKING / 2)
.attr("text-anchor", "middle")
.style("font-weight", "bold")
.style("font-size", "14px")
.style("font-family", "Arial")
.text("# of Patients");
var date = mArrayDates[mCurrentDateIndex];
formattedDate = MONTH_NAMES_SHORT[date.getMonth()] + " " + date.getDate() + " " + date.getFullYear();
// Add graph title
svg.append("text")
.attr("class", "graphTitle")
.attr("x", mGraphWidthTracking / 2)
.attr("y", -DEFAULT_PADDING_TOP_TRACKING / 2)
.attr("text-anchor", "middle")
.style("font-size", "14px")
.style("font-family", "sans-serif")
.style("font-weight", "bold")
.text(getIndicator().desc() + " as of " + formattedDate);
//Add xaxis
svg.append("g")
.attr("class", "xaxis")
.attr("transform", "translate(0, " + DEFAULT_GRAPH_HEIGHT_TRACKING + ")")
.call(xAxis);
var barWidth = (mGraphWidthTracking / histdata.length) - 4
// Align xaxis labels with center of bar (opposed to lefthand side)
// This is accomplished by moving them by 1/2 the bar width
svg.selectAll(".xaxis text")
.attr("dx", barWidth / 2);
//Add yaxis
svg.append("g")
.attr("class", "yaxis")
.call(yAxis);
var bar = svg.selectAll(".bar")
.data(histdata)
.enter().append("g")
.attr("class", "bar")
.attr("transform", function(d) { return "translate(" + xScale(d.x) + "," + yScale(d.y) + ")"; });
bar.append("rect")
.attr("x", 1)
.attr("fill", DEFAULT_COLOURS[mCurrentIndSetIndex])
.attr("width", barWidth)
//.attr("width", x(data[0].dx) - 1)
.attr("height", function(d) { return DEFAULT_GRAPH_HEIGHT_TRACKING - yScale(d.y); })
.style("stroke", "black")
.style("stroke-width", "1px")
.attr("shape-rendering", "crispEdges");
// Add styling and attributes for axes paths
var paths = document.getElementsByClassName("domain");
for (var i = 0; i < paths.length; i++) {
paths[i].setAttribute("style", "fill:none; stroke:black");
paths[i].setAttribute("shape-rendering", "crispEdges");
}
}; //end histogram
return {
generateCharts: generateCharts,
clearCanvas: clearCanvas,
mode: mMode,
histogram: histogram,
setHasRosteredField: function(x) { hasRosteredField = x; },
hasRosteredField: function() { return hasRosteredField; },
rosteredOnly: function() { return mRosteredOnly; }
};
})();<|fim▁end|> | |
<|file_name|>qaudioinput_win32_p.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation (qt-info@nokia.com)
**
** This file is part of the QtMultimedia module of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** GNU Lesser General Public License Usage
** This file may be used under the terms of the GNU Lesser General Public
** License version 2.1 as published by the Free Software Foundation and
** appearing in the file LICENSE.LGPL included in the packaging of this
** file. Please review the following information to ensure the GNU Lesser
** General Public License version 2.1 requirements will be met:
** http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Nokia gives you certain additional
** rights. These rights are described in the Nokia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU General
** Public License version 3.0 as published by the Free Software Foundation
** and appearing in the file LICENSE.GPL included in the packaging of this
** file. Please review the following information to ensure the GNU General
** Public License version 3.0 requirements will be met:
** http://www.gnu.org/copyleft/gpl.html.
**
** Other Usage
** Alternatively, this file may be used in accordance with the terms and
** conditions contained in a signed written agreement between you and Nokia.
**
**
**
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
//
// W A R N I N G
// -------------
//
// This file is not part of the Qt API. It exists for the convenience
// of other Qt classes. This header file may change from version to
// version without notice, or even be removed.
//
// We mean it.
//
#include "qaudioinput_win32_p.h"
QT_BEGIN_NAMESPACE
//#define DEBUG_AUDIO 1
QAudioInputPrivate::QAudioInputPrivate(const QByteArray &device, const QAudioFormat& audioFormat):
settings(audioFormat)
{
bytesAvailable = 0;
buffer_size = 0;
period_size = 0;
m_device = device;
totalTimeValue = 0;
intervalTime = 1000;
errorState = QAudio::NoError;
deviceState = QAudio::StoppedState;
audioSource = 0;
pullMode = true;
resuming = false;
finished = false;
}
QAudioInputPrivate::~QAudioInputPrivate()
{
stop();
}
void QT_WIN_CALLBACK QAudioInputPrivate::waveInProc( HWAVEIN hWaveIn, UINT uMsg,
DWORD dwInstance, DWORD dwParam1, DWORD dwParam2 )
{
Q_UNUSED(dwParam1)
Q_UNUSED(dwParam2)
Q_UNUSED(hWaveIn)
QAudioInputPrivate* qAudio;
qAudio = (QAudioInputPrivate*)(dwInstance);
if(!qAudio)
return;
QMutexLocker(&qAudio->mutex);
switch(uMsg) {
case WIM_OPEN:
break;
case WIM_DATA:
if(qAudio->waveFreeBlockCount > 0)
qAudio->waveFreeBlockCount--;
qAudio->feedback();
break;
case WIM_CLOSE:
qAudio->finished = true;
break;
default:
return;
}
}
WAVEHDR* QAudioInputPrivate::allocateBlocks(int size, int count)
{
int i;
unsigned char* buffer;
WAVEHDR* blocks;
DWORD totalBufferSize = (size + sizeof(WAVEHDR))*count;
if((buffer=(unsigned char*)HeapAlloc(GetProcessHeap(),HEAP_ZERO_MEMORY,
totalBufferSize)) == 0) {
qWarning("QAudioInput: Memory allocation error");
return 0;
}
blocks = (WAVEHDR*)buffer;
buffer += sizeof(WAVEHDR)*count;
for(i = 0; i < count; i++) {
blocks[i].dwBufferLength = size;
blocks[i].lpData = (LPSTR)buffer;
blocks[i].dwBytesRecorded=0;
blocks[i].dwUser = 0L;
blocks[i].dwFlags = 0L;
blocks[i].dwLoops = 0L;
result = waveInPrepareHeader(hWaveIn,&blocks[i], sizeof(WAVEHDR));
if(result != MMSYSERR_NOERROR) {
qWarning("QAudioInput: Can't prepare block %d",i);
return 0;
}
buffer += size;
}
return blocks;
}
void QAudioInputPrivate::freeBlocks(WAVEHDR* blockArray)
{
WAVEHDR* blocks = blockArray;
int count = buffer_size/period_size;
for(int i = 0; i < count; i++) {
waveInUnprepareHeader(hWaveIn,blocks, sizeof(WAVEHDR));
blocks++;
}
HeapFree(GetProcessHeap(), 0, blockArray);
}
QAudio::Error QAudioInputPrivate::error() const
{
return errorState;
}
QAudio::State QAudioInputPrivate::state() const
{
return deviceState;
}
QAudioFormat QAudioInputPrivate::format() const
{
return settings;
}
QIODevice* QAudioInputPrivate::start(QIODevice* device)
{
if(deviceState != QAudio::StoppedState)
close();
if(!pullMode && audioSource) {
delete audioSource;
}
if(device) {
//set to pull mode
pullMode = true;
audioSource = device;
deviceState = QAudio::ActiveState;
} else {
//set to push mode
pullMode = false;
deviceState = QAudio::IdleState;
audioSource = new InputPrivate(this);
audioSource->open(QIODevice::ReadOnly | QIODevice::Unbuffered);
}
if( !open() )
return 0;
emit stateChanged(deviceState);
return audioSource;
}
void QAudioInputPrivate::stop()
{
if(deviceState == QAudio::StoppedState)
return;
close();
emit stateChanged(deviceState);
}
bool QAudioInputPrivate::open()
{
#ifdef DEBUG_AUDIO
QTime now(QTime::currentTime());
qDebug()<<now.second()<<"s "<<now.msec()<<"ms :open()";
#endif
header = 0;
period_size = 0;
if (!settings.isValid()) {
qWarning("QAudioInput: open error, invalid format.");
} else if (settings.channels() <= 0) {
qWarning("QAudioInput: open error, invalid number of channels (%d).",
settings.channels());
} else if (settings.sampleSize() <= 0) {
qWarning("QAudioInput: open error, invalid sample size (%d).",
settings.sampleSize());
} else if (settings.frequency() < 8000 || settings.frequency() > 48000) {
qWarning("QAudioInput: open error, frequency out of range (%d).", settings.frequency());
} else if (buffer_size == 0) {
buffer_size
= (settings.frequency()
* settings.channels()
* settings.sampleSize()
#ifndef Q_OS_WINCE // Default buffer size, 200ms, default period size is 40ms
+ 39) / 40;
period_size = buffer_size / 5;
} else {
period_size = buffer_size / 5;
#else // For wince reduce size to 40ms for buffer size and 20ms period
+ 199) / 200;
period_size = buffer_size / 2;
} else {
period_size = buffer_size / 2;
#endif
}
if (period_size == 0) {
errorState = QAudio::OpenError;
deviceState = QAudio::StoppedState;
emit stateChanged(deviceState);
return false;
}
timeStamp.restart();
elapsedTimeOffset = 0;
wfx.nSamplesPerSec = settings.frequency();
wfx.wBitsPerSample = settings.sampleSize();
wfx.nChannels = settings.channels();
wfx.cbSize = 0;
wfx.wFormatTag = WAVE_FORMAT_PCM;
wfx.nBlockAlign = (wfx.wBitsPerSample >> 3) * wfx.nChannels;
wfx.nAvgBytesPerSec = wfx.nBlockAlign * wfx.nSamplesPerSec;
UINT_PTR devId = WAVE_MAPPER;
WAVEINCAPS wic;
unsigned long iNumDevs,ii;
iNumDevs = waveInGetNumDevs();
for(ii=0;ii<iNumDevs;ii++) {
if(waveInGetDevCaps(ii, &wic, sizeof(WAVEINCAPS))
== MMSYSERR_NOERROR) {
QString tmp;
tmp = QString((const QChar *)wic.szPname);
if(tmp.compare(QLatin1String(m_device)) == 0) {
devId = ii;
break;
}
}
}
if(waveInOpen(&hWaveIn, devId, &wfx,
(DWORD_PTR)&waveInProc,
(DWORD_PTR) this,
CALLBACK_FUNCTION) != MMSYSERR_NOERROR) {
errorState = QAudio::OpenError;
deviceState = QAudio::StoppedState;
emit stateChanged(deviceState);
qWarning("QAudioInput: failed to open audio device");
return false;
}
waveBlocks = allocateBlocks(period_size, buffer_size/period_size);
if(waveBlocks == 0) {
errorState = QAudio::OpenError;
deviceState = QAudio::StoppedState;
emit stateChanged(deviceState);
qWarning("QAudioInput: failed to allocate blocks. open failed");
return false;
}
mutex.lock();
waveFreeBlockCount = buffer_size/period_size;
mutex.unlock();
waveCurrentBlock = 0;
for(int i=0; i<buffer_size/period_size; i++) {
result = waveInAddBuffer(hWaveIn, &waveBlocks[i], sizeof(WAVEHDR));
if(result != MMSYSERR_NOERROR) {
qWarning("QAudioInput: failed to setup block %d,err=%d",i,result);
errorState = QAudio::OpenError;
deviceState = QAudio::StoppedState;
emit stateChanged(deviceState);
return false;
}
}
result = waveInStart(hWaveIn);
if(result) {
qWarning("QAudioInput: failed to start audio input");
errorState = QAudio::OpenError;
deviceState = QAudio::StoppedState;
emit stateChanged(deviceState);
return false;
}
timeStampOpened.restart();
elapsedTimeOffset = 0;
totalTimeValue = 0;
errorState = QAudio::NoError;
return true;
}
void QAudioInputPrivate::close()
{
if(deviceState == QAudio::StoppedState)
return;
deviceState = QAudio::StoppedState;
waveInReset(hWaveIn);
waveInClose(hWaveIn);
int count = 0;
while(!finished && count < 500) {
count++;
Sleep(10);
}
mutex.lock();
for(int i=0; i<waveFreeBlockCount; i++)
waveInUnprepareHeader(hWaveIn,&waveBlocks[i],sizeof(WAVEHDR));
freeBlocks(waveBlocks);
mutex.unlock();
}
int QAudioInputPrivate::bytesReady() const
{
if(period_size == 0 || buffer_size == 0)
return 0;
int buf = ((buffer_size/period_size)-waveFreeBlockCount)*period_size;
if(buf < 0)
buf = 0;
return buf;
}
qint64 QAudioInputPrivate::read(char* data, qint64 len)
{
bool done = false;
char* p = data;
qint64 l = 0;
qint64 written = 0;
while(!done) {
// Read in some audio data
if(waveBlocks[header].dwBytesRecorded > 0 && waveBlocks[header].dwFlags & WHDR_DONE) {
if(pullMode) {
l = audioSource->write(waveBlocks[header].lpData,
waveBlocks[header].dwBytesRecorded);
#ifdef DEBUG_AUDIO
qDebug()<<"IN: "<<waveBlocks[header].dwBytesRecorded<<", OUT: "<<l;
#endif
if(l < 0) {
// error
qWarning("QAudioInput: IOError");
errorState = QAudio::IOError;
} else if(l == 0) {
// cant write to IODevice
qWarning("QAudioInput: IOError, can't write to QIODevice");
errorState = QAudio::IOError;
} else {
totalTimeValue += waveBlocks[header].dwBytesRecorded;
errorState = QAudio::NoError;
if (deviceState != QAudio::ActiveState) {
deviceState = QAudio::ActiveState;
emit stateChanged(deviceState);
}
resuming = false;
}
} else {
l = qMin<qint64>(len, waveBlocks[header].dwBytesRecorded);
// push mode
memcpy(p, waveBlocks[header].lpData, l);
len -= l;
#ifdef DEBUG_AUDIO
qDebug()<<"IN: "<<waveBlocks[header].dwBytesRecorded<<", OUT: "<<l;
#endif
totalTimeValue += waveBlocks[header].dwBytesRecorded;
errorState = QAudio::NoError;
if (deviceState != QAudio::ActiveState) {
deviceState = QAudio::ActiveState;
emit stateChanged(deviceState);
}
resuming = false;
}
} else {
//no data, not ready yet, next time
break;
}
waveInUnprepareHeader(hWaveIn,&waveBlocks[header], sizeof(WAVEHDR));
mutex.lock();
waveFreeBlockCount++;
mutex.unlock();
waveBlocks[header].dwBytesRecorded=0;
waveBlocks[header].dwFlags = 0L;
result = waveInPrepareHeader(hWaveIn,&waveBlocks[header], sizeof(WAVEHDR));
if(result != MMSYSERR_NOERROR) {
result = waveInPrepareHeader(hWaveIn,&waveBlocks[header], sizeof(WAVEHDR));
qWarning("QAudioInput: failed to prepare block %d,err=%d",header,result);
errorState = QAudio::IOError;
mutex.lock();
waveFreeBlockCount--;
mutex.unlock();
return 0;
}
result = waveInAddBuffer(hWaveIn, &waveBlocks[header], sizeof(WAVEHDR));
if(result != MMSYSERR_NOERROR) {
qWarning("QAudioInput: failed to setup block %d,err=%d",header,result);
errorState = QAudio::IOError;
mutex.lock();
waveFreeBlockCount--;
mutex.unlock();
return 0;
}
header++;
if(header >= buffer_size/period_size)
header = 0;
p+=l;
mutex.lock();
if(!pullMode) {
if(len < period_size || waveFreeBlockCount == buffer_size/period_size)
done = true;
} else {
if(waveFreeBlockCount == buffer_size/period_size)
done = true;
}
mutex.unlock();
written+=l;
}
#ifdef DEBUG_AUDIO
qDebug()<<"read in len="<<written;
#endif
return written;
}
void QAudioInputPrivate::resume()
{
if(deviceState == QAudio::SuspendedState) {
deviceState = QAudio::ActiveState;
for(int i=0; i<buffer_size/period_size; i++) {
result = waveInAddBuffer(hWaveIn, &waveBlocks[i], sizeof(WAVEHDR));
if(result != MMSYSERR_NOERROR) {
qWarning("QAudioInput: failed to setup block %d,err=%d",i,result);
errorState = QAudio::OpenError;
deviceState = QAudio::StoppedState;
emit stateChanged(deviceState);
return;
}
}
mutex.lock();
waveFreeBlockCount = buffer_size/period_size;
mutex.unlock();
waveCurrentBlock = 0;
header = 0;
resuming = true;
waveInStart(hWaveIn);
QTimer::singleShot(20,this,SLOT(feedback()));
emit stateChanged(deviceState);
}
}
void QAudioInputPrivate::setBufferSize(int value)
{
buffer_size = value;
}
int QAudioInputPrivate::bufferSize() const
{
return buffer_size;
}
int QAudioInputPrivate::periodSize() const
{
return period_size;
}
void QAudioInputPrivate::setNotifyInterval(int ms)
{
intervalTime = qMax(0, ms);
}
int QAudioInputPrivate::notifyInterval() const
{
return intervalTime;
}
qint64 QAudioInputPrivate::processedUSecs() const
{
if (deviceState == QAudio::StoppedState)<|fim▁hole|> (settings.channels()*(settings.sampleSize()/8)) /
settings.frequency();
return result;
}
void QAudioInputPrivate::suspend()
{
if(deviceState == QAudio::ActiveState) {
waveInReset(hWaveIn);
deviceState = QAudio::SuspendedState;
emit stateChanged(deviceState);
}
}
void QAudioInputPrivate::feedback()
{
#ifdef DEBUG_AUDIO
QTime now(QTime::currentTime());
qDebug()<<now.second()<<"s "<<now.msec()<<"ms :feedback() INPUT "<<this;
#endif
if(!(deviceState==QAudio::StoppedState||deviceState==QAudio::SuspendedState))
QMetaObject::invokeMethod(this, "deviceReady", Qt::QueuedConnection);
}
bool QAudioInputPrivate::deviceReady()
{
bytesAvailable = bytesReady();
#ifdef DEBUG_AUDIO
QTime now(QTime::currentTime());
qDebug()<<now.second()<<"s "<<now.msec()<<"ms :deviceReady() INPUT";
#endif
if(deviceState != QAudio::ActiveState && deviceState != QAudio::IdleState)
return true;
if(pullMode) {
// reads some audio data and writes it to QIODevice
read(0, buffer_size);
} else {
// emits readyRead() so user will call read() on QIODevice to get some audio data
InputPrivate* a = qobject_cast<InputPrivate*>(audioSource);
a->trigger();
}
if(intervalTime && (timeStamp.elapsed() + elapsedTimeOffset) > intervalTime) {
emit notify();
elapsedTimeOffset = timeStamp.elapsed() + elapsedTimeOffset - intervalTime;
timeStamp.restart();
}
return true;
}
qint64 QAudioInputPrivate::elapsedUSecs() const
{
if (deviceState == QAudio::StoppedState)
return 0;
return timeStampOpened.elapsed()*1000;
}
void QAudioInputPrivate::reset()
{
close();
}
InputPrivate::InputPrivate(QAudioInputPrivate* audio)
{
audioDevice = qobject_cast<QAudioInputPrivate*>(audio);
}
InputPrivate::~InputPrivate() {}
qint64 InputPrivate::readData( char* data, qint64 len)
{
// push mode, user read() called
if(audioDevice->deviceState != QAudio::ActiveState &&
audioDevice->deviceState != QAudio::IdleState)
return 0;
// Read in some audio data
return audioDevice->read(data,len);
}
qint64 InputPrivate::writeData(const char* data, qint64 len)
{
Q_UNUSED(data)
Q_UNUSED(len)
emit readyRead();
return 0;
}
void InputPrivate::trigger()
{
emit readyRead();
}
QT_END_NAMESPACE<|fim▁end|> | return 0;
qint64 result = qint64(1000000) * totalTimeValue / |
<|file_name|>operator-filter.pipe.ts<|end_file_name|><|fim▁begin|>import { Pipe, PipeTransform } from '@angular/core';
import { Project } from 'app/models/project';
@Pipe({
name: 'operatorFilter'
})
export class OperatorFilterPipe implements PipeTransform {
transform(value: Project[], q: string) {
if (!q) {
return value;
}
return value.filter(item => -1 < item.operator.toLowerCase().indexOf(q.toLowerCase()));<|fim▁hole|><|fim▁end|> | }
} |
<|file_name|>dromaeo.py<|end_file_name|><|fim▁begin|># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from metrics import power
from telemetry import test
from telemetry.core import util
from telemetry.page import page_measurement
from telemetry.page import page_set
class _DromaeoMeasurement(page_measurement.PageMeasurement):
def __init__(self):
super(_DromaeoMeasurement, self).__init__()
self._power_metric = power.PowerMetric()
def CustomizeBrowserOptions(self, options):
power.PowerMetric.CustomizeBrowserOptions(options)
def DidNavigateToPage(self, page, tab):
self._power_metric.Start(page, tab)
def MeasurePage(self, page, tab, results):
tab.WaitForJavaScriptExpression(
'window.document.cookie.indexOf("__done=1") >= 0', 600)
self._power_metric.Stop(page, tab)
self._power_metric.AddResults(tab, results)
js_get_results = 'JSON.stringify(window.automation.GetResults())'
print js_get_results
score = eval(tab.EvaluateJavaScript(js_get_results))
def Escape(k):
chars = [' ', '-', '/', '(', ')', '*']
for c in chars:
k = k.replace(c, '_')
return k
suffix = page.url[page.url.index('?') + 1 : page.url.index('&')]
for k, v in score.iteritems():
data_type = 'unimportant'
if k == suffix:
data_type = 'default'
results.Add(Escape(k), 'runs/s', float(v), data_type=data_type)
class _DromaeoBenchmark(test.Test):
"""A base class for Dromaeo benchmarks."""
test = _DromaeoMeasurement
def CreatePageSet(self, options):
"""Makes a PageSet for Dromaeo benchmarks."""
# Subclasses are expected to define a class member called query_param.
if not hasattr(self, 'query_param'):
raise NotImplementedError('query_param not in Dromaeo benchmark.')
url = 'file://index.html?%s&automated' % self.query_param
dromaeo_dir = os.path.join(util.GetChromiumSrcDir(),
'chrome', 'test', 'data', 'dromaeo')
ps = page_set.PageSet(file_path=dromaeo_dir)
ps.AddPageWithDefaultRunNavigate(url)
return ps
class DromaeoDomCoreAttr(_DromaeoBenchmark):
"""Dromaeo DOMCore attr JavaScript benchmark."""
tag = 'domcoreattr'
query_param = 'dom-attr'
@test.Disabled('xp') # crbug.com/323782
class DromaeoDomCoreModify(_DromaeoBenchmark):
"""Dromaeo DOMCore modify JavaScript benchmark."""
tag = 'domcoremodify'
query_param = 'dom-modify'
class DromaeoDomCoreQuery(_DromaeoBenchmark):
"""Dromaeo DOMCore query JavaScript benchmark."""
tag = 'domcorequery'
query_param = 'dom-query'
class DromaeoDomCoreTraverse(_DromaeoBenchmark):
"""Dromaeo DOMCore traverse JavaScript benchmark."""
tag = 'domcoretraverse'
query_param = 'dom-traverse'
class DromaeoJslibAttrJquery(_DromaeoBenchmark):
"""Dromaeo JSLib attr jquery JavaScript benchmark"""
tag = 'jslibattrjquery'
query_param = 'jslib-attr-jquery'
class DromaeoJslibAttrPrototype(_DromaeoBenchmark):
"""Dromaeo JSLib attr prototype JavaScript benchmark"""<|fim▁hole|>
class DromaeoJslibEventJquery(_DromaeoBenchmark):
"""Dromaeo JSLib event jquery JavaScript benchmark"""
tag = 'jslibeventjquery'
query_param = 'jslib-event-jquery'
class DromaeoJslibEventPrototype(_DromaeoBenchmark):
"""Dromaeo JSLib event prototype JavaScript benchmark"""
tag = 'jslibeventprototype'
query_param = 'jslib-event-prototype'
class DromaeoJslibModifyJquery(_DromaeoBenchmark):
"""Dromaeo JSLib modify jquery JavaScript benchmark"""
tag = 'jslibmodifyjquery'
query_param = 'jslib-modify-jquery'
class DromaeoJslibModifyPrototype(_DromaeoBenchmark):
"""Dromaeo JSLib modify prototype JavaScript benchmark"""
tag = 'jslibmodifyprototype'
query_param = 'jslib-modify-prototype'
class DromaeoJslibStyleJquery(_DromaeoBenchmark):
"""Dromaeo JSLib style jquery JavaScript benchmark"""
tag = 'jslibstylejquery'
query_param = 'jslib-style-jquery'
class DromaeoJslibStylePrototype(_DromaeoBenchmark):
"""Dromaeo JSLib style prototype JavaScript benchmark"""
tag = 'jslibstyleprototype'
query_param = 'jslib-style-prototype'
class DromaeoJslibTraverseJquery(_DromaeoBenchmark):
"""Dromaeo JSLib traverse jquery JavaScript benchmark"""
tag = 'jslibtraversejquery'
query_param = 'jslib-traverse-jquery'
class DromaeoJslibTraversePrototype(_DromaeoBenchmark):
"""Dromaeo JSLib traverse prototype JavaScript benchmark"""
tag = 'jslibtraverseprototype'
query_param = 'jslib-traverse-prototype'<|fim▁end|> | tag = 'jslibattrprototype'
query_param = 'jslib-attr-prototype' |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# complexity documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
cwd = os.getcwd()
parent = os.path.dirname(cwd)
sys.path.append(parent)
import cbh_core_model
# -- General configuration -----------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'cbh_core_model'
copyright = u'2015, Andrew Stretton'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = cbh_core_model.__version__
# The full version, including alpha/beta/rc tags.
release = cbh_core_model.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'cbh_core_modeldoc'
# -- Options for LaTeX output --------------------------------------------<|fim▁hole|>latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'cbh_core_model.tex', u'cbh_core_model Documentation',
u'Andrew Stretton', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'cbh_core_model', u'cbh_core_model Documentation',
[u'Andrew Stretton'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'cbh_core_model', u'cbh_core_model Documentation',
u'Andrew Stretton', 'cbh_core_model', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False<|fim▁end|> | |
<|file_name|>ProcessMonitor.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#*/1 * * * * python /xxx/monitor.py >> /xxx/logs/monitor.log 2>&1 &
import sys
import subprocess
import os.path as op
import socket
def this_abs_path(script_name):
    """Absolute path of *script_name*, resolved next to this script."""
    here = op.dirname(__file__)
    return op.abspath(op.join(here, script_name))
def monitor_process(key_word, cmd):
p1 = subprocess.Popen(['ps', '-ef'], stdout=subprocess.PIPE)<|fim▁hole|> if len(lines) > 0:
return
sys.stderr.write('process[%s] is lost, run [%s]\n' % (key_word, cmd))
subprocess.call(cmd, shell=True)
def monitor_port(protocol, port, cmd):
    """Probe a local port and restart the service when nothing listens on it.

    Tries to bind 127.0.0.1:port.  A successful bind means the port is free,
    i.e. the monitored service is down, so `cmd` is run to restart it.
    `protocol` is 'tcp' for a stream socket, anything else for udp.
    """
    address = ('127.0.0.1', port)
    socket_type = socket.SOCK_STREAM if protocol == 'tcp' else socket.SOCK_DGRAM
    client = socket.socket(socket.AF_INET, socket_type)
    try:
        client.bind(address)
    # Original used the Python-2-only form `except Exception, e:` with the
    # binding never read; the bare form below is valid on Python 2 and 3.
    except Exception:
        # Bind failed: the port is taken, so the service is alive -- nothing
        # to do (deliberate best-effort swallow).
        pass
    else:
        sys.stderr.write('port[%s-%s] is lost, run [%s]\n' % (protocol, port, cmd))
        subprocess.call(cmd, shell=True)
    finally:
        client.close()
#=============================================================================
def yuanzhaopin():
    """Ensure the yuanzhaopin service is listening on tcp/8635."""
    # Restart command resolved relative to this script's directory.
    cmd = '%s start' % this_abs_path('gun.sh')
    #monitor_process('\[yuanzhaopin\]', cmd)
    monitor_port('tcp', 8635, cmd)
def main():
    """Entry point: run every configured service check."""
    yuanzhaopin()
if __name__ == '__main__':
main()<|fim▁end|> | p2 = subprocess.Popen(['grep', key_word], stdin=p1.stdout, stdout=subprocess.PIPE)
p3 = subprocess.Popen(['grep', '-v', 'grep'], stdin=p2.stdout, stdout=subprocess.PIPE)
lines = p3.stdout.readlines() |
<|file_name|>KMCCommandExecutionEvent.java<|end_file_name|><|fim▁begin|>package net.diecode.killermoney.events;
import net.diecode.killermoney.objects.CCommand;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Player;
import org.bukkit.event.Cancellable;
import org.bukkit.event.Event;<|fim▁hole|>public class KMCCommandExecutionEvent extends Event implements Cancellable {
private static final HandlerList handlers = new HandlerList();
private CCommand cCommand;
private Player killer;
private LivingEntity victim;
private boolean cancelled;
    /**
     * Creates the event fired before a kill-reward command is executed.
     *
     * @param cCommand the CCommand associated with this execution
     * @param killer   the player credited with the kill
     * @param victim   the entity that was killed
     */
    public KMCCommandExecutionEvent(CCommand cCommand, Player killer, LivingEntity victim) {
        this.cCommand = cCommand;
        this.killer = killer;
        this.victim = victim;
    }
public HandlerList getHandlers() {
return handlers;
}
public static HandlerList getHandlerList() {
return handlers;
}
public CCommand getCCommand() {
return cCommand;
}
public Player getKiller() {
return killer;
}
public LivingEntity getVictim() {
return victim;
}
@Override
public boolean isCancelled() {
return cancelled;
}
@Override
public void setCancelled(boolean cancelled) {
this.cancelled = cancelled;
}
}<|fim▁end|> | import org.bukkit.event.HandlerList;
|
<|file_name|>replace.rs<|end_file_name|><|fim▁begin|>macro_rules! replace(
($name:ident, $which:ident, $re:expr,
$search:expr, $replace:expr, $result:expr) => (
#[test]
fn $name() {
let re = regex!($re);
assert_eq!(re.$which(text!($search), $replace), text!($result));
}
);
);
replace!(first, replace, r"[0-9]", "age: 26", t!("Z"), "age: Z6");
replace!(plus, replace, r"[0-9]+", "age: 26", t!("Z"), "age: Z");
replace!(all, replace_all, r"[0-9]", "age: 26", t!("Z"), "age: ZZ");
replace!(
groups,
replace,
r"(?-u)(\S+)\s+(\S+)",
"w1 w2",
t!("$2 $1"),
"w2 w1"
);
replace!(
double_dollar,
replace,
r"(?-u)(\S+)\s+(\S+)",
"w1 w2",
t!("$2 $$1"),
"w2 $1"
);
// replace!(adjacent_index, replace,
// r"([^aeiouy])ies$", "skies", t!("$1y"), "sky");
replace!(
named,
replace_all,
r"(?-u)(?P<first>\S+)\s+(?P<last>\S+)(?P<space>\s*)",
"w1 w2 w3 w4",
t!("$last $first$space"),
"w2 w1 w4 w3"
);
replace!(
trim,
replace_all,
"^[ \t]+|[ \t]+$",
" \t trim me\t \t",
t!(""),
"trim me"
);
replace!(number_hypen, replace, r"(.)(.)", "ab", t!("$1-$2"), "a-b");
// replace!(number_underscore, replace, r"(.)(.)", "ab", t!("$1_$2"), "a_b");
replace!(
simple_expand,
replace_all,
r"(?-u)(\w) (\w)",
"a b",
t!("$2 $1"),
"b a"
);
replace!(
literal_dollar1,
replace_all,
r"(?-u)(\w+) (\w+)",
"a b",
t!("$$1"),
"$1"
);
replace!(
literal_dollar2,
replace_all,
r"(?-u)(\w+) (\w+)",
"a b",
t!("$2 $$c $1"),
"b $c a"
);
replace!(
no_expand1,
replace,
r"(?-u)(\S+)\s+(\S+)",
"w1 w2",
no_expand!("$2 $1"),
"$2 $1"
);
replace!(
no_expand2,
replace,
r"(?-u)(\S+)\s+(\S+)",
"w1 w2",
no_expand!("$$1"),
"$$1"
);
use_!(Captures);
replace!(
closure_returning_reference,
replace,
r"([0-9]+)",
"age: 26",
|captures: &Captures<'_>| {
match_text!(captures.get(1).unwrap())[0..1].to_owned()
},
"age: 2"
);
replace!(
closure_returning_value,
replace,
r"[0-9]+",
"age: 26",
|_captures: &Captures<'_>| t!("Z").to_owned(),
"age: Z"
);
// See https://github.com/rust-lang/regex/issues/314
replace!(
match_at_start_replace_with_empty,
replace_all,
r"foo",
"foobar",
t!(""),
"bar"
);
// See https://github.com/rust-lang/regex/issues/393
replace!(single_empty_match, replace, r"^", "bar", t!("foo"), "foobar");
// See https://github.com/rust-lang/regex/issues/399
replace!(
capture_longest_possible_name,
replace_all,
r"(.)",
"b",
t!("${1}a $1a"),
"ba "
);
replace!(
impl_string,
replace,
r"[0-9]",
"age: 26",
t!("Z".to_string()),
"age: Z6"
);
replace!(
impl_string_ref,
replace,
r"[0-9]",
"age: 26",
t!(&"Z".to_string()),
"age: Z6"
);
replace!(
impl_cow_str_borrowed,
replace,
r"[0-9]",
"age: 26",
t!(std::borrow::Cow::<'_, str>::Borrowed("Z")),
"age: Z6"
);
replace!(
impl_cow_str_borrowed_ref,
replace,
r"[0-9]",
"age: 26",
t!(&std::borrow::Cow::<'_, str>::Borrowed("Z")),
"age: Z6"
);
replace!(
impl_cow_str_owned,
replace,
r"[0-9]",
"age: 26",
t!(std::borrow::Cow::<'_, str>::Owned("Z".to_string())),
"age: Z6"
);
replace!(
impl_cow_str_owned_ref,
replace,
r"[0-9]",
"age: 26",
t!(&std::borrow::Cow::<'_, str>::Owned("Z".to_string())),
"age: Z6"<|fim▁hole|> replace,
r"[0-9]",
"age: 26",
bytes!(vec![b'Z']),
"age: Z6"
);
replace!(
impl_vec_u8_ref,
replace,
r"[0-9]",
"age: 26",
bytes!(&vec![b'Z']),
"age: Z6"
);
replace!(
impl_cow_slice_borrowed,
replace,
r"[0-9]",
"age: 26",
bytes!(std::borrow::Cow::<'_, [u8]>::Borrowed(&[b'Z'])),
"age: Z6"
);
replace!(
impl_cow_slice_borrowed_ref,
replace,
r"[0-9]",
"age: 26",
bytes!(&std::borrow::Cow::<'_, [u8]>::Borrowed(&[b'Z'])),
"age: Z6"
);
replace!(
impl_cow_slice_owned,
replace,
r"[0-9]",
"age: 26",
bytes!(std::borrow::Cow::<'_, [u8]>::Owned(vec![b'Z'])),
"age: Z6"
);
replace!(
impl_cow_slice_owned_ref,
replace,
r"[0-9]",
"age: 26",
bytes!(&std::borrow::Cow::<'_, [u8]>::Owned(vec![b'Z'])),
"age: Z6"
);<|fim▁end|> | );
replace!(
impl_vec_u8, |
<|file_name|>proc_cmdline.go<|end_file_name|><|fim▁begin|>// Copyright 2015 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package proccmdline
import (
"errors"
"fmt"
"io/ioutil"
"strings"
"github.com/rancher/os/log"
"github.com/rancher/os/config/cloudinit/datasource"
"github.com/rancher/os/config/cloudinit/pkg"
)
const (
ProcCmdlineLocation = "/proc/cmdline"
ProcCmdlineCloudConfigFlag = "cloud-config-url"
)
type ProcCmdline struct {
Location string
lastError error
}
func NewDatasource() *ProcCmdline {
return &ProcCmdline{Location: ProcCmdlineLocation}<|fim▁hole|> contents, c.lastError = ioutil.ReadFile(c.Location)
if c.lastError != nil {
return false
}
cmdline := strings.TrimSpace(string(contents))
_, c.lastError = findCloudConfigURL(cmdline)
return (c.lastError == nil)
}
// Finish is a no-op: reading the kernel command line needs no cleanup.
func (c *ProcCmdline) Finish() error {
	return nil
}
// String describes this datasource and the last availability error, if any.
func (c *ProcCmdline) String() string {
	return fmt.Sprintf("%s: %s (lastError: %v)", c.Type(), c.Location, c.lastError)
}
// AvailabilityChanges is false: /proc/cmdline is fixed once the kernel boots.
func (c *ProcCmdline) AvailabilityChanges() bool {
	return false
}
// ConfigRoot is empty; this datasource exposes no config tree.
func (c *ProcCmdline) ConfigRoot() string {
	return ""
}
// FetchMetadata returns empty metadata: the kernel command line only carries
// userdata (via cloud-config-url), never instance metadata.
func (c *ProcCmdline) FetchMetadata() (datasource.Metadata, error) {
	return datasource.Metadata{}, nil
}
// FetchUserdata reads the configured command-line file (normally
// /proc/cmdline), extracts the cloud-config-url value, and downloads the
// referenced cloud-config with retries.
func (c *ProcCmdline) FetchUserdata() ([]byte, error) {
	contents, err := ioutil.ReadFile(c.Location)
	if err != nil {
		return nil, err
	}
	cmdline := strings.TrimSpace(string(contents))
	url, err := findCloudConfigURL(cmdline)
	if err != nil {
		return nil, err
	}
	client := pkg.NewHTTPClient()
	cfg, err := client.GetRetry(url)
	if err != nil {
		return nil, err
	}
	return cfg, nil
}
// Type identifies this datasource in logs and status output.
func (c *ProcCmdline) Type() string {
	return "proc-cmdline"
}
// findCloudConfigURL scans a kernel command line for the cloud-config-url
// parameter ('_' and '-' are interchangeable in the key) and returns its
// value.  When the flag appears more than once the last value wins; err is
// non-nil only when no usable value was found.
func findCloudConfigURL(input string) (url string, err error) {
	err = errors.New("cloud-config-url not found")
	for _, token := range strings.Split(input, " ") {
		parts := strings.SplitN(token, "=", 2)
		key := parts[0]
		// Normalize so cloud_config_url and cloud-config-url both match.
		key = strings.Replace(key, "_", "-", -1)
		if key != "cloud-config-url" {
			continue
		}
		if len(parts) != 2 {
			log.Printf("Found cloud-config-url in /proc/cmdline with no value, ignoring.")
			continue
		}
		url = parts[1]
		err = nil
	}
	return
}
func (c *ProcCmdline) IsAvailable() bool {
var contents []byte |
<|file_name|>daysBetweenDates.py<|end_file_name|><|fim▁begin|># Define a daysBetweenDates procedure that would produce the
# correct output if there was a correct nextDay procedure.
#
# Udacity course work
def isLeapYear(year):
    """True for Gregorian leap years: divisible by 4, except centuries,
    which must also be divisible by 400."""
    return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
def daysInMonth(year, month):
    """Number of days in the given month (1-12), with February adjusted
    for leap years."""
    if month == 2 and isLeapYear(year):
        return 29
    return (31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)[month - 1]
def dateIsBefore(year1, month1, day1, year2, month2, day2):
    """True when year1-month1-day1 falls strictly before
    year2-month2-day2; False otherwise (including equal dates)."""
    # Tuples compare lexicographically: year first, then month, then day,
    # which is exactly calendar ordering for valid dates.
    return (year1, month1, day1) < (year2, month2, day2)
def nextDay(year, month, day):
    """Returns (year, month, day) for the calendar day immediately after
    year/month/day, using real month lengths from daysInMonth (including
    leap-year February).

    The previous docstring ("assume every month has 30 days") was stale:
    the code has always delegated to daysInMonth.
    """
    if day < daysInMonth(year, month):
        return year, month, day + 1
    # Last day of the month: roll over, wrapping December into January.
    if month == 12:
        return year + 1, 1, 1
    return year, month + 1, 1
<|fim▁hole|> in Gregorian calendar, and the first date is not after
the second."""
assert not dateIsBefore(year2, month2, day2, year1, month1, day1)
days = 0
while dateIsBefore(year1, month1, day1, year2, month2, day2):
year1, month1, day1 = nextDay(year1, month1, day1)
days += 1
return days
def test():
    """Smoke-tests daysBetweenDates against known-good answers.

    Each expected value is either the day count, or the string
    "AssertionError" when the dates are reversed and the function is
    expected to raise.
    """
    test_cases = [((2012, 9, 30, 2012, 10, 30), 30),
                  ((2012, 1, 1, 2013, 1, 1), 366),
                  ((2012, 9, 1, 2012, 9, 4), 3),
                  ((2016, 9, 1, 2012, 9, 4), "AssertionError"),
                  ((2012, 10, 1, 2012, 9, 1), "AssertionError"),
                  ((2012, 9, 1, 2012, 9, 1), 0),
                  ((1900, 1, 1, 1999, 12, 31), 36523)]
    for (args, answer) in test_cases:
        try:
            result = daysBetweenDates(*args)
            if result != answer:
                print "Expected:", answer, "Received:", result
            else:
                print "Test case passed!"
        except AssertionError:
            # Dates given in the wrong order are expected to raise.
            if answer == "AssertionError":
                print "Test case passed!"
            else:
                print "Exception: {0} raised AssertionError!\n".format(args)
test()<|fim▁end|> |
def daysBetweenDates(year1, month1, day1, year2, month2, day2):
"""Returns the number of days between year1/month1/day1
and year2/month2/day2. Assumes inputs are valid dates |
<|file_name|>hero.service.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core';
import { Http, Headers } from '@angular/http';
import { Hero } from './hero';
import 'rxjs/add/operator/toPromise'
@Injectable()
export class HeroService {
private heroesUrl = 'app/heroes'; // URL to web api
constructor(private http: Http) {}
  // Fetch the full hero list from the web api as a promise.
  getHeroes() {
    return this.http.get(this.heroesUrl)
      .toPromise()
      .then(response => response.json().data as Hero[])
      .catch(this.handleError);
  }
// Get Hero
getHero(id: number) {<|fim▁hole|> return this.getHeroes()
.then(heroes => heroes.find(hero => hero.id === id));
}
  // POST a new hero to the api; resolves with the created record.
  private post(hero: Hero): Promise<Hero> {
    let headers = new Headers({
      'Content-Type': 'application/json'});
    return this.http
      .post(this.heroesUrl, JSON.stringify(hero), {headers: headers})
      .toPromise()
      .then(res => res.json().data)
      .catch(this.handleError);
  }
  // PUT an update for an existing hero (keyed by id); resolves with the
  // same hero object on success.
  private put(hero: Hero) {
    let headers = new Headers();
    headers.append('Content-Type', 'application/json');
    let url = `${this.heroesUrl}/${hero.id}`;
    return this.http
      .put(url, JSON.stringify(hero), {headers: headers})
      .toPromise()
      .then(() => hero)
      .catch(this.handleError);
  }
  // DELETE the hero on the server by id.
  delete(hero: Hero) {
    let headers = new Headers();
    headers.append('Content-Type', 'application/json');
    let url = `${this.heroesUrl}/${hero.id}`;
    return this.http
      .delete(url, {headers: headers})
      .toPromise()
      .catch(this.handleError);
  }
save(hero: Hero): Promise<Hero> {
if (hero.id) {
return this.put(hero);
}
return this.post(hero);
}
  // Shared rejection handler: log the failure, then re-reject so callers
  // still observe the error.
  private handleError(error: any) {
    console.error('An error occurred', error);
    return Promise.reject(error.message || error);
  }
}<|fim▁end|> | |
<|file_name|>config.py<|end_file_name|><|fim▁begin|># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import ast
import os.path
import platform
import re
import sys
class Config(object):
'''A Config contains a dictionary that species a build configuration.'''
# Valid values for target_os:
OS_ANDROID = 'android'
OS_CHROMEOS = 'chromeos'
OS_LINUX = 'linux'
OS_MAC = 'mac'
OS_WINDOWS = 'windows'
# Valid values for target_cpu:
ARCH_X86 = 'x86'
ARCH_X64 = 'x64'
ARCH_ARM = 'arm'
  def __init__(self, build_dir=None, target_os=None, target_cpu=None,
               is_debug=None, is_verbose=None, apk_name='MojoRunner.apk'):
    '''Function arguments take precedence over GN args and default values.'''
    assert target_os in (None, Config.OS_ANDROID, Config.OS_CHROMEOS,
                         Config.OS_LINUX, Config.OS_MAC, Config.OS_WINDOWS)
    assert target_cpu in (None, Config.ARCH_X86, Config.ARCH_X64,
                          Config.ARCH_ARM)
    assert is_debug in (None, True, False)
    assert is_verbose in (None, True, False)
    # Defaults: host platform, debug, verbose.  Overridden below in order of
    # increasing precedence: GN args from the build dir, then explicit
    # constructor arguments.
    self.values = {
        'build_dir': build_dir,
        'target_os': self.GetHostOS(),
        'target_cpu': self.GetHostCPU(),
        'is_debug': True,
        'is_verbose': True,
        'dcheck_always_on': False,
        'is_asan': False,
        'apk_name': apk_name,
    }
    self._ParseGNArgs()
    # Explicit arguments win over anything parsed from args.gn.
    if target_os is not None:
      self.values['target_os'] = target_os
    if target_cpu is not None:
      self.values['target_cpu'] = target_cpu
    if is_debug is not None:
      self.values['is_debug'] = is_debug
    if is_verbose is not None:
      self.values['is_verbose'] = is_verbose
@staticmethod
def GetHostOS():
if sys.platform == 'linux2':
return Config.OS_LINUX
if sys.platform == 'darwin':
return Config.OS_MAC
if sys.platform == 'win32':
return Config.OS_WINDOWS
raise NotImplementedError('Unsupported host OS')
  @staticmethod
  def GetHostCPU():
    '''Returns the Config.ARCH_* constant for this machine's processor.

    Raises Exception when platform.machine() is unrecognized.
    '''
    # Derived from //native_client/pynacl/platform.py
    machine = platform.machine()
    if machine in ('x86', 'x86-32', 'x86_32', 'x8632', 'i386', 'i686', 'ia32',
                   '32'):
      return Config.ARCH_X86
    if machine in ('x86-64', 'amd64', 'AMD64', 'x86_64', 'x8664', '64'):
      return Config.ARCH_X64
    if machine.startswith('arm'):
      # Matches 'arm', 'armv7l', 'arm64', etc.  NOTE(review): machines
      # reporting 'aarch64' would NOT match -- confirm whether intended.
      return Config.ARCH_ARM
    raise Exception('Cannot identify CPU arch: %s' % machine)
def _ParseGNArgs(self):
'''Parse the gn config file from the build directory, if it exists.'''
TRANSLATIONS = { 'true': 'True', 'false': 'False', }
if self.values['build_dir'] is None:
return
gn_file = os.path.join(self.values['build_dir'], 'args.gn')
if not os.path.isfile(gn_file):
return
with open(gn_file, 'r') as f:
for line in f:
line = re.sub('\s*#.*', '', line)
result = re.match('^\s*(\w+)\s*=\s*(.*)\s*$', line)
if result:
key = result.group(1)
value = result.group(2)
self.values[key] = ast.literal_eval(TRANSLATIONS.get(value, value))
# Getters for standard fields ------------------------------------------------
@property
def build_dir(self):
'''Build directory path.'''
return self.values['build_dir']
@property
def target_os(self):
'''OS of the build/test target.'''
return self.values['target_os']
@property<|fim▁hole|>
@property
def is_debug(self):
'''Is Debug build?'''
return self.values['is_debug']
@property
def is_verbose(self):
'''Should print additional logging information?'''
return self.values['is_verbose']
@property
def dcheck_always_on(self):
'''DCHECK is fatal even in release builds'''
return self.values['dcheck_always_on']
@property
def is_asan(self):
'''Is ASAN build?'''
return self.values['is_asan']
@property
def apk_name(self):
'''Name of the APK file to run'''
return self.values['apk_name']<|fim▁end|> | def target_cpu(self):
'''CPU arch of the build/test target.'''
return self.values['target_cpu'] |
<|file_name|>creatorpacket.cpp<|end_file_name|><|fim▁begin|>// This file is part of par2cmdline (a PAR 2.0 compatible file verification and
// repair tool). See http://parchive.sourceforge.net for details of PAR 2.0.
//
// Copyright (c) 2003 Peter Brian Clements
//
// par2cmdline is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// par2cmdline is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
//
// 11/1/05 gmilow - Modified
#include "stdafx.h"
#include "par2cmdline.h"
#ifdef _MSC_VER
#ifdef _DEBUG
#undef THIS_FILE
static char THIS_FILE[]=__FILE__;
#define new DEBUG_NEW
#endif
#endif
// Construct the creator packet.
// The only external information required to complete construction is
// the set_id_hash (which is normally computed from information in the
// main packet).
bool CreatorPacket::Create(const MD5Hash &setid)
{
string creator = "Created by PACKAGE version VERSION .";
// Allocate a packet just large enough for creator name
CREATORPACKET *packet = (CREATORPACKET *)AllocatePacket(sizeof(*packet) + (~3 & (3+(u32)creator.size())));
// Fill in the details the we know
packet->header.magic = packet_magic;
packet->header.length = packetlength;
//packet->header.hash; // Compute shortly
packet->header.setid = setid;
packet->header.type = creatorpacket_type;
// Copy the creator description into the packet
memcpy(packet->client, creator.c_str(), creator.size());
// Compute the packet hash
MD5Context packetcontext;
packetcontext.Update(&packet->header.setid, packetlength - offsetof(PACKET_HEADER, setid));
packetcontext.Final(packet->header.hash);
<|fim▁hole|>
// Load a creator packet body from disk.  The caller supplies the
// already-parsed packet header; this validates the declared length,
// allocates the packet, and reads the remainder (the creator/client
// description) from the file.  Returns false for implausible lengths.
bool CreatorPacket::Load(DiskFile *diskfile, u64 offset, PACKET_HEADER &header)
{
  // Is the packet long enough
  if (header.length <= sizeof(CREATORPACKET))
  {
    return false;
  }
  // Is the packet too large (what is the longest reasonable creator description)
  if (header.length - sizeof(CREATORPACKET) > 100000)
  {
    return false;
  }
  // Allocate the packet (with a little extra so we will have NULLs after the description)
  CREATORPACKET *packet = (CREATORPACKET *)AllocatePacket((size_t)header.length, 4);
  packet->header = header;
  // Load the rest of the packet from disk
  return diskfile->Read(offset + sizeof(PACKET_HEADER),
                        packet->client,
                        (size_t)packet->header.length - sizeof(PACKET_HEADER));
}
}
// Load the packet from disk. |
<|file_name|>SmoozedComHook.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from module.plugins.internal.MultiHook import MultiHook
class SmoozedComHook(MultiHook):
__name__ = "SmoozedComHook"
__type__ = "hook"
__version__ = "0.04"
__status__ = "testing"
__config__ = [("pluginmode" , "all;listed;unlisted", "Use for plugins" , "all"),
("pluginlist" , "str" , "Plugin list (comma separated)", "" ),
("reload" , "bool" , "Reload plugin list" , True ),
("reloadinterval", "int" , "Reload interval in hours" , 12 )]<|fim▁hole|>
__description__ = """Smoozed.com hook plugin"""
__license__ = "GPLv3"
__authors__ = [("", "")]
    def get_hosters(self):
        """Return the hoster list for the currently selected account.

        NOTE(review): the `info` value from account.select() is unused here.
        """
        user, info = self.account.select()
        return self.account.get_data(user)['hosters']
<|file_name|>lookup_ops_test.py<|end_file_name|><|fim▁begin|># Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for lookup ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tempfile
import numpy as np
import six
from tensorflow.python import tf2
from tensorflow.python.client import session
from tensorflow.python.data.experimental.ops import counter
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.ops import readers as reader_ops
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.eager import function
from tensorflow.python.eager import wrap_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_spec
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import lookup_ops
from tensorflow.python.ops import map_fn
from tensorflow.python.ops import string_ops
from tensorflow.python.ops import variables
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.platform import test
from tensorflow.python.saved_model import load as saved_model_load
from tensorflow.python.saved_model import save as saved_model_save
from tensorflow.python.training import saver
from tensorflow.python.training import server_lib
from tensorflow.python.training.tracking import graph_view
from tensorflow.python.training.tracking import tracking
from tensorflow.python.training.tracking import util as trackable
from tensorflow.python.util import compat
class BaseLookupTableTest(test.TestCase):
  """Shared helpers selecting TF1 vs TF2 lookup-table classes.

  Subclasses use these so the same test bodies run under both modes: the
  V1 table classes require an explicit initializer run, the TF2 ones do not.
  """
  def getHashTable(self):
    # V1 tables need table.initializer run explicitly; see initialize_table.
    if tf2.enabled():
      return lookup_ops.StaticHashTable
    else:
      return lookup_ops.StaticHashTableV1
  def getVocabularyTable(self):
    if tf2.enabled():
      return lookup_ops.StaticVocabularyTable
    else:
      return lookup_ops.StaticVocabularyTableV1
  def initialize_table(self, table):
    # Only TF1 (graph mode) requires running the initializer op.
    if not tf2.enabled():
      self.evaluate(table.initializer)
class StaticHashTableTest(BaseLookupTableTest):
def testStaticHashTable(self):
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table)
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
output = table.lookup(input_string)
self.assertAllEqual([3], output.get_shape())
result = self.evaluate(output)
self.assertAllEqual([0, 1, -1], result)
exported_keys_tensor, exported_values_tensor = table.export()
self.assertItemsEqual([b"brain", b"salad", b"surgery"],
self.evaluate(exported_keys_tensor))
self.assertItemsEqual([0, 1, 2], self.evaluate(exported_values_tensor))
def testStaticHashTableFindHighRank(self):
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table)
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant([["brain", "salad"],
["tank", "tarkus"]])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual([[0, 1], [-1, -1]], result)
def testStaticHashTableInitWithPythonArrays(self):
default_val = -1
keys = ["brain", "salad", "surgery"]
values = [0, 1, 2]
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(
keys, values, value_dtype=dtypes.int64), default_val)
self.initialize_table(table)
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual([0, 1, -1], result)
def testStaticHashTableInitWithNumPyArrays(self):
default_val = -1
keys = np.array(["brain", "salad", "surgery"], dtype=np.str)
values = np.array([0, 1, 2], dtype=np.int64)
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table)
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual([0, 1, -1], result)
def testMultipleStaticHashTables(self):
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table1 = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
table2 = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
table3 = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table1)
self.initialize_table(table2)
self.initialize_table(table3)
self.assertAllEqual(3, self.evaluate(table1.size()))
self.assertAllEqual(3, self.evaluate(table2.size()))
self.assertAllEqual(3, self.evaluate(table3.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
output1 = table1.lookup(input_string)
output2 = table2.lookup(input_string)
output3 = table3.lookup(input_string)
out1, out2, out3 = self.evaluate([output1, output2, output3])
self.assertAllEqual([0, 1, -1], out1)
self.assertAllEqual([0, 1, -1], out2)
self.assertAllEqual([0, 1, -1], out3)
def testStaticHashTableWithTensorDefault(self):
default_val = constant_op.constant(-1, dtypes.int64)
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table)
input_string = constant_op.constant(["brain", "salad", "tank"])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual([0, 1, -1], result)
def testStaticHashTableWithSparseTensorInput(self):
default_val = constant_op.constant(-1, dtypes.int64)
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table)
sp_indices = [[0, 0], [0, 1], [1, 0]]
sp_shape = [2, 2]
input_tensor = sparse_tensor.SparseTensor(
constant_op.constant(sp_indices, dtypes.int64),
constant_op.constant(["brain", "salad", "tank"]),
constant_op.constant(sp_shape, dtypes.int64))
output = table.lookup(input_tensor)
out_indices, out_values, out_shape = self.evaluate(output)
self.assertAllEqual([0, 1, -1], out_values)
self.assertAllEqual(sp_indices, out_indices)
self.assertAllEqual(sp_shape, out_shape)
def testStaticHashTableWithRaggedTensorInput(self):
default_val = constant_op.constant(-1, dtypes.int64)
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table)
row_splits = [0, 2, 3]
input_tensor = ragged_tensor.RaggedTensor.from_row_splits(
constant_op.constant(["brain", "salad", "tank"]),
constant_op.constant(row_splits, dtypes.int64))
output = table.lookup(input_tensor)
out = self.evaluate(output)
self.assertAllEqual([0, 1, -1], out.values)
self.assertAllEqual(row_splits, out.row_splits)
def testSignatureMismatch(self):
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table)
# Ref types do not produce a lookup signature mismatch.
input_string_ref = variables.Variable("brain")
self.evaluate(input_string_ref.initializer)
self.assertEqual(0, self.evaluate(table.lookup(input_string_ref)))
input_string = constant_op.constant([1, 2, 3], dtypes.int64)
with self.assertRaises(TypeError):
table.lookup(input_string)
with self.assertRaises(TypeError):
self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), "UNK")
def testDTypes(self):
default_val = -1
with self.assertRaises(TypeError):
self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(["a"], [1], [dtypes.string],
dtypes.int64), default_val)
@test_util.run_v1_only("(Cached) Sessions not available in TF2.0")
def testNotInitialized(self):
with self.cached_session():
default_val = -1
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(["a"], [1],
value_dtype=dtypes.int64),
default_val)
input_string = constant_op.constant(["brain", "salad", "surgery"])
output = table.lookup(input_string)
with self.assertRaisesOpError("Table not initialized"):
self.evaluate(output)
@test_util.run_v1_only("(Cached) Sessions not available in TF2.0")
def testInitializeTwice(self):
with self.cached_session():
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table)
# Make sure that initializing twice doesn't throw any errors.
self.initialize_table(table)
def testInitializationWithInvalidDimensions(self):
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2, 3, 4], dtypes.int64)
raised_error = ValueError
if context.executing_eagerly():
raised_error = errors_impl.InvalidArgumentError
with self.assertRaises(raised_error):
self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
  @test_util.run_v1_only("Sessions not available in TF2.0")
  def testMultipleSessions(self):
    """Initializing one shared table from two sessions must not conflict."""
    # Start a server
    server = server_lib.Server({"local0": ["localhost:0"]},
                               protocol="grpc",
                               start=True)
    # Create two sessions sharing the same state
    session1 = session.Session(server.target)
    session2 = session.Session(server.target)
    default_val = -1
    keys = constant_op.constant(["brain", "salad", "surgery"])
    values = constant_op.constant([0, 1, 2], dtypes.int64)
    table = self.getHashTable()(
        lookup_ops.KeyValueTensorInitializer(keys, values),
        default_val,
        name="t1")
    # Init the table in the first session.
    with session1:
      self.initialize_table(table)
      self.assertAllEqual(3, self.evaluate(table.size()))
    # Init the table in the second session and verify that we do not get a
    # "Table already initialized" error.
    with session2:
      self.evaluate(table.initializer)
      self.assertAllEqual(3, self.evaluate(table.size()))
  @test_util.run_v2_only
  def testImportedHashTable(self):
    """A table exported via a meta graph can be re-imported and looked up."""
    g = ops.Graph()
    with g.as_default():
      t = lookup_ops.StaticHashTable(
          lookup_ops.KeyValueTensorInitializer(["a"], [1]),
          2)
      init_op = t._init_op
      op = t.lookup(ops.convert_to_tensor(["a"]))
      meta_graph = saver.export_meta_graph()
    # Re-import the exported graph inside a wrapped function and fetch the
    # lookup output tensor by its recorded name.
    def f():
      saver.import_meta_graph(meta_graph)
      return ops.get_default_graph().get_tensor_by_name(op.name)
    wrapped = wrap_function.wrap_function(f, [])
    # Prune out just the initializer op and run it before the lookup.
    pruned_init_fn = wrapped.prune(
        (), [wrapped.graph.get_operation_by_name(init_op.name)])
    self.evaluate(pruned_init_fn())
    self.assertAllEqual([1], wrapped())
def testStaticHashTableInt32String(self):
default_val = "n/a"
keys = constant_op.constant([0, 1, 2], dtypes.int32)
values = constant_op.constant(["brain", "salad", "surgery"])
table = self.getHashTable()(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
self.initialize_table(table)
input_tensor = constant_op.constant([0, 1, -1])
output = table.lookup(input_tensor)
result = self.evaluate(output)
self.assertAllEqual([b"brain", b"salad", b"n/a"], result)
  def testTableUseInFunction(self):
    """A table built eagerly can be captured and used inside a defun."""
    if not context.executing_eagerly():
      self.skipTest("Only Eager mode test.")
    keys = constant_op.constant([0, 1, 2], dtypes.int32)
    values = constant_op.constant(["brain", "salad", "surgery"])
    table = self.getHashTable()(lookup_ops.KeyValueTensorInitializer(
        keys, values), "n/a")
    @function.defun()
    def lookup_table_func(k):
      # `table` is captured from the enclosing eager scope.
      return table.lookup(k)
    result = lookup_table_func(constant_op.constant([0, 1, -1]))
    self.assertAllEqual([b"brain", b"salad", b"n/a"], result)
    result = lookup_table_func(constant_op.constant([2, -1, 1]))
    self.assertAllEqual([b"surgery", b"n/a", b"salad"], result)
  def testTableCreatedInFunction(self):
    """A table may also be constructed inside the defun body itself."""
    if not context.executing_eagerly():
      self.skipTest("Only Eager mode test.")
    keys = constant_op.constant([0, 1, 2], dtypes.int32)
    values = constant_op.constant(["brain", "salad", "surgery"])
    @function.defun()
    def lookup_table_func(k):
      # Table creation happens at function trace time.
      table = self.getHashTable()(lookup_ops.KeyValueTensorInitializer(
          keys, values), "n/a")
      return table.lookup(k)
    result = lookup_table_func(constant_op.constant([0, 1, -1]))
    self.assertAllEqual([b"brain", b"salad", b"n/a"], result)
    result = lookup_table_func(constant_op.constant([2, -1, 1]))
    self.assertAllEqual([b"surgery", b"n/a", b"salad"], result)
  def testTwoTablesInControlFlow(self):
    """Two independent tables created inside map_fn both initialize and work."""
    keys = constant_op.constant([1, 2, 3], dtypes.int32)
    values = constant_op.constant([5, 10, 15], dtypes.int32)
    def table_func1(x):
      table = self.getHashTable()(lookup_ops.KeyValueTensorInitializer(
          keys, values), -1)
      return table.lookup(x)
    elems = np.array([2, 4, 1], dtype=np.int32)
    result1 = map_fn.map_fn(table_func1, elems, dtype=dtypes.int32)
    def table_func2(x):
      table = self.getHashTable()(lookup_ops.KeyValueTensorInitializer(
          keys, values), -1)
      return table.lookup(x)
    elems = np.array([2, 4, 1], dtype=np.int32)
    result2 = map_fn.map_fn(table_func2, elems, dtype=dtypes.int32)
    # A single tables_initializer() must cover both tables.
    self.evaluate(lookup_ops.tables_initializer())
    self.assertAllEqual([10, -1, 5], self.evaluate(result1))
    self.assertAllEqual([10, -1, 5], self.evaluate(result2))
  @test_util.enable_control_flow_v2
  def testLookupTableInWhileV2(self):
    """Gradient through a lookup-only map_fn is None (no dependence on beta)."""
    lookup = self.getHashTable()(lookup_ops.KeyValueTensorInitializer(
        constant_op.constant([2, 5], dtype=dtypes.int64),
        constant_op.constant([-10.0, 1], dtype=dtypes.float32)), -1)
    beta = variables.Variable(1.0, trainable=True)
    @def_function.function
    def get_loss(unused_beta):
      return map_fn.map_fn(
          lookup.lookup,
          constant_op.constant([2, 3], dtype=dtypes.int64),
          dtype=dtypes.float32)
    with backprop.GradientTape() as tape:
      loss = get_loss(beta)
    # The loss never uses beta, so the gradient must be None.
    self.assertIsNone(tape.gradient(loss, beta))
  @test_util.enable_control_flow_v2
  def testLookupTableInCondV2(self):
    """A lookup inside cond_v2 flows correctly through gradient computation."""
    lookup = self.getHashTable()(lookup_ops.KeyValueTensorInitializer(
        constant_op.constant([2, 5], dtype=dtypes.int64),
        constant_op.constant([-10.0, 1], dtype=dtypes.float32)), -1)
    beta = variables.Variable(1.0, trainable=True)
    @def_function.function
    def get_loss(beta):
      def true_fn():
        return lookup.lookup(constant_op.constant(2, dtype=dtypes.int64))
      def false_fn():
        return constant_op.constant(0, dtype=dtypes.float32)
      return beta * control_flow_ops.cond(
          constant_op.constant(True), true_fn=true_fn, false_fn=false_fn)
    with backprop.GradientTape() as tape:
      loss = get_loss(beta)
    grad = tape.gradient(loss, beta)
    self.evaluate(variables.global_variables_initializer())
    self.evaluate(lookup_ops.tables_initializer())
    # loss = beta * lookup(2) = beta * -10.0, so dloss/dbeta == -10.
    self.assertAllEqual(grad, -10.)
  def testExportShapeInference(self):
    """Shapes inferred for export() inside tf.function match eager shapes."""
    table = self.getHashTable()(lookup_ops.KeyValueTensorInitializer(
        constant_op.constant([2, 5], dtype=dtypes.int64),
        constant_op.constant([-10.0, 1], dtype=dtypes.float32)), -1)
    # export() returns the (keys, values) tensor pair.
    actual_shapes = [t.shape for t in table.export()]
    inferred_shapes = []
    @def_function.function
    def f():
      for t in table.export():
        inferred_shapes.append(t.shape)
    f()
    self.assertLen(actual_shapes, 2)
    self.assertLen(inferred_shapes, 2)
    self.assertTrue(inferred_shapes[0].is_compatible_with(actual_shapes[0]))
    self.assertTrue(inferred_shapes[1].is_compatible_with(actual_shapes[1]))
class KeyValueTensorInitializerTest(BaseLookupTableTest):
  """Tests for lookup_ops.KeyValueTensorInitializer across key dtypes."""
  def test_string(self):
    """String keys with int64 values initialize successfully."""
    init = lookup_ops.KeyValueTensorInitializer(
        ("brain", "salad", "surgery"), (0, 1, 2), dtypes.string, dtypes.int64)
    table = self.getHashTable()(init, default_value=-1)
    self.initialize_table(table)
  def test_multiple_tables(self):
    """Two tables in one name scope get uniquified graph op names."""
    with ops.name_scope("table_scope"):
      init1 = lookup_ops.KeyValueTensorInitializer(
          ("brain", "salad", "surgery"), (0, 1, 2), dtypes.string, dtypes.int64)
      table1 = self.getHashTable()(init1, default_value=-1)
      if not context.executing_eagerly():
        self.assertEqual("hash_table", table1.name)
        self.assertEqual("table_scope/hash_table",
                         table1.resource_handle.op.name)
      init2 = lookup_ops.KeyValueTensorInitializer(
          ("brain", "salad", "surgery"), (0, 1, 2), dtypes.string, dtypes.int64)
      table2 = self.getHashTable()(init2, default_value=-1)
      if not context.executing_eagerly():
        # The second table gets the "_1" uniquifying suffix.
        self.assertEqual("hash_table_1", table2.name)
        self.assertEqual("table_scope/hash_table_1",
                         table2.resource_handle.op.name)
  def test_int64(self):
    """int64 keys with int64 values initialize successfully."""
    init = lookup_ops.KeyValueTensorInitializer((42, 1, -1000), (0, 1, 2),
                                                dtypes.int64, dtypes.int64)
    table = self.getHashTable()(init, default_value=-1)
    self.initialize_table(table)
  def test_int32(self):
    """int32 keys are rejected by the hash table kernel with an OpError."""
    init = lookup_ops.KeyValueTensorInitializer((42, 1, -1000), (0, 1, 2),
                                                dtypes.int32, dtypes.int64)
    with self.assertRaises(errors_impl.OpError):
      table = self.getHashTable()(init, default_value=-1)
      self.initialize_table(table)
class DatasetInitializerTest(BaseLookupTableTest):
  """Tests for lookup_ops.DatasetInitializer fed from tf.data pipelines."""
  def _createVocabFile(self, basename, values=("brain", "salad", "surgery")):
    """Writes `values` one per line into a temp file; returns its path."""
    vocabulary_file = os.path.join(self.get_temp_dir(), basename)
    with open(vocabulary_file, "w") as f:
      f.write("\n".join(values) + "\n")
    return vocabulary_file
  def test_basic(self):
    """A zipped (key, value) dataset initializes an int64->string table."""
    keys = dataset_ops.Dataset.range(100)
    values = dataset_ops.Dataset.range(100).map(
        lambda x: string_ops.as_string(x * 2))
    ds = dataset_ops.Dataset.zip((keys, values))
    init = lookup_ops.DatasetInitializer(ds)
    table = self.getHashTable()(init, default_value="")
    self.initialize_table(table)
    output = table.lookup(constant_op.constant([0, 2, 5], dtypes.int64))
    result = self.evaluate(output)
    self.assertAllEqual(["0", "4", "10"], result)
  def test_basic_bad_shape(self):
    """Non-scalar (batched) values are rejected at initializer construction."""
    keys = dataset_ops.Dataset.range(100)
    values = dataset_ops.Dataset.range(100).map(
        lambda x: string_ops.as_string(x * 2))
    values = values.batch(4)
    ds = dataset_ops.Dataset.zip((keys, values))
    with self.assertRaises(ValueError):
      lookup_ops.DatasetInitializer(ds)
  def test_from_file(self):
    """TextLineDataset + enumerate builds a line-number->line table."""
    vocabulary_file = self._createVocabFile("test.txt", ("one", "two", "three"))
    ds = reader_ops.TextLineDataset(vocabulary_file)
    ds = ds.enumerate(start=1)
    init = lookup_ops.DatasetInitializer(ds)
    table = self.getHashTable()(init, default_value="")
    self.initialize_table(table)
    output = table.lookup(constant_op.constant([2, 3, 4], dtypes.int64))
    result = self.evaluate(output)
    # Key 4 is past the file's three lines, so it maps to the default "".
    self.assertAllEqual(["two", "three", ""], result)
  def test_from_multiple_files(self):
    """Enumeration continues across concatenated vocabulary files."""
    vocabulary_file1 = self._createVocabFile("test1.txt",
                                             ("one", "two", "three"))
    vocabulary_file2 = self._createVocabFile("test2.txt",
                                             ("four", "five", "six"))
    ds = reader_ops.TextLineDataset([vocabulary_file1, vocabulary_file2])
    ds = ds.enumerate(start=1)
    init = lookup_ops.DatasetInitializer(ds)
    table = self.getHashTable()(init, default_value="")
    self.initialize_table(table)
    output = table.lookup(constant_op.constant([2, 3, 4], dtypes.int64))
    result = self.evaluate(output)
    self.assertAllEqual(["two", "three", "four"], result)
  def test_map_variable(self):
    """The dataset pipeline may capture and mutate a Variable."""
    ds = dataset_ops.Dataset.range(100)
    captured_var = variables.Variable(0)
    def func(_):
      return captured_var.assign_add(1)
    ds = ds.map(func)
    ds = ds.enumerate(start=1)
    init = lookup_ops.DatasetInitializer(ds)
    table = self.getHashTable()(init, default_value=-1)
    self.evaluate(captured_var.initializer)
    self.initialize_table(table)
    # Key 101 is outside the 100 enumerated entries -> default -1.
    output = table.lookup(constant_op.constant([1, 2, 101], dtypes.int64))
    result = self.evaluate(output)
    self.assertAllEqual([1, 2, -1], result)
  def test_compatibility(self):
    """DatasetInitializer works in graph mode via tables_initializer()."""
    with ops.Graph().as_default():
      keys = dataset_ops.Dataset.range(100)
      values = dataset_ops.Dataset.range(100).map(string_ops.as_string)
      ds = dataset_ops.Dataset.zip((keys, values))
      init = lookup_ops.DatasetInitializer(ds)
      table = self.getHashTable()(init, default_value="")
      output = table.lookup(constant_op.constant([0, 2, 5], dtypes.int64))
      self.evaluate(lookup_ops.tables_initializer())
      result = self.evaluate(output)
      self.assertAllEqual(["0", "2", "5"], result)
class InitializeTableFromFileOpTest(BaseLookupTableTest):
  """Tests for tables initialized from text files via TextFileInitializer.

  Fixes relative to the previous revision:
    * Removed a stray non-Python token that had been pasted into
      testInitializeInt64Table and broke the file's syntax.
    * testIdToStringTable / testStringToIdTable / testInt64ToIdTable used
      `self.assertTrue(<expected>, init._shared_name)`, which treats the
      second argument as a failure *message* and therefore always passes.
      They now use `self.assertIn`, matching the rest of this class.
  """

  def _createVocabFile(self, basename, values=("brain", "salad", "surgery")):
    """Writes `values` one per line into a temp file; returns its path."""
    vocabulary_file = os.path.join(self.get_temp_dir(), basename)
    with open(vocabulary_file, "w") as f:
      f.write("\n".join(values) + "\n")
    return vocabulary_file

  def testInitializeStringTable(self):
    """Whole-line string keys map to their line numbers."""
    vocabulary_file = self._createVocabFile("one_column_1.txt")
    default_value = -1
    init = lookup_ops.TextFileInitializer(
        vocabulary_file, dtypes.string, lookup_ops.TextFileIndex.WHOLE_LINE,
        dtypes.int64, lookup_ops.TextFileIndex.LINE_NUMBER)
    # Shared name encodes "<file>_<key index>_<value index>".
    self.assertIn("one_column_1.txt_-2_-1", init._shared_name)
    table = self.getHashTable()(init, default_value)
    self.initialize_table(table)
    output = table.lookup(constant_op.constant(["brain", "salad", "tank"]))
    result = self.evaluate(output)
    self.assertAllEqual([0, 1, -1], result)

  def testInitializeInt64Table(self):
    """Whole-line int64 keys map to their line numbers."""
    vocabulary_file = self._createVocabFile(
        "one_column_int64.txt", values=("42", "1", "-1000"))
    with self.cached_session():
      default_value = -1
      init = lookup_ops.TextFileInitializer(
          vocabulary_file, dtypes.int64, lookup_ops.TextFileIndex.WHOLE_LINE,
          dtypes.int64, lookup_ops.TextFileIndex.LINE_NUMBER)
      self.assertIn("one_column_int64.txt_-2_-1", init._shared_name)
      table = self.getHashTable()(init, default_value)
      self.initialize_table(table)
      output = table.lookup(
          constant_op.constant((42, 1, 11), dtype=dtypes.int64))
      result = self.evaluate(output)
      self.assertAllEqual([0, 1, -1], result)

  def testInitializeIndexTable(self):
    """Line numbers as keys map back to the whole line as value."""
    vocabulary_file = self._createVocabFile("one_column_2.txt")
    with self.cached_session():
      default_value = "UNK"
      key_index = lookup_ops.TextFileIndex.LINE_NUMBER
      value_index = lookup_ops.TextFileIndex.WHOLE_LINE
      init = lookup_ops.TextFileInitializer(
          vocabulary_file, dtypes.int64, key_index, dtypes.string, value_index)
      self.assertIn("one_column_2.txt_-1_-2", init._shared_name)
      table = self.getHashTable()(init, default_value)
      self.initialize_table(table)
      input_values = constant_op.constant([0, 1, 2, 3], dtypes.int64)
      output = table.lookup(input_values)
      result = self.evaluate(output)
      self.assertAllEqual([b"brain", b"salad", b"surgery", b"UNK"], result)

  def testMultiColumn(self):
    """Keys and values may come from arbitrary tab-separated columns."""
    vocabulary_file = os.path.join(self.get_temp_dir(), "three_columns.txt")
    with open(vocabulary_file, "w") as f:
      f.write("\n".join(["0\tbrain\t1", "1\tsalad\t5", "2\tsurgery\t6"]) + "\n")
    with self.cached_session():
      default_value = -1
      key_index = 1
      value_index = 2
      init = lookup_ops.TextFileInitializer(
          vocabulary_file, dtypes.string, key_index, dtypes.int64, value_index)
      self.assertIn("three_columns.txt_1_2", init._shared_name)
      table = self.getHashTable()(init, default_value)
      self.initialize_table(table)
      input_string = constant_op.constant(["brain", "salad", "surgery"])
      output = table.lookup(input_string)
      result = self.evaluate(output)
      self.assertAllEqual([1, 5, 6], result)

  def testInvalidDataTypeInMultiColumn(self):
    """Reading a string column as int64 keys fails at initialization."""
    vocabulary_file = os.path.join(self.get_temp_dir(), "three_columns.txt")
    with open(vocabulary_file, "w") as f:
      f.write("\n".join(["0\tbrain\t1", "1\tsalad\t5", "2\tsurgery\t6"]) + "\n")
    with self.cached_session():
      default_value = -1
      key_index = 2
      value_index = 1
      init = lookup_ops.TextFileInitializer(
          vocabulary_file, dtypes.string, key_index, dtypes.int64, value_index)
      self.assertIn("three_columns.txt_2_1", init._shared_name)
      with self.assertRaisesOpError("is not a valid"):
        table = self.getHashTable()(init, default_value)
        self.initialize_table(table)

  def testInvalidDataType(self):
    """WHOLE_LINE keys must be string; int64 is rejected with ValueError."""
    vocabulary_file = self._createVocabFile("one_column_3.txt")
    with self.cached_session():
      default_value = "UNK"
      key_index = lookup_ops.TextFileIndex.WHOLE_LINE
      value_index = lookup_ops.TextFileIndex.LINE_NUMBER
      with self.assertRaises(ValueError):
        init = lookup_ops.TextFileInitializer(vocabulary_file, dtypes.int64,
                                              key_index, dtypes.string,
                                              value_index)
        self.assertIn("one_column_3.txt_-2_-1", init._shared_name)
        self.getHashTable()(init, default_value)

  def testInvalidIndex(self):
    """A column index past the file's column count fails at init time."""
    vocabulary_file = self._createVocabFile("one_column_4.txt")
    with self.cached_session():
      default_value = -1
      key_index = 1  # second column of the line
      value_index = lookup_ops.TextFileIndex.LINE_NUMBER
      init = lookup_ops.TextFileInitializer(
          vocabulary_file, dtypes.string, key_index, dtypes.int64, value_index)
      self.assertIn("one_column_4.txt_1_-1", init._shared_name)
      with self.assertRaisesOpError("Invalid number of columns"):
        table = self.getHashTable()(init, default_value)
        self.initialize_table(table)

  def testInitializeSameTableWithMultipleNodes(self):
    """Three tables built from the same file share a name and all work."""
    vocabulary_file = self._createVocabFile("one_column_5.txt")
    with self.cached_session():
      default_value = -1
      init1 = lookup_ops.TextFileInitializer(
          vocabulary_file, dtypes.string, lookup_ops.TextFileIndex.WHOLE_LINE,
          dtypes.int64, lookup_ops.TextFileIndex.LINE_NUMBER)
      self.assertIn("one_column_5.txt_-2_-1", init1._shared_name)
      table1 = self.getHashTable()(init1, default_value)
      init2 = lookup_ops.TextFileInitializer(
          vocabulary_file, dtypes.string, lookup_ops.TextFileIndex.WHOLE_LINE,
          dtypes.int64, lookup_ops.TextFileIndex.LINE_NUMBER)
      self.assertIn("one_column_5.txt_-2_-1", init2._shared_name)
      table2 = self.getHashTable()(init2, default_value)
      init3 = lookup_ops.TextFileInitializer(
          vocabulary_file, dtypes.string, lookup_ops.TextFileIndex.WHOLE_LINE,
          dtypes.int64, lookup_ops.TextFileIndex.LINE_NUMBER)
      self.assertIn("one_column_5.txt_-2_-1", init3._shared_name)
      table3 = self.getHashTable()(init3, default_value)
      self.evaluate(lookup_ops.tables_initializer())
      input_string = constant_op.constant(["brain", "salad", "tank"])
      output1 = table1.lookup(input_string)
      output2 = table2.lookup(input_string)
      output3 = table3.lookup(input_string)
      out1, out2, out3 = self.evaluate([output1, output2, output3])
      self.assertAllEqual([0, 1, -1], out1)
      self.assertAllEqual([0, 1, -1], out2)
      self.assertAllEqual([0, 1, -1], out3)

  def testInitializeTableWithNoFilename(self):
    """An empty filename is rejected with ValueError."""
    with self.cached_session():
      default_value = -1
      with self.assertRaises(ValueError):
        self.getHashTable()(lookup_ops.TextFileInitializer(
            "", dtypes.string, lookup_ops.TextFileIndex.WHOLE_LINE,
            dtypes.int64, lookup_ops.TextFileIndex.LINE_NUMBER), default_value)

  def testInitializeWithVocabSize(self):
    """vocab_size caps (or validates) the number of records read."""
    with self.cached_session():
      default_value = -1
      vocab_size = 3
      vocabulary_file1 = self._createVocabFile("one_column6.txt")
      init1 = lookup_ops.TextFileInitializer(
          vocabulary_file1,
          dtypes.string,
          lookup_ops.TextFileIndex.WHOLE_LINE,
          dtypes.int64,
          lookup_ops.TextFileIndex.LINE_NUMBER,
          vocab_size=vocab_size)
      self.assertIn("one_column6.txt_3_-2_-1", init1._shared_name)
      table1 = self.getHashTable()(init1, default_value)
      # Initialize from file.
      self.initialize_table(table1)
      self.assertEqual(vocab_size, self.evaluate(table1.size()))
      vocabulary_file2 = self._createVocabFile("one_column7.txt")
      vocab_size = 5
      init2 = lookup_ops.TextFileInitializer(
          vocabulary_file2,
          dtypes.string,
          lookup_ops.TextFileIndex.WHOLE_LINE,
          dtypes.int64,
          lookup_ops.TextFileIndex.LINE_NUMBER,
          vocab_size=vocab_size)
      self.assertIn("one_column7.txt_5_-2_-1", init2._shared_name)
      # vocab_size larger than the file's line count is an error.
      with self.assertRaisesOpError("Invalid vocab_size"):
        table2 = self.getHashTable()(init2, default_value)
        self.initialize_table(table2)
      vocab_size = 1
      vocabulary_file3 = self._createVocabFile("one_column3.txt")
      init3 = lookup_ops.TextFileInitializer(
          vocabulary_file3,
          dtypes.string,
          lookup_ops.TextFileIndex.WHOLE_LINE,
          dtypes.int64,
          lookup_ops.TextFileIndex.LINE_NUMBER,
          vocab_size=vocab_size)
      self.assertIn("one_column3.txt_1_-2_-1", init3._shared_name)
      table3 = self.getHashTable()(init3, default_value)
      # Smaller vocab size reads only vocab_size records.
      self.initialize_table(table3)
      self.assertEqual(vocab_size, self.evaluate(table3.size()))

  @test_util.run_v1_only("placeholder usage")
  def testFeedVocabularyName(self):
    """The vocabulary filename can be fed at initializer run time."""
    vocabulary_file = self._createVocabFile("feed_vocabulary.txt")
    with self.cached_session():
      default_value = -1
      init = lookup_ops.TextFileInitializer(
          "old_file.txt", dtypes.string, lookup_ops.TextFileIndex.WHOLE_LINE,
          dtypes.int64, lookup_ops.TextFileIndex.LINE_NUMBER)
      self.assertIn("old_file.txt_-2_-1", init._shared_name)
      table = self.getHashTable()(init, default_value)
      # Initialize with non existing file (old_file.txt) should fail.
      # TODO(yleon): Update message, which might change per FileSystem.
      with self.assertRaisesOpError("old_file.txt"):
        self.evaluate(table.initializer)
      # Initialize the model feeding the vocabulary file.
      filenames = ops.get_collection(ops.GraphKeys.ASSET_FILEPATHS)
      table.initializer.run(feed_dict={filenames[0]: vocabulary_file})
      input_string = constant_op.constant(["brain", "salad", "tank"])
      output = table.lookup(input_string)
      result = self.evaluate(output)
      self.assertAllEqual([0, 1, -1], result)

  def testInvalidFilenames(self):
    """Non-string and non-scalar filename tensors are rejected."""
    vocabulary_file = self._createVocabFile("filename_shape.txt")
    with self.cached_session():
      default_value = -1
      # Invalid data type
      other_type = constant_op.constant(1)
      with self.assertRaises(Exception) as cm:
        self.getHashTable()(lookup_ops.TextFileInitializer(
            other_type, dtypes.string, lookup_ops.TextFileIndex.WHOLE_LINE,
            dtypes.int64, lookup_ops.TextFileIndex.LINE_NUMBER), default_value)
      self.assertIsInstance(cm.exception, (ValueError, TypeError))
      # Non-scalar filename
      filenames = constant_op.constant([vocabulary_file, vocabulary_file])
      if not context.executing_eagerly():
        with self.assertRaises(Exception) as cm:
          self.getHashTable()(lookup_ops.TextFileInitializer(
              filenames, dtypes.string, lookup_ops.TextFileIndex.WHOLE_LINE,
              dtypes.int64, lookup_ops.TextFileIndex.LINE_NUMBER),
                              default_value)
        self.assertIsInstance(cm.exception, (ValueError, TypeError))
      else:
        with self.assertRaises(errors_impl.InvalidArgumentError):
          self.getHashTable()(lookup_ops.TextFileInitializer(
              filenames, dtypes.string, lookup_ops.TextFileIndex.WHOLE_LINE,
              dtypes.int64, lookup_ops.TextFileIndex.LINE_NUMBER),
                              default_value)

  def testIdToStringTable(self):
    """TextFileStringTableInitializer maps line numbers to lines."""
    vocab_file = self._createVocabFile("feat_to_id_1.txt")
    with self.cached_session():
      default_value = "UNK"
      vocab_size = 3
      init = lookup_ops.TextFileStringTableInitializer(
          vocab_file, vocab_size=vocab_size)
      # assertIn, not assertTrue: with two positional args assertTrue treats
      # the second as a failure message and the check is vacuous.
      self.assertIn("feat_to_id_1.txt_3_-1_-2", init._shared_name)
      table = self.getHashTable()(init, default_value)
      self.initialize_table(table)
      input_values = constant_op.constant([0, 1, 2, 3], dtypes.int64)
      out = table.lookup(input_values)
      self.assertAllEqual([b"brain", b"salad", b"surgery", b"UNK"],
                          self.evaluate(out))
      self.assertEqual(vocab_size, self.evaluate(table.size()))

  def testStringToIdTable(self):
    """TextFileIdTableInitializer maps lines to line numbers."""
    vocab_file = self._createVocabFile("feat_to_id_2.txt")
    with self.cached_session():
      default_value = -1
      vocab_size = 3
      init = lookup_ops.TextFileIdTableInitializer(
          vocab_file, vocab_size=vocab_size)
      # assertIn, not assertTrue: see testIdToStringTable.
      self.assertIn("feat_to_id_2.txt_3_-2_-1", init._shared_name)
      table = self.getHashTable()(init, default_value)
      self.initialize_table(table)
      input_string = constant_op.constant(["brain", "salad", "surgery", "UNK"])
      out = table.lookup(input_string)
      self.assertAllEqual([0, 1, 2, -1], self.evaluate(out))
      self.assertEqual(vocab_size, self.evaluate(table.size()))

  def testInt64ToIdTable(self):
    """TextFileIdTableInitializer also supports int64 keys."""
    vocab_file = self._createVocabFile(
        "feat_to_id_3.txt", values=("42", "1", "-1000"))
    with self.cached_session():
      default_value = -1
      vocab_size = 3
      init = lookup_ops.TextFileIdTableInitializer(
          vocab_file, vocab_size=vocab_size, key_dtype=dtypes.int64)
      # assertIn, not assertTrue: see testIdToStringTable.
      self.assertIn("feat_to_id_3.txt_3_-2_-1", init._shared_name)
      table = self.getHashTable()(init, default_value)
      self.initialize_table(table)
      out = table.lookup(
          constant_op.constant((42, 1, -1000, 11), dtype=dtypes.int64))
      self.assertAllEqual((0, 1, 2, -1), self.evaluate(out))
      self.assertEqual(vocab_size, self.evaluate(table.size()))
class StaticVocabularyTableTest(BaseLookupTableTest):
  """Tests for StaticVocabularyTable (vocab file + OOV hash buckets)."""
  def _createVocabFile(self, basename, values=("brain", "salad", "surgery")):
    """Writes `values` one per line into a temp file; returns its path."""
    vocabulary_file = os.path.join(self.get_temp_dir(), basename)
    with open(vocabulary_file, "w") as f:
      f.write("\n".join(values) + "\n")
    return vocabulary_file
  def testStringStaticVocabularyTable(self):
    """In-vocab strings get file ids; OOV strings get bucket ids."""
    vocab_file = self._createVocabFile("feat_to_id_1.txt")
    vocab_size = 3
    oov_buckets = 1
    table = self.getVocabularyTable()(lookup_ops.TextFileIdTableInitializer(
        vocab_file, vocab_size=vocab_size), oov_buckets)
    self.initialize_table(table)
    input_string = constant_op.constant(["brain", "salad", "surgery", "UNK"])
    out = table.lookup(input_string)
    # "UNK" is out of vocab -> id vocab_size + bucket == 3.
    self.assertAllEqual([0, 1, 2, 3], self.evaluate(out))
    self.assertEqual(vocab_size + oov_buckets, self.evaluate(table.size()))
  def testInt32StaticVocabularyTable(self):
    """int32 lookups are supported via lookup_key_dtype on int64 tables."""
    vocab_file = self._createVocabFile("feat_to_id_2.txt", ("42", "1", "-1000"))
    vocab_size = 3
    oov_buckets = 1
    table = self.getVocabularyTable()(
        lookup_ops.TextFileIdTableInitializer(
            vocab_file, vocab_size=vocab_size, key_dtype=dtypes.int64),
        oov_buckets,
        lookup_key_dtype=dtypes.int32)
    self.initialize_table(table)
    values = constant_op.constant((42, 1, -1000, 11), dtype=dtypes.int32)
    out = table.lookup(values)
    self.assertAllEqual([0, 1, 2, 3], self.evaluate(out))
    self.assertEqual(vocab_size + oov_buckets, self.evaluate(table.size()))
  def testInt64StaticVocabularyTable(self):
    """int64 keys with one OOV bucket."""
    vocab_file = self._createVocabFile("feat_to_id_3.txt", ("42", "1", "-1000"))
    vocab_size = 3
    oov_buckets = 1
    table = self.getVocabularyTable()(lookup_ops.TextFileIdTableInitializer(
        vocab_file, vocab_size=vocab_size, key_dtype=dtypes.int64), oov_buckets)
    self.initialize_table(table)
    values = constant_op.constant((42, 1, -1000, 11), dtype=dtypes.int64)
    out = table.lookup(values)
    self.assertAllEqual([0, 1, 2, 3], self.evaluate(out))
    self.assertEqual(vocab_size + oov_buckets, self.evaluate(table.size()))
  def testStringStaticVocabularyTableNoInitializer(self):
    """With no initializer, every key is hashed into an OOV bucket."""
    oov_buckets = 5
    # Set a table that only uses hash buckets, for each input value returns
    # an id calculated by fingerprint("input") mod oov_buckets.
    table = self.getVocabularyTable()(None, oov_buckets)
    self.initialize_table(table)
    values = constant_op.constant(("brain", "salad", "surgery"))
    out = table.lookup(values)
    self.assertAllEqual(
        [
            3,  # fingerprint("brain") mod 5.
            1,  # fingerprint("salad") mod 5.
            4  # fingerprint("surgery") mod 5
        ],
        self.evaluate(out))
    self.assertEqual(oov_buckets, self.evaluate(table.size()))
  def testStaticVocabularyTableWithMultipleInitializers(self):
    """Two tables sharing one initializer behave identically."""
    vocab_file = self._createVocabFile("feat_to_id_4.txt")
    vocab_size = 3
    oov_buckets = 3
    init = lookup_ops.TextFileIdTableInitializer(
        vocab_file, vocab_size=vocab_size)
    table1 = self.getVocabularyTable()(init, oov_buckets, name="table1")
    table2 = self.getVocabularyTable()(init, oov_buckets, name="table2")
    self.evaluate(lookup_ops.tables_initializer())
    input_string = constant_op.constant(
        ["fruit", "brain", "salad", "surgery", "UNK"])
    out1 = table1.lookup(input_string)
    out2 = table2.lookup(input_string)
    out1, out2 = self.evaluate([out1, out2])
    # "fruit" and "UNK" both hash into OOV bucket id 5.
    self.assertAllEqual([5, 0, 1, 2, 5], out1)
    self.assertAllEqual([5, 0, 1, 2, 5], out2)
    self.assertEqual(vocab_size + oov_buckets, self.evaluate(table1.size()))
    self.assertEqual(vocab_size + oov_buckets, self.evaluate(table2.size()))
  def testStaticVocabularyTableInitializationAcrossSessions(self):
    """A table initialized in one session stays usable in the next."""
    vocab_file = self._createVocabFile("feat_to_id_5.txt")
    with self.cached_session():
      vocab_size = 3
      oov_buckets = 1
      table1 = self.getVocabularyTable()(lookup_ops.TextFileIdTableInitializer(
          vocab_file, vocab_size=vocab_size), oov_buckets)
      self.initialize_table(table1)
      input_string_1 = constant_op.constant(
          ["brain", "salad", "surgery", "UNK"])
      out1 = table1.lookup(input_string_1)
      self.assertAllEqual([0, 1, 2, 3], self.evaluate(out1))
      self.assertEqual(vocab_size + oov_buckets, self.evaluate(table1.size()))
    with self.cached_session():
      vocab_size = 3
      oov_buckets = 1
      # Underlying lookup table already initialized in previous session.
      # No need to initialize table2
      table2 = self.getVocabularyTable()(lookup_ops.TextFileIdTableInitializer(
          vocab_file, vocab_size=vocab_size), oov_buckets)
      input_string_2 = constant_op.constant(["fruit", "salad", "UNK"])
      out2 = table2.lookup(input_string_2)
      self.assertAllEqual([3, 1, 3], self.evaluate(out2))
      self.assertEqual(vocab_size + oov_buckets, self.evaluate(table2.size()))
  def testStaticVocabularyTableAssetTracking(self):
    """The vocab file is tracked as a checkpointable Asset."""
    vocab_file = self._createVocabFile("vocab.txt")
    vocab_size = 3
    oov_buckets = 1
    table = self.getVocabularyTable()(lookup_ops.TextFileIdTableInitializer(
        vocab_file, vocab_size=vocab_size), oov_buckets)
    object_graph_view = graph_view.ObjectGraphView(table)
    objects = object_graph_view.list_objects()
    assets = list(filter(lambda obj: isinstance(obj, tracking.Asset), objects))
    self.assertLen(assets, 1)
    self.assertEqual(
        self.evaluate(assets[0].asset_path), compat.as_bytes(vocab_file))
  def testSparseTensor(self):
    """Lookup on a SparseTensor preserves indices and dense shape."""
    vocab_file = self._createVocabFile("feat_to_id_7.txt")
    input_indices = [[0, 0], [0, 1], [2, 0], [2, 2], [3, 0]]
    input_shape = [4, 4]
    sp_features = sparse_tensor.SparseTensor(
        constant_op.constant(input_indices, dtypes.int64),
        constant_op.constant(["brain", "salad", "brain", "surgery", "tarkus"],
                             dtypes.string),
        constant_op.constant(input_shape, dtypes.int64))
    table = self.getVocabularyTable()(lookup_ops.TextFileIdTableInitializer(
        vocab_file, vocab_size=3), 1)
    self.initialize_table(table)
    sp_ids = table.lookup(sp_features)
    self.assertAllEqual([5], sp_ids.values._shape_as_list())
    sp_ids_ind, sp_ids_val, sp_ids_shape = self.evaluate(
        [sp_ids.indices, sp_ids.values, sp_ids.dense_shape])
    self.assertAllEqual(input_indices, sp_ids_ind)
    self.assertAllEqual([0, 1, 0, 2, 3], sp_ids_val)
    self.assertAllEqual(input_shape, sp_ids_shape)
  def testRaggedTensor(self):
    """Lookup on a RaggedTensor preserves row splits."""
    vocab_file = self._createVocabFile("feat_to_id_7.txt")
    input_row_splits = [0, 2, 4, 5]
    ragged_features = ragged_tensor.RaggedTensor.from_row_splits(
        constant_op.constant(["brain", "salad", "brain", "surgery", "tarkus"],
                             dtypes.string),
        constant_op.constant(input_row_splits, dtypes.int64))
    table = self.getVocabularyTable()(lookup_ops.TextFileIdTableInitializer(
        vocab_file, vocab_size=3), 1)
    self.initialize_table(table)
    ragged_ids = table.lookup(ragged_features)
    self.assertAllEqual([5], ragged_ids.values._shape_as_list())
    ragged_ids_val, ragged_ids_row_splits = self.evaluate(
        [ragged_ids.values, ragged_ids.row_splits])
    self.assertAllEqual([0, 1, 0, 2, 3], ragged_ids_val)
    self.assertAllEqual(input_row_splits, ragged_ids_row_splits)
  def testInt32SparseTensor(self):
    """SparseTensor lookup with int32 values via lookup_key_dtype."""
    input_indices = [[0, 0], [0, 1], [2, 0], [2, 2], [3, 0]]
    input_shape = [4, 4]
    sp_features = sparse_tensor.SparseTensor(
        constant_op.constant(input_indices, dtypes.int64),
        constant_op.constant([42, 1, 42, -1000, 11], dtypes.int32),
        constant_op.constant(input_shape, dtypes.int64))
    table = self.getVocabularyTable()(
        lookup_ops.KeyValueTensorInitializer((42, 1, -1000), (0, 1, 2),
                                             dtypes.int64, dtypes.int64),
        1,
        lookup_key_dtype=dtypes.int32)
    self.initialize_table(table)
    sp_ids = table.lookup(sp_features)
    self.assertAllEqual([5], sp_ids.values._shape_as_list())
    sp_ids_ind, sp_ids_val, sp_ids_shape = self.evaluate(
        [sp_ids.indices, sp_ids.values, sp_ids.dense_shape])
    self.assertAllEqual(input_indices, sp_ids_ind)
    self.assertAllEqual([0, 1, 0, 2, 3], sp_ids_val)
    self.assertAllEqual(input_shape, sp_ids_shape)
  def testInt32RaggedTensor(self):
    """RaggedTensor lookup with int32 values via lookup_key_dtype."""
    input_row_splits = [0, 2, 4, 5]
    ragged_features = ragged_tensor.RaggedTensor.from_row_splits(
        constant_op.constant([42, 1, 42, -1000, 11], dtypes.int32),
        constant_op.constant(input_row_splits, dtypes.int64))
    table = self.getVocabularyTable()(
        lookup_ops.KeyValueTensorInitializer((42, 1, -1000), (0, 1, 2),
                                             dtypes.int64, dtypes.int64),
        1,
        lookup_key_dtype=dtypes.int32)
    self.initialize_table(table)
    ragged_ids = table.lookup(ragged_features)
    self.assertAllEqual([5], ragged_ids.values._shape_as_list())
    ragged_ids_val, ragged_ids_row_splits = self.evaluate(
        [ragged_ids.values, ragged_ids.row_splits])
    self.assertAllEqual([0, 1, 0, 2, 3], ragged_ids_val)
    self.assertAllEqual(input_row_splits, ragged_ids_row_splits)
  def testInt64SparseTensor(self):
    """SparseTensor lookup with native int64 values."""
    input_indices = [[0, 0], [0, 1], [2, 0], [2, 2], [3, 0]]
    input_shape = [4, 4]
    sp_features = sparse_tensor.SparseTensor(
        constant_op.constant(input_indices, dtypes.int64),
        constant_op.constant([42, 1, 42, -1000, 11], dtypes.int64),
        constant_op.constant(input_shape, dtypes.int64))
    table = self.getVocabularyTable()(lookup_ops.KeyValueTensorInitializer(
        (42, 1, -1000), (0, 1, 2), dtypes.int64, dtypes.int64), 1)
    self.initialize_table(table)
    sp_ids = table.lookup(sp_features)
    self.assertAllEqual([5], sp_ids.values._shape_as_list())
    sp_ids_ind, sp_ids_val, sp_ids_shape = self.evaluate(
        [sp_ids.indices, sp_ids.values, sp_ids.dense_shape])
    self.assertAllEqual(input_indices, sp_ids_ind)
    self.assertAllEqual([0, 1, 0, 2, 3], sp_ids_val)
    self.assertAllEqual(input_shape, sp_ids_shape)
  def testInt64RaggedTensor(self):
    """RaggedTensor lookup with native int64 values."""
    input_row_splits = [0, 2, 4, 5]
    ragged_features = ragged_tensor.RaggedTensor.from_row_splits(
        constant_op.constant([42, 1, 42, -1000, 11], dtypes.int64),
        constant_op.constant(input_row_splits, dtypes.int64))
    table = self.getVocabularyTable()(lookup_ops.KeyValueTensorInitializer(
        (42, 1, -1000), (0, 1, 2), dtypes.int64, dtypes.int64), 1)
    self.initialize_table(table)
    ragged_ids = table.lookup(ragged_features)
    self.assertAllEqual([5], ragged_ids.values._shape_as_list())
    ragged_ids_val, ragged_ids_row_splits = self.evaluate(
        [ragged_ids.values, ragged_ids.row_splits])
    self.assertAllEqual([0, 1, 0, 2, 3], ragged_ids_val)
    self.assertAllEqual(input_row_splits, ragged_ids_row_splits)
  def testStaticVocabularyTableNoInnerTable(self):
    """With no initializer there is no backing resource handle."""
    table = self.getVocabularyTable()(None, num_oov_buckets=1)
    self.assertIsNone(table.resource_handle)
class DenseHashTableOpTest(test.TestCase):
  """Tests for `lookup_ops.DenseHashTable`: insert/remove/lookup, resizing,
  export, and checkpointing behavior."""

  def testBasic(self):
    """Insert int64 keys, remove some, then look up present/missing keys."""
    with self.cached_session():
      keys = constant_op.constant([11, 12, 13, 14], dtypes.int64)
      values = constant_op.constant([0, 1, 2, 3], dtypes.int64)
      table = lookup_ops.DenseHashTable(
          dtypes.int64,
          dtypes.int64,
          default_value=-1,
          empty_key=0,
          deleted_key=-1)
      self.assertAllEqual(0, self.evaluate(table.size()))
      self.evaluate(table.insert(keys, values))
      self.assertAllEqual(4, self.evaluate(table.size()))
      # Removing a key that was never inserted (15) is a no-op.
      remove_string = constant_op.constant([12, 15], dtypes.int64)
      self.evaluate(table.remove(remove_string))
      self.assertAllEqual(3, self.evaluate(table.size()))
      input_string = constant_op.constant([11, 12, 15], dtypes.int64)
      output = table.lookup(input_string)
      self.assertAllEqual([3], output.get_shape())
      result = self.evaluate(output)
      # Removed (12) and absent (15) keys yield default_value (-1).
      self.assertAllEqual([0, -1, -1], result)
def testBasicBool(self):
with self.cached_session():
keys = constant_op.constant([11, 12, 13, 14], dtypes.int64)
values = constant_op.constant([True, True, True, True], dtypes.bool)
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.bool,
default_value=False,
empty_key=0,
deleted_key=-1)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(4, self.evaluate(table.size()))
remove_string = constant_op.constant([11, 15], dtypes.int64)
self.evaluate(table.remove(remove_string))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant([11, 12, 15], dtypes.int64)
output = table.lookup(input_string)
self.assertAllEqual([3], output.get_shape())
result = self.evaluate(output)
self.assertAllEqual([False, True, False], result)
def testSameEmptyAndDeletedKey(self):
with self.cached_session():
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
"Empty and deleted keys"):
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=-1,
empty_key=42,
deleted_key=42)
self.assertAllEqual(0, self.evaluate(table.size()))
  @test_util.run_v1_only("uses placeholders")
  def testLookupUnknownShape(self):
    """Lookup through a placeholder: output shape is unknown until fed."""
    with self.cached_session():
      keys = constant_op.constant([11, 12, 13], dtypes.int64)
      values = constant_op.constant([0, 1, 2], dtypes.int64)
      table = lookup_ops.DenseHashTable(
          dtypes.int64,
          dtypes.int64,
          default_value=-1,
          empty_key=0,
          deleted_key=-1)
      self.evaluate(table.insert(keys, values))
      self.assertAllEqual(3, self.evaluate(table.size()))
      placeholder_keys = array_ops.placeholder(dtypes.int64)
      output = table.lookup(placeholder_keys)
      # No static shape can be inferred from an unspecified placeholder.
      self.assertAllEqual(None, output.get_shape())
      result = output.eval({placeholder_keys: [11, 12, 15]})
      self.assertAllEqual([0, 1, -1], result)
def testMapStringToFloat(self):
with self.cached_session():
keys = constant_op.constant(["a", "b", "c", "d"], dtypes.string)
values = constant_op.constant([0.0, 1.1, 2.2, 3.3], dtypes.float32)
default_value = constant_op.constant(-1.5, dtypes.float32)
table = lookup_ops.DenseHashTable(
dtypes.string,
dtypes.float32,
default_value=default_value,
empty_key="",
deleted_key="$")
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(4, self.evaluate(table.size()))
remove_string = constant_op.constant(["b", "e"])
self.evaluate(table.remove(remove_string))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["a", "b", "d", "e"], dtypes.string)
output = table.lookup(input_string)
self.assertAllEqual([4], output.get_shape())
result = self.evaluate(output)
self.assertAllClose([0, -1.5, 3.3, -1.5], result)
  def testMapInt64ToFloat(self):
    """int64 keys to float values, for both float32 and float64 tables."""
    for float_dtype in [dtypes.float32, dtypes.float64]:
      with self.cached_session():
        keys = constant_op.constant([11, 12, 13, 14], dtypes.int64)
        values = constant_op.constant([0.0, 1.1, 2.2, 3.3], float_dtype)
        default_value = constant_op.constant(-1.5, float_dtype)
        table = lookup_ops.DenseHashTable(
            dtypes.int64,
            float_dtype,
            default_value=default_value,
            empty_key=0,
            deleted_key=-1)
        self.assertAllEqual(0, self.evaluate(table.size()))
        self.evaluate(table.insert(keys, values))
        self.assertAllEqual(4, self.evaluate(table.size()))
        # Removing an absent key (15) is a no-op.
        remove_string = constant_op.constant([12, 15], dtypes.int64)
        self.evaluate(table.remove(remove_string))
        self.assertAllEqual(3, self.evaluate(table.size()))
        input_string = constant_op.constant([11, 12, 14, 15], dtypes.int64)
        output = table.lookup(input_string)
        self.assertAllEqual([4], output.get_shape())
        result = self.evaluate(output)
        # Removed (12) and absent (15) keys yield default_value (-1.5).
        self.assertAllClose([0, -1.5, 3.3, -1.5], result)
  def testVectorValues(self):
    """Vector-valued entries; bucket count doubles (4 -> 8) as table fills."""
    with self.cached_session():
      keys = constant_op.constant([11, 12, 13], dtypes.int64)
      values = constant_op.constant([[0, 1, 2, 3], [3, 4, 5, 6], [6, 7, 8, 9]],
                                    dtypes.int64)
      default_value = constant_op.constant([-1, -2, -3, -4], dtypes.int64)
      table = lookup_ops.DenseHashTable(
          dtypes.int64,
          dtypes.int64,
          default_value=default_value,
          empty_key=0,
          deleted_key=-1,
          initial_num_buckets=4)
      self.assertAllEqual(0, self.evaluate(table.size()))
      self.evaluate(table.insert(keys, values))
      self.assertAllEqual(3, self.evaluate(table.size()))
      self.assertAllEqual(4, len(self.evaluate(table.export()[0])))
      self.evaluate(
          table.insert(
              constant_op.constant([14], dtypes.int64),
              constant_op.constant([[2, 3, 4, 5]], dtypes.int64)))
      self.assertAllEqual(4, self.evaluate(table.size()))
      # The 4th entry triggers a resize from 4 to 8 buckets.
      self.assertAllEqual(8, self.evaluate(table.export()[0]).shape[0] if False else len(self.evaluate(table.export()[0])))
      remove_string = constant_op.constant([12, 16], dtypes.int64)
      self.evaluate(table.remove(remove_string))
      self.assertAllEqual(3, self.evaluate(table.size()))
      # Removal does not shrink the bucket count.
      self.assertAllEqual(8, len(self.evaluate(table.export()[0])))
      input_string = constant_op.constant([11, 12, 14, 15], dtypes.int64)
      output = table.lookup(input_string)
      self.assertAllEqual([4, 4],
                          output.shape,
                          msg="Saw shape: %s" % output.shape)
      result = self.evaluate(output)
      # Removed (12) and absent (15) keys yield the vector default_value.
      self.assertAllEqual(
          [[0, 1, 2, 3], [-1, -2, -3, -4], [2, 3, 4, 5], [-1, -2, -3, -4]],
          result)
  def testVectorKeys(self):
    """Composite (rank-1) keys with vector empty/deleted sentinels."""
    with self.cached_session():
      keys = constant_op.constant([[0, 1], [1, 2], [1, 3]], dtypes.int64)
      values = constant_op.constant([10, 11, 12], dtypes.int64)
      empty_key = constant_op.constant([0, 3], dtypes.int64)
      deleted_key = constant_op.constant([-1, -1], dtypes.int64)
      default_value = constant_op.constant(-1, dtypes.int64)
      table = lookup_ops.DenseHashTable(
          dtypes.int64,
          dtypes.int64,
          default_value=default_value,
          empty_key=empty_key,
          deleted_key=deleted_key,
          initial_num_buckets=8)
      self.assertAllEqual(0, self.evaluate(table.size()))
      self.evaluate(table.insert(keys, values))
      self.assertAllEqual(3, self.evaluate(table.size()))
      self.evaluate(
          table.insert(
              constant_op.constant([[0, 0]], dtypes.int64),
              constant_op.constant([13], dtypes.int64)))
      self.assertAllEqual(4, self.evaluate(table.size()))
      self.assertAllEqual(8, len(self.evaluate(table.export()[0])))
      # [7, 8] was never inserted, so only [1, 2] is actually removed.
      remove_string = constant_op.constant([[1, 2], [7, 8]], dtypes.int64)
      self.evaluate(table.remove(remove_string))
      self.assertAllEqual(3, self.evaluate(table.size()))
      self.assertAllEqual(8, len(self.evaluate(table.export()[0])))
      input_string = constant_op.constant([[0, 1], [1, 2], [1, 3], [0, 2]],
                                          dtypes.int64)
      output = table.lookup(input_string)
      self.assertAllEqual([4], output.get_shape())
      result = self.evaluate(output)
      self.assertAllEqual([10, -1, 12, -1], result)
  def testResize(self):
    """Bucket count grows (4 -> 16) as inserts push past the load factor."""
    with self.cached_session():
      keys = constant_op.constant([11, 12, 13], dtypes.int64)
      values = constant_op.constant([0, 1, 2], dtypes.int64)
      table = lookup_ops.DenseHashTable(
          dtypes.int64,
          dtypes.int64,
          default_value=-1,
          empty_key=0,
          deleted_key=-1,
          initial_num_buckets=4)
      self.assertAllEqual(0, self.evaluate(table.size()))
      self.evaluate(table.insert(keys, values))
      self.assertAllEqual(3, self.evaluate(table.size()))
      self.assertAllEqual(4, len(self.evaluate(table.export()[0])))
      # Remove one present key (12) and one absent key (99).
      keys2 = constant_op.constant([12, 99], dtypes.int64)
      self.evaluate(table.remove(keys2))
      self.assertAllEqual(2, self.evaluate(table.size()))
      self.assertAllEqual(4, len(self.evaluate(table.export()[0])))
      # 13 is re-inserted here with a new value (3).
      keys3 = constant_op.constant([13, 14, 15, 16, 17], dtypes.int64)
      values3 = constant_op.constant([3, 4, 5, 6, 7], dtypes.int64)
      self.evaluate(table.insert(keys3, values3))
      self.assertAllEqual(6, self.evaluate(table.size()))
      self.assertAllEqual(16, len(self.evaluate(table.export()[0])))
      keys4 = constant_op.constant([10, 11, 12, 13, 14, 15, 16, 17, 18],
                                   dtypes.int64)
      output = table.lookup(keys4)
      # 12 stays removed; 13 reflects its re-inserted value.
      self.assertAllEqual([-1, 0, -1, 3, 4, 5, 6, 7, -1], self.evaluate(output))
  def testExport(self):
    """export() returns raw buckets: empty slots carry empty_key, removed
    slots carry deleted_key alongside their stale value."""
    with self.cached_session():
      keys = constant_op.constant([11, 12, 13, 14], dtypes.int64)
      values = constant_op.constant([1, 2, 3, 4], dtypes.int64)
      table = lookup_ops.DenseHashTable(
          dtypes.int64,
          dtypes.int64,
          default_value=-1,
          empty_key=100,
          deleted_key=200,
          initial_num_buckets=8)
      self.assertAllEqual(0, self.evaluate(table.size()))
      self.evaluate(table.insert(keys, values))
      self.assertAllEqual(4, self.evaluate(table.size()))
      keys2 = constant_op.constant([12, 15], dtypes.int64)
      self.evaluate(table.remove(keys2))
      self.assertAllEqual(3, self.evaluate(table.size()))
      exported_keys, exported_values = table.export()
      np_keys = self.evaluate(exported_keys)
      np_values = self.evaluate(exported_values)
      # Export size equals the bucket count, not the number of live entries.
      self.assertAllEqual(8, len(np_keys))
      self.assertAllEqual(8, len(np_values))
      # pair up keys and values, drop extra added dimension
      pairs = np.dstack((np_keys.flatten(), np_values.flatten()))[0]
      # sort by key
      pairs = pairs[pairs[:, 0].argsort()]
      # Four empty buckets (100, 0) and one deleted bucket (200) that still
      # holds 12's old value 2.
      self.assertAllEqual([[11, 1], [13, 3], [14, 4], [100, 0], [100, 0],
                           [100, 0], [100, 0], [200, 2]], pairs)
  @test_util.run_v1_only("Saver V1 only")
  def testSaveRestore(self):
    """Saver checkpoint round-trip restores contents AND bucket count."""
    save_dir = os.path.join(self.get_temp_dir(), "save_restore")
    save_path = os.path.join(tempfile.mkdtemp(prefix=save_dir), "hash")
    with self.session(graph=ops.Graph()) as sess:
      default_value = -1
      empty_key = 0
      deleted_key = -1
      keys = constant_op.constant([11, 12, 13, 14], dtypes.int64)
      values = constant_op.constant([0, 1, 2, 3], dtypes.int64)
      table = lookup_ops.DenseHashTable(
          dtypes.int64,
          dtypes.int64,
          default_value=default_value,
          empty_key=empty_key,
          deleted_key=deleted_key,
          name="t1",
          checkpoint=True,
          initial_num_buckets=32)
      save = saver.Saver()
      self.assertAllEqual(0, table.size())
      table.insert(keys, values).run()
      self.assertAllEqual(4, table.size())
      self.assertAllEqual(32, len(table.export()[0].eval()))
      keys2 = constant_op.constant([12, 15], dtypes.int64)
      table.remove(keys2).run()
      self.assertAllEqual(3, table.size())
      self.assertAllEqual(32, len(table.export()[0].eval()))
      val = save.save(sess, save_path)
      self.assertIsInstance(val, six.string_types)
      self.assertEqual(save_path, val)
    with self.session(graph=ops.Graph()) as sess:
      # Re-create with a different bucket count and different contents;
      # restore must overwrite both.
      table = lookup_ops.DenseHashTable(
          dtypes.int64,
          dtypes.int64,
          default_value=default_value,
          empty_key=empty_key,
          deleted_key=deleted_key,
          name="t1",
          checkpoint=True,
          initial_num_buckets=64)
      table.insert(
          constant_op.constant([11, 14], dtypes.int64),
          constant_op.constant([12, 24], dtypes.int64)).run()
      self.assertAllEqual(2, table.size())
      self.assertAllEqual(64, len(table.export()[0].eval()))
      save = saver.Saver()
      # Restore the saved values in the parameter nodes.
      save.restore(sess, save_path)
      self.assertAllEqual(3, table.size())
      self.assertAllEqual(32, len(table.export()[0].eval()))
      input_string = constant_op.constant([10, 11, 12, 13, 14], dtypes.int64)
      output = table.lookup(input_string)
      self.assertAllEqual([-1, 0, -1, 2, 3], output)
  @test_util.run_v1_only("Saver V1 only")
  def testSaveRestoreOnlyTable(self):
    """Same round-trip as testSaveRestore, but the Saver is given only the
    table (Saver([table])) instead of discovering all saveables."""
    save_dir = os.path.join(self.get_temp_dir(), "save_restore")
    save_path = os.path.join(tempfile.mkdtemp(prefix=save_dir), "hash")
    with self.session(graph=ops.Graph()) as sess:
      default_value = -1
      empty_key = 0
      deleted_key = -1
      keys = constant_op.constant([11, 12, 13, 14], dtypes.int64)
      values = constant_op.constant([0, 1, 2, 3], dtypes.int64)
      table = lookup_ops.DenseHashTable(
          dtypes.int64,
          dtypes.int64,
          default_value=default_value,
          empty_key=empty_key,
          deleted_key=deleted_key,
          name="t1",
          checkpoint=True,
          initial_num_buckets=32)
      save = saver.Saver([table])
      self.assertAllEqual(0, table.size())
      table.insert(keys, values).run()
      self.assertAllEqual(4, table.size())
      self.assertAllEqual(32, len(table.export()[0].eval()))
      keys2 = constant_op.constant([12, 15], dtypes.int64)
      table.remove(keys2).run()
      self.assertAllEqual(3, table.size())
      self.assertAllEqual(32, len(table.export()[0].eval()))
      val = save.save(sess, save_path)
      self.assertIsInstance(val, six.string_types)
      self.assertEqual(save_path, val)
    with self.session(graph=ops.Graph()) as sess:
      # Re-create with different bucket count/contents; restore overwrites.
      table = lookup_ops.DenseHashTable(
          dtypes.int64,
          dtypes.int64,
          default_value=default_value,
          empty_key=empty_key,
          deleted_key=deleted_key,
          name="t1",
          checkpoint=True,
          initial_num_buckets=64)
      table.insert(
          constant_op.constant([11, 14], dtypes.int64),
          constant_op.constant([12, 24], dtypes.int64)).run()
      self.assertAllEqual(2, table.size())
      self.assertAllEqual(64, len(table.export()[0].eval()))
      save = saver.Saver([table])
      # Restore the saved values in the parameter nodes.
      save.restore(sess, save_path)
      self.assertAllEqual(3, table.size())
      self.assertAllEqual(32, len(table.export()[0].eval()))
      input_string = constant_op.constant([10, 11, 12, 13, 14], dtypes.int64)
      output = table.lookup(input_string)
      self.assertAllEqual([-1, 0, -1, 2, 3], output)
  @test_util.run_in_graph_and_eager_modes
  def testObjectSaveRestore(self):
    """Object-based (trackable.Checkpoint) save/restore round-trip."""
    save_dir = os.path.join(self.get_temp_dir(), "save_restore")
    save_prefix = os.path.join(tempfile.mkdtemp(prefix=save_dir), "hash")
    default_value = -1
    empty_key = 0
    deleted_key = -1
    keys = constant_op.constant([11, 12, 13], dtypes.int64)
    values = constant_op.constant([0, 1, 2], dtypes.int64)
    save_table = lookup_ops.DenseHashTable(
        dtypes.int64,
        dtypes.int64,
        default_value=default_value,
        empty_key=empty_key,
        deleted_key=deleted_key,
        name="t1",
        checkpoint=True,
        initial_num_buckets=32)
    save_checkpoint = trackable.Checkpoint(table=save_table)
    self.assertAllEqual(0, self.evaluate(save_table.size()))
    self.evaluate(save_table.insert(keys, values))
    self.assertAllEqual(3, self.evaluate(save_table.size()))
    self.assertAllEqual(32, len(self.evaluate(save_table.export()[0])))
    save_path = save_checkpoint.save(save_prefix)
    del save_table, save_checkpoint
    # Load into a table with a different bucket count and contents; restore
    # must overwrite both.
    load_table = lookup_ops.DenseHashTable(
        dtypes.int64,
        dtypes.int64,
        default_value=default_value,
        empty_key=empty_key,
        deleted_key=deleted_key,
        name="t1",
        checkpoint=True,
        initial_num_buckets=64)
    self.evaluate(
        load_table.insert(
            constant_op.constant([11, 14], dtypes.int64),
            constant_op.constant([12, 24], dtypes.int64)))
    self.assertAllEqual(2, self.evaluate(load_table.size()))
    self.assertAllEqual(64, len(self.evaluate(load_table.export()[0])))
    restore_checkpoint = trackable.Checkpoint(table=load_table)
    # Restore the saved values in the parameter nodes.
    restore_checkpoint.restore(save_path).run_restore_ops()
    self.assertAllEqual(3, self.evaluate(load_table.size()))
    self.assertAllEqual(32, len(self.evaluate(load_table.export()[0])))
    input_string = constant_op.constant([10, 11, 12, 13, 14], dtypes.int64)
    output = load_table.lookup(input_string)
    self.assertAllEqual([-1, 0, 1, 2, -1], self.evaluate(output))
  @test_util.run_v2_only
  def testSavedModelSaveRestore(self):
    """SavedModel round-trip: table contents survive through a tf.function."""
    save_dir = os.path.join(self.get_temp_dir(), "save_restore")
    save_path = os.path.join(tempfile.mkdtemp(prefix=save_dir), "hash")
    root = tracking.AutoTrackable()
    default_value = -1
    empty_key = 0
    deleted_key = -1
    keys = constant_op.constant([11, 12, 13], dtypes.int64)
    values = constant_op.constant([0, 1, 2], dtypes.int64)
    root.table = lookup_ops.DenseHashTable(
        dtypes.int64,
        dtypes.int64,
        default_value=default_value,
        empty_key=empty_key,
        deleted_key=deleted_key,
        name="t1",
        checkpoint=True,
        initial_num_buckets=32)

    @def_function.function(
        input_signature=[tensor_spec.TensorSpec((), dtypes.int64)])
    def lookup(key):
      # Traced entry point: scalar int64 key in, scalar id out.
      return root.table.lookup(key)

    root.lookup = lookup
    self.assertAllEqual(0, root.table.size())
    root.table.insert(keys, values)
    self.assertAllEqual(3, self.evaluate(root.table.size()))
    self.assertAllEqual(32, len(self.evaluate(root.table.export()[0])))
    saved_model_save.save(root, save_path)
    del root
    loaded = saved_model_load.load(save_path)
    # The loaded table answers through the restored tf.function.
    self.assertEqual(loaded.lookup(12), 1)
    self.assertEqual(loaded.lookup(10), -1)
  @test_util.run_v1_only("Saver V1 only")
  def testVectorSaveRestore(self):
    """Save/restore round-trip with vector keys AND vector values."""
    save_dir = os.path.join(self.get_temp_dir(), "vector_save_restore")
    save_path = os.path.join(tempfile.mkdtemp(prefix=save_dir), "hash")
    with self.session(graph=ops.Graph()) as sess:
      empty_key = constant_op.constant([11, 13], dtypes.int64)
      deleted_key = constant_op.constant([-2, -3], dtypes.int64)
      default_value = constant_op.constant([-1, -2], dtypes.int64)
      keys = constant_op.constant([[11, 12], [11, 14], [12, 13], [13, 14]],
                                  dtypes.int64)
      values = constant_op.constant([[0, 1], [2, 3], [2, 4], [4, 5]],
                                    dtypes.int64)
      table = lookup_ops.DenseHashTable(
          dtypes.int64,
          dtypes.int64,
          default_value=default_value,
          empty_key=empty_key,
          deleted_key=deleted_key,
          name="t1",
          checkpoint=True,
          initial_num_buckets=32)
      save = saver.Saver()
      self.assertAllEqual(0, table.size())
      table.insert(keys, values).run()
      self.assertAllEqual(4, table.size())
      self.assertAllEqual(32, len(table.export()[0].eval()))
      # [16, 17] was never inserted, so only [12, 13] is removed.
      keys2 = constant_op.constant([[12, 13], [16, 17]], dtypes.int64)
      table.remove(keys2).run()
      self.assertAllEqual(3, table.size())
      self.assertAllEqual(32, len(table.export()[0].eval()))
      val = save.save(sess, save_path)
      self.assertIsInstance(val, six.string_types)
      self.assertEqual(save_path, val)
    with self.session(graph=ops.Graph()) as sess:
      empty_key = constant_op.constant([11, 13], dtypes.int64)
      deleted_key = constant_op.constant([-2, -3], dtypes.int64)
      default_value = constant_op.constant([-1, -2], dtypes.int64)
      # Re-create with different bucket count/contents; restore overwrites.
      table = lookup_ops.DenseHashTable(
          dtypes.int64,
          dtypes.int64,
          default_value=default_value,
          empty_key=empty_key,
          deleted_key=deleted_key,
          name="t1",
          checkpoint=True,
          initial_num_buckets=64)
      table.insert(
          constant_op.constant([[11, 12], [13, 15]], dtypes.int64),
          constant_op.constant([[21, 22], [23, 24]], dtypes.int64)).run()
      self.assertAllEqual(2, table.size())
      self.assertAllEqual(64, len(table.export()[0].eval()))
      save = saver.Saver()
      # Restore the saved values in the parameter nodes.
      save.restore(sess, save_path)
      self.assertAllEqual(3, table.size())
      self.assertAllEqual(32, len(table.export()[0].eval()))
      input_string = constant_op.constant(
          [[11, 12], [11, 14], [11, 15], [13, 14], [13, 15]], dtypes.int64)
      output = table.lookup(input_string)
      self.assertAllEqual([[0, 1], [2, 3], [-1, -2], [4, 5], [-1, -2]],
                          self.evaluate(output))
  @test_util.run_v1_only("Saver V1 only")
  def testVectorScalarSaveRestore(self):
    """Save/restore round-trip with vector keys and SCALAR values."""
    save_dir = os.path.join(self.get_temp_dir(), "vector_scalar_save_restore")
    save_path = os.path.join(tempfile.mkdtemp(prefix=save_dir), "hash")
    with self.session(graph=ops.Graph()) as sess:
      empty_key = constant_op.constant([11, 13], dtypes.int64)
      deleted_key = constant_op.constant([-1, -1], dtypes.int64)
      default_value = constant_op.constant(-1, dtypes.int64)
      keys = constant_op.constant([[11, 12], [11, 14], [12, 13], [13, 14]],
                                  dtypes.int64)
      values = constant_op.constant([0, 1, 2, 3], dtypes.int64)
      table = lookup_ops.DenseHashTable(
          dtypes.int64,
          dtypes.int64,
          default_value=default_value,
          empty_key=empty_key,
          deleted_key=deleted_key,
          name="t2",
          checkpoint=True,
          initial_num_buckets=32)
      save = saver.Saver()
      self.assertAllEqual(0, table.size())
      table.insert(keys, values).run()
      self.assertAllEqual(4, table.size())
      self.assertAllEqual(32, len(table.export()[0].eval()))
      # [15, 16] was never inserted, so only [12, 13] is removed.
      keys2 = constant_op.constant([[12, 13], [15, 16]], dtypes.int64)
      table.remove(keys2).run()
      self.assertAllEqual(3, table.size())
      self.assertAllEqual(32, len(table.export()[0].eval()))
      val = save.save(sess, save_path)
      self.assertIsInstance(val, six.string_types)
      self.assertEqual(save_path, val)
    with self.session(graph=ops.Graph()) as sess:
      empty_key = constant_op.constant([11, 13], dtypes.int64)
      deleted_key = constant_op.constant([-1, -1], dtypes.int64)
      default_value = constant_op.constant(-1, dtypes.int64)
      # Re-create with different bucket count/contents; restore overwrites.
      table = lookup_ops.DenseHashTable(
          dtypes.int64,
          dtypes.int64,
          default_value=default_value,
          empty_key=empty_key,
          deleted_key=deleted_key,
          name="t2",
          checkpoint=True,
          initial_num_buckets=64)
      table.insert(
          constant_op.constant([[11, 12], [13, 15]], dtypes.int64),
          constant_op.constant([3, 4], dtypes.int64)).run()
      self.assertAllEqual(2, table.size())
      self.assertAllEqual(64, len(table.export()[0].eval()))
      save = saver.Saver()
      # Restore the saved values in the parameter nodes.
      save.restore(sess, save_path)
      self.assertAllEqual(3, table.size())
      self.assertAllEqual(32, len(table.export()[0].eval()))
      input_string = constant_op.constant(
          [[11, 12], [11, 14], [11, 15], [13, 14], [13, 15]], dtypes.int64)
      output = table.lookup(input_string)
      self.assertAllEqual([0, 1, -1, 3, -1], output)
  def testReprobe(self):
    """Hash collisions force reprobing; all colliding keys are still found."""
    with self.cached_session():
      # Insert 6 keys into a table with 8 buckets.
      # The values are chosen to make sure collisions occur when using GCC STL
      keys = constant_op.constant([11, 12, 13, 19, 20, 21], dtypes.int64)
      values = constant_op.constant([51, 52, 53, 54, 55, 56], dtypes.int64)
      table = lookup_ops.DenseHashTable(
          dtypes.int64,
          dtypes.int64,
          default_value=-1,
          empty_key=0,
          deleted_key=-1,
          initial_num_buckets=8)
      self.assertAllEqual(0, self.evaluate(table.size()))
      self.evaluate(table.insert(keys, values))
      self.assertAllEqual(6, self.evaluate(table.size()))
      input_string = constant_op.constant([10, 11, 12, 13, 14, 19, 20, 21, 22],
                                          dtypes.int64)
      output = table.lookup(input_string)
      self.assertAllEqual([9], output.get_shape())
      result = self.evaluate(output)
      # Every inserted key resolves despite collisions; absent keys get -1.
      self.assertAllEqual([-1, 51, 52, 53, -1, 54, 55, 56, -1], result)
def testCustomEmptyKey(self):
with self.cached_session():
keys = constant_op.constant([11, 0, 13], dtypes.int64)
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.int64,
default_value=-1,
empty_key=12,
deleted_key=-1)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant([11, 0, 15], dtypes.int64)
output = table.lookup(input_string)
self.assertAllEqual([3], output.get_shape())
result = self.evaluate(output)
self.assertAllEqual([0, 1, -1], result)
  def testErrors(self):
    """Invalid uses: sentinel keys, bad key shapes, bad constructor args."""
    with self.cached_session():
      table = lookup_ops.DenseHashTable(
          dtypes.int64,
          dtypes.int64,
          default_value=-1,
          empty_key=0,
          deleted_key=-1)
      # Inserting the empty key returns an error
      keys1 = constant_op.constant([11, 0], dtypes.int64)
      values1 = constant_op.constant([0, 1], dtypes.int64)
      with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
                                  "empty_key"):
        self.evaluate(table.insert(keys1, values1))
      # Looking up the empty key returns an error
      with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
                                  "empty_key"):
        self.evaluate(table.lookup(keys1))
      # Inserting the deleted key returns an error
      keys2 = constant_op.constant([11, -1], dtypes.int64)
      values2 = constant_op.constant([0, 1], dtypes.int64)
      with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
                                  "deleted_key"):
        self.evaluate(table.insert(keys2, values2))
      # Looking up the deleted key returns an error
      with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
                                  "deleted_key"):
        self.evaluate(table.lookup(keys2))
      # Arbitrary tensors of keys are not supported
      keys = constant_op.constant([[11, 0], [12, 1]], dtypes.int64)
      values = constant_op.constant([[11, 0], [12, 1]], dtypes.int64)
      with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
                                  "Expected key shape"):
        self.evaluate(table.lookup(keys))
      with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
                                  "Expected key shape"):
        self.evaluate(table.insert(keys, values))
      # 12 is rejected as a bucket count — presumably must be a power of two.
      with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
                                  "Number of buckets must be"):
        table2 = lookup_ops.DenseHashTable(
            dtypes.int64,
            dtypes.int64,
            default_value=-1,
            empty_key=17,
            deleted_key=-1,
            initial_num_buckets=12)
        self.assertAllEqual(0, self.evaluate(table2.size()))
      # Scalar empty_key with vector deleted_key: shape mismatch.
      with self.assertRaisesRegex(
          errors_impl.InvalidArgumentError,
          "Empty and deleted keys must have same shape"):
        table3 = lookup_ops.DenseHashTable(
            dtypes.int64,
            dtypes.int64,
            default_value=-1,
            empty_key=42,
            deleted_key=[1, 2])
        self.assertAllEqual(0, self.evaluate(table3.size()))
      with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
                                  "Empty and deleted keys cannot be equal"):
        table4 = lookup_ops.DenseHashTable(
            dtypes.int64,
            dtypes.int64,
            default_value=-1,
            empty_key=42,
            deleted_key=42)
        self.assertAllEqual(0, self.evaluate(table4.size()))
      # Equality check also applies element-wise to vector sentinels.
      with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
                                  "Empty and deleted keys cannot be equal"):
        table5 = lookup_ops.DenseHashTable(
            dtypes.int64,
            dtypes.int64,
            default_value=-1,
            empty_key=[1, 2, 3],
            deleted_key=[1, 2, 3])
        self.assertAllEqual(0, self.evaluate(table5.size()))
  @test_util.run_in_graph_and_eager_modes
  def testStringToResource(self):
    """dtypes.resource values: variable handles can be stored and looked up."""
    v = variables.Variable(1.)
    v1 = variables.Variable(1.)
    table = lookup_ops.DenseHashTable(
        dtypes.string,
        dtypes.resource,
        default_value=v.handle,
        empty_key="<empty>",
        deleted_key="<deleted>")
    # Resource handles are scalar tensors (shape []).
    self.assertEqual([], table.lookup("not_found").shape)
    table.insert("v1", v1.handle)
    self.assertEqual([], table.lookup("v1").shape)
  def testExportShapeInference(self):
    """Shapes inferred for export() inside a tf.function are compatible with
    the shapes observed outside the function."""
    default_value = -1
    empty_key = 0
    deleted_key = -1
    table = lookup_ops.DenseHashTable(
        dtypes.int64,
        dtypes.int64,
        default_value=default_value,
        empty_key=empty_key,
        deleted_key=deleted_key)
    actual_shapes = [t.shape for t in table.export()]
    inferred_shapes = []

    @def_function.function
    def f():
      # Capture the traced (inferred) shapes for comparison.
      for t in table.export():
        inferred_shapes.append(t.shape)

    f()
    # export() returns a (keys, values) pair.
    self.assertLen(actual_shapes, 2)
    self.assertLen(inferred_shapes, 2)
    self.assertTrue(inferred_shapes[0].is_compatible_with(actual_shapes[0]))
    self.assertTrue(inferred_shapes[1].is_compatible_with(actual_shapes[1]))
class IndexTableFromFile(test.TestCase):
  """Tests for `lookup_ops.index_table_from_file`."""

  def _createVocabFile(self, basename, values=("brain", "salad", "surgery")):
    """Writes `values` one-per-line to a temp vocab file; returns its path."""
    vocabulary_file = os.path.join(self.get_temp_dir(), basename)
    with open(vocabulary_file, "w") as f:
      f.write("\n".join(values) + "\n")
    return vocabulary_file
def test_string_index_table_from_file(self):
vocabulary_file = self._createVocabFile("f2i_vocab1.txt")
with self.cached_session():
table = lookup_ops.index_table_from_file(
vocabulary_file=vocabulary_file, num_oov_buckets=1)
ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(ids)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((1, 2, 3), self.evaluate(ids))
  def test_string_index_table_from_multicolumn_file(self):
    """Tab-delimited vocab: column 0 is the key, line number is the id."""
    vocabulary_file = self._createVocabFile(
        "f2i_vocab1.txt", values=("brain\t300", "salad\t20", "surgery\t1"))
    with self.cached_session():
      table = lookup_ops.index_table_from_file(
          vocabulary_file=vocabulary_file,
          num_oov_buckets=1,
          key_column_index=0,
          value_column_index=lookup_ops.TextFileIndex.LINE_NUMBER)
      ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))
      if not context.executing_eagerly():
        # Graph mode: lookup before initialization must fail.
        with self.assertRaises(errors_impl.OpError):
          self.evaluate(ids)
      self.evaluate(lookup_ops.tables_initializer())
      # "tarkus" is OOV -> bucket id 3 (vocab size 3 + bucket 0).
      self.assertAllEqual((1, 2, 3), self.evaluate(ids))
  def test_string_index_table_from_multicolumn_file_custom_delimiter(self):
    """Same as the tab-delimited case but with a space delimiter."""
    vocabulary_file = self._createVocabFile(
        "f2i_vocab1.txt", values=("brain 300", "salad 20", "surgery 1"))
    with self.cached_session():
      table = lookup_ops.index_table_from_file(
          vocabulary_file=vocabulary_file,
          num_oov_buckets=1,
          key_column_index=0,
          value_column_index=lookup_ops.TextFileIndex.LINE_NUMBER,
          delimiter=" ")
      ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))
      if not context.executing_eagerly():
        # Graph mode: lookup before initialization must fail.
        with self.assertRaises(errors_impl.OpError):
          self.evaluate(ids)
      self.evaluate(lookup_ops.tables_initializer())
      self.assertAllEqual((1, 2, 3), self.evaluate(ids))
  def test_string_index_table_from_file_tensor_filename(self):
    """The vocabulary filename may be passed as a string tensor."""
    vocabulary_file = self._createVocabFile("f2i_vocab1.txt")
    with self.cached_session():
      vocabulary_file = constant_op.constant(vocabulary_file)
      table = lookup_ops.index_table_from_file(
          vocabulary_file=vocabulary_file, num_oov_buckets=1)
      ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))
      if not context.executing_eagerly():
        with self.assertRaises(errors_impl.OpError):
          self.evaluate(ids)
      self.evaluate(lookup_ops.tables_initializer())
      self.assertAllEqual((1, 2, 3), self.evaluate(ids))
      if not context.executing_eagerly():
        # A constant filename is recorded as a graph asset.
        self.assertEqual(1,
                         len(ops.get_collection(ops.GraphKeys.ASSET_FILEPATHS)))
  @test_util.run_v1_only("placeholder usage")
  def test_string_index_table_from_file_placeholder_filename(self):
    """Filename fed via placeholder; no asset is recorded in the graph."""
    vocabulary_file = self._createVocabFile("f2i_vocab1.txt")
    with self.cached_session():
      vocabulary_placeholder = array_ops.placeholder(dtypes.string, [])
      table = lookup_ops.index_table_from_file(
          vocabulary_file=vocabulary_placeholder, num_oov_buckets=1)
      ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))
      with self.assertRaises(errors_impl.OpError):
        self.evaluate(ids)
      # The filename is only known at initialization time, via the feed.
      feed_dict = {vocabulary_placeholder.name: vocabulary_file}
      lookup_ops.tables_initializer().run(feed_dict=feed_dict)
      self.assertAllEqual((1, 2, 3), self.evaluate(ids))
      # Unlike a constant filename, a placeholder produces no asset.
      self.assertEqual(0,
                       len(ops.get_collection(ops.GraphKeys.ASSET_FILEPATHS)))
  def test_int32_index_table_from_file(self):
    """int32 keys parsed from a vocab file of stringified integers."""
    vocabulary_file = self._createVocabFile(
        "f2i_vocab2.txt", values=("42", "1", "-1000"))
    with self.cached_session():
      table = lookup_ops.index_table_from_file(
          vocabulary_file=vocabulary_file,
          num_oov_buckets=1,
          key_dtype=dtypes.int32)
      ids = table.lookup(
          constant_op.constant((1, -1000, 11), dtype=dtypes.int32))
      if not context.executing_eagerly():
        with self.assertRaises(errors_impl.OpError):
          self.evaluate(ids)
      self.evaluate(lookup_ops.tables_initializer())
      # 11 is not in the vocab -> OOV bucket id 3.
      self.assertAllEqual((1, 2, 3), self.evaluate(ids))
  def test_int64_index_table_from_file(self):
    """int64 keys parsed from a vocab file of stringified integers."""
    vocabulary_file = self._createVocabFile(
        "f2i_vocab3.txt", values=("42", "1", "-1000"))
    with self.cached_session():
      table = lookup_ops.index_table_from_file(
          vocabulary_file=vocabulary_file,
          num_oov_buckets=1,
          key_dtype=dtypes.int64)
      ids = table.lookup(
          constant_op.constant((1, -1000, 11), dtype=dtypes.int64))
      if not context.executing_eagerly():
        with self.assertRaises(errors_impl.OpError):
          self.evaluate(ids)
      self.evaluate(lookup_ops.tables_initializer())
      # 11 is not in the vocab -> OOV bucket id 3.
      self.assertAllEqual((1, 2, 3), self.evaluate(ids))
def test_index_table_from_file_with_default_value(self):
default_value = -42
vocabulary_file = self._createVocabFile("f2i_vocab4.txt")
with self.cached_session():
table = lookup_ops.index_table_from_file(
vocabulary_file=vocabulary_file, default_value=default_value)
ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))
if not context.executing_eagerly():
with self.assertRaises(errors_impl.OpError):
self.evaluate(ids)
self.evaluate(lookup_ops.tables_initializer())
self.assertAllEqual((1, 2, default_value), self.evaluate(ids))
  def test_index_table_from_file_with_oov_buckets(self):
    """OOV tokens hash into one of the 1000 buckets after the vocab ids."""
    vocabulary_file = self._createVocabFile("f2i_vocab5.txt")
    with self.cached_session():
      table = lookup_ops.index_table_from_file(
          vocabulary_file=vocabulary_file, num_oov_buckets=1000)
      ids = table.lookup(
          constant_op.constant(["salad", "surgery", "tarkus", "toccata"]))
      if not context.executing_eagerly():
        with self.assertRaises(errors_impl.OpError):
          self.evaluate(ids)
      self.evaluate(lookup_ops.tables_initializer())
      self.assertAllEqual(
          (
              1,  # From vocabulary file.
              2,  # From vocabulary file.
              867,  # 3 + fingerprint("tarkus") mod 1000.
              860),  # 3 + fingerprint("toccata") mod 1000.
          self.evaluate(ids))
def test_index_table_from_file_fails_with_empty_vocabulary_file_name(self):
self.assertRaises(
ValueError, lookup_ops.index_table_from_file, vocabulary_file="")
def test_index_table_from_file_fails_with_empty_vocabulary(self):
self.assertRaises(
ValueError, lookup_ops.index_table_from_file, vocabulary_file=None)
def test_index_table_from_file_str_fails_with_zero_size_vocabulary(self):
vocabulary_file = self._createVocabFile("zero_vocab_str.txt")
self.assertRaisesRegex(
ValueError, "vocab_size must be greater than 0, got 0. "
"vocabulary_file: .*zero_vocab_str.txt",
lookup_ops.index_table_from_file,
vocabulary_file=vocabulary_file,
vocab_size=0)
def test_index_table_from_file_tensor_fails_with_zero_size_vocabulary(self):
vocabulary_file = constant_op.constant(
self._createVocabFile("zero_vocab_tensor.txt"))
self.assertRaisesRegex(
ValueError, "vocab_size must be greater than 0, got 0. "
"vocabulary_file: .*zero_vocab_tensor.txt",
lookup_ops.index_table_from_file,
vocabulary_file=vocabulary_file,
vocab_size=0)
  def test_index_table_from_file_with_vocab_size_too_small(self):
    """Only the first `vocab_size` file entries load; later entries become OOV."""
    vocabulary_file = self._createVocabFile("f2i_vocab6.txt")
    with self.cached_session():
      table = lookup_ops.index_table_from_file(
          vocabulary_file=vocabulary_file, vocab_size=2)
      ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))
      if not context.executing_eagerly():
        # Graph mode: lookups fail until the table initializer has run.
        with self.assertRaises(errors_impl.OpError):
          self.evaluate(ids)
      self.evaluate(lookup_ops.tables_initializer())
      # With no OOV buckets and no default_value, misses map to -1.
      self.assertAllEqual((1, -1, -1), self.evaluate(ids))
      self.assertEqual(2, self.evaluate(table.size()))
  def test_index_table_from_file_with_vocab_size_too_large(self):
    """A `vocab_size` larger than the file's line count fails initialization."""
    vocabulary_file = self._createVocabFile("f2i_vocab7.txt")
    with self.cached_session():
      with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
                                  "Invalid vocab_size"):
        table = lookup_ops.index_table_from_file(
            vocabulary_file=vocabulary_file, vocab_size=4)
        # In graph mode the error surfaces when the initializer runs.
        self.evaluate(table.initializer)
  def test_index_table_from_file_with_vocab_size(self):
    """An exactly-matching `vocab_size` loads the full vocabulary."""
    vocabulary_file = self._createVocabFile("f2i_vocab8.txt")
    # vocab_size=0 is rejected up front as a ValueError.
    self.assertRaises(
        ValueError,
        lookup_ops.index_table_from_file,
        vocabulary_file=vocabulary_file,
        vocab_size=0)
    with self.cached_session():
      table = lookup_ops.index_table_from_file(
          vocabulary_file=vocabulary_file, vocab_size=3)
      ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))
      if not context.executing_eagerly():
        # Graph mode: lookups fail until the table initializer has run.
        with self.assertRaises(errors_impl.OpError):
          self.evaluate(ids)
      self.evaluate(lookup_ops.tables_initializer())
      # "tarkus" is OOV; with no OOV buckets the miss value is -1.
      self.assertAllEqual((1, 2, -1), self.evaluate(ids))
      self.assertEqual(3, self.evaluate(table.size()))
  def test_index_table_from_file_with_invalid_hashers(self):
    """Bad hasher_spec values fail: non-spec at build, unknown hash at lookup."""
    vocabulary_file = self._createVocabFile("invalid_hasher.txt")
    with self.cached_session():
      # A value that is not a HasherSpec is rejected immediately.
      with self.assertRaises(TypeError):
        lookup_ops.index_table_from_file(
            vocabulary_file=vocabulary_file,
            vocab_size=3,
            num_oov_buckets=1,
            hasher_spec=1)
      # An unrecognized hasher name only fails when lookup() is called.
      table = lookup_ops.index_table_from_file(
          vocabulary_file=vocabulary_file,
          vocab_size=3,
          num_oov_buckets=1,
          hasher_spec=lookup_ops.HasherSpec("my-awesome-hash", None))
      self.assertRaises(ValueError, table.lookup,
                        constant_op.constant(["salad", "surgery", "tarkus"]))
def test_index_table_from_file_table_ref_with_oov_buckets(self):
vocabulary_file = self._createVocabFile("f2i_vocab9.txt")
with self.cached_session():
table = lookup_ops.index_table_from_file(
vocabulary_file=vocabulary_file, num_oov_buckets=1)
self.assertIsNotNone(table.resource_handle)
def test_index_table_from_file_table_ref_without_oov_buckets(self):
vocabulary_file = self._createVocabFile("f2i_vocab10.txt")
with self.cached_session():
table = lookup_ops.index_table_from_file(
vocabulary_file=vocabulary_file, num_oov_buckets=0)
self.assertIsNotNone(table.resource_handle)
class IndexTableFromTensor(test.TestCase):
  """Tests for `lookup_ops.index_table_from_tensor` (key -> index lookup)."""

  @test_util.run_in_graph_and_eager_modes
  def test_index_table_from_tensor_with_tensor_init(self):
    """In-vocab strings map to their index; OOV falls in the extra bucket."""
    table = lookup_ops.index_table_from_tensor(
        vocabulary_list=("brain", "salad", "surgery"), num_oov_buckets=1)
    if not context.executing_eagerly():
      # Graph mode: lookups fail until the table initializer has run.
      with self.assertRaises(errors_impl.OpError):
        self.evaluate(
            table.lookup(constant_op.constant(("salad", "surgery", "tarkus"))))
    else:
      # Reinitializing a table in eager should work.
      table = lookup_ops.index_table_from_tensor(
          vocabulary_list=("brain", "salad", "surgery"), num_oov_buckets=1)
    self.evaluate(lookup_ops.tables_initializer())
    ids = table.lookup(constant_op.constant(("salad", "surgery", "tarkus")))
    # "tarkus" is OOV and lands in the single OOV bucket (index 3).
    self.assertAllEqual((1, 2, 3), self.evaluate(ids))

  def test_int32_index_table_from_tensor_with_tensor_init(self):
    """int32 keys are supported via the `dtype` argument."""
    with self.cached_session():
      table = lookup_ops.index_table_from_tensor(
          vocabulary_list=(42, 1, -1000), num_oov_buckets=1, dtype=dtypes.int32)
      ids = table.lookup(
          constant_op.constant((1, -1000, 11), dtype=dtypes.int32))
      if not context.executing_eagerly():
        with self.assertRaises(errors_impl.FailedPreconditionError):
          self.evaluate(ids)
      self.evaluate(lookup_ops.tables_initializer())
      # 11 is OOV and lands in the single OOV bucket (index 3).
      self.assertAllEqual((1, 2, 3), self.evaluate(ids))

  def test_int64_index_table_from_tensor_with_tensor_init(self):
    """int64 keys are supported via the `dtype` argument."""
    with self.cached_session():
      table = lookup_ops.index_table_from_tensor(
          vocabulary_list=(42, 1, -1000), num_oov_buckets=1, dtype=dtypes.int64)
      ids = table.lookup(
          constant_op.constant((1, -1000, 11), dtype=dtypes.int64))
      if not context.executing_eagerly():
        with self.assertRaises(errors_impl.FailedPreconditionError):
          self.evaluate(ids)
      self.evaluate(lookup_ops.tables_initializer())
      self.assertAllEqual((1, 2, 3), self.evaluate(ids))

  def test_index_table_from_tensor_with_default_value(self):
    """With no OOV buckets, misses return the caller-supplied default_value."""
    default_value = -42
    with self.cached_session():
      table = lookup_ops.index_table_from_tensor(
          vocabulary_list=["brain", "salad", "surgery"],
          default_value=default_value)
      ids = table.lookup(constant_op.constant(["salad", "surgery", "tarkus"]))
      if not context.executing_eagerly():
        with self.assertRaises(errors_impl.FailedPreconditionError):
          self.evaluate(ids)
      self.evaluate(lookup_ops.tables_initializer())
      self.assertAllEqual((1, 2, default_value), self.evaluate(ids))

  def test_index_table_from_tensor_missing_vocabulary_list(self):
    """A `None` vocabulary_list is rejected with a ValueError."""
    with self.cached_session():
      with self.assertRaisesRegex(ValueError,
                                  "vocabulary_list must be specified"):
        lookup_ops.index_table_from_tensor(
            vocabulary_list=None, num_oov_buckets=1)

  def test_index_table_from_tensor_empty_vocabulary_list(self):
    """An empty vocabulary_list fails with an OpError at initialization."""
    with self.cached_session():
      with self.assertRaisesRegex(errors_impl.OpError,
                                  "keys and values cannot be empty"):
        _ = lookup_ops.index_table_from_tensor(
            vocabulary_list=np.array([], dtype=np.str_), num_oov_buckets=1)
        self.evaluate(lookup_ops.tables_initializer())

  def test_index_table_from_tensor_with_invalid_hashers(self):
    """Bad hasher_spec values fail: non-spec at build, unknown hash at lookup."""
    with self.cached_session():
      with self.assertRaises(TypeError):
        lookup_ops.index_table_from_tensor(
            vocabulary_list=["brain", "salad", "surgery"],
            num_oov_buckets=1,
            hasher_spec=1)
      # An unrecognized hasher name only fails when lookup() is called.
      table = lookup_ops.index_table_from_tensor(
          vocabulary_list=["brain", "salad", "surgery"],
          num_oov_buckets=1,
          hasher_spec=lookup_ops.HasherSpec("my-awesome-hash", None))
      self.assertRaises(ValueError, table.lookup,
                        constant_op.constant(["salad", "surgery", "tarkus"]))
class IndexToStringTableFromFileTest(test.TestCase):
  """Tests for `lookup_ops.index_to_string_table_from_file` (index -> string)."""

  def _createVocabFile(self, basename, values=("brain", "salad", "surgery")):
    """Writes one value per line to a temp file and returns its path."""
    vocabulary_file = os.path.join(self.get_temp_dir(), basename)
    with open(vocabulary_file, "w") as f:
      f.write("\n".join(values) + "\n")
    return vocabulary_file

  def test_index_to_string_table(self):
    """Indices map back to vocab terms; out-of-range maps to b"UNK"."""
    vocabulary_path = self._createVocabFile("i2f_vocab1.txt")
    # vocabulary_file supports string and tensor
    type_funcs = [str, constant_op.constant]
    for type_func in type_funcs:
      vocabulary_file = type_func(vocabulary_path)
      with self.cached_session():
        table = lookup_ops.index_to_string_table_from_file(
            vocabulary_file=vocabulary_file)
        features = table.lookup(
            constant_op.constant([0, 1, 2, 3], dtypes.int64))
        if not context.executing_eagerly():
          # Graph mode: lookups fail until the table initializer has run.
          with self.assertRaises(errors_impl.OpError):
            self.evaluate(features)
        self.evaluate(lookup_ops.tables_initializer())
        self.assertAllEqual((b"brain", b"salad", b"surgery", b"UNK"),
                            self.evaluate(features))

  def test_index_to_string_table_from_multicolumn_file(self):
    """Values come from column 0 of a tab-delimited file; keys are line numbers."""
    vocabulary_file = self._createVocabFile(
        "f2i_vocab1.txt", values=("brain\t300", "salad\t20", "surgery\t1"))
    with self.cached_session():
      table = lookup_ops.index_to_string_table_from_file(
          vocabulary_file=vocabulary_file,
          key_column_index=lookup_ops.TextFileIndex.LINE_NUMBER,
          value_column_index=0)
      features = table.lookup(constant_op.constant([0, 1, 2, 3], dtypes.int64))
      if not context.executing_eagerly():
        with self.assertRaises(errors_impl.OpError):
          self.evaluate(features)
      self.evaluate(lookup_ops.tables_initializer())
      self.assertAllEqual((b"brain", b"salad", b"surgery", b"UNK"),
                          self.evaluate(features))

  def test_index_to_string_table_from_multicolumn_file_custom_delimiter(self):
    """Same as the multicolumn test but with a space delimiter."""
    vocabulary_file = self._createVocabFile(
        "f2i_vocab1.txt", values=("brain 300", "salad 20", "surgery 1"))
    with self.cached_session():
      table = lookup_ops.index_to_string_table_from_file(
          vocabulary_file=vocabulary_file,
          key_column_index=lookup_ops.TextFileIndex.LINE_NUMBER,
          value_column_index=0,
          delimiter=" ")
      features = table.lookup(constant_op.constant([0, 1, 2, 3], dtypes.int64))
      if not context.executing_eagerly():
        with self.assertRaises(errors_impl.OpError):
          self.evaluate(features)
      self.evaluate(lookup_ops.tables_initializer())
      self.assertAllEqual((b"brain", b"salad", b"surgery", b"UNK"),
                          self.evaluate(features))

  def test_index_to_string_table_with_default_value(self):
    """Out-of-range indices map to the caller-supplied default value."""
    default_value = b"NONE"
    vocabulary_file = self._createVocabFile("f2i_vocab2.txt")
    with self.cached_session():
      table = lookup_ops.index_to_string_table_from_file(
          vocabulary_file=vocabulary_file, default_value=default_value)
      features = table.lookup(constant_op.constant([1, 2, 4], dtypes.int64))
      if not context.executing_eagerly():
        with self.assertRaises(errors_impl.OpError):
          self.evaluate(features)
      self.evaluate(lookup_ops.tables_initializer())
      self.assertAllEqual((b"salad", b"surgery", default_value),
                          self.evaluate(features))

  def test_index_to_string_table_with_vocab_size_too_small(self):
    """Indices beyond a truncated vocab_size map to the default value."""
    default_value = b"NONE"
    vocabulary_file = self._createVocabFile("f2i_vocab2.txt")
    with self.cached_session():
      table = lookup_ops.index_to_string_table_from_file(
          vocabulary_file=vocabulary_file,
          vocab_size=2,
          default_value=default_value)
      features = table.lookup(constant_op.constant([1, 2, 4], dtypes.int64))
      if not context.executing_eagerly():
        with self.assertRaises(errors_impl.OpError):
          self.evaluate(features)
      self.evaluate(lookup_ops.tables_initializer())
      # Index 2 ("surgery") is past vocab_size=2 and therefore unknown.
      self.assertAllEqual((b"salad", default_value, default_value),
                          self.evaluate(features))

  def test_index_to_string_table_with_vocab_size_too_large(self):
    """A vocab_size larger than the file's line count fails initialization."""
    vocabulary_file = self._createVocabFile("f2i_vocab6.txt")
    with self.cached_session():
      with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
                                  "Invalid vocab_size"):
        _ = lookup_ops.index_to_string_table_from_file(
            vocabulary_file=vocabulary_file, vocab_size=4)
        self.evaluate(lookup_ops.tables_initializer())

  def test_index_to_string_table_with_vocab_size(self):
    """An exactly-matching vocab_size loads the whole vocabulary."""
    vocabulary_file = self._createVocabFile("f2i_vocab7.txt")
    with self.cached_session():
      table = lookup_ops.index_to_string_table_from_file(
          vocabulary_file=vocabulary_file, vocab_size=3)
      features = table.lookup(constant_op.constant([1, 2, 4], dtypes.int64))
      if not context.executing_eagerly():
        with self.assertRaises(errors_impl.OpError):
          self.evaluate(features)
      self.evaluate(lookup_ops.tables_initializer())
      self.assertAllEqual((b"salad", b"surgery", b"UNK"),
                          self.evaluate(features))
class IndexToStringTableFromTensorTest(test.TestCase):
  """Tests for `lookup_ops.index_to_string_table_from_tensor`."""

  def test_index_to_string_table_from_tensor(self):
    """Indices map back to vocab terms; out-of-range maps to b"UNK"."""
    with self.cached_session():
      vocabulary_list = constant_op.constant(["brain", "salad", "surgery"])
      table = lookup_ops.index_to_string_table_from_tensor(
          vocabulary_list=vocabulary_list)
      indices = constant_op.constant([0, 1, 2, 3], dtypes.int64)
      features = table.lookup(indices)
      if not context.executing_eagerly():
        # Graph mode: lookups fail until the table initializer has run.
        with self.assertRaises(errors_impl.OpError):
          self.evaluate(features)
      self.evaluate(lookup_ops.tables_initializer())
      self.assertAllEqual((b"brain", b"salad", b"surgery", b"UNK"),
                          self.evaluate(features))

  def test_duplicate_entries(self):
    """Duplicate vocab entries are allowed for index-to-string tables."""
    with self.cached_session():
      vocabulary_list = constant_op.constant(["hello", "hello"])
      table = lookup_ops.index_to_string_table_from_tensor(
          vocabulary_list=vocabulary_list)
      indices = constant_op.constant([0, 1, 4], dtypes.int64)
      features = table.lookup(indices)
      self.evaluate(lookup_ops.tables_initializer())
      self.assertAllEqual((b"hello", b"hello", b"UNK"), self.evaluate(features))

  def test_index_to_string_with_default_value(self):
    """Out-of-range indices map to the caller-supplied default value."""
    default_value = b"NONE"
    with self.cached_session():
      vocabulary_list = constant_op.constant(["brain", "salad", "surgery"])
      table = lookup_ops.index_to_string_table_from_tensor(
          vocabulary_list=vocabulary_list, default_value=default_value)
      indices = constant_op.constant([1, 2, 4], dtypes.int64)
      features = table.lookup(indices)
      if not context.executing_eagerly():
        with self.assertRaises(errors_impl.OpError):
          self.evaluate(features)
      self.evaluate(lookup_ops.tables_initializer())
      self.assertAllEqual((b"salad", b"surgery", default_value),
                          self.evaluate(features))
class IdTableWithHashBucketsTest(test.TestCase):
  """Tests for `lookup_ops.IdTableWithHashBuckets`.

  The table composes an optional inner vocabulary table with a hash-bucket
  fallback: in-vocab keys get their vocab id, OOV keys hash into one of
  `num_oov_buckets` ids appended after the vocab range.
  """

  def _createVocabFile(self, basename, values=("brain", "salad", "surgery")):
    """Writes one value per line to a temp file and returns its path."""
    vocabulary_file = os.path.join(self.get_temp_dir(), basename)
    with open(vocabulary_file, "w") as f:
      f.write("\n".join(values) + "\n")
    return vocabulary_file

  def testStringIdTableWithHashBuckets(self):
    """String keys: in-vocab ids 0..2, OOV hashes into the extra bucket."""
    vocab_file = self._createVocabFile("feat_to_id_1.txt")
    default_value = -1
    vocab_size = 3
    oov_buckets = 1
    table = lookup_ops.IdTableWithHashBuckets(
        lookup_ops.StaticHashTable(
            lookup_ops.TextFileIdTableInitializer(
                vocab_file, vocab_size=vocab_size), default_value),
        oov_buckets)
    self.evaluate(table.initializer)
    input_string = constant_op.constant(["brain", "salad", "surgery", "UNK"])
    out = table.lookup(input_string)
    self.assertAllEqual([0, 1, 2, 3], self.evaluate(out))
    # size() reports vocab entries plus OOV buckets.
    self.assertEqual(vocab_size + oov_buckets, self.evaluate(table.size()))

  def testInt32IdTableWithHashBuckets(self):
    """int32 keys are accepted when the inner table stores int64 keys."""
    vocab_file = self._createVocabFile("feat_to_id_2.txt", ("42", "1", "-1000"))
    default_value = -1
    vocab_size = 3
    oov_buckets = 1
    table = lookup_ops.IdTableWithHashBuckets(
        lookup_ops.StaticHashTable(
            lookup_ops.TextFileIdTableInitializer(
                vocab_file, vocab_size=vocab_size, key_dtype=dtypes.int64),
            default_value),
        oov_buckets,
        key_dtype=dtypes.int32)
    self.evaluate(table.initializer)
    values = constant_op.constant((42, 1, -1000, 11), dtype=dtypes.int32)
    out = table.lookup(values)
    self.assertAllEqual([0, 1, 2, 3], self.evaluate(out))
    self.assertEqual(vocab_size + oov_buckets, self.evaluate(table.size()))

  def testInt64IdTableWithHashBuckets(self):
    """int64 keys work directly against an int64 inner table."""
    vocab_file = self._createVocabFile("feat_to_id_3.txt", ("42", "1", "-1000"))
    default_value = -1
    vocab_size = 3
    oov_buckets = 1
    table = lookup_ops.IdTableWithHashBuckets(
        lookup_ops.StaticHashTable(
            lookup_ops.TextFileIdTableInitializer(
                vocab_file, vocab_size=vocab_size, key_dtype=dtypes.int64),
            default_value), oov_buckets)
    self.evaluate(table.initializer)
    values = constant_op.constant((42, 1, -1000, 11), dtype=dtypes.int64)
    out = table.lookup(values)
    self.assertAllEqual([0, 1, 2, 3], self.evaluate(out))
    self.assertEqual(vocab_size + oov_buckets, self.evaluate(table.size()))

  def testStringIdTableWithOnlyHashBucket(self):
    """With no inner table every key is hashed into an OOV bucket."""
    oov_buckets = 5
    # Set a table that only uses hash buckets, for each input value returns
    # an id calculated by fingerprint("input") mod oov_buckets.
    table = lookup_ops.IdTableWithHashBuckets(None, oov_buckets)
    self.evaluate(table.initializer)
    values = constant_op.constant(("brain", "salad", "surgery"))
    out = table.lookup(values)
    self.assertAllEqual(
        [
            3,  # fingerprint("brain") mod 5.
            1,  # fingerprint("salad") mod 5.
            4  # fingerprint("surgery") mod 5
        ],
        self.evaluate(out))
    self.assertEqual(oov_buckets, self.evaluate(table.size()))

  def testInt32IdTableWithOnlyHashBucket(self):
    """Hash-bucket-only lookup also works for int32 keys."""
    oov_buckets = 5
    # Set a table that only uses hash buckets, for each input value returns
    # an id calculated by fingerprint("input") mod oov_buckets.
    table = lookup_ops.IdTableWithHashBuckets(
        None, oov_buckets, key_dtype=dtypes.int32)
    self.evaluate(table.initializer)
    input_string = constant_op.constant([42, 1, -1000], dtype=dtypes.int32)
    out = table.lookup(input_string)
    self.assertAllEqual(
        [
            1,  # fingerprint("42") mod 5.
            4,  # fingerprint("1") mod 5.
            2  # fingerprint("-1000") mod 5
        ],
        self.evaluate(out))
    self.assertEqual(oov_buckets, self.evaluate(table.size()))

  def testFloat64IdTableWithOnlyHashBucket(self):
    """float64 keys are not hashable ids and are rejected."""
    with self.assertRaisesRegex(TypeError, "Invalid key_dtype"):
      lookup_ops.IdTableWithHashBuckets(
          None, num_oov_buckets=5, key_dtype=dtypes.float64)

  def testBoolIdTableWithOnlyHashBucket(self):
    """bool keys are not hashable ids and are rejected."""
    with self.assertRaisesRegex(TypeError, "Invalid key_dtype"):
      lookup_ops.IdTableWithHashBuckets(
          None, num_oov_buckets=5, key_dtype=dtypes.bool)

  def testIdTableWithHashBucketsWithMultipleInitializers(self):
    """Two tables over the same vocab may use different hasher specs."""
    vocab_file = self._createVocabFile("feat_to_id_4.txt")
    default_value = -1
    vocab_size = 3
    oov_buckets = 3
    vocab_table = lookup_ops.StaticHashTable(
        lookup_ops.TextFileIdTableInitializer(
            vocab_file, vocab_size=vocab_size), default_value)
    table1 = lookup_ops.IdTableWithHashBuckets(
        vocab_table,
        oov_buckets,
        hasher_spec=lookup_ops.FastHashSpec,
        name="table1")
    table2 = lookup_ops.IdTableWithHashBuckets(
        vocab_table,
        oov_buckets,
        hasher_spec=lookup_ops.StrongHashSpec((1, 2)),
        name="table2")
    self.evaluate(lookup_ops.tables_initializer())
    input_string = constant_op.constant(
        ["fruit", "brain", "salad", "surgery", "UNK"])
    out1 = table1.lookup(input_string)
    out2 = table2.lookup(input_string)
    out1, out2 = self.evaluate([out1, out2])
    # OOV keys land in different buckets per hasher ("UNK": 5 vs 3).
    self.assertAllEqual([5, 0, 1, 2, 5], out1)
    self.assertAllEqual([5, 0, 1, 2, 3], out2)
    self.assertEqual(vocab_size + oov_buckets, self.evaluate(table1.size()))
    self.assertEqual(vocab_size + oov_buckets, self.evaluate(table2.size()))
    if not context.executing_eagerly():
      # Each hasher spec should produce its corresponding hash op in the graph.
      test_util.assert_ops_in_graph({
          "table1_Lookup/hash_bucket": "StringToHashBucketFast",
          "table2_Lookup/hash_bucket": "StringToHashBucketStrong",
      }, ops.get_default_graph())

  def testIdTableWithHashBucketsInitializationAcrossSessions(self):
    """A table initialized in one session is usable from another."""
    vocab_file = self._createVocabFile("feat_to_id_5.txt")
    with self.cached_session():
      default_value = -1
      vocab_size = 3
      oov_buckets = 1
      table1 = lookup_ops.IdTableWithHashBuckets(
          lookup_ops.StaticHashTable(
              lookup_ops.TextFileIdTableInitializer(
                  vocab_file, vocab_size=vocab_size), default_value),
          oov_buckets)
      self.evaluate(table1.initializer)
      input_string_1 = constant_op.constant(
          ["brain", "salad", "surgery", "UNK"])
      out1 = table1.lookup(input_string_1)
      self.assertAllEqual([0, 1, 2, 3], self.evaluate(out1))
      self.assertEqual(vocab_size + oov_buckets, self.evaluate(table1.size()))
    with self.cached_session():
      default_value = -1
      vocab_size = 3
      oov_buckets = 1
      # Underlying lookup table already initialized in previous session.
      # No need to call self.evaluate(table2.initializer)
      table2 = lookup_ops.IdTableWithHashBuckets(
          lookup_ops.StaticHashTable(
              lookup_ops.TextFileIdTableInitializer(
                  vocab_file, vocab_size=vocab_size), default_value),
          oov_buckets)
      input_string_2 = constant_op.constant(["fruit", "salad", "UNK"])
      out2 = table2.lookup(input_string_2)
      self.assertAllEqual([3, 1, 3], self.evaluate(out2))
      self.assertEqual(vocab_size + oov_buckets, self.evaluate(table2.size()))

  def testIdTableWithHashBucketsWithMultipleInitializersDifferentDefault(self):
    """With zero OOV buckets, each table returns its own default on a miss."""
    vocab_file = self._createVocabFile("feat_to_id_6.txt")
    default_value1 = -1
    vocab_size = 3
    oov_buckets = 0
    table1 = lookup_ops.IdTableWithHashBuckets(
        lookup_ops.StaticHashTable(
            lookup_ops.TextFileIdTableInitializer(
                vocab_file, vocab_size=vocab_size), default_value1),
        oov_buckets)
    default_value2 = -2
    table2 = lookup_ops.IdTableWithHashBuckets(
        lookup_ops.StaticHashTable(
            lookup_ops.TextFileIdTableInitializer(
                vocab_file, vocab_size=vocab_size), default_value2),
        oov_buckets)
    self.evaluate(lookup_ops.tables_initializer())
    input_string_1 = constant_op.constant(
        ["brain", "salad", "surgery", "UNK"])
    input_string_2 = constant_op.constant(["fruit", "salad", "UNK"])
    out1 = table1.lookup(input_string_1)
    out2 = table2.lookup(input_string_2)
    out1, out2 = self.evaluate([out1, out2])
    self.assertAllEqual([0, 1, 2, -1], out1)
    self.assertAllEqual([-2, 1, -2], out2)
    self.assertEqual(vocab_size + oov_buckets, self.evaluate(table1.size()))
    self.assertEqual(vocab_size + oov_buckets, self.evaluate(table2.size()))

  def testSparseTensor(self):
    """lookup() maps a SparseTensor's values, preserving indices and shape."""
    vocab_file = self._createVocabFile("feat_to_id_7.txt")
    input_indices = [[0, 0], [0, 1], [2, 0], [2, 2], [3, 0]]
    input_shape = [4, 4]
    sp_features = sparse_tensor.SparseTensor(
        constant_op.constant(input_indices, dtypes.int64),
        constant_op.constant(["brain", "salad", "brain", "surgery", "tarkus"],
                             dtypes.string),
        constant_op.constant(input_shape, dtypes.int64))
    table = lookup_ops.IdTableWithHashBuckets(
        lookup_ops.StaticHashTable(
            lookup_ops.TextFileIdTableInitializer(vocab_file, vocab_size=3),
            -1), 1)
    self.evaluate(table.initializer)
    sp_ids = table.lookup(sp_features)
    self.assertAllEqual([5], sp_ids.values._shape_as_list())
    sp_ids_ind, sp_ids_val, sp_ids_shape = self.evaluate(
        [sp_ids.indices, sp_ids.values, sp_ids.dense_shape])
    self.assertAllEqual(input_indices, sp_ids_ind)
    self.assertAllEqual([0, 1, 0, 2, 3], sp_ids_val)
    self.assertAllEqual(input_shape, sp_ids_shape)

  def testRaggedTensor(self):
    """lookup() maps a RaggedTensor's values, preserving the row splits."""
    vocab_file = self._createVocabFile("feat_to_id_7.txt")
    input_row_splits = [0, 2, 4, 5]
    ragged_features = ragged_tensor.RaggedTensor.from_row_splits(
        constant_op.constant(["brain", "salad", "brain", "surgery", "tarkus"],
                             dtypes.string),
        constant_op.constant(input_row_splits, dtypes.int64))
    table = lookup_ops.IdTableWithHashBuckets(
        lookup_ops.StaticHashTable(
            lookup_ops.TextFileIdTableInitializer(vocab_file, vocab_size=3),
            -1), 1)
    self.evaluate(table.initializer)
    ragged_ids = table.lookup(ragged_features)
    self.assertAllEqual([5], ragged_ids.values._shape_as_list())
    ragged_ids_val, ragged_ids_row_splits = self.evaluate(
        [ragged_ids.values, ragged_ids.row_splits])
    self.assertAllEqual([0, 1, 0, 2, 3], ragged_ids_val)
    self.assertAllEqual(input_row_splits, ragged_ids_row_splits)

  def testInt32SparseTensor(self):
    """SparseTensor lookup with int32 keys over an int64 inner table."""
    input_indices = [[0, 0], [0, 1], [2, 0], [2, 2], [3, 0]]
    input_shape = [4, 4]
    sp_features = sparse_tensor.SparseTensor(
        constant_op.constant(input_indices, dtypes.int64),
        constant_op.constant([42, 1, 42, -1000, 11], dtypes.int32),
        constant_op.constant(input_shape, dtypes.int64))
    table = lookup_ops.IdTableWithHashBuckets(
        lookup_ops.StaticHashTable(
            lookup_ops.KeyValueTensorInitializer(
                (42, 1, -1000), (0, 1, 2), dtypes.int64, dtypes.int64), -1),
        1,
        key_dtype=dtypes.int32)
    self.evaluate(table.initializer)
    sp_ids = table.lookup(sp_features)
    self.assertAllEqual([5], sp_ids.values._shape_as_list())
    sp_ids_ind, sp_ids_val, sp_ids_shape = self.evaluate(
        [sp_ids.indices, sp_ids.values, sp_ids.dense_shape])
    self.assertAllEqual(input_indices, sp_ids_ind)
    self.assertAllEqual([0, 1, 0, 2, 3], sp_ids_val)
    self.assertAllEqual(input_shape, sp_ids_shape)

  def testInt32RaggedTensor(self):
    """RaggedTensor lookup with int32 keys over an int64 inner table."""
    input_row_splits = [0, 2, 4, 5]
    ragged_features = ragged_tensor.RaggedTensor.from_row_splits(
        constant_op.constant([42, 1, 42, -1000, 11], dtypes.int32),
        constant_op.constant(input_row_splits, dtypes.int32))
    table = lookup_ops.IdTableWithHashBuckets(
        lookup_ops.StaticHashTable(
            lookup_ops.KeyValueTensorInitializer(
                (42, 1, -1000), (0, 1, 2), dtypes.int64, dtypes.int64), -1),
        1,
        key_dtype=dtypes.int32)
    self.evaluate(table.initializer)
    ragged_ids = table.lookup(ragged_features)
    self.assertAllEqual([5], ragged_ids.values._shape_as_list())
    ragged_ids_val, ragged_ids_row_splits = self.evaluate(
        [ragged_ids.values, ragged_ids.row_splits])
    self.assertAllEqual([0, 1, 0, 2, 3], ragged_ids_val)
    self.assertAllEqual(input_row_splits, ragged_ids_row_splits)

  def testInt64SparseTensor(self):
    """SparseTensor lookup with int64 keys."""
    input_indices = [[0, 0], [0, 1], [2, 0], [2, 2], [3, 0]]
    input_shape = [4, 4]
    sp_features = sparse_tensor.SparseTensor(
        constant_op.constant(input_indices, dtypes.int64),
        constant_op.constant([42, 1, 42, -1000, 11], dtypes.int64),
        constant_op.constant(input_shape, dtypes.int64))
    table = lookup_ops.IdTableWithHashBuckets(
        lookup_ops.StaticHashTable(
            lookup_ops.KeyValueTensorInitializer(
                (42, 1, -1000), (0, 1, 2), dtypes.int64, dtypes.int64), -1),
        1,
        key_dtype=dtypes.int64)
    self.evaluate(table.initializer)
    sp_ids = table.lookup(sp_features)
    self.assertAllEqual([5], sp_ids.values._shape_as_list())
    sp_ids_ind, sp_ids_val, sp_ids_shape = self.evaluate(
        [sp_ids.indices, sp_ids.values, sp_ids.dense_shape])
    self.assertAllEqual(input_indices, sp_ids_ind)
    self.assertAllEqual([0, 1, 0, 2, 3], sp_ids_val)
    self.assertAllEqual(input_shape, sp_ids_shape)

  def testInt64RaggedTensor(self):
    """RaggedTensor lookup with int64 keys."""
    input_row_splits = [0, 2, 4, 5]
    ragged_features = ragged_tensor.RaggedTensor.from_row_splits(
        constant_op.constant([42, 1, 42, -1000, 11], dtypes.int64),
        constant_op.constant(input_row_splits, dtypes.int64))
    table = lookup_ops.IdTableWithHashBuckets(
        lookup_ops.StaticHashTable(
            lookup_ops.KeyValueTensorInitializer(
                (42, 1, -1000), (0, 1, 2), dtypes.int64, dtypes.int64), -1),
        1,
        key_dtype=dtypes.int64)
    self.evaluate(table.initializer)
    ragged_ids = table.lookup(ragged_features)
    self.assertAllEqual([5], ragged_ids.values._shape_as_list())
    ragged_ids_val, ragged_ids_row_splits = self.evaluate(
        [ragged_ids.values, ragged_ids.row_splits])
    self.assertAllEqual([0, 1, 0, 2, 3], ragged_ids_val)
    self.assertAllEqual(input_row_splits, ragged_ids_row_splits)

  def testIdTableWithHashBucketsWithInvalidHashers(self):
    """Invalid hasher specs fail at construction or at lookup time."""
    vocab_file = self._createVocabFile("feat_to_id_4.txt")
    with self.cached_session():
      default_value = -1
      vocab_size = 3
      oov_buckets = 1
      lookup_table = lookup_ops.StaticHashTable(
          lookup_ops.TextFileIdTableInitializer(
              vocab_file, vocab_size=vocab_size), default_value)
      # A value that is not a HasherSpec is rejected immediately.
      with self.assertRaises(TypeError):
        lookup_ops.IdTableWithHashBuckets(
            lookup_table, oov_buckets, hasher_spec=1)
      # An unrecognized hasher name fails only when lookup() is called.
      table = lookup_ops.IdTableWithHashBuckets(
          lookup_table,
          oov_buckets,
          hasher_spec=lookup_ops.HasherSpec("my-awesome-hash", None))
      input_string = constant_op.constant(["brain", "salad", "surgery", "UNK"])
      with self.assertRaises(ValueError):
        table.lookup(input_string)
      # StrongHashSpec requires exactly two integer keys.
      with self.assertRaises(ValueError):
        table = lookup_ops.IdTableWithHashBuckets(
            lookup_table,
            oov_buckets,
            hasher_spec=lookup_ops.StrongHashSpec([]))
      with self.assertRaises(ValueError):
        table = lookup_ops.IdTableWithHashBuckets(
            lookup_table,
            oov_buckets,
            hasher_spec=lookup_ops.StrongHashSpec([1, 2, 3]))
      with self.assertRaises(TypeError):
        table = lookup_ops.IdTableWithHashBuckets(
            lookup_table,
            oov_buckets,
            hasher_spec=lookup_ops.StrongHashSpec([None, 2]))

  def testIdTableWithHashBucketsNoInnerTable(self):
    """Without an inner table there is no backing resource handle."""
    with self.cached_session():
      table = lookup_ops.IdTableWithHashBuckets(None, num_oov_buckets=1)
      self.assertIsNone(table.resource_handle)
class MutableHashTableOpTest(test.TestCase):
  def testMutableHashTable(self):
    """Basic insert/remove/lookup/export round trip on a MutableHashTable."""
    with self.cached_session():
      default_val = -1
      keys = constant_op.constant(["brain", "salad", "surgery", "tarkus"])
      values = constant_op.constant([0, 1, 2, 3], dtypes.int64)
      table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
                                          default_val)
      self.assertAllEqual(0, self.evaluate(table.size()))
      self.evaluate(table.insert(keys, values))
      self.assertAllEqual(4, self.evaluate(table.size()))
      # Removing a key that is absent ("tank") is a no-op.
      remove_string = constant_op.constant(["tarkus", "tank"])
      self.evaluate(table.remove(remove_string))
      self.assertAllEqual(3, self.evaluate(table.size()))
      input_string = constant_op.constant(["brain", "salad", "tank"])
      output = table.lookup(input_string)
      self.assertAllEqual([3], output.get_shape())
      result = self.evaluate(output)
      # Missing key "tank" yields default_val.
      self.assertAllEqual([0, 1, -1], result)
      exported_keys, exported_values = table.export()
      # exported data is in the order of the internal map, i.e. undefined
      sorted_keys = np.sort(self.evaluate(exported_keys))
      sorted_values = np.sort(self.evaluate(exported_values))
      self.assertAllEqual([b"brain", b"salad", b"surgery"], sorted_keys)
      self.assertAllEqual([0, 1, 2], sorted_values)
  @test_util.run_v1_only("SaverV1")
  def testSaveRestore(self):
    """A checkpointed MutableHashTable restores its contents via Saver."""
    save_dir = os.path.join(self.get_temp_dir(), "save_restore")
    save_path = os.path.join(tempfile.mkdtemp(prefix=save_dir), "hash")

    # First session: populate the table and save a checkpoint.
    with self.session(graph=ops.Graph()) as sess:
      v0 = variables.Variable(10.0, name="v0")
      v1 = variables.Variable(20.0, name="v1")
      default_val = -1
      keys = constant_op.constant(["b", "c", "d"], dtypes.string)
      values = constant_op.constant([0, 1, 2], dtypes.int64)
      table = lookup_ops.MutableHashTable(
          dtypes.string, dtypes.int64, default_val, name="t1", checkpoint=True)
      save = saver.Saver()
      self.evaluate(variables.global_variables_initializer())
      # Check that the parameter nodes have been initialized.
      self.assertEqual(10.0, self.evaluate(v0))
      self.assertEqual(20.0, self.evaluate(v1))
      self.assertAllEqual(0, self.evaluate(table.size()))
      self.evaluate(table.insert(keys, values))
      self.assertAllEqual(3, self.evaluate(table.size()))
      val = save.save(sess, save_path)
      self.assertIsInstance(val, six.string_types)
      self.assertEqual(save_path, val)

    # Second session: insert different data, then restore over it.
    with self.session(graph=ops.Graph()) as sess:
      v0 = variables.Variable(-1.0, name="v0")
      v1 = variables.Variable(-1.0, name="v1")
      default_val = -1
      table = lookup_ops.MutableHashTable(
          dtypes.string, dtypes.int64, default_val, name="t1", checkpoint=True)
      self.evaluate(
          table.insert(
              constant_op.constant(["a", "c"], dtypes.string),
              constant_op.constant([12, 24], dtypes.int64)))
      self.assertAllEqual(2, self.evaluate(table.size()))
      save = saver.Saver()
      # Restore the saved values in the parameter nodes.
      save.restore(sess, save_path)
      # Check that the parameter nodes have been restored.
      self.assertEqual(10.0, self.evaluate(v0))
      self.assertEqual(20.0, self.evaluate(v1))
      self.assertAllEqual(3, self.evaluate(table.size()))
      # Restore replaces pre-restore inserts: "a" is gone again.
      input_string = constant_op.constant(["a", "b", "c", "d", "e"],
                                          dtypes.string)
      output = table.lookup(input_string)
      self.assertAllEqual([-1, 0, 1, 2, -1], self.evaluate(output))
  @test_util.run_v1_only("SaverV1")
  def testSaveRestoreOnlyTable(self):
    """Saving with Saver([table]) checkpoints the table but not variables."""
    save_dir = os.path.join(self.get_temp_dir(), "save_restore")
    save_path = os.path.join(tempfile.mkdtemp(prefix=save_dir), "hash")

    # First session: populate the table and save only the table.
    with self.session(graph=ops.Graph()) as sess:
      v0 = variables.Variable(10.0, name="v0")
      v1 = variables.Variable(20.0, name="v1")
      default_val = -1
      keys = constant_op.constant(["b", "c", "d"], dtypes.string)
      values = constant_op.constant([0, 1, 2], dtypes.int64)
      table = lookup_ops.MutableHashTable(
          dtypes.string, dtypes.int64, default_val, name="t1", checkpoint=True)
      save = saver.Saver([table])
      self.evaluate(variables.global_variables_initializer())
      # Check that the parameter nodes have been initialized.
      self.assertEqual(10.0, self.evaluate(v0))
      self.assertEqual(20.0, self.evaluate(v1))
      self.assertAllEqual(0, self.evaluate(table.size()))
      self.evaluate(table.insert(keys, values))
      self.assertAllEqual(3, self.evaluate(table.size()))
      val = save.save(sess, save_path)
      self.assertIsInstance(val, six.string_types)
      self.assertEqual(save_path, val)

    # Second session: restore the table-only checkpoint.
    with self.session(graph=ops.Graph()) as sess:
      default_val = -1
      table = lookup_ops.MutableHashTable(
          dtypes.string, dtypes.int64, default_val, name="t1", checkpoint=True)
      self.evaluate(
          table.insert(
              constant_op.constant(["a", "c"], dtypes.string),
              constant_op.constant([12, 24], dtypes.int64)))
      self.assertAllEqual(2, self.evaluate(table.size()))
      save = saver.Saver([table])
      # Restore the saved values in the parameter nodes.
      save.restore(sess, save_path)
      # Check that the parameter nodes have been restored.
      self.assertAllEqual(3, self.evaluate(table.size()))
      input_string = constant_op.constant(["a", "b", "c", "d", "e"],
                                          dtypes.string)
      output = table.lookup(input_string)
      self.assertAllEqual([-1, 0, 1, 2, -1], self.evaluate(output))
  @test_util.run_in_graph_and_eager_modes
  def testObjectSaveRestore(self):
    """Object-based (trackable.Checkpoint) save/restore of a table."""
    save_dir = os.path.join(self.get_temp_dir(), "save_restore")
    save_prefix = os.path.join(tempfile.mkdtemp(prefix=save_dir), "hash")

    v0 = variables.Variable(10.0, name="v0")
    v1 = variables.Variable(20.0, name="v1")
    default_val = -1
    keys = constant_op.constant(["b", "c", "d"], dtypes.string)
    values = constant_op.constant([0, 1, 2], dtypes.int64)
    table = lookup_ops.MutableHashTable(
        dtypes.string, dtypes.int64, default_val, name="t1", checkpoint=True)
    checkpoint = trackable.Checkpoint(table=table, v0=v0, v1=v1)
    self.evaluate([v0.initializer, v1.initializer])
    # Check that the parameter nodes have been initialized.
    self.assertEqual(10.0, self.evaluate(v0))
    self.assertEqual(20.0, self.evaluate(v1))
    self.assertAllEqual(0, self.evaluate(table.size()))
    self.evaluate(table.insert(keys, values))
    self.assertAllEqual(3, self.evaluate(table.size()))
    save_path = checkpoint.save(save_prefix)
    del table, checkpoint, v0, v1

    # Rebuild the objects with different contents, then restore over them.
    v0 = variables.Variable(-1.0, name="v0")
    v1 = variables.Variable(-1.0, name="v1")
    default_val = -1
    table = lookup_ops.MutableHashTable(
        dtypes.string, dtypes.int64, default_val, name="t1", checkpoint=True)
    self.evaluate(
        table.insert(
            constant_op.constant(["a", "c"], dtypes.string),
            constant_op.constant([12, 24], dtypes.int64)))
    self.assertAllEqual(2, self.evaluate(table.size()))
    checkpoint = trackable.Checkpoint(table=table, v0=v0, v1=v1)
    # Restore the saved values in the parameter nodes.
    checkpoint.restore(save_path).run_restore_ops()
    # Check that the parameter nodes have been restored.
    self.assertEqual(10.0, self.evaluate(v0))
    self.assertEqual(20.0, self.evaluate(v1))
    self.assertAllEqual(3, self.evaluate(table.size()))
    # Restore replaces pre-restore inserts: "a" is gone again.
    input_string = constant_op.constant(["a", "b", "c", "d", "e"],
                                        dtypes.string)
    output = table.lookup(input_string)
    self.assertAllEqual([-1, 0, 1, 2, -1], self.evaluate(output))
@test_util.run_v1_only("Multiple sessions")
def testSharing(self):
# Start a server to store the table state
server = server_lib.Server({"local0": ["localhost:0"]},
protocol="grpc",
start=True)
# Create two sessions sharing the same state
session1 = session.Session(server.target)
session2 = session.Session(server.target)
table = lookup_ops.MutableHashTable(
dtypes.int64, dtypes.string, "-", name="t1")
# Populate the table in the first session
with session1:
self.assertAllEqual(0, table.size())
keys = constant_op.constant([11, 12], dtypes.int64)
values = constant_op.constant(["a", "b"])
table.insert(keys, values).run()
self.assertAllEqual(2, table.size())
output = table.lookup(constant_op.constant([11, 12, 13], dtypes.int64))
self.assertAllEqual([b"a", b"b", b"-"], output)
# Verify that we can access the shared data from the second session
with session2:
self.assertAllEqual(2, table.size())
output = table.lookup(constant_op.constant([10, 11, 12], dtypes.int64))
self.assertAllEqual([b"-", b"a", b"b"], output)
def testMutableHashTableOfTensors(self):
with self.cached_session():
default_val = constant_op.constant([-1, -1], dtypes.int64)
keys = constant_op.constant(["brain", "salad", "surgery", "tarkus"])
values = constant_op.constant([[0, 1], [2, 3], [4, 5], [6, 7]],
dtypes.int64)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(4, self.evaluate(table.size()))
remove_string = constant_op.constant(["tarkus", "tank"])
self.evaluate(table.remove(remove_string))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
output = table.lookup(input_string)
self.assertAllEqual([3, 2], output.get_shape())
result = self.evaluate(output)
self.assertAllEqual([[0, 1], [2, 3], [-1, -1]], result)
exported_keys, exported_values = table.export()
# exported data is in the order of the internal map, i.e. undefined
sorted_keys = np.sort(self.evaluate(exported_keys))
sorted_values = np.sort(self.evaluate(exported_values), axis=0)
self.assertAllEqual([b"brain", b"salad", b"surgery"], sorted_keys)
sorted_expected_values = np.sort([[4, 5], [2, 3], [0, 1]], axis=0)
self.assertAllEqual(sorted_expected_values, sorted_values)
def testMutableHashTableExportInsert(self):
with self.cached_session():
default_val = constant_op.constant([-1, -1], dtypes.int64)
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([[0, 1], [2, 3], [4, 5]], dtypes.int64)
table1 = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.assertAllEqual(0, self.evaluate(table1.size()))
self.evaluate(table1.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table1.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
expected_output = [[0, 1], [2, 3], [-1, -1]]
output1 = table1.lookup(input_string)
self.assertAllEqual(expected_output, self.evaluate(output1))
exported_keys, exported_values = table1.export()
self.assertAllEqual(3, self.evaluate(exported_keys).size)
self.assertAllEqual(6, self.evaluate(exported_values).size)
# Populate a second table from the exported data
table2 = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.assertAllEqual(0, self.evaluate(table2.size()))
self.evaluate(table2.insert(exported_keys, exported_values))
self.assertAllEqual(3, self.evaluate(table2.size()))
# Verify lookup result is still the same
output2 = table2.lookup(input_string)
self.assertAllEqual(expected_output, self.evaluate(output2))
def testMutableHashTableOfTensorsInvalidShape(self):
with self.cached_session():
default_val = constant_op.constant([-1, -1], dtypes.int64)
keys = constant_op.constant(["brain", "salad", "surgery"])
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
# Shape [6] instead of [3, 2]
values = constant_op.constant([0, 1, 2, 3, 4, 5], dtypes.int64)
with self.assertRaisesOpError("Expected shape"):
self.evaluate(table.insert(keys, values))
# Shape [2,3] instead of [3, 2]
values = constant_op.constant([[0, 1, 2], [3, 4, 5]], dtypes.int64)
with self.assertRaisesOpError("Expected shape"):
self.evaluate(table.insert(keys, values))
# Shape [2, 2] instead of [3, 2]
values = constant_op.constant([[0, 1], [2, 3]], dtypes.int64)
with self.assertRaisesOpError("Expected shape"):
self.evaluate(table.insert(keys, values))
# Shape [3, 1] instead of [3, 2]
values = constant_op.constant([[0], [2], [4]], dtypes.int64)
with self.assertRaisesOpError("Expected shape"):
self.evaluate(table.insert(keys, values))
# Valid Insert
values = constant_op.constant([[0, 1], [2, 3], [4, 5]], dtypes.int64)
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
def testMutableHashTableInvalidDefaultValue(self):
with self.cached_session():
default_val = constant_op.constant([[-1, -1]], dtypes.int64)
with self.assertRaisesOpError("Default value must be a vector"):
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.assertAllEqual(0, self.evaluate(table.size()))
def testMutableHashTableDuplicateInsert(self):
with self.cached_session():
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery", "brain"])
values = constant_op.constant([0, 1, 2, 3], dtypes.int64)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual([3, 1, -1], result)
def testMutableHashTableFindHighRank(self):
with self.cached_session():
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant([["brain", "salad"],
["tank", "tarkus"]])
output = table.lookup(input_string)
self.assertAllEqual([2, 2], output.get_shape())
result = self.evaluate(output)
self.assertAllEqual([[0, 1], [-1, -1]], result)
def testMutableHashTableInsertHighRank(self):
with self.cached_session():
default_val = -1
keys = constant_op.constant([["brain", "salad"], ["surgery", "tank"]])
values = constant_op.constant([[0, 1], [2, 3]], dtypes.int64)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.evaluate(table.insert(keys, values))
self.assertAllEqual(4, self.evaluate(table.size()))
input_string = constant_op.constant(["brain", "salad", "tank", "tarkus"])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual([0, 1, 3, -1], result)
def testMutableHashTableRemoveHighRank(self):
with self.test_session():
default_val = -1
keys = constant_op.constant([["brain", "salad"], ["surgery", "tank"]])
values = constant_op.constant([[0, 1], [2, 3]], dtypes.int64)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.evaluate(table.insert(keys, values))
self.assertAllEqual(4, self.evaluate(table.size()))
remove_string = constant_op.constant(["salad", "tarkus"])
self.evaluate(table.remove(remove_string))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["brain", "salad", "tank", "tarkus"])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual([0, -1, 3, -1], result)
def testMutableHashTableOfTensorsFindHighRank(self):
with self.cached_session():
default_val = constant_op.constant([-1, -1, -1], dtypes.int64)
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([[0, 1, 2], [2, 3, 4], [4, 5, 6]],
dtypes.int64)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant([["brain", "salad"],
["tank", "tarkus"]])
output = table.lookup(input_string)
self.assertAllEqual([2, 2, 3], output.get_shape())
result = self.evaluate(output)
self.assertAllEqual(
[[[0, 1, 2], [2, 3, 4]], [[-1, -1, -1], [-1, -1, -1]]], result)
def testMutableHashTableOfTensorsRemoveHighRank(self):
with self.test_session():
default_val = constant_op.constant([-1, -1, -1], dtypes.int64)
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([[0, 1, 2], [2, 3, 4], [4, 5, 6]],
dtypes.int64)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
remove_string = constant_op.constant([["brain", "tank"]])
self.evaluate(table.remove(remove_string))
self.assertAllEqual(2, self.evaluate(table.size()))
input_string = constant_op.constant([["brain", "salad"],
["surgery", "tank"]])
output = table.lookup(input_string)
self.assertAllEqual([2, 2, 3], output.get_shape())
result = self.evaluate(output)
self.assertAllEqual(
[[[-1, -1, -1], [2, 3, 4]], [[4, 5, 6], [-1, -1, -1]]], result)
def testMultipleMutableHashTables(self):
with self.cached_session():
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table1 = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
table2 = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
table3 = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.evaluate(table1.insert(keys, values))
self.evaluate(table2.insert(keys, values))
self.evaluate(table3.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table1.size()))
self.assertAllEqual(3, self.evaluate(table2.size()))
self.assertAllEqual(3, self.evaluate(table3.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
output1 = table1.lookup(input_string)
output2 = table2.lookup(input_string)
output3 = table3.lookup(input_string)
out1, out2, out3 = self.evaluate([output1, output2, output3])
self.assertAllEqual([0, 1, -1], out1)
self.assertAllEqual([0, 1, -1], out2)
self.assertAllEqual([0, 1, -1], out3)
def testMutableHashTableWithTensorDefault(self):
with self.cached_session():
default_val = constant_op.constant(-1, dtypes.int64)
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual([0, 1, -1], result)
def testSignatureMismatch(self):
with self.cached_session():
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.int64,
default_val)
# insert with keys of the wrong type
with self.assertRaises(ValueError):
self.evaluate(table.insert(constant_op.constant([4, 5, 6]), values))
# insert with values of the wrong type
with self.assertRaises(ValueError):
self.evaluate(table.insert(keys, constant_op.constant(["a", "b", "c"])))
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string_ref = variables.Variable("brain")
input_int64_ref = variables.Variable(-1, dtype=dtypes.int64)
self.evaluate(variables.global_variables_initializer())
# Ref types do not produce an insert signature mismatch.
self.evaluate(table.insert(input_string_ref, input_int64_ref))
self.assertAllEqual(3, self.evaluate(table.size()))
# Ref types do not produce a lookup signature mismatch.
self.assertEqual(-1, self.evaluate(table.lookup(input_string_ref)))
# lookup with keys of the wrong type
input_string = constant_op.constant([1, 2, 3], dtypes.int64)
with self.assertRaises(ValueError):
self.evaluate(table.lookup(input_string))
# default value of the wrong type
with self.assertRaises(TypeError):
lookup_ops.MutableHashTable(dtypes.string, dtypes.int64, "UNK")
def testMutableHashTableStringFloat(self):
with self.cached_session():
default_val = -1.5
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1.1, 2.2], dtypes.float32)
table = lookup_ops.MutableHashTable(dtypes.string, dtypes.float32,
default_val)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant(["brain", "salad", "tank"])
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllClose([0, 1.1, default_val], result)
def testMutableHashTableIntFloat(self):
with self.cached_session():
default_val = -1.0
keys = constant_op.constant([3, 7, 0], dtypes.int64)
values = constant_op.constant([7.5, -1.2, 9.9], dtypes.float32)
table = lookup_ops.MutableHashTable(dtypes.int64, dtypes.float32,
default_val)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant([7, 0, 11], dtypes.int64)
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllClose([-1.2, 9.9, default_val], result)
def testMutableHashTableInt64String(self):
with self.cached_session():
default_val = "n/a"
keys = constant_op.constant([0, 1, 2], dtypes.int64)
values = constant_op.constant(["brain", "salad", "surgery"])
table = lookup_ops.MutableHashTable(dtypes.int64, dtypes.string,
default_val)
self.assertAllEqual(0, self.evaluate(table.size()))
self.evaluate(table.insert(keys, values))
self.assertAllEqual(3, self.evaluate(table.size()))
input_string = constant_op.constant([0, 1, 3], dtypes.int64)
output = table.lookup(input_string)
result = self.evaluate(output)
self.assertAllEqual((b"brain", b"salad", b"n/a"), result)
def testExportShapeInference(self):
default_value = -1
table = lookup_ops.MutableHashTable(
dtypes.int64,
dtypes.int64,
default_value=default_value)
actual_shapes = [t.shape for t in table.export()]
inferred_shapes = []
@def_function.function
def f():
for t in table.export():
inferred_shapes.append(t.shape)
f()
self.assertLen(actual_shapes, 2)
self.assertLen(inferred_shapes, 2)
self.assertTrue(inferred_shapes[0].is_compatible_with(actual_shapes[0]))
self.assertTrue(inferred_shapes[1].is_compatible_with(actual_shapes[1]))
class MutableHashTableBenchmark(test.Benchmark):
def _create_table(self):
return lookup_ops.MutableHashTable(dtypes.int64, dtypes.float32, 0.0)
def benchmark_single_repeated_scalar_insert_scalar(self):
table = self._create_table()
value = variables.Variable(1.0)
insert = table.insert(0, value)
size = table.size()
with session.Session() as sess:
sess.run(value.initializer)
self.run_op_benchmark(sess, insert, burn_iters=10, min_iters=10000)
assert sess.run(size) == 1
def benchmark_many_repeated_scalar_insert_scalar(self):
table = self._create_table()
c = dataset_ops.make_one_shot_iterator(counter.Counter()).get_next()
value = variables.Variable(1.0)
insert = table.insert(c, value)
size = table.size()
with session.Session() as sess:
sess.run(value.initializer)
self.run_op_benchmark(sess, insert, burn_iters=10, min_iters=10000)
assert sess.run(size) >= 10000
def benchmark_single_repeated_batch_32_insert_scalar(self):
table = self._create_table()
value = variables.Variable([1.0] * 32)
insert = table.insert(list(range(32)), value)
size = table.size()
with session.Session() as sess:
sess.run(value.initializer)
self.run_op_benchmark(sess, insert, burn_iters=10, min_iters=1000)
assert sess.run(size) == 32
def benchmark_many_repeated_batch_32_insert_scalar(self):
table = self._create_table()
c = dataset_ops.make_one_shot_iterator(counter.Counter()).get_next()
value = variables.Variable([1.0] * 32)
insert = table.insert(32 * c + list(range(32)), value)
size = table.size()
with session.Session() as sess:
sess.run(value.initializer)
self.run_op_benchmark(sess, insert, burn_iters=10, min_iters=1000)
assert sess.run(size) >= 1000 * 32
class DenseHashTableBenchmark(MutableHashTableBenchmark):
def _create_table(self):
return lookup_ops.DenseHashTable(
dtypes.int64,
dtypes.float32,
default_value=0.0,
empty_key=-1,
deleted_key=-2)
if __name__ == "__main__":
test.main()<|fim▁end|> | |
<|file_name|>not_in_sample.py<|end_file_name|><|fim▁begin|>__author__ = "Christian Kongsgaard"
__license__ = 'MIT'
# -------------------------------------------------------------------------------------------------------------------- #
# IMPORTS
# Modules
# RiBuild Modules
from delphin_6_automation.database_interactions.db_templates import delphin_entry
from delphin_6_automation.database_interactions.db_templates import sample_entry
from delphin_6_automation.database_interactions import mongo_setup
from delphin_6_automation.database_interactions.auth import auth_dict
# -------------------------------------------------------------------------------------------------------------------- #
# RIBuild
def correct_delphin():
samples = sample_entry.Sample.objects().only('delphin_docs')
print(f'There is {samples.count()} samples in DB')
sample_projects = []
for sample in samples:
if len(sample.delphin_docs) == 0:
print(f'Sample {sample.id} has no delphin projects. Deleting!')
sample.delete()
else:
for delphin in sample.delphin_docs:
sample_projects.append(delphin.id)
print(f'There is {len(sample_projects)} connected to a sample')
projects = delphin_entry.Delphin.objects().only('id')
print(f'There are currently {len(projects)} projects in the database')
print('Starting')
for proj in projects:
if proj.id not in sample_projects:
#print(f'Project with ID: {proj.id} is not part of a sample!')
proj.delete()
def correct_sample():
samples = sample_entry.Sample.objects()
for sample in samples:
docs = []
for ref in sample.delphin_docs:
delphin_projects = delphin_entry.Delphin.objects(id=ref.id)
if delphin_projects:
docs.append(delphin_projects.first())
else:
print(f'Found non existent project: {ref.id}')
sample.delphin_docs = docs
sample.save()
def correct_strategy():
strategy = sample_entry.Strategy.objects().first()
keep = []
for sample in strategy.samples:
found_sample = sample_entry.Sample.objects(id=sample.id)
if found_sample:
keep.append(found_sample.first().id)
else:
print(f"Sample {sample.id} was not in the DB")
print(f"Found samples {len(keep)} to keep: {keep}")
strategy.samples = keep
strategy.save()
def modify_sample():
id_ = "5e7878ce582e3e000172996d"
sample = sample_entry.Sample.objects(id=id_).first()
print('Got sample')
sample.mean = {}
sample.standard_deviation = {}
sample.save()
def correct_sample2():
samples = sample_entry.Sample.objects().only('id')
print(f"There is {samples.count()} samples in DB")
for i in range(samples.count()):
samples = sample_entry.Sample.objects(iteration=i).only('id')
print(f'There is {samples.count()} with iteration {i}')<|fim▁hole|>
if samples.count() > 1:
print(f"There is {samples.count()} samples with iteration {i}")
for j, sample in enumerate(samples):
if j == 0:
pass
else:
print(f'Deleting: {sample.id}')
#sample.delete()
if __name__ == '__main__':
server = mongo_setup.global_init(auth_dict)
#modify_sample()
#correct_sample()
#correct_sample2()
correct_delphin()
correct_strategy()
mongo_setup.global_end_ssh(server)<|fim▁end|> | |
<|file_name|>dom_html_base_font_element.rs<|end_file_name|><|fim▁begin|>// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
<|fim▁hole|>use DOMElement;
use DOMEventTarget;
use DOMHTMLElement;
use DOMNode;
use DOMObject;
use glib::GString;
use glib::object::IsA;
use glib::translate::*;
use libc;
use std::fmt;
use webkit2_webextension_sys;
glib_wrapper! {
pub struct DOMHTMLBaseFontElement(Object<webkit2_webextension_sys::WebKitDOMHTMLBaseFontElement, webkit2_webextension_sys::WebKitDOMHTMLBaseFontElementClass, DOMHTMLBaseFontElementClass>) @extends DOMHTMLElement, DOMElement, DOMNode, DOMObject, @implements DOMEventTarget;
match fn {
get_type => || webkit2_webextension_sys::webkit_dom_html_base_font_element_get_type(),
}
}
pub const NONE_DOMHTML_BASE_FONT_ELEMENT: Option<&DOMHTMLBaseFontElement> = None;
pub trait DOMHTMLBaseFontElementExt: 'static {
#[cfg_attr(feature = "v2_12", deprecated)]
fn get_color(&self) -> Option<GString>;
#[cfg_attr(feature = "v2_12", deprecated)]
fn get_face(&self) -> Option<GString>;
#[cfg_attr(feature = "v2_12", deprecated)]
fn get_size(&self) -> libc::c_long;
#[cfg_attr(feature = "v2_12", deprecated)]
fn set_color(&self, value: &str);
#[cfg_attr(feature = "v2_12", deprecated)]
fn set_face(&self, value: &str);
#[cfg_attr(feature = "v2_12", deprecated)]
fn set_size(&self, value: libc::c_long);
}
impl<O: IsA<DOMHTMLBaseFontElement>> DOMHTMLBaseFontElementExt for O {
fn get_color(&self) -> Option<GString> {
unsafe {
from_glib_full(webkit2_webextension_sys::webkit_dom_html_base_font_element_get_color(self.as_ref().to_glib_none().0))
}
}
fn get_face(&self) -> Option<GString> {
unsafe {
from_glib_full(webkit2_webextension_sys::webkit_dom_html_base_font_element_get_face(self.as_ref().to_glib_none().0))
}
}
fn get_size(&self) -> libc::c_long {
unsafe {
webkit2_webextension_sys::webkit_dom_html_base_font_element_get_size(self.as_ref().to_glib_none().0)
}
}
fn set_color(&self, value: &str) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_base_font_element_set_color(self.as_ref().to_glib_none().0, value.to_glib_none().0);
}
}
fn set_face(&self, value: &str) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_base_font_element_set_face(self.as_ref().to_glib_none().0, value.to_glib_none().0);
}
}
fn set_size(&self, value: libc::c_long) {
unsafe {
webkit2_webextension_sys::webkit_dom_html_base_font_element_set_size(self.as_ref().to_glib_none().0, value);
}
}
}
impl fmt::Display for DOMHTMLBaseFontElement {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "DOMHTMLBaseFontElement")
}
}<|fim▁end|> | |
<|file_name|>HPCJob.py<|end_file_name|><|fim▁begin|>import argparse
import logging
import os
import sys
from mpi4py import MPI
from pandayoda.yodacore import Yoda
from pandayoda.yodaexe import Droid
import logging
logging.basicConfig(level=logging.DEBUG)
def main(globalWorkDir, localWorkDir):
comm = MPI.COMM_WORLD
mpirank = comm.Get_rank()
# Create separate working directory for each rank
from os.path import abspath as _abspath, join as _join
curdir = _abspath (localWorkDir)
wkdirname = "rank_%s" % str(mpirank)
wkdir = _abspath (_join(curdir,wkdirname))
if not os.path.exists(wkdir):
os.makedirs (wkdir)
os.chdir (wkdir)
if mpirank==0:
yoda = Yoda.Yoda(globalWorkDir, localWorkDir)<|fim▁hole|>
if __name__ == "__main__":
usage = """
usage: %(prog)s <command> [options] [args]
Commands:
help <command> Output help for one of the commands below
"""
oparser = argparse.ArgumentParser(prog=os.path.basename(sys.argv[0]), add_help=True)
oparser.add_argument('--globalWorkingDir', dest="globalWorkingDir", default=None, help="Global share working directory")
oparser.add_argument('--localWorkingDir', dest="localWorkingDir", default=None, help="Local working directory. if it's not set, it will use global working directory")
if len(sys.argv) == 1:
oparser.print_help()
sys.exit(-1)
args = oparser.parse_args(sys.argv[1:])
if args.globalWorkingDir is None:
print "Global working directory is needed."
oparser.print_help()
sys.exit(-1)
if args.localWorkingDir is None:
args.localWorkingDir = args.globalWorkingDir
try:
main(args.globalWorkingDir, args.localWorkingDir)
print "HPCJob-Yoda success"
sys.exit(0)
except Exception as e:
print "HPCJob-Yoda failed"
print(e)
sys.exit(1)<|fim▁end|> | yoda.run()
else:
droid = Droid.Droid(globalWorkDir, localWorkDir)
droid.run() |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from .plot_widget import PlotWidget
from .filter_popup import FilterPopup
from .filterable_kw_list_model import FilterableKwListModel
from .data_type_keys_list_model import DataTypeKeysListModel
from .data_type_proxy_model import DataTypeProxyModel
from .data_type_keys_widget import DataTypeKeysWidget<|fim▁hole|>from .color_chooser import ColorBox
from .style_chooser import StyleChooser
from .plot_window import PlotWindow
from .plot_tool import PlotTool<|fim▁end|> |
from .plot_case_model import PlotCaseModel
from .plot_case_selection_widget import CaseSelectionWidget
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>########
# Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
__author__ = 'Ganesh'
from setuptools import setup
<|fim▁hole|>
setup(
name='cloudify-aws',
version=version,
author='ran',
author_email='ganeshpandi.g@cloudenablers.com',
packages=['cloudify_aws'],
license='LICENSE',
description='the cloudify amazon provider',
package_data={'cloudify_aws': ['cloudify-config.yaml',
'cloudify-config.defaults.yaml']},
install_requires=[
"scp",
"fabric",
"jsonschema",
"IPy", 'boto'
]
)<|fim▁end|> | version = '0.3' |
<|file_name|>workqueue.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! A work queue for scheduling units of work across threads in a fork-join fashion.
//!
//! Data associated with queues is simply a pair of unsigned integers. It is expected that a
//! higher-level API on top of this could allow safe fork-join parallelism.
use deque::{Abort, BufferPool, Data, Empty, Stealer, Worker};
use task::spawn_named;
use task_state;
use libc::funcs::posix88::unistd::usleep;
use rand::{Rng, weak_rng, XorShiftRng};
use std::mem;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::mpsc::{channel, Sender, Receiver};
/// A unit of work.
///
/// # Type parameters
///
/// - `QueueData`: global custom data for the entire work queue.
/// - `WorkData`: custom data specific to each unit of work.
pub struct WorkUnit<QueueData, WorkData> {
/// The function to execute.
pub fun: extern "Rust" fn(WorkData, &mut WorkerProxy<QueueData, WorkData>),
/// Arbitrary data.
pub data: WorkData,
}
/// Messages from the supervisor to the worker.
enum WorkerMsg<QueueData: 'static, WorkData: 'static> {
/// Tells the worker to start work.
Start(Worker<WorkUnit<QueueData, WorkData>>, *mut AtomicUsize, *const QueueData),
/// Tells the worker to stop. It can be restarted again with a `WorkerMsg::Start`.
Stop,
/// Tells the worker to measure the heap size of its TLS using the supplied function.
HeapSizeOfTLS(fn() -> usize),
/// Tells the worker thread to terminate.
Exit,
}
unsafe impl<QueueData: 'static, WorkData: 'static> Send for WorkerMsg<QueueData, WorkData> {}
/// Messages to the supervisor.
enum SupervisorMsg<QueueData: 'static, WorkData: 'static> {
Finished,
HeapSizeOfTLS(usize),
ReturnDeque(usize, Worker<WorkUnit<QueueData, WorkData>>),
}
unsafe impl<QueueData: 'static, WorkData: 'static> Send for SupervisorMsg<QueueData, WorkData> {}
/// Information that the supervisor thread keeps about the worker threads.
struct WorkerInfo<QueueData: 'static, WorkData: 'static> {
/// The communication channel to the workers.
chan: Sender<WorkerMsg<QueueData, WorkData>>,
/// The worker end of the deque, if we have it.
deque: Option<Worker<WorkUnit<QueueData, WorkData>>>,
/// The thief end of the work-stealing deque.
thief: Stealer<WorkUnit<QueueData, WorkData>>,
}
/// Information specific to each worker thread that the thread keeps.
struct WorkerThread<QueueData: 'static, WorkData: 'static> {
/// The index of this worker.
index: usize,
/// The communication port from the supervisor.
port: Receiver<WorkerMsg<QueueData, WorkData>>,
/// The communication channel on which messages are sent to the supervisor.
chan: Sender<SupervisorMsg<QueueData, WorkData>>,
/// The thief end of the work-stealing deque for all other workers.
other_deques: Vec<Stealer<WorkUnit<QueueData, WorkData>>>,
/// The random number generator for this worker.
rng: XorShiftRng,
}
unsafe impl<QueueData: 'static, WorkData: 'static> Send for WorkerThread<QueueData, WorkData> {}
const SPINS_UNTIL_BACKOFF: u32 = 128;
const BACKOFF_INCREMENT_IN_US: u32 = 5;
const BACKOFFS_UNTIL_CONTROL_CHECK: u32 = 6;
fn next_power_of_two(mut v: u32) -> u32 {
v -= 1;
v |= v >> 1;
v |= v >> 2;
v |= v >> 4;
v |= v >> 8;
v |= v >> 16;
v += 1;
v
}
impl<QueueData: Send, WorkData: Send> WorkerThread<QueueData, WorkData> {
    /// The main logic. This function starts up the worker and listens for
    /// messages.
    ///
    /// Lifecycle per `WorkQueue::run` call: receive `Start` (taking ownership
    /// of our deque), drain work — stealing from random peers when the local
    /// deque is empty — until a `Stop` message arrives, then return the deque
    /// to the supervisor. `Exit` terminates the thread.
    fn start(&mut self) {
        // Mask for drawing a random victim index without a modulo; draws that
        // land past the peer count are rejected and redrawn below.
        let deque_index_mask = next_power_of_two(self.other_deques.len() as u32) - 1;
        loop {
            // Wait for a start message.
            let (mut deque, ref_count, queue_data) = match self.port.recv().unwrap() {
                WorkerMsg::Start(deque, ref_count, queue_data) => (deque, ref_count, queue_data),
                WorkerMsg::Stop => panic!("unexpected stop message"),
                WorkerMsg::Exit => return,
                WorkerMsg::HeapSizeOfTLS(f) => {
                    self.chan.send(SupervisorMsg::HeapSizeOfTLS(f())).unwrap();
                    continue;
                }
            };

            // Accumulated sleep time (µs) used while stealing; reset to zero
            // whenever a steal succeeds.
            let mut back_off_sleep = 0 as u32;

            // We're off!
            //
            // FIXME(pcwalton): Can't use labeled break or continue cross-crate due to a Rust bug.
            loop {
                // FIXME(pcwalton): Nasty workaround for the lack of labeled break/continue
                // cross-crate.

                // NOTE(review): `mem::uninitialized` is deprecated and UB for
                // most types. Every path below either assigns `work_unit` or
                // breaks/returns before reading it, but this should be
                // rewritten with `Option`/`MaybeUninit`.
                let mut work_unit = unsafe {
                    mem::uninitialized()
                };
                match deque.pop() {
                    Some(work) => work_unit = work,
                    None => {
                        // Become a thief.
                        let mut i = 0;
                        let mut should_continue = true;
                        loop {
                            // Don't just use `rand % len` because that's slow on ARM.
                            let mut victim;
                            loop {
                                victim = self.rng.next_u32() & deque_index_mask;
                                if (victim as usize) < self.other_deques.len() {
                                    break
                                }
                            }

                            match self.other_deques[victim as usize].steal() {
                                Empty | Abort => {
                                    // Continue.
                                }
                                Data(work) => {
                                    work_unit = work;
                                    back_off_sleep = 0 as u32;
                                    break
                                }
                            }

                            // After enough fruitless spins, sleep a little and
                            // periodically poll the supervisor port so a
                            // starved worker can still observe Stop/Exit.
                            if i > SPINS_UNTIL_BACKOFF {
                                if back_off_sleep >= BACKOFF_INCREMENT_IN_US *
                                    BACKOFFS_UNTIL_CONTROL_CHECK {
                                    match self.port.try_recv() {
                                        Ok(WorkerMsg::Stop) => {
                                            should_continue = false;
                                            break
                                        }
                                        Ok(WorkerMsg::Exit) => return,
                                        Ok(_) => panic!("unexpected message"),
                                        _ => {}
                                    }
                                }

                                unsafe {
                                    usleep(back_off_sleep as u32);
                                }
                                back_off_sleep += BACKOFF_INCREMENT_IN_US;
                                i = 0
                            } else {
                                i += 1
                            }
                        }

                        if !should_continue {
                            break
                        }
                    }
                }

                // At this point, we have some work. Perform it.
                let mut proxy = WorkerProxy {
                    worker: &mut deque,
                    ref_count: ref_count,
                    queue_data: queue_data,
                    worker_index: self.index as u8,
                };
                (work_unit.fun)(work_unit.data, &mut proxy);

                // The work is done. Now decrement the count of outstanding work items. If this was
                // the last work unit in the queue, then send a message on the channel.
                unsafe {
                    if (*ref_count).fetch_sub(1, Ordering::Release) == 1 {
                        self.chan.send(SupervisorMsg::Finished).unwrap()
                    }
                }
            }

            // Give the deque back to the supervisor.
            self.chan.send(SupervisorMsg::ReturnDeque(self.index, deque)).unwrap()
        }
    }
}
/// A handle to the work queue that individual work units have.
pub struct WorkerProxy<'a, QueueData: 'a, WorkData: 'a> {
    /// The worker-local deque; pushed units land here (and may be stolen by peers).
    worker: &'a mut Worker<WorkUnit<QueueData, WorkData>>,
    /// Shared count of outstanding work units (lives on `WorkQueue::run`'s stack).
    ref_count: *mut AtomicUsize,
    /// Pointer to the queue-wide user data owned by the `WorkQueue`.
    queue_data: *const QueueData,
    /// Index of the worker this proxy belongs to.
    worker_index: u8,
}
impl<'a, QueueData: 'static, WorkData: Send + 'static> WorkerProxy<'a, QueueData, WorkData> {
    /// Enqueues a block into the work queue.
    ///
    /// The outstanding-work count is bumped *before* the unit becomes
    /// stealable, so the supervisor can never observe a premature "finished".
    #[inline]
    pub fn push(&mut self, work_unit: WorkUnit<QueueData, WorkData>) {
        unsafe {
            drop((*self.ref_count).fetch_add(1, Ordering::Relaxed));
        }
        self.worker.push(work_unit);
    }

    /// Retrieves the queue user data.
    ///
    /// Safety: `queue_data` points at the `data` field of the owning
    /// `WorkQueue`, which `run` keeps alive for the duration of every unit.
    #[inline]
    pub fn user_data<'b>(&'b self) -> &'b QueueData {
        // A plain raw-pointer dereference is the idiomatic, type-checked way
        // to produce this reference; `mem::transmute` was needlessly unsafe.
        unsafe {
            &*self.queue_data
        }
    }

    /// Retrieves the index of the worker.
    #[inline]
    pub fn worker_index(&self) -> u8 {
        self.worker_index
    }
}
/// A work queue on which units of work can be submitted.
///
/// Created by `WorkQueue::new`; use `push` to enqueue units and `run` to
/// execute the batch synchronously on the worker threads.
pub struct WorkQueue<QueueData: 'static, WorkData: 'static> {
    /// Information about each of the workers.
    workers: Vec<WorkerInfo<QueueData, WorkData>>,
    /// A port on which deques can be received from the workers.
    port: Receiver<SupervisorMsg<QueueData, WorkData>>,
    /// The amount of work that has been enqueued.
    work_count: usize,
    /// Arbitrary user data.
    pub data: QueueData,
}
impl<QueueData: Send, WorkData: Send> WorkQueue<QueueData, WorkData> {
/// Creates a new work queue and spawns all the threads associated with
/// it.
pub fn new(task_name: &'static str,
state: task_state::TaskState,
thread_count: usize,
user_data: QueueData) -> WorkQueue<QueueData, WorkData> {
// Set up data structures.
let (supervisor_chan, supervisor_port) = channel();
let (mut infos, mut threads) = (vec!(), vec!());
for i in 0..thread_count {
let (worker_chan, worker_port) = channel();
let pool = BufferPool::new();
let (worker, thief) = pool.deque();
infos.push(WorkerInfo {
chan: worker_chan,
deque: Some(worker),
thief: thief,
});
threads.push(WorkerThread {
index: i,
port: worker_port,
chan: supervisor_chan.clone(),
other_deques: vec!(),
rng: weak_rng(),
});
}
// Connect workers to one another.
for i in 0..thread_count {
for j in 0..thread_count {
if i != j {
threads[i].other_deques.push(infos[j].thief.clone())
}
}
assert!(threads[i].other_deques.len() == thread_count - 1)
}
// Spawn threads.
for (i, thread) in threads.into_iter().enumerate() {
spawn_named(
format!("{} worker {}/{}", task_name, i+1, thread_count),
move || {
task_state::initialize(state | task_state::IN_WORKER);
let mut thread = thread;
thread.start()
})
}
WorkQueue {
workers: infos,
port: supervisor_port,
work_count: 0,
data: user_data,
}
}
/// Enqueues a block into the work queue.
#[inline]
pub fn push(&mut self, work_unit: WorkUnit<QueueData, WorkData>) {
let deque = &mut self.workers[0].deque;
match *deque {
None => {
panic!("tried to push a block but we don't have the deque?!")
}
Some(ref mut deque) => deque.push(work_unit),
}
self.work_count += 1
}
/// Synchronously runs all the enqueued tasks and waits for them to complete.
pub fn run(&mut self) {
// Tell the workers to start.
let mut work_count = AtomicUsize::new(self.work_count);
for worker in self.workers.iter_mut() {
worker.chan.send(WorkerMsg::Start(worker.deque.take().unwrap(),
&mut work_count,
&self.data)).unwrap()
}
<|fim▁hole|> drop(self.port.recv());
self.work_count = 0;
// Tell everyone to stop.
for worker in self.workers.iter() {
worker.chan.send(WorkerMsg::Stop).unwrap()
}
// Get our deques back.
for _ in 0..self.workers.len() {
match self.port.recv().unwrap() {
SupervisorMsg::ReturnDeque(index, deque) => self.workers[index].deque = Some(deque),
SupervisorMsg::HeapSizeOfTLS(_) => panic!("unexpected HeapSizeOfTLS message"),
SupervisorMsg::Finished => panic!("unexpected finished message!"),
}
}
}
/// Synchronously measure memory usage of any thread-local storage.
pub fn heap_size_of_tls(&self, f: fn() -> usize) -> Vec<usize> {
// Tell the workers to measure themselves.
for worker in self.workers.iter() {
worker.chan.send(WorkerMsg::HeapSizeOfTLS(f)).unwrap()
}
// Wait for the workers to finish measuring themselves.
let mut sizes = vec![];
for _ in 0..self.workers.len() {
match self.port.recv().unwrap() {
SupervisorMsg::HeapSizeOfTLS(size) => {
sizes.push(size);
}
_ => panic!("unexpected message!"),
}
}
sizes
}
pub fn shutdown(&mut self) {
for worker in self.workers.iter() {
worker.chan.send(WorkerMsg::Exit).unwrap()
}
}
}<|fim▁end|> | // Wait for the work to finish. |
// ===== file: PacketHandler.cpp =====
#include "Log.h"
#include "../PacketShare/PacketType.h"
#include "ClientSession.h"
#include "GameLiftManager.h"
//@{ Handler Helper
/// Shared-ownership handle to a client session, as passed to every handler.
typedef std::shared_ptr<ClientSession> ClientSessionPtr;
/// Signature of a packet handler; handlers consume the packet bytes from the
/// session's receive buffer themselves (via ClientSession::ParsePacket).
typedef void(*HandlerFunc)(ClientSessionPtr session);
/// Dispatch table indexed by packet type; every slot starts as DefaultHandler
/// (see InitializeHandlers below) and is overwritten by REGISTER_HANDLER.
static HandlerFunc HandlerTable[PKT_MAX];
/// Fallback for packet types with no registered handler: logs the error and
/// drops the offending connection.
static void DefaultHandler(ClientSessionPtr session)
{
    // Trailing '\n' added for consistency with every other PrintOut message
    // in this file.
    GConsoleLog->PrintOut(true, "Invalid packet handler\n");
    session->Disconnect();
}
/// Fills every HandlerTable slot with DefaultHandler during static
/// initialization, before the RegisterHandler globals below overwrite their
/// own slots (ordering is guaranteed by declaration order within this file).
struct InitializeHandlers
{
	InitializeHandlers()
	{
		for (int i = 0; i < PKT_MAX; ++i)
			HandlerTable[i] = DefaultHandler;
	}
} _init_handlers_;
/// Helper whose constructor installs a handler into HandlerTable; used via
/// the REGISTER_HANDLER macro below.
struct RegisterHandler
{
	RegisterHandler(int pktType, HandlerFunc handler)
	{
		HandlerTable[pktType] = handler;
	}
};
/// Declares a handler function for PKT_TYPE, registers it in HandlerTable via
/// a file-scope RegisterHandler instance, and opens the function definition
/// (the `{ ... }` body follows the macro invocation).
#define REGISTER_HANDLER(PKT_TYPE) \
	static void Handler_##PKT_TYPE(ClientSessionPtr session); \
	static RegisterHandler _register_##PKT_TYPE(PKT_TYPE, Handler_##PKT_TYPE); \
	static void Handler_##PKT_TYPE(ClientSessionPtr session)
//@}
///////////////////////////////////////////////////////////
void ClientSession::DispatchPacket()
{
/// packet parsing
while (true)
{
/// read packet header
PacketHeader header;
if (false == mRecvBuffer.Peek((char*)&header, sizeof(PacketHeader)))
return;
/// packet completed?
if (mRecvBuffer.GetStoredSize() < (size_t)header.mSize)
return;
if (header.mType >= PKT_MAX || header.mType <= PKT_NONE)
{
GConsoleLog->PrintOut(true, "Invalid packet type\n");
Disconnect();
return;
}
/// packet dispatch...
HandlerTable[header.mType](shared_from_this());
}<|fim▁hole|>REGISTER_HANDLER(PKT_CS_LOGIN)
{
LoginRequest inPacket;
if (false == session->ParsePacket(inPacket))
{
GConsoleLog->PrintOut(true, "packet parsing error, Type: %d\n", inPacket.mType);
return;
}
session->PlayerLogin(std::string(inPacket.mPlayerId));
}
/// Handles PKT_CS_EXIT: parses the request and logs the player out.
REGISTER_HANDLER(PKT_CS_EXIT)
{
    ExitRequest inPacket;
    if (!session->ParsePacket(inPacket))
    {
        GConsoleLog->PrintOut(true, "packet parsing error: %d\n", inPacket.mType);
        return;
    }

    session->PlayerLogout(std::string(inPacket.mPlayerId));
}
/// Handles PKT_CS_CHAT: echoes the chat line to every connected session.
REGISTER_HANDLER(PKT_CS_CHAT)
{
    ChatBroadcastRequest inPacket;
    if (!session->ParsePacket(inPacket))
    {
        GConsoleLog->PrintOut(true, "[DEBUG] packet parsing error, Type: %d\n", inPacket.mType);
        return;
    }

    /// direct response in case of chatting
    // NOTE(review): strcpy from network-supplied buffers — assumes mPlayerId
    // and mChat are NUL-terminated and fit the destination arrays; confirm
    // the field sizes in PacketType.h.
    ChatBroadcastResult outPacket;
    strcpy(outPacket.mPlayerId, inPacket.mPlayerId);
    strcpy(outPacket.mChat, inPacket.mChat);

    GGameLiftManager->BroadcastMessage(&outPacket);
}
/// Handles PKT_CS_MOVE: rebroadcasts the player's new position to everyone.
REGISTER_HANDLER(PKT_CS_MOVE)
{
    MoveRequest inPacket;
    if (false == session->ParsePacket(inPacket))
    {
        GConsoleLog->PrintOut(true, "[DEBUG] packet parsing error, Type: %d\n", inPacket.mType);
        return;
    }

    /// just broadcast for now
    MoveBroadcastResult outPacket;
    outPacket.mPlayerIdx = inPacket.mPlayerIdx;
    outPacket.mPosX = inPacket.mPosX;
    outPacket.mPosY = inPacket.mPosY;

    GGameLiftManager->BroadcastMessage(&outPacket);
}
/////////////////////////////////////////////////////////
// ===== file: JBScrollPane.java =====
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui.components;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.wm.IdeGlassPane;
import com.intellij.ui.IdeBorderFactory;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.ui.ButtonlessScrollBarUI;
import com.intellij.util.ui.JBInsets;
import com.intellij.util.ui.RegionPainter;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.border.LineBorder;
import javax.swing.plaf.ScrollBarUI;
import javax.swing.plaf.ScrollPaneUI;
import javax.swing.plaf.UIResource;
import javax.swing.plaf.basic.BasicScrollBarUI;
import javax.swing.plaf.basic.BasicScrollPaneUI;
import java.awt.*;
import java.awt.event.InputEvent;
import java.awt.event.MouseEvent;
import java.awt.event.MouseWheelEvent;
import java.awt.event.MouseWheelListener;
import java.lang.reflect.Field;
public class JBScrollPane extends JScrollPane {
  /**
   * This key is used to specify which colors should use the scroll bars on the pane.
   * If a client property is set to {@code true} the bar's brightness
   * will be modified according to the view's background.
   *
   * @see UIUtil#putClientProperty
   * @see UIUtil#isUnderDarcula
   */
  public static final Key<Boolean> BRIGHTNESS_FROM_VIEW = Key.create("JB_SCROLL_PANE_BRIGHTNESS_FROM_VIEW");

  /** @deprecated alias for {@link ScrollPainter.EditorThumb#DEFAULT} */
  @Deprecated
  public static final RegionPainter<Float> THUMB_PAINTER = ScrollPainter.EditorThumb.DEFAULT;
  /** @deprecated alias for {@link ScrollPainter.EditorThumb#DARCULA} */
  @Deprecated
  public static final RegionPainter<Float> THUMB_DARK_PAINTER = ScrollPainter.EditorThumb.DARCULA;
  /** @deprecated alias for {@link ScrollPainter.EditorThumb.Mac#DEFAULT} */
  @Deprecated
  public static final RegionPainter<Float> MAC_THUMB_PAINTER = ScrollPainter.EditorThumb.Mac.DEFAULT;
  /** @deprecated alias for {@link ScrollPainter.EditorThumb.Mac#DARCULA} */
  @Deprecated
  public static final RegionPainter<Float> MAC_THUMB_DARK_PAINTER = ScrollPainter.EditorThumb.Mac.DARCULA;

  private static final Logger LOG = Logger.getInstance(JBScrollPane.class);

  // Viewport border width requested via the int constructor; -1 means "default (1)".
  private int myViewportBorderWidth = -1;
  // True when the previous layout() pass extended the viewport under overlay scroll bars.
  private boolean myHasOverlayScrollbars;
  private volatile boolean myBackgroundRequested; // avoid cyclic references
  /**
   * Creates an empty scroll pane with the given viewport border width in
   * pixels. Note: unlike the other constructors, this one skips
   * {@link #setupCorners()} (it passes {@code false} to {@code init}).
   */
  public JBScrollPane(int viewportWidth) {
    init(false);
    myViewportBorderWidth = viewportWidth;
    updateViewportBorder();
  }

  /** Creates an empty scroll pane with the default border and corners. */
  public JBScrollPane() {
    init();
  }

  /** Creates a scroll pane displaying the given view component. */
  public JBScrollPane(Component view) {
    super(view);
    init();
  }

  /** Creates an empty scroll pane with the given scroll bar policies. */
  public JBScrollPane(int vsbPolicy, int hsbPolicy) {
    super(vsbPolicy, hsbPolicy);
    init();
  }

  /** Creates a scroll pane with the given view and scroll bar policies. */
  public JBScrollPane(Component view, int vsbPolicy, int hsbPolicy) {
    super(view, vsbPolicy, hsbPolicy);
    init();
  }
  @Override
  public Color getBackground() {
    Color color = super.getBackground();
    // When no explicit background is set (or it is an L&F default), delegate
    // to the viewport so the pane blends with its view. myBackgroundRequested
    // guards against the viewport asking us back, which would recurse.
    if (!myBackgroundRequested && EventQueue.isDispatchThread() && Registry.is("ide.scroll.background.auto")) {
      if (!isBackgroundSet() || color instanceof UIResource) {
        Component child = getViewport();
        if (child != null) {
          try {
            myBackgroundRequested = true;
            return child.getBackground();
          }
          finally {
            myBackgroundRequested = false;
          }
        }
      }
    }
    return color;
  }
static Color getViewBackground(JScrollPane pane) {
if (pane == null) return null;
JViewport viewport = pane.getViewport();
if (viewport == null) return null;
Component view = viewport.getView();
if (view == null) return null;
return view.getBackground();
}
public static JScrollPane findScrollPane(Component c) {
if (c == null) return null;
if (!(c instanceof JViewport)) {
Container vp = c.getParent();
if (vp instanceof JViewport) c = vp;
}
c = c.getParent();
if (!(c instanceof JScrollPane)) return null;
return (JScrollPane)c;
}<|fim▁hole|>
  /**
   * Shared constructor initialization: installs either the new internal
   * {@link Layout} or the stock Swing layout, per registry flag.
   *
   * @param setupCorners whether to install the default border and corner components
   */
  private void init(boolean setupCorners) {
    setLayout(Registry.is("ide.scroll.new.layout") ? new Layout() : new ScrollPaneLayout());

    if (setupCorners) {
      setupCorners();
    }
  }
protected void setupCorners() {
setBorder(IdeBorderFactory.createBorder());
setCorner(UPPER_RIGHT_CORNER, new Corner(UPPER_RIGHT_CORNER));
setCorner(UPPER_LEFT_CORNER, new Corner(UPPER_LEFT_CORNER));
setCorner(LOWER_RIGHT_CORNER, new Corner(LOWER_RIGHT_CORNER));
setCorner(LOWER_LEFT_CORNER, new Corner(LOWER_LEFT_CORNER));
}
  @Override
  public void setUI(ScrollPaneUI ui) {
    super.setUI(ui);
    updateViewportBorder();
    // BasicScrollPaneUI installs a wheel listener that always scrolls
    // vertically. Swap it (via reflection — the field is private) for one
    // that honors Shift for horizontal scrolling and only fires when the
    // target scroll bar is actually visible.
    if (ui instanceof BasicScrollPaneUI) {
      try {
        Field field = BasicScrollPaneUI.class.getDeclaredField("mouseScrollListener");
        field.setAccessible(true);
        Object value = field.get(ui);
        if (value instanceof MouseWheelListener) {
          MouseWheelListener oldListener = (MouseWheelListener)value;
          MouseWheelListener newListener = event -> {
            if (isScrollEvent(event)) {
              Object source = event.getSource();
              if (source instanceof JScrollPane) {
                JScrollPane pane = (JScrollPane)source;
                if (pane.isWheelScrollingEnabled()) {
                  // Shift+wheel scrolls horizontally; plain wheel vertically.
                  JScrollBar bar = event.isShiftDown() ? pane.getHorizontalScrollBar() : pane.getVerticalScrollBar();
                  if (bar != null && bar.isVisible()) oldListener.mouseWheelMoved(event);
                }
              }
            }
          };
          field.set(ui, newListener);
          // replace listener if field updated successfully
          removeMouseWheelListener(oldListener);
          addMouseWheelListener(newListener);
        }
      }
      catch (Exception exception) {
        LOG.warn(exception);
      }
    }
  }
  @Override
  public boolean isOptimizedDrawingEnabled() {
    // With the internal Layout, optimized drawing is safe only while both
    // scroll bars are opaque or hidden (translucent bars overlap the view
    // and must be repainted with it).
    if (getLayout() instanceof Layout) {
      return isOptimizedDrawingEnabledFor(getVerticalScrollBar()) &&
             isOptimizedDrawingEnabledFor(getHorizontalScrollBar());
    }
    return !myHasOverlayScrollbars;
  }
/**
* Returns {@code false} for visible translucent scroll bars, or {@code true} otherwise.
* It is needed to repaint translucent scroll bars on viewport repainting.
*/
private static boolean isOptimizedDrawingEnabledFor(JScrollBar bar) {
return bar == null || bar.isOpaque() || !bar.isVisible();
}
  // Re-creates the viewport border so it picks up the currently requested
  // width. Only applies when the installed border is our own ViewportBorder.
  private void updateViewportBorder() {
    if (getViewportBorder() instanceof ViewportBorder) {
      setViewportBorder(new ViewportBorder(myViewportBorderWidth >= 0 ? myViewportBorderWidth : 1));
    }
  }

  /** Returns a 2-pixel viewport border whose color follows the view's background. */
  public static ViewportBorder createIndentBorder() {
    return new ViewportBorder(2);
  }
  /** Creates the pane's vertical scroll bar — a glass-pane-aware {@link MyScrollBar}. */
  @Override
  public JScrollBar createVerticalScrollBar() {
    return new MyScrollBar(Adjustable.VERTICAL);
  }

  /** Creates the pane's horizontal scroll bar — a glass-pane-aware {@link MyScrollBar}. */
  @NotNull
  @Override
  public JScrollBar createHorizontalScrollBar() {
    return new MyScrollBar(Adjustable.HORIZONTAL);
  }

  /** Creates a {@link JBViewport} instead of the stock Swing viewport. */
  @Override
  protected JViewport createViewport() {
    return new JBViewport();
  }
  @SuppressWarnings("deprecation")
  @Override
  public void layout() {
    LayoutManager layout = getLayout();
    ScrollPaneLayout scrollLayout = layout instanceof ScrollPaneLayout ? (ScrollPaneLayout)layout : null;

    // Now we let JScrollPane layout everything as necessary
    super.layout();

    // The internal Layout manager handles overlay scroll bars on its own.
    if (layout instanceof Layout) return;

    if (scrollLayout != null) {
      // Now it's time to jump in and expand the viewport so it fits the whole area
      // (taking into consideration corners, headers and other stuff).
      myHasOverlayScrollbars = relayoutScrollbars(
        this, scrollLayout,
        myHasOverlayScrollbars // If last time we did relayouting, we should restore it back.
      );
    }
    else {
      myHasOverlayScrollbars = false;
    }
  }
  /**
   * Expands the viewport underneath overlay (translucent, space-free) scroll
   * bars and stretches the opposite bar and headers to match, hiding the
   * corners that would otherwise poke through.
   *
   * @param forceRelayout re-run even without overlay bars, to undo a previous pass
   * @return true if at least one overlay scroll bar was laid out
   */
  private boolean relayoutScrollbars(@NotNull JComponent container, @NotNull ScrollPaneLayout layout, boolean forceRelayout) {
    JViewport viewport = layout.getViewport();
    if (viewport == null) return false;

    JScrollBar vsb = layout.getVerticalScrollBar();
    JScrollBar hsb = layout.getHorizontalScrollBar();
    JViewport colHead = layout.getColumnHeader();
    JViewport rowHead = layout.getRowHeader();

    Rectangle viewportBounds = viewport.getBounds();

    boolean extendViewportUnderVScrollbar = vsb != null && shouldExtendViewportUnderScrollbar(vsb);
    boolean extendViewportUnderHScrollbar = hsb != null && shouldExtendViewportUnderScrollbar(hsb);
    boolean hasOverlayScrollbars = extendViewportUnderVScrollbar || extendViewportUnderHScrollbar;

    if (!hasOverlayScrollbars && !forceRelayout) return false;

    // Paint the viewport last and the bars first so the bars draw on top.
    container.setComponentZOrder(viewport, container.getComponentCount() - 1);
    if (vsb != null) container.setComponentZOrder(vsb, 0);
    if (hsb != null) container.setComponentZOrder(hsb, 0);

    // Grow the viewport to cover the strip each overlay bar occupies.
    if (extendViewportUnderVScrollbar) {
      int x2 = Math.max(vsb.getX() + vsb.getWidth(), viewportBounds.x + viewportBounds.width);
      viewportBounds.x = Math.min(viewportBounds.x, vsb.getX());
      viewportBounds.width = x2 - viewportBounds.x;
    }
    if (extendViewportUnderHScrollbar) {
      int y2 = Math.max(hsb.getY() + hsb.getHeight(), viewportBounds.y + viewportBounds.height);
      viewportBounds.y = Math.min(viewportBounds.y, hsb.getY());
      viewportBounds.height = y2 - viewportBounds.y;
    }

    // Stretch the perpendicular bar/header across the widened viewport and
    // hide the corners that no longer have a slot.
    if (extendViewportUnderVScrollbar) {
      if (hsb != null) {
        Rectangle scrollbarBounds = hsb.getBounds();
        scrollbarBounds.width = viewportBounds.x + viewportBounds.width - scrollbarBounds.x;
        hsb.setBounds(scrollbarBounds);
      }
      if (colHead != null) {
        Rectangle headerBounds = colHead.getBounds();
        headerBounds.width = viewportBounds.width;
        colHead.setBounds(headerBounds);
      }
      hideFromView(layout.getCorner(UPPER_RIGHT_CORNER));
      hideFromView(layout.getCorner(LOWER_RIGHT_CORNER));
    }
    if (extendViewportUnderHScrollbar) {
      if (vsb != null) {
        Rectangle scrollbarBounds = vsb.getBounds();
        scrollbarBounds.height = viewportBounds.y + viewportBounds.height - scrollbarBounds.y;
        vsb.setBounds(scrollbarBounds);
      }
      if (rowHead != null) {
        Rectangle headerBounds = rowHead.getBounds();
        headerBounds.height = viewportBounds.height;
        rowHead.setBounds(headerBounds);
      }
      hideFromView(layout.getCorner(LOWER_LEFT_CORNER));
      hideFromView(layout.getCorner(LOWER_RIGHT_CORNER));
    }

    viewport.setBounds(viewportBounds);
    return hasOverlayScrollbars;
  }
private boolean shouldExtendViewportUnderScrollbar(@Nullable JScrollBar scrollbar) {
if (scrollbar == null || !scrollbar.isVisible()) return false;
return isOverlaidScrollbar(scrollbar);
}
  /**
   * Whether the given scroll bar paints as a Mac-style overlay (drawn over
   * the content without its own permanent track).
   */
  protected boolean isOverlaidScrollbar(@Nullable JScrollBar scrollbar) {
    if (!ButtonlessScrollBarUI.isMacOverlayScrollbarSupported()) return false;

    ScrollBarUI vsbUI = scrollbar == null ? null : scrollbar.getUI();
    return vsbUI instanceof ButtonlessScrollBarUI && !((ButtonlessScrollBarUI)vsbUI).alwaysShowTrack();
  }
private static void hideFromView(Component component) {
if (component == null) return;
component.setBounds(-10, -10, 1, 1);
}
  /** Scroll bar that installs our flat UI and cooperates with the IDE glass pane. */
  private class MyScrollBar extends ScrollBar implements IdeGlassPane.TopComponent {
    public MyScrollBar(int orientation) {
      super(orientation);
    }

    @Override
    public void updateUI() {
      ScrollBarUI ui = getUI();
      // Keep an already-installed flat UI; otherwise install ours.
      if (ui instanceof DefaultScrollBarUI) return;
      setUI(JBScrollBar.createUI(this));
    }

    @Override
    public boolean canBePreprocessed(MouseEvent e) {
      return JBScrollPane.canBePreprocessed(e, this);
    }
  }
  /**
   * Tells the IDE glass pane whether a mouse event may be intercepted before
   * reaching the scroll bar. Move/press events over the bar's thumb must go
   * to the bar itself, so only those return {@code false}.
   */
  public static boolean canBePreprocessed(MouseEvent e, JScrollBar bar) {
    if (e.getID() == MouseEvent.MOUSE_MOVED || e.getID() == MouseEvent.MOUSE_PRESSED) {
      ScrollBarUI ui = bar.getUI();
      if (ui instanceof BasicScrollBarUI) {
        BasicScrollBarUI bui = (BasicScrollBarUI)ui;
        try {
          // getThumbBounds() is protected in BasicScrollBarUI, hence reflection.
          Rectangle rect = (Rectangle)ReflectionUtil.getDeclaredMethod(BasicScrollBarUI.class, "getThumbBounds", ArrayUtil.EMPTY_CLASS_ARRAY).invoke(bui);
          Point point = SwingUtilities.convertPoint(e.getComponent(), e.getX(), e.getY(), bar);
          return !rect.contains(point);
        }
        catch (Exception e1) {
          // If reflection fails, err on the side of letting the glass pane act.
          return true;
        }
      }
      else if (ui instanceof DefaultScrollBarUI) {
        DefaultScrollBarUI dui = (DefaultScrollBarUI)ui;
        Point point = e.getLocationOnScreen();
        SwingUtilities.convertPointFromScreen(point, bar);
        return !dui.isThumbContains(point.x, point.y);
      }
    }
    return true;
  }
private static class Corner extends JPanel {
private final String myPos;
public Corner(String pos) {
myPos = pos;
ScrollColorProducer.setBackground(this);
ScrollColorProducer.setForeground(this);
}
@Override
protected void paintComponent(Graphics g) {
g.setColor(getBackground());
g.fillRect(0, 0, getWidth(), getHeight());
if (SystemInfo.isMac || !Registry.is("ide.scroll.track.border.paint")) return;
g.setColor(getForeground());
int x2 = getWidth() - 1;
int y2 = getHeight() - 1;
if (myPos == UPPER_LEFT_CORNER || myPos == UPPER_RIGHT_CORNER) {
g.drawLine(0, y2, x2, y2);
}
if (myPos == LOWER_LEFT_CORNER || myPos == LOWER_RIGHT_CORNER) {
g.drawLine(0, 0, x2, 0);
}
if (myPos == UPPER_LEFT_CORNER || myPos == LOWER_LEFT_CORNER) {
g.drawLine(x2, 0, x2, y2);
}
if (myPos == UPPER_RIGHT_CORNER || myPos == LOWER_RIGHT_CORNER) {
g.drawLine(0, 0, 0, y2);
}
}
}
  /** Viewport border of fixed thickness whose color follows the view's background. */
  private static class ViewportBorder extends LineBorder {
    public ViewportBorder(int thickness) {
      // Color is resolved lazily in updateColor(), so pass null here.
      super(null, thickness);
    }

    @Override
    public void paintBorder(Component c, Graphics g, int x, int y, int width, int height) {
      updateColor(c);
      super.paintBorder(c, g, x, y, width, height);
    }

    // Picks up the current view background as the line color before painting.
    private void updateColor(Component c) {
      if (!(c instanceof JScrollPane)) return;
      lineColor = getViewBackground((JScrollPane)c);
    }
  }
/**
* These client properties modify a scroll pane layout.
* Use the class object as a property key.
*
* @see #putClientProperty(Object, Object)
*/
public enum Flip {
NONE, VERTICAL, HORIZONTAL, BOTH
}
  /**
   * These client properties show a component position on a scroll pane.
   * It is set by internal layout manager of the scroll pane.
   */
  public enum Alignment {
    TOP, LEFT, RIGHT, BOTTOM;

    /**
     * Returns the scroll pane edge the given component sits on — from its
     * client property if present, otherwise inferred from its role in the
     * parent scroll pane or its scroll bar orientation — or {@code null}
     * when it cannot be determined.
     */
    public static Alignment get(JComponent component) {
      if (component != null) {
        Object property = component.getClientProperty(Alignment.class);
        if (property instanceof Alignment) return (Alignment)property;

        Container parent = component.getParent();
        if (parent instanceof JScrollPane) {
          JScrollPane pane = (JScrollPane)parent;
          if (component == pane.getColumnHeader()) {
            return TOP;
          }
          if (component == pane.getHorizontalScrollBar()) {
            return BOTTOM;
          }
          // The vertical bar and the row header swap sides in RTL orientation.
          boolean ltr = pane.getComponentOrientation().isLeftToRight();
          if (component == pane.getVerticalScrollBar()) {
            return ltr ? RIGHT : LEFT;
          }
          if (component == pane.getRowHeader()) {
            return ltr ? LEFT : RIGHT;
          }
        }
        // assume alignment for a scroll bar,
        // which is not contained in a scroll pane
        if (component instanceof JScrollBar) {
          JScrollBar bar = (JScrollBar)component;
          switch (bar.getOrientation()) {
            case Adjustable.HORIZONTAL:
              return BOTTOM;
            case Adjustable.VERTICAL:
              return bar.getComponentOrientation().isLeftToRight()
                     ? RIGHT
                     : LEFT;
          }
        }
      }
      return null;
    }
  }
/**
* ScrollPaneLayout implementation that supports
* ScrollBar flipping and non-opaque ScrollBars.
*/
private static class Layout extends ScrollPaneLayout {
private static final Insets EMPTY_INSETS = new Insets(0, 0, 0, 0);
@Override
public void layoutContainer(Container parent) {
JScrollPane pane = (JScrollPane)parent;
// Calculate inner bounds of the scroll pane
Rectangle bounds = new Rectangle(pane.getWidth(), pane.getHeight());
JBInsets.removeFrom(bounds, pane.getInsets());
// Determine positions of scroll bars on the scroll pane
Object property = pane.getClientProperty(Flip.class);
Flip flip = property instanceof Flip ? (Flip)property : Flip.NONE;
boolean hsbOnTop = flip == Flip.BOTH || flip == Flip.VERTICAL;
boolean vsbOnLeft = pane.getComponentOrientation().isLeftToRight()
? flip == Flip.BOTH || flip == Flip.HORIZONTAL
: flip == Flip.NONE || flip == Flip.VERTICAL;
// If there's a visible row header remove the space it needs.
// The row header is treated as if it were fixed width, arbitrary height.
Rectangle rowHeadBounds = new Rectangle(bounds.x, 0, 0, 0);
if (rowHead != null && rowHead.isVisible()) {
rowHeadBounds.width = min(bounds.width, rowHead.getPreferredSize().width);
bounds.width -= rowHeadBounds.width;
if (vsbOnLeft) {
rowHeadBounds.x += bounds.width;
}
else {
bounds.x += rowHeadBounds.width;
}
}
// If there's a visible column header remove the space it needs.
// The column header is treated as if it were fixed height, arbitrary width.
Rectangle colHeadBounds = new Rectangle(0, bounds.y, 0, 0);
if (colHead != null && colHead.isVisible()) {
colHeadBounds.height = min(bounds.height, colHead.getPreferredSize().height);
bounds.height -= colHeadBounds.height;
if (hsbOnTop) {
colHeadBounds.y += bounds.height;
}
else {
bounds.y += colHeadBounds.height;
}
}
// If there's a JScrollPane.viewportBorder, remove the space it occupies
Border border = pane.getViewportBorder();
Insets insets = border == null ? null : border.getBorderInsets(parent);
JBInsets.removeFrom(bounds, insets);
if (insets == null) insets = EMPTY_INSETS;
// At this point:
// colHeadBounds is correct except for its width and x
// rowHeadBounds is correct except for its height and y
// bounds - the space available for the viewport and scroll bars
// Once we're through computing the dimensions of these three parts
// we can go back and set the bounds for the corners and the dimensions of
// colHeadBounds.x, colHeadBounds.width, rowHeadBounds.y, rowHeadBounds.height.
boolean isEmpty = bounds.width < 0 || bounds.height < 0;
Component view = viewport == null ? null : viewport.getView();
Dimension viewPreferredSize = view == null ? new Dimension() : view.getPreferredSize();
if (view instanceof JComponent) JBViewport.fixPreferredSize(viewPreferredSize, (JComponent)view, vsb, hsb);
Dimension viewportExtentSize = viewport == null ? new Dimension() : viewport.toViewCoordinates(bounds.getSize());
// If the view is tracking the viewports width we don't bother with a horizontal scrollbar.
// If the view is tracking the viewports height we don't bother with a vertical scrollbar.
Scrollable scrollable = null;
boolean viewTracksViewportWidth = false;
boolean viewTracksViewportHeight = false;
// Don't bother checking the Scrollable methods if there is no room for the viewport,
// we aren't going to show any scroll bars in this case anyway.
if (!isEmpty && view instanceof Scrollable) {
scrollable = (Scrollable)view;
viewTracksViewportWidth = scrollable.getScrollableTracksViewportWidth();
viewTracksViewportHeight = scrollable.getScrollableTracksViewportHeight();
}
// If there's a vertical scroll bar and we need one, allocate space for it.
// A vertical scroll bar is considered to be fixed width, arbitrary height.
boolean vsbOpaque = false;
boolean vsbNeeded = false;
int vsbPolicy = pane.getVerticalScrollBarPolicy();
if (!isEmpty && vsbPolicy != VERTICAL_SCROLLBAR_NEVER) {
vsbNeeded = vsbPolicy == VERTICAL_SCROLLBAR_ALWAYS
|| !viewTracksViewportHeight && viewPreferredSize.height > viewportExtentSize.height;
}
Rectangle vsbBounds = new Rectangle(0, bounds.y - insets.top, 0, 0);
if (vsb != null) {
if (!SystemInfo.isMac && view instanceof JTable) vsb.setOpaque(true);
vsbOpaque = vsb.isOpaque();
if (vsbNeeded) {
adjustForVSB(bounds, insets, vsbBounds, vsbOpaque, vsbOnLeft);
if (vsbOpaque && viewport != null) {
viewportExtentSize = viewport.toViewCoordinates(bounds.getSize());
}
}
}
// If there's a horizontal scroll bar and we need one, allocate space for it.
// A horizontal scroll bar is considered to be fixed height, arbitrary width.
boolean hsbOpaque = false;
boolean hsbNeeded = false;
int hsbPolicy = pane.getHorizontalScrollBarPolicy();
if (!isEmpty && hsbPolicy != HORIZONTAL_SCROLLBAR_NEVER) {
hsbNeeded = hsbPolicy == HORIZONTAL_SCROLLBAR_ALWAYS
|| !viewTracksViewportWidth && viewPreferredSize.width > viewportExtentSize.width;
}
Rectangle hsbBounds = new Rectangle(bounds.x - insets.left, 0, 0, 0);
if (hsb != null) {
if (!SystemInfo.isMac && view instanceof JTable) hsb.setOpaque(true);
hsbOpaque = hsb.isOpaque();
if (hsbNeeded) {
adjustForHSB(bounds, insets, hsbBounds, hsbOpaque, hsbOnTop);
if (hsbOpaque && viewport != null) {
// If we added the horizontal scrollbar and reduced the vertical space
// we may have to add the vertical scrollbar, if that hasn't been done so already.
if (vsb != null && !vsbNeeded && vsbPolicy != VERTICAL_SCROLLBAR_NEVER) {
viewportExtentSize = viewport.toViewCoordinates(bounds.getSize());
vsbNeeded = viewPreferredSize.height > viewportExtentSize.height;
if (vsbNeeded) adjustForVSB(bounds, insets, vsbBounds, vsbOpaque, vsbOnLeft);
}
}
}
}
// Set the size of the viewport first, and then recheck the Scrollable methods.
// Some components base their return values for the Scrollable methods on the size of the viewport,
// so that if we don't ask after resetting the bounds we may have gotten the wrong answer.
if (viewport != null) {
viewport.setBounds(bounds);
if (scrollable != null && hsbOpaque && vsbOpaque) {
viewTracksViewportWidth = scrollable.getScrollableTracksViewportWidth();
viewTracksViewportHeight = scrollable.getScrollableTracksViewportHeight();
viewportExtentSize = viewport.toViewCoordinates(bounds.getSize());
boolean vsbNeededOld = vsbNeeded;
if (vsb != null && vsbPolicy == VERTICAL_SCROLLBAR_AS_NEEDED) {
boolean vsbNeededNew = !viewTracksViewportHeight && viewPreferredSize.height > viewportExtentSize.height;
if (vsbNeeded != vsbNeededNew) {
vsbNeeded = vsbNeededNew;
if (vsbNeeded) {
adjustForVSB(bounds, insets, vsbBounds, vsbOpaque, vsbOnLeft);
}
else if (vsbOpaque) {
bounds.width += vsbBounds.width;
}
if (vsbOpaque) viewportExtentSize = viewport.toViewCoordinates(bounds.getSize());
}
}
boolean hsbNeededOld = hsbNeeded;
if (hsb != null && hsbPolicy == HORIZONTAL_SCROLLBAR_AS_NEEDED) {
boolean hsbNeededNew = !viewTracksViewportWidth && viewPreferredSize.width > viewportExtentSize.width;
if (hsbNeeded != hsbNeededNew) {
hsbNeeded = hsbNeededNew;
if (hsbNeeded) {
adjustForHSB(bounds, insets, hsbBounds, hsbOpaque, hsbOnTop);
}
else if (hsbOpaque) {
bounds.height += hsbBounds.height;
}
if (hsbOpaque && vsb != null && !vsbNeeded && vsbPolicy != VERTICAL_SCROLLBAR_NEVER) {
viewportExtentSize = viewport.toViewCoordinates(bounds.getSize());
vsbNeeded = viewPreferredSize.height > viewportExtentSize.height;
if (vsbNeeded) adjustForVSB(bounds, insets, vsbBounds, vsbOpaque, vsbOnLeft);
}
}
}
if (hsbNeededOld != hsbNeeded || vsbNeededOld != vsbNeeded) {
viewport.setBounds(bounds);
// You could argue that we should recheck the Scrollable methods again until they stop changing,
// but they might never stop changing, so we stop here and don't do any additional checks.
}
}
}
// Set the bounds of the row header.
rowHeadBounds.y = bounds.y - insets.top;
rowHeadBounds.height = bounds.height + insets.top + insets.bottom;
if (rowHead != null) {
rowHead.setBounds(rowHeadBounds);
rowHead.putClientProperty(Alignment.class, vsbOnLeft ? Alignment.RIGHT : Alignment.LEFT);
}
// Set the bounds of the column header.
colHeadBounds.x = bounds.x - insets.left;
colHeadBounds.width = bounds.width + insets.left + insets.right;
if (colHead != null) {
colHead.setBounds(colHeadBounds);
colHead.putClientProperty(Alignment.class, hsbOnTop ? Alignment.BOTTOM : Alignment.TOP);
}
// Calculate overlaps for translucent scroll bars
int overlapWidth = 0;
int overlapHeight = 0;
if (vsbNeeded && !vsbOpaque && hsbNeeded && !hsbOpaque) {
overlapWidth = vsbBounds.width; // shrink horizontally
//overlapHeight = hsbBounds.height; // shrink vertically
}
// Set the bounds of the vertical scroll bar.
vsbBounds.y = bounds.y - insets.top;
vsbBounds.height = bounds.height + insets.top + insets.bottom;
if (vsb != null) {
vsb.setVisible(vsbNeeded);
if (vsbNeeded) {
if (vsbOpaque && colHead != null && UIManager.getBoolean("ScrollPane.fillUpperCorner")) {
if ((vsbOnLeft ? upperLeft : upperRight) == null) {
// This is used primarily for GTK L&F, which needs to extend
// the vertical scrollbar to fill the upper corner near the column header.
// Note that we skip this step (and use the default behavior)
// if the user has set a custom corner component.
if (!hsbOnTop) vsbBounds.y -= colHeadBounds.height;
vsbBounds.height += colHeadBounds.height;
}
}
int overlapY = !hsbOnTop ? 0 : overlapHeight;
vsb.setBounds(vsbBounds.x, vsbBounds.y + overlapY, vsbBounds.width, vsbBounds.height - overlapHeight);
vsb.putClientProperty(Alignment.class, vsbOnLeft ? Alignment.LEFT : Alignment.RIGHT);
}
// Modify the bounds of the translucent scroll bar.
if (!vsbOpaque) {
if (!vsbOnLeft) vsbBounds.x += vsbBounds.width;
vsbBounds.width = 0;
}
}
// Set the bounds of the horizontal scroll bar.
hsbBounds.x = bounds.x - insets.left;
hsbBounds.width = bounds.width + insets.left + insets.right;
if (hsb != null) {
hsb.setVisible(hsbNeeded);
if (hsbNeeded) {
if (hsbOpaque && rowHead != null && UIManager.getBoolean("ScrollPane.fillLowerCorner")) {
if ((vsbOnLeft ? lowerRight : lowerLeft) == null) {
// This is used primarily for GTK L&F, which needs to extend
// the horizontal scrollbar to fill the lower corner near the row header.
// Note that we skip this step (and use the default behavior)
// if the user has set a custom corner component.
if (!vsbOnLeft) hsbBounds.x -= rowHeadBounds.width;
hsbBounds.width += rowHeadBounds.width;
}
}
int overlapX = !vsbOnLeft ? 0 : overlapWidth;
hsb.setBounds(hsbBounds.x + overlapX, hsbBounds.y, hsbBounds.width - overlapWidth, hsbBounds.height);
hsb.putClientProperty(Alignment.class, hsbOnTop ? Alignment.TOP : Alignment.BOTTOM);
}
// Modify the bounds of the translucent scroll bar.
if (!hsbOpaque) {
if (!hsbOnTop) hsbBounds.y += hsbBounds.height;
hsbBounds.height = 0;
}
}
// Set the bounds of the corners.
if (lowerLeft != null) {
lowerLeft.setBounds(vsbOnLeft ? vsbBounds.x : rowHeadBounds.x,
hsbOnTop ? colHeadBounds.y : hsbBounds.y,
vsbOnLeft ? vsbBounds.width : rowHeadBounds.width,
hsbOnTop ? colHeadBounds.height : hsbBounds.height);
}
if (lowerRight != null) {
lowerRight.setBounds(vsbOnLeft ? rowHeadBounds.x : vsbBounds.x,
hsbOnTop ? colHeadBounds.y : hsbBounds.y,
vsbOnLeft ? rowHeadBounds.width : vsbBounds.width,
hsbOnTop ? colHeadBounds.height : hsbBounds.height);
}
if (upperLeft != null) {
upperLeft.setBounds(vsbOnLeft ? vsbBounds.x : rowHeadBounds.x,
hsbOnTop ? hsbBounds.y : colHeadBounds.y,
vsbOnLeft ? vsbBounds.width : rowHeadBounds.width,
hsbOnTop ? hsbBounds.height : colHeadBounds.height);
}
if (upperRight != null) {
upperRight.setBounds(vsbOnLeft ? rowHeadBounds.x : vsbBounds.x,
hsbOnTop ? hsbBounds.y : colHeadBounds.y,
vsbOnLeft ? rowHeadBounds.width : vsbBounds.width,
hsbOnTop ? hsbBounds.height : colHeadBounds.height);
}
if (!vsbOpaque && vsbNeeded || !hsbOpaque && hsbNeeded) {
fixComponentZOrder(vsb, 0);
fixComponentZOrder(viewport, -1);
}
}
private static void fixComponentZOrder(Component component, int index) {
if (component != null) {
Container parent = component.getParent();
synchronized (parent.getTreeLock()) {
if (index < 0) index += parent.getComponentCount();
parent.setComponentZOrder(component, index);
}
}
}
private void adjustForVSB(Rectangle bounds, Insets insets, Rectangle vsbBounds, boolean vsbOpaque, boolean vsbOnLeft) {
vsbBounds.width = !vsb.isEnabled() ? 0 : min(bounds.width, vsb.getPreferredSize().width);
if (vsbOnLeft) {
vsbBounds.x = bounds.x - insets.left/* + vsbBounds.width*/;
if (vsbOpaque) bounds.x += vsbBounds.width;
}
else {
vsbBounds.x = bounds.x + bounds.width + insets.right - vsbBounds.width;
}
if (vsbOpaque) bounds.width -= vsbBounds.width;
}
private void adjustForHSB(Rectangle bounds, Insets insets, Rectangle hsbBounds, boolean hsbOpaque, boolean hsbOnTop) {
hsbBounds.height = !hsb.isEnabled() ? 0 : min(bounds.height, hsb.getPreferredSize().height);
if (hsbOnTop) {
hsbBounds.y = bounds.y - insets.top/* + hsbBounds.height*/;
if (hsbOpaque) bounds.y += hsbBounds.height;
}
else {
hsbBounds.y = bounds.y + bounds.height + insets.bottom - hsbBounds.height;
}
if (hsbOpaque) bounds.height -= hsbBounds.height;
}
private static int min(int one, int two) {
return Math.max(0, Math.min(one, two));
}
}
/**
* Indicates whether the specified event is not consumed and does not have unexpected modifiers.
*
* @param event a mouse wheel event to check for validity
* @return {@code true} if the specified event is valid, {@code false} otherwise
*/
public static boolean isScrollEvent(@NotNull MouseWheelEvent event) {
if (event.isConsumed()) return false; // event should not be consumed already
if (event.getWheelRotation() == 0) return false; // any rotation expected (forward or backward)
return 0 == (SCROLL_MODIFIERS & event.getModifiers());
}
private static final int SCROLL_MODIFIERS = // event modifiers allowed during scrolling
~InputEvent.SHIFT_MASK & ~InputEvent.SHIFT_DOWN_MASK & // for horizontal scrolling
~InputEvent.BUTTON1_MASK & ~InputEvent.BUTTON1_DOWN_MASK; // for selection
}<|fim▁end|> |
private void init() {
init(true);
} |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Pytrafikverket module."""
# flake8: noqa
from pytrafikverket.trafikverket import (AndFilter, FieldFilter, FieldSort,
Filter, FilterOperation, NodeHelper,
OrFilter, SortOrder, Trafikverket)
from pytrafikverket.trafikverket_train import (StationInfo, TrafikverketTrain,
TrainStop, TrainStopStatus)
from pytrafikverket.trafikverket_weather import (TrafikverketWeather,
WeatherStationInfo)
from pytrafikverket.trafikverket_ferry import (TrafikverketFerry,<|fim▁hole|><|fim▁end|> | FerryStop, FerryStopStatus) |
<|file_name|>HttpRequestLogger.java<|end_file_name|><|fim▁begin|><|fim▁hole|>package com.azure.core.http.policy;
import com.azure.core.util.logging.ClientLogger;
import com.azure.core.util.logging.LogLevel;
import reactor.core.publisher.Mono;
/**
* Manages logging HTTP requests in {@link HttpLoggingPolicy}.
*/
@FunctionalInterface
public interface HttpRequestLogger {
/**
* Gets the {@link LogLevel} used to log the HTTP request.
* <p>
* By default this will return {@link LogLevel#INFORMATIONAL}.
*
* @param loggingOptions The information available during request logging.
* @return The {@link LogLevel} used to log the HTTP request.
*/
default LogLevel getLogLevel(HttpRequestLoggingContext loggingOptions) {
return LogLevel.INFORMATIONAL;
}
/**
* Logs the HTTP request.
* <p>
* To get the {@link LogLevel} used to log the HTTP request use {@link #getLogLevel(HttpRequestLoggingContext)}.
*
* @param logger The {@link ClientLogger} used to log the HTTP request.
* @param loggingOptions The information available during request logging.
* @return A reactive response that indicates that the HTTP request has been logged.
*/
Mono<Void> logRequest(ClientLogger logger, HttpRequestLoggingContext loggingOptions);
}<|fim▁end|> | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
|
<|file_name|>test-type.rs<|end_file_name|><|fim▁begin|>// compile-flags: --test --test-args=--test-threads=1
// check-pass
// normalize-stdout-test: "src/test/rustdoc-ui" -> "$$DIR"
// normalize-stdout-test "finished in \d+\.\d+s" -> "finished in $$TIME"
/// ```
/// let a = true;
/// ```
/// ```should_panic
/// panic!()
/// ```
/// ```ignore (incomplete-code)
/// fn foo() {
/// ```
/// ```no_run
/// loop {
/// println!("Hello, world");
/// }
/// ```
/// fails to compile
/// ```compile_fail<|fim▁hole|>/// let x = 5;
/// x += 2; // shouldn't compile!
/// ```
pub fn f() {}<|fim▁end|> | |
<|file_name|>test_artificial_32_RelativeDifference_LinearTrend_5__0.py<|end_file_name|><|fim▁begin|>import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
<|fim▁hole|><|fim▁end|> |
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "LinearTrend", cycle_length = 5, transform = "RelativeDifference", sigma = 0.0, exog_count = 0, ar_order = 0); |
<|file_name|>issue-47312.rs<|end_file_name|><|fim▁begin|>// check-pass
// ignore-pretty pretty-printing is unhygienic
#![feature(decl_macro)]
#![allow(unused)]
mod foo {
pub macro m($s:tt, $i:tt) {
$s.$i
}
}
mod bar {
struct S(i32);
fn f() {
let s = S(0);
::foo::m!(s, 0);
}
}<|fim▁hole|><|fim▁end|> |
fn main() {} |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|>import getpass
import json
import getopt
from genericpath import isfile
from os.path import sep
from pingdumb.main_module import url_type
def read_config():
f_path = "." + sep + "pingdumb.json"
if not isfile(f_path):
f = open(f_path, 'w')
conf = {
"url": "jellyms.kr",
"smtpServer": "smtp.gmail.com:587",
"smtpUser": "",
"toEmail": "",
"interval": 300,
}
f.write(json.dumps(conf))
f.close()<|fim▁hole|> f.close()
return conf
def write_config(conf):
if 'smtpPw' in conf:
del conf['smtpPw']
f_path = "." + sep + "pingdumb.json"
f = open(f_path, 'w')
f.truncate()
f.write(json.dumps(conf))
f.close()
def input_conf(message, default):
value = input(message)
if not value:
return default
return value
def set_config():
configure = read_config()
url_for_test = input_conf(
"URL to test? (" + configure["url"] + ")", configure["url"]
)
url_for_test = url_type(url_for_test)
recv_mail = input_conf(
"Receive mail? (" + configure["toEmail"] + ")",
configure["toEmail"]
)
s_server = input_conf(
"SMTP server? (" + configure["smtpServer"] + ")",
configure["smtpServer"]
)
s_user = input_conf(
"SMTP Server username? (" + configure["smtpUser"] + ")",
configure["smtpUser"]
)
s_pw = getpass.getpass("SMTP Server password?", "")
interval = input_conf(
"interval of seconds? (" + str(configure["interval"]) + ")",
configure["interval"]
)
interval = int(interval)
configure["url"] = url_for_test
configure["toEmail"] = recv_mail
configure["smtpServer"] = s_server
configure["smtpUser"] = s_user
configure["smtpPw"] = s_pw
configure["interval"] = interval
return configure
def configure_to_tuple():
configure = read_config()
return configure["url"], configure["smtpServer"], \
configure["smtpUser"], configure["toEmail"], configure["interval"]
def extract_password_with_argv(argv):
opts, args = getopt.getopt(argv, 'p')
for o, a in opts:
if o == "-p":
return getpass.getpass("SMTP Server password", "")<|fim▁end|> | return conf
else:
f = open(f_path, 'r+b')
conf = json.loads(f.read().decode('utf-8')) |
<|file_name|>roerrorapi.rs<|end_file_name|><|fim▁begin|>// Copyright © 2017 winapi-rs developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
// All files in the project carrying such notice may not be copied, modified, or distributed
// except according to those terms.
ENUM!{enum RO_ERROR_REPORTING_FLAGS {<|fim▁hole|> RO_ERROR_REPORTING_FORCEEXCEPTIONS = 0x00000002,
RO_ERROR_REPORTING_USESETERRORINFO = 0x00000004,
RO_ERROR_REPORTING_SUPPRESSSETERRORINFO = 0x00000008,
}}
FN!{stdcall PINSPECT_MEMORY_CALLBACK(
*const ::VOID,
::UINT_PTR,
::UINT32,
*mut ::BYTE,
) -> ::HRESULT}<|fim▁end|> | RO_ERROR_REPORTING_NONE = 0x00000000,
RO_ERROR_REPORTING_SUPPRESSEXCEPTIONS = 0x00000001, |
<|file_name|>O2.4 SS, Single agent model.py<|end_file_name|><|fim▁begin|>import complexism as cx
import complexism.agentbased.statespace as ss
import epidag as dag
dbp = cx.read_dbp_script(cx.load_txt('../scripts/SIR_BN.txt'))
pc = dag.quick_build_parameter_core(cx.load_txt('../scripts/pSIR.txt'))
dc = dbp.generate_model('M1', **pc.get_samplers())
ag = ss.StSpAgent('Helen', dc['Sus'], pc)
model = cx.SingleIndividualABM('M1', ag)
model.add_observing_attribute('State')<|fim▁hole|>print(cx.simulate(model, None, 0, 10, 1))<|fim▁end|> | |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
from django.db import models
import datetime
from django.db.models.signals import pre_save
from django.urls import reverse
from django.utils.text import slugify
from django.utils.translation import ugettext_lazy as _
from source_utils.starters import CommonInfo, GenericCategory
from versatileimagefield.fields import (
VersatileImageField,
PPOIField
)
def upload_location(instance, filename):
return "%s/%s" %(instance.slug, filename)
ASSESEMENT = (
('units', 'Per unit'),
('square feet', 'Square foot'),
('linear feet', 'Linear foot'),
('square meters', 'Square meter'),
('linear meters', 'Linear meter'),
)
class Base(GenericCategory):
"""
This model represents the general type of product category offered.
"""
class Meta:
verbose_name = _('Product Category')
verbose_name_plural = _('Product Categories')
ordering = ["category"]
def get_success_url(self):
return reverse("product:company_list")
def get_absolute_url(self):
return reverse(
"product:base_product_detail",
kwargs={'slug': self.slug}
)
def pre_save_category(sender, instance, *args, **kwargs):
instance.slug = slugify(instance.category)
pre_save.connect(pre_save_category, sender=Base)
class Product(CommonInfo):
"""
This model describes the specific product related to the category.
"""
base = models.ForeignKey(
Base,
on_delete=models.CASCADE
)
supplier = models.ForeignKey(
'company.Company',
on_delete=models.CASCADE
)
item = models.CharField(
max_length=30,
unique=True
)
admin_time = models.DecimalField(
default=0,
max_digits=4,
decimal_places=2
)
prep_time = models.DecimalField(
default=0,
max_digits=4,
decimal_places=2
)
field_time = models.DecimalField(
default=0,
max_digits=4,
decimal_places=2
)
admin_material = models.DecimalField(
default=0,
max_digits=8,
decimal_places=2<|fim▁hole|> max_digits=8,
decimal_places=2
)
field_material = models.DecimalField(
default=0,
max_digits=8,
decimal_places=2
)
quantity_assesement = models.CharField(
max_length=12,
verbose_name=_("Quantity assesement method"),
choices=ASSESEMENT
)
order_if_below = models.SmallIntegerField()
discontinued = models.DateField(
null=True,
blank=True
)
order_now = models.BooleanField(
default=False
)
units_damaged_or_lost = models.SmallIntegerField(
default=0
)
quantity = models.SmallIntegerField(
"Usable quantity",
default=0,
null=True,
blank=True
)
quantity_called_for = models.SmallIntegerField(
default=0,
null=True,
blank=True
)
image = VersatileImageField(
'Image',
upload_to='images/product/',
null=True, blank=True,
width_field='width',
height_field='height',
ppoi_field='ppoi'
)
height = models.PositiveIntegerField(
'Image Height',
blank=True,
null=True
)
width = models.PositiveIntegerField(
'Image Width',
blank=True,
null=True
)
ppoi = PPOIField(
'Image PPOI'
)
no_longer_available = models.BooleanField(default=False)
class Meta:
ordering= ['item']
def __str__(self):
return self.item
def get_time(self):
return self.admin_time + self.prep_time + self.field_time
def get_cost(self):
return self.admin_material + self.prep_material + self.field_material
def get_usable_quantity(self):
return self.quantity - self.units_damaged_or_lost - self.quantity_called_for
def get_success_url(self):
return reverse("product:category_item_list", kwargs={'slug': self.base.slug})
def get_absolute_url(self):
return reverse("product:item_detail", kwargs={'slug': self.slug})
def pre_save_product(sender, instance, *args, **kwargs):
if not instance.no_longer_available:
instance.discontinued = None
elif instance.no_longer_available and instance.discontinued == None:
instance.discontinued = datetime.date.today()
if (
instance.quantity -
instance.units_damaged_or_lost -
instance.quantity_called_for
) < instance.order_if_below:
instance.order_now = True
else:
instance.order_now = False
instance.slug = slugify(instance.item)
pre_save.connect(pre_save_product, sender=Product)<|fim▁end|> | )
prep_material = models.DecimalField(
default=0, |
<|file_name|>application.js<|end_file_name|><|fim▁begin|><|fim▁hole|>//= require assets/templates/user<|fim▁end|> | |
<|file_name|>image.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>include!(concat!(env!("OUT_DIR"), "/sdl_image_bindings.rs"));<|fim▁end|> | |
<|file_name|>qlayout_ref_test.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
import unittest
from sys import getrefcount
from PySide.QtGui import QHBoxLayout, QVBoxLayout, QGridLayout, QWidget
from PySide.QtGui import QStackedLayout, QFormLayout
from PySide.QtGui import QApplication, QPushButton, QLabel
from helper import UsesQApplication
class SaveReference(UsesQApplication):
'''Test case to check if QLayout-derived classes increment the refcount
of widgets passed to addWidget()'''
# Adding here as nose can't see the qapplication attrib we inherit
qapplication = True
def setUp(self):
#Acquire resources
super(SaveReference, self).setUp()
self.widget1 = QPushButton('click me')
self.widget2 = QLabel('aaa')
def tearDown(self):
#Release resources
del self.widget2
del self.widget1
super(SaveReference, self).tearDown()
def checkLayoutReference(self, layout):
#Checks the reference cound handling of layout.addWidget
self.assertEqual(getrefcount(self.widget1), 2)
layout.addWidget(self.widget1)
self.assertEqual(getrefcount(self.widget1), 3)
self.assertEqual(getrefcount(self.widget2), 2)
layout.addWidget(self.widget2)
self.assertEqual(getrefcount(self.widget2), 3)
# Check if doesn't mess around with previous widget refcount
self.assertEqual(getrefcount(self.widget1), 3)
def testMoveLayout(self):
l = QHBoxLayout()
self.assertEqual(getrefcount(self.widget1), 2)
l.addWidget(self.widget1)
self.assertEqual(getrefcount(self.widget1), 3)
w = QWidget()
w.setLayout(l)
self.assertEqual(getrefcount(self.widget1), 3)
def testHBoxReference(self):
#QHBoxLayout.addWidget reference count
w = QWidget()
self.checkLayoutReference(QHBoxLayout(w))
def testVBoxReference(self):
#QVBoxLayout.addWidget reference count
w = QWidget()
self.checkLayoutReference(QVBoxLayout(w))
def testGridReference(self):
#QGridLayout.addWidget reference count
w = QWidget()
self.checkLayoutReference(QGridLayout(w))
def testFormReference(self):
#QFormLayout.addWidget reference count
w = QWidget()
self.checkLayoutReference(QFormLayout(w))
def testStackedReference(self):
#QStackedLayout.addWidget reference count
w = QWidget()
self.checkLayoutReference(QStackedLayout(w))
class MultipleAdd(UsesQApplication):
'''Test case to check if refcount is incremented only once when multiple
calls to addWidget are made with the same widget'''
qapplication = True
def setUp(self):
#Acquire resources
super(MultipleAdd, self).setUp()
self.widget = QPushButton('click me')
self.win = QWidget()
self.layout = QHBoxLayout(self.win)
def tearDown(self):
#Release resources
del self.widget
del self.layout
del self.win
super(MultipleAdd, self).tearDown()
def testRefCount(self):
#Multiple QLayout.addWidget calls on the same widget
self.assertEqual(getrefcount(self.widget), 2)
self.layout.addWidget(self.widget)
self.assertEqual(getrefcount(self.widget), 3)
self.layout.addWidget(self.widget)
self.assertEqual(getrefcount(self.widget), 3)
self.layout.addWidget(self.widget)
self.assertEqual(getrefcount(self.widget), 3)
class InternalAdd(UsesQApplication):
def testInternalRef(self):
mw = QWidget()
w = QWidget()
ow = QWidget()
topLayout = QGridLayout()
# unique reference
self.assertEqual(getrefcount(w), 2)
self.assertEqual(getrefcount(ow), 2)
topLayout.addWidget(w, 0, 0)
topLayout.addWidget(ow, 1, 0)
# layout keep the referemce
self.assertEqual(getrefcount(w), 3)
self.assertEqual(getrefcount(ow), 3)
mainLayout = QGridLayout()
mainLayout.addLayout(topLayout, 1, 0, 1, 4)
# the same reference
self.assertEqual(getrefcount(w), 3)
self.assertEqual(getrefcount(ow), 3)
mw.setLayout(mainLayout)
# now trasfer the ownership to mw
self.assertEqual(getrefcount(w), 3)
self.assertEqual(getrefcount(ow), 3)
del mw
# remove the ref and invalidate the widget
self.assertEqual(getrefcount(w), 2)
self.assertEqual(getrefcount(ow), 2)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | '''Test cases for QLayout handling of child widgets references''' |
<|file_name|>hmacHash.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
""" hmacHash.py
Implemention of Request for Comments: 2104
HMAC: Keyed-Hashing for Message Authentication
HMAC is a mechanism for message authentication
using cryptographic hash functions. HMAC can be used with any
iterative cryptographic hash function, e.g., MD5, SHA-1, in
combination with a secret shared key. The cryptographic strength of
HMAC depends on the properties of the underlying hash function.
This implementation of HMAC uses a generic cryptographic 'hashFunction'
(self.H). Hash functions must conform to the crypto.hash method
conventions and are not directly compatible with the Python sha1 or md5 algorithms.
[IETF] RFC 2104 "HMAC: Keyed-Hashing for Message Authentication"
>>>key = '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
>>>keyedHashAlg = HMAC(SHA1, key)
>>>result = keyedHashAlg(data)
"""
from crypto.hash.hash import Hash
class HMAC(Hash):
""" To compute HMAC over the data `text' we perform
H(K XOR opad, H(K XOR ipad, text))
"""
def __init__(self, hashFunction, key = None):
""" initialize HMAC with hashfunction and optionally the key """
# should check for right type of function
self.H = hashFunction() # a new instance for inner hash
self.H_outer = hashFunction() # separate outer context to allow intermediate digests
self.B = self.H.raw_block_size # in bytes, note - hash block size typically 1
# and raw_block_size much larger
# e.g. raw_block_size is 64 bytes for SHA1 and MD5
self.name = 'HMAC_'+self.H.name
self.blocksize = 1 # single octets can be hashed by padding to raw block size
self.raw_block_size = self.H.raw_block_size
self.digest_size = self.H.digest_size
if key != None:
self.setKey(key)
else:
self.keyed = None
def setKey(self,key):
""" setKey(key) ... key is binary string """
if len(key) > self.B: # if key is too long then hash it
key = self.H(key) # humm... this is odd, hash can be smaller than B
else: # should raise error on short key, but breaks tests :-(
key =key + (self.B-len(key)) * chr(0)
self.k_xor_ipad = ''.join([chr(ord(bchar)^0x36) for bchar in key])
self.k_xor_opad = ''.join([chr(ord(bchar)^0x5C) for bchar in key])
self.keyed = 1
self.reset()
def reset(self):
self.H.reset()
if self.keyed == None :
raise 'no key defined'
self.H.update(self.k_xor_ipad) # start inner hash with key xored with ipad
# outer hash always called as one full pass (no updates)
def update(self,data):
if self.keyed == None :
raise 'no key defined'
self.H.update(data)<|fim▁hole|>
from crypto.hash.sha1Hash import SHA1
class HMAC_SHA1(HMAC):
""" Predefined HMAC built on SHA1 """
def __init__(self, key = None):
""" optionally initialize with key """
HMAC.__init__(self,SHA1,key)
from crypto.hash.md5Hash import MD5
class HMAC_MD5(HMAC):
""" Predefined HMAC built on SHA1 """
def __init__(self, key = None):
""" optionally initialize with key """
HMAC.__init__(self,MD5,key)<|fim▁end|> | def digest(self):
if self.keyed == None :
raise 'no key defined'
return self.H_outer(self.k_xor_opad+self.H.digest()) |
<|file_name|>Game.js<|end_file_name|><|fim▁begin|>/**
* @author Richard Davey <rich@photonstorm.com>
* @copyright 2016 Photon Storm Ltd.
* @license {@link https://github.com/photonstorm/phaser/blob/master/license.txt|MIT License}
*/
/**
* This is where the magic happens. The Game object is the heart of your game,
* providing quick access to common functions and handling the boot process.
*
* "Hell, there are no rules here - we're trying to accomplish something."
* Thomas A. Edison
*
* @class Phaser.Game
* @constructor
* @param {object} [gameConfig={}] - The game configuration object
*/
Phaser.Game = function (gameConfig) {
/**
* @property {number} id - Phaser Game ID (for when Pixi supports multiple instances).
* @readonly
*/
this.id = Phaser.GAMES.push(this) - 1;
/**
* @property {object} config - The Phaser.Game configuration object.
*/
this.config = null;
/**
* @property {object} physicsConfig - The Phaser.Physics.World configuration object.
*/
this.physicsConfig = null;
/**
* @property {string|HTMLElement} parent - The Games DOM parent.
* @default
*/
this.parent = '';
/**
* The current Game Width in pixels.
*
* _Do not modify this property directly:_ use {@link Phaser.ScaleManager#setGameSize} - eg. `game.scale.setGameSize(width, height)` - instead.
*
* @property {integer} width
* @readonly
* @default
*/
this.width = 800;
/**
* The current Game Height in pixels.
*
* _Do not modify this property directly:_ use {@link Phaser.ScaleManager#setGameSize} - eg. `game.scale.setGameSize(width, height)` - instead.
*
* @property {integer} height
* @readonly
* @default
*/
this.height = 600;
/**
* The resolution of your game. This value is read only, but can be changed at start time it via a game configuration object.
*
* @property {integer} resolution
* @readonly
* @default
*/
this.resolution = 1;
/**
* @property {integer} _width - Private internal var.
* @private
*/
this._width = 800;
/**
* @property {integer} _height - Private internal var.
* @private
*/
this._height = 600;
/**
* @property {boolean} transparent - Use a transparent canvas background or not.
* @default
*/
this.transparent = false;
/**
* @property {boolean} antialias - Anti-alias graphics. By default scaled images are smoothed in Canvas and WebGL, set anti-alias to false to disable this globally.
* @default
*/
this.antialias = false;
/**
* @property {boolean} preserveDrawingBuffer - The value of the preserveDrawingBuffer flag affects whether or not the contents of the stencil buffer is retained after rendering.
* @default
*/
this.preserveDrawingBuffer = false;
/**
* Clear the Canvas each frame before rendering the display list.
* You can set this to `false` to gain some performance if your game always contains a background that completely fills the display.
* @property {boolean} clearBeforeRender
* @default
*/
this.clearBeforeRender = true;
/**
* @property {PIXI.CanvasRenderer|PIXI.WebGLRenderer} renderer - The Pixi Renderer.
* @protected
*/
this.renderer = null;
/**
* @property {number} renderType - The Renderer this game will use. Either Phaser.AUTO, Phaser.CANVAS, Phaser.WEBGL, or Phaser.HEADLESS.
* @readonly
*/
this.renderType = Phaser.AUTO;
/**
* @property {Phaser.StateManager} state - The StateManager.
*/
this.state = null;
/**
* @property {boolean} isBooted - Whether the game engine is booted, aka available.
* @readonly
*/
this.isBooted = false;
/**
* @property {boolean} isRunning - Is game running or paused?
* @readonly
*/
this.isRunning = false;
/**
* @property {Phaser.RequestAnimationFrame} raf - Automatically handles the core game loop via requestAnimationFrame or setTimeout
* @protected
*/
this.raf = null;
/**
* @property {Phaser.GameObjectFactory} add - Reference to the Phaser.GameObjectFactory.
*/
this.add = null;
/**
* @property {Phaser.GameObjectCreator} make - Reference to the GameObject Creator.
*/
this.make = null;
/**
* @property {Phaser.Cache} cache - Reference to the assets cache.
*/
this.cache = null;
/**
* @property {Phaser.Input} input - Reference to the input manager
*/
this.input = null;
/**
* @property {Phaser.Loader} load - Reference to the assets loader.
*/
this.load = null;
/**
* @property {Phaser.Math} math - Reference to the math helper.
*/
this.math = null;
/**
* @property {Phaser.Net} net - Reference to the network class.
*/
this.net = null;
/**
* @property {Phaser.ScaleManager} scale - The game scale manager.
*/
this.scale = null;
/**
* @property {Phaser.SoundManager} sound - Reference to the sound manager.
*/
this.sound = null;
/**
* @property {Phaser.Stage} stage - Reference to the stage.
*/
this.stage = null;
/**
* @property {Phaser.Time} time - Reference to the core game clock.
*/
this.time = null;
/**
* @property {Phaser.TweenManager} tweens - Reference to the tween manager.
*/
this.tweens = null;
/**
* @property {Phaser.World} world - Reference to the world.
*/
this.world = null;
/**
* @property {Phaser.Physics} physics - Reference to the physics manager.
*/
this.physics = null;
/**
* @property {Phaser.PluginManager} plugins - Reference to the plugin manager.
*/
this.plugins = null;
/**
* @property {Phaser.RandomDataGenerator} rnd - Instance of repeatable random data generator helper.
*/
this.rnd = null;
/**
* @property {Phaser.Device} device - Contains device information and capabilities.
*/
this.device = Phaser.Device;
/**
* @property {Phaser.Camera} camera - A handy reference to world.camera.
*/
this.camera = null;
/**
* @property {HTMLCanvasElement} canvas - A handy reference to renderer.view, the canvas that the game is being rendered in to.
*/
this.canvas = null;
/**
* @property {CanvasRenderingContext2D} context - A handy reference to renderer.context (only set for CANVAS games, not WebGL)
*/
this.context = null;
/**
* @property {Phaser.Utils.Debug} debug - A set of useful debug utilities.
*/
this.debug = null;
/**
* @property {Phaser.Particles} particles - The Particle Manager.
*/
this.particles = null;
/**
* @property {Phaser.Create} create - The Asset Generator.
*/
this.create = null;
/**
* If `false` Phaser will automatically render the display list every update. If `true` the render loop will be skipped.
* You can toggle this value at run-time to gain exact control over when Phaser renders. This can be useful in certain types of game or application.
* Please note that if you don't render the display list then none of the game object transforms will be updated, so use this value carefully.
* @property {boolean} lockRender
* @default
*/
this.lockRender = false;
/**
* @property {boolean} stepping - Enable core loop stepping with Game.enableStep().
* @default
* @readonly
*/
this.stepping = false;
/**
* @property {boolean} pendingStep - An internal property used by enableStep, but also useful to query from your own game objects.
* @default
* @readonly
*/
this.pendingStep = false;
/**
* @property {number} stepCount - When stepping is enabled this contains the current step cycle.
* @default
* @readonly
*/
this.stepCount = 0;
/**
* @property {Phaser.Signal} onPause - This event is fired when the game pauses.
*/
this.onPause = null;
/**
* @property {Phaser.Signal} onResume - This event is fired when the game resumes from a paused state.
*/
this.onResume = null;
/**
* @property {Phaser.Signal} onBlur - This event is fired when the game no longer has focus (typically on page hide).
*/
this.onBlur = null;
/**
* @property {Phaser.Signal} onFocus - This event is fired when the game has focus (typically on page show).
*/
this.onFocus = null;
/**
* @property {boolean} _paused - Is game paused?
* @private
*/
this._paused = false;
/**
* @property {boolean} _codePaused - Was the game paused via code or a visibility change?
* @private
*/
this._codePaused = false;
/**
* The ID of the current/last logic update applied this render frame, starting from 0.
* The first update is `currentUpdateID === 0` and the last update is `currentUpdateID === updatesThisFrame.`
* @property {integer} currentUpdateID
* @protected
*/
this.currentUpdateID = 0;
/**
* Number of logic updates expected to occur this render frame; will be 1 unless there are catch-ups required (and allowed).
* @property {integer} updatesThisFrame
* @protected
*/
this.updatesThisFrame = 1;
/**
* @property {number} _deltaTime - Accumulate elapsed time until a logic update is due.
* @private
*/
this._deltaTime = 0;
/**
* @property {number} _lastCount - Remember how many 'catch-up' iterations were used on the logicUpdate last frame.
* @private
*/
this._lastCount = 0;
/**
* @property {number} _spiraling - If the 'catch-up' iterations are spiraling out of control, this counter is incremented.
* @private
*/
this._spiraling = 0;
/**
* @property {boolean} _kickstart - Force a logic update + render by default (always set on Boot and State swap)
* @private
*/
this._kickstart = true;
/**
* If the game is struggling to maintain the desired FPS, this signal will be dispatched.
* The desired/chosen FPS should probably be closer to the {@link Phaser.Time#suggestedFps} value.
* @property {Phaser.Signal} fpsProblemNotifier
* @public
*/
this.fpsProblemNotifier = new Phaser.Signal();
/**
* @property {boolean} forceSingleUpdate - Should the game loop force a logic update, regardless of the delta timer? Set to true if you know you need this. You can toggle it on the fly.
*/
this.forceSingleUpdate = true;
/**
* @property {number} _nextNotification - The soonest game.time.time value that the next fpsProblemNotifier can be dispatched.
* @private
*/
this._nextFpsNotification = 0;
// Parse the configuration object
if (typeof gameConfig !== 'object')
{
throw new Error('Missing game configuration object: ' + gameConfig);
}
this.parseConfig(gameConfig);
this.device.whenReady(this.boot, this);
return this;
};
Phaser.Game.prototype = {
/**
* Parses a Game configuration object.
*
* @method Phaser.Game#parseConfig
* @protected
*/
parseConfig: function (config) {
this.config = config;
if (config['enableDebug'] === undefined)
{
this.config.enableDebug = true;
}
if (config['width'])
{
this._width = config['width'];
}
if (config['height'])
{
this._height = config['height'];
}
if (config['renderer'])
{
this.renderType = config['renderer'];
}
if (config['parent'])
{
this.parent = config['parent'];
}
if (config['transparent'] !== undefined)
{
this.transparent = config['transparent'];
}
if (config['antialias'] !== undefined)
{
this.antialias = config['antialias'];
}
if (config['resolution'])
{
this.resolution = config['resolution'];
}
if (config['preserveDrawingBuffer'] !== undefined)
{
this.preserveDrawingBuffer = config['preserveDrawingBuffer'];
}
if (config['clearBeforeRender'] !== undefined)
{
this.clearBeforeRender = config['clearBeforeRender'];
}
if (config['physicsConfig'])
{
this.physicsConfig = config['physicsConfig'];
}
var seed = [(Date.now() * Math.random()).toString()];
if (config['seed'])
{
seed = config['seed'];
}
this.rnd = new Phaser.RandomDataGenerator(seed);
var state = null;
if (config['state'])
{
state = config['state'];
}
this.state = new Phaser.StateManager(this, state);
},
/**
* Initialize engine sub modules and start the game.
*
* @method Phaser.Game#boot
* @protected
*/
boot: function () {
if (this.isBooted)
{
return;<|fim▁hole|>
this.onPause = new Phaser.Signal();
this.onResume = new Phaser.Signal();
this.onBlur = new Phaser.Signal();
this.onFocus = new Phaser.Signal();
this.isBooted = true;
PIXI.game = this;
this.math = Phaser.Math;
this.scale = new Phaser.ScaleManager(this, this._width, this._height);
this.stage = new Phaser.Stage(this);
this.setUpRenderer();
this.world = new Phaser.World(this);
this.add = new Phaser.GameObjectFactory(this);
this.make = new Phaser.GameObjectCreator(this);
this.cache = new Phaser.Cache(this);
this.load = new Phaser.Loader(this);
this.time = new Phaser.Time(this);
this.tweens = new Phaser.TweenManager(this);
this.input = new Phaser.Input(this);
this.sound = new Phaser.SoundManager(this);
this.physics = new Phaser.Physics(this, this.physicsConfig);
this.particles = new Phaser.Particles(this);
this.create = new Phaser.Create(this);
this.plugins = new Phaser.PluginManager(this);
this.net = new Phaser.Net(this);
this.time.boot();
this.stage.boot();
this.world.boot();
this.scale.boot();
this.input.boot();
this.sound.boot();
this.state.boot();
if (this.config['enableDebug'])
{
this.debug = new Phaser.Utils.Debug(this);
this.debug.boot();
}
else
{
this.debug = { preUpdate: function () {}, update: function () {}, reset: function () {} };
}
this.showDebugHeader();
this.isRunning = true;
if (this.config && this.config['forceSetTimeOut'])
{
this.raf = new Phaser.RequestAnimationFrame(this, this.config['forceSetTimeOut']);
}
else
{
this.raf = new Phaser.RequestAnimationFrame(this, false);
}
this._kickstart = true;
if (window['focus'])
{
if (!window['PhaserGlobal'] || (window['PhaserGlobal'] && !window['PhaserGlobal'].stopFocus))
{
window.focus();
}
}
this.raf.start();
},
/**
* Displays a Phaser version debug header in the console.
*
* @method Phaser.Game#showDebugHeader
* @protected
*/
showDebugHeader: function () {

    // Host pages can suppress the banner entirely via the global PhaserGlobal object.
    if (window['PhaserGlobal'] && window['PhaserGlobal'].hideBanner)
    {
        return;
    }

    var v = Phaser.VERSION;
    var r = 'Canvas';        // renderer label shown in the banner
    var a = 'HTML Audio';    // audio backend label shown in the banner
    var c = 1;               // capability count; controls how many hearts are coloured below

    if (this.renderType === Phaser.WEBGL)
    {
        r = 'WebGL';
        c++;
    }
    else if (this.renderType === Phaser.HEADLESS)
    {
        r = 'Headless';
    }

    if (this.device.webAudio)
    {
        a = 'WebAudio';
        c++;
    }

    // Chrome supports %c styling in console.log, so print the styled banner there.
    if (this.device.chrome)
    {
        var args = [
            '%c %c %c Phaser v' + v + ' | Pixi.js | ' + r + ' | ' + a + ' %c %c ' + '%c http://phaser.io %c\u2665%c\u2665%c\u2665',
            'background: #fb8cb3',
            'background: #d44a52',
            'color: #ffffff; background: #871905;',
            'background: #d44a52',
            'background: #fb8cb3',
            'background: #ffffff'
        ];

        // Colour the first `c` of the three trailing hearts red; grey out the rest.
        for (var i = 0; i < 3; i++)
        {
            if (i < c)
            {
                args.push('color: #ff2424; background: #fff');
            }
            else
            {
                args.push('color: #959595; background: #fff');
            }
        }

        console.log.apply(console, args);
    }
    else if (window['console'])
    {
        // Plain-text fallback for consoles without %c support.
        console.log('Phaser v' + v + ' | Pixi.js ' + PIXI.VERSION + ' | ' + r + ' | ' + a + ' | http://phaser.io');
    }

},
/**
* Checks if the device is capable of using the requested renderer and sets it up or an alternative if not.
*
* @method Phaser.Game#setUpRenderer
* @protected
*/
setUpRenderer: function () {

    // Honour a user-supplied canvas; otherwise create one sized to the game.
    if (this.config['canvas'])
    {
        this.canvas = this.config['canvas'];
    }
    else
    {
        this.canvas = Phaser.Canvas.create(this, this.width, this.height, this.config['canvasID'], true);
    }

    // Apply custom CSS from the config, or a default full-screen-friendly style.
    if (this.config['canvasStyle'])
    {
        this.canvas.style = this.config['canvasStyle'];
    }
    else
    {
        this.canvas.style['-webkit-full-screen'] = 'width: 100%; height: 100%';
    }

    // Use Canvas when HEADLESS or CANVAS was requested, or AUTO on a device without WebGL.
    if (this.renderType === Phaser.HEADLESS || this.renderType === Phaser.CANVAS || (this.renderType === Phaser.AUTO && !this.device.webGL))
    {
        if (this.device.canvas)
        {
            // They requested Canvas and their browser supports it
            this.renderType = Phaser.CANVAS;

            this.renderer = new PIXI.CanvasRenderer(this);
            this.context = this.renderer.context;
        }
        else
        {
            // No 2D canvas either: nothing this engine can render with.
            throw new Error('Phaser.Game - Cannot create Canvas or WebGL context, aborting.');
        }
    }
    else
    {
        // They requested WebGL and their browser supports it
        this.renderType = Phaser.WEBGL;

        this.renderer = new PIXI.WebGLRenderer(this);

        this.context = null;

        // Recover from GPU context drops (see contextLost / contextRestored).
        this.canvas.addEventListener('webglcontextlost', this.contextLost.bind(this), false);
        this.canvas.addEventListener('webglcontextrestored', this.contextRestored.bind(this), false);
    }

    if (this.device.cocoonJS)
    {
        // CocoonJS fast-paths canvases flagged as "screencanvas" when rendering via Canvas.
        this.canvas.screencanvas = (this.renderType === Phaser.CANVAS) ? true : false;
    }

    if (this.renderType !== Phaser.HEADLESS)
    {
        this.stage.smoothed = this.antialias;

        // Headless games never touch the DOM.
        Phaser.Canvas.addToDOM(this.canvas, this.parent, false);
        Phaser.Canvas.setTouchAction(this.canvas);
    }

},
/**
* Handles WebGL context loss.
*
* @method Phaser.Game#contextLost
* @private
* @param {Event} event - The webglcontextlost event.
*/
contextLost: function (event) {

    // preventDefault tells the browser we intend to restore the context ourselves.
    event.preventDefault();

    // Flag checked by the renderer so it stops drawing until restoration.
    this.renderer.contextLost = true;

},
/**
* Handles WebGL context restoration.
*
* @method Phaser.Game#contextRestored
* @private
*/
contextRestored: function () {

    this.renderer.initContext();

    // GL textures died with the old context; purge them so they re-upload.
    this.cache.clearGLTextures();

    this.renderer.contextLost = false;

},
/**
* The core game loop.
*
* @method Phaser.Game#update
* @protected
* @param {number} time - The current time as provided by RequestAnimationFrame.
*/
update: function (time) {

    this.time.update(time);

    if (this._kickstart)
    {
        // Forced first frame (set on Boot and State swap): exactly one logic update.
        this.updateLogic(this.time.desiredFpsMult);

        // call the game render update exactly once every frame
        this.updateRender(this.time.slowMotion * this.time.desiredFps);

        this._kickstart = false;

        return;
    }

    // if the logic time is spiraling upwards, skip a frame entirely
    if (this._spiraling > 1 && !this.forceSingleUpdate)
    {
        // cause an event to warn the program that this CPU can't keep up with the current desiredFps rate
        if (this.time.time > this._nextFpsNotification)
        {
            // only permit one fps notification per 10 seconds
            this._nextFpsNotification = this.time.time + 10000;

            // dispatch the notification signal
            this.fpsProblemNotifier.dispatch();
        }

        // reset the _deltaTime accumulator which will cause all pending dropped frames to be permanently skipped
        this._deltaTime = 0;
        this._spiraling = 0;

        // call the game render update exactly once every frame
        this.updateRender(this.time.slowMotion * this.time.desiredFps);
    }
    else
    {
        // step size taking into account the slow motion speed
        var slowStep = this.time.slowMotion * 1000.0 / this.time.desiredFps;

        // accumulate time until the slowStep threshold is met or exceeded... up to a limit of 3 catch-up frames at slowStep intervals
        this._deltaTime += Math.max(Math.min(slowStep * 3, this.time.elapsed), 0);

        // call the game update logic multiple times if necessary to "catch up" with dropped frames
        // unless forceSingleUpdate is true
        var count = 0;

        this.updatesThisFrame = Math.floor(this._deltaTime / slowStep);

        if (this.forceSingleUpdate)
        {
            this.updatesThisFrame = Math.min(1, this.updatesThisFrame);
        }

        // Fixed-step accumulator loop: drain whole slowStep intervals from _deltaTime.
        while (this._deltaTime >= slowStep)
        {
            this._deltaTime -= slowStep;
            this.currentUpdateID = count;

            this.updateLogic(this.time.desiredFpsMult);

            count++;

            if (this.forceSingleUpdate && count === 1)
            {
                break;
            }
            else
            {
                // keep the clock fresh between catch-up iterations
                this.time.refresh();
            }
        }

        // detect spiraling (if the catch-up loop isn't fast enough, the number of iterations will increase constantly)
        if (count > this._lastCount)
        {
            this._spiraling++;
        }
        else if (count < this._lastCount)
        {
            // looks like it caught up successfully, reset the spiral alert counter
            this._spiraling = 0;
        }

        this._lastCount = count;

        // call the game render update exactly once every frame unless we're playing catch-up from a spiral condition
        this.updateRender(this._deltaTime / slowStep);
    }

},
/**
* Updates all logic subsystems in Phaser. Called automatically by Game.update.
*
* @method Phaser.Game#updateLogic
* @protected
* @param {number} timeStep - The current timeStep value as determined by Game.update.
*/
updateLogic: function (timeStep) {

    if (!this._paused && !this.pendingStep)
    {
        // In stepping mode each update arms pendingStep, halting the loop
        // until step() is called manually.
        if (this.stepping)
        {
            this.pendingStep = true;
        }

        // Pre-update pass, run in this fixed order.
        this.scale.preUpdate();
        this.debug.preUpdate();
        this.camera.preUpdate();
        this.physics.preUpdate();
        this.state.preUpdate(timeStep);
        this.plugins.preUpdate(timeStep);
        this.stage.preUpdate();

        // Main update pass.
        this.state.update();
        this.stage.update();
        this.tweens.update();
        this.sound.update();
        this.input.update();
        this.physics.update();
        this.particles.update();
        this.plugins.update();

        // Post-update pass.
        this.stage.postUpdate();
        this.plugins.postUpdate();
    }
    else
    {
        // Scaling and device orientation changes are still reflected when paused.
        this.scale.pauseUpdate();
        this.state.pauseUpdate();
        this.debug.preUpdate();
    }

    // Keep display-list transforms current even while paused.
    this.stage.updateTransform();

},
/**
* Runs the Render cycle.
* It starts by calling State.preRender. In here you can do any last minute adjustments of display objects as required.
* It then calls the renderer, which renders the entire display list, starting from the Stage object and working down.
* It then calls plugin.render on any loaded plugins, in the order in which they were enabled.
* After this State.render is called. Any rendering that happens here will take place on-top of the display list.
* Finally plugin.postRender is called on any loaded plugins, in the order in which they were enabled.
* This method is called automatically by Game.update, you don't need to call it directly.
* Should you wish to have fine-grained control over when Phaser renders then use the `Game.lockRender` boolean.
* Phaser will only render when this boolean is `false`.
*
* @method Phaser.Game#updateRender
* @protected
* @param {number} elapsedTime - The time elapsed since the last update.
*/
updateRender: function (elapsedTime) {

    // Game code can suspend rendering entirely via Game.lockRender.
    if (this.lockRender)
    {
        return;
    }

    this.state.preRender(elapsedTime);

    if (this.renderType !== Phaser.HEADLESS)
    {
        // Render the display list, then let plugins and the State draw on top.
        this.renderer.render(this.stage);
        this.plugins.render(elapsedTime);
        this.state.render(elapsedTime);
    }

    // postRender runs even for headless games.
    this.plugins.postRender(elapsedTime);

},
/**
* Enable core game loop stepping. When enabled you must call game.step() directly (perhaps via a DOM button?)
* Calling step will advance the game loop by one frame. This is extremely useful for hard to track down errors!
*
* @method Phaser.Game#enableStep
*/
enableStep: function () {

    // Arm manual stepping: updateLogic will only run again once step() is called.
    this.stepping = true;
    this.pendingStep = false;
    this.stepCount = 0;

},
/**
* Disables core game loop stepping.
*
* @method Phaser.Game#disableStep
*/
disableStep: function () {

    // Return to the normal free-running game loop.
    this.stepping = false;
    this.pendingStep = false;

},
/**
* When stepping is enabled you must call this function directly (perhaps via a DOM button?) to advance the game loop by one frame.
* This is extremely useful to hard to track down errors! Use the internal stepCount property to monitor progress.
*
* @method Phaser.Game#step
*/
step: function () {

    // Clearing pendingStep lets the next updateLogic run; stepCount tracks progress.
    this.pendingStep = false;
    this.stepCount++;

},
/**
* Nukes the entire game from orbit.
*
* Calls destroy on Game.state, Game.sound, Game.scale, Game.stage, Game.input, Game.physics and Game.plugins.
*
* Then sets all of those local handlers to null, destroys the renderer, removes the canvas from the DOM
* and resets the PIXI default renderer.
*
* @method Phaser.Game#destroy
*/
destroy: function () {

    // Stop the RAF loop first so no further update/render fires during teardown.
    this.raf.stop();

    this.state.destroy();
    this.sound.destroy();
    this.scale.destroy();
    this.stage.destroy();
    this.input.destroy();
    this.physics.destroy();
    this.plugins.destroy();

    // Drop references so the subsystems can be garbage collected.
    this.state = null;
    this.sound = null;
    this.scale = null;
    this.stage = null;
    this.input = null;
    this.physics = null;
    this.plugins = null;
    this.cache = null;
    this.load = null;
    this.time = null;
    this.world = null;

    this.isBooted = false;

    // Tear down the renderer, detach the canvas and reset the PIXI global.
    this.renderer.destroy(false);
    Phaser.Canvas.removeFromDOM(this.canvas);
    PIXI.defaultRenderer = null;

    Phaser.GAMES[this.id] = null;

},
/**
* Called by the Stage visibility handler.
*
* @method Phaser.Game#gamePaused
* @param {object} event - The DOM event that caused the game to pause, if any.
* @protected
*/
gamePaused: function (event) {
// If the game is already paused it was done via game code, so don't re-pause it
if (!this._paused)
{
this._paused = true;
this.time.gamePaused();
if (this.sound.muteOnPause)
{
this.sound.setMute();
}
this.onPause.dispatch(event);
// Avoids Cordova iOS crash event: https://github.com/photonstorm/phaser/issues/1800
if (this.device.cordova && this.device.iOS)
{
this.lockRender = true;
}
}
},
/**
* Called by the Stage visibility handler.
*
* @method Phaser.Game#gameResumed
* @param {object} event - The DOM event that caused the game to pause, if any.
* @protected
*/
gameResumed: function (event) {
// Game is paused, but wasn't paused via code, so resume it
if (this._paused && !this._codePaused)
{
this._paused = false;
this.time.gameResumed();
this.input.reset();
if (this.sound.muteOnPause)
{
this.sound.unsetMute();
}
this.onResume.dispatch(event);
// Avoids Cordova iOS crash event: https://github.com/photonstorm/phaser/issues/1800
if (this.device.cordova && this.device.iOS)
{
this.lockRender = false;
}
}
},
/**
* Called by the Stage visibility handler.
*
* @method Phaser.Game#focusLoss
* @param {object} event - The DOM event that caused the game to pause, if any.
* @protected
*/
focusLoss: function (event) {
this.onBlur.dispatch(event);
if (!this.stage.disableVisibilityChange)
{
this.gamePaused(event);
}
},
/**
* Called by the Stage visibility handler.
*
* @method Phaser.Game#focusGain
* @param {object} event - The DOM event that caused the game to pause, if any.
* @protected
*/
focusGain: function (event) {
this.onFocus.dispatch(event);
if (!this.stage.disableVisibilityChange)
{
this.gameResumed(event);
}
}
};
Phaser.Game.prototype.constructor = Phaser.Game;
/**
* The paused state of the Game. A paused game doesn't update any of its subsystems.
* When a game is paused the onPause event is dispatched. When it is resumed the onResume event is dispatched.
* @name Phaser.Game#paused
* @property {boolean} paused - Gets and sets the paused state of the Game.
*/
Object.defineProperty(Phaser.Game.prototype, "paused", {

    get: function () {
        return this._paused;
    },

    set: function (value) {

        if (value === true)
        {
            // Only transition (mute, stop the clock, notify) on an actual state change.
            if (this._paused === false)
            {
                this._paused = true;
                this.sound.setMute();
                this.time.gamePaused();
                this.onPause.dispatch(this);
            }
            // Remember the pause came from code, so visibility changes won't auto-resume it.
            this._codePaused = true;
        }
        else
        {
            if (this._paused)
            {
                this._paused = false;
                this.input.reset();
                this.sound.unsetMute();
                this.time.gameResumed();
                this.onResume.dispatch(this);
            }
            this._codePaused = false;
        }

    }

});
/**
*
* "Deleted code is debugged code." - Jeff Sickel
*
* ヽ(〃^▽^〃)ノ
*
*/<|fim▁end|> | } |
<|file_name|>server.rs<|end_file_name|><|fim▁begin|>//! A low-level interface to send and receive server-client protocol messages
use std;
use std::sync::{Arc, Mutex};
use std::sync::mpsc::{Sender, Receiver, TryRecvError};
use std::sync::atomic::{Ordering, AtomicUsize};
use bincode;
use common::protocol;
use common::socket::{SendSocket, ReceiveSocket};
#[allow(missing_docs)]
#[derive(Clone)]
pub struct SSender {
    // A boxed slice is used to reduce the sent size
    pub sender: Sender<Box<[u8]>>,
    // Running total of serialized bytes queued through `tell`.
    // Please replace with AtomicU64 when it becomes stable
    pub bytes_sent: Arc<AtomicUsize>,
}
impl SSender {
    /// Wrap `sender`, starting the byte counter at zero.
    pub fn new(sender: Sender<Box<[u8]>>) -> SSender {
        let bytes_sent = Arc::new(AtomicUsize::new(0));
        SSender {
            sender: sender,
            bytes_sent: bytes_sent,
        }
    }

    /// Serialize `msg` and queue it for sending, recording its size.
    pub fn tell(&self, msg: &protocol::ClientToServer) {
        let encoded = bincode::serialize(msg, bincode::Infinite).unwrap();
        // We aren't reading this until long after the write, so `Relaxed` suffices.
        self.bytes_sent.fetch_add(encoded.len(), Ordering::Relaxed);
        self.sender.send(encoded.into_boxed_slice()).unwrap();
    }
}
#[allow(missing_docs)]
// Mutex-wrapped so clones can share the single underlying mpsc Receiver.
#[derive(Clone)]
pub struct SReceiver (Arc<Mutex<Receiver<Box<[u8]>>>>);
impl SReceiver {
    /// Non-blocking read: `None` when no message is currently queued.
    pub fn try(&self) -> Option<protocol::ServerToClient> {
        let chan = self.0.lock().unwrap();
        match chan.try_recv() {
            Ok(bytes) => Some(bincode::deserialize(&Vec::from(bytes)).unwrap()),
            Err(TryRecvError::Empty) => None,
            e => {
                // A disconnected channel is a bug; surface it via unwrap.
                e.unwrap();
                unreachable!();
            },
        }
    }

    /// Blocking read: waits until a message arrives and decodes it.
    pub fn wait(&self) -> protocol::ServerToClient {
        bincode::deserialize(self.0.lock().unwrap().recv().unwrap().as_ref()).unwrap()
    }
}
#[allow(missing_docs)]
#[derive(Clone)]
pub struct T {
    // Outgoing half: queues ClientToServer messages for the send thread.
    pub talk : SSender,
    // Incoming half: yields ServerToClient messages from the listen thread.
    pub listen : SReceiver,
}
#[allow(missing_docs)]
pub fn new(
server_url: &str,
listen_url: &str,
) -> T {<|fim▁hole|> let listen_url = listen_url.to_owned();
let recv_send = recv_send.clone();
std::thread::spawn(move || {
let mut listen_socket =
ReceiveSocket::new(
listen_url.clone().as_ref(),
Some(std::time::Duration::from_secs(30)),
);
loop {
match listen_socket.read() {
None => break,
Some(msg) => {
recv_send.send(msg.into_boxed_slice()).unwrap()
},
}
}
})
};
let _send_thread = {
let server_url = server_url.to_owned();
std::thread::spawn(move || {
let mut talk_socket =
SendSocket::new(
server_url.as_ref(),
Some(std::time::Duration::from_secs(30)),
);
loop {
match send_recv.recv() {
Err(_) => break,
Ok(msg) => {
let msg = Vec::from(msg);
talk_socket.write(msg.as_ref()).unwrap();
},
}
}
})
};
T {
talk: SSender::new(send_send),
listen: SReceiver(Arc::new(Mutex::new(recv_recv))),
}
}<|fim▁end|> | let (send_send, send_recv) = std::sync::mpsc::channel();
let (recv_send, recv_recv) = std::sync::mpsc::channel();
let _recv_thread ={ |
<|file_name|>corecd.py<|end_file_name|><|fim▁begin|>#
# corecd.py
#
# Copyright (C) 2014 Fabio Erculiani
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from pyanaconda.installclass import BaseInstallClass
from pyanaconda.i18n import N_
from pyanaconda.sabayon import Entropy
class InstallClass(BaseInstallClass):
id = "sabayon_corecd"
name = N_("Sabayon Core")
sortPriority = 9998
_l10n_domain = "anaconda"
efi_dir = "sabayon"<|fim▁hole|> BaseInstallClass.configure(self, anaconda)
BaseInstallClass.setDefaultPartitioning(self, anaconda.storage)
def getBackend(self):
from pyanaconda.sabayon.livecd import LiveCDCopyBackend
return LiveCDCopyBackend
def __init__(self):
BaseInstallClass.__init__(self)<|fim▁end|> |
dmrc = None
def configure(self, anaconda): |
<|file_name|>iterable_differs.d.ts<|end_file_name|><|fim▁begin|>import { ChangeDetectorRef } from '../change_detector_ref';
import { Provider } from 'angular2/src/core/di';
export interface IterableDiffer {
    // NOTE(review): returns an implementation-specific change record; confirm
    // the exact shape against the concrete differ implementations.
    diff(object: Object): any;
    // Invoked when the differ is no longer needed.
    onDestroy(): any;
}
/**
 * Provides a factory for {@link IterableDiffer}.
 */
export interface IterableDifferFactory {
    /** Whether this factory can produce a differ for the given collection. */
    supports(objects: Object): boolean;
    /** Build a differ bound to the given change detector reference. */
    create(cdRef: ChangeDetectorRef): IterableDiffer;
}
/**
* A repository of different iterable diffing strategies used by NgFor, NgClass, and others.
*/
export declare class IterableDiffers {
factories: IterableDifferFactory[];
constructor(factories: IterableDifferFactory[]);
static create(factories: IterableDifferFactory[], parent?: IterableDiffers): IterableDiffers;
/**
* Takes an array of {@link IterableDifferFactory} and returns a provider used to extend the
* inherited {@link IterableDiffers} instance with the provided factories and return a new
* {@link IterableDiffers} instance.
*
<|fim▁hole|> * ### Example
*
* ```
* @Component({
* viewProviders: [
* IterableDiffers.extend([new ImmutableListDiffer()])
* ]
* })
* ```
*/
static extend(factories: IterableDifferFactory[]): Provider;
find(iterable: Object): IterableDifferFactory;
}<|fim▁end|> | * The following example shows how to extend an existing list of factories,
* which will only be applied to the injector for this component and its children.
* This step is all that's required to make a new {@link IterableDiffer} available.
*
|
<|file_name|>basic.ts<|end_file_name|><|fim▁begin|>export const data = [
{
id: 549731,
name: 'Beautiful Lies',
artist: 'Birdy',
release: '2016.03.26',
type: 'Deluxe',
typeCode: '1',
genre: 'Pop',
genreCode: '1',
grade: '4',
price: 10000,
downloadCount: 1000,
listenCount: 5000
},
{
id: 436461,
name: 'X',
artist: 'Ed Sheeran',
release: '2014.06.24',
type: 'Deluxe',
typeCode: '1',
genre: 'Pop',
genreCode: '1',
grade: '5',
price: 20000,
downloadCount: 1000,
listenCount: 5000
},
{
id: 295651,
name: 'Moves Like Jagger',
release: '2011.08.08',
artist: 'Maroon5',
type: 'Single',
typeCode: '3',
genre: 'Pop,Rock',
genreCode: '1,2',
grade: '2',
price: 7000,
downloadCount: 1000,
listenCount: 5000
},
{
id: 541713,
name: 'A Head Full Of Dreams',
artist: 'Coldplay',
release: '2015.12.04',
type: 'Deluxe',
typeCode: '1',
genre: 'Rock',
genreCode: '2',
grade: '3',
price: 25000,
downloadCount: 200,
listenCount: 5000
},
{
id: 265289,
name: '21',
artist: 'Adele',
release: '2011.01.21',
type: 'Deluxe',
typeCode: '1',
genre: 'Pop,R&B',
genreCode: '1,3',
grade: '5',
price: 15000,
downloadCount: 1000,
listenCount: 5000
},
{
id: 555871,
name: 'Warm On A Cold Night',
artist: 'HONNE',
release: '2016.07.22',
type: 'EP',
typeCode: '1',
genre: 'R&B,Electronic',
genreCode: '3,4',
grade: '4',
price: 11000,
downloadCount: 34000,
listenCount: 5000
},
{
id: 550571,
name: 'Take Me To The Alley',
artist: 'Gregory Porter',
release: '2016.09.02',
type: 'Deluxe',
typeCode: '1',
genre: 'Jazz',
genreCode: '5',
grade: '3',
price: 30000,
downloadCount: 1000,
listenCount: 5000
},
{
id: 544128,
name: 'Make Out',
artist: 'LANY',
release: '2015.12.11',
type: 'EP',
typeCode: '2',
genre: 'Electronic',
genreCode: '4',
grade: '2',<|fim▁hole|> downloadCount: 1200,
listenCount: 5000
},
{
id: 366374,
name: 'Get Lucky',
artist: 'Daft Punk',
release: '2013.04.23',
type: 'Single',
typeCode: '3',
genre: 'Pop,Funk',
genreCode: '1,5',
grade: '3',
price: 9000,
downloadCount: 1000,
listenCount: 5000
},
{
id: 8012747,
name: 'Valtari',
artist: 'Sigur Rós',
release: '2012.05.31',
type: 'EP',
typeCode: '3',
genre: 'Rock',
genreCode: '2',
grade: '5',
price: 10000,
downloadCount: 1040,
listenCount: 5000
},
{
id: 502792,
name: 'Bush',
artist: 'Snoop Dogg',
release: '2015.05.12',
type: 'EP',
typeCode: '2',
genre: 'Hiphop',
genreCode: '5',
grade: '5',
price: 18000,
downloadCount: 2000,
listenCount: 5000
},
{
id: 294574,
name: '4',
artist: 'Beyoncé',
release: '2011.07.26',
type: 'Deluxe',
typeCode: '1',
genre: 'Pop',
genreCode: '1',
grade: '3',
price: 12000,
downloadCount: 1000,
listenCount: 5000
},
{
id: 317659,
name: "I Won't Give Up",
artist: 'Jason Mraz',
release: '2012.01.03',
type: 'Single',
typeCode: '3',
genre: 'Pop',
genreCode: '1',
grade: '2',
price: 7000,
downloadCount: 1000,
listenCount: 5000
},
{
id: 583551,
name: 'Following My Intuition',
artist: 'Craig David',
release: '2016.10.01',
type: 'Deluxe',
typeCode: '1',
genre: 'R&B,Electronic',
genreCode: '3,4',
grade: '5',
price: 15000,
downloadCount: 1000,
listenCount: 5000
},
{
id: 490500,
name: 'Blue Skies',
release: '2015.03.18',
artist: 'Lenka',
type: 'Single',
typeCode: '3',
genre: 'Pop,Rock',
genreCode: '1,2',
grade: '5',
price: 6000,
downloadCount: 2000,
listenCount: 5000
},
{
id: 587871,
name: 'This Is Acting',
artist: 'Sia',
release: '2016.10.22',
type: 'EP',
typeCode: '2',
genre: 'Pop',
genreCode: '1',
grade: '3',
price: 20000,
downloadCount: 1400,
listenCount: 5000
},
{
id: 504288,
name: 'Blurryface',
artist: 'Twenty One Pilots',
release: '2015.05.19',
type: 'EP',
typeCode: '2',
genre: 'Rock',
genreCode: '2',
grade: '1',
price: 13000,
downloadCount: 3000,
listenCount: 5000
},
{
id: 450720,
name: "I'm Not The Only One",
artist: 'Sam Smith',
release: '2014.09.15',
type: 'Single',
typeCode: '3',
genre: 'Pop,R&B',
genreCode: '1,3',
grade: '4',
price: 8000,
downloadCount: 2000,
listenCount: 5000
},
{
id: 498896,
name: 'The Magic Whip',
artist: 'Blur',
release: '2015.04.27',
type: 'EP',
typeCode: '2',
genre: 'Rock',
genreCode: '2',
grade: '3',
price: 15000,
downloadCount: 1200,
listenCount: 5000
},
{
id: 491379,
name: 'Chaos And The Calm',
artist: 'James Bay',
release: '2015.03.23',
type: 'EP',
typeCode: '2',
genre: 'Pop,Rock',
genreCode: '1,2',
grade: '5',
price: 12000,
downloadCount: 1000,
listenCount: 5100
}
];
export const sortData = [
{
alphabetA: 'BCA',
alphabetB: 'B',
alphabetC: 'ACC',
numberA: 2,
stringNumberA: '2',
mixedValue: 'A'
},
{
alphabetA: 'A',
alphabetB: 'A',
alphabetC: 'ACC',
numberA: 1,
stringNumberA: '1',
mixedValue: 'C'
},
{
alphabetA: 'C',
alphabetB: 'A',
alphabetC: 'C',
numberA: 1,
stringNumberA: '100',
mixedValue: 'EA'
},
{
alphabetA: 'A',
alphabetB: 'B',
alphabetC: 'ACA',
numberA: 1,
stringNumberA: '101',
mixedValue: 'AK'
},
{
alphabetA: 'A',
alphabetB: 'B',
alphabetC: 'C',
numberA: 10,
stringNumberA: '11',
mixedValue: '121'
},
{
alphabetA: 'D',
alphabetB: 'E',
alphabetC: 'CDD',
numberA: 1,
stringNumberA: '201',
mixedValue: '2'
},
{
alphabetA: 'BAA',
alphabetB: 'C',
alphabetC: 'C',
numberA: 20,
stringNumberA: '202',
mixedValue: '30'
},
{
alphabetA: 'A',
alphabetB: 'A',
alphabetC: 'A',
numberA: 24,
stringNumberA: '211',
mixedValue: '1'
},
{
alphabetA: 'FGA',
alphabetB: 'F',
alphabetC: 'FGA',
numberA: 25,
stringNumberA: '301',
mixedValue: 'O'
}
];<|fim▁end|> | price: 12000, |
<|file_name|>events.js<|end_file_name|><|fim▁begin|>/** @license MIT License (c) copyright B Cavalier & J Hann */
/**
* wire/dojo/events plugin
* wire plugin that can connect event handlers after an object is
* initialized, and disconnect them when an object is destroyed.
* This implementation uses dojo.connect and dojo.disconnect to do
* the work of connecting and disconnecting event handlers.
*
* wire is part of the cujo.js family of libraries (http://cujojs.com/)
*
* Licensed under the MIT License at:
* http://www.opensource.org/licenses/mit-license.php
*/
define(['when', '../lib/connection', 'dojo', 'dojo/_base/event'],
function(when, connection, events) {
return {
wire$plugin: function eventsPlugin(/*, options*/) {
var connectHandles = [];
function handleConnection(source, eventName, handler) {
    // Record every dojo.connect handle so the plugin can disconnect them
    // all when the wiring context is destroyed.
    connectHandles.push(events.connect(source, eventName, handler));
}
// Note: the second parameter deliberately shadows the function name; it is
// the connect spec being parsed, not a recursive call target.
function connect(source, connect, options, wire) {
    // Delegate spec parsing to the shared connection helper; each resolved
    // connection is registered through handleConnection.
    return connection.parse(source, connect, options, wire, handleConnection);
}
/*
Function: connectFacet
Setup connections for each specified in the connects param. Each key
in connects is a reference, and the corresponding value is an object
whose keys are event names, and whose values are methods of object to
invoke. For example:
connect: {
"refToOtherThing": {<|fim▁hole|> "onclick": "_handleButtonClick"
},
"dijit!myWidget": {
"onChange": "_handleValueChange"
}
"myOwnEventOrMethod": {
"refToOtherThing": "methodOfOtherThing"
}
}
Parameters:
factory - wiring factory
object - object being wired, will be the target of connected events
connects - specification of events to connect, see examples above.
*/
function connectFacet(wire, facet) {
    // Build one connection promise for every reference key in the facet's
    // options spec; the result resolves once all connections are made.
    var pending = [];
    var specs = facet.options;

    for (var ref in specs) {
        pending.push(connect(facet, ref, specs[ref], wire));
    }

    return when.all(pending);
}
return {
context: {
destroy: function(resolver) {
for (var i = connectHandles.length - 1; i >= 0; i--){
events.disconnect(connectHandles[i]);
}
resolver.resolve();
}
},
facets: {
connect: {
connect: function(resolver, facet, wire) {
resolver.resolve(connectFacet(wire, facet));
}
}
}
};
}
};
});<|fim▁end|> | "eventOrMethodOfOtherThing": "myOwnMethod"
},
"dom!myButton": { |
<|file_name|>proxy_logging.py<|end_file_name|><|fim▁begin|># Copyright (C) 2020 OpenIO SAS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from swift.common.middleware.proxy_logging import ProxyLoggingMiddleware
from swift.common.swob import Request
from swift.common.utils import config_true_value, get_logger
def flat_dict_from_dict(dict_):
"""
Create a dictionary without depth.
{
'depth0': {
'depth1': {
'depth2': 'test1',
'depth2': 'test2'
}
}
}
=>
depth0.depth1.depth2:test1;depth0.depth1.depth2:test2
"""
flat_dict = dict()
for key, value in dict_.items():
if not isinstance(value, dict):
flat_dict[key] = value
continue
flat_dict_ = flat_dict_from_dict(value)
for key_, value_ in flat_dict_.items():
flat_dict[key + '.' + key_] = value_<|fim▁hole|> flat_perfdata = flat_dict_from_dict(perfdata)
perfdata_list = list()
perfdata_list.append('PERFDATA')
for key, value in sorted(flat_perfdata.items()):
if key.startswith('rawx.'):
if 'http' in key[5:]:
key = key[:key.index('http') + 4]
perfdata_list.append(key + ':' + '%.4f' % value)
return '...'.join(perfdata_list)
class OioProxyLoggingMiddleware(ProxyLoggingMiddleware):
    """
    Keep the same behavior as ProxyLoggingMiddleware,
    but add the values of 'perfdata' if it is enabled.
    """

    def __init__(self, app, conf):
        super(OioProxyLoggingMiddleware, self).__init__(app, conf)
        self.logger = get_logger(conf)
        # Master switch: collect per-request performance data at all?
        self.perfdata = config_true_value(conf.get('oio_perfdata', 'false'))
        self.perfdata_user_agents = None
        if self.perfdata:
            # Every config key starting with 'oio_perfdata_user_agent' holds a
            # regex pattern; only matching user agents trigger perfdata.
            pattern_dict = {k: v for k, v in conf.items()
                            if k.startswith("oio_perfdata_user_agent")}
            self.perfdata_user_agents = [re.compile(pattern_dict[k])
                                         for k in sorted(pattern_dict.keys())]
            if not self.perfdata_user_agents:
                self.logger.warn('No user_agent pattern defined, '
                                 'all clients will add perfdata.')

    def log_request(self, req, *args, **kwargs):
        # Attach the formatted perfdata (when collected) to swift's log_info
        # so it shows up in the access log line, then defer to the parent.
        oio_perfdata = req.environ.get('oio.perfdata')
        if oio_perfdata is not None:
            req.environ.setdefault('swift.log_info', []).append(
                perfdata_to_str(oio_perfdata))
        super(OioProxyLoggingMiddleware, self).log_request(
            req, *args, **kwargs)

    def __call__(self, env, start_response):
        if self.perfdata:
            add_perfata = False
            if not self.perfdata_user_agents:
                # No pattern configured: collect for every client.
                add_perfata = True
            else:
                req = Request(env)
                if req.user_agent:
                    for pat in self.perfdata_user_agents:
                        if pat.match(req.user_agent):
                            add_perfata = True
                            break
            if add_perfata:
                # Presence of this key signals downstream oio code to record
                # timing data into it — TODO confirm against the oio client.
                env.setdefault('oio.perfdata', dict())
        return super(OioProxyLoggingMiddleware, self).__call__(
            env, start_response)
def filter_factory(global_conf, **local_conf):
    """Paste-deploy entry point: build an OIO-aware proxy-logging wrapper.

    Local options override the global ones; the merged dict is captured by
    the returned closure.
    """
    conf = global_conf.copy()
    conf.update(local_conf)
    def proxy_logger(app):
        # Wrap the WSGI app with the perfdata-aware logging middleware.
        return OioProxyLoggingMiddleware(app, conf)
    return proxy_logger<|fim▁end|>
def perfdata_to_str(perfdata): |
<|file_name|>active.py<|end_file_name|><|fim▁begin|>import re
from django import template<|fim▁hole|>from django.core.urlresolvers import NoReverseMatch
from django.core.urlresolvers import reverse
register = template.Library()


@register.simple_tag(takes_context=True)
def active(context, name):
    """Return 'active' when the current request path matches URL *name*.

    Unresolvable URL names and non-matching paths both yield ''.
    """
    try:
        url_pattern = reverse(name)
    except NoReverseMatch:
        return ''
    return 'active' if re.match(url_pattern, context['request'].path) else ''
<|file_name|>skate.js<|end_file_name|><|fim▁begin|>(function (global, factory) {
if (typeof define === 'function' && define.amd) {
define(['exports', 'module', 'skatejs'], factory);
} else if (typeof exports !== 'undefined' && typeof module !== 'undefined') {
factory(exports, module, require('skatejs'));
} else {
var mod = {
exports: {}
};
factory(mod.exports, mod, global.skate);
global.skate = mod.exports;
}
})(this, function (exports, module, _skatejs) {
'use strict';
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
var _skate = _interopRequireDefault(_skatejs);
<|fim▁hole|>//# sourceMappingURL=../../../js/aui/internal/skate.js.map<|fim▁end|> | var auiSkate = _skate['default'].noConflict();
module.exports = auiSkate;
}); |
<|file_name|>Naive Bayes Classifier - Copie.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Wheat price prediction using Baysian classification.
# Version 1.0
# Christophe Foyer - 2016
from xlrd import open_workbook
import random
import math
#set filename:
filename = 'Wheat-price-data.xlsx'
#import wheat price data (will automate downloading later, probably a different script that writes to the excel file)
def importExcel(filename):
    """Load wheat price data from *filename* (an Excel workbook).

    Returns a list of rows ``[serial_date, year, month, day, price, ...]``
    where ``year`` is years since 1900 (see excelDate) and ``price`` is
    bucketed to the nearest multiple of 10.  The header row is skipped and
    the final placeholder row dropped.
    """
    excel = open_workbook(filename)
    # NOTE(review): if the workbook has several sheets, only the last
    # sheet's data survives -- presumably single-sheet workbooks; confirm.
    for sheet in excel.sheets():
        number_of_rows = sheet.nrows
        number_of_columns = sheet.ncols
        dataset = [[0.0 for x in range(number_of_columns + 3)] for y in range(number_of_rows)]
        for row in range(1, number_of_rows):
            # Excel stores dates as the number of days since 1900-Jan-0.
            days = float(str(sheet.cell(row, 0).value))
            dataset[row-1][0] = days
            [dataset[row-1][1], dataset[row-1][2], dataset[row-1][3]] = excelDate(days)
            value = sheet.cell(row, 1).value
            try:
                value = str(int(value))
            except ValueError:
                # Non-numeric price cell: keep the 0.0 placeholder.  The old
                # code re-evaluated float(value) in a 'finally' block, which
                # raised on exactly the rows this handler had just skipped.
                pass
            else:
                # Bucket the price to the nearest multiple of 10.
                dataset[row-1][4] = round(float(value)/10, 0)*10
            # Remaining columns: plain floats; non-numeric cells stay 0.0.
            for col in range(2, number_of_columns):
                value = sheet.cell(row, col).value
                try:
                    dataset[row-1][col + 3] = float(value)
                except ValueError:
                    pass
    # Drop the trailing all-zero placeholder row.
    del dataset[-1]
    return dataset
def excelDate(days):
    """Turn an Excel serial day count into ``[year, month, day]``.

    ``year`` is the number of whole years since 1900.  The leap-year table
    deliberately contains 1900 to reproduce Excel's historical leap-year
    bug (Excel believes 1900-02-29 exists).
    """
    days_in_month = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    leap_years = [1900, 1904, 1908, 1912, 1916, 1920, 1924, 1928, 1932, 1936, 1940, 1944, 1948, 1952, 1956, 1960, 1964, 1968, 1972, 1976, 1980, 1984, 1988, 1992, 1996, 2000, 2004, 2008, 2012, 2016, 2020, 2024, 2028, 2032, 2036, 2040, 2044, 2048, 2052, 2056, 2060, 2064, 2068, 2072, 2076, 2080, 2084, 2088, 2092, 2096]
    # Strip off whole years.  NOTE: the loop condition reuses the leap flag
    # computed for the *previous* year -- a quirk of the original algorithm
    # that is kept intact here.
    year = 0
    leap = 0
    while days >= 365 + leap:
        leap = 1 if (year + 1900) in leap_years else 0
        days = days - 365 - leap
        year += 1
    # Strip off whole months within the final year.
    month = 1
    for m in range(1, 12):
        feb_extra = 1 if (m == 2 and (year + 1900) in leap_years) else 0
        month_len = days_in_month[m - 1] + feb_extra
        if days <= month_len:
            break
        days -= month_len
        month = m + 1
    return [year, month, days]
def splitDataset(dataset, splitRatio):
    """Randomly partition *dataset* into ``[trainSet, testSet]``.

    ``splitRatio`` is the fraction (0..1) of rows moved into the train set;
    the remaining rows make up the test set.  The input list is not mutated.
    """
    wanted = int(len(dataset) * splitRatio)
    pool = list(dataset)
    train = []
    while len(train) < wanted:
        pick = random.randrange(len(pool))
        train.append(pool.pop(pick))
    return [train, pool]
def separateByClass(dataset):
    """Group dataset rows by the class label stored at index 4."""
    grouped = {}
    for row in dataset:
        grouped.setdefault(row[4], []).append(row)
    return grouped
def mean(numbers):
    """Arithmetic mean of *numbers* (always returns a float)."""
    return float(sum(numbers)) / len(numbers)
def stdev(numbers):
    """Sample standard deviation (n - 1 denominator); 0 when len < 2."""
    if len(numbers) <= 1:
        return 0
    avg = mean(numbers)
    variance = sum(pow(v - avg, 2) for v in numbers) / float(len(numbers) - 1)
    return math.sqrt(variance)
def summarize(dataset):
    """Per-attribute ``(mean, stdev)`` pairs, skipping the class column (4)."""
    stats = [(mean(column), stdev(column)) for column in zip(*dataset)]
    stats.pop(4)
    return stats
def summarizeByClass(dataset):
separated = separateByClass(dataset)
print separated
summaries = {}
for classValue, instances in separated.iteritems():
summaries[classValue] = summarize(instances)
return summaries
def calculateProbability(x, mean, stdev):
    """Gaussian probability density of *x*; a degenerate stdev of 0 yields 1."""
    if stdev == 0:
        return 1
    exponent = math.exp(-(math.pow(x - mean, 2) / (2 * math.pow(stdev, 2))))
    return exponent / (math.sqrt(2 * math.pi) * stdev)
def calculateClassProbabilities(summaries, inputVector):
    """Naive-Bayes likelihood of *inputVector* under each class's Gaussians."""
    probabilities = {}
    for label, stats in summaries.iteritems():
        likelihood = 1
        # Multiply the per-attribute densities together (naive independence).
        for idx, (avg, sd) in enumerate(stats):
            likelihood *= calculateProbability(inputVector[idx], avg, sd)
        probabilities[label] = likelihood
    return probabilities
def predict(summaries, inputVector):
    """Return the class label with the highest computed likelihood."""
    best_label = None
    best_prob = -1
    probs = calculateClassProbabilities(summaries, inputVector)
    for label, prob in probs.iteritems():
        if best_label is None or prob > best_prob:
            best_prob = prob
            best_label = label
    return best_label
def getPredictions(summaries, testSet):
predictions = []
for i in range(len(testSet)):<|fim▁hole|> return predictions
def getAccuracy(testSet, predictions):
    """Percentage of rows whose label (index 4) matches its prediction."""
    hits = sum(1 for i in range(len(testSet))
               if testSet[i][4] == predictions[i])
    return hits / float(len(testSet)) * 100.0
def reorganizeData(dataset):
    """Augment every row with the full rows of its preceding days.

    Output row i is ``[label, *day-1 row, *day-2 row, ...]`` going back to
    the oldest date in the dataset, where ``label`` is ``dataset[i][4]``.
    Dates with no recorded row contribute ``["unknown"] * len(row)``
    placeholders; a row with no history at all stays ``["unknown"]``.
    """
    if not dataset:
        return []
    # Hoisted out of the loops: the original rebuilt '[l[0] for l in dataset]'
    # and recomputed min() for every (row, day) pair, making this
    # quadratic-plus for no benefit.
    dates = [row[0] for row in dataset]
    first_date = min(dates)
    # Date -> row index; first occurrence wins, matching list.index().
    row_by_date = {}
    for idx, day in enumerate(dates):
        if day not in row_by_date:
            row_by_date[day] = idx
    reorganized = [["unknown"] for _ in dataset]
    for i, row in enumerate(dataset):
        span = int(row[0] - first_date)
        if span > 1:
            # At least one day of history: expose this row's class label.
            reorganized[i][0] = row[4]
        for back in range(1, span):
            prev = row_by_date.get(row[0] - back)
            if prev is not None:
                reorganized[i].extend(dataset[prev])
            else:
                # No row recorded for that date.
                reorganized[i].extend(["unknown"] * len(row))
    return reorganized
def main():
    """Load the wheat price data, train the classifier, report accuracy."""
    # Fraction of rows used for training; the rest become the test set.
    splitRatio = 0.67
    dataset = importExcel(filename)
    #reorganise data to include past days
    dataset = reorganizeData(dataset)
    print dataset
    # NOTE: Python 2 print *statement* -- '(...).format(...)' is evaluated
    # as one expression first, then printed.
    print('Loaded data file {0} with {1} rows').format(filename, len(dataset))
    trainingSet, testSet = splitDataset(dataset, splitRatio)
    print('Split {0} rows into train={1} and test={2} rows').format(len(dataset), len(trainingSet), len(testSet))
    # prepare model
    summaries = summarizeByClass(trainingSet)
    # test model
    predictions = getPredictions(summaries, testSet)
    accuracy = getAccuracy(testSet, predictions)
    print('Accuracy: {0}%').format(accuracy)
main()<|fim▁end|> | result = predict(summaries, testSet[i])
predictions.append(result) |
<|file_name|>alarmsetting.py<|end_file_name|><|fim▁begin|>from utils.face import Face
import pygame
from utils.message import Message
from utils.alarm import Alarm
class Button(pygame.sprite.Sprite):
def __init__(self, rect, color=(0,0,255), action=None):
pygame.sprite.Sprite.__init__(self)
self.color = color
self.action = action
self.rect = pygame.Rect(rect)
self.baseImage = pygame.Surface((self.rect.width, self.rect.height))
self.image = self.baseImage
def update(self):
rect = self.baseImage.get_rect()
pygame.draw.circle(self.baseImage, self.color, rect.center, rect.width/2, 1);
def touchDown(self):
rect = self.baseImage.get_rect()
pygame.draw.circle(self.baseImage, self.color, rect.center, rect.width/2, 0);
def touchUp(self):<|fim▁hole|> if self.action is not None:
self.action()
def setAction(self, action):
self.action = action
class Line(Face):
    """A single text row rendered onto its own pygame surface."""

    def __init__(self, rect, color=(0,0,255), text=""):
        pygame.sprite.Sprite.__init__(self)
        self._alarmList = {}
        self.color = color
        self.text = text
        self.rect = pygame.Rect(rect)
        # Backing surface the text sprite is blitted onto each update.
        self.baseImage = pygame.Surface((self.rect.width, self.rect.height))
        self.image = self.baseImage
        label = Message((self.text,), vector=(0,0), fontsize=45,
                        align="left", padding=0, fgcolor=(0,0,255))
        self.faceSprite = pygame.sprite.GroupSingle(label)
        # Anchor the label on the left edge, vertically centered.
        self.faceSprite.sprite.rect.midleft = self.image.get_rect().midleft

    def update(self):
        """Redraw the label onto the backing surface."""
        self.faceSprite.draw(self.baseImage)
class AlarmSetting(Face):
    """Face (screen page) that lists alarm rows and routes touch events."""
    def __init__(self, rect, alarm, color=(0,0,255)):
        pygame.sprite.Sprite.__init__(self)
        self._alarmList = {}
        # Only accept a real Alarm model object.
        if isinstance(alarm, Alarm):
            self._alarmObject = alarm
        else:
            raise Exception("Not an Alarm-class object")
        self.color = color
        self.rect = pygame.Rect(rect)
        self.requestingFace = False
        # Off-screen surface the line sprites are composited onto.
        self.baseImage = pygame.Surface((self.rect.width, self.rect.height))
        self.image = self.baseImage
        self._lines = []
        # Four stacked placeholder rows, each 4/5 wide and 1/5 tall,
        # right-aligned and spaced a quarter of the height apart.
        for i in range(4):
            line = pygame.sprite.GroupSingle(Line(pygame.Rect((0, 0),(rect.height/5*4, rect.height/5)), text="Hello"))
            line.sprite.rect.topright = (rect.width, rect.height/4*i)
            self._lines.append(line)
    def addAlarm(self):
        # Append a round button wired back to this method, so tapping it
        # adds yet another button row.
        line = pygame.sprite.GroupSingle(Button(pygame.Rect((0, 0),(self.rect.height/5, self.rect.height/5))))
        line.sprite.rect.topright = (self.rect.width, self.rect.height/4)
        line.sprite.setAction(self.addAlarm)
        self._lines.append(line)
    def update(self):
        # Refresh every row sprite and blit it onto the backing surface.
        for line in self._lines:
            line.update()
            # line.sprite.rect.midbottom = self.image.get_rect()
            line.draw(self.baseImage)
    def handleEvent(self, event):
        # Forward mouse presses/releases to whichever row was hit.
        pos = pygame.mouse.get_pos()
        if event.type == pygame.MOUSEBUTTONDOWN:
            for butt in self._lines:
                if butt.sprite.rect.collidepoint(pos):
                    butt.sprite.touchDown()
        if event.type == pygame.MOUSEBUTTONUP:
            for butt in self._lines:
                if butt.sprite.rect.collidepoint(pos):
                    butt.sprite.touchUp()<|fim▁end|>
self.image.fill(pygame.Color("black"))
pygame.draw.circle(self.baseImage, self.color, rect.center, rect.width/2, 1); |
<|file_name|>chat_client.py<|end_file_name|><|fim▁begin|># chat_client.py
import sys, socket, select
def chat_client():
if(len(sys.argv) < 3) :
print 'Usage : python chat_client.py hostname port'
sys.exit()
host = sys.argv[1]
port = int(sys.argv[2])
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(2)
# connect to remote host
try :
s.connect((host, port))
except :
print 'Unable to connect'
sys.exit()
print 'Connected to remote host. You can start sending messages'
sys.stdout.write('[Me] '); sys.stdout.flush()
while 1:
socket_list = [sys.stdin, s]
# Get the list sockets which are readable
read_sockets, write_sockets, error_sockets = select.select(socket_list , [], [])
for sock in read_sockets:
if sock == s:
# incoming message from remote server, s
data = sock.recv(4096)
if not data :
print '\nDisconnected from chat server'
sys.exit()
else :
#print data<|fim▁hole|> sys.stdout.write('[Me] '); sys.stdout.flush()
else :
# user entered a message
msg = sys.stdin.readline()
s.send(msg)
sys.stdout.write('[Me] '); sys.stdout.flush()
# Run the chat client until it exits, propagating its return value
# to the shell as the process exit code.
if __name__ == "__main__":
    sys.exit(chat_client())
<|file_name|>main.py<|end_file_name|><|fim▁begin|># Copyright 2019 Tecnativa - Sergio Teruel
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo.addons.website_sale.controllers.main import WebsiteSale
from odoo import http<|fim▁hole|>
@http.route()
def product(self, product, category='', search='', **kwargs):
res = super().product(
product, category=category, search=search, **kwargs)
attributes_detail = product.attribute_line_ids.filtered(
lambda x: x.attribute_id.website_product_detail_image_published)
res.qcontext['attributes_detail'] = attributes_detail
return res<|fim▁end|> |
class WebsiteSaleProductDetailAttributeImage(WebsiteSale): |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.