file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
Map.js | import EntityService from './EntityService';
import LevelService from './LevelService';
import {Point} from 'pixi.js';
import {centeredToTopLeft} from '../../utils/coordinate';
// coordinate system:
// top-left origin
//
// 0---x+
// |
// y
// +
const WIDTH = 1000;
const HEIGHT = 1000;
export default class Map {
constructor() {
this.element = document.createElement('canvas');
this.element.id = 'map';
this.element.style.display = 'none';
this.element.style.position = 'absolute';
this.element.style.top = '50%';
this.element.style.left = '50%';
this.element.style.margin = `-${WIDTH / 2}px 0 0 -${HEIGHT / 2}px`;
this.element.style.border = '1px #606060 solid';
this.element.style.backgroundColor = '#303030';
this.element.style.opacity = '0.9';
this.element.style.width = `${WIDTH}px`;
this.element.style.height = `${HEIGHT}px`;
this.element.width = WIDTH;
this.element.height = HEIGHT;
document.body.appendChild(this.element);
this.context = this.element.getContext('2d');
this.blips = [];
this.isVisible = false;
}
| (entities) {
this.blips = [];
entities.forEach(entity => {
let size = entity.getSize();
size = this.sizeScaledToCanvas(size);
let position = entity.getPosition();
position = this.positionScaledToCanvas(position);
position = centeredToTopLeft(position.x, position.y, WIDTH, HEIGHT);
position = this.positionAdjustedForFillRect(position.x, position.y, size.w, size.h);
let blip = {
x: position.x,
y: position.y,
w: size.w,
h: size.h,
color: this.getColorByEntityType(entity)
};
this.blips.push(blip);
});
}
draw() {
this.context.clearRect(0, 0, WIDTH, HEIGHT);
this.blips.forEach(blip => {
this.context.fillStyle = blip.color;
this.context.fillRect(blip.x, blip.y, blip.w, blip.h);
});
}
toggleVisibility() {
this.isVisible = !this.isVisible;
if (this.isVisible) {
this.element.style.display = 'block';
} else {
this.element.style.display = 'none';
}
}
positionRelativeToPlayer(position) {
let player = EntityService.get().getLocalPlayer().getPosition();
return {
x: position.x - player.x,
y: position.y - player.y
}
}
positionScaledToCanvas(position) {
let scale = {
x: WIDTH / LevelService.get().getWidth(),
y: HEIGHT / LevelService.get().getHeight()
};
return {
x: position.x * scale.x,
y: position.y * scale.y
}
}
positionAdjustedForFillRect(x, y, w, h) {
return {
x: x - (w / 2),
y: y - (h / 2)
}
}
sizeScaledToCanvas(size) {
let scale = {
x: WIDTH / LevelService.get().getWidth(),
y: HEIGHT / LevelService.get().getHeight()
};
return {
w: size.w * scale.x,
h: size.h * scale.y
}
}
getColorByEntityType(entity) {
switch (entity.type.toLowerCase()) {
case 'ship':
if (EntityService.get().isLocalPlayer(entity)) {
return 'white';
} else {
return 'red';
}
case 'wall':
return 'gray';
case 'asteroid':
return 'burlywood';
}
}
};
| update | identifier_name |
Map.js | import EntityService from './EntityService';
import LevelService from './LevelService';
import {Point} from 'pixi.js';
import {centeredToTopLeft} from '../../utils/coordinate';
// coordinate system:
// top-left origin
//
// 0---x+
// |
// y
// +
const WIDTH = 1000;
const HEIGHT = 1000;
export default class Map {
constructor() {
this.element = document.createElement('canvas');
this.element.id = 'map';
this.element.style.display = 'none';
this.element.style.position = 'absolute';
this.element.style.top = '50%';
this.element.style.left = '50%';
this.element.style.margin = `-${WIDTH / 2}px 0 0 -${HEIGHT / 2}px`;
this.element.style.border = '1px #606060 solid';
this.element.style.backgroundColor = '#303030';
this.element.style.opacity = '0.9';
this.element.style.width = `${WIDTH}px`;
this.element.style.height = `${HEIGHT}px`;
this.element.width = WIDTH;
this.element.height = HEIGHT;
document.body.appendChild(this.element);
this.context = this.element.getContext('2d');
this.blips = [];
this.isVisible = false;
}
update(entities) {
this.blips = [];
entities.forEach(entity => {
let size = entity.getSize();
size = this.sizeScaledToCanvas(size);
let position = entity.getPosition();
position = this.positionScaledToCanvas(position);
position = centeredToTopLeft(position.x, position.y, WIDTH, HEIGHT);
position = this.positionAdjustedForFillRect(position.x, position.y, size.w, size.h);
let blip = {
x: position.x,
y: position.y,
w: size.w,
h: size.h,
color: this.getColorByEntityType(entity)
};
this.blips.push(blip);
});
}
draw() |
toggleVisibility() {
this.isVisible = !this.isVisible;
if (this.isVisible) {
this.element.style.display = 'block';
} else {
this.element.style.display = 'none';
}
}
positionRelativeToPlayer(position) {
let player = EntityService.get().getLocalPlayer().getPosition();
return {
x: position.x - player.x,
y: position.y - player.y
}
}
positionScaledToCanvas(position) {
let scale = {
x: WIDTH / LevelService.get().getWidth(),
y: HEIGHT / LevelService.get().getHeight()
};
return {
x: position.x * scale.x,
y: position.y * scale.y
}
}
positionAdjustedForFillRect(x, y, w, h) {
return {
x: x - (w / 2),
y: y - (h / 2)
}
}
sizeScaledToCanvas(size) {
let scale = {
x: WIDTH / LevelService.get().getWidth(),
y: HEIGHT / LevelService.get().getHeight()
};
return {
w: size.w * scale.x,
h: size.h * scale.y
}
}
getColorByEntityType(entity) {
switch (entity.type.toLowerCase()) {
case 'ship':
if (EntityService.get().isLocalPlayer(entity)) {
return 'white';
} else {
return 'red';
}
case 'wall':
return 'gray';
case 'asteroid':
return 'burlywood';
}
}
};
| {
this.context.clearRect(0, 0, WIDTH, HEIGHT);
this.blips.forEach(blip => {
this.context.fillStyle = blip.color;
this.context.fillRect(blip.x, blip.y, blip.w, blip.h);
});
} | identifier_body |
Map.js | import EntityService from './EntityService';
import LevelService from './LevelService';
import {Point} from 'pixi.js';
import {centeredToTopLeft} from '../../utils/coordinate';
// coordinate system:
// top-left origin
//
// 0---x+
// |
// y
// +
const WIDTH = 1000;
const HEIGHT = 1000;
export default class Map {
constructor() {
this.element = document.createElement('canvas');
this.element.id = 'map';
this.element.style.display = 'none';
this.element.style.position = 'absolute';
this.element.style.top = '50%';
this.element.style.left = '50%';
this.element.style.margin = `-${WIDTH / 2}px 0 0 -${HEIGHT / 2}px`;
this.element.style.border = '1px #606060 solid';
this.element.style.backgroundColor = '#303030';
this.element.style.opacity = '0.9';
this.element.style.width = `${WIDTH}px`;
this.element.style.height = `${HEIGHT}px`; | this.element.height = HEIGHT;
document.body.appendChild(this.element);
this.context = this.element.getContext('2d');
this.blips = [];
this.isVisible = false;
}
update(entities) {
this.blips = [];
entities.forEach(entity => {
let size = entity.getSize();
size = this.sizeScaledToCanvas(size);
let position = entity.getPosition();
position = this.positionScaledToCanvas(position);
position = centeredToTopLeft(position.x, position.y, WIDTH, HEIGHT);
position = this.positionAdjustedForFillRect(position.x, position.y, size.w, size.h);
let blip = {
x: position.x,
y: position.y,
w: size.w,
h: size.h,
color: this.getColorByEntityType(entity)
};
this.blips.push(blip);
});
}
draw() {
this.context.clearRect(0, 0, WIDTH, HEIGHT);
this.blips.forEach(blip => {
this.context.fillStyle = blip.color;
this.context.fillRect(blip.x, blip.y, blip.w, blip.h);
});
}
toggleVisibility() {
this.isVisible = !this.isVisible;
if (this.isVisible) {
this.element.style.display = 'block';
} else {
this.element.style.display = 'none';
}
}
positionRelativeToPlayer(position) {
let player = EntityService.get().getLocalPlayer().getPosition();
return {
x: position.x - player.x,
y: position.y - player.y
}
}
positionScaledToCanvas(position) {
let scale = {
x: WIDTH / LevelService.get().getWidth(),
y: HEIGHT / LevelService.get().getHeight()
};
return {
x: position.x * scale.x,
y: position.y * scale.y
}
}
positionAdjustedForFillRect(x, y, w, h) {
return {
x: x - (w / 2),
y: y - (h / 2)
}
}
sizeScaledToCanvas(size) {
let scale = {
x: WIDTH / LevelService.get().getWidth(),
y: HEIGHT / LevelService.get().getHeight()
};
return {
w: size.w * scale.x,
h: size.h * scale.y
}
}
getColorByEntityType(entity) {
switch (entity.type.toLowerCase()) {
case 'ship':
if (EntityService.get().isLocalPlayer(entity)) {
return 'white';
} else {
return 'red';
}
case 'wall':
return 'gray';
case 'asteroid':
return 'burlywood';
}
}
}; | this.element.width = WIDTH; | random_line_split |
microdata.py | """Thin wrapper around the microdata library."""
from __future__ import absolute_import
import microdata
class Item(microdata.Item):
| """Add an "extra" field to microdata Items, so people won't feel the need
to make up ad-hoc properties.
Also add __eq__() and __repr__().
"""
def __init__(self, *args, **kwargs):
super(Item, self).__init__(*args, **kwargs)
self.extra = {}
def json_dict(self):
item = super(Item, self).json_dict()
if self.extra:
item['extra'] = self.extra
return item
def __eq__(self, other):
if not isinstance(other, microdata.Item):
return False
return (self.itemid == other.itemid and
self.itemtype == other.itemtype and
self.props == other.props and
self.extra == getattr(other, 'extra', {}))
def __repr__(self):
return '%s(%r, %r, props=%r, extra=%r)' % (
self.__class__.__name__,
' '.join(uri.string for uri in self.itemtype),
self.itemid,
self.props,
self.extra) | identifier_body | |
microdata.py | """Thin wrapper around the microdata library."""
from __future__ import absolute_import
import microdata
class Item(microdata.Item):
"""Add an "extra" field to microdata Items, so people won't feel the need
to make up ad-hoc properties.
Also add __eq__() and __repr__().
"""
def __init__(self, *args, **kwargs):
super(Item, self).__init__(*args, **kwargs)
self.extra = {}
def json_dict(self):
item = super(Item, self).json_dict()
if self.extra:
|
return item
def __eq__(self, other):
if not isinstance(other, microdata.Item):
return False
return (self.itemid == other.itemid and
self.itemtype == other.itemtype and
self.props == other.props and
self.extra == getattr(other, 'extra', {}))
def __repr__(self):
return '%s(%r, %r, props=%r, extra=%r)' % (
self.__class__.__name__,
' '.join(uri.string for uri in self.itemtype),
self.itemid,
self.props,
self.extra)
| item['extra'] = self.extra | conditional_block |
microdata.py | """Thin wrapper around the microdata library."""
from __future__ import absolute_import
import microdata
class Item(microdata.Item):
"""Add an "extra" field to microdata Items, so people won't feel the need
to make up ad-hoc properties.
Also add __eq__() and __repr__().
"""
def __init__(self, *args, **kwargs):
super(Item, self).__init__(*args, **kwargs)
self.extra = {}
def json_dict(self):
item = super(Item, self).json_dict()
if self.extra:
item['extra'] = self.extra
return item
def __eq__(self, other):
if not isinstance(other, microdata.Item):
return False | self.props == other.props and
self.extra == getattr(other, 'extra', {}))
def __repr__(self):
return '%s(%r, %r, props=%r, extra=%r)' % (
self.__class__.__name__,
' '.join(uri.string for uri in self.itemtype),
self.itemid,
self.props,
self.extra) |
return (self.itemid == other.itemid and
self.itemtype == other.itemtype and | random_line_split |
microdata.py | """Thin wrapper around the microdata library."""
from __future__ import absolute_import
import microdata
class Item(microdata.Item):
"""Add an "extra" field to microdata Items, so people won't feel the need
to make up ad-hoc properties.
Also add __eq__() and __repr__().
"""
def __init__(self, *args, **kwargs):
super(Item, self).__init__(*args, **kwargs)
self.extra = {}
def json_dict(self):
item = super(Item, self).json_dict()
if self.extra:
item['extra'] = self.extra
return item
def __eq__(self, other):
if not isinstance(other, microdata.Item):
return False
return (self.itemid == other.itemid and
self.itemtype == other.itemtype and
self.props == other.props and
self.extra == getattr(other, 'extra', {}))
def | (self):
return '%s(%r, %r, props=%r, extra=%r)' % (
self.__class__.__name__,
' '.join(uri.string for uri in self.itemtype),
self.itemid,
self.props,
self.extra)
| __repr__ | identifier_name |
cursor.rs | // Copyright 2013-2015, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use glib::translate::*;
use display::Display;
use pixbuf::Pixbuf;
use ffi;
pub type Type = ffi::GdkCursorType;
glib_wrapper! {
pub struct Cursor(Object<ffi::GdkCursor>);
match fn {
get_type => || ffi::gdk_cursor_get_type(),
}
}
impl Cursor {
pub fn new(cursor_type: Type) -> Cursor {
assert_initialized_main_thread!();
unsafe { from_glib_full(ffi::gdk_cursor_new(cursor_type)) }
}
pub fn | (display: &Display, pixbuf: &Pixbuf, x: i32, y: i32) -> Cursor {
skip_assert_initialized!();
unsafe {
from_glib_full(
ffi::gdk_cursor_new_from_pixbuf(display.to_glib_none().0,
pixbuf.to_glib_none().0, x, y))
}
}
pub fn new_from_name(display: &Display, name: &str) -> Option<Cursor> {
skip_assert_initialized!();
unsafe {
from_glib_full(ffi::gdk_cursor_new_from_name(display.to_glib_none().0,
name.to_glib_none().0))
}
}
pub fn new_for_display(display: &Display, cursor_type: Type) -> Cursor {
skip_assert_initialized!();
unsafe {
from_glib_full(ffi::gdk_cursor_new_for_display(display.to_glib_none().0, cursor_type))
}
}
pub fn get_display(&self) -> Display {
unsafe { from_glib_none(ffi::gdk_cursor_get_display(self.to_glib_none().0)) }
}
pub fn get_image(&self) -> Option<Pixbuf> {
unsafe { from_glib_full(ffi::gdk_cursor_get_image(self.to_glib_none().0)) }
}
pub fn get_cursor_type(&self) -> Type {
unsafe { ffi::gdk_cursor_get_cursor_type(self.to_glib_none().0) }
}
}
| new_from_pixbuf | identifier_name |
cursor.rs | // Copyright 2013-2015, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use glib::translate::*;
use display::Display;
use pixbuf::Pixbuf;
use ffi;
pub type Type = ffi::GdkCursorType;
glib_wrapper! {
pub struct Cursor(Object<ffi::GdkCursor>);
match fn {
get_type => || ffi::gdk_cursor_get_type(),
}
}
impl Cursor { | }
pub fn new_from_pixbuf(display: &Display, pixbuf: &Pixbuf, x: i32, y: i32) -> Cursor {
skip_assert_initialized!();
unsafe {
from_glib_full(
ffi::gdk_cursor_new_from_pixbuf(display.to_glib_none().0,
pixbuf.to_glib_none().0, x, y))
}
}
pub fn new_from_name(display: &Display, name: &str) -> Option<Cursor> {
skip_assert_initialized!();
unsafe {
from_glib_full(ffi::gdk_cursor_new_from_name(display.to_glib_none().0,
name.to_glib_none().0))
}
}
pub fn new_for_display(display: &Display, cursor_type: Type) -> Cursor {
skip_assert_initialized!();
unsafe {
from_glib_full(ffi::gdk_cursor_new_for_display(display.to_glib_none().0, cursor_type))
}
}
pub fn get_display(&self) -> Display {
unsafe { from_glib_none(ffi::gdk_cursor_get_display(self.to_glib_none().0)) }
}
pub fn get_image(&self) -> Option<Pixbuf> {
unsafe { from_glib_full(ffi::gdk_cursor_get_image(self.to_glib_none().0)) }
}
pub fn get_cursor_type(&self) -> Type {
unsafe { ffi::gdk_cursor_get_cursor_type(self.to_glib_none().0) }
}
} | pub fn new(cursor_type: Type) -> Cursor {
assert_initialized_main_thread!();
unsafe { from_glib_full(ffi::gdk_cursor_new(cursor_type)) } | random_line_split |
cursor.rs | // Copyright 2013-2015, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use glib::translate::*;
use display::Display;
use pixbuf::Pixbuf;
use ffi;
pub type Type = ffi::GdkCursorType;
glib_wrapper! {
pub struct Cursor(Object<ffi::GdkCursor>);
match fn {
get_type => || ffi::gdk_cursor_get_type(),
}
}
impl Cursor {
pub fn new(cursor_type: Type) -> Cursor {
assert_initialized_main_thread!();
unsafe { from_glib_full(ffi::gdk_cursor_new(cursor_type)) }
}
pub fn new_from_pixbuf(display: &Display, pixbuf: &Pixbuf, x: i32, y: i32) -> Cursor {
skip_assert_initialized!();
unsafe {
from_glib_full(
ffi::gdk_cursor_new_from_pixbuf(display.to_glib_none().0,
pixbuf.to_glib_none().0, x, y))
}
}
pub fn new_from_name(display: &Display, name: &str) -> Option<Cursor> {
skip_assert_initialized!();
unsafe {
from_glib_full(ffi::gdk_cursor_new_from_name(display.to_glib_none().0,
name.to_glib_none().0))
}
}
pub fn new_for_display(display: &Display, cursor_type: Type) -> Cursor {
skip_assert_initialized!();
unsafe {
from_glib_full(ffi::gdk_cursor_new_for_display(display.to_glib_none().0, cursor_type))
}
}
pub fn get_display(&self) -> Display {
unsafe { from_glib_none(ffi::gdk_cursor_get_display(self.to_glib_none().0)) }
}
pub fn get_image(&self) -> Option<Pixbuf> {
unsafe { from_glib_full(ffi::gdk_cursor_get_image(self.to_glib_none().0)) }
}
pub fn get_cursor_type(&self) -> Type |
}
| {
unsafe { ffi::gdk_cursor_get_cursor_type(self.to_glib_none().0) }
} | identifier_body |
issue-59523-on-implemented-is-not-unused.rs | // We should not see the unused_attributes lint fire for
// rustc_on_unimplemented, but with this bug we are seeing it fire (on
// subsequent runs) if incremental compilation is enabled.
// revisions: cfail1 cfail2
// build-pass (FIXME(62277): could be check-pass?)
#![feature(rustc_attrs)]
#![deny(unused_attributes)]
#[rustc_on_unimplemented = "invalid"]
trait Index<Idx: ?Sized> {
type Output: ?Sized;
fn index(&self, index: Idx) -> &Self::Output;
}
#[rustc_on_unimplemented = "a usize is required to index into a slice"]
impl Index<usize> for [i32] {
type Output = i32;
fn | (&self, index: usize) -> &i32 {
&self[index]
}
}
fn main() {
Index::<usize>::index(&[1, 2, 3] as &[i32], 2);
}
| index | identifier_name |
issue-59523-on-implemented-is-not-unused.rs | // We should not see the unused_attributes lint fire for
// rustc_on_unimplemented, but with this bug we are seeing it fire (on
// subsequent runs) if incremental compilation is enabled.
// revisions: cfail1 cfail2
// build-pass (FIXME(62277): could be check-pass?)
#![feature(rustc_attrs)]
#![deny(unused_attributes)]
#[rustc_on_unimplemented = "invalid"]
trait Index<Idx: ?Sized> {
type Output: ?Sized;
fn index(&self, index: Idx) -> &Self::Output;
}
#[rustc_on_unimplemented = "a usize is required to index into a slice"]
impl Index<usize> for [i32] {
type Output = i32;
fn index(&self, index: usize) -> &i32 {
&self[index]
}
}
fn main() | {
Index::<usize>::index(&[1, 2, 3] as &[i32], 2);
} | identifier_body | |
bench.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::{Parser, SourcePosition};
use parking_lot::RwLock;
use rayon;
use servo_url::ServoUrl;
use std::sync::Arc;
use style::error_reporting::ParseErrorReporter;
use style::media_queries::MediaList;
use style::parser::ParserContextExtraData;
use style::properties::{longhands, Importance, PropertyDeclaration, PropertyDeclarationBlock};
use style::rule_tree::{CascadeLevel, RuleTree, StrongRuleNode, StyleSource};
use style::stylesheets::{Origin, Stylesheet, CssRule};
use test::{self, Bencher};
struct ErrorringErrorReporter;
impl ParseErrorReporter for ErrorringErrorReporter {
fn report_error(&self, _input: &mut Parser, position: SourcePosition, message: &str,
url: &ServoUrl) {
panic!("CSS error: {}\t\n{:?} {}", url.as_str(), position, message);
}
}
struct AutoGCRuleTree<'a>(&'a RuleTree);
impl<'a> AutoGCRuleTree<'a> {
fn new(r: &'a RuleTree) -> Self {
AutoGCRuleTree(r)
}
}
impl<'a> Drop for AutoGCRuleTree<'a> {
fn drop(&mut self) {
unsafe { self.0.gc() }
}
}
fn parse_rules(css: &str) -> Vec<(StyleSource, CascadeLevel)> {
let s = Stylesheet::from_str(css,
ServoUrl::parse("http://localhost").unwrap(),
Origin::Author,
MediaList {
media_queries: vec![],
},
None,
&ErrorringErrorReporter,
ParserContextExtraData {});
let rules = s.rules.read();
rules.0.iter().filter_map(|rule| {
match *rule {
CssRule::Style(ref style_rule) => Some(style_rule),
_ => None,
}
}).cloned().map(StyleSource::Style).map(|s| {
(s, CascadeLevel::UserNormal)
}).collect()
}
fn test_insertion(rule_tree: &RuleTree, rules: Vec<(StyleSource, CascadeLevel)>) -> StrongRuleNode {
rule_tree.insert_ordered_rules(rules.into_iter())
}
fn test_insertion_style_attribute(rule_tree: &RuleTree, rules: &[(StyleSource, CascadeLevel)]) -> StrongRuleNode {
let mut rules = rules.to_vec();
rules.push((StyleSource::Declarations(Arc::new(RwLock::new(PropertyDeclarationBlock::with_one(
PropertyDeclaration::Display(
longhands::display::SpecifiedValue::block),
Importance::Normal
)))), CascadeLevel::UserNormal));
test_insertion(rule_tree, rules)
}
#[bench]
fn bench_insertion_basic(b: &mut Bencher) {
let r = RuleTree::new();
let rules_matched = parse_rules(
".foo { width: 200px; } \
.bar { height: 500px; } \
.baz { display: block; }");
b.iter(|| {
let _gc = AutoGCRuleTree::new(&r);
for _ in 0..(4000 + 400) {
test::black_box(test_insertion(&r, rules_matched.clone()));
}
})
}
#[bench]
fn bench_insertion_basic_per_element(b: &mut Bencher) {
let r = RuleTree::new();
let rules_matched = parse_rules(
".foo { width: 200px; } \
.bar { height: 500px; } \
.baz { display: block; }");
b.iter(|| {
let _gc = AutoGCRuleTree::new(&r);
test::black_box(test_insertion(&r, rules_matched.clone()));
});
}
#[bench]
fn bench_expensive_insertion(b: &mut Bencher) {
let r = RuleTree::new();
// This test case tests a case where you style a bunch of siblings
// matching the same rules, with a different style attribute each
// one.
let rules_matched = parse_rules(
".foo { width: 200px; } \
.bar { height: 500px; } \
.baz { display: block; }");
b.iter(|| {
let _gc = AutoGCRuleTree::new(&r);
for _ in 0..(4000 + 400) {
test::black_box(test_insertion_style_attribute(&r, &rules_matched));
}
});
}
#[bench]
fn bench_insertion_basic_parallel(b: &mut Bencher) {
let r = RuleTree::new();
let rules_matched = parse_rules(
".foo { width: 200px; } \
.bar { height: 500px; } \
.baz { display: block; }");
b.iter(|| {
let _gc = AutoGCRuleTree::new(&r);
rayon::scope(|s| {
for _ in 0..4 {
s.spawn(|s| {
for _ in 0..1000 {
test::black_box(test_insertion(&r,
rules_matched.clone()));
}
s.spawn(|_| {
for _ in 0..100 {
test::black_box(test_insertion(&r,
rules_matched.clone()));
}
})
})
}
});
});
}
#[bench]
fn bench_expensive_insersion_parallel(b: &mut Bencher) {
let r = RuleTree::new();
let rules_matched = parse_rules(
".foo { width: 200px; } \
.bar { height: 500px; } \
.baz { display: block; }");
b.iter(|| {
let _gc = AutoGCRuleTree::new(&r);
rayon::scope(|s| {
for _ in 0..4 {
s.spawn(|s| {
for _ in 0..1000 { | test::black_box(test_insertion_style_attribute(&r,
&rules_matched));
}
})
})
}
});
});
} | test::black_box(test_insertion_style_attribute(&r,
&rules_matched));
}
s.spawn(|_| {
for _ in 0..100 { | random_line_split |
bench.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::{Parser, SourcePosition};
use parking_lot::RwLock;
use rayon;
use servo_url::ServoUrl;
use std::sync::Arc;
use style::error_reporting::ParseErrorReporter;
use style::media_queries::MediaList;
use style::parser::ParserContextExtraData;
use style::properties::{longhands, Importance, PropertyDeclaration, PropertyDeclarationBlock};
use style::rule_tree::{CascadeLevel, RuleTree, StrongRuleNode, StyleSource};
use style::stylesheets::{Origin, Stylesheet, CssRule};
use test::{self, Bencher};
struct ErrorringErrorReporter;
impl ParseErrorReporter for ErrorringErrorReporter {
fn report_error(&self, _input: &mut Parser, position: SourcePosition, message: &str,
url: &ServoUrl) {
panic!("CSS error: {}\t\n{:?} {}", url.as_str(), position, message);
}
}
struct AutoGCRuleTree<'a>(&'a RuleTree);
impl<'a> AutoGCRuleTree<'a> {
fn new(r: &'a RuleTree) -> Self {
AutoGCRuleTree(r)
}
}
impl<'a> Drop for AutoGCRuleTree<'a> {
fn drop(&mut self) {
unsafe { self.0.gc() }
}
}
fn parse_rules(css: &str) -> Vec<(StyleSource, CascadeLevel)> |
fn test_insertion(rule_tree: &RuleTree, rules: Vec<(StyleSource, CascadeLevel)>) -> StrongRuleNode {
rule_tree.insert_ordered_rules(rules.into_iter())
}
fn test_insertion_style_attribute(rule_tree: &RuleTree, rules: &[(StyleSource, CascadeLevel)]) -> StrongRuleNode {
let mut rules = rules.to_vec();
rules.push((StyleSource::Declarations(Arc::new(RwLock::new(PropertyDeclarationBlock::with_one(
PropertyDeclaration::Display(
longhands::display::SpecifiedValue::block),
Importance::Normal
)))), CascadeLevel::UserNormal));
test_insertion(rule_tree, rules)
}
#[bench]
fn bench_insertion_basic(b: &mut Bencher) {
let r = RuleTree::new();
let rules_matched = parse_rules(
".foo { width: 200px; } \
.bar { height: 500px; } \
.baz { display: block; }");
b.iter(|| {
let _gc = AutoGCRuleTree::new(&r);
for _ in 0..(4000 + 400) {
test::black_box(test_insertion(&r, rules_matched.clone()));
}
})
}
#[bench]
fn bench_insertion_basic_per_element(b: &mut Bencher) {
let r = RuleTree::new();
let rules_matched = parse_rules(
".foo { width: 200px; } \
.bar { height: 500px; } \
.baz { display: block; }");
b.iter(|| {
let _gc = AutoGCRuleTree::new(&r);
test::black_box(test_insertion(&r, rules_matched.clone()));
});
}
#[bench]
fn bench_expensive_insertion(b: &mut Bencher) {
let r = RuleTree::new();
// This test case tests a case where you style a bunch of siblings
// matching the same rules, with a different style attribute each
// one.
let rules_matched = parse_rules(
".foo { width: 200px; } \
.bar { height: 500px; } \
.baz { display: block; }");
b.iter(|| {
let _gc = AutoGCRuleTree::new(&r);
for _ in 0..(4000 + 400) {
test::black_box(test_insertion_style_attribute(&r, &rules_matched));
}
});
}
#[bench]
fn bench_insertion_basic_parallel(b: &mut Bencher) {
let r = RuleTree::new();
let rules_matched = parse_rules(
".foo { width: 200px; } \
.bar { height: 500px; } \
.baz { display: block; }");
b.iter(|| {
let _gc = AutoGCRuleTree::new(&r);
rayon::scope(|s| {
for _ in 0..4 {
s.spawn(|s| {
for _ in 0..1000 {
test::black_box(test_insertion(&r,
rules_matched.clone()));
}
s.spawn(|_| {
for _ in 0..100 {
test::black_box(test_insertion(&r,
rules_matched.clone()));
}
})
})
}
});
});
}
#[bench]
fn bench_expensive_insersion_parallel(b: &mut Bencher) {
let r = RuleTree::new();
let rules_matched = parse_rules(
".foo { width: 200px; } \
.bar { height: 500px; } \
.baz { display: block; }");
b.iter(|| {
let _gc = AutoGCRuleTree::new(&r);
rayon::scope(|s| {
for _ in 0..4 {
s.spawn(|s| {
for _ in 0..1000 {
test::black_box(test_insertion_style_attribute(&r,
&rules_matched));
}
s.spawn(|_| {
for _ in 0..100 {
test::black_box(test_insertion_style_attribute(&r,
&rules_matched));
}
})
})
}
});
});
}
| {
let s = Stylesheet::from_str(css,
ServoUrl::parse("http://localhost").unwrap(),
Origin::Author,
MediaList {
media_queries: vec![],
},
None,
&ErrorringErrorReporter,
ParserContextExtraData {});
let rules = s.rules.read();
rules.0.iter().filter_map(|rule| {
match *rule {
CssRule::Style(ref style_rule) => Some(style_rule),
_ => None,
}
}).cloned().map(StyleSource::Style).map(|s| {
(s, CascadeLevel::UserNormal)
}).collect()
} | identifier_body |
bench.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::{Parser, SourcePosition};
use parking_lot::RwLock;
use rayon;
use servo_url::ServoUrl;
use std::sync::Arc;
use style::error_reporting::ParseErrorReporter;
use style::media_queries::MediaList;
use style::parser::ParserContextExtraData;
use style::properties::{longhands, Importance, PropertyDeclaration, PropertyDeclarationBlock};
use style::rule_tree::{CascadeLevel, RuleTree, StrongRuleNode, StyleSource};
use style::stylesheets::{Origin, Stylesheet, CssRule};
use test::{self, Bencher};
struct ErrorringErrorReporter;
impl ParseErrorReporter for ErrorringErrorReporter {
fn report_error(&self, _input: &mut Parser, position: SourcePosition, message: &str,
url: &ServoUrl) {
panic!("CSS error: {}\t\n{:?} {}", url.as_str(), position, message);
}
}
struct AutoGCRuleTree<'a>(&'a RuleTree);
impl<'a> AutoGCRuleTree<'a> {
fn new(r: &'a RuleTree) -> Self {
AutoGCRuleTree(r)
}
}
impl<'a> Drop for AutoGCRuleTree<'a> {
fn drop(&mut self) {
unsafe { self.0.gc() }
}
}
fn parse_rules(css: &str) -> Vec<(StyleSource, CascadeLevel)> {
let s = Stylesheet::from_str(css,
ServoUrl::parse("http://localhost").unwrap(),
Origin::Author,
MediaList {
media_queries: vec![],
},
None,
&ErrorringErrorReporter,
ParserContextExtraData {});
let rules = s.rules.read();
rules.0.iter().filter_map(|rule| {
match *rule {
CssRule::Style(ref style_rule) => Some(style_rule),
_ => None,
}
}).cloned().map(StyleSource::Style).map(|s| {
(s, CascadeLevel::UserNormal)
}).collect()
}
fn test_insertion(rule_tree: &RuleTree, rules: Vec<(StyleSource, CascadeLevel)>) -> StrongRuleNode {
rule_tree.insert_ordered_rules(rules.into_iter())
}
fn test_insertion_style_attribute(rule_tree: &RuleTree, rules: &[(StyleSource, CascadeLevel)]) -> StrongRuleNode {
let mut rules = rules.to_vec();
rules.push((StyleSource::Declarations(Arc::new(RwLock::new(PropertyDeclarationBlock::with_one(
PropertyDeclaration::Display(
longhands::display::SpecifiedValue::block),
Importance::Normal
)))), CascadeLevel::UserNormal));
test_insertion(rule_tree, rules)
}
#[bench]
fn bench_insertion_basic(b: &mut Bencher) {
let r = RuleTree::new();
let rules_matched = parse_rules(
".foo { width: 200px; } \
.bar { height: 500px; } \
.baz { display: block; }");
b.iter(|| {
let _gc = AutoGCRuleTree::new(&r);
for _ in 0..(4000 + 400) {
test::black_box(test_insertion(&r, rules_matched.clone()));
}
})
}
#[bench]
fn bench_insertion_basic_per_element(b: &mut Bencher) {
let r = RuleTree::new();
let rules_matched = parse_rules(
".foo { width: 200px; } \
.bar { height: 500px; } \
.baz { display: block; }");
b.iter(|| {
let _gc = AutoGCRuleTree::new(&r);
test::black_box(test_insertion(&r, rules_matched.clone()));
});
}
#[bench]
fn | (b: &mut Bencher) {
let r = RuleTree::new();
// This test case tests a case where you style a bunch of siblings
// matching the same rules, with a different style attribute each
// one.
let rules_matched = parse_rules(
".foo { width: 200px; } \
.bar { height: 500px; } \
.baz { display: block; }");
b.iter(|| {
let _gc = AutoGCRuleTree::new(&r);
for _ in 0..(4000 + 400) {
test::black_box(test_insertion_style_attribute(&r, &rules_matched));
}
});
}
#[bench]
fn bench_insertion_basic_parallel(b: &mut Bencher) {
let r = RuleTree::new();
let rules_matched = parse_rules(
".foo { width: 200px; } \
.bar { height: 500px; } \
.baz { display: block; }");
b.iter(|| {
let _gc = AutoGCRuleTree::new(&r);
rayon::scope(|s| {
for _ in 0..4 {
s.spawn(|s| {
for _ in 0..1000 {
test::black_box(test_insertion(&r,
rules_matched.clone()));
}
s.spawn(|_| {
for _ in 0..100 {
test::black_box(test_insertion(&r,
rules_matched.clone()));
}
})
})
}
});
});
}
#[bench]
fn bench_expensive_insersion_parallel(b: &mut Bencher) {
let r = RuleTree::new();
let rules_matched = parse_rules(
".foo { width: 200px; } \
.bar { height: 500px; } \
.baz { display: block; }");
b.iter(|| {
let _gc = AutoGCRuleTree::new(&r);
rayon::scope(|s| {
for _ in 0..4 {
s.spawn(|s| {
for _ in 0..1000 {
test::black_box(test_insertion_style_attribute(&r,
&rules_matched));
}
s.spawn(|_| {
for _ in 0..100 {
test::black_box(test_insertion_style_attribute(&r,
&rules_matched));
}
})
})
}
});
});
}
| bench_expensive_insertion | identifier_name |
deviceMotion.d.ts | // Type definitions for ngCordova device motion plugin
// Project: https://github.com/driftyco/ng-cordova
// Definitions by: Kapil Sachdeva <https://github.com/ksachdeva>
// Definitions: https://github.com/ksachdeva/DefinitelyTyped
declare module ngCordova {
export interface IDeviceMotionAcceleration {
x: number;
y: number;
z: number;
timestamp: number;
}
export interface IDeviceMotionAccelerometerOptions {
frequency: number;
}
|
export interface IDeviceMotionService {
getCurrentAcceleration(): ng.IPromise<IDeviceMotionAcceleration>;
watchAcceleration(options: IDeviceMotionAccelerometerOptions): IDeviceMotionWatchPromise;
clearWatch(watchId: number): void;
}
} | export interface IDeviceMotionWatchPromise extends ng.IPromise<IDeviceMotionAcceleration> {
watchID: number;
cancel: () => void;
clearWatch: (watchId?: number) => void;
} | random_line_split |
calc_velocity.py | # -*- coding: utf-8 -*-
"""
Created on Wed Apr 9 15:39:28 2014
@author: ibackus
"""
import numpy as np
import pynbody
SimArray = pynbody.array.SimArray
import isaac
import subprocess
import os
import glob
import time
def v_xy(f, param, changbin=None, nr=50, min_per_bin=100):
"""
Attempts to calculate the circular velocities for particles in a thin
(not flat) keplerian disk. Requires ChaNGa
**ARGUMENTS**
f : tipsy snapshot
For a gaseous disk
param : dict
a dictionary containing params for changa. (see isaac.configparser)
changbin : str (OPTIONAL)
If set, should be the full path to the ChaNGa executable. If None,
an attempt to find ChaNGa is made
nr : int (optional)
number of radial bins to use when averaging over accelerations
min_per_bin : int (optional)
The minimum number of particles to be in each bin. If there are too
few particles in a bin, it is merged with an adjacent bin. Thus,
actual number of radial bins may be less than nr.
**RETURNS**
vel : SimArray
An N by 3 SimArray of gas particle velocities.
"""
if changbin is None:
# Try to find the ChaNGa binary full path
changbin = os.popen('which ChaNGa').read().strip()
# Load stuff from the snapshot
x = f.g['x']
y = f.g['y']
z = f.g['z']
r = f.g['rxy']
vel0 = f.g['vel'].copy()
# Remove units from all quantities
r = isaac.strip_units(r)
x = isaac.strip_units(x)
y = isaac.strip_units(y)
z = isaac.strip_units(z)
# Temporary filenames for running ChaNGa
f_prefix = str(np.random.randint(0, 2**32))
f_name = f_prefix + '.std'
p_name = f_prefix + '.param'
# Update parameters
p_temp = param.copy()
p_temp['achInFile'] = f_name
p_temp['achOutName'] = f_prefix
if 'dDumpFrameTime' in p_temp: p_temp.pop('dDumpFrameTime')
if 'dDumpFrameStep' in p_temp: p_temp.pop('dDumpFrameStep')
# --------------------------------------------
# Estimate velocity from gravity only
# --------------------------------------------
# Note, accelerations due to gravity are calculated twice to be extra careful
# This is so that any velocity dependent effects are properly accounted for
# (although, ideally, there should be none)
# The second calculation uses the updated velocities from the first
for iGrav in range(2):
# Save files
f.write(filename=f_name, fmt = pynbody.tipsy.TipsySnap)
isaac.configsave(p_temp, p_name, ftype='param')
# Run ChaNGa, only calculating gravity
command = 'charmrun ++local ' + changbin + ' -gas -n 0 ' + p_name
p = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
while p.poll() is None:
time.sleep(0.1)
# Load accelerations
acc_name = f_prefix + '.000000.acc2'
a = isaac.load_acc(acc_name)
# Clean-up
for fname in glob.glob(f_prefix + '*'): os.remove(fname)
# If a is not a vector, calculate radial acceleration. Otherwise, assume
# a is the radial acceleration
a_r = a[:,0]*x/r + a[:,1]*y/r
# Make sure the units are correct then remove them
a_r = isaac.match_units(a_r, a)[0]
a_r = isaac.strip_units(a_r)
# Calculate cos(theta) where theta is angle above x-y plane
cos = r/np.sqrt(r**2 + z**2)
ar2 = a_r*r**2
# Bin the data
r_edges = np.linspace(r.min(), (1+np.spacing(2))*r.max(), nr + 1)
ind, r_edges = isaac.digitize_threshold(r, min_per_bin, r_edges)
ind -= 1 | nr = len(r_edges) - 1
r_bins, ar2_mean, err = isaac.binned_mean(r, ar2, binedges=r_edges, \
weighted_bins=True)
# Fit lines to ar2 vs cos for each radial bin
m = np.zeros(nr)
b = np.zeros(nr)
for i in range(nr):
mask = (ind == i)
p = np.polyfit(cos[mask], ar2[mask], 1)
m[i] = p[0]
b[i] = p[1]
# Interpolate the line fits
m_spline = isaac.extrap1d(r_bins, m)
b_spline = isaac.extrap1d(r_bins, b)
# Calculate circular velocity
ar2_calc = m_spline(r)*cos + b_spline(r)
v_calc = np.sqrt(abs(ar2_calc)/r)
vel = f.g['vel'].copy()
v_calc = isaac.match_units(v_calc,vel)[0]
vel[:,0] = -v_calc*y/r
vel[:,1] = v_calc*x/r
# Assign to f
f.g['vel'] = vel
# --------------------------------------------
# Estimate pressure/gas dynamics accelerations
# --------------------------------------------
a_grav = a
ar2_calc_grav = ar2_calc
# Save files
f.write(filename=f_name, fmt = pynbody.tipsy.TipsySnap)
isaac.configsave(p_temp, p_name, ftype='param')
# Run ChaNGa, including SPH
command = 'charmrun ++local ' + changbin + ' +gas -n 0 ' + p_name
p = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
while p.poll() is None:
time.sleep(0.1)
# Load accelerations
acc_name = f_prefix + '.000000.acc2'
a_total = isaac.load_acc(acc_name)
# Clean-up
for fname in glob.glob(f_prefix + '*'): os.remove(fname)
# Estimate the accelerations due to pressure gradients/gas dynamics
a_gas = a_total - a_grav
ar_gas = a_gas[:,0]*x/r + a_gas[:,1]*y/r
ar_gas = isaac.strip_units(ar_gas)
ar2_gas = ar_gas*r**2
logr_bins, ratio, err = isaac.binned_mean(np.log(r), ar2_gas/ar2_calc_grav, nbins=nr,\
weighted_bins=True)
r_bins = np.exp(logr_bins)
ratio_spline = isaac.extrap1d(r_bins, ratio)
ar2_calc = ar2_calc_grav*(1 + ratio_spline(r))
a_calc = ar2_calc/r**2
v = np.sqrt(r*abs(a_calc))
v = isaac.match_units(v, vel0.units)[0]
vel = vel0.copy()
vel[:,0] = -v*y/r
vel[:,1] = v*x/r
# more cleanup
f.g['vel'] = vel0
return vel | random_line_split | |
calc_velocity.py | # -*- coding: utf-8 -*-
"""
Created on Wed Apr 9 15:39:28 2014
@author: ibackus
"""
import numpy as np
import pynbody
SimArray = pynbody.array.SimArray
import isaac
import subprocess
import os
import glob
import time
def v_xy(f, param, changbin=None, nr=50, min_per_bin=100):
| """
Attempts to calculate the circular velocities for particles in a thin
(not flat) keplerian disk. Requires ChaNGa
**ARGUMENTS**
f : tipsy snapshot
For a gaseous disk
param : dict
a dictionary containing params for changa. (see isaac.configparser)
changbin : str (OPTIONAL)
If set, should be the full path to the ChaNGa executable. If None,
an attempt to find ChaNGa is made
nr : int (optional)
number of radial bins to use when averaging over accelerations
min_per_bin : int (optional)
The minimum number of particles to be in each bin. If there are too
few particles in a bin, it is merged with an adjacent bin. Thus,
actual number of radial bins may be less than nr.
**RETURNS**
vel : SimArray
An N by 3 SimArray of gas particle velocities.
"""
if changbin is None:
# Try to find the ChaNGa binary full path
changbin = os.popen('which ChaNGa').read().strip()
# Load stuff from the snapshot
x = f.g['x']
y = f.g['y']
z = f.g['z']
r = f.g['rxy']
vel0 = f.g['vel'].copy()
# Remove units from all quantities
r = isaac.strip_units(r)
x = isaac.strip_units(x)
y = isaac.strip_units(y)
z = isaac.strip_units(z)
# Temporary filenames for running ChaNGa
f_prefix = str(np.random.randint(0, 2**32))
f_name = f_prefix + '.std'
p_name = f_prefix + '.param'
# Update parameters
p_temp = param.copy()
p_temp['achInFile'] = f_name
p_temp['achOutName'] = f_prefix
if 'dDumpFrameTime' in p_temp: p_temp.pop('dDumpFrameTime')
if 'dDumpFrameStep' in p_temp: p_temp.pop('dDumpFrameStep')
# --------------------------------------------
# Estimate velocity from gravity only
# --------------------------------------------
# Note, accelerations due to gravity are calculated twice to be extra careful
# This is so that any velocity dependent effects are properly accounted for
# (although, ideally, there should be none)
# The second calculation uses the updated velocities from the first
for iGrav in range(2):
# Save files
f.write(filename=f_name, fmt = pynbody.tipsy.TipsySnap)
isaac.configsave(p_temp, p_name, ftype='param')
# Run ChaNGa, only calculating gravity
command = 'charmrun ++local ' + changbin + ' -gas -n 0 ' + p_name
p = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
while p.poll() is None:
time.sleep(0.1)
# Load accelerations
acc_name = f_prefix + '.000000.acc2'
a = isaac.load_acc(acc_name)
# Clean-up
for fname in glob.glob(f_prefix + '*'): os.remove(fname)
# If a is not a vector, calculate radial acceleration. Otherwise, assume
# a is the radial acceleration
a_r = a[:,0]*x/r + a[:,1]*y/r
# Make sure the units are correct then remove them
a_r = isaac.match_units(a_r, a)[0]
a_r = isaac.strip_units(a_r)
# Calculate cos(theta) where theta is angle above x-y plane
cos = r/np.sqrt(r**2 + z**2)
ar2 = a_r*r**2
# Bin the data
r_edges = np.linspace(r.min(), (1+np.spacing(2))*r.max(), nr + 1)
ind, r_edges = isaac.digitize_threshold(r, min_per_bin, r_edges)
ind -= 1
nr = len(r_edges) - 1
r_bins, ar2_mean, err = isaac.binned_mean(r, ar2, binedges=r_edges, \
weighted_bins=True)
# Fit lines to ar2 vs cos for each radial bin
m = np.zeros(nr)
b = np.zeros(nr)
for i in range(nr):
mask = (ind == i)
p = np.polyfit(cos[mask], ar2[mask], 1)
m[i] = p[0]
b[i] = p[1]
# Interpolate the line fits
m_spline = isaac.extrap1d(r_bins, m)
b_spline = isaac.extrap1d(r_bins, b)
# Calculate circular velocity
ar2_calc = m_spline(r)*cos + b_spline(r)
v_calc = np.sqrt(abs(ar2_calc)/r)
vel = f.g['vel'].copy()
v_calc = isaac.match_units(v_calc,vel)[0]
vel[:,0] = -v_calc*y/r
vel[:,1] = v_calc*x/r
# Assign to f
f.g['vel'] = vel
# --------------------------------------------
# Estimate pressure/gas dynamics accelerations
# --------------------------------------------
a_grav = a
ar2_calc_grav = ar2_calc
# Save files
f.write(filename=f_name, fmt = pynbody.tipsy.TipsySnap)
isaac.configsave(p_temp, p_name, ftype='param')
# Run ChaNGa, including SPH
command = 'charmrun ++local ' + changbin + ' +gas -n 0 ' + p_name
p = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
while p.poll() is None:
time.sleep(0.1)
# Load accelerations
acc_name = f_prefix + '.000000.acc2'
a_total = isaac.load_acc(acc_name)
# Clean-up
for fname in glob.glob(f_prefix + '*'): os.remove(fname)
# Estimate the accelerations due to pressure gradients/gas dynamics
a_gas = a_total - a_grav
ar_gas = a_gas[:,0]*x/r + a_gas[:,1]*y/r
ar_gas = isaac.strip_units(ar_gas)
ar2_gas = ar_gas*r**2
logr_bins, ratio, err = isaac.binned_mean(np.log(r), ar2_gas/ar2_calc_grav, nbins=nr,\
weighted_bins=True)
r_bins = np.exp(logr_bins)
ratio_spline = isaac.extrap1d(r_bins, ratio)
ar2_calc = ar2_calc_grav*(1 + ratio_spline(r))
a_calc = ar2_calc/r**2
v = np.sqrt(r*abs(a_calc))
v = isaac.match_units(v, vel0.units)[0]
vel = vel0.copy()
vel[:,0] = -v*y/r
vel[:,1] = v*x/r
# more cleanup
f.g['vel'] = vel0
return vel | identifier_body | |
calc_velocity.py | # -*- coding: utf-8 -*-
"""
Created on Wed Apr 9 15:39:28 2014
@author: ibackus
"""
import numpy as np
import pynbody
SimArray = pynbody.array.SimArray
import isaac
import subprocess
import os
import glob
import time
def | (f, param, changbin=None, nr=50, min_per_bin=100):
"""
Attempts to calculate the circular velocities for particles in a thin
(not flat) keplerian disk. Requires ChaNGa
**ARGUMENTS**
f : tipsy snapshot
For a gaseous disk
param : dict
a dictionary containing params for changa. (see isaac.configparser)
changbin : str (OPTIONAL)
If set, should be the full path to the ChaNGa executable. If None,
an attempt to find ChaNGa is made
nr : int (optional)
number of radial bins to use when averaging over accelerations
min_per_bin : int (optional)
The minimum number of particles to be in each bin. If there are too
few particles in a bin, it is merged with an adjacent bin. Thus,
actual number of radial bins may be less than nr.
**RETURNS**
vel : SimArray
An N by 3 SimArray of gas particle velocities.
"""
if changbin is None:
# Try to find the ChaNGa binary full path
changbin = os.popen('which ChaNGa').read().strip()
# Load stuff from the snapshot
x = f.g['x']
y = f.g['y']
z = f.g['z']
r = f.g['rxy']
vel0 = f.g['vel'].copy()
# Remove units from all quantities
r = isaac.strip_units(r)
x = isaac.strip_units(x)
y = isaac.strip_units(y)
z = isaac.strip_units(z)
# Temporary filenames for running ChaNGa
f_prefix = str(np.random.randint(0, 2**32))
f_name = f_prefix + '.std'
p_name = f_prefix + '.param'
# Update parameters
p_temp = param.copy()
p_temp['achInFile'] = f_name
p_temp['achOutName'] = f_prefix
if 'dDumpFrameTime' in p_temp: p_temp.pop('dDumpFrameTime')
if 'dDumpFrameStep' in p_temp: p_temp.pop('dDumpFrameStep')
# --------------------------------------------
# Estimate velocity from gravity only
# --------------------------------------------
# Note, accelerations due to gravity are calculated twice to be extra careful
# This is so that any velocity dependent effects are properly accounted for
# (although, ideally, there should be none)
# The second calculation uses the updated velocities from the first
for iGrav in range(2):
# Save files
f.write(filename=f_name, fmt = pynbody.tipsy.TipsySnap)
isaac.configsave(p_temp, p_name, ftype='param')
# Run ChaNGa, only calculating gravity
command = 'charmrun ++local ' + changbin + ' -gas -n 0 ' + p_name
p = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
while p.poll() is None:
time.sleep(0.1)
# Load accelerations
acc_name = f_prefix + '.000000.acc2'
a = isaac.load_acc(acc_name)
# Clean-up
for fname in glob.glob(f_prefix + '*'): os.remove(fname)
# If a is not a vector, calculate radial acceleration. Otherwise, assume
# a is the radial acceleration
a_r = a[:,0]*x/r + a[:,1]*y/r
# Make sure the units are correct then remove them
a_r = isaac.match_units(a_r, a)[0]
a_r = isaac.strip_units(a_r)
# Calculate cos(theta) where theta is angle above x-y plane
cos = r/np.sqrt(r**2 + z**2)
ar2 = a_r*r**2
# Bin the data
r_edges = np.linspace(r.min(), (1+np.spacing(2))*r.max(), nr + 1)
ind, r_edges = isaac.digitize_threshold(r, min_per_bin, r_edges)
ind -= 1
nr = len(r_edges) - 1
r_bins, ar2_mean, err = isaac.binned_mean(r, ar2, binedges=r_edges, \
weighted_bins=True)
# Fit lines to ar2 vs cos for each radial bin
m = np.zeros(nr)
b = np.zeros(nr)
for i in range(nr):
mask = (ind == i)
p = np.polyfit(cos[mask], ar2[mask], 1)
m[i] = p[0]
b[i] = p[1]
# Interpolate the line fits
m_spline = isaac.extrap1d(r_bins, m)
b_spline = isaac.extrap1d(r_bins, b)
# Calculate circular velocity
ar2_calc = m_spline(r)*cos + b_spline(r)
v_calc = np.sqrt(abs(ar2_calc)/r)
vel = f.g['vel'].copy()
v_calc = isaac.match_units(v_calc,vel)[0]
vel[:,0] = -v_calc*y/r
vel[:,1] = v_calc*x/r
# Assign to f
f.g['vel'] = vel
# --------------------------------------------
# Estimate pressure/gas dynamics accelerations
# --------------------------------------------
a_grav = a
ar2_calc_grav = ar2_calc
# Save files
f.write(filename=f_name, fmt = pynbody.tipsy.TipsySnap)
isaac.configsave(p_temp, p_name, ftype='param')
# Run ChaNGa, including SPH
command = 'charmrun ++local ' + changbin + ' +gas -n 0 ' + p_name
p = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
while p.poll() is None:
time.sleep(0.1)
# Load accelerations
acc_name = f_prefix + '.000000.acc2'
a_total = isaac.load_acc(acc_name)
# Clean-up
for fname in glob.glob(f_prefix + '*'): os.remove(fname)
# Estimate the accelerations due to pressure gradients/gas dynamics
a_gas = a_total - a_grav
ar_gas = a_gas[:,0]*x/r + a_gas[:,1]*y/r
ar_gas = isaac.strip_units(ar_gas)
ar2_gas = ar_gas*r**2
logr_bins, ratio, err = isaac.binned_mean(np.log(r), ar2_gas/ar2_calc_grav, nbins=nr,\
weighted_bins=True)
r_bins = np.exp(logr_bins)
ratio_spline = isaac.extrap1d(r_bins, ratio)
ar2_calc = ar2_calc_grav*(1 + ratio_spline(r))
a_calc = ar2_calc/r**2
v = np.sqrt(r*abs(a_calc))
v = isaac.match_units(v, vel0.units)[0]
vel = vel0.copy()
vel[:,0] = -v*y/r
vel[:,1] = v*x/r
# more cleanup
f.g['vel'] = vel0
return vel | v_xy | identifier_name |
calc_velocity.py | # -*- coding: utf-8 -*-
"""
Created on Wed Apr 9 15:39:28 2014
@author: ibackus
"""
import numpy as np
import pynbody
SimArray = pynbody.array.SimArray
import isaac
import subprocess
import os
import glob
import time
def v_xy(f, param, changbin=None, nr=50, min_per_bin=100):
"""
Attempts to calculate the circular velocities for particles in a thin
(not flat) keplerian disk. Requires ChaNGa
**ARGUMENTS**
f : tipsy snapshot
For a gaseous disk
param : dict
a dictionary containing params for changa. (see isaac.configparser)
changbin : str (OPTIONAL)
If set, should be the full path to the ChaNGa executable. If None,
an attempt to find ChaNGa is made
nr : int (optional)
number of radial bins to use when averaging over accelerations
min_per_bin : int (optional)
The minimum number of particles to be in each bin. If there are too
few particles in a bin, it is merged with an adjacent bin. Thus,
actual number of radial bins may be less than nr.
**RETURNS**
vel : SimArray
An N by 3 SimArray of gas particle velocities.
"""
if changbin is None:
# Try to find the ChaNGa binary full path
|
# Load stuff from the snapshot
x = f.g['x']
y = f.g['y']
z = f.g['z']
r = f.g['rxy']
vel0 = f.g['vel'].copy()
# Remove units from all quantities
r = isaac.strip_units(r)
x = isaac.strip_units(x)
y = isaac.strip_units(y)
z = isaac.strip_units(z)
# Temporary filenames for running ChaNGa
f_prefix = str(np.random.randint(0, 2**32))
f_name = f_prefix + '.std'
p_name = f_prefix + '.param'
# Update parameters
p_temp = param.copy()
p_temp['achInFile'] = f_name
p_temp['achOutName'] = f_prefix
if 'dDumpFrameTime' in p_temp: p_temp.pop('dDumpFrameTime')
if 'dDumpFrameStep' in p_temp: p_temp.pop('dDumpFrameStep')
# --------------------------------------------
# Estimate velocity from gravity only
# --------------------------------------------
# Note, accelerations due to gravity are calculated twice to be extra careful
# This is so that any velocity dependent effects are properly accounted for
# (although, ideally, there should be none)
# The second calculation uses the updated velocities from the first
for iGrav in range(2):
# Save files
f.write(filename=f_name, fmt = pynbody.tipsy.TipsySnap)
isaac.configsave(p_temp, p_name, ftype='param')
# Run ChaNGa, only calculating gravity
command = 'charmrun ++local ' + changbin + ' -gas -n 0 ' + p_name
p = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
while p.poll() is None:
time.sleep(0.1)
# Load accelerations
acc_name = f_prefix + '.000000.acc2'
a = isaac.load_acc(acc_name)
# Clean-up
for fname in glob.glob(f_prefix + '*'): os.remove(fname)
# If a is not a vector, calculate radial acceleration. Otherwise, assume
# a is the radial acceleration
a_r = a[:,0]*x/r + a[:,1]*y/r
# Make sure the units are correct then remove them
a_r = isaac.match_units(a_r, a)[0]
a_r = isaac.strip_units(a_r)
# Calculate cos(theta) where theta is angle above x-y plane
cos = r/np.sqrt(r**2 + z**2)
ar2 = a_r*r**2
# Bin the data
r_edges = np.linspace(r.min(), (1+np.spacing(2))*r.max(), nr + 1)
ind, r_edges = isaac.digitize_threshold(r, min_per_bin, r_edges)
ind -= 1
nr = len(r_edges) - 1
r_bins, ar2_mean, err = isaac.binned_mean(r, ar2, binedges=r_edges, \
weighted_bins=True)
# Fit lines to ar2 vs cos for each radial bin
m = np.zeros(nr)
b = np.zeros(nr)
for i in range(nr):
mask = (ind == i)
p = np.polyfit(cos[mask], ar2[mask], 1)
m[i] = p[0]
b[i] = p[1]
# Interpolate the line fits
m_spline = isaac.extrap1d(r_bins, m)
b_spline = isaac.extrap1d(r_bins, b)
# Calculate circular velocity
ar2_calc = m_spline(r)*cos + b_spline(r)
v_calc = np.sqrt(abs(ar2_calc)/r)
vel = f.g['vel'].copy()
v_calc = isaac.match_units(v_calc,vel)[0]
vel[:,0] = -v_calc*y/r
vel[:,1] = v_calc*x/r
# Assign to f
f.g['vel'] = vel
# --------------------------------------------
# Estimate pressure/gas dynamics accelerations
# --------------------------------------------
a_grav = a
ar2_calc_grav = ar2_calc
# Save files
f.write(filename=f_name, fmt = pynbody.tipsy.TipsySnap)
isaac.configsave(p_temp, p_name, ftype='param')
# Run ChaNGa, including SPH
command = 'charmrun ++local ' + changbin + ' +gas -n 0 ' + p_name
p = subprocess.Popen(command.split(), stdout=subprocess.PIPE)
while p.poll() is None:
time.sleep(0.1)
# Load accelerations
acc_name = f_prefix + '.000000.acc2'
a_total = isaac.load_acc(acc_name)
# Clean-up
for fname in glob.glob(f_prefix + '*'): os.remove(fname)
# Estimate the accelerations due to pressure gradients/gas dynamics
a_gas = a_total - a_grav
ar_gas = a_gas[:,0]*x/r + a_gas[:,1]*y/r
ar_gas = isaac.strip_units(ar_gas)
ar2_gas = ar_gas*r**2
logr_bins, ratio, err = isaac.binned_mean(np.log(r), ar2_gas/ar2_calc_grav, nbins=nr,\
weighted_bins=True)
r_bins = np.exp(logr_bins)
ratio_spline = isaac.extrap1d(r_bins, ratio)
ar2_calc = ar2_calc_grav*(1 + ratio_spline(r))
a_calc = ar2_calc/r**2
v = np.sqrt(r*abs(a_calc))
v = isaac.match_units(v, vel0.units)[0]
vel = vel0.copy()
vel[:,0] = -v*y/r
vel[:,1] = v*x/r
# more cleanup
f.g['vel'] = vel0
return vel | changbin = os.popen('which ChaNGa').read().strip() | conditional_block |
_pid_correction.py | import torch
from deluca.lung.core import Controller, LungEnv
class PIDCorrection(Controller):
def __init__(self, base_controller: Controller, sim: LungEnv, pid_K=[0.0, 0.0], decay=0.1, **kwargs):
self.base_controller = base_controller
self.sim = sim
self.I = 0.0
self.K = pid_K
self.decay = decay
self.reset()
| def compute_action(self, state, t):
u_in_base, u_out = self.base_controller(state, t)
err = self.sim.pressure - state
self.I = self.I * (1 - self.decay) + err * self.decay
pid_correction = self.K[0] * err + self.K[1] * self.I
u_in = torch.clamp(u_in_base + pid_correction, min=0.0, max=100.0)
self.sim(u_in, u_out, t)
return u_in, u_out | def reset(self):
self.base_controller.reset()
self.sim.reset()
self.I = 0.0
| random_line_split |
_pid_correction.py | import torch
from deluca.lung.core import Controller, LungEnv
class PIDCorrection(Controller):
def | (self, base_controller: Controller, sim: LungEnv, pid_K=[0.0, 0.0], decay=0.1, **kwargs):
self.base_controller = base_controller
self.sim = sim
self.I = 0.0
self.K = pid_K
self.decay = decay
self.reset()
def reset(self):
self.base_controller.reset()
self.sim.reset()
self.I = 0.0
def compute_action(self, state, t):
u_in_base, u_out = self.base_controller(state, t)
err = self.sim.pressure - state
self.I = self.I * (1 - self.decay) + err * self.decay
pid_correction = self.K[0] * err + self.K[1] * self.I
u_in = torch.clamp(u_in_base + pid_correction, min=0.0, max=100.0)
self.sim(u_in, u_out, t)
return u_in, u_out
| __init__ | identifier_name |
_pid_correction.py | import torch
from deluca.lung.core import Controller, LungEnv
class PIDCorrection(Controller):
def __init__(self, base_controller: Controller, sim: LungEnv, pid_K=[0.0, 0.0], decay=0.1, **kwargs):
self.base_controller = base_controller
self.sim = sim
self.I = 0.0
self.K = pid_K
self.decay = decay
self.reset()
def reset(self):
|
def compute_action(self, state, t):
u_in_base, u_out = self.base_controller(state, t)
err = self.sim.pressure - state
self.I = self.I * (1 - self.decay) + err * self.decay
pid_correction = self.K[0] * err + self.K[1] * self.I
u_in = torch.clamp(u_in_base + pid_correction, min=0.0, max=100.0)
self.sim(u_in, u_out, t)
return u_in, u_out
| self.base_controller.reset()
self.sim.reset()
self.I = 0.0 | identifier_body |
base-demographic-estimate-controller.js | /*
* Electronic Logistics Management Information System (eLMIS) is a supply chain management system for health commodities in a developing country setting.
*
* Copyright (C) 2015 John Snow, Inc (JSI). This program was produced for the U.S. Agency for International Development (USAID). It was prepared under the USAID | DELIVER PROJECT, Task Order 4.
*
* This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
function BaseDemographicEstimateController($scope, rights, categories, programs , years, $filter) {
//TODO: read this configuration from backend.
$scope.enableAutoCalculate = false;
$scope.showFacilityAggregatesOption = false;
$scope.currentPage = 1;
$scope.pageSize = 50;
$scope.categories = categories;
$scope.rights = rights;
$scope.years = years;
$scope.programs = programs;
$scope.$watch('currentPage', function () {
if ($scope.isDirty()) {
$scope.save();
}
if (angular.isDefined($scope.lineItems)) {
$scope.pageLineItems();
}
});
$scope.isDirty = function () {
return $scope.$dirty;
};
$scope.hasPermission = function (permission) {
return ($scope.rights.indexOf(permission) >= 0);
};
$scope.showParent = function (index) {
if (index > 0) {
return ($scope.form.estimateLineItems[index].parentName !== $scope.form.estimateLineItems[index - 1].parentName);
}
return true;
};
$scope.pageLineItems = function () {
$scope.pageCount = Math.ceil($scope.lineItems.length / $scope.pageSize);
if ($scope.lineItems.length > $scope.pageSize) {
$scope.form.estimateLineItems = $scope.lineItems.slice($scope.pageSize * ($scope.currentPage - 1), $scope.pageSize * Number($scope.currentPage));
} else {
$scope.form.estimateLineItems = $scope.lineItems;
}
};
$scope.clearMessages = function(){
$scope.message = '';
$scope.error = '';
};
$scope.init = function(){
// default to the current year
$scope.year = Number( $filter('date')(new Date(), 'yyyy') );
// when the available program is only 1, default to this program.
if(programs.length === 1){
$scope.program = programs[0].id;
}
$scope.onParamChanged();
};
}
BaseDemographicEstimateController.resolve = { | DemographicEstimateCategories.get({}, function (data) {
deferred.resolve(data.estimate_categories);
}, {});
}, 100);
return deferred.promise;
}, years: function ($q, $timeout, OperationYears) {
var deferred = $q.defer();
$timeout(function () {
OperationYears.get({}, function (data) {
deferred.resolve(data.years);
});
}, 100);
return deferred.promise;
}, programs: function($q, $timeout, DemographicEstimatePrograms){
var deferred = $q.defer();
$timeout(function(){
DemographicEstimatePrograms.get({}, function(data){
deferred.resolve(data.programs);
});
},100);
return deferred.promise;
}, rights: function ($q, $timeout, UserSupervisoryRights) {
var deferred = $q.defer();
$timeout(function () {
UserSupervisoryRights.get({}, function (data) {
deferred.resolve(data.rights);
}, {});
}, 100);
return deferred.promise;
}
}; |
categories: function ($q, $timeout, DemographicEstimateCategories) {
var deferred = $q.defer();
$timeout(function () { | random_line_split |
base-demographic-estimate-controller.js | /*
* Electronic Logistics Management Information System (eLMIS) is a supply chain management system for health commodities in a developing country setting.
*
* Copyright (C) 2015 John Snow, Inc (JSI). This program was produced for the U.S. Agency for International Development (USAID). It was prepared under the USAID | DELIVER PROJECT, Task Order 4.
*
* This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
function BaseDemographicEstimateController($scope, rights, categories, programs , years, $filter) |
BaseDemographicEstimateController.resolve = {
categories: function ($q, $timeout, DemographicEstimateCategories) {
var deferred = $q.defer();
$timeout(function () {
DemographicEstimateCategories.get({}, function (data) {
deferred.resolve(data.estimate_categories);
}, {});
}, 100);
return deferred.promise;
}, years: function ($q, $timeout, OperationYears) {
var deferred = $q.defer();
$timeout(function () {
OperationYears.get({}, function (data) {
deferred.resolve(data.years);
});
}, 100);
return deferred.promise;
}, programs: function($q, $timeout, DemographicEstimatePrograms){
var deferred = $q.defer();
$timeout(function(){
DemographicEstimatePrograms.get({}, function(data){
deferred.resolve(data.programs);
});
},100);
return deferred.promise;
}, rights: function ($q, $timeout, UserSupervisoryRights) {
var deferred = $q.defer();
$timeout(function () {
UserSupervisoryRights.get({}, function (data) {
deferred.resolve(data.rights);
}, {});
}, 100);
return deferred.promise;
}
};
| {
//TODO: read this configuration from backend.
$scope.enableAutoCalculate = false;
$scope.showFacilityAggregatesOption = false;
$scope.currentPage = 1;
$scope.pageSize = 50;
$scope.categories = categories;
$scope.rights = rights;
$scope.years = years;
$scope.programs = programs;
$scope.$watch('currentPage', function () {
if ($scope.isDirty()) {
$scope.save();
}
if (angular.isDefined($scope.lineItems)) {
$scope.pageLineItems();
}
});
$scope.isDirty = function () {
return $scope.$dirty;
};
$scope.hasPermission = function (permission) {
return ($scope.rights.indexOf(permission) >= 0);
};
$scope.showParent = function (index) {
if (index > 0) {
return ($scope.form.estimateLineItems[index].parentName !== $scope.form.estimateLineItems[index - 1].parentName);
}
return true;
};
$scope.pageLineItems = function () {
$scope.pageCount = Math.ceil($scope.lineItems.length / $scope.pageSize);
if ($scope.lineItems.length > $scope.pageSize) {
$scope.form.estimateLineItems = $scope.lineItems.slice($scope.pageSize * ($scope.currentPage - 1), $scope.pageSize * Number($scope.currentPage));
} else {
$scope.form.estimateLineItems = $scope.lineItems;
}
};
$scope.clearMessages = function(){
$scope.message = '';
$scope.error = '';
};
$scope.init = function(){
// default to the current year
$scope.year = Number( $filter('date')(new Date(), 'yyyy') );
// when the available program is only 1, default to this program.
if(programs.length === 1){
$scope.program = programs[0].id;
}
$scope.onParamChanged();
};
} | identifier_body |
base-demographic-estimate-controller.js | /*
* Electronic Logistics Management Information System (eLMIS) is a supply chain management system for health commodities in a developing country setting.
*
* Copyright (C) 2015 John Snow, Inc (JSI). This program was produced for the U.S. Agency for International Development (USAID). It was prepared under the USAID | DELIVER PROJECT, Task Order 4.
*
* This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
function | ($scope, rights, categories, programs , years, $filter) {
//TODO: read this configuration from backend.
$scope.enableAutoCalculate = false;
$scope.showFacilityAggregatesOption = false;
$scope.currentPage = 1;
$scope.pageSize = 50;
$scope.categories = categories;
$scope.rights = rights;
$scope.years = years;
$scope.programs = programs;
$scope.$watch('currentPage', function () {
if ($scope.isDirty()) {
$scope.save();
}
if (angular.isDefined($scope.lineItems)) {
$scope.pageLineItems();
}
});
$scope.isDirty = function () {
return $scope.$dirty;
};
$scope.hasPermission = function (permission) {
return ($scope.rights.indexOf(permission) >= 0);
};
$scope.showParent = function (index) {
if (index > 0) {
return ($scope.form.estimateLineItems[index].parentName !== $scope.form.estimateLineItems[index - 1].parentName);
}
return true;
};
$scope.pageLineItems = function () {
$scope.pageCount = Math.ceil($scope.lineItems.length / $scope.pageSize);
if ($scope.lineItems.length > $scope.pageSize) {
$scope.form.estimateLineItems = $scope.lineItems.slice($scope.pageSize * ($scope.currentPage - 1), $scope.pageSize * Number($scope.currentPage));
} else {
$scope.form.estimateLineItems = $scope.lineItems;
}
};
$scope.clearMessages = function(){
$scope.message = '';
$scope.error = '';
};
$scope.init = function(){
// default to the current year
$scope.year = Number( $filter('date')(new Date(), 'yyyy') );
// when the available program is only 1, default to this program.
if(programs.length === 1){
$scope.program = programs[0].id;
}
$scope.onParamChanged();
};
}
BaseDemographicEstimateController.resolve = {
categories: function ($q, $timeout, DemographicEstimateCategories) {
var deferred = $q.defer();
$timeout(function () {
DemographicEstimateCategories.get({}, function (data) {
deferred.resolve(data.estimate_categories);
}, {});
}, 100);
return deferred.promise;
}, years: function ($q, $timeout, OperationYears) {
var deferred = $q.defer();
$timeout(function () {
OperationYears.get({}, function (data) {
deferred.resolve(data.years);
});
}, 100);
return deferred.promise;
}, programs: function($q, $timeout, DemographicEstimatePrograms){
var deferred = $q.defer();
$timeout(function(){
DemographicEstimatePrograms.get({}, function(data){
deferred.resolve(data.programs);
});
},100);
return deferred.promise;
}, rights: function ($q, $timeout, UserSupervisoryRights) {
var deferred = $q.defer();
$timeout(function () {
UserSupervisoryRights.get({}, function (data) {
deferred.resolve(data.rights);
}, {});
}, 100);
return deferred.promise;
}
};
| BaseDemographicEstimateController | identifier_name |
base-demographic-estimate-controller.js | /*
* Electronic Logistics Management Information System (eLMIS) is a supply chain management system for health commodities in a developing country setting.
*
* Copyright (C) 2015 John Snow, Inc (JSI). This program was produced for the U.S. Agency for International Development (USAID). It was prepared under the USAID | DELIVER PROJECT, Task Order 4.
*
* This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
function BaseDemographicEstimateController($scope, rights, categories, programs , years, $filter) {
//TODO: read this configuration from backend.
$scope.enableAutoCalculate = false;
$scope.showFacilityAggregatesOption = false;
$scope.currentPage = 1;
$scope.pageSize = 50;
$scope.categories = categories;
$scope.rights = rights;
$scope.years = years;
$scope.programs = programs;
$scope.$watch('currentPage', function () {
if ($scope.isDirty()) {
$scope.save();
}
if (angular.isDefined($scope.lineItems)) {
$scope.pageLineItems();
}
});
$scope.isDirty = function () {
return $scope.$dirty;
};
$scope.hasPermission = function (permission) {
return ($scope.rights.indexOf(permission) >= 0);
};
$scope.showParent = function (index) {
if (index > 0) {
return ($scope.form.estimateLineItems[index].parentName !== $scope.form.estimateLineItems[index - 1].parentName);
}
return true;
};
$scope.pageLineItems = function () {
$scope.pageCount = Math.ceil($scope.lineItems.length / $scope.pageSize);
if ($scope.lineItems.length > $scope.pageSize) {
$scope.form.estimateLineItems = $scope.lineItems.slice($scope.pageSize * ($scope.currentPage - 1), $scope.pageSize * Number($scope.currentPage));
} else |
};
$scope.clearMessages = function(){
$scope.message = '';
$scope.error = '';
};
$scope.init = function(){
// default to the current year
$scope.year = Number( $filter('date')(new Date(), 'yyyy') );
// when the available program is only 1, default to this program.
if(programs.length === 1){
$scope.program = programs[0].id;
}
$scope.onParamChanged();
};
}
BaseDemographicEstimateController.resolve = {
categories: function ($q, $timeout, DemographicEstimateCategories) {
var deferred = $q.defer();
$timeout(function () {
DemographicEstimateCategories.get({}, function (data) {
deferred.resolve(data.estimate_categories);
}, {});
}, 100);
return deferred.promise;
}, years: function ($q, $timeout, OperationYears) {
var deferred = $q.defer();
$timeout(function () {
OperationYears.get({}, function (data) {
deferred.resolve(data.years);
});
}, 100);
return deferred.promise;
}, programs: function($q, $timeout, DemographicEstimatePrograms){
var deferred = $q.defer();
$timeout(function(){
DemographicEstimatePrograms.get({}, function(data){
deferred.resolve(data.programs);
});
},100);
return deferred.promise;
}, rights: function ($q, $timeout, UserSupervisoryRights) {
var deferred = $q.defer();
$timeout(function () {
UserSupervisoryRights.get({}, function (data) {
deferred.resolve(data.rights);
}, {});
}, 100);
return deferred.promise;
}
};
| {
$scope.form.estimateLineItems = $scope.lineItems;
} | conditional_block |
MockStore.ts | import { Action, ActionReducer, Store } from '@ngrx/store';
import { State, prodReducer } from 'app/store';
import { State as LayerState } from 'app/store/layers/reducer';
import { State as PlaybackState } from 'app/store/playback/reducer';
import { BehaviorSubject } from 'rxjs/BehaviorSubject';
import { Observable } from 'rxjs/Observable';
import { Observer } from 'rxjs/Observer';
import { map } from 'rxjs/operator/map';
const INIT_ACTION: Action = { type: '__test123__' };
export class MockStore extends Store<State> {
private readonly subject: BehaviorSubject<State>;
constructor() {
super(
undefined as Observer<Action>,
undefined as Observer<ActionReducer<any>>,
undefined as Observable<any>,
);
this.subject = new BehaviorSubject(prodReducer(undefined, INIT_ACTION));
}
readonly select = <R>(mapFn: any, ...paths: string[]): Observable<R> => {
return map.call(this.subject, mapFn);
};
dispatch(action: Action) {}
getState() |
setLayerState(layers: LayerState) {
const state = this.getState();
const newState: State = { ...state, present: { ...state.present, layers } };
this.subject.next(newState);
}
setPlaybackState(playback: PlaybackState) {
const state = this.getState();
const newState: State = { ...state, present: { ...state.present, playback } };
this.subject.next(newState);
}
}
| {
return this.subject.getValue();
} | identifier_body |
MockStore.ts | import { Action, ActionReducer, Store } from '@ngrx/store';
import { State, prodReducer } from 'app/store';
import { State as LayerState } from 'app/store/layers/reducer';
import { State as PlaybackState } from 'app/store/playback/reducer';
import { BehaviorSubject } from 'rxjs/BehaviorSubject';
import { Observable } from 'rxjs/Observable';
import { Observer } from 'rxjs/Observer';
import { map } from 'rxjs/operator/map';
const INIT_ACTION: Action = { type: '__test123__' };
export class MockStore extends Store<State> {
private readonly subject: BehaviorSubject<State>;
constructor() {
super(
undefined as Observer<Action>,
undefined as Observer<ActionReducer<any>>,
undefined as Observable<any>,
);
this.subject = new BehaviorSubject(prodReducer(undefined, INIT_ACTION));
}
readonly select = <R>(mapFn: any, ...paths: string[]): Observable<R> => {
return map.call(this.subject, mapFn);
};
| (action: Action) {}
getState() {
return this.subject.getValue();
}
setLayerState(layers: LayerState) {
const state = this.getState();
const newState: State = { ...state, present: { ...state.present, layers } };
this.subject.next(newState);
}
setPlaybackState(playback: PlaybackState) {
const state = this.getState();
const newState: State = { ...state, present: { ...state.present, playback } };
this.subject.next(newState);
}
}
| dispatch | identifier_name |
MockStore.ts | import { Action, ActionReducer, Store } from '@ngrx/store';
import { State, prodReducer } from 'app/store';
import { State as LayerState } from 'app/store/layers/reducer';
import { State as PlaybackState } from 'app/store/playback/reducer';
import { BehaviorSubject } from 'rxjs/BehaviorSubject';
import { Observable } from 'rxjs/Observable';
import { Observer } from 'rxjs/Observer';
import { map } from 'rxjs/operator/map';
const INIT_ACTION: Action = { type: '__test123__' };
export class MockStore extends Store<State> {
private readonly subject: BehaviorSubject<State>;
constructor() {
super(
undefined as Observer<Action>,
undefined as Observer<ActionReducer<any>>,
undefined as Observable<any>,
);
this.subject = new BehaviorSubject(prodReducer(undefined, INIT_ACTION));
}
readonly select = <R>(mapFn: any, ...paths: string[]): Observable<R> => {
return map.call(this.subject, mapFn);
};
dispatch(action: Action) {} | getState() {
return this.subject.getValue();
}
setLayerState(layers: LayerState) {
const state = this.getState();
const newState: State = { ...state, present: { ...state.present, layers } };
this.subject.next(newState);
}
setPlaybackState(playback: PlaybackState) {
const state = this.getState();
const newState: State = { ...state, present: { ...state.present, playback } };
this.subject.next(newState);
}
} | random_line_split | |
views.py | from django.shortcuts import render_to_response
from django.template import RequestContext
from apps.members.models import Member
def show_all_current_members(request):
members = Member.objects.filter(is_renegade=False).order_by('function', 'started_nsi_date')
return render_to_response(
'show_all_current_members.html',
{'members': members},
context_instance=RequestContext(request)
)
def show_member(request, slug):
member = Member.objects.get(slug=slug)
participation_list = member.participation_set.all()
members = Member.objects.all()
return render_to_response(
'show_member.html',
{'member': member, 'participation_list': participation_list, 'members': members},
context_instance=RequestContext(request)
)
def | (request):
members = Member.objects.filter(is_renegade=True)
return render_to_response(
'show_all_former_members.html',
{'members': members},
context_instance=RequestContext(request)
)
| show_all_former_members | identifier_name |
views.py | from django.shortcuts import render_to_response
from django.template import RequestContext
from apps.members.models import Member
def show_all_current_members(request):
|
def show_member(request, slug):
member = Member.objects.get(slug=slug)
participation_list = member.participation_set.all()
members = Member.objects.all()
return render_to_response(
'show_member.html',
{'member': member, 'participation_list': participation_list, 'members': members},
context_instance=RequestContext(request)
)
def show_all_former_members(request):
members = Member.objects.filter(is_renegade=True)
return render_to_response(
'show_all_former_members.html',
{'members': members},
context_instance=RequestContext(request)
)
| members = Member.objects.filter(is_renegade=False).order_by('function', 'started_nsi_date')
return render_to_response(
'show_all_current_members.html',
{'members': members},
context_instance=RequestContext(request)
) | identifier_body |
views.py | from django.shortcuts import render_to_response
from django.template import RequestContext
from apps.members.models import Member
def show_all_current_members(request):
members = Member.objects.filter(is_renegade=False).order_by('function', 'started_nsi_date')
return render_to_response(
'show_all_current_members.html',
{'members': members},
context_instance=RequestContext(request)
)
def show_member(request, slug):
member = Member.objects.get(slug=slug)
participation_list = member.participation_set.all()
members = Member.objects.all()
return render_to_response(
'show_member.html',
{'member': member, 'participation_list': participation_list, 'members': members},
context_instance=RequestContext(request)
)
| def show_all_former_members(request):
members = Member.objects.filter(is_renegade=True)
return render_to_response(
'show_all_former_members.html',
{'members': members},
context_instance=RequestContext(request)
) | random_line_split | |
ruleFeedbackHistoryAPIs.ts | import { handleApiError, mainApiFetch, getRuleFeedbackHistoriesUrl, getRuleFeedbackHistoryUrl, getActivityStatsUrl } from '../../helpers/evidence/routingHelpers';
export const fetchRuleFeedbackHistories = async (key: string, activityId: string, selectedConjunction: string, startDate?: string, endDate?: string, turkSessionID?: string) => {
if (!selectedConjunction || !startDate) { return }
const url = getRuleFeedbackHistoriesUrl({ activityId, selectedConjunction, startDate, endDate, turkSessionID });
const response = await mainApiFetch(url);
const ruleFeedbackHistories = await response.json();
return {
error: handleApiError('Failed to fetch rule feedback histories, please refresh the page.', response),
ruleFeedbackHistories: ruleFeedbackHistories.rule_feedback_histories
};
}
export const fetchRuleFeedbackHistoriesByRule = async (key: string, ruleUID: string, promptId: string, startDate?: string, endDate?: string, turkSessionID?: string) => {
const url = getRuleFeedbackHistoryUrl({ ruleUID, promptId, startDate, endDate, turkSessionID });
const response = await mainApiFetch(url);
const ruleFeedbackHistories = await response.json();
return {
error: handleApiError('Failed to fetch rule feedback histories, please refresh the page.', response),
responses: ruleFeedbackHistories[ruleUID].responses
};
}
export const fetchPromptHealth = async (key: string, activityId: string, startDate?: string, endDate?: string, turkSessionID?: string) => { | error: handleApiError('Failed to fetch rule feedback histories, please refresh the page.', response),
prompts: promptHealth
};
} | const url = getActivityStatsUrl({ activityId, startDate, endDate, turkSessionID });
const response = await mainApiFetch(url);
const promptHealth = await response.json();
return { | random_line_split |
ruleFeedbackHistoryAPIs.ts | import { handleApiError, mainApiFetch, getRuleFeedbackHistoriesUrl, getRuleFeedbackHistoryUrl, getActivityStatsUrl } from '../../helpers/evidence/routingHelpers';
export const fetchRuleFeedbackHistories = async (key: string, activityId: string, selectedConjunction: string, startDate?: string, endDate?: string, turkSessionID?: string) => {
if (!selectedConjunction || !startDate) |
const url = getRuleFeedbackHistoriesUrl({ activityId, selectedConjunction, startDate, endDate, turkSessionID });
const response = await mainApiFetch(url);
const ruleFeedbackHistories = await response.json();
return {
error: handleApiError('Failed to fetch rule feedback histories, please refresh the page.', response),
ruleFeedbackHistories: ruleFeedbackHistories.rule_feedback_histories
};
}
export const fetchRuleFeedbackHistoriesByRule = async (key: string, ruleUID: string, promptId: string, startDate?: string, endDate?: string, turkSessionID?: string) => {
const url = getRuleFeedbackHistoryUrl({ ruleUID, promptId, startDate, endDate, turkSessionID });
const response = await mainApiFetch(url);
const ruleFeedbackHistories = await response.json();
return {
error: handleApiError('Failed to fetch rule feedback histories, please refresh the page.', response),
responses: ruleFeedbackHistories[ruleUID].responses
};
}
export const fetchPromptHealth = async (key: string, activityId: string, startDate?: string, endDate?: string, turkSessionID?: string) => {
const url = getActivityStatsUrl({ activityId, startDate, endDate, turkSessionID });
const response = await mainApiFetch(url);
const promptHealth = await response.json();
return {
error: handleApiError('Failed to fetch rule feedback histories, please refresh the page.', response),
prompts: promptHealth
};
}
| { return } | conditional_block |
instr_rsqrtps.rs | use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
#[test]
fn rsqrtps_1() {
run_test(&Instruction { mnemonic: Mnemonic::RSQRTPS, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM6)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 82, 214], OperandSize::Dword)
}
#[test]
fn rsqrtps_2() {
run_test(&Instruction { mnemonic: Mnemonic::RSQRTPS, operand1: Some(Direct(XMM7)), operand2: Some(IndirectScaledDisplaced(EAX, Two, 1571527684, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 82, 60, 69, 4, 156, 171, 93], OperandSize::Dword)
}
#[test]
fn rsqrtps_3() {
run_test(&Instruction { mnemonic: Mnemonic::RSQRTPS, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM4)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 82, 196], OperandSize::Qword)
}
#[test]
fn rsqrtps_4() | {
run_test(&Instruction { mnemonic: Mnemonic::RSQRTPS, operand1: Some(Direct(XMM3)), operand2: Some(IndirectScaledIndexed(RDX, RDX, Four, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 82, 28, 146], OperandSize::Qword)
} | identifier_body | |
instr_rsqrtps.rs | use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*; | use ::RegScale::*;
use ::test::run_test;
#[test]
fn rsqrtps_1() {
run_test(&Instruction { mnemonic: Mnemonic::RSQRTPS, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM6)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 82, 214], OperandSize::Dword)
}
#[test]
fn rsqrtps_2() {
run_test(&Instruction { mnemonic: Mnemonic::RSQRTPS, operand1: Some(Direct(XMM7)), operand2: Some(IndirectScaledDisplaced(EAX, Two, 1571527684, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 82, 60, 69, 4, 156, 171, 93], OperandSize::Dword)
}
#[test]
fn rsqrtps_3() {
run_test(&Instruction { mnemonic: Mnemonic::RSQRTPS, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM4)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 82, 196], OperandSize::Qword)
}
#[test]
fn rsqrtps_4() {
run_test(&Instruction { mnemonic: Mnemonic::RSQRTPS, operand1: Some(Direct(XMM3)), operand2: Some(IndirectScaledIndexed(RDX, RDX, Four, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 82, 28, 146], OperandSize::Qword)
} | use ::Operand::*;
use ::Reg::*; | random_line_split |
instr_rsqrtps.rs | use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
#[test]
fn rsqrtps_1() {
run_test(&Instruction { mnemonic: Mnemonic::RSQRTPS, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM6)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 82, 214], OperandSize::Dword)
}
#[test]
fn rsqrtps_2() {
run_test(&Instruction { mnemonic: Mnemonic::RSQRTPS, operand1: Some(Direct(XMM7)), operand2: Some(IndirectScaledDisplaced(EAX, Two, 1571527684, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 82, 60, 69, 4, 156, 171, 93], OperandSize::Dword)
}
#[test]
fn | () {
run_test(&Instruction { mnemonic: Mnemonic::RSQRTPS, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM4)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 82, 196], OperandSize::Qword)
}
#[test]
fn rsqrtps_4() {
run_test(&Instruction { mnemonic: Mnemonic::RSQRTPS, operand1: Some(Direct(XMM3)), operand2: Some(IndirectScaledIndexed(RDX, RDX, Four, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[15, 82, 28, 146], OperandSize::Qword)
}
| rsqrtps_3 | identifier_name |
EIDSS.BvMessages.en-US.js | var EIDSS = {
BvMessages: {
'bntHideSearch': 'Hide Search',
'bntShowSearch': 'Show Search',
'btnClear': 'Clear the field contents',
'btnHideErrDetail': 'Hide Details',
'btnSelect': 'Select',
'btnShowErrDetail': 'Show Details',
'btnView': 'View',
'strSave_Id': 'Save',
'tooltipSave_Id': 'Save',
'strClose_Id': 'Close',
'strRefresh_Id': 'Refresh',
'tooltipRefresh_Id': 'Refresh',
'strCreate_Id': 'New',
'tooltipCreate_Id': 'New',
'strEdit_Id': 'Edit',
'tooltipEdit_Id': 'Edit',
'Confirmation': 'Confirmation',
'Delete Record': 'Delete Record',
'ErrAuthentication': 'The request requires user authentication.',
'ErrDatabase': 'Error during database operation.',
'errDatabaseNotFound': 'Cannot open database \'{0}\' on server \'{1}\'. Check the correctness of database name.',
'ErrDataValidation': 'Some field contains invalid data.',
'ErrEmptyUserLogin': 'User login can\'t be empty',
'ErrFieldSampleIDNotFound': 'Sample is not found.',
'ErrFillDataset': 'Error during retrieving data from database.',
'ErrFilterValidatioError': 'Filter criteria value for [{0}] field can\'t be empty.',
'errGeneralNetworkError': 'Can\'t establish connection to the SQL Server. Please check that network connection is established and try to open this form again.',
'ErrIncorrectDatabaseVersion': 'The database version is absent or in incorrect format. Please upgrade your database to latest database version.',
'errInvailidSiteID': 'Invalid Site ID or Serial Number',
'errInvailidSiteType': 'Invalid Site Type or Serial Number',
'ErrInvalidFieldFormat': 'Invalid data format for field \'{0}\'.',
'ErrInvalidLogin': 'Cannot connect to SQL server. The database user name or password is not correct.',
'ErrInvalidParameter': 'Invalid value passed to the sql command parameter.',
'errInvalidSearchCriteria': 'Invalid search criteria.',
'ErrLocalFieldSampleIDNotFound': 'Local/field sample ID is not found in the grid.',
'ErrLoginIsLocked': 'You have exceeded the number of incorrect login attempts. Please try again in {0} minutes.',
'ErrLowClientVersion': 'The application version doesn\'t correspond to database version. Please install the latest application version.',
'ErrLowDatabaseVersion': 'The application requires the newest database version. Please upgrade your database to latest database version.',
'ErrMandatoryFieldRequired': 'The field \'{0}\' is mandatory. You must enter data in this field before saving the form.',
'errNoFreeLocation': 'There is no free destination location',
'ErrOldPassword': 'Old (current) password incorrect for user. The password was not changed.',
'Error': 'Error',
'ErrPasswordExpired': 'Your password is expired. Please change your password.',
'ErrPasswordPolicy': 'Unable to update the password. The value provided for the new password does not meet the length, complexity, or history requirement.',
'ErrPost': 'Error during saving data in database.',
'errSampleInTransfer': 'Sample "{0}" is already included in transfer "{1}"',
'errSQLLoginError': 'Cannot connect to SQL server. Check the correctness of SQL connection parameters in the SQL Server tab or SQL server accessibility.',
'ErrSqlQuery': 'Error during executing sql query.',
'errSqlServerDoesntExist': 'Can\'t connect to the SQLServer. Please check that network connection is established, SQL Server is not shut down and try to open this form again.',
'errSqlServerNotFound': 'Cannot connect to SQL server \'{0}\'. Check the correctness of SQL server name or SQL server accessibility.',
'ErrStoredProcedure': 'Error during executing database stored procedure.',
'ErrUndefinedStdError': 'Some error occurs in the application. Please send information about this error to software development team.',
'errUnknownError': 'Some error occured in application',
'ErrUnprocessedError': 'Some error occurs in the application. Please send information about this error to software development team.',
'ErrUserNotFound': 'Combination of user/password you entered is not correct.',
'ErrWebTemporarilyUnavailableFunction': 'ErrWebTemporarilyUnavailableFunction',
'Message': 'Message',
'msgCancel': 'All entered data will be lost. Continue?',
'msgCantDeleteRecord': 'The record can not be deleted.',
'msgClearControl': 'Press Ctrl-Del to clear value.',
'msgConfimation': 'Confirmation',
'msgConfirmClearFlexForm': 'Clear the panel content?',
'msgConfirmClearLookup': 'Clear the content?',
'msgDeletePrompt': 'The object will be deleted. Delete object?',
'msgDeleteRecordPrompt': 'The record(s) will be deleted. Delete?',
'msgCancelPrompt': 'Do you want to cancel all the changes and close the form?',
'msgSavePrompt': 'Do you want to save changes?',
'msgUnsavedRecordsPrompt': 'You have some unsaved records. Do you want to save changes before applying a new search (unsaved changes will be undone)?',
'msgOKPrompt': 'Do you want to save changes and close the form?',
'msgEIDSSCopyright': 'Copyright © 2005-2014 Black && Veatch Special Projects Corp.',
'msgEIDSSRunning': 'You can\'t run multiple EIDSS instances simultaneously. Other instance of EIDSS is running already',
'msgEmptyLogin': 'Login is not defined',
'msgMessage': 'Message',
'msgNoDeletePermission': 'You have no rights to delete this object',
'msgNoFreeSpace': 'No free space on location.',
'msgNoInsertPermission': 'You have no rights to create this object',
'msgNoRecordsFound': 'No records is found for current search criteria.',
'msgNoSelectPermission': 'You have no rights to view this form',
'msgParameterAlreadyExists': 'Field Already Exists',
'msgPasswordChanged': 'Your password has been successfully changed',
'msgPasswordNotTheSame': 'New and Confirmed passwords must match',
'msgReasonEmpty': 'Input reason for change',
'msgReplicationPrompt': 'Start the replication to transfer data on other sites?',
'msgREplicationPromptCaption': 'Confirm Replication',
'msgWaitFormCaption': 'Please wait',
'msgFormLoading': 'The form is loading',
'msgWrongDiagnosis': 'The changed diagnosis ({0}) should differ from the initial diagnosis ({1}).',
'Save data?': 'Save data?',
'Warning': 'Warning message', | 'SecurityLog_EIDSS_finished_successfully': 'EIDSS finished successfully',
'SecurityLog_EIDSS_started_abnormaly': 'EIDSS started abnormaly',
'SecurityLog_EIDSS_started_successfully': 'EIDSS started successfully',
'strCancel_Id': 'Cancel',
'strChangeDiagnosisReason_msgId': 'Reason is required.',
'strDelete_Id': 'Delete',
'strOK_Id': 'OK',
'tooltipCancel_Id': 'Cancel',
'tooltipClose_Id': 'Close',
'tooltipDelete_Id': 'Delete',
'tooltipOK_Id': 'OK',
'titleAccessionDetails': 'Accession Details',
'titleAntibiotic': 'Antibiotic',
'titleContactInformation': 'Person Details and Contact Information',
'titleDiagnosisChange': 'Diagnosis Change',
'titleDuplicates': 'Duplicates',
'titleEmployeeDetails': 'Employee Details',
'titleEmployeeList': 'Employees List',
'titleGeoLocation': 'Geographic Location',
'titleHumanCaseList': 'Human Cases List',
'titleOrganizationList': 'Organizations List',
'titleOutbreakList': 'Outbreaks List',
'titlePersonsList': 'Persons List',
'titleFarmList': 'titleFarmList',
'titleTestResultChange': 'Amend Test Result',
'titleAccessionInComment': 'Accession In Comment',
'titleSampleDetails': 'Sample Details',
'titleSummaryInfo': 'Summary Info',
'titleOutbreakNote': 'Note',
'errLoginMandatoryFields': 'All fields are mandatory.',
'msgAddToPreferencesPrompt': 'Selected records will be added to preferences.',
'msgRemoveFromPreferencesPrompt': 'Selected records will be removed from preferences.',
'strAdd_Id': 'Add',
'strRemove_Id': 'Remove',
'titleResultSummary': 'Results Summary and Interpretation',
'titleVeterinaryCaseList': 'Veterinary Cases List',
'titleVsSessionList': 'Vector Surveillance Sessions List',
'titlePensideTest': 'Penside Test',
'titleVetCaseLog': 'Action Required',
'titleASSessionList': 'Active Surveillance Sessions List',
'titleTestResultDetails': 'Test Result Details',
'ErrObjectCantBeDeleted': 'Object can\'t be deleted.',
'titleVaccination': 'Vaccination',
'msgAsSessionNoCaseCreated': 'There are no positive samples.',
'strYes_Id': 'Yes',
'strNo_Id': 'No',
'titleClinicalSigns': 'Clinical Signs',
'LastName': 'Last',
'FirstName': 'First',
'MiddleName': 'Middle',
'AsCampaign_GetSessionRemovalConfirmation': 'Do you really want to remove the link to the selected Session?',
'titleSelectFarm': 'Farms List',
'strInfo': 'Info',
'strSearchPanelMandatoryFields_msgId': 'Please fill all mandatory fields of Search Panel',
'menuCreateAliquot': 'Create Aliquot',
'menuCreateDerivative': 'Create Derivative',
'titleCreateAliquot': 'Create Aliquots',
'titleCreateDerivative': 'Create Derivatives',
'menuTransferOutSample': 'Transfer Out',
'titleTransferOutSample': 'Transfer Out',
'menuAccessionInPoorCondition': 'Accepted in poor condition',
'menuAccessionInRejected': 'Rejected',
'menuAmendTestResult': 'Amend Test Result',
'menuAssignTest': 'Assign Test',
'titleAnimals': 'Animals',
'titleCreateSample': 'Register a new sample',
'titleGroupAccessionIn': 'Group Accession In',
'tabTitleSampleTest': 'Sample/Test Details',
'noEmployeeSelectedErrorMessage': 'Please select an employee',
'Species': 'Species',
'titleClinicalInvestigation': 'Species Epidemiological and Clinical Investigation',
'titleAddDisease': 'Disease and Species',
'titleListDetectedDiseases': 'List of Detected Diseases',
'titleDetectedDisease': 'titleDetectedDisease',
'Active Surveillance Session': 'Active Surveillance Session',
'titleAction': 'Action',
'titleHumanAggregateCasesList': 'Human Aggregate Cases List',
'titleVetAggregateCasesList': 'Veterinary Aggregate Cases List',
'titleVetAggregateActionsList': 'Veterinary Aggregate Actions List',
'titleCopyVector': 'Copy Vector',
'titleAddParameter': 'Add Parameter',
'msgTooBigRecordsCount': 'Number of returned records is too big. Not all records are shown on the form. Please change search criteria and try again',
'titleCopySample': 'Copy',
'titleAnimalSampleInfo': 'Animal/Sample Info',
'titleAnimalsSamplesInfo': 'Animals/Samples Info',
'titleDiagnosisHistory': 'Diagnosis History',
'strMap': 'Map',
'ErrAllMandatoryFieldsRequired': 'You must enter data in all mandatory fields.',
'msgTooManyDiagnosis': 'You have selected too many diagnoses. Only first 0 will be displayed in the report.'
}
} | random_line_split | |
package.py | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyLrudict(PythonPackage):
""" A fast LRU cache"""
homepage = "https://github.com/amitdev/lru-dict"
url = "https://pypi.io/packages/source/l/lru-dict/lru-dict-1.1.6.tar.gz" | depends_on('python@2.7:')
depends_on('py-setuptools', type=('build')) |
version('1.1.6', 'b33f54f1257ab541f4df4bacc7509f5a')
| random_line_split |
package.py | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyLrudict(PythonPackage):
    """A fast LRU cache: Spack package for the PyPI 'lru-dict' C extension."""

    homepage = "https://github.com/amitdev/lru-dict"
    url = "https://pypi.io/packages/source/l/lru-dict/lru-dict-1.1.6.tar.gz"

    version('1.1.6', 'b33f54f1257ab541f4df4bacc7509f5a')

    depends_on('python@2.7:')
    # ('build') is just the string 'build' (no trailing comma means no tuple);
    # spell it plainly so the intent is clear.
    depends_on('py-setuptools', type='build')
| PyLrudict | identifier_name |
package.py | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyLrudict(PythonPackage):
    """A fast LRU cache: Spack package for the PyPI 'lru-dict' C extension."""

    homepage = "https://github.com/amitdev/lru-dict"
    url = "https://pypi.io/packages/source/l/lru-dict/lru-dict-1.1.6.tar.gz"

    version('1.1.6', 'b33f54f1257ab541f4df4bacc7509f5a')

    depends_on('python@2.7:')
    # ('build') is just the string 'build' (no trailing comma means no tuple);
    # spell it plainly so the intent is clear.
    depends_on('py-setuptools', type='build')
15.2.3.7-5-b-84.js | // Copyright (c) 2012 Ecma International. All rights reserved.
// Ecma International makes this code available under the terms and conditions set
// forth on http://hg.ecmascript.org/tests/test262/raw-file/tip/LICENSE (the
// "Use Terms"). Any redistribution of this code must retain the above
// copyright and this notice and otherwise comply with the Use Terms.
/*---
es5id: 15.2.3.7-5-b-84
description: >
Object.defineProperties - 'descObj' is the global object which
implements its own [[Get]] method to get 'configurable' property
(8.10.5 step 4.a)
includes:
- runTestCase.js
- fnGlobalObject.js
---*/
/**
 * Checks that Object.defineProperties reads the 'configurable' attribute via
 * the descriptor object's own [[Get]] when that descriptor is the global
 * object: the property must be defined and then deletable.
 * (Function name restored from the `runTestCase(testcase)` call site.)
 */
function testcase() {
        var obj = {};
        try {
            fnGlobalObject().configurable = true;
            Object.defineProperties(obj, {
                prop: fnGlobalObject()
            });
            var result1 = obj.hasOwnProperty("prop");
            delete obj.prop;
            var result2 = obj.hasOwnProperty("prop");
            return result1 === true && result2 === false;
        } finally {
            // Always scrub the global object so later tests are unaffected.
            delete fnGlobalObject().configurable;
        }
    }
runTestCase(testcase);
| testcase | identifier_name |
15.2.3.7-5-b-84.js | // Copyright (c) 2012 Ecma International. All rights reserved.
// Ecma International makes this code available under the terms and conditions set
// forth on http://hg.ecmascript.org/tests/test262/raw-file/tip/LICENSE (the
// "Use Terms"). Any redistribution of this code must retain the above
// copyright and this notice and otherwise comply with the Use Terms.
/*---
es5id: 15.2.3.7-5-b-84
description: >
Object.defineProperties - 'descObj' is the global object which
implements its own [[Get]] method to get 'configurable' property
(8.10.5 step 4.a)
includes:
- runTestCase.js
- fnGlobalObject.js
---*/
function testcase() |
runTestCase(testcase);
| {
var obj = {};
try {
fnGlobalObject().configurable = true;
Object.defineProperties(obj, {
prop: fnGlobalObject()
});
var result1 = obj.hasOwnProperty("prop");
delete obj.prop;
var result2 = obj.hasOwnProperty("prop");
return result1 === true && result2 === false;
} finally {
delete fnGlobalObject().configurable;
}
} | identifier_body |
15.2.3.7-5-b-84.js | // Copyright (c) 2012 Ecma International. All rights reserved.
// Ecma International makes this code available under the terms and conditions set
// forth on http://hg.ecmascript.org/tests/test262/raw-file/tip/LICENSE (the
// "Use Terms"). Any redistribution of this code must retain the above
// copyright and this notice and otherwise comply with the Use Terms.
/*---
es5id: 15.2.3.7-5-b-84
description: >
Object.defineProperties - 'descObj' is the global object which
implements its own [[Get]] method to get 'configurable' property
(8.10.5 step 4.a)
includes: |
var obj = {};
try {
fnGlobalObject().configurable = true;
Object.defineProperties(obj, {
prop: fnGlobalObject()
});
var result1 = obj.hasOwnProperty("prop");
delete obj.prop;
var result2 = obj.hasOwnProperty("prop");
return result1 === true && result2 === false;
} finally {
delete fnGlobalObject().configurable;
}
}
runTestCase(testcase); | - runTestCase.js
- fnGlobalObject.js
---*/
function testcase() { | random_line_split |
setup_win32.py | #!/usr/bin/env python
import glob
import os
import site
from cx_Freeze import setup, Executable
import meld.build_helpers
import meld.conf
# The second site-packages entry holds the PyGObject all-in-one runtime; its
# DLLs and data live under <site-packages>/gnome.
site_dir = site.getsitepackages()[1]
include_dll_path = os.path.join(site_dir, "gnome")
# DLLs that cx_Freeze's dependency scanner misses and that must therefore be
# copied into the frozen distribution by hand (the GTK3 stack and codecs).
missing_dll = [
    'libgtk-3-0.dll',
    'libgdk-3-0.dll',
    'libatk-1.0-0.dll',
    'libintl-8.dll',
    'libzzz.dll',
    'libwinpthread-1.dll',
    'libcairo-gobject-2.dll',
    'libgdk_pixbuf-2.0-0.dll',
    'libpango-1.0-0.dll',
    'libpangocairo-1.0-0.dll',
    'libpangoft2-1.0-0.dll',
    'libpangowin32-1.0-0.dll',
    'libffi-6.dll',
    'libfontconfig-1.dll',
    'libfreetype-6.dll',
    'libgio-2.0-0.dll',
    'libglib-2.0-0.dll',
    'libgmodule-2.0-0.dll',
    'libgobject-2.0-0.dll',
    'libgirepository-1.0-1.dll',
    'libgtksourceview-3.0-1.dll',
    'libjasper-1.dll',
    'libjpeg-8.dll',
    'libpng16-16.dll',
    'libgnutls-26.dll',
    'libxmlxpat.dll',
    'librsvg-2-2.dll',
    'libharfbuzz-gobject-0.dll',
    'libwebp-5.dll',
]
# Config and data directories the GTK3 runtime expects to find next to the
# executable at run time.
gtk_libs = [
    'etc/fonts',
    'etc/gtk-3.0/settings.ini',
    'etc/pango',
    'lib/gdk-pixbuf-2.0',
    'lib/girepository-1.0',
    'share/fontconfig',
    'share/fonts',
    'share/glib-2.0',
    'share/gtksourceview-3.0',
    'share/icons',
]
# (source, target) pairs for cx_Freeze: copy each item out of the gnome
# runtime into the same relative path inside the build directory.
include_files = [(os.path.join(include_dll_path, path), path) for path in
    missing_dll + gtk_libs]
# cx_Freeze build_exe options; "gi" is loaded dynamically and so must be
# listed explicitly for the scanner to include it.
build_exe_options = {
    "compressed": False,
    "icon": "data/icons/meld.ico",
    "includes": ["gi"],
    "packages": ["gi", "weakref"],
    "include_files": include_files,
}
# Create our registry key, and fill with install directory and exe.
# Each row: (row id, root key (2 = HKLM), subkey path, name, value, component).
# Raw strings keep the registry-path backslashes literal; the old '\M' spelling
# relies on an invalid escape sequence that modern Python warns about.
registry_table = [
    ('MeldKLM', 2, r'SOFTWARE\Meld', '*', None, 'TARGETDIR'),
    ('MeldInstallDir', 2, r'SOFTWARE\Meld', 'InstallDir', '[TARGETDIR]', 'TARGETDIR'),
    ('MeldExecutable', 2, r'SOFTWARE\Meld', 'Executable', '[TARGETDIR]Meld.exe', 'TARGETDIR'),
]

# Provide the locator and app search to give MSI the existing install directory
# for future upgrades.
reg_locator_table = [
    ('MeldInstallDirLocate', 2, r'SOFTWARE\Meld', 'InstallDir', 0)
]
app_search_table = [('TARGETDIR', 'MeldInstallDirLocate')]

# MSI database tables consumed by cx_Freeze's bdist_msi command.
msi_data = {
    'Registry': registry_table,
    'RegLocator': reg_locator_table,
    'AppSearch': app_search_table
}

bdist_msi_options = {
    # Stable upgrade code so new MSIs replace older installs in place.
    "upgrade_code": "{1d303789-b4e2-4d6e-9515-c301e155cd50}",
    "data": msi_data,
}
# Hand everything to cx_Freeze's setup(): `executables` drives the frozen
# Windows build, while the classic distutils metadata keeps sdist/install
# working. (A stray dataset artifact before `setup(` has been removed.)
setup(
    name="Meld",
    version=meld.conf.__version__,
    description='Visual diff and merge tool',
    author='The Meld project',
    author_email='meld-list@gnome.org',
    maintainer='Kai Willadsen',
    url='http://meldmerge.org',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: X11 Applications :: GTK',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)',
        'Programming Language :: Python',
        'Topic :: Desktop Environment :: Gnome',
        'Topic :: Software Development',
        'Topic :: Software Development :: Version Control',
    ],
    options={
        "build_exe": build_exe_options,
        "bdist_msi": bdist_msi_options,
    },
    executables=[
        Executable(
            "bin/meld",
            base="Win32GUI",          # GUI subsystem: no console window
            targetName="Meld.exe",
            shortcutName="Meld",
            shortcutDir="ProgramMenuFolder",
        ),
    ],
    packages=[
        'meld',
        'meld.ui',
        'meld.util',
        'meld.vc',
    ],
    package_data={
        'meld': ['README', 'COPYING', 'NEWS']
    },
    scripts=['bin/meld'],
    data_files=[
        ('share/man/man1',
         ['meld.1']
         ),
        ('share/doc/meld-' + meld.conf.__version__,
         ['COPYING', 'NEWS']
         ),
        ('share/meld',
         ['data/meld.css', 'data/meld-dark.css']
         ),
        ('share/meld/icons',
         glob.glob("data/icons/*.png") +
         glob.glob("data/icons/COPYING*")
         ),
        ('share/meld/ui',
         glob.glob("data/ui/*.ui") + glob.glob("data/ui/*.xml")
         ),
    ],
    cmdclass={
        "build_i18n": meld.build_helpers.build_i18n,
        "build_help": meld.build_helpers.build_help,
        "build_icons": meld.build_helpers.build_icons,
        "build_data": meld.build_helpers.build_data,
    }
)
utils.spec.ts | import * as utils from '../utils';
import '@testing-library/jest-dom/extend-expect';
describe('isCoveredByReact', () => {
it('should identify standard events as covered by React', () => {
expect(utils.isCoveredByReact('click')).toEqual(true);
});
it('should identify custom events as not covered by React', () => {
expect(utils.isCoveredByReact('change')).toEqual(true);
expect(utils.isCoveredByReact('ionchange')).toEqual(false);
});
});
describe('syncEvent', () => {
it('should add event on sync and readd on additional syncs', () => {
var div = document.createElement('div');
const addEventListener = jest.spyOn(div, 'addEventListener');
const removeEventListener = jest.spyOn(div, 'removeEventListener');
const ionClickCallback = jest.fn(); | utils.syncEvent(div, 'ionClick', ionClickCallback);
expect(removeEventListener).not.toHaveBeenCalled();
expect(addEventListener).toHaveBeenCalledWith('ionClick', expect.any(Function));
utils.syncEvent(div, 'ionClick', ionClickCallback);
expect(removeEventListener).toHaveBeenCalledWith('ionClick', expect.any(Function));
expect(addEventListener).toHaveBeenCalledWith('ionClick', expect.any(Function));
const event = new CustomEvent('ionClick', { detail: 'test' });
div.dispatchEvent(event);
expect(ionClickCallback).toHaveBeenCalled();
});
});
describe('attachProps', () => {
it('should pass props to a dom node', () => {
const onIonClickCallback = () => {};
var div = document.createElement('div');
utils.attachProps(div, {
children: [],
style: 'color: red',
ref: () => {},
onClick: () => {},
onIonClick: onIonClickCallback,
testprop: ['red'],
});
expect((div as any).testprop).toEqual(['red']);
expect(div).toHaveStyle(`display: block;`);
expect(Object.keys((div as any).__events)).toEqual(['ionClick']);
});
}); | random_line_split | |
cc_salt_minion.py | # vi: ts=4 expandtab
#
# Copyright (C) 2014 Amazon.com, Inc. or its affiliates.
#
# Author: Jeff Bauer <jbauer@rubic.com>
# Author: Andrew Jorgensen <ajorgens@amazon.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from cloudinit import util
# Note: see http://saltstack.org/topics/installation/
def handle(name, cfg, cloud, log, _args):
    """cloud-init module entry point: install and configure salt-minion.

    Driven by the 'salt_minion' cloud-config key: writes the minion config
    from its 'conf' sub-key, optionally installs a pre-generated key pair,
    then starts (or restarts) the salt-minion service.
    (Function name restored; every cloud-init config module exports `handle`.)
    """
    # If there isn't a salt key in the configuration don't do anything
    if 'salt_minion' not in cfg:
        log.debug(("Skipping module named %s,"
                   " no 'salt_minion' key in configuration"), name)
        return

    salt_cfg = cfg['salt_minion']

    # Start by installing the salt package ...
    cloud.distro.install_packages(('salt-minion',))

    # Ensure we can configure files at the right dir
    config_dir = salt_cfg.get("config_dir", '/etc/salt')
    util.ensure_dir(config_dir)

    # ... and then update the salt configuration
    if 'conf' in salt_cfg:
        # Add all sections from the conf object to /etc/salt/minion
        minion_config = os.path.join(config_dir, 'minion')
        minion_data = util.yaml_dumps(salt_cfg.get('conf'))
        util.write_file(minion_config, minion_data)

    # ... copy the key pair if specified
    if 'public_key' in salt_cfg and 'private_key' in salt_cfg:
        pki_dir = salt_cfg.get('pki_dir', '/etc/salt/pki')
        # Keys are secrets: create the pki dir owner-only. 0o77 is the
        # portable octal spelling (the bare literal 077 is a syntax error
        # on Python 3; 0o77 parses on Python 2.6+ and 3).
        with util.umask(0o77):
            util.ensure_dir(pki_dir)
        pub_name = os.path.join(pki_dir, 'minion.pub')
        pem_name = os.path.join(pki_dir, 'minion.pem')
        util.write_file(pub_name, salt_cfg['public_key'])
        util.write_file(pem_name, salt_cfg['private_key'])

    # start / restart salt-minion. if it was started, it needs to be restarted
    # for config change.
    if cloud.distro.service_running('salt-minion'):
        cloud.distro.service_control('salt-minion', 'restart', capture=False)
    else:
        cloud.distro.service_control('salt-minion', 'start', capture=False)
| handle | identifier_name |
cc_salt_minion.py | # vi: ts=4 expandtab
#
# Copyright (C) 2014 Amazon.com, Inc. or its affiliates.
#
# Author: Jeff Bauer <jbauer@rubic.com>
# Author: Andrew Jorgensen <ajorgens@amazon.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from cloudinit import util
# Note: see http://saltstack.org/topics/installation/
def handle(name, cfg, cloud, log, _args):
# If there isn't a salt key in the configuration don't do anything
if 'salt_minion' not in cfg:
log.debug(("Skipping module named %s,"
" no 'salt_minion' key in configuration"), name)
return
salt_cfg = cfg['salt_minion']
# Start by installing the salt package ...
cloud.distro.install_packages(('salt-minion',))
# Ensure we can configure files at the right dir
config_dir = salt_cfg.get("config_dir", '/etc/salt') | minion_config = os.path.join(config_dir, 'minion')
minion_data = util.yaml_dumps(salt_cfg.get('conf'))
util.write_file(minion_config, minion_data)
# ... copy the key pair if specified
if 'public_key' in salt_cfg and 'private_key' in salt_cfg:
pki_dir = salt_cfg.get('pki_dir', '/etc/salt/pki')
with util.umask(077):
util.ensure_dir(pki_dir)
pub_name = os.path.join(pki_dir, 'minion.pub')
pem_name = os.path.join(pki_dir, 'minion.pem')
util.write_file(pub_name, salt_cfg['public_key'])
util.write_file(pem_name, salt_cfg['private_key'])
# start / restart salt-minion. if it was started, it needs to be restarted
# for config change.
if cloud.distro.service_running('salt-minion'):
cloud.distro.service_control('salt-minion', 'restart', capture=False)
else:
cloud.distro.service_control('salt-minion', 'start', capture=False) | util.ensure_dir(config_dir)
# ... and then update the salt configuration
if 'conf' in salt_cfg:
# Add all sections from the conf object to /etc/salt/minion | random_line_split |
cc_salt_minion.py | # vi: ts=4 expandtab
#
# Copyright (C) 2014 Amazon.com, Inc. or its affiliates.
#
# Author: Jeff Bauer <jbauer@rubic.com>
# Author: Andrew Jorgensen <ajorgens@amazon.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from cloudinit import util
# Note: see http://saltstack.org/topics/installation/
def handle(name, cfg, cloud, log, _args):
# If there isn't a salt key in the configuration don't do anything
if 'salt_minion' not in cfg:
|
salt_cfg = cfg['salt_minion']
# Start by installing the salt package ...
cloud.distro.install_packages(('salt-minion',))
# Ensure we can configure files at the right dir
config_dir = salt_cfg.get("config_dir", '/etc/salt')
util.ensure_dir(config_dir)
# ... and then update the salt configuration
if 'conf' in salt_cfg:
# Add all sections from the conf object to /etc/salt/minion
minion_config = os.path.join(config_dir, 'minion')
minion_data = util.yaml_dumps(salt_cfg.get('conf'))
util.write_file(minion_config, minion_data)
# ... copy the key pair if specified
if 'public_key' in salt_cfg and 'private_key' in salt_cfg:
pki_dir = salt_cfg.get('pki_dir', '/etc/salt/pki')
with util.umask(077):
util.ensure_dir(pki_dir)
pub_name = os.path.join(pki_dir, 'minion.pub')
pem_name = os.path.join(pki_dir, 'minion.pem')
util.write_file(pub_name, salt_cfg['public_key'])
util.write_file(pem_name, salt_cfg['private_key'])
# start / restart salt-minion. if it was started, it needs to be restarted
# for config change.
if cloud.distro.service_running('salt-minion'):
cloud.distro.service_control('salt-minion', 'restart', capture=False)
else:
cloud.distro.service_control('salt-minion', 'start', capture=False)
| log.debug(("Skipping module named %s,"
" no 'salt_minion' key in configuration"), name)
return | conditional_block |
cc_salt_minion.py | # vi: ts=4 expandtab
#
# Copyright (C) 2014 Amazon.com, Inc. or its affiliates.
#
# Author: Jeff Bauer <jbauer@rubic.com>
# Author: Andrew Jorgensen <ajorgens@amazon.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3, as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from cloudinit import util
# Note: see http://saltstack.org/topics/installation/
def handle(name, cfg, cloud, log, _args):
    """cloud-init module entry point: install and configure salt-minion.

    Driven by the 'salt_minion' cloud-config key: writes the minion config
    from its 'conf' sub-key, optionally installs a pre-generated key pair,
    then starts (or restarts) the salt-minion service.
    """
    # If there isn't a salt key in the configuration don't do anything
    if 'salt_minion' not in cfg:
        log.debug(("Skipping module named %s,"
                   " no 'salt_minion' key in configuration"), name)
        return

    salt_cfg = cfg['salt_minion']

    # Start by installing the salt package ...
    cloud.distro.install_packages(('salt-minion',))

    # Ensure we can configure files at the right dir
    config_dir = salt_cfg.get("config_dir", '/etc/salt')
    util.ensure_dir(config_dir)

    # ... and then update the salt configuration
    if 'conf' in salt_cfg:
        # Add all sections from the conf object to /etc/salt/minion
        minion_config = os.path.join(config_dir, 'minion')
        minion_data = util.yaml_dumps(salt_cfg.get('conf'))
        util.write_file(minion_config, minion_data)

    # ... copy the key pair if specified
    if 'public_key' in salt_cfg and 'private_key' in salt_cfg:
        pki_dir = salt_cfg.get('pki_dir', '/etc/salt/pki')
        # Keys are secrets: create the pki dir owner-only. 0o77 is the
        # portable octal spelling (the bare literal 077 is a syntax error
        # on Python 3; 0o77 parses on Python 2.6+ and 3).
        with util.umask(0o77):
            util.ensure_dir(pki_dir)
        pub_name = os.path.join(pki_dir, 'minion.pub')
        pem_name = os.path.join(pki_dir, 'minion.pem')
        util.write_file(pub_name, salt_cfg['public_key'])
        util.write_file(pem_name, salt_cfg['private_key'])

    # start / restart salt-minion. if it was started, it needs to be restarted
    # for config change.
    if cloud.distro.service_running('salt-minion'):
        cloud.distro.service_control('salt-minion', 'restart', capture=False)
    else:
        cloud.distro.service_control('salt-minion', 'start', capture=False)
preferencesEditorInput.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { OS } from 'vs/base/common/platform';
import { URI } from 'vs/base/common/uri';
import { ITextModelService } from 'vs/editor/common/services/resolverService';
import * as nls from 'vs/nls';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { EditorInput, SideBySideEditorInput, Verbosity } from 'vs/workbench/common/editor';
import { ResourceEditorInput } from 'vs/workbench/common/editor/resourceEditorInput';
import { IHashService } from 'vs/workbench/services/hash/common/hashService'; |
export class PreferencesEditorInput extends SideBySideEditorInput {
static readonly ID: string = 'workbench.editorinputs.preferencesEditorInput';
getTypeId(): string {
return PreferencesEditorInput.ID;
}
getTitle(verbosity: Verbosity): string {
return this.master.getTitle(verbosity);
}
}
export class DefaultPreferencesEditorInput extends ResourceEditorInput {
static readonly ID = 'workbench.editorinputs.defaultpreferences';
constructor(defaultSettingsResource: URI,
@ITextModelService textModelResolverService: ITextModelService,
@IHashService hashService: IHashService
) {
super(nls.localize('settingsEditorName', "Default Settings"), '', defaultSettingsResource, textModelResolverService, hashService);
}
getTypeId(): string {
return DefaultPreferencesEditorInput.ID;
}
matches(other: any): boolean {
if (other instanceof DefaultPreferencesEditorInput) {
return true;
}
if (!super.matches(other)) {
return false;
}
return true;
}
}
export class KeybindingsEditorInput extends EditorInput {
static readonly ID: string = 'workbench.input.keybindings';
readonly keybindingsModel: KeybindingsEditorModel;
constructor(@IInstantiationService instantiationService: IInstantiationService) {
super();
this.keybindingsModel = instantiationService.createInstance(KeybindingsEditorModel, OS);
}
getTypeId(): string {
return KeybindingsEditorInput.ID;
}
getName(): string {
return nls.localize('keybindingsInputName', "Keyboard Shortcuts");
}
resolve(): Promise<KeybindingsEditorModel> {
return Promise.resolve(this.keybindingsModel);
}
matches(otherInput: any): boolean {
return otherInput instanceof KeybindingsEditorInput;
}
}
export class SettingsEditor2Input extends EditorInput {
static readonly ID: string = 'workbench.input.settings2';
private readonly _settingsModel: Settings2EditorModel;
private resource: URI = URI.from({
scheme: 'vscode-settings',
path: `settingseditor`
});
constructor(
@IPreferencesService _preferencesService: IPreferencesService,
) {
super();
this._settingsModel = _preferencesService.createSettings2EditorModel();
}
matches(otherInput: any): boolean {
return otherInput instanceof SettingsEditor2Input;
}
getTypeId(): string {
return SettingsEditor2Input.ID;
}
getName(): string {
return nls.localize('settingsEditor2InputName', "Settings");
}
resolve(): Promise<Settings2EditorModel> {
return Promise.resolve(this._settingsModel);
}
getResource(): URI {
return this.resource;
}
} | import { KeybindingsEditorModel } from 'vs/workbench/services/preferences/common/keybindingsEditorModel';
import { IPreferencesService } from 'vs/workbench/services/preferences/common/preferences';
import { Settings2EditorModel } from 'vs/workbench/services/preferences/common/preferencesModels'; | random_line_split |
preferencesEditorInput.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { OS } from 'vs/base/common/platform';
import { URI } from 'vs/base/common/uri';
import { ITextModelService } from 'vs/editor/common/services/resolverService';
import * as nls from 'vs/nls';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { EditorInput, SideBySideEditorInput, Verbosity } from 'vs/workbench/common/editor';
import { ResourceEditorInput } from 'vs/workbench/common/editor/resourceEditorInput';
import { IHashService } from 'vs/workbench/services/hash/common/hashService';
import { KeybindingsEditorModel } from 'vs/workbench/services/preferences/common/keybindingsEditorModel';
import { IPreferencesService } from 'vs/workbench/services/preferences/common/preferences';
import { Settings2EditorModel } from 'vs/workbench/services/preferences/common/preferencesModels';
export class PreferencesEditorInput extends SideBySideEditorInput {
static readonly ID: string = 'workbench.editorinputs.preferencesEditorInput';
getTypeId(): string {
return PreferencesEditorInput.ID;
}
getTitle(verbosity: Verbosity): string {
return this.master.getTitle(verbosity);
}
}
export class DefaultPreferencesEditorInput extends ResourceEditorInput {
static readonly ID = 'workbench.editorinputs.defaultpreferences';
constructor(defaultSettingsResource: URI,
@ITextModelService textModelResolverService: ITextModelService,
@IHashService hashService: IHashService
) {
super(nls.localize('settingsEditorName', "Default Settings"), '', defaultSettingsResource, textModelResolverService, hashService);
}
getTypeId(): string {
return DefaultPreferencesEditorInput.ID;
}
matches(other: any): boolean {
if (other instanceof DefaultPreferencesEditorInput) |
if (!super.matches(other)) {
return false;
}
return true;
}
}
export class KeybindingsEditorInput extends EditorInput {
static readonly ID: string = 'workbench.input.keybindings';
readonly keybindingsModel: KeybindingsEditorModel;
constructor(@IInstantiationService instantiationService: IInstantiationService) {
super();
this.keybindingsModel = instantiationService.createInstance(KeybindingsEditorModel, OS);
}
getTypeId(): string {
return KeybindingsEditorInput.ID;
}
getName(): string {
return nls.localize('keybindingsInputName', "Keyboard Shortcuts");
}
resolve(): Promise<KeybindingsEditorModel> {
return Promise.resolve(this.keybindingsModel);
}
matches(otherInput: any): boolean {
return otherInput instanceof KeybindingsEditorInput;
}
}
export class SettingsEditor2Input extends EditorInput {
static readonly ID: string = 'workbench.input.settings2';
private readonly _settingsModel: Settings2EditorModel;
private resource: URI = URI.from({
scheme: 'vscode-settings',
path: `settingseditor`
});
constructor(
@IPreferencesService _preferencesService: IPreferencesService,
) {
super();
this._settingsModel = _preferencesService.createSettings2EditorModel();
}
matches(otherInput: any): boolean {
return otherInput instanceof SettingsEditor2Input;
}
getTypeId(): string {
return SettingsEditor2Input.ID;
}
getName(): string {
return nls.localize('settingsEditor2InputName', "Settings");
}
resolve(): Promise<Settings2EditorModel> {
return Promise.resolve(this._settingsModel);
}
getResource(): URI {
return this.resource;
}
}
| {
return true;
} | conditional_block |
preferencesEditorInput.ts | /*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { OS } from 'vs/base/common/platform';
import { URI } from 'vs/base/common/uri';
import { ITextModelService } from 'vs/editor/common/services/resolverService';
import * as nls from 'vs/nls';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { EditorInput, SideBySideEditorInput, Verbosity } from 'vs/workbench/common/editor';
import { ResourceEditorInput } from 'vs/workbench/common/editor/resourceEditorInput';
import { IHashService } from 'vs/workbench/services/hash/common/hashService';
import { KeybindingsEditorModel } from 'vs/workbench/services/preferences/common/keybindingsEditorModel';
import { IPreferencesService } from 'vs/workbench/services/preferences/common/preferences';
import { Settings2EditorModel } from 'vs/workbench/services/preferences/common/preferencesModels';
// Side-by-side editor input for the preferences editor; the title shown is
// delegated to the master side of the pair.
export class PreferencesEditorInput extends SideBySideEditorInput {
	static readonly ID: string = 'workbench.editorinputs.preferencesEditorInput';

	getTypeId(): string {
		return PreferencesEditorInput.ID;
	}

	getTitle(verbosity: Verbosity): string {
		// Delegate to the master input so the tab reflects that side.
		return this.master.getTitle(verbosity);
	}
}
// Read-only resource input showing the built-in default settings.
export class DefaultPreferencesEditorInput extends ResourceEditorInput {
	static readonly ID = 'workbench.editorinputs.defaultpreferences';

	constructor(defaultSettingsResource: URI,
		@ITextModelService textModelResolverService: ITextModelService,
		@IHashService hashService: IHashService
	) {
		super(nls.localize('settingsEditorName', "Default Settings"), '', defaultSettingsResource, textModelResolverService, hashService);
	}

	getTypeId(): string {
		return DefaultPreferencesEditorInput.ID;
	}

	matches(other: any): boolean {
		// Every DefaultPreferencesEditorInput matches any other; anything else
		// is deferred to the base-class comparison.
		return other instanceof DefaultPreferencesEditorInput || super.matches(other);
	}
}
// Editor input for the keyboard shortcuts editor; resolves to a
// KeybindingsEditorModel built for the current OS.
export class KeybindingsEditorInput extends EditorInput {
	static readonly ID: string = 'workbench.input.keybindings';
	// Model created eagerly in the constructor via dependency injection.
	readonly keybindingsModel: KeybindingsEditorModel;

	constructor(@IInstantiationService instantiationService: IInstantiationService) {
		super();
		this.keybindingsModel = instantiationService.createInstance(KeybindingsEditorModel, OS);
	}

	getTypeId(): string {
		return KeybindingsEditorInput.ID;
	}

	getName(): string {
		return nls.localize('keybindingsInputName', "Keyboard Shortcuts");
	}

	resolve(): Promise<KeybindingsEditorModel> {
		return Promise.resolve(this.keybindingsModel);
	}

	matches(otherInput: any): boolean {
		// All keybindings inputs compare equal: there is only one such editor.
		return otherInput instanceof KeybindingsEditorInput;
	}
}
export class SettingsEditor2Input extends EditorInput {
static readonly ID: string = 'workbench.input.settings2';
private readonly _settingsModel: Settings2EditorModel;
private resource: URI = URI.from({
scheme: 'vscode-settings',
path: `settingseditor`
});
constructor(
@IPreferencesService _preferencesService: IPreferencesService,
) {
super();
this._settingsModel = _preferencesService.createSettings2EditorModel();
}
matches(otherInput: any): boolean {
return otherInput instanceof SettingsEditor2Input;
}
| (): string {
return SettingsEditor2Input.ID;
}
getName(): string {
return nls.localize('settingsEditor2InputName', "Settings");
}
resolve(): Promise<Settings2EditorModel> {
return Promise.resolve(this._settingsModel);
}
getResource(): URI {
return this.resource;
}
}
| getTypeId | identifier_name |
test_contact_compare.py | __author__ = 'Keiran'
from model.contact import Contact
import pytest
def | (app, orm):
with pytest.allure.step('Given a sorted contact list from DB'):
contacts_from_db = orm.get_contact_list()
sorted_contacts_from_db = list(sorted(contacts_from_db, key=Contact.id_or_max))
with pytest.allure.step('Given a sorted contact list from home page'):
contacts_from_home_page = app.contact.get_contact_list()
sorted_contacts_from_home_page = list(sorted(contacts_from_home_page, key=Contact.id_or_max))
with pytest.allure.step('Then I compare this lists'):
for index in range(len(sorted_contacts_from_db)):
assert sorted_contacts_from_db[index] == sorted_contacts_from_home_page[index]
assert sorted_contacts_from_db[index].join_mails() == sorted_contacts_from_home_page[index].all_mails
assert sorted_contacts_from_db[index].join_phones() == sorted_contacts_from_home_page[index].all_phones
| test_contact_compare | identifier_name |
test_contact_compare.py | __author__ = 'Keiran'
from model.contact import Contact
import pytest
def test_contact_compare(app, orm):
with pytest.allure.step('Given a sorted contact list from DB'):
contacts_from_db = orm.get_contact_list()
sorted_contacts_from_db = list(sorted(contacts_from_db, key=Contact.id_or_max))
with pytest.allure.step('Given a sorted contact list from home page'):
contacts_from_home_page = app.contact.get_contact_list()
sorted_contacts_from_home_page = list(sorted(contacts_from_home_page, key=Contact.id_or_max))
with pytest.allure.step('Then I compare this lists'):
for index in range(len(sorted_contacts_from_db)):
| assert sorted_contacts_from_db[index] == sorted_contacts_from_home_page[index]
assert sorted_contacts_from_db[index].join_mails() == sorted_contacts_from_home_page[index].all_mails
assert sorted_contacts_from_db[index].join_phones() == sorted_contacts_from_home_page[index].all_phones | conditional_block | |
test_contact_compare.py | __author__ = 'Keiran'
from model.contact import Contact
import pytest
def test_contact_compare(app, orm):
with pytest.allure.step('Given a sorted contact list from DB'):
contacts_from_db = orm.get_contact_list()
sorted_contacts_from_db = list(sorted(contacts_from_db, key=Contact.id_or_max))
with pytest.allure.step('Given a sorted contact list from home page'):
contacts_from_home_page = app.contact.get_contact_list()
sorted_contacts_from_home_page = list(sorted(contacts_from_home_page, key=Contact.id_or_max))
with pytest.allure.step('Then I compare this lists'):
for index in range(len(sorted_contacts_from_db)): | assert sorted_contacts_from_db[index] == sorted_contacts_from_home_page[index]
assert sorted_contacts_from_db[index].join_mails() == sorted_contacts_from_home_page[index].all_mails
assert sorted_contacts_from_db[index].join_phones() == sorted_contacts_from_home_page[index].all_phones | random_line_split | |
test_contact_compare.py | __author__ = 'Keiran'
from model.contact import Contact
import pytest
def test_contact_compare(app, orm):
| with pytest.allure.step('Given a sorted contact list from DB'):
contacts_from_db = orm.get_contact_list()
sorted_contacts_from_db = list(sorted(contacts_from_db, key=Contact.id_or_max))
with pytest.allure.step('Given a sorted contact list from home page'):
contacts_from_home_page = app.contact.get_contact_list()
sorted_contacts_from_home_page = list(sorted(contacts_from_home_page, key=Contact.id_or_max))
with pytest.allure.step('Then I compare this lists'):
for index in range(len(sorted_contacts_from_db)):
assert sorted_contacts_from_db[index] == sorted_contacts_from_home_page[index]
assert sorted_contacts_from_db[index].join_mails() == sorted_contacts_from_home_page[index].all_mails
assert sorted_contacts_from_db[index].join_phones() == sorted_contacts_from_home_page[index].all_phones | identifier_body | |
target.rs | use std::str::FromStr;
use once_cell::sync::Lazy;
use regex::Regex;
#[derive(Debug)]
pub enum Target {
Amd64Linux,
Arm64Linux,
ArmLinux,
ArmV7Linux,
ArmV7LinuxHardFloat,
}
impl Target {
pub fn try_parse_env() -> Result<Target, <Self as FromStr>::Err> {
FromStr::from_str(env!("BUILD_TARGET"))
}
}
impl FromStr for Target {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
x if x.starts_with("x86_64-unknown-linux-") => Ok(Target::Amd64Linux),
x if x.starts_with("aarch64-unknown-linux-") => Ok(Target::Arm64Linux),
x if ARMV7_HARD_FLOAT.is_match(x) => Ok(Target::ArmV7LinuxHardFloat),
x if x.starts_with("armv7-unknown-linux") => Ok(Target::ArmV7Linux),
"arm-unknown-linux-musleabi" => Ok(Target::ArmLinux),
_ => Err(s.to_string()),
}
}
}
static ARMV7_HARD_FLOAT: Lazy<Regex> =
Lazy::new(|| Regex::new(r"armv7-unknown-linux.*hf").unwrap());
| #[test]
fn test_armv7_hard_float_matcher() {
assert!(ARMV7_HARD_FLOAT.is_match("armv7-unknown-linux-gnueabihf"));
assert!(ARMV7_HARD_FLOAT.is_match("armv7-unknown-linux-musleabihf"));
}
} | #[cfg(test)]
mod tests {
use super::*;
| random_line_split |
target.rs | use std::str::FromStr;
use once_cell::sync::Lazy;
use regex::Regex;
#[derive(Debug)]
pub enum Target {
Amd64Linux,
Arm64Linux,
ArmLinux,
ArmV7Linux,
ArmV7LinuxHardFloat,
}
impl Target {
pub fn try_parse_env() -> Result<Target, <Self as FromStr>::Err> {
FromStr::from_str(env!("BUILD_TARGET"))
}
}
impl FromStr for Target {
type Err = String;
fn | (s: &str) -> Result<Self, Self::Err> {
match s {
x if x.starts_with("x86_64-unknown-linux-") => Ok(Target::Amd64Linux),
x if x.starts_with("aarch64-unknown-linux-") => Ok(Target::Arm64Linux),
x if ARMV7_HARD_FLOAT.is_match(x) => Ok(Target::ArmV7LinuxHardFloat),
x if x.starts_with("armv7-unknown-linux") => Ok(Target::ArmV7Linux),
"arm-unknown-linux-musleabi" => Ok(Target::ArmLinux),
_ => Err(s.to_string()),
}
}
}
static ARMV7_HARD_FLOAT: Lazy<Regex> =
Lazy::new(|| Regex::new(r"armv7-unknown-linux.*hf").unwrap());
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_armv7_hard_float_matcher() {
assert!(ARMV7_HARD_FLOAT.is_match("armv7-unknown-linux-gnueabihf"));
assert!(ARMV7_HARD_FLOAT.is_match("armv7-unknown-linux-musleabihf"));
}
}
| from_str | identifier_name |
target.rs | use std::str::FromStr;
use once_cell::sync::Lazy;
use regex::Regex;
#[derive(Debug)]
pub enum Target {
Amd64Linux,
Arm64Linux,
ArmLinux,
ArmV7Linux,
ArmV7LinuxHardFloat,
}
impl Target {
pub fn try_parse_env() -> Result<Target, <Self as FromStr>::Err> {
FromStr::from_str(env!("BUILD_TARGET"))
}
}
impl FromStr for Target {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> |
}
static ARMV7_HARD_FLOAT: Lazy<Regex> =
Lazy::new(|| Regex::new(r"armv7-unknown-linux.*hf").unwrap());
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_armv7_hard_float_matcher() {
assert!(ARMV7_HARD_FLOAT.is_match("armv7-unknown-linux-gnueabihf"));
assert!(ARMV7_HARD_FLOAT.is_match("armv7-unknown-linux-musleabihf"));
}
}
| {
match s {
x if x.starts_with("x86_64-unknown-linux-") => Ok(Target::Amd64Linux),
x if x.starts_with("aarch64-unknown-linux-") => Ok(Target::Arm64Linux),
x if ARMV7_HARD_FLOAT.is_match(x) => Ok(Target::ArmV7LinuxHardFloat),
x if x.starts_with("armv7-unknown-linux") => Ok(Target::ArmV7Linux),
"arm-unknown-linux-musleabi" => Ok(Target::ArmLinux),
_ => Err(s.to_string()),
}
} | identifier_body |
test_fragments.py | # -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 fileencoding=utf-8
#
# MDAnalysis --- https://www.mdanalysis.org
# Copyright (c) 2006-2017 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
# doi: 10.25080/majora-629e541a-00e
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
import numpy as np
from numpy.testing import (
assert_equal,
)
import pytest
import MDAnalysis as mda
from MDAnalysis.core.topologyattrs import Bonds
from MDAnalysis.core import groups
from MDAnalysis import NoDataError
from MDAnalysisTests import make_Universe
from MDAnalysisTests.datafiles import TPR, XTC
# Also used in topology/test_guessers
def make_starshape():
u = make_Universe()
bonds = []
for seg in range(5):
segbase = seg * 25
for res in range(5):
# offset for atoms in this res
base = segbase + 5 * res
bonds.append((0 + base, 1 + base))
bonds.append((1 + base, 2 + base))
bonds.append((1 + base, 3 + base))
bonds.append((1 + base, 4 + base))
if not res == 4: # last res doesn't link onwards
bonds.append((4 + base, 5 + base))
u.add_TopologyAttr(Bonds(bonds))
return u
def case1():
return make_starshape()
def case2():
u = make_Universe()
bonds = []
for seg in range(5):
segbase = seg * 25
for res in range(5):
# offset for atoms in this res
base = segbase + 5 * res
bonds.append((0 + base, 1 + base))
bonds.append((1 + base, 2 + base))
bonds.append((2 + base, 3 + base))
bonds.append((3 + base, 4 + base))
bonds.append((1 + base, 4 + base))
if not res == 4: # last res doesn't link onwards
bonds.append((0 + base, 5 + base))
u.add_TopologyAttr(Bonds(bonds))
return u
class TestFragments(object):
r"""Use 125 atom test Universe
5 segments of 5 residues of 5 atoms
Case1
-----
Star shapes to try and test the branching prediction
o | o | o
| | | | |
o-o-o-|-o-o-o-|-o-o-o
| | | | |
o | o |x3 o
Case2
-----
4-ring pendants to test cyclic conditions
o------o------o
| | |
o o o
/ \ / \ / \
o o o o o o
\ / \ / \ /
o o o
Test ring molecules?
"""
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_total_frags(self, u):
fragments = u.atoms.fragments
fragindices = u.atoms.fragindices
# should be 5 fragments of 25 atoms
assert len(fragments) == 5
for frag in fragments:
assert len(frag) == 25
# number of fragindices must correspond to number of atoms:
assert len(fragindices) == len(u.atoms)
# number of unique fragindices must correspond to number of fragments:
assert len(np.unique(fragindices)) == len(fragments)
# check fragindices dtype:
assert fragindices.dtype == np.intp
#check n_fragments
assert u.atoms.n_fragments == len(fragments)
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_frag_external_ordering(self, u):
# check fragments and fragindices are sorted correctly:
for i, frag in enumerate(u.atoms.fragments):
assert frag[0].index == i * 25
assert np.unique(frag.fragindices)[0] == i
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_frag_internal_ordering(self, u):
# check atoms are sorted within fragments and have the same fragindex:
for i, frag in enumerate(u.atoms.fragments):
assert_equal(frag.ix, np.arange(25) + i * 25)
assert len(np.unique(frag.fragindices)) == 1
assert frag.n_fragments == 1
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_atom_access(self, u):
# check atom can access fragment and fragindex:
for at in (u.atoms[0], u.atoms[76], u.atoms[111]):
frag = at.fragment
assert isinstance(frag, groups.AtomGroup)
assert len(frag) == 25
assert at in frag
fragindex = at.fragindex
assert isinstance(fragindex, int)
with pytest.raises(AttributeError):
x = at.n_fragments
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_atomgroup_access(self, u):
# check atomgroup can access fragments
# first 60 atoms have 3 fragments, given as tuple
# each fragment should still be 25 atoms
ag = u.atoms[:60]
frags = ag.fragments
assert len(frags) == 3
assert isinstance(frags, tuple)
for frag in frags:
assert len(frag) == 25
# same for fragindices:
fragindices = ag.fragindices
assert len(fragindices) == 60
assert len(np.unique(fragindices)) == 3
assert ag.n_fragments == 3
def test_empty_atomgroup_access(self):
ag = mda.AtomGroup([], case1()) | assert ag.n_fragments == 0
def test_atomgroup_fragments_nobonds_NDE(self):
# should raise NDE
u = make_Universe()
ag = u.atoms[:10]
with pytest.raises(NoDataError):
getattr(ag, 'fragments')
with pytest.raises(NoDataError):
getattr(ag, 'fragindices')
with pytest.raises(NoDataError):
getattr(ag, 'n_fragments')
def test_atom_fragment_nobonds_NDE(self):
# should raise NDE
u = make_Universe()
with pytest.raises(NoDataError):
getattr(u.atoms[10], 'fragment')
with pytest.raises(NoDataError):
getattr(u.atoms[10], 'fragindex')
def test_atomgroup_fragment_cache_invalidation_bond_making(self):
u = case1()
fgs = u.atoms.fragments
assert fgs is u.atoms._cache['fragments']
assert u.atoms._cache_key in u._cache['_valid']['fragments']
u.add_bonds((fgs[0][-1] + fgs[1][0],)) # should trigger invalidation
assert 'fragments' not in u._cache['_valid']
assert len(fgs) > len(u.atoms.fragments) # recomputed
def test_atomgroup_fragment_cache_invalidation_bond_breaking(self):
u = case1()
fgs = u.atoms.fragments
assert fgs is u.atoms._cache['fragments']
assert u.atoms._cache_key in u._cache['_valid']['fragments']
u.delete_bonds((u.atoms.bonds[3],)) # should trigger invalidation
assert 'fragments' not in u._cache['_valid']
assert len(fgs) < len(u.atoms.fragments) # recomputed
def test_tpr_fragments():
ag = mda.Universe(TPR, XTC).atoms
frags = ag.fragments
fragindices = ag.fragindices
assert len(frags[0]) == 3341
assert len(fragindices) == len(ag)
assert len(np.unique(fragindices)) == len(frags)
assert ag.n_fragments == len(frags) | assert ag.fragments == tuple()
assert_equal(ag.fragindices, np.array([], dtype=np.int64)) | random_line_split |
test_fragments.py | # -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 fileencoding=utf-8
#
# MDAnalysis --- https://www.mdanalysis.org
# Copyright (c) 2006-2017 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
# doi: 10.25080/majora-629e541a-00e
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
import numpy as np
from numpy.testing import (
assert_equal,
)
import pytest
import MDAnalysis as mda
from MDAnalysis.core.topologyattrs import Bonds
from MDAnalysis.core import groups
from MDAnalysis import NoDataError
from MDAnalysisTests import make_Universe
from MDAnalysisTests.datafiles import TPR, XTC
# Also used in topology/test_guessers
def make_starshape():
u = make_Universe()
bonds = []
for seg in range(5):
segbase = seg * 25
for res in range(5):
# offset for atoms in this res
base = segbase + 5 * res
bonds.append((0 + base, 1 + base))
bonds.append((1 + base, 2 + base))
bonds.append((1 + base, 3 + base))
bonds.append((1 + base, 4 + base))
if not res == 4: # last res doesn't link onwards
|
u.add_TopologyAttr(Bonds(bonds))
return u
def case1():
return make_starshape()
def case2():
u = make_Universe()
bonds = []
for seg in range(5):
segbase = seg * 25
for res in range(5):
# offset for atoms in this res
base = segbase + 5 * res
bonds.append((0 + base, 1 + base))
bonds.append((1 + base, 2 + base))
bonds.append((2 + base, 3 + base))
bonds.append((3 + base, 4 + base))
bonds.append((1 + base, 4 + base))
if not res == 4: # last res doesn't link onwards
bonds.append((0 + base, 5 + base))
u.add_TopologyAttr(Bonds(bonds))
return u
class TestFragments(object):
r"""Use 125 atom test Universe
5 segments of 5 residues of 5 atoms
Case1
-----
Star shapes to try and test the branching prediction
o | o | o
| | | | |
o-o-o-|-o-o-o-|-o-o-o
| | | | |
o | o |x3 o
Case2
-----
4-ring pendants to test cyclic conditions
o------o------o
| | |
o o o
/ \ / \ / \
o o o o o o
\ / \ / \ /
o o o
Test ring molecules?
"""
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_total_frags(self, u):
fragments = u.atoms.fragments
fragindices = u.atoms.fragindices
# should be 5 fragments of 25 atoms
assert len(fragments) == 5
for frag in fragments:
assert len(frag) == 25
# number of fragindices must correspond to number of atoms:
assert len(fragindices) == len(u.atoms)
# number of unique fragindices must correspond to number of fragments:
assert len(np.unique(fragindices)) == len(fragments)
# check fragindices dtype:
assert fragindices.dtype == np.intp
#check n_fragments
assert u.atoms.n_fragments == len(fragments)
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_frag_external_ordering(self, u):
# check fragments and fragindices are sorted correctly:
for i, frag in enumerate(u.atoms.fragments):
assert frag[0].index == i * 25
assert np.unique(frag.fragindices)[0] == i
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_frag_internal_ordering(self, u):
# check atoms are sorted within fragments and have the same fragindex:
for i, frag in enumerate(u.atoms.fragments):
assert_equal(frag.ix, np.arange(25) + i * 25)
assert len(np.unique(frag.fragindices)) == 1
assert frag.n_fragments == 1
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_atom_access(self, u):
# check atom can access fragment and fragindex:
for at in (u.atoms[0], u.atoms[76], u.atoms[111]):
frag = at.fragment
assert isinstance(frag, groups.AtomGroup)
assert len(frag) == 25
assert at in frag
fragindex = at.fragindex
assert isinstance(fragindex, int)
with pytest.raises(AttributeError):
x = at.n_fragments
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_atomgroup_access(self, u):
# check atomgroup can access fragments
# first 60 atoms have 3 fragments, given as tuple
# each fragment should still be 25 atoms
ag = u.atoms[:60]
frags = ag.fragments
assert len(frags) == 3
assert isinstance(frags, tuple)
for frag in frags:
assert len(frag) == 25
# same for fragindices:
fragindices = ag.fragindices
assert len(fragindices) == 60
assert len(np.unique(fragindices)) == 3
assert ag.n_fragments == 3
def test_empty_atomgroup_access(self):
ag = mda.AtomGroup([], case1())
assert ag.fragments == tuple()
assert_equal(ag.fragindices, np.array([], dtype=np.int64))
assert ag.n_fragments == 0
def test_atomgroup_fragments_nobonds_NDE(self):
# should raise NDE
u = make_Universe()
ag = u.atoms[:10]
with pytest.raises(NoDataError):
getattr(ag, 'fragments')
with pytest.raises(NoDataError):
getattr(ag, 'fragindices')
with pytest.raises(NoDataError):
getattr(ag, 'n_fragments')
def test_atom_fragment_nobonds_NDE(self):
# should raise NDE
u = make_Universe()
with pytest.raises(NoDataError):
getattr(u.atoms[10], 'fragment')
with pytest.raises(NoDataError):
getattr(u.atoms[10], 'fragindex')
def test_atomgroup_fragment_cache_invalidation_bond_making(self):
u = case1()
fgs = u.atoms.fragments
assert fgs is u.atoms._cache['fragments']
assert u.atoms._cache_key in u._cache['_valid']['fragments']
u.add_bonds((fgs[0][-1] + fgs[1][0],)) # should trigger invalidation
assert 'fragments' not in u._cache['_valid']
assert len(fgs) > len(u.atoms.fragments) # recomputed
def test_atomgroup_fragment_cache_invalidation_bond_breaking(self):
u = case1()
fgs = u.atoms.fragments
assert fgs is u.atoms._cache['fragments']
assert u.atoms._cache_key in u._cache['_valid']['fragments']
u.delete_bonds((u.atoms.bonds[3],)) # should trigger invalidation
assert 'fragments' not in u._cache['_valid']
assert len(fgs) < len(u.atoms.fragments) # recomputed
def test_tpr_fragments():
ag = mda.Universe(TPR, XTC).atoms
frags = ag.fragments
fragindices = ag.fragindices
assert len(frags[0]) == 3341
assert len(fragindices) == len(ag)
assert len(np.unique(fragindices)) == len(frags)
assert ag.n_fragments == len(frags)
| bonds.append((4 + base, 5 + base)) | conditional_block |
test_fragments.py | # -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 fileencoding=utf-8
#
# MDAnalysis --- https://www.mdanalysis.org
# Copyright (c) 2006-2017 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
# doi: 10.25080/majora-629e541a-00e
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
import numpy as np
from numpy.testing import (
assert_equal,
)
import pytest
import MDAnalysis as mda
from MDAnalysis.core.topologyattrs import Bonds
from MDAnalysis.core import groups
from MDAnalysis import NoDataError
from MDAnalysisTests import make_Universe
from MDAnalysisTests.datafiles import TPR, XTC
# Also used in topology/test_guessers
def make_starshape():
u = make_Universe()
bonds = []
for seg in range(5):
segbase = seg * 25
for res in range(5):
# offset for atoms in this res
base = segbase + 5 * res
bonds.append((0 + base, 1 + base))
bonds.append((1 + base, 2 + base))
bonds.append((1 + base, 3 + base))
bonds.append((1 + base, 4 + base))
if not res == 4: # last res doesn't link onwards
bonds.append((4 + base, 5 + base))
u.add_TopologyAttr(Bonds(bonds))
return u
def case1():
return make_starshape()
def case2():
|
class TestFragments(object):
r"""Use 125 atom test Universe
5 segments of 5 residues of 5 atoms
Case1
-----
Star shapes to try and test the branching prediction
o | o | o
| | | | |
o-o-o-|-o-o-o-|-o-o-o
| | | | |
o | o |x3 o
Case2
-----
4-ring pendants to test cyclic conditions
o------o------o
| | |
o o o
/ \ / \ / \
o o o o o o
\ / \ / \ /
o o o
Test ring molecules?
"""
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_total_frags(self, u):
fragments = u.atoms.fragments
fragindices = u.atoms.fragindices
# should be 5 fragments of 25 atoms
assert len(fragments) == 5
for frag in fragments:
assert len(frag) == 25
# number of fragindices must correspond to number of atoms:
assert len(fragindices) == len(u.atoms)
# number of unique fragindices must correspond to number of fragments:
assert len(np.unique(fragindices)) == len(fragments)
# check fragindices dtype:
assert fragindices.dtype == np.intp
#check n_fragments
assert u.atoms.n_fragments == len(fragments)
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_frag_external_ordering(self, u):
# check fragments and fragindices are sorted correctly:
for i, frag in enumerate(u.atoms.fragments):
assert frag[0].index == i * 25
assert np.unique(frag.fragindices)[0] == i
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_frag_internal_ordering(self, u):
# check atoms are sorted within fragments and have the same fragindex:
for i, frag in enumerate(u.atoms.fragments):
assert_equal(frag.ix, np.arange(25) + i * 25)
assert len(np.unique(frag.fragindices)) == 1
assert frag.n_fragments == 1
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_atom_access(self, u):
# check atom can access fragment and fragindex:
for at in (u.atoms[0], u.atoms[76], u.atoms[111]):
frag = at.fragment
assert isinstance(frag, groups.AtomGroup)
assert len(frag) == 25
assert at in frag
fragindex = at.fragindex
assert isinstance(fragindex, int)
with pytest.raises(AttributeError):
x = at.n_fragments
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_atomgroup_access(self, u):
# check atomgroup can access fragments
# first 60 atoms have 3 fragments, given as tuple
# each fragment should still be 25 atoms
ag = u.atoms[:60]
frags = ag.fragments
assert len(frags) == 3
assert isinstance(frags, tuple)
for frag in frags:
assert len(frag) == 25
# same for fragindices:
fragindices = ag.fragindices
assert len(fragindices) == 60
assert len(np.unique(fragindices)) == 3
assert ag.n_fragments == 3
def test_empty_atomgroup_access(self):
ag = mda.AtomGroup([], case1())
assert ag.fragments == tuple()
assert_equal(ag.fragindices, np.array([], dtype=np.int64))
assert ag.n_fragments == 0
def test_atomgroup_fragments_nobonds_NDE(self):
# should raise NDE
u = make_Universe()
ag = u.atoms[:10]
with pytest.raises(NoDataError):
getattr(ag, 'fragments')
with pytest.raises(NoDataError):
getattr(ag, 'fragindices')
with pytest.raises(NoDataError):
getattr(ag, 'n_fragments')
def test_atom_fragment_nobonds_NDE(self):
# should raise NDE
u = make_Universe()
with pytest.raises(NoDataError):
getattr(u.atoms[10], 'fragment')
with pytest.raises(NoDataError):
getattr(u.atoms[10], 'fragindex')
def test_atomgroup_fragment_cache_invalidation_bond_making(self):
u = case1()
fgs = u.atoms.fragments
assert fgs is u.atoms._cache['fragments']
assert u.atoms._cache_key in u._cache['_valid']['fragments']
u.add_bonds((fgs[0][-1] + fgs[1][0],)) # should trigger invalidation
assert 'fragments' not in u._cache['_valid']
assert len(fgs) > len(u.atoms.fragments) # recomputed
def test_atomgroup_fragment_cache_invalidation_bond_breaking(self):
u = case1()
fgs = u.atoms.fragments
assert fgs is u.atoms._cache['fragments']
assert u.atoms._cache_key in u._cache['_valid']['fragments']
u.delete_bonds((u.atoms.bonds[3],)) # should trigger invalidation
assert 'fragments' not in u._cache['_valid']
assert len(fgs) < len(u.atoms.fragments) # recomputed
def test_tpr_fragments():
ag = mda.Universe(TPR, XTC).atoms
frags = ag.fragments
fragindices = ag.fragindices
assert len(frags[0]) == 3341
assert len(fragindices) == len(ag)
assert len(np.unique(fragindices)) == len(frags)
assert ag.n_fragments == len(frags)
| u = make_Universe()
bonds = []
for seg in range(5):
segbase = seg * 25
for res in range(5):
# offset for atoms in this res
base = segbase + 5 * res
bonds.append((0 + base, 1 + base))
bonds.append((1 + base, 2 + base))
bonds.append((2 + base, 3 + base))
bonds.append((3 + base, 4 + base))
bonds.append((1 + base, 4 + base))
if not res == 4: # last res doesn't link onwards
bonds.append((0 + base, 5 + base))
u.add_TopologyAttr(Bonds(bonds))
return u | identifier_body |
test_fragments.py | # -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 fileencoding=utf-8
#
# MDAnalysis --- https://www.mdanalysis.org
# Copyright (c) 2006-2017 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
# doi: 10.25080/majora-629e541a-00e
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
import numpy as np
from numpy.testing import (
assert_equal,
)
import pytest
import MDAnalysis as mda
from MDAnalysis.core.topologyattrs import Bonds
from MDAnalysis.core import groups
from MDAnalysis import NoDataError
from MDAnalysisTests import make_Universe
from MDAnalysisTests.datafiles import TPR, XTC
# Also used in topology/test_guessers
def make_starshape():
u = make_Universe()
bonds = []
for seg in range(5):
segbase = seg * 25
for res in range(5):
# offset for atoms in this res
base = segbase + 5 * res
bonds.append((0 + base, 1 + base))
bonds.append((1 + base, 2 + base))
bonds.append((1 + base, 3 + base))
bonds.append((1 + base, 4 + base))
if not res == 4: # last res doesn't link onwards
bonds.append((4 + base, 5 + base))
u.add_TopologyAttr(Bonds(bonds))
return u
def case1():
return make_starshape()
def case2():
u = make_Universe()
bonds = []
for seg in range(5):
segbase = seg * 25
for res in range(5):
# offset for atoms in this res
base = segbase + 5 * res
bonds.append((0 + base, 1 + base))
bonds.append((1 + base, 2 + base))
bonds.append((2 + base, 3 + base))
bonds.append((3 + base, 4 + base))
bonds.append((1 + base, 4 + base))
if not res == 4: # last res doesn't link onwards
bonds.append((0 + base, 5 + base))
u.add_TopologyAttr(Bonds(bonds))
return u
class TestFragments(object):
r"""Use 125 atom test Universe
5 segments of 5 residues of 5 atoms
Case1
-----
Star shapes to try and test the branching prediction
o | o | o
| | | | |
o-o-o-|-o-o-o-|-o-o-o
| | | | |
o | o |x3 o
Case2
-----
4-ring pendants to test cyclic conditions
o------o------o
| | |
o o o
/ \ / \ / \
o o o o o o
\ / \ / \ /
o o o
Test ring molecules?
"""
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_total_frags(self, u):
fragments = u.atoms.fragments
fragindices = u.atoms.fragindices
# should be 5 fragments of 25 atoms
assert len(fragments) == 5
for frag in fragments:
assert len(frag) == 25
# number of fragindices must correspond to number of atoms:
assert len(fragindices) == len(u.atoms)
# number of unique fragindices must correspond to number of fragments:
assert len(np.unique(fragindices)) == len(fragments)
# check fragindices dtype:
assert fragindices.dtype == np.intp
#check n_fragments
assert u.atoms.n_fragments == len(fragments)
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_frag_external_ordering(self, u):
# check fragments and fragindices are sorted correctly:
for i, frag in enumerate(u.atoms.fragments):
assert frag[0].index == i * 25
assert np.unique(frag.fragindices)[0] == i
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_frag_internal_ordering(self, u):
# check atoms are sorted within fragments and have the same fragindex:
for i, frag in enumerate(u.atoms.fragments):
assert_equal(frag.ix, np.arange(25) + i * 25)
assert len(np.unique(frag.fragindices)) == 1
assert frag.n_fragments == 1
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def | (self, u):
# check atom can access fragment and fragindex:
for at in (u.atoms[0], u.atoms[76], u.atoms[111]):
frag = at.fragment
assert isinstance(frag, groups.AtomGroup)
assert len(frag) == 25
assert at in frag
fragindex = at.fragindex
assert isinstance(fragindex, int)
with pytest.raises(AttributeError):
x = at.n_fragments
@pytest.mark.parametrize('u', (
case1(),
case2()
))
def test_atomgroup_access(self, u):
# check atomgroup can access fragments
# first 60 atoms have 3 fragments, given as tuple
# each fragment should still be 25 atoms
ag = u.atoms[:60]
frags = ag.fragments
assert len(frags) == 3
assert isinstance(frags, tuple)
for frag in frags:
assert len(frag) == 25
# same for fragindices:
fragindices = ag.fragindices
assert len(fragindices) == 60
assert len(np.unique(fragindices)) == 3
assert ag.n_fragments == 3
def test_empty_atomgroup_access(self):
ag = mda.AtomGroup([], case1())
assert ag.fragments == tuple()
assert_equal(ag.fragindices, np.array([], dtype=np.int64))
assert ag.n_fragments == 0
def test_atomgroup_fragments_nobonds_NDE(self):
# should raise NDE
u = make_Universe()
ag = u.atoms[:10]
with pytest.raises(NoDataError):
getattr(ag, 'fragments')
with pytest.raises(NoDataError):
getattr(ag, 'fragindices')
with pytest.raises(NoDataError):
getattr(ag, 'n_fragments')
def test_atom_fragment_nobonds_NDE(self):
# should raise NDE
u = make_Universe()
with pytest.raises(NoDataError):
getattr(u.atoms[10], 'fragment')
with pytest.raises(NoDataError):
getattr(u.atoms[10], 'fragindex')
def test_atomgroup_fragment_cache_invalidation_bond_making(self):
u = case1()
fgs = u.atoms.fragments
assert fgs is u.atoms._cache['fragments']
assert u.atoms._cache_key in u._cache['_valid']['fragments']
u.add_bonds((fgs[0][-1] + fgs[1][0],)) # should trigger invalidation
assert 'fragments' not in u._cache['_valid']
assert len(fgs) > len(u.atoms.fragments) # recomputed
def test_atomgroup_fragment_cache_invalidation_bond_breaking(self):
u = case1()
fgs = u.atoms.fragments
assert fgs is u.atoms._cache['fragments']
assert u.atoms._cache_key in u._cache['_valid']['fragments']
u.delete_bonds((u.atoms.bonds[3],)) # should trigger invalidation
assert 'fragments' not in u._cache['_valid']
assert len(fgs) < len(u.atoms.fragments) # recomputed
def test_tpr_fragments():
ag = mda.Universe(TPR, XTC).atoms
frags = ag.fragments
fragindices = ag.fragindices
assert len(frags[0]) == 3341
assert len(fragindices) == len(ag)
assert len(np.unique(fragindices)) == len(frags)
assert ag.n_fragments == len(frags)
| test_atom_access | identifier_name |
note.py | # -*- coding: utf-8 -*-
import json
from io import StringIO
from gsxws import escalations
from django import template
from django.contrib import messages
from django.http import HttpResponse
from django.utils.translation import ugettext as _
from django.forms.models import modelformset_factory
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib.auth.decorators import permission_required
from django.core.files.base import ContentFile
from reportlab.lib.units import mm
from reportlab.graphics.shapes import Drawing
from reportlab.graphics.barcode import createBarcodeDrawing
from servo.lib.utils import paginate
from servo.models import (Order, Template, Tag, Customer, Note,
Attachment, Escalation, Article,)
from servo.forms import NoteForm, NoteSearchForm, EscalationForm
class BarcodeDrawing(Drawing):
"""Pretty generic Reportlab drawing to render barcodes."""
def __init__(self, text_value, *args, **kwargs):
barcode = createBarcodeDrawing("Code128",
value=text_value.encode("utf-8"),
barHeight=10 * mm,
width=80 * mm)
Drawing.__init__(self, barcode.width, barcode.height, *args, **kwargs)
self.add(barcode, name="barcode")
def | (request, text):
"""Return text as a barcode."""
if request.GET.get('f') == 'svg':
import barcode
output = StringIO.StringIO()
code = barcode.Code39(text, add_checksum=False)
code.write(output)
contents = output.getvalue()
output.close()
return HttpResponse(contents, content_type="image/svg+xml")
d = BarcodeDrawing(text)
return HttpResponse(d.asString("png"), content_type="image/png")
def prep_list_view(request, kind):
"""Prepare the view for listing notes/messages."""
data = {'title': _("Messages")}
all_notes = Note.objects.all().order_by("-created_at")
if kind == "articles":
all_notes = Article.objects.all().order_by('-date_created')
if kind == "inbox":
all_notes = all_notes.filter(order=None).order_by("is_read", "-created_at")
if kind == "sent":
all_notes = all_notes.filter(created_by=request.user)
if kind == "flagged":
all_notes = all_notes.filter(is_flagged=True)
if kind == "escalations":
all_notes = Note.objects.all().exclude(escalation=None)
page = request.GET.get("page")
notes = paginate(all_notes, page, 20)
data['kind'] = kind
data['notes'] = notes
data['search_hint'] = "notes"
data['inbox_count'] = Note.objects.filter(order=None).count()
return data
@permission_required('servo.change_note')
def copy(request, pk):
"""Copy a note with its attachments and labels."""
note = get_object_or_404(Note, pk=pk)
new_note = Note(created_by=request.user)
new_note.body = note.body
new_note.order = note.order
new_note.subject = note.subject
new_note.save()
new_note.labels = note.labels.all()
for a in note.attachments.all(): # also copy the attachments
a.pk = None
a.content_object = new_note
a.save()
new_note.attachments.add(a)
return redirect(edit, pk=new_note.pk, order_id=note.order_id)
@permission_required('servo.change_note')
def edit(request, pk=None, order_id=None,
parent=None, recipient=None, customer=None):
"""
Edit a note.
@FIXME: Should split this up into smaller pieces
"""
to = []
order = None
command = _('Save')
note = Note(order_id=order_id)
excluded_emails = note.get_excluded_emails()
if recipient is not None:
to.append(recipient)
command = _('Send')
if order_id is not None:
order = get_object_or_404(Order, pk=order_id)
if order.user and (order.user != request.user):
note.is_read = False
if order.user.email not in excluded_emails:
to.append(order.user.email)
if order.customer is not None:
customer = order.customer_id
if customer is not None:
customer = get_object_or_404(Customer, pk=customer)
note.customer = customer
if order_id is None:
to.append(customer.email)
tpl = template.Template(note.subject)
note.subject = tpl.render(template.Context({'note': note}))
note.recipient = ', '.join(to)
note.created_by = request.user
note.sender = note.get_default_sender()
fields = escalations.CONTEXTS
try:
note.escalation = Escalation(created_by=request.user)
except Exception as e:
messages.error(request, e)
return redirect(request.META['HTTP_REFERER'])
AttachmentFormset = modelformset_factory(Attachment,
fields=('content',),
can_delete=True,
extra=3,
exclude=[])
formset = AttachmentFormset(queryset=Attachment.objects.none())
if pk is not None:
note = get_object_or_404(Note, pk=pk)
formset = AttachmentFormset(queryset=note.attachments.all())
if parent is not None:
parent = get_object_or_404(Note, pk=parent)
note.parent = parent
note.body = parent.quote()
if parent.subject:
note.subject = _(u'Re: %s') % parent.clean_subject()
if parent.sender not in excluded_emails:
note.recipient = parent.sender
if parent.order:
order = parent.order
note.order = parent.order
note.customer = parent.customer
note.escalation = parent.escalation
note.is_reported = parent.is_reported
title = note.subject
form = NoteForm(instance=note)
if note.escalation:
contexts = json.loads(note.escalation.contexts)
escalation_form = EscalationForm(prefix='escalation',
instance=note.escalation)
if request.method == "POST":
escalation_form = EscalationForm(request.POST,
prefix='escalation',
instance=note.escalation)
if escalation_form.is_valid():
note.escalation = escalation_form.save()
form = NoteForm(request.POST, instance=note)
if form.is_valid():
note = form.save()
formset = AttachmentFormset(request.POST, request.FILES)
if formset.is_valid():
files = formset.save(commit=False)
for f in files:
f.content_object = note
try:
f.save()
except ValueError as e:
messages.error(request, e)
return redirect(note)
note.attachments.add(*files)
if form.cleaned_data.get('attach_confirmation'):
from servo.views.order import put_on_paper
response = put_on_paper(request, note.order_id, fmt='pdf')
filename = response.filename
content = response.render().content
content = ContentFile(content, filename)
attachment = Attachment(content=content, content_object=note)
attachment.save()
attachment.content.save(filename, content)
note.attachments.add(attachment)
note.save()
try:
msg = note.send_and_save(request.user)
messages.success(request, msg)
except ValueError as e:
messages.error(request, e)
return redirect(note)
return render(request, "notes/form.html", locals())
def delete_note(request, pk):
"""
Deletes a note
"""
note = get_object_or_404(Note, pk=pk)
if request.method == 'POST':
note.delete()
messages.success(request, _("Note deleted"))
if request.session.get('return_to'):
url = request.session.get('return_to')
del(request.session['return_to'])
elif note.order_id:
url = note.order.get_absolute_url()
return redirect(url)
return render(request, 'notes/remove.html', {'note': note})
@csrf_exempt
def render_template(request, order_id=None):
"""
Renders the template with this title with the current
Service Order as the context
"""
title = request.POST.get('title')
tpl = get_object_or_404(Template, title=title)
content = tpl.content
if order_id:
order = get_object_or_404(Order, pk=order_id)
content = tpl.render(order)
return HttpResponse(content)
def templates(request, template_id=None):
if template_id:
tpl = get_object_or_404(Template, pk=template_id)
content = tpl.content
if request.session.get('current_order_id'):
tpl = template.Template(content)
order = Order.objects.get(pk=request.session['current_order_id'])
content = tpl.render(template.Context({'order': order}))
return HttpResponse(content)
templates = Template.objects.all()
return render(request, 'notes/templates.html', {'templates': templates})
def toggle_flag(request, kind, pk, flag):
"""
Toggles a flag of a note (read/unread, flagged/not, reported/not)
"""
if kind == 'articles':
note = get_object_or_404(Article, pk=pk)
if flag == 'flagged':
note.toggle_flagged(request.user)
return HttpResponse(note.get_flagged_title(request.user))
if flag == 'read':
note.toggle_read(request.user)
return HttpResponse(note.get_read_title(request.user))
field = 'is_%s' % flag
note = get_object_or_404(Note, pk=pk)
attr = getattr(note, field)
setattr(note, field, not attr)
note.save()
return HttpResponse(getattr(note, 'get_%s_title' % flag)())
def toggle_tag(request, pk, tag_id):
note = get_object_or_404(Note, pk=pk)
tag = get_object_or_404(Tag, pk=tag_id)
if tag in note.labels.all():
note.labels.remove(tag)
else:
note.labels.add(tag)
if note.order:
return redirect(note.order)
return HttpResponse(_('OK'))
def list_notes(request, kind="inbox"):
data = prep_list_view(request, kind)
request.session['return_to'] = request.path
return render(request, "notes/list_notes.html", data)
def view_note(request, kind, pk):
data = prep_list_view(request, kind)
if kind == 'articles':
note = get_object_or_404(Article, pk=pk)
data['read_title'] = note.get_read_title(request.user)
data['flagged_title'] = note.get_flagged_title(request.user)
else:
note = get_object_or_404(Note, pk=pk)
data['title'] = note.get_title()
data['note'] = note
if kind == 'escalations':
return render(request, "notes/view_escalation.html", data)
if kind == 'articles':
return render(request, "notes/view_article.html", data)
return render(request, "notes/view_note.html", data)
def find(request):
"""
Notes advanced search
"""
form = NoteSearchForm(request.GET)
results = Note.objects.none()
if request.GET and form.is_valid():
fdata = form.cleaned_data
results = Note.objects.all()
if fdata.get('body'):
results = results.filter(body__icontains=fdata['body'])
if fdata.get('recipient'):
results = results.filter(recipient__icontains=fdata['recipient'])
if fdata.get('sender'):
results = results.filter(sender__icontains=fdata['sender'])
if fdata.get('order_code'):
results = results.filter(order__code__icontains=fdata['order_code'])
results = results.order_by('-created_at')
title = _('Message search')
notes = paginate(results, request.GET.get('page'), 10)
return render(request, "notes/find.html", locals())
def edit_escalation(request):
pass
def create_escalation(request):
esc = Escalation()
form = EscalationForm()
title = _('Edit Escalation')
if request.method == 'POST':
data = request.POST.copy()
data['created_by'] = request.user
form = EscalationForm(data, request.FILES, instance=esc)
if form.is_valid():
note = form.save()
#esc.submit(request.user)
return redirect(view_note, 'escalations', note.pk)
return render(request, 'notes/edit_escalation.html', locals())
def list_messages(request, pk):
note = get_object_or_404(Note, pk=pk)
messages = note.message_set.all()
return render(request, "notes/messages.html", locals())
| show_barcode | identifier_name |
note.py | # -*- coding: utf-8 -*-
import json
from io import StringIO
from gsxws import escalations
from django import template
from django.contrib import messages
from django.http import HttpResponse
from django.utils.translation import ugettext as _
from django.forms.models import modelformset_factory
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib.auth.decorators import permission_required
from django.core.files.base import ContentFile
from reportlab.lib.units import mm
from reportlab.graphics.shapes import Drawing
from reportlab.graphics.barcode import createBarcodeDrawing
from servo.lib.utils import paginate
from servo.models import (Order, Template, Tag, Customer, Note,
Attachment, Escalation, Article,)
from servo.forms import NoteForm, NoteSearchForm, EscalationForm
class BarcodeDrawing(Drawing):
"""Pretty generic Reportlab drawing to render barcodes."""
def __init__(self, text_value, *args, **kwargs):
barcode = createBarcodeDrawing("Code128",
value=text_value.encode("utf-8"),
barHeight=10 * mm,
width=80 * mm)
Drawing.__init__(self, barcode.width, barcode.height, *args, **kwargs)
self.add(barcode, name="barcode")
def show_barcode(request, text):
"""Return text as a barcode."""
if request.GET.get('f') == 'svg':
import barcode
output = StringIO.StringIO()
code = barcode.Code39(text, add_checksum=False)
code.write(output)
contents = output.getvalue()
output.close()
return HttpResponse(contents, content_type="image/svg+xml")
d = BarcodeDrawing(text)
return HttpResponse(d.asString("png"), content_type="image/png")
def prep_list_view(request, kind):
"""Prepare the view for listing notes/messages."""
data = {'title': _("Messages")}
all_notes = Note.objects.all().order_by("-created_at")
if kind == "articles":
all_notes = Article.objects.all().order_by('-date_created')
if kind == "inbox":
all_notes = all_notes.filter(order=None).order_by("is_read", "-created_at")
if kind == "sent":
all_notes = all_notes.filter(created_by=request.user)
if kind == "flagged":
all_notes = all_notes.filter(is_flagged=True)
if kind == "escalations":
all_notes = Note.objects.all().exclude(escalation=None)
page = request.GET.get("page")
notes = paginate(all_notes, page, 20)
data['kind'] = kind
data['notes'] = notes
data['search_hint'] = "notes"
data['inbox_count'] = Note.objects.filter(order=None).count()
return data
@permission_required('servo.change_note')
def copy(request, pk):
"""Copy a note with its attachments and labels."""
note = get_object_or_404(Note, pk=pk)
new_note = Note(created_by=request.user)
new_note.body = note.body
new_note.order = note.order
new_note.subject = note.subject
new_note.save()
new_note.labels = note.labels.all()
for a in note.attachments.all(): # also copy the attachments
a.pk = None
a.content_object = new_note
a.save()
new_note.attachments.add(a)
return redirect(edit, pk=new_note.pk, order_id=note.order_id)
@permission_required('servo.change_note')
def edit(request, pk=None, order_id=None,
parent=None, recipient=None, customer=None):
"""
Edit a note.
@FIXME: Should split this up into smaller pieces
"""
to = []
order = None
command = _('Save')
note = Note(order_id=order_id)
excluded_emails = note.get_excluded_emails()
if recipient is not None:
to.append(recipient)
command = _('Send')
if order_id is not None:
order = get_object_or_404(Order, pk=order_id)
if order.user and (order.user != request.user):
note.is_read = False
if order.user.email not in excluded_emails:
to.append(order.user.email)
if order.customer is not None:
customer = order.customer_id
if customer is not None:
customer = get_object_or_404(Customer, pk=customer)
note.customer = customer
if order_id is None:
to.append(customer.email)
tpl = template.Template(note.subject)
note.subject = tpl.render(template.Context({'note': note}))
note.recipient = ', '.join(to)
note.created_by = request.user
note.sender = note.get_default_sender()
fields = escalations.CONTEXTS
try:
note.escalation = Escalation(created_by=request.user)
except Exception as e:
messages.error(request, e)
return redirect(request.META['HTTP_REFERER'])
AttachmentFormset = modelformset_factory(Attachment,
fields=('content',),
can_delete=True,
extra=3,
exclude=[])
formset = AttachmentFormset(queryset=Attachment.objects.none())
if pk is not None:
note = get_object_or_404(Note, pk=pk)
formset = AttachmentFormset(queryset=note.attachments.all())
if parent is not None:
parent = get_object_or_404(Note, pk=parent)
note.parent = parent
note.body = parent.quote()
if parent.subject:
note.subject = _(u'Re: %s') % parent.clean_subject()
if parent.sender not in excluded_emails:
note.recipient = parent.sender
if parent.order:
order = parent.order
note.order = parent.order
note.customer = parent.customer
note.escalation = parent.escalation
note.is_reported = parent.is_reported
title = note.subject
form = NoteForm(instance=note)
if note.escalation:
contexts = json.loads(note.escalation.contexts)
escalation_form = EscalationForm(prefix='escalation',
instance=note.escalation)
if request.method == "POST":
escalation_form = EscalationForm(request.POST,
prefix='escalation',
instance=note.escalation)
if escalation_form.is_valid():
note.escalation = escalation_form.save()
form = NoteForm(request.POST, instance=note)
if form.is_valid():
note = form.save()
formset = AttachmentFormset(request.POST, request.FILES)
if formset.is_valid():
files = formset.save(commit=False)
for f in files:
f.content_object = note
try:
f.save()
except ValueError as e:
messages.error(request, e)
return redirect(note)
note.attachments.add(*files)
if form.cleaned_data.get('attach_confirmation'):
from servo.views.order import put_on_paper
response = put_on_paper(request, note.order_id, fmt='pdf')
filename = response.filename
content = response.render().content
content = ContentFile(content, filename)
attachment = Attachment(content=content, content_object=note)
attachment.save()
attachment.content.save(filename, content)
note.attachments.add(attachment)
note.save()
try:
msg = note.send_and_save(request.user)
messages.success(request, msg)
except ValueError as e:
messages.error(request, e)
return redirect(note)
return render(request, "notes/form.html", locals())
def delete_note(request, pk):
"""
Deletes a note
"""
note = get_object_or_404(Note, pk=pk)
if request.method == 'POST':
note.delete()
messages.success(request, _("Note deleted"))
if request.session.get('return_to'):
url = request.session.get('return_to')
del(request.session['return_to'])
elif note.order_id:
url = note.order.get_absolute_url()
return redirect(url)
return render(request, 'notes/remove.html', {'note': note})
@csrf_exempt
def render_template(request, order_id=None):
"""
Renders the template with this title with the current
Service Order as the context
"""
title = request.POST.get('title')
tpl = get_object_or_404(Template, title=title)
content = tpl.content
if order_id:
order = get_object_or_404(Order, pk=order_id)
content = tpl.render(order)
return HttpResponse(content)
def templates(request, template_id=None):
if template_id:
tpl = get_object_or_404(Template, pk=template_id)
content = tpl.content
if request.session.get('current_order_id'):
tpl = template.Template(content)
order = Order.objects.get(pk=request.session['current_order_id'])
content = tpl.render(template.Context({'order': order}))
return HttpResponse(content)
templates = Template.objects.all()
return render(request, 'notes/templates.html', {'templates': templates})
def toggle_flag(request, kind, pk, flag):
"""
Toggles a flag of a note (read/unread, flagged/not, reported/not)
"""
if kind == 'articles':
note = get_object_or_404(Article, pk=pk)
if flag == 'flagged':
note.toggle_flagged(request.user)
return HttpResponse(note.get_flagged_title(request.user))
if flag == 'read':
note.toggle_read(request.user)
return HttpResponse(note.get_read_title(request.user))
field = 'is_%s' % flag
note = get_object_or_404(Note, pk=pk)
attr = getattr(note, field)
setattr(note, field, not attr)
note.save()
return HttpResponse(getattr(note, 'get_%s_title' % flag)())
def toggle_tag(request, pk, tag_id):
note = get_object_or_404(Note, pk=pk)
tag = get_object_or_404(Tag, pk=tag_id)
if tag in note.labels.all():
note.labels.remove(tag)
else:
note.labels.add(tag)
if note.order:
return redirect(note.order)
return HttpResponse(_('OK'))
def list_notes(request, kind="inbox"):
data = prep_list_view(request, kind)
request.session['return_to'] = request.path
return render(request, "notes/list_notes.html", data)
def view_note(request, kind, pk):
data = prep_list_view(request, kind)
if kind == 'articles':
note = get_object_or_404(Article, pk=pk)
data['read_title'] = note.get_read_title(request.user)
data['flagged_title'] = note.get_flagged_title(request.user)
else:
note = get_object_or_404(Note, pk=pk)
data['title'] = note.get_title()
data['note'] = note
if kind == 'escalations':
return render(request, "notes/view_escalation.html", data)
if kind == 'articles':
return render(request, "notes/view_article.html", data)
return render(request, "notes/view_note.html", data)
def find(request):
"""
Notes advanced search
"""
form = NoteSearchForm(request.GET)
results = Note.objects.none()
if request.GET and form.is_valid():
fdata = form.cleaned_data
results = Note.objects.all()
if fdata.get('body'):
results = results.filter(body__icontains=fdata['body'])
if fdata.get('recipient'):
results = results.filter(recipient__icontains=fdata['recipient'])
if fdata.get('sender'):
results = results.filter(sender__icontains=fdata['sender'])
if fdata.get('order_code'):
results = results.filter(order__code__icontains=fdata['order_code'])
results = results.order_by('-created_at')
title = _('Message search')
notes = paginate(results, request.GET.get('page'), 10)
return render(request, "notes/find.html", locals())
def edit_escalation(request):
pass
def create_escalation(request):
esc = Escalation()
form = EscalationForm()
title = _('Edit Escalation')
if request.method == 'POST':
data = request.POST.copy()
data['created_by'] = request.user
form = EscalationForm(data, request.FILES, instance=esc)
if form.is_valid():
note = form.save()
#esc.submit(request.user)
return redirect(view_note, 'escalations', note.pk)
return render(request, 'notes/edit_escalation.html', locals())
def list_messages(request, pk):
note = get_object_or_404(Note, pk=pk)
messages = note.message_set.all() | return render(request, "notes/messages.html", locals()) | random_line_split | |
note.py | # -*- coding: utf-8 -*-
import json
from io import StringIO
from gsxws import escalations
from django import template
from django.contrib import messages
from django.http import HttpResponse
from django.utils.translation import ugettext as _
from django.forms.models import modelformset_factory
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib.auth.decorators import permission_required
from django.core.files.base import ContentFile
from reportlab.lib.units import mm
from reportlab.graphics.shapes import Drawing
from reportlab.graphics.barcode import createBarcodeDrawing
from servo.lib.utils import paginate
from servo.models import (Order, Template, Tag, Customer, Note,
Attachment, Escalation, Article,)
from servo.forms import NoteForm, NoteSearchForm, EscalationForm
class BarcodeDrawing(Drawing):
"""Pretty generic Reportlab drawing to render barcodes."""
def __init__(self, text_value, *args, **kwargs):
barcode = createBarcodeDrawing("Code128",
value=text_value.encode("utf-8"),
barHeight=10 * mm,
width=80 * mm)
Drawing.__init__(self, barcode.width, barcode.height, *args, **kwargs)
self.add(barcode, name="barcode")
def show_barcode(request, text):
"""Return text as a barcode."""
if request.GET.get('f') == 'svg':
import barcode
output = StringIO.StringIO()
code = barcode.Code39(text, add_checksum=False)
code.write(output)
contents = output.getvalue()
output.close()
return HttpResponse(contents, content_type="image/svg+xml")
d = BarcodeDrawing(text)
return HttpResponse(d.asString("png"), content_type="image/png")
def prep_list_view(request, kind):
"""Prepare the view for listing notes/messages."""
data = {'title': _("Messages")}
all_notes = Note.objects.all().order_by("-created_at")
if kind == "articles":
all_notes = Article.objects.all().order_by('-date_created')
if kind == "inbox":
all_notes = all_notes.filter(order=None).order_by("is_read", "-created_at")
if kind == "sent":
all_notes = all_notes.filter(created_by=request.user)
if kind == "flagged":
all_notes = all_notes.filter(is_flagged=True)
if kind == "escalations":
all_notes = Note.objects.all().exclude(escalation=None)
page = request.GET.get("page")
notes = paginate(all_notes, page, 20)
data['kind'] = kind
data['notes'] = notes
data['search_hint'] = "notes"
data['inbox_count'] = Note.objects.filter(order=None).count()
return data
@permission_required('servo.change_note')
def copy(request, pk):
"""Copy a note with its attachments and labels."""
note = get_object_or_404(Note, pk=pk)
new_note = Note(created_by=request.user)
new_note.body = note.body
new_note.order = note.order
new_note.subject = note.subject
new_note.save()
new_note.labels = note.labels.all()
for a in note.attachments.all(): # also copy the attachments
a.pk = None
a.content_object = new_note
a.save()
new_note.attachments.add(a)
return redirect(edit, pk=new_note.pk, order_id=note.order_id)
@permission_required('servo.change_note')
def edit(request, pk=None, order_id=None,
parent=None, recipient=None, customer=None):
"""
Edit a note.
@FIXME: Should split this up into smaller pieces
"""
to = []
order = None
command = _('Save')
note = Note(order_id=order_id)
excluded_emails = note.get_excluded_emails()
if recipient is not None:
to.append(recipient)
command = _('Send')
if order_id is not None:
order = get_object_or_404(Order, pk=order_id)
if order.user and (order.user != request.user):
note.is_read = False
if order.user.email not in excluded_emails:
to.append(order.user.email)
if order.customer is not None:
customer = order.customer_id
if customer is not None:
customer = get_object_or_404(Customer, pk=customer)
note.customer = customer
if order_id is None:
to.append(customer.email)
tpl = template.Template(note.subject)
note.subject = tpl.render(template.Context({'note': note}))
note.recipient = ', '.join(to)
note.created_by = request.user
note.sender = note.get_default_sender()
fields = escalations.CONTEXTS
try:
note.escalation = Escalation(created_by=request.user)
except Exception as e:
messages.error(request, e)
return redirect(request.META['HTTP_REFERER'])
AttachmentFormset = modelformset_factory(Attachment,
fields=('content',),
can_delete=True,
extra=3,
exclude=[])
formset = AttachmentFormset(queryset=Attachment.objects.none())
if pk is not None:
note = get_object_or_404(Note, pk=pk)
formset = AttachmentFormset(queryset=note.attachments.all())
if parent is not None:
parent = get_object_or_404(Note, pk=parent)
note.parent = parent
note.body = parent.quote()
if parent.subject:
note.subject = _(u'Re: %s') % parent.clean_subject()
if parent.sender not in excluded_emails:
note.recipient = parent.sender
if parent.order:
order = parent.order
note.order = parent.order
note.customer = parent.customer
note.escalation = parent.escalation
note.is_reported = parent.is_reported
title = note.subject
form = NoteForm(instance=note)
if note.escalation:
contexts = json.loads(note.escalation.contexts)
escalation_form = EscalationForm(prefix='escalation',
instance=note.escalation)
if request.method == "POST":
escalation_form = EscalationForm(request.POST,
prefix='escalation',
instance=note.escalation)
if escalation_form.is_valid():
note.escalation = escalation_form.save()
form = NoteForm(request.POST, instance=note)
if form.is_valid():
note = form.save()
formset = AttachmentFormset(request.POST, request.FILES)
if formset.is_valid():
files = formset.save(commit=False)
for f in files:
f.content_object = note
try:
f.save()
except ValueError as e:
messages.error(request, e)
return redirect(note)
note.attachments.add(*files)
if form.cleaned_data.get('attach_confirmation'):
from servo.views.order import put_on_paper
response = put_on_paper(request, note.order_id, fmt='pdf')
filename = response.filename
content = response.render().content
content = ContentFile(content, filename)
attachment = Attachment(content=content, content_object=note)
attachment.save()
attachment.content.save(filename, content)
note.attachments.add(attachment)
note.save()
try:
msg = note.send_and_save(request.user)
messages.success(request, msg)
except ValueError as e:
messages.error(request, e)
return redirect(note)
return render(request, "notes/form.html", locals())
def delete_note(request, pk):
"""
Deletes a note
"""
note = get_object_or_404(Note, pk=pk)
if request.method == 'POST':
note.delete()
messages.success(request, _("Note deleted"))
if request.session.get('return_to'):
url = request.session.get('return_to')
del(request.session['return_to'])
elif note.order_id:
url = note.order.get_absolute_url()
return redirect(url)
return render(request, 'notes/remove.html', {'note': note})
@csrf_exempt
def render_template(request, order_id=None):
"""
Renders the template with this title with the current
Service Order as the context
"""
title = request.POST.get('title')
tpl = get_object_or_404(Template, title=title)
content = tpl.content
if order_id:
order = get_object_or_404(Order, pk=order_id)
content = tpl.render(order)
return HttpResponse(content)
def templates(request, template_id=None):
if template_id:
tpl = get_object_or_404(Template, pk=template_id)
content = tpl.content
if request.session.get('current_order_id'):
tpl = template.Template(content)
order = Order.objects.get(pk=request.session['current_order_id'])
content = tpl.render(template.Context({'order': order}))
return HttpResponse(content)
templates = Template.objects.all()
return render(request, 'notes/templates.html', {'templates': templates})
def toggle_flag(request, kind, pk, flag):
"""
Toggles a flag of a note (read/unread, flagged/not, reported/not)
"""
if kind == 'articles':
note = get_object_or_404(Article, pk=pk)
if flag == 'flagged':
note.toggle_flagged(request.user)
return HttpResponse(note.get_flagged_title(request.user))
if flag == 'read':
note.toggle_read(request.user)
return HttpResponse(note.get_read_title(request.user))
field = 'is_%s' % flag
note = get_object_or_404(Note, pk=pk)
attr = getattr(note, field)
setattr(note, field, not attr)
note.save()
return HttpResponse(getattr(note, 'get_%s_title' % flag)())
def toggle_tag(request, pk, tag_id):
note = get_object_or_404(Note, pk=pk)
tag = get_object_or_404(Tag, pk=tag_id)
if tag in note.labels.all():
note.labels.remove(tag)
else:
note.labels.add(tag)
if note.order:
return redirect(note.order)
return HttpResponse(_('OK'))
def list_notes(request, kind="inbox"):
data = prep_list_view(request, kind)
request.session['return_to'] = request.path
return render(request, "notes/list_notes.html", data)
def view_note(request, kind, pk):
data = prep_list_view(request, kind)
if kind == 'articles':
note = get_object_or_404(Article, pk=pk)
data['read_title'] = note.get_read_title(request.user)
data['flagged_title'] = note.get_flagged_title(request.user)
else:
note = get_object_or_404(Note, pk=pk)
data['title'] = note.get_title()
data['note'] = note
if kind == 'escalations':
return render(request, "notes/view_escalation.html", data)
if kind == 'articles':
return render(request, "notes/view_article.html", data)
return render(request, "notes/view_note.html", data)
def find(request):
"""
Notes advanced search
"""
form = NoteSearchForm(request.GET)
results = Note.objects.none()
if request.GET and form.is_valid():
fdata = form.cleaned_data
results = Note.objects.all()
if fdata.get('body'):
|
if fdata.get('recipient'):
results = results.filter(recipient__icontains=fdata['recipient'])
if fdata.get('sender'):
results = results.filter(sender__icontains=fdata['sender'])
if fdata.get('order_code'):
results = results.filter(order__code__icontains=fdata['order_code'])
results = results.order_by('-created_at')
title = _('Message search')
notes = paginate(results, request.GET.get('page'), 10)
return render(request, "notes/find.html", locals())
def edit_escalation(request):
pass
def create_escalation(request):
esc = Escalation()
form = EscalationForm()
title = _('Edit Escalation')
if request.method == 'POST':
data = request.POST.copy()
data['created_by'] = request.user
form = EscalationForm(data, request.FILES, instance=esc)
if form.is_valid():
note = form.save()
#esc.submit(request.user)
return redirect(view_note, 'escalations', note.pk)
return render(request, 'notes/edit_escalation.html', locals())
def list_messages(request, pk):
note = get_object_or_404(Note, pk=pk)
messages = note.message_set.all()
return render(request, "notes/messages.html", locals())
| results = results.filter(body__icontains=fdata['body']) | conditional_block |
note.py | # -*- coding: utf-8 -*-
import json
from io import StringIO
from gsxws import escalations
from django import template
from django.contrib import messages
from django.http import HttpResponse
from django.utils.translation import ugettext as _
from django.forms.models import modelformset_factory
from django.views.decorators.csrf import csrf_exempt
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib.auth.decorators import permission_required
from django.core.files.base import ContentFile
from reportlab.lib.units import mm
from reportlab.graphics.shapes import Drawing
from reportlab.graphics.barcode import createBarcodeDrawing
from servo.lib.utils import paginate
from servo.models import (Order, Template, Tag, Customer, Note,
Attachment, Escalation, Article,)
from servo.forms import NoteForm, NoteSearchForm, EscalationForm
class BarcodeDrawing(Drawing):
"""Pretty generic Reportlab drawing to render barcodes."""
def __init__(self, text_value, *args, **kwargs):
barcode = createBarcodeDrawing("Code128",
value=text_value.encode("utf-8"),
barHeight=10 * mm,
width=80 * mm)
Drawing.__init__(self, barcode.width, barcode.height, *args, **kwargs)
self.add(barcode, name="barcode")
def show_barcode(request, text):
"""Return text as a barcode."""
if request.GET.get('f') == 'svg':
import barcode
output = StringIO.StringIO()
code = barcode.Code39(text, add_checksum=False)
code.write(output)
contents = output.getvalue()
output.close()
return HttpResponse(contents, content_type="image/svg+xml")
d = BarcodeDrawing(text)
return HttpResponse(d.asString("png"), content_type="image/png")
def prep_list_view(request, kind):
"""Prepare the view for listing notes/messages."""
data = {'title': _("Messages")}
all_notes = Note.objects.all().order_by("-created_at")
if kind == "articles":
all_notes = Article.objects.all().order_by('-date_created')
if kind == "inbox":
all_notes = all_notes.filter(order=None).order_by("is_read", "-created_at")
if kind == "sent":
all_notes = all_notes.filter(created_by=request.user)
if kind == "flagged":
all_notes = all_notes.filter(is_flagged=True)
if kind == "escalations":
all_notes = Note.objects.all().exclude(escalation=None)
page = request.GET.get("page")
notes = paginate(all_notes, page, 20)
data['kind'] = kind
data['notes'] = notes
data['search_hint'] = "notes"
data['inbox_count'] = Note.objects.filter(order=None).count()
return data
@permission_required('servo.change_note')
def copy(request, pk):
"""Copy a note with its attachments and labels."""
note = get_object_or_404(Note, pk=pk)
new_note = Note(created_by=request.user)
new_note.body = note.body
new_note.order = note.order
new_note.subject = note.subject
new_note.save()
new_note.labels = note.labels.all()
for a in note.attachments.all(): # also copy the attachments
a.pk = None
a.content_object = new_note
a.save()
new_note.attachments.add(a)
return redirect(edit, pk=new_note.pk, order_id=note.order_id)
@permission_required('servo.change_note')
def edit(request, pk=None, order_id=None,
parent=None, recipient=None, customer=None):
"""
Edit a note.
@FIXME: Should split this up into smaller pieces
"""
to = []
order = None
command = _('Save')
note = Note(order_id=order_id)
excluded_emails = note.get_excluded_emails()
if recipient is not None:
to.append(recipient)
command = _('Send')
if order_id is not None:
order = get_object_or_404(Order, pk=order_id)
if order.user and (order.user != request.user):
note.is_read = False
if order.user.email not in excluded_emails:
to.append(order.user.email)
if order.customer is not None:
customer = order.customer_id
if customer is not None:
customer = get_object_or_404(Customer, pk=customer)
note.customer = customer
if order_id is None:
to.append(customer.email)
tpl = template.Template(note.subject)
note.subject = tpl.render(template.Context({'note': note}))
note.recipient = ', '.join(to)
note.created_by = request.user
note.sender = note.get_default_sender()
fields = escalations.CONTEXTS
try:
note.escalation = Escalation(created_by=request.user)
except Exception as e:
messages.error(request, e)
return redirect(request.META['HTTP_REFERER'])
AttachmentFormset = modelformset_factory(Attachment,
fields=('content',),
can_delete=True,
extra=3,
exclude=[])
formset = AttachmentFormset(queryset=Attachment.objects.none())
if pk is not None:
note = get_object_or_404(Note, pk=pk)
formset = AttachmentFormset(queryset=note.attachments.all())
if parent is not None:
parent = get_object_or_404(Note, pk=parent)
note.parent = parent
note.body = parent.quote()
if parent.subject:
note.subject = _(u'Re: %s') % parent.clean_subject()
if parent.sender not in excluded_emails:
note.recipient = parent.sender
if parent.order:
order = parent.order
note.order = parent.order
note.customer = parent.customer
note.escalation = parent.escalation
note.is_reported = parent.is_reported
title = note.subject
form = NoteForm(instance=note)
if note.escalation:
contexts = json.loads(note.escalation.contexts)
escalation_form = EscalationForm(prefix='escalation',
instance=note.escalation)
if request.method == "POST":
escalation_form = EscalationForm(request.POST,
prefix='escalation',
instance=note.escalation)
if escalation_form.is_valid():
note.escalation = escalation_form.save()
form = NoteForm(request.POST, instance=note)
if form.is_valid():
note = form.save()
formset = AttachmentFormset(request.POST, request.FILES)
if formset.is_valid():
files = formset.save(commit=False)
for f in files:
f.content_object = note
try:
f.save()
except ValueError as e:
messages.error(request, e)
return redirect(note)
note.attachments.add(*files)
if form.cleaned_data.get('attach_confirmation'):
from servo.views.order import put_on_paper
response = put_on_paper(request, note.order_id, fmt='pdf')
filename = response.filename
content = response.render().content
content = ContentFile(content, filename)
attachment = Attachment(content=content, content_object=note)
attachment.save()
attachment.content.save(filename, content)
note.attachments.add(attachment)
note.save()
try:
msg = note.send_and_save(request.user)
messages.success(request, msg)
except ValueError as e:
messages.error(request, e)
return redirect(note)
return render(request, "notes/form.html", locals())
def delete_note(request, pk):
"""
Deletes a note
"""
note = get_object_or_404(Note, pk=pk)
if request.method == 'POST':
note.delete()
messages.success(request, _("Note deleted"))
if request.session.get('return_to'):
url = request.session.get('return_to')
del(request.session['return_to'])
elif note.order_id:
url = note.order.get_absolute_url()
return redirect(url)
return render(request, 'notes/remove.html', {'note': note})
@csrf_exempt
def render_template(request, order_id=None):
"""
Renders the template with this title with the current
Service Order as the context
"""
title = request.POST.get('title')
tpl = get_object_or_404(Template, title=title)
content = tpl.content
if order_id:
order = get_object_or_404(Order, pk=order_id)
content = tpl.render(order)
return HttpResponse(content)
def templates(request, template_id=None):
if template_id:
tpl = get_object_or_404(Template, pk=template_id)
content = tpl.content
if request.session.get('current_order_id'):
tpl = template.Template(content)
order = Order.objects.get(pk=request.session['current_order_id'])
content = tpl.render(template.Context({'order': order}))
return HttpResponse(content)
templates = Template.objects.all()
return render(request, 'notes/templates.html', {'templates': templates})
def toggle_flag(request, kind, pk, flag):
"""
Toggles a flag of a note (read/unread, flagged/not, reported/not)
"""
if kind == 'articles':
note = get_object_or_404(Article, pk=pk)
if flag == 'flagged':
note.toggle_flagged(request.user)
return HttpResponse(note.get_flagged_title(request.user))
if flag == 'read':
note.toggle_read(request.user)
return HttpResponse(note.get_read_title(request.user))
field = 'is_%s' % flag
note = get_object_or_404(Note, pk=pk)
attr = getattr(note, field)
setattr(note, field, not attr)
note.save()
return HttpResponse(getattr(note, 'get_%s_title' % flag)())
def toggle_tag(request, pk, tag_id):
note = get_object_or_404(Note, pk=pk)
tag = get_object_or_404(Tag, pk=tag_id)
if tag in note.labels.all():
note.labels.remove(tag)
else:
note.labels.add(tag)
if note.order:
return redirect(note.order)
return HttpResponse(_('OK'))
def list_notes(request, kind="inbox"):
data = prep_list_view(request, kind)
request.session['return_to'] = request.path
return render(request, "notes/list_notes.html", data)
def view_note(request, kind, pk):
data = prep_list_view(request, kind)
if kind == 'articles':
note = get_object_or_404(Article, pk=pk)
data['read_title'] = note.get_read_title(request.user)
data['flagged_title'] = note.get_flagged_title(request.user)
else:
note = get_object_or_404(Note, pk=pk)
data['title'] = note.get_title()
data['note'] = note
if kind == 'escalations':
return render(request, "notes/view_escalation.html", data)
if kind == 'articles':
return render(request, "notes/view_article.html", data)
return render(request, "notes/view_note.html", data)
def find(request):
"""
Notes advanced search
"""
form = NoteSearchForm(request.GET)
results = Note.objects.none()
if request.GET and form.is_valid():
fdata = form.cleaned_data
results = Note.objects.all()
if fdata.get('body'):
results = results.filter(body__icontains=fdata['body'])
if fdata.get('recipient'):
results = results.filter(recipient__icontains=fdata['recipient'])
if fdata.get('sender'):
results = results.filter(sender__icontains=fdata['sender'])
if fdata.get('order_code'):
results = results.filter(order__code__icontains=fdata['order_code'])
results = results.order_by('-created_at')
title = _('Message search')
notes = paginate(results, request.GET.get('page'), 10)
return render(request, "notes/find.html", locals())
def edit_escalation(request):
pass
def create_escalation(request):
|
def list_messages(request, pk):
note = get_object_or_404(Note, pk=pk)
messages = note.message_set.all()
return render(request, "notes/messages.html", locals())
| esc = Escalation()
form = EscalationForm()
title = _('Edit Escalation')
if request.method == 'POST':
data = request.POST.copy()
data['created_by'] = request.user
form = EscalationForm(data, request.FILES, instance=esc)
if form.is_valid():
note = form.save()
#esc.submit(request.user)
return redirect(view_note, 'escalations', note.pk)
return render(request, 'notes/edit_escalation.html', locals()) | identifier_body |
ToontownLauncher.py | import os
import sys
import time
import types
ltime = 1 and time.localtime()
logSuffix = '%02d%02d%02d_%02d%02d%02d' % (ltime[0] - 2000, ltime[1], ltime[2],
ltime[3], ltime[4], ltime[5])
logfile = 'toontownD-' + logSuffix + '.log'
class LogAndOutput:
def __init__(self, orig, log):
self.orig = orig
self.log = log
def write(self, str):
self.log.write(str)
self.log.flush()
self.orig.write(str)
self.orig.flush()
def flush(self):
self.log.flush()
self.orig.flush()
log = open(logfile, 'a')
logOut = LogAndOutput(sys.__stdout__, log)
logErr = LogAndOutput(sys.__stderr__, log)
sys.stdout = logOut
sys.stderr = logErr
print('\n\nStarting Toontown...')
if 1:
print 'Current time: ' + time.asctime(time.localtime(time.time())) + ' ' + time.tzname[0]
print 'sys.path = ', sys.path
print 'sys.argv = ', sys.argv
from otp.launcher.LauncherBase import LauncherBase
from otp.otpbase import OTPLauncherGlobals
from panda3d.core import *
from toontown.toonbase import TTLocalizer
class ToontownLauncher(LauncherBase):
GameName = 'Toontown'
LauncherPhases = [3, 3.5, 4, 5, 5.5, 6, 7, 8, 9, 10, 11, 12, 13]
TmpOverallMap = [0.25, 0.15, 0.12, 0.17, 0.08, 0.07, 0.05, 0.05, 0.017,
0.011, 0.01, 0.012, 0.01]
RegistryKey = 'Software\\Disney\\Disney Online\\Toontown'
ForegroundSleepTime = 0.01
Localizer = TTLocalizer
VerifyFiles = 1
DecompressMultifiles = True
def __init__(self):
if sys.argv[2] == 'Phase2.py':
sys.argv = sys.argv[:1] + sys.argv[3:]
if len(sys.argv) == 5 or len(sys.argv) == 6:
self.gameServer = sys.argv[2]
self.accountServer = sys.argv[3]
self.testServerFlag = int(sys.argv[4])
else:
print 'Error: Launcher: incorrect number of parameters'
sys.exit()
self.toontownBlueKey = 'TOONTOWN_BLUE'
self.toontownPlayTokenKey = 'TTI_PLAYCOOKIE'
self.launcherMessageKey = 'LAUNCHER_MESSAGE'
self.game1DoneKey = 'GAME1_DONE'
self.game2DoneKey = 'GAME2_DONE'
self.tutorialCompleteKey = 'TUTORIAL_DONE'
self.toontownRegistryKey = 'Software\\Disney\\Disney Online\\Toontown'
if self.testServerFlag:
self.toontownRegistryKey = '%s%s' % (self.toontownRegistryKey, 'Test')
self.toontownRegistryKey = '%s%s' % (self.toontownRegistryKey, self.getProductName())
LauncherBase.__init__(self)
self.webAcctParams = 'WEB_ACCT_PARAMS'
self.parseWebAcctParams()
self.mainLoop()
def getValue(self, key, default=None):
|
def setValue(self, key, value):
self.setRegistry(key, value)
def getVerifyFiles(self):
return 1
def getTestServerFlag(self):
return self.testServerFlag
def getGameServer(self):
return self.gameServer
def getLogFileName(self):
return 'toontown'
def parseWebAcctParams(self):
s = config.GetString('fake-web-acct-params', '')
if not s:
s = self.getRegistry(self.webAcctParams)
self.setRegistry(self.webAcctParams, '')
l = s.split('&')
length = len(l)
dict = {}
for index in xrange(0, len(l)):
args = l[index].split('=')
if len(args) == 3:
[name, value] = args[-2:]
dict[name] = int(value)
elif len(args) == 2:
[name, value] = args
dict[name] = int(value)
self.secretNeedsParentPasswordKey = 1
if 'secretsNeedsParentPassword' in dict:
self.secretNeedsParentPasswordKey = dict['secretsNeedsParentPassword']
else:
self.notify.warning('no secretNeedsParentPassword token in webAcctParams')
self.notify.info('secretNeedsParentPassword = %d' % self.secretNeedsParentPasswordKey)
self.chatEligibleKey = 0
if 'chatEligible' in dict:
self.chatEligibleKey = dict['chatEligible']
else:
self.notify.warning('no chatEligible token in webAcctParams')
self.notify.info('chatEligibleKey = %d' % self.chatEligibleKey)
def getBlue(self):
blue = self.getValue(self.toontownBlueKey)
self.setValue(self.toontownBlueKey, '')
if blue == 'NO BLUE':
blue = None
return blue
def getPlayToken(self):
playToken = self.getValue(self.toontownPlayTokenKey)
self.setValue(self.toontownPlayTokenKey, '')
if playToken == 'NO PLAYTOKEN':
playToken = None
return playToken
def setRegistry(self, name, value):
if not self.WIN32:
return
t = type(value)
if t == types.IntType:
WindowsRegistry.setIntValue(self.toontownRegistryKey, name, value)
elif t == types.StringType:
WindowsRegistry.setStringValue(self.toontownRegistryKey, name, value)
else:
self.notify.warning('setRegistry: Invalid type for registry value: ' + `value`)
def getRegistry(self, name, missingValue=None):
self.notify.info('getRegistry%s' % ((name, missingValue),))
if not self.WIN32:
if missingValue == None:
missingValue = ''
value = os.environ.get(name, missingValue)
try:
value = int(value)
except: pass
return value
t = WindowsRegistry.getKeyType(self.toontownRegistryKey, name)
if t == WindowsRegistry.TInt:
if missingValue == None:
missingValue = 0
return WindowsRegistry.getIntValue(self.toontownRegistryKey,
name, missingValue)
elif t == WindowsRegistry.TString:
if missingValue == None:
missingValue = ''
return WindowsRegistry.getStringValue(self.toontownRegistryKey,
name, missingValue)
else:
return missingValue
def getCDDownloadPath(self, origPath, serverFilePath):
return '%s/%s%s/CD_%d/%s' % (origPath, self.ServerVersion, self.ServerVersionSuffix, self.fromCD, serverFilePath)
def getDownloadPath(self, origPath, serverFilePath):
return '%s/%s%s/%s' % (origPath, self.ServerVersion, self.ServerVersionSuffix, serverFilePath)
def getPercentPatchComplete(self, bytesWritten):
if self.totalPatchDownload:
return LauncherBase.getPercentPatchComplete(self, bytesWritten)
else:
return 0
def hashIsValid(self, serverHash, hashStr):
return serverHash.setFromDec(hashStr) or serverHash.setFromHex(hashStr)
def launcherMessage(self, msg):
LauncherBase.launcherMessage(self, msg)
self.setRegistry(self.launcherMessageKey, msg)
def getAccountServer(self):
return self.accountServer
def setTutorialComplete(self):
self.setRegistry(self.tutorialCompleteKey, 0)
def getTutorialComplete(self):
return self.getRegistry(self.tutorialCompleteKey, 0)
def getGame2Done(self):
return self.getRegistry(self.game2DoneKey, 0)
def setPandaErrorCode(self, code):
self.pandaErrorCode = code
if self.WIN32:
self.notify.info('setting panda error code to %s' % code)
exitCode2exitPage = {
OTPLauncherGlobals.ExitEnableChat: 'chat',
OTPLauncherGlobals.ExitSetParentPassword: 'setparentpassword',
OTPLauncherGlobals.ExitPurchase: 'purchase'}
if code in exitCode2exitPage:
self.setRegistry('EXIT_PAGE', exitCode2exitPage[code])
self.setRegistry(self.PandaErrorCodeKey, 0)
else:
self.setRegistry(self.PandaErrorCodeKey, code)
else:
LauncherBase.setPandaErrorCode(self, code)
def getNeedPwForSecretKey(self):
return self.secretNeedsParentPasswordKey
def getParentPasswordSet(self):
return self.chatEligibleKey
def MakeNTFSFilesGlobalWriteable(self, pathToSet=None):
if not self.WIN32:
return
LauncherBase.MakeNTFSFilesGlobalWriteable(self, pathToSet)
def startGame(self):
try:
os.remove('Phase3.py')
except: pass
import Phase3
self.newTaskManager()
from direct.showbase.EventManagerGlobal import eventMgr
eventMgr.restart()
from toontown.toonbase import ToontownStart
| try:
return self.getRegistry(key, default)
except:
return self.getRegistry(key) | identifier_body |
ToontownLauncher.py | import os
import sys
import time
import types
ltime = 1 and time.localtime()
logSuffix = '%02d%02d%02d_%02d%02d%02d' % (ltime[0] - 2000, ltime[1], ltime[2], | def __init__(self, orig, log):
self.orig = orig
self.log = log
def write(self, str):
self.log.write(str)
self.log.flush()
self.orig.write(str)
self.orig.flush()
def flush(self):
self.log.flush()
self.orig.flush()
log = open(logfile, 'a')
logOut = LogAndOutput(sys.__stdout__, log)
logErr = LogAndOutput(sys.__stderr__, log)
sys.stdout = logOut
sys.stderr = logErr
print('\n\nStarting Toontown...')
if 1:
print 'Current time: ' + time.asctime(time.localtime(time.time())) + ' ' + time.tzname[0]
print 'sys.path = ', sys.path
print 'sys.argv = ', sys.argv
from otp.launcher.LauncherBase import LauncherBase
from otp.otpbase import OTPLauncherGlobals
from panda3d.core import *
from toontown.toonbase import TTLocalizer
class ToontownLauncher(LauncherBase):
GameName = 'Toontown'
LauncherPhases = [3, 3.5, 4, 5, 5.5, 6, 7, 8, 9, 10, 11, 12, 13]
TmpOverallMap = [0.25, 0.15, 0.12, 0.17, 0.08, 0.07, 0.05, 0.05, 0.017,
0.011, 0.01, 0.012, 0.01]
RegistryKey = 'Software\\Disney\\Disney Online\\Toontown'
ForegroundSleepTime = 0.01
Localizer = TTLocalizer
VerifyFiles = 1
DecompressMultifiles = True
def __init__(self):
if sys.argv[2] == 'Phase2.py':
sys.argv = sys.argv[:1] + sys.argv[3:]
if len(sys.argv) == 5 or len(sys.argv) == 6:
self.gameServer = sys.argv[2]
self.accountServer = sys.argv[3]
self.testServerFlag = int(sys.argv[4])
else:
print 'Error: Launcher: incorrect number of parameters'
sys.exit()
self.toontownBlueKey = 'TOONTOWN_BLUE'
self.toontownPlayTokenKey = 'TTI_PLAYCOOKIE'
self.launcherMessageKey = 'LAUNCHER_MESSAGE'
self.game1DoneKey = 'GAME1_DONE'
self.game2DoneKey = 'GAME2_DONE'
self.tutorialCompleteKey = 'TUTORIAL_DONE'
self.toontownRegistryKey = 'Software\\Disney\\Disney Online\\Toontown'
if self.testServerFlag:
self.toontownRegistryKey = '%s%s' % (self.toontownRegistryKey, 'Test')
self.toontownRegistryKey = '%s%s' % (self.toontownRegistryKey, self.getProductName())
LauncherBase.__init__(self)
self.webAcctParams = 'WEB_ACCT_PARAMS'
self.parseWebAcctParams()
self.mainLoop()
def getValue(self, key, default=None):
try:
return self.getRegistry(key, default)
except:
return self.getRegistry(key)
def setValue(self, key, value):
self.setRegistry(key, value)
def getVerifyFiles(self):
return 1
def getTestServerFlag(self):
return self.testServerFlag
def getGameServer(self):
return self.gameServer
def getLogFileName(self):
return 'toontown'
def parseWebAcctParams(self):
s = config.GetString('fake-web-acct-params', '')
if not s:
s = self.getRegistry(self.webAcctParams)
self.setRegistry(self.webAcctParams, '')
l = s.split('&')
length = len(l)
dict = {}
for index in xrange(0, len(l)):
args = l[index].split('=')
if len(args) == 3:
[name, value] = args[-2:]
dict[name] = int(value)
elif len(args) == 2:
[name, value] = args
dict[name] = int(value)
self.secretNeedsParentPasswordKey = 1
if 'secretsNeedsParentPassword' in dict:
self.secretNeedsParentPasswordKey = dict['secretsNeedsParentPassword']
else:
self.notify.warning('no secretNeedsParentPassword token in webAcctParams')
self.notify.info('secretNeedsParentPassword = %d' % self.secretNeedsParentPasswordKey)
self.chatEligibleKey = 0
if 'chatEligible' in dict:
self.chatEligibleKey = dict['chatEligible']
else:
self.notify.warning('no chatEligible token in webAcctParams')
self.notify.info('chatEligibleKey = %d' % self.chatEligibleKey)
def getBlue(self):
blue = self.getValue(self.toontownBlueKey)
self.setValue(self.toontownBlueKey, '')
if blue == 'NO BLUE':
blue = None
return blue
def getPlayToken(self):
playToken = self.getValue(self.toontownPlayTokenKey)
self.setValue(self.toontownPlayTokenKey, '')
if playToken == 'NO PLAYTOKEN':
playToken = None
return playToken
def setRegistry(self, name, value):
if not self.WIN32:
return
t = type(value)
if t == types.IntType:
WindowsRegistry.setIntValue(self.toontownRegistryKey, name, value)
elif t == types.StringType:
WindowsRegistry.setStringValue(self.toontownRegistryKey, name, value)
else:
self.notify.warning('setRegistry: Invalid type for registry value: ' + `value`)
def getRegistry(self, name, missingValue=None):
self.notify.info('getRegistry%s' % ((name, missingValue),))
if not self.WIN32:
if missingValue == None:
missingValue = ''
value = os.environ.get(name, missingValue)
try:
value = int(value)
except: pass
return value
t = WindowsRegistry.getKeyType(self.toontownRegistryKey, name)
if t == WindowsRegistry.TInt:
if missingValue == None:
missingValue = 0
return WindowsRegistry.getIntValue(self.toontownRegistryKey,
name, missingValue)
elif t == WindowsRegistry.TString:
if missingValue == None:
missingValue = ''
return WindowsRegistry.getStringValue(self.toontownRegistryKey,
name, missingValue)
else:
return missingValue
def getCDDownloadPath(self, origPath, serverFilePath):
return '%s/%s%s/CD_%d/%s' % (origPath, self.ServerVersion, self.ServerVersionSuffix, self.fromCD, serverFilePath)
def getDownloadPath(self, origPath, serverFilePath):
return '%s/%s%s/%s' % (origPath, self.ServerVersion, self.ServerVersionSuffix, serverFilePath)
def getPercentPatchComplete(self, bytesWritten):
if self.totalPatchDownload:
return LauncherBase.getPercentPatchComplete(self, bytesWritten)
else:
return 0
def hashIsValid(self, serverHash, hashStr):
return serverHash.setFromDec(hashStr) or serverHash.setFromHex(hashStr)
def launcherMessage(self, msg):
LauncherBase.launcherMessage(self, msg)
self.setRegistry(self.launcherMessageKey, msg)
def getAccountServer(self):
return self.accountServer
def setTutorialComplete(self):
self.setRegistry(self.tutorialCompleteKey, 0)
def getTutorialComplete(self):
return self.getRegistry(self.tutorialCompleteKey, 0)
def getGame2Done(self):
return self.getRegistry(self.game2DoneKey, 0)
def setPandaErrorCode(self, code):
self.pandaErrorCode = code
if self.WIN32:
self.notify.info('setting panda error code to %s' % code)
exitCode2exitPage = {
OTPLauncherGlobals.ExitEnableChat: 'chat',
OTPLauncherGlobals.ExitSetParentPassword: 'setparentpassword',
OTPLauncherGlobals.ExitPurchase: 'purchase'}
if code in exitCode2exitPage:
self.setRegistry('EXIT_PAGE', exitCode2exitPage[code])
self.setRegistry(self.PandaErrorCodeKey, 0)
else:
self.setRegistry(self.PandaErrorCodeKey, code)
else:
LauncherBase.setPandaErrorCode(self, code)
def getNeedPwForSecretKey(self):
return self.secretNeedsParentPasswordKey
def getParentPasswordSet(self):
return self.chatEligibleKey
def MakeNTFSFilesGlobalWriteable(self, pathToSet=None):
if not self.WIN32:
return
LauncherBase.MakeNTFSFilesGlobalWriteable(self, pathToSet)
def startGame(self):
try:
os.remove('Phase3.py')
except: pass
import Phase3
self.newTaskManager()
from direct.showbase.EventManagerGlobal import eventMgr
eventMgr.restart()
from toontown.toonbase import ToontownStart | ltime[3], ltime[4], ltime[5])
logfile = 'toontownD-' + logSuffix + '.log'
class LogAndOutput: | random_line_split |
ToontownLauncher.py | import os
import sys
import time
import types
ltime = 1 and time.localtime()
logSuffix = '%02d%02d%02d_%02d%02d%02d' % (ltime[0] - 2000, ltime[1], ltime[2],
ltime[3], ltime[4], ltime[5])
logfile = 'toontownD-' + logSuffix + '.log'
class LogAndOutput:
def __init__(self, orig, log):
self.orig = orig
self.log = log
def write(self, str):
self.log.write(str)
self.log.flush()
self.orig.write(str)
self.orig.flush()
def flush(self):
self.log.flush()
self.orig.flush()
log = open(logfile, 'a')
logOut = LogAndOutput(sys.__stdout__, log)
logErr = LogAndOutput(sys.__stderr__, log)
sys.stdout = logOut
sys.stderr = logErr
print('\n\nStarting Toontown...')
if 1:
print 'Current time: ' + time.asctime(time.localtime(time.time())) + ' ' + time.tzname[0]
print 'sys.path = ', sys.path
print 'sys.argv = ', sys.argv
from otp.launcher.LauncherBase import LauncherBase
from otp.otpbase import OTPLauncherGlobals
from panda3d.core import *
from toontown.toonbase import TTLocalizer
class ToontownLauncher(LauncherBase):
GameName = 'Toontown'
LauncherPhases = [3, 3.5, 4, 5, 5.5, 6, 7, 8, 9, 10, 11, 12, 13]
TmpOverallMap = [0.25, 0.15, 0.12, 0.17, 0.08, 0.07, 0.05, 0.05, 0.017,
0.011, 0.01, 0.012, 0.01]
RegistryKey = 'Software\\Disney\\Disney Online\\Toontown'
ForegroundSleepTime = 0.01
Localizer = TTLocalizer
VerifyFiles = 1
DecompressMultifiles = True
def __init__(self):
if sys.argv[2] == 'Phase2.py':
sys.argv = sys.argv[:1] + sys.argv[3:]
if len(sys.argv) == 5 or len(sys.argv) == 6:
self.gameServer = sys.argv[2]
self.accountServer = sys.argv[3]
self.testServerFlag = int(sys.argv[4])
else:
print 'Error: Launcher: incorrect number of parameters'
sys.exit()
self.toontownBlueKey = 'TOONTOWN_BLUE'
self.toontownPlayTokenKey = 'TTI_PLAYCOOKIE'
self.launcherMessageKey = 'LAUNCHER_MESSAGE'
self.game1DoneKey = 'GAME1_DONE'
self.game2DoneKey = 'GAME2_DONE'
self.tutorialCompleteKey = 'TUTORIAL_DONE'
self.toontownRegistryKey = 'Software\\Disney\\Disney Online\\Toontown'
if self.testServerFlag:
self.toontownRegistryKey = '%s%s' % (self.toontownRegistryKey, 'Test')
self.toontownRegistryKey = '%s%s' % (self.toontownRegistryKey, self.getProductName())
LauncherBase.__init__(self)
self.webAcctParams = 'WEB_ACCT_PARAMS'
self.parseWebAcctParams()
self.mainLoop()
def getValue(self, key, default=None):
try:
return self.getRegistry(key, default)
except:
return self.getRegistry(key)
def setValue(self, key, value):
self.setRegistry(key, value)
def getVerifyFiles(self):
return 1
def getTestServerFlag(self):
return self.testServerFlag
def getGameServer(self):
return self.gameServer
def getLogFileName(self):
return 'toontown'
def parseWebAcctParams(self):
s = config.GetString('fake-web-acct-params', '')
if not s:
s = self.getRegistry(self.webAcctParams)
self.setRegistry(self.webAcctParams, '')
l = s.split('&')
length = len(l)
dict = {}
for index in xrange(0, len(l)):
args = l[index].split('=')
if len(args) == 3:
[name, value] = args[-2:]
dict[name] = int(value)
elif len(args) == 2:
[name, value] = args
dict[name] = int(value)
self.secretNeedsParentPasswordKey = 1
if 'secretsNeedsParentPassword' in dict:
self.secretNeedsParentPasswordKey = dict['secretsNeedsParentPassword']
else:
self.notify.warning('no secretNeedsParentPassword token in webAcctParams')
self.notify.info('secretNeedsParentPassword = %d' % self.secretNeedsParentPasswordKey)
self.chatEligibleKey = 0
if 'chatEligible' in dict:
self.chatEligibleKey = dict['chatEligible']
else:
self.notify.warning('no chatEligible token in webAcctParams')
self.notify.info('chatEligibleKey = %d' % self.chatEligibleKey)
def getBlue(self):
blue = self.getValue(self.toontownBlueKey)
self.setValue(self.toontownBlueKey, '')
if blue == 'NO BLUE':
blue = None
return blue
def getPlayToken(self):
playToken = self.getValue(self.toontownPlayTokenKey)
self.setValue(self.toontownPlayTokenKey, '')
if playToken == 'NO PLAYTOKEN':
playToken = None
return playToken
def setRegistry(self, name, value):
if not self.WIN32:
return
t = type(value)
if t == types.IntType:
WindowsRegistry.setIntValue(self.toontownRegistryKey, name, value)
elif t == types.StringType:
WindowsRegistry.setStringValue(self.toontownRegistryKey, name, value)
else:
self.notify.warning('setRegistry: Invalid type for registry value: ' + `value`)
def getRegistry(self, name, missingValue=None):
self.notify.info('getRegistry%s' % ((name, missingValue),))
if not self.WIN32:
if missingValue == None:
missingValue = ''
value = os.environ.get(name, missingValue)
try:
value = int(value)
except: pass
return value
t = WindowsRegistry.getKeyType(self.toontownRegistryKey, name)
if t == WindowsRegistry.TInt:
if missingValue == None:
missingValue = 0
return WindowsRegistry.getIntValue(self.toontownRegistryKey,
name, missingValue)
elif t == WindowsRegistry.TString:
if missingValue == None:
missingValue = ''
return WindowsRegistry.getStringValue(self.toontownRegistryKey,
name, missingValue)
else:
return missingValue
def getCDDownloadPath(self, origPath, serverFilePath):
return '%s/%s%s/CD_%d/%s' % (origPath, self.ServerVersion, self.ServerVersionSuffix, self.fromCD, serverFilePath)
def getDownloadPath(self, origPath, serverFilePath):
return '%s/%s%s/%s' % (origPath, self.ServerVersion, self.ServerVersionSuffix, serverFilePath)
def | (self, bytesWritten):
if self.totalPatchDownload:
return LauncherBase.getPercentPatchComplete(self, bytesWritten)
else:
return 0
def hashIsValid(self, serverHash, hashStr):
return serverHash.setFromDec(hashStr) or serverHash.setFromHex(hashStr)
def launcherMessage(self, msg):
LauncherBase.launcherMessage(self, msg)
self.setRegistry(self.launcherMessageKey, msg)
def getAccountServer(self):
return self.accountServer
def setTutorialComplete(self):
self.setRegistry(self.tutorialCompleteKey, 0)
def getTutorialComplete(self):
return self.getRegistry(self.tutorialCompleteKey, 0)
def getGame2Done(self):
return self.getRegistry(self.game2DoneKey, 0)
def setPandaErrorCode(self, code):
self.pandaErrorCode = code
if self.WIN32:
self.notify.info('setting panda error code to %s' % code)
exitCode2exitPage = {
OTPLauncherGlobals.ExitEnableChat: 'chat',
OTPLauncherGlobals.ExitSetParentPassword: 'setparentpassword',
OTPLauncherGlobals.ExitPurchase: 'purchase'}
if code in exitCode2exitPage:
self.setRegistry('EXIT_PAGE', exitCode2exitPage[code])
self.setRegistry(self.PandaErrorCodeKey, 0)
else:
self.setRegistry(self.PandaErrorCodeKey, code)
else:
LauncherBase.setPandaErrorCode(self, code)
def getNeedPwForSecretKey(self):
return self.secretNeedsParentPasswordKey
def getParentPasswordSet(self):
return self.chatEligibleKey
def MakeNTFSFilesGlobalWriteable(self, pathToSet=None):
if not self.WIN32:
return
LauncherBase.MakeNTFSFilesGlobalWriteable(self, pathToSet)
def startGame(self):
try:
os.remove('Phase3.py')
except: pass
import Phase3
self.newTaskManager()
from direct.showbase.EventManagerGlobal import eventMgr
eventMgr.restart()
from toontown.toonbase import ToontownStart
| getPercentPatchComplete | identifier_name |
ToontownLauncher.py | import os
import sys
import time
import types
ltime = 1 and time.localtime()
logSuffix = '%02d%02d%02d_%02d%02d%02d' % (ltime[0] - 2000, ltime[1], ltime[2],
ltime[3], ltime[4], ltime[5])
logfile = 'toontownD-' + logSuffix + '.log'
class LogAndOutput:
def __init__(self, orig, log):
self.orig = orig
self.log = log
def write(self, str):
self.log.write(str)
self.log.flush()
self.orig.write(str)
self.orig.flush()
def flush(self):
self.log.flush()
self.orig.flush()
log = open(logfile, 'a')
logOut = LogAndOutput(sys.__stdout__, log)
logErr = LogAndOutput(sys.__stderr__, log)
sys.stdout = logOut
sys.stderr = logErr
print('\n\nStarting Toontown...')
if 1:
print 'Current time: ' + time.asctime(time.localtime(time.time())) + ' ' + time.tzname[0]
print 'sys.path = ', sys.path
print 'sys.argv = ', sys.argv
from otp.launcher.LauncherBase import LauncherBase
from otp.otpbase import OTPLauncherGlobals
from panda3d.core import *
from toontown.toonbase import TTLocalizer
class ToontownLauncher(LauncherBase):
GameName = 'Toontown'
LauncherPhases = [3, 3.5, 4, 5, 5.5, 6, 7, 8, 9, 10, 11, 12, 13]
TmpOverallMap = [0.25, 0.15, 0.12, 0.17, 0.08, 0.07, 0.05, 0.05, 0.017,
0.011, 0.01, 0.012, 0.01]
RegistryKey = 'Software\\Disney\\Disney Online\\Toontown'
ForegroundSleepTime = 0.01
Localizer = TTLocalizer
VerifyFiles = 1
DecompressMultifiles = True
def __init__(self):
if sys.argv[2] == 'Phase2.py':
sys.argv = sys.argv[:1] + sys.argv[3:]
if len(sys.argv) == 5 or len(sys.argv) == 6:
self.gameServer = sys.argv[2]
self.accountServer = sys.argv[3]
self.testServerFlag = int(sys.argv[4])
else:
print 'Error: Launcher: incorrect number of parameters'
sys.exit()
self.toontownBlueKey = 'TOONTOWN_BLUE'
self.toontownPlayTokenKey = 'TTI_PLAYCOOKIE'
self.launcherMessageKey = 'LAUNCHER_MESSAGE'
self.game1DoneKey = 'GAME1_DONE'
self.game2DoneKey = 'GAME2_DONE'
self.tutorialCompleteKey = 'TUTORIAL_DONE'
self.toontownRegistryKey = 'Software\\Disney\\Disney Online\\Toontown'
if self.testServerFlag:
self.toontownRegistryKey = '%s%s' % (self.toontownRegistryKey, 'Test')
self.toontownRegistryKey = '%s%s' % (self.toontownRegistryKey, self.getProductName())
LauncherBase.__init__(self)
self.webAcctParams = 'WEB_ACCT_PARAMS'
self.parseWebAcctParams()
self.mainLoop()
def getValue(self, key, default=None):
try:
return self.getRegistry(key, default)
except:
return self.getRegistry(key)
def setValue(self, key, value):
self.setRegistry(key, value)
def getVerifyFiles(self):
return 1
def getTestServerFlag(self):
return self.testServerFlag
def getGameServer(self):
return self.gameServer
def getLogFileName(self):
return 'toontown'
def parseWebAcctParams(self):
s = config.GetString('fake-web-acct-params', '')
if not s:
s = self.getRegistry(self.webAcctParams)
self.setRegistry(self.webAcctParams, '')
l = s.split('&')
length = len(l)
dict = {}
for index in xrange(0, len(l)):
args = l[index].split('=')
if len(args) == 3:
[name, value] = args[-2:]
dict[name] = int(value)
elif len(args) == 2:
[name, value] = args
dict[name] = int(value)
self.secretNeedsParentPasswordKey = 1
if 'secretsNeedsParentPassword' in dict:
self.secretNeedsParentPasswordKey = dict['secretsNeedsParentPassword']
else:
self.notify.warning('no secretNeedsParentPassword token in webAcctParams')
self.notify.info('secretNeedsParentPassword = %d' % self.secretNeedsParentPasswordKey)
self.chatEligibleKey = 0
if 'chatEligible' in dict:
self.chatEligibleKey = dict['chatEligible']
else:
self.notify.warning('no chatEligible token in webAcctParams')
self.notify.info('chatEligibleKey = %d' % self.chatEligibleKey)
def getBlue(self):
blue = self.getValue(self.toontownBlueKey)
self.setValue(self.toontownBlueKey, '')
if blue == 'NO BLUE':
blue = None
return blue
def getPlayToken(self):
playToken = self.getValue(self.toontownPlayTokenKey)
self.setValue(self.toontownPlayTokenKey, '')
if playToken == 'NO PLAYTOKEN':
playToken = None
return playToken
def setRegistry(self, name, value):
if not self.WIN32:
return
t = type(value)
if t == types.IntType:
WindowsRegistry.setIntValue(self.toontownRegistryKey, name, value)
elif t == types.StringType:
WindowsRegistry.setStringValue(self.toontownRegistryKey, name, value)
else:
self.notify.warning('setRegistry: Invalid type for registry value: ' + `value`)
def getRegistry(self, name, missingValue=None):
self.notify.info('getRegistry%s' % ((name, missingValue),))
if not self.WIN32:
if missingValue == None:
missingValue = ''
value = os.environ.get(name, missingValue)
try:
value = int(value)
except: pass
return value
t = WindowsRegistry.getKeyType(self.toontownRegistryKey, name)
if t == WindowsRegistry.TInt:
|
elif t == WindowsRegistry.TString:
if missingValue == None:
missingValue = ''
return WindowsRegistry.getStringValue(self.toontownRegistryKey,
name, missingValue)
else:
return missingValue
def getCDDownloadPath(self, origPath, serverFilePath):
return '%s/%s%s/CD_%d/%s' % (origPath, self.ServerVersion, self.ServerVersionSuffix, self.fromCD, serverFilePath)
def getDownloadPath(self, origPath, serverFilePath):
return '%s/%s%s/%s' % (origPath, self.ServerVersion, self.ServerVersionSuffix, serverFilePath)
def getPercentPatchComplete(self, bytesWritten):
if self.totalPatchDownload:
return LauncherBase.getPercentPatchComplete(self, bytesWritten)
else:
return 0
def hashIsValid(self, serverHash, hashStr):
return serverHash.setFromDec(hashStr) or serverHash.setFromHex(hashStr)
def launcherMessage(self, msg):
LauncherBase.launcherMessage(self, msg)
self.setRegistry(self.launcherMessageKey, msg)
def getAccountServer(self):
return self.accountServer
def setTutorialComplete(self):
self.setRegistry(self.tutorialCompleteKey, 0)
def getTutorialComplete(self):
return self.getRegistry(self.tutorialCompleteKey, 0)
def getGame2Done(self):
return self.getRegistry(self.game2DoneKey, 0)
def setPandaErrorCode(self, code):
self.pandaErrorCode = code
if self.WIN32:
self.notify.info('setting panda error code to %s' % code)
exitCode2exitPage = {
OTPLauncherGlobals.ExitEnableChat: 'chat',
OTPLauncherGlobals.ExitSetParentPassword: 'setparentpassword',
OTPLauncherGlobals.ExitPurchase: 'purchase'}
if code in exitCode2exitPage:
self.setRegistry('EXIT_PAGE', exitCode2exitPage[code])
self.setRegistry(self.PandaErrorCodeKey, 0)
else:
self.setRegistry(self.PandaErrorCodeKey, code)
else:
LauncherBase.setPandaErrorCode(self, code)
def getNeedPwForSecretKey(self):
return self.secretNeedsParentPasswordKey
def getParentPasswordSet(self):
return self.chatEligibleKey
def MakeNTFSFilesGlobalWriteable(self, pathToSet=None):
if not self.WIN32:
return
LauncherBase.MakeNTFSFilesGlobalWriteable(self, pathToSet)
def startGame(self):
try:
os.remove('Phase3.py')
except: pass
import Phase3
self.newTaskManager()
from direct.showbase.EventManagerGlobal import eventMgr
eventMgr.restart()
from toontown.toonbase import ToontownStart
| if missingValue == None:
missingValue = 0
return WindowsRegistry.getIntValue(self.toontownRegistryKey,
name, missingValue) | conditional_block |
peturb.py | import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits import mplot3d
# returns a random d dimensional vector, a direction to peturb in
def direction(d,t):
# if type == uniform
if(t == 'u'):
return np.random.uniform(-1/np.sqrt(2), 1/np.sqrt(2), d)
elif(t == 'n'):
return np.random.normal(0, 1/np.sqrt(d), d)
elif(t == 's'):
# a point on the N-Sphere
angles = np.random.uniform(0, np.pi, d-2)
x = np.zeros(d)
x[0] = np.cos(angles[0])
for i in range(1,d-1):
temp = 1
for j in range(i):
|
x[i] = temp*np.cos(angles[i])
x[d-1] = x[d-2]*np.tan(angles[d-2])
return x
fig = plt.figure()
ax = plt.axes(projection='3d')
for i in range(1000):
R = np.random.uniform(0,1,1)[0]
R2 = np.random.uniform(0,1,1)[0]
xs = np.sin(np.arccos(1-2*R))*np.cos(2*np.pi*R2)
ys = np.sin(np.arccos(1-2*R))*np.sin(2*np.pi*R2)
zs = 1- 2*R
ax.scatter3D(xs, ys, zs, cmap='Greens')
ax.set_xlabel('X Label')
ax.set_ylabel('Y Label')
ax.set_zlabel('Z Label')
plt.show() | temp = temp * np.sin(angles[j]) | conditional_block |
peturb.py | import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits import mplot3d
# returns a random d dimensional vector, a direction to peturb in
def direction(d,t):
# if type == uniform
if(t == 'u'):
return np.random.uniform(-1/np.sqrt(2), 1/np.sqrt(2), d)
elif(t == 'n'):
return np.random.normal(0, 1/np.sqrt(d), d)
elif(t == 's'):
# a point on the N-Sphere
angles = np.random.uniform(0, np.pi, d-2) | for j in range(i):
temp = temp * np.sin(angles[j])
x[i] = temp*np.cos(angles[i])
x[d-1] = x[d-2]*np.tan(angles[d-2])
return x
fig = plt.figure()
ax = plt.axes(projection='3d')
for i in range(1000):
R = np.random.uniform(0,1,1)[0]
R2 = np.random.uniform(0,1,1)[0]
xs = np.sin(np.arccos(1-2*R))*np.cos(2*np.pi*R2)
ys = np.sin(np.arccos(1-2*R))*np.sin(2*np.pi*R2)
zs = 1- 2*R
ax.scatter3D(xs, ys, zs, cmap='Greens')
ax.set_xlabel('X Label')
ax.set_ylabel('Y Label')
ax.set_zlabel('Z Label')
plt.show() | x = np.zeros(d)
x[0] = np.cos(angles[0])
for i in range(1,d-1):
temp = 1 | random_line_split |
peturb.py | import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits import mplot3d
# returns a random d dimensional vector, a direction to peturb in
def direction(d,t):
# if type == uniform
|
fig = plt.figure()
ax = plt.axes(projection='3d')
for i in range(1000):
R = np.random.uniform(0,1,1)[0]
R2 = np.random.uniform(0,1,1)[0]
xs = np.sin(np.arccos(1-2*R))*np.cos(2*np.pi*R2)
ys = np.sin(np.arccos(1-2*R))*np.sin(2*np.pi*R2)
zs = 1- 2*R
ax.scatter3D(xs, ys, zs, cmap='Greens')
ax.set_xlabel('X Label')
ax.set_ylabel('Y Label')
ax.set_zlabel('Z Label')
plt.show() | if(t == 'u'):
return np.random.uniform(-1/np.sqrt(2), 1/np.sqrt(2), d)
elif(t == 'n'):
return np.random.normal(0, 1/np.sqrt(d), d)
elif(t == 's'):
# a point on the N-Sphere
angles = np.random.uniform(0, np.pi, d-2)
x = np.zeros(d)
x[0] = np.cos(angles[0])
for i in range(1,d-1):
temp = 1
for j in range(i):
temp = temp * np.sin(angles[j])
x[i] = temp*np.cos(angles[i])
x[d-1] = x[d-2]*np.tan(angles[d-2])
return x | identifier_body |
peturb.py | import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits import mplot3d
# returns a random d dimensional vector, a direction to peturb in
def | (d,t):
# if type == uniform
if(t == 'u'):
return np.random.uniform(-1/np.sqrt(2), 1/np.sqrt(2), d)
elif(t == 'n'):
return np.random.normal(0, 1/np.sqrt(d), d)
elif(t == 's'):
# a point on the N-Sphere
angles = np.random.uniform(0, np.pi, d-2)
x = np.zeros(d)
x[0] = np.cos(angles[0])
for i in range(1,d-1):
temp = 1
for j in range(i):
temp = temp * np.sin(angles[j])
x[i] = temp*np.cos(angles[i])
x[d-1] = x[d-2]*np.tan(angles[d-2])
return x
fig = plt.figure()
ax = plt.axes(projection='3d')
for i in range(1000):
R = np.random.uniform(0,1,1)[0]
R2 = np.random.uniform(0,1,1)[0]
xs = np.sin(np.arccos(1-2*R))*np.cos(2*np.pi*R2)
ys = np.sin(np.arccos(1-2*R))*np.sin(2*np.pi*R2)
zs = 1- 2*R
ax.scatter3D(xs, ys, zs, cmap='Greens')
ax.set_xlabel('X Label')
ax.set_ylabel('Y Label')
ax.set_zlabel('Z Label')
plt.show() | direction | identifier_name |
vim_esx_cl_inetworkfirewallrulesetallowediplist_firewall_ruleset_allowedip.py |
import logging
from pyvisdk.exceptions import InvalidArgumentError
# This module is NOT auto-generated
# Inspired by decompiled Java classes from vCenter's internalvim25stubs.jar
# Unless states otherside, the methods and attributes were not used by esxcli,
# and thus not tested
log = logging.getLogger(__name__)
def | (vim, *args, **kwargs):
obj = vim.client.factory.create('ns0:VimEsxCLInetworkfirewallrulesetallowediplistFirewallRulesetAllowedip')
# do some validation checking...
if (len(args) + len(kwargs)) < 0:
raise IndexError('Expected at least 1 arguments got: %d' % len(args))
required = [ ]
optional = [ 'AllowedIPAddresses', 'Ruleset' ]
for name, arg in zip(required + optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj | VimEsxCLInetworkfirewallrulesetallowediplistFirewallRulesetAllowedip | identifier_name |
vim_esx_cl_inetworkfirewallrulesetallowediplist_firewall_ruleset_allowedip.py |
import logging
from pyvisdk.exceptions import InvalidArgumentError
# This module is NOT auto-generated
# Inspired by decompiled Java classes from vCenter's internalvim25stubs.jar
# Unless states otherside, the methods and attributes were not used by esxcli,
# and thus not tested
log = logging.getLogger(__name__)
def VimEsxCLInetworkfirewallrulesetallowediplistFirewallRulesetAllowedip(vim, *args, **kwargs):
| obj = vim.client.factory.create('ns0:VimEsxCLInetworkfirewallrulesetallowediplistFirewallRulesetAllowedip')
# do some validation checking...
if (len(args) + len(kwargs)) < 0:
raise IndexError('Expected at least 1 arguments got: %d' % len(args))
required = [ ]
optional = [ 'AllowedIPAddresses', 'Ruleset' ]
for name, arg in zip(required + optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj | identifier_body | |
vim_esx_cl_inetworkfirewallrulesetallowediplist_firewall_ruleset_allowedip.py |
import logging
from pyvisdk.exceptions import InvalidArgumentError
# This module is NOT auto-generated
# Inspired by decompiled Java classes from vCenter's internalvim25stubs.jar
# Unless states otherside, the methods and attributes were not used by esxcli,
# and thus not tested
log = logging.getLogger(__name__)
def VimEsxCLInetworkfirewallrulesetallowediplistFirewallRulesetAllowedip(vim, *args, **kwargs):
obj = vim.client.factory.create('ns0:VimEsxCLInetworkfirewallrulesetallowediplistFirewallRulesetAllowedip')
# do some validation checking...
if (len(args) + len(kwargs)) < 0:
raise IndexError('Expected at least 1 arguments got: %d' % len(args))
required = [ ]
optional = [ 'AllowedIPAddresses', 'Ruleset' ]
for name, arg in zip(required + optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
|
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj | setattr(obj, name, value) | conditional_block |
vim_esx_cl_inetworkfirewallrulesetallowediplist_firewall_ruleset_allowedip.py | import logging
from pyvisdk.exceptions import InvalidArgumentError
# This module is NOT auto-generated
# Inspired by decompiled Java classes from vCenter's internalvim25stubs.jar
# Unless states otherside, the methods and attributes were not used by esxcli,
# and thus not tested
log = logging.getLogger(__name__)
def VimEsxCLInetworkfirewallrulesetallowediplistFirewallRulesetAllowedip(vim, *args, **kwargs):
obj = vim.client.factory.create('ns0:VimEsxCLInetworkfirewallrulesetallowediplistFirewallRulesetAllowedip')
# do some validation checking...
if (len(args) + len(kwargs)) < 0:
raise IndexError('Expected at least 1 arguments got: %d' % len(args))
required = [ ]
optional = [ 'AllowedIPAddresses', 'Ruleset' ]
for name, arg in zip(required + optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional))) | return obj | random_line_split | |
lib.rs | //! A macro that maps unicode names to chars and strings.
#![crate_type="dylib"]
#![feature(quote, plugin_registrar, plugin, rustc_private)]
#![plugin(regex_macros)]
extern crate syntax;
extern crate rustc;
extern crate regex;
extern crate unicode_names;
use syntax::ast;
use syntax::codemap;
use syntax::parse::token;
use syntax::ext::base::{self, ExtCtxt, MacResult, MacEager, DummyResult};
use syntax::ext::build::AstBuilder;
use rustc::plugin::Registry;
#[plugin_registrar]
#[doc(hidden)]
pub fn plugin_registrar(registrar: &mut Registry) {
registrar.register_macro("named_char", named_char);
registrar.register_macro("named", named);
}
fn named_char(cx: &mut ExtCtxt, sp: codemap::Span,
tts: &[ast::TokenTree]) -> Box<MacResult+'static> {
match base::get_single_str_from_tts(cx, sp, tts, "named_char") {
None => {}
Some(name) => match unicode_names::character(&name) {
None => cx.span_err(sp, &format!("`{}` does not name a character", name)),
// everything worked!
Some(c) => return MacEager::expr(cx.expr_lit(sp, ast::LitChar(c))),
}
}
// failed :(
DummyResult::expr(sp)
}
fn | (cx: &mut ExtCtxt, sp: codemap::Span, tts: &[ast::TokenTree]) -> Box<MacResult+'static> {
let string = match base::get_single_str_from_tts(cx, sp, tts, "named") {
None => return DummyResult::expr(sp),
Some(s) => s
};
// make sure unclosed braces don't escape.
static NAMES: regex::Regex = regex!(r"\\N\{(.*?)(?:\}|$)");
let new = NAMES.replace_all(&string, |c: ®ex::Captures| {
let full = c.at(0).unwrap();
if !full.ends_with("}") {
cx.span_err(sp, &format!("unclosed escape in `named!`: {}", full));
} else {
let name = c.at(1).unwrap();
match unicode_names::character(name) {
Some(c) => return c.to_string(),
None => {
cx.span_err(sp, &format!("`{}` does not name a character", name));
}
}
}
// failed :(
String::new()
});
MacEager::expr(cx.expr_str(sp, token::intern_and_get_ident(&new)))
}
| named | identifier_name |
lib.rs | //! A macro that maps unicode names to chars and strings.
#![crate_type="dylib"]
#![feature(quote, plugin_registrar, plugin, rustc_private)]
#![plugin(regex_macros)]
extern crate syntax;
extern crate rustc;
extern crate regex;
extern crate unicode_names;
use syntax::ast;
use syntax::codemap;
use syntax::parse::token;
use syntax::ext::base::{self, ExtCtxt, MacResult, MacEager, DummyResult};
use syntax::ext::build::AstBuilder;
use rustc::plugin::Registry;
#[plugin_registrar]
#[doc(hidden)]
pub fn plugin_registrar(registrar: &mut Registry) {
registrar.register_macro("named_char", named_char);
registrar.register_macro("named", named);
}
fn named_char(cx: &mut ExtCtxt, sp: codemap::Span,
tts: &[ast::TokenTree]) -> Box<MacResult+'static> {
match base::get_single_str_from_tts(cx, sp, tts, "named_char") {
None => {}
Some(name) => match unicode_names::character(&name) {
None => cx.span_err(sp, &format!("`{}` does not name a character", name)),
// everything worked!
Some(c) => return MacEager::expr(cx.expr_lit(sp, ast::LitChar(c))),
}
}
// failed :(
DummyResult::expr(sp)
}
fn named(cx: &mut ExtCtxt, sp: codemap::Span, tts: &[ast::TokenTree]) -> Box<MacResult+'static> {
let string = match base::get_single_str_from_tts(cx, sp, tts, "named") {
None => return DummyResult::expr(sp),
Some(s) => s
};
// make sure unclosed braces don't escape.
static NAMES: regex::Regex = regex!(r"\\N\{(.*?)(?:\}|$)");
| let new = NAMES.replace_all(&string, |c: ®ex::Captures| {
let full = c.at(0).unwrap();
if !full.ends_with("}") {
cx.span_err(sp, &format!("unclosed escape in `named!`: {}", full));
} else {
let name = c.at(1).unwrap();
match unicode_names::character(name) {
Some(c) => return c.to_string(),
None => {
cx.span_err(sp, &format!("`{}` does not name a character", name));
}
}
}
// failed :(
String::new()
});
MacEager::expr(cx.expr_str(sp, token::intern_and_get_ident(&new)))
} | random_line_split | |
lib.rs | //! A macro that maps unicode names to chars and strings.
#![crate_type="dylib"]
#![feature(quote, plugin_registrar, plugin, rustc_private)]
#![plugin(regex_macros)]
extern crate syntax;
extern crate rustc;
extern crate regex;
extern crate unicode_names;
use syntax::ast;
use syntax::codemap;
use syntax::parse::token;
use syntax::ext::base::{self, ExtCtxt, MacResult, MacEager, DummyResult};
use syntax::ext::build::AstBuilder;
use rustc::plugin::Registry;
#[plugin_registrar]
#[doc(hidden)]
pub fn plugin_registrar(registrar: &mut Registry) |
fn named_char(cx: &mut ExtCtxt, sp: codemap::Span,
tts: &[ast::TokenTree]) -> Box<MacResult+'static> {
match base::get_single_str_from_tts(cx, sp, tts, "named_char") {
None => {}
Some(name) => match unicode_names::character(&name) {
None => cx.span_err(sp, &format!("`{}` does not name a character", name)),
// everything worked!
Some(c) => return MacEager::expr(cx.expr_lit(sp, ast::LitChar(c))),
}
}
// failed :(
DummyResult::expr(sp)
}
fn named(cx: &mut ExtCtxt, sp: codemap::Span, tts: &[ast::TokenTree]) -> Box<MacResult+'static> {
let string = match base::get_single_str_from_tts(cx, sp, tts, "named") {
None => return DummyResult::expr(sp),
Some(s) => s
};
// make sure unclosed braces don't escape.
static NAMES: regex::Regex = regex!(r"\\N\{(.*?)(?:\}|$)");
let new = NAMES.replace_all(&string, |c: ®ex::Captures| {
let full = c.at(0).unwrap();
if !full.ends_with("}") {
cx.span_err(sp, &format!("unclosed escape in `named!`: {}", full));
} else {
let name = c.at(1).unwrap();
match unicode_names::character(name) {
Some(c) => return c.to_string(),
None => {
cx.span_err(sp, &format!("`{}` does not name a character", name));
}
}
}
// failed :(
String::new()
});
MacEager::expr(cx.expr_str(sp, token::intern_and_get_ident(&new)))
}
| {
registrar.register_macro("named_char", named_char);
registrar.register_macro("named", named);
} | identifier_body |
lib.rs | //! A macro that maps unicode names to chars and strings.
#![crate_type="dylib"]
#![feature(quote, plugin_registrar, plugin, rustc_private)]
#![plugin(regex_macros)]
extern crate syntax;
extern crate rustc;
extern crate regex;
extern crate unicode_names;
use syntax::ast;
use syntax::codemap;
use syntax::parse::token;
use syntax::ext::base::{self, ExtCtxt, MacResult, MacEager, DummyResult};
use syntax::ext::build::AstBuilder;
use rustc::plugin::Registry;
#[plugin_registrar]
#[doc(hidden)]
pub fn plugin_registrar(registrar: &mut Registry) {
registrar.register_macro("named_char", named_char);
registrar.register_macro("named", named);
}
fn named_char(cx: &mut ExtCtxt, sp: codemap::Span,
tts: &[ast::TokenTree]) -> Box<MacResult+'static> {
match base::get_single_str_from_tts(cx, sp, tts, "named_char") {
None => {}
Some(name) => match unicode_names::character(&name) {
None => cx.span_err(sp, &format!("`{}` does not name a character", name)),
// everything worked!
Some(c) => return MacEager::expr(cx.expr_lit(sp, ast::LitChar(c))),
}
}
// failed :(
DummyResult::expr(sp)
}
fn named(cx: &mut ExtCtxt, sp: codemap::Span, tts: &[ast::TokenTree]) -> Box<MacResult+'static> {
let string = match base::get_single_str_from_tts(cx, sp, tts, "named") {
None => return DummyResult::expr(sp),
Some(s) => s
};
// make sure unclosed braces don't escape.
static NAMES: regex::Regex = regex!(r"\\N\{(.*?)(?:\}|$)");
let new = NAMES.replace_all(&string, |c: ®ex::Captures| {
let full = c.at(0).unwrap();
if !full.ends_with("}") {
cx.span_err(sp, &format!("unclosed escape in `named!`: {}", full));
} else {
let name = c.at(1).unwrap();
match unicode_names::character(name) {
Some(c) => return c.to_string(),
None => |
}
}
// failed :(
String::new()
});
MacEager::expr(cx.expr_str(sp, token::intern_and_get_ident(&new)))
}
| {
cx.span_err(sp, &format!("`{}` does not name a character", name));
} | conditional_block |
OpacitySlider.js | BR.OpacitySlider = L.Control.extend({
options: {
position: 'topleft',
callback: function(opacity) {}
},
onAdd: function (map) {
var container = L.DomUtil.create('div', 'leaflet-bar control-slider'),
input = $('<input id="slider" type="text"/>'),
item = localStorage.opacitySliderValue,
value = item ? parseInt(item) : 100,
minOpacity = (BR.conf.minOpacity || 0) * 100;
if (value < minOpacity) |
var stopClickAfterSlide = function(evt) {
L.DomEvent.stop(evt);
removeStopClickListeners();
};
var removeStopClickListeners = function() {
document.removeEventListener('click', stopClickAfterSlide, true);
document.removeEventListener('mousedown', removeStopClickListeners, true);
};
$(container).html(input);
$(container).attr('title', 'Set transparency of route track and markers');
input.slider({
min: 0,
max: 100,
step: 1,
value: value,
orientation: 'vertical',
reversed : true,
selection: 'before', // inverted, serves as track style, see css
tooltip: 'hide'
}).on('slide slideStop', { self: this }, function (evt) {
evt.data.self.options.callback(evt.value / 100);
}).on('slideStop', function (evt) {
localStorage.opacitySliderValue = evt.value;
// When dragging outside slider and over map, click event after mouseup
// adds marker when active on Chromium. So disable click (not needed)
// once after sliding.
document.addEventListener('click', stopClickAfterSlide, true);
// Firefox does not fire click event in this case, so make sure stop listener
// is always removed on next mousedown.
document.addEventListener('mousedown', removeStopClickListeners, true);
});
this.options.callback(value / 100);
return container;
}
});
| {
value = minOpacity;
} | conditional_block |
OpacitySlider.js | BR.OpacitySlider = L.Control.extend({
options: {
position: 'topleft',
callback: function(opacity) {}
},
onAdd: function (map) {
var container = L.DomUtil.create('div', 'leaflet-bar control-slider'),
input = $('<input id="slider" type="text"/>'),
item = localStorage.opacitySliderValue,
value = item ? parseInt(item) : 100,
minOpacity = (BR.conf.minOpacity || 0) * 100;
if (value < minOpacity) {
value = minOpacity;
}
var stopClickAfterSlide = function(evt) {
L.DomEvent.stop(evt);
removeStopClickListeners();
};
var removeStopClickListeners = function() {
document.removeEventListener('click', stopClickAfterSlide, true);
document.removeEventListener('mousedown', removeStopClickListeners, true);
};
$(container).html(input);
$(container).attr('title', 'Set transparency of route track and markers');
input.slider({
min: 0,
max: 100,
step: 1,
value: value,
orientation: 'vertical',
reversed : true,
selection: 'before', // inverted, serves as track style, see css
tooltip: 'hide' | evt.data.self.options.callback(evt.value / 100);
}).on('slideStop', function (evt) {
localStorage.opacitySliderValue = evt.value;
// When dragging outside slider and over map, click event after mouseup
// adds marker when active on Chromium. So disable click (not needed)
// once after sliding.
document.addEventListener('click', stopClickAfterSlide, true);
// Firefox does not fire click event in this case, so make sure stop listener
// is always removed on next mousedown.
document.addEventListener('mousedown', removeStopClickListeners, true);
});
this.options.callback(value / 100);
return container;
}
}); | }).on('slide slideStop', { self: this }, function (evt) { | random_line_split |
reflector.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Type} from '../type';
import {PlatformReflectionCapabilities} from './platform_reflection_capabilities';
import {ReflectorReader} from './reflector_reader';
import {GetterFn, MethodFn, SetterFn} from './types';
export {PlatformReflectionCapabilities} from './platform_reflection_capabilities';
export {GetterFn, MethodFn, SetterFn} from './types';
/**
* Provides access to reflection data about symbols. Used internally by Angular
* to power dependency injection and compilation.
*/
export class Reflector extends ReflectorReader {
| (public reflectionCapabilities: PlatformReflectionCapabilities) { super(); }
updateCapabilities(caps: PlatformReflectionCapabilities) { this.reflectionCapabilities = caps; }
factory(type: Type<any>): Function { return this.reflectionCapabilities.factory(type); }
parameters(typeOrFunc: Type<any>): any[][] {
return this.reflectionCapabilities.parameters(typeOrFunc);
}
annotations(typeOrFunc: Type<any>): any[] {
return this.reflectionCapabilities.annotations(typeOrFunc);
}
propMetadata(typeOrFunc: Type<any>): {[key: string]: any[]} {
return this.reflectionCapabilities.propMetadata(typeOrFunc);
}
hasLifecycleHook(type: any, lcProperty: string): boolean {
return this.reflectionCapabilities.hasLifecycleHook(type, lcProperty);
}
getter(name: string): GetterFn { return this.reflectionCapabilities.getter(name); }
setter(name: string): SetterFn { return this.reflectionCapabilities.setter(name); }
method(name: string): MethodFn { return this.reflectionCapabilities.method(name); }
importUri(type: any): string { return this.reflectionCapabilities.importUri(type); }
resolveIdentifier(name: string, moduleUrl: string, members: string[], runtime: any): any {
return this.reflectionCapabilities.resolveIdentifier(name, moduleUrl, members, runtime);
}
resolveEnum(identifier: any, name: string): any {
return this.reflectionCapabilities.resolveEnum(identifier, name);
}
}
| constructor | identifier_name |
reflector.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Type} from '../type';
import {PlatformReflectionCapabilities} from './platform_reflection_capabilities';
import {ReflectorReader} from './reflector_reader';
import {GetterFn, MethodFn, SetterFn} from './types';
export {PlatformReflectionCapabilities} from './platform_reflection_capabilities';
export {GetterFn, MethodFn, SetterFn} from './types';
/**
* Provides access to reflection data about symbols. Used internally by Angular
* to power dependency injection and compilation.
*/
export class Reflector extends ReflectorReader {
constructor(public reflectionCapabilities: PlatformReflectionCapabilities) { super(); }
updateCapabilities(caps: PlatformReflectionCapabilities) { this.reflectionCapabilities = caps; }
factory(type: Type<any>): Function { return this.reflectionCapabilities.factory(type); }
parameters(typeOrFunc: Type<any>): any[][] {
return this.reflectionCapabilities.parameters(typeOrFunc);
}
annotations(typeOrFunc: Type<any>): any[] {
return this.reflectionCapabilities.annotations(typeOrFunc);
}
propMetadata(typeOrFunc: Type<any>): {[key: string]: any[]} {
return this.reflectionCapabilities.propMetadata(typeOrFunc);
}
hasLifecycleHook(type: any, lcProperty: string): boolean {
return this.reflectionCapabilities.hasLifecycleHook(type, lcProperty);
}
getter(name: string): GetterFn { return this.reflectionCapabilities.getter(name); }
setter(name: string): SetterFn { return this.reflectionCapabilities.setter(name); }
method(name: string): MethodFn { return this.reflectionCapabilities.method(name); } |
resolveIdentifier(name: string, moduleUrl: string, members: string[], runtime: any): any {
return this.reflectionCapabilities.resolveIdentifier(name, moduleUrl, members, runtime);
}
resolveEnum(identifier: any, name: string): any {
return this.reflectionCapabilities.resolveEnum(identifier, name);
}
} |
importUri(type: any): string { return this.reflectionCapabilities.importUri(type); } | random_line_split |
reflector.ts | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Type} from '../type';
import {PlatformReflectionCapabilities} from './platform_reflection_capabilities';
import {ReflectorReader} from './reflector_reader';
import {GetterFn, MethodFn, SetterFn} from './types';
export {PlatformReflectionCapabilities} from './platform_reflection_capabilities';
export {GetterFn, MethodFn, SetterFn} from './types';
/**
* Provides access to reflection data about symbols. Used internally by Angular
* to power dependency injection and compilation.
*/
export class Reflector extends ReflectorReader {
constructor(public reflectionCapabilities: PlatformReflectionCapabilities) { super(); }
updateCapabilities(caps: PlatformReflectionCapabilities) { this.reflectionCapabilities = caps; }
factory(type: Type<any>): Function { return this.reflectionCapabilities.factory(type); }
parameters(typeOrFunc: Type<any>): any[][] {
return this.reflectionCapabilities.parameters(typeOrFunc);
}
annotations(typeOrFunc: Type<any>): any[] {
return this.reflectionCapabilities.annotations(typeOrFunc);
}
propMetadata(typeOrFunc: Type<any>): {[key: string]: any[]} |
hasLifecycleHook(type: any, lcProperty: string): boolean {
return this.reflectionCapabilities.hasLifecycleHook(type, lcProperty);
}
getter(name: string): GetterFn { return this.reflectionCapabilities.getter(name); }
setter(name: string): SetterFn { return this.reflectionCapabilities.setter(name); }
method(name: string): MethodFn { return this.reflectionCapabilities.method(name); }
importUri(type: any): string { return this.reflectionCapabilities.importUri(type); }
resolveIdentifier(name: string, moduleUrl: string, members: string[], runtime: any): any {
return this.reflectionCapabilities.resolveIdentifier(name, moduleUrl, members, runtime);
}
resolveEnum(identifier: any, name: string): any {
return this.reflectionCapabilities.resolveEnum(identifier, name);
}
}
| {
return this.reflectionCapabilities.propMetadata(typeOrFunc);
} | identifier_body |
content_encoding.rs | use brotli::enc::backward_references::{BrotliEncoderParams, BrotliEncoderMode};
use brotli::enc::BrotliCompress as brotli_compress;
use flate2::write::{DeflateEncoder, GzEncoder};
use flate2::Compression as Flate2Compression;
use iron::headers::{QualityItem, Encoding};
use bzip2::Compression as BzCompression;
use std::collections::BTreeSet;
use bzip2::write::BzEncoder;
use std::io::{self, Write};
use unicase::UniCase;
use std::path::Path;
use std::fs::File;
use blake3;
lazy_static! {
/// The list of content encodings we handle.
pub static ref SUPPORTED_ENCODINGS: Vec<Encoding> = {
let es = vec![Encoding::Gzip, Encoding::Deflate, Encoding::EncodingExt("br".to_string()), Encoding::EncodingExt("bzip2".to_string())];
[es.clone(), es.into_iter().map(|e| Encoding::EncodingExt(format!("x-{}", e))).collect()].iter().flat_map(|e| e.clone()).collect()
};
/// The list of extensions not to encode.
pub static ref BLACKLISTED_ENCODING_EXTENSIONS: BTreeSet<UniCase<&'static str>> = {
let raw = include_str!("../../assets/encoding_blacklist");
raw.split('\n').map(str::trim).filter(|s| !s.is_empty() && !s.starts_with('#')).map(UniCase::new).collect()
};
pub static ref BROTLI_PARAMS: BrotliEncoderParams = BrotliEncoderParams {
mode: BrotliEncoderMode::BROTLI_MODE_TEXT,
..Default::default()
};
}
/// The minimal size at which to encode filesystem files.
pub const MIN_ENCODING_SIZE: u64 = 1024;
/// The maximal size at which to encode filesystem files.
pub const MAX_ENCODING_SIZE: u64 = 100 * 1024 * 1024;
/// The minimal size gain at which to preserve encoded filesystem files.
pub const MIN_ENCODING_GAIN: f64 = 1.1;
/// Find best supported encoding to use, or `None` for identity.
pub fn | (requested: &mut [QualityItem<Encoding>]) -> Option<Encoding> {
requested.sort_by_key(|e| e.quality);
requested.iter().filter(|e| e.quality.0 != 0).find(|e| SUPPORTED_ENCODINGS.contains(&e.item)).map(|e| e.item.clone())
}
/// Encode a string slice using a specified encoding or `None` if encoding failed or is not recognised.
pub fn encode_str(dt: &str, enc: &Encoding) -> Option<Vec<u8>> {
type EncodeT = fn(&str) -> Option<Vec<u8>>;
const STR_ENCODING_FNS: &[EncodeT] = &[encode_str_gzip, encode_str_deflate, encode_str_brotli, encode_str_bzip2];
encoding_idx(enc).and_then(|fi| STR_ENCODING_FNS[fi](dt))
}
/// Encode the file denoted by the specified path into the file denoted by the specified path using a specified encoding or
/// `false` if encoding failed, is not recognised or an I/O error occurred.
pub fn encode_file(p: &Path, op: &Path, enc: &Encoding) -> bool {
type EncodeT = fn(File, File) -> bool;
const FILE_ENCODING_FNS: &[EncodeT] = &[encode_file_gzip, encode_file_deflate, encode_file_brotli, encode_file_bzip2];
encoding_idx(enc)
.map(|fi| {
let inf = File::open(p);
let outf = File::create(op);
inf.is_ok() && outf.is_ok() && FILE_ENCODING_FNS[fi](inf.unwrap(), outf.unwrap())
})
.unwrap()
}
/// Encoding extension to use for encoded files, for example "gz" for gzip, or `None` if the encoding is not recognised.
pub fn encoding_extension(enc: &Encoding) -> Option<&'static str> {
const ENCODING_EXTS: &[&str] = &["gz", "dflt", "br", "bz2"];
encoding_idx(enc).map(|ei| ENCODING_EXTS[ei])
}
/// Return the 256-bit BLAKE3 hash of the file denoted by the specified path.
pub fn file_hash(p: &Path) -> blake3::Hash {
let mut ctx = blake3::Hasher::new();
io::copy(&mut File::open(p).unwrap(), &mut ctx).unwrap();
ctx.finalize()
}
fn encoding_idx(enc: &Encoding) -> Option<usize> {
match *enc {
Encoding::Gzip => Some(0),
Encoding::Deflate => Some(1),
Encoding::EncodingExt(ref e) => {
match &e[..] {
"x-gzip" => Some(0),
"x-deflate" => Some(1),
"br" | "x-br" => Some(2),
"bzip2" | "x-bzip2" => Some(3),
_ => None,
}
}
_ => None,
}
}
macro_rules! encode_fn {
($str_fn_name:ident, $file_fn_name:ident, $enc_tp:ident, $comp_lvl:expr, $constructor:expr) => {
fn $str_fn_name(dt: &str) -> Option<Vec<u8>> {
let mut cmp = $constructor(Vec::new());
cmp.write_all(dt.as_bytes()).ok().and_then(|_| cmp.finish().ok())
}
fn $file_fn_name(mut inf: File, outf: File) -> bool {
let mut cmp = $constructor(outf);
io::copy(&mut inf, &mut cmp).and_then(|_| cmp.finish()).is_ok()
}
};
($str_fn_name:ident, $file_fn_name:ident, $enc_tp:ident, $comp_lvl:expr) => {
encode_fn!($str_fn_name, $file_fn_name, $enc_tp, $comp_lvl, |into| $enc_tp::new(into, $comp_lvl));
}
}
encode_fn!(encode_str_gzip, encode_file_gzip, GzEncoder, Flate2Compression::default());
encode_fn!(encode_str_deflate, encode_file_deflate, DeflateEncoder, Flate2Compression::default());
encode_fn!(encode_str_bzip2, encode_file_bzip2, BzEncoder, BzCompression::Default);
fn encode_str_brotli(dt: &str) -> Option<Vec<u8>> {
let mut ret = Vec::new();
brotli_compress(&mut dt.as_bytes(), &mut ret, &BROTLI_PARAMS).ok().map(|_| ret)
}
fn encode_file_brotli(mut inf: File, mut outf: File) -> bool {
brotli_compress(&mut inf, &mut outf, &BROTLI_PARAMS).is_ok()
}
| response_encoding | identifier_name |
content_encoding.rs | use brotli::enc::backward_references::{BrotliEncoderParams, BrotliEncoderMode};
use brotli::enc::BrotliCompress as brotli_compress;
use flate2::write::{DeflateEncoder, GzEncoder};
use flate2::Compression as Flate2Compression;
use iron::headers::{QualityItem, Encoding};
use bzip2::Compression as BzCompression;
use std::collections::BTreeSet;
use bzip2::write::BzEncoder;
use std::io::{self, Write};
use unicase::UniCase;
use std::path::Path;
use std::fs::File;
use blake3;
lazy_static! {
/// The list of content encodings we handle.
pub static ref SUPPORTED_ENCODINGS: Vec<Encoding> = {
let es = vec![Encoding::Gzip, Encoding::Deflate, Encoding::EncodingExt("br".to_string()), Encoding::EncodingExt("bzip2".to_string())];
[es.clone(), es.into_iter().map(|e| Encoding::EncodingExt(format!("x-{}", e))).collect()].iter().flat_map(|e| e.clone()).collect()
};
/// The list of extensions not to encode.
pub static ref BLACKLISTED_ENCODING_EXTENSIONS: BTreeSet<UniCase<&'static str>> = {
let raw = include_str!("../../assets/encoding_blacklist");
raw.split('\n').map(str::trim).filter(|s| !s.is_empty() && !s.starts_with('#')).map(UniCase::new).collect()
};
pub static ref BROTLI_PARAMS: BrotliEncoderParams = BrotliEncoderParams {
mode: BrotliEncoderMode::BROTLI_MODE_TEXT,
..Default::default()
};
}
/// The minimal size at which to encode filesystem files.
pub const MIN_ENCODING_SIZE: u64 = 1024;
/// The maximal size at which to encode filesystem files. |
/// Find best supported encoding to use, or `None` for identity.
pub fn response_encoding(requested: &mut [QualityItem<Encoding>]) -> Option<Encoding> {
requested.sort_by_key(|e| e.quality);
requested.iter().filter(|e| e.quality.0 != 0).find(|e| SUPPORTED_ENCODINGS.contains(&e.item)).map(|e| e.item.clone())
}
/// Encode a string slice using a specified encoding or `None` if encoding failed or is not recognised.
pub fn encode_str(dt: &str, enc: &Encoding) -> Option<Vec<u8>> {
type EncodeT = fn(&str) -> Option<Vec<u8>>;
const STR_ENCODING_FNS: &[EncodeT] = &[encode_str_gzip, encode_str_deflate, encode_str_brotli, encode_str_bzip2];
encoding_idx(enc).and_then(|fi| STR_ENCODING_FNS[fi](dt))
}
/// Encode the file denoted by the specified path into the file denoted by the specified path using a specified encoding or
/// `false` if encoding failed, is not recognised or an I/O error occurred.
pub fn encode_file(p: &Path, op: &Path, enc: &Encoding) -> bool {
type EncodeT = fn(File, File) -> bool;
const FILE_ENCODING_FNS: &[EncodeT] = &[encode_file_gzip, encode_file_deflate, encode_file_brotli, encode_file_bzip2];
encoding_idx(enc)
.map(|fi| {
let inf = File::open(p);
let outf = File::create(op);
inf.is_ok() && outf.is_ok() && FILE_ENCODING_FNS[fi](inf.unwrap(), outf.unwrap())
})
.unwrap()
}
/// Encoding extension to use for encoded files, for example "gz" for gzip, or `None` if the encoding is not recognised.
pub fn encoding_extension(enc: &Encoding) -> Option<&'static str> {
const ENCODING_EXTS: &[&str] = &["gz", "dflt", "br", "bz2"];
encoding_idx(enc).map(|ei| ENCODING_EXTS[ei])
}
/// Return the 256-bit BLAKE3 hash of the file denoted by the specified path.
pub fn file_hash(p: &Path) -> blake3::Hash {
let mut ctx = blake3::Hasher::new();
io::copy(&mut File::open(p).unwrap(), &mut ctx).unwrap();
ctx.finalize()
}
fn encoding_idx(enc: &Encoding) -> Option<usize> {
match *enc {
Encoding::Gzip => Some(0),
Encoding::Deflate => Some(1),
Encoding::EncodingExt(ref e) => {
match &e[..] {
"x-gzip" => Some(0),
"x-deflate" => Some(1),
"br" | "x-br" => Some(2),
"bzip2" | "x-bzip2" => Some(3),
_ => None,
}
}
_ => None,
}
}
macro_rules! encode_fn {
($str_fn_name:ident, $file_fn_name:ident, $enc_tp:ident, $comp_lvl:expr, $constructor:expr) => {
fn $str_fn_name(dt: &str) -> Option<Vec<u8>> {
let mut cmp = $constructor(Vec::new());
cmp.write_all(dt.as_bytes()).ok().and_then(|_| cmp.finish().ok())
}
fn $file_fn_name(mut inf: File, outf: File) -> bool {
let mut cmp = $constructor(outf);
io::copy(&mut inf, &mut cmp).and_then(|_| cmp.finish()).is_ok()
}
};
($str_fn_name:ident, $file_fn_name:ident, $enc_tp:ident, $comp_lvl:expr) => {
encode_fn!($str_fn_name, $file_fn_name, $enc_tp, $comp_lvl, |into| $enc_tp::new(into, $comp_lvl));
}
}
encode_fn!(encode_str_gzip, encode_file_gzip, GzEncoder, Flate2Compression::default());
encode_fn!(encode_str_deflate, encode_file_deflate, DeflateEncoder, Flate2Compression::default());
encode_fn!(encode_str_bzip2, encode_file_bzip2, BzEncoder, BzCompression::Default);
fn encode_str_brotli(dt: &str) -> Option<Vec<u8>> {
let mut ret = Vec::new();
brotli_compress(&mut dt.as_bytes(), &mut ret, &BROTLI_PARAMS).ok().map(|_| ret)
}
fn encode_file_brotli(mut inf: File, mut outf: File) -> bool {
brotli_compress(&mut inf, &mut outf, &BROTLI_PARAMS).is_ok()
} | pub const MAX_ENCODING_SIZE: u64 = 100 * 1024 * 1024;
/// The minimal size gain at which to preserve encoded filesystem files.
pub const MIN_ENCODING_GAIN: f64 = 1.1;
| random_line_split |
content_encoding.rs | use brotli::enc::backward_references::{BrotliEncoderParams, BrotliEncoderMode};
use brotli::enc::BrotliCompress as brotli_compress;
use flate2::write::{DeflateEncoder, GzEncoder};
use flate2::Compression as Flate2Compression;
use iron::headers::{QualityItem, Encoding};
use bzip2::Compression as BzCompression;
use std::collections::BTreeSet;
use bzip2::write::BzEncoder;
use std::io::{self, Write};
use unicase::UniCase;
use std::path::Path;
use std::fs::File;
use blake3;
lazy_static! {
/// The list of content encodings we handle.
pub static ref SUPPORTED_ENCODINGS: Vec<Encoding> = {
let es = vec![Encoding::Gzip, Encoding::Deflate, Encoding::EncodingExt("br".to_string()), Encoding::EncodingExt("bzip2".to_string())];
[es.clone(), es.into_iter().map(|e| Encoding::EncodingExt(format!("x-{}", e))).collect()].iter().flat_map(|e| e.clone()).collect()
};
/// The list of extensions not to encode.
pub static ref BLACKLISTED_ENCODING_EXTENSIONS: BTreeSet<UniCase<&'static str>> = {
let raw = include_str!("../../assets/encoding_blacklist");
raw.split('\n').map(str::trim).filter(|s| !s.is_empty() && !s.starts_with('#')).map(UniCase::new).collect()
};
pub static ref BROTLI_PARAMS: BrotliEncoderParams = BrotliEncoderParams {
mode: BrotliEncoderMode::BROTLI_MODE_TEXT,
..Default::default()
};
}
/// The minimal size at which to encode filesystem files.
pub const MIN_ENCODING_SIZE: u64 = 1024;
/// The maximal size at which to encode filesystem files.
pub const MAX_ENCODING_SIZE: u64 = 100 * 1024 * 1024;
/// The minimal size gain at which to preserve encoded filesystem files.
pub const MIN_ENCODING_GAIN: f64 = 1.1;
/// Find best supported encoding to use, or `None` for identity.
pub fn response_encoding(requested: &mut [QualityItem<Encoding>]) -> Option<Encoding> {
requested.sort_by_key(|e| e.quality);
requested.iter().filter(|e| e.quality.0 != 0).find(|e| SUPPORTED_ENCODINGS.contains(&e.item)).map(|e| e.item.clone())
}
/// Encode a string slice using a specified encoding or `None` if encoding failed or is not recognised.
pub fn encode_str(dt: &str, enc: &Encoding) -> Option<Vec<u8>> {
type EncodeT = fn(&str) -> Option<Vec<u8>>;
const STR_ENCODING_FNS: &[EncodeT] = &[encode_str_gzip, encode_str_deflate, encode_str_brotli, encode_str_bzip2];
encoding_idx(enc).and_then(|fi| STR_ENCODING_FNS[fi](dt))
}
/// Encode the file denoted by the specified path into the file denoted by the specified path using a specified encoding or
/// `false` if encoding failed, is not recognised or an I/O error occurred.
pub fn encode_file(p: &Path, op: &Path, enc: &Encoding) -> bool {
type EncodeT = fn(File, File) -> bool;
const FILE_ENCODING_FNS: &[EncodeT] = &[encode_file_gzip, encode_file_deflate, encode_file_brotli, encode_file_bzip2];
encoding_idx(enc)
.map(|fi| {
let inf = File::open(p);
let outf = File::create(op);
inf.is_ok() && outf.is_ok() && FILE_ENCODING_FNS[fi](inf.unwrap(), outf.unwrap())
})
.unwrap()
}
/// Encoding extension to use for encoded files, for example "gz" for gzip, or `None` if the encoding is not recognised.
pub fn encoding_extension(enc: &Encoding) -> Option<&'static str> {
const ENCODING_EXTS: &[&str] = &["gz", "dflt", "br", "bz2"];
encoding_idx(enc).map(|ei| ENCODING_EXTS[ei])
}
/// Return the 256-bit BLAKE3 hash of the file denoted by the specified path.
pub fn file_hash(p: &Path) -> blake3::Hash {
let mut ctx = blake3::Hasher::new();
io::copy(&mut File::open(p).unwrap(), &mut ctx).unwrap();
ctx.finalize()
}
fn encoding_idx(enc: &Encoding) -> Option<usize> {
match *enc {
Encoding::Gzip => Some(0),
Encoding::Deflate => Some(1),
Encoding::EncodingExt(ref e) => {
match &e[..] {
"x-gzip" => Some(0),
"x-deflate" => Some(1),
"br" | "x-br" => Some(2),
"bzip2" | "x-bzip2" => Some(3),
_ => None,
}
}
_ => None,
}
}
macro_rules! encode_fn {
($str_fn_name:ident, $file_fn_name:ident, $enc_tp:ident, $comp_lvl:expr, $constructor:expr) => {
fn $str_fn_name(dt: &str) -> Option<Vec<u8>> {
let mut cmp = $constructor(Vec::new());
cmp.write_all(dt.as_bytes()).ok().and_then(|_| cmp.finish().ok())
}
fn $file_fn_name(mut inf: File, outf: File) -> bool {
let mut cmp = $constructor(outf);
io::copy(&mut inf, &mut cmp).and_then(|_| cmp.finish()).is_ok()
}
};
($str_fn_name:ident, $file_fn_name:ident, $enc_tp:ident, $comp_lvl:expr) => {
encode_fn!($str_fn_name, $file_fn_name, $enc_tp, $comp_lvl, |into| $enc_tp::new(into, $comp_lvl));
}
}
encode_fn!(encode_str_gzip, encode_file_gzip, GzEncoder, Flate2Compression::default());
encode_fn!(encode_str_deflate, encode_file_deflate, DeflateEncoder, Flate2Compression::default());
encode_fn!(encode_str_bzip2, encode_file_bzip2, BzEncoder, BzCompression::Default);
fn encode_str_brotli(dt: &str) -> Option<Vec<u8>> {
let mut ret = Vec::new();
brotli_compress(&mut dt.as_bytes(), &mut ret, &BROTLI_PARAMS).ok().map(|_| ret)
}
fn encode_file_brotli(mut inf: File, mut outf: File) -> bool | {
brotli_compress(&mut inf, &mut outf, &BROTLI_PARAMS).is_ok()
} | identifier_body | |
wiggle_to_binned_array.py | #!/afs/bx.psu.edu/project/pythons/py2.7-linux-x86_64-ucs4/bin/python2.7
"""
Convert wiggle data to a binned array. This assumes the input data is on a
single chromosome and does no sanity checks!
usage: %prog score_file out_file < wiggle_data
-c, --comp=type: compression type (none, zlib, lzo)
"""
from __future__ import division
import sys
import psyco_full
import bx.wiggle
from bx.binned_array import BinnedArray
from bx_extras.fpconst import isNaN
from bx.cookbook import doc_optparse
from bx import misc
def main():
# Parse command line
|
if __name__ == "__main__": main()
| options, args = doc_optparse.parse( __doc__ )
try:
if options.comp:
comp_type = options.comp
else:
comp_type = None
score_fname = args[0]
out_fname = args[1]
except:
doc_optparse.exit()
scores = BinnedArray()
## last_chrom = None
for i, ( chrom, pos, val ) in enumerate( bx.wiggle.Reader( misc.open_compressed( score_fname ) ) ):
#if last_chrom is None:
# last_chrom = chrom
#else:
# assert chrom == last_chrom, "This script expects a 'wiggle' input on only one chromosome"
scores[pos] = val
# Status
if i % 10000 == 0: print i, "scores processed"
out = open( out_fname, "w" )
if comp_type:
scores.to_file( out, comp_type=comp_type )
else:
scores.to_file( out )
out.close() | identifier_body |
wiggle_to_binned_array.py | #!/afs/bx.psu.edu/project/pythons/py2.7-linux-x86_64-ucs4/bin/python2.7
"""
Convert wiggle data to a binned array. This assumes the input data is on a
single chromosome and does no sanity checks!
usage: %prog score_file out_file < wiggle_data
-c, --comp=type: compression type (none, zlib, lzo)
"""
from __future__ import division
import sys
import psyco_full
import bx.wiggle
from bx.binned_array import BinnedArray
from bx_extras.fpconst import isNaN
from bx.cookbook import doc_optparse
from bx import misc
def main():
# Parse command line
options, args = doc_optparse.parse( __doc__ )
try:
if options.comp:
comp_type = options.comp
else:
|
score_fname = args[0]
out_fname = args[1]
except:
doc_optparse.exit()
scores = BinnedArray()
## last_chrom = None
for i, ( chrom, pos, val ) in enumerate( bx.wiggle.Reader( misc.open_compressed( score_fname ) ) ):
#if last_chrom is None:
# last_chrom = chrom
#else:
# assert chrom == last_chrom, "This script expects a 'wiggle' input on only one chromosome"
scores[pos] = val
# Status
if i % 10000 == 0: print i, "scores processed"
out = open( out_fname, "w" )
if comp_type:
scores.to_file( out, comp_type=comp_type )
else:
scores.to_file( out )
out.close()
if __name__ == "__main__": main()
| comp_type = None | conditional_block |
wiggle_to_binned_array.py | #!/afs/bx.psu.edu/project/pythons/py2.7-linux-x86_64-ucs4/bin/python2.7
"""
Convert wiggle data to a binned array. This assumes the input data is on a
single chromosome and does no sanity checks!
usage: %prog score_file out_file < wiggle_data
-c, --comp=type: compression type (none, zlib, lzo)
"""
from __future__ import division
import sys
import psyco_full
import bx.wiggle
from bx.binned_array import BinnedArray
from bx_extras.fpconst import isNaN
from bx.cookbook import doc_optparse
from bx import misc
def main():
# Parse command line
options, args = doc_optparse.parse( __doc__ )
try:
if options.comp:
comp_type = options.comp
else:
comp_type = None
score_fname = args[0]
out_fname = args[1]
except:
doc_optparse.exit()
|
## last_chrom = None
for i, ( chrom, pos, val ) in enumerate( bx.wiggle.Reader( misc.open_compressed( score_fname ) ) ):
#if last_chrom is None:
# last_chrom = chrom
#else:
# assert chrom == last_chrom, "This script expects a 'wiggle' input on only one chromosome"
scores[pos] = val
# Status
if i % 10000 == 0: print i, "scores processed"
out = open( out_fname, "w" )
if comp_type:
scores.to_file( out, comp_type=comp_type )
else:
scores.to_file( out )
out.close()
if __name__ == "__main__": main() | scores = BinnedArray() | random_line_split |
wiggle_to_binned_array.py | #!/afs/bx.psu.edu/project/pythons/py2.7-linux-x86_64-ucs4/bin/python2.7
"""
Convert wiggle data to a binned array. This assumes the input data is on a
single chromosome and does no sanity checks!
usage: %prog score_file out_file < wiggle_data
-c, --comp=type: compression type (none, zlib, lzo)
"""
from __future__ import division
import sys
import psyco_full
import bx.wiggle
from bx.binned_array import BinnedArray
from bx_extras.fpconst import isNaN
from bx.cookbook import doc_optparse
from bx import misc
def | ():
# Parse command line
options, args = doc_optparse.parse( __doc__ )
try:
if options.comp:
comp_type = options.comp
else:
comp_type = None
score_fname = args[0]
out_fname = args[1]
except:
doc_optparse.exit()
scores = BinnedArray()
## last_chrom = None
for i, ( chrom, pos, val ) in enumerate( bx.wiggle.Reader( misc.open_compressed( score_fname ) ) ):
#if last_chrom is None:
# last_chrom = chrom
#else:
# assert chrom == last_chrom, "This script expects a 'wiggle' input on only one chromosome"
scores[pos] = val
# Status
if i % 10000 == 0: print i, "scores processed"
out = open( out_fname, "w" )
if comp_type:
scores.to_file( out, comp_type=comp_type )
else:
scores.to_file( out )
out.close()
if __name__ == "__main__": main()
| main | identifier_name |
ConnectedMessage.tsx | /*
* Wire
* Copyright (C) 2021 Wire Swiss GmbH
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see http://www.gnu.org/licenses/.
*
*/
import Avatar, {AVATAR_SIZE} from 'Components/Avatar';
import ClassifiedBar from 'Components/input/ClassifiedBar';
import React from 'react';
import {User} from 'src/script/entity/User';
import {useKoSubscribableChildren} from 'Util/ComponentUtil';
import {t} from 'Util/LocalizerUtil';
export interface ConnectedMessageProps {
classifiedDomains?: string[];
onClickCancelRequest: () => void;
showServicesWarning?: boolean;
user: User;
}
const ConnectedMessage: React.FC<ConnectedMessageProps> = ({
user,
onClickCancelRequest,
showServicesWarning = false,
classifiedDomains,
}) => {
const {name, providerName, isOutgoingRequest} = useKoSubscribableChildren(user, [
'name',
'providerName',
'isOutgoingRequest',
]);
const handle = user.handle;
const isService = user.isService;
return (
<div className="message-connected" data-uie-name="element-connected-message">
<span className="message-connected-header">{name}</span>
{isService ? (
<span className="message-connected-provider-name">{providerName}</span>
) : (
<span className="message-connected-username label-username">{handle}</span>
)}
{isOutgoingRequest && classifiedDomains && <ClassifiedBar users={[user]} classifiedDomains={classifiedDomains} />}
<Avatar
avatarSize={AVATAR_SIZE.X_LARGE}
participant={user}
noBadge={isOutgoingRequest}
className="message-connected-avatar cursor-default"
/> | {isOutgoingRequest && (
<div
className="message-connected-cancel accent-text"
onClick={onClickCancelRequest}
data-uie-name="do-cancel-request"
>
{t('conversationConnectionCancelRequest')}
</div>
)}
{showServicesWarning && (
<div className="message-services-warning" data-uie-name="label-services-warning">
{t('conversationServicesWarning')}
</div>
)}
</div>
);
};
export default ConnectedMessage; | random_line_split | |
index.py | import os
import traceback
import json
import requests
from flask import Flask, request
from cities_list import CITIES
from messages import get_message, search_keyword
token = os.environ.get('FB_ACCESS_TOKEN')
api_key = os.environ.get('WEATHER_API_KEY')
app = Flask(__name__)
def location_quick_reply(sender, text=None):
if not text:
text = get_message('location-button')
return {
"recipient": {
"id": sender
},
"message": {
"text": text,
"quick_replies": [
{
"content_type": "location",
}
]
}
}
def send_attachment(sender, type, payload):
return {
"recipient": {
"id": sender
},
"message": {
"attachment": {
"type": type,
"payload": payload,
}
}
}
def send_text(sender, text):
return {
"recipient": {
"id": sender
},
"message": {
"text": text
}
}
def send_message(payload):
requests.post('https://graph.facebook.com/v2.6/me/messages/?access_token=' + token, json=payload)
def send_weather_info(sender, **kwargs):
latitude = kwargs.pop('latitude', None)
longitude = kwargs.pop('longitude', None)
city_name = kwargs.pop('city_name', None)
if latitude and longitude:
query = 'lat={}&lon={}'.format(latitude, longitude)
elif city_name:
query = 'q={},br'.format(city_name.title())
url = 'http://api.openweathermap.org/data/2.5/weather?' \
'{}&appid={}&units={}&lang={}'.format(query,
api_key,
'metric',
'pt')
r = requests.get(url)
response = r.json()
print(response)
if 'cod' in response:
if response['cod'] != 200:
return 'error'
name = response['name']
weather = response['main']
wind = response['wind']
elements = [{
'title': name,
'subtitle': 'Temperatura: {} graus'.format(str(weather['temp']).replace('.',',')),
'image_url': 'https://cdn-images-1.medium.com/max/800/1*LkbHjhacSRDNDzupX7pgEQ.jpeg'
}]
for info in response['weather']:
description = info['description'].capitalize()
icon = info['icon']
weather_data = 'Umidade: {}%\n' \
'Pressão: {}\n' \
'Velocidade do vento: {}'.format(weather['humidity'],
weather['pressure'],
wind['speed'])
if 'visibility' in response:
weather_data = '{}\n Visibilidade: {}'.format(weather_data, response['visibility'])
elements.append({
'title': description,
'subtitle': weather_data,
'image_url': 'http://openweathermap.org/img/w/{}.png'.format(icon)
})
payload = send_attachment(sender,
'template',
{
"template_type": "list",
"top_element_style": "large",
"elements": elements,
"buttons": [
{
"title": "Fazer nova pesquisa",
"type": "postback",
"payload": "do_it_again"
}
]
})
send_message(payload)
return None
@app.route('/', methods=['GET', 'POST'])
def webhook():
if request.method == 'POST':
try:
data = json.loads(request.data.decode())
sender = data['entry'][0]['messaging'][0]['sender']['id']
print(data)
if 'message' in data['entry'][0]['messaging'][0]:
message = data['entry'][0]['messaging'][0]['message']
if 'postback' in data['entry'][0]['messaging'][0]:
# Action when user first enters the chat
payload = data['entry'][0]['messaging'][0]['postback']['payload']
if payload == 'begin_button':
message = send_text(sender, 'Olá, tudo bem? Vamos começar?')
send_message(message)
payload = location_quick_reply(sender)
send_message(payload)
return 'Ok'
# Resend the location button
if payload == 'do_it_again':
payload = location_quick_reply(sender)
send_message(payload)
if 'attachments' in message:
if 'payload' in message['attachments'][0]:
if 'coordinates' in message['attachments'][0]['payload']:
location = message['attachments'][0]['payload']['coordinates']
latitude = location['lat']
longitude = location['long']
send_weather_info(sender, latitude=latitude, longitude=longitude)
if _return == 'error':
message = send_text(sender, get_message('error'))
send_message(message)
payload = location_quick_reply(sender)
send_message(payload)
else:
text = message['text']
for city in CITIES:
if text.lower() in city:
_return = send_weather_info(sender, city_name=text)
| message = send_text(sender, get_message('error'))
send_message(message)
# Send location button
payload = location_quick_reply(sender)
send_message(payload)
return 'Ok'
# If text not in city list...
chat_message = search_keyword(text)
if chat_message:
# if found keyword, reply with chat stuff
message = send_text(sender, chat_message)
send_message(message)
else:
message = send_text(sender, get_message('not-a-city'))
send_message(message)
# Send location button
payload = location_quick_reply(sender)
send_message(payload)
except Exception as e:
print(traceback.format_exc())
elif request.method == 'GET':
if request.args.get('hub.verify_token') == os.environ.get('FB_VERIFY_TOKEN'):
return request.args.get('hub.challenge')
return "Wrong Verify Token"
return "Nothing"
if __name__ == '__main__':
app.run(debug=True) | if _return == 'error': | random_line_split |
index.py | import os
import traceback
import json
import requests
from flask import Flask, request
from cities_list import CITIES
from messages import get_message, search_keyword
token = os.environ.get('FB_ACCESS_TOKEN')
api_key = os.environ.get('WEATHER_API_KEY')
app = Flask(__name__)
def location_quick_reply(sender, text=None):
if not text:
text = get_message('location-button')
return {
"recipient": {
"id": sender
},
"message": {
"text": text,
"quick_replies": [
{
"content_type": "location",
}
]
}
}
def send_attachment(sender, type, payload):
return {
"recipient": {
"id": sender
},
"message": {
"attachment": {
"type": type,
"payload": payload,
}
}
}
def send_text(sender, text):
return {
"recipient": {
"id": sender
},
"message": {
"text": text
}
}
def send_message(payload):
requests.post('https://graph.facebook.com/v2.6/me/messages/?access_token=' + token, json=payload)
def send_weather_info(sender, **kwargs):
latitude = kwargs.pop('latitude', None)
longitude = kwargs.pop('longitude', None)
city_name = kwargs.pop('city_name', None)
if latitude and longitude:
query = 'lat={}&lon={}'.format(latitude, longitude)
elif city_name:
query = 'q={},br'.format(city_name.title())
url = 'http://api.openweathermap.org/data/2.5/weather?' \
'{}&appid={}&units={}&lang={}'.format(query,
api_key,
'metric',
'pt')
r = requests.get(url)
response = r.json()
print(response)
if 'cod' in response:
if response['cod'] != 200:
return 'error'
name = response['name']
weather = response['main']
wind = response['wind']
elements = [{
'title': name,
'subtitle': 'Temperatura: {} graus'.format(str(weather['temp']).replace('.',',')),
'image_url': 'https://cdn-images-1.medium.com/max/800/1*LkbHjhacSRDNDzupX7pgEQ.jpeg'
}]
for info in response['weather']:
description = info['description'].capitalize()
icon = info['icon']
weather_data = 'Umidade: {}%\n' \
'Pressão: {}\n' \
'Velocidade do vento: {}'.format(weather['humidity'],
weather['pressure'],
wind['speed'])
if 'visibility' in response:
weather_data = '{}\n Visibilidade: {}'.format(weather_data, response['visibility'])
elements.append({
'title': description,
'subtitle': weather_data,
'image_url': 'http://openweathermap.org/img/w/{}.png'.format(icon)
})
payload = send_attachment(sender,
'template',
{
"template_type": "list",
"top_element_style": "large",
"elements": elements,
"buttons": [
{
"title": "Fazer nova pesquisa",
"type": "postback",
"payload": "do_it_again"
}
]
})
send_message(payload)
return None
@app.route('/', methods=['GET', 'POST'])
def webhook():
if request.method == 'POST':
try:
data = json.loads(request.data.decode())
sender = data['entry'][0]['messaging'][0]['sender']['id']
print(data)
if 'message' in data['entry'][0]['messaging'][0]:
message = data['entry'][0]['messaging'][0]['message']
if 'postback' in data['entry'][0]['messaging'][0]:
# Action when user first enters the chat
payload = data['entry'][0]['messaging'][0]['postback']['payload']
if payload == 'begin_button':
message = send_text(sender, 'Olá, tudo bem? Vamos começar?')
send_message(message)
payload = location_quick_reply(sender)
send_message(payload)
return 'Ok'
# Resend the location button
if payload == 'do_it_again':
payload = location_quick_reply(sender)
send_message(payload)
if 'attachments' in message:
if 'payload' in message['attachments'][0]:
if 'coordinates' in message['attachments'][0]['payload']:
location = message['attachments'][0]['payload']['coordinates']
latitude = location['lat']
longitude = location['long']
send_weather_info(sender, latitude=latitude, longitude=longitude)
if _return == 'error':
mes | else:
text = message['text']
for city in CITIES:
if text.lower() in city:
_return = send_weather_info(sender, city_name=text)
if _return == 'error':
message = send_text(sender, get_message('error'))
send_message(message)
# Send location button
payload = location_quick_reply(sender)
send_message(payload)
return 'Ok'
# If text not in city list...
chat_message = search_keyword(text)
if chat_message:
# if found keyword, reply with chat stuff
message = send_text(sender, chat_message)
send_message(message)
else:
message = send_text(sender, get_message('not-a-city'))
send_message(message)
# Send location button
payload = location_quick_reply(sender)
send_message(payload)
except Exception as e:
print(traceback.format_exc())
elif request.method == 'GET':
if request.args.get('hub.verify_token') == os.environ.get('FB_VERIFY_TOKEN'):
return request.args.get('hub.challenge')
return "Wrong Verify Token"
return "Nothing"
# Run the Flask development server only when executed directly
# (not when imported by a WSGI server); debug=True enables the reloader.
if __name__ == '__main__':
    app.run(debug=True)
| sage = send_text(sender, get_message('error'))
send_message(message)
payload = location_quick_reply(sender)
send_message(payload)
| conditional_block |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.