prompt large_stringlengths 70 991k | completion large_stringlengths 0 1.02k |
|---|---|
<|file_name|>elements.py<|end_file_name|><|fim▁begin|>import struct
from common import *
from objects import ObjectAppType
from bcddevice import BCDDevice
# element types:
# X X ???? XX
# class format subtype
# class:
# 1 = Library
# 2 = Application
# 3 = Device
# format:
# 0 = Unknown
# 1 = Device
# 2 = String
# 3 = Object
# 4 = Object List
# 5 = Integer
# 6 = Boolean
# 7 = IntegerList
ElementClass = enum(Library=0x1,
Application=0x2,
Device=0x3,
Hidden=0x4)
ElementFormat = enum(Unknown=0,
Device=1,
String=2,
Object=3,
ObjectList=4,
Integer=5,
Boolean=6,
IntegerList=7)
# based on both my personal findings and on this website:
# http://www.geoffchappell.com/notes/windows/boot/bcd/elements.htm?tx=5
_library = {
0x01: (1, 'device'),
0x02: (2, 'path'),
0x04: (2, 'description'),
0x05: (2, 'locale'),
0x06: (4, 'inherit'),
0x07: (5, 'truncatememory'),
0x08: (4, 'recoverysequence'),
0x09: (6, 'recoveryenabled'),
0x0A: (7, 'badmemorylist'),
0x0B: (6, 'badmemoryaccess'),
0x0C: (5, 'firstmegabytepolicy', enum('UseNone','UseAll','UsePrivate')),
0x0D: (5, 'relocatephysical'),
0x0E: (5, 'avoidlowmemory'),
0x0F: (6, 'traditionalksegmappings'),
0x10: (6, 'bootdebug'),
0x11: (5, 'debugtype', enum('Serial','1394','USB')),
0x12: (5, 'debugaddress'),
0x13: (5, 'debugport'),
0x14: (5, 'baudrate'),
0x15: (5, 'channel'),
0x16: (2, 'targetname'),
0x17: (6, 'noumex'),
0x18: (5, 'debugstart', enum('Active', 'AutoEnable', 'Disable')),
0x19: (2, 'busparams'),
0x20: (6, 'bootems'),
0x22: (5, 'emsport'),
0x23: (5, 'emsbaudrate'),
0x30: (2, 'loadoptions'),
0x31: (6, 'attemptnonbcdstart'),
0x40: (6, 'advancedoptions'),
0x41: (6, 'optionsedit'),
0x42: (5, 'keyringaddress'),
# no alias
0x43: (1, 'bootstatusdatalogdevice'),
# no alias
0x44: (2, 'bootstatusdatalogfile'),
# no alias
0x45: (6, 'bootstatusdatalogappend'),
0x46: (6, 'graphicsmodedisabled'),
0x47: (5, 'configaccesspolicy', enum('Default', 'DisallowMmConfig')),
0x48: (6, 'nointegritychecks'),
0x49: (6, 'testsigning'),
0x4A: (2, 'fontpath'),
# seems to be wrong in the table?
0x4B: (5, 'integrityservices'),
0x50: (6, 'extendedinput'),
0x51: (5, 'initialconsoleinput'),
# not in table
0x60: (6, 'isolatedcontext'),
# not in table
0x65: (5, 'displaymessage', enum('Default','Resume','HyperV', 'Recovery','StartupRepair', 'SystemImageRecovery','CommandPrompt', 'SystemRestore', 'PushButtonReset')),
# not in table
0x77: (7, 'allowedinmemorysettings'),
}
_bootmgr = {
0x01: (4, 'displayorder'),
0x02: (4, 'bootsequence'),
0x03: (3, 'default'),
0x04: (5, 'timeout'),
0x05: (6, 'resume'),
0x06: (3, 'resumeobject'),
0x10: (4, 'toolsdisplayorder'),
0x20: (6, 'displaybootmenu'),
0x21: (6, 'noerrordisplay'),
0x22: (1, 'bcddevice'),
0x23: (2, 'bcdfilepath'),
0x30: (7, 'customactions'),
}
_osloader = {
0x001: (1, 'osdevice'),
0x002: (2, 'systemroot'),
0x003: (3, 'resumeobject'),
0x004: (6, 'stampdisks'),
0x010: (6, 'detecthal'),
0x011: (2, 'kernel'),
0x012: (2, 'hal'),
0x013: (2, 'dbgtransport'),
0x020: (5, 'nx', enum('OptIn', 'OptOut', 'AlwaysOff', 'AlwaysOn')),
0x021: (5, 'pae', enum('Default', 'ForceEnable', 'ForceDisable')),
0x022: (6, 'winpe'),
0x024: (6, 'nocrashautoreboot'),
0x025: (6, 'lastknowngood'),
0x026: (6, 'oslnointegritychecks'),
0x027: (6, 'osltestsigning'),
0x030: (6, 'nolowmem'),
0x031: (5, 'removememory'),
0x032: (5, 'increaseuserva'),
0x033: (5, 'perfmem'),
0x040: (6, 'vga'),
0x041: (6, 'quietboot'),
0x042: (6, 'novesa'),
0x050: (5, 'clustermodeaddressing'),
0x051: (6, 'usephysicaldestination'),
0x052: (5, 'restrictapiccluster'),
0x053: (2, 'evstore'),
0x054: (6, 'uselegacyapicmode'),
0x060: (6, 'onecpu'),
0x061: (5, 'numproc'),
0x062: (6, 'maxproc'),
0x063: (5, 'configflags'),
0x064: (6, 'maxgroup'),
0x065: (6, 'groupaware'),
0x066: (5, 'groupsize'),
0x070: (6, 'usefirmwarepcisettings'),
0x071: (5, 'msi', enum('Default', 'ForceDisable')),
0x072: (5, 'pciexpress', enum('Default', 'ForceDisable')),
0x080: (5, 'safeboot', enum('Minimal', 'Network', 'DsRepair')),
0x081: (6, 'safebootalternateshell'),
0x090: (6, 'bootlog'),
0x091: (6, 'sos'),
0x0A0: (6, 'debug'),
0x0A1: (6, 'halbreakpoint'),
0x0A2: (6, 'useplatformclock'),
0x0B0: (6, 'ems'),
# no alias
0x0C0: (5, 'forcefailure', enum('Load', 'Hive', 'Acpi', 'General')),
0x0C1: (5, 'driverloadfailurepolicy', enum('Fatal', 'UseErrorControl')),
# not in table
0x0C2: (5, 'bootmenupolicy', enum('TODO0', 'Standard', 'TODO2', 'TODO3')),<|fim▁hole|> 0x0F3: (5, 'hypervisordebugtype', enum('Serial', '1394')),
0x0F4: (5, 'hypervisordebugport'),
0x0F5: (5, 'hypervisorbaudrate'),
0x0F6: (5, 'hypervisorchannel'),
# not a lot known
0x0F7: (5, 'bootuxpolicy'),
0x0F8: (6, 'hypervisordisableslat'),
0x100: (5, 'tpmbootentropy', enum('Default', 'ForceDisable', 'ForceEnable')),
0x120: (5, 'xsavepolicy'),
0x121: (5, 'xsaveaddfeature0'),
0x122: (5, 'xsaveaddfeature1'),
0x123: (5, 'xsaveaddfeature2'),
0x124: (5, 'xsaveaddfeature3'),
0x125: (5, 'xsaveaddfeature4'),
0x126: (5, 'xsaveaddfeature5'),
0x127: (5, 'xsaveaddfeature6'),
0x128: (5, 'xsaveaddfeature7'),
0x129: (5, 'xsaveremovefeature'),
0x12A: (5, 'xsaveprocessorsmask'),
0x12B: (5, 'xsavedisable'),
}
_resume = {
0x01: (1, 'filedevice'),
0x02: (2, 'filepath'),
0x03: (6, 'customsettings'),
0x04: (6, 'pae'),
0x05: (1, 'associatedosdevice'),
0x06: (6, 'debugoptionenabled'),
0x07: (5, 'bootux', enum('Disabled', 'Basic', 'Standard')),
# not in table
0x08: (5, 'bootmenupolicy', enum('TODO0', 'Standard', 'TODO2', 'TODO3')),
}
_memdiag = {
0x01: (5, 'passcount'),
0x02: (5, 'testmix', enum('Basic', 'Extended')),
0x03: (5, 'failurecount'),
0x04: (5, 'testtofail', enum('Stride', 'Mats', 'InverseCoupling', 'RandomPattern', 'Checkerboard')),
0x05: (6, 'cacheenable'),
}
_ntldr = {
0x01: (2, 'bpbstring'),
}
_startup = {
0x01: (6, 'pxesoftreboot'),
0x02: (2, 'applicationname'),
}
_device = {
0x01: (5, 'ramdiskimageoffset'),
0x02: (5, 'ramdiskftpclientport'),
0x03: (1, 'ramdisksdidevice'),
0x04: (2, 'ramdisksdipath'),
0x05: (5, 'ramdiskimagelength'),
0x06: (6, 'exportascd'),
0x07: (5, 'ramdisktftpblocksize'),
0x08: (5, 'ramdisktftpwindowsize'),
0x09: (6, 'ramdiskmcenabled'),
0x0A: (6, 'ramdiskmctftpfallback'),
}
# All of these are hidden during a bcdedit /enum all command
# I design good software, so I'll show it even if bcdedit doesn't.
_setup = {
0x01: (1, 'devicetype'),
0x02: (2, 'applicationrelativepath'),
0x03: (2, 'ramdiskdevicerelativepath'),
0x04: (6, 'omitosloaderelements'),
0x10: (6, 'recoveryos'),
}
alias_dict = {
# applies to all object types
ElementClass.Library: _library,
# these depend on the application
ElementClass.Application: {
#objectapptype
0: {},
ObjectAppType.FirmwareMgr: _bootmgr,
ObjectAppType.WinBootMgr: _bootmgr,
ObjectAppType.WinBootLdr: _osloader,
ObjectAppType.WinResume: _resume,
ObjectAppType.WinMemTest: _memdiag,
ObjectAppType.Ntldr: _ntldr,
ObjectAppType.Setupldr: _ntldr,
ObjectAppType.BootSect: {},
ObjectAppType.Startup: _startup,
},
# only works for devices
ElementClass.Device : _device,
# setup template elements
ElementClass.Hidden: _setup,
}
def element_info(type):
if isinstance(type, str):
type = int(type, 16)
return ((0xF0000000 & type) >> 28,
(0x0F000000 & type) >> 24,
0x00FFFFFF & type)
# transformation functions from the BCD raw format to Python.
# tuple of to/from functions
_bcdqword = (lambda v:struct.pack('Q', v),
lambda v:struct.unpack('Q', v)[0])
_bcdqwordlist = (lambda v:b''.join((struct.pack('Q', v) for v in l)),
lambda v:list((struct.unpack('Q', bytes(j))[0]
for j in zip(*[v[i::8]
for i in range(8)]))))
_bcdtodo = (lambda v:'TODO',
lambda v:'TODO')
_bcdraw = (identity, identity)
_bcdobj = _bcdraw
_bcdobjlist = _bcdraw
# different ways to express booleans
_boolnames = {'0' : False,
'1' : True,
'on' : True,
'off' : False,
'true' : True,
'false': False,
'yes' : True,
'no' : False}
_bcdbool = (lambda v: bytes([int(_boolnames.get(v.lower(), v)
if isinstance(v,str) else v)]),
lambda v: bool(v[0]),
lambda v: ('No', 'Yes')[int(v)])
_bcddevice = (None, lambda v:BCDDevice(v))
# Match transformation functions to ElementFormats.
element_transform = {
ElementFormat.Device: _bcddevice,#_bcdtodo,
ElementFormat.String: _bcdraw,
ElementFormat.Object: _bcdobj,
ElementFormat.ObjectList: _bcdobjlist,
ElementFormat.Integer: _bcdqword,
ElementFormat.Boolean: _bcdbool,
ElementFormat.IntegerList: _bcdqwordlist,
}
element_transform_str = {
ElementFormat.IntegerList: lambda v:[hex(i) for i in v],
}
# END OF LINE.<|fim▁end|> | 0x0E0: (5, 'bootstatuspolicy', enum('DisplayAllFailures', 'IgnoreAllFailures', 'IgnoreShutdownFailures', 'IgnoreBootFailures')),
0x0F0: (5, 'hypervisorlaunchtype', enum('Off', 'Auto')),
0x0F1: (2, 'hypervisorpath'),
0x0F2: (6, 'hypervisordebug'), |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![recursion_limit = "256"]
//! This module provides the render environment.
//!
//! OrbTk has choosen the [tinyskia] crate to handle all 2D rendering
//! tasks. Implemented as wrapper functions, it consumes the native
//! rendering functions provided from tinyskia.
//!
//! [tinyskia]: https://docs.rs/tiny-skia
use std::{any::Any, fmt};
/// Pre-selects commonly used OrbTk crates and put them into scope.
pub mod prelude;
/// Handles helper utilities and global methods.
pub use orbtk_utils::prelude as utils;
mod common;
pub use tinyskia::*;
pub mod tinyskia;
pub use self::render_target::*;
mod render_target;
/// Defines the current configuration of the render ctx.
#[derive(Debug, Clone)]
pub struct RenderConfig {
pub fill_style: utils::Brush,
pub stroke_style: utils::Brush,
pub line_width: f64,
pub font_config: FontConfig,
pub alpha: f32,
}
impl Default for RenderConfig {
fn default() -> Self {
RenderConfig {
fill_style: utils::Brush::default(),
stroke_style: utils::Brush::default(),
line_width: 1.,
font_config: FontConfig::default(),
alpha: 1.,
}
}
}
/// The TextMetrics struct represents the dimension of a text.
#[derive(Clone, Copy, Default, Debug)]
pub struct TextMetrics {
pub width: f64,
pub height: f64,
}
/// Internal font helper.
#[derive(Default, Clone, PartialEq, Debug)]
pub struct FontConfig {
pub family: String,
pub font_size: f64,
}
impl ToString for FontConfig {
fn to_string(&self) -> String {
format!("{}px {}", self.font_size, self.family)
}
}
// Handle render pipeline tasks.
pub trait RenderPipeline {
/// Draws the ctx of the pipeline.
fn draw(&self, image: &mut RenderTarget);
}
/// Used to implement a custom render pipeline.
pub trait PipelineTrait: RenderPipeline + Any + Send {
/// Equality for two Pipeline objects.
fn box_eq(&self, other: &dyn Any) -> bool;
/// Converts self to an any reference.
fn as_any(&self) -> &dyn Any;
/// Clones self as box.
fn clone_box(&self) -> Box<dyn PipelineTrait>;
/// Draws the ctx of the pipeline.
fn draw_pipeline(&self, image: &mut RenderTarget) {
self.draw(image);
}<|fim▁hole|> fn eq(&self, other: &Box<dyn PipelineTrait>) -> bool {
self.box_eq(other.as_any())
}
}
impl Clone for Box<dyn PipelineTrait> {
fn clone(&self) -> Self {
self.clone_box()
}
}
impl fmt::Debug for Box<dyn PipelineTrait> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Box<dyn PipelineTrait>")
}
}<|fim▁end|> | }
impl PartialEq for Box<dyn PipelineTrait> { |
<|file_name|>BlendModeScope.cpp<|end_file_name|><|fim▁begin|>/*
Copyright (C) 2013-2014 by Kristina Simpson <sweet.kristas@gmail.com>
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgement in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source
distribution.
*/
#include "BlendModeScope.hpp"
namespace KRE
{
namespace
{
BlendModeScope::color_stack_type& get_mode_stack()
{
static BlendModeScope::color_stack_type res;
return res;
}
const BlendMode& get_default_mode()
{<|fim▁hole|> return res;
}
}
BlendModeScope::BlendModeScope(const BlendMode& bm)
{
it_ = get_mode_stack().emplace(get_mode_stack().end(), bm);
}
BlendModeScope::BlendModeScope(const BlendModeConstants& src, const BlendModeConstants& dst)
{
it_ = get_mode_stack().emplace(get_mode_stack().end(), BlendMode(src, dst));
}
BlendModeScope::~BlendModeScope()
{
get_mode_stack().erase(it_);
}
const BlendMode& BlendModeScope::getCurrentMode()
{
if(get_mode_stack().empty()) {
return get_default_mode();
}
return get_mode_stack().back();
}
}<|fim▁end|> | static BlendMode res = BlendMode(); |
<|file_name|>metainfo_by_fieldpath.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""All sorts of properties for every field.
Generated by ./generate-onetime-js-widget-data.py
AMD-style module definition.
Note: No leading, internal path before the [], since we'd have to hardwire it
to data/whatever-fns.js which is inflexible.
"""
# TODO: Clean up the description fields that actually contain data
# extracted from the comments of dbroot_v2.proto.
# The META_INFO contains already vetted snippets.
META_INFO = r"""
{
"end_snippet.bbs_server_info.base_url:value": {
"abstract_fieldpath": "end_snippet.bbs_server_info.base_url:value",
"default_value": null,
"description": "URL of the server including protocol, domain and port. Can be translated if we use different servers for different languages.",
"empty_concrete_fieldpath": "end_snippet.bbs_server_info.base_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "base_url:value",
"presence": "optional",
"short_label": "base_url",
"typ": "string"
},
"end_snippet.bbs_server_info.file_submit_path:value": {
"abstract_fieldpath": "end_snippet.bbs_server_info.file_submit_path:value",
"default_value": null,
"description": "Path on server where files can be submitted.",
"empty_concrete_fieldpath": "end_snippet.bbs_server_info.file_submit_path:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "file_submit_path:value",
"presence": "optional",
"short_label": "file_submit_path",
"typ": "string"
},
"end_snippet.bbs_server_info.name:value": {
"abstract_fieldpath": "end_snippet.bbs_server_info.name:value",
"default_value": null,
"description": "Name that will be displayed in context menu to user. Must be translated.",
"empty_concrete_fieldpath": "end_snippet.bbs_server_info.name:value",
"enum_vals": null,<|fim▁hole|> "required": true
},
"name": "name:value",
"presence": "optional",
"short_label": "name",
"typ": "string"
},
"end_snippet.bbs_server_info.post_wizard_path:value": {
"abstract_fieldpath": "end_snippet.bbs_server_info.post_wizard_path:value",
"default_value": null,
"description": "Path on server where wizard can be found.",
"empty_concrete_fieldpath": "end_snippet.bbs_server_info.post_wizard_path:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "post_wizard_path:value",
"presence": "optional",
"short_label": "post_wizard_path",
"typ": "string"
},
"end_snippet.client_options.disable_disk_cache": {
"abstract_fieldpath": "end_snippet.client_options.disable_disk_cache",
"default_value": null,
"description": "If true, no data will be cached on disk for this database. It will not be accessible offline.",
"empty_concrete_fieldpath": "end_snippet.client_options.disable_disk_cache",
"enum_vals": null,
"js_validation_rule": {
"required": false
},
"name": "disable_disk_cache",
"presence": "optional",
"short_label": "disable_disk_cache",
"typ": "bool"
},
"end_snippet.cobrand_info.logo_url": {
"abstract_fieldpath": "end_snippet.cobrand_info.logo_url",
"default_value": null,
"description": "URL of image to use as logo. Can be remote or local. However, using local URLs depends on the installation of the client and should be used carefully.",
"empty_concrete_fieldpath": "end_snippet.cobrand_info.[].logo_url",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "logo_url",
"presence": "required",
"short_label": "logo_url",
"typ": "string"
},
"end_snippet.cobrand_info.screen_size": {
"abstract_fieldpath": "end_snippet.cobrand_info.screen_size",
"default_value": "0.0",
"description": "If specified and strictly positive but <= 1.0, makes logo scalable with screen by forcing its width to occupy a fixed fraction of the screeen. For instance, a value of .25 makes the given logo occupy 25% of the screen.",
"empty_concrete_fieldpath": "end_snippet.cobrand_info.[].screen_size",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "screen_size",
"presence": "optional",
"short_label": "screen_size",
"typ": "double"
},
"end_snippet.cobrand_info.tie_point": {
"abstract_fieldpath": "end_snippet.cobrand_info.tie_point",
"default_value": "BOTTOM_LEFT",
"description": "Controls reference point in overlay.",
"empty_concrete_fieldpath": "end_snippet.cobrand_info.[].tie_point",
"enum_vals": {
"BOTTOM_CENTER": 7,
"BOTTOM_LEFT": 6,
"BOTTOM_RIGHT": 8,
"MID_CENTER": 4,
"MID_LEFT": 3,
"MID_RIGHT": 5,
"TOP_CENTER": 1,
"TOP_LEFT": 0,
"TOP_RIGHT": 2
},
"js_validation_rule": {
"required": true
},
"name": "tie_point",
"presence": "optional",
"short_label": "tie_point",
"typ": "TiePoint"
},
"end_snippet.cobrand_info.x_coord.is_relative": {
"abstract_fieldpath": "end_snippet.cobrand_info.x_coord.is_relative",
"default_value": "false",
"description": "If true, the coordinate is relative to the screen.",
"empty_concrete_fieldpath": "end_snippet.cobrand_info.[].x_coord.is_relative",
"enum_vals": null,
"js_validation_rule": {
"required": false
},
"name": "is_relative",
"presence": "optional",
"short_label": "is_relative",
"typ": "bool"
},
"end_snippet.cobrand_info.x_coord.value": {
"abstract_fieldpath": "end_snippet.cobrand_info.x_coord.value",
"default_value": "0.0",
"description": "Coordinate value. Interpretation depends on is_relative (absolute or",
"empty_concrete_fieldpath": "end_snippet.cobrand_info.[].x_coord.value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "value",
"presence": "required",
"short_label": "value",
"typ": "double"
},
"end_snippet.cobrand_info.y_coord.is_relative": {
"abstract_fieldpath": "end_snippet.cobrand_info.y_coord.is_relative",
"default_value": "false",
"description": "If true, the coordinate is relative to the screen.",
"empty_concrete_fieldpath": "end_snippet.cobrand_info.[].y_coord.is_relative",
"enum_vals": null,
"js_validation_rule": {
"required": false
},
"name": "is_relative",
"presence": "optional",
"short_label": "is_relative",
"typ": "bool"
},
"end_snippet.cobrand_info.y_coord.value": {
"abstract_fieldpath": "end_snippet.cobrand_info.y_coord.value",
"default_value": "0.0",
"description": "Coordinate value. Interpretation depends on is_relative (absolute or",
"empty_concrete_fieldpath": "end_snippet.cobrand_info.[].y_coord.value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "value",
"presence": "required",
"short_label": "value",
"typ": "double"
},
"end_snippet.default_web_page_intl_url:value": {
"abstract_fieldpath": "end_snippet.default_web_page_intl_url:value",
"default_value": null,
"description": "Default location of web page.",
"empty_concrete_fieldpath": "end_snippet.default_web_page_intl_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "default_web_page_intl_url:value",
"presence": "optional",
"short_label": "default_web_page_intl_url",
"typ": "string"
},
"end_snippet.earth_intl_url:value": {
"abstract_fieldpath": "end_snippet.earth_intl_url:value",
"default_value": null,
"description": "Location of international page for earth.",
"empty_concrete_fieldpath": "end_snippet.earth_intl_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "earth_intl_url:value",
"presence": "optional",
"short_label": "earth_intl_url",
"typ": "string"
},
"end_snippet.elevation_service_base_url": {
"abstract_fieldpath": "end_snippet.elevation_service_base_url",
"default_value": "",
"description": "Terrain elevation service URL. If empty, service will be unavailable.",
"empty_concrete_fieldpath": "end_snippet.elevation_service_base_url",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "elevation_service_base_url",
"presence": "optional",
"short_label": "elevation_service_base_url",
"typ": "string"
},
"end_snippet.hide_user_data": {
"abstract_fieldpath": "end_snippet.hide_user_data",
"default_value": "false",
"description": "If true, hides user license key in about dialog. Useful for Pro only, allows information to not be visible for shared license keys.",
"empty_concrete_fieldpath": "end_snippet.hide_user_data",
"enum_vals": null,
"js_validation_rule": {
"required": false
},
"name": "hide_user_data",
"presence": "optional",
"short_label": "hide_user_data",
"typ": "bool"
},
"end_snippet.keyboard_shortcuts_url:value": {
"abstract_fieldpath": "end_snippet.keyboard_shortcuts_url:value",
"default_value": null,
"description": "URL for keyboard shortcuts page. If not specified, this URL is built from user_guide_intl_url as user_guide_intl_url + \"ug_keyboard.html\".",
"empty_concrete_fieldpath": "end_snippet.keyboard_shortcuts_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "keyboard_shortcuts_url:value",
"presence": "optional",
"short_label": "keyboard_shortcuts_url",
"typ": "string"
},
"end_snippet.model.compressed_negative_altitude_threshold": {
"abstract_fieldpath": "end_snippet.model.compressed_negative_altitude_threshold",
"default_value": null,
"description": "Threshold below which negative altitudes are compressed",
"empty_concrete_fieldpath": "end_snippet.model.compressed_negative_altitude_threshold",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "compressed_negative_altitude_threshold",
"presence": "optional",
"short_label": "compressed_negative_altitude_threshold",
"typ": "double"
},
"end_snippet.model.elevation_bias": {
"abstract_fieldpath": "end_snippet.model.elevation_bias",
"default_value": null,
"description": "Elevation bias",
"empty_concrete_fieldpath": "end_snippet.model.elevation_bias",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "elevation_bias",
"presence": "optional",
"short_label": "elevation_bias",
"typ": "double"
},
"end_snippet.model.flattening": {
"abstract_fieldpath": "end_snippet.model.flattening",
"default_value": "0.00335281066474748",
"description": "Planet flattening. Default value is 1.0/298.257223563 (from WGS84).",
"empty_concrete_fieldpath": "end_snippet.model.flattening",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "flattening",
"presence": "optional",
"short_label": "flattening",
"typ": "double"
},
"end_snippet.model.negative_altitude_exponent_bias": {
"abstract_fieldpath": "end_snippet.model.negative_altitude_exponent_bias",
"default_value": null,
"description": "Bias for negative altitude so that ocean tiles can be streamed to older clients",
"empty_concrete_fieldpath": "end_snippet.model.negative_altitude_exponent_bias",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "negative_altitude_exponent_bias",
"presence": "optional",
"short_label": "negative_altitude_exponent_bias",
"typ": "int32"
},
"end_snippet.model.radius": {
"abstract_fieldpath": "end_snippet.model.radius",
"default_value": "6378.13700",
"description": "Mean planet radius. Default value is the WGS84 model for earth.",
"empty_concrete_fieldpath": "end_snippet.model.radius",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "radius",
"presence": "optional",
"short_label": "radius",
"typ": "double"
},
"end_snippet.privacy_policy_url:value": {
"abstract_fieldpath": "end_snippet.privacy_policy_url:value",
"default_value": null,
"description": "URL for the privacy policy.",
"empty_concrete_fieldpath": "end_snippet.privacy_policy_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "privacy_policy_url:value",
"presence": "optional",
"short_label": "privacy_policy_url",
"typ": "string"
},
"end_snippet.release_notes_url:value": {
"abstract_fieldpath": "end_snippet.release_notes_url:value",
"default_value": null,
"description": "URL for release notes page.",
"empty_concrete_fieldpath": "end_snippet.release_notes_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "release_notes_url:value",
"presence": "optional",
"short_label": "release_notes_url",
"typ": "string"
},
"end_snippet.reverse_geocoder_protocol_version": {
"abstract_fieldpath": "end_snippet.reverse_geocoder_protocol_version",
"default_value": "3",
"description": "Reverse geocoder protocol version. Default is 3 which is the protocol supported by newer clients.",
"empty_concrete_fieldpath": "end_snippet.reverse_geocoder_protocol_version",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "reverse_geocoder_protocol_version",
"presence": "optional",
"short_label": "reverse_geocoder_protocol_version",
"typ": "int32"
},
"end_snippet.reverse_geocoder_url:value": {
"abstract_fieldpath": "end_snippet.reverse_geocoder_url:value",
"default_value": null,
"description": "Reverse geocoder server URL",
"empty_concrete_fieldpath": "end_snippet.reverse_geocoder_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "reverse_geocoder_url:value",
"presence": "optional",
"short_label": "reverse_geocoder_url",
"typ": "string"
},
"end_snippet.show_signin_button": {
"abstract_fieldpath": "end_snippet.show_signin_button",
"default_value": null,
"description": "If true, shows the signin button in the upper right corner.",
"empty_concrete_fieldpath": "end_snippet.show_signin_button",
"enum_vals": null,
"js_validation_rule": {
"required": false
},
"name": "show_signin_button",
"presence": "optional",
"short_label": "show_signin_button",
"typ": "bool"
},
"end_snippet.startup_tips_intl_url:value": {
"abstract_fieldpath": "end_snippet.startup_tips_intl_url:value",
"default_value": null,
"description": "URL from which to load startup tips in Earth 7.0 and higher.",
"empty_concrete_fieldpath": "end_snippet.startup_tips_intl_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "startup_tips_intl_url:value",
"presence": "optional",
"short_label": "startup_tips_intl_url",
"typ": "string"
},
"end_snippet.support_answer_intl_url:value": {
"abstract_fieldpath": "end_snippet.support_answer_intl_url:value",
"default_value": null,
"description": "Url to support answer.",
"empty_concrete_fieldpath": "end_snippet.support_answer_intl_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "support_answer_intl_url:value",
"presence": "optional",
"short_label": "support_answer_intl_url",
"typ": "string"
},
"end_snippet.support_center_intl_url:value": {
"abstract_fieldpath": "end_snippet.support_center_intl_url:value",
"default_value": null,
"description": "Url to support center.",
"empty_concrete_fieldpath": "end_snippet.support_center_intl_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "support_center_intl_url:value",
"presence": "optional",
"short_label": "support_center_intl_url",
"typ": "string"
},
"end_snippet.support_request_intl_url:value": {
"abstract_fieldpath": "end_snippet.support_request_intl_url:value",
"default_value": null,
"description": "Url to support pages.",
"empty_concrete_fieldpath": "end_snippet.support_request_intl_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "support_request_intl_url:value",
"presence": "optional",
"short_label": "support_request_intl_url",
"typ": "string"
},
"end_snippet.support_topic_intl_url:value": {
"abstract_fieldpath": "end_snippet.support_topic_intl_url:value",
"default_value": null,
"description": "Url to support topics used by certain diagnostic messages.",
"empty_concrete_fieldpath": "end_snippet.support_topic_intl_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "support_topic_intl_url:value",
"presence": "optional",
"short_label": "support_topic_intl_url",
"typ": "string"
},
"end_snippet.swoop_parameters.start_dist_in_meters": {
"abstract_fieldpath": "end_snippet.swoop_parameters.start_dist_in_meters",
"default_value": null,
"description": "Controls how far from a target swooping should start.",
"empty_concrete_fieldpath": "end_snippet.swoop_parameters.start_dist_in_meters",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "start_dist_in_meters",
"presence": "optional",
"short_label": "start_dist_in_meters",
"typ": "double"
},
"end_snippet.tutorial_url:value": {
"abstract_fieldpath": "end_snippet.tutorial_url:value",
"default_value": null,
"description": "URL for tutorial page. If not specified, this URL is built from user_guide_intl_url as user_guide_intl_url + \"tutorials/index.html\".",
"empty_concrete_fieldpath": "end_snippet.tutorial_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "tutorial_url:value",
"presence": "optional",
"short_label": "tutorial_url",
"typ": "string"
},
"end_snippet.use_ge_logo": {
"abstract_fieldpath": "end_snippet.use_ge_logo",
"default_value": "true",
"description": "If false, hides the Google logo.",
"empty_concrete_fieldpath": "end_snippet.use_ge_logo",
"enum_vals": null,
"js_validation_rule": {
"required": false
},
"name": "use_ge_logo",
"presence": "optional",
"short_label": "use_ge_logo",
"typ": "bool"
},
"end_snippet.user_guide_intl_url:value": {
"abstract_fieldpath": "end_snippet.user_guide_intl_url:value",
"default_value": null,
"description": "Url to user guide.",
"empty_concrete_fieldpath": "end_snippet.user_guide_intl_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "user_guide_intl_url:value",
"presence": "optional",
"short_label": "user_guide_intl_url",
"typ": "string"
},
"end_snippet.valid_database.database_name:value": {
"abstract_fieldpath": "end_snippet.valid_database.database_name:value",
"default_value": null,
"description": "Human-readable name of database (such as \"Primary Database\" or \"Digital Globe Database\")",
"empty_concrete_fieldpath": "end_snippet.valid_database.[].database_name:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "database_name:value",
"presence": "optional",
"short_label": "database_name",
"typ": "string"
},
"end_snippet.valid_database.database_url": {
"abstract_fieldpath": "end_snippet.valid_database.database_url",
"default_value": null,
"description": "URL of server. This can include a path and query, and must be a well-formed, absolute URL.",
"empty_concrete_fieldpath": "end_snippet.valid_database.[].database_url",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "database_url",
"presence": "required",
"short_label": "database_url",
"typ": "string"
},
"end_snippet.search_config.error_page_url:value": {
"abstract_fieldpath": "end_snippet.search_config.error_page_url:value",
"default_value": "about:blank",
"description": "URL of a page that will be displayed if a network error or other local error occurs while performing a search. This might be an error for a local geocode while in offline mode, a connection error while trying to connect to MFE, or some other error where we can't get an error message from the server. (Obviously this page should be cached locally, or it's not terribly useful.) The URL should be fully encoded, and can use $[hl] and friends if necessary.",
"empty_concrete_fieldpath": "end_snippet.search_config.error_page_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "error_page_url:value",
"presence": "optional",
"short_label": "error_page_url",
"typ": "string"
},
"end_snippet.search_config.kml_render_url:value": {
"abstract_fieldpath": "end_snippet.search_config.kml_render_url:value",
"default_value": "/earth/client/kmlrender/index_$[hl].html",
"description": "URL of a page that will be shown when KML is rendered in the search panel. This page should have JavaScript that reads the KML from the environment and renders it as HTML, but should NOT perform onebox or searchlet searches. The URL should be fully encoded, and can use $[hl] and friends if necessary.",
"empty_concrete_fieldpath": "end_snippet.search_config.kml_render_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "kml_render_url:value",
"presence": "optional",
"short_label": "kml_render_url",
"typ": "string"
},
"end_snippet.search_config.kml_search_url:value": {
"abstract_fieldpath": "end_snippet.search_config.kml_search_url:value",
"default_value": "/earth/client/kmlrender/index_$[hl].html",
"description": "URL of a page that will be shown when a KML search is performed. This page should have JavaScript that reads the KML from the environment and renders it as HTML, and also performs onebox and searchlet searches if applicable. The URL should be fully encoded, and can use $[hl] and friends if necessary.",
"empty_concrete_fieldpath": "end_snippet.search_config.kml_search_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "kml_search_url:value",
"presence": "optional",
"short_label": "kml_search_url",
"typ": "string"
},
"end_snippet.search_config.search_history_url:value": {
"abstract_fieldpath": "end_snippet.search_config.search_history_url:value",
"default_value": "http://www.google.com/earth/client/search/history_$[hl].html",
"description": "URL of a page that will be shown when the search history is requested. This page should have JavaScript that reads the search history from the client and renders it as HTML.",
"empty_concrete_fieldpath": "end_snippet.search_config.search_history_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "search_history_url:value",
"presence": "optional",
"short_label": "search_history_url",
"typ": "string"
},
"end_snippet.google_maps_url:value": {
"abstract_fieldpath": "end_snippet.google_maps_url:value",
"default_value": "",
"description": "URL for Google Maps, for features like 'View in Maps'.",
"empty_concrete_fieldpath": "end_snippet.google_maps_url:value",
"enum_vals": null,
"js_validation_rule": {
"required": true
},
"name": "google_maps_url:value",
"presence": "optional",
"short_label": "google_maps_url",
"typ": "string"
}
}
"""<|fim▁end|> | "js_validation_rule": { |
<|file_name|>exercice1.py<|end_file_name|><|fim▁begin|>#Initialisation
from time import sleep
from NaoCommunication import *
nao=NaoControle(Nao())
# 1 - Decrire le resultat de ce morceau de code
# ...
for a in range(16):
if a%2==0:
nao.reglerCouleur(a,a*15,50,50)
else :
nao.reglerCouleur(a,255,0,0)
sleep(0.1)
for a in range(15,-1,-1):
nao.eteindreLed(a)
sleep(0.1)<|fim▁hole|># 2 - Decrire le resultat de ce deuxieme morceau de code
# ...
for a in range(15,-1,-1):
nao.allumerLed(a)
sleep(0.1)
for a in range(0,16,1):
nao.eteindreLed(a)
sleep(0.1)
# 3 - A partir des exemples precedents, ecrire un code qui
# allume alternativement les deux leds 1 seconde chacune
# pendant 10 secondes.<|fim▁end|> | |
<|file_name|>pyunit_automl_regression.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
from __future__ import print_function
import sys, os
sys.path.insert(1, os.path.join("..","..","..",".."))
import h2o
from h2o.automl import H2OAutoML
from tests import pyunit_utils as pu
from _automl_utils import import_dataset
def test_default_automl_with_regression_task():
ds = import_dataset('regression')
aml = H2OAutoML(max_models=2,
project_name='aml_regression')
aml.train(y=ds.target, training_frame=ds.train, validation_frame=ds.valid, leaderboard_frame=ds.test)
print(aml.leader)
print(aml.leaderboard)
assert aml.leaderboard.columns == ["model_id", "mean_residual_deviance", "rmse", "mse", "mae", "rmsle"]
def test_workaround_for_distribution():
try:
h2o.rapids("(setproperty \"{}\" \"{}\")".format("sys.ai.h2o.automl.algo_parameters.all.enabled", "true"))
ds = import_dataset('regression')
aml = H2OAutoML(project_name="py_test",
algo_parameters=dict(
distribution='poisson',
family='poisson',
),
exclude_algos=['StackedEnsemble'],<|fim▁hole|> seed=1)
aml.train(y=ds.target, training_frame=ds.train)
model_names = [aml.leaderboard[i, 0] for i in range(0, (aml.leaderboard.nrows))]
for mn in model_names:
m = h2o.get_model(mn)
dist = m.params['distribution'] if 'distribution' in m.params else m.params['family'] if 'family' in m.params else None
print("{}: distribution = {}".format(mn, dist))
except:
h2o.rapids("(setproperty \"{}\" \"{}\")".format("sys.ai.h2o.automl.algo_parameters.all.enabled", "false"))
pu.run_tests([
test_default_automl_with_regression_task,
test_workaround_for_distribution,
])<|fim▁end|> | max_runtime_secs=60, |
<|file_name|>removec.js<|end_file_name|><|fim▁begin|>// Sanity test for removing documents with adjacent index keys. SERVER-2008
t = db.jstests_removec;
t.drop();
t.ensureIndex({a: 1});
<|fim▁hole|> ret.push(i + j);
}
return ret;
}
// Insert some documents with adjacent index keys.
for (i = 0; i < 1100; i += 11) {
t.save({a: runStartingWith(i)});
}
// Remove and then reinsert random documents in the background.
s = startParallelShell('t = db.jstests_removec;' +
'Random.setRandomSeed();' +
'for( j = 0; j < 1000; ++j ) {' +
' o = t.findOne( { a:Random.randInt( 1100 ) } );' +
' t.remove( { _id:o._id } );' +
' t.insert( o );' +
'}');
// Find operations are error free. Note that the cursor throws if it detects the $err
// field in the returned document.
for (i = 0; i < 200; ++i) {
t.find({a: {$gte: 0}}).hint({a: 1}).itcount();
}
s();
t.drop();<|fim▁end|> | /** @return an array containing a sequence of numbers from i to i + 10. */
function runStartingWith(i) {
ret = [];
for (j = 0; j < 11; ++j) { |
<|file_name|>sessions.module.ts<|end_file_name|><|fim▁begin|>// (C) Copyright 2015 Moodle Pty Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { NgModule } from '@angular/core';
import { IonicPageModule } from 'ionic-angular';
import { TranslateModule } from '@ngx-translate/core';
import { CoreComponentsModule } from '@components/components.module';
import { CoreDirectivesModule } from '@directives/directives.module';
import { CorePipesModule } from '@pipes/pipes.module';
import { AddonModChatComponentsModule } from '../../components/components.module';
import { AddonModChatSessionsPage } from './sessions';
@NgModule({
declarations: [
AddonModChatSessionsPage,
],
imports: [
CoreComponentsModule,
CoreDirectivesModule,
CorePipesModule,<|fim▁hole|> AddonModChatComponentsModule,
IonicPageModule.forChild(AddonModChatSessionsPage),
TranslateModule.forChild()
],
})
export class AddonModChatSessionsPageModule {}<|fim▁end|> | |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
try:
from django.contrib.auth import get_user_model
except ImportError: # django < 1.5
from django.contrib.auth.models import User
else:
User = get_user_model()
user_orm_label = '%s.%s' % (User._meta.app_label, User._meta.object_name)
user_model_label = '%s.%s' % (User._meta.app_label, User._meta.module_name)
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Attachment'
db.create_table('attachments_attachment', (
('reusableplugin_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['wiki.ReusablePlugin'], unique=True, primary_key=True)),
('current_revision', self.gf('django.db.models.fields.related.OneToOneField')(blank=True, related_name='current_set', unique=True, null=True, to=orm['attachments.AttachmentRevision'])),
('original_filename', self.gf('django.db.models.fields.CharField')(max_length=256, null=True, blank=True)),
))
db.send_create_signal('attachments', ['Attachment'])
# Adding model 'AttachmentRevision'
db.create_table('attachments_attachmentrevision', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('revision_number', self.gf('django.db.models.fields.IntegerField')()),
('user_message', self.gf('django.db.models.fields.TextField')(blank=True)),
('automatic_log', self.gf('django.db.models.fields.TextField')(blank=True)),
('ip_address', self.gf('django.db.models.fields.IPAddressField')(max_length=15, null=True, blank=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm[user_orm_label], null=True, blank=True)),
('modified', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('previous_revision', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['attachments.AttachmentRevision'], null=True, blank=True)),
('deleted', self.gf('django.db.models.fields.BooleanField')(default=False)),
('locked', self.gf('django.db.models.fields.BooleanField')(default=False)),
('attachment', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['attachments.Attachment'])),
('file', self.gf('django.db.models.fields.files.FileField')(max_length=100)),
('description', self.gf('django.db.models.fields.TextField')(blank=True)),
))
db.send_create_signal('attachments', ['AttachmentRevision'])
def backwards(self, orm):
# Deleting model 'Attachment'
db.delete_table('attachments_attachment')
# Deleting model 'AttachmentRevision'
db.delete_table('attachments_attachmentrevision')
models = {
'attachments.attachment': {
'Meta': {'object_name': 'Attachment', '_ormbases': ['wiki.ReusablePlugin']},
'current_revision': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'current_set'", 'unique': 'True', 'null': 'True', 'to': "orm['attachments.AttachmentRevision']"}),
'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'reusableplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['wiki.ReusablePlugin']", 'unique': 'True', 'primary_key': 'True'})
},
'attachments.attachmentrevision': {
'Meta': {'ordering': "('created',)", 'object_name': 'AttachmentRevision'},
'attachment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['attachments.Attachment']"}),
'automatic_log': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'previous_revision': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['attachments.AttachmentRevision']", 'null': 'True', 'blank': 'True'}),
'revision_number': ('django.db.models.fields.IntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label, 'null': 'True', 'blank': 'True'}),
'user_message': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
user_model_label: {
'Meta': {'object_name': User.__name__, 'db_table': "'%s'" % User._meta.db_table},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'wiki.article': {
'Meta': {'object_name': 'Article'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_revision': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'current_set'", 'unique': 'True', 'null': 'True', 'to': "orm['wiki.ArticleRevision']"}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}),<|fim▁hole|> 'group_write': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'other_read': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'other_write': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label, 'null': 'True', 'blank': 'True'})
},
'wiki.articleplugin': {
'Meta': {'object_name': 'ArticlePlugin'},
'article': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.Article']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'wiki.articlerevision': {
'Meta': {'ordering': "('created',)", 'unique_together': "(('article', 'revision_number'),)", 'object_name': 'ArticleRevision'},
'article': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.Article']"}),
'automatic_log': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'previous_revision': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['wiki.ArticleRevision']", 'null': 'True', 'blank': 'True'}),
'redirect': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'redirect_set'", 'null': 'True', 'to': "orm['wiki.Article']"}),
'revision_number': ('django.db.models.fields.IntegerField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '512'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label, 'null': 'True', 'blank': 'True'}),
'user_message': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'wiki.reusableplugin': {
'Meta': {'object_name': 'ReusablePlugin', '_ormbases': ['wiki.ArticlePlugin']},
'articleplugin_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['wiki.ArticlePlugin']", 'unique': 'True', 'primary_key': 'True'}),
'articles': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'shared_plugins_set'", 'symmetrical': 'False', 'to': "orm['wiki.Article']"})
}
}
complete_apps = ['attachments']<|fim▁end|> | 'group_read': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), |
<|file_name|>private_unused.rs<|end_file_name|><|fim▁begin|>// compile-pass
#[deny(warnings)]<|fim▁hole|>trait Bar<T> {}
impl Bar<Empty> for () {}
fn boo() -> impl Bar<Empty> {}
fn main() {
boo();
}<|fim▁end|> |
enum Empty { } |
<|file_name|>0019_sitemode.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import datetime
import caching.base
class Migration(migrations.Migration):
dependencies = [
('opendebates', '0018_flag_note'),
]
operations = [
migrations.CreateModel(
name='SiteMode',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),<|fim▁hole|> ('allow_sorting_by_votes', models.BooleanField(default=True)),
('allow_voting_and_submitting_questions', models.BooleanField(default=True)),
('debate_time', models.DateTimeField(default=datetime.datetime(2099, 1, 1, 0, 0), help_text=b'Enter time that debate starts in timezone America/New_York')),
],
bases=(caching.base.CachingMixin, models.Model),
),
]<|fim▁end|> | ('show_question_votes', models.BooleanField(default=True)),
('show_total_votes', models.BooleanField(default=True)), |
<|file_name|>md5.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::iter::range_step;
use cryptoutil::{write_u32_le, read_u32v_le, FixedBuffer, FixedBuffer64, StandardPadding};
use digest::Digest;
// A structure that represents that state of a digest computation for the MD5 digest function
struct Md5State {
s0: u32,
s1: u32,
s2: u32,
s3: u32
}
impl Md5State {
fn new() -> Md5State {
return Md5State {
s0: 0x67452301,
s1: 0xefcdab89,
s2: 0x98badcfe,<|fim▁hole|> s3: 0x10325476
};
}
fn reset(&mut self) {
self.s0 = 0x67452301;
self.s1 = 0xefcdab89;
self.s2 = 0x98badcfe;
self.s3 = 0x10325476;
}
fn process_block(&mut self, input: &[u8]) {
fn f(u: u32, v: u32, w: u32) -> u32 {
return (u & v) | (!u & w);
}
fn g(u: u32, v: u32, w: u32) -> u32 {
return (u & w) | (v & !w);
}
fn h(u: u32, v: u32, w: u32) -> u32 {
return u ^ v ^ w;
}
fn i(u: u32, v: u32, w: u32) -> u32 {
return v ^ (u | !w);
}
fn op_f(w: u32, x: u32, y: u32, z: u32, m: u32, s: u32) -> u32 {
return (w + f(x, y, z) + m).rotate_left(s as uint) + x;
}
fn op_g(w: u32, x: u32, y: u32, z: u32, m: u32, s: u32) -> u32 {
return (w + g(x, y, z) + m).rotate_left(s as uint) + x;
}
fn op_h(w: u32, x: u32, y: u32, z: u32, m: u32, s: u32) -> u32 {
return (w + h(x, y, z) + m).rotate_left(s as uint) + x;
}
fn op_i(w: u32, x: u32, y: u32, z: u32, m: u32, s: u32) -> u32 {
return (w + i(x, y, z) + m).rotate_left(s as uint) + x;
}
let mut a = self.s0;
let mut b = self.s1;
let mut c = self.s2;
let mut d = self.s3;
let mut data = [0u32, ..16];
read_u32v_le(data, input);
// round 1
for i in range_step(0u, 16, 4) {
a = op_f(a, b, c, d, data[i] + C1[i], 7);
d = op_f(d, a, b, c, data[i + 1] + C1[i + 1], 12);
c = op_f(c, d, a, b, data[i + 2] + C1[i + 2], 17);
b = op_f(b, c, d, a, data[i + 3] + C1[i + 3], 22);
}
// round 2
let mut t = 1;
for i in range_step(0u, 16, 4) {
a = op_g(a, b, c, d, data[t & 0x0f] + C2[i], 5);
d = op_g(d, a, b, c, data[(t + 5) & 0x0f] + C2[i + 1], 9);
c = op_g(c, d, a, b, data[(t + 10) & 0x0f] + C2[i + 2], 14);
b = op_g(b, c, d, a, data[(t + 15) & 0x0f] + C2[i + 3], 20);
t += 20;
}
// round 3
t = 5;
for i in range_step(0u, 16, 4) {
a = op_h(a, b, c, d, data[t & 0x0f] + C3[i], 4);
d = op_h(d, a, b, c, data[(t + 3) & 0x0f] + C3[i + 1], 11);
c = op_h(c, d, a, b, data[(t + 6) & 0x0f] + C3[i + 2], 16);
b = op_h(b, c, d, a, data[(t + 9) & 0x0f] + C3[i + 3], 23);
t += 12;
}
// round 4
t = 0;
for i in range_step(0u, 16, 4) {
a = op_i(a, b, c, d, data[t & 0x0f] + C4[i], 6);
d = op_i(d, a, b, c, data[(t + 7) & 0x0f] + C4[i + 1], 10);
c = op_i(c, d, a, b, data[(t + 14) & 0x0f] + C4[i + 2], 15);
b = op_i(b, c, d, a, data[(t + 21) & 0x0f] + C4[i + 3], 21);
t += 28;
}
self.s0 += a;
self.s1 += b;
self.s2 += c;
self.s3 += d;
}
}
// Round 1 constants
static C1: [u32, ..16] = [
0xd76aa478, 0xe8c7b756, 0x242070db, 0xc1bdceee, 0xf57c0faf, 0x4787c62a, 0xa8304613, 0xfd469501,
0x698098d8, 0x8b44f7af, 0xffff5bb1, 0x895cd7be, 0x6b901122, 0xfd987193, 0xa679438e, 0x49b40821
];
// Round 2 constants
static C2: [u32, ..16] = [
0xf61e2562, 0xc040b340, 0x265e5a51, 0xe9b6c7aa, 0xd62f105d, 0x02441453, 0xd8a1e681, 0xe7d3fbc8,
0x21e1cde6, 0xc33707d6, 0xf4d50d87, 0x455a14ed, 0xa9e3e905, 0xfcefa3f8, 0x676f02d9, 0x8d2a4c8a
];
// Round 3 constants
static C3: [u32, ..16] = [
0xfffa3942, 0x8771f681, 0x6d9d6122, 0xfde5380c, 0xa4beea44, 0x4bdecfa9, 0xf6bb4b60, 0xbebfbc70,
0x289b7ec6, 0xeaa127fa, 0xd4ef3085, 0x04881d05, 0xd9d4d039, 0xe6db99e5, 0x1fa27cf8, 0xc4ac5665
];
// Round 4 constants
static C4: [u32, ..16] = [
0xf4292244, 0x432aff97, 0xab9423a7, 0xfc93a039, 0x655b59c3, 0x8f0ccc92, 0xffeff47d, 0x85845dd1,
0x6fa87e4f, 0xfe2ce6e0, 0xa3014314, 0x4e0811a1, 0xf7537e82, 0xbd3af235, 0x2ad7d2bb, 0xeb86d391
];
/// The MD5 Digest algorithm
pub struct Md5 {
length_bytes: u64,
buffer: FixedBuffer64,
state: Md5State,
finished: bool,
}
impl Md5 {
/// Construct a new instance of the MD5 Digest.
pub fn new() -> Md5 {
return Md5 {
length_bytes: 0,
buffer: FixedBuffer64::new(),
state: Md5State::new(),
finished: false
}
}
}
impl Digest for Md5 {
fn input(&mut self, input: &[u8]) {
assert!(!self.finished);
// Unlike Sha1 and Sha2, the length value in MD5 is defined as the length of the message mod
// 2^64 - ie: integer overflow is OK.
self.length_bytes += input.len() as u64;
let self_state = &mut self.state;
self.buffer.input(input, |d: &[u8]| { self_state.process_block(d); });
}
fn reset(&mut self) {
self.length_bytes = 0;
self.buffer.reset();
self.state.reset();
self.finished = false;
}
fn result(&mut self, out: &mut [u8]) {
if !self.finished {
let self_state = &mut self.state;
self.buffer.standard_padding(8, |d: &[u8]| { self_state.process_block(d); });
write_u32_le(self.buffer.next(4), (self.length_bytes << 3) as u32);
write_u32_le(self.buffer.next(4), (self.length_bytes >> 29) as u32);
self_state.process_block(self.buffer.full_buffer());
self.finished = true;
}
write_u32_le(out.mut_slice(0, 4), self.state.s0);
write_u32_le(out.mut_slice(4, 8), self.state.s1);
write_u32_le(out.mut_slice(8, 12), self.state.s2);
write_u32_le(out.mut_slice(12, 16), self.state.s3);
}
fn output_bits(&self) -> uint { 128 }
fn block_size(&self) -> uint { 64 }
}
#[cfg(test)]
mod tests {
use cryptoutil::test::test_digest_1million_random;
use digest::Digest;
use md5::Md5;
struct Test {
input: &'static str,
output_str: &'static str,
}
fn test_hash<D: Digest>(sh: &mut D, tests: &[Test]) {
// Test that it works when accepting the message all at once
for t in tests.iter() {
sh.input_str(t.input);
let out_str = sh.result_str();
assert!(out_str.as_slice() == t.output_str);
sh.reset();
}
// Test that it works when accepting the message in pieces
for t in tests.iter() {
let len = t.input.len();
let mut left = len;
while left > 0u {
let take = (left + 1u) / 2u;
sh.input_str(t.input.slice(len - left, take + len - left));
left = left - take;
}
let out_str = sh.result_str();
assert!(out_str.as_slice() == t.output_str);
sh.reset();
}
}
#[test]
fn test_md5() {
// Examples from wikipedia
let wikipedia_tests = vec![
Test {
input: "",
output_str: "d41d8cd98f00b204e9800998ecf8427e"
},
Test {
input: "The quick brown fox jumps over the lazy dog",
output_str: "9e107d9d372bb6826bd81d3542a419d6"
},
Test {
input: "The quick brown fox jumps over the lazy dog.",
output_str: "e4d909c290d0fb1ca068ffaddf22cbd0"
},
];
let tests = wikipedia_tests;
let mut sh = Md5::new();
test_hash(&mut sh, tests.as_slice());
}
#[test]
fn test_1million_random_md5() {
let mut sh = Md5::new();
test_digest_1million_random(
&mut sh,
64,
"7707d6ae4e027c70eea2a935c2296f21");
}
}
#[cfg(test)]
mod bench {
use test::Bencher;
use digest::Digest;
use md5::Md5;
#[bench]
pub fn md5_10(bh: & mut Bencher) {
let mut sh = Md5::new();
let bytes = [1u8, ..10];
bh.iter( || {
sh.input(bytes);
});
bh.bytes = bytes.len() as u64;
}
#[bench]
pub fn md5_1k(bh: & mut Bencher) {
let mut sh = Md5::new();
let bytes = [1u8, ..1024];
bh.iter( || {
sh.input(bytes);
});
bh.bytes = bytes.len() as u64;
}
#[bench]
pub fn md5_64k(bh: & mut Bencher) {
let mut sh = Md5::new();
let bytes = [1u8, ..65536];
bh.iter( || {
sh.input(bytes);
});
bh.bytes = bytes.len() as u64;
}
}<|fim▁end|> | |
<|file_name|>_version.py<|end_file_name|><|fim▁begin|># This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.19 (https://github.com/python-versioneer/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
git_date = "$Format:%ci$"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "pep440"
cfg.tag_prefix = "v"
cfg.parentdir_prefix = ""
cfg.versionfile_source = "jxl2txt/_version.py"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Create decorator to mark a method as the handler of a VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
stdout = p.communicate()[0].strip().decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
return None, p.returncode
return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# Use only the last line. Previous lines may contain GPG signature
# information.
date = date.splitlines()[-1]
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%s*" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
cwd=root)[0].strip()
# Use only the last line. Previous lines may contain GPG signature
# information.
date = date.splitlines()[-1]
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%d.g%s" % (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
def render_pep440_pre(pieces):
"""TAG[.post0.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post0.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post0.dev%d" % pieces["distance"]
else:
# exception #1
rendered = "0.post0.dev%d" % pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%s" % pieces["short"]
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%s" % pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:<|fim▁hole|> rendered += ".dev0"
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always -long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%s'" % style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None,
"date": pieces.get("date")}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree",
"date": None}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version", "date": None}<|fim▁end|> | rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]: |
<|file_name|>parametric_system.py<|end_file_name|><|fim▁begin|>import floq.core.fixed_system as fs
import floq.evolution as ev
import floq.errors as er
import floq.helpers.index as h
class ParametricSystemBase(object):
"""
Base class to specify a physical system that still has open parameters,
such as the control amplitudes, the control duration, or other arbitrary
parameters in the Hamiltonian.
This needs to be sub-classed, and a subclass should provide:
- get_system(controls)
"""
def get_system(self, controls, t):
raise NotImplementedError("get_system not implemented.")
def is_nz_ok(self, controls, t):
system = self.get_system(controls, t)
try:
u = ev.evolve_system(system)
except er.EigenvalueNumberError:
return False
return h.is_unitary(u)
def set_nz(self, controls, t):
if self.is_nz_ok(controls, t):
self.decrease_nz_until_not_ok(controls, t, step=max(10, self.nz/5))<|fim▁hole|> self.decrease_nz_until_not_ok(controls, t, step=max(10, self.nz/10))
self.decrease_nz_until_not_ok(controls, t, step=2)
self.increase_nz_until_ok(controls, t, step=2)
else:
self.increase_nz_until_ok(controls, t, step=max(10, self.nz/5))
self.decrease_nz_until_not_ok(controls, t, step=2)
self.increase_nz_until_ok(controls, t, step=2)
def increase_nz_until_ok(self, controls, t, step=2):
while self.is_nz_ok(controls, t) is False:
self.nz += h.make_even(step)
def decrease_nz_until_not_ok(self, controls, t, step=2):
while self.is_nz_ok(controls, t) and self.nz-step > 3:
self.nz -= h.make_even(step)
class ParametricSystemWithFunctions(ParametricSystemBase):
"""
A system with parametric hf and dhf, which are passed as callables to the constructor.
hf has to have the form hf(a,parameters)
"""
def __init__(self, hf, dhf, nz, omega, parameters):
"""
hf: callable hf(controls,parameters,omega)
dhf: callable dhf(controls,parameters,omega)
omega: 2 pi/T, the period of the Hamiltonian
nz: number of Fourier modes to be considered during evolution
parameters: a data structure that holds parameters for hf and dhf
(dictionary is probably the best idea)
"""
self.hf = hf
self.dhf = dhf
self.omega = omega
self.nz = nz
self.parameters = parameters
def calculate_hf(self, controls):
return self.hf(controls, self.parameters, self.omega)
def calculate_dhf(self, controls):
return self.dhf(controls, self.parameters, self.omega)
def get_system(self, controls, t):
hf = self.calculate_hf(controls)
dhf = self.calculate_dhf(controls)
return fs.FixedSystem(hf, dhf, self.nz, self.omega, t)<|fim▁end|> | |
<|file_name|>promise-fs.spec.ts<|end_file_name|><|fim▁begin|>import * as modPath from "path";
import * as modFS from "fs";
import * as modPFS from "../promise-fs";
import * as modLog from "../log";
import * as modUtil from "./helper";
const getFixturePath = modUtil.getFixturePath;
const normalizePath = modUtil.normalizePath;
function resolvePath(path: string): string {
return normalizePath(modPath.resolve(path));
}
describe("promise-fs", () => {
describe("promisify", () => {
function removeSpacesFailOnTabs(text: string, cb: modPFS.INodeCallback<string>): void {
function doAction() {
if (/\t/.test(text)) {
cb(new Error("i hate tabs"), null);
} else {
cb(null, text.replace(/\s+/g, ""));
}
}
setTimeout(doAction);
}
it("converts callback fn to a promise factory", () => {
const remove = modPFS.promisify<string, string>(removeSpacesFailOnTabs);
expect(remove.constructor).toBe(Function);
expect(remove("test").then).toBe(Promise.prototype.then);
});
it("processes resolve callback correctly", (done) => {
const fn = {
removeSpacesFailOnTabs: removeSpacesFailOnTabs
};
spyOn(fn, "removeSpacesFailOnTabs").and.callThrough();
const remove = modPFS.promisify<string, string>(fn.removeSpacesFailOnTabs);
remove("s o m e t h i n g")
.then(text => {
expect(text).toBe("something");
expect(fn.removeSpacesFailOnTabs).toHaveBeenCalled();
done();
})
.catch(err => {
fail("catch callback should not fire");
done();
});
});
it("processes reject callback correctly", (done) => {
const fn = {
removeSpacesFailOnTabs: removeSpacesFailOnTabs
};
spyOn(fn, "removeSpacesFailOnTabs").and.callThrough();
const remove = modPFS.promisify<string, string>(fn.removeSpacesFailOnTabs);
remove("s o m e \t h i n g")
.then(text => {
fail("then callback should not fire");
done();
})
.catch(err => {
expect(err.constructor).toBe(Error);
expect(err.message).toBe("i hate tabs");
expect(fn.removeSpacesFailOnTabs).toHaveBeenCalled();
done();
});
});
});
describe("readDir", () => {
it("returns items from the directory", (done) => {
const path = getFixturePath("dir");
const expected = [
getFixturePath("dir/file1"),
getFixturePath("dir/file2"),
getFixturePath("dir/subdir")
];
modPFS.readDir(path)
.then(items => {
expect(items.map(normalizePath))
.toEqual(expected);
done();
})
.catch(err => {
fail(err);
done();
});
});
it("returns items from the directory / relative", (done) => {
const path = getFixturePath("dir");
modPFS.readDir(path, true)
.then(items => {
expect(items)
.toEqual(["file1", "file2", "subdir"]);
done();
})
.catch(err => {
fail(err);
done();
});
});
});
describe("stat", () => {
it("returns Stats of given item", (done) => {
const path = getFixturePath("dir");
modPFS.stat(path)
.then(stat => {
expect(stat.isDirectory())
.toBe(true);
done();
})
.catch(err => {
fail(err);
done();
});
});
});
describe("statAll", () => {
it("returns Stats of all given items", (done) => {
const path = getFixturePath("dir");
const paths = ["subdir", "file1"].map(name => modPath.join(path, name));
modPFS.statAll(paths)
.then(stats => {
expect(stats[paths[0]].isDirectory())
.toBe(true);
expect(stats[paths[1]].isFile())
.toBe(true);
done();
})
.catch(err => {
fail(err);
done();
});
});
});
describe("access", () => {
it("returns path when has access", (done) => {
const path1 = getFixturePath("dir/file1");
const path3 = getFixturePath("dir/file3");
modPFS.access(path1)
.then(path => {
expect(path)
.toBe(path1);
done();
})
.catch(err => {
fail(err);
done();
});
modPFS.access(path3)
.then(path => {
fail(`${path3} should not have access`);
done();
})
.catch(err => {
expect(err)
.toBe(path3);
done();
});
});
});
describe("getAccessible", () => {
it("returns list of accesible paths", (done) => {
const path = getFixturePath("dir");
const paths = ["file1", "file2", "file3"].map(name => modPath.join(path, name));<|fim▁hole|> expect(existingPaths)
.toEqual([paths[0], paths[1]]);
done();
})
.catch(err => {
fail(err);
done();
});
});
});
describe("readFile", () => {
it("fails on nonexistent file", (done) => {
const path = getFixturePath("dir/file3");
modPFS.readFile(path)
.then(buf => {
fail("should not succeed");
done();
})
.catch(err => {
expect(err.code)
.toBe("ENOENT");
expect(normalizePath(err.path))
.toBe(resolvePath(path));
done();
});
});
it("reads existing file content", (done) => {
const path = getFixturePath("dir/file1");
modPFS.readFile(path)
.then(buf => buf.toString())
.then(text => {
expect(text)
.toBe("1");
done();
})
.catch(err => {
fail(err);
done();
});
});
});
describe("writeFile", () => {
it("fails on existing directory", (done) => {
const path = getFixturePath("dir");
modPFS.writeFile(path, "test")
.then(buf => {
fail("should not succeed");
done();
})
.catch(err => {
expect(err.code)
.toBe("EISDIR");
expect(normalizePath(err.path))
.toBe(resolvePath(path));
done();
});
});
it("creates new file if it does not exist", (done) => {
const path = getFixturePath("dir/file4");
const text = "test";
modPFS.writeFile(path, text)
.then(path2 => expect(path2).toBe(path))
.then(() => modPFS.readFile(path))
.then(buf => buf.toString())
.then(text2 => expect(text2).toBe(text))
.then(() => modPFS.unlinkFile(path))
.then(path2 => expect(path2).toBe(path))
.then(() => done())
.catch(err => {
fail(err);
done();
});
});
it("modifies existing file", (done) => {
const path = getFixturePath("dir/file2");
const text = "2";
modPFS.writeFile(path, text)
.then(path2 => expect(path2).toBe(path))
.then(() => modPFS.readFile(path))
.then(buf => buf.toString())
.then(text2 => expect(text2).toBe(text))
.then(() => done())
.catch(err => {
fail(err);
done();
});
});
});
});<|fim▁end|> | modPFS.getAccessible(paths)
.then(existingPaths => { |
<|file_name|>fake_tridentbackend.go<|end_file_name|><|fim▁begin|>// Copyright 2021 NetApp, Inc. All Rights Reserved.
// Code generated by client-gen. DO NOT EDIT.
package fake
import (
"context"
netappv1 "github.com/netapp/trident/persistent_store/crd/apis/netapp/v1"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
labels "k8s.io/apimachinery/pkg/labels"
schema "k8s.io/apimachinery/pkg/runtime/schema"
types "k8s.io/apimachinery/pkg/types"
watch "k8s.io/apimachinery/pkg/watch"
testing "k8s.io/client-go/testing"
)
// FakeTridentBackends implements TridentBackendInterface
type FakeTridentBackends struct {
Fake *FakeTridentV1
ns string
}
var tridentbackendsResource = schema.GroupVersionResource{Group: "trident.netapp.io", Version: "v1", Resource: "tridentbackends"}
var tridentbackendsKind = schema.GroupVersionKind{Group: "trident.netapp.io", Version: "v1", Kind: "TridentBackend"}
// Get takes name of the tridentBackend, and returns the corresponding tridentBackend object, and an error if there is any.
func (c *FakeTridentBackends) Get(ctx context.Context, name string, options v1.GetOptions) (result *netappv1.TridentBackend, err error) {
obj, err := c.Fake.
Invokes(testing.NewGetAction(tridentbackendsResource, c.ns, name), &netappv1.TridentBackend{})
if obj == nil {
return nil, err
}
return obj.(*netappv1.TridentBackend), err
}
// List takes label and field selectors, and returns the list of TridentBackends that match those selectors.
func (c *FakeTridentBackends) List(ctx context.Context, opts v1.ListOptions) (result *netappv1.TridentBackendList, err error) {
obj, err := c.Fake.
Invokes(testing.NewListAction(tridentbackendsResource, tridentbackendsKind, c.ns, opts), &netappv1.TridentBackendList{})
if obj == nil {
return nil, err
}
label, _, _ := testing.ExtractFromListOptions(opts)
if label == nil {
label = labels.Everything()
}<|fim▁hole|> for _, item := range obj.(*netappv1.TridentBackendList).Items {
if label.Matches(labels.Set(item.Labels)) {
list.Items = append(list.Items, item)
}
}
return list, err
}
// Watch returns a watch.Interface that watches the requested tridentBackends.
func (c *FakeTridentBackends) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) {
return c.Fake.
InvokesWatch(testing.NewWatchAction(tridentbackendsResource, c.ns, opts))
}
// Create takes the representation of a tridentBackend and creates it. Returns the server's representation of the tridentBackend, and an error, if there is any.
func (c *FakeTridentBackends) Create(ctx context.Context, tridentBackend *netappv1.TridentBackend, opts v1.CreateOptions) (result *netappv1.TridentBackend, err error) {
obj, err := c.Fake.
Invokes(testing.NewCreateAction(tridentbackendsResource, c.ns, tridentBackend), &netappv1.TridentBackend{})
if obj == nil {
return nil, err
}
return obj.(*netappv1.TridentBackend), err
}
// Update takes the representation of a tridentBackend and updates it. Returns the server's representation of the tridentBackend, and an error, if there is any.
func (c *FakeTridentBackends) Update(ctx context.Context, tridentBackend *netappv1.TridentBackend, opts v1.UpdateOptions) (result *netappv1.TridentBackend, err error) {
obj, err := c.Fake.
Invokes(testing.NewUpdateAction(tridentbackendsResource, c.ns, tridentBackend), &netappv1.TridentBackend{})
if obj == nil {
return nil, err
}
return obj.(*netappv1.TridentBackend), err
}
// Delete takes name of the tridentBackend and deletes it. Returns an error if one occurs.
func (c *FakeTridentBackends) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error {
_, err := c.Fake.
Invokes(testing.NewDeleteAction(tridentbackendsResource, c.ns, name), &netappv1.TridentBackend{})
return err
}
// DeleteCollection deletes a collection of objects.
func (c *FakeTridentBackends) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {
action := testing.NewDeleteCollectionAction(tridentbackendsResource, c.ns, listOpts)
_, err := c.Fake.Invokes(action, &netappv1.TridentBackendList{})
return err
}
// Patch applies the patch and returns the patched tridentBackend.
func (c *FakeTridentBackends) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *netappv1.TridentBackend, err error) {
obj, err := c.Fake.
Invokes(testing.NewPatchSubresourceAction(tridentbackendsResource, c.ns, name, pt, data, subresources...), &netappv1.TridentBackend{})
if obj == nil {
return nil, err
}
return obj.(*netappv1.TridentBackend), err
}<|fim▁end|> | list := &netappv1.TridentBackendList{ListMeta: obj.(*netappv1.TridentBackendList).ListMeta} |
<|file_name|>baseUniq.js<|end_file_name|><|fim▁begin|>/**
* Lo-Dash 2.4.1 (Custom Build) <http://lodash.com/>
* Build: `lodash modularize modern exports="node" -o ./modern/`
* Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/>
* Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE>
* Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
* Available under MIT license <http://lodash.com/license>
*/
var baseIndexOf = require('./baseIndexOf'),
cacheIndexOf = require('./cacheIndexOf'),
createCache = require('./createCache'),
getArray = require('./getArray'),
largeArraySize = require('./largeArraySize'),
releaseArray = require('./releaseArray'),
releaseObject = require('./releaseObject');
/**
* The base implementation of `_.uniq` without support for callback shorthands
* or `thisArg` binding.
*
* @private
* @param {Array} array The array to process.
* @param {boolean} [isSorted=false] A flag to indicate that `array` is sorted.
* @param {Function} [callback] The function called per iteration.
* @returns {Array} Returns a duplicate-value-free array.
*/
function baseUniq(array, isSorted, callback) {
var index = -1,
indexOf = baseIndexOf,
length = array ? array.length : 0,
result = [];
var isLarge = !isSorted && length >= largeArraySize,
seen = (callback || isLarge) ? getArray() : result;
if (isLarge) {
var cache = createCache(seen);
indexOf = cacheIndexOf;
seen = cache;
}
while (++index < length) {
var value = array[index],
computed = callback ? callback(value, index, array) : value;
if (isSorted<|fim▁hole|> seen.push(computed);
}
result.push(value);
}
}
if (isLarge) {
releaseArray(seen.array);
releaseObject(seen);
} else if (callback) {
releaseArray(seen);
}
return result;
}
module.exports = baseUniq;<|fim▁end|> | ? !index || seen[seen.length - 1] !== computed
: indexOf(seen, computed) < 0
) {
if (callback || isLarge) { |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|><|fim▁hole|>.. module:: admin
"""
from django.contrib import admin
from apps.volontulo.models import Offer
from apps.volontulo.models import Organization
from apps.volontulo.models import UserProfile
admin.site.register(Offer)
admin.site.register(Organization)
admin.site.register(UserProfile)<|fim▁end|> | # -*- coding: utf-8 -*-
""" |
<|file_name|>grid.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! CSS handling for the computed value of
//! [grids](https://drafts.csswg.org/css-grid/)
use cssparser::{Parser, Token, BasicParseError};
use parser::{Parse, ParserContext};
use std::{mem, usize};
use std::ascii::AsciiExt;
use style_traits::{HasViewportPercentage, ParseError, StyleParseError};
use values::{CSSFloat, CustomIdent, Either};
use values::computed::{self, Context, ToComputedValue};
use values::generics::grid::{RepeatCount, TrackBreadth, TrackKeyword, TrackRepeat};
use values::generics::grid::{TrackSize, TrackList, TrackListType};
use values::specified::LengthOrPercentage;
/// Parse a single flexible length.
pub fn parse_flex<'i, 't>(input: &mut Parser<'i, 't>) -> Result<CSSFloat, ParseError<'i>> {
match input.next()? {
Token::Dimension { value, ref unit, .. } if unit.eq_ignore_ascii_case("fr") && value.is_sign_positive()
=> Ok(value),
t => Err(BasicParseError::UnexpectedToken(t).into()),
}
}
impl Parse for TrackBreadth<LengthOrPercentage> {
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
if let Ok(lop) = input.try(|i| LengthOrPercentage::parse_non_negative(context, i)) {
return Ok(TrackBreadth::Breadth(lop))
}
if let Ok(f) = input.try(parse_flex) {
return Ok(TrackBreadth::Flex(f))
}
TrackKeyword::parse(input).map(TrackBreadth::Keyword)
}
}
impl HasViewportPercentage for TrackBreadth<LengthOrPercentage> {
#[inline]
fn has_viewport_percentage(&self) -> bool {
if let TrackBreadth::Breadth(ref lop) = *self {
lop.has_viewport_percentage()
} else {
false
}
}
}
impl Parse for TrackSize<LengthOrPercentage> {
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
if let Ok(b) = input.try(|i| TrackBreadth::parse(context, i)) {
return Ok(TrackSize::Breadth(b))
}
if input.try(|i| i.expect_function_matching("minmax")).is_ok() {
return input.parse_nested_block(|input| {
let inflexible_breadth =
match input.try(|i| LengthOrPercentage::parse_non_negative(context, i)) {
Ok(lop) => TrackBreadth::Breadth(lop),
Err(..) => {
let keyword = TrackKeyword::parse(input)?;
TrackBreadth::Keyword(keyword)
}
};
input.expect_comma()?;
Ok(TrackSize::MinMax(inflexible_breadth, TrackBreadth::parse(context, input)?))
});
}
input.expect_function_matching("fit-content")?;
let lop = input.parse_nested_block(|i| LengthOrPercentage::parse_non_negative(context, i))?;
Ok(TrackSize::FitContent(lop))
}
}
/// Parse the grid line names into a vector of owned strings.
///
/// https://drafts.csswg.org/css-grid/#typedef-line-names
pub fn parse_line_names<'i, 't>(input: &mut Parser<'i, 't>) -> Result<Vec<String>, ParseError<'i>> {
input.expect_square_bracket_block()?;
input.parse_nested_block(|input| {
let mut values = vec![];
while let Ok(ident) = input.try(|i| i.expect_ident()) {<|fim▁hole|> values.push(ident.into_owned());
}
Ok(values)
})
}
/// The type of `repeat` function (only used in parsing).
///
/// https://drafts.csswg.org/css-grid/#typedef-track-repeat
#[derive(Clone, Copy, PartialEq, Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
enum RepeatType {
/// [`<auto-repeat>`](https://drafts.csswg.org/css-grid/#typedef-auto-repeat)
Auto,
/// [`<track-repeat>`](https://drafts.csswg.org/css-grid/#typedef-track-repeat)
Normal,
/// [`<fixed-repeat>`](https://drafts.csswg.org/css-grid/#typedef-fixed-repeat)
Fixed,
}
impl TrackRepeat<LengthOrPercentage> {
fn parse_with_repeat_type<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>)
-> Result<(TrackRepeat<LengthOrPercentage>, RepeatType),
ParseError<'i>> {
input.try(|i| i.expect_function_matching("repeat").map_err(|e| e.into())).and_then(|_| {
input.parse_nested_block(|input| {
let count = RepeatCount::parse(context, input)?;
input.expect_comma()?;
let is_auto = count == RepeatCount::AutoFit || count == RepeatCount::AutoFill;
let mut repeat_type = if is_auto {
RepeatType::Auto
} else { // <fixed-size> is a subset of <track_size>, so it should work for both
RepeatType::Fixed
};
let mut names = vec![];
let mut values = vec![];
let mut current_names;
loop {
current_names = input.try(parse_line_names).unwrap_or(vec![]);
if let Ok(track_size) = input.try(|i| TrackSize::parse(context, i)) {
if !track_size.is_fixed() {
if is_auto {
// should be <fixed-size> for <auto-repeat>
return Err(StyleParseError::UnspecifiedError.into())
}
if repeat_type == RepeatType::Fixed {
repeat_type = RepeatType::Normal // <track-size> for sure
}
}
values.push(track_size);
names.push(current_names);
} else {
if values.is_empty() {
// expecting at least one <track-size>
return Err(StyleParseError::UnspecifiedError.into())
}
names.push(current_names); // final `<line-names>`
break // no more <track-size>, breaking
}
}
let repeat = TrackRepeat {
count: count,
track_sizes: values,
line_names: names,
};
Ok((repeat, repeat_type))
})
})
}
}
impl HasViewportPercentage for TrackRepeat<LengthOrPercentage> {
#[inline]
fn has_viewport_percentage(&self) -> bool {
self.track_sizes.iter().any(|ref v| v.has_viewport_percentage())
}
}
/// Either a `<track-size>` or `<track-repeat>` component of `<track-list>`
///
/// This is required only for the specified form of `<track-list>`, and will become
/// `TrackSize<LengthOrPercentage>` in its computed form.
pub type TrackSizeOrRepeat = Either<TrackSize<LengthOrPercentage>, TrackRepeat<LengthOrPercentage>>;
impl Parse for TrackList<TrackSizeOrRepeat> {
fn parse<'i, 't>(context: &ParserContext, input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
let mut current_names;
let mut names = vec![];
let mut values = vec![];
let mut list_type = TrackListType::Explicit; // assume it's the simplest case
// marker to check whether we've already encountered <auto-repeat> along the way
let mut is_auto = false;
// assume that everything is <fixed-size>. This flag is useful when we encounter <auto-repeat>
let mut atleast_one_not_fixed = false;
loop {
current_names = input.try(parse_line_names).unwrap_or(vec![]);
if let Ok(track_size) = input.try(|i| TrackSize::parse(context, i)) {
if !track_size.is_fixed() {
atleast_one_not_fixed = true;
if is_auto {
// <auto-track-list> only accepts <fixed-size> and <fixed-repeat>
return Err(StyleParseError::UnspecifiedError.into())
}
}
names.push(current_names);
values.push(Either::First(track_size));
} else if let Ok((repeat, type_)) = input.try(|i| TrackRepeat::parse_with_repeat_type(context, i)) {
if list_type == TrackListType::Explicit {
list_type = TrackListType::Normal; // <explicit-track-list> doesn't contain repeat()
}
match type_ {
RepeatType::Normal => {
atleast_one_not_fixed = true;
if is_auto { // only <fixed-repeat>
return Err(StyleParseError::UnspecifiedError.into())
}
},
RepeatType::Auto => {
if is_auto || atleast_one_not_fixed {
// We've either seen <auto-repeat> earlier, or there's at least one non-fixed value
return Err(StyleParseError::UnspecifiedError.into())
}
is_auto = true;
list_type = TrackListType::Auto(values.len() as u16);
},
RepeatType::Fixed => (),
}
names.push(current_names);
values.push(Either::Second(repeat));
} else {
if values.is_empty() {
return Err(StyleParseError::UnspecifiedError.into())
}
names.push(current_names);
break
}
}
Ok(TrackList {
list_type: list_type,
values: values,
line_names: names,
auto_repeat: None, // filled only in computation
})
}
}
impl HasViewportPercentage for TrackList<TrackSizeOrRepeat> {
#[inline]
fn has_viewport_percentage(&self) -> bool {
self.values.iter().any(|ref v| v.has_viewport_percentage())
}
}
impl ToComputedValue for TrackList<TrackSizeOrRepeat> {
type ComputedValue = TrackList<TrackSize<computed::LengthOrPercentage>>;
#[inline]
fn to_computed_value(&self, context: &Context) -> Self::ComputedValue {
// Merge the line names while computing values. The resulting values will
// all be a bunch of `<track-size>`.
//
// For example,
// `[a b] 100px [c d] repeat(1, 30px [g]) [h]` will be merged as `[a b] 100px [c d] 30px [g h]`
// whereas, `[a b] repeat(2, [c] 50px [d]) [e f] repeat(auto-fill, [g] 12px) 10px [h]` will be merged as
// `[a b c] 50px [d c] 50px [d e f] repeat(auto-fill, [g] 12px) 10px [h]`, with the `<auto-repeat>` value
// set in the `auto_repeat` field, and the `idx` in TrackListType::Auto pointing to the values after
// `<auto-repeat>` (in this case, `10px [h]`).
let mut line_names = vec![];
let mut list_type = self.list_type;
let mut values = vec![];
let mut prev_names = vec![];
let mut auto_repeat = None;
let mut names_iter = self.line_names.iter();
for (size_or_repeat, names) in self.values.iter().zip(&mut names_iter) {
prev_names.extend_from_slice(names);
match *size_or_repeat {
Either::First(ref size) => values.push(size.to_computed_value(context)),
Either::Second(ref repeat) => {
let mut computed = repeat.to_computed_value(context);
if computed.count == RepeatCount::AutoFit || computed.count == RepeatCount::AutoFill {
line_names.push(mem::replace(&mut prev_names, vec![])); // don't merge for auto
list_type = TrackListType::Auto(values.len() as u16);
auto_repeat = Some(computed);
continue
}
let mut repeat_names_iter = computed.line_names.drain(..);
for (size, mut names) in computed.track_sizes.drain(..).zip(&mut repeat_names_iter) {
prev_names.append(&mut names);
line_names.push(mem::replace(&mut prev_names, vec![]));
values.push(size);
}
if let Some(mut names) = repeat_names_iter.next() {
prev_names.append(&mut names);
}
continue // last `<line-names>` in repeat() may merge with the next set
}
}
line_names.push(mem::replace(&mut prev_names, vec![]));
}
if let Some(names) = names_iter.next() {
prev_names.extend_from_slice(names);
}
line_names.push(mem::replace(&mut prev_names, vec![]));
TrackList {
list_type: list_type,
values: values,
line_names: line_names,
auto_repeat: auto_repeat,
}
}
#[inline]
fn from_computed_value(computed: &Self::ComputedValue) -> Self {
let auto_idx = if let TrackListType::Auto(idx) = computed.list_type {
idx as usize
} else {
usize::MAX
};
let mut values = Vec::with_capacity(computed.values.len() + 1);
for (i, value) in computed.values.iter().map(ToComputedValue::from_computed_value).enumerate() {
if i == auto_idx {
let value = TrackRepeat::from_computed_value(computed.auto_repeat.as_ref().unwrap());
values.push(Either::Second(value));
}
values.push(Either::First(value));
}
TrackList {
list_type: computed.list_type,
values: values,
line_names: computed.line_names.clone(),
auto_repeat: None,
}
}
}<|fim▁end|> | if CustomIdent::from_ident((&*ident).into(), &["span"]).is_err() {
return Err(StyleParseError::UnspecifiedError.into())
}
|
<|file_name|>test_run_finemap.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015 Boocock James <james.boocock@otago.ac.nz>
# Author: Boocock James <james.boocock@otago.ac.nz>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to<|fim▁hole|># subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from fine_mapping_pipeline.finemap.finemap import run_finemap, remove_surrogates, _write_matrix, _write_zscores
import logging
logging.basicConfig(level=logging.INFO)
def test_remove_surrogate(tmpdir):
input_matrix = 'tests/finemap_data/test.matrix'
input_zscore = 'tests/finemap_data/test.Z'
surrogates_out = 'tests/finemap_data/out.surro'
(matrix, zscores) = remove_surrogates(input_matrix,input_zscore, surrogates_out)
_write_matrix(matrix, "tests/finemap_data/out.matrix")
_write_zscores(zscores, "tests/finemap_data/out.zscores")
assert 1 == 2<|fim▁end|> | # use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so, |
<|file_name|>warship.ts<|end_file_name|><|fim▁begin|>/// <reference path="jquery.d.ts" />
/// <reference path='jqueryui.d.ts' />
class Cell {
shipIndex: number;
hasHit: boolean;
element: HTMLElement;
constructor (public row: number, public column: number) {
this.element = $("<div class='cell notBombed'></div>")[0];
}
// Parse a cell location of the format "row,column"
static parseCellLocation(pos: string) {
var indices: string[] = pos.split(",");
return {'row': parseInt(indices[0]), 'column': parseInt(indices[1])};
}
// Return the cell location of the format "row,column"
cellLocation() {
return "" + this.row + "," + this.column;
}
}
class Ship {
column = 0;
row = 0;
isVertical = true;
hits = 0;
element: HTMLElement;
constructor (public size: number) {
this.element = $("<div class='ship'></div>")[0];
}
updatePosition(row: number, column: number, vertical: boolean) {
this.row = row;
this.column = column;
this.isVertical = vertical;
this.updateLayout();
}
updateLayout() {
var width = "9.9%";
var height = "" + (this.size * 9.9) + "%";
this.element.style.left = "" + (this.column * 10) + "%";
this.element.style.top = "" + (this.row * 10) + "%";
this.element.style.width = this.isVertical ? width : height;
this.element.style.height = this.isVertical ? height : width;
}
flipShip() {
this.isVertical = !this.isVertical;
if (this.isVertical) {
if (this.row + this.size > 10) {
this.row = 10 - this.size;
}
} else {
if (this.column + this.size > 10) {
this.column = 10 - this.size;
}
}
this.updateLayout();
}
getCellsCovered() {
var cells: string[] = [];
var row = this.row;
var col = this.column;
for (var i = 0; i < this.size; i++) {
cells.push(row.toString() + "," + col.toString());
if (this.isVertical) {
row++;
} else {
col++;
}
}
return cells;
}
isSunk() {
return this.hits === this.size;
}
}
class Board {
ships: Ship[];
cells: Cell[][]; // Indexed by [rows][columns]
playerTurn = false; // Set to true when player can move
onEvent: Function; // Callback function when an action on the board occurs
shipSizes = [5, 4, 3, 3, 2];
private positioningEnabled: boolean; // Set to true when the player can position the ships
constructor (public element: HTMLElement, playerBoard: boolean = true) {
this.positioningEnabled = playerBoard;
this.cells = [];
this.ships = [];
var cell: Cell = null;
// Create the cells for the board
for (var row = 0; row < 10; row++) {
this.cells[row] = [];
for (var column = 0; column < 10; column++) {
cell = new Cell(row, column);
this.cells[row][column] = cell;
element.appendChild(cell.element);
$(cell.element).data("cellLocation", cell.cellLocation());
if (playerBoard) {
$(cell.element).droppable({
disabled: false,
drop: (event, ui) => {
var shipElement = <HTMLElement>ui.draggable[0];
var shipIndex: number = $(shipElement).data("shipIndex");
var ship = this.ships[shipIndex];
var shipX = Math.round(shipElement.offsetLeft / cell.element.offsetWidth);
var shipY = Math.round(shipElement.offsetTop / cell.element.offsetHeight);
ship.updatePosition(shipY, shipX, ship.isVertical);
}
});
}
}
}
var referenceCell = $(cell.element);
for (var i = 0; i < this.shipSizes.length; i++) {
var ship = new Ship(this.shipSizes[i]);
this.ships[i] = ship;
ship.updatePosition(i, 0, false);
if (playerBoard) { // Show the ships for positioning.
this.element.appendChild(ship.element);
ship.updateLayout();
$(ship.element).data("shipIndex", i).draggable({
disabled: false,
containment: 'parent',
// Reduce size slightly to avoid overlap issues blocking the last cell
grid: [referenceCell.width() * 0.99 + 2, referenceCell.height() * 0.99 + 2],
cursor: 'crosshair'
}).click( (evt: JQueryEventObject) => {
if (this.positioningEnabled) {
var shipIndex: number = $(evt.target).data("shipIndex");
this.ships[shipIndex].flipShip();
}
<|fim▁hole|> });
}
}
$(window).resize((evt) => {
$(this.element).children(".ship").draggable("option", "grid", [referenceCell.width() * 0.99 + 2, referenceCell.height() * 0.99 + 2]);
});
if (!playerBoard) {
// Computer board, this is where the player clicks to bomb
$(element).click((evt: JQueryEventObject) => this.onCellClick(evt));
}
}
set dragAndDropEnabled(val: boolean) {
var cells = $(this.element).children(".cell");
var ships = $(this.element).children(".ship");
this.positioningEnabled = val;
ships.draggable("option", "disabled", !val);
cells.droppable("option", "disabled", !val);
}
static getRandomPosition() {
return {
"row": Math.floor(Math.random() * 10),
"column": Math.floor(Math.random() * 10),
"vertical": (Math.floor(Math.random() * 2) === 1)
}
}
onCellClick(evt: JQueryEventObject) {
var x = <HTMLElement>evt.target;
if ($(x).hasClass("cell") === false) {
return;
}
if (!this.playerTurn) {
this.onEvent.call(this, 'click');
}
if (this.playerTurn) { // May be updated by prior onEvent call, so check again
this.bombCell(x);
}
}
bombCell(cellElem: HTMLElement) {
var cellPos = Cell.parseCellLocation($(cellElem).data("cellLocation"));
var cell = this.cells[cellPos.row][cellPos.column];
if (cell.hasHit) {
return; // Already been clicked on
}
cell.hasHit = true;
if (cell.shipIndex >= 0) { // Has a ship
$(cellElem).removeClass("notBombed");
$(cellElem).addClass("cellHit");
var ship = this.ships[cell.shipIndex];
ship.hits++;
if (ship.isSunk()) {
if (this.allShipsSunk()) {
this.onEvent.call(this, 'allSunk');
} else {
this.onEvent.call(this, 'shipSunk');
}
} else {
this.onEvent.call(this, 'hit');
}
} else {
$(cellElem).removeClass("notBombed");
$(cellElem).addClass("cellMiss");
this.onEvent.call(this, 'playerMissed');
}
}
randomize() {
var shipCount = this.ships.length;
do {
for (var shipIndex = 0; shipIndex < shipCount; shipIndex++) {
var pos = Board.getRandomPosition();
this.ships[shipIndex].updatePosition(pos.row, pos.column, pos.vertical);
}
} while (!this.boardIsValid());
}
boardIsValid() {
// Check if any ships overlap my checking their cells for duplicates.
// Do this by putting into a flat array, sorting, and seeing if any adjacent cells are equal
var allCells: string[] = [];
for (var i = 0; i < this.ships.length; i++) {
allCells = allCells.concat(this.ships[i].getCellsCovered());
}
allCells.sort();
var dups = allCells.some(function (val, idx, arr) { return val === arr[idx + 1]; });
// See if any ship cells are off the board
var outOfRange = allCells.some(function (val: string) {
var pos = Cell.parseCellLocation(val);
return !(pos.column >= 0 && pos.column <= 9 && pos.row >= 0 && pos.row <= 9);
});
if (dups || outOfRange) {
return false;
} else {
this.updateCellData();
return true;
}
}
chooseMove() {
do {
var pos = Board.getRandomPosition();
var cell = this.cells[pos.row][pos.column];
} while (cell.hasHit);
this.bombCell(cell.element);
}
private updateCellData() {
for (var i = 0; i < 100; i++) {
var x = this.cells[Math.floor(i / 10)][i % 10];
x.hasHit = false;
x.shipIndex = -1;
}
for (var index = 0; index < this.ships.length; index++) {
var ship = this.ships[index]
ship.hits = 0;
var cells = ship.getCellsCovered();
for (var cell = 0; cell < cells.length; cell++) {
var cellPos = Cell.parseCellLocation(cells[cell]);
var targetCell = this.cells[cellPos.row][cellPos.column];
targetCell.shipIndex = index;
}
}
$(this.element).children(".cell").removeClass("cellHit cellMiss").addClass("notBombed");
}
private allShipsSunk() {
return this.ships.every(function (val) { return val.isSunk(); });
}
}
class Game {
static gameState = {begin: 0, computerTurn: 1, playerTurn: 2, finished: 3};
static msgs = {
gameStart: "Drag your ships to the desired location on your board (on the right), then bomb a square on the left board to start the game!",
invalidPositions: "All ships must be in valid positions before the game can begin.",
wait: "Wait your turn!",
gameOn: "Game on!",
hit: "Good hit!",
shipSunk: "You sunk a ship!",
lostShip: "You lost a ship :-(",
lostGame: "You lost this time. Click anywhere on the left board to play again.",
allSunk: "Congratulations! You won! Click anywhere on the left board to play again."
};
state = Game.gameState.begin;
playerBoard: Board;
computerBoard: Board;
constructor () {
this.updateStatus(Game.msgs.gameStart);
this.playerBoard = new Board($("#playerBoard")[0]);
this.computerBoard = new Board($("#computerBoard")[0], false);
this.computerBoard.randomize();
this.playerBoard.randomize();
this.playerBoard.dragAndDropEnabled = true;
this.computerBoard.onEvent = (evt) => {
switch (evt) {
case 'click': // The user has click outside a turn. Action depends on current state
switch (this.state) {
case Game.gameState.begin:
this.startGame();
break;
case Game.gameState.computerTurn: // Not their turn yet. Ask to wait.
this.updateStatus(Game.msgs.wait);
break;
case Game.gameState.finished: // Start a new game
this.computerBoard.randomize();
this.playerBoard.randomize();
this.playerBoard.dragAndDropEnabled = true;
this.updateStatus(Game.msgs.gameStart);
this.state = Game.gameState.begin;
break;
}
break;
case 'playerMissed':
this.computersTurn();
break;
case 'hit':
this.updateStatus(Game.msgs.hit);
this.computersTurn();
break;
case 'shipSunk':
this.updateStatus(Game.msgs.shipSunk);
this.computersTurn();
break;
case 'allSunk':
this.state = Game.gameState.finished;
this.computerBoard.playerTurn = false;
this.updateStatus(Game.msgs.allSunk);
break;
}
};
this.playerBoard.onEvent = (evt) => {
switch (evt) {
case 'playerMissed':
case 'hit':
this.computerBoard.playerTurn = true;
break;
case 'shipSunk':
this.updateStatus(Game.msgs.lostShip);
this.computerBoard.playerTurn = true;
break;
case 'allSunk':
this.updateStatus(Game.msgs.lostGame);
this.computerBoard.playerTurn = false;
this.state = Game.gameState.finished;
break;
}
};
}
private computersTurn() {
this.computerBoard.playerTurn = false;
this.state = Game.gameState.computerTurn;
setTimeout(() => { this.playerBoard.chooseMove();
}, 250);
}
private startGame() {
if (this.playerBoard.boardIsValid()) {
this.state = Game.gameState.playerTurn;
this.playerBoard.dragAndDropEnabled = false;
this.computerBoard.playerTurn = true;
this.updateStatus(Game.msgs.gameOn);
}
else {
this.updateStatus(Game.msgs.invalidPositions);
}
}
private updateStatus(msg: string) {
$("#status").slideUp('fast', function () { // Slide out the old text
$(this).text(msg).slideDown('fast'); // Then slide in the new text
});
}
}
$(new Function("var game = new Game();"));<|fim▁end|> | |
<|file_name|>spread-err-sngl-err-obj-unresolvable.js<|end_file_name|><|fim▁begin|>// This file was procedurally generated from the following sources:
// - src/spread/sngl-err-obj-unresolvable.case
// - src/spread/error/array.template
/*---
description: Object Spread operator results in error when using an unresolvable reference (Array initializer)
esid: sec-runtime-semantics-arrayaccumulation
es6id: 12.2.5.2
features: [object-spread]
flags: [generated]
info: |
SpreadElement : ...AssignmentExpression
1. Let spreadRef be the result of evaluating AssignmentExpression.
2. Let spreadObj be ? GetValue(spreadRef).
3. Let iterator be ? GetIterator(spreadObj).
4. Repeat
a. Let next be ? IteratorStep(iterator).
b. If next is false, return nextIndex.
c. Let nextValue be ? IteratorValue(next).
d. Let status be CreateDataProperty(array, ToString(ToUint32(nextIndex)),
nextValue).
e. Assert: status is true.
f. Let nextIndex be nextIndex + 1.
Pending Runtime Semantics: PropertyDefinitionEvaluation
PropertyDefinition:...AssignmentExpression
1. Let exprValue be the result of evaluating AssignmentExpression.
2. Let fromValue be GetValue(exprValue).
3. ReturnIfAbrupt(fromValue).
4. Let excludedNames be a new empty List.
5. Return CopyDataProperties(object, fromValue, excludedNames).
---*/
<|fim▁hole|>});<|fim▁end|> | assert.throws(ReferenceError, function() {
[{...unresolvableReference}]; |
<|file_name|>spdiagview_bones.hpp<|end_file_name|><|fim▁begin|>// Copyright (C) 2015 Conrad Sanderson
// Copyright (C) 2015 NICTA (www.nicta.com.au)
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
//! \addtogroup spdiagview
//! @{
//! Class for storing data required to extract and set the diagonals of a sparse matrix
template<typename eT>
class spdiagview : public Base<eT, spdiagview<eT> >
{
public:
typedef eT elem_type;
typedef typename get_pod_type<eT>::result pod_type;
arma_aligned const SpMat<eT>& m;
static const bool is_row = false;
static const bool is_col = true;
const uword row_offset;
const uword col_offset;
const uword n_rows; // equal to n_elem
const uword n_elem;
static const uword n_cols = 1;
protected:
arma_inline spdiagview(const SpMat<eT>& in_m, const uword in_row_offset, const uword in_col_offset, const uword len);
public:
inline ~spdiagview();
inline void operator=(const spdiagview& x);
inline void operator+=(const eT val);
inline void operator-=(const eT val);
inline void operator*=(const eT val);
inline void operator/=(const eT val);
template<typename T1> inline void operator= (const Base<eT,T1>& x);
template<typename T1> inline void operator+=(const Base<eT,T1>& x);
template<typename T1> inline void operator-=(const Base<eT,T1>& x);
template<typename T1> inline void operator%=(const Base<eT,T1>& x);
template<typename T1> inline void operator/=(const Base<eT,T1>& x);
<|fim▁hole|> template<typename T1> inline void operator/=(const SpBase<eT,T1>& x);
inline eT at_alt (const uword ii) const;
inline SpValProxy< SpMat<eT> > operator[](const uword ii);
inline eT operator[](const uword ii) const;
inline SpValProxy< SpMat<eT> > at(const uword ii);
inline eT at(const uword ii) const;
inline SpValProxy< SpMat<eT> > operator()(const uword ii);
inline eT operator()(const uword ii) const;
inline SpValProxy< SpMat<eT> > at(const uword in_n_row, const uword);
inline eT at(const uword in_n_row, const uword) const;
inline SpValProxy< SpMat<eT> > operator()(const uword in_n_row, const uword in_n_col);
inline eT operator()(const uword in_n_row, const uword in_n_col) const;
inline void fill(const eT val);
inline void zeros();
inline void ones();
inline void randu();
inline void randn();
inline static void extract(Mat<eT>& out, const spdiagview& in);
private:
friend class SpMat<eT>;
spdiagview();
};
//! @}<|fim▁end|> | template<typename T1> inline void operator= (const SpBase<eT,T1>& x);
template<typename T1> inline void operator+=(const SpBase<eT,T1>& x);
template<typename T1> inline void operator-=(const SpBase<eT,T1>& x);
template<typename T1> inline void operator%=(const SpBase<eT,T1>& x); |
<|file_name|>webkit_patch.py<|end_file_name|><|fim▁begin|># Copyright (c) 2010 Google Inc. All rights reserved.
# Copyright (c) 2009 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# A tool for automating dealing with bugzilla, posting patches, committing patches, etc.
from optparse import make_option
from webkitpy.common.host import Host
from webkitpy.tool.multicommandtool import MultiCommandTool
from webkitpy.tool import commands
class WebKitPatch(MultiCommandTool, Host):
global_options = [
make_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="enable all logging"),
make_option("-d", "--directory", action="append", dest="patch_directories",
default=[], help="Directory to look at for changed files"),
]
def __init__(self, path):
MultiCommandTool.__init__(self)
Host.__init__(self)
self._path = path
def path(self):
return self._path
def should_show_in_main_help(self, command):
if not command.show_in_main_help:
return False
if command.requires_local_commits:
return self.scm().supports_local_commits()
return True
# FIXME: This may be unnecessary since we pass global options to all commands during execute() as well.<|fim▁hole|> if command.requires_local_commits and not self.scm().supports_local_commits():
failure_reason = "%s requires local commits using %s in %s." % (
command.name, self.scm().display_name(), self.scm().checkout_root)
return (False, failure_reason)
return (True, None)<|fim▁end|> | def handle_global_options(self, options):
self.initialize_scm(options.patch_directories)
def should_execute_command(self, command): |
<|file_name|>crypto_setup_client.go<|end_file_name|><|fim▁begin|>package handshake
import (
"bytes"
"crypto/rand"
"crypto/tls"
"encoding/binary"
"errors"
"fmt"
"io"
"sync"
"time"
"github.com/Psiphon-Labs/psiphon-tunnel-core/psiphon/common/quic/gquic-go/internal/crypto"
"github.com/Psiphon-Labs/psiphon-tunnel-core/psiphon/common/quic/gquic-go/internal/protocol"
"github.com/Psiphon-Labs/psiphon-tunnel-core/psiphon/common/quic/gquic-go/internal/utils"
"github.com/Psiphon-Labs/psiphon-tunnel-core/psiphon/common/quic/gquic-go/qerr"
)
type cryptoSetupClient struct {
mutex sync.RWMutex
hostname string
connID protocol.ConnectionID
version protocol.VersionNumber
initialVersion protocol.VersionNumber
negotiatedVersions []protocol.VersionNumber
cryptoStream io.ReadWriter
serverConfig *serverConfigClient
stk []byte
sno []byte<|fim▁hole|> nonc []byte
proof []byte
chloForSignature []byte
lastSentCHLO []byte
certManager crypto.CertManager
divNonceChan chan struct{}
diversificationNonce []byte
clientHelloCounter int
serverVerified bool // has the certificate chain and the proof already been verified
keyDerivation QuicCryptoKeyDerivationFunction
receivedSecurePacket bool
nullAEAD crypto.AEAD
secureAEAD crypto.AEAD
forwardSecureAEAD crypto.AEAD
paramsChan chan<- TransportParameters
handshakeEvent chan<- struct{}
params *TransportParameters
logger utils.Logger
}
var _ CryptoSetup = &cryptoSetupClient{}
var (
errNoObitForClientNonce = errors.New("CryptoSetup BUG: No OBIT for client nonce available")
errClientNonceAlreadyExists = errors.New("CryptoSetup BUG: A client nonce was already generated")
errConflictingDiversificationNonces = errors.New("Received two different diversification nonces")
)
// NewCryptoSetupClient creates a new CryptoSetup instance for a client
func NewCryptoSetupClient(
cryptoStream io.ReadWriter,
connID protocol.ConnectionID,
version protocol.VersionNumber,
tlsConf *tls.Config,
params *TransportParameters,
paramsChan chan<- TransportParameters,
handshakeEvent chan<- struct{},
initialVersion protocol.VersionNumber,
negotiatedVersions []protocol.VersionNumber,
logger utils.Logger,
) (CryptoSetup, error) {
nullAEAD, err := crypto.NewNullAEAD(protocol.PerspectiveClient, connID, version)
if err != nil {
return nil, err
}
divNonceChan := make(chan struct{})
cs := &cryptoSetupClient{
cryptoStream: cryptoStream,
hostname: tlsConf.ServerName,
connID: connID,
version: version,
certManager: crypto.NewCertManager(tlsConf),
params: params,
keyDerivation: crypto.DeriveQuicCryptoAESKeys,
nullAEAD: nullAEAD,
paramsChan: paramsChan,
handshakeEvent: handshakeEvent,
initialVersion: initialVersion,
// The server might have sent greased versions in the Version Negotiation packet.
// We need strip those from the list, since they won't be included in the handshake tag.
negotiatedVersions: protocol.StripGreasedVersions(negotiatedVersions),
divNonceChan: divNonceChan,
logger: logger,
}
return cs, nil
}
func (h *cryptoSetupClient) HandleCryptoStream() error {
messageChan := make(chan HandshakeMessage)
errorChan := make(chan error, 1)
go func() {
for {
message, err := ParseHandshakeMessage(h.cryptoStream)
if err != nil {
errorChan <- qerr.Error(qerr.HandshakeFailed, err.Error())
return
}
messageChan <- message
}
}()
for {
if err := h.maybeUpgradeCrypto(); err != nil {
return err
}
h.mutex.RLock()
sendCHLO := h.secureAEAD == nil
h.mutex.RUnlock()
if sendCHLO {
if err := h.sendCHLO(); err != nil {
return err
}
}
var message HandshakeMessage
select {
case <-h.divNonceChan:
// there's no message to process, but we should try upgrading the crypto again
continue
case message = <-messageChan:
case err := <-errorChan:
return err
}
h.logger.Debugf("Got %s", message)
switch message.Tag {
case TagREJ:
if err := h.handleREJMessage(message.Data); err != nil {
return err
}
case TagSHLO:
params, err := h.handleSHLOMessage(message.Data)
if err != nil {
return err
}
// blocks until the session has received the parameters
h.paramsChan <- *params
h.handshakeEvent <- struct{}{}
close(h.handshakeEvent)
default:
return qerr.InvalidCryptoMessageType
}
}
}
func (h *cryptoSetupClient) handleREJMessage(cryptoData map[Tag][]byte) error {
var err error
if stk, ok := cryptoData[TagSTK]; ok {
h.stk = stk
}
if sno, ok := cryptoData[TagSNO]; ok {
h.sno = sno
}
// TODO: what happens if the server sends a different server config in two packets?
if scfg, ok := cryptoData[TagSCFG]; ok {
h.serverConfig, err = parseServerConfig(scfg)
if err != nil {
return err
}
if h.serverConfig.IsExpired() {
return qerr.CryptoServerConfigExpired
}
// now that we have a server config, we can use its OBIT value to generate a client nonce
if len(h.nonc) == 0 {
err = h.generateClientNonce()
if err != nil {
return err
}
}
}
if proof, ok := cryptoData[TagPROF]; ok {
h.proof = proof
h.chloForSignature = h.lastSentCHLO
}
if crt, ok := cryptoData[TagCERT]; ok {
err := h.certManager.SetData(crt)
if err != nil {
return qerr.Error(qerr.InvalidCryptoMessageParameter, "Certificate data invalid")
}
err = h.certManager.Verify(h.hostname)
if err != nil {
h.logger.Infof("Certificate validation failed: %s", err.Error())
return qerr.ProofInvalid
}
}
if h.serverConfig != nil && len(h.proof) != 0 && h.certManager.GetLeafCert() != nil {
validProof := h.certManager.VerifyServerProof(h.proof, h.chloForSignature, h.serverConfig.Get())
if !validProof {
h.logger.Infof("Server proof verification failed")
return qerr.ProofInvalid
}
h.serverVerified = true
}
return nil
}
func (h *cryptoSetupClient) handleSHLOMessage(cryptoData map[Tag][]byte) (*TransportParameters, error) {
h.mutex.Lock()
defer h.mutex.Unlock()
if !h.receivedSecurePacket {
return nil, qerr.Error(qerr.CryptoEncryptionLevelIncorrect, "unencrypted SHLO message")
}
if sno, ok := cryptoData[TagSNO]; ok {
h.sno = sno
}
serverPubs, ok := cryptoData[TagPUBS]
if !ok {
return nil, qerr.Error(qerr.CryptoMessageParameterNotFound, "PUBS")
}
verTag, ok := cryptoData[TagVER]
if !ok {
return nil, qerr.Error(qerr.InvalidCryptoMessageParameter, "server hello missing version list")
}
if !h.validateVersionList(verTag) {
return nil, qerr.Error(qerr.VersionNegotiationMismatch, "Downgrade attack detected")
}
nonce := append(h.nonc, h.sno...)
ephermalSharedSecret, err := h.serverConfig.kex.CalculateSharedKey(serverPubs)
if err != nil {
return nil, err
}
leafCert := h.certManager.GetLeafCert()
h.forwardSecureAEAD, err = h.keyDerivation(
true,
ephermalSharedSecret,
nonce,
h.connID,
h.lastSentCHLO,
h.serverConfig.Get(),
leafCert,
nil,
protocol.PerspectiveClient,
)
if err != nil {
return nil, err
}
h.logger.Debugf("Creating AEAD for forward-secure encryption. Stopping to accept all lower encryption levels.")
params, err := readHelloMap(cryptoData)
if err != nil {
return nil, qerr.InvalidCryptoMessageParameter
}
return params, nil
}
func (h *cryptoSetupClient) validateVersionList(verTags []byte) bool {
numNegotiatedVersions := len(h.negotiatedVersions)
if numNegotiatedVersions == 0 {
return true
}
if len(verTags)%4 != 0 || len(verTags)/4 != numNegotiatedVersions {
return false
}
b := bytes.NewReader(verTags)
for i := 0; i < numNegotiatedVersions; i++ {
v, err := utils.BigEndian.ReadUint32(b)
if err != nil { // should never occur, since the length was already checked
return false
}
if protocol.VersionNumber(v) != h.negotiatedVersions[i] {
return false
}
}
return true
}
func (h *cryptoSetupClient) Open(dst, src []byte, packetNumber protocol.PacketNumber, associatedData []byte) ([]byte, protocol.EncryptionLevel, error) {
h.mutex.RLock()
defer h.mutex.RUnlock()
if h.forwardSecureAEAD != nil {
data, err := h.forwardSecureAEAD.Open(dst, src, packetNumber, associatedData)
if err == nil {
return data, protocol.EncryptionForwardSecure, nil
}
return nil, protocol.EncryptionUnspecified, err
}
if h.secureAEAD != nil {
data, err := h.secureAEAD.Open(dst, src, packetNumber, associatedData)
if err == nil {
h.logger.Debugf("Received first secure packet. Stopping to accept unencrypted packets.")
h.receivedSecurePacket = true
return data, protocol.EncryptionSecure, nil
}
if h.receivedSecurePacket {
return nil, protocol.EncryptionUnspecified, err
}
}
res, err := h.nullAEAD.Open(dst, src, packetNumber, associatedData)
if err != nil {
return nil, protocol.EncryptionUnspecified, err
}
return res, protocol.EncryptionUnencrypted, nil
}
func (h *cryptoSetupClient) GetSealer() (protocol.EncryptionLevel, Sealer) {
h.mutex.RLock()
defer h.mutex.RUnlock()
if h.forwardSecureAEAD != nil {
return protocol.EncryptionForwardSecure, h.forwardSecureAEAD
} else if h.secureAEAD != nil {
return protocol.EncryptionSecure, h.secureAEAD
} else {
return protocol.EncryptionUnencrypted, h.nullAEAD
}
}
func (h *cryptoSetupClient) GetSealerForCryptoStream() (protocol.EncryptionLevel, Sealer) {
return protocol.EncryptionUnencrypted, h.nullAEAD
}
func (h *cryptoSetupClient) GetSealerWithEncryptionLevel(encLevel protocol.EncryptionLevel) (Sealer, error) {
h.mutex.RLock()
defer h.mutex.RUnlock()
switch encLevel {
case protocol.EncryptionUnencrypted:
return h.nullAEAD, nil
case protocol.EncryptionSecure:
if h.secureAEAD == nil {
return nil, errors.New("CryptoSetupClient: no secureAEAD")
}
return h.secureAEAD, nil
case protocol.EncryptionForwardSecure:
if h.forwardSecureAEAD == nil {
return nil, errors.New("CryptoSetupClient: no forwardSecureAEAD")
}
return h.forwardSecureAEAD, nil
}
return nil, errors.New("CryptoSetupClient: no encryption level specified")
}
func (h *cryptoSetupClient) ConnectionState() ConnectionState {
h.mutex.Lock()
defer h.mutex.Unlock()
return ConnectionState{
HandshakeComplete: h.forwardSecureAEAD != nil,
PeerCertificates: h.certManager.GetChain(),
}
}
func (h *cryptoSetupClient) SetDiversificationNonce(divNonce []byte) error {
h.mutex.Lock()
if len(h.diversificationNonce) > 0 {
defer h.mutex.Unlock()
if !bytes.Equal(h.diversificationNonce, divNonce) {
return errConflictingDiversificationNonces
}
return nil
}
h.diversificationNonce = divNonce
h.mutex.Unlock()
h.divNonceChan <- struct{}{}
return nil
}
func (h *cryptoSetupClient) sendCHLO() error {
h.clientHelloCounter++
if h.clientHelloCounter > protocol.MaxClientHellos {
return qerr.Error(qerr.CryptoTooManyRejects, fmt.Sprintf("More than %d rejects", protocol.MaxClientHellos))
}
b := &bytes.Buffer{}
tags, err := h.getTags()
if err != nil {
return err
}
h.addPadding(tags)
message := HandshakeMessage{
Tag: TagCHLO,
Data: tags,
}
h.logger.Debugf("Sending %s", message)
message.Write(b)
_, err = h.cryptoStream.Write(b.Bytes())
if err != nil {
return err
}
h.lastSentCHLO = b.Bytes()
return nil
}
func (h *cryptoSetupClient) getTags() (map[Tag][]byte, error) {
tags := h.params.getHelloMap()
tags[TagSNI] = []byte(h.hostname)
tags[TagPDMD] = []byte("X509")
ccs := h.certManager.GetCommonCertificateHashes()
if len(ccs) > 0 {
tags[TagCCS] = ccs
}
versionTag := make([]byte, 4)
binary.BigEndian.PutUint32(versionTag, uint32(h.initialVersion))
tags[TagVER] = versionTag
if len(h.stk) > 0 {
tags[TagSTK] = h.stk
}
if len(h.sno) > 0 {
tags[TagSNO] = h.sno
}
if h.serverConfig != nil {
tags[TagSCID] = h.serverConfig.ID
leafCert := h.certManager.GetLeafCert()
if leafCert != nil {
certHash, _ := h.certManager.GetLeafCertHash()
xlct := make([]byte, 8)
binary.LittleEndian.PutUint64(xlct, certHash)
tags[TagNONC] = h.nonc
tags[TagXLCT] = xlct
tags[TagKEXS] = []byte("C255")
tags[TagAEAD] = []byte("AESG")
tags[TagPUBS] = h.serverConfig.kex.PublicKey() // TODO: check if 3 bytes need to be prepended
}
}
return tags, nil
}
// add a TagPAD to a tagMap, such that the total size will be bigger than the ClientHelloMinimumSize
func (h *cryptoSetupClient) addPadding(tags map[Tag][]byte) {
var size int
for _, tag := range tags {
size += 8 + len(tag) // 4 bytes for the tag + 4 bytes for the offset + the length of the data
}
paddingSize := protocol.MinClientHelloSize - size
if paddingSize > 0 {
tags[TagPAD] = bytes.Repeat([]byte{0}, paddingSize)
}
}
func (h *cryptoSetupClient) maybeUpgradeCrypto() error {
if !h.serverVerified {
return nil
}
h.mutex.Lock()
defer h.mutex.Unlock()
leafCert := h.certManager.GetLeafCert()
if h.secureAEAD == nil && (h.serverConfig != nil && len(h.serverConfig.sharedSecret) > 0 && len(h.nonc) > 0 && len(leafCert) > 0 && len(h.diversificationNonce) > 0 && len(h.lastSentCHLO) > 0) {
var err error
var nonce []byte
if h.sno == nil {
nonce = h.nonc
} else {
nonce = append(h.nonc, h.sno...)
}
h.secureAEAD, err = h.keyDerivation(
false,
h.serverConfig.sharedSecret,
nonce,
h.connID,
h.lastSentCHLO,
h.serverConfig.Get(),
leafCert,
h.diversificationNonce,
protocol.PerspectiveClient,
)
if err != nil {
return err
}
h.logger.Debugf("Creating AEAD for secure encryption.")
h.handshakeEvent <- struct{}{}
}
return nil
}
func (h *cryptoSetupClient) generateClientNonce() error {
if len(h.nonc) > 0 {
return errClientNonceAlreadyExists
}
nonc := make([]byte, 32)
binary.BigEndian.PutUint32(nonc, uint32(time.Now().Unix()))
if len(h.serverConfig.obit) != 8 {
return errNoObitForClientNonce
}
copy(nonc[4:12], h.serverConfig.obit)
_, err := rand.Read(nonc[12:])
if err != nil {
return err
}
h.nonc = nonc
return nil
}<|fim▁end|> | |
<|file_name|>CWE23_Relative_Path_Traversal__char_connect_socket_w32CreateFile_66a.cpp<|end_file_name|><|fim▁begin|>/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE23_Relative_Path_Traversal__char_connect_socket_w32CreateFile_66a.cpp
Label Definition File: CWE23_Relative_Path_Traversal.label.xml
Template File: sources-sink-66a.tmpl.cpp
*/
/*
* @description
* CWE: 23 Relative Path Traversal
* BadSource: connect_socket Read data using a connect socket (client side)
* GoodSource: Use a fixed file name
* Sinks: w32CreateFile
* BadSink : Open the file named in data using CreateFile()
* Flow Variant: 66 Data flow: data passed in an array from one function to another in different source files
*
* */
#include "std_testcase.h"
#ifdef _WIN32
#define BASEPATH "c:\\temp\\"
#else
#include <wchar.h>
#define BASEPATH "/tmp/"
#endif
#ifdef _WIN32
#include <winsock2.h>
#include <windows.h>
#include <direct.h>
#pragma comment(lib, "ws2_32") /* include ws2_32.lib when linking */
#define CLOSE_SOCKET closesocket
#else /* NOT _WIN32 */
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <unistd.h>
#define INVALID_SOCKET -1
#define SOCKET_ERROR -1
#define CLOSE_SOCKET close
#define SOCKET int
#endif
#define TCP_PORT 27015
#define IP_ADDRESS "127.0.0.1"
namespace CWE23_Relative_Path_Traversal__char_connect_socket_w32CreateFile_66
{
#ifndef OMITBAD
/* bad function declaration */
void badSink(char * dataArray[]);
void bad()
{
char * data;
char * dataArray[5];
char dataBuffer[FILENAME_MAX] = BASEPATH;
data = dataBuffer;
{
#ifdef _WIN32
WSADATA wsaData;
int wsaDataInit = 0;
#endif
int recvResult;
struct sockaddr_in service;
char *replace;
SOCKET connectSocket = INVALID_SOCKET;
size_t dataLen = strlen(data);
do
{
#ifdef _WIN32
if (WSAStartup(MAKEWORD(2,2), &wsaData) != NO_ERROR)
{
break;
}
wsaDataInit = 1;
#endif
/* POTENTIAL FLAW: Read data using a connect socket */
connectSocket = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
if (connectSocket == INVALID_SOCKET)
{
break;
}
memset(&service, 0, sizeof(service));
service.sin_family = AF_INET;
service.sin_addr.s_addr = inet_addr(IP_ADDRESS);
service.sin_port = htons(TCP_PORT);
if (connect(connectSocket, (struct sockaddr*)&service, sizeof(service)) == SOCKET_ERROR)
{
break;
}
/* Abort on error or the connection was closed, make sure to recv one
* less char than is in the recv_buf in order to append a terminator */
/* Abort on error or the connection was closed */
recvResult = recv(connectSocket, (char *)(data + dataLen), sizeof(char) * (FILENAME_MAX - dataLen - 1), 0);
if (recvResult == SOCKET_ERROR || recvResult == 0)
{
break;
}
/* Append null terminator */
data[dataLen + recvResult / sizeof(char)] = '\0';
/* Eliminate CRLF */
replace = strchr(data, '\r');
<|fim▁hole|> }
replace = strchr(data, '\n');
if (replace)
{
*replace = '\0';
}
}
while (0);
if (connectSocket != INVALID_SOCKET)
{
CLOSE_SOCKET(connectSocket);
}
#ifdef _WIN32
if (wsaDataInit)
{
WSACleanup();
}
#endif
}
/* put data in array */
dataArray[2] = data;
badSink(dataArray);
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* good function declarations */
/* goodG2B uses the GoodSource with the BadSink */
void goodG2BSink(char * dataArray[]);
static void goodG2B()
{
char * data;
char * dataArray[5];
char dataBuffer[FILENAME_MAX] = BASEPATH;
data = dataBuffer;
/* FIX: Use a fixed file name */
strcat(data, "file.txt");
dataArray[2] = data;
goodG2BSink(dataArray);
}
void good()
{
goodG2B();
}
#endif /* OMITGOOD */
} /* close namespace */
/* Below is the main(). It is only used when building this testcase on
its own for testing or for building a binary to use in testing binary
analysis tools. It is not used when compiling all the testcases as one
application, which is how source code analysis tools are tested. */
#ifdef INCLUDEMAIN
using namespace CWE23_Relative_Path_Traversal__char_connect_socket_w32CreateFile_66; /* so that we can use good and bad easily */
int main(int argc, char * argv[])
{
/* seed randomness */
srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
printLine("Calling good()...");
good();
printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
printLine("Calling bad()...");
bad();
printLine("Finished bad()");
#endif /* OMITBAD */
return 0;
}
#endif<|fim▁end|> | if (replace)
{
*replace = '\0';
|
<|file_name|>1021.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
def calc_note(count, value):
qnt = 0
if count >= value:
qnt = int(count) / value
print '%d nota(s) de R$ %d.00' % (qnt, value)
return count - qnt * value
n = float(raw_input())
print 'NOTAS:'
n = calc_note(n, 100)
n = calc_note(n, 50)
n = calc_note(n, 20)
n = calc_note(n, 10)<|fim▁hole|>n = calc_note(n, 5)
n = calc_note(n, 2)
print 'MOEDAS:'
print '%d moeda(s) de R$ 1.00' % int(n)
n -= int(n)
m50 = n / 0.50
print '%d moeda(s) de R$ 0.50' % m50
n -= int(m50) * 0.50
m25 = n / 0.25
print '%d moeda(s) de R$ 0.25' % m25
n -= int(m25) * 0.25
m10 = n / 0.10
print '%d moeda(s) de R$ 0.10' % m10
n -= int(m10) * 0.10
if round(n, 2) >= 0.05:
print '1 moeda(s) de R$ 0.05'
m1 = (n - 0.05) * 100
else:
print '0 moeda(s) de R$ 0.05'
m1 = round(n, 2) * 100
if round(m1, 0):
print '%.0f moeda(s) de R$ 0.01' % m1
else:
print '0 moeda(s) de R$ 0.01'<|fim▁end|> | |
<|file_name|>ceres_costs_utils_test.hpp<|end_file_name|><|fim▁begin|>/*
* Software License Agreement (Apache License)
*
* Copyright (c) 2014, Southwest Research Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CERES_COSTS_UTILS_TEST_HPP_
#define CERES_COSTS_UTILS_TEST_HPP_
#include "ceres/ceres.h"
#include "ceres/rotation.h"
#include <industrial_extrinsic_cal/basic_types.h>
namespace industrial_extrinsic_cal
{
/* local prototypes of helper functions */
/*! \brief print a quaternion plus position as a homogeneous transform
* \param qx quaternion x value
* \param qy quaternion y value
* \param qz quaternion z value
* \param qw quaternion w value
* \param tx position x value
* \param ty position y value
* \param tz position z value
*/
void printQTasH(double qx, double qy, double qz, double qw, double tx, double ty, double tz);
/*! \brief print an angle axis transform as a homogeneous transform
* \param x angle axis x value
* \param y angle axis y value
* \param z angle axis z value
* \param tx position x value
* \param ty position y value
* \param tz position z value
*/
void printAATasH(double ax, double ay, double az, double tx, double ty, double tz);
/*! \brief print angle axis to homogeneous transform inverse
* \param ax angle axis x value
* \param ay angle axis y value
* \param az angle axis z value
* \param tx position x value
* \param ty position y value
* \param tz position z value
*/
void printAATasHI(double ax, double ay, double az, double tx, double ty, double tz);<|fim▁hole|>/*! \brief print angle axis as euler angles
* \param ax angle axis x value
* \param ay angle axis y value
* \param az angle axis z value
*/
void printAAasEuler(double ax, double ay, double az);
/*! \brief print Camera Parameters
* \param CameraParameters include intrinsic and extrinsic
* \param words to provide as a header
*/
void printCameraParameters(CameraParameters C, std::string words);
/*! \brief computes image of point in cameras image plane
* \param C both intrinsic and extrinsic camera parameters
* \param P the point to be projected into image
*/
Observation projectPointWithDistortion(CameraParameters camera_parameters, Point3d point);
Observation projectPointNoDistortion(CameraParameters camera_params, Point3d point_to_project);
Observation projectPointWithDistortion(CameraParameters C, Point3d P)
{
double p[3];
double pt[3];
pt[0] = P.x;
pt[1] = P.y;
pt[2] = P.z;
/* transform point into camera frame */
/* note, camera transform takes points from camera frame into world frame */
double aa[3];
aa[0] = C.pb_extrinsics[0];
aa[1] = C.pb_extrinsics[1];
aa[2] = C.pb_extrinsics[2];
ceres::AngleAxisRotatePoint(aa, pt, p);
// apply camera translation
double xp1 = p[0] + C.pb_extrinsics[3];
double yp1 = p[1] + C.pb_extrinsics[4];
double zp1 = p[2] + C.pb_extrinsics[5];
// p[0] +=C.pb_extrinsics[3];
// p[1] +=C.pb_extrinsics[4];
// p[2] +=C.pb_extrinsics[5];
double xp = xp1 / zp1;
double yp = yp1 / zp1;
// calculate terms for polynomial distortion
double r2 = xp * xp + yp * yp;
double r4 = r2 * r2;
double r6 = r2 * r4;
double xp2 = xp * xp; /* temporary variables square of others */
double yp2 = yp * yp;
/* apply the distortion coefficients to refine pixel location */
double xpp = xp + C.distortion_k1 * r2 * xp + C.distortion_k2 * r4 * xp + C.distortion_k3 * r6 * xp +
C.distortion_p2 * (r2 + 2 * xp2) + 2 * C.distortion_p1 * xp * yp;
double ypp = yp + C.distortion_k1 * r2 * yp + C.distortion_k2 * r4 * yp + C.distortion_k3 * r6 * yp +
C.distortion_p1 * (r2 + 2 * yp2) + 2 * C.distortion_p2 * xp * yp;
/* perform projection using focal length and camera center into image plane */
Observation O;
O.point_id = 0;
O.image_loc_x = C.focal_length_x * xpp + C.center_x;
O.image_loc_y = C.focal_length_y * ypp + C.center_y;
return (O);
}
Observation projectPointNoDistortion(CameraParameters C, Point3d P)
{
double p[3]; // rotated into camera frame
double point[3]; // world location of point
double aa[3]; // angle axis representation of camera transform
double tx = C.position[0]; // location of origin in camera frame x
double ty = C.position[1]; // location of origin in camera frame y
double tz = C.position[2]; // location of origin in camera frame z
double fx = C.focal_length_x; // focal length x
double fy = C.focal_length_y; // focal length y
double cx = C.center_x; // optical center x
double cy = C.center_y; // optical center y
aa[0] = C.angle_axis[0];
aa[1] = C.angle_axis[1];
aa[2] = C.angle_axis[2];
point[0] = P.x;
point[1] = P.y;
point[2] = P.z;
/** rotate and translate points into camera frame */
ceres::AngleAxisRotatePoint(aa, point, p);
// apply camera translation
double xp1 = p[0] + tx;
double yp1 = p[1] + ty;
double zp1 = p[2] + tz;
// scale into the image plane by distance away from camera
double xp = xp1 / zp1;
double yp = yp1 / zp1;
// perform projection using focal length and camera center into image plane
Observation O;
O.image_loc_x = fx * xp + cx;
O.image_loc_y = fy * yp + cy;
return (O);
}
} // end of namespace
#endif<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// The MIT License (MIT)
// Copyright (c) 2016 Connor Hilarides
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software
// and associated documentation files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or
// substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
// BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#![allow(dead_code)]
extern crate image;
extern crate gif;
extern crate owning_ref;
extern crate memmap;
use std::path::PathBuf;
use std::rc::Rc;
use std::{io, fs, slice};
pub mod ffi;
pub enum Buffer {
Boxed(Box<[u8]>),
Allocated(*const u8, usize, unsafe fn(*const u8, usize)),
}
impl Buffer {
pub fn get(&self) -> &[u8] {
match *self {
Buffer::Boxed(ref data) => &**data,
Buffer::Allocated(data, size, _) => unsafe { slice::from_raw_parts(data, size) },
}
}
}
impl Drop for Buffer {
fn drop(&mut self) {
match *self {
Buffer::Allocated(data, size, free) => unsafe { free(data, size) },
_ => {}
}
}
}
pub enum ImageId {
File(PathBuf),
Borrowed(*const [u8]),
Owned(Buffer),
}
pub struct Image {
frame: image::Frame,
}
impl Image {
fn get_reader(id: &ImageId) -> io::Result<io::Cursor<SrcData>> {
let id = match *id {
ImageId::File(ref path) => ImageId::File(path.clone()),
ImageId::Borrowed(ptr) => ImageId::Borrowed(ptr),
ImageId::Owned(ref data) => ImageId::Borrowed(data.get() as *const [u8]),
};
Ok(io::Cursor::new(try!(ImageSrc::new(id))))
}
pub fn load(id: &ImageId, format: image::ImageFormat) -> image::ImageResult<Image> {
let reader = try!(Image::get_reader(id));
let dyn = try!(image::load(reader, format));
let frame = image::Frame::new(dyn.to_rgba());
Ok(Image {
frame: frame,
})
}
}
type SrcData = owning_ref::OwningRef<Rc<ImageSrc>, [u8]>;
enum ImageSrc {
File(memmap::Mmap),
Borrowed(*const [u8]),
Owned(Buffer),
}
impl ImageSrc {
pub fn new(id: ImageId) -> io::Result<SrcData> {
Ok(ImageSrc::make_data(try!(ImageSrc::open_src(id))))
}
fn open_src(id: ImageId) -> io::Result<ImageSrc> {
use ImageSrc::*;
Ok(match id {
ImageId::File(path) => {
let file = try!(fs::File::open(path));
let mmap = try!(memmap::Mmap::open(&file, memmap::Protection::Read));
File(mmap)
},
ImageId::Borrowed(ptr) => Borrowed(ptr),
ImageId::Owned(data) => Owned(data),
})
}
fn make_data(data: ImageSrc) -> SrcData {
use owning_ref::OwningRef;
let base_ref = OwningRef::<Rc<ImageSrc>, ImageSrc>::new(Rc::new(data));
base_ref.map(|data| {
use ImageSrc::*;
match *data {
File(ref mmap) => unsafe { mmap.as_slice() },
Borrowed(ptr) => unsafe { &*ptr },
Owned(ref data) => data.get(),
}
})
}
}
pub struct MultiImage {
current_frame: Option<image::Frame>,
decoder: Option<gif::Reader<io::Cursor<SrcData>>>,
source: SrcData,
next_frame: usize,
width: u32,
height: u32,
delay: u16,
}
impl MultiImage {
pub fn new(id: ImageId) -> Result<MultiImage, gif::DecodingError> {
let mut image = MultiImage {
next_frame: 0,
width: 0,
height: 0,
delay: 0,
current_frame: None,
decoder: None,
source: try!(ImageSrc::new(id)),
};
try!(image.setup_decoder());
Ok(image)
}<|fim▁hole|> pub fn request_frame(&mut self, num: usize) -> Option<&image::Frame> {
if self.current_frame.is_none() || num + 1 != self.next_frame {
self.load_frame(num)
} else {
self.current_frame.as_ref()
}
}
fn setup_decoder(&mut self) -> Result<(), gif::DecodingError> {
use gif::{ColorOutput, SetParameter};
let mut decoder = gif::Decoder::new(io::Cursor::new(self.source.clone()));
decoder.set(ColorOutput::RGBA);
let reader = try!(decoder.read_info());
self.width = reader.width() as u32;
self.height = reader.height() as u32;
self.decoder = Some(reader);
self.next_frame = 0;
self.current_frame = None;
Ok(())
}
fn load_frame(&mut self, num: usize) -> Option<&image::Frame> {
use image::{ImageBuffer, DynamicImage};
if self.decoder.is_none() || self.next_frame > num {
match self.setup_decoder() {
Ok(_) => {},
Err(_) => return None,
}
}
let mut reader = self.decoder.take().unwrap();
while self.next_frame < num {
match reader.next_frame_info() {
Ok(Some(_)) => {},
_ => return None,
}
self.next_frame += 1;
}
let (width, height, frame_buf) = match reader.read_next_frame() {
Ok(Some(frame)) => {
self.delay = frame.delay;
(
frame.width as u32,
frame.height as u32,
frame.buffer.clone().into_owned(),
)
},
_ => return None,
};
self.next_frame += 1;
let raw_buf = ImageBuffer::from_raw(width, height, frame_buf);
let buf = match raw_buf.map(|v| DynamicImage::ImageRgba8(v)) {
Some(buf) => buf,
None => return None,
};
self.decoder = Some(reader);
self.current_frame = Some(image::Frame::new(buf.to_rgba()));
self.current_frame.as_ref()
}
}<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! A 'simple' ARM emulator.
//!
//! At the moment the emulator only has support for a handful of THUMB-2 instructions
/// and no ARM-mode support.
use super::*;
pub use self::memory_tree::MemoryTree;
pub use self::ram::RAM;
pub use self::emu::SimpleEmulator;
pub use self::system::SimpleSystem;
pub mod memory_tree;
pub mod ram;
pub mod emu;
pub mod system;
/// Copy as much memory as possible from `src` to `dest`.
pub fn copy_memory(src: &[u8], dest: &mut [u8]) {
for x in 0.. {
if (x >= src.len()) || (x >= dest.len()) {
break
}
dest[x] = src[x]
}
}
fn swap_word(src: Word) -> Word {
let src = src as u32;
let src = (src >> 24)
| ((src >> 8) & 0xff00)
| ((src << 8) & 0xff0000)
| ((src << 24) & 0xff000000);
src as Word
}
fn adc32(a: Word, b: Word, c: Word) -> (Word, bool, bool) {<|fim▁hole|> let sa = a as i64;
let sb = b as i64;
let sc = c as i64;
let ua = (a as u32) as u64;
let ub = (b as u32) as u64;
let uc = (c as u32) as u64;
let us = ua.wrapping_add(ub).wrapping_add(uc);
let ss = sa.wrapping_add(sb).wrapping_add(sc);
let result = us as u32;
(result as i32,
(result as u64) != us,
((result as i32) as i64) != ss)
}
pub trait Memory {
fn read(&self, _addr: u64, _dest: &mut [u8]) -> Result<()> { Err(Error::Unknown(format!("not implemented"))) }
fn write(&self, _addr: u64, _src: &[u8]) -> Result<()> { Err(Error::Unknown(format!("not implemented"))) }
fn read_u8(&self, addr: u64) -> Result<u8> {
let mut data = [0u8];
try!(self.read(addr, &mut data));
Ok(data[0])
}
fn read_u16(&self, addr: u64) -> Result<u16> {
let mut data = [0u8;2];
try!(self.read(addr, &mut data));
Ok((data[0] as u16) | ((data[1] as u16) << 8))
}
fn read_u32(&self, addr: u64) -> Result<u32> {
let mut data = [0u8;4];
try!(self.read(addr, &mut data));
Ok((data[0] as u32)
| ((data[1] as u32) << 8)
| ((data[2] as u32) << 16)
| ((data[3] as u32) << 24))
}
fn write_u8(&self, addr: u64, val: u8) -> Result<()> {
self.write(addr, &[val])
}
fn write_u16(&self, addr: u64, val: u16) -> Result<()> {
self.write(addr, &[(val & 0xff) as u8,
((val >> 8) & 0xff) as u8])
}
fn write_u32(&self, addr: u64, val: u32) -> Result<()> {
self.write(addr, &[(val & 0xff) as u8,
((val >> 8) & 0xff) as u8,
((val >> 16) & 0xff) as u8,
((val >> 24) & 0xff) as u8])
}
}
pub trait System {
type Memory: Memory;
fn memory(&self) -> &Self::Memory;
}<|fim▁end|> | |
<|file_name|>SingleChoiceFragment.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*<|fim▁hole|> * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mardin.job.widgets.wizard.ui;
import com.mardin.job.R;
import com.mardin.job.widgets.wizard.model.Page;
import com.mardin.job.widgets.wizard.model.SingleFixedChoicePage;
import android.app.Activity;
import android.os.Bundle;
import android.os.Handler;
import android.support.v4.app.ListFragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.List;
public class SingleChoiceFragment extends ListFragment {
private static final String ARG_KEY = "key";
private PageFragmentCallbacks mCallbacks;
private List<String> mChoices;
private String mKey;
private Page mPage;
public static SingleChoiceFragment create(String key) {
Bundle args = new Bundle();
args.putString(ARG_KEY, key);
SingleChoiceFragment fragment = new SingleChoiceFragment();
fragment.setArguments(args);
return fragment;
}
public SingleChoiceFragment() {
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Bundle args = getArguments();
mKey = args.getString(ARG_KEY);
mPage = mCallbacks.onGetPage(mKey);
SingleFixedChoicePage fixedChoicePage = (SingleFixedChoicePage) mPage;
mChoices = new ArrayList<String>();
for (int i = 0; i < fixedChoicePage.getOptionCount(); i++) {
mChoices.add(fixedChoicePage.getOptionAt(i));
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View rootView = inflater.inflate(R.layout.fragment_page, container, false);
((TextView) rootView.findViewById(android.R.id.title)).setText(mPage.getTitle());
final ListView listView = (ListView) rootView.findViewById(android.R.id.list);
setListAdapter(new ArrayAdapter<String>(getActivity(),
android.R.layout.simple_list_item_single_choice,
android.R.id.text1,
mChoices));
listView.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
// Pre-select currently selected item.
new Handler().post(new Runnable() {
@Override
public void run() {
String selection = mPage.getData().getString(Page.SIMPLE_DATA_KEY);
for (int i = 0; i < mChoices.size(); i++) {
if (mChoices.get(i).equals(selection)) {
listView.setItemChecked(i, true);
break;
}
}
}
});
return rootView;
}
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
if (!(activity instanceof PageFragmentCallbacks)) {
throw new ClassCastException("Activity must implement PageFragmentCallbacks");
}
mCallbacks = (PageFragmentCallbacks) activity;
}
@Override
public void onDetach() {
super.onDetach();
mCallbacks = null;
}
@Override
public void onListItemClick(ListView l, View v, int position, long id) {
mPage.getData().putString(Page.SIMPLE_DATA_KEY,
getListAdapter().getItem(position).toString());
mPage.notifyDataChanged();
}
}<|fim▁end|> | * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software |
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | module.exports = require('./lib/knox'); |
<|file_name|>Initialization.cc<|end_file_name|><|fim▁begin|>/* Copyright (C) 2016, 2017 PISM Authors
*
* This file is part of PISM.
*
* PISM is free software; you can redistribute it and/or modify it under the<|fim▁hole|> * version.
*
* PISM is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License
* along with PISM; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "Initialization.hh"
#include "pism/util/pism_utilities.hh"
#include "pism/util/io/io_helpers.hh"
#include "pism/util/io/PIO.hh"
#include "pism/util/pism_options.hh"
namespace pism {
namespace ocean {
InitializationHelper::InitializationHelper(IceGrid::ConstPtr g, OceanModel* in)
: OceanModifier(g, in),
m_sea_level_metadata("effective_sea_level_elevation",
m_config->get_string("time.dimension_name"),
m_sys) {
m_melange_back_pressure_fraction.create(m_grid, "effective_melange_back_pressure_fraction",
WITHOUT_GHOSTS);
m_melange_back_pressure_fraction.set_attrs("model_state",
"effective melange back pressure fraction,"
" as seen by the ice dynamics code (for re-starting)",
"1", "");
m_shelf_base_temperature.create(m_grid, "effective_shelf_base_temperature", WITHOUT_GHOSTS);
m_shelf_base_temperature.set_attrs("model_state",
"effective shelf base temperature,"
" as seen by the ice dynamics code (for re-starting)",
"Kelvin", "");
m_shelf_base_mass_flux.create(m_grid, "effective_shelf_base_mass_flux", WITHOUT_GHOSTS);
m_shelf_base_mass_flux.set_attrs("model_state",
"effective shelf base mass flux"
" (positive flux is loss from ice shelf),"
" as seen by the ice dynamics code (for re-starting)",
"kg m-2 s-1", "");
m_shelf_base_mass_flux.metadata().set_string("glaciological_units", "kg m-2 year-1");
m_sea_level_elevation = 0.0;
m_sea_level_metadata.set_string("pism_intent", "model_state");
m_sea_level_metadata.set_string("units", "meters");
m_sea_level_metadata.set_string("long_name", "effective sea level elevation, "
"as seen by the ice dynamics code (for re-starting)");
}
void InitializationHelper::update_impl(double t, double dt) {
m_input_model->update(t, dt);
m_input_model->melange_back_pressure_fraction(m_melange_back_pressure_fraction);
m_input_model->shelf_base_mass_flux(m_shelf_base_mass_flux);
m_input_model->shelf_base_temperature(m_shelf_base_temperature);
m_sea_level_elevation = m_input_model->sea_level_elevation();
}
void InitializationHelper::init_impl() {
m_input_model->init();
InputOptions opts = process_input_options(m_grid->com);
if (opts.type == INIT_RESTART) {
m_log->message(2, "* Reading effective ocean model outputs from '%s' for re-starting...\n",
opts.filename.c_str());
PIO file(m_grid->com, "guess_mode", opts.filename, PISM_READONLY);
const unsigned int time_length = file.inq_nrecords();
const unsigned int last_record = time_length > 0 ? time_length - 1 : 0;
m_melange_back_pressure_fraction.read(file, last_record);
m_shelf_base_mass_flux.read(file, last_record);
m_shelf_base_temperature.read(file, last_record);
{
std::vector<double> data;
file.get_1d_var(m_sea_level_metadata.get_name(),
last_record, 1, // start, count
data);
m_sea_level_elevation = data[0];
}
file.close();
} else {
m_log->message(2, "* Performing a 'fake' ocean model time-step for bootstrapping...\n");
init_step(*this, *m_grid->ctx()->time());
}
// Support regridding. This is needed to ensure that initialization using "-i" is equivalent to
// "-i ... -bootstrap -regrid_file ..."
{
regrid("ocean model initialization helper", m_melange_back_pressure_fraction,
REGRID_WITHOUT_REGRID_VARS);
regrid("ocean model initialization helper", m_shelf_base_mass_flux,
REGRID_WITHOUT_REGRID_VARS);
regrid("ocean model initialization helper", m_shelf_base_temperature,
REGRID_WITHOUT_REGRID_VARS);
}
// FIXME: fake "regridding" of sea level
}
void InitializationHelper::melange_back_pressure_fraction_impl(IceModelVec2S &result) const {
result.copy_from(m_melange_back_pressure_fraction);
}
void InitializationHelper::sea_level_elevation_impl(double &result) const {
result = m_sea_level_elevation;
}
void InitializationHelper::shelf_base_temperature_impl(IceModelVec2S &result) const {
result.copy_from(m_shelf_base_temperature);
}
void InitializationHelper::shelf_base_mass_flux_impl(IceModelVec2S &result) const {
result.copy_from(m_shelf_base_mass_flux);
}
void InitializationHelper::define_model_state_impl(const PIO &output) const {
m_melange_back_pressure_fraction.define(output);
m_shelf_base_mass_flux.define(output);
m_shelf_base_temperature.define(output);
m_melange_back_pressure_fraction.define(output);
io::define_timeseries(m_sea_level_metadata, output, PISM_DOUBLE);
m_input_model->define_model_state(output);
}
void InitializationHelper::write_model_state_impl(const PIO &output) const {
m_melange_back_pressure_fraction.write(output);
m_shelf_base_mass_flux.write(output);
m_shelf_base_temperature.write(output);
m_melange_back_pressure_fraction.write(output);
const unsigned int
time_length = output.inq_dimlen(m_sea_level_metadata.get_dimension_name()),
t_start = time_length > 0 ? time_length - 1 : 0;
io::write_timeseries(output, m_sea_level_metadata, t_start, m_sea_level_elevation,
PISM_DOUBLE);
m_input_model->write_model_state(output);
}
} // end of namespace ocean
} // end of namespace pism<|fim▁end|> | * terms of the GNU General Public License as published by the Free Software
* Foundation; either version 3 of the License, or (at your option) any later |
<|file_name|>test_setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# system
import os
import sys
dir = os.path.split(os.path.split(os.path.split(os.path.realpath(__file__))[0])[0])[0]
sys.path.append(os.path.join(dir, 'scripts'))
# testing
import mock
import unittest
from mock import patch
# program
import setup.load as Config
import setup.database as DB
<|fim▁hole|>#
TEST_DATA = 'test_flood_portal_output.json'
class CheckConfigurationStructure(unittest.TestCase):
'''Unit tests for the configuration files.'''
def test_that_load_config_fails_gracefully(self):
assert Config.LoadConfig('xxx.json') == False
## Object type tests.
def test_config_is_list(self):
d = Config.LoadConfig(os.path.join(dir, 'config', 'dev.json'))
assert type(d) is dict
def test_config_returns_a_table_list(self):
d = Config.LoadConfig(os.path.join(dir, 'config', 'dev.json'))
assert type(d['database']) is list
def test_config_checks_api_key(self):
Config.LoadConfig(os.path.join(dir, 'config', 'dev.json'))
assert Config.LoadConfig(os.path.join(dir, 'tests', 'data', 'test_config.json')) == False
class CheckDatabaseCreation(unittest.TestCase):
'''Unit tests for the setting up the database.'''
## Structural tests.
def test_wrapper_database_function_works(self):
assert DB.Main() != False
## Failed config file.
def test_database_fail(self):
assert DB.CreateTables(config_path=os.path.join(dir, 'tests', 'data', 'test_database_fail.json')) == False
def test_that_odd_table_names_fail(self):
assert DB.CreateTables(config_path=os.path.join(dir, 'tests', 'data', 'test_fail_column_names.json')) == False<|fim▁end|> | #
# Global variables. |
<|file_name|>test_redis_url.py<|end_file_name|><|fim▁begin|>import redis_url
import unittest
class RedisUrlTestSuite(unittest.TestCase):
def test_redis_parse_localhost(self):
self.assertEqual(
redis_url.parse('redis://localhost:6379/0?cluster=false'),
{
'host': 'localhost',
'port': 6379,
'db': 0,
'password': None
}
)
def test_redis_parse_remote(self):
self.assertEqual(
redis_url.parse('redis://:138913@ec2-192-168-1-1.compute-1.amazon.aws.com:30001?cluster=false'),
{
'host': 'ec2-192-168-1-1.compute-1.amazon.aws.com',
'port': 30001,
'db': 0,
'password': '138913'
}
)
def test_redis_parse_cluster_localhost(self):
self.assertEqual(
redis_url.parse('redis://localhost:6379?cluster=true'),
{
'host': 'localhost',
'port': 6379,
'password': None
}<|fim▁hole|> def test_redis_parse_cluster_skip_full_coverage_check(self):
self.assertEqual(
redis_url.parse('redis://localhost:6379?cluster=true&skip_full_coverage_check=true'),
{
'host': 'localhost',
'port': 6379,
'password': None,
'skip_full_coverage_check': True,
}
)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | )
|
<|file_name|>2_.py<|end_file_name|><|fim▁begin|>from numpy import *
from cmlib import showMatr
A = matrix([[1, 2, 0],
[0, 2, 2]])
<|fim▁hole|>B = matrix([[3, -1],
[-1, 3],
[1, 0]])
res = (A * B).T
showMatr(array(res))<|fim▁end|> | |
<|file_name|>DataDriver.py<|end_file_name|><|fim▁begin|>from datetime import datetime
import time
import json
from Commit import Commit;
import Constant;
import collections
from yattag import Doc
def generateHTML(commits, projectName, commitData, fileExtensionMap):
totalAuthors = len(commitData)
generateBestAuthors(projectName, commitData)
generateFileByExtension(fileExtensionMap, projectName)
totalLines, totalLinesAdded, totalLinesDeleted = generateLinesByDate(commits, projectName)
totalFiles = generateFilesByDate(commits, projectName)
generateIndexHtml(projectName, totalLines, totalLinesAdded, totalLinesDeleted,
totalFiles, len(commits), totalAuthors)
def generateIndexHtml(projectName, totalLines, totalLinesAdded, totalLinesDeleted,
totalFiles, totalCommits, totalAuthors):
with open(Constant.INDEX_HTML_TEMPLATE, "rt") as fin:
with open(Constant.INDEX_HTML, "wt") as fout:
for line in fin:
if '$title' in line:
fout.write(line.replace('$title', projectName))
elif '$time' in line:
fout.write(line.replace('$time', time.strftime('%l:%M%p %Z on %b %d, %Y')))
elif '$files' in line:
fout.write(line.replace('$files', str(totalFiles)))
elif '$commits' in line:
fout.write(line.replace('$commits', str(totalCommits)))
elif '$totallines' in line:
fout.write(line.replace('$totallines', str(totalLines)))
elif '$linesadded' in line:
fout.write(line.replace('$linesadded', str(totalLinesAdded)))
elif '$linesdeleted' in line:
fout.write(line.replace('$linesdeleted', str(totalLinesDeleted)))
elif '$author' in line:
fout.write(line.replace('$author', str(totalAuthors)))
else:
fout.write(line)
def generateBestAuthors(projectName, commitData):
# Generate best author table
fields = ['author', 'commit_number', 'lines_added', 'lines_deleted']
doc, tag, text = Doc().tagtext()
with tag('table', ('class', 'table table-bordered table-condensed table-hover')):
with tag('tr'):
for i in range(len(fields)):
with tag('th'):
text(fields[i])
for commitdata in commitData:
with tag('tr'):
for i in range(len(fields)):
with tag('td', ('align', 'center')):
text(commitdata[i])
with open(Constant.BEST_AUTHORS_TEMPLATE, "rt") as fin:
with open(Constant.BEST_AUTHORS, "wt") as fout:
for line in fin:
if '$title' in line:
fout.write(line.replace('$title', projectName))
elif '$data' in line:
fout.write(line.replace('$data', doc.getvalue()))
else:
fout.write(line)
def generateLinesByDate(commits, projectName):
totalLines = 0
totalLinesAdded = 0
totalLinesDeleted = 0;
mydic = collections.OrderedDict()
for commit in reversed(commits):
dateKey = int(commit.date.strftime("%s")) * 1000
totalLinesAdded = totalLinesAdded + commit.linesAdded
totalLinesDeleted = totalLinesDeleted + commit.linesDeleted
linesDiff = commit.linesAdded - commit.linesDeleted;
totalLines = totalLines + linesDiff
if dateKey in mydic:
mydic[dateKey] = mydic[dateKey] + linesDiff
else:
mydic[dateKey] = totalLines + linesDiff
data = []
for item in mydic.items():
data.append([item[0], item[1]])
with open(Constant.LINES_BY_DATE_TEMPLATE, "rt") as fin:
with open(Constant.LINES_BY_DATE, "wt") as fout:
for line in fin:
if '$data' in line:
fout.write(line.replace('$data', str(data)))
elif '$title' in line:
fout.write(line.replace('$title', projectName))
else:
fout.write(line)
return totalLines, totalLinesAdded, totalLinesDeleted
def generateFilesByDate(commits, projectName):
totalFiles = 0
mydic = collections.OrderedDict()
for commit in reversed(commits):<|fim▁hole|> if dateKey in mydic:
mydic[dateKey] = mydic[dateKey] + filesDiff
else:
mydic[dateKey] = totalFiles + filesDiff
data = []
for item in mydic.items():
data.append([item[0], item[1]])
with open(Constant.FILES_BY_DATE_TEMPLATE, "rt") as fin:
with open(Constant.FILES_BY_DATE, "wt") as fout:
for line in fin:
if '$data' in line:
fout.write(line.replace('$data', str(data)))
elif '$title' in line:
fout.write(line.replace('$title', projectName))
else:
fout.write(line)
return totalFiles
def generateFileByExtension(fileExtensionMap, projectName):
exts = fileExtensionMap.keys()
data = fileExtensionMap.values()
totalFiles = sum(data)
threshold = int(totalFiles/200)
for ext in fileExtensionMap.keys():
if fileExtensionMap[ext] <= threshold:
if 'other' not in fileExtensionMap:
fileExtensionMap['other'] = 0
fileExtensionMap['other'] += fileExtensionMap[ext]
del fileExtensionMap[ext]
with open(Constant.FILES_BY_EXTENSION_TEMPLATE, "rt") as fin:
with open(Constant.FILES_BY_EXTENSION, "wt") as fout:
for line in fin:
if '$title' in line:
fout.write(line.replace('$title', projectName))
elif '$data' in line:
fout.write(line.replace('$data', '[' + ','.join(str(e) for e in fileExtensionMap.values()) + ']' ))
elif '$extensions' in line:
fout.write(line.replace('$extensions', json.dumps(fileExtensionMap.keys())))
else:
fout.write(line)<|fim▁end|> | dateKey = int(commit.date.strftime("%s")) * 1000
filesDiff = commit.filesAdded - commit.filesDeleted;
totalFiles = totalFiles + filesDiff |
<|file_name|>gfks_helptext.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-
logger.info("Loading 5 objects to table gfks_helptext...")
# fields: id, content_type, field, help_text
loader.save(create_gfks_helptext(1,contacts_Partner,u'language',u'Die Sprache, in der Dokumente ausgestellt werden sollen.'))
loader.save(create_gfks_helptext(2,gfks_HelpText,u'field',u'The name of the field.'))
loader.save(create_gfks_helptext(3,pcsw_Client,u'in_belgium_since',u'Since when this person in Belgium lives.\n<b>Important:</b> help_text can be formatted.'))
loader.save(create_gfks_helptext(4,pcsw_Client,u'noble_condition',u'The eventual noble condition of this person. Imported from TIM.'))
loader.save(create_gfks_helptext(5,contacts_Partner,u'language',u'Die Sprache, in der Dokumente ausgestellt werden sollen.'))<|fim▁hole|><|fim▁end|> |
loader.flush_deferred_objects() |
<|file_name|>autocomplete_attributes.js<|end_file_name|><|fim▁begin|>(function($) {
module("autocomplete: options");
test('setAttributes', function() {
var source = $('#autocomplete').autocomplete("option", "source");<|fim▁hole|>
var autoFocus = $('#autocomplete').autocomplete("option", "autoFocus");
equals(autoFocus, true, 'Boolean property');
var appendTo = $('#autocomplete').autocomplete("option", "appendTo");
equals(appendTo, '#destination', 'String property');
var minLength = $('#autocomplete').autocomplete("option", "minLength");
equals(minLength, 2, 'Number property');
});
})(jQuery);<|fim▁end|> | equals(source.join(','), ['ActionScript','AppleScript','Asp','BASIC','C','C++','Clojure','COBOL','ColdFusion','Erlang','Fortran','Groovy','Haskell','Java','JavaScript','Lisp','Perl','PHP','Python','Ruby','Scala','Scheme'].join(','), 'Array property'); |
<|file_name|>image.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Generic types for the handling of [images].
//!
//! [images]: https://drafts.csswg.org/css-images/#image-values
use custom_properties;
use servo_arc::Arc;
use std::fmt::{self, Write};
use style_traits::{CssWriter, ToCss};
use values::serialize_atom_identifier;
use Atom;
/// An [image].
///
/// [image]: https://drafts.csswg.org/css-images/#image-values
#[derive(Clone, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue)]
pub enum Image<Gradient, MozImageRect, ImageUrl> {
/// A `<url()>` image.
Url(ImageUrl),
/// A `<gradient>` image. Gradients are rather large, and not nearly as
/// common as urls, so we box them here to keep the size of this enum sane.
Gradient(Box<Gradient>),
/// A `-moz-image-rect` image. Also fairly large and rare.
Rect(Box<MozImageRect>),
/// A `-moz-element(# <element-id>)`
#[css(function = "-moz-element")]
Element(Atom),
/// A paint worklet image.
/// <https://drafts.css-houdini.org/css-paint-api/>
#[cfg(feature = "servo")]
PaintWorklet(PaintWorklet),
}
/// A CSS gradient.
/// <https://drafts.csswg.org/css-images/#gradients>
#[derive(Clone, Debug, MallocSizeOf, PartialEq, ToComputedValue)]
pub struct Gradient<LineDirection, Length, LengthOrPercentage, Position, Color, Angle> {
/// Gradients can be linear or radial.
pub kind: GradientKind<LineDirection, Length, LengthOrPercentage, Position, Angle>,
/// The color stops and interpolation hints.
pub items: Vec<GradientItem<Color, LengthOrPercentage>>,
/// True if this is a repeating gradient.
pub repeating: bool,
/// Compatibility mode.
pub compat_mode: CompatMode,
}
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, ToComputedValue)]
/// Whether we used the modern notation or the compatibility `-webkit`, `-moz` prefixes.
pub enum CompatMode {
/// Modern syntax.
Modern,
/// `-webkit` prefix.<|fim▁hole|> /// `-moz` prefix
Moz,
}
/// A gradient kind.
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, ToComputedValue)]
pub enum GradientKind<LineDirection, Length, LengthOrPercentage, Position, Angle> {
/// A linear gradient.
Linear(LineDirection),
/// A radial gradient.
Radial(
EndingShape<Length, LengthOrPercentage>,
Position,
Option<Angle>,
),
}
/// A radial gradient's ending shape.
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, ToComputedValue, ToCss)]
pub enum EndingShape<Length, LengthOrPercentage> {
/// A circular gradient.
Circle(Circle<Length>),
/// An elliptic gradient.
Ellipse(Ellipse<LengthOrPercentage>),
}
/// A circle shape.
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, ToComputedValue)]
pub enum Circle<Length> {
/// A circle radius.
Radius(Length),
/// A circle extent.
Extent(ShapeExtent),
}
/// An ellipse shape.
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, ToComputedValue, ToCss)]
pub enum Ellipse<LengthOrPercentage> {
/// An ellipse pair of radii.
Radii(LengthOrPercentage, LengthOrPercentage),
/// An ellipse extent.
Extent(ShapeExtent),
}
/// <https://drafts.csswg.org/css-images/#typedef-extent-keyword>
#[allow(missing_docs)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[derive(Clone, Copy, Debug, Eq, MallocSizeOf, Parse, PartialEq, ToComputedValue, ToCss)]
pub enum ShapeExtent {
ClosestSide,
FarthestSide,
ClosestCorner,
FarthestCorner,
Contain,
Cover,
}
/// A gradient item.
/// <https://drafts.csswg.org/css-images-4/#color-stop-syntax>
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, ToComputedValue, ToCss)]
pub enum GradientItem<Color, LengthOrPercentage> {
/// A color stop.
ColorStop(ColorStop<Color, LengthOrPercentage>),
/// An interpolation hint.
InterpolationHint(LengthOrPercentage),
}
/// A color stop.
/// <https://drafts.csswg.org/css-images/#typedef-color-stop-list>
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, ToComputedValue, ToCss)]
pub struct ColorStop<Color, LengthOrPercentage> {
/// The color of this stop.
pub color: Color,
/// The position of this stop.
pub position: Option<LengthOrPercentage>,
}
/// Specified values for a paint worklet.
/// <https://drafts.css-houdini.org/css-paint-api/>
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
#[derive(Clone, Debug, PartialEq, ToComputedValue)]
pub struct PaintWorklet {
/// The name the worklet was registered with.
pub name: Atom,
/// The arguments for the worklet.
/// TODO: store a parsed representation of the arguments.
#[cfg_attr(feature = "servo", ignore_malloc_size_of = "Arc")]
pub arguments: Vec<Arc<custom_properties::SpecifiedValue>>,
}
impl ::style_traits::SpecifiedValueInfo for PaintWorklet {}
impl ToCss for PaintWorklet {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
dest.write_str("paint(")?;
serialize_atom_identifier(&self.name, dest)?;
for argument in &self.arguments {
dest.write_str(", ")?;
argument.to_css(dest)?;
}
dest.write_str(")")
}
}
/// Values for `moz-image-rect`.
///
/// `-moz-image-rect(<uri>, top, right, bottom, left);`
#[allow(missing_docs)]
#[css(comma, function)]
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue, ToCss)]
pub struct MozImageRect<NumberOrPercentage, MozImageRectUrl> {
pub url: MozImageRectUrl,
pub top: NumberOrPercentage,
pub right: NumberOrPercentage,
pub bottom: NumberOrPercentage,
pub left: NumberOrPercentage,
}
impl<G, R, U> fmt::Debug for Image<G, R, U>
where
G: ToCss,
R: ToCss,
U: ToCss,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.to_css(&mut CssWriter::new(f))
}
}
impl<G, R, U> ToCss for Image<G, R, U>
where
G: ToCss,
R: ToCss,
U: ToCss,
{
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
match *self {
Image::Url(ref url) => url.to_css(dest),
Image::Gradient(ref gradient) => gradient.to_css(dest),
Image::Rect(ref rect) => rect.to_css(dest),
#[cfg(feature = "servo")]
Image::PaintWorklet(ref paint_worklet) => paint_worklet.to_css(dest),
Image::Element(ref selector) => {
dest.write_str("-moz-element(#")?;
serialize_atom_identifier(selector, dest)?;
dest.write_str(")")
},
}
}
}
impl<D, L, LoP, P, C, A> ToCss for Gradient<D, L, LoP, P, C, A>
where
D: LineDirection,
L: ToCss,
LoP: ToCss,
P: ToCss,
C: ToCss,
A: ToCss,
{
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
match self.compat_mode {
CompatMode::WebKit => dest.write_str("-webkit-")?,
CompatMode::Moz => dest.write_str("-moz-")?,
_ => {},
}
if self.repeating {
dest.write_str("repeating-")?;
}
dest.write_str(self.kind.label())?;
dest.write_str("-gradient(")?;
let mut skip_comma = match self.kind {
GradientKind::Linear(ref direction) if direction.points_downwards(self.compat_mode) => {
true
},
GradientKind::Linear(ref direction) => {
direction.to_css(dest, self.compat_mode)?;
false
},
GradientKind::Radial(ref shape, ref position, ref angle) => {
let omit_shape = match *shape {
EndingShape::Ellipse(Ellipse::Extent(ShapeExtent::Cover)) |
EndingShape::Ellipse(Ellipse::Extent(ShapeExtent::FarthestCorner)) => true,
_ => false,
};
if self.compat_mode == CompatMode::Modern {
if !omit_shape {
shape.to_css(dest)?;
dest.write_str(" ")?;
}
dest.write_str("at ")?;
position.to_css(dest)?;
} else {
position.to_css(dest)?;
if let Some(ref a) = *angle {
dest.write_str(" ")?;
a.to_css(dest)?;
}
if !omit_shape {
dest.write_str(", ")?;
shape.to_css(dest)?;
}
}
false
},
};
for item in &self.items {
if !skip_comma {
dest.write_str(", ")?;
}
skip_comma = false;
item.to_css(dest)?;
}
dest.write_str(")")
}
}
impl<D, L, LoP, P, A> GradientKind<D, L, LoP, P, A> {
fn label(&self) -> &str {
match *self {
GradientKind::Linear(..) => "linear",
GradientKind::Radial(..) => "radial",
}
}
}
/// The direction of a linear gradient.
pub trait LineDirection {
/// Whether this direction points towards, and thus can be omitted.
fn points_downwards(&self, compat_mode: CompatMode) -> bool;
/// Serialises this direction according to the compatibility mode.
fn to_css<W>(&self, dest: &mut CssWriter<W>, compat_mode: CompatMode) -> fmt::Result
where
W: Write;
}
impl<L> ToCss for Circle<L>
where
L: ToCss,
{
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
match *self {
Circle::Extent(ShapeExtent::FarthestCorner) | Circle::Extent(ShapeExtent::Cover) => {
dest.write_str("circle")
},
Circle::Extent(keyword) => {
dest.write_str("circle ")?;
keyword.to_css(dest)
},
Circle::Radius(ref length) => length.to_css(dest),
}
}
}<|fim▁end|> | WebKit, |
<|file_name|>test_config.js<|end_file_name|><|fim▁begin|>const fs = require('fs');
const p = require('path');
// walk $PATH to find bin
const which = (bin) => {
const path = process.env.PATH.split(p.delimiter);
let file = '';
path.find((v) => {
const testPath = v + p.sep + bin;
if (fs.existsSync(testPath)) {
file = testPath;
return true;
}
return false;
});
return file;
};
const config = {
removeInfected: false, // don't change
quarantineInfected: `${__dirname}/infected`, // required for testing
// scanLog: `${__dirname}/clamscan-log`, // not required<|fim▁hole|> socket: '/var/run/clamd.scan/clamd.sock', // - can be set to null
host: '127.0.0.1', // required for testing (change for your system) - can be set to null
port: 3310, // required for testing (change for your system) - can be set to null
path: which('clamdscan'), // required for testing
timeout: 1000,
localFallback: false,
// configFile: '/etc/clamd.d/scan.conf' // set if required
},
// preference: 'clamdscan', // not used if socket/host+port is provided
debugMode: false,
};
// Force specific socket when on GitHub Actions
if (process.env.CI) config.clamdscan.socket = '/var/run/clamav/clamd.ctl';
module.exports = config;<|fim▁end|> | clamscan: {
path: which('clamscan'), // required for testing
},
clamdscan: { |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import { AzureFunction, Context } from '@azure/functions'
import { performance } from 'perf_hooks'
import { ListSchoolsService } from './list-schools-service'
import { IFunctionTimer } from '../../azure/functions'
const functionName = 'ps-report-1-list-schools'
const timerTrigger: AzureFunction = async function (context: Context, timer: IFunctionTimer): Promise<void> {
if (timer.isPastDue) {
// This function could potentially deliver a lot of work to do to the functions, and none of it is urgent. No surprises!
context.log(`${functionName}: timer is past due, exiting.`)
return
}
const start = performance.now()
const meta = { processCount: 0, errorCount: 0 }
try {
const schoolListService = new ListSchoolsService(context.log)
const messages = await schoolListService.getSchoolMessages()
context.bindings.schoolMessages = messages
meta.processCount = messages.length
} catch (error) {<|fim▁hole|> throw error
}
const end = performance.now()
const durationInMilliseconds = end - start
const timeStamp = new Date().toISOString()
context.log(`${functionName}: ${timeStamp} processed ${meta.processCount} records, run took ${durationInMilliseconds} ms`)
}
export default timerTrigger<|fim▁end|> | context.log.error(`${functionName}: ERROR: ${error.message}`) |
<|file_name|>commands.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from .database import BaseMessage
from .records import RecordUpdateMessage, RecordDeleteMessage, RecordCreateMessage
from ..exceptions import PyOrientBadMethodCallException
from ..constants import COMMAND_OP, FIELD_BOOLEAN, FIELD_BYTE, FIELD_CHAR, \
FIELD_INT, FIELD_LONG, FIELD_SHORT, FIELD_STRING, QUERY_SYNC, FIELD_BYTES, \
TX_COMMIT_OP, QUERY_GREMLIN, QUERY_ASYNC, QUERY_CMD, QUERY_TYPES, \
QUERY_SCRIPT
from ..utils import need_connected, need_db_opened, dlog
__author__ = 'Ostico <ostico@gmail.com>'
#
# COMMAND_OP
#
# Executes remote commands:
#
# Request: (mode:byte)(class-name:string)(command-payload-length:int)(command-payload)
# Response:
# - synchronous commands: [(synch-result-type:byte)[(synch-result-content:?)]]+
# - asynchronous commands: [(asynch-result-type:byte)[(asynch-result-content:?)]*]
# (pre-fetched-record-size.md)[(pre-fetched-record)]*+
#
# Where the request:
#
# mode can be 'a' for asynchronous mode and 's' for synchronous mode
# class-name is the class name of the command implementation.
# There are short form for the most common commands:
# q stands for query as idempotent command. It's like passing
# com.orientechnologies.orient.core.sql.query.OSQLSynchQuery
# c stands for command as non-idempotent command (insert, update, etc).
# It's like passing com.orientechnologies.orient.core.sql.OCommandSQL
# s stands for script. It's like passing
# com.orientechnologies.orient.core.command.script.OCommandScript.
# Script commands by using any supported server-side scripting like Javascript command. Since v1.0.
# any other values is the class name. The command will be created via
# reflection using the default constructor and invoking the fromStream() method against it
# command-payload is the command's serialized payload (see Network-Binary-Protocol-Commands)
# Response is different for synchronous and asynchronous request:
# synchronous:
# synch-result-type can be:
# 'n', means null result
# 'r', means single record returned
# 'l', collection of records. The format is:
# an integer to indicate the collection size
# all the records one by one
# 'a', serialized result, a byte[] is sent
# synch-result-content, can only be a record
# pre-fetched-record-size, as the number of pre-fetched records not
# directly part of the result set but joined to it by fetching
# pre-fetched-record as the pre-fetched record content
# asynchronous:
# asynch-result-type can be:
# 0: no records remain to be fetched
# 1: a record is returned as a resultset
# 2: a record is returned as pre-fetched to be loaded in client's cache only.
# It's not part of the result set but the client knows that it's available for later access
# asynch-result-content, can only be a record
#
class CommandMessage(BaseMessage):
def __init__(self, _orient_socket):
super(CommandMessage, self).__init__(_orient_socket)
self._query = ''
self._limit = 20
self._fetch_plan = '*:0'
self._command_type = QUERY_SYNC
self._mod_byte = 's'
self._append((FIELD_BYTE, COMMAND_OP))
@need_db_opened
def prepare(self, params=None):
if isinstance(params, tuple) or isinstance(params, list):
try:
self.set_command_type(params[0])
self._query = params[1]
self._limit = params[2]
self._fetch_plan = params[3]
# callback function use to operate
# over the async fetched records
self.set_callback(params[4])
except IndexError:
# Use default for non existent indexes
pass
if self._command_type == QUERY_CMD \
or self._command_type == QUERY_SYNC \
or self._command_type == QUERY_SCRIPT \
or self._command_type == QUERY_GREMLIN:
self._mod_byte = 's'
else:
if self._callback is None:
raise PyOrientBadMethodCallException("No callback was provided.", [])
self._mod_byte = 'a'
_payload_definition = [
(FIELD_STRING, self._command_type),
(FIELD_STRING, self._query)
]
if self._command_type == QUERY_ASYNC \
or self._command_type == QUERY_SYNC \
or self._command_type == QUERY_GREMLIN:
# a limit specified in a sql string should always override a
# limit parameter pass to prepare()
if ' LIMIT ' not in self._query.upper() or self._command_type == QUERY_GREMLIN:
_payload_definition.append((FIELD_INT, self._limit))
else:
_payload_definition.append((FIELD_INT, -1))
_payload_definition.append((FIELD_STRING, self._fetch_plan))
if self._command_type == QUERY_SCRIPT:
_payload_definition.insert(1, (FIELD_STRING, 'sql'))
_payload_definition.append((FIELD_INT, 0))
payload = b''.join(
self._encode_field(x) for x in _payload_definition
)
self._append((FIELD_BYTE, self._mod_byte))
self._append((FIELD_STRING, payload))
return super(CommandMessage, self).prepare()
def fetch_response(self):
# skip execution in case of transaction
if self._orientSocket.in_transaction is True:
return self
# decode header only
super(CommandMessage, self).fetch_response()
if self._command_type == QUERY_ASYNC:
self._read_async_records()
else:
return self._read_sync()
def set_command_type(self, _command_type):
if _command_type in QUERY_TYPES:
# user choice if present
self._command_type = _command_type
else:
raise PyOrientBadMethodCallException(
_command_type + ' is not a valid command type', []
)
return self
def set_fetch_plan(self, _fetch_plan):
self._fetch_plan = _fetch_plan
return self
def set_query(self, _query):
self._query = _query
return self
def set_limit(self, _limit):
self._limit = _limit
return self
def _read_sync(self):
# type of response
# decode body char with flag continue ( Header already read )
response_type = self._decode_field(FIELD_CHAR)
if not isinstance(response_type, str):
response_type = response_type.decode()
res = []
if response_type == 'n':
self._append(FIELD_CHAR)
super(CommandMessage, self).fetch_response(True)
# end Line \x00
return None
elif response_type == 'r' or response_type == 'w':
res = [self._read_record()]
self._append(FIELD_CHAR)
# end Line \x00
_res = super(CommandMessage, self).fetch_response(True)
if response_type == 'w':
res = [res[0].oRecordData['result']]
elif response_type == 'a':
self._append(FIELD_STRING)
self._append(FIELD_CHAR)
res = [super(CommandMessage, self).fetch_response(True)[0]]
elif response_type == 'l':
self._append(FIELD_INT)
list_len = super(CommandMessage, self).fetch_response(True)[0]
for n in range(0, list_len):
res.append(self._read_record())
# async-result-type can be:
# 0: no records remain to be fetched
# 1: a record is returned as a result set
# 2: a record is returned as pre-fetched to be loaded in client's
# cache only. It's not part of the result set but the client
# knows that it's available for later access
cached_results = self._read_async_records()
# cache = cached_results['cached']
else:
# this should be never happen, used only to debug the protocol
msg = b''
self._orientSocket._socket.setblocking(0)
m = self._orientSocket.read(1)
while m != "":
msg += m
m = self._orientSocket.read(1)
return res
def set_callback(self, func):
if hasattr(func, '__call__'):
self._callback = func
else:
raise PyOrientBadMethodCallException(func + " is not a callable "
"function", [])
return self
#
# TX COMMIT
#
# Commits a transaction. This operation flushes all the
# pending changes to the server side.
#
# Request: (tx-id:int)(using-tx-log:byte)(tx-entry)*(0-byte indicating end-of-records)
# tx-entry: (operation-type:byte)(cluster-id:short)
# (cluster-position:long)(record-type:byte)(entry-content)
#
# entry-content for CREATE: (record-content:bytes)
# entry-content for UPDATE: (version:record-version)(content-changed:boolean)(record-content:bytes)
# entry-content for DELETE: (version:record-version)
# Response: (created-record-count:int)[(client-specified-cluster-id:short)
# (client-specified-cluster-position:long)(created-cluster-id:short)
# (created-cluster-position:long)]*(updated-record-count:int)[(updated-cluster-id:short)
# (updated-cluster-position:long)(new-record-version:int)]*(count-of-collection-changes:int)
# [(uuid-most-sig-bits:long)(uuid-least-sig-bits:long)(updated-file-id:long)(updated-page-index:long)
# (updated-page-offset:int)]*
#
# Where:
# tx-id is the Transaction's Id
# use-tx-log tells if the server must use the Transaction
# Log to recover the transaction. 1 = true, 0 = false
# operation-type can be:
# 1, for UPDATES
# 2, for DELETES
# 3, for CREATIONS
#
# record-content depends on the operation type:
# For UPDATED (1): (original-record-version:int)(record-content:bytes)
# For DELETED (2): (original-record-version:int)
# For CREATED (3): (record-content:bytes)
#
# This response contains two parts: a map of 'temporary' client-generated
# record ids to 'real' server-provided record ids for each CREATED record,
# and a map of UPDATED record ids to update record-versions.
#
# Look at Optimistic Transaction to know how temporary RecordIDs are managed.
#
# The last part or response is referred to RidBag management.
# Take a look at the main page for more details.
class _TXCommitMessage(BaseMessage):
def __init__(self, _orient_socket):
super(_TXCommitMessage, self).__init__(_orient_socket)
self._tx_id = -1
self._operation_stack = []
self._pre_operation_records = {}
self._operation_records = {}
self._temp_cluster_position_seq = -2
# order matters
self._append((FIELD_BYTE, TX_COMMIT_OP))
self._command = TX_COMMIT_OP
@need_connected
def prepare(self, params=None):
self._append((FIELD_INT, self.get_transaction_id()))
self._append((FIELD_BOOLEAN, True))
for k, v in enumerate(self._operation_stack):
self._append((FIELD_BYTE, chr(1))) # start of records
for field in v:
self._append(field)
self._append((FIELD_BYTE, chr(0)))
self._append((FIELD_STRING, ""))
return super(_TXCommitMessage, self).prepare()
def send(self):
return super(_TXCommitMessage, self).send()
def fetch_response(self):
# self.dump_streams()
super(_TXCommitMessage, self).fetch_response()
result = {
'created': [],
'updated': [],
'changes': []
}
items = self._decode_field(FIELD_INT)
for x in range(0, items):
# (created-record-count:int)
# [
# (client-specified-cluster-id:short)
# (client-specified-cluster-position:long)
# (created-cluster-id:short)
# (created-cluster-position:long)
# ]*
result['created'].append(
{
'client_c_id': self._decode_field(FIELD_SHORT),
'client_c_pos': self._decode_field(FIELD_LONG),
'created_c_id': self._decode_field(FIELD_SHORT),
'created_c_pos': self._decode_field(FIELD_LONG)
}
)
operation = self._pre_operation_records[
str(result['created'][-1]['client_c_pos'])
]
rid = "#" + str(result['created'][-1]['created_c_id']) + \
":" + str(result['created'][-1]['created_c_pos'])
record = getattr(operation, "_record_content")
record.update(__version=1, __rid=rid)
self._operation_records[rid] = record
items = self._decode_field(FIELD_INT)
for x in range(0, items):
# (updated-record-count:int)
# [
# (updated-cluster-id:short)
# (updated-cluster-position:long)
# (new-record-version:int)
# ]*
result['updated'].append(
{
'updated_c_id': self._decode_field(FIELD_SHORT),
'updated_c_pos': self._decode_field(FIELD_LONG),
'new_version': self._decode_field(FIELD_INT),
}
)
try:
operation = self._pre_operation_records[
str(result['updated'][-1]['updated_c_pos'])
]
record = getattr(operation, "_record_content")
rid = "#" + str(result['updated'][-1]['updated_c_id']) + \
":" + str(result['updated'][-1]['updated_c_pos'])
record.update(
__version=result['updated'][-1]['new_version'],
__rid=rid
)
self._operation_records[rid] = record
except KeyError:
pass
if self.get_protocol() > 23:
items = self._decode_field(FIELD_INT)
for x in range(0, items):
# (count-of-collection-changes:int)
# [
# (uuid-most-sig-bits:long)
# (uuid-least-sig-bits:long)
# (updated-file-id:long)
# (updated-page-index:long)
# (updated-page-offset:int)
# ]*
result['updated'].append(
{
'uuid_high': self._decode_field(FIELD_LONG),
'uuid_low': self._decode_field(FIELD_LONG),
'file_id': self._decode_field(FIELD_LONG),
'page_index': self._decode_field(FIELD_LONG),
'page_offset': self._decode_field(FIELD_INT),
}
)
self.dump_streams()
return self._operation_records # [self._operation_records, result]
def attach(self, operation):
if not isinstance(operation, BaseMessage):
# A Subclass of BaseMessage was expected
raise AssertionError("A subclass of BaseMessage was expected")
if isinstance(operation, RecordUpdateMessage):
o_record_enc = self.get_serializer().encode(getattr(operation, "_record_content"))
self._operation_stack.append((
(FIELD_BYTE, chr(1)),
(FIELD_SHORT, int(getattr(operation, "_cluster_id"))),
(FIELD_LONG, int(getattr(operation, "_cluster_position"))),
(FIELD_BYTE, getattr(operation, "_record_type")),
(FIELD_INT, int(getattr(operation, "_record_version"))),
(FIELD_STRING, o_record_enc),
))
if self.get_protocol() >= 23:
self._operation_stack[-1] = \
self._operation_stack[-1] + \
((FIELD_BOOLEAN, bool(getattr(operation, "_update_content"))),)
self._pre_operation_records[
str(getattr(operation, "_cluster_position"))
] = operation
elif isinstance(operation, RecordDeleteMessage):
self._operation_stack.append((
(FIELD_BYTE, chr(2)),
(FIELD_SHORT, int(getattr(operation, "_cluster_id"))),
(FIELD_LONG, int(getattr(operation, "_cluster_position"))),
(FIELD_BYTE, getattr(operation, "_record_type")),
(FIELD_INT, int(getattr(operation, "_record_version"))),
))
elif isinstance(operation, RecordCreateMessage):
o_record_enc = self.get_serializer().encode(getattr(operation, "_record_content"))
self._operation_stack.append((
(FIELD_BYTE, chr(3)),
(FIELD_SHORT, int(-1)),
(FIELD_LONG, int(self._temp_cluster_position_seq)),
(FIELD_BYTE, getattr(operation, "_record_type")),
(FIELD_STRING, o_record_enc),
))
self._pre_operation_records[
str(self._temp_cluster_position_seq)
] = operation
self._temp_cluster_position_seq -= 1
else:
raise PyOrientBadMethodCallException(
"Wrong command type " + operation.__class__.__name__, []
)
return self
def get_transaction_id(self):
if self._tx_id < 0:
from datetime import datetime
my_epoch = datetime(2014, 7, 1)
now = datetime.now()
delta = now - my_epoch
# write in extended mode to make it easy to read
# seconds * 1000000 to get the equivalent microseconds
_sm = (delta.seconds + delta.days * 24 * 3600) * 10 ** 6
_ms = delta.microseconds
_mstime = _sm + _ms
# remove sign
# treat as unsigned even when the INT is signed
# and take 4 Bytes
# ( 32 bit uniqueness is not ensured in any way,
# but is surely unique in this session )
# we need only a transaction unique for this session
# not a real UUID
if _mstime & 0x80000000:
self._tx_id = int((_mstime - 0x80000000) & 0xFFFFFFFF)
else:
self._tx_id = int(_mstime & 0xFFFFFFFF)
return self._tx_id
def begin(self):
self._operation_stack = []
self._pre_operation_records = {}
self._operation_records = {}
self._temp_cluster_position_seq = -2
self._orientSocket.in_transaction = True
self.get_transaction_id()
return self
def commit(self):
self._orientSocket.in_transaction = False
result = self.prepare().send().fetch_response()
self._operation_stack = []
self._pre_operation_records = {}
self._operation_records = {}
self._tx_id = -1
self._temp_cluster_position_seq = -2
return result
def rollback(self):
self._operation_stack = []
self._pre_operation_records = {}
self._operation_records = {}
self._tx_id = -1
self._temp_cluster_position_seq = -2
self._orientSocket.in_transaction = False
return self
#
# TX COMMIT facade
#
class TxCommitMessage:
def __init__(self, _orient_socket):
self._transaction = _TXCommitMessage(_orient_socket)<|fim▁hole|> def attach(self, operation):
self._transaction.attach(operation)
return self
def begin(self):
self._transaction.begin()
return self
def commit(self):
return self._transaction.commit()
def rollback(self):
return self._transaction.rollback()
def set_session_token(self, token):
self._transaction.set_session_token(token)
return self<|fim▁end|> | pass
|
<|file_name|>mercurial.py<|end_file_name|><|fim▁begin|>from mercurial import cmdutil
_hgignore_content = """\
syntax: glob
*~
*.pyc
*.pyo
*.bak
cache/*
databases/*
sessions/*
errors/*
"""
def commit():
app = request.args[0]
path = apath(app, r=request)
uio = ui.ui()
uio.quiet = True
if not os.environ.get('HGUSER') and not uio.config("ui", "username"):<|fim▁hole|> r = hg.repository(ui=uio, path=path, create=True)
hgignore = os.path.join(path, '.hgignore')
if not os.path.exists(hgignore):
open(hgignore, 'w').write(_hgignore_content)
form = FORM('Comment:',INPUT(_name='comment',requires=IS_NOT_EMPTY()),
INPUT(_type='submit',_value='Commit'))
if form.accepts(request.vars,session):
oldid = r[r.lookup('.')]
cmdutil.addremove(r)
r.commit(text=form.vars.comment)
if r[r.lookup('.')] == oldid:
response.flash = 'no changes'
files = r[r.lookup('.')].files()
return dict(form=form,files=TABLE(*[TR(file) for file in files]),repo=r)<|fim▁end|> | os.environ['HGUSER'] = 'web2py@localhost'
try:
r = hg.repository(ui=uio, path=path)
except: |
<|file_name|>test_pqkmeans.py<|end_file_name|><|fim▁begin|>import unittest
import pqkmeans
import numpy
import collections
import pickle
class TestPQKMeans(unittest.TestCase):
def data_source(self, n: int):
for i in range(n):
yield [i * 100] * 6
def setUp(self):
# Train PQ encoder
self.encoder = pqkmeans.encoder.PQEncoder(num_subdim=3, Ks=20)
self.encoder.fit(numpy.array(list(self.data_source(200))))
def test_just_construction(self):
pqkmeans.clustering.PQKMeans(encoder=self.encoder, k=5, iteration=10, verbose=False)
def test_fit_and_predict(self):
engine = pqkmeans.clustering.PQKMeans(encoder=self.encoder, k=2, iteration=10, verbose=False)
codes = self.encoder.transform(numpy.array(list(self.data_source(100))))
predicted = engine.fit_predict(codes)
count = collections.defaultdict(int)
for cluster in predicted:
count[cluster] += 1
# roughly balanced clusters
self.assertGreaterEqual(min(count.values()), max(count.values()) * 0.7)
a = engine.predict(codes[0:1, :])
b = engine.predict(codes[0:1, :])
self.assertEqual(a, b)
def test_cluster_centers_are_really_nearest(self):
engine = pqkmeans.clustering.PQKMeans(encoder=self.encoder, k=2, iteration=10, verbose=False)<|fim▁hole|> codes = self.encoder.transform(numpy.array(list(self.data_source(100))))
fit_predicted = engine.fit_predict(codes)
cluster_centers = numpy.array(engine.cluster_centers_, dtype=numpy.uint8)
predicted = engine.predict(codes)
self.assertTrue((fit_predicted == predicted).all())
# Reconstruct the original vectors
codes_decoded = self.encoder.inverse_transform(codes)
cluster_centers_decoded = self.encoder.inverse_transform(cluster_centers)
for cluster, code_decoded in zip(predicted, codes_decoded):
other_cluster = (cluster + 1) % max(predicted)
self.assertLessEqual(
numpy.linalg.norm(cluster_centers_decoded[cluster] - code_decoded),
numpy.linalg.norm(cluster_centers_decoded[other_cluster] - code_decoded)
)
def test_constructor_with_cluster_center(self):
# Run pqkmeans first.
engine = pqkmeans.clustering.PQKMeans(encoder=self.encoder, k=5, iteration=10, verbose=False)
codes = self.encoder.transform(numpy.array(list(self.data_source(100))))
fit_predicted = engine.fit_predict(codes)
cluster_centers = numpy.array(engine.cluster_centers_, dtype=numpy.uint8)
predicted = engine.predict(codes)
# save current engine and recover from savedata
engine_savedata = pickle.dumps(engine)
engine_recovered = pickle.loads(engine_savedata)
fit_predicted_from_recovered_obj = engine_recovered.predict(codes)
numpy.testing.assert_array_equal(predicted, fit_predicted_from_recovered_obj)<|fim▁end|> | |
<|file_name|>server.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | export * from "./index"
export { buildServerApp, ServerAppResolve } from "./buildServerApp" |
<|file_name|>0002_notification_queued.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-04 12:05
from __future__ import unicode_literals
<|fim▁hole|>
class Migration(migrations.Migration):
dependencies = [
('main', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='notification',
name='queued',
field=models.BooleanField(db_index=True, default=False),
),
]<|fim▁end|> | from django.db import migrations, models
|
<|file_name|>partner-alias-selection.js<|end_file_name|><|fim▁begin|>$(function() {
/* ------------ 合作伙伴信息 ------------ */
$( "#partner_id" ).combobox( {
url : ksa.buildUrl( "/data/combo", "bd-partner-all" ),
onSelect : function( record ) {
$grid.datagrid( "load", { id : record.id } );
}
} );
// 确认选择
$("#btn_ok").click( function() {
var results = $("#extra_grid").datagrid("getSelected");
parent.$.close( results );
return false;
});
// 添加确认
$("#btn_extra_ok").click( function() {
$("#btn_extra_ok").attr("disabled", "disabled");
var extra = $("#extra").val();
if( ! extra ) {
top.$.messager.warning("请输入新建的抬头信息。");
$("#btn_extra_ok").attr("disabled", null );
return false;
} else {
// 保存
$.ajax({
url: ksa.buildUrl( "/component/bd", "partner-alias-insert" ),
data: { "partner.id" : $("#partner_id").combobox("getValue"), extra : extra },
success: function( result ) {
try {
if (result.status == "success") { // 添加成功
parent.$.close( extra );
return false;
}
else { $.messager.error( result.message ); $("#btn_extra_ok").attr("disabled", null ); }
} catch (e) { $("#btn_extra_ok").attr("disabled", null ); }
}
});
}
} );
// 添加关闭
$("#btn_extra_close").click( function() {
$("#extra_window").window("close");
} );
// 单位别名
var NEW_LINE = "\n";
$.fn.datagrid.defaults.loadEmptyMsg = '<span class="label label-warning">注意</span> 没有获取到任何数据,请选择新的合作单位。';
var $grid = $('#extra_grid').datagrid({
title : '抬头信息:' + PARTNER_NAME,
url: ksa.buildUrl( "/data/grid", "bd-partner-extra" ),
pagination : false,
queryParams : {
id : $("#partner_id").combobox("getValue")
},
fit : true,
onDblClickRow : function( i, data ) {
parent.$.close( data );
return false;
},
columns:[[
{ field:'dump', checkbox:true },
{ field:'name', title:'抬头', width:200, formatter:function(v,data,i) {
var a = data;
try { while( a.indexOf( NEW_LINE ) >= 0 ) { a = a.replace( NEW_LINE, "<br/>" ); } return a; }
catch(e) { return data; }
} }
]],
toolbar:[{
text:'添加...',
cls: 'btn-primary',
iconCls:'icon-plus icon-white',
<|fim▁hole|> handler:function(){
var id = $("#partner_id").combobox("getValue");
if( !id || id == "" ) {
top.$.messager.warning("请首先选择合作单位,再进行抬头信息的添加操作。");
return;
}
$("#extra_window").window("open");
$("#extra").val("");
try { $("#extra")[0].focus(); } catch(e){}
}
}, '-', {
text:'删除',
cls: 'btn-danger',
iconCls:'icon-trash icon-white',
handler:function(){ deleteExtra(); }
}]
});
// 删除
function deleteExtra() {
var row = $grid.datagrid( "getSelected" );
if( ! row ) {
top.$.messager.warning("请选择一条数据后,再进行删除操作。");
return;
}
$.messager.confirm( "确定删除所选抬头吗?", function( ok ){
if( ok ) {
$.ajax({
url: ksa.buildUrl( "/component/bd", "partner-alias-delete" ),
data: { "partner.id" : $("#partner_id").combobox("getValue"), extra : $grid.datagrid("getSelected") },
success: function( result ) {
try {
if (result.status == "success") {
$.messager.success( result.message );
$grid.datagrid( "reload" );
}
else { $.messager.error( result.message ); }
} catch (e) { }
}
});
}
} );
}
});<|fim▁end|> | |
<|file_name|>view.py<|end_file_name|><|fim▁begin|>#----------------------------------------------------------------------
# Copyright 2012, 2013 Arndt Droullier, Nive GmbH. All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#----------------------------------------------------------------------
__doc__ = """
"""
from nive.i18n import _
from nive.definitions import FieldConf, ViewConf, ViewModuleConf, Conf
# view module definition ------------------------------------------------------------------
#@nive_module
configuration = ViewModuleConf(
id = "userview",
name = _(u"User signup"),
static = "nive.userdb.userview:static",
containment = "nive.userdb.app.UserDB",
context = "nive.userdb.root.root",
view = "nive.userdb.userview.view.UserView",
templates = "nive.userdb.userview:",
permission = "view"
)
t = configuration.templates
configuration.views = [
# User Views
ViewConf(name="login", attr="login", renderer=t+"loginpage.pt"),
ViewConf(name="signup", attr="create", renderer=t+"signup.pt", permission="signup"),
ViewConf(name="update", attr="update", renderer=t+"update.pt", permission="updateuser"),
ViewConf(name="resetpass",attr="resetpass",renderer=t+"resetpass.pt"),
ViewConf(name="logout", attr="logout"),
# disabled
#ViewConf(name="mailpass", attr="mailpass", renderer=t+"mailpass.pt"),
]
# view and form implementation ------------------------------------------------------------------
from nive.views import BaseView, Unauthorized, Mail
from nive.forms import ObjectForm
class UserForm(ObjectForm):
"""
Extended User form
"""
def __init__(self, view=None, loadFromType=None, context=None, request=None, app=None, **kw):
ObjectForm.__init__(self, view=view, loadFromType=loadFromType)
self.actions = [
Conf(id="default", method="StartForm", name=_(u"Initialize"), hidden=True),
Conf(id="defaultEdit",method="LoadUser", name=_(u"Initialize"), hidden=True),
Conf(id="create", method="AddUser", name=_(u"Signup"), hidden=False, options={"renderSuccess":False}),
Conf(id="edit", method="Update", name=_(u"Confirm"), hidden=False),
Conf(id="mailpass", method="MailPass", name=_(u"Mail password"), hidden=False),
Conf(id="resetpass", method="ResetPass", name=_(u"Reset password"), hidden=False),
Conf(id="login", method="Login", name=_(u"Login"), hidden=False),
]
self.subsets = {
"create": {"fields": ["name", "password", "email", "surname", "lastname"],
"actions": ["create"],
"defaultAction": "default"},
"create2":{"fields": ["name", "email"],
"actions": ["create"],
"defaultAction": "default"},
"edit": {"fields": ["email",
FieldConf(id="password", name=_("Password"), datatype="password", required=False, settings={"update": True}),
"surname", "lastname"],
"actions": ["defaultEdit", "edit"],
"defaultAction": "defaultEdit"},
"login": {"fields": ["name", FieldConf(id="password", name=_("Password"), datatype="password", settings={"single": True})],
"actions": ["login"],
"defaultAction": "default"},
"mailpass":{"fields": ["email"],
"actions": ["mailpass"],
"defaultAction": "default"},
"resetpass":{"fields": ["email"],
"actions": ["resetpass"],
"defaultAction": "default"},
}
self.activate = 1
self.generatePW = 0
self.notify = True
self.mail = None
self.mailpass = None
self.groups = ""
self.css_class = "smallform"
def AddUser(self, action, **kw):
"""
Form action: safely add a user
"""
msgs = []
result,data,errors = self.Validate(self.request)
if result:
result, msgs = self.context.AddUser(data,
activate=self.activate,
generatePW=self.generatePW,
mail=self.mail,
groups=self.groups,
notify=self.notify,
currentUser=self.view.User())
return self._FinishFormProcessing(result, data, msgs, errors, **kw)
def LoadUser(self, action, **kw):
"""
Initially load data from obj.
context = obj
"""
user = self.view.User()
if not user:
raise Unauthorized, "User not found."
data = self.LoadObjData(user)
try:
del data["password"]
except:
pass
return data!=None, self.Render(data)
def Update(self, action, **kw):
"""
Form action: safely update a user
"""
user = self.view.User()
if not user:
raise Unauthorized, "User not found."
msgs = []
result,data,errors = self.Validate(self.request)
if result:
uobj = self.context.LookupUser(id=user.id)
result = uobj.SecureUpdate(data, user)
if result:
msgs.append(_(u"OK"))
return self._FinishFormProcessing(result, data, msgs, errors, **kw)
def Login(self, action, **kw):
"""
Form action: user login
"""
redirectSuccess = kw.get("redirectSuccess")
data = self.GetFormValues(self.request)
user, msgs = self.context.Login(data.get("name"), data.get("password"), 0)
if user:
self.context.app.RememberLogin(self.request, user.data.get("name"))
if self.view and redirectSuccess:
self.view.Redirect(redirectSuccess)
return
errors=None
return user, self.Render(data, msgs=msgs, errors=errors)
def MailPass(self, action, **kw):
"""
"""
redirectSuccess = kw.get("redirectSuccess")
return self.ResetPass(action, createNewPasswd=False, **kw)
def ResetPass(self, action, createNewPasswd=True, **kw):
"""
"""
#result, data, e = self.Validate(self.request)
data = self.GetFormValues(self.request)
result, msgs = self.context.MailUserPass(email=data.get("email"), mailtmpl=self.mailpass, createNewPasswd=createNewPasswd, currentUser=self.view.User())
if result:
data = {}
return self._FinishFormProcessing(result, data, msgs, None, **kw)
class UserView(BaseView):
def __init__(self, context, request):
BaseView.__init__(self, context, request)
self.form = UserForm(view=self, loadFromType="user")
self.form.groups = ""
self.publicSignup = False
def create(self):
self.form.activate=1
self.form.generatePW=0
self.form.Setup(subset="create")
return self._render()
def createNotActive(self):
self.form.activate=0
self.form.generatePW=0
self.form.Setup(subset="create")
return self._render()
def createPassword(self):
self.form.activate=1
self.form.generatePW=1
self.form.Setup(subset="create2")
return self._render()
def update(self):
user=self.User()
if user and user.id == 0:
return {u"content": _(u"Your current user can only be edited on file system level."), u"result": False, u"head": self.form.HTMLHead()}
self.form.Setup(subset="edit")
try:
result, data, action = self.form.Process()
return {u"content": data, u"result": result, u"head": self.form.HTMLHead()}<|fim▁hole|> return {u"content": _(u"User not found"), u"result": False, u"head": self.form.HTMLHead()}
def mailpass(self):
self.form.startEmpty = True
self.form.mail = Mail(_(u"Your password"), "nive.userdb:userview/mailpassmail.pt")
self.form.Setup(subset="mailpass")
return self._render()
def resetpass(self):
self.form.startEmpty = True
self.form.mail = Mail(_(u"Your new password"), "nive.userdb:userview/resetpassmail.pt")
self.form.Setup(subset="resetpass")
return self._render()
def login(self):
self.form.Setup(subset="login")
user = self.UserName()
if not user:
self.form.startEmpty = True
#self.form.renderOneColumn = True
redirect = self.GetFormValue(u"redirect")
if not redirect:
try:
redirect = self.context.app.portal.configuration.loginSuccessUrl
except:
redirect = self.request.url
result, data, action = self.form.Process(redirectSuccess=redirect)
return {u"content": data, u"result": result, u"head": self.form.HTMLHead()}
return {u"content": u"", u"result": True, u"head": self.form.HTMLHead()}
def logoutlink(self):
return {}
def logout(self):
app = self.context.app
user = self.UserName()
a = self.context.Logout(user)
app.ForgetLogin(self.request)
redirect = self.GetFormValue(u"redirect")
if not redirect:
try:
redirect = self.context.app.portal.configuration.logoutSuccessUrl
except:
redirect = self.context.app.portal.configuration.portalDefaultUrl
if redirect:
if redirect.find(u"lo=1")==-1:
if redirect.find(u"?")==-1:
redirect+=u"?lo=1"
else:
redirect+=u"&lo=1"
self.Redirect(redirect)
return {}
def logouturl(self):
try:
return self.context.app.portal.configuration.logoutUrl
except:
return self.request.url
def _render(self):
result, data, action = self.form.Process()
return {u"content": data, u"result": result, u"head": self.form.HTMLHead()}<|fim▁end|> | except Unauthorized: |
<|file_name|>command.rs<|end_file_name|><|fim▁begin|>use std::fmt::{Debug, Formatter, Error};
use std::path::{PathBuf, Path};
use std::process::{Command, Output};
use std::{env, str};
pub struct TestCommand {
cwd: PathBuf,
args: Vec<String>,
env_vars: Vec<(String, String)>,
}
impl TestCommand {
pub fn new(cwd: &Path, subcommand: &str) -> Self {
TestCommand {
cwd: cwd.into(),
args: vec![subcommand.into()],
env_vars: Vec::new(),
}
}
pub fn arg<S: Into<String>>(mut self, value: S) -> Self {
self.args.push(value.into());<|fim▁hole|> }
pub fn env(mut self, key: &str, value: &str) -> Self {
self.env_vars.push((key.into(), value.into()));
self
}
pub fn run(self) -> CommandResult {
let output = self.build_command().output().unwrap();
CommandResult {
output: output,
}
}
fn build_command(&self) -> Command {
let mut command = Command::new(path_to_diesel_cli());
command.args(&self.args)
.current_dir(&self.cwd);
for &(ref k, ref v) in self.env_vars.iter() {
command.env(&k, &v);
}
command
}
}
pub struct CommandResult {
output: Output,
}
impl CommandResult {
pub fn is_success(&self) -> bool {
self.output.status.success()
}
pub fn stdout(&self) -> &str {
str::from_utf8(&self.output.stdout).unwrap()
}
pub fn stderr(&self) -> &str {
str::from_utf8(&self.output.stderr).unwrap()
}
pub fn code(&self) -> i32 {
self.output.status.code().unwrap()
}
}
fn path_to_diesel_cli() -> PathBuf {
env::current_exe().unwrap()
.parent().unwrap()
.join("diesel")
}
impl Debug for CommandResult {
fn fmt(&self, out: &mut Formatter) -> Result<(), Error> {
write!(out, "stdout: {}\nstderr: {}", self.stdout(), self.stderr())
}
}<|fim▁end|> | self |
<|file_name|>expressroutecircuitauthorizations.go<|end_file_name|><|fim▁begin|>package network
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"net/http"
)
// ExpressRouteCircuitAuthorizationsClient is the network Client
type ExpressRouteCircuitAuthorizationsClient struct {
BaseClient
}
// NewExpressRouteCircuitAuthorizationsClient creates an instance of the ExpressRouteCircuitAuthorizationsClient
// client.
func NewExpressRouteCircuitAuthorizationsClient(subscriptionID string) ExpressRouteCircuitAuthorizationsClient {
return NewExpressRouteCircuitAuthorizationsClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewExpressRouteCircuitAuthorizationsClientWithBaseURI creates an instance of the
// ExpressRouteCircuitAuthorizationsClient client.
func NewExpressRouteCircuitAuthorizationsClientWithBaseURI(baseURI string, subscriptionID string) ExpressRouteCircuitAuthorizationsClient {
return ExpressRouteCircuitAuthorizationsClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// CreateOrUpdate creates or updates an authorization in the specified express route circuit.
//
// resourceGroupName is the name of the resource group. circuitName is the name of the express route circuit.
// authorizationName is the name of the authorization. authorizationParameters is parameters supplied to the create
// or update express route circuit authorization operation.
func (client ExpressRouteCircuitAuthorizationsClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, circuitName string, authorizationName string, authorizationParameters ExpressRouteCircuitAuthorization) (result ExpressRouteCircuitAuthorizationsCreateOrUpdateFuture, err error) {
req, err := client.CreateOrUpdatePreparer(ctx, resourceGroupName, circuitName, authorizationName, authorizationParameters)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "CreateOrUpdate", nil, "Failure preparing request")
return
}
result, err = client.CreateOrUpdateSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "CreateOrUpdate", result.Response(), "Failure sending request")
return
}
return
}
// CreateOrUpdatePreparer prepares the CreateOrUpdate request.
func (client ExpressRouteCircuitAuthorizationsClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, circuitName string, authorizationName string, authorizationParameters ExpressRouteCircuitAuthorization) (*http.Request, error) {
pathParameters := map[string]interface{}{
"authorizationName": autorest.Encode("path", authorizationName),
"circuitName": autorest.Encode("path", circuitName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2016-09-01"<|fim▁hole|>
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPut(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}", pathParameters),
autorest.WithJSON(authorizationParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client ExpressRouteCircuitAuthorizationsClient) CreateOrUpdateSender(req *http.Request) (future ExpressRouteCircuitAuthorizationsCreateOrUpdateFuture, err error) {
sender := autorest.DecorateSender(client, azure.DoRetryWithRegistration(client.Client))
future.Future = azure.NewFuture(req)
future.req = req
_, err = future.Done(sender)
if err != nil {
return
}
err = autorest.Respond(future.Response(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated))
return
}
// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
// closes the http.Response Body.
func (client ExpressRouteCircuitAuthorizationsClient) CreateOrUpdateResponder(resp *http.Response) (result ExpressRouteCircuitAuthorization, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Delete deletes the specified authorization from the specified express route circuit.
//
// resourceGroupName is the name of the resource group. circuitName is the name of the express route circuit.
// authorizationName is the name of the authorization.
func (client ExpressRouteCircuitAuthorizationsClient) Delete(ctx context.Context, resourceGroupName string, circuitName string, authorizationName string) (result ExpressRouteCircuitAuthorizationsDeleteFuture, err error) {
req, err := client.DeletePreparer(ctx, resourceGroupName, circuitName, authorizationName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "Delete", nil, "Failure preparing request")
return
}
result, err = client.DeleteSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "Delete", result.Response(), "Failure sending request")
return
}
return
}
// DeletePreparer prepares the Delete request.
func (client ExpressRouteCircuitAuthorizationsClient) DeletePreparer(ctx context.Context, resourceGroupName string, circuitName string, authorizationName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"authorizationName": autorest.Encode("path", authorizationName),
"circuitName": autorest.Encode("path", circuitName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2016-09-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsDelete(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client ExpressRouteCircuitAuthorizationsClient) DeleteSender(req *http.Request) (future ExpressRouteCircuitAuthorizationsDeleteFuture, err error) {
sender := autorest.DecorateSender(client, azure.DoRetryWithRegistration(client.Client))
future.Future = azure.NewFuture(req)
future.req = req
_, err = future.Done(sender)
if err != nil {
return
}
err = autorest.Respond(future.Response(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent))
return
}
// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client ExpressRouteCircuitAuthorizationsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent),
autorest.ByClosing())
result.Response = resp
return
}
// Get gets the specified authorization from the specified express route circuit.
//
// resourceGroupName is the name of the resource group. circuitName is the name of the express route circuit.
// authorizationName is the name of the authorization.
func (client ExpressRouteCircuitAuthorizationsClient) Get(ctx context.Context, resourceGroupName string, circuitName string, authorizationName string) (result ExpressRouteCircuitAuthorization, err error) {
req, err := client.GetPreparer(ctx, resourceGroupName, circuitName, authorizationName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "Get", resp, "Failure responding to request")
}
return
}
// GetPreparer prepares the Get request.
func (client ExpressRouteCircuitAuthorizationsClient) GetPreparer(ctx context.Context, resourceGroupName string, circuitName string, authorizationName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"authorizationName": autorest.Encode("path", authorizationName),
"circuitName": autorest.Encode("path", circuitName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2016-09-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client ExpressRouteCircuitAuthorizationsClient) GetSender(req *http.Request) (*http.Response, error) {
// DoRetryWithRegistration retries on missing-subscription-registration errors.
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client ExpressRouteCircuitAuthorizationsClient) GetResponder(resp *http.Response) (result ExpressRouteCircuitAuthorization, err error) {
// Accept only 200 OK; unmarshal the JSON body into result, then close the body.
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// List gets all authorizations in an express route circuit.
//
// resourceGroupName is the name of the resource group. circuitName is the name of the circuit.
func (client ExpressRouteCircuitAuthorizationsClient) List(ctx context.Context, resourceGroupName string, circuitName string) (result AuthorizationListResultPage, err error) {
// Wire up the pager's next-page function before issuing the first request.
result.fn = client.listNextResults
req, err := client.ListPreparer(ctx, resourceGroupName, circuitName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "List", nil, "Failure preparing request")
return
}
resp, err := client.ListSender(req)
if err != nil {
result.alr.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "List", resp, "Failure sending request")
return
}
// result.alr holds the first page of the paged result.
result.alr, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "List", resp, "Failure responding to request")
}
return
}
// ListPreparer prepares the List request.
// ListPreparer prepares the List request.
func (client ExpressRouteCircuitAuthorizationsClient) ListPreparer(ctx context.Context, resourceGroupName string, circuitName string) (*http.Request, error) {
// URL-encode each path segment before substitution into the route template.
pathParameters := map[string]interface{}{
"circuitName": autorest.Encode("path", circuitName),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2016-09-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client ExpressRouteCircuitAuthorizationsClient) ListSender(req *http.Request) (*http.Response, error) {
// DoRetryWithRegistration retries on missing-subscription-registration errors.
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// ListResponder handles the response to the List request. The method always
// closes the http.Response Body.
// ListResponder handles the response to the List request. The method always
// closes the http.Response Body.
func (client ExpressRouteCircuitAuthorizationsClient) ListResponder(resp *http.Response) (result AuthorizationListResult, err error) {
// Accept only 200 OK; unmarshal the JSON body into result, then close the body.
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// listNextResults retrieves the next set of results, if any.
// listNextResults retrieves the next set of results, if any.
func (client ExpressRouteCircuitAuthorizationsClient) listNextResults(lastResults AuthorizationListResult) (result AuthorizationListResult, err error) {
req, err := lastResults.authorizationListResultPreparer()
if err != nil {
return result, autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "listNextResults", nil, "Failure preparing next results request")
}
// A nil request with no error means there is no next page; return the zero result.
if req == nil {
return
}
resp, err := client.ListSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "listNextResults", resp, "Failure sending next results request")
}
result, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.ExpressRouteCircuitAuthorizationsClient", "listNextResults", resp, "Failure responding to next results request")
}
return
}
// ListComplete enumerates all values, automatically crossing page boundaries as required.
func (client ExpressRouteCircuitAuthorizationsClient) ListComplete(ctx context.Context, resourceGroupName string, circuitName string) (result AuthorizationListResultIterator, err error) {
result.page, err = client.List(ctx, resourceGroupName, circuitName)
return
}<|fim▁end|> | queryParameters := map[string]interface{}{
"api-version": APIVersion,
} |
<|file_name|>ledger.js<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
'use strict'
/* brave ledger integration for the brave browser
module entry points:
init() - called by app/index.js to start module
quit() - .. .. .. .. prior to browser quitting
boot() - .. .. .. .. to create wallet
IPC entry point:
LEDGER_PUBLISHER - called synchronously by app/extensions/brave/content/scripts/pageInformation.js
CHANGE_SETTING - called asynchronously to record a settings change
eventStore entry point:
addChangeListener - called when tabs render or gain focus
*/
const fs = require('fs')
const path = require('path')
const url = require('url')
const util = require('util')
const electron = require('electron')
const app = electron.app
const ipc = electron.ipcMain
const session = electron.session
const acorn = require('acorn')
const ledgerBalance = require('ledger-balance')
const ledgerClient = require('ledger-client')
const ledgerGeoIP = require('ledger-geoip')
const ledgerPublisher = require('ledger-publisher')
const qr = require('qr-image')
const random = require('random-lib')
const tldjs = require('tldjs')
const underscore = require('underscore')
const uuid = require('node-uuid')
const appActions = require('../js/actions/appActions')
const appConstants = require('../js/constants/appConstants')
const appDispatcher = require('../js/dispatcher/appDispatcher')
const messages = require('../js/constants/messages')
const settings = require('../js/constants/settings')
const request = require('../js/lib/request')
const getSetting = require('../js/settings').getSetting
const locale = require('./locale')
const appStore = require('../js/stores/appStore')
const eventStore = require('../js/stores/eventStore')
const rulesolver = require('./extensions/brave/content/scripts/pageInformation.js')
const ledgerUtil = require('./common/lib/ledgerUtil')
// TBD: remove these post beta [MTR]
const logPath = 'ledger-log.json'
const publisherPath = 'ledger-publisher.json'
const scoresPath = 'ledger-scores.json'
// TBD: move these to secureState post beta [MTR]
const statePath = 'ledger-state.json'
const synopsisPath = 'ledger-synopsis.json'
/*
* ledger globals
*/
// true while boot() is creating a wallet (guards against re-entry)
var bootP = false
// the ledger-client instance; null/undefined when payments are disabled
var client
// client behavior flags, driven by environment variables
const clientOptions = { debugP: process.env.LEDGER_DEBUG,
loggingP: process.env.LEDGER_LOGGING,
verboseP: process.env.LEDGER_VERBOSE
}
/*
* publisher globals
*/
// ledger-publisher Synopsis instance; null when payments are disabled
var synopsis
// per-URL page info, keyed by location
var locations = {}
// per-publisher map of visited locations -> { timestamp, tabIds }
var publishers = {}
/*
* utility globals
*/
// common durations in milliseconds
const msecs = { year: 365 * 24 * 60 * 60 * 1000,
week: 7 * 24 * 60 * 60 * 1000,
day: 24 * 60 * 60 * 1000,
hour: 60 * 60 * 1000,
minute: 60 * 1000,
second: 1000
}
/*
* notification state globals
*/
// message bodies compared against in the NOTIFICATION_RESPONSE handler
let addFundsMessage
let reconciliationMessage
// when true, notifications are muted (user pressed "dismiss")
let suppressNotifications = false
let reconciliationNotificationShown = false
// interval id for the periodic showNotifications check
let notificationTimeout = null
// TODO(bridiver) - create a better way to get setting changes
// Dispatcher hook: only settings changes and network reconnection matter
// to the ledger; everything else is ignored.
const doAction = (action) => {
  const actionType = action.actionType

  if (actionType === appConstants.APP_CHANGE_SETTING) {
    if (action.key === settings.PAYMENTS_ENABLED) return initialize(action.value)
    if (action.key === settings.PAYMENTS_CONTRIBUTION_AMOUNT) return setPaymentInfo(action.value)
  } else if (actionType === appConstants.APP_NETWORK_CONNECTED) {
    // give the network a moment to settle before resuming ledger traffic
    setTimeout(networkConnected, 1 * msecs.second)
  }
}
/*
* module entry points
*/
// init() — called by app/index.js to start the module: wires up debug flags,
// registers the dispatcher hook, and enables/disables per the current setting.
var init = () => {
try {
// boolion() coerces env-var strings ('true'/'false'/etc.) to booleans
ledgerInfo._internal.debugP = ledgerClient.prototype.boolion(process.env.LEDGER_CLIENT_DEBUG)
publisherInfo._internal.debugP = ledgerClient.prototype.boolion(process.env.LEDGER_PUBLISHER_DEBUG)
publisherInfo._internal.verboseP = ledgerClient.prototype.boolion(process.env.LEDGER_PUBLISHER_VERBOSE)
appDispatcher.register(doAction)
initialize(getSetting(settings.PAYMENTS_ENABLED))
} catch (ex) { console.log('ledger.js initialization failed: ' + ex.toString() + '\n' + ex.stack) }
}
// quit() — called prior to browser shutdown; the 'NOOP' visit closes out
// timing for the currently-viewed page without starting a new one.
var quit = () => {
visit('NOOP', underscore.now(), null)
}
// boot() — called to create the wallet. No-op when a boot is already in
// flight or a client already exists; otherwise, if no persisted state file
// is present, creates a brand-new ledger client (which provisions a wallet).
var boot = () => {
  if ((bootP) || (client)) return

  bootP = true
  // FIX: was fs.FF_OK, which is undefined on the fs module; F_OK is the
  // existence check intended here
  fs.access(pathName(statePath), fs.F_OK, (err) => {
    // state file already exists: nothing to create
    if (!err) return

    if (err.code !== 'ENOENT') console.log('statePath read error: ' + err.toString())

    ledgerInfo.creating = true
    appActions.updateLedgerInfo({ creating: true })
    try {
      client = ledgerClient(null, underscore.extend({ roundtrip: roundtrip }, clientOptions), null)
    } catch (ex) {
      appActions.updateLedgerInfo({})

      bootP = false
      return console.log('ledger client boot error: ' + ex.toString() + '\n' + ex.stack)
    }
    // sync() returning true means state changed and a run is warranted
    if (client.sync(callback) === true) run(random.randomInt({ min: msecs.minute, max: 10 * msecs.minute }))
    getBalance()

    bootP = false
  })
}
/*
* IPC entry point
*/
// IPC wiring (guarded so the module can load in contexts without ipcMain).
if (ipc) {
// Record whether a bitcoin: protocol handler is registered for this partition.
ipc.on(messages.CHECK_BITCOIN_HANDLER, (event, partition) => {
const protocolHandler = session.fromPartition(partition).protocol
// TODO: https://github.com/brave/browser-laptop/issues/3625
if (typeof protocolHandler.isNavigatorProtocolHandled === 'function') {
ledgerInfo.hasBitcoinHandler = protocolHandler.isNavigatorProtocolHandled('bitcoin')
appActions.updateLedgerInfo(underscore.omit(ledgerInfo, [ '_internal' ]))
}
})
// Synchronous request from pageInformation.js: return the parsed URL context
// plus the cooked ruleset so the renderer can resolve the page's publisher.
ipc.on(messages.LEDGER_PUBLISHER, (event, location) => {
var ctx
if ((!synopsis) || (event.sender.session === session.fromPartition('default')) || (!tldjs.isValid(location))) {
event.returnValue = {}
return
}
ctx = url.parse(location, true)
ctx.TLD = tldjs.getPublicSuffix(ctx.host)
if (!ctx.TLD) {
if (publisherInfo._internal.verboseP) console.log('\nno TLD for:' + ctx.host)
event.returnValue = {}
return
}
// strip functions so the context survives structured-clone over IPC
ctx = underscore.mapObject(ctx, function (value, key) { if (!underscore.isFunction(value)) return value })
ctx.URL = location
ctx.SLD = tldjs.getDomain(ctx.host)
ctx.RLD = tldjs.getSubdomain(ctx.host)
ctx.QLD = ctx.RLD ? underscore.last(ctx.RLD.split('.')) : ''
event.returnValue = { context: ctx, rules: publisherInfo._internal.ruleset.cooked }
})
// Button handling for the add-funds / reconciliation notifications.
ipc.on(messages.NOTIFICATION_RESPONSE, (e, message, buttonIndex) => {
const win = electron.BrowserWindow.getFocusedWindow()
if (message === addFundsMessage) {
appActions.hideMessageBox(message)
if (buttonIndex === 0) {
// Don't show notifications for the next 6 hours.
suppressNotifications = true
setTimeout(() => { suppressNotifications = false }, 6 * msecs.hour)
} else {
// Open payments panel
if (win) {
win.webContents.send(messages.SHORTCUT_NEW_FRAME,
'about:preferences#payments', { singleFrame: true })
}
}
} else if (message === reconciliationMessage) {
appActions.hideMessageBox(message)
if (win) {
win.webContents.send(messages.SHORTCUT_NEW_FRAME,
'about:preferences#payments', { singleFrame: true })
}
// If > 24 hours has passed, it might be time to show the reconciliation
// message again
setTimeout(() => { reconciliationNotificationShown = false }, 1 * msecs.day)
}
})
// Refresh the balance shortly after the add-funds dialog closes.
// NOTE(review): balanceTimeoutId is declared elsewhere in this file — verify.
ipc.on(messages.ADD_FUNDS_CLOSED, () => {
if (balanceTimeoutId) clearTimeout(balanceTimeoutId)
balanceTimeoutId = setTimeout(getBalance, 5 * msecs.second)
})
}
/*
* eventStore entry point
*/
// Magic-byte signatures used to sniff the real image type of fetched favicons.
// NOTE(review): the nested [0x37, 0x39] element in the gif entry coerces to a
// single 0x00 byte inside the Buffer — it looks like it meant "GIF87a or
// GIF89a"; verify against upstream before changing.
var fileTypes = {
  bmp: new Buffer([ 0x42, 0x4d ]),
  gif: new Buffer([ 0x47, 0x49, 0x46, 0x38, [0x37, 0x39], 0x61 ]),
  ico: new Buffer([ 0x00, 0x00, 0x01, 0x00 ]),
  jpeg: new Buffer([ 0xff, 0xd8, 0xff ]),
  png: new Buffer([ 0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a ])
}

// Longest signature length, padded by 50% so enough base64 characters are
// decoded to cover any signature.
var signatureMax = Math.ceil(
  underscore.reduce(underscore.values(fileTypes),
                    (longest, signature) => Math.max(longest, signature.length), 0) * 1.5)
// eventStore entry point: called when tabs render or gain focus. Resolves the
// publisher for each newly-seen page, fetches its favicon (sniffing the real
// image type from magic bytes), and records the page view for the synopsis.
eventStore.addChangeListener(() => {
  const eventState = eventStore.getState().toJS()
  var view = eventState.page_view
  var info = eventState.page_info
  var pageLoad = eventState.page_load

  if ((!synopsis) || (!util.isArray(info))) return

  info.forEach((page) => {
    var entry, faviconURL, publisher
    var location = page.url

    // skip about: pages and locations whose publisher is already known
    if ((location.match(/^about/)) || ((locations[location]) && (locations[location].publisher))) return

    if (!page.publisher) {
      try {
        publisher = ledgerPublisher.getPublisher(location)
        if (publisher) page.publisher = publisher
      } catch (ex) {
        console.log('getPublisher error for ' + location + ': ' + ex.toString())
      }
    }
    locations[location] = underscore.omit(page, [ 'url' ])
    if (!page.publisher) return

    publisher = page.publisher
    synopsis.initPublisher(publisher)
    entry = synopsis.publishers[publisher]
    if ((page.protocol) && (!entry.protocol)) entry.protocol = page.protocol

    // fetch a favicon once per publisher (entry.faviconURL still undefined)
    if ((typeof entry.faviconURL === 'undefined') && ((page.faviconURL) || (entry.protocol))) {
      var fetch = (url, redirects) => {
        if (typeof redirects === 'undefined') redirects = 0

        request.request({ url: url, responseType: 'blob' }, (err, response, blob) => {
          var matchP, prefix, tail

          if (publisherInfo._internal.debugP) {
            console.log('\nresponse: ' + url +
                        ' errP=' + (!!err) + ' blob=' + (blob || '').substr(0, 80) + '\nresponse=' +
                        JSON.stringify(response, null, 2))
          }

          if (err) return console.log('response error: ' + err.toString() + '\n' + err.stack)

          if ((response.statusCode === 301) && (response.headers.location)) {
            // FIX: was `redirects++`, which passed the *old* value on each hop
            // and never advanced the counter, so the 3-redirect cap never
            // engaged on a redirect loop
            if (redirects < 3) fetch(response.headers.location, redirects + 1)
            return
          }

          if ((response.statusCode !== 200) || (response.headers['content-length'] === '0')) return

          if (blob.indexOf('data:image/') !== 0) {
            // NB: for some reason, some sites return an image, but with the wrong content-type...
            tail = blob.indexOf(';base64,')
            if (tail <= 0) return

            prefix = new Buffer(blob.substr(tail + 8, signatureMax), 'base64')
            underscore.keys(fileTypes).forEach((fileType) => {
              if (matchP) return
              // FIX: was `&&`, which (a) let any long-enough NON-matching
              // prefix fall through and be labeled with the first file type,
              // and (b) allowed an out-of-range Buffer.compare on a short
              // prefix; skip when too short OR when the signature differs
              if ((prefix.length < fileTypes[fileType].length) ||
                  (fileTypes[fileType].compare(prefix, 0, fileTypes[fileType].length) !== 0)) return

              blob = 'data:image/' + fileType + blob.substr(tail)
              matchP = true
            })
          }

          entry.faviconURL = blob
          updatePublisherInfo()
          if (publisherInfo._internal.debugP) {
            console.log('\n' + publisher + ' synopsis=' +
                        JSON.stringify(underscore.extend(underscore.omit(entry, [ 'faviconURL', 'window' ]),
                                                         { faviconURL: entry.faviconURL && '... ' }), null, 2))
          }
        })
      }

      faviconURL = page.faviconURL || entry.protocol + '//' + url.parse(location).host + '/favicon.ico'
      entry.faviconURL = null

      if (publisherInfo._internal.debugP) console.log('request: ' + faviconURL)
      fetch(faviconURL)
    }
  })

  view = underscore.last(view) || {}
  if (ledgerUtil.shouldTrackView(view, pageLoad)) {
    visit(view.url || 'NOOP', view.timestamp || underscore.now(), view.tabId)
  }
})
/*
* module initialization
*/
// initialize(onoff) — enable or disable the ledger integration. When enabling
// and no client exists yet, restores persisted client state from disk and
// resumes the sync/run cycle.
var initialize = (onoff) => {
  enable(onoff)

  if (!onoff) {
    client = null
    return appActions.updateLedgerInfo({})
  }
  if (client) return

  cacheRuleSet(ledgerPublisher.rules)
  // FIX: was fs.FF_OK, which is undefined on the fs module; F_OK is the
  // existence check intended here
  fs.access(pathName(statePath), fs.F_OK, (err) => {
    if (!err) {
      if (clientOptions.verboseP) console.log('\nfound ' + pathName(statePath))

      fs.readFile(pathName(statePath), (err, data) => {
        var state

        if (err) return console.log('read error: ' + err.toString())

        try {
          state = JSON.parse(data)
          if (clientOptions.verboseP) console.log('\nstarting up ledger client integration')
        } catch (ex) {
          return console.log('statePath parse error: ' + ex.toString())
        }
        getStateInfo(state)

        try {
          client = ledgerClient(state.personaId,
                                underscore.extend(state.options, { roundtrip: roundtrip }, clientOptions), state)
        } catch (ex) {
          return console.log('ledger client creation error: ' + ex.toString() + '\n' + ex.stack)
        }
        // sync() returning true means state changed and a run is warranted
        if (client.sync(callback) === true) run(random.randomInt({ min: msecs.minute, max: 10 * msecs.minute }))
        cacheRuleSet(state.ruleset)

        // Make sure bravery props are up-to-date with user settings
        setPaymentInfo(getSetting(settings.PAYMENTS_CONTRIBUTION_AMOUNT))

        getBalance()
      })
      return
    }

    if (err.code !== 'ENOENT') console.log('statePath read error: ' + err.toString())
    appActions.updateLedgerInfo({})
  })
}
// enable(onoff) — turn the publisher-tracking side on or off. When enabling,
// restores the synopsis and publisher maps from disk and starts the periodic
// notification check.
var enable = (onoff) => {
if (!onoff) {
synopsis = null
if (notificationTimeout) {
clearInterval(notificationTimeout)
notificationTimeout = null
}
return updatePublisherInfo()
}
// start with an empty synopsis; replaced below if a persisted one loads
synopsis = new (ledgerPublisher.Synopsis)()
fs.readFile(pathName(synopsisPath), (err, data) => {
if (publisherInfo._internal.verboseP) console.log('\nstarting up ledger publisher integration')
if (err) {
if (err.code !== 'ENOENT') console.log('synopsisPath read error: ' + err.toString())
return updatePublisherInfo()
}
if (publisherInfo._internal.verboseP) console.log('\nfound ' + pathName(synopsisPath))
try {
synopsis = new (ledgerPublisher.Synopsis)(data)
} catch (ex) {
console.log('synopsisPath parse error: ' + ex.toString())
}
// cf., the `Synopsis` constructor, https://github.com/brave/ledger-publisher/blob/master/index.js#L167
// thresholds are zeroed under test, otherwise overridable via env vars
if (process.env.NODE_ENV === 'test') {
synopsis.options.minDuration = 0
synopsis.options.minPublisherDuration = 0
synopsis.options.minPublisherVisits = 0
} else {
if (process.env.LEDGER_PUBLISHER_VISIT_DURATION) {
synopsis.options.minDuration = ledgerClient.prototype.numbion(process.env.LEDGER_PUBLISHER_VISIT_DURATION)
}
if (process.env.LEDGER_PUBLISHER_MIN_DURATION) {
synopsis.options.minPublisherDuration = ledgerClient.prototype.numbion(process.env.LEDGER_PUBLISHER_MIN_DURATION)
}
if (process.env.LEDGER_PUBLISHER_MIN_VISITS) {
synopsis.options.minPublisherVisits = ledgerClient.prototype.numbion(process.env.LEDGER_PUBLISHER_MIN_VISITS)
}
}
// drop null faviconURLs so they are re-fetched (see the eventStore listener)
underscore.keys(synopsis.publishers).forEach((publisher) => {
if (synopsis.publishers[publisher].faviconURL === null) delete synopsis.publishers[publisher].faviconURL
})
updatePublisherInfo()
// Check if relevant browser notifications should be shown every 15 minutes
notificationTimeout = setInterval(showNotifications, 15 * msecs.minute)
// rebuild the in-memory locations/publishers maps from the persisted file
fs.readFile(pathName(publisherPath), (err, data) => {
if (err) {
if (err.code !== 'ENOENT') console.log('publisherPath read error: ' + err.toString())
return
}
if (publisherInfo._internal.verboseP) console.log('\nfound ' + pathName(publisherPath))
try {
data = JSON.parse(data)
underscore.keys(data).sort().forEach((publisher) => {
var entries = data[publisher]
publishers[publisher] = {}
entries.forEach((entry) => {
locations[entry.location] = entry
publishers[publisher][entry.location] = { timestamp: entry.when, tabIds: [] }
})
})
} catch (ex) {
console.log('publishersPath parse error: ' + ex.toString())
}
})
})
}
/*
* update publisher information
*/
// State published to the UI via appActions.updatePublisherInfo(); the
// _internal part (debug flags and the raw/cooked ruleset) is stripped first.
var publisherInfo = {
synopsis: undefined,
_internal: {
ruleset: { raw: [], cooked: [] }
}
}
// Persist publisher bookkeeping (locations, scores, synopsis) to disk and
// push a freshly-normalized synopsis to the UI.
var updatePublisherInfo = () => {
  if (!synopsis) return

  // keep only locations visited within the past week
  var cutoff = underscore.now() - msecs.week
  var recent = {}
  underscore.keys(publishers).sort().forEach((site) => {
    var visits = underscore.keys(publishers[site]).reduce((memo, location) => {
      var stamp = publishers[site][location].timestamp

      if (stamp > cutoff) memo.push({ location: location, when: stamp })
      return memo
    }, [])
    if (visits.length > 0) recent[site] = visits
  })

  syncWriter(pathName(publisherPath), recent, () => {})
  syncWriter(pathName(scoresPath), synopsis.allN(), () => {})
  syncWriter(pathName(synopsisPath), synopsis, () => {})
  publisherInfo.synopsis = synopsisNormalizer()

  if (publisherInfo._internal.debugP) {
    // elide favicon data-URIs from the debug dump
    var scrubbed = publisherInfo.synopsis.map((entry) => {
      return underscore.extend(underscore.omit(entry, [ 'faviconURL' ]), { faviconURL: entry.faviconURL && '...' })
    })
    console.log('\nupdatePublisherInfo: ' + JSON.stringify(scrubbed, null, 2))
  }

  appActions.updatePublisherInfo(underscore.omit(publisherInfo, [ '_internal' ]))
}
// Build the ranked publisher table shown in the payments UI: filter out
// publishers below the duration/visit thresholds, sort by score, compute
// rounded percentages that sum to 100, and break durations into d/h/m/s.
var synopsisNormalizer = () => {
var i, duration, n, pct, publisher, results, total
var data = []
var scorekeeper = synopsis.options.scorekeeper
results = []
underscore.keys(synopsis.publishers).forEach((publisher) => {
if (synopsis.publishers[publisher].scores[scorekeeper] <= 0) return
if ((synopsis.options.minPublisherDuration > synopsis.publishers[publisher].duration) ||
(synopsis.options.minPublisherVisits > synopsis.publishers[publisher].visits)) return
results.push(underscore.extend({ publisher: publisher }, underscore.omit(synopsis.publishers[publisher], 'window')))
}, synopsis)
results = underscore.sortBy(results, (entry) => { return -entry.scores[scorekeeper] })
n = results.length
total = 0
for (i = 0; i < n; i++) { total += results[i].scores[scorekeeper] }
if (total === 0) return data
pct = []
for (i = 0; i < n; i++) {
publisher = synopsis.publishers[results[i].publisher]
duration = results[i].duration
data[i] = {
rank: i + 1,
// TBD: the `ledger-publisher` package does not currently report `verified` ...
verified: publisher.verified || false,
site: results[i].publisher,
views: results[i].visits,
duration: duration,
daysSpent: 0,
hoursSpent: 0,
minutesSpent: 0,
secondsSpent: 0,
faviconURL: publisher.faviconURL,
score: results[i].scores[scorekeeper]
}
// HACK: Protocol is sometimes blank here, so default to http:// so we can
// still generate publisherURL.
data[i].publisherURL = (results[i].protocol || 'http:') + '//' + results[i].publisher
pct[i] = Math.round((results[i].scores[scorekeeper] * 100) / total)
// express duration in the largest sensible unit, with one level of remainder
if (duration >= msecs.day) {
data[i].daysSpent = Math.max(Math.round(duration / msecs.day), 1)
} else if (duration >= msecs.hour) {
data[i].hoursSpent = Math.max(Math.floor(duration / msecs.hour), 1)
data[i].minutesSpent = Math.round((duration % msecs.hour) / msecs.minute)
} else if (duration >= msecs.minute) {
data[i].minutesSpent = Math.max(Math.round(duration / msecs.minute), 1)
data[i].secondsSpent = Math.round((duration % msecs.minute) / msecs.second)
} else {
data[i].secondsSpent = Math.max(Math.round(duration / msecs.second), 1)
}
}
// courtesy of https://stackoverflow.com/questions/13483430/how-to-make-rounded-percentages-add-up-to-100#13485888
// (distributes the rounding error so the percentages sum to the target)
var foo = (l, target) => {
var off = target - underscore.reduce(l, (acc, x) => { return acc + Math.round(x) }, 0)
return underscore.chain(l)
.sortBy((x) => { return Math.round(x) - x })
.map((x, i) => { return Math.round(x) + (off > i) - (i >= (l.length + off)) })
.value()
}
pct = foo(pct, 100)
total = 0
for (i = 0; i < n; i++) {
/*
if (pct[i] <= 0) {
data = data.slice(0, i)
break
}
*/
if (pct[i] < 0) pct[i] = 0
data[i].percentage = pct[i]
total += pct[i]
}
// clamping negatives to 0 above can push the sum over 100; shave 1% off the
// lowest-ranked entries (skipping those under 2%) until it fits
for (i = data.length - 1; (total > 100) && (i >= 0); i--) {
if (data[i].percentage < 2) continue
data[i].percentage--
total--
}
return data
}
/*
* publisher utilities
*/
// the page currently being timed; 'NOOP' means "nothing to attribute"
var currentLocation = 'NOOP'
var currentTimestamp = underscore.now()
// visit(location, timestamp, tabId) — close out timing for the previous page
// (crediting its publisher with the elapsed duration) and start timing the
// new one. Called with 'NOOP' to flush without starting a new visit.
var visit = (location, timestamp, tabId) => {
var setLocation = () => {
var duration, publisher, revisitP
if (!synopsis) return
if (publisherInfo._internal.verboseP) {
console.log('locations[' + currentLocation + ']=' + JSON.stringify(locations[currentLocation], null, 2) +
' duration=' + (timestamp - currentTimestamp) + ' msec' + ' tabId=' + tabId)
}
// nothing to credit: same page, unknown page, or no originating tab
if ((location === currentLocation) || (!locations[currentLocation]) || (!tabId)) return
publisher = locations[currentLocation].publisher
if (!publisher) return
if (!publishers[publisher]) publishers[publisher] = {}
if (!publishers[publisher][currentLocation]) publishers[publisher][currentLocation] = { tabIds: [] }
publishers[publisher][currentLocation].timestamp = timestamp
// a revisit is the same tab returning to a location it already visited
revisitP = publishers[publisher][currentLocation].tabIds.indexOf(tabId) !== -1
if (!revisitP) publishers[publisher][currentLocation].tabIds.push(tabId)
duration = timestamp - currentTimestamp
if (publisherInfo._internal.verboseP) {
console.log('\nadd publisher ' + publisher + ': ' + duration + ' msec' + ' revisitP=' + revisitP + ' state=' +
JSON.stringify(underscore.extend({ location: currentLocation }, publishers[publisher][currentLocation]),
null, 2))
}
synopsis.addPublisher(publisher, { duration: duration, revisitP: revisitP })
updatePublisherInfo()
}
setLocation()
if (location === currentLocation) return
// about: pages are never attributed to a publisher
currentLocation = location.match(/^about/) ? 'NOOP' : location
currentTimestamp = timestamp
}
// cacheRuleSet(ruleset) — pre-parse ("cook") the publisher ruleset with acorn
// so the renderer can evaluate it, and purge synopsis entries for publishers
// that the new exclusion rules now match.
var cacheRuleSet = (ruleset) => {
var stewed, syncP
// no-op when absent or unchanged
if ((!ruleset) || (underscore.isEqual(publisherInfo._internal.ruleset.raw, ruleset))) return
try {
stewed = []
ruleset.forEach((rule) => {
// each condition/consequent is parsed to an AST for later evaluation
var entry = { condition: acorn.parse(rule.condition) }
if (rule.dom) {
if (rule.dom.publisher) {
entry.publisher = { selector: rule.dom.publisher.nodeSelector,
consequent: acorn.parse(rule.dom.publisher.consequent)
}
}
if (rule.dom.faviconURL) {
entry.faviconURL = { selector: rule.dom.faviconURL.nodeSelector,
consequent: acorn.parse(rule.dom.faviconURL.consequent)
}
}
}
if (!entry.publisher) entry.consequent = rule.consequent ? acorn.parse(rule.consequent) : rule.consequent
stewed.push(entry)
})
publisherInfo._internal.ruleset.raw = ruleset
publisherInfo._internal.ruleset.cooked = stewed
if (!synopsis) return
// re-evaluate every known publisher against the new exclusion rules
underscore.keys(synopsis.publishers).forEach((publisher) => {
var location = (synopsis.publishers[publisher].protocol || 'http:') + '//' + publisher
var ctx = url.parse(location, true)
ctx.TLD = tldjs.getPublicSuffix(ctx.host)
if (!ctx.TLD) return
ctx = underscore.mapObject(ctx, function (value, key) { if (!underscore.isFunction(value)) return value })
ctx.URL = location
ctx.SLD = tldjs.getDomain(ctx.host)
ctx.RLD = tldjs.getSubdomain(ctx.host)
ctx.QLD = ctx.RLD ? underscore.last(ctx.RLD.split('.')) : ''
stewed.forEach((rule) => {
// only pure exclusion rules (no consequent, no DOM clauses) purge entries
if ((rule.consequent !== null) || (rule.dom)) return
if (!rulesolver.resolve(rule.condition, ctx)) return
if (publisherInfo._internal.verboseP) console.log('\npurging ' + publisher)
delete synopsis.publishers[publisher]
delete publishers[publisher]
syncP = true
})
})
if (!syncP) return
updatePublisherInfo()
} catch (ex) {
console.log('ruleset error: ' + ex.toString() + '\n' + ex.stack)
}
}
/*
* update ledger information
*/
var ledgerInfo = {
creating: false,
created: false,
delayStamp: undefined,
reconcileStamp: undefined,
reconcileDelay: undefined,
transactions:
[
/*
{
viewingId: undefined,
surveyorId: undefined,
contribution: {
fiat: {
amount: undefined,
currency: undefined
},
rates: {
[currency]: undefined // bitcoin value in <currency>
},
satoshis: undefined,
fee: undefined
},
submissionStamp: undefined,
submissionId: undefined,
count: undefined,
satoshis: undefined,
votes: undefined,
ballots: {
[publisher]: undefined
}
, ...
*/
],
// set from ledger client's state.paymentInfo OR client's getWalletProperties
// Bitcoin wallet address
address: undefined,
// Bitcoin wallet balance (truncated BTC and satoshis)
balance: undefined,
unconfirmed: undefined,
satoshis: undefined,
// the desired contribution (the btc value approximates the amount/currency designation)
btc: undefined,
amount: undefined,
currency: undefined,
paymentURL: undefined,
buyURL: undefined,
bravery: undefined,
hasBitcoinHandler: false,
// geoIP/exchange information
countryCode: undefined,
exchangeInfo: undefined,
_internal: {
exchangeExpiry: 0,
exchanges: {},
geoipExpiry: 0
},
error: null
}<|fim▁hole|>
if (info) {
underscore.extend(ledgerInfo,
underscore.pick(info, [ 'address', 'balance', 'unconfirmed', 'satoshis', 'btc', 'amount', 'currency' ]))
if ((!info.buyURLExpires) || (info.buyURLExpires > now)) ledgerInfo.buyURL = info.buyURL
underscore.extend(ledgerInfo, ledgerInfo._internal.cache || {})
}
if ((client) && (now > ledgerInfo._internal.geoipExpiry)) {
ledgerInfo._internal.geoipExpiry = now + (5 * msecs.minute)
return ledgerGeoIP.getGeoIP(client.options, (err, provider, result) => {
if (err) console.log('ledger geoip warning: ' + JSON.stringify(err, null, 2))
if (result) ledgerInfo.countryCode = result
ledgerInfo.exchangeInfo = ledgerInfo._internal.exchanges[ledgerInfo.countryCode]
if (now <= ledgerInfo._internal.exchangeExpiry) return updateLedgerInfo()
ledgerInfo._internal.exchangeExpiry = now + msecs.day
roundtrip({ path: '/v1/exchange/providers' }, client.options, (err, response, body) => {
if (err) console.log('ledger exchange error: ' + JSON.stringify(err, null, 2))
ledgerInfo._internal.exchanges = body || {}
ledgerInfo.exchangeInfo = ledgerInfo._internal.exchanges[ledgerInfo.countryCode]
updateLedgerInfo()
})
})
}
if (ledgerInfo._internal.debugP) {
console.log('\nupdateLedgerInfo: ' + JSON.stringify(underscore.omit(ledgerInfo, [ '_internal' ]), null, 2))
}
appActions.updateLedgerInfo(underscore.omit(ledgerInfo, [ '_internal' ]))
}
/*
* ledger client callbacks
*/
// rolling in-memory log of ledger-client report entries (trimmed to one week)
var logs = []
// callback(err, result, delayTime) — invoked by the ledger client after each
// sync/vote/reconcile step: persists logs and state, refreshes wallet info,
// and schedules the next run.
var callback = (err, result, delayTime) => {
var i, then
var entries = client && client.report()
var now = underscore.now()
if (clientOptions.verboseP) {
console.log('\nledger client callback: clientP=' + (!!client) + ' errP=' + (!!err) + ' resultP=' + (!!result) +
' delayTime=' + delayTime)
}
if (entries) {
// drop log entries older than one week
then = now - msecs.week
logs = logs.concat(entries)
for (i = 0; i < logs.length; i++) if (logs[i].when > then) break
if ((i !== 0) && (i !== logs.length)) logs = logs.slice(i)
if (result) entries.push({ who: 'callback', what: result, when: underscore.now() })
syncWriter(pathName(logPath), entries, { flag: 'a' }, () => {})
}
if (err) {
console.log('ledger client error(1): ' + JSON.stringify(err, null, 2) + (err.stack ? ('\n' + err.stack) : ''))
if (!client) return
// retry after a random 1-10 minute delay unless the client specified one
if (typeof delayTime === 'undefined') delayTime = random.randomInt({ min: msecs.minute, max: 10 * msecs.minute })
}
if (!result) return run(delayTime)
if ((client) && (result.properties.wallet)) {
if (!ledgerInfo.created) setPaymentInfo(getSetting(settings.PAYMENTS_CONTRIBUTION_AMOUNT))
getStateInfo(result)
getPaymentInfo()
}
cacheRuleSet(result.ruleset)
syncWriter(pathName(statePath), result, () => {})
run(delayTime)
}
// roundtrip(params, options, callback) — perform an HTTP request against the
// ledger server (or its proxy) and parse the JSON response.
//
//   params   - per-request settings: path, method, payload, headers, plus an
//              optional server override and useProxy flag
//   options  - client options (server, verboseP)
//   callback - (err, response, payload); payload is null for 204 responses
var roundtrip = (params, options, callback) => {
  var i
  var parts = typeof params.server === 'string' ? url.parse(params.server)
    : typeof params.server !== 'undefined' ? params.server
    : typeof options.server === 'string' ? url.parse(options.server) : options.server

  if (!params.method) params.method = 'GET'
  parts = underscore.extend(underscore.pick(parts, [ 'protocol', 'hostname', 'port' ]),
                            underscore.omit(params, [ 'headers', 'payload', 'timeout' ]))

// TBD: let the user configure this via preferences [MTR]
  if ((parts.hostname === 'ledger.brave.com') && (params.useProxy)) parts.hostname = 'ledger-proxy.privateinternetaccess.com'

  // split path into pathname/search so url.format() rebuilds it correctly
  i = parts.path.indexOf('?')
  if (i !== -1) {
    parts.pathname = parts.path.substring(0, i)
    parts.search = parts.path.substring(i)
  } else {
    parts.pathname = parts.path
  }

  options = {
    url: url.format(parts),
    method: params.method,
    payload: params.payload,
    responseType: 'text',
    headers: underscore.defaults(params.headers || {}, { 'content-type': 'application/json; charset=utf-8' }),
    verboseP: options.verboseP
  }
  request.request(options, (err, response, body) => {
    var payload

    if ((response) && (options.verboseP)) {
      console.log('[ response for ' + params.method + ' ' + parts.protocol + '//' + parts.hostname + params.path + ' ]')
      console.log('>>> HTTP/' + response.httpVersionMajor + '.' + response.httpVersionMinor + ' ' + response.statusCode +
                  ' ' + (response.statusMessage || ''))
      underscore.keys(response.headers).forEach((header) => { console.log('>>> ' + header + ': ' + response.headers[header]) })
      console.log('>>>')
      console.log('>>> ' + (body || '').split('\n').join('\n>>> '))
    }
    if (err) return callback(err)

    if (Math.floor(response.statusCode / 100) !== 2) {
      // FIX: the ' for ...' suffix was previously concatenated OUTSIDE the
      // Error constructor, so callers received a plain string instead of an
      // Error object; build the whole message inside the constructor
      return callback(new Error('HTTP response ' + response.statusCode + ' for ' + params.method + ' ' + params.path))
    }

    try {
      payload = (response.statusCode !== 204) ? JSON.parse(body) : null
    } catch (err) {
      return callback(err)
    }

    try {
      callback(null, response, payload)
    } catch (err0) {
      if (options.verboseP) console.log('\ncallback: ' + err0.toString() + '\n' + err0.stack)
    }
  })

  if (!options.verboseP) return

  console.log('<<< ' + params.method + ' ' + parts.protocol + '//' + parts.hostname + params.path)
  underscore.keys(options.headers).forEach((header) => { console.log('<<< ' + header + ': ' + options.headers[header]) })
  console.log('<<<')
  if (options.payload) console.log('<<< ' + JSON.stringify(params.payload, null, 2).split('\n').join('\n<<< '))
}
// timer handle for the next scheduled run(); false when no run is pending
var runTimeoutId = false
/*
 * Drive the ledger client state machine.
 *
 * delayTime semantics (milliseconds):
 *   undefined -> no-op (also a no-op when no client exists)
 *   0         -> ask the client when the next reconcile is due and reschedule
 *   > 0       -> schedule a client.sync() via setTimeout
 * Before scheduling, any pending ballots are cast for the current winners and
 * the resulting client state is persisted.
 */
var run = (delayTime) => {
if (clientOptions.verboseP) console.log('\nledger client run: clientP=' + (!!client) + ' delayTime=' + delayTime)
if ((typeof delayTime === 'undefined') || (!client)) return
var active, state
var ballots = client.ballots()
var siteSettings = appStore.getState().get('siteSettings')
// NOTE(review): assumes synopsis.winners(n) yields publisher identifiers -- confirm
var winners = ((synopsis) && (ballots > 0) && (synopsis.winners(ballots))) || []
try {
winners.forEach((winner) => {
var result
var siteSetting = siteSettings.get(`https?://${winner}`)
// skip publishers the user explicitly excluded from payments
if ((siteSetting) && (siteSetting.get('ledgerPayments') === false)) return
result = client.vote(winner)
if (result) state = result
})
// persist only the last state returned by vote(); earlier ones are superseded
if (state) syncWriter(pathName(statePath), state, () => {})
} catch (ex) {
console.log('ledger client error(2): ' + ex.toString() + (ex.stack ? ('\n' + ex.stack) : ''))
}
if (delayTime === 0) {
try {
delayTime = client.timeUntilReconcile()
} catch (ex) {
delayTime = false
}
// no reconcile time available: retry at a random moment within 1-10 minutes
if (delayTime === false) delayTime = random.randomInt({ min: msecs.minute, max: 10 * msecs.minute })
}
if (delayTime > 0) {
// only one timer at a time
if (runTimeoutId) return
active = client
// cap long waits: re-check within the hour instead of sleeping past it
if (delayTime > (1 * msecs.hour)) delayTime = random.randomInt({ min: 3 * msecs.minute, max: msecs.hour })
runTimeoutId = setTimeout(() => {
runTimeoutId = false
// the client instance may have been replaced while the timer was pending
if (active !== client) return
if (!client) return console.log('\n\n*** MTR says this can\'t happen(1)... please tell him that he\'s wrong!\n\n')
// sync() returning true means state changed; re-enter with delayTime 0
if (client.sync(callback) === true) return run(0)
}, delayTime)
return
}
if (client.isReadyToReconcile()) return client.reconcile(uuid.v4().toLowerCase(), callback)
console.log('what? wait, how can this happen?')
}
/*
* ledger client utilities
*/
/*
 * Refresh the module-level ledgerInfo cache from a ledger client state
 * object, then push the result to the UI via updateLedgerInfo().
 * Only transactions newer than one year are surfaced.
 */
var getStateInfo = (state) => {
var ballots, i, transaction
var info = state.paymentInfo
var then = underscore.now() - msecs.year
// presence of the wallet property means the wallet has been created
ledgerInfo.created = !!state.properties.wallet
ledgerInfo.creating = !ledgerInfo.created
ledgerInfo.delayStamp = state.delayStamp
ledgerInfo.reconcileStamp = state.reconcileStamp
ledgerInfo.reconcileDelay = state.prepareTransaction && state.delayStamp
if (info) {
ledgerInfo._internal.paymentInfo = info
cacheReturnValue()
}
ledgerInfo.transactions = []
if (!state.transactions) return updateLedgerInfo()
// newest first; stop at the first transaction older than one year
for (i = state.transactions.length - 1; i >= 0; i--) {
transaction = state.transactions[i]
if (transaction.stamp < then) break
// merge recorded ballots with pending ones for the same viewing period
ballots = underscore.clone(transaction.ballots || {})
state.ballots.forEach((ballot) => {
if (ballot.viewingId !== transaction.viewingId) return
if (!ballots[ballot.publisher]) ballots[ballot.publisher] = 0
ballots[ballot.publisher]++
})
ledgerInfo.transactions.push(underscore.extend(underscore.pick(transaction,
[ 'viewingId', 'contribution', 'submissionStamp', 'count' ]),
{ ballots: ballots }))
}
updateLedgerInfo()
}
// timer handle for the periodic getBalance() poll; false when not scheduled
var balanceTimeoutId = false
/*
 * Poll the wallet balance once a minute.  When unconfirmed funds change the
 * cached ledger info is refreshed; when previously-unconfirmed funds clear,
 * the full payment info is re-fetched instead.
 */
var getBalance = () => {
if (!client) return
// re-arm before the async call so polling continues even on errors
balanceTimeoutId = setTimeout(getBalance, 1 * msecs.minute)
if (!ledgerInfo.address) return
ledgerBalance.getBalance(ledgerInfo.address, underscore.extend({ balancesP: true }, client.options),
(err, provider, result) => {
var unconfirmed
var info = ledgerInfo._internal.paymentInfo
if (err) return console.log('ledger balance warning: ' + JSON.stringify(err, null, 2))
if (typeof result.unconfirmed === 'undefined') return
if (result.unconfirmed > 0) {
// divide by 1e8 -- presumably satoshis -> BTC, 4 decimal places
unconfirmed = (result.unconfirmed / 1e8).toFixed(4)
if ((info || ledgerInfo).unconfirmed === unconfirmed) return
ledgerInfo.unconfirmed = unconfirmed
if (info) info.unconfirmed = ledgerInfo.unconfirmed
if (clientOptions.verboseP) console.log('\ngetBalance refreshes ledger info: ' + ledgerInfo.unconfirmed)
return updateLedgerInfo()
}
// nothing unconfirmed any more; if it was already zero there is nothing to do
if (ledgerInfo.unconfirmed === '0.0000') return
if (clientOptions.verboseP) console.log('\ngetBalance refreshes payment info')
getPaymentInfo()
})
}
/*
 * Record (or clear) the most recent ledger error on ledgerInfo.
 * Returns true when an error was recorded, false when the slot was cleared.
 */
var logError = (err, caller) => {
  if (!err) {
    ledgerInfo.error = null
    return false
  }
  ledgerInfo.error = {
    caller: caller,
    error: err
  }
  console.log('Error in %j: %j', caller, err)
  return true
}
/*
 * Fetch bravery properties and wallet properties from the ledger client and
 * merge them into ledgerInfo._internal.paymentInfo, then refresh the UI and
 * the cached payment URL/QR image.
 */
var getPaymentInfo = () => {
var amount, currency
if (!client) return
try {
ledgerInfo.bravery = client.getBraveryProperties()
if (ledgerInfo.bravery.fee) {
amount = ledgerInfo.bravery.fee.amount
currency = ledgerInfo.bravery.fee.currency
}
client.getWalletProperties(amount, currency, function (err, body) {
var info = ledgerInfo._internal.paymentInfo || {}
if (logError(err, 'getWalletProperties')) {
return
}
info = underscore.extend(info, underscore.pick(body, [ 'buyURL', 'buyURLExpires', 'balance', 'unconfirmed', 'satoshis' ]))
info.address = client.getWalletAddress()
if ((amount) && (currency)) {
info = underscore.extend(info, { amount: amount, currency: currency })
// derive the BTC amount from the fee using the returned exchange rate
if ((body.rates) && (body.rates[currency])) {
info.btc = (amount / body.rates[currency]).toFixed(8)
}
}
ledgerInfo._internal.paymentInfo = info
updateLedgerInfo()
cacheReturnValue()
})
} catch (ex) {
console.log('properties error: ' + ex.toString())
}
}
/*
 * Update the contribution amount.  Non-numeric or non-positive values are
 * ignored.  The new client state is persisted, and payment info is refreshed
 * when a wallet already exists.
 */
var setPaymentInfo = (amount) => {
if (!client) return
var bravery = client.getBraveryProperties()
amount = parseInt(amount, 10)
if (isNaN(amount) || (amount <= 0)) return
underscore.extend(bravery.fee, { amount: amount })
client.setBraveryProperties(bravery, (err, result) => {
if (err) return console.log('ledger setBraveryProperties: ' + err.toString())
if (result) syncWriter(pathName(statePath), result, () => {})
})
if (ledgerInfo.created) getPaymentInfo()
}
/*
 * Rebuild the cached bitcoin payment URL and its QR-code data URI from the
 * current payment info.  A no-op when payment info is missing or the URL is
 * unchanged.  The QR image is generated asynchronously and pushed to the UI
 * via updateLedgerInfo() once encoded.
 */
var cacheReturnValue = () => {
  var chunks, cache, paymentURL
  var info = ledgerInfo._internal.paymentInfo
  if (!info) return
  if (!ledgerInfo._internal.cache) ledgerInfo._internal.cache = {}
  cache = ledgerInfo._internal.cache
  paymentURL = 'bitcoin:' + info.address + '?amount=' + info.btc + '&label=' + encodeURI('Brave Software')
  if (cache.paymentURL === paymentURL) return
  cache.paymentURL = paymentURL
  updateLedgerInfo()
  try {
    chunks = []
    qr.image(paymentURL, { type: 'png' }).on('data', (chunk) => { chunks.push(chunk) }).on('end', () => {
      cache.paymentIMG = 'data:image/png;base64,' + Buffer.concat(chunks).toString('base64')
      updateLedgerInfo()
    })
  } catch (ex) {
    // bug fix: the message previously said 'qr.imageSync', but the API
    // actually invoked above is qr.image
    console.log('qr.image error: ' + ex.toString())
  }
}
/*
 * Invoked when network connectivity (re)appears.  Debounced on the leading
 * edge with a one-minute window, so bursts of connectivity events trigger a
 * single pass: cancel any pending run() timer, sync, and re-poll the balance.
 */
var networkConnected = underscore.debounce(() => {
if (!client) return
if (runTimeoutId) {
clearTimeout(runTimeoutId)
runTimeoutId = false
}
if (client.sync(callback) === true) run(random.randomInt({ min: msecs.minute, max: 10 * msecs.minute }))
if (balanceTimeoutId) clearTimeout(balanceTimeoutId)
balanceTimeoutId = setTimeout(getBalance, 5 * msecs.second)
}, 1 * msecs.minute, true)
/*
* low-level utilities
*/
// per-path write state: true while a write is in flight, or the deferred
// { obj, options, cb } request to restart once the current write completes
var syncingP = {}
/*
 * Serialized JSON file writer.  At most one write per path is in flight; a
 * write requested while one is pending is deferred (last request wins) and
 * restarted when the current write completes.  `options` may be omitted, in
 * which case the third argument is the callback.
 */
var syncWriter = (path, obj, options, cb) => {
if (typeof options === 'function') {
cb = options
options = null
}
options = underscore.defaults(options || {}, { encoding: 'utf8', mode: parseInt('644', 8) })
if (syncingP[path]) {
// a write is already in flight: remember only the most recent request
syncingP[path] = { obj: obj, options: options, cb: cb }
if (ledgerInfo._internal.debugP) console.log('deferring ' + path)
return
}
syncingP[path] = true
if (ledgerInfo._internal.debugP) console.log('writing ' + path)
fs.writeFile(path, JSON.stringify(obj, null, 2), options, (err) => {
var deferred = syncingP[path]
delete syncingP[path]
if (typeof deferred === 'object') {
if (ledgerInfo._internal.debugP) console.log('restarting ' + path)
syncWriter(path, deferred.obj, deferred.options, deferred.cb)
}
if (err) console.log('write error: ' + err.toString())
cb(err)
})
}
// filename suffix keyed by NODE_ENV so dev/test state never clobbers production state
const pathSuffix = { development: '-dev', test: '-test' }[process.env.NODE_ENV] || ''
/*
 * Map a bare filename to its absolute per-profile location, inserting the
 * NODE_ENV suffix before the extension.  Tests use a dedicated directory
 * under $HOME instead of the application profile.
 */
var pathName = (name) => {
var parts = path.parse(name)
var basePath = process.env.NODE_ENV === 'test'
? path.join(process.env.HOME, '.brave-test-ledger')
: app.getPath('userData')
return path.join(basePath, parts.name + pathSuffix + parts.ext)
}
/**
* UI controller functionality
*/
/**
* Show message that it's time to add funds if reconciliation is less than
* a day in the future and balance is too low.
* 24 hours prior to reconciliation, show message asking user to review
* their votes.
*/
const showNotifications = () => {
// respect user preferences and the global suppression flag
if (!getSetting(settings.PAYMENTS_ENABLED) ||
!getSetting(settings.PAYMENTS_NOTIFICATIONS) || suppressNotifications) {
return
}
const reconcileStamp = ledgerInfo.reconcileStamp
const balance = Number(ledgerInfo.balance || 0)
const unconfirmed = Number(ledgerInfo.unconfirmed || 0)
// only notify within 24 hours of the next reconciliation
if (reconcileStamp && reconcileStamp - underscore.now() < msecs.day) {
// 10% headroom: warn when funds cover less than 90% of the contribution
if (ledgerInfo.btc &&
balance + unconfirmed < 0.9 * Number(ledgerInfo.btc)) {
addFundsMessage = addFundsMessage || locale.translation('addFundsNotification')
appActions.showMessageBox({
greeting: locale.translation('updateHello'),
message: addFundsMessage,
buttons: [
{text: locale.translation('updateLater')},
{text: locale.translation('addFunds'), className: 'primary'}
],
options: {
style: 'greetingStyle',
persist: false
}
})
} else if (!reconciliationNotificationShown) {
// funds are sufficient: ask the user to review their votes, at most once
reconciliationMessage = reconciliationMessage || locale.translation('reconciliationNotification')
appActions.showMessageBox({
greeting: locale.translation('updateHello'),
message: reconciliationMessage,
buttons: [
{text: locale.translation('reviewSites'), className: 'primary'}
],
options: {
style: 'greetingStyle',
persist: false
}
})
reconciliationNotificationShown = true
}
}
}
// public lifecycle entry points of the ledger subsystem
module.exports = {
init: init,
quit: quit,
boot: boot
}
var updateLedgerInfo = () => {
var info = ledgerInfo._internal.paymentInfo
var now = underscore.now() |
<|file_name|>v1_group_version_for_discovery.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1GroupVersionForDiscovery(object):
    """Kubernetes discovery model pairing "group/version" with bare "version".

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    # swagger_types: attribute name -> attribute type
    # attribute_map: attribute name -> JSON key in the API definition
    swagger_types = {
        'group_version': 'str',
        'version': 'str'
    }

    attribute_map = {
        'group_version': 'groupVersion',
        'version': 'version'
    }

    def __init__(self, group_version=None, version=None):
        """V1GroupVersionForDiscovery - a model defined in Swagger."""
        self._group_version = None
        self._version = None
        self.discriminator = None
        # assignment goes through the property setters, which reject None
        self.group_version = group_version
        self.version = version

    @property
    def group_version(self):
        """Gets the group_version of this V1GroupVersionForDiscovery.

        groupVersion specifies the API group and version in the form
        "group/version".

        :return: The group_version of this V1GroupVersionForDiscovery.
        :rtype: str
        """
        return self._group_version

    @group_version.setter
    def group_version(self, group_version):
        """Sets the group_version of this V1GroupVersionForDiscovery.

        :param group_version: The group_version of this V1GroupVersionForDiscovery.
        :type: str
        """
        if group_version is None:
            raise ValueError("Invalid value for `group_version`, must not be `None`")
        self._group_version = group_version

    @property
    def version(self):
        """Gets the version of this V1GroupVersionForDiscovery.

        version specifies the version in the form of "version".  This is to
        save the clients the trouble of splitting the GroupVersion.

        :return: The version of this V1GroupVersionForDiscovery.
        :rtype: str
        """
        return self._version

    @version.setter
    def version(self, version):
        """Sets the version of this V1GroupVersionForDiscovery.

        :param version: The version of this V1GroupVersionForDiscovery.
        :type: str
        """
        if version is None:
            raise ValueError("Invalid value for `version`, must not be `None`")
        self._version = version

    def to_dict(self):
        """Return the model properties as a dict."""
        result = {}
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Return True if both objects are equal."""
        if not isinstance(other, V1GroupVersionForDiscovery):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Return True if both objects are not equal."""
        return not self == other
|
<|file_name|>thread.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::borrow::ToOwned;
use util::thread::spawn_named;
#[test]
fn spawn_named_test() {
    // Smoke test: a named thread can be spawned and runs its closure.
    spawn_named("Test".to_owned(), move || {
        println!("I can run!");
    });
}
<|file_name|>data_migrations.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*-
from __future__ import unicode_literals
import collections
from django.db import models
from django.utils import six
from django.utils.six.moves import zip
from yepes.contrib.datamigrations.exceptions import (
UnableToCreateError,
UnableToExportError,
UnableToImportError,
UnableToUpdateError,
)
from yepes.contrib.datamigrations.fields import (
BooleanField,
DateField, DateTimeField, TimeField,
FileField,
FloatField, IntegerField, NumberField,
TextField,
)
from yepes.contrib.datamigrations.importation_plans import importation_plans
from yepes.contrib.datamigrations.serializers import serializers
from yepes.contrib.datamigrations.utils import ModelFieldsCache
from yepes.types import Undefined
from yepes.utils.properties import cached_property
class DataMigration(object):
    """Base class describing how a set of fields is exported and imported.

    Subclasses supply ``fields`` and an implementation of
    :meth:`get_data_to_export`; the ``can_create``/``can_update`` flags
    control which importation plans are permitted.
    """

    can_create = False
    can_update = False
    fields = []

    def export_data(self, file=None, serializer=None):
        """Serialize the exportable fields, optionally into ``file``."""
        if not self.can_export:
            raise UnableToExportError
        serializer = self.get_serializer(serializer)
        headers = [fld.name for fld in self.fields_to_export]
        rows = self.get_data_to_export(serializer)
        return serializer.serialize(headers, rows, file)

    def get_data_to_export(self, serializer):
        """Yield one row per record; must be provided by subclasses."""
        raise NotImplementedError('Subclasses of DataMigration must override get_data_to_export() method')

    def get_data_to_import(self, source, serializer):
        """Deserialize ``source`` into a lazy stream of field-path dicts."""
        fields = self.fields_to_import
        headers = [fld.name for fld in fields]
        rows = serializer.deserialize(headers, source)

        def row_to_record(row):
            # skip cells the serializer marked as Undefined
            return {
                fld.path: fld.import_value(value, serializer)
                for value, fld in zip(row, fields)
                if value is not Undefined
            }

        return (row_to_record(row) for row in rows)

    def get_importation_plan(self, plan_class):
        """Resolve ``plan_class`` (name or class) and instantiate it."""
        if isinstance(plan_class, six.string_types):
            plan_class = importation_plans.get_plan(plan_class)
        return plan_class(self)

    def get_serializer(self, serializer=None):
        """Resolve ``serializer`` (None, name, class or instance); 'json' is the default."""
        if serializer is None:
            serializer = 'json'
        if isinstance(serializer, six.string_types):
            serializer = serializers.get_serializer(serializer)
        if isinstance(serializer, collections.Callable):
            serializer = serializer()
        return serializer

    def import_data(self, source, serializer=None, plan=None, batch_size=100):
        """Deserialize ``source`` and feed it to the importation plan."""
        if not self.can_import:
            raise UnableToImportError
        plan = self.get_importation_plan(plan)
        # reject plans whose abilities exceed what this migration allows
        if not self.can_create and getattr(plan, 'inserts_data', True):
            raise UnableToCreateError
        if not self.can_update and getattr(plan, 'updates_data', True):
            raise UnableToUpdateError
        serializer = self.get_serializer(serializer)
        records = self.get_data_to_import(source, serializer)
        plan.run(records, batch_size)

    @property
    def can_export(self):
        # exporting is possible whenever at least one field is exportable
        return bool(self.fields_to_export)

    @property
    def can_import(self):
        # importing needs importable fields plus permission to insert or update
        return self.fields_to_import and (self.can_create or self.can_update)

    @cached_property
    def fields_to_export(self):
        return self.fields

    @cached_property
    def fields_to_import(self):
        return self.fields if self.can_create or self.can_update else []
class BaseModelMigration(DataMigration):
    """DataMigration bound to a Django model.

    Determines importability (primary/natural keys, required fields) by
    inspecting the model's meta options.
    """

    def __init__(self, model, use_base_manager=False,
                 ignore_missing_foreign_keys=False):
        self.model = model
        self.use_base_manager = use_base_manager
        self.ignore_missing_foreign_keys = ignore_missing_foreign_keys

    def get_data_to_export(self, serializer):
        """Export rows from the model's manager, lazily."""
        if self.use_base_manager:
            manager = self.model._base_manager
        else:
            manager = self.model._default_manager
        queryset = manager.get_queryset()
        if self.requires_model_instances:
            return self._data_from_objects(queryset, serializer)
        return self._data_from_values(queryset, serializer)

    def _data_from_objects(self, queryset, serializer):
        # Slow path: full model instances (needed when a field path crosses
        # an object property).
        fields = self.fields_to_export
        if (queryset._result_cache is None
                and not queryset._prefetch_related_lookups):
            queryset = queryset.iterator()
        return (
            [
                fld.export_value(fld.value_from_object(obj), serializer)
                for fld in fields
            ]
            for obj in queryset
        )

    def _data_from_values(self, queryset, serializer):
        # Fast path: values_list() avoids instantiating model objects.
        fields = self.fields_to_export
        paths = [fld.path for fld in fields]
        return (
            [
                fld.export_value(value, serializer)
                for value, fld in zip(row, fields)
            ]
            for row in queryset.values_list(*paths).iterator()
        )

    def get_importation_plan(self, plan_class=None):
        """Pick a default plan matching the create/update abilities."""
        if plan_class is None:
            if self.can_create and self.can_update:
                plan_class = 'update_or_create'
            elif self.can_create:
                plan_class = 'create'
            elif self.can_update:
                plan_class = 'update'
        return super(BaseModelMigration, self).get_importation_plan(plan_class)

    @cached_property
    def can_create(self):
        # New records can be created only when every required model field
        # (non-blank, without default, not an auto-created relation) is
        # covered by the imported fields.
        included_fields = {
            self.model_fields[fld][0]
            for fld in self.fields_to_import
        }
        required_fields = {
            f
            for f in self.model._meta.get_fields()
            if not (f.is_relation and f.auto_created)
            and not f.blank
            and not f.has_default()
        }
        return (required_fields <= included_fields)

    @property
    def can_update(self):
        return (self.primary_key is not None)

    @cached_property
    def fields_to_import(self):
        importable = []
        for fld, model_fields in six.iteritems(self.model_fields):
            if (len(model_fields) == 1
                    and '__' not in fld.path):
                importable.append(fld)
            elif (len(model_fields) == 2
                    and fld.path.count('__') == 1):
                f1, f2 = model_fields
                if (f2.unique and not f2.null
                        and f1.remote_field is not None and f2.remote_field is None):
                    importable.append(fld)  # This allows use of natural keys.
        return importable

    @cached_property
    def model_fields(self):
        # Ordered mapping: migration field -> chain of model fields along
        # its '__'-separated path.
        cache = ModelFieldsCache()
        pairs = []
        for fld in self.fields:
            model_fields = cache.get_model_fields(
                self.model,
                fld.path.split('__'),
            )
            pairs.append((fld, model_fields))
        return collections.OrderedDict(pairs)

    @cached_property
    def natural_foreign_keys(self):
        keys = [
            fld
            for fld in self.fields_to_import
            if '__' in fld.path
        ]
        return keys or None

    @cached_property
    def primary_key(self):
        # Preference order: the model's primary key, then any unique
        # non-null field, then a unique_together tuple fully covered by the
        # imported fields.
        fallback = None
        opts = self.model._meta
        for fld in self.fields_to_import:
            f = self.model_fields[fld][0]
            if f.primary_key:
                return fld
            if fallback is None and f.unique and not f.null:
                fallback = fld
        if fallback is None and opts.unique_together:
            available_model_fields = {
                model_fields[0].name: fld
                for fld, model_fields in six.iteritems(self.model_fields)
                if fld in self.fields_to_import
            }
            for group in opts.unique_together:
                try:
                    fallback = tuple(
                        available_model_fields[name]
                        for name in group
                    )
                except KeyError:
                    continue
                else:
                    break
        return fallback

    @cached_property
    def requires_model_instances(self):
        for fld, model_fields in six.iteritems(self.model_fields):
            if len(model_fields) < (fld.path.count('__') + 1):
                return True  # Field path points to an object property.
        return False
class ModelMigration(BaseModelMigration):
def __init__(self, model, fields=None, exclude=None,
use_natural_primary_keys=False,
use_natural_foreign_keys=False,
use_base_manager=False,
ignore_missing_foreign_keys=False):<|fim▁hole|> self.selected_fields = None
else:
self.selected_fields = [ # Field order matters
name if name != 'pk' else model._meta.pk.name
for name
in fields
]
if not exclude:
self.excluded_fields = None
else:
self.excluded_fields = {
name if name != 'pk' else model._meta.pk.name
for name
in exclude
}
self.use_natural_primary_keys = use_natural_primary_keys
self.use_natural_foreign_keys = use_natural_foreign_keys
def build_field(self, model_field, path=None, name=None, attname=None):
if hasattr(model_field, 'migrationfield'):
return model_field.migrationfield(path, name, attname)
if path is None:
path = model_field.attname
if name is None:
name = model_field.name
if attname is None:
attname = path
if isinstance(model_field, (models.BooleanField, models.NullBooleanField)):
field_class = BooleanField
elif isinstance(model_field, models.DateTimeField):
field_class = DateTimeField
elif isinstance(model_field, models.DateField):
field_class = DateField
elif isinstance(model_field, models.FileField):
field_class = FileField
elif isinstance(model_field, models.FloatField):
field_class = FloatField
elif isinstance(model_field, (models.IntegerField, models.AutoField)):
field_class = IntegerField
elif isinstance(model_field, models.DecimalField):
field_class = NumberField
elif isinstance(model_field, (models.CharField, models.TextField,
models.FilePathField,
models.IPAddressField, models.GenericIPAddressField)):
field_class = TextField
elif isinstance(model_field, models.TimeField):
field_class = TimeField
else:
return None
return field_class(path, name, attname)
def build_relation(self, model_field, path=None, name=None, attname=None):
# Discard ManyToManyFields and GenericForeignKeys
if not isinstance(model_field, models.ForeignKey):
return None
if path is None:
path = model_field.attname
if name is None:
name = model_field.name
if attname is None:
attname = path
target_field = model_field.target_field
if self.use_natural_foreign_keys:
opts = target_field.model._meta
natural_key = self.find_natural_key(
opts.get_fields(),
opts.unique_together)
if natural_key is not None:
if not isinstance(natural_key, collections.Iterable):
fld = self.build_field(
natural_key,
''.join((name, '__', natural_key.attname)),
''.join((name, '__', natural_key.name)),
attname)
if fld is not None:
return [(fld, [model_field, natural_key])]
else:
flds = [
self.build_field(
key,
''.join((name, '__', key.attname)),
''.join((name, '__', key.name)),
attname)
for key
in natural_key
]
if all(fld is not None for fld in flds):
return [
(fld, [model_field, key])
for fld, key
in zip(flds, natural_key)
]
fld = self.build_field(target_field, path, name, attname)
return [(fld, [model_field])]
def find_natural_key(self, model_fields, unique_together=()):
for f in model_fields:
if not f.is_relation and f.unique and not f.primary_key:
return f
if unique_together:
available_model_fields = {
f.name: f
for f
in model_fields
}
for set in unique_together:
try:
return tuple(
available_model_fields[name]
for name
in set
)
except KeyError:
continue
return None
@cached_property
def fields(self):
return [fld for fld in self.model_fields]
@cached_property
def model_fields(self):
cache = ModelFieldsCache()
selected_fields = self.selected_fields or [
f.name
for f
in cache.get_all_model_fields(self.model)
]
if self.excluded_fields:
selected_fields = [
field_name
for field_name
in selected_fields
if field_name not in self.excluded_fields
]
if self.use_natural_primary_keys and not self.selected_fields:
model_fields = [
f
for f
in cache.get_all_model_fields(self.model)
if f.name in selected_fields
]
natural_key = self.find_natural_key(
model_fields,
self.model._meta.unique_together
)
if natural_key is not None:
excluded_fields = [
f.name
for f
in model_fields
if f.primary_key
]
selected_fields = [
field_name
for field_name
in selected_fields
if field_name not in excluded_fields
]
fields = []
for name in selected_fields:
path = name.split('__')
model_fields = cache.get_model_fields(self.model, path)
if not model_fields or len(model_fields) < len(path):
continue # ModelMigration cannot handle properties.
model_field = model_fields[-1]
path = '__'.join(f.attname for f in model_fields)
name = '__'.join(f.name for f in model_fields)
attname = path
if not model_field.is_relation:
fld = self.build_field(model_field, path, name, attname)
if fld is not None:
fields.append((fld, model_fields))
else:
rel = self.build_relation(model_field, path, name, attname)
if rel is not None:
if len(model_fields) > 1:
previous_model_fields = model_fields[:-1]
rel = [
(fld, previous_model_fields + rel_model_fields)
for fld, rel_model_fields
in rel
]
fields.extend(rel)
return collections.OrderedDict(fields)
class QuerySetExportation(ModelMigration):
    """Export-only migration built from an existing queryset.

    Field selection is derived from the queryset's deferred loading state
    (``only()``/``defer()``); importing is disabled entirely.
    """

    can_create = False
    can_update = False
    fields_to_import = []

    def __init__(self, queryset):
        model = queryset.model
        opts = model._meta
        fields = None
        exclude = None
        field_names, defer = queryset.query.deferred_loading
        if field_names:
            # keep the model's declaration order
            field_names = sorted(field_names, key=(
                lambda n: opts.get_field(n).creation_counter))
            if defer:
                exclude = field_names
            else:
                fields = field_names
        super(QuerySetExportation, self).__init__(model, fields, exclude)
        self.queryset = queryset

    def get_data_to_export(self, serializer):
        if self.requires_model_instances:
            return self._data_from_objects(self.queryset, serializer)
        return self._data_from_values(self.queryset, serializer)

    @cached_property
    def requires_model_instances(self):
        # results already fetched or prefetches pending -> must use objects
        return (self.queryset._result_cache is not None
                or self.queryset._prefetch_related_lookups)
super(ModelMigration, self).__init__(model, use_base_manager,
ignore_missing_foreign_keys)
if not fields: |
<|file_name|>relays.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import logging
from flask import request
from flask import render_template
from relay import app
from relay.decorators import jsonify
from relay.decorators import session_required
from relay.decorators import sanitize_user
from relay.models.relays import add_relay_model
from relay.models.relays import get_relay
from relay.models.relays import get_relays
from relay.models.relays import get_relays_for_recipient
from relay.models.relays import get_sent_relay
from relay.models.relays import get_sent_relays_for_user
from relay.models.relays import add_comment
from relay.models.relays import delete_comment
from relay.models.relays import add_like
from relay.models.relays import unlike
from relay.util import extract_url
from relay.util import make_relay_map
from relay.util import make_sent_relay_map
# remove the direct models from these files, but babysteps
from google.appengine.api import taskqueue
@app.route('/relays/preview')
@jsonify
def relay_preview():
    """Return the preview map for a URL, creating the relay on first sight."""
    # standardize the url so that we maximize our caching
    url = extract_url(request.args.get('url'))
    if not url:
        return {}
    relay = get_relay(url)
    if not relay:
        relay = add_relay_model(url)
        relay.put()
    return make_relay_map(relay)
@app.route('/relays/<user_id>/archive', methods=['POST'])
@jsonify
@sanitize_user
@session_required
def archive_relay(user_id, user=None):
    """Move a sent relay out of the user's inbox and into their archive."""
    relay = get_sent_relay(long(request.form['relay_id']))
    relay.not_archived.remove(user_id)
    relay.archived.append(user_id)
    stored_key = relay.put()
    logging.info('archiving sent_relay %s'%(str(relay)))
    return {'success': stored_key is not None}
@app.route('/relays/like', methods=['POST'])
@jsonify
@session_required
def post_like(user=None):
    """Record a like by the current user on the posted relay."""
    relay_id = long(request.form['relay_id'])
    return {'success': add_like(relay_id, user.key.id())}
@app.route('/relays/comment', methods=['POST'])
@jsonify
@session_required
def post_comment(user=None):
    """Attach a comment by the current user to the posted relay."""
    relay_id = long(request.form['relay_id'])
    text = request.form['message']
    return {'success': add_comment(relay_id, user.key.id(), text)}
@app.route('/relays/like/delete', methods=['POST'])
@jsonify
@session_required
def remove_like(user=None):
    """Remove the current user's like from a sent relay.

    BUG FIX: the handler previously called ``delete_like``, which is not
    imported anywhere in this module, so every request raised ``NameError``.
    ``unlike`` is the imported counterpart of ``add_like`` and is called
    instead.
    """
    like_id = long(request.form['like_id'])
    result = unlike(like_id, user.key.id())
    return {'success': result}
@app.route('/relays/comment/delete', methods=['POST'])
@jsonify
@session_required
def remove_comment(user=None):
    """Delete a comment owned by the current user.

    BUG FIX: the view previously declared a ``user_id`` positional
    parameter, but the route supplies no URL arguments, so Flask invoked the
    view without it and every request raised ``TypeError``.  The parameter
    was unused and has been removed (matching ``post_comment`` and
    ``remove_like``).
    """
    comment_id = long(request.form['comment_id'])
    result = delete_comment(comment_id, user.key.id())
    return {'success': result}
@app.route('/relays', methods=['GET', 'POST'])
@app.route('/relays/<int:sent_relay_id>')
@jsonify
def reelay(sent_relay_id=None):
    """GET: page through relays; POST: enqueue a new relay for delivery."""
    if request.method == 'POST':
        enqueued = queue_relay(
            request.form['url'],
            request.form['sender'],
            request.form['recipients'],
        )
        return {'success': enqueued}
    if request.method == 'GET':
        offset = int(request.args.get('offset', 0))
        return {'relays': get_relays(sent_relay_id, offset)}
@app.route('/a')
def test_relay_html():
    """Render the debug template with the first page of relays."""
    return render_template('template.html', relays=get_relays(None, 0))
def queue_relay(url, sender, recipients):
    """Enqueue a relay-delivery task; returns True when it was enqueued."""
    job = taskqueue.add(
        url='/post_relay_queue',
        params={
            'url': url,
            'sender': sender,
            'recipients': recipients,
        }
    )
    return job.was_enqueued
@app.route('/relays/<user_id>/delete', methods=['POST'])
@jsonify
@sanitize_user
@session_required
def delete_relay(user_id, user=None):
    """Delete a sent relay (as sender) or leave its recipient list (as recipient)."""
    sent_relay = get_sent_relay(long(request.form['relay_id']))
    recipients = sent_relay.recipients
    success = False
    # validate this
    if user_id == sent_relay.sender:
        # the sender removes the whole record
        sent_relay.key.delete()
        success = True
    if user_id in recipients:
        # a recipient only drops out of the recipient list
        recipients.remove(user_id)
        sent_relay.put()
        success = True
    return {'success': success}
@app.route('/relays/from/<user_id>')
@jsonify
@sanitize_user
@session_required
def get_relays_from_user(user_id=None, user=None):
    """List relays sent by ``user_id``, paged via offset/limit query args."""
    offset = int(request.args.get('offset', 0))
    limit = int(request.args.get('limit', 10))
    results = []
    for item in get_sent_relays_for_user(user_id, offset=offset, limit=limit):
        entry = make_sent_relay_map(item)
        # the sender is implicit here; expose the recipients instead
        entry.pop('sender', None)
        entry['recipients'] = item.recipients
        results.append(entry)
    return {'relays': results}
@app.route('/relays/to/<user_id>')
@jsonify
@sanitize_user
@session_required
def get_relay_to_user(user_id=None, user=None, archived=False):
    """List relays received by ``user_id``; ``?archived=1`` selects the archive."""
    archived = bool(int(request.args.get('archived', 0)))
    return _get_relay_to_user(user_id, user, archived)
offset = int(request.args.get('offset', 0))
relays = get_relays_for_recipient(user_id, offset, archived=archived)
return {
'relays' : [
make_sent_relay_map(r) for r in relays
]
}<|fim▁end|> | @jsonify
@sanitize_user
@session_required |
<|file_name|>uuid.hpp<|end_file_name|><|fim▁begin|>#ifndef UUIDS_UUID_HPP
#define UUIDS_UUID_HPP
#include <algorithm>
#include <iomanip>
#include <iosfwd>
#include <cctype>
#include <cstring>
#include <string>
#include <cstdint>
#include <iterator>
namespace uuids
{
namespace detail
{
// Detection trait: value is true when T defines its own operator& (unary
// address-of), in which case plain `&ref` may not yield the real address.
template <typename T>
struct has_overloaded_addressof
{
// fallback overload: chosen when the SFINAE overload below is ill-formed
template <typename U>
static constexpr bool has_overload(...)
{
return false;
}
// viable only when U has a member operator& (sizeof forces its evaluation)
template <typename U, ::std::size_t N = sizeof(::std::declval<U&>().operator&())>
static constexpr bool has_overload(bool /*unused*/)
{
return true;
}
static constexpr bool value = has_overload<T>(true);
};
// Return the address of `ref`.  For types without an overloaded operator&
// the built-in address-of operator is used (usable in constant expressions);
// otherwise std::addressof bypasses the user-provided operator&.
template <typename T, typename ::std::enable_if<!has_overloaded_addressof<T>::value, bool>::type = false>
constexpr inline T* static_addressof(T& ref)
{
return &ref;
}
template <typename T, typename ::std::enable_if<has_overloaded_addressof<T>::value, bool>::type = false>
constexpr inline T* static_addressof(T& ref)
{
return std::addressof(ref);
}
// Compile-time strlen: number of chars before the NUL terminator.
// A null pointer yields 0.  Single-return recursion keeps this a valid
// C++11 constexpr function.
constexpr inline std::size_t static_strlen(const char* s)
{
    return (s == nullptr || *s == '\0') ? 0 : 1 + static_strlen(s + 1);
}
// Compile-time wcslen: number of wide characters before the terminator.
// A null pointer yields 0.
constexpr inline std::size_t static_wstrlen(const wchar_t* s)
{
    return (s == nullptr || *s == L'\0') ? 0 : 1 + static_wstrlen(s + 1);
}
template <typename InIter1, typename InIter2>
constexpr void static_copy(InIter1 /*unused*/, InIter1 /*unused*/, InIter2 /*unused*/)
{
// return (first1 != last1) ?<|fim▁hole|>{
return (first != last && pr(*first)) ? static_all_of(++first, last, pr) : (first == last);
}
}
struct uuid
{
using value_type = std::uint8_t;
using pointer = value_type*;
using reference = value_type&;
using const_pointer = value_type const*;
using const_reference = value_type const&;
using size_type = ::std::size_t;
using difference_type = ::std::ptrdiff_t;
using iterator = pointer;
using const_iterator = const_pointer;
using reverse_iterator = ::std::reverse_iterator<iterator>;
using const_reverse_iterator = ::std::reverse_iterator<const_iterator>;
///////////////////////////////////////////////////////////////////////
enum version_type
{
version_unknown = 0x00,
version_time_based = 0x01,
version_dce_security = 0x02,
version_name_based_md5 = 0x03,
version_random = 0x04,
version_name_based_sha1 = 0x05,
};
enum variant_type
{
variant_unknown,
variant_ncs,
variant_rfc4122,
variant_microsoft,
variant_future,
};
///////////////////////////////////////////////////////////////////////
constexpr uuid()
: data_{0}
{
}
constexpr uuid(uuid&&) = default;
constexpr uuid(uuid const&) = default;
constexpr explicit uuid(char const* cstr)
: uuid(cstr, detail::static_strlen(cstr))
{
}
constexpr uuid(char const* /*unused*/, size_type /*unused*/)
: uuid()
{
// std::isxdigit(ch)..
}
explicit uuid(std::string const& str)
: uuid(str.c_str(), str.size())
{
}
// constexpr pl0x!
explicit uuid(const std::initializer_list<value_type>& ilist)
: uuid(ilist.begin(), ilist.end())
{
}
template <typename InIter>
uuid(InIter first, InIter last)
{
::std::copy(first, last, data_);
}
uuid& operator=(uuid&&) = default;
uuid& operator=(uuid const&) = default;
///////////////////////////////////////////////////////////////////////
constexpr const_reference operator[](size_type idx) const noexcept
{
return data_[idx];
}
///////////////////////////////////////////////////////////////////////
// constexpr bool is_nil() const noexcept
//{
// return detail::static_all_of(begin(), end(), [](value_type x) { return x
// == 0; });
//}
static constexpr size_type size() noexcept
{
return static_size;
}
constexpr version_type version() const noexcept
{
return version_random; // not implemented, of course...
}
constexpr variant_type variant() const noexcept
{
return (
(data_[8] >> 5) == 0x07
? variant_future
: ((data_[8] >> 5) == 0x06
? variant_microsoft
: ((data_[8] >> 6) == 0x02
? variant_rfc4122
: ((data_[8] >> 7) == 0x00 ? variant_ncs : variant_unknown)))); // not sure if
// variant_unknown
// possible... here
// just cuz..
}
std::string to_string() const
{
static constexpr char const* const hex_digits = "0123456789abcdef";
::std::string result(this->size() * 2 + 4, 0);
result[8] = result[13] = result[18] = result[23] = '-';
for(size_type i = 0, k = 0; i < result.size() - 1;)
{
if(result[i] == '-')
{
++i;
continue;
}
result[i++] = hex_digits[(data_[k] & 0xf0u) >> 4];
result[i++] = hex_digits[(data_[k] & 0x0fu)];
++k;
}
return result;
}
///////////////////////////////////////////////////////////////////////
constexpr const_iterator begin() const noexcept
{
return detail::static_addressof(data_[0]);
}
constexpr const_iterator end() const noexcept
{
return detail::static_addressof(data_[static_size - 1]);
}
constexpr const_iterator cbegin() const noexcept
{
return detail::static_addressof(data_[0]);
}
constexpr const_iterator cend() const noexcept
{
return detail::static_addressof(data_[static_size - 1]);
}
const_reverse_iterator rbegin() const noexcept
{
return const_reverse_iterator{end()};
}
const_reverse_iterator rend() const noexcept
{
return const_reverse_iterator{begin()};
}
const_reverse_iterator crbegin() const noexcept
{
return const_reverse_iterator{cend()};
}
const_reverse_iterator crend() const noexcept
{
return const_reverse_iterator{cbegin()};
}
///////////////////////////////////////////////////////////////////////
void swap(uuid& other) noexcept
{
uuid tmp = *this;
*this = other;
other = tmp;
}
///////////////////////////////////////////////////////////////////////
friend bool operator==(uuid const& lhs, uuid const& rhs)
{
return ::std::equal(lhs.begin(), lhs.end(), rhs.begin());
}
friend bool operator<(uuid const& lhs, uuid const& rhs)
{
return ::std::lexicographical_compare(lhs.begin(), lhs.end(), rhs.begin(), rhs.end());
}
///////////////////////////////////////////////////////////////////////
template <typename CharT, typename Traits>
friend ::std::basic_ostream<CharT, Traits>& operator<<(::std::basic_ostream<CharT, Traits>& os,
uuid const& u)
{
typedef typename std::basic_ostream<CharT, Traits> ostream_type;
typename ostream_type::sentry ok(os);
if(ok)
{
for(std::size_t i = 0; i != u.size(); ++i)
{
os << ::std::hex << ::std::setfill('0') << ::std::setw(2) << static_cast<int>(u[i]);
if(i == 3 || i == 5 || i == 7 || i == 9)
{
os << os.widen('-');
}
}
}
return os;
}
private:
static constexpr std::size_t static_size = 16;
value_type data_[static_size];
};
///////////////////////////////////////////////////////////////////////
inline void swap(uuid& lhs, uuid& rhs)
{
lhs.swap(rhs);
}
///////////////////////////////////////////////////////////////////////
inline bool operator!=(uuid const& lhs, uuid const& rhs)
{
return !(lhs == rhs);
}
inline bool operator<=(uuid const& lhs, uuid const& rhs)
{
return !(rhs < lhs);
}
inline bool operator>=(uuid const& lhs, uuid const& rhs)
{
return !(lhs < rhs);
}
inline bool operator>(uuid const& lhs, uuid const& rhs)
{
return (rhs < lhs);
}
}
namespace std
{
template <>
struct hash<uuids::uuid>
{
size_t operator()(const uuids::uuid& obj) const
{
hash<std::string> hasher;
return hasher(obj.to_string());
}
};
}
#endif // UUIDS_UUID_HPP<|fim▁end|> | }
template <typename InIter, typename Pred>
constexpr inline bool static_all_of(InIter first, InIter last, Pred pr) |
<|file_name|>log_setup.py<|end_file_name|><|fim▁begin|>import logging
class BorgSingleton:
_shared_state = {}<|fim▁hole|> self.__dict__ = self._shared_state
class LoggerSetup(BorgSingleton):
"""Logger setup convenience class"""
DEFAULT_FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
def __init__(self, logger_name, log_level=logging.INFO, log_file=None, log_format=DEFAULT_FORMAT):
BorgSingleton.__init__(self)
self.__logger_name = logger_name
logger = logging.getLogger(self.__logger_name)
logger.setLevel(log_level)
if log_file:
self.add_log_file(log_file, log_level, log_format)
def add_log_file(self, log_file, level=logging.INFO, log_format=DEFAULT_FORMAT):
file_handler = logging.FileHandler(log_file)
file_handler.setLevel(level)
file_handler.setFormatter(logging.Formatter(log_format))
logging.getLogger(self.__logger_name).addHandler(file_handler)
def get_logger(self):
return logging.getLogger(self.__logger_name)<|fim▁end|> |
def __init__(self): |
<|file_name|>mozambique.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# python-holidays
# ---------------
# A fast, efficient Python library for generating country, province and state
# specific sets of holidays on the fly. It aims to make determining whether a
# specific date is a holiday as fast and flexible as possible.
#
# Authors: dr-prodigy <maurizio.montel@gmail.com> (c) 2017-2022
# ryanss <ryanssdev@icloud.com> (c) 2014-2017
# Website: https://github.com/dr-prodigy/python-holidays
# License: MIT (see LICENSE file)
from datetime import date
from dateutil.easter import easter
from dateutil.relativedelta import relativedelta as rd
from holidays.constants import TUE, THU, SUN
from holidays.constants import FEB, APR, MAY, JUN, SEP, OCT, NOV, DEC
from holidays.holiday_base import HolidayBase
class Mozambique(HolidayBase):
country = "MZ"
def __init__(self, **kwargs):
HolidayBase.__init__(self, **kwargs)
def _populate(self, year):<|fim▁hole|> good_friday = e - rd(days=2)
self[good_friday] = "Sexta-feira Santa"
# carnival is the Tuesday before Ash Wednesday
# which is 40 days before easter excluding sundays
carnival = e - rd(days=46)
while carnival.weekday() != TUE:
carnival = carnival - rd(days=1)
self[carnival] = "Carnaval"
self[date(year, FEB, 3)] = "Dia dos Heróis Moçambicanos"
self[date(year, APR, 7)] = "Dia da Mulher Moçambicana"
self[date(year, MAY, 1)] = "Dia Mundial do Trabalho"
self[date(year, JUN, 25)] = "Dia da Independência Nacional"
self[date(year, SEP, 7)] = "Dia da Vitória"
self[date(year, SEP, 25)] = "Dia das Forças Armadas"
self[date(year, OCT, 4)] = "Dia da Paz e Reconciliação"
self[date(year, DEC, 25)] = "Dia de Natal e da Família"
# whenever a public holiday falls on a Sunday,
# it rolls over to the following Monday
for k, v in list(self.items()):
if self.observed and year > 1974:
if k.weekday() == SUN:
self[k + rd(days=1)] = v + " (PONTE)"
class MZ(Mozambique):
pass
class MOZ(Mozambique):
pass<|fim▁end|> |
if year > 1974:
self[date(year, 1, 1)] = "Ano novo"
e = easter(year) |
<|file_name|>template.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
flango.template
~~~~~~~~~~~~~~
template module provide a simple template system that compiles
templates to Python code which like django and tornado template
modules.
Usage
-----
Well, you can view the tests file directly for the usage under tests.
Basically::
>>> import template
>>> template.Template('Hello, {{ name }}').render(name='flango')
Hello, flango
If, else, for...::
>>> template.Template('''
... {% for i in l %}
... {% if i > 3 %}
... {{ i }}
... {% else %}
... less than 3
... {% endif %}
... {% endfor %})
... ''' ).render(l=[2, 4])
less than 3
4
Then, user define class object maybe also works well::
>>> class A(object):
...
... def __init__(self, a, b):<|fim▁hole|> >>> o = A("I am o.a", [1, 2, 3])
>>> template.Template('''
... {{ o.a }}
... {% for i in o.b %}
... {{ i }}
... {% endfor %}
... ''').render(o=o)
I am o.a
1
2
3
and Wow, function maybe suprise you::
>>> template.Template('{{ abs(-3) }}').render()
'3'
>>> template.Template('{{ len([1, 2, 3]) }}').render()
'3'
>>> template.Template('{{ [1, 2, 3].index(2) }}').render()
'1'
and complex function like lambda expression maybe works::
>>> template.Template('{{ list(map(lambda x: x * 2, [1, 2, 3])) }}').render()
'[2, 4, 6]'
and lastly, inheritance of template, extends and include::
{% extends 'base.html' %}
{% include 'included.html' %}
Hacking with fun and joy.
"""
import re
import os
import collections
# LRU Cache capacity:
_CACHE_CAPACITY = 128
class Scanner(object):
""" Scanner is a inner class of Template which provide
custom template source reading operations.
"""
def __init__(self, source):
# pattern for variable, function, block, statement.
self.pattern = re.compile(r'''
{{\s*(?P<var>.+?)\s*}} # variable: {{ name }} or function like: {{ abs(-2) }}
| # or
{%\s*(?P<endtag>end(if|for|while|block))\s*%} # endtag: {% endfor %}
| # or
{%\s*(?P<statement>(?P<keyword>\w+)\s*(.+?))\s*%} # statement: {% for i in range(10) %}
''', re.VERBOSE)
# the pre-text before token.
self.pretext = ''
# the remaining text which have not been processed.
self.remain = source
def next_token(self):
""" Get the next token which match the pattern semantic.
return `None` if there is no more tokens, otherwise,
return matched regular expression group of token `t`, get
the pre-text and the remain text at the same time.
"""
t = self.pattern.search(self.remain)
if not t:
return None
self.pretext = self.remain[:t.start()]
self.remain = self.remain[t.end():]
return t
@property
def empty(self):
""" Return `True` if the source have been processed."""
return self.remain == ''
class BaseNode(object):
""" Base abstract class for nodes.
Subclass of BaseNode must implement 'generate' interface for
output Python intermediate code generating.
"""
def __init__(self, text, indent, block):
self.text = text
self.indent = indent
self.block = block
def generate(self):
raise NotImplementedError()
class TextNode(BaseNode):
""" Node for normal text. """
def generate(self):
return '{0}_stdout.append(\'\'\'{1}\'\'\')\n'.format(' '*self.indent, self.text)
class VariableNode(BaseNode):
""" Node for variables: such as {{ name }}. """
def generate(self):
return '{0}_stdout.append({1})\n'.format(' '*self.indent, self.text)
class KeyNode(BaseNode):
""" Node for keywords like if else... """
def generate(self):
return '{0}{1}\n'.format(' '*self.indent, self.text)
class TemplateException(Exception):
pass
class Template(object):
""" Main class for compiled template instance.
A initialized template instance will parse and compile
all the template source to Python intermediate code,
and instance function `render` will use Python builtin function
`exec` to execute the intermediate code in Python
runtime.
As function `exec` own very strong power and the ability to
execute all the python code in the runtime with given
namespace dict, so this template engine can perform all
the python features even lambda function. But, function
`exec` also has a huge problem in security, so be careful
and be serious, and I am very serious too.
"""
def __init__(self, source, path='', autoescape=False):
if not source:
raise ValueError('Invalid parameter')
self.scanner = Scanner(source)
# path for extends and include
self.path = path
self.nodes = []
# parent template
self.parent = None
self.autoescape = autoescape
self._parse()
# compiled intermediate code.
self.intermediate = self._compile()
def _parse(self):
python_keywords = ['if', 'for', 'while', 'try', 'else', 'elif', 'except', 'finally']
indent = 0
block_stack = []
def block_stack_top():
return block_stack[-1] if block_stack else None
while not self.scanner.empty:
token = self.scanner.next_token()
if not token:
self.nodes.append(TextNode(self.scanner.remain, indent, block_stack_top()))
break
# get the pre-text before token.
if self.scanner.pretext:
self.nodes.append(TextNode(self.scanner.pretext, indent, block_stack_top()))
variable, endtag, tag, statement, keyword, suffix = token.groups()
if variable:
node_text = 'escape(str({0}))'.format(variable) if self.autoescape else variable
self.nodes.append(VariableNode(node_text, indent, block_stack_top()))
elif endtag:
if tag != 'block':
indent -= 1
continue
# block placeholder in parent template nodes
if not self.parent:
node_text = 'endblock%{0}'.format(block_stack_top())
self.nodes.append(KeyNode(node_text, indent, block_stack_top()))
block_stack.pop()
elif statement:
if keyword == 'include':
filename = re.sub(r'\'|\"', '', suffix)
nodes = Loader(self.path).load(filename).nodes
for node in nodes:
node.indent += indent
self.nodes.extend(nodes)
elif keyword == 'extends':
if self.nodes:
raise TemplateException('Template syntax error: extends tag must be '
'at the beginning of the file.')
filename = re.sub(r'\'|\"', '', suffix)
self.parent = Loader(self.path).load(filename)
elif keyword == 'block':
block_stack.append(suffix)
if not self.parent:
node_text = 'block%{0}'.format(suffix)
self.nodes.append(KeyNode(node_text, indent, block_stack_top()))
elif keyword in python_keywords:
node_text = '{0}:'.format(statement)
if keyword in ['else', 'elif', 'except', 'finally']:
key_indent = indent - 1
else:
key_indent = indent
indent += 1
self.nodes.append(KeyNode(node_text, key_indent, block_stack_top()))
else:
raise TemplateException('Invalid keyword: {0}.'.format(keyword))
else:
raise TemplateException('Template syntax error.')
def _compile(self):
block = {}
if self.parent:
generate_code = ''.join(node.generate() for node in self.parent.nodes)
pattern = re.compile(r'block%(?P<start_block>\w+)(?P<block_code>.*?)endblock%(?P<end_block>\w+)', re.S)
for node in self.nodes:
block.setdefault(node.block, []).append(node.generate())
for token in pattern.finditer(generate_code):
block_name = token.group('start_block')
if block_name != token.group('end_block'):
raise TemplateException('Template syntax error.')
block_code = ''.join(block[block_name]) if block_name in block.keys() else token.group('block_code')
generate_code = generate_code.replace(token.group(), block_code)
else:
generate_code = ''.join(node.generate() for node in self.nodes)
return compile(generate_code, '<string>', 'exec')
def render(self, **context):
# `context['_stdout']`: Compiled template source code
# which is a Python list, contain all the output
# statement of Python code.
context.update({'_stdout': [], 'escape': escape})
exec(self.intermediate, context)
return re.sub(r'(\s+\n)+', r'\n', ''.join(map(str, context['_stdout'])))
class LRUCache(object):
""" Simple LRU cache for template instance caching.
in fact, the OrderedDict in collections module or
@functools.lru_cache is working well too.
"""
def __init__(self, capacity):
self.capacity = capacity
self.cache = collections.OrderedDict()
def get(self, key):
""" Return -1 if catched KeyError exception."""
try:
value = self.cache.pop(key)
self.cache[key] = value
return value
except KeyError:
return -1
def set(self, key, value):
try:
self.cache.pop(key)
except KeyError:
if len(self.cache) >= self.capacity:
self.cache.popitem(last=False)
self.cache[key] = value
class Loader(object):
""" A template Loader which loads the environments of
main application, or just give the template system a root
directory to search the template files.
loader = template.Loader("home/to/root/of/templates/")
loader.load("index.html").render()
Loader class use a LRU cache system to cache the recently used
templates for performance consideration.
"""
def __init__(self, path='', engine=Template, cache_capacity=_CACHE_CAPACITY):
self.path = path
self.engine = engine
self.cache = LRUCache(capacity=cache_capacity)
def load(self, filename):
if not self.path.endswith(os.sep) and self.path != '':
self.path = self.path + os.sep
p = ''.join([self.path, filename])
cache_instance = self.cache.get(p)
if cache_instance != -1:
return cache_instance
if not os.path.isfile(p):
raise TemplateException('Template file {0} is not exist.'.format(p))
with open(p) as f:
self.cache.set(p, self.engine(f.read(), path=self.path))
return self.cache.get(p)
def escape(content):
""" Escapes a string's HTML. """
return content.replace('&', '&').replace('<', '<').replace('>', '>')\
.replace('"', '"').replace("'", ''')<|fim▁end|> | ... self.a = a
... self.b = b
... |
<|file_name|>pipelines.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
# import codecs
# import json
# class stokeScrapyPipeline(object):
# def __init__(self):
# self.file=codecs.open("stokeScrapy.json",mode="wb",encoding='utf-8')
# self.file.write('{"hah"'+':[')
import pymongo
from scrapy.conf import settings
from scrapy.exceptions import DropItem
from scrapy import log<|fim▁hole|> def __init__( self ):
connection = pymongo.MongoClient(
settings[ 'MONGODB_SERVER' ],
settings[ 'MONGODB_PORT' ]
)
db = connection[settings[ 'MONGODB_DB' ]]
self .collection = db[settings[ 'MONGODB_COLLECTION' ]]
def process_item( self , item, spider):
valid = True
for data in item:
if not data:
valid = False
raise DropItem( "Missing {0}!" . format (data))
if valid:
self .collection.insert( dict (item))
log.msg( "Stoke added to MongoDB database!" ,
level = log.DEBUG, spider = spider)
return item
# def process_item(self, item, spider):
# line = json.dumps(dict(item))+","
# self.file.write(line.decode("unicode_escape"))
# return item<|fim▁end|> |
#MongoDBPipeline
class MongoDBPipeline( object ): |
<|file_name|>handlers.py<|end_file_name|><|fim▁begin|>import logging
from event_consumer.conf import settings
from event_consumer.errors import PermanentFailure
from event_consumer.handlers import message_handler
_logger = logging.getLogger(__name__)
class IntegrationTestHandlers(object):
"""
Basic message handlers that log or raise known exceptions to allow
interactive testing of the RabbitMQ config.
"""
@staticmethod
def py_integration_ok(body):
"""
Should always succeed, never retry, never archive.
"""
msg = 'py_integration_ok, {}'.format(body)
_logger.info(msg)
@staticmethod
def py_integration_raise(body):
"""
Should retry until there are no attempts left, then archive.
"""
msg = 'py_integration_raise, {}'.format(body)
_logger.info(msg)
raise Exception(msg)
<|fim▁hole|> Should cause the message to be archived on first go.
"""
msg = 'py_integration_raise_permanent, {}'.format(body)
_logger.info(msg)
raise PermanentFailure(msg)
if settings.TEST_ENABLED:
# Add tasks for interactive testing (call decorators directly)
message_handler('py.integration.ok')(
IntegrationTestHandlers.py_integration_ok)
message_handler('py.integration.raise')(
IntegrationTestHandlers.py_integration_raise)
message_handler('py.integration.raise.permanent')(
IntegrationTestHandlers.py_integration_raise_permanent)<|fim▁end|> | @staticmethod
def py_integration_raise_permanent(body):
""" |
<|file_name|>0005_auto_20160512_0713.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-12 07:13<|fim▁hole|>from django.db import migrations
import wagtail.core.fields
class Migration(migrations.Migration):
dependencies = [
('home', '0004_auto_20160511_0845'),
]
operations = [
migrations.AlterField(
model_name='teammember',
name='bio',
field=wagtail.core.fields.RichTextField(help_text='The team member bio', max_length=360),
),
]<|fim▁end|> | from __future__ import unicode_literals
|
<|file_name|>Batchlet.java<|end_file_name|><|fim▁begin|>//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vIBM 2.2.3-11/28/2011 06:21 AM(foreman)-
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.06.11 at 05:49:00 PM EDT
//
package com.ibm.jbatch.jsl.model.v2;
import javax.annotation.Generated;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for Batchlet complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="Batchlet"><|fim▁hole|> * <element name="properties" type="{https://jakarta.ee/xml/ns/jakartaee}Properties" minOccurs="0"/>
* </sequence>
* <attribute name="ref" use="required" type="{https://jakarta.ee/xml/ns/jakartaee}artifactRef" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "Batchlet", propOrder = {
"properties"
})
@Generated(value = "com.ibm.jtc.jax.tools.xjc.Driver", date = "2014-06-11T05:49:00-04:00", comments = "JAXB RI v2.2.3-11/28/2011 06:21 AM(foreman)-")
public class Batchlet extends com.ibm.jbatch.jsl.model.Batchlet {
@Generated(value = "com.ibm.jtc.jax.tools.xjc.Driver", date = "2014-06-11T05:49:00-04:00", comments = "JAXB RI v2.2.3-11/28/2011 06:21 AM(foreman)-")
protected JSLProperties properties;
@XmlAttribute(name = "ref", required = true)
@Generated(value = "com.ibm.jtc.jax.tools.xjc.Driver", date = "2014-06-11T05:49:00-04:00", comments = "JAXB RI v2.2.3-11/28/2011 06:21 AM(foreman)-")
protected String ref;
/** {@inheritDoc} */
@Override
@Generated(value = "com.ibm.jtc.jax.tools.xjc.Driver", date = "2014-06-11T05:49:00-04:00", comments = "JAXB RI v2.2.3-11/28/2011 06:21 AM(foreman)-")
public JSLProperties getProperties() {
return properties;
}
/** {@inheritDoc} */
@Override
@Generated(value = "com.ibm.jtc.jax.tools.xjc.Driver", date = "2014-06-11T05:49:00-04:00", comments = "JAXB RI v2.2.3-11/28/2011 06:21 AM(foreman)-")
public void setProperties(com.ibm.jbatch.jsl.model.JSLProperties value) {
this.properties = (JSLProperties) value;
}
/** {@inheritDoc} */
@Override
@Generated(value = "com.ibm.jtc.jax.tools.xjc.Driver", date = "2014-06-11T05:49:00-04:00", comments = "JAXB RI v2.2.3-11/28/2011 06:21 AM(foreman)-")
public String getRef() {
return ref;
}
/** {@inheritDoc} */
@Override
@Generated(value = "com.ibm.jtc.jax.tools.xjc.Driver", date = "2014-06-11T05:49:00-04:00", comments = "JAXB RI v2.2.3-11/28/2011 06:21 AM(foreman)-")
public void setRef(String value) {
this.ref = value;
}
/**
* Copyright 2013 International Business Machines Corp.
*
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership. Licensed under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Appended by build tooling.
*/
@Override
public String toString() {
StringBuilder buf = new StringBuilder(100);
buf.append("Batchlet: ref=" + ref);
buf.append("\n");
buf.append("Properties = " + com.ibm.jbatch.jsl.model.helper.PropertiesToStringHelper.getString(properties));
return buf.toString();
}
}<|fim▁end|> | * <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence> |
<|file_name|>RuntimeDyldCOFF.cpp<|end_file_name|><|fim▁begin|>//===-- RuntimeDyldCOFF.cpp - Run-time dynamic linker for MC-JIT -*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// Implementation of COFF support for the MC-JIT runtime dynamic linker.
//
//===----------------------------------------------------------------------===//
#include "RuntimeDyldCOFF.h"
#include "Targets/RuntimeDyldCOFFAArch64.h"
#include "Targets/RuntimeDyldCOFFI386.h"
#include "Targets/RuntimeDyldCOFFThumb.h"
#include "Targets/RuntimeDyldCOFFX86_64.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Triple.h"
#include "llvm/Object/ObjectFile.h"
#include "llvm/Support/FormatVariadic.h"
using namespace llvm;
using namespace llvm::object;
#define DEBUG_TYPE "dyld"
namespace {
class LoadedCOFFObjectInfo final
: public LoadedObjectInfoHelper<LoadedCOFFObjectInfo,
RuntimeDyld::LoadedObjectInfo> {
public:
LoadedCOFFObjectInfo(
RuntimeDyldImpl &RTDyld,
RuntimeDyld::LoadedObjectInfo::ObjSectionToIDMap ObjSecToIDMap)
: LoadedObjectInfoHelper(RTDyld, std::move(ObjSecToIDMap)) {}
OwningBinary<ObjectFile>
getObjectForDebug(const ObjectFile &Obj) const override {
return OwningBinary<ObjectFile>();
}
};
}
namespace llvm {
std::unique_ptr<RuntimeDyldCOFF>
llvm::RuntimeDyldCOFF::create(Triple::ArchType Arch,
RuntimeDyld::MemoryManager &MemMgr,
JITSymbolResolver &Resolver) {
switch (Arch) {
default: llvm_unreachable("Unsupported target for RuntimeDyldCOFF.");
case Triple::x86:
return std::make_unique<RuntimeDyldCOFFI386>(MemMgr, Resolver);
case Triple::thumb:
return std::make_unique<RuntimeDyldCOFFThumb>(MemMgr, Resolver);
case Triple::x86_64:
return std::make_unique<RuntimeDyldCOFFX86_64>(MemMgr, Resolver);
case Triple::aarch64:
return std::make_unique<RuntimeDyldCOFFAArch64>(MemMgr, Resolver);
}
}
std::unique_ptr<RuntimeDyld::LoadedObjectInfo>
RuntimeDyldCOFF::loadObject(const object::ObjectFile &O) {
if (auto ObjSectionToIDOrErr = loadObjectImpl(O)) {
return std::make_unique<LoadedCOFFObjectInfo>(*this, *ObjSectionToIDOrErr);
} else {
HasError = true;
raw_string_ostream ErrStream(ErrorStr);
logAllUnhandledErrors(ObjSectionToIDOrErr.takeError(), ErrStream);
return nullptr;
}
}
uint64_t RuntimeDyldCOFF::getSymbolOffset(const SymbolRef &Sym) {
// The value in a relocatable COFF object is the offset.
return cantFail(Sym.getValue());
}
uint64_t RuntimeDyldCOFF::getDLLImportOffset(unsigned SectionID, StubMap &Stubs,
StringRef Name,
bool SetSectionIDMinus1) {
LLVM_DEBUG(dbgs() << "Getting DLLImport entry for " << Name << "... ");
assert(Name.startswith(getImportSymbolPrefix()) && "Not a DLLImport symbol?");
RelocationValueRef Reloc;
Reloc.SymbolName = Name.data();
auto I = Stubs.find(Reloc);
if (I != Stubs.end()) {
LLVM_DEBUG(dbgs() << format("{0:x8}", I->second) << "\n");
return I->second;
}
assert(SectionID < Sections.size() && "SectionID out of range");
auto &Sec = Sections[SectionID];
auto EntryOffset = alignTo(Sec.getStubOffset(), PointerSize);
Sec.advanceStubOffset(EntryOffset + PointerSize - Sec.getStubOffset());
Stubs[Reloc] = EntryOffset;
RelocationEntry RE(SectionID, EntryOffset, PointerReloc, 0, false,
Log2_64(PointerSize));
// Hack to tell I386/Thumb resolveRelocation that this isn't section relative.
if (SetSectionIDMinus1)
RE.Sections.SectionA = -1;
addRelocationForSymbol(RE, Name.drop_front(getImportSymbolPrefix().size()));
<|fim▁hole|> EntryOffset, Sec.getLoadAddress() + EntryOffset)
<< "\n";
});
return EntryOffset;
}
bool RuntimeDyldCOFF::isCompatibleFile(const object::ObjectFile &Obj) const {
return Obj.isCOFF();
}
} // namespace llvm<|fim▁end|> | LLVM_DEBUG({
dbgs() << "Creating entry at "
<< formatv("{0:x16} + {1:x8} ( {2:x16} )", Sec.getLoadAddress(), |
<|file_name|>WebServer.py<|end_file_name|><|fim▁begin|># coding=utf-8
import threading
server = None
web_server_ip = "0.0.0.0"
web_server_port = "8000"
web_server_template = "www"
def initialize_web_server(config):
'''
Setup the web server, retrieving the configuration parameters
and starting the web server thread
'''
global web_server_ip, web_server_port, web_server_template
# Check for custom web server address
compositeWebServerAddress = config.get('BOT', 'customWebServerAddress', '0.0.0.0').split(":")
# associate web server ip address
web_server_ip = compositeWebServerAddress[0]
# check for IP:PORT legacy format
if (len(compositeWebServerAddress) > 1):
# associate web server port<|fim▁hole|> # Check for custom web server port
web_server_port = config.get('BOT', 'customWebServerPort', '8000')
# Check for custom web server template
web_server_template = config.get('BOT', 'customWebServerTemplate', 'www')
print('Starting WebServer at {0} on port {1} with template {2}'
.format(web_server_ip, web_server_port, web_server_template))
thread = threading.Thread(target=start_web_server)
thread.deamon = True
thread.start()
def start_web_server():
'''
Start the web server
'''
import SimpleHTTPServer
import SocketServer
import socket
try:
port = int(web_server_port)
host = web_server_ip
# Do not attempt to fix code warnings in the below class, it is perfect.
class QuietHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
# quiet server logs
def log_message(self, format, *args):
return
# serve from www folder under current working dir
def translate_path(self, path):
return SimpleHTTPServer.SimpleHTTPRequestHandler.translate_path(self, '/' + web_server_template + path)
global server
SocketServer.TCPServer.allow_reuse_address = True
server = SocketServer.TCPServer((host, port), QuietHandler)
if host == "0.0.0.0":
# Get all addresses that we could listen on the port specified
addresses = [i[4][0] for i in socket.getaddrinfo(socket.gethostname().split('.')[0], port)]
addresses = [i for i in addresses if ':' not in i] # Filter out all IPv6 addresses
addresses.append('127.0.0.1') # getaddrinfo doesn't always get localhost
hosts = list(set(addresses)) # Make list unique
else:
hosts = [host]
serving_msg = "http://{0}:{1}/lendingbot.html".format(hosts[0], port)
for host in hosts[1:]:
serving_msg += ", http://{0}:{1}/lendingbot.html".format(host, port)
print('Started WebServer, lendingbot status available at {0}'.format(serving_msg))
server.serve_forever()
except Exception as ex:
ex.message = ex.message if ex.message else str(ex)
print('Failed to start WebServer: {0}'.format(ex.message))
def stop_web_server():
'''
Stop the web server
'''
try:
print("Stopping WebServer")
threading.Thread(target=server.shutdown).start()
except Exception as ex:
ex.message = ex.message if ex.message else str(ex)
print("Failed to stop WebServer: {0}".format(ex.message))<|fim▁end|> | web_server_port = compositeWebServerAddress[1]
else: |
<|file_name|>invokespecial.go<|end_file_name|><|fim▁begin|>package references
import (
"github.com/zxh0/jvm.go/instructions/base"
"github.com/zxh0/jvm.go/rtda"
"github.com/zxh0/jvm.go/rtda/heap"
)
// Invoke instance method;<|fim▁hole|>type InvokeSpecial struct{ base.Index16Instruction }
func (instr *InvokeSpecial) Execute(frame *rtda.Frame) {
cp := frame.GetConstantPool()
k := cp.GetConstant(instr.Index)
if kMethodRef, ok := k.(*heap.ConstantMethodRef); ok {
method := kMethodRef.GetMethod(false)
frame.Thread.InvokeMethod(method)
} else {
method := k.(*heap.ConstantInterfaceMethodRef).GetMethod(false)
frame.Thread.InvokeMethod(method)
}
}<|fim▁end|> | // special handling for superclass, private, and instance initialization method invocations |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>/* ========================================================================
* DOM-based Routing<|fim▁hole|> *
* .noConflict()
* The routing is enclosed within an anonymous function so that you can
* always reference jQuery with $, even when in .noConflict() mode.
* ======================================================================== */
(function($) {
// Use this variable to set up the common and page specific functions. If you
// rename this variable, you will also need to rename the namespace below.
var Gruntd = {
// All pages
'common': {
init: function() {
// JavaScript to be fired on all pages
},
finalize: function() {
// JavaScript to be fired on all pages, after page specific JS is fired
}
},
// Home page
'home': {
init: function() {
// JavaScript to be fired on the home page
},
finalize: function() {
// JavaScript to be fired on the home page, after the init JS
}
},
// About us page, note the change from about-us to about_us.
'about_us': {
init: function() {
// JavaScript to be fired on the about us page
}
}
};
// The routing fires all common scripts, followed by the page specific scripts.
// Add additional events for more control over timing e.g. a finalize event
var UTIL = {
fire: function(func, funcname, args) {
var fire;
var namespace = Gruntd;
funcname = (funcname === undefined) ? 'init' : funcname;
fire = func !== '';
fire = fire && namespace[func];
fire = fire && typeof namespace[func][funcname] === 'function';
if (fire) {
namespace[func][funcname](args);
}
},
loadEvents: function() {
// Fire common init JS
UTIL.fire('common');
// Fire page-specific init JS, and then finalize JS
$.each(document.body.className.replace(/-/g, '_').split(/\s+/), function(i, classnm) {
UTIL.fire(classnm);
UTIL.fire(classnm, 'finalize');
});
// Fire common finalize JS
UTIL.fire('common', 'finalize');
}
};
// Load Events
$(document).ready(UTIL.loadEvents);
})(jQuery); // Fully reference jQuery after this point.<|fim▁end|> | * Based on http://goo.gl/EUTi53 by Paul Irish
*
* Only fires on body classes that match. If a body class contains a dash,
* replace the dash with an underscore when adding it to the object below. |
<|file_name|>operations.py<|end_file_name|><|fim▁begin|>from django.contrib.gis.db.models import GeometryField
from django.contrib.gis.db.models.functions import Distance
from django.contrib.gis.measure import (
Area as AreaMeasure, Distance as DistanceMeasure,
)
from django.db.utils import NotSupportedError
from django.utils.functional import cached_property
class BaseSpatialOperations:
# Quick booleans for the type of this spatial backend, and
# an attribute for the spatial database version tuple (if applicable)
postgis = False
spatialite = False
mysql = False
oracle = False
spatial_version = None
# How the geometry column should be selected.
select = '%s'
@cached_property
def select_extent(self):
return self.select
# Does the spatial database have a geometry or geography type?
geography = False
geometry = False
# Aggregates<|fim▁hole|>
geom_func_prefix = ''
# Mapping between Django function names and backend names, when names do not
# match; used in spatial_function_name().
function_names = {}
# Blacklist/set of known unsupported functions of the backend
unsupported_functions = {
'Area', 'AsGeoJSON', 'AsGML', 'AsKML', 'AsSVG', 'Azimuth',
'BoundingCircle', 'Centroid', 'Difference', 'Distance', 'Envelope',
'GeoHash', 'GeometryDistance', 'Intersection', 'IsValid', 'Length',
'LineLocatePoint', 'MakeValid', 'MemSize', 'NumGeometries',
'NumPoints', 'Perimeter', 'PointOnSurface', 'Reverse', 'Scale',
'SnapToGrid', 'SymDifference', 'Transform', 'Translate', 'Union',
}
# Constructors
from_text = False
# Default conversion functions for aggregates; will be overridden if implemented
# for the spatial backend.
def convert_extent(self, box, srid):
raise NotImplementedError('Aggregate extent not implemented for this spatial backend.')
def convert_extent3d(self, box, srid):
raise NotImplementedError('Aggregate 3D extent not implemented for this spatial backend.')
# For quoting column values, rather than columns.
def geo_quote_name(self, name):
return "'%s'" % name
# GeometryField operations
def geo_db_type(self, f):
"""
Return the database column type for the geometry field on
the spatial backend.
"""
raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_type() method')
def get_distance(self, f, value, lookup_type):
"""
Return the distance parameters for the given geometry field,
lookup value, and lookup type.
"""
raise NotImplementedError('Distance operations not available on this spatial backend.')
def get_geom_placeholder(self, f, value, compiler):
"""
Return the placeholder for the given geometry field with the given
value. Depending on the spatial backend, the placeholder may contain a
stored procedure call to the transformation function of the spatial
backend.
"""
def transform_value(value, field):
return value is not None and value.srid != field.srid
if hasattr(value, 'as_sql'):
return (
'%s(%%s, %s)' % (self.spatial_function_name('Transform'), f.srid)
if transform_value(value.output_field, f)
else '%s'
)
if transform_value(value, f):
# Add Transform() to the SQL placeholder.
return '%s(%s(%%s,%s), %s)' % (
self.spatial_function_name('Transform'),
self.from_text, value.srid, f.srid,
)
elif self.connection.features.has_spatialrefsys_table:
return '%s(%%s,%s)' % (self.from_text, f.srid)
else:
# For backwards compatibility on MySQL (#27464).
return '%s(%%s)' % self.from_text
def check_expression_support(self, expression):
if isinstance(expression, self.disallowed_aggregates):
raise NotSupportedError(
"%s spatial aggregation is not supported by this database backend." % expression.name
)
super().check_expression_support(expression)
def spatial_aggregate_name(self, agg_name):
raise NotImplementedError('Aggregate support not implemented for this spatial backend.')
def spatial_function_name(self, func_name):
if func_name in self.unsupported_functions:
raise NotSupportedError("This backend doesn't support the %s function." % func_name)
return self.function_names.get(func_name, self.geom_func_prefix + func_name)
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
raise NotImplementedError('Subclasses of BaseSpatialOperations must provide a geometry_columns() method.')
def spatial_ref_sys(self):
raise NotImplementedError('subclasses of BaseSpatialOperations must a provide spatial_ref_sys() method')
distance_expr_for_lookup = staticmethod(Distance)
def get_db_converters(self, expression):
converters = super().get_db_converters(expression)
if isinstance(expression.output_field, GeometryField):
converters.append(self.get_geometry_converter(expression))
return converters
def get_geometry_converter(self, expression):
raise NotImplementedError(
'Subclasses of BaseSpatialOperations must provide a '
'get_geometry_converter() method.'
)
def get_area_att_for_field(self, field):
if field.geodetic(self.connection):
if self.connection.features.supports_area_geodetic:
return 'sq_m'
raise NotImplementedError('Area on geodetic coordinate systems not supported.')
else:
units_name = field.units_name(self.connection)
if units_name:
return AreaMeasure.unit_attname(units_name)
def get_distance_att_for_field(self, field):
dist_att = None
if field.geodetic(self.connection):
if self.connection.features.supports_distance_geodetic:
dist_att = 'm'
else:
units = field.units_name(self.connection)
if units:
dist_att = DistanceMeasure.unit_attname(units)
return dist_att<|fim▁end|> | disallowed_aggregates = () |
<|file_name|>util.py<|end_file_name|><|fim▁begin|># coding: utf-8
""" General utilities. """
from __future__ import division, print_function
__author__ = "adrn <adrn@astro.columbia.edu>"
# Standard library
import collections
import sys
import logging
import multiprocessing
# Third-party
import numpy as np
__all__ = ['get_pool']
# Create logger
logger = logging.getLogger(__name__)
class SerialPool(object):
def close(self):
return
<|fim▁hole|> def map(self, *args, **kwargs):
return map(*args, **kwargs)
def get_pool(mpi=False, threads=None):
""" Get a pool object to pass to emcee for parallel processing.
If mpi is False and threads is None, pool is None.
Parameters
----------
mpi : bool
Use MPI or not. If specified, ignores the threads kwarg.
threads : int (optional)
If mpi is False and threads is specified, use a Python
multiprocessing pool with the specified number of threads.
"""
if mpi:
from emcee.utils import MPIPool
# Initialize the MPI pool
pool = MPIPool()
# Make sure the thread we're running on is the master
if not pool.is_master():
pool.wait()
sys.exit(0)
logger.debug("Running with MPI...")
elif threads > 1:
logger.debug("Running with multiprocessing on {} cores..."
.format(threads))
pool = multiprocessing.Pool(threads)
else:
logger.debug("Running serial...")
pool = SerialPool()
return pool
def gram_schmidt(y):
""" Modified Gram-Schmidt orthonormalization of the matrix y(n,n) """
n = y.shape[0]
if y.shape[1] != n:
raise ValueError("Invalid shape: {}".format(y.shape))
mo = np.zeros(n)
# Main loop
for i in range(n):
# Remove component in direction i
for j in range(i):
esc = np.sum(y[j]*y[i])
y[i] -= y[j]*esc
# Normalization
mo[i] = np.linalg.norm(y[i])
y[i] /= mo[i]
return mo
class use_backend(object):
def __init__(self, backend):
import matplotlib.pyplot as plt
from IPython.core.interactiveshell import InteractiveShell
from IPython.core.pylabtools import backend2gui
self.shell = InteractiveShell.instance()
self.old_backend = backend2gui[str(plt.get_backend())]
self.new_backend = backend
def __enter__(self):
gui, backend = self.shell.enable_matplotlib(self.new_backend)
def __exit__(self, type, value, tb):
gui, backend = self.shell.enable_matplotlib(self.old_backend)
def inherit_docs(cls):
for name, func in vars(cls).items():
if not func.__doc__:
for parent in cls.__bases__:
try:
parfunc = getattr(parent, name)
except AttributeError: # parent doesn't have function
break
if parfunc and getattr(parfunc, '__doc__', None):
func.__doc__ = parfunc.__doc__
break
return cls
class ImmutableDict(collections.Mapping):
def __init__(self, somedict):
self._dict = dict(somedict) # make a copy
self._hash = None
def __getitem__(self, key):
return self._dict[key]
def __len__(self):
return len(self._dict)
def __iter__(self):
return iter(self._dict)
def __hash__(self):
if self._hash is None:
self._hash = hash(frozenset(self._dict.items()))
return self._hash
def __eq__(self, other):
return self._dict == other._dict<|fim▁end|> | |
<|file_name|>daily_attendance_plot_presenter.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Time-sheet Analyser: Python library which allows to analyse time-sheets.
# Copyright (C) 2017 Carlos Serra Toro.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#<|fim▁hole|># You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import math
import matplotlib.pyplot as plt
from matplotlib.dates import MONDAY
from matplotlib.dates import MonthLocator, WeekdayLocator, DateFormatter
from TimeSheetAnalyser.utils.misc import time_to_float_time, normalise_number,\
average_sequence
def daily_attendance_plot_presenter(time_sheet):
""" Represent a TimeSheet as a graphic view of all the attendance
entries separately, grouped by days, as a bar plot.
:param time_sheet: An object of type TimeSheet.
:return: True
"""
dates = time_sheet.get_dates(sort=True)
days_in_week = 5
min_hour = 24
max_hour = 0
start_times, end_times = [], []
for date in dates:
start_time = time_to_float_time(time_sheet[date][0].start_time)
end_time = time_to_float_time(time_sheet[date][-1].end_time)
start_times.append(start_time)
end_times.append(end_time)
if int(start_time) < min_hour:
min_hour = int(start_time)
if int(math.ceil(end_time)) > max_hour:
max_hour = int(math.ceil(end_time))
hours_range = [min_hour, min(24, max_hour)]
fig, axes = plt.subplots()
# Plots the starting and ending times for each day.
axes.plot_date(dates, start_times, fmt='og', visible=True)
axes.plot_date(dates, end_times, fmt='or', visible=True)
# Prints the time-spans for each day.
for date in dates:
for time_span in time_sheet[date]:
start_time = time_to_float_time(time_span.start_time)
end_time = time_to_float_time(time_span.end_time)
normalised_start_time = normalise_number(start_time,
input_range=hours_range)
normalised_end_time = normalise_number(end_time,
input_range=hours_range)
axes.axvline(x=date, ymin=normalised_start_time,
ymax=normalised_end_time, color='b')
# Plots the averaged starting & ending times.
average_line_fmt = {'fmt': 'm-', 'linewidth': 2, 'visible': True}
average_start_times = average_sequence(start_times, win_size=days_in_week)
axes.plot_date(dates, average_start_times, **average_line_fmt)
average_end_times = average_sequence(end_times, win_size=days_in_week)
axes.plot_date(dates, average_end_times, **average_line_fmt)
axes.grid(True)
axes.set_ylim(hours_range)
axes.xaxis.set_major_locator(MonthLocator())
axes.xaxis.set_minor_locator(WeekdayLocator(MONDAY))
axes.xaxis.set_major_formatter(DateFormatter("%B %Y"))
fig.autofmt_xdate()
hours_list = range(hours_range[0], hours_range[1] + 1)
plt.yticks(hours_list, [str(hour) for hour in hours_list],
rotation='horizontal')
plt.show()
return True<|fim▁end|> | |
<|file_name|>xudy.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*-
<|fim▁hole|>
return len(split_str[-1])
if __name__ == '__main__':
result = length_of_last_word("hello world")
print(result)<|fim▁end|> | def length_of_last_word(str_):
split_str = str_.split(" ")
if not split_str:
return 0 |
<|file_name|>url.js<|end_file_name|><|fim▁begin|>// Copyright 2008 The Closure Library Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// The following is taken from Closure Library:
//
// buildFromEncodedParts
// splitRe
// ComponentIndex
// split
// removeDotSegments
/**
* Builds a URI string from already-encoded parts.
*
* No encoding is performed. Any component may be omitted as either null or
* undefined.
*
* @param {?string=} opt_scheme The scheme such as 'http'.
* @param {?string=} opt_userInfo The user name before the '@'.
* @param {?string=} opt_domain The domain such as 'www.google.com', already
* URI-encoded.
* @param {(string|number|null)=} opt_port The port number.
* @param {?string=} opt_path The path, already URI-encoded. If it is not
* empty, it must begin with a slash.
* @param {?string=} opt_queryData The URI-encoded query data.
* @param {?string=} opt_fragment The URI-encoded fragment identifier.
* @return {string} The fully combined URI.
*/
function buildFromEncodedParts(opt_scheme, opt_userInfo,
opt_domain, opt_port, opt_path, opt_queryData, opt_fragment) {
var out = [];
if (opt_scheme) {
out.push(opt_scheme, ':');
}
if (opt_domain) {
out.push('//');
if (opt_userInfo) {
out.push(opt_userInfo, '@');
}
out.push(opt_domain);
if (opt_port) {
out.push(':', opt_port);
}
}
if (opt_path) {
out.push(opt_path);
}
if (opt_queryData) {
out.push('?', opt_queryData);
}
if (opt_fragment) {
out.push('#', opt_fragment);
}
return out.join('');
};
/**
* A regular expression for breaking a URI into its component parts.
*
* {@link http://www.gbiv.com/protocols/uri/rfc/rfc3986.html#RFC2234} says
* As the "first-match-wins" algorithm is identical to the "greedy"
* disambiguation method used by POSIX regular expressions, it is natural and
* commonplace to use a regular expression for parsing the potential five
* components of a URI reference.
*
* The following line is the regular expression for breaking-down a
* well-formed URI reference into its components.
*
* <pre>
* ^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?
* 12 3 4 5 6 7 8 9
* </pre>
*
* The numbers in the second line above are only to assist readability; they
* indicate the reference points for each subexpression (i.e., each paired
* parenthesis). We refer to the value matched for subexpression <n> as $<n>.
* For example, matching the above expression to
* <pre>
* http://www.ics.uci.edu/pub/ietf/uri/#Related
* </pre>
* results in the following subexpression matches:
* <pre>
* $1 = http:
* $2 = http
* $3 = //www.ics.uci.edu
* $4 = www.ics.uci.edu
* $5 = /pub/ietf/uri/
* $6 = <undefined>
* $7 = <undefined>
* $8 = #Related
* $9 = Related
* </pre>
* where <undefined> indicates that the component is not present, as is the
* case for the query component in the above example. Therefore, we can
* determine the value of the five components as
* <pre>
* scheme = $2
* authority = $4
* path = $5
* query = $7
* fragment = $9
* </pre>
*
* The regular expression has been modified slightly to expose the
* userInfo, domain, and port separately from the authority.
* The modified version yields
* <pre>
* $1 = http scheme
* $2 = <undefined> userInfo -\
* $3 = www.ics.uci.edu domain | authority
* $4 = <undefined> port -/
* $5 = /pub/ietf/uri/ path
* $6 = <undefined> query without ?
* $7 = Related fragment without #
* </pre>
* @type {!RegExp}
* @private
*/
var splitRe = new RegExp(
'^' +
'(?:' +
'([^:/?#.]+)' + // scheme - ignore special characters
// used by other URL parts such as :,
// ?, /, #, and .
':)?' +
'(?://' +
'(?:([^/?#]*)@)?' + // userInfo
'([\\w\\d\\-\\u0100-\\uffff.%]*)' + // domain - restrict to letters,
// digits, dashes, dots, percent
// escapes, and unicode characters.
'(?::([0-9]+))?' + // port
')?' +
'([^?#]+)?' + // path
'(?:\\?([^#]*))?' + // query
'(?:#(.*))?' + // fragment
'$');
/**
* The index of each URI component in the return value of goog.uri.utils.split.
* @enum {number}
*/
var ComponentIndex = {
SCHEME: 1,
USER_INFO: 2,
DOMAIN: 3,
PORT: 4,
PATH: 5,
QUERY_DATA: 6,
FRAGMENT: 7
};
/**
* Splits a URI into its component parts.
*
* Each component can be accessed via the component indices; for example:
* <pre>
* goog.uri.utils.split(someStr)[goog.uri.utils.CompontentIndex.QUERY_DATA];
* </pre>
*
* @param {string} uri The URI string to examine.
* @return {!Array.<string|undefined>} Each component still URI-encoded.
* Each component that is present will contain the encoded value, whereas
* components that are not present will be undefined or empty, depending
* on the browser's regular expression implementation. Never null, since
* arbitrary strings may still look like path names.
*/
function split(uri) {
// See @return comment -- never null.
return /** @type {!Array.<string|undefined>} */ (
uri.match(splitRe));
}
/**
* Removes dot segments in given path component, as described in
* RFC 3986, section 5.2.4.
*
* @param {string} path A non-empty path component.
* @return {string} Path component with removed dot segments.
*/
export function removeDotSegments(path) {
if (path === '/')
return '/';
var leadingSlash = path[0] === '/' ? '/' : '';
var trailingSlash = path.slice(-1) === '/' ? '/' : '';
var segments = path.split('/');
var out = [];
var up = 0;
for (var pos = 0; pos < segments.length; pos++) {
var segment = segments[pos];
switch (segment) {
case '':
case '.':
break;
case '..':
if (out.length)
out.pop();
else
up++;
break;
default:
out.push(segment);
}
}
if (!leadingSlash) {
while (up-- > 0) {
out.unshift('..');
}
if (out.length === 0)
out.push('.');
}
return leadingSlash + out.join('/') + trailingSlash;
}
/**
* Takes an array of the parts from split and canonicalizes the path part
* and then joins all the parts.
* @param {Array.<string?} parts
* @return {string}
*/
function joinAndCanonicalizePath(parts) {
var path = parts[ComponentIndex.PATH];
path = removeDotSegments(path.replace(/\/\//.g, '/'));
parts[ComponentIndex.PATH] = path;
return buildFromEncodedParts(
parts[ComponentIndex.SCHEME],
parts[ComponentIndex.USER_INFO],
parts[ComponentIndex.DOMAIN],
parts[ComponentIndex.PORT],
parts[ComponentIndex.PATH],
parts[ComponentIndex.QUERY_DATA],
parts[ComponentIndex.FRAGMENT]);
}
/**
* Canonicalizes a URL by eliminating ./ path entries,
* canonicalizing ../ entries, and collapsing occurrences of //.
*
* @param {string} url
* @return {string}
*/
export function canonicalizeUrl(url) {
var parts = split(url);
return joinAndCanonicalizePath(parts);
}
/**
* Resovles a URL.
* @param {string} base The URL acting as the base URL.
* @param {string} to The URL to resolve.
* @return {string}
*/
export function resolveUrl(base, url) {
if (url[0] === '@')
return url;
var parts = split(url);
var baseParts = split(base);
if (parts[ComponentIndex.SCHEME]) {
return joinAndCanonicalizePath(parts);
} else {
parts[ComponentIndex.SCHEME] = baseParts[ComponentIndex.SCHEME];
}
for (var i = ComponentIndex.SCHEME; i <= ComponentIndex.PORT; i++) {
if (!parts[i]) {
parts[i] = baseParts[i];
}<|fim▁hole|> return joinAndCanonicalizePath(parts);
}
var path = baseParts[ComponentIndex.PATH];
var index = path.lastIndexOf('/');
path = path.slice(0, index + 1) + parts[ComponentIndex.PATH];
parts[ComponentIndex.PATH] = path;
return joinAndCanonicalizePath(parts);
}<|fim▁end|> | }
if (parts[ComponentIndex.PATH][0] == '/') { |
<|file_name|>test_spiderForPEDAILY.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Copyright (C) 2015, MuChu Hsu
Contributed by Muchu Hsu (muchu1983@gmail.com)
This file is part of BSD license<|fim▁hole|>import unittest
import logging
from cameo.spiderForPEDAILY import SpiderForPEDAILY
"""
測試 抓取 PEDAILY
"""
class SpiderForPEDAILYTest(unittest.TestCase):
#準備
def setUp(self):
logging.basicConfig(level=logging.INFO)
self.spider = SpiderForPEDAILY()
self.spider.initDriver()
#收尾
def tearDown(self):
self.spider.quitDriver()
"""
#測試抓取 index page
def test_downloadIndexPage(self):
logging.info("SpiderForPEDAILYTest.test_downloadIndexPage")
self.spider.downloadIndexPage()
#測試抓取 category page
def test_downloadCategoryPage(self):
logging.info("SpiderForPEDAILYTest.test_downloadCategoryPage")
self.spider.downloadCategoryPage()
"""
#測試抓取 news page
def test_downloadNewsPage(self):
logging.info("SpiderForPEDAILYTest.test_downloadNewsPage")
self.spider.downloadNewsPage(strCategoryName=None)
#測試開始
if __name__ == "__main__":
unittest.main(exit=False)<|fim▁end|> |
<https://opensource.org/licenses/BSD-3-Clause>
""" |
<|file_name|>LineFormat.java<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2001-2020 by RapidMiner and the contributors
*
* Complete list of developers available at our web site:
*
* http://rapidminer.com
*
* This program is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either version 3
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this program.
* If not, see http://www.gnu.org/licenses/.
*/
package com.rapidminer.gui.new_plotter.configuration;
import com.rapidminer.gui.new_plotter.listener.events.LineFormatChangeEvent;
import com.rapidminer.gui.new_plotter.utility.DataStructureUtils;
import com.rapidminer.tools.I18N;
import java.awt.BasicStroke;
import java.awt.Color;
import java.lang.ref.WeakReference;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
/**
* @author Marius Helf
* @deprecated since 9.2.0
*/
@Deprecated
public class LineFormat implements Cloneable {
private static class StrokeFactory {
static public BasicStroke getSolidStroke() {
return new BasicStroke(1, BasicStroke.CAP_ROUND, BasicStroke.JOIN_ROUND);
}
static public BasicStroke getDottedStroke() {
return new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_ROUND, 10.0f, new float[] { 1f, 1f }, 0.0f);
}
static public BasicStroke getShortDashedStroke() {
return new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_ROUND, 10.0f, new float[] { 4f, 2f }, 0.0f);
}
static public BasicStroke getLongDashedStroke() {
return new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_ROUND, 10.0f, new float[] { 7f, 3f }, 0.0f);
}
static public BasicStroke getDashDotStroke() {
return new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_ROUND, 10.0f, new float[] { 6f, 2f, 1f, 2f },
0.0f);
}
static public BasicStroke getStripedStroke() {
return new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_ROUND, 10.0f, new float[] { 0.2f, 0.2f }, 0.0f);
}
}
public enum LineStyle {
NONE(null, I18N.getGUILabel("plotter.linestyle.NONE.label")), SOLID(StrokeFactory.getSolidStroke(), I18N
.getGUILabel("plotter.linestyle.SOLID.label")), DOTS(StrokeFactory.getDottedStroke(), I18N
.getGUILabel("plotter.linestyle.DOTS.label")), SHORT_DASHES(StrokeFactory.getShortDashedStroke(), I18N
.getGUILabel("plotter.linestyle.SHORT_DASHES.label")), LONG_DASHES(StrokeFactory.getLongDashedStroke(), I18N
.getGUILabel("plotter.linestyle.LONG_DASHES.label")), DASH_DOT(StrokeFactory.getDashDotStroke(), I18N
.getGUILabel("plotter.linestyle.DASH_DOT.label")), STRIPES(StrokeFactory.getStripedStroke(), I18N
.getGUILabel("plotter.linestyle.STRIPES.label"));
private final BasicStroke stroke;
private final String name;
public BasicStroke getStroke() {
return stroke;
}
public String getName() {
return name;
}
private LineStyle(BasicStroke stroke, String name) {
this.stroke = stroke;
this.name = name;
}
}
private List<WeakReference<LineFormatListener>> listeners = new LinkedList<WeakReference<LineFormatListener>>();
private LineStyle style = LineStyle.NONE; // dashed, solid...
private Color color = Color.GRAY;
private float width = 1.0f;
public LineStyle getStyle() {
return style;
}
public void setStyle(LineStyle style) {
if (style != this.style) {
this.style = style;
fireStyleChanged();
}
}
public Color getColor() {
return color;
}
public void setColor(Color color) {
if (color == null ? this.color != null : !color.equals(this.color)) {
this.color = color;
fireColorChanged();
}
}
public float getWidth() {
return width;
}
public void setWidth(float width) {
if (width != this.width) {
this.width = width;
fireWidthChanged();
}
}
private void fireWidthChanged() {
fireLineFormatChanged(new LineFormatChangeEvent(this, width));
}
private void fireColorChanged() {
fireLineFormatChanged(new LineFormatChangeEvent(this, color));
}
private void fireStyleChanged() {
fireLineFormatChanged(new LineFormatChangeEvent(this, style));
}
private void fireLineFormatChanged(LineFormatChangeEvent e) {
Iterator<WeakReference<LineFormatListener>> it = listeners.iterator();
while (it.hasNext()) {
LineFormatListener l = it.next().get();
if (l != null) {
<|fim▁hole|> } else {
it.remove();
}
}
}
@Override
public LineFormat clone() {
LineFormat clone = new LineFormat();
clone.color = new Color(color.getRGB(), true);
clone.style = style;
clone.width = width;
return clone;
}
public BasicStroke getStroke() {
BasicStroke stroke = style.getStroke();
if (stroke != null) {
float[] scaledDashArray = getScaledDashArray();
BasicStroke scaledStroke = new BasicStroke(this.getWidth(), stroke.getEndCap(), stroke.getLineJoin(),
stroke.getMiterLimit(), scaledDashArray, stroke.getDashPhase());
return scaledStroke;
} else {
return null;
}
}
float[] getScaledDashArray() {
BasicStroke stroke = getStyle().getStroke();
if (stroke == null) {
return null;
}
float[] dashArray = stroke.getDashArray();
float[] scaledDashArray;
if (dashArray != null) {
float scalingFactor = getWidth();
if (scalingFactor <= 0) {
scalingFactor = 1;
}
if (scalingFactor != 1) {
scaledDashArray = DataStructureUtils.cloneAndMultiplyArray(dashArray, scalingFactor);
} else {
scaledDashArray = dashArray;
}
} else {
scaledDashArray = dashArray;
}
return scaledDashArray;
}
public void addLineFormatListener(LineFormatListener l) {
listeners.add(new WeakReference<LineFormatListener>(l));
}
public void removeLineFormatListener(LineFormatListener l) {
Iterator<WeakReference<LineFormatListener>> it = listeners.iterator();
while (it.hasNext()) {
LineFormatListener listener = it.next().get();
if (l != null) {
if (listener != null && listener.equals(l)) {
it.remove();
}
} else {
it.remove();
}
}
}
}<|fim▁end|> | l.lineFormatChanged(e);
|
<|file_name|>compat.py<|end_file_name|><|fim▁begin|>"""Provides compatibility with first-generation host delegation options in ansible-test."""
from __future__ import annotations
import argparse
import dataclasses
import enum
import os
import types
import typing as t
from ..constants import (
CONTROLLER_PYTHON_VERSIONS,
SUPPORTED_PYTHON_VERSIONS,
)
from ..util import (
ApplicationError,
display,
filter_args,
sorted_versions,
str_to_version,
)
from ..docker_util import (
docker_available,
)
from ..completion import (
DOCKER_COMPLETION,
REMOTE_COMPLETION,
filter_completion,
)
from ..host_configs import (
ControllerConfig,
ControllerHostConfig,
DockerConfig,
FallbackDetail,
FallbackReason,
HostConfig,
HostContext,
HostSettings,
NativePythonConfig,
NetworkInventoryConfig,
NetworkRemoteConfig,
OriginConfig,
PosixRemoteConfig,
VirtualPythonConfig,
WindowsInventoryConfig,
WindowsRemoteConfig,
)
def filter_python(version, versions): # type: (t.Optional[str], t.Optional[t.List[str]]) -> t.Optional[str]
"""If a Python version is given and is in the given version list, return that Python version, otherwise return None."""
return version if version in versions else None
def controller_python(version): # type: (t.Optional[str]) -> t.Optional[str]
"""If a Python version is given and is supported by the controller, return that Python version, otherwise return None."""
return filter_python(version, CONTROLLER_PYTHON_VERSIONS)
def get_fallback_remote_controller(): # type: () -> str
"""Return the remote fallback platform for the controller."""
platform = 'freebsd' # lower cost than RHEL and macOS
candidates = [item for item in filter_completion(REMOTE_COMPLETION).values() if item.controller_supported and item.platform == platform]
fallback = sorted(candidates, key=lambda value: str_to_version(value.version), reverse=True)[0]
return fallback.name
def get_option_name(name): # type: (str) -> str
"""Return a command-line option name from the given option name."""
if name == 'targets':
name = 'target'
return f'--{name.replace("_", "-")}'
class PythonVersionUnsupportedError(ApplicationError):
"""A Python version was requested for a context which does not support that version."""
def __init__(self, context, version, versions):
super().__init__(f'Python {version} is not supported by environment `{context}`. Supported Python version(s) are: {", ".join(versions)}')
class PythonVersionUnspecifiedError(ApplicationError):
"""A Python version was not specified for a context which is unknown, thus the Python version is unknown."""
def __init__(self, context):
super().__init__(f'A Python version was not specified for environment `{context}`. Use the `--python` option to specify a Python version.')
class ControllerNotSupportedError(ApplicationError):
"""Option(s) were specified which do not provide support for the controller and would be ignored because they are irrelevant for the target."""
def __init__(self, context):
super().__init__(f'Environment `{context}` does not provide a Python version supported by the controller.')
class OptionsConflictError(ApplicationError):
"""Option(s) were specified which conflict with other options."""
def __init__(self, first, second):
super().__init__(f'Options `{" ".join(first)}` cannot be combined with options `{" ".join(second)}`.')
@dataclasses.dataclass(frozen=True)
class LegacyHostOptions:
"""Legacy host options used prior to the availability of separate controller and target host configuration."""
python: t.Optional[str] = None
python_interpreter: t.Optional[str] = None
local: t.Optional[bool] = None
venv: t.Optional[bool] = None
venv_system_site_packages: t.Optional[bool] = None
remote: t.Optional[str] = None
remote_provider: t.Optional[str] = None
docker: t.Optional[str] = None
docker_privileged: t.Optional[bool] = None
docker_seccomp: t.Optional[str] = None
docker_memory: t.Optional[int] = None
windows: t.Optional[t.List[str]] = None
platform: t.Optional[t.List[str]] = None
platform_collection: t.Optional[t.List[t.Tuple[str, str]]] = None
platform_connection: t.Optional[t.List[t.Tuple[str, str]]] = None
inventory: t.Optional[str] = None
@staticmethod
def create(namespace): # type: (t.Union[argparse.Namespace, types.SimpleNamespace]) -> LegacyHostOptions
"""Create legacy host options from the given namespace."""
kwargs = {field.name: getattr(namespace, field.name, None) for field in dataclasses.fields(LegacyHostOptions)}
if kwargs['python'] == 'default':
kwargs['python'] = None
return LegacyHostOptions(**kwargs)
@staticmethod
def purge_namespace(namespace): # type: (t.Union[argparse.Namespace, types.SimpleNamespace]) -> None
"""Purge legacy host options fields from the given namespace."""
for field in dataclasses.fields(LegacyHostOptions): # type: dataclasses.Field
if hasattr(namespace, field.name):
delattr(namespace, field.name)
@staticmethod
def purge_args(args): # type: (t.List[str]) -> t.List[str]
"""Purge legacy host options from the given command line arguments."""
fields = dataclasses.fields(LegacyHostOptions) # type: t.Tuple[dataclasses.Field, ...]
filters = {get_option_name(field.name): 0 if field.type is t.Optional[bool] else 1 for field in fields} # type: t.Dict[str, int]
return filter_args(args, filters)
def get_options_used(self): # type: () -> t.Tuple[str, ...]
"""Return a tuple of the command line options used."""
fields = dataclasses.fields(self) # type: t.Tuple[dataclasses.Field, ...]
options = tuple(sorted(get_option_name(field.name) for field in fields if getattr(self, field.name)))
return options
class TargetMode(enum.Enum):
"""Type of provisioning to use for the targets."""
WINDOWS_INTEGRATION = enum.auto() # windows-integration
NETWORK_INTEGRATION = enum.auto() # network-integration
POSIX_INTEGRATION = enum.auto() # integration
SANITY = enum.auto() # sanity
UNITS = enum.auto() # units
SHELL = enum.auto() # shell
NO_TARGETS = enum.auto() # coverage
@property
def one_host(self):
"""Return True if only one host (the controller) should be used, otherwise return False."""
return self in (TargetMode.SANITY, TargetMode.UNITS, TargetMode.NO_TARGETS)
@property
def no_fallback(self):
"""Return True if no fallback is acceptable for the controller (due to options not applying to the target), otherwise return False."""
return self in (TargetMode.WINDOWS_INTEGRATION, TargetMode.NETWORK_INTEGRATION, TargetMode.NO_TARGETS)
@property
def multiple_pythons(self):
"""Return True if multiple Python versions are allowed, otherwise False."""
return self in (TargetMode.SANITY, TargetMode.UNITS)
@property
def has_python(self):
"""Return True if this mode uses Python, otherwise False."""
return self in (TargetMode.POSIX_INTEGRATION, TargetMode.SANITY, TargetMode.UNITS, TargetMode.SHELL)
def convert_legacy_args(
argv, # type: t.List[str]
args, # type: t.Union[argparse.Namespace, types.SimpleNamespace]
mode, # type: TargetMode
): # type: (...) -> HostSettings
"""Convert pre-split host arguments in the given namespace to their split counterparts."""
old_options = LegacyHostOptions.create(args)
old_options.purge_namespace(args)
new_options = [
'--controller',
'--target',
'--target-python',
]
used_old_options = old_options.get_options_used()
used_new_options = [name for name in new_options if name in argv]
if used_old_options:
if used_new_options:
raise OptionsConflictError(used_old_options, used_new_options)
controller, targets, controller_fallback = get_legacy_host_config(mode, old_options)
if controller_fallback:
if mode.one_host:
display.info(controller_fallback.message, verbosity=1)
else:
display.warning(controller_fallback.message)
used_default_pythons = mode in (TargetMode.SANITY, TargetMode.UNITS) and not native_python(old_options)
else:
controller = args.controller or OriginConfig()
controller_fallback = None
if mode == TargetMode.NO_TARGETS:
targets = []
used_default_pythons = False
elif args.targets:
targets = args.targets
used_default_pythons = False
else:
targets = default_targets(mode, controller)
used_default_pythons = mode in (TargetMode.SANITY, TargetMode.UNITS)
args.controller = controller
args.targets = targets
if used_default_pythons:
targets = t.cast(t.List[ControllerConfig], targets)
skipped_python_versions = sorted_versions(list(set(SUPPORTED_PYTHON_VERSIONS) - {target.python.version for target in targets}))
else:
skipped_python_versions = []
filtered_args = old_options.purge_args(argv)
filtered_args = filter_args(filtered_args, {name: 1 for name in new_options})
host_settings = HostSettings(
controller=controller,
targets=targets,
skipped_python_versions=skipped_python_versions,
filtered_args=filtered_args,
controller_fallback=controller_fallback,
)
return host_settings
def controller_targets(
mode, # type: TargetMode
options, # type: LegacyHostOptions
controller, # type: ControllerHostConfig
): # type: (...) -> t.List[ControllerConfig]
"""Return the configuration for controller targets."""
python = native_python(options)
if python:
targets = [ControllerConfig(python=python)]
else:
targets = default_targets(mode, controller)
return targets
def native_python(options): # type: (LegacyHostOptions) -> t.Optional[NativePythonConfig]
"""Return a NativePythonConfig for the given version if it is not None, otherwise return None."""
if not options.python and not options.python_interpreter:
return None
return NativePythonConfig(version=options.python, path=options.python_interpreter)
def get_legacy_host_config(
mode, # type: TargetMode
options, # type: LegacyHostOptions
): # type: (...) -> t.Tuple[HostConfig, t.List[HostConfig], t.Optional[FallbackDetail]]
"""
Returns controller and target host configs derived from the provided legacy host options.
The goal is to match the original behavior, by using non-split testing whenever possible.
When the options support the controller, use the options for the controller and use ControllerConfig for the targets.
When the options do not support the controller, use the options for the targets and use a default controller config influenced by the options.
"""
venv_fallback = 'venv/default'
docker_fallback = 'default'
remote_fallback = get_fallback_remote_controller()
controller_fallback = None # type: t.Optional[t.Tuple[str, str, FallbackReason]]
if options.venv:
if controller_python(options.python) or not options.python:
controller = OriginConfig(python=VirtualPythonConfig(version=options.python or 'default', system_site_packages=options.venv_system_site_packages))
else:
controller_fallback = f'origin:python={venv_fallback}', f'--venv --python {options.python}', FallbackReason.PYTHON
controller = OriginConfig(python=VirtualPythonConfig(version='default', system_site_packages=options.venv_system_site_packages))
if mode in (TargetMode.SANITY, TargetMode.UNITS):
targets = controller_targets(mode, options, controller)
# Target sanity tests either have no Python requirements or manage their own virtual environments.
# Thus there is no point in setting up virtual environments ahead of time for them.
if mode == TargetMode.UNITS:
targets = [ControllerConfig(python=VirtualPythonConfig(version=target.python.version, path=target.python.path,
system_site_packages=options.venv_system_site_packages)) for target in targets]
else:
targets = [ControllerConfig(python=VirtualPythonConfig(version=options.python or 'default',
system_site_packages=options.venv_system_site_packages))]
elif options.docker:
docker_config = filter_completion(DOCKER_COMPLETION).get(options.docker)
if docker_config:
if options.python and options.python not in docker_config.supported_pythons:
raise PythonVersionUnsupportedError(f'--docker {options.docker}', options.python, docker_config.supported_pythons)
if docker_config.controller_supported:
if controller_python(options.python) or not options.python:
controller = DockerConfig(name=options.docker, python=native_python(options),
privileged=options.docker_privileged, seccomp=options.docker_seccomp, memory=options.docker_memory)
targets = controller_targets(mode, options, controller)
else:
controller_fallback = f'docker:{options.docker}', f'--docker {options.docker} --python {options.python}', FallbackReason.PYTHON
controller = DockerConfig(name=options.docker)
targets = controller_targets(mode, options, controller)
else:
controller_fallback = f'docker:{docker_fallback}', f'--docker {options.docker}', FallbackReason.ENVIRONMENT
controller = DockerConfig(name=docker_fallback)
targets = [DockerConfig(name=options.docker, python=native_python(options),
privileged=options.docker_privileged, seccomp=options.docker_seccomp, memory=options.docker_memory)]
else:
if not options.python:
raise PythonVersionUnspecifiedError(f'--docker {options.docker}')
if controller_python(options.python):
controller = DockerConfig(name=options.docker, python=native_python(options),
privileged=options.docker_privileged, seccomp=options.docker_seccomp, memory=options.docker_memory)
targets = controller_targets(mode, options, controller)
else:
controller_fallback = f'docker:{docker_fallback}', f'--docker {options.docker} --python {options.python}', FallbackReason.PYTHON
controller = DockerConfig(name=docker_fallback)
targets = [DockerConfig(name=options.docker, python=native_python(options),
privileged=options.docker_privileged, seccomp=options.docker_seccomp, memory=options.docker_memory)]
elif options.remote:
remote_config = filter_completion(REMOTE_COMPLETION).get(options.remote)
context, reason = None, None
if remote_config:
if options.python and options.python not in remote_config.supported_pythons:
raise PythonVersionUnsupportedError(f'--remote {options.remote}', options.python, remote_config.supported_pythons)
if remote_config.controller_supported:
if controller_python(options.python) or not options.python:
controller = PosixRemoteConfig(name=options.remote, python=native_python(options), provider=options.remote_provider)
targets = controller_targets(mode, options, controller)
else:
controller_fallback = f'remote:{options.remote}', f'--remote {options.remote} --python {options.python}', FallbackReason.PYTHON
controller = PosixRemoteConfig(name=options.remote, provider=options.remote_provider)
targets = controller_targets(mode, options, controller)
else:
context, reason = f'--remote {options.remote}', FallbackReason.ENVIRONMENT
controller = None
targets = [PosixRemoteConfig(name=options.remote, python=native_python(options), provider=options.remote_provider)]
elif mode == TargetMode.SHELL and options.remote.startswith('windows/'):
if options.python and options.python not in CONTROLLER_PYTHON_VERSIONS:
raise ControllerNotSupportedError(f'--python {options.python}')
controller = OriginConfig(python=native_python(options))
targets = [WindowsRemoteConfig(name=options.remote, provider=options.remote_provider)]
else:
if not options.python:
raise PythonVersionUnspecifiedError(f'--remote {options.remote}')
if controller_python(options.python):
controller = PosixRemoteConfig(name=options.remote, python=native_python(options), provider=options.remote_provider)
targets = controller_targets(mode, options, controller)
else:
context, reason = f'--remote {options.remote} --python {options.python}', FallbackReason.PYTHON
controller = None
targets = [PosixRemoteConfig(name=options.remote, python=native_python(options), provider=options.remote_provider)]
if not controller:
if docker_available():
controller_fallback = f'docker:{docker_fallback}', context, reason
controller = DockerConfig(name=docker_fallback)
else:
controller_fallback = f'remote:{remote_fallback}', context, reason
controller = PosixRemoteConfig(name=remote_fallback)
else: # local/unspecified
# There are several changes in behavior from the legacy implementation when using no delegation (or the `--local` option).<|fim▁hole|> # Previously this option was completely ignored except when used with the `--docker` or `--remote` options.
# 2) The `--python` option now triggers re-execution of ansible-test if it differs from sys.version_info.
# Previously it affected Python subprocesses, but not ansible-test itself.
if controller_python(options.python) or not options.python:
controller = OriginConfig(python=native_python(options))
targets = controller_targets(mode, options, controller)
else:
controller_fallback = 'origin:python=default', f'--python {options.python}', FallbackReason.PYTHON
controller = OriginConfig()
targets = controller_targets(mode, options, controller)
if controller_fallback:
controller_option, context, reason = controller_fallback
if mode.no_fallback:
raise ControllerNotSupportedError(context)
fallback_detail = FallbackDetail(
reason=reason,
message=f'Using `--controller {controller_option}` since `{context}` does not support the controller.',
)
else:
fallback_detail = None
if mode.one_host and any(not isinstance(target, ControllerConfig) for target in targets):
raise ControllerNotSupportedError(controller_fallback[1])
if mode == TargetMode.NO_TARGETS:
targets = []
else:
targets = handle_non_posix_targets(mode, options, targets)
return controller, targets, fallback_detail
def handle_non_posix_targets(
mode, # type: TargetMode
options, # type: LegacyHostOptions
targets, # type: t.List[HostConfig]
): # type: (...) -> t.List[HostConfig]
"""Return a list of non-POSIX targets if the target mode is non-POSIX."""
if mode == TargetMode.WINDOWS_INTEGRATION:
if options.windows:
targets = [WindowsRemoteConfig(name=f'windows/{version}', provider=options.remote_provider) for version in options.windows]
else:
targets = [WindowsInventoryConfig(path=options.inventory)]
elif mode == TargetMode.NETWORK_INTEGRATION:
if options.platform:
targets = [NetworkRemoteConfig(name=platform, provider=options.remote_provider) for platform in options.platform]
for platform, collection in options.platform_collection or []:
for entry in targets:
if entry.platform == platform:
entry.collection = collection
for platform, connection in options.platform_connection or []:
for entry in targets:
if entry.platform == platform:
entry.connection = connection
else:
targets = [NetworkInventoryConfig(path=options.inventory)]
return targets
def default_targets(
mode, # type: TargetMode
controller, # type: ControllerHostConfig
): # type: (...) -> t.List[HostConfig]
"""Return a list of default targets for the given target mode."""
if mode == TargetMode.WINDOWS_INTEGRATION:
targets = [WindowsInventoryConfig(path=os.path.abspath('test/integration/inventory.winrm'))]
elif mode == TargetMode.NETWORK_INTEGRATION:
targets = [NetworkInventoryConfig(path=os.path.abspath('test/integration/inventory.networking'))]
elif mode.multiple_pythons:
targets = controller.get_default_targets(HostContext(controller_config=controller))
else:
targets = [ControllerConfig()]
return targets<|fim▁end|> | # These changes are due to ansible-test now maintaining consistency between its own Python and that of controller Python subprocesses.
#
# 1) The `--python-interpreter` option (if different from sys.executable) now affects controller subprocesses and triggers re-execution of ansible-test. |
<|file_name|>uac-srtp-sdes-reinv-dtls.py<|end_file_name|><|fim▁begin|># $Id$<|fim▁hole|>
PJSUA = ["--null-audio --max-calls=1 --auto-answer=200 --no-tcp --srtp-secure 0 --use-srtp 2 --srtp-keying=0"]
PJSUA_EXPECTS = [[0, "SRTP uses keying method SDES", ""],
[0, "SRTP uses keying method DTLS-SRTP", ""]
]<|fim▁end|> | #
import inc_const as const |
<|file_name|>dircmpdel.py<|end_file_name|><|fim▁begin|>import os
import errno
def delete_file(file_name, dry=False):
if dry:
print(' DRY DELETED: {}'.format(file_name))
else:
os.remove(file_name)
try:
dirname = os.path.dirname(file_name)
os.rmdir(dirname)
print(' DELETED DIR: {}'.format(dirname))
except OSError as ex:
if ex.errno != errno.ENOTEMPTY:
raise
print(' DELETED: {}'.format(file_name))
def run_dircmpdel(dircmp_file, prompt=True, dry=False):
"""
Parse dircmp file for groups of file names to be deleted.
"""
with open(dircmp_file) as fp:
lines = fp.read()
groups = lines.strip().split('\n\n')
print('Found {} duplicate groups'.format(len(groups)))
groups = (group.split('\n') for group in groups)
checked_proper_cwd = False
for group in groups:
for i, file_name in enumerate(group):
if not i:
if not checked_proper_cwd:
if not os.path.exists(file_name):
raise RuntimeError('File {} could not be found. '
'Please ensure you are in the '
'correct directory.'
''.format(file_name))
checked_proper_cwd = True
print('Deleting duplicates of {}'.format(file_name))
else:
if prompt:<|fim▁hole|> while True:
resp = input(' Delete {}? '.format(file_name))
resp = resp.lower()
if resp not in ('yes', 'no'):
print('Please answer "yes" or "no".')
elif resp == 'yes':
delete_file(file_name, dry=dry)
break
elif resp == 'no':
print(' Not deleted: {}'.format(file_name))
break
else:
delete_file(file_name, dry=dry)
print()
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(
description='Utility for deleting duplicate files found by dircmp'
)
parser.add_argument('file')
parser.add_argument('--no-prompt',
action='store_false', default=True, dest='prompt')
parser.add_argument('-d', '--dry',
action='store_true', default=False, dest='dry')
args = parser.parse_args()
run_dircmpdel(args.file, prompt=args.prompt, dry=args.dry)<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Communication primitives for concurrent tasks
//!
//! Rust makes it very difficult to share data among tasks to prevent race
//! conditions and to improve parallelism, but there is often a need for
//! communication between concurrent tasks. The primitives defined in this
//! module are the building blocks for synchronization in rust.
//!
//! This module provides message-based communication over channels, concretely
//! defined as two types:
//!
//! * `Sender`
//! * `Receiver`
//!
//! A `Sender` is used to send data to a `Receiver`. A `Sender` is clone-able
//! such that many tasks can send simultaneously to one receiver. These
//! channels are *task blocking*, not *thread blocking*. This means that if one
//! task is blocked on a channel, other tasks can continue to make progress.
//!
//! Rust channels can be used as if they have an infinite internal buffer. What
//! this means is that the `send` operation will never block. `Receiver`s, on
//! the other hand, will block the task if there is no data to be received.
//!
//! ## Failure Propagation
//!
//! In addition to being a core primitive for communicating in rust, channels
//! are the points at which failure is propagated among tasks. Whenever the one
//! half of channel is closed, the other half will have its next operation
//! `fail!`. The purpose of this is to allow propagation of failure among tasks
//! that are linked to one another via channels.
//!
//! There are methods on both of `Sender` and `Receiver` to perform their
//! respective operations without failing, however.
//!
//! ## Outside the Runtime
//!
//! All channels and ports work seamlessly inside and outside of the rust
//! runtime. This means that code may use channels to communicate information
//! inside and outside of the runtime. For example, if rust were embedded as an
//! FFI module in another application, the rust runtime would probably be
//! running in its own external thread pool. Channels created can communicate
//! from the native application threads to the rust threads through the use of
//! native mutexes and condition variables.
//!
//! What this means is that if a native thread is using a channel, execution
//! will be blocked accordingly by blocking the OS thread.
//!
//! # Example
//!
//! ```rust,should_fail
//! // Create a simple streaming channel
//! let (tx, rx) = channel();
//! spawn(proc() {
//! tx.send(10);
//! });
//! assert_eq!(rx.recv(), 10);
//!
//! // Create a shared channel which can be sent along from many tasks
//! let (tx, rx) = channel();
//! for i in range(0, 10) {
//! let tx = tx.clone();
//! spawn(proc() {
//! tx.send(i);
//! })
//! }
//!
//! for _ in range(0, 10) {
//! let j = rx.recv();
//! assert!(0 <= j && j < 10);
//! }
//!
//! // The call to recv() will fail!() because the channel has already hung
//! // up (or been deallocated)
//! let (tx, rx) = channel::<int>();
//! drop(tx);
//! rx.recv();
//! ```
// A description of how Rust's channel implementation works
//
// Channels are supposed to be the basic building block for all other
// concurrent primitives that are used in Rust. As a result, the channel type
// needs to be highly optimized, flexible, and broad enough for use everywhere.
//
// The choice of implementation of all channels is to be built on lock-free data
// structures. The channels themselves are then consequently also lock-free data
// structures. As always with lock-free code, this is a very "here be dragons"
// territory, especially because I'm unaware of any academic papers which have
// gone into great length about channels of these flavors.
//
// ## Flavors of channels
//
// From the perspective of a consumer of this library, there is only one flavor
// of channel. This channel can be used as a stream and cloned to allow multiple
// senders. Under the hood, however, there are actually three flavors of
// channels in play.
//
// * Oneshots - these channels are highly optimized for the one-send use case.
// They contain as few atomics as possible and involve one and
// exactly one allocation.
// * Streams - these channels are optimized for the non-shared use case. They
// use a different concurrent queue which is more tailored for this
// use case. The initial allocation of this flavor of channel is not
// optimized.
// * Shared - this is the most general form of channel that this module offers,
// a channel with multiple senders. This type is as optimized as it
// can be, but the previous two types mentioned are much faster for
// their use-cases.
//
// ## Concurrent queues
//
// The basic idea of Rust's Sender/Receiver types is that send() never blocks, but
// recv() obviously blocks. This means that under the hood there must be some
// shared and concurrent queue holding all of the actual data.
//
// With two flavors of channels, two flavors of queues are also used. We have
// chosen to use queues from a well-known author which are abbreviated as SPSC
// and MPSC (single producer, single consumer and multiple producer, single
// consumer). SPSC queues are used for streams while MPSC queues are used for
// shared channels.
//
// ### SPSC optimizations
//
// The SPSC queue found online is essentially a linked list of nodes where one
// half of the nodes are the "queue of data" and the other half of nodes are a
// cache of unused nodes. The unused nodes are used such that an allocation is
// not required on every push() and a free doesn't need to happen on every
// pop().
//
// As found online, however, the cache of nodes is of an infinite size. This
// means that if a channel at one point in its life had 50k items in the queue,
// then the queue will always have the capacity for 50k items. I believed that
// this was an unnecessary limitation of the implementation, so I have altered
// the queue to optionally have a bound on the cache size.
//
// By default, streams will have an unbounded SPSC queue with a small-ish cache
// size. The hope is that the cache is still large enough to have very fast
// send() operations while not too large such that millions of channels can
// coexist at once.
//
// ### MPSC optimizations
//
// Right now the MPSC queue has not been optimized. Like the SPSC queue, it uses
// a linked list under the hood to earn its unboundedness, but I have not put
// forth much effort into having a cache of nodes similar to the SPSC queue.
//
// For now, I believe that this is "ok" because shared channels are not the most
// common type, but soon we may wish to revisit this queue choice and determine
// another candidate for backend storage of shared channels.
//
// ## Overview of the Implementation
//
// Now that there's a little background on the concurrent queues used, it's
// worth going into much more detail about the channels themselves. The basic
// pseudocode for a send/recv are:
//<|fim▁hole|>//
// send(t) recv()
// queue.push(t) return if queue.pop()
// if increment() == -1 deschedule {
// wakeup() if decrement() > 0
// cancel_deschedule()
// }
// queue.pop()
//
// As mentioned before, there are no locks in this implementation, only atomic
// instructions are used.
//
// ### The internal atomic counter
//
// Every channel has a shared counter with each half to keep track of the size
// of the queue. This counter is used to abort descheduling by the receiver and
// to know when to wake up on the sending side.
//
// As seen in the pseudocode, senders will increment this count and receivers
// will decrement the count. The theory behind this is that if a sender sees a
// -1 count, it will wake up the receiver, and if the receiver sees a 1+ count,
// then it doesn't need to block.
//
// The recv() method has a beginning call to pop(), and if successful, it needs
// to decrement the count. It is a crucial implementation detail that this
// decrement does *not* happen to the shared counter. If this were the case,
// then it would be possible for the counter to be very negative when there were
// no receivers waiting, in which case the senders would have to determine when
// it was actually appropriate to wake up a receiver.
//
// Instead, the "steal count" is kept track of separately (not atomically
// because it's only used by receivers), and then the decrement() call when
// descheduling will lump in all of the recent steals into one large decrement.
//
// The implication of this is that if a sender sees a -1 count, then there's
// guaranteed to be a waiter waiting!
//
// ## Native Implementation
//
// A major goal of these channels is to work seamlessly on and off the runtime.
// All of the previous race conditions have been worded in terms of
// scheduler-isms (which is obviously not available without the runtime).
//
// For now, native usage of channels (off the runtime) will fall back onto
// mutexes/cond vars for descheduling/atomic decisions. The no-contention path
// is still entirely lock-free, the "deschedule" blocks above are surrounded by
// a mutex and the "wakeup" blocks involve grabbing a mutex and signaling on a
// condition variable.
//
// ## Select
//
// Being able to support selection over channels has greatly influenced this
// design, and not only does selection need to work inside the runtime, but also
// outside the runtime.
//
// The implementation is fairly straightforward. The goal of select() is not to
// return some data, but only to return which channel can receive data without
// blocking. The implementation is essentially the entire blocking procedure
// followed by an increment as soon as its woken up. The cancellation procedure
// involves an increment and swapping out of to_wake to acquire ownership of the
// task to unblock.
//
// Sadly this current implementation requires multiple allocations, so I have
// seen the throughput of select() be much worse than it should be. I do not
// believe that there is anything fundamental which needs to change about these
// channels, however, in order to support a more efficient select().
//
// # Conclusion
//
// And now that you've seen all the races that I found and attempted to fix,
// here's the code for you to find some more!
use cast;
use cell::Cell;
use clone::Clone;
use iter::Iterator;
use kinds::Send;
use kinds::marker;
use mem;
use ops::Drop;
use option::{Some, None, Option};
use result::{Ok, Err, Result};
use rt::local::Local;
use rt::task::{Task, BlockedTask};
use sync::arc::UnsafeArc;
pub use comm::select::{Select, Handle};
macro_rules! test (
{ fn $name:ident() $b:block $(#[$a:meta])*} => (
mod $name {
#![allow(unused_imports)]
use native;
use comm::*;
use prelude::*;
use super::*;
use super::super::*;
use task;
fn f() $b
$(#[$a])* #[test] fn uv() { f() }
$(#[$a])* #[test] fn native() {
use native;
let (tx, rx) = channel();
native::task::spawn(proc() { tx.send(f()) });
rx.recv();
}
}
)
)
mod select;
mod oneshot;
mod stream;
mod shared;
mod sync;
// Use a power of 2 to allow LLVM to optimize to something that's not a
// division, this is hit pretty regularly.
static RESCHED_FREQ: int = 256;
/// The receiving-half of Rust's channel type. This half can only be owned by
/// one task
pub struct Receiver<T> {
inner: Flavor<T>,
receives: Cell<uint>,
// can't share in an arc
marker: marker::NoShare,
}
/// An iterator over messages on a receiver, this iterator will block
/// whenever `next` is called, waiting for a new message, and `None` will be
/// returned when the corresponding channel has hung up.
pub struct Messages<'a, T> {
rx: &'a Receiver<T>
}
/// The sending-half of Rust's asynchronous channel type. This half can only be
/// owned by one task, but it can be cloned to send to other tasks.
pub struct Sender<T> {
inner: Flavor<T>,
sends: Cell<uint>,
// can't share in an arc
marker: marker::NoShare,
}
/// The sending-half of Rust's synchronous channel type. This half can only be
/// owned by one task, but it can be cloned to send to other tasks.
pub struct SyncSender<T> {
inner: UnsafeArc<sync::Packet<T>>,
// can't share in an arc
marker: marker::NoShare,
}
/// This enumeration is the list of the possible reasons that try_recv could not
/// return data when called.
#[deriving(Eq, Clone, Show)]
pub enum TryRecvResult<T> {
/// This channel is currently empty, but the sender(s) have not yet
/// disconnected, so data may yet become available.
Empty,
/// This channel's sending half has become disconnected, and there will
/// never be any more data received on this channel
Disconnected,
/// The channel had some data and we successfully popped it
Data(T),
}
/// This enumeration is the list of the possible outcomes for the
/// `SyncSender::try_send` method.
#[deriving(Eq, Clone, Show)]
pub enum TrySendResult<T> {
    /// The data was successfully sent along the channel. This either means that
    /// it was buffered in the channel, or handed off to a receiver. In either
    /// case, the callee no longer has ownership of the data.
    Sent,
    /// The data could not be sent on the channel because it would require that
    /// the callee block to send the data.
    ///
    /// If this is a buffered channel, then the buffer is full at this time. If
    /// this is not a buffered channel, then there is no receiver available to
    /// acquire the data.
    Full(T),
    /// This channel's receiving half has disconnected, so the data could not be
    /// sent. The data is returned back to the callee in this case.
    RecvDisconnected(T),
}
// Internal representation of one end of a channel. A channel starts out as a
// cheap Oneshot and is upgraded in place to Stream (on a second send) or
// Shared (when the sender is cloned); Sync is only created by sync_channel
// and never changes flavor.
enum Flavor<T> {
    Oneshot(UnsafeArc<oneshot::Packet<T>>),
    Stream(UnsafeArc<stream::Packet<T>>),
    Shared(UnsafeArc<shared::Packet<T>>),
    Sync(UnsafeArc<sync::Packet<T>>),
}
/// Creates a new channel, returning the sender/receiver halves. All data sent
/// on the sender will become available on the receiver. See the documentation
/// of `Receiver` and `Sender` to see what's possible with them.
pub fn channel<T: Send>() -> (Sender<T>, Receiver<T>) {
    // Every channel begins life as a lightweight oneshot packet; it is
    // upgraded lazily to a stream/shared packet only if it is ever needed.
    let (recv_end, send_end) = UnsafeArc::new2(oneshot::Packet::new());
    let tx = Sender::my_new(Oneshot(send_end));
    let rx = Receiver::my_new(Oneshot(recv_end));
    (tx, rx)
}
/// Creates a new synchronous, bounded channel.
///
/// Like asynchronous channels, the `Receiver` will block until a message
/// becomes available. These channels differ greatly in the semantics of the
/// sender from asynchronous channels, however.
///
/// This channel has an internal buffer on which messages will be queued. When
/// the internal buffer becomes full, future sends will *block* waiting for the
/// buffer to open up. Note that a buffer size of 0 is valid, in which case this
/// becomes "rendezvous channel" where each send will not return until a recv
/// is paired with it.
///
/// As with asynchronous channels, all senders will fail in `send` if the
/// `Receiver` has been destroyed.
///
/// # Example
///
/// ```
/// let (tx, rx) = sync_channel(1);
///
/// // this returns immediately
/// tx.send(1);
///
/// spawn(proc() {
///     // this will block until the previous message has been received
///     tx.send(2);
/// });
///
/// assert_eq!(rx.recv(), 1);
/// assert_eq!(rx.recv(), 2);
/// ```
pub fn sync_channel<T: Send>(bound: uint) -> (SyncSender<T>, Receiver<T>) {
    // `bound` is the internal buffer capacity; 0 produces a rendezvous
    // channel. Sync channels never change flavor, so no upgrade machinery.
    let (a, b) = UnsafeArc::new2(sync::Packet::new(bound));
    (SyncSender::new(a), Receiver::my_new(Sync(b)))
}
////////////////////////////////////////////////////////////////////////////////
// Sender
////////////////////////////////////////////////////////////////////////////////
impl<T: Send> Sender<T> {
    // Internal constructor wrapping a flavor with fresh bookkeeping state.
    fn my_new(inner: Flavor<T>) -> Sender<T> {
        Sender { inner: inner, sends: Cell::new(0), marker: marker::NoShare }
    }

    /// Sends a value along this channel to be received by the corresponding
    /// receiver.
    ///
    /// Rust channels are infinitely buffered so this method will never block.
    ///
    /// # Failure
    ///
    /// This function will fail if the other end of the channel has hung up.
    /// This means that if the corresponding receiver has fallen out of scope,
    /// this function will trigger a fail message saying that a message is
    /// being sent on a closed channel.
    ///
    /// Note that if this function does *not* fail, it does not mean that the
    /// data will be successfully received. All sends are placed into a queue,
    /// so it is possible for a send to succeed (the other end is alive), but
    /// then the other end could immediately disconnect.
    ///
    /// The purpose of this functionality is to propagate failure among tasks.
    /// If failure is not desired, then consider using the `try_send` method
    pub fn send(&self, t: T) {
        if !self.try_send(t) {
            fail!("sending on a closed channel");
        }
    }

    /// Attempts to send a value on this channel, returning whether it was
    /// successfully sent.
    ///
    /// A successful send occurs when it is determined that the other end of
    /// the channel has not hung up already. An unsuccessful send would be one
    /// where the corresponding receiver has already been deallocated. Note
    /// that a return value of `false` means that the data will never be
    /// received, but a return value of `true` does *not* mean that the data
    /// will be received. It is possible for the corresponding receiver to
    /// hang up immediately after this function returns `true`.
    ///
    /// Like `send`, this method will never block. If the failure of send cannot
    /// be tolerated, then this method should be used instead.
    pub fn try_send(&self, t: T) -> bool {
        // In order to prevent starvation of other tasks in situations where
        // a task sends repeatedly without ever receiving, we occassionally
        // yield instead of doing a send immediately.
        //
        // Don't unconditionally attempt to yield because the TLS overhead can
        // be a bit much, and also use `try_take` instead of `take` because
        // there's no reason that this send shouldn't be usable off the
        // runtime.
        let cnt = self.sends.get() + 1;
        self.sends.set(cnt);
        if cnt % (RESCHED_FREQ as uint) == 0 {
            let task: Option<~Task> = Local::try_take();
            task.map(|t| t.maybe_yield());
        }
        let (new_inner, ret) = match self.inner {
            Oneshot(ref p) => {
                let p = p.get();
                unsafe {
                    if !(*p).sent() {
                        // First send on a oneshot: no upgrade required.
                        return (*p).send(t);
                    } else {
                        // Second send: upgrade the oneshot to a stream
                        // channel in place, then deliver on the new packet.
                        let (a, b) = UnsafeArc::new2(stream::Packet::new());
                        match (*p).upgrade(Receiver::my_new(Stream(b))) {
                            oneshot::UpSuccess => {
                                (*a.get()).send(t);
                                (a, true)
                            }
                            oneshot::UpDisconnected => (a, false),
                            oneshot::UpWoke(task) => {
                                // The receiver was blocked; send on the new
                                // packet first, then wake it up.
                                (*a.get()).send(t);
                                task.wake().map(|t| t.reawaken());
                                (a, true)
                            }
                        }
                    }
                }
            }
            Stream(ref p) => return unsafe { (*p.get()).send(t) },
            Shared(ref p) => return unsafe { (*p.get()).send(t) },
            Sync(..) => unreachable!(),
        };
        // Swap the upgraded stream flavor into `self`; transmute_mut is
        // needed because this mutation happens behind a shared reference.
        unsafe {
            let mut tmp = Sender::my_new(Stream(new_inner));
            mem::swap(&mut cast::transmute_mut(self).inner, &mut tmp.inner);
        }
        return ret;
    }
}
impl<T: Send> Clone for Sender<T> {
    // Cloning a sender forces an upgrade to the shared (multi-producer)
    // flavor; oneshot/stream packets are upgraded in place first.
    fn clone(&self) -> Sender<T> {
        let (packet, sleeper) = match self.inner {
            Oneshot(ref p) => {
                let (a, b) = UnsafeArc::new2(shared::Packet::new());
                match unsafe { (*p.get()).upgrade(Receiver::my_new(Shared(a))) } {
                    oneshot::UpSuccess | oneshot::UpDisconnected => (b, None),
                    oneshot::UpWoke(task) => (b, Some(task))
                }
            }
            Stream(ref p) => {
                let (a, b) = UnsafeArc::new2(shared::Packet::new());
                match unsafe { (*p.get()).upgrade(Receiver::my_new(Shared(a))) } {
                    stream::UpSuccess | stream::UpDisconnected => (b, None),
                    stream::UpWoke(task) => (b, Some(task)),
                }
            }
            Shared(ref p) => {
                // Already shared: just bump the channel count and hand out
                // another handle to the same packet.
                unsafe { (*p.get()).clone_chan(); }
                return Sender::my_new(Shared(p.clone()));
            }
            Sync(..) => unreachable!(),
        };
        unsafe {
            // Transfer any receiver that was blocked mid-upgrade onto the new
            // shared packet, then swap the shared flavor into `self`.
            (*packet.get()).inherit_blocker(sleeper);
            let mut tmp = Sender::my_new(Shared(packet.clone()));
            mem::swap(&mut cast::transmute_mut(self).inner, &mut tmp.inner);
        }
        Sender::my_new(Shared(packet))
    }
}
#[unsafe_destructor]
impl<T: Send> Drop for Sender<T> {
    // Notify the packet that one sender has gone away so a blocked receiver
    // can observe disconnection.
    fn drop(&mut self) {
        match self.inner {
            Oneshot(ref mut p) => unsafe { (*p.get()).drop_chan(); },
            Stream(ref mut p) => unsafe { (*p.get()).drop_chan(); },
            Shared(ref mut p) => unsafe { (*p.get()).drop_chan(); },
            // Sync channels use SyncSender, never this type.
            Sync(..) => unreachable!(),
        }
    }
}
////////////////////////////////////////////////////////////////////////////////
// SyncSender
////////////////////////////////////////////////////////////////////////////////
impl<T: Send> SyncSender<T> {
    // Internal constructor; sync channels hold their packet directly.
    fn new(inner: UnsafeArc<sync::Packet<T>>) -> SyncSender<T> {
        SyncSender { inner: inner, marker: marker::NoShare }
    }

    /// Sends a value on this synchronous channel.
    ///
    /// This function will *block* until space in the internal buffer becomes
    /// available or a receiver is available to hand off the message to.
    ///
    /// Note that a successful send does *not* guarantee that the receiver will
    /// ever see the data if there is a buffer on this channel. Messages may be
    /// enqueued in the internal buffer for the receiver to receive at a later
    /// time. If the buffer size is 0, however, it can be guaranteed that the
    /// receiver has indeed received the data if this function returns success.
    ///
    /// # Failure
    ///
    /// Similarly to `Sender::send`, this function will fail if the
    /// corresponding `Receiver` for this channel has disconnected. This
    /// behavior is used to propagate failure among tasks.
    ///
    /// If failure is not desired, you can achieve the same semantics with the
    /// `SyncSender::send_opt` method which will not fail if the receiver
    /// disconnects.
    pub fn send(&self, t: T) {
        if self.send_opt(t).is_some() {
            fail!("sending on a closed channel");
        }
    }

    /// Send a value on a channel, returning it back if the receiver
    /// disconnected
    ///
    /// This method will *block* to send the value `t` on the channel, but if
    /// the value could not be sent due to the receiver disconnecting, the value
    /// is returned back to the callee. This function is similar to `try_send`,
    /// except that it will block if the channel is currently full.
    ///
    /// # Failure
    ///
    /// This function cannot fail.
    pub fn send_opt(&self, t: T) -> Option<T> {
        // Err(t) hands the unsent value back when the receiver is gone.
        match unsafe { (*self.inner.get()).send(t) } {
            Ok(()) => None,
            Err(t) => Some(t),
        }
    }

    /// Attempts to send a value on this channel without blocking.
    ///
    /// This method semantically differs from `Sender::try_send` because it can
    /// fail if the receiver has not disconnected yet. If the buffer on this
    /// channel is full, this function will immediately return the data back to
    /// the callee.
    ///
    /// See `SyncSender::send` for notes about guarantees of whether the
    /// receiver has received the data or not if this function is successful.
    ///
    /// # Failure
    ///
    /// This function cannot fail
    pub fn try_send(&self, t: T) -> TrySendResult<T> {
        unsafe { (*self.inner.get()).try_send(t) }
    }
}
impl<T: Send> Clone for SyncSender<T> {
    /// Produces another handle to the same synchronous channel.
    fn clone(&self) -> SyncSender<T> {
        // Bump the packet's sender count *before* duplicating the arc so the
        // receiver's disconnect accounting stays consistent.
        unsafe {
            (*self.inner.get()).clone_chan();
        }
        SyncSender::new(self.inner.clone())
    }
}
#[unsafe_destructor]
impl<T: Send> Drop for SyncSender<T> {
    // Notify the packet that one sender has gone away so a blocked receiver
    // can observe disconnection.
    fn drop(&mut self) {
        unsafe { (*self.inner.get()).drop_chan(); }
    }
}
////////////////////////////////////////////////////////////////////////////////
// Receiver
////////////////////////////////////////////////////////////////////////////////
impl<T: Send> Receiver<T> {
    // Internal constructor wrapping a flavor with fresh bookkeeping state.
    fn my_new(inner: Flavor<T>) -> Receiver<T> {
        Receiver { inner: inner, receives: Cell::new(0), marker: marker::NoShare }
    }

    /// Blocks waiting for a value on this receiver
    ///
    /// This function will block if necessary to wait for a corresponding send
    /// on the channel from its paired `Sender` structure. This receiver will
    /// be woken up when data is ready, and the data will be returned.
    ///
    /// # Failure
    ///
    /// Similar to channels, this method will trigger a task failure if the
    /// other end of the channel has hung up (been deallocated). The purpose of
    /// this is to propagate failure among tasks.
    ///
    /// If failure is not desired, then there are two options:
    ///
    /// * If blocking is still desired, the `recv_opt` method will return `None`
    ///   when the other end hangs up
    ///
    /// * If blocking is not desired, then the `try_recv` method will attempt to
    ///   peek at a value on this receiver.
    pub fn recv(&self) -> T {
        match self.recv_opt() {
            Some(t) => t,
            None => fail!("receiving on a closed channel"),
        }
    }

    /// Attempts to return a pending value on this receiver without blocking
    ///
    /// This method will never block the caller in order to wait for data to
    /// become available. Instead, this will always return immediately with a
    /// possible option of pending data on the channel.
    ///
    /// This is useful for a flavor of "optimistic check" before deciding to
    /// block on a receiver.
    ///
    /// This function cannot fail.
    pub fn try_recv(&self) -> TryRecvResult<T> {
        // If a thread is spinning in try_recv, we should take the opportunity
        // to reschedule things occasionally. See notes above in scheduling on
        // sends for why this doesn't always hit TLS, and also for why this uses
        // `try_take` instead of `take`.
        let cnt = self.receives.get() + 1;
        self.receives.set(cnt);
        if cnt % (RESCHED_FREQ as uint) == 0 {
            let task: Option<~Task> = Local::try_take();
            task.map(|t| t.maybe_yield());
        }
        // Loop because a oneshot/stream may have been upgraded mid-recv; the
        // Err(Upgraded(rx)) arms yield the replacement port to retry with.
        loop {
            let mut new_port = match self.inner {
                Oneshot(ref p) => {
                    match unsafe { (*p.get()).try_recv() } {
                        Ok(t) => return Data(t),
                        Err(oneshot::Empty) => return Empty,
                        Err(oneshot::Disconnected) => return Disconnected,
                        Err(oneshot::Upgraded(rx)) => rx,
                    }
                }
                Stream(ref p) => {
                    match unsafe { (*p.get()).try_recv() } {
                        Ok(t) => return Data(t),
                        Err(stream::Empty) => return Empty,
                        Err(stream::Disconnected) => return Disconnected,
                        Err(stream::Upgraded(rx)) => rx,
                    }
                }
                Shared(ref p) => {
                    match unsafe { (*p.get()).try_recv() } {
                        Ok(t) => return Data(t),
                        Err(shared::Empty) => return Empty,
                        Err(shared::Disconnected) => return Disconnected,
                    }
                }
                Sync(ref p) => {
                    match unsafe { (*p.get()).try_recv() } {
                        Ok(t) => return Data(t),
                        Err(sync::Empty) => return Empty,
                        Err(sync::Disconnected) => return Disconnected,
                    }
                }
            };
            // Install the upgraded port into `self` and retry.
            unsafe {
                mem::swap(&mut cast::transmute_mut(self).inner,
                          &mut new_port.inner);
            }
        }
    }

    /// Attempt to wait for a value on this receiver, but does not fail if the
    /// corresponding channel has hung up.
    ///
    /// This implementation of iterators for ports will always block if there is
    /// not data available on the receiver, but it will not fail in the case
    /// that the channel has been deallocated.
    ///
    /// In other words, this function has the same semantics as the `recv`
    /// method except for the failure aspect.
    ///
    /// If the channel has hung up, then `None` is returned. Otherwise `Some` of
    /// the value found on the receiver is returned.
    pub fn recv_opt(&self) -> Option<T> {
        // Same upgrade-and-retry loop as try_recv, but blocking; a blocking
        // recv never observes Empty, hence the unreachable!() arms.
        loop {
            let mut new_port = match self.inner {
                Oneshot(ref p) => {
                    match unsafe { (*p.get()).recv() } {
                        Ok(t) => return Some(t),
                        Err(oneshot::Empty) => return unreachable!(),
                        Err(oneshot::Disconnected) => return None,
                        Err(oneshot::Upgraded(rx)) => rx,
                    }
                }
                Stream(ref p) => {
                    match unsafe { (*p.get()).recv() } {
                        Ok(t) => return Some(t),
                        Err(stream::Empty) => return unreachable!(),
                        Err(stream::Disconnected) => return None,
                        Err(stream::Upgraded(rx)) => rx,
                    }
                }
                Shared(ref p) => {
                    match unsafe { (*p.get()).recv() } {
                        Ok(t) => return Some(t),
                        Err(shared::Empty) => return unreachable!(),
                        Err(shared::Disconnected) => return None,
                    }
                }
                Sync(ref p) => return unsafe { (*p.get()).recv() }
            };
            unsafe {
                mem::swap(&mut cast::transmute_mut(self).inner,
                          &mut new_port.inner);
            }
        }
    }

    /// Returns an iterator which will block waiting for messages, but never
    /// `fail!`. It will return `None` when the channel has hung up.
    pub fn iter<'a>(&'a self) -> Messages<'a, T> {
        Messages { rx: self }
    }
}
// Integration with the `select!` machinery: each method follows the same
// upgrade-and-retry pattern as try_recv/recv_opt above.
impl<T: Send> select::Packet for Receiver<T> {
    fn can_recv(&self) -> bool {
        loop {
            let mut new_port = match self.inner {
                Oneshot(ref p) => {
                    match unsafe { (*p.get()).can_recv() } {
                        Ok(ret) => return ret,
                        Err(upgrade) => upgrade,
                    }
                }
                Stream(ref p) => {
                    match unsafe { (*p.get()).can_recv() } {
                        Ok(ret) => return ret,
                        Err(upgrade) => upgrade,
                    }
                }
                Shared(ref p) => {
                    return unsafe { (*p.get()).can_recv() };
                }
                Sync(ref p) => {
                    return unsafe { (*p.get()).can_recv() };
                }
            };
            unsafe {
                mem::swap(&mut cast::transmute_mut(self).inner,
                          &mut new_port.inner);
            }
        }
    }

    fn start_selection(&self, mut task: BlockedTask) -> Result<(), BlockedTask>{
        loop {
            // An upgrade mid-selection hands the blocked task back along with
            // the replacement port; re-block on the upgraded port.
            let (t, mut new_port) = match self.inner {
                Oneshot(ref p) => {
                    match unsafe { (*p.get()).start_selection(task) } {
                        oneshot::SelSuccess => return Ok(()),
                        oneshot::SelCanceled(task) => return Err(task),
                        oneshot::SelUpgraded(t, rx) => (t, rx),
                    }
                }
                Stream(ref p) => {
                    match unsafe { (*p.get()).start_selection(task) } {
                        stream::SelSuccess => return Ok(()),
                        stream::SelCanceled(task) => return Err(task),
                        stream::SelUpgraded(t, rx) => (t, rx),
                    }
                }
                Shared(ref p) => {
                    return unsafe { (*p.get()).start_selection(task) };
                }
                Sync(ref p) => {
                    return unsafe { (*p.get()).start_selection(task) };
                }
            };
            task = t;
            unsafe {
                mem::swap(&mut cast::transmute_mut(self).inner,
                          &mut new_port.inner);
            }
        }
    }

    fn abort_selection(&self) -> bool {
        // Track whether we've already followed an upgrade; the stream/shared
        // abort paths need to know this to clean up correctly.
        let mut was_upgrade = false;
        loop {
            let result = match self.inner {
                Oneshot(ref p) => unsafe { (*p.get()).abort_selection() },
                Stream(ref p) => unsafe {
                    (*p.get()).abort_selection(was_upgrade)
                },
                Shared(ref p) => return unsafe {
                    (*p.get()).abort_selection(was_upgrade)
                },
                Sync(ref p) => return unsafe {
                    (*p.get()).abort_selection()
                },
            };
            let mut new_port = match result { Ok(b) => return b, Err(p) => p };
            was_upgrade = true;
            unsafe {
                mem::swap(&mut cast::transmute_mut(self).inner,
                          &mut new_port.inner);
            }
        }
    }
}
impl<'a, T: Send> Iterator<T> for Messages<'a, T> {
    // Blocks for the next message; yields None once the channel hangs up.
    fn next(&mut self) -> Option<T> { self.rx.recv_opt() }
}
#[unsafe_destructor]
impl<T: Send> Drop for Receiver<T> {
    // Notify the packet that the receiving half is gone so senders observe
    // a closed channel.
    fn drop(&mut self) {
        match self.inner {
            Oneshot(ref mut p) => unsafe { (*p.get()).drop_port(); },
            Stream(ref mut p) => unsafe { (*p.get()).drop_port(); },
            Shared(ref mut p) => unsafe { (*p.get()).drop_port(); },
            Sync(ref mut p) => unsafe { (*p.get()).drop_port(); },
        }
    }
}
// Tests for the asynchronous (unbounded) channel flavors. The `test!` macro
// (defined earlier in this file) runs each body on both the green and native
// runtimes.
#[cfg(test)]
mod test {
    use prelude::*;
    use native;
    use os;
    use super::*;

    // Scale factor for the *_stress tests; overridable via RUST_TEST_STRESS.
    pub fn stress_factor() -> uint {
        match os::getenv("RUST_TEST_STRESS") {
            Some(val) => from_str::<uint>(val).unwrap(),
            None => 1,
        }
    }

    test!(fn smoke() {
        let (tx, rx) = channel();
        tx.send(1);
        assert_eq!(rx.recv(), 1);
    })

    test!(fn drop_full() {
        let (tx, _rx) = channel();
        tx.send(~1);
    })

    test!(fn drop_full_shared() {
        let (tx, _rx) = channel();
        drop(tx.clone());
        drop(tx.clone());
        tx.send(~1);
    })

    test!(fn smoke_shared() {
        let (tx, rx) = channel();
        tx.send(1);
        assert_eq!(rx.recv(), 1);
        let tx = tx.clone();
        tx.send(1);
        assert_eq!(rx.recv(), 1);
    })

    test!(fn smoke_threads() {
        let (tx, rx) = channel();
        spawn(proc() {
            tx.send(1);
        });
        assert_eq!(rx.recv(), 1);
    })

    test!(fn smoke_port_gone() {
        let (tx, rx) = channel();
        drop(rx);
        tx.send(1);
    } #[should_fail])

    test!(fn smoke_shared_port_gone() {
        let (tx, rx) = channel();
        drop(rx);
        tx.send(1);
    } #[should_fail])

    test!(fn smoke_shared_port_gone2() {
        let (tx, rx) = channel();
        drop(rx);
        let tx2 = tx.clone();
        drop(tx);
        tx2.send(1);
    } #[should_fail])

    test!(fn port_gone_concurrent() {
        let (tx, rx) = channel();
        spawn(proc() {
            rx.recv();
        });
        loop { tx.send(1) }
    } #[should_fail])

    test!(fn port_gone_concurrent_shared() {
        let (tx, rx) = channel();
        let tx2 = tx.clone();
        spawn(proc() {
            rx.recv();
        });
        loop {
            tx.send(1);
            tx2.send(1);
        }
    } #[should_fail])

    test!(fn smoke_chan_gone() {
        let (tx, rx) = channel::<int>();
        drop(tx);
        rx.recv();
    } #[should_fail])

    test!(fn smoke_chan_gone_shared() {
        let (tx, rx) = channel::<()>();
        let tx2 = tx.clone();
        drop(tx);
        drop(tx2);
        rx.recv();
    } #[should_fail])

    test!(fn chan_gone_concurrent() {
        let (tx, rx) = channel();
        spawn(proc() {
            tx.send(1);
            tx.send(1);
        });
        loop { rx.recv(); }
    } #[should_fail])

    test!(fn stress() {
        let (tx, rx) = channel();
        spawn(proc() {
            for _ in range(0, 10000) { tx.send(1); }
        });
        for _ in range(0, 10000) {
            assert_eq!(rx.recv(), 1);
        }
    })

    test!(fn stress_shared() {
        static AMT: uint = 10000;
        static NTHREADS: uint = 8;
        let (tx, rx) = channel::<int>();
        let (dtx, drx) = channel::<()>();
        spawn(proc() {
            for _ in range(0, AMT * NTHREADS) {
                assert_eq!(rx.recv(), 1);
            }
            match rx.try_recv() {
                Data(..) => fail!(),
                _ => {}
            }
            dtx.send(());
        });
        for _ in range(0, NTHREADS) {
            let tx = tx.clone();
            spawn(proc() {
                for _ in range(0, AMT) { tx.send(1); }
            });
        }
        drop(tx);
        drx.recv();
    })

    // The next three tests exercise sends/receives crossing the boundary
    // between runtime tasks and raw native tasks.
    #[test]
    fn send_from_outside_runtime() {
        let (tx1, rx1) = channel::<()>();
        let (tx2, rx2) = channel::<int>();
        let (tx3, rx3) = channel::<()>();
        let tx4 = tx3.clone();
        spawn(proc() {
            tx1.send(());
            for _ in range(0, 40) {
                assert_eq!(rx2.recv(), 1);
            }
            tx3.send(());
        });
        rx1.recv();
        native::task::spawn(proc() {
            for _ in range(0, 40) {
                tx2.send(1);
            }
            tx4.send(());
        });
        rx3.recv();
        rx3.recv();
    }

    #[test]
    fn recv_from_outside_runtime() {
        let (tx, rx) = channel::<int>();
        let (dtx, drx) = channel();
        native::task::spawn(proc() {
            for _ in range(0, 40) {
                assert_eq!(rx.recv(), 1);
            }
            dtx.send(());
        });
        for _ in range(0, 40) {
            tx.send(1);
        }
        drx.recv();
    }

    #[test]
    fn no_runtime() {
        let (tx1, rx1) = channel::<int>();
        let (tx2, rx2) = channel::<int>();
        let (tx3, rx3) = channel::<()>();
        let tx4 = tx3.clone();
        native::task::spawn(proc() {
            assert_eq!(rx1.recv(), 1);
            tx2.send(2);
            tx4.send(());
        });
        native::task::spawn(proc() {
            tx1.send(1);
            assert_eq!(rx2.recv(), 2);
            tx3.send(());
        });
        rx3.recv();
        rx3.recv();
    }

    test!(fn oneshot_single_thread_close_port_first() {
        // Simple test of closing without sending
        let (_tx, rx) = channel::<int>();
        drop(rx);
    })

    test!(fn oneshot_single_thread_close_chan_first() {
        // Simple test of closing without sending
        let (tx, _rx) = channel::<int>();
        drop(tx);
    })

    test!(fn oneshot_single_thread_send_port_close() {
        // Testing that the sender cleans up the payload if receiver is closed
        let (tx, rx) = channel::<~int>();
        drop(rx);
        tx.send(~0);
    } #[should_fail])

    test!(fn oneshot_single_thread_recv_chan_close() {
        // Receiving on a closed chan will fail
        let res = task::try(proc() {
            let (tx, rx) = channel::<int>();
            drop(tx);
            rx.recv();
        });
        // What is our res?
        assert!(res.is_err());
    })

    test!(fn oneshot_single_thread_send_then_recv() {
        let (tx, rx) = channel::<~int>();
        tx.send(~10);
        assert!(rx.recv() == ~10);
    })

    test!(fn oneshot_single_thread_try_send_open() {
        let (tx, rx) = channel::<int>();
        assert!(tx.try_send(10));
        assert!(rx.recv() == 10);
    })

    test!(fn oneshot_single_thread_try_send_closed() {
        let (tx, rx) = channel::<int>();
        drop(rx);
        assert!(!tx.try_send(10));
    })

    test!(fn oneshot_single_thread_try_recv_open() {
        let (tx, rx) = channel::<int>();
        tx.send(10);
        assert!(rx.recv_opt() == Some(10));
    })

    test!(fn oneshot_single_thread_try_recv_closed() {
        let (tx, rx) = channel::<int>();
        drop(tx);
        assert!(rx.recv_opt() == None);
    })

    test!(fn oneshot_single_thread_peek_data() {
        let (tx, rx) = channel::<int>();
        assert_eq!(rx.try_recv(), Empty)
        tx.send(10);
        assert_eq!(rx.try_recv(), Data(10));
    })

    test!(fn oneshot_single_thread_peek_close() {
        let (tx, rx) = channel::<int>();
        drop(tx);
        assert_eq!(rx.try_recv(), Disconnected);
        assert_eq!(rx.try_recv(), Disconnected);
    })

    test!(fn oneshot_single_thread_peek_open() {
        let (_tx, rx) = channel::<int>();
        assert_eq!(rx.try_recv(), Empty);
    })

    test!(fn oneshot_multi_task_recv_then_send() {
        let (tx, rx) = channel::<~int>();
        spawn(proc() {
            assert!(rx.recv() == ~10);
        });
        tx.send(~10);
    })

    test!(fn oneshot_multi_task_recv_then_close() {
        let (tx, rx) = channel::<~int>();
        spawn(proc() {
            drop(tx);
        });
        let res = task::try(proc() {
            assert!(rx.recv() == ~10);
        });
        assert!(res.is_err());
    })

    test!(fn oneshot_multi_thread_close_stress() {
        for _ in range(0, stress_factor()) {
            let (tx, rx) = channel::<int>();
            spawn(proc() {
                drop(rx);
            });
            drop(tx);
        }
    })

    test!(fn oneshot_multi_thread_send_close_stress() {
        for _ in range(0, stress_factor()) {
            let (tx, rx) = channel::<int>();
            spawn(proc() {
                drop(rx);
            });
            let _ = task::try(proc() {
                tx.send(1);
            });
        }
    })

    test!(fn oneshot_multi_thread_recv_close_stress() {
        for _ in range(0, stress_factor()) {
            let (tx, rx) = channel::<int>();
            spawn(proc() {
                let res = task::try(proc() {
                    rx.recv();
                });
                assert!(res.is_err());
            });
            spawn(proc() {
                spawn(proc() {
                    drop(tx);
                });
            });
        }
    })

    test!(fn oneshot_multi_thread_send_recv_stress() {
        for _ in range(0, stress_factor()) {
            let (tx, rx) = channel();
            spawn(proc() {
                tx.send(~10);
            });
            spawn(proc() {
                assert!(rx.recv() == ~10);
            });
        }
    })

    test!(fn stream_send_recv_stress() {
        for _ in range(0, stress_factor()) {
            let (tx, rx) = channel();
            send(tx, 0);
            recv(rx, 0);
            fn send(tx: Sender<~int>, i: int) {
                if i == 10 { return }
                spawn(proc() {
                    tx.send(~i);
                    send(tx, i + 1);
                });
            }
            fn recv(rx: Receiver<~int>, i: int) {
                if i == 10 { return }
                spawn(proc() {
                    assert!(rx.recv() == ~i);
                    recv(rx, i + 1);
                });
            }
        }
    })

    test!(fn recv_a_lot() {
        // Regression test that we don't run out of stack in scheduler context
        let (tx, rx) = channel();
        for _ in range(0, 10000) { tx.send(()); }
        for _ in range(0, 10000) { rx.recv(); }
    })

    test!(fn shared_chan_stress() {
        let (tx, rx) = channel();
        let total = stress_factor() + 100;
        for _ in range(0, total) {
            let tx = tx.clone();
            spawn(proc() {
                tx.send(());
            });
        }
        for _ in range(0, total) {
            rx.recv();
        }
    })

    test!(fn test_nested_recv_iter() {
        let (tx, rx) = channel::<int>();
        let (total_tx, total_rx) = channel::<int>();
        spawn(proc() {
            let mut acc = 0;
            for x in rx.iter() {
                acc += x;
            }
            total_tx.send(acc);
        });
        tx.send(3);
        tx.send(1);
        tx.send(2);
        drop(tx);
        assert_eq!(total_rx.recv(), 6);
    })

    test!(fn test_recv_iter_break() {
        let (tx, rx) = channel::<int>();
        let (count_tx, count_rx) = channel();
        spawn(proc() {
            let mut count = 0;
            for x in rx.iter() {
                if count >= 3 {
                    break;
                } else {
                    count += x;
                }
            }
            count_tx.send(count);
        });
        tx.send(2);
        tx.send(2);
        tx.send(2);
        tx.try_send(2);
        drop(tx);
        assert_eq!(count_rx.recv(), 4);
    })

    test!(fn try_recv_states() {
        let (tx1, rx1) = channel::<int>();
        let (tx2, rx2) = channel::<()>();
        let (tx3, rx3) = channel::<()>();
        spawn(proc() {
            rx2.recv();
            tx1.send(1);
            tx3.send(());
            rx2.recv();
            drop(tx1);
            tx3.send(());
        });
        assert_eq!(rx1.try_recv(), Empty);
        tx2.send(());
        rx3.recv();
        assert_eq!(rx1.try_recv(), Data(1));
        assert_eq!(rx1.try_recv(), Empty);
        tx2.send(());
        rx3.recv();
        assert_eq!(rx1.try_recv(), Disconnected);
    })

    // This bug used to end up in a livelock inside of the Receiver destructor
    // because the internal state of the Shared packet was corrupted
    test!(fn destroy_upgraded_shared_port_when_sender_still_active() {
        let (tx, rx) = channel();
        let (tx2, rx2) = channel();
        spawn(proc() {
            rx.recv(); // wait on a oneshot
            drop(rx);  // destroy a shared
            tx2.send(());
        });
        // make sure the other task has gone to sleep
        for _ in range(0, 5000) { task::deschedule(); }
        // upgrade to a shared chan and send a message
        let t = tx.clone();
        drop(tx);
        t.send(());
        // wait for the child task to exit before we exit
        rx2.recv();
    })

    test!(fn sends_off_the_runtime() {
        use rt::thread::Thread;
        let (tx, rx) = channel();
        let t = Thread::start(proc() {
            for _ in range(0, 1000) {
                tx.send(());
            }
        });
        for _ in range(0, 1000) {
            rx.recv();
        }
        t.join();
    })

    test!(fn try_recvs_off_the_runtime() {
        use rt::thread::Thread;
        let (tx, rx) = channel();
        let (cdone, pdone) = channel();
        let t = Thread::start(proc() {
            let mut hits = 0;
            while hits < 10 {
                match rx.try_recv() {
                    Data(()) => { hits += 1; }
                    Empty => { Thread::yield_now(); }
                    Disconnected => return,
                }
            }
            cdone.send(());
        });
        for _ in range(0, 10) {
            tx.send(());
        }
        t.join();
        pdone.recv();
    })
}
#[cfg(test)]
mod sync_tests {
use prelude::*;
use os;
pub fn stress_factor() -> uint {
match os::getenv("RUST_TEST_STRESS") {
Some(val) => from_str::<uint>(val).unwrap(),
None => 1,
}
}
test!(fn smoke() {
let (tx, rx) = sync_channel(1);
tx.send(1);
assert_eq!(rx.recv(), 1);
})
test!(fn drop_full() {
let (tx, _rx) = sync_channel(1);
tx.send(~1);
})
test!(fn smoke_shared() {
let (tx, rx) = sync_channel(1);
tx.send(1);
assert_eq!(rx.recv(), 1);
let tx = tx.clone();
tx.send(1);
assert_eq!(rx.recv(), 1);
})
test!(fn smoke_threads() {
let (tx, rx) = sync_channel(0);
spawn(proc() {
tx.send(1);
});
assert_eq!(rx.recv(), 1);
})
test!(fn smoke_port_gone() {
let (tx, rx) = sync_channel(0);
drop(rx);
tx.send(1);
} #[should_fail])
test!(fn smoke_shared_port_gone2() {
let (tx, rx) = sync_channel(0);
drop(rx);
let tx2 = tx.clone();
drop(tx);
tx2.send(1);
} #[should_fail])
test!(fn port_gone_concurrent() {
let (tx, rx) = sync_channel(0);
spawn(proc() {
rx.recv();
});
loop { tx.send(1) }
} #[should_fail])
test!(fn port_gone_concurrent_shared() {
let (tx, rx) = sync_channel(0);
let tx2 = tx.clone();
spawn(proc() {
rx.recv();
});
loop {
tx.send(1);
tx2.send(1);
}
} #[should_fail])
test!(fn smoke_chan_gone() {
let (tx, rx) = sync_channel::<int>(0);
drop(tx);
rx.recv();
} #[should_fail])
test!(fn smoke_chan_gone_shared() {
let (tx, rx) = sync_channel::<()>(0);
let tx2 = tx.clone();
drop(tx);
drop(tx2);
rx.recv();
} #[should_fail])
test!(fn chan_gone_concurrent() {
let (tx, rx) = sync_channel(0);
spawn(proc() {
tx.send(1);
tx.send(1);
});
loop { rx.recv(); }
} #[should_fail])
test!(fn stress() {
let (tx, rx) = sync_channel(0);
spawn(proc() {
for _ in range(0, 10000) { tx.send(1); }
});
for _ in range(0, 10000) {
assert_eq!(rx.recv(), 1);
}
})
test!(fn stress_shared() {
static AMT: uint = 1000;
static NTHREADS: uint = 8;
let (tx, rx) = sync_channel::<int>(0);
let (dtx, drx) = sync_channel::<()>(0);
spawn(proc() {
for _ in range(0, AMT * NTHREADS) {
assert_eq!(rx.recv(), 1);
}
match rx.try_recv() {
Data(..) => fail!(),
_ => {}
}
dtx.send(());
});
for _ in range(0, NTHREADS) {
let tx = tx.clone();
spawn(proc() {
for _ in range(0, AMT) { tx.send(1); }
});
}
drop(tx);
drx.recv();
})
test!(fn oneshot_single_thread_close_port_first() {
// Simple test of closing without sending
let (_tx, rx) = sync_channel::<int>(0);
drop(rx);
})
test!(fn oneshot_single_thread_close_chan_first() {
// Simple test of closing without sending
let (tx, _rx) = sync_channel::<int>(0);
drop(tx);
})
test!(fn oneshot_single_thread_send_port_close() {
// Testing that the sender cleans up the payload if receiver is closed
let (tx, rx) = sync_channel::<~int>(0);
drop(rx);
tx.send(~0);
} #[should_fail])
test!(fn oneshot_single_thread_recv_chan_close() {
// Receiving on a closed chan will fail
let res = task::try(proc() {
let (tx, rx) = sync_channel::<int>(0);
drop(tx);
rx.recv();
});
// What is our res?
assert!(res.is_err());
})
test!(fn oneshot_single_thread_send_then_recv() {
let (tx, rx) = sync_channel::<~int>(1);
tx.send(~10);
assert!(rx.recv() == ~10);
})
test!(fn oneshot_single_thread_try_send_open() {
let (tx, rx) = sync_channel::<int>(1);
assert_eq!(tx.try_send(10), Sent);
assert!(rx.recv() == 10);
})
test!(fn oneshot_single_thread_try_send_closed() {
let (tx, rx) = sync_channel::<int>(0);
drop(rx);
assert_eq!(tx.try_send(10), RecvDisconnected(10));
})
test!(fn oneshot_single_thread_try_send_closed2() {
let (tx, _rx) = sync_channel::<int>(0);
assert_eq!(tx.try_send(10), Full(10));
})
test!(fn oneshot_single_thread_try_recv_open() {
let (tx, rx) = sync_channel::<int>(1);
tx.send(10);
assert!(rx.recv_opt() == Some(10));
})
test!(fn oneshot_single_thread_try_recv_closed() {
let (tx, rx) = sync_channel::<int>(0);
drop(tx);
assert!(rx.recv_opt() == None);
})
test!(fn oneshot_single_thread_peek_data() {
let (tx, rx) = sync_channel::<int>(1);
assert_eq!(rx.try_recv(), Empty)
tx.send(10);
assert_eq!(rx.try_recv(), Data(10));
})
test!(fn oneshot_single_thread_peek_close() {
let (tx, rx) = sync_channel::<int>(0);
drop(tx);
assert_eq!(rx.try_recv(), Disconnected);
assert_eq!(rx.try_recv(), Disconnected);
})
test!(fn oneshot_single_thread_peek_open() {
let (_tx, rx) = sync_channel::<int>(0);
assert_eq!(rx.try_recv(), Empty);
})
test!(fn oneshot_multi_task_recv_then_send() {
let (tx, rx) = sync_channel::<~int>(0);
spawn(proc() {
assert!(rx.recv() == ~10);
});
tx.send(~10);
})
test!(fn oneshot_multi_task_recv_then_close() {
let (tx, rx) = sync_channel::<~int>(0);
spawn(proc() {
drop(tx);
});
let res = task::try(proc() {
assert!(rx.recv() == ~10);
});
assert!(res.is_err());
})
test!(fn oneshot_multi_thread_close_stress() {
for _ in range(0, stress_factor()) {
let (tx, rx) = sync_channel::<int>(0);
spawn(proc() {
drop(rx);
});
drop(tx);
}
})
test!(fn oneshot_multi_thread_send_close_stress() {
for _ in range(0, stress_factor()) {
let (tx, rx) = sync_channel::<int>(0);
spawn(proc() {
drop(rx);
});
let _ = task::try(proc() {
tx.send(1);
});
}
})
test!(fn oneshot_multi_thread_recv_close_stress() {
for _ in range(0, stress_factor()) {
let (tx, rx) = sync_channel::<int>(0);
spawn(proc() {
let res = task::try(proc() {
rx.recv();
});
assert!(res.is_err());
});
spawn(proc() {
spawn(proc() {
drop(tx);
});
});
}
})
test!(fn oneshot_multi_thread_send_recv_stress() {
for _ in range(0, stress_factor()) {
let (tx, rx) = sync_channel(0);
spawn(proc() {
tx.send(~10);
});
spawn(proc() {
assert!(rx.recv() == ~10);
});
}
})
test!(fn stream_send_recv_stress() {
for _ in range(0, stress_factor()) {
let (tx, rx) = sync_channel(0);
send(tx, 0);
recv(rx, 0);
fn send(tx: SyncSender<~int>, i: int) {
if i == 10 { return }
spawn(proc() {
tx.send(~i);
send(tx, i + 1);
});
}
fn recv(rx: Receiver<~int>, i: int) {
if i == 10 { return }
spawn(proc() {
assert!(rx.recv() == ~i);
recv(rx, i + 1);
});
}
}
})
test!(fn recv_a_lot() {
// Regression test that we don't run out of stack in scheduler context
let (tx, rx) = sync_channel(10000);
for _ in range(0, 10000) { tx.send(()); }
for _ in range(0, 10000) { rx.recv(); }
})
test!(fn shared_chan_stress() {
let (tx, rx) = sync_channel(0);
let total = stress_factor() + 100;
for _ in range(0, total) {
let tx = tx.clone();
spawn(proc() {
tx.send(());
});
}
for _ in range(0, total) {
rx.recv();
}
})
test!(fn test_nested_recv_iter() {
let (tx, rx) = sync_channel::<int>(0);
let (total_tx, total_rx) = sync_channel::<int>(0);
spawn(proc() {
let mut acc = 0;
for x in rx.iter() {
acc += x;
}
total_tx.send(acc);
});
tx.send(3);
tx.send(1);
tx.send(2);
drop(tx);
assert_eq!(total_rx.recv(), 6);
})
test!(fn test_recv_iter_break() {
let (tx, rx) = sync_channel::<int>(0);
let (count_tx, count_rx) = sync_channel(0);
spawn(proc() {
let mut count = 0;
for x in rx.iter() {
if count >= 3 {
break;
} else {
count += x;
}
}
count_tx.send(count);
});
tx.send(2);
tx.send(2);
tx.send(2);
tx.try_send(2);
drop(tx);
assert_eq!(count_rx.recv(), 4);
})
test!(fn try_recv_states() {
let (tx1, rx1) = sync_channel::<int>(1);
let (tx2, rx2) = sync_channel::<()>(1);
let (tx3, rx3) = sync_channel::<()>(1);
spawn(proc() {
rx2.recv();
tx1.send(1);
tx3.send(());
rx2.recv();
drop(tx1);
tx3.send(());
});
assert_eq!(rx1.try_recv(), Empty);
tx2.send(());
rx3.recv();
assert_eq!(rx1.try_recv(), Data(1));
assert_eq!(rx1.try_recv(), Empty);
tx2.send(());
rx3.recv();
assert_eq!(rx1.try_recv(), Disconnected);
})
// This bug used to end up in a livelock inside of the Receiver destructor
// because the internal state of the Shared packet was corrupted
test!(fn destroy_upgraded_shared_port_when_sender_still_active() {
let (tx, rx) = sync_channel(0);
let (tx2, rx2) = sync_channel(0);
spawn(proc() {
rx.recv(); // wait on a oneshot
drop(rx); // destroy a shared
tx2.send(());
});
// make sure the other task has gone to sleep
for _ in range(0, 5000) { task::deschedule(); }
// upgrade to a shared chan and send a message
let t = tx.clone();
drop(tx);
t.send(());
// wait for the child task to exit before we exit
rx2.recv();
})
test!(fn try_recvs_off_the_runtime() {
use std::rt::thread::Thread;
let (tx, rx) = sync_channel(0);
let (cdone, pdone) = channel();
let t = Thread::start(proc() {
let mut hits = 0;
while hits < 10 {
match rx.try_recv() {
Data(()) => { hits += 1; }
Empty => { Thread::yield_now(); }
Disconnected => return,
}
}
cdone.send(());
});
for _ in range(0, 10) {
tx.send(());
}
t.join();
pdone.recv();
})
test!(fn send_opt1() {
let (tx, rx) = sync_channel(0);
spawn(proc() { rx.recv(); });
assert_eq!(tx.send_opt(1), None);
})
test!(fn send_opt2() {
let (tx, rx) = sync_channel(0);
spawn(proc() { drop(rx); });
assert_eq!(tx.send_opt(1), Some(1));
})
test!(fn send_opt3() {
let (tx, rx) = sync_channel(1);
assert_eq!(tx.send_opt(1), None);
spawn(proc() { drop(rx); });
assert_eq!(tx.send_opt(1), Some(1));
})
test!(fn send_opt4() {
let (tx, rx) = sync_channel(0);
let tx2 = tx.clone();
let (done, donerx) = channel();
let done2 = done.clone();
spawn(proc() {
assert_eq!(tx.send_opt(1), Some(1));
done.send(());
});
spawn(proc() {
assert_eq!(tx2.send_opt(2), Some(2));
done2.send(());
});
drop(rx);
donerx.recv();
donerx.recv();
})
test!(fn try_send1() {
let (tx, _rx) = sync_channel(0);
assert_eq!(tx.try_send(1), Full(1));
})
test!(fn try_send2() {
let (tx, _rx) = sync_channel(1);
assert_eq!(tx.try_send(1), Sent);
assert_eq!(tx.try_send(1), Full(1));
})
test!(fn try_send3() {
let (tx, rx) = sync_channel(1);
assert_eq!(tx.try_send(1), Sent);
drop(rx);
assert_eq!(tx.try_send(1), RecvDisconnected(1));
})
test!(fn try_send4() {
let (tx, rx) = sync_channel(0);
spawn(proc() {
for _ in range(0, 1000) { task::deschedule(); }
assert_eq!(tx.try_send(1), Sent);
});
assert_eq!(rx.recv(), 1);
} #[ignore(reason = "flaky on libnative")])
}<|fim▁end|> | |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls.defaults import *
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'myblog.views.home', name='home'),
url(r'^login/$', 'reg.views.loginView'),
url(r'^logout/$', 'reg.views.logoutView'),<|fim▁hole|><|fim▁end|> |
) |
<|file_name|>FEDiffuseLighting.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2004, 2005, 2006, 2007 Nikolas Zimmermann <zimmermann@kde.org>
* Copyright (C) 2004, 2005 Rob Buis <buis@kde.org>
* Copyright (C) 2005 Eric Seidel <eric@webkit.org>
* Copyright (C) 2013 Google Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*<|fim▁hole|> */
#include "config.h"
#include "platform/graphics/filters/FEDiffuseLighting.h"
#include "platform/graphics/filters/LightSource.h"
#include "platform/text/TextStream.h"
namespace blink {
FEDiffuseLighting::FEDiffuseLighting(Filter* filter, const Color& lightingColor, float surfaceScale,
float diffuseConstant, PassRefPtr<LightSource> lightSource)
: FELighting(filter, DiffuseLighting, lightingColor, surfaceScale, diffuseConstant, 0, 0, lightSource)
{
}
PassRefPtrWillBeRawPtr<FEDiffuseLighting> FEDiffuseLighting::create(Filter* filter, const Color& lightingColor,
float surfaceScale, float diffuseConstant, PassRefPtr<LightSource> lightSource)
{
return adoptRefWillBeNoop(new FEDiffuseLighting(filter, lightingColor, surfaceScale, diffuseConstant, lightSource));
}
FEDiffuseLighting::~FEDiffuseLighting()
{
}
Color FEDiffuseLighting::lightingColor() const
{
return m_lightingColor;
}
bool FEDiffuseLighting::setLightingColor(const Color& lightingColor)
{
if (m_lightingColor == lightingColor)
return false;
m_lightingColor = lightingColor;
return true;
}
float FEDiffuseLighting::surfaceScale() const
{
return m_surfaceScale;
}
bool FEDiffuseLighting::setSurfaceScale(float surfaceScale)
{
if (m_surfaceScale == surfaceScale)
return false;
m_surfaceScale = surfaceScale;
return true;
}
float FEDiffuseLighting::diffuseConstant() const
{
return m_diffuseConstant;
}
bool FEDiffuseLighting::setDiffuseConstant(float diffuseConstant)
{
diffuseConstant = std::max(diffuseConstant, 0.0f);
if (m_diffuseConstant == diffuseConstant)
return false;
m_diffuseConstant = diffuseConstant;
return true;
}
const LightSource* FEDiffuseLighting::lightSource() const
{
return m_lightSource.get();
}
void FEDiffuseLighting::setLightSource(PassRefPtr<LightSource> lightSource)
{
m_lightSource = lightSource;
}
TextStream& FEDiffuseLighting::externalRepresentation(TextStream& ts, int indent) const
{
writeIndent(ts, indent);
ts << "[feDiffuseLighting";
FilterEffect::externalRepresentation(ts);
ts << " surfaceScale=\"" << m_surfaceScale << "\" " << "diffuseConstant=\"" << m_diffuseConstant << "\"]\n";
inputEffect(0)->externalRepresentation(ts, indent + 1);
return ts;
}
} // namespace blink<|fim▁end|> | * You should have received a copy of the GNU Library General Public License
* along with this library; see the file COPYING.LIB. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA. |
<|file_name|>Dead.java<|end_file_name|><|fim▁begin|>package GlidersGrid;
import java.util.Iterator;
import repast.simphony.context.Context;
import repast.simphony.engine.schedule.ScheduledMethod;
import repast.simphony.query.space.grid.MooreQuery;
import repast.simphony.space.grid.Grid;
import repast.simphony.space.grid.GridPoint;
import repast.simphony.util.ContextUtils;
public class Dead {
private Grid<Object> grid;
private int state;
public Dead(Grid<Object> grid) {
this.grid = grid;
}
// calculate the state for the next time tick for dead cells
@ScheduledMethod(start = 1, interval = 1, priority = 4)
public void step1() {
MooreQuery<Dead> query = new MooreQuery(grid, this);
int neighbours = 0;
for (Object o : query.query()) {
if (o instanceof Living) {
neighbours++;
if (neighbours ==3) {<|fim▁hole|> if (neighbours == 3) {
state = 1;
} else {
state = 0;
}
}
// visualise the change into the underlay and grid
@ScheduledMethod(start = 1, interval = 1, priority = 1)
public void step2() {
if (state == 1) {
GridPoint gpt = grid.getLocation(this);
Context<Object> context = ContextUtils.getContext(this);
context.remove(this);
Living livingCell = new Living(grid);
context.add(livingCell);
grid.moveTo(livingCell, gpt.getX(), gpt.getY());
context.add(livingCell);
}
}
}<|fim▁end|> | }
}
} |
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::error;
use std::fmt::Display;
use std::fmt::{self};
use std::io;
use std::str;
use std::string;
use std::{self};
use serde::de;
use serde::ser;
#[derive(Debug)]
pub struct Error {
msg: String,
}
pub type Result<T> = std::result::Result<T, Error>;
impl Error {<|fim▁hole|> }
}
impl error::Error for Error {
fn description(&self) -> &str {
&self.msg
}
}
impl Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(&self.msg)
}
}
impl ser::Error for Error {
fn custom<T: Display>(msg: T) -> Self {
Error::new(msg)
}
}
impl de::Error for Error {
fn custom<T: Display>(msg: T) -> Self {
Error::new(msg)
}
}
impl From<io::Error> for Error {
fn from(err: io::Error) -> Self {
Error::new(err)
}
}
impl From<str::Utf8Error> for Error {
fn from(err: str::Utf8Error) -> Self {
Error::new(err)
}
}
impl From<string::FromUtf8Error> for Error {
fn from(err: string::FromUtf8Error) -> Self {
Error::new(err)
}
}<|fim▁end|> | pub fn new<T: Display>(msg: T) -> Self {
Error {
msg: msg.to_string(),
} |
<|file_name|>main.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# Odoo, an open source suite of business apps<|fim▁hole|># This module copyright (C) 2015 bloopark systems (<http://bloopark.de>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import json
import xml.etree.ElementTree as ET
import urllib2
import werkzeug.utils
from openerp.addons.web import http
from openerp.addons.web.http import request
from openerp.addons.website.controllers.main import Website
class Website(Website):
@http.route(['/<path:seo_url>'], type='http', auth="public", website=True)
def path_page(self, seo_url, **kwargs):
"""Handle SEO urls for ir.ui.views.
ToDo: Add additional check for field seo_url_parent. Otherwise it is
possible to use invalid url structures. For example: if you have two
pages 'study-1' and 'study-2' with the same seo_url_level and different
seo_url_parent you can use '/ecommerce/study-1/how-to-do-it-right' and
'/ecommerce/study-2/how-to-do-it-right' to call the page
'how-to-do-it-right'.
"""
env = request.env(context=request.context)
seo_url_parts = [s.encode('utf8') for s in seo_url.split('/')
if s != '']
views = env['ir.ui.view'].search([('seo_url', 'in', seo_url_parts)],
order='seo_url_level ASC')
page = 'website.404'
if len(seo_url_parts) == len(views):
seo_url_check = [v.seo_url.encode('utf8') for v in views]
current_view = views[-1]
if (seo_url_parts == seo_url_check
and (current_view.seo_url_level + 1) == len(views)):
page = current_view.xml_id
if page == 'website.404':
try:
url = self.look_for_redirect_url(seo_url, **kwargs)
if url:
return request.redirect(url, code=301)
assert url is not None
except Exception, e:
return request.registry['ir.http']._handle_exception(e, 404)
if page == 'website.404' and request.website.is_publisher():
page = 'website.page_404'
return request.render(page, {})
def look_for_redirect_url(self, seo_url, **kwargs):
env = request.env(context=request.context)
if not seo_url.startswith('/'):
seo_url = '/' + seo_url
lang = env.context.get('lang', False)
if not lang:
lang = request.website.default_lang_code
lang = env['res.lang'].get_code_from_alias(lang)
domain = [('url', '=', seo_url), ('lang', '=', lang)]
data = env['website.seo.redirect'].search(domain)
if data:
model, rid = data[0].resource.split(',')
resource = env[model].browse(int(rid))
return resource.get_seo_path()[0]
@http.route()
def page(self, page, **opt):
try:
view = request.website.get_template(page)
if view.seo_url:
return request.redirect(view.get_seo_path()[0], code=301)
except:
pass
return super(Website, self).page(page, **opt)
@http.route(['/website/seo_suggest'], type='json', auth='user', website=True)
def seo_suggest(self, keywords=None, lang=None):
url = "http://google.com/complete/search"
try:
params = {
'ie': 'utf8',
'oe': 'utf8',
'output': 'toolbar',
'q': keywords,
}
if lang:
language = lang.split("_")
params.update({
'hl': language[0],
'gl': language[1] if len(language) > 1 else ''
})
req = urllib2.Request("%s?%s" % (url, werkzeug.url_encode(params)))
request = urllib2.urlopen(req)
except (urllib2.HTTPError, urllib2.URLError):
# TODO: shouldn't this return {} ?
return []
xmlroot = ET.fromstring(request.read())
return [sugg[0].attrib['data'] for sugg in xmlroot if len(sugg) and sugg[0].attrib['data']]<|fim▁end|> | |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from django import forms
from django.contrib.auth.models import User
from sample.models import (Doctor, Worker, Patient, SpecialtyType, TimeSlot, Case, Comment, CommentGroup,
Scan)
class searchDoctor(admin.ModelAdmin):
list_display = ['user_first_name', 'user_last_name', 'get_some_value']
search_fields = ['user__first_name', 'user__last_name',
'specialties__name']
class searchWorker(admin.ModelAdmin):
list_display = ['user_first_name', 'user_last_name']
search_fields = ['user__first_name', 'user__last_name']
class searchPatient(admin.ModelAdmin):
list_display = ['first_name', 'last_name']
search_fields = ['first_name', 'last_name']<|fim▁hole|> list_display = ['name']
class searchTimeslot(admin.ModelAdmin):
search_fields = ['start_time', 'end_time']
class searchCase(admin.ModelAdmin):
search_fields = ['id']
list_display = ['id']
class searchComment(admin.ModelAdmin):
search_fields = ['text']
list_display = ['text']
class searchScan(admin.ModelAdmin):
search_fields = ['patient', 'comments']
admin.site.register(Doctor, searchDoctor)
admin.site.register(Worker, searchWorker)
admin.site.register(Patient, searchPatient)
admin.site.register(SpecialtyType, searchSpeciality)
admin.site.register(TimeSlot, searchTimeslot)
admin.site.register(Case, searchCase)
admin.site.register(Comment, searchComment)
admin.site.register(CommentGroup)
admin.site.register(Scan, searchScan)<|fim▁end|> |
class searchSpeciality(admin.ModelAdmin):
search_fields = ['name'] |
<|file_name|>bitcoin_hu.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="hu" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About Mercury</source>
<translation>A Mercury-ról</translation>
</message>
<message>
<location line="+39"/>
<source><b>Mercury</b> version</source>
<translation><b>Mercury</b> verzió</translation>
</message>
<message>
<location line="+41"/>
<source>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2012-2014 The NovaCoin developers
Copyright © 2014 The Mercury developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young (eay@cryptsoft.com) and UPnP software written by Thomas Bernard.</source>
<translation>
Ez egy kísérleti program.
MIT/X11 szoftverlicenc alatt kiadva, lásd a mellékelt COPYING fájlt vagy a http://www.opensource.org/licenses/mit-license.php weboldalt.
Ez a termék tartalmaz az OpenSSL Project által az OpenSSL Toolkit-hez (http://www.openssl.org/) fejlesztett szoftvert; kriptográfiai szoftvert, melyet Eric Young (eay@cryptsoft.com) írt; és Thomas Bernard által írt UPnP szoftvert.</translation>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>Címjegyzék</translation>
</message>
<message>
<location line="+22"/>
<source>Double-click to edit address or label</source>
<translation>Kattints duplán a cím vagy címke szerkesztéséhez</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Új cím létrehozása</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>A kiválasztott cím másolása a vágólapra</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>&Új Cím</translation>
</message>
<message>
<location line="-46"/>
<source>These are your Mercury addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation>Ezek a Mercury-címeid a fizetések fogadásához. Érdemes minden küldőnek egy külön címet létrehozni, hogy könnyebben követhesd személyenként a tranzakciókat.</translation>
</message>
<message>
<location line="+60"/>
<source>&Copy Address</source>
<translation>&Cím Másolása</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation>&QR Kód Mutatása</translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a Mercury address</source>
<translation>Írj alá egy üzenetet, hogy bizonyíthasd egy Mercury-cím birtoklását</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>&Üzenet Aláírása</translation>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation>A kiválasztott cím törlése a listáról</translation>
</message>
<message>
<location line="-14"/>
<source>Verify a message to ensure it was signed with a specified Mercury address</source>
<translation>Hitelesíts egy üzenetet ahhoz, hogy bebizonyosodjon Mercury-cím alapján a feladó kiléte</translation>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation>Üzenet &Hitelesítése</translation>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Törlés</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+65"/>
<source>Copy &Label</source>
<translation>Címke &Másolása</translation>
</message>
<message>
<location line="+2"/>
<source>&Edit</source>
<translation>Sz&erkesztés</translation>
</message>
<message>
<location line="+250"/>
<source>Export Address Book Data</source>
<translation>Címjegyzék Exportálása</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Vesszővel elválasztott fájl (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>Hiba exportálás</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Nem lehetett írni a fájlt a következő helyen: %1.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Címke</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Cím</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(nincs címke)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation>Kulcsszó Párbeszédablak</translation>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Add meg a jelszót</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Új jelszó</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Új jelszó ismét</translation>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation>Triviálisan a sendmoney parancs letiltására szolgál, amennyiben az OS felhasználója kompromittálódik. Nem ad valós biztonságot.</translation>
</message>
<message>
<location line="+3"/>
<source>For staking only</source>
<translation>Csak kamatoztatásra</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+35"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Írd be az új jelszót a tárcához.<br/>Használj <b>legalább 10 véletlenszerű karaktert</b>, vagy <b>legalább nyolc szót</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Tárca kódolása</translation>
</message>
<message>
<location line="+7"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Ez a művelet a tárcád jelszavának megadását igényli annak megnyitásához.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Tárca feloldása</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Ez a művelet a tárcád jelszavának megadását igényli annak dekódolásához.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Tárca dekódolása</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Jelszó megváltoztatása</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Írd be a tárca régi és új jelszavát.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Biztosan kódolni akarod a tárcát?</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
<translation>Figyelmeztetés: Ha kódolod a tárcád és elveszíted annak jelszavát, el fogod <b>VESZÍTENI AZ ÖSSZES ÉRMÉDET</b>!</translation>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Biztosan kódolni akarod a tárcát?</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>FONTOS: A pénztárca-fájl korábbi mentéseit ezzel az új, titkosított pénztárca-fájllal kell helyettesíteni. Biztonsági okokból a pénztárca-fájl korábbi titkosítás nélküli mentései haszontalanná válnak amint elkezdi használni az új, titkosított pénztárcát.</translation>
</message>
<message>
<location line="+103"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-133"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation>Tárca kódolva</translation>
</message>
<message>
<location line="-58"/>
<source>Mercury will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Tárca kódolása sikertelen.</translation>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Tárca kódolása belső hiba miatt sikertelen. A tárcád nem lett kódolva.</translation>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation>A megadott jelszavak nem egyeznek.</translation>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation>Tárca megnyitása sikertelen</translation>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Hibás jelszó.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Dekódolás sikertelen.</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>Jelszó megváltoztatva.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+282"/>
<source>Sign &message...</source>
<translation>Üzenet aláírása...</translation>
</message>
<message>
<location line="+251"/>
<source>Synchronizing with network...</source>
<translation>Szinkronizálás a hálózattal...</translation>
</message>
<message>
<location line="-319"/>
<source>&Overview</source>
<translation>&Áttekintés</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Tárca általános áttekintése</translation>
</message>
<message>
<location line="+17"/>
<source>&Transactions</source>
<translation>&Tranzakciók</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Tranzakciótörténet megtekintése</translation>
</message>
<message>
<location line="+5"/>
<source>&Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-13"/>
<source>&Receive coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-7"/>
<source>&Send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>E&xit</source>
<translation>&Kilépés</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Kilépés</translation>
</message>
<message>
<location line="+6"/>
<source>Show information about Mercury</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>A &Qt-ról</translation><|fim▁hole|> </message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Információk a Qt ról</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Opciók...</translation>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
<translation>Tárca &kódolása...</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>&Bisztonsági másolat készítése a Tárcáról</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>Jelszó &megváltoztatása...</translation>
</message>
<message numerus="yes">
<location line="+259"/>
<source>~%n block(s) remaining</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-256"/>
<source>&Export...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Send coins to a Mercury address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Modify configuration options for Mercury</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Encrypt or decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup wallet to another location</source>
<translation>Biztonsági másolat készítése a Tárcáról egy másik helyre</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Tárcakódoló jelszó megváltoztatása</translation>
</message>
<message>
<location line="+10"/>
<source>&Debug window</source>
<translation>&Debug ablak</translation>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation>Hibakereső és diagnosztikai konzol megnyitása</translation>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation>Üzenet &valódiságának ellenőrzése</translation>
</message>
<message>
<location line="-202"/>
<source>Mercury</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet</source>
<translation>Tárca</translation>
</message>
<message>
<location line="+180"/>
<source>&About Mercury</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation>&Mutat / Elrejt</translation>
</message>
<message>
<location line="+9"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>&File</source>
<translation>&Fájl</translation>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation>&Beállítások</translation>
</message>
<message>
<location line="+8"/>
<source>&Help</source>
<translation>&Súgó</translation>
</message>
<message>
<location line="+12"/>
<source>Tabs toolbar</source>
<translation>Fül eszköztár</translation>
</message>
<message>
<location line="+8"/>
<source>Actions toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+9"/>
<source>[testnet]</source>
<translation>[teszthálózat]</translation>
</message>
<message>
<location line="+0"/>
<location line="+60"/>
<source>Mercury client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+75"/>
<source>%n active connection(s) to Mercury network</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+40"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+413"/>
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Not staking because wallet is locked</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is syncing</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because you don't have mature coins</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-403"/>
<source>%n second(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="-312"/>
<source>About Mercury card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about Mercury card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>&Unlock Wallet...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+297"/>
<source>%n minute(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Up to date</source>
<translation>Naprakész</translation>
</message>
<message>
<location line="+7"/>
<source>Catching up...</source>
<translation>Frissítés...</translation>
</message>
<message>
<location line="+10"/>
<source>Last received block was generated %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Sent transaction</source>
<translation>Tranzakció elküldve</translation>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation>Beérkező tranzakció</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Dátum: %1
Összeg: %2
Típus: %3
Cím: %4
</translation>
</message>
<message>
<location line="+100"/>
<location line="+15"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI can not be parsed! This can be caused by an invalid Mercury address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Tárca <b>kódolva</b> és jelenleg <b>nyitva</b>.</translation>
</message>
<message>
<location line="+10"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Tárca <b>kódolva</b> és jelenleg <b>zárva</b>.</translation>
</message>
<message>
<location line="+25"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+76"/>
<source>%n second(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+18"/>
<source>Not staking</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+109"/>
<source>A fatal error occurred. Mercury can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+90"/>
<source>Network Alert</source>
<translation>Hálózati figyelmeztetés</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation>Összeg:</translation>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+551"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+51"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Tree mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>List mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation>Összeg</translation>
</message>
<message>
<location line="+5"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation>Cím</translation>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation>Dátum</translation>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation>Megerősítve</translation>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="-515"/>
<source>Copy address</source>
<translation>Cím másolása</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Címke másolása</translation>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation>Összeg másolása</translation>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation>Tranzakcióazonosító másolása</translation>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+155"/>
<source>DUST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>This label turns red, if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kb is required.
Can vary +/- 1 Byte per input.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority get more likely into a block.
This label turns red, if the priority is smaller than "medium".
This means a fee of at least %1 per kb is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+66"/>
<source>(no label)</source>
<translation>(nincs címke)</translation>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Cím szerkesztése</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>Cím&ke</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Cím</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+20"/>
<source>New receiving address</source>
<translation>Új fogadó cím</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Új küldő cím</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Fogadó cím szerkesztése</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Küldő cím szerkesztése</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>A megadott "%1" cím már szerepel a címjegyzékben.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid Mercury address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Tárca feloldása sikertelen.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>Új kulcs generálása sikertelen.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+420"/>
<location line="+12"/>
<source>Mercury-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Opciók</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>&Fő</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>Tranzakciós &díj fizetése</translation>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Reserve</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start Mercury after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start Mercury on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Detach databases at shutdown</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Network</source>
<translation>&Hálózat</translation>
</message>
<message>
<location line="+6"/>
<source>Automatically open the Mercury client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>&UPnP port-feltérképezés</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the Mercury network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation>Proxy &IP:</translation>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation>&Port:</translation>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Proxy portja (pl.: 9050)</translation>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation>SOCKS &Verzió:</translation>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation>A proxy SOCKS verziója (pl. 5)</translation>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation>&Ablak</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Kicsinyítés után csak eszköztár-ikont mutass</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Kicsinyítés a tálcára az eszköztár helyett</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Az alkalmazásból való kilépés helyett az eszköztárba kicsinyíti az alkalmazást az ablak bezárásakor. Ez esetben az alkalmazás csak a Kilépés menüponttal zárható be.</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>K&icsinyítés záráskor</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>&Megjelenítés</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation>Felhasználófelület nye&lve:</translation>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting Mercury.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>&Mértékegység:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Válaszd ki az interfészen és érmék küldésekor megjelenítendő alapértelmezett alegységet.</translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show Mercury addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>&Címek megjelenítése a tranzakciólistában</translation>
</message>
<message>
<location line="+7"/>
<source>Whether to show coin control features or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation>&Megszakítás</translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+55"/>
<source>default</source>
<translation>alapértelmezett</translation>
</message>
<message>
<location line="+149"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting Mercury.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation>A megadott proxy cím nem érvényes.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Űrlap</translation>
</message>
<message>
<location line="+33"/>
<location line="+231"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Mercury network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-160"/>
<source>Stake:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-107"/>
<source>Wallet</source>
<translation>Tárca</translation>
</message>
<message>
<location line="+49"/>
<source>Spendable:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current spendable balance</source>
<translation>Jelenlegi egyenleg</translation>
</message>
<message>
<location line="+71"/>
<source>Immature:</source>
<translation>Éretlen:</translation>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation>Bányászott egyenleg amely még nem érett be.</translation>
</message>
<message>
<location line="+20"/>
<source>Total:</source>
<translation>Összesen:</translation>
</message>
<message>
<location line="+16"/>
<source>Your current total balance</source>
<translation>Aktuális egyenleged</translation>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Legutóbbi tranzakciók</b></translation>
</message>
<message>
<location line="-108"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Total of coins that was staked, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+113"/>
<location line="+1"/>
<source>out of sync</source>
<translation>nincs szinkronban</translation>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation>Kliens néve</translation>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+348"/>
<source>N/A</source>
<translation>Nem elérhető</translation>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation>Kliens verzió</translation>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation>&Információ</translation>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation>Használt OpenSSL verzió</translation>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation>Bekapcsolás ideje</translation>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>Hálózat</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation>Kapcsolatok száma</translation>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation>Blokklánc</translation>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation>Aktuális blokkok száma</translation>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation>Becsült összes blokk</translation>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation>Utolsó blokk ideje</translation>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation>&Megnyitás</translation>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the Mercury-Qt help message to get a list with possible Mercury command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation>&Konzol</translation>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation>Fordítás dátuma</translation>
</message>
<message>
<location line="-104"/>
<source>Mercury - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Mercury Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation>Debug naplófájl</translation>
</message>
<message>
<location line="+7"/>
<source>Open the Mercury debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation>Konzol törlése</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-33"/>
<source>Welcome to the Mercury RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Navigálhat a fel és le nyilakkal, és <b>Ctrl-L</b> -vel törölheti a képernyőt.</translation>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Írd be azt, hogy <b>help</b> az elérhető parancsok áttekintéséhez.</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Érmék küldése</translation>
</message>
<message>
<location line="+76"/>
<source>Coin Control Features</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Insufficient funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+35"/>
<source>0</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Amount:</source>
<translation>Összeg:</translation>
</message>
<message>
<location line="+22"/>
<location line="+86"/>
<location line="+86"/>
<location line="+32"/>
<source>0.00 CLAM</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-191"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+106"/>
<source>Send to multiple recipients at once</source>
<translation>Küldés több címzettnek egyszerre</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>&Címzett hozzáadása</translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>Mindent &töröl</translation>
</message>
<message>
<location line="+28"/>
<source>Balance:</source>
<translation>Egyenleg:</translation>
</message>
<message>
<location line="+16"/>
<source>123.456 CLAM</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Küldés megerősítése</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>&Küldés</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-173"/>
<source>Enter a Mercury address (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Összeg másolása</translation>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+86"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Küldés megerősítése</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>A címzett címe érvénytelen, kérlek, ellenőrizd.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>A fizetendő összegnek nagyobbnak kell lennie 0-nál.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation>Az összeg meghaladja az egyenlegeden rendelkezésre álló összeget.</translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>A küldeni kívánt összeg és a %1 tranzakciós díj együtt meghaladja az egyenlegeden rendelkezésedre álló összeget.</translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Többször szerepel ugyanaz a cím. Egy küldési műveletben egy címre csak egyszer lehet küldeni.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>WARNING: Invalid Mercury address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
<translation>(nincs címke)</translation>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>Összeg:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>Címzett:</translation>
</message>
<message>
<location line="+24"/>
<location filename="../sendcoinsentry.cpp" line="+25"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Milyen címkével kerüljön be ez a cím a címtáradba?
</translation>
</message>
<message>
<location line="+9"/>
<source>&Label:</source>
<translation>Címke:</translation>
</message>
<message>
<location line="+18"/>
<source>The address to send the payment to (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Cím beillesztése a vágólapról</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a Mercury address (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+124"/>
<source>&Sign Message</source>
<translation>Üzenet aláírása...</translation>
</message>
<message>
<location line="-118"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Aláírhat a címeivel üzeneteket, amivel bizonyíthatja, hogy a címek az önéi. Vigyázzon, hogy ne írjon alá semmi félreérthetőt, mivel a phising támadásokkal megpróbálhatják becsapni, hogy az azonosságát átírja másokra. Csak olyan részletes állításokat írjon alá, amivel egyetért.</translation>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation>Cím beillesztése a vágólapról</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation>Ide írja az aláírandó üzenetet</translation>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation>A jelenleg kiválasztott aláírás másolása a rendszer-vágólapra</translation>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this Mercury address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation>Mindent &töröl</translation>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation>Üzenet ellenőrzése</translation>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>Írja be az aláírás címét, az üzenetet (ügyelve arra, hogy az új-sor, szóköz, tab, stb. karaktereket is pontosan) és az aláírást az üzenet ellenőrzéséhez. Ügyeljen arra, ne gondoljon többet az aláírásról, mint amennyi az aláírt szövegben ténylegesen áll, hogy elkerülje a köztes-ember (man-in-the-middle) támadást.</translation>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified Mercury address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a Mercury address (e.g. B8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter Mercury signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation>A megadott cím nem érvényes.</translation>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation>Ellenőrizze a címet és próbálja meg újra.</translation>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation>Üzenet aláírása nem sikerült.</translation>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation>Üzenet aláírva.</translation>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation>Az aláírást nem sikerült dekódolni.</translation>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation>Ellenőrizze az aláírást és próbálja meg újra.</translation>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation>Az üzenet ellenőrzése nem sikerült.</translation>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation>Üzenet ellenőrizve.</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+19"/>
<source>Open until %1</source>
<translation>Megnyitva %1-ig</translation>
</message>
<message numerus="yes">
<location line="-2"/>
<source>Open for %n block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+8"/>
<source>conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/megerősítetlen</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 megerősítés</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>Állapot</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Dátum</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>Legenerálva</translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>Feladó</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>Címzett</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation>saját cím</translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>címke</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>Jóváírás</translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>elutasítva</translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation>Terhelés</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>Tranzakciós díj</translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>Nettó összeg</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>Üzenet</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation>Megjegyzés</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>Tranzakcióazonosító</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 510 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation>Debug információ</translation>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>Tranzakció</translation>
</message>
<message>
<location line="+5"/>
<source>Inputs</source>
<translation>Bemenetek</translation>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Összeg</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation>igaz</translation>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation>hamis</translation>
</message>
<message>
<location line="-211"/>
<source>, has not been successfully broadcast yet</source>
<translation>, még nem sikerült elküldeni.</translation>
</message>
<message>
<location line="+35"/>
<source>unknown</source>
<translation>ismeretlen</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Tranzakció részletei</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Ez a mező a tranzakció részleteit mutatja</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+226"/>
<source>Date</source>
<translation>Dátum</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Típus</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Cím</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Összeg</translation>
</message>
<message>
<location line="+60"/>
<source>Open until %1</source>
<translation>%1-ig megnyitva</translation>
</message>
<message>
<location line="+12"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Megerősítve (%1 megerősítés)</translation>
</message>
<message numerus="yes">
<location line="-15"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Ezt a blokkot egyetlen másik csomópont sem kapta meg, így valószínűleg nem lesz elfogadva!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>Legenerálva, de még el nem fogadva.</translation>
</message>
<message>
<location line="+42"/>
<source>Received with</source>
<translation>Erre a címre</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Erről a címről</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Erre a címre</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Magadnak kifizetve</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Kibányászva</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(nincs)</translation>
</message>
<message>
<location line="+190"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Tranzakció állapota. Húzd ide a kurzort, hogy lásd a megerősítések számát.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Tranzakció fogadásának dátuma és időpontja.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Tranzakció típusa.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>A tranzakció címzettjének címe.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Az egyenleghez jóváírt vagy ráterhelt összeg.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+55"/>
<location line="+16"/>
<source>All</source>
<translation>Mind</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Mai</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Ezen a héten</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Ebben a hónapban</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Múlt hónapban</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Ebben az évben</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Tartomány ...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Erre a címre</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Erre a címre</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Magadnak</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Kibányászva</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Más</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Írd be a keresendő címet vagy címkét</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Minimális összeg</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Cím másolása</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Címke másolása</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Összeg másolása</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation>Tranzakcióazonosító másolása</translation>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>Címke szerkesztése</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation>Tranzakciós részletek megjelenítése</translation>
</message>
<message>
<location line="+144"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Vesszővel elválasztott fájl (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>Megerősítve</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Dátum</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Típus</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Címke</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Cím</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Összeg</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>Azonosító</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>Tartomány:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>meddig</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+206"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+33"/>
<source>Mercury version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation>Használat:</translation>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or mercuryd</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation>Parancsok kilistázása
</translation>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation>Segítség egy parancsról
</translation>
</message>
<message>
<location line="+2"/>
<source>Options:</source>
<translation>Opciók
</translation>
</message>
<message>
<location line="+2"/>
<source>Specify configuration file (default: mercury.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: mercuryd.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify wallet file (within data directory)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>Adatkönyvtár
</translation>
</message>
<message>
<location line="+2"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation>Az adatbázis gyorsítótár mérete megabájtban (alapértelmezés: 25)</translation>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Listen for connections on <port> (default: 15714 or testnet: 25714)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>Maximálisan <n> számú kapcsolat fenntartása a peerekkel (alapértelmezés: 125)</translation>
</message>
<message>
<location line="+3"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Kapcsolódás egy csomóponthoz a peerek címeinek megszerzése miatt, majd szétkapcsolás</translation>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation>Adja meg az Ön saját nyilvános címét</translation>
</message>
<message>
<location line="+5"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Stake your coins to support network and gain reward (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>Helytelenül viselkedő peerek leválasztási határértéke (alapértelmezés: 100)</translation>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>Helytelenül viselkedő peerek kizárási ideje másodpercben (alapértelmezés: 86400)</translation>
</message>
<message>
<location line="-44"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Detach block and address databases. Increases shutdown time (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>Listen for JSON-RPC connections on <port> (default: 15715 or testnet: 56413)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Parancssoros és JSON-RPC parancsok elfogadása
</translation>
</message>
<message>
<location line="+101"/>
<source>Error: Transaction creation failed </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-8"/>
<source>Importing blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Importing bootstrap blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-88"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Háttérben futtatás daemonként és parancsok elfogadása
</translation>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation>Teszthálózat használata
</translation>
</message>
<message>
<location line="-24"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+117"/>
<source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Figyelem: a -paytxfee nagyon magas. Ennyi tranzakciós díjat fogsz fizetni, ha elküldöd a tranzakciót.</translation>
</message>
<message>
<location line="+61"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Mercury will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-18"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-30"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-62"/>
<source>Connect only to the specified node(s)</source>
<translation>Csatlakozás csak a megadott csomóponthoz</translation>
</message>
<message>
<location line="+4"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+94"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>Egyik hálózati porton sem sikerül hallgatni. Használja a -listen=0 kapcsolót, ha ezt szeretné.</translation>
</message>
<message>
<location line="-90"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync checkpoints policy (default: strict)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+83"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-82"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation>SSL-opciók: (lásd a Bitcoin Wiki SSL-beállítási instrukcióit)</translation>
</message>
<message>
<location line="-74"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Trace/debug információ küldése a konzolra a debug.log fájl helyett</translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-42"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>Csatlakozás időkerete milliszekundumban (alapértelmezett: 5000)</translation>
</message>
<message>
<location line="+109"/>
<source>Unable to sign checkpoint, wrong checkpointkey?
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>UPnP-használat engedélyezése a figyelő port feltérképezésénél (default: 0)</translation>
</message>
<message>
<location line="-1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>UPnP-használat engedélyezése a figyelő port feltérképezésénél (default: 1 when listening)</translation>
</message>
<message>
<location line="-25"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Username for JSON-RPC connections</source>
<translation>Felhasználói név JSON-RPC csatlakozásokhoz
</translation>
</message>
<message>
<location line="+47"/>
<source>Verifying database integrity...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+57"/>
<source>WARNING: syncronized checkpoint violation detected, but skipped!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-54"/>
<source>Password for JSON-RPC connections</source>
<translation>Jelszó JSON-RPC csatlakozásokhoz
</translation>
</message>
<message>
<location line="-84"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=mercuryrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Mercury Alert" admin@foo.com
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>JSON-RPC csatlakozások engedélyezése meghatározott IP-címről
</translation>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Parancsok küldése <ip> címen működő csomóponthoz (alapértelmezett: 127.0.0.1)
</translation>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Parancs, amit akkor hajt végre, amikor a legjobb blokk megváltozik (%s a cmd-ban lecserélődik a blokk hash-re)</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Parancs, amit akkor hajt végre, amikor egy tárca-tranzakció megváltozik (%s a parancsban lecserélődik a blokk TxID-re)</translation>
</message>
<message>
<location line="+3"/>
<source>Require a confirmations for change (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation>A Tárca frissítése a legfrissebb formátumra</translation>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Kulcskarika mérete <n> (alapértelmezett: 100)
</translation>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Blokklánc újraszkennelése hiányzó tárca-tranzakciók után
</translation>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>OpenSSL (https) használata JSON-RPC csatlakozásokhoz
</translation>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation>Szervertanúsítvány-fájl (alapértelmezett: server.cert)
</translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>Szerver titkos kulcsa (alapértelmezett: server.pem)
</translation>
</message>
<message>
<location line="+1"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+53"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-158"/>
<source>This help message</source>
<translation>Ez a súgó-üzenet
</translation>
</message>
<message>
<location line="+95"/>
<source>Wallet %s resides outside data directory %s.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot obtain a lock on data directory %s. Mercury is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-98"/>
<source>Mercury</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+140"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>A %s nem elérhető ezen a gépen (bind returned error %d, %s)</translation>
</message>
<message>
<location line="-130"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>DNS-kikeresés engedélyezése az addnode-nál és a connect-nél</translation>
</message>
<message>
<location line="+122"/>
<source>Loading addresses...</source>
<translation>Címek betöltése...</translation>
</message>
<message>
<location line="-15"/>
<source>Error loading blkindex.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Hiba a wallet.dat betöltése közben: meghibásodott tárca</translation>
</message>
<message>
<location line="+4"/>
<source>Error loading wallet.dat: Wallet requires newer version of Mercury</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart Mercury to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation>Hiba a wallet.dat betöltése közben</translation>
</message>
<message>
<location line="-16"/>
<source>Invalid -proxy address: '%s'</source>
<translation>Érvénytelen -proxy cím: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Ismeretlen hálózat lett megadva -onlynet: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation>Ismeretlen -socks proxy kérése: %i</translation>
</message>
<message>
<location line="+4"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation>Csatlakozási cím (-bind address) feloldása nem sikerült: '%s'</translation>
</message>
<message>
<location line="+2"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>Külső cím (-externalip address) feloldása nem sikerült: '%s'</translation>
</message>
<message>
<location line="-24"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Érvénytelen -paytxfee=<összeg> összeg: '%s'</translation>
</message>
<message>
<location line="+44"/>
<source>Error: could not start node</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Invalid amount</source>
<translation>Érvénytelen összeg</translation>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation>Nincs elegendő fedezet.</translation>
</message>
<message>
<location line="-34"/>
<source>Loading block index...</source>
<translation>Blokkindex betöltése...</translation>
</message>
<message>
<location line="-103"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Elérendő csomópont megadása, és a kapcsolat fenntartásának megkísérlése</translation>
</message>
<message>
<location line="+122"/>
<source>Unable to bind to %s on this computer. Mercury is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-97"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Loading wallet...</source>
<translation>Tárca betöltése...</translation>
</message>
<message>
<location line="+8"/>
<source>Cannot downgrade wallet</source>
<translation>Nem sikerült a Tárca visszaállítása a korábbi verzióra</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot initialize keypool</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation>Nem sikerült az alapértelmezett címet írni.</translation>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation>Újraszkennelés...</translation>
</message>
<message>
<location line="+5"/>
<source>Done loading</source>
<translation>Betöltés befejezve.</translation>
</message>
<message>
<location line="-167"/>
<source>To use the %s option</source>
<translation>Használd a %s opciót</translation>
</message>
<message>
<location line="+14"/>
<source>Error</source>
<translation>Hiba</translation>
</message>
<message>
<location line="+6"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation>Be kell állítani rpcpassword=<password> a konfigurációs fájlban
%s
Ha a fájl nem létezik, hozd létre 'csak a felhasználó által olvasható' fájl engedéllyel</translation>
</message>
</context>
</TS><|fim▁end|> | |
<|file_name|>joiner.rs<|end_file_name|><|fim▁begin|>use std::os;
use std::io::File;
//use std::ops::BitXor;
/// Entry point: expects exactly two input file paths on the command line,
/// reads both files and writes their byte-wise XOR into "join.txt".
fn main()
{
    let args: ~[~str] = os::args();
    if args.len() != 3
    {
        // args[0] is the program name; TWO input files are required.
        // (Original message said "<intputfile>" — typo, and it understated
        // the required argument count.)
        println!("Usage: {:s} <inputfile1> <inputfile2>", args[0]);
    }
    else
    {
        let fname_a = args[1].clone();
        let path_a = Path::new(fname_a.clone());
        let msg_file_a = File::open(&path_a);
        let fname_b = args[2];
        let path_b = Path::new(fname_b.clone());
        let msg_file_b = File::open(&path_b);
        // In this pre-1.0 Rust dialect File::open yields Option<File>.
        match(msg_file_a, msg_file_b)
        {
            (Some(mut msg_a), Some(mut msg_b)) =>
            {
                let msg_bytes_a: ~[u8] = msg_a.read_to_end();
                let msg_bytes_b: ~[u8] = msg_b.read_to_end();
                let join_file = File::create(&Path::new("join.txt"));
                match (join_file)
                {
                    Some(join) =>
                    {
                        joiner(join, msg_bytes_a, msg_bytes_b);
                    },
                    None => fail!("Error opening output files!"),
                }
            },
            // NOTE(review): this error names fname_a even when fname_b was the
            // one that failed to open — kept as-is to avoid widening the change.
            (_, _) => fail!("Error opening message file: {:s}", fname_a)
        }
    }
}
/// XOR two byte slices element-wise, returning an owned vector.
/// Indexes `b` by `a`'s length — assumes `b` is at least as long as `a`
/// (TODO confirm callers guarantee equal-length inputs).
fn xor(a: &[u8], b: &[u8])-> ~[u8]
{
    let mut ret = ~[];
    // Combine corresponding bytes from both inputs.
    for i in range(0, a.len())
    {
        ret.push(a[i] ^ b[i]);
    }<|fim▁hole|> ret
}
/// Write the XOR of the two byte buffers into `join` (the output file).
/// Despite the name "unencrypted_bytes", this is simply a one-time-pad style
/// combination of the two inputs.
fn joiner(mut join: File, msg_bytes_a: &[u8], msg_bytes_b: &[u8])
{
    let unencrypted_bytes = xor(msg_bytes_a, msg_bytes_b);
    join.write((unencrypted_bytes));
}<|fim▁end|>
<|file_name|>Ui_SearchWidget.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Form implementation generated from reading ui file './QScintilla/SearchWidget.ui'
#
# Created: Tue Nov 18 17:53:58 2014
# by: PyQt5 UI code generator 5.3.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_SearchWidget(object):
    """pyuic5-generated UI builder for the editor's inline search bar.

    NOTE(review): this module is auto-generated from SearchWidget.ui
    ("All changes made in this file will be lost!"); comments below are
    for review purposes only and will not survive regeneration.
    """

    def setupUi(self, SearchWidget):
        # Build the search bar as a single horizontal row: close button,
        # "Find:" label, an editable search-text combo box, prev/next
        # buttons, then the match-option checkboxes.
        SearchWidget.setObjectName("SearchWidget")
        SearchWidget.resize(973, 25)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)<|fim▁hole|> sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(SearchWidget.sizePolicy().hasHeightForWidth())
        SearchWidget.setSizePolicy(sizePolicy)
        self.horizontalLayout = QtWidgets.QHBoxLayout(SearchWidget)
        self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.closeButton = QtWidgets.QToolButton(SearchWidget)
        self.closeButton.setText("")
        self.closeButton.setObjectName("closeButton")
        self.horizontalLayout.addWidget(self.closeButton)
        self.label = QtWidgets.QLabel(SearchWidget)
        self.label.setObjectName("label")
        self.horizontalLayout.addWidget(self.label)
        # The search-text combo keeps history (insert-at-top, no duplicates)
        # and expands to absorb all free horizontal space.
        self.findtextCombo = QtWidgets.QComboBox(SearchWidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.findtextCombo.sizePolicy().hasHeightForWidth())
        self.findtextCombo.setSizePolicy(sizePolicy)
        self.findtextCombo.setMinimumSize(QtCore.QSize(300, 0))
        self.findtextCombo.setEditable(True)
        self.findtextCombo.setInsertPolicy(QtWidgets.QComboBox.InsertAtTop)
        self.findtextCombo.setDuplicatesEnabled(False)
        self.findtextCombo.setObjectName("findtextCombo")
        self.horizontalLayout.addWidget(self.findtextCombo)
        self.findPrevButton = QtWidgets.QToolButton(SearchWidget)
        self.findPrevButton.setObjectName("findPrevButton")
        self.horizontalLayout.addWidget(self.findPrevButton)
        self.findNextButton = QtWidgets.QToolButton(SearchWidget)
        self.findNextButton.setObjectName("findNextButton")
        self.horizontalLayout.addWidget(self.findNextButton)
        self.caseCheckBox = QtWidgets.QCheckBox(SearchWidget)
        self.caseCheckBox.setObjectName("caseCheckBox")
        self.horizontalLayout.addWidget(self.caseCheckBox)
        self.wordCheckBox = QtWidgets.QCheckBox(SearchWidget)
        self.wordCheckBox.setObjectName("wordCheckBox")
        self.horizontalLayout.addWidget(self.wordCheckBox)
        self.regexpCheckBox = QtWidgets.QCheckBox(SearchWidget)
        self.regexpCheckBox.setObjectName("regexpCheckBox")
        self.horizontalLayout.addWidget(self.regexpCheckBox)
        self.wrapCheckBox = QtWidgets.QCheckBox(SearchWidget)
        self.wrapCheckBox.setObjectName("wrapCheckBox")
        self.horizontalLayout.addWidget(self.wrapCheckBox)
        self.selectionCheckBox = QtWidgets.QCheckBox(SearchWidget)
        self.selectionCheckBox.setObjectName("selectionCheckBox")
        self.horizontalLayout.addWidget(self.selectionCheckBox)

        self.retranslateUi(SearchWidget)
        QtCore.QMetaObject.connectSlotsByName(SearchWidget)
        # Keyboard tab order: text box -> option checkboxes -> next/prev -> close.
        SearchWidget.setTabOrder(self.findtextCombo, self.caseCheckBox)
        SearchWidget.setTabOrder(self.caseCheckBox, self.wordCheckBox)
        SearchWidget.setTabOrder(self.wordCheckBox, self.regexpCheckBox)
        SearchWidget.setTabOrder(self.regexpCheckBox, self.wrapCheckBox)
        SearchWidget.setTabOrder(self.wrapCheckBox, self.selectionCheckBox)
        SearchWidget.setTabOrder(self.selectionCheckBox, self.findNextButton)
        SearchWidget.setTabOrder(self.findNextButton, self.findPrevButton)
        SearchWidget.setTabOrder(self.findPrevButton, self.closeButton)

    def retranslateUi(self, SearchWidget):
        # Install all user-visible texts/tooltips through Qt's translation
        # machinery so they can be localized at runtime.
        _translate = QtCore.QCoreApplication.translate
        SearchWidget.setWindowTitle(_translate("SearchWidget", "Find"))
        self.closeButton.setToolTip(_translate("SearchWidget", "Press to close the window"))
        self.label.setText(_translate("SearchWidget", "Find:"))
        self.findPrevButton.setToolTip(_translate("SearchWidget", "Press to find the previous occurrence"))
        self.findNextButton.setToolTip(_translate("SearchWidget", "Press to find the next occurrence"))
        self.caseCheckBox.setText(_translate("SearchWidget", "Match case"))
        self.wordCheckBox.setText(_translate("SearchWidget", "Whole word"))
        self.regexpCheckBox.setText(_translate("SearchWidget", "Regexp"))
        self.wrapCheckBox.setText(_translate("SearchWidget", "Wrap around"))
        self.selectionCheckBox.setText(_translate("SearchWidget", "Selection only"))
<|file_name|>_subscriptions_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._subscriptions_operations import build_check_zone_peers_request, build_get_request, build_list_locations_request, build_list_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class SubscriptionsOperations:
    """SubscriptionsOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.resource.subscriptions.v2018_06_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        # Pipeline client and (de)serializers are injected by the generated
        # service client; this class only composes requests / parses responses.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    @distributed_trace
    def list_locations(
        self,
        subscription_id: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.LocationListResult"]:
        """Gets all available geo-locations.

        This operation provides all the locations that are available for resource providers; however,
        each resource provider may support a subset of this list.

        :param subscription_id: The ID of the target subscription.
        :type subscription_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either LocationListResult or the result of cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.resource.subscriptions.v2018_06_01.models.LocationListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.LocationListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        def prepare_request(next_link=None):
            # Build either the first-page request or a follow-up request
            # from the service-provided continuation link.
            if not next_link:
                request = build_list_locations_request(
                    subscription_id=subscription_id,
                    template_url=self.list_locations.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_locations_request(
                    subscription_id=subscription_id,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize("LocationListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            # LocationListResult carries no continuation link, so the first
            # page is always the last one (next_link is hard-coded to None).
            return None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_locations.metadata = {'url': '/subscriptions/{subscriptionId}/locations'}  # type: ignore

    @distributed_trace_async
    async def get(
        self,
        subscription_id: str,
        **kwargs: Any
    ) -> "_models.Subscription":
        """Gets details about a specified subscription.

        :param subscription_id: The ID of the target subscription.
        :type subscription_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Subscription, or the result of cls(response)
        :rtype: ~azure.mgmt.resource.subscriptions.v2018_06_01.models.Subscription
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Subscription"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        request = build_get_request(
            subscription_id=subscription_id,
            template_url=self.get.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('Subscription', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}'}  # type: ignore

    @distributed_trace
    def list(
        self,
        **kwargs: Any
    ) -> AsyncIterable["_models.SubscriptionListResult"]:
        """Gets all subscriptions for a tenant.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either SubscriptionListResult or the result of
         cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.resource.subscriptions.v2018_06_01.models.SubscriptionListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.SubscriptionListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        def prepare_request(next_link=None):
            if not next_link:
                request = build_list_request(
                    template_url=self.list.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_request(
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize("SubscriptionListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            # Unlike list_locations, this result type is server-paged: honor
            # the continuation link until the service stops returning one.
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions'}  # type: ignore

    @distributed_trace_async
    async def check_zone_peers(
        self,
        subscription_id: str,
        parameters: "_models.CheckZonePeersRequest",
        **kwargs: Any
    ) -> "_models.CheckZonePeersResult":
        """Compares a subscriptions logical zone mapping.

        :param subscription_id: The ID of the target subscription.
        :type subscription_id: str
        :param parameters: Parameters for checking zone peers.
        :type parameters: ~azure.mgmt.resource.subscriptions.v2018_06_01.models.CheckZonePeersRequest
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: CheckZonePeersResult, or the result of cls(response)
        :rtype: ~azure.mgmt.resource.subscriptions.v2018_06_01.models.CheckZonePeersResult
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.CheckZonePeersResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        content_type = kwargs.pop('content_type', "application/json")  # type: Optional[str]

        _json = self._serialize.body(parameters, 'CheckZonePeersRequest')

        request = build_check_zone_peers_request(<|fim▁hole|> json=_json,
            template_url=self.check_zone_peers.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            # This operation deserializes the service's structured error body,
            # unlike the other methods in this group.
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = self._deserialize('CheckZonePeersResult', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    check_zone_peers.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Resources/checkZonePeers/'}  # type: ignore<|fim▁end|> | subscription_id=subscription_id,
content_type=content_type, |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>// get params
function getParams()
{
var params = {
initial_amount: parseInt($('#initial_amount').val(), 10) || 0,
interest_rate_per_annum: parseFloat($('#interest_rate_per_annum').val()) / 100 || 0,
monthly_amount: parseFloat($('#monthly_amount').val()),
num_months: parseInt($('#num_months').val(), 10)
};
params.method = $('#by_monthly_amount').is(':checked')
? 'by_monthly_amount'
: 'by_num_months';
return params;
}
// Recompute the loan schedule from the current form values and refresh the page.
function updateUI()
{
    var data = computeLoanData(getParams());
    updatePaymentTable(data);
    console.log(data);
}
// Build the amortization schedule. All money is handled in integer cents to
// avoid floating-point drift; results are converted back to units on output.
// Side effect: writes the effective monthly payment and term back into the
// #monthly_amount / #num_months form fields.
function computeLoanData(params) {
    var
        total_paid = 0,
        remainder = Math.floor(params.initial_amount * 100),   // outstanding principal, in cents
        monthly_interest_rate = params.interest_rate_per_annum / 12,
        monthly_amount,
        months = [];

    if (params.method == 'by_num_months') {
        if (monthly_interest_rate === 0) {
            // Zero-rate loan: the annuity formula degenerates to 0/0 (NaN in
            // the original code) — just split the principal evenly instead.
            monthly_amount = remainder / params.num_months;
        } else {
            // Standard annuity formula: fixed payment clearing the principal
            // in exactly num_months at the given monthly rate.
            var pow = Math.pow(1 + monthly_interest_rate, params.num_months);
            monthly_amount = remainder * monthly_interest_rate * pow / (pow - 1);
        }
    }
    else {
        monthly_amount = params.monthly_amount * 100;
    }
    monthly_amount = Math.ceil(monthly_amount);

    while (remainder > 0)
    {
        var interest = Math.floor(remainder * monthly_interest_rate);
        // Guard: if the payment never exceeds the accrued interest the balance
        // cannot shrink, and the original loop ran forever. Stop with the
        // schedule computed so far.
        if (monthly_amount <= interest && remainder + interest > monthly_amount) {
            break;
        }
        remainder += interest;
        var to_pay = remainder > monthly_amount ? monthly_amount : remainder;
        total_paid += to_pay;
        remainder -= to_pay;
        months.push({
            interest: interest / 100,
            repayment: to_pay / 100,
            remainder: remainder / 100
        });
    }

    // Reflect the effective payment and term back into the form.
    $('#monthly_amount').val((monthly_amount / 100).toFixed(2));
    $('#num_months').val(months.length);

    return {
        total_paid: total_paid / 100,
        interest_paid: (total_paid - params.initial_amount * 100) / 100,
        months: months
    };
}
function updatePaymentTable(data) {
$('#total_repayment').text(data.total_paid.toFixed(2));
$('#total_interested_paid').text(data.interest_paid.toFixed(2));
var rows = $('#monthly_breakdown').empty();
for (var idx=0; idx < data.months.length; idx++) {
var month_num = idx+1;
var is_new_year = (idx % 12) === 0;
var tr = $('<tr />')
.append($('<td />').text(month_num))
.append($('<td />').text(data.months[idx].interest.toFixed(2)))
.append($('<td />').text(data.months[idx].repayment.toFixed(2)))<|fim▁hole|> .appendTo(rows);
}
}
// initiatilize listeners
$('#initial_amount').on('change', updateUI);
$('#interest_rate_per_annum').on('change', updateUI);
$('#monthly_amount').on('change', updateUI);
$('#num_months').on('change', updateUI);
$('#by_monthly_amount').on('change', updateUI);
$('#by_num_months').on('change', updateUI);<|fim▁end|> | .append($('<td />').text(data.months[idx].remainder.toFixed(2)))
.addClass(is_new_year ? 'jan' : '') |
<|file_name|>production.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
'''
Production Configurations
- Use djangosecure
- Use Amazon's S3 for storing static files and uploaded media
- Use sendgrid to send emails
- Use MEMCACHIER on Heroku
'''
from configurations import values
# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
try:
from S3 import CallingFormat
AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN
except ImportError:
# TODO: Fix this where even if in Dev this class is called.
pass
from .common import Common
class Production(Common):
# This ensures that Django will be able to detect a secure connection
# properly on Heroku.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# INSTALLED_APPS
INSTALLED_APPS = Common.INSTALLED_APPS
# END INSTALLED_APPS
# SECRET KEY
SECRET_KEY = values.SecretValue()
# END SECRET KEY
# django-secure
INSTALLED_APPS += ("djangosecure", )
# set this to 60 seconds and then to 518400 when you can prove it works
SECURE_HSTS_SECONDS = 60
SECURE_HSTS_INCLUDE_SUBDOMAINS = values.BooleanValue(True)
SECURE_FRAME_DENY = values.BooleanValue(True)
SECURE_CONTENT_TYPE_NOSNIFF = values.BooleanValue(True)
SECURE_BROWSER_XSS_FILTER = values.BooleanValue(True)
SESSION_COOKIE_SECURE = values.BooleanValue(False)
SESSION_COOKIE_HTTPONLY = values.BooleanValue(True)
SECURE_SSL_REDIRECT = values.BooleanValue(True)
# end django-secure
# SITE CONFIGURATION
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ["*"]
# END SITE CONFIGURATION
INSTALLED_APPS += ("gunicorn", )
# STORAGE CONFIGURATION
# See: http://django-storages.readthedocs.org/en/latest/index.html
INSTALLED_APPS += (
'storages',
)
# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
STATICFILES_STORAGE = DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
AWS_ACCESS_KEY_ID = values.SecretValue()
AWS_SECRET_ACCESS_KEY = values.SecretValue()
AWS_STORAGE_BUCKET_NAME = values.SecretValue()
AWS_AUTO_CREATE_BUCKET = True
AWS_QUERYSTRING_AUTH = False
# see: https://github.com/antonagestam/collectfast
AWS_PRELOAD_METADATA = True
INSTALLED_APPS += ('collectfast', )
# AWS cache settings, don't change unless you know what you're doing:
AWS_EXPIRY = 60 * 60 * 24 * 7
AWS_HEADERS = {
'Cache-Control': 'max-age=%d, s-maxage=%d, must-revalidate' % (
AWS_EXPIRY, AWS_EXPIRY)
}
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = 'https://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
# END STORAGE CONFIGURATION
# EMAIL
DEFAULT_FROM_EMAIL = values.Value('tco2 <noreply@example.com>')
EMAIL_HOST = values.Value('smtp.sendgrid.com')
EMAIL_HOST_PASSWORD = values.SecretValue(environ_prefix="", environ_name="SENDGRID_PASSWORD")
EMAIL_HOST_USER = values.SecretValue(environ_prefix="", environ_name="SENDGRID_USERNAME")
EMAIL_PORT = values.IntegerValue(587, environ_prefix="", environ_name="EMAIL_PORT")
EMAIL_SUBJECT_PREFIX = values.Value('[tco2] ', environ_name="EMAIL_SUBJECT_PREFIX")
EMAIL_USE_TLS = True
SERVER_EMAIL = EMAIL_HOST_USER
# END EMAIL
# TEMPLATE CONFIGURATION<|fim▁hole|> 'django.template.loaders.app_directories.Loader',
)),
)
# END TEMPLATE CONFIGURATION
# CACHING
# Only do this here because thanks to django-pylibmc-sasl and pylibmc
# memcacheify is painful to install on windows.
try:
# See: https://github.com/rdegges/django-heroku-memcacheify
from memcacheify import memcacheify
CACHES = memcacheify()
except ImportError:
CACHES = values.CacheURLValue(default="memcached://127.0.0.1:11211")
# END CACHING
# Your production stuff: Below this line define 3rd party library settings<|fim▁end|> | # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader', |
<|file_name|>ActionGraph.cpp<|end_file_name|><|fim▁begin|><|fim▁hole|>std::shared_ptr<ActionGraphInterface> ActionGraphInterface::c_graphImp = nullptr;
std::weak_ptr<ActionGraphInterface> ActionGraphInterface::Get()
{
// if it`s not init, just crash.
check(c_graphImp != nullptr);
return c_graphImp;
}
void ActionGraphInterface::Init(const unsigned int& inWorkThreadNum)
{
c_graphImp = std::shared_ptr<ActionGraphInterface>(new ActionGraphImplementation());
c_graphImp->ConstructWorkThread(inWorkThreadNum);
}<|fim▁end|> | #include "Core.h"
#include "Async/ActionGraph.h"
#include "ActionGraphImplementation.h"
|
<|file_name|>pshb.py<|end_file_name|><|fim▁begin|># Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and<|fim▁hole|>import urllib2
# TODO(termie): abstract away app engine specifics
from google.appengine.api import urlfetch
from django.conf import settings
from common import exception
from common.protocol import base
class _DevRpc(object):
def get_result(self):
pass
class PshbConnection(base.Connection):
def __init__(self, endpoint):
self.endpoint = endpoint
def publish_async(self, urls):
if settings.MANAGE_PY:
logging.info('pshb.publish(%s, %s)', self.endpoint, self.urls)
return _DevRpc()
rpc = urlfetch.create_rpc()
def _callback():
result = rpc.get_result()
if result.status_code == 204:
return
raise exception.ServiceError(result.content)
rpc.callback = _callback
data = urllib.urlencode(
{'hub.url': urls, 'hub.mode': 'publish'}, doseq=True)
urlfetch.make_fetch_call(rpc, self.endpoint, method='POST', payload=data)
return rpc<|fim▁end|> | # limitations under the License.
import logging
import urllib |
<|file_name|>DeleteEventActionRequest.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.dataexchange.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/dataexchange-2017-07-25/DeleteEventAction" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DeleteEventActionRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The unique identifier for the event action.
* </p>
*/
private String eventActionId;
/**
* <p>
* The unique identifier for the event action.
* </p>
*
* @param eventActionId
* The unique identifier for the event action.
*/
public void setEventActionId(String eventActionId) {
this.eventActionId = eventActionId;
}
/**
* <p>
* The unique identifier for the event action.
* </p>
*
* @return The unique identifier for the event action.
*/
public String getEventActionId() {
return this.eventActionId;<|fim▁hole|> }
/**
* <p>
* The unique identifier for the event action.
* </p>
*
* @param eventActionId
* The unique identifier for the event action.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DeleteEventActionRequest withEventActionId(String eventActionId) {
setEventActionId(eventActionId);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getEventActionId() != null)
sb.append("EventActionId: ").append(getEventActionId());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DeleteEventActionRequest == false)
return false;
DeleteEventActionRequest other = (DeleteEventActionRequest) obj;
if (other.getEventActionId() == null ^ this.getEventActionId() == null)
return false;
if (other.getEventActionId() != null && other.getEventActionId().equals(this.getEventActionId()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getEventActionId() == null) ? 0 : getEventActionId().hashCode());
return hashCode;
}
@Override
public DeleteEventActionRequest clone() {
return (DeleteEventActionRequest) super.clone();
}
}<|fim▁end|> | |
<|file_name|>omaha.go<|end_file_name|><|fim▁begin|>// Copyright 2017 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package local<|fim▁hole|>)
// OmahaWrapper wraps the omaha trivial server to log any errors returned by destroy
// and doesn't return anything instead
type OmahaWrapper struct {
*omaha.TrivialServer
}
func (o OmahaWrapper) Destroy() {
if err := o.TrivialServer.Destroy(); err != nil {
plog.Errorf("Error destroying omaha server: %v", err)
}
}<|fim▁end|> |
import (
"github.com/coreos/go-omaha/omaha" |
<|file_name|>index.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from flask import Blueprint, render_template
from flask.ext.security import current_user
mod = Blueprint('documentation', __name__)
@mod.route('/documentation')
@mod.route('/documentation/index')
def doc_index():<|fim▁hole|> return render_template('documentation/index.html',
apikey='token' if current_user.is_anonymous else current_user.apikey)<|fim▁end|> | |
<|file_name|>clock.py<|end_file_name|><|fim▁begin|>"""Clock/event scheduler.
This is a Pygame implementation of a scheduler inspired by the clock
classes in Pyglet.
"""
import heapq
from weakref import ref
from functools import total_ordering
from types import MethodType
__all__ = [
'Clock', 'schedule', 'schedule_interval', 'unschedule'
]
def weak_method(method):
"""Quick weak method ref in case users aren't using Python 3.4"""
selfref = ref(method.__self__)
funcref = ref(method.__func__)
def weakref():
self = selfref()
func = funcref()
if self is None or func is None:
return None
return func.__get__(self)
return weakref
def mkref(o):
if isinstance(o, MethodType):
return weak_method(o)
else:
return ref(o)
@total_ordering
class Event:
"""An event scheduled for a future time.
Events are ordered by their scheduled execution time.
"""
def __init__(self, time, cb, repeat=None):
self.time = time
self.repeat = repeat
self.cb = mkref(cb)
self.name = str(cb)
self.repeat = repeat
def __lt__(self, ano):
return self.time < ano.time
def __eq__(self, ano):
return self.time == ano.time
@property
def callback(self):
return self.cb()
class Clock:
"""A clock used for event scheduling.
When tick() is called, all events scheduled for before now will be called
in order.
tick() would typically be called from the game loop for the default clock.
Additional clocks could be created - for example, a game clock that could
be suspended in pause screens. Your code must take care of calling tick()
or not. You could also run the clock at a different rate if desired, by
scaling dt before passing it to tick().
"""
def __init__(self):
self.t = 0
self.fired = False
self.events = []
self._each_tick = []
def schedule(self, callback, delay):
"""Schedule callback to be called once, at `delay` seconds from now.
:param callback: A parameterless callable to be called.
:param delay: The delay before the call (in clock time / seconds).
"""
heapq.heappush(self.events, Event(self.t + delay, callback, None))
def schedule_unique(self, callback, delay):
"""Schedule callback to be called once, at `delay` seconds from now.
If it was already scheduled, postpone its firing.
:param callback: A parameterless callable to be called.
:param delay: The delay before the call (in clock time / seconds).
"""
self.unschedule(callback)
self.schedule(callback, delay)
def schedule_interval(self, callback, delay):
"""Schedule callback to be called every `delay` seconds.
The first occurrence will be after `delay` seconds.
:param callback: A parameterless callable to be called.
:param delay: The interval in seconds.
"""
heapq.heappush(self.events, Event(self.t + delay, callback, delay))
def unschedule(self, callback):
"""Unschedule the given callback.
If scheduled multiple times all instances will be unscheduled.
"""
self.events = [e for e in self.events if e.callback != callback and e.callback is not None]
heapq.heapify(self.events)
self._each_tick = [e for e in self._each_tick if e() != callback]
def each_tick(self, callback):
"""Schedule a callback to be called every tick.
Unlike the standard scheduler functions, the callable is passed the
elapsed clock time since the last call (the same value passed to tick).
"""
self._each_tick.append(mkref(callback))
def _fire_each_tick(self, dt):
dead = [None]
for r in self._each_tick:
cb = r()
if cb is not None:
self.fired = True
try:
cb(dt)
except Exception:
import traceback
traceback.print_exc()
dead.append(cb)
self._each_tick = [e for e in self._each_tick if e() not in dead]
def tick(self, dt):
"""Update the clock time and fire all scheduled events.
:param dt: The elapsed time in seconds.
"""
self.fired = False
self.t += float(dt)
self._fire_each_tick(dt)
while self.events and self.events[0].time <= self.t:
ev = heapq.heappop(self.events)
cb = ev.callback
if not cb:<|fim▁hole|> self.schedule_interval(cb, ev.repeat)
self.fired = True
try:
cb()
except Exception:
import traceback
traceback.print_exc()
self.unschedule(cb)
# One instance of a clock is available by default, to simplify the API
clock = Clock()
tick = clock.tick
schedule = clock.schedule
schedule_interval = clock.schedule_interval
schedule_unique = clock.schedule_unique
unschedule = clock.unschedule
each_tick = clock.each_tick<|fim▁end|> | continue
if ev.repeat is not None: |
<|file_name|>records.controller.js<|end_file_name|><|fim▁begin|>/**
* @package omeka
* @subpackage neatline
* @copyright 2014 Rector and Board of Visitors, University of Virginia
* @license http://www.apache.org/licenses/LICENSE-2.0.html
*/
Neatline.module('Editor.Exhibit.Records', function(Records) {
Records.Controller = Neatline.Shared.Controller.extend({
slug: 'EDITOR:EXHIBIT:RECORDS',
commands: [
'display',
'load',
'ingest',
'navToList'
],
requests: [
'getModel'
],
/**
* Create the router, collection, and view.
*/<|fim▁hole|> this.view = new Records.View({ slug: this.slug });
},
/**
* Append the list to the editor container.
*
* @param {Object} container: The container element.
*/
display: function(container) {
this.view.showIn(container);
},
/**
* Query for new records.
*
* @param {Object} params: The query parameters.
*/
load: function(params) {
this.view.load(params);
},
/**
* Render a records collection in the list.
*
* @param {Object} records: The collection of records.
*/
ingest: function(records) {
this.view.ingest(records);
},
/**
* Navigate to the record list.
*/
navToList: function() {
this.router.navigate('records', true);
},
/**
* Get a record model from the collection.
*
* @param {Number} id: The record id.
* @param {Function} cb: A callback, called with the model.
*/
getModel: function(id, cb) {
this.view.records.getOrFetch(id, cb);
}
});
});<|fim▁end|> | init: function() {
this.router = new Records.Router(); |
<|file_name|>print_once.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright 2021 Stanford University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software<|fim▁hole|>#
from __future__ import print_function
import pygion
from pygion import task
@task
def main():
pygion.print_once("Hello, Legion!")
if __name__ == '__main__':
main()<|fim▁end|> | # distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. |
<|file_name|>MultiCalendarBase.js<|end_file_name|><|fim▁begin|>/**
* @author Matthew Foster
* @date December 27th 2007
*/
var MultiCalendarBase = Class.create();
Object.extend(Object.extend(MultiCalendarBase.prototype, EventDispatcher.prototype),
{
buildInterface : function(container){
this.container = $(container);
this.view = this.container.down(".view");
this.wrap = this.container.down(".wrap");
this.control = this.container.down(".control");
this.nextControl = this.control.down(".next");
this.prevControl = this.control.down(".previous");
},
setItemIncrement : function(num){
this.increment = num;
},
getItemIncrement : function(){
return this.increment || 0;
},
buildOptions : function(options){
this.options = Object.extend({ rangeOffset : 3, buffer : 25}, options || {});
},
createCalendar : function(element, date){
return new LabeledCalendar(element, date);
},
scrollLeft : function(){
this.wrap.scrollLeft -= this.getItemIncrement();
},
scrollRight : function(){
this.wrap.scrollLeft += this.getItemIncrement();
},
insertCalendar : function(element){
try{
this.view.insertBefore(element, this.view.firstChild);<|fim▁hole|> this.appendCalendar(element);
}
},
appendCalendar : function(element){
this.view.appendChild(element);
},
getInitialRange : function(){
var date = new Date();
var start = new GregorianCalendar(date.getFullYear(), date.getMonth() -2);
var end = new GregorianCalendar(date.getFullYear(),date.getMonth()+this.options.rangeOffset );
return $A(new GregorianCalendarRange(start, end));
}
}
);<|fim▁end|> | }
catch(e){ |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.