code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test configs for batch_to_space_nd."""
import numpy as np
import tensorflow.compat.v1 as tf
from tensorflow.lite.testing.zip_test_utils import create_tensor_data
from tensorflow.lite.testing.zip_test_utils import make_zip_of_tests
from tensorflow.lite.testing.zip_test_utils import register_make_test_function
@register_make_test_function()
def make_batch_to_space_nd_tests(options):
  """Make a set of tests to do batch_to_space_nd.

  Args:
    options: zip-test options object; `options.run_with_flex` is read below
      to enable an extra non-4D case, and everything is forwarded to
      `make_zip_of_tests`.
  """
  test_parameters = [
      # General 4D case: several dtypes, block shapes and crops, with
      # block_shape/crops supplied either as constants or as input tensors.
      {
          "dtype": [tf.float32, tf.int64, tf.int32],
          "input_shape": [[12, 3, 3, 1]],
          "block_shape": [[1, 4], [2, 2], [3, 4]],
          "crops": [[[0, 0], [0, 0]], [[1, 1], [1, 1]]],
          "constant_block_shape": [True, False],
          "constant_crops": [True, False],
          "dynamic_range_quantize": [False],
      },
      # Single batch (no-op)
      {
          "dtype": [tf.float32],
          "input_shape": [[1, 3, 3, 1]],
          "block_shape": [[1, 1]],
          "crops": [[[0, 0], [0, 0]], [[1, 1], [1, 1]]],
          "constant_block_shape": [True],
          "constant_crops": [True],
          "dynamic_range_quantize": [True, False],
      },
      # 3D use case.
      {
          "dtype": [tf.float32],
          "input_shape": [[1, 3, 3]],
          "block_shape": [[1]],
          "crops": [[[0, 0]], [[1, 1]]],
          "constant_block_shape": [True],
          "constant_crops": [True],
          "dynamic_range_quantize": [True, False],
      },
  ]
  if options.run_with_flex:
    # Non-4D use case: 1 batch dimension, 3 spatial dimensions, 2 others.
    test_parameters = test_parameters + [{
        "dtype": [tf.float32],
        "input_shape": [[8, 2, 2, 2, 1, 1]],
        "block_shape": [[2, 2, 2]],
        "crops": [[[0, 0], [0, 0], [0, 0]]],
        "constant_block_shape": [True, False],
        "constant_crops": [True, False],
        "dynamic_range_quantize": [False],
    }]

  def build_graph(parameters):
    """Build a batch_to_space graph given `parameters`.

    NOTE: the order in which placeholders are appended to `input_tensors`
    below (input, then optionally block_shape, then optionally crops) must
    match the order in which `build_inputs` appends the values it feeds.
    """
    input_tensor = tf.compat.v1.placeholder(
        dtype=parameters["dtype"],
        name="input",
        shape=parameters["input_shape"])
    input_tensors = [input_tensor]
    # Get block_shape either as a const or as a placeholder (tensor).
    if parameters["constant_block_shape"]:
      block_shape = parameters["block_shape"]
    else:
      shape = [len(parameters["block_shape"])]
      block_shape = tf.compat.v1.placeholder(
          dtype=tf.int32, name="shape", shape=shape)
      input_tensors.append(block_shape)
    # Get crops either as a const or as a placeholder (tensor).
    if parameters["constant_crops"]:
      crops = parameters["crops"]
    else:
      shape = [len(parameters["crops"]), 2]
      crops = tf.compat.v1.placeholder(
          dtype=tf.int32, name="crops", shape=shape)
      input_tensors.append(crops)
    out = tf.batch_to_space_nd(input_tensor, block_shape, crops)
    return input_tensors, [out]

  def build_inputs(parameters, sess, inputs, outputs):
    """Feed random input data (plus non-constant block_shape/crops) and run."""
    values = [
        create_tensor_data(parameters["dtype"], parameters["input_shape"])
    ]
    if not parameters["constant_block_shape"]:
      values.append(np.array(parameters["block_shape"]))
    if not parameters["constant_crops"]:
      values.append(np.array(parameters["crops"]))
    return values, sess.run(outputs, feed_dict=dict(zip(inputs, values)))

  make_zip_of_tests(options, test_parameters, build_graph, build_inputs)
| tensorflow/tensorflow | tensorflow/lite/testing/op_tests/batch_to_space_nd.py | Python | apache-2.0 | 4,144 |
<?php
/*
* This file is part of the Respect\Rest package.
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Respect\Rest\Routines;
use Respect\Rest\Request;
/** Routine that runs before the route matching. */
interface ProxyableWhen
{
    /**
     * Executed to check if the route matches.
     *
     * @param Request $request the incoming request being matched
     * @param mixed   $params  route parameters (shape not documented here;
     *                         presumably the values extracted from the
     *                         route path — confirm against callers)
     */
    public function when(Request $request, $params);
}
| googlecodelabs/cloud-lamp-migration | web/core/vendor/respect/rest/library/Respect/Rest/Routines/ProxyableWhen.php | PHP | apache-2.0 | 436 |
using FluentValidation;
using Nop.Admin.Models.Settings;
using Nop.Core.Domain.Orders;
using Nop.Services.Localization;
using Nop.Web.Framework.Validators;
namespace Nop.Admin.Validators.Settings
{
/// <summary>
/// Validates the reward points settings model: the order statuses that award
/// or cancel reward points must not be <see cref="OrderStatus.Pending"/>.
/// </summary>
public class RewardPointsSettingsValidator : BaseNopValidator<RewardPointsSettingsModel>
{
    public RewardPointsSettingsValidator(ILocalizationService localizationService)
    {
        // Both rules reject the same sentinel status.
        var pendingStatusId = (int)OrderStatus.Pending;

        RuleFor(x => x.PointsForPurchases_Awarded)
            .NotEqual(pendingStatusId)
            .WithMessage(localizationService.GetResource("Admin.Configuration.Settings.RewardPoints.PointsForPurchases_Awarded.Pending"));

        RuleFor(x => x.PointsForPurchases_Canceled)
            .NotEqual(pendingStatusId)
            .WithMessage(localizationService.GetResource("Admin.Configuration.Settings.RewardPoints.PointsForPurchases_Canceled.Pending"));
    }
}
} | jornfilho/nopCommerce | source/Presentation/Nop.Web/Administration/Validators/Settings/RewardPointsSettingsValidator.cs | C# | apache-2.0 | 849 |
package ca.uhn.fhir.model.dstu3.composite;
import java.util.List;
import ca.uhn.fhir.model.api.ICompositeDatatype;
import ca.uhn.fhir.model.api.IElement;
import ca.uhn.fhir.model.api.annotation.Child;
import ca.uhn.fhir.model.api.annotation.DatatypeDef;
import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.model.base.composite.BaseCodingDt;
import ca.uhn.fhir.model.primitive.BooleanDt;
import ca.uhn.fhir.model.primitive.CodeDt;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.model.primitive.UriDt;
/**
 * HAPI/FHIR <b>CodingDt</b> Datatype
 * ()
 *
 * <p>
 * <b>Definition:</b>
 * A reference to a code defined by a terminology system
 * </p>
 *
 * <p>
 * <b>Requirements:</b>
 * References to codes are very common in healthcare models
 * </p>
 */
@DatatypeDef(name="CodingDt")
public class CodingDt
        extends BaseCodingDt implements ICompositeDatatype, org.hl7.fhir.instance.model.api.IBaseCoding {

    /**
     * Constructor
     */
    public CodingDt() {
        // nothing
    }

    /**
     * Creates a new Coding with the given system and code
     */
    public CodingDt(String theSystem, String theCode) {
        setSystem(theSystem);
        setCode(theCode);
    }

    /**
     * Copy constructor: Creates a new Coding with the system and code copied out of the given coding
     */
    public CodingDt(BaseCodingDt theCoding) {
        // Only system and code are copied; version/display/userSelected are not.
        this(theCoding.getSystemElement().getValueAsString(), theCoding.getCodeElement().getValue());
    }

    // Identity of the code system (e.g. a canonical URI).
    @Child(name="system", type=UriDt.class, order=0, min=0, max=1, summary=true, modifier=false)
    @Description(
        shortDefinition="",
        formalDefinition="The identification of the code system that defines the meaning of the symbol in the code."
    )
    private UriDt mySystem;

    // Version of the code system used when this code was chosen.
    @Child(name="version", type=StringDt.class, order=1, min=0, max=1, summary=true, modifier=false)
    @Description(
        shortDefinition="",
        formalDefinition="The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes is consistent across versions. However this cannot consistently be assured. and when the meaning is not guaranteed to be consistent, the version SHOULD be exchanged"
    )
    private StringDt myVersion;

    // The code symbol itself.
    @Child(name="code", type=CodeDt.class, order=2, min=0, max=1, summary=true, modifier=false)
    @Description(
        shortDefinition="",
        formalDefinition="A symbol in syntax defined by the system. The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. post-coordination)"
    )
    private CodeDt myCode;

    // Human-readable representation of the code's meaning.
    @Child(name="display", type=StringDt.class, order=3, min=0, max=1, summary=true, modifier=false)
    @Description(
        shortDefinition="",
        formalDefinition="A representation of the meaning of the code in the system, following the rules of the system"
    )
    private StringDt myDisplay;

    // Whether the coding was chosen directly by a user.
    @Child(name="userSelected", type=BooleanDt.class, order=4, min=0, max=1, summary=true, modifier=false)
    @Description(
        shortDefinition="",
        formalDefinition="Indicates that this coding was chosen by a user directly - i.e. off a pick list of available items (codes or displays)"
    )
    private BooleanDt myUserSelected;

    @Override
    public boolean isEmpty() {
        // Empty when the base element is empty and none of the five child fields are populated.
        return super.isBaseEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty( mySystem, myVersion, myCode, myDisplay, myUserSelected);
    }

    @Override
    public <T extends IElement> List<T> getAllPopulatedChildElementsOfType(Class<T> theType) {
        return ca.uhn.fhir.util.ElementUtil.allPopulatedChildElements(theType, mySystem, myVersion, myCode, myDisplay, myUserSelected);
    }

    /**
     * Gets the value(s) for <b>system</b> ().
     * creating it if it does
     * not exist. Will not return <code>null</code>.
     *
     * <p>
     * <b>Definition:</b>
     * The identification of the code system that defines the meaning of the symbol in the code.
     * </p>
     */
    public UriDt getSystemElement() {
        // Lazily initialize so this getter never returns null.
        if (mySystem == null) {
            mySystem = new UriDt();
        }
        return mySystem;
    }

    /**
     * Gets the value(s) for <b>system</b> ().
     * creating it if it does
     * not exist. Will not return <code>null</code>.
     *
     * <p>
     * <b>Definition:</b>
     * The identification of the code system that defines the meaning of the symbol in the code.
     * </p>
     */
    public String getSystem() {
        return getSystemElement().getValue();
    }

    /**
     * Sets the value(s) for <b>system</b> ()
     *
     * <p>
     * <b>Definition:</b>
     * The identification of the code system that defines the meaning of the symbol in the code.
     * </p>
     */
    public CodingDt setSystem(UriDt theValue) {
        mySystem = theValue;
        return this;
    }

    /**
     * Sets the value for <b>system</b> ()
     *
     * <p>
     * <b>Definition:</b>
     * The identification of the code system that defines the meaning of the symbol in the code.
     * </p>
     */
    public CodingDt setSystem( String theUri) {
        mySystem = new UriDt(theUri);
        return this;
    }

    /**
     * Gets the value(s) for <b>version</b> ().
     * creating it if it does
     * not exist. Will not return <code>null</code>.
     *
     * <p>
     * <b>Definition:</b>
     * The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes is consistent across versions. However this cannot consistently be assured. and when the meaning is not guaranteed to be consistent, the version SHOULD be exchanged
     * </p>
     */
    public StringDt getVersionElement() {
        if (myVersion == null) {
            myVersion = new StringDt();
        }
        return myVersion;
    }

    /**
     * Gets the value(s) for <b>version</b> ().
     * creating it if it does
     * not exist. Will not return <code>null</code>.
     *
     * <p>
     * <b>Definition:</b>
     * The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes is consistent across versions. However this cannot consistently be assured. and when the meaning is not guaranteed to be consistent, the version SHOULD be exchanged
     * </p>
     */
    public String getVersion() {
        return getVersionElement().getValue();
    }

    /**
     * Sets the value(s) for <b>version</b> ()
     *
     * <p>
     * <b>Definition:</b>
     * The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes is consistent across versions. However this cannot consistently be assured. and when the meaning is not guaranteed to be consistent, the version SHOULD be exchanged
     * </p>
     */
    public CodingDt setVersion(StringDt theValue) {
        myVersion = theValue;
        return this;
    }

    /**
     * Sets the value for <b>version</b> ()
     *
     * <p>
     * <b>Definition:</b>
     * The version of the code system which was used when choosing this code. Note that a well-maintained code system does not need the version reported, because the meaning of codes is consistent across versions. However this cannot consistently be assured. and when the meaning is not guaranteed to be consistent, the version SHOULD be exchanged
     * </p>
     */
    public CodingDt setVersion( String theString) {
        myVersion = new StringDt(theString);
        return this;
    }

    /**
     * Gets the value(s) for <b>code</b> ().
     * creating it if it does
     * not exist. Will not return <code>null</code>.
     *
     * <p>
     * <b>Definition:</b>
     * A symbol in syntax defined by the system. The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. post-coordination)
     * </p>
     */
    public CodeDt getCodeElement() {
        if (myCode == null) {
            myCode = new CodeDt();
        }
        return myCode;
    }

    /**
     * Gets the value(s) for <b>code</b> ().
     * creating it if it does
     * not exist. Will not return <code>null</code>.
     *
     * <p>
     * <b>Definition:</b>
     * A symbol in syntax defined by the system. The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. post-coordination)
     * </p>
     */
    public String getCode() {
        return getCodeElement().getValue();
    }

    /**
     * Sets the value(s) for <b>code</b> ()
     *
     * <p>
     * <b>Definition:</b>
     * A symbol in syntax defined by the system. The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. post-coordination)
     * </p>
     */
    public CodingDt setCode(CodeDt theValue) {
        myCode = theValue;
        return this;
    }

    /**
     * Sets the value for <b>code</b> ()
     *
     * <p>
     * <b>Definition:</b>
     * A symbol in syntax defined by the system. The symbol may be a predefined code or an expression in a syntax defined by the coding system (e.g. post-coordination)
     * </p>
     */
    public CodingDt setCode( String theCode) {
        myCode = new CodeDt(theCode);
        return this;
    }

    /**
     * Gets the value(s) for <b>display</b> ().
     * creating it if it does
     * not exist. Will not return <code>null</code>.
     *
     * <p>
     * <b>Definition:</b>
     * A representation of the meaning of the code in the system, following the rules of the system
     * </p>
     */
    public StringDt getDisplayElement() {
        if (myDisplay == null) {
            myDisplay = new StringDt();
        }
        return myDisplay;
    }

    /**
     * Gets the value(s) for <b>display</b> ().
     * creating it if it does
     * not exist. Will not return <code>null</code>.
     *
     * <p>
     * <b>Definition:</b>
     * A representation of the meaning of the code in the system, following the rules of the system
     * </p>
     */
    public String getDisplay() {
        return getDisplayElement().getValue();
    }

    /**
     * Sets the value(s) for <b>display</b> ()
     *
     * <p>
     * <b>Definition:</b>
     * A representation of the meaning of the code in the system, following the rules of the system
     * </p>
     */
    public CodingDt setDisplay(StringDt theValue) {
        myDisplay = theValue;
        return this;
    }

    /**
     * Sets the value for <b>display</b> ()
     *
     * <p>
     * <b>Definition:</b>
     * A representation of the meaning of the code in the system, following the rules of the system
     * </p>
     */
    public CodingDt setDisplay( String theString) {
        myDisplay = new StringDt(theString);
        return this;
    }

    /**
     * Gets the value(s) for <b>userSelected</b> ().
     * creating it if it does
     * not exist. Will not return <code>null</code>.
     *
     * <p>
     * <b>Definition:</b>
     * Indicates that this coding was chosen by a user directly - i.e. off a pick list of available items (codes or displays)
     * </p>
     */
    public BooleanDt getUserSelectedElement() {
        if (myUserSelected == null) {
            myUserSelected = new BooleanDt();
        }
        return myUserSelected;
    }

    /**
     * Gets the value(s) for <b>userSelected</b> ().
     * creating it if it does
     * not exist. Will not return <code>null</code>.
     *
     * <p>
     * <b>Definition:</b>
     * Indicates that this coding was chosen by a user directly - i.e. off a pick list of available items (codes or displays)
     * </p>
     */
    public Boolean getUserSelected() {
        return getUserSelectedElement().getValue();
    }

    /**
     * Sets the value(s) for <b>userSelected</b> ()
     *
     * <p>
     * <b>Definition:</b>
     * Indicates that this coding was chosen by a user directly - i.e. off a pick list of available items (codes or displays)
     * </p>
     */
    public CodingDt setUserSelected(BooleanDt theValue) {
        myUserSelected = theValue;
        return this;
    }

    /**
     * Sets the value for <b>userSelected</b> ()
     *
     * <p>
     * <b>Definition:</b>
     * Indicates that this coding was chosen by a user directly - i.e. off a pick list of available items (codes or displays)
     * </p>
     */
    public CodingDt setUserSelected( boolean theBoolean) {
        myUserSelected = new BooleanDt(theBoolean);
        return this;
    }

}
/*
* Licensed to DuraSpace under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership.
*
* DuraSpace licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fcrepo.kernel.api.exception;
/**
 * Exception indicating that the requested transaction was not found.
 *
 * @author bbpennel
 */
public class TransactionNotFoundException extends TransactionRuntimeException {

    private static final long serialVersionUID = 1L;

    /**
     * Constructor with a detail message only.
     *
     * @param msg the message
     */
    public TransactionNotFoundException(final String msg) {
        super(msg);
    }

    /**
     * Constructor with a root cause only.
     *
     * @param rootCause the root cause
     */
    public TransactionNotFoundException(final Throwable rootCause) {
        super(rootCause);
    }

    /**
     * Constructor with both a detail message and a root cause.
     *
     * @param msg the message
     * @param rootCause the root cause
     */
    public TransactionNotFoundException(final String msg, final Throwable rootCause) {
        super(msg, rootCause);
    }
}
| dbernstein/fcrepo4 | fcrepo-kernel-api/src/main/java/org/fcrepo/kernel/api/exception/TransactionNotFoundException.java | Java | apache-2.0 | 1,665 |
// Node's built-in path module (needs no further explanation)
var path = require('path')
// Load config/index.js
var config = require('../config')
// Use some small utility helpers
var utils = require('./utils')
// Load webpack
var webpack = require('webpack')
// Load the webpack configuration merging tool
var merge = require('webpack-merge')
// Load webpack.base.conf.js
var baseWebpackConfig = require('./webpack.base.conf')
// A webpack extension that can extract some code and separate it from the bundle.
// If we want webpack to keep CSS and JS in separate files rather than one bundle,
// we need this plugin.
var ExtractTextPlugin = require('extract-text-webpack-plugin')
// A plugin that can inject assets into html and create new .html files
var HtmlWebpackPlugin = require('html-webpack-plugin')
var env = config.build.env
// Merge with webpack.base.conf.js
var webpackConfig = merge(baseWebpackConfig, {
  // Loaders to use
  module: {
    loaders: utils.styleLoaders({ sourceMap: config.build.productionSourceMap, extract: true })
  },
  // Whether to use the #source-map devtool; see earlier DDFE articles for details
  devtool: config.build.productionSourceMap ? '#source-map' : false,
  output: {
    // Build output directory
    path: config.build.assetsRoot,
    // Build output file name
    // A suffix such as :6 after "hash" controls how many hash digits are used
    filename: utils.assetsPath('js/[name].[chunkhash].js'),
    // Output file name for chunks that have no explicit name
    chunkFilename: utils.assetsPath('js/[id].[chunkhash].js')
  },
  vue: {
    // Loaders used when compiling .vue files
    loaders: utils.cssLoaders({
      sourceMap: config.build.productionSourceMap,
      extract: true
    })
  },
  // Plugins to use
  plugins: [
    // http://vuejs.github.io/vue-loader/en/workflow/production.html
    // DefinePlugin receives strings to insert into the code, so you can pass
    // JS expressions as strings if you need to
    new webpack.DefinePlugin({
      'process.env': env
    }),
    // Minify JS (it can minify CSS as well)
    new webpack.optimize.UglifyJsPlugin({
      compress: {
        warnings: false
      }
    }),
    new webpack.optimize.OccurrenceOrderPlugin(),
    // Extract the CSS into its own file
    new ExtractTextPlugin(utils.assetsPath('css/[name].[contenthash].css')),
    // The input/output .html file
    new HtmlWebpackPlugin({
      filename: config.build.index,
      template: 'index.html',
      // Whether to inject into the html
      inject: true,
      // Minification options
      minify: {
        removeComments: true,
        collapseWhitespace: true,
        removeAttributeQuotes: true
        // more options:
        // https://github.com/kangax/html-minifier#options-quick-reference
      },
      // necessary to consistently work with multiple chunks via CommonsChunkPlugin
      chunksSortMode: 'dependency'
    }),
    // Extract modules pulled in from node_modules into a shared "vendor" chunk
    new webpack.optimize.CommonsChunkPlugin({
      name: 'vendor',
      minChunks: function (module, count) {
        // any required modules inside node_modules are extracted to vendor
        return (
          module.resource &&
          /\.js$/.test(module.resource) &&
          module.resource.indexOf(
            path.join(__dirname, '../node_modules')
          ) === 0
        )
      }
    }),
    // extract webpack runtime and module manifest to its own file in order to
    // prevent vendor hash from being updated whenever app bundle is updated
    new webpack.optimize.CommonsChunkPlugin({
      name: 'manifest',
      chunks: ['vendor']
    })
  ]
})
// Use the configuration below when gzip is enabled
if (config.build.productionGzip) {
  // Load the compression-webpack-plugin plugin
  var CompressionWebpackPlugin = require('compression-webpack-plugin')
  // Push the plugin below into webpackConfig.plugins
  webpackConfig.plugins.push(
    // Compress using compression-webpack-plugin
    new CompressionWebpackPlugin({
      asset: '[path].gz[query]',
      algorithm: 'gzip',
      test: new RegExp(
        '\\.(' +
        config.build.productionGzipExtensions.join('|') +
        ')$'
      ),
      threshold: 10240,
      minRatio: 0.8
    })
  )
}
module.exports = webpackConfig
| huang303513/WebBasicCommonDemos | vue-cli2.0的webpack配置分析/lessproject/build/webpack.prod.conf.js | JavaScript | apache-2.0 | 4,179 |
// Example repl is a simple REPL (read-eval-print loop) for GO using
// http://github.com/0xfaded/eval to the heavy lifting to implement
// the eval() part.
//
// The intent here is to show how more to use the library, rather than
// be a full-featured REPL.
//
// A more complete REPL including command history, tab completion and
// readline editing is available as a separate package:
// http://github.com/rocky/go-fish
//
// (rocky) My intent here is also to have something that I can debug in
// the ssa-debugger tortoise/gub.sh. Right now that can't handle the
// unsafe package, pointers, and calls to C code. So that let's out
// go-gnureadline and lineedit.
package main
import (
"bufio"
"fmt"
"go/parser"
"io"
"os"
"reflect"
"strings"
"github.com/mailgun/godebug/Godeps/_workspace/src/github.com/0xfaded/eval"
)
// Simple replacement for GNU readline
func readline(prompt string, in *bufio.Reader) (string, error) {
fmt.Printf(prompt)
line, err := in.ReadString('\n')
if err == nil {
line = strings.TrimRight(line, "\r\n")
}
return line, err
}
// intro_text prints a short usage banner for the REPL to stdout.
func intro_text() {
	fmt.Printf(`=== A simple Go eval REPL ===
Results of expression are stored in variable slice "results".
The environment is stored in global variable "env".
Enter expressions to be evaluated at the "go>" prompt.
To see all results, type: "results".
To quit, enter: "quit" or Ctrl-D (EOF).
`)
}
// REPL is a read, eval, and print loop: it reads lines from stdin, parses,
// type-checks and evaluates each one with the eval package, prints the
// result, and appends it to the "results" slice exposed in env. It exits on
// the line "quit" or on EOF; any non-EOF read error panics.
func REPL(env *eval.SimpleEnv) {
	var err error
	// A place to store result values of expressions entered
	// interactively
	results := make([]interface{}, 0, 10)
	env.Vars["results"] = reflect.ValueOf(&results)
	exprs := 0
	in := bufio.NewReader(os.Stdin)
	line, err := readline("go> ", in)
	for line != "quit" {
		if err != nil {
			if err == io.EOF {
				break
			}
			panic(err)
		}
		if expr, err := parser.ParseExpr(line); err != nil {
			// Show the position marker for the parse error when available.
			if pair := eval.FormatErrorPos(line, err.Error()); len(pair) == 2 {
				fmt.Println(pair[0])
				fmt.Println(pair[1])
			}
			fmt.Printf("parse error: %s\n", err)
		} else if cexpr, errs := eval.CheckExpr(expr, env); len(errs) != 0 {
			for _, cerr := range errs {
				fmt.Printf("check error: %v\n", cerr)
			}
		} else if vals, err := eval.EvalExpr(cexpr, env); err != nil {
			fmt.Printf("panic: %s\n", err)
		} else if len(vals) == 0 {
			// Expression produced no values.
			fmt.Printf("Kind=Slice\nvoid\n")
		} else if len(vals) == 1 {
			// Single value: report its kind/type and record it in results.
			value := (vals)[0]
			if value.IsValid() {
				kind := value.Kind().String()
				typ := value.Type().String()
				if typ != kind {
					fmt.Printf("Kind = %v\n", kind)
					fmt.Printf("Type = %v\n", typ)
				} else {
					fmt.Printf("Kind = Type = %v\n", kind)
				}
				fmt.Printf("results[%d] = %s\n", exprs, eval.Inspect(value))
				exprs += 1
				results = append(results, (vals)[0].Interface())
			} else {
				fmt.Printf("%s\n", value)
			}
		} else {
			// Multiple return values: print them comma-separated and store
			// the whole slice as a single results entry.
			fmt.Printf("Kind = Multi-Value\n")
			size := len(vals)
			for i, v := range vals {
				fmt.Printf("%s", eval.Inspect(v))
				if i < size-1 {
					fmt.Printf(", ")
				}
			}
			fmt.Printf("\n")
			exprs += 1
			results = append(results, vals)
		}
		line, err = readline("go> ", in)
	}
}
// Demo interfaces and concrete types used by makeBogusEnv to exercise
// interface values in the REPL environment. Note that XI and ZI declare the
// same method x(), so X and Z each satisfy both XI and ZI, while Y only
// satisfies YI.
type XI interface {
	x()
}
type YI interface {
	y()
}
type ZI interface {
	x()
}
type X int
type Y int
type Z int

func (X) x() {}
func (Y) y() {}
func (Z) x() {}
// Create an eval.Env environment to use in evaluation.
// This is a bit ugly here, because we are rolling everything by hand, but
// we want some sort of environment to show off in demo'ing.
// The artificial environment we create here consists of
//   fmt:
//      fns: fmt.Println, fmt.Printf
//   os:
//      types: MyInt
//      vars: Stdout, Args
//   main:
//      type Alice
//      var alice, aliceptr
//
//  (REPL also adds var results to main)
//
// See make_env in github.com/rocky/go-fish for an automated way to
// create more complete environment from a starting import.
func makeBogusEnv() *eval.SimpleEnv {

	// A couple of things from the fmt package.
	var fmt_funcs map[string]reflect.Value = make(map[string]reflect.Value)
	fmt_funcs["Println"] = reflect.ValueOf(fmt.Println)
	fmt_funcs["Printf"] = reflect.ValueOf(fmt.Printf)

	// A simple type for demo
	type MyInt int

	// A stripped down package environment. See
	// http://github.com/rocky/go-fish and repl_imports.go for a more
	// complete environment.
	pkgs := map[string]eval.Env{
		"fmt": &eval.SimpleEnv{
			Vars:   make(map[string]reflect.Value),
			Consts: make(map[string]reflect.Value),
			Funcs:  fmt_funcs,
			Types:  make(map[string]reflect.Type),
			Pkgs:   nil,
		}, "os": &eval.SimpleEnv{
			Vars: map[string]reflect.Value{
				"Stdout": reflect.ValueOf(&os.Stdout),
				"Args":   reflect.ValueOf(&os.Args)},
			Consts: make(map[string]reflect.Value),
			Funcs:  make(map[string]reflect.Value),
			Types: map[string]reflect.Type{
				"MyInt": reflect.TypeOf(*new(MyInt))},
			Pkgs: nil,
		},
	}

	mainEnv := eval.MakeSimpleEnv()
	mainEnv.Pkgs = pkgs

	// Some "alice" things for testing
	type Alice struct {
		Bob    int
		Secret string
	}
	type R rune

	alice := Alice{1, "shhh"}
	alicePtr := &alice
	foo := 10
	ints := []int{1, 2, 3, 4}
	add := func(a, b int) int {
		return a + b
	}
	sum := func(as ...int) int {
		r := 0
		for _, a := range as {
			r += a
		}
		return r
	}

	// Register the demo variables, constants, functions and types under "main".
	mainEnv.Vars["alice"] = reflect.ValueOf(&alice)
	mainEnv.Vars["alicePtr"] = reflect.ValueOf(&alicePtr)
	mainEnv.Vars["foo"] = reflect.ValueOf(&foo)
	mainEnv.Vars["ints"] = reflect.ValueOf(&ints)
	mainEnv.Consts["bar"] = reflect.ValueOf(eval.NewConstInt64(5))
	mainEnv.Funcs["add"] = reflect.ValueOf(add)
	mainEnv.Funcs["sum"] = reflect.ValueOf(sum)
	mainEnv.Types["Alice"] = reflect.TypeOf(Alice{})
	mainEnv.Types["R"] = reflect.TypeOf(R(0))

	// Interface values: allocate a pointer-to-interface so reflect sees the
	// interface type (via Elem) rather than the concrete type stored in it.
	var xi *XI = new(XI)
	var yi *YI = new(YI)
	var zi *ZI = new(ZI)
	*xi = XI(X(0))
	*yi = YI(Y(0))
	*zi = ZI(Z(0))
	mainEnv.Types["XI"] = reflect.TypeOf(xi).Elem()
	mainEnv.Types["YI"] = reflect.TypeOf(yi).Elem()
	mainEnv.Types["ZI"] = reflect.TypeOf(zi).Elem()
	mainEnv.Types["X"] = reflect.TypeOf(X(0))
	mainEnv.Types["Y"] = reflect.TypeOf(Y(0))
	mainEnv.Types["Z"] = reflect.TypeOf(Z(0))

	return mainEnv
}
// main builds the demo environment, prints the intro banner and runs the REPL.
func main() {
	env := makeBogusEnv()
	intro_text()
	REPL(env)
}
| CodyGuo/godebug | Godeps/_workspace/src/github.com/0xfaded/eval/demo/repl.go | GO | apache-2.0 | 6,199 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.apache.hadoop.hbase.io.encoding;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.nio.ByteBuff;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ObjectIntPair;
import org.apache.yetus.audience.InterfaceAudience;
/**
* Compress using:
* - store size of common prefix
* - save column family once, it is same within HFile
* - use integer compression for key, value and prefix (7-bit encoding)
* - use bits to avoid duplication key length, value length
* and type if it same as previous
* - store in 3 bits length of timestamp field
* - allow diff in timestamp instead of actual value
*
* Format:
* - 1 byte: flag
* - 1-5 bytes: key length (only if FLAG_SAME_KEY_LENGTH is not set in flag)
* - 1-5 bytes: value length (only if FLAG_SAME_VALUE_LENGTH is not set in flag)
* - 1-5 bytes: prefix length
* - ... bytes: rest of the row (if prefix length is small enough)
* - ... bytes: qualifier (or suffix depending on prefix length)
* - 1-8 bytes: timestamp or diff
* - 1 byte: type (only if FLAG_SAME_TYPE is not set in the flag)
* - ... bytes: value
*/
@InterfaceAudience.Private
public class DiffKeyDeltaEncoder extends BufferedDataBlockEncoder {
  // Per-cell flag byte bits (see the encoding format in the class javadoc).
  static final int FLAG_SAME_KEY_LENGTH = 1;        // key length same as previous cell
  static final int FLAG_SAME_VALUE_LENGTH = 1 << 1; // value length same as previous cell
  static final int FLAG_SAME_TYPE = 1 << 2;         // type byte same as previous cell
  static final int FLAG_TIMESTAMP_IS_DIFF = 1 << 3; // timestamp stored as diff vs previous
  // Bits 4-6 encode (number of timestamp bytes - 1); extracted with
  // SHIFT_TIMESTAMP_LENGTH in uncompressSingleKeyValue.
  static final int MASK_TIMESTAMP_LENGTH = (1 << 4) | (1 << 5) | (1 << 6);
  static final int SHIFT_TIMESTAMP_LENGTH = 4;
  static final int FLAG_TIMESTAMP_SIGN = 1 << 7;    // stored timestamp (or diff) is negated
  /**
   * Decompression state for the Diff encoding. On top of the common
   * {@link CompressionState} it carries the previous cell's timestamp (so a
   * timestamp diff can be applied) and the column family bytes prefixed with
   * their length byte (the family is stored only once per block).
   */
  protected static class DiffCompressionState extends CompressionState {
    long timestamp;
    byte[] familyNameWithSize;

    @Override
    protected void readTimestamp(ByteBuffer in) {
      timestamp = in.getLong();
    }

    @Override
    void copyFrom(CompressionState state) {
      super.copyFrom(state);
      DiffCompressionState state2 = (DiffCompressionState) state;
      timestamp = state2.timestamp;
    }
  }
  /**
   * Reads one Diff-encoded cell from {@code source} and appends the fully
   * materialized KeyValue bytes to {@code buffer}, updating {@code state} so
   * that the following cell can be decoded relative to this one.
   *
   * @param source compressed input stream positioned at the next cell
   *        (for the first cell, positioned at the shared column family)
   * @param buffer destination buffer for the uncompressed KeyValue bytes
   * @param state decoding state carried over from the previous cell
   * @throws IOException if the stream cannot be read
   * @throws EncoderBufferTooSmallException if the destination buffer cannot
   *         hold the decoded cell
   */
  private void uncompressSingleKeyValue(DataInputStream source,
      ByteBuffer buffer,
      DiffCompressionState state)
      throws IOException, EncoderBufferTooSmallException {
    // read the column family at the beginning
    if (state.isFirst()) {
      state.familyLength = source.readByte();
      state.familyNameWithSize =
          new byte[(state.familyLength & 0xff) + KeyValue.FAMILY_LENGTH_SIZE];
      state.familyNameWithSize[0] = state.familyLength;
      int read = source.read(state.familyNameWithSize, KeyValue.FAMILY_LENGTH_SIZE,
          state.familyLength);
      assert read == state.familyLength;
    }

    // read flag
    byte flag = source.readByte();

    // read key/value/common lengths (reuse previous lengths when flagged)
    int keyLength;
    int valueLength;
    if ((flag & FLAG_SAME_KEY_LENGTH) != 0) {
      keyLength = state.keyLength;
    } else {
      keyLength = ByteBufferUtils.readCompressedInt(source);
    }
    if ((flag & FLAG_SAME_VALUE_LENGTH) != 0) {
      valueLength = state.valueLength;
    } else {
      valueLength = ByteBufferUtils.readCompressedInt(source);
    }
    int commonPrefix = ByteBufferUtils.readCompressedInt(source);

    // create KeyValue buffer and fill it prefix
    int keyOffset = buffer.position();
    ensureSpace(buffer, keyLength + valueLength + KeyValue.ROW_OFFSET);
    buffer.putInt(keyLength);
    buffer.putInt(valueLength);

    // copy common from previous key
    if (commonPrefix > 0) {
      ByteBufferUtils.copyFromBufferToBuffer(buffer, buffer, state.prevOffset
          + KeyValue.ROW_OFFSET, commonPrefix);
    }

    // copy the rest of the key from the buffer
    int keyRestLength;
    if (state.isFirst() || commonPrefix <
        state.rowLength + KeyValue.ROW_LENGTH_SIZE) {
      // omit the family part of the key, it is always the same
      short rowLength;
      int rowRestLength;

      // check length of row
      if (commonPrefix < KeyValue.ROW_LENGTH_SIZE) {
        // not yet copied, do it now
        ByteBufferUtils.copyFromStreamToBuffer(buffer, source,
            KeyValue.ROW_LENGTH_SIZE - commonPrefix);
        ByteBufferUtils.skip(buffer, -KeyValue.ROW_LENGTH_SIZE);
        rowLength = buffer.getShort();
        rowRestLength = rowLength;
      } else {
        // already in buffer, just read it
        rowLength = buffer.getShort(keyOffset + KeyValue.ROW_OFFSET);
        rowRestLength = rowLength + KeyValue.ROW_LENGTH_SIZE - commonPrefix;
      }

      // copy the rest of row
      ByteBufferUtils.copyFromStreamToBuffer(buffer, source, rowRestLength);
      state.rowLength = rowLength;

      // copy the column family (stored once in state, see above)
      buffer.put(state.familyNameWithSize);
      keyRestLength = keyLength - rowLength -
          state.familyNameWithSize.length -
          (KeyValue.ROW_LENGTH_SIZE + KeyValue.TIMESTAMP_TYPE_SIZE);
    } else {
      // prevRowWithSizeLength is the same as on previous row
      keyRestLength = keyLength - commonPrefix - KeyValue.TIMESTAMP_TYPE_SIZE;
    }
    // copy the rest of the key, after column family -> column qualifier
    ByteBufferUtils.copyFromStreamToBuffer(buffer, source, keyRestLength);

    // handle timestamp: length is encoded in flag bits 4-6, value may be
    // negated and/or stored as a diff against the previous timestamp
    int timestampFitsInBytes =
        ((flag & MASK_TIMESTAMP_LENGTH) >>> SHIFT_TIMESTAMP_LENGTH) + 1;
    long timestamp = ByteBufferUtils.readLong(source, timestampFitsInBytes);
    if ((flag & FLAG_TIMESTAMP_SIGN) != 0) {
      timestamp = -timestamp;
    }
    if ((flag & FLAG_TIMESTAMP_IS_DIFF) != 0) {
      timestamp = state.timestamp - timestamp;
    }
    buffer.putLong(timestamp);

    // copy the type field
    byte type;
    if ((flag & FLAG_SAME_TYPE) != 0) {
      type = state.type;
    } else {
      type = source.readByte();
    }
    buffer.put(type);

    // copy value part
    ByteBufferUtils.copyFromStreamToBuffer(buffer, source, valueLength);

    // remember this cell so the next one can be decoded relative to it
    state.keyLength = keyLength;
    state.valueLength = valueLength;
    state.prevOffset = keyOffset;
    state.timestamp = timestamp;
    state.type = type;
    // state.qualifier is unused
  }
@Override
public int internalEncode(Cell cell, HFileBlockDefaultEncodingContext encodingContext,
DataOutputStream out) throws IOException {
EncodingState state = encodingContext.getEncodingState();
int size = compressSingleKeyValue(out, cell, state.prevCell);
size += afterEncodingKeyValue(cell, out, encodingContext);
state.prevCell = cell;
return size;
}
  /**
   * Writes one cell to {@code out} in Diff format: a flag byte, optional
   * key/value lengths, the length of the prefix shared with the previous key,
   * the non-common key bytes (family elided), a variable-width timestamp (or
   * timestamp diff), an optional type byte, and the value.
   *
   * @param prevCell previously written cell, or null for the first cell of a
   *          block (in which case the column family is written up front)
   * @return number of bytes the cell accounts for (uncompressed size)
   */
  private int compressSingleKeyValue(DataOutputStream out, Cell cell, Cell prevCell)
      throws IOException {
    int flag = 0; // Do not use more bits that can fit into a byte
    int kLength = KeyValueUtil.keyLength(cell);
    int vLength = cell.getValueLength();

    long timestamp;
    long diffTimestamp = 0;
    int diffTimestampFitsInBytes = 0;
    int timestampFitsInBytes;
    int commonPrefix = 0;

    if (prevCell == null) {
      // first cell of the block: no diffing possible
      timestamp = cell.getTimestamp();
      if (timestamp < 0) {
        flag |= FLAG_TIMESTAMP_SIGN;
        timestamp = -timestamp;
      }
      timestampFitsInBytes = ByteBufferUtils.longFitsIn(timestamp);
      flag |= (timestampFitsInBytes - 1) << SHIFT_TIMESTAMP_LENGTH;
      // put column family (written once; later cells elide it)
      byte familyLength = cell.getFamilyLength();
      out.write(familyLength);
      PrivateCellUtil.writeFamily(out, cell, familyLength);
    } else {
      // Finding common prefix
      int preKeyLength = KeyValueUtil.keyLength(prevCell);
      commonPrefix = PrivateCellUtil.findCommonPrefixInFlatKey(cell, prevCell, true, false);
      if (kLength == preKeyLength) {
        flag |= FLAG_SAME_KEY_LENGTH;
      }
      if (vLength == prevCell.getValueLength()) {
        flag |= FLAG_SAME_VALUE_LENGTH;
      }
      if (cell.getTypeByte() == prevCell.getTypeByte()) {
        flag |= FLAG_SAME_TYPE;
      }
      // don't compress timestamp and type using prefix encode timestamp
      timestamp = cell.getTimestamp();
      diffTimestamp = prevCell.getTimestamp() - timestamp;
      boolean negativeTimestamp = timestamp < 0;
      if (negativeTimestamp) {
        timestamp = -timestamp;
      }
      timestampFitsInBytes = ByteBufferUtils.longFitsIn(timestamp);
      boolean minusDiffTimestamp = diffTimestamp < 0;
      if (minusDiffTimestamp) {
        diffTimestamp = -diffTimestamp;
      }
      diffTimestampFitsInBytes = ByteBufferUtils.longFitsIn(diffTimestamp);
      // choose whichever representation (absolute vs diff) is shorter
      if (diffTimestampFitsInBytes < timestampFitsInBytes) {
        flag |= (diffTimestampFitsInBytes - 1) << SHIFT_TIMESTAMP_LENGTH;
        flag |= FLAG_TIMESTAMP_IS_DIFF;
        if (minusDiffTimestamp) {
          flag |= FLAG_TIMESTAMP_SIGN;
        }
      } else {
        flag |= (timestampFitsInBytes - 1) << SHIFT_TIMESTAMP_LENGTH;
        if (negativeTimestamp) {
          flag |= FLAG_TIMESTAMP_SIGN;
        }
      }
    }
    out.write(flag);
    if ((flag & FLAG_SAME_KEY_LENGTH) == 0) {
      ByteBufferUtils.putCompressedInt(out, kLength);
    }
    if ((flag & FLAG_SAME_VALUE_LENGTH) == 0) {
      ByteBufferUtils.putCompressedInt(out, vLength);
    }
    ByteBufferUtils.putCompressedInt(out, commonPrefix);
    short rLen = cell.getRowLength();
    if (commonPrefix < rLen + KeyValue.ROW_LENGTH_SIZE) {
      // Previous and current rows are different. Copy the differing part of
      // the row, skip the column family, and copy the qualifier.
      PrivateCellUtil.writeRowKeyExcludingCommon(cell, rLen, commonPrefix, out);
      PrivateCellUtil.writeQualifier(out, cell, cell.getQualifierLength());
    } else {
      // The common part includes the whole row. As the column family is the
      // same across the whole file, it will automatically be included in the
      // common prefix, so we need not special-case it here.
      // What we write here is the non common part of the qualifier
      int commonQualPrefix = commonPrefix - (rLen + KeyValue.ROW_LENGTH_SIZE)
          - (cell.getFamilyLength() + KeyValue.FAMILY_LENGTH_SIZE);
      PrivateCellUtil.writeQualifierSkippingBytes(out, cell, cell.getQualifierLength(),
          commonQualPrefix);
    }
    if ((flag & FLAG_TIMESTAMP_IS_DIFF) == 0) {
      ByteBufferUtils.putLong(out, timestamp, timestampFitsInBytes);
    } else {
      ByteBufferUtils.putLong(out, diffTimestamp, diffTimestampFitsInBytes);
    }

    if ((flag & FLAG_SAME_TYPE) == 0) {
      out.write(cell.getTypeByte());
    }
    PrivateCellUtil.writeValue(out, cell, vLength);
    return kLength + vLength + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE;
  }
  /**
   * Reconstructs the first key of an encoded block without decoding the whole
   * block. The first cell of a block always carries the family and a full
   * (non-diffed) key, so it can be reassembled in place.
   *
   * @param block buffer positioned at the block start; its mark/position are
   *          restored before returning
   * @return a key-only cell backed by a freshly allocated array
   */
  @Override
  public Cell getFirstKeyCellInBlock(ByteBuff block) {
    block.mark();
    // skip the decompressed-size int at the head of the block
    block.position(Bytes.SIZEOF_INT);
    byte familyLength = block.get();
    block.skip(familyLength);
    byte flag = block.get();
    int keyLength = ByteBuff.readCompressedInt(block);
    // TODO : See if we can avoid these reads as the read values are not getting used
    ByteBuff.readCompressedInt(block); // valueLength
    ByteBuff.readCompressedInt(block); // commonLength
    ByteBuffer result = ByteBuffer.allocate(keyLength);

    // copy row
    assert !(result.isDirect());
    int pos = result.arrayOffset();
    block.get(result.array(), pos, Bytes.SIZEOF_SHORT);
    pos += Bytes.SIZEOF_SHORT;
    short rowLength = result.getShort();
    block.get(result.array(), pos, rowLength);
    pos += rowLength;

    // copy family: re-read it from its fixed position at the block head
    int savePosition = block.position();
    block.position(Bytes.SIZEOF_INT);
    block.get(result.array(), pos, familyLength + Bytes.SIZEOF_BYTE);
    pos += familyLength + Bytes.SIZEOF_BYTE;

    // copy qualifier
    block.position(savePosition);
    int qualifierLength =
        keyLength - pos + result.arrayOffset() - KeyValue.TIMESTAMP_TYPE_SIZE;
    block.get(result.array(), pos, qualifierLength);
    pos += qualifierLength;

    // copy the timestamp and type
    int timestampFitInBytes =
        ((flag & MASK_TIMESTAMP_LENGTH) >>> SHIFT_TIMESTAMP_LENGTH) + 1;
    long timestamp = ByteBuff.readLong(block, timestampFitInBytes);
    if ((flag & FLAG_TIMESTAMP_SIGN) != 0) {
      timestamp = -timestamp;
    }
    result.putLong(pos, timestamp);
    pos += Bytes.SIZEOF_LONG;
    block.get(result.array(), pos, Bytes.SIZEOF_BYTE);
    block.reset();

    // The result is already a BB. So always we will create a KeyOnlyKv.
    return new KeyValue.KeyOnlyKeyValue(result.array(), 0, keyLength);
  }
  /** Returns the simple class name of this encoder, used in logs and debugging. */
  @Override
  public String toString() {
    return DiffKeyDeltaEncoder.class.getSimpleName();
  }
protected static class DiffSeekerState extends SeekerState {
private int rowLengthWithSize;
private long timestamp;
public DiffSeekerState(ObjectIntPair<ByteBuffer> tmpPair,
boolean includeTags) {
super(tmpPair, includeTags);
}
@Override
protected void copyFromNext(SeekerState that) {
super.copyFromNext(that);
DiffSeekerState other = (DiffSeekerState) that;
rowLengthWithSize = other.rowLengthWithSize;
timestamp = other.timestamp;
}
}
  /** Creates a seeker able to scan blocks produced by this encoder. */
  @Override
  public EncodedSeeker createSeeker(HFileBlockDecodingContext decodingCtx) {
    return new DiffSeekerStateBufferedEncodedSeeker(decodingCtx);
  }
  /**
   * Decodes an entire Diff-encoded block into a newly allocated buffer of
   * plain KeyValues, leaving {@code allocateHeaderLength} bytes of headroom
   * at the front and {@code skipLastBytes} bytes unread at the tail.
   *
   * @throws IllegalStateException if decoding overran the expected end
   */
  @Override
  protected ByteBuffer internalDecodeKeyValues(DataInputStream source, int allocateHeaderLength,
      int skipLastBytes, HFileBlockDefaultDecodingContext decodingCtx) throws IOException {
    // the block starts with its decompressed payload size
    int decompressedSize = source.readInt();
    ByteBuffer buffer = ByteBuffer.allocate(decompressedSize +
        allocateHeaderLength);
    buffer.position(allocateHeaderLength);
    DiffCompressionState state = new DiffCompressionState();
    while (source.available() > skipLastBytes) {
      uncompressSingleKeyValue(source, buffer, state);
      afterDecodingKeyValue(source, buffer, decodingCtx);
    }

    if (source.available() != skipLastBytes) {
      throw new IllegalStateException("Read too much bytes.");
    }

    return buffer;
  }
  /**
   * Seeker over Diff-encoded blocks. Decodes one cell at a time into the
   * current {@link DiffSeekerState}, reusing the cached column family bytes
   * read once per block in {@link #decodeFirst()}.
   */
  private static class DiffSeekerStateBufferedEncodedSeeker
      extends BufferedEncodedSeeker<DiffSeekerState> {
    // Column family preceded by its one-byte length, read once per block.
    private byte[] familyNameWithSize;
    private static final int TIMESTAMP_WITH_TYPE_LENGTH =
        Bytes.SIZEOF_LONG + Bytes.SIZEOF_BYTE;

    private DiffSeekerStateBufferedEncodedSeeker(HFileBlockDecodingContext decodingCtx) {
      super(decodingCtx);
    }

    /**
     * Decodes the next cell from {@code currentBuffer} into {@code current},
     * reusing as much of the previous key as the common-prefix length allows.
     *
     * @param isFirst true for the first cell of the block (no previous key)
     */
    private void decode(boolean isFirst) {
      byte flag = currentBuffer.get();
      byte type = 0;
      if ((flag & FLAG_SAME_KEY_LENGTH) == 0) {
        if (!isFirst) {
          // save the old type byte before keyLength changes, so it can be
          // restored below when FLAG_SAME_TYPE is set
          type = current.keyBuffer[current.keyLength - Bytes.SIZEOF_BYTE];
        }
        current.keyLength = ByteBuff.readCompressedInt(currentBuffer);
      }
      if ((flag & FLAG_SAME_VALUE_LENGTH) == 0) {
        current.valueLength = ByteBuff.readCompressedInt(currentBuffer);
      }
      current.lastCommonPrefix = ByteBuff.readCompressedInt(currentBuffer);

      current.ensureSpaceForKey();

      if (current.lastCommonPrefix < Bytes.SIZEOF_SHORT) {
        // length of row is different, copy everything except family

        // copy the row size
        currentBuffer.get(current.keyBuffer, current.lastCommonPrefix,
            Bytes.SIZEOF_SHORT - current.lastCommonPrefix);
        current.rowLengthWithSize = Bytes.toShort(current.keyBuffer, 0) +
            Bytes.SIZEOF_SHORT;

        // copy the rest of row
        currentBuffer.get(current.keyBuffer, Bytes.SIZEOF_SHORT,
            current.rowLengthWithSize - Bytes.SIZEOF_SHORT);

        // copy the column family
        System.arraycopy(familyNameWithSize, 0, current.keyBuffer,
            current.rowLengthWithSize, familyNameWithSize.length);

        // copy the qualifier
        currentBuffer.get(current.keyBuffer,
            current.rowLengthWithSize + familyNameWithSize.length,
            current.keyLength - current.rowLengthWithSize -
            familyNameWithSize.length - TIMESTAMP_WITH_TYPE_LENGTH);
      } else if (current.lastCommonPrefix < current.rowLengthWithSize) {
        // we have to copy part of row and qualifier,
        // but column family is in right place

        // before column family (rest of row)
        currentBuffer.get(current.keyBuffer, current.lastCommonPrefix,
            current.rowLengthWithSize - current.lastCommonPrefix);

        // after column family (qualifier)
        currentBuffer.get(current.keyBuffer,
            current.rowLengthWithSize + familyNameWithSize.length,
            current.keyLength - current.rowLengthWithSize -
            familyNameWithSize.length - TIMESTAMP_WITH_TYPE_LENGTH);
      } else {
        // copy just the ending
        currentBuffer.get(current.keyBuffer, current.lastCommonPrefix,
            current.keyLength - TIMESTAMP_WITH_TYPE_LENGTH -
            current.lastCommonPrefix);
      }

      // timestamp: 1..8 bytes, width and sign taken from the flag byte
      int pos = current.keyLength - TIMESTAMP_WITH_TYPE_LENGTH;
      int timestampFitInBytes = 1 +
          ((flag & MASK_TIMESTAMP_LENGTH) >>> SHIFT_TIMESTAMP_LENGTH);
      long timestampOrDiff = ByteBuff.readLong(currentBuffer, timestampFitInBytes);
      if ((flag & FLAG_TIMESTAMP_SIGN) != 0) {
        timestampOrDiff = -timestampOrDiff;
      }
      if ((flag & FLAG_TIMESTAMP_IS_DIFF) == 0) { // it is timestamp
        current.timestamp = timestampOrDiff;
      } else { // it is diff
        current.timestamp = current.timestamp - timestampOrDiff;
      }
      Bytes.putLong(current.keyBuffer, pos, current.timestamp);
      pos += Bytes.SIZEOF_LONG;

      // type
      if ((flag & FLAG_SAME_TYPE) == 0) {
        currentBuffer.get(current.keyBuffer, pos, Bytes.SIZEOF_BYTE);
      } else if ((flag & FLAG_SAME_KEY_LENGTH) == 0) {
        // key was rewritten with a new length: restore the saved type byte
        current.keyBuffer[pos] = type;
      }

      current.valueOffset = currentBuffer.position();
      currentBuffer.skip(current.valueLength);

      if (includesTags()) {
        decodeTags();
      }
      if (includesMvcc()) {
        current.memstoreTS = ByteBufferUtils.readVLong(currentBuffer);
      } else {
        current.memstoreTS = 0;
      }
      current.nextKvOffset = currentBuffer.position();
    }

    /** Positions past the block header, caches the family bytes, decodes cell 1. */
    @Override
    protected void decodeFirst() {
      currentBuffer.skip(Bytes.SIZEOF_INT);

      // read column family
      byte familyNameLength = currentBuffer.get();
      familyNameWithSize = new byte[familyNameLength + Bytes.SIZEOF_BYTE];
      familyNameWithSize[0] = familyNameLength;
      currentBuffer.get(familyNameWithSize, Bytes.SIZEOF_BYTE,
          familyNameLength);
      decode(true);
    }

    @Override
    protected void decodeNext() {
      decode(false);
    }

    @Override
    protected DiffSeekerState createSeekerState() {
      return new DiffSeekerState(this.tmpPair, this.includesTags());
    }
  }
}
| mahak/hbase | hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/DiffKeyDeltaEncoder.java | Java | apache-2.0 | 19,734 |
package store
import (
"strconv"
"strings"
"github.com/docker/swarmkit/api"
"github.com/docker/swarmkit/api/naming"
memdb "github.com/hashicorp/go-memdb"
)
// tableTask is the name of the memdb table that stores tasks.
const tableTask = "task"

// init registers the task table with the object store: its memdb schema
// (one entry per indexed field), plus snapshot save/restore hooks and the
// store-action dispatcher used when replaying raft log entries.
func init() {
	register(ObjectStoreConfig{
		Table: &memdb.TableSchema{
			Name: tableTask,
			Indexes: map[string]*memdb.IndexSchema{
				indexID: {
					Name:    indexID,
					Unique:  true,
					Indexer: api.TaskIndexerByID{},
				},
				indexName: {
					Name:         indexName,
					AllowMissing: true,
					Indexer:      taskIndexerByName{},
				},
				indexRuntime: {
					Name:         indexRuntime,
					AllowMissing: true,
					Indexer:      taskIndexerByRuntime{},
				},
				indexServiceID: {
					Name:         indexServiceID,
					AllowMissing: true,
					Indexer:      taskIndexerByServiceID{},
				},
				indexNodeID: {
					Name:         indexNodeID,
					AllowMissing: true,
					Indexer:      taskIndexerByNodeID{},
				},
				indexSlot: {
					Name:         indexSlot,
					AllowMissing: true,
					Indexer:      taskIndexerBySlot{},
				},
				indexDesiredState: {
					Name:    indexDesiredState,
					Indexer: taskIndexerByDesiredState{},
				},
				indexTaskState: {
					Name:    indexTaskState,
					Indexer: taskIndexerByTaskState{},
				},
				indexNetwork: {
					Name:         indexNetwork,
					AllowMissing: true,
					Indexer:      taskIndexerByNetwork{},
				},
				indexSecret: {
					Name:         indexSecret,
					AllowMissing: true,
					Indexer:      taskIndexerBySecret{},
				},
				indexCustom: {
					Name:         indexCustom,
					Indexer:      api.TaskCustomIndexer{},
					AllowMissing: true,
				},
			},
		},
		// Save copies all tasks into the snapshot.
		Save: func(tx ReadTx, snapshot *api.StoreSnapshot) error {
			var err error
			snapshot.Tasks, err = FindTasks(tx, All)
			return err
		},
		// Restore replaces the table contents with the snapshot's tasks:
		// delete everything currently present, then re-create from snapshot.
		Restore: func(tx Tx, snapshot *api.StoreSnapshot) error {
			tasks, err := FindTasks(tx, All)
			if err != nil {
				return err
			}
			for _, t := range tasks {
				if err := DeleteTask(tx, t.ID); err != nil {
					return err
				}
			}
			for _, t := range snapshot.Tasks {
				if err := CreateTask(tx, t); err != nil {
					return err
				}
			}
			return nil
		},
		// ApplyStoreAction dispatches a replicated create/update/remove
		// action targeting a task.
		ApplyStoreAction: func(tx Tx, sa api.StoreAction) error {
			switch v := sa.Target.(type) {
			case *api.StoreAction_Task:
				obj := v.Task
				switch sa.Action {
				case api.StoreActionKindCreate:
					return CreateTask(tx, obj)
				case api.StoreActionKindUpdate:
					return UpdateTask(tx, obj)
				case api.StoreActionKindRemove:
					return DeleteTask(tx, obj.ID)
				}
			}
			return errUnknownStoreAction
		},
	})
}
// CreateTask adds a new task to the store.
// Returns ErrExist if the ID is already taken.
func CreateTask(tx Tx, t *api.Task) error {
	return tx.create(tableTask, t)
}
// UpdateTask updates an existing task in the store.
// Returns ErrNotExist if the task doesn't exist.
func UpdateTask(tx Tx, t *api.Task) error {
	return tx.update(tableTask, t)
}
// DeleteTask removes a task from the store.
// Returns ErrNotExist if the task doesn't exist.
func DeleteTask(tx Tx, id string) error {
	return tx.delete(tableTask, id)
}
// GetTask looks up a task by ID.
// Returns nil if the task doesn't exist.
func GetTask(tx ReadTx, id string) *api.Task {
	t := tx.get(tableTask, id)
	if t == nil {
		return nil
	}
	return t.(*api.Task)
}
// FindTasks selects a set of tasks and returns them.
// ErrInvalidFindBy is returned when the selector type has no index on the
// task table.
func FindTasks(tx ReadTx, by By) ([]*api.Task, error) {
	// Only selector types with a corresponding task index are accepted.
	acceptable := func(by By) error {
		switch by.(type) {
		case byName, byNamePrefix, byIDPrefix, byRuntime, byDesiredState, byTaskState, byNode, byService, bySlot, byReferencedNetworkID, byReferencedSecretID, byCustom, byCustomPrefix:
			return nil
		}
		return ErrInvalidFindBy
	}

	tasks := []*api.Task{}
	collect := func(o api.StoreObject) {
		tasks = append(tasks, o.(*api.Task))
	}
	err := tx.find(tableTask, by, acceptable, collect)
	return tasks, err
}
// taskIndexerByName indexes tasks by their computed (lowercased) name.
type taskIndexerByName struct{}

func (ti taskIndexerByName) FromArgs(args ...interface{}) ([]byte, error) {
	return fromArgs(args...)
}

func (ti taskIndexerByName) FromObject(obj interface{}) (bool, []byte, error) {
	t := obj.(*api.Task)
	name := naming.Task(t)
	// Add the null character as a terminator
	return true, []byte(strings.ToLower(name) + "\x00"), nil
}

func (ti taskIndexerByName) PrefixFromArgs(args ...interface{}) ([]byte, error) {
	return prefixFromArgs(args...)
}
// taskIndexerByRuntime indexes tasks by the runtime named in their spec.
type taskIndexerByRuntime struct{}

func (ti taskIndexerByRuntime) FromArgs(args ...interface{}) ([]byte, error) {
	return fromArgs(args...)
}

func (ti taskIndexerByRuntime) FromObject(obj interface{}) (bool, []byte, error) {
	t := obj.(*api.Task)
	r, err := naming.Runtime(t.Spec)
	if err != nil {
		// Unrecognized runtime: skip indexing this task rather than failing.
		return false, nil, nil
	}
	// Add the null character as a terminator
	return true, []byte(r + "\x00"), nil
}

func (ti taskIndexerByRuntime) PrefixFromArgs(args ...interface{}) ([]byte, error) {
	return prefixFromArgs(args...)
}
// taskIndexerByServiceID indexes tasks by the ID of the service they belong to.
type taskIndexerByServiceID struct{}

func (ti taskIndexerByServiceID) FromArgs(args ...interface{}) ([]byte, error) {
	return fromArgs(args...)
}

func (ti taskIndexerByServiceID) FromObject(obj interface{}) (bool, []byte, error) {
	t := obj.(*api.Task)
	// Add the null character as a terminator
	val := t.ServiceID + "\x00"
	return true, []byte(val), nil
}
// taskIndexerByNodeID indexes tasks by the ID of the node they are assigned to.
type taskIndexerByNodeID struct{}

func (ti taskIndexerByNodeID) FromArgs(args ...interface{}) ([]byte, error) {
	return fromArgs(args...)
}

func (ti taskIndexerByNodeID) FromObject(obj interface{}) (bool, []byte, error) {
	t := obj.(*api.Task)
	// Add the null character as a terminator
	val := t.NodeID + "\x00"
	return true, []byte(val), nil
}
// taskIndexerBySlot indexes tasks by the (service ID, slot number) pair.
type taskIndexerBySlot struct{}

func (ti taskIndexerBySlot) FromArgs(args ...interface{}) ([]byte, error) {
	return fromArgs(args...)
}

func (ti taskIndexerBySlot) FromObject(obj interface{}) (bool, []byte, error) {
	t := obj.(*api.Task)
	// Composite key: service ID and decimal slot, each null-terminated.
	val := t.ServiceID + "\x00" + strconv.FormatUint(t.Slot, 10) + "\x00"
	return true, []byte(val), nil
}
// taskIndexerByDesiredState indexes tasks by the numeric value of their
// desired state.
type taskIndexerByDesiredState struct{}

func (ti taskIndexerByDesiredState) FromArgs(args ...interface{}) ([]byte, error) {
	return fromArgs(args...)
}

func (ti taskIndexerByDesiredState) FromObject(obj interface{}) (bool, []byte, error) {
	t := obj.(*api.Task)
	// Add the null character as a terminator
	return true, []byte(strconv.FormatInt(int64(t.DesiredState), 10) + "\x00"), nil
}
// taskIndexerByNetwork is a multi-indexer: a task gets one index entry per
// network target referenced by its spec.
type taskIndexerByNetwork struct{}

func (ti taskIndexerByNetwork) FromArgs(args ...interface{}) ([]byte, error) {
	return fromArgs(args...)
}

func (ti taskIndexerByNetwork) FromObject(obj interface{}) (bool, [][]byte, error) {
	t := obj.(*api.Task)

	var networkIDs [][]byte

	for _, na := range t.Spec.Networks {
		// Add the null character as a terminator
		networkIDs = append(networkIDs, []byte(na.Target+"\x00"))
	}

	// Tasks with no networks produce no index entries (AllowMissing is set).
	return len(networkIDs) != 0, networkIDs, nil
}
// taskIndexerBySecret is a multi-indexer: a container task gets one index
// entry per secret its spec references. Non-container tasks are not indexed.
type taskIndexerBySecret struct{}

func (ti taskIndexerBySecret) FromArgs(args ...interface{}) ([]byte, error) {
	return fromArgs(args...)
}

func (ti taskIndexerBySecret) FromObject(obj interface{}) (bool, [][]byte, error) {
	t := obj.(*api.Task)

	container := t.Spec.GetContainer()
	if container == nil {
		return false, nil, nil
	}

	var secretIDs [][]byte

	for _, secretRef := range container.Secrets {
		// Add the null character as a terminator
		secretIDs = append(secretIDs, []byte(secretRef.SecretID+"\x00"))
	}

	return len(secretIDs) != 0, secretIDs, nil
}
// taskIndexerByTaskState indexes tasks by the numeric value of their
// currently observed state (as opposed to the desired state).
type taskIndexerByTaskState struct{}

func (ts taskIndexerByTaskState) FromArgs(args ...interface{}) ([]byte, error) {
	return fromArgs(args...)
}

func (ts taskIndexerByTaskState) FromObject(obj interface{}) (bool, []byte, error) {
	t := obj.(*api.Task)
	// Add the null character as a terminator
	return true, []byte(strconv.FormatInt(int64(t.Status.State), 10) + "\x00"), nil
}
| mstanleyjones/docker | vendor/github.com/docker/swarmkit/manager/state/store/tasks.go | GO | apache-2.0 | 7,821 |
/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package proxy
import (
"fmt"
"net"
"reflect"
"strings"
"sync"
"k8s.io/klog/v2"
v1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/types"
"k8s.io/apimachinery/pkg/util/sets"
utilfeature "k8s.io/apiserver/pkg/util/feature"
"k8s.io/client-go/tools/record"
apiservice "k8s.io/kubernetes/pkg/api/v1/service"
"k8s.io/kubernetes/pkg/features"
"k8s.io/kubernetes/pkg/proxy/metrics"
utilproxy "k8s.io/kubernetes/pkg/proxy/util"
)
// BaseServiceInfo contains base information that defines a service.
// This could be used directly by proxier while processing services,
// or can be used for constructing a more specific ServiceInfo struct
// defined by the proxier if needed.
type BaseServiceInfo struct {
	// clusterIP is the service's cluster IP for the tracker's IP family.
	clusterIP net.IP
	// port/protocol/nodePort describe the single service port this entry
	// represents (one BaseServiceInfo is built per ServicePort).
	port     int
	protocol v1.Protocol
	nodePort int
	// loadBalancerStatus holds only the ingress IPs matching the IP family.
	loadBalancerStatus  v1.LoadBalancerStatus
	sessionAffinityType v1.ServiceAffinity
	stickyMaxAgeSeconds int
	// externalIPs and loadBalancerSourceRanges are pre-filtered to the
	// tracker's IP family.
	externalIPs              []string
	loadBalancerSourceRanges []string
	healthCheckNodePort      int
	// nodeLocalExternal/nodeLocalInternal record whether external/internal
	// traffic must be kept on the local node (traffic policy "Local").
	nodeLocalExternal     bool
	nodeLocalInternal     bool
	internalTrafficPolicy *v1.ServiceInternalTrafficPolicyType
	// hintsAnnotation is the raw topology-aware-hints annotation value.
	hintsAnnotation string
}

// Compile-time check that BaseServiceInfo satisfies ServicePort.
var _ ServicePort = &BaseServiceInfo{}
// String is part of ServicePort interface. Formats as "ip:port/protocol".
func (info *BaseServiceInfo) String() string {
	return fmt.Sprintf("%s:%d/%s", info.clusterIP, info.port, info.protocol)
}

// ClusterIP is part of ServicePort interface.
func (info *BaseServiceInfo) ClusterIP() net.IP {
	return info.clusterIP
}

// Port is part of ServicePort interface.
func (info *BaseServiceInfo) Port() int {
	return info.port
}

// SessionAffinityType is part of the ServicePort interface.
func (info *BaseServiceInfo) SessionAffinityType() v1.ServiceAffinity {
	return info.sessionAffinityType
}

// StickyMaxAgeSeconds is part of the ServicePort interface
func (info *BaseServiceInfo) StickyMaxAgeSeconds() int {
	return info.stickyMaxAgeSeconds
}

// Protocol is part of ServicePort interface.
func (info *BaseServiceInfo) Protocol() v1.Protocol {
	return info.protocol
}

// LoadBalancerSourceRanges is part of ServicePort interface
func (info *BaseServiceInfo) LoadBalancerSourceRanges() []string {
	return info.loadBalancerSourceRanges
}

// HealthCheckNodePort is part of ServicePort interface.
func (info *BaseServiceInfo) HealthCheckNodePort() int {
	return info.healthCheckNodePort
}

// NodePort is part of the ServicePort interface.
func (info *BaseServiceInfo) NodePort() int {
	return info.nodePort
}

// ExternalIPStrings is part of ServicePort interface.
func (info *BaseServiceInfo) ExternalIPStrings() []string {
	return info.externalIPs
}

// LoadBalancerIPStrings is part of ServicePort interface. It flattens the
// (already family-filtered) load balancer ingress entries into IP strings.
func (info *BaseServiceInfo) LoadBalancerIPStrings() []string {
	var ips []string
	for _, ing := range info.loadBalancerStatus.Ingress {
		ips = append(ips, ing.IP)
	}
	return ips
}

// NodeLocalExternal is part of ServicePort interface.
func (info *BaseServiceInfo) NodeLocalExternal() bool {
	return info.nodeLocalExternal
}

// NodeLocalInternal is part of ServicePort interface
func (info *BaseServiceInfo) NodeLocalInternal() bool {
	return info.nodeLocalInternal
}

// InternalTrafficPolicy is part of ServicePort interface
func (info *BaseServiceInfo) InternalTrafficPolicy() *v1.ServiceInternalTrafficPolicyType {
	return info.internalTrafficPolicy
}

// HintsAnnotation is part of ServicePort interface.
func (info *BaseServiceInfo) HintsAnnotation() string {
	return info.hintsAnnotation
}
// newBaseServiceInfo builds a BaseServiceInfo for one port of a service,
// filtering all addresses (external IPs, LB source ranges, LB ingress IPs)
// down to the tracker's IP family.
func (sct *ServiceChangeTracker) newBaseServiceInfo(port *v1.ServicePort, service *v1.Service) *BaseServiceInfo {
	nodeLocalExternal := false
	if apiservice.RequestsOnlyLocalTraffic(service) {
		nodeLocalExternal = true
	}
	nodeLocalInternal := false
	if utilfeature.DefaultFeatureGate.Enabled(features.ServiceInternalTrafficPolicy) {
		nodeLocalInternal = apiservice.RequestsOnlyLocalTrafficForInternal(service)
	}
	var stickyMaxAgeSeconds int
	if service.Spec.SessionAffinity == v1.ServiceAffinityClientIP {
		// Kube-apiserver side guarantees SessionAffinityConfig won't be nil when session affinity type is ClientIP
		stickyMaxAgeSeconds = int(*service.Spec.SessionAffinityConfig.ClientIP.TimeoutSeconds)
	}
	clusterIP := utilproxy.GetClusterIPByFamily(sct.ipFamily, service)
	info := &BaseServiceInfo{
		clusterIP:             net.ParseIP(clusterIP),
		port:                  int(port.Port),
		protocol:              port.Protocol,
		nodePort:              int(port.NodePort),
		sessionAffinityType:   service.Spec.SessionAffinity,
		stickyMaxAgeSeconds:   stickyMaxAgeSeconds,
		nodeLocalExternal:     nodeLocalExternal,
		nodeLocalInternal:     nodeLocalInternal,
		internalTrafficPolicy: service.Spec.InternalTrafficPolicy,
		hintsAnnotation:       service.Annotations[v1.AnnotationTopologyAwareHints],
	}

	loadBalancerSourceRanges := make([]string, len(service.Spec.LoadBalancerSourceRanges))
	for i, sourceRange := range service.Spec.LoadBalancerSourceRanges {
		loadBalancerSourceRanges[i] = strings.TrimSpace(sourceRange)
	}
	// filter external ips, source ranges and ingress ips
	// prior to dual stack services, this was considered an error, but with dual stack
	// services, this is actually expected. Hence we downgraded from reporting by events
	// to just log lines with high verbosity
	ipFamilyMap := utilproxy.MapIPsByIPFamily(service.Spec.ExternalIPs)
	info.externalIPs = ipFamilyMap[sct.ipFamily]

	// Log the IPs not matching the ipFamily
	if ips, ok := ipFamilyMap[utilproxy.OtherIPFamily(sct.ipFamily)]; ok && len(ips) > 0 {
		klog.V(4).Infof("service change tracker(%v) ignored the following external IPs(%s) for service %v/%v as they don't match IPFamily", sct.ipFamily, strings.Join(ips, ","), service.Namespace, service.Name)
	}

	ipFamilyMap = utilproxy.MapCIDRsByIPFamily(loadBalancerSourceRanges)
	info.loadBalancerSourceRanges = ipFamilyMap[sct.ipFamily]
	// Log the CIDRs not matching the ipFamily
	if cidrs, ok := ipFamilyMap[utilproxy.OtherIPFamily(sct.ipFamily)]; ok && len(cidrs) > 0 {
		klog.V(4).Infof("service change tracker(%v) ignored the following load balancer source ranges(%s) for service %v/%v as they don't match IPFamily", sct.ipFamily, strings.Join(cidrs, ","), service.Namespace, service.Name)
	}

	// Obtain Load Balancer Ingress IPs
	var ips []string
	for _, ing := range service.Status.LoadBalancer.Ingress {
		if ing.IP != "" {
			ips = append(ips, ing.IP)
		}
	}

	if len(ips) > 0 {
		ipFamilyMap = utilproxy.MapIPsByIPFamily(ips)

		if ipList, ok := ipFamilyMap[utilproxy.OtherIPFamily(sct.ipFamily)]; ok && len(ipList) > 0 {
			klog.V(4).Infof("service change tracker(%v) ignored the following load balancer(%s) ingress ips for service %v/%v as they don't match IPFamily", sct.ipFamily, strings.Join(ipList, ","), service.Namespace, service.Name)
		}
		// Create the LoadBalancerStatus with the filtered IPs
		for _, ip := range ipFamilyMap[sct.ipFamily] {
			info.loadBalancerStatus.Ingress = append(info.loadBalancerStatus.Ingress, v1.LoadBalancerIngress{IP: ip})
		}
	}

	if apiservice.NeedsHealthCheck(service) {
		p := service.Spec.HealthCheckNodePort
		if p == 0 {
			klog.Errorf("Service %s/%s has no healthcheck nodeport", service.Namespace, service.Name)
		} else {
			info.healthCheckNodePort = int(p)
		}
	}

	return info
}
// makeServicePortFunc lets a proxier wrap the BaseServiceInfo built for a
// service port into its own ServicePort implementation.
type makeServicePortFunc func(*v1.ServicePort, *v1.Service, *BaseServiceInfo) ServicePort

// This handler is invoked by the apply function on every change. This function should not modify the
// ServiceMap's but just use the changes for any Proxier specific cleanup.
type processServiceMapChangeFunc func(previous, current ServiceMap)

// serviceChange contains all changes to services that happened since proxy rules were synced. For a single object,
// changes are accumulated, i.e. previous is state from before applying the changes,
// current is state after applying all of the changes.
type serviceChange struct {
	previous ServiceMap
	current  ServiceMap
}
// ServiceChangeTracker carries state about uncommitted changes to an arbitrary number of
// Services, keyed by their namespace and name.
type ServiceChangeTracker struct {
	// lock protects items.
	lock sync.Mutex
	// items maps a service to its serviceChange.
	items map[types.NamespacedName]*serviceChange
	// makeServiceInfo allows proxier to inject customized information when processing service.
	makeServiceInfo makeServicePortFunc
	// processServiceMapChange, if non-nil, is invoked once per pending change
	// when the changes are applied to a ServiceMap.
	processServiceMapChange processServiceMapChangeFunc
	// ipFamily is the single IP family this tracker filters services to.
	ipFamily v1.IPFamily

	recorder record.EventRecorder
}
// NewServiceChangeTracker initializes a ServiceChangeTracker with an empty
// pending-change map.
func NewServiceChangeTracker(makeServiceInfo makeServicePortFunc, ipFamily v1.IPFamily, recorder record.EventRecorder, processServiceMapChange processServiceMapChangeFunc) *ServiceChangeTracker {
	return &ServiceChangeTracker{
		items:                   make(map[types.NamespacedName]*serviceChange),
		makeServiceInfo:         makeServiceInfo,
		recorder:                recorder,
		ipFamily:                ipFamily,
		processServiceMapChange: processServiceMapChange,
	}
}
// Update updates given service's change map based on the <previous, current> service pair.
// It returns true if the tracker still has uncommitted changes after this call
// (note: not merely whether THIS update changed anything — if accumulated
// changes cancel out, the entry is dropped).
// Update can be used to add/update/delete items of ServiceChangeMap. For example,
// Add item
//   - pass <nil, service> as the <previous, current> pair.
// Update item
//   - pass <oldService, service> as the <previous, current> pair.
// Delete item
//   - pass <service, nil> as the <previous, current> pair.
func (sct *ServiceChangeTracker) Update(previous, current *v1.Service) bool {
	svc := current
	if svc == nil {
		svc = previous
	}
	// previous == nil && current == nil is unexpected, we should return false directly.
	if svc == nil {
		return false
	}
	metrics.ServiceChangesTotal.Inc()
	namespacedName := types.NamespacedName{Namespace: svc.Namespace, Name: svc.Name}

	sct.lock.Lock()
	defer sct.lock.Unlock()

	change, exists := sct.items[namespacedName]
	if !exists {
		change = &serviceChange{}
		// The "previous" state is captured only on the first change for this
		// service; later updates only advance "current".
		change.previous = sct.serviceToServiceMap(previous)
		sct.items[namespacedName] = change
	}
	change.current = sct.serviceToServiceMap(current)
	// if change.previous equal to change.current, it means no change
	if reflect.DeepEqual(change.previous, change.current) {
		delete(sct.items, namespacedName)
	} else {
		klog.V(2).Infof("Service %s updated: %d ports", namespacedName, len(change.current))
	}
	metrics.ServiceChangesPending.Set(float64(len(sct.items)))
	return len(sct.items) > 0
}
// UpdateServiceMapResult is the updated results after applying service changes.
type UpdateServiceMapResult struct {
	// HCServiceNodePorts is a map of Service names to node port numbers which indicate the health of that Service on this Node.
	// The value(uint16) of HCServices map is the service health check node port.
	HCServiceNodePorts map[types.NamespacedName]uint16

	// UDPStaleClusterIP holds stale (no longer assigned to a Service) Service IPs that had UDP ports.
	// Callers can use this to abort timeout-waits or clear connection-tracking information.
	UDPStaleClusterIP sets.String
}
// Update updates ServiceMap base on the given changes, returning the health
// check node ports and the stale UDP cluster IPs produced by applying them.
func (sm ServiceMap) Update(changes *ServiceChangeTracker) (result UpdateServiceMapResult) {
	result.UDPStaleClusterIP = sets.NewString()
	sm.apply(changes, result.UDPStaleClusterIP)

	// TODO: If this will appear to be computationally expensive, consider
	// computing this incrementally similarly to serviceMap.
	result.HCServiceNodePorts = make(map[types.NamespacedName]uint16)
	for svcPortName, info := range sm {
		if info.HealthCheckNodePort() != 0 {
			result.HCServiceNodePorts[svcPortName.NamespacedName] = uint16(info.HealthCheckNodePort())
		}
	}

	return result
}

// ServiceMap maps a service to its ServicePort.
type ServiceMap map[ServicePortName]ServicePort
// serviceToServiceMap translates a single Service object to a ServiceMap.
// It returns nil when the service is nil, is skipped per
// utilproxy.ShouldSkipService, or has no cluster IP for the tracker's
// IP family.
//
// NOTE: service object should NOT be modified.
func (sct *ServiceChangeTracker) serviceToServiceMap(service *v1.Service) ServiceMap {
	if service == nil {
		return nil
	}
	if utilproxy.ShouldSkipService(service) {
		return nil
	}
	// Only track a cluster IP matching this tracker's IP family.
	clusterIP := utilproxy.GetClusterIPByFamily(sct.ipFamily, service)
	if clusterIP == "" {
		return nil
	}
	serviceMap := make(ServiceMap)
	svcName := types.NamespacedName{Namespace: service.Namespace, Name: service.Name}
	// One ServiceMap entry per declared service port.
	for i := range service.Spec.Ports {
		servicePort := &service.Spec.Ports[i]
		svcPortName := ServicePortName{NamespacedName: svcName, Port: servicePort.Name, Protocol: servicePort.Protocol}
		baseSvcInfo := sct.newBaseServiceInfo(servicePort, service)
		if sct.makeServiceInfo != nil {
			// Proxier-specific hook may wrap or extend the base info.
			serviceMap[svcPortName] = sct.makeServiceInfo(servicePort, service, baseSvcInfo)
		} else {
			serviceMap[svcPortName] = baseSvcInfo
		}
	}
	return serviceMap
}
// apply the changes to ServiceMap and update the stale udp cluster IP set. The UDPStaleClusterIP argument is passed in to store the
// udp protocol service cluster ip when service is deleted from the ServiceMap.
// apply triggers processServiceMapChange on every change.
//
// The tracker's lock is held for the whole walk, so no new change can be
// recorded while pending ones are consumed; the pending-change map and the
// ServiceChangesPending metric are reset to empty afterwards.
func (sm *ServiceMap) apply(changes *ServiceChangeTracker, UDPStaleClusterIP sets.String) {
	changes.lock.Lock()
	defer changes.lock.Unlock()
	for _, change := range changes.items {
		// Optional hook for proxier-specific bookkeeping per change.
		if changes.processServiceMapChange != nil {
			changes.processServiceMapChange(change.previous, change.current)
		}
		sm.merge(change.current)
		// filter out the Update event of current changes from previous changes before calling unmerge() so that can
		// skip deleting the Update events.
		change.previous.filter(change.current)
		sm.unmerge(change.previous, UDPStaleClusterIP)
	}
	// clear changes after applying them to ServiceMap.
	changes.items = make(map[types.NamespacedName]*serviceChange)
	metrics.ServiceChangesPending.Set(0)
}
// merge copies every entry of other into sm; on collision the entry from
// other always wins. It returns the set of identifiers
// (ServicePortName.String()) seen in other, e.g. {"ns/cluster-ip:http"};
// ServiceMap.unmerge() consumes that set to tell deletions apart from
// updates.
func (sm *ServiceMap) merge(other ServiceMap) sets.String {
	// mergedPorts collects the identifier of every service port in other.
	mergedPorts := sets.NewString()
	for svcPortName, info := range other {
		mergedPorts.Insert(svcPortName.String())
		if _, ok := (*sm)[svcPortName]; ok {
			klog.V(1).Infof("Updating existing service port %q at %s", svcPortName, info.String())
		} else {
			klog.V(1).Infof("Adding new service port %q at %s", svcPortName, info.String())
		}
		(*sm)[svcPortName] = info
	}
	return mergedPorts
}
// filter drops from sm every port that also appears in other, so that a
// port updated in place is not later treated as a deletion.
func (sm *ServiceMap) filter(other ServiceMap) {
	for svcPortName := range *sm {
		// skip the delete for Update event.
		_, updated := other[svcPortName]
		if updated {
			delete(*sm, svcPortName)
		}
	}
}
// unmerge removes every entry of other from sm. Cluster IPs of removed UDP
// ports are recorded into UDPStaleClusterIP so stale UDP conntrack entries
// can be cleared later.
func (sm *ServiceMap) unmerge(other ServiceMap, UDPStaleClusterIP sets.String) {
	for svcPortName := range other {
		if info, ok := (*sm)[svcPortName]; ok {
			klog.V(1).Infof("Removing service port %q", svcPortName)
			if info.Protocol() == v1.ProtocolUDP {
				UDPStaleClusterIP.Insert(info.ClusterIP().String())
			}
			delete(*sm, svcPortName)
		} else {
			klog.Errorf("Service port %q doesn't exists", svcPortName)
		}
	}
}
| ravilr/kubernetes | pkg/proxy/service.go | GO | apache-2.0 | 17,273 |
<?php
/* *
* 类名:AlipayNotify
* 功能:支付宝通知处理类
* 详细:处理支付宝各接口通知返回
* 版本:3.2
* 日期:2011-03-25
* 说明:
* 以下代码只是为了方便商户测试而提供的样例代码,商户可以根据自己网站的需要,按照技术文档编写,并非一定要使用该代码。
* 该代码仅供学习和研究支付宝接口使用,只是提供一个参考
*************************注意*************************
* 调试通知返回时,可查看或改写log日志的写入TXT里的数据,来检查通知返回是否正常
*/
namespace Addons\Payment\Controller;
require_once("alipay_core.function.php");
require_once("alipay_md5.function.php");
class AlipayNotify {
    /**
     * HTTPS endpoint used to ask Alipay whether a received notification is
     * genuine (notify_verify service).
     */
    var $https_verify_url = 'https://mapi.alipay.com/gateway.do?service=notify_verify&';
    /**
     * HTTP endpoint for the same notification verification, without TLS.
     */
    var $http_verify_url = 'http://notify.alipay.com/trade/notify_query.do?';
    /**
     * Gateway configuration array (partner id, signing key, sign_type,
     * transport, cacert path, ...).
     */
    var $alipay_config;
    function __construct($alipay_config){
        $this->alipay_config = $alipay_config;
    }
    /**
     * Legacy PHP4-style constructor; delegates to __construct().
     */
    function AlipayNotify($alipay_config) {
        $this->__construct($alipay_config);
    }
    /**
     * Verify that an asynchronous notification (notify_url) really came
     * from Alipay: the local signature must match and Alipay's remote ATN
     * check must answer true.
     * @return bool verification result
     */
    function verifyNotify(){
        if(empty($_POST)) {//nothing was POSTed, so this cannot be a valid notification
            return false;
        }
        else {
            //Recompute the signature over the POSTed parameters.
            $isSign = $this->getSignVeryfy($_POST, $_POST["sign"]);
            //Ask the Alipay server for the ATN result (confirms the message
            //origin); defaults to 'true' when no notify_id is present.
            $responseTxt = 'true';
            if (! empty($_POST["notify_id"])) {$responseTxt = $this->getResponse($_POST["notify_id"]);}
            //Optional debug logging, kept disabled:
            //if ($isSign) {
            //    $isSignStr = 'true';
            //}
            //else {
            //    $isSignStr = 'false';
            //}
            //$log_text = "responseTxt=".$responseTxt."\n notify_url_log:isSign=".$isSignStr.",";
            //$log_text = $log_text.createLinkString($_POST);
            //logResult($log_text);
            //Validation notes:
            //- a $responseTxt other than 'true' points to server configuration,
            //  the partner id, or the one-minute notify_id expiry
            //- a false $isSign points to the security key, the parameter format
            //  (e.g. extra custom parameters) or the character encoding
            if (preg_match("/true$/i",$responseTxt) && $isSign) {
                return true;
            } else {
                return false;
            }
        }
    }
    /**
     * Verify that a synchronous return (return_url, via GET) really came
     * from Alipay. Same checks as verifyNotify(), plus a log record.
     * @return bool verification result
     */
    function verifyReturn(){
        if(empty($_GET)) {//nothing came back via GET, so this cannot be a valid return
            return false;
        }
        else {
            //Recompute the signature over the GET parameters.
            $isSign = $this->getSignVeryfy($_GET, $_GET["sign"]);
            //Ask the Alipay server for the ATN result; defaults to 'true'
            //when no notify_id is present.
            $responseTxt = 'true';
            if (! empty($_GET["notify_id"])) {$responseTxt = $this->getResponse($_GET["notify_id"]);}
            //Write a log record with the verification outcome.
            if ($isSign) {
                $isSignStr = 'true';
            }
            else {
                $isSignStr = 'false';
            }
            $log_text = "responseTxt=".$responseTxt."\n return_url_log:isSign=".$isSignStr.",";
            $log_text = $log_text.createLinkString($_GET);
            logResult($log_text);
            //Validation notes: see verifyNotify() for the failure causes.
            if (preg_match("/true$/i",$responseTxt) && $isSign) {
                return true;
            } else {
                return false;
            }
        }
    }
    /**
     * Recompute and check the signature of a returned parameter set.
     * @param $para_temp parameters carried by the notification/return
     * @param $sign signature value reported by Alipay
     * @return bool signature verification result
     */
    function getSignVeryfy($para_temp, $sign) {
        //Strip empty values and the signature parameters themselves.
        $para_filter = paraFilter($para_temp);
        //Sort the remaining parameters by key.
        $para_sort = argSort($para_filter);
        //Join them into a "key=value&key=value" query string.
        $prestr = createLinkstring($para_sort);
        $isSgin = false;
        switch (strtoupper(trim($this->alipay_config['sign_type']))) {
            case "MD5" :
                $isSgin = md5Verify($prestr, $sign, $this->alipay_config['key']);
                break;
            default :
                $isSgin = false;
        }
        return $isSgin;
    }
    /**
     * Query Alipay's remote server for the ATN result of a notification.
     * @param $notify_id notification id to verify
     * @return string server response; expected values:
     *         invalid - bad command parameters: check that partner and key
     *                   are not empty in the return handling
     *         true    - the notification is genuine
     *         false   - check firewalls/blocked ports, or whether more than
     *                   one minute elapsed since the notification was sent
     */
    function getResponse($notify_id) {
        $transport = strtolower(trim($this->alipay_config['transport']));
        $partner = trim($this->alipay_config['partner']);
        $veryfy_url = '';
        if($transport == 'https') {
            $veryfy_url = $this->https_verify_url;
        }
        else {
            $veryfy_url = $this->http_verify_url;
        }
        $veryfy_url = $veryfy_url."partner=" . $partner . "&notify_id=" . $notify_id;
        $responseTxt = getHttpResponseGET($veryfy_url, $this->alipay_config['cacert']);
        return $responseTxt;
    }
}
?>
| lovebull/wild_weiphp | Addons/Payment/Controller/Alipay/AlipayNotify.class.php | PHP | apache-2.0 | 5,320 |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.eas.opc.da.dcom;
import org.jinterop.dcom.common.JIException;
import org.jinterop.dcom.core.JIStruct;
import org.jinterop.dcom.core.JIVariant;
/**
 * Wrapper around the DCOM OPCITEMSTATE structure used by OPC DA reads:
 * client handle, timestamp, quality word and the item's value.
 *
 * @author pk
 */
public class OPCITEMSTATE
{
    // Underlying DCOM struct, as received over the wire.
    private JIStruct struct;
    /**
     * Builds the member layout used to unmarshal an OPCITEMSTATE:
     * hClient, ftTimeStamp, wQuality, wReserved, vDataValue.
     */
    public static JIStruct getEmptyStruct() throws JIException
    {
        JIStruct emptyStruct = new JIStruct();
        emptyStruct.addMember(Integer.class); //hClient
        emptyStruct.addMember(new FileTime().getStruct()); //ftTimeStamp
        emptyStruct.addMember(Short.class); //wQuality
        emptyStruct.addMember(Short.class); //wReserved
        emptyStruct.addMember(JIVariant.class); //vDataValue
        return emptyStruct;
    }
    public OPCITEMSTATE(JIStruct struct)
    {
        this.struct = struct;
    }
    /** Returns the raw underlying struct. */
    public JIStruct getStruct()
    {
        return struct;
    }
    /** hClient: client-assigned item handle (member 0). */
    public Integer getClientHandle()
    {
        return (Integer) struct.getMember(0);
    }
    /** ftTimeStamp: timestamp of the value (member 1). */
    public FileTime getTimeStamp()
    {
        return new FileTime((JIStruct) struct.getMember(1));
    }
    /** wQuality: quality word (member 2). */
    public Short getQuality()
    {
        return (Short) struct.getMember(2);
    }
    /**
     * vDataValue: the item's value (member 4). Member 3 (wReserved) is
     * deliberately not exposed.
     */
    public JIVariant getDataValue()
    {
        return (JIVariant) struct.getMember(4);
    }
}
| altsoft/PlatypusJS | opc-da/src/main/java/com/eas/opc/da/dcom/OPCITEMSTATE.java | Java | apache-2.0 | 1,334 |
/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include "paddle/fluid/operators/fill_zeros_like_op.h"
namespace paddle {
namespace operators {
// Operator definition for fill_zeros_like. Shape inference only: the output
// gets exactly the dims (and LoD) of the input.
class FillZerosLikeOp : public framework::OperatorWithKernel {
 public:
  using framework::OperatorWithKernel::OperatorWithKernel;

  // Validates that X and Out exist, then propagates X's dims and LoD to Out.
  void InferShape(framework::InferShapeContext *ctx) const override {
    PADDLE_ENFORCE(ctx->HasInput("X"),
                   "Input(X) of FillZerosLikeOp should not be null.");
    PADDLE_ENFORCE(ctx->HasOutput("Out"),
                   "Output(Out) of FillZerosLikeOp should not be null.");
    ctx->SetOutputDim("Out", ctx->GetInputDim("X"));
    ctx->ShareLoD("X", /*->*/ "Out");
  }
};
// Declares the fill_zeros_like op's proto: one input X, one output Out, and
// the user-facing documentation string.
class FillZerosLikeOpMaker : public framework::OpProtoAndCheckerMaker {
 public:
  void Make() override {
    AddInput("X", "The input of fill-zeros-like op.");
    AddOutput("Out", "The variable will be filled up with zeros.");
    AddComment(R"DOC(
FillZerosLike Operator.

Fill up a variable with zeros.
The output will have the same size as the input.

)DOC");
  }
};
} // namespace operators
} // namespace paddle
namespace ops = paddle::operators;
// Registered without a gradient op (REGISTER_OP_WITHOUT_GRADIENT).
REGISTER_OP_WITHOUT_GRADIENT(fill_zeros_like, ops::FillZerosLikeOp,
                             ops::FillZerosLikeOpMaker);
// CPU kernels for int, int64, float, double and bool element types.
REGISTER_OP_CPU_KERNEL(
    fill_zeros_like,
    ops::FillZerosLikeKernel<paddle::platform::CPUDeviceContext, int>,
    ops::FillZerosLikeKernel<paddle::platform::CPUDeviceContext, int64_t>,
    ops::FillZerosLikeKernel<paddle::platform::CPUDeviceContext, float>,
    ops::FillZerosLikeKernel<paddle::platform::CPUDeviceContext, double>,
    ops::FillZerosLikeKernel<paddle::platform::CPUDeviceContext, bool>);
| Canpio/Paddle | paddle/fluid/operators/fill_zeros_like_op.cc | C++ | apache-2.0 | 2,212 |
#region Using
using System;
#endregion
namespace PPWCode.Util.OddsAndEnds.I.DateTimeProvider
{
public class GenuineDateTimeProvider : IDateTimeProvider
{
public GenuineDateTimeProvider()
{
}
public static IDateTimeProvider CreateInstance()
{
return new GenuineDateTimeProvider();
}
public DateTime Today
{
get { return DateTime.Today; }
}
public DateTime Now
{
get { return DateTime.Now; }
}
}
} | jandppw/ppwcode-recovered-from-google-code | dotnet/Util/OddsAndEnds/I/2.n/2.1.1/src/I/DateTimeProvider/GenuineDateTimeProvider.cs | C# | apache-2.0 | 577 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.rest.action.admin.cluster.node.restart;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.node.restart.NodesRestartRequest;
import org.elasticsearch.action.admin.cluster.node.restart.NodesRestartResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.*;
import java.io.IOException;
import static org.elasticsearch.rest.action.support.RestXContentBuilder.restContentBuilder;
/**
 * REST handler for POST /_cluster/nodes/_restart and
 * /_cluster/nodes/{nodeId}/_restart: forwards a NodesRestartRequest to the
 * cluster-admin client and streams the per-node result back as JSON.
 */
public class RestNodesRestartAction extends BaseRestHandler {

    @Inject
    public RestNodesRestartAction(Settings settings, Client client, RestController controller) {
        super(settings, client);
        // Both the all-nodes and the node-filtered variants route here.
        controller.registerHandler(RestRequest.Method.POST, "/_cluster/nodes/_restart", this);
        controller.registerHandler(RestRequest.Method.POST, "/_cluster/nodes/{nodeId}/_restart", this);
    }

    @Override
    public void handleRequest(final RestRequest request, final RestChannel channel) {
        // Empty nodeId param means "all nodes".
        String[] nodesIds = Strings.splitStringByCommaToArray(request.param("nodeId"));
        NodesRestartRequest nodesRestartRequest = new NodesRestartRequest(nodesIds);
        nodesRestartRequest.listenerThreaded(false);
        // Optional "delay" request parameter overrides the default delay.
        nodesRestartRequest.delay(request.paramAsTime("delay", nodesRestartRequest.delay()));
        client.admin().cluster().nodesRestart(nodesRestartRequest, new ActionListener<NodesRestartResponse>() {
            @Override
            public void onResponse(NodesRestartResponse result) {
                try {
                    // Response shape: { cluster_name, nodes: { <id>: { name } } }
                    XContentBuilder builder = restContentBuilder(request);
                    builder.startObject();
                    builder.field("cluster_name", result.getClusterName().value());

                    builder.startObject("nodes");
                    for (NodesRestartResponse.NodeRestartResponse nodeInfo : result) {
                        builder.startObject(nodeInfo.getNode().id());
                        builder.field("name", nodeInfo.getNode().name());
                        builder.endObject();
                    }
                    builder.endObject();

                    builder.endObject();
                    channel.sendResponse(new XContentRestResponse(request, RestStatus.OK, builder));
                } catch (Throwable e) {
                    // Serialization failure is reported through the same
                    // error path as an action failure.
                    onFailure(e);
                }
            }

            @Override
            public void onFailure(Throwable e) {
                try {
                    channel.sendResponse(new XContentThrowableRestResponse(request, e));
                } catch (IOException e1) {
                    logger.error("Failed to send failure response", e1);
                }
            }
        });
    }
}
| libosu/elasticsearch | src/main/java/org/elasticsearch/rest/action/admin/cluster/node/restart/RestNodesRestartAction.java | Java | apache-2.0 | 3,716 |
/*************************GO-LICENSE-START*********************************
* Copyright 2015 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*************************GO-LICENSE-END***********************************/
package com.thoughtworks.cruise.api.response;
import java.util.List;
/**
 * Value object describing why a pipeline was triggered: whether it was
 * forced, the trigger message, the approver, and the material revisions.
 */
public class BuildCause {
    // NOTE(review): field names are snake_case — presumably mapped 1:1 onto
    // the server's JSON payload; do not rename without confirming.
    private boolean trigger_forced;
    private String trigger_message;
    private String approver;
    private List<MaterialRevision> material_revisions;

    public BuildCause(boolean triggerForced, String triggerMessage,
                      String approver, List<MaterialRevision> materialRevisions) {
        this.trigger_forced = triggerForced;
        this.trigger_message = triggerMessage;
        this.approver = approver;
        this.material_revisions = materialRevisions;
    }

    public boolean isTriggerForced() {
        return this.trigger_forced;
    }

    public void setTriggerForced(boolean triggerForced) {
        this.trigger_forced = triggerForced;
    }

    public String getTriggerMessage() {
        return this.trigger_message;
    }

    public void setTriggerMessage(String triggerMessage) {
        this.trigger_message = triggerMessage;
    }

    public String getApprover() {
        return this.approver;
    }

    public void setApprover(String approver) {
        this.approver = approver;
    }

    public List<MaterialRevision> getMaterialRevisions() {
        return this.material_revisions;
    }

    public void setMaterialRevisions(List<MaterialRevision> materialRevisions) {
        this.material_revisions = materialRevisions;
    }
}
| ketan/functional-tests | src/test/java/com/thoughtworks/cruise/api/response/BuildCause.java | Java | apache-2.0 | 1,969 |
var assert = require('assert');
var util = require('util');
var events = require('events');
var utils = require('../../lib/utils');
var helper = require('../test-helper');
var InetAddress = require('../../lib/types').InetAddress;
// Unit tests for the InetAddress type: constructor validation, string
// rendering (IPv4 and IPv6 with zero-run compression), equality, and
// parsing from string form.
// NOTE(review): the `new Buffer(...)` constructor is deprecated in modern
// Node (Buffer.from / Buffer.alloc are the replacements) — left as-is,
// presumably for the old Node versions this suite targets; confirm before
// modernizing.
describe('InetAddress', function () {
  describe('constructor', function () {
    it('should validate the Buffer length', function () {
      // Only 4-byte (IPv4) and 16-byte (IPv6) buffers are accepted.
      assert.throws(function () {
        new InetAddress(new Buffer(10));
      });
      assert.throws(function () {
        new InetAddress(null);
      });
      assert.throws(function () {
        new InetAddress();
      });
      assert.doesNotThrow(function () {
        new InetAddress(new Buffer(16));
      });
      assert.doesNotThrow(function () {
        new InetAddress(new Buffer(4));
      });
    });
  });
  describe('#toString()', function () {
    it('should convert IPv6 to string representation', function () {
      // Covers zero-run compression ('::') at the start, middle and end.
      var val = new InetAddress(new Buffer('aabb0000eeff00112233445566778899', 'hex'));
      assert.strictEqual(val.version, 6);
      assert.strictEqual(val.toString(), 'aabb::eeff:11:2233:4455:6677:8899');
      val = new InetAddress(new Buffer('aabbccddeeff00112233445566778899', 'hex'));
      assert.strictEqual(val.toString(), 'aabb:ccdd:eeff:11:2233:4455:6677:8899');
      val = new InetAddress(new Buffer('aabb0000000000112233445566778899', 'hex'));
      assert.strictEqual(val.toString(), 'aabb::11:2233:4455:6677:8899');
      val = new InetAddress(new Buffer('aabb0001000100112233445500000000', 'hex'));
      assert.strictEqual(val.toString(), 'aabb:1:1:11:2233:4455::');
      val = new InetAddress(new Buffer('00000000000100112233445500aa00bb', 'hex'));
      assert.strictEqual(val.toString(), '::1:11:2233:4455:aa:bb');
      val = new InetAddress(new Buffer('000000000000000022330000000000bb', 'hex'));
      assert.strictEqual(val.toString(), '::2233:0:0:bb');
      val = new InetAddress(new Buffer('00000000000000000000000000000001', 'hex'));
      assert.strictEqual(val.toString(), '::1');
    });
    it('should convert IPv4 to string representation', function () {
      var val = new InetAddress(new Buffer([127, 0, 0, 1]));
      assert.strictEqual(val.version, 4);
      assert.strictEqual(val.toString(), '127.0.0.1');
      val = new InetAddress(new Buffer([198, 168, 1, 1]));
      assert.strictEqual(val.toString(), '198.168.1.1');
      val = new InetAddress(new Buffer([10, 12, 254, 32]));
      assert.strictEqual(val.toString(), '10.12.254.32');
    });
  });
  describe('#equals()', function () {
    it('should return true when the bytes are the same', function () {
      // Equality is byte-wise: same bytes in a different Buffer instance
      // must still compare equal.
      var hex1 = 'aabb0000eeff00112233445566778899';
      var hex2 = 'ffff0000eeff00112233445566778899';
      var buf1 = new Buffer(hex1, 'hex');
      var val1 = new InetAddress(buf1);
      var val2 = new InetAddress(new Buffer(hex2, 'hex'));
      assert.ok(val1.equals(new InetAddress(buf1)));
      assert.ok(val1.equals(new InetAddress(new Buffer(hex1, 'hex'))));
      assert.ok(!val1.equals(val2));
    });
  });
  describe('fromString()', function () {
    it('should parse IPv6 string representation', function () {
      // Round-trip: parse then render back to the same canonical form.
      [
        'aabb::eeff:11:2233:4455:6677:8899',
        'aabb:1:eeff:11:2233:4455:6677:8899',
        'aabb:1:eeff:11:2233:4455:6677:8899',
        '::1:11:2233:4455:aa:bb',
        '::2233:0:0:bb',
        '::1234',
        '10fa::1'
      ].forEach(function (item) {
        var val = InetAddress.fromString(item, 'hex');
        helper.assertInstanceOf(val, InetAddress);
        assert.strictEqual(val.toString(), item);
      });
    });
    it('should parse IPv4 string representation', function () {
      var val = InetAddress.fromString('127.0.0.1');
      helper.assertInstanceOf(val, InetAddress);
      assert.strictEqual(val.toString(), '127.0.0.1');
      val = InetAddress.fromString('198.168.1.1');
      helper.assertInstanceOf(val, InetAddress);
      assert.strictEqual(val.toString(), '198.168.1.1');
      val = InetAddress.fromString('10.11.12.13');
      helper.assertInstanceOf(val, InetAddress);
      assert.strictEqual(val.toString(), '10.11.12.13');
    });
    it('should throw when can not parse to 4 or 16 bytes', function () {
      assert.throws(function () {
        InetAddress.fromString('127.0.0.1.10');
      }, Error);
    });
  });
});
# Copyright 2015 CloudByte Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import uuid
from oslo_log import log as logging
from oslo_service import loopingcall
from oslo_utils import units
import six
from six.moves import http_client
from six.moves import urllib
from cinder import exception
from cinder.i18n import _, _LE, _LI
from cinder.volume.drivers.cloudbyte import options
from cinder.volume.drivers.san import san
LOG = logging.getLogger(__name__)
class CloudByteISCSIDriver(san.SanISCSIDriver):
"""CloudByte ISCSI Driver.
Version history:
1.0.0 - Initial driver
1.1.0 - Add chap support and minor bug fixes
1.1.1 - Add wait logic for delete volumes
"""
VERSION = '1.1.1'
volume_stats = {}
    def __init__(self, *args, **kwargs):
        """Initialize the CloudByte driver on top of the SAN iSCSI base.

        Registers the CloudByte-specific config option groups (QoS group
        creation, volume creation, connection/retry tuning) and primes the
        backend stats.
        """
        super(CloudByteISCSIDriver, self).__init__(*args, **kwargs)
        self.configuration.append_config_values(
            options.cloudbyte_add_qosgroup_opts)
        self.configuration.append_config_values(
            options.cloudbyte_create_volume_opts)
        self.configuration.append_config_values(
            options.cloudbyte_connection_opts)
        # Whether CHAP credentials are included in the provider details.
        self.cb_use_chap = self.configuration.use_chap_auth
        # Prime self.volume_stats so the backend is reported early.
        self.get_volume_stats()
def _get_url(self, cmd, params, apikey):
"""Will prepare URL that connects to CloudByte."""
if params is None:
params = {}
params['command'] = cmd
params['response'] = 'json'
sanitized_params = {}
for key in params:
value = params[key]
if value is not None:
sanitized_params[key] = six.text_type(value)
sanitized_params = urllib.parse.urlencode(sanitized_params)
url = ('/client/api?%s' % sanitized_params)
LOG.debug("CloudByte URL to be executed: [%s].", url)
# Add the apikey
api = {}
api['apiKey'] = apikey
url = url + '&' + urllib.parse.urlencode(api)
return url
def _extract_http_error(self, error_data):
# Extract the error message from error_data
error_msg = ""
# error_data is a single key value dict
for key, value in error_data.items():
error_msg = value.get('errortext')
return error_msg
def _execute_and_get_response_details(self, host, url):
"""Will prepare response after executing an http request."""
res_details = {}
try:
# Prepare the connection
connection = http_client.HTTPSConnection(host)
# Make the connection
connection.request('GET', url)
# Extract the response as the connection was successful
response = connection.getresponse()
# Read the response
data = response.read()
# Transform the json string into a py object
data = json.loads(data)
# Extract http error msg if any
error_details = None
if response.status != 200:
error_details = self._extract_http_error(data)
# Prepare the return object
res_details['data'] = data
res_details['error'] = error_details
res_details['http_status'] = response.status
finally:
connection.close()
LOG.debug("CloudByte connection was closed successfully.")
return res_details
    def _api_request_for_cloudbyte(self, cmd, params, version=None):
        """Execute *cmd* against the configured CloudByte management host.

        Builds the API URL (including the configured API key), performs the
        HTTPS GET and returns the parsed JSON payload.

        :raises VolumeBackendAPIException: when the API key is missing, the
            HTTP call fails, or the response status is not 200.
        """
        LOG.debug("Executing CloudByte API for command [%s].", cmd)
        # NOTE(review): 'version' is computed but never used below — kept
        # for interface compatibility.
        if version is None:
            version = CloudByteISCSIDriver.VERSION
        # Below is retrieved from /etc/cinder/cinder.conf
        apikey = self.configuration.cb_apikey
        if apikey is None:
            msg = (_("API key is missing for CloudByte driver."))
            raise exception.VolumeBackendAPIException(data=msg)
        host = self.configuration.san_ip
        # Construct the CloudByte URL with query params
        url = self._get_url(cmd, params, apikey)
        data = {}
        error_details = None
        http_status = None
        try:
            # Execute CloudByte API & frame the response
            res_obj = self._execute_and_get_response_details(host, url)
            data = res_obj['data']
            error_details = res_obj['error']
            http_status = res_obj['http_status']
        except http_client.HTTPException as ex:
            msg = (_("Error executing CloudByte API [%(cmd)s], "
                     "Error: %(err)s.") %
                   {'cmd': cmd, 'err': ex})
            raise exception.VolumeBackendAPIException(data=msg)
        # Check if it was an error response from CloudByte
        if http_status != 200:
            msg = (_("Failed to execute CloudByte API [%(cmd)s]."
                     " Http status: %(status)s,"
                     " Error: %(error)s.") %
                   {'cmd': cmd, 'status': http_status,
                    'error': error_details})
            raise exception.VolumeBackendAPIException(data=msg)
        LOG.info(_LI("CloudByte API executed successfully for command [%s]."),
                 cmd)
        return data
def _request_tsm_details(self, account_id):
params = {"accountid": account_id}
# List all CloudByte tsm
data = self._api_request_for_cloudbyte("listTsm", params)
return data
def _override_params(self, default_dict, filtered_user_dict):
"""Override the default config values with user provided values."""
if filtered_user_dict is None:
# Nothing to override
return default_dict
for key, value in default_dict.items():
# Fill the user dict with default options based on condition
if filtered_user_dict.get(key) is None and value is not None:
filtered_user_dict[key] = value
return filtered_user_dict
def _add_qos_group_request(self, volume, tsmid, volume_name):
# Get qos related params from configuration
params = self.configuration.cb_add_qosgroup
if params is None:
params = {}
params['name'] = "QoS_" + volume_name
params['tsmid'] = tsmid
data = self._api_request_for_cloudbyte("addQosGroup", params)
return data
def _create_volume_request(self, volume, datasetid, qosgroupid,
tsmid, volume_name):
size = volume.get('size')
quotasize = six.text_type(size) + "G"
# Prepare the user input params
params = {
"datasetid": datasetid,
"name": volume_name,
"qosgroupid": qosgroupid,
"tsmid": tsmid,
"quotasize": quotasize
}
# Get the additional params from configuration
params = self._override_params(self.configuration.cb_create_volume,
params)
data = self._api_request_for_cloudbyte("createVolume", params)
return data
def _queryAsyncJobResult_request(self, jobid):
async_cmd = "queryAsyncJobResult"
params = {
"jobId": jobid,
}
data = self._api_request_for_cloudbyte(async_cmd, params)
return data
def _get_tsm_details(self, data, tsm_name, account_name):
# Filter required tsm's details
tsms = data['listTsmResponse'].get('listTsm')
if tsms is None:
msg = (_("TSM [%(tsm)s] was not found in CloudByte storage "
"for account [%(account)s].") %
{'tsm': tsm_name, 'account': account_name})
raise exception.VolumeBackendAPIException(data=msg)
tsmdetails = {}
for tsm in tsms:
if tsm['name'] == tsm_name:
tsmdetails['datasetid'] = tsm['datasetid']
tsmdetails['tsmid'] = tsm['id']
break
return tsmdetails
    def _retry_volume_operation(self, operation, retries,
                                max_retries, jobid,
                                cb_volume):
        """Single poll step for an async CloudByte job.

        Invoked repeatedly by a FixedIntervalLoopingCall. Each call queries
        the job status and:
          * stops the loop (success) when jobstatus == 1,
          * stops the loop with retvalue=False once max_retries polls have
            been made,
          * otherwise bumps retries['count'] and lets the loop poll again.

        :param operation: human-readable operation name, used in logs
        :param retries: mutable dict carrying the poll count across calls
        :param max_retries: maximum number of polls before giving up
        :param jobid: CloudByte async job id to query
        :param cb_volume: volume name/id, used in logs
        :raises VolumeBackendAPIException: when the job query comes back
            with an empty payload
        """
        # Query the CloudByte storage with this jobid
        volume_response = self._queryAsyncJobResult_request(jobid)
        count = retries['count']
        result_res = None
        if volume_response is not None:
            result_res = volume_response.get('queryasyncjobresultresponse')
        if result_res is None:
            msg = (_(
                "Null response received while querying "
                "for [%(operation)s] based job [%(job)s] "
                "at CloudByte storage.") %
                {'operation': operation, 'job': jobid})
            raise exception.VolumeBackendAPIException(data=msg)
        status = result_res.get('jobstatus')
        if status == 1:
            # Job finished successfully — stop the polling loop.
            LOG.info(_LI("CloudByte operation [%(operation)s] succeeded for "
                         "volume [%(cb_volume)s]."),
                     {'operation': operation, 'cb_volume': cb_volume})
            raise loopingcall.LoopingCallDone()
        elif count == max_retries:
            # All attempts exhausted
            LOG.error(_LE("CloudByte operation [%(operation)s] failed"
                          " for volume [%(vol)s]. Exhausted all"
                          " [%(max)s] attempts."),
                      {'operation': operation,
                       'vol': cb_volume,
                       'max': max_retries})
            raise loopingcall.LoopingCallDone(retvalue=False)
        else:
            # Not done yet — record the attempt and keep polling.
            count += 1
            retries['count'] = count
            LOG.debug("CloudByte operation [%(operation)s] for"
                      " volume [%(vol)s]: retry [%(retry)s] of [%(max)s].",
                      {'operation': operation,
                       'vol': cb_volume,
                       'retry': count,
                       'max': max_retries})
    def _wait_for_volume_creation(self, volume_response, cb_volume_name):
        """Block until the async createVolume job finishes (or retries run out).

        Extracts the job id from the createVolume response and polls it via
        _retry_volume_operation inside a FixedIntervalLoopingCall, using the
        configured retry interval and retry count.

        :raises VolumeBackendAPIException: when the response or its job id
            is missing
        """
        vol_res = volume_response.get('createvolumeresponse')
        if vol_res is None:
            msg = _("Null response received while creating volume [%s] "
                    "at CloudByte storage.") % cb_volume_name
            raise exception.VolumeBackendAPIException(data=msg)
        jobid = vol_res.get('jobid')
        if jobid is None:
            msg = _("Job id not found in CloudByte's "
                    "create volume [%s] response.") % cb_volume_name
            raise exception.VolumeBackendAPIException(data=msg)
        # Polling cadence comes from driver configuration.
        retry_interval = (
            self.configuration.cb_confirm_volume_create_retry_interval)
        max_retries = (
            self.configuration.cb_confirm_volume_create_retries)
        retries = {'count': 0}
        timer = loopingcall.FixedIntervalLoopingCall(
            self._retry_volume_operation,
            'Create Volume',
            retries,
            max_retries,
            jobid,
            cb_volume_name)
        # Blocks until _retry_volume_operation raises LoopingCallDone.
        timer.start(interval=retry_interval).wait()
    def _wait_for_volume_deletion(self, volume_response, cb_volume_id):
        """Block until the async deleteFileSystem job finishes.

        Mirrors _wait_for_volume_creation, but reads the
        'deleteFileSystemResponse' key and the delete-specific retry
        configuration.

        :raises VolumeBackendAPIException: when the response or its job id
            is missing
        """
        vol_res = volume_response.get('deleteFileSystemResponse')
        if vol_res is None:
            msg = _("Null response received while deleting volume [%s] "
                    "at CloudByte storage.") % cb_volume_id
            raise exception.VolumeBackendAPIException(data=msg)
        jobid = vol_res.get('jobid')
        if jobid is None:
            msg = _("Job id not found in CloudByte's "
                    "delete volume [%s] response.") % cb_volume_id
            raise exception.VolumeBackendAPIException(data=msg)
        # Polling cadence comes from driver configuration.
        retry_interval = (
            self.configuration.cb_confirm_volume_delete_retry_interval)
        max_retries = (
            self.configuration.cb_confirm_volume_delete_retries)
        retries = {'count': 0}
        timer = loopingcall.FixedIntervalLoopingCall(
            self._retry_volume_operation,
            'Delete Volume',
            retries,
            max_retries,
            jobid,
            cb_volume_id)
        # Blocks until _retry_volume_operation raises LoopingCallDone.
        timer.start(interval=retry_interval).wait()
def _get_volume_id_from_response(self, cb_volumes, volume_name):
"""Search the volume in CloudByte storage."""
vol_res = cb_volumes.get('listFilesystemResponse')
if vol_res is None:
msg = _("Null response received from CloudByte's "
"list filesystem.")
raise exception.VolumeBackendAPIException(data=msg)
volumes = vol_res.get('filesystem')
if volumes is None:
msg = _('No volumes found in CloudByte storage.')
raise exception.VolumeBackendAPIException(data=msg)
volume_id = None
for vol in volumes:
if vol['name'] == volume_name:
volume_id = vol['id']
break
if volume_id is None:
msg = _("Volume [%s] not found in CloudByte "
"storage.") % volume_name
raise exception.VolumeBackendAPIException(data=msg)
return volume_id
def _get_qosgroupid_id_from_response(self, cb_volumes, volume_id):
volumes = cb_volumes['listFilesystemResponse']['filesystem']
qosgroup_id = None
for vol in volumes:
if vol['id'] == volume_id:
qosgroup_id = vol['groupid']
break
return qosgroup_id
def _build_provider_details_from_volume(self, volume, chap):
    """Compose provider_* model fields from a CloudByte volume record.

    :param volume: CloudByte volume dict (needs ipaddress/iqnname/id)
    :param chap: CHAP credential dict (username/password) or falsy
    :returns: model update dict for Cinder
    """
    location = '%s %s %s' % (volume['ipaddress'] + ':3260',
                             volume['iqnname'], 0)
    model_update = {
        'provider_location': location,
        # Will provide CHAP Authentication on forthcoming patches/release
        'provider_auth': ('CHAP %(username)s %(password)s' % chap
                          if chap else None),
        'provider_id': volume['id'],
    }

    LOG.debug("CloudByte volume iqn: [%(iqn)s] provider id: [%(proid)s].",
              {'iqn': volume['iqnname'], 'proid': volume['id']})

    return model_update
def _build_provider_details_from_response(self,
                                          cb_volumes,
                                          volume_name,
                                          chap):
    """Get provider information for the named volume in a listing.

    Returns an empty dict when the volume is not present.
    """
    filesystems = cb_volumes['listFilesystemResponse']['filesystem']
    for fs in filesystems:
        if fs['name'] != volume_name:
            continue
        # Delegate field construction to the single-volume helper.
        return self._build_provider_details_from_volume(fs, chap)
    return {}
def _get_initiator_group_id_from_response(self, data):
"""Find iSCSI initiator group id."""
ig_list_res = data.get('listInitiatorsResponse')
if ig_list_res is None:
msg = _("Null response received from CloudByte's "
"list iscsi initiators.")
raise exception.VolumeBackendAPIException(data=msg)
ig_list = ig_list_res.get('initiator')
if ig_list is None:
msg = _('No iscsi initiators were found in CloudByte.')
raise exception.VolumeBackendAPIException(data=msg)
ig_id = None
for ig in ig_list:
if ig.get('initiatorgroup') == 'ALL':
ig_id = ig['id']
break
return ig_id
def _get_iscsi_service_id_from_response(self, volume_id, data):
iscsi_service_res = data.get('listVolumeiSCSIServiceResponse')
if iscsi_service_res is None:
msg = _("Null response received from CloudByte's "
"list volume iscsi service.")
raise exception.VolumeBackendAPIException(data=msg)
iscsi_service_list = iscsi_service_res.get('iSCSIService')
if iscsi_service_list is None:
msg = _('No iscsi services found in CloudByte storage.')
raise exception.VolumeBackendAPIException(data=msg)
iscsi_id = None
for iscsi_service in iscsi_service_list:
if iscsi_service['volume_id'] == volume_id:
iscsi_id = iscsi_service['id']
break
if iscsi_id is None:
msg = _("No iscsi service found for CloudByte "
"volume [%s].") % volume_id
raise exception.VolumeBackendAPIException(data=msg)
else:
return iscsi_id
def _request_update_iscsi_service(self, iscsi_id, ig_id, ag_id):
    """Attach an initiator group (and optional CHAP auth group) to the
    iSCSI service of a volume.

    :param iscsi_id: id of the iSCSI service to update
    :param ig_id: initiator group id to attach
    :param ag_id: auth group id; when truthy, CHAP auth is enabled too
    """
    payload = {
        "id": iscsi_id,
        "igid": ig_id,
    }
    if ag_id:
        payload.update(authgroupid=ag_id, authmethod="CHAP")

    self._api_request_for_cloudbyte('updateVolumeiSCSIService', payload)
def _get_cb_snapshot_path(self, snapshot_name, volume_id):
    """Find CloudByte snapshot path.

    :param snapshot_name: CloudByte-side snapshot name to look for
    :param volume_id: CloudByte volume id whose snapshots are listed
    :returns: the snapshot's path, or None when not found
    """
    params = {"id": volume_id}

    # List all snapshots of the volume from CloudByte
    cb_snapshots_list = self._api_request_for_cloudbyte(
        'listStorageSnapshots', params)

    # Filter required snapshot from list
    cb_snap_res = cb_snapshots_list.get('listDatasetSnapshotsResponse')
    cb_snapshot = {}
    if cb_snap_res is not None:
        # Guard against a response without a 'snapshot' entry; the
        # previous code would assign None here and crash while
        # iterating below.
        cb_snapshot = cb_snap_res.get('snapshot') or {}

    path = None

    # Filter snapshot path
    for snap in cb_snapshot:
        if snap['name'] == snapshot_name:
            path = snap['path']
            break

    return path
def _get_account_id_from_name(self, account_name):
    """Resolve a CloudByte account name to its account id.

    :raises: VolumeBackendAPIException when the account is unknown
    """
    reply = self._api_request_for_cloudbyte("listAccount", {})
    accounts = reply["listAccountResponse"]["account"]

    # First account whose name matches supplies the id.
    account_id = next((acct.get("id") for acct in accounts
                       if acct.get("name") == account_name), None)

    if account_id is None:
        msg = _("Failed to get CloudByte account details "
                "for account [%s].") % account_name
        raise exception.VolumeBackendAPIException(data=msg)

    return account_id
def _search_volume_id(self, cb_volumes, cb_volume_id):
"""Search the volume in CloudByte."""
volumes_res = cb_volumes.get('listFilesystemResponse')
if volumes_res is None:
msg = _("No response was received from CloudByte's "
"list filesystem api call.")
raise exception.VolumeBackendAPIException(data=msg)
volumes = volumes_res.get('filesystem')
if volumes is None:
msg = _("No volume was found at CloudByte storage.")
raise exception.VolumeBackendAPIException(data=msg)
volume_id = None
for vol in volumes:
if vol['id'] == cb_volume_id:
volume_id = vol['id']
break
return volume_id
def _get_storage_info(self, tsmname):
    """Get CloudByte TSM that is associated with OpenStack backend.

    :param tsmname: name of the TSM configured for this backend
    :returns: dict with total_capacity_gb / free_capacity_gb (zeros
              when the TSM is not found)
    :raises: VolumeBackendAPIException when the API gives no reply
    """
    # List all TSMs from CloudByte storage
    tsm_list = self._api_request_for_cloudbyte('listTsm', params={})

    tsm_details_res = tsm_list.get('listTsmResponse')
    if tsm_details_res is None:
        msg = _("No response was received from CloudByte storage "
                "list tsm API call.")
        raise exception.VolumeBackendAPIException(data=msg)

    # Guard against a reply without a 'listTsm' entry; iterating None
    # would raise TypeError in the loop below.
    tsm_details = tsm_details_res.get('listTsm') or []

    data = {}
    found = False

    # Filter required TSM and get storage info
    for tsms in tsm_details:
        if tsms['name'] == tsmname:
            found = True
            # Quotas are reported in MB; convert to GB.
            data['total_capacity_gb'] = (
                float(tsms['numericquota']) / units.Ki)
            data['free_capacity_gb'] = (
                float(tsms['availablequota']) / units.Ki)
            break

    # TSM not found in CloudByte storage
    if not found:
        LOG.error(_LE("TSM [%s] not found in CloudByte storage."), tsmname)
        data['total_capacity_gb'] = 0.0
        data['free_capacity_gb'] = 0.0

    return data
def _get_auth_group_id_from_response(self, data):
    """Find the id of the iSCSI auth group named by cb_auth_group.

    :param data: raw reply of the listiSCSIAuthGroup API call
    :returns: the matching auth group id
    :raises: VolumeBackendAPIException on a malformed reply or when
             the configured group does not exist
    """
    chap_group = self.configuration.cb_auth_group

    ag_list_res = data.get('listiSCSIAuthGroupResponse')
    if ag_list_res is None:
        msg = _("Null response received from CloudByte's "
                "list iscsi auth groups.")
        raise exception.VolumeBackendAPIException(data=msg)

    ag_list = ag_list_res.get('authgroup')
    if ag_list is None:
        msg = _('No iscsi auth groups were found in CloudByte.')
        raise exception.VolumeBackendAPIException(data=msg)

    ag_id = None
    found = False
    for ag in ag_list:
        if ag.get('name') == chap_group:
            ag_id = ag['id']
            found = True
            break

    # Mirrors the original for/else: exhausting the list without a
    # match is an error.
    if not found:
        msg = _("Auth group [%s] details not found in "
                "CloudByte storage.") % chap_group
        raise exception.VolumeBackendAPIException(data=msg)

    return ag_id
def _get_auth_group_info(self, account_id, ag_id):
    """Fetch the CHAP credentials stored in an auth group.

    :param account_id: CloudByte account owning the auth group
    :param ag_id: auth group id to query
    :returns: dict with 'username', 'password' and 'ag_id'
    :raises: VolumeBackendAPIException on a malformed reply
    """
    params = {"accountid": account_id, "authgroupid": ag_id}
    reply = self._api_request_for_cloudbyte(
        'listiSCSIAuthUser', params)

    users_res = reply.get('listiSCSIAuthUsersResponse')
    if users_res is None:
        msg = _("No response was received from CloudByte storage "
                "list iSCSI auth user API call.")
        raise exception.VolumeBackendAPIException(data=msg)

    users = users_res.get('authuser')
    if users is None:
        msg = _("Auth user details not found in CloudByte storage.")
        raise exception.VolumeBackendAPIException(data=msg)

    # Only the first auth user of the group is consulted.
    chap_user = users[0].get('chapusername')
    chap_password = users[0].get('chappassword')

    if chap_user is None or chap_password is None:
        msg = _("Invalid chap user details found in CloudByte storage.")
        raise exception.VolumeBackendAPIException(data=msg)

    return {'username': chap_user,
            'password': chap_password,
            'ag_id': ag_id}
def _get_chap_info(self, account_id):
    """Return the CHAP credentials configured for the given account."""
    auth_groups = self._api_request_for_cloudbyte(
        'listiSCSIAuthGroup', {"accountid": account_id})
    # Resolve the configured auth group, then read its credentials.
    ag_id = self._get_auth_group_id_from_response(auth_groups)
    return self._get_auth_group_info(account_id, ag_id)
def _export(self):
    """Build the export model; includes CHAP credentials when enabled."""
    model_update = {'provider_auth': None}

    if self.cb_use_chap is True:
        account_id = self._get_account_id_from_name(
            self.configuration.cb_account_name)
        chap = self._get_chap_info(account_id)
        model_update['provider_auth'] = ('CHAP %(username)s %(password)s'
                                         % chap)

    return model_update
def create_volume(self, volume):
    """Provision a CloudByte volume backing the given OpenStack volume.

    Sequence: resolve the account and TSM, create a QOS group, create
    the volume (async job, waited on), look up the new volume's iSCSI
    service, attach the 'ALL' initiator group (plus the CHAP auth group
    when enabled), and finally build the provider model for Cinder.

    :param volume: OpenStack volume dict; its 'id' (minus dashes)
                   becomes the CloudByte volume name
    :returns: provider model dict (provider_location/auth/id)
    """
    tsm_name = self.configuration.cb_tsm_name
    account_name = self.configuration.cb_account_name

    # Get account id of this account
    account_id = self._get_account_id_from_name(account_name)

    # Set backend storage volume name using OpenStack volume id
    cb_volume_name = volume['id'].replace("-", "")

    LOG.debug("Will create a volume [%(cb_vol)s] in TSM [%(tsm)s] "
              "at CloudByte storage w.r.t "
              "OpenStack volume [%(stack_vol)s].",
              {'cb_vol': cb_volume_name,
               'stack_vol': volume.get('id'),
               'tsm': tsm_name})

    tsm_data = self._request_tsm_details(account_id)
    tsm_details = self._get_tsm_details(tsm_data, tsm_name, account_name)

    # Send request to create a qos group before creating a volume
    LOG.debug("Creating qos group for CloudByte volume [%s].",
              cb_volume_name)
    qos_data = self._add_qos_group_request(
        volume, tsm_details.get('tsmid'), cb_volume_name)

    # Extract the qos group id from response
    qosgroupid = qos_data['addqosgroupresponse']['qosgroup']['id']

    LOG.debug("Successfully created qos group for CloudByte volume [%s].",
              cb_volume_name)

    # Send a create volume request to CloudByte API
    vol_data = self._create_volume_request(
        volume, tsm_details.get('datasetid'), qosgroupid,
        tsm_details.get('tsmid'), cb_volume_name)

    # Since create volume is an async call;
    # need to confirm the creation before proceeding further
    self._wait_for_volume_creation(vol_data, cb_volume_name)

    # Fetch iscsi id
    cb_volumes = self._api_request_for_cloudbyte(
        'listFileSystem', params={})
    volume_id = self._get_volume_id_from_response(cb_volumes,
                                                  cb_volume_name)

    params = {"storageid": volume_id}

    iscsi_service_data = self._api_request_for_cloudbyte(
        'listVolumeiSCSIService', params)
    iscsi_id = self._get_iscsi_service_id_from_response(
        volume_id, iscsi_service_data)

    # Fetch the initiator group ID (the catch-all 'ALL' group)
    params = {"accountid": account_id}

    iscsi_initiator_data = self._api_request_for_cloudbyte(
        'listiSCSIInitiator', params)
    ig_id = self._get_initiator_group_id_from_response(
        iscsi_initiator_data)

    LOG.debug("Updating iscsi service for CloudByte volume [%s].",
              cb_volume_name)

    ag_id = None
    chap_info = {}

    # CHAP is optional; when enabled the auth group id is wired into
    # the iSCSI service below.
    if self.cb_use_chap is True:
        chap_info = self._get_chap_info(account_id)
        ag_id = chap_info['ag_id']

    # Update the iscsi service with above fetched iscsi_id & ig_id
    self._request_update_iscsi_service(iscsi_id, ig_id, ag_id)

    LOG.debug("CloudByte volume [%(vol)s] updated with "
              "iscsi id [%(iscsi)s] and initiator group [%(ig)s] and "
              "authentication group [%(ag)s].",
              {'vol': cb_volume_name, 'iscsi': iscsi_id,
               'ig': ig_id, 'ag': ag_id})

    # Provide the model after successful completion of above steps
    provider = self._build_provider_details_from_response(
        cb_volumes, cb_volume_name, chap_info)

    LOG.info(_LI("Successfully created a CloudByte volume [%(cb_vol)s] "
                 "w.r.t OpenStack volume [%(stack_vol)s]."),
             {'cb_vol': cb_volume_name, 'stack_vol': volume.get('id')})

    return provider
def delete_volume(self, volume):
    """Delete the CloudByte volume backing the given OpenStack volume.

    Best-effort: when the provider_id is missing, or the volume is
    already gone from CloudByte, an error is logged and nothing is
    raised.

    :param volume: OpenStack volume dict; 'provider_id' carries the
                   CloudByte volume id
    """
    params = {}

    # OpenStack source volume id
    source_volume_id = volume['id']

    # CloudByte volume id equals OpenStack volume's provider_id
    cb_volume_id = volume.get('provider_id')

    LOG.debug("Will delete CloudByte volume [%(cb_vol)s] "
              "w.r.t OpenStack volume [%(stack_vol)s].",
              {'cb_vol': cb_volume_id, 'stack_vol': source_volume_id})

    # Delete volume at CloudByte
    if cb_volume_id is not None:

        cb_volumes = self._api_request_for_cloudbyte(
            'listFileSystem', params)

        # Search cb_volume_id in CloudByte volumes
        # incase it has already been deleted from CloudByte
        cb_volume_id = self._search_volume_id(cb_volumes, cb_volume_id)

        # Delete volume at CloudByte
        if cb_volume_id is not None:

            params = {"id": cb_volume_id}

            del_res = self._api_request_for_cloudbyte('deleteFileSystem',
                                                      params)

            # Deletion is async; wait for the job to finish.
            self._wait_for_volume_deletion(del_res, cb_volume_id)

            LOG.info(
                _LI("Successfully deleted volume [%(cb_vol)s] "
                    "at CloudByte corresponding to "
                    "OpenStack volume [%(stack_vol)s]."),
                {'cb_vol': cb_volume_id,
                 'stack_vol': source_volume_id})

        else:
            LOG.error(_LE("CloudByte does not have a volume corresponding "
                          "to OpenStack volume [%s]."), source_volume_id)

    else:
        LOG.error(_LE("CloudByte volume information not available for"
                      " OpenStack volume [%s]."), source_volume_id)
def create_snapshot(self, snapshot):
    """Creates a snapshot at CloudByte.

    :param snapshot: OpenStack snapshot dict; its parent volume's
                     'provider_id' must carry the CloudByte volume id
    :returns: model update with 'provider_id' set to the CloudByte
              snapshot path
    :raises: VolumeBackendAPIException when the parent volume has no
             CloudByte id
    """
    # OpenStack volume
    source_volume_id = snapshot['volume_id']

    # CloudByte volume id equals OpenStack volume's provider_id
    cb_volume_id = snapshot.get('volume').get('provider_id')

    if cb_volume_id is not None:

        # Set backend storage snapshot name using OpenStack snapshot id
        snapshot_name = "snap_" + snapshot['id'].replace("-", "")

        params = {
            "name": snapshot_name,
            "id": cb_volume_id
        }

        LOG.debug(
            "Will create CloudByte snapshot [%(cb_snap)s] "
            "w.r.t CloudByte volume [%(cb_vol)s] "
            "and OpenStack volume [%(stack_vol)s].",
            {'cb_snap': snapshot_name,
             'cb_vol': cb_volume_id,
             'stack_vol': source_volume_id})

        self._api_request_for_cloudbyte('createStorageSnapshot', params)

        # Get the snapshot path from CloudByte
        path = self._get_cb_snapshot_path(snapshot_name, cb_volume_id)

        LOG.info(
            _LI("Created CloudByte snapshot [%(cb_snap)s] "
                "w.r.t CloudByte volume [%(cb_vol)s] "
                "and OpenStack volume [%(stack_vol)s]."),
            {'cb_snap': path,
             'cb_vol': cb_volume_id,
             'stack_vol': source_volume_id})

        model_update = {}

        # Store snapshot path as snapshot provider_id
        model_update['provider_id'] = path

    else:
        msg = _("Failed to create snapshot. CloudByte volume information "
                "not found for OpenStack volume [%s].") % source_volume_id
        raise exception.VolumeBackendAPIException(data=msg)

    return model_update
def create_cloned_volume(self, cloned_volume, src_volume):
    """Clone an existing volume.

    Works in two steps: take a snapshot of the source/parent volume,
    then build the clone from that newly created snapshot.
    """
    src_volume_id = src_volume.get('id')

    # The snapshot is driver-internal for this use case, so generate
    # an id for it here instead of receiving a user-entered one.
    generated_snap_id = six.text_type(uuid.uuid1())

    # Shared parameter dict for create_snapshot and
    # create_volume_from_snapshot.
    snap_info = {
        'id': generated_snap_id,
        'volume_id': src_volume_id,
        'volume': src_volume,
    }

    # Step 1: snapshot the parent volume.
    created_snap = self.create_snapshot(snap_info)
    snap_info['provider_id'] = created_snap.get('provider_id')

    # Step 2: clone the snapshot into the new volume.
    return self.create_volume_from_snapshot(cloned_volume, snap_info)
def create_volume_from_snapshot(self, cloned_volume, snapshot):
    """Create a clone from an existing snapshot.

    :param cloned_volume: OpenStack volume dict for the clone; its
                          'id' (minus dashes) becomes the clone name
    :param snapshot: snapshot dict whose 'provider_id' is the CloudByte
                     snapshot path and whose parent volume carries the
                     CloudByte volume id
    :returns: provider model dict for the clone
    :raises: VolumeBackendAPIException when the clone API returns
             no data
    """
    # Getting necessary data from input params
    parent_volume_id = snapshot['volume_id']
    cloned_volume_name = cloned_volume['id'].replace("-", "")

    # CloudByte volume id equals OpenStack volume's provider_id
    cb_volume_id = snapshot.get('volume').get('provider_id')

    # CloudByte snapshot path equals OpenStack snapshot's provider_id
    cb_snapshot_path = snapshot['provider_id']

    params = {
        "id": cb_volume_id,
        "clonename": cloned_volume_name,
        "path": cb_snapshot_path
    }

    LOG.debug(
        "Will create CloudByte clone [%(cb_clone)s] "
        "at CloudByte snapshot path [%(cb_snap)s] "
        "w.r.t parent OpenStack volume [%(stack_vol)s].",
        {'cb_clone': cloned_volume_name,
         'cb_snap': cb_snapshot_path,
         'stack_vol': parent_volume_id})

    # Create clone of the snapshot
    clone_dataset_snapshot_res = (
        self._api_request_for_cloudbyte('cloneDatasetSnapshot', params))

    cb_snap = clone_dataset_snapshot_res.get('cloneDatasetSnapshot')

    cb_vol = {}
    if cb_snap is not None:
        cb_vol = cb_snap.get('filesystem')
    else:
        msg = ("Error: Clone creation failed for "
               "OpenStack volume [%(vol)s] with CloudByte "
               "snapshot path [%(path)s]" %
               {'vol': parent_volume_id, 'path': cb_snapshot_path})
        raise exception.VolumeBackendAPIException(data=msg)

    LOG.info(
        _LI("Created a clone [%(cb_clone)s] "
            "at CloudByte snapshot path [%(cb_snap)s] "
            "w.r.t parent OpenStack volume [%(stack_vol)s]."),
        {'cb_clone': cloned_volume_name,
         'cb_snap': cb_snapshot_path,
         'stack_vol': parent_volume_id})

    chap_info = {}

    # Include CHAP credentials in the provider details when enabled.
    if self.cb_use_chap is True:
        account_name = self.configuration.cb_account_name

        # Get account id of this account
        account_id = self._get_account_id_from_name(account_name)
        chap_info = self._get_chap_info(account_id)

    model_update = self._build_provider_details_from_volume(cb_vol,
                                                            chap_info)

    return model_update
def delete_snapshot(self, snapshot):
    """Delete a snapshot at CloudByte.

    Best-effort: when the snapshot has no CloudByte path recorded, an
    error is logged and nothing is raised.

    :param snapshot: OpenStack snapshot dict; 'provider_id' holds the
                     CloudByte snapshot path
    """
    # Find volume id
    source_volume_id = snapshot['volume_id']

    # CloudByte volume id equals OpenStack volume's provider_id
    cb_volume_id = snapshot.get('volume').get('provider_id')

    # CloudByte snapshot path equals OpenStack snapshot's provider_id
    cb_snapshot_path = snapshot['provider_id']

    # If cb_snapshot_path is 'None'
    # then no need to execute CloudByte API
    if cb_snapshot_path is not None:

        params = {
            "id": cb_volume_id,
            "path": cb_snapshot_path
        }

        LOG.debug("Will delete CloudByte snapshot [%(snap)s] w.r.t "
                  "parent CloudByte volume [%(cb_vol)s] "
                  "and parent OpenStack volume [%(stack_vol)s].",
                  {'snap': cb_snapshot_path,
                   'cb_vol': cb_volume_id,
                   'stack_vol': source_volume_id})

        # Execute CloudByte API
        self._api_request_for_cloudbyte('deleteSnapshot', params)

        LOG.info(
            _LI("Deleted CloudByte snapshot [%(snap)s] w.r.t "
                "parent CloudByte volume [%(cb_vol)s] "
                "and parent OpenStack volume [%(stack_vol)s]."),
            {'snap': cb_snapshot_path,
             'cb_vol': cb_volume_id,
             'stack_vol': source_volume_id})

    else:
        LOG.error(_LE("CloudByte snapshot information is not available"
                      " for OpenStack volume [%s]."), source_volume_id)
def extend_volume(self, volume, new_size):
    """Grow the CloudByte volume quota to new_size (in GB)."""
    # CloudByte volume id is stored as the OpenStack volume's
    # provider_id.
    payload = {
        "id": volume.get('provider_id'),
        "quotasize": six.text_type(new_size) + 'G',
    }

    # Request the CloudByte api to update the volume
    self._api_request_for_cloudbyte('updateFileSystem', payload)
def create_export(self, context, volume, connector):
    """Setup the iscsi export info.

    :returns: model update carrying 'provider_auth' (CHAP credential
              string when CHAP is enabled, else None)
    """
    return self._export()
def ensure_export(self, context, volume):
    """Verify the iscsi export info.

    :returns: model update carrying 'provider_auth' (CHAP credential
              string when CHAP is enabled, else None)
    """
    return self._export()
def get_volume_stats(self, refresh=False):
    """Get volume statistics.

    If 'refresh' is True, update/refresh the statistics first;
    otherwise the previously cached stats are returned.

    :param refresh: when True, query the backend TSM for fresh
                    capacity numbers
    :returns: dict of driver/backend statistics
    """
    if refresh:
        # Get the TSM name from configuration
        tsm_name = self.configuration.cb_tsm_name
        # Get the storage details of this TSM
        data = self._get_storage_info(tsm_name)

        data["volume_backend_name"] = (
            self.configuration.safe_get('volume_backend_name') or
            'CloudByte')
        data["vendor_name"] = 'CloudByte'
        data['reserved_percentage'] = 0
        data["driver_version"] = CloudByteISCSIDriver.VERSION
        data["storage_protocol"] = 'iSCSI'

        LOG.debug("CloudByte driver stats: [%s].", data)
        # Set this to the instance variable
        self.volume_stats = data

    return self.volume_stats
| nikesh-mahalka/cinder | cinder/volume/drivers/cloudbyte/cloudbyte.py | Python | apache-2.0 | 38,259 |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Collections.Generic;
using Microsoft.CodeAnalysis.Editor.CSharp.SignatureHelp;
using Microsoft.CodeAnalysis.Editor.UnitTests.SignatureHelp;
using Microsoft.CodeAnalysis.Editor.UnitTests.Workspaces;
using Roslyn.Test.Utilities;
using Xunit;
namespace Microsoft.CodeAnalysis.Editor.CSharp.UnitTests.SignatureHelp
{
public class ElementAccessExpressionSignatureHelpProviderTests : AbstractCSharpSignatureHelpProviderTests
{
// Passes the shared workspace fixture through to the base signature-help test harness.
public ElementAccessExpressionSignatureHelpProviderTests(CSharpTestWorkspaceFixture workspaceFixture) : base(workspaceFixture)
{
}
// Supplies the provider under test: signature help for element access (indexer) expressions.
internal override ISignatureHelpProvider CreateSignatureHelpProvider()
{
    return new ElementAccessExpressionSignatureHelpProvider();
}
#region "Regular tests"
// Caret right after '[' on a one-parameter indexer: help shows its signature with parameter 0 current.
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void TestInvocationWithParametersOn1()
{
    var markup = @"
class C
{
public string this[int a]
{
get { return null; }
set { }
}
}
class D
{
void Foo()
{
var c = new C();
var x = [|c[$$|]];
}
}";

    var expectedOrderedItems = new List<SignatureHelpTestItem>
    {
        new SignatureHelpTestItem("string C[int a]", string.Empty, string.Empty, currentParameterIndex: 0)
    };

    Test(markup, expectedOrderedItems);
}
// Indexing the result of another element access (c[0][...]) still resolves the indexer signature.
[WorkItem(636117)]
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void TestInvocationOnExpression()
{
    var markup = @"
class C
{
public string this[int a]
{
get { return null; }
set { }
}
}
class D
{
void Foo()
{
C[] c = new C[1];
c[0][$$
}
}";

    var expectedOrderedItems = new List<SignatureHelpTestItem>
    {
        new SignatureHelpTestItem("string C[int a]", string.Empty, string.Empty, currentParameterIndex: 0)
    };

    Test(markup, expectedOrderedItems);
}
// XML doc comments on the indexer flow into the summary and parameter documentation of the item.
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void TestInvocationWithParametersXmlCommentsOn1()
{
    var markup = @"
class C
{
/// <summary>
/// Summary for this.
/// </summary>
/// <param name=""a"">Param a</param>
public string this[int a]
{
get { return null; }
set { }
}
}
class D
{
void Foo()
{
var c = new C();
var x = [|c[$$|]];
}
}";

    var expectedOrderedItems = new List<SignatureHelpTestItem>
    {
        new SignatureHelpTestItem("string C[int a]", "Summary for this.", "Param a", currentParameterIndex: 0)
    };

    Test(markup, expectedOrderedItems);
}
// Caret after the first argument's comma: the second parameter becomes current.
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void TestInvocationWithParametersOn2()
{
    var markup = @"
class C
{
public string this[int a, bool b]
{
get { return null; }
set { }
}
}
class D
{
void Foo()
{
var c = new C();
var x = [|c[22, $$|]];
}
}";

    var expectedOrderedItems = new List<SignatureHelpTestItem>
    {
        new SignatureHelpTestItem("string C[int a, bool b]", string.Empty, string.Empty, currentParameterIndex: 1)
    };

    Test(markup, expectedOrderedItems);
}
// XML docs on a two-parameter indexer: the second parameter's doc is surfaced when it is current.
// NOTE: the method name keeps its historical misspelling ("XmlComents") — renaming would churn test baselines.
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void TestInvocationWithParametersXmlComentsOn2()
{
    var markup = @"
class C
{
/// <summary>
/// Summary for this.
/// </summary>
/// <param name=""a"">Param a</param>
/// <param name=""b"">Param b</param>
public string this[int a, bool b]
{
get { return null; }
set { }
}
}
class D
{
void Foo()
{
var c = new C();
var x = [|c[22, $$|]];
}
}";

    var expectedOrderedItems = new List<SignatureHelpTestItem>
    {
        new SignatureHelpTestItem("string C[int a, bool b]", "Summary for this.", "Param b", currentParameterIndex: 1)
    };

    Test(markup, expectedOrderedItems);
}
// Help still appears even though the ']' has not been typed yet.
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void TestInvocationWithoutClosingBracketWithParameters()
{
    var markup =
@"class C
{
public string this[int a]
{
get { return null; }
set { }
}
}
class D
{
void Foo()
{
var c = new C();
var x = [|c[$$
|]}
}";

    var expectedOrderedItems = new List<SignatureHelpTestItem>
    {
        new SignatureHelpTestItem("string C[int a]", string.Empty, string.Empty, currentParameterIndex: 0)
    };

    Test(markup, expectedOrderedItems);
}
// Unterminated element access with a comma already typed: parameter 1 is current.
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void TestInvocationWithoutClosingBracketWithParametersOn2()
{
    var markup = @"
class C
{
public string this[int a, bool b]
{
get { return null; }
set { }
}
}
class D
{
void Foo()
{
var c = new C();
var x = [|c[22, $$
|]}
}";

    var expectedOrderedItems = new List<SignatureHelpTestItem>
    {
        new SignatureHelpTestItem("string C[int a, bool b]", string.Empty, string.Empty, currentParameterIndex: 1)
    };

    Test(markup, expectedOrderedItems);
}
#endregion
#region "Current Parameter Name"
// Named arguments out of declaration order: the current parameter should be 'a' (the one the caret sits in).
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void TestCurrentParameterName()
{
    var markup = @"
class C
{
public string this[int a, bool b]
{
get { return null; }
set { }
}
}
class D
{
void Foo()
{
var c = new C();
var x = [|c[b: false, a: $$42|]];
}
}";

    VerifyCurrentParameterName(markup, "a");
}
#endregion
#region "Trigger tests"
// '[' is a trigger character: typing it should bring up the indexer's signature.
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void TestInvocationOnTriggerBracket()
{
    var markup = @"
class C
{
public string this[int a]
{
get { return null; }
set { }
}
}
class D
{
void Foo()
{
var c = new C();
var x = [|c[$$|]];
}
}";

    var expectedOrderedItems = new List<SignatureHelpTestItem>
    {
        new SignatureHelpTestItem("string C[int a]", string.Empty, string.Empty, currentParameterIndex: 0)
    };

    Test(markup, expectedOrderedItems, usePreviousCharAsTrigger: true);
}
// ',' is a trigger character: typing it advances help to the second parameter.
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void TestInvocationOnTriggerComma()
{
    var markup = @"
class C
{
public string this[int a, bool b]
{
get { return null; }
set { }
}
}
class D
{
void Foo()
{
var c = new C();
var x = [|c[42,$$|]];
}
}";

    var expectedOrderedItems = new List<SignatureHelpTestItem>
    {
        new SignatureHelpTestItem("string C[int a, bool b]", string.Empty, string.Empty, currentParameterIndex: 1)
    };

    Test(markup, expectedOrderedItems, usePreviousCharAsTrigger: true);
}
// Space is not a trigger character, so triggering on it must produce no signature-help items.
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void TestNoInvocationOnSpace()
{
    var markup = @"
class C
{
public string this[int a, bool b]
{
get { return null; }
set { }
}
}
class D
{
void Foo()
{
var c = new C();
var x = [|c[42, $$|]];
}
}";

    // Empty list: no items expected.
    var expectedOrderedItems = new List<SignatureHelpTestItem>();
    Test(markup, expectedOrderedItems, usePreviousCharAsTrigger: true);
}
// Only ',' and '[' open element-access signature help; space, '(' and '<' must not.
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void TestTriggerCharacters()
{
    var expectedCharacters = new[] { ',', '[' };
    var unexpectedCharacters = new[] { ' ', '(', '<' };

    VerifyTriggerCharacters(expectedCharacters, unexpectedCharacters);
}
#endregion
#region "EditorBrowsable tests"
// EditorBrowsable(Always) on an indexer: visible from both metadata and same-solution references.
[WorkItem(7336, "DevDiv_Projects/Roslyn")]
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void EditorBrowsable_Indexer_PropertyAlways()
{
    var markup = @"
class Program
{
void M()
{
new Foo()[$$
}
}";

    var referencedCode = @"
public class Foo
{
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Always)]
public int this[int x]
{
get { return 5; }
set { }
}
}";

    var expectedOrderedItems = new List<SignatureHelpTestItem>
    {
        new SignatureHelpTestItem("int Foo[int x]", string.Empty, string.Empty, currentParameterIndex: 0)
    };

    TestSignatureHelpInEditorBrowsableContexts(markup: markup,
                                               referencedCode: referencedCode,
                                               expectedOrderedItemsMetadataReference: expectedOrderedItems,
                                               expectedOrderedItemsSameSolution: expectedOrderedItems,
                                               sourceLanguage: LanguageNames.CSharp,
                                               referencedLanguage: LanguageNames.CSharp);
}
// EditorBrowsable(Never): hidden through a metadata reference, but still visible in the same solution.
[WorkItem(7336, "DevDiv_Projects/Roslyn")]
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void EditorBrowsable_Indexer_PropertyNever()
{
    var markup = @"
class Program
{
void M()
{
new Foo()[$$
}
}";

    var referencedCode = @"
public class Foo
{
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public int this[int x]
{
get { return 5; }
set { }
}
}";

    // Despite its name, this local carries the SAME-SOLUTION expectation below (historical naming).
    var expectedOrderedItemsMetadataReference = new List<SignatureHelpTestItem>
    {
        new SignatureHelpTestItem("int Foo[int x]", string.Empty, string.Empty, currentParameterIndex: 0)
    };

    TestSignatureHelpInEditorBrowsableContexts(markup: markup,
                                               referencedCode: referencedCode,
                                               expectedOrderedItemsMetadataReference: new List<SignatureHelpTestItem>(),
                                               expectedOrderedItemsSameSolution: expectedOrderedItemsMetadataReference,
                                               sourceLanguage: LanguageNames.CSharp,
                                               referencedLanguage: LanguageNames.CSharp);
}
// EditorBrowsable(Advanced): hidden from metadata references only while "hide advanced members" is on.
[WorkItem(7336, "DevDiv_Projects/Roslyn")]
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void EditorBrowsable_Indexer_PropertyAdvanced()
{
    var markup = @"
class Program
{
void M()
{
new Foo()[$$
}
}";

    var referencedCode = @"
public class Foo
{
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Advanced)]
public int this[int x]
{
get { return 5; }
set { }
}
}";

    var expectedOrderedItems = new List<SignatureHelpTestItem>
    {
        new SignatureHelpTestItem("int Foo[int x]", string.Empty, string.Empty, currentParameterIndex: 0)
    };

    // hideAdvancedMembers: true — the metadata reference suppresses the item.
    TestSignatureHelpInEditorBrowsableContexts(markup: markup,
                                               referencedCode: referencedCode,
                                               expectedOrderedItemsMetadataReference: new List<SignatureHelpTestItem>(),
                                               expectedOrderedItemsSameSolution: expectedOrderedItems,
                                               sourceLanguage: LanguageNames.CSharp,
                                               referencedLanguage: LanguageNames.CSharp,
                                               hideAdvancedMembers: true);

    // hideAdvancedMembers: false — the item shows everywhere.
    TestSignatureHelpInEditorBrowsableContexts(markup: markup,
                                               referencedCode: referencedCode,
                                               expectedOrderedItemsMetadataReference: expectedOrderedItems,
                                               expectedOrderedItemsSameSolution: expectedOrderedItems,
                                               sourceLanguage: LanguageNames.CSharp,
                                               referencedLanguage: LanguageNames.CSharp,
                                               hideAdvancedMembers: false);
}
// With two indexer overloads, Never hides only the attributed one from metadata references.
[WorkItem(7336, "DevDiv_Projects/Roslyn")]
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void EditorBrowsable_Indexer_PropertyNeverOnOneOfTwoOverloads()
{
    var markup = @"
class Program
{
void M()
{
new Foo()[$$
}
}";

    var referencedCode = @"
public class Foo
{
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
public int this[int x]
{
get { return 5; }
set { }
}
public int this[double d]
{
get { return 5; }
set { }
}
}";

    var expectedOrderedItemsMetadataReference = new List<SignatureHelpTestItem>
    {
        new SignatureHelpTestItem("int Foo[double d]", string.Empty, string.Empty, currentParameterIndex: 0)
    };

    var expectedOrderedItemsSameSolution = new List<SignatureHelpTestItem>
    {
        new SignatureHelpTestItem("int Foo[double d]", string.Empty, string.Empty, currentParameterIndex: 0),
        new SignatureHelpTestItem("int Foo[int x]", string.Empty, string.Empty, currentParameterIndex: 0),
    };

    TestSignatureHelpInEditorBrowsableContexts(markup: markup,
                                               referencedCode: referencedCode,
                                               expectedOrderedItemsMetadataReference: expectedOrderedItemsMetadataReference,
                                               expectedOrderedItemsSameSolution: expectedOrderedItemsSameSolution,
                                               sourceLanguage: LanguageNames.CSharp,
                                               referencedLanguage: LanguageNames.CSharp);
}
// Browsable(Never) on just the getter does not hide the indexer itself.
[WorkItem(7336, "DevDiv_Projects/Roslyn")]
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void EditorBrowsable_Indexer_GetBrowsableNeverIgnored()
{
    var markup = @"
class Program
{
void M()
{
new Foo()[$$
}
}";

    var referencedCode = @"
public class Foo
{
public int this[int x]
{
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
get { return 5; }
set { }
}
}";

    var expectedOrderedItems = new List<SignatureHelpTestItem>
    {
        new SignatureHelpTestItem("int Foo[int x]", string.Empty, string.Empty, currentParameterIndex: 0)
    };

    TestSignatureHelpInEditorBrowsableContexts(markup: markup,
                                               referencedCode: referencedCode,
                                               expectedOrderedItemsMetadataReference: expectedOrderedItems,
                                               expectedOrderedItemsSameSolution: expectedOrderedItems,
                                               sourceLanguage: LanguageNames.CSharp,
                                               referencedLanguage: LanguageNames.CSharp);
}
// Browsable(Never) on just the setter does not hide the indexer itself.
[WorkItem(7336, "DevDiv_Projects/Roslyn")]
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void EditorBrowsable_Indexer_SetBrowsableNeverIgnored()
{
    var markup = @"
class Program
{
void M()
{
new Foo()[$$
}
}";

    var referencedCode = @"
public class Foo
{
public int this[int x]
{
get { return 5; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
set { }
}
}";

    var expectedOrderedItems = new List<SignatureHelpTestItem>
    {
        new SignatureHelpTestItem("int Foo[int x]", string.Empty, string.Empty, currentParameterIndex: 0)
    };

    TestSignatureHelpInEditorBrowsableContexts(markup: markup,
                                               referencedCode: referencedCode,
                                               expectedOrderedItemsMetadataReference: expectedOrderedItems,
                                               expectedOrderedItemsSameSolution: expectedOrderedItems,
                                               sourceLanguage: LanguageNames.CSharp,
                                               referencedLanguage: LanguageNames.CSharp);
}
// Browsable(Never) on both accessors is still ignored — only the indexer-level attribute hides it.
[WorkItem(7336, "DevDiv_Projects/Roslyn")]
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void EditorBrowsable_Indexer_GetSetBrowsableNeverIgnored()
{
    var markup = @"
class Program
{
void M()
{
new Foo()[$$
}
}";

    var referencedCode = @"
public class Foo
{
public int this[int x]
{
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
get { return 5; }
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
set { }
}
}";

    var expectedOrderedItems = new List<SignatureHelpTestItem>
    {
        new SignatureHelpTestItem("int Foo[int x]", string.Empty, string.Empty, currentParameterIndex: 0)
    };

    TestSignatureHelpInEditorBrowsableContexts(markup: markup,
                                               referencedCode: referencedCode,
                                               expectedOrderedItemsMetadataReference: expectedOrderedItems,
                                               expectedOrderedItemsSameSolution: expectedOrderedItems,
                                               sourceLanguage: LanguageNames.CSharp,
                                               referencedLanguage: LanguageNames.CSharp);
}
#endregion
#region Indexed Property tests
[WorkItem(530811)]
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void IndexedProperty()
{
    var markup = @"class Program
{
void M()
{
CCC c = new CCC();
c.IndexProp[$$
}
}";
    // Note that <COMImport> is required by compiler. Bug 17013 tracks enabling indexed property for non-COM types.
    var referencedCode = @"Imports System.Runtime.InteropServices
<ComImport()>
<GuidAttribute(CCC.ClassId)>
Public Class CCC
#Region ""COM GUIDs""
Public Const ClassId As String = ""9d965fd2-1514-44f6-accd-257ce77c46b0""
Public Const InterfaceId As String = ""a9415060-fdf0-47e3-bc80-9c18f7f39cf6""
Public Const EventsId As String = ""c6a866a5-5f97-4b53-a5df-3739dc8ff1bb""
# End Region
''' <summary>
''' An index property from VB
''' </summary>
''' <param name=""p1"">p1 is an integer index</param>
''' <returns>A string</returns>
Public Property IndexProp(ByVal p1 As Integer) As String
Get
Return Nothing
End Get
Set(ByVal value As String)
End Set
End Property
End Class";
    // A metadata reference carries no XML doc text (summary/param are empty);
    // a same-solution project reference surfaces the VB doc comments.
    var metadataItems = new List<SignatureHelpTestItem>
    {
        new SignatureHelpTestItem("string CCC.IndexProp[int p1]", string.Empty, string.Empty, currentParameterIndex: 0),
    };
    var projectReferenceItems = new List<SignatureHelpTestItem>
    {
        new SignatureHelpTestItem("string CCC.IndexProp[int p1]", "An index property from VB", "p1 is an integer index", currentParameterIndex: 0),
    };
    TestSignatureHelpInEditorBrowsableContexts(
        markup: markup,
        referencedCode: referencedCode,
        expectedOrderedItemsMetadataReference: metadataItems,
        expectedOrderedItemsSameSolution: projectReferenceItems,
        sourceLanguage: LanguageNames.CSharp,
        referencedLanguage: LanguageNames.VisualBasic);
}
#endregion
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void FieldUnavailableInOneLinkedFile()
{
// The indexer is declared inside '#if FOO', and only Proj1 defines FOO; the
// linked copy of the same document in Proj2 therefore does not see it.
var markup = @"<Workspace>
<Project Language=""C#"" CommonReferences=""true"" AssemblyName=""Proj1"" PreprocessorSymbols=""FOO"">
<Document FilePath=""SourceDocument""><![CDATA[
class C
{
#if FOO
public int this[int z]
{
get
{
return 0;
}
}
#endif
void foo()
{
var x = this[$$
}
}
]]>
</Document>
</Project>
<Project Language=""C#"" CommonReferences=""true"" AssemblyName=""Proj2"">
<Document IsLinkFile=""true"" LinkAssemblyName=""Proj1"" LinkFilePath=""SourceDocument""/>
</Project>
</Workspace>";
// Signature help should render the item with per-project availability lines
// (available in Proj1, not available in Proj2) plus the context-switch hint.
var expectedDescription = new SignatureHelpTestItem($"int C[int z]\r\n\r\n{string.Format(FeaturesResources.ProjectAvailability, "Proj1", FeaturesResources.Available)}\r\n{string.Format(FeaturesResources.ProjectAvailability, "Proj2", FeaturesResources.NotAvailable)}\r\n\r\n{FeaturesResources.UseTheNavigationBarToSwitchContext}", currentParameterIndex: 0);
VerifyItemWithReferenceWorker(markup, new[] { expectedDescription }, false);
}
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void ExcludeFilesWithInactiveRegions()
{
// Proj1 defines FOO and BAR, so both the indexer and the call site are active.
// Proj2 defines neither symbol: the call site itself sits in an inactive
// '#if BAR' region there, so Proj2 is excluded from the availability list.
// Proj3 defines only BAR: the call site is active but the indexer is not,
// so Proj3 is reported as "not available".
var markup = @"<Workspace>
<Project Language=""C#"" CommonReferences=""true"" AssemblyName=""Proj1"" PreprocessorSymbols=""FOO,BAR"">
<Document FilePath=""SourceDocument""><![CDATA[
class C
{
#if FOO
public int this[int z]
{
get
{
return 0;
}
}
#endif
#if BAR
void foo()
{
var x = this[$$
}
#endif
}
]]>
</Document>
</Project>
<Project Language=""C#"" CommonReferences=""true"" AssemblyName=""Proj2"">
<Document IsLinkFile=""true"" LinkAssemblyName=""Proj1"" LinkFilePath=""SourceDocument"" />
</Project>
<Project Language=""C#"" CommonReferences=""true"" AssemblyName=""Proj3"" PreprocessorSymbols=""BAR"">
<Document IsLinkFile=""true"" LinkAssemblyName=""Proj1"" LinkFilePath=""SourceDocument""/>
</Project>
</Workspace>";
var expectedDescription = new SignatureHelpTestItem($"int C[int z]\r\n\r\n{string.Format(FeaturesResources.ProjectAvailability, "Proj1", FeaturesResources.Available)}\r\n{string.Format(FeaturesResources.ProjectAvailability, "Proj3", FeaturesResources.NotAvailable)}\r\n\r\n{FeaturesResources.UseTheNavigationBarToSwitchContext}", currentParameterIndex: 0);
VerifyItemWithReferenceWorker(markup, new[] { expectedDescription }, false);
}
/// <summary>
/// Signature-help tests driven by <see cref="ElementAccessExpressionSignatureHelpProvider"/>
/// (see CreateSignatureHelpProvider below); the class name indicates these target
/// element-access expressions that may be syntactically incomplete.
/// </summary>
public class IncompleteElementAccessExpressionSignatureHelpProviderTests : AbstractCSharpSignatureHelpProviderTests
{
public IncompleteElementAccessExpressionSignatureHelpProviderTests(CSharpTestWorkspaceFixture workspaceFixture) : base(workspaceFixture)
{
}
internal override ISignatureHelpProvider CreateSignatureHelpProvider()
{
return new ElementAccessExpressionSignatureHelpProvider();
}
// Basic indexer access on a local: one item with the indexer's signature.
[WorkItem(636117)]
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void TestInvocation()
{
var markup = @"
class C
{
public string this[int a]
{
get { return null; }
set { }
}
}
class D
{
void Foo()
{
var c = new C();
c[$$]
}
}";
var expectedOrderedItems = new List<SignatureHelpTestItem>();
expectedOrderedItems.Add(new SignatureHelpTestItem("string C[int a]", string.Empty, string.Empty, currentParameterIndex: 0));
Test(markup, expectedOrderedItems);
}
// Conditional element access ('p?[...]') should surface the same indexer item.
[WorkItem(939417)]
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void ConditionalIndexer()
{
var markup = @"
public class P
{
public int this[int z]
{
get
{
return 0;
}
}
public void foo()
{
P p = null;
p?[$$]
}
}
";
var expectedOrderedItems = new List<SignatureHelpTestItem>();
expectedOrderedItems.Add(new SignatureHelpTestItem("int P[int z]", string.Empty, string.Empty, currentParameterIndex: 0));
Test(markup, expectedOrderedItems);
}
// Conditional element access where the receiver is not a simple identifier:
// literals, parenthesized expressions, and object-creation expressions.
[WorkItem(32, "https://github.com/dotnet/roslyn/issues/32")]
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void NonIdentifierConditionalIndexer()
{
var expected = new[] { new SignatureHelpTestItem("char string[int index]") };
Test(@"class C { void M() { """"?[$$ } }", expected); // inline with a string literal
Test(@"class C { void M() { """"?[/**/$$ } }", expected); // inline with a string literal and multiline comment
Test(@"class C { void M() { ("""")?[$$ } }", expected); // parenthesized expression
Test(@"class C { void M() { new System.String(' ', 1)?[$$ } }", expected); // new object expression
// more complicated parenthesized expression
Test(@"class C { void M() { (null as System.Collections.Generic.List<int>)?[$$ } }", new[] { new SignatureHelpTestItem("int System.Collections.Generic.List<int>[int index]") });
}
// The '[' sits inside a comment, so no signature-help items are expected.
[WorkItem(1067933)]
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void InvokedWithNoToken()
{
var markup = @"
// foo[$$";
Test(markup);
}
[WorkItem(2482, "https://github.com/dotnet/roslyn/issues/2482")]
[WpfFact, Trait(Traits.Feature, Traits.Features.SignatureHelp)]
public void WhereExpressionLooksLikeArrayTypeSyntaxOfQualifiedName()
{
var markup = @"
class WithIndexer
{
public int this[int index] { get { return 0; } }
}
class TestClass
{
public WithIndexer Item { get; set; }
public void Method(TestClass tc)
{
// `tc.Item[]` parses as ArrayTypeSyntax with an ElementType of QualifiedNameSyntax
tc.Item[$$]
}
}
";
Test(markup, new[] { new SignatureHelpTestItem("int WithIndexer[int index]") }, usePreviousCharAsTrigger: true);
}
}
}
}
| Inverness/roslyn | src/EditorFeatures/CSharpTest/SignatureHelp/ElementAccessExpressionSignatureHelpProviderTests.cs | C# | apache-2.0 | 27,540 |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using Microsoft.CodeAnalysis;
namespace a2md
{
/// <summary>
/// Compares <see cref="DiagnosticDescriptor"/> instances by their
/// <see cref="DiagnosticDescriptor.Id"/> alone, using ordinal string comparison.
/// Descriptors that differ only in title, message, or severity compare equal.
/// </summary>
public sealed class DescriptorEqualityComparer : IEqualityComparer<DiagnosticDescriptor>
{
    /// <summary>
    /// Returns true when both descriptors have ordinally-equal Ids. Handles null
    /// arguments, as required by the <see cref="IEqualityComparer{T}"/> contract;
    /// the previous implementation threw <see cref="NullReferenceException"/> for
    /// a null <paramref name="x"/>.
    /// </summary>
    public bool Equals(DiagnosticDescriptor x, DiagnosticDescriptor y)
    {
        if (ReferenceEquals(x, y))
        {
            return true;
        }

        if (x == null || y == null)
        {
            return false;
        }

        return x.Id.Equals(y.Id, StringComparison.Ordinal);
    }

    // CA1720: Identifier 'obj' contains type name
    // TODO: Remove the below suppression once https://github.com/dotnet/roslyn-analyzers/issues/938 is fixed.
#pragma warning disable CA1720
    /// <summary>Hash code derived from the Id only, consistent with <see cref="Equals"/>.</summary>
    public int GetHashCode(DiagnosticDescriptor obj) => obj?.Id.GetHashCode() ?? 0;
#pragma warning restore CA1720
}
}
| heejaechang/roslyn-analyzers | tools/a2md/DescriptorEqualityComparer.cs | C# | apache-2.0 | 807 |
/*
* Copyright 2018-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.core.build.engine;
import com.facebook.buck.core.rules.BuildRule;
import java.util.SortedSet;
/**
 * Cache of a build rule's dependency sets, keyed by the rule itself.
 */
public interface RuleDepsCache {
  /** Returns the (build-time) dependencies of {@code rule}. */
  SortedSet<BuildRule> get(BuildRule rule);
  /** Returns the runtime dependencies of {@code rule}. */
  SortedSet<BuildRule> getRuntimeDeps(BuildRule rule);
}
| LegNeato/buck | src/com/facebook/buck/core/build/engine/RuleDepsCache.java | Java | apache-2.0 | 865 |
package brooklyn.util.text;
import com.google.common.base.Preconditions;
import com.google.common.base.Supplier;
/**
 * A deferred {@link String#format(String, Object...)} call: the %s-style pattern
 * and its arguments are captured, but formatting only happens when
 * {@link #toString()} is invoked. Useful where evaluation should be lazy, e.g. as
 * a message object handed to {@link Preconditions} so the concatenation cost is
 * skipped when the check passes.
 */
public class FormattedString {

    private final String formatPattern;
    private final Object[] formatArgs;

    public FormattedString(String pattern, Object[] args) {
        this.formatPattern = pattern;
        this.formatArgs = args;
    }

    /** Returns the unformatted %s-style pattern. */
    public String getPattern() {
        return formatPattern;
    }

    /** Returns the raw format arguments (not copied). */
    public Object[] getArgs() {
        return formatArgs;
    }

    /** Exposes this object's lazy rendering as a string {@link Supplier}. */
    public Supplier<String> supplier() {
        return Strings.toStringSupplier(this);
    }

    /** Formats the pattern with the captured arguments. */
    @Override
    public String toString() {
        return String.format(formatPattern, formatArgs);
    }
}
| rhodgin/brooklyn | utils/common/src/main/java/brooklyn/util/text/FormattedString.java | Java | apache-2.0 | 892 |
package recreate
import (
"fmt"
"time"
"github.com/golang/glog"
kapi "github.com/GoogleCloudPlatform/kubernetes/pkg/api"
kclient "github.com/GoogleCloudPlatform/kubernetes/pkg/client"
"github.com/GoogleCloudPlatform/kubernetes/pkg/kubectl"
"github.com/GoogleCloudPlatform/kubernetes/pkg/runtime"
"github.com/GoogleCloudPlatform/kubernetes/pkg/util"
deployapi "github.com/projectatomic/atomic-enterprise/pkg/deploy/api"
stratsupport "github.com/projectatomic/atomic-enterprise/pkg/deploy/strategy/support"
deployutil "github.com/projectatomic/atomic-enterprise/pkg/deploy/util"
)
// RecreateDeploymentStrategy is a simple strategy appropriate as a default.
// Its behavior is to scale down the last deployment to 0, and to scale up the
// new deployment to 1.
//
// A failure to disable any existing deployments will be considered a
// deployment failure.
type RecreateDeploymentStrategy struct {
	// getReplicationController knows how to get a replication controller.
	getReplicationController func(namespace, name string) (*kapi.ReplicationController, error)
	// scaler is used to scale replication controllers.
	scaler kubectl.Scaler
	// codec is used to decode DeploymentConfigs contained in deployments.
	codec runtime.Codec
	// hookExecutor can execute a lifecycle hook.
	hookExecutor hookExecutor
	// retryTimeout is how long to wait for the replica count update to succeed
	// before giving up.
	retryTimeout time.Duration
	// retryPeriod is how often to try updating the replica count.
	retryPeriod time.Duration
}
// NewRecreateDeploymentStrategy makes a RecreateDeploymentStrategy backed by
// a real HookExecutor and client.
//
// Defaults: replica-count updates are retried every second for up to two
// minutes before the deployment is considered failed.
func NewRecreateDeploymentStrategy(client kclient.Interface, codec runtime.Codec) *RecreateDeploymentStrategy {
	// NOTE(review): the error from ScalerFor is discarded; a nil scaler would
	// only surface later as a failure inside scaleAndWait — confirm intended.
	scaler, _ := kubectl.ScalerFor("ReplicationController", kubectl.NewScalerClient(client))
	return &RecreateDeploymentStrategy{
		getReplicationController: func(namespace, name string) (*kapi.ReplicationController, error) {
			return client.ReplicationControllers(namespace).Get(name)
		},
		scaler: scaler,
		codec:  codec,
		// Hook pods are created and watched through the real client.
		hookExecutor: &stratsupport.HookExecutor{
			PodClient: &stratsupport.HookExecutorPodClientImpl{
				CreatePodFunc: func(namespace string, pod *kapi.Pod) (*kapi.Pod, error) {
					return client.Pods(namespace).Create(pod)
				},
				PodWatchFunc: func(namespace, name, resourceVersion string, stopChannel chan struct{}) func() *kapi.Pod {
					return stratsupport.NewPodWatch(client, namespace, name, resourceVersion, stopChannel)
				},
			},
		},
		retryTimeout: 120 * time.Second,
		retryPeriod:  1 * time.Second,
	}
}
// Deploy makes deployment active and disables oldDeployments.
// It is DeployWithAcceptor without first-replica validation (nil acceptor).
func (s *RecreateDeploymentStrategy) Deploy(from *kapi.ReplicationController, to *kapi.ReplicationController, desiredReplicas int) error {
	return s.DeployWithAcceptor(from, to, desiredReplicas, nil)
}
// DeployWithAcceptor scales down from and then scales up to. If
// updateAcceptor is provided and the desired replica count is >1, the first
// replica of to is rolled out and validated before performing the full scale
// up.
//
// Sequence: pre-hook (failure aborts) -> scale 'from' to 0 -> optional
// first-replica validation -> scale 'to' to desiredReplicas -> post-hook
// (failure logged and ignored).
//
// This is currently only used in conjunction with the rolling update strategy
// for initial deployments.
func (s *RecreateDeploymentStrategy) DeployWithAcceptor(from *kapi.ReplicationController, to *kapi.ReplicationController, desiredReplicas int, updateAcceptor kubectl.UpdateAcceptor) error {
	// The strategy parameters live in the DeploymentConfig encoded into 'to'.
	config, err := deployutil.DecodeDeploymentConfig(to, s.codec)
	if err != nil {
		return fmt.Errorf("couldn't decode config from deployment %s: %v", to.Name, err)
	}
	params := config.Template.Strategy.RecreateParams
	retryParams := kubectl.NewRetryParams(s.retryPeriod, s.retryTimeout)
	waitParams := kubectl.NewRetryParams(s.retryPeriod, s.retryTimeout)
	// Execute any pre-hook. A pre-hook failure aborts the whole deployment.
	if params != nil && params.Pre != nil {
		if err := s.hookExecutor.Execute(params.Pre, to, "prehook"); err != nil {
			// NOTE(review): capitalized error string; Go convention prefers
			// lower-case ("pre hook failed") — left unchanged to avoid
			// altering runtime text.
			return fmt.Errorf("Pre hook failed: %s", err)
		} else {
			glog.Infof("Pre hook finished")
		}
	}
	// Scale down the from deployment.
	if from != nil {
		glog.Infof("Scaling %s down to zero", deployutil.LabelForDeployment(from))
		_, err := s.scaleAndWait(from, 0, retryParams, waitParams)
		if err != nil {
			return fmt.Errorf("couldn't scale %s to 0: %v", deployutil.LabelForDeployment(from), err)
		}
	}
	// If an UpdateAcceptor is provided and we're trying to scale up to more
	// than one replica, scale up to 1 and validate the replica, aborting if the
	// replica isn't acceptable.
	if updateAcceptor != nil && desiredReplicas > 1 {
		glog.Infof("Scaling %s to 1 before validating first replica", deployutil.LabelForDeployment(to))
		updatedTo, err := s.scaleAndWait(to, 1, retryParams, waitParams)
		if err != nil {
			return fmt.Errorf("couldn't scale %s to 1: %v", deployutil.LabelForDeployment(to), err)
		}
		glog.Infof("Validating first replica of %s", deployutil.LabelForDeployment(to))
		if err := updateAcceptor.Accept(updatedTo); err != nil {
			return fmt.Errorf("first replica rejected for %s: %v", to.Name, err)
		}
		// Keep working with the freshest controller state.
		to = updatedTo
	}
	// Complete the scale up.
	glog.Infof("Scaling %s to %d", deployutil.LabelForDeployment(to), desiredReplicas)
	updatedTo, err := s.scaleAndWait(to, desiredReplicas, retryParams, waitParams)
	if err != nil {
		return fmt.Errorf("couldn't scale %s to %d: %v", deployutil.LabelForDeployment(to), desiredReplicas, err)
	}
	to = updatedTo
	// Execute any post-hook. Errors are logged and ignored.
	if params != nil && params.Post != nil {
		if err := s.hookExecutor.Execute(params.Post, to, "posthook"); err != nil {
			util.HandleError(fmt.Errorf("post hook failed: %s", err))
		} else {
			glog.Infof("Post hook finished")
		}
	}
	glog.Infof("Deployment %s successfully made active", to.Name)
	return nil
}
// scaleAndWait scales the given replication controller to 'replicas' using the
// configured scaler (retrying per 'retry', waiting per 'wait'), then re-fetches
// and returns the updated controller.
func (s *RecreateDeploymentStrategy) scaleAndWait(deployment *kapi.ReplicationController, replicas int, retry *kubectl.RetryParams, wait *kubectl.RetryParams) (*kapi.ReplicationController, error) {
	// ScalePrecondition{-1, ""} — presumably -1/"" disable the size and
	// resource-version precondition checks; TODO confirm against kubectl's
	// ScalePrecondition contract. (Unkeyed struct literal; go vet flags this.)
	if err := s.scaler.Scale(deployment.Namespace, deployment.Name, uint(replicas), &kubectl.ScalePrecondition{-1, ""}, retry, wait); err != nil {
		return nil, err
	}
	updatedDeployment, err := s.getReplicationController(deployment.Namespace, deployment.Name)
	if err != nil {
		return nil, err
	}
	return updatedDeployment, nil
}
// hookExecutor knows how to execute a deployment lifecycle hook.
type hookExecutor interface {
	Execute(hook *deployapi.LifecycleHook, deployment *kapi.ReplicationController, label string) error
}
// hookExecutorImpl is a pluggable hookExecutor.
// NOTE(review): not referenced elsewhere in this file (the strategy uses
// stratsupport.HookExecutor); presumably kept for tests to stub hooks — verify.
type hookExecutorImpl struct {
	executeFunc func(hook *deployapi.LifecycleHook, deployment *kapi.ReplicationController, label string) error
}
// Execute executes the provided lifecycle hook by delegating to executeFunc.
func (i *hookExecutorImpl) Execute(hook *deployapi.LifecycleHook, deployment *kapi.ReplicationController, label string) error {
	return i.executeFunc(hook, deployment, label)
}
| pombredanne/atomic-enterprise | pkg/deploy/strategy/recreate/recreate.go | GO | apache-2.0 | 6,924 |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.persistence;
import org.drools.persistence.TransactionManager;
import org.drools.persistence.map.EnvironmentBuilder;
/**
 * {@link EnvironmentBuilder} backed by a {@link ProcessStorage}: builds a
 * map-based persistence context over the given storage and exposes matching
 * context-manager and transaction-manager instances.
 */
public class ProcessStorageEnvironmentBuilder
    implements
    EnvironmentBuilder {
    private ProcessStorage storage;
    private MapBasedProcessPersistenceContext context;
    public ProcessStorageEnvironmentBuilder(ProcessStorage storage) {
        this.storage = storage;
        this.context = new MapBasedProcessPersistenceContext( storage );
    }
    /** Returns a manager wrapping the single map-based context built above. */
    public ProcessPersistenceContextManager getPersistenceContextManager() {
        return new MapProcessPersistenceContextManager( context ) ;
    }
    /** Returns a manual transaction manager over the same context and storage. */
    public TransactionManager getTransactionManager() {
        return new ManualProcessTransactionManager( context, storage );
    }
}
| Multi-Support/jbpm | jbpm-persistence-jpa/src/main/java/org/jbpm/persistence/ProcessStorageEnvironmentBuilder.java | Java | apache-2.0 | 1,369 |
<?php
/**
 * Standard object/page header. Renders a header row with up to three columns:
 * an optional image (column 1), the title, subheader, status/policy tags
 * (column 2), and action links / button bar / action items (column 3).
 */
final class PHUIHeaderView extends AphrontTagView {
const PROPERTY_STATUS = 1;
private $header;
private $tags = array();
private $image;
private $imageURL = null;
private $imageEditURL = null;
private $subheader;
private $headerIcon;
private $noBackground;
private $bleedHeader;
private $profileHeader;
private $tall;
private $properties = array();
private $actionLinks = array();
private $buttonBar = null;
private $policyObject;
private $epoch;
private $actionItems = array();
private $href;
private $actionList;
private $actionListID;
public function setHeader($header) {
$this->header = $header;
return $this;
}
public function setNoBackground($nada) {
$this->noBackground = $nada;
return $this;
}
public function setTall($tall) {
$this->tall = $tall;
return $this;
}
public function addTag(PHUITagView $tag) {
$this->tags[] = $tag;
return $this;
}
public function setImage($uri) {
$this->image = $uri;
return $this;
}
public function setImageURL($url) {
$this->imageURL = $url;
return $this;
}
public function setImageEditURL($url) {
$this->imageEditURL = $url;
return $this;
}
public function setSubheader($subheader) {
$this->subheader = $subheader;
return $this;
}
public function setBleedHeader($bleed) {
$this->bleedHeader = $bleed;
return $this;
}
public function setProfileHeader($bighead) {
$this->profileHeader = $bighead;
return $this;
}
public function setHeaderIcon($icon) {
$this->headerIcon = $icon;
return $this;
}
public function setActionList(PhabricatorActionListView $list) {
$this->actionList = $list;
return $this;
}
public function setActionListID($action_list_id) {
$this->actionListID = $action_list_id;
return $this;
}
public function setPolicyObject(PhabricatorPolicyInterface $object) {
$this->policyObject = $object;
return $this;
}
public function addProperty($property, $value) {
$this->properties[$property] = $value;
return $this;
}
public function addActionLink(PHUIButtonView $button) {
$this->actionLinks[] = $button;
return $this;
}
public function addActionItem($action) {
$this->actionItems[] = $action;
return $this;
}
public function setButtonBar(PHUIButtonBarView $bb) {
$this->buttonBar = $bb;
return $this;
}
/**
 * Attach a status tag (icon + color + label) shown in the subheader row.
 */
public function setStatus($icon, $color, $name) {
// TODO: Normalize "closed/archived" to constants.
if ($color == 'dark') {
$color = PHUITagView::COLOR_INDIGO;
}
$tag = id(new PHUITagView())
->setName($name)
->setIcon($icon)
->setColor($color)
->setType(PHUITagView::TYPE_SHADE);
return $this->addProperty(self::PROPERTY_STATUS, $tag);
}
/**
 * Render the given epoch as a coarse "Updated Today / Yesterday / N Day(s)
 * Ago" status tag (whole-day granularity).
 */
public function setEpoch($epoch) {
$age = time() - $epoch;
$age = floor($age / (60 * 60 * 24));
if ($age < 1) {
$when = pht('Today');
} else if ($age == 1) {
$when = pht('Yesterday');
} else {
$when = pht('%s Day(s) Ago', new PhutilNumber($age));
}
$this->setStatus('fa-clock-o bluegrey', null, pht('Updated %s', $when));
return $this;
}
public function setHref($href) {
$this->href = $href;
return $this;
}
public function getHref() {
return $this->href;
}
protected function getTagName() {
return 'div';
}
// CSS classes for the outer shell; the "tall" layout kicks in whenever extra
// rows (properties, policy, subheader) will render, or when forced via
// setTall().
protected function getTagAttributes() {
require_celerity_resource('phui-header-view-css');
$classes = array();
$classes[] = 'phui-header-shell';
if ($this->noBackground) {
$classes[] = 'phui-header-no-background';
}
if ($this->bleedHeader) {
$classes[] = 'phui-bleed-header';
}
if ($this->profileHeader) {
$classes[] = 'phui-profile-header';
}
if ($this->properties || $this->policyObject ||
$this->subheader || $this->tall) {
$classes[] = 'phui-header-tall';
}
return array(
'class' => $classes,
);
}
protected function getTagContent() {
// With an action list attached, add a mobile "Actions" dropdown button.
if ($this->actionList || $this->actionListID) {
$action_button = id(new PHUIButtonView())
->setTag('a')
->setText(pht('Actions'))
->setHref('#')
->setIcon('fa-bars')
->addClass('phui-mobile-menu');
if ($this->actionList) {
$action_button->setDropdownMenu($this->actionList);
} else if ($this->actionListID) {
$action_button->setDropdownMenuID($this->actionListID);
}
$this->addActionLink($action_button);
}
$image = null;
if ($this->image) {
// An explicit image URL wins over the edit URL as the link target.
$image_href = null;
if ($this->imageURL) {
$image_href = $this->imageURL;
} else if ($this->imageEditURL) {
$image_href = $this->imageEditURL;
}
$image = phutil_tag(
'span',
array(
'class' => 'phui-header-image',
'style' => 'background-image: url('.$this->image.')',
));
if ($image_href) {
$edit_view = null;
if ($this->imageEditURL) {
$edit_view = phutil_tag(
'span',
array(
'class' => 'phui-header-image-edit',
),
pht('Edit'));
}
$image = phutil_tag(
'a',
array(
'href' => $image_href,
'class' => 'phui-header-image-href',
),
array(
$image,
$edit_view,
));
}
}
$viewer = $this->getUser();
$left = array();
$right = array();
$space_header = null;
if ($viewer) {
$space_header = id(new PHUISpacesNamespaceContextView())
->setUser($viewer)
->setObject($this->policyObject);
}
if ($this->actionLinks) {
$actions = array();
foreach ($this->actionLinks as $button) {
if (!$button->getColor()) {
$button->setColor(PHUIButtonView::GREY);
}
$button->addClass(PHUI::MARGIN_SMALL_LEFT);
$button->addClass('phui-header-action-link');
$actions[] = $button;
}
$right[] = phutil_tag(
'div',
array(
'class' => 'phui-header-action-links',
),
$actions);
}
if ($this->buttonBar) {
$right[] = phutil_tag(
'div',
array(
'class' => 'phui-header-action-links',
),
$this->buttonBar);
}
if ($this->actionItems) {
$action_list = array();
if ($this->actionItems) {
foreach ($this->actionItems as $item) {
$action_list[] = phutil_tag(
'li',
array(
'class' => 'phui-header-action-item',
),
$item);
}
}
$right[] = phutil_tag(
'ul',
array(
'class' => 'phui-header-action-list',
),
$action_list);
}
$icon = null;
if ($this->headerIcon) {
// Accept either a prebuilt PHUIIconView or a plain icon name.
if ($this->headerIcon instanceof PHUIIconView) {
$icon = id(clone $this->headerIcon)
->addClass('phui-header-icon');
} else {
$icon = id(new PHUIIconView())
->setIcon($this->headerIcon)
->addClass('phui-header-icon');
}
}
$header_content = $this->header;
$href = $this->getHref();
if ($href !== null) {
$header_content = phutil_tag(
'a',
array(
'href' => $href,
),
$header_content);
}
$left[] = phutil_tag(
'span',
array(
'class' => 'phui-header-header',
),
array(
$space_header,
$icon,
$header_content,
));
if ($this->subheader) {
$left[] = phutil_tag(
'div',
array(
'class' => 'phui-header-subheader',
),
array(
$this->subheader,
));
}
if ($this->properties || $this->policyObject || $this->tags) {
$property_list = array();
foreach ($this->properties as $type => $property) {
switch ($type) {
case self::PROPERTY_STATUS:
$property_list[] = $property;
break;
default:
throw new Exception(pht('Incorrect Property Passed'));
break;
}
}
if ($this->policyObject) {
$property_list[] = $this->renderPolicyProperty($this->policyObject);
}
if ($this->tags) {
$property_list[] = $this->tags;
}
$left[] = phutil_tag(
'div',
array(
'class' => 'phui-header-subheader',
),
$property_list);
}
// Column 1: the optional header image.
$header_image = null;
if ($image) {
$header_image = phutil_tag(
'div',
array(
'class' => 'phui-header-col1',
),
$image);
}
// Column 2: title, subheader, and property/tag rows.
$header_left = phutil_tag(
'div',
array(
'class' => 'phui-header-col2',
),
$left);
// Column 3: action links, button bar, and action items.
$header_right = phutil_tag(
'div',
array(
'class' => 'phui-header-col3',
),
$right);
$header_row = phutil_tag(
'div',
array(
'class' => 'phui-header-row',
),
array(
$header_image,
$header_left,
$header_right,
));
return phutil_tag(
'h1',
array(
'class' => 'phui-header-view',
),
$header_row);
}
/**
 * Render the object's view-policy tag (icon + linked policy name), or null
 * when no view policy is available for the object.
 */
private function renderPolicyProperty(PhabricatorPolicyInterface $object) {
$viewer = $this->getUser();
$policies = PhabricatorPolicyQuery::loadPolicies($viewer, $object);
$view_capability = PhabricatorPolicyCapability::CAN_VIEW;
$policy = idx($policies, $view_capability);
if (!$policy) {
return null;
}
// If an object is in a Space with a strictly stronger (more restrictive)
// policy, we show the more restrictive policy. This better aligns the
// UI hint with the actual behavior.
// NOTE: We'll do this even if the viewer has access to only one space, and
// show them information about the existence of spaces if they click
// through.
$use_space_policy = false;
if ($object instanceof PhabricatorSpacesInterface) {
$space_phid = PhabricatorSpacesNamespaceQuery::getObjectSpacePHID(
$object);
$spaces = PhabricatorSpacesNamespaceQuery::getViewerSpaces($viewer);
$space = idx($spaces, $space_phid);
if ($space) {
$space_policies = PhabricatorPolicyQuery::loadPolicies(
$viewer,
$space);
$space_policy = idx($space_policies, $view_capability);
if ($space_policy) {
if ($space_policy->isStrongerThan($policy)) {
$policy = $space_policy;
$use_space_policy = true;
}
}
}
}
$container_classes = array();
$container_classes[] = 'policy-header-callout';
$phid = $object->getPHID();
$policy_name = array($policy->getShortName());
$policy_icon = $policy->getIcon().' bluegrey';
// Objects implementing a policy codex may override the displayed name,
// icon, and tag classes.
if ($object instanceof PhabricatorPolicyCodexInterface) {
$codex = PhabricatorPolicyCodex::newFromObject($object, $viewer);
$codex_name = $codex->getPolicyShortName($policy, $view_capability);
if ($codex_name !== null) {
$policy_name = $codex_name;
}
$codex_icon = $codex->getPolicyIcon($policy, $view_capability);
if ($codex_icon !== null) {
$policy_icon = $codex_icon;
}
$codex_classes = $codex->getPolicyTagClasses($policy, $view_capability);
foreach ($codex_classes as $codex_class) {
$container_classes[] = $codex_class;
}
}
if (!is_array($policy_name)) {
$policy_name = (array)$policy_name;
}
// Multi-tier policy names are joined with an angle-bracket separator.
$arrow = id(new PHUIIconView())
->setIcon('fa-angle-right')
->addClass('policy-tier-separator');
$policy_name = phutil_implode_html($arrow, $policy_name);
$icon = id(new PHUIIconView())
->setIcon($policy_icon);
$link = javelin_tag(
'a',
array(
'class' => 'policy-link',
'href' => '/policy/explain/'.$phid.'/'.$view_capability.'/',
'sigil' => 'workflow',
),
$policy_name);
return phutil_tag(
'span',
array(
'class' => implode(' ', $container_classes),
),
array($icon, $link));
}
}
| freebsd/phabricator | src/view/phui/PHUIHeaderView.php | PHP | apache-2.0 | 12,343 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.string;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.List;
import org.apache.beam.sdk.extensions.sql.impl.interpreter.BeamSqlFnExecutorTestBase;
import org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.BeamSqlExpression;
import org.apache.beam.sdk.extensions.sql.impl.interpreter.operator.BeamSqlPrimitive;
import org.apache.calcite.sql.type.SqlTypeName;
import org.junit.Assert;
import org.junit.Test;
/**
 * Test for BeamSqlOverlayExpression.
 *
 * Covers operand-type acceptance (3- and 4-operand forms) and evaluation of
 * OVERLAY with varying position/length arguments.
 */
public class BeamSqlOverlayExpressionTest extends BeamSqlFnExecutorTestBase {
  // Both the 3-operand (string, replacement, position) and 4-operand
  // (..., length) forms must be accepted.
  @Test public void accept() throws Exception {
    List<BeamSqlExpression> operands = new ArrayList<>();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.VARCHAR, "hello"));
    operands.add(BeamSqlPrimitive.of(SqlTypeName.VARCHAR, "hello"));
    operands.add(BeamSqlPrimitive.of(SqlTypeName.INTEGER, 1));
    assertTrue(new BeamSqlOverlayExpression(operands).accept());
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.VARCHAR, "hello"));
    operands.add(BeamSqlPrimitive.of(SqlTypeName.VARCHAR, "hello"));
    operands.add(BeamSqlPrimitive.of(SqlTypeName.INTEGER, 1));
    operands.add(BeamSqlPrimitive.of(SqlTypeName.INTEGER, 2));
    assertTrue(new BeamSqlOverlayExpression(operands).accept());
  }
  // Each case overlays "resou" into "w3333333rce" at position 3; the 4th
  // operand (when present) varies the replaced length, changing how many of
  // the original characters survive.
  @Test public void evaluate() throws Exception {
    List<BeamSqlExpression> operands = new ArrayList<>();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.VARCHAR, "w3333333rce"));
    operands.add(BeamSqlPrimitive.of(SqlTypeName.VARCHAR, "resou"));
    operands.add(BeamSqlPrimitive.of(SqlTypeName.INTEGER, 3));
    Assert.assertEquals("w3resou3rce",
        new BeamSqlOverlayExpression(operands).evaluate(record, null).getValue());
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.VARCHAR, "w3333333rce"));
    operands.add(BeamSqlPrimitive.of(SqlTypeName.VARCHAR, "resou"));
    operands.add(BeamSqlPrimitive.of(SqlTypeName.INTEGER, 3));
    operands.add(BeamSqlPrimitive.of(SqlTypeName.INTEGER, 4));
    Assert.assertEquals("w3resou33rce",
        new BeamSqlOverlayExpression(operands).evaluate(record, null).getValue());
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.VARCHAR, "w3333333rce"));
    operands.add(BeamSqlPrimitive.of(SqlTypeName.VARCHAR, "resou"));
    operands.add(BeamSqlPrimitive.of(SqlTypeName.INTEGER, 3));
    operands.add(BeamSqlPrimitive.of(SqlTypeName.INTEGER, 5));
    Assert.assertEquals("w3resou3rce",
        new BeamSqlOverlayExpression(operands).evaluate(record, null).getValue());
    operands.clear();
    operands.add(BeamSqlPrimitive.of(SqlTypeName.VARCHAR, "w3333333rce"));
    operands.add(BeamSqlPrimitive.of(SqlTypeName.VARCHAR, "resou"));
    operands.add(BeamSqlPrimitive.of(SqlTypeName.INTEGER, 3));
    operands.add(BeamSqlPrimitive.of(SqlTypeName.INTEGER, 7));
    Assert.assertEquals("w3resouce",
        new BeamSqlOverlayExpression(operands).evaluate(record, null).getValue());
  }
}
| jbonofre/beam | sdks/java/extensions/sql/src/test/java/org/apache/beam/sdk/extensions/sql/impl/interpreter/operator/string/BeamSqlOverlayExpressionTest.java | Java | apache-2.0 | 3,880 |
// Machine-generated JavaScript (presumably a TypeScript compiler-emit
// baseline, per the trailing dataset metadata row — do not hand-edit logic).
// Top-level declarations: variable V, empty function F, and class C compiled
// to the classic IIFE-with-prototype pattern. All bodies are intentionally
// empty; the file exists to exercise the emitter's output shape.
var V;
function F() {
}
;
var C = (function () {
    function C() {
    }
    C.prototype.pF = function () {
    };
    C.prototype.rF = function () {
    };
    C.prototype.pgF = function () {
    };
    C.prototype.psF = function (param) {
    };
    C.prototype.rgF = function () {
    };
    C.prototype.rsF = function (param) {
    };
    C.tF = function tF() {
    };
    C.tsF = function tsF(param) {
    };
    C.tgF = function tgF() {
    };
    return C;
})();
;
;
// Internal module M compiled to the standard namespace IIFE pattern:
// (function (M) { ... })(M || (M = {})). Exported members are assigned onto
// the M parameter (M.eF, M.eC, ...); non-exported ones stay function-local.
// Contains a nested module M, exported members eV/eF/eC, and a nested
// exported module eM.
var M;
(function (M) {
    var V;
    function F() {
    }
    ;
    var C = (function () {
        function C() {
        }
        C.prototype.pF = function () {
        };
        C.prototype.rF = function () {
        };
        C.prototype.pgF = function () {
        };
        C.prototype.psF = function (param) {
        };
        C.prototype.rgF = function () {
        };
        C.prototype.rsF = function (param) {
        };
        C.tF = function tF() {
        };
        C.tsF = function tsF(param) {
        };
        C.tgF = function tgF() {
        };
        return C;
    })();
    ;
    ;
    // Nested (non-exported) module M shadowing the outer M inside this scope.
    var M;
    (function (M) {
        var V;
        function F() {
        }
        ;
        var C = (function () {
            function C() { }
            return C;
        })();
        ;
        ;
        ;
        M.eV;
        function eF() {
        }
        M.eF = eF;
        ;
        var eC = (function () {
            function eC() { }
            return eC;
        })();
        M.eC = eC;
        ;
        ;
        ;
        ;
        ;
    })(M || (M = {}));
    // Exported members of the outer module M.
    M.eV;
    function eF() {
    }
    M.eF = eF;
    ;
    var eC = (function () {
        function eC() {
        }
        eC.prototype.pF = function () {
        };
        eC.prototype.rF = function () {
        };
        eC.prototype.pgF = function () {
        };
        eC.prototype.psF = function (param) {
        };
        eC.prototype.rgF = function () {
        };
        eC.prototype.rsF = function (param) {
        };
        eC.tF = function tF() {
        };
        eC.tsF = function tsF(param) {
        };
        eC.tgF = function tgF() {
        };
        return eC;
    })();
    M.eC = eC;
    ;
    ;
    // Exported nested module M.eM, aliased to a local eM afterwards.
    (function (eM) {
        var V;
        function F() {
        }
        ;
        var C = (function () {
            function C() { }
            return C;
        })();
        ;
        ;
        ;
        eM.eV;
        function eF() {
        }
        eM.eF = eF;
        ;
        var eC = (function () {
            function eC() { }
            return eC;
        })();
        eM.eC = eC;
        ;
        ;
        ;
        ;
        ;
    })(M.eM || (M.eM = {}));
    var eM = M.eM;
    ;
})(M || (M = {}));
// CommonJS top-level exports: eV/eF and the exported class eC (same
// IIFE-with-prototype emit shape as the non-exported C above), assigned onto
// the module's exports object.
exports.eV;
function eF() {
}
exports.eF = eF;
;
var eC = (function () {
    function eC() {
    }
    eC.prototype.pF = function () {
    };
    eC.prototype.rF = function () {
    };
    eC.prototype.pgF = function () {
    };
    eC.prototype.psF = function (param) {
    };
    eC.prototype.rgF = function () {
    };
    eC.prototype.rsF = function (param) {
    };
    eC.tF = function tF() {
    };
    eC.tsF = function tsF(param) {
    };
    eC.tgF = function tgF() {
    };
    return eC;
})();
exports.eC = eC;
;
;
// Exported top-level module eM, attached to exports.eM and aliased to a
// local eM. Mirrors the structure of module M above: local V/F/C, a nested
// non-exported module M, exported eV/eF/eC, and a nested exported module
// eM.eM (whose parameter shadows the outer eM inside its body).
(function (eM) {
    var V;
    function F() {
    }
    ;
    var C = (function () {
        function C() {
        }
        C.prototype.pF = function () {
        };
        C.prototype.rF = function () {
        };
        C.prototype.pgF = function () {
        };
        C.prototype.psF = function (param) {
        };
        C.prototype.rgF = function () {
        };
        C.prototype.rsF = function (param) {
        };
        C.tF = function tF() {
        };
        C.tsF = function tsF(param) {
        };
        C.tgF = function tgF() {
        };
        return C;
    })();
    ;
    ;
    var M;
    (function (M) {
        var V;
        function F() {
        }
        ;
        var C = (function () {
            function C() { }
            return C;
        })();
        ;
        ;
        ;
        M.eV;
        function eF() {
        }
        M.eF = eF;
        ;
        var eC = (function () {
            function eC() { }
            return eC;
        })();
        M.eC = eC;
        ;
        ;
        ;
        ;
        ;
    })(M || (M = {}));
    eM.eV;
    function eF() {
    }
    eM.eF = eF;
    ;
    var eC = (function () {
        function eC() {
        }
        eC.prototype.pF = function () {
        };
        eC.prototype.rF = function () {
        };
        eC.prototype.pgF = function () {
        };
        eC.prototype.psF = function (param) {
        };
        eC.prototype.rgF = function () {
        };
        eC.prototype.rsF = function (param) {
        };
        eC.tF = function tF() {
        };
        eC.tsF = function tsF(param) {
        };
        eC.tgF = function tgF() {
        };
        return eC;
    })();
    eM.eC = eC;
    ;
    ;
    (function (eM) {
        var V;
        function F() {
        }
        ;
        var C = (function () {
            function C() { }
            return C;
        })();
        ;
        ;
        ;
        eM.eV;
        function eF() {
        }
        eM.eF = eF;
        ;
        var eC = (function () {
            function eC() { }
            return eC;
        })();
        eM.eC = eC;
        ;
        ;
        ;
        ;
        ;
    })(eM.eM || (eM.eM = {}));
    var eM = eM.eM;
    ;
})(exports.eM || (exports.eM = {}));
var eM = exports.eM;
; | vcsjones/typescript | tests/baselines/reference/giant.commonjs.js | JavaScript | apache-2.0 | 5,825 |
/**
* Copyright 2010-2016 Boxfuse GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flywaydb.core.api;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
/**
* A version of a migration.
*
* @author Axel Fontaine
*/
/**
 * A version of a migration.
 *
 * <p>Instances are immutable. Ordering treats {@link #EMPTY} and
 * {@link #CURRENT} as lower than any real version and {@link #LATEST} as
 * higher than any real version.</p>
 *
 * @author Axel Fontaine
 */
public final class MigrationVersion implements Comparable<MigrationVersion> {
    /**
     * Version for an empty schema.
     */
    public static final MigrationVersion EMPTY = new MigrationVersion(null, "<< Empty Schema >>");

    /**
     * Latest version.
     */
    public static final MigrationVersion LATEST = new MigrationVersion(BigInteger.valueOf(-1), "<< Latest Version >>");

    /**
     * Current version. Only a marker. For the real version use Flyway.info().current() instead.
     */
    public static final MigrationVersion CURRENT = new MigrationVersion(BigInteger.valueOf(-2), "<< Current Version >>");

    /**
     * Compiled pattern for matching proper version format.
     * Pattern instances are immutable and thread-safe; declared final so this
     * shared constant cannot be reassigned.
     */
    private static final Pattern splitPattern = Pattern.compile("\\.(?=\\d)");

    /**
     * The individual parts this version string is composed of. Ex. 1.2.3.4.0 -> [1, 2, 3, 4, 0]
     */
    private final List<BigInteger> versionParts;

    /**
     * The printable text to represent the version.
     */
    private final String displayText;

    /**
     * Factory for creating a MigrationVersion from a version String.
     *
     * @param version The version String. The value {@code current} will be interpreted as MigrationVersion.CURRENT,
     *                a marker for the latest version that has been applied to the database.
     * @return The MigrationVersion
     */
    @SuppressWarnings("ConstantConditions")
    public static MigrationVersion fromVersion(String version) {
        if ("current".equalsIgnoreCase(version)) return CURRENT;
        // LATEST.getVersion() is Long.MAX_VALUE rendered as a string.
        if (LATEST.getVersion().equals(version)) return LATEST;
        if (version == null) return EMPTY;
        return new MigrationVersion(version);
    }

    /**
     * Creates a Version using this version string.
     *
     * @param version The version in one of the following formats: 6, 6.0, 005, 1.2.3.4, 201004200021.
     *                Must not be {@code null} here; {@link #fromVersion(String)} maps {@code null} to {@link #EMPTY}.
     */
    private MigrationVersion(String version) {
        // Underscores are accepted as separators and normalized to dots.
        String normalizedVersion = version.replace('_', '.');
        this.versionParts = tokenize(normalizedVersion);
        this.displayText = normalizedVersion;
    }

    /**
     * Creates a Version using this version string.
     *
     * @param version     The version as a single numeric part, or {@code null} for an empty schema.
     * @param displayText The alternative text to display instead of the version number.
     */
    private MigrationVersion(BigInteger version, String displayText) {
        this.versionParts = new ArrayList<BigInteger>();
        this.versionParts.add(version);
        this.displayText = displayText;
    }

    /**
     * @return The textual representation of the version.
     */
    @Override
    public String toString() {
        return displayText;
    }

    /**
     * @return Numeric version as String, or {@code null} for {@link #EMPTY}.
     */
    public String getVersion() {
        if (this.equals(EMPTY)) return null;
        if (this.equals(LATEST)) return Long.toString(Long.MAX_VALUE);
        return displayText;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        MigrationVersion version1 = (MigrationVersion) o;

        // Equality is defined via compareTo so that e.g. 1.0 equals 1.
        return compareTo(version1) == 0;
    }

    @Override
    public int hashCode() {
        // versionParts is assigned by every constructor; the null check is
        // kept as defensive coding only.
        return versionParts == null ? 0 : versionParts.hashCode();
    }

    @SuppressWarnings("NullableProblems")
    public int compareTo(MigrationVersion o) {
        if (o == null) {
            return 1;
        }

        // Marker instances sort to the extremes: EMPTY/CURRENT lowest, LATEST highest.
        if (this == EMPTY) {
            return o == EMPTY ? 0 : Integer.MIN_VALUE;
        }

        if (this == CURRENT) {
            return o == CURRENT ? 0 : Integer.MIN_VALUE;
        }

        if (this == LATEST) {
            return o == LATEST ? 0 : Integer.MAX_VALUE;
        }

        if (o == EMPTY) {
            return Integer.MAX_VALUE;
        }

        if (o == CURRENT) {
            return Integer.MAX_VALUE;
        }

        if (o == LATEST) {
            return Integer.MIN_VALUE;
        }

        // Compare part by part, padding the shorter version with zeros
        // (so 1.2 == 1.2.0).
        final List<BigInteger> elements1 = versionParts;
        final List<BigInteger> elements2 = o.versionParts;
        int largestNumberOfElements = Math.max(elements1.size(), elements2.size());
        for (int i = 0; i < largestNumberOfElements; i++) {
            final int compared = getOrZero(elements1, i).compareTo(getOrZero(elements2, i));
            if (compared != 0) {
                return compared;
            }
        }
        return 0;
    }

    /**
     * @return Element {@code i} of the list, or {@link BigInteger#ZERO} when past the end.
     */
    private BigInteger getOrZero(List<BigInteger> elements, int i) {
        return i < elements.size() ? elements.get(i) : BigInteger.ZERO;
    }

    /**
     * Splits this string into a list of numeric parts.
     *
     * @param str The string to split.
     * @return The resulting list, with trailing zero parts stripped.
     * @throws FlywayException when a part contains non-numeric characters.
     */
    private List<BigInteger> tokenize(String str) {
        List<BigInteger> numbers = new ArrayList<BigInteger>();
        for (String number : splitPattern.split(str)) {
            try {
                numbers.add(new BigInteger(number));
            } catch (NumberFormatException e) {
                throw new FlywayException(
                        "Invalid version containing non-numeric characters. Only 0..9 and . are allowed. Invalid version: "
                                + str);
            }
        }
        // Strip trailing zeros so that 1.2.0 normalizes to 1.2 (keeps
        // compareTo/hashCode consistent across equivalent spellings).
        for (int i = numbers.size() - 1; i > 0; i--) {
            if (!numbers.get(i).equals(BigInteger.ZERO)) break;
            numbers.remove(i);
        }
        return numbers;
    }
}
| nathanvick/flyway | flyway-core/src/main/java/org/flywaydb/core/api/MigrationVersion.java | Java | apache-2.0 | 6,513 |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.authorization.history;
import static org.camunda.bpm.engine.authorization.Authorization.ANY;
import static org.camunda.bpm.engine.authorization.Permissions.READ_HISTORY;
import static org.camunda.bpm.engine.authorization.Resources.PROCESS_DEFINITION;
import java.util.Date;
import java.util.List;
import org.camunda.bpm.engine.history.HistoricIncident;
import org.camunda.bpm.engine.history.HistoricIncidentQuery;
import org.camunda.bpm.engine.impl.AbstractQuery;
import org.camunda.bpm.engine.impl.context.Context;
import org.camunda.bpm.engine.impl.interceptor.Command;
import org.camunda.bpm.engine.impl.interceptor.CommandContext;
import org.camunda.bpm.engine.impl.interceptor.CommandExecutor;
import org.camunda.bpm.engine.impl.jobexecutor.TimerSuspendProcessDefinitionHandler;
import org.camunda.bpm.engine.impl.persistence.entity.HistoricIncidentEntity;
import org.camunda.bpm.engine.runtime.Job;
import org.camunda.bpm.engine.test.authorization.AuthorizationTest;
/**
* @author Roman Smirnov
*
*/
/**
 * Authorization checks for the historic incident query: verifies that
 * READ_HISTORY on the process definition (or on ANY) controls which historic
 * incidents are visible, and that standalone (process-independent) incidents
 * are always returned.
 *
 * @author Roman Smirnov
 */
public class HistoricIncidentAuthorizationTest extends AuthorizationTest {

  protected static final String TIMER_START_PROCESS_KEY = "timerStartProcess";
  protected static final String ONE_INCIDENT_PROCESS_KEY = "process";
  protected static final String ANOTHER_ONE_INCIDENT_PROCESS_KEY = "anotherOneIncidentProcess";

  protected String deploymentId;

  public void setUp() throws Exception {
    super.setUp();
    deploymentId = createDeployment(null,
        "org/camunda/bpm/engine/test/authorization/timerStartEventProcess.bpmn20.xml",
        "org/camunda/bpm/engine/test/authorization/oneIncidentProcess.bpmn20.xml",
        "org/camunda/bpm/engine/test/authorization/anotherOneIncidentProcess.bpmn20.xml").getId();
  }

  public void tearDown() {
    deleteDeployment(deploymentId);
    super.tearDown();
  }

  // fixtures //////////////////////////////////////////////////////////

  /**
   * Suspends the one-incident process definition with a delayed suspension
   * date, which creates a standalone (process-independent) timer job, and
   * exhausts that job's retries so a standalone historic incident is written.
   *
   * @param onlyJobsWithRetriesLeft whether to search only jobs that still have retries
   *                                (used when earlier calls already failed a standalone job)
   * @return the id of the failed standalone job; the caller must delete it
   */
  protected String createStandaloneIncident(boolean onlyJobsWithRetriesLeft) {
    disableAuthorization();
    repositoryService.suspendProcessDefinitionByKey(ONE_INCIDENT_PROCESS_KEY, true, new Date());
    List<Job> jobs = onlyJobsWithRetriesLeft
        ? managementService.createJobQuery().withRetriesLeft().list()
        : managementService.createJobQuery().list();
    String jobId = null;
    for (Job job : jobs) {
      // the suspension job is the only job without a process definition key
      if (job.getProcessDefinitionKey() == null) {
        jobId = job.getId();
        break;
      }
    }
    managementService.setJobRetries(jobId, 0);
    enableAuthorization();
    return jobId;
  }

  /**
   * Exhausts the retries of the (single) timer start job, creating a historic
   * incident attached to the timer start process definition.
   */
  protected void createStartTimerJobIncident() {
    disableAuthorization();
    String jobId = managementService.createJobQuery().singleResult().getId();
    managementService.setJobRetries(jobId, 0);
    enableAuthorization();
  }

  /**
   * Starts the default incident mix: two instances of the one-incident
   * process and three instances of the other incident process (5 incidents).
   */
  protected void startDefaultIncidents() {
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);
    startProcessAndExecuteJob(ANOTHER_ONE_INCIDENT_PROCESS_KEY);
    startProcessAndExecuteJob(ANOTHER_ONE_INCIDENT_PROCESS_KEY);
    startProcessAndExecuteJob(ANOTHER_ONE_INCIDENT_PROCESS_KEY);
  }

  /**
   * Deletes the given jobs with authorization checks disabled.
   */
  protected void deleteJobs(String... jobIds) {
    disableAuthorization();
    for (String jobId : jobIds) {
      managementService.deleteJob(jobId);
    }
    enableAuthorization();
  }

  // historic incident query (standalone) //////////////////////////////

  public void testQueryForStandaloneHistoricIncidents() {
    // given
    String jobId = createStandaloneIncident(false);

    // when
    HistoricIncidentQuery query = historyService.createHistoricIncidentQuery();

    // then: standalone incidents are visible without any authorization
    verifyQueryResults(query, 1);

    deleteJobs(jobId);
    clearDatabase();
  }

  // historic incident query (start timer job incident) //////////////////////////////

  public void testStartTimerJobIncidentQueryWithoutAuthorization() {
    // given
    createStartTimerJobIncident();

    // when
    HistoricIncidentQuery query = historyService.createHistoricIncidentQuery();

    // then
    verifyQueryResults(query, 0);
  }

  public void testStartTimerJobIncidentQueryWithReadHistoryPermissionOnProcessDefinition() {
    // given
    createStartTimerJobIncident();
    createGrantAuthorization(PROCESS_DEFINITION, TIMER_START_PROCESS_KEY, userId, READ_HISTORY);

    // when
    HistoricIncidentQuery query = historyService.createHistoricIncidentQuery();

    // then
    verifyQueryResults(query, 1);
  }

  public void testStartTimerJobIncidentQueryWithReadHistoryPermissionOnAnyProcessDefinition() {
    // given
    createStartTimerJobIncident();
    createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);

    // when
    HistoricIncidentQuery query = historyService.createHistoricIncidentQuery();

    // then
    verifyQueryResults(query, 1);
  }

  // NOTE(review): despite its name, this test granted READ_HISTORY (not a
  // READ_INSTANCE permission) in the original, making it a duplicate of the
  // previous test — grant kept as-is to preserve behavior; confirm intent.
  public void testStartTimerJobIncidentQueryWithReadInstancePermissionOnAnyProcessDefinition() {
    // given
    createStartTimerJobIncident();
    createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);

    // when
    HistoricIncidentQuery query = historyService.createHistoricIncidentQuery();

    // then
    verifyQueryResults(query, 1);
  }

  // historic incident query ///////////////////////////////////////////

  public void testSimpleQueryWithoutAuthorization() {
    // given
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);

    // when
    HistoricIncidentQuery query = historyService.createHistoricIncidentQuery();

    // then
    verifyQueryResults(query, 0);
  }

  public void testSimpleQueryWithReadHistoryPermissionOnProcessDefinition() {
    // given
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);
    createGrantAuthorization(PROCESS_DEFINITION, ONE_INCIDENT_PROCESS_KEY, userId, READ_HISTORY);

    // when
    HistoricIncidentQuery query = historyService.createHistoricIncidentQuery();

    // then
    verifyQueryResults(query, 1);
  }

  public void testSimpleQueryWithReadHistoryPermissionOnAnyProcessDefinition() {
    // given
    startProcessAndExecuteJob(ONE_INCIDENT_PROCESS_KEY);
    createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);

    // when
    HistoricIncidentQuery query = historyService.createHistoricIncidentQuery();

    // then
    verifyQueryResults(query, 1);
  }

  // historic incident query (multiple incidents ) ///////////////////////////////////////////

  public void testQueryWithoutAuthorization() {
    // given
    startDefaultIncidents();

    // when
    HistoricIncidentQuery query = historyService.createHistoricIncidentQuery();

    // then
    verifyQueryResults(query, 0);
  }

  public void testQueryWithReadHistoryPermissionOnProcessDefinition() {
    // given
    startDefaultIncidents();
    createGrantAuthorization(PROCESS_DEFINITION, ONE_INCIDENT_PROCESS_KEY, userId, READ_HISTORY);

    // when
    HistoricIncidentQuery query = historyService.createHistoricIncidentQuery();

    // then: only the two incidents of the granted definition are visible
    verifyQueryResults(query, 2);
  }

  public void testQueryWithReadHistoryPermissionOnAnyProcessDefinition() {
    // given
    startDefaultIncidents();
    createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);

    // when
    HistoricIncidentQuery query = historyService.createHistoricIncidentQuery();

    // then
    verifyQueryResults(query, 5);
  }

  // historic job log (mixed) //////////////////////////////////////////

  public void testMixedQueryWithoutAuthorization() {
    // given: two standalone incidents plus five process incidents
    String firstJobId = createStandaloneIncident(true);
    String secondJobId = createStandaloneIncident(true);
    startDefaultIncidents();

    // when
    HistoricIncidentQuery query = historyService.createHistoricIncidentQuery();

    // then: only the standalone incidents are visible
    verifyQueryResults(query, 2);

    deleteJobs(firstJobId, secondJobId);
    clearDatabase();
  }

  public void testMixedQueryWithReadHistoryPermissionOnProcessDefinition() {
    // given
    String firstJobId = createStandaloneIncident(true);
    String secondJobId = createStandaloneIncident(true);
    startDefaultIncidents();

    createGrantAuthorization(PROCESS_DEFINITION, ONE_INCIDENT_PROCESS_KEY, userId, READ_HISTORY);

    // when
    HistoricIncidentQuery query = historyService.createHistoricIncidentQuery();

    // then: 2 standalone + 2 incidents of the granted definition
    verifyQueryResults(query, 4);

    deleteJobs(firstJobId, secondJobId);
    clearDatabase();
  }

  public void testMixedQueryWithReadHistoryPermissionOnAnyProcessDefinition() {
    // given
    String firstJobId = createStandaloneIncident(true);
    String secondJobId = createStandaloneIncident(true);
    startDefaultIncidents();

    createGrantAuthorization(PROCESS_DEFINITION, ANY, userId, READ_HISTORY);

    // when
    HistoricIncidentQuery query = historyService.createHistoricIncidentQuery();

    // then: 2 standalone + all 5 process incidents
    verifyQueryResults(query, 7);

    deleteJobs(firstJobId, secondJobId);
    clearDatabase();
  }

  // helper ////////////////////////////////////////////////////////////

  protected void verifyQueryResults(HistoricIncidentQuery query, int countExpected) {
    verifyQueryResults((AbstractQuery<?, ?>) query, countExpected);
  }

  /**
   * Removes the suspension job logs and all historic incidents so that
   * subsequent tests start from a clean history.
   */
  protected void clearDatabase() {
    clearOpLog();

    CommandExecutor commandExecutor = processEngineConfiguration.getCommandExecutorTxRequired();
    commandExecutor.execute(new Command<Object>() {
      public Object execute(CommandContext commandContext) {
        commandContext.getHistoricJobLogManager().deleteHistoricJobLogsByHandlerType(TimerSuspendProcessDefinitionHandler.TYPE);
        List<HistoricIncident> incidents = Context.getProcessEngineConfiguration().getHistoryService().createHistoricIncidentQuery().list();
        for (HistoricIncident incident : incidents) {
          commandContext.getHistoricIncidentManager().delete((HistoricIncidentEntity) incident);
        }
        return null;
      }
    });
  }

}
| rainerh/camunda-bpm-platform | engine/src/test/java/org/camunda/bpm/engine/test/authorization/history/HistoricIncidentAuthorizationTest.java | Java | apache-2.0 | 14,196 |
<?php
// Exit if accessed directly
if( !defined( 'ABSPATH' ) ) {
	exit;
}

/**
 * Widget/sidebar template rendering the 'ciencia' widget area
 * (falls back to a monthly-archive list when that area has no widgets).
 *
 * NOTE(review): the header below was inherited from the Responsive theme's
 * generic sidebar.php; the @filesource path may not match this file — confirm.
 *
 * @file sidebar.php
 * @package Responsive
 * @author Emil Uzelac
 * @copyright 2003 - 2014 CyberChimps
 * @license license.txt
 * @version Release: 1.0
 * @filesource wp-content/themes/responsive/sidebar.php
 * @link http://codex.wordpress.org/Theme_Development#Widgets_.28sidebar.php.29
 * @since available since Release 1.0
 */

/*
 * If this is a full-width page, exit
 */
if( 'full-width-page' == responsive_get_layout() ) {
	return;
}
?>
<?php responsive_widgets_before(); // above widgets container hook ?>
<div id="widgets" class="<?php echo implode( ' ', responsive_get_sidebar_classes() ); ?>">
	<?php responsive_widgets(); // above widgets hook ?>
	<?php if( !dynamic_sidebar( 'ciencia' ) ) : // render 'ciencia' widgets; fallback below when empty ?>
		<div class="widget-wrapper">
			<div class="widget-title"><h3><?php _e( 'In Archive', 'responsive' ); ?></h3></div>
			<ul>
				<?php wp_get_archives( array( 'type' => 'monthly' ) ); ?>
			</ul>
		</div><!-- end of .widget-wrapper -->
	<?php endif; //end of main-sidebar ?>
	<?php responsive_widgets_end(); // after widgets hook ?>
</div><!-- end of #widgets -->
<?php responsive_widgets_after(); // after widgets container hook ?>
| Doap/sinkjuice.com | wp-content/themes/prensiguia/sidebar-ciencia.php | PHP | apache-2.0 | 1,336 |
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.siddhi.core.managment;
import io.siddhi.core.SiddhiAppRuntime;
import io.siddhi.core.SiddhiManager;
import io.siddhi.core.event.Event;
import io.siddhi.core.exception.SiddhiAppCreationException;
import io.siddhi.core.stream.input.InputHandler;
import io.siddhi.core.stream.output.StreamCallback;
import io.siddhi.core.util.EventPrinter;
import io.siddhi.core.util.SiddhiTestHelper;
import org.apache.log4j.Logger;
import org.testng.Assert;
import org.testng.AssertJUnit;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.HashMap;
import java.util.concurrent.atomic.AtomicInteger;
public class AsyncTestCase {
private static final Logger log = Logger.getLogger(AsyncTestCase.class);
private AtomicInteger count;
private boolean eventArrived;
/**
 * Resets the shared per-test state before every test method so each test
 * observes only its own events.
 */
@BeforeMethod
public void init() {
    eventArrived = false;
    count = new AtomicInteger();
}
/**
 * Creating an app that uses the bare {@code @app:async} annotation (no
 * parameters) must fail with a SiddhiAppCreationException.
 */
@Test(expectedExceptions = SiddhiAppCreationException.class)
public void asyncTest1() throws InterruptedException {
    log.info("async test 1");

    String app =
            "@app:async " +
            "define stream cseEventStream (symbol string, price float, volume int);" +
            "define stream cseEventStream2 (symbol string, price float, volume int);" +
            "@info(name = 'query1') " +
            "from cseEventStream[70 > price] " +
            "select * " +
            "insert into outputStream ;" +
            "@info(name = 'query2') " +
            "from cseEventStream[volume > 90] " +
            "select * " +
            "insert into outputStream ;";

    // Expected to throw while parsing/constructing the runtime.
    new SiddhiManager().createSiddhiAppRuntime(app);
}
/**
 * Creating an app that uses {@code @app:async(buffer.size='2')} at app level
 * must fail with a SiddhiAppCreationException.
 */
@Test(expectedExceptions = SiddhiAppCreationException.class, dependsOnMethods = {"asyncTest1"})
public void asyncTest2() throws InterruptedException {
    log.info("async test 2");

    String app =
            "@app:async(buffer.size='2')" +
            " " +
            "define stream cseEventStream (symbol string, price float, volume int);" +
            "define stream cseEventStream2 (symbol string, price float, volume int);" +
            "@info(name = 'query1') " +
            "from cseEventStream[70 > price] " +
            "select * " +
            "insert into innerStream ;" +
            "@info(name = 'query2') " +
            "from innerStream[volume > 90] " +
            "select * " +
            "insert into outputStream ;";

    // Expected to throw while parsing/constructing the runtime.
    new SiddhiManager().createSiddhiAppRuntime(app);
}
/**
 * Back-pressure check: the stream is @async with a tiny buffer (size 2) and
 * the consumer sleeps per batch, so sending 5 events must block the producer —
 * the elapsed send time is asserted to be at least 2 seconds and all 5 events
 * must still arrive.
 */
@Test(dependsOnMethods = {"asyncTest2"})
public void asyncTest3() throws InterruptedException {
    log.info("async test 3");

    SiddhiManager siddhiManager = new SiddhiManager();

    String siddhiApp = "" +
            " " +
            "@async(buffer.size='2')" +
            "define stream cseEventStream (symbol string, price float, volume int);" +
            "" +
            "define stream cseEventStream2 (symbol string, price float, volume int);" +
            "" +
            "@info(name = 'query1') " +
            "from cseEventStream[70 > price] " +
            "select * " +
            "insert into innerStream ;" +
            "" +
            "@info(name = 'query2') " +
            "from innerStream[volume > 90] " +
            "select * " +
            "insert into outputStream ;";

    SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(siddhiApp);

    siddhiAppRuntime.addCallback("outputStream", new StreamCallback() {
        @Override
        public void receive(Event[] events) {
            EventPrinter.print(events);
            try {
                // Slow consumer: sleeps 1s or 2s per batch.
                // NOTE(review): Math.round(Math.random()) is only ever 0 or 1;
                // if a uniform 1-2s sleep was intended this should be
                // Math.round(Math.random() * 1000) + 1000 — confirm.
                Thread.sleep(Math.round(Math.random()) * 1000 + 1000);
            } catch (InterruptedException e) {
                log.error(e.getMessage(), e);
            }
            eventArrived = true;
            for (Event event : events) {
                count.incrementAndGet();
            }
        }
    });

    InputHandler inputHandler = siddhiAppRuntime.getInputHandler("cseEventStream");
    siddhiAppRuntime.start();

    long startTime = System.currentTimeMillis();
    inputHandler.send(new Object[]{"WSO2", 55.6f, 100});
    inputHandler.send(new Object[]{"IBM", 9.6f, 100});
    inputHandler.send(new Object[]{"FB", 7.6f, 100});
    inputHandler.send(new Object[]{"GOOG", 5.6f, 100});
    inputHandler.send(new Object[]{"WSO2", 15.6f, 100});
    // Measured immediately after the last send: captures producer blocking time.
    long timeDiff = System.currentTimeMillis() - startTime;

    Thread.sleep(5000);
    siddhiAppRuntime.shutdown();

    AssertJUnit.assertTrue(eventArrived);
    AssertJUnit.assertEquals(5, count.get());
    // The tiny buffer must have stalled the producer for at least 2 seconds.
    AssertJUnit.assertTrue(timeDiff >= 2000);
}
/**
 * Multi-worker async check: with workers='2' and batch.size.max='2', all 20
 * events must be delivered, every callback batch must contain at most 2
 * events, and exactly 2 distinct consumer threads must have been used.
 */
@Test(dependsOnMethods = {"asyncTest3"})
public void asyncTest4() throws InterruptedException {
    log.info("async test 4");

    // Maps consumer thread name -> number of batches it processed.
    HashMap<String, Integer> threads = new HashMap<>();
    SiddhiManager siddhiManager = new SiddhiManager();

    String siddhiApp = "" +
            " " +
            "@async(buffer.size='16', workers='2', batch.size.max='2')" +
            "define stream cseEventStream (symbol string, price float, volume int);" +
            "" +
            "@info(name = 'query1') " +
            "from cseEventStream[70 < price] " +
            "select * " +
            "insert into innerStream ;" +
            "" +
            "@info(name = 'query2') " +
            "from innerStream[volume > 90] " +
            "select * " +
            "insert into outputStream ;";

    SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(siddhiApp);

    siddhiAppRuntime.addCallback("outputStream", new StreamCallback() {
        @Override
        public void receive(Event[] events) {
            EventPrinter.print(events);
            try {
                // Slow consumer; sleeps 1s or 2s (see NOTE in asyncTest3 about
                // the Math.round(Math.random()) expression).
                Thread.sleep(Math.round(Math.random()) * 1000 + 1000);
            } catch (InterruptedException e) {
                log.error(e.getMessage(), e);
            }
            eventArrived = true;
            for (Event event : events) {
                count.incrementAndGet();
            }
            // batch.size.max='2' must cap every delivered batch.
            Assert.assertTrue(events.length <= 2);
            synchronized (threads) {
                Integer count = threads.get(Thread.currentThread().getName());
                if (count == null) {
                    threads.put(Thread.currentThread().getName(), 1);
                } else {
                    count++;
                    threads.put(Thread.currentThread().getName(), count);
                }
            }
        }
    });

    InputHandler inputHandler = siddhiAppRuntime.getInputHandler("cseEventStream");
    siddhiAppRuntime.start();

    for (int i = 0; i < 20; i++) {
        inputHandler.send(new Object[]{"WSO2", 115.6f, 100 + i});
    }

    SiddhiTestHelper.waitForEvents(2000, 20, count, 10000);
    AssertJUnit.assertEquals(20, count.get());
    siddhiAppRuntime.shutdown();
    AssertJUnit.assertTrue(eventArrived);
    log.info("Threads count:" + threads.size() + " threads:" + threads);
    // Exactly the configured number of worker threads must have consumed.
    Assert.assertEquals(threads.size(), 2);
}
/**
 * High-throughput multi-worker check: with workers='10' and
 * batch.size.max='20', 1200 events must all be delivered, batches must be
 * capped at 20 events, and all 10 worker threads must participate.
 */
@Test//(dependsOnMethods = {"asyncTest4"})
public void asyncTest5() throws InterruptedException {
    log.info("async test 5");

    // Maps consumer thread name -> number of batches it processed.
    HashMap<String, Integer> threads = new HashMap<>();
    SiddhiManager siddhiManager = new SiddhiManager();

    String siddhiApp = "" +
            " " +
            "@async(buffer.size='512', workers='10', batch.size.max='20')" +
            "define stream cseEventStream (symbol string, price float, volume int);" +
            "" +
            "@info(name = 'query1') " +
            "from cseEventStream[70 < price] " +
            "select * " +
            "insert into outputStream ;" +
            "";

    SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(siddhiApp);

    siddhiAppRuntime.addCallback("outputStream", new StreamCallback() {
        @Override
        public void receive(Event[] events) {
            EventPrinter.print(events);
            try {
                // Slow consumer; sleeps 1s or 2s (see NOTE in asyncTest3 about
                // the Math.round(Math.random()) expression).
                Thread.sleep(Math.round(Math.random()) * 1000 + 1000);
            } catch (InterruptedException e) {
                log.error(e.getMessage(), e);
            }
            eventArrived = true;
            for (Event event : events) {
                count.incrementAndGet();
            }
            // batch.size.max='20' must cap every delivered batch.
            Assert.assertTrue(events.length <= 20);
            synchronized (threads) {
                Integer count = threads.get(Thread.currentThread().getName());
                if (count == null) {
                    threads.put(Thread.currentThread().getName(), 1);
                } else {
                    count++;
                    threads.put(Thread.currentThread().getName(), count);
                }
            }
        }
    });

    InputHandler inputHandler = siddhiAppRuntime.getInputHandler("cseEventStream");
    siddhiAppRuntime.start();

    long startTime = System.currentTimeMillis();
    for (int i = 0; i < 1200; i++) {
        inputHandler.send(new Object[]{"WSO2", 115.6f, 100 + i});
    }
    SiddhiTestHelper.waitForEvents(3000, 1200, count, 10000);
    long timeDiff = System.currentTimeMillis() - startTime;
    log.info("Time spent: " + timeDiff);
    AssertJUnit.assertEquals(1200, count.get());
    siddhiAppRuntime.shutdown();
    AssertJUnit.assertTrue(eventArrived);
    log.info("Threads count:" + threads.size() + " threads:" + threads);
    // Exactly the configured number of worker threads must have consumed.
    Assert.assertEquals(threads.size(), 10);
}
/**
 * Paced-producer variant of asyncTest4: events are sent with a 100ms gap and
 * batch.size.max='25'; all 20 events must arrive and exactly 2 worker threads
 * must have consumed them.
 */
@Test(dependsOnMethods = {"asyncTest5"})
public void asyncTest6() throws InterruptedException {
    log.info("async test 6");

    // Maps consumer thread name -> number of batches it processed.
    HashMap<String, Integer> threads = new HashMap<>();
    SiddhiManager siddhiManager = new SiddhiManager();

    String siddhiApp = "" +
            " " +
            "@async(buffer.size='16', workers='2', batch.size.max='25')" +
            "define stream cseEventStream (symbol string, price float, volume int);" +
            "" +
            "@info(name = 'query1') " +
            "from cseEventStream[70 < price] " +
            "select * " +
            "insert into innerStream ;" +
            "" +
            "@info(name = 'query2') " +
            "from innerStream[volume > 90] " +
            "select * " +
            "insert into outputStream ;";

    SiddhiAppRuntime siddhiAppRuntime = siddhiManager.createSiddhiAppRuntime(siddhiApp);

    siddhiAppRuntime.addCallback("outputStream", new StreamCallback() {
        @Override
        public void receive(Event[] events) {
            EventPrinter.print(events);
            try {
                // Slow consumer; sleeps 1s or 2s (see NOTE in asyncTest3 about
                // the Math.round(Math.random()) expression).
                Thread.sleep(Math.round(Math.random()) * 1000 + 1000);
            } catch (InterruptedException e) {
                log.error(e.getMessage(), e);
            }
            eventArrived = true;
            for (Event event : events) {
                count.incrementAndGet();
            }
            // batch.size.max='25' must cap every delivered batch.
            Assert.assertTrue(events.length <= 25);
            synchronized (threads) {
                Integer count = threads.get(Thread.currentThread().getName());
                if (count == null) {
                    threads.put(Thread.currentThread().getName(), 1);
                } else {
                    count++;
                    threads.put(Thread.currentThread().getName(), count);
                }
            }
        }
    });

    InputHandler inputHandler = siddhiAppRuntime.getInputHandler("cseEventStream");
    siddhiAppRuntime.start();

    for (int i = 0; i < 20; i++) {
        // Pace the producer so consumption overlaps with production.
        Thread.sleep(100);
        inputHandler.send(new Object[]{"WSO2", 115.6f, 100 + i});
    }

    SiddhiTestHelper.waitForEvents(2000, 20, count, 10000);
    AssertJUnit.assertEquals(20, count.get());
    siddhiAppRuntime.shutdown();
    AssertJUnit.assertTrue(eventArrived);
    log.info("Threads count:" + threads.size() + " threads:" + threads);
    // Exactly the configured number of worker threads must have consumed.
    Assert.assertEquals(threads.size(), 2);
}
/**
 * An {@code @async} annotation with {@code workers='0'} is invalid and must be
 * rejected at Siddhi app creation time with a {@link SiddhiAppCreationException}.
 */
@Test(expectedExceptions = SiddhiAppCreationException.class, dependsOnMethods = {"asyncTest6"})
public void asyncTest7() throws InterruptedException {
    log.info("async test 7");
    SiddhiManager siddhiManager = new SiddhiManager();
    String siddhiApp = "" +
            " " +
            "@async(buffer.size='16', workers='0', batch.size.max='25')" +
            "define stream cseEventStream (symbol string, price float, volume int);" +
            "define stream cseEventStream2 (symbol string, price float, volume int);" +
            "" +
            "@info(name = 'query1') " +
            "from cseEventStream[70 > price] " +
            "select * " +
            "insert into innerStream ;" +
            "" +
            "@info(name = 'query2') " +
            "from innerStream[volume > 90] " +
            "select * " +
            "insert into outputStream ;";
    // Creation itself is expected to throw, so the returned runtime is not
    // bound to an (otherwise unused) local variable.
    siddhiManager.createSiddhiAppRuntime(siddhiApp);
}
/**
 * An {@code @async} annotation with {@code batch.size.max='0'} is invalid and
 * must be rejected at app creation time with a {@link SiddhiAppCreationException}.
 */
@Test(expectedExceptions = SiddhiAppCreationException.class, dependsOnMethods = {"asyncTest7"})
public void asyncTest8() throws InterruptedException {
    log.info("async test 8");
    SiddhiManager siddhiManager = new SiddhiManager();
    String siddhiApp = "" +
            " " +
            "@async(buffer.size='16', workers='1', batch.size.max='0')" +
            "define stream cseEventStream (symbol string, price float, volume int);" +
            "define stream cseEventStream2 (symbol string, price float, volume int);" +
            "" +
            "@info(name = 'query1') " +
            "from cseEventStream[70 > price] " +
            "select * " +
            "insert into innerStream ;" +
            "" +
            "@info(name = 'query2') " +
            "from innerStream[volume > 90] " +
            "select * " +
            "insert into outputStream ;";
    // Creation itself is expected to throw; the runtime reference was unused.
    siddhiManager.createSiddhiAppRuntime(siddhiApp);
}
}
| wso2/siddhi | modules/siddhi-core/src/test/java/io/siddhi/core/managment/AsyncTestCase.java | Java | apache-2.0 | 15,834 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.nio.ByteBuffer;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.GridDirectTransient;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.plugin.extensions.communication.MessageReader;
import org.apache.ignite.plugin.extensions.communication.MessageWriter;
/**
*
*/
public class CacheEntrySerializablePredicate implements CacheEntryPredicate {
/** Serialization version. */
private static final long serialVersionUID = 0L;

/** Wrapped predicate; direct-transient, so it travels only as {@link #bytes}. */
@GridToStringInclude
@GridDirectTransient
private CacheEntryPredicate p;

/** Marshalled form of {@link #p}; the only field written to the wire. */
private byte[] bytes;
/**
 * Empty constructor required by {@link org.apache.ignite.plugin.extensions.communication.Message}
 * for deserialization; fields are populated later via {@link #readFrom}.
 */
public CacheEntrySerializablePredicate() {
    // No-op.
}
/**
 * @param p Serializable predicate to wrap. Must not be {@code null}.
 */
public CacheEntrySerializablePredicate(CacheEntryPredicate p) {
    assert p != null;

    this.p = p;
}
/**
 * @return Wrapped predicate; may be {@code null} until {@link #finishUnmarshal} runs.
 */
public CacheEntryPredicate predicate() {
    return p;
}
/** {@inheritDoc} Nothing to release when the message is acknowledged. */
@Override public void onAckReceived() {
    // No-op.
}
/** {@inheritDoc} Forwarded to the wrapped predicate, which must be set. */
@Override public void entryLocked(boolean locked) {
    assert p != null;

    p.entryLocked(locked);
}
/**
 * {@inheritDoc}
 * Lazily restores the wrapped predicate from {@link #bytes} using the cache
 * marshaller, then lets the predicate finish its own unmarshalling.
 */
@Override public void finishUnmarshal(GridCacheContext ctx, ClassLoader ldr) throws IgniteCheckedException {
    assert p != null || bytes != null;

    if (p == null) {
        p = U.unmarshal(ctx.marshaller(), bytes, U.resolveClassLoader(ldr, ctx.gridConfig()));

        p.finishUnmarshal(ctx, ldr);
    }
}
/**
 * {@inheritDoc}
 * Marshals the wrapped predicate into {@link #bytes} (once) so that
 * {@link #writeTo} can send it as a plain byte array.
 */
@Override public void prepareMarshal(GridCacheContext ctx) throws IgniteCheckedException {
    assert p != null;

    p.prepareMarshal(ctx);

    if (bytes == null)
        bytes = U.marshal(ctx.marshaller(), p);
}
/** {@inheritDoc} Delegates the entry test to the wrapped predicate. */
@Override public boolean apply(GridCacheEntryEx e) {
    assert p != null;

    return p.apply(e);
}
/**
 * {@inheritDoc}
 * Writes this predicate as a single-field message: the {@code bytes} array
 * produced by {@link #prepareMarshal}. Returns {@code false} when the buffer
 * is full and the write must be resumed later (writer state is preserved).
 */
@Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
    writer.setBuffer(buf);

    if (!writer.isHeaderWritten()) {
        if (!writer.writeHeader(directType(), fieldsCount()))
            return false;

        writer.onHeaderWritten();
    }

    switch (writer.state()) {
        case 0:
            if (!writer.writeByteArray("bytes", bytes))
                return false;

            writer.incrementState();

    }

    return true;
}
/**
 * {@inheritDoc}
 * Mirror of {@link #writeTo}: reads the single {@code bytes} field, resuming
 * from the saved reader state when the buffer ran out on a previous attempt.
 */
@Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
    reader.setBuffer(buf);

    if (!reader.beforeMessageRead())
        return false;

    switch (reader.state()) {
        case 0:
            bytes = reader.readByteArray("bytes");

            if (!reader.isLastRead())
                return false;

            reader.incrementState();

    }

    return reader.afterMessageRead(CacheEntrySerializablePredicate.class);
}
/** {@inheritDoc} Direct message type code of this predicate message. */
@Override public byte directType() {
    return 99;
}
/** {@inheritDoc} Only the {@code bytes} field is transferred. */
@Override public byte fieldsCount() {
    return 1;
}
} | afinka77/ignite | modules/core/src/main/java/org/apache/ignite/internal/processors/cache/CacheEntrySerializablePredicate.java | Java | apache-2.0 | 4,293 |
/*
* Copyright 2009 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp.testing;
import com.google.debugging.sourcemap.proto.Mapping.OriginalMapping;
import com.google.javascript.jscomp.Region;
import com.google.javascript.jscomp.SourceExcerptProvider;
import com.google.javascript.jscomp.SourceFile;
/**
 * A trivial {@link SourceExcerptProvider} backed by a single in-memory source
 * file, intended for tests. The {@code sourceName} arguments are ignored;
 * every query is answered from the one wrapped file.
 */
public final class SimpleSourceExcerptProvider implements SourceExcerptProvider {

  private final SourceFile file;

  public SimpleSourceExcerptProvider(String source) {
    this.file = SourceFile.fromCode("input", source);
  }

  @Override
  public String getSourceLine(String sourceName, int lineNumber) {
    return file.getLine(lineNumber);
  }

  @Override
  public Region getSourceLines(String sourceName, int lineNumber, int length) {
    return file.getLines(lineNumber, length);
  }

  @Override
  public Region getSourceRegion(String sourceName, int lineNumber) {
    return file.getRegion(lineNumber);
  }

  @Override
  public OriginalMapping getSourceMapping(String sourceName, int lineNumber,
      int columnNumber) {
    // A plain in-memory source carries no source-map information.
    return null;
  }
}
| GoogleChromeLabs/chromeos_smart_card_connector | third_party/closure-compiler/src/src/com/google/javascript/jscomp/testing/SimpleSourceExcerptProvider.java | Java | apache-2.0 | 1,757 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import logging
from neutronclient.neutron import v2_0 as neutronV20
class ListCredential(neutronV20.ListCommand):
    """List credentials that belong to a given tenant."""

    resource = 'credential'
    log = logging.getLogger(__name__ + '.ListCredential')
    # No column value needs special formatting for this resource.
    _formatters = {}
    # Columns shown by the list command, in display order.
    list_columns = ['credential_id', 'credential_name', 'user_name',
                    'password', 'type']
class ShowCredential(neutronV20.ShowCommand):
    """Show information of a given credential."""

    resource = 'credential'
    log = logging.getLogger(__name__ + '.ShowCredential')
    # Credentials are looked up by ID only; lookup by name is disabled.
    allow_names = False
class CreateCredential(neutronV20.CreateCommand):
    """Creates a credential."""

    resource = 'credential'
    log = logging.getLogger(__name__ + '.CreateCredential')

    def add_known_arguments(self, parser):
        # Positional arguments first, then the optional authentication pair.
        parser.add_argument(
            'credential_name',
            help='Name/Ip address for Credential')
        parser.add_argument(
            'credential_type',
            help='Type of the Credential')
        parser.add_argument(
            '--username',
            help='Username for the credential')
        parser.add_argument(
            '--password',
            help='Password for the credential')

    def args2body(self, parsed_args):
        # Start from the mandatory name and attach each optional attribute
        # only when the caller supplied a truthy value for it.
        credential = {'credential_name': parsed_args.credential_name}
        if parsed_args.credential_type:
            credential['type'] = parsed_args.credential_type
        if parsed_args.username:
            credential['user_name'] = parsed_args.username
        if parsed_args.password:
            credential['password'] = parsed_args.password
        return {'credential': credential}
class DeleteCredential(neutronV20.DeleteCommand):
    """Delete a given credential."""

    log = logging.getLogger(__name__ + '.DeleteCredential')
    resource = 'credential'
    # Deletion accepts IDs only; name-based lookup is disabled.
    allow_names = False
| vichoward/python-neutronclient | neutronclient/neutron/v2_0/credential.py | Python | apache-2.0 | 2,705 |
/**
* @license
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Canvas view that paints its children twice, horizontally offset, producing
 * a side-by-side pair of images (left image at the original origin, right
 * image translated 500px to the right) — a stereo rendering of the scene.
 */
foam.CLASS({
  package: 'foam.graphics',
  name: 'StereoCView',
  extends: 'foam.graphics.CView',

  methods: [
    function paintChildren(x) {
      // Sort by descending z before painting — presumably far-to-near so
      // nearer children draw on top; confirm against CView's z convention.
      this.children.sort(function(o1, o2) { return o2.z - o1.z; });

      // First pass: each child shifted +20 in x, then painted.
      for ( var i = 0 ; i < this.children.length ; i++ ) {
        var c = this.children[i];
        c.x += 20;
        c.paint(x);
      }

      // Second pass: canvas origin moved 500px right; the +20 shift is
      // undone (-20), so every child's x is back to its original value
      // after this method returns.
      x.translate(500, 0);

      for ( var i = 0 ; i < this.children.length ; i++ ) {
        var c = this.children[i];
        c.x -= 20;
        c.paint(x);
      }
    }
  ]
});
| jacksonic/vjlofvhjfgm | src/foam/graphics/StereoCView.js | JavaScript | apache-2.0 | 1,166 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
 * Pipeline aggregator that gathers one value per sibling bucket and reports
 * the requested percentiles over the collected values using the
 * nearest-rank method on the sorted data.
 */
public class PercentilesBucketPipelineAggregator extends BucketMetricsPipelineAggregator {
    private final double[] percents;
    private boolean keyed = true;
    private List<Double> data;

    PercentilesBucketPipelineAggregator(String name, double[] percents, boolean keyed, String[] bucketsPaths,
                                        GapPolicy gapPolicy, DocValueFormat formatter, Map<String, Object> metadata) {
        super(name, bucketsPaths, gapPolicy, formatter, metadata);
        this.percents = percents;
        this.keyed = keyed;
    }

    @Override
    protected void preCollection() {
        // Fresh buffer for each reduction, pre-sized to limit resizing.
        data = new ArrayList<>(1024);
    }

    @Override
    protected void collectBucketValue(String bucketKey, Double bucketValue) {
        data.add(bucketValue);
    }

    @Override
    protected InternalAggregation buildAggregation(Map<String, Object> metadata) {
        // All bucket values are in hand now: sort once, then read each
        // requested percentile at its nearest-rank index.
        Collections.sort(data);

        final int size = data.size();
        final double[] values = new double[percents.length];
        for (int i = 0; i < percents.length; i++) {
            values[i] = size == 0
                ? Double.NaN
                : data.get((int) Math.round((percents[i] / 100.0) * (size - 1)));
        }

        // NOTE(review): the sorted buffer is retained until the next
        // preCollection(); a postCollection() hook could free it earlier.
        return new InternalPercentilesBucket(name(), percents, values, keyed, format, metadata);
    }
}
| gingerwizard/elasticsearch | server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregator.java | Java | apache-2.0 | 2,793 |
// Generated by xsd compiler for android/java
// DO NOT CHANGE!
package com.ebay.trading.api;
/**
*
* The status of gallery image generation. That status will return either a value of 'Success' or
* a value that indicates why the gallery image has not been generated.
*
*/
public enum GalleryStatusCodeType {
/**
*
* Gallery Image successfully generated.
*
*/
SUCCESS("Success"),
/**
*
* Gallery image has not yet been generated.
*
*/
PENDING("Pending"),
/**
*
* The URL for the image is not valid.
*
*/
INVALID_URL("InvalidUrl"),
/**
*
* URL does not start with http:// - That is the only protocol currently supported for pictures.
*
*/
INVALID_PROTOCOL("InvalidProtocol"),
/**
*
* There is a problem with the file containing the image.
*
*/
INVALID_FILE("InvalidFile"),
/**
*
* The server containing your image was unavailable when we tried to retrieve it.
*
*/
SERVER_DOWN("ServerDown"),
/**
*
* We could not find your Gallery image when we went to retrieve it.
*
*/
IMAGE_NON_EXISTENT("ImageNonExistent"),
/**
*
* The image failed to come across the Internet when we tried to retrieve it.
*
*/
IMAGE_READ_TIME_OUT("ImageReadTimeOut"),
/**
*
* The file containing your image is not in standard jpeg, bmp, or tif format.
*
*/
INVALID_FILE_FORMAT("InvalidFileFormat"),
/**
*
* We were not able to process the image.
*
*/
IMAGE_PROCESSING_ERROR("ImageProcessingError"),
/**
*
* Reserved for internal or future use.
*
*/
CUSTOM_CODE("CustomCode");
private final String value;
GalleryStatusCodeType(String v) {
value = v;
}
/** Returns the wire/schema string for this constant (e.g. "Success"). */
public String value() {
    return value;
}
/**
 * Resolves a wire/schema string back to its enum constant.
 *
 * @throws IllegalArgumentException if {@code v} is {@code null} or unknown.
 */
public static GalleryStatusCodeType fromValue(String v) {
    if (v != null) {
        for (GalleryStatusCodeType candidate : values()) {
            if (candidate.value.equals(v)) {
                return candidate;
            }
        }
    }
    throw new IllegalArgumentException(v);
}
} | uaraven/nano | sample/webservice/eBayDemoApp/src/com/ebay/trading/api/GalleryStatusCodeType.java | Java | apache-2.0 | 2,546 |
var path = require('path');
var assign = require('object-assign');
var forEachBail = require('enhanced-resolve/lib/forEachBail');
var basename = require('enhanced-resolve/lib/getPaths').basename;
module.exports = function (modulesToResolveToEs5) {
return {
apply: doApply.bind(this, modulesToResolveToEs5)
};
};
/**
 * Hooks the resolver so that any request of the form "<pkg>/<subpath>" for a
 * package in `modulesToResolveToEs5` is rewritten to resolve to "<pkg>"
 * itself (the package root / its declared entry point).
 *
 * Note: the original loop variable was named `package`, which is a reserved
 * word in strict mode / ES modules and would break there; renamed.
 */
function doApply(modulesToResolveToEs5, resolver) {
    // file type taken from: https://github.com/webpack/enhanced-resolve/blob/v4.0.0/test/plugins.js
    var target = resolver.ensureHook("undescribed-raw-file");
    resolver.getHook("resolve")
        .tapAsync("ViewerResolvePlugin", (request, resolveContext, callback) => {
            for (const moduleName of modulesToResolveToEs5) {
                if (request.request.indexOf(moduleName + "/") === 0) {
                    // Redirect the deep import to the package root.
                    const newRequest = Object.assign({}, request, { request: moduleName, });
                    return resolver.doResolve(target, newRequest, "viewer resolved", resolveContext, callback);
                }
            }
            // Not one of ours: let the normal resolution pipeline continue.
            return callback();
        });
}
| BabylonJS/Babylon.js | Tools/WebpackPlugins/viewerResolve.js | JavaScript | apache-2.0 | 1,075 |
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.tasks.config;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.*;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.tasks.TaskRepository;
import com.intellij.tasks.TaskRepositoryType;
import com.intellij.tasks.impl.TaskManagerImpl;
import com.intellij.util.containers.CollectionFactory;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.HashingStrategy;
import com.intellij.util.xmlb.XmlSerializer;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
 * Application-level persistent store of the task repositories most recently
 * configured by the user. Entries are deduplicated by repository URL and the
 * store keeps at most 10 repositories.
 *
 * @author Dmitry Avdeev
 */
@State(name = "RecentTaskRepositories", storages = @Storage(StoragePathMacros.NON_ROAMABLE_FILE))
@Service(Service.Level.APP)
public final class RecentTaskRepositories implements PersistentStateComponent<Element>, Disposable {
  private final Set<TaskRepository> myRepositories = CollectionFactory.createCustomHashingStrategySet(HASHING_STRATEGY);

  // Repositories are considered equal when their URLs are equal, so the set
  // keeps at most one entry per URL.
  private static final HashingStrategy<TaskRepository> HASHING_STRATEGY = new HashingStrategy<>() {
    @Override
    public int hashCode(@Nullable TaskRepository object) {
      return object == null || object.getUrl() == null ? 0 : object.getUrl().hashCode();
    }

    @Override
    public boolean equals(TaskRepository o1, TaskRepository o2) {
      return o1 == o2 || (o1 != null && o2 != null && Objects.equals(o1.getUrl(), o2.getUrl()));
    }
  };

  public RecentTaskRepositories() {
    // remove repositories pertaining to non-existent types (i.e. whose
    // plugin was unloaded) whenever the repository-type EP list changes
    TaskRepositoryType.addEPListChangeListener(this, () -> {
      List<Class<?>> possibleRepositoryClasses = TaskRepositoryType.getRepositoryClasses();
      myRepositories.removeIf(repository -> {
        return !ContainerUtil.exists(possibleRepositoryClasses, clazz -> clazz.isAssignableFrom(repository.getClass()));
      });
    });
  }

  public static RecentTaskRepositories getInstance() {
    return ApplicationManager.getApplication().getService(RecentTaskRepositories.class);
  }

  /** Returns a copy of the stored repositories that have a non-blank URL. */
  public Set<TaskRepository> getRepositories() {
    Set<TaskRepository> set = CollectionFactory.createCustomHashingStrategySet(HASHING_STRATEGY);
    set.addAll(ContainerUtil.findAll(myRepositories, repository -> !StringUtil.isEmptyOrSpaces(repository.getUrl())));
    return set;
  }

  /**
   * Replaces the stored set with the given repositories; if the cap of 10 was
   * not reached, previously stored entries are re-added to fill the remainder.
   */
  public void addRepositories(Collection<TaskRepository> repositories) {
    Collection<TaskRepository> old = new ArrayList<>(myRepositories);
    myRepositories.clear();
    if (doAddReps(repositories)) return;
    doAddReps(old);
  }

  /**
   * Adds repositories with non-blank URLs until the cap of 10 is reached;
   * returns {@code true} when the cap was hit.
   */
  private boolean doAddReps(Collection<TaskRepository> repositories) {
    for (TaskRepository repository : repositories) {
      if (!StringUtil.isEmptyOrSpaces(repository.getUrl())) {
        if (myRepositories.size() == 10) {
          return true;
        }
        myRepositories.add(repository);
      }
    }
    return false;
  }

  @Override
  public Element getState() {
    return XmlSerializer.serialize(myRepositories.toArray(new TaskRepository[0]));
  }

  @Override
  public void loadState(@NotNull Element state) {
    myRepositories.clear();
    myRepositories.addAll(TaskManagerImpl.loadRepositories(state));
  }

  @Override
  public void dispose() {}
}
| siosio/intellij-community | plugins/tasks/tasks-core/src/com/intellij/tasks/config/RecentTaskRepositories.java | Java | apache-2.0 | 3,466 |
/**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.integration.copier.snapshot.reader;
import java.util.Map;
import com.opengamma.core.marketdatasnapshot.CurveKey;
import com.opengamma.core.marketdatasnapshot.CurveSnapshot;
import com.opengamma.core.marketdatasnapshot.UnstructuredMarketDataSnapshot;
import com.opengamma.core.marketdatasnapshot.VolatilitySurfaceKey;
import com.opengamma.core.marketdatasnapshot.VolatilitySurfaceSnapshot;
import com.opengamma.core.marketdatasnapshot.YieldCurveKey;
import com.opengamma.core.marketdatasnapshot.YieldCurveSnapshot;
/**
 * Reader providing access to the individual structured elements of a market
 * data snapshot. Note that VolatilityCubes are not present.
 * <p>
 * (The former {@code abstract} modifier was removed: it is redundant on an
 * interface declaration.)
 */
public interface SnapshotReader {

  /** @return the curve snapshots, keyed by curve. */
  Map<CurveKey, CurveSnapshot> readCurves();

  /** @return the unstructured global market data values. */
  UnstructuredMarketDataSnapshot readGlobalValues();

  /** @return the volatility surface snapshots, keyed by surface. */
  Map<VolatilitySurfaceKey, VolatilitySurfaceSnapshot> readVolatilitySurfaces();

  /** @return the yield curve snapshots, keyed by curve. */
  Map<YieldCurveKey, YieldCurveSnapshot> readYieldCurves();

  /** Releases any resources held by this reader. */
  void close();

  /** @return the snapshot's name. */
  String getName();

  /** @return the name of the basis view — presumably the view the snapshot was taken from. */
  String getBasisViewName();
}
| DevStreet/FinanceAnalytics | projects/OG-Integration/src/main/java/com/opengamma/integration/copier/snapshot/reader/SnapshotReader.java | Java | apache-2.0 | 1,187 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "ValidatingCodec.hh"
#include <string>
#include <map>
#include <algorithm>
#include <boost/shared_ptr.hpp>
#include <boost/make_shared.hpp>
#include <boost/weak_ptr.hpp>
#include <boost/any.hpp>
#include "ValidSchema.hh"
#include "Decoder.hh"
#include "Encoder.hh"
#include "NodeImpl.hh"
namespace avro {
namespace parsing {
using boost::shared_ptr;
using boost::weak_ptr;
using boost::static_pointer_cast;
using std::map;
using std::vector;
using std::pair;
using std::string;
using std::reverse;
using std::ostringstream;
/** Follows the design of Avro Parser in Java. */
// Builds the full production for the schema root, then patches any
// placeholder symbols left by recursive references (see doGenerate).
Production ValidatingGrammarGenerator::generate(const NodePtr& n)
{
    map<NodePtr, shared_ptr<Production> > m;
    Production result = doGenerate(n, m);
    fixup(result, m);
    return result;
}
// Convenience overload: wraps the schema root's production in a root symbol.
Symbol ValidatingGrammarGenerator::generate(const ValidSchema& schema)
{
    return Symbol::rootSymbol(generate(schema.root()));
}
// Recursively translates schema node n into a Production — the sequence of
// grammar symbols the parser consumes for that node (stored in reverse,
// stack fashion). The map m caches productions of named types so that
// recursive schemas terminate: a node revisited while still being generated
// yields a placeholder symbol, patched later by fixup() in generate().
Production ValidatingGrammarGenerator::doGenerate(const NodePtr& n,
    map<NodePtr, shared_ptr<Production> > &m) {
    switch (n->type()) {
    case AVRO_NULL:
        return Production(1, Symbol::nullSymbol());
    case AVRO_BOOL:
        return Production(1, Symbol::boolSymbol());
    case AVRO_INT:
        return Production(1, Symbol::intSymbol());
    case AVRO_LONG:
        return Production(1, Symbol::longSymbol());
    case AVRO_FLOAT:
        return Production(1, Symbol::floatSymbol());
    case AVRO_DOUBLE:
        return Production(1, Symbol::doubleSymbol());
    case AVRO_STRING:
        return Production(1, Symbol::stringSymbol());
    case AVRO_BYTES:
        return Production(1, Symbol::bytesSymbol());
    case AVRO_FIXED:
        {
            // Size check plus the fixed symbol; cached since fixed is a named type.
            Symbol r[] = {
                Symbol::sizeCheckSymbol(n->fixedSize()),
                Symbol::fixedSymbol() };
            Production result(r, r + 2);
            m[n] = boost::make_shared<Production>(result);
            return result;
        }
    case AVRO_RECORD: {
            Production result;

            m.erase(n);
            size_t c = n->leaves();
            // Concatenate the fields' productions back-to-front, then reverse,
            // so the record's symbols pop in field order.
            for (size_t i = 0; i < c; ++i) {
                const NodePtr& leaf = n->leafAt(i);
                Production v = doGenerate(leaf, m);
                copy(v.rbegin(), v.rend(), back_inserter(result));
            }
            reverse(result.begin(), result.end());

            // If the node reappeared in m, the record was referenced
            // recursively while being generated: emit an indirect symbol
            // pointing at the shared production instead of inlining it.
            bool found = m.find(n) != m.end();

            shared_ptr<Production> p = boost::make_shared<Production>(result);
            m[n] = p;
            return found ? Production(1, Symbol::indirect(p)) : result;
        }
    case AVRO_ENUM:
        {
            // Range check on the ordinal plus the enum symbol; cached (named type).
            Symbol r[] = {
                Symbol::sizeCheckSymbol(n->names()),
                Symbol::enumSymbol() };
            Production result(r, r + 2);
            m[n] = boost::make_shared<Production>(result);
            return result;
        }
    case AVRO_ARRAY:
        {
            // start / repeated-item / end, in reverse (stack) order.
            Symbol r[] = {
                Symbol::arrayEndSymbol(),
                Symbol::repeater(doGenerate(n->leafAt(0), m), true),
                Symbol::arrayStartSymbol() };
            return Production(r, r + 3);
        }
    case AVRO_MAP:
        {
            // Each repetition is a key string followed by the value production.
            Production v = doGenerate(n->leafAt(1), m);
            v.push_back(Symbol::stringSymbol());
            Symbol r[] = {
                Symbol::mapEndSymbol(),
                Symbol::repeater(v, false),
                Symbol::mapStartSymbol() };
            return Production(r, r + 3);
        }
    case AVRO_UNION:
        {
            // One alternative production per branch, selected at decode time.
            vector<Production> vv;
            size_t c = n->leaves();
            vv.reserve(c);
            for (size_t i = 0; i < c; ++i) {
                vv.push_back(doGenerate(n->leafAt(i), m));
            }
            Symbol r[] = {
                Symbol::alternative(vv),
                Symbol::unionSymbol()
            };
            return Production(r, r + 2);
        }
    case AVRO_SYMBOLIC:
        {
            // Reference to a named type: reuse its cached production, or leave
            // a placeholder if that production is still being generated
            // (recursive schema) — fixup() resolves it afterwards.
            shared_ptr<NodeSymbolic> ns = static_pointer_cast<NodeSymbolic>(n);
            NodePtr nn = ns->getNode();
            map<NodePtr, shared_ptr<Production> >::iterator it =
                m.find(nn);
            if (it != m.end() && it->second) {
                return *it->second;
            } else {
                m[nn] = shared_ptr<Production>();
                return Production(1, Symbol::placeholder(nn));
            }
        }
    default:
        throw Exception("Unknown node type");
    }
}
// No-op symbol handler: the parser template requires a handler, but pure
// validation has nothing to do per symbol.
struct DummyHandler {
    size_t handle(const Symbol& s) {
        return 0;
    }
};
// Decoder decorator: every decode/skip call is first checked against the
// schema grammar (via parser P), then delegated to the wrapped base decoder.
// A grammar violation surfaces from parser.advance() before any bytes are read.
template <typename P>
class ValidatingDecoder : public Decoder {
    const shared_ptr<Decoder> base;   // decoder that does the actual reading
    DummyHandler handler_;            // no-op handler required by the parser
    P parser;                         // grammar-tracking parser for the schema

    void init(InputStream& is);
    void decodeNull();
    bool decodeBool();
    int32_t decodeInt();
    int64_t decodeLong();
    float decodeFloat();
    double decodeDouble();
    void decodeString(string& value);
    void skipString();
    void decodeBytes(vector<uint8_t>& value);
    void skipBytes();
    void decodeFixed(size_t n, vector<uint8_t>& value);
    void skipFixed(size_t n);
    size_t decodeEnum();
    size_t arrayStart();
    size_t arrayNext();
    size_t skipArray();
    size_t mapStart();
    size_t mapNext();
    size_t skipMap();
    size_t decodeUnionIndex();

public:
    ValidatingDecoder(const ValidSchema& s, const shared_ptr<Decoder> b) :
        base(b),
        parser(ValidatingGrammarGenerator().generate(s), NULL, handler_) { }

};
// Attaches the wrapped decoder to the given input stream.
template <typename P>
void ValidatingDecoder<P>::init(InputStream& is)
{
    base->init(is);
}
// Grammar check only; the base decoder is not consulted — presumably because
// null occupies no bytes in the encoding (confirm against the base decoder).
template <typename P>
void ValidatingDecoder<P>::decodeNull()
{
    parser.advance(Symbol::sNull);
}
// Verifies the grammar expects a bool, then reads it from the base decoder.
template <typename P>
bool ValidatingDecoder<P>::decodeBool()
{
    parser.advance(Symbol::sBool);
    return base->decodeBool();
}
// Verifies the grammar expects an int, then reads it from the base decoder.
template <typename P>
int32_t ValidatingDecoder<P>::decodeInt()
{
    parser.advance(Symbol::sInt);
    return base->decodeInt();
}
// Verifies the grammar expects a long, then reads it from the base decoder.
template <typename P>
int64_t ValidatingDecoder<P>::decodeLong()
{
    parser.advance(Symbol::sLong);
    return base->decodeLong();
}
// Verifies the grammar expects a float, then reads it from the base decoder.
template <typename P>
float ValidatingDecoder<P>::decodeFloat()
{
    parser.advance(Symbol::sFloat);
    return base->decodeFloat();
}
// Verifies the grammar expects a double, then reads it from the base decoder.
template <typename P>
double ValidatingDecoder<P>::decodeDouble()
{
    parser.advance(Symbol::sDouble);
    return base->decodeDouble();
}
// Verifies the grammar expects a string, then reads it into `value`.
template <typename P>
void ValidatingDecoder<P>::decodeString(string& value)
{
    parser.advance(Symbol::sString);
    base->decodeString(value);
}
// Same grammar check as decodeString, but discards the value.
template <typename P>
void ValidatingDecoder<P>::skipString()
{
    parser.advance(Symbol::sString);
    base->skipString();
}
// Verifies the grammar expects bytes, then reads them into `value`.
template <typename P>
void ValidatingDecoder<P>::decodeBytes(vector<uint8_t>& value)
{
    parser.advance(Symbol::sBytes);
    base->decodeBytes(value);
}
// Same grammar check as decodeBytes, but discards the value.
template <typename P>
void ValidatingDecoder<P>::skipBytes()
{
    parser.advance(Symbol::sBytes);
    base->skipBytes();
}
// Checks both that a fixed is expected and that the requested size n matches
// the schema's declared fixed size, then reads the bytes.
template <typename P>
void ValidatingDecoder<P>::decodeFixed(size_t n, vector<uint8_t>& value)
{
    parser.advance(Symbol::sFixed);
    parser.assertSize(n);
    base->decodeFixed(n, value);
}
// Same checks as decodeFixed, but discards the bytes.
template <typename P>
void ValidatingDecoder<P>::skipFixed(size_t n)
{
    parser.advance(Symbol::sFixed);
    parser.assertSize(n);
    base->skipFixed(n);
}
// Reads an enum ordinal and validates it is within the schema's symbol count.
template <typename P>
size_t ValidatingDecoder<P>::decodeEnum()
{
    parser.advance(Symbol::sEnum);
    size_t result = base->decodeEnum();
    parser.assertLessThanSize(result);
    return result;
}
// Begins an array. An empty first block means the array is over: the
// repeater is popped and the end symbol consumed immediately. Otherwise the
// repeat count is set to the block size so item symbols can be consumed.
template <typename P>
size_t ValidatingDecoder<P>::arrayStart()
{
    parser.advance(Symbol::sArrayStart);
    size_t result = base->arrayStart();
    if (result == 0) {
        parser.popRepeater();
        parser.advance(Symbol::sArrayEnd);
    } else {
        parser.setRepeatCount(result);
    }
    return result;
}
// Advances to the next array block; zero means end of array (repeater popped,
// end symbol consumed), otherwise the repeat count is updated.
template <typename P>
size_t ValidatingDecoder<P>::arrayNext()
{
    size_t result = base->arrayNext();
    if (result == 0) {
        parser.popRepeater();
        parser.advance(Symbol::sArrayEnd);
    } else {
        parser.setRepeatCount(result);
    }
    return result;
}
// Skips an entire array, using the parser to drive the skip of the remaining
// items through the base decoder. Always returns 0 (nothing left).
template <typename P>
size_t ValidatingDecoder<P>::skipArray()
{
    parser.advance(Symbol::sArrayStart);
    size_t n = base->skipArray();
    if (n == 0) {
        parser.pop();
    } else {
        parser.setRepeatCount(n);
        parser.skip(*base);
    }
    parser.advance(Symbol::sArrayEnd);
    return 0;
}
// Begins a map; mirrors arrayStart (empty first block ends the map at once).
template <typename P>
size_t ValidatingDecoder<P>::mapStart()
{
    parser.advance(Symbol::sMapStart);
    size_t result = base->mapStart();
    if (result == 0) {
        parser.popRepeater();
        parser.advance(Symbol::sMapEnd);
    } else {
        parser.setRepeatCount(result);
    }
    return result;
}
// Advances to the next map block; mirrors arrayNext.
template <typename P>
size_t ValidatingDecoder<P>::mapNext()
{
    size_t result = base->mapNext();
    if (result == 0) {
        parser.popRepeater();
        parser.advance(Symbol::sMapEnd);
    } else {
        parser.setRepeatCount(result);
    }
    return result;
}
// Skips an entire map; mirrors skipArray. Always returns 0.
template <typename P>
size_t ValidatingDecoder<P>::skipMap()
{
    parser.advance(Symbol::sMapStart);
    size_t n = base->skipMap();
    if (n == 0) {
        parser.pop();
    } else {
        parser.setRepeatCount(n);
        parser.skip(*base);
    }
    parser.advance(Symbol::sMapEnd);
    return 0;
}
// Reads the union branch index and steers the grammar onto that branch so
// subsequent calls are validated against the selected alternative.
template <typename P>
size_t ValidatingDecoder<P>::decodeUnionIndex()
{
    parser.advance(Symbol::sUnion);
    size_t result = base->decodeUnionIndex();
    parser.selectBranch(result);
    return result;
}
// Encoder decorator, symmetric to ValidatingDecoder: every encode call is
// checked against the schema grammar before being forwarded to the wrapped
// base encoder, so out-of-order writes fail fast instead of producing
// corrupt output.
template <typename P>
class ValidatingEncoder : public Encoder {
    DummyHandler handler_;   // no-op handler required by the parser
    P parser_;               // grammar-tracking parser for the schema
    EncoderPtr base_;        // encoder that does the actual writing

    void init(OutputStream& os);
    void flush();
    void encodeNull();
    void encodeBool(bool b);
    void encodeInt(int32_t i);
    void encodeLong(int64_t l);
    void encodeFloat(float f);
    void encodeDouble(double d);
    void encodeString(const std::string& s);
    void encodeBytes(const uint8_t *bytes, size_t len);
    void encodeFixed(const uint8_t *bytes, size_t len);
    void encodeEnum(size_t e);
    void arrayStart();
    void arrayEnd();
    void mapStart();
    void mapEnd();
    void setItemCount(size_t count);
    void startItem();
    void encodeUnionIndex(size_t e);
public:
    ValidatingEncoder(const ValidSchema& schema, const EncoderPtr& base) :
        parser_(ValidatingGrammarGenerator().generate(schema), NULL, handler_),
        base_(base) { }
};
// Attaches the underlying encoder to an output stream (no validation needed).
template<typename P>
void ValidatingEncoder<P>::init(OutputStream& os)
{
    base_->init(os);
}
// Flushes buffered output of the underlying encoder.
template<typename P>
void ValidatingEncoder<P>::flush()
{
    base_->flush();
}
// Validates that a null is expected here, then forwards to the base encoder.
template<typename P>
void ValidatingEncoder<P>::encodeNull()
{
    parser_.advance(Symbol::sNull);
    base_->encodeNull();
}
// Validates that a boolean is expected here, then forwards to the base encoder.
template<typename P>
void ValidatingEncoder<P>::encodeBool(bool b)
{
    parser_.advance(Symbol::sBool);
    base_->encodeBool(b);
}
// Validates that an int is expected here, then forwards to the base encoder.
template<typename P>
void ValidatingEncoder<P>::encodeInt(int32_t i)
{
    parser_.advance(Symbol::sInt);
    base_->encodeInt(i);
}
// Validates that a long is expected here, then forwards to the base encoder.
template<typename P>
void ValidatingEncoder<P>::encodeLong(int64_t l)
{
    parser_.advance(Symbol::sLong);
    base_->encodeLong(l);
}
// Validates that a float is expected here, then forwards to the base encoder.
template<typename P>
void ValidatingEncoder<P>::encodeFloat(float f)
{
    parser_.advance(Symbol::sFloat);
    base_->encodeFloat(f);
}
// Validates that a double is expected here, then forwards to the base encoder.
template<typename P>
void ValidatingEncoder<P>::encodeDouble(double d)
{
    parser_.advance(Symbol::sDouble);
    base_->encodeDouble(d);
}
// Validates that a string is expected here, then forwards to the base encoder.
template<typename P>
void ValidatingEncoder<P>::encodeString(const std::string& s)
{
    parser_.advance(Symbol::sString);
    base_->encodeString(s);
}
// Validates that a bytes value is expected here, then forwards to the base encoder.
template<typename P>
void ValidatingEncoder<P>::encodeBytes(const uint8_t *bytes, size_t len)
{
    parser_.advance(Symbol::sBytes);
    base_->encodeBytes(bytes, len);
}
// Validates that a fixed value is expected here AND that len matches the
// schema-declared fixed size, then forwards to the base encoder.
template<typename P>
void ValidatingEncoder<P>::encodeFixed(const uint8_t *bytes, size_t len)
{
    parser_.advance(Symbol::sFixed);
    parser_.assertSize(len);
    base_->encodeFixed(bytes, len);
}
// Validates that an enum is expected here and that e is within the declared
// number of symbols, then forwards to the base encoder.
template<typename P>
void ValidatingEncoder<P>::encodeEnum(size_t e)
{
    parser_.advance(Symbol::sEnum);
    parser_.assertLessThanSize(e);
    base_->encodeEnum(e);
}
// Validates that an array may start here, then forwards to the base encoder.
template<typename P>
void ValidatingEncoder<P>::arrayStart()
{
    parser_.advance(Symbol::sArrayStart);
    base_->arrayStart();
}
// Ends the current array: unwinds the item repeater, validates the array-end
// symbol, then forwards to the base encoder.
template<typename P>
void ValidatingEncoder<P>::arrayEnd()
{
    parser_.popRepeater();
    parser_.advance(Symbol::sArrayEnd);
    base_->arrayEnd();
}
// Validates that a map may start here, then forwards to the base encoder.
template<typename P>
void ValidatingEncoder<P>::mapStart()
{
    parser_.advance(Symbol::sMapStart);
    base_->mapStart();
}
// Ends the current map: unwinds the entry repeater, validates the map-end
// symbol, then forwards to the base encoder.
template<typename P>
void ValidatingEncoder<P>::mapEnd()
{
    parser_.popRepeater();
    parser_.advance(Symbol::sMapEnd);
    base_->mapEnd();
}
}
template<typename P>
void ValidatingEncoder<P>::setItemCount(size_t count)
{
parser_.setRepeatCount(count);
base_->setItemCount(count);
}
// Marks the start of one array/map item. Throws if the grammar is not
// currently positioned at an item boundary (i.e. inside a repeater).
template<typename P>
void ValidatingEncoder<P>::startItem()
{
    if (parser_.top() != Symbol::sRepeater) {
        throw Exception("startItem at not an item boundary");
    }
    base_->startItem();
}
// Validates that a union is expected here, selects branch e in the grammar so
// later calls are checked against that branch, then forwards to the base encoder.
template<typename P>
void ValidatingEncoder<P>::encodeUnionIndex(size_t e)
{
    parser_.advance(Symbol::sUnion);
    parser_.selectBranch(e);
    base_->encodeUnionIndex(e);
}
} // namespace parsing
// Factory: wraps `base` in a decoder that validates reads against schema `s`.
DecoderPtr validatingDecoder(const ValidSchema& s,
    const DecoderPtr& base)
{
    return boost::make_shared<parsing::ValidatingDecoder<
        parsing::SimpleParser<parsing::DummyHandler> > >(s, base);
}
// Factory: wraps `base` in an encoder that validates writes against `schema`.
EncoderPtr validatingEncoder(const ValidSchema& schema, const EncoderPtr& base)
{
    return boost::make_shared<parsing::ValidatingEncoder<
        parsing::SimpleParser<parsing::DummyHandler> > >(schema, base);
}
} // namespace avro
| RallySoftware/avro | lang/c++/impl/parsing/ValidatingCodec.cc | C++ | apache-2.0 | 14,444 |
/**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.model.option.pricing.tree;
/**
* A call has payoff max[S1 * S2 - K, 0], while a put pays off max[K - S1 * S2, 0] at expiry
*/
public class ProductOptionFunctionProvider extends OptionFunctionProvider2D {

  /**
   * @param strike Strike price
   * @param timeToExpiry Time to expiry
   * @param steps Number of steps
   * @param isCall True if call, false if put
   */
  public ProductOptionFunctionProvider(final double strike, final double timeToExpiry, final int steps, final boolean isCall) {
    super(strike, timeToExpiry, steps, isCall);
  }

  @Override
  public double[][] getPayoffAtExpiry(final double assetPrice1, final double assetPrice2, final double upOverDown1, final double upOverDown2) {
    // A binomial lattice with n steps has (n + 1) terminal nodes per asset.
    return payoffValues(assetPrice1, assetPrice2, upOverDown1, upOverDown2, getNumberOfSteps() + 1);
  }

  @Override
  public double[][] getPayoffAtExpiryTrinomial(final double assetPrice1, final double assetPrice2, final double middleOverDown1, final double middleOverDown2) {
    // A trinomial lattice with n steps has (2 * n + 1) terminal nodes per asset.
    return payoffValues(assetPrice1, assetPrice2, middleOverDown1, middleOverDown2, 2 * getNumberOfSteps() + 1);
  }

  /**
   * Shared payoff grid computation for both lattice types (the two public
   * methods previously duplicated this loop verbatim, differing only in the
   * node count). Entry (i, j) holds the payoff
   * max[sign * (S1 * f1^i * S2 * f2^j - K), 0] where sign is +1 for a call
   * and -1 for a put.
   *
   * @param assetPrice1 Lowest terminal price of the first asset
   * @param assetPrice2 Lowest terminal price of the second asset
   * @param factor1 Ratio between adjacent terminal prices of the first asset
   * @param factor2 Ratio between adjacent terminal prices of the second asset
   * @param nNodes Number of terminal nodes per asset
   * @return nNodes x nNodes payoff grid
   */
  private double[][] payoffValues(final double assetPrice1, final double assetPrice2, final double factor1, final double factor2, final int nNodes) {
    final double strike = getStrike();
    final double sign = getSign();
    final double[][] values = new double[nNodes][nNodes];
    double price1 = assetPrice1;
    for (int i = 0; i < nNodes; ++i) {
      double price2 = assetPrice2;
      for (int j = 0; j < nNodes; ++j) {
        values[i][j] = Math.max(sign * (price1 * price2 - strike), 0.);
        price2 *= factor2;
      }
      price1 *= factor1;
    }
    return values;
  }

  @Override
  public int hashCode() {
    return super.hashCode();
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (!(obj instanceof ProductOptionFunctionProvider)) {
      return false;
    }
    return super.equals(obj);
  }
}
| jeorme/OG-Platform | projects/OG-Analytics/src/main/java/com/opengamma/analytics/financial/model/option/pricing/tree/ProductOptionFunctionProvider.java | Java | apache-2.0 | 2,452 |
/**
* Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.component.factory.engine;
import java.util.LinkedHashMap;
import java.util.Map;
import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.BeanDefinition;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import com.opengamma.component.ComponentInfo;
import com.opengamma.component.ComponentRepository;
import com.opengamma.component.factory.AbstractComponentFactory;
import com.opengamma.core.historicaltimeseries.HistoricalTimeSeriesSource;
import com.opengamma.core.marketdatasnapshot.MarketDataSnapshotSource;
import com.opengamma.engine.marketdata.CombinedMarketDataProviderFactory;
import com.opengamma.engine.marketdata.MarketDataProviderFactory;
import com.opengamma.engine.marketdata.historical.HistoricalMarketDataProviderFactory;
import com.opengamma.engine.marketdata.historical.HistoricalShockMarketDataProviderFactory;
import com.opengamma.engine.marketdata.historical.LatestHistoricalMarketDataProviderFactory;
import com.opengamma.engine.marketdata.random.RandomizingMarketDataProviderFactory;
import com.opengamma.engine.marketdata.resolver.CachingMarketDataProviderResolver;
import com.opengamma.engine.marketdata.resolver.MarketDataProviderResolver;
import com.opengamma.engine.marketdata.resolver.TypeBasedMarketDataProviderResolver;
import com.opengamma.engine.marketdata.snapshot.UserMarketDataProviderFactory;
import com.opengamma.engine.marketdata.spec.CombinedMarketDataSpecification;
import com.opengamma.engine.marketdata.spec.FixedHistoricalMarketDataSpecification;
import com.opengamma.engine.marketdata.spec.HistoricalShockMarketDataSpecification;
import com.opengamma.engine.marketdata.spec.LatestHistoricalMarketDataSpecification;
import com.opengamma.engine.marketdata.spec.LiveMarketDataSpecification;
import com.opengamma.engine.marketdata.spec.RandomizingMarketDataSpecification;
import com.opengamma.engine.marketdata.spec.UserMarketDataSpecification;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesResolver;
/**
* Component factory for the market data provider resolver.
*/
@BeanDefinition
public class MarketDataProviderResolverComponentFactory extends AbstractComponentFactory {
/**
* The classifier under which to publish.
*/
@PropertyDefinition(validate = "notNull")
private String _classifier;
/**
* The live market data provider factory. May be null if no live data required.
*/
@PropertyDefinition()
private MarketDataProviderFactory _liveMarketDataProviderFactory;
/**
* The historical time-series source.
*/
@PropertyDefinition(validate = "notNull")
private HistoricalTimeSeriesSource _historicalTimeSeriesSource;
/**
* The historical time-series resolver.
*/
@PropertyDefinition(validate = "notNull")
private HistoricalTimeSeriesResolver _historicalTimeSeriesResolver;
/**
* The market data snapshot source.
*/
@PropertyDefinition(validate = "notNull")
private MarketDataSnapshotSource _marketDataSnapshotSource;
// Entry point invoked by the component framework: builds and registers the
// market data provider resolver under this factory's classifier.
@Override
public void init(final ComponentRepository repo, final LinkedHashMap<String, String> configuration) throws Exception {
  initMarketDataProviderResolver(repo);
}
/**
 * Builds the type-based resolver, registering one provider factory per
 * supported {@code MarketDataSpecification} subtype. Live data is optional;
 * all other registrations are unconditional. The combined, historical-shock
 * and randomizing factories delegate back to this same resolver so they can
 * compose any of the other specification types.
 *
 * @return the populated resolver, not null
 */
protected MarketDataProviderResolver createMarketDataProviderResolver() {
  final TypeBasedMarketDataProviderResolver providerResolver = new TypeBasedMarketDataProviderResolver();
  if (getLiveMarketDataProviderFactory() != null) {
    providerResolver.addProvider(LiveMarketDataSpecification.class, getLiveMarketDataProviderFactory());
  }
  final MarketDataProviderFactory fixedHistoricalMarketDataProviderFactory = initFixedHistoricalMarketDataProviderFactory();
  providerResolver.addProvider(FixedHistoricalMarketDataSpecification.class, fixedHistoricalMarketDataProviderFactory);
  final MarketDataProviderFactory latestHistoricalMarketDataProviderFactory = initLatestHistoricalMarketDataProviderFactory();
  providerResolver.addProvider(LatestHistoricalMarketDataSpecification.class, latestHistoricalMarketDataProviderFactory);
  final MarketDataProviderFactory userMarketDataProviderFactory = initUserMarketDataProviderFactory();
  providerResolver.addProvider(UserMarketDataSpecification.class, userMarketDataProviderFactory);
  final MarketDataProviderFactory combinedMarketDataProviderFactory = initCombinedMarketDataProviderFactory(providerResolver);
  providerResolver.addProvider(CombinedMarketDataSpecification.class, combinedMarketDataProviderFactory);
  final MarketDataProviderFactory historicalShockMarketDataProviderFactory = initHistoricalShockMarketDataProviderFactory(providerResolver);
  providerResolver.addProvider(HistoricalShockMarketDataSpecification.class, historicalShockMarketDataProviderFactory);
  final MarketDataProviderFactory randomizingMarketDataProviderFactory = initRandomizingMarketDataProviderFactory(providerResolver);
  providerResolver.addProvider(RandomizingMarketDataSpecification.class, randomizingMarketDataProviderFactory);
  return providerResolver;
}
// Wraps the resolver in a caching layer and publishes it to the component
// repository under this factory's classifier.
private void initMarketDataProviderResolver(final ComponentRepository repo) {
  final MarketDataProviderResolver resolver = new CachingMarketDataProviderResolver(createMarketDataProviderResolver());
  final ComponentInfo info = new ComponentInfo(MarketDataProviderResolver.class, getClassifier());
  repo.registerComponent(info, resolver);
}
// Factory for randomizing market data; delegates to the given resolver for
// the underlying data it perturbs.
private MarketDataProviderFactory initRandomizingMarketDataProviderFactory(MarketDataProviderResolver resolver) {
  return new RandomizingMarketDataProviderFactory(resolver);
}
// Factory for fixed-date historical market data, backed by the configured
// time-series source and resolver.
protected MarketDataProviderFactory initFixedHistoricalMarketDataProviderFactory() {
  return new HistoricalMarketDataProviderFactory(getHistoricalTimeSeriesSource(), getHistoricalTimeSeriesResolver());
}
// Factory for latest-point historical market data, backed by the configured
// time-series source and resolver.
protected MarketDataProviderFactory initLatestHistoricalMarketDataProviderFactory() {
  return new LatestHistoricalMarketDataProviderFactory(getHistoricalTimeSeriesSource(), getHistoricalTimeSeriesResolver());
}
// Factory for user snapshot market data, backed by the configured snapshot source.
protected MarketDataProviderFactory initUserMarketDataProviderFactory() {
  return new UserMarketDataProviderFactory(getMarketDataSnapshotSource());
}
// Factory for combined market data; composes providers resolved by the given
// underlying resolver.
protected MarketDataProviderFactory initCombinedMarketDataProviderFactory(final MarketDataProviderResolver underlyingResolver) {
  return new CombinedMarketDataProviderFactory(underlyingResolver);
}
// Factory for historical-shock market data; composes providers resolved by
// the given underlying resolver.
protected MarketDataProviderFactory initHistoricalShockMarketDataProviderFactory(MarketDataProviderResolver underlyingResolver) {
  return new HistoricalShockMarketDataProviderFactory(underlyingResolver);
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code MarketDataProviderResolverComponentFactory}.
* @return the meta-bean, not null
*/
public static MarketDataProviderResolverComponentFactory.Meta meta() {
return MarketDataProviderResolverComponentFactory.Meta.INSTANCE;
}
static {
JodaBeanUtils.registerMetaBean(MarketDataProviderResolverComponentFactory.Meta.INSTANCE);
}
@Override
public MarketDataProviderResolverComponentFactory.Meta metaBean() {
return MarketDataProviderResolverComponentFactory.Meta.INSTANCE;
}
//-----------------------------------------------------------------------
/**
* Gets the classifier under which to publish.
* @return the value of the property, not null
*/
public String getClassifier() {
return _classifier;
}
/**
* Sets the classifier under which to publish.
* @param classifier the new value of the property, not null
*/
public void setClassifier(String classifier) {
JodaBeanUtils.notNull(classifier, "classifier");
this._classifier = classifier;
}
/**
* Gets the the {@code classifier} property.
* @return the property, not null
*/
public final Property<String> classifier() {
return metaBean().classifier().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the live market data provider factory. May be null if no live data required.
* @return the value of the property
*/
public MarketDataProviderFactory getLiveMarketDataProviderFactory() {
return _liveMarketDataProviderFactory;
}
/**
* Sets the live market data provider factory. May be null if no live data required.
* @param liveMarketDataProviderFactory the new value of the property
*/
public void setLiveMarketDataProviderFactory(MarketDataProviderFactory liveMarketDataProviderFactory) {
this._liveMarketDataProviderFactory = liveMarketDataProviderFactory;
}
/**
* Gets the the {@code liveMarketDataProviderFactory} property.
* @return the property, not null
*/
public final Property<MarketDataProviderFactory> liveMarketDataProviderFactory() {
return metaBean().liveMarketDataProviderFactory().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the historical time-series source.
* @return the value of the property, not null
*/
public HistoricalTimeSeriesSource getHistoricalTimeSeriesSource() {
return _historicalTimeSeriesSource;
}
/**
* Sets the historical time-series source.
* @param historicalTimeSeriesSource the new value of the property, not null
*/
public void setHistoricalTimeSeriesSource(HistoricalTimeSeriesSource historicalTimeSeriesSource) {
JodaBeanUtils.notNull(historicalTimeSeriesSource, "historicalTimeSeriesSource");
this._historicalTimeSeriesSource = historicalTimeSeriesSource;
}
/**
* Gets the the {@code historicalTimeSeriesSource} property.
* @return the property, not null
*/
public final Property<HistoricalTimeSeriesSource> historicalTimeSeriesSource() {
return metaBean().historicalTimeSeriesSource().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the historical time-series resolver.
* @return the value of the property, not null
*/
public HistoricalTimeSeriesResolver getHistoricalTimeSeriesResolver() {
return _historicalTimeSeriesResolver;
}
/**
* Sets the historical time-series resolver.
* @param historicalTimeSeriesResolver the new value of the property, not null
*/
public void setHistoricalTimeSeriesResolver(HistoricalTimeSeriesResolver historicalTimeSeriesResolver) {
JodaBeanUtils.notNull(historicalTimeSeriesResolver, "historicalTimeSeriesResolver");
this._historicalTimeSeriesResolver = historicalTimeSeriesResolver;
}
/**
* Gets the the {@code historicalTimeSeriesResolver} property.
* @return the property, not null
*/
public final Property<HistoricalTimeSeriesResolver> historicalTimeSeriesResolver() {
return metaBean().historicalTimeSeriesResolver().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the market data snapshot source.
* @return the value of the property, not null
*/
public MarketDataSnapshotSource getMarketDataSnapshotSource() {
return _marketDataSnapshotSource;
}
/**
* Sets the market data snapshot source.
* @param marketDataSnapshotSource the new value of the property, not null
*/
public void setMarketDataSnapshotSource(MarketDataSnapshotSource marketDataSnapshotSource) {
JodaBeanUtils.notNull(marketDataSnapshotSource, "marketDataSnapshotSource");
this._marketDataSnapshotSource = marketDataSnapshotSource;
}
/**
* Gets the the {@code marketDataSnapshotSource} property.
* @return the property, not null
*/
public final Property<MarketDataSnapshotSource> marketDataSnapshotSource() {
return metaBean().marketDataSnapshotSource().createProperty(this);
}
//-----------------------------------------------------------------------
@Override
public MarketDataProviderResolverComponentFactory clone() {
return JodaBeanUtils.cloneAlways(this);
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj.getClass() == this.getClass()) {
MarketDataProviderResolverComponentFactory other = (MarketDataProviderResolverComponentFactory) obj;
return JodaBeanUtils.equal(getClassifier(), other.getClassifier()) &&
JodaBeanUtils.equal(getLiveMarketDataProviderFactory(), other.getLiveMarketDataProviderFactory()) &&
JodaBeanUtils.equal(getHistoricalTimeSeriesSource(), other.getHistoricalTimeSeriesSource()) &&
JodaBeanUtils.equal(getHistoricalTimeSeriesResolver(), other.getHistoricalTimeSeriesResolver()) &&
JodaBeanUtils.equal(getMarketDataSnapshotSource(), other.getMarketDataSnapshotSource()) &&
super.equals(obj);
}
return false;
}
@Override
public int hashCode() {
int hash = 7;
hash = hash * 31 + JodaBeanUtils.hashCode(getClassifier());
hash = hash * 31 + JodaBeanUtils.hashCode(getLiveMarketDataProviderFactory());
hash = hash * 31 + JodaBeanUtils.hashCode(getHistoricalTimeSeriesSource());
hash = hash * 31 + JodaBeanUtils.hashCode(getHistoricalTimeSeriesResolver());
hash = hash * 31 + JodaBeanUtils.hashCode(getMarketDataSnapshotSource());
return hash ^ super.hashCode();
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder(192);
buf.append("MarketDataProviderResolverComponentFactory{");
int len = buf.length();
toString(buf);
if (buf.length() > len) {
buf.setLength(buf.length() - 2);
}
buf.append('}');
return buf.toString();
}
@Override
protected void toString(StringBuilder buf) {
super.toString(buf);
buf.append("classifier").append('=').append(JodaBeanUtils.toString(getClassifier())).append(',').append(' ');
buf.append("liveMarketDataProviderFactory").append('=').append(JodaBeanUtils.toString(getLiveMarketDataProviderFactory())).append(',').append(' ');
buf.append("historicalTimeSeriesSource").append('=').append(JodaBeanUtils.toString(getHistoricalTimeSeriesSource())).append(',').append(' ');
buf.append("historicalTimeSeriesResolver").append('=').append(JodaBeanUtils.toString(getHistoricalTimeSeriesResolver())).append(',').append(' ');
buf.append("marketDataSnapshotSource").append('=').append(JodaBeanUtils.toString(getMarketDataSnapshotSource())).append(',').append(' ');
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code MarketDataProviderResolverComponentFactory}.
*/
public static class Meta extends AbstractComponentFactory.Meta {
/**
* The singleton instance of the meta-bean.
*/
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code classifier} property.
*/
private final MetaProperty<String> _classifier = DirectMetaProperty.ofReadWrite(
this, "classifier", MarketDataProviderResolverComponentFactory.class, String.class);
/**
* The meta-property for the {@code liveMarketDataProviderFactory} property.
*/
private final MetaProperty<MarketDataProviderFactory> _liveMarketDataProviderFactory = DirectMetaProperty.ofReadWrite(
this, "liveMarketDataProviderFactory", MarketDataProviderResolverComponentFactory.class, MarketDataProviderFactory.class);
/**
* The meta-property for the {@code historicalTimeSeriesSource} property.
*/
private final MetaProperty<HistoricalTimeSeriesSource> _historicalTimeSeriesSource = DirectMetaProperty.ofReadWrite(
this, "historicalTimeSeriesSource", MarketDataProviderResolverComponentFactory.class, HistoricalTimeSeriesSource.class);
/**
* The meta-property for the {@code historicalTimeSeriesResolver} property.
*/
private final MetaProperty<HistoricalTimeSeriesResolver> _historicalTimeSeriesResolver = DirectMetaProperty.ofReadWrite(
this, "historicalTimeSeriesResolver", MarketDataProviderResolverComponentFactory.class, HistoricalTimeSeriesResolver.class);
/**
* The meta-property for the {@code marketDataSnapshotSource} property.
*/
private final MetaProperty<MarketDataSnapshotSource> _marketDataSnapshotSource = DirectMetaProperty.ofReadWrite(
this, "marketDataSnapshotSource", MarketDataProviderResolverComponentFactory.class, MarketDataSnapshotSource.class);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
this, (DirectMetaPropertyMap) super.metaPropertyMap(),
"classifier",
"liveMarketDataProviderFactory",
"historicalTimeSeriesSource",
"historicalTimeSeriesResolver",
"marketDataSnapshotSource");
/**
* Restricted constructor.
*/
protected Meta() {
}
@Override
protected MetaProperty<?> metaPropertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case -281470431: // classifier
return _classifier;
case -301472921: // liveMarketDataProviderFactory
return _liveMarketDataProviderFactory;
case 358729161: // historicalTimeSeriesSource
return _historicalTimeSeriesSource;
case -946313676: // historicalTimeSeriesResolver
return _historicalTimeSeriesResolver;
case -2019554651: // marketDataSnapshotSource
return _marketDataSnapshotSource;
}
return super.metaPropertyGet(propertyName);
}
@Override
public BeanBuilder<? extends MarketDataProviderResolverComponentFactory> builder() {
return new DirectBeanBuilder<MarketDataProviderResolverComponentFactory>(new MarketDataProviderResolverComponentFactory());
}
@Override
public Class<? extends MarketDataProviderResolverComponentFactory> beanType() {
return MarketDataProviderResolverComponentFactory.class;
}
@Override
public Map<String, MetaProperty<?>> metaPropertyMap() {
return _metaPropertyMap$;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code classifier} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> classifier() {
return _classifier;
}
/**
* The meta-property for the {@code liveMarketDataProviderFactory} property.
* @return the meta-property, not null
*/
public final MetaProperty<MarketDataProviderFactory> liveMarketDataProviderFactory() {
return _liveMarketDataProviderFactory;
}
/**
* The meta-property for the {@code historicalTimeSeriesSource} property.
* @return the meta-property, not null
*/
public final MetaProperty<HistoricalTimeSeriesSource> historicalTimeSeriesSource() {
return _historicalTimeSeriesSource;
}
/**
* The meta-property for the {@code historicalTimeSeriesResolver} property.
* @return the meta-property, not null
*/
public final MetaProperty<HistoricalTimeSeriesResolver> historicalTimeSeriesResolver() {
return _historicalTimeSeriesResolver;
}
/**
* The meta-property for the {@code marketDataSnapshotSource} property.
* @return the meta-property, not null
*/
public final MetaProperty<MarketDataSnapshotSource> marketDataSnapshotSource() {
return _marketDataSnapshotSource;
}
//-----------------------------------------------------------------------
@Override
protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
switch (propertyName.hashCode()) {
case -281470431: // classifier
return ((MarketDataProviderResolverComponentFactory) bean).getClassifier();
case -301472921: // liveMarketDataProviderFactory
return ((MarketDataProviderResolverComponentFactory) bean).getLiveMarketDataProviderFactory();
case 358729161: // historicalTimeSeriesSource
return ((MarketDataProviderResolverComponentFactory) bean).getHistoricalTimeSeriesSource();
case -946313676: // historicalTimeSeriesResolver
return ((MarketDataProviderResolverComponentFactory) bean).getHistoricalTimeSeriesResolver();
case -2019554651: // marketDataSnapshotSource
return ((MarketDataProviderResolverComponentFactory) bean).getMarketDataSnapshotSource();
}
return super.propertyGet(bean, propertyName, quiet);
}
@Override
protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
switch (propertyName.hashCode()) {
case -281470431: // classifier
((MarketDataProviderResolverComponentFactory) bean).setClassifier((String) newValue);
return;
case -301472921: // liveMarketDataProviderFactory
((MarketDataProviderResolverComponentFactory) bean).setLiveMarketDataProviderFactory((MarketDataProviderFactory) newValue);
return;
case 358729161: // historicalTimeSeriesSource
((MarketDataProviderResolverComponentFactory) bean).setHistoricalTimeSeriesSource((HistoricalTimeSeriesSource) newValue);
return;
case -946313676: // historicalTimeSeriesResolver
((MarketDataProviderResolverComponentFactory) bean).setHistoricalTimeSeriesResolver((HistoricalTimeSeriesResolver) newValue);
return;
case -2019554651: // marketDataSnapshotSource
((MarketDataProviderResolverComponentFactory) bean).setMarketDataSnapshotSource((MarketDataSnapshotSource) newValue);
return;
}
super.propertySet(bean, propertyName, newValue, quiet);
}
@Override
protected void validate(Bean bean) {
JodaBeanUtils.notNull(((MarketDataProviderResolverComponentFactory) bean)._classifier, "classifier");
JodaBeanUtils.notNull(((MarketDataProviderResolverComponentFactory) bean)._historicalTimeSeriesSource, "historicalTimeSeriesSource");
JodaBeanUtils.notNull(((MarketDataProviderResolverComponentFactory) bean)._historicalTimeSeriesResolver, "historicalTimeSeriesResolver");
JodaBeanUtils.notNull(((MarketDataProviderResolverComponentFactory) bean)._marketDataSnapshotSource, "marketDataSnapshotSource");
super.validate(bean);
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
| jeorme/OG-Platform | projects/OG-Component/src/main/java/com/opengamma/component/factory/engine/MarketDataProviderResolverComponentFactory.java | Java | apache-2.0 | 22,810 |
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.dalvik;
import com.facebook.buck.jvm.java.classes.FileLike;
import com.google.common.collect.MapMaker;
import java.io.IOException;
import java.io.InputStream;
import java.util.concurrent.ConcurrentMap;
/** Cache to memoize results from DalvikStatsTool. */
/** Cache to memoize results from DalvikStatsTool. */
class DalvikStatsCache {

  // Weak keys let entries be garbage-collected once the FileLike is unreachable.
  private final ConcurrentMap<FileLike, DalvikStatsTool.Stats> cache;

  DalvikStatsCache() {
    cache = new MapMaker().weakKeys().makeMap();
  }

  /**
   * Returns the (memoized) Dalvik size estimate for the given entry.
   *
   * <p>Non-class entries (e.g. a pom.properties file in a JAR) do not
   * contribute to the linear alloc size, so they short-circuit to zero.
   *
   * @param entry the JAR/class entry to measure
   * @return the stats for the entry, never null
   * @throws RuntimeException if the entry cannot be read or analyzed
   */
  DalvikStatsTool.Stats getStats(FileLike entry) {
    String name = entry.getRelativePath();
    if (!name.endsWith(".class")) {
      return DalvikStatsTool.Stats.ZERO;
    }
    DalvikStatsTool.Stats stats = cache.get(entry);
    if (stats != null) {
      return stats;
    }
    try (InputStream is = entry.getInput()) {
      stats = DalvikStatsTool.getEstimate(is);
      cache.put(entry, stats);
      return stats;
    } catch (IOException | RuntimeException e) {
      // Multi-catch replaces two previously duplicated, byte-identical catch
      // blocks that both wrapped the cause the same way.
      throw new RuntimeException(String.format("Error calculating size for %s.", name), e);
    }
  }
}
| marcinkwiatkowski/buck | src/com/facebook/buck/dalvik/DalvikStatsCache.java | Java | apache-2.0 | 1,893 |
package policybased
import (
"errors"
"fmt"
"sort"
kapi "k8s.io/kubernetes/pkg/api"
kapierrors "k8s.io/kubernetes/pkg/api/errors"
"k8s.io/kubernetes/pkg/api/rest"
"k8s.io/kubernetes/pkg/api/unversioned"
"k8s.io/kubernetes/pkg/client/retry"
"k8s.io/kubernetes/pkg/labels"
"k8s.io/kubernetes/pkg/registry/generic/registry"
"k8s.io/kubernetes/pkg/runtime"
oapi "github.com/openshift/origin/pkg/api"
authorizationapi "github.com/openshift/origin/pkg/authorization/api"
authorizationinterfaces "github.com/openshift/origin/pkg/authorization/interfaces"
policybindingregistry "github.com/openshift/origin/pkg/authorization/registry/policybinding"
rolebindingregistry "github.com/openshift/origin/pkg/authorization/registry/rolebinding"
"github.com/openshift/origin/pkg/authorization/rulevalidation"
)
// VirtualStorage implements REST storage for role bindings layered on top of
// the policy-binding registry: role bindings are not persisted directly but
// live inside PolicyBinding objects.
type VirtualStorage struct {
	BindingRegistry policybindingregistry.Registry

	RuleResolver       rulevalidation.AuthorizationRuleResolver // authoritative rule resolver
	CachedRuleResolver rulevalidation.AuthorizationRuleResolver // optional cached variant

	CreateStrategy rest.RESTCreateStrategy
	UpdateStrategy rest.RESTUpdateStrategy
	Resource       unversioned.GroupResource // group/resource reported in API errors
}
// NewVirtualStorage creates a new REST for policies.
// Both create and update use the local (namespaced) role binding strategy.
func NewVirtualStorage(bindingRegistry policybindingregistry.Registry, ruleResolver, cachedRuleResolver rulevalidation.AuthorizationRuleResolver, resource unversioned.GroupResource) rolebindingregistry.Storage {
	return &VirtualStorage{
		BindingRegistry:    bindingRegistry,
		RuleResolver:       ruleResolver,
		CachedRuleResolver: cachedRuleResolver,

		CreateStrategy: rolebindingregistry.LocalStrategy,
		UpdateStrategy: rolebindingregistry.LocalStrategy,
		Resource:       resource,
	}
}
// New returns the empty object this storage serves: a RoleBinding.
func (m *VirtualStorage) New() runtime.Object {
	var empty authorizationapi.RoleBinding
	return &empty
}
// NewList returns the empty list object this storage serves: a RoleBindingList.
func (m *VirtualStorage) NewList() runtime.Object {
	var emptyList authorizationapi.RoleBindingList
	return &emptyList
}
// List gathers the role bindings embedded in every policy binding visible in
// the request context, filters them by the label and field selectors in
// options, and returns them sorted by name.
func (m *VirtualStorage) List(ctx kapi.Context, options *kapi.ListOptions) (runtime.Object, error) {
	// Fetch all policy bindings unfiltered; selectors apply to the contained
	// role bindings, not to the policy bindings themselves.
	policyBindingList, err := m.BindingRegistry.ListPolicyBindings(ctx, &kapi.ListOptions{})
	if err != nil {
		return nil, err
	}
	labelSelector, fieldSelector := oapi.ListOptionsToSelectors(options)
	roleBindingList := &authorizationapi.RoleBindingList{}
	for _, policyBinding := range policyBindingList.Items {
		for _, roleBinding := range policyBinding.RoleBindings {
			if labelSelector.Matches(labels.Set(roleBinding.Labels)) &&
				fieldSelector.Matches(authorizationapi.RoleBindingToSelectableFields(roleBinding)) {
				roleBindingList.Items = append(roleBindingList.Items, *roleBinding)
			}
		}
	}
	// Deterministic ordering across the merged policy bindings.
	sort.Sort(byName(roleBindingList.Items))
	return roleBindingList, nil
}
// Get returns the role binding with the given name, locating the policy
// binding that owns it first. A missing owning policy binding is reported as
// a NotFound for this storage's resource, not for the policy binding.
func (m *VirtualStorage) Get(ctx kapi.Context, name string) (runtime.Object, error) {
	policyBinding, err := m.getPolicyBindingOwningRoleBinding(ctx, name)
	if kapierrors.IsNotFound(err) {
		// Re-wrap so callers see a role binding NotFound, not a policy one.
		return nil, kapierrors.NewNotFound(m.Resource, name)
	}
	if err != nil {
		return nil, err
	}

	binding, exists := policyBinding.RoleBindings[name]
	if !exists {
		return nil, kapierrors.NewNotFound(m.Resource, name)
	}
	return binding, nil
}
// Delete removes the named role binding from its owning PolicyBinding,
// retrying the write when the underlying object hits an update conflict.
func (m *VirtualStorage) Delete(ctx kapi.Context, name string, options *kapi.DeleteOptions) (runtime.Object, error) {
	err := retry.RetryOnConflict(retry.DefaultRetry, func() error {
		owner, lookupErr := m.getPolicyBindingOwningRoleBinding(ctx, name)
		switch {
		case kapierrors.IsNotFound(lookupErr):
			// Normalize to a NotFound against our own resource.
			return kapierrors.NewNotFound(m.Resource, name)
		case lookupErr != nil:
			return lookupErr
		}

		if _, exists := owner.RoleBindings[name]; !exists {
			return kapierrors.NewNotFound(m.Resource, name)
		}

		delete(owner.RoleBindings, name)
		owner.LastModified = unversioned.Now()

		return m.BindingRegistry.UpdatePolicyBinding(ctx, owner)
	})
	if err != nil {
		return nil, err
	}

	return &unversioned.Status{Status: unversioned.StatusSuccess}, nil
}
// Create adds a new role binding, rejecting bindings that would grant the
// caller permissions they do not already hold (escalation check enabled).
func (m *VirtualStorage) Create(ctx kapi.Context, obj runtime.Object) (runtime.Object, error) {
	return m.createRoleBinding(ctx, obj, false)
}

// CreateRoleBindingWithEscalation adds a new role binding without the
// escalation check; intended for trusted callers such as cluster priming.
func (m *VirtualStorage) CreateRoleBindingWithEscalation(ctx kapi.Context, obj *authorizationapi.RoleBinding) (*authorizationapi.RoleBinding, error) {
	return m.createRoleBinding(ctx, obj, true)
}
// createRoleBinding validates and persists a new role binding inside the
// PolicyBinding pointing at the binding's role namespace.  When
// allowEscalation is true the escalation check is skipped and the owning
// PolicyBinding may be auto-provisioned (see getPolicyBindingForPolicy).
// Returns AlreadyExists when a binding of the same name is present.
func (m *VirtualStorage) createRoleBinding(ctx kapi.Context, obj runtime.Object, allowEscalation bool) (*authorizationapi.RoleBinding, error) {
	// Copy object before passing to BeforeCreate, since it mutates
	objCopy, err := kapi.Scheme.DeepCopy(obj)
	if err != nil {
		return nil, err
	}
	obj = objCopy.(runtime.Object)

	if err := rest.BeforeCreate(m.CreateStrategy, ctx, obj); err != nil {
		return nil, err
	}

	roleBinding := obj.(*authorizationapi.RoleBinding)

	if !allowEscalation {
		if err := m.confirmNoEscalation(ctx, roleBinding); err != nil {
			return nil, err
		}
	}

	// Retry if we hit a conflict on the underlying PolicyBinding object
	if err := retry.RetryOnConflict(retry.DefaultRetry, func() error {
		policyBinding, err := m.getPolicyBindingForPolicy(ctx, roleBinding.RoleRef.Namespace, allowEscalation)
		if err != nil {
			return err
		}

		_, exists := policyBinding.RoleBindings[roleBinding.Name]
		if exists {
			return kapierrors.NewAlreadyExists(m.Resource, roleBinding.Name)
		}

		// The role binding adopts the PolicyBinding's resource version since
		// the PolicyBinding is the real persisted object.
		roleBinding.ResourceVersion = policyBinding.ResourceVersion
		policyBinding.RoleBindings[roleBinding.Name] = roleBinding
		policyBinding.LastModified = unversioned.Now()

		return m.BindingRegistry.UpdatePolicyBinding(ctx, policyBinding)
	}); err != nil {
		return nil, err
	}

	return roleBinding, nil
}
// Update applies objInfo to the named role binding, rejecting changes
// that would escalate the caller's privileges.
func (m *VirtualStorage) Update(ctx kapi.Context, name string, objInfo rest.UpdatedObjectInfo) (runtime.Object, bool, error) {
	return m.updateRoleBinding(ctx, name, objInfo, false)
}

// UpdateRoleBindingWithEscalation applies obj without the escalation
// check; intended for trusted callers such as cluster priming.
func (m *VirtualStorage) UpdateRoleBindingWithEscalation(ctx kapi.Context, obj *authorizationapi.RoleBinding) (*authorizationapi.RoleBinding, bool, error) {
	return m.updateRoleBinding(ctx, obj.Name, rest.DefaultUpdatedObjectInfo(obj, kapi.Scheme), true)
}
// updateRoleBinding applies objInfo to the named role binding inside its
// owning PolicyBinding, retrying on storage conflicts.  The bool result
// reports whether the object was created; this path never creates, so it
// is always false.  A resource-version mismatch between the incoming and
// stored binding is surfaced as a typed Conflict error.
func (m *VirtualStorage) updateRoleBinding(ctx kapi.Context, name string, objInfo rest.UpdatedObjectInfo, allowEscalation bool) (*authorizationapi.RoleBinding, bool, error) {
	var updatedRoleBinding *authorizationapi.RoleBinding
	// Set when the failure is an optimistic-lock mismatch so the final
	// error can be converted into a typed Conflict.
	var roleBindingConflicted = false

	if err := retry.RetryOnConflict(retry.DefaultRetry, func() error {
		// Do an initial fetch
		old, err := m.Get(ctx, name)
		if err != nil {
			return err
		}
		oldRoleBinding, exists := old.(*authorizationapi.RoleBinding)
		if !exists {
			return kapierrors.NewBadRequest(fmt.Sprintf("old obj is not a role binding: %#v", old))
		}

		// get the updated object, so we know what namespace we're binding against
		obj, err := objInfo.UpdatedObject(ctx, old)
		if err != nil {
			return err
		}
		roleBinding, ok := obj.(*authorizationapi.RoleBinding)
		if !ok {
			return kapierrors.NewBadRequest(fmt.Sprintf("obj is not a role binding: %#v", obj))
		}

		// now that we know which roleRef we want to go to, fetch the policyBinding we'll actually be updating, and re-get the oldRoleBinding
		policyBinding, err := m.getPolicyBindingForPolicy(ctx, roleBinding.RoleRef.Namespace, allowEscalation)
		if err != nil {
			return err
		}
		oldRoleBinding, exists = policyBinding.RoleBindings[roleBinding.Name]
		if !exists {
			return kapierrors.NewNotFound(m.Resource, roleBinding.Name)
		}

		// An empty incoming resource version means "unconditional" when the
		// strategy allows it: adopt the stored version so the check passes.
		if len(roleBinding.ResourceVersion) == 0 && m.UpdateStrategy.AllowUnconditionalUpdate() {
			roleBinding.ResourceVersion = oldRoleBinding.ResourceVersion
		}

		if err := rest.BeforeUpdate(m.UpdateStrategy, ctx, obj, oldRoleBinding); err != nil {
			return err
		}

		if !allowEscalation {
			if err := m.confirmNoEscalation(ctx, roleBinding); err != nil {
				return err
			}
		}

		// conflict detection
		if roleBinding.ResourceVersion != oldRoleBinding.ResourceVersion {
			// mark as a conflict err, but return an untyped error to escape the retry
			roleBindingConflicted = true
			return errors.New(registry.OptimisticLockErrorMsg)
		}

		// non-mutating change
		if kapi.Semantic.DeepEqual(oldRoleBinding, roleBinding) {
			updatedRoleBinding = roleBinding
			return nil
		}

		// Persist: the binding adopts the PolicyBinding's resource version
		// because the PolicyBinding is the real stored object.
		roleBinding.ResourceVersion = policyBinding.ResourceVersion
		policyBinding.RoleBindings[roleBinding.Name] = roleBinding
		policyBinding.LastModified = unversioned.Now()

		if err := m.BindingRegistry.UpdatePolicyBinding(ctx, policyBinding); err != nil {
			return err
		}
		updatedRoleBinding = roleBinding
		return nil
	}); err != nil {
		if roleBindingConflicted {
			// construct the typed conflict error
			return nil, false, kapierrors.NewConflict(m.Resource, name, err)
		}
		return nil, false, err
	}

	return updatedRoleBinding, false, nil
}
// roleForEscalationCheck tries to use the CachedRuleResolver if available to avoid expensive checks
func (m *VirtualStorage) roleForEscalationCheck(binding authorizationinterfaces.RoleBinding) (authorizationinterfaces.Role, error) {
	if cached := m.CachedRuleResolver; cached != nil {
		role, err := cached.GetRole(binding)
		if err == nil {
			return role, nil
		}
		// Cache miss or stale cache: fall back to the live resolver below.
	}
	return m.RuleResolver.GetRole(binding)
}
// confirmNoEscalation returns an error when persisting roleBinding would
// grant permissions the current user does not already hold.
func (m *VirtualStorage) confirmNoEscalation(ctx kapi.Context, roleBinding *authorizationapi.RoleBinding) error {
	adapter := authorizationinterfaces.NewLocalRoleBindingAdapter(roleBinding)

	modifyingRole, err := m.roleForEscalationCheck(adapter)
	if err != nil {
		return err
	}

	return rulevalidation.ConfirmNoEscalation(ctx, m.Resource, roleBinding.Name, m.RuleResolver, m.CachedRuleResolver, modifyingRole)
}
// ensurePolicyBindingToMaster returns a PolicyBinding object that has a PolicyRef pointing to the Policy in the passed namespace.
// The PolicyBinding is created on first use; a concurrent create by another
// caller is tolerated (AlreadyExists) and resolved by re-fetching.
func (m *VirtualStorage) ensurePolicyBindingToMaster(ctx kapi.Context, policyNamespace, policyBindingName string) (*authorizationapi.PolicyBinding, error) {
	policyBinding, err := m.BindingRegistry.GetPolicyBinding(ctx, policyBindingName)
	if err != nil {
		if !kapierrors.IsNotFound(err) {
			return nil, err
		}

		// if we have no policyBinding, go ahead and make one. creating one here collapses code paths below. We only take this hit once
		policyBinding = policybindingregistry.NewEmptyPolicyBinding(kapi.NamespaceValue(ctx), policyNamespace, policyBindingName)
		if err := m.BindingRegistry.CreatePolicyBinding(ctx, policyBinding); err != nil {
			// Tolerate the policybinding having been created in the meantime
			if !kapierrors.IsAlreadyExists(err) {
				return nil, err
			}
		}

		// Re-read so we hold the persisted object (with its resource version).
		policyBinding, err = m.BindingRegistry.GetPolicyBinding(ctx, policyBindingName)
		if err != nil {
			return nil, err
		}
	}

	// Guarantee callers can insert without a nil-map check.
	if policyBinding.RoleBindings == nil {
		policyBinding.RoleBindings = make(map[string]*authorizationapi.RoleBinding)
	}

	return policyBinding, nil
}
// getPolicyBindingForPolicy returns a PolicyBinding that points to the specified policyNamespace. It will autocreate ONLY if policyNamespace equals the master namespace
func (m *VirtualStorage) getPolicyBindingForPolicy(ctx kapi.Context, policyNamespace string, allowAutoProvision bool) (*authorizationapi.PolicyBinding, error) {
	bindingName := authorizationapi.GetPolicyBindingName(policyNamespace)

	// we can autocreate a PolicyBinding object if the RoleBinding is for the
	// master namespace OR if we've been explicitly told to create the policy
	// binding; the latter happens during priming.
	if policyNamespace == "" || allowAutoProvision {
		return m.ensurePolicyBindingToMaster(ctx, policyNamespace, bindingName)
	}

	policyBinding, err := m.BindingRegistry.GetPolicyBinding(ctx, bindingName)
	if err != nil {
		return nil, err
	}

	// Guarantee callers can insert without a nil-map check.
	if policyBinding.RoleBindings == nil {
		policyBinding.RoleBindings = make(map[string]*authorizationapi.RoleBinding)
	}

	return policyBinding, nil
}
// getPolicyBindingOwningRoleBinding scans every PolicyBinding in the
// namespace and returns (a copy of) the one that contains bindingName,
// or a typed NotFound error when no PolicyBinding owns it.
func (m *VirtualStorage) getPolicyBindingOwningRoleBinding(ctx kapi.Context, bindingName string) (*authorizationapi.PolicyBinding, error) {
	policyBindingList, err := m.BindingRegistry.ListPolicyBindings(ctx, &kapi.ListOptions{})
	if err != nil {
		return nil, err
	}

	for i := range policyBindingList.Items {
		// Take an explicit copy so the returned pointer does not alias the
		// list's backing array.
		owner := policyBindingList.Items[i]
		if _, exists := owner.RoleBindings[bindingName]; exists {
			return &owner, nil
		}
	}

	return nil, kapierrors.NewNotFound(m.Resource, bindingName)
}
// byName implements sort.Interface, ordering role bindings
// lexicographically by their Name field.
type byName []authorizationapi.RoleBinding

func (r byName) Len() int {
	return len(r)
}

func (r byName) Swap(i, j int) {
	r[i], r[j] = r[j], r[i]
}

func (r byName) Less(i, j int) bool {
	return r[i].Name < r[j].Name
}
| fabric8io/gofabric8 | vendor/github.com/openshift/origin/pkg/authorization/registry/rolebinding/policybased/virtual_storage.go | GO | apache-2.0 | 12,439 |
/*******************************************************************************
*
* Pentaho Big Data
*
* Copyright (C) 2002-2015 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.runtime.test.action.impl;
import org.junit.Before;
import org.junit.Test;
import org.pentaho.runtime.test.TestMessageGetterFactory;
import org.pentaho.runtime.test.action.RuntimeTestAction;
import org.pentaho.runtime.test.action.RuntimeTestActionPayload;
import org.pentaho.runtime.test.i18n.MessageGetter;
import org.pentaho.runtime.test.result.RuntimeTestEntrySeverity;
import org.slf4j.Logger;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
* Created by bryan on 9/10/15.
*/
public class LoggingRuntimeTestActionHandlerImplTest {
  // MessageGetter bound to the handler class; used to compute the exact
  // strings the handler is expected to pass to the logger.
  private MessageGetter messageGetter;
  // Mocked SLF4J logger that receives the handler's output.
  private Logger logger;
  // Unit under test.
  private LoggingRuntimeTestActionHandlerImpl loggingRuntimeTestActionHandler;
  // Mocked action whose severity/name/description/payload drive each case.
  private RuntimeTestAction runtimeTestAction;
  private String actionDescription;
  private String actionName;
  private RuntimeTestActionPayload runtimeTestActionPayload;

  /**
   * Builds the handler with a test message getter factory and a mocked logger,
   * plus the mocked action fixtures shared by every test.
   */
  @Before
  public void setup() {
    TestMessageGetterFactory messageGetterFactory = new TestMessageGetterFactory();
    messageGetter = messageGetterFactory.create( LoggingRuntimeTestActionHandlerImpl.class );
    logger = mock( Logger.class );
    loggingRuntimeTestActionHandler = new LoggingRuntimeTestActionHandlerImpl( messageGetterFactory, logger );
    runtimeTestAction = mock( RuntimeTestAction.class );
    actionName = "actionName";
    actionDescription = "actionDescription";
    runtimeTestActionPayload = mock( RuntimeTestActionPayload.class );
  }

  @Test
  public void testCanHandle() {
    // Should work with least specific payload as it always returns true
    when( runtimeTestAction.getPayload() ).thenReturn( mock( RuntimeTestActionPayload.class ) );
    assertTrue( loggingRuntimeTestActionHandler.canHandle( runtimeTestAction ) );
  }

  /**
   * Stubs the mocked action with the given severity and invokes the handler,
   * leaving the verification of the logged output to the caller.
   */
  private void handleSetup( RuntimeTestEntrySeverity severity ) {
    when( runtimeTestAction.getSeverity() ).thenReturn( severity );
    when( runtimeTestAction.getName() ).thenReturn( actionName );
    when( runtimeTestAction.getDescription() ).thenReturn( actionDescription );
    when( runtimeTestAction.getPayload() ).thenReturn( runtimeTestActionPayload );
    loggingRuntimeTestActionHandler.handle( runtimeTestAction );
  }

  // A null severity is logged as a warning using the dedicated
  // missing-severity message key.
  @Test
  public void testHandleNullSeverity() {
    handleSetup( null );
    verify( logger ).warn( messageGetter
      .getMessage( LoggingRuntimeTestActionHandlerImpl.LOGGING_RUNTIME_TEST_ACTION_HANDLER_IMPL_MISSING_SEVERITY,
        actionName, actionDescription, runtimeTestActionPayload.toString() ) );
  }

  // DEBUG severity maps to Logger.debug.
  @Test
  public void testHandleDebugSeverity() {
    handleSetup( RuntimeTestEntrySeverity.DEBUG );
    verify( logger ).debug( messageGetter
      .getMessage( LoggingRuntimeTestActionHandlerImpl.LOGGING_RUNTIME_TEST_ACTION_HANDLER_IMPL,
        actionName, actionDescription, runtimeTestActionPayload.toString() ) );
  }

  // INFO severity maps to Logger.info.
  @Test
  public void testHandleInfoSeverity() {
    handleSetup( RuntimeTestEntrySeverity.INFO );
    verify( logger ).info( messageGetter
      .getMessage( LoggingRuntimeTestActionHandlerImpl.LOGGING_RUNTIME_TEST_ACTION_HANDLER_IMPL,
        actionName, actionDescription, runtimeTestActionPayload.toString() ) );
  }

  // WARNING severity maps to Logger.warn.
  @Test
  public void testHandleWarningSeverity() {
    handleSetup( RuntimeTestEntrySeverity.WARNING );
    verify( logger ).warn( messageGetter
      .getMessage( LoggingRuntimeTestActionHandlerImpl.LOGGING_RUNTIME_TEST_ACTION_HANDLER_IMPL,
        actionName, actionDescription, runtimeTestActionPayload.toString() ) );
  }

  // SKIPPED severity is also reported as a warning.
  @Test
  public void testHandleSkippedSeverity() {
    handleSetup( RuntimeTestEntrySeverity.SKIPPED );
    verify( logger ).warn( messageGetter
      .getMessage( LoggingRuntimeTestActionHandlerImpl.LOGGING_RUNTIME_TEST_ACTION_HANDLER_IMPL,
        actionName, actionDescription, runtimeTestActionPayload.toString() ) );
  }

  // ERROR severity maps to Logger.error.
  @Test
  public void testHandleErrorSeverity() {
    handleSetup( RuntimeTestEntrySeverity.ERROR );
    verify( logger ).error( messageGetter
      .getMessage( LoggingRuntimeTestActionHandlerImpl.LOGGING_RUNTIME_TEST_ACTION_HANDLER_IMPL,
        actionName, actionDescription, runtimeTestActionPayload.toString() ) );
  }

  // FATAL severity is also reported via Logger.error.
  @Test
  public void testHandleFatalSeverity() {
    handleSetup( RuntimeTestEntrySeverity.FATAL );
    verify( logger ).error( messageGetter
      .getMessage( LoggingRuntimeTestActionHandlerImpl.LOGGING_RUNTIME_TEST_ACTION_HANDLER_IMPL,
        actionName, actionDescription, runtimeTestActionPayload.toString() ) );
  }
}
| lgrill-pentaho/big-data-plugin | api/runtimeTest/src/test/java/org/pentaho/runtime/test/action/impl/LoggingRuntimeTestActionHandlerImplTest.java | Java | apache-2.0 | 5,458 |
/*
* Copyright 2016 The Cartographer Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "cartographer_ros/ros_log_sink.h"
#include <chrono>
#include <cstring>
#include <string>
#include <thread>
#include "glog/log_severity.h"
#include "ros/console.h"
namespace cartographer_ros {
namespace {
// Returns the path component after the final '/', or the whole path when
// it contains no separator.
const char* GetBasename(const char* filepath) {
  const char* last_slash = std::strrchr(filepath, '/');
  if (last_slash == nullptr) {
    return filepath;
  }
  return last_slash + 1;
}
} // namespace
// Registers this sink with glog so log messages are forwarded to rosconsole.
ScopedRosLogSink::ScopedRosLogSink() : will_die_(false) { AddLogSink(this); }
// Unregisters the sink, restoring default glog output.
ScopedRosLogSink::~ScopedRosLogSink() { RemoveLogSink(this); }
// Formats one glog record and forwards it to the rosconsole macro that
// matches its severity.  FATAL additionally arms WaitTillSent() so the
// message can reach rosout before glog aborts the process.
void ScopedRosLogSink::send(const ::google::LogSeverity severity,
                            const char* const filename,
                            const char* const base_filename, const int line,
                            const struct std::tm* const tm_time,
                            const char* const message,
                            const size_t message_len) {
  // Render with glog's standard formatting, shortening the path to its
  // basename.
  const std::string formatted = ::google::LogSink::ToString(
      severity, GetBasename(filename), line, tm_time, message, message_len);

  if (severity == ::google::GLOG_INFO) {
    ROS_INFO_STREAM(formatted);
  } else if (severity == ::google::GLOG_WARNING) {
    ROS_WARN_STREAM(formatted);
  } else if (severity == ::google::GLOG_ERROR) {
    ROS_ERROR_STREAM(formatted);
  } else if (severity == ::google::GLOG_FATAL) {
    ROS_FATAL_STREAM(formatted);
    will_die_ = true;
  }
}
// Blocks briefly after a FATAL message so ROS can publish it before the
// process is aborted by glog; no-op otherwise.
void ScopedRosLogSink::WaitTillSent() {
  if (!will_die_) {
    return;
  }
  // Give ROS some time to actually publish our message.
  std::this_thread::sleep_for(std::chrono::milliseconds(1000));
}
} // namespace cartographer_ros
| af-silva/cartographer_ros | cartographer_ros/src/ros_log_sink.cc | C++ | apache-2.0 | 2,260 |
/**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.engine.marketdata.live;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.opengamma.engine.view.ViewProcess;
import com.opengamma.engine.view.impl.ViewProcessImpl;
import com.opengamma.engine.view.impl.ViewProcessorInternal;
import com.opengamma.id.ExternalScheme;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.jms.JmsConnector;
/**
* Listens to JMS messages announcing that market data providers have become available and
* forces all view processes to rebuild their graph. This retries any failed market data subscriptions.
* @deprecated This is a temporary fix, once PLAT-3908 is resolved this class won't be necessary
*/
@Deprecated
public class ViewProcessAvailabilityNotificationListener extends AvailabilityNotificationListener {

  /** Logger for this class. */
  private static final Logger LOGGER = LoggerFactory.getLogger(ViewProcessAvailabilityNotificationListener.class);

  /** Provides access to the currently running view processes. */
  private final ViewProcessorInternal _viewProcessor;

  /**
   * @param topic The topic for {@link MarketDataAvailabilityNotification} messages
   * @param jmsConnector For receiving JMS messages
   * @param viewProcessor The view processor, used to obtain the running view processes.
   */
  public ViewProcessAvailabilityNotificationListener(String topic,
                                                     JmsConnector jmsConnector,
                                                     ViewProcessorInternal viewProcessor) {
    super(topic, jmsConnector);
    ArgumentChecker.notNull(viewProcessor, "viewProcessor");
    _viewProcessor = viewProcessor;
  }

  /**
   * Forces a graph rebuild on every running {@link ViewProcessImpl} so that
   * previously failed market data subscriptions are retried.
   */
  @Override
  protected void notificationReceived(Set<ExternalScheme> schemes) {
    for (ViewProcess viewProcess : _viewProcessor.getViewProcesses()) {
      if (!(viewProcess instanceof ViewProcessImpl)) {
        continue;
      }
      LOGGER.info("Forcing graph rebuild for {}", viewProcess);
      ((ViewProcessImpl) viewProcess).forceGraphRebuild();
    }
  }
}
| jeorme/OG-Platform | projects/OG-Engine/src/main/java/com/opengamma/engine/marketdata/live/ViewProcessAvailabilityNotificationListener.java | Java | apache-2.0 | 2,195 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Exercises Arrow::MapDataType construction and its #to_s rendering.
class MapDataTypeTest < Test::Unit::TestCase
  sub_test_case(".new") do
    def setup
      # Key/item types shared by every construction style under test.
      @key = :int8
      @item = :string
    end

    test("ordered arguments") do
      assert_equal("map<int8, string>",
                   Arrow::MapDataType.new(@key, @item).to_s)
    end

    test("description") do
      # The keyword-hash form must behave identically to positional arguments.
      assert_equal("map<int8, string>",
                   Arrow::MapDataType.new(key: @key,
                                          item: @item).to_s)
    end
  end
end
| cpcloud/arrow | ruby/red-arrow/test/test-map-data-type.rb | Ruby | apache-2.0 | 1,264 |
package com.google.api.ads.dfp.jaxws.v201411;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for ReportJobStatus.
*
* <p>The following schema fragment specifies the expected content contained within this class.
* <p>
* <pre>
* <simpleType name="ReportJobStatus">
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <enumeration value="COMPLETED"/>
* <enumeration value="IN_PROGRESS"/>
* <enumeration value="FAILED"/>
* </restriction>
* </simpleType>
* </pre>
*
*/
@XmlType(name = "ReportJobStatus")
@XmlEnum
public enum ReportJobStatus {

    /**
     *
     * The {@link ReportJob} has completed successfully and is ready to download.
     *
     *
     */
    COMPLETED,

    /**
     *
     * The {@link ReportJob} is still being executed.
     *
     *
     */
    IN_PROGRESS,

    /**
     *
     * The {@link ReportJob} has failed to run to completion.
     *
     *
     */
    FAILED;

    /**
     * Returns the XML wire value of this constant (identical to its name).
     */
    public String value() {
        return name();
    }

    /**
     * Parses the XML wire value {@code v} into the matching constant.
     * Throws {@link IllegalArgumentException} for unknown values.
     */
    public static ReportJobStatus fromValue(String v) {
        return valueOf(v);
    }

}
| nafae/developer | modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201411/ReportJobStatus.java | Java | apache-2.0 | 1,274 |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
using Microsoft.Azure.Management.SiteRecovery.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Management.Automation;
namespace Microsoft.Azure.Commands.SiteRecovery
{
/// <summary>
/// Updates Azure Site Recovery Recovery Plan object in memory.
/// </summary>
    [Cmdlet(VerbsData.Edit, "AzureRmSiteRecoveryRecoveryPlan", DefaultParameterSetName = ASRParameterSets.AppendGroup)]
    public class EditAzureSiteRecoveryRecoveryPlan : SiteRecoveryCmdletBase
    {
        #region Parameters

        /// <summary>
        /// Gets or sets Name of the Recovery Plan.
        /// </summary>
        [Parameter(Mandatory = true, ValueFromPipeline = true)]
        [ValidateNotNullOrEmpty]
        public ASRRecoveryPlan RecoveryPlan { get; set; }

        /// <summary>
        /// Gets or sets switch parameter
        /// </summary>
        [Parameter(ParameterSetName = ASRParameterSets.AppendGroup, Mandatory = true)]
        public SwitchParameter AppendGroup { get; set; }

        /// <summary>
        /// Gets or sets switch parameter
        /// </summary>
        [Parameter(ParameterSetName = ASRParameterSets.RemoveGroup, Mandatory = true)]
        public ASRRecoveryPlanGroup RemoveGroup { get; set; }

        /// <summary>
        /// Gets or sets group
        /// </summary>
        [Parameter(ParameterSetName = ASRParameterSets.AddProtectedEntities, Mandatory = true)]
        [Parameter(ParameterSetName = ASRParameterSets.RemoveProtectedEntities, Mandatory = true)]
        public ASRRecoveryPlanGroup Group { get; set; }

        /// <summary>
        /// Gets or sets switch parameter
        /// </summary>
        [Parameter(ParameterSetName = ASRParameterSets.AddProtectedEntities, Mandatory = true)]
        public ASRProtectionEntity[] AddProtectedEntities { get; set; }

        /// <summary>
        /// Gets or sets switch parameter
        /// </summary>
        [Parameter(ParameterSetName = ASRParameterSets.RemoveProtectedEntities, Mandatory = true)]
        public ASRProtectionEntity[] RemoveProtectedEntities { get; set; }

        #endregion Parameters

        /// <summary>
        /// ProcessRecord of the command.
        /// The edit is performed entirely on the in-memory RecoveryPlan object;
        /// the modified plan is written to the pipeline, not persisted to Azure.
        /// </summary>
        public override void ExecuteSiteRecoveryCmdlet()
        {
            base.ExecuteSiteRecoveryCmdlet();

            ASRRecoveryPlanGroup tempGroup;

            switch (this.ParameterSetName)
            {
                // Append a fresh, empty boot group to the end of the plan.
                case ASRParameterSets.AppendGroup:
                    RecoveryPlanGroup recoveryPlanGroup = new RecoveryPlanGroup()
                    {
                        GroupType = Constants.Boot,
                        ReplicationProtectedItems = new List<RecoveryPlanProtectedItem>(),
                        StartGroupActions = new List<RecoveryPlanAction>(),
                        EndGroupActions = new List<RecoveryPlanAction>()
                    };

                    this.RecoveryPlan.Groups.Add(new ASRRecoveryPlanGroup("Group " + (RecoveryPlan.Groups.Count - 1).ToString(), recoveryPlanGroup));
                    break;

                // Remove the named group and renumber the remaining groups.
                case ASRParameterSets.RemoveGroup:
                    tempGroup = this.RecoveryPlan.Groups.FirstOrDefault(g => String.Compare(g.Name, RemoveGroup.Name, StringComparison.OrdinalIgnoreCase) == 0);
                    if (tempGroup != null)
                    {
                        this.RecoveryPlan.Groups.Remove(tempGroup);
                        this.RecoveryPlan = this.RecoveryPlan.RefreshASRRecoveryPlanGroupNames();
                    }
                    else
                    {
                        throw new PSArgumentException(string.Format(Properties.Resources.GroupNotFoundInRecoveryPlan, this.RemoveGroup.Name, this.RecoveryPlan.FriendlyName));
                    }
                    break;

                // Add protected items to the named group, rejecting items that
                // are already part of any group in the plan.
                case ASRParameterSets.AddProtectedEntities:
                    foreach (ASRProtectionEntity pe in AddProtectedEntities)
                    {
                        string fabricName = Utilities.GetValueFromArmId(pe.ID, ARMResourceTypeConstants.ReplicationFabrics);
                        // fetch the latest PE object
                        ProtectableItemResponse protectableItemResponse =
                            RecoveryServicesClient.GetAzureSiteRecoveryProtectableItem(fabricName,
                            pe.ProtectionContainerId, pe.Name);

                        ReplicationProtectedItemResponse replicationProtectedItemResponse =
                            RecoveryServicesClient.GetAzureSiteRecoveryReplicationProtectedItem(fabricName,
                            pe.ProtectionContainerId, Utilities.GetValueFromArmId(protectableItemResponse.ProtectableItem.Properties.ReplicationProtectedItemId,
                            ARMResourceTypeConstants.ReplicationProtectedItems));

                        tempGroup = this.RecoveryPlan.Groups.FirstOrDefault(g => String.Compare(g.Name, Group.Name, StringComparison.OrdinalIgnoreCase) == 0);

                        if (tempGroup != null)
                        {
                            foreach (ASRRecoveryPlanGroup gp in this.RecoveryPlan.Groups)
                            {
                                if (gp.ReplicationProtectedItems == null)
                                    continue;

                                if (gp.ReplicationProtectedItems.Any(pi => String.Compare(pi.Id, replicationProtectedItemResponse.ReplicationProtectedItem.Id, StringComparison.OrdinalIgnoreCase) == 0))
                                {
                                    throw new PSArgumentException(string.Format(Properties.Resources.VMAlreadyPartOfGroup, pe.FriendlyName, gp.Name, this.RecoveryPlan.FriendlyName));
                                }
                            }

                            this.RecoveryPlan.Groups[RecoveryPlan.Groups.IndexOf(tempGroup)].ReplicationProtectedItems.Add(replicationProtectedItemResponse.ReplicationProtectedItem);
                        }
                        else
                        {
                            throw new PSArgumentException(string.Format(Properties.Resources.GroupNotFoundInRecoveryPlan, this.Group.Name, this.RecoveryPlan.FriendlyName));
                        }
                    }
                    break;

                // Remove protected items from the named group; each item must
                // currently be a member of that group.
                case ASRParameterSets.RemoveProtectedEntities:
                    foreach (ASRProtectionEntity pe in RemoveProtectedEntities)
                    {
                        string fabricName = Utilities.GetValueFromArmId(pe.ID, ARMResourceTypeConstants.ReplicationFabrics);
                        // fetch the latest PE object
                        ProtectableItemResponse protectableItemResponse =
                            RecoveryServicesClient.GetAzureSiteRecoveryProtectableItem(fabricName,
                            pe.ProtectionContainerId, pe.Name);

                        tempGroup = this.RecoveryPlan.Groups.FirstOrDefault(g => String.Compare(g.Name, Group.Name, StringComparison.OrdinalIgnoreCase) == 0);

                        if (tempGroup != null)
                        {
                            var ReplicationProtectedItem =
                                this.RecoveryPlan.Groups[RecoveryPlan.Groups.IndexOf(tempGroup)].
                                ReplicationProtectedItems.
                                FirstOrDefault(pi => String.Compare(pi.Id,
                                    protectableItemResponse.ProtectableItem.Properties.ReplicationProtectedItemId,
                                    StringComparison.OrdinalIgnoreCase) == 0);

                            if (ReplicationProtectedItem != null)
                            {
                                this.RecoveryPlan.Groups[RecoveryPlan.Groups.IndexOf(tempGroup)].ReplicationProtectedItems.Remove(ReplicationProtectedItem);
                            }
                            else
                            {
                                throw new PSArgumentException(string.Format(Properties.Resources.VMNotFoundInGroup, pe.FriendlyName, this.Group.Name, this.RecoveryPlan.FriendlyName));
                            }
                        }
                        else
                        {
                            throw new PSArgumentException(string.Format(Properties.Resources.GroupNotFoundInRecoveryPlan, this.Group.Name, this.RecoveryPlan.FriendlyName));
                        }
                    }
                    break;
            };

            // Emit the edited in-memory plan to the pipeline.
            this.WriteObject(this.RecoveryPlan);
        }
    }
}
| hovsepm/azure-powershell | src/ResourceManager/SiteRecovery/Commands.SiteRecovery/RecoveryPlan/EditAzureSiteRecoveryRecoveryPlan.cs | C# | apache-2.0 | 9,317 |
// This file was procedurally generated from the following sources:
// - src/dstr-binding-for-await/ary-ptrn-rest-ary-rest.case
// - src/dstr-binding-for-await/default/for-await-of-async-gen-const.template
/*---
description: Rest element containing a rest element (for-await-of statement)
esid: sec-for-in-and-for-of-statements-runtime-semantics-labelledevaluation
features: [destructuring-binding, async-iteration]
flags: [generated, async]
info: |
IterationStatement :
for await ( ForDeclaration of AssignmentExpression ) Statement
[...]
2. Return ? ForIn/OfBodyEvaluation(ForDeclaration, Statement, keyResult,
lexicalBinding, labelSet, async).
13.7.5.13 Runtime Semantics: ForIn/OfBodyEvaluation
[...]
4. Let destructuring be IsDestructuring of lhs.
[...]
6. Repeat
[...]
j. If destructuring is false, then
[...]
k. Else
i. If lhsKind is assignment, then
[...]
ii. Else if lhsKind is varBinding, then
[...]
iii. Else,
1. Assert: lhsKind is lexicalBinding.
2. Assert: lhs is a ForDeclaration.
3. Let status be the result of performing BindingInitialization
for lhs passing nextValue and iterationEnv as arguments.
[...]
13.3.3.6 Runtime Semantics: IteratorBindingInitialization
BindingRestElement : ... BindingPattern
1. Let A be ArrayCreate(0).
[...]
3. Repeat
[...]
b. If iteratorRecord.[[done]] is true, then
i. Return the result of performing BindingInitialization of
BindingPattern with A and environment as the arguments.
[...]
---*/
var values = [1, 2, 3];
var iterCount = 0;

// Async generator driving a for-await-of whose `const` declaration uses a
// rest element that itself contains a rest pattern ([...[...x]]).
async function *fn() {
  for await (const [...[...x]] of [values]) {
    // Per IteratorBindingInitialization, the rest target is a fresh Array
    // built from the iterated values, never the source array itself.
    assert(Array.isArray(x));
    assert.sameValue(x.length, 3);
    assert.sameValue(x[0], 1);
    assert.sameValue(x[1], 2);
    assert.sameValue(x[2], 3);
    assert.notSameValue(x, values);

    iterCount += 1;
  }
}

// Drive the generator once and confirm exactly one loop iteration ran.
fn().next()
  .then(() => assert.sameValue(iterCount, 1, 'iteration occurred as expected'), $DONE)
  .then($DONE, $DONE);
| sebastienros/jint | Jint.Tests.Test262/test/language/statements/for-await-of/async-gen-dstr-const-ary-ptrn-rest-ary-rest.js | JavaScript | bsd-2-clause | 2,180 |
# Homebrew Cask for Progressive Downloader, a macOS download manager.
cask 'progressive-downloader' do
  version '2.11.3'
  sha256 'ca8c5ef946b8c745837d39980b5d8c95d3f15ad28d8f531289163cf6079760b6'

  # The download URL embeds the version, so upgrades must bump both
  # version and sha256 together.
  url "https://www.macpsd.net/update/#{version}/PSD.dmg"
  name 'Progressive Downloader'
  homepage 'https://www.macpsd.net/'

  depends_on macos: '>= :mavericks'

  app 'Progressive Downloader.app'

  # Support files removed by `brew cask zap` beyond the app bundle itself.
  zap delete: [
                '~/Library/Preferences/com.PS.PSD.plist',
                '~/Library/Application Support/Progressive Downloader Data',
                '~/Library/Caches/com.PS.PSD',
              ]
end
| muan/homebrew-cask | Casks/progressive-downloader.rb | Ruby | bsd-2-clause | 547 |
'''
Created on = '10/1/13'
Author = 'mmunn'
Unit test : EUCA-7710 missing input validation on CLC
setUp : Install Credentials, starts instance
test : run euca-bundle-instance, euca-attach-volume and euca-dettach-volume with bad input parameters
for bucket,prefix and device
tearDown : Removes Credentials, terminates instance
cloud.conf:( place in same directory as this test)
IP ADDRESS CENTOS 6.3 64 BZR [CC00 CLC SC00 WS]
IP ADDRESS CENTOS 6.3 64 BZR [NC00]
'''
import unittest
import shutil
from eucaops import Eucaops
class Euca7710(unittest.TestCase):
    """Regression test for EUCA-7710: missing input validation on the CLC.

    Runs euca-bundle-instance, euca-attach-volume and euca-detach-volume
    with malformed bucket, prefix and device arguments and asserts that
    each request is rejected with an InvalidParameterValue error.
    """

    def setUp(self):
        """Install credentials, locate the enabled CLC and create auth artifacts."""
        # ANSI escape codes used to highlight command output in the console.
        self.OK = '\033[1m\033[37m\033[42m'
        self.ENDC = '\033[0m'
        self.conf = 'cloud.conf'
        self.tester = Eucaops(config_file=self.conf, password='foobar')
        # Shell prefix that sources the credentials file before each command.
        self.source = 'source ' + self.tester.credpath + '/eucarc && '
        self.clc1 = self.tester.service_manager.get_enabled_clc()
        self.doAuth()

    def doAuth(self):
        """Create the keypair, security-group rules and API keys for the test."""
        self.keypair = self.tester.add_keypair()
        self.group = self.tester.add_group()
        self.tester.authorize_group(self.group)
        self.tester.authorize_group(self.group, port=3389, protocol='tcp')
        self.skey = self.tester.get_secret_key()
        self.akey = self.tester.get_access_key()

    def runInstances(self, numMax):
        """Start up to numMax instances and remember a running one."""
        self.reservation = self.tester.run_instance(
            self.emi, type='m1.large', keypair=self.keypair.name,
            group=self.group, min=1, max=numMax, is_reachable=False)
        # Make sure the instance is running before issuing commands against it.
        for instance in self.reservation.instances:
            if instance.state == 'running':
                self.instance = instance
                self.instanceid = instance.id

    def runCmdTest(self, cmd):
        """Run cmd on the CLC and assert it fails with InvalidParameterValue.

        Bug fix: the original ignored the cmd parameter and re-read
        self.cmd, which only worked because callers happened to set the
        attribute first; the explicit argument is now used directly.
        """
        self.out = self.clc1.machine.cmd(self.source + cmd)
        print(self.OK + self.out['output'] + self.ENDC)
        # The CLC must reject the malformed parameter with a typed error.
        assert str(self.out).count('InvalidParameterValue') > 0

    def test(self):
        """Exercise bundle/attach/detach commands with invalid parameters."""
        self.emi = self.tester.get_emi()
        self.runInstances(1)

        # Bucket/prefix validation regex on the CLC: ^[a-zA-Z\d\.\-_]{3,255}$
        self.badBucket = 'xx'    # too few characters
        self.badPrefix = 'xx$'   # invalid character
        self.runCmdTest('euca-bundle-instance ' + self.instanceid +
                        ' -b ' + self.badBucket + ' -p goodPrefix -o ' +
                        self.akey + ' -w ' + self.skey)
        self.runCmdTest('euca-bundle-instance ' + self.instanceid +
                        ' -b goodBucket -p ' + self.badPrefix + ' -o ' +
                        self.akey + ' -w ' + self.skey)

        # Device validation regex on the CLC: ^[a-zA-Z\d/]{3,10}$
        self.volume = 'vol-BOGUS1'
        self.badDevice1 = 'sd$'          # invalid character
        self.badDevice2 = 'sdistoolong'  # name longer than 10 characters
        # Bug fix: the original issued the badDevice1 commands twice and
        # never exercised badDevice2 at all.
        for device in (self.badDevice1, self.badDevice2):
            self.runCmdTest('euca-attach-volume -i ' + self.instanceid +
                            ' -d ' + device + ' ' + self.volume)
            self.runCmdTest('euca-detach-volume -i ' + self.instanceid +
                            ' -d ' + device + ' ' + self.volume)

    def tearDown(self):
        """Terminate instances and remove credential/keypair artifacts."""
        # Robustness fix: self.reservation does not exist when setUp or the
        # instance launch failed, so probe with getattr first.
        if getattr(self, 'reservation', None) is not None:
            self.tester.terminate_instances(self.reservation)
        self.tester.delete_keypair(self.keypair)
        self.tester.local('rm ' + self.keypair.name + '.pem')
        shutil.rmtree(self.tester.credpath)
if __name__ == '__main__':
unittest.main() | nagyistoce/eutester | testcases/cloud_admin/3-4/Euca7710.py | Python | bsd-2-clause | 4,008 |
/*
* Copyright (C) 2008 Apple Inc. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
 * A sortable, resizable grid widget built from two HTML tables — a fixed
 * header table and a scrollable data table — that share column widths.
 * @constructor
 * @extends {WebInspector.Widget}
 * @param {!Array.<!WebInspector.DataGrid.ColumnDescriptor>} columnsArray
 * @param {function(!WebInspector.DataGridNode, string, string, string)=} editCallback
 * @param {function(!WebInspector.DataGridNode)=} deleteCallback
 * @param {function()=} refreshCallback
 * @param {function(!WebInspector.ContextMenu, !WebInspector.DataGridNode)=} contextMenuCallback
 */
WebInspector.DataGrid = function(columnsArray, editCallback, deleteCallback, refreshCallback, contextMenuCallback)
{
    WebInspector.Widget.call(this);
    this.registerRequiredCSS("ui_lazy/dataGrid.css");
    this.element.className = "data-grid"; // Override
    this.element.tabIndex = 0;
    this.element.addEventListener("keydown", this._keyDown.bind(this), false);

    var headerContainer = createElementWithClass("div", "header-container");
    /** @type {!Element} */
    this._headerTable = headerContainer.createChild("table", "header");
    /** @type {!Object.<string, !Element>} */
    this._headerTableHeaders = {};

    /** @type {!Element} */
    this._scrollContainer = createElementWithClass("div", "data-container");
    /** @type {!Element} */
    this._dataTable = this._scrollContainer.createChild("table", "data");

    this._dataTable.addEventListener("mousedown", this._mouseDownInDataTable.bind(this));
    this._dataTable.addEventListener("click", this._clickInDataTable.bind(this), true);
    this._dataTable.addEventListener("contextmenu", this._contextMenuInDataTable.bind(this), true);

    // FIXME: Add a createCallback which is different from editCallback and has different
    // behavior when creating a new node.
    if (editCallback)
        this._dataTable.addEventListener("dblclick", this._ondblclick.bind(this), false);
    /** @type {function(!WebInspector.DataGridNode, string, string, string)|undefined} */
    this._editCallback = editCallback;
    /** @type {function(!WebInspector.DataGridNode)|undefined} */
    this._deleteCallback = deleteCallback;
    /** @type {function()|undefined} */
    this._refreshCallback = refreshCallback;
    /** @type {function(!WebInspector.ContextMenu, !WebInspector.DataGridNode)|undefined} */
    this._contextMenuCallback = contextMenuCallback;

    this.element.appendChild(headerContainer);
    this.element.appendChild(this._scrollContainer);

    /** @type {!Element} */
    this._headerRow = createElement("tr");
    /** @type {!Element} */
    this._headerTableColumnGroup = createElement("colgroup");
    /** @type {!Element} */
    this._dataTableColumnGroup = createElement("colgroup");

    // Filler rows pad the data table above/below the rendered range.
    /** @type {!Element} */
    this._topFillerRow = createElementWithClass("tr", "revealed");
    /** @type {!Element} */
    this._bottomFillerRow = createElementWithClass("tr", "revealed");
    this.setVerticalPadding(0, 0);

    /** @type {!Array.<!WebInspector.DataGrid.ColumnDescriptor>} */
    this._columnsArray = columnsArray;
    /** @type {!Array.<!WebInspector.DataGrid.ColumnDescriptor>} */
    this._visibleColumnsArray = columnsArray;
    /** @type {!Object.<string, !WebInspector.DataGrid.ColumnDescriptor>} */
    this._columns = {};
    /** @type {?string} */
    this._cellClass = null;

    // Build one <th> (with optional sort affordances) per column descriptor.
    for (var i = 0; i < columnsArray.length; ++i) {
        var column = columnsArray[i];
        // Note: when column.id is falsy the numeric index is used as identifier.
        var columnIdentifier = column.identifier = column.id || i;
        this._columns[columnIdentifier] = column;
        if (column.disclosure)
            this.disclosureColumnIdentifier = columnIdentifier;

        var cell = createElement("th");
        cell.className = columnIdentifier + "-column";
        cell.columnIdentifier = columnIdentifier;
        this._headerTableHeaders[columnIdentifier] = cell;

        var div = createElement("div");
        if (column.titleDOMFragment)
            div.appendChild(column.titleDOMFragment);
        else
            div.textContent = column.title;
        cell.appendChild(div);

        if (column.sort) {
            cell.classList.add(column.sort);
            this._sortColumnCell = cell;
        }

        if (column.sortable) {
            cell.addEventListener("click", this._clickInHeaderCell.bind(this), false);
            cell.classList.add("sortable");
            cell.createChild("div", "sort-order-icon-container").createChild("div", "sort-order-icon");
        }
    }

    this._headerTable.appendChild(this._headerTableColumnGroup);
    this.headerTableBody.appendChild(this._headerRow);
    this._dataTable.appendChild(this._dataTableColumnGroup);
    this.dataTableBody.appendChild(this._topFillerRow);
    this.dataTableBody.appendChild(this._bottomFillerRow);

    this._refreshHeader();

    /** @type {boolean} */
    this._editing = false;
    /** @type {?WebInspector.DataGridNode} */
    this.selectedNode = null;
    /** @type {boolean} */
    this.expandNodesWhenArrowing = false;
    this.setRootNode(new WebInspector.DataGridNode());
    /** @type {number} */
    this.indentWidth = 15;
    /** @type {!Array.<!Element|{__index: number, __position: number}>} */
    this._resizers = [];
    /** @type {boolean} */
    this._columnWidthsInitialized = false;
    /** @type {number} */
    this._cornerWidth = WebInspector.DataGrid.CornerWidth;
    /** @type {!WebInspector.DataGrid.ResizeMethod} */
    this._resizeMethod = WebInspector.DataGrid.ResizeMethod.Nearest;
}
// Keep in sync with .data-grid col.corner style rule.
WebInspector.DataGrid.CornerWidth = 14;

/** @typedef {!{id: ?string, editable: boolean, longText: ?boolean, sort: !WebInspector.DataGrid.Order, sortable: boolean, align: !WebInspector.DataGrid.Align}} */
WebInspector.DataGrid.ColumnDescriptor;

// Event types dispatched via dispatchEventToListeners.
WebInspector.DataGrid.Events = {
    SelectedNode: "SelectedNode",
    DeselectedNode: "DeselectedNode",
    SortingChanged: "SortingChanged",
    ColumnsResized: "ColumnsResized"
}

/** @enum {string} Sort direction, doubling as the header cell CSS class. */
WebInspector.DataGrid.Order = {
    Ascending: "sort-ascending",
    Descending: "sort-descending"
}

/** @enum {string} Horizontal cell alignment (default is left). */
WebInspector.DataGrid.Align = {
    Center: "center",
    Right: "right"
}
WebInspector.DataGrid.prototype = {
    /**
     * Sets an extra CSS class applied to every data cell.
     * @param {string} cellClass
     */
    setCellClass: function(cellClass)
    {
        this._cellClass = cellClass;
    },

    /**
     * Rebuilds the header row, both column groups and the filler rows from
     * the currently visible columns (plus the trailing "corner" column).
     */
    _refreshHeader: function()
    {
        this._headerTableColumnGroup.removeChildren();
        this._dataTableColumnGroup.removeChildren();
        this._headerRow.removeChildren();
        this._topFillerRow.removeChildren();
        this._bottomFillerRow.removeChildren();

        for (var i = 0; i < this._visibleColumnsArray.length; ++i) {
            var column = this._visibleColumnsArray[i];
            var columnIdentifier = column.identifier;
            var headerColumn = this._headerTableColumnGroup.createChild("col");
            var dataColumn = this._dataTableColumnGroup.createChild("col");
            if (column.width) {
                headerColumn.style.width = column.width;
                dataColumn.style.width = column.width;
            }
            this._headerRow.appendChild(this._headerTableHeaders[columnIdentifier]);
            this._topFillerRow.createChild("td", "top-filler-td");
            this._bottomFillerRow.createChild("td", "bottom-filler-td").columnIdentifier_ = columnIdentifier;
        }

        this._headerRow.createChild("th", "corner");
        this._topFillerRow.createChild("td", "corner").classList.add("top-filler-td");
        this._bottomFillerRow.createChild("td", "corner").classList.add("bottom-filler-td");
        this._headerTableColumnGroup.createChild("col", "corner");
        this._dataTableColumnGroup.createChild("col", "corner");
    },

    /**
     * Sizes the filler rows that pad the data table above and below the
     * rendered range of rows.
     * @param {number} top
     * @param {number} bottom
     * @protected
     */
    setVerticalPadding: function(top, bottom)
    {
        this._topFillerRow.style.height = top + "px";
        if (top || bottom)
            this._bottomFillerRow.style.height = bottom + "px";
        else
            this._bottomFillerRow.style.height = "auto";
    },

    /**
     * Installs rootNode as the invisible root of the node tree, detaching
     * any previous root first.
     * @param {!WebInspector.DataGridNode} rootNode
     * @protected
     */
    setRootNode: function(rootNode)
    {
        if (this._rootNode) {
            this._rootNode.removeChildren();
            this._rootNode.dataGrid = null;
            this._rootNode._isRoot = false;
        }
        /** @type {!WebInspector.DataGridNode} */
        this._rootNode = rootNode;
        rootNode._isRoot = true;
        rootNode.hasChildren = false;
        rootNode._expanded = true;
        rootNode._revealed = true;
        rootNode.dataGrid = this;
    },
    /**
     * @return {!WebInspector.DataGridNode} The invisible root of the node tree.
     */
    rootNode: function()
    {
        return this._rootNode;
    },

    // Double-click handler: begins in-place editing of an editable cell.
    _ondblclick: function(event)
    {
        if (this._editing || this._editingNode)
            return;

        var columnIdentifier = this.columnIdentifierFromNode(event.target);
        if (!columnIdentifier || !this._columns[columnIdentifier].editable)
            return;
        this._startEditing(event.target);
    },

    /**
     * Selects node and opens an inline editor on its cellIndex-th cell.
     * @param {!WebInspector.DataGridNode} node
     * @param {number} cellIndex
     */
    _startEditingColumnOfDataGridNode: function(node, cellIndex)
    {
        this._editing = true;
        /** @type {?WebInspector.DataGridNode} */
        this._editingNode = node;
        this._editingNode.select();

        var element = this._editingNode._element.children[cellIndex];
        WebInspector.InplaceEditor.startEditing(element, this._startEditingConfig(element));
        // Select the cell's entire contents so typing replaces them.
        element.getComponentSelection().setBaseAndExtent(element, 0, element, 1);
    },

    // Begins editing the cell that contains target, falling back to the
    // creation (placeholder) node when target is outside any data node.
    _startEditing: function(target)
    {
        var element = target.enclosingNodeOrSelfWithNodeName("td");
        if (!element)
            return;

        this._editingNode = this.dataGridNodeFromNode(target);
        if (!this._editingNode) {
            if (!this.creationNode)
                return;
            this._editingNode = this.creationNode;
        }

        // Force editing the 1st column when editing the creation node
        if (this._editingNode.isCreationNode)
            return this._startEditingColumnOfDataGridNode(this._editingNode, this._nextEditableColumn(-1));

        this._editing = true;
        WebInspector.InplaceEditor.startEditing(element, this._startEditingConfig(element));
        element.getComponentSelection().setBaseAndExtent(element, 0, element, 1);
    },

    // Switches the grid to inline rendering (no corner column, recompute widths).
    renderInline: function()
    {
        this.element.classList.add("inline");
        this._cornerWidth = 0;
        this.updateWidths();
    },

    // Builds the InplaceEditor config with commit/cancel handlers bound to this grid.
    _startEditingConfig: function(element)
    {
        return new WebInspector.InplaceEditor.Config(this._editingCommitted.bind(this), this._editingCancelled.bind(this), element.textContent);
    },
    /**
     * InplaceEditor commit handler: stores the new cell text, notifies the
     * edit callback, and (for tab/shift-tab) advances editing to the
     * next/previous editable cell or row.
     */
    _editingCommitted: function(element, newText, oldText, context, moveDirection)
    {
        var columnIdentifier = this.columnIdentifierFromNode(element);
        if (!columnIdentifier) {
            this._editingCancelled(element);
            return;
        }
        var column = this._columns[columnIdentifier];
        var cellIndex = this._visibleColumnsArray.indexOf(column);
        var textBeforeEditing = this._editingNode.data[columnIdentifier];
        var currentEditingNode = this._editingNode;

        /**
         * @param {boolean} wasChange
         * @this {WebInspector.DataGrid}
         */
        function moveToNextIfNeeded(wasChange) {
            if (!moveDirection)
                return;

            if (moveDirection === "forward") {
                var firstEditableColumn = this._nextEditableColumn(-1);
                // Don't walk off an untouched creation node's first column.
                if (currentEditingNode.isCreationNode && cellIndex === firstEditableColumn && !wasChange)
                    return;

                var nextEditableColumn = this._nextEditableColumn(cellIndex);
                if (nextEditableColumn !== -1)
                    return this._startEditingColumnOfDataGridNode(currentEditingNode, nextEditableColumn);

                var nextDataGridNode = currentEditingNode.traverseNextNode(true, null, true);
                if (nextDataGridNode)
                    return this._startEditingColumnOfDataGridNode(nextDataGridNode, firstEditableColumn);
                if (currentEditingNode.isCreationNode && wasChange) {
                    // Committing the creation node spawns a fresh one to edit.
                    this.addCreationNode(false);
                    return this._startEditingColumnOfDataGridNode(this.creationNode, firstEditableColumn);
                }
                return;
            }

            if (moveDirection === "backward") {
                var prevEditableColumn = this._nextEditableColumn(cellIndex, true);
                if (prevEditableColumn !== -1)
                    return this._startEditingColumnOfDataGridNode(currentEditingNode, prevEditableColumn);

                var lastEditableColumn = this._nextEditableColumn(this._visibleColumnsArray.length, true);
                var nextDataGridNode = currentEditingNode.traversePreviousNode(true, true);
                if (nextDataGridNode)
                    return this._startEditingColumnOfDataGridNode(nextDataGridNode, lastEditableColumn);
                return;
            }
        }

        if (textBeforeEditing == newText) {
            this._editingCancelled(element);
            moveToNextIfNeeded.call(this, false);
            return;
        }

        // Update the text in the datagrid that we typed
        this._editingNode.data[columnIdentifier] = newText;

        // Make the callback - expects an editing node (table row), the column number that is being edited,
        // the text that used to be there, and the new text.
        this._editCallback(this._editingNode, columnIdentifier, textBeforeEditing, newText);

        if (this._editingNode.isCreationNode)
            this.addCreationNode(false);

        this._editingCancelled(element);
        moveToNextIfNeeded.call(this, true);
    },

    // InplaceEditor cancel handler; also used internally to end an edit session.
    _editingCancelled: function(element)
    {
        this._editing = false;
        this._editingNode = null;
    },

    /**
     * Finds the next (or previous, with moveBackward) editable visible
     * column after cellIndex.
     * @param {number} cellIndex
     * @param {boolean=} moveBackward
     * @return {number} Column index, or -1 when none exists.
     */
    _nextEditableColumn: function(cellIndex, moveBackward)
    {
        var increment = moveBackward ? -1 : 1;
        var columns = this._visibleColumnsArray;
        for (var i = cellIndex + increment; (i >= 0) && (i < columns.length); i += increment) {
            if (columns[i].editable)
                return i;
        }
        return -1;
    },
    /**
     * @return {?string} Identifier of the column the grid is sorted by, or null.
     */
    sortColumnIdentifier: function()
    {
        if (!this._sortColumnCell)
            return null;
        return this._sortColumnCell.columnIdentifier;
    },

    /**
     * @return {?string} The current sort order. Note: when no sort column is
     *     set this reports Ascending (matching isSortOrderAscending), not null.
     */
    sortOrder: function()
    {
        if (!this._sortColumnCell || this._sortColumnCell.classList.contains(WebInspector.DataGrid.Order.Ascending))
            return WebInspector.DataGrid.Order.Ascending;
        if (this._sortColumnCell.classList.contains(WebInspector.DataGrid.Order.Descending))
            return WebInspector.DataGrid.Order.Descending;
        return null;
    },

    /**
     * @return {boolean} True when unsorted or sorted ascending.
     */
    isSortOrderAscending: function()
    {
        return !this._sortColumnCell || this._sortColumnCell.classList.contains(WebInspector.DataGrid.Order.Ascending);
    },

    // Lazily finds (or creates) the <tbody> of the header table.
    get headerTableBody()
    {
        if ("_headerTableBody" in this)
            return this._headerTableBody;

        this._headerTableBody = this._headerTable.getElementsByTagName("tbody")[0];
        if (!this._headerTableBody) {
            this._headerTableBody = this.element.ownerDocument.createElement("tbody");
            this._headerTable.insertBefore(this._headerTableBody, this._headerTable.tFoot);
        }

        return this._headerTableBody;
    },

    // Lazily finds (or creates) the <tbody> of the data table.
    get dataTableBody()
    {
        if ("_dataTableBody" in this)
            return this._dataTableBody;

        this._dataTableBody = this._dataTable.getElementsByTagName("tbody")[0];
        if (!this._dataTableBody) {
            this._dataTableBody = this.element.ownerDocument.createElement("tbody");
            this._dataTable.insertBefore(this._dataTableBody, this._dataTable.tFoot);
        }

        return this._dataTableBody;
    },
    /**
     * Converts raw character-count widths into percentages clamped between
     * minPercent and maxPercent, then redistributes the rounding surplus or
     * deficit so the result sums to 100.
     * @param {!Array.<number>} widths
     * @param {number} minPercent
     * @param {number=} maxPercent
     * @return {!Array.<number>} The same array, mutated in place.
     */
    _autoSizeWidths: function(widths, minPercent, maxPercent)
    {
        if (minPercent)
            minPercent = Math.min(minPercent, Math.floor(100 / widths.length));
        var totalWidth = 0;
        for (var i = 0; i < widths.length; ++i)
            totalWidth += widths[i];
        var totalPercentWidth = 0;
        for (var i = 0; i < widths.length; ++i) {
            var width = Math.round(100 * widths[i] / totalWidth);
            if (minPercent && width < minPercent)
                width = minPercent;
            else if (maxPercent && width > maxPercent)
                width = maxPercent;
            totalPercentWidth += width;
            widths[i] = width;
        }
        // Take back the over-allocation caused by clamping/rounding...
        var recoupPercent = totalPercentWidth - 100;

        while (minPercent && recoupPercent > 0) {
            for (var i = 0; i < widths.length; ++i) {
                if (widths[i] > minPercent) {
                    --widths[i];
                    --recoupPercent;
                    if (!recoupPercent)
                        break;
                }
            }
        }

        // ...or hand back the shortfall.
        while (maxPercent && recoupPercent < 0) {
            for (var i = 0; i < widths.length; ++i) {
                if (widths[i] < maxPercent) {
                    ++widths[i];
                    ++recoupPercent;
                    if (!recoupPercent)
                        break;
                }
            }
        }

        return widths;
    },

    /**
     * Sizes each column proportionally to the longest cell text found in its
     * title and in the first maxDescentLevel+1 generations of nodes.
     * @param {number} minPercent
     * @param {number=} maxPercent
     * @param {number=} maxDescentLevel
     */
    autoSizeColumns: function(minPercent, maxPercent, maxDescentLevel)
    {
        var widths = [];
        for (var i = 0; i < this._columnsArray.length; ++i)
            widths.push((this._columnsArray[i].title || "").length);

        maxDescentLevel = maxDescentLevel || 0;
        var children = this._enumerateChildren(this._rootNode, [], maxDescentLevel + 1);
        for (var i = 0; i < children.length; ++i) {
            var node = children[i];
            for (var j = 0; j < this._columnsArray.length; ++j) {
                var text = node.data[this._columnsArray[j].identifier] || "";
                if (text.length > widths[j])
                    widths[j] = text.length;
            }
        }

        widths = this._autoSizeWidths(widths, minPercent, maxPercent);

        for (var i = 0; i < this._columnsArray.length; ++i)
            this._columnsArray[i].weight = widths[i];
        this._columnWidthsInitialized = false;
        this.updateWidths();
    },
_enumerateChildren: function(rootNode, result, maxLevel)
{
if (!rootNode._isRoot)
result.push(rootNode);
if (!maxLevel)
return;
for (var i = 0; i < rootNode.children.length; ++i)
this._enumerateChildren(rootNode.children[i], result, maxLevel - 1);
return result;
},
    // Widget resize hook: recompute column widths on any size change.
    onResize: function()
    {
        this.updateWidths();
    },

    // Updates the widths of the table, including the positions of the column
    // resizers.
    //
    // IMPORTANT: This function MUST be called once after the element of the
    // DataGrid is attached to its parent element and every subsequent time the
    // width of the parent element is changed in order to make it possible to
    // resize the columns.
    //
    // If this function is not called after the DataGrid is attached to its
    // parent element, then the DataGrid's columns will not be resizable.
    updateWidths: function()
    {
        var headerTableColumns = this._headerTableColumnGroup.children;

        // Use container size to avoid changes of table width caused by change of column widths.
        var tableWidth = this.element.offsetWidth - this._cornerWidth;
        var numColumns = headerTableColumns.length - 1; // Do not process corner column.

        // Do not attempt to use offsets if we're not attached to the document tree yet.
        if (!this._columnWidthsInitialized && this.element.offsetWidth) {
            // Give all the columns initial widths now so that during a resize,
            // when the two columns that get resized get a percent value for
            // their widths, all the other columns already have percent values
            // for their widths.
            for (var i = 0; i < numColumns; i++) {
                var columnWidth = this.headerTableBody.rows[0].cells[i].offsetWidth;
                var column = this._visibleColumnsArray[i];
                if (!column.weight)
                    column.weight = 100 * columnWidth / tableWidth;
            }
            this._columnWidthsInitialized = true;
        }
        this._applyColumnWeights();
    },

    /**
     * Names the grid so its column weights persist in settings under that name.
     * @param {string} name
     */
    setName: function(name)
    {
        this._columnWeightsSetting = WebInspector.settings.createSetting("dataGrid-" + name + "-columnWeights", {});
        this._loadColumnWeights();
    },

    // Restores persisted per-column weights, if a weights setting exists.
    _loadColumnWeights: function()
    {
        if (!this._columnWeightsSetting)
            return;
        var weights = this._columnWeightsSetting.get();
        for (var i = 0; i < this._columnsArray.length; ++i) {
            var column = this._columnsArray[i];
            var weight = weights[column.identifier];
            if (weight)
                column.weight = weight;
        }
        this._applyColumnWeights();
    },

    // Persists the current per-column weights, if a weights setting exists.
    _saveColumnWeights: function()
    {
        if (!this._columnWeightsSetting)
            return;
        var weights = {};
        for (var i = 0; i < this._columnsArray.length; ++i) {
            var column = this._columnsArray[i];
            weights[column.identifier] = column.weight;
        }
        this._columnWeightsSetting.set(weights);
    },

    // Widget show hook: re-apply persisted weights when the grid becomes visible.
    wasShown: function()
    {
       this._loadColumnWeights();
    },

    // Translates relative column weights into pixel widths on both column
    // groups, accumulating offsets so rounding never loses a pixel overall.
    _applyColumnWeights: function()
    {
        var tableWidth = this.element.offsetWidth - this._cornerWidth;
        if (tableWidth <= 0)
            return;

        var sumOfWeights = 0.0;
        for (var i = 0; i < this._visibleColumnsArray.length; ++i)
            sumOfWeights += this._visibleColumnsArray[i].weight;

        var sum = 0;
        var lastOffset = 0;

        for (var i = 0; i < this._visibleColumnsArray.length; ++i) {
            sum += this._visibleColumnsArray[i].weight;
            var offset = (sum * tableWidth / sumOfWeights) | 0;
            var width = (offset - lastOffset) + "px";
            this._headerTableColumnGroup.children[i].style.width = width;
            this._dataTableColumnGroup.children[i].style.width = width;
            lastOffset = offset;
        }

        this._positionResizers();
        this.dispatchEventToListeners(WebInspector.DataGrid.Events.ColumnsResized);
    },
    /**
     * Shows exactly the columns whose identifiers map to true, then rebuilds
     * the header and refreshes every node. (Method name typo — "Visiblity" —
     * is part of the public API and must be kept.)
     * @param {!Object.<string, boolean>} columnsVisibility
     */
    setColumnsVisiblity: function(columnsVisibility)
    {
        this._visibleColumnsArray = [];
        for (var i = 0; i < this._columnsArray.length; ++i) {
            var column = this._columnsArray[i];
            if (columnsVisibility[column.identifier])
                this._visibleColumnsArray.push(column);
        }
        this._refreshHeader();
        this._applyColumnWeights();
        var nodes = this._enumerateChildren(this.rootNode(), [], -1);
        for (var i = 0; i < nodes.length; ++i)
            nodes[i].refresh();
    },

    // The scrollable container holding the data table.
    get scrollContainer()
    {
        return this._scrollContainer;
    },

    // Creates/positions the n-1 draggable resizer divs between n columns,
    // dropping extras when the number of visible columns shrinks.
    _positionResizers: function()
    {
        var headerTableColumns = this._headerTableColumnGroup.children;
        var numColumns = headerTableColumns.length - 1; // Do not process corner column.
        var left = [];
        var resizers = this._resizers;

        while (resizers.length > numColumns - 1)
            resizers.pop().remove();

        for (var i = 0; i < numColumns - 1; i++) {
            // Get the width of the cell in the first (and only) row of the
            // header table in order to determine the width of the column, since
            // it is not possible to query a column for its width.
            left[i] = (left[i-1] || 0) + this.headerTableBody.rows[0].cells[i].offsetWidth;
        }

        // Make n - 1 resizers for n columns.
        for (var i = 0; i < numColumns - 1; i++) {
            var resizer = resizers[i];
            if (!resizer) {
                // This is the first call to updateWidth, so the resizers need
                // to be created.
                resizer = createElement("div");
                resizer.__index = i;
                resizer.classList.add("data-grid-resizer");
                // This resizer is associated with the column to its right.
                WebInspector.installDragHandle(resizer, this._startResizerDragging.bind(this), this._resizerDragging.bind(this), this._endResizerDragging.bind(this), "col-resize");
                this.element.appendChild(resizer);
                resizers.push(resizer);
            }
            if (resizer.__position !== left[i]) {
                resizer.__position = left[i];
                resizer.style.left = left[i] + "px";
            }
        }
    },

    // Appends the editable placeholder ("creation") row used to add entries,
    // converting any existing creation node into a normal one first.
    addCreationNode: function(hasChildren)
    {
        if (this.creationNode)
            this.creationNode.makeNormal();

        var emptyData = {};
        for (var column in this._columns)
            emptyData[column] = null;
        this.creationNode = new WebInspector.CreationDataGridNode(emptyData, hasChildren);
        this.rootNode().appendChild(this.creationNode);
    },
    // Keyboard navigation: arrow keys move/expand/collapse the selection,
    // Backspace/Delete invokes the delete callback, Enter starts editing.
    _keyDown: function(event)
    {
        if (!this.selectedNode || event.shiftKey || event.metaKey || event.ctrlKey || this._editing)
            return;

        var handled = false;
        var nextSelectedNode;
        if (event.keyIdentifier === "Up" && !event.altKey) {
            nextSelectedNode = this.selectedNode.traversePreviousNode(true);
            while (nextSelectedNode && !nextSelectedNode.selectable)
                nextSelectedNode = nextSelectedNode.traversePreviousNode(true);
            handled = nextSelectedNode ? true : false;
        } else if (event.keyIdentifier === "Down" && !event.altKey) {
            nextSelectedNode = this.selectedNode.traverseNextNode(true);
            while (nextSelectedNode && !nextSelectedNode.selectable)
                nextSelectedNode = nextSelectedNode.traverseNextNode(true);
            handled = nextSelectedNode ? true : false;
        } else if (event.keyIdentifier === "Left") {
            if (this.selectedNode.expanded) {
                // Alt+Left collapses the whole subtree.
                if (event.altKey)
                    this.selectedNode.collapseRecursively();
                else
                    this.selectedNode.collapse();
                handled = true;
            } else if (this.selectedNode.parent && !this.selectedNode.parent._isRoot) {
                handled = true;
                if (this.selectedNode.parent.selectable) {
                    nextSelectedNode = this.selectedNode.parent;
                    handled = nextSelectedNode ? true : false;
                } else if (this.selectedNode.parent)
                    this.selectedNode.parent.collapse();
            }
        } else if (event.keyIdentifier === "Right") {
            if (!this.selectedNode.revealed) {
                this.selectedNode.reveal();
                handled = true;
            } else if (this.selectedNode.hasChildren) {
                handled = true;
                if (this.selectedNode.expanded) {
                    nextSelectedNode = this.selectedNode.children[0];
                    handled = nextSelectedNode ? true : false;
                } else {
                    // Alt+Right expands the whole subtree.
                    if (event.altKey)
                        this.selectedNode.expandRecursively();
                    else
                        this.selectedNode.expand();
                }
            }
        } else if (event.keyCode === 8 || event.keyCode === 46) {
            // Backspace (8) or Delete (46).
            if (this._deleteCallback) {
                handled = true;
                this._deleteCallback(this.selectedNode);
                this.changeNodeAfterDeletion();
            }
        } else if (isEnterKey(event)) {
            if (this._editCallback) {
                handled = true;
                this._startEditing(this.selectedNode._element.children[this._nextEditableColumn(-1)]);
            }
        }

        if (nextSelectedNode) {
            nextSelectedNode.reveal();
            nextSelectedNode.select();
        }

        if (handled)
            event.consume(true);
    },

    // After a deletion, moves the selection to the nearest selectable
    // neighbor: first forward (skipping the creation node), else backward.
    changeNodeAfterDeletion: function()
    {
        var nextSelectedNode = this.selectedNode.traverseNextNode(true);
        while (nextSelectedNode && !nextSelectedNode.selectable)
            nextSelectedNode = nextSelectedNode.traverseNextNode(true);

        if (!nextSelectedNode || nextSelectedNode.isCreationNode) {
            nextSelectedNode = this.selectedNode.traversePreviousNode(true);
            while (nextSelectedNode && !nextSelectedNode.selectable)
                nextSelectedNode = nextSelectedNode.traversePreviousNode(true);
        }

        if (nextSelectedNode) {
            nextSelectedNode.reveal();
            nextSelectedNode.select();
        }
    },
    /**
     * Maps a DOM node inside the data table to its owning grid node.
     * @param {!Node} target
     * @return {?WebInspector.DataGridNode}
     */
    dataGridNodeFromNode: function(target)
    {
        var rowElement = target.enclosingNodeOrSelfWithNodeName("tr");
        return rowElement && rowElement._dataGridNode;
    },

    /**
     * Maps a DOM node inside the data table to the identifier of the column
     * its cell belongs to.
     * @param {!Node} target
     * @return {?string}
     */
    columnIdentifierFromNode: function(target)
    {
        var cellElement = target.enclosingNodeOrSelfWithNodeName("td");
        return cellElement && cellElement.columnIdentifier_;
    },

    // Header click handler: toggles/sets the sort column and direction,
    // then fires SortingChanged so owners can re-sort their data.
    _clickInHeaderCell: function(event)
    {
        var cell = event.target.enclosingNodeOrSelfWithNodeName("th");
        if (!cell || (cell.columnIdentifier === undefined) || !cell.classList.contains("sortable"))
            return;

        var sortOrder = WebInspector.DataGrid.Order.Ascending;
        // Clicking the already-sorted column flips it to descending.
        if ((cell === this._sortColumnCell) && this.isSortOrderAscending())
            sortOrder = WebInspector.DataGrid.Order.Descending;

        if (this._sortColumnCell)
            this._sortColumnCell.classList.remove(WebInspector.DataGrid.Order.Ascending, WebInspector.DataGrid.Order.Descending);
        this._sortColumnCell = cell;

        cell.classList.add(sortOrder);

        this.dispatchEventToListeners(WebInspector.DataGrid.Events.SortingChanged);
    },

    /**
     * Programmatically marks a column as the sorted one (visual state only;
     * does not fire SortingChanged).
     * @param {string} columnIdentifier
     * @param {!WebInspector.DataGrid.Order} sortOrder
     */
    markColumnAsSortedBy: function(columnIdentifier, sortOrder)
    {
        if (this._sortColumnCell)
            this._sortColumnCell.classList.remove(WebInspector.DataGrid.Order.Ascending, WebInspector.DataGrid.Order.Descending);
        this._sortColumnCell = this._headerTableHeaders[columnIdentifier];
        this._sortColumnCell.classList.add(sortOrder);
    },

    /**
     * @param {string} columnIdentifier
     * @return {!Element} The <th> header cell for the given column.
     */
    headerTableHeader: function(columnIdentifier)
    {
        return this._headerTableHeaders[columnIdentifier];
    },

    // Mouse-down handler: selects the clicked node (Cmd/meta-click toggles),
    // ignoring clicks on the disclosure triangle.
    _mouseDownInDataTable: function(event)
    {
        var gridNode = this.dataGridNodeFromNode(event.target);
        if (!gridNode || !gridNode.selectable)
            return;

        if (gridNode.isEventWithinDisclosureTriangle(event))
            return;

        if (event.metaKey) {
            if (gridNode.selected)
                gridNode.deselect();
            else
                gridNode.select();
        } else
            gridNode.select();
    },

    // Builds the context menu: Refresh, Add new / Edit "<column>", Delete,
    // plus whatever the owner's contextMenuCallback contributes.
    _contextMenuInDataTable: function(event)
    {
        var contextMenu = new WebInspector.ContextMenu(event);

        var gridNode = this.dataGridNodeFromNode(event.target);
        if (this._refreshCallback && (!gridNode || gridNode !== this.creationNode))
            contextMenu.appendItem(WebInspector.UIString("Refresh"), this._refreshCallback.bind(this));

        if (gridNode && gridNode.selectable && !gridNode.isEventWithinDisclosureTriangle(event)) {
            if (this._editCallback) {
                if (gridNode === this.creationNode)
                    contextMenu.appendItem(WebInspector.UIString.capitalize("Add ^new"), this._startEditing.bind(this, event.target));
                else {
                    var columnIdentifier = this.columnIdentifierFromNode(event.target);
                    if (columnIdentifier && this._columns[columnIdentifier].editable)
                        contextMenu.appendItem(WebInspector.UIString("Edit \"%s\"", this._columns[columnIdentifier].title), this._startEditing.bind(this, event.target));
                }
            }
            if (this._deleteCallback && gridNode !== this.creationNode)
                contextMenu.appendItem(WebInspector.UIString.capitalize("Delete"), this._deleteCallback.bind(this, gridNode));
            if (this._contextMenuCallback)
                this._contextMenuCallback(contextMenu, gridNode);
        }

        contextMenu.show();
    },
_clickInDataTable: function(event)
{
var gridNode = this.dataGridNodeFromNode(event.target);
if (!gridNode || !gridNode.hasChildren)
return;
if (!gridNode.isEventWithinDisclosureTriangle(event))
return;
if (gridNode.expanded) {
if (event.altKey)
gridNode.collapseRecursively();
else
gridNode.collapse();
} else {
if (event.altKey)
gridNode.expandRecursively();
else
gridNode.expand();
}
},
/**
* @param {!WebInspector.DataGrid.ResizeMethod} method
*/
setResizeMethod: function(method)
{
this._resizeMethod = method;
},
/**
* @return {boolean}
*/
_startResizerDragging: function(event)
{
this._currentResizer = event.target;
return true;
},
    /**
     * Drag-move callback: redistributes width between the columns adjacent to
     * the dragged resizer (or per the configured resize method), updates the
     * column weights proportionally, and repositions all resizers.
     */
    _resizerDragging: function(event)
    {
        var resizer = this._currentResizer;
        if (!resizer)
            return;
        // Constrain the dragpoint to be within the containing div of the
        // datagrid.
        var dragPoint = event.clientX - this.element.totalOffsetLeft();
        var firstRowCells = this.headerTableBody.rows[0].cells;
        var leftEdgeOfPreviousColumn = 0;
        // Constrain the dragpoint to be within the space made up by the
        // column directly to the left and the column directly to the right.
        var leftCellIndex = resizer.__index;
        var rightCellIndex = leftCellIndex + 1;
        for (var i = 0; i < leftCellIndex; i++)
            leftEdgeOfPreviousColumn += firstRowCells[i].offsetWidth;
        // Differences for other resize methods: "Last" always resizes against
        // the final column; "First" resizes against column 0.
        if (this._resizeMethod === WebInspector.DataGrid.ResizeMethod.Last) {
            rightCellIndex = this._resizers.length;
        } else if (this._resizeMethod === WebInspector.DataGrid.ResizeMethod.First) {
            leftEdgeOfPreviousColumn += firstRowCells[leftCellIndex].offsetWidth - firstRowCells[0].offsetWidth;
            leftCellIndex = 0;
        }
        var rightEdgeOfNextColumn = leftEdgeOfPreviousColumn + firstRowCells[leftCellIndex].offsetWidth + firstRowCells[rightCellIndex].offsetWidth;
        // Give each column some padding so that they don't disappear.
        var leftMinimum = leftEdgeOfPreviousColumn + this.ColumnResizePadding;
        var rightMaximum = rightEdgeOfNextColumn - this.ColumnResizePadding;
        if (leftMinimum > rightMaximum)
            return;
        dragPoint = Number.constrain(dragPoint, leftMinimum, rightMaximum);
        var position = (dragPoint - this.CenterResizerOverBorderAdjustment);
        resizer.__position = position;
        resizer.style.left = position + "px";
        // Apply the new widths to both the header and the data <colgroup>s so
        // they stay aligned.
        var pxLeftColumn = (dragPoint - leftEdgeOfPreviousColumn) + "px";
        this._headerTableColumnGroup.children[leftCellIndex].style.width = pxLeftColumn;
        this._dataTableColumnGroup.children[leftCellIndex].style.width = pxLeftColumn;
        var pxRightColumn = (rightEdgeOfNextColumn - dragPoint) + "px";
        this._headerTableColumnGroup.children[rightCellIndex].style.width = pxRightColumn;
        this._dataTableColumnGroup.children[rightCellIndex].style.width = pxRightColumn;
        var leftColumn = this._visibleColumnsArray[leftCellIndex];
        var rightColumn = this._visibleColumnsArray[rightCellIndex];
        // Keep the two columns' weights summing to the same total so later
        // weight-based layout reproduces this split.
        if (leftColumn.weight || rightColumn.weight) {
            var sumOfWeights = leftColumn.weight + rightColumn.weight;
            var delta = rightEdgeOfNextColumn - leftEdgeOfPreviousColumn;
            leftColumn.weight = (dragPoint - leftEdgeOfPreviousColumn) * sumOfWeights / delta;
            rightColumn.weight = (rightEdgeOfNextColumn - dragPoint) * sumOfWeights / delta;
        }
        this._positionResizers();
        event.preventDefault();
        this.dispatchEventToListeners(WebInspector.DataGrid.Events.ColumnsResized);
    },
/**
* @param {string} columnId
* @return {number}
*/
columnOffset: function(columnId)
{
if (!this.element.offsetWidth)
return 0;
for (var i = 1; i < this._visibleColumnsArray.length; ++i) {
if (columnId === this._visibleColumnsArray[i].identifier) {
if (this._resizers[i - 1])
return this._resizers[i - 1].__position;
}
}
return 0;
},
    /**
     * Drag-end callback: clears the active resizer and persists the final
     * column weights.
     */
    _endResizerDragging: function(event)
    {
        this._currentResizer = null;
        this._saveColumnWeights();
        this.dispatchEventToListeners(WebInspector.DataGrid.Events.ColumnsResized);
    },
    // Minimum width (px) a column keeps while its neighbor is being resized.
    ColumnResizePadding: 24,
    // Horizontal shift (px) that centers a resizer handle over the cell border.
    CenterResizerOverBorderAdjustment: 3,
    __proto__: WebInspector.Widget.prototype
}
/**
 * Strategies for distributing a resizer drag: Nearest affects the two
 * adjacent columns, First/Last resize against the first/last column.
 * @enum {string}
 */
WebInspector.DataGrid.ResizeMethod = {
    Nearest: "nearest",
    First: "first",
    Last: "last"
}
/**
 * A single row (tree node) of a DataGrid. Rows form a tree; sibling/parent
 * links and the backing <tr> element are managed lazily.
 * @constructor
 * @extends {WebInspector.Object}
 * @param {?Object.<string, *>=} data
 * @param {boolean=} hasChildren
 */
WebInspector.DataGridNode = function(data, hasChildren)
{
    /** @type {?Element} */
    this._element = null;
    /** @type {boolean} */
    this._expanded = false;
    /** @type {boolean} */
    this._selected = false;
    /** @type {number|undefined} */
    this._depth;
    /** @type {boolean|undefined} */
    this._revealed;
    /** @type {boolean} */
    this._attached = false;
    /** @type {?{parent: !WebInspector.DataGridNode, index: number}} */
    this._savedPosition = null;
    /** @type {boolean} */
    this._shouldRefreshChildren = true;
    /** @type {!Object.<string, *>} */
    this._data = data || {};
    /** @type {boolean} */
    this.hasChildren = hasChildren || false;
    /** @type {!Array.<!WebInspector.DataGridNode>} */
    this.children = [];
    /** @type {?WebInspector.DataGrid} */
    this.dataGrid = null;
    /** @type {?WebInspector.DataGridNode} */
    this.parent = null;
    /** @type {?WebInspector.DataGridNode} */
    this.previousSibling = null;
    /** @type {?WebInspector.DataGridNode} */
    this.nextSibling = null;
    // Width in px of the clickable disclosure-triangle hit area.
    /** @type {number} */
    this.disclosureToggleWidth = 10;
}
WebInspector.DataGridNode.prototype = {
    /** @type {boolean} */
    selectable: true,
    /** @type {boolean} */
    _isRoot: false,
    /**
     * Lazily creates the row's <tr> element and cells on first access.
     * @return {!Element}
     */
    element: function()
    {
        if (!this._element) {
            this.createElement();
            this.createCells();
        }
        return /** @type {!Element} */ (this._element);
    },
    /**
     * Creates the <tr> for this node and applies state CSS classes.
     * @protected
     */
    createElement: function()
    {
        this._element = createElement("tr");
        // Back-pointer used by dataGridNodeFromNode() to map DOM -> node.
        this._element._dataGridNode = this;
        if (this.hasChildren)
            this._element.classList.add("parent");
        if (this.expanded)
            this._element.classList.add("expanded");
        if (this.selected)
            this._element.classList.add("selected");
        if (this.revealed)
            this._element.classList.add("revealed");
    },
    /**
     * (Re)builds all cells of the row from the visible columns, plus the
     * trailing "corner" filler cell.
     * @protected
     */
    createCells: function()
    {
        this._element.removeChildren();
        var columnsArray = this.dataGrid._visibleColumnsArray;
        for (var i = 0; i < columnsArray.length; ++i)
            this._element.appendChild(this.createCell(columnsArray[i].identifier));
        this._element.appendChild(this._createTDWithClass("corner"));
    },
    // Per-column cell values keyed by column identifier.
    get data()
    {
        return this._data;
    },
    set data(x)
    {
        this._data = x || {};
        this.refresh();
    },
    // Whether every ancestor is expanded; computed lazily and cached in
    // this._revealed until invalidated.
    get revealed()
    {
        if (this._revealed !== undefined)
            return this._revealed;
        var currentAncestor = this.parent;
        while (currentAncestor && !currentAncestor._isRoot) {
            if (!currentAncestor.expanded) {
                this._revealed = false;
                return false;
            }
            currentAncestor = currentAncestor.parent;
        }
        this._revealed = true;
        return true;
    },
    // Marks the node as a parent; keeps the "parent"/"expanded" CSS classes
    // on the row element in sync.
    set hasChildren(x)
    {
        if (this._hasChildren === x)
            return;
        this._hasChildren = x;
        if (!this._element)
            return;
        this._element.classList.toggle("parent", this._hasChildren);
        this._element.classList.toggle("expanded", this._hasChildren && this.expanded);
    },
    get hasChildren()
    {
        return this._hasChildren;
    },
    // Propagates visibility down the subtree: a child is revealed only if
    // this node is revealed AND expanded.
    set revealed(x)
    {
        if (this._revealed === x)
            return;
        this._revealed = x;
        if (this._element)
            this._element.classList.toggle("revealed", this._revealed);
        for (var i = 0; i < this.children.length; ++i)
            this.children[i].revealed = x && this.expanded;
    },
    /**
     * Tree depth (root children are depth 0); computed lazily and cached.
     * @return {number}
     */
    get depth()
    {
        if (this._depth !== undefined)
            return this._depth;
        if (this.parent && !this.parent._isRoot)
            this._depth = this.parent.depth + 1;
        else
            this._depth = 0;
        return this._depth;
    },
    // Pixel indent of the disclosure cell, proportional to depth.
    get leftPadding()
    {
        return this.depth * this.dataGrid.indentWidth;
    },
    get shouldRefreshChildren()
    {
        return this._shouldRefreshChildren;
    },
    // When set while expanded, immediately re-populates via expand().
    set shouldRefreshChildren(x)
    {
        this._shouldRefreshChildren = x;
        if (x && this.expanded)
            this.expand();
    },
    get selected()
    {
        return this._selected;
    },
    // Convenience wrapper over select()/deselect().
    set selected(x)
    {
        if (x)
            this.select();
        else
            this.deselect();
    },
    get expanded()
    {
        return this._expanded;
    },
    /**
     * Convenience wrapper over expand()/collapse().
     * @param {boolean} x
     */
    set expanded(x)
    {
        if (x)
            this.expand();
        else
            this.collapse();
    },
refresh: function()
{
if (!this.dataGrid)
this._element = null;
if (!this._element)
return;
this.createCells();
},
/**
* @param {string} className
* @return {!Element}
*/
_createTDWithClass: function(className)
{
var cell = createElementWithClass("td", className);
var cellClass = this.dataGrid._cellClass;
if (cellClass)
cell.classList.add(cellClass);
return cell;
},
    /**
     * Creates an empty, styled cell for the given column: alignment class,
     * and — for the disclosure column — indentation matching the node depth.
     * @param {string} columnIdentifier
     * @return {!Element}
     */
    createTD: function(columnIdentifier)
    {
        var cell = this._createTDWithClass(columnIdentifier + "-column");
        cell.columnIdentifier_ = columnIdentifier;
        var alignment = this.dataGrid._columns[columnIdentifier].align;
        if (alignment)
            cell.classList.add(alignment);
        if (columnIdentifier === this.dataGrid.disclosureColumnIdentifier) {
            cell.classList.add("disclosure");
            if (this.leftPadding)
                cell.style.setProperty("padding-left", this.leftPadding + "px");
        }
        return cell;
    },
    /**
     * Creates a populated cell: a DOM node value is embedded directly, any
     * other value is rendered as text (with a tooltip for longText columns).
     * @param {string} columnIdentifier
     * @return {!Element}
     */
    createCell: function(columnIdentifier)
    {
        var cell = this.createTD(columnIdentifier);
        var data = this.data[columnIdentifier];
        if (data instanceof Node) {
            cell.appendChild(data);
        } else {
            cell.textContent = data;
            if (this.dataGrid._columns[columnIdentifier].longText)
                cell.title = data;
        }
        return cell;
    },
    /**
     * Height in px of this row, excluding children.
     * @return {number}
     */
    nodeSelfHeight: function()
    {
        return 16;
    },
    /**
     * Appends a child at the end of this node's children.
     * @param {!WebInspector.DataGridNode} child
     */
    appendChild: function(child)
    {
        this.insertChild(child, this.children.length);
    },
    /**
     * Inserts a child at the given index, re-parenting it if necessary and
     * resetting cached tree state (depth, revealed, attached) for the child's
     * entire subtree so it is recomputed under the new parent.
     * @param {!WebInspector.DataGridNode} child
     * @param {number} index
     */
    insertChild: function(child, index)
    {
        if (!child)
            throw("insertChild: Node can't be undefined or null.");
        if (child.parent === this)
            throw("insertChild: Node is already a child of this node.");
        // Detach from any previous parent first.
        child.remove();
        this.children.splice(index, 0, child);
        this.hasChildren = true;
        child.parent = this;
        child.dataGrid = this.dataGrid;
        child.recalculateSiblings(index);
        child._depth = undefined;
        child._revealed = undefined;
        child._attached = false;
        child._shouldRefreshChildren = true;
        // Invalidate the same cached state on every descendant.
        var current = child.children[0];
        while (current) {
            current.dataGrid = this.dataGrid;
            current._depth = undefined;
            current._revealed = undefined;
            current._attached = false;
            current._shouldRefreshChildren = true;
            current = current.traverseNextNode(false, child, true);
        }
        if (this.expanded)
            child._attach();
        if (!this.revealed)
            child.revealed = false;
    },
    // Detaches this node from its parent, if any.
    remove: function()
    {
        if (this.parent)
            this.parent.removeChild(this);
    },
    /**
     * Removes a direct child: deselects and detaches it from the DOM, splices
     * sibling links around it, and clears its tree pointers.
     * @param {!WebInspector.DataGridNode} child
     */
    removeChild: function(child)
    {
        if (!child)
            throw("removeChild: Node can't be undefined or null.");
        if (child.parent !== this)
            throw("removeChild: Node is not a child of this node.");
        child.deselect();
        child._detach();
        this.children.remove(child, true);
        if (child.previousSibling)
            child.previousSibling.nextSibling = child.nextSibling;
        if (child.nextSibling)
            child.nextSibling.previousSibling = child.previousSibling;
        child.dataGrid = null;
        child.parent = null;
        child.nextSibling = null;
        child.previousSibling = null;
        if (this.children.length <= 0)
            this.hasChildren = false;
    },
    // Removes all children at once: deselect/detach each, clear their tree
    // pointers, then drop the whole array.
    removeChildren: function()
    {
        for (var i = 0; i < this.children.length; ++i) {
            var child = this.children[i];
            child.deselect();
            child._detach();
            child.dataGrid = null;
            child.parent = null;
            child.nextSibling = null;
            child.previousSibling = null;
        }
        this.children = [];
        this.hasChildren = false;
    },
/**
* @param {number} myIndex
*/
recalculateSiblings: function(myIndex)
{
if (!this.parent)
return;
var previousChild = this.parent.children[myIndex - 1] || null;
if (previousChild)
previousChild.nextSibling = this;
this.previousSibling = previousChild;
var nextChild = this.parent.children[myIndex + 1] || null;
if (nextChild)
nextChild.previousSibling = this;
this.nextSibling = nextChild;
},
    // Collapses this node (no-op on the root) and hides its children.
    collapse: function()
    {
        if (this._isRoot)
            return;
        if (this._element)
            this._element.classList.remove("expanded");
        this._expanded = false;
        for (var i = 0; i < this.children.length; ++i)
            this.children[i].revealed = false;
    },
    // Collapses this node and every expanded descendant.
    collapseRecursively: function()
    {
        var item = this;
        while (item) {
            if (item.expanded)
                item.collapse();
            item = item.traverseNextNode(false, this, true);
        }
    },
    // Hook for subclasses to lazily create children; called before expansion
    // and traversal.
    populate: function() { },
    // Expands this node: reveals existing children, or (if marked stale)
    // detaches them, re-populates, and re-attaches the fresh set.
    expand: function()
    {
        if (!this.hasChildren || this.expanded)
            return;
        if (this._isRoot)
            return;
        if (this.revealed && !this._shouldRefreshChildren)
            for (var i = 0; i < this.children.length; ++i)
                this.children[i].revealed = true;
        if (this._shouldRefreshChildren) {
            for (var i = 0; i < this.children.length; ++i)
                this.children[i]._detach();
            this.populate();
            if (this._attached) {
                for (var i = 0; i < this.children.length; ++i) {
                    var child = this.children[i];
                    if (this.revealed)
                        child.revealed = true;
                    child._attach();
                }
            }
            this._shouldRefreshChildren = false;
        }
        if (this._element)
            this._element.classList.add("expanded");
        this._expanded = true;
    },
    // Expands this node and its whole subtree (populating as it goes).
    expandRecursively: function()
    {
        var item = this;
        while (item) {
            item.expand();
            item = item.traverseNextNode(false, this);
        }
    },
    // Expands all ancestors and scrolls this row into view.
    reveal: function()
    {
        if (this._isRoot)
            return;
        var currentAncestor = this.parent;
        while (currentAncestor && !currentAncestor._isRoot) {
            if (!currentAncestor.expanded)
                currentAncestor.expand();
            currentAncestor = currentAncestor.parent;
        }
        this.element().scrollIntoViewIfNeeded(false);
    },
/**
* @param {boolean=} supressSelectedEvent
*/
select: function(supressSelectedEvent)
{
if (!this.dataGrid || !this.selectable || this.selected)
return;
if (this.dataGrid.selectedNode)
this.dataGrid.selectedNode.deselect();
this._selected = true;
this.dataGrid.selectedNode = this;
if (this._element)
this._element.classList.add("selected");
if (!supressSelectedEvent)
this.dataGrid.dispatchEventToListeners(WebInspector.DataGrid.Events.SelectedNode);
},
revealAndSelect: function()
{
if (this._isRoot)
return;
this.reveal();
this.select();
},
/**
* @param {boolean=} supressDeselectedEvent
*/
deselect: function(supressDeselectedEvent)
{
if (!this.dataGrid || this.dataGrid.selectedNode !== this || !this.selected)
return;
this._selected = false;
this.dataGrid.selectedNode = null;
if (this._element)
this._element.classList.remove("selected");
if (!supressDeselectedEvent)
this.dataGrid.dispatchEventToListeners(WebInspector.DataGrid.Events.DeselectedNode);
},
    /**
     * Pre-order traversal to the next node: first child, else next sibling,
     * else the nearest ancestor's next sibling (stopping at stayWithin).
     * @param {boolean} skipHidden Only visit revealed/expanded nodes.
     * @param {?WebInspector.DataGridNode=} stayWithin Subtree boundary.
     * @param {boolean=} dontPopulate Skip lazy child population.
     * @param {!Object=} info Receives depthChange: +1 descended, -N ascended.
     * @return {?WebInspector.DataGridNode}
     */
    traverseNextNode: function(skipHidden, stayWithin, dontPopulate, info)
    {
        if (!dontPopulate && this.hasChildren)
            this.populate();
        if (info)
            info.depthChange = 0;
        var node = (!skipHidden || this.revealed) ? this.children[0] : null;
        if (node && (!skipHidden || this.expanded)) {
            if (info)
                info.depthChange = 1;
            return node;
        }
        if (this === stayWithin)
            return null;
        node = (!skipHidden || this.revealed) ? this.nextSibling : null;
        if (node)
            return node;
        // Walk up until an ancestor has a next sibling (or we hit the root
        // or the stayWithin boundary).
        node = this;
        while (node && !node._isRoot && !((!skipHidden || node.revealed) ? node.nextSibling : null) && node.parent !== stayWithin) {
            if (info)
                info.depthChange -= 1;
            node = node.parent;
        }
        if (!node)
            return null;
        return (!skipHidden || node.revealed) ? node.nextSibling : null;
    },
    /**
     * Pre-order traversal to the previous node: the deepest last descendant
     * of the previous sibling, else the parent (null at the top).
     * @param {boolean} skipHidden Only visit revealed/expanded nodes.
     * @param {boolean=} dontPopulate Skip lazy child population.
     * @return {?WebInspector.DataGridNode}
     */
    traversePreviousNode: function(skipHidden, dontPopulate)
    {
        var node = (!skipHidden || this.revealed) ? this.previousSibling : null;
        if (!dontPopulate && node && node.hasChildren)
            node.populate();
        // Descend to the deepest visible last child of the previous sibling.
        while (node && ((!skipHidden || (node.revealed && node.expanded)) ? node.children[node.children.length - 1] : null)) {
            if (!dontPopulate && node.hasChildren)
                node.populate();
            node = ((!skipHidden || (node.revealed && node.expanded)) ? node.children[node.children.length - 1] : null);
        }
        if (node)
            return node;
        if (!this.parent || this.parent._isRoot)
            return null;
        return this.parent;
    },
    /**
     * True if the event's x position falls inside this row's disclosure
     * triangle hit area (within the disclosure cell).
     * @return {boolean}
     */
    isEventWithinDisclosureTriangle: function(event)
    {
        if (!this.hasChildren)
            return false;
        var cell = event.target.enclosingNodeOrSelfWithNodeName("td");
        if (!cell || !cell.classList.contains("disclosure"))
            return false;
        var left = cell.totalOffsetLeft() + this.leftPadding;
        return event.pageX >= left && event.pageX <= left + this.disclosureToggleWidth;
    },
    // Inserts this row into the data table right after the previous visible
    // node (or the top filler row), then attaches children if expanded.
    _attach: function()
    {
        if (!this.dataGrid || this._attached)
            return;
        this._attached = true;
        var previousNode = this.traversePreviousNode(true, true);
        var previousElement = previousNode ? previousNode.element() : this.dataGrid._topFillerRow;
        this.dataGrid.dataTableBody.insertBefore(this.element(), previousElement.nextSibling);
        if (this.expanded)
            for (var i = 0; i < this.children.length; ++i)
                this.children[i]._attach();
    },
    // Removes this row (and recursively its children) from the DOM.
    _detach: function()
    {
        if (!this._attached)
            return;
        this._attached = false;
        if (this._element)
            this._element.remove();
        for (var i = 0; i < this.children.length; ++i)
            this.children[i]._detach();
        this.wasDetached();
    },
    // Subclass hook invoked after this node's row left the DOM.
    wasDetached: function()
    {
    },
    // Records the current (parent, index) so the node can be moved and later
    // put back via restorePosition().
    savePosition: function()
    {
        if (this._savedPosition)
            return;
        if (!this.parent)
            throw("savePosition: Node must have a parent.");
        this._savedPosition = {
            parent: this.parent,
            index: this.parent.children.indexOf(this)
        };
    },
    // Moves the node back to its saved position. NOTE(review): only re-inserts
    // when the parent changed — if only the index changed under the same
    // parent, the original order is not restored; presumably callers never do
    // that, verify before relying on it.
    restorePosition: function()
    {
        if (!this._savedPosition)
            return;
        if (this.parent !== this._savedPosition.parent)
            this._savedPosition.parent.insertChild(this, this._savedPosition.index);
        this._savedPosition = null;
    },
    __proto__: WebInspector.Object.prototype
}
/**
 * A placeholder "creation" row (e.g. the empty editable row at the bottom of
 * an editable grid) that can be converted into a normal node once filled in.
 * @constructor
 * @extends {WebInspector.DataGridNode}
 */
WebInspector.CreationDataGridNode = function(data, hasChildren)
{
    WebInspector.DataGridNode.call(this, data, hasChildren);
    /** @type {boolean} */
    this.isCreationNode = true;
}
WebInspector.CreationDataGridNode.prototype = {
    // Converts this placeholder into a regular data node.
    makeNormal: function()
    {
        this.isCreationNode = false;
    },
    __proto__: WebInspector.DataGridNode.prototype
}
| guorendong/iridium-browser-ubuntu | third_party/WebKit/Source/devtools/front_end/ui_lazy/DataGrid.js | JavaScript | bsd-3-clause | 57,639 |
#! /usr/bin/env python
#
# fits2pdf.py -- Image a FITS file as a PDF.
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
"""
$ ./fits2pdf.py <fitsfile> <output.pdf>
"""
import sys, os
import logging
from ginga.cairow.ImageViewCairo import ImageViewCairo
import cairo
from ginga import AstroImage
# Log line format shared by the stderr handler installed in main().
STD_FORMAT = '%(asctime)s | %(levelname)1.1s | %(filename)s:%(lineno)d (%(funcName)s) | %(message)s'
# Size of one PDF point, expressed in inches and in centimeters.
point_in = 1/72.0
point_cm = 0.0352777778
def main(options, args):
    """Render the FITS file at args[0] to a single-page PDF at args[1].

    Args:
        options: unused (kept for the conventional (options, args) signature).
        args: [input FITS path, output PDF path].
    """
    logger = logging.getLogger("example1")
    logger.setLevel(logging.INFO)
    fmt = logging.Formatter(STD_FORMAT)
    stderrHdlr = logging.StreamHandler()
    stderrHdlr.setFormatter(fmt)
    logger.addHandler(stderrHdlr)

    fi = ImageViewCairo(logger)
    fi.configure(500, 1000)

    # Load FITS file.
    filepath = args[0]
    image = AstroImage.AstroImage(logger=logger)
    image.load_file(filepath)

    # Make any adjustments to the image that we want.
    fi.set_bg(1.0, 1.0, 1.0)
    fi.set_image(image)
    fi.auto_levels()
    fi.zoom_fit()
    fi.center_image()

    # US-letter page size expressed in PDF points (1/72 inch).
    ht_pts = 11.0 / point_in
    wd_pts = 8.5 / point_in
    off_x, off_y = 0, 0

    outfilepath = args[1]
    # BUG FIX: PDF output is binary data, so the file must be opened in 'wb'
    # mode ('w' text mode can corrupt the stream on platforms that translate
    # newlines). 'with' guarantees the handle is closed on any exit path.
    with open(outfilepath, 'wb') as out_f:
        surface = cairo.PDFSurface(out_f, wd_pts, ht_pts)
        # Resolution (DPI) used for any content cairo must rasterize.
        surface.set_fallback_resolution(300, 300)
        surface.set_device_offset(off_x, off_y)
        try:
            fi.save_image_as_surface(surface)
            surface.show_page()
            surface.flush()
        finally:
            # finish() flushes remaining PDF structure; run it even on error
            # so the surface never outlives the underlying file object.
            surface.finish()
# Script entry point: args are <fitsfile> <output.pdf>.
if __name__ == '__main__':
    main(None, sys.argv[1:])
# END
| rajul/ginga | scripts/fits2pdf.py | Python | bsd-3-clause | 1,754 |
#!/usr/bin/env vpython3
# Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from blinkpy.web_tests.stale_expectation_removal import builders
from unexpected_passes_common import constants
from unexpected_passes_common import data_types
class BuilderRunsTestOfInterestUnittest(unittest.TestCase):
    """Tests for WebTestBuilders._BuilderRunsTestOfInterest."""

    def setUp(self) -> None:
        self.instance = builders.WebTestBuilders(False)

    def testMatch(self) -> None:
        """Tests that a match can be successfully found."""
        test_map = {
            'isolated_scripts': [
                {
                    'isolate_name': 'blink_web_tests',
                },
            ],
        }
        self.assertTrue(
            self.instance._BuilderRunsTestOfInterest(test_map, None))
        # Re-add once WebGPU tests are supported.
        # test_map = {
        #     'isolated_scripts': [
        #         {
        #             'isolate_name': 'webgpu_blink_web_tests',
        #         },
        #     ],
        # }
        # self.assertTrue(
        #     self.instance._BuilderRunsTestOfInterest(test_map, None))

    def testNoMatch(self) -> None:
        # An isolate that is not a web test suite must not match.
        test_map = {
            'isolated_scripts': [
                {
                    'isolate_name': 'foo_web_tests',
                },
            ],
        }
        self.assertFalse(
            self.instance._BuilderRunsTestOfInterest(test_map, None))
class GetFakeCiBuildersUnittest(unittest.TestCase):
    """Tests for WebTestBuilders.GetFakeCiBuilders."""

    def testStringsConvertedToBuilderEntries(self) -> None:
        """Tests that the easier-to-read strings get converted to BuilderEntry."""
        instance = builders.WebTestBuilders(False)
        fake_builders = instance.GetFakeCiBuilders()
        # The fake CI builder should map to its two mirroring try builders.
        ci_builder = data_types.BuilderEntry('linux-blink-rel-dummy',
                                             constants.BuilderTypes.CI, False)
        expected_try = set([
            data_types.BuilderEntry('linux-blink-rel',
                                    constants.BuilderTypes.TRY, False),
            data_types.BuilderEntry('v8_linux_blink_rel',
                                    constants.BuilderTypes.TRY, False)
        ])
        self.assertEqual(fake_builders[ci_builder], expected_try)
class GetNonChromiumBuildersUnittest(unittest.TestCase):
    """Tests for WebTestBuilders.GetNonChromiumBuilders."""

    def testStringsConvertedToBuilderEntries(self) -> None:
        """Tests that the easier-to-read strings get converted to BuilderEntry."""
        instance = builders.WebTestBuilders(False)
        builder = data_types.BuilderEntry('ToTMacOfficial',
                                          constants.BuilderTypes.CI, False)
        self.assertIn(builder, instance.GetNonChromiumBuilders())


if __name__ == '__main__':
    unittest.main(verbosity=2)
| chromium/chromium | third_party/blink/tools/blinkpy/web_tests/stale_expectation_removal/builders_unittest.py | Python | bsd-3-clause | 2,804 |
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package lockedfile creates and manipulates files whose contents should only
// change atomically.
package lockedfile
import (
"fmt"
"io"
"io/ioutil"
"os"
"runtime"
)
// A File is a locked *os.File.
//
// Closing the file releases the lock.
//
// If the program exits while a file is locked, the operating system releases
// the lock but may not do so promptly: callers must ensure that all locked
// files are closed before exiting.
type File struct {
	osFile
	// closed guards against double-Close; set by Close before unlocking.
	closed bool
}

// osFile embeds a *os.File while keeping the pointer itself unexported.
// (When we close a File, it must be the same file descriptor that we opened!)
type osFile struct {
	*os.File
}
// OpenFile is like os.OpenFile, but returns a locked file.
// If flag includes os.O_WRONLY or os.O_RDWR, the file is write-locked;
// otherwise, it is read-locked.
func OpenFile(name string, flag int, perm os.FileMode) (*File, error) {
	var (
		f   = new(File)
		err error
	)
	// openFile (platform-specific) acquires the appropriate advisory lock.
	f.osFile.File, err = openFile(name, flag, perm)
	if err != nil {
		return nil, err
	}

	// Although the operating system will drop locks for open files when the go
	// command exits, we want to hold locks for as little time as possible, and we
	// especially don't want to leave a file locked after we're done with it. Our
	// Close method is what releases the locks, so use a finalizer to report
	// missing Close calls on a best-effort basis.
	runtime.SetFinalizer(f, func(f *File) {
		panic(fmt.Sprintf("lockedfile.File %s became unreachable without a call to Close", f.Name()))
	})

	return f, nil
}
// Open is like os.Open, but returns a read-locked file.
func Open(name string) (*File, error) {
	return OpenFile(name, os.O_RDONLY, 0)
}

// Create is like os.Create, but returns a write-locked file.
func Create(name string) (*File, error) {
	return OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0666)
}

// Edit creates the named file with mode 0666 (before umask),
// but does not truncate existing contents.
//
// If Edit succeeds, methods on the returned File can be used for I/O.
// The associated file descriptor has mode O_RDWR and the file is write-locked.
func Edit(name string) (*File, error) {
	return OpenFile(name, os.O_RDWR|os.O_CREATE, 0666)
}
// Close unlocks and closes the underlying file.
//
// Close may be called multiple times; all calls after the first will return a
// non-nil error.
func (f *File) Close() error {
	if f.closed {
		return &os.PathError{
			Op:   "close",
			Path: f.Name(),
			Err:  os.ErrClosed,
		}
	}
	f.closed = true

	// closeFile (platform-specific) releases the lock and closes the fd.
	err := closeFile(f.osFile.File)
	// The file is closed properly; disarm the missing-Close finalizer.
	runtime.SetFinalizer(f, nil)
	return err
}
// Read opens the named file with a read-lock and returns its contents.
// The lock is released when the file is closed on return.
func Read(name string) ([]byte, error) {
	file, err := Open(name)
	if err != nil {
		return nil, err
	}
	defer file.Close()

	return ioutil.ReadAll(file)
}
// Write opens the named file (creating it with the given permissions if needed),
// then write-locks it and overwrites it with the given content.
func Write(name string, content io.Reader, perm os.FileMode) (err error) {
	f, err := OpenFile(name, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, perm)
	if err != nil {
		return err
	}

	_, err = io.Copy(f, content)
	// Report a Close failure (which releases the lock) only if the copy
	// itself succeeded.
	if closeErr := f.Close(); err == nil {
		err = closeErr
	}
	return err
}
// Transform invokes t with the result of reading the named file, with its lock
// still held.
//
// If t returns a nil error, Transform then writes the returned contents back to
// the file, making a best effort to preserve existing contents on error.
//
// t must not modify the slice passed to it.
func Transform(name string, t func([]byte) ([]byte, error)) (err error) {
	f, err := Edit(name)
	if err != nil {
		return err
	}
	defer f.Close()

	old, err := ioutil.ReadAll(f)
	if err != nil {
		return err
	}

	new, err := t(old)
	if err != nil {
		return err
	}

	if len(new) > len(old) {
		// The overall file size is increasing, so write the tail first: if we're
		// about to run out of space on the disk, we would rather detect that
		// failure before we have overwritten the original contents.
		if _, err := f.WriteAt(new[len(old):], int64(len(old))); err != nil {
			// Make a best effort to remove the incomplete tail.
			f.Truncate(int64(len(old)))
			return err
		}
	}

	// We're about to overwrite the old contents. In case of failure, make a best
	// effort to roll back before we close the file.
	defer func() {
		if err != nil {
			if _, err := f.WriteAt(old, 0); err == nil {
				f.Truncate(int64(len(old)))
			}
		}
	}()

	if len(new) >= len(old) {
		if _, err := f.WriteAt(new[:len(old)], 0); err != nil {
			return err
		}
	} else {
		if _, err := f.WriteAt(new, 0); err != nil {
			return err
		}
		// The overall file size is decreasing, so shrink the file to its final size
		// after writing. We do this after writing (instead of before) so that if
		// the write fails, enough filesystem space will likely still be reserved
		// to contain the previous contents.
		if err := f.Truncate(int64(len(new))); err != nil {
			return err
		}
	}

	return nil
}
| akutz/go | src/cmd/go/internal/lockedfile/lockedfile.go | GO | bsd-3-clause | 5,191 |
<?php
/**
* Zend Framework
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://framework.zend.com/license/new-bsd
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@zend.com so we can send you a copy immediately.
*
* @category Zend
* @package Zend_View
* @subpackage UnitTests
* @copyright Copyright (c) 2005-2011 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @version $Id$
*/
// Call Zend_View_Helper_InlineScriptTest::main() if this source file is executed directly.
// (The constant is checked again at the bottom of the file to trigger main().)
if (!defined("PHPUnit_MAIN_METHOD")) {
    define("PHPUnit_MAIN_METHOD", "Zend_View_Helper_InlineScriptTest::main");
}
/** Zend_View_Helper_InlineScript */
require_once 'Zend/View/Helper/InlineScript.php';
/** Zend_View_Helper_Placeholder_Registry */
require_once 'Zend/View/Helper/Placeholder/Registry.php';
/** Zend_Registry */
require_once 'Zend/Registry.php';
/**
 * Test class for Zend_View_Helper_InlineScript.
 *
 * @category   Zend
 * @package    Zend_View
 * @subpackage UnitTests
 * @copyright  Copyright (c) 2005-2011 Zend Technologies USA Inc. (http://www.zend.com)
 * @license    http://framework.zend.com/license/new-bsd     New BSD License
 * @group      Zend_View
 * @group      Zend_View_Helper
 */
class Zend_View_Helper_InlineScriptTest extends PHPUnit_Framework_TestCase
{
    /**
     * Helper under test; recreated for every test in setUp().
     * @var Zend_View_Helper_InlineScript
     */
    public $helper;

    /**
     * @var string
     */
    public $basePath;

    /**
     * Runs the test methods of this class.
     *
     * @return void
     */
    public static function main()
    {
        $suite  = new PHPUnit_Framework_TestSuite("Zend_View_Helper_InlineScriptTest");
        $result = PHPUnit_TextUI_TestRunner::run($suite);
    }

    /**
     * Sets up the fixture, for example, open a network connection.
     * This method is called before a test is executed.
     *
     * @return void
     */
    public function setUp()
    {
        // Clear any placeholder registry left over from a previous test so
        // each test starts with a fresh container state.
        $regKey = Zend_View_Helper_Placeholder_Registry::REGISTRY_KEY;
        if (Zend_Registry::isRegistered($regKey)) {
            $registry = Zend_Registry::getInstance();
            unset($registry[$regKey]);
        }
        $this->basePath = dirname(__FILE__) . '/_files/modules';
        $this->helper = new Zend_View_Helper_InlineScript();
    }

    /**
     * Tears down the fixture, for example, close a network connection.
     * This method is called after a test is executed.
     *
     * @return void
     */
    public function tearDown()
    {
        unset($this->helper);
    }

    public function testNamespaceRegisteredInPlaceholderRegistryAfterInstantiation()
    {
        $registry = Zend_View_Helper_Placeholder_Registry::getRegistry();
        if ($registry->containerExists('Zend_View_Helper_InlineScript')) {
            $registry->deleteContainer('Zend_View_Helper_InlineScript');
        }
        $this->assertFalse($registry->containerExists('Zend_View_Helper_InlineScript'));
        $helper = new Zend_View_Helper_InlineScript();
        $this->assertTrue($registry->containerExists('Zend_View_Helper_InlineScript'));
    }

    public function testInlineScriptReturnsObjectInstance()
    {
        $placeholder = $this->helper->inlineScript();
        $this->assertTrue($placeholder instanceof Zend_View_Helper_InlineScript);
    }
}
// Call Zend_View_Helper_InlineScriptTest::main() if this source file is executed directly.
// (Runs only when the guard at the top of the file defined the constant.)
if (PHPUnit_MAIN_METHOD == "Zend_View_Helper_InlineScriptTest::main") {
    Zend_View_Helper_InlineScriptTest::main();
}
| jupeter/zf1 | tests/Zend/View/Helper/InlineScriptTest.php | PHP | bsd-3-clause | 3,820 |
// Grunt "connect" (local web server) target configuration.
module.exports = {
    // Test server: keepalive blocks grunt so the server stays up; uses its
    // own livereload port (35728) to avoid clashing with the dev target.
    test: {
        options: {
            port: 8000,
            keepalive: true,
            hostname: 'localhost',
            livereload: 35728,
            open: 'http://<%= connect.test.options.hostname %>:<%= connect.test.options.port %>/test.html'
        }
    },
    // Dev server: opens the preview page; 35729 is the default livereload port.
    dev: {
        options: {
            port: 9000,
            livereload: 35729,
            hostname: 'localhost',
            open: 'http://<%= connect.dev.options.hostname %>:<%= connect.dev.options.port %>/preview/'
        }
    },
}; | TBAPI-0KA/vizabi | grunt/config/connect.js | JavaScript | bsd-3-clause | 536 |
# Generated by Django 2.2.6 on 2019-11-05 16:01
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: updates the choices/default metadata on
    # ScannerRule.action (adds the "Delay auto-approval indefinitely" choice).

    dependencies = [
        ('scanners', '0013_auto_20191105_1522'),
    ]

    operations = [
        migrations.AlterField(
            model_name='scannerrule',
            name='action',
            field=models.PositiveSmallIntegerField(choices=[(1, 'No action'), (20, 'Flag for human review'), (100, 'Delay auto-approval'), (200, 'Delay auto-approval indefinitely')], default=1),
        ),
    ]
| eviljeff/olympia | src/olympia/scanners/migrations/0014_alter_action_choices_on_scannerrule.py | Python | bsd-3-clause | 536 |
// © 2017 and later: Unicode, Inc. and others.
// License & terms of use: http://www.unicode.org/copyright.html
#include "unicode/utypes.h"
#if !UCONFIG_NO_FORMATTING
#include "cstring.h"
#include "unicode/ures.h"
#include "uresimp.h"
#include "charstr.h"
#include "number_formatimpl.h"
#include "unicode/numfmt.h"
#include "number_patternstring.h"
#include "number_utils.h"
#include "unicode/numberformatter.h"
#include "unicode/dcfmtsym.h"
#include "number_scientific.h"
#include "number_compact.h"
#include "uresimp.h"
#include "ureslocs.h"
using namespace icu;
using namespace icu::number;
using namespace icu::number::impl;
MicroPropsGenerator::~MicroPropsGenerator() = default;

// Delegating constructor: public construction always builds the thread-safe
// ("safe") variant.
NumberFormatterImpl::NumberFormatterImpl(const MacroProps& macros, UErrorCode& status)
        : NumberFormatterImpl(macros, true, status) {
}

// One-shot static formatter: builds a throwaway unsafe (non-copying) impl,
// formats inValue into outString, and returns the number of code units written.
int32_t NumberFormatterImpl::formatStatic(const MacroProps& macros, DecimalQuantity& inValue,
                                          FormattedStringBuilder& outString, UErrorCode& status) {
    NumberFormatterImpl impl(macros, false, status);
    MicroProps& micros = impl.preProcessUnsafe(inValue, status);
    if (U_FAILURE(status)) { return 0; }
    int32_t length = writeNumber(micros, inValue, outString, 0, status);
    length += writeAffixes(micros, outString, 0, length, status);
    return length;
}
// One-shot static prefix/suffix extraction using a throwaway unsafe impl.
int32_t NumberFormatterImpl::getPrefixSuffixStatic(const MacroProps& macros, Signum signum,
                                                   StandardPlural::Form plural,
                                                   FormattedStringBuilder& outString, UErrorCode& status) {
    NumberFormatterImpl impl(macros, false, status);
    return impl.getPrefixSuffixUnsafe(signum, plural, outString, status);
}

// NOTE: C++ SPECIFIC DIFFERENCE FROM JAVA:
// The "safe" apply method uses a new MicroProps. In the MicroPropsGenerator, fMicros is copied into the new instance.
// The "unsafe" method simply re-uses fMicros, eliminating the extra copy operation.
// See MicroProps::processQuantity() for details.

// Thread-safe formatting entry point: uses a local MicroProps copy so the
// impl's own state is never mutated. Returns code units written.
int32_t NumberFormatterImpl::format(DecimalQuantity& inValue, FormattedStringBuilder& outString,
                                    UErrorCode& status) const {
    MicroProps micros;
    preProcess(inValue, micros, status);
    if (U_FAILURE(status)) { return 0; }
    int32_t length = writeNumber(micros, inValue, outString, 0, status);
    length += writeAffixes(micros, outString, 0, length, status);
    return length;
}
// Safe pre-processing: runs the micro-props pipeline into caller-owned
// microsOut and applies integer-width adjustments to the quantity.
void NumberFormatterImpl::preProcess(DecimalQuantity& inValue, MicroProps& microsOut,
                                     UErrorCode& status) const {
    if (U_FAILURE(status)) { return; }
    if (fMicroPropsGenerator == nullptr) {
        status = U_INTERNAL_PROGRAM_ERROR;
        return;
    }
    fMicroPropsGenerator->processQuantity(inValue, microsOut, status);
    microsOut.integerWidth.apply(inValue, status);
}

// Unsafe variant: mutates and returns the member fMicros (no copy); not
// usable concurrently. Always returns a reference, even on failure.
MicroProps& NumberFormatterImpl::preProcessUnsafe(DecimalQuantity& inValue, UErrorCode& status) {
    if (U_FAILURE(status)) {
        return fMicros; // must always return a value
    }
    if (fMicroPropsGenerator == nullptr) {
        status = U_INTERNAL_PROGRAM_ERROR;
        return fMicros; // must always return a value
    }
    fMicroPropsGenerator->processQuantity(inValue, fMicros, status);
    fMicros.integerWidth.apply(inValue, status);
    return fMicros;
}
int32_t NumberFormatterImpl::getPrefixSuffix(Signum signum, StandardPlural::Form plural,
                                             FormattedStringBuilder& outString, UErrorCode& status) const {
    if (U_FAILURE(status)) {
        return 0;
    }
    // #13453: DecimalFormat wants the affixes from the pattern only (modMiddle,
    // aka pattern modifier). Safe path: use fImmutablePatternModifier.
    const Modifier* mod = fImmutablePatternModifier->getModifier(signum, plural);
    mod->apply(outString, 0, 0, status);
    return U_FAILURE(status) ? 0 : mod->getPrefixLength();
}
int32_t NumberFormatterImpl::getPrefixSuffixUnsafe(Signum signum, StandardPlural::Form plural,
                                                   FormattedStringBuilder& outString, UErrorCode& status) {
    if (U_FAILURE(status)) {
        return 0;
    }
    // #13453: affixes come from the pattern modifier alone. Unsafe path:
    // mutate and reuse the shared fPatternModifier.
    fPatternModifier->setNumberProperties(signum, plural);
    fPatternModifier->apply(outString, 0, 0, status);
    return U_FAILURE(status) ? 0 : fPatternModifier->getPrefixLength();
}
// Builds the MicroPropsGenerator chain for the given macros. `safe` selects
// the immutable (thread-safe) pattern modifier; see macrosToMicroGenerator().
NumberFormatterImpl::NumberFormatterImpl(const MacroProps& macros, bool safe, UErrorCode& status) {
    fMicroPropsGenerator = macrosToMicroGenerator(macros, safe, status);
}
//////////
// Translates MacroProps (user-level settings) into the MicroPropsGenerator
// chain consumed by format(), populating default values in fMicros along the
// way. The chain is built back-to-front: each handler wraps the previous
// `chain`. `safe` selects the immutable (thread-safe) pattern modifier path.
// Returns nullptr on failure (with `status` set).
//
// FIX: the ScientificHandler construction below previously read
// "¯os.notation" — HTML-entity corruption of "&macros.notation" — which
// does not compile. The address-of expression has been restored.
const MicroPropsGenerator*
NumberFormatterImpl::macrosToMicroGenerator(const MacroProps& macros, bool safe, UErrorCode& status) {
    if (U_FAILURE(status)) { return nullptr; }
    const MicroPropsGenerator* chain = &fMicros;
    // Check that macros is error-free before continuing.
    if (macros.copyErrorTo(status)) {
        return nullptr;
    }
    // TODO: Accept currency symbols from DecimalFormatSymbols?
    // Pre-compute a few values for efficiency.
    bool isCurrency = utils::unitIsCurrency(macros.unit);
    bool isNoUnit = utils::unitIsNoUnit(macros.unit);
    bool isPercent = utils::unitIsPercent(macros.unit);
    bool isPermille = utils::unitIsPermille(macros.unit);
    bool isAccounting =
            macros.sign == UNUM_SIGN_ACCOUNTING || macros.sign == UNUM_SIGN_ACCOUNTING_ALWAYS ||
            macros.sign == UNUM_SIGN_ACCOUNTING_EXCEPT_ZERO;
    CurrencyUnit currency(u"", status);
    if (isCurrency) {
        currency = CurrencyUnit(macros.unit, status); // Restore CurrencyUnit from MeasureUnit
    }
    UNumberUnitWidth unitWidth = UNUM_UNIT_WIDTH_SHORT;
    if (macros.unitWidth != UNUM_UNIT_WIDTH_COUNT) {
        unitWidth = macros.unitWidth;
    }
    bool isCldrUnit = !isCurrency && !isNoUnit &&
        (unitWidth == UNUM_UNIT_WIDTH_FULL_NAME || !(isPercent || isPermille));
    // Select the numbering system.
    LocalPointer<const NumberingSystem> nsLocal;
    const NumberingSystem* ns;
    if (macros.symbols.isNumberingSystem()) {
        ns = macros.symbols.getNumberingSystem();
    } else {
        // TODO: Is there a way to avoid creating the NumberingSystem object?
        ns = NumberingSystem::createInstance(macros.locale, status);
        // Give ownership to the function scope.
        nsLocal.adoptInstead(ns);
    }
    const char* nsName = U_SUCCESS(status) ? ns->getName() : "latn";
    uprv_strncpy(fMicros.nsName, nsName, 8);
    fMicros.nsName[8] = 0; // guarantee NUL-terminated
    // Resolve the symbols. Do this here because currency may need to customize them.
    if (macros.symbols.isDecimalFormatSymbols()) {
        fMicros.symbols = macros.symbols.getDecimalFormatSymbols();
    } else {
        LocalPointer<DecimalFormatSymbols> newSymbols(
            new DecimalFormatSymbols(macros.locale, *ns, status), status);
        if (U_FAILURE(status)) {
            return nullptr;
        }
        if (isCurrency) {
            newSymbols->setCurrency(currency.getISOCurrency(), status);
            if (U_FAILURE(status)) {
                return nullptr;
            }
        }
        fMicros.symbols = newSymbols.getAlias();
        fSymbols.adoptInstead(newSymbols.orphan());
    }
    // Load and parse the pattern string. It is used for grouping sizes and affixes only.
    // If we are formatting currency, check for a currency-specific pattern.
    const char16_t* pattern = nullptr;
    if (isCurrency && fMicros.symbols->getCurrencyPattern() != nullptr) {
        pattern = fMicros.symbols->getCurrencyPattern();
    }
    if (pattern == nullptr) {
        CldrPatternStyle patternStyle;
        if (isCldrUnit) {
            patternStyle = CLDR_PATTERN_STYLE_DECIMAL;
        } else if (isPercent || isPermille) {
            patternStyle = CLDR_PATTERN_STYLE_PERCENT;
        } else if (!isCurrency || unitWidth == UNUM_UNIT_WIDTH_FULL_NAME) {
            patternStyle = CLDR_PATTERN_STYLE_DECIMAL;
        } else if (isAccounting) {
            // NOTE: Although ACCOUNTING and ACCOUNTING_ALWAYS are only supported in currencies right now,
            // the API contract allows us to add support to other units in the future.
            patternStyle = CLDR_PATTERN_STYLE_ACCOUNTING;
        } else {
            patternStyle = CLDR_PATTERN_STYLE_CURRENCY;
        }
        pattern = utils::getPatternForStyle(macros.locale, nsName, patternStyle, status);
        if (U_FAILURE(status)) {
            return nullptr;
        }
    }
    auto patternInfo = new ParsedPatternInfo();
    if (patternInfo == nullptr) {
        status = U_MEMORY_ALLOCATION_ERROR;
        return nullptr;
    }
    fPatternInfo.adoptInstead(patternInfo);
    PatternParser::parseToPatternInfo(UnicodeString(pattern), *patternInfo, status);
    if (U_FAILURE(status)) {
        return nullptr;
    }
    /////////////////////////////////////////////////////////////////////////////////////
    /// START POPULATING THE DEFAULT MICROPROPS AND BUILDING THE MICROPROPS GENERATOR ///
    /////////////////////////////////////////////////////////////////////////////////////
    // Multiplier
    if (macros.scale.isValid()) {
        fMicros.helpers.multiplier.setAndChain(macros.scale, chain);
        chain = &fMicros.helpers.multiplier;
    }
    // Rounding strategy
    Precision precision;
    if (!macros.precision.isBogus()) {
        precision = macros.precision;
    } else if (macros.notation.fType == Notation::NTN_COMPACT) {
        precision = Precision::integer().withMinDigits(2);
    } else if (isCurrency) {
        precision = Precision::currency(UCURR_USAGE_STANDARD);
    } else {
        precision = Precision::maxFraction(6);
    }
    UNumberFormatRoundingMode roundingMode;
    if (macros.roundingMode != kDefaultMode) {
        roundingMode = macros.roundingMode;
    } else {
        // Temporary until ICU 64
        roundingMode = precision.fRoundingMode;
    }
    fMicros.rounder = {precision, roundingMode, currency, status};
    if (U_FAILURE(status)) {
        return nullptr;
    }
    // Grouping strategy
    if (!macros.grouper.isBogus()) {
        fMicros.grouping = macros.grouper;
    } else if (macros.notation.fType == Notation::NTN_COMPACT) {
        // Compact notation uses minGrouping by default since ICU 59
        fMicros.grouping = Grouper::forStrategy(UNUM_GROUPING_MIN2);
    } else {
        fMicros.grouping = Grouper::forStrategy(UNUM_GROUPING_AUTO);
    }
    fMicros.grouping.setLocaleData(*fPatternInfo, macros.locale);
    // Padding strategy
    if (!macros.padder.isBogus()) {
        fMicros.padding = macros.padder;
    } else {
        fMicros.padding = Padder::none();
    }
    // Integer width
    if (!macros.integerWidth.isBogus()) {
        fMicros.integerWidth = macros.integerWidth;
    } else {
        fMicros.integerWidth = IntegerWidth::standard();
    }
    // Sign display
    if (macros.sign != UNUM_SIGN_COUNT) {
        fMicros.sign = macros.sign;
    } else {
        fMicros.sign = UNUM_SIGN_AUTO;
    }
    // Decimal mark display
    if (macros.decimal != UNUM_DECIMAL_SEPARATOR_COUNT) {
        fMicros.decimal = macros.decimal;
    } else {
        fMicros.decimal = UNUM_DECIMAL_SEPARATOR_AUTO;
    }
    // Use monetary separator symbols
    fMicros.useCurrency = isCurrency;
    // Inner modifier (scientific notation)
    if (macros.notation.fType == Notation::NTN_SCIENTIFIC) {
        // Restored from mojibake: was "¯os.notation" (corrupted "&macros.notation").
        auto newScientificHandler = new ScientificHandler(&macros.notation, fMicros.symbols, chain);
        if (newScientificHandler == nullptr) {
            status = U_MEMORY_ALLOCATION_ERROR;
            return nullptr;
        }
        fScientificHandler.adoptInstead(newScientificHandler);
        chain = fScientificHandler.getAlias();
    } else {
        // No inner modifier required
        fMicros.modInner = &fMicros.helpers.emptyStrongModifier;
    }
    // Middle modifier (patterns, positive/negative, currency symbols, percent)
    auto patternModifier = new MutablePatternModifier(false);
    if (patternModifier == nullptr) {
        status = U_MEMORY_ALLOCATION_ERROR;
        return nullptr;
    }
    fPatternModifier.adoptInstead(patternModifier);
    patternModifier->setPatternInfo(
        macros.affixProvider != nullptr ? macros.affixProvider
                                        : static_cast<const AffixPatternProvider*>(fPatternInfo.getAlias()),
        kUndefinedField);
    patternModifier->setPatternAttributes(fMicros.sign, isPermille);
    if (patternModifier->needsPlurals()) {
        patternModifier->setSymbols(
            fMicros.symbols,
            currency,
            unitWidth,
            resolvePluralRules(macros.rules, macros.locale, status),
            status);
    } else {
        patternModifier->setSymbols(fMicros.symbols, currency, unitWidth, nullptr, status);
    }
    if (safe) {
        fImmutablePatternModifier.adoptInstead(patternModifier->createImmutable(status));
    }
    if (U_FAILURE(status)) {
        return nullptr;
    }
    // Outer modifier (CLDR units and currency long names)
    if (isCldrUnit) {
        fLongNameHandler.adoptInstead(
            LongNameHandler::forMeasureUnit(
                macros.locale,
                macros.unit,
                macros.perUnit,
                unitWidth,
                resolvePluralRules(macros.rules, macros.locale, status),
                chain,
                status));
        chain = fLongNameHandler.getAlias();
    } else if (isCurrency && unitWidth == UNUM_UNIT_WIDTH_FULL_NAME) {
        fLongNameHandler.adoptInstead(
            LongNameHandler::forCurrencyLongNames(
                macros.locale,
                currency,
                resolvePluralRules(macros.rules, macros.locale, status),
                chain,
                status));
        chain = fLongNameHandler.getAlias();
    } else {
        // No outer modifier required
        fMicros.modOuter = &fMicros.helpers.emptyWeakModifier;
    }
    if (U_FAILURE(status)) {
        return nullptr;
    }
    // Compact notation
    if (macros.notation.fType == Notation::NTN_COMPACT) {
        CompactType compactType = (isCurrency && unitWidth != UNUM_UNIT_WIDTH_FULL_NAME)
                                  ? CompactType::TYPE_CURRENCY : CompactType::TYPE_DECIMAL;
        auto newCompactHandler = new CompactHandler(
            macros.notation.fUnion.compactStyle,
            macros.locale,
            nsName,
            compactType,
            resolvePluralRules(macros.rules, macros.locale, status),
            patternModifier,
            safe,
            chain,
            status);
        if (newCompactHandler == nullptr) {
            status = U_MEMORY_ALLOCATION_ERROR;
            return nullptr;
        }
        fCompactHandler.adoptInstead(newCompactHandler);
        chain = fCompactHandler.getAlias();
    }
    if (U_FAILURE(status)) {
        return nullptr;
    }
    // Always add the pattern modifier as the last element of the chain.
    if (safe) {
        fImmutablePatternModifier->addToChain(chain);
        chain = fImmutablePatternModifier.getAlias();
    } else {
        patternModifier->addToChain(chain);
        chain = patternModifier;
    }
    return chain;
}
const PluralRules*
NumberFormatterImpl::resolvePluralRules(const PluralRules* rulesPtr, const Locale& locale,
                                        UErrorCode& status) {
    // An explicitly supplied rules object always wins.
    if (rulesPtr != nullptr) {
        return rulesPtr;
    }
    // Otherwise create the locale's rules once and cache them on the instance.
    if (fRules.isNull()) {
        fRules.adoptInstead(PluralRules::forLocale(locale, status));
    }
    return fRules.getAlias();
}
int32_t NumberFormatterImpl::writeAffixes(const MicroProps& micros, FormattedStringBuilder& string,
                                          int32_t start, int32_t end, UErrorCode& status) {
    // The inner modifier is "strong" and is always applied directly.
    int32_t written = micros.modInner->apply(string, start, end, status);
    if (micros.padding.isValid()) {
        // Padding applies the middle and outer modifiers itself while
        // inserting pad characters as needed.
        written += micros.padding.padAndApply(
                *micros.modMiddle, *micros.modOuter, string, start, written + end, status);
    } else {
        written += micros.modMiddle->apply(string, start, written + end, status);
        written += micros.modOuter->apply(string, start, written + end, status);
    }
    return written;
}
// Writes the digits (or the infinity/NaN symbol) of `quantity` into `string`
// at `index`. Returns the number of units inserted.
int32_t NumberFormatterImpl::writeNumber(const MicroProps& micros, DecimalQuantity& quantity,
                                         FormattedStringBuilder& string, int32_t index,
                                         UErrorCode& status) {
    int32_t length = 0;
    if (quantity.isInfinite()) {
        length += string.insert(
            length + index,
            micros.symbols->getSymbol(DecimalFormatSymbols::ENumberFormatSymbol::kInfinitySymbol),
            {UFIELD_CATEGORY_NUMBER, UNUM_INTEGER_FIELD},
            status);
    } else if (quantity.isNaN()) {
        length += string.insert(
            length + index,
            micros.symbols->getSymbol(DecimalFormatSymbols::ENumberFormatSymbol::kNaNSymbol),
            {UFIELD_CATEGORY_NUMBER, UNUM_INTEGER_FIELD},
            status);
    } else {
        // Add the integer digits
        length += writeIntegerDigits(micros, quantity, string, length + index, status);
        // Add the decimal point (monetary separator when formatting currency)
        if (quantity.getLowerDisplayMagnitude() < 0 || micros.decimal == UNUM_DECIMAL_SEPARATOR_ALWAYS) {
            length += string.insert(
                length + index,
                micros.useCurrency ? micros.symbols->getSymbol(
                    DecimalFormatSymbols::ENumberFormatSymbol::kMonetarySeparatorSymbol) : micros
                    .symbols
                    ->getSymbol(
                        DecimalFormatSymbols::ENumberFormatSymbol::kDecimalSeparatorSymbol),
                {UFIELD_CATEGORY_NUMBER, UNUM_DECIMAL_SEPARATOR_FIELD},
                status);
        }
        // Add the fraction digits
        length += writeFractionDigits(micros, quantity, string, length + index, status);
        if (length == 0) {
            // Force output of the digit for value 0
            length += utils::insertDigitFromSymbols(
                string,
                index,
                0,
                *micros.symbols,
                {UFIELD_CATEGORY_NUMBER, UNUM_INTEGER_FIELD},
                status);
        }
    }
    return length;
}
// Writes the integer digits of `quantity`. Digits are fetched
// least-significant first (getDigit(0) is the ones place) and each is
// inserted at the fixed `index`, which prepends progressively more
// significant digits to the left of those already written.
int32_t NumberFormatterImpl::writeIntegerDigits(const MicroProps& micros, DecimalQuantity& quantity,
                                                FormattedStringBuilder& string, int32_t index,
                                                UErrorCode& status) {
    int length = 0;
    int integerCount = quantity.getUpperDisplayMagnitude() + 1;
    for (int i = 0; i < integerCount; i++) {
        // Add grouping separator (monetary variant when formatting currency)
        if (micros.grouping.groupAtPosition(i, quantity)) {
            length += string.insert(
                index,
                micros.useCurrency ? micros.symbols->getSymbol(
                    DecimalFormatSymbols::ENumberFormatSymbol::kMonetaryGroupingSeparatorSymbol)
                                   : micros.symbols->getSymbol(
                                       DecimalFormatSymbols::ENumberFormatSymbol::kGroupingSeparatorSymbol),
                {UFIELD_CATEGORY_NUMBER, UNUM_GROUPING_SEPARATOR_FIELD},
                status);
        }
        // Get and append the next digit value
        int8_t nextDigit = quantity.getDigit(i);
        length += utils::insertDigitFromSymbols(
            string,
            index,
            nextDigit,
            *micros.symbols,
            {UFIELD_CATEGORY_NUMBER,
             UNUM_INTEGER_FIELD},
            status);
    }
    return length;
}
int32_t NumberFormatterImpl::writeFractionDigits(const MicroProps& micros, DecimalQuantity& quantity,
                                                 FormattedStringBuilder& string, int32_t index,
                                                 UErrorCode& status) {
    // Fraction digits live at magnitudes -1, -2, ..., lowerDisplayMagnitude,
    // and are appended left-to-right after whatever was already written.
    int written = 0;
    int count = -quantity.getLowerDisplayMagnitude();
    for (int position = 0; position < count; position++) {
        int8_t digit = quantity.getDigit(-position - 1);
        written += utils::insertDigitFromSymbols(
            string,
            written + index,
            digit,
            *micros.symbols,
            {UFIELD_CATEGORY_NUMBER, UNUM_FRACTION_FIELD},
            status);
    }
    return written;
}
#endif /* #if !UCONFIG_NO_FORMATTING */
| endlessm/chromium-browser | third_party/icu/source/i18n/number_formatimpl.cpp | C++ | bsd-3-clause | 21,061 |
# Generic tests that all raw classes should run
from os import path as op
from numpy.testing import assert_allclose
from mne.datasets import testing
from mne.io import Raw
def _test_concat(reader, *args):
"""Test concatenation of raw classes that allow not preloading"""
data = None
for preload in (True, False):
raw1 = reader(*args, preload=preload)
raw2 = reader(*args, preload=preload)
raw1.append(raw2)
raw1.preload_data()
if data is None:
data = raw1[:, :][0]
assert_allclose(data, raw1[:, :][0])
for first_preload in (True, False):
raw = reader(*args, preload=first_preload)
data = raw[:, :][0]
for preloads in ((True, True), (True, False), (False, False)):
for last_preload in (True, False):
print(first_preload, preloads, last_preload)
raw1 = raw.crop(0, 0.4999)
if preloads[0]:
raw1.preload_data()
raw2 = raw.crop(0.5, None)
if preloads[1]:
raw2.preload_data()
raw1.append(raw2)
if last_preload:
raw1.preload_data()
assert_allclose(data, raw1[:, :][0])
@testing.requires_testing_data
def test_time_index():
    """Test indexing of raw times."""
    fif_path = op.join(op.dirname(__file__), '..', '..', 'io', 'tests',
                       'data', 'test_raw.fif')
    raw = Raw(fif_path)
    # Legacy (non-rounding) indexing collapses distinct times onto the same
    # sample, so duplicates are expected in the result.
    truncated = raw.time_as_index(raw.times)
    assert(len(set(truncated)) != len(truncated))
    # Rounding-based indexing maps each time to a unique sample index.
    rounded = raw.time_as_index(raw.times, use_rounding=True)
    assert(len(set(rounded)) == len(rounded))
| trachelr/mne-python | mne/io/tests/test_raw.py | Python | bsd-3-clause | 1,809 |
# Creates the spree_promotion_categories table and adds an optional
# promotion_category_id reference column (with index) to spree_promotions.
class CreateSpreePromotionCategories < ActiveRecord::Migration[4.2]
  def change
    # Promotion categories are simple named groupings for promotions.
    create_table :spree_promotion_categories do |t|
      t.string :name
      t.timestamps null: false, precision: 6
    end
    # Link each promotion to at most one category; indexed for lookups.
    add_column :spree_promotions, :promotion_category_id, :integer
    add_index :spree_promotions, :promotion_category_id
  end
end
| ayb/spree | core/db/migrate/20140715182625_create_spree_promotion_categories.rb | Ruby | bsd-3-clause | 341 |
<?php
namespace Vmwarephp;
use Vmwarephp\Exception as Ex;
class Vhost {
	private $service;

	function __construct($host, $username, $password) {
		$this->host = $host;
		$this->username = $username;
		$this->password = $password;
	}

	function getPort() {
		// Fall back to the default HTTPS port when the host string has none.
		$parsedPort = parse_url($this->host, PHP_URL_PORT);
		if ($parsedPort) {
			return $parsedPort;
		}
		return '443';
	}

	function __get($propertyName) {
		if (isset($this->$propertyName)) {
			return $this->$propertyName;
		}
		throw new \InvalidArgumentException('Property ' . $propertyName . ' not set on this object!');
	}

	function __set($propertyName, $value) {
		$this->validateProperty($propertyName, $value);
		$this->$propertyName = $value;
	}

	function __call($method, $arguments) {
		// Lazily connect before forwarding any service call.
		$this->initializeService();
		return call_user_func_array(array($this->service, $method), $arguments);
	}

	function getApiType() {
		return $this->getServiceContent()->about->apiType;
	}

	function changeService(\Vmwarephp\Service $service) {
		$this->service = $service;
	}

	private function initializeService() {
		if (!$this->service)
			$this->service = \Vmwarephp\Factory\Service::makeConnected($this);
	}

	private function validateProperty($propertyName, $value) {
		// Connection credentials must never be blank.
		if (empty($value) && in_array($propertyName, array('host', 'username', 'password')))
			throw new Ex\InvalidVhost('Vhost ' . ucfirst($propertyName) . ' cannot be empty!');
	}
}
| mattiasgeniar/vmwarephp | library/Vmwarephp/Vhost.php | PHP | bsd-3-clause | 1,379 |
/**
* Copyright (C) 2016 Turi
* All rights reserved.
*
* This software may be modified and distributed under the terms
* of the BSD license. See the LICENSE file for details.
*/
#ifndef GRAPHLAB_LOGGER_LOG_ROTATE_HPP
#define GRAPHLAB_LOGGER_LOG_ROTATE_HPP
#include <cstddef>
#include <string>
namespace graphlab {
/**
* Sets up log rotation.
* The basic procedure is that it will generate files of the form
*
* \verbatim
* [log_file_name].0
* [log_file_name].1
* [log_file_name].2
* etc.
* \endverbatim
*
* When truncate_limit is set, a maximum number of files is maintained.
* Beyond which, older files are deleted.
*
* A symlink [log_file_name].current is also created which always points to the
* most recent log file.
*
* If log rotation has already been set up, this will stop
 * the log rotation and begin a new one.
*
* Not safe for concurrent use.
*
* \param log_file_name The prefix to output to. Logs will emit to
* [log_file_name].0, [log_file_name].1, etc.
* \param log_interval The number of seconds between rotations
* \param truncate_limit The maximum number of files to maintain. Must be >= 1
*/
void begin_log_rotation(std::string log_file_name,
size_t log_interval,
size_t truncate_limit);
/**
* Stops log rotation.
*
* No-op if log rotation was not started.
*
* Not safe for concurrent use.
*/
void stop_log_rotation();
} // graphlab
#endif // GRAPHLAB_LOGGER_LOG_ROTATE_HPP
| dato-code/SFrame | oss_src/logger/log_rotate.hpp | C++ | bsd-3-clause | 1,524 |
//===----- CGOpenMPRuntime.cpp - Interface to OpenMP Runtimes -------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This provides a class for OpenMP runtime code generation.
//
//===----------------------------------------------------------------------===//
#include "CGOpenMPRuntime.h"
#include "CodeGenFunction.h"
#include "clang/AST/Decl.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
using namespace clang;
using namespace CodeGen;
// Pre-computes the two LLVM types every libomp call site needs: the ident_t
// source-location struct and the outlined-function (kmpc_micro) signature.
CGOpenMPRuntime::CGOpenMPRuntime(CodeGenModule &CGM)
    : CGM(CGM), DefaultOpenMPPSource(nullptr) {
  // ident_t layout matches the OpenMP runtime's location descriptor.
  IdentTy = llvm::StructType::create(
      "ident_t", CGM.Int32Ty /* reserved_1 */, CGM.Int32Ty /* flags */,
      CGM.Int32Ty /* reserved_2 */, CGM.Int32Ty /* reserved_3 */,
      CGM.Int8PtrTy /* psource */, NULL);
  // Build void (*kmpc_micro)(kmp_int32 *global_tid, kmp_int32 *bound_tid,...)
  llvm::Type *MicroParams[] = {llvm::PointerType::getUnqual(CGM.Int32Ty),
                               llvm::PointerType::getUnqual(CGM.Int32Ty)};
  Kmpc_MicroTy = llvm::FunctionType::get(CGM.VoidTy, MicroParams, true);
}
// Returns the default (no-debug-info) ident_t global for the given flags,
// creating and caching it on first use.
llvm::Value *
CGOpenMPRuntime::GetOrCreateDefaultOpenMPLocation(OpenMPLocationFlags Flags) {
  llvm::Value *Entry = OpenMPDefaultLocMap.lookup(Flags);
  if (!Entry) {
    if (!DefaultOpenMPPSource) {
      // Initialize default location for psource field of ident_t structure of
      // all ident_t objects. Format is ";file;function;line;column;;".
      // Taken from
      // http://llvm.org/svn/llvm-project/openmp/trunk/runtime/src/kmp_str.c
      DefaultOpenMPPSource =
          CGM.GetAddrOfConstantCString(";unknown;unknown;0;0;;");
      DefaultOpenMPPSource =
          llvm::ConstantExpr::getBitCast(DefaultOpenMPPSource, CGM.Int8PtrTy);
    }
    llvm::GlobalVariable *DefaultOpenMPLocation = cast<llvm::GlobalVariable>(
        CGM.CreateRuntimeVariable(IdentTy, ".kmpc_default_loc.addr"));
    DefaultOpenMPLocation->setUnnamedAddr(true);
    DefaultOpenMPLocation->setConstant(true);
    DefaultOpenMPLocation->setLinkage(llvm::GlobalValue::PrivateLinkage);
    llvm::Constant *Zero = llvm::ConstantInt::get(CGM.Int32Ty, 0, true);
    llvm::Constant *Values[] = {Zero,
                                llvm::ConstantInt::get(CGM.Int32Ty, Flags),
                                Zero, Zero, DefaultOpenMPPSource};
    llvm::Constant *Init = llvm::ConstantStruct::get(IdentTy, Values);
    DefaultOpenMPLocation->setInitializer(Init);
    // Fix: record the new location in the cache. Previously nothing was ever
    // inserted, so lookup() always missed and every call re-created and
    // re-initialized the global.
    OpenMPDefaultLocMap[Flags] = DefaultOpenMPLocation;
    return DefaultOpenMPLocation;
  }
  return Entry;
}
// Emits (or reuses) a function-local ident_t whose psource field is updated
// to describe `Loc`, returning a pointer to it. Falls back to the shared
// default location when debug info is disabled or Loc is invalid.
llvm::Value *CGOpenMPRuntime::EmitOpenMPUpdateLocation(
    CodeGenFunction &CGF, SourceLocation Loc, OpenMPLocationFlags Flags) {
  // If no debug info is generated - return global default location.
  if (CGM.getCodeGenOpts().getDebugInfo() == CodeGenOptions::NoDebugInfo ||
      Loc.isInvalid())
    return GetOrCreateDefaultOpenMPLocation(Flags);
  assert(CGF.CurFn && "No function in current CodeGenFunction.");
  llvm::Value *LocValue = nullptr;
  OpenMPLocMapTy::iterator I = OpenMPLocMap.find(CGF.CurFn);
  if (I != OpenMPLocMap.end()) {
    // Reuse the alloca created earlier for this function.
    LocValue = I->second;
  } else {
    // Generate "ident_t .kmpc_loc.addr;"
    llvm::AllocaInst *AI = CGF.CreateTempAlloca(IdentTy, ".kmpc_loc.addr");
    AI->setAlignment(CGM.getDataLayout().getPrefTypeAlignment(IdentTy));
    OpenMPLocMap[CGF.CurFn] = AI;
    LocValue = AI;
    // Copy the default ident_t into the alloca at the function entry block,
    // restoring the builder's insert point afterwards via the guard.
    CGBuilderTy::InsertPointGuard IPG(CGF.Builder);
    CGF.Builder.SetInsertPoint(CGF.AllocaInsertPt);
    CGF.Builder.CreateMemCpy(LocValue, GetOrCreateDefaultOpenMPLocation(Flags),
                             llvm::ConstantExpr::getSizeOf(IdentTy),
                             CGM.PointerAlignInBytes);
  }
  // char **psource = &.kmpc_loc_<flags>.addr.psource;
  llvm::Value *PSource =
      CGF.Builder.CreateConstInBoundsGEP2_32(LocValue, 0, IdentField_PSource);
  // Cache the formatted location string per raw source location.
  auto OMPDebugLoc = OpenMPDebugLocMap.lookup(Loc.getRawEncoding());
  if (OMPDebugLoc == nullptr) {
    SmallString<128> Buffer2;
    llvm::raw_svector_ostream OS2(Buffer2);
    // Build debug location
    PresumedLoc PLoc = CGF.getContext().getSourceManager().getPresumedLoc(Loc);
    OS2 << ";" << PLoc.getFilename() << ";";
    if (const FunctionDecl *FD =
            dyn_cast_or_null<FunctionDecl>(CGF.CurFuncDecl)) {
      OS2 << FD->getQualifiedNameAsString();
    }
    OS2 << ";" << PLoc.getLine() << ";" << PLoc.getColumn() << ";;";
    OMPDebugLoc = CGF.Builder.CreateGlobalStringPtr(OS2.str());
    OpenMPDebugLocMap[Loc.getRawEncoding()] = OMPDebugLoc;
  }
  // *psource = ";<File>;<Function>;<Line>;<Column>;;";
  CGF.Builder.CreateStore(OMPDebugLoc, PSource);
  return LocValue;
}
// Returns the global thread id for the current function, emitting a call to
// __kmpc_global_thread_num at the function entry on first use and caching
// the result per function.
llvm::Value *CGOpenMPRuntime::GetOpenMPGlobalThreadNum(CodeGenFunction &CGF,
                                                       SourceLocation Loc) {
  assert(CGF.CurFn && "No function in current CodeGenFunction.");
  llvm::Value *GTid = nullptr;
  OpenMPGtidMapTy::iterator I = OpenMPGtidMap.find(CGF.CurFn);
  if (I != OpenMPGtidMap.end()) {
    GTid = I->second;
  } else {
    // Generate "int32 .kmpc_global_thread_num.addr;"
    // The call is emitted at the alloca insertion point so it dominates all
    // uses; the guard restores the builder's position afterwards.
    CGBuilderTy::InsertPointGuard IPG(CGF.Builder);
    CGF.Builder.SetInsertPoint(CGF.AllocaInsertPt);
    llvm::Value *Args[] = {EmitOpenMPUpdateLocation(CGF, Loc)};
    GTid = CGF.EmitRuntimeCall(
        CreateRuntimeFunction(OMPRTL__kmpc_global_thread_num), Args);
    OpenMPGtidMap[CGF.CurFn] = GTid;
  }
  return GTid;
}
// Drops the per-function caches (thread id, location alloca) once codegen
// for CGF.CurFn is complete.
void CGOpenMPRuntime::FunctionFinished(CodeGenFunction &CGF) {
  assert(CGF.CurFn && "No function in current CodeGenFunction.");
  // DenseMap::erase is a no-op when the key is absent, so the previous
  // count()-then-erase pattern did a redundant second lookup.
  OpenMPGtidMap.erase(CGF.CurFn);
  OpenMPLocMap.erase(CGF.CurFn);
}
// Returns the type "ident_t *".
llvm::Type *CGOpenMPRuntime::getIdentTyPointerTy() {
  return llvm::PointerType::getUnqual(IdentTy);
}
// Returns a pointer type to the outlined-function (kmpc_micro) signature.
llvm::Type *CGOpenMPRuntime::getKmpc_MicroPointerTy() {
  return llvm::PointerType::getUnqual(Kmpc_MicroTy);
}
// Returns (declaring on first use) the requested libomp runtime entry point.
llvm::Constant *
CGOpenMPRuntime::CreateRuntimeFunction(OpenMPRTLFunction Function) {
  llvm::Constant *RTLFn = nullptr;
  switch (Function) {
  case OMPRTL__kmpc_fork_call: {
    // Build void __kmpc_fork_call(ident_t *loc, kmp_int32 argc, kmpc_micro
    // microtask, ...);
    llvm::Type *TypeParams[] = {getIdentTyPointerTy(), CGM.Int32Ty,
                                getKmpc_MicroPointerTy()};
    llvm::FunctionType *FnTy =
        llvm::FunctionType::get(CGM.VoidTy, TypeParams, true);
    RTLFn = CGM.CreateRuntimeFunction(FnTy, "__kmpc_fork_call");
    break;
  }
  case OMPRTL__kmpc_global_thread_num: {
    // Build kmp_int32 __kmpc_global_thread_num(ident_t *loc);
    llvm::Type *TypeParams[] = {getIdentTyPointerTy()};
    llvm::FunctionType *FnTy =
        llvm::FunctionType::get(CGM.Int32Ty, TypeParams, false);
    RTLFn = CGM.CreateRuntimeFunction(FnTy, "__kmpc_global_thread_num");
    break;
  }
  }
  return RTLFn;
}
| dededong/goblin-core | riscv/llvm/3.5/cfe-3.5.0.src/lib/CodeGen/CGOpenMPRuntime.cpp | C++ | bsd-3-clause | 7,142 |
package org.hisp.dhis.datastatistics;
/*
* Copyright (c) 2004-2017, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import com.fasterxml.jackson.annotation.JsonProperty;
import org.hisp.dhis.common.BaseIdentifiableObject;
/**
* DataStatistics object to be saved as snapshot.
*
* @author Julie Hill Roa
* @author Yrjan A. F. Fraschetti
*/
public class DataStatistics
extends BaseIdentifiableObject
{
private Double mapViews;
private Double chartViews;
private Double reportTableViews;
private Double eventReportViews;
private Double eventChartViews;
private Double dashboardViews;
private Double dataSetReportViews;
private Double totalViews;
private Double savedMaps;
private Double savedCharts;
private Double savedReportTables;
private Double savedEventReports;
private Double savedEventCharts;
private Double savedDashboards;
private Double savedIndicators;
private Double savedDataValues;
private Integer activeUsers;
private Integer users;
public DataStatistics()
{
}
public DataStatistics( Double mapViews, Double chartViews, Double reportTableViews, Double eventReportViews,
Double eventChartViews, Double dashboardViews, Double dataSetReportViews, Double totalViews, Double savedMaps,
Double savedCharts, Double savedReportTables, Double savedEventReports, Double savedEventCharts, Double savedDashboards,
Double savedIndicators, Double savedDataValues, Integer activeUsers, Integer users )
{
this.mapViews = mapViews;
this.chartViews = chartViews;
this.reportTableViews = reportTableViews;
this.eventReportViews = eventReportViews;
this.eventChartViews = eventChartViews;
this.dashboardViews = dashboardViews;
this.dataSetReportViews = dataSetReportViews;
this.totalViews = totalViews;
this.savedMaps = savedMaps;
this.savedCharts = savedCharts;
this.savedReportTables = savedReportTables;
this.savedEventReports = savedEventReports;
this.savedEventCharts = savedEventCharts;
this.savedDashboards = savedDashboards;
this.savedIndicators = savedIndicators;
this.savedDataValues = savedDataValues;
this.activeUsers = activeUsers;
this.users = users;
}
@JsonProperty
public Integer getActiveUsers()
{
return activeUsers;
}
public void setActiveUsers( Integer activeUsers )
{
this.activeUsers = activeUsers;
}
@JsonProperty
public Double getMapViews()
{
return mapViews;
}
public void setMapViews( Double mapViews )
{
this.mapViews = mapViews;
}
@JsonProperty
public Double getChartViews()
{
return chartViews;
}
public void setChartViews( Double chartViews )
{
this.chartViews = chartViews;
}
@JsonProperty
public Double getReportTableViews()
{
return reportTableViews;
}
public void setReportTableViews( Double reportTableViews )
{
this.reportTableViews = reportTableViews;
}
@JsonProperty
public Double getEventReportViews()
{
return eventReportViews;
}
public void setEventReportViews( Double eventReportViews )
{
this.eventReportViews = eventReportViews;
}
@JsonProperty
public Double getEventChartViews()
{
return eventChartViews;
}
public void setEventChartViews( Double eventChartViews )
{
this.eventChartViews = eventChartViews;
}
@JsonProperty
public Double getDashboardViews()
{
return dashboardViews;
}
public void setDashboardViews( Double dashboardViews )
{
this.dashboardViews = dashboardViews;
}
@JsonProperty
public Double getDataSetReportViews()
{
return dataSetReportViews;
}
public void setDataSetReportViews( Double dataSetReportViews )
{
this.dataSetReportViews = dataSetReportViews;
}
@JsonProperty
public Double getTotalViews()
{
return totalViews;
}
public void setTotalViews( Double totalViews )
{
this.totalViews = totalViews;
}
@JsonProperty
public Double getSavedMaps()
{
return savedMaps;
}
public void setSavedMaps( Double savedMaps )
{
this.savedMaps = savedMaps;
}
@JsonProperty
public Double getSavedCharts()
{
return savedCharts;
}
public void setSavedCharts( Double savedCharts )
{
this.savedCharts = savedCharts;
}
@JsonProperty
public Double getSavedReportTables()
{
return savedReportTables;
}
public void setSavedReportTables( Double savedReportTables )
{
this.savedReportTables = savedReportTables;
}
@JsonProperty
public Double getSavedEventReports()
{
return savedEventReports;
}
public void setSavedEventReports( Double savedEventReports )
{
this.savedEventReports = savedEventReports;
}
@JsonProperty
public Double getSavedEventCharts()
{
return savedEventCharts;
}
public void setSavedEventCharts( Double savedEventCharts )
{
this.savedEventCharts = savedEventCharts;
}
@JsonProperty
public Double getSavedDashboards()
{
return savedDashboards;
}
public void setSavedDashboards( Double savedDashboards )
{
this.savedDashboards = savedDashboards;
}
@JsonProperty
public Double getSavedIndicators()
{
return savedIndicators;
}
public void setSavedIndicators( Double savedIndicators )
{
this.savedIndicators = savedIndicators;
}
@JsonProperty
public Double getSavedDataValues()
{
return savedDataValues;
}
public void setSavedDataValues( Double savedDataValues )
{
this.savedDataValues = savedDataValues;
}
@JsonProperty
public Integer getUsers()
{
return users;
}
@JsonProperty
public void setUsers( Integer users )
{
this.users = users;
}
/**
 * Builds a debug representation listing the view and save counters.
 * NOTE(review): {@code dataSetReportViews} is not included in the output,
 * although the class exposes it — confirm whether the omission is intentional.
 */
@Override public String toString()
{
StringBuilder result = new StringBuilder( super.toString() );
result.append( "DataStatistics{" )
.append( "mapViews=" ).append( mapViews )
.append( ", chartViews=" ).append( chartViews )
.append( ", reportTableViews=" ).append( reportTableViews )
.append( ", eventReportViews=" ).append( eventReportViews )
.append( ", eventChartViews=" ).append( eventChartViews )
.append( ", dashboardViews=" ).append( dashboardViews )
.append( ", totalViews=" ).append( totalViews )
.append( ", savedMaps=" ).append( savedMaps )
.append( ", savedCharts=" ).append( savedCharts )
.append( ", savedReportTables=" ).append( savedReportTables )
.append( ", savedEventReports=" ).append( savedEventReports )
.append( ", savedEventCharts=" ).append( savedEventCharts )
.append( ", savedDashboards=" ).append( savedDashboards )
.append( ", savedIndicators=" ).append( savedIndicators )
.append( ", savedDataValues=" ).append( savedDataValues )
.append( ", activeUsers=" ).append( activeUsers )
.append( ", users=" ).append( users )
.append( '}' );
return result.toString();
}
}
| vmluan/dhis2-core | dhis-2/dhis-api/src/main/java/org/hisp/dhis/datastatistics/DataStatistics.java | Java | bsd-3-clause | 8,631 |
package org.hisp.dhis.report;
/*
* Copyright (c) 2004-2017, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.io.OutputStream;
import java.io.Writer;
import java.util.Date;
import java.util.List;
import net.sf.jasperreports.engine.JasperPrint;
import org.hisp.dhis.period.Period;
/**
* @author Lars Helge Overland
*/
public interface ReportService
{
String ID = ReportService.class.getName();
String REPORTTYPE_PDF = "pdf";
String REPORTTYPE_XLS = "xls";
String PARAM_RELATIVE_PERIODS = "periods";
String PARAM_RELATIVE_ISO_PERIODS = "periods_iso";
String PARAM_ORG_UNITS = "organisationunits";
String PARAM_ORG_UNITS_UID = "organisationunits_uid";
String PARAM_ORGANISATIONUNIT_LEVEL = "organisationunit_level";
String PARAM_ORGANISATIONUNIT_LEVEL_COLUMN = "organisationunit_level_column";
String PARAM_ORGANISATIONUNIT_UID_LEVEL_COLUMN = "organisationunit_uid_level_column";
String PARAM_ORGANISATIONUNIT_COLUMN_NAME = "organisationunit_name";
String PARAM_PERIOD_NAME = "period_name";
/**
 * Renders a Jasper Report.
 * <p/>
 * Will make the following params available:
 * <p/>
 * "periods" String of relative period ids (String)
 * "organisationunits" String of selected organisation unit ids (String)
 * "period_name" Name of the selected period (String)
 * "organisationunit_name" Name of the selected organisation unit (String)
 * "organisationunit_level" Level of the selected organisation unit (int)
 * "organisationunit_level_column" Name of the relevant level column in
 * table _orgunitstructure (String)
 *
 * @param out the OutputStream to write the report to.
 * @param reportUid the uid of the report to render.
 * @param period the period to use as parameter.
 * @param organisationUnitUid the uid of the org unit to use as parameter.
 * @param type the type of the report, can be "xls" and "pdf".
 * @return the rendered JasperPrint instance.
 */
JasperPrint renderReport( OutputStream out, String reportUid, Period period,
    String organisationUnitUid, String type );
/**
 * Renders and writes a HTML-based standard report to the given Writer.
 *
 * @param writer the Writer.
 * @param uid the report uid.
 * @param date the date.
 * @param ou the organisation unit uid.
 */
void renderHtmlReport( Writer writer, String uid, Date date, String ou );
/**
 * Saves a Report.
 *
 * @param report the Report to save.
 * @return the generated identifier.
 */
int saveReport( Report report );
/**
 * Retrieves the Report with the given identifier.
 *
 * @param id the identifier of the Report to retrieve.
 * @return the Report.
 */
Report getReport( int id );
/**
 * Retrieves the Report with the given uid.
 *
 * @param uid the uid of the Report to retrieve.
 * @return the Report.
 */
Report getReport( String uid );
/**
 * Returns the total number of reports.
 *
 * @return the total number of reports.
 */
int getReportCount();
/**
 * Returns the number of reports which names are like the given name.
 *
 * @param name the name to match.
 * @return the number of reports with matching names.
 */
int getReportCountByName( String name );
/**
 * Retrieves the given number of maximum reports starting at the given start
 * index. Reports are sorted on the name property.
 *
 * @param first the start index.
 * @param max the maximum number of reports.
 * @return a list of reports.
 */
List<Report> getReportsBetween( int first, int max );
/**
 * Retrieves the given number of maximum reports with names like the given
 * name, starting at the given start index. Reports are sorted on the name
 * property.
 *
 * @param name the name to match.
 * @param first the start index.
 * @param max the maximum number of reports.
 * @return a List of reports.
 */
List<Report> getReportsBetweenByName( String name, int first, int max );
/**
 * Deletes a Report.
 *
 * @param report the Report to delete.
 */
void deleteReport( Report report );
/**
 * Retrieves all Reports.
 *
 * @return a List of Reports.
 */
List<Report> getAllReports();
/**
 * Retrieves the Reports with the given name.
 *
 * @param name the name.
 * @return a list of Reports with the given name.
 */
List<Report> getReportByName( String name );
/**
 * Retrieves Reports with the given uids.
 *
 * @param uids the list of uids.
 * @return a list of Reports.
 */
List<Report> getReportsByUid( List<String> uids );
}
| troyel/dhis2-core | dhis-2/dhis-api/src/main/java/org/hisp/dhis/report/ReportService.java | Java | bsd-3-clause | 6,363 |
/*
* Copyright (c) 2011-2012 ARM Limited
* All rights reserved
*
* The license below extends only to copyright in the software and shall
* not be construed as granting a license to any other intellectual
* property including but not limited to intellectual property relating
* to a hardware implementation of the functionality of the software
* licensed hereunder. You may use the software subject to the license
* terms below provided that you ensure that this notice is replicated
* unmodified and in its entirety in all distributions of the software,
* modified or unmodified, in source code or in binary form.
*
* Copyright (c) 2006 The Regents of The University of Michigan
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met: redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer;
* redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution;
* neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Authors: Ali Saidi
* Andreas Hansson
* William Wang
*/
/**
* @file
* Definition of a bus object.
*/
#include "base/misc.hh"
#include "base/trace.hh"
#include "debug/Bus.hh"
#include "debug/BusAddrRanges.hh"
#include "debug/Drain.hh"
#include "mem/bus.hh"
// Construct the bus from its Python-generated parameter struct and
// sanity-check the timing parameters. NOTE(review): 'clock' is not
// initialized here, so it is presumably a member inherited from
// MemObject or its base — confirm against the class declaration.
BaseBus::BaseBus(const BaseBusParams *p)
    : MemObject(p),
      headerCycles(p->header_cycles), width(p->width),
      defaultPortID(InvalidPortID),
      useDefaultRange(p->use_default_range),
      defaultBlockSize(p->block_size),
      cachedBlockSize(0), cachedBlockSizeValid(false)
{
    //width, clock period, and header cycles must be positive
    if (width <= 0)
        fatal("Bus width must be positive\n");
    if (clock <= 0)
        fatal("Bus clock period must be positive\n");
    if (headerCycles <= 0)
        fatal("Number of header cycles must be positive\n");
}
// The bus owns its port objects: delete every master and slave port
// created during construction/connection.
BaseBus::~BaseBus()
{
    for (MasterPortIter m = masterPorts.begin(); m != masterPorts.end();
         ++m) {
        delete *m;
    }
    for (SlavePortIter s = slavePorts.begin(); s != slavePorts.end();
         ++s) {
        delete *s;
    }
}
// Resolve an interface name to one of the bus's master ports.
// "default" maps to the configured default port, "master" indexes
// straight into the master port vector, and anything else is delegated
// to the MemObject base class.
MasterPort &
BaseBus::getMasterPort(const std::string &if_name, int idx)
{
    if (if_name == "default")
        return *masterPorts[defaultPortID];

    if (if_name == "master" && idx < masterPorts.size()) {
        // the master port index translates directly to the vector position
        return *masterPorts[idx];
    }

    return MemObject::getMasterPort(if_name, idx);
}
// Resolve an interface name to one of the bus's slave ports; the slave
// port index maps directly onto the vector position. Unknown names are
// handled by the MemObject base class.
SlavePort &
BaseBus::getSlavePort(const std::string &if_name, int idx)
{
    if (if_name != "slave" || idx >= slavePorts.size())
        return MemObject::getSlavePort(if_name, idx);

    return *slavePorts[idx];
}
// Compute and stamp the timing of a packet on the bus: the returned
// headerTime is when the header phase completes, firstWordTime is one
// bus cycle later, and finishTime accounts for ceil(size/width) data
// cycles on top of the header.
Tick
BaseBus::calcPacketTiming(PacketPtr pkt)
{
    // determine the current time rounded to the closest following
    // clock edge
    Tick now = nextCycle();
    Tick headerTime = now + headerCycles * clock;
    // The packet will be sent. Figure out how long it occupies the bus, and
    // how much of that time is for the first "word", aka bus width.
    int numCycles = 0;
    if (pkt->hasData()) {
        // If a packet has data, it needs ceil(size/width) cycles to send it
        int dataSize = pkt->getSize();
        numCycles += dataSize/width;
        if (dataSize % width)
            numCycles++;
    }
    // The first word will be delivered after the current tick, the delivery
    // of the address if any, and one bus cycle to deliver the data
    pkt->firstWordTime = headerTime + clock;
    pkt->finishTime = headerTime + numCycles * clock;
    return headerTime;
}
// Construct an idle bus layer belonging to the given bus; the release
// event is used to end each busy period.
template <typename PortClass>
BaseBus::Layer<PortClass>::Layer(BaseBus& _bus, const std::string& _name,
                                 Tick _clock) :
    bus(_bus), _name(_name), state(IDLE), clock(_clock), drainEvent(NULL),
    releaseEvent(this)
{
}
// Mark the layer busy until the given tick by scheduling the release
// event. NOTE(review): the DPRINTF uses the 'BaseBus' debug flag while
// only debug/Bus.hh is included here — confirm the flag exists.
template <typename PortClass>
void BaseBus::Layer<PortClass>::occupyLayer(Tick until)
{
    // ensure the state is busy or in retry and never idle at this
    // point, as the bus should transition from idle as soon as it has
    // decided to forward the packet to prevent any follow-on calls to
    // sendTiming seeing an unoccupied bus
    assert(state != IDLE);
    // note that we do not change the bus state here, if we are going
    // from idle to busy it is handled by tryTiming, and if we
    // are in retry we should remain in retry such that
    // succeededTiming still sees the accurate state
    // until should never be 0 as express snoops never occupy the bus
    assert(until != 0);
    bus.schedule(releaseEvent, until);
    DPRINTF(BaseBus, "The bus is now busy from tick %d to %d\n",
            curTick(), until);
}
// Ask the layer for permission to send: returns false (and queues the
// port on the retry list) when the layer is busy or another port is
// being retried; otherwise transitions IDLE -> BUSY and returns true.
template <typename PortClass>
bool
BaseBus::Layer<PortClass>::tryTiming(PortClass* port)
{
    // first we see if the bus is busy, next we check if we are in a
    // retry with a port other than the current one
    if (state == BUSY || (state == RETRY && port != retryList.front())) {
        // put the port at the end of the retry list
        retryList.push_back(port);
        return false;
    }
    // update the state which is shared for request, response and
    // snoop responses, if we were idle we are now busy, if we are in
    // a retry, then do not change
    if (state == IDLE)
        state = BUSY;
    return true;
}
// Called after a send succeeded: pop the retrying port (if any), move
// to BUSY and occupy the layer until busy_time.
template <typename PortClass>
void
BaseBus::Layer<PortClass>::succeededTiming(Tick busy_time)
{
    // if a retrying port succeeded, also take it off the retry list
    if (state == RETRY) {
        DPRINTF(BaseBus, "Remove retry from list %s\n",
                retryList.front()->name());
        retryList.pop_front();
        state = BUSY;
    }
    // we should either have gone from idle to busy in the
    // tryTiming test, or just gone from a retry to busy
    assert(state == BUSY);
    // occupy the bus accordingly
    occupyLayer(busy_time);
}
// Called after a send failed at the destination: re-queue the port for
// a retry (unless it is already at the head of the retry list) and
// keep the layer busy until busy_time.
template <typename PortClass>
void
BaseBus::Layer<PortClass>::failedTiming(PortClass* port, Tick busy_time)
{
    // if we are not in a retry, i.e. busy (but never idle), or we are
    // in a retry but not for the current port, then add the port at
    // the end of the retry list
    if (state != RETRY || port != retryList.front()) {
        retryList.push_back(port);
    }
    // even if we retried the current one and did not succeed,
    // we are no longer retrying but instead busy
    state = BUSY;
    // occupy the bus accordingly
    occupyLayer(busy_time);
}
// Release event handler: the busy period is over. Either kick off the
// next waiting port or, if nothing is waiting and a drain is pending,
// complete the drain.
template <typename PortClass>
void
BaseBus::Layer<PortClass>::releaseLayer()
{
    // releasing the bus means we should now be idle
    assert(state == BUSY);
    assert(!releaseEvent.scheduled());
    // update the state
    state = IDLE;
    // bus is now idle, so if someone is waiting we can retry
    if (!retryList.empty()) {
        // note that we block (return false on recvTiming) both
        // because the bus is busy and because the destination is
        // busy, and in the latter case the bus may be released before
        // we see a retry from the destination
        retryWaiting();
    } else if (drainEvent) {
        DPRINTF(Drain, "Bus done draining, processing drain event\n");
        //If we weren't able to drain before, do it now.
        drainEvent->process();
        // Clear the drain event once we're done with it.
        drainEvent = NULL;
    }
}
// Give the port at the head of the retry list another chance to send.
// If the port does not send immediately (state still RETRY afterwards),
// drop it from the list and burn one bus cycle for the missed grant.
template <typename PortClass>
void
BaseBus::Layer<PortClass>::retryWaiting()
{
    // this should never be called with an empty retry list
    assert(!retryList.empty());
    // we always go to retrying from idle
    assert(state == IDLE);
    // update the state which is shared for request, response and
    // snoop responses
    state = RETRY;
    // note that we might have blocked on the receiving port being
    // busy (rather than the bus itself) and now call retry before the
    // destination called retry on the bus
    retryList.front()->sendRetry();
    // If the bus is still in the retry state, sendTiming wasn't
    // called in zero time (e.g. the cache does this)
    if (state == RETRY) {
        retryList.pop_front();
        //Burn a cycle for the missed grant.
        // update the state which is shared for request, response and
        // snoop responses
        state = BUSY;
        // determine the current time rounded to the closest following
        // clock edge
        Tick now = bus.nextCycle();
        occupyLayer(now + clock);
    }
}
// A downstream peer signalled it can accept again; if the layer is idle
// and someone is waiting, let the head of the retry list go.
template <typename PortClass>
void
BaseBus::Layer<PortClass>::recvRetry()
{
    // we got a retry from a peer that we tried to send something to
    // and failed, but we sent it on the account of someone else, and
    // that source port should be on our retry list, however if the
    // bus layer is released before this happens and the retry (from
    // the bus point of view) is successful then this no longer holds
    // and we could in fact have an empty retry list
    if (retryList.empty())
        return;
    // if the bus layer is idle
    if (state == IDLE) {
        // note that we do not care who told us to retry at the moment, we
        // merely let the first one on the retry list go
        retryWaiting();
    }
}
// Map an address to the master port that serves it: first the one-entry
// port cache, then the range map, then (depending on configuration) the
// default port. fatal()s if no destination exists.
PortID
BaseBus::findPort(Addr addr)
{
    /* An interval tree would be a better way to do this. --ali. */
    PortID dest_id = checkPortCache(addr);
    if (dest_id != InvalidPortID)
        return dest_id;
    // Check normal port ranges
    PortMapConstIter i = portMap.find(RangeSize(addr,1));
    if (i != portMap.end()) {
        dest_id = i->second;
        updatePortCache(dest_id, i->first.start, i->first.end);
        return dest_id;
    }
    // Check if this matches the default range
    if (useDefaultRange) {
        AddrRangeConstIter a_end = defaultRange.end();
        for (AddrRangeConstIter i = defaultRange.begin(); i != a_end; i++) {
            if (*i == addr) {
                DPRINTF(BusAddrRanges, "  found addr %#llx on default\n",
                        addr);
                return defaultPortID;
            }
        }
    } else if (defaultPortID != InvalidPortID) {
        DPRINTF(BusAddrRanges, "Unable to find destination for addr %#llx, "
                "will use default port\n", addr);
        return defaultPortID;
    }
    // we should use the range for the default port and it did not
    // match, or the default port is not set
    fatal("Unable to find destination for addr %#llx on bus %s\n", addr,
          name());
}
/** Function called by the port when the bus is receiving a range change.*/
void
BaseBus::recvRangeChange(PortID master_port_id)
{
AddrRangeList ranges;
AddrRangeIter iter;
if (inRecvRangeChange.count(master_port_id))
return;
inRecvRangeChange.insert(master_port_id);
DPRINTF(BusAddrRanges, "received RangeChange from device id %d\n",
master_port_id);
clearPortCache();
if (master_port_id == defaultPortID) {
defaultRange.clear();
// Only try to update these ranges if the user set a default responder.
if (useDefaultRange) {
// get the address ranges of the connected slave port
AddrRangeList ranges =
masterPorts[master_port_id]->getAddrRanges();
for(iter = ranges.begin(); iter != ranges.end(); iter++) {
defaultRange.push_back(*iter);
DPRINTF(BusAddrRanges, "Adding range %#llx - %#llx for default range\n",
iter->start, iter->end);
}
}
} else {
assert(master_port_id < masterPorts.size() && master_port_id >= 0);
MasterPort *port = masterPorts[master_port_id];
// Clean out any previously existent ids
for (PortMapIter portIter = portMap.begin();
portIter != portMap.end(); ) {
if (portIter->second == master_port_id)
portMap.erase(portIter++);
else
portIter++;
}
// get the address ranges of the connected slave port
ranges = port->getAddrRanges();
for (iter = ranges.begin(); iter != ranges.end(); iter++) {
DPRINTF(BusAddrRanges, "Adding range %#llx - %#llx for id %d\n",
iter->start, iter->end, master_port_id);
if (portMap.insert(*iter, master_port_id) == portMap.end()) {
PortID conflict_id = portMap.find(*iter)->second;
fatal("%s has two ports with same range:\n\t%s\n\t%s\n",
name(),
masterPorts[master_port_id]->getSlavePort().name(),
masterPorts[conflict_id]->getSlavePort().name());
}
}
}
DPRINTF(BusAddrRanges, "port list has %d entries\n", portMap.size());
// tell all our neighbouring master ports that our address range
// has changed
for (SlavePortConstIter p = slavePorts.begin(); p != slavePorts.end();
++p)
(*p)->sendRangeChange();
inRecvRangeChange.erase(master_port_id);
}
// Collect the address ranges served by this bus: every default range,
// plus each mapped port range that is not already a subset of a default
// range. Partially-overlapping ranges are a configuration error.
AddrRangeList
BaseBus::getAddrRanges() const
{
    AddrRangeList ranges;
    DPRINTF(BusAddrRanges, "received address range request, returning:\n");
    for (AddrRangeConstIter dflt_iter = defaultRange.begin();
         dflt_iter != defaultRange.end(); dflt_iter++) {
        ranges.push_back(*dflt_iter);
        DPRINTF(BusAddrRanges, " -- Dflt: %#llx : %#llx\n",dflt_iter->start,
                dflt_iter->end);
    }
    for (PortMapConstIter portIter = portMap.begin();
         portIter != portMap.end(); portIter++) {
        bool subset = false;
        for (AddrRangeConstIter dflt_iter = defaultRange.begin();
             dflt_iter != defaultRange.end(); dflt_iter++) {
            // a range straddling a default-range boundary is neither
            // disjoint nor contained -> fatal
            if ((portIter->first.start < dflt_iter->start &&
                 portIter->first.end >= dflt_iter->start) ||
                (portIter->first.start < dflt_iter->end &&
                 portIter->first.end >= dflt_iter->end))
                // Fix: use adjacent string literals instead of a '\'
                // line continuation, which embedded the next line's raw
                // indentation in the message, and correct the
                // misspelling "itersect" -> "intersect".
                fatal("Devices can not set ranges that intersect the "
                      "default set but are not a subset of the default "
                      "set.\n");
            if (portIter->first.start >= dflt_iter->start &&
                portIter->first.end <= dflt_iter->end) {
                subset = true;
                DPRINTF(BusAddrRanges, "  -- %#llx : %#llx is a SUBSET\n",
                        portIter->first.start, portIter->first.end);
            }
        }
        if (!subset) {
            ranges.push_back(portIter->first);
            DPRINTF(BusAddrRanges, "  -- %#llx : %#llx\n",
                    portIter->first.start, portIter->first.end);
        }
    }
    return ranges;
}
// Determine the bus's block size as the maximum peer block size over all
// connected master and slave ports, falling back to the configured
// default when no peer reports one. The result is cached.
unsigned
BaseBus::findBlockSize()
{
    if (cachedBlockSizeValid)
        return cachedBlockSize;
    unsigned max_bs = 0;
    for (MasterPortConstIter m = masterPorts.begin(); m != masterPorts.end();
         ++m) {
        unsigned tmp_bs = (*m)->peerBlockSize();
        if (tmp_bs > max_bs)
            max_bs = tmp_bs;
    }
    for (SlavePortConstIter s = slavePorts.begin(); s != slavePorts.end();
         ++s) {
        unsigned tmp_bs = (*s)->peerBlockSize();
        if (tmp_bs > max_bs)
            max_bs = tmp_bs;
    }
    if (max_bs == 0)
        max_bs = defaultBlockSize;
    // 64 bytes is the expected block size; anything else is suspicious
    if (max_bs != 64)
        warn_once("Blocksize found to not be 64... hmm... probably not.\n");
    cachedBlockSize = max_bs;
    cachedBlockSizeValid = true;
    return max_bs;
}
// Drain the layer: returns 0 if already quiescent, otherwise records the
// drain event (signalled later from releaseLayer) and returns 1.
template <typename PortClass>
unsigned int
BaseBus::Layer<PortClass>::drain(Event * de)
{
    //We should check that we're not "doing" anything, and that noone is
    //waiting. We might be idle but have someone waiting if the device we
    //contacted for a retry didn't actually retry.
    if (!retryList.empty() || state != IDLE) {
        DPRINTF(Drain, "Bus not drained\n");
        drainEvent = de;
        return 1;
    }
    return 0;
}
/**
 * Bus layer template instantiations. Could be removed with _impl.hh
 * file, but since there are only two given options (MasterPort and
 * SlavePort) it seems a bit excessive at this point. The explicit
 * instantiations allow the template definitions to stay in this .cc
 * file instead of the header.
 */
template class BaseBus::Layer<SlavePort>;
template class BaseBus::Layer<MasterPort>;
| aferr/LatticeMemCtl | src/mem/bus.cc | C++ | bsd-3-clause | 17,480 |
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/audio_processing/aec3/signal_dependent_erle_estimator.h"
#include <algorithm>
#include <functional>
#include <numeric>
#include "modules/audio_processing/aec3/spectrum_buffer.h"
#include "rtc_base/numerics/safe_minmax.h"
namespace webrtc {
namespace {
// FFT-bin boundaries delimiting the kSubbands subbands: subband i spans
// bins [kBandBoundaries[i], kBandBoundaries[i+1]).
constexpr std::array<size_t, SignalDependentErleEstimator::kSubbands + 1>
    kBandBoundaries = {1, 8, 16, 24, 32, 48, kFftLengthBy2Plus1};
// Builds the lookup table that maps every FFT bin to the subband (as
// delimited by kBandBoundaries) it belongs to.
std::array<size_t, kFftLengthBy2Plus1> FormSubbandMap() {
  std::array<size_t, kFftLengthBy2Plus1> map_band_to_subband;
  size_t next_boundary = 1;
  for (size_t band = 0; band < map_band_to_subband.size(); ++band) {
    RTC_DCHECK_LT(next_boundary, kBandBoundaries.size());
    // Advance to the next subband once this bin crosses its boundary.
    if (band >= kBandBoundaries[next_boundary]) {
      ++next_boundary;
      RTC_DCHECK_LT(band, kBandBoundaries[next_boundary]);
    }
    map_band_to_subband[band] = next_boundary - 1;
  }
  return map_band_to_subband;
}
// Defines the size in blocks of the sections that are used for dividing the
// linear filter. The sections are split in a non-linear manner so that lower
// sections that typically represent the direct path have a larger resolution
// than the higher sections which typically represent more reverberant acoustic
// paths.
std::vector<size_t> DefineFilterSectionSizes(size_t delay_headroom_blocks,
                                             size_t num_blocks,
                                             size_t num_sections) {
  size_t filter_length_blocks = num_blocks - delay_headroom_blocks;
  std::vector<size_t> section_sizes(num_sections);
  size_t remaining_blocks = filter_length_blocks;
  size_t remaining_sections = num_sections;
  size_t estimator_size = 2;
  size_t idx = 0;
  // Early sections grow geometrically (2, 4, 8, ...) while enough blocks
  // remain to give every later section at least that size.
  while (remaining_sections > 1 &&
         remaining_blocks > estimator_size * remaining_sections) {
    RTC_DCHECK_LT(idx, section_sizes.size());
    section_sizes[idx] = estimator_size;
    remaining_blocks -= estimator_size;
    remaining_sections--;
    estimator_size *= 2;
    idx++;
  }
  // The leftover blocks are split evenly over the remaining sections, with
  // any remainder added to the last section.
  size_t last_groups_size = remaining_blocks / remaining_sections;
  for (; idx < num_sections; idx++) {
    section_sizes[idx] = last_groups_size;
  }
  section_sizes[num_sections - 1] +=
      remaining_blocks - last_groups_size * remaining_sections;
  return section_sizes;
}
// Forms the limits in blocks for each filter section. Those sections
// are used for analyzing the echo estimates and investigating which
// linear filter sections contribute most to the echo estimate energy.
std::vector<size_t> SetSectionsBoundaries(size_t delay_headroom_blocks,
                                          size_t num_blocks,
                                          size_t num_sections) {
  std::vector<size_t> estimator_boundaries_blocks(num_sections + 1);
  // Single-section case: the whole filter is one section.
  if (estimator_boundaries_blocks.size() == 2) {
    estimator_boundaries_blocks[0] = 0;
    estimator_boundaries_blocks[1] = num_blocks;
    return estimator_boundaries_blocks;
  }
  RTC_DCHECK_GT(estimator_boundaries_blocks.size(), 2);
  const std::vector<size_t> section_sizes =
      DefineFilterSectionSizes(delay_headroom_blocks, num_blocks,
                               estimator_boundaries_blocks.size() - 1);
  // Turn the per-section sizes into cumulative block boundaries, starting
  // after the delay headroom.
  size_t idx = 0;
  size_t current_size_block = 0;
  RTC_DCHECK_EQ(section_sizes.size() + 1, estimator_boundaries_blocks.size());
  estimator_boundaries_blocks[0] = delay_headroom_blocks;
  for (size_t k = delay_headroom_blocks; k < num_blocks; ++k) {
    current_size_block++;
    if (current_size_block >= section_sizes[idx]) {
      idx = idx + 1;
      if (idx == section_sizes.size()) {
        break;
      }
      estimator_boundaries_blocks[idx] = k + 1;
      current_size_block = 0;
    }
  }
  estimator_boundaries_blocks[section_sizes.size()] = num_blocks;
  return estimator_boundaries_blocks;
}
std::array<float, SignalDependentErleEstimator::kSubbands>
SetMaxErleSubbands(float max_erle_l, float max_erle_h, size_t limit_subband_l) {
std::array<float, SignalDependentErleEstimator::kSubbands> max_erle;
std::fill(max_erle.begin(), max_erle.begin() + limit_subband_l, max_erle_l);
std::fill(max_erle.begin() + limit_subband_l, max_erle.end(), max_erle_h);
return max_erle;
}
} // namespace
// Sets up the per-channel, per-section state from the AEC3 configuration:
// subband mapping, per-subband ERLE ceilings, filter section boundaries,
// and the estimator/correction-factor arrays, then resets everything.
SignalDependentErleEstimator::SignalDependentErleEstimator(
    const EchoCanceller3Config& config,
    size_t num_capture_channels)
    : min_erle_(config.erle.min),
      num_sections_(config.erle.num_sections),
      num_blocks_(config.filter.refined.length_blocks),
      delay_headroom_blocks_(config.delay.delay_headroom_samples / kBlockSize),
      band_to_subband_(FormSubbandMap()),
      max_erle_(SetMaxErleSubbands(config.erle.max_l,
                                   config.erle.max_h,
                                   band_to_subband_[kFftLengthBy2 / 2])),
      section_boundaries_blocks_(SetSectionsBoundaries(delay_headroom_blocks_,
                                                       num_blocks_,
                                                       num_sections_)),
      erle_(num_capture_channels),
      S2_section_accum_(
          num_capture_channels,
          std::vector<std::array<float, kFftLengthBy2Plus1>>(num_sections_)),
      erle_estimators_(
          num_capture_channels,
          std::vector<std::array<float, kSubbands>>(num_sections_)),
      erle_ref_(num_capture_channels),
      correction_factors_(
          num_capture_channels,
          std::vector<std::array<float, kSubbands>>(num_sections_)),
      num_updates_(num_capture_channels),
      n_active_sections_(num_capture_channels) {
  RTC_DCHECK_LE(num_sections_, num_blocks_);
  RTC_DCHECK_GE(num_sections_, 1);
  Reset();
}
// Out-of-line defaulted destructor.
SignalDependentErleEstimator::~SignalDependentErleEstimator() = default;
// Restores every per-channel estimator to its starting point: ERLE values
// at the configured minimum, correction factors at unity, and all update
// and active-section counters at zero.
void SignalDependentErleEstimator::Reset() {
  for (size_t ch = 0; ch < erle_.size(); ++ch) {
    erle_[ch].fill(min_erle_);
    for (auto& erle_estimator : erle_estimators_[ch]) {
      erle_estimator.fill(min_erle_);
    }
    erle_ref_[ch].fill(min_erle_);
    for (auto& factor : correction_factors_[ch]) {
      factor.fill(1.0f);
    }
    num_updates_[ch].fill(0);
    n_active_sections_[ch].fill(0);
  }
}
// Updates the Erle estimate by analyzing the current input signals. It takes
// the render buffer and the filter frequency response in order to do an
// estimation of the number of sections of the linear filter that are needed
// for getting the majority of the energy in the echo estimate. Based on that
// number of sections, it updates the erle estimation by introducing a
// correction factor to the erle that is given as an input to this method.
void SignalDependentErleEstimator::Update(
    const RenderBuffer& render_buffer,
    rtc::ArrayView<const std::vector<std::array<float, kFftLengthBy2Plus1>>>
        filter_frequency_responses,
    rtc::ArrayView<const float, kFftLengthBy2Plus1> X2,
    rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>> Y2,
    rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>> E2,
    rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>> average_erle,
    const std::vector<bool>& converged_filters) {
  RTC_DCHECK_GT(num_sections_, 1);
  // Gets the number of filter sections that are needed for achieving 90 %
  // of the power spectrum energy of the echo estimate.
  ComputeNumberOfActiveFilterSections(render_buffer,
                                      filter_frequency_responses);
  // Updates the correction factors that is used for correcting the erle and
  // adapt it to the particular characteristics of the input signal.
  UpdateCorrectionFactors(X2, Y2, E2, converged_filters);
  // Applies the correction factor to the input erle for getting a more refined
  // erle estimation for the current input signal.
  // NOTE(review): the loop runs k < kFftLengthBy2, so the last bin
  // (kFftLengthBy2) of erle_ is never updated here — confirm intentional.
  for (size_t ch = 0; ch < erle_.size(); ++ch) {
    for (size_t k = 0; k < kFftLengthBy2; ++k) {
      RTC_DCHECK_GT(correction_factors_[ch].size(), n_active_sections_[ch][k]);
      float correction_factor =
          correction_factors_[ch][n_active_sections_[ch][k]]
                              [band_to_subband_[k]];
      erle_[ch][k] = rtc::SafeClamp(average_erle[ch][k] * correction_factor,
                                    min_erle_, max_erle_[band_to_subband_[k]]);
    }
  }
}
// Dumps the internal ERLE state for debugging; only channel 0 is written.
void SignalDependentErleEstimator::Dump(
    const std::unique_ptr<ApmDataDumper>& data_dumper) const {
  for (auto& erle : erle_estimators_[0]) {
    data_dumper->DumpRaw("aec3_all_erle", erle);
  }
  data_dumper->DumpRaw("aec3_ref_erle", erle_ref_[0]);
  for (auto& factor : correction_factors_[0]) {
    data_dumper->DumpRaw("aec3_erle_correction_factor", factor);
  }
}
// Estimates for each band the smallest number of sections in the filter that
// together constitute 90% of the estimated echo energy.
void SignalDependentErleEstimator::ComputeNumberOfActiveFilterSections(
    const RenderBuffer& render_buffer,
    rtc::ArrayView<const std::vector<std::array<float, kFftLengthBy2Plus1>>>
        filter_frequency_responses) {
  RTC_DCHECK_GT(num_sections_, 1);
  // Computes an approximation of the power spectrum if the filter would have
  // been limited to a certain number of filter sections.
  ComputeEchoEstimatePerFilterSection(render_buffer,
                                      filter_frequency_responses);
  // For each band, computes the number of filter sections that are needed for
  // achieving the 90 % energy in the echo estimate.
  ComputeActiveFilterSections();
}
// Updates the signal-dependent ERLE estimators and the correction factors
// relating them to the reference (signal-independent) ERLE. X2 is the render
// power spectrum; Y2 and E2 are the per-capture-channel capture and error
// power spectra. Only channels whose filters have converged are updated.
void SignalDependentErleEstimator::UpdateCorrectionFactors(
    rtc::ArrayView<const float, kFftLengthBy2Plus1> X2,
    rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>> Y2,
    rtc::ArrayView<const std::array<float, kFftLengthBy2Plus1>> E2,
    const std::vector<bool>& converged_filters) {
  for (size_t ch = 0; ch < converged_filters.size(); ++ch) {
    if (converged_filters[ch]) {
      // Minimum render subband energy for an instantaneous ERLE update to be
      // trusted.
      constexpr float kX2BandEnergyThreshold = 44015068.0f;
      // Smoothing constants; increases are tracked at half the rate of
      // decreases.
      constexpr float kSmthConstantDecreases = 0.1f;
      constexpr float kSmthConstantIncreases = kSmthConstantDecreases / 2.f;
      // Sums a full-resolution power spectrum into kSubbands coarse bands
      // delimited by kBandBoundaries.
      auto subband_powers = [](rtc::ArrayView<const float> power_spectrum,
                               rtc::ArrayView<float> power_spectrum_subbands) {
        for (size_t subband = 0; subband < kSubbands; ++subband) {
          RTC_DCHECK_LE(kBandBoundaries[subband + 1], power_spectrum.size());
          power_spectrum_subbands[subband] = std::accumulate(
              power_spectrum.begin() + kBandBoundaries[subband],
              power_spectrum.begin() + kBandBoundaries[subband + 1], 0.f);
        }
      };
      std::array<float, kSubbands> X2_subbands, E2_subbands, Y2_subbands;
      subband_powers(X2, X2_subbands);
      subband_powers(E2[ch], E2_subbands);
      subband_powers(Y2[ch], Y2_subbands);
      std::array<size_t, kSubbands> idx_subbands;
      for (size_t subband = 0; subband < kSubbands; ++subband) {
        // When aggregating the number of active filter sections over the
        // bins of a subband, take the minimum of all of them. As an example,
        // if for one bin the direct path is the dominant contributor to the
        // echo estimate, the direct path is treated as the dominant
        // contributor for the whole subband containing that bin. The
        // aggregated number of sections is later used as the identifier of
        // the ERLE estimator that needs to be updated.
        RTC_DCHECK_LE(kBandBoundaries[subband + 1],
                      n_active_sections_[ch].size());
        idx_subbands[subband] = *std::min_element(
            n_active_sections_[ch].begin() + kBandBoundaries[subband],
            n_active_sections_[ch].begin() + kBandBoundaries[subband + 1]);
      }
      std::array<float, kSubbands> new_erle;
      std::array<bool, kSubbands> is_erle_updated;
      is_erle_updated.fill(false);
      new_erle.fill(0.f);
      // Instantaneous ERLE per subband, computed only where the render
      // energy is high enough and the error power is nonzero.
      for (size_t subband = 0; subband < kSubbands; ++subband) {
        if (X2_subbands[subband] > kX2BandEnergyThreshold &&
            E2_subbands[subband] > 0) {
          new_erle[subband] = Y2_subbands[subband] / E2_subbands[subband];
          RTC_DCHECK_GT(new_erle[subband], 0);
          is_erle_updated[subband] = true;
          ++num_updates_[ch][subband];
        }
      }
      // Smooth the ERLE estimator selected by the aggregate number of
      // active sections towards the new instantaneous estimate.
      for (size_t subband = 0; subband < kSubbands; ++subband) {
        const size_t idx = idx_subbands[subband];
        RTC_DCHECK_LT(idx, erle_estimators_[ch].size());
        float alpha = new_erle[subband] > erle_estimators_[ch][idx][subband]
                          ? kSmthConstantIncreases
                          : kSmthConstantDecreases;
        // Multiplying by the update flag zeroes alpha when there was no
        // valid instantaneous estimate this block.
        alpha = static_cast<float>(is_erle_updated[subband]) * alpha;
        erle_estimators_[ch][idx][subband] +=
            alpha * (new_erle[subband] - erle_estimators_[ch][idx][subband]);
        erle_estimators_[ch][idx][subband] = rtc::SafeClamp(
            erle_estimators_[ch][idx][subband], min_erle_, max_erle_[subband]);
      }
      // Smooth the reference ERLE that is updated on all observations.
      for (size_t subband = 0; subband < kSubbands; ++subband) {
        float alpha = new_erle[subband] > erle_ref_[ch][subband]
                          ? kSmthConstantIncreases
                          : kSmthConstantDecreases;
        alpha = static_cast<float>(is_erle_updated[subband]) * alpha;
        erle_ref_[ch][subband] +=
            alpha * (new_erle[subband] - erle_ref_[ch][subband]);
        erle_ref_[ch][subband] = rtc::SafeClamp(erle_ref_[ch][subband],
                                                min_erle_, max_erle_[subband]);
      }
      // Once enough updates have been seen, refresh the correction factor.
      for (size_t subband = 0; subband < kSubbands; ++subband) {
        constexpr int kNumUpdateThr = 50;
        if (is_erle_updated[subband] &&
            num_updates_[ch][subband] > kNumUpdateThr) {
          const size_t idx = idx_subbands[subband];
          RTC_DCHECK_GT(erle_ref_[ch][subband], 0.f);
          // Computes the ratio between the erle that is updated using all the
          // points and the erle that is updated only on signals that share the
          // same number of active filter sections.
          float new_correction_factor =
              erle_estimators_[ch][idx][subband] / erle_ref_[ch][subband];
          correction_factors_[ch][idx][subband] +=
              0.1f *
              (new_correction_factor - correction_factors_[ch][idx][subband]);
        }
      }
    }
  }
}
// Computes, for every capture channel and every filter section, the power
// spectrum of the echo estimate that would result from truncating the filter
// after that section. After the final pass, entry `section` holds the
// cumulative estimate covering sections [0, section].
void SignalDependentErleEstimator::ComputeEchoEstimatePerFilterSection(
    const RenderBuffer& render_buffer,
    rtc::ArrayView<const std::vector<std::array<float, kFftLengthBy2Plus1>>>
        filter_frequency_responses) {
  const SpectrumBuffer& spectrum_render_buffer =
      render_buffer.GetSpectrumBuffer();
  const size_t num_render_channels = spectrum_render_buffer.buffer[0].size();
  const size_t num_capture_channels = S2_section_accum_.size();
  const float one_by_num_render_channels = 1.f / num_render_channels;
  RTC_DCHECK_EQ(S2_section_accum_.size(), filter_frequency_responses.size());
  for (size_t capture_ch = 0; capture_ch < num_capture_channels; ++capture_ch) {
    RTC_DCHECK_EQ(S2_section_accum_[capture_ch].size() + 1,
                  section_boundaries_blocks_.size());
    // Advance the circular render-spectrum index to the first block of the
    // first section.
    size_t idx_render = render_buffer.Position();
    idx_render = spectrum_render_buffer.OffsetIndex(
        idx_render, section_boundaries_blocks_[0]);
    for (size_t section = 0; section < num_sections_; ++section) {
      std::array<float, kFftLengthBy2Plus1> X2_section;
      std::array<float, kFftLengthBy2Plus1> H2_section;
      X2_section.fill(0.f);
      H2_section.fill(0.f);
      const size_t block_limit =
          std::min(section_boundaries_blocks_[section + 1],
                   filter_frequency_responses[capture_ch].size());
      for (size_t block = section_boundaries_blocks_[section];
           block < block_limit; ++block) {
        // Average the render spectrum over the render channels and
        // accumulate it over the blocks of this section.
        for (size_t render_ch = 0;
             render_ch < spectrum_render_buffer.buffer[idx_render].size();
             ++render_ch) {
          for (size_t k = 0; k < X2_section.size(); ++k) {
            X2_section[k] +=
                spectrum_render_buffer.buffer[idx_render][render_ch][k] *
                one_by_num_render_channels;
          }
        }
        // Accumulate the filter frequency response over the same blocks.
        std::transform(H2_section.begin(), H2_section.end(),
                       filter_frequency_responses[capture_ch][block].begin(),
                       H2_section.begin(), std::plus<float>());
        idx_render = spectrum_render_buffer.IncIndex(idx_render);
      }
      // Per-section echo power estimate: X2 * H2, bin by bin.
      std::transform(X2_section.begin(), X2_section.end(), H2_section.begin(),
                     S2_section_accum_[capture_ch][section].begin(),
                     std::multiplies<float>());
    }
    // Turn the per-section estimates into cumulative sums over sections.
    for (size_t section = 1; section < num_sections_; ++section) {
      std::transform(S2_section_accum_[capture_ch][section - 1].begin(),
                     S2_section_accum_[capture_ch][section - 1].end(),
                     S2_section_accum_[capture_ch][section].begin(),
                     S2_section_accum_[capture_ch][section].begin(),
                     std::plus<float>());
    }
  }
}
// For each frequency bin, finds the smallest number of leading filter
// sections whose accumulated echo estimate still reaches 90 % of the estimate
// produced by the full filter, and stores it in n_active_sections_.
void SignalDependentErleEstimator::ComputeActiveFilterSections() {
  for (size_t ch = 0; ch < n_active_sections_.size(); ++ch) {
    std::fill(n_active_sections_[ch].begin(), n_active_sections_[ch].end(), 0);
    for (size_t k = 0; k < kFftLengthBy2Plus1; ++k) {
      size_t section = num_sections_;
      // 90 % of the energy of the estimate that uses all sections.
      float target = 0.9f * S2_section_accum_[ch][num_sections_ - 1][k];
      // Walk backwards while the shorter (truncated) filters still reach the
      // target, recording the shortest one that does.
      while (section > 0 && S2_section_accum_[ch][section - 1][k] >= target) {
        n_active_sections_[ch][k] = --section;
      }
    }
  }
}
} // namespace webrtc
| endlessm/chromium-browser | third_party/webrtc/modules/audio_processing/aec3/signal_dependent_erle_estimator.cc | C++ | bsd-3-clause | 17,930 |
/*
* (C) 1999-2003 Lars Knoll (knoll@kde.org)
* Copyright (C) 2004, 2006, 2007, 2012 Apple Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this library; see the file COPYING.LIB. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "config.h"
#include "core/css/CSSStyleSheet.h"
#include "bindings/core/v8/ExceptionState.h"
#include "bindings/core/v8/V8Binding.h"
#include "bindings/core/v8/V8PerIsolateData.h"
#include "core/HTMLNames.h"
#include "core/SVGNames.h"
#include "core/css/CSSImportRule.h"
#include "core/css/CSSRuleList.h"
#include "core/css/MediaList.h"
#include "core/css/StyleRule.h"
#include "core/css/StyleSheetContents.h"
#include "core/css/parser/CSSParser.h"
#include "core/dom/Document.h"
#include "core/dom/ExceptionCode.h"
#include "core/dom/Node.h"
#include "core/frame/UseCounter.h"
#include "core/html/HTMLStyleElement.h"
#include "core/inspector/InspectorInstrumentation.h"
#include "core/svg/SVGStyleElement.h"
#include "platform/weborigin/SecurityOrigin.h"
#include "wtf/text/StringBuilder.h"
namespace blink {
// CSSRuleList facade over a CSSStyleSheet. It owns no rules itself: every
// call is forwarded to the backing sheet, and (without Oilpan) ref-counting
// is delegated to the sheet so the wrapper and sheet share a lifetime.
class StyleSheetCSSRuleList final : public CSSRuleList {
public:
    static PassOwnPtrWillBeRawPtr<StyleSheetCSSRuleList> create(CSSStyleSheet* sheet)
    {
        return adoptPtrWillBeNoop(new StyleSheetCSSRuleList(sheet));
    }
    DEFINE_INLINE_VIRTUAL_TRACE()
    {
        visitor->trace(m_styleSheet);
        CSSRuleList::trace(visitor);
    }
private:
    StyleSheetCSSRuleList(CSSStyleSheet* sheet) : m_styleSheet(sheet) { }
#if !ENABLE(OILPAN)
    // Without the garbage collector, keep the sheet alive for as long as
    // this wrapper is referenced.
    virtual void ref() override { m_styleSheet->ref(); }
    virtual void deref() override { m_styleSheet->deref(); }
#endif
    // Pure forwarders to the backing sheet.
    virtual unsigned length() const override { return m_styleSheet->length(); }
    virtual CSSRule* item(unsigned index) const override { return m_styleSheet->item(index); }
    virtual CSSStyleSheet* styleSheet() const override { return m_styleSheet; }
    RawPtrWillBeMember<CSSStyleSheet> m_styleSheet;
};
#if ENABLE(ASSERT)
// Returns whether |parentNode| is a node type allowed to own a StyleSheet.
// Only these nodes can be parents of StyleSheets, and they need to call
// clearOwnerNode() when moved out of document.
// Destruction of the style sheet counts as being "moved out of the
// document", but only in the non-oilpan version of blink. I.e. don't call
// clearOwnerNode() in the owner's destructor in oilpan.
static bool isAcceptableCSSStyleSheetParent(Node* parentNode)
{
    if (!parentNode)
        return true;
    if (parentNode->isDocumentNode())
        return true;
    if (isHTMLLinkElement(*parentNode) || isHTMLStyleElement(*parentNode) || isSVGStyleElement(*parentNode))
        return true;
    return parentNode->nodeType() == Node::PROCESSING_INSTRUCTION_NODE;
}
#endif
// Creates a sheet owned by an @import rule (no owner node).
PassRefPtrWillBeRawPtr<CSSStyleSheet> CSSStyleSheet::create(PassRefPtrWillBeRawPtr<StyleSheetContents> sheet, CSSImportRule* ownerRule)
{
    return adoptRefWillBeNoop(new CSSStyleSheet(sheet, ownerRule));
}
// Creates a non-inline sheet owned by a node.
PassRefPtrWillBeRawPtr<CSSStyleSheet> CSSStyleSheet::create(PassRefPtrWillBeRawPtr<StyleSheetContents> sheet, Node* ownerNode)
{
    return adoptRefWillBeNoop(new CSSStyleSheet(sheet, ownerNode, false, TextPosition::minimumPosition()));
}
// Creates an inline sheet from already-parsed contents; |startPosition| is
// the position of the style text within the owning document.
PassRefPtrWillBeRawPtr<CSSStyleSheet> CSSStyleSheet::createInline(PassRefPtrWillBeRawPtr<StyleSheetContents> sheet, Node* ownerNode, const TextPosition& startPosition)
{
    ASSERT(sheet);
    return adoptRefWillBeNoop(new CSSStyleSheet(sheet, ownerNode, true, startPosition));
}
// Creates an inline sheet together with fresh, empty StyleSheetContents
// parsed against |baseURL| and |encoding|.
PassRefPtrWillBeRawPtr<CSSStyleSheet> CSSStyleSheet::createInline(Node* ownerNode, const KURL& baseURL, const TextPosition& startPosition, const String& encoding)
{
    CSSParserContext parserContext(ownerNode->document(), 0, baseURL, encoding);
    RefPtrWillBeRawPtr<StyleSheetContents> sheet = StyleSheetContents::create(baseURL.string(), parserContext);
    return adoptRefWillBeNoop(new CSSStyleSheet(sheet.release(), ownerNode, true, startPosition));
}
// Constructor for sheets created by an @import rule; such sheets have an
// owner rule but no owner node.
CSSStyleSheet::CSSStyleSheet(PassRefPtrWillBeRawPtr<StyleSheetContents> contents, CSSImportRule* ownerRule)
    : m_contents(contents)
    , m_isInlineStylesheet(false)
    , m_isDisabled(false)
    , m_ownerNode(nullptr)
    , m_ownerRule(ownerRule)
    , m_startPosition(TextPosition::minimumPosition())
    , m_loadCompleted(false)
{
    // Register as a client so the shared contents can notify this sheet.
    m_contents->registerClient(this);
}
// Constructor for sheets owned by a node (see
// isAcceptableCSSStyleSheetParent for the accepted node types).
CSSStyleSheet::CSSStyleSheet(PassRefPtrWillBeRawPtr<StyleSheetContents> contents, Node* ownerNode, bool isInlineStylesheet, const TextPosition& startPosition)
    : m_contents(contents)
    , m_isInlineStylesheet(isInlineStylesheet)
    , m_isDisabled(false)
    , m_ownerNode(ownerNode)
    , m_ownerRule(nullptr)
    , m_startPosition(startPosition)
    , m_loadCompleted(false)
{
    ASSERT(isAcceptableCSSStyleSheetParent(ownerNode));
    m_contents->registerClient(this);
}
// Destructor. All manual unlinking is needed only without Oilpan.
CSSStyleSheet::~CSSStyleSheet()
{
    // With oilpan the parent style sheet pointer is strong and the sheet and
    // its RuleCSSOMWrappers die together and we don't need to clear them here.
    // Also with oilpan the StyleSheetContents client pointers are weak and
    // therefore do not need to be cleared here.
#if !ENABLE(OILPAN)
    // For style rules outside the document, .parentStyleSheet can become null even if the style rule
    // is still observable from JavaScript. This matches the behavior of .parentNode for nodes, but
    // it's not ideal because it makes the CSSOM's behavior depend on the timing of garbage collection.
    for (unsigned i = 0; i < m_childRuleCSSOMWrappers.size(); ++i) {
        if (m_childRuleCSSOMWrappers[i])
            m_childRuleCSSOMWrappers[i]->setParentStyleSheet(0);
    }
    if (m_mediaCSSOMWrapper)
        m_mediaCSSOMWrapper->clearParentStyleSheet();
    m_contents->unregisterClient(this);
#endif
}
// Called before any CSSOM mutation of the rule set. When the contents are
// shared with other clients (cached stylesheets), performs copy-on-write so
// the mutation does not leak into the other sheets.
void CSSStyleSheet::willMutateRules()
{
    InspectorInstrumentation::willMutateRules(this);
    // If we are the only client it is safe to mutate.
    if (m_contents->clientSize() <= 1 && !m_contents->isInMemoryCache()) {
        m_contents->clearRuleSet();
        if (Document* document = ownerDocument())
            m_contents->removeSheetFromCache(document);
        m_contents->setMutable();
        return;
    }
    // Only cacheable stylesheets should have multiple clients.
    ASSERT(m_contents->isCacheable());
    // Copy-on-write.
    m_contents->unregisterClient(this);
    m_contents = m_contents->copy();
    m_contents->registerClient(this);
    m_contents->setMutable();
    // Any existing CSSOM wrappers need to be connected to the copied child rules.
    reattachChildRuleCSSOMWrappers();
}
// Called after a CSSOM rule mutation; by now the contents must be uniquely
// owned (see willMutateRules).
void CSSStyleSheet::didMutateRules()
{
    ASSERT(m_contents->isMutable());
    ASSERT(m_contents->clientSize() <= 1);
    InspectorInstrumentation::didMutateRules(this);
    didMutate(PartialRuleUpdate);
}
// Notifies the owner document that this sheet changed so styles get
// recalculated with the appropriate amount of work.
void CSSStyleSheet::didMutate(StyleSheetUpdateType updateType)
{
    Document* owner = ownerDocument();
    if (!owner)
        return;
    // Need FullStyleUpdate when insertRule or deleteRule,
    // because StyleSheetCollection::analyzeStyleSheetChange cannot detect partial rule update.
    // (PartialRuleUpdate therefore maps to FullStyleUpdate; everything else
    // can use the cheaper AnalyzedStyleUpdate.)
    StyleResolverUpdateMode updateMode = updateType != PartialRuleUpdate ? AnalyzedStyleUpdate : FullStyleUpdate;
    owner->modifiedStyleSheet(this, updateMode);
}
// Points the existing CSSOM rule wrappers at the (possibly copied) child
// rules so wrapper identity is preserved across copy-on-write.
void CSSStyleSheet::reattachChildRuleCSSOMWrappers()
{
    for (unsigned i = 0; i < m_childRuleCSSOMWrappers.size(); ++i) {
        if (!m_childRuleCSSOMWrappers[i])
            continue;
        m_childRuleCSSOMWrappers[i]->reattach(m_contents->ruleAt(i));
    }
}
void CSSStyleSheet::setDisabled(bool disabled)
{
    if (disabled == m_isDisabled)
        return;
    m_isDisabled = disabled;
    // Enabling/disabling a sheet changes which styles apply.
    didMutate();
}
void CSSStyleSheet::setMediaQueries(PassRefPtrWillBeRawPtr<MediaQuerySet> mediaQueries)
{
    m_mediaQueries = mediaQueries;
    // Keep an already-created MediaList wrapper in sync with the new set.
    if (m_mediaCSSOMWrapper && m_mediaQueries)
        m_mediaCSSOMWrapper->reattach(m_mediaQueries.get());
}
// Number of top-level rules in the sheet.
unsigned CSSStyleSheet::length() const
{
    return m_contents->ruleCount();
}
// Returns the CSSOM wrapper for the rule at |index|, creating it lazily.
// Returns 0 when the index is out of range.
CSSRule* CSSStyleSheet::item(unsigned index)
{
    unsigned ruleCount = length();
    if (index >= ruleCount)
        return 0;
    // The wrapper vector is populated lazily on first access.
    if (m_childRuleCSSOMWrappers.isEmpty())
        m_childRuleCSSOMWrappers.grow(ruleCount);
    ASSERT(m_childRuleCSSOMWrappers.size() == ruleCount);
    RefPtrWillBeMember<CSSRule>& cssRule = m_childRuleCSSOMWrappers[index];
    if (!cssRule)
        cssRule = m_contents->ruleAt(index)->createCSSOMWrapper(this);
    return cssRule.get();
}
// Detaches the sheet from its owner node, e.g. when the node leaves the
// document (see isAcceptableCSSStyleSheetParent for the lifetime contract).
void CSSStyleSheet::clearOwnerNode()
{
    didMutate(EntireStyleSheetUpdate);
    if (m_ownerNode)
        m_contents->unregisterClient(this);
    m_ownerNode = nullptr;
}
// Same-origin policy gate for CSSOM access to this sheet's rules.
bool CSSStyleSheet::canAccessRules() const
{
    if (m_isInlineStylesheet)
        return true;
    KURL baseURL = m_contents->baseURL();
    if (baseURL.isEmpty())
        return true;
    Document* document = ownerDocument();
    if (!document)
        return true;
    if (document->securityOrigin()->canRequest(baseURL))
        return true;
    // Explicit origin-based grant set via setAllowRuleAccessFromOrigin().
    if (m_allowRuleAccessFromOrigin && document->securityOrigin()->canAccess(m_allowRuleAccessFromOrigin.get()))
        return true;
    return false;
}
// Legacy alias for cssRules().
PassRefPtrWillBeRawPtr<CSSRuleList> CSSStyleSheet::rules()
{
    return cssRules();
}
// Parses |ruleString| and inserts the resulting rule at |index|. Returns the
// index on success; throws a DOMException for bad indices, parse failures or
// hierarchy violations and returns 0.
unsigned CSSStyleSheet::insertRule(const String& ruleString, unsigned index, ExceptionState& exceptionState)
{
    ASSERT(m_childRuleCSSOMWrappers.isEmpty() || m_childRuleCSSOMWrappers.size() == m_contents->ruleCount());
    if (index > length()) {
        exceptionState.throwDOMException(IndexSizeError, "The index provided (" + String::number(index) + ") is larger than the maximum index (" + String::number(length()) + ").");
        return 0;
    }
    CSSParserContext context(m_contents->parserContext(), UseCounter::getFrom(this));
    RefPtrWillBeRawPtr<StyleRuleBase> rule = CSSParser::parseRule(context, m_contents.get(), ruleString);
    // FIXME: @namespace rules have special handling in the CSSOM spec, but it
    // mostly doesn't make sense since we don't support CSSNamespaceRule
    if (!rule || rule->isNamespaceRule()) {
        exceptionState.throwDOMException(SyntaxError, "Failed to parse the rule '" + ruleString + "'.");
        return 0;
    }
    // Scope object runs willMutateRules()/didMutateRules() around the change.
    RuleMutationScope mutationScope(this);
    bool success = m_contents->wrapperInsertRule(rule, index);
    if (!success) {
        exceptionState.throwDOMException(HierarchyRequestError, "Failed to insert the rule.");
        return 0;
    }
    // Keep the lazily-built wrapper vector aligned with the rule indices.
    if (!m_childRuleCSSOMWrappers.isEmpty())
        m_childRuleCSSOMWrappers.insert(index, RefPtrWillBeMember<CSSRule>(nullptr));
    return index;
}
// One-argument insertRule() overload; deprecated, inserts at index 0.
unsigned CSSStyleSheet::insertRule(const String& rule, ExceptionState& exceptionState)
{
    UseCounter::countDeprecation(callingExecutionContext(V8PerIsolateData::mainThreadIsolate()), UseCounter::CSSStyleSheetInsertRuleOptionalArg);
    return insertRule(rule, 0, exceptionState);
}
// Removes the rule at |index| and drops its CSSOM wrapper, if any.
void CSSStyleSheet::deleteRule(unsigned index, ExceptionState& exceptionState)
{
    ASSERT(m_childRuleCSSOMWrappers.isEmpty() || m_childRuleCSSOMWrappers.size() == m_contents->ruleCount());
    if (index >= length()) {
        // NOTE(review): for an empty sheet, length() - 1 underflows (unsigned)
        // and the message reports a huge "maximum index"; the exception type
        // thrown is still correct.
        exceptionState.throwDOMException(IndexSizeError, "The index provided (" + String::number(index) + ") is larger than the maximum index (" + String::number(length() - 1) + ").");
        return;
    }
    RuleMutationScope mutationScope(this);
    m_contents->wrapperDeleteRule(index);
    if (!m_childRuleCSSOMWrappers.isEmpty()) {
        // Detach the removed rule's wrapper from this sheet before dropping it.
        if (m_childRuleCSSOMWrappers[index])
            m_childRuleCSSOMWrappers[index]->setParentStyleSheet(0);
        m_childRuleCSSOMWrappers.remove(index);
    }
}
// Legacy IE addRule(): synthesizes "<selector> { <style> }" and inserts it.
int CSSStyleSheet::addRule(const String& selector, const String& style, int index, ExceptionState& exceptionState)
{
    StringBuilder text;
    text.append(selector);
    text.appendLiteral(" { ");
    text.append(style);
    // Only add the separating space when there is a declaration body.
    if (!style.isEmpty())
        text.append(' ');
    text.append('}');
    insertRule(text.toString(), index, exceptionState);
    // As per Microsoft documentation, always return -1.
    return -1;
}
// addRule() without an index appends at the end of the sheet.
int CSSStyleSheet::addRule(const String& selector, const String& style, ExceptionState& exceptionState)
{
    return addRule(selector, style, length(), exceptionState);
}
// Returns the live CSSRuleList for this sheet, created lazily; null when
// access is blocked by the same-origin policy (see canAccessRules).
PassRefPtrWillBeRawPtr<CSSRuleList> CSSStyleSheet::cssRules()
{
    if (!canAccessRules())
        return nullptr;
    if (!m_ruleListCSSOMWrapper)
        m_ruleListCSSOMWrapper = StyleSheetCSSRuleList::create(this);
    return m_ruleListCSSOMWrapper.get();
}
// URL the sheet was originally loaded from.
String CSSStyleSheet::href() const
{
    return m_contents->originalURL();
}
// Base URL used to resolve relative URLs inside the sheet.
KURL CSSStyleSheet::baseURL() const
{
    return m_contents->baseURL();
}
bool CSSStyleSheet::isLoading() const
{
    return m_contents->isLoading();
}
// MediaList wrapper for the sheet's media queries, created lazily; null when
// the sheet has no media queries.
MediaList* CSSStyleSheet::media() const
{
    if (!m_mediaQueries)
        return 0;
    if (!m_mediaCSSOMWrapper)
        m_mediaCSSOMWrapper = MediaList::create(m_mediaQueries.get(), const_cast<CSSStyleSheet*>(this));
    return m_mediaCSSOMWrapper.get();
}
// Parent sheet of an @import-ed sheet; null for top-level sheets.
CSSStyleSheet* CSSStyleSheet::parentStyleSheet() const
{
    return m_ownerRule ? m_ownerRule->parentStyleSheet() : 0;
}
// Returns the document that ultimately owns this sheet by walking up the
// @import chain to the root sheet; null when the root has no owner node.
Document* CSSStyleSheet::ownerDocument() const
{
    const CSSStyleSheet* root = this;
    for (const CSSStyleSheet* sheet = this; sheet; sheet = sheet->parentStyleSheet())
        root = sheet;
    return root->ownerNode() ? &root->ownerNode()->document() : 0;
}
// Grants CSSOM rule access to the given origin (checked in canAccessRules).
void CSSStyleSheet::setAllowRuleAccessFromOrigin(PassRefPtr<SecurityOrigin> allowedOrigin)
{
    m_allowRuleAccessFromOrigin = allowedOrigin;
}
// Discards all lazily-created rule wrappers; item() will recreate them.
void CSSStyleSheet::clearChildRuleCSSOMWrappers()
{
    m_childRuleCSSOMWrappers.clear();
}
// Called when the sheet resource finished loading; the owner node decides
// whether loading is really complete.
bool CSSStyleSheet::sheetLoaded()
{
    ASSERT(m_ownerNode);
    setLoadCompleted(m_ownerNode->sheetLoaded());
    return m_loadCompleted;
}
void CSSStyleSheet::startLoadingDynamicSheet()
{
    setLoadCompleted(false);
    m_ownerNode->startLoadingDynamicSheet();
}
// Tracks load-state transitions and keeps the contents' client bookkeeping
// (loading vs. completed clients) up to date.
void CSSStyleSheet::setLoadCompleted(bool completed)
{
    if (completed == m_loadCompleted)
        return;
    m_loadCompleted = completed;
    if (completed)
        m_contents->clientLoadCompleted(this);
    else
        m_contents->clientLoadStarted(this);
}
// Oilpan tracing of all garbage-collected members, chaining to the base.
DEFINE_TRACE(CSSStyleSheet)
{
    visitor->trace(m_contents);
    visitor->trace(m_mediaQueries);
    visitor->trace(m_ownerNode);
    visitor->trace(m_ownerRule);
    visitor->trace(m_mediaCSSOMWrapper);
    visitor->trace(m_childRuleCSSOMWrappers);
    visitor->trace(m_ruleListCSSOMWrapper);
    StyleSheet::trace(visitor);
}
} // namespace blink
| guorendong/iridium-browser-ubuntu | third_party/WebKit/Source/core/css/CSSStyleSheet.cpp | C++ | bsd-3-clause | 15,007 |
/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @emails react-core
*/
'use strict';
var CSSCore = require('CSSCore');
var React;
var ReactDOM;
var ReactCSSTransitionGroup;
// Most of the real functionality is covered in other unit tests, this just
// makes sure we're wired up correctly.
describe('ReactCSSTransitionGroup', function() {
  var container;
  beforeEach(function() {
    // Reset the module registry so every test gets fresh React copies.
    require('mock-modules').dumpCache();
    React = require('React');
    ReactDOM = require('ReactDOM');
    ReactCSSTransitionGroup = require('ReactCSSTransitionGroup');
    container = document.createElement('div');
    // Capture warnings so tests can assert on console.error call counts.
    spyOn(console, 'error');
  });
  it('should warn if timeouts aren\'t specified', function() {
    ReactDOM.render(
      <ReactCSSTransitionGroup
        transitionName="yolo"
        transitionEnter={false}
        transitionLeave={true}
      >
        <span key="one" id="one" />
      </ReactCSSTransitionGroup>,
      container
    );
    // Warning about the missing transitionLeaveTimeout prop
    expect(console.error.argsForCall.length).toBe(1);
  });
  it('should not warn if timeouts is zero', function() {
    // Zero is a valid timeout value and must not trigger the warning.
    ReactDOM.render(
      <ReactCSSTransitionGroup
        transitionName="yolo"
        transitionEnter={false}
        transitionLeave={true}
        transitionLeaveTimeout={0}
      >
        <span key="one" id="one" />
      </ReactCSSTransitionGroup>,
      container
    );
    expect(console.error.argsForCall.length).toBe(0);
  });
  it('should clean-up silently after the timeout elapses', function() {
    var a = ReactDOM.render(
      <ReactCSSTransitionGroup
        transitionName="yolo"
        transitionEnter={false}
        transitionLeaveTimeout={200}
      >
        <span key="one" id="one" />
      </ReactCSSTransitionGroup>,
      container
    );
    expect(ReactDOM.findDOMNode(a).childNodes.length).toBe(1);
    // Clear recorded timer calls so only the leave timeout is inspected below.
    setTimeout.mock.calls.length = 0;
    ReactDOM.render(
      <ReactCSSTransitionGroup
        transitionName="yolo"
        transitionEnter={false}
        transitionLeaveTimeout={200}
      >
        <span key="two" id="two" />
      </ReactCSSTransitionGroup>,
      container
    );
    // Both children are present while the leave transition is pending.
    expect(ReactDOM.findDOMNode(a).childNodes.length).toBe(2);
    expect(ReactDOM.findDOMNode(a).childNodes[0].id).toBe('two');
    expect(ReactDOM.findDOMNode(a).childNodes[1].id).toBe('one');
    // For some reason jst is adding extra setTimeout()s and grunt test isn't,
    // so we need to do this disgusting hack.
    for (var i = 0; i < setTimeout.mock.calls.length; i++) {
      if (setTimeout.mock.calls[i][1] === 200) {
        setTimeout.mock.calls[i][0]();
        break;
      }
    }
    // No warnings
    expect(console.error.argsForCall.length).toBe(0);
    // The leaving child has been removed
    expect(ReactDOM.findDOMNode(a).childNodes.length).toBe(1);
    expect(ReactDOM.findDOMNode(a).childNodes[0].id).toBe('two');
  });
  it('should keep both sets of DOM nodes around', function() {
    var a = ReactDOM.render(
      <ReactCSSTransitionGroup transitionName="yolo">
        <span key="one" id="one" />
      </ReactCSSTransitionGroup>,
      container
    );
    expect(ReactDOM.findDOMNode(a).childNodes.length).toBe(1);
    ReactDOM.render(
      <ReactCSSTransitionGroup transitionName="yolo">
        <span key="two" id="two" />
      </ReactCSSTransitionGroup>,
      container
    );
    // The leaving child stays in the DOM alongside the entering one.
    expect(ReactDOM.findDOMNode(a).childNodes.length).toBe(2);
    expect(ReactDOM.findDOMNode(a).childNodes[0].id).toBe('two');
    expect(ReactDOM.findDOMNode(a).childNodes[1].id).toBe('one');
  });
  it('should switch transitionLeave from false to true', function() {
    var a = ReactDOM.render(
      <ReactCSSTransitionGroup
        transitionName="yolo"
        transitionEnter={false}
        transitionLeave={false}>
        <span key="one" id="one" />
      </ReactCSSTransitionGroup>,
      container
    );
    expect(ReactDOM.findDOMNode(a).childNodes.length).toBe(1);
    ReactDOM.render(
      <ReactCSSTransitionGroup
        transitionName="yolo"
        transitionEnter={false}
        transitionLeave={false}>
        <span key="two" id="two" />
      </ReactCSSTransitionGroup>,
      container
    );
    // With transitionLeave disabled the old child is removed immediately.
    expect(ReactDOM.findDOMNode(a).childNodes.length).toBe(1);
    ReactDOM.render(
      <ReactCSSTransitionGroup
        transitionName="yolo"
        transitionEnter={false}
        transitionLeave={true}>
        <span key="three" id="three" />
      </ReactCSSTransitionGroup>,
      container
    );
    // Once re-enabled, the leaving child is kept during the transition.
    expect(ReactDOM.findDOMNode(a).childNodes.length).toBe(2);
    expect(ReactDOM.findDOMNode(a).childNodes[0].id).toBe('three');
    expect(ReactDOM.findDOMNode(a).childNodes[1].id).toBe('two');
  });
  it('should work with no children', function() {
    ReactDOM.render(
      <ReactCSSTransitionGroup transitionName="yolo" />,
      container
    );
  });
  it('should work with a null child', function() {
    ReactDOM.render(
      <ReactCSSTransitionGroup transitionName="yolo">
        {[null]}
      </ReactCSSTransitionGroup>,
      container
    );
  });
  it('should transition from one to null', function() {
    var a = ReactDOM.render(
      <ReactCSSTransitionGroup transitionName="yolo">
        <span key="one" id="one" />
      </ReactCSSTransitionGroup>,
      container
    );
    expect(ReactDOM.findDOMNode(a).childNodes.length).toBe(1);
    ReactDOM.render(
      <ReactCSSTransitionGroup transitionName="yolo">
        {null}
      </ReactCSSTransitionGroup>,
      container
    );
    // (Here, we expect the original child to stick around but test that no
    // exception is thrown)
    expect(ReactDOM.findDOMNode(a).childNodes.length).toBe(1);
    expect(ReactDOM.findDOMNode(a).childNodes[0].id).toBe('one');
  });
  it('should transition from false to one', function() {
    var a = ReactDOM.render(
      <ReactCSSTransitionGroup transitionName="yolo">
        {false}
      </ReactCSSTransitionGroup>,
      container
    );
    expect(ReactDOM.findDOMNode(a).childNodes.length).toBe(0);
    ReactDOM.render(
      <ReactCSSTransitionGroup transitionName="yolo">
        <span key="one" id="one" />
      </ReactCSSTransitionGroup>,
      container
    );
    expect(ReactDOM.findDOMNode(a).childNodes.length).toBe(1);
    expect(ReactDOM.findDOMNode(a).childNodes[0].id).toBe('one');
  });
  it('should use transition-type specific names when they\'re provided', function() {
    // An object-valued transitionName maps each phase to its own class name.
    var customTransitionNames = {
      enter: 'custom-entering',
      leave: 'custom-leaving',
    };
    var a = ReactDOM.render(
      <ReactCSSTransitionGroup
        transitionName={customTransitionNames}
        transitionEnterTimeout={1}
        transitionLeaveTimeout={1}
      >
        <span key="one" id="one" />
      </ReactCSSTransitionGroup>,
      container
    );
    expect(ReactDOM.findDOMNode(a).childNodes.length).toBe(1);
    // Add an element
    ReactDOM.render(
      <ReactCSSTransitionGroup
        transitionName={customTransitionNames}
        transitionEnterTimeout={1}
        transitionLeaveTimeout={1}
      >
        <span key="one" id="one" />
        <span key="two" id="two" />
      </ReactCSSTransitionGroup>,
      container
    );
    expect(ReactDOM.findDOMNode(a).childNodes.length).toBe(2);
    var enteringNode = ReactDOM.findDOMNode(a).childNodes[1];
    expect(CSSCore.hasClass(enteringNode, 'custom-entering')).toBe(true);
    // Remove an element
    ReactDOM.render(
      <ReactCSSTransitionGroup
        transitionName={customTransitionNames}
        transitionEnterTimeout={1}
        transitionLeaveTimeout={1}
      >
        <span key="two" id="two" />
      </ReactCSSTransitionGroup>,
      container
    );
    expect(ReactDOM.findDOMNode(a).childNodes.length).toBe(2);
    var leavingNode = ReactDOM.findDOMNode(a).childNodes[0];
    expect(CSSCore.hasClass(leavingNode, 'custom-leaving')).toBe(true);
  });
});
| camsong/react | src/addons/transitions/__tests__/ReactCSSTransitionGroup-test.js | JavaScript | bsd-3-clause | 8,182 |
import sys
import re
from functools import wraps
from unittest import TestCase
from scrapy.http import Request
from scrapy.utils.spider import iterate_spider_output
from scrapy.utils.python import get_spec
class ContractsManager(object):
    """Registry of :class:`Contract` types, keyed by contract name.

    Builds contract-wrapped requests for spider callback methods whose
    docstrings contain ``@<contract-name> <args>`` lines.
    """

    def __init__(self, contracts):
        # Use a per-instance registry. Previously this was a class-level
        # dict mutated here, so every ContractsManager instance shared (and
        # polluted) the same mapping.
        self.contracts = {}
        for contract in contracts:
            self.contracts[contract.name] = contract

    def extract_contracts(self, method):
        """Return the Contract instances declared in *method*'s docstring.

        Each docstring line of the form ``@name arg1 arg2`` is turned into
        ``self.contracts[name](method, arg1, arg2)``. Methods without a
        docstring declare no contracts.
        """
        contracts = []
        if not method.__doc__:
            # Be tolerant of undocumented callbacks instead of raising
            # AttributeError on ``None.split('\n')``.
            return contracts
        for line in method.__doc__.split('\n'):
            line = line.strip()
            if line.startswith('@'):
                name, args = re.match(r'@(\w+)\s*(.*)', line).groups()
                args = re.split(r'\s+', args)
                contracts.append(self.contracts[name](method, *args))
        return contracts

    def from_method(self, method, results):
        """Build a Request for *method* wrapped with its contract hooks.

        Returns None when the method declares no contracts, or when the
        contracts do not supply every mandatory Request argument.
        """
        contracts = self.extract_contracts(method)
        if contracts:
            # calculate request args
            args, kwargs = get_spec(Request.__init__)
            kwargs['callback'] = method
            for contract in contracts:
                kwargs = contract.adjust_request_args(kwargs)
            # create and prepare request
            args.remove('self')
            if set(args).issubset(set(kwargs)):
                request = Request(**kwargs)
                # execute pre and post hooks in order
                for contract in reversed(contracts):
                    request = contract.add_pre_hook(request, results)
                for contract in contracts:
                    request = contract.add_post_hook(request, results)
                return request
class Contract(object):
    """Abstract base class for spider contracts.

    A contract wraps a spider callback with pre- and/or post-hooks that run
    a check and record its outcome into a ``unittest`` results object.
    Subclasses set a ``name`` class attribute and implement
    ``pre_process(response)`` and/or ``post_process(output)``.
    """
    def __init__(self, method, *args):
        # One synthetic TestCase per hook so each outcome is reported
        # separately in the results.
        self.testcase_pre = self.create_testcase(method, 'pre-hook')
        self.testcase_post = self.create_testcase(method, 'post-hook')
        # Whitespace-separated arguments parsed from the @contract line.
        self.args = args
    def create_testcase(self, method, hook):
        # method is a bound spider method; the spider exposes `name`.
        spider = method.__self__.name
        class ContractTestCase(TestCase):
            # _self is the TestCase; `self` (closure) is the Contract, whose
            # `name` identifies the contract in the description.
            def __str__(_self):
                return "[%s] %s (@%s %s)" % (spider, method.__name__, self.name, hook)
        name = '%s_%s' % (spider, method.__name__)
        # Give the TestCase a runnable no-op method with a descriptive name.
        setattr(ContractTestCase, name, lambda x: x)
        return ContractTestCase(name)
    def add_pre_hook(self, request, results):
        """Wrap request.callback so pre_process runs before the callback."""
        if hasattr(self, 'pre_process'):
            cb = request.callback
            @wraps(cb)
            def wrapper(response):
                try:
                    results.startTest(self.testcase_pre)
                    self.pre_process(response)
                    results.stopTest(self.testcase_pre)
                except AssertionError:
                    results.addFailure(self.testcase_pre, sys.exc_info())
                except Exception:
                    results.addError(self.testcase_pre, sys.exc_info())
                else:
                    results.addSuccess(self.testcase_pre)
                finally:
                    # The `return` inside `finally` intentionally swallows any
                    # exception raised above, so a failing pre-hook never
                    # prevents the original callback from running.
                    return list(iterate_spider_output(cb(response)))
            request.callback = wrapper
        return request
    def add_post_hook(self, request, results):
        """Wrap request.callback so post_process inspects its output."""
        if hasattr(self, 'post_process'):
            cb = request.callback
            @wraps(cb)
            def wrapper(response):
                try:
                    output = list(iterate_spider_output(cb(response)))
                    results.startTest(self.testcase_post)
                    self.post_process(output)
                    results.stopTest(self.testcase_post)
                except AssertionError:
                    results.addFailure(self.testcase_post, sys.exc_info())
                except Exception:
                    results.addError(self.testcase_post, sys.exc_info())
                else:
                    results.addSuccess(self.testcase_post)
                finally:
                    # NOTE(review): if cb(response) itself raises, `output` is
                    # never bound and this `return output` raises NameError --
                    # looks like a latent bug; confirm the intended behavior.
                    return output
            request.callback = wrapper
        return request
    def adjust_request_args(self, args):
        """Hook for subclasses to modify the Request kwargs; default no-op."""
        return args
| 1900/scrapy | scrapy/contracts/__init__.py | Python | bsd-3-clause | 4,024 |
/**
* Copyright 2013-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @providesModule SyntheticWheelEvent
*/
'use strict';
var SyntheticMouseEvent = require('SyntheticMouseEvent');
/**
* @interface WheelEvent
* @see http://www.w3.org/TR/DOM-Level-3-Events/
*/
/**
 * @interface WheelEvent
 * @see http://www.w3.org/TR/DOM-Level-3-Events/
 */
var WheelEventInterface = {
  deltaX: function(event) {
    if ('deltaX' in event) {
      return event.deltaX;
    }
    // Fallback to `wheelDeltaX` for Webkit and normalize (right is positive).
    if ('wheelDeltaX' in event) {
      return -event.wheelDeltaX;
    }
    return 0;
  },
  deltaY: function(event) {
    if ('deltaY' in event) {
      return event.deltaY;
    }
    // Fallback to `wheelDeltaY` for Webkit and normalize (down is positive).
    if ('wheelDeltaY' in event) {
      return -event.wheelDeltaY;
    }
    // Fallback to `wheelDelta` for IE<9 and normalize (down is positive).
    if ('wheelDelta' in event) {
      return -event.wheelDelta;
    }
    return 0;
  },
  deltaZ: null,
  // Browsers without "deltaMode" is reporting in raw wheel delta where one
  // notch on the scroll is always +/- 120, roughly equivalent to pixels.
  // A good approximation of DOM_DELTA_LINE (1) is 5% of viewport size or
  // ~40 pixels, for DOM_DELTA_SCREEN (2) it is 87.5% of viewport size.
  deltaMode: null,
};
/**
 * Synthetic wheel event; delegates construction to SyntheticMouseEvent and
 * extends it with the normalized wheel fields above.
 *
 * @param {object} dispatchConfig Configuration used to dispatch this event.
 * @param {string} dispatchMarker Marker identifying the event target.
 * @param {object} nativeEvent Native browser event.
 * @extends {SyntheticMouseEvent}
 */
function SyntheticWheelEvent(dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget) {
  SyntheticMouseEvent.call(this, dispatchConfig, dispatchMarker, nativeEvent, nativeEventTarget);
}
SyntheticMouseEvent.augmentClass(SyntheticWheelEvent, WheelEventInterface);
module.exports = SyntheticWheelEvent;
| zeke/react | src/renderers/dom/client/syntheticEvents/SyntheticWheelEvent.js | JavaScript | bsd-3-clause | 2,006 |
package org.hisp.dhis.reporting.dataset.action;
/*
* Copyright (c) 2004-2017, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import static org.hisp.dhis.period.PeriodType.getAvailablePeriodTypes;
import static org.hisp.dhis.period.PeriodType.getPeriodFromIsoString;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.hisp.dhis.dataelement.CategoryOptionGroupSet;
import org.hisp.dhis.dataelement.DataElementCategoryCombo;
import org.hisp.dhis.dataelement.DataElementCategoryService;
import org.hisp.dhis.dataset.DataSet;
import org.hisp.dhis.dataset.DataSetService;
import org.hisp.dhis.organisationunit.OrganisationUnitGroupService;
import org.hisp.dhis.organisationunit.OrganisationUnitGroupSet;
import org.hisp.dhis.organisationunit.OrganisationUnitService;
import org.hisp.dhis.oust.manager.SelectionTreeManager;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodType;
import org.joda.time.DateTime;
import com.opensymphony.xwork2.Action;
/**
 * Action which provides the options for the data set report UI: all data
 * sets, period types, category combos, category option group sets and
 * organisation unit group sets. When a data set (ds), period (pe) and
 * organisation unit (ou) have all been supplied, the report is flagged for
 * rendering and the organisation unit selection state is prepared.
 *
 * @author Lars Helge Overland
 */
public class GetDataSetReportOptionsAction
    implements Action
{
    // -------------------------------------------------------------------------
    // Dependencies
    // -------------------------------------------------------------------------

    private DataSetService dataSetService;

    public void setDataSetService( DataSetService dataSetService )
    {
        this.dataSetService = dataSetService;
    }

    private OrganisationUnitService organisationUnitService;

    public void setOrganisationUnitService( OrganisationUnitService organisationUnitService )
    {
        this.organisationUnitService = organisationUnitService;
    }

    private OrganisationUnitGroupService organisationUnitGroupService;

    public void setOrganisationUnitGroupService( OrganisationUnitGroupService organisationUnitGroupService )
    {
        this.organisationUnitGroupService = organisationUnitGroupService;
    }

    private DataElementCategoryService categoryService;

    public void setCategoryService( DataElementCategoryService categoryService )
    {
        this.categoryService = categoryService;
    }

    private SelectionTreeManager selectionTreeManager;

    public void setSelectionTreeManager( SelectionTreeManager selectionTreeManager )
    {
        this.selectionTreeManager = selectionTreeManager;
    }

    // -------------------------------------------------------------------------
    // Input
    // -------------------------------------------------------------------------

    /** Data set identifier. */
    private String ds;

    public String getDs()
    {
        return ds;
    }

    public void setDs( String ds )
    {
        this.ds = ds;
    }

    /** Period as ISO string. */
    private String pe;

    public String getPe()
    {
        return pe;
    }

    public void setPe( String pe )
    {
        this.pe = pe;
    }

    /** Organisation unit identifier. */
    private String ou;

    public void setOu( String ou )
    {
        this.ou = ou;
    }

    // -------------------------------------------------------------------------
    // Output
    // -------------------------------------------------------------------------

    private List<DataSet> dataSets;

    public List<DataSet> getDataSets()
    {
        return dataSets;
    }

    private List<PeriodType> periodTypes;

    public List<PeriodType> getPeriodTypes()
    {
        return periodTypes;
    }

    /** Whether all inputs required to render the report were supplied. */
    private boolean render;

    public boolean isRender()
    {
        return render;
    }

    /** Year offset of the selected period relative to the current year. */
    private int offset;

    public int getOffset()
    {
        return offset;
    }

    private PeriodType periodType;

    public PeriodType getPeriodType()
    {
        return periodType;
    }

    private DataElementCategoryCombo defaultCategoryCombo;

    public DataElementCategoryCombo getDefaultCategoryCombo()
    {
        return defaultCategoryCombo;
    }

    private List<DataElementCategoryCombo> categoryCombos;

    public List<DataElementCategoryCombo> getCategoryCombos()
    {
        return categoryCombos;
    }

    private List<CategoryOptionGroupSet> categoryOptionGroupSets;

    public List<CategoryOptionGroupSet> getCategoryOptionGroupSets()
    {
        return categoryOptionGroupSets;
    }

    private List<OrganisationUnitGroupSet> organisationUnitGroupSets;

    public List<OrganisationUnitGroupSet> getOrganisationUnitGroupSets()
    {
        return organisationUnitGroupSets;
    }

    // -------------------------------------------------------------------------
    // Action implementation
    // -------------------------------------------------------------------------

    @Override
    public String execute()
    {
        periodTypes = getAvailablePeriodTypes();

        render = ( ds != null && pe != null && ou != null );

        // Parse the ISO period string once; the previous version parsed it
        // twice (once in the condition and once in the body).
        Period period = pe != null ? getPeriodFromIsoString( pe ) : null;

        if ( period != null )
        {
            offset = new DateTime( period.getStartDate() ).getYear() - new DateTime().getYear();

            periodType = period.getPeriodType();

            selectionTreeManager.setSelectedOrganisationUnit( organisationUnitService.getOrganisationUnit( ou ) ); //TODO set unit state in client instead
        }

        defaultCategoryCombo = categoryService.getDefaultDataElementCategoryCombo();

        dataSets = new ArrayList<>( dataSetService.getAllDataSets() );
        categoryCombos = new ArrayList<>( categoryService.getAttributeCategoryCombos() );
        categoryOptionGroupSets = new ArrayList<>( categoryService.getAllCategoryOptionGroupSets() );
        organisationUnitGroupSets = new ArrayList<>( organisationUnitGroupService.getAllOrganisationUnitGroupSets() );

        Collections.sort( dataSets );
        Collections.sort( categoryCombos );
        Collections.sort( categoryOptionGroupSets );
        Collections.sort( organisationUnitGroupSets );

        return SUCCESS;
    }
}
| troyel/dhis2-core | dhis-2/dhis-web/dhis-web-reporting/src/main/java/org/hisp/dhis/reporting/dataset/action/GetDataSetReportOptionsAction.java | Java | bsd-3-clause | 7,482 |
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"bufio"
"fmt"
"os"
"strconv"
"strings"
)
// Generic expression parser/evaluator

// Value is the interface implemented by every operand type the
// evaluator can manipulate.  BinaryOp applies operator op to the
// receiver (left operand) and y (right operand) and returns the
// result, which may be an Error value.
type Value interface {
	String() string
	BinaryOp(op string, y Value) Value
}
// Parser holds the scanner and parser state for one expression.
type Parser struct {
	precTab map[string]int     // operator -> precedence; higher binds tighter
	newVal  func(string) Value // converts a literal token into a Value
	src     string             // expression source text
	pos     int                // current scan offset into src
	tok     string             // most recently scanned token
}
// alphanum is the set of bytes that may appear inside an
// identifier or number token.
const alphanum = "_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"

// stop reports whether the scan of the token that began with byte c
// should end at the current position.  A string token ends after its
// closing '"' (which is consumed); an identifier/number token ends
// before the first non-alphanumeric byte; any other token (operator)
// is a single scan step.
func (p *Parser) stop(c uint8) bool {
	switch {
	case p.pos >= len(p.src):
		return true
	case c == '"':
		if p.src[p.pos] == '"' {
			p.pos++
			return true
		}
		return false
	case strings.IndexRune(alphanum, rune(c)) >= 0:
		// Fixed: IndexRune takes a rune; the previous int(...)
		// conversions do not compile under Go 1.
		return strings.IndexRune(alphanum, rune(p.src[p.pos])) < 0
	}
	return true
}
// next scans the following token from p.src into p.tok, first skipping
// any leading blanks (bytes <= ' ').  At end of input p.tok is set to
// the empty string.
func (p *Parser) next() {
	// skip blanks
	for ; p.pos < len(p.src) && p.src[p.pos] <= ' '; p.pos++ {
	}
	if p.pos >= len(p.src) {
		p.tok = ""
		return
	}
	start := p.pos
	c := p.src[p.pos]
	// Consume bytes until stop decides the token that began with c ends.
	for p.pos < len(p.src) {
		p.pos++
		if p.stop(c) {
			break
		}
	}
	p.tok = p.src[start:p.pos]
}
// binaryExpr parses and evaluates a binary expression whose operators
// all have precedence >= prec1 (precedence climbing).  The leading
// token is converted to an operand with p.newVal; operands at each
// precedence level are combined left-to-right, recursing with a higher
// minimum precedence for the right-hand side.
func (p *Parser) binaryExpr(prec1 int) Value {
	x := p.newVal(p.tok)
	p.next()
	// The outer loop starts from the precedence of the operator now at
	// hand and counts down to prec1, handling each level in turn.
	for prec := p.precTab[p.tok]; prec >= prec1; prec-- {
		for p.precTab[p.tok] == prec {
			op := p.tok
			p.next()
			y := p.binaryExpr(prec + 1)
			x = x.BinaryOp(op, y)
		}
	}
	return x
}
// Eval parses and evaluates the expression src, combining operands
// according to the operator precedences in precTab and converting
// literal tokens with newVal.
func Eval(precTab map[string]int, newVal func(string) Value, src string) Value {
	p := Parser{precTab: precTab, newVal: newVal, src: src}
	p.next()
	return p.binaryExpr(1)
}
// Command-line expression evaluator

// main reads one expression per line from standard input, evaluates it
// with the traced literal converter, and prints the result; it exits on
// read error or end of input.
func main() {
	r := bufio.NewReader(os.Stdin)
	for {
		fmt.Printf("> ")
		line, err := r.ReadString('\n')
		if err != nil {
			break
		}
		fmt.Printf("%s\n", Eval(precTab, trace(newVal), line))
	}
}
// Custom grammar and values

// precTab assigns a precedence to each binary operator; higher numbers
// bind tighter (logical operators lowest, multiplicative highest).
var precTab = map[string]int{
	"&&": 1,
	"||": 2,
	"==": 3,
	"!=": 3,
	"<":  3,
	"<=": 3,
	">":  3,
	">=": 3,
	"+":  4,
	"-":  4,
	"*":  5,
	"/":  5,
	"%":  5,
}
// newVal converts the literal lit into an Int, Bool, or String value,
// attempting each conversion in that order; a literal that matches none
// of them yields an Error value.
func newVal(lit string) Value {
	if i, err := strconv.Atoi(lit); err == nil {
		return Int(i)
	}
	if b, err := strconv.ParseBool(lit); err == nil {
		return Bool(b)
	}
	if s, err := strconv.Unquote(lit); err == nil {
		return String(s)
	}
	return Error(fmt.Sprintf("illegal literal '%s'", lit))
}
// Error is an error message produced during evaluation.  It absorbs any
// further operation, propagating itself unchanged.
type Error string

func (e Error) String() string                    { return string(e) }
func (e Error) BinaryOp(op string, y Value) Value { return e }
// Int is an integer value.
type Int int

// String returns the decimal representation of x.
func (x Int) String() string { return strconv.Itoa(int(x)) }

// BinaryOp implements arithmetic and comparison between two Ints and
// string repetition for Int * String; Error operands are propagated and
// any other combination yields a new Error value.
func (x Int) BinaryOp(op string, y Value) Value {
	switch y := y.(type) {
	case Error:
		return y // propagate errors
	case String:
		switch op {
		case "*":
			return String(strings.Repeat(string(y), int(x)))
		}
	case Int:
		switch op {
		case "+":
			return x + y
		case "-":
			return x - y
		case "*":
			return x * y
		case "/":
			return x / y
		case "%":
			return x % y
		case "==":
			return Bool(x == y)
		case "!=":
			return Bool(x != y)
		case "<":
			return Bool(x < y)
		case "<=":
			return Bool(x <= y)
		case ">":
			return Bool(x > y)
		case ">=":
			return Bool(x >= y)
		}
	}
	return Error(fmt.Sprintf("illegal operation: '%v %s %v'", x, op, y))
}
// Bool is a boolean value.
type Bool bool

// String returns "true" or "false".
func (x Bool) String() string { return strconv.FormatBool(bool(x)) }

// BinaryOp implements logical and equality operators between two Bools;
// Error operands are propagated and any other combination yields a new
// Error value.
func (x Bool) BinaryOp(op string, y Value) Value {
	switch y := y.(type) {
	case Error:
		return y // propagate errors
	case Bool:
		switch op {
		case "&&":
			return Bool(x && y)
		case "||":
			return Bool(x || y)
		case "==":
			return Bool(x == y)
		case "!=":
			return Bool(x != y)
		}
	}
	return Error(fmt.Sprintf("illegal operation: '%v %s %v'", x, op, y))
}
// String is a string value.
type String string

// String returns x as a double-quoted Go string literal.
func (x String) String() string { return strconv.Quote(string(x)) }

// BinaryOp implements concatenation and "<" between two Strings and
// repetition for String * Int; Error operands are propagated and any
// other combination yields a new Error value.
func (x String) BinaryOp(op string, y Value) Value {
	switch y := y.(type) {
	case Error:
		return y // propagate errors
	case Int:
		switch op {
		case "*":
			return String(strings.Repeat(string(x), int(y)))
		}
	case String:
		switch op {
		case "+":
			return x + y
		case "<":
			return Bool(x < y)
		}
	}
	return Error(fmt.Sprintf("illegal operation: '%v %s %v'", x, op, y))
}
// trace wraps a literal converter so that every constructed value is
// logged and returned as a traceValue, which in turn logs all further
// operations on it.
func trace(newVal func(string) Value) func(string) Value {
	return func(s string) Value {
		v := newVal(s)
		fmt.Printf("\tnewVal(%q) = %s\n", s, fmtv(v))
		return &traceValue{v}
	}
}
// traceValue decorates a Value, printing each BinaryOp and String call
// (operands and result) before delegating to the embedded value.
type traceValue struct {
	Value
}

// BinaryOp unwraps the right operand, delegates the operation, logs it,
// and re-wraps the result so tracing continues.
func (x *traceValue) BinaryOp(op string, y Value) Value {
	z := x.Value.BinaryOp(op, y.(*traceValue).Value)
	fmt.Printf("\t%s.BinaryOp(%q, %s) = %s\n", fmtv(x.Value), op, fmtv(y.(*traceValue).Value), fmtv(z))
	return &traceValue{z}
}

// String logs and returns the wrapped value's string form.
func (x *traceValue) String() string {
	s := x.Value.String()
	fmt.Printf("\t%s.String() = %#v\n", fmtv(x.Value), s)
	return s
}
// fmtv formats v as TypeName(goSyntaxValue), with the package qualifier
// stripped from the dynamic type name.
func fmtv(v Value) string {
	name := fmt.Sprintf("%T", v)
	if dot := strings.LastIndex(name, "."); dot >= 0 {
		name = name[dot+1:] // strip package qualifier
	}
	return fmt.Sprintf("%s(%#v)", name, v)
}
| Triskite/willstone-goclone | doc/talks/io2010/eval2.go | GO | bsd-3-clause | 4,858 |
#region License
//
// Copyright (c) 2013, Kooboo team
//
// Licensed under the BSD License
// See the file LICENSE.txt for details.
//
#endregion
using Kooboo.CMS.Common.Persistence.Non_Relational;
using Kooboo.CMS.Sites.Models;
using System;
using System.Collections.Generic;
using System.Data.Entity;
using System.Linq;
using System.Text;
namespace Kooboo.CMS.Sites.Persistence.EntityFramework.LabelProvider
{
    /// <summary>
    /// Entity Framework backed persistence provider for site labels and
    /// their categories, registered for several provider interfaces via
    /// the dependency attributes below.
    /// </summary>
    [Kooboo.CMS.Common.Runtime.Dependency.Dependency(typeof(ILabelProvider), Order = 100)]
    [Kooboo.CMS.Common.Runtime.Dependency.Dependency(typeof(IProvider<Label>), Order = 100)]
    [Kooboo.CMS.Common.Runtime.Dependency.Dependency(typeof(ISiteExportableProvider), Order = 100, Key = "LabelProvider")]
    public class LabelProvider : ILabelProvider
    {
        #region _dbContext
        private SiteDBContext _dbContext;
        public LabelProvider(SiteDBContext dbContext)
        {
            this._dbContext = dbContext;
        }
        #endregion

        #region Label
        /// <summary>
        /// Returns all labels of the given site within one category
        /// (a null category matches the empty-string category).
        /// </summary>
        public IQueryable<Label> GetLabels(Site site, string category)
        {
            // ToArray() materializes the query before mapping entities to
            // domain Label objects, which EF could not translate to SQL.
            return _dbContext.Labels
                  .Where(it => it.SiteName == site.FullName)
                  .Where(it => it.Category == (category ?? ""))
                  .ToArray()
                  .Select(it => it.ToLabel(new Label() { Site = site })).AsQueryable();
        }
        /// <summary>Returns every label belonging to the given site.</summary>
        public IEnumerable<Label> All(Site site)
        {
            return _dbContext.Labels
                 .Where(it => it.SiteName == site.FullName)
                 .ToArray()
                 .Select(it => it.ToLabel(new Label() { Site = site })).AsQueryable();
        }
        /// <summary>Not supported; labels are always scoped to a site.</summary>
        public IEnumerable<Label> All()
        {
            throw new NotImplementedException();
        }
        // Upserts a label entity: updates the value when it already exists,
        // otherwise creates it (creating its category on demand).
        private void InsertOrUpdateLabel(Label @new, Label old)
        {
            var entity = _dbContext.Labels
                .Where(it => it.SiteName == @new.Site.FullName && it.Name == @new.Name && it.Category == (@new.Category ?? ""))
                .FirstOrDefault();
            if (entity != null)
            {
                entity.Value = @new.Value;
            }
            else
            {
                if (!string.IsNullOrEmpty(@new.Category))
                {
                    AddCategory(@new.Site, @new.Category);
                }
                entity = new LabelEntity(@new.Site.FullName, @new.Name, @new.Value, @new.Category);
                entity.UUID = @new.UUID;
                entity.UtcCreationDate = @new.UtcCreationDate;
                _dbContext.Labels.Add(entity);
            }
            entity.LastestEditor = @new.LastestEditor;
            entity.UtcLastestModificationDate = @new.UtcLastestModificationDate;
            _dbContext.SaveChanges();
        }
        /// <summary>
        /// Looks up a single label matching the dummy's identity
        /// (name+category, or UUID when the name is empty); null if absent.
        /// </summary>
        public Label Get(Label dummy)
        {
            var labelEntity = GetLabelEntity(_dbContext.Labels, dummy)
                  .FirstOrDefault();
            if (labelEntity != null)
            {
                return labelEntity.ToLabel(dummy);
            }
            return null;
        }
        /// <summary>Adds a label (upsert semantics).</summary>
        public void Add(Label item)
        {
            InsertOrUpdateLabel(item, item);
        }
        /// <summary>Updates a label (upsert semantics).</summary>
        public void Update(Label @new, Label old)
        {
            InsertOrUpdateLabel(@new, old);
        }
        // Resolves a label entity by site + name + category when a name is
        // given, otherwise by site + UUID.
        private IQueryable<LabelEntity> GetLabelEntity(DbSet<LabelEntity> labels, Label label)
        {
            if (!string.IsNullOrEmpty(label.Name))
            {
                return labels.Where(it => it.SiteName == label.Site.FullName && it.Name == label.Name && it.Category == (label.Category ?? ""));
            }
            else
            {
                return labels.Where(it => it.SiteName == label.Site.FullName && it.UUID == label.UUID);
            }
        }
        /// <summary>Deletes the label if it exists; silently ignores misses.</summary>
        public void Remove(Label item)
        {
            var entity = GetLabelEntity(_dbContext.Labels, item).FirstOrDefault();
            if (entity != null)
            {
                _dbContext.Labels.Remove(entity);
                _dbContext.SaveChanges();
            }
        }
        #endregion

        #region Category
        /// <summary>Returns the names of all label categories of the site.</summary>
        public IEnumerable<string> GetCategories(Site site)
        {
            return _dbContext.LabelCategories
                .Where(it => it.SiteName == site.FullName)
                .Select(it => it.CategoryName).AsQueryable();
        }
        /// <summary>Creates the category if it does not exist yet.</summary>
        public void AddCategory(Site site, string category)
        {
            var entity = _dbContext.LabelCategories
                .Where(it => it.SiteName == site.FullName && it.CategoryName == category)
                .FirstOrDefault();
            if (entity != null)
            {
                // Already present: nothing to do.
            }
            else
            {
                entity = new CategoryEntity(site.FullName, category);
                _dbContext.LabelCategories.Add(entity);
                _dbContext.SaveChanges();
            }
        }
        /// <summary>
        /// Deletes the category and, in the same transaction, every label
        /// that belongs to it.
        /// </summary>
        public void RemoveCategory(Site site, string category)
        {
            var entity = _dbContext.LabelCategories
               .Where(it => it.SiteName == site.FullName && it.CategoryName == category)
               .FirstOrDefault();
            if (entity != null)
            {
                _dbContext.LabelCategories.Remove(entity);

                var labels = _dbContext.Labels
                    .Where(it => it.SiteName == site.FullName && it.Category == category);
                foreach (var item in labels)
                {
                    _dbContext.Labels.Remove(item);
                }
                _dbContext.SaveChanges();
            }
        }
        #endregion

        #region Export/Import
        /// <summary>Exports labels and categories of a site to a zip stream.</summary>
        public void Export(Site site, IEnumerable<Label> labels, IEnumerable<string> categories, System.IO.Stream outputStream)
        {
            new Kooboo.CMS.Sites.Persistence.FileSystem.LabelImportExportHelper(this).Export(site, labels, categories, outputStream);
        }
        /// <summary>Imports labels from a zip stream, optionally overriding.</summary>
        public void Import(Site site, System.IO.Stream zipStream, bool @override)
        {
            new Kooboo.CMS.Sites.Persistence.FileSystem.LabelImportExportHelper(this).Import(site, zipStream, @override);
        }
        /// <summary>Seeds the database from the site's on-disk label files.</summary>
        public void InitializeToDB(Site site)
        {
            new Kooboo.CMS.Sites.Persistence.FileSystem.LabelImportExportHelper(this).InitializeLabels(site);
        }
        /// <summary>Writes the site's labels back out to disk.</summary>
        public void ExportToDisk(Site site)
        {
            new Kooboo.CMS.Sites.Persistence.FileSystem.LabelImportExportHelper(this).ExportLabelsToDisk(site);
        }
        #endregion

        /// <summary>Deletes every label of the given site.</summary>
        public void Flush(Site site)
        {
            var entities = _dbContext.Labels
                .Where(it => it.SiteName == site.FullName);
            foreach (var item in entities)
            {
                _dbContext.Labels.Remove(item);
            }
            _dbContext.SaveChanges();
        }
    }
| lingxyd/CMS | Kooboo.CMS/Kooboo.CMS.SitesProviders/Kooboo.CMS.Sites.Persistence.EntityFramework/LabelProvider/LabelProvider.cs | C# | bsd-3-clause | 6,921 |
import React from 'react'
import ComponentExample from 'docs/app/Components/ComponentDoc/ComponentExample'
import ExampleSection from 'docs/app/Components/ComponentDoc/ExampleSection'
const SelectTypeExamples = () => (
<ExampleSection title='Usage'>
<ComponentExample
title='Select'
description='Default Select.'
examplePath='addons/Select/Types/SelectExample'
/>
</ExampleSection>
)
export default SelectTypeExamples
| aabustamante/Semantic-UI-React | docs/app/Examples/addons/Select/Types/index.js | JavaScript | mit | 450 |
<?php
/**
* This file is part of PDepend.
*
* PHP Version 5
*
* Copyright (c) 2008-2013, Manuel Pichler <mapi@pdepend.org>.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* * Neither the name of Manuel Pichler nor the names of his
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* @copyright 2008-2013 Manuel Pichler. All rights reserved.
* @license http://www.opensource.org/licenses/bsd-license.php BSD License
*/
namespace PDepend\Source\AST;
use PDepend\AbstractTest;
/**
* Test case for the code parameter class.
*
* @copyright 2008-2013 Manuel Pichler. All rights reserved.
* @license http://www.opensource.org/licenses/bsd-license.php BSD License
*
* @covers \PDepend\Source\AST\ASTParameter
* @group unittest
*/
class ASTParameterTest extends AbstractTest
{
    // Each test parses a fixture source file named after the calling test
    // method (resolved by parseCodeResourceForTest() in AbstractTest).

    /**
     * testGetIdReturnsExpectedObjectHash
     *
     * Verifies that a parameter's id is the spl_object_hash of the
     * parameter instance itself.
     *
     * @return void
     * @since 1.0.0
     */
    public function testGetIdReturnsExpectedObjectHash()
    {
        $parameters = $this->_getFirstMethodInClass()->getParameters();
        $this->assertEquals(spl_object_hash($parameters[0]), $parameters[0]->getId());
    }

    /**
     * Tests that the allows null method returns <b>true</b> for a simple parameter.
     *
     * @return void
     */
    public function testParameterAllowsNullForSimpleVariableIssue67()
    {
        $parameters = $this->_getFirstMethodInClass()->getParameters();
        $this->assertTrue($parameters[0]->allowsNull());
    }

    /**
     * Tests that the allows null method returns <b>true</b> for a simple
     * parameter passed by reference.
     *
     * @return void
     */
    public function testParameterAllowsNullForSimpleVariablePassedByReferenceIssue67()
    {
        $parameters = $this->_getFirstMethodInClass()->getParameters();
        $this->assertTrue($parameters[0]->allowsNull());
    }

    /**
     * Tests that the allows null method returns <b>false</b> for an array
     * parameter without explicit <b>null</b> default value.
     *
     * @return void
     */
    public function testParameterNotAllowsNullForArrayHintVariableIssue67()
    {
        $parameters = $this->_getFirstMethodInClass()->getParameters();
        $this->assertFalse($parameters[0]->allowsNull());
    }

    /**
     * Tests that the allows null method returns <b>true</b> for an array
     * parameter with explicit <b>null</b> default value.
     *
     * @return void
     */
    public function testParameterAllowsNullForArrayHintVariableIssue67()
    {
        $parameters = $this->_getFirstMethodInClass()->getParameters();
        $this->assertTrue($parameters[0]->allowsNull());
    }

    /**
     * Tests that the allows null method returns <b>false</b> for a typed
     * parameter without explicit <b>null</b> default value.
     *
     * @return void
     */
    public function testParameterNotAllowsNullForTypeHintVariableIssue67()
    {
        $parameters = $this->_getFirstMethodInClass()->getParameters();
        $this->assertFalse($parameters[0]->allowsNull());
    }

    /**
     * Tests that the allows null method returns <b>true</b> for a type
     * parameter with explicit <b>null</b> default value.
     *
     * @return void
     */
    public function testParameterAllowsNullForTypeHintVariableIssue67()
    {
        $parameter = $this->_getFirstMethodInClass()->getParameters();
        $this->assertTrue($parameter[0]->allowsNull());
    }

    /**
     * Tests that the getDeclaringClass() method returns <b>null</b> for a
     * function.
     *
     * @return void
     */
    public function testParameterDeclaringClassReturnsNullForFunctionIssue67()
    {
        $parameter = $this->getFirstFunctionForTestCase()->getParameters();
        $this->assertNull($parameter[0]->getDeclaringClass());
    }

    /**
     * Tests that the getDeclaringClass() method returns the declaring class
     * of a parent function/method.
     *
     * @return void
     */
    public function testParameterDeclaringClassReturnsExpectedInstanceForMethodIssue67()
    {
        $class = self::parseCodeResourceForTest()
            ->current()
            ->getClasses()
            ->current();

        $parameters = $class->getMethods()
            ->current()
            ->getParameters();
        $this->assertSame($class, $parameters[0]->getDeclaringClass());
    }

    /**
     * Tests that the parameter class handles a type holder as expected.
     *
     * @return void
     */
    public function testParameterReturnsExpectedTypeFromASTClassOrInterfaceReference()
    {
        $class = self::parseCodeResourceForTest()
            ->current()
            ->getClasses()
            ->current();

        $parameters = $class->getMethods()
            ->current()
            ->getParameters();
        $this->assertSame($class, $parameters[0]->getClass());
    }

    /**
     * Tests that a parameter returns <b>null</b> when no type holder was set.
     *
     * @return void
     */
    public function testParameterReturnNullForTypeWhenNoASTClassOrInterfaceReferenceWasSet()
    {
        $parameters = $this->_getFirstMethodInClass()->getParameters();
        $this->assertNull($parameters[0]->getClass());
    }

    /**
     * Tests that a parameter returns the expected function instance.
     *
     * @return void
     */
    public function testParameterReturnsExpectedDeclaringFunction()
    {
        $function   = $this->getFirstFunctionForTestCase();
        $parameters = $function->getParameters();
        $this->assertSame($function, $parameters[0]->getDeclaringFunction());
    }

    /**
     * Tests that a parameter returns the expected method instance.
     *
     * @return void
     */
    public function testParameterReturnsExpectedDeclaringMethod()
    {
        $method     = $this->_getFirstMethodInClass();
        $parameters = $method->getParameters();
        $this->assertSame($method, $parameters[0]->getDeclaringFunction());
    }

    /**
     * Returns the first class method found in the test file associated with the
     * calling test method.
     *
     * @return \PDepend\Source\AST\ASTMethod
     * @since 1.0.0
     */
    private function _getFirstMethodInClass()
    {
        return self::parseCodeResourceForTest()
            ->current()
            ->getClasses()
            ->current()
            ->getMethods()
            ->current();
    }

    /**
     * testAcceptInvokesVisitParameterOnSuppliedVisitor
     *
     * Verifies the visitor double-dispatch: accept() must forward the
     * parameter instance to the visitor's visitParameter() exactly once.
     *
     * @return void
     */
    public function testAcceptInvokesVisitParameterOnSuppliedVisitor()
    {
        $visitor = $this->getMock('\\PDepend\\Source\\ASTVisitor\\ASTVisitor');
        $visitor->expects($this->once())
            ->method('visitParameter')
            ->with($this->isInstanceOf('\\PDepend\\Source\\AST\\ASTParameter'));

        $parameter = new ASTParameter($this->getMock('PDepend\\Source\\AST\\ASTFormalParameter'));
        $parameter->accept($visitor);
    }
}
| drentas/JenkinsTest | vendor/pdepend/pdepend/src/test/php/PDepend/Source/AST/ASTParameterTest.php | PHP | mit | 8,342 |
import six
import signal
import logging
import warnings
from twisted.internet import reactor, defer
from zope.interface.verify import verifyClass, DoesNotImplement
from scrapy.core.engine import ExecutionEngine
from scrapy.resolver import CachingThreadedResolver
from scrapy.interfaces import ISpiderLoader
from scrapy.extension import ExtensionManager
from scrapy.settings import Settings
from scrapy.signalmanager import SignalManager
from scrapy.exceptions import ScrapyDeprecationWarning
from scrapy.utils.ossignal import install_shutdown_handlers, signal_names
from scrapy.utils.misc import load_object
from scrapy.utils.log import LogCounterHandler, configure_logging, log_scrapy_info
from scrapy import signals
logger = logging.getLogger(__name__)
class Crawler(object):
    """Ties a spider class to its settings, stats, signals, extensions and
    the execution engine that drives a single crawl."""

    def __init__(self, spidercls, settings=None):
        if isinstance(settings, dict) or settings is None:
            settings = Settings(settings)

        self.spidercls = spidercls
        self.settings = settings.copy()
        # Let the spider class apply its own settings (e.g. custom_settings)
        # before anything else reads them.
        self.spidercls.update_settings(self.settings)

        self.signals = SignalManager(self)
        self.stats = load_object(self.settings['STATS_CLASS'])(self)

        # NOTE(review): LOG_LEVEL is read from the *incoming* settings object,
        # not from the updated self.settings copy -- confirm intentional.
        handler = LogCounterHandler(self, level=settings.get('LOG_LEVEL'))
        logging.root.addHandler(handler)
        # lambda is assigned to Crawler attribute because this way it is not
        # garbage collected after leaving __init__ scope
        self.__remove_handler = lambda: logging.root.removeHandler(handler)
        self.signals.connect(self.__remove_handler, signals.engine_stopped)

        lf_cls = load_object(self.settings['LOG_FORMATTER'])
        self.logformatter = lf_cls.from_crawler(self)
        self.extensions = ExtensionManager.from_crawler(self)

        # Freeze settings: no further modification once the crawler is built.
        self.settings.freeze()
        self.crawling = False
        self.spider = None
        self.engine = None

    @property
    def spiders(self):
        """Deprecated access to a lazily-built spider loader."""
        if not hasattr(self, '_spiders'):
            warnings.warn("Crawler.spiders is deprecated, use "
                          "CrawlerRunner.spider_loader or instantiate "
                          "scrapy.spiderloader.SpiderLoader with your "
                          "settings.",
                          category=ScrapyDeprecationWarning, stacklevel=2)
            self._spiders = _get_spider_loader(self.settings.frozencopy())
        return self._spiders

    @defer.inlineCallbacks
    def crawl(self, *args, **kwargs):
        """Start the crawl: build the spider and engine, open the spider with
        its start requests and start the engine.  Extra arguments are passed
        to the spider constructor.  Resets ``crawling`` on failure."""
        assert not self.crawling, "Crawling already taking place"
        self.crawling = True

        try:
            self.spider = self._create_spider(*args, **kwargs)
            self.engine = self._create_engine()
            start_requests = iter(self.spider.start_requests())
            yield self.engine.open_spider(self.spider, start_requests)
            yield defer.maybeDeferred(self.engine.start)
        except Exception:
            self.crawling = False
            raise

    def _create_spider(self, *args, **kwargs):
        # Spiders are built through from_crawler so they get a crawler ref.
        return self.spidercls.from_crawler(self, *args, **kwargs)

    def _create_engine(self):
        # The engine calls back into stop() when the spider is closed.
        return ExecutionEngine(self, lambda _: self.stop())

    @defer.inlineCallbacks
    def stop(self):
        """Gracefully stop the engine if a crawl is in progress."""
        if self.crawling:
            self.crawling = False
            yield defer.maybeDeferred(self.engine.stop)
class CrawlerRunner(object):
    """
    This is a convenient helper class that keeps track of, manages and runs
    crawlers inside an already setup Twisted `reactor`_.

    The CrawlerRunner object must be instantiated with a
    :class:`~scrapy.settings.Settings` object.

    This class shouldn't be needed (since Scrapy is responsible of using it
    accordingly) unless writing scripts that manually handle the crawling
    process. See :ref:`run-from-script` for an example.
    """

    crawlers = property(
        lambda self: self._crawlers,
        doc="Set of :class:`crawlers <scrapy.crawler.Crawler>` started by "
            ":meth:`crawl` and managed by this class."
    )

    def __init__(self, settings=None):
        if isinstance(settings, dict) or settings is None:
            settings = Settings(settings)
        self.settings = settings
        self.spider_loader = _get_spider_loader(settings)
        self._crawlers = set()
        # Deferreds of crawls currently in progress; used by join().
        self._active = set()

    @property
    def spiders(self):
        """Deprecated alias for :attr:`spider_loader`."""
        warnings.warn("CrawlerRunner.spiders attribute is renamed to "
                      "CrawlerRunner.spider_loader.",
                      category=ScrapyDeprecationWarning, stacklevel=2)
        return self.spider_loader

    def crawl(self, crawler_or_spidercls, *args, **kwargs):
        """
        Run a crawler with the provided arguments.

        It will call the given Crawler's :meth:`~Crawler.crawl` method, while
        keeping track of it so it can be stopped later.

        If `crawler_or_spidercls` isn't a :class:`~scrapy.crawler.Crawler`
        instance, this method will try to create one using this parameter as
        the spider class given to it.

        Returns a deferred that is fired when the crawling is finished.

        :param crawler_or_spidercls: already created crawler, or a spider class
            or spider's name inside the project to create it
        :type crawler_or_spidercls: :class:`~scrapy.crawler.Crawler` instance,
            :class:`~scrapy.spiders.Spider` subclass or string

        :param list args: arguments to initialize the spider

        :param dict kwargs: keyword arguments to initialize the spider
        """
        crawler = crawler_or_spidercls
        if not isinstance(crawler_or_spidercls, Crawler):
            crawler = self._create_crawler(crawler_or_spidercls)

        self.crawlers.add(crawler)
        d = crawler.crawl(*args, **kwargs)
        self._active.add(d)

        def _done(result):
            # Bookkeeping: drop finished crawler/deferred, pass result through.
            self.crawlers.discard(crawler)
            self._active.discard(d)
            return result

        return d.addBoth(_done)

    def _create_crawler(self, spidercls):
        # Accepts a spider name (string) or a spider class.
        if isinstance(spidercls, six.string_types):
            spidercls = self.spider_loader.load(spidercls)
        return Crawler(spidercls, self.settings)

    def stop(self):
        """
        Stops simultaneously all the crawling jobs taking place.

        Returns a deferred that is fired when they all have ended.
        """
        return defer.DeferredList([c.stop() for c in list(self.crawlers)])

    @defer.inlineCallbacks
    def join(self):
        """
        join()

        Returns a deferred that is fired when all managed :attr:`crawlers` have
        completed their executions.
        """
        # Loop: new crawls may be added while waiting, so re-check _active.
        while self._active:
            yield defer.DeferredList(self._active)
class CrawlerProcess(CrawlerRunner):
    """
    A class to run multiple scrapy crawlers in a process simultaneously.

    This class extends :class:`~scrapy.crawler.CrawlerRunner` by adding support
    for starting a Twisted `reactor`_ and handling shutdown signals, like the
    keyboard interrupt command Ctrl-C. It also configures top-level logging.

    This utility should be a better fit than
    :class:`~scrapy.crawler.CrawlerRunner` if you aren't running another
    Twisted `reactor`_ within your application.

    The CrawlerProcess object must be instantiated with a
    :class:`~scrapy.settings.Settings` object.

    This class shouldn't be needed (since Scrapy is responsible of using it
    accordingly) unless writing scripts that manually handle the crawling
    process. See :ref:`run-from-script` for an example.
    """

    def __init__(self, settings=None):
        super(CrawlerProcess, self).__init__(settings)
        install_shutdown_handlers(self._signal_shutdown)
        configure_logging(self.settings)
        log_scrapy_info(self.settings)

    def _signal_shutdown(self, signum, _):
        # First signal: shut down gracefully; a second one forces the kill.
        install_shutdown_handlers(self._signal_kill)
        signame = signal_names[signum]
        logger.info("Received %(signame)s, shutting down gracefully. Send again to force ",
                    {'signame': signame})
        # Signal handlers run outside the reactor thread.
        reactor.callFromThread(self._graceful_stop_reactor)

    def _signal_kill(self, signum, _):
        # Second signal: ignore further signals and stop the reactor at once.
        install_shutdown_handlers(signal.SIG_IGN)
        signame = signal_names[signum]
        logger.info('Received %(signame)s twice, forcing unclean shutdown',
                    {'signame': signame})
        reactor.callFromThread(self._stop_reactor)

    def start(self, stop_after_crawl=True):
        """
        This method starts a Twisted `reactor`_, adjusts its pool size to
        :setting:`REACTOR_THREADPOOL_MAXSIZE`, and installs a DNS cache based
        on :setting:`DNSCACHE_ENABLED` and :setting:`DNSCACHE_SIZE`.

        If `stop_after_crawl` is True, the reactor will be stopped after all
        crawlers have finished, using :meth:`join`.

        :param boolean stop_after_crawl: stop or not the reactor when all
            crawlers have finished
        """
        if stop_after_crawl:
            d = self.join()
            # Don't start the reactor if the deferreds are already fired
            if d.called:
                return
            d.addBoth(self._stop_reactor)

        reactor.installResolver(self._get_dns_resolver())
        tp = reactor.getThreadPool()
        tp.adjustPoolsize(maxthreads=self.settings.getint('REACTOR_THREADPOOL_MAXSIZE'))
        reactor.addSystemEventTrigger('before', 'shutdown', self.stop)
        reactor.run(installSignalHandlers=False)  # blocking call

    def _get_dns_resolver(self):
        # Cache size 0 disables DNS caching.
        if self.settings.getbool('DNSCACHE_ENABLED'):
            cache_size = self.settings.getint('DNSCACHE_SIZE')
        else:
            cache_size = 0
        return CachingThreadedResolver(
            reactor=reactor,
            cache_size=cache_size,
            timeout=self.settings.getfloat('DNS_TIMEOUT')
        )

    def _graceful_stop_reactor(self):
        # Stop all crawls first, then stop the reactor regardless of outcome.
        d = self.stop()
        d.addBoth(self._stop_reactor)
        return d

    def _stop_reactor(self, _=None):
        try:
            reactor.stop()
        except RuntimeError:  # raised if already stopped or in shutdown stage
            pass
def _get_spider_loader(settings):
    """Instantiate the spider loader configured by *settings*.

    Honours the deprecated ``SPIDER_MANAGER_CLASS`` option as a fallback
    name for ``SPIDER_LOADER_CLASS``, and warns when the configured class
    does not fully implement ``ISpiderLoader``.
    """
    if settings.get('SPIDER_MANAGER_CLASS'):
        warnings.warn(
            'SPIDER_MANAGER_CLASS option is deprecated. '
            'Please use SPIDER_LOADER_CLASS.',
            category=ScrapyDeprecationWarning, stacklevel=2
        )
    dotted_path = settings.get('SPIDER_MANAGER_CLASS',
                               settings.get('SPIDER_LOADER_CLASS'))
    loader_class = load_object(dotted_path)
    try:
        verifyClass(ISpiderLoader, loader_class)
    except DoesNotImplement:
        warnings.warn(
            'SPIDER_LOADER_CLASS (previously named SPIDER_MANAGER_CLASS) does '
            'not fully implement scrapy.interfaces.ISpiderLoader interface. '
            'Please add all missing methods to avoid unexpected runtime errors.',
            category=ScrapyDeprecationWarning, stacklevel=2
        )
    return loader_class.from_settings(settings.frozencopy())
| bdh1011/wau | venv/lib/python2.7/site-packages/scrapy/crawler.py | Python | mit | 11,081 |
<?php
if ( ! defined( 'ABSPATH' ) ) {
exit;
}
/**
* Legacy product contains all deprecated methods for this class and can be
* removed in the future.
*/
include_once( WC_ABSPATH . 'includes/legacy/abstract-wc-legacy-product.php' );
/**
* Abstract Product Class
*
* The WooCommerce product class handles individual product data.
*
* @version 3.0.0
* @package WooCommerce/Abstracts
* @category Abstract Class
* @author WooThemes
*/
class WC_Product extends WC_Abstract_Legacy_Product {
/**
* This is the name of this object type.
* @var string
*/
protected $object_type = 'product';
/**
* Post type.
* @var string
*/
protected $post_type = 'product';
/**
* Cache group.
* @var string
*/
protected $cache_group = 'products';
/**
* Stores product data.
*
* @var array
*/
protected $data = array(
'name' => '',
'slug' => '',
'date_created' => null,
'date_modified' => null,
'status' => false,
'featured' => false,
'catalog_visibility' => 'visible',
'description' => '',
'short_description' => '',
'sku' => '',
'price' => '',
'regular_price' => '',
'sale_price' => '',
'date_on_sale_from' => null,
'date_on_sale_to' => null,
'total_sales' => '0',
'tax_status' => 'taxable',
'tax_class' => '',
'manage_stock' => false,
'stock_quantity' => null,
'stock_status' => 'instock',
'backorders' => 'no',
'sold_individually' => false,
'weight' => '',
'length' => '',
'width' => '',
'height' => '',
'upsell_ids' => array(),
'cross_sell_ids' => array(),
'parent_id' => 0,
'reviews_allowed' => true,
'purchase_note' => '',
'attributes' => array(),
'default_attributes' => array(),
'menu_order' => 0,
'virtual' => false,
'downloadable' => false,
'category_ids' => array(),
'tag_ids' => array(),
'shipping_class_id' => 0,
'downloads' => array(),
'image_id' => '',
'gallery_image_ids' => array(),
'download_limit' => -1,
'download_expiry' => -1,
'rating_counts' => array(),
'average_rating' => 0,
'review_count' => 0,
);
/**
* Supported features such as 'ajax_add_to_cart'.
*
* @var array
*/
protected $supports = array();
/**
* Get the product if ID is passed, otherwise the product is new and empty.
* This class should NOT be instantiated, but the wc_get_product() function
* should be used. It is possible, but the wc_get_product() is preferred.
*
* @param int|WC_Product|object $product Product to init.
*/
public function __construct( $product = 0 ) {
	parent::__construct( $product );
	// Resolve the product ID from whichever form was passed:
	// a positive numeric ID, another WC_Product, or a post-like object with an ID property.
	if ( is_numeric( $product ) && $product > 0 ) {
		$this->set_id( $product );
	} elseif ( $product instanceof self ) {
		$this->set_id( absint( $product->get_id() ) );
	} elseif ( ! empty( $product->ID ) ) {
		$this->set_id( absint( $product->ID ) );
	} else {
		// Nothing to load: this is a new, empty product, so mark it as read.
		$this->set_object_read( true );
	}
	// The data store is resolved per product type, e.g. 'product-simple'.
	$this->data_store = WC_Data_Store::load( 'product-' . $this->get_type() );
	if ( $this->get_id() > 0 ) {
		// Hydrate this object from the data store for existing products.
		$this->data_store->read( $this );
	}
}
/**
 * Get internal type. Should return string and *should be overridden* by child classes.
 *
 * The product_type property is deprecated but is used here for BW compat with child classes which may be defining product_type and not have a get_type method.
 *
 * @since 3.0.0
 * @return string
 */
public function get_type() {
	if ( isset( $this->product_type ) ) {
		return $this->product_type;
	}
	return 'simple';
}
/*
|--------------------------------------------------------------------------
| Getters
|--------------------------------------------------------------------------
|
| Methods for getting data from the product object.
*/
/**
* Get product name.
*
* @since 3.0.0
* @param string $context
* @return string
*/
public function get_name( $context = 'view' ) {
return $this->get_prop( 'name', $context );
}
/**
* Get product slug.
*
* @since 3.0.0
* @param string $context
* @return string
*/
public function get_slug( $context = 'view' ) {
return $this->get_prop( 'slug', $context );
}
/**
* Get product created date.
*
* @since 3.0.0
* @param string $context
* @return WC_DateTime|NULL object if the date is set or null if there is no date.
*/
public function get_date_created( $context = 'view' ) {
return $this->get_prop( 'date_created', $context );
}
/**
* Get product modified date.
*
* @since 3.0.0
* @param string $context
* @return WC_DateTime|NULL object if the date is set or null if there is no date.
*/
public function get_date_modified( $context = 'view' ) {
return $this->get_prop( 'date_modified', $context );
}
/**
* Get product status.
*
* @since 3.0.0
* @param string $context
* @return string
*/
public function get_status( $context = 'view' ) {
return $this->get_prop( 'status', $context );
}
/**
* If the product is featured.
*
* @since 3.0.0
* @param string $context
* @return boolean
*/
public function get_featured( $context = 'view' ) {
return $this->get_prop( 'featured', $context );
}
/**
* Get catalog visibility.
*
* @since 3.0.0
* @param string $context
* @return string
*/
public function get_catalog_visibility( $context = 'view' ) {
return $this->get_prop( 'catalog_visibility', $context );
}
/**
* Get product description.
*
* @since 3.0.0
* @param string $context
* @return string
*/
public function get_description( $context = 'view' ) {
return $this->get_prop( 'description', $context );
}
/**
* Get product short description.
*
* @since 3.0.0
* @param string $context
* @return string
*/
public function get_short_description( $context = 'view' ) {
return $this->get_prop( 'short_description', $context );
}
/**
* Get SKU (Stock-keeping unit) - product unique ID.
*
* @param string $context
* @return string
*/
public function get_sku( $context = 'view' ) {
return $this->get_prop( 'sku', $context );
}
/**
* Returns the product's active price.
*
* @param string $context
* @return string price
*/
public function get_price( $context = 'view' ) {
return $this->get_prop( 'price', $context );
}
/**
* Returns the product's regular price.
*
* @param string $context
* @return string price
*/
public function get_regular_price( $context = 'view' ) {
return $this->get_prop( 'regular_price', $context );
}
/**
* Returns the product's sale price.
*
* @param string $context
* @return string price
*/
public function get_sale_price( $context = 'view' ) {
return $this->get_prop( 'sale_price', $context );
}
/**
* Get date on sale from.
*
* @since 3.0.0
* @param string $context
* @return WC_DateTime|NULL object if the date is set or null if there is no date.
*/
public function get_date_on_sale_from( $context = 'view' ) {
return $this->get_prop( 'date_on_sale_from', $context );
}
/**
* Get date on sale to.
*
* @since 3.0.0
* @param string $context
* @return WC_DateTime|NULL object if the date is set or null if there is no date.
*/
public function get_date_on_sale_to( $context = 'view' ) {
return $this->get_prop( 'date_on_sale_to', $context );
}
/**
* Get number total of sales.
*
* @since 3.0.0
* @param string $context
* @return int
*/
public function get_total_sales( $context = 'view' ) {
return $this->get_prop( 'total_sales', $context );
}
/**
* Returns the tax status.
*
* @param string $context
* @return string
*/
public function get_tax_status( $context = 'view' ) {
return $this->get_prop( 'tax_status', $context );
}
/**
* Returns the tax class.
*
* @param string $context
* @return string
*/
public function get_tax_class( $context = 'view' ) {
return $this->get_prop( 'tax_class', $context );
}
/**
* Return if product manage stock.
*
* @since 3.0.0
* @param string $context
* @return boolean
*/
public function get_manage_stock( $context = 'view' ) {
return $this->get_prop( 'manage_stock', $context );
}
/**
* Returns number of items available for sale.
*
* @param string $context
* @return int|null
*/
public function get_stock_quantity( $context = 'view' ) {
return $this->get_prop( 'stock_quantity', $context );
}
/**
* Return the stock status.
*
* @param string $context
* @since 3.0.0
* @return string
*/
public function get_stock_status( $context = 'view' ) {
return $this->get_prop( 'stock_status', $context );
}
/**
* Get backorders.
*
* @param string $context
* @since 3.0.0
* @return string yes no or notify
*/
public function get_backorders( $context = 'view' ) {
return $this->get_prop( 'backorders', $context );
}
/**
* Return if should be sold individually.
*
* @param string $context
* @since 3.0.0
* @return boolean
*/
public function get_sold_individually( $context = 'view' ) {
return $this->get_prop( 'sold_individually', $context );
}
/**
* Returns the product's weight.
*
* @param string $context
* @return string
*/
public function get_weight( $context = 'view' ) {
return $this->get_prop( 'weight', $context );
}
/**
* Returns the product length.
*
* @param string $context
* @return string
*/
public function get_length( $context = 'view' ) {
return $this->get_prop( 'length', $context );
}
/**
* Returns the product width.
*
* @param string $context
* @return string
*/
public function get_width( $context = 'view' ) {
return $this->get_prop( 'width', $context );
}
/**
* Returns the product height.
*
* @param string $context
* @return string
*/
public function get_height( $context = 'view' ) {
return $this->get_prop( 'height', $context );
}
/**
 * Returns formatted dimensions.
 *
 * With the (deprecated) default of true, a formatted HTML string is
 * returned via wc_format_dimensions(); with false, a plain array of
 * length/width/height values is returned.
 *
 * @param $formatted bool True by default for legacy support - will be false/not set in future versions to return the array only. Use wc_format_dimensions for formatted versions instead.
 * @return string|array
 */
public function get_dimensions( $formatted = true ) {
	if ( $formatted ) {
		// Legacy path: warn, then format the raw array from the non-legacy path below.
		wc_deprecated_argument( 'WC_Product::get_dimensions', '3.0', 'By default, get_dimensions has an argument set to true so that HTML is returned. This is to support the legacy version of the method. To get HTML dimensions, instead use wc_format_dimensions() function. Pass false to this method to return an array of dimensions. This will be the new default behavior in future versions.' );
		return apply_filters( 'woocommerce_product_dimensions', wc_format_dimensions( $this->get_dimensions( false ) ), $this );
	}
	return array(
		'length' => $this->get_length(),
		'width'  => $this->get_width(),
		'height' => $this->get_height(),
	);
}
/**
 * Get upsell IDs.
*
* @since 3.0.0
* @param string $context
* @return array
*/
public function get_upsell_ids( $context = 'view' ) {
return $this->get_prop( 'upsell_ids', $context );
}
/**
* Get cross sell IDs.
*
* @since 3.0.0
* @param string $context
* @return array
*/
public function get_cross_sell_ids( $context = 'view' ) {
return $this->get_prop( 'cross_sell_ids', $context );
}
/**
* Get parent ID.
*
* @since 3.0.0
* @param string $context
* @return int
*/
public function get_parent_id( $context = 'view' ) {
return $this->get_prop( 'parent_id', $context );
}
/**
* Return if reviews is allowed.
*
* @since 3.0.0
* @param string $context
* @return bool
*/
public function get_reviews_allowed( $context = 'view' ) {
return $this->get_prop( 'reviews_allowed', $context );
}
/**
* Get purchase note.
*
* @since 3.0.0
* @param string $context
* @return string
*/
public function get_purchase_note( $context = 'view' ) {
return $this->get_prop( 'purchase_note', $context );
}
/**
* Returns product attributes.
*
* @param string $context
* @return array
*/
public function get_attributes( $context = 'view' ) {
return $this->get_prop( 'attributes', $context );
}
/**
* Get default attributes.
*
* @since 3.0.0
* @param string $context
* @return array
*/
public function get_default_attributes( $context = 'view' ) {
return $this->get_prop( 'default_attributes', $context );
}
/**
* Get menu order.
*
* @since 3.0.0
* @param string $context
* @return int
*/
public function get_menu_order( $context = 'view' ) {
return $this->get_prop( 'menu_order', $context );
}
/**
* Get category ids.
*
* @since 3.0.0
* @param string $context
* @return array
*/
public function get_category_ids( $context = 'view' ) {
return $this->get_prop( 'category_ids', $context );
}
/**
* Get tag ids.
*
* @since 3.0.0
* @param string $context
* @return array
*/
public function get_tag_ids( $context = 'view' ) {
return $this->get_prop( 'tag_ids', $context );
}
/**
* Get virtual.
*
* @since 3.0.0
* @param string $context
* @return bool
*/
public function get_virtual( $context = 'view' ) {
return $this->get_prop( 'virtual', $context );
}
/**
* Returns the gallery attachment ids.
*
* @param string $context
* @return array
*/
public function get_gallery_image_ids( $context = 'view' ) {
return $this->get_prop( 'gallery_image_ids', $context );
}
/**
* Get shipping class ID.
*
* @since 3.0.0
* @param string $context
* @return int
*/
public function get_shipping_class_id( $context = 'view' ) {
return $this->get_prop( 'shipping_class_id', $context );
}
/**
* Get downloads.
*
* @since 3.0.0
* @param string $context
* @return array
*/
public function get_downloads( $context = 'view' ) {
return $this->get_prop( 'downloads', $context );
}
/**
* Get download expiry.
*
* @since 3.0.0
* @param string $context
* @return int
*/
public function get_download_expiry( $context = 'view' ) {
return $this->get_prop( 'download_expiry', $context );
}
/**
* Get downloadable.
*
* @since 3.0.0
* @param string $context
* @return bool
*/
public function get_downloadable( $context = 'view' ) {
return $this->get_prop( 'downloadable', $context );
}
/**
* Get download limit.
*
* @since 3.0.0
* @param string $context
* @return int
*/
public function get_download_limit( $context = 'view' ) {
return $this->get_prop( 'download_limit', $context );
}
/**
* Get main image ID.
*
* @since 3.0.0
* @param string $context
* @return string
*/
public function get_image_id( $context = 'view' ) {
return $this->get_prop( 'image_id', $context );
}
/**
* Get rating count.
* @param string $context
* @return array of counts
*/
public function get_rating_counts( $context = 'view' ) {
return $this->get_prop( 'rating_counts', $context );
}
/**
* Get average rating.
* @param string $context
* @return float
*/
public function get_average_rating( $context = 'view' ) {
return $this->get_prop( 'average_rating', $context );
}
/**
* Get review count.
* @param string $context
* @return int
*/
public function get_review_count( $context = 'view' ) {
return $this->get_prop( 'review_count', $context );
}
/*
|--------------------------------------------------------------------------
| Setters
|--------------------------------------------------------------------------
|
| Functions for setting product data. These should not update anything in the
| database itself and should only change what is stored in the class
| object.
*/
/**
* Set product name.
*
* @since 3.0.0
* @param string $name Product name.
*/
public function set_name( $name ) {
$this->set_prop( 'name', $name );
}
/**
* Set product slug.
*
* @since 3.0.0
* @param string $slug Product slug.
*/
public function set_slug( $slug ) {
$this->set_prop( 'slug', $slug );
}
/**
* Set product created date.
*
* @since 3.0.0
 * @param string|integer|null $date UTC timestamp, or ISO 8601 DateTime. If the DateTime string has no timezone or offset, WordPress site timezone will be assumed. Null if there is no date.
*/
public function set_date_created( $date = null ) {
$this->set_date_prop( 'date_created', $date );
}
/**
* Set product modified date.
*
* @since 3.0.0
 * @param string|integer|null $date UTC timestamp, or ISO 8601 DateTime. If the DateTime string has no timezone or offset, WordPress site timezone will be assumed. Null if there is no date.
*/
public function set_date_modified( $date = null ) {
$this->set_date_prop( 'date_modified', $date );
}
/**
* Set product status.
*
* @since 3.0.0
* @param string $status Product status.
*/
public function set_status( $status ) {
$this->set_prop( 'status', $status );
}
/**
* Set if the product is featured.
*
* @since 3.0.0
* @param bool|string
*/
public function set_featured( $featured ) {
$this->set_prop( 'featured', wc_string_to_bool( $featured ) );
}
/**
 * Set catalog visibility.
 *
 * @since 3.0.0
 * @throws WC_Data_Exception When an unknown visibility option is supplied.
 * @param string $visibility Options: 'hidden', 'visible', 'search' and 'catalog'.
 */
public function set_catalog_visibility( $visibility ) {
	$valid_options = array_keys( wc_get_product_visibility_options() );
	if ( ! in_array( $visibility, $valid_options, true ) ) {
		$this->error( 'product_invalid_catalog_visibility', __( 'Invalid catalog visibility option.', 'woocommerce' ) );
	}
	$this->set_prop( 'catalog_visibility', $visibility );
}
/**
* Set product description.
*
* @since 3.0.0
* @param string $description Product description.
*/
public function set_description( $description ) {
$this->set_prop( 'description', $description );
}
/**
* Set product short description.
*
* @since 3.0.0
* @param string $short_description Product short description.
*/
public function set_short_description( $short_description ) {
$this->set_prop( 'short_description', $short_description );
}
/**
 * Set SKU.
 *
 * Raises an error (with the conflicting product's ID attached) when a
 * non-empty SKU duplicates one already in use.
 *
 * @since 3.0.0
 * @throws WC_Data_Exception When the SKU is invalid or duplicated.
 * @param string $sku Product SKU.
 */
public function set_sku( $sku ) {
	$sku = (string) $sku;
	// Uniqueness is only enforced after the object has been fully read,
	// so hydrating an existing product from the DB never triggers the check.
	if ( $this->get_object_read() && ! empty( $sku ) && ! wc_product_has_unique_sku( $this->get_id(), $sku ) ) {
		$sku_found = wc_get_product_id_by_sku( $sku );
		$this->error( 'product_invalid_sku', __( 'Invalid or duplicated SKU.', 'woocommerce' ), 400, array( 'resource_id' => $sku_found ) );
	}
	$this->set_prop( 'sku', $sku );
}
/**
* Set the product's active price.
*
* @param string $price Price.
*/
public function set_price( $price ) {
$this->set_prop( 'price', wc_format_decimal( $price ) );
}
/**
* Set the product's regular price.
*
* @since 3.0.0
* @param string $price Regular price.
*/
public function set_regular_price( $price ) {
$this->set_prop( 'regular_price', wc_format_decimal( $price ) );
}
/**
* Set the product's sale price.
*
* @since 3.0.0
* @param string $price sale price.
*/
public function set_sale_price( $price ) {
$this->set_prop( 'sale_price', wc_format_decimal( $price ) );
}
/**
* Set date on sale from.
*
* @since 3.0.0
 * @param string|integer|null $date UTC timestamp, or ISO 8601 DateTime. If the DateTime string has no timezone or offset, WordPress site timezone will be assumed. Null if there is no date.
*/
public function set_date_on_sale_from( $date = null ) {
$this->set_date_prop( 'date_on_sale_from', $date );
}
/**
* Set date on sale to.
*
* @since 3.0.0
 * @param string|integer|null $date UTC timestamp, or ISO 8601 DateTime. If the DateTime string has no timezone or offset, WordPress site timezone will be assumed. Null if there is no date.
*/
public function set_date_on_sale_to( $date = null ) {
$this->set_date_prop( 'date_on_sale_to', $date );
}
/**
* Set number total of sales.
*
* @since 3.0.0
* @param int $total Total of sales.
*/
public function set_total_sales( $total ) {
$this->set_prop( 'total_sales', absint( $total ) );
}
/**
 * Set the tax status.
 *
 * Falls back to 'taxable' when an empty value is given and raises an
 * error for any value outside the known set.
 *
 * @since 3.0.0
 * @throws WC_Data_Exception When the status is not 'taxable', 'shipping' or 'none'.
 * @param string $status Tax status.
 */
public function set_tax_status( $status ) {
	// Default to 'taxable' when nothing is supplied.
	if ( empty( $status ) ) {
		$status = 'taxable';
	}
	$valid_statuses = array(
		'taxable',
		'shipping',
		'none',
	);
	if ( ! in_array( $status, $valid_statuses, true ) ) {
		$this->error( 'product_invalid_tax_status', __( 'Invalid product tax status.', 'woocommerce' ) );
	}
	$this->set_prop( 'tax_status', $status );
}
/**
 * Set the tax class.
 *
 * The class is slug-sanitised; the default 'standard' class is stored
 * as an empty string.
 *
 * @since 3.0.0
 * @param string $class Tax class.
 */
public function set_tax_class( $class ) {
	$sanitized_class = sanitize_title( $class );
	if ( 'standard' === $sanitized_class ) {
		$sanitized_class = '';
	}
	$this->set_prop( 'tax_class', $sanitized_class );
}
/**
* Set if product manage stock.
*
* @since 3.0.0
* @param bool
*/
public function set_manage_stock( $manage_stock ) {
$this->set_prop( 'manage_stock', wc_string_to_bool( $manage_stock ) );
}
/**
* Set number of items available for sale.
*
* @since 3.0.0
* @param float|null $quantity Stock quantity.
*/
public function set_stock_quantity( $quantity ) {
$this->set_prop( 'stock_quantity', '' !== $quantity ? wc_stock_amount( $quantity ) : null );
}
/**
 * Set stock status.
 *
 * Any value other than 'outofstock' is normalised to 'instock'.
 *
 * @param string $status New status.
 */
public function set_stock_status( $status = '' ) {
	if ( 'outofstock' === $status ) {
		$this->set_prop( 'stock_status', 'outofstock' );
	} else {
		$this->set_prop( 'stock_status', 'instock' );
	}
}
/**
* Set backorders.
*
* @since 3.0.0
* @param string $backorders Options: 'yes', 'no' or 'notify'.
*/
public function set_backorders( $backorders ) {
$this->set_prop( 'backorders', $backorders );
}
/**
* Set if should be sold individually.
*
* @since 3.0.0
* @param bool
*/
public function set_sold_individually( $sold_individually ) {
$this->set_prop( 'sold_individually', wc_string_to_bool( $sold_individually ) );
}
/**
* Set the product's weight.
*
* @since 3.0.0
* @param float|string $weight Total weight.
*/
public function set_weight( $weight ) {
$this->set_prop( 'weight', '' === $weight ? '' : wc_format_decimal( $weight ) );
}
/**
* Set the product length.
*
* @since 3.0.0
* @param float|string $length Total length.
*/
public function set_length( $length ) {
$this->set_prop( 'length', '' === $length ? '' : wc_format_decimal( $length ) );
}
/**
* Set the product width.
*
* @since 3.0.0
* @param float|string $width Total width.
*/
public function set_width( $width ) {
$this->set_prop( 'width', '' === $width ? '' : wc_format_decimal( $width ) );
}
/**
* Set the product height.
*
* @since 3.0.0
* @param float|string $height Total height.
*/
public function set_height( $height ) {
$this->set_prop( 'height', '' === $height ? '' : wc_format_decimal( $height ) );
}
/**
* Set upsell IDs.
*
* @since 3.0.0
* @param array $upsell_ids IDs from the up-sell products.
*/
public function set_upsell_ids( $upsell_ids ) {
$this->set_prop( 'upsell_ids', array_filter( (array) $upsell_ids ) );
}
/**
 * Set cross-sell IDs.
*
* @since 3.0.0
* @param array $cross_sell_ids IDs from the cross-sell products.
*/
public function set_cross_sell_ids( $cross_sell_ids ) {
$this->set_prop( 'cross_sell_ids', array_filter( (array) $cross_sell_ids ) );
}
/**
* Set parent ID.
*
* @since 3.0.0
* @param int $parent_id Product parent ID.
*/
public function set_parent_id( $parent_id ) {
$this->set_prop( 'parent_id', absint( $parent_id ) );
}
/**
* Set if reviews is allowed.
*
* @since 3.0.0
* @param bool $reviews_allowed Reviews allowed or not.
*/
public function set_reviews_allowed( $reviews_allowed ) {
$this->set_prop( 'reviews_allowed', wc_string_to_bool( $reviews_allowed ) );
}
/**
* Set purchase note.
*
* @since 3.0.0
* @param string $purchase_note Purchase note.
*/
public function set_purchase_note( $purchase_note ) {
$this->set_prop( 'purchase_note', $purchase_note );
}
/**
 * Set product attributes.
 *
 * Attributes are made up of:
 * id - 0 for product level attributes. ID for global attributes.
 * name - Attribute name.
 * options - attribute value or array of term ids/names.
 * position - integer sort order.
 * visible - If visible on frontend.
 * variation - If used for variations.
 * Indexed by unique key to allow clearing old ones after a set.
 *
 * @since 3.0.0
 * @param array $raw_attributes Array of WC_Product_Attribute objects.
 */
public function set_attributes( $raw_attributes ) {
	// Seed with every existing attribute key mapped to null so that any
	// attribute absent from $raw_attributes is effectively cleared.
	$attributes = array_fill_keys( array_keys( $this->get_attributes( 'edit' ) ), null );
	foreach ( $raw_attributes as $attribute ) {
		// Entries that are not WC_Product_Attribute objects are ignored.
		if ( is_a( $attribute, 'WC_Product_Attribute' ) ) {
			$attributes[ sanitize_title( $attribute->get_name() ) ] = $attribute;
		}
	}
	uasort( $attributes, 'wc_product_attribute_uasort_comparison' );
	$this->set_prop( 'attributes', $attributes );
}
/**
* Set default attributes.
*
* @since 3.0.0
* @param array $default_attributes List of default attributes.
*/
public function set_default_attributes( $default_attributes ) {
$this->set_prop( 'default_attributes',
array_filter( (array) $default_attributes, 'wc_array_filter_default_attributes' ) );
}
/**
* Set menu order.
*
* @since 3.0.0
* @param int $menu_order Menu order.
*/
public function set_menu_order( $menu_order ) {
$this->set_prop( 'menu_order', intval( $menu_order ) );
}
/**
* Set the product categories.
*
* @since 3.0.0
* @param array $term_ids List of terms IDs.
*/
public function set_category_ids( $term_ids ) {
$this->set_prop( 'category_ids', array_unique( array_map( 'intval', $term_ids ) ) );
}
/**
* Set the product tags.
*
* @since 3.0.0
* @param array $term_ids List of terms IDs.
*/
public function set_tag_ids( $term_ids ) {
$this->set_prop( 'tag_ids', array_unique( array_map( 'intval', $term_ids ) ) );
}
/**
* Set if the product is virtual.
*
* @since 3.0.0
* @param bool|string
*/
public function set_virtual( $virtual ) {
$this->set_prop( 'virtual', wc_string_to_bool( $virtual ) );
}
/**
* Set shipping class ID.
*
* @since 3.0.0
* @param int
*/
public function set_shipping_class_id( $id ) {
$this->set_prop( 'shipping_class_id', absint( $id ) );
}
/**
* Set if the product is downloadable.
*
* @since 3.0.0
* @param bool|string
*/
public function set_downloadable( $downloadable ) {
$this->set_prop( 'downloadable', wc_string_to_bool( $downloadable ) );
}
/**
 * Set downloads.
 *
 * Accepts WC_Product_Download objects or raw arrays with 'name' and
 * 'file' keys; raw arrays are converted to download objects keyed by a
 * hash of the file path. Entries failing validation are skipped and the
 * first validation error (if any) is raised via $this->error().
 *
 * @since 3.0.0
 * @param $downloads_array array of WC_Product_Download objects or arrays.
 */
public function set_downloads( $downloads_array ) {
	$downloads = array();
	$errors    = array();
	foreach ( $downloads_array as $download ) {
		if ( is_a( $download, 'WC_Product_Download' ) ) {
			$download_object = $download;
		} else {
			// Build a download object from a raw array; the ID is a
			// (filterable) hash of the file path.
			$download_object           = new WC_Product_Download();
			$download['previous_hash'] = isset( $download['previous_hash'] ) ? $download['previous_hash'] : '';
			$file_hash                 = apply_filters( 'woocommerce_downloadable_file_hash', md5( $download['file'] ), $this->get_id(), $download['name'], $download['file'], $download['previous_hash'] );
			$download_object->set_id( $file_hash );
			$download_object->set_name( $download['name'] );
			$download_object->set_file( $download['file'] );
			$download_object->set_previous_hash( $download['previous_hash'] );
		}
		// Validate the file extension
		if ( ! $download_object->is_allowed_filetype() ) {
			// Errors are only collected once the object has been read,
			// so stored data never triggers validation failures on load.
			if ( $this->get_object_read() ) {
				$errors[] = sprintf( __( 'The downloadable file %1$s cannot be used as it does not have an allowed file type. Allowed types include: %2$s', 'woocommerce' ), '<code>' . basename( $download_object->get_file() ) . '</code>', '<code>' . implode( ', ', array_keys( $download_object->get_allowed_mime_types() ) ) . '</code>' );
			}
			continue;
		}
		// Validate the file exists.
		if ( ! $download_object->file_exists() ) {
			if ( $this->get_object_read() ) {
				$errors[] = sprintf( __( 'The downloadable file %s cannot be used as it does not exist on the server.', 'woocommerce' ), '<code>' . $download_object->get_file() . '</code>' );
			}
			continue;
		}
		$downloads[ $download_object->get_id() ] = $download_object;
	}
	if ( $errors ) {
		// Only the first collected error is surfaced.
		$this->error( 'product_invalid_download', $errors[0] );
	}
	$this->set_prop( 'downloads', $downloads );
}
/**
* Set download limit.
*
* @since 3.0.0
* @param int $download_limit
*/
public function set_download_limit( $download_limit ) {
$this->set_prop( 'download_limit', -1 === (int) $download_limit || '' === $download_limit ? -1 : absint( $download_limit ) );
}
/**
* Set download expiry.
*
* @since 3.0.0
* @param int $download_expiry
*/
public function set_download_expiry( $download_expiry ) {
$this->set_prop( 'download_expiry', -1 === (int) $download_expiry || '' === $download_expiry ? -1 : absint( $download_expiry ) );
}
/**
 * Set gallery attachment ids.
 *
 * IDs are normalised to a unique integer list; once the object has been
 * read, IDs that are not image attachments are dropped.
 *
 * @since 3.0.0
 * @param array $image_ids
 */
public function set_gallery_image_ids( $image_ids ) {
	$image_ids = wp_parse_id_list( $image_ids );
	if ( $this->get_object_read() ) {
		$image_ids = array_filter( $image_ids, 'wp_attachment_is_image' );
	}
	$this->set_prop( 'gallery_image_ids', $image_ids );
}
/**
* Set main image ID.
*
* @since 3.0.0
* @param int $image_id
*/
public function set_image_id( $image_id = '' ) {
$this->set_prop( 'image_id', $image_id );
}
/**
* Set rating counts. Read only.
* @param array $counts
*/
public function set_rating_counts( $counts ) {
$this->set_prop( 'rating_counts', array_filter( array_map( 'absint', (array) $counts ) ) );
}
/**
* Set average rating. Read only.
* @param float $average
*/
public function set_average_rating( $average ) {
$this->set_prop( 'average_rating', wc_format_decimal( $average ) );
}
/**
* Set review count. Read only.
* @param int $count
*/
public function set_review_count( $count ) {
$this->set_prop( 'review_count', absint( $count ) );
}
/*
|--------------------------------------------------------------------------
| Other Methods
|--------------------------------------------------------------------------
*/
/**
 * Ensure properties are set correctly before save.
 *
 * Aligns the stock-related props: quantity and backorders are cleared
 * when stock is not managed, and stock_status is forced to match the
 * managed quantity against the 'woocommerce_notify_no_stock_amount'
 * option threshold.
 *
 * @since 3.0.0
 */
public function validate_props() {
	// Before updating, ensure stock props are all aligned. Qty and backorders are not needed if not stock managed.
	if ( ! $this->get_manage_stock() ) {
		$this->set_stock_quantity( '' );
		$this->set_backorders( 'no' );
	// If we are stock managing and we don't have stock, force out of stock status.
	} elseif ( $this->get_stock_quantity() <= get_option( 'woocommerce_notify_no_stock_amount' ) && 'no' === $this->get_backorders() ) {
		$this->set_stock_status( 'outofstock' );
	// If the stock level is changing and we do now have enough, force in stock status.
	} elseif ( $this->get_stock_quantity() > get_option( 'woocommerce_notify_no_stock_amount' ) && array_key_exists( 'stock_quantity', $this->get_changes() ) ) {
		$this->set_stock_status( 'instock' );
	}
}
/**
 * Save data (either create or update depending on if we are working on an existing product).
 *
 * Fires the 'woocommerce_before_{object_type}_object_save' action before
 * persisting, and schedules a deferred sync of the parent product when
 * one exists. Returns the product ID after saving; nothing is returned
 * when no data store is loaded.
 *
 * @since 3.0.0
 */
public function save() {
	// Normalise stock-related props before they hit the data store.
	$this->validate_props();
	if ( $this->data_store ) {
		// Trigger action before saving to the DB. Use a pointer to adjust object props before save.
		do_action( 'woocommerce_before_' . $this->object_type . '_object_save', $this, $this->data_store );
		if ( $this->get_id() ) {
			$this->data_store->update( $this );
		} else {
			$this->data_store->create( $this );
		}
		if ( $this->get_parent_id() ) {
			// Keep the parent (e.g. variable product) in sync with this child.
			wc_deferred_product_sync( $this->get_parent_id() );
		}
		return $this->get_id();
	}
}
/*
|--------------------------------------------------------------------------
| Conditionals
|--------------------------------------------------------------------------
*/
/**
 * Check if a product supports a given feature.
 *
 * Product classes should override this to declare support (or lack of support) for a feature.
 *
 * @param string $feature string The name of a feature to test support for.
 * @return bool True if the product supports the feature, false otherwise.
 * @since 2.5.0
 */
public function supports( $feature ) {
	$is_supported = in_array( $feature, $this->supports );
	return apply_filters( 'woocommerce_product_supports', $is_supported, $feature, $this );
}
/**
* Returns whether or not the product post exists.
*
* @return bool
*/
public function exists() {
return false !== $this->get_status();
}
/**
 * Checks the product type.
 *
 * Backwards compat with downloadable/virtual.
 *
 * @param string|array $type Array or string of types
 * @return bool
 */
public function is_type( $type ) {
	$current_type = $this->get_type();
	if ( $current_type === $type ) {
		return true;
	}
	return is_array( $type ) && in_array( $current_type, $type );
}
/**
* Checks if a product is downloadable.
*
* @return bool
*/
public function is_downloadable() {
return apply_filters( 'woocommerce_is_downloadable', true === $this->get_downloadable(), $this );
}
/**
* Checks if a product is virtual (has no shipping).
*
* @return bool
*/
public function is_virtual() {
return apply_filters( 'woocommerce_is_virtual', true === $this->get_virtual(), $this );
}
/**
* Returns whether or not the product is featured.
*
* @return bool
*/
public function is_featured() {
return true === $this->get_featured();
}
/**
* Check if a product is sold individually (no quantities).
*
* @return bool
*/
public function is_sold_individually() {
return apply_filters( 'woocommerce_is_sold_individually', true === $this->get_sold_individually(), $this );
}
/**
 * Returns whether or not the product is visible in the catalog.
 *
 * @return bool
 */
public function is_visible() {
	// Catalog visibility: 'visible' shows everywhere; 'search' only on
	// search pages; 'catalog' only on non-search pages.
	$visible = 'visible' === $this->get_catalog_visibility() || ( is_search() && 'search' === $this->get_catalog_visibility() ) || ( ! is_search() && 'catalog' === $this->get_catalog_visibility() );
	// Unpublished products are hidden from everyone except users who can edit them.
	if ( 'publish' !== $this->get_status() && ! current_user_can( 'edit_post', $this->get_id() ) ) {
		$visible = false;
	}
	// Store-wide option to hide out-of-stock products.
	if ( 'yes' === get_option( 'woocommerce_hide_out_of_stock_items' ) && ! $this->is_in_stock() ) {
		$visible = false;
	}
	return apply_filters( 'woocommerce_product_is_visible', $visible, $this->get_id() );
}
/**
 * Returns false if the product cannot be bought.
 *
 * A product is purchasable when it exists, is published (or the current
 * user can edit it), and has a price set.
 *
 * @return bool
 */
public function is_purchasable() {
	return apply_filters( 'woocommerce_is_purchasable', $this->exists() && ( 'publish' === $this->get_status() || current_user_can( 'edit_post', $this->get_id() ) ) && '' !== $this->get_price(), $this );
}
/**
 * Returns whether or not the product is on sale.
 *
 * A sale requires a non-empty sale price below the regular price, and the
 * current (GMT) time to fall inside the optional sale date window.
 *
 * @param string $context What the value is for. Valid values are view and edit.
 * @return bool Filtered via 'woocommerce_product_is_on_sale' in view context only.
 */
public function is_on_sale( $context = 'view' ) {
	$sale_price    = $this->get_sale_price( $context );
	$regular_price = $this->get_regular_price( $context );
	$on_sale       = '' !== (string) $sale_price && $regular_price > $sale_price;
	if ( $on_sale ) {
		$now  = current_time( 'timestamp', true );
		$from = $this->get_date_on_sale_from( $context );
		$to   = $this->get_date_on_sale_to( $context );
		// Sale has not started yet.
		if ( $from && $from->getTimestamp() > $now ) {
			$on_sale = false;
		}
		// Sale already ended.
		if ( $to && $to->getTimestamp() < $now ) {
			$on_sale = false;
		}
	}
	return 'view' === $context ? apply_filters( 'woocommerce_product_is_on_sale', $on_sale, $this ) : $on_sale;
}
/**
 * Returns whether or not the product has any dimension set.
 *
 * Virtual products never have dimensions.
 *
 * @return bool
 */
public function has_dimensions() {
	if ( $this->get_virtual() ) {
		return false;
	}
	return $this->get_length() || $this->get_height() || $this->get_width();
}
/**
 * Returns whether or not the product has a weight set.
 *
 * Virtual products never have a weight.
 *
 * @return bool
 */
public function has_weight() {
	if ( $this->get_virtual() ) {
		return false;
	}
	return (bool) $this->get_weight();
}
/**
 * Returns whether or not the product is in stock.
 *
 * @return bool Filterable via 'woocommerce_product_is_in_stock'.
 */
public function is_in_stock() {
	$in_stock = ( 'instock' === $this->get_stock_status() );
	return apply_filters( 'woocommerce_product_is_in_stock', $in_stock, $this );
}
/**
 * Checks if a product needs shipping (i.e. it is not virtual).
 *
 * @return bool Filterable via 'woocommerce_product_needs_shipping'.
 */
public function needs_shipping() {
	$needs_shipping = ! $this->is_virtual();
	return apply_filters( 'woocommerce_product_needs_shipping', $needs_shipping, $this );
}
/**
 * Returns whether or not the product is taxable.
 *
 * Requires taxes enabled shop-wide and a 'taxable' tax status.
 *
 * @return bool Filterable via 'woocommerce_product_is_taxable'.
 */
public function is_taxable() {
	$taxable = wc_tax_enabled() && 'taxable' === $this->get_tax_status();
	return apply_filters( 'woocommerce_product_is_taxable', $taxable, $this );
}
/**
 * Returns whether or not the product shipping is taxable.
 *
 * True for tax status 'taxable' or 'shipping'.
 *
 * @return bool
 */
public function is_shipping_taxable() {
	return in_array( $this->get_tax_status(), array( 'taxable', 'shipping' ), true );
}
/**
 * Returns whether or not the product stock is managed.
 *
 * Stock management must be enabled both shop-wide (option) and on the
 * product itself.
 *
 * @return bool
 */
public function managing_stock() {
	return 'yes' === get_option( 'woocommerce_manage_stock' ) ? $this->get_manage_stock() : false;
}
/**
 * Returns whether or not the product can be backordered.
 *
 * Backorders setting 'yes' and 'notify' both allow backorders.
 *
 * @return bool Filterable via 'woocommerce_product_backorders_allowed'.
 */
public function backorders_allowed() {
	$allowed = in_array( $this->get_backorders(), array( 'yes', 'notify' ), true );
	return apply_filters( 'woocommerce_product_backorders_allowed', $allowed, $this->get_id(), $this );
}
/**
 * Returns whether or not the customer must be notified on backorder.
 *
 * Only relevant when stock is managed and backorders are set to 'notify'.
 *
 * @return bool Filterable via 'woocommerce_product_backorders_require_notification'.
 */
public function backorders_require_notification() {
	$notify = $this->managing_stock() && 'notify' === $this->get_backorders();
	return apply_filters( 'woocommerce_product_backorders_require_notification', $notify, $this );
}
/**
 * Check if a product is on backorder.
 *
 * A product is on backorder when its stock is managed, backorders are
 * allowed, and fulfilling the requested quantity would push stock below
 * zero.
 *
 * @param int $qty_in_cart Quantity already in the cart (default: 0).
 * @return bool
 */
public function is_on_backorder( $qty_in_cart = 0 ) {
	// Guard clauses replace the previous redundant `... ? true : false`
	// ternary — the conjunction is already boolean.
	if ( ! $this->managing_stock() || ! $this->backorders_allowed() ) {
		return false;
	}
	return ( $this->get_stock_quantity() - $qty_in_cart ) < 0;
}
/**
 * Returns whether or not the product has enough stock for the order.
 *
 * Unmanaged stock and allowed backorders are always "enough".
 *
 * @param mixed $quantity Quantity requested.
 * @return bool
 */
public function has_enough_stock( $quantity ) {
	if ( ! $this->managing_stock() ) {
		return true;
	}
	if ( $this->backorders_allowed() ) {
		return true;
	}
	return $this->get_stock_quantity() >= $quantity;
}
/**
 * Returns whether or not the product has any visible attribute.
 *
 * @return bool
 */
public function has_attributes() {
	foreach ( $this->get_attributes() as $product_attribute ) {
		// One visible attribute is enough.
		if ( $product_attribute->get_visible() ) {
			return true;
		}
	}
	return false;
}
/**
 * Returns whether or not the product has any child product.
 *
 * @return bool
 */
public function has_child() {
	return count( $this->get_children() ) > 0;
}
/**
 * Does a child have dimensions?
 *
 * Base implementation always returns false — presumably overridden by
 * product types that have children (confirm in subclasses).
 *
 * @since 3.0.0
 * @return bool
 */
public function child_has_dimensions() {
return false;
}
/**
 * Does a child have a weight?
 *
 * Base implementation always returns false — presumably overridden by
 * product types that have children (confirm in subclasses).
 *
 * @since 3.0.0
 * @return bool
 */
public function child_has_weight() {
return false;
}
/**
 * Check if a downloadable product has a file attached.
 *
 * @since 1.6.2
 *
 * @param string $download_id File identifier; empty string means "any file".
 * @return bool Whether the downloadable product has a file attached.
 */
public function has_file( $download_id = '' ) {
	if ( ! $this->is_downloadable() ) {
		return false;
	}
	return (bool) $this->get_file( $download_id );
}
/**
 * Returns whether or not the product has additional options that need
 * selecting before adding to cart.
 *
 * Base implementation always returns false — presumably overridden by
 * product types with options, e.g. variations (confirm in subclasses).
 *
 * @since 3.0.0
 * @return bool
 */
public function has_options() {
return false;
}
/*
|--------------------------------------------------------------------------
| Non-CRUD Getters
|--------------------------------------------------------------------------
*/
/**
 * Get the product's title. For products this is the product name.
 *
 * @return string Filterable via 'woocommerce_product_title'.
 */
public function get_title() {
	$title = $this->get_name();
	return apply_filters( 'woocommerce_product_title', $title, $this );
}
/**
 * Product permalink (frontend URL of the product post).
 *
 * @return string
 */
public function get_permalink() {
	$product_id = $this->get_id();
	return get_permalink( $product_id );
}
/**
 * Returns the children IDs if applicable. Overridden by child classes.
 *
 * Base implementation: no children.
 *
 * @return array of IDs
 */
public function get_children() {
return array();
}
/**
 * If the stock level comes from another product ID, this should be modified.
 *
 * Base implementation: the product manages its own stock, so its own ID
 * is returned. Presumably overridden where stock is inherited — confirm
 * in subclasses.
 *
 * @since 3.0.0
 * @return int
 */
public function get_stock_managed_by_id() {
return $this->get_id();
}
/**
 * Returns the price in HTML format.
 *
 * Empty price, sale price, and regular price each render differently.
 *
 * @param string $deprecated Unused; kept for backwards compatibility.
 * @return string Filterable via 'woocommerce_get_price_html'.
 */
public function get_price_html( $deprecated = '' ) {
	if ( '' === $this->get_price() ) {
		// No price set at all.
		$price = apply_filters( 'woocommerce_empty_price_html', '', $this );
	} elseif ( $this->is_on_sale() ) {
		// Show struck-through regular price next to the sale price.
		$regular = wc_get_price_to_display( $this, array( 'price' => $this->get_regular_price() ) );
		$sale    = wc_get_price_to_display( $this );
		$price   = wc_format_sale_price( $regular, $sale ) . $this->get_price_suffix();
	} else {
		$price = wc_price( wc_get_price_to_display( $this ) ) . $this->get_price_suffix();
	}
	return apply_filters( 'woocommerce_get_price_html', $price, $this );
}
/**
 * Get product name with SKU or ID. Used within admin.
 *
 * Falls back to '#<id>' when no SKU is set.
 *
 * @return string Formatted product name.
 */
public function get_formatted_name() {
	$sku        = $this->get_sku();
	$identifier = $sku ? $sku : '#' . $this->get_id();
	return sprintf( '%2$s (%1$s)', $identifier, $this->get_name() );
}
/**
 * Get min quantity which can be purchased at once.
 *
 * Fixed at 1 for the base product type.
 *
 * @since 3.0.0
 * @return int
 */
public function get_min_purchase_quantity() {
return 1;
}
/**
 * Get max quantity which can be purchased at once.
 *
 * Sold-individually caps at 1; allowed backorders or unmanaged stock
 * mean unlimited; otherwise the current stock quantity.
 *
 * @since 3.0.0
 * @return int Quantity or -1 if unlimited.
 */
public function get_max_purchase_quantity() {
	if ( $this->is_sold_individually() ) {
		return 1;
	}
	if ( $this->backorders_allowed() || ! $this->managing_stock() ) {
		return -1;
	}
	return $this->get_stock_quantity();
}
/**
 * Get the add-to-cart URL used mainly in loops.
 *
 * @return string Filterable via 'woocommerce_product_add_to_cart_url'.
 */
public function add_to_cart_url() {
	$url = $this->get_permalink();
	return apply_filters( 'woocommerce_product_add_to_cart_url', $url, $this );
}
/**
 * Get the add-to-cart button text for the single product page.
 *
 * @return string Filterable via 'woocommerce_product_single_add_to_cart_text'.
 */
public function single_add_to_cart_text() {
	$text = __( 'Add to cart', 'woocommerce' );
	return apply_filters( 'woocommerce_product_single_add_to_cart_text', $text, $this );
}
/**
 * Get the add-to-cart button text (loop context).
 *
 * @return string Filterable via 'woocommerce_product_add_to_cart_text'.
 */
public function add_to_cart_text() {
	$text = __( 'Read more', 'woocommerce' );
	return apply_filters( 'woocommerce_product_add_to_cart_text', $text, $this );
}
/**
 * Returns the main product image HTML.
 *
 * Falls back to the parent post's thumbnail, then (optionally) to the
 * WooCommerce placeholder. URLs are made protocol-relative.
 *
 * @param string $size        Image size (default: 'shop_thumbnail').
 * @param array  $attr        Attributes passed to get_the_post_thumbnail().
 * @param bool   $placeholder True to return a placeholder if no image is found, false for an empty string.
 * @return string
 */
public function get_image( $size = 'shop_thumbnail', $attr = array(), $placeholder = true ) {
	$product_id = $this->get_id();
	$image      = '';
	if ( has_post_thumbnail( $product_id ) ) {
		$image = get_the_post_thumbnail( $product_id, $size, $attr );
	} else {
		// No own thumbnail — try the parent post's image.
		$parent_id = wp_get_post_parent_id( $product_id );
		if ( $parent_id && has_post_thumbnail( $parent_id ) ) {
			$image = get_the_post_thumbnail( $parent_id, $size, $attr );
		} elseif ( $placeholder ) {
			$image = wc_placeholder_img( $size );
		}
	}
	// Strip the scheme so the image loads on both http and https pages.
	return str_replace( array( 'https://', 'http://' ), '//', $image );
}
/**
 * Returns the product shipping class SLUG.
 *
 * @return string Empty string when no class is set or the term lookup fails.
 */
public function get_shipping_class() {
	$class_id = $this->get_shipping_class_id();
	if ( ! $class_id ) {
		return '';
	}
	$term = get_term_by( 'id', $class_id, 'product_shipping_class' );
	return ( $term && ! is_wp_error( $term ) ) ? $term->slug : '';
}
/**
 * Returns a single product attribute as a comma-separated string.
 *
 * Looks the attribute up by sanitized name, then by its taxonomy-prefixed
 * ('pa_') variant. Taxonomy attributes resolve to term names; custom
 * attributes to their stored options.
 *
 * @param string $attribute Attribute name to get.
 * @return string Empty string when the attribute does not exist.
 */
public function get_attribute( $attribute ) {
	$attributes = $this->get_attributes();
	$name       = sanitize_title( $attribute );
	$attribute_object = null;
	if ( isset( $attributes[ $name ] ) ) {
		$attribute_object = $attributes[ $name ];
	} elseif ( isset( $attributes[ 'pa_' . $name ] ) ) {
		$attribute_object = $attributes[ 'pa_' . $name ];
	}
	if ( ! $attribute_object ) {
		return '';
	}
	if ( $attribute_object->is_taxonomy() ) {
		$terms = wc_get_product_terms( $this->get_id(), $attribute_object->get_name(), array( 'fields' => 'names' ) );
		return implode( ', ', $terms );
	}
	return wc_implode_text_attributes( $attribute_object->get_options() );
}
/**
 * Get the total amount (COUNT) of ratings, or just the count for one
 * rating value, e.g. number of 5-star ratings.
 *
 * @param int $value Optional. Rating value to get the count for. By default returns the count of all rating values.
 * @return int
 */
public function get_rating_count( $value = null ) {
	$counts = $this->get_rating_counts();
	if ( null === $value ) {
		return array_sum( $counts );
	}
	return isset( $counts[ $value ] ) ? absint( $counts[ $value ] ) : 0;
}
/**
 * Get a download file by $download_id.
 *
 * An empty identifier returns the first file, if any.
 *
 * @param string $download_id File identifier.
 * @return array|false False if not found. Filterable via 'woocommerce_product_file'.
 */
public function get_file( $download_id = '' ) {
	$files = $this->get_downloads();
	$file  = false;
	if ( '' === $download_id ) {
		if ( count( $files ) ) {
			$file = current( $files );
		}
	} elseif ( isset( $files[ $download_id ] ) ) {
		$file = $files[ $download_id ];
	}
	return apply_filters( 'woocommerce_product_file', $file, $this, $download_id );
}
/**
 * Get file download path identified by $download_id.
 *
 * @param string $download_id File identifier.
 * @return string Empty string when the file is unknown.
 */
public function get_file_download_path( $download_id ) {
	$files     = $this->get_downloads();
	$file_path = '';
	if ( isset( $files[ $download_id ] ) ) {
		$file_path = $files[ $download_id ]->get_file();
	}
	// Allow overriding based on the particular file being requested.
	return apply_filters( 'woocommerce_product_file_download_path', $file_path, $this, $download_id );
}
/**
 * Get the suffix to display after prices > 0.
 *
 * Only rendered when a suffix option is set, taxes are enabled, and the
 * product is taxable. The suffix may contain {price_including_tax} and
 * {price_excluding_tax} placeholders.
 *
 * @param string  $price Price to calculate, left blank to just use get_price().
 * @param integer $qty   Passed on to the tax-inclusive/exclusive price helpers.
 * @return string Filterable via 'woocommerce_get_price_suffix'.
 */
public function get_price_suffix( $price = '', $qty = 1 ) {
	$html   = '';
	$suffix = get_option( 'woocommerce_price_display_suffix' );
	if ( $suffix && wc_tax_enabled() && 'taxable' === $this->get_tax_status() ) {
		$price = ( '' === $price ) ? $this->get_price() : $price;
		$incl  = wc_price( wc_get_price_including_tax( $this, array( 'qty' => $qty, 'price' => $price ) ) );
		$excl  = wc_price( wc_get_price_excluding_tax( $this, array( 'qty' => $qty, 'price' => $price ) ) );
		$html  = ' <small class="woocommerce-price-suffix">' . wp_kses_post( $suffix ) . '</small>';
		$html  = str_replace( array( '{price_including_tax}', '{price_excluding_tax}' ), array( $incl, $excl ), $html );
	}
	return apply_filters( 'woocommerce_get_price_suffix', $html, $this, $price, $qty );
}
/**
 * Returns the availability of the product as text plus a CSS class.
 *
 * @return string[] Keys: 'availability', 'class'. Filterable via 'woocommerce_get_availability'.
 */
public function get_availability() {
	$availability = array(
		'availability' => $this->get_availability_text(),
		'class'        => $this->get_availability_class(),
	);
	return apply_filters( 'woocommerce_get_availability', $availability, $this );
}
/**
 * Get availability text based on stock status.
 *
 * @return string Filterable via 'woocommerce_get_availability_text'.
 */
protected function get_availability_text() {
	$availability = '';
	if ( ! $this->is_in_stock() ) {
		$availability = __( 'Out of stock', 'woocommerce' );
	} elseif ( $this->managing_stock() && $this->is_on_backorder( 1 ) ) {
		// Only show a message when the shop notifies on backorder.
		$availability = $this->backorders_require_notification() ? __( 'Available on backorder', 'woocommerce' ) : '';
	} elseif ( $this->managing_stock() ) {
		$availability = wc_format_stock_for_display( $this );
	}
	return apply_filters( 'woocommerce_get_availability_text', $availability, $this );
}
/**
 * Get availability CSS class name based on stock status.
 *
 * @return string Filterable via 'woocommerce_get_availability_class'.
 */
protected function get_availability_class() {
	$class = 'in-stock';
	if ( ! $this->is_in_stock() ) {
		$class = 'out-of-stock';
	} elseif ( $this->managing_stock() && $this->is_on_backorder( 1 ) ) {
		$class = 'available-on-backorder';
	}
	return apply_filters( 'woocommerce_get_availability_class', $class, $this );
}
}
| hai2219/tantap-shop | wp-content/plugins/woocommerce/includes/abstracts/abstract-wc-product.php | PHP | mit | 49,196 |
# coding: utf-8
require 'test_helper'
# Unit tests for the Garanti payment gateway. Network calls are stubbed by
# mocking ssl_post with canned XML responses, so no real requests are made.
class GarantiTest < Test::Unit::TestCase
# Build a test-mode gateway with dummy credentials and a standard order.
def setup
@original_kcode = nil
Base.mode = :test
@gateway = GarantiGateway.new(login: 'a', password: 'b', terminal_id: 'c', merchant_id: 'd')
@credit_card = credit_card(4242424242424242)
@amount = 1000 # 1000 cents, 10$
@options = {
order_id: 'db4af18c5222503d845180350fbda516',
billing_address: address,
description: 'Store Purchase'
}
end
# Restore $KCODE if a test changed it (legacy Ruby 1.8 encoding global).
def teardown
$KCODE = @original_kcode if @original_kcode
end
# An approved response should yield a successful, test-mode response whose
# authorization echoes the order id.
def test_successful_purchase
@gateway.expects(:ssl_post).returns(successful_purchase_response)
assert response = @gateway.purchase(@amount, @credit_card, @options)
assert_success response
# Replace with authorization number from the successful response
assert_equal 'db4af18c5222503d845180350fbda516', response.authorization
assert response.test?
end
# A declined response should yield a failed, test-mode response.
def test_unsuccessful_request
@gateway.expects(:ssl_post).returns(failed_purchase_response)
assert response = @gateway.purchase(@amount, @credit_card, @options)
assert_failure response
assert response.test?
end
# Turkish characters are transliterated; the expected output depends on the
# arity (and thus behavior) of ActiveSupport's transliterate across versions.
def test_character_normalization
if ActiveSupport::Inflector.method(:transliterate).arity == -2
assert_equal 'ABCCDEFGGHIIJKLMNOOPRSSTUUVYZ', @gateway.send(:normalize, 'ABCÇDEFGĞHIİJKLMNOÖPRSŞTUÜVYZ')
assert_equal 'abccdefgghiijklmnooprsstuuvyz', @gateway.send(:normalize, 'abcçdefgğhıijklmnoöprsştuüvyz')
else
assert_equal 'ABCDEFGHIJKLMNOPRSTUVYZ', @gateway.send(:normalize, 'ABCÇDEFGĞHIİJKLMNOÖPRSŞTUÜVYZ')
assert_equal 'abcdefghijklmnoprstuvyz', @gateway.send(:normalize, 'abcçdefgğhıijklmnoöprsştuüvyz')
end
end
# normalize must tolerate nil input.
def test_nil_normalization
assert_nil @gateway.send(:normalize, nil)
end
# The raw XML below is unparseable (bare '&', invalid chars); after
# strip_invalid_xml_chars it must parse, while the original must not.
def test_strip_invalid_xml_chars
xml = <<XML
<response>
<element>Parse the First & but not this ˜ &x002a;</element>
</response>
XML
parsed_xml = @gateway.send(:strip_invalid_xml_chars, xml)
assert REXML::Document.new(parsed_xml)
assert_raise(REXML::ParseException) do
REXML::Document.new(xml)
end
end
private
# Place raw successful response from gateway here
def successful_purchase_response
<<~XML
<GVPSResponse>
<Mode></Mode>
<Order>
<OrderID>db4af18c5222503d845180350fbda516</OrderID>
<GroupID></GroupID>
</Order>
<Transaction>
<Response>
<Source>HOST</Source>
<Code>00</Code>
<ReasonCode>00</ReasonCode>
<Message>Approved</Message>
<ErrorMsg></ErrorMsg>
<SysErrMsg></SysErrMsg>
</Response>
<RetrefNum>035208609374</RetrefNum>
<AuthCode>784260</AuthCode>
<BatchNum>000089</BatchNum>
<SequenceNum>000008</SequenceNum>
<ProvDate>20101218 08:56:39</ProvDate>
<CardNumberMasked></CardNumberMasked>
<CardHolderName>Company Name & Another Name</CardHolderName>
<HostMsgList></HostMsgList>
<RewardInqResult>
<RewardList></RewardList>
<ChequeList></ChequeList>
</RewardInqResult>
</Transaction>
</GVPSResponse>
XML
end
# Place raw failed response from gateway here
def failed_purchase_response
<<~XML
<GVPSResponse>
<Mode></Mode>
<Order>
<OrderID>db4af18c5222503d845180350fbda516</OrderID>
<GroupID></GroupID>
</Order>
<Transaction>
<Response>
<Source>GVPS</Source>
<Code>92</Code>
<ReasonCode>0651</ReasonCode>
<Message>Declined</Message>
<ErrorMsg></ErrorMsg>
<SysErrMsg>ErrorId: 0651</SysErrMsg>
</Response>
<RetrefNum></RetrefNum>
<AuthCode> </AuthCode>
<BatchNum></BatchNum>
<SequenceNum></SequenceNum>
<ProvDate>20101220 01:58:41</ProvDate>
<CardNumberMasked></CardNumberMasked>
<CardHolderName></CardHolderName>
<HostMsgList></HostMsgList>
<RewardInqResult>
<RewardList></RewardList>
<ChequeList></ChequeList>
</RewardInqResult>
</Transaction>
</GVPSResponse>
XML
end
end
| atxwebs/active_merchant | test/unit/gateways/garanti_test.rb | Ruby | mit | 4,823 |
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
require 'redis/objects'
# Primary spec fixture exercising the full Redis::Objects DSL: counters,
# hashes, locks, values, lists, sets, sorted sets, global/class-level
# objects, custom and callable keys, and default expirations.
# NOTE: custom :key strings are deliberately single-quoted so '#{...}' is
# interpolated lazily by the gem per instance, not at class-definition time.
class Roster
include Redis::Objects
counter :available_slots, :start => 10
counter :pitchers, :limit => :max_pitchers
counter :basic
hash_key :contact_information, :marshal_keys=>{'updated_at'=>true}
lock :resort, :timeout => 2
value :starting_pitcher, :marshal => true
list :player_stats, :marshal => true
set :outfielders, :marshal => true
sorted_set :rank
# Global (class-level) objects shared across all Roster instances.
counter :total_players_online, :global => true
set :all_players_online, :global => true
value :last_player, :global => true
# Custom keys: '#{...}' below is interpolated against instance methods.
counter :player_totals, :key => 'players/#{username}/total'
list :all_player_stats, :key => 'players:all_stats', :global => true
set :total_wins, :key => 'players:#{id}:all_stats'
value :my_rank, :key => 'players:my_rank:#{username}'
# Class-method interpolation is also supported in key templates.
def self.jimmyhat; 350; end
value :weird_key, :key => 'players:weird_key:#{jimmyhat}', :global => true
# A Proc may serve as the key; it is called with the owner at access time.
counter :daily, :global => true, :key => Proc.new { |roster| "#{roster.name}:#{Time.now.strftime('%Y-%m-%dT%H')}:daily" }
# Default TTLs: :expiration is relative seconds, :expireat an absolute time.
value :value_with_expiration, :expiration => 10
value :value_with_expireat, :expireat => Time.now + 10.seconds
set :set_with_expiration, :expiration => 10
set :set_with_expireat, :expireat => Time.now + 10.seconds
list :list_with_expiration, :expiration => 10
list :list_with_expireat, :expireat => Time.now + 10.seconds
hash_key :hash_with_expiration, :expiration => 10
hash_key :hash_with_expireat, :expireat => Time.now + 10.seconds
counter :counter_with_expiration, :expiration => 10
counter :counter_with_expireat, :expireat => Time.now + 10.seconds
sorted_set :sorted_set_with_expiration,:expiration => 10
sorted_set :sorted_set_with_expireat, :expireat => Time.now + 10.seconds
# Instance methods referenced by the key templates and :limit above.
def initialize(id=1) @id = id end
def id; @id; end
def username; "user#{id}"; end
def max_pitchers; 3; end
end
# Subclass that does NOT re-include Redis::Objects; specs use it to verify
# the DSL is inherited from Roster as-is.
class VanillaRoster < Roster
# No explicit Redis::Objects
end
# Subclass that re-includes Redis::Objects, overrides an inherited counter
# and declares a new one of its own.
class CustomRoster < Roster
include Redis::Objects
counter :basic # Override
counter :special # New
end
# Plain class (no Redis::Objects) that already defines #increment; used to
# check interaction with pre-existing methods of the same name.
class MethodRoster
# Returns a fixed 42 so specs can tell this implementation handled the call.
def increment(attribute, by=1)
42
end
def initialize(id=1) @id = id end
def id; @id; end
end
# Adds Redis::Objects on top of MethodRoster, plus a plain :counter
# accessor and a Redis counter, to exercise name-collision behavior.
class CustomMethodRoster < MethodRoster
include Redis::Objects
attr_accessor :counter
counter :basic
end
# Roster variant identified by a :uid attribute instead of :id; base for
# the custom-id-field fixture below.
class UidRoster < Roster
attr_accessor :uid
def initialize(uid=123) @uid = uid end
end
# Uses redis_id_field to key Redis objects by :uid rather than :id.
class CustomIdFieldRoster < UidRoster
redis_id_field :uid
include Redis::Objects
counter :basic
end
describe Redis::Objects do
before do
@roster = Roster.new
@roster2 = Roster.new
@roster_1 = Roster.new(1)
@roster_2 = Roster.new(2)
@roster_3 = Roster.new(3)
@vanilla_roster = VanillaRoster.new
@custom_roster = CustomRoster.new
@roster.available_slots.reset
@roster.pitchers.reset
@roster.basic.reset
@roster.resort_lock.clear
@roster.starting_pitcher.delete
@roster.player_stats.clear
@roster.outfielders.clear
@roster.contact_information.clear
@roster_1.outfielders.clear
@roster_2.outfielders.clear
@roster_3.outfielders.clear
@roster.redis.del(UNIONSTORE_KEY)
@roster.redis.del(INTERSTORE_KEY)
@roster.redis.del(DIFFSTORE_KEY)
Roster.total_players_online.reset
Roster.all_player_stats.clear
Roster.all_players_online.clear
Roster.last_player.delete
Roster.weird_key.clear
@roster.player_totals.clear
@roster.all_player_stats.clear
@roster.total_wins.clear
@roster.my_rank.clear
@roster.daily.clear
@custom_roster.basic.reset
@custom_roster.special.reset
end
it "should provide a connection method" do
Roster.redis.should == Redis::Objects.redis
# Roster.redis.should.be.kind_of(Redis)
end
it "should support interpolation of key names" do
@roster.player_totals.incr
@roster.redis.get('players/user1/total').should == '1'
@roster.redis.get('players/#{username}/total').should.be.nil
@roster.all_player_stats << 'a'
@roster.redis.lindex('players:all_stats', 0).should == 'a'
@roster.total_wins << 'a'
# test for interpolation of key names
@roster.redis.smembers('players:#{id}:all_stats').should == []
@roster.redis.smembers('players:1:all_stats').should == ['a']
@roster.my_rank = 'a'
@roster.redis.get('players:my_rank:user1').should == 'a'
Roster.weird_key = 'tuka'
Roster.redis.get("players:weird_key:#{Roster.jimmyhat}").should == 'tuka'
k = "Roster:#{Time.now.strftime('%Y-%m-%dT%H')}:daily"
@roster.daily.incr
@roster.redis.get(k).should == '1'
end
it "should be able to get/set contact info" do
@roster.contact_information['John_Phone'] = '123415352'
@roster.contact_information['John_Address'] = '123 LANE'
@roster.contact_information['John_Phone'].should == '123415352'
@roster.contact_information['John_Address'].should == '123 LANE'
@roster.contact_information['asdasd'].should.be.nil
@roster.contact_information.size.should == 2
end
it 'should be able to expire keys and then persist them' do
# on a hash_key
@roster.contact_information['Jenny_Phone'] = '8675309'
@roster.contact_information.expire 30
@roster.contact_information.ttl.should > -1
@roster.contact_information.ttl.should <= 30
@roster.contact_information.persist
@roster.contact_information.ttl.should == -1
@roster.contact_information['Jenny_Phone'].should == '8675309'
# on a value
@roster.my_rank = 42
@roster.my_rank.expire 30
@roster.my_rank.ttl.should > -1
@roster.my_rank.ttl.should <= 30
@roster.my_rank.persist
@roster.my_rank.ttl.should == -1
@roster.my_rank.to_i.should == 42
end
it "should be marshalling hash keys" do
@roster.contact_information['updated_at'] = Time.now
@roster.contact_information['updated_at'].class.should == Time
end
it "should create counter accessors" do
[:available_slots, :pitchers, :basic].each do |m|
@roster.respond_to?(m).should == true
end
end
it "should support increment/decrement of counters" do
@roster.available_slots.key.should == 'roster:1:available_slots'
@roster.available_slots.should == 10
# math proxy ops
(@roster.available_slots == 10).should.be.true
(@roster.available_slots <= 10).should.be.true
(@roster.available_slots < 11).should.be.true
(@roster.available_slots > 9).should.be.true
(@roster.available_slots >= 10).should.be.true
"#{@roster.available_slots}".should == "10"
@roster.available_slots.increment.should == 11
@roster.available_slots.increment.should == 12
@roster2.available_slots.increment.should == 13
@roster2.available_slots.increment(2).should == 15
@roster.available_slots.decrement.should == 14
@roster2.available_slots.decrement.should == 13
@roster.available_slots.decrement.should == 12
@roster2.available_slots.decrement(4).should == 8
@roster.available_slots.should == 8
@roster.available_slots.reset.should.be.true
@roster.available_slots.should == 10
@roster.available_slots.reset(15).should.be.true
@roster.available_slots.should == 15
@roster.pitchers.increment.should == 1
@roster.basic.increment.should == 1
@roster2.basic.decrement.should == 0
@roster.basic.get.should == 0
end
it "should support class-level increment/decrement of counters" do
Roster.get_counter(:available_slots, @roster.id).should == 10
Roster.increment_counter(:available_slots, @roster.id).should == 11
Roster.increment_counter(:available_slots, @roster.id, 3).should == 14
Roster.decrement_counter(:available_slots, @roster.id, 2).should == 12
Roster.decrement_counter(:available_slots, @roster.id).should == 11
Roster.reset_counter(:available_slots, @roster.id).should == true
Roster.get_counter(:available_slots, @roster.id).should == 10
Roster.getset_counter(:available_slots, @roster.id, 555).should == 10
Roster.get_counter(:available_slots, @roster.id).should == 555
end
it "should support class-level increment/decrement of global counters" do
Roster.total_players_online.should == 0
Roster.total_players_online.increment.should == 1
Roster.total_players_online.decrement.should == 0
Roster.total_players_online.increment(3).should == 3
Roster.total_players_online.decrement(2).should == 1
Roster.total_players_online.reset.should.be.true
Roster.total_players_online.should == 0
Roster.get_counter(:total_players_online).should == 0
Roster.increment_counter(:total_players_online).should == 1
Roster.increment_counter(:total_players_online, nil, 3).should == 4
Roster.decrement_counter(:total_players_online, nil, 2).should == 2
Roster.decrement_counter(:total_players_online).should == 1
Roster.reset_counter(:total_players_online).should == true
Roster.get_counter(:total_players_online).should == 0
Roster.getset_counter(:total_players_online, nil, 111).should == 0
Roster.get_counter(:total_players_online).should == 111
end
it "should take an atomic block for increment/decrement" do
a = false
@roster.available_slots.should == 10
@roster.available_slots.decr do |cnt|
if cnt >= 0
a = true
end
end
@roster.available_slots.should == 9
a.should.be.true
@roster.available_slots.should == 9
@roster.available_slots.decr do |cnt|
@roster.available_slots.should == 8
false
end
@roster.available_slots.should == 8
@roster.available_slots.should == 8
@roster.available_slots.decr do |cnt|
@roster.available_slots.should == 7
nil # should rewind
end
@roster.available_slots.should == 8
@roster.available_slots.should == 8
@roster.available_slots.decr(4) do |cnt|
@roster.available_slots.should == 4
nil # should rewind
end
@roster.available_slots.should == 8
@roster.available_slots.should == 8
@roster.available_slots.incr do |cnt|
if 1 == 2 # should rewind
true
end
end
@roster.available_slots.should == 8
@roster.available_slots.should == 8
@roster.available_slots.incr(5) do |cnt|
if 1 == 2 # should rewind
true
end
end
@roster.available_slots.should == 8
@roster.available_slots.should == 8
@roster.available_slots.incr do |cnt|
@roster.available_slots.should == 9
[]
end
@roster.available_slots.should == 9
@roster.available_slots.should == 9
begin
@roster.available_slots.decr do |cnt|
@roster.available_slots.should == 8
raise 'oops'
end
rescue
end
@roster.available_slots.should == 9
# check return value from the block
value =
@roster.available_slots.decr do |cnt|
@roster.available_slots.should == 8
42
end
value.should == 42
@roster.available_slots.should == 8
end
it "should take an atomic block for increment/decrement class methods" do
a = false
Roster.get_counter(:available_slots, @roster.id).should == 10
Roster.decrement_counter(:available_slots, @roster.id) do |cnt|
if cnt >= 0
a = true
end
end
Roster.get_counter(:available_slots, @roster.id).should == 9
a.should.be.true
Roster.get_counter(:available_slots, @roster.id).should == 9
Roster.decrement_counter(:available_slots, @roster.id) do |cnt|
Roster.get_counter(:available_slots, @roster.id).should == 8
false
end
Roster.get_counter(:available_slots, @roster.id).should == 8
Roster.get_counter(:available_slots, @roster.id).should == 8
Roster.decrement_counter(:available_slots, @roster.id) do |cnt|
Roster.get_counter(:available_slots, @roster.id).should == 7
nil # should rewind
end
Roster.get_counter(:available_slots, @roster.id).should == 8
Roster.get_counter(:available_slots, @roster.id).should == 8
Roster.decrement_counter(:available_slots, @roster.id, 4) do |cnt|
Roster.get_counter(:available_slots, @roster.id).should == 4
nil # should rewind
end
Roster.get_counter(:available_slots, @roster.id).should == 8
Roster.get_counter(:available_slots, @roster.id).should == 8
Roster.increment_counter(:available_slots, @roster.id) do |cnt|
if 1 == 2 # should rewind
true
end
end
Roster.get_counter(:available_slots, @roster.id).should == 8
Roster.get_counter(:available_slots, @roster.id).should == 8
Roster.increment_counter(:available_slots, @roster.id, 4) do |cnt|
if 1 == 2 # should rewind
true
end
end
Roster.get_counter(:available_slots, @roster.id).should == 8
Roster.get_counter(:available_slots, @roster.id).should == 8
Roster.increment_counter(:available_slots, @roster.id) do |cnt|
Roster.get_counter(:available_slots, @roster.id).should == 9
[]
end
Roster.get_counter(:available_slots, @roster.id).should == 9
Roster.get_counter(:available_slots, @roster.id).should == 9
begin
Roster.decrement_counter(:available_slots, @roster.id) do |cnt|
Roster.get_counter(:available_slots, @roster.id).should == 8
raise 'oops'
end
rescue
end
Roster.get_counter(:available_slots, @roster.id).should == 9
# check return value from the block
value =
Roster.decrement_counter(:available_slots, @roster.id) do |cnt|
Roster.get_counter(:available_slots, @roster.id).should == 8
42
end
value.should == 42
Roster.get_counter(:available_slots, @roster.id).should == 8
end
it "should properly throw errors on bad counters" do
error = nil
begin
Roster.increment_counter(:badness, 2)
rescue => error
end
error.should.be.kind_of(NoMethodError)
error = nil
begin
Roster.obtain_lock(:badness, 2){}
rescue => error
end
error.should.be.kind_of(Redis::Objects::UndefinedLock)
error = nil
begin
@roster.available_slots = 42
rescue => error
end
error.should.be.kind_of(NoMethodError)
error = nil
begin
@roster.available_slots += 69
rescue => error
end
error.should.be.kind_of(NoMethodError)
error = nil
begin
@roster.available_slots -= 15
rescue => error
end
error.should.be.kind_of(NoMethodError)
end
it "should support obtain_lock as a class method" do
error = nil
begin
Roster.obtain_lock(:resort, 2) do
Roster.redis.get("roster:2:resort_lock").should.not.be.nil
end
rescue => error
end
error.should.be.nil
Roster.redis.get("roster:2:resort_lock").should.be.nil
end
it "should handle simple values" do
@roster.starting_pitcher.should == nil
@roster.starting_pitcher = 'Trevor Hoffman'
@roster.starting_pitcher.should == 'Trevor Hoffman'
@roster.starting_pitcher.get.should == 'Trevor Hoffman'
@roster.starting_pitcher = 'Tom Selleck'
@roster.starting_pitcher.should == 'Tom Selleck'
@roster.starting_pitcher.del.should == 1
@roster.starting_pitcher.should.be.nil
end
it "should handle complex marshaled values" do
@roster.starting_pitcher.should == nil
@roster.starting_pitcher = {:json => 'data'}
@roster.starting_pitcher.should == {:json => 'data'}
@roster.starting_pitcher.get.should == {:json => 'data'}
@roster.starting_pitcher.del.should == 1
@roster.starting_pitcher.should.be.nil
end
it "should handle lists of simple values" do
  # Redis::List: exercises push/unshift/pop/shift, indexing, ranges,
  # delete, enumeration, and .get snapshots. Statement order matters --
  # each line mutates or depends on the accumulated list state.
  @roster.player_stats.should.be.empty
  @roster.player_stats << 'a'
  @roster.player_stats.should == ['a']
  @roster.player_stats.get.should == ['a']
  @roster.player_stats.unshift 'b'
  @roster.player_stats.to_s.should == 'b, a'
  @roster.player_stats.should == ['b','a']
  @roster.player_stats.get.should == ['b','a']
  @roster.player_stats.push 'c'
  @roster.player_stats.should == ['b','a','c']
  @roster.player_stats.get.should == ['b','a','c']
  @roster.player_stats.first.should == 'b'
  @roster.player_stats.last.should == 'c'
  @roster.player_stats << 'd'
  @roster.player_stats.should == ['b','a','c','d']
  # Random access via [] and at().
  @roster.player_stats[1].should == 'a'
  @roster.player_stats[0].should == 'b'
  @roster.player_stats[2].should == 'c'
  @roster.player_stats[3].should == 'd'
  @roster.player_stats.include?('c').should.be.true
  @roster.player_stats.include?('no').should.be.false
  @roster.player_stats.pop.should == 'd'
  @roster.player_stats[0].should == @roster.player_stats.at(0)
  @roster.player_stats[1].should == @roster.player_stats.at(1)
  @roster.player_stats[2].should == @roster.player_stats.at(2)
  @roster.player_stats.should == ['b','a','c']
  @roster.player_stats.get.should == ['b','a','c']
  @roster.player_stats.shift.should == 'b'
  @roster.player_stats.should == ['a','c']
  @roster.player_stats.get.should == ['a','c']
  @roster.player_stats << 'e' << 'f' << 'e'
  @roster.player_stats.should == ['a','c','e','f','e']
  @roster.player_stats.get.should == ['a','c','e','f','e']
  # delete removes every occurrence and returns the removal count.
  @roster.player_stats.delete('e').should == 2
  @roster.player_stats.should == ['a','c','f']
  @roster.player_stats.get.should == ['a','c','f']
  @roster.player_stats << 'j'
  @roster.player_stats.should == ['a','c','f','j']
  # Range and (start, length) slicing.
  @roster.player_stats[0..2].should == ['a','c','f']
  @roster.player_stats[1, 3].should == ['c','f','j']
  @roster.player_stats.length.should == 4
  @roster.player_stats.size.should == 4
  @roster.player_stats.should == ['a','c','f','j']
  @roster.player_stats.get.should == ['a','c','f','j']
  @roster.player_stats.push *['h','i']
  @roster.player_stats.should == ['a','c','f','j','h','i']
  @roster.player_stats.get.should == ['a','c','f','j','h','i']
  # Enumeration must not mutate the underlying list.
  i = -1
  @roster.player_stats.each do |st|
    st.should == @roster.player_stats[i += 1]
  end
  @roster.player_stats.should == ['a','c','f','j','h','i']
  @roster.player_stats.get.should == ['a','c','f','j','h','i']
  @roster.player_stats.each_with_index do |st,i|
    st.should == @roster.player_stats[i]
  end
  @roster.player_stats.should == ['a','c','f','j','h','i']
  @roster.player_stats.get.should == ['a','c','f','j','h','i']
  coll = @roster.player_stats.collect{|st| st}
  coll.should == ['a','c','f','j','h','i']
  @roster.player_stats.should == ['a','c','f','j','h','i']
  @roster.player_stats.get.should == ['a','c','f','j','h','i']
  @roster.player_stats << 'a'
  coll = @roster.player_stats.select{|st| st == 'a'}
  coll.should == ['a','a']
  @roster.player_stats.should == ['a','c','f','j','h','i','a']
  @roster.player_stats.get.should == ['a','c','f','j','h','i','a']
end
it "should handle sets of simple values" do
  # Redis::Set: duplicates collapse, membership checks, enumeration.
  # Set ordering is undefined, hence the .sort before each comparison.
  @roster.outfielders.should.be.empty
  @roster.outfielders << 'a' << 'a' << 'a'
  @roster.outfielders.should == ['a']
  @roster.outfielders.get.should == ['a']
  @roster.outfielders << 'b' << 'b'
  @roster.outfielders.sort.should == ['a','b']
  @roster.outfielders.members.sort.should == ['a','b']
  @roster.outfielders.get.sort.should == ['a','b']
  @roster.outfielders << 'c'
  @roster.outfielders.sort.should == ['a','b','c']
  @roster.outfielders.get.sort.should == ['a','b','c']
  @roster.outfielders.delete('c')
  @roster.outfielders.sort.should == ['a','b']
  @roster.outfielders.get.sort.should == ['a','b']
  @roster.outfielders.length.should == 2
  @roster.outfielders.size.should == 2
  i = 0
  @roster.outfielders.each do |st|
    i += 1
  end
  i.should == @roster.outfielders.length
  coll = @roster.outfielders.collect{|st| st}
  coll.sort.should == ['a','b']
  @roster.outfielders.sort.should == ['a','b']
  @roster.outfielders.get.sort.should == ['a','b']
  @roster.outfielders << 'c'
  @roster.outfielders.member?('c').should.be.true
  @roster.outfielders.include?('c').should.be.true
  @roster.outfielders.member?('no').should.be.false
  coll = @roster.outfielders.select{|st| st == 'c'}
  coll.should == ['c']
  @roster.outfielders.sort.should == ['a','b','c']
end
it "should handle set intersections and unions" do
  # Exercises &/intersection/intersect/inter aliases, |/+/union aliases,
  # and the *store variants that persist results under an explicit key.
  @roster_1.outfielders << 'a' << 'b' << 'c' << 'd' << 'e'
  @roster_2.outfielders << 'c' << 'd' << 'e' << 'f' << 'g'
  @roster_3.outfielders << 'a' << 'd' << 'g' << 'l' << 'm'
  @roster_1.outfielders.sort.should == %w(a b c d e)
  @roster_2.outfielders.sort.should == %w(c d e f g)
  @roster_3.outfielders.sort.should == %w(a d g l m)
  (@roster_1.outfielders & @roster_2.outfielders).sort.should == ['c','d','e']
  @roster_1.outfielders.intersection(@roster_2.outfielders).sort.should == ['c','d','e']
  @roster_1.outfielders.intersection(@roster_2.outfielders, @roster_3.outfielders).sort.should == ['d']
  @roster_1.outfielders.intersect(@roster_2.outfielders).sort.should == ['c','d','e']
  @roster_1.outfielders.inter(@roster_2.outfielders, @roster_3.outfielders).sort.should == ['d']
  # interstore/unionstore return the stored cardinality; raw members come
  # back marshaled, hence the Marshal.restore before comparing.
  @roster_1.outfielders.interstore(INTERSTORE_KEY, @roster_2.outfielders).should == 3
  @roster_1.redis.smembers(INTERSTORE_KEY).sort.map{|v| Marshal.restore(v)}.should == ['c','d','e']
  @roster_1.outfielders.interstore(INTERSTORE_KEY, @roster_2.outfielders, @roster_3.outfielders).should == 1
  @roster_1.redis.smembers(INTERSTORE_KEY).sort.map{|v| Marshal.restore(v)}.should == ['d']
  (@roster_1.outfielders | @roster_2.outfielders).sort.should == ['a','b','c','d','e','f','g']
  (@roster_1.outfielders + @roster_2.outfielders).sort.should == ['a','b','c','d','e','f','g']
  @roster_1.outfielders.union(@roster_2.outfielders).sort.should == ['a','b','c','d','e','f','g']
  @roster_1.outfielders.union(@roster_2.outfielders, @roster_3.outfielders).sort.should == ['a','b','c','d','e','f','g','l','m']
  @roster_1.outfielders.unionstore(UNIONSTORE_KEY, @roster_2.outfielders).should == 7
  @roster_1.redis.smembers(UNIONSTORE_KEY).map{|v| Marshal.restore(v)}.sort.should == ['a','b','c','d','e','f','g']
  @roster_1.outfielders.unionstore(UNIONSTORE_KEY, @roster_2.outfielders, @roster_3.outfielders).should == 9
  @roster_1.redis.smembers(UNIONSTORE_KEY).map{|v| Marshal.restore(v)}.sort.should == ['a','b','c','d','e','f','g','l','m']
end
it "should handle class-level global lists of simple values" do
  # Same Redis::List behavior as the instance-level spec above, but driven
  # through the class-level (global) accessor; one shared key per class.
  Roster.all_player_stats.should.be.empty
  Roster.all_player_stats << 'a'
  Roster.all_player_stats.should == ['a']
  Roster.all_player_stats.get.should == ['a']
  Roster.all_player_stats.unshift 'b'
  Roster.all_player_stats.to_s.should == 'b, a'
  Roster.all_player_stats.should == ['b','a']
  Roster.all_player_stats.get.should == ['b','a']
  Roster.all_player_stats.push 'c'
  Roster.all_player_stats.should == ['b','a','c']
  Roster.all_player_stats.get.should == ['b','a','c']
  Roster.all_player_stats.first.should == 'b'
  Roster.all_player_stats.last.should == 'c'
  Roster.all_player_stats << 'd'
  Roster.all_player_stats.should == ['b','a','c','d']
  Roster.all_player_stats[1].should == 'a'
  Roster.all_player_stats[0].should == 'b'
  Roster.all_player_stats[2].should == 'c'
  Roster.all_player_stats[3].should == 'd'
  Roster.all_player_stats.include?('c').should.be.true
  Roster.all_player_stats.include?('no').should.be.false
  Roster.all_player_stats.pop.should == 'd'
  Roster.all_player_stats[0].should == Roster.all_player_stats.at(0)
  Roster.all_player_stats[1].should == Roster.all_player_stats.at(1)
  Roster.all_player_stats[2].should == Roster.all_player_stats.at(2)
  Roster.all_player_stats.should == ['b','a','c']
  Roster.all_player_stats.get.should == ['b','a','c']
  Roster.all_player_stats.shift.should == 'b'
  Roster.all_player_stats.should == ['a','c']
  Roster.all_player_stats.get.should == ['a','c']
  Roster.all_player_stats << 'e' << 'f' << 'e'
  Roster.all_player_stats.should == ['a','c','e','f','e']
  Roster.all_player_stats.get.should == ['a','c','e','f','e']
  # delete removes every occurrence and returns the removal count.
  Roster.all_player_stats.delete('e').should == 2
  Roster.all_player_stats.should == ['a','c','f']
  Roster.all_player_stats.get.should == ['a','c','f']
  Roster.all_player_stats << 'j'
  Roster.all_player_stats.should == ['a','c','f','j']
  Roster.all_player_stats[0..2].should == ['a','c','f']
  Roster.all_player_stats[1, 3].should == ['c','f','j']
  Roster.all_player_stats.length.should == 4
  Roster.all_player_stats.size.should == 4
  Roster.all_player_stats.should == ['a','c','f','j']
  Roster.all_player_stats.get.should == ['a','c','f','j']
  # Enumeration must not mutate the underlying list.
  i = -1
  Roster.all_player_stats.each do |st|
    st.should == Roster.all_player_stats[i += 1]
  end
  Roster.all_player_stats.should == ['a','c','f','j']
  Roster.all_player_stats.get.should == ['a','c','f','j']
  Roster.all_player_stats.each_with_index do |st,i|
    st.should == Roster.all_player_stats[i]
  end
  Roster.all_player_stats.should == ['a','c','f','j']
  Roster.all_player_stats.get.should == ['a','c','f','j']
  coll = Roster.all_player_stats.collect{|st| st}
  coll.should == ['a','c','f','j']
  Roster.all_player_stats.should == ['a','c','f','j']
  Roster.all_player_stats.get.should == ['a','c','f','j']
  Roster.all_player_stats << 'a'
  coll = Roster.all_player_stats.select{|st| st == 'a'}
  coll.should == ['a','a']
  Roster.all_player_stats.should == ['a','c','f','j','a']
  Roster.all_player_stats.get.should == ['a','c','f','j','a']
end
it "should handle class-level global sets of simple values" do
  # Same Redis::Set behavior as the instance-level spec, via the
  # class-level accessor. Set ordering is undefined, hence the .sort calls.
  Roster.all_players_online.should.be.empty
  Roster.all_players_online << 'a' << 'a' << 'a'
  Roster.all_players_online.should == ['a']
  Roster.all_players_online.get.should == ['a']
  Roster.all_players_online << 'b' << 'b'
  Roster.all_players_online.sort.should == ['a','b']
  Roster.all_players_online.members.sort.should == ['a','b']
  Roster.all_players_online.get.sort.should == ['a','b']
  Roster.all_players_online << 'c'
  Roster.all_players_online.sort.should == ['a','b','c']
  Roster.all_players_online.get.sort.should == ['a','b','c']
  Roster.all_players_online.delete('c')
  Roster.all_players_online.sort.should == ['a','b']
  Roster.all_players_online.get.sort.should == ['a','b']
  Roster.all_players_online.length.should == 2
  Roster.all_players_online.size.should == 2
  i = 0
  Roster.all_players_online.each do |st|
    i += 1
  end
  i.should == Roster.all_players_online.length
  coll = Roster.all_players_online.collect{|st| st}
  coll.sort.should == ['a','b']
  Roster.all_players_online.sort.should == ['a','b']
  Roster.all_players_online.get.sort.should == ['a','b']
  Roster.all_players_online << 'c'
  Roster.all_players_online.member?('c').should.be.true
  Roster.all_players_online.include?('c').should.be.true
  Roster.all_players_online.member?('no').should.be.false
  coll = Roster.all_players_online.select{|st| st == 'c'}
  coll.should == ['c']
  Roster.all_players_online.sort.should == ['a','b','c']
end
it "should handle class-level global values" do
  # Redis::Value via the class-level accessor: assign, read, delete.
  Roster.last_player.should == nil
  Roster.last_player = 'Trevor Hoffman'
  Roster.last_player.should == 'Trevor Hoffman'
  Roster.last_player.get.should == 'Trevor Hoffman'
  Roster.last_player = 'Tom Selleck'
  Roster.last_player.should == 'Tom Selleck'
  Roster.last_player.del.should == 1
  Roster.last_player.should.be.nil
end
it "should easily enable @object.class.global_objects" do
  # Global objects must be reachable both through obj.class.xxx and
  # directly through instance delegators, and all instances of the class
  # must see the same shared redis key.
  @roster.class.all_players_online.should.be.empty
  @roster.class.all_players_online << 'a' << 'a' << 'a'
  @roster.class.all_players_online.should == ['a']
  @roster2.class.all_players_online.should == ['a']
  @roster.all_players_online.should == ['a']
  @roster2.all_players_online.should == ['a']
  @roster.class.all_player_stats.should.be.empty
  @roster.class.all_player_stats << 'a'
  @roster.class.all_player_stats.should == ['a']
  @roster.class.all_player_stats.get.should == ['a']
  @roster.class.all_player_stats.unshift 'b'
  @roster.class.all_player_stats.to_s.should == 'b, a'
  @roster.class.all_player_stats.should == ['b','a']
  @roster2.class.all_player_stats.should == ['b','a']
  @roster.all_player_stats.should == ['b','a']
  @roster2.all_player_stats.should == ['b','a']
  # A write through one instance is visible through the other.
  @roster2.all_player_stats << 'b'
  @roster.all_player_stats.should == ['b','a','b']
  @roster.last_player.should == nil
  @roster.class.last_player = 'Trevor Hoffman'
  @roster.last_player.should == 'Trevor Hoffman'
  @roster.last_player.get.should == 'Trevor Hoffman'
  @roster2.last_player.get.should == 'Trevor Hoffman'
  @roster2.last_player = 'Tom Selleck'
  @roster.last_player.should == 'Tom Selleck'
  @roster.last_player.del.should == 1
  @roster.last_player.should.be.nil
  @roster2.last_player.should.be.nil
end
it "should handle lists of complex data types" do
  # Hashes and nested arrays round-trip through Marshal in lists.
  @roster.player_stats << {:json => 'data'}
  @roster.player_stats << {:json2 => 'data2'}
  @roster.player_stats.first.should == {:json => 'data'}
  @roster.player_stats.last.should == {:json2 => 'data2'}
  @roster.player_stats << [1,2,3,[4,5]]
  @roster.player_stats.last.should == [1,2,3,[4,5]]
  @roster.player_stats.shift.should == {:json => 'data'}
end
it "should handle sets of complex data types" do
  # Hashes round-trip through Marshal in sets; membership and
  # intersection must compare the unmarshaled values.
  @roster.outfielders << {:a => 1}
  @roster.outfielders.members.should == [{:a => 1}]
  @roster.outfielders << {:b => 2}
  # FIX: the original called member? without asserting its result, so the
  # line verified nothing. Assert that the marshaled hash is found.
  @roster.outfielders.member?({:b => 2}).should.be.true
  @roster_1.outfielders << {:a => 1} << {:b => 2}
  @roster_2.outfielders << {:b => 2} << {:c => 3}
  (@roster_1.outfielders & @roster_2.outfielders).should == [{:b => 2}]
  #(@roster_1.outfielders | @roster_2.outfielders).members.should ==
end
it "should provide a lock method that accepts a block" do
  # The lock key is derived from the object's redis key, and the block
  # must actually execute while the lock is held.
  @roster.resort_lock.key.should == 'roster:1:resort_lock'
  a = false
  @roster.resort_lock.lock do
    a = true
  end
  a.should.be.true
end
it "should raise an exception if the timeout is exceeded" do
  # Pre-seed the lock key so the lock is already "held" by someone else;
  # attempting to acquire it must raise Redis::Lock::LockTimeout.
  @roster.redis.set(@roster.resort_lock.key, 1)
  error = nil
  begin
    @roster.resort_lock.lock {}
  rescue => error
  end
  error.should.not.be.nil
  error.should.be.kind_of(Redis::Lock::LockTimeout)
end
it "should pick up objects from superclass automatically" do
  # A bare subclass (VanillaRoster < Roster, presumably -- defined in the
  # spec setup) must inherit every redis object declared on the parent.
  @vanilla_roster.available_slots.should.be.kind_of(Redis::Counter)
  @vanilla_roster.pitchers.should.be.kind_of(Redis::Counter)
  @vanilla_roster.basic.should.be.kind_of(Redis::Counter)
  @vanilla_roster.resort_lock.should.be.kind_of(Redis::Lock)
  @vanilla_roster.starting_pitcher.should.be.kind_of(Redis::Value)
  @vanilla_roster.player_stats.should.be.kind_of(Redis::List)
  @vanilla_roster.outfielders.should.be.kind_of(Redis::Set)
  @vanilla_roster.rank.should.be.kind_of(Redis::SortedSet)
  # custom keys
  @vanilla_roster.player_totals.should.be.kind_of(Redis::Counter)
  @vanilla_roster.all_player_stats.should.be.kind_of(Redis::List)
  @vanilla_roster.total_wins.should.be.kind_of(Redis::Set)
  @vanilla_roster.my_rank.should.be.kind_of(Redis::Value)
  @vanilla_roster.weird_key.should.be.kind_of(Redis::Value)
  # globals via class
  @vanilla_roster.total_players_online.should.be.kind_of(Redis::Counter)
  @vanilla_roster.all_player_stats.should.be.kind_of(Redis::List)
  @vanilla_roster.all_players_online.should.be.kind_of(Redis::Set)
  @vanilla_roster.last_player.should.be.kind_of(Redis::Value)
  VanillaRoster.total_players_online.should.be.kind_of(Redis::Counter)
  VanillaRoster.all_player_stats.should.be.kind_of(Redis::List)
  VanillaRoster.all_players_online.should.be.kind_of(Redis::Set)
  VanillaRoster.last_player.should.be.kind_of(Redis::Value)
end
it "should allow subclass overrides of the same redis object" do
  # An override on CustomRoster must keep its counter state separate from
  # the parent Roster's counter of the same name.
  @roster.basic.should == 0
  @custom_roster.basic.increment.should == 1
  @roster2.basic.should == 0
  # A fresh CustomRoster instance shares the subclass's key/state.
  CustomRoster.new.basic.should == 1
  @custom_roster.basic.decrement.should == 0
end
it "should handle new subclass objects" do
  # Objects declared only on the subclass work like any other counter.
  @custom_roster.special.increment.should == 1
end
it "should allow passing of increment/decrement to super class" do
  # CustomMethodRoster overrides increment/decrement (returning 42 for
  # :counter, presumably -- see the spec fixture) and delegates other
  # fields to the generic implementation via super.
  @custom_method_roster = CustomMethodRoster.new
  @custom_method_roster.counter.should.be.nil
  @custom_method_roster.increment(:counter).should == 42
  @custom_method_roster.increment(:basic).should == 1
  @custom_method_roster.basic.increment.should == 2
  @custom_method_roster.decrement(:basic).should == 1
  @custom_method_roster.basic.decrement.should == 0
  @custom_method_roster.basic.reset.should.be.true
  @custom_method_roster.basic.should == 0
  @custom_method_roster.basic.should.be.kind_of(Redis::Counter)
end
it "should persist object with custom id field name" do
  # Keys should be derived from the configured id field (uid) rather than
  # the default #id accessor.
  @custom_id_field_roster = CustomIdFieldRoster.new()
  @custom_id_field_roster.uid.should == 123 # sanity
  @custom_id_field_roster.increment(:basic).should == 1
  @custom_id_field_roster.basic.increment.should == 2
  @custom_id_field_roster.basic.reset
  @custom_id_field_roster.basic.should == 0
end
it "should pick up class methods from superclass automatically" do
  # For each declaration macro (counter, hash_key, lock, value, list, set,
  # sorted_set): an anonymous subclass can declare new objects, inherits
  # the parent's, and the new accessor must NOT leak onto parent instances.
  CounterRoster = Class.new(Roster)
  CounterRoster.counter :extended_counter
  extended_roster = CounterRoster.new
  extended_roster.basic.should.be.kind_of(Redis::Counter)
  extended_roster.extended_counter.should.be.kind_of(Redis::Counter)
  @roster.respond_to?(:extended_counter).should == false
  HashKeyRoster = Class.new(Roster)
  HashKeyRoster.hash_key :extended_hash_key
  extended_roster = HashKeyRoster.new
  extended_roster.contact_information.should.be.kind_of(Redis::HashKey)
  extended_roster.extended_hash_key.should.be.kind_of(Redis::HashKey)
  @roster.respond_to?(:extended_hash_key).should == false
  LockRoster = Class.new(Roster)
  # Note: `lock :extended` generates an `extended_lock` accessor.
  LockRoster.lock :extended
  extended_roster = LockRoster.new
  extended_roster.resort_lock.should.be.kind_of(Redis::Lock)
  extended_roster.extended_lock.should.be.kind_of(Redis::Lock)
  @roster.respond_to?(:extended_lock).should == false
  ValueRoster = Class.new(Roster)
  ValueRoster.value :extended_value
  extended_roster = ValueRoster.new
  extended_roster.starting_pitcher.should.be.kind_of(Redis::Value)
  extended_roster.extended_value.should.be.kind_of(Redis::Value)
  @roster.respond_to?(:extended_value).should == false
  ListRoster = Class.new(Roster)
  ListRoster.list :extended_list
  extended_roster = ListRoster.new
  extended_roster.player_stats.should.be.kind_of(Redis::List)
  extended_roster.extended_list.should.be.kind_of(Redis::List)
  @roster.respond_to?(:extended_list).should == false
  SetRoster = Class.new(Roster)
  SetRoster.set :extended_set
  extended_roster = SetRoster.new
  extended_roster.outfielders.should.be.kind_of(Redis::Set)
  extended_roster.extended_set.should.be.kind_of(Redis::Set)
  @roster.respond_to?(:extended_set).should == false
  SortedSetRoster = Class.new(Roster)
  SortedSetRoster.sorted_set :extended_sorted_set
  extended_roster = SortedSetRoster.new
  extended_roster.rank.should.be.kind_of(Redis::SortedSet)
  extended_roster.extended_sorted_set.should.be.kind_of(Redis::SortedSet)
  @roster.respond_to?(:extended_sorted_set).should == false
end
it "should set time to live in seconds when expiration option assigned" do
  # Each object type declared with :expiration => 10 should get a TTL in
  # (0, 10] as soon as it is first written to.
  @roster.value_with_expiration.value = 'val'
  @roster.value_with_expiration.ttl.should > 0
  @roster.value_with_expiration.ttl.should <= 10
  @roster.set_with_expiration << 'val'
  @roster.set_with_expiration.ttl.should > 0
  @roster.set_with_expiration.ttl.should <= 10
  @roster.list_with_expiration << 'val'
  @roster.list_with_expiration.ttl.should > 0
  @roster.list_with_expiration.ttl.should <= 10
  @roster.hash_with_expiration[:foo] = :bar
  @roster.hash_with_expiration.ttl.should > 0
  @roster.hash_with_expiration.ttl.should <= 10
  @roster.counter_with_expiration.increment
  @roster.counter_with_expiration.ttl.should > 0
  @roster.counter_with_expiration.ttl.should <= 10
  @roster.sorted_set_with_expiration[:foo] = 1
  @roster.sorted_set_with_expiration.ttl.should > 0
  @roster.sorted_set_with_expiration.ttl.should <= 10
end
it "should set expiration when expireat option assigned" do
  # Same as the :expiration spec, but via an absolute :expireat timestamp
  # (presumably Time.now + 10 in the fixture -- hence the <= 10 bound).
  @roster.value_with_expireat.value = 'val'
  @roster.value_with_expireat.ttl.should > 0
  @roster.value_with_expireat.ttl.should <= 10
  @roster.set_with_expireat << 'val'
  @roster.set_with_expireat.ttl.should > 0
  @roster.set_with_expireat.ttl.should <= 10
  @roster.list_with_expireat << 'val'
  @roster.list_with_expireat.ttl.should > 0
  @roster.list_with_expireat.ttl.should <= 10
  @roster.hash_with_expireat[:foo] = :bar
  @roster.hash_with_expireat.ttl.should > 0
  @roster.hash_with_expireat.ttl.should <= 10
  @roster.sorted_set_with_expireat[:foo] = 1
  @roster.sorted_set_with_expireat.ttl.should > 0
  @roster.sorted_set_with_expireat.ttl.should <= 10
end
end
| timrourke/rchat | vendor/cache/ruby/2.2.0/gems/redis-objects-1.2.0/spec/redis_objects_model_spec.rb | Ruby | mit | 37,399 |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
// <Area> Nullable - Box-Unbox </Area>
// <Title> Nullable type with unbox box expr </Title>
// <Description>
// checking type of byte using is operator
// </Description>
// <RelatedBugs> </RelatedBugs>
//<Expects Status=success></Expects>
// <Code>
using System.Runtime.InteropServices;
using System;
internal class NullableTest
{
    // Unbox the boxed byte directly to the non-nullable type and compare it
    // against a freshly created default byte.
    private static bool BoxUnboxToNQ(ValueType boxed)
    {
        byte unboxed = (byte)boxed;
        return Helper.Compare(unboxed, Helper.Create(default(byte)));
    }

    // Unbox the same boxed value via the nullable conversion instead.
    private static bool BoxUnboxToQ(ValueType boxed)
    {
        byte? unboxed = (byte?)boxed;
        return Helper.Compare(unboxed, Helper.Create(default(byte)));
    }

    // Entry point: box a nullable byte and verify both unboxing paths agree.
    private static int Main()
    {
        byte? value = Helper.Create(default(byte));
        bool passed = BoxUnboxToNQ(value) && BoxUnboxToQ(value);
        return passed ? ExitCode.Passed : ExitCode.Failed;
    }
}
| ktos/coreclr | tests/src/JIT/jit64/valuetypes/nullable/box-unbox/value/box-unbox-value003.cs | C# | mit | 987 |
require 'fog/aws/models/dns/record'
module Fog
module DNS
class AWS
# Collection of Route 53 resource record sets for a single hosted zone.
# Pagination state (IsTruncated / NextRecord*) from the last API call is
# mirrored into attributes by #all and consumed by #all!.
class Records < Fog::Collection
  attribute :is_truncated, :aliases => ['IsTruncated']
  attribute :max_items, :aliases => ['MaxItems']
  attribute :name
  attribute :next_record_name, :aliases => ['NextRecordName']
  attribute :next_record_type, :aliases => ['NextRecordType']
  attribute :next_record_identifier, :aliases => ['NextRecordIdentifier']
  attribute :type
  attribute :identifier
  attribute :zone

  model Fog::DNS::AWS::Record

  #
  # Load one page of record sets for the zone. Options default to the
  # collection's own attributes (max_items, type, identifier) and the
  # zone's domain; nil-valued options are stripped before the API call.
  #
  def all(options = {})
    requires :zone
    options[:max_items] ||= max_items
    options[:name] ||= zone.domain
    options[:type] ||= type
    options[:identifier] ||= identifier
    options.delete_if {|key, value| value.nil?}

    data = service.list_resource_record_sets(zone.id, options).body
    # NextRecordIdentifier is completely absent instead of nil, so set to nil, or iteration breaks.
    data['NextRecordIdentifier'] = nil unless data.key?('NextRecordIdentifier')

    # Keep only the pagination bookkeeping keys as collection attributes.
    merge_attributes(data.reject {|key, value| !['IsTruncated', 'MaxItems', 'NextRecordName', 'NextRecordType', 'NextRecordIdentifier'].include?(key)})
    load(data['ResourceRecordSets'])
  end

  #
  # Load all zone records into the collection.
  #
  def all!
    data = []

    # Reset pagination cursors so iteration starts from the first page.
    merge_attributes({'NextRecordName' => nil,
                      'NextRecordType' => nil,
                      'NextRecordIdentifier' => nil,
                      'IsTruncated' => nil})

    begin
      options = {
        :name => next_record_name,
        :type => next_record_type,
        :identifier => next_record_identifier
      }
      options.delete_if {|key, value| value.nil?}

      batch = service.list_resource_record_sets(zone.id, options).body

      # NextRecordIdentifier is completely absent instead of nil, so set to nil, or iteration breaks.
      batch['NextRecordIdentifier'] = nil unless batch.key?('NextRecordIdentifier')

      merge_attributes(batch.reject {|key, value| !['IsTruncated', 'MaxItems', 'NextRecordName', 'NextRecordType', 'NextRecordIdentifier'].include?(key)})

      data.concat(batch['ResourceRecordSets'])
    end while is_truncated

    load(data)
  end

  #
  # AWS Route 53 records are uniquely identified by a compound key of name, type, and identifier.
  # #get allows one to retrieve a record using one or more of those key components.
  #
  # ==== Parameters
  # * record_name - The name of the record to retrieve.
  # * record_type - The type of record to retrieve, if nil, then the first matching record is returned.
  # * record_identifier - The record set identifier to retrieve, if nil, then the first matching record is returned.
  #
  def get(record_name, record_type = nil, record_identifier = nil)
    requires :zone

    # Append a trailing period to the record_name if absent.
    record_name = record_name + "." unless record_name.end_with?(".")
    record_type = record_type.upcase unless record_type.nil?

    options = {
      :max_items => 1,
      :name => record_name,
      :type => record_type,
      :identifier => record_identifier
    }
    options.delete_if {|key, value| value.nil?}

    data = service.list_resource_record_sets(zone.id, options).body

    # look for an exact match in the records (name compare is
    # case-insensitive; type/identifier only when supplied)
    (data['ResourceRecordSets'] || []).map do |record_data|
      record = new(record_data)
      if (record.name.casecmp(record_name) == 0) &&
         (record_type.nil? || (record.type == record_type)) &&
         (record_identifier.nil? || (record.set_identifier == record_identifier))
        record
      end
    end.compact.first
  rescue Fog::DNS::AWS::NotFound
    nil
  end

  # Build a new Record pre-bound to this collection's zone.
  def new(attributes = {})
    requires :zone
    super({ :zone => zone }.merge!(attributes))
  end
end
end
end
end
| jonpstone/portfolio-project-rails-mean-movie-reviews | vendor/bundle/ruby/2.3.0/gems/fog-aws-2.0.0/lib/fog/aws/models/dns/records.rb | Ruby | mit | 4,370 |
(function () {
'use strict';
/**
* @ngdoc overview
* @name ui.grid.edit
* @description
*
* # ui.grid.edit
*
* <div class="alert alert-success" role="alert"><strong>Stable</strong> This feature is stable. There should no longer be breaking api changes without a deprecation warning.</div>
*
* This module provides cell editing capability to ui.grid. The goal was to emulate keying data in a spreadsheet via
* a keyboard.
* <br/>
* <br/>
* To really get the full spreadsheet-like data entry, the ui.grid.cellNav module should be used. This will allow the
* user to key data and then tab, arrow, or enter to the cells beside or below.
*
* <div doc-module-components="ui.grid.edit"></div>
*/
// Feature module registration; depends only on the core ui.grid module.
var module = angular.module('ui.grid.edit', ['ui.grid']);
/**
 *  @ngdoc object
 *  @name ui.grid.edit.constant:uiGridEditConstants
 *
 *  @description constants available in edit module
 */
module.constant('uiGridEditConstants', {
  // Placeholder tokens substituted into cell templates by the template builder.
  EDITABLE_CELL_TEMPLATE: /EDITABLE_CELL_TEMPLATE/g,
  //must be lowercase because template builder converts to lower
  EDITABLE_CELL_DIRECTIVE: /editable_cell_directive/g,
  // Scope event names used to coordinate the editor life-cycle.
  events: {
    BEGIN_CELL_EDIT: 'uiGridEventBeginCellEdit',
    END_CELL_EDIT: 'uiGridEventEndCellEdit',
    CANCEL_CELL_EDIT: 'uiGridEventCancelCellEdit'
  }
});
/**
* @ngdoc service
* @name ui.grid.edit.service:uiGridEditService
*
* @description Services for editing features
*/
module.service('uiGridEditService', ['$q', 'uiGridConstants', 'gridUtil',
function ($q, uiGridConstants, gridUtil) {
var service = {
// Wire the edit feature into a grid: apply option defaults, register the
// column builder, and expose the edit event surface on grid.api.
initializeGrid: function (grid) {
  service.defaultGridOptions(grid.options);

  grid.registerColumnBuilder(service.editColumnBuilder);
  grid.edit = {};

  /**
   *  @ngdoc object
   *  @name ui.grid.edit.api:PublicApi
   *
   *  @description Public Api for edit feature
   */
  var publicApi = {
    events: {
      edit: {
        /**
         * @ngdoc event
         * @name afterCellEdit
         * @eventOf  ui.grid.edit.api:PublicApi
         * @description raised when cell editing is complete
         * <pre>
         *      gridApi.edit.on.afterCellEdit(scope,function(rowEntity, colDef){})
         * </pre>
         * @param {object} rowEntity the options.data element that was edited
         * @param {object} colDef the column that was edited
         * @param {object} newValue new value
         * @param {object} oldValue old value
         */
        afterCellEdit: function (rowEntity, colDef, newValue, oldValue) {
        },
        /**
         * @ngdoc event
         * @name beginCellEdit
         * @eventOf  ui.grid.edit.api:PublicApi
         * @description raised when cell editing starts on a cell
         * <pre>
         *      gridApi.edit.on.beginCellEdit(scope,function(rowEntity, colDef){})
         * </pre>
         * @param {object} rowEntity the options.data element that was edited
         * @param {object} colDef the column that was edited
         * @param {object} triggerEvent the event that triggered the edit.  Useful to prevent losing keystrokes on some
         *                 complex editors
         */
        beginCellEdit: function (rowEntity, colDef, triggerEvent) {
        },
        /**
         * @ngdoc event
         * @name cancelCellEdit
         * @eventOf  ui.grid.edit.api:PublicApi
         * @description raised when cell editing is cancelled on a cell
         * <pre>
         *      gridApi.edit.on.cancelCellEdit(scope,function(rowEntity, colDef){})
         * </pre>
         * @param {object} rowEntity the options.data element that was edited
         * @param {object} colDef the column that was edited
         */
        cancelCellEdit: function (rowEntity, colDef) {
        }
      }
    },
    methods: {
      edit: { }
    }
  };

  grid.api.registerEventsFromObject(publicApi.events);
  // No public methods yet; registration intentionally left disabled.
  //grid.api.registerMethodsFromObject(publicApi.methods);
},
// Fill in grid-level edit defaults; leaves caller-supplied values untouched.
defaultGridOptions: function (gridOptions) {
  /**
   *  @ngdoc object
   *  @name ui.grid.edit.api:GridOptions
   *
   *  @description Options for configuring the edit feature, these are available to be
   *  set using the ui-grid {@link ui.grid.class:GridOptions gridOptions}
   */

  /**
   *  @ngdoc object
   *  @name enableCellEdit
   *  @propertyOf  ui.grid.edit.api:GridOptions
   *  @description If defined, sets the default value for the editable flag on each individual colDefs
   *  if their individual enableCellEdit configuration is not defined. Defaults to undefined.
   */

  /**
   *  @ngdoc object
   *  @name cellEditableCondition
   *  @propertyOf  ui.grid.edit.api:GridOptions
   *  @description If specified, either a value or function to be used by all columns before editing.
   *  If falsy, then editing of cell is not allowed.
   *  @example
   *  <pre>
   *  function($scope){
   *    //use $scope.row.entity and $scope.col.colDef to determine if editing is allowed
   *    return true;
   *  }
   *  </pre>
   */
  gridOptions.cellEditableCondition = gridOptions.cellEditableCondition === undefined ? true : gridOptions.cellEditableCondition;

  /**
   *  @ngdoc object
   *  @name editableCellTemplate
   *  @propertyOf  ui.grid.edit.api:GridOptions
   *  @description If specified, cellTemplate to use as the editor for all columns.
   *  <br/> defaults to 'ui-grid/cellTextEditor'
   */

  /**
   *  @ngdoc object
   *  @name enableCellEditOnFocus
   *  @propertyOf  ui.grid.edit.api:GridOptions
   *  @description If true, then editor is invoked as soon as cell receives focus. Default false.
   *  <br/>_requires cellNav feature and the edit feature to be enabled_
   */
  //enableCellEditOnFocus can only be used if cellnav module is used
  gridOptions.enableCellEditOnFocus = gridOptions.enableCellEditOnFocus === undefined ? false : gridOptions.enableCellEditOnFocus;
},
/**
 * @ngdoc service
 * @name editColumnBuilder
 * @methodOf ui.grid.edit.service:uiGridEditService
 * @description columnBuilder function that adds edit properties to grid column
 * @returns {promise} promise that will load any needed templates when resolved
 */
editColumnBuilder: function (colDef, col, gridOptions) {

  var promises = [];

  /**
   *  @ngdoc object
   *  @name ui.grid.edit.api:ColumnDef
   *
   *  @description Column Definition for edit feature, these are available to be
   *  set using the ui-grid {@link ui.grid.class:GridOptions.columnDef gridOptions.columnDefs}
   */

  /**
   *  @ngdoc object
   *  @name enableCellEdit
   *  @propertyOf  ui.grid.edit.api:ColumnDef
   *  @description enable editing on column
   */
  // Precedence: colDef setting, then gridOptions setting; with neither
  // set, every non-'object' typed column defaults to editable.
  colDef.enableCellEdit = colDef.enableCellEdit === undefined ? (gridOptions.enableCellEdit === undefined ?
    (colDef.type !== 'object') : gridOptions.enableCellEdit) : colDef.enableCellEdit;

  /**
   *  @ngdoc object
   *  @name cellEditableCondition
   *  @propertyOf  ui.grid.edit.api:ColumnDef
   *  @description If specified, either a value or function evaluated before editing cell.  If falsy, then editing of cell is not allowed.
   *  @example
   *  <pre>
   *  function($scope){
   *    //use $scope.row.entity and $scope.col.colDef to determine if editing is allowed
   *    return true;
   *  }
   *  </pre>
   */
  colDef.cellEditableCondition = colDef.cellEditableCondition === undefined ? gridOptions.cellEditableCondition : colDef.cellEditableCondition;

  /**
   *  @ngdoc object
   *  @name editableCellTemplate
   *  @propertyOf  ui.grid.edit.api:ColumnDef
   *  @description cell template to be used when editing this column. Can be Url or text template
   *  <br/>Defaults to gridOptions.editableCellTemplate
   */
  if (colDef.enableCellEdit) {
    colDef.editableCellTemplate = colDef.editableCellTemplate || gridOptions.editableCellTemplate || 'ui-grid/cellEditor';

    // Fetch the editor template asynchronously; failure to resolve the
    // template is a hard configuration error.
    promises.push(gridUtil.getTemplate(colDef.editableCellTemplate)
      .then(
        function (template) {
          col.editableCellTemplate = template;
        },
        function (res) {
          // Todo handle response error here?
          throw new Error("Couldn't fetch/use colDef.editableCellTemplate '" + colDef.editableCellTemplate + "'");
        }));
  }

  /**
   *  @ngdoc object
   *  @name enableCellEditOnFocus
   *  @propertyOf  ui.grid.edit.api:ColumnDef
   *  @requires ui.grid.cellNav
   *  @description If true, then editor is invoked as soon as cell receives focus. Default false.
   *  <br>_requires both the cellNav feature and the edit feature to be enabled_
   */
  //enableCellEditOnFocus can only be used if cellnav module is used
  colDef.enableCellEditOnFocus = colDef.enableCellEditOnFocus === undefined ? gridOptions.enableCellEditOnFocus : colDef.enableCellEditOnFocus;

  /**
   *  @ngdoc string
   *  @name editModelField
   *  @propertyOf  ui.grid.edit.api:ColumnDef
   *  @description a bindable string value that is used when binding to edit controls instead of colDef.field
   *  <br/> example: You have a complex property on and object like state:{abbrev:'MS',name:'Mississippi'}.  The
   *  grid should display state.name in the cell and sort/filter based on the state.name property but the editor
   *  requires the full state object.
   *  <br/>colDef.field = 'state.name'
   *  <br/>colDef.editModelField = 'state'
   */
  //colDef.editModelField

  return $q.all(promises);
},
/**
* @ngdoc service
* @name isStartEditKey
* @methodOf ui.grid.edit.service:uiGridEditService
* @description Determines if a keypress should start editing. Decorate this service to override with your
* own key events. See service decorator in angular docs.
* @param {Event} evt keydown event
* @returns {boolean} true if an edit should start
*/
isStartEditKey: function (evt) {
if (evt.metaKey ||
evt.keyCode === uiGridConstants.keymap.ESC ||
evt.keyCode === uiGridConstants.keymap.SHIFT ||
evt.keyCode === uiGridConstants.keymap.CTRL ||
evt.keyCode === uiGridConstants.keymap.ALT ||
evt.keyCode === uiGridConstants.keymap.WIN ||
evt.keyCode === uiGridConstants.keymap.CAPSLOCK ||
evt.keyCode === uiGridConstants.keymap.LEFT ||
(evt.keyCode === uiGridConstants.keymap.TAB && evt.shiftKey) ||
evt.keyCode === uiGridConstants.keymap.RIGHT ||
evt.keyCode === uiGridConstants.keymap.TAB ||
evt.keyCode === uiGridConstants.keymap.UP ||
(evt.keyCode === uiGridConstants.keymap.ENTER && evt.shiftKey) ||
evt.keyCode === uiGridConstants.keymap.DOWN ||
evt.keyCode === uiGridConstants.keymap.ENTER) {
return false;
}
return true;
}
};
return service;
}]);
/**
* @ngdoc directive
* @name ui.grid.edit.directive:uiGridEdit
* @element div
* @restrict A
*
* @description Adds editing features to the ui-grid directive.
*
* @example
<example module="app">
<file name="app.js">
var app = angular.module('app', ['ui.grid', 'ui.grid.edit']);
app.controller('MainCtrl', ['$scope', function ($scope) {
$scope.data = [
{ name: 'Bob', title: 'CEO' },
{ name: 'Frank', title: 'Lowly Developer' }
];
$scope.columnDefs = [
{name: 'name', enableCellEdit: true},
{name: 'title', enableCellEdit: true}
];
}]);
</file>
<file name="index.html">
<div ng-controller="MainCtrl">
<div ui-grid="{ data: data, columnDefs: columnDefs }" ui-grid-edit></div>
</div>
</file>
</example>
*/
module.directive('uiGridEdit', ['gridUtil', 'uiGridEditService', function (gridUtil, uiGridEditService) {
  return {
    replace: true,
    priority: 0,
    require: '^uiGrid',
    scope: false,
    compile: function () {
      return {
        // Pre-link: initialize the edit feature on the grid before child
        // cell directives (which check grid.api.edit) are linked.
        pre: function ($scope, $elm, $attrs, uiGridCtrl) {
          uiGridEditService.initializeGrid(uiGridCtrl.grid);
        },
        // No post-link behaviour needed; kept as an extension point.
        post: function ($scope, $elm, $attrs, uiGridCtrl) {
        }
      };
    }
  };
}]);
/**
* @ngdoc directive
* @name ui.grid.edit.directive:uiGridRenderContainer
* @element div
* @restrict A
*
* @description Adds keydown listeners to renderContainer element so we can capture when to begin edits
*
*/
module.directive('uiGridViewport', [ 'uiGridEditConstants',
function ( uiGridEditConstants) {
  return {
    replace: true,
    priority: -99998, //run before cellNav
    require: ['^uiGrid', '^uiGridRenderContainer'],
    scope: false,
    compile: function () {
      return {
        post: function ($scope, $elm, $attrs, controllers) {
          var gridCtrl = controllers[0];
          var renderContainerCtrl = controllers[1];
          // Only wire refocus handlers when both the edit and cellNav
          // features are enabled, and only for the body render container.
          if (!gridCtrl.grid.api.edit || !gridCtrl.grid.api.cellNav ||
              renderContainerCtrl.containerId !== 'body') {
            return;
          }
          // When an edit ends or is cancelled, hand focus back to the grid.
          var refocusGrid = function () {
            gridCtrl.focus();
          };
          $scope.$on(uiGridEditConstants.events.CANCEL_CELL_EDIT, refocusGrid);
          $scope.$on(uiGridEditConstants.events.END_CELL_EDIT, refocusGrid);
        }
      };
    }
  };
}]);
/**
* @ngdoc directive
* @name ui.grid.edit.directive:uiGridCell
* @element div
* @restrict A
*
* @description Stacks on top of ui.grid.uiGridCell to provide in-line editing capabilities to the cell
* Editing Actions.
*
* Binds edit start events to the uiGridCell element. When the events fire, the gridCell element is appended
* with the columnDef.editableCellTemplate element ('cellEditor.html' by default).
*
* The editableCellTemplate should respond to uiGridEditConstants.events.BEGIN\_CELL\_EDIT angular event
* and do the initial steps needed to edit the cell (setfocus on input element, etc).
*
* When the editableCellTemplate recognizes that the editing is ended (blur event, Enter key, etc.)
* it should emit the uiGridEditConstants.events.END\_CELL\_EDIT event.
*
* If editableCellTemplate recognizes that the editing has been cancelled (esc key)
* it should emit the uiGridEditConstants.events.CANCEL\_CELL\_EDIT event. The original value
* will be set back on the model by the uiGridCell directive.
*
* Events that invoke editing:
* - dblclick
* - F2 keydown (when using cell selection)
*
* Events that end editing:
* - Dependent on the specific editableCellTemplate
* - Standards should be blur and enter keydown
*
* Events that cancel editing:
* - Dependent on the specific editableCellTemplate
* - Standards should be Esc keydown
*
* Grid Events that end editing:
* - uiGridConstants.events.GRID_SCROLL
*
*/
/**
* @ngdoc object
* @name ui.grid.edit.api:GridRow
*
* @description GridRow options for edit feature, these are available to be
* set internally only, by other features
*/
/**
* @ngdoc object
* @name enableCellEdit
* @propertyOf ui.grid.edit.api:GridRow
* @description enable editing on row, grouping for example might disable editing on group header rows
*/
module.directive('uiGridCell',
['$compile', '$injector', '$timeout', 'uiGridConstants', 'uiGridEditConstants', 'gridUtil', '$parse', 'uiGridEditService', '$rootScope',
function ($compile, $injector, $timeout, uiGridConstants, uiGridEditConstants, gridUtil, $parse, uiGridEditService, $rootScope) {
  // How long (ms) a touch must be held before it counts as "begin edit".
  var touchstartTimeout = 500;
  if ($injector.has('uiGridCellNavService')) {
    var uiGridCellNavService = $injector.get('uiGridCellNavService');
  }
  return {
    priority: -100, // run after default uiGridCell directive
    restrict: 'A',
    scope: false,
    require: '?^uiGrid',
    link: function ($scope, $elm, $attrs, uiGridCtrl) {
      var html;                       // editor template text after substitutions
      var origCellValue;              // value to restore on cancel
      var inEdit = false;             // true while the editor element is alive
      var cellModel;                  // $parse'd getter/setter for the cell value
      var cancelTouchstartTimeout;    // promise for the long-press timer
      var editCellScope;              // child scope the editor is compiled into
      // Columns without cell edit enabled get no edit wiring at all.
      if (!$scope.col.colDef.enableCellEdit) {
        return;
      }
      // Deregistration functions for cellNav listeners; no-ops until wired.
      var cellNavNavigateDereg = function() {};
      var viewPortKeyDownDereg = function() {};
      // (Un)wires the begin-edit listeners depending on whether the column
      // and the current row both allow editing.
      var setEditable = function() {
        if ($scope.col.colDef.enableCellEdit && $scope.row.enableCellEdit !== false) {
          if (!$scope.beginEditEventsWired) { //prevent multiple attachments
            registerBeginEditEvents();
          }
        } else {
          if ($scope.beginEditEventsWired) {
            cancelBeginEditEvents();
          }
        }
      };
      setEditable();
      // Rows are virtualized, so this scope's row can change; re-evaluate
      // editability whenever it does.
      var rowWatchDereg = $scope.$watch('row', function (n, o) {
        if (n !== o) {
          setEditable();
        }
      });
      $scope.$on( '$destroy', rowWatchDereg );
// Wires the events that can start an edit: double-click, long touch, and —
// when the cellNav feature is present — keydown / focus-navigation events
// targeting this cell.
function registerBeginEditEvents() {
  $elm.on('dblclick', beginEdit);
  // Add touchstart handling. If the user starts a touch and it doesn't end
  // after touchstartTimeout milliseconds, then start the edit.
  $elm.on('touchstart', touchStart);
  if (uiGridCtrl && uiGridCtrl.grid.api.cellNav) {
    viewPortKeyDownDereg = uiGridCtrl.grid.api.cellNav.on.viewPortKeyDown($scope, function (evt, rowCol) {
      if (rowCol === null) {
        return;
      }
      if (rowCol.row === $scope.row && rowCol.col === $scope.col && !$scope.col.colDef.enableCellEditOnFocus) {
        //important to do this before scrollToIfNecessary
        beginEditKeyDown(evt);
      }
    });
    cellNavNavigateDereg = uiGridCtrl.grid.api.cellNav.on.navigate($scope, function (newRowCol, oldRowCol) {
      if ($scope.col.colDef.enableCellEditOnFocus) {
        // Don't begin edit if the cell hasn't changed
        if ((!oldRowCol || newRowCol.row !== oldRowCol.row || newRowCol.col !== oldRowCol.col) &&
            newRowCol.row === $scope.row && newRowCol.col === $scope.col) {
          $timeout(function () {
            beginEdit();
          });
        }
      }
    });
  }
  $scope.beginEditEventsWired = true;
}
// Long-press handler: arms a timer when a touch begins; if touchEnd does not
// cancel it within touchstartTimeout ms, the press is treated as "begin edit".
function touchStart(event) {
  // jQuery wraps the native event in `originalEvent`; unwrap it when present.
  // (The original code tested the same condition twice — once via typeof and
  // once via direct comparison — which is redundant.)
  if (event.originalEvent !== undefined) {
    event = event.originalEvent;
  }
  // Bind touchend handler
  $elm.on('touchend', touchEnd);
  // Start a timeout
  cancelTouchstartTimeout = $timeout(function() { }, touchstartTimeout);
  // Timeout's done! Start the edit
  cancelTouchstartTimeout.then(function () {
    // Use setTimeout to start the edit because beginEdit expects to be outside of $digest
    setTimeout(beginEdit, 0);
    // Unbind the touchend handler, we don't need it anymore
    $elm.off('touchend', touchEnd);
  });
}
// Cancel any touchstart timeout: the touch ended before it qualified as a
// long-press, so no edit is started and the touchend handler is removed.
function touchEnd(event) {
  $timeout.cancel(cancelTouchstartTimeout);
  $elm.off('touchend', touchEnd);
}
// Detaches all begin-edit listeners (the inverse of registerBeginEditEvents).
// NOTE(review): 'keydown' is unbound here but registerBeginEditEvents never
// binds it on $elm — presumably left over from an older implementation; the
// call is harmless. Confirm before removing.
function cancelBeginEditEvents() {
  $elm.off('dblclick', beginEdit);
  $elm.off('keydown', beginEditKeyDown);
  $elm.off('touchstart', touchStart);
  cellNavNavigateDereg();
  viewPortKeyDownDereg();
  $scope.beginEditEventsWired = false;
}
// Starts an edit from a keydown, but only for keys that should begin typing
// (see uiGridEditService.isStartEditKey).
function beginEditKeyDown(evt) {
  if (uiGridEditService.isStartEditKey(evt)) {
    beginEdit(evt);
  }
}
// Decides whether this cell may enter edit mode: never while the row is
// saving; otherwise defer to cellEditableCondition (a predicate invoked with
// the cell scope, or a plain truthy/falsy value).
function shouldEdit(col, row) {
  if (row.isSaving) {
    return false;
  }
  var condition = col.colDef.cellEditableCondition;
  return angular.isFunction(condition) ? condition($scope) : condition;
}
// Scrolls the cell fully into view, then begins the edit; the editor is only
// created once the scroll promise resolves.
function beginEdit(triggerEvent) {
  //we need to scroll the cell into focus before invoking the editor
  $scope.grid.api.core.scrollToIfNecessary($scope.row, $scope.col)
    .then(function () {
      beginEditAfterScroll(triggerEvent);
    });
}
/**
* @ngdoc property
* @name editDropdownOptionsArray
* @propertyOf ui.grid.edit.api:ColumnDef
* @description an array of values in the format
* [ {id: xxx, value: xxx} ], which is populated
* into the edit dropdown
*
*/
/**
* @ngdoc property
* @name editDropdownIdLabel
* @propertyOf ui.grid.edit.api:ColumnDef
* @description the label for the "id" field
* in the editDropdownOptionsArray. Defaults
* to 'id'
* @example
* <pre>
* $scope.gridOptions = {
* columnDefs: [
* {name: 'status', editableCellTemplate: 'ui-grid/dropdownEditor',
* editDropdownOptionsArray: [{code: 1, status: 'active'}, {code: 2, status: 'inactive'}],
* editDropdownIdLabel: 'code', editDropdownValueLabel: 'status' }
* ],
* </pre>
*
*/
/**
* @ngdoc property
* @name editDropdownRowEntityOptionsArrayPath
* @propertyOf ui.grid.edit.api:ColumnDef
* @description a path to a property on row.entity containing an
* array of values in the format
* [ {id: xxx, value: xxx} ], which will be used to populate
* the edit dropdown. This can be used when the dropdown values are dependent on
* the backing row entity.
* If this property is set then editDropdownOptionsArray will be ignored.
* @example
* <pre>
* $scope.gridOptions = {
* columnDefs: [
* {name: 'status', editableCellTemplate: 'ui-grid/dropdownEditor',
* editDropdownRowEntityOptionsArrayPath: 'foo.bars[0].baz',
* editDropdownIdLabel: 'code', editDropdownValueLabel: 'status' }
* ],
* </pre>
*
*/
/**
* @ngdoc property
* @name editDropdownValueLabel
* @propertyOf ui.grid.edit.api:ColumnDef
* @description the label for the "value" field
* in the editDropdownOptionsArray. Defaults
* to 'value'
* @example
* <pre>
* $scope.gridOptions = {
* columnDefs: [
* {name: 'status', editableCellTemplate: 'ui-grid/dropdownEditor',
* editDropdownOptionsArray: [{code: 1, status: 'active'}, {code: 2, status: 'inactive'}],
* editDropdownIdLabel: 'code', editDropdownValueLabel: 'status' }
* ],
* </pre>
*
*/
/**
* @ngdoc property
* @name editDropdownFilter
* @propertyOf ui.grid.edit.api:ColumnDef
* @description A filter that you would like to apply to the values in the options list
* of the dropdown. For example if you were using angular-translate you might set this
* to `'translate'`
* @example
* <pre>
* $scope.gridOptions = {
* columnDefs: [
* {name: 'status', editableCellTemplate: 'ui-grid/dropdownEditor',
* editDropdownOptionsArray: [{code: 1, status: 'active'}, {code: 2, status: 'inactive'}],
* editDropdownIdLabel: 'code', editDropdownValueLabel: 'status', editDropdownFilter: 'translate' }
* ],
* </pre>
*
*/
// Builds the editor: substitutes the model/field/filter placeholders into the
// column's editable template, compiles it into a child scope appended to the
// cell, hides the normal cell contents, and wires the scroll/end/cancel
// listeners that tear the editor down again.
function beginEditAfterScroll(triggerEvent) {
  // If we are already editing, then just skip this so we don't try editing twice...
  if (inEdit) {
    return;
  }
  if (!shouldEdit($scope.col, $scope.row)) {
    return;
  }
  // Parsed getter/setter for the cell's bound model value.
  cellModel = $parse($scope.row.getQualifiedColField($scope.col));
  //get original value from the cell
  origCellValue = cellModel($scope);
  html = $scope.col.editableCellTemplate;
  // Bind the editor to colDef.editModelField when set, else to the cell field.
  if ($scope.col.colDef.editModelField) {
    html = html.replace(uiGridConstants.MODEL_COL_FIELD, gridUtil.preEval('row.entity.' + $scope.col.colDef.editModelField));
  }
  else {
    html = html.replace(uiGridConstants.MODEL_COL_FIELD, $scope.row.getQualifiedColField($scope.col));
  }
  html = html.replace(uiGridConstants.COL_FIELD, 'grid.getCellValue(row, col)');
  var optionFilter = $scope.col.colDef.editDropdownFilter ? '|' + $scope.col.colDef.editDropdownFilter : '';
  html = html.replace(uiGridConstants.CUSTOM_FILTERS, optionFilter);
  // Choose the HTML input type from the column's declared data type.
  var inputType = 'text';
  switch ($scope.col.colDef.type){
    case 'boolean':
      inputType = 'checkbox';
      break;
    case 'number':
      inputType = 'number';
      break;
    case 'date':
      inputType = 'date';
      break;
  }
  html = html.replace('INPUT_TYPE', inputType);
  // Dropdown options: a per-row entity path takes precedence over the static array.
  var editDropdownRowEntityOptionsArrayPath = $scope.col.colDef.editDropdownRowEntityOptionsArrayPath;
  if (editDropdownRowEntityOptionsArrayPath) {
    $scope.editDropdownOptionsArray = resolveObjectFromPath($scope.row.entity, editDropdownRowEntityOptionsArrayPath);
  }
  else {
    $scope.editDropdownOptionsArray = $scope.col.colDef.editDropdownOptionsArray;
  }
  $scope.editDropdownIdLabel = $scope.col.colDef.editDropdownIdLabel ? $scope.col.colDef.editDropdownIdLabel : 'id';
  $scope.editDropdownValueLabel = $scope.col.colDef.editDropdownValueLabel ? $scope.col.colDef.editDropdownValueLabel : 'value';
  // NOTE(review): this outer cellElement is never used — createEditor below
  // declares its own shadowing `cellElement`; looks like dead code.
  var cellElement;
  var createEditor = function(){
    inEdit = true;
    cancelBeginEditEvents();
    var cellElement = angular.element(html);
    $elm.append(cellElement);
    // Child scope for the editor so it can be destroyed independently.
    editCellScope = $scope.$new();
    $compile(cellElement)(editCellScope);
    var gridCellContentsEl = angular.element($elm.children()[0]);
    gridCellContentsEl.addClass('ui-grid-cell-contents-hidden');
  };
  // The editor must be created inside a digest; $apply only when not already in one.
  if (!$rootScope.$$phase) {
    $scope.$apply(createEditor);
  } else {
    createEditor();
  }
  //stop editing when grid is scrolled
  var deregOnGridScroll = $scope.col.grid.api.core.on.scrollBegin($scope, function () {
    if ($scope.grid.disableScrolling) {
      return;
    }
    endEdit();
    $scope.grid.api.edit.raise.afterCellEdit($scope.row.entity, $scope.col.colDef, cellModel($scope), origCellValue);
    // Each handler deregisters all three listeners so only one ever fires.
    deregOnGridScroll();
    deregOnEndCellEdit();
    deregOnCancelCellEdit();
  });
  //end editing
  var deregOnEndCellEdit = $scope.$on(uiGridEditConstants.events.END_CELL_EDIT, function () {
    endEdit();
    $scope.grid.api.edit.raise.afterCellEdit($scope.row.entity, $scope.col.colDef, cellModel($scope), origCellValue);
    deregOnEndCellEdit();
    deregOnGridScroll();
    deregOnCancelCellEdit();
  });
  //cancel editing
  var deregOnCancelCellEdit = $scope.$on(uiGridEditConstants.events.CANCEL_CELL_EDIT, function () {
    cancelEdit();
    deregOnCancelCellEdit();
    deregOnGridScroll();
    deregOnEndCellEdit();
  });
  $scope.$broadcast(uiGridEditConstants.events.BEGIN_CELL_EDIT, triggerEvent);
  $timeout(function () {
    //execute in a timeout to give any complex editor templates a cycle to completely render
    $scope.grid.api.edit.raise.beginCellEdit($scope.row.entity, $scope.col.colDef, triggerEvent);
  });
}
// Tears down the editor: restores grid focus, destroys the editor scope and
// element, unhides the original cell contents, re-wires the begin-edit events
// and notifies the grid that data may have changed.
function endEdit() {
  $scope.grid.disableScrolling = false;
  if (!inEdit) {
    return;
  }
  //sometimes the events can't keep up with the keyboard and grid focus is lost, so always focus
  //back to grid here. The focus call needs to be before the $destroy and removal of the control,
  //otherwise ng-model-options of UpdateOn: 'blur' will not work.
  if (uiGridCtrl && uiGridCtrl.grid.api.cellNav) {
    uiGridCtrl.focus();
  }
  // children()[0] is the hidden original cell content; [1] is the editor.
  var gridCellContentsEl = angular.element($elm.children()[0]);
  //remove edit element
  editCellScope.$destroy();
  angular.element($elm.children()[1]).remove();
  gridCellContentsEl.removeClass('ui-grid-cell-contents-hidden');
  inEdit = false;
  registerBeginEditEvents();
  $scope.grid.api.core.notifyDataChange( uiGridConstants.dataChange.EDIT );
}
// Restores the original cell value, raises cancelCellEdit, then performs the
// normal editor teardown via endEdit().
function cancelEdit() {
  $scope.grid.disableScrolling = false;
  if (!inEdit) {
    return;
  }
  cellModel.assign($scope, origCellValue);
  // NOTE(review): unconditional $apply — would throw if ever invoked while a
  // digest is already in progress; confirm all callers arrive outside $digest.
  $scope.$apply();
  $scope.grid.api.edit.raise.cancelCellEdit($scope.row.entity, $scope.col.colDef);
  endEdit();
}
// resolves a string path against the given object
// shamelessly borrowed from
// http://stackoverflow.com/questions/6491463/accessing-nested-javascript-objects-with-string-key
// Returns undefined when a path segment is missing, or when traversal hits a
// null/primitive intermediate value — the original `n in object` test throws
// a TypeError when `object` is not an object (e.g. row.entity.a = null and
// path 'a.b'); guarding keeps missing data a soft failure.
function resolveObjectFromPath(object, path) {
  path = path.replace(/\[(\w+)\]/g, '.$1'); // convert indexes to properties
  path = path.replace(/^\./, ''); // strip a leading dot
  var segments = path.split('.');
  while (segments.length) {
    var key = segments.shift();
    var isTraversable = object !== null &&
      (typeof object === 'object' || typeof object === 'function');
    if (isTraversable && key in object) {
      object = object[key];
    } else {
      return;
    }
  }
  return object;
}
}
};
}]);
/**
* @ngdoc directive
* @name ui.grid.edit.directive:uiGridEditor
* @element div
* @restrict A
*
* @description input editor directive for editable fields.
* Provides EndEdit and CancelEdit events
*
* Events that end editing:
* blur and enter keydown
*
* Events that cancel editing:
* - Esc keydown
*
*/
module.directive('uiGridEditor',
['gridUtil', 'uiGridConstants', 'uiGridEditConstants','$timeout', 'uiGridEditService',
function (gridUtil, uiGridConstants, uiGridEditConstants, $timeout, uiGridEditService) {
  return {
    scope: true,
    require: ['?^uiGrid', '?^uiGridRenderContainer', 'ngModel'],
    compile: function () {
      return {
        pre: function ($scope, $elm, $attrs) {
        },
        post: function ($scope, $elm, $attrs, controllers) {
          var uiGridCtrl, renderContainerCtrl, ngModel;
          if (controllers[0]) { uiGridCtrl = controllers[0]; }
          if (controllers[1]) { renderContainerCtrl = controllers[1]; }
          if (controllers[2]) { ngModel = controllers[2]; }
          //set focus at start of edit
          $scope.$on(uiGridEditConstants.events.BEGIN_CELL_EDIT, function (evt,triggerEvent) {
            $timeout(function () {
              $elm[0].focus();
              //only select text if it is not being replaced below in the cellNav viewPortKeyPress
              if ($scope.col.colDef.enableCellEditOnFocus || !(uiGridCtrl && uiGridCtrl.grid.api.cellNav)) {
                $elm[0].select();
              }
              else {
                //some browsers (Chrome) stupidly, imo, support the w3 standard that number, email, ...
                //fields should not allow setSelectionRange. We ignore the error for those browsers
                //https://www.w3.org/Bugs/Public/show_bug.cgi?id=24796
                try {
                  $elm[0].setSelectionRange($elm[0].value.length, $elm[0].value.length);
                }
                catch (ex) {
                  //ignore
                }
              }
            });
            //set the keystroke that started the edit event
            //we must do this because the BeginEdit is done in a different event loop than the initial
            //keydown event
            //fire this event for the keypress that is received
            if (uiGridCtrl && uiGridCtrl.grid.api.cellNav) {
              var viewPortKeyDownUnregister = uiGridCtrl.grid.api.cellNav.on.viewPortKeyPress($scope, function (evt, rowCol) {
                if (uiGridEditService.isStartEditKey(evt)) {
                  ngModel.$setViewValue(String.fromCharCode(evt.keyCode), evt);
                  ngModel.$render();
                }
                // One-shot listener: only the triggering keypress is replayed.
                viewPortKeyDownUnregister();
              });
            }
            $elm.on('blur', function (evt) {
              $scope.stopEdit(evt);
            });
          });
          $scope.deepEdit = false;
          // Ends the edit: cancels (restoring the original value) when the
          // editor's form is invalid, otherwise commits via END_CELL_EDIT.
          $scope.stopEdit = function (evt) {
            if ($scope.inputForm && !$scope.inputForm.$valid) {
              evt.stopPropagation();
              $scope.$emit(uiGridEditConstants.events.CANCEL_CELL_EDIT);
            }
            else {
              $scope.$emit(uiGridEditConstants.events.END_CELL_EDIT);
            }
            $scope.deepEdit = false;
          };
          // A click inside the editor enters "deep edit": scrolling is
          // disabled and arrow keys move the caret instead of navigating.
          $elm.on('click', function (evt) {
            if ($elm[0].type !== 'checkbox') {
              $scope.deepEdit = true;
              $timeout(function () {
                $scope.grid.disableScrolling = true;
              });
            }
          });
          $elm.on('keydown', function (evt) {
            switch (evt.keyCode) {
              case uiGridConstants.keymap.ESC:
                evt.stopPropagation();
                $scope.$emit(uiGridEditConstants.events.CANCEL_CELL_EDIT);
                break;
            }
            // In deep edit, swallow arrow keys so the caret moves in-place.
            if ($scope.deepEdit &&
              (evt.keyCode === uiGridConstants.keymap.LEFT ||
                evt.keyCode === uiGridConstants.keymap.RIGHT ||
                evt.keyCode === uiGridConstants.keymap.UP ||
                evt.keyCode === uiGridConstants.keymap.DOWN)) {
              evt.stopPropagation();
            }
            // Pass the keydown event off to the cellNav service, if it exists
            else if (uiGridCtrl && uiGridCtrl.grid.api.cellNav) {
              evt.uiGridTargetRenderContainerId = renderContainerCtrl.containerId;
              if (uiGridCtrl.cellNav.handleKeyDown(evt) !== null) {
                $scope.stopEdit(evt);
              }
            }
            else {
              //handle enter and tab for editing not using cellNav
              switch (evt.keyCode) {
                case uiGridConstants.keymap.ENTER: // Enter (Leave Field)
                case uiGridConstants.keymap.TAB:
                  evt.stopPropagation();
                  evt.preventDefault();
                  $scope.stopEdit(evt);
                  break;
              }
            }
            return true;
          });
        }
      };
    }
  };
}]);
/**
* @ngdoc directive
* @name ui.grid.edit.directive:input
* @element input
* @restrict E
*
* @description directive to provide binding between input[date] value and ng-model for angular 1.2
* It is similar to input[date] directive of angular 1.3
*
* Supported date format for input is 'yyyy-MM-dd'
* The directive will set the $valid property of input element and the enclosing form to false if
* model is invalid date or value of input is entered wrong.
*
*/
module.directive('uiGridEditor', ['$filter', function ($filter) {
  /**
   * Parses a 'yyyy-MM-dd' string into a Date.
   *
   * Returns null for empty/malformed input, for non-numeric components, and
   * for component combinations that are not real calendar dates. The previous
   * implementation let the Date constructor silently roll values over (e.g.
   * '2015-02-30' became Mar 2, '2015-13-01' became Jan 2016), so impossible
   * dates were accepted as valid even though the directive documents that
   * wrongly entered values set $valid to false; verifying the constructed
   * date's components against the parsed ones rejects them.
   */
  function parseDateString(dateString) {
    if (typeof(dateString) === 'undefined' || dateString === '') {
      return null;
    }
    var parts = dateString.split('-');
    if (parts.length !== 3) {
      return null;
    }
    var year = parseInt(parts[0], 10);
    var month = parseInt(parts[1], 10);
    var day = parseInt(parts[2], 10);
    if (isNaN(year) || isNaN(month) || isNaN(day) || month < 1 || year < 1 || day < 1) {
      return null;
    }
    var date = new Date(year, (month - 1), day);
    // Reject rolled-over dates such as 2015-02-30 or 2015-13-01.
    if (date.getFullYear() !== year || date.getMonth() !== (month - 1) || date.getDate() !== day) {
      return null;
    }
    return date;
  }
  return {
    priority: -100, // run after default uiGridEditor directive
    require: '?ngModel',
    link: function (scope, element, attrs, ngModel) {
      // Shim only needed on angular 1.2, which lacks native input[date] binding.
      if (angular.version.minor === 2 && attrs.type && attrs.type === 'date' && ngModel) {
        // model -> view: render as yyyy-MM-dd; flag invalid Date objects.
        ngModel.$formatters.push(function (modelValue) {
          ngModel.$setValidity(null,(!modelValue || !isNaN(modelValue.getTime())));
          return $filter('date')(modelValue, 'yyyy-MM-dd');
        });
        // view -> model: parse the string; flag unparseable/unreal dates.
        ngModel.$parsers.push(function (viewValue) {
          if (viewValue && viewValue.length > 0) {
            var dateValue = parseDateString(viewValue);
            ngModel.$setValidity(null, (dateValue && !isNaN(dateValue.getTime())));
            return dateValue;
          }
          else {
            ngModel.$setValidity(null, true);
            return null;
          }
        });
      }
    }
  };
}]);
/**
* @ngdoc directive
* @name ui.grid.edit.directive:uiGridEditDropdown
* @element div
* @restrict A
*
* @description dropdown editor for editable fields.
* Provides EndEdit and CancelEdit events
*
* Events that end editing:
* blur and enter keydown, and any left/right nav
*
* Events that cancel editing:
* - Esc keydown
*
*/
module.directive('uiGridEditDropdown',
['uiGridConstants', 'uiGridEditConstants',
function (uiGridConstants, uiGridEditConstants) {
  return {
    require: ['?^uiGrid', '?^uiGridRenderContainer'],
    scope: true,
    compile: function () {
      return {
        pre: function ($scope, $elm, $attrs) {
        },
        post: function ($scope, $elm, $attrs, controllers) {
          var uiGridCtrl = controllers[0];
          var renderContainerCtrl = controllers[1];
          //set focus at start of edit
          $scope.$on(uiGridEditConstants.events.BEGIN_CELL_EDIT, function () {
            $elm[0].focus();
            // Size the dropdown to fill its cell (minus 1px for the border).
            $elm[0].style.width = ($elm[0].parentElement.offsetWidth - 1) + 'px';
            $elm.on('blur', function (evt) {
              $scope.stopEdit(evt);
            });
          });
          $scope.stopEdit = function (evt) {
            // no need to validate a dropdown - invalid values shouldn't be
            // available in the list
            $scope.$emit(uiGridEditConstants.events.END_CELL_EDIT);
          };
          $elm.on('keydown', function (evt) {
            switch (evt.keyCode) {
              case uiGridConstants.keymap.ESC:
                evt.stopPropagation();
                $scope.$emit(uiGridEditConstants.events.CANCEL_CELL_EDIT);
                break;
            }
            // Defer navigation keys to cellNav when it is available; any
            // navigation away from the cell ends the edit.
            if (uiGridCtrl && uiGridCtrl.grid.api.cellNav) {
              evt.uiGridTargetRenderContainerId = renderContainerCtrl.containerId;
              if (uiGridCtrl.cellNav.handleKeyDown(evt) !== null) {
                $scope.stopEdit(evt);
              }
            }
            else {
              //handle enter and tab for editing not using cellNav
              switch (evt.keyCode) {
                case uiGridConstants.keymap.ENTER: // Enter (Leave Field)
                case uiGridConstants.keymap.TAB:
                  evt.stopPropagation();
                  evt.preventDefault();
                  $scope.stopEdit(evt);
                  break;
              }
            }
            return true;
          });
        }
      };
    }
  };
}]);
/**
* @ngdoc directive
* @name ui.grid.edit.directive:uiGridEditFileChooser
* @element div
* @restrict A
*
* @description input editor directive for editable fields.
* Provides EndEdit and CancelEdit events
*
* Events that end editing:
* blur and enter keydown
*
* Events that cancel editing:
* - Esc keydown
*
*/
module.directive('uiGridEditFileChooser',
['gridUtil', 'uiGridConstants', 'uiGridEditConstants','$timeout',
function (gridUtil, uiGridConstants, uiGridEditConstants, $timeout) {
  return {
    scope: true,
    require: ['?^uiGrid', '?^uiGridRenderContainer'],
    compile: function () {
      return {
        pre: function ($scope, $elm, $attrs) {
        },
        post: function ($scope, $elm, $attrs, controllers) {
          var uiGridCtrl, renderContainerCtrl;
          if (controllers[0]) { uiGridCtrl = controllers[0]; }
          if (controllers[1]) { renderContainerCtrl = controllers[1]; }
          var grid = uiGridCtrl.grid;
          // Fired when the file input's selection changes; forwards the
          // chosen files to colDef.editFileChooserCallback and ends the edit.
          var handleFileSelect = function( event ){
            var target = event.srcElement || event.target;
            if (target && target.files && target.files.length > 0) {
              /**
               * @ngdoc property
               * @name editFileChooserCallback
               * @propertyOf ui.grid.edit.api:ColumnDef
               * @description A function that should be called when any files have been chosen
               * by the user. You should use this to process the files appropriately for your
               * application.
               *
               * It passes the gridRow (from which you can get gridRow.entity), the gridCol,
               * and the files. The files are in the format as returned from the file chooser,
               * an array of files, with each having useful information such as:
               * - `files[0].lastModifiedDate`
               * - `files[0].name`
               * - `files[0].size` (appears to be in bytes)
               * - `files[0].type` (MIME type by the looks)
               *
               * Typically you would do something with these files - most commonly you would
               * use the filename or read the file itself in. The example function does both.
               *
               * @example
               * <pre>
               * editFileChooserCallback: function(gridRow, gridCol, files ){
               *   // ignore all but the first file, it can only choose one anyway
               *   // set the filename into this column
               *   gridRow.entity.filename = files[0].name;
               *
               *   // read the file and set it into a hidden column, which we may do stuff with later
               *   var setFile = function(fileContent){
               *     gridRow.entity.file = fileContent.currentTarget.result;
               *   };
               *   var reader = new FileReader();
               *   reader.onload = setFile;
               *   reader.readAsText( files[0] );
               * }
               * </pre>
               */
              if ( typeof($scope.col.colDef.editFileChooserCallback) === 'function' ) {
                $scope.col.colDef.editFileChooserCallback($scope.row, $scope.col, target.files);
              } else {
                gridUtil.logError('You need to set colDef.editFileChooserCallback to use the file chooser');
              }
              // Reset the form so re-choosing the same file still fires 'change'.
              target.form.reset();
              $scope.$emit(uiGridEditConstants.events.END_CELL_EDIT);
            } else {
              // No files selected - treat as a cancelled edit.
              $scope.$emit(uiGridEditConstants.events.CANCEL_CELL_EDIT);
            }
          };
          // `false` = listen in the bubbling phase (not capture).
          $elm[0].addEventListener('change', handleFileSelect, false);
          $scope.$on(uiGridEditConstants.events.BEGIN_CELL_EDIT, function () {
            $elm[0].focus();
            $elm[0].select();
            $elm.on('blur', function (evt) {
              $scope.$emit(uiGridEditConstants.events.END_CELL_EDIT);
            });
          });
        }
      };
    }
  };
}]);
})();
| Lincoln-xzc/angular-ui-grid | src/features/edit/js/gridEdit.js | JavaScript | mit | 48,612 |
/*************************************************************************/
/* thread_posix.cpp */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2019 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2019 Godot Engine contributors (cf. AUTHORS.md) */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#include "thread_posix.h"
#include "core/script_language.h"
#if (defined(UNIX_ENABLED) || defined(PTHREAD_ENABLED)) && !defined(NO_THREADS)
#ifdef PTHREAD_BSD_SET_NAME
#include <pthread_np.h>
#endif
#include "core/os/memory.h"
#include "core/safe_refcount.h"
// Frees the heap-allocated per-thread ID when a thread exits and its
// thread-specific value is reclaimed by pthreads.
static void _thread_id_key_destr_callback(void *p_value) {
	memdelete(static_cast<Thread::ID *>(p_value));
}

// Creates the TLS key used to stash each thread's Thread::ID.
// NOTE(review): the return value of pthread_key_create is ignored; a failure
// (e.g. key exhaustion) would go undetected — confirm this is acceptable.
static pthread_key_t _create_thread_id_key() {
	pthread_key_t key;
	pthread_key_create(&key, &_thread_id_key_destr_callback);
	return key;
}

// TLS key holding the calling thread's ID (initialized at static init time).
pthread_key_t ThreadPosix::thread_id_key = _create_thread_id_key();
// Counter used to hand out unique thread IDs via atomic_increment.
Thread::ID ThreadPosix::next_thread_id = 0;
// Returns the ID assigned to this thread object when its pthread started.
Thread::ID ThreadPosix::get_id() const {
	return id;
}

// Factory used to register this class as the platform Thread implementation.
Thread *ThreadPosix::create_thread_posix() {
	return memnew(ThreadPosix);
}
// Entry point executed on the newly spawned pthread: assigns the thread a
// unique ID, stores a heap copy of it in TLS (freed by the key destructor),
// and brackets the user callback with the script-language enter/exit hooks.
void *ThreadPosix::thread_callback(void *userdata) {
	ThreadPosix *t = reinterpret_cast<ThreadPosix *>(userdata);
	// presumably atomic_increment returns the incremented value, so IDs are
	// unique across threads — see also get_thread_id_func_posix.
	t->id = atomic_increment(&next_thread_id);
	pthread_setspecific(thread_id_key, (void *)memnew(ID(t->id)));
	ScriptServer::thread_enter(); //scripts may need to attach a stack
	t->callback(t->user);
	ScriptServer::thread_exit();
	return NULL;
}
// Spawns a joinable pthread with a 256 KiB stack running p_callback(p_user).
// The Settings parameter is accepted for interface compatibility but unused.
// NOTE(review): the return value of pthread_create is ignored and the attr
// is never pthread_attr_destroy()ed — confirm both are intentional.
Thread *ThreadPosix::create_func_posix(ThreadCreateCallback p_callback, void *p_user, const Settings &) {
	ThreadPosix *tr = memnew(ThreadPosix);
	tr->callback = p_callback;
	tr->user = p_user;
	pthread_attr_init(&tr->pthread_attr);
	pthread_attr_setdetachstate(&tr->pthread_attr, PTHREAD_CREATE_JOINABLE);
	pthread_attr_setstacksize(&tr->pthread_attr, 256 * 1024);
	pthread_create(&tr->pthread, &tr->pthread_attr, thread_callback, tr);
	return tr;
}
// Returns the calling thread's ID from TLS, lazily assigning (and caching) a
// fresh one for threads not created through this class (e.g. the main thread).
Thread::ID ThreadPosix::get_thread_id_func_posix() {
	void *value = pthread_getspecific(thread_id_key);
	if (value)
		return *static_cast<ID *>(value);
	ID new_id = atomic_increment(&next_thread_id);
	pthread_setspecific(thread_id_key, (void *)memnew(ID(new_id)));
	return new_id;
}
// Blocks until p_thread's pthread finishes, then clears the handle so a
// second wait fails the ERR_FAIL_COND guard instead of joining twice.
void ThreadPosix::wait_to_finish_func_posix(Thread *p_thread) {
	ThreadPosix *tp = static_cast<ThreadPosix *>(p_thread);
	ERR_FAIL_COND(!tp);
	ERR_FAIL_COND(tp->pthread == 0);
	pthread_join(tp->pthread, NULL);
	tp->pthread = 0;
}
// Names the *calling* thread for debuggers/profilers, papering over the
// platform variants of pthread_setname_np:
// - PTHREAD_NO_RENAME: platform cannot rename threads at all
// - PTHREAD_RENAME_SELF: setname takes no thread argument
// - PTHREAD_BSD_SET_NAME: BSD-style pthread_set_name_np with no error return
// Returns OK on success, ERR_INVALID_PARAMETER on failure, ERR_UNAVAILABLE
// when renaming is unsupported.
Error ThreadPosix::set_name_func_posix(const String &p_name) {
#ifdef PTHREAD_NO_RENAME
	return ERR_UNAVAILABLE;
#else
#ifdef PTHREAD_RENAME_SELF
	// check if thread is the same as caller
	int err = pthread_setname_np(p_name.utf8().get_data());
#else
	pthread_t running_thread = pthread_self();
#ifdef PTHREAD_BSD_SET_NAME
	pthread_set_name_np(running_thread, p_name.utf8().get_data());
	int err = 0; // Open/FreeBSD ignore errors in this function
#else
	int err = pthread_setname_np(running_thread, p_name.utf8().get_data());
#endif // PTHREAD_BSD_SET_NAME
#endif // PTHREAD_RENAME_SELF
	return err == 0 ? OK : ERR_INVALID_PARAMETER;
#endif // PTHREAD_NO_RENAME
};
// Installs the POSIX implementations as the active Thread backend by filling
// in the base class's static function pointers.
void ThreadPosix::make_default() {
	create_func = create_func_posix;
	get_thread_id_func = get_thread_id_func_posix;
	wait_to_finish_func = wait_to_finish_func_posix;
	set_name_func = set_name_func_posix;
}

ThreadPosix::ThreadPosix() {
	// 0 means "not running / already joined"; see wait_to_finish_func_posix.
	pthread = 0;
}

ThreadPosix::~ThreadPosix() {
}
#endif
| okamstudio/godot | drivers/unix/thread_posix.cpp | C++ | mit | 5,278 |
///////////////////////////////////////////////////////////////////////////
// Copyright © 2014 - 2016 Esri. All Rights Reserved.
//
// Licensed under the Apache License Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
///////////////////////////////////////////////////////////////////////////
define([
'dojo/_base/declare',
'dojo/_base/lang',
'dojo/_base/array',
'dojo/_base/html',
'dojo/topic',
'dojo/Deferred',
'dojo/on',
'./utils',
'./WidgetManager',
'./shared/AppVersionManager',
'./ConfigLoader',
'./tokenUtils',
'./dijit/LoadingIndicator',
'esri/config',
'esri/tasks/GeometryService'
],
function (declare, lang, array, html, topic, Deferred, on, jimuUtils, WidgetManager,
AppVersionManager, ConfigLoader, tokenUtils, LoadingIndicator, esriConfig, GeometryService) {
var instance = null, clazz;
clazz = declare(null, {
urlParams: null,
appConfig: null,
configFile: null,
_configLoaded: false,
portalSelf: null,
// Create the singleton config manager: wire builder communication, the
// version/widget/config-loader collaborators, and window resize handling.
constructor: function (urlParams) {
this.urlParams = urlParams || {};
this.listenBuilderEvents();
this.versionManager = new AppVersionManager();
this.widgetManager = WidgetManager.getInstance();
this.configLoader = ConfigLoader.getInstance(this.urlParams, {
versionManager: this.versionManager
});
// In builder config/preview mode this page runs in an iframe; hand our
// topic hub to the parent window so the builder can publish events here.
if(this.urlParams.mode === 'config' && window.parent.setConfigViewerTopic &&
lang.isFunction(window.parent.setConfigViewerTopic)){
window.parent.setConfigViewerTopic(topic);
}
if(this.urlParams.mode === 'preview' && window.parent.setPreviewViewerTopic &&
lang.isFunction(window.parent.setPreviewViewerTopic)){
window.parent.setPreviewViewerTopic(topic);
}
if (!jimuUtils.isMobileUa()) {
//mobile devices do NOT listen to the 'resize' event
//avoid to virtual-keyboard appears and then app switches between "Mobile Mode" and "Desktop Mode"
on(window, 'resize', lang.hitch(this, this._onWindowResize));
}
on(window, "orientationchange", lang.hitch(this, this._onOrientationChange));
},
listenBuilderEvents: function(){
//whatever(app, map, widget, widgetPoolChanged) config changed, publish this event.
//*when app changed, the id is "app", the data is app's properties, like title, subtitle.
//*when map changed, the id is "map", the data is itemId
//*when widget that is in preloadwidget/widgetpool changed, the id is widget's id,
// the data is widget's setting
//*when anything in the widget pool changed, the id is "widgetPool", the data is
// widgets and groups
topic.subscribe('builder/widgetChanged', lang.hitch(this, this._onWidgetChanged));
topic.subscribe('builder/groupChanged', lang.hitch(this, this._onGroupChanged));
topic.subscribe('builder/widgetPoolChanged', lang.hitch(this, this._onWidgetPoolChanged));
topic.subscribe('builder/openAtStartChange', lang.hitch(this, this._onOpenAtStartChanged));
topic.subscribe('builder/mapChanged', lang.hitch(this, this._onMapChanged));
topic.subscribe('builder/mapOptionsChanged', lang.hitch(this, this._onMapOptionsChanged));
topic.subscribe('builder/appAttributeChanged', lang.hitch(this, this._onAppAttributeChanged));
//actionTriggered event is proccessed by layout manager.
// topic.subscribe('builder/actionTriggered', lang.hitch(this, this._onConfigChanged));
topic.subscribe('builder/setAppConfig', lang.hitch(this, this._onAppConfigSet));
topic.subscribe('builder/themeChanged', lang.hitch(this, this._onThemeChanged));
topic.subscribe('builder/layoutChanged', lang.hitch(this, this._onLayoutChanged));
topic.subscribe('builder/styleChanged', lang.hitch(this, this._onStyleChanged));
topic.subscribe('builder/syncExtent', lang.hitch(this, this._onSyncExtent));
topic.subscribe('builder/loadingPageChanged', lang.hitch(this, this._onLoadingPageChanged));
topic.subscribe('builder/templateConfigChanged',
lang.hitch(this, this._onTemplateConfigChanged));
topic.subscribe('builder/appProxyForMapChanged', lang.hitch(this,
this._onAppProxyForMapChanged));
topic.subscribe('builder/appProxyForUrlChanged', lang.hitch(this,
this._onAppProxyForUrlChanged));
},
// Load the app config from disk/portal (standalone mode only), apply default
// values, and broadcast "appConfigLoaded". Returns a promise of the decorated
// config, or undefined in builder preview/config mode.
loadConfig: function(){
if(this.urlParams.mode === 'preview' ||
this.urlParams.mode === 'config'){
//in preview/config mode, the config is set by the builder.
return;
}
var loading = new LoadingIndicator();
loading.placeAt(window.jimuConfig.layoutId);
return this.configLoader.loadConfig().then(lang.hitch(this, function(appConfig){
this.portalSelf = this.configLoader.portalSelf;
this.appConfig = this._addDefaultValues(appConfig);
window.appInfo.isRunInMobile = jimuUtils.inMobileSize();
console.timeEnd('Load Config');
var _ac = this.getAppConfig();
loading.destroy();
topic.publish("appConfigLoaded", _ac);
return _ac;
}), lang.hitch(this, function(err){
// Error path: tear down the spinner and surface the message on screen.
loading.destroy();
console.error(err);
if(err && err.message && typeof err.message === 'string'){
this._showErrorMessage(err.message);
}
}));
},
_showErrorMessage: function(msg){
html.create('div', {
'class': 'app-error',
innerHTML: msg
}, document.body);
},
// Return a deep clone of the current config, decorated with lookup helpers.
// In mobile size the clone has mobileLayout positions mixed in and keeps the
// unmodified desktop config in _originConfig (used by getCleanConfig).
getAppConfig: function () {
var c;
if(window.appInfo.isRunInMobile){
// console.log('Switch to mobile mode.');
c = lang.clone(this._getMobileConfig(this.appConfig));
c._originConfig = lang.clone(this.appConfig);
}else{
// console.log('Switch to desktop mode.');
c = lang.clone(this.appConfig);
}
c.getConfigElementById = function(id){
return jimuUtils.getConfigElementById(this, id);
};
c.getConfigElementsByName = function(name){
return jimuUtils.getConfigElementsByName(this, name);
};
// Strip framework-added properties; prefer the desktop config when the
// clone was produced for mobile.
c.getCleanConfig = function(isForAGOLTemplate){
if(this._originConfig){
return getCleanConfig(this._originConfig, isForAGOLTemplate);
}else{
return getCleanConfig(this, isForAGOLTemplate);
}
};
c.visitElement = function(cb){
jimuUtils.visitElement(this, cb);
};
return c;
},
_onOrientationChange: function() {
if (this.appConfig) {
topic.publish("appConfigChanged", this.getAppConfig(), 'layoutChange');
}
},
_onWindowResize: function () {
var runInMobile = jimuUtils.inMobileSize();
if(window.appInfo.isRunInMobile === runInMobile){
return;
}
window.appInfo.isRunInMobile = runInMobile;
if(this.appConfig){
topic.publish("appConfigChanged", this.getAppConfig(), 'layoutChange');
}
},
// Produce the config with mobileLayout element positions mixed in; used when
// the viewport is in mobile size.
_getMobileConfig: function(appConfig) {
return jimuUtils.mixinAppConfigPosition(appConfig, appConfig.mobileLayout);
},
// Merge a widget's changed setting (published by the builder) into the stored
// config element with the same id, then rebroadcast the full config.
_onWidgetChanged: function(_newJson){
// transfer obj to another iframe may cause problems on IE8
var newJson = jimuUtils.reCreateObject(_newJson);
var oldJson = jimuUtils.getConfigElementById(this.appConfig, _newJson.id);
//for placeholder, add off panel
if(newJson.inPanel === false && !oldJson.uri){
newJson.closeable = true;
}
//for now, we can add/update property only
for(var p in newJson){
oldJson[p] = newJson[p];
}
// The widget now has an explicit config, so it no longer uses the default.
delete oldJson.isDefaultConfig;
this.configLoader.addNeedValues(this.appConfig);
this._addDefaultValues(this.appConfig);
topic.publish('appConfigChanged', this.getAppConfig(), 'widgetChange', newJson);
},
_onGroupChanged: function(_newJson){
// transfer obj to another iframe may cause problems on IE8
var newJson = jimuUtils.reCreateObject(_newJson);
var oldJson = jimuUtils.getConfigElementById(this.appConfig, _newJson.id);
//for now, we can add/update property only
for(var p in newJson){
oldJson[p] = newJson[p];
}
this.configLoader.addNeedValues(this.appConfig);
this._addDefaultValues(this.appConfig);
topic.publish('appConfigChanged', this.getAppConfig(), 'groupChange', newJson);
},
// Replace the widget-pool content controlled by one controller widget. With a
// single controller the whole pool is swapped; with several, only the entries
// the given controller owns are removed and re-added.
_onWidgetPoolChanged: function(_newJson){
// transfer obj to another iframe may cause problems on IE8
var newJson = jimuUtils.reCreateObject(_newJson);
var controllerWidgets = this.widgetManager.getControllerWidgets();
if(controllerWidgets.length === 1){
this.appConfig.widgetPool.widgets = newJson.widgets;
this.appConfig.widgetPool.groups = newJson.groups;
}else{
var controllerJson = jimuUtils.getConfigElementById(this.appConfig, newJson.controllerId);
//remove old jsons from pool
array.forEach(controllerJson.controlledWidgets, function(widgetId){
this._removeWidgetOrGroupFromPoolById(this.appConfig, widgetId);
}, this);
array.forEach(controllerJson.controlledGroups, function(groupId){
this._removeWidgetOrGroupFromPoolById(this.appConfig, groupId);
}, this);
//add new jsons into pool
if(typeof this.appConfig.widgetPool.widgets === 'undefined'){
this.appConfig.widgetPool.widgets = newJson.widgets;
}else{
this.appConfig.widgetPool.widgets =
this.appConfig.widgetPool.widgets.concat(newJson.widgets);
}
if(typeof this.appConfig.widgetPool.groups === 'undefined'){
this.appConfig.widgetPool.groups = newJson.groups;
}else{
this.appConfig.widgetPool.groups =
this.appConfig.widgetPool.groups.concat(newJson.groups);
}
//add this line because we need id below
this.configLoader.addNeedValues(this.appConfig);
//update controller setting
controllerJson.controlledWidgets = array.map(newJson.widgets, function(widgetJson){
return widgetJson.id;
});
controllerJson.controlledGroups = array.map(newJson.groups, function(groupJson){
return groupJson.id;
});
}
// NOTE(review): addNeedValues runs twice on the multi-controller path
// (also inside the else branch above) — presumably harmless; confirm.
this.configLoader.addNeedValues(this.appConfig);
this.configLoader.loadAndUpgradeAllWidgetsConfig(this.appConfig).then(lang.hitch(this, function(appConfig){
this.appConfig = appConfig;
this._addDefaultValues(this.appConfig);
topic.publish('appConfigChanged', this.getAppConfig(), 'widgetPoolChange', newJson);
}));
},
_removeWidgetOrGroupFromPoolById: function(appConfig, id){
array.some(appConfig.widgetPool.widgets, function(widget, i){
if(widget.id === id){
appConfig.widgetPool.widgets.splice(i, 1);
return true;
}
});
array.some(appConfig.widgetPool.groups, function(group, i){
if(group.id === id){
appConfig.widgetPool.groups.splice(i, 1);
return true;
}
});
},
_onOpenAtStartChanged: function(_newJson) {
// transfer obj to another iframe may cause problems on IE8
var newJson = jimuUtils.reCreateObject(_newJson);
//TODO we support only one controller for now, so we don't do much here
var appConfig = this.appConfig;
if (_newJson.isOnScreen) {
var onScreenWidgets = appConfig.widgetOnScreen && appConfig.widgetOnScreen.widgets;
if (onScreenWidgets && onScreenWidgets.length > 0) {
array.forEach(onScreenWidgets, lang.hitch(this, function(w) {
if (w.id === _newJson.id) {
w.openAtStart = !w.openAtStart;
} else {
delete w.openAtStart;
}
}));
}
} else {
var pool = appConfig.widgetPool;
if (pool && pool.groups && pool.groups.length > 0) {
array.forEach(pool.groups, lang.hitch(this, function(g) {
if (g.id === _newJson.id) {
g.openAtStart = !g.openAtStart;
} else {
delete g.openAtStart;
}
}));
}
if (pool && pool.widgets && pool.widgets.length > 0) {
array.forEach(pool.widgets, lang.hitch(this, function(w) {
if (w.id === _newJson.id) {
w.openAtStart = !w.openAtStart;
} else {
delete w.openAtStart;
}
}));
}
}
topic.publish('appConfigChanged', this.getAppConfig(), 'openAtStartChange', newJson);
},
// Mix changed top-level app attributes (title, links, proxy settings, ...)
// into the config and rebroadcast.
_onAppAttributeChanged: function(_newJson){
// transfer obj to another iframe may cause problems on IE8
var newJson = jimuUtils.reCreateObject(_newJson);
lang.mixin(this.appConfig, newJson);
this.configLoader.processProxy(this.appConfig);
this.configLoader.addNeedValues(this.appConfig);
this._addDefaultValues(this.appConfig);
topic.publish('appConfigChanged', this.getAppConfig(), 'attributeChange', newJson);
},
// Update exactly one aspect of the loading page (background color, background
// image, or loading gif), merging into the existing sub-config.
_onLoadingPageChanged: function(_newJson){
// transfer obj to another iframe may cause problems on IE8
var newJson = jimuUtils.reCreateObject(_newJson);
var oldConfig;
if('backgroundColor' in newJson){
this.appConfig.loadingPage.backgroundColor = newJson.backgroundColor;
}else if('backgroundImage' in newJson){
oldConfig = this.appConfig.loadingPage.backgroundImage || {};
this.appConfig.loadingPage.backgroundImage =
lang.mixin(oldConfig, newJson.backgroundImage);
}else if('loadingGif' in newJson){
oldConfig = this.appConfig.loadingPage.loadingGif || {};
this.appConfig.loadingPage.loadingGif =
lang.mixin(oldConfig, newJson.loadingGif);
}
this.configLoader.addNeedValues(this.appConfig);
this._addDefaultValues(this.appConfig);
topic.publish('appConfigChanged', this.getAppConfig(), 'loadingPageChange', newJson);
},
/**
* _newJson pattern:
* {
* mapItemId: itemId,
* proxyItems: [{
* sourceUrl: string;
* title: string;
* premium: boolean;
* consumeCredits: boolean;
* useProxy: boolean;
* proxyUrl?: string;
* proxyId?: string;
* }]
* }
*/
_onAppProxyForMapChanged: function(_newJson) {
var newJson = jimuUtils.reCreateObject(_newJson);
if (!('appProxy' in this.appConfig.map)) {
// Set appProxy if there is no such property
this.appConfig.map.appProxy = newJson;
} else if (this.appConfig.map.appProxy.mapItemId !== newJson.mapItemId) {
// Replace the app proxy if map changed.
this.appConfig.map.appProxy = newJson;
} else {
// Update proxy items
array.forEach(newJson.proxyItems, lang.hitch(this, function(item) {
array.some(this.appConfig.map.appProxy.proxyItems, function(configItem) {
if (configItem.sourceUrl === item.sourceUrl) {
configItem.useProxy = item.useProxy;
configItem.proxyUrl = item.proxyUrl || '';
configItem.proxyId = item.proxyId || '';
return true;
}
});
}));
}
topic.publish('appConfigChanged', this.getAppConfig(), 'appProxyChange', newJson);
},
/**
* _newJson pattern is array of:
* {
* sourceUrl: string,
* proxyUrl?: string,
* proxyId?: string,
* useProxy: boolean,
* title: string,
* premium: boolean,
* consumeCredits: boolean
* }
*/
// Replace the URL-based app proxy list wholesale and rebroadcast.
_onAppProxyForUrlChanged: function(_newJson) {
var newJson = jimuUtils.reCreateObject(_newJson);
this.appConfig.appProxies = newJson;
topic.publish('appConfigChanged', this.getAppConfig(), 'appProxyChange', newJson);
},
// Replace the AGOL-template configuration block and rebroadcast.
_onTemplateConfigChanged: function(_newJson){
var newJson = jimuUtils.reCreateObject(_newJson);
this.appConfig.templateConfig = newJson;
this.configLoader.addNeedValues(this.appConfig);
this._addDefaultValues(this.appConfig);
topic.publish('appConfigChanged', this.getAppConfig(), 'templateConfigChange', newJson);
},
// Switch the app to a different web map: drop display-specific mapOptions,
// mix in the new map properties, and re-run widget config upgrades.
_onMapChanged: function(_newJson){
// transfer obj to another iframe may cause problems on IE8
var newJson = jimuUtils.reCreateObject(_newJson);
//remove the options that are relative to map's display when map is changed.
if(this.appConfig.map.mapOptions){
jimuUtils.deleteMapOptions(this.appConfig.map.mapOptions);
}
lang.mixin(this.appConfig.map, newJson);
this.configLoader.addNeedValues(this.appConfig);
this.configLoader.loadAndUpgradeAllWidgetsConfig(this.appConfig).then(lang.hitch(this, function(appConfig){
this.appConfig = appConfig;
this._addDefaultValues(this.appConfig);
topic.publish('appConfigChanged', this.getAppConfig(), 'mapChange', newJson);
}));
},
// Merge changed map display options (extent, scalebar, ...) and rebroadcast.
_onMapOptionsChanged: function(_newJson){
// transfer obj to another iframe may cause problems on IE8
var newJson = jimuUtils.reCreateObject(_newJson);
if(!this.appConfig.map.mapOptions){
this.appConfig.map.mapOptions = {};
}
lang.mixin(this.appConfig.map.mapOptions, newJson);
topic.publish('appConfigChanged', this.getAppConfig(), 'mapOptionsChange', newJson);
},
// Rebuild the whole app config for a newly selected theme, then rebroadcast.
_onThemeChanged: function(theme){
this._getAppConfigFromTheme(theme).then(lang.hitch(this, function(config){
this.appConfig = config;
topic.publish('appConfigChanged', this.getAppConfig(), 'themeChange', theme.getName());
}));
},
_onLayoutChanged: function(layout){
//summary:
// * Layout contains widget/group position, panel uri.
// * For default layout, use the same format with app config to define it.
// * For other layouts, we support 2 ways:
// * Use array, one by one
// * Use object, key is widget uri, or ph_<index>(placeholder), or g_<index>(group)
this.appConfig = jimuUtils.mixinAppConfigPosition(this.appConfig, layout.layoutConfig);
this._addDefaultPanelAndPosition(this.appConfig);
topic.publish('appConfigChanged', this.getAppConfig(), 'layoutChange', layout.name);
},
// Move the chosen style to the front of theme.styles; a custom style also
// records its main background color.
_onStyleChanged: function(style){
this.appConfig.theme.styles = this._genStyles(this.appConfig.theme.styles, style.name);
if (style.isCustom) {
this.appConfig.theme.customStyles = {
mainBackgroundColor: style.styleColor
};
} else {
delete this.appConfig.theme.customStyles;
}
topic.publish('appConfigChanged', this.getAppConfig(), 'styleChange', style.name);
},
// Forward the builder's extent-sync request to the app.
_onSyncExtent: function(map){
topic.publish('syncExtent', map);
},
_genStyles: function(allStyle, currentStyle){
var styles = [];
styles.push(currentStyle);
array.forEach(allStyle, function(_style){
if(styles.indexOf(_style) < 0){
styles.push(_style);
}
});
return styles;
},
/**************************************
Keep the following same between themes:
1. map config excluding map's position
2. widget pool config excluding pool panel config
***************************************/
// Build a complete app config for the given theme. Keeps the current map
// config (minus position) and the user's widget pool; takes panels, layout,
// and styles from the theme. Resolves with the upgraded config.
_getAppConfigFromTheme: function(theme){
var def = new Deferred();
var config, styles = [];
var currentConfig = this.getAppConfig().getCleanConfig();
currentConfig.mode = this.urlParams.mode;
//because we don't allow user config panel for group,
//and group's panel should be different between differrent theme
//so, we delete group panel
array.forEach(currentConfig.widgetPool.groups, function(group){
delete group.panel;
}, this);
//theme has already appConfig object, use it but keep something
if(theme.appConfig){
config = lang.clone(theme.appConfig);
config.map = currentConfig.map;
config.map.position = theme.appConfig.map.position;
this._copyPoolToThemePool(currentConfig, config);
config.links = currentConfig.links;
config.title = currentConfig.title;
config.subtitle = currentConfig.subtitle;
config.logo = currentConfig.logo;
}else{
//use layout and style to create a new appConfig, which may contain some place holders
var layout = theme.getCurrentLayout();
var style = theme.getCurrentStyle();
config = lang.clone(currentConfig);
var layoutConfig = lang.clone(layout.layoutConfig);
//use onscreen
config.widgetOnScreen = layoutConfig.widgetOnScreen;
//add flag
if(layoutConfig.widgetPool){
array.forEach(layoutConfig.widgetPool.widgets, function(w){
w.isPreconfiguredInTheme = true;
});
array.forEach(layoutConfig.widgetPool.groups, function(g){
g.isPreconfiguredInTheme = true;
});
}
//copy pool
this._copyPoolToThemePool(currentConfig, layoutConfig);
config.widgetPool = layoutConfig.widgetPool;
if(layoutConfig.map && layoutConfig.map.position){
config.map.position = layoutConfig.map.position;
}
config.mobileLayout = layoutConfig.mobileLayout;
//put all styles into the style array, and the current style is the first element
styles = this._genStyles(array.map(theme.getStyles(), function(style){
return style.name;
}), style.name);
config.theme = {
name: theme.getName(),
styles: styles,
version: theme.getVersion()
};
}
// Re-resolve derived values and widget manifests before handing back.
this.configLoader.addNeedValues(config);
this.configLoader.loadWidgetsManifest(config).then(lang.hitch(this, function(config){
return this.configLoader.loadAndUpgradeAllWidgetsConfig(config);
})).then(lang.hitch(this, function(){
this._addDefaultValues(config);
def.resolve(config);
}));
return def;
},
// Merge the user's widget pool into the theme's pool: theme-preconfigured
// entries (and theme-only widgets) are kept, user-added entries are appended,
// and singletons already on screen in the new theme are dropped.
_copyPoolToThemePool: function(currentAppConfig, themeAppConfig){
var cpool = currentAppConfig.widgetPool;
if(!themeAppConfig.widgetPool){
themeAppConfig.widgetPool = {};
}
var tpool = themeAppConfig.widgetPool;
//widgets/groups defined in theme
var themePoolWidgets = array.filter(tpool.widgets, function(tw){
if(tw.isPreconfiguredInTheme){
return true;
}
//widgets that exists in the theme only(added by user, not pre-configured)
if(!array.some(cpool.widgets, function(cw){
return cw.name === tw.name;
})){
return true;
}
});
var themePoolGroups = array.filter(tpool.groups, function(g){
return g.isPreconfiguredInTheme;
});
//widgets/groups are shared
var currentPoolWidgets = array.filter(cpool.widgets, function(w){
return !w.isPreconfiguredInTheme;
});
var currentPoolGroups = array.filter(cpool.groups, function(g){
return !g.isPreconfiguredInTheme;
});
currentPoolWidgets = this._getPoolWidgetsWithoutDuplicated(currentPoolWidgets,
themeAppConfig.widgetOnScreen.widgets || []);
tpool.widgets = currentPoolWidgets.concat(themePoolWidgets);
tpool.groups = currentPoolGroups.concat(themePoolGroups);
},
// Return a copy of currentPoolWidgets with non-multi-instance widgets removed
// when a widget of the same name is already on screen in the new theme.
_getPoolWidgetsWithoutDuplicated: function(currentPoolWidgets, themeOnScreeWidgets){
var ret = lang.clone(currentPoolWidgets);
var currentAppConfig = this.getAppConfig();
//we don't care groups and theme pool, because all in-panel widgets are not singleton
// Iterate backwards so splicing ret keeps indices aligned with the input.
for(var i = currentPoolWidgets.length - 1; i >= 0; i --){
for(var j = themeOnScreeWidgets.length - 1; j >= 0; j --){
if(!themeOnScreeWidgets[j].uri){
continue;
}
var wname = themeOnScreeWidgets[j].name;
if(!wname){
wname = jimuUtils.getWidgetNameFromUri(themeOnScreeWidgets[j].uri);
}
var wid = currentPoolWidgets[i].id;
var wjson = currentAppConfig.getConfigElementById(wid);
if(currentPoolWidgets[i] && currentPoolWidgets[i].name === wname &&
wjson.supportMultiInstance === false){
console.log('Widget', currentPoolWidgets[i].name,
'is not copied to new theme because this widget exists in new theme.');
ret.splice(i, 1);
}
}
}
return ret;
},
// Accept a complete config pushed by the builder (or unit tests), upgrade it,
// and publish either a reset (config already present) or the initial load.
_onAppConfigSet: function(c){
//summary:
// this method may be called by builder or UT
c = jimuUtils.reCreateObject(c);
window.appInfo.isRunInMobile = jimuUtils.inMobileSize();
this.configLoader.processProxy(c);
this.configLoader.addNeedValues(c);
this.configLoader.loadAndUpgradeAllWidgetsConfig(c).then(lang.hitch(this, function(c){
this._addDefaultValues(c);
tokenUtils.setPortalUrl(c.portalUrl);
window.portalUrl = c.portalUrl;
if(this.appConfig){
//remove the options that are relative to map's display when map is changed.
jimuUtils.deleteMapOptions(c.map.mapOptions);
this.appConfig = c;
topic.publish('appConfigChanged', this.getAppConfig(), 'resetConfig', c);
}else{
this.appConfig = c;
topic.publish("appConfigLoaded", this.getAppConfig());
}
}));
},
/**********************************************
* Add default values
************************************************/
// Fill in every framework default the rest of the app relies on (portal URL,
// geometry service, styles, map, visibility, panels, pool indexes).
// Mutates and returns the same config object.
_addDefaultValues: function(config) {
this._addDefaultPortalUrl(config);
this._addDefaultGeometryService(config);
this._addDefaultStyle(config);
this._addDefaultMap(config);
this._addDefaultVisible(config);
//preload widgets
if(typeof config.widgetOnScreen === 'undefined'){
config.widgetOnScreen = {};
}
if(typeof config.widgetPool === 'undefined'){
config.widgetPool = {};
}
this._addDefaultPanelAndPosition(config);
this._addDefaultOfWidgetGroup(config);
//if the first widget or first group doesn't have index property, we add it
if(config.widgetPool.widgets && config.widgetPool.widgets.length > 0 &&
config.widgetPool.widgets[0].index === undefined ||
config.widgetPool.groups && config.widgetPool.groups.length > 0 &&
config.widgetPool.groups[0].index === undefined){
this._addIndexForWidgetPool(config);
}
return config;
},
_addDefaultPortalUrl: function(config){
if(typeof config.portalUrl === 'undefined'){
config.portalUrl = 'http://www.arcgis.com/';
}
if(config.portalUrl && config.portalUrl.substr(config.portalUrl.length - 1) !== '/'){
config.portalUrl += '/';
}
},
// Ensure appConfig.geometryService is a usable URL (falling back to the
// portal's helper service) and register it as the esri global default.
_addDefaultGeometryService: function(appConfig){
var geoServiceUrl = appConfig && appConfig.geometryService;
var validGeoServiceUrl = geoServiceUrl && typeof geoServiceUrl === 'string' &&
lang.trim(geoServiceUrl);
if(validGeoServiceUrl){
geoServiceUrl = lang.trim(geoServiceUrl);
}
else{
//TODO this.portalSelf is null if app is loaded in builder.
//but we can ensure appConfig.geometryService is not null if app is created by builder,
//so this line will not be executed.
geoServiceUrl = this.portalSelf.helperServices.geometry.url;
}
appConfig.geometryService = geoServiceUrl;
esriConfig.defaults.geometryService = new GeometryService(appConfig.geometryService);
},
// Guarantee the theme lists at least the 'default' style.
_addDefaultStyle: function(config){
if(config.theme){
if(!config.theme.styles || config.theme.styles.length === 0){
config.theme.styles = ['default'];
}
}
},
_addDefaultMap: function(config){
config.map.id = 'map';
if(typeof config.map['3D'] === 'undefined' && typeof config.map['2D'] === 'undefined'){
config.map['2D'] = true;
}
if(typeof config.map.position === 'undefined'){
config.map.position = {
left: 0,
right: 0,
top: 0,
bottom: 0
};
}
if(typeof config.map.portalUrl === 'undefined'){
config.map.portalUrl = config.portalUrl;
}
},
_addDefaultVisible: function(config){
jimuUtils.visitElement(config, function(e){
if(e.visible === undefined){
e.visible = true;
}
});
},
// Fill in default panel and position info for both widget sections.
_addDefaultPanelAndPosition: function(config){
this._addOnScreenDefaultPanelAndPosition(config);
this._addPoolDefaultPanelAndPosition(config);
},
// Ensure the on-screen section, its widgets, and its groups all have a panel
// definition and a position.relativeTo (legacy positionRelativeTo is migrated
// into position.relativeTo; the fallback anchor is 'map').
_addOnScreenDefaultPanelAndPosition: function(config){
var i, j, screenSectionConfig = config.widgetOnScreen;
if(!screenSectionConfig){
return;
}
var panelDefaultPositionR =
screenSectionConfig.panel && screenSectionConfig.panel.positionRelativeTo?
screenSectionConfig.panel.positionRelativeTo: 'map';
if(typeof screenSectionConfig.panel === 'undefined' ||
typeof screenSectionConfig.panel.uri === 'undefined'){
screenSectionConfig.panel = {
uri: 'jimu/OnScreenWidgetPanel',
//positionRelativeTo: 'map',
position: {
//move positionRelativeTo to position.relativeTo
relativeTo: panelDefaultPositionR
}
};
}else if(typeof screenSectionConfig.panel.position === 'undefined'){
screenSectionConfig.panel.position = {relativeTo: panelDefaultPositionR};
}else if(typeof screenSectionConfig.panel.position.relativeTo === 'undefined'){
screenSectionConfig.panel.position.relativeTo = panelDefaultPositionR;
}
if(screenSectionConfig.widgets){
for(i = 0; i < screenSectionConfig.widgets.length; i++){
if(!screenSectionConfig.widgets[i].position){
screenSectionConfig.widgets[i].position = {};
}
if(!screenSectionConfig.widgets[i].position.relativeTo){
screenSectionConfig.widgets[i].position.relativeTo =
screenSectionConfig.widgets[i] && screenSectionConfig.widgets[i].positionRelativeTo?
screenSectionConfig.widgets[i].positionRelativeTo: 'map';
}
// In-panel widgets get a private clone of the section panel so their
// position edits stay local.
if(screenSectionConfig.widgets[i].inPanel === true &&
!screenSectionConfig.widgets[i].panel){
screenSectionConfig.widgets[i].panel = lang.clone(screenSectionConfig.panel);
screenSectionConfig.widgets[i].panel.position = screenSectionConfig.widgets[i].position;
screenSectionConfig.widgets[i].panel.position.relativeTo =
screenSectionConfig.widgets[i].position.relativeTo;
}
}
}
if(screenSectionConfig.groups){
for(i = 0; i < screenSectionConfig.groups.length; i++){
// NOTE(review): unlike the widget branch above, the group is given the
// section panel BY REFERENCE (no lang.clone); the mutations below then
// also touch the shared default panel — confirm this sharing is intended.
if(!screenSectionConfig.groups[i].panel){
screenSectionConfig.groups[i].panel = screenSectionConfig.panel;
}
if(screenSectionConfig.groups[i].panel && !screenSectionConfig.groups[i].panel.position){
screenSectionConfig.groups[i].panel.position = {};
}
if(!screenSectionConfig.groups[i].panel.position.relativeTo){
screenSectionConfig.groups[i].panel.position.relativeTo =
screenSectionConfig.groups[i].panel.positionRelativeTo?
screenSectionConfig.groups[i].panel.positionRelativeTo:'map';
}
if(!screenSectionConfig.groups[i].widgets){
screenSectionConfig.groups[i].widgets = [];
}
for(j = 0; j < screenSectionConfig.groups[i].widgets.length; j++){
screenSectionConfig.groups[i].widgets[j].panel = screenSectionConfig.groups[i].panel;
}
}
}
},
// Ensure the widget-pool section, its groups, and its widgets have panel and
// position defaults. Off-panel widgets get a position (anchor defaults to
// 'map'); in-panel widgets and groups inherit the section panel.
_addPoolDefaultPanelAndPosition: function(config){
var i, j, poolSectionConfig = config.widgetPool;
if(!poolSectionConfig){
return;
}
var panelDefaultPositionR =
poolSectionConfig.panel && poolSectionConfig.panel.positionRelativeTo?
poolSectionConfig.panel.positionRelativeTo: 'map';
if(typeof poolSectionConfig.panel === 'undefined' ||
typeof poolSectionConfig.panel.uri === 'undefined'){
poolSectionConfig.panel = {
uri: 'jimu/OnScreenWidgetPanel',
position: {
relativeTo: panelDefaultPositionR
}
};
}else if(typeof poolSectionConfig.panel.position === 'undefined'){
poolSectionConfig.panel.position = {relativeTo: panelDefaultPositionR};
}else if(typeof poolSectionConfig.panel.position.relativeTo === 'undefined'){
poolSectionConfig.panel.position.relativeTo = panelDefaultPositionR;
}
if(poolSectionConfig.groups){
for(i = 0; i < poolSectionConfig.groups.length; i++){
// NOTE(review): the section panel is assigned by reference (not cloned),
// so groups share one panel object — confirm this sharing is intended.
if(!poolSectionConfig.groups[i].panel){
poolSectionConfig.groups[i].panel = poolSectionConfig.panel;
}else if(!poolSectionConfig.groups[i].panel.position.relativeTo){
poolSectionConfig.groups[i].panel.position.relativeTo =
poolSectionConfig.groups[i].panel.positionRelativeTo?
poolSectionConfig.groups[i].panel.positionRelativeTo: 'map';
}
if(!poolSectionConfig.groups[i].widgets){
poolSectionConfig.groups[i].widgets = [];
}
for(j = 0; j < poolSectionConfig.groups[i].widgets.length; j++){
poolSectionConfig.groups[i].widgets[j].panel = poolSectionConfig.groups[i].panel;
}
}
}
if(poolSectionConfig.widgets){
for(i = 0; i < poolSectionConfig.widgets.length; i++){
if(poolSectionConfig.widgets[i].inPanel === false){
var defaultWidgetPositionR = poolSectionConfig.widgets[i].positionRelativeTo?
poolSectionConfig.widgets[i].positionRelativeTo: 'map';
if(!poolSectionConfig.widgets[i].position){
poolSectionConfig.widgets[i].position = {
relativeTo: defaultWidgetPositionR
};
}else if(!poolSectionConfig.widgets[i].position.relativeTo){
poolSectionConfig.widgets[i].position.relativeTo = defaultWidgetPositionR;
}
}else if(!poolSectionConfig.widgets[i].panel){
poolSectionConfig.widgets[i].panel = config.widgetPool.panel;
}
}
}
},
// Stamp every element with its section (isOnScreen) and group id, and give
// groups a default label and icon (single-widget groups borrow theirs from
// the contained widget).
_addDefaultOfWidgetGroup: function(config){
//group/widget labe, icon
jimuUtils.visitElement(config, lang.hitch(this, function(e, info){
e.isOnScreen = info.isOnScreen;
if(e.widgets){
//it's group
e.gid = e.id;
if(e.widgets.length === 1){
if(!e.label){
e.label = e.widgets[0].label? e.widgets[0].label: 'Group';
}
if(!e.icon){
if(e.widgets[0].uri){
e.icon = this._getDefaultIconFromUri(e.widgets[0].uri);
}else{
e.icon = 'jimu.js/images/group_icon.png';
}
}
}else{
e.icon = e.icon? e.icon: 'jimu.js/images/group_icon.png';
e.label = e.label? e.label: 'Group_' + info.index;
}
}else{
e.gid = info.groupId;
}
}));
},
// Derive a widget's default icon path from its uri, with a cache-busting
// deploy-version query string.
_getDefaultIconFromUri: function(uri){
var segs = uri.split('/');
segs.pop();
return segs.join('/') + '/images/icon.png?wab_dv=' + window.deployVersion;
},
_addIndexForWidgetPool: function(config){
//be default, widgets are in front
var index = 0, i, j;
if(config.widgetPool.widgets){
for(i = 0; i < config.widgetPool.widgets.length; i++){
config.widgetPool.widgets[i].index = index;
index ++;
}
}
if(config.widgetPool.groups){
for(i = 0; i < config.widgetPool.groups.length; i++){
config.widgetPool.groups[i].index = index;
index ++;
for(j = 0; j < config.widgetPool.groups[i].widgets.length; j++){
config.widgetPool.groups[i].widgets[j].index = j;
}
}
}
}
});
// Singleton accessor. The first call creates the instance; later calls may
// refresh the URL parameters. Also exposes window.getAppConfig for debugging.
clazz.getInstance = function (urlParams) {
if(instance === null) {
instance = new clazz(urlParams);
}else{
if(urlParams){
instance.urlParams = urlParams;
if(instance.configLoader){
instance.configLoader.urlParams = urlParams;
}
}
}
window.getAppConfig = lang.hitch(instance, instance.getAppConfig);
return instance;
};
// Return a deep copy of the config with every framework-added property
// stripped, suitable for persisting. When isForAGOLTemplate is true some
// user-facing properties (visible, label, openAtStart) are kept/normalized.
function getCleanConfig(config, isForAGOLTemplate){
//delete the properties that framework add
var newConfig = lang.clone(config);
var properties = jimuUtils.widgetProperties;
if(typeof isForAGOLTemplate === 'undefined'){
isForAGOLTemplate = false;
}
delete newConfig.mode;
jimuUtils.visitElement(newConfig, function(e, info){
if(e.widgets){
// Group element: drop derived fields and the default icon/panel.
delete e.isOnScreen;
delete e.gid;
if(e.icon === 'jimu.js/images/group_icon.png'){
delete e.icon;
}
delete e.openType;
if(info.isOnScreen){
if(e.panel && jimuUtils.isEqual(e.panel, newConfig.widgetOnScreen.panel)){
delete e.panel;
}
}
return;
}
// Widget element: drop the default icon and every runtime-only field.
if(e.icon && e.icon === e.folderUrl + 'images/icon.png?wab_dv=' + window.deployVersion){
delete e.icon;
}
delete e.panel;
delete e.folderUrl;
delete e.amdFolder;
delete e.thumbnail;
delete e.configFile;
delete e.gid;
delete e.isOnScreen;
delete e.isRemote;
properties.forEach(function(p){
delete e[p];
});
if(!isForAGOLTemplate){
if(e.visible){
delete e.visible;
}
if(e.manifest && e.label === e.manifest.label){
delete e.label;
}
if(e.isDefaultConfig){
delete e.config;
delete e.isDefaultConfig;
}
}else{
if(typeof e.openAtStart === 'undefined'){
e.openAtStart = false;
}
}
delete e.manifest;
});
delete newConfig.rawAppConfig;
//the _ssl property is added by esriRequest
delete newConfig._ssl;
//delete all of the methods
delete newConfig.getConfigElementById;
delete newConfig.getConfigElementsByName;
delete newConfig.processNoUriWidgets;
delete newConfig.addElementId;
delete newConfig.getCleanConfig;
delete newConfig.visitElement;
delete newConfig.agolConfig;
delete newConfig._itemData;
delete newConfig.oldWabVersion;
return newConfig;
}
return clazz;
});
| cmccullough2/cmv-wab-widgets | wab/2.3/jimu.js/ConfigManager.js | JavaScript | mit | 39,282 |
'use strict';
var fs = require('fs');
var path = require('path');
var walk = require('pug-walk');
module.exports = load;
// Recursively resolve and read every Include/Extends file referenced by the
// AST, attaching fullPath, str, and (for template files) a parsed sub-AST to
// each FileReference node. Returns a new AST; the input is not mutated.
// Fix: the local variable was named `path`, shadowing the module-level
// `path` (Node's path module) inside this callback; renamed to avoid it.
function load(ast, options) {
  load.validateOptions(options);
  // clone the ast so the caller's tree is never mutated
  ast = JSON.parse(JSON.stringify(ast));
  return walk(ast, function (node) {
    if (node.str === undefined) {
      if (node.type === 'Include' || node.type === 'Extends') {
        var file = node.file;
        if (file.type !== 'FileReference') {
          throw new Error('Expected file.type to be "FileReference"');
        }
        var resolvedPath, str;
        try {
          resolvedPath = load.resolve(file.path, file.filename, options);
          file.fullPath = resolvedPath;
          str = load.read(resolvedPath, options);
        } catch (ex) {
          // Annotate resolution/read failures with the including location.
          ex.message += '\n at ' + node.filename + ' line ' + node.line;
          throw ex;
        }
        file.str = str;
        // Extends are always templates; includes only when unfiltered .jade.
        if (node.type === 'Extends' || (!node.filter && /\.jade$/.test(resolvedPath))) {
          file.ast = load.string(str, resolvedPath, options);
        }
      }
    }
  });
}
load.string = function loadString(str, filename, options) {
  // Lex and parse the source, then recursively resolve its includes/extends.
  load.validateOptions(options);
  var ast = options.parse(options.lex(str, filename), filename);
  return load(ast, options);
};
load.file = function loadFile(filename, options) {
  // Read the file from disk, then process it exactly like an in-memory string.
  load.validateOptions(options);
  return load.string(load.read(filename, options), filename, options);
};
load.resolve = function resolve(filename, source, options) {
  // A user-supplied resolver takes precedence over the default behaviour.
  if (options && options.resolve) return options.resolve(filename, source, options);
  filename = filename.trim();
  source = source.trim();

  var isAbsolute = filename[0] === '/';
  if (!isAbsolute && !source) {
    throw new Error('the "filename" option is required to use includes and extends with "relative" paths');
  }
  if (isAbsolute && !options.basedir) {
    throw new Error('the "basedir" option is required to use includes and extends with "absolute" paths');
  }

  // Absolute paths are rooted at basedir; relative ones at the including file.
  var base = isAbsolute ? options.basedir : path.dirname(source);
  filename = path.join(base, filename);
  // Default to the ".jade" extension when none was given.
  if (path.basename(filename).indexOf('.') === -1) filename += '.jade';
  return filename;
};
load.read = function read(filename, options) {
  // Allow the caller to override how sources are read (e.g. from memory).
  if (options && options.read) {
    return options.read(filename, options);
  }
  return fs.readFileSync(filename, 'utf8');
};
load.validateOptions = function validateOptions(options) {
  // `lex` and `parse` are mandatory; `resolve` and `read` are optional hooks.
  ['lex', 'parse'].forEach(function (name) {
    if (typeof options[name] !== 'function') {
      throw new TypeError('options.' + name + ' must be a function');
    }
  });
  ['resolve', 'read'].forEach(function (name) {
    if (options[name] && typeof options[name] !== 'function') {
      throw new TypeError('options.' + name + ' must be a function');
    }
  });
};
| ajredovan/.tour | tour_v1/node_modules/pug-loader/index.js | JavaScript | mit | 2,820 |
namespace TestStack.BDDfy
{
    /// <summary>
    /// Marks a method as an additional "Given" step; the step executes with
    /// <see cref="ExecutionOrder.ConsecutiveSetupState"/>, i.e. immediately
    /// after the preceding setup step.
    /// </summary>
    public class AndGivenAttribute : ExecutableAttribute
    {
        /// <summary>Uses a <c>null</c> title, so the step title is derived elsewhere.</summary>
        public AndGivenAttribute() : this(null) { }
        /// <summary>Creates the attribute with an explicit step title.</summary>
        public AndGivenAttribute(string stepTitle) : base(ExecutionOrder.ConsecutiveSetupState, stepTitle) { }
    }
}
} | sebgod/TestStack.BDDfy | src/TestStack.BDDfy/Scanners/StepScanners/ExecutableAttribute/GwtAttributes/AndGivenAttribute.cs | C# | mit | 264 |
package org.spongycastle.crypto.engines;
import org.spongycastle.crypto.BlockCipher;
import org.spongycastle.crypto.CipherParameters;
import org.spongycastle.crypto.DataLengthException;
import org.spongycastle.crypto.params.KeyParameter;
import org.spongycastle.crypto.params.TweakableBlockCipherParameters;
/**
* Implementation of the Threefish tweakable large block cipher in 256, 512 and 1024 bit block
* sizes.
* <p>
* This is the 1.3 version of Threefish defined in the Skein hash function submission to the NIST
* SHA-3 competition in October 2010.
* <p>
* Threefish was designed by Niels Ferguson - Stefan Lucks - Bruce Schneier - Doug Whiting - Mihir
* Bellare - Tadayoshi Kohno - Jon Callas - Jesse Walker.
* <p>
* This implementation inlines all round functions, unrolls 8 rounds, and uses 1.2k of static tables
* to speed up key schedule injection. <br>
* 2 x block size state is retained by each cipher instance.
*/
public class ThreefishEngine
implements BlockCipher
{
/**
* 256 bit block size - Threefish-256
*/
public static final int BLOCKSIZE_256 = 256;
/**
* 512 bit block size - Threefish-512
*/
public static final int BLOCKSIZE_512 = 512;
/**
* 1024 bit block size - Threefish-1024
*/
public static final int BLOCKSIZE_1024 = 1024;
/**
* Size of the tweak in bytes (always 128 bit/16 bytes)
*/
private static final int TWEAK_SIZE_BYTES = 16;
private static final int TWEAK_SIZE_WORDS = TWEAK_SIZE_BYTES / 8;
/**
* Rounds in Threefish-256
*/
private static final int ROUNDS_256 = 72;
/**
* Rounds in Threefish-512
*/
private static final int ROUNDS_512 = 72;
/**
* Rounds in Threefish-1024
*/
private static final int ROUNDS_1024 = 80;
/**
* Max rounds of any of the variants
*/
private static final int MAX_ROUNDS = ROUNDS_1024;
/**
* Key schedule parity constant
*/
private static final long C_240 = 0x1BD11BDAA9FC1A22L;
/* Pre-calculated modulo arithmetic tables for key schedule lookups */
private static int[] MOD9 = new int[MAX_ROUNDS];
private static int[] MOD17 = new int[MOD9.length];
private static int[] MOD5 = new int[MOD9.length];
private static int[] MOD3 = new int[MOD9.length];
    // Fill the lookup tables with i mod {17, 9, 5, 3} for every possible round
    // index, so subkey injection can use a table read instead of a `%`.
    static
    {
        for (int i = 0; i < MOD9.length; i++)
        {
            MOD17[i] = i % 17;
            MOD9[i] = i % 9;
            MOD5[i] = i % 5;
            MOD3[i] = i % 3;
        }
    }
/**
* Block size in bytes
*/
private int blocksizeBytes;
/**
* Block size in 64 bit words
*/
private int blocksizeWords;
/**
* Buffer for byte oriented processBytes to call internal word API
*/
private long[] currentBlock;
/**
* Tweak bytes (2 byte t1,t2, calculated t3 and repeat of t1,t2 for modulo free lookup
*/
private long[] t = new long[5];
/**
* Key schedule words
*/
private long[] kw;
/**
* The internal cipher implementation (varies by blocksize)
*/
private ThreefishCipher cipher;
private boolean forEncryption;
/**
* Constructs a new Threefish cipher, with a specified block size.
*
* @param blocksizeBits the block size in bits, one of {@link #BLOCKSIZE_256}, {@link #BLOCKSIZE_512},
* {@link #BLOCKSIZE_1024}.
*/
    public ThreefishEngine(final int blocksizeBits)
    {
        // Derived sizes: bytes = bits / 8, 64 bit words = bytes / 8.
        this.blocksizeBytes = (blocksizeBits / 8);
        this.blocksizeWords = (this.blocksizeBytes / 8);
        this.currentBlock = new long[blocksizeWords];

        /*
         * Provide room for original key words, extended key word and repeat of key words for modulo
         * free lookup of key schedule words.
         */
        this.kw = new long[2 * blocksizeWords + 1];
        // Select the variant-specific implementation; it shares kw/t with this engine.
        switch (blocksizeBits)
        {
        case BLOCKSIZE_256:
            cipher = new Threefish256Cipher(kw, t);
            break;
        case BLOCKSIZE_512:
            cipher = new Threefish512Cipher(kw, t);
            break;
        case BLOCKSIZE_1024:
            cipher = new Threefish1024Cipher(kw, t);
            break;
        default:
            throw new IllegalArgumentException(
                "Invalid blocksize - Threefish is defined with block size of 256, 512, or 1024 bits");
        }
    }
/**
* Initialise the engine.
*
* @param params an instance of {@link TweakableBlockCipherParameters}, or {@link KeyParameter} (to
* use a 0 tweak)
*/
public void init(boolean forEncryption, CipherParameters params)
throws IllegalArgumentException
{
final byte[] keyBytes;
final byte[] tweakBytes;
if (params instanceof TweakableBlockCipherParameters)
{
TweakableBlockCipherParameters tParams = (TweakableBlockCipherParameters)params;
keyBytes = tParams.getKey().getKey();
tweakBytes = tParams.getTweak();
}
else if (params instanceof KeyParameter)
{
keyBytes = ((KeyParameter)params).getKey();
tweakBytes = null;
}
else
{
throw new IllegalArgumentException("Invalid parameter passed to Threefish init - "
+ params.getClass().getName());
}
long[] keyWords = null;
long[] tweakWords = null;
if (keyBytes != null)
{
if (keyBytes.length != this.blocksizeBytes)
{
throw new IllegalArgumentException("Threefish key must be same size as block (" + blocksizeBytes
+ " bytes)");
}
keyWords = new long[blocksizeWords];
for (int i = 0; i < keyWords.length; i++)
{
keyWords[i] = bytesToWord(keyBytes, i * 8);
}
}
if (tweakBytes != null)
{
if (tweakBytes.length != TWEAK_SIZE_BYTES)
{
throw new IllegalArgumentException("Threefish tweak must be " + TWEAK_SIZE_BYTES + " bytes");
}
tweakWords = new long[]{bytesToWord(tweakBytes, 0), bytesToWord(tweakBytes, 8)};
}
init(forEncryption, keyWords, tweakWords);
}
/**
* Initialise the engine, specifying the key and tweak directly.
*
* @param forEncryption the cipher mode.
* @param key the words of the key, or <code>null</code> to use the current key.
* @param tweak the 2 word (128 bit) tweak, or <code>null</code> to use the current tweak.
*/
    public void init(boolean forEncryption, final long[] key, final long[] tweak)
    {
        this.forEncryption = forEncryption;
        // A null key or tweak leaves the previously configured value in place.
        if (key != null)
        {
            setKey(key);
        }
        if (tweak != null)
        {
            setTweak(tweak);
        }
    }
private void setKey(long[] key)
{
if (key.length != this.blocksizeWords)
{
throw new IllegalArgumentException("Threefish key must be same size as block (" + blocksizeWords
+ " words)");
}
/*
* Full subkey schedule is deferred to execution to avoid per cipher overhead (10k for 512,
* 20k for 1024).
*
* Key and tweak word sequences are repeated, and static MOD17/MOD9/MOD5/MOD3 calculations
* used, to avoid expensive mod computations during cipher operation.
*/
long knw = C_240;
for (int i = 0; i < blocksizeWords; i++)
{
kw[i] = key[i];
knw = knw ^ kw[i];
}
kw[blocksizeWords] = knw;
System.arraycopy(kw, 0, kw, blocksizeWords + 1, blocksizeWords);
}
private void setTweak(long[] tweak)
{
if (tweak.length != TWEAK_SIZE_WORDS)
{
throw new IllegalArgumentException("Tweak must be " + TWEAK_SIZE_WORDS + " words.");
}
/*
* Tweak schedule partially repeated to avoid mod computations during cipher operation
*/
t[0] = tweak[0];
t[1] = tweak[1];
t[2] = t[0] ^ t[1];
t[3] = t[0];
t[4] = t[1];
}
public String getAlgorithmName()
{
return "Threefish-" + (blocksizeBytes * 8);
}
    // Block size in bytes (32, 64 or 128 depending on the variant).
    public int getBlockSize()
    {
        return blocksizeBytes;
    }
    // Intentionally a no-op: the engine keeps no chaining state between blocks.
    public void reset()
    {
    }
public int processBlock(byte[] in, int inOff, byte[] out, int outOff)
throws DataLengthException,
IllegalStateException
{
if ((outOff + blocksizeBytes) > out.length)
{
throw new DataLengthException("Output buffer too short");
}
if ((inOff + blocksizeBytes) > in.length)
{
throw new DataLengthException("Input buffer too short");
}
for (int i = 0; i < blocksizeBytes; i += 8)
{
currentBlock[i >> 3] = bytesToWord(in, inOff + i);
}
processBlock(this.currentBlock, this.currentBlock);
for (int i = 0; i < blocksizeBytes; i += 8)
{
wordToBytes(this.currentBlock[i >> 3], out, outOff + i);
}
return blocksizeBytes;
}
/**
* Process a block of data represented as 64 bit words.
*
* @param in a block sized buffer of words to process.
* @param out a block sized buffer of words to receive the output of the operation.
* @return the number of 8 byte words processed (which will be the same as the block size).
* @throws DataLengthException if either the input or output is not block sized.
* @throws IllegalStateException if this engine is not initialised.
*/
    public int processBlock(long[] in, long[] out)
        throws DataLengthException, IllegalStateException
    {
        // kw[blocksizeWords] is the key schedule parity word written by setKey,
        // used here as the "initialised" probe.
        // NOTE(review): a key whose words XOR together to C_240 would make this
        // parity word zero and be misreported as uninitialised - confirm intent.
        if (kw[blocksizeWords] == 0)
        {
            throw new IllegalStateException("Threefish engine not initialised");
        }
        if (in.length != blocksizeWords)
        {
            throw new DataLengthException("Input buffer too short");
        }
        if (out.length != blocksizeWords)
        {
            throw new DataLengthException("Output buffer too short");
        }
        if (forEncryption)
        {
            cipher.encryptBlock(in, out);
        }
        else
        {
            cipher.decryptBlock(in, out);
        }
        return blocksizeWords;
    }
/**
* Read a single 64 bit word from input in LSB first order.
*/
// At least package protected for efficient access from inner class
public static long bytesToWord(final byte[] bytes, final int off)
{
if ((off + 8) > bytes.length)
{
// Help the JIT avoid index checks
throw new IllegalArgumentException();
}
long word = 0;
int index = off;
word = (bytes[index++] & 0xffL);
word |= (bytes[index++] & 0xffL) << 8;
word |= (bytes[index++] & 0xffL) << 16;
word |= (bytes[index++] & 0xffL) << 24;
word |= (bytes[index++] & 0xffL) << 32;
word |= (bytes[index++] & 0xffL) << 40;
word |= (bytes[index++] & 0xffL) << 48;
word |= (bytes[index++] & 0xffL) << 56;
return word;
}
/**
* Write a 64 bit word to output in LSB first order.
*/
// At least package protected for efficient access from inner class
public static void wordToBytes(final long word, final byte[] bytes, final int off)
{
if ((off + 8) > bytes.length)
{
// Help the JIT avoid index checks
throw new IllegalArgumentException();
}
int index = off;
bytes[index++] = (byte)word;
bytes[index++] = (byte)(word >> 8);
bytes[index++] = (byte)(word >> 16);
bytes[index++] = (byte)(word >> 24);
bytes[index++] = (byte)(word >> 32);
bytes[index++] = (byte)(word >> 40);
bytes[index++] = (byte)(word >> 48);
bytes[index++] = (byte)(word >> 56);
}
/**
* Rotate left + xor part of the mix operation.
*/
// Package protected for efficient access from inner class
static long rotlXor(long x, int n, long xor)
{
return ((x << n) | (x >>> -n)) ^ xor;
}
/**
* Rotate xor + rotate right part of the unmix operation.
*/
// Package protected for efficient access from inner class
static long xorRotr(long x, int n, long xor)
{
long xored = x ^ xor;
return (xored >>> n) | (xored << -n);
}
    /**
     * Base class for the per-blocksize implementations. Subclasses read the
     * key schedule ({@code kw}) and tweak schedule ({@code t}) arrays shared
     * with the outer engine.
     */
    private static abstract class ThreefishCipher
    {
        /**
         * The extended + repeated tweak words
         */
        protected final long[] t;
        /**
         * The extended + repeated key words
         */
        protected final long[] kw;

        protected ThreefishCipher(final long[] kw, final long[] t)
        {
            this.kw = kw;
            this.t = t;
        }

        // Transform one block of words; callers may pass the same array for
        // block and out (the engine does so for in-place operation).
        abstract void encryptBlock(long[] block, long[] out);

        abstract void decryptBlock(long[] block, long[] out);
    }
    /**
     * Threefish-256: 4 x 64 bit words of state, 72 rounds.
     */
    private static final class Threefish256Cipher
        extends ThreefishCipher
    {
        /**
         * Mix rotation constants defined in Skein 1.3 specification
         */
        private static final int ROTATION_0_0 = 14, ROTATION_0_1 = 16;
        private static final int ROTATION_1_0 = 52, ROTATION_1_1 = 57;
        private static final int ROTATION_2_0 = 23, ROTATION_2_1 = 40;
        private static final int ROTATION_3_0 = 5, ROTATION_3_1 = 37;
        private static final int ROTATION_4_0 = 25, ROTATION_4_1 = 33;
        private static final int ROTATION_5_0 = 46, ROTATION_5_1 = 12;
        private static final int ROTATION_6_0 = 58, ROTATION_6_1 = 22;
        private static final int ROTATION_7_0 = 32, ROTATION_7_1 = 32;

        public Threefish256Cipher(long[] kw, long[] t)
        {
            super(kw, t);
        }

        void encryptBlock(long[] block, long[] out)
        {
            final long[] kw = this.kw;
            final long[] t = this.t;
            final int[] mod5 = MOD5;
            final int[] mod3 = MOD3;

            /* Help the JIT avoid index bounds checks */
            if (kw.length != 9)
            {
                throw new IllegalArgumentException();
            }
            if (t.length != 5)
            {
                throw new IllegalArgumentException();
            }

            /*
             * Read 4 words of plaintext data, not using arrays for cipher state
             */
            long b0 = block[0];
            long b1 = block[1];
            long b2 = block[2];
            long b3 = block[3];

            /*
             * First subkey injection.
             */
            b0 += kw[0];
            b1 += kw[1] + t[0];
            b2 += kw[2] + t[1];
            b3 += kw[3];

            /*
             * Rounds loop, unrolled to 8 rounds per iteration.
             *
             * Unrolling to multiples of 4 avoids the mod 4 check for key injection, and allows
             * inlining of the permutations, which cycle every 2 rounds (avoiding array
             * index/lookup).
             *
             * Unrolling to multiples of 8 avoids the mod 8 rotation constant lookup, and allows
             * inlining constant rotation values (avoiding array index/lookup).
             */
            for (int d = 1; d < (ROUNDS_256 / 4); d += 2)
            {
                final int dm5 = mod5[d];
                final int dm3 = mod3[d];

                /*
                 * 4 rounds of mix and permute.
                 *
                 * Permute schedule has a 2 round cycle, so permutes are inlined in the mix
                 * operations in each 4 round block.
                 */
                b1 = rotlXor(b1, ROTATION_0_0, b0 += b1);
                b3 = rotlXor(b3, ROTATION_0_1, b2 += b3);

                b3 = rotlXor(b3, ROTATION_1_0, b0 += b3);
                b1 = rotlXor(b1, ROTATION_1_1, b2 += b1);

                b1 = rotlXor(b1, ROTATION_2_0, b0 += b1);
                b3 = rotlXor(b3, ROTATION_2_1, b2 += b3);

                b3 = rotlXor(b3, ROTATION_3_0, b0 += b3);
                b1 = rotlXor(b1, ROTATION_3_1, b2 += b1);

                /*
                 * Subkey injection for first 4 rounds.
                 */
                b0 += kw[dm5];
                b1 += kw[dm5 + 1] + t[dm3];
                b2 += kw[dm5 + 2] + t[dm3 + 1];
                b3 += kw[dm5 + 3] + d;

                /*
                 * 4 more rounds of mix/permute
                 */
                b1 = rotlXor(b1, ROTATION_4_0, b0 += b1);
                b3 = rotlXor(b3, ROTATION_4_1, b2 += b3);

                b3 = rotlXor(b3, ROTATION_5_0, b0 += b3);
                b1 = rotlXor(b1, ROTATION_5_1, b2 += b1);

                b1 = rotlXor(b1, ROTATION_6_0, b0 += b1);
                b3 = rotlXor(b3, ROTATION_6_1, b2 += b3);

                b3 = rotlXor(b3, ROTATION_7_0, b0 += b3);
                b1 = rotlXor(b1, ROTATION_7_1, b2 += b1);

                /*
                 * Subkey injection for next 4 rounds.
                 */
                b0 += kw[dm5 + 1];
                b1 += kw[dm5 + 2] + t[dm3 + 1];
                b2 += kw[dm5 + 3] + t[dm3 + 2];
                b3 += kw[dm5 + 4] + d + 1;
            }

            /*
             * Output cipher state.
             */
            out[0] = b0;
            out[1] = b1;
            out[2] = b2;
            out[3] = b3;
        }

        void decryptBlock(long[] block, long[] state)
        {
            final long[] kw = this.kw;
            final long[] t = this.t;
            final int[] mod5 = MOD5;
            final int[] mod3 = MOD3;

            /* Help the JIT avoid index bounds checks */
            if (kw.length != 9)
            {
                throw new IllegalArgumentException();
            }
            if (t.length != 5)
            {
                throw new IllegalArgumentException();
            }

            long b0 = block[0];
            long b1 = block[1];
            long b2 = block[2];
            long b3 = block[3];

            // Exact inverse of encryptBlock: subkeys removed and mixes undone
            // in reverse order.
            for (int d = (ROUNDS_256 / 4) - 1; d >= 1; d -= 2)
            {
                final int dm5 = mod5[d];
                final int dm3 = mod3[d];

                /* Reverse key injection for second 4 rounds */
                b0 -= kw[dm5 + 1];
                b1 -= kw[dm5 + 2] + t[dm3 + 1];
                b2 -= kw[dm5 + 3] + t[dm3 + 2];
                b3 -= kw[dm5 + 4] + d + 1;

                /* Reverse second 4 mix/permute rounds */
                b3 = xorRotr(b3, ROTATION_7_0, b0);
                b0 -= b3;
                b1 = xorRotr(b1, ROTATION_7_1, b2);
                b2 -= b1;

                b1 = xorRotr(b1, ROTATION_6_0, b0);
                b0 -= b1;
                b3 = xorRotr(b3, ROTATION_6_1, b2);
                b2 -= b3;

                b3 = xorRotr(b3, ROTATION_5_0, b0);
                b0 -= b3;
                b1 = xorRotr(b1, ROTATION_5_1, b2);
                b2 -= b1;

                b1 = xorRotr(b1, ROTATION_4_0, b0);
                b0 -= b1;
                b3 = xorRotr(b3, ROTATION_4_1, b2);
                b2 -= b3;

                /* Reverse key injection for first 4 rounds */
                b0 -= kw[dm5];
                b1 -= kw[dm5 + 1] + t[dm3];
                b2 -= kw[dm5 + 2] + t[dm3 + 1];
                b3 -= kw[dm5 + 3] + d;

                /* Reverse first 4 mix/permute rounds */
                b3 = xorRotr(b3, ROTATION_3_0, b0);
                b0 -= b3;
                b1 = xorRotr(b1, ROTATION_3_1, b2);
                b2 -= b1;

                b1 = xorRotr(b1, ROTATION_2_0, b0);
                b0 -= b1;
                b3 = xorRotr(b3, ROTATION_2_1, b2);
                b2 -= b3;

                b3 = xorRotr(b3, ROTATION_1_0, b0);
                b0 -= b3;
                b1 = xorRotr(b1, ROTATION_1_1, b2);
                b2 -= b1;

                b1 = xorRotr(b1, ROTATION_0_0, b0);
                b0 -= b1;
                b3 = xorRotr(b3, ROTATION_0_1, b2);
                b2 -= b3;
            }

            /*
             * First subkey uninjection.
             */
            b0 -= kw[0];
            b1 -= kw[1] + t[0];
            b2 -= kw[2] + t[1];
            b3 -= kw[3];

            /*
             * Output cipher state.
             */
            state[0] = b0;
            state[1] = b1;
            state[2] = b2;
            state[3] = b3;
        }
    }
    /**
     * Threefish-512: 8 x 64 bit words of state, 72 rounds.
     */
    private static final class Threefish512Cipher
        extends ThreefishCipher
    {
        /**
         * Mix rotation constants defined in Skein 1.3 specification
         */
        private static final int ROTATION_0_0 = 46, ROTATION_0_1 = 36, ROTATION_0_2 = 19, ROTATION_0_3 = 37;
        private static final int ROTATION_1_0 = 33, ROTATION_1_1 = 27, ROTATION_1_2 = 14, ROTATION_1_3 = 42;
        private static final int ROTATION_2_0 = 17, ROTATION_2_1 = 49, ROTATION_2_2 = 36, ROTATION_2_3 = 39;
        private static final int ROTATION_3_0 = 44, ROTATION_3_1 = 9, ROTATION_3_2 = 54, ROTATION_3_3 = 56;
        private static final int ROTATION_4_0 = 39, ROTATION_4_1 = 30, ROTATION_4_2 = 34, ROTATION_4_3 = 24;
        private static final int ROTATION_5_0 = 13, ROTATION_5_1 = 50, ROTATION_5_2 = 10, ROTATION_5_3 = 17;
        private static final int ROTATION_6_0 = 25, ROTATION_6_1 = 29, ROTATION_6_2 = 39, ROTATION_6_3 = 43;
        private static final int ROTATION_7_0 = 8, ROTATION_7_1 = 35, ROTATION_7_2 = 56, ROTATION_7_3 = 22;

        protected Threefish512Cipher(long[] kw, long[] t)
        {
            super(kw, t);
        }

        public void encryptBlock(long[] block, long[] out)
        {
            final long[] kw = this.kw;
            final long[] t = this.t;
            final int[] mod9 = MOD9;
            final int[] mod3 = MOD3;

            /* Help the JIT avoid index bounds checks */
            if (kw.length != 17)
            {
                throw new IllegalArgumentException();
            }
            if (t.length != 5)
            {
                throw new IllegalArgumentException();
            }

            /*
             * Read 8 words of plaintext data, not using arrays for cipher state
             */
            long b0 = block[0];
            long b1 = block[1];
            long b2 = block[2];
            long b3 = block[3];
            long b4 = block[4];
            long b5 = block[5];
            long b6 = block[6];
            long b7 = block[7];

            /*
             * First subkey injection.
             */
            b0 += kw[0];
            b1 += kw[1];
            b2 += kw[2];
            b3 += kw[3];
            b4 += kw[4];
            b5 += kw[5] + t[0];
            b6 += kw[6] + t[1];
            b7 += kw[7];

            /*
             * Rounds loop, unrolled to 8 rounds per iteration.
             *
             * Unrolling to multiples of 4 avoids the mod 4 check for key injection, and allows
             * inlining of the permutations, which cycle every 4 rounds (avoiding array
             * index/lookup).
             *
             * Unrolling to multiples of 8 avoids the mod 8 rotation constant lookup, and allows
             * inlining constant rotation values (avoiding array index/lookup).
             */
            for (int d = 1; d < (ROUNDS_512 / 4); d += 2)
            {
                final int dm9 = mod9[d];
                final int dm3 = mod3[d];

                /*
                 * 4 rounds of mix and permute.
                 *
                 * Permute schedule has a 4 round cycle, so permutes are inlined in the mix
                 * operations in each 4 round block.
                 */
                b1 = rotlXor(b1, ROTATION_0_0, b0 += b1);
                b3 = rotlXor(b3, ROTATION_0_1, b2 += b3);
                b5 = rotlXor(b5, ROTATION_0_2, b4 += b5);
                b7 = rotlXor(b7, ROTATION_0_3, b6 += b7);

                b1 = rotlXor(b1, ROTATION_1_0, b2 += b1);
                b7 = rotlXor(b7, ROTATION_1_1, b4 += b7);
                b5 = rotlXor(b5, ROTATION_1_2, b6 += b5);
                b3 = rotlXor(b3, ROTATION_1_3, b0 += b3);

                b1 = rotlXor(b1, ROTATION_2_0, b4 += b1);
                b3 = rotlXor(b3, ROTATION_2_1, b6 += b3);
                b5 = rotlXor(b5, ROTATION_2_2, b0 += b5);
                b7 = rotlXor(b7, ROTATION_2_3, b2 += b7);

                b1 = rotlXor(b1, ROTATION_3_0, b6 += b1);
                b7 = rotlXor(b7, ROTATION_3_1, b0 += b7);
                b5 = rotlXor(b5, ROTATION_3_2, b2 += b5);
                b3 = rotlXor(b3, ROTATION_3_3, b4 += b3);

                /*
                 * Subkey injection for first 4 rounds.
                 */
                b0 += kw[dm9];
                b1 += kw[dm9 + 1];
                b2 += kw[dm9 + 2];
                b3 += kw[dm9 + 3];
                b4 += kw[dm9 + 4];
                b5 += kw[dm9 + 5] + t[dm3];
                b6 += kw[dm9 + 6] + t[dm3 + 1];
                b7 += kw[dm9 + 7] + d;

                /*
                 * 4 more rounds of mix/permute
                 */
                b1 = rotlXor(b1, ROTATION_4_0, b0 += b1);
                b3 = rotlXor(b3, ROTATION_4_1, b2 += b3);
                b5 = rotlXor(b5, ROTATION_4_2, b4 += b5);
                b7 = rotlXor(b7, ROTATION_4_3, b6 += b7);

                b1 = rotlXor(b1, ROTATION_5_0, b2 += b1);
                b7 = rotlXor(b7, ROTATION_5_1, b4 += b7);
                b5 = rotlXor(b5, ROTATION_5_2, b6 += b5);
                b3 = rotlXor(b3, ROTATION_5_3, b0 += b3);

                b1 = rotlXor(b1, ROTATION_6_0, b4 += b1);
                b3 = rotlXor(b3, ROTATION_6_1, b6 += b3);
                b5 = rotlXor(b5, ROTATION_6_2, b0 += b5);
                b7 = rotlXor(b7, ROTATION_6_3, b2 += b7);

                b1 = rotlXor(b1, ROTATION_7_0, b6 += b1);
                b7 = rotlXor(b7, ROTATION_7_1, b0 += b7);
                b5 = rotlXor(b5, ROTATION_7_2, b2 += b5);
                b3 = rotlXor(b3, ROTATION_7_3, b4 += b3);

                /*
                 * Subkey injection for next 4 rounds.
                 */
                b0 += kw[dm9 + 1];
                b1 += kw[dm9 + 2];
                b2 += kw[dm9 + 3];
                b3 += kw[dm9 + 4];
                b4 += kw[dm9 + 5];
                b5 += kw[dm9 + 6] + t[dm3 + 1];
                b6 += kw[dm9 + 7] + t[dm3 + 2];
                b7 += kw[dm9 + 8] + d + 1;
            }

            /*
             * Output cipher state.
             */
            out[0] = b0;
            out[1] = b1;
            out[2] = b2;
            out[3] = b3;
            out[4] = b4;
            out[5] = b5;
            out[6] = b6;
            out[7] = b7;
        }

        public void decryptBlock(long[] block, long[] state)
        {
            final long[] kw = this.kw;
            final long[] t = this.t;
            final int[] mod9 = MOD9;
            final int[] mod3 = MOD3;

            /* Help the JIT avoid index bounds checks */
            if (kw.length != 17)
            {
                throw new IllegalArgumentException();
            }
            if (t.length != 5)
            {
                throw new IllegalArgumentException();
            }

            long b0 = block[0];
            long b1 = block[1];
            long b2 = block[2];
            long b3 = block[3];
            long b4 = block[4];
            long b5 = block[5];
            long b6 = block[6];
            long b7 = block[7];

            // Exact inverse of encryptBlock: subkeys removed and mixes undone
            // in reverse order.
            for (int d = (ROUNDS_512 / 4) - 1; d >= 1; d -= 2)
            {
                final int dm9 = mod9[d];
                final int dm3 = mod3[d];

                /* Reverse key injection for second 4 rounds */
                b0 -= kw[dm9 + 1];
                b1 -= kw[dm9 + 2];
                b2 -= kw[dm9 + 3];
                b3 -= kw[dm9 + 4];
                b4 -= kw[dm9 + 5];
                b5 -= kw[dm9 + 6] + t[dm3 + 1];
                b6 -= kw[dm9 + 7] + t[dm3 + 2];
                b7 -= kw[dm9 + 8] + d + 1;

                /* Reverse second 4 mix/permute rounds */
                b1 = xorRotr(b1, ROTATION_7_0, b6);
                b6 -= b1;
                b7 = xorRotr(b7, ROTATION_7_1, b0);
                b0 -= b7;
                b5 = xorRotr(b5, ROTATION_7_2, b2);
                b2 -= b5;
                b3 = xorRotr(b3, ROTATION_7_3, b4);
                b4 -= b3;

                b1 = xorRotr(b1, ROTATION_6_0, b4);
                b4 -= b1;
                b3 = xorRotr(b3, ROTATION_6_1, b6);
                b6 -= b3;
                b5 = xorRotr(b5, ROTATION_6_2, b0);
                b0 -= b5;
                b7 = xorRotr(b7, ROTATION_6_3, b2);
                b2 -= b7;

                b1 = xorRotr(b1, ROTATION_5_0, b2);
                b2 -= b1;
                b7 = xorRotr(b7, ROTATION_5_1, b4);
                b4 -= b7;
                b5 = xorRotr(b5, ROTATION_5_2, b6);
                b6 -= b5;
                b3 = xorRotr(b3, ROTATION_5_3, b0);
                b0 -= b3;

                b1 = xorRotr(b1, ROTATION_4_0, b0);
                b0 -= b1;
                b3 = xorRotr(b3, ROTATION_4_1, b2);
                b2 -= b3;
                b5 = xorRotr(b5, ROTATION_4_2, b4);
                b4 -= b5;
                b7 = xorRotr(b7, ROTATION_4_3, b6);
                b6 -= b7;

                /* Reverse key injection for first 4 rounds */
                b0 -= kw[dm9];
                b1 -= kw[dm9 + 1];
                b2 -= kw[dm9 + 2];
                b3 -= kw[dm9 + 3];
                b4 -= kw[dm9 + 4];
                b5 -= kw[dm9 + 5] + t[dm3];
                b6 -= kw[dm9 + 6] + t[dm3 + 1];
                b7 -= kw[dm9 + 7] + d;

                /* Reverse first 4 mix/permute rounds */
                b1 = xorRotr(b1, ROTATION_3_0, b6);
                b6 -= b1;
                b7 = xorRotr(b7, ROTATION_3_1, b0);
                b0 -= b7;
                b5 = xorRotr(b5, ROTATION_3_2, b2);
                b2 -= b5;
                b3 = xorRotr(b3, ROTATION_3_3, b4);
                b4 -= b3;

                b1 = xorRotr(b1, ROTATION_2_0, b4);
                b4 -= b1;
                b3 = xorRotr(b3, ROTATION_2_1, b6);
                b6 -= b3;
                b5 = xorRotr(b5, ROTATION_2_2, b0);
                b0 -= b5;
                b7 = xorRotr(b7, ROTATION_2_3, b2);
                b2 -= b7;

                b1 = xorRotr(b1, ROTATION_1_0, b2);
                b2 -= b1;
                b7 = xorRotr(b7, ROTATION_1_1, b4);
                b4 -= b7;
                b5 = xorRotr(b5, ROTATION_1_2, b6);
                b6 -= b5;
                b3 = xorRotr(b3, ROTATION_1_3, b0);
                b0 -= b3;

                b1 = xorRotr(b1, ROTATION_0_0, b0);
                b0 -= b1;
                b3 = xorRotr(b3, ROTATION_0_1, b2);
                b2 -= b3;
                b5 = xorRotr(b5, ROTATION_0_2, b4);
                b4 -= b5;
                b7 = xorRotr(b7, ROTATION_0_3, b6);
                b6 -= b7;
            }

            /*
             * First subkey uninjection.
             */
            b0 -= kw[0];
            b1 -= kw[1];
            b2 -= kw[2];
            b3 -= kw[3];
            b4 -= kw[4];
            b5 -= kw[5] + t[0];
            b6 -= kw[6] + t[1];
            b7 -= kw[7];

            /*
             * Output cipher state.
             */
            state[0] = b0;
            state[1] = b1;
            state[2] = b2;
            state[3] = b3;
            state[4] = b4;
            state[5] = b5;
            state[6] = b6;
            state[7] = b7;
        }
    }
private static final class Threefish1024Cipher
extends ThreefishCipher
{
/**
* Mix rotation constants defined in Skein 1.3 specification
*/
private static final int ROTATION_0_0 = 24, ROTATION_0_1 = 13, ROTATION_0_2 = 8, ROTATION_0_3 = 47;
private static final int ROTATION_0_4 = 8, ROTATION_0_5 = 17, ROTATION_0_6 = 22, ROTATION_0_7 = 37;
private static final int ROTATION_1_0 = 38, ROTATION_1_1 = 19, ROTATION_1_2 = 10, ROTATION_1_3 = 55;
private static final int ROTATION_1_4 = 49, ROTATION_1_5 = 18, ROTATION_1_6 = 23, ROTATION_1_7 = 52;
private static final int ROTATION_2_0 = 33, ROTATION_2_1 = 4, ROTATION_2_2 = 51, ROTATION_2_3 = 13;
private static final int ROTATION_2_4 = 34, ROTATION_2_5 = 41, ROTATION_2_6 = 59, ROTATION_2_7 = 17;
private static final int ROTATION_3_0 = 5, ROTATION_3_1 = 20, ROTATION_3_2 = 48, ROTATION_3_3 = 41;
private static final int ROTATION_3_4 = 47, ROTATION_3_5 = 28, ROTATION_3_6 = 16, ROTATION_3_7 = 25;
private static final int ROTATION_4_0 = 41, ROTATION_4_1 = 9, ROTATION_4_2 = 37, ROTATION_4_3 = 31;
private static final int ROTATION_4_4 = 12, ROTATION_4_5 = 47, ROTATION_4_6 = 44, ROTATION_4_7 = 30;
private static final int ROTATION_5_0 = 16, ROTATION_5_1 = 34, ROTATION_5_2 = 56, ROTATION_5_3 = 51;
private static final int ROTATION_5_4 = 4, ROTATION_5_5 = 53, ROTATION_5_6 = 42, ROTATION_5_7 = 41;
private static final int ROTATION_6_0 = 31, ROTATION_6_1 = 44, ROTATION_6_2 = 47, ROTATION_6_3 = 46;
private static final int ROTATION_6_4 = 19, ROTATION_6_5 = 42, ROTATION_6_6 = 44, ROTATION_6_7 = 25;
private static final int ROTATION_7_0 = 9, ROTATION_7_1 = 48, ROTATION_7_2 = 35, ROTATION_7_3 = 52;
private static final int ROTATION_7_4 = 23, ROTATION_7_5 = 31, ROTATION_7_6 = 37, ROTATION_7_7 = 20;
        // Shares the outer engine's key/tweak schedule arrays.
        public Threefish1024Cipher(long[] kw, long[] t)
        {
            super(kw, t);
        }
        // Threefish-1024 forward transform: 16 x 64 bit words, 80 rounds.
        void encryptBlock(long[] block, long[] out)
        {
            final long[] kw = this.kw;
            final long[] t = this.t;
            final int[] mod17 = MOD17;
            final int[] mod3 = MOD3;

            /* Help the JIT avoid index bounds checks */
            if (kw.length != 33)
            {
                throw new IllegalArgumentException();
            }
            if (t.length != 5)
            {
                throw new IllegalArgumentException();
            }

            /*
             * Read 16 words of plaintext data, not using arrays for cipher state
             */
            long b0 = block[0];
            long b1 = block[1];
            long b2 = block[2];
            long b3 = block[3];
            long b4 = block[4];
            long b5 = block[5];
            long b6 = block[6];
            long b7 = block[7];
            long b8 = block[8];
            long b9 = block[9];
            long b10 = block[10];
            long b11 = block[11];
            long b12 = block[12];
            long b13 = block[13];
            long b14 = block[14];
            long b15 = block[15];

            /*
             * First subkey injection.
             */
            b0 += kw[0];
            b1 += kw[1];
            b2 += kw[2];
            b3 += kw[3];
            b4 += kw[4];
            b5 += kw[5];
            b6 += kw[6];
            b7 += kw[7];
            b8 += kw[8];
            b9 += kw[9];
            b10 += kw[10];
            b11 += kw[11];
            b12 += kw[12];
            b13 += kw[13] + t[0];
            b14 += kw[14] + t[1];
            b15 += kw[15];

            /*
             * Rounds loop, unrolled to 8 rounds per iteration.
             *
             * Unrolling to multiples of 4 avoids the mod 4 check for key injection, and allows
             * inlining of the permutations, which cycle every 4 rounds (avoiding array
             * index/lookup).
             *
             * Unrolling to multiples of 8 avoids the mod 8 rotation constant lookup, and allows
             * inlining constant rotation values (avoiding array index/lookup).
             */
            for (int d = 1; d < (ROUNDS_1024 / 4); d += 2)
            {
                final int dm17 = mod17[d];
                final int dm3 = mod3[d];

                /*
                 * 4 rounds of mix and permute.
                 *
                 * Permute schedule has a 4 round cycle, so permutes are inlined in the mix
                 * operations in each 4 round block.
                 */
                b1 = rotlXor(b1, ROTATION_0_0, b0 += b1);
                b3 = rotlXor(b3, ROTATION_0_1, b2 += b3);
                b5 = rotlXor(b5, ROTATION_0_2, b4 += b5);
                b7 = rotlXor(b7, ROTATION_0_3, b6 += b7);
                b9 = rotlXor(b9, ROTATION_0_4, b8 += b9);
                b11 = rotlXor(b11, ROTATION_0_5, b10 += b11);
                b13 = rotlXor(b13, ROTATION_0_6, b12 += b13);
                b15 = rotlXor(b15, ROTATION_0_7, b14 += b15);

                b9 = rotlXor(b9, ROTATION_1_0, b0 += b9);
                b13 = rotlXor(b13, ROTATION_1_1, b2 += b13);
                b11 = rotlXor(b11, ROTATION_1_2, b6 += b11);
                b15 = rotlXor(b15, ROTATION_1_3, b4 += b15);
                b7 = rotlXor(b7, ROTATION_1_4, b10 += b7);
                b3 = rotlXor(b3, ROTATION_1_5, b12 += b3);
                b5 = rotlXor(b5, ROTATION_1_6, b14 += b5);
                b1 = rotlXor(b1, ROTATION_1_7, b8 += b1);

                b7 = rotlXor(b7, ROTATION_2_0, b0 += b7);
                b5 = rotlXor(b5, ROTATION_2_1, b2 += b5);
                b3 = rotlXor(b3, ROTATION_2_2, b4 += b3);
                b1 = rotlXor(b1, ROTATION_2_3, b6 += b1);
                b15 = rotlXor(b15, ROTATION_2_4, b12 += b15);
                b13 = rotlXor(b13, ROTATION_2_5, b14 += b13);
                b11 = rotlXor(b11, ROTATION_2_6, b8 += b11);
                b9 = rotlXor(b9, ROTATION_2_7, b10 += b9);

                b15 = rotlXor(b15, ROTATION_3_0, b0 += b15);
                b11 = rotlXor(b11, ROTATION_3_1, b2 += b11);
                b13 = rotlXor(b13, ROTATION_3_2, b6 += b13);
                b9 = rotlXor(b9, ROTATION_3_3, b4 += b9);
                b1 = rotlXor(b1, ROTATION_3_4, b14 += b1);
                b5 = rotlXor(b5, ROTATION_3_5, b8 += b5);
                b3 = rotlXor(b3, ROTATION_3_6, b10 += b3);
                b7 = rotlXor(b7, ROTATION_3_7, b12 += b7);

                /*
                 * Subkey injection for first 4 rounds.
                 */
                b0 += kw[dm17];
                b1 += kw[dm17 + 1];
                b2 += kw[dm17 + 2];
                b3 += kw[dm17 + 3];
                b4 += kw[dm17 + 4];
                b5 += kw[dm17 + 5];
                b6 += kw[dm17 + 6];
                b7 += kw[dm17 + 7];
                b8 += kw[dm17 + 8];
                b9 += kw[dm17 + 9];
                b10 += kw[dm17 + 10];
                b11 += kw[dm17 + 11];
                b12 += kw[dm17 + 12];
                b13 += kw[dm17 + 13] + t[dm3];
                b14 += kw[dm17 + 14] + t[dm3 + 1];
                b15 += kw[dm17 + 15] + d;

                /*
                 * 4 more rounds of mix/permute
                 */
                b1 = rotlXor(b1, ROTATION_4_0, b0 += b1);
                b3 = rotlXor(b3, ROTATION_4_1, b2 += b3);
                b5 = rotlXor(b5, ROTATION_4_2, b4 += b5);
                b7 = rotlXor(b7, ROTATION_4_3, b6 += b7);
                b9 = rotlXor(b9, ROTATION_4_4, b8 += b9);
                b11 = rotlXor(b11, ROTATION_4_5, b10 += b11);
                b13 = rotlXor(b13, ROTATION_4_6, b12 += b13);
                b15 = rotlXor(b15, ROTATION_4_7, b14 += b15);

                b9 = rotlXor(b9, ROTATION_5_0, b0 += b9);
                b13 = rotlXor(b13, ROTATION_5_1, b2 += b13);
                b11 = rotlXor(b11, ROTATION_5_2, b6 += b11);
                b15 = rotlXor(b15, ROTATION_5_3, b4 += b15);
                b7 = rotlXor(b7, ROTATION_5_4, b10 += b7);
                b3 = rotlXor(b3, ROTATION_5_5, b12 += b3);
                b5 = rotlXor(b5, ROTATION_5_6, b14 += b5);
                b1 = rotlXor(b1, ROTATION_5_7, b8 += b1);

                b7 = rotlXor(b7, ROTATION_6_0, b0 += b7);
                b5 = rotlXor(b5, ROTATION_6_1, b2 += b5);
                b3 = rotlXor(b3, ROTATION_6_2, b4 += b3);
                b1 = rotlXor(b1, ROTATION_6_3, b6 += b1);
                b15 = rotlXor(b15, ROTATION_6_4, b12 += b15);
                b13 = rotlXor(b13, ROTATION_6_5, b14 += b13);
                b11 = rotlXor(b11, ROTATION_6_6, b8 += b11);
                b9 = rotlXor(b9, ROTATION_6_7, b10 += b9);

                b15 = rotlXor(b15, ROTATION_7_0, b0 += b15);
                b11 = rotlXor(b11, ROTATION_7_1, b2 += b11);
                b13 = rotlXor(b13, ROTATION_7_2, b6 += b13);
                b9 = rotlXor(b9, ROTATION_7_3, b4 += b9);
                b1 = rotlXor(b1, ROTATION_7_4, b14 += b1);
                b5 = rotlXor(b5, ROTATION_7_5, b8 += b5);
                b3 = rotlXor(b3, ROTATION_7_6, b10 += b3);
                b7 = rotlXor(b7, ROTATION_7_7, b12 += b7);

                /*
                 * Subkey injection for next 4 rounds.
                 */
                b0 += kw[dm17 + 1];
                b1 += kw[dm17 + 2];
                b2 += kw[dm17 + 3];
                b3 += kw[dm17 + 4];
                b4 += kw[dm17 + 5];
                b5 += kw[dm17 + 6];
                b6 += kw[dm17 + 7];
                b7 += kw[dm17 + 8];
                b8 += kw[dm17 + 9];
                b9 += kw[dm17 + 10];
                b10 += kw[dm17 + 11];
                b11 += kw[dm17 + 12];
                b12 += kw[dm17 + 13];
                b13 += kw[dm17 + 14] + t[dm3 + 1];
                b14 += kw[dm17 + 15] + t[dm3 + 2];
                b15 += kw[dm17 + 16] + d + 1;
            }

            /*
             * Output cipher state.
             */
            out[0] = b0;
            out[1] = b1;
            out[2] = b2;
            out[3] = b3;
            out[4] = b4;
            out[5] = b5;
            out[6] = b6;
            out[7] = b7;
            out[8] = b8;
            out[9] = b9;
            out[10] = b10;
            out[11] = b11;
            out[12] = b12;
            out[13] = b13;
            out[14] = b14;
            out[15] = b15;
        }
/**
 * Decrypts one 1024-bit (16 x 64-bit word) block, writing the plaintext
 * words into {@code state}. This is the exact inverse of the encryption
 * rounds: each loop iteration undoes 8 rounds (two groups of 4 mix/permute
 * rounds) together with their subkey injections, walking the key schedule
 * {@code kw} and tweak words {@code t} backwards.
 *
 * @param block the 16-word ciphertext block to decrypt
 * @param state receives the 16 decrypted plaintext words
 */
void decryptBlock(long[] block, long[] state)
{
    final long[] kw = this.kw;
    final long[] t = this.t;
    // Precomputed (d mod 17) / (d mod 3) lookup tables for the rotating
    // subkey and tweak schedule indices.
    final int[] mod17 = MOD17;
    final int[] mod3 = MOD3;
    /* Help the JIT avoid index bounds checks */
    if (kw.length != 33)
    {
        throw new IllegalArgumentException();
    }
    if (t.length != 5)
    {
        throw new IllegalArgumentException();
    }
    // Load the cipher state into locals; b0..b15 are the 16 word lanes.
    long b0 = block[0];
    long b1 = block[1];
    long b2 = block[2];
    long b3 = block[3];
    long b4 = block[4];
    long b5 = block[5];
    long b6 = block[6];
    long b7 = block[7];
    long b8 = block[8];
    long b9 = block[9];
    long b10 = block[10];
    long b11 = block[11];
    long b12 = block[12];
    long b13 = block[13];
    long b14 = block[14];
    long b15 = block[15];
    // Iterate the subkey schedule in reverse, undoing 8 rounds per pass.
    for (int d = (ROUNDS_1024 / 4) - 1; d >= 1; d -= 2)
    {
        final int dm17 = mod17[d];
        final int dm3 = mod3[d];
        /* Reverse key injection for second 4 rounds */
        b0 -= kw[dm17 + 1];
        b1 -= kw[dm17 + 2];
        b2 -= kw[dm17 + 3];
        b3 -= kw[dm17 + 4];
        b4 -= kw[dm17 + 5];
        b5 -= kw[dm17 + 6];
        b6 -= kw[dm17 + 7];
        b7 -= kw[dm17 + 8];
        b8 -= kw[dm17 + 9];
        b9 -= kw[dm17 + 10];
        b10 -= kw[dm17 + 11];
        b11 -= kw[dm17 + 12];
        b12 -= kw[dm17 + 13];
        b13 -= kw[dm17 + 14] + t[dm3 + 1];
        b14 -= kw[dm17 + 15] + t[dm3 + 2];
        b15 -= kw[dm17 + 16] + d + 1;
        /* Reverse second 4 mix/permute rounds */
        // Each pair below undoes one mix: un-rotate/XOR, then un-add.
        b15 = xorRotr(b15, ROTATION_7_0, b0);
        b0 -= b15;
        b11 = xorRotr(b11, ROTATION_7_1, b2);
        b2 -= b11;
        b13 = xorRotr(b13, ROTATION_7_2, b6);
        b6 -= b13;
        b9 = xorRotr(b9, ROTATION_7_3, b4);
        b4 -= b9;
        b1 = xorRotr(b1, ROTATION_7_4, b14);
        b14 -= b1;
        b5 = xorRotr(b5, ROTATION_7_5, b8);
        b8 -= b5;
        b3 = xorRotr(b3, ROTATION_7_6, b10);
        b10 -= b3;
        b7 = xorRotr(b7, ROTATION_7_7, b12);
        b12 -= b7;
        b7 = xorRotr(b7, ROTATION_6_0, b0);
        b0 -= b7;
        b5 = xorRotr(b5, ROTATION_6_1, b2);
        b2 -= b5;
        b3 = xorRotr(b3, ROTATION_6_2, b4);
        b4 -= b3;
        b1 = xorRotr(b1, ROTATION_6_3, b6);
        b6 -= b1;
        b15 = xorRotr(b15, ROTATION_6_4, b12);
        b12 -= b15;
        b13 = xorRotr(b13, ROTATION_6_5, b14);
        b14 -= b13;
        b11 = xorRotr(b11, ROTATION_6_6, b8);
        b8 -= b11;
        b9 = xorRotr(b9, ROTATION_6_7, b10);
        b10 -= b9;
        b9 = xorRotr(b9, ROTATION_5_0, b0);
        b0 -= b9;
        b13 = xorRotr(b13, ROTATION_5_1, b2);
        b2 -= b13;
        b11 = xorRotr(b11, ROTATION_5_2, b6);
        b6 -= b11;
        b15 = xorRotr(b15, ROTATION_5_3, b4);
        b4 -= b15;
        b7 = xorRotr(b7, ROTATION_5_4, b10);
        b10 -= b7;
        b3 = xorRotr(b3, ROTATION_5_5, b12);
        b12 -= b3;
        b5 = xorRotr(b5, ROTATION_5_6, b14);
        b14 -= b5;
        b1 = xorRotr(b1, ROTATION_5_7, b8);
        b8 -= b1;
        b1 = xorRotr(b1, ROTATION_4_0, b0);
        b0 -= b1;
        b3 = xorRotr(b3, ROTATION_4_1, b2);
        b2 -= b3;
        b5 = xorRotr(b5, ROTATION_4_2, b4);
        b4 -= b5;
        b7 = xorRotr(b7, ROTATION_4_3, b6);
        b6 -= b7;
        b9 = xorRotr(b9, ROTATION_4_4, b8);
        b8 -= b9;
        b11 = xorRotr(b11, ROTATION_4_5, b10);
        b10 -= b11;
        b13 = xorRotr(b13, ROTATION_4_6, b12);
        b12 -= b13;
        b15 = xorRotr(b15, ROTATION_4_7, b14);
        b14 -= b15;
        /* Reverse key injection for first 4 rounds */
        b0 -= kw[dm17];
        b1 -= kw[dm17 + 1];
        b2 -= kw[dm17 + 2];
        b3 -= kw[dm17 + 3];
        b4 -= kw[dm17 + 4];
        b5 -= kw[dm17 + 5];
        b6 -= kw[dm17 + 6];
        b7 -= kw[dm17 + 7];
        b8 -= kw[dm17 + 8];
        b9 -= kw[dm17 + 9];
        b10 -= kw[dm17 + 10];
        b11 -= kw[dm17 + 11];
        b12 -= kw[dm17 + 12];
        b13 -= kw[dm17 + 13] + t[dm3];
        b14 -= kw[dm17 + 14] + t[dm3 + 1];
        b15 -= kw[dm17 + 15] + d;
        /* Reverse first 4 mix/permute rounds */
        b15 = xorRotr(b15, ROTATION_3_0, b0);
        b0 -= b15;
        b11 = xorRotr(b11, ROTATION_3_1, b2);
        b2 -= b11;
        b13 = xorRotr(b13, ROTATION_3_2, b6);
        b6 -= b13;
        b9 = xorRotr(b9, ROTATION_3_3, b4);
        b4 -= b9;
        b1 = xorRotr(b1, ROTATION_3_4, b14);
        b14 -= b1;
        b5 = xorRotr(b5, ROTATION_3_5, b8);
        b8 -= b5;
        b3 = xorRotr(b3, ROTATION_3_6, b10);
        b10 -= b3;
        b7 = xorRotr(b7, ROTATION_3_7, b12);
        b12 -= b7;
        b7 = xorRotr(b7, ROTATION_2_0, b0);
        b0 -= b7;
        b5 = xorRotr(b5, ROTATION_2_1, b2);
        b2 -= b5;
        b3 = xorRotr(b3, ROTATION_2_2, b4);
        b4 -= b3;
        b1 = xorRotr(b1, ROTATION_2_3, b6);
        b6 -= b1;
        b15 = xorRotr(b15, ROTATION_2_4, b12);
        b12 -= b15;
        b13 = xorRotr(b13, ROTATION_2_5, b14);
        b14 -= b13;
        b11 = xorRotr(b11, ROTATION_2_6, b8);
        b8 -= b11;
        b9 = xorRotr(b9, ROTATION_2_7, b10);
        b10 -= b9;
        b9 = xorRotr(b9, ROTATION_1_0, b0);
        b0 -= b9;
        b13 = xorRotr(b13, ROTATION_1_1, b2);
        b2 -= b13;
        b11 = xorRotr(b11, ROTATION_1_2, b6);
        b6 -= b11;
        b15 = xorRotr(b15, ROTATION_1_3, b4);
        b4 -= b15;
        b7 = xorRotr(b7, ROTATION_1_4, b10);
        b10 -= b7;
        b3 = xorRotr(b3, ROTATION_1_5, b12);
        b12 -= b3;
        b5 = xorRotr(b5, ROTATION_1_6, b14);
        b14 -= b5;
        b1 = xorRotr(b1, ROTATION_1_7, b8);
        b8 -= b1;
        b1 = xorRotr(b1, ROTATION_0_0, b0);
        b0 -= b1;
        b3 = xorRotr(b3, ROTATION_0_1, b2);
        b2 -= b3;
        b5 = xorRotr(b5, ROTATION_0_2, b4);
        b4 -= b5;
        b7 = xorRotr(b7, ROTATION_0_3, b6);
        b6 -= b7;
        b9 = xorRotr(b9, ROTATION_0_4, b8);
        b8 -= b9;
        b11 = xorRotr(b11, ROTATION_0_5, b10);
        b10 -= b11;
        b13 = xorRotr(b13, ROTATION_0_6, b12);
        b12 -= b13;
        b15 = xorRotr(b15, ROTATION_0_7, b14);
        b14 -= b15;
    }
    /*
     * First subkey uninjection.
     */
    b0 -= kw[0];
    b1 -= kw[1];
    b2 -= kw[2];
    b3 -= kw[3];
    b4 -= kw[4];
    b5 -= kw[5];
    b6 -= kw[6];
    b7 -= kw[7];
    b8 -= kw[8];
    b9 -= kw[9];
    b10 -= kw[10];
    b11 -= kw[11];
    b12 -= kw[12];
    b13 -= kw[13] + t[0];
    b14 -= kw[14] + t[1];
    b15 -= kw[15];
    /*
     * Output cipher state.
     */
    state[0] = b0;
    state[1] = b1;
    state[2] = b2;
    state[3] = b3;
    state[4] = b4;
    state[5] = b5;
    state[6] = b6;
    state[7] = b7;
    state[8] = b8;
    state[9] = b9;
    state[10] = b10;
    state[11] = b11;
    state[12] = b12;
    state[13] = b13;
    state[14] = b14;
    state[15] = b15;
}
}
}
| Skywalker-11/spongycastle | core/src/main/java/org/spongycastle/crypto/engines/ThreefishEngine.java | Java | mit | 51,043 |
/*globals Handlebars */
require("ember-handlebars/ext");
/**
@module ember
@submodule ember-handlebars
*/
/**
@private
Find templates stored in the head tag as script tags and make them available
to `Ember.CoreView` in the global `Ember.TEMPLATES` object. This will be run
as as jQuery DOM-ready callback.
Script tags with `text/x-handlebars` will be compiled
with Ember's Handlebars and are suitable for use as a view's template.
Those with type `text/x-raw-handlebars` will be compiled with regular
Handlebars and are suitable for use in views' computed properties.
@method bootstrap
@for Ember.Handlebars
@static
@param ctx
*/
Ember.Handlebars.bootstrap = function(ctx) {
  var selector = 'script[type="text/x-handlebars"], script[type="text/x-raw-handlebars"]';

  Ember.$(selector, ctx).each(function() {
    // The script tag currently being processed.
    var scriptTag = Ember.$(this);

    // Raw templates are compiled with plain Handlebars; everything else goes
    // through Ember's Handlebars compiler.
    var compileFn;
    if (scriptTag.attr('type') === 'text/x-raw-handlebars') {
      compileFn = Ember.$.proxy(Handlebars.compile, Handlebars);
    } else {
      compileFn = Ember.$.proxy(Ember.Handlebars.compile, Ember.Handlebars);
    }

    // Template name used by Ember.View's templateName property: prefer the
    // data-template-name attribute, then the id, then fall back to 'application'.
    var templateName = scriptTag.attr('data-template-name') || scriptTag.attr('id') || 'application';

    // Compile and register the template under its name.
    Ember.TEMPLATES[templateName] = compileFn(scriptTag.html());

    // Remove the script tag from the DOM so it is not processed twice.
    scriptTag.remove();
  });
};
// Convenience wrapper: bootstrap templates found anywhere in the document.
function bootstrap() {
  Ember.Handlebars.bootstrap( Ember.$(document) );
}
/*
We tie this to application.load to ensure that we've at least
attempted to bootstrap at the point that the application is loaded.
We also tie this to document ready since we're guaranteed that all
the inline templates are present at this point.
There's no harm to running this twice, since we remove the templates
from the DOM after processing.
*/
Ember.onLoad('application', bootstrap);
| chancancode/ember.js | packages/ember-handlebars/lib/loader.js | JavaScript | mit | 2,142 |
// Unit tests for jStat's normal-distribution pdf and inverse-cdf functions,
// written with the vows test framework. Expected values were cross-checked
// against R (dnorm / qnorm), as noted below.
var vows = require('vows');
var assert = require('assert');
var suite = vows.describe('jStat.distribution');
// Sets up the test environment (loads jStat into the global scope).
require('../env.js');
suite.addBatch({
  'normal pdf': {
    'topic': function() {
      return jStat;
    },
    //Checked against R's dnorm(x, mean, sd)
    'check pdf calculation': function(jStat) {
      // Absolute tolerance for float comparison against R reference values.
      var tol = 0.000001;
      assert.epsilon(tol, jStat.normal.pdf(0, 0, 1.0), 0.3989423);
      assert.epsilon(tol, jStat.normal.pdf(5, 10, 3.0), 0.03315905);
      assert.epsilon(tol, jStat.normal.pdf(-1, 1, 0.5), 0.00026766);
    },
    //Checked against R's qnorm(p, mean, sd)
    'check inv calculation': function(jStat) {
      var tol = 0.000001;
      assert.epsilon(tol, jStat.normal.inv(0.3989423, 0, 1.0), -0.2560858);
      assert.epsilon(tol, jStat.normal.inv(0.05, 10, 3.0), 5.065439);
      assert.epsilon(tol, jStat.normal.inv(0.65, -2, .5), -1.80734);
    }
  }
});
suite.export(module);
| akrawitz/jstat | test/distribution/normal-test.js | JavaScript | mit | 923 |
require 'spec_helper'
describe UserDecorator do
  describe "#display_name" do
    subject{ user.display_name }

    context 'when profile_type is personal' do
      context "when we only have a full name" do
        let(:user){ create(:user, name: nil, full_name: "Full Name") }
        it{ should == 'Full Name' }
      end
      context "when we have only a name" do
        # FIX: was `name: nil, name: 'name'` — a duplicate hash key, so the
        # first entry was silently discarded. Mirror the sibling context by
        # making full_name absent instead.
        let(:user){ create(:user, full_name: nil, name: 'name') }
        it{ should == 'name' }
      end
      context "when we have a name and a full name" do
        let(:user){ create(:user, name: 'name', full_name: 'full name') }
        it{ should == 'name' }
      end
      context "when we have no name" do
        let(:user){ create(:user, name: nil, nickname: nil) }
        it{ should == I18n.t('words.no_name') }
      end
    end

    context 'when profile_type is organization' do
      context "when we the organization name" do
        let(:user){ create(:user, profile_type: 'organization', organization_attributes: { name: 'Neighbor.ly' }) }
        it{ should == 'Neighbor.ly' }
      end
      context "when we have no organization name" do
        let(:user){ create(:user, profile_type: 'organization', organization_attributes: { name: nil }) }
        it{ should == I18n.t('words.no_name') }
      end
    end

    context 'when profile_type is channel' do
      let(:user){ create(:channel, name: 'Neighbor.ly').user.reload }
      it{ should == 'Neighbor.ly' }
    end
  end

  describe "#display_image_html" do
    let(:user){ build(:user, image_url: 'http://image.jpg', uploaded_image: nil )}
    let(:options){ {width: 300, height: 300} }
    subject{ user.display_image_html(options) }
    it { should == "<figure class=\"profile-image personal\"><img alt=\"Foo bar\" class=\"avatar\" src=\"http://image.jpg\" style=\"width: #{options[:width]}px; height: #{options[:height]}px\" /></figure>"}
  end

  describe "#display_image" do
    subject{ user.display_image }

    context 'when profile_type is personal' do
      context "when we have an uploaded image" do
        let(:user){ build(:user, uploaded_image: 'image.png' )}
        before do
          image = double(url: 'image.png')
          image.stub(:thumb_avatar).and_return(image)
          user.stub(:uploaded_image).and_return(image)
        end
        it{ should == 'image.png' }
      end
      context "when we have an image url" do
        let(:user){ build(:user, image_url: 'image.png') }
        it{ should == 'image.png' }
      end
      context "when we have an email" do
        let(:user){ create(:user, image_url: nil, email: 'diogob@gmail.com') }
        # NOTE(review): `should ~ /regex/` does not assert a match against the
        # subject (unary ~ matches against $_), and the pattern's `(d+.)` is
        # missing backslashes — this example likely never fails. Left as-is to
        # preserve behavior; should probably be `should match(...)`.
        it{ should ~ /https:\/\/gravatar.com\/avatar\/5e2a237dafbc45f79428fdda9c5024b1\.jpg\?size=150&default=#{::Configuration[:base_url]}\/assets\/default-avatars\/(d+.)\.png/ }
      end
    end

    context 'when profile_type is organization' do
      context "when we have a organization image" do
        let(:user){ build(:user, profile_type: 'organization', organization_attributes: { image: 'image.png'} )}
        before do
          image = double(url: 'image.png')
          image.stub(:thumb).and_return(image)
          image.stub(:large).and_return(image)
          user.organization.stub(:image).and_return(image)
        end
        it{ should == 'image.png' }
      end
      context 'when we dont have a organization image' do
        let(:user){ build(:user, profile_type: 'organization', organization_attributes: { image: nil }) }
        it{ should == '/assets/logo-blank.jpg' }
      end
    end

    context 'when profile_type is channel' do
      context "when we have a channel image" do
        let(:user){ create(:channel, image: 'image.png').user.reload }
        before do
          image = double(url: 'image.png')
          image.stub(:thumb).and_return(image)
          image.stub(:large).and_return(image)
          user.channel.stub(:image).and_return(image)
        end
        it{ should == 'image.png' }
      end
      context 'when we dont have a organization image' do
        let(:user){ create(:channel, image: nil).user.reload }
        it{ should == '/assets/logo-blank.jpg' }
      end
    end
  end

  describe "#short_name" do
    subject { create(:user, name: 'My Name Is Lorem Ipsum Dolor Sit Amet') }
    its(:short_name) { should == 'My Name Is Lorem ...' }
  end

  describe "#medium_name" do
    subject { create(:user, name: 'My Name Is Lorem Ipsum Dolor Sit Amet And This Is a Bit Name I Think') }
    its(:medium_name) { should == 'My Name Is Lorem Ipsum Dolor Sit Amet A...' }
  end

  describe "#display_credits" do
    subject { create(:user) }
    its(:display_credits) { should == '$0.00'}
  end

  describe "#display_total_of_contributions" do
    subject { create(:user) }
    context "with confirmed contributions" do
      before do
        create(:contribution, state: 'confirmed', user: subject, value: 500.0)
      end
      its(:display_total_of_contributions) { should == '$500.00'}
    end
  end
end
| jinutm/silverclass | spec/decorators/user_decorator_spec.rb | Ruby | mit | 5,001 |
<?php
/**
* Zend Framework
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://framework.zend.com/license/new-bsd
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@zend.com so we can send you a copy immediately.
*
* @category Zend
* @package Zend_Paginator
* @copyright Copyright (c) 2005-2011 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @version $Id$
*/
/**
* @see Zend_Exception
*/
require_once 'Zend/Exception.php';
/**
* @category Zend
* @package Zend_Paginator
* @copyright Copyright (c) 2005-2011 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
*/
class Zend_Paginator_Exception extends Zend_Exception
{
    // Deliberately empty: exists only to give Zend_Paginator its own
    // exception type so callers can catch pagination errors specifically.
} | ramonornela/Zebra | library/Zend/Paginator/Exception.php | PHP | mit | 1050 |
package hep.aida.ref;
import hep.aida.IHistogram1D;
import hep.aida.IHistogram2D;
/**
* Convenient histogram utilities.
*/
class Util
{
    /**
     * Creates a new utility object.
     */
    public Util() {}

    /**
     * Returns the index of the in-range bin containing the maxBinHeight(),
     * or -1 if the histogram has no bins.
     */
    public int maxBin(IHistogram1D h)
    {
        int maxBin = -1;
        // Double.NEGATIVE_INFINITY rather than Double.MIN_VALUE: MIN_VALUE is
        // the smallest POSITIVE double, so initialising with it silently broke
        // histograms whose bin heights are all negative (returned -1).
        double maxValue = Double.NEGATIVE_INFINITY;
        for (int i=h.xAxis().bins(); --i >= 0; ) {
            double value = h.binHeight(i);
            if (value > maxValue) {
                maxValue = value;
                maxBin = i;
            }
        }
        return maxBin;
    }

    /**
     * Returns the indexX of the in-range bin containing the maxBinHeight(),
     * or -1 if the histogram has no bins.
     */
    public int maxBinX(IHistogram2D h)
    {
        // See maxBin() for why NEGATIVE_INFINITY is used as the sentinel.
        double maxValue = Double.NEGATIVE_INFINITY;
        int maxBinX = -1;
        for (int i=h.xAxis().bins(); --i >= 0; ) {
            for (int j=h.yAxis().bins(); --j >= 0; ) {
                double value = h.binHeight(i,j);
                if (value > maxValue) {
                    maxValue = value;
                    maxBinX = i;
                }
            }
        }
        return maxBinX;
    }

    /**
     * Returns the indexY of the in-range bin containing the maxBinHeight(),
     * or -1 if the histogram has no bins.
     */
    public int maxBinY(IHistogram2D h)
    {
        double maxValue = Double.NEGATIVE_INFINITY;
        int maxBinY = -1;
        for (int i=h.xAxis().bins(); --i >= 0; ) {
            for (int j=h.yAxis().bins(); --j >= 0; ) {
                double value = h.binHeight(i,j);
                if (value > maxValue) {
                    maxValue = value;
                    maxBinY = j;
                }
            }
        }
        return maxBinY;
    }

    /**
     * Returns the index of the in-range bin containing the minBinHeight(),
     * or -1 if the histogram has no bins.
     */
    public int minBin(IHistogram1D h)
    {
        int minBin = -1;
        // POSITIVE_INFINITY (not MAX_VALUE) so even bins holding
        // Double.MAX_VALUE are correctly considered.
        double minValue = Double.POSITIVE_INFINITY;
        for (int i=h.xAxis().bins(); --i >= 0; ) {
            double value = h.binHeight(i);
            if (value < minValue) {
                minValue = value;
                minBin = i;
            }
        }
        return minBin;
    }

    /**
     * Returns the indexX of the in-range bin containing the minBinHeight(),
     * or -1 if the histogram has no bins.
     */
    public int minBinX(IHistogram2D h)
    {
        double minValue = Double.POSITIVE_INFINITY;
        int minBinX = -1;
        for (int i=h.xAxis().bins(); --i >= 0; ) {
            for (int j=h.yAxis().bins(); --j >= 0; ) {
                double value = h.binHeight(i,j);
                if (value < minValue) {
                    minValue = value;
                    minBinX = i;
                }
            }
        }
        return minBinX;
    }

    /**
     * Returns the indexY of the in-range bin containing the minBinHeight(),
     * or -1 if the histogram has no bins.
     */
    public int minBinY(IHistogram2D h)
    {
        double minValue = Double.POSITIVE_INFINITY;
        int minBinY = -1;
        for (int i=h.xAxis().bins(); --i >= 0; ) {
            for (int j=h.yAxis().bins(); --j >= 0; ) {
                double value = h.binHeight(i,j);
                if (value < minValue) {
                    minValue = value;
                    minBinY = j;
                }
            }
        }
        return minBinY;
    }
}
| tobyclemson/msci-project | vendor/colt-1.2.0/src/hep/aida/ref/Util.java | Java | mit | 2,854 |
// Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using System;
using osu.Framework.Extensions;
using osu.Framework.Localisation;
using osu.Game.Graphics;
using osuTK.Graphics;
namespace osu.Game.Overlays.Dashboard.Friends
{
/// <summary>
/// A selectable stream item on the friends dashboard representing one online
/// status bucket (all / online / offline) together with its friend count.
/// </summary>
public class FriendsOnlineStatusItem : OverlayStreamItem<FriendStream>
{
    public FriendsOnlineStatusItem(FriendStream value)
        : base(value)
    {
    }

    /// <summary>The localised name of the online status represented by this item.</summary>
    protected override LocalisableString MainText => Value.Status.GetLocalisableDescription();

    /// <summary>The number of friends in this status bucket, shown next to the name.</summary>
    protected override LocalisableString AdditionalText => Value.Count.ToString();

    protected override Color4 GetBarColour(OsuColour colours)
    {
        switch (Value.Status)
        {
            case OnlineStatus.All:
                return Color4.White;

            case OnlineStatus.Online:
                return colours.GreenLight;

            case OnlineStatus.Offline:
                return Color4.Black;

            default:
                // Fail loudly for any future status value without a mapped colour.
                throw new ArgumentException($@"{Value.Status} status does not provide a colour in {nameof(GetBarColour)}.");
        }
    }
}
}
| ppy/osu | osu.Game/Overlays/Dashboard/Friends/FriendsOnlineStatusItem.cs | C# | mit | 1,270 |
// Project Orleans Cloud Service SDK ver. 1.0
//
// Copyright (c) .NET Foundation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
using System;
using System.CodeDom;
using System.CodeDom.Compiler;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text;
using Microsoft.CSharp;
using Microsoft.VisualBasic;
using Orleans.Runtime;
namespace Orleans.CodeGeneration
{
/// <summary>
/// Base class for code generators. It contains a number of helper methods for code generation
/// </summary>
internal abstract class CodeGeneratorBase : MarshalByRefObject
{
    /// <summary>
    /// Stores the compile errors
    /// </summary>
    protected static List<string> Errors = new List<string>();

    protected const string CODE_GENERATOR_NAME = "Orleans-CodeGenerator";
    protected static readonly string CodeGeneratorVersion = RuntimeVersion.FileVersion;

    // Namespace of the type currently being generated; used to decide whether
    // referenced type names need qualification.
    protected string CurrentNamespace;

    // Target output language (C# or VB) for the generated code.
    private readonly Language language;

    /// <summary>
    /// Returns a name string for a nested class type name (ClassName.TypeName)
    /// for a serializable type, the name string is only the TypeName
    /// </summary>
    internal static string GetNestedClassName(string name, bool noNamespace)
    {
        var builder = new StringBuilder();
        int index = 0;
        int start = 0;
        // Replace each CLR nesting separator '+' with '.'.
        while (start < name.Length)
        {
            index = name.IndexOf('+', start);
            if (index == -1) break;
            builder.Append(name.Substring(start, index - start));
            builder.Append('.');
            start = index + 1;
        }
        if (index == -1)
        {
            if (noNamespace)
                return name.Substring(start);
            builder.Append(name.Substring(start));
        }
        return builder.ToString();
    }

    protected CodeGeneratorBase(Language language)
    {
        ReferencedNamespaces = new HashSet<string>();
        ReferencedAssemblies = new HashSet<string>();
        this.language = language;
    }

    // Namespaces already imported by the generated file.
    internal HashSet<string> ReferencedNamespaces { get; private set; }
    // Assembly file names the generated code must reference.
    protected HashSet<string> ReferencedAssemblies { get; private set; }

    /// <summary>
    /// Calls the appropriate GetInterfaceInfo method depending on whether we are dealing with an implicit or explicit service type and
    /// returns the a dictionary of Inteface and Event info exposed by either service type
    /// </summary>
    /// <param name="grainType"></param>
    /// <returns></returns>
    internal static GrainInterfaceInfo GetInterfaceInfo(Type grainType)
    {
        var result = new GrainInterfaceInfo();
        Dictionary<int, Type> interfaces = GrainInterfaceData.GetRemoteInterfaces(grainType);
        if (interfaces.Keys.Count == 0)
        {
            // Should never happen!
            Debug.Fail("Could not find any service interfaces for type=" + grainType.Name);
        }

        IEqualityComparer<InterfaceInfo> ifaceComparer = new InterfaceInfoComparer();
        foreach (var interfaceId in interfaces.Keys)
        {
            Type interfaceType = interfaces[interfaceId];
            var interfaceInfo = new InterfaceInfo(interfaceType);
            // Deduplicate interfaces by full type name.
            if (!result.Interfaces.Values.Contains(interfaceInfo, ifaceComparer))
                result.Interfaces.Add(GrainInterfaceData.GetGrainInterfaceId(interfaceType), interfaceInfo);
        }
        return result;
    }

    /// <summary>
    /// Decide whether this method is a remote grain call method
    /// </summary>
    internal protected static bool IsGrainMethod(MethodInfo methodInfo)
    {
        if (methodInfo == null) throw new ArgumentNullException("methodInfo", "Cannot inspect null method info");

        // ignore static, event, or non-remote methods
        if (methodInfo.IsStatic || methodInfo.IsSpecialName || IsSpecialEventMethod(methodInfo))
            return false; // Methods which are derived from base class or object class, or property getter/setter methods

        return methodInfo.DeclaringType.IsInterface;
    }

    /// <summary>
    /// Returns a CodeDOM provider for the requested output language,
    /// or null if the language is not supported.
    /// </summary>
    internal static CodeDomProvider GetCodeProvider(Language language, bool debug = false)
    {
        switch (language)
        {
            case Language.CSharp:
            {
                var providerOptions = new Dictionary<string, string> { { "CompilerVersion", "v4.0" } };
                if (debug)
                    providerOptions.Add("debug", "full");
                return new CSharpCodeProvider(providerOptions);
            }
            case Language.VisualBasic:
            {
                var providerOptions = new Dictionary<string, string>();
                if (debug)
                    providerOptions.Add("debug", "full");
                var prov = new VBCodeProvider(providerOptions);
                return prov;
            }
            default:
                return null;
        }
    }

    /// <summary>
    /// Adds [GeneratedCode] (and optionally coverage/debugger-suppression)
    /// attributes to a generated type declaration.
    /// </summary>
    internal static void MarkAsGeneratedCode(CodeTypeDeclaration classRef, bool suppressDebugger = false, bool suppressCoverage = true)
    {
        classRef.CustomAttributes.Add(new CodeAttributeDeclaration(
            new CodeTypeReference(typeof(GeneratedCodeAttribute)),
            new CodeAttributeArgument(new CodePrimitiveExpression(CODE_GENERATOR_NAME)),
            new CodeAttributeArgument(new CodePrimitiveExpression(CodeGeneratorVersion))));

        if (classRef.IsInterface) return;

        if (suppressCoverage) classRef.CustomAttributes.Add(
            new CodeAttributeDeclaration(new CodeTypeReference(typeof(System.Diagnostics.CodeAnalysis.ExcludeFromCodeCoverageAttribute))));
        if (suppressDebugger) classRef.CustomAttributes.Add(
            new CodeAttributeDeclaration(new CodeTypeReference(typeof(DebuggerNonUserCodeAttribute))));
    }

    // Overridden by language-specific generators to emit the invoker body.
    protected virtual string GetInvokerImpl(GrainInterfaceData si, CodeTypeDeclaration invokerClass, Type grainType, GrainInterfaceInfo grainInterfaceInfo, bool isClient)
    {
        throw new NotImplementedException("InvokerGeneratorBasics.GetInvokerImpl");
    }

    /// <summary>
    /// get the name string for a nested class type name
    /// </summary>
    protected static string GetNestedClassName(string name)
    {
        var builder = new StringBuilder();
        int index = 0;
        int start = 0;
        while (start < name.Length)
        {
            index = name.IndexOf('+', start);
            if (index == -1) break;
            builder.Append(name.Substring(start, index - start));
            builder.Append('.');
            start = index + 1;
        }
        if (index == -1)
            builder.Append(name.Substring(start));

        return builder.ToString();
    }

    /// <summary>
    /// Decide whether the method is some special methods that implement an event.
    /// Special Methods, like add_** and remove_**, shall be marked SpecialName in the metadata
    /// </summary>
    protected static bool IsSpecialEventMethod(MethodInfo methodInfo)
    {
        return methodInfo.IsSpecialName &&
            (!(methodInfo.Name.StartsWith("get_") || methodInfo.Name.StartsWith("set_")));
    }

    // Overridden by language-specific generators.
    protected virtual string GetOrleansGetMethodNameImpl(Type grainType, GrainInterfaceInfo grainInterfaceInfo)
    {
        throw new NotImplementedException("InvokerGeneratorBasics.GetOrleansGetMethodNameImpl");
    }

    /// <summary>
    /// Get the name string for generic type
    /// </summary>
    protected virtual string GetGenericTypeName(Type type, Action<Type> referred, Func<Type, bool> noNamespace = null)
    {
        throw new NotImplementedException("GetGenericTypeName");
    }

    /// <summary>
    /// Get the name string for generic type
    /// </summary>
    protected virtual string GetGenericTypeName(Type type)
    {
        // Add in the namespace of the type and the assembly file in which the type is defined
        AddReferencedAssembly(type);
        // Add in the namespace of the type and the assembly file in which any generic argument types are defined
        if (type.IsGenericType)
        {
            foreach (Type argument in type.GetGenericArguments())
                AddReferencedAssembly(argument);
        }

        var typeName = TypeUtils.GetTemplatedName(type, t => CurrentNamespace != t.Namespace && !ReferencedNamespaces.Contains(t.Namespace), language);
        return GetNestedClassName(typeName);
    }

    /// <summary>
    /// Returns the language-dependent name for the provided <paramref name="parameter"/>.
    /// </summary>
    /// <param name="parameter">The parameter.</param>
    /// <returns>The language-dependent name for the provided <paramref name="parameter"/>.</returns>
    protected virtual string GetParameterName(ParameterInfo parameter)
    {
        throw new NotImplementedException("GetParamterName");
    }

    // Records the assembly file that defines the given type so it gets
    // added to the generated project's references (deduplicated).
    protected void AddReferencedAssembly(Type t)
    {
        var assembly = t.Assembly.GetName().Name + Path.GetExtension(t.Assembly.Location).ToLowerInvariant();
        if (!ReferencedAssemblies.Contains(assembly))
            ReferencedAssemblies.Add(assembly);
    }

    /// <summary>
    /// Describes one remote interface and its invokable methods, keyed by method id.
    /// </summary>
    internal class InterfaceInfo
    {
        public Type InterfaceType { get; private set; }
        public Dictionary<int, MethodInfo> Methods { get; private set; }

        public InterfaceInfo(Type interfaceType)
        {
            InterfaceType = interfaceType;
            Methods = GetGrainMethods();
        }

        private Dictionary<int, MethodInfo> GetGrainMethods()
        {
            var grainMethods = new Dictionary<int, MethodInfo>();
            foreach (var interfaceMethodInfo in GrainInterfaceData.GetMethods(InterfaceType))
            {
                ParameterInfo[] parameters = interfaceMethodInfo.GetParameters();
                var args = new Type[parameters.Length];
                for (int i = 0; i < parameters.Length; i++)
                    args[i] = parameters[i].ParameterType;

                // Prefer the interface's own declaration when it can be resolved.
                MethodInfo methodInfo = InterfaceType.GetMethod(interfaceMethodInfo.Name, args) ?? interfaceMethodInfo;
                if (IsGrainMethod(methodInfo))
                    grainMethods.Add(GrainInterfaceData.ComputeMethodId(methodInfo), methodInfo);
            }
            return grainMethods;
        }

        public override string ToString()
        {
            return "InterfaceInfo:" + InterfaceType.FullName + ",#Methods=" + Methods.Count;
        }
    }

    /// <summary>
    /// Collects the remote interfaces exposed by a grain, keyed by interface id.
    /// </summary>
    internal class GrainInterfaceInfo
    {
        public Dictionary<int, InterfaceInfo> Interfaces { get; private set; }

        public GrainInterfaceInfo()
        {
            Interfaces = new Dictionary<int, InterfaceInfo>();
        }
    }

    /// <summary>
    /// Compares <see cref="InterfaceInfo"/> instances by the full name of the
    /// interface type they describe.
    /// </summary>
    internal class InterfaceInfoComparer : IEqualityComparer<InterfaceInfo>
    {
        #region IEqualityComparer<InterfaceInfo> Members

        public bool Equals(InterfaceInfo x, InterfaceInfo y)
        {
            var xFullName = TypeUtils.GetFullName(x.InterfaceType);
            var yFullName = TypeUtils.GetFullName(y.InterfaceType);
            return string.CompareOrdinal(xFullName, yFullName) == 0;
        }

        public int GetHashCode(InterfaceInfo obj)
        {
            // FIX: previously threw NotImplementedException, violating the
            // IEqualityComparer<T> contract (Equals was implemented but hashing
            // was not, so the comparer blew up in any hash-based usage).
            // Hash the same key that Equals compares: the interface's full name.
            return TypeUtils.GetFullName(obj.InterfaceType).GetHashCode();
        }

        #endregion
    }
}
}
| cbredlow/orleans | src/ClientGenerator/CodeGeneratorBase.cs | C# | mit | 13,358 |
# -*- coding: utf-8 -*-
from django.db import models, migrations
def migrate_requested_on(apps, schema_editor):
    """Backfill ``Changeset.requested_on`` from ``committed_on``.

    Rows created before the ``requested_on`` field was added (migration 0004)
    have it unset; give them the commit timestamp as a sensible default.
    """
    Changeset = apps.get_model("changeset", "Changeset")
    # Only fetch rows that actually need backfilling instead of loading every
    # changeset and re-checking the field in Python. (DateTimeField values are
    # always truthy, so the original ``if not requested_on`` test was
    # equivalent to an is-NULL check.)
    for changeset in Changeset.objects.filter(requested_on__isnull=True):
        changeset.requested_on = changeset.committed_on
        changeset.save()
class Migration(migrations.Migration):
    # Data migration: backfill requested_on (field added in 0004) for
    # pre-existing changesets.

    dependencies = [
        ('changeset', '0004_changeset_requested_on'),
    ]

    operations = [
        # Forward-only: no reverse_code is supplied, so this migration
        # cannot be unapplied.
        migrations.RunPython(migrate_requested_on),
    ] | tzhaoredhat/automation | pdc/apps/changeset/migrations/0005_changeset_requested_on_data_migration.py | Python | mit | 543