code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
// Monotonic counters shared by every presentation connection on this page.
let connectionIdx = 0;
let messageIdx = 0;

/**
 * Tracks a newly received presentation connection: assigns it a sequential
 * id, logs its lifecycle, echoes every incoming message back to the sender,
 * and forwards fruit names to the emoji display.
 * @param {PresentationConnection} connection - Connection to register.
 */
function addConnection(connection) {
  connection.connectionId = ++connectionIdx;
  addMessage(`New connection #${connectionIdx}`);

  connection.addEventListener('message', (event) => {
    messageIdx++;
    const data = JSON.parse(event.data);
    const logString =
        `Message ${messageIdx} from connection #${connection.connectionId}` +
        `: ${data.message}`;
    addMessage(logString, data.lang);
    maybeSetFruit(data.message);
    connection.send(`Received message ${messageIdx}`);
  });

  connection.addEventListener('close', (event) => {
    addMessage(`Connection #${connection.connectionId} closed, reason = ` +
               `${event.reason}, message = ${event.message}`);
  });
}
/* Utils */
/**
 * Maps lowercase fruit names to their emoji.
 * Fix: 'green apple' previously pointed at U+1F35F (FRENCH FRIES); the
 * GREEN APPLE emoji is U+1F34F.
 */
const fruitEmoji = {
  'grapes': '\u{1F347}',
  'watermelon': '\u{1F349}',
  'melon': '\u{1F348}',
  'tangerine': '\u{1F34A}',
  'lemon': '\u{1F34B}',
  'banana': '\u{1F34C}',
  'pineapple': '\u{1F34D}',
  'green apple': '\u{1F34F}',
  'apple': '\u{1F34E}',
  'pear': '\u{1F350}',
  'peach': '\u{1F351}',
  'cherries': '\u{1F352}',
  'strawberry': '\u{1F353}'
};
/**
 * Appends one log entry to the on-page message list.
 * @param {string} content - Text to display.
 * @param {string=} language - Optional language tag set as the item's lang
 *     attribute (used for correct font/locale rendering).
 */
function addMessage(content, language) {
  const listItem = document.createElement('li');
  listItem.textContent = content;
  if (language) {
    listItem.lang = language;
  }
  document.querySelector('#message-list').appendChild(listItem);
}
/**
 * If the message names a known fruit (case-insensitive), replaces the main
 * display with the matching emoji; otherwise does nothing.
 * @param {string} message - Candidate fruit name.
 */
function maybeSetFruit(message) {
  const candidate = message.toLowerCase();
  if (!(candidate in fruitEmoji)) {
    return;
  }
  document.querySelector('#main').textContent = fruitEmoji[candidate];
}
// Once the receiver page is loaded, register every already-established
// presentation connection and listen for new ones.
document.addEventListener('DOMContentLoaded', function() {
  if (navigator.presentation.receiver) {
    navigator.presentation.receiver.connectionList.then((list) => {
      // forEach, not map: we only want the side effect of registering each
      // existing connection — the mapped array was discarded.
      list.connections.forEach((connection) => addConnection(connection));
      list.addEventListener('connectionavailable', (event) => {
        addConnection(event.connection);
      });
    });
  }
});
| beaufortfrancois/samples | presentation-api/receiver/receiver.js | JavaScript | apache-2.0 | 1,980 |
/*
* JBoss, Home of Professional Open Source
* Copyright 2014, Red Hat, Inc. and/or its affiliates, and individual
* contributors by the @authors tag. See the copyright.txt in the
* distribution for a full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.as.quickstart.deltaspike.partialbean;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import javax.enterprise.context.ApplicationScoped;
/**
* This class implements a dynamic DeltaSpike Partial Bean. It is bound to
* one or more abstract classes or interfaces via the Binding Annotation
* (@ExamplePartialBeanBinding below).
*
* All abstract, unimplemented methods from those beans will be implemented
* via the invoke method.
*
*/
@ExamplePartialBeanBinding
@ApplicationScoped
public class ExamplePartialBeanImplementation implements InvocationHandler {

    /**
     * In our example, this method will be invoked when the "sayHello" method is called.
     *
     * @param proxy The object upon which the method is being invoked.
     * @param method The method being invoked (sayHello in this QuickStart)
     * @param args The arguments being passed in to the invoked method
     * @return a greeting built from the first argument of the invoked method
     * @throws Throwable declared by {@link InvocationHandler#invoke}; not thrown here
     */
    @Override
    public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
        return "Hello " + args[0];
    }
}
| jboss-developer/jboss-wfk-quickstarts | deltaspike-partialbean-basic/src/main/java/org/jboss/as/quickstart/deltaspike/partialbean/ExamplePartialBeanImplementation.java | Java | apache-2.0 | 1,876 |
/*
* JBoss, Home of Professional Open Source
* Copyright 2010, Red Hat, Inc. and/or its affiliates, and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hibernate.validator.cfg.defs;
import javax.validation.constraints.Pattern;
import org.hibernate.validator.cfg.ConstraintDef;
import org.hibernate.validator.constraints.Email;
/**
 * Programmatic constraint definition for the Hibernate Validator
 * {@code @Email} constraint, for use with the constraint-mapping API.
 * Both configuration methods return {@code this} for fluent chaining.
 *
 * @author Hardy Ferentschik
 */
public class EmailDef extends ConstraintDef<EmailDef, Email> {

	public EmailDef() {
		super( Email.class );
	}

	/**
	 * Sets the additional regular expression the e-mail address must match.
	 *
	 * @param regexp the regular expression
	 * @return {@code this} for method chaining
	 */
	public EmailDef regexp(String regexp) {
		addParameter( "regexp", regexp );
		return this;
	}

	/**
	 * Sets the {@link Pattern.Flag}s applied when matching the regular expression.
	 *
	 * @param flags the pattern flags
	 * @return {@code this} for method chaining
	 */
	public EmailDef flags(Pattern.Flag... flags) {
		addParameter( "flags", flags );
		return this;
	}
}
| jmartisk/hibernate-validator | engine/src/main/java/org/hibernate/validator/cfg/defs/EmailDef.java | Java | apache-2.0 | 1,337 |
/*
Copyright IBM Corp. 2016 All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package common
import (
"github.com/golang/protobuf/proto"
"github.com/hyperledger/fabric/core/util"
)
// Hash returns the cryptographic hash of the serialized block header.
// Note: protobuf serialization is not canonical, so this is not a stable
// hashing scheme (see the XXX remark below).
func (b *BlockHeader) Hash() []byte {
	data, err := proto.Marshal(b) // XXX this is wrong, protobuf is not the right mechanism to serialize for a hash
	if err != nil {
		// Marshaling an in-memory header should never fail; surface the
		// underlying error instead of discarding it.
		panic("This should never fail and is generally irrecoverable: " + err.Error())
	}
	return util.ComputeCryptoHash(data)
}
// Hash returns the cryptographic hash of the serialized block data.
// Note: protobuf serialization is neither canonical nor a Merkle tree, so
// this is not a stable hashing scheme (see the XXX remark below).
func (b *BlockData) Hash() []byte {
	data, err := proto.Marshal(b) // XXX this is wrong, protobuf is not the right mechanism to serialize for a hash, AND, it is not a MerkleTree hash
	if err != nil {
		// Marshaling in-memory block data should never fail; surface the
		// underlying error instead of discarding it.
		panic("This should never fail and is generally irrecoverable: " + err.Error())
	}
	return util.ComputeCryptoHash(data)
}
| stonejiang208/fabric | protos/common/block.go | GO | apache-2.0 | 1,283 |
<?php
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* GENERATED CODE WARNING
* Generated by gapic-generator-php from the file
* https://github.com/googleapis/googleapis/blob/master/google/cloud/accessapproval/v1/accessapproval.proto
* Updates to the above are reflected here through a refresh process.
*/
namespace Google\Cloud\AccessApproval\V1;
use Google\Cloud\AccessApproval\V1\Gapic\AccessApprovalGapicClient;
/**
 * Hand-written extension point for the generated GAPIC client.
 *
 * {@inheritdoc}
 */
class AccessApprovalClient extends AccessApprovalGapicClient
{
    // This class is intentionally empty, and is intended to hold manual additions to
    // the generated {@see AccessApprovalGapicClient} class. Per the generated-code
    // warning above, the GAPIC parent is refreshed from the proto definitions, so
    // manual code belongs here rather than in the parent.
}
| googleapis/google-cloud-php-access-approval | src/V1/AccessApprovalClient.php | PHP | apache-2.0 | 1,209 |
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/core/grappler/optimizers/arithmetic_optimizer.h"
#include "tensorflow/cc/ops/standard_ops.h"
#include "tensorflow/core/framework/node_def.pb.h"
#include "tensorflow/core/framework/tensor_testutil.h"
#include "tensorflow/core/grappler/grappler_item.h"
#include "tensorflow/core/grappler/inputs/trivial_test_graph_input_yielder.h"
#include "tensorflow/core/grappler/optimizers/constant_folding.h"
#include "tensorflow/core/grappler/optimizers/model_pruner.h"
#include "tensorflow/core/grappler/utils.h"
#include "tensorflow/core/grappler/utils/grappler_test.h"
#include "tensorflow/core/lib/core/status_test_util.h"
#include "tensorflow/core/platform/test.h"
namespace tensorflow {
namespace grappler {
namespace {

// Name prefixes that ArithmeticOptimizer rewrite stages prepend to the nodes
// they create. The exact strings are load-bearing: the tests below locate
// optimized nodes by these generated names.
constexpr char kHoistFactorOptimizerDiv[] =
    "ArithmeticOptimizer/HoistCommonFactor_Div_";
constexpr char kHoistFactorOptimizerMul[] =
    "ArithmeticOptimizer/HoistCommonFactor_Mul_";
constexpr char kHoistFactorOptimizerAdd[] =
    "ArithmeticOptimizer/HoistCommonFactor_Add_";
constexpr char kSimplifyAggregationConst[] =
    "ArithmeticOptimizer/SimplifyAggregation_Const_";
constexpr char kSimplifyAggregationMul[] =
    "ArithmeticOptimizer/SimplifyAggregation_Mul_";

// Optimized name of outer Mul node by HoistCommonFactorOutOfAggregation.
string HoistMulName(const string& name) {
  return AddPrefixToNodeName(name, kHoistFactorOptimizerMul, "");
}

// Optimized name of outer Div node by HoistCommonFactorOutOfAggregation.
string HoistDivName(const string& name) {
  return AddPrefixToNodeName(name, kHoistFactorOptimizerDiv, "");
}

// Optimized name of inner Add node by HoistCommonFactorOutOfAggregation.
string HoistAddName(const string& name) {
  return AddPrefixToNodeName(name, kHoistFactorOptimizerAdd, "");
}

// Optimized name of Const node by SimplifyAggregation.
string AggregationConstName(const string& name) {
  return AddPrefixToNodeName(name, kSimplifyAggregationConst, "");
}

// Optimized name of Mul node by SimplifyAggregation.
string AggregationMulName(const string& name) {
  return AddPrefixToNodeName(name, kSimplifyAggregationMul, "");
}

// Name with the generic ArithmeticOptimizer prefix applied.
string OptimizedName(const string& name) {
  return AddPrefixToNodeName(name, kArithmeticOptimizer);
}

// Asserts that both graphs contain the same nodes in the same order, with
// identical names, ops, and inputs. |line| is the caller's __LINE__ so
// failures point at the calling test.
void VerifyGraphsMatch(const GraphDef& original_graph,
                       const GraphDef& optimized_graph, int line) {
  EXPECT_EQ(original_graph.node_size(), optimized_graph.node_size()) << line;
  for (int i = 0; i < original_graph.node_size(); ++i) {
    const NodeDef& original = original_graph.node(i);
    const NodeDef& optimized = optimized_graph.node(i);
    EXPECT_EQ(original.name(), optimized.name()) << line;
    EXPECT_EQ(original.op(), optimized.op()) << line;
    EXPECT_EQ(original.input_size(), optimized.input_size()) << line;
    for (int j = 0; j < original.input_size(); ++j) {
      EXPECT_EQ(original.input(j), optimized.input(j)) << line;
    }
  }
}

}  // namespace
class ArithmeticOptimizerTest : public GrapplerTest {
protected:
// Optimize a graph using ArithmeticOptimizer and prune all the nodes that no
// longer have any output consumers.
void OptimizeAndPrune(ArithmeticOptimizer* optimizer, GrapplerItem* item,
GraphDef* output) {
TF_EXPECT_OK(optimizer->Optimize(nullptr, *item, output));
item->graph.Swap(output);
output->Clear();
TF_EXPECT_OK(ModelPruner().Optimize(nullptr, *item, output));
}
// Run ArithmeticOptimizer twice to make sure the rewrite is idempotent.
void OptimizeTwice(ArithmeticOptimizer* optimizer, GrapplerItem* item,
GraphDef* output) {
TF_EXPECT_OK(optimizer->Optimize(nullptr, *item, output));
item->graph.Swap(output);
output->Clear();
TF_EXPECT_OK(optimizer->Optimize(nullptr, *item, output));
}
// Run ArithmeticOptimizer twice to make sure the rewrite is idempotent.
// Optionally run a constant folding pass before pruning.
void OptimizeTwiceAndPrune(ArithmeticOptimizer* optimizer, GrapplerItem* item,
GraphDef* output, bool const_folding = false) {
TF_EXPECT_OK(optimizer->Optimize(nullptr, *item, output));
item->graph.Swap(output);
output->Clear();
TF_EXPECT_OK(optimizer->Optimize(nullptr, *item, output));
if (const_folding) {
item->graph.Swap(output);
output->Clear();
TF_EXPECT_OK(ConstantFolding(/*cpu_device=*/nullptr)
.Optimize(nullptr, *item, output));
}
item->graph.Swap(output);
output->Clear();
TF_EXPECT_OK(ModelPruner().Optimize(nullptr, *item, output));
}
// TODO(ezhulenev): Make private. After migration to stages each test
// should explicitly enable required optimization for tests isolation
void DisableAllStages(ArithmeticOptimizer* optimizer) {
ArithmeticOptimizer::ArithmeticOptimizerOptions options;
options.dedup_computations = false;
options.combine_add_to_addn = false;
options.convert_sqrt_div_to_rsqrt_mul = false;
options.convert_pow = false;
options.convert_log1p = false;
options.optimize_max_or_min_of_monotonic = false;
options.fold_conjugate_into_transpose = false;
options.fold_multiply_into_conv = false;
options.fold_transpose_into_matmul = false;
options.hoist_common_factor_out_of_aggregation = false;
options.hoist_cwise_unary_chains = false;
options.minimize_broadcasts = false;
options.remove_identity_transpose = false;
options.remove_involution = false;
options.remove_idempotent = false;
options.remove_redundant_bitcast = false;
options.remove_redundant_cast = false;
options.remove_redundant_reshape = false;
options.remove_negation = false;
options.remove_logical_not = false;
options.reorder_cast_and_transpose = false;
options.replace_mul_with_square = false;
options.simplify_aggregation = false;
options.unary_ops_composition = false;
optimizer->options_ = options;
}
void DisableAddToAddNCombining(ArithmeticOptimizer* optimizer) {
optimizer->options_.combine_add_to_addn = false;
}
void EnableOnlyAddToAddNCombining(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.combine_add_to_addn = true;
}
void EnableOnlyFoldConjugateIntoTranspose(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.fold_conjugate_into_transpose = true;
}
void EnableOnlyFoldMultipleIntoConv(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.fold_multiply_into_conv = true;
}
void EnableOnlyFoldTransposeIntoMatMul(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.fold_transpose_into_matmul = true;
}
void EnableOnlyHoistCommonFactor(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.hoist_common_factor_out_of_aggregation = true;
}
void EnableOnlyMinimizeBroadcasts(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.minimize_broadcasts = true;
}
void EnableOnlyRemoveIdentityTranspose(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.remove_identity_transpose = true;
}
void EnableOnlyRemoveInvolution(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.remove_involution = true;
}
void EnableOnlyRemoveRedundantBitcast(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.remove_redundant_bitcast = true;
}
void EnableOnlyRemoveRedundantCast(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.remove_redundant_cast = true;
}
void EnableOnlyRemoveRedundantReshape(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.remove_redundant_reshape = true;
}
void EnableOnlyRemoveNegation(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.remove_negation = true;
}
void EnableOnlyReorderCastAndTranspose(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.reorder_cast_and_transpose = true;
}
void EnableOnlyReplaceMulWithSquare(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.replace_mul_with_square = true;
}
void EnableOnlyHoistCWiseUnaryChains(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.hoist_cwise_unary_chains = true;
}
void EnableOnlySqrtDivToRsqrtMul(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.convert_sqrt_div_to_rsqrt_mul = true;
}
void EnableOnlyConvertPow(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.convert_pow = true;
}
void EnableOnlyRemoveIdempotent(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.remove_idempotent = true;
}
void EnableOnlyRemoveLogicalNot(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.remove_logical_not = true;
}
void EnableOnlySimplifyAggregation(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.simplify_aggregation = true;
}
void EnableOnlyLog1p(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.convert_log1p = true;
}
void EnableOnlyOptimizeMaxOrMinOfMonotonic(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.optimize_max_or_min_of_monotonic = true;
}
void EnableOnlyExpm1(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.convert_expm1 = true;
}
void EnableOnlyUnaryOpsComposition(ArithmeticOptimizer* optimizer) {
DisableAllStages(optimizer);
optimizer->options_.unary_ops_composition = true;
}
};
// The optimizer must leave an already-minimal graph byte-for-byte unchanged.
TEST_F(ArithmeticOptimizerTest, NoOp) {
  // This trivial graph is so basic there's nothing to optimize.
  TrivialTestGraphInputYielder fake_input(4, 1, 10, false, {"CPU:0"});
  GrapplerItem item;
  CHECK(fake_input.NextItem(&item));

  ArithmeticOptimizer optimizer;
  GraphDef output;
  Status status = optimizer.Optimize(nullptr, item, &output);
  TF_EXPECT_OK(status);

  VerifyGraphsMatch(item.graph, output, __LINE__);
}
// Two identical Const nodes are deduplicated: one is removed and the Div is
// rewired to consume the survivor on both inputs, preserving the result.
TEST_F(ArithmeticOptimizerTest, OpDedupping) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output c1 = ops::Const(s.WithOpName("c1"), {3.14, 2.7}, {1, 2});
  Output c2 = ops::Const(s.WithOpName("c2"), {3.14, 2.7}, {1, 2});
  Output div = ops::Div(s.WithOpName("div"), c1, c2);
  GrapplerItem item;
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  item.fetch = {"div"};
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch);
  EXPECT_EQ(1, tensors_expected.size());

  ArithmeticOptimizer optimizer;
  GraphDef output;
  OptimizeTwice(&optimizer, &item, &output);
  NodeMap node_map(&output);
  // c2 was removed; only c1 and div remain.
  EXPECT_EQ(2, output.node_size());
  const NodeDef* new_c1 = node_map.GetNode("c1");
  ASSERT_NE(new_c1, nullptr);
  const NodeDef* new_div = node_map.GetNode("div");
  ASSERT_NE(new_div, nullptr);
  EXPECT_EQ(2, new_div->input_size());
  EXPECT_EQ("c1", new_div->input(0));
  EXPECT_EQ("c1", new_div->input(1));
  // Numerical result must be unchanged by the rewrite.
  auto tensors = EvaluateNodes(output, item.fetch);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<double>(tensors_expected[0], tensors[0], 1e-6);
}
// Duplicate CheckNumerics and Assert nodes are deduplicated too; the
// surviving Assert is kept as a control dependency on the consumer.
TEST_F(ArithmeticOptimizerTest, OpDeduppingAssertAndCheckNumerics) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output p = ops::Placeholder(s, DT_BOOL, ops::Placeholder::Shape({}));
  Output c = ops::Const(s.WithOpName("c"), {3.14, 2.7}, {1, 2});
  auto check1 = ops::CheckNumerics(s.WithOpName("check1"), c, "foo");
  auto check2 = ops::CheckNumerics(s.WithOpName("check2"), c, "foo");
  auto assert1 = ops::Assert(s.WithOpName("assert1"), p, {c});
  auto assert2 = ops::Assert(s.WithOpName("assert2"), p, {c});
  Output div = ops::Div(s.WithOpName("div").WithControlDependencies(
                            {assert1.operation, assert2.operation}),
                        check1, check2);
  GrapplerItem item;
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  item.fetch = {"div"};
  Tensor bool_t(DT_BOOL, TensorShape({}));
  bool_t.scalar<bool>().setConstant(true);
  auto tensors_expected =
      EvaluateNodes(item.graph, item.fetch, {{"Placeholder", bool_t}});
  EXPECT_EQ(1, tensors_expected.size());

  ArithmeticOptimizer optimizer;
  GraphDef output;
  OptimizeTwice(&optimizer, &item, &output);
  NodeMap node_map(&output);
  // check2 and assert2 are gone; div now consumes check1 twice, with the
  // deduped assert1 attached twice as a control input.
  EXPECT_EQ(5, output.node_size());
  const NodeDef* new_div = node_map.GetNode("div");
  ASSERT_NE(new_div, nullptr);
  EXPECT_EQ(4, new_div->input_size());
  EXPECT_EQ("check1", new_div->input(0));
  EXPECT_EQ("check1", new_div->input(1));
  EXPECT_EQ("^assert1", new_div->input(2));
  EXPECT_EQ("^assert1", new_div->input(3));
  auto tensors = EvaluateNodes(output, item.fetch, {{"Placeholder", bool_t}});
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<double>(tensors_expected[0], tensors[0], 1e-6);
}
// Mul(c1, c2) and Mul(c2, c1) are recognized as duplicates because Mul is
// commutative; div1 ends up consuming the canonical mul1 on both inputs.
TEST_F(ArithmeticOptimizerTest, OpDedupCommutative) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output c1 = ops::Const(s.WithOpName("c1"), {1.0f, 2.0f}, {1, 2});
  Output c2 = ops::Const(s.WithOpName("c2"), {3.0f, 4.0f}, {1, 2});
  Output mul1 = ops::Mul(s.WithOpName("mul1"), c1, c2);
  Output mul2 = ops::Mul(s.WithOpName("mul2"), c2, c1);
  Output div1 = ops::Div(s.WithOpName("div1"), mul1, mul2);
  GrapplerItem item;
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  item.fetch = {"div1"};
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch);
  EXPECT_EQ(1, tensors_expected.size());

  ArithmeticOptimizer optimizer;
  GraphDef output;
  OptimizeTwice(&optimizer, &item, &output);
  NodeMap node_map(&output);
  // mul2 was deduplicated away: c1, c2, mul1, div1 remain.
  EXPECT_EQ(4, output.node_size());
  const NodeDef* new_c1 = node_map.GetNode("c1");
  ASSERT_NE(new_c1, nullptr);
  const NodeDef* new_c2 = node_map.GetNode("c2");
  ASSERT_NE(new_c2, nullptr);
  const NodeDef* new_mul1 = node_map.GetNode("mul1");
  ASSERT_NE(new_mul1, nullptr);
  EXPECT_EQ(2, new_mul1->input_size());
  EXPECT_EQ("c1", new_mul1->input(0));
  EXPECT_EQ("c2", new_mul1->input(1));
  const NodeDef* new_div1 = node_map.GetNode("div1");
  ASSERT_NE(new_div1, nullptr);
  EXPECT_EQ(2, new_div1->input_size());
  EXPECT_EQ("mul1", new_div1->input(0));
  EXPECT_EQ("mul1", new_div1->input(1));
  auto tensors = EvaluateNodes(output, item.fetch);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
}
// Mul(c, c) is rewritten to Square(c); the control dependency on d must be
// carried over to the new Square node.
TEST_F(ArithmeticOptimizerTest, ReplaceMulWithSquare) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output c = ops::Const(s.WithOpName("c"), {1.0f, 2.0f}, {1, 2});
  Output d = ops::Const(s.WithOpName("d"), {3.0f, 4.0f}, {1, 2});
  Output mul = ops::Mul(s.WithControlDependencies(d).WithOpName("mul"), c, c);
  Output id = ops::Identity(s.WithOpName("id"), mul);
  GrapplerItem item;
  item.fetch = {"id"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch);
  EXPECT_EQ(1, tensors_expected.size());

  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyReplaceMulWithSquare(&optimizer);
  OptimizeAndPrune(&optimizer, &item, &output);

  EXPECT_EQ(4, output.node_size());
  NodeMap node_map(&output);
  // The rewritten node carries the stage's name prefix.
  const string p = "ArithmeticOptimizer/ReplaceMulWithSquare";
  const NodeDef* square_node = node_map.GetNode(strings::StrCat(p, "_", "mul"));
  ASSERT_NE(square_node, nullptr);
  EXPECT_EQ("Square", square_node->op());
  EXPECT_EQ("c", square_node->input(0));
  EXPECT_EQ("^d", square_node->input(1));
  auto tensors = EvaluateNodes(output, item.fetch);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
}
// Back-to-back involutions (Neg(Neg(x)), Reciprocal(Reciprocal(x))) cancel
// out entirely, leaving the identity consuming the constant directly.
TEST_F(ArithmeticOptimizerTest, RemoveInvolution_AdjacentNodes) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();

  auto c = ops::Const(s.WithOpName("c"), {1.0f, 2.0f}, {1, 2});
  auto neg1 = ops::Neg(s.WithOpName("neg1"), c);
  auto neg2 = ops::Neg(s.WithOpName("neg2"), neg1);
  auto recip1 = ops::Reciprocal(s.WithOpName("recip1"), neg2);
  auto recip2 = ops::Reciprocal(s.WithOpName("recip2"), recip1);
  auto id = ops::Identity(s.WithOpName("id"), recip2);

  GrapplerItem item;
  item.fetch = {"id"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch);
  EXPECT_EQ(1, tensors_expected.size());

  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyRemoveInvolution(&optimizer);
  OptimizeAndPrune(&optimizer, &item, &output);

  // Negation and Reciprocal nodes cancelled each other.
  EXPECT_EQ(2, output.node_size());
  EXPECT_EQ("id", output.node(1).name());
  EXPECT_EQ("c", output.node(1).input(0));

  auto tensors = EvaluateNodes(output, item.fetch);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
}
// A pair of Reciprocal nodes separated by value-preserving ops (Identity,
// Squeeze) still cancels; the chain is rewired to consume the constant.
TEST_F(ArithmeticOptimizerTest, RemoveInvolution_AroundValuePreservingChain) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();

  auto c = ops::Const(s.WithOpName("c"), {1.0f, 2.0f}, {1, 2});
  auto recip1 = ops::Reciprocal(s.WithOpName("recip1"), c);
  auto id1 = ops::Identity(s.WithOpName("id1"), recip1);
  auto squeeze = ops::Squeeze(s.WithOpName("squeeze"), id1);
  auto recip2 = ops::Reciprocal(s.WithOpName("recip2"), squeeze);
  auto id2 = ops::Identity(s.WithOpName("id2"), recip2);

  std::vector<string> fetch = {"id2"};

  GrapplerItem item;
  item.fetch = fetch;
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto tensors_expected = EvaluateNodes(item.graph, fetch);
  EXPECT_EQ(1, tensors_expected.size());

  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyRemoveInvolution(&optimizer);
  OptimizeTwiceAndPrune(&optimizer, &item, &output);

  // Check that Reciprocal nodes were removed from the graph.
  EXPECT_EQ(3, output.node_size());

  // And const directly flows into squeeze.
  int found = 0;
  for (const NodeDef& node : output.node()) {
    if (node.name() == "squeeze") {
      EXPECT_EQ("c", node.input(0));
      found++;
    } else if (node.name() == "id2") {
      EXPECT_EQ("squeeze", node.input(0));
      found++;
    }
  }
  EXPECT_EQ(2, found);

  auto tensors = EvaluateNodes(output, fetch);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
}
// When the second Reciprocal only relates to the first through a control
// dependency (not a data edge), no cancellation may happen.
TEST_F(ArithmeticOptimizerTest, RemoveInvolution_SkipControlDependencies) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();

  auto c = ops::Const(s.WithOpName("c"), {1.0f, 2.0f}, {1, 2});
  auto recip1 = ops::Reciprocal(s.WithOpName("recip1"), c);
  auto id1 = ops::Identity(s.WithOpName("id1"), recip1);
  auto squeeze = ops::Squeeze(s.WithOpName("squeeze"), id1);
  auto recip2 = ops::Reciprocal(
      s.WithOpName("recip2").WithControlDependencies(squeeze), c);
  auto id2 = ops::Identity(s.WithOpName("id2"), recip2);

  std::vector<string> fetch = {"id2"};

  GrapplerItem item;
  item.fetch = fetch;
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto tensors_expected = EvaluateNodes(item.graph, fetch);
  EXPECT_EQ(1, tensors_expected.size());

  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyRemoveInvolution(&optimizer);
  OptimizeTwice(&optimizer, &item, &output);  // do not prune in this test

  // The optimizer should be a noop.
  VerifyGraphsMatch(item.graph, output, __LINE__);

  auto tensors = EvaluateNodes(output, fetch);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
}
// Add(x, x) is simplified to Mul(Const(2), x) by the SimplifyAggregation
// stage; the new Const keeps a control dependency on x.
TEST_F(ArithmeticOptimizerTest, TrivialSumsSimple) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output x = ops::Const(s.WithOpName("x"), {1.0f, 2.0f}, {1, 2});
  Output add = ops::Add(s.WithOpName("add"), x, x);
  Output id = ops::Identity(s.WithOpName("id"), add);

  GrapplerItem item;
  item.fetch = {"id"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));

  auto tensors_expected = EvaluateNodes(item.graph, item.fetch);
  EXPECT_EQ(1, tensors_expected.size());

  ArithmeticOptimizer optimizer;
  GraphDef output;
  OptimizeTwice(&optimizer, &item, &output);
  NodeMap node_map(&output);

  EXPECT_EQ(5, output.node_size());

  const string optimized_const_name = AggregationConstName("add");
  const string optimized_mul_name = AggregationMulName("add");

  const NodeDef* new_const = node_map.GetNode(optimized_const_name);
  ASSERT_NE(new_const, nullptr);
  EXPECT_EQ("^x", new_const->input(0));
  // Tensor content is the little-endian float 2.0f.
  EXPECT_EQ(string("\0\0\0@", 4),
            new_const->attr().at("value").tensor().tensor_content());

  const NodeDef* new_mul = node_map.GetNode(optimized_mul_name);
  ASSERT_NE(new_mul, nullptr);
  EXPECT_EQ(optimized_const_name, new_mul->input(0));
  EXPECT_EQ("x", new_mul->input(1));

  const NodeDef* new_id = node_map.GetNode("id");
  ASSERT_NE(new_id, nullptr);
  EXPECT_EQ(optimized_mul_name, new_id->input(0));

  auto tensors = EvaluateNodes(output, item.fetch);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
}
// Same rewrite as TrivialSumsSimple, but the original Add carried a control
// dependency on y — it must survive on the replacement Mul.
TEST_F(ArithmeticOptimizerTest, TrivialSumsSimpleWithControlDep) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output y = ops::Const(s.WithOpName("y"), {1.0f, 2.0f}, {1, 2});
  Output x = ops::Const(s.WithOpName("x"), {3.0f, 4.0f}, {1, 2});
  Output add = ops::Add(s.WithOpName("add").WithControlDependencies(y), x, x);
  Output id = ops::Identity(s.WithOpName("id"), add);

  GrapplerItem item;
  TF_CHECK_OK(s.ToGraphDef(&item.graph));

  std::vector<string> fetch = {"id"};
  auto tensors_expected = EvaluateNodes(item.graph, fetch);
  EXPECT_EQ(1, tensors_expected.size());

  ArithmeticOptimizer optimizer;
  GraphDef output;
  OptimizeTwice(&optimizer, &item, &output);
  NodeMap node_map(&output);

  EXPECT_EQ(6, output.node_size());

  const string optimized_const_name = AggregationConstName("add");
  const string optimized_mul_name = AggregationMulName("add");

  const NodeDef* new_const = node_map.GetNode(optimized_const_name);
  ASSERT_NE(new_const, nullptr);
  EXPECT_EQ("^x", new_const->input(0));
  // Tensor content is the little-endian float 2.0f.
  EXPECT_EQ(string("\0\0\0@", 4),
            new_const->attr().at("value").tensor().tensor_content());

  const NodeDef* new_mul = node_map.GetNode(optimized_mul_name);
  ASSERT_NE(new_mul, nullptr);
  EXPECT_EQ(optimized_const_name, new_mul->input(0));
  EXPECT_EQ("x", new_mul->input(1));
  EXPECT_EQ("^y", new_mul->input(2));

  const NodeDef* new_id = node_map.GetNode("id");
  ASSERT_NE(new_id, nullptr);
  EXPECT_EQ(optimized_mul_name, new_id->input(0));

  auto tensors = EvaluateNodes(output, fetch);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
}
// A tree of repeated Adds over the same placeholder (with nodes pinned to
// mixed devices) collapses into a single Mul of the placeholder by a tree of
// hoisted constant Adds. AddN combining is disabled to exercise exactly the
// hoist/simplify path.
TEST_F(ArithmeticOptimizerTest, TrivialSumsRepeatedAdd) {
  // Test case from b/69059093.
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output p = ops::Placeholder(s, DT_FLOAT, ops::Placeholder::Shape({10, 10}));
  Output add = ops::Add(s.WithOpName("Add"), p, p);
  Output add1 = ops::Add(s.WithOpName("Add_1"), p, p);
  Output add4 = ops::Add(s.WithOpName("Add_4"), add, add1);
  Output add5 = ops::Add(s.WithOpName("Add_5"), add, add1);
  Output add6 = ops::Add(s.WithOpName("Add_6"), add4, add5);
  Output id = ops::Identity(s.WithOpName("id"), add6);

  GrapplerItem item;
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  // Pin each node to a device (order matches graph node order) to verify the
  // rewrite works across device boundaries.
  const std::vector<string> devices{
      "/device:CPU:0", "/device:GPU:0", "/device:CPU:0", "/device:GPU:1",
      "/device:CPU:0", "/device:CPU:0", "/device:CPU:0",
  };
  for (int i = 0; i < item.graph.node_size(); ++i) {
    item.graph.mutable_node(i)->set_device(devices[i]);
  }
  ArithmeticOptimizer optimizer;
  DisableAddToAddNCombining(&optimizer);

  GraphDef output;
  OptimizeTwice(&optimizer, &item, &output);

  // We expect the following rewrite(s) to occur:
  //
  // Mul(p,
  //     Add_6(Add_4(Const(2), Const(2)),
  //           Add_5(Const(2), Const(2))))
  NodeMap node_map(&output);

  EXPECT_EQ(17, output.node_size());

  const NodeDef* id_node = node_map.GetNode("id");
  ASSERT_NE(id_node, nullptr);
  EXPECT_EQ(1, id_node->input_size());
  EXPECT_EQ(HoistMulName("Add_6"), id_node->input(0));

  const NodeDef* mul_node = node_map.GetNode(HoistMulName("Add_6"));
  ASSERT_NE(mul_node, nullptr);
  EXPECT_EQ(2, mul_node->input_size());
  EXPECT_EQ("Placeholder", mul_node->input(0));
  EXPECT_EQ(HoistAddName("Add_6"), mul_node->input(1));

  const NodeDef* add_6_node = node_map.GetNode(HoistAddName("Add_6"));
  ASSERT_NE(add_6_node, nullptr);
  EXPECT_EQ(2, add_6_node->input_size());
  EXPECT_EQ(HoistAddName("Add_4"), add_6_node->input(0));
  EXPECT_EQ(HoistAddName("Add_5"), add_6_node->input(1));

  const NodeDef* add_4_node = node_map.GetNode(HoistAddName("Add_4"));
  ASSERT_NE(add_4_node, nullptr);
  EXPECT_EQ("Add", add_4_node->op());
  EXPECT_EQ(2, add_4_node->input_size());
  EXPECT_EQ(AggregationConstName("Add"), add_4_node->input(0));
  EXPECT_EQ(AggregationConstName("Add_1"), add_4_node->input(1));

  const NodeDef* add_5_node = node_map.GetNode(HoistAddName("Add_5"));
  ASSERT_NE(add_5_node, nullptr);
  EXPECT_EQ("Add", add_5_node->op());
  EXPECT_EQ(2, add_5_node->input_size());
  EXPECT_EQ(AggregationConstName("Add"), add_5_node->input(0));
  EXPECT_EQ(AggregationConstName("Add_1"), add_5_node->input(1));

  const NodeDef* add_const_node = node_map.GetNode(AggregationConstName("Add"));
  ASSERT_NE(add_const_node, nullptr);
  EXPECT_EQ("Const", add_const_node->op());
  EXPECT_EQ(1, add_const_node->input_size());
  EXPECT_EQ("^Placeholder", add_const_node->input(0));

  const NodeDef* add_1_const_node =
      node_map.GetNode(AggregationConstName("Add_1"));
  ASSERT_NE(add_1_const_node, nullptr);
  EXPECT_EQ("Const", add_1_const_node->op());
  EXPECT_EQ(1, add_1_const_node->input_size());
  EXPECT_EQ("^Placeholder", add_1_const_node->input(0));
}
TEST_F(ArithmeticOptimizerTest, HoistFactorMul) {
  // Hoists a common multiplicand out of a sum:
  // Add(Mul(x, y1), Mul(y2, x)) -> Mul(x, Add(y1, y2)).
  // The non-matching-shapes / AddN combination must leave the graph intact.
  for (bool matching_shapes : {true, false}) {
    for (bool use_addn : {true, false}) {
      tensorflow::Scope s = tensorflow::Scope::NewRootScope();
      Output x = ops::Const(s.WithOpName("x"), {1.0f, 2.0f}, {1, 2});
      Output y1 = ops::Const(s.WithOpName("y1"), {3.0f, 4.0f}, {1, 2});
      Output y2 = matching_shapes
                      ? ops::Const(s.WithOpName("y2"), {5.0f, 6.0f}, {1, 2})
                      : ops::Const(s.WithOpName("y2"), {5.0f}, {1, 1});
      // The common factor `x` appears on different sides of the two Muls to
      // exercise commutative matching.
      Output mul1 = ops::Mul(s.WithOpName("mul1"), x, y1);
      Output mul2 = ops::Mul(s.WithOpName("mul2"), y2, x);
      Output id =
          use_addn ? ops::Identity(s.WithOpName("id"),
                                   ops::AddN(s.WithOpName("add"), {mul1, mul2}))
                   : ops::Identity(s.WithOpName("id"),
                                   ops::Add(s.WithOpName("add"), mul1, mul2));
      GrapplerItem item;
      item.fetch = {"id"};
      TF_CHECK_OK(s.ToGraphDef(&item.graph));
      auto tensors_expected = EvaluateNodes(item.graph, item.fetch);
      EXPECT_EQ(1, tensors_expected.size());
      ArithmeticOptimizer optimizer;
      EnableOnlyHoistCommonFactor(&optimizer);
      GraphDef output;
      OptimizeTwice(&optimizer, &item, &output);
      // We expect the following rewrite(s) to occur:
      //
      //        Add                 Mul
      //       /   \               /   \
      //     Mul   Mul     ->     x    Add
      //    /  \   /  \               /   \
      //   x   y1 y2   x            y1     y2
      //
      // If "root" op is AddN and shapes does not match, this rewrite is not
      // possible and graph should stay intact.
      NodeMap node_map(&output);
      if (use_addn && !matching_shapes) {
        VerifyGraphsMatch(item.graph, output, __LINE__);
      } else {
        EXPECT_EQ(9, output.node_size());
        const NodeDef* new_add_node = node_map.GetNode(HoistAddName("add"));
        ASSERT_NE(new_add_node, nullptr) << "Hoisted Add node not found";
        EXPECT_EQ("y1", new_add_node->input(0));
        EXPECT_EQ("y2", new_add_node->input(1));
        const NodeDef* new_mul_node = node_map.GetNode(HoistMulName("add"));
        ASSERT_NE(new_mul_node, nullptr) << "Hoisted Mul node not found";
        EXPECT_EQ("x", new_mul_node->input(0));
        EXPECT_EQ(new_add_node->name(), new_mul_node->input(1));
        const NodeDef* id_node = node_map.GetNode("id");
        ASSERT_NE(id_node, nullptr) << "Id node not found";
        EXPECT_EQ("id", id_node->name());
        EXPECT_EQ(HoistMulName("add"), id_node->input(0));
      }
      // The rewritten graph must be numerically equivalent.
      auto tensors = EvaluateNodes(output, item.fetch);
      EXPECT_EQ(1, tensors.size());
      test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
    }
  }
}
TEST_F(ArithmeticOptimizerTest, HoistFactorDiv) {
  // Hoists a common divisor out of a sum:
  // Add(Div(y1, x), Div(y2, x)) -> Div(Add(y1, y2), x).
  // The rewrite must not fire when the root op is an AddN with mismatched
  // input shapes, nor for integer inputs (integer division does not
  // distribute over addition).
  for (bool matching_shapes : {true, false}) {
    for (bool use_addn : {true, false}) {
      for (bool use_ints : {true, false}) {
        tensorflow::Scope s = tensorflow::Scope::NewRootScope();
        Output x = use_ints
                       ? ops::Const(s.WithOpName("x"), {1, 2}, {1, 2})
                       : ops::Const(s.WithOpName("x"), {1.0f, 2.0f}, {1, 2});
        Output y1 = use_ints
                        ? ops::Const(s.WithOpName("y1"), {3, 4}, {1, 2})
                        : ops::Const(s.WithOpName("y1"), {3.0f, 4.0f}, {1, 2});
        Output y2;
        if (matching_shapes) {
          y2 = use_ints ? ops::Const(s.WithOpName("y2"), {5, 6}, {1, 2})
                        : ops::Const(s.WithOpName("y2"), {5.0f, 6.0f}, {1, 2});
        } else {
          y2 = use_ints ? ops::Const(s.WithOpName("y2"), {5}, {1, 1})
                        : ops::Const(s.WithOpName("y2"), {5.0f}, {1, 1});
        }
        Output div1 = ops::Div(s.WithOpName("div1"), y1, x);
        Output div2 = ops::Div(s.WithOpName("div2"), y2, x);
        Output id =
            use_addn
                ? ops::Identity(s.WithOpName("id"),
                                ops::AddN(s.WithOpName("add"), {div1, div2}))
                : ops::Identity(s.WithOpName("id"),
                                ops::Add(s.WithOpName("add"), div1, div2));
        GrapplerItem item;
        item.fetch = {"id"};
        TF_CHECK_OK(s.ToGraphDef(&item.graph));
        auto tensors_expected = EvaluateNodes(item.graph, item.fetch);
        EXPECT_EQ(1, tensors_expected.size());
        ArithmeticOptimizer optimizer;
        EnableOnlyHoistCommonFactor(&optimizer);
        GraphDef output;
        OptimizeTwice(&optimizer, &item, &output);
        // We expect the following rewrite(s) to occur:
        //
        //        Add                 Div
        //       /   \               /   \
        //     Div   Div     ->    Add    x
        //    /  \   /  \         /   \
        //   y1   x y2   x      y1     y2
        //
        // If "root" op is AddN and shapes does not match, this rewrite is not
        // possible and graph should stay intact.
        NodeMap node_map(&output);
        if ((use_addn && !matching_shapes) || use_ints) {
          VerifyGraphsMatch(item.graph, output, __LINE__);
        } else {
          EXPECT_EQ(9, output.node_size());
          // Use ASSERT_NE for pointer checks, consistent with HoistFactorMul
          // (also produces a clearer failure message than ASSERT_TRUE).
          const NodeDef* new_add_node = node_map.GetNode(HoistAddName("add"));
          ASSERT_NE(new_add_node, nullptr) << "Hoisted Add node not found";
          EXPECT_EQ("y1", new_add_node->input(0));
          EXPECT_EQ("y2", new_add_node->input(1));
          const NodeDef* new_div_node = node_map.GetNode(HoistDivName("add"));
          ASSERT_NE(new_div_node, nullptr) << "Hoisted Div node not found";
          EXPECT_EQ(new_add_node->name(), new_div_node->input(0));
          EXPECT_EQ("x", new_div_node->input(1));
          const NodeDef* id_node = node_map.GetNode("id");
          ASSERT_NE(id_node, nullptr) << "Id node not found";
          EXPECT_EQ("id", id_node->name());
          EXPECT_EQ(HoistDivName("add"), id_node->input(0));
        }
        // The rewritten graph must be numerically equivalent.
        auto tensors = EvaluateNodes(output, item.fetch);
        EXPECT_EQ(1, tensors.size());
        if (use_ints) {
          test::ExpectTensorEqual<int32>(tensors_expected[0], tensors[0]);
        } else {
          test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
        }
      }
    }
  }
}
TEST_F(ArithmeticOptimizerTest, FuseConjAndTranspose) {
  // A Transpose applied to the output of Conj should be fused into a single
  // ConjugateTranspose node reading the original complex input.
  tensorflow::Scope scope = tensorflow::Scope::NewRootScope();
  Output real_part =
      ops::Const(scope.WithOpName("re"), {1.0f, 2.0f, 3.0f, 4.0f}, {2, 2});
  Output imag_part =
      ops::Const(scope.WithOpName("im"), {5.0f, 6.0f, 7.0f, 8.0f}, {2, 2});
  Output complex_in =
      ops::Complex(scope.WithOpName("z"), real_part, imag_part);
  Output permutation = ops::Const(scope.WithOpName("perm"), {1, 0}, {2});
  Output conjugated = ops::Conj(scope.WithOpName("conj"), complex_in);
  Output transposed =
      ops::Transpose(scope.WithOpName("trans"), conjugated, permutation);
  GrapplerItem grappler_item;
  grappler_item.fetch = {"trans"};
  TF_CHECK_OK(scope.ToGraphDef(&grappler_item.graph));
  auto expected_tensors =
      EvaluateNodes(grappler_item.graph, grappler_item.fetch);
  EXPECT_EQ(1, expected_tensors.size());
  ArithmeticOptimizer optimizer;
  GraphDef optimized_graph;
  OptimizeTwice(&optimizer, &grappler_item, &optimized_graph);
  NodeMap nodes(&optimized_graph);
  EXPECT_EQ(7, optimized_graph.node_size());
  const string prefix = "ArithmeticOptimizer/FoldConjugateIntoTranspose";
  const string fused_name = strings::StrCat(prefix, "_", "trans");
  const NodeDef* fused_node = nodes.GetNode(fused_name);
  ASSERT_NE(fused_node, nullptr);
  EXPECT_EQ("ConjugateTranspose", fused_node->op());
  EXPECT_EQ("z", fused_node->input(0));
  EXPECT_EQ("perm", fused_node->input(1));
  // The fused graph must produce exactly the same result.
  auto actual_tensors = EvaluateNodes(optimized_graph, grappler_item.fetch);
  EXPECT_EQ(1, actual_tensors.size());
  test::ExpectTensorEqual<complex64>(expected_tensors[0], actual_tensors[0]);
}
TEST_F(ArithmeticOptimizerTest, FuseConjAndConjugateTranspose) {
  // Conj followed by ConjugateTranspose should collapse into a plain
  // Transpose: the two conjugations cancel each other out.
  tensorflow::Scope scope = tensorflow::Scope::NewRootScope();
  Output real_part =
      ops::Const(scope.WithOpName("re"), {1.0f, 2.0f, 3.0f, 4.0f}, {2, 2});
  Output imag_part =
      ops::Const(scope.WithOpName("im"), {5.0f, 6.0f, 7.0f, 8.0f}, {2, 2});
  Output complex_in =
      ops::Complex(scope.WithOpName("z"), real_part, imag_part);
  Output permutation = ops::Const(scope.WithOpName("perm"), {1, 0}, {2});
  Output conjugated = ops::Conj(scope.WithOpName("conj"), complex_in);
  Output conj_transposed = ops::ConjugateTranspose(
      scope.WithOpName("conjugate_trans"), conjugated, permutation);
  GrapplerItem grappler_item;
  grappler_item.fetch = {"conjugate_trans"};
  TF_CHECK_OK(scope.ToGraphDef(&grappler_item.graph));
  auto expected_tensors =
      EvaluateNodes(grappler_item.graph, grappler_item.fetch);
  EXPECT_EQ(1, expected_tensors.size());
  ArithmeticOptimizer optimizer;
  GraphDef optimized_graph;
  OptimizeTwice(&optimizer, &grappler_item, &optimized_graph);
  NodeMap nodes(&optimized_graph);
  EXPECT_EQ(7, optimized_graph.node_size());
  const string prefix = "ArithmeticOptimizer/FoldConjugateIntoTranspose";
  const string fused_name = strings::StrCat(prefix, "_", "conjugate_trans");
  const NodeDef* fused_node = nodes.GetNode(fused_name);
  ASSERT_NE(fused_node, nullptr);
  EXPECT_EQ("Transpose", fused_node->op());
  EXPECT_EQ("z", fused_node->input(0));
  EXPECT_EQ("perm", fused_node->input(1));
  // The fused graph must produce exactly the same result.
  auto actual_tensors = EvaluateNodes(optimized_graph, grappler_item.fetch);
  EXPECT_EQ(1, actual_tensors.size());
  test::ExpectTensorEqual<complex64>(expected_tensors[0], actual_tensors[0]);
}
TEST_F(ArithmeticOptimizerTest, FuseTransposeAndConj) {
  // The fusion also works in the opposite order: Transpose followed by Conj
  // becomes a single ConjugateTranspose node.
  tensorflow::Scope scope = tensorflow::Scope::NewRootScope();
  Output real_part =
      ops::Const(scope.WithOpName("re"), {1.0f, 2.0f, 3.0f, 4.0f}, {2, 2});
  Output imag_part =
      ops::Const(scope.WithOpName("im"), {5.0f, 6.0f, 7.0f, 8.0f}, {2, 2});
  Output complex_in =
      ops::Complex(scope.WithOpName("z"), real_part, imag_part);
  Output permutation = ops::Const(scope.WithOpName("perm"), {1, 0}, {2});
  Output transposed =
      ops::Transpose(scope.WithOpName("trans"), complex_in, permutation);
  Output conjugated = ops::Conj(scope.WithOpName("conj"), transposed);
  GrapplerItem grappler_item;
  grappler_item.fetch = {"conj"};
  TF_CHECK_OK(scope.ToGraphDef(&grappler_item.graph));
  auto expected_tensors =
      EvaluateNodes(grappler_item.graph, grappler_item.fetch);
  EXPECT_EQ(1, expected_tensors.size());
  ArithmeticOptimizer optimizer;
  GraphDef optimized_graph;
  OptimizeTwice(&optimizer, &grappler_item, &optimized_graph);
  NodeMap nodes(&optimized_graph);
  EXPECT_EQ(7, optimized_graph.node_size());
  const string prefix = "ArithmeticOptimizer/FoldConjugateIntoTranspose";
  const string fused_name = strings::StrCat(prefix, "_", "conj");
  const NodeDef* fused_node = nodes.GetNode(fused_name);
  ASSERT_NE(fused_node, nullptr);
  EXPECT_EQ("ConjugateTranspose", fused_node->op());
  EXPECT_EQ("z", fused_node->input(0));
  EXPECT_EQ("perm", fused_node->input(1));
  // The fused graph must produce exactly the same result.
  auto actual_tensors = EvaluateNodes(optimized_graph, grappler_item.fetch);
  EXPECT_EQ(1, actual_tensors.size());
  test::ExpectTensorEqual<complex64>(expected_tensors[0], actual_tensors[0]);
}
TEST_F(ArithmeticOptimizerTest, FoldTransposeIntoMatMul) {
  // For each MatMul flavor, Transpose ops feeding both operands should be
  // folded into the matmul by setting its transpose_a/transpose_b (or, for
  // BatchMatMul, adj_x/adj_y) attributes.
  for (const string matmul_type : {"MatMul", "SparseMatMul", "BatchMatMul"}) {
    tensorflow::Scope s = tensorflow::Scope::NewRootScope();
    Output a = ops::Const(s.WithOpName("a"), {1.0f, 2.0f, 3.0f, 4.0f}, {2, 2});
    Output b = ops::Const(s.WithOpName("b"), {5.0f, 6.0f, 7.0f, 8.0f}, {2, 2});
    Output perm = ops::Const(s.WithOpName("perm"), {1, 0}, {2});
    Output trans_a = ops::Transpose(s.WithOpName("trans_a"), a, perm);
    Output trans_b = ops::Transpose(s.WithOpName("trans_b"), b, perm);
    auto matmul_op = s.WithOpName("matmul");
    // Constructing the op registers it in the scope's graph; the returned
    // Output handle is unused, so no local is bound (avoids unused-variable
    // warnings).
    if (matmul_type == "MatMul") {
      ops::MatMul(matmul_op, trans_a, trans_b);
    } else if (matmul_type == "SparseMatMul") {
      ops::SparseMatMul(matmul_op, trans_a, trans_b);
    } else if (matmul_type == "BatchMatMul") {
      ops::BatchMatMul(matmul_op, trans_a, trans_b);
    }
    GrapplerItem item;
    item.fetch = {"matmul"};
    TF_CHECK_OK(s.ToGraphDef(&item.graph));
    auto tensors_expected = EvaluateNodes(item.graph, item.fetch);
    EXPECT_EQ(1, tensors_expected.size());
    ArithmeticOptimizer optimizer;
    EnableOnlyFoldTransposeIntoMatMul(&optimizer);
    GraphDef output;
    OptimizeTwice(&optimizer, &item, &output);
    NodeMap node_map(&output);
    EXPECT_EQ(7, output.node_size());
    const string p = "ArithmeticOptimizer/FoldTransposeIntoMatMul";
    const string optimized_name = strings::StrCat(p, "_", "matmul");
    const NodeDef* matmul_fused_node = node_map.GetNode(optimized_name);
    ASSERT_NE(matmul_fused_node, nullptr);
    // The fused matmul reads the untransposed inputs directly.
    EXPECT_EQ("a", matmul_fused_node->input(0));
    EXPECT_EQ("b", matmul_fused_node->input(1));
    if (matmul_type == "BatchMatMul") {
      EXPECT_TRUE(matmul_fused_node->attr().at("adj_x").b());
      EXPECT_TRUE(matmul_fused_node->attr().at("adj_y").b());
    } else {
      EXPECT_TRUE(matmul_fused_node->attr().at("transpose_a").b());
      EXPECT_TRUE(matmul_fused_node->attr().at("transpose_b").b());
    }
    // The rewritten graph must be numerically equivalent.
    auto tensors = EvaluateNodes(output, item.fetch);
    EXPECT_EQ(1, tensors.size());
    test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
  }
}
TEST_F(ArithmeticOptimizerTest, FoldConjugateTransposeIntoBatchMatMul) {
  // ConjugateTranspose nodes feeding both BatchMatMul operands should be
  // absorbed into the matmul via its adj_x/adj_y attributes.
  tensorflow::Scope scope = tensorflow::Scope::NewRootScope();
  Output real_a =
      ops::Const(scope.WithOpName("re_a"), {1.0f, 2.0f, 3.0f, 4.0f}, {2, 2});
  Output imag_a =
      ops::Const(scope.WithOpName("im_a"), {-1.0f, -2.0f, -3.0f, -4.0f}, {2, 2});
  Output real_b =
      ops::Const(scope.WithOpName("re_b"), {5.0f, 6.0f, 7.0f, 8.0f}, {2, 2});
  Output imag_b =
      ops::Const(scope.WithOpName("im_b"), {-5.0f, -6.0f, -7.0f, -8.0f}, {2, 2});
  Output lhs = ops::Complex(scope.WithOpName("a"), real_a, imag_a);
  Output rhs = ops::Complex(scope.WithOpName("b"), real_b, imag_b);
  Output permutation = ops::Const(scope.WithOpName("perm"), {1, 0}, {2});
  Output lhs_adjoint =
      ops::ConjugateTranspose(scope.WithOpName("trans_a"), lhs, permutation);
  Output rhs_adjoint =
      ops::ConjugateTranspose(scope.WithOpName("trans_b"), rhs, permutation);
  Output product =
      ops::BatchMatMul(scope.WithOpName("matmul"), lhs_adjoint, rhs_adjoint);
  GrapplerItem grappler_item;
  grappler_item.fetch = {"matmul"};
  TF_CHECK_OK(scope.ToGraphDef(&grappler_item.graph));
  auto expected_tensors =
      EvaluateNodes(grappler_item.graph, grappler_item.fetch);
  EXPECT_EQ(1, expected_tensors.size());
  ArithmeticOptimizer optimizer;
  GraphDef optimized_graph;
  OptimizeTwice(&optimizer, &grappler_item, &optimized_graph);
  NodeMap nodes(&optimized_graph);
  ASSERT_EQ(11, optimized_graph.node_size());
  const string prefix = "ArithmeticOptimizer/FoldTransposeIntoMatMul";
  const string fused_name = strings::StrCat(prefix, "_", "matmul");
  const NodeDef* fused_matmul = nodes.GetNode(fused_name);
  ASSERT_NE(fused_matmul, nullptr);
  EXPECT_EQ("a", fused_matmul->input(0));
  EXPECT_EQ("b", fused_matmul->input(1));
  EXPECT_TRUE(fused_matmul->attr().at("adj_x").b());
  EXPECT_TRUE(fused_matmul->attr().at("adj_y").b());
  // The rewritten graph must be numerically equivalent.
  auto actual_tensors = EvaluateNodes(optimized_graph, grappler_item.fetch);
  EXPECT_EQ(1, actual_tensors.size());
  test::ExpectTensorNear<complex64>(expected_tensors[0], actual_tensors[0],
                                    1e-6);
}
TEST_F(ArithmeticOptimizerTest, RemoveRedundantReshape_IdentityReshape) {
  // The reshape target is derived from the input's own runtime shape
  // (Concat(batch_size, [3, 28, 28])), so the Reshape is provably a no-op
  // and should be removed entirely.
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output inputs =
      ops::Placeholder(s, DT_FLOAT, ops::Placeholder::Shape({-1, 3, 28, 28}));
  Output inputs_shape = ops::Shape(s, inputs);
  // The target shape of the reshape is the concatenation of `batch_size` and
  // [3,28,28].
  Output batch_size = ops::Slice(s, inputs_shape, ops::Const(s, {0}, {1}),
                                 ops::Const(s, {1}, {1}));
  Output target_shape = ops::Concat(
      s.WithOpName("target_shape"),
      {batch_size, ops::Const(s, {3, 28, 28}, {3})}, ops::Const(s, {0}, {}));
  Output reshape = ops::Reshape(s, inputs, target_shape);
  Output outputs = ops::Identity(s.WithOpName("outputs"), reshape);
  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto x_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({3, 3, 28, 28}));
  auto tensors_expected =
      EvaluateNodes(item.graph, item.fetch, {{"Placeholder", x_t}});
  EXPECT_EQ(1, tensors_expected.size());
  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyRemoveRedundantReshape(&optimizer);
  OptimizeTwiceAndPrune(&optimizer, &item, &output);
  // After optimization and pruning no Reshape nodes should remain.
  EXPECT_EQ(0, CountOpNodes(output, "Reshape"));
  auto tensors = EvaluateNodes(output, item.fetch, {{"Placeholder", x_t}});
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
}
TEST_F(ArithmeticOptimizerTest,
       RemoveRedundantReshape_IdentityReshapeBetweenSymbolicShapes) {
  // Same identity-reshape pattern as above, but with three symbolic (-1)
  // dimensions; removing the Reshape requires the optimizer to trust the
  // feed's concrete shape, which it only does in AGGRESSIVE mode.
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output inputs =
      ops::Placeholder(s, DT_FLOAT, ops::Placeholder::Shape({-1, 3, -1, -1}));
  Output inputs_shape = ops::Shape(s, inputs);
  // The target shape of the reshape is the concatenation of `batch_size`, 3,
  // `height, and `width`.
  Output batch_size = ops::Slice(s, inputs_shape, ops::Const(s, {0}, {1}),
                                 ops::Const(s, {1}, {1}));
  Output height = ops::Slice(s, inputs_shape, ops::Const(s, {2}, {1}),
                             ops::Const(s, {1}, {1}));
  Output width = ops::Slice(s, inputs_shape, ops::Const(s, {3}, {1}),
                            ops::Const(s, {1}, {1}));
  Output target_shape =
      ops::Concat(s.WithOpName("target_shape"),
                  {batch_size, ops::Const(s, {3}, {1}), height, width},
                  ops::Const(s, {0}, {}));
  Output reshape = ops::Reshape(s, inputs, target_shape);
  Output outputs = ops::Identity(s.WithOpName("outputs"), reshape);
  auto x_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({3, 3, 28, 28}));
  GrapplerItem item;
  item.fetch = {"outputs"};
  item.feed = {{"Placeholder", x_t}};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch, item.feed);
  EXPECT_EQ(1, tensors_expected.size());
  GraphDef output;
  // Assume valid feed shape in aggressive mode.
  ArithmeticOptimizer optimizer(RewriterConfig::AGGRESSIVE);
  EnableOnlyRemoveRedundantReshape(&optimizer);
  OptimizeTwiceAndPrune(&optimizer, &item, &output);
  // The identity Reshape should have been removed.
  EXPECT_EQ(0, CountOpNodes(output, "Reshape"));
  auto tensors = EvaluateNodes(output, item.fetch, item.feed);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
}
TEST_F(ArithmeticOptimizerTest, RemoveRedundantReshape_NotAssumeValidFeeds) {
  // In the default (non-aggressive) mode the optimizer does not trust that a
  // feed matches the placeholder's declared shape, so the Reshape survives.
  tensorflow::Scope scope = tensorflow::Scope::NewRootScope();
  Output placeholder = ops::Placeholder(scope, DT_FLOAT,
                                        ops::Placeholder::Shape({4, 3, 28, 28}));
  Output shape_const = ops::Const(scope, {4, 3, 28, 28}, {4});
  Output reshaped = ops::Reshape(scope, placeholder, shape_const);
  Output fetched = ops::Identity(scope.WithOpName("outputs"), reshaped);
  auto feed_tensor =
      GenerateRandomTensor<DT_FLOAT>(TensorShape({4, 3, 28, 28}));
  GrapplerItem grappler_item;
  grappler_item.fetch = {"outputs"};
  grappler_item.feed = {{"Placeholder", feed_tensor}};
  TF_CHECK_OK(scope.ToGraphDef(&grappler_item.graph));
  auto expected_tensors = EvaluateNodes(grappler_item.graph,
                                        grappler_item.fetch, grappler_item.feed);
  EXPECT_EQ(1, expected_tensors.size());
  GraphDef optimized_graph;
  ArithmeticOptimizer optimizer;
  EnableOnlyRemoveRedundantReshape(&optimizer);
  OptimizeTwiceAndPrune(&optimizer, &grappler_item, &optimized_graph);
  // The reshape is preserved because the shape of the placeholder can be
  // different from the shape of the actual feed.
  EXPECT_EQ(1, CountOpNodes(optimized_graph, "Reshape"));
  auto actual_tensors = EvaluateNodes(optimized_graph, grappler_item.fetch,
                                      grappler_item.feed);
  EXPECT_EQ(1, actual_tensors.size());
  test::ExpectTensorNear<float>(expected_tensors[0], actual_tensors[0], 1e-6);
}
TEST_F(ArithmeticOptimizerTest,
       RemoveRedundantReshape_AssumeValidFeedsInAggressiveMode) {
  // Same graph as NotAssumeValidFeeds, but in AGGRESSIVE mode: the feed is
  // assumed to match the placeholder's shape, so the Reshape is removed.
  tensorflow::Scope scope = tensorflow::Scope::NewRootScope();
  Output placeholder = ops::Placeholder(scope, DT_FLOAT,
                                        ops::Placeholder::Shape({4, 3, 28, 28}));
  Output shape_const = ops::Const(scope, {4, 3, 28, 28}, {4});
  Output reshaped = ops::Reshape(scope, placeholder, shape_const);
  Output fetched = ops::Identity(scope.WithOpName("outputs"), reshaped);
  auto feed_tensor =
      GenerateRandomTensor<DT_FLOAT>(TensorShape({4, 3, 28, 28}));
  GrapplerItem grappler_item;
  grappler_item.fetch = {"outputs"};
  grappler_item.feed = {{"Placeholder", feed_tensor}};
  TF_CHECK_OK(scope.ToGraphDef(&grappler_item.graph));
  auto expected_tensors = EvaluateNodes(grappler_item.graph,
                                        grappler_item.fetch, grappler_item.feed);
  EXPECT_EQ(1, expected_tensors.size());
  GraphDef optimized_graph;
  ArithmeticOptimizer optimizer(RewriterConfig::AGGRESSIVE);
  EnableOnlyRemoveRedundantReshape(&optimizer);
  OptimizeTwiceAndPrune(&optimizer, &grappler_item, &optimized_graph);
  EXPECT_EQ(0, CountOpNodes(optimized_graph, "Reshape"));
  auto actual_tensors = EvaluateNodes(optimized_graph, grappler_item.fetch,
                                      grappler_item.feed);
  EXPECT_EQ(1, actual_tensors.size());
  test::ExpectTensorNear<float>(expected_tensors[0], actual_tensors[0], 1e-6);
}
TEST_F(ArithmeticOptimizerTest, RemoveRedundantReshape_NotIdentityReshape) {
  // Reshape from [-1,3,28,28] to [8,-1,28,28] is not identity, because it can
  // be from [4,3,28,28] to [8,6,28,28].
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output inputs =
      ops::Placeholder(s, DT_FLOAT, ops::Placeholder::Shape({-1, 3, 28, 28}));
  Output reshape = ops::Reshape(s, inputs, ops::Const(s, {8, -1, 28, 28}, {4}));
  Output outputs = ops::Identity(s.WithOpName("outputs"), reshape);
  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto x_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({8, 3, 28, 28}));
  item.feed = {{"Placeholder", x_t}};
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch, item.feed);
  EXPECT_EQ(1, tensors_expected.size());
  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyRemoveRedundantReshape(&optimizer);
  OptimizeTwiceAndPrune(&optimizer, &item, &output);
  // The Reshape must be kept since it can change the shape.
  EXPECT_EQ(1, CountOpNodes(output, "Reshape"));
  auto tensors = EvaluateNodes(output, item.fetch, item.feed);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
}
TEST_F(ArithmeticOptimizerTest,
       RemoveRedundantReshape_NotIdentityReshapeTooManyUnknownDimSizes) {
  // A target shape with more than one -1 cannot be proven to be an identity
  // reshape, so the Reshape node must survive optimization.
  tensorflow::Scope scope = tensorflow::Scope::NewRootScope();
  Output placeholder =
      ops::Placeholder(scope, DT_FLOAT, ops::Placeholder::Shape({4, 3}));
  Output reshaped =
      ops::Reshape(scope, placeholder, ops::Const(scope, {-1, -1}, {2}));
  Output fetched = ops::Identity(scope.WithOpName("outputs"), reshaped);
  GrapplerItem grappler_item;
  grappler_item.fetch = {"outputs"};
  TF_CHECK_OK(scope.ToGraphDef(&grappler_item.graph));
  GraphDef optimized_graph;
  ArithmeticOptimizer optimizer;
  EnableOnlyRemoveRedundantReshape(&optimizer);
  OptimizeTwiceAndPrune(&optimizer, &grappler_item, &optimized_graph);
  EXPECT_EQ(1, CountOpNodes(optimized_graph, "Reshape"));
}
TEST_F(ArithmeticOptimizerTest, RemoveRedundantReshape_CombineReshapes) {
  // Converts an NCHW_VECT_C tensor to NHWC and then flattens it to 2D. The two
  // reshapes should be combined.
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output nchw_vect_c =
      ops::Placeholder(s.WithOpName("nchw_vect_c"), DT_INT8,
                       ops::Placeholder::Shape({8, 3, 28, 28, 4}));
  Output transpose =
      ops::Transpose(s.WithOpName("transpose"), nchw_vect_c,
                     ops::Const(s.WithOpName("perm"), {0, 2, 3, 1, 4}, {5}));
  Output nhwc = ops::Reshape(
      s.WithOpName("nhwc"), transpose,
      ops::Const(s.WithOpName("nhwc_shape"), {8, 28, 28, 12}, {4}));
  Output flatten = ops::Reshape(
      s.WithOpName("flatten"), nhwc,
      ops::Const(s.WithOpName("flatten_shape"), {8, 28 * 28 * 12}, {2}));
  Output outputs = ops::Identity(s.WithOpName("outputs"), flatten);
  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto x_t = GenerateRandomTensor<DT_INT8>(TensorShape({8, 3, 28, 28, 4}));
  item.feed = {{"nchw_vect_c", x_t}};
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch, item.feed);
  EXPECT_EQ(1, tensors_expected.size());
  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyRemoveRedundantReshape(&optimizer);
  OptimizeTwiceAndPrune(&optimizer, &item, &output);
  // Exactly one Reshape remains: the back-to-back pair was merged.
  EXPECT_EQ(1, CountOpNodes(output, "Reshape"));
  auto tensors = EvaluateNodes(output, item.fetch, item.feed);
  EXPECT_EQ(1, tensors.size());
  // int8 data must be preserved exactly.
  test::ExpectTensorEqual<int8>(tensors_expected[0], tensors[0]);
}
TEST_F(ArithmeticOptimizerTest, ReorderTransposeCast) {
  // On GPU, Cast(uint8->float) followed by Transpose gets reordered so the
  // Transpose runs on the narrower uint8 data; the Cast is applied to the
  // transposed result instead.
  tensorflow::Scope s = tensorflow::Scope::NewRootScope().WithDevice("/gpu:0");
  Output nhwc_uint8 =
      ops::Placeholder(s, DT_UINT8, ops::Placeholder::Shape({8, 28, 28, 3}));
  Output nhwc_fp32 = ops::Cast(s, nhwc_uint8, DT_FLOAT);
  Output nchw_fp32 =
      ops::Transpose(s, nhwc_fp32, ops::Const(s, {0, 3, 1, 2}, {4}));
  Output outputs = ops::Identity(s.WithOpName("outputs"), nchw_fp32);
  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  GraphDef output;
  TF_EXPECT_OK(ArithmeticOptimizer().Optimize(nullptr, item, &output))
  item.graph.Swap(&output);
  TF_EXPECT_OK(ModelPruner().Optimize(nullptr, item, &output));
  // There should be exactly one Transpose, and it should now operate on the
  // uint8 input (T attribute is DT_UINT8).
  const NodeDef* transpose_node = nullptr;
  for (const NodeDef& node : output.node()) {
    if (node.op() == "Transpose") {
      EXPECT_EQ(transpose_node, nullptr);
      EXPECT_EQ(DT_UINT8, node.attr().at("T").type());
      transpose_node = &node;
    }
  }
  EXPECT_NE(transpose_node, nullptr);
  // The Cast now consumes the Transpose's output.
  for (const NodeDef& node : output.node()) {
    if (node.op() == "Cast") {
      EXPECT_EQ(NodeName(node.input(0)), transpose_node->name());
    }
  }
}
TEST_F(ArithmeticOptimizerTest, NoReorderTransposeCast) {
  // Here the Cast narrows (float -> uint8) before the Transpose, so the
  // Transpose already works on the smaller type; the optimizer must keep the
  // Cast -> Transpose order.
  tensorflow::Scope s = tensorflow::Scope::NewRootScope().WithDevice("/gpu:0");
  Output nhwc_fp32 =
      ops::Placeholder(s, DT_FLOAT, ops::Placeholder::Shape({8, 28, 28, 3}));
  Output nhwc_uint8 = ops::Cast(s, nhwc_fp32, DT_UINT8);
  Output nchw_uint8 =
      ops::Transpose(s, nhwc_uint8, ops::Const(s, {0, 3, 1, 2}, {4}));
  Output outputs = ops::Identity(s.WithOpName("outputs"), nchw_uint8);
  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  GraphDef output;
  TF_EXPECT_OK(ArithmeticOptimizer().Optimize(nullptr, item, &output));
  item.graph.Swap(&output);
  TF_EXPECT_OK(ModelPruner().Optimize(nullptr, item, &output));
  // Exactly one Transpose remains, still on uint8 and still fed by the Cast.
  int num_transposes = 0;
  for (const NodeDef& node : output.node()) {
    if (node.op() == "Transpose") {
      EXPECT_EQ(DT_UINT8, node.attr().at("T").type());
      EXPECT_EQ(node.input(0), "Cast");
      ++num_transposes;
    }
  }
  EXPECT_EQ(1, num_transposes);
}
TEST_F(ArithmeticOptimizerTest, RemoveIdentityTransposes) {
  // Two cases are removed: a pair of transposes whose permutations compose to
  // the identity, and a single transpose whose permutation is already the
  // identity. Only the inputs and the fetch Identities should remain.
  tensorflow::Scope scope = tensorflow::Scope::NewRootScope();
  Output shape_const =
      ops::Const(scope.WithOpName("inputs_shape"), {8, 3, 28, 28}, {4});
  Output random_in =
      ops::RandomUniform(scope.WithOpName("inputs"), shape_const, DT_FLOAT);
  Output perm_fwd = ops::Const(scope.WithOpName("perm1"), {0, 2, 3, 1}, {4});
  Output perm_bwd = ops::Const(scope.WithOpName("perm2"), {0, 3, 1, 2}, {4});
  Output perm_identity =
      ops::Const(scope.WithOpName("perm3"), {0, 1, 2, 3}, {4});
  Output first_transpose =
      ops::Transpose(scope.WithOpName("transpose1"), random_in, perm_fwd);
  Output second_transpose = ops::Transpose(scope.WithOpName("transpose2"),
                                           first_transpose, perm_bwd);
  Output noop_transpose =
      ops::Transpose(scope.WithOpName("transpose3"), random_in, perm_identity);
  Output fetch_a = ops::Identity(scope.WithOpName("id1"), second_transpose);
  Output fetch_b = ops::Identity(scope.WithOpName("id2"), noop_transpose);
  GrapplerItem grappler_item;
  grappler_item.fetch = {"id1", "id2"};
  TF_CHECK_OK(scope.ToGraphDef(&grappler_item.graph));
  GraphDef optimized_graph;
  ArithmeticOptimizer optimizer;
  EnableOnlyRemoveIdentityTranspose(&optimizer);
  OptimizeAndPrune(&optimizer, &grappler_item, &optimized_graph);
  std::set<string> remaining_nodes;
  for (const NodeDef& node : optimized_graph.node()) {
    remaining_nodes.insert(node.name());
  }
  EXPECT_EQ(remaining_nodes,
            std::set<string>({"id1", "id2", "inputs_shape", "inputs"}));
}
TEST_F(ArithmeticOptimizerTest, RemoveIdentityTransposesMultipleOutputs) {
  // A cancelling transpose pair sits on one output of a multi-output Split;
  // after optimization the Concat should read all three Split outputs
  // directly.
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output inputs_shape =
      ops::Const(s.WithOpName("inputs_shape"), {8, 9, 28, 28}, {4});
  Output inputs = ops::Placeholder(s.WithOpName("inputs"), DT_FLOAT,
                                   ops::Placeholder::Shape({8, 12, 28, 28}));
  OutputList split = ops::Split(s, ops::Const(s, 1), inputs, 3).output;
  Output perm1 = ops::Const(s, {0, 2, 3, 1}, {4});
  Output perm2 = ops::Const(s, {0, 3, 1, 2}, {4});
  Output branch0 = split[0];
  // perm2 undoes perm1, so this pair is removable.
  Output branch1 = ops::Transpose(s, ops::Transpose(s, split[1], perm1), perm2);
  Output branch2 = split[2];
  Output concat = ops::Concat(s, {branch0, branch1, branch2}, ops::Const(s, 1));
  Output outputs = ops::Identity(s.WithOpName("outputs"), concat);
  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto x_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({8, 12, 28, 28}))
  item.feed = {{"inputs", x_t}};
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch, item.feed);
  EXPECT_EQ(1, tensors_expected.size());
  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyRemoveIdentityTranspose(&optimizer);
  OptimizeAndPrune(&optimizer, &item, &output);
  // NOTE(review): ops::Concat may emit a "ConcatV2" node, in which case the
  // loop body below never runs and the input checks are vacuous — confirm the
  // op name this builder produces.
  for (const NodeDef& node : output.node()) {
    if (node.op() == "Concat") {
      EXPECT_EQ(node.input(0), "Split");
      EXPECT_EQ(node.input(1), "Split:1");
      EXPECT_EQ(node.input(2), "Split:2");
    }
  }
  auto tensors = EvaluateNodes(output, item.fetch, item.feed);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
}
TEST_F(ArithmeticOptimizerTest, RemoveTransposesWithControlDependency) {
  // A cancelling transpose pair is removed, but the control dependency that
  // "outputs" had on the second transpose must be preserved by re-anchoring
  // it on the transposes' input (the placeholder).
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output inputs =
      ops::Placeholder(s, DT_FLOAT, ops::Placeholder::Shape({2, 3}));
  Output transpose1 = ops::Transpose(s, inputs, ops::Const(s, {1, 0}));
  Output transpose2 = ops::Transpose(s, transpose1, ops::Const(s, {1, 0}));
  Output outputs =
      ops::Identity(s.WithOpName("outputs").WithControlDependencies(transpose2),
                    ops::Const(s.WithOpName("outputs_const"), 1.0f));
  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto x_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({2, 3}));
  item.feed = {{"Placeholder", x_t}};
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch, item.feed);
  EXPECT_EQ(1, tensors_expected.size());
  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyRemoveIdentityTranspose(&optimizer);
  OptimizeAndPrune(&optimizer, &item, &output);
  NodeMap node_map(&output);
  const NodeDef* outputs_node = node_map.GetNode("outputs");
  // Guard against dereferencing a null pointer if the node were pruned.
  ASSERT_NE(outputs_node, nullptr);
  EXPECT_EQ(2, outputs_node->input_size());
  EXPECT_EQ(outputs_node->input(0), "outputs_const");
  EXPECT_EQ(outputs_node->input(1), "^Placeholder");
  auto tensors = EvaluateNodes(output, item.fetch, item.feed);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
}
TEST_F(ArithmeticOptimizerTest, NotRemoveTransposes) {
  // Applying the same non-involutory permutation twice does not compose to
  // the identity, so neither Transpose may be removed: all 6 nodes survive.
  tensorflow::Scope scope = tensorflow::Scope::NewRootScope();
  Output shape_const =
      ops::Const(scope.WithOpName("inputs_shape"), {8, 3, 28, 28}, {4});
  Output random_in =
      ops::RandomUniform(scope.WithOpName("inputs"), shape_const, DT_FLOAT);
  Output rotation = ops::Const(scope.WithOpName("perm"), {1, 2, 3, 0}, {4});
  Output first_transpose =
      ops::Transpose(scope.WithOpName("transpose1"), random_in, rotation);
  Output second_transpose = ops::Transpose(scope.WithOpName("transpose2"),
                                           first_transpose, rotation);
  Output fetched =
      ops::Identity(scope.WithOpName("outputs"), second_transpose);
  GrapplerItem grappler_item;
  grappler_item.fetch = {"outputs"};
  TF_CHECK_OK(scope.ToGraphDef(&grappler_item.graph));
  GraphDef optimized_graph;
  ArithmeticOptimizer optimizer;
  EnableOnlyRemoveIdentityTranspose(&optimizer);
  OptimizeAndPrune(&optimizer, &grappler_item, &optimized_graph);
  EXPECT_EQ(6, optimized_graph.node_size());
}
TEST_F(ArithmeticOptimizerTest, RemoveIdentityTransposesThroughChain) {
  // transpose1 -> id -> transpose2 composes to the identity permutation. In
  // AGGRESSIVE mode both transposes are removed even though an Identity sits
  // between them; the control dependency transpose1 carried on perm2 must be
  // preserved by moving it onto "id".
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output inputs_shape =
      ops::Const(s.WithOpName("inputs_shape"), {8, 3, 28, 28}, {4});
  Output inputs =
      ops::RandomUniform(s.WithOpName("inputs"), inputs_shape, DT_FLOAT);
  Output perm1 = ops::Const(s.WithOpName("perm1"), {0, 2, 3, 1}, {4});
  Output perm2 = ops::Const(s.WithOpName("perm2"), {0, 3, 1, 2}, {4});
  Output transpose1 = ops::Transpose(
      s.WithOpName("transpose1").WithControlDependencies(perm2), inputs, perm1);
  Output identity = ops::Identity(s.WithOpName("id"), transpose1);
  Output transpose2 =
      ops::Transpose(s.WithOpName("transpose2"), identity, perm2);
  Output id1 = ops::Identity(s.WithOpName("id1"), transpose2);
  GrapplerItem item;
  item.fetch = {"id1"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  GraphDef output;
  ArithmeticOptimizer optimizer(RewriterConfig::AGGRESSIVE);
  EnableOnlyRemoveIdentityTranspose(&optimizer);
  OptimizeAndPrune(&optimizer, &item, &output);
  std::set<string> nodes_after_optimization;
  for (const NodeDef& node : output.node()) {
    nodes_after_optimization.insert(node.name());
    if (node.name() == "id") {
      // "id" now reads the raw inputs and inherits the control dep on perm2.
      EXPECT_EQ(2, node.input_size());
      EXPECT_EQ("inputs", node.input(0));
      EXPECT_EQ("^perm2", node.input(1));
    }
    if (node.name() == "id1") {
      EXPECT_EQ(1, node.input_size());
      EXPECT_EQ("id", node.input(0));
    }
  }
  EXPECT_EQ(nodes_after_optimization,
            std::set<string>({"id", "id1", "inputs_shape", "inputs", "perm2"}));
}
TEST_F(ArithmeticOptimizerTest, FoldMulToTransposeConv) {
  // A scalar multiply on the conv input is hoisted across the NHWC->NCHW
  // Transpose and folded into the convolution by scaling the weights (the
  // conv's second input becomes a Mul), leaving the Transpose to read the
  // raw inputs.
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output inputs = ops::Placeholder(s.WithOpName("inputs"), DT_FLOAT,
                                   ops::Placeholder::Shape({8, 28, 28, 3}));
  Output scale = ops::Const(s.WithOpName("scale"), 1.0f / 255.0f, {});
  Output scaled_inputs =
      ops::Multiply(s.WithOpName("scaled_inputs"), inputs, scale);
  Output perm_nhwc_to_nchw =
      ops::Const(s.WithOpName("perm_nhwc_to_nchw"), {0, 3, 1, 2}, {4});
  Output inputs_nchw = ops::Transpose(s.WithOpName("inputs_nchw"),
                                      scaled_inputs, perm_nhwc_to_nchw);
  Output weights = ops::Const(s.WithOpName("weights"),
                              Input::Initializer(127.0f, {5, 5, 3, 16}));
  Output conv =
      ops::Conv2D(s.WithOpName("conv"), inputs_nchw, weights, {1, 1, 1, 1},
                  "VALID", ops::Conv2D::DataFormat("NCHW"));
  Output outputs = ops::Identity(s.WithOpName("outputs"), conv);
  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyFoldMultipleIntoConv(&optimizer);
  OptimizeTwiceAndPrune(&optimizer, &item, &output);
  NodeMap node_map(&output);
  // `conv` is now a folded convolution with scaled weights.
  const NodeDef* folded_conv = node_map.GetNode(conv.node()->name());
  ASSERT_NE(folded_conv, nullptr);
  const NodeDef* folded_conv_weights = node_map.GetNode(folded_conv->input(1));
  ASSERT_NE(folded_conv_weights, nullptr);
  EXPECT_EQ("Mul", folded_conv_weights->op());
  // Its input should be a transpose of `inputs`.
  const NodeDef* transpose = node_map.GetNode(NodeName(folded_conv->input(0)));
  ASSERT_NE(transpose, nullptr);
  EXPECT_EQ("inputs", transpose->input(0));
}
// Verifies that the Mul is NOT folded across the Transpose when the transposed
// tensor is preserved (it appears in the feed): rewriting it would change the
// meaning of the fed value, so `inputs_nchw` must keep `scaled_inputs` as its
// input after optimization.
TEST_F(ArithmeticOptimizerTest, NotFoldMulAcrossPreservedTranspose) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output inputs = ops::Placeholder(s.WithOpName("inputs"), DT_FLOAT,
                                   ops::Placeholder::Shape({8, 28, 28, 3}));
  Output scale = ops::Const(s.WithOpName("scale"), 1.0f / 255.0f, {});
  Output scaled_inputs =
      ops::Multiply(s.WithOpName("scaled_inputs"), inputs, scale);
  Output perm_nhwc_to_nchw =
      ops::Const(s.WithOpName("perm_nhwc_to_nchw"), {0, 3, 1, 2}, {4});
  Output inputs_nchw = ops::Transpose(s.WithOpName("inputs_nchw"),
                                      scaled_inputs, perm_nhwc_to_nchw);
  Output weights = ops::Const(s.WithOpName("weights"),
                              Input::Initializer(127.0f, {5, 5, 3, 16}));
  Output conv =
      ops::Conv2D(s.WithOpName("conv"), inputs_nchw, weights, {1, 1, 1, 1},
                  "VALID", ops::Conv2D::DataFormat("NCHW"));
  Output outputs = ops::Identity(s.WithOpName("outputs"), conv);

  // Feed a zero-filled NCHW tensor for `inputs_nchw`, which marks that node
  // as preserved and prevents the fold.
  Tensor inputs_nchw_tensor(DT_FLOAT, {8, 3, 28, 28});
  memset(const_cast<char*>(inputs_nchw_tensor.tensor_data().data()), 0,
         inputs_nchw_tensor.tensor_data().size());

  GrapplerItem item;
  item.fetch = {"outputs"};
  item.feed = {{"inputs_nchw", inputs_nchw_tensor}};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));

  GraphDef output;
  TF_EXPECT_OK(ArithmeticOptimizer().Optimize(nullptr, item, &output));

  item.graph.Swap(&output);
  TF_EXPECT_OK(ModelPruner().Optimize(nullptr, item, &output));

  NodeMap node_map(&output);
  const NodeDef* inputs_nchw_node_def =
      node_map.GetNode(inputs_nchw.node()->name());
  // Fail cleanly instead of dereferencing a null pointer if the preserved
  // node was (incorrectly) rewritten or pruned away.
  ASSERT_NE(inputs_nchw_node_def, nullptr);
  EXPECT_EQ(NodeName(inputs_nchw_node_def->input(0)),
            scaled_inputs.node()->name());
}
// Verifies that multiplication of Conv3D inputs by a constant scalar is
// folded into the convolution weights:  Conv3D(Mul(I, S), W) -> Conv3D(I, W*S).
TEST_F(ArithmeticOptimizerTest, FoldMulToConv) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output inputs = ops::Placeholder(s.WithOpName("inputs"), DT_FLOAT,
                                   ops::Placeholder::Shape({8, 28, 28, 28, 3}));
  Output scale = ops::Const(s.WithOpName("scale"), 1.0f / 255.0f, {});
  Output scaled_inputs =
      ops::Multiply(s.WithOpName("scaled_inputs"), inputs, scale);
  Output weights = ops::Const(s.WithOpName("weights"),
                              Input::Initializer(127.0f, {5, 5, 5, 3, 16}));
  Output conv = ops::Conv3D(s.WithOpName("conv"), scaled_inputs, weights,
                            {1, 1, 1, 1, 1}, "VALID");
  Output outputs = ops::Identity(s.WithOpName("outputs"), conv);

  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));

  GraphDef output;
  TF_EXPECT_OK(ArithmeticOptimizer().Optimize(nullptr, item, &output));

  item.graph.Swap(&output);
  TF_EXPECT_OK(ModelPruner().Optimize(nullptr, item, &output));

  NodeMap node_map(&output);
  // `conv` is now a folded convolution on `inputs` and scaled weights.
  // Use gtest assertions instead of CHECK_EQ so a mismatch is reported as a
  // test failure rather than aborting the whole process, and guard the
  // NodeMap lookups against null before dereferencing.
  const NodeDef* folded_conv = node_map.GetNode(conv.node()->name());
  ASSERT_NE(folded_conv, nullptr);
  EXPECT_EQ(inputs.node()->name(), NodeName(folded_conv->input(0)));
  const NodeDef* folded_weights =
      node_map.GetNode(NodeName(folded_conv->input(1)));
  ASSERT_NE(folded_weights, nullptr);
  EXPECT_EQ("Mul", folded_weights->op());
}
TEST_F(ArithmeticOptimizerTest, OptimizeCastMulTransposeConv) {
  // This unit test exercises two optimizations, folding mul into conv, and
  // reordering cast and transpose.
  //
  //   Conv2D(Transpose(Mul(Cast(I), S)), W)
  //     =>
  //   Conv2D(Transpose(Cast(I)), W*S)
  //     =>
  //   Conv2D(Cast(Transpose(I)), W*S)
  // Placed on GPU because the reorder-cast-and-transpose stage targets GPU.
  tensorflow::Scope s = tensorflow::Scope::NewRootScope().WithDevice("/gpu:0");

  Output inputs =
      ops::Placeholder(s, DT_UINT8, ops::Placeholder::Shape({8, 28, 28, 3}));
  Output cast = ops::Cast(s, inputs, DT_FLOAT);
  Output mul = ops::Mul(s, cast, ops::Const(s, 1.0f / 255.0f));
  Output transpose =
      ops::Transpose(s, mul, ops::Const(s.WithOpName("perm"), {0, 3, 1, 2}));
  Output weights = ops::Const(s.WithOpName("weights"),
                              Input::Initializer(127.0f, {5, 5, 3, 16}));
  Output conv = ops::Conv2D(s, transpose, weights, {1, 1, 1, 1}, "VALID",
                            ops::Conv2D::DataFormat("NCHW"));
  Output outputs = ops::Identity(s.WithOpName("outputs"), conv);

  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));

  GraphDef output;
  ArithmeticOptimizer optimizer;  // all optimization stages are on
  OptimizeTwiceAndPrune(&optimizer, &item, &output, /*const_folding=*/true);
  NodeMap node_map(&output);

  // Expected names for reordered cast and transpose.
  const string p = "ArithmeticOptimizer/ReorderCastAndTranspose_";
  const string optimized_cast_name = strings::StrCat(p, "float_Cast");
  const string optimized_transpose_name = strings::StrCat(p, "uint8_Transpose");

  // Expected names for folded multiply and conv.
  const string optimized_weights =
      "ArithmeticOptimizer/FoldMultiplyIntoConv_scaled_Conv2D_weights";

  const NodeDef* inputs_node = node_map.GetNode("Placeholder");
  const NodeDef* transpose_node = node_map.GetNode(optimized_transpose_name);
  const NodeDef* cast_node = node_map.GetNode(optimized_cast_name);

  const NodeDef* weights_node = node_map.GetNode(optimized_weights);
  const NodeDef* conv_node = node_map.GetNode("Conv2D");

  ASSERT_NE(inputs_node, nullptr);
  ASSERT_NE(transpose_node, nullptr);
  ASSERT_NE(cast_node, nullptr);
  ASSERT_NE(weights_node, nullptr);
  ASSERT_NE(conv_node, nullptr);

  // Final chain: Placeholder -> Transpose(uint8) -> Cast(float) -> Conv2D
  // with the scaling constant folded into the weights.
  EXPECT_EQ(output.node_size(), 7);
  EXPECT_EQ(transpose_node->input(0), inputs_node->name());
  EXPECT_EQ(cast_node->input(0), transpose_node->name());
  EXPECT_EQ(conv_node->input(0), cast_node->name());
  EXPECT_EQ(conv_node->input(1), weights_node->name());
}
TEST_F(ArithmeticOptimizerTest, OptimizeMultipleMulTransposeConv) {
  // This unit test exercises optimization of folding mul into conv for
  // multiple nodes in the graph.
  tensorflow::Scope s = tensorflow::Scope::NewRootScope().WithDevice("/gpu:0");

  GrapplerItem item;
  Output conv[2];

  // Build two independent Mul -> Conv2D chains; both should be folded.
  for (int i = 0; i < 2; ++i) {
    Output inputs =
        ops::Placeholder(s, DT_FLOAT, ops::Placeholder::Shape({8, 3, 28, 28}));
    Output mul = ops::Mul(s, inputs, ops::Const(s, 1.0f / 255.0f));
    Output weights = ops::Const(s.WithOpName("weights"),
                                Input::Initializer(127.0f, {5, 5, 3, 16}));
    conv[i] = ops::Conv2D(s, mul, weights, {1, 1, 1, 1}, "VALID",
                          ops::Conv2D::DataFormat("NCHW"));
  }
  Output outputs = ops::Add(s.WithOpName("outputs"), conv[0], conv[1]);

  item.fetch = {"outputs"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));

  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyFoldMultipleIntoConv(&optimizer);
  OptimizeTwiceAndPrune(&optimizer, &item, &output, /*const_folding=*/true);

  NodeMap node_map(&output);

  using strings::StrCat;
  // Both convolutions should read weights produced by the fold rewrite.
  const string p = "ArithmeticOptimizer/FoldMultiplyIntoConv_";
  const string optimized_weights = StrCat(p, "scaled_Conv2D_weights");
  const string optimized_weights_1 = StrCat(p, "scaled_Conv2D_1_weights_1");

  const NodeDef* weights_node = node_map.GetNode(optimized_weights);
  const NodeDef* weights_node_1 = node_map.GetNode(optimized_weights_1);
  const NodeDef* conv_node = node_map.GetNode("Conv2D");
  const NodeDef* conv_node_1 = node_map.GetNode("Conv2D_1");

  ASSERT_NE(weights_node, nullptr);
  ASSERT_NE(weights_node_1, nullptr);
  ASSERT_NE(conv_node, nullptr);
  ASSERT_NE(conv_node_1, nullptr);
  EXPECT_EQ(conv_node->input(1), weights_node->name());
  EXPECT_EQ(conv_node_1->input(1), weights_node_1->name());
}
// Two back-to-back Bitcasts with distinct source/target types collapse into a
// single Bitcast that goes straight from the original type to the final one,
// and the optimized graph still evaluates to the same tensor.
TEST_F(ArithmeticOptimizerTest, CombineBitcasts) {
  tensorflow::Scope scope = tensorflow::Scope::NewRootScope();
  Output placeholder = ops::Placeholder(scope.WithOpName("inputs"), DT_UINT8,
                                        ops::Placeholder::Shape({2, 3}));
  Output first_bitcast =
      ops::Bitcast(scope.WithOpName("bc1"), placeholder, DT_QINT8);
  Output second_bitcast =
      ops::Bitcast(scope.WithOpName("bc2"), first_bitcast, DT_INT8);
  Output fetch = ops::Identity(scope.WithOpName("outputs"), second_bitcast);

  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(scope.ToGraphDef(&item.graph));

  auto input_tensor = GenerateRandomTensor<DT_UINT8>(TensorShape({2, 3}));
  item.feed = {{"inputs", input_tensor}};
  auto expected = EvaluateNodes(item.graph, item.fetch, item.feed);
  EXPECT_EQ(1, expected.size());

  GraphDef optimized_graph;
  ArithmeticOptimizer optimizer;
  EnableOnlyRemoveRedundantBitcast(&optimizer);
  OptimizeAndPrune(&optimizer, &item, &optimized_graph);
  NodeMap node_map(&optimized_graph);

  // The two Bitcasts merged into one, fed directly by the placeholder.
  EXPECT_EQ(3, optimized_graph.node_size());
  EXPECT_EQ(1, CountOpNodes(optimized_graph, "Bitcast"));
  EXPECT_TRUE(IsNodesDirectlyConnected(node_map, "inputs", "bc2"));

  // The rewrite must not change the computed value.
  auto actual = EvaluateNodes(optimized_graph, item.fetch, item.feed);
  EXPECT_EQ(1, actual.size());
  test::ExpectTensorEqual<int8>(expected[0], actual[0]);
}
// When the combined Bitcast would map a type onto itself (int8 -> qint8 ->
// int8), the Bitcast pair is removed entirely.
TEST_F(ArithmeticOptimizerTest, CombineAndRemoveBitcasts) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output inputs = ops::Placeholder(s.WithOpName("inputs"), DT_INT8,
                                   ops::Placeholder::Shape({2, 3}));
  Output bc1 = ops::Bitcast(s, inputs, DT_QINT8);
  Output bc2 = ops::Bitcast(s, bc1, DT_INT8);
  Output outputs = ops::Identity(s.WithOpName("outputs"), bc2);

  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));

  auto x_t = GenerateRandomTensor<DT_INT8>(TensorShape({2, 3}));
  item.feed = {{"inputs", x_t}};
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch, item.feed);
  EXPECT_EQ(1, tensors_expected.size());

  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyRemoveRedundantBitcast(&optimizer);

  OptimizeAndPrune(&optimizer, &item, &output);
  NodeMap node_map(&output);

  // Bitcasts removed and inputs redirected to outputs
  EXPECT_EQ(2, output.node_size());
  EXPECT_EQ(0, CountOpNodes(output, "Bitcast"));
  EXPECT_TRUE(IsNodesDirectlyConnected(node_map, "inputs", "outputs"));

  // The rewrite must not change the computed value.
  auto tensors = EvaluateNodes(output, item.fetch, item.feed);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorEqual<int8>(tensors_expected[0], tensors[0]);
}
// A Cast whose target dtype equals its source dtype (int8 -> int8) is a no-op
// and must be removed, with its consumer rewired to the Cast's input.
TEST_F(ArithmeticOptimizerTest, RemoveRedundantCast) {
  tensorflow::Scope scope = tensorflow::Scope::NewRootScope();
  Output placeholder = ops::Placeholder(scope.WithOpName("inputs"), DT_INT8,
                                        ops::Placeholder::Shape({2, 3}));
  Output noop_cast = ops::Cast(scope, placeholder, DT_INT8);
  Output fetch = ops::Identity(scope.WithOpName("outputs"), noop_cast);

  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(scope.ToGraphDef(&item.graph));

  auto input_tensor = GenerateRandomTensor<DT_INT8>(TensorShape({2, 3}));
  item.feed = {{"inputs", input_tensor}};
  auto expected = EvaluateNodes(item.graph, item.fetch, item.feed);
  EXPECT_EQ(1, expected.size());

  GraphDef optimized_graph;
  ArithmeticOptimizer optimizer;
  EnableOnlyRemoveRedundantCast(&optimizer);
  OptimizeAndPrune(&optimizer, &item, &optimized_graph);
  NodeMap node_map(&optimized_graph);

  // The Cast is gone and "outputs" reads the placeholder directly.
  EXPECT_EQ(2, optimized_graph.node_size());
  EXPECT_EQ(0, CountOpNodes(optimized_graph, "Cast"));
  EXPECT_TRUE(IsNodesDirectlyConnected(node_map, "inputs", "outputs"));

  // The rewrite must not change the computed value.
  auto actual = EvaluateNodes(optimized_graph, item.fetch, item.feed);
  EXPECT_EQ(1, actual.size());
  test::ExpectTensorEqual<int8>(expected[0], actual[0]);
}
// A chain of binary Adds over identically-shaped tensors collapses into a
// single AddN; the rewritten node is created in the innermost sub-scope ("y").
TEST_F(ArithmeticOptimizerTest, AddOpsRewrite_AddOpsOfIdenticalShape) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  tensorflow::Scope sx = s.NewSubScope("x");
  tensorflow::Scope sy = s.NewSubScope("y");

  auto a = ops::Variable(s.WithOpName("a"), {2, 2}, DT_FLOAT);
  auto b = ops::Variable(s.WithOpName("b"), {2, 2}, DT_FLOAT);
  auto c = ops::Variable(s.WithOpName("c"), {2, 2}, DT_FLOAT);
  auto add_ab = ops::Add(sx.WithOpName("Add_ab"), a, b);
  auto add_abc = ops::Add(sy.WithOpName("Add_abc"), add_ab, c);

  auto outputs = ops::Identity(s.WithOpName("outputs"), add_abc);

  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));

  auto a_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({2, 2}));
  auto b_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({2, 2}));
  auto c_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({2, 2}));
  std::vector<std::pair<string, Tensor>> feed = {
      {"a", a_t}, {"b", b_t}, {"c", c_t}};
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch, feed);
  EXPECT_EQ(1, tensors_expected.size());

  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyAddToAddNCombining(&optimizer);

  OptimizeAndPrune(&optimizer, &item, &output);

  // We expect the following rewrite(s) to occur:
  //
  //     +
  //    / \
  //   +   c      -->    AddN(a, b, c)
  //  / \
  // a   b
  EXPECT_EQ(5, output.node_size());

  NodeMap node_map(&output);

  // check add tree was replaced with AddN
  const NodeDef* collapsed_add =
      node_map.GetNode("y/ArithmeticOptimizer/AddOpsRewrite_Add_abc");
  ASSERT_NE(collapsed_add, nullptr);

  EXPECT_EQ("AddN", collapsed_add->op());
  EXPECT_EQ(3, collapsed_add->input_size());
  EXPECT_EQ("a", collapsed_add->input(0));
  EXPECT_EQ("b", collapsed_add->input(1));
  EXPECT_EQ("c", collapsed_add->input(2));

  // check output was re-wired to new node
  const NodeDef* updated_outputs = node_map.GetNode("outputs");
  ASSERT_NE(updated_outputs, nullptr);

  EXPECT_EQ(collapsed_add->name(), updated_outputs->input(0));

  // The rewrite must not change the computed value.
  auto tensors = EvaluateNodes(output, item.fetch, feed);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
}
// Two disjoint Add trees feeding a Mul are each collapsed into their own AddN
// in a single optimization run.
TEST_F(ArithmeticOptimizerTest, AddOpsRewrite_MultiplePasses) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();

  // Left Add tree: (a + b) + c.
  auto a = ops::Variable(s.WithOpName("a"), {2, 2}, DT_FLOAT);
  auto b = ops::Variable(s.WithOpName("b"), {2, 2}, DT_FLOAT);
  auto c = ops::Variable(s.WithOpName("c"), {2, 2}, DT_FLOAT);
  auto add_ab = ops::Add(s.WithOpName("Add_ab"), a, b);
  auto add_abc = ops::Add(s.WithOpName("Add_abc"), add_ab, c);

  // Right Add tree: (x + y) + z.
  auto x = ops::Variable(s.WithOpName("x"), {2, 2}, DT_FLOAT);
  auto y = ops::Variable(s.WithOpName("y"), {2, 2}, DT_FLOAT);
  auto z = ops::Variable(s.WithOpName("z"), {2, 2}, DT_FLOAT);
  auto add_xy = ops::Add(s.WithOpName("Add_xy"), x, y);
  auto add_xyz = ops::Add(s.WithOpName("Add_xyz"), add_xy, z);

  auto mul = ops::Multiply(s.WithOpName("Mul"), add_abc, add_xyz);
  auto outputs = ops::Identity(s.WithOpName("outputs"), mul);

  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));

  auto a_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({2, 2}));
  auto b_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({2, 2}));
  auto c_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({2, 2}));
  auto x_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({2, 2}));
  auto y_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({2, 2}));
  auto z_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({2, 2}));
  std::vector<std::pair<string, Tensor>> feed = {
      {"a", a_t}, {"b", b_t}, {"c", c_t}, {"x", x_t}, {"y", y_t}, {"z", z_t}};
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch, feed);
  EXPECT_EQ(1, tensors_expected.size());

  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyAddToAddNCombining(&optimizer);

  OptimizeAndPrune(&optimizer, &item, &output);

  // We expect the following rewrite(s) to occur:
  //
  //         *
  //       /   \
  //      +     +                        *
  //     / \   / \                     /   \
  //    +   c x   +    -->   AddN(a, b, c)  AddN(x, y, z))
  //   / \       / \
  //  a   b     y   z
  EXPECT_EQ(10, output.node_size());

  NodeMap node_map(&output);

  // check left Add subtree replaced with AddN
  const NodeDef* collapsed_left =
      node_map.GetNode("ArithmeticOptimizer/AddOpsRewrite_Add_abc");
  ASSERT_NE(collapsed_left, nullptr);

  EXPECT_EQ("AddN", collapsed_left->op());
  EXPECT_EQ(3, collapsed_left->input_size());
  EXPECT_EQ("a", collapsed_left->input(0));
  EXPECT_EQ("b", collapsed_left->input(1));
  EXPECT_EQ("c", collapsed_left->input(2));

  // check right Add subtree replaced with AddN
  const NodeDef* collapsed_right =
      node_map.GetNode("ArithmeticOptimizer/AddOpsRewrite_Add_xyz");
  ASSERT_NE(collapsed_right, nullptr);

  EXPECT_EQ("AddN", collapsed_right->op());
  EXPECT_EQ(3, collapsed_right->input_size());
  EXPECT_EQ("x", collapsed_right->input(0));
  EXPECT_EQ("y", collapsed_right->input(1));
  EXPECT_EQ("z", collapsed_right->input(2));

  // check that Mul inputs re-wired to new Nodes
  const NodeDef* updated_mul = node_map.GetNode("Mul");
  ASSERT_NE(updated_mul, nullptr);

  EXPECT_EQ("Mul", updated_mul->op());
  EXPECT_EQ(2, updated_mul->input_size());
  EXPECT_EQ(collapsed_left->name(), updated_mul->input(0));
  EXPECT_EQ(collapsed_right->name(), updated_mul->input(1));

  // The rewrite must not change the computed value.
  auto tensors = EvaluateNodes(output, item.fetch, feed);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
}
// An input feeding both branches of the Add tree must appear once per use in
// the resulting AddN: (a + b) + (b + c) -> AddN(a, b, b, c).
TEST_F(ArithmeticOptimizerTest, AddOpsRewrite_AddInputMultipleTimes) {
  tensorflow::Scope root = tensorflow::Scope::NewRootScope();
  auto a = ops::Variable(root.WithOpName("a"), {2, 2}, DT_FLOAT);
  auto b = ops::Variable(root.WithOpName("b"), {2, 2}, DT_FLOAT);
  auto c = ops::Variable(root.WithOpName("c"), {2, 2}, DT_FLOAT);
  auto add_ab = ops::Add(root.WithOpName("Add_ab"), a, b);
  auto add_bc = ops::Add(root.WithOpName("Add_bc"), b, c);
  auto add_all = ops::Add(root.WithOpName("Add_all"), add_ab, add_bc);
  auto outputs = ops::Identity(root.WithOpName("outputs"), add_all);

  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(root.ToGraphDef(&item.graph));

  auto tensor_a = GenerateRandomTensor<DT_FLOAT>(TensorShape({2, 2}));
  auto tensor_b = GenerateRandomTensor<DT_FLOAT>(TensorShape({2, 2}));
  auto tensor_c = GenerateRandomTensor<DT_FLOAT>(TensorShape({2, 2}));
  std::vector<std::pair<string, Tensor>> feed = {
      {"a", tensor_a}, {"b", tensor_b}, {"c", tensor_c}};
  auto expected = EvaluateNodes(item.graph, item.fetch, feed);
  EXPECT_EQ(1, expected.size());

  GraphDef optimized_graph;
  ArithmeticOptimizer optimizer;
  EnableOnlyAddToAddNCombining(&optimizer);
  OptimizeAndPrune(&optimizer, &item, &optimized_graph);

  // We expect the following rewrite(s) to occur:
  //
  //     +
  //    / \
  //   +   +        -->  AddN(a, b, b, c)
  //  / \ / \                   ^
  // a   b   c             b added twice!
  EXPECT_EQ(5, optimized_graph.node_size());

  NodeMap node_map(&optimized_graph);

  // The whole Add tree became a single AddN with `b` listed twice.
  const NodeDef* collapsed_add =
      node_map.GetNode("ArithmeticOptimizer/AddOpsRewrite_Add_all");
  ASSERT_NE(collapsed_add, nullptr);
  EXPECT_EQ("AddN", collapsed_add->op());
  EXPECT_EQ(4, collapsed_add->input_size());
  EXPECT_EQ("a", collapsed_add->input(0));
  EXPECT_EQ("b", collapsed_add->input(1));
  EXPECT_EQ("b", collapsed_add->input(2));
  EXPECT_EQ("c", collapsed_add->input(3));

  // The rewrite must not change the computed value.
  auto actual = EvaluateNodes(optimized_graph, item.fetch, feed);
  EXPECT_EQ(1, actual.size());
  test::ExpectTensorNear<float>(expected[0], actual[0], 1e-6);
}
// The AddN rewrite also fires when shapes are only symbolically equal: all
// three addends inherit the same unknown {-1, 2} shape from a common input.
TEST_F(ArithmeticOptimizerTest, AddOpsRewrite_AddOpsOfSymbolicallyEqualShape) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();

  // unknown input shape propagated symbolically through the graph
  auto input = ops::Variable(s.WithOpName("input"), {-1, 2}, DT_FLOAT);

  // [a, b, c] have symbolically equal shapes
  auto a = ops::Sqrt(s.WithOpName("a"), input);
  auto b = ops::Square(s.WithOpName("b"), input);
  auto c = ops::Round(s.WithOpName("c"), input);

  // [add_ab, add_abc] shape must be inferred from inputs
  auto add_ab = ops::Add(s.WithOpName("Add_ab"), a, b);
  auto add_abc = ops::Add(s.WithOpName("Add_abc"), add_ab, c);

  auto outputs = ops::Identity(s.WithOpName("outputs"), add_abc);

  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));

  auto x_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({2, 2}));
  std::vector<std::pair<string, Tensor>> feed = {{"input", x_t}};
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch, feed);
  EXPECT_EQ(1, tensors_expected.size());

  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyAddToAddNCombining(&optimizer);

  OptimizeAndPrune(&optimizer, &item, &output);

  // We expect the following rewrite(s) to occur:
  //
  //     +
  //    / \
  //   +   c      -->    AddN(a, b, c)
  //  / \
  // a   b
  EXPECT_EQ(6, output.node_size());

  NodeMap node_map(&output);

  // check add tree was replaced with AddN
  const NodeDef* collapsed_add =
      node_map.GetNode("ArithmeticOptimizer/AddOpsRewrite_Add_abc");
  ASSERT_NE(collapsed_add, nullptr);
  EXPECT_EQ("AddN", collapsed_add->op());
  EXPECT_EQ(3, collapsed_add->input_size());
  EXPECT_EQ("a", collapsed_add->input(0));
  EXPECT_EQ("b", collapsed_add->input(1));
  EXPECT_EQ("c", collapsed_add->input(2));

  // check output was re-wired to new node
  const NodeDef* updated_outputs = node_map.GetNode("outputs");
  ASSERT_NE(updated_outputs, nullptr);
  EXPECT_EQ(collapsed_add->name(), updated_outputs->input(0));

  // The rewrite must not change the computed value.
  auto tensors = EvaluateNodes(output, item.fetch, feed);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
}
// When the addends have different (broadcastable) shapes, the rewrite groups
// same-shaped addends into AddN leaves first and then combines the leaves
// with binary Adds, smallest shapes first, to minimize broadcast cost.
TEST_F(ArithmeticOptimizerTest, AddOpsRewrite_MinimizeBCast) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();

  // Shapes {32}, {32, 32} and {32, 32, 32} in each branch.
  auto a = ops::Variable(s.WithOpName("a"), {32}, DT_FLOAT);
  auto b = ops::Variable(s.WithOpName("b"), {32, 32}, DT_FLOAT);
  auto c = ops::Variable(s.WithOpName("c"), {32, 32, 32}, DT_FLOAT);
  auto add_ab = ops::Add(s.WithOpName("Add_ab"), a, b);
  auto add_abc = ops::Add(s.WithOpName("Add_abc"), add_ab, c);

  auto x = ops::Variable(s.WithOpName("x"), {32}, DT_FLOAT);
  auto y = ops::Variable(s.WithOpName("y"), {32, 32}, DT_FLOAT);
  auto z = ops::Variable(s.WithOpName("z"), {32, 32, 32}, DT_FLOAT);
  auto add_xy = ops::Add(s.WithOpName("Add_xy"), x, y);
  auto add_xyz = ops::Add(s.WithOpName("Add_xyz"), add_xy, z);

  auto add_all = ops::Add(s.WithOpName("AddAll"), add_abc, add_xyz);
  auto outputs = ops::Identity(s.WithOpName("outputs"), add_all);

  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));

  auto a_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({32}));
  auto b_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({32, 32}));
  auto c_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({32, 32, 32}));
  auto x_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({32}));
  auto y_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({32, 32}));
  auto z_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({32, 32, 32}));
  std::vector<std::pair<string, Tensor>> feed = {
      {"a", a_t}, {"b", b_t}, {"c", c_t}, {"x", x_t}, {"y", y_t}, {"z", z_t}};
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch, feed);
  EXPECT_EQ(1, tensors_expected.size());

  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyAddToAddNCombining(&optimizer);

  OptimizeAndPrune(&optimizer, &item, &output);

  // We expect the following rewrite(s) to occur:
  //  1) [a, x], [b, y], [c, z] - aggregate same shapes first
  //  2) Build an aggregation tree minimizing cost of broadcast
  //
  //         +                              +
  //       /   \                          /   \
  //      +     +                        +  AddN(c, z)
  //     / \   / \                     /   \
  //    +   c x   +  -->    AddN(a, x)  AddN(b, y)
  //   / \       / \
  //  a   b     y   z
  EXPECT_EQ(12, output.node_size());
  NodeMap node_map(&output);

  // expected names of outer and inner nodes
  string outer_add_name = "ArithmeticOptimizer/AddOpsRewrite_AddAll";
  string outer_0_add_name =
      "ArithmeticOptimizer/AddOpsRewrite_Internal_0_AddAll";
  string inner_0_add_name = "ArithmeticOptimizer/AddOpsRewrite_Leaf_0_AddAll";
  string inner_1_add_name = "ArithmeticOptimizer/AddOpsRewrite_Leaf_1_AddAll";
  string inner_2_add_name = "ArithmeticOptimizer/AddOpsRewrite_Leaf_2_AddAll";

  // Add [a, x] first
  const NodeDef* add_ax_node = node_map.GetNode(inner_0_add_name);
  ASSERT_NE(add_ax_node, nullptr);
  EXPECT_EQ("AddN", add_ax_node->op());
  EXPECT_EQ(2, add_ax_node->input_size());
  EXPECT_EQ("a", add_ax_node->input(0));
  EXPECT_EQ("x", add_ax_node->input(1));

  // Then add [b, y]
  const NodeDef* add_by_node = node_map.GetNode(inner_1_add_name);
  ASSERT_NE(add_by_node, nullptr);
  EXPECT_EQ("AddN", add_by_node->op());
  EXPECT_EQ(2, add_by_node->input_size());
  EXPECT_EQ("b", add_by_node->input(0));
  EXPECT_EQ("y", add_by_node->input(1));

  // Then add [c, z]
  const NodeDef* add_cz_node = node_map.GetNode(inner_2_add_name);
  ASSERT_NE(add_cz_node, nullptr);
  EXPECT_EQ("AddN", add_cz_node->op());
  EXPECT_EQ(2, add_cz_node->input_size());
  EXPECT_EQ("c", add_cz_node->input(0));
  EXPECT_EQ("z", add_cz_node->input(1));

  // Then add results together starting from smaller shapes [a, x] + [b, y]
  const NodeDef* outer_0_node = node_map.GetNode(outer_0_add_name);
  ASSERT_NE(outer_0_node, nullptr);
  EXPECT_EQ("Add", outer_0_node->op());
  EXPECT_EQ(2, outer_0_node->input_size());
  EXPECT_EQ(inner_0_add_name, outer_0_node->input(0));
  EXPECT_EQ(inner_1_add_name, outer_0_node->input(1));

  // And finally top level Add node
  const NodeDef* outer_node = node_map.GetNode(outer_add_name);
  ASSERT_NE(outer_node, nullptr);
  EXPECT_EQ("Add", outer_node->op());
  EXPECT_EQ(2, outer_node->input_size());
  EXPECT_EQ(outer_0_add_name, outer_node->input(0));
  EXPECT_EQ(inner_2_add_name, outer_node->input(1));

  // And outputs reading new top level Add node
  const NodeDef* updated_outputs = node_map.GetNode("outputs");
  ASSERT_NE(updated_outputs, nullptr);
  EXPECT_EQ(outer_add_name, updated_outputs->input(0));

  // The rewrite must not change the computed value.
  auto tensors = EvaluateNodes(output, item.fetch, feed);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
}
// Broadcast-minimizing AddN rewrite with symbolic (unknown) dimensions: the
// two {?, 1, 1} addends are summed first so the broadcast to {?, 32, 32}
// happens only once.
TEST_F(ArithmeticOptimizerTest, AddOpsRewrite_MinimizeBCastWithSymbolicShapes) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();

  // We have a small input with one unknown dimension
  auto small = ops::Variable(s.WithOpName("small"), {-1, 1, 1}, DT_DOUBLE);

  // And second input which is larger, but has the same unknown dimension
  // device spec prevents this node from rewriting
  auto d = "/device:CPU:0";
  auto v = ops::Variable(s.WithOpName("v"), {1, 32, 32}, DT_DOUBLE);
  auto large = ops::Add(s.WithOpName("large").WithDevice(d), small, v);

  // [a, c] have {?, 1, 1} shape, [b] has {?, 32, 32}
  auto a = ops::Sqrt(s.WithOpName("a"), small);
  auto b = ops::Square(s.WithOpName("b"), large);
  auto c = ops::Round(s.WithOpName("c"), small);

  // [add_ab, add_abc] shape must be inferred from inputs
  auto add_ab = ops::Add(s.WithOpName("Add_ab"), a, b);
  auto add_abc = ops::Add(s.WithOpName("Add_abc"), add_ab, c);

  auto outputs = ops::Identity(s.WithOpName("outputs"), add_abc);

  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));

  auto s_t = GenerateRandomTensor<DT_DOUBLE>(TensorShape({8, 1, 1}));
  auto v_t = GenerateRandomTensor<DT_DOUBLE>(TensorShape({1, 32, 32}));
  std::vector<std::pair<string, Tensor>> feed = {{"small", s_t}, {"v", v_t}};
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch, feed);
  EXPECT_EQ(1, tensors_expected.size());

  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyAddToAddNCombining(&optimizer);
  OptimizeAndPrune(&optimizer, &item, &output);

  // We expect the following rewrite(s) to occur: it's much cheaper to add small
  // tensors, and do the broadcast just once
  //
  //     +                  +
  //    / \                / \
  //   +   c      -->     +   b
  //  / \                / \
  // a   b              a   c
  EXPECT_EQ(9, output.node_size());
  NodeMap node_map(&output);

  // expected names of outer and inner nodes
  string outer_add_name = "ArithmeticOptimizer/AddOpsRewrite_Add_abc";
  string inner_add_name = "ArithmeticOptimizer/AddOpsRewrite_Leaf_0_Add_abc";

  // outer Add node
  const NodeDef* outer_add = node_map.GetNode(outer_add_name);
  ASSERT_NE(outer_add, nullptr);
  EXPECT_EQ("Add", outer_add->op());
  // Verify both inputs, not just the first: inner sum plus the large tensor.
  EXPECT_EQ(2, outer_add->input_size());
  EXPECT_EQ(inner_add_name, outer_add->input(0));
  EXPECT_EQ("b", outer_add->input(1));

  // inner AddN node
  const NodeDef* inner_add = node_map.GetNode(inner_add_name);
  ASSERT_NE(inner_add, nullptr);
  // The comment above claims this is an AddN; actually assert it.
  EXPECT_EQ("AddN", inner_add->op());
  EXPECT_EQ(2, inner_add->input_size());
  EXPECT_EQ("a", inner_add->input(0));
  EXPECT_EQ("c", inner_add->input(1));

  // check output was re-wired to new node
  const NodeDef* updated_outputs = node_map.GetNode("outputs");
  ASSERT_NE(updated_outputs, nullptr);
  EXPECT_EQ(outer_add_name, updated_outputs->input(0));

  // The rewrite must not change the computed value.
  auto tensors = EvaluateNodes(output, item.fetch, feed);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<double>(tensors_expected[0], tensors[0], 1e-6);
}
// Neg inputs to Add/Sub are absorbed into the binary op itself:
//   (-x) + y    -> y - x          x + (-y)    -> x - y
//   (-x) + (-y) -> (-x) - y       x - (-y)    -> x + y
//   (-x) - (-y) -> y - x
// Control dependencies carried by the removed Neg are transferred to the
// rewritten node. Plain x + y, x - y, and (-x) - y stay untouched.
TEST_F(ArithmeticOptimizerTest, RemoveNegation) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  auto x = ops::Variable(s.WithOpName("x"), {2, 2}, DT_FLOAT);
  auto y = ops::Variable(s.WithOpName("y"), {2, 2}, DT_FLOAT);
  Output neg_x = ops::Neg(s.WithOpName("Neg_x"), x);
  Output neg_y = ops::Neg(s.WithOpName("Neg_y"), y);
  Output add_x_y = ops::Add(s.WithOpName("Add_x_y"), x, y);
  Output add_negx_y = ops::Add(s.WithOpName("Add_negx_y"), neg_x, y);
  Output add_x_negy = ops::Add(s.WithOpName("Add_x_negy"), x, neg_y);
  Output add_negx_negy = ops::Add(s.WithOpName("Add_negx_negy"), neg_x, neg_y);
  Output sub_x_y = ops::Sub(s.WithOpName("Sub_x_y"), x, y);
  Output sub_negx_y = ops::Sub(s.WithOpName("Sub_negx_y"), neg_x, y);
  Output sub_x_negy = ops::Sub(s.WithOpName("Sub_x_negy"), x, neg_y);
  Output sub_negx_negy = ops::Sub(s.WithOpName("Sub_negx_negy"), neg_x, neg_y);
  // A Neg with a control dependency: the dependency must survive the rewrite.
  Output neg_x_with_dep = ops::Neg(
      s.WithOpName("Neg_x_with_dep").WithControlDependencies({add_x_y}), x);
  Output add_negx_with_dep_y =
      ops::Add(s.WithOpName("Add_negx_with_dep_y"), neg_x_with_dep, y);
  auto add_all =
      ops::AddN(s.WithOpName("add_all"),
                {add_x_y, add_negx_y, add_x_negy, add_negx_negy, sub_x_y,
                 sub_negx_y, sub_x_negy, sub_negx_negy, add_negx_with_dep_y});

  GrapplerItem item;
  item.fetch = {"add_all"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));

  auto x_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({2, 2}));
  auto y_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({2, 2}));
  std::vector<std::pair<string, Tensor>> feed = {{"x", x_t}, {"y", y_t}};
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch, feed);
  EXPECT_EQ(1, tensors_expected.size());

  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyRemoveNegation(&optimizer);
  OptimizeTwice(&optimizer, &item, &output);

  EXPECT_EQ(item.graph.node_size(), output.node_size());
  int found = 0;
  for (int i = 0; i < output.node_size(); ++i) {
    const NodeDef& node = output.node(i);
    if (node.name() == "Add_negx_y") {
      // (-x) + y  ->  y - x
      ++found;
      EXPECT_EQ("Sub", node.op());
      EXPECT_EQ(2, node.input_size());
      EXPECT_EQ("y", node.input(0));
      EXPECT_EQ("x", node.input(1));
    } else if (node.name() == "Add_x_negy") {
      // x + (-y)  ->  x - y
      ++found;
      EXPECT_EQ("Sub", node.op());
      EXPECT_EQ(2, node.input_size());
      EXPECT_EQ("x", node.input(0));
      EXPECT_EQ("y", node.input(1));
    } else if (node.name() == "Add_negx_negy") {
      // (-x) + (-y)  ->  (-x) - y; only the second Neg is absorbed.
      ++found;
      EXPECT_EQ("Sub", node.op());
      EXPECT_EQ(2, node.input_size());
      EXPECT_EQ("Neg_x", node.input(0));
      EXPECT_EQ("y", node.input(1));
    } else if (node.name() == "Sub_x_negy") {
      // x - (-y)  ->  x + y
      ++found;
      EXPECT_EQ("Add", node.op());
      EXPECT_EQ(2, node.input_size());
      EXPECT_EQ("x", node.input(0));
      EXPECT_EQ("y", node.input(1));
    } else if (node.name() == "Sub_negx_negy") {
      // (-x) - (-y)  ->  y - x
      ++found;
      EXPECT_EQ("Sub", node.op());
      EXPECT_EQ(2, node.input_size());
      EXPECT_EQ("y", node.input(0));
      EXPECT_EQ("x", node.input(1));
    } else if (node.name() == "Add_negx_with_dep_y") {
      // Same as Add_negx_y, plus the control dependency carried over.
      ++found;
      EXPECT_EQ("Sub", node.op());
      EXPECT_EQ(3, node.input_size());
      EXPECT_EQ("y", node.input(0));
      EXPECT_EQ("x", node.input(1));
      EXPECT_EQ("^Add_x_y", node.input(2));
    }
  }
  EXPECT_EQ(6, found);

  // The rewrite must not change the computed value.
  auto tensors = EvaluateNodes(output, item.fetch, feed);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
}
// Division by a square root is rewritten to a cheaper multiplication by the
// reciprocal square root:  x / sqrt(y)  ->  x * rsqrt(y).
TEST_F(ArithmeticOptimizerTest, ConvertSqrtDivToRsqrtMul) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  auto x = ops::Const(s.WithOpName("x"), {1.0f, 2.0f}, {1, 2});
  auto y = ops::Const(s.WithOpName("y"), {3.0f, 4.0f}, {1, 2});
  Output sqrt_y = ops::Sqrt(s.WithOpName("sqrt_y"), y);
  Output div_x_sqrt_y = ops::Div(s.WithOpName("output"), x, sqrt_y);

  GrapplerItem item;
  item.fetch = {"output"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch);
  EXPECT_EQ(1, tensors_expected.size());

  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlySqrtDivToRsqrtMul(&optimizer);
  OptimizeAndPrune(&optimizer, &item, &output);
  auto tensors = EvaluateNodes(output, item.fetch);
  EXPECT_EQ(1, tensors.size());

  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
  EXPECT_EQ(item.graph.node_size(), output.node_size());
  // Count the nodes we matched so the loop can't silently verify nothing
  // (mirrors the `found` pattern used by the RemoveNegation test).
  int found = 0;
  for (int i = 0; i < output.node_size(); ++i) {
    const NodeDef& node = output.node(i);
    if (node.name() == "output") {
      // Div became a Mul by the rewritten sqrt_y.
      ++found;
      EXPECT_EQ("Mul", node.op());
      EXPECT_EQ(2, node.input_size());
      EXPECT_EQ("x", node.input(0));
      EXPECT_EQ("sqrt_y", node.input(1));
    } else if (node.name() == "sqrt_y") {
      // Sqrt became Rsqrt in place.
      ++found;
      EXPECT_EQ("Rsqrt", node.op());
      EXPECT_EQ(1, node.input_size());
      EXPECT_EQ("y", node.input(0));
    }
  }
  EXPECT_EQ(2, found);
}
// Verifies the ConvertPow stage: Pow with a constant exponent of 2, 1, 0.5,
// 0, -0.5 or -1 is replaced by Square, Identity, Sqrt, Const, Rsqrt or
// Reciprocal respectively, keeping the old exponent input as a control
// dependency. A general exponent ("out") and exponents whose replacement
// would change the broadcast output shape ("out_bcast1"/"out_bcast2")
// must be left as Pow.
TEST_F(ArithmeticOptimizerTest, ConvertPow) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  auto x = ops::Const(s.WithOpName("x"), {1.0f, 2.0f}, {1, 2});
  auto y2 = ops::Const(s.WithOpName("y2"), {2.0f, 2.0f}, {1, 2});
  auto y1 = ops::Const(s.WithOpName("y1"), {1.0f, 1.0f}, {1, 2});
  auto yPoint5 = ops::Const(s.WithOpName("y.5"), {0.5f, 0.5f}, {1, 2});
  auto y0 = ops::Const(s.WithOpName("y0"), {0.0f, 0.0f}, {1, 2});
  auto y_Point5 = ops::Const(s.WithOpName("y_.5"), {-0.5f, -0.5f}, {1, 2});
  auto y_1 = ops::Const(s.WithOpName("y_1"), {-1.0f, -1.0f}, {1, 2});
  auto y = ops::Const(s.WithOpName("y"), {3.0f, 4.0f}, {1, 2});
  // z is a scalar; broadcasting against ones/zeros of shape {1, 3} means the
  // Pow cannot be removed without changing the output shape.
  auto z = ops::Const(s.WithOpName("z"), {42.0f}, {});
  auto ones = ops::Const(s.WithOpName("ones"), {1.0f, 1.0f, 1.0f}, {1, 3});
  auto zeros = ops::Const(s.WithOpName("zeros"), {0.0f, 0.0f, 0.0f}, {1, 3});
  Output out2 = ops::Pow(s.WithOpName("out2"), x, y2);
  Output out1 = ops::Pow(s.WithOpName("out1"), x, y1);
  Output outPoint5 = ops::Pow(s.WithOpName("out.5"), x, yPoint5);
  Output out0 = ops::Pow(s.WithOpName("out0"), x, y0);
  Output out_Point5 = ops::Pow(s.WithOpName("out_.5"), x, y_Point5);
  Output out_1 = ops::Pow(s.WithOpName("out_1"), x, y_1);
  Output out = ops::Pow(s.WithOpName("out"), x, y);
  Output out_bcast1 = ops::Pow(s.WithOpName("out_bcast1"), z, ones);
  Output out_bcast2 = ops::Pow(s.WithOpName("out_bcast2"), z, zeros);
  GrapplerItem item;
  item.fetch = {"out2", "out1", "out.5", "out0", "out_.5",
                "out_1", "out", "out_bcast1", "out_bcast2"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch);
  EXPECT_EQ(9, tensors_expected.size());
  GraphDef got;
  ArithmeticOptimizer optimizer;
  EnableOnlyConvertPow(&optimizer);
  OptimizeAndPrune(&optimizer, &item, &got);
  auto tensors = EvaluateNodes(got, item.fetch);
  EXPECT_EQ(9, tensors.size());
  // Numerical equivalence for every fetch output.
  for (int i = 0; i < tensors.size(); ++i) {
    EXPECT_EQ(tensors[i].NumElements(), tensors_expected[i].NumElements());
    test::ExpectTensorNear<float>(tensors[i], tensors_expected[i], 1e-6);
  }
  // Build the expected graph; each rewritten node keeps its old exponent
  // input as a control dependency so ordering side effects are preserved.
  GraphDef want;
  AddNode("x", "Const", {}, {}, &want);
  AddNode("y2", "Const", {}, {}, &want);
  AddNode("y1", "Const", {}, {}, &want);
  AddNode("y.5", "Const", {}, {}, &want);
  AddNode("y0", "Const", {}, {}, &want);
  AddNode("y_.5", "Const", {}, {}, &want);
  AddNode("y_1", "Const", {}, {}, &want);
  AddNode("y", "Const", {}, {}, &want);
  AddNode("z", "Const", {}, {}, &want);
  AddNode("ones", "Const", {}, {}, &want);
  AddNode("zeros", "Const", {}, {}, &want);
  AddNode("out2", "Square", {"x", AsControlDependency("y2")}, {}, &want);
  AddNode("out1", "Identity", {"x", AsControlDependency("y1")}, {}, &want);
  AddNode("out.5", "Sqrt", {"x", AsControlDependency("y.5")}, {}, &want);
  AddNode("out0", "Const",
          {AsControlDependency("x"), AsControlDependency("y0")}, {}, &want);
  AddNode("out_.5", "Rsqrt", {"x", AsControlDependency("y_.5")}, {}, &want);
  AddNode("out_1", "Reciprocal", {"x", AsControlDependency("y_1")}, {}, &want);
  AddNode("out", "Pow", {"x", "y"}, {}, &want);
  AddNode("out_bcast1", "Pow", {"z", "ones"}, {}, &want);
  AddNode("out_bcast2", "Pow", {"z", "zeros"}, {}, &want);
  CompareGraphs(want, got);
}
// Verifies the Log1p stage: Log(a + b) where one addend is the constant 1
// (x1 here) becomes Log1p(other addend), with the folded nodes' control
// dependencies carried over. out2's Add has no all-ones input, so it must
// stay a plain Log.
TEST_F(ArithmeticOptimizerTest, Log1p) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  auto x1 = ops::Const(s.WithOpName("x1"), {1.0f, 1.0f}, {1, 2});
  auto x2 = ops::Const(s.WithOpName("x2"), {2.0f, 2.0f}, {1, 2});
  auto x3 = ops::Const(s.WithOpName("x3"), {3.0f, 3.0f}, {1, 2});
  // a12 carries a control dependency on x3 that must survive the rewrite.
  auto a12 = ops::Add(s.WithOpName("a12").WithControlDependencies(x3), x1, x2);
  auto a23 = ops::Add(s.WithOpName("a23"), x2, x3);
  Output out1 = ops::Log(s.WithOpName("out1"), a12);
  Output out2 = ops::Log(s.WithOpName("out2"), a23);
  GrapplerItem item;
  item.fetch = {"out1", "out2"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch);
  EXPECT_EQ(2, tensors_expected.size());
  GraphDef got;
  ArithmeticOptimizer optimizer;
  EnableOnlyLog1p(&optimizer);
  OptimizeAndPrune(&optimizer, &item, &got);
  auto tensors = EvaluateNodes(got, item.fetch);
  EXPECT_EQ(2, tensors.size());
  for (int i = 0; i < 2; ++i) {
    EXPECT_EQ(tensors[i].NumElements(), tensors_expected[i].NumElements());
    test::ExpectTensorNear<float>(tensors[i], tensors_expected[i], 1e-6);
  }
  // Expected: out1 = Log1p(x2), with control deps on the folded x1 and the
  // original a12 dependency x3; out2 untouched.
  GraphDef want;
  AddNode("x1", "Const", {}, {}, &want);
  AddNode("x2", "Const", {}, {}, &want);
  AddNode("x3", "Const", {}, {}, &want);
  AddNode("a23", "Add", {"x2", "x3"}, {}, &want);
  AddNode("out1", "Log1p",
          {"x2", AsControlDependency("x1"), AsControlDependency("x3")}, {},
          &want);
  AddNode("out2", "Log", {"a23"}, {}, &want);
  CompareGraphs(want, got);
}
// Verifies the Expm1 stage: Exp(x) - 1 (x2 is the all-ones constant)
// becomes Expm1(x), with control dependencies preserved. out2 subtracts x3
// (not ones), so it keeps the Sub, which in turn keeps exp1 alive.
TEST_F(ArithmeticOptimizerTest, Expm1) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  auto x1 = ops::Const(s.WithOpName("x1"), {2.0f, 2.0f}, {1, 2});
  auto x2 = ops::Const(s.WithOpName("x2"), {1.0f, 1.0f}, {1, 2});
  auto x3 = ops::Const(s.WithOpName("x3"), {3.0f, 3.0f}, {1, 2});
  // exp1 carries a control dependency on x3 that must survive the rewrite.
  auto exp1 = ops::Exp(s.WithOpName("exp1").WithControlDependencies(x3), x1);
  Output out1 = ops::Sub(s.WithOpName("out1"), exp1, x2);
  Output out2 = ops::Sub(s.WithOpName("out2"), exp1, x3);
  GrapplerItem item;
  item.fetch = {"out1", "out2"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch);
  EXPECT_EQ(2, tensors_expected.size());
  GraphDef got;
  ArithmeticOptimizer optimizer;
  EnableOnlyExpm1(&optimizer);
  OptimizeAndPrune(&optimizer, &item, &got);
  auto tensors = EvaluateNodes(got, item.fetch);
  EXPECT_EQ(2, tensors.size());
  for (int i = 0; i < 2; ++i) {
    EXPECT_EQ(tensors[i].NumElements(), tensors_expected[i].NumElements());
    test::ExpectTensorNear<float>(tensors[i], tensors_expected[i], 1e-6);
  }
  // Expected: out1 = Expm1(x1) with control deps on the folded x2 and the
  // original exp1 dependency x3; out2 still consumes exp1 directly.
  GraphDef want;
  AddNode("x1", "Const", {}, {}, &want);
  AddNode("x2", "Const", {}, {}, &want);
  AddNode("x3", "Const", {}, {}, &want);
  AddNode("exp1", "Exp", {"x1", AsControlDependency("x3")}, {}, &want);
  AddNode("out1", "Expm1",
          {"x1", AsControlDependency("x2"), AsControlDependency("x3")}, {},
          &want);
  AddNode("out2", "Sub", {"exp1", "x3"}, {}, &want);
  CompareGraphs(want, got);
}
// Verifies the MinimizeBroadcasts stage on the simplest case: in a chain of
// Muls, the large operand (b: 32x32) is swapped with a small one (c: 32) so
// broadcasting to the big shape happens only once, at the top of the tree.
TEST_F(ArithmeticOptimizerTest, MinimizeBroadcasts_SimpleSwap) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  auto a = ops::Variable(s.WithOpName("a"), {32}, DT_FLOAT);
  auto b = ops::Variable(s.WithOpName("b"), {32, 32}, DT_FLOAT);
  auto c = ops::Variable(s.WithOpName("c"), {32}, DT_FLOAT);
  auto mul1 = ops::Mul(s.WithOpName("mul1"), a, b);
  auto mul2 = ops::Mul(s.WithOpName("mul2"), mul1, c);
  auto outputs = ops::Identity(s.WithOpName("outputs"), mul2);
  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  // Variables need concrete feeds to evaluate the graph.
  auto a_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({32}));
  auto b_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({32, 32}));
  auto c_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({32}));
  std::vector<std::pair<string, Tensor>> feed = {
      {"a", a_t}, {"b", b_t}, {"c", c_t}};
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch, feed);
  EXPECT_EQ(1, tensors_expected.size());
  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyMinimizeBroadcasts(&optimizer);
  OptimizeAndPrune(&optimizer, &item, &output);
  // We expect the following rewrite(s) to occur:
  //
  //       *                *
  //      / \              / \
  //     *   c    -->     *   b
  //    / \              / \
  //   a   b            a   c
  NodeMap node_map(&output);
  const NodeDef* mul1_node = node_map.GetNode("mul1");
  ASSERT_NE(mul1_node, nullptr);
  EXPECT_EQ("a", mul1_node->input(0));
  EXPECT_EQ("c", mul1_node->input(1));
  const NodeDef* mul2_node = node_map.GetNode("mul2");
  ASSERT_NE(mul2_node, nullptr);
  EXPECT_EQ("mul1", mul2_node->input(0));
  EXPECT_EQ("b", mul2_node->input(1));
  auto tensors = EvaluateNodes(output, item.fetch, feed);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
}
// Verifies the MinimizeBroadcasts stage on a tall Mul chain: the chain is
// rebalanced into a tree and the single large operand (b: 32x32) is pushed
// to the root so only the final Mul broadcasts to the big shape. Uses
// DT_DOUBLE to also cover a non-float dtype.
TEST_F(ArithmeticOptimizerTest, MinimizeBroadcasts_FlattenTallGraph) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  auto a = ops::Variable(s.WithOpName("a"), {32}, DT_DOUBLE);
  auto b = ops::Variable(s.WithOpName("b"), {32, 32}, DT_DOUBLE);
  auto c = ops::Variable(s.WithOpName("c"), {32}, DT_DOUBLE);
  auto d = ops::Variable(s.WithOpName("d"), {32}, DT_DOUBLE);
  auto e = ops::Variable(s.WithOpName("e"), {32}, DT_DOUBLE);
  auto mul1 = ops::Mul(s.WithOpName("mul1"), a, b);
  auto mul2 = ops::Mul(s.WithOpName("mul2"), mul1, c);
  auto mul3 = ops::Mul(s.WithOpName("mul3"), mul2, d);
  auto mul4 = ops::Mul(s.WithOpName("mul4"), mul3, e);
  auto outputs = ops::Identity(s.WithOpName("outputs"), mul4);
  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto a_t = GenerateRandomTensor<DT_DOUBLE>(TensorShape({32}));
  auto b_t = GenerateRandomTensor<DT_DOUBLE>(TensorShape({32, 32}));
  auto c_t = GenerateRandomTensor<DT_DOUBLE>(TensorShape({32}));
  auto d_t = GenerateRandomTensor<DT_DOUBLE>(TensorShape({32}));
  auto e_t = GenerateRandomTensor<DT_DOUBLE>(TensorShape({32}));
  std::vector<std::pair<string, Tensor>> feed = {
      {"a", a_t}, {"b", b_t}, {"c", c_t}, {"d", d_t}, {"e", e_t}};
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch, feed);
  EXPECT_EQ(1, tensors_expected.size());
  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyMinimizeBroadcasts(&optimizer);
  OptimizeAndPrune(&optimizer, &item, &output);
  // We expect the following rewrite(s) to occur: Graph is "flattened" and
  // largest shape pushed to the top.
  //
  //          *
  //         / \
  //        *   e              *
  //       / \               /   \
  //      *   d             *     b
  //     / \               / \
  //    *   c    -->      *   *
  //   / \               / \ / \
  //  a   b             a  c d  e
  NodeMap node_map(&output);
  const NodeDef* mul1_node = node_map.GetNode("mul1");
  ASSERT_NE(mul1_node, nullptr);
  EXPECT_EQ("a", mul1_node->input(0));
  EXPECT_EQ("c", mul1_node->input(1));
  const NodeDef* mul2_node = node_map.GetNode("mul2");
  ASSERT_NE(mul2_node, nullptr);
  EXPECT_EQ("d", mul2_node->input(0));
  EXPECT_EQ("e", mul2_node->input(1));
  const NodeDef* mul3_node = node_map.GetNode("mul3");
  ASSERT_NE(mul3_node, nullptr);
  EXPECT_EQ("mul1", mul3_node->input(0));
  EXPECT_EQ("mul2", mul3_node->input(1));
  const NodeDef* mul4_node = node_map.GetNode("mul4");
  ASSERT_NE(mul4_node, nullptr);
  EXPECT_EQ("mul3", mul4_node->input(0));
  EXPECT_EQ("b", mul4_node->input(1));
  auto tensors = EvaluateNodes(output, item.fetch, feed);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<double>(tensors_expected[0], tensors[0], 1e-6);
}
// Verifies the MinimizeBroadcasts stage when the matrix operand (D: 32x32)
// sits in a leaf Mul: the tree is rebuilt so the small vectors multiply
// first and D is only combined at the root. Note that the optimizer reuses
// existing node names, so after the rewrite "mul2" holds a*b and "mul1"
// holds (a*b)*c — hence the deliberately crossed local variable names below.
TEST_F(ArithmeticOptimizerTest, MinimizeBroadcasts_BuildTreeUp) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  // [a, b, c] - scalars, [d] - matrix
  auto a = ops::Variable(s.WithOpName("a"), {32}, DT_FLOAT);
  auto b = ops::Variable(s.WithOpName("b"), {32}, DT_FLOAT);
  auto c = ops::Variable(s.WithOpName("c"), {32}, DT_FLOAT);
  auto d = ops::Variable(s.WithOpName("D"), {32, 32}, DT_FLOAT);
  auto mul1 = ops::Mul(s.WithOpName("mul1"), a, b);
  auto mul2 = ops::Mul(s.WithOpName("mul2"), c, d);
  auto mul3 = ops::Mul(s.WithOpName("mul3"), mul1, mul2);
  auto outputs = ops::Identity(s.WithOpName("outputs"), mul3);
  GrapplerItem item;
  item.fetch = {"outputs"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto a_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({32}));
  auto b_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({32}));
  auto c_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({32}));
  auto d_t = GenerateRandomTensor<DT_FLOAT>(TensorShape({32, 32}));
  std::vector<std::pair<string, Tensor>> feed = {
      {"a", a_t}, {"b", b_t}, {"c", c_t}, {"D", d_t}};
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch, feed);
  EXPECT_EQ(1, tensors_expected.size());
  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyMinimizeBroadcasts(&optimizer);
  OptimizeAndPrune(&optimizer, &item, &output);
  // We expect the following rewrite(s) to occur:
  //
  //                              *
  //                            /   \
  //       *                   *     D
  //     /   \                / \
  //    *     *      ->      *   c
  //   / \   / \            / \
  //  a   b c   D          a   b
  NodeMap node_map(&output);
  const NodeDef* mul1_node = node_map.GetNode("mul2");
  ASSERT_NE(mul1_node, nullptr);
  EXPECT_EQ("a", mul1_node->input(0));
  EXPECT_EQ("b", mul1_node->input(1));
  const NodeDef* mul2_node = node_map.GetNode("mul1");
  ASSERT_NE(mul2_node, nullptr);
  EXPECT_EQ("mul2", mul2_node->input(0));
  EXPECT_EQ("c", mul2_node->input(1));
  const NodeDef* mul3_node = node_map.GetNode("mul3");
  ASSERT_NE(mul3_node, nullptr);
  // The matrix D is multiplied in only at the root.
  EXPECT_EQ("D", mul3_node->input(0));
  EXPECT_EQ("mul1", mul3_node->input(1));
  auto tensors = EvaluateNodes(output, item.fetch, feed);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
}
// Verifies the HoistCWiseUnaryChains stage on Concat: identical unary ops
// (or chains of them) applied to every Concat input are hoisted after the
// Concat, so they run once on the concatenated tensor. Control dependencies
// of the hoisted ops are transferred to the Concat. sin_a is shared by both
// test cases and is not hoistable (it does not appear on every input), so it
// stays a direct input of both concats.
TEST_F(ArithmeticOptimizerTest, HoistCWiseUnaryFromConcat) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output a = ops::Const(s.WithOpName("a"), 3.14f, {32});
  Output b = ops::Const(s.WithOpName("b"), 1.0f, {32});
  Output c = ops::Const(s.WithOpName("c"), 42.0f, {32});
  Output axis = ops::Const(s.WithOpName("axis"), 0, {});
  // ctrl1..ctrl3 exist only to act as control-dependency sources.
  Output ctrl1 = ops::Const(s.WithOpName("ctrl1"), 1, {});
  Output ctrl2 = ops::Const(s.WithOpName("ctrl2"), 2, {});
  Output ctrl3 = ops::Const(s.WithOpName("ctrl3"), 3, {});
  // Test case with chains of length 1.
  // Rewrites
  //       Concat({Exp(a), Exp(b), Exp(c)})
  // into
  //       Exp(Concat({a, b, c})).
  Output sin_a =
      ops::Sin(s.WithOpName("sin_a").WithControlDependencies(ctrl3), a);
  Output exp_a =
      ops::Exp(s.WithOpName("exp_a").WithControlDependencies(ctrl1), sin_a);
  Output exp_b = ops::Exp(s.WithOpName("exp_b"), b);
  Output exp_c =
      ops::Exp(s.WithOpName("exp_c").WithControlDependencies(ctrl2), c);
  Output concat =
      ops::Concat(s.WithOpName("concat"), {exp_a, exp_b, exp_c}, axis);
  Output id = ops::Identity(s.WithOpName("id"), concat);
  // Test case with chains of length 2.
  // Rewrites
  //       Concat({Cos(Exp(a)), Cos(Exp(b)), Cos(Exp(c))})
  // into
  //       Cos(Exp(Concat({a, b, c}))).
  Output exp_a2 =
      ops::Exp(s.WithOpName("exp_a2").WithControlDependencies(ctrl1), sin_a);
  Output exp_b2 = ops::Exp(s.WithOpName("exp_b2"), b);
  Output exp_c2 =
      ops::Exp(s.WithOpName("exp_c2").WithControlDependencies(ctrl2), c);
  Output cos_exp_a2 = ops::Cos(
      s.WithOpName("cos_exp_a2").WithControlDependencies(ctrl1), exp_a2);
  Output cos_exp_b2 = ops::Cos(
      s.WithOpName("cos_exp_b2").WithControlDependencies(ctrl3), exp_b2);
  Output cos_exp_c2 = ops::Cos(s.WithOpName("cos_exp_c2"), exp_c2);
  Output concat2 = ops::Concat(s.WithOpName("concat2"),
                               {cos_exp_a2, cos_exp_b2, cos_exp_c2}, axis);
  Output id2 = ops::Identity(s.WithOpName("id2"), concat2);
  GrapplerItem item;
  item.fetch = {"id", "id2"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch);
  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyHoistCWiseUnaryChains(&optimizer);
  OptimizeTwiceAndPrune(&optimizer, &item, &output);
  int found = 0;
  for (const NodeDef& node : output.node()) {
    if (node.name() == "concat") {
      // Concat now consumes the raw inputs plus the hoisted ops' control
      // dependencies.
      EXPECT_EQ(6, node.input_size());
      EXPECT_EQ("sin_a", node.input(0));
      EXPECT_EQ("b", node.input(1));
      EXPECT_EQ("c", node.input(2));
      EXPECT_EQ("axis", node.input(3));
      EXPECT_EQ("^ctrl1", node.input(4));
      EXPECT_EQ("^ctrl2", node.input(5));
      found++;
    }
    if (node.name() == "exp_a") {
      // The surviving Exp is re-wired to run after the Concat.
      EXPECT_EQ(2, node.input_size());
      EXPECT_EQ("concat", node.input(0));
      EXPECT_EQ("^ctrl1", node.input(1));
      found++;
    }
    if (node.name() == "id") {
      EXPECT_EQ(1, node.input_size());
      EXPECT_EQ("exp_a", node.input(0));
      found++;
    }
    if (node.name() == "concat2") {
      EXPECT_EQ(7, node.input_size());
      EXPECT_EQ("sin_a", node.input(0));
      EXPECT_EQ("b", node.input(1));
      EXPECT_EQ("c", node.input(2));
      EXPECT_EQ("axis", node.input(3));
      EXPECT_EQ("^ctrl1", node.input(4));
      EXPECT_EQ("^ctrl2", node.input(5));
      EXPECT_EQ("^ctrl3", node.input(6));
      found++;
    }
    if (node.name() == "exp_a2") {
      EXPECT_EQ(2, node.input_size());
      EXPECT_EQ("concat2", node.input(0));
      EXPECT_EQ("^ctrl1", node.input(1));
      found++;
    }
    if (node.name() == "cos_exp_a2") {
      // Length-2 chain: Cos now follows the hoisted Exp.
      EXPECT_EQ(2, node.input_size());
      EXPECT_EQ("exp_a2", node.input(0));
      EXPECT_EQ("^ctrl1", node.input(1));
      found++;
    }
    if (node.name() == "id2") {
      EXPECT_EQ(1, node.input_size());
      EXPECT_EQ("cos_exp_a2", node.input(0));
      found++;
    }
  }
  EXPECT_EQ(7, found);
  auto tensors = EvaluateNodes(output, item.fetch);
  EXPECT_EQ(tensors.size(), tensors_expected.size());
  EXPECT_EQ(tensors.size(), item.fetch.size());
  for (int i = 0; i < item.fetch.size(); ++i) {
    test::ExpectTensorNear<float>(tensors_expected[i], tensors[i], 1e-6);
  }
}
// Verifies the HoistCWiseUnaryChains stage on Split/SplitV: when the same
// unary chain prefix is applied to every output of a Split, the chain is
// moved before the Split (running once on the un-split tensor), the original
// per-output copies are pruned, and consumers are re-wired to the Split's
// output ports. Hoisted nodes get "ArithmeticOptimizer/_<name>_<split>"
// names; control dependencies of all pruned copies accumulate on the hoisted
// chain's first op.
TEST_F(ArithmeticOptimizerTest, HoistCWiseUnaryIntoSplit) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output x = ops::Const(s.WithOpName("x"), 3.1415f, {32});
  Output axis = ops::Const(s.WithOpName("axis"), 0, {});
  // ctrl1..ctrl3 exist only to act as control-dependency sources.
  Output ctrl1 = ops::Const(s.WithOpName("ctrl1"), 1, {});
  Output ctrl2 = ops::Const(s.WithOpName("ctrl2"), 2, {});
  Output ctrl3 = ops::Const(s.WithOpName("ctrl3"), 3, {});
  // Test case with chains of length 1.
  // Rewrites
  //          [Sin(y) for y in Split(x)]
  // into
  //          [y for y in Split(Sin(x))].
  ops::Split split1(s.WithOpName("split1"), axis, x, 2);
  Output sin_a =
      ops::Sin(s.WithOpName("sin_a").WithControlDependencies(ctrl1), split1[0]);
  Output id_a = ops::Identity(s.WithOpName("id_a"), sin_a);
  Output sin_b = ops::Sin(s.WithOpName("sin_b"), split1[1]);
  // exp_b only follows branch 1, so only the common Sin prefix is hoisted.
  Output exp_b = ops::Exp(s.WithOpName("exp_b"), sin_b);
  Output id_b = ops::Identity(s.WithOpName("id_b"), exp_b);
  // Test case with SplitV and chains of length 2.
  // Rewrites
  //          [Cos(Exp(y)) for y in Split(x)]
  // into
  //          [y for y in Split(Cos(Exp(x)))].
  Output size_splits2 = ops::Const(s.WithOpName("size_splits2"), {20, 12}, {2});
  ops::SplitV split2(s.WithOpName("split2"), x, size_splits2, axis, 2);
  Output exp_a2 = ops::Exp(
      s.WithOpName("exp_a2").WithControlDependencies(ctrl1), split2[0]);
  Output exp_b2 = ops::Exp(s.WithOpName("exp_b2"), split2[1]);
  Output cos_exp_a2 = ops::Cos(
      s.WithOpName("cos_exp_a2").WithControlDependencies(ctrl2), exp_a2);
  Output cos_exp_b2 = ops::Cos(
      s.WithOpName("cos_exp_b2").WithControlDependencies(ctrl3), exp_b2);
  Output id_a2 = ops::Identity(s.WithOpName("id_a2"), cos_exp_a2);
  Output id_b2 = ops::Identity(s.WithOpName("id_b2"), cos_exp_b2);
  GrapplerItem item;
  item.fetch = {"id_a", "id_b", "id_a2", "id_b2"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch);
  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyHoistCWiseUnaryChains(&optimizer);
  OptimizeTwiceAndPrune(&optimizer, &item, &output);
  int found = 0;
  for (const NodeDef& node : output.node()) {
    // The following 6 nodes should be pruned.
    EXPECT_NE(node.name(), "sin_a");
    EXPECT_NE(node.name(), "sin_b");
    EXPECT_NE(node.name(), "exp_a2");
    EXPECT_NE(node.name(), "exp_b2");
    EXPECT_NE(node.name(), "cos_exp_a2");
    EXPECT_NE(node.name(), "cos_exp_b2");
    if (node.name() == "split1") {
      // split1 now consumes the hoisted Sin instead of x.
      EXPECT_EQ(2, node.input_size());
      EXPECT_EQ("axis", node.input(0));
      EXPECT_EQ("ArithmeticOptimizer/_sin_a_split1", node.input(1));
      found++;
    }
    if (node.name() == "ArithmeticOptimizer/_sin_a_split1") {
      EXPECT_EQ("Sin", node.op());
      EXPECT_EQ(2, node.input_size());
      EXPECT_EQ("x", node.input(0));
      EXPECT_EQ("^ctrl1", node.input(1));
      found++;
    }
    if (node.name() == "id_a") {
      // Consumers of the pruned Sin now read the Split's output directly.
      EXPECT_EQ(1, node.input_size());
      EXPECT_EQ("split1", node.input(0));
      found++;
    }
    if (node.name() == "exp_b") {
      EXPECT_EQ(1, node.input_size());
      EXPECT_EQ("split1:1", node.input(0));
      found++;
    }
    if (node.name() == "id_b") {
      EXPECT_EQ(1, node.input_size());
      EXPECT_EQ("exp_b", node.input(0));
      found++;
    }
    if (node.name() == "ArithmeticOptimizer/_exp_a2_split2") {
      // First op of the hoisted chain collects all control dependencies of
      // the pruned per-branch copies.
      EXPECT_EQ("Exp", node.op());
      EXPECT_EQ(4, node.input_size());
      EXPECT_EQ("x", node.input(0));
      EXPECT_EQ("^ctrl1", node.input(1));
      EXPECT_EQ("^ctrl2", node.input(2));
      EXPECT_EQ("^ctrl3", node.input(3));
      found++;
    }
    if (node.name() == "ArithmeticOptimizer/_cos_exp_a2_split2") {
      EXPECT_EQ("Cos", node.op());
      EXPECT_EQ(1, node.input_size());
      EXPECT_EQ("ArithmeticOptimizer/_exp_a2_split2", node.input(0));
      found++;
    }
    if (node.name() == "split2") {
      EXPECT_EQ(3, node.input_size());
      EXPECT_EQ("ArithmeticOptimizer/_cos_exp_a2_split2", node.input(0));
      EXPECT_EQ("size_splits2", node.input(1));
      EXPECT_EQ("axis", node.input(2));
      found++;
    }
    if (node.name() == "id_a2") {
      EXPECT_EQ(1, node.input_size());
      EXPECT_EQ("split2", node.input(0));
      found++;
    }
    if (node.name() == "id_b2") {
      EXPECT_EQ(1, node.input_size());
      EXPECT_EQ("split2:1", node.input(0));
      found++;
    }
  }
  EXPECT_EQ(10, found);
  auto tensors = EvaluateNodes(output, item.fetch);
  EXPECT_EQ(tensors.size(), tensors_expected.size());
  EXPECT_EQ(tensors.size(), item.fetch.size());
  for (int i = 0; i < item.fetch.size(); ++i) {
    test::ExpectTensorNear<float>(tensors_expected[i], tensors[i], 1e-6);
  }
}
// Verifies the RemoveIdempotent stage: applying an idempotent op twice
// (Snapshot(Snapshot(a)), Identity(Identity(a))) collapses to a single
// application; consumers are re-wired to the surviving node.
TEST_F(ArithmeticOptimizerTest, RemoveIdempotent) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output a = ops::Const(s.WithOpName("a"), 3.14f, {32});
  Output sn1 = ops::Snapshot(s.WithOpName("sn1"), a);
  Output sn2 = ops::Snapshot(s.WithOpName("sn2"), sn1);
  Output out1 = ops::Identity(s.WithOpName("out1"), sn2);
  Output id1 = ops::Identity(s.WithOpName("id1"), a);
  Output id2 = ops::Identity(s.WithOpName("id2"), id1);
  Output out2 = ops::Identity(s.WithOpName("out2"), id2);
  GrapplerItem item;
  item.fetch = {"out1", "out2"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch);
  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyRemoveIdempotent(&optimizer);
  OptimizeTwice(&optimizer, &item, &output);
  // One Snapshot and one Identity are removed from the original 9 nodes.
  EXPECT_EQ(7, output.node_size());
  int found = 0;
  for (const NodeDef& node : output.node()) {
    if (node.name() == "out1") {
      // sn2 collapsed into sn1.
      EXPECT_EQ(1, node.input_size());
      EXPECT_EQ("sn1", node.input(0));
      found++;
    } else if (node.name() == "out2") {
      // id2 collapsed into id1.
      EXPECT_EQ(1, node.input_size());
      EXPECT_EQ("id1", node.input(0));
      found++;
    } else if (node.name() == "sn1") {
      EXPECT_EQ(1, node.input_size());
      EXPECT_EQ("a", node.input(0));
      found++;
    }
  }
  EXPECT_EQ(3, found);
  auto tensors = EvaluateNodes(output, item.fetch);
  EXPECT_EQ(tensors.size(), tensors_expected.size());
  EXPECT_EQ(tensors.size(), item.fetch.size());
  for (int i = 0; i < item.fetch.size(); ++i) {
    test::ExpectTensorNear<float>(tensors_expected[i], tensors[i], 1e-6);
  }
}
// Verifies the RemoveLogicalNot stage: LogicalNot applied to a comparison
// is folded into the inverted comparison op (Equal<->NotEqual,
// Less<->GreaterEqual, LessEqual<->Greater) and the Not node disappears.
TEST_F(ArithmeticOptimizerTest, RemoveLogicalNot) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  Output a = ops::Const(s.WithOpName("a"), 3.14f, {32});
  Output b = ops::Const(s.WithOpName("b"), -3.14f, {32});
  Output eq = ops::Equal(s.WithOpName("eq"), a, b);
  Output neq = ops::NotEqual(s.WithOpName("neq"), a, b);
  Output lt = ops::Less(s.WithOpName("lt"), a, b);
  Output le = ops::LessEqual(s.WithOpName("le"), a, b);
  Output gt = ops::Greater(s.WithOpName("gt"), a, b);
  Output ge = ops::GreaterEqual(s.WithOpName("ge"), a, b);
  // not_eq is reserved
  Output not_eq1 = ops::LogicalNot(s.WithOpName("not_eq1"), eq);
  Output not_neq = ops::LogicalNot(s.WithOpName("not_neq"), neq);
  Output not_lt = ops::LogicalNot(s.WithOpName("not_lt"), lt);
  Output not_le = ops::LogicalNot(s.WithOpName("not_le"), le);
  Output not_gt = ops::LogicalNot(s.WithOpName("not_gt"), gt);
  Output not_ge = ops::LogicalNot(s.WithOpName("not_ge"), ge);
  // Identities keep each LogicalNot reachable from the fetch set.
  Output id_not_eq = ops::Identity(s.WithOpName("id_not_eq"), not_eq1);
  Output id_not_neq = ops::Identity(s.WithOpName("id_not_neq"), not_neq);
  Output id_not_lt = ops::Identity(s.WithOpName("id_not_lt"), not_lt);
  Output id_not_le = ops::Identity(s.WithOpName("id_not_le"), not_le);
  Output id_not_gt = ops::Identity(s.WithOpName("id_not_gt"), not_gt);
  Output id_not_ge = ops::Identity(s.WithOpName("id_not_ge"), not_ge);
  GrapplerItem item;
  item.fetch = {"id_not_eq", "id_not_neq", "id_not_lt",
                "id_not_le", "id_not_gt", "id_not_ge"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch);
  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyRemoveLogicalNot(&optimizer);
  OptimizeTwice(&optimizer, &item, &output);
  int found = 0;
  // Each Identity now reads the comparison directly, and each comparison op
  // has been replaced by its negation.
  for (const NodeDef& node : output.node()) {
    if (node.name() == "id_not_eq") {
      EXPECT_EQ("eq", node.input(0));
      ++found;
    }
    if (node.name() == "id_not_neq") {
      EXPECT_EQ("neq", node.input(0));
      ++found;
    }
    if (node.name() == "id_not_lt") {
      EXPECT_EQ("lt", node.input(0));
      ++found;
    }
    if (node.name() == "id_not_le") {
      EXPECT_EQ("le", node.input(0));
      ++found;
    }
    if (node.name() == "id_not_gt") {
      EXPECT_EQ("gt", node.input(0));
      ++found;
    }
    if (node.name() == "id_not_ge") {
      EXPECT_EQ("ge", node.input(0));
      ++found;
    }
    if (node.name() == "eq") {
      EXPECT_EQ("NotEqual", node.op());
      ++found;
    }
    if (node.name() == "neq") {
      EXPECT_EQ("Equal", node.op());
      ++found;
    }
    if (node.name() == "lt") {
      EXPECT_EQ("GreaterEqual", node.op());
      ++found;
    }
    if (node.name() == "le") {
      EXPECT_EQ("Greater", node.op());
      ++found;
    }
    if (node.name() == "gt") {
      EXPECT_EQ("LessEqual", node.op());
      ++found;
    }
    if (node.name() == "ge") {
      EXPECT_EQ("Less", node.op());
      ++found;
    }
  }
  EXPECT_EQ(12, found);
  auto tensors = EvaluateNodes(output, item.fetch);
  EXPECT_EQ(tensors.size(), tensors_expected.size());
  EXPECT_EQ(tensors.size(), item.fetch.size());
  for (int i = 0; i < item.fetch.size(); ++i) {
    test::ExpectTensorEqual<bool>(tensors_expected[i], tensors[i]);
  }
}
// Verifies the OptimizeMaxOrMinOfMonotonic stage: Max(Sqrt(x)) is rewritten
// to Sqrt(Max(x)) because Sqrt is monotonically increasing, so the reduction
// runs on the raw input and the unary op runs on the (smaller) result.
TEST_F(ArithmeticOptimizerTest, OptimizeMaxOrMinOfMonotonicElementWise) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  auto x = ops::Const(s.WithOpName("x"), {1.0f, 2.0f}, {1, 2});
  Output sqrt = ops::Sqrt(s.WithOpName("sqrt"), x);
  Output reduce_max = ops::Max(s.WithOpName("reduce_max"), sqrt, {0});
  Output final_out = ops::Identity(s.WithOpName("final_out"), reduce_max);
  GrapplerItem item;
  item.fetch = {"final_out"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch);
  EXPECT_EQ(1, tensors_expected.size());
  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyOptimizeMaxOrMinOfMonotonic(&optimizer);
  OptimizeAndPrune(&optimizer, &item, &output);
  auto tensors = EvaluateNodes(output, item.fetch);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
  EXPECT_EQ(item.graph.node_size(), output.node_size());
  // Check if the inputs are switched
  int required_node_count = 0;
  for (int i = 0; i < output.node_size(); ++i) {
    const NodeDef& node = output.node(i);
    if (node.name() == "sqrt") {
      // Sqrt now consumes the reduction's result.
      EXPECT_EQ("Sqrt", node.op());
      EXPECT_EQ(1, node.input_size());
      EXPECT_EQ("reduce_max", node.input(0));
      ++required_node_count;
    } else if (node.name() == "reduce_max") {
      // Max now consumes the raw input x.
      EXPECT_EQ("Max", node.op());
      EXPECT_EQ(2, node.input_size());
      EXPECT_EQ("x", node.input(0));
      ++required_node_count;
    }
  }
  EXPECT_EQ(2, required_node_count);
}
// Verifies that OptimizeMaxOrMinOfMonotonic is skipped when the inner unary
// node ("sqrt") is itself fetched: swapping it with the reduction would
// change the fetched tensor's value, so the graph must be left untouched.
TEST_F(ArithmeticOptimizerTest,
       OptimizeMaxOrMinOfMonotonicElementWise_DoNotChangeFetchNode) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  auto x = ops::Const(s.WithOpName("x"), {1.0f, 2.0f}, {1, 2});
  Output sqrt = ops::Sqrt(s.WithOpName("sqrt"), x);
  Output reduce_max = ops::Max(s.WithOpName("reduce_max"), sqrt, {0});
  Output final_out = ops::Identity(s.WithOpName("final_out"), reduce_max);
  GrapplerItem item;
  // "sqrt" is in the fetch set, which blocks the rewrite.
  item.fetch = {"sqrt", "final_out"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch);
  EXPECT_EQ(2, tensors_expected.size());
  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyOptimizeMaxOrMinOfMonotonic(&optimizer);
  OptimizeTwice(&optimizer, &item, &output);
  // Should be a NoOp since we are not allowed to change the output of fetch
  // nodes.
  VerifyGraphsMatch(item.graph, output, __LINE__);
}
// Verifies OptimizeMaxOrMinOfMonotonic with a monotonically *decreasing*
// unary op: Max(Neg(x)) becomes Neg(Min(x)) — the reduction is swapped with
// the unary op AND flipped from Max to Min to preserve the result.
TEST_F(ArithmeticOptimizerTest,
       OptimizeMaxOrMinOfMonotonicElementWiseNonIncreasing) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  auto x = ops::Const(s.WithOpName("x"), {1.0f, 2.0f}, {1, 2});
  Output neg = ops::Neg(s.WithOpName("neg"), x);
  Output reduce_max = ops::Max(s.WithOpName("reduce_max"), neg, {0});
  Output final_out = ops::Identity(s.WithOpName("final_out"), reduce_max);
  GrapplerItem item;
  item.fetch = {"final_out"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch);
  EXPECT_EQ(1, tensors_expected.size());
  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyOptimizeMaxOrMinOfMonotonic(&optimizer);
  OptimizeAndPrune(&optimizer, &item, &output);
  auto tensors = EvaluateNodes(output, item.fetch);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
  EXPECT_EQ(item.graph.node_size(), output.node_size());
  // Check if the inputs are switched
  int required_node_count = 0;
  for (int i = 0; i < output.node_size(); ++i) {
    const NodeDef& node = output.node(i);
    if (node.name() == "neg") {
      // Neg now consumes the reduction's result.
      EXPECT_EQ("Neg", node.op());
      EXPECT_EQ(1, node.input_size());
      EXPECT_EQ("reduce_max", node.input(0));
      ++required_node_count;
    } else if (node.name() == "reduce_max") {
      // The Max was flipped to Min because Neg is decreasing.
      EXPECT_EQ("Min", node.op());
      EXPECT_EQ(2, node.input_size());
      EXPECT_EQ("x", node.input(0));
      ++required_node_count;
    }
  }
  EXPECT_EQ(2, required_node_count);
}
// Verifies the UnaryOpsComposition stage: a chain of supported unary ops
// (Sqrt -> Log -> Relu) is fused into a single _UnaryOpsComposition node
// whose "op_names" attribute lists the fused ops in execution order. The
// fusion is CPU-only, hence the explicit device placement below.
TEST_F(ArithmeticOptimizerTest, UnaryOpsComposition) {
  tensorflow::Scope s = tensorflow::Scope::NewRootScope();
  auto x = ops::Const(s.WithOpName("x"), {1.0f, 2.0f}, {1, 2});
  Output sqrt = ops::Sqrt(s.WithOpName("sqrt"), x);
  Output log = ops::Log(s.WithOpName("log"), sqrt);
  Output relu = ops::Relu(s.WithOpName("relu"), log);
  Output final_out = ops::Identity(s.WithOpName("final_out"), relu);
  GrapplerItem item;
  item.fetch = {"final_out"};
  TF_CHECK_OK(s.ToGraphDef(&item.graph));
  // Place all nodes on CPU.
  for (int i = 0; i < item.graph.node_size(); ++i) {
    item.graph.mutable_node(i)->set_device("/device:CPU:0");
  }
  auto tensors_expected = EvaluateNodes(item.graph, item.fetch);
  EXPECT_EQ(1, tensors_expected.size());
  GraphDef output;
  ArithmeticOptimizer optimizer;
  EnableOnlyUnaryOpsComposition(&optimizer);
  OptimizeAndPrune(&optimizer, &item, &output);
  // Only x, the fused op, and final_out should remain.
  EXPECT_EQ(3, output.node_size());
  // Check that Sqrt/Log/Relu were replaced with a single op.
  int required_node_count = 0;
  for (int i = 0; i < output.node_size(); ++i) {
    const NodeDef& node = output.node(i);
    if (node.name() == "final_out") {
      EXPECT_EQ("Identity", node.op());
      EXPECT_EQ(1, node.input_size());
      EXPECT_EQ("relu/unary_ops_composition", node.input(0));
      ++required_node_count;
    } else if (node.name() == "relu/unary_ops_composition") {
      EXPECT_EQ("_UnaryOpsComposition", node.op());
      EXPECT_EQ(1, node.input_size());
      EXPECT_EQ("x", node.input(0));
      // The fused op records the original chain, in order.
      auto op_names = node.attr().at("op_names").list().s();
      EXPECT_EQ(3, op_names.size());
      EXPECT_EQ("Sqrt", op_names[0]);
      EXPECT_EQ("Log", op_names[1]);
      EXPECT_EQ("Relu", op_names[2]);
      ++required_node_count;
    }
  }
  EXPECT_EQ(2, required_node_count);
  auto tensors = EvaluateNodes(output, item.fetch);
  EXPECT_EQ(1, tensors.size());
  test::ExpectTensorNear<float>(tensors_expected[0], tensors[0], 1e-6);
}
} // namespace grappler
} // namespace tensorflow
| snnn/tensorflow | tensorflow/core/grappler/optimizers/arithmetic_optimizer_test.cc | C++ | apache-2.0 | 128,921 |
require 'spec_helper'

# Unit tests for the openldap_access type's composite-namevar parsing: the
# title is split into position, what, access rules, and optional suffix.
describe Puppet::Type.type(:openldap_access) do
  describe 'namevar title patterns' do
    # Fixed typo "componsite" -> "composite" in the example descriptions.
    it 'handles composite name' do
      access = described_class.new(name: 'to attrs=userPassword,shadowLastChange by dn="cn=admin,dc=example,dc=com" write by anonymous auth')
      expect(access[:name]).to eq('to attrs=userPassword,shadowLastChange by dn="cn=admin,dc=example,dc=com" write by anonymous auth')
      expect(access[:what]).to eq('attrs=userPassword,shadowLastChange')
      expect(access[:access]).to eq([['by dn="cn=admin,dc=example,dc=com" write', 'by anonymous auth']])
    end
    it 'handles composite name with position' do
      access = described_class.new(name: '{0}to attrs=userPassword,shadowLastChange by dn="cn=admin,dc=example,dc=com" write by anonymous auth')
      expect(access[:name]).to eq('{0}to attrs=userPassword,shadowLastChange by dn="cn=admin,dc=example,dc=com" write by anonymous auth')
      expect(access[:position]).to eq('0')
      expect(access[:what]).to eq('attrs=userPassword,shadowLastChange')
      expect(access[:access]).to eq([['by dn="cn=admin,dc=example,dc=com" write', 'by anonymous auth']])
    end
    # This example previously duplicated the description above although it
    # additionally verifies the "on <suffix>" clause.
    it 'handles composite name with position and suffix' do
      access = described_class.new(name: '{0}to attrs=userPassword,shadowLastChange by dn="cn=admin,dc=example,dc=com" write by anonymous auth on dc=example,dc=com')
      expect(access[:name]).to eq('{0}to attrs=userPassword,shadowLastChange by dn="cn=admin,dc=example,dc=com" write by anonymous auth on dc=example,dc=com')
      expect(access[:position]).to eq('0')
      expect(access[:what]).to eq('attrs=userPassword,shadowLastChange')
      expect(access[:access]).to eq([['by dn="cn=admin,dc=example,dc=com" write', 'by anonymous auth']])
      expect(access[:suffix]).to eq('dc=example,dc=com')
    end
    it 'handles specific value of attr' do
      access = described_class.new(name: 'to attrs=objectClass val=posixAccount by dn="cn=admin,dc=example,dc=com" write by anonymous auth')
      expect(access[:name]).to eq('to attrs=objectClass val=posixAccount by dn="cn=admin,dc=example,dc=com" write by anonymous auth')
      expect(access[:what]).to eq('attrs=objectClass val=posixAccount')
      expect(access[:access]).to eq([['by dn="cn=admin,dc=example,dc=com" write', 'by anonymous auth']])
    end
  end
  describe 'access' do
    it 'handles array of values' do
      access = described_class.new(name: 'foo', access: ['by dn="cn=admin,dc=example,dc=com" write', 'by anonymous auth'])
      expect(access[:access]).to eq([['by dn="cn=admin,dc=example,dc=com" write'], ['by anonymous auth']])
    end
    it 'handles string' do
      access = described_class.new(name: 'foo', access: 'by dn="cn=admin,dc=example,dc=com" write by anonymous auth')
      expect(access[:access]).to eq([['by dn="cn=admin,dc=example,dc=com" write', 'by anonymous auth']])
    end
  end
end
| nfrance-conseil/puppet-openldap | spec/unit/puppet/type/openldap_acess_spec.rb | Ruby | apache-2.0 | 2,911 |
/*
Copyright 2020 The Knative Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package generators
import (
"io"
"k8s.io/gengo/generator"
"k8s.io/gengo/namer"
"k8s.io/gengo/types"
"k8s.io/klog"
)
// reconcilerControllerStubGenerator produces a file of the stub of the
// controller for a custom impl with injection.
type reconcilerControllerStubGenerator struct {
generator.DefaultGen
outputPackage string // package the stub file is generated into
imports namer.ImportTracker // records imports referenced while rendering the template
typeToGenerate *types.Type // the single resource type this generator emits a stub for
reconcilerPkg string // package providing the generated reconciler helpers (NewImpl, ClassAnnotationKey)
informerPackagePath string // injection informer package whose Get(ctx) is used in the stub
reconcilerClass string // class annotation value used for filtering, when present
hasReconcilerClass bool // true when a reconciler class annotation was supplied
}
// Compile-time assertion that the stub generator satisfies generator.Generator.
var _ generator.Generator = (*reconcilerControllerStubGenerator)(nil)
// Filter restricts generation to the single type this generator was built for.
func (g *reconcilerControllerStubGenerator) Filter(c *generator.Context, t *types.Type) bool {
// Only process the type for this generator.
return t == g.typeToGenerate
}
// Namers exposes the "raw" name system used by the template; the raw namer
// renders package-qualified names and records them in the import tracker.
func (g *reconcilerControllerStubGenerator) Namers(c *generator.Context) namer.NameSystems {
return namer.NameSystems{
"raw": namer.NewRawNamer(g.outputPackage, g.imports),
}
}
// Imports returns every import line accumulated by the tracker while the
// template was rendered, so the emitted stub file compiles as written.
func (g *reconcilerControllerStubGenerator) Imports(c *generator.Context) []string {
	var lines []string
	lines = append(lines, g.imports.ImportLines()...)
	return lines
}
// GenerateType renders the controller stub for type t into w. Every
// cross-package symbol is resolved through the universe so that rendering it
// with the "raw" namer also records the corresponding import.
func (g *reconcilerControllerStubGenerator) GenerateType(c *generator.Context, t *types.Type, w io.Writer) error {
sw := generator.NewSnippetWriter(w, c, "{{", "}}")
klog.V(5).Info("processing type ", t)
// Arguments consumed by reconcilerControllerStub; "{{.x|raw}}" in the
// template renders the package-qualified name of the entry.
m := map[string]interface{}{
"type": t,
"class": g.reconcilerClass,
"hasClass": g.hasReconcilerClass,
"informerGet": c.Universe.Function(types.Name{
Package: g.informerPackagePath,
Name: "Get",
}),
"controllerImpl": c.Universe.Type(types.Name{Package: "knative.dev/pkg/controller", Name: "Impl"}),
"reconcilerNewImpl": c.Universe.Type(types.Name{
Package: g.reconcilerPkg,
Name: "NewImpl",
}),
"loggingFromContext": c.Universe.Function(types.Name{
Package: "knative.dev/pkg/logging",
Name: "FromContext",
}),
"contextContext": c.Universe.Type(types.Name{
Package: "context",
Name: "Context",
}),
"configmapWatcher": c.Universe.Type(types.Name{
Package: "knative.dev/pkg/configmap",
Name: "Watcher",
}),
"classAnnotationKey": c.Universe.Variable(types.Name{
Package: g.reconcilerPkg,
Name: "ClassAnnotationKey",
}),
"annotationFilterFunc": c.Universe.Function(types.Name{
Package: "knative.dev/pkg/reconciler",
Name: "AnnotationFilterFunc",
}),
"filterHandler": c.Universe.Type(types.Name{
Package: "k8s.io/client-go/tools/cache",
Name: "FilteringResourceEventHandler",
}),
}
sw.Do(reconcilerControllerStub, m)
return sw.Error()
}
// reconcilerControllerStub is the go-template for the generated NewController
// stub. It is executed by GenerateType with the argument map built there; the
// {{if .hasClass}} branches emit class-annotation filtering when enabled.
var reconcilerControllerStub = `
// TODO: PLEASE COPY AND MODIFY THIS FILE AS A STARTING POINT
// NewController creates a Reconciler for {{.type|public}} and returns the result of NewImpl.
func NewController(
ctx {{.contextContext|raw}},
cmw {{.configmapWatcher|raw}},
) *{{.controllerImpl|raw}} {
logger := {{.loggingFromContext|raw}}(ctx)
{{.type|lowercaseSingular}}Informer := {{.informerGet|raw}}(ctx)
{{if .hasClass}}
classValue := "default" // TODO: update this to the appropriate value.
classFilter := {{.annotationFilterFunc|raw}}({{.classAnnotationKey|raw}}, classValue, false /*allowUnset*/)
{{end}}
// TODO: setup additional informers here.
{{if .hasClass}}// TODO: remember to use the classFilter from above to filter appropriately.{{end}}
r := &Reconciler{}
impl := {{.reconcilerNewImpl|raw}}(ctx, r{{if .hasClass}}, classValue{{end}})
logger.Info("Setting up event handlers.")
{{if .hasClass}}
{{.type|lowercaseSingular}}Informer.Informer().AddEventHandler({{.filterHandler|raw}}{
FilterFunc: classFilter,
Handler: controller.HandleAll(impl.Enqueue),
})
{{else}}
{{.type|lowercaseSingular}}Informer.Informer().AddEventHandler(controller.HandleAll(impl.Enqueue))
{{end}}
// TODO: add additional informer event handlers here.
return impl
}
`
| google/knative-gcp | vendor/knative.dev/pkg/codegen/cmd/injection-gen/generators/reconciler_controller_stub.go | GO | apache-2.0 | 4,426 |
/*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.mss.examples.petstore.util.fe.view;
import java.io.Serializable;
import javax.faces.bean.ManagedBean;
import javax.faces.bean.SessionScoped;
/**
 * Session-scoped JSF managed bean providing navigation outcomes for the
 * petstore views. Outcomes with "faces-redirect=true" trigger a client
 * redirect; the others use plain (forward) navigation.
 */
@ManagedBean
@SessionScoped
public class NavigationBean implements Serializable {
private static final long serialVersionUID = -8628674465932953415L;
/**
 * @return redirect outcome for the store's pet listing page.
 */
public String redirectToStoreWelcome() {
return "pet/list.xhtml?faces-redirect=true";
}
/**
 * @return redirect outcome for the admin landing page.
 */
public String redirectToAdminWelcome() {
return "pet/index.xhtml?faces-redirect=true";
}
/**
 * @return navigation outcome for the login view (forward, no redirect).
 */
public String toLogin() {
return "/login.xhtml";
}
/**
 * @return implicit navigation outcome back to the list view.
 */
public String backtoList() {
return "list";
}
}
| dakshika/product-mss | samples/petstore/frontend-util/src/main/java/org/wso2/carbon/mss/examples/petstore/util/fe/view/NavigationBean.java | Java | apache-2.0 | 1,401 |
/*
Copyright 2011-2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.security.zynamics.binnavi.disassembly;
import com.google.security.zynamics.zylib.gui.zygraph.edges.IViewEdgeListener;
/**
 * Interface for objects that want to be notified about changes in edges.
 * Both callbacks are invoked after the corresponding comment has already
 * changed on the edge.
 */
public interface INaviEdgeListener extends IViewEdgeListener {
/**
 * Invoked after the global comment of an edge changed.
 *
 * @param naviEdge The edge whose global comment changed.
 */
void changedGlobalComment(CNaviViewEdge naviEdge);
/**
 * Invoked after the local comment of an edge changed.
 *
 * @param naviEdge The edge whose local comment changed.
 */
void changedLocalComment(CNaviViewEdge naviEdge);
}
| chubbymaggie/binnavi | src/main/java/com/google/security/zynamics/binnavi/disassembly/INaviEdgeListener.java | Java | apache-2.0 | 1,248 |
package com.bazaarvoice.emodb.common.dropwizard.leader;
import com.bazaarvoice.curator.recipes.leader.LeaderService;
import com.bazaarvoice.emodb.common.dropwizard.task.TaskRegistry;
import com.bazaarvoice.emodb.common.zookeeper.leader.PartitionedLeaderService;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.Service;
import com.google.inject.Inject;
import io.dropwizard.servlets.tasks.Task;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.PrintWriter;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentMap;
/**
 * Shows the current status of leadership processes managed by {@link LeaderService}. Allows terminating
 * individual leadership processes, but such that they can be restarted only by restarting the entire server.
 */
public class LeaderServiceTask extends Task {
private static final Logger _log = LoggerFactory.getLogger(LeaderServiceTask.class);
// Registered leadership processes, keyed by display name.
private final ConcurrentMap<String, LeaderService> _selectorMap = Maps.newConcurrentMap();
@Inject
public LeaderServiceTask(TaskRegistry tasks) {
super("leader");
tasks.addTask(this);
}
/**
 * Registers a leadership process under the given name so it appears in the
 * task output and can be targeted by the "release"/"terminate" parameters.
 * The registration is removed automatically when the service terminates or
 * fails.
 */
public void register(final String name, final LeaderService leaderService) {
_selectorMap.put(name, leaderService);
// Unregister automatically to avoid memory leaks.
leaderService.addListener(new AbstractServiceListener() {
@Override
public void terminated(Service.State from) {
unregister(name, leaderService);
}
@Override
public void failed(Service.State from, Throwable failure) {
unregister(name, leaderService);
}
}, MoreExecutors.sameThreadExecutor());
}
/**
 * Registers each partition of a partitioned leader service individually,
 * under the names "name-0", "name-1", ...
 */
public void register(final String name, final PartitionedLeaderService partitionedLeaderService) {
int partition = 0;
for (LeaderService leaderService : partitionedLeaderService.getPartitionLeaderServices()) {
register(String.format("%s-%d", name, partition++), leaderService);
}
}
// Removes the mapping only if it still refers to the given service instance.
public void unregister(String name, LeaderService leaderService) {
_selectorMap.remove(name, leaderService);
}
/**
 * Handles the dropwizard task request. Supported (repeatable) parameters:
 * "release" temporarily gives up leadership for the named process so the
 * cluster re-elects; "terminate" stops the named leader process until the
 * server restarts. Always prints the current status of all registered
 * processes afterwards.
 */
@Override
public void execute(ImmutableMultimap<String, String> parameters, PrintWriter out) throws Exception {
// The 'release' argument tells a server to give up leadership and let a new leader be elected, possibly
// re-electing the current server. This is useful for rebalancing leader-controlled activities.
for (String name : parameters.get("release")) {
LeaderService leaderService = _selectorMap.get(name);
if (leaderService == null) {
out.printf("Unknown leader process: %s%n", name);
continue;
}
Service actualService = leaderService.getCurrentDelegateService().orNull();
if (actualService == null || !actualService.isRunning()) {
out.printf("Process is not currently elected leader: %s%n", name);
continue;
}
_log.warn("Temporarily releasing leadership for process: {}", name);
out.printf("Temporarily releasing leadership for process: %s, cluster will elect a new leader.%n", name);
actualService.stopAndWait();
}
// The 'terminate' argument tells a server to give up leadership permanently (or until the server restarts).
for (String name : parameters.get("terminate")) {
LeaderService leaderService = _selectorMap.get(name);
if (leaderService == null) {
out.printf("Unknown leader process: %s%n", name);
continue;
}
_log.warn("Terminating leader process for: {}", name);
out.printf("Terminating leader process for: %s. Restart the server to restart the leader process.%n", name);
leaderService.stopAndWait();
}
// Print current status.
for (Map.Entry<String, LeaderService> entry : new TreeMap<>(_selectorMap).entrySet()) {
String name = entry.getKey();
LeaderService leaderService = entry.getValue();
out.printf("%s: %s (leader=%s)%n", name,
describeState(leaderService.state(), leaderService.hasLeadership()),
getLeaderId(leaderService));
}
}
// Renders a service state for display; RUNNING without leadership means the
// process is waiting to win the election.
private String describeState(Service.State state, boolean hasLeadership) {
if (state == Service.State.RUNNING && !hasLeadership) {
return "waiting to win leadership election";
} else {
return state.name();
}
}
// Best-effort lookup of the current leader's id; never propagates failures.
private String getLeaderId(LeaderService leaderService) {
try {
return leaderService.getLeader().getId();
} catch (Exception e) {
return "<unknown>";
}
}
}
| billkalter/emodb | common/dropwizard/src/main/java/com/bazaarvoice/emodb/common/dropwizard/leader/LeaderServiceTask.java | Java | apache-2.0 | 5,047 |
package org.elasticsearch.action.get;
import com.bazaarvoice.elasticsearch.client.core.spi.RestExecutor;
import com.bazaarvoice.elasticsearch.client.core.spi.RestResponse;
import com.bazaarvoice.elasticsearch.client.core.util.UrlBuilder;
import org.elasticsearch.action.AbstractRestClientAction;
import org.elasticsearch.common.base.Function;
import org.elasticsearch.common.util.concurrent.Futures;
import org.elasticsearch.common.util.concurrent.ListenableFuture;
import static com.bazaarvoice.elasticsearch.client.core.util.StringFunctions.booleanToString;
import static com.bazaarvoice.elasticsearch.client.core.util.StringFunctions.commaDelimitedToString;
import static com.bazaarvoice.elasticsearch.client.core.util.UrlBuilder.urlEncode;
import static com.bazaarvoice.elasticsearch.client.core.util.Validation.notNull;
import static org.elasticsearch.common.base.Optional.fromNullable;
/**
 * The inverse of {@link org.elasticsearch.rest.action.get.RestGetAction}:
 * turns a transport-level {@link GetRequest} into an HTTP GET against the
 * REST API and transforms the raw response into {@code ResponseType}.
 *
 * @param <ResponseType> type produced by the response transform
 */
public class GetRest<ResponseType> extends AbstractRestClientAction<GetRequest, ResponseType> {
public GetRest(final String protocol, final String host, final int port, final RestExecutor executor, final Function<RestResponse, ResponseType> responseTransform) {
super(protocol, host, port, executor, responseTransform);
}
@Override public ListenableFuture<ResponseType> act(GetRequest request) {
// Build /{index}/{type}/{id} with optional query parameters mirroring
// the fields set on the transport-level request; index/type/id are
// mandatory and URL-encoded.
UrlBuilder url = UrlBuilder.create()
.protocol(protocol).host(host).port(port)
.path(urlEncode(notNull(request.index())))
.seg(urlEncode(notNull(request.type())))
.seg(urlEncode(notNull(request.id())))
.paramIfPresent("refresh", fromNullable(request.refresh()).transform(booleanToString))
.paramIfPresent("routing", fromNullable(request.routing()))
// note parent(string) seems just to set the routing, so we don't need to provide it here
.paramIfPresent("preference", fromNullable(request.preference()))
.paramIfPresent("realtime", fromNullable(request.realtime()).transform(booleanToString))
.paramIfPresent("fields", fromNullable(request.fields()).transform(commaDelimitedToString));
// Execute asynchronously and map the raw REST response to the typed result.
return Futures.transform(executor.get(url.url()), responseTransform);
}
}
| bazaarvoice/es-client-java | es-rest-client-1.3/core/src/main/java/org/elasticsearch/action/get/GetRest.java | Java | apache-2.0 | 2,328 |
package internalversion
import (
"github.com/openshift/origin/pkg/template/api"
"k8s.io/apimachinery/pkg/api/errors"
)
// TemplateListerExpansion allows custom methods to be added to
// TemplateLister.
type TemplateListerExpansion interface {
// GetByUID retrieves the template whose metadata UID matches the argument.
GetByUID(uid string) (*api.Template, error)
}
// TemplateNamespaceListerExpansion allows custom methods to be added to
// TemplateNamespaceLister. It is currently empty.
type TemplateNamespaceListerExpansion interface{}
// GetByUID looks a template up through the TemplateUIDIndex index and returns
// a standard NotFound error when no template carries the given UID.
func (s templateLister) GetByUID(uid string) (*api.Template, error) {
	matches, err := s.indexer.ByIndex(api.TemplateUIDIndex, uid)
	switch {
	case err != nil:
		return nil, err
	case len(matches) == 0:
		return nil, errors.NewNotFound(api.Resource("template"), uid)
	default:
		return matches[0].(*api.Template), nil
	}
}
| louyihua/origin | pkg/template/generated/listers/template/internalversion/template_expansion.go | GO | apache-2.0 | 756 |
# Common RSpec bootstrap for the puppetdb module specs: wires up load paths,
# pulls in the shared puppet spec helper, and captures Puppet log output.
dir = File.expand_path(File.dirname(__FILE__))
$LOAD_PATH.unshift File.join(dir, "../lib")
# Maybe puppetlabs_spec_helper is in a directory next to puppetdb. If not, we
# don't fail any worse than we already would.
$LOAD_PATH.push File.join(dir, "../../../puppetlabs_spec_helper")
require 'rspec'
require 'puppetlabs_spec_helper/puppet_spec_helper'
require 'tmpdir'
require 'fileutils'
require 'puppet'
require 'puppet/util/log'
require 'puppet/util/puppetdb/command'
RSpec.configure do |config|
# Route Puppet's log output into @logs before every example so specs can
# assert on emitted messages.
config.before :each do
@logs = []
Puppet::Util::Log.level = :info
Puppet::Util::Log.newdestination(Puppet::Test::LogCollector.new(@logs))
# Convenience accessor returning just the message strings of captured logs.
# NOTE(review): defined inside the before hook, so it is (re)defined each
# example run; appears intentional to make it available in example scope —
# confirm before moving it out.
def test_logs
@logs.map(&:message)
end
end
end
| melissa/puppetdb | puppet/spec/spec_helper.rb | Ruby | apache-2.0 | 718 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.tools;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.util.HashMap;
import java.util.Map;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.PDPageContentStream;
import org.apache.pdfbox.pdmodel.font.PDFont;
import org.apache.pdfbox.pdmodel.font.PDType0Font;
import org.apache.pdfbox.pdmodel.font.PDType1Font;
/**
 * This will take a text file and output a pdf with that text.
 *
 * @author Ben Litchfield
 */
public class TextToPDF
{
/**
 * The scaling factor for font units to PDF units
 */
private static final int FONTSCALE = 1000;
/**
 * The default font
 */
private static final PDType1Font DEFAULT_FONT = PDType1Font.HELVETICA;
/**
 * The default font size
 */
private static final int DEFAULT_FONT_SIZE = 10;
/**
 * The line height as a factor of the font size
 */
private static final float LINE_HEIGHT_FACTOR = 1.05f;
// Current font size and font; mutable via the setters below.
private int fontSize = DEFAULT_FONT_SIZE;
private PDFont font = DEFAULT_FONT;
// Lookup table from base font name to the corresponding standard-14 font.
private static final Map<String, PDType1Font> STANDARD_14 = new HashMap<String, PDType1Font>();
static
{
STANDARD_14.put(PDType1Font.TIMES_ROMAN.getBaseFont(), PDType1Font.TIMES_ROMAN);
STANDARD_14.put(PDType1Font.TIMES_BOLD.getBaseFont(), PDType1Font.TIMES_BOLD);
STANDARD_14.put(PDType1Font.TIMES_ITALIC.getBaseFont(), PDType1Font.TIMES_ITALIC);
STANDARD_14.put(PDType1Font.TIMES_BOLD_ITALIC.getBaseFont(), PDType1Font.TIMES_BOLD_ITALIC);
STANDARD_14.put(PDType1Font.HELVETICA.getBaseFont(), PDType1Font.HELVETICA);
STANDARD_14.put(PDType1Font.HELVETICA_BOLD.getBaseFont(), PDType1Font.HELVETICA_BOLD);
STANDARD_14.put(PDType1Font.HELVETICA_OBLIQUE.getBaseFont(), PDType1Font.HELVETICA_OBLIQUE);
STANDARD_14.put(PDType1Font.HELVETICA_BOLD_OBLIQUE.getBaseFont(), PDType1Font.HELVETICA_BOLD_OBLIQUE);
STANDARD_14.put(PDType1Font.COURIER.getBaseFont(), PDType1Font.COURIER);
STANDARD_14.put(PDType1Font.COURIER_BOLD.getBaseFont(), PDType1Font.COURIER_BOLD);
STANDARD_14.put(PDType1Font.COURIER_OBLIQUE.getBaseFont(), PDType1Font.COURIER_OBLIQUE);
STANDARD_14.put(PDType1Font.COURIER_BOLD_OBLIQUE.getBaseFont(), PDType1Font.COURIER_BOLD_OBLIQUE);
STANDARD_14.put(PDType1Font.SYMBOL.getBaseFont(), PDType1Font.SYMBOL);
STANDARD_14.put(PDType1Font.ZAPF_DINGBATS.getBaseFont(), PDType1Font.ZAPF_DINGBATS);
}
/**
 * Create a PDF document with some text.
 *
 * @param text The stream of text data.
 *
 * @return The document with the text in it.
 *
 * @throws IOException If there is an error writing the data.
 */
public PDDocument createPDFFromText( Reader text ) throws IOException
{
PDDocument doc = new PDDocument();
createPDFFromText(doc, text);
return doc;
}
/**
 * Create a PDF document with some text. Pages are appended to the given
 * document as needed; words are wrapped at the page width and lines are
 * laid out top-to-bottom with a margin on all sides.
 *
 * @param doc The document to append pages to.
 * @param text The stream of text data.
 *
 * @throws IOException If there is an error writing the data.
 */
public void createPDFFromText( PDDocument doc, Reader text ) throws IOException
{
try
{
final int margin = 40;
float height = font.getBoundingBox().getHeight() / FONTSCALE;
//calculate font height and increase by a factor.
height = height*fontSize*LINE_HEIGHT_FACTOR;
BufferedReader data = new BufferedReader( text );
String nextLine = null;
PDPage page = new PDPage();
PDPageContentStream contentStream = null;
// y starts below the margin so the first line forces creation of the
// first page inside the loop.
float y = -1;
float maxStringLength = page.getMediaBox().getWidth() - 2*margin;
// There is a special case of creating a PDF document from an empty string.
boolean textIsEmpty = true;
while( (nextLine = data.readLine()) != null )
{
// The input text is nonEmpty. New pages will be created and added
// to the PDF document as they are needed, depending on the length of
// the text.
textIsEmpty = false;
String[] lineWords = nextLine.trim().split( " " );
int lineIndex = 0;
while( lineIndex < lineWords.length )
{
// Greedily accumulate words until adding the next word would
// exceed the printable width.
StringBuilder nextLineToDraw = new StringBuilder();
float lengthIfUsingNextWord = 0;
do
{
nextLineToDraw.append( lineWords[lineIndex] );
nextLineToDraw.append( " " );
lineIndex++;
if( lineIndex < lineWords.length )
{
String lineWithNextWord = nextLineToDraw.toString() + lineWords[lineIndex];
lengthIfUsingNextWord =
(font.getStringWidth( lineWithNextWord )/FONTSCALE) * fontSize;
}
}
while( lineIndex < lineWords.length &&
lengthIfUsingNextWord < maxStringLength );
if( y < margin )
{
// We have crossed the end-of-page boundary and need to extend the
// document by another page.
page = new PDPage();
doc.addPage( page );
if( contentStream != null )
{
contentStream.endText();
contentStream.close();
}
contentStream = new PDPageContentStream(doc, page);
contentStream.setFont( font, fontSize );
contentStream.beginText();
y = page.getMediaBox().getHeight() - margin + height;
contentStream.newLineAtOffset(
margin, y);
}
if( contentStream == null )
{
throw new IOException( "Error:Expected non-null content stream." );
}
// Move the text cursor down one line and draw the wrapped line.
contentStream.newLineAtOffset(0, -height);
y -= height;
contentStream.showText(nextLineToDraw.toString());
}
}
// If the input text was the empty string, then the above while loop will have short-circuited
// and we will not have added any PDPages to the document.
// So in order to make the resultant PDF document readable by Adobe Reader etc, we'll add an empty page.
if (textIsEmpty)
{
doc.addPage(page);
}
if( contentStream != null )
{
contentStream.endText();
contentStream.close();
}
}
catch( IOException io )
{
// Close the document on failure to release its resources, then rethrow.
if( doc != null )
{
doc.close();
}
throw io;
}
}
/**
 * This will create a PDF document with some text in it.
 * <br />
 * see usage() for commandline
 *
 * @param args Command line arguments.
 *
 * @throws IOException If there is an error with the PDF.
 */
public static void main(String[] args) throws IOException
{
// suppress the Dock icon on OS X
System.setProperty("apple.awt.UIElement", "true");
TextToPDF app = new TextToPDF();
PDDocument doc = new PDDocument();
try
{
if( args.length < 2 )
{
app.usage();
}
else
{
// All leading arguments are option flags; the last two are the
// output PDF path and the input text file, in that order.
for( int i=0; i<args.length-2; i++ )
{
if( args[i].equals( "-standardFont" ))
{
i++;
app.setFont( getStandardFont( args[i] ));
}
else if( args[i].equals( "-ttf" ))
{
i++;
PDFont font = PDType0Font.load( doc, new File( args[i]) );
app.setFont( font );
}
else if( args[i].equals( "-fontSize" ))
{
i++;
app.setFontSize( Integer.parseInt( args[i] ) );
}
else
{
throw new IOException( "Unknown argument:" + args[i] );
}
}
app.createPDFFromText( doc, new FileReader( args[args.length-1] ) );
doc.save( args[args.length-2] );
}
}
finally
{
doc.close();
}
}
/**
 * This will print out a message telling how to use this example.
 * Note: terminates the JVM with exit code 1.
 */
private void usage()
{
String[] std14 = getStandard14Names();
StringBuilder message = new StringBuilder();
message.append("Usage: jar -jar pdfbox-app-x.y.z.jar TextToPDF [options] <outputfile> <textfile>\n");
message.append("\nOptions:\n");
message.append("  -standardFont <name> : " + DEFAULT_FONT.getBaseFont() + " (default)\n");
for (String std14String : std14)
{
message.append("                         " + std14String + "\n");
}
message.append("  -ttf <ttf file>      : The TTF font to use.\n");
message.append("  -fontSize <fontSize> : default: " + DEFAULT_FONT_SIZE );
System.err.println(message.toString());
System.exit(1);
}
/**
 * A convenience method to get one of the standard 14 font from name.
 *
 * @param name The name of the font to get.
 *
 * @return The font that matches the name or null if it does not exist.
 */
private static PDType1Font getStandardFont(String name)
{
return STANDARD_14.get(name);
}
/**
 * This will get the names of the standard 14 fonts.
 *
 * @return An array of the names of the standard 14 fonts.
 */
private static String[] getStandard14Names()
{
return STANDARD_14.keySet().toArray(new String[14]);
}
/**
 * @return Returns the font.
 */
public PDFont getFont()
{
return font;
}
/**
 * @param aFont The font to set.
 */
public void setFont(PDFont aFont)
{
this.font = aFont;
}
/**
 * @return Returns the fontSize.
 */
public int getFontSize()
{
return fontSize;
}
/**
 * @param aFontSize The fontSize to set.
 */
public void setFontSize(int aFontSize)
{
this.fontSize = aFontSize;
}
}
| benmccann/pdfbox | tools/src/main/java/org/apache/pdfbox/tools/TextToPDF.java | Java | apache-2.0 | 11,926 |
/*
* Copyright 2012 International Business Machines Corp.
*
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership. Licensed under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package javax.batch.api.chunk.listener;
import java.util.List;
/**
 * The AbstractItemWriteListener provides default
 * implementations of less commonly implemented methods.
 */
public abstract class AbstractItemWriteListener implements
ItemWriteListener {
/**
 * Override this method if the ItemWriteListener
 * will do something before the items are written.
 * The default implementation does nothing.
 *
 * @param items specifies the items about to be
 * written.
 * @throws Exception (or subclass) if an error occurs.
 */
@Override
public void beforeWrite(List<Object> items) throws Exception {}
/**
 * Override this method if the ItemWriteListener
 * will do something after the items are written.
 * The default implementation does nothing.
 *
 * @param items specifies the items that were
 * written.
 * @throws Exception (or subclass) if an error occurs.
 */
@Override
public void afterWrite(List<Object> items) throws Exception {}
/**
 * Override this method if the ItemWriteListener
 * will do something when the ItemWriter writeItems
 * method throws an exception.
 * The default implementation does nothing.
 *
 * @param items specifies the items whose write
 * attempt failed.
 * @param ex specifies the exception thrown by the item
 * writer.
 * @throws Exception (or subclass) if an error occurs.
 */
@Override
public void onWriteError(List<Object> items, Exception ex) throws Exception {}
}
| sidgoyal/standards.jsr352.jbatch | javax.batch/src/main/java/javax/batch/api/chunk/listener/AbstractItemWriteListener.java | Java | apache-2.0 | 2,264 |
/*
* #%L
* SparkCommerce Framework Web
* %%
* Copyright (C) 2009 - 2013 Spark Commerce
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.sparkcommerce.core.payment.service;
import org.sparkcommerce.common.payment.PaymentGatewayType;
import org.springframework.stereotype.Service;
/**
 * In order to use load this demo service, you will need to component scan
 * the package "com.mycompany.sample".
 *
 * This should NOT be used in production, and is meant solely for demonstration
 * purposes only.
 *
 * @author Elbert Bautista (elbertbautista)
 */
@Service("blNullPaymentGatewayConfiguration")
public class NullPaymentGatewayConfigurationImpl implements NullPaymentGatewayConfiguration {

    /** Number of failures tolerated before reporting; defaults to 1. */
    protected int failureReportingThreshold = 1;

    /** Whether transactions are submitted as a combined authorize-and-capture; defaults to true. */
    protected boolean performAuthorizeAndCapture = true;

    @Override
    public String getTransparentRedirectUrl() {
        return "/null-checkout/process";
    }

    @Override
    public String getTransparentRedirectReturnUrl() {
        return "/null-checkout/return";
    }

    @Override
    public boolean isPerformAuthorizeAndCapture() {
        // Bug fix: this previously returned a hard-coded `true`, which made
        // setPerformAuthorizeAndCapture(..) a silent no-op. Returning the
        // field preserves the original default (true) while honoring the setter.
        return performAuthorizeAndCapture;
    }

    @Override
    public void setPerformAuthorizeAndCapture(boolean performAuthorizeAndCapture) {
        this.performAuthorizeAndCapture = performAuthorizeAndCapture;
    }

    @Override
    public int getFailureReportingThreshold() {
        return failureReportingThreshold;
    }

    @Override
    public void setFailureReportingThreshold(int failureReportingThreshold) {
        this.failureReportingThreshold = failureReportingThreshold;
    }

    @Override
    public boolean handlesAuthorize() {
        return true;
    }

    @Override
    public boolean handlesCapture() {
        return false;
    }

    @Override
    public boolean handlesAuthorizeAndCapture() {
        return true;
    }

    @Override
    public boolean handlesReverseAuthorize() {
        return false;
    }

    @Override
    public boolean handlesVoid() {
        return false;
    }

    @Override
    public boolean handlesRefund() {
        return false;
    }

    @Override
    public boolean handlesPartialCapture() {
        return false;
    }

    @Override
    public boolean handlesMultipleShipment() {
        return false;
    }

    @Override
    public boolean handlesRecurringPayment() {
        return false;
    }

    @Override
    public boolean handlesSavedCustomerPayment() {
        return false;
    }

    @Override
    public boolean handlesMultiplePayments() {
        return false;
    }

    @Override
    public PaymentGatewayType getGatewayType() {
        return NullPaymentGatewayType.NULL_GATEWAY;
    }
}
| akdasari/SparkIntegration | src/test/java/org/sparkcommerce/core/payment/service/NullPaymentGatewayConfigurationImpl.java | Java | apache-2.0 | 3,194 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function () {
'use strict';
// Controller for the drivers list view: loads all drivers and exposes
// create/delete (modal-driven) and column-sort operations.
angular
.module('webApp')
.controller('DriversListCtrl', DriversListCtrl);
// NOTE(review): $scope and $state are injected but never referenced in the
// controller body — confirm they are unused before removing.
DriversListCtrl.$inject = ['$scope', 'EntityFactory', 'ModalService', 'UtilsService', '$state'];
function DriversListCtrl($scope, EntityFactory, ModalService, UtilsService, $state) {
/*jshint validthis: true*/
var vm = this;
vm.deleteDriver = deleteDriver;
vm.getAllDrivers = getAllDrivers;
vm.createDriver = createDriver;
vm.sortDrivers = sortDrivers;
// Sort state: current column and direction (false = ascending).
vm.tableReverse = false;
vm.sortField = 'fileName';
vm.errorMessage = {
type: 'error',
text: '',
internalTrace: ''
};
vm.successMessage = {
type: 'success',
text: '',
internalTrace: ''
};
init();
/////////////////////////////////
function init() {
getAllDrivers();
}
// Fetches all drivers and publishes them on the view model.
function getAllDrivers() {
EntityFactory.getAllDrivers().then(function (drivers) {
vm.driversData = drivers;
});
}
// Opens the creation modal; on success reloads the list and shows a message.
function createDriver() {
var controller = 'CreateEntityModalCtrl';
var templateUrl = "templates/modal/entity-creation-modal.tpl.html";
var resolve = {
type: function () {
return "DRIVER";
},
title: function () {
return "_ENTITY_._CREATE_DRIVER_TITLE_";
},
info: function () {
return "_DRIVER_INFO_";
},
text: function () {
return "_DRIVER_TEXT_";
},
};
var modalInstance = ModalService.openModal(controller, templateUrl, resolve, '', 'lg');
return modalInstance.result.then(function () {
getAllDrivers();
vm.successMessage.text = '_DRIVER_CREATE_OK_';
});
}
function deleteDriver(fileName) {
return deleteDriverConfirm('lg', fileName);
}
// Opens the delete-confirmation modal; on confirm removes the driver from
// the local list (no refetch) and shows a success message.
function deleteDriverConfirm(size, fileName) {
var controller = 'DeleteEntityModalCtrl';
var templateUrl = "templates/modal/entity-delete-modal.tpl.html";
var resolve = {
item: function () {
return fileName;
},
type: function () {
return "DRIVER";
},
title: function () {
return "_ENTITY_._DELETE_DRIVER_TITLE_";
}
};
var modalInstance = ModalService.openModal(controller, templateUrl, resolve, '', size);
return modalInstance.result.then(function (fileName) {
var index = UtilsService.getArrayElementPosition(vm.driversData, 'fileName', fileName);
vm.driversData.splice(index, 1);
vm.successMessage.text = '_DRIVER_DELETE_OK_';
});
}
// Clicking the active sort column toggles direction; a new column resets
// the direction to ascending.
function sortDrivers(fieldName) {
if (fieldName == vm.sortField) {
vm.tableReverse = !vm.tableReverse;
} else {
vm.tableReverse = false;
vm.sortField = fieldName;
}
}
}
})();
package org.zstack.sdk.zwatch.thirdparty.api;
/**
 * SDK result holder for the QueryThirdpartyAlert API: the matching inventory
 * entries plus the total match count. Plain data carrier with raw-typed
 * collections (generated SDK style).
 */
public class QueryThirdpartyAlertResult {
public java.util.List inventories;
public void setInventories(java.util.List inventories) {
this.inventories = inventories;
}
public java.util.List getInventories() {
return this.inventories;
}
public java.lang.Long total;
public void setTotal(java.lang.Long total) {
this.total = total;
}
public java.lang.Long getTotal() {
return this.total;
}
}
| zstackorg/zstack | sdk/src/main/java/org/zstack/sdk/zwatch/thirdparty/api/QueryThirdpartyAlertResult.java | Java | apache-2.0 | 513 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import React from 'react';
import { Link } from 'dva/router';
import Exception from '../../components/Exception';
export default () => (
<Exception type="500" style={{ minHeight: 500, height: '80%' }} linkElement={Link} />
);
| ascrutae/sky-walking-ui | src/routes/Exception/500.js | JavaScript | apache-2.0 | 1,035 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.discovery.zen.publish;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.Diff;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.discovery.zen.DiscoveryNodesProvider;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.node.Node;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.BytesTransportRequest;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportConnectionListener;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportResponseOptions;
import org.elasticsearch.transport.TransportService;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.emptyIterable;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
@TestLogging("discovery.zen.publish:TRACE")
public class PublishClusterStateActionTests extends ESTestCase {
    // Cluster name shared by every mock node in a test.
    private static final ClusterName CLUSTER_NAME = ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY);
    // Created in setUp(), terminated in tearDown().
    protected ThreadPool threadPool;
    // All mock nodes created so far, keyed by node name (see createMockNode).
    protected Map<String, MockNode> nodes = new HashMap<>();
    /**
     * A lightweight stand-in for a cluster node: holds a discovery node, its
     * transport service, a publish action, and the node's current view of the
     * cluster state. Incoming states are applied via onNewClusterState.
     */
    public static class MockNode implements PublishClusterStateAction.NewPendingClusterStateListener, DiscoveryNodesProvider {
        public final DiscoveryNode discoveryNode;
        public final MockTransportService service;
        public MockPublishAction action;
        public final ClusterStateListener listener;
        // volatile: read by test threads while publish threads update it
        public volatile ClusterState clusterState;
        private final ESLogger logger;
        public MockNode(DiscoveryNode discoveryNode, MockTransportService service, @Nullable ClusterStateListener listener, ESLogger logger) {
            this.discoveryNode = discoveryNode;
            this.service = service;
            this.listener = listener;
            this.logger = logger;
            // start with a one-node cluster state where this node is local
            this.clusterState = ClusterState.builder(CLUSTER_NAME).nodes(DiscoveryNodes.builder().put(discoveryNode).localNodeId(discoveryNode.getId()).build()).build();
        }
        /** Marks this node as master in its own cluster state. Returns this for chaining. */
        public MockNode setAsMaster() {
            this.clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).masterNodeId(discoveryNode.getId())).build();
            return this;
        }
        /** Clears the master id from this node's cluster state. Returns this for chaining. */
        public MockNode resetMasterId() {
            this.clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes()).masterNodeId(null)).build();
            return this;
        }
        public void connectTo(DiscoveryNode node) {
            service.connectToNode(node);
        }
        /**
         * Pulls the next pending cluster state, notifies the optional listener,
         * adopts the state if this node has no master or the state supersedes
         * the current one, and marks it processed.
         */
        @Override
        public void onNewClusterState(String reason) {
            ClusterState newClusterState = action.pendingStatesQueue().getNextClusterStateToProcess();
            logger.debug("[{}] received version [{}], uuid [{}]", discoveryNode.getName(), newClusterState.version(), newClusterState.stateUUID());
            if (listener != null) {
                ClusterChangedEvent event = new ClusterChangedEvent("", newClusterState, clusterState);
                listener.clusterChanged(event);
            }
            if (clusterState.nodes().getMasterNode() == null || newClusterState.supersedes(clusterState)) {
                clusterState = newClusterState;
            }
            action.pendingStatesQueue().markAsProcessed(newClusterState);
        }
        @Override
        public DiscoveryNodes nodes() {
            return clusterState.nodes();
        }
    }
    /** Creates a mock node with empty settings and no cluster state listener. */
    public MockNode createMockNode(final String name) throws Exception {
        return createMockNode(name, Settings.EMPTY);
    }
    /** Creates a mock node with the given settings and no cluster state listener. */
    public MockNode createMockNode(String name, Settings settings) throws Exception {
        return createMockNode(name, settings, null);
    }
    /**
     * Creates a mock node, wires up its publish action, and fully connects it
     * to every previously created node (and to itself), waiting until all
     * connections are established before returning.
     *
     * @param name        unique node name; registering a duplicate fails the test
     * @param basSettings base settings merged under the node name and trace settings
     * @param listener    optional listener invoked for each received cluster state
     */
    public MockNode createMockNode(String name, final Settings basSettings, @Nullable ClusterStateListener listener) throws Exception {
        final Settings settings = Settings.builder()
                .put("name", name)
                .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "", TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING")
                .put(basSettings)
                .build();
        MockTransportService service = buildTransportService(settings);
        DiscoveryNode discoveryNode = DiscoveryNode.createLocal(settings, service.boundAddress().publishAddress(),
            NodeEnvironment.generateNodeId(settings));
        MockNode node = new MockNode(discoveryNode, service, listener, logger);
        node.action = buildPublishClusterStateAction(settings, service, () -> node.clusterState, node);
        // two connections (one each way) per existing node, plus the self-connection
        final CountDownLatch latch = new CountDownLatch(nodes.size() * 2 + 1);
        TransportConnectionListener waitForConnection = new TransportConnectionListener() {
            @Override
            public void onNodeConnected(DiscoveryNode node) {
                latch.countDown();
            }
            @Override
            public void onNodeDisconnected(DiscoveryNode node) {
                fail("disconnect should not be called " + node);
            }
        };
        node.service.addConnectionListener(waitForConnection);
        for (MockNode curNode : nodes.values()) {
            curNode.service.addConnectionListener(waitForConnection);
            curNode.connectTo(node.discoveryNode);
            node.connectTo(curNode.discoveryNode);
        }
        node.connectTo(node.discoveryNode);
        assertThat("failed to wait for all nodes to connect", latch.await(5, TimeUnit.SECONDS), equalTo(true));
        // connection bookkeeping done; stop listening before the test proceeds
        for (MockNode curNode : nodes.values()) {
            curNode.service.removeConnectionListener(waitForConnection);
        }
        node.service.removeConnectionListener(waitForConnection);
        if (nodes.put(name, node) != null) {
            fail("Node with the name " + name + " already exist");
        }
        return node;
    }
public MockTransportService service(String name) {
MockNode node = nodes.get(name);
if (node != null) {
return node.service;
}
return null;
}
public PublishClusterStateAction action(String name) {
MockNode node = nodes.get(name);
if (node != null) {
return node.action;
}
return null;
}
    /** Creates the shared thread pool used by all mock transport services. */
    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        threadPool = new TestThreadPool(getClass().getName());
    }
    /** Closes every mock node's action and transport, then stops the thread pool. */
    @Override
    @After
    public void tearDown() throws Exception {
        super.tearDown();
        for (MockNode curNode : nodes.values()) {
            curNode.action.close();
            curNode.service.close();
        }
        terminate(threadPool);
    }
protected MockTransportService buildTransportService(Settings settings) {
MockTransportService transportService = MockTransportService.local(Settings.EMPTY, Version.CURRENT, threadPool);
transportService.start();
transportService.acceptIncomingRequests();
return transportService;
}
    /**
     * Builds the publish action under test for one mock node, wiring in the
     * node's current-state supplier and pending-state listener.
     */
    protected MockPublishAction buildPublishClusterStateAction(
            Settings settings,
            MockTransportService transportService,
            Supplier<ClusterState> clusterStateSupplier,
            PublishClusterStateAction.NewPendingClusterStateListener listener
    ) {
        DiscoverySettings discoverySettings =
                new DiscoverySettings(settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS));
        return new MockPublishAction(
                settings,
                transportService,
                clusterStateSupplier,
                listener,
                discoverySettings,
                CLUSTER_NAME);
    }
    /**
     * End-to-end publishing scenario: new nodes get a full cluster state,
     * subsequent updates arrive as diffs, a skipped version forces a full
     * state again, and a new master publishing an older version is accepted
     * as a full state.
     */
    public void testSimpleClusterStatePublishing() throws Exception {
        MockNode nodeA = createMockNode("nodeA", Settings.EMPTY).setAsMaster();
        MockNode nodeB = createMockNode("nodeB", Settings.EMPTY);
        // Initial cluster state
        ClusterState clusterState = nodeA.clusterState;
        // cluster state update - add nodeB
        DiscoveryNodes discoveryNodes = DiscoveryNodes.builder(clusterState.nodes()).put(nodeB.discoveryNode).build();
        ClusterState previousClusterState = clusterState;
        clusterState = ClusterState.builder(clusterState).nodes(discoveryNodes).incrementVersion().build();
        publishStateAndWait(nodeA.action, clusterState, previousClusterState);
        assertSameStateFromFull(nodeB.clusterState, clusterState);
        // cluster state update - add block
        previousClusterState = clusterState;
        clusterState = ClusterState.builder(clusterState).blocks(ClusterBlocks.builder().addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK)).incrementVersion().build();
        publishStateAndWait(nodeA.action, clusterState, previousClusterState);
        assertSameStateFromDiff(nodeB.clusterState, clusterState);
        assertThat(nodeB.clusterState.blocks().global().size(), equalTo(1));
        // cluster state update - remove block
        previousClusterState = clusterState;
        clusterState = ClusterState.builder(clusterState).blocks(ClusterBlocks.EMPTY_CLUSTER_BLOCK).incrementVersion().build();
        publishStateAndWait(nodeA.action, clusterState, previousClusterState);
        assertSameStateFromDiff(nodeB.clusterState, clusterState);
        assertTrue(nodeB.clusterState.wasReadFromDiff());
        // Adding new node - this node should get full cluster state while nodeB should still be getting diffs
        MockNode nodeC = createMockNode("nodeC", Settings.EMPTY);
        // cluster state update 3 - register node C
        previousClusterState = clusterState;
        discoveryNodes = DiscoveryNodes.builder(discoveryNodes).put(nodeC.discoveryNode).build();
        clusterState = ClusterState.builder(clusterState).nodes(discoveryNodes).incrementVersion().build();
        publishStateAndWait(nodeA.action, clusterState, previousClusterState);
        assertSameStateFromDiff(nodeB.clusterState, clusterState);
        // First state
        assertSameStateFromFull(nodeC.clusterState, clusterState);
        // cluster state update 4 - update settings
        previousClusterState = clusterState;
        MetaData metaData = MetaData.builder(clusterState.metaData()).transientSettings(Settings.builder().put("foo", "bar").build()).build();
        clusterState = ClusterState.builder(clusterState).metaData(metaData).incrementVersion().build();
        publishStateAndWait(nodeA.action, clusterState, previousClusterState);
        assertSameStateFromDiff(nodeB.clusterState, clusterState);
        assertThat(nodeB.clusterState.blocks().global().size(), equalTo(0));
        assertSameStateFromDiff(nodeC.clusterState, clusterState);
        assertThat(nodeC.clusterState.blocks().global().size(), equalTo(0));
        // cluster state update - skipping one version change - should request full cluster state
        previousClusterState = ClusterState.builder(clusterState).incrementVersion().build();
        clusterState = ClusterState.builder(clusterState).incrementVersion().build();
        publishStateAndWait(nodeA.action, clusterState, previousClusterState);
        assertSameStateFromFull(nodeB.clusterState, clusterState);
        assertSameStateFromFull(nodeC.clusterState, clusterState);
        assertFalse(nodeC.clusterState.wasReadFromDiff());
        // node A steps down from being master
        nodeA.resetMasterId();
        nodeB.resetMasterId();
        nodeC.resetMasterId();
        // node B becomes the master and sends a version of the cluster state that goes back
        discoveryNodes = DiscoveryNodes.builder(discoveryNodes)
                .put(nodeA.discoveryNode)
                .put(nodeB.discoveryNode)
                .put(nodeC.discoveryNode)
                .masterNodeId(nodeB.discoveryNode.getId())
                .localNodeId(nodeB.discoveryNode.getId())
                .build();
        previousClusterState = ClusterState.builder(new ClusterName("test")).nodes(discoveryNodes).build();
        clusterState = ClusterState.builder(clusterState).nodes(discoveryNodes).incrementVersion().build();
        publishStateAndWait(nodeB.action, clusterState, previousClusterState);
        assertSameStateFromFull(nodeA.clusterState, clusterState);
        assertSameStateFromFull(nodeC.clusterState, clusterState);
    }
    /**
     * A node present in the previous cluster state but never sent a full state
     * must first receive a full state; only the following update may be a diff.
     */
    public void testUnexpectedDiffPublishing() throws Exception {
        MockNode nodeA = createMockNode("nodeA", Settings.EMPTY, event -> {
            fail("Shouldn't send cluster state to myself");
        }).setAsMaster();
        MockNode nodeB = createMockNode("nodeB", Settings.EMPTY);
        // Initial cluster state with both states - the second node still shouldn't get diff even though it's present in the previous cluster state
        DiscoveryNodes discoveryNodes = DiscoveryNodes.builder(nodeA.nodes()).put(nodeB.discoveryNode).build();
        ClusterState previousClusterState = ClusterState.builder(CLUSTER_NAME).nodes(discoveryNodes).build();
        ClusterState clusterState = ClusterState.builder(previousClusterState).incrementVersion().build();
        publishStateAndWait(nodeA.action, clusterState, previousClusterState);
        assertSameStateFromFull(nodeB.clusterState, clusterState);
        // cluster state update - add block
        previousClusterState = clusterState;
        clusterState = ClusterState.builder(clusterState).blocks(ClusterBlocks.builder().addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK)).incrementVersion().build();
        publishStateAndWait(nodeA.action, clusterState, previousClusterState);
        assertSameStateFromDiff(nodeB.clusterState, clusterState);
    }
public void testDisablingDiffPublishing() throws Exception {
Settings noDiffPublishingSettings = Settings.builder().put(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), false).build();
MockNode nodeA = createMockNode("nodeA", noDiffPublishingSettings, new ClusterStateListener() {
@Override
public void clusterChanged(ClusterChangedEvent event) {
fail("Shouldn't send cluster state to myself");
}
});
MockNode nodeB = createMockNode("nodeB", noDiffPublishingSettings, new ClusterStateListener() {
@Override
public void clusterChanged(ClusterChangedEvent event) {
assertFalse(event.state().wasReadFromDiff());
}
});
// Initial cluster state
DiscoveryNodes discoveryNodes = DiscoveryNodes.builder().put(nodeA.discoveryNode).localNodeId(nodeA.discoveryNode.getId()).masterNodeId(nodeA.discoveryNode.getId()).build();
ClusterState clusterState = ClusterState.builder(CLUSTER_NAME).nodes(discoveryNodes).build();
// cluster state update - add nodeB
discoveryNodes = DiscoveryNodes.builder(discoveryNodes).put(nodeB.discoveryNode).build();
ClusterState previousClusterState = clusterState;
clusterState = ClusterState.builder(clusterState).nodes(discoveryNodes).incrementVersion().build();
publishStateAndWait(nodeA.action, clusterState, previousClusterState);
// cluster state update - add block
previousClusterState = clusterState;
clusterState = ClusterState.builder(clusterState).blocks(ClusterBlocks.builder().addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK)).incrementVersion().build();
publishStateAndWait(nodeA.action, clusterState, previousClusterState);
}
    /**
     * Test not waiting on publishing works correctly (i.e., publishing times out).
     * Publishes many versions back-to-back without awaiting each ack, then
     * verifies every node converged on the final state and saw consistent
     * metadata for each version along the way.
     */
    public void testSimultaneousClusterStatePublishing() throws Exception {
        int numberOfNodes = randomIntBetween(2, 10);
        int numberOfIterations = scaledRandomIntBetween(5, 50);
        Settings settings = Settings.builder().put(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), randomBoolean()).build();
        MockNode master = createMockNode("node0", settings, new ClusterStateListener() {
            @Override
            public void clusterChanged(ClusterChangedEvent event) {
                assertProperMetaDataForVersion(event.state().metaData(), event.state().version());
            }
        }).setAsMaster();
        DiscoveryNodes.Builder discoveryNodesBuilder = DiscoveryNodes.builder(master.nodes());
        for (int i = 1; i < numberOfNodes; i++) {
            final String name = "node" + i;
            final MockNode node = createMockNode(name, settings, new ClusterStateListener() {
                @Override
                public void clusterChanged(ClusterChangedEvent event) {
                    assertProperMetaDataForVersion(event.state().metaData(), event.state().version());
                }
            });
            discoveryNodesBuilder.put(node.discoveryNode);
        }
        AssertingAckListener[] listeners = new AssertingAckListener[numberOfIterations];
        DiscoveryNodes discoveryNodes = discoveryNodesBuilder.build();
        MetaData metaData = MetaData.EMPTY_META_DATA;
        ClusterState clusterState = ClusterState.builder(CLUSTER_NAME).metaData(metaData).build();
        ClusterState previousState;
        for (int i = 0; i < numberOfIterations; i++) {
            previousState = clusterState;
            metaData = buildMetaDataForVersion(metaData, i + 1);
            clusterState = ClusterState.builder(clusterState).incrementVersion().metaData(metaData).nodes(discoveryNodes).build();
            listeners[i] = publishState(master.action, clusterState, previousState);
        }
        // only now wait for all publications to be acked
        for (int i = 0; i < numberOfIterations; i++) {
            listeners[i].await(1, TimeUnit.SECONDS);
        }
        // set the master cs
        master.clusterState = clusterState;
        for (MockNode node : nodes.values()) {
            assertSameState(node.clusterState, clusterState);
            assertThat(node.clusterState.nodes().getLocalNode(), equalTo(node.discoveryNode));
        }
    }
    /**
     * A diff that throws during serialization must fail the publication with
     * FailedToCommitClusterStateException rather than silently falling back,
     * and the failing diff must never be applied on the receiving side.
     */
    public void testSerializationFailureDuringDiffPublishing() throws Exception {
        MockNode nodeA = createMockNode("nodeA", Settings.EMPTY, new ClusterStateListener() {
            @Override
            public void clusterChanged(ClusterChangedEvent event) {
                fail("Shouldn't send cluster state to myself");
            }
        }).setAsMaster();
        MockNode nodeB = createMockNode("nodeB", Settings.EMPTY);
        // Initial cluster state with both states - the second node still shouldn't get diff even though it's present in the previous cluster state
        DiscoveryNodes discoveryNodes = DiscoveryNodes.builder(nodeA.nodes()).put(nodeB.discoveryNode).build();
        ClusterState previousClusterState = ClusterState.builder(CLUSTER_NAME).nodes(discoveryNodes).build();
        ClusterState clusterState = ClusterState.builder(previousClusterState).incrementVersion().build();
        publishStateAndWait(nodeA.action, clusterState, previousClusterState);
        assertSameStateFromFull(nodeB.clusterState, clusterState);
        // cluster state update - add block
        previousClusterState = clusterState;
        clusterState = ClusterState.builder(clusterState).blocks(ClusterBlocks.builder().addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK)).incrementVersion().build();
        // wrap the state so its diff serialization always fails
        ClusterState unserializableClusterState = new ClusterState(clusterState.version(), clusterState.stateUUID(), clusterState) {
            @Override
            public Diff<ClusterState> diff(ClusterState previousState) {
                return new Diff<ClusterState>() {
                    @Override
                    public ClusterState apply(ClusterState part) {
                        fail("this diff shouldn't be applied");
                        return part;
                    }
                    @Override
                    public void writeTo(StreamOutput out) throws IOException {
                        throw new IOException("Simulated failure of diff serialization");
                    }
                };
            }
        };
        try {
            publishStateAndWait(nodeA.action, unserializableClusterState, previousClusterState);
            fail("cluster state published despite of diff errors");
        } catch (Discovery.FailedToCommitClusterStateException e) {
            assertThat(e.getCause(), notNullValue());
            assertThat(e.getCause().getMessage(), containsString("failed to serialize"));
        }
    }
    /**
     * Publishing with a min_master_nodes requirement larger than the number of
     * available master-eligible nodes must fail to commit.
     */
    public void testFailToPublishWithLessThanMinMasterNodes() throws Exception {
        final int masterNodes = randomIntBetween(1, 10);
        MockNode master = createMockNode("master");
        DiscoveryNodes.Builder discoveryNodesBuilder = DiscoveryNodes.builder().put(master.discoveryNode);
        for (int i = 1; i < masterNodes; i++) {
            discoveryNodesBuilder.put(createMockNode("node" + i).discoveryNode);
        }
        // data-only nodes do not count towards min_master_nodes
        final int dataNodes = randomIntBetween(0, 5);
        final Settings dataSettings = Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false).build();
        for (int i = 0; i < dataNodes; i++) {
            discoveryNodesBuilder.put(createMockNode("data_" + i, dataSettings).discoveryNode);
        }
        discoveryNodesBuilder.localNodeId(master.discoveryNode.getId()).masterNodeId(master.discoveryNode.getId());
        DiscoveryNodes discoveryNodes = discoveryNodesBuilder.build();
        MetaData metaData = MetaData.EMPTY_META_DATA;
        ClusterState clusterState = ClusterState.builder(CLUSTER_NAME).metaData(metaData).nodes(discoveryNodes).build();
        ClusterState previousState = master.clusterState;
        try {
            publishState(master.action, clusterState, previousState, masterNodes + randomIntBetween(1, 5));
            fail("cluster state publishing didn't fail despite of not having enough nodes");
        } catch (Discovery.FailedToCommitClusterStateException expected) {
            logger.debug("failed to publish as expected", expected);
        }
    }
    /**
     * Randomized commit test: a mix of well-behaved, erroring and timing-out
     * master nodes. Depending on min_master_nodes relative to the number of
     * good nodes, the publication is expected to either commit, fail, or time
     * out — and the observed outcome must match.
     */
    public void testPublishingWithSendingErrors() throws Exception {
        int goodNodes = randomIntBetween(2, 5);
        int errorNodes = randomIntBetween(1, 5);
        int timeOutNodes = randomBoolean() ? 0 : randomIntBetween(1, 5); // adding timeout nodes will force timeout errors
        final int numberOfMasterNodes = goodNodes + errorNodes + timeOutNodes + 1; // master
        final boolean expectingToCommit = randomBoolean();
        Settings.Builder settings = Settings.builder();
        // make sure we have a reasonable timeout if we expect to timeout, o.w. one that will make the test "hang"
        settings.put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), expectingToCommit == false && timeOutNodes > 0 ? "100ms" : "1h")
                .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "5ms"); // test is about committing
        MockNode master = createMockNode("master", settings.build());
        // randomize things a bit
        int[] nodeTypes = new int[goodNodes + errorNodes + timeOutNodes];
        for (int i = 0; i < goodNodes; i++) {
            nodeTypes[i] = 0;
        }
        for (int i = goodNodes; i < goodNodes + errorNodes; i++) {
            nodeTypes[i] = 1;
        }
        for (int i = goodNodes + errorNodes; i < nodeTypes.length; i++) {
            nodeTypes[i] = 2;
        }
        Collections.shuffle(Arrays.asList(nodeTypes), random());
        DiscoveryNodes.Builder discoveryNodesBuilder = DiscoveryNodes.builder().put(master.discoveryNode);
        for (int i = 0; i < nodeTypes.length; i++) {
            final MockNode mockNode = createMockNode("node" + i);
            discoveryNodesBuilder.put(mockNode.discoveryNode);
            switch (nodeTypes[i]) {
                case 1:
                    mockNode.action.errorOnSend.set(true);
                    break;
                case 2:
                    mockNode.action.timeoutOnSend.set(true);
                    break;
            }
        }
        final int dataNodes = randomIntBetween(0, 3); // data nodes don't matter
        for (int i = 0; i < dataNodes; i++) {
            final MockNode mockNode = createMockNode("data_" + i, Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false).build());
            discoveryNodesBuilder.put(mockNode.discoveryNode);
            if (randomBoolean()) {
                // we really don't care - just chaos monkey
                mockNode.action.errorOnCommit.set(randomBoolean());
                mockNode.action.errorOnSend.set(randomBoolean());
                mockNode.action.timeoutOnCommit.set(randomBoolean());
                mockNode.action.timeoutOnSend.set(randomBoolean());
            }
        }
        final int minMasterNodes;
        final String expectedBehavior;
        if (expectingToCommit) {
            minMasterNodes = randomIntBetween(0, goodNodes + 1); // count master
            expectedBehavior = "succeed";
        } else {
            minMasterNodes = randomIntBetween(goodNodes + 2, numberOfMasterNodes); // +2 because of master
            expectedBehavior = timeOutNodes > 0 ? "timeout" : "fail";
        }
        logger.info("--> expecting commit to {}. good nodes [{}], errors [{}], timeouts [{}]. min_master_nodes [{}]",
                expectedBehavior, goodNodes + 1, errorNodes, timeOutNodes, minMasterNodes);
        discoveryNodesBuilder.localNodeId(master.discoveryNode.getId()).masterNodeId(master.discoveryNode.getId());
        DiscoveryNodes discoveryNodes = discoveryNodesBuilder.build();
        MetaData metaData = MetaData.EMPTY_META_DATA;
        ClusterState clusterState = ClusterState.builder(CLUSTER_NAME).metaData(metaData).nodes(discoveryNodes).build();
        ClusterState previousState = master.clusterState;
        try {
            publishState(master.action, clusterState, previousState, minMasterNodes);
            if (expectingToCommit == false) {
                fail("cluster state publishing didn't fail despite of not have enough nodes");
            }
        } catch (Discovery.FailedToCommitClusterStateException exception) {
            logger.debug("failed to publish as expected", exception);
            if (expectingToCommit) {
                throw exception;
            }
            assertThat(exception.getMessage(), containsString(timeOutNodes > 0 ? "timed out" : "failed"));
        }
    }
    /**
     * Exercises validateIncomingState: states are accepted when there is no
     * current master or the sender is the known master / a new master, and
     * rejected on master mismatch, wrong cluster name, wrong local node, or
     * an older version from the same master.
     */
    public void testIncomingClusterStateValidation() throws Exception {
        MockNode node = createMockNode("node");
        logger.info("--> testing acceptances of any master when having no master");
        ClusterState state = ClusterState.builder(node.clusterState)
                .nodes(DiscoveryNodes.builder(node.nodes()).masterNodeId(randomAsciiOfLength(10))).incrementVersion().build();
        node.action.validateIncomingState(state, null);
        // now set a master node
        node.clusterState = ClusterState.builder(node.clusterState).nodes(DiscoveryNodes.builder(node.nodes()).masterNodeId("master")).build();
        logger.info("--> testing rejection of another master");
        try {
            node.action.validateIncomingState(state, node.clusterState);
            fail("node accepted state from another master");
        } catch (IllegalStateException OK) {
            assertThat(OK.toString(), containsString("cluster state from a different master than the current one, rejecting"));
        }
        logger.info("--> test state from the current master is accepted");
        node.action.validateIncomingState(ClusterState.builder(node.clusterState)
                .nodes(DiscoveryNodes.builder(node.nodes()).masterNodeId("master")).incrementVersion().build(), node.clusterState);
        logger.info("--> testing rejection of another cluster name");
        try {
            node.action.validateIncomingState(ClusterState.builder(new ClusterName(randomAsciiOfLength(10))).nodes(node.nodes()).build(), node.clusterState);
            fail("node accepted state with another cluster name");
        } catch (IllegalStateException OK) {
            assertThat(OK.toString(), containsString("received state from a node that is not part of the cluster"));
        }
        logger.info("--> testing rejection of a cluster state with wrong local node");
        try {
            state = ClusterState.builder(node.clusterState)
                    .nodes(DiscoveryNodes.builder(node.nodes()).localNodeId("_non_existing_").build())
                    .incrementVersion().build();
            node.action.validateIncomingState(state, node.clusterState);
            fail("node accepted state with non-existence local node");
        } catch (IllegalStateException OK) {
            assertThat(OK.toString(), containsString("received state with a local node that does not match the current local node"));
        }
        try {
            MockNode otherNode = createMockNode("otherNode");
            state = ClusterState.builder(node.clusterState).nodes(
                    DiscoveryNodes.builder(node.nodes()).put(otherNode.discoveryNode).localNodeId(otherNode.discoveryNode.getId()).build()
            ).incrementVersion().build();
            node.action.validateIncomingState(state, node.clusterState);
            fail("node accepted state with existent but wrong local node");
        } catch (IllegalStateException OK) {
            assertThat(OK.toString(), containsString("received state with a local node that does not match the current local node"));
        }
        logger.info("--> testing acceptance of an old cluster state");
        final ClusterState incomingState = node.clusterState;
        node.clusterState = ClusterState.builder(node.clusterState).incrementVersion().build();
        final IllegalStateException e =
                expectThrows(IllegalStateException.class, () -> node.action.validateIncomingState(incomingState, node.clusterState));
        final String message = String.format(
                Locale.ROOT,
                "rejecting cluster state version [%d] uuid [%s] received from [%s]",
                incomingState.version(),
                incomingState.stateUUID(),
                incomingState.nodes().getMasterNodeId()
        );
        assertThat(e, hasToString("java.lang.IllegalStateException: " + message));
        // an older version from a *new* master is also OK!
        ClusterState previousState = ClusterState.builder(node.clusterState).incrementVersion().build();
        state = ClusterState.builder(node.clusterState)
                .nodes(DiscoveryNodes.builder(node.clusterState.nodes()).masterNodeId("_new_master_").build())
                .build();
        // remove the master of the node (but still have a previous cluster state with it)!
        node.resetMasterId();
        node.action.validateIncomingState(state, previousState);
    }
    /**
     * Sends several cluster states, then commits them in shuffled order:
     * commits for versions newer than anything committed so far succeed,
     * while commits for older versions are rejected, and the node ends up
     * holding the highest-versioned state.
     */
    public void testOutOfOrderCommitMessages() throws Throwable {
        MockNode node = createMockNode("node").setAsMaster();
        final CapturingTransportChannel channel = new CapturingTransportChannel();
        List<ClusterState> states = new ArrayList<>();
        final int numOfStates = scaledRandomIntBetween(3, 25);
        for (int i = 1; i <= numOfStates; i++) {
            states.add(ClusterState.builder(node.clusterState).version(i).stateUUID(ClusterState.UNKNOWN_UUID).build());
        }
        final ClusterState finalState = states.get(numOfStates - 1);
        logger.info("--> publishing states");
        for (ClusterState state : states) {
            node.action.handleIncomingClusterStateRequest(
                    new BytesTransportRequest(PublishClusterStateAction.serializeFullClusterState(state, Version.CURRENT), Version.CURRENT),
                    channel);
            assertThat(channel.response.get(), equalTo((TransportResponse) TransportResponse.Empty.INSTANCE));
            assertThat(channel.error.get(), nullValue());
            channel.clear();
        }
        logger.info("--> committing states");
        long largestVersionSeen = Long.MIN_VALUE;
        Randomness.shuffle(states);
        for (ClusterState state : states) {
            node.action.handleCommitRequest(new PublishClusterStateAction.CommitClusterStateRequest(state.stateUUID()), channel);
            if (largestVersionSeen < state.getVersion()) {
                assertThat(channel.response.get(), equalTo((TransportResponse) TransportResponse.Empty.INSTANCE));
                if (channel.error.get() != null) {
                    throw channel.error.get();
                }
                largestVersionSeen = state.getVersion();
            } else {
                // older cluster states will be rejected
                assertNotNull(channel.error.get());
                assertThat(channel.error.get(), instanceOf(IllegalStateException.class));
            }
            channel.clear();
        }
        //now check the last state held
        assertSameState(node.clusterState, finalState);
    }
    /**
     * Tests that cluster is committed or times out. It should never be the case that we fail
     * an update due to a commit timeout, but it ends up being committed anyway
     */
    public void testTimeoutOrCommit() throws Exception {
        Settings settings = Settings.builder()
                .put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), "1ms").build(); // short but so we will sometime commit sometime timeout
        MockNode master = createMockNode("master", settings);
        MockNode node = createMockNode("node", settings);
        ClusterState state = ClusterState.builder(master.clusterState)
                .nodes(DiscoveryNodes.builder(master.clusterState.nodes()).put(node.discoveryNode).masterNodeId(master.discoveryNode.getId())).build();
        for (int i = 0; i < 10; i++) {
            state = ClusterState.builder(state).incrementVersion().build();
            logger.debug("--> publishing version [{}], UUID [{}]", state.version(), state.stateUUID());
            boolean success;
            try {
                publishState(master.action, state, master.clusterState, 2).await(1, TimeUnit.HOURS);
                success = true;
            } catch (Discovery.FailedToCommitClusterStateException OK) {
                success = false;
            }
            logger.debug("--> publishing [{}], verifying...", success ? "succeeded" : "failed");
            // success implies the node adopted the state; failure implies it never did
            if (success) {
                assertSameState(node.clusterState, state);
            } else {
                assertThat(node.clusterState.stateUUID(), not(equalTo(state.stateUUID())));
            }
        }
    }
    /**
     * Builds a new {@link MetaData} based on {@code metaData} with one extra index
     * ("test" + version, having {@code version} shards) and the transient setting
     * "test" set to {@code version}, so each metadata version is recognisable later
     * via {@link #assertProperMetaDataForVersion}.
     */
    private MetaData buildMetaDataForVersion(MetaData metaData, long version) {
        ImmutableOpenMap.Builder<String, IndexMetaData> indices = ImmutableOpenMap.builder(metaData.indices());
        indices.put("test" + version, IndexMetaData.builder("test" + version).settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
            .numberOfShards((int) version).numberOfReplicas(0).build());
        return MetaData.builder(metaData)
            .transientSettings(Settings.builder().put("test", version).build())
            .indices(indices.build())
            .build();
    }
    /**
     * Asserts that {@code metaData} was produced by applying {@link #buildMetaDataForVersion}
     * for every version up to and including {@code version}, and no further.
     */
    private void assertProperMetaDataForVersion(MetaData metaData, long version) {
        for (long i = 1; i <= version; i++) {
            assertThat(metaData.index("test" + i), notNullValue());
            assertThat(metaData.index("test" + i).getNumberOfShards(), equalTo((int) i));
        }
        // no index beyond the expected version may exist
        assertThat(metaData.index("test" + (version + 1)), nullValue());
        assertThat(metaData.transientSettings().get("test"), equalTo(Long.toString(version)));
    }
    /** Publishes {@code state} and waits (up to 1 second) for all nodes to ack without errors. */
    public void publishStateAndWait(PublishClusterStateAction action, ClusterState state, ClusterState previousState) throws InterruptedException {
        publishState(action, state, previousState).await(1, TimeUnit.SECONDS);
    }
    /**
     * Publishes {@code state} with a randomly chosen minimum-master-nodes value
     * (between -1, meaning "not configured", and the number of master nodes in the state).
     */
    public AssertingAckListener publishState(PublishClusterStateAction action, ClusterState state, ClusterState previousState) throws InterruptedException {
        final int minimumMasterNodes = randomIntBetween(-1, state.nodes().getMasterNodes().size());
        return publishState(action, state, previousState, minimumMasterNodes);
    }
    /**
     * Publishes the change from {@code previousState} to {@code state} via {@code action}
     * and returns a listener that expects an ack from every node except the local one.
     */
    public AssertingAckListener publishState(PublishClusterStateAction action, ClusterState state, ClusterState previousState, int minMasterNodes) throws InterruptedException {
        AssertingAckListener assertingAckListener = new AssertingAckListener(state.nodes().getSize() - 1);
        ClusterChangedEvent changedEvent = new ClusterChangedEvent("test update", state, previousState);
        action.publish(changedEvent, minMasterNodes, assertingAckListener);
        return assertingAckListener;
    }
public static class AssertingAckListener implements Discovery.AckListener {
private final List<Tuple<DiscoveryNode, Throwable>> errors = new CopyOnWriteArrayList<>();
private final AtomicBoolean timeoutOccurred = new AtomicBoolean();
private final CountDownLatch countDown;
public AssertingAckListener(int nodeCount) {
countDown = new CountDownLatch(nodeCount);
}
@Override
public void onNodeAck(DiscoveryNode node, @Nullable Exception e) {
if (e != null) {
errors.add(new Tuple<>(node, e));
}
countDown.countDown();
}
@Override
public void onTimeout() {
timeoutOccurred.set(true);
// Fast forward the counter - no reason to wait here
long currentCount = countDown.getCount();
for (long i = 0; i < currentCount; i++) {
countDown.countDown();
}
}
public void await(long timeout, TimeUnit unit) throws InterruptedException {
assertThat(awaitErrors(timeout, unit), emptyIterable());
}
public List<Tuple<DiscoveryNode, Throwable>> awaitErrors(long timeout, TimeUnit unit) throws InterruptedException {
countDown.await(timeout, unit);
assertFalse(timeoutOccurred.get());
return errors;
}
}
    /**
     * Asserts two cluster states are "the same" by comparing stateUUID and version
     * (full content equality is not checked — UUID + version identify a publish).
     */
    void assertSameState(ClusterState actual, ClusterState expected) {
        assertThat(actual, notNullValue());
        final String reason = "\n--> actual ClusterState: " + actual.prettyPrint() + "\n--> expected ClusterState:" + expected.prettyPrint();
        assertThat("unequal UUIDs" + reason, actual.stateUUID(), equalTo(expected.stateUUID()));
        assertThat("unequal versions" + reason, actual.version(), equalTo(expected.version()));
    }
    /** Like {@link #assertSameState} but additionally requires the state to have been applied as a diff. */
    void assertSameStateFromDiff(ClusterState actual, ClusterState expected) {
        assertSameState(actual, expected);
        assertTrue(actual.wasReadFromDiff());
    }
    /** Like {@link #assertSameState} but additionally requires the state to have been read as a full state (not a diff). */
    void assertSameStateFromFull(ClusterState actual, ClusterState expected) {
        assertSameState(actual, expected);
        assertFalse(actual.wasReadFromDiff());
    }
    /**
     * {@link PublishClusterStateAction} test double whose send/commit handlers can be
     * toggled to simulate failures (throw) or timeouts (silently drop the request so
     * the sender never gets a response).
     */
    static class MockPublishAction extends PublishClusterStateAction {
        AtomicBoolean timeoutOnSend = new AtomicBoolean();
        AtomicBoolean errorOnSend = new AtomicBoolean();
        AtomicBoolean timeoutOnCommit = new AtomicBoolean();
        AtomicBoolean errorOnCommit = new AtomicBoolean();
        public MockPublishAction(Settings settings, TransportService transportService, Supplier<ClusterState> clusterStateSupplier, NewPendingClusterStateListener listener, DiscoverySettings discoverySettings, ClusterName clusterName) {
            super(settings, transportService, clusterStateSupplier, listener, discoverySettings, clusterName);
        }
        @Override
        protected void handleIncomingClusterStateRequest(BytesTransportRequest request, TransportChannel channel) throws IOException {
            if (errorOnSend.get()) {
                throw new ElasticsearchException("forced error on incoming cluster state");
            }
            if (timeoutOnSend.get()) {
                // never respond -> the sender eventually times out
                return;
            }
            super.handleIncomingClusterStateRequest(request, channel);
        }
        @Override
        protected void handleCommitRequest(PublishClusterStateAction.CommitClusterStateRequest request, TransportChannel channel) {
            if (errorOnCommit.get()) {
                throw new ElasticsearchException("forced error on incoming commit");
            }
            if (timeoutOnCommit.get()) {
                // never respond -> the sender eventually times out
                return;
            }
            super.handleCommitRequest(request, channel);
        }
    }
    /**
     * {@link TransportChannel} that captures the single response OR error sent through
     * it (asserting that never both are used) so tests can inspect a handler's reply.
     */
    static class CapturingTransportChannel implements TransportChannel {
        AtomicReference<TransportResponse> response = new AtomicReference<>();
        AtomicReference<Throwable> error = new AtomicReference<>();
        /** Resets captured response/error so the channel can be reused for the next request. */
        public void clear() {
            response.set(null);
            error.set(null);
        }
        @Override
        public String action() {
            return "_noop_";
        }
        @Override
        public String getProfileName() {
            return "_noop_";
        }
        @Override
        public void sendResponse(TransportResponse response) throws IOException {
            this.response.set(response);
            // a channel must deliver either a response or an error, never both
            assertThat(error.get(), nullValue());
        }
        @Override
        public void sendResponse(TransportResponse response, TransportResponseOptions options) throws IOException {
            this.response.set(response);
            assertThat(error.get(), nullValue());
        }
        @Override
        public void sendResponse(Exception exception) throws IOException {
            this.error.set(exception);
            assertThat(response.get(), nullValue());
        }
        @Override
        public long getRequestId() {
            return 0;
        }
        @Override
        public String getChannelType() {
            return "capturing";
        }
    }
}
| dpursehouse/elasticsearch | core/src/test/java/org/elasticsearch/discovery/zen/publish/PublishClusterStateActionTests.java | Java | apache-2.0 | 44,836 |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/kinesisanalytics/model/LambdaOutputUpdate.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
namespace Aws
{
namespace KinesisAnalytics
{
namespace Model
{
// Default constructor: no fields are set, so Jsonize() emits an empty payload.
LambdaOutputUpdate::LambdaOutputUpdate() : 
    m_resourceARNUpdateHasBeenSet(false),
    m_roleARNUpdateHasBeenSet(false)
{
}
// JSON constructor: delegates to operator= to parse the supplied document.
LambdaOutputUpdate::LambdaOutputUpdate(JsonView jsonValue) : 
    m_resourceARNUpdateHasBeenSet(false),
    m_roleARNUpdateHasBeenSet(false)
{
  *this = jsonValue;
}
// Populates this object from JSON. Only keys present in the document are read,
// and the corresponding "HasBeenSet" flags are raised so they round-trip on Jsonize().
LambdaOutputUpdate& LambdaOutputUpdate::operator =(JsonView jsonValue)
{
  if(jsonValue.ValueExists("ResourceARNUpdate"))
  {
    m_resourceARNUpdate = jsonValue.GetString("ResourceARNUpdate");
    m_resourceARNUpdateHasBeenSet = true;
  }
  if(jsonValue.ValueExists("RoleARNUpdate"))
  {
    m_roleARNUpdate = jsonValue.GetString("RoleARNUpdate");
    m_roleARNUpdateHasBeenSet = true;
  }
  return *this;
}
// Serializes this object to JSON; fields that were never set are omitted.
JsonValue LambdaOutputUpdate::Jsonize() const
{
  JsonValue payload;
  if(m_resourceARNUpdateHasBeenSet)
  {
   payload.WithString("ResourceARNUpdate", m_resourceARNUpdate);
  }
  if(m_roleARNUpdateHasBeenSet)
  {
   payload.WithString("RoleARNUpdate", m_roleARNUpdate);
  }
  return payload;
}
} // namespace Model
} // namespace KinesisAnalytics
} // namespace Aws
| jt70471/aws-sdk-cpp | aws-cpp-sdk-kinesisanalytics/source/model/LambdaOutputUpdate.cpp | C++ | apache-2.0 | 1,459 |
package com.orientechnologies.orient.core.index;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.testng.Assert;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import com.orientechnologies.common.collection.OCompositeKey;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.impl.ODocument;
/**
 * Unit tests for {@link OSimpleKeyIndexDefinition}: creation of simple and composite
 * keys, handling of null/invalid parameters, reported key types, and persistence
 * round-tripping via {@code toStream()}/{@code fromStream()}.
 */
@Test
@SuppressWarnings("unchecked")
public class OSimpleKeyIndexDefinitionTest {
  // Definition under test, recreated before each method as a composite (INTEGER, STRING) key.
  private OSimpleKeyIndexDefinition simpleKeyIndexDefinition;
  @BeforeMethod
  public void beforeMethod() {
    simpleKeyIndexDefinition = new OSimpleKeyIndexDefinition(OType.INTEGER, OType.STRING);
  }
  @Test
  public void testGetFields() {
    // key-only definitions are not bound to document fields...
    Assert.assertTrue(simpleKeyIndexDefinition.getFields().isEmpty());
  }
  @Test
  public void testGetClassName() {
    // ...nor to a schema class
    Assert.assertNull(simpleKeyIndexDefinition.getClassName());
  }
  @Test
  public void testCreateValueSimpleKey() {
    final OSimpleKeyIndexDefinition keyIndexDefinition = new OSimpleKeyIndexDefinition(OType.INTEGER);
    // string input is converted to the declared key type
    final Object result = keyIndexDefinition.createValue("2");
    Assert.assertEquals(result, 2);
  }
  @Test
  public void testCreateValueCompositeKeyListParam() {
    final Object result = simpleKeyIndexDefinition.createValue(Arrays.asList("2", "3"));
    final OCompositeKey compositeKey = new OCompositeKey(Arrays.asList(2, "3"));
    Assert.assertEquals(result, compositeKey);
  }
  @Test
  public void testCreateValueCompositeKeyNullListParam() {
    // a list containing only null yields no key
    final Object result = simpleKeyIndexDefinition.createValue(Arrays.asList((Object) null));
    Assert.assertNull(result);
  }
  @Test
  public void testNullParamListItem() {
    // any null item invalidates the whole composite key
    final Object result = simpleKeyIndexDefinition.createValue(Arrays.asList("2", null));
    Assert.assertNull(result);
  }
  @Test
  public void testWrongParamTypeListItem() {
    // "a" is not convertible to INTEGER, so no key is produced
    final Object result = simpleKeyIndexDefinition.createValue(Arrays.asList("a", "3"));
    Assert.assertNull(result);
  }
  @Test
  public void testCreateValueCompositeKey() {
    final Object result = simpleKeyIndexDefinition.createValue("2", "3");
    final OCompositeKey compositeKey = new OCompositeKey(Arrays.asList(2, "3"));
    Assert.assertEquals(result, compositeKey);
  }
  @Test
  public void testCreateValueCompositeKeyNullParamList() {
    final Object result = simpleKeyIndexDefinition.createValue((List<?>) null);
    Assert.assertNull(result);
  }
  @Test
  public void testCreateValueCompositeKeyNullParam() {
    final Object result = simpleKeyIndexDefinition.createValue((Object) null);
    Assert.assertNull(result);
  }
  @Test
  public void testCreateValueCompositeKeyEmptyList() {
    final Object result = simpleKeyIndexDefinition.createValue(Collections.<Object> emptyList());
    Assert.assertNull(result);
  }
  @Test
  public void testNullParamItem() {
    final Object result = simpleKeyIndexDefinition.createValue("2", null);
    Assert.assertNull(result);
  }
  @Test
  public void testWrongParamType() {
    final Object result = simpleKeyIndexDefinition.createValue("a", "3");
    Assert.assertNull(result);
  }
  @Test
  public void testParamCount() {
    // one parameter per declared key type
    Assert.assertEquals(simpleKeyIndexDefinition.getParamCount(), 2);
  }
  @Test
  public void testParamCountOneItem() {
    final OSimpleKeyIndexDefinition keyIndexDefinition = new OSimpleKeyIndexDefinition(OType.INTEGER);
    Assert.assertEquals(keyIndexDefinition.getParamCount(), 1);
  }
  @Test
  public void testGetKeyTypes() {
    Assert.assertEquals(simpleKeyIndexDefinition.getTypes(), new OType[] { OType.INTEGER, OType.STRING });
  }
  @Test
  public void testGetKeyTypesOneType() {
    final OSimpleKeyIndexDefinition keyIndexDefinition = new OSimpleKeyIndexDefinition(OType.BOOLEAN);
    Assert.assertEquals(keyIndexDefinition.getTypes(), new OType[] { OType.BOOLEAN });
  }
  @Test
  public void testReload() {
    // round-trip the definition through a document stored in an in-memory database
    final ODatabaseDocumentTx databaseDocumentTx = new ODatabaseDocumentTx("memory:osimplekeyindexdefinitiontest");
    databaseDocumentTx.create();
    final ODocument storeDocument = simpleKeyIndexDefinition.toStream();
    storeDocument.save();
    final ODocument loadDocument = databaseDocumentTx.load(storeDocument.getIdentity());
    final OSimpleKeyIndexDefinition loadedKeyIndexDefinition = new OSimpleKeyIndexDefinition();
    loadedKeyIndexDefinition.fromStream(loadDocument);
    databaseDocumentTx.drop();
    Assert.assertEquals(loadedKeyIndexDefinition, simpleKeyIndexDefinition);
  }
  @Test(expectedExceptions = OIndexException.class)
  public void testGetDocumentValueToIndex() {
    // value-by-document lookup is unsupported for key-only definitions
    simpleKeyIndexDefinition.getDocumentValueToIndex(new ODocument());
  }
}
| nengxu/OrientDB | core/src/test/java/com/orientechnologies/orient/core/index/OSimpleKeyIndexDefinitionTest.java | Java | apache-2.0 | 4,965 |
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class systemglobal_authenticationpolicy_binding(base_resource) :
	""" Binding class showing the authenticationpolicy that can be bound to systemglobal.
	"""
	def __init__(self) :
		# NITRO resource state: bound policy name, its priority, builtin flags,
		# and the count field surfaced by count()/count_filtered().
		self._policyname = ""
		self._priority = 0
		self._builtin = []
		self.___count = 0
	@property
	def priority(self) :
		ur"""The priority of the command policy.
		"""
		try :
			return self._priority
		except Exception as e:
			raise e
	@priority.setter
	def priority(self, priority) :
		ur"""The priority of the command policy.
		"""
		try :
			self._priority = priority
		except Exception as e:
			raise e
	@property
	def builtin(self) :
		ur"""Indicates that a variable is a built-in (SYSTEM INTERNAL) type.<br/>Possible values = MODIFIABLE, DELETABLE, IMMUTABLE.
		"""
		try :
			return self._builtin
		except Exception as e:
			raise e
	@builtin.setter
	def builtin(self, builtin) :
		ur"""Indicates that a variable is a built-in (SYSTEM INTERNAL) type.<br/>Possible values = MODIFIABLE, DELETABLE, IMMUTABLE
		"""
		try :
			self._builtin = builtin
		except Exception as e:
			raise e
	@property
	def policyname(self) :
		ur"""The name of the command policy.
		"""
		try :
			return self._policyname
		except Exception as e:
			raise e
	@policyname.setter
	def policyname(self, policyname) :
		ur"""The name of the command policy.
		"""
		try :
			self._policyname = policyname
		except Exception as e:
			raise e
	def _get_nitro_response(self, service, response) :
		ur""" converts nitro response into object and returns the object array in case of get request.
		"""
		try :
			result = service.payload_formatter.string_to_resource(systemglobal_authenticationpolicy_binding_response, response, self.__class__.__name__)
			if(result.errorcode != 0) :
				# errorcode 444 indicates the session is no longer valid
				if (result.errorcode == 444) :
					service.clear_session(self)
				if result.severity :
					if (result.severity == "ERROR") :
						raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
				else :
					raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
			return result.systemglobal_authenticationpolicy_binding
		except Exception as e :
			raise e
	def _get_object_name(self) :
		ur""" Returns the value of object identifier argument
		"""
		try :
			# global bindings carry no per-object identifier
			return 0
		except Exception as e :
			raise e
	@classmethod
	def add(cls, client, resource) :
		# Bind one authenticationpolicy (or a list of them) to systemglobal.
		try :
			if resource and type(resource) is not list :
				updateresource = systemglobal_authenticationpolicy_binding()
				updateresource.policyname = resource.policyname
				updateresource.priority = resource.priority
				return updateresource.update_resource(client)
			else :
				if resource and len(resource) > 0 :
					updateresources = [systemglobal_authenticationpolicy_binding() for _ in range(len(resource))]
					for i in range(len(resource)) :
						updateresources[i].policyname = resource[i].policyname
						updateresources[i].priority = resource[i].priority
				return cls.update_bulk_request(client, updateresources)
		except Exception as e :
			raise e
	@classmethod
	def delete(cls, client, resource) :
		# Unbind one authenticationpolicy (or a list of them) from systemglobal.
		try :
			if resource and type(resource) is not list :
				deleteresource = systemglobal_authenticationpolicy_binding()
				deleteresource.policyname = resource.policyname
				return deleteresource.delete_resource(client)
			else :
				if resource and len(resource) > 0 :
					deleteresources = [systemglobal_authenticationpolicy_binding() for _ in range(len(resource))]
					for i in range(len(resource)) :
						deleteresources[i].policyname = resource[i].policyname
				return cls.delete_bulk_request(client, deleteresources)
		except Exception as e :
			raise e
	@classmethod
	def get(cls, service) :
		ur""" Use this API to fetch a systemglobal_authenticationpolicy_binding resources.
		"""
		try :
			obj = systemglobal_authenticationpolicy_binding()
			response = obj.get_resources(service)
			return response
		except Exception as e:
			raise e
	@classmethod
	def get_filtered(cls, service, filter_) :
		ur""" Use this API to fetch filtered set of systemglobal_authenticationpolicy_binding resources.
		Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
		"""
		try :
			obj = systemglobal_authenticationpolicy_binding()
			option_ = options()
			option_.filter = filter_
			response = obj.getfiltered(service, option_)
			return response
		except Exception as e:
			raise e
	@classmethod
	def count(cls, service) :
		ur""" Use this API to count systemglobal_authenticationpolicy_binding resources configued on NetScaler.
		"""
		try :
			obj = systemglobal_authenticationpolicy_binding()
			option_ = options()
			option_.count = True
			response = obj.get_resources(service, option_)
			if response :
				return response[0].__dict__['___count']
			return 0
		except Exception as e:
			raise e
	@classmethod
	def count_filtered(cls, service, filter_) :
		ur""" Use this API to count the filtered set of systemglobal_authenticationpolicy_binding resources.
		Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
		"""
		try :
			obj = systemglobal_authenticationpolicy_binding()
			option_ = options()
			option_.count = True
			option_.filter = filter_
			response = obj.getfiltered(service, option_)
			if response :
				return response[0].__dict__['___count']
			return 0
		except Exception as e:
			raise e
	class Builtin:
		# Allowed values for the 'builtin' attribute.
		MODIFIABLE = "MODIFIABLE"
		DELETABLE = "DELETABLE"
		IMMUTABLE = "IMMUTABLE"
class systemglobal_authenticationpolicy_binding_response(base_response) :
	# Response wrapper: holds the deserialized binding resources plus the
	# standard NITRO status fields (errorcode/message/severity/sessionid).
	def __init__(self, length=1) :
		self.systemglobal_authenticationpolicy_binding = []
		self.errorcode = 0
		self.message = ""
		self.severity = ""
		self.sessionid = ""
		self.systemglobal_authenticationpolicy_binding = [systemglobal_authenticationpolicy_binding() for _ in range(length)]
| benfinke/ns_python | nssrc/com/citrix/netscaler/nitro/resource/config/system/systemglobal_authenticationpolicy_binding.py | Python | apache-2.0 | 6,831 |
/*
* Copyright 2000-2010 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.lang.ant.refactoring;
import com.intellij.codeInsight.TargetElementUtilBase;
import com.intellij.lang.ant.dom.AntDomFileDescription;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.LangDataKeys;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.IndexNotReadyException;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiReference;
import com.intellij.psi.xml.XmlFile;
import com.intellij.refactoring.rename.PsiElementRenameHandler;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collection;
/**
 * Rename handler for Ant build files: it takes over from the default rename handler
 * only when the reference under the caret in an Ant XML file resolves to more than
 * one target candidate, and then renames the first candidate.
 *
 * @author Eugene Zhuravlev
 *         Date: Mar 19, 2007
 */
public final class AntRenameHandler extends PsiElementRenameHandler {
  public boolean isAvailableOnDataContext(final DataContext dataContext) {
    // only activate when there is an ambiguity (more than one candidate)
    final PsiElement[] elements = getElements(dataContext);
    return elements != null && elements.length > 1;
  }
  public void invoke(@NotNull final Project project, final Editor editor, final PsiFile file, final DataContext dataContext) {
    final PsiElement[] elements = getElements(dataContext);
    if (elements != null && elements.length > 0) {
      // delegate to the standard rename flow with the first candidate
      invoke(project, new PsiElement[]{elements[0]}, dataContext);
    }
  }
  public void invoke(@NotNull final Project project, @NotNull final PsiElement[] elements, final DataContext dataContext) {
    super.invoke(project, elements, dataContext);
  }
  /**
   * Returns the rename candidates under the caret, or null when the context is not an
   * Ant file, no editor is available, or no reference is found at the caret offset.
   */
  @Nullable
  private static PsiElement[] getElements(DataContext dataContext) {
    final PsiFile psiFile = CommonDataKeys.PSI_FILE.getData(dataContext);
    if (!(psiFile instanceof XmlFile && AntDomFileDescription.isAntFile((XmlFile)psiFile))) {
      return null;
    }
    final Editor editor = LangDataKeys.EDITOR.getData(dataContext);
    if (editor == null) {
      return null;
    }
    return getPsiElementsIn(editor, psiFile);
  }
  @Nullable
  private static PsiElement[] getPsiElementsIn(final Editor editor, final PsiFile psiFile) {
    try {
      final PsiReference reference = TargetElementUtilBase.findReference(editor, editor.getCaretModel().getOffset());
      if (reference == null) {
        return null;
      }
      final Collection<PsiElement> candidates = TargetElementUtilBase.getInstance().getTargetCandidates(reference);
      return ContainerUtil.toArray(candidates, new PsiElement[candidates.size()]);
    }
    catch (IndexNotReadyException e) {
      // indexing is in progress - candidates cannot be computed right now
      return null;
    }
  }
}
| IllusionRom-deprecated/android_platform_tools_idea | plugins/ant/src/com/intellij/lang/ant/refactoring/AntRenameHandler.java | Java | apache-2.0 | 3,289 |
"""
Drone.io badge generator.
Currently set up to work on Mac.
Requires Pillow.
"""
import os
from PIL import Image, ImageDraw, ImageFont
SIZE = (95, 18)
def hex_colour(hex):
    """Convert a CSS-style hex colour string to an ``(r, g, b)`` tuple.

    Accepts an optional leading ``#`` and either the 6-digit (``'RRGGBB'``)
    or the 3-digit shorthand (``'RGB'``) form, e.g. ``'#4A4A4A'`` -> ``(74, 74, 74)``
    and ``'#fff'`` -> ``(255, 255, 255)``.
    """
    if hex[0] == '#':
        hex = hex[1:]
    if len(hex) == 3:
        # Expand CSS shorthand: each digit is doubled ('abc' -> 'aabbcc').
        hex = ''.join(c * 2 for c in hex)
    return tuple(int(hex[i:i + 2], 16) for i in range(0, 6, 2))
# Badge colour palette (RGB tuples).
BACKGROUND = hex_colour('#4A4A4A')
SUCCESS = hex_colour('#94B944')
WARNING = hex_colour('#E4A83C')
ERROR = hex_colour('#B10610')

# Coverage thresholds: >= SUCCESS_CUTOFF renders green, >= WARNING_CUTOFF amber,
# anything lower red.
SUCCESS_CUTOFF = 85
WARNING_CUTOFF = 45

# Label typography (Mac font path, per the module docstring).
FONT = ImageFont.truetype(size=10, filename="/Library/Fonts/Arial.ttf")
FONT_SHADOW = hex_colour('#525252')
# Vertical offset of the text within the 18px-high badge.
PADDING_TOP = 3
def build_image(percentage, colour):
    """Render a 95x18 'coverage NN%' badge and return it as a ``PIL.Image``.

    The left part shows the 'coverage' label on the dark background; the right
    part (from x=55) is filled with ``colour`` and shows the percentage.  Each
    text is drawn twice, offset by 1px, to fake a drop shadow.
    """
    image = Image.new('RGB', SIZE, color=BACKGROUND)
    drawing = ImageDraw.Draw(image)
    drawing.rectangle([(55, 0), SIZE], colour, colour)
    drawing.text((8, PADDING_TOP+1), 'coverage', font=FONT, fill=FONT_SHADOW)
    drawing.text((7, PADDING_TOP), 'coverage', font=FONT)
    drawing.text((63, PADDING_TOP+1), '%s%%' % percentage, font=FONT, fill=FONT_SHADOW)
    drawing.text((62, PADDING_TOP), '%s%%' % percentage, font=FONT)
    return image
# Generate one badge per whole coverage percentage (0-100) into _build/.
os.chdir('_build')
for i in range(101):
    filename = '%i.png' % i
    # Pick the colour band for this coverage percentage.
    if i < WARNING_CUTOFF:
        colour = ERROR
    elif i < SUCCESS_CUTOFF:
        colour = WARNING
    else:
        colour = SUCCESS
    # 'with' guarantees the handle is closed (the original never closed it and
    # shadowed the Python 2 builtin 'file').
    with open(filename, 'wb') as output:
        build_image(i, colour).save(output)
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ThreadedComponent import threadedcomponent, threadedadaptivecommscomponent
import heapq
import time
class SchedulingComponentMixin(object):
    """
    SchedulingComponent() -> new SchedulingComponent

    Base class for a threadedcomponent with an inbuilt scheduler, allowing a
    component to block until a scheduled event is ready or a message is received
    on an inbox.
    """

    Inboxes = {"inbox" : "Standard inbox for receiving data from other components",
               "control" : "Standard inbox for receiving control messages from other components",
               "event" : "Scheduled events which are ready to be processed"}

    def __init__(self, **argd):
        super(SchedulingComponentMixin, self).__init__(**argd)
        # Min-heap of (eventTime, priority, message) tuples: the earliest event
        # (ties broken by lowest priority number) is always at index 0.
        self.eventQueue = []

    def scheduleRel(self, message, delay, priority=1):
        """
        Schedule an event to wake the component and send a message to the
        "event" inbox after a delay.  Returns the event, which can later be
        passed to cancelEvent().
        """
        return self.scheduleAbs(message, time.time() + delay, priority)

    def scheduleAbs(self, message, eventTime, priority=1):
        """
        Schedule an event to wake the component and send a message to the
        "event" inbox at the specified absolute time.  Returns the event, which
        can later be passed to cancelEvent().
        """
        event = eventTime, priority, message
        heapq.heappush(self.eventQueue, event)
        return event

    def cancelEvent(self, event):
        """ Remove a scheduled event from the scheduler """
        # O(n) removal, then re-heapify to restore the heap invariant.
        self.eventQueue.remove(event)
        heapq.heapify(self.eventQueue)

    def eventReady(self):
        """ Returns true if there is an event ready to be processed """
        if self.eventQueue:
            eventTime = self.eventQueue[0][0]
            if time.time() >= eventTime:
                return True
        return False

    def pause(self):
        """
        Sleep until there is either an event ready or a message is received on
        an inbox
        """
        if self.eventReady():
            self.signalEvent()
        else:
            if self.eventQueue:
                # Sleep no longer than until the earliest scheduled event is due.
                eventTime = self.eventQueue[0][0]
                super(SchedulingComponentMixin, self).pause(eventTime - time.time())
                if self.eventReady():
                    self.signalEvent()
            else:
                super(SchedulingComponentMixin, self).pause()

    def signalEvent(self):
        """
        Put the event message of the earliest scheduled event onto the
        component's "event" inbox and remove it from the scheduler.
        """
        eventTime, priority, message = heapq.heappop(self.eventQueue)
        #print "Signalling, late by:", (time.time() - eventTime)
        # NOTE(review): if the "event" inqueue is full, the popped message is
        # silently dropped -- confirm this is the intended behaviour.
        if not self.inqueues["event"].full():
            self.inqueues["event"].put(message)
class SchedulingComponent(SchedulingComponentMixin, threadedcomponent):
    # threadedcomponent with scheduling support (see SchedulingComponentMixin).
    def __init__(self, **argd):
        super(SchedulingComponent, self).__init__(**argd)
class SchedulingAdaptiveCommsComponent(SchedulingComponentMixin,
                                       threadedadaptivecommscomponent):
    # threadedadaptivecommscomponent with scheduling support (see SchedulingComponentMixin).
    def __init__(self, **argd):
        super(SchedulingAdaptiveCommsComponent, self).__init__(**argd)
| sparkslabs/kamaelia_ | Sketches/JT/Jam/library/trunk/Axon/SchedulingComponent.py | Python | apache-2.0 | 3,988 |
<?php
namespace PHPExcel\Reader\Excel5;
/**
* PHPExcel_Reader_Excel5_RC4
*
* Copyright (c) 2006 - 2015 PHPExcel
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
* @category PHPExcel
* @package PHPExcel_Reader_Excel5
* @copyright Copyright (c) 2006 - 2015 PHPExcel (http://www.codeplex.com/PHPExcel)
* @license http://www.gnu.org/licenses/old-licenses/lgpl-2.1.txt LGPL
* @version ##VERSION##, ##DATE##
*/
class RC4
{
    /** @var int[] Permutation state (the RC4 S-box, 256 entries) */
    protected $s = array();
    /** @var int Keystream generator index i */
    protected $i = 0;
    /** @var int Keystream generator index j */
    protected $j = 0;

    /**
     * Initialise the cipher state from the key (RC4 key-scheduling algorithm).
     *
     * @param string $key Encryption key/passphrase
     */
    public function __construct($key)
    {
        $keyLength = strlen($key);

        // Start from the identity permutation.
        for ($index = 0; $index < 256; ++$index) {
            $this->s[$index] = $index;
        }

        // Scramble the S-box using the key bytes (KSA).
        $j = 0;
        for ($index = 0; $index < 256; ++$index) {
            $j = ($j + $this->s[$index] + ord($key[$index % $keyLength])) % 256;
            $swap = $this->s[$index];
            $this->s[$index] = $this->s[$j];
            $this->s[$j] = $swap;
        }

        // Reset the stream generator indices for the first RC4() call.
        $this->i = 0;
        $this->j = 0;
    }

    /**
     * Encrypt or decrypt a string with the keystream (the operation is symmetric).
     *
     * Note: the generator state advances on every call, so successive calls
     * continue the same keystream rather than restarting it.
     *
     * @param string $data Data to encrypt/decrypt
     *
     * @return string
     */
    public function RC4($data)
    {
        $length = strlen($data);
        for ($pos = 0; $pos < $length; ++$pos) {
            // Advance the PRGA indices and swap the corresponding state bytes.
            $this->i = ($this->i + 1) % 256;
            $this->j = ($this->j + $this->s[$this->i]) % 256;

            $swap = $this->s[$this->i];
            $this->s[$this->i] = $this->s[$this->j];
            $this->s[$this->j] = $swap;

            // XOR the data byte with the next keystream byte.
            $keyByte = $this->s[($this->s[$this->i] + $this->s[$this->j]) % 256];
            $data[$pos] = chr(ord($data[$pos]) ^ $keyByte);
        }
        return $data;
    }
}
| winerQin/yesnophp | library/PHPExcel/Reader/Excel5/RC4.php | PHP | apache-2.0 | 2,548 |
/*
* Copyright 2011 Christopher Pheby
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jadira.bindings.core.binder;
import java.lang.annotation.Annotation;
import java.net.URL;
import org.jadira.bindings.core.api.Binding;
import org.jadira.bindings.core.api.Converter;
import org.jadira.bindings.core.api.FromUnmarshaller;
import org.jadira.bindings.core.api.ToMarshaller;
/**
 * Defines the registration side of a binder. Implementations accept
 * bindings, marshallers, unmarshallers and converters registered either
 * explicitly by key or class pair, from an XML configuration file
 * (bindings.xml), or by scanning annotated classes.
 */
public interface RegisterableBinder {
    /**
     * Register the configuration file (bindings.xml) at the given URL
     * @param nextLocation The URL to register
     */
    void registerConfiguration(URL nextLocation);
    /**
     * Register a Binding with the given source and target class.
     * A binding unifies a marshaller and an unmarshaller and both must be available to resolve a binding.
     *
     * The source class is considered the owning class of the binding. The source can be marshalled
     * into the target class. Similarly, the target can be unmarshalled to produce an instance of the source type.
     * @param key The converter key
     * @param converter The binding to be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerBinding(ConverterKey<S,T> key, Binding<S, T> converter);
    /**
     * Register an UnMarshaller with the given source and target class.
     * The unmarshaller is used as follows: Instances of the source can be marshalled into the target class.
     * @param key The converter key
     * @param converter The FromUnmarshaller to be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerUnmarshaller(ConverterKey<S,T> key, FromUnmarshaller<S, T> converter);
    /**
     * Register a Marshaller with the given source and target class.
     * The marshaller is used as follows: Instances of the source can be marshalled into the target class.
     * @param key The converter key
     * @param converter The ToMarshaller to be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerMarshaller(ConverterKey<S,T> key, ToMarshaller<S, T> converter);
    /**
     * Register a Converter with the given input and output classes. Instances of the input class can be converted into
     * instances of the output class
     * @param key The converter key
     * @param converter The Converter to be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerConverter(ConverterKey<S,T> key, Converter<S, T> converter);
    /**
     * Register a Binding with the given source and target class.
     * A binding unifies a marshaller and an unmarshaller and both must be available to resolve a binding.
     *
     * The source class is considered the owning class of the binding. The source can be marshalled
     * into the target class. Similarly, the target can be unmarshalled to produce an instance of the source type.
     * @param sourceClass The source (owning) class
     * @param targetClass The target (foreign) class
     * @param converter The binding to be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerBinding(final Class<S> sourceClass, Class<T> targetClass, Binding<S, T> converter);
    /**
     * Register an UnMarshaller with the given source and target class.
     * The unmarshaller is used as follows: Instances of the source can be marshalled into the target class.
     * @param sourceClass The source (input) class
     * @param targetClass The target (output) class
     * @param converter The FromUnmarshaller to be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerUnmarshaller(Class<S> sourceClass, Class<T> targetClass, FromUnmarshaller<S, T> converter);
    /**
     * Register a Marshaller with the given source and target class.
     * The marshaller is used as follows: Instances of the source can be marshalled into the target class.
     * @param sourceClass The source (input) class
     * @param targetClass The target (output) class
     * @param converter The ToMarshaller to be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerMarshaller(Class<S> sourceClass, Class<T> targetClass, ToMarshaller<S, T> converter);
    /**
     * Register a Converter with the given input and output classes. Instances of the input class can be converted into
     * instances of the output class
     * @param sourceClass The source (input) class
     * @param targetClass The target (output) class
     * @param converter The Converter to be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerConverter(final Class<S> sourceClass, Class<T> targetClass, Converter<S, T> converter);
    /**
     * Register a Binding with the given source and target class.
     * A binding unifies a marshaller and an unmarshaller and both must be available to resolve a binding.
     *
     * The source class is considered the owning class of the binding. The source can be marshalled
     * into the target class. Similarly, the target can be unmarshalled to produce an instance of the source type.
     * @param sourceClass The source (owning) class
     * @param targetClass The target (foreign) class
     * @param converter The binding to be registered
     * @param qualifier The qualifier for which the binding must be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerBinding(final Class<S> sourceClass, Class<T> targetClass, Binding<S, T> converter, Class<? extends Annotation> qualifier);
    /**
     * Register an UnMarshaller with the given source and target class.
     * The unmarshaller is used as follows: Instances of the source can be marshalled into the target class.
     * @param sourceClass The source (input) class
     * @param targetClass The target (output) class
     * @param converter The FromUnmarshaller to be registered
     * @param qualifier The qualifier for which the unmarshaller must be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerUnmarshaller(Class<S> sourceClass, Class<T> targetClass, FromUnmarshaller<S, T> converter, Class<? extends Annotation> qualifier);
    /**
     * Register a Marshaller with the given source and target class.
     * The marshaller is used as follows: Instances of the source can be marshalled into the target class.
     * @param sourceClass The source (input) class
     * @param targetClass The target (output) class
     * @param converter The ToMarshaller to be registered
     * @param qualifier The qualifier for which the marshaller must be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerMarshaller(Class<S> sourceClass, Class<T> targetClass, ToMarshaller<S, T> converter, Class<? extends Annotation> qualifier);
    /**
     * Register a Converter with the given input and output classes. Instances of the input class can be converted into
     * instances of the output class
     * @param sourceClass The source (input) class
     * @param targetClass The target (output) class
     * @param converter The Converter to be registered
     * @param qualifier The qualifier for which the converter must be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerConverter(final Class<S> sourceClass, Class<T> targetClass, Converter<S, T> converter, Class<? extends Annotation> qualifier);
    /**
     * Inspect each of the supplied classes, processing any of the annotated methods found
     * @param classesToInspect
     */
    void registerAnnotatedClasses(Class<?>... classesToInspect);
    /**
     * Return an iterable collection of ConverterKeys, one for each currently registered conversion
     * @return an Iterable over the keys of all currently registered conversions
     */
    Iterable<ConverterKey<?, ?>> getConverterEntries();
}
| JadiraOrg/jadira | bindings/src/main/java/org/jadira/bindings/core/binder/RegisterableBinder.java | Java | apache-2.0 | 8,392 |
/**
* @license
* Copyright 2013 Palantir Technologies, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { IOptions, RuleSeverity } from "./language/rule/rule";
/**
 * The fully parsed, normalized representation of a TSLint configuration file.
 */
export interface IConfigurationFile {
    /**
     * @deprecated property is never set
     *
     * The severity that is applied to rules in this config file as well as rules
     * in any inherited config files which have their severity set to "default".
     * Not inherited.
     */
    defaultSeverity?: RuleSeverity;
    /**
     * An array of config files whose rules are inherited by this config file.
     */
    extends: string[];
    /**
     * Rules that are used to lint to JavaScript files.
     */
    jsRules: Map<string, Partial<IOptions>>;
    /**
     * A subset of the CLI options.
     */
    linterOptions?: Partial<{
        exclude: string[];
        format: string;
    }>;
    /**
     * Directories containing custom rules. Resolved using node module semantics.
     */
    rulesDirectory: string[];
    /**
     * Rules that are used to lint TypeScript files.
     */
    rules: Map<string, Partial<IOptions>>;
}
/**
 * Result of searching for and loading a configuration:
 * the path the config was found at (if any) and the parsed contents.
 */
export interface IConfigurationLoadResult {
    path?: string;
    results?: IConfigurationFile;
}
/** Canonical JSON configuration file name. */
export declare const JSON_CONFIG_FILENAME = "tslint.json";
/** @deprecated use `JSON_CONFIG_FILENAME` or `CONFIG_FILENAMES` instead. */
export declare const CONFIG_FILENAME = "tslint.json";
/** All recognized configuration file names (JSON/YAML variants). */
export declare const CONFIG_FILENAMES: string[];
/** Built-in default configuration used when none is supplied. */
export declare const DEFAULT_CONFIG: IConfigurationFile;
/** A configuration with no rules enabled. */
export declare const EMPTY_CONFIG: IConfigurationFile;
/**
 * Searches for a TSLint configuration and returns the data from the config.
 * @param configFile A path to a config file, this can be null if the location of a config is not known
 * @param inputFilePath A path containing the current file being linted. This is the starting location
 * of the search for a configuration.
 * @returns Load status for a TSLint configuration object
 */
export declare function findConfiguration(configFile: string | null, inputFilePath: string): IConfigurationLoadResult;
export declare function findConfiguration(configFile: string, inputFilePath?: string): IConfigurationLoadResult;
/**
 * Searches for a TSLint configuration and returns the path to it.
 * Could return undefined if not configuration is found.
 * @param suppliedConfigFilePath A path to an known config file supplied by a user. Pass null here if
 * the location of the config file is not known and you want to search for one.
 * @param inputFilePath A path to the current file being linted. This is the starting location
 * of the search for a configuration.
 * @returns An absolute path to a tslint.json or tslint.yml or tslint.yaml file
 * or undefined if neither can be found.
 */
export declare function findConfigurationPath(suppliedConfigFilePath: string | null, inputFilePath: string): string | undefined;
export declare function findConfigurationPath(suppliedConfigFilePath: string, inputFilePath?: string): string | undefined;
/**
 * Used Node semantics to load a configuration file given configFilePath.
 * For example:
 * '/path/to/config' will be treated as an absolute path
 * './path/to/config' will be treated as a relative path
 * 'path/to/config' will attempt to load a to/config file inside a node module named path
 * @param configFilePath The configuration to load
 * @param originalFilePath (deprecated) The entry point configuration file
 * @returns a configuration object for TSLint loaded from the file at configFilePath
 */
export declare function loadConfigurationFromPath(configFilePath?: string, _originalFilePath?: string): IConfigurationFile;
/** Reads the configuration file from disk and parses it as raw JSON, YAML or JS depending on the extension. */
export declare function readConfigurationFile(filepath: string): RawConfigFile;
/** Merges `nextConfigSource` on top of `targetConfig`, producing a single combined configuration. */
export declare function extendConfigurationFile(targetConfig: IConfigurationFile, nextConfigSource: IConfigurationFile): IConfigurationFile;
/**
 * returns the absolute path (contrary to what the name implies)
 *
 * @deprecated use `path.resolve` instead
 */
export declare function getRelativePath(directory?: string | null, relativeTo?: string): string | undefined;
/** Whether the given string should be treated as a filesystem path (rather than a module name). */
export declare function useAsPath(directory: string): boolean;
/**
 * @param directories A path(s) to a directory of custom rules
 * @param relativeTo A path that directories provided are relative to.
 * For example, if the directories come from a tslint.json file, this path
 * should be the path to the tslint.json file.
 * @return An array of absolute paths to directories potentially containing rules
 */
export declare function getRulesDirectories(directories?: string | string[], relativeTo?: string): string[];
/** The raw, unnormalized shape of a config file as read from disk. */
export interface RawConfigFile {
    extends?: string | string[];
    linterOptions?: IConfigurationFile["linterOptions"];
    rulesDirectory?: string | string[];
    defaultSeverity?: string;
    rules?: RawRulesConfig;
    jsRules?: RawRulesConfig | boolean;
}
/** Raw rule map as it appears in a config file, keyed by rule name. */
export interface RawRulesConfig {
    [key: string]: RawRuleConfig;
}
/** A single raw rule entry: shorthand boolean/array forms or the object form. */
export declare type RawRuleConfig = null | undefined | boolean | any[] | {
    severity?: RuleSeverity | "warn" | "none" | "default";
    options?: any;
};
/**
 * Parses a config file and normalizes legacy config settings.
 * If `configFileDir` and `readConfig` are provided, this function will load all base configs and reduce them to the final configuration.
 *
 * @param configFile The raw object read from the JSON of a config file
 * @param configFileDir The directory of the config file
 * @param readConfig Will be used to load all base configurations while parsing. The function is called with the resolved path.
 */
export declare function parseConfigFile(configFile: RawConfigFile, configFileDir?: string, readConfig?: (path: string) => RawConfigFile): IConfigurationFile;
/**
 * Fills in default values for `IOption` properties and outputs an array of `IOption`
 */
export declare function convertRuleOptions(ruleConfiguration: Map<string, Partial<IOptions>>): IOptions[];
/** Whether `filepath` matches one of the `linterOptions.exclude` patterns of the config. */
export declare function isFileExcluded(filepath: string, configFile?: IConfigurationFile): boolean;
/** Serializes a configuration object back into human-readable JSON text. */
export declare function stringifyConfiguration(configFile: IConfigurationFile): string;
| cloudfoundry-community/asp.net5-buildpack | fixtures/node_apps/angular_dotnet/ClientApp/node_modules/tslint/lib/configuration.d.ts | TypeScript | apache-2.0 | 6,738 |
/*
* Copyright 2014 Guidewire Software, Inc.
*/
package gw.plugin.ij.intentions;
import com.intellij.codeInsight.CodeInsightUtilBase;
import com.intellij.codeInspection.LocalQuickFixAndIntentionActionOnPsiElement;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiWhiteSpace;
import com.intellij.psi.util.PsiMatcherImpl;
import gw.internal.gosu.parser.Expression;
import gw.internal.gosu.parser.expressions.NumericLiteral;
import gw.lang.parser.IStatement;
import gw.lang.parser.statements.IAssignmentStatement;
import gw.lang.parser.statements.IStatementList;
import gw.lang.parser.statements.IWhileStatement;
import gw.plugin.ij.lang.psi.api.statements.IGosuVariable;
import gw.plugin.ij.lang.psi.impl.statements.GosuForEachStatementImpl;
import gw.plugin.ij.lang.psi.impl.statements.GosuWhileStatementImpl;
import gw.plugin.ij.lang.psi.util.GosuPsiParseUtil;
import gw.plugin.ij.util.GosuBundle;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import static com.intellij.psi.util.PsiMatchers.hasClass;
/**
 * Quick fix that rewrites a counting {@code while} loop into an equivalent
 * Gosu {@code for (x in 0..n)} loop. The caller supplies the pieces it
 * recognized in the original loop: the counter identifier, the loop bound
 * expression, the {@code = 0} declaration, and the increment statement.
 */
public class WhileToForFix extends LocalQuickFixAndIntentionActionOnPsiElement {
  // Name of the loop counter variable; becomes the for-loop variable.
  String ident;
  // Upper-bound expression from the while condition.
  Expression rhs;
  // The counter's zero-initializing declaration; deleted once the for loop replaces the while.
  private IGosuVariable declarationEqualToZero;
  // The counter increment inside the loop body; skipped when copying statements.
  private IAssignmentStatement increment;
  public WhileToForFix(PsiElement whileStmt, String ident, Expression rhs, IGosuVariable declarationEqualToZero, IAssignmentStatement increment) {
    super(whileStmt);
    this.ident = ident;
    this.rhs = rhs;
    this.declarationEqualToZero = declarationEqualToZero;
    this.increment = increment;
  }
  /**
   * Builds the replacement {@code for} loop as text, parses it, and swaps it
   * in for the original {@code while} statement.
   */
  @Override
  public void invoke(@NotNull Project project, @NotNull PsiFile file, @Nullable("is null when called from inspection") Editor editor, @NotNull PsiElement startElement, @NotNull PsiElement endElement) {
    if (!CodeInsightUtilBase.prepareFileForWrite(startElement.getContainingFile())) {
      return;
    }
    IWhileStatement parsedElement = ((GosuWhileStatementImpl) startElement).getParsedElement();
    if (parsedElement == null) {
      return;
    }
    IStatement statement = parsedElement.getStatement();
    // NOTE(review): assumes the while body is a statement list (a braced block) -
    // a single-statement body would fail this cast. Verify against the caller.
    IStatement[] statements = ((IStatementList) statement).getStatements();
    StringBuilder forStmt = new StringBuilder();
    forStmt.append("for (");
    forStmt.append(ident);
    forStmt.append(" in 0..");
    // A literal integer bound N becomes the inclusive range end N-1;
    // any other bound expression is emitted as an exclusive range via "|".
    if(rhs instanceof NumericLiteral) {
      Object res = rhs.evaluate();
      if(res instanceof Integer) {
        forStmt.append(((Integer)res)-1);
      }
    } else {
      forStmt.append("|" + rhs);
    }
    forStmt.append(") {\n");
    String indent = getIndet(parsedElement, statements);
    // Copy every body statement except the counter increment, which the
    // for loop makes redundant.
    for (IStatement statement1 : statements) {
      if (statement1 != increment) {
        forStmt.append(indent);
        forStmt.append(statement1.getLocation().getTextFromTokens());
        forStmt.append("\n");
      }
    }
    forStmt.append("}");
    PsiElement stub = GosuPsiParseUtil.parseProgramm(forStmt.toString(), startElement, file.getManager(), null);
    PsiElement newForStmt = new PsiMatcherImpl(stub)
        .descendant(hasClass(GosuForEachStatementImpl.class))
        .getElement();
    if (newForStmt != null) {
      // Remove the now-unneeded "var x = 0" and splice in the for loop.
      declarationEqualToZero.delete();
      startElement.replace(newForStmt);
    }
  }
  // Derives the indentation string for the generated loop body from the
  // column offset of the second body statement relative to the while keyword.
  // (Name is a typo for "getIndent", kept for byte-compatibility.)
  // NOTE(review): indexes statements[1], so it presumes at least two body
  // statements - confirm callers guarantee this.
  private String getIndet(IWhileStatement parsedElement, IStatement[] statements) {
    int whileColum = parsedElement.getLocation().getColumn();
    int column = statements[1].getLocation().getColumn() - whileColum;
    if(column < 0) {
      return " ";
    }
    StringBuilder out = new StringBuilder();
    for(int i = 0; i <= column; i++) {
      out.append(" ");
    }
    return out.toString();
  }
  // Deletes a preceding variable declaration with the given name.
  // NOTE(review): not referenced anywhere in this class; invoke() deletes
  // declarationEqualToZero directly instead. Candidate for removal.
  private void removeVarDecl(PsiElement whileStmt, String ident) {
    PsiElement prev = whileStmt.getPrevSibling();
    while (prev instanceof PsiWhiteSpace) {
      prev = prev.getPrevSibling();
    }
    if (prev instanceof IGosuVariable && ((IGosuVariable) prev).getName().equals(ident)) {
      prev.delete();
    }
  }
  // The fix only applies to Gosu while statements.
  @Override
  public boolean isAvailable(@NotNull Project project,
                             @NotNull PsiFile file,
                             @NotNull PsiElement startElement,
                             @NotNull PsiElement endElement) {
    return startElement instanceof GosuWhileStatementImpl;
  }
  @NotNull
  @Override
  public String getText() {
    return GosuBundle.message("inspection.while.to.for");
  }
  @NotNull
  @Override
  public String getFamilyName() {
    return GosuBundle.message("inspection.group.name.statement.issues");
  }
}
| pdalbora/gosu-lang | idea-gosu-plugin/src/main/java/gw/plugin/ij/intentions/WhileToForFix.java | Java | apache-2.0 | 4,712 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tools to work with checkpoints."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import six
from tensorflow.python.distribute import distribution_strategy_context
from tensorflow.python.framework import ops
from tensorflow.python.ops import io_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops import variables
from tensorflow.python.platform import gfile
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import checkpoint_management
from tensorflow.python.training import py_checkpoint_reader
from tensorflow.python.training.saving import saveable_object_util
from tensorflow.python.util.tf_export import tf_export
__all__ = [
"load_checkpoint", "load_variable", "list_variables",
"checkpoints_iterator", "init_from_checkpoint"
]
@tf_export("train.load_checkpoint")
def load_checkpoint(ckpt_dir_or_file):
  """Creates a `CheckpointReader` for the checkpoint at `ckpt_dir_or_file`.

  When `ckpt_dir_or_file` names a directory holding several checkpoints, the
  returned reader is for the most recent one.

  Args:
    ckpt_dir_or_file: Directory with checkpoints file or path to checkpoint
      file.

  Returns:
    `CheckpointReader` object.

  Raises:
    ValueError: If `ckpt_dir_or_file` resolves to a directory with no
      checkpoints.
  """
  resolved_path = _get_checkpoint_filename(ckpt_dir_or_file)
  if resolved_path is None:
    raise ValueError("Couldn't find 'checkpoint' file or checkpoints in "
                     "given directory %s" % ckpt_dir_or_file)
  return py_checkpoint_reader.NewCheckpointReader(resolved_path)
@tf_export("train.load_variable")
def load_variable(ckpt_dir_or_file, name):
  """Returns the value of the named variable stored in a checkpoint.

  Args:
    ckpt_dir_or_file: Directory with checkpoints file or path to checkpoint.
    name: Name of the variable to return.

  Returns:
    A numpy `ndarray` with a copy of the value of this variable.
  """
  # Graph tensor names carry a ":0" output suffix; checkpoint keys do not.
  # TODO(b/29227106): Fix this in the right place and remove this.
  variable_name = name[:-2] if name.endswith(":0") else name
  return load_checkpoint(ckpt_dir_or_file).get_tensor(variable_name)
@tf_export("train.list_variables")
def list_variables(ckpt_dir_or_file):
  """Lists every variable stored in a checkpoint.

  Args:
    ckpt_dir_or_file: Directory with checkpoints file or path to checkpoint.

  Returns:
    List of tuples `(name, shape)`, sorted by name.
  """
  shape_by_name = load_checkpoint(ckpt_dir_or_file).get_variable_to_shape_map()
  return [(name, shape_by_name[name]) for name in sorted(shape_by_name)]
def wait_for_new_checkpoint(checkpoint_dir,
                            last_checkpoint=None,
                            seconds_to_sleep=1,
                            timeout=None):
  """Blocks until a checkpoint newer than `last_checkpoint` appears.

  Args:
    checkpoint_dir: The directory in which checkpoints are saved.
    last_checkpoint: The last checkpoint path used or `None` if we're expecting
      a checkpoint for the first time.
    seconds_to_sleep: The number of seconds to sleep for before looking for a
      new checkpoint.
    timeout: The maximum number of seconds to wait. If left as `None`, then the
      process will wait indefinitely.

  Returns:
    a new checkpoint path, or None if the timeout was reached.
  """
  logging.info("Waiting for new checkpoint at %s", checkpoint_dir)
  deadline = None if timeout is None else time.time() + timeout
  while True:
    candidate = checkpoint_management.latest_checkpoint(checkpoint_dir)
    if candidate is not None and candidate != last_checkpoint:
      logging.info("Found new checkpoint at %s", candidate)
      return candidate
    # Nothing new yet: give up if the next sleep would pass the deadline,
    # otherwise sleep and poll again.
    if deadline is not None and time.time() + seconds_to_sleep > deadline:
      return None
    time.sleep(seconds_to_sleep)
@tf_export("train.checkpoints_iterator")
def checkpoints_iterator(checkpoint_dir,
                         min_interval_secs=0,
                         timeout=None,
                         timeout_fn=None):
  """Continuously yield new checkpoint files as they appear.

  The iterator only checks for new checkpoints when control flow has been
  reverted to it, so checkpoints can be missed if iterations take longer than
  `min_interval_secs` or the checkpoint-writing interval.

  The `timeout` argument bounds how long each wait for a new checkpoint may
  block, and interacts with `timeout_fn`:

  * No `timeout_fn`: the iterator stops yielding when a wait times out.
  * With a `timeout_fn`: on timeout the function is called; a truthy return
    stops the iterator, a falsy return resumes waiting (with the timeout
    applying afresh).

  This lets callers decide what to do when checkpoints arrive slowly or stop
  arriving, e.g. return `True` from `timeout_fn` once training is known to
  have finished.

  Args:
    checkpoint_dir: The directory in which checkpoints are saved.
    min_interval_secs: The minimum number of seconds between yielding
      checkpoints.
    timeout: The maximum number of seconds to wait between checkpoints. If left
      as `None`, then the process will wait indefinitely.
    timeout_fn: Optional function to call after a timeout. If the function
      returns True, then it means that no new checkpoints will be generated and
      the iterator will exit. The function is called with no arguments.

  Yields:
    String paths to latest checkpoint files as they arrive.
  """
  last_yielded = None
  while True:
    found = wait_for_new_checkpoint(
        checkpoint_dir, last_yielded, timeout=timeout)
    if found is None:
      # Wait timed out. Without a callback we are done; otherwise let the
      # callback decide whether more checkpoints may still appear.
      if not timeout_fn:
        logging.info("Timed-out waiting for a checkpoint.")
        return
      if timeout_fn():
        return
      continue
    cycle_start = time.time()
    last_yielded = found
    yield last_yielded
    # Throttle so consecutive yields are at least min_interval_secs apart.
    remaining = cycle_start + min_interval_secs - time.time()
    if remaining > 0:
      time.sleep(remaining)
@tf_export(v1=["train.init_from_checkpoint"])
def init_from_checkpoint(ckpt_dir_or_file, assignment_map):
  """Replaces `tf.Variable` initializers so they load from a checkpoint file.

  Values are not loaded immediately, but when the initializer is run
  (typically by running a `tf.compat.v1.global_variables_initializer` op).

  Note: This overrides default initialization ops of specified variables and
  redefines dtype.

  Assignment map supports following syntax:

  * `'checkpoint_scope_name/': 'scope_name/'` - will load all variables in
    current `scope_name` from `checkpoint_scope_name` with matching tensor
    names.
  * `'checkpoint_scope_name/some_other_variable': 'scope_name/variable_name'` -
    will initialize `scope_name/variable_name` variable
    from `checkpoint_scope_name/some_other_variable`.
  * `'scope_variable_name': variable` - will initialize given `tf.Variable`
    object with tensor 'scope_variable_name' from the checkpoint.
  * `'scope_variable_name': list(variable)` - will initialize list of
    partitioned variables with tensor 'scope_variable_name' from the
    checkpoint.
  * `'/': 'scope_name/'` - will load all variables in current `scope_name`
    from checkpoint's root (e.g. no scope).

  Supports loading into partitioned variables, which are represented as
  `'<variable>/part_<part #>'`.

  Args:
    ckpt_dir_or_file: Directory with checkpoints file or path to checkpoint.
    assignment_map: Dict, where keys are names of the variables in the
      checkpoint and values are current variables or names of current variables
      (in default graph).

  Raises:
    ValueError: If missing variables in current graph, or if missing
      checkpoints or tensors in checkpoints.
  """
  # The real work happens in _init_from_checkpoint. Under a distribution
  # strategy it must run in cross-replica context, so replicas merge first.
  def _run_init(_):
    _init_from_checkpoint(ckpt_dir_or_file, assignment_map)

  if distribution_strategy_context.get_cross_replica_context():
    _run_init(None)
  else:
    distribution_strategy_context.get_replica_context().merge_call(_run_init)
def _init_from_checkpoint(ckpt_dir_or_file, assignment_map):
  """See `init_from_checkpoint` for documentation."""
  ckpt_file = _get_checkpoint_filename(ckpt_dir_or_file)
  reader = load_checkpoint(ckpt_dir_or_file)
  # Map of checkpoint tensor name -> shape; also used for existence checks.
  variable_map = reader.get_variable_to_shape_map()
  # Sorted iteration keeps the order of error reporting / initialization
  # deterministic across runs.
  for tensor_name_in_ckpt, current_var_or_name in sorted(
      six.iteritems(assignment_map)):
    var = None
    # Check if this is Variable object or list of Variable objects (in case of
    # partitioned variables).
    if _is_variable(current_var_or_name) or (
        isinstance(current_var_or_name, list)
        and all(_is_variable(v) for v in current_var_or_name)):
      var = current_var_or_name
    else:
      store_vars = vs._get_default_variable_store()._vars  # pylint:disable=protected-access
      # Check if this variable is in var_store.
      var = store_vars.get(current_var_or_name, None)
      # Also check if variable is partitioned as list.
      if var is None:
        var = _collect_partitioned_variable(current_var_or_name, store_vars)
    if var is not None:
      # If 1 to 1 mapping was provided, find variable in the checkpoint.
      if tensor_name_in_ckpt not in variable_map:
        raise ValueError("Tensor %s is not found in %s checkpoint %s" % (
            tensor_name_in_ckpt, ckpt_dir_or_file, variable_map
        ))
      if _is_variable(var):
        # Additional at-call-time checks.
        if not var.get_shape().is_compatible_with(
            variable_map[tensor_name_in_ckpt]):
          raise ValueError(
              "Shape of variable %s (%s) doesn't match with shape of "
              "tensor %s (%s) from checkpoint reader." % (
                  var.name, str(var.get_shape()),
                  tensor_name_in_ckpt, str(variable_map[tensor_name_in_ckpt])
              ))
        var_name = var.name
      else:
        # Partitioned variable: log all the part names.
        var_name = ",".join([v.name for v in var])
      _set_variable_or_list_initializer(var, ckpt_file, tensor_name_in_ckpt)
      logging.debug("Initialize variable %s from checkpoint %s with %s",
                    var_name, ckpt_dir_or_file, tensor_name_in_ckpt)
    else:
      # No single variable matched, so current_var_or_name is treated as a
      # scope prefix mapping to a checkpoint scope prefix.
      scopes = ""
      # TODO(vihanjain): Support list of 'current_var_or_name' here.
      if "/" in current_var_or_name:
        scopes = current_var_or_name[:current_var_or_name.rindex("/")]
      if not tensor_name_in_ckpt.endswith("/"):
        raise ValueError(
            "Assignment map with scope only name {} should map to scope only "
            "{}. Should be 'scope/': 'other_scope/'.".format(
                scopes, tensor_name_in_ckpt))
      # If scope to scope mapping was provided, find all variables in the scope
      # and create variable to variable mapping.
      scope_variables = set()
      for var_name in store_vars:
        if not scopes or var_name.startswith(scopes + "/"):
          # Consume /part_ if partitioned variable.
          if "/part_" in var_name:
            var_name = var_name[:var_name.index("/part_")]
          scope_variables.add(var_name)
      for var_name in sorted(scope_variables):
        # Lookup name with specified prefix and suffix from current variable.
        # If tensor_name given is '/' (root), don't use it for full name.
        full_tensor_name = var_name[len(scopes):]
        if current_var_or_name != "/":
          full_tensor_name = full_tensor_name[1:]
        if tensor_name_in_ckpt != "/":
          full_tensor_name = tensor_name_in_ckpt + full_tensor_name
        # Remove trailing '/', if any, in the full_tensor_name
        if full_tensor_name.endswith("/"):
          full_tensor_name = full_tensor_name[:-1]
        if full_tensor_name not in variable_map:
          raise ValueError(
              "Tensor %s (%s in %s) is not found in %s checkpoint" % (
                  full_tensor_name, var_name[len(scopes) + 1:],
                  tensor_name_in_ckpt, ckpt_dir_or_file
              ))
        # Resolve the variable (or its partition list) and wire its
        # initializer to the checkpoint tensor.
        var = store_vars.get(var_name, None)
        if var is None:
          var = _collect_partitioned_variable(var_name, store_vars)
        _set_variable_or_list_initializer(var, ckpt_file, full_tensor_name)
        logging.debug("Initialize variable %s from checkpoint %s with %s",
                      var_name, ckpt_dir_or_file, full_tensor_name)
def _get_checkpoint_filename(ckpt_dir_or_file):
  """Returns checkpoint filename given directory or specific checkpoint file."""
  if not gfile.IsDirectory(ckpt_dir_or_file):
    # Already a concrete checkpoint path; use it as-is.
    return ckpt_dir_or_file
  return checkpoint_management.latest_checkpoint(ckpt_dir_or_file)
def _set_checkpoint_initializer(variable,
                                ckpt_file,
                                tensor_name,
                                slice_spec,
                                name="checkpoint_initializer"):
  """Overrides given variable's initialization op.

  Sets variable initializer to assign op that initializes variable from tensor's
  value in the checkpoint.

  Args:
    variable: `tf.Variable` object.
    ckpt_file: string, full path of the checkpoint.
    tensor_name: Name of the tensor to load from the checkpoint.
    slice_spec: Slice specification for loading partitioned tensors.
    name: Name of the operation.
  """
  base_type = variable.dtype.base_dtype
  # Do not colocate with variable since RestoreV2 op only runs on CPU and
  # colocation will force variable (and other ops that colocate with variable)
  # to be on CPU as well. It is okay to place the variable's initializer op on
  # CPU since it will only be run once at the start.
  with ops.device(variable.device), ops.device("/cpu:0"):
    # RestoreV2 returns one tensor per requested name; we request exactly one.
    restore_op = io_ops.restore_v2(
        ckpt_file, [tensor_name], [slice_spec], [base_type], name=name)[0]
    # Map the variable to its SaveableObject(s) so the restore goes through the
    # same machinery used by Saver (handles resource vs. ref variables).
    names_to_saveables = saveable_object_util.op_list_to_dict([variable])
    saveable_objects = []
    for name, op in names_to_saveables.items():
      for s in saveable_object_util.saveable_objects_for_op(op, name):
        saveable_objects.append(s)
    assert len(saveable_objects) == 1  # Should be only one variable.
    init_op = saveable_objects[0].restore([restore_op], restored_shapes=None)
    # Replace the variable's initializer with the checkpoint-restore op.
    # pylint:disable=protected-access
    variable._initializer_op = init_op
    restore_op.set_shape(variable.shape)
    variable._initial_value = restore_op
    # pylint:enable=protected-access
def _set_variable_or_list_initializer(variable_or_list, ckpt_file,
                                      tensor_name):
  """Overrides initialization op of given variable or list of variables.

  Calls `_set_checkpoint_initializer` for each variable in the given list of
  variables.

  Args:
    variable_or_list: `tf.Variable` object or a list of `tf.Variable` objects.
    ckpt_file: string, full path of the checkpoint.
    tensor_name: Name of the tensor to load from the checkpoint.

  Raises:
    ValueError: if all objects in `variable_or_list` are not partitions of the
      same large variable.
  """
  # Single (unpartitioned) variable: restore the full tensor, no slice spec.
  if not isinstance(variable_or_list, (list, tuple)):
    _set_checkpoint_initializer(variable_or_list, ckpt_file, tensor_name, "")
    return
  # A set of slices: every part must belong to one and the same full tensor.
  expected_full_name = None
  for part in variable_or_list:
    info = part._save_slice_info  # pylint:disable=protected-access
    if expected_full_name is None:
      expected_full_name = info.full_name
    elif expected_full_name != info.full_name:
      raise ValueError("Slices must all be from the same tensor: %s != %s" %
                       (expected_full_name, info.full_name))
    _set_checkpoint_initializer(part, ckpt_file, tensor_name, info.spec)
def _is_variable(x):
  """Returns True if `x` is a `tf.Variable` or a resource variable."""
  if isinstance(x, variables.Variable):
    return True
  return resource_variable_ops.is_resource_variable(x)
def _collect_partitioned_variable(name, all_vars):
"""Returns list of `tf.Variable` that comprise the partitioned variable."""
if name + "/part_0" in all_vars:
var = []
i = 0
while name + "/part_%d" % i in all_vars:
var.append(all_vars[name + "/part_%d" % i])
i += 1
return var
return None
| ppwwyyxx/tensorflow | tensorflow/python/training/checkpoint_utils.py | Python | apache-2.0 | 19,434 |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
using System.Reflection;
using System.Runtime.InteropServices;
// Assembly-level metadata shared across the build (company, trademark,
// copyright, culture, configuration).
[assembly: AssemblyCompany("The Apache Software Foundation.")]
[assembly: AssemblyTrademark("The Apache Software Foundation")]
[assembly: AssemblyCopyright("Copyright © 2017 The Apache Software Foundation")]
[assembly: AssemblyCulture("")]
[assembly: AssemblyConfiguration("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// Assembly and file version track the product release number (0.16.0).
[assembly: AssemblyVersion("0.16.0.0")]
[assembly: AssemblyFileVersion("0.16.0.0")]
| tcNickolas/reef | lang/cs/SharedAssemblyInfo.cs | C# | apache-2.0 | 1,475 |
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
// This file implements logic for lowering MHLO dialect to Standard dialect.
#include "llvm/ADT/StringSwitch.h"
#include "mlir-hlo/Dialect/mhlo/IR/hlo_ops.h"
#include "mlir-hlo/Dialect/mhlo/transforms/passes.h"
#include "mlir-hlo/Dialect/mhlo/transforms/rewriters.h"
#include "mlir/Dialect/StandardOps/IR/Ops.h"
#include "mlir/IR/Function.h"
#include "mlir/IR/PatternMatch.h"
#include "mlir/Pass/Pass.h"
namespace mlir {
namespace {
#include "generated_legalize_to_standard.inc"
} // end anonymous namespace
namespace mhlo {
namespace {
// Lowers an integer mhlo.compare with identically-shaped operands to the
// standard dialect cmpi op. Broadcasting cases are left to other patterns.
class CompareIConvert : public OpRewritePattern<mhlo::CompareOp> {
 public:
  using OpRewritePattern::OpRewritePattern;

  LogicalResult matchAndRewrite(mhlo::CompareOp op,
                                PatternRewriter &rewriter) const override {
    auto lhs = op.lhs();
    auto rhs = op.rhs();
    auto lhs_tensor_type = lhs.getType().cast<TensorType>();
    auto rhs_tensor_type = rhs.getType().cast<TensorType>();
    // Broadcasting not supported by this rewrite.
    if (lhs_tensor_type.getShape() != rhs_tensor_type.getShape())
      return failure();
    // Only signless integer elements map onto cmpi.
    if (!lhs_tensor_type.getElementType().isSignlessInteger() ||
        !rhs_tensor_type.getElementType().isSignlessInteger())
      return failure();
    // Translate the MHLO comparison-direction string to a cmpi predicate.
    // Integer comparisons are lowered as signed (slt/sle/sgt/sge).
    Optional<CmpIPredicate> predicate =
        llvm::StringSwitch<Optional<CmpIPredicate>>(op.comparison_direction())
            .Case("EQ", CmpIPredicate::eq)
            .Case("NE", CmpIPredicate::ne)
            .Case("LT", CmpIPredicate::slt)
            .Case("LE", CmpIPredicate::sle)
            .Case("GT", CmpIPredicate::sgt)
            .Case("GE", CmpIPredicate::sge)
            .Default(llvm::None);
    if (!predicate.hasValue()) return failure();
    rewriter.replaceOpWithNewOp<CmpIOp>(op, predicate.getValue(), lhs, rhs);
    return success();
  }
};
// Lowers a floating-point mhlo.compare with identically-shaped operands to
// the standard dialect cmpf op. Broadcasting cases are left to other patterns.
class CompareFConvert : public OpRewritePattern<mhlo::CompareOp> {
 public:
  using OpRewritePattern::OpRewritePattern;

  LogicalResult matchAndRewrite(mhlo::CompareOp op,
                                PatternRewriter &rewriter) const override {
    auto lhs = op.lhs();
    auto rhs = op.rhs();
    auto lhs_tensor_type = lhs.getType().cast<TensorType>();
    auto rhs_tensor_type = rhs.getType().cast<TensorType>();
    // Broadcasting not supported by this rewrite.
    if (lhs_tensor_type.getShape() != rhs_tensor_type.getShape())
      return failure();
    // Only float elements map onto cmpf.
    if (!lhs_tensor_type.getElementType().isa<FloatType>() ||
        !rhs_tensor_type.getElementType().isa<FloatType>())
      return failure();
    // Translate the MHLO comparison-direction string to a cmpf predicate.
    // EQ/LT/LE/GT/GE use ordered predicates; NE uses the unordered one.
    Optional<CmpFPredicate> predicate =
        llvm::StringSwitch<Optional<CmpFPredicate>>(op.comparison_direction())
            .Case("EQ", CmpFPredicate::OEQ)
            .Case("NE", CmpFPredicate::UNE)
            .Case("LT", CmpFPredicate::OLT)
            .Case("LE", CmpFPredicate::OLE)
            .Case("GT", CmpFPredicate::OGT)
            .Case("GE", CmpFPredicate::OGE)
            .Default(llvm::None);
    if (!predicate.hasValue()) return failure();
    rewriter.replaceOpWithNewOp<CmpFOp>(op, predicate.getValue(), lhs, rhs);
    return success();
  }
};
// Replace IotaOp with an integer constant. A ConvertOp is added to
// convert the integer constant to iota result type. For complex types, the real
// part is replaced with the generated constant and the imaginary part is
// replaced with zero tensor.
class ConvertIotaOp : public OpRewritePattern<mhlo::IotaOp> {
 public:
  using OpRewritePattern::OpRewritePattern;
  LogicalResult matchAndRewrite(mhlo::IotaOp op,
                                PatternRewriter &rewriter) const override {
    auto output_type = op.getType().cast<ShapedType>();
    auto output_size = output_type.getNumElements();
    auto dimension = op.iota_dimension();
    auto max_dim_size = output_type.getDimSize(dimension);
    auto element_type = output_type.getElementType();
    int bitwidth;
    // For complex results, the constant is built with the bitwidth of the
    // component (real/imaginary) type.
    auto complex_ty = element_type.dyn_cast<ComplexType>();
    Type int_or_float_ty = element_type;
    if (complex_ty) int_or_float_ty = complex_ty.getElementType();
    bitwidth = int_or_float_ty.getIntOrFloatBitWidth();
    llvm::SmallVector<APInt, 10> values;
    values.reserve(output_size);
    // `increase_stride` is the number of flat elements between consecutive
    // increments along the iota dimension (product of the dim sizes to the
    // right of `dimension` in row-major order).
    int64_t increase_stride = output_size;
    for (int i = 0; i <= dimension; i++) {
      increase_stride /= output_type.getDimSize(i);
    }
    // Enumerate every flat element; its iota value is its index along
    // `dimension`, i.e. (flat_index / stride) modulo that dim's size.
    int64_t current_value = 0;
    for (int i = 0; i < output_size; i++) {
      int64_t value = (current_value / increase_stride) % max_dim_size;
      values.push_back(APInt(bitwidth, value));
      ++current_value;
    }
    // Materialize the values as an integer constant of matching shape, then
    // convert to the requested int/float component type.
    auto int_shape_type = RankedTensorType::get(
        output_type.getShape(),
        IntegerType::get(bitwidth, rewriter.getContext()));
    auto loc = op.getLoc();
    auto integer_const = rewriter.create<mlir::ConstantOp>(
        loc, DenseIntElementsAttr::get(int_shape_type, values));
    auto int_or_float_shape_ty =
        RankedTensorType::get(output_type.getShape(), int_or_float_ty);
    auto iota_const =
        rewriter.create<ConvertOp>(loc, int_or_float_shape_ty, integer_const);
    // For int/float types we are done, replace op and return.
    if (!complex_ty) {
      rewriter.replaceOp(op, iota_const.getResult());
      return success();
    }
    // For complex types, generate a constant tensor of zeroes for the imaginary
    // part and use iota_const for real part.
    auto zeroes = rewriter.create<mlir::ConstantOp>(
        loc, DenseIntElementsAttr::get(int_shape_type, APInt(bitwidth, 0)));
    auto imag_zeroes =
        rewriter.create<ConvertOp>(loc, int_or_float_shape_ty, zeroes);
    rewriter.replaceOpWithNewOp<mhlo::ComplexOp>(op, iota_const, imag_zeroes);
    return success();
  }
};
} // end anonymous namespace
namespace {
// Function pass that greedily applies the MHLO-to-Standard patterns declared
// in this file. The body of runOnFunction is defined out-of-line below.
struct LegalizeToStandardPass
    : public PassWrapper<LegalizeToStandardPass, FunctionPass> {
  // Declare the dialects this pass may create ops for, so the context loads
  // them before the pass runs.
  void getDependentDialects(DialectRegistry &registry) const override {
    registry.insert<StandardOpsDialect>();
  }
  /// Perform the lowering to Standard dialect.
  void runOnFunction() override;
};
} // end anonymous namespace
// Factory for the MHLO-to-Standard legalization pass.
std::unique_ptr<mlir::OperationPass<mlir::FuncOp>> createLegalizeToStdPass() {
  auto pass = std::make_unique<LegalizeToStandardPass>();
  return pass;
}
// Collects all MHLO-to-Standard rewrite patterns: first the ones generated
// from the declarative (TableGen) rules, then the hand-written C++ patterns
// defined above.
void PopulateMhloToStdPatterns(OwningRewritePatternList *patterns,
                               mlir::MLIRContext *ctx) {
  mlir::populateWithGenerated(ctx, patterns);
  patterns->insert<CompareFConvert, CompareIConvert, ConvertIotaOp>(ctx);
}
/// Perform the lowering to standard dialect.
void LegalizeToStandardPass::runOnFunction() {
OwningRewritePatternList patterns;
mlir::mhlo::PopulateMhloToStdPatterns(&patterns, &getContext());
applyPatternsAndFoldGreedily(getFunction(), patterns);
}
} // end namespace mhlo
} // end namespace mlir
| karllessard/tensorflow | tensorflow/compiler/mlir/hlo/lib/Dialect/mhlo/transforms/legalize_to_standard.cc | C++ | apache-2.0 | 7,534 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.uima.annotator.regex;
import java.util.regex.Pattern;
/**
 * RegexVariables interface. Holds a set of named variables that can be
 * referenced from regular expressions with the <code>\v{name}</code> syntax.
 */
public interface RegexVariables {
  // Literal prefix that introduces a variable reference in a pattern.
  public static final String VARIABLE_START = "\\v";
  // Regex fragments matching the "\v{" opening and "}" closing delimiters
  // (backslashes are doubled once for Java and once for the regex engine).
  public static final String VARIABLE_REGEX_BEGIN = "\\\\v\\{";
  public static final String VARIABLE_REGEX_END = "\\}";
  // Compiled pattern matching "\v{name}" where the variable name is captured
  // in group 1 (word characters only).
  public static final Pattern VARIABLE_REGEX_PATTERN = Pattern
      .compile(VARIABLE_REGEX_BEGIN + "(\\w+)" + VARIABLE_REGEX_END);
  /**
   * Adds a variable to the Variables object.
   *
   * @param varName
   *          variable name
   *
   * @param varValue
   *          variable value
   */
  public void addVariable(String varName, String varValue);
  /**
   * returns the value of the specified variable or <code>null</code> if the
   * variable does not exist
   *
   * @param varName
   *          variable name
   *
   * @return returns the variable value of <code>null</code> if the variable
   *         does not exist
   *
   */
  public String getVariableValue(String varName);
}
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.client.block.stream;
import alluxio.conf.AlluxioConfiguration;
import alluxio.conf.PropertyKey;
import alluxio.exception.status.AlluxioStatusException;
import alluxio.exception.status.UnauthenticatedException;
import alluxio.grpc.BlockWorkerGrpc;
import alluxio.grpc.CacheRequest;
import alluxio.grpc.ClearMetricsRequest;
import alluxio.grpc.ClearMetricsResponse;
import alluxio.grpc.CreateLocalBlockRequest;
import alluxio.grpc.CreateLocalBlockResponse;
import alluxio.grpc.DataMessageMarshaller;
import alluxio.grpc.DataMessageMarshallerProvider;
import alluxio.grpc.GrpcChannel;
import alluxio.grpc.GrpcChannelBuilder;
import alluxio.grpc.GrpcNetworkGroup;
import alluxio.grpc.GrpcSerializationUtils;
import alluxio.grpc.GrpcServerAddress;
import alluxio.grpc.MoveBlockRequest;
import alluxio.grpc.MoveBlockResponse;
import alluxio.grpc.OpenLocalBlockRequest;
import alluxio.grpc.OpenLocalBlockResponse;
import alluxio.grpc.ReadRequest;
import alluxio.grpc.ReadResponse;
import alluxio.grpc.RemoveBlockRequest;
import alluxio.grpc.RemoveBlockResponse;
import alluxio.grpc.WriteRequest;
import alluxio.grpc.WriteResponse;
import alluxio.resource.AlluxioResourceLeakDetectorFactory;
import alluxio.retry.RetryPolicy;
import alluxio.retry.RetryUtils;
import alluxio.security.user.UserState;
import com.google.common.base.Preconditions;
import com.google.common.io.Closer;
import io.grpc.StatusRuntimeException;
import io.grpc.stub.StreamObserver;
import io.netty.util.ResourceLeakDetector;
import io.netty.util.ResourceLeakTracker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable;
/**
 * Default implementation of {@link BlockWorkerClient}.
 *
 * Maintains two gRPC channels to one block worker: a streaming channel used
 * for block data transfer and an RPC channel used for short control calls.
 */
public class DefaultBlockWorkerClient implements BlockWorkerClient {
  private static final Logger LOG =
      LoggerFactory.getLogger(DefaultBlockWorkerClient.class.getName());
  // Reports client instances that get garbage-collected without close().
  private static final ResourceLeakDetector<DefaultBlockWorkerClient> DETECTOR =
      AlluxioResourceLeakDetectorFactory.instance()
          .newResourceLeakDetector(DefaultBlockWorkerClient.class);
  // Channel dedicated to data streaming (block reads/writes).
  private GrpcChannel mStreamingChannel;
  // Channel for non-streaming control RPCs.
  private GrpcChannel mRpcChannel;
  private GrpcServerAddress mAddress;
  // Deadline applied to each blocking control RPC, in milliseconds.
  private final long mRpcTimeoutMs;
  private BlockWorkerGrpc.BlockWorkerStub mStreamingAsyncStub;
  private BlockWorkerGrpc.BlockWorkerBlockingStub mRpcBlockingStub;
  private BlockWorkerGrpc.BlockWorkerStub mRpcAsyncStub;
  @Nullable
  private final ResourceLeakTracker<DefaultBlockWorkerClient> mTracker;
  /**
   * Creates a client instance for communicating with block worker.
   *
   * Retries channel creation on authentication failure (after re-login) per
   * the configured retry policy; any other gRPC failure aborts immediately.
   *
   * @param userState the user state
   * @param address the address of the worker
   * @param alluxioConf Alluxio configuration
   */
  public DefaultBlockWorkerClient(UserState userState, GrpcServerAddress address,
      AlluxioConfiguration alluxioConf) throws IOException {
    RetryPolicy retryPolicy = RetryUtils.defaultClientRetry(
        alluxioConf.getDuration(PropertyKey.USER_RPC_RETRY_MAX_DURATION),
        alluxioConf.getDuration(PropertyKey.USER_RPC_RETRY_BASE_SLEEP_MS),
        alluxioConf.getDuration(PropertyKey.USER_RPC_RETRY_MAX_SLEEP_MS));
    UnauthenticatedException lastException = null;
    // TODO(feng): unify worker client with AbstractClient
    while (retryPolicy.attempt()) {
      try {
        // Disables channel pooling for data streaming to achieve better throughput.
        // Channel is still reused due to client pooling.
        mStreamingChannel = GrpcChannelBuilder.newBuilder(address, alluxioConf)
            .setSubject(userState.getSubject())
            .setNetworkGroup(GrpcNetworkGroup.STREAMING)
            .setClientType("DefaultBlockWorkerClient-Stream")
            .build();
        mStreamingChannel.intercept(new StreamSerializationClientInterceptor());
        // Uses default pooling strategy for RPC calls for better scalability.
        mRpcChannel = GrpcChannelBuilder.newBuilder(address, alluxioConf)
            .setSubject(userState.getSubject())
            .setNetworkGroup(GrpcNetworkGroup.RPC)
            .setClientType("DefaultBlockWorkerClient-Rpc")
            .build();
        lastException = null;
        break;
      } catch (StatusRuntimeException e) {
        // Non-auth gRPC failures are fatal: release partial state and rethrow.
        close();
        throw AlluxioStatusException.fromStatusRuntimeException(e);
      } catch (UnauthenticatedException e) {
        // Credentials may have expired: re-login and let the policy retry.
        close();
        userState.relogin();
        lastException = e;
      }
    }
    if (lastException != null) {
      throw lastException;
    }
    mStreamingAsyncStub = BlockWorkerGrpc.newStub(mStreamingChannel);
    mRpcBlockingStub = BlockWorkerGrpc.newBlockingStub(mRpcChannel);
    mRpcAsyncStub = BlockWorkerGrpc.newStub(mRpcChannel);
    mAddress = address;
    mRpcTimeoutMs = alluxioConf.getMs(PropertyKey.USER_RPC_RETRY_MAX_DURATION);
    mTracker = DETECTOR.track(this);
  }
  @Override
  public boolean isShutdown() {
    // The client is unusable as soon as either channel has shut down.
    return mStreamingChannel.isShutdown() || mRpcChannel.isShutdown();
  }
  @Override
  public boolean isHealthy() {
    return !isShutdown() && mStreamingChannel.isHealthy() && mRpcChannel.isHealthy();
  }
  @Override
  public void close() throws IOException {
    // Closer runs every registered action even if an earlier one throws,
    // so both channels are shut down and the leak tracker is always closed.
    try (Closer closer = Closer.create()) {
      closer.register(() -> {
        if (mStreamingChannel != null) {
          mStreamingChannel.shutdown();
        }
      });
      closer.register(() -> {
        if (mRpcChannel != null) {
          mRpcChannel.shutdown();
        }
      });
      closer.register(() -> {
        if (mTracker != null) {
          mTracker.close(this);
        }
      });
    }
  }
  @Override
  public StreamObserver<WriteRequest> writeBlock(StreamObserver<WriteResponse> responseObserver) {
    // If the caller supplies a data-message marshaller, override the method
    // descriptor so requests are serialized with it (zero-copy path).
    if (responseObserver instanceof DataMessageMarshallerProvider) {
      DataMessageMarshaller<WriteRequest> marshaller =
          ((DataMessageMarshallerProvider<WriteRequest, WriteResponse>) responseObserver)
              .getRequestMarshaller();
      Preconditions.checkNotNull(marshaller, "marshaller");
      return mStreamingAsyncStub
          .withOption(GrpcSerializationUtils.OVERRIDDEN_METHOD_DESCRIPTOR,
              BlockWorkerGrpc.getWriteBlockMethod().toBuilder()
                  .setRequestMarshaller(marshaller)
                  .build())
          .writeBlock(responseObserver);
    } else {
      return mStreamingAsyncStub.writeBlock(responseObserver);
    }
  }
  @Override
  public StreamObserver<ReadRequest> readBlock(StreamObserver<ReadResponse> responseObserver) {
    // Mirror of writeBlock: override the response marshaller when provided.
    if (responseObserver instanceof DataMessageMarshallerProvider) {
      DataMessageMarshaller<ReadResponse> marshaller =
          ((DataMessageMarshallerProvider<ReadRequest, ReadResponse>) responseObserver)
              .getResponseMarshaller();
      Preconditions.checkNotNull(marshaller);
      return mStreamingAsyncStub
          .withOption(GrpcSerializationUtils.OVERRIDDEN_METHOD_DESCRIPTOR,
              BlockWorkerGrpc.getReadBlockMethod().toBuilder()
                  .setResponseMarshaller(marshaller)
                  .build())
          .readBlock(responseObserver);
    } else {
      return mStreamingAsyncStub.readBlock(responseObserver);
    }
  }
  @Override
  public StreamObserver<CreateLocalBlockRequest> createLocalBlock(
      StreamObserver<CreateLocalBlockResponse> responseObserver) {
    return mStreamingAsyncStub.createLocalBlock(responseObserver);
  }
  @Override
  public StreamObserver<OpenLocalBlockRequest> openLocalBlock(
      StreamObserver<OpenLocalBlockResponse> responseObserver) {
    return mStreamingAsyncStub.openLocalBlock(responseObserver);
  }
  @Override
  public RemoveBlockResponse removeBlock(final RemoveBlockRequest request) {
    return mRpcBlockingStub.withDeadlineAfter(mRpcTimeoutMs, TimeUnit.MILLISECONDS)
        .removeBlock(request);
  }
  @Override
  public MoveBlockResponse moveBlock(MoveBlockRequest request) {
    return mRpcBlockingStub.withDeadlineAfter(mRpcTimeoutMs, TimeUnit.MILLISECONDS)
        .moveBlock(request);
  }
  @Override
  public ClearMetricsResponse clearMetrics(ClearMetricsRequest request) {
    return mRpcBlockingStub.withDeadlineAfter(mRpcTimeoutMs, TimeUnit.MILLISECONDS)
        .clearMetrics(request);
  }
  @Override
  public void cache(CacheRequest request) {
    // Async cache requests are best-effort: failures are logged, not thrown.
    boolean async = request.getAsync();
    try {
      mRpcBlockingStub.withDeadlineAfter(mRpcTimeoutMs, TimeUnit.MILLISECONDS).cache(request);
    } catch (Exception e) {
      if (!async) {
        throw e;
      }
      LOG.warn("Error sending async cache request {} to worker {}.", request, mAddress, e);
    }
  }
}
| wwjiang007/alluxio | core/client/fs/src/main/java/alluxio/client/block/stream/DefaultBlockWorkerClient.java | Java | apache-2.0 | 9,174 |
/*
* Copyright (c) 2005-2010 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.test.acceptance.framework.loan;
/**
 * Value object carrying the inputs of the "create loan account" search form:
 * the free-text client/group search string and the selected loan product.
 */
public class CreateLoanAccountSearchParameters {
    private String searchString;
    private String loanProduct;

    /** @return the client/group search text entered by the user */
    public String getSearchString() {
        return searchString;
    }

    /** @param searchString the client/group search text to use */
    public void setSearchString(String searchString) {
        this.searchString = searchString;
    }

    /** @return the name of the selected loan product */
    public String getLoanProduct() {
        return loanProduct;
    }

    /** @param loanProduct the name of the loan product to select */
    public void setLoanProduct(String loanProduct) {
        this.loanProduct = loanProduct;
    }
}
| vorburger/mifos-head | acceptanceTests/src/test/java/org/mifos/test/acceptance/framework/loan/CreateLoanAccountSearchParameters.java | Java | apache-2.0 | 1,290 |
/*
* Copyright 2014 Realm Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.realm.internal.test;
import io.realm.internal.DefineTable;
/**
 * A helper class containing model(s) for simple code generation tests.
 */
class CodeGenTest {
    @DefineTable // this is enabled only for occasional local tests
    class someModel {
        // Fields picked up by the @DefineTable annotation processor; their
        // declaration order defines the generated table's column order.
        String name;
        int age;
    }
}
| ShikaSD/realm-java | realm/realm-library/src/androidTest/java/io/realm/internal/test/CodeGenTest.java | Java | apache-2.0 | 901 |
/*
mustache.js — Logic-less templates in JavaScript
See http://mustache.github.com/ for more info.
*/
var Mustache = function() {
var Renderer = function() {};
Renderer.prototype = {
otag: "{{",
ctag: "}}",
pragmas: {},
buffer: [],
pragmas_implemented: {
"IMPLICIT-ITERATOR": true,
"TRANSLATION-HINT": true
},
context: {},
render: function(template, context, partials, in_recursion) {
// reset buffer & set context
if(!in_recursion) {
this.context = context;
this.buffer = []; // TODO: make this non-lazy
}
// fail fast
if(!this.includes("", template)) {
if(in_recursion) {
return template;
} else {
this.send(template);
return;
}
}
// Branching or moving down the partial stack, save any translation mode info.
if (this.pragmas['TRANSLATION-HINT']) {
context['_mode'] = this.pragmas['TRANSLATION-HINT']['mode'];
}
template = this.render_pragmas(template);
template = this.render_i18n(template, context, partials);
var html = this.render_section(template, context, partials);
if (html === template) {
if (in_recursion) {
return this.render_tags(html, context, partials, true);
}
this.render_tags(html, context, partials, false);
} else {
if(in_recursion) {
return html;
} else {
var lines = html.split("\n");
for (var i = 0; i < lines.length; i++) {
this.send(lines[i]);
}
return;
}
}
},
/*
Sends parsed lines
*/
send: function(line) {
if(line != "") {
this.buffer.push(line);
}
},
/*
Looks for %PRAGMAS
*/
render_pragmas: function(template) {
// no pragmas
if(!this.includes("%", template)) {
return template;
}
var that = this;
var regex = new RegExp(this.otag + "%([\\w-]+) ?([\\w]+=[\\w]+)?" +
this.ctag);
return template.replace(regex, function(match, pragma, options) {
if(!that.pragmas_implemented[pragma]) {
throw({message:
"This implementation of mustache doesn't understand the '" +
pragma + "' pragma"});
}
that.pragmas[pragma] = {};
if(options) {
var opts = options.split("=");
that.pragmas[pragma][opts[0]] = opts[1];
}
return "";
// ignore unknown pragmas silently
});
},
/*
Tries to find a partial in the curent scope and render it
*/
render_partial: function(name, context, partials) {
name = this.trim(name);
if(!partials || partials[name] === undefined) {
throw({message: "unknown_partial '" + name + "'"});
}
if(typeof(context[name]) != "object") {
return this.render(partials[name], context, partials, true);
}
return this.render(partials[name], context[name], partials, true);
},
render_i18n: function(html, context, partials) {
if (html.indexOf(this.otag + "_i") == -1) {
return html;
}
var that = this;
var regex = new RegExp(this.otag + "\\_i" + this.ctag +
"\\s*([\\s\\S]+?)" + this.otag + "\\/i" + this.ctag, "mg");
// for each {{_i}}{{/i}} section do...
return html.replace(regex, function(match, content) {
var translation_mode = undefined;
if (that.pragmas && that.pragmas["TRANSLATION-HINT"] && that.pragmas["TRANSLATION-HINT"]['mode']) {
translation_mode = { _mode: that.pragmas["TRANSLATION-HINT"]['mode'] };
} else if (context['_mode']) {
translation_mode = { _mode: context['_mode'] };
}
return that.render(_(content, translation_mode), context, partials, true);
});
},
/*
Renders inverted (^) and normal (#) sections
*/
render_section: function(template, context, partials) {
if(!this.includes("#", template) && !this.includes("^", template)) {
return template;
}
var that = this;
// This regex matches _the first_ section ({{#foo}}{{/foo}}), and captures the remainder
var regex = new RegExp(
"^([\\s\\S]*?)" + // all the crap at the beginning that is not {{*}} ($1)
this.otag + // {{
"(\\^|\\#)\\s*(.+)\\s*" + // #foo (# == $2, foo == $3)
this.ctag + // }}
"\n*([\\s\\S]*?)" + // between the tag ($2). leading newlines are dropped
this.otag + // {{
"\\/\\s*\\3\\s*" + // /foo (backreference to the opening tag).
this.ctag + // }}
"\\s*([\\s\\S]*)$", // everything else in the string ($4). leading whitespace is dropped.
"g");
// for each {{#foo}}{{/foo}} section do...
return template.replace(regex, function(match, before, type, name, content, after) {
// before contains only tags, no sections
var renderedBefore = before ? that.render_tags(before, context, partials, true) : "",
// after may contain both sections and tags, so use full rendering function
renderedAfter = after ? that.render(after, context, partials, true) : "";
var value = that.find(name, context);
if(type == "^") { // inverted section
if(!value || that.is_array(value) && value.length === 0) {
// false or empty list, render it
return renderedBefore + that.render(content, context, partials, true) + renderedAfter;
} else {
return renderedBefore + "" + renderedAfter;
}
} else if(type == "#") { // normal section
if(that.is_array(value)) { // Enumerable, Let's loop!
return renderedBefore + that.map(value, function(row) {
return that.render(content, that.create_context(row), partials, true);
}).join("") + renderedAfter;
} else if(that.is_object(value)) { // Object, Use it as subcontext!
return renderedBefore + that.render(content, that.create_context(value),
partials, true) + renderedAfter;
} else if(typeof value === "function") {
// higher order section
return renderedBefore + value.call(context, content, function(text) {
return that.render(text, context, partials, true);
}) + renderedAfter;
} else if(value) { // boolean section
return renderedBefore + that.render(content, context, partials, true) + renderedAfter;
} else {
return renderedBefore + "" + renderedAfter;
}
}
});
},
/*
Replace {{foo}} and friends with values from our view
*/
render_tags: function(template, context, partials, in_recursion) {
// tit for tat
var that = this;
var new_regex = function() {
return new RegExp(that.otag + "(=|!|>|\\{|%)?([^\\/#\\^]+?)\\1?" +
that.ctag + "+", "g");
};
var regex = new_regex();
var tag_replace_callback = function(match, operator, name) {
switch(operator) {
case "!": // ignore comments
return "";
case "=": // set new delimiters, rebuild the replace regexp
that.set_delimiters(name);
regex = new_regex();
return "";
case ">": // render partial
return that.render_partial(name, context, partials);
case "{": // the triple mustache is unescaped
return that.find(name, context);
default: // escape the value
return that.escape(that.find(name, context));
}
};
var lines = template.split("\n");
for(var i = 0; i < lines.length; i++) {
lines[i] = lines[i].replace(regex, tag_replace_callback, this);
if(!in_recursion) {
this.send(lines[i]);
}
}
if(in_recursion) {
return lines.join("\n");
}
},
set_delimiters: function(delimiters) {
var dels = delimiters.split(" ");
this.otag = this.escape_regex(dels[0]);
this.ctag = this.escape_regex(dels[1]);
},
escape_regex: function(text) {
// thank you Simon Willison
if(!arguments.callee.sRE) {
var specials = [
'/', '.', '*', '+', '?', '|',
'(', ')', '[', ']', '{', '}', '\\'
];
arguments.callee.sRE = new RegExp(
'(\\' + specials.join('|\\') + ')', 'g'
);
}
return text.replace(arguments.callee.sRE, '\\$1');
},
/*
find `name` in current `context`. That is find me a value
from the view object
*/
find: function(name, context) {
name = this.trim(name);
// Checks whether a value is thruthy or false or 0
function is_kinda_truthy(bool) {
return bool === false || bool === 0 || bool;
}
var value;
if(is_kinda_truthy(context[name])) {
value = context[name];
} else if(is_kinda_truthy(this.context[name])) {
value = this.context[name];
}
if(typeof value === "function") {
return value.apply(context);
}
if(value !== undefined) {
return value;
}
// silently ignore unkown variables
return "";
},
// Utility methods
/* includes tag */
includes: function(needle, haystack) {
return haystack.indexOf(this.otag + needle) != -1;
},
/*
Does away with nasty characters
*/
escape: function(s) {
s = String(s === null ? "" : s);
return s.replace(/&(?!\w+;)|["'<>\\]/g, function(s) {
switch(s) {
case "&": return "&";
case "\\": return "\\\\";
case '"': return '"';
case "'": return ''';
case "<": return "<";
case ">": return ">";
default: return s;
}
});
},
// by @langalex, support for arrays of strings
create_context: function(_context) {
if(this.is_object(_context)) {
return _context;
} else {
var iterator = ".";
if(this.pragmas["IMPLICIT-ITERATOR"]) {
iterator = this.pragmas["IMPLICIT-ITERATOR"].iterator;
}
var ctx = {};
ctx[iterator] = _context;
return ctx;
}
},
is_object: function(a) {
return a && typeof a == "object";
},
  // Cross-frame-safe array check (this code predates Array.isArray).
  is_array: function(a) {
    return Object.prototype.toString.call(a) === '[object Array]';
  },
/*
Gets rid of leading and trailing whitespace
*/
trim: function(s) {
return s.replace(/^\s*|\s*$/g, "");
},
/*
Why, why, why? Because IE. Cry, cry cry.
*/
map: function(array, fn) {
if (typeof array.map == "function") {
return array.map(fn);
} else {
var r = [];
var l = array.length;
for(var i = 0; i < l; i++) {
r.push(fn(array[i]));
}
return r;
}
}
};
  // Public API of the module: only to_html (plus name/version metadata)
  // escapes the closure; Renderer stays private.
  return({
    name: "mustache.js",
    version: "0.3.1-dev-twitter",
    /*
    Turns a template and view into HTML
    */
    // When `send_fun` is supplied, each rendered line is streamed through
    // it and nothing is returned; otherwise the renderer buffers lines and
    // the joined string is returned.
    to_html: function(template, view, partials, send_fun) {
      var renderer = new Renderer();
      if(send_fun) {
        renderer.send = send_fun;
      }
      renderer.render(template, view || {}, partials);
      if(!send_fun) {
        return renderer.buffer.join("\n");
      }
    }
  });
}();
| EHJ-52n/js-sensorweb-client | src/main/js/libs/mustache.js | JavaScript | apache-2.0 | 11,529 |
/*
* $Id: WrapperClassBean.java 799110 2009-07-29 22:44:26Z musachy $
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.struts2.json;
import java.util.List;
import java.util.Map;
/**
 * Test fixture bean exposing one property for each wrapper, primitive and
 * collection shape the JSON plugin must (de)serialize: boxed primitives, an
 * unboxed int/boolean trio, and nested generic collection types. Pure state
 * holder -- every method is a conventional accessor with no behaviour.
 */
public class WrapperClassBean {

    private String stringField;
    private Integer intField;
    private int nullIntField;
    private Boolean booleanField;
    private boolean primitiveBooleanField1;
    private boolean primitiveBooleanField2;
    private boolean primitiveBooleanField3;
    private Character charField;
    private Long longField;
    private Float floatField;
    private Double doubleField;
    private Object objectField;
    private Byte byteField;
    private List<SimpleValue> listField;
    private List<Map<String, Long>> listMapField;
    private Map<String, List<Long>> mapListField;
    private Map<String, Long>[] arrayMapField;

    // Accessors are grouped getter/setter per field, in field order.

    public String getStringField() {
        return stringField;
    }

    public void setStringField(String stringField) {
        this.stringField = stringField;
    }

    public Integer getIntField() {
        return intField;
    }

    public void setIntField(Integer intField) {
        this.intField = intField;
    }

    public int getNullIntField() {
        return nullIntField;
    }

    public void setNullIntField(int nullIntField) {
        this.nullIntField = nullIntField;
    }

    public Boolean getBooleanField() {
        return booleanField;
    }

    public void setBooleanField(Boolean booleanField) {
        this.booleanField = booleanField;
    }

    public boolean isPrimitiveBooleanField1() {
        return primitiveBooleanField1;
    }

    public void setPrimitiveBooleanField1(boolean primitiveBooleanField1) {
        this.primitiveBooleanField1 = primitiveBooleanField1;
    }

    public boolean isPrimitiveBooleanField2() {
        return primitiveBooleanField2;
    }

    public void setPrimitiveBooleanField2(boolean primitiveBooleanField2) {
        this.primitiveBooleanField2 = primitiveBooleanField2;
    }

    public boolean isPrimitiveBooleanField3() {
        return primitiveBooleanField3;
    }

    public void setPrimitiveBooleanField3(boolean primitiveBooleanField3) {
        this.primitiveBooleanField3 = primitiveBooleanField3;
    }

    public Character getCharField() {
        return charField;
    }

    public void setCharField(Character charField) {
        this.charField = charField;
    }

    public Long getLongField() {
        return longField;
    }

    public void setLongField(Long longField) {
        this.longField = longField;
    }

    public Float getFloatField() {
        return floatField;
    }

    public void setFloatField(Float floatField) {
        this.floatField = floatField;
    }

    public Double getDoubleField() {
        return doubleField;
    }

    public void setDoubleField(Double doubleField) {
        this.doubleField = doubleField;
    }

    public Object getObjectField() {
        return objectField;
    }

    public void setObjectField(Object objectField) {
        this.objectField = objectField;
    }

    public Byte getByteField() {
        return byteField;
    }

    public void setByteField(Byte byteField) {
        this.byteField = byteField;
    }

    public List<SimpleValue> getListField() {
        return listField;
    }

    public void setListField(List<SimpleValue> listField) {
        this.listField = listField;
    }

    public List<Map<String, Long>> getListMapField() {
        return listMapField;
    }

    public void setListMapField(List<Map<String, Long>> listMapField) {
        this.listMapField = listMapField;
    }

    public Map<String, List<Long>> getMapListField() {
        return mapListField;
    }

    public void setMapListField(Map<String, List<Long>> mapListField) {
        this.mapListField = mapListField;
    }

    public Map<String, Long>[] getArrayMapField() {
        return arrayMapField;
    }

    public void setArrayMapField(Map<String, Long>[] arrayMapField) {
        this.arrayMapField = arrayMapField;
    }
}
| WillJiang/WillJiang | src/plugins/json/src/test/java/org/apache/struts2/json/WrapperClassBean.java | Java | apache-2.0 | 4,821 |
package alien4cloud.tosca.parser.mapping.generator;
import java.io.IOException;
import java.util.AbstractMap;
import java.util.Map;
import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Component;
import org.yaml.snakeyaml.nodes.MappingNode;
import org.yaml.snakeyaml.nodes.Node;
import org.yaml.snakeyaml.nodes.NodeTuple;
import org.yaml.snakeyaml.nodes.ScalarNode;
import org.yaml.snakeyaml.nodes.SequenceNode;
import alien4cloud.tosca.parser.IChecker;
import alien4cloud.tosca.parser.INodeParser;
import alien4cloud.tosca.parser.KeyValueMappingTarget;
import alien4cloud.tosca.parser.MappingTarget;
import alien4cloud.tosca.parser.ParserUtils;
import alien4cloud.tosca.parser.ParsingContextExecution;
import alien4cloud.tosca.parser.ParsingError;
import alien4cloud.tosca.parser.ParsingException;
import alien4cloud.tosca.parser.ParsingResult;
import alien4cloud.tosca.parser.YamlSimpleParser;
import alien4cloud.tosca.parser.impl.ErrorCode;
import alien4cloud.tosca.parser.impl.base.CheckedTypeNodeParser;
import alien4cloud.tosca.parser.impl.base.ScalarParser;
import alien4cloud.tosca.parser.impl.base.TypeNodeParser;
import alien4cloud.tosca.parser.mapping.DefaultParser;
import com.google.common.collect.Maps;
/**
* Load type mapping definition from yaml and add it to the type mapping registry.
*/
@Slf4j
@Component
public class MappingGenerator extends DefaultParser<Map<String, INodeParser>> {
    @Resource
    private ApplicationContext applicationContext;
    // Lookup registries filled from the Spring context at startup:
    // parsers keyed by implementation class name, mapping builders by their
    // declared key, checkers by their declared name.
    private Map<String, INodeParser> parsers = Maps.newHashMap();
    private Map<String, IMappingBuilder> mappingBuilders = Maps.newHashMap();
    private Map<String, IChecker> checkers = Maps.newHashMap();
    /**
     * Collects every INodeParser, IMappingBuilder and IChecker bean from the
     * application context into the local registries used while parsing
     * mapping definitions.
     */
    @PostConstruct
    public void initialize() {
        Map<String, INodeParser> contextParsers = applicationContext.getBeansOfType(INodeParser.class);
        // register parsers based on their class name.
        for (INodeParser parser : contextParsers.values()) {
            parsers.put(parser.getClass().getName(), parser);
        }
        Map<String, IMappingBuilder> contextMappingBuilders = applicationContext.getBeansOfType(IMappingBuilder.class);
        for (IMappingBuilder mappingBuilder : contextMappingBuilders.values()) {
            mappingBuilders.put(mappingBuilder.getKey(), mappingBuilder);
        }
        Map<String, IChecker> contextCheckers = applicationContext.getBeansOfType(IChecker.class);
        for (IChecker checker : contextCheckers.values()) {
            checkers.put(checker.getName(), checker);
        }
    }
    /**
     * Loads and parses the yaml mapping definition found at the given Spring
     * resource location.
     *
     * @param resourceLocation a location the ApplicationContext can resolve
     *        (e.g. a classpath: URL)
     * @return map from yaml type name to the parser handling that type
     * @throws ParsingException when the resource cannot be read or the yaml
     *         contains parsing errors
     */
    public Map<String, INodeParser> process(String resourceLocation) throws ParsingException {
        org.springframework.core.io.Resource resource = applicationContext.getResource(resourceLocation);
        YamlSimpleParser<Map<String, INodeParser>> nodeParser = new YamlSimpleParser<>(this);
        try {
            ParsingResult<Map<String, INodeParser>> result = nodeParser.parseFile(resource.getURI().toString(), resource.getFilename(),
                    resource.getInputStream(), null);
            if (result.getContext().getParsingErrors().isEmpty()) {
                return result.getResult();
            }
            throw new ParsingException(resource.getFilename(), result.getContext().getParsingErrors());
        } catch (IOException e) {
            log.error("Failed to open stream", e);
            throw new ParsingException(resource.getFilename(), new ParsingError(ErrorCode.MISSING_FILE, "Unable to load file.", null, e.getMessage(), null,
                    resourceLocation));
        }
    }
    /**
     * Root of the mapping document: expects a yaml sequence of type-mapping
     * nodes; each entry contributes one (yaml type, parser) pair. Entries
     * that fail to parse are reported as errors and skipped.
     */
    public Map<String, INodeParser> parse(Node node, ParsingContextExecution context) {
        Map<String, INodeParser> parsers = Maps.newHashMap();
        if (node instanceof SequenceNode) {
            SequenceNode types = (SequenceNode) node;
            for (Node mapping : types.getValue()) {
                Map.Entry<String, INodeParser<?>> entry = processTypeMapping(mapping, context);
                if (entry != null) {
                    parsers.put(entry.getKey(), entry.getValue());
                }
            }
        } else {
            context.getParsingErrors().add(
                    new ParsingError(ErrorCode.SYNTAX_ERROR, "Mapping should be a sequence of type mappings", node.getStartMark(), "Actually was "
                            + node.getClass().getSimpleName(), node.getEndMark(), ""));
        }
        return parsers;
    }
    // Wraps doProcessTypeMapping, converting reflection failures (unknown
    // target class etc.) into parsing errors instead of propagating them.
    private Map.Entry<String, INodeParser<?>> processTypeMapping(Node node, ParsingContextExecution context) {
        try {
            return doProcessTypeMapping(node, context);
        } catch (ClassNotFoundException | IllegalAccessException | InstantiationException e) {
            log.error("Failed to load class while parsing mapping", e);
            context.getParsingErrors().add(
                    new ParsingError(ErrorCode.SYNTAX_ERROR, "Unable to load class", node.getStartMark(), e.getMessage(), node.getEndMark(), ""));
            return null;
        }
    }
    // A type mapping is a yaml mapping whose FIRST tuple defines
    // (yaml type -> target), where target is either a "__"-prefixed wrapper
    // builder key, the class name of a registered parser, or a java class
    // name (optionally "Class|checker"); every FOLLOWING tuple is a field
    // mapping applied to the TypeNodeParser built from the first tuple.
    private Map.Entry<String, INodeParser<?>> doProcessTypeMapping(Node node, ParsingContextExecution context) throws ClassNotFoundException,
            IllegalAccessException, InstantiationException {
        if (node instanceof MappingNode) {
            MappingNode mapping = (MappingNode) node;
            String yamlType = null;
            INodeParser<?> parser = null;
            for (NodeTuple tuple : mapping.getValue()) {
                if (yamlType == null) {
                    yamlType = ParserUtils.getScalar(tuple.getKeyNode(), context);
                    String type = ParserUtils.getScalar(tuple.getValueNode(), context);
                    if (type.startsWith("__")) {
                        // wrapper builders consume the whole mapping node
                        parser = getWrapperParser(type, mapping, context);
                        return new AbstractMap.SimpleEntry<String, INodeParser<?>>(yamlType, parser);
                    }
                    parser = this.parsers.get(type);
                    if (parser != null) {
                        log.debug("Mapping yaml type <" + yamlType + "> using parser <" + type + ">");
                        return new AbstractMap.SimpleEntry<String, INodeParser<?>>(yamlType, parser);
                    }
                    parser = buildTypeNodeParser(yamlType, type);
                    // log.debug("Mapping yaml type <" + yamlType + "> to class <" + type + ">");
                    // Class<?> javaClass = Class.forName(type);
                    // parser = new TypeNodeParser<>(javaClass, yamlType);
                } else {
                    // process a mapping
                    map(tuple, (TypeNodeParser) parser, context);
                }
            }
            return new AbstractMap.SimpleEntry<String, INodeParser<?>>(yamlType, parser);
        } else {
            context.getParsingErrors().add(
                    new ParsingError(ErrorCode.SYNTAX_ERROR, "Unable to process type mapping.", node.getStartMark(),
                            "Mapping must be defined using a mapping node.", node.getEndMark(), ""));
        }
        return null;
    }
    // Target syntax is either "com.x.Type" or "com.x.Type|checkerName"; a
    // known checker yields a CheckedTypeNodeParser, otherwise a plain
    // TypeNodeParser is used (with a warning when the checker is unknown).
    private TypeNodeParser<?> buildTypeNodeParser(String yamlType, String javaType) throws ClassNotFoundException {
        String realJavaType = javaType;
        IChecker checker = null;
        if (javaType.contains("|")) {
            realJavaType = javaType.substring(0, javaType.indexOf("|"));
            String checkerName = javaType.substring(javaType.indexOf("|") + 1);
            log.debug(String.format("After parsing <%s>, realJavaType is <%s>, checkerName is <%s>", javaType, realJavaType, checkerName));
            checker = checkers.get(checkerName);
            if (checker == null) {
                log.warn(String.format("Can not find checker <%s>, using a standard TypeNodeParser", checkerName));
            }
        }
        Class<?> javaClass = Class.forName(realJavaType);
        if (checker == null) {
            log.debug("Mapping yaml type <" + yamlType + "> to class <" + realJavaType + ">");
            return new TypeNodeParser<>(javaClass, yamlType);
        } else {
            // TODO check that the type are compatible
            log.debug("Mapping yaml type <" + yamlType + "> to class <" + realJavaType + "> using checker " + checker.toString());
            return new CheckedTypeNodeParser<>(javaClass, yamlType, checker);
        }
    }
    // Resolves a "__key" target to its registered wrapper builder.
    // NOTE(review): throws NullPointerException when the key is unknown --
    // consider reporting a parsing error instead; confirm intended behavior.
    private INodeParser<?> getWrapperParser(String wrapperKey, MappingNode mapping, ParsingContextExecution context) {
        IMappingBuilder builder = this.mappingBuilders.get(wrapperKey.substring(2));
        return builder.buildMapping(mapping, context).getParser();
    }
    // Dispatches a field-mapping tuple: "__<n>" keys are positional
    // mappings, everything else maps a yaml key to a bean path.
    private void map(NodeTuple tuple, TypeNodeParser<?> parser, ParsingContextExecution context) {
        String key = ParserUtils.getScalar(tuple.getKeyNode(), context);
        int positionMappingIndex = positionMappingIndex(key);
        if (positionMappingIndex > -1) {
            mapPositionMapping(positionMappingIndex, tuple.getValueNode(), parser, context);
        } else {
            MappingTarget mappingTarget = getMappingTarget(tuple.getValueNode(), context);
            if (mappingTarget != null) {
                parser.getYamlToObjectMapping().put(key, mappingTarget);
            }
        }
    }
    // A scalar value is a plain bean path parsed with ScalarParser; a
    // mapping node delegates to a registered IMappingBuilder.
    private MappingTarget getMappingTarget(Node mappingNode, ParsingContextExecution context) {
        if (mappingNode instanceof ScalarNode) {
            // create a scalar mapping
            String value = ParserUtils.getScalar(mappingNode, context);
            return new MappingTarget(value, parsers.get(ScalarParser.class.getName()));
        } else if (mappingNode instanceof MappingNode) {
            return mapMappingNode((MappingNode) mappingNode, context);
        }
        return null;
    }
    // Returns the n of a "__n" key, or -1 when the key is not positional.
    private int positionMappingIndex(String key) {
        if (key.startsWith("__")) {
            try {
                int position = Integer.valueOf(key.substring(2));
                return position;
            } catch (NumberFormatException e) {
                // not a position mapping
                return -1;
            }
        }
        return -1;
    }
    // A positional mapping value must be a mapping node with a "value"
    // entry and an optional "key" entry; with "key" present the target is
    // wrapped so the parsed value is stored under that key.
    private void mapPositionMapping(Integer index, Node positionMapping, TypeNodeParser<?> parser, ParsingContextExecution context) {
        if (positionMapping instanceof MappingNode) {
            MappingNode mappingNode = (MappingNode) positionMapping;
            String key = null;
            MappingTarget valueMappingTarget = null;
            for (NodeTuple tuple : mappingNode.getValue()) {
                String tupleKey = ParserUtils.getScalar(tuple.getKeyNode(), context);
                if (tupleKey.equals("key")) {
                    key = ParserUtils.getScalar(tuple.getValueNode(), context);
                } else if (tupleKey.equals("value")) {
                    valueMappingTarget = getMappingTarget(tuple.getValueNode(), context);
                } else {
                    context.getParsingErrors().add(
                            new ParsingError(ErrorCode.SYNTAX_ERROR, "Unknown key for position mapping.", tuple.getKeyNode().getStartMark(), tupleKey, tuple
                                    .getKeyNode().getEndMark(), ""));
                }
            }
            if (valueMappingTarget == null) {
                return;
            }
            if (key == null) {
                parser.getYamlOrderedToObjectMapping().put(index, valueMappingTarget);
            } else {
                parser.getYamlOrderedToObjectMapping().put(index, new KeyValueMappingTarget(key, valueMappingTarget.getPath(), valueMappingTarget.getParser()));
            }
        } else {
            context.getParsingErrors().add(
                    new ParsingError(ErrorCode.SYNTAX_ERROR, "Position mapping must be a mapping node with key and value fields.", positionMapping
                            .getStartMark(), "", positionMapping.getEndMark(), ""));
        }
    }
    // The first key of the mapping node selects the IMappingBuilder; an
    // unknown key is reported as a parsing error and yields null.
    private MappingTarget mapMappingNode(MappingNode mappingNode, ParsingContextExecution context) {
        String key = ParserUtils.getScalar(mappingNode.getValue().get(0).getKeyNode(), context);
        IMappingBuilder mappingBuilder = mappingBuilders.get(key);
        if (mappingBuilder != null) {
            log.debug("Mapping yaml key <" + key + "> using mapping builder " + mappingBuilder.getClass().getName());
            return mappingBuilder.buildMapping(mappingNode, context);
        }
        context.getParsingErrors().add(
                new ParsingError(ErrorCode.SYNTAX_ERROR, "No mapping target found for key", mappingNode.getValue().get(0).getKeyNode().getStartMark(), key,
                        mappingNode.getValue().get(0).getKeyNode().getEndMark(), ""));
        return null;
    }
}
'use strict';
import { module } from 'angular';
import _ from 'lodash';
import { AccountService, ExpectedArtifactService } from '@spinnaker/core';
import { KubernetesProviderSettings } from '../../../kubernetes.settings';
export const KUBERNETES_V1_CLUSTER_CONFIGURE_COMMANDBUILDER = 'spinnaker.kubernetes.clusterCommandBuilder.service';
export const name = KUBERNETES_V1_CLUSTER_CONFIGURE_COMMANDBUILDER; // for backwards compatibility
// Angular factory producing server-group ("cluster") deploy commands for the
// kubernetes v1 provider, both for ad-hoc creation and pipeline stages.
module(KUBERNETES_V1_CLUSTER_CONFIGURE_COMMANDBUILDER, []).factory('kubernetesClusterCommandBuilder', function() {
  // Asynchronously picks a usable kubernetes (v1) account for the command:
  // the provided default when it is a known account, otherwise the
  // application's first kubernetes account, otherwise the first known
  // kubernetes account, otherwise the 'my-account-name' placeholder.
  // Mutates command.account when the promise resolves.
  function attemptToSetValidAccount(application, defaultAccount, command) {
    return AccountService.listAccounts('kubernetes', 'v1').then(function(kubernetesAccounts) {
      const kubernetesAccountNames = _.map(kubernetesAccounts, 'name');
      let firstKubernetesAccount = null;

      if (application.accounts.length) {
        firstKubernetesAccount = _.find(application.accounts, function(applicationAccount) {
          return kubernetesAccountNames.includes(applicationAccount);
        });
      } else if (kubernetesAccountNames.length) {
        firstKubernetesAccount = kubernetesAccountNames[0];
      }

      const defaultAccountIsValid = defaultAccount && kubernetesAccountNames.includes(defaultAccount);

      command.account = defaultAccountIsValid
        ? defaultAccount
        : firstKubernetesAccount
        ? firstKubernetesAccount
        : 'my-account-name';
    });
  }

  // Health providers consulted when judging instance health for this command.
  function applyHealthProviders(application, command) {
    command.interestingHealthProviderNames = ['KubernetesContainer', 'KubernetesPod'];
  }

  // Builds a blank deploy command with provider defaults (one f1-sized pod,
  // autoscaler off, RollingUpdate deployment strategy disabled by default).
  function buildNewClusterCommand(application, defaults = {}) {
    const defaultAccount = defaults.account || KubernetesProviderSettings.defaults.account;

    const command = {
      account: defaultAccount,
      application: application.name,
      strategy: '',
      targetSize: 1,
      cloudProvider: 'kubernetes',
      selectedProvider: 'kubernetes',
      namespace: 'default',
      containers: [],
      initContainers: [],
      volumeSources: [],
      buildImageId: buildImageId,
      groupByRegistry: groupByRegistry,
      terminationGracePeriodSeconds: 30,
      viewState: {
        mode: defaults.mode || 'create',
        disableStrategySelection: true,
        useAutoscaler: false,
      },
      capacity: {
        min: 1,
        desired: 1,
        max: 1,
      },
      scalingPolicy: {
        cpuUtilization: {
          target: 40,
        },
      },
      useSourceCapacity: false,
      deployment: {
        enabled: false,
        minReadySeconds: 0,
        deploymentStrategy: {
          type: 'RollingUpdate',
          rollingUpdate: {
            maxUnavailable: 1,
            maxSurge: 1,
          },
        },
      },
    };

    applyHealthProviders(application, command);
    attemptToSetValidAccount(application, defaultAccount, command);
    return command;
  }

  // Builds a command from an existing server group's deploy description
  // (clone/edit flows). Capacity and scaling policy are backfilled when the
  // source description predates those fields.
  function buildClusterCommandFromExisting(application, existing, mode) {
    mode = mode || 'clone';
    const command = _.cloneDeep(existing.deployDescription);
    command.groupByRegistry = groupByRegistry;
    command.cloudProvider = 'kubernetes';
    command.selectedProvider = 'kubernetes';
    command.account = existing.account;
    command.buildImageId = buildImageId;
    command.strategy = '';

    // Recompute display ids; imageDescription objects round-trip without them.
    command.containers.forEach(container => {
      container.imageDescription.imageId = buildImageId(container.imageDescription);
    });

    command.initContainers.forEach(container => {
      container.imageDescription.imageId = buildImageId(container.imageDescription);
    });

    command.viewState = {
      mode: mode,
      useAutoscaler: !!command.scalingPolicy,
    };

    if (!command.capacity) {
      command.capacity = {
        min: command.targetSize,
        max: command.targetSize,
        desired: command.targetSize,
      };
    }

    if (!_.has(command, 'scalingPolicy.cpuUtilization.target')) {
      command.scalingPolicy = { cpuUtilization: { target: 40 } };
    }

    applyHealthProviders(application, command);
    return command;
  }

  // Grouping label for the container image picker UI.
  function groupByRegistry(container) {
    if (container.imageDescription) {
      if (container.imageDescription.fromContext) {
        return 'Find Image Result(s)';
      } else if (container.imageDescription.fromTrigger) {
        return 'Images from Trigger(s)';
      } else if (container.imageDescription.fromArtifact) {
        return 'Images from Artifact(s)';
      } else {
        return container.imageDescription.registry;
      }
    }
  }

  // Human-readable id for an image description; late-bound sources (bake,
  // trigger, artifact, find-image) get an explanatory suffix.
  function buildImageId(image) {
    if (image.fromFindImage) {
      return `${image.cluster} ${image.pattern}`;
    } else if (image.fromBake) {
      return `${image.repository} (Baked during execution)`;
    } else if (image.fromTrigger && !image.tag) {
      return `${image.registry}/${image.repository} (Tag resolved at runtime)`;
    } else if (image.fromArtifact) {
      return `${image.name} (Artifact resolved at runtime)`;
    } else {
      if (image.registry) {
        return `${image.registry}/${image.repository}:${image.tag}`;
      } else {
        return `${image.repository}:${image.tag}`;
      }
    }
  }

  // Re-matches each container's image against the images currently available
  // upstream (context/trigger/artifact). Containers whose upstream image no
  // longer exists are DROPPED; concrete registry images pass through as-is.
  function reconcileUpstreamImages(containers, upstreamImages) {
    const getConfig = image => {
      if (image.fromContext) {
        return {
          match: other => other.fromContext && other.stageId === image.stageId,
          fieldsToCopy: matchImage => {
            const { cluster, pattern, repository } = matchImage;
            return { cluster, pattern, repository };
          },
        };
      } else if (image.fromTrigger) {
        return {
          match: other =>
            other.fromTrigger &&
            other.registry === image.registry &&
            other.repository === image.repository &&
            other.tag === image.tag,
          fieldsToCopy: () => ({}),
        };
      } else if (image.fromArtifact) {
        return {
          match: other => other.fromArtifact && other.stageId === image.stageId,
          fieldsToCopy: matchImage => {
            const { name } = matchImage;
            return { name };
          },
        };
      } else {
        return {
          skipProcessing: true,
        };
      }
    };

    const result = [];
    containers.forEach(container => {
      const imageDescription = container.imageDescription;
      const imageConfig = getConfig(imageDescription);
      if (imageConfig.skipProcessing) {
        result.push(container);
      } else {
        const matchingImage = upstreamImages.find(imageConfig.match);
        if (matchingImage) {
          Object.assign(imageDescription, imageConfig.fieldsToCopy(matchingImage));
          result.push(container);
        }
      }
    });
    return result;
  }

  // Walks the stage graph backwards from `current`, collecting images
  // produced by findImage and bake stages. `visited` guards against cycles
  // in requisiteStageRefIds.
  function findContextImages(current, all, visited = {}) {
    // This actually indicates a loop in the stage dependencies.
    if (visited[current.refId]) {
      return [];
    } else {
      visited[current.refId] = true;
    }
    let result = [];
    if (current.type === 'findImage') {
      result.push({
        fromContext: true,
        fromFindImage: true,
        cluster: current.cluster,
        pattern: current.imageNamePattern,
        repository: current.name,
        stageId: current.refId,
      });
    } else if (current.type === 'bake') {
      result.push({
        fromContext: true,
        fromBake: true,
        repository: current.ami_name,
        organization: current.organization,
        stageId: current.refId,
      });
    }
    current.requisiteStageRefIds.forEach(function(id) {
      const next = all.find(stage => stage.refId === id);
      if (next) {
        result = result.concat(findContextImages(next, all, visited));
      }
    });

    return result;
  }

  // Images contributed by the pipeline's docker triggers.
  function findTriggerImages(triggers) {
    return triggers
      .filter(trigger => {
        return trigger.type === 'docker';
      })
      .map(trigger => {
        return {
          fromTrigger: true,
          repository: trigger.repository,
          account: trigger.account,
          organization: trigger.organization,
          registry: trigger.registry,
          tag: trigger.tag,
        };
      });
  }

  // Images contributed by expected artifacts of type docker/image that are
  // visible to the current stage.
  function findArtifactImages(currentStage, pipeline) {
    const artifactImages = ExpectedArtifactService.getExpectedArtifactsAvailableToStage(currentStage, pipeline)
      .filter(artifact => artifact.matchArtifact.type === 'docker/image')
      .map(artifact => ({
        fromArtifact: true,
        artifactId: artifact.id,
        name: artifact.matchArtifact.name,
      }));
    return artifactImages;
  }

  // Skeleton command for a brand-new deploy stage in the pipeline editor;
  // template selection still required.
  function buildNewClusterCommandForPipeline(current, pipeline) {
    let contextImages = findContextImages(current, pipeline.stages) || [];
    contextImages = contextImages.concat(findTriggerImages(pipeline.triggers));
    contextImages = contextImages.concat(findArtifactImages(current, pipeline));
    return {
      strategy: '',
      viewState: {
        contextImages: contextImages,
        mode: 'editPipeline',
        submitButtonLabel: 'Done',
        requiresTemplateSelection: true,
        useAutoscaler: false,
      },
    };
  }

  // Rehydrates an existing pipeline deploy-stage command: refreshes the
  // upstream image pool, reconciles containers against it, and rebuilds
  // view state. Works on a deep copy; the original command is not mutated.
  function buildClusterCommandFromPipeline(app, originalCommand, current, pipeline) {
    const command = _.cloneDeep(originalCommand);
    let contextImages = findContextImages(current, pipeline.stages) || [];
    contextImages = contextImages.concat(findTriggerImages(pipeline.triggers));
    contextImages = contextImages.concat(findArtifactImages(current, pipeline));
    command.containers = reconcileUpstreamImages(command.containers, contextImages);
    command.containers.map(container => {
      container.imageDescription.imageId = buildImageId(container.imageDescription);
    });
    command.groupByRegistry = groupByRegistry;
    command.buildImageId = buildImageId;
    command.strategy = command.strategy || '';
    command.selectedProvider = 'kubernetes';
    command.viewState = {
      mode: 'editPipeline',
      contextImages: contextImages,
      submitButtonLabel: 'Done',
      useAutoscaler: !!command.scalingPolicy,
    };
    if (!_.has(command, 'scalingPolicy.cpuUtilization.target')) {
      command.scalingPolicy = { cpuUtilization: { target: 40 } };
    }
    return command;
  }

  // Public surface of the factory.
  return {
    buildNewClusterCommand: buildNewClusterCommand,
    buildClusterCommandFromExisting: buildClusterCommandFromExisting,
    buildNewClusterCommandForPipeline: buildNewClusterCommandForPipeline,
    buildClusterCommandFromPipeline: buildClusterCommandFromPipeline,
    groupByRegistry: groupByRegistry,
    buildImageId: buildImageId,
  };
});
| sgarlick987/deck | app/scripts/modules/kubernetes/src/v1/cluster/configure/CommandBuilder.js | JavaScript | apache-2.0 | 10,658 |
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Creates a VM with the provided name, metadata, and auth scopes."""
COMPUTE_URL_BASE = 'https://www.googleapis.com/compute/v1/'


def GlobalComputeUrl(project, collection, name):
  """Returns the URL of a global Compute Engine resource (e.g. an image)."""
  return ''.join([COMPUTE_URL_BASE, 'projects/', project,
                  '/global/', collection, '/', name])


def ZonalComputeUrl(project, zone, collection, name):
  """Returns the URL of a zonal Compute Engine resource (e.g. a machine type)."""
  return ''.join([COMPUTE_URL_BASE, 'projects/', project,
                  '/zones/', zone, '/', collection, '/', name])


def GenerateConfig(context):
  """Generate configuration.

  Args:
    context: Deployment Manager context. Reads
        context.properties['instanceName'|'zone'|'metadata'|'scopes'] and
        context.env['project'].

  Returns:
    A dict with a single 'resources' entry describing one f1-micro VM with
    the requested metadata and service-account scopes.
  """
  base_name = context.properties['instanceName']

  # Flatten the metadata mapping into the key/value items list the Compute
  # API expects. items() replaces the Python-2-only iteritems() so this
  # template also runs under Python 3 (behavior is identical on 2.x).
  items = []
  for key, value in context.properties['metadata'].items():
    items.append({
        'key': key,
        'value': value
    })
  metadata = {'items': items}

  # Properties for the container-based instance.
  instance = {
      'zone': context.properties['zone'],
      'machineType': ZonalComputeUrl(
          context.env['project'], context.properties['zone'], 'machineTypes',
          'f1-micro'),
      'metadata': metadata,
      'serviceAccounts': [{
          'email': 'default',
          'scopes': context.properties['scopes']
      }],
      'disks': [{
          'deviceName': 'boot',
          'type': 'PERSISTENT',
          'autoDelete': True,
          'boot': True,
          'initializeParams': {
              'diskName': base_name + '-disk',
              'sourceImage': GlobalComputeUrl(
                  'debian-cloud', 'images',
                  ''.join(['backports-debian', '-7-wheezy-v20151104']))
          },
      }],
      'networkInterfaces': [{
          'accessConfigs': [{
              'name': 'external-nat',
              'type': 'ONE_TO_ONE_NAT'
          }],
          'network': GlobalComputeUrl(
              context.env['project'], 'networks', 'default')
      }]
  }

  # Resources and output to return.
  return {
      'resources': [{
          'name': base_name,
          'type': 'compute.v1.instance',
          'properties': instance
      }]
  }
| aljim/deploymentmanager-samples | examples/v2/waiter/instance.py | Python | apache-2.0 | 2,634 |
import app from 'common/electron/app';
import path from 'path';
/**
 * Resolves the bundled css file for the given theme name.
 * @return the theme's css path
 */
function getThemePath (name) {
  const themesDir = path.join(app.getAppPath(), 'themes');
  return path.join(themesDir, name + '.css');
}
/**
 * Resolves the bundled css file for the given style name.
 * @return the style's css path
 */
function getStylePath (name) {
  const stylesDir = path.join(app.getAppPath(), 'styles');
  return path.join(stylesDir, name + '.css');
}
/**
 * Resolves a bundled image file (name includes its extension).
 * @return the image's path
 */
function getImagePath (name) {
  const imagesDir = path.join(app.getAppPath(), 'images');
  return path.join(imagesDir, name);
}
/**
 * Windows only.
 * @return the 'data' directory next to the executable the app is ran from
 */
function getCustomUserDataPath () {
  const exeDir = path.dirname(app.getPath('exe'));
  return path.join(exeDir, 'data');
}
/**
 * Windows only.
 * @return the path to Update.exe created by Squirrel.Windows (one level
 *         above the executable's directory)
 */
function getSquirrelUpdateExePath () {
  const exeDir = path.dirname(app.getPath('exe'));
  return path.join(exeDir, '..', 'Update.exe');
}
// Single default export bundling all path helpers.
export default {
  getThemePath,
  getStylePath,
  getImagePath,
  getCustomUserDataPath,
  getSquirrelUpdateExePath
};
| rafael-neri/whatsapp-webapp | src/scripts/common/utils/file-paths.js | JavaScript | apache-2.0 | 963 |
/**
* Copyright (C) 2015 Born Informatik AG (www.born.ch)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wte4j.impl.service;
import org.wte4j.WteException;
/**
 * Maps JDBC types (as defined in <code>java.sql.Types</code>) to Java types.
 * The mappings have been taken from [1]
 * "JDBC 4.0 Specification, JSR 221, November 7, 2006, Appendix B, Table B-3".
 */
final class MapperSqlType {

	/** Utility class; never instantiated. */
	private MapperSqlType() {
	}

	/**
	 * Returns the Java class used to represent values of the given JDBC type.
	 *
	 * @param jdbcType a type constant from {@link java.sql.Types}
	 * @return the corresponding Java class
	 * @throws WteException if the JDBC type has no defined mapping (binary
	 *         types, NULL, OTHER, JAVA_OBJECT, DISTINCT) or is unknown
	 */
	public static Class<?> map(int jdbcType) {
		switch (jdbcType) {
		case java.sql.Types.BIT:
		case java.sql.Types.BOOLEAN:
			return java.lang.Boolean.class;
		case java.sql.Types.TINYINT:
		case java.sql.Types.SMALLINT:
		case java.sql.Types.INTEGER:
			return java.lang.Integer.class;
		case java.sql.Types.BIGINT:
			return java.lang.Long.class;
		case java.sql.Types.FLOAT:
		case java.sql.Types.DOUBLE:
			return java.lang.Double.class;
		case java.sql.Types.REAL:
			return java.lang.Float.class;
		case java.sql.Types.NUMERIC: // according to [1] Table B-1
		case java.sql.Types.DECIMAL:
			return java.math.BigDecimal.class;
		case java.sql.Types.CHAR:
		case java.sql.Types.VARCHAR:
		case java.sql.Types.LONGVARCHAR:
			return java.lang.String.class;
		case java.sql.Types.DATE:
			return java.sql.Date.class;
		case java.sql.Types.TIME:
			return java.sql.Time.class;
		case java.sql.Types.TIMESTAMP:
			return java.sql.Timestamp.class;
		case java.sql.Types.STRUCT:
			return java.sql.Struct.class;
		case java.sql.Types.ARRAY:
			return java.sql.Array.class;
		case java.sql.Types.BLOB:
			return java.sql.Blob.class;
		case java.sql.Types.CLOB:
			return java.sql.Clob.class;
		case java.sql.Types.REF:
			return java.sql.Ref.class;
		case java.sql.Types.DATALINK:
			return java.net.URL.class;
		case java.sql.Types.ROWID:
			return java.sql.RowId.class;
		case java.sql.Types.NULL:
		case java.sql.Types.OTHER:
		case java.sql.Types.JAVA_OBJECT:
		case java.sql.Types.DISTINCT:
		case java.sql.Types.BINARY:
		case java.sql.Types.VARBINARY:
		case java.sql.Types.LONGVARBINARY:
		default:
			throw new WteException("invalid or unmapped SQL type (" + jdbcType
					+ ")");
		}
	}
}
| bbrehman/wte4j | wte4j-core/src/main/java/org/wte4j/impl/service/MapperSqlType.java | Java | apache-2.0 | 2,743 |
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/compiler/xla/tools/parser/hlo_parser.h"
#include "tensorflow/compiler/xla/literal_util.h"
#include "tensorflow/compiler/xla/service/hlo_opcode.h"
#include "tensorflow/compiler/xla/shape_util.h"
#include "tensorflow/compiler/xla/util.h"
#include "tensorflow/core/lib/gtl/map_util.h"
#include "tensorflow/core/lib/strings/strcat.h"
#include "tensorflow/core/lib/strings/stringprintf.h"
namespace xla {
namespace tools {
namespace {
using tensorflow::StringPiece;
using tensorflow::gtl::optional;
using tensorflow::str_util::Split;
using tensorflow::str_util::SplitAndParseAsInts;
using tensorflow::strings::Printf;
using tensorflow::strings::StrAppend;
using tensorflow::strings::StrCat;
// Largest finite value representable in IEEE half precision (fp16).
const double kF16max = 65504;
// Parser for the HloModule::ToString() format text.
class HloParser {
 public:
  using LocTy = HloLexer::LocTy;

  // Constructs a parser over 'str'; 'config' is used to build the HloModule.
  explicit HloParser(StringPiece str, const HloModuleConfig& config)
      : lexer_(str), config_(config) {}

  // Runs the parser. Returns false if an error occurred.
  bool Run();

  // Returns the parsed HloModule.
  std::unique_ptr<HloModule> ConsumeHloModule() { return std::move(module_); }

  // Returns the error information.
  string GetError() const { return tensorflow::str_util::Join(error_, "\n"); }

 private:
  // ParseXXX returns false if an error occurred.
  bool ParseHloModule();
  bool ParseComputations();
  bool ParseComputation(HloComputation** entry_computation);
  bool ParseInstructionList(HloComputation::Builder* builder,
                            string* root_name);
  bool ParseInstruction(HloComputation::Builder* builder, string* root_name);
  bool ParseControlPredecessors(HloInstruction* instruction);
  bool ParseLiteral(std::unique_ptr<Literal>* literal, const Shape& shape);
  bool ParseTupleLiteral(std::unique_ptr<Literal>* literal, const Shape& shape);
  bool ParseNonTupleLiteral(std::unique_ptr<Literal>* literal,
                            const Shape& shape);

  // Sets the sub-value of literal at the given index to the given value. The
  // literal's shape must have the default layout.
  bool SetValueInLiteral(int64 value, int64 linear_index, Literal* literal);
  bool SetValueInLiteral(double value, int64 linear_index, Literal* literal);
  bool SetValueInLiteral(bool value, int64 linear_index, Literal* literal);
  template <typename LiteralNativeT, typename ParsedElemT>
  bool SetValueInLiteralHelper(ParsedElemT value, int64 linear_index,
                               Literal* literal);

  bool ParseOperands(std::vector<HloInstruction*>* operands);
  // Fills parsed operands into 'operands' and expects a certain number of
  // operands.
  bool ParseOperands(std::vector<HloInstruction*>* operands,
                     const int expected_size);

  // Describes the start, limit, and stride on every dimension of the operand
  // being sliced.
  struct SliceRanges {
    std::vector<int64> starts;
    std::vector<int64> limits;
    std::vector<int64> strides;
  };

  // Types of attributes.
  enum class AttrTy {
    kInt64,
    kInt32,
    kFloat,
    kString,
    kBracedInt64List,
    kHloComputation,
    kWindow,
    kConvolutionDimensionNumbers,
    kSharding,
    kInstructionList,
    kSliceRanges,
    kPaddingConfig,
    kMetadata,
    kFusionKind,
    kDistribution,
  };

  // Describes how a single named attribute should be parsed and where the
  // parsed value is stored.
  struct AttrConfig {
    bool required;     // whether it's required or optional
    AttrTy attr_type;  // what type it is
    void* result;      // where to store the parsed result.
  };

  // attributes ::= (',' attribute)*
  //
  // Parses attributes given names and configs of the attributes. Each parsed
  // result is passed back through the result pointer in corresponding
  // AttrConfig. Note that the result pointer must point to a optional<T> typed
  // variable which outlives this function. Returns false on error. You should
  // not use the any of the results if this function failed.
  //
  // Example usage:
  //
  //  std::unordered_map<string, AttrConfig> attrs;
  //  optional<int64> foo;
  //  attrs["foo"] = {/*required=*/false, AttrTy::kInt64, &foo};
  //  optional<Window> bar;
  //  attrs["bar"] = {/*required=*/true, AttrTy::kWindow, &bar};
  //  if (!ParseAttributes(attrs)) {
  //    return false; // Do not use 'foo' 'bar' if failed.
  //  }
  //  // Do something with 'bar'.
  //  if (foo) { // If attr foo is seen, do something with 'foo'. }
  //
  bool ParseAttributes(const std::unordered_map<string, AttrConfig>& attrs);

  // sub_attributes ::= '{' (','? attribute)* '}'
  //
  // Usage is the same as ParseAttributes. See immediately above.
  bool ParseSubAttributes(const std::unordered_map<string, AttrConfig>& attrs);

  // Parses one attribute. If it has already been seen, return error. Returns
  // true and adds to seen_attrs on success.
  //
  // Do not call this except in ParseAttributes or ParseSubAttributes.
  bool ParseAttributeHelper(const std::unordered_map<string, AttrConfig>& attrs,
                            std::unordered_set<string>* seen_attrs);

  // Parses a name and finds the corresponding hlo computation.
  bool ParseComputationName(HloComputation** value);
  // Parses a list of names and finds the corresponding hlo instructions.
  bool ParseInstructionNames(std::vector<HloInstruction*>* instructions);
  bool ParseWindow(Window* window);
  bool ParseConvolutionDimensionNumbers(ConvolutionDimensionNumbers* dnums);
  bool ParsePaddingConfig(PaddingConfig* padding);
  bool ParseMetadata(OpMetadata* metadata);
  bool ParseSharding(OpSharding* sharding);
  bool ParseSingleSharding(OpSharding* sharding, bool lbrace_pre_lexed);

  // Parses a sub-attribute of the window attribute, e.g.,size=1x2x3.
  bool ParseDxD(const string& name, std::vector<int64>* result);
  // Parses window's pad sub-attriute, e.g., pad=0_0x3x3.
  bool ParseWindowPad(std::vector<std::vector<int64>>* pad);
  bool ParseSliceRanges(SliceRanges* result);
  bool ParseInt64List(const TokKind start, const TokKind end,
                      const TokKind delim, std::vector<int64>* result);
  bool ParseParamListToShape(Shape* shape, LocTy* shape_loc);
  bool ParseParamList();
  bool ParseName(string* result);
  bool ParseAttributeName(string* result);
  bool ParseString(string* result);
  bool ParseShape(Shape* result);
  bool ParseOpcode(HloOpcode* result);
  bool ParseFusionKind(HloInstruction::FusionKind* result);
  bool ParseRandomDistribution(RandomDistribution* result);
  bool ParseInt64(int64* result);
  bool ParseDouble(double* result);
  bool ParseBool(bool* result);
  bool ParseToken(TokKind kind, const string& msg);

  // Returns true if the current token is the beginning of a shape.
  bool CanBeShape();
  // Returns true if the current token is the beginning of a
  // param_list_to_shape.
  bool CanBeParamListToShape();

  // Logs the current parsing line and the given message. Always returns false.
  bool TokenError(StringPiece msg);
  bool Error(LocTy loc, StringPiece msg);

  // If the current token is 'kind', eats it (i.e. lexes the next token) and
  // returns true.
  bool EatIfPresent(TokKind kind);
  // Parses a shape, and returns true if the result is compatible with the given
  // shape.
  bool EatShapeAndCheckCompatible(const Shape& shape);

  // Adds the instruction to the pool. Returns false and emits an error if the
  // instruction already exists.
  bool AddInstruction(const string& name, HloInstruction* instruction,
                      LocTy name_loc);
  // Adds the computation to the pool. Returns false and emits an error if the
  // computation already exists.
  bool AddComputation(const string& name, HloComputation* computation,
                      LocTy name_loc);

  // The map from the instruction name to the instruction. This does not own the
  // instructions.
  std::unordered_map<string, HloInstruction*> instruction_pool_;
  // The map from the computation name to the computation. This does not own
  // the computations.
  std::unordered_map<string, HloComputation*> computation_pool_;

  // Lexer over the input text; supplies tokens and source locations.
  HloLexer lexer_;
  // The module being built; set by ParseHloModule().
  std::unique_ptr<HloModule> module_;
  // Parsed computations in source order; ownership is later transferred to
  // module_ in ParseComputations().
  std::vector<std::unique_ptr<HloComputation>> computations_;
  const HloModuleConfig config_;
  // Accumulated formatted error reports, one entry per error.
  std::vector<string> error_;
};
// Records a formatted error report at 'loc' and always returns false, so
// callers can write 'return Error(...)'.
bool HloParser::Error(LocTy loc, StringPiece msg) {
  // Resolve the location into a line/column pair for the report.
  const auto line_col = lexer_.GetLineAndColumn(loc);
  const unsigned line = line_col.first;
  const unsigned col = line_col.second;

  // The report has three parts: a summary line, the offending source line,
  // and a caret marker under the error column.
  std::vector<string> pieces;
  pieces.push_back(StrCat("was parsing ", line, ":", col, ": error: ", msg));
  pieces.push_back(lexer_.GetLine(loc).ToString());
  if (col == 0) {
    pieces.push_back("");
  } else {
    pieces.push_back(StrCat(string(col - 1, ' '), "^"));
  }

  error_.push_back(tensorflow::str_util::Join(pieces, "\n"));
  VLOG(1) << "Error: " << error_.back();
  return false;
}
// Reports an error at the lexer's current location. Always returns false.
bool HloParser::TokenError(StringPiece msg) {
  const LocTy current_loc = lexer_.GetLoc();
  return Error(current_loc, msg);
}
// Entry point: primes the lexer with the first token, then parses the whole
// module. Returns false if any parse error occurred.
bool HloParser::Run() {
  lexer_.Lex();
  const bool parsed_ok = ParseHloModule();
  return parsed_ok;
}
// ::= 'HloModule' name computations
bool HloParser::ParseHloModule() {
if (lexer_.GetKind() != TokKind::kw_HloModule) {
return TokenError("expects HloModule");
}
// Eat 'HloModule'
lexer_.Lex();
string name;
if (!ParseName(&name)) {
return false;
}
module_ = MakeUnique<HloModule>(name, config_);
return ParseComputations();
}
// computations ::= (computation)+
bool HloParser::ParseComputations() {
  HloComputation* entry_computation = nullptr;
  // Parse computations until end of input; at least one is required.
  do {
    if (!ParseComputation(&entry_computation)) {
      return false;
    }
  } while (lexer_.GetKind() != TokKind::kEof);

  // Transfer ownership of the parsed computations to the module.
  for (int i = 0; i < computations_.size(); i++) {
    // If entry_computation is not nullptr, it means the computation it pointed
    // to is marked with "ENTRY"; otherwise, no computation is marked with
    // "ENTRY", and we use the last computation as the entry computation. We
    // add the non-entry computations as embedded computations to the module.
    if ((entry_computation != nullptr &&
         computations_[i].get() != entry_computation) ||
        (entry_computation == nullptr && i != computations_.size() - 1)) {
      module_->AddEmbeddedComputation(std::move(computations_[i]));
      continue;
    }
    auto computation =
        module_->AddEntryComputation(std::move(computations_[i]));
    // The parameters and result layouts were set to default layout. Here we
    // set the layouts to what the hlo text says.
    for (int p = 0; p < computation->num_parameters(); p++) {
      const Shape& param_shape = computation->parameter_instruction(p)->shape();
      if (param_shape.has_layout()) {
        module_->mutable_entry_computation_layout()
            ->mutable_parameter_layout(p)
            ->ResetLayout(param_shape.layout());
      }
    }
    // Likewise for the entry computation's result layout.
    const Shape& result_shape = computation->root_instruction()->shape();
    if (result_shape.has_layout()) {
      module_->mutable_entry_computation_layout()
          ->mutable_result_layout()
          ->ResetLayout(result_shape.layout());
    }
  }
  return true;
}
// computation ::= ('ENTRY')? name (param_list_to_shape)? instruction_list
bool HloParser::ParseComputation(HloComputation** entry_computation) {
  LocTy maybe_entry_loc = lexer_.GetLoc();
  // An optional leading 'ENTRY' keyword marks this as the entry computation.
  const bool is_entry_computation = EatIfPresent(TokKind::kw_ENTRY);

  string name;
  LocTy name_loc = lexer_.GetLoc();
  if (!ParseName(&name)) {
    return false;
  }

  auto builder = MakeUnique<HloComputation::Builder>(name);

  // Optional param_list_to_shape; shape_loc stays nullptr if it was absent.
  LocTy shape_loc = nullptr;
  Shape shape;
  if (CanBeParamListToShape() && !ParseParamListToShape(&shape, &shape_loc)) {
    return false;
  }

  string root_name;
  if (!ParseInstructionList(builder.get(), &root_name)) {
    return false;
  }

  HloInstruction* root =
      tensorflow::gtl::FindPtrOrNull(instruction_pool_, root_name);
  // This means some instruction was marked as ROOT but we didn't find it in the
  // pool, which should not happen.
  if (!root_name.empty() && root == nullptr) {
    LOG(FATAL) << "instruction " << root_name
               << " was marked as ROOT but the parser has not seen it before";
  }
  // Now root can be either an existing instruction or a nullptr. If it's a
  // nullptr, the implementation of Builder will set the last instruction as
  // root instruction.
  computations_.emplace_back(builder->Build(root));
  HloComputation* computation = computations_.back().get();

  if (!root) {
    root = computation->root_instruction();
  } else {
    CHECK_EQ(root, computation->root_instruction());
  }

  // If param_list_to_shape was present, check compatibility.
  if (shape_loc != nullptr && !ShapeUtil::Compatible(root->shape(), shape)) {
    return Error(
        shape_loc,
        StrCat("Shape of computation ", name, ", ",
               ShapeUtil::HumanString(shape),
               ", is not compatible with that of its root instruction ",
               root_name, ", ", ShapeUtil::HumanString(root->shape())));
  }

  // Only one computation in the module may be marked ENTRY.
  if (is_entry_computation) {
    if (*entry_computation != nullptr) {
      return Error(maybe_entry_loc, "expects only one ENTRY");
    }
    *entry_computation = computation;
  }

  return AddComputation(name, computation, name_loc);
}
// instruction_list ::= '{' instruction_list1 '}'
// instruction_list1 ::= (instruction)+
// Parses a brace-wrapped list of one or more instructions into 'builder'.
// 'root_name' receives the name of the instruction marked ROOT, if any.
bool HloParser::ParseInstructionList(HloComputation::Builder* builder,
                                     string* root_name) {
  if (!ParseToken(TokKind::kLbrace,
                  "expects '{' at the beginning of instruction list.")) {
    return false;
  }
  // At least one instruction is required; keep parsing until the closing
  // brace is the next token.
  while (true) {
    if (!ParseInstruction(builder, root_name)) {
      return false;
    }
    if (lexer_.GetKind() == TokKind::kRbrace) {
      break;
    }
  }
  return ParseToken(TokKind::kRbrace,
                    "expects '}' at the end of instruction list.");
}
// instruction ::= ('ROOT')? name '=' shape opcode operands (attribute)*
// Parses a single instruction line ('name = shape opcode operands attrs') and
// adds the resulting HloInstruction to 'builder'. Per-opcode branches below
// register any opcode-specific attributes before parsing operands/attributes.
bool HloParser::ParseInstruction(HloComputation::Builder* builder,
                                 string* root_name) {
  string name;
  Shape shape;
  HloOpcode opcode;
  std::vector<HloInstruction*> operands;

  LocTy maybe_root_loc = lexer_.GetLoc();
  bool is_root = EatIfPresent(TokKind::kw_ROOT);

  const LocTy name_loc = lexer_.GetLoc();
  if (!ParseName(&name) ||
      !ParseToken(TokKind::kEqual, "expects '=' in instruction") ||
      !ParseShape(&shape) || !ParseOpcode(&opcode)) {
    return false;
  }

  // At most one instruction per computation may carry the ROOT marker.
  if (is_root) {
    if (!root_name->empty()) {
      return Error(maybe_root_loc, "one computation should have only one ROOT");
    }
    *root_name = name;
  }

  // Add optional attributes.
  // These three attributes are accepted on every opcode; opcode-specific
  // attributes are added to 'attrs' inside the switch below.
  std::unordered_map<string, AttrConfig> attrs;
  optional<OpSharding> sharding;
  attrs["sharding"] = {/*required=*/false, AttrTy::kSharding, &sharding};
  optional<std::vector<HloInstruction*>> predecessors;
  attrs["control-predecessors"] = {/*required=*/false, AttrTy::kInstructionList,
                                   &predecessors};
  optional<OpMetadata> metadata;
  attrs["metadata"] = {/*required=*/false, AttrTy::kMetadata, &metadata};

  HloInstruction* instruction;
  switch (opcode) {
    case HloOpcode::kParameter: {
      int64 parameter_number;
      if (!ParseToken(TokKind::kLparen,
                      "expects '(' before parameter number") ||
          !ParseInt64(&parameter_number) ||
          !ParseToken(TokKind::kRparen, "expects ')' after parameter number") ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(
          HloInstruction::CreateParameter(parameter_number, shape, name));
      break;
    }
    case HloOpcode::kConstant: {
      std::unique_ptr<Literal> literal;
      if (!ParseToken(TokKind::kLparen,
                      "expects '(' before constant literal") ||
          !ParseLiteral(&literal, shape) ||
          !ParseToken(TokKind::kRparen, "expects ')' after constant literal") ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(
          HloInstruction::CreateConstant(std::move(literal)));
      break;
    }
    // Unary ops.
    case HloOpcode::kAbs:
    case HloOpcode::kRoundNearestAfz:
    case HloOpcode::kBitcast:
    case HloOpcode::kCeil:
    case HloOpcode::kCopy:
    case HloOpcode::kCos:
    case HloOpcode::kExp:
    case HloOpcode::kImag:
    case HloOpcode::kIsFinite:
    case HloOpcode::kFloor:
    case HloOpcode::kLog:
    case HloOpcode::kNot:
    case HloOpcode::kNegate:
    case HloOpcode::kReal:
    case HloOpcode::kSign:
    case HloOpcode::kSin:
    case HloOpcode::kSort:
    case HloOpcode::kTanh: {
      if (!ParseOperands(&operands, /*expected_size=*/1) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(
          HloInstruction::CreateUnary(shape, opcode, operands[0]));
      break;
    }
    // Binary ops.
    case HloOpcode::kAdd:
    case HloOpcode::kDivide:
    case HloOpcode::kMultiply:
    case HloOpcode::kSubtract:
    case HloOpcode::kAtan2:
    case HloOpcode::kComplex:
    case HloOpcode::kEq:
    case HloOpcode::kGe:
    case HloOpcode::kGt:
    case HloOpcode::kLe:
    case HloOpcode::kLt:
    case HloOpcode::kNe:
    case HloOpcode::kMaximum:
    case HloOpcode::kMinimum:
    case HloOpcode::kPower:
    case HloOpcode::kRemainder:
    case HloOpcode::kAnd:
    case HloOpcode::kOr:
    case HloOpcode::kShiftLeft:
    case HloOpcode::kShiftRightArithmetic:
    case HloOpcode::kShiftRightLogical: {
      if (!ParseOperands(&operands, /*expected_size=*/2) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(HloInstruction::CreateBinary(
          shape, opcode, operands[0], operands[1]));
      break;
    }
    // Ternary ops.
    case HloOpcode::kClamp:
    case HloOpcode::kSelect: {
      if (!ParseOperands(&operands, /*expected_size=*/3) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(HloInstruction::CreateTernary(
          shape, opcode, operands[0], operands[1], operands[2]));
      break;
    }
    // Other supported ops.
    case HloOpcode::kConvert: {
      if (!ParseOperands(&operands, /*expected_size=*/1) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(
          HloInstruction::CreateConvert(shape, operands[0]));
      break;
    }
    case HloOpcode::kBitcastConvert: {
      if (!ParseOperands(&operands, /*expected_size=*/1) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(
          HloInstruction::CreateBitcastConvert(shape, operands[0]));
      break;
    }
    case HloOpcode::kCrossReplicaSum: {
      if (!ParseOperands(&operands) || !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(
          HloInstruction::CreateCrossReplicaSum(shape, operands));
      break;
    }
    case HloOpcode::kReshape: {
      if (!ParseOperands(&operands, /*expected_size=*/1) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(
          HloInstruction::CreateReshape(shape, operands[0]));
      break;
    }
    case HloOpcode::kTuple: {
      if (!ParseOperands(&operands) || !ParseAttributes(attrs)) {
        return false;
      }
      instruction =
          builder->AddInstruction(HloInstruction::CreateTuple(operands));
      break;
    }
    case HloOpcode::kWhile: {
      optional<HloComputation*> condition;
      optional<HloComputation*> body;
      attrs["condition"] = {/*required=*/true, AttrTy::kHloComputation,
                            &condition};
      attrs["body"] = {/*required=*/true, AttrTy::kHloComputation, &body};
      if (!ParseOperands(&operands, /*expected_size=*/1) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(HloInstruction::CreateWhile(
          shape, *condition, *body, /*init=*/operands[0]));
      break;
    }
    case HloOpcode::kRecv: {
      optional<int64> channel_id;
      attrs["channel_id"] = {/*required=*/true, AttrTy::kInt64, &channel_id};
      if (!ParseOperands(&operands, /*expected_size=*/0) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(
          HloInstruction::CreateRecv(shape.tuple_shapes(0), *channel_id));
      break;
    }
    case HloOpcode::kRecvDone: {
      optional<int64> channel_id;
      attrs["channel_id"] = {/*required=*/true, AttrTy::kInt64, &channel_id};
      if (!ParseOperands(&operands, /*expected_size=*/1) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      // The recv-done's channel id must match its recv operand's.
      // NOTE(review): this mismatch path returns false without emitting an
      // error message via Error()/TokenError().
      if (channel_id != operands[0]->channel_id()) {
        return false;
      }
      instruction =
          builder->AddInstruction(HloInstruction::CreateRecvDone(operands[0]));
      break;
    }
    case HloOpcode::kSend: {
      optional<int64> channel_id;
      attrs["channel_id"] = {/*required=*/true, AttrTy::kInt64, &channel_id};
      if (!ParseOperands(&operands, /*expected_size=*/1) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(
          HloInstruction::CreateSend(operands[0], *channel_id));
      break;
    }
    case HloOpcode::kSendDone: {
      optional<int64> channel_id;
      attrs["channel_id"] = {/*required=*/true, AttrTy::kInt64, &channel_id};
      if (!ParseOperands(&operands, /*expected_size=*/1) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      // The send-done's channel id must match its send operand's.
      // NOTE(review): as with kRecvDone, this fails without an error message.
      if (channel_id != operands[0]->channel_id()) {
        return false;
      }
      instruction =
          builder->AddInstruction(HloInstruction::CreateSendDone(operands[0]));
      break;
    }
    case HloOpcode::kGetTupleElement: {
      optional<int64> index;
      attrs["index"] = {/*required=*/true, AttrTy::kInt64, &index};
      if (!ParseOperands(&operands, /*expected_size=*/1) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(
          HloInstruction::CreateGetTupleElement(shape, operands[0], *index));
      break;
    }
    case HloOpcode::kCall: {
      optional<HloComputation*> to_apply;
      attrs["to_apply"] = {/*required=*/true, AttrTy::kHloComputation,
                           &to_apply};
      if (!ParseOperands(&operands) || !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(
          HloInstruction::CreateCall(shape, operands, *to_apply));
      break;
    }
    case HloOpcode::kReduceWindow: {
      optional<HloComputation*> reduce_computation;
      optional<Window> window;
      attrs["window"] = {/*required=*/false, AttrTy::kWindow, &window};
      attrs["to_apply"] = {/*required=*/true, AttrTy::kHloComputation,
                           &reduce_computation};
      if (!ParseOperands(&operands, /*expected_size=*/2) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      // A missing window attribute means a default-constructed Window.
      if (!window) {
        window.emplace();
      }
      instruction = builder->AddInstruction(HloInstruction::CreateReduceWindow(
          shape, /*operand=*/operands[0], /*init_value=*/operands[1], *window,
          *reduce_computation));
      break;
    }
    case HloOpcode::kConvolution: {
      optional<Window> window;
      optional<ConvolutionDimensionNumbers> dnums;
      attrs["window"] = {/*required=*/false, AttrTy::kWindow, &window};
      attrs["dim_labels"] = {/*required=*/true,
                             AttrTy::kConvolutionDimensionNumbers, &dnums};
      if (!ParseOperands(&operands, /*expected_size=*/2) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      if (!window) {
        window.emplace();
      }
      instruction = builder->AddInstruction(HloInstruction::CreateConvolve(
          shape, /*lhs=*/operands[0], /*rhs=*/operands[1], *window, *dnums));
      break;
    }
    case HloOpcode::kBroadcast: {
      optional<std::vector<int64>> broadcast_dimensions;
      attrs["dimensions"] = {/*required=*/true, AttrTy::kBracedInt64List,
                             &broadcast_dimensions};
      if (!ParseOperands(&operands, /*expected_size=*/1) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(HloInstruction::CreateBroadcast(
          shape, operands[0], *broadcast_dimensions));
      break;
    }
    case HloOpcode::kConcatenate: {
      optional<std::vector<int64>> dimensions;
      attrs["dimensions"] = {/*required=*/true, AttrTy::kBracedInt64List,
                             &dimensions};
      // Concatenate requires exactly one dimension.
      // NOTE(review): a wrong dimension count returns false without an error
      // message.
      if (!ParseOperands(&operands) || !ParseAttributes(attrs) ||
          dimensions->size() != 1) {
        return false;
      }
      instruction = builder->AddInstruction(HloInstruction::CreateConcatenate(
          shape, operands, dimensions->at(0)));
      break;
    }
    case HloOpcode::kMap: {
      optional<HloComputation*> to_apply;
      attrs["to_apply"] = {/*required=*/true, AttrTy::kHloComputation,
                           &to_apply};
      if (!ParseOperands(&operands) || !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(
          HloInstruction::CreateMap(shape, operands, *to_apply));
      break;
    }
    case HloOpcode::kReduce: {
      optional<HloComputation*> reduce_computation;
      attrs["to_apply"] = {/*required=*/true, AttrTy::kHloComputation,
                           &reduce_computation};
      optional<std::vector<int64>> dimensions_to_reduce;
      attrs["dimensions"] = {/*required=*/true, AttrTy::kBracedInt64List,
                             &dimensions_to_reduce};
      if (!ParseOperands(&operands, /*expected_size=*/2) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(HloInstruction::CreateReduce(
          shape, /*operand=*/operands[0], /*init_value=*/operands[1],
          *dimensions_to_reduce, *reduce_computation));
      break;
    }
    case HloOpcode::kReverse: {
      optional<std::vector<int64>> dimensions;
      attrs["dimensions"] = {/*required=*/true, AttrTy::kBracedInt64List,
                             &dimensions};
      if (!ParseOperands(&operands, /*expected_size=*/1) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(
          HloInstruction::CreateReverse(shape, operands[0], *dimensions));
      break;
    }
    case HloOpcode::kSelectAndScatter: {
      optional<HloComputation*> select;
      attrs["select"] = {/*required=*/true, AttrTy::kHloComputation, &select};
      optional<HloComputation*> scatter;
      attrs["scatter"] = {/*required=*/true, AttrTy::kHloComputation, &scatter};
      optional<Window> window;
      attrs["window"] = {/*required=*/false, AttrTy::kWindow, &window};
      if (!ParseOperands(&operands, /*expected_size=*/3) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      if (!window) {
        window.emplace();
      }
      instruction =
          builder->AddInstruction(HloInstruction::CreateSelectAndScatter(
              shape, /*operand=*/operands[0], *select, *window,
              /*source=*/operands[1], /*init_value=*/operands[2], *scatter));
      break;
    }
    case HloOpcode::kSlice: {
      optional<SliceRanges> slice_ranges;
      attrs["slice"] = {/*required=*/true, AttrTy::kSliceRanges, &slice_ranges};
      if (!ParseOperands(&operands, /*expected_size=*/1) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(HloInstruction::CreateSlice(
          shape, operands[0], slice_ranges->starts, slice_ranges->limits,
          slice_ranges->strides));
      break;
    }
    case HloOpcode::kDynamicSlice: {
      optional<std::vector<int64>> dynamic_slice_sizes;
      attrs["dynamic_slice_sizes"] = {
          /*required=*/true, AttrTy::kBracedInt64List, &dynamic_slice_sizes};
      if (!ParseOperands(&operands, /*expected_size=*/2) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(HloInstruction::CreateDynamicSlice(
          shape, /*operand=*/operands[0], /*start_indices=*/operands[1],
          *dynamic_slice_sizes));
      break;
    }
    case HloOpcode::kDynamicUpdateSlice: {
      if (!ParseOperands(&operands, /*expected_size=*/3) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction =
          builder->AddInstruction(HloInstruction::CreateDynamicUpdateSlice(
              shape, /*operand=*/operands[0], /*update=*/operands[1],
              /*start_indices=*/operands[2]));
      break;
    }
    case HloOpcode::kTranspose: {
      optional<std::vector<int64>> dimensions;
      attrs["dimensions"] = {/*required=*/true, AttrTy::kBracedInt64List,
                             &dimensions};
      if (!ParseOperands(&operands, /*expected_size=*/1) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(
          HloInstruction::CreateTranspose(shape, operands[0], *dimensions));
      break;
    }
    case HloOpcode::kBatchNormTraining: {
      optional<float> epsilon;
      attrs["epsilon"] = {/*required=*/true, AttrTy::kFloat, &epsilon};
      optional<int64> feature_index;
      attrs["feature_index"] = {/*required=*/true, AttrTy::kInt64,
                                &feature_index};
      if (!ParseOperands(&operands, /*expected_size=*/3) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction =
          builder->AddInstruction(HloInstruction::CreateBatchNormTraining(
              shape, /*operand=*/operands[0], /*scale=*/operands[1],
              /*offset=*/operands[2], *epsilon, *feature_index));
      break;
    }
    case HloOpcode::kBatchNormInference: {
      optional<float> epsilon;
      attrs["epsilon"] = {/*required=*/true, AttrTy::kFloat, &epsilon};
      optional<int64> feature_index;
      attrs["feature_index"] = {/*required=*/true, AttrTy::kInt64,
                                &feature_index};
      if (!ParseOperands(&operands, /*expected_size=*/5) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction =
          builder->AddInstruction(HloInstruction::CreateBatchNormInference(
              shape, /*operand=*/operands[0], /*scale=*/operands[1],
              /*offset=*/operands[2], /*mean=*/operands[3],
              /*variance=*/operands[4], *epsilon, *feature_index));
      break;
    }
    case HloOpcode::kBatchNormGrad: {
      optional<float> epsilon;
      attrs["epsilon"] = {/*required=*/true, AttrTy::kFloat, &epsilon};
      optional<int64> feature_index;
      attrs["feature_index"] = {/*required=*/true, AttrTy::kInt64,
                                &feature_index};
      if (!ParseOperands(&operands, /*expected_size=*/5) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(HloInstruction::CreateBatchNormGrad(
          shape, /*operand=*/operands[0], /*scale=*/operands[1],
          /*mean=*/operands[2], /*variance=*/operands[3],
          /*grad_output=*/operands[4], *epsilon, *feature_index));
      break;
    }
    case HloOpcode::kPad: {
      optional<PaddingConfig> padding;
      attrs["padding"] = {/*required=*/true, AttrTy::kPaddingConfig, &padding};
      if (!ParseOperands(&operands, /*expected_size=*/2) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(HloInstruction::CreatePad(
          shape, operands[0], /*padding_value=*/operands[1], *padding));
      break;
    }
    case HloOpcode::kFusion: {
      optional<HloComputation*> fusion_computation;
      attrs["calls"] = {/*required=*/true, AttrTy::kHloComputation,
                        &fusion_computation};
      optional<HloInstruction::FusionKind> fusion_kind;
      attrs["kind"] = {/*required=*/true, AttrTy::kFusionKind, &fusion_kind};
      if (!ParseOperands(&operands) || !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(HloInstruction::CreateFusion(
          shape, *fusion_kind, operands, *fusion_computation));
      break;
    }
    case HloOpcode::kInfeed: {
      optional<string> config;
      attrs["infeed_config"] = {/*required=*/false, AttrTy::kString, &config};
      if (!ParseOperands(&operands, /*expected_size=*/0) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(
          HloInstruction::CreateInfeed(shape, config ? *config : ""));
      break;
    }
    case HloOpcode::kOutfeed: {
      optional<string> config;
      attrs["outfeed_config"] = {/*required=*/false, AttrTy::kString, &config};
      if (!ParseOperands(&operands, /*expected_size=*/1) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(HloInstruction::CreateOutfeed(
          shape, operands[0], config ? *config : ""));
      break;
    }
    case HloOpcode::kRng: {
      optional<RandomDistribution> distribution;
      attrs["distribution"] = {/*required=*/true, AttrTy::kDistribution,
                               &distribution};
      if (!ParseOperands(&operands) || !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(
          HloInstruction::CreateRng(shape, *distribution, operands));
      break;
    }
    case HloOpcode::kReducePrecision: {
      optional<int64> exponent_bits;
      optional<int64> mantissa_bits;
      attrs["exponent_bits"] = {/*required=*/true, AttrTy::kInt64,
                                &exponent_bits};
      attrs["mantissa_bits"] = {/*required=*/true, AttrTy::kInt64,
                                &mantissa_bits};
      if (!ParseOperands(&operands, /*expected_size=*/1) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction =
          builder->AddInstruction(HloInstruction::CreateReducePrecision(
              shape, operands[0], static_cast<int>(*exponent_bits),
              static_cast<int>(*mantissa_bits)));
      break;
    }
    case HloOpcode::kConditional: {
      optional<HloComputation*> true_computation;
      optional<HloComputation*> false_computation;
      attrs["true_computation"] = {/*required=*/true, AttrTy::kHloComputation,
                                   &true_computation};
      attrs["false_computation"] = {/*required=*/true, AttrTy::kHloComputation,
                                    &false_computation};
      if (!ParseOperands(&operands, /*expected_size=*/3) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(HloInstruction::CreateConditional(
          shape, /*pred=*/operands[0],
          /*true_computation_arg=*/operands[1], *true_computation,
          /*false_computation_arg=*/operands[2], *false_computation));
      break;
    }
    case HloOpcode::kCustomCall: {
      optional<string> custom_call_target;
      attrs["custom_call_target"] = {/*required=*/true, AttrTy::kString,
                                     &custom_call_target};
      if (!ParseOperands(&operands) || !ParseAttributes(attrs)) {
        return false;
      }
      instruction = builder->AddInstruction(HloInstruction::CreateCustomCall(
          shape, operands, *custom_call_target));
      break;
    }
    case HloOpcode::kDot: {
      optional<std::vector<int64>> lhs_contracting_dims;
      attrs["lhs_contracting_dims"] = {
          /*required=*/false, AttrTy::kBracedInt64List, &lhs_contracting_dims};
      optional<std::vector<int64>> rhs_contracting_dims;
      attrs["rhs_contracting_dims"] = {
          /*required=*/false, AttrTy::kBracedInt64List, &rhs_contracting_dims};
      optional<std::vector<int64>> lhs_batch_dims;
      attrs["lhs_batch_dims"] = {/*required=*/false, AttrTy::kBracedInt64List,
                                 &lhs_batch_dims};
      optional<std::vector<int64>> rhs_batch_dims;
      attrs["rhs_batch_dims"] = {/*required=*/false, AttrTy::kBracedInt64List,
                                 &rhs_batch_dims};
      if (!ParseOperands(&operands, /*expected_size=*/2) ||
          !ParseAttributes(attrs)) {
        return false;
      }
      // Assemble the dimension numbers from whichever attributes were given;
      // absent attributes leave the corresponding proto fields empty.
      DotDimensionNumbers dnum;
      if (lhs_contracting_dims) {
        *dnum.mutable_lhs_contracting_dimensions() = {
            lhs_contracting_dims->begin(), lhs_contracting_dims->end()};
      }
      if (rhs_contracting_dims) {
        *dnum.mutable_rhs_contracting_dimensions() = {
            rhs_contracting_dims->begin(), rhs_contracting_dims->end()};
      }
      if (lhs_batch_dims) {
        *dnum.mutable_lhs_batch_dimensions() = {lhs_batch_dims->begin(),
                                                lhs_batch_dims->end()};
      }
      if (rhs_batch_dims) {
        *dnum.mutable_rhs_batch_dimensions() = {rhs_batch_dims->begin(),
                                                rhs_batch_dims->end()};
      }
      instruction = builder->AddInstruction(
          HloInstruction::CreateDot(shape, operands[0], operands[1], dnum));
      break;
    }
    case HloOpcode::kTrace:
      return TokenError(StrCat("parsing not yet implemented for op: ",
                               HloOpcodeString(opcode)));
  }

  instruction->set_name(name);

  // Add common attrs (sharding, control predecessors) to the instruction, if
  // they were seen.
  if (sharding) {
    instruction->set_sharding(
        HloSharding::FromProto(sharding.value()).ValueOrDie());
  }
  if (predecessors) {
    for (auto* pre : *predecessors) {
      Status status = pre->AddControlDependencyTo(instruction);
      if (!status.ok()) {
        return Error(name_loc, StrCat("error adding control dependency for: ",
                                      name, " status: ", status.ToString()));
      }
    }
  }
  if (metadata) {
    instruction->set_metadata(*metadata);
  }
  return AddInstruction(name, instruction, name_loc);
}  // NOLINT(readability/fn_size)
// ::= '{' (single_sharding | tuple_sharding) '}'
//
// tuple_sharding ::= single_sharding* (',' single_sharding)*
bool HloParser::ParseSharding(OpSharding* sharding) {
  // A single sharding starts with '{' and is not followed by '{'.
  // A tuple sharding starts with '{' and is followed by '{', or is '{''}' for
  // an empty tuple.
  if (!ParseToken(TokKind::kLbrace,
                  "expected '{' to start sharding attribute")) {
    return false;
  }

  // Single (non-tuple) sharding: delegate with the '{' already consumed.
  if (lexer_.GetKind() != TokKind::kLbrace &&
      lexer_.GetKind() != TokKind::kRbrace) {
    return ParseSingleSharding(sharding, /*lbrace_pre_lexed=*/true);
  }

  // Tuple sharding.
  // Allow empty tuple shardings.
  if (lexer_.GetKind() != TokKind::kRbrace) {
    // Comma-separated list of element shardings.
    do {
      if (!ParseSingleSharding(sharding->add_tuple_shardings(),
                               /*lbrace_pre_lexed=*/false)) {
        return false;
      }
    } while (EatIfPresent(TokKind::kComma));
  }
  sharding->set_type(OpSharding::Type::OpSharding_Type_TUPLE);

  return ParseToken(TokKind::kRbrace, "expected '}' to end sharding attribute");
}
// ::= '{' 'replicated'? 'maximal'? ('device=' int)? shape?
//         ('devices=' ('[' dims ']')* device_list)? '}'
// dims ::= int_list  device_list ::= int_list
//
// Parses one non-tuple sharding into *sharding. If lbrace_pre_lexed is true,
// the caller has already consumed the opening '{'. The sub-attributes may
// appear in any order; their combination is validated at the end.
bool HloParser::ParseSingleSharding(OpSharding* sharding,
                                    bool lbrace_pre_lexed) {
  if (!lbrace_pre_lexed &&
      !ParseToken(TokKind::kLbrace,
                  "expected '{' to start sharding attribute")) {
    return false;
  }
  LocTy loc = lexer_.GetLoc();
  bool maximal = false;
  bool replicated = false;
  std::vector<int64> devices;
  std::vector<int64> tile_assignment_dimensions;
  Shape tile_shape;
  // Accumulate sub-attributes until the closing '}'.
  while (lexer_.GetKind() != TokKind::kRbrace) {
    switch (lexer_.GetKind()) {
      case TokKind::kw_maximal:
        maximal = true;
        lexer_.Lex();
        break;
      case TokKind::kw_replicated:
        replicated = true;
        lexer_.Lex();
        break;
      case TokKind::kAttributeName: {
        if (lexer_.GetStrVal() == "device") {
          // 'device=' assigns the single device of a maximal sharding.
          if (lexer_.Lex() != TokKind::kInt) {
            return TokenError("device= attribute must be an integer");
          }
          devices = {lexer_.GetInt64Val()};
          lexer_.Lex();
        } else if (lexer_.GetStrVal() == "devices") {
          // 'devices=[dims]list': bracketed tile-assignment dimensions
          // followed by the flattened, comma-separated device list.
          lexer_.Lex();
          if (!ParseToken(TokKind::kLsquare,
                          "expected '[' to start sharding devices shape")) {
            return false;
          }
          do {
            int64 dim;
            if (!ParseInt64(&dim)) {
              return false;
            }
            tile_assignment_dimensions.push_back(dim);
          } while (EatIfPresent(TokKind::kComma));
          if (!ParseToken(TokKind::kRsquare,
                          "expected ']' to start sharding devices shape")) {
            return false;
          }
          do {
            int64 device;
            if (!ParseInt64(&device)) {
              return false;
            }
            devices.push_back(device);
          } while (EatIfPresent(TokKind::kComma));
        } else {
          return TokenError(
              "unknown attribute in sharding: expected device= or devices=");
        }
        break;
      }
      case TokKind::kShape:
        // Optional tile shape for a tiled (OTHER) sharding.
        tile_shape = lexer_.GetShapeVal();
        lexer_.Lex();
        break;
      case TokKind::kRbrace:
        break;
      default:
        return TokenError("unexpected token");
    }
  }
  // Validate the collected sub-attributes and fill in the proto. Exactly one
  // of replicated / maximal / tiled is expressed.
  if (replicated) {
    if (!devices.empty()) {
      return Error(loc,
                   "replicated shardings should not have any devices assigned");
    }
    if (!ShapeUtil::Equal(tile_shape, Shape())) {
      return Error(loc,
                   "replicated shardings should not have any tile shape set");
    }
    sharding->set_type(OpSharding::Type::OpSharding_Type_REPLICATED);
  } else if (maximal) {
    if (devices.size() != 1) {
      return Error(loc,
                   "maximal shardings should have exactly one device assigned");
    }
    if (!ShapeUtil::Equal(tile_shape, Shape())) {
      return Error(loc, "maximal shardings should not have any tile shape set");
    }
    sharding->set_type(OpSharding::Type::OpSharding_Type_MAXIMAL);
    sharding->add_tile_assignment_devices(devices[0]);
  } else {
    // Tiled sharding: requires a tile shape, >1 device, and explicit
    // tile-assignment dimensions.
    if (devices.size() <= 1) {
      return Error(
          loc, "non-maximal shardings must have more than one device assigned");
    }
    if (ShapeUtil::Equal(tile_shape, Shape())) {
      return Error(loc, "non-maximal shardings should have a tile shape set");
    }
    if (tile_assignment_dimensions.empty()) {
      return Error(
          loc,
          "non-maximal shardings must have a tile assignment list including "
          "dimensions");
    }
    sharding->set_type(OpSharding::Type::OpSharding_Type_OTHER);
    *sharding->mutable_tile_shape() = tile_shape;
    for (int64 dim : tile_assignment_dimensions) {
      sharding->add_tile_assignment_dimensions(dim);
    }
    for (int64 device : devices) {
      sharding->add_tile_assignment_devices(device);
    }
  }
  // Consume the closing '}' (loop above stopped on it without lexing).
  lexer_.Lex();
  return true;
}
// '{' name+ '}'
//
// Parses a brace-enclosed, comma-separated list of instruction names and
// resolves each one against instruction_pool_, appending the resolved
// instructions to *instructions. Returns false (with an error recorded)
// on malformed syntax or an unknown instruction name.
bool HloParser::ParseInstructionNames(
    std::vector<HloInstruction*>* instructions) {
  if (!ParseToken(TokKind::kLbrace,
                  "expects '{' at the beginning of instruction name list")) {
    return false;
  }
  LocTy loc = lexer_.GetLoc();
  do {
    string name;
    if (!ParseName(&name)) {
      // Fixed grammar in the message ("a instruction" -> "an instruction").
      return Error(loc, "expects an instruction name");
    }
    HloInstruction* instr =
        tensorflow::gtl::FindPtrOrNull(instruction_pool_, name);
    if (!instr) {
      return TokenError(
          Printf("instruction '%s' is not defined", name.c_str()));
    }
    instructions->push_back(instr);
  } while (EatIfPresent(TokKind::kComma));

  return ParseToken(TokKind::kRbrace,
                    "expects '}' at the end of instruction name list");
}
// Stores an integral `value` at `linear_index` of `literal`, dispatching on
// the literal's primitive element type. Returns false if the helper rejects
// the value (e.g. out of range for the native type). LOG(FATAL)s on a
// non-integral element type, which indicates a caller bug.
bool HloParser::SetValueInLiteral(int64 value, int64 linear_index,
                                  Literal* literal) {
  const Shape& shape = literal->shape();
  switch (shape.element_type()) {
    case S8:
      return SetValueInLiteralHelper<int8>(value, linear_index, literal);
    case S16:
      return SetValueInLiteralHelper<int16>(value, linear_index, literal);
    case S32:
      return SetValueInLiteralHelper<int32>(value, linear_index, literal);
    case S64:
      return SetValueInLiteralHelper<int64>(value, linear_index, literal);
    case U8:
      return SetValueInLiteralHelper<uint8>(value, linear_index, literal);
    case U16:
      // Bug fix: U16 previously dispatched to uint8, silently truncating
      // values and writing through the wrong element size.
      return SetValueInLiteralHelper<uint16>(value, linear_index, literal);
    case U32:
      return SetValueInLiteralHelper<uint32>(value, linear_index, literal);
    case U64:
      return SetValueInLiteralHelper<uint64>(value, linear_index, literal);
    default:
      LOG(FATAL) << "unknown integral primitive type "
                 << PrimitiveType_Name(shape.element_type());
  }
}
// Stores a floating-point `value` at `linear_index` of `literal`, dispatching
// on the literal's primitive element type (F16/BF16/F32/F64). Returns false
// if the helper rejects the value. LOG(FATAL)s on a non-floating-point
// element type, which indicates a caller bug.
bool HloParser::SetValueInLiteral(double value, int64 linear_index,
                                  Literal* literal) {
  const Shape& shape = literal->shape();
  switch (shape.element_type()) {
    case F16:
      return SetValueInLiteralHelper<half>(value, linear_index, literal);
    case BF16:
      return SetValueInLiteralHelper<bfloat16>(value, linear_index, literal);
    case F32:
      return SetValueInLiteralHelper<float>(value, linear_index, literal);
    case F64:
      return SetValueInLiteralHelper<double>(value, linear_index, literal);
    default:
      LOG(FATAL) << "unknown floating point primitive type "
                 << PrimitiveType_Name(shape.element_type());
  }
}
// Stores a boolean `value` at `linear_index` of `literal`. Only valid for
// PRED-typed literals; LOG(FATAL)s otherwise, which indicates a caller bug.
bool HloParser::SetValueInLiteral(bool value, int64 linear_index,
                                  Literal* literal) {
  const Shape& shape = literal->shape();
  switch (shape.element_type()) {
    case PRED:
      return SetValueInLiteralHelper<bool>(value, linear_index, literal);
    default:
      LOG(FATAL) << PrimitiveType_Name(shape.element_type())
                 << " is not PRED type";
  }
}
// Writes `value` (as parsed, type ParsedElemT) into `literal` at
// `linear_index`, converting to the literal's native element type
// LiteralNativeT. Performs a bounds check on the index and a range check on
// the value (skipped for NaN/inf); returns false with a parser error on
// either failure.
template <typename LiteralNativeT, typename ParsedElemT>
bool HloParser::SetValueInLiteralHelper(ParsedElemT value, int64 linear_index,
                                        Literal* literal) {
  // Check that linear_index is in range.
  if (linear_index >= ShapeUtil::ElementsIn(literal->shape())) {
    // Fixed typo in the message ("trys" -> "tries").
    return TokenError(
        StrCat("tries to set value ", value, " to a literal in shape ",
               ShapeUtil::HumanString(literal->shape()), " at linear index ",
               linear_index, ", but the index is out of range"));
  }

  if (std::isnan(value) ||
      (std::numeric_limits<ParsedElemT>::has_infinity &&
       (std::numeric_limits<ParsedElemT>::infinity() == value ||
        -std::numeric_limits<ParsedElemT>::infinity() == value))) {
    // Skip range checking for non-finite value.
  } else if (literal->shape().element_type() == F16 ||
             literal->shape().element_type() == BF16) {
    // half/bfloat16 share float's exponent range limit kF16max; check against
    // it in the parsed (wider) type before narrowing.
    if (value > kF16max || value < -kF16max) {
      return TokenError(StrCat(
          "value ", value, " is out of range for literal's primitive type ",
          PrimitiveType_Name(literal->shape().element_type())));
    }
  } else if (value > static_cast<ParsedElemT>(
                         std::numeric_limits<LiteralNativeT>::max()) ||
             value < static_cast<ParsedElemT>(
                         std::numeric_limits<LiteralNativeT>::lowest())) {
    // Value is out of range for LiteralNativeT.
    return TokenError(StrCat(
        "value ", value, " is out of range for literal's primitive type ",
        PrimitiveType_Name(literal->shape().element_type())));
  }

  literal->GetMutableArraySlice<LiteralNativeT>().at(linear_index) =
      static_cast<LiteralNativeT>(value);
  return true;
}
bool HloParser::EatShapeAndCheckCompatible(const Shape& shape) {
Shape new_shape;
if (!ParseShape(&new_shape)) {
return TokenError(StrCat("expects shape ", ShapeUtil::HumanString(shape)));
}
if (!ShapeUtil::Compatible(shape, new_shape)) {
return TokenError(StrCat(
"expects shape ", ShapeUtil::HumanString(shape),
", but sees a different shape: ", ShapeUtil::HumanString(new_shape)));
}
return true;
}
// literal
//  ::= tuple
//  ::= non_tuple
//
// Dispatches to the tuple or non-tuple literal parser based on `shape`.
bool HloParser::ParseLiteral(std::unique_ptr<Literal>* literal,
                             const Shape& shape) {
  if (ShapeUtil::IsTuple(shape)) {
    return ParseTupleLiteral(literal, shape);
  }
  return ParseNonTupleLiteral(literal, shape);
}
// tuple
//  ::= shape '(' literal_list ')'
// literal_list
//  ::= /*empty*/
//  ::= literal (',' literal)*
//
// Parses a tuple literal of the given (tuple) shape into *literal.
bool HloParser::ParseTupleLiteral(std::unique_ptr<Literal>* literal,
                                  const Shape& shape) {
  if (!EatShapeAndCheckCompatible(shape)) {
    return TokenError(StrCat("expects tuple constant in shape ",
                             ShapeUtil::HumanString(shape)));
  }
  if (!ParseToken(TokKind::kLparen, "expects '(' in front of tuple elements")) {
    return false;
  }
  std::vector<std::unique_ptr<Literal>> elements(
      ShapeUtil::TupleElementCount(shape));

  if (lexer_.GetKind() == TokKind::kRparen) {
    // empty
  } else {
    // literal, (',' literal)*
    for (int i = 0; i < elements.size(); i++) {
      // Bug fix: the comma's ParseToken result was previously ignored, so a
      // missing separator between elements went undiagnosed. Also fixed the
      // "exepcts" typo in the message.
      if (i > 0 &&
          !ParseToken(TokKind::kComma,
                      "expects ',' to separate tuple elements")) {
        return false;
      }
      if (!ParseLiteral(&elements[i],
                        ShapeUtil::GetTupleElementShape(shape, i))) {
        return TokenError(StrCat("expects the ", i, "th element"));
      }
    }
  }
  *literal = Literal::MakeTupleOwned(std::move(elements));
  // Bug fix: added the missing space before "elements" so the error message
  // does not render as e.g. "with 2elements".
  return ParseToken(TokKind::kRparen,
                    StrCat("expects ')' at the end of the tuple with ",
                           ShapeUtil::TupleElementCount(shape), " elements"));
}
// non_tuple
//   ::= rank01
//   ::= rank2345
// rank2345 ::= shape nested_array
//
// Parses a non-tuple literal. For rank >= 2 the shape prefix has to be eaten
// first; the nested-array body is then validated brace-by-brace against the
// shape's dimensions while elements are written at increasing linear indices.
bool HloParser::ParseNonTupleLiteral(std::unique_ptr<Literal>* literal,
                                     const Shape& shape) {
  const int64 rank = ShapeUtil::Rank(shape);
  if (rank > 1 && !EatShapeAndCheckCompatible(shape)) {
    return false;
  }

  // Create a literal with the given shape in default layout.
  *literal = Literal::CreateFromDimensions(shape.element_type(),
                                           AsInt64Slice(shape.dimensions()));
  int64 nest_level = 0;
  int64 linear_index = 0;
  // elems_seen_per_dim[i] is how many elements or sub-arrays we have seen for
  // the dimension i. For example, to parse f32[2,3] {{1, 2, 3}, {4, 5, 6}},
  // when we are parsing the 2nd '{' (right before '1'), we are seeing a
  // sub-array of the dimension 0, so elems_seen_per_dim[0]++. When we are at
  // the first '}' (right after '3'), it means the sub-array ends, and the
  // sub-array is supposed to contain exactly 3 elements, so check if
  // elems_seen_per_dim[1] is 3.
  std::vector<int64> elems_seen_per_dim(rank);
  // Renders the current position as a bracketed index for error messages.
  auto get_index_str = [&elems_seen_per_dim](int dim) -> string {
    std::vector<int64> elems_seen_until_dim(elems_seen_per_dim.begin(),
                                            elems_seen_per_dim.begin() + dim);
    return StrCat("[",
                  tensorflow::str_util::Join(
                      elems_seen_until_dim, ",",
                      [](string* out, const int64& num_elems) {
                        tensorflow::strings::StrAppend(out, num_elems - 1);
                      }),
                  "]");
  };
  do {
    switch (lexer_.GetKind()) {
      default:
        return TokenError("unexpected token type in a literal");
      case TokKind::kLbrace: {
        // Entering a sub-array one dimension deeper.
        nest_level++;
        if (nest_level > rank) {
          return TokenError(Printf(
              "expects nested array in rank %lld, but sees larger", rank));
        }
        if (nest_level > 1) {
          elems_seen_per_dim[nest_level - 2]++;
          if (elems_seen_per_dim[nest_level - 2] >
              shape.dimensions(nest_level - 2)) {
            return TokenError(Printf(
                "expects %lld elements in the %sth element, but sees more",
                shape.dimensions(nest_level - 2),
                get_index_str(nest_level - 2).c_str()));
          }
        }
        lexer_.Lex();
        break;
      }
      case TokKind::kRbrace: {
        // Leaving a sub-array: it must have held exactly dimensions(nest_level)
        // elements.
        nest_level--;
        if (elems_seen_per_dim[nest_level] != shape.dimensions(nest_level)) {
          return TokenError(Printf(
              "expects %lld elements in the %sth element, but sees %lld",
              shape.dimensions(nest_level), get_index_str(nest_level).c_str(),
              elems_seen_per_dim[nest_level]));
        }
        elems_seen_per_dim[nest_level] = 0;
        lexer_.Lex();
        break;
      }
      case TokKind::kComma:
      case TokKind::kComment:
        // Skip.
        lexer_.Lex();
        break;
      case TokKind::kw_true:
      case TokKind::kw_false:
      case TokKind::kInt:
      case TokKind::kDecimal:
      case TokKind::kw_nan:
      case TokKind::kw_inf:
      case TokKind::kNegInf: {
        // A scalar element: validate nesting/counts, then parse and store the
        // value by element type.
        if (rank > 0) {
          if (nest_level != rank) {
            return TokenError(
                Printf("expects nested array in rank %lld, but sees %lld", rank,
                       nest_level));
          }
          elems_seen_per_dim[rank - 1]++;
          if (elems_seen_per_dim[rank - 1] > shape.dimensions(rank - 1)) {
            return TokenError(
                Printf("expects %lld elements on the minor-most dimension, but "
                       "sees more",
                       shape.dimensions(rank - 1)));
          }
        }
        if (lexer_.GetKind() == TokKind::kw_true ||
            lexer_.GetKind() == TokKind::kw_false) {
          // TODO(congliu): bool type literals with rank >= 1 are actually
          // printed in a compact form instead of "true" or "false". Fix that.
          if (!SetValueInLiteral(lexer_.GetKind() == TokKind::kw_true,
                                 linear_index++, literal->get())) {
            return false;
          }
          lexer_.Lex();
        } else if (primitive_util::IsIntegralType(shape.element_type())) {
          LocTy loc = lexer_.GetLoc();
          int64 value;
          if (!ParseInt64(&value)) {
            return Error(loc, StrCat("expects integer for primitive type: ",
                                     PrimitiveType_Name(shape.element_type())));
          }
          if (!SetValueInLiteral(value, linear_index++, literal->get())) {
            return false;
          }
        } else if (primitive_util::IsFloatingPointType(shape.element_type())) {
          LocTy loc = lexer_.GetLoc();
          double value;
          if (!ParseDouble(&value)) {
            return Error(
                loc, StrCat("expect floating point value for primitive type: ",
                            PrimitiveType_Name(shape.element_type())));
          }
          if (!SetValueInLiteral(value, linear_index++, literal->get())) {
            return false;
          }
        } else {
          // Fixed typo in the message ("premitive" -> "primitive").
          return TokenError(StrCat("unsupported primitive type ",
                                   PrimitiveType_Name(shape.element_type())));
        }
        break;
      }
    }  // end of switch
  } while (nest_level > 0);

  *literal = (*literal)->Relayout(shape.layout());
  return true;
}
// operands ::= '(' operands1 ')'
// operands1
//   ::= /*empty*/
//   ::= operand (, operand)*
// operand ::= (shape)? name
//
// Parses a parenthesized operand list, resolving each name against
// instruction_pool_. A leading shape before a name is accepted and discarded.
bool HloParser::ParseOperands(std::vector<HloInstruction*>* operands) {
  if (!ParseToken(TokKind::kLparen,
                  "expects '(' at the beginning of operands")) {
    return false;
  }
  if (lexer_.GetKind() != TokKind::kRparen) {
    do {
      LocTy loc = lexer_.GetLoc();
      // An operand may be prefixed by its shape; parse it only to consume it.
      if (CanBeShape()) {
        Shape ignored_shape;
        if (!ParseShape(&ignored_shape)) {
          return false;
        }
      }
      string name;
      if (!ParseName(&name)) {
        return false;
      }
      HloInstruction* instruction =
          tensorflow::gtl::FindPtrOrNull(instruction_pool_, name);
      if (instruction == nullptr) {
        return Error(loc, StrCat("instruction does not exist: ", name));
      }
      operands->push_back(instruction);
    } while (EatIfPresent(TokKind::kComma));
  }
  return ParseToken(TokKind::kRparen, "expects ')' at the end of operands");
}
// Like ParseOperands above, but additionally checks that exactly
// `expected_size` operands were parsed.
bool HloParser::ParseOperands(std::vector<HloInstruction*>* operands,
                              const int expected_size) {
  LocTy loc = lexer_.GetLoc();
  if (!ParseOperands(operands)) {
    return false;
  }
  if (operands->size() == expected_size) {
    return true;
  }
  return Error(loc, StrCat("expects ", expected_size, " operands, but has ",
                           operands->size(), " operands"));
}
// sub_attributes ::= '{' (','? attribute)* '}'
bool HloParser::ParseSubAttributes(
const std::unordered_map<string, AttrConfig>& attrs) {
LocTy loc = lexer_.GetLoc();
if (!ParseToken(TokKind::kLbrace, "expects '{' to start sub attributes")) {
return false;
}
std::unordered_set<string> seen_attrs;
if (lexer_.GetKind() == TokKind::kRbrace) {
// empty
} else {
do {
EatIfPresent(TokKind::kComma);
if (!ParseAttributeHelper(attrs, &seen_attrs)) {
return false;
}
} while (lexer_.GetKind() != TokKind::kRbrace);
}
// Check that all required attrs were seen.
for (const auto& attr_it : attrs) {
if (attr_it.second.required &&
seen_attrs.find(attr_it.first) == seen_attrs.end()) {
return Error(loc, Printf("sub-attribute %s is expected but not seen",
attr_it.first.c_str()));
}
}
return ParseToken(TokKind::kRbrace, "expects '}' to end sub attributes");
}
// attributes ::= (',' attribute)*
bool HloParser::ParseAttributes(
const std::unordered_map<string, AttrConfig>& attrs) {
LocTy loc = lexer_.GetLoc();
std::unordered_set<string> seen_attrs;
while (EatIfPresent(TokKind::kComma)) {
if (!ParseAttributeHelper(attrs, &seen_attrs)) {
return false;
}
}
// Check that all required attrs were seen.
for (const auto& attr_it : attrs) {
if (attr_it.second.required &&
seen_attrs.find(attr_it.first) == seen_attrs.end()) {
return Error(loc, Printf("attribute %s is expected but not seen",
attr_it.first.c_str()));
}
}
return true;
}
// Parses one "name=value" attribute: looks the name up in `attrs`, rejects
// duplicates (via `seen_attrs`) and unknown names, then parses the value
// according to the configured AttrTy and stores it through the type-erased
// result pointer (an optional<T>* cast to void* in the AttrConfig).
bool HloParser::ParseAttributeHelper(
    const std::unordered_map<string, AttrConfig>& attrs,
    std::unordered_set<string>* seen_attrs) {
  LocTy loc = lexer_.GetLoc();
  string name;
  if (!ParseAttributeName(&name)) {
    return Error(loc, "error parsing attributes");
  }
  VLOG(1) << "Parsing attribute " << name;
  // insert() returning false means the attribute was already present.
  if (!seen_attrs->insert(name).second) {
    return Error(loc, Printf("attribute %s already exists", name.c_str()));
  }
  auto attr_it = attrs.find(name);
  if (attr_it == attrs.end()) {
    return Error(loc, Printf("unexpected attribute %s", name.c_str()));
  }
  AttrTy attr_type = attr_it->second.attr_type;
  void* attr_out_ptr = attr_it->second.result;
  // The lambda parses the value and writes it into the caller-owned optional.
  // Each case must cast attr_out_ptr to the optional type matching attr_type.
  bool success = [&] {
    LocTy attr_loc = lexer_.GetLoc();
    switch (attr_type) {
      case AttrTy::kInt64: {
        int64 result;
        if (!ParseInt64(&result)) {
          return false;
        }
        static_cast<optional<int64>*>(attr_out_ptr)->emplace(result);
        return true;
      }
      case AttrTy::kInt32: {
        // Parsed as int64 first, then range-checked before narrowing.
        int64 result;
        if (!ParseInt64(&result)) {
          return false;
        }
        if (result != static_cast<int32>(result)) {
          return Error(attr_loc, "value out of range for int32");
        }
        static_cast<optional<int32>*>(attr_out_ptr)
            ->emplace(static_cast<int32>(result));
        return true;
      }
      case AttrTy::kFloat: {
        // Parsed as double first, then range-checked before narrowing.
        double result;
        if (!ParseDouble(&result)) {
          return false;
        }
        if (result > std::numeric_limits<float>::max() ||
            result < std::numeric_limits<float>::lowest()) {
          return Error(attr_loc, "value out of range for float");
        }
        static_cast<optional<float>*>(attr_out_ptr)
            ->emplace(static_cast<float>(result));
        return true;
      }
      case AttrTy::kHloComputation: {
        HloComputation* result;
        if (!ParseComputationName(&result)) {
          return false;
        }
        static_cast<optional<HloComputation*>*>(attr_out_ptr)->emplace(result);
        return true;
      }
      case AttrTy::kWindow: {
        Window result;
        if (!ParseWindow(&result)) {
          return false;
        }
        static_cast<optional<Window>*>(attr_out_ptr)->emplace(result);
        return true;
      }
      case AttrTy::kConvolutionDimensionNumbers: {
        ConvolutionDimensionNumbers result;
        if (!ParseConvolutionDimensionNumbers(&result)) {
          return false;
        }
        static_cast<optional<ConvolutionDimensionNumbers>*>(attr_out_ptr)
            ->emplace(result);
        return true;
      }
      case AttrTy::kSharding: {
        OpSharding sharding;
        if (!ParseSharding(&sharding)) {
          return false;
        }
        static_cast<optional<OpSharding>*>(attr_out_ptr)->emplace(sharding);
        return true;
      }
      case AttrTy::kInstructionList: {
        std::vector<HloInstruction*> result;
        if (!ParseInstructionNames(&result)) {
          return false;
        }
        static_cast<optional<std::vector<HloInstruction*>>*>(attr_out_ptr)
            ->emplace(result);
        return true;
      }
      case AttrTy::kFusionKind: {
        HloInstruction::FusionKind result;
        if (!ParseFusionKind(&result)) {
          return false;
        }
        static_cast<optional<HloInstruction::FusionKind>*>(attr_out_ptr)
            ->emplace(result);
        return true;
      }
      case AttrTy::kBracedInt64List: {
        std::vector<int64> result;
        if (!ParseInt64List(TokKind::kLbrace, TokKind::kRbrace, TokKind::kComma,
                            &result)) {
          return false;
        }
        static_cast<optional<std::vector<int64>>*>(attr_out_ptr)
            ->emplace(result);
        return true;
      }
      case AttrTy::kSliceRanges: {
        SliceRanges result;
        if (!ParseSliceRanges(&result)) {
          return false;
        }
        static_cast<optional<SliceRanges>*>(attr_out_ptr)->emplace(result);
        return true;
      }
      case AttrTy::kPaddingConfig: {
        PaddingConfig result;
        if (!ParsePaddingConfig(&result)) {
          return false;
        }
        static_cast<optional<PaddingConfig>*>(attr_out_ptr)->emplace(result);
        return true;
      }
      case AttrTy::kString: {
        string result;
        if (!ParseString(&result)) {
          return false;
        }
        static_cast<optional<string>*>(attr_out_ptr)->emplace(result);
        return true;
      }
      case AttrTy::kMetadata: {
        OpMetadata result;
        if (!ParseMetadata(&result)) {
          return false;
        }
        static_cast<optional<OpMetadata>*>(attr_out_ptr)->emplace(result);
        return true;
      }
      case AttrTy::kDistribution: {
        RandomDistribution result;
        if (!ParseRandomDistribution(&result)) {
          return false;
        }
        static_cast<optional<RandomDistribution>*>(attr_out_ptr)
            ->emplace(result);
        return true;
      }
    }
  }();
  if (!success) {
    return Error(loc, Printf("error parsing attribute %s", name.c_str()));
  }
  return true;
}
// Parses a computation name and resolves it against computation_pool_,
// storing the resolved computation in *value.
bool HloParser::ParseComputationName(HloComputation** value) {
  LocTy loc = lexer_.GetLoc();
  string name;
  if (!ParseName(&name)) {
    return Error(loc, "expects computation name");
  }
  HloComputation* computation =
      tensorflow::gtl::FindPtrOrNull(computation_pool_, name);
  if (computation == nullptr) {
    return Error(loc, StrCat("computation does not exist: ", name));
  }
  *value = computation;
  return true;
}
// ::= '{' size stride? pad? lhs_dilate? rhs_dilate? '}'
// The subattributes can appear in any order. 'size=' is required, others are
// optional.
//
// Parses a window attribute into *window: collects the per-field vectors,
// cross-checks their lengths against 'size=', and fills the WindowDimension
// protos (absent fields get their defaults).
bool HloParser::ParseWindow(Window* window) {
  LocTy loc = lexer_.GetLoc();
  if (!ParseToken(TokKind::kLbrace, "expected '{' to start window attribute")) {
    return false;
  }

  std::vector<int64> size;
  std::vector<int64> stride;
  std::vector<std::vector<int64>> pad;
  std::vector<int64> lhs_dilate;
  std::vector<int64> rhs_dilate;
  std::vector<int64> rhs_reversal;
  while (lexer_.GetKind() != TokKind::kRbrace) {
    LocTy attr_loc = lexer_.GetLoc();
    string field_name;
    if (!ParseAttributeName(&field_name)) {
      return Error(attr_loc, "expects sub-attributes in window");
    }
    bool ok = [&] {
      if (field_name == "size") {
        return ParseDxD("size", &size);
      }
      if (field_name == "stride") {
        return ParseDxD("stride", &stride);
      }
      if (field_name == "lhs_dilate") {
        return ParseDxD("lhs_dilate", &lhs_dilate);
      }
      if (field_name == "rhs_dilate") {
        // Bug fix: the name passed to ParseDxD (used in its error messages)
        // was misspelled "rls_dilate".
        return ParseDxD("rhs_dilate", &rhs_dilate);
      }
      if (field_name == "pad") {
        return ParseWindowPad(&pad);
      }
      if (field_name == "rhs_reversal") {
        return ParseDxD("rhs_reversal", &rhs_reversal);
      }
      return Error(loc, StrCat("unexpected attribute name: ", field_name));
    }();
    if (!ok) {
      return false;
    }
  }

  // 'size=' is mandatory and fixes the rank; every other present field must
  // match its length.
  if (size.empty()) {
    return Error(loc,
                 "sub-attribute 'size=' is required in the window attribute");
  }
  if (!stride.empty() && stride.size() != size.size()) {
    return Error(loc, "expects 'stride=' has the same size as 'size='");
  }
  if (!lhs_dilate.empty() && lhs_dilate.size() != size.size()) {
    return Error(loc, "expects 'lhs_dilate=' has the same size as 'size='");
  }
  if (!rhs_dilate.empty() && rhs_dilate.size() != size.size()) {
    return Error(loc, "expects 'rhs_dilate=' has the same size as 'size='");
  }
  if (!pad.empty() && pad.size() != size.size()) {
    return Error(loc, "expects 'pad=' has the same size as 'size='");
  }
  for (int i = 0; i < size.size(); i++) {
    window->add_dimensions()->set_size(size[i]);
    if (!pad.empty()) {
      window->mutable_dimensions(i)->set_padding_low(pad[i][0]);
      window->mutable_dimensions(i)->set_padding_high(pad[i][1]);
    }
    // If some field is not present, it has the default value.
    window->mutable_dimensions(i)->set_stride(stride.empty() ? 1 : stride[i]);
    window->mutable_dimensions(i)->set_base_dilation(
        lhs_dilate.empty() ? 1 : lhs_dilate[i]);
    window->mutable_dimensions(i)->set_window_dilation(
        rhs_dilate.empty() ? 1 : rhs_dilate[i]);
    window->mutable_dimensions(i)->set_window_reversal(
        rhs_reversal.empty() ? false : (rhs_reversal[i] == 1));
  }
  return ParseToken(TokKind::kRbrace, "expected '}' to end window attribute");
}
// This is the inverse of HloInstruction::ConvolutionDimensionNumbersToString.
// The string looks like "dim_labels=0bf_0io->0bf".
//
// The three label groups describe lhs, rhs (kernel), and output. In each
// group, the character's POSITION is the dimension index and the character
// itself says what that dimension is: 'b'/'f' (batch/feature) on lhs and
// output, 'i'/'o' (input/output feature) on rhs, and digits 0..rank-3 for the
// spatial dimensions.
bool HloParser::ParseConvolutionDimensionNumbers(
    ConvolutionDimensionNumbers* dnums) {
  if (lexer_.GetKind() != TokKind::kDimLabels) {
    return TokenError("expects dim labels pattern, e.g., 'bf0_0io->0bf'");
  }
  string str = lexer_.GetStrVal();

  // The str is expected to have 3 items, lhs, rhs, out, and it must looks like
  // lhs_rhs->out, that is, the first separator is "_" and the second is "->".
  // So we replace the "->" with "_" and then split on "_".
  str = tensorflow::str_util::StringReplace(str, /*oldsub=*/"->",
                                            /*newsub=*/"_",
                                            /*replace_all=*/false);
  std::vector<string> lhs_rhs_out = Split(str, "_");
  if (lhs_rhs_out.size() != 3) {
    LOG(FATAL) << "expects 3 items: lhs, rhs, and output dims, but sees "
               << str;
  }

  const int64 rank = lhs_rhs_out[0].length();
  if (rank != lhs_rhs_out[1].length() || rank != lhs_rhs_out[2].length()) {
    return TokenError(
        "convolution lhs, rhs, and output must have the same rank");
  }
  if (rank < 2) {
    return TokenError("convolution rank must >=2");
  }

  // Each label may appear at most once within a group.
  auto is_unique = [](string str) -> bool {
    std::sort(str.begin(), str.end());
    return std::unique(str.begin(), str.end()) == str.end();
  };

  // lhs
  {
    const string& lhs = lhs_rhs_out[0];
    if (!is_unique(lhs)) {
      return TokenError(
          StrCat("expects unique lhs dimension numbers, but sees ", lhs));
    }
    // Pre-size the spatial-dimension list (rank - 2 spatial dims) so digits
    // can be assigned by index below; -1 marks "not yet set".
    for (int i = 0; i < rank - 2; i++) {
      dnums->add_input_spatial_dimensions(-1);
    }
    for (int i = 0; i < rank; i++) {
      char c = lhs[i];
      if (c == 'b') {
        dnums->set_input_batch_dimension(i);
      } else if (c == 'f') {
        dnums->set_input_feature_dimension(i);
      } else if (c < '0' + rank && c >= '0') {
        dnums->set_input_spatial_dimensions(c - '0', i);
      } else {
        return TokenError(
            Printf("expects [0-%lldbf] in lhs dimension numbers", rank - 1));
      }
    }
  }
  // rhs
  {
    const string& rhs = lhs_rhs_out[1];
    if (!is_unique(rhs)) {
      return TokenError(
          StrCat("expects unique rhs dimension numbers, but sees ", rhs));
    }
    for (int i = 0; i < rank - 2; i++) {
      dnums->add_kernel_spatial_dimensions(-1);
    }
    for (int i = 0; i < rank; i++) {
      char c = rhs[i];
      if (c == 'i') {
        dnums->set_kernel_input_feature_dimension(i);
      } else if (c == 'o') {
        dnums->set_kernel_output_feature_dimension(i);
      } else if (c < '0' + rank && c >= '0') {
        dnums->set_kernel_spatial_dimensions(c - '0', i);
      } else {
        return TokenError(
            Printf("expects [0-%lldio] in rhs dimension numbers", rank - 1));
      }
    }
  }
  // output
  {
    const string& out = lhs_rhs_out[2];
    if (!is_unique(out)) {
      return TokenError(
          StrCat("expects unique output dimension numbers, but sees ", out));
    }
    for (int i = 0; i < rank - 2; i++) {
      dnums->add_output_spatial_dimensions(-1);
    }
    for (int i = 0; i < rank; i++) {
      char c = out[i];
      if (c == 'b') {
        dnums->set_output_batch_dimension(i);
      } else if (c == 'f') {
        dnums->set_output_feature_dimension(i);
      } else if (c < '0' + rank && c >= '0') {
        dnums->set_output_spatial_dimensions(c - '0', i);
      } else {
        return TokenError(
            Printf("expects [0-%lldbf] in output dimension numbers", rank - 1));
      }
    }
  }

  lexer_.Lex();
  return true;
}
// ::= '{' ranges '}'
//   ::= /*empty*/
//   ::= range (',' range)*
// range ::= '[' start ':' limit (':' stride)? ']'
//
// The slice ranges are printed as:
//
//  {[dim0_start:dim0_limit:dim0stride], [dim1_start:dim1_limit], ...}
//
// This function extracts the starts, limits, and strides as 3 vectors to the
// result. If stride is not present, stride is 1. For example, if the slice
// ranges is printed as:
//
//  {[2:3:4], [5:6:7], [8:9]}
//
// The the parsed result will be:
//
//  {/*starts=*/{2, 5, 8}, /*limits=*/{3, 6, 9}, /*strides=*/{4, 7, 1}}
//
bool HloParser::ParseSliceRanges(SliceRanges* result) {
  if (!ParseToken(TokKind::kLbrace, "expects '{' to start ranges")) {
    return false;
  }
  std::vector<std::vector<int64>> ranges;
  if (lexer_.GetKind() == TokKind::kRbrace) {
    // empty
    return ParseToken(TokKind::kRbrace, "expects '}' to end ranges");
  }
  do {
    LocTy loc = lexer_.GetLoc();
    ranges.emplace_back();
    if (!ParseInt64List(TokKind::kLsquare, TokKind::kRsquare, TokKind::kColon,
                        &ranges.back())) {
      return false;
    }
    const auto& range = ranges.back();
    if (range.size() != 2 && range.size() != 3) {
      // Fix: size() is size_t; cast to int64 and use %lld (the file's
      // convention) instead of passing an unsigned value to "%ld".
      return Error(loc, Printf("expects [start:limit:step] or [start:limit], "
                               "but sees %lld elements.",
                               static_cast<int64>(range.size())));
    }
  } while (EatIfPresent(TokKind::kComma));

  for (const auto& range : ranges) {
    result->starts.push_back(range[0]);
    result->limits.push_back(range[1]);
    // A two-element range has an implicit stride of 1.
    result->strides.push_back(range.size() == 3 ? range[2] : 1);
  }
  return ParseToken(TokKind::kRbrace, "expects '}' to end ranges");
}
// int64list ::= start int64_elements end
// int64_elements
// ::= /*empty*/
// ::= int64_val (delim int64_val)*
bool HloParser::ParseInt64List(const TokKind start, const TokKind end,
const TokKind delim,
std::vector<int64>* result) {
if (!ParseToken(start, StrCat("expects an int64 list starting with ",
TokKindToString(start)))) {
return false;
}
if (lexer_.GetKind() == end) {
// empty
} else {
do {
int64 i;
if (!ParseInt64(&i)) {
return false;
}
result->push_back(i);
} while (EatIfPresent(delim));
}
return ParseToken(
end, StrCat("expects an int64 list to end with ", TokKindToString(end)));
}
// param_list_to_shape ::= param_list '->' shape
//
// Parses a parameter list followed by '->' and a result shape; records the
// location of the shape token in *shape_loc.
bool HloParser::ParseParamListToShape(Shape* shape, LocTy* shape_loc) {
  if (!ParseParamList()) {
    return false;
  }
  if (!ParseToken(TokKind::kArrow, "expects '->'")) {
    return false;
  }
  *shape_loc = lexer_.GetLoc();
  return ParseShape(shape);
}
// Returns true if the next token could begin a param_list_to_shape
// production, i.e. the '(' that opens a parameter list.
bool HloParser::CanBeParamListToShape() {
  return lexer_.GetKind() == TokKind::kLparen;
}
// param_list ::= '(' param_list1 ')'
// param_list1
//   ::= /*empty*/
//   ::= param (',' param)*
// param ::= name shape
//
// Parses (and discards) a parenthesized parameter list of "name shape" pairs.
bool HloParser::ParseParamList() {
  if (!ParseToken(TokKind::kLparen,
                  "expects '(' at the beginning of param list")) {
    return false;
  }
  if (lexer_.GetKind() != TokKind::kRparen) {
    do {
      string name;
      Shape shape;
      if (!ParseName(&name) || !ParseShape(&shape)) {
        return false;
      }
    } while (EatIfPresent(TokKind::kComma));
  }
  return ParseToken(TokKind::kRparen, "expects ')' at the end of param list");
}
// shape ::= shape_val_
// shape ::= '(' tuple_elements ')'
// tuple_elements
//   ::= /*empty*/
//   ::= shape (',' shape)*
//
// Parses either a plain shape token or a (possibly empty, possibly nested)
// parenthesized tuple of shapes.
bool HloParser::ParseShape(Shape* result) {
  if (EatIfPresent(TokKind::kLparen)) {  // Tuple
    std::vector<Shape> shapes;
    if (lexer_.GetKind() != TokKind::kRparen) {
      // shape (',' shape)*
      do {
        Shape element;
        if (!ParseShape(&element)) {
          return false;
        }
        shapes.push_back(element);
      } while (EatIfPresent(TokKind::kComma));
    }
    *result = ShapeUtil::MakeTupleShape(shapes);
    return ParseToken(TokKind::kRparen, "expects ')' at the end of tuple.");
  }

  if (lexer_.GetKind() != TokKind::kShape) {
    return TokenError("expects shape");
  }
  *result = lexer_.GetShapeVal();
  lexer_.Lex();
  return true;
}
// Returns true if the next token could begin a shape production.
bool HloParser::CanBeShape() {
  // A non-tuple shape starts with a kShape token; a tuple shape starts with
  // '('.
  return lexer_.GetKind() == TokKind::kShape ||
         lexer_.GetKind() == TokKind::kLparen;
}
// Parses a name token (identifier or %name) into *result.
bool HloParser::ParseName(string* result) {
  VLOG(1) << "ParseName";
  const TokKind kind = lexer_.GetKind();
  if (kind != TokKind::kIdent && kind != TokKind::kName) {
    return TokenError("expects name");
  }
  *result = lexer_.GetStrVal();
  lexer_.Lex();
  return true;
}
// Parses an attribute-name token (the "name=" prefix) into *result.
bool HloParser::ParseAttributeName(string* result) {
  const bool is_attr_name = lexer_.GetKind() == TokKind::kAttributeName;
  if (!is_attr_name) {
    return TokenError("expects attribute name");
  }
  *result = lexer_.GetStrVal();
  lexer_.Lex();
  return true;
}
// Parses a quoted string token into *result.
bool HloParser::ParseString(string* result) {
  VLOG(1) << "ParseString";
  const bool is_string = lexer_.GetKind() == TokKind::kString;
  if (!is_string) {
    return TokenError("expects string");
  }
  *result = lexer_.GetStrVal();
  lexer_.Lex();
  return true;
}
// Parses a window sub-attribute value that is either a single integer (1D)
// or an 'i x j x ...' pattern (2D+), appending the numbers to *result.
// `name` is only used to build error messages and to reject duplicates.
bool HloParser::ParseDxD(const string& name, std::vector<int64>* result) {
  LocTy loc = lexer_.GetLoc();
  if (!result->empty()) {
    return Error(loc,
                 Printf("sub-attribute '%s=' already exists", name.c_str()));
  }
  switch (lexer_.GetKind()) {
    // 1D: a bare integer.
    case TokKind::kInt: {
      int64 number;
      if (!ParseInt64(&number)) {
        return Error(loc, Printf("expects sub-attribute '%s=i'", name.c_str()));
      }
      result->push_back(number);
      return true;
    }
    // 2D or higher: lexed as a single kDxD token like "3x4x5".
    case TokKind::kDxD: {
      string str = lexer_.GetStrVal();
      if (!SplitAndParseAsInts(str, 'x', result)) {
        return Error(loc,
                     Printf("expects sub-attribute '%s=ixj...'", name.c_str()));
      }
      lexer_.Lex();
      return true;
    }
    default:
      return TokenError("expects token type kInt or kDxD");
  }
}
bool HloParser::ParseWindowPad(std::vector<std::vector<int64>>* pad) {
LocTy loc = lexer_.GetLoc();
if (!pad->empty()) {
return Error(loc, "sub-attribute 'pad=' already exists");
}
if (lexer_.GetKind() != TokKind::kPad) {
return TokenError("expects window pad pattern, e.g., '0_0x3_3'");
}
string str = lexer_.GetStrVal();
std::vector<string> padding_str = Split(str, 'x');
for (int i = 0; i < padding_str.size(); i++) {
std::vector<int64> low_high;
if (!SplitAndParseAsInts(padding_str[i], '_', &low_high) ||
low_high.size() != 2) {
return Error(loc,
"expects padding_low and padding_high separated by '_'");
}
pad->push_back(low_high);
}
lexer_.Lex();
return true;
}
// This is the inverse xla::ToString(PaddingConfig). The padding config string
// looks like "0_0_0x3_3_1". The string is first separated by 'x'; each
// substring represents one PaddingConfigDimension and is 3 (or 2) numbers
// joined by '_': edge_padding_low, edge_padding_high and (optionally)
// interior_padding.
bool HloParser::ParsePaddingConfig(PaddingConfig* padding) {
  if (lexer_.GetKind() != TokKind::kPad) {
    return TokenError("expects padding config, e.g., '0_0_0x3_3_1'");
  }
  LocTy loc = lexer_.GetLoc();
  const string str = lexer_.GetStrVal();
  for (const auto& dim_str : Split(str, 'x')) {
    std::vector<int64> dim_pads;
    const bool ok = SplitAndParseAsInts(dim_str, '_', &dim_pads) &&
                    (dim_pads.size() == 2 || dim_pads.size() == 3);
    if (!ok) {
      return Error(loc,
                   "expects padding config pattern like 'low_high_interior' or "
                   "'low_high'");
    }
    auto* dim = padding->add_dimensions();
    dim->set_edge_padding_low(dim_pads[0]);
    dim->set_edge_padding_high(dim_pads[1]);
    // Interior padding defaults to 0 when only 'low_high' is given.
    dim->set_interior_padding(dim_pads.size() == 3 ? dim_pads[2] : 0);
  }
  lexer_.Lex();
  return true;
}
// '{' metadata_string '}'
// Parses an op-metadata block into `metadata`. All four sub-attributes
// (op_type, op_name, source_file, source_line) are optional; only the ones
// present in the input are copied into the proto, the rest stay unset.
bool HloParser::ParseMetadata(OpMetadata* metadata) {
  // Registration table consumed by ParseSubAttributes: maps each attribute
  // name to its expected type and the local that receives the parsed value.
  std::unordered_map<string, AttrConfig> attrs;
  optional<string> op_type;
  optional<string> op_name;
  optional<string> source_file;
  optional<int32> source_line;
  attrs["op_type"] = {/*required=*/false, AttrTy::kString, &op_type};
  attrs["op_name"] = {/*required=*/false, AttrTy::kString, &op_name};
  attrs["source_file"] = {/*required=*/false, AttrTy::kString, &source_file};
  attrs["source_line"] = {/*required=*/false, AttrTy::kInt32, &source_line};
  if (!ParseSubAttributes(attrs)) {
    return false;
  }
  // Copy only the sub-attributes that were explicitly provided.
  if (op_type) {
    metadata->set_op_type(*op_type);
  }
  if (op_name) {
    metadata->set_op_name(*op_name);
  }
  if (source_file) {
    metadata->set_source_file(*source_file);
  }
  if (source_line) {
    metadata->set_source_line(*source_line);
  }
  return true;
}
// Parses an HLO opcode identifier (e.g. "add") into *result.
bool HloParser::ParseOpcode(HloOpcode* result) {
  VLOG(1) << "ParseOpcode";
  if (lexer_.GetKind() != TokKind::kIdent) {
    return TokenError("expects opcode");
  }
  const string val = lexer_.GetStrVal();
  auto opcode_or = StringToHloOpcode(val);
  if (!opcode_or.ok()) {
    return TokenError(
        Printf("expects opcode but sees: %s, error: %s", val.c_str(),
               opcode_or.status().error_message().c_str()));
  }
  *result = opcode_or.ValueOrDie();
  lexer_.Lex();
  return true;
}
// Parses a fusion-kind identifier into *result.
bool HloParser::ParseFusionKind(HloInstruction::FusionKind* result) {
  VLOG(1) << "ParseFusionKind";
  if (lexer_.GetKind() != TokKind::kIdent) {
    return TokenError("expects fusion kind");
  }
  const string val = lexer_.GetStrVal();
  auto kind_or = StringToFusionKind(val);
  if (!kind_or.ok()) {
    return TokenError(
        Printf("expects fusion kind but sees: %s, error: %s", val.c_str(),
               kind_or.status().error_message().c_str()));
  }
  *result = kind_or.ValueOrDie();
  lexer_.Lex();
  return true;
}
// Parses a random-distribution identifier (e.g. "rng_uniform") into *result.
bool HloParser::ParseRandomDistribution(RandomDistribution* result) {
  VLOG(1) << "ParseRandomDistribution";
  if (lexer_.GetKind() != TokKind::kIdent) {
    return TokenError("expects random distribution");
  }
  const string val = lexer_.GetStrVal();
  auto distribution_or = StringToRandomDistribution(val);
  if (!distribution_or.ok()) {
    return TokenError(
        Printf("expects random distribution but sees: %s, error: %s",
               val.c_str(),
               distribution_or.status().error_message().c_str()));
  }
  *result = distribution_or.ValueOrDie();
  lexer_.Lex();
  return true;
}
// Parses an integer literal into *result.
bool HloParser::ParseInt64(int64* result) {
  VLOG(1) << "ParseInt64";
  if (lexer_.GetKind() == TokKind::kInt) {
    *result = lexer_.GetInt64Val();
    lexer_.Lex();
    return true;
  }
  return TokenError("expects integer");
}
// Parses a floating-point value into *result. Accepts decimal and integer
// literals as well as the keywords 'nan', 'inf' and '-inf'.
bool HloParser::ParseDouble(double* result) {
  switch (lexer_.GetKind()) {
    case TokKind::kInt:
      // Integer literals are implicitly widened to double.
      *result = static_cast<double>(lexer_.GetInt64Val());
      break;
    case TokKind::kDecimal:
      *result = lexer_.GetDecimalVal();
      break;
    case TokKind::kw_inf:
      *result = std::numeric_limits<double>::infinity();
      break;
    case TokKind::kNegInf:
      *result = -std::numeric_limits<double>::infinity();
      break;
    case TokKind::kw_nan:
      *result = std::numeric_limits<double>::quiet_NaN();
      break;
    default:
      return TokenError("expects decimal or integer");
  }
  lexer_.Lex();
  return true;
}
bool HloParser::ParseBool(bool* result) {
if (lexer_.GetKind() != TokKind::kw_true &&
lexer_.GetKind() != TokKind::kw_false) {
return TokenError("expects true or false");
}
*result = lexer_.GetKind() == TokKind::kw_true;
lexer_.Lex();
return true;
}
// Consumes the current token iff it is of the given kind; otherwise reports
// `msg` as the parse error.
bool HloParser::ParseToken(TokKind kind, const string& msg) {
  VLOG(1) << "ParseToken " << TokKindToString(kind) << " " << msg;
  if (lexer_.GetKind() == kind) {
    lexer_.Lex();
    return true;
  }
  return TokenError(msg);
}
// Consumes the current token iff it is of the given kind. Unlike ParseToken,
// a mismatch is not an error.
bool HloParser::EatIfPresent(TokKind kind) {
  if (lexer_.GetKind() == kind) {
    lexer_.Lex();
    return true;
  }
  return false;
}
// Registers `instruction` under `name` in the instruction pool; fails with an
// error at `name_loc` if the name is already taken.
bool HloParser::AddInstruction(const string& name, HloInstruction* instruction,
                               LocTy name_loc) {
  const bool inserted = instruction_pool_.insert({name, instruction}).second;
  if (!inserted) {
    return Error(name_loc, StrCat("instruction already exists: ", name));
  }
  return true;
}
// Registers `computation` under `name` in the computation pool; fails with an
// error at `name_loc` if the name is already taken.
bool HloParser::AddComputation(const string& name, HloComputation* computation,
                               LocTy name_loc) {
  const bool inserted = computation_pool_.insert({name, computation}).second;
  if (!inserted) {
    return Error(name_loc, StrCat("computation already exists: ", name));
  }
  return true;
}
} // namespace
// Parses `str` into an HloModule using the given module config. Returns an
// InvalidArgument status carrying the parser's error message on failure.
StatusOr<std::unique_ptr<HloModule>> Parse(StringPiece str,
                                           const HloModuleConfig& config) {
  HloParser parser(str, config);
  if (parser.Run()) {
    return parser.ConsumeHloModule();
  }
  return InvalidArgument("Syntax error:\n%s", parser.GetError().c_str());
}
// Convenience overload: parses `str` with a default-constructed module config.
StatusOr<std::unique_ptr<HloModule>> Parse(StringPiece str) {
  return Parse(str, HloModuleConfig());
}
} // namespace tools
} // namespace xla
| JingJunYin/tensorflow | tensorflow/compiler/xla/tools/parser/hlo_parser.cc | C++ | apache-2.0 | 84,539 |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.ssl;
import io.netty.buffer.ByteBuf;
import io.netty.util.internal.NativeLibraryLoader;
import io.netty.util.internal.SystemPropertyUtil;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
import org.apache.tomcat.jni.Buffer;
import org.apache.tomcat.jni.Library;
import org.apache.tomcat.jni.Pool;
import org.apache.tomcat.jni.SSL;
import org.apache.tomcat.jni.SSLContext;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Locale;
import java.util.Set;
/**
 * Tells if <a href="http://netty.io/wiki/forked-tomcat-native.html">{@code netty-tcnative}</a> and its OpenSSL support
 * are available.
 */
public final class OpenSsl {
    private static final InternalLogger logger = InternalLoggerFactory.getInstance(OpenSsl.class);
    private static final String LINUX = "linux";
    private static final String UNKNOWN = "unknown";
    // Non-null iff netty-tcnative could not be loaded/initialized; null means OpenSSL is usable.
    private static final Throwable UNAVAILABILITY_CAUSE;
    // OpenSSL-style names of every cipher suite the loaded library reports; empty when unavailable.
    private static final Set<String> AVAILABLE_CIPHER_SUITES;
    static {
        Throwable cause = null;
        // Test if netty-tcnative is in the classpath first.
        try {
            Class.forName("org.apache.tomcat.jni.SSL", false, OpenSsl.class.getClassLoader());
        } catch (ClassNotFoundException t) {
            cause = t;
            logger.debug(
                    "netty-tcnative not in the classpath; " +
                    OpenSslEngine.class.getSimpleName() + " will be unavailable.");
        }
        // If in the classpath, try to load the native library and initialize netty-tcnative.
        if (cause == null) {
            try {
                // The JNI library was not already loaded. Load it now.
                loadTcNative();
            } catch (Throwable t) {
                cause = t;
                logger.debug(
                    "Failed to load netty-tcnative; " +
                        OpenSslEngine.class.getSimpleName() + " will be unavailable, unless the " +
                        "application has already loaded the symbols by some other means. " +
                        "See http://netty.io/wiki/forked-tomcat-native.html for more information.", t);
            }
            try {
                initializeTcNative();
                // The library was initialized successfully. If loading the library failed above,
                // reset the cause now since it appears that the library was loaded by some other
                // means.
                cause = null;
            } catch (Throwable t) {
                if (cause == null) {
                    cause = t;
                }
                logger.debug(
                    "Failed to initialize netty-tcnative; " +
                        OpenSslEngine.class.getSimpleName() + " will be unavailable. " +
                        "See http://netty.io/wiki/forked-tomcat-native.html for more information.", t);
            }
        }
        UNAVAILABILITY_CAUSE = cause;
        if (cause == null) {
            // Probe a throwaway server context configured with "ALL" ciphers to discover
            // the full list of cipher suites the linked OpenSSL build supports.
            final Set<String> availableCipherSuites = new LinkedHashSet<String>(128);
            final long aprPool = Pool.create(0);
            try {
                final long sslCtx = SSLContext.make(aprPool, SSL.SSL_PROTOCOL_ALL, SSL.SSL_MODE_SERVER);
                try {
                    SSLContext.setOptions(sslCtx, SSL.SSL_OP_ALL);
                    SSLContext.setCipherSuite(sslCtx, "ALL");
                    final long ssl = SSL.newSSL(sslCtx, true);
                    try {
                        for (String c: SSL.getCiphers(ssl)) {
                            // Filter out bad input.
                            if (c == null || c.length() == 0 || availableCipherSuites.contains(c)) {
                                continue;
                            }
                            availableCipherSuites.add(c);
                        }
                    } finally {
                        SSL.freeSSL(ssl);
                    }
                } finally {
                    SSLContext.free(sslCtx);
                }
            } catch (Exception e) {
                logger.warn("Failed to get the list of available OpenSSL cipher suites.", e);
            } finally {
                Pool.destroy(aprPool);
            }
            AVAILABLE_CIPHER_SUITES = Collections.unmodifiableSet(availableCipherSuites);
        } else {
            AVAILABLE_CIPHER_SUITES = Collections.emptySet();
        }
    }
    /**
     * Returns {@code true} if and only if
     * <a href="http://netty.io/wiki/forked-tomcat-native.html">{@code netty-tcnative}</a> and its OpenSSL support
     * are available.
     */
    public static boolean isAvailable() {
        return UNAVAILABILITY_CAUSE == null;
    }
    /**
     * Returns {@code true} if the used version of openssl supports
     * <a href="https://tools.ietf.org/html/rfc7301">ALPN</a>.
     */
    public static boolean isAlpnSupported() {
        // ALPN requires OpenSSL 1.0.2 or newer (version is OpenSSL's hex encoding).
        return version() >= 0x10002000L;
    }
    /**
     * Returns the version of the used available OpenSSL library or {@code -1} if {@link #isAvailable()}
     * returns {@code false}.
     */
    public static int version() {
        if (isAvailable()) {
            return SSL.version();
        }
        return -1;
    }
    /**
     * Returns the version string of the used available OpenSSL library or {@code null} if {@link #isAvailable()}
     * returns {@code false}.
     */
    public static String versionString() {
        if (isAvailable()) {
            return SSL.versionString();
        }
        return null;
    }
    /**
     * Ensure that <a href="http://netty.io/wiki/forked-tomcat-native.html">{@code netty-tcnative}</a> and
     * its OpenSSL support are available.
     *
     * @throws UnsatisfiedLinkError if unavailable
     */
    public static void ensureAvailability() {
        if (UNAVAILABILITY_CAUSE != null) {
            throw (Error) new UnsatisfiedLinkError(
                    "failed to load the required native library").initCause(UNAVAILABILITY_CAUSE);
        }
    }
    /**
     * Returns the cause of unavailability of
     * <a href="http://netty.io/wiki/forked-tomcat-native.html">{@code netty-tcnative}</a> and its OpenSSL support.
     *
     * @return the cause if unavailable. {@code null} if available.
     */
    public static Throwable unavailabilityCause() {
        return UNAVAILABILITY_CAUSE;
    }
    /**
     * Returns all the available OpenSSL cipher suites.
     * Please note that the returned array may include the cipher suites that are insecure or non-functional.
     */
    public static Set<String> availableCipherSuites() {
        return AVAILABLE_CIPHER_SUITES;
    }
    /**
     * Returns {@code true} if and only if the specified cipher suite is available in OpenSSL.
     * Both Java-style cipher suite and OpenSSL-style cipher suite are accepted.
     */
    public static boolean isCipherSuiteAvailable(String cipherSuite) {
        String converted = CipherSuiteConverter.toOpenSsl(cipherSuite);
        if (converted != null) {
            cipherSuite = converted;
        }
        return AVAILABLE_CIPHER_SUITES.contains(cipherSuite);
    }
    /**
     * Returns {@code true} if the given tcnative/OpenSSL return code signals an error.
     */
    static boolean isError(long errorCode) {
        return errorCode != SSL.SSL_ERROR_NONE;
    }
    /**
     * Returns the native memory address of the direct buffer's content, falling back to
     * {@code Buffer.address(...)} on the NIO view when the address is not directly exposed.
     */
    static long memoryAddress(ByteBuf buf) {
        assert buf.isDirect();
        return buf.hasMemoryAddress() ? buf.memoryAddress() : Buffer.address(buf.nioBuffer());
    }
    // Static utility class; no instances.
    private OpenSsl() { }
    /**
     * Loads the netty-tcnative JNI library, trying platform-specific names first
     * (including the Fedora-specific variant on Linux) and the generic name last.
     */
    private static void loadTcNative() throws Exception {
        String os = normalizeOs(SystemPropertyUtil.get("os.name", ""));
        String arch = normalizeArch(SystemPropertyUtil.get("os.arch", ""));
        Set<String> libNames = new LinkedHashSet<String>(3);
        // First, try loading the platform-specific library. Platform-specific
        // libraries will be available if using a tcnative uber jar.
        libNames.add("netty-tcnative-" + os + '-' + arch);
        if (LINUX.equalsIgnoreCase(os)) {
            // Fedora SSL lib so naming (libssl.so.10 vs libssl.so.1.0.0)..
            libNames.add("netty-tcnative-" + os + '-' + arch + "-fedora");
        }
        // finally the default library.
        libNames.add("netty-tcnative");
        NativeLibraryLoader.loadFirstAvailable(SSL.class.getClassLoader(),
            libNames.toArray(new String[libNames.size()]));
    }
    /**
     * Initializes the already-loaded tcnative library ("provided" skips APR library loading).
     */
    private static void initializeTcNative() throws Exception {
        Library.initialize("provided");
        SSL.initialize(null);
    }
    /**
     * Maps an {@code os.name} value onto the canonical OS token used in tcnative artifact names.
     */
    private static String normalizeOs(String value) {
        value = normalize(value);
        if (value.startsWith("aix")) {
            return "aix";
        }
        if (value.startsWith("hpux")) {
            return "hpux";
        }
        if (value.startsWith("os400")) {
            // Avoid the names such as os4000
            if (value.length() <= 5 || !Character.isDigit(value.charAt(5))) {
                return "os400";
            }
        }
        if (value.startsWith(LINUX)) {
            return LINUX;
        }
        if (value.startsWith("macosx") || value.startsWith("osx")) {
            return "osx";
        }
        if (value.startsWith("freebsd")) {
            return "freebsd";
        }
        if (value.startsWith("openbsd")) {
            return "openbsd";
        }
        if (value.startsWith("netbsd")) {
            return "netbsd";
        }
        if (value.startsWith("solaris") || value.startsWith("sunos")) {
            return "sunos";
        }
        if (value.startsWith("windows")) {
            return "windows";
        }
        return UNKNOWN;
    }
    /**
     * Maps an {@code os.arch} value onto the canonical architecture token used in tcnative artifact names.
     */
    private static String normalizeArch(String value) {
        value = normalize(value);
        if (value.matches("^(x8664|amd64|ia32e|em64t|x64)$")) {
            return "x86_64";
        }
        if (value.matches("^(x8632|x86|i[3-6]86|ia32|x32)$")) {
            return "x86_32";
        }
        if (value.matches("^(ia64|itanium64)$")) {
            return "itanium_64";
        }
        if (value.matches("^(sparc|sparc32)$")) {
            return "sparc_32";
        }
        if (value.matches("^(sparcv9|sparc64)$")) {
            return "sparc_64";
        }
        if (value.matches("^(arm|arm32)$")) {
            return "arm_32";
        }
        if ("aarch64".equals(value)) {
            return "aarch_64";
        }
        if (value.matches("^(ppc|ppc32)$")) {
            return "ppc_32";
        }
        if ("ppc64".equals(value)) {
            return "ppc_64";
        }
        if ("ppc64le".equals(value)) {
            return "ppcle_64";
        }
        if ("s390".equals(value)) {
            return "s390_32";
        }
        if ("s390x".equals(value)) {
            return "s390_64";
        }
        return UNKNOWN;
    }
    /**
     * Lower-cases the value and strips every character that is not a letter or digit,
     * so "Mac OS X" and "macosx" compare equal.
     */
    private static String normalize(String value) {
        return value.toLowerCase(Locale.US).replaceAll("[^a-z0-9]+", "");
    }
}
| yrcourage/netty | handler/src/main/java/io/netty/handler/ssl/OpenSsl.java | Java | apache-2.0 | 11,625 |
/*
* Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
* Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
* Created By:
* Maintained By:
*/
//= require can.jquery-all
//= require models/cacheable
(function(ns, can) {

// Cacheable model wrapping the /api/documents REST resource. Instances are
// cached by can.Model.Cacheable so repeated lookups return the same object.
can.Model.Cacheable("CMS.Models.Document", {
  root_object : "document"
  , root_collection : "documents"
  , findAll : "GET /api/documents"
  // Custom create: whitelists title/description/link before POSTing so no
  // extra client-side properties leak to the server.
  , create : function(params) {
    var _params = {
      document : {
        title : params.document.title
        , description : params.document.description
        , link : params.document.link
      }
    };
    return $.ajax({
      type : "POST"
      , "url" : "/api/documents"
      , dataType : "json"
      , data : _params
    });
  }
  // jQuery-UI-autocomplete-style source: queries documents matching
  // request.term and maps each hit to a {label, value} pair (label is the
  // title plus link_url when both exist; value is the document id).
  , search : function(request, response) {
    return $.ajax({
      type : "get"
      , url : "/api/documents"
      , dataType : "json"
      , data : {s : request.term}
      , success : function(data) {
        response($.map( data, function( item ) {
          return can.extend({}, item.document, {
            label: item.document.title
              ? item.document.title
                + (item.document.link_url
                  ? " (" + item.document.link_url + ")"
                  : "")
              : item.document.link_url
            , value: item.document.id
          });
        }));
      }
    });
  }
}, {
  init : function () {
    this._super && this._super();
    // this.bind("change", function(ev, attr, how, newVal, oldVal) {
    //   var obj;
    //   if(obj = CMS.Models.ObjectDocument.findInCacheById(this.id) && attr !== "id") {
    //     obj.attr(attr, newVal);
    //   }
    // });
    var that = this;
    // Normalize nulls coming from the API to undefined so observable
    // attribute bindings behave consistently.
    this.each(function(value, name) {
      if (value === null)
        that.attr(name, undefined);
    });
  }
});

// Join model linking a Document to an arbitrary "documentable" object.
can.Model.Cacheable("CMS.Models.ObjectDocument", {
  root_object : "object_document"
  , root_collection : "object_documents"
  , findAll: "GET /api/object_documents"
  , create: "POST /api/object_documents"
  , destroy : "DELETE /api/object_documents/{id}"
}, {
  init : function() {
    var _super = this._super;
    // Resolves the referenced document/documentable instances from the model
    // cache; runs at construction time and again after server-side creation
    // (the "created" event), when the server has filled in the ids.
    function reinit() {
      var that = this;
      typeof _super === "function" && _super.call(this);
      this.attr("document", CMS.Models.get_instance(
        "Document", this.document_id || (this.document && this.document.id)));
      this.attr("documentable", CMS.Models.get_instance(
        this.documentable_type || (this.documentable && this.documentable.type),
        this.documentable_id || (this.documentable && this.documentable.id)));
      /*this.attr(
        "document"
        , CMS.Models.Document.findInCacheById(this.document_id)
        || new CMS.Models.Document(this.document && this.document.serialize ? this.document.serialize() : this.document));
        */
      // Strip null attributes entirely (unlike Document, which maps them to
      // undefined).
      this.each(function(value, name) {
        if (value === null)
          that.removeAttr(name);
      });
    }
    this.bind("created", can.proxy(reinit, this));
    reinit.call(this);
  }
});

})(this, can);
| hamyuan/ggrc-self-test | src/ggrc/assets/javascripts/pbc/document.js | JavaScript | apache-2.0 | 3,489 |
/**
*
* Copyright 2017 Paul Schaub, 2020 Florian Schmaus
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx.omemo;
import static org.jivesoftware.smackx.omemo.util.OmemoConstants.OMEMO_NAMESPACE_V_AXOLOTL;
import java.io.IOException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.WeakHashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.jivesoftware.smack.ConnectionListener;
import org.jivesoftware.smack.Manager;
import org.jivesoftware.smack.SmackException;
import org.jivesoftware.smack.SmackException.NotConnectedException;
import org.jivesoftware.smack.XMPPConnection;
import org.jivesoftware.smack.XMPPException;
import org.jivesoftware.smack.packet.Message;
import org.jivesoftware.smack.packet.MessageBuilder;
import org.jivesoftware.smack.packet.Stanza;
import org.jivesoftware.smack.util.Async;
import org.jivesoftware.smackx.carbons.CarbonManager;
import org.jivesoftware.smackx.carbons.packet.CarbonExtension;
import org.jivesoftware.smackx.disco.ServiceDiscoveryManager;
import org.jivesoftware.smackx.hints.element.StoreHint;
import org.jivesoftware.smackx.mam.MamManager;
import org.jivesoftware.smackx.muc.MultiUserChat;
import org.jivesoftware.smackx.muc.MultiUserChatManager;
import org.jivesoftware.smackx.muc.RoomInfo;
import org.jivesoftware.smackx.omemo.element.OmemoBundleElement;
import org.jivesoftware.smackx.omemo.element.OmemoDeviceListElement;
import org.jivesoftware.smackx.omemo.element.OmemoDeviceListElement_VAxolotl;
import org.jivesoftware.smackx.omemo.element.OmemoElement;
import org.jivesoftware.smackx.omemo.exceptions.CannotEstablishOmemoSessionException;
import org.jivesoftware.smackx.omemo.exceptions.CorruptedOmemoKeyException;
import org.jivesoftware.smackx.omemo.exceptions.CryptoFailedException;
import org.jivesoftware.smackx.omemo.exceptions.NoOmemoSupportException;
import org.jivesoftware.smackx.omemo.exceptions.NoRawSessionException;
import org.jivesoftware.smackx.omemo.exceptions.UndecidedOmemoIdentityException;
import org.jivesoftware.smackx.omemo.internal.OmemoCachedDeviceList;
import org.jivesoftware.smackx.omemo.internal.OmemoDevice;
import org.jivesoftware.smackx.omemo.listener.OmemoMessageListener;
import org.jivesoftware.smackx.omemo.listener.OmemoMucMessageListener;
import org.jivesoftware.smackx.omemo.trust.OmemoFingerprint;
import org.jivesoftware.smackx.omemo.trust.OmemoTrustCallback;
import org.jivesoftware.smackx.omemo.trust.TrustState;
import org.jivesoftware.smackx.omemo.util.MessageOrOmemoMessage;
import org.jivesoftware.smackx.omemo.util.OmemoConstants;
import org.jivesoftware.smackx.pep.PepEventListener;
import org.jivesoftware.smackx.pep.PepManager;
import org.jivesoftware.smackx.pubsub.PubSubException;
import org.jivesoftware.smackx.pubsub.PubSubManager;
import org.jivesoftware.smackx.pubsub.packet.PubSub;
import org.jxmpp.jid.BareJid;
import org.jxmpp.jid.DomainBareJid;
import org.jxmpp.jid.EntityBareJid;
import org.jxmpp.jid.EntityFullJid;
/**
* Manager that allows sending messages encrypted with OMEMO.
* This class also provides some methods useful for a client that implements OMEMO.
*
* @author Paul Schaub
*/
public final class OmemoManager extends Manager {
private static final Logger LOGGER = Logger.getLogger(OmemoManager.class.getName());
private static final Integer UNKNOWN_DEVICE_ID = -1;
private static final WeakHashMap<XMPPConnection, TreeMap<Integer, OmemoManager>> INSTANCES = new WeakHashMap<>();
private final OmemoService<?, ?, ?, ?, ?, ?, ?, ?, ?> service;
private final HashSet<OmemoMessageListener> omemoMessageListeners = new HashSet<>();
private final HashSet<OmemoMucMessageListener> omemoMucMessageListeners = new HashSet<>();
private final PepManager pepManager;
private OmemoTrustCallback trustCallback;
private BareJid ownJid;
private Integer deviceId;
    /**
     * Private constructor.
     *
     * @param connection connection
     * @param deviceId deviceId of this manager; may be UNKNOWN_DEVICE_ID until the
     *                 connection authenticates and a real id can be determined.
     */
    private OmemoManager(XMPPConnection connection, Integer deviceId) {
        super(connection);
        service = OmemoService.getInstance();
        pepManager = PepManager.getInstanceFor(connection);
        this.deviceId = deviceId;
        if (connection.isAuthenticated()) {
            initBareJidAndDeviceId(this);
        } else {
            // Our own bare JID (and possibly the device id) are only known once the
            // connection is authenticated, so defer initialization until then.
            connection.addConnectionListener(new ConnectionListener() {
                @Override
                public void authenticated(XMPPConnection connection, boolean resumed) {
                    initBareJidAndDeviceId(OmemoManager.this);
                }
            });
        }
        // Associate this manager with the ratchet implementation of the service.
        service.registerRatchetForManager(this);
        // StanzaListeners
        resumeStanzaAndPEPListeners();
    }
/**
* Return an OmemoManager instance for the given connection and deviceId.
* If there was an OmemoManager for the connection and id before, return it. Otherwise create a new OmemoManager
* instance and return it.
*
* @param connection XmppConnection.
* @param deviceId MUST NOT be null and MUST be greater than 0.
*
* @return OmemoManager instance for the given connection and deviceId.
*/
public static synchronized OmemoManager getInstanceFor(XMPPConnection connection, Integer deviceId) {
if (deviceId == null || deviceId < 1) {
throw new IllegalArgumentException("DeviceId MUST NOT be null and MUST be greater than 0.");
}
TreeMap<Integer, OmemoManager> managersOfConnection = INSTANCES.get(connection);
if (managersOfConnection == null) {
managersOfConnection = new TreeMap<>();
INSTANCES.put(connection, managersOfConnection);
}
OmemoManager manager = managersOfConnection.get(deviceId);
if (manager == null) {
manager = new OmemoManager(connection, deviceId);
managersOfConnection.put(deviceId, manager);
}
return manager;
}
/**
* Returns an OmemoManager instance for the given connection. If there was one manager for the connection before,
* return it. If there were multiple managers before, return the one with the lowest deviceId.
* If there was no manager before, return a new one. As soon as the connection gets authenticated, the manager
* will look for local deviceIDs and select the lowest one as its id. If there are not local deviceIds, the manager
* will assign itself a random id.
*
* @param connection XmppConnection.
*
* @return OmemoManager instance for the given connection and a determined deviceId.
*/
public static synchronized OmemoManager getInstanceFor(XMPPConnection connection) {
TreeMap<Integer, OmemoManager> managers = INSTANCES.get(connection);
if (managers == null) {
managers = new TreeMap<>();
INSTANCES.put(connection, managers);
}
OmemoManager manager;
if (managers.size() == 0) {
manager = new OmemoManager(connection, UNKNOWN_DEVICE_ID);
managers.put(UNKNOWN_DEVICE_ID, manager);
} else {
manager = managers.get(managers.firstKey());
}
return manager;
}
/**
* Set a TrustCallback for this particular OmemoManager.
* TrustCallbacks are used to query and modify trust decisions.
*
* @param callback trustCallback.
*/
public void setTrustCallback(OmemoTrustCallback callback) {
if (trustCallback != null) {
throw new IllegalStateException("TrustCallback can only be set once.");
}
trustCallback = callback;
}
/**
* Return the TrustCallback of this manager.
*
* @return callback that is used for trust decisions.
*/
OmemoTrustCallback getTrustCallback() {
return trustCallback;
}
/**
* Initializes the OmemoManager. This method must be called before the manager can be used.
*
* @throws CorruptedOmemoKeyException if the OMEMO key is corrupted.
* @throws InterruptedException if the calling thread was interrupted.
* @throws SmackException.NoResponseException if there was no response from the remote entity.
* @throws SmackException.NotConnectedException if the XMPP connection is not connected.
* @throws XMPPException.XMPPErrorException if there was an XMPP error returned.
* @throws SmackException.NotLoggedInException if the XMPP connection is not authenticated.
* @throws PubSubException.NotALeafNodeException if a PubSub leaf node operation was attempted on a non-leaf node.
* @throws IOException if an I/O error occurred.
*/
public synchronized void initialize()
throws SmackException.NotLoggedInException, CorruptedOmemoKeyException, InterruptedException,
SmackException.NoResponseException, SmackException.NotConnectedException, XMPPException.XMPPErrorException,
PubSubException.NotALeafNodeException, IOException {
if (!connection().isAuthenticated()) {
throw new SmackException.NotLoggedInException();
}
if (getTrustCallback() == null) {
throw new IllegalStateException("No TrustCallback set.");
}
getOmemoService().init(new LoggedInOmemoManager(this));
}
/**
* Initialize the manager without blocking. Once the manager is successfully initialized, the finishedCallback will
* be notified. It will also get notified, if an error occurs.
*
* @param finishedCallback callback that gets called once the manager is initialized.
*/
public void initializeAsync(final InitializationFinishedCallback finishedCallback) {
Async.go(new Runnable() {
@Override
public void run() {
try {
initialize();
finishedCallback.initializationFinished(OmemoManager.this);
} catch (Exception e) {
finishedCallback.initializationFailed(e);
}
}
});
}
/**
* Return a set of all OMEMO capable devices of a contact.
* Note, that this method does not explicitly refresh the device list of the contact, so it might be outdated.
*
* @see #requestDeviceListUpdateFor(BareJid)
*
* @param contact contact we want to get a set of device of.
* @return set of known devices of that contact.
*
* @throws IOException if an I/O error occurred.
*/
public Set<OmemoDevice> getDevicesOf(BareJid contact) throws IOException {
OmemoCachedDeviceList list = getOmemoService().getOmemoStoreBackend().loadCachedDeviceList(getOwnDevice(), contact);
HashSet<OmemoDevice> devices = new HashSet<>();
for (int deviceId : list.getActiveDevices()) {
devices.add(new OmemoDevice(contact, deviceId));
}
return devices;
}
/**
* OMEMO encrypt a cleartext message for a single recipient.
* Note that this method does NOT set the 'to' attribute of the message.
*
* @param recipient recipients bareJid
* @param message text to encrypt
* @return encrypted message
*
* @throws CryptoFailedException when something crypto related fails
* @throws UndecidedOmemoIdentityException When there are undecided devices
* @throws InterruptedException if the calling thread was interrupted.
* @throws SmackException.NotConnectedException if the XMPP connection is not connected.
* @throws SmackException.NoResponseException if there was no response from the remote entity.
* @throws SmackException.NotLoggedInException if the XMPP connection is not authenticated.
* @throws IOException if an I/O error occurred.
*/
public OmemoMessage.Sent encrypt(BareJid recipient, String message)
throws CryptoFailedException, UndecidedOmemoIdentityException,
InterruptedException, SmackException.NotConnectedException,
SmackException.NoResponseException, SmackException.NotLoggedInException, IOException {
Set<BareJid> recipients = new HashSet<>();
recipients.add(recipient);
return encrypt(recipients, message);
}
/**
* OMEMO encrypt a cleartext message for multiple recipients.
*
* @param recipients recipients barejids
* @param message text to encrypt
* @return encrypted message.
*
* @throws CryptoFailedException When something crypto related fails
* @throws UndecidedOmemoIdentityException When there are undecided devices.
* @throws InterruptedException if the calling thread was interrupted.
* @throws SmackException.NotConnectedException if the XMPP connection is not connected.
* @throws SmackException.NoResponseException if there was no response from the remote entity.
* @throws SmackException.NotLoggedInException if the XMPP connection is not authenticated.
* @throws IOException if an I/O error occurred.
*/
public synchronized OmemoMessage.Sent encrypt(Set<BareJid> recipients, String message)
throws CryptoFailedException, UndecidedOmemoIdentityException,
InterruptedException, SmackException.NotConnectedException,
SmackException.NoResponseException, SmackException.NotLoggedInException, IOException {
LoggedInOmemoManager guard = new LoggedInOmemoManager(this);
Set<OmemoDevice> devices = getDevicesOf(getOwnJid());
for (BareJid recipient : recipients) {
devices.addAll(getDevicesOf(recipient));
}
return service.createOmemoMessage(guard, devices, message);
}
/**
 * Encrypt a message for every occupant of the given MultiUserChat.
 *
 * @param muc multiUserChat
 * @param message message to send
 * @return encrypted message
 *
 * @throws UndecidedOmemoIdentityException when there are undecided devices.
 * @throws CryptoFailedException if the OMEMO cryptography failed.
 * @throws XMPPException.XMPPErrorException if there was an XMPP error returned.
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 * @throws NoOmemoSupportException When the muc doesn't support OMEMO.
 * @throws SmackException.NotLoggedInException if the XMPP connection is not authenticated.
 * @throws IOException if an I/O error occurred.
 */
public synchronized OmemoMessage.Sent encrypt(MultiUserChat muc, String message)
        throws UndecidedOmemoIdentityException, CryptoFailedException,
        XMPPException.XMPPErrorException, SmackException.NotConnectedException, InterruptedException,
        SmackException.NoResponseException, NoOmemoSupportException,
        SmackException.NotLoggedInException, IOException {
    if (!multiUserChatSupportsOmemo(muc)) {
        throw new NoOmemoSupportException();
    }
    // Collect the bare JIDs of all current occupants and delegate to the Set based encrypt.
    Set<BareJid> occupantJids = new HashSet<>();
    for (EntityFullJid occupant : muc.getOccupants()) {
        occupantJids.add(muc.getOccupant(occupant).getJid().asBareJid());
    }
    return encrypt(occupantJids, message);
}
/**
 * Manually decrypt an OmemoElement.
 * Intended only for use-cases where the internal listeners do not pick up on an incoming message
 * (for example MAM query results).
 *
 * @param sender bareJid of the message sender (must be the jid of the contact who sent the message)
 * @param omemoElement omemoElement
 * @return decrypted OmemoMessage
 *
 * @throws SmackException.NotLoggedInException if the Manager is not authenticated
 * @throws CorruptedOmemoKeyException if our or their key is corrupted
 * @throws NoRawSessionException if the message was not a preKeyMessage, but we had no session with the contact
 * @throws CryptoFailedException if decryption fails
 * @throws IOException if an I/O error occurred.
 */
public OmemoMessage.Received decrypt(BareJid sender, OmemoElement omemoElement)
        throws SmackException.NotLoggedInException, CorruptedOmemoKeyException, NoRawSessionException,
        CryptoFailedException, IOException {
    // The guard asserts that we know our own bare JID before decrypting.
    LoggedInOmemoManager guard = new LoggedInOmemoManager(this);
    return getOmemoService().decryptMessage(guard, sender, omemoElement);
}
/**
 * Decrypt messages from a MAM query.
 *
 * @param mamQuery The MAM query
 * @return list of decrypted OmemoMessages
 *
 * @throws SmackException.NotLoggedInException if the Manager is not authenticated.
 * @throws IOException if an I/O error occurred.
 */
public List<MessageOrOmemoMessage> decryptMamQueryResult(MamManager.MamQuery mamQuery)
        throws SmackException.NotLoggedInException, IOException {
    // Copy the service's result into a fresh ArrayList so the caller gets an independent list.
    return new ArrayList<>(getOmemoService().decryptMamQueryResult(new LoggedInOmemoManager(this), mamQuery));
}
/**
 * Trust that a fingerprint belongs to an OmemoDevice.
 * The fingerprint must be the lowercase, hexadecimal fingerprint of the identityKey of the device and must
 * be of length 64.
 *
 * @param device device
 * @param fingerprint fingerprint
 * @throws IllegalStateException if no TrustCallback has been set on this manager.
 */
public void trustOmemoIdentity(OmemoDevice device, OmemoFingerprint fingerprint) {
    // Trust decisions are delegated entirely to the application-provided callback.
    if (trustCallback == null) {
        throw new IllegalStateException("No TrustCallback set.");
    }
    trustCallback.setTrust(device, fingerprint, TrustState.trusted);
}
/**
 * Distrust the fingerprint/OmemoDevice tuple.
 * The fingerprint must be the lowercase, hexadecimal fingerprint of the identityKey of the device and must
 * be of length 64.
 *
 * @param device device
 * @param fingerprint fingerprint
 * @throws IllegalStateException if no TrustCallback has been set on this manager.
 */
public void distrustOmemoIdentity(OmemoDevice device, OmemoFingerprint fingerprint) {
    // Mirror of trustOmemoIdentity: record the identity as explicitly untrusted.
    if (trustCallback == null) {
        throw new IllegalStateException("No TrustCallback set.");
    }
    trustCallback.setTrust(device, fingerprint, TrustState.untrusted);
}
/**
 * Returns true, if the fingerprint/OmemoDevice tuple is trusted, otherwise false.
 * The fingerprint must be the lowercase, hexadecimal fingerprint of the identityKey of the device and must
 * be of length 64.
 *
 * @param device device
 * @param fingerprint fingerprint
 * @return <code>true</code> if this is a trusted OMEMO identity.
 * @throws IllegalStateException if no TrustCallback has been set on this manager.
 */
public boolean isTrustedOmemoIdentity(OmemoDevice device, OmemoFingerprint fingerprint) {
    if (trustCallback == null) {
        throw new IllegalStateException("No TrustCallback set.");
    }
    // Only the explicit 'trusted' state counts; 'undecided' is not trusted.
    return trustCallback.getTrust(device, fingerprint) == TrustState.trusted;
}
/**
 * Returns true, if the fingerprint/OmemoDevice tuple is decided by the user.
 * The fingerprint must be the lowercase, hexadecimal fingerprint of the identityKey of the device and must
 * be of length 64.
 *
 * @param device device
 * @param fingerprint fingerprint
 * @return <code>true</code> if the trust is decided for the identity.
 * @throws IllegalStateException if no TrustCallback has been set on this manager.
 */
public boolean isDecidedOmemoIdentity(OmemoDevice device, OmemoFingerprint fingerprint) {
    if (trustCallback == null) {
        throw new IllegalStateException("No TrustCallback set.");
    }
    // Decided means anything other than 'undecided' — both trusted and untrusted qualify.
    return trustCallback.getTrust(device, fingerprint) != TrustState.undecided;
}
/**
 * Send a ratchet update message. This can be used to advance the ratchet of a session in order to maintain forward
 * secrecy.
 *
 * @param recipient recipient
 *
 * @throws CorruptedOmemoKeyException When the used identityKeys are corrupted
 * @throws CryptoFailedException When something fails with the crypto
 * @throws CannotEstablishOmemoSessionException When we can't establish a session with the recipient
 * @throws SmackException.NotLoggedInException if the XMPP connection is not authenticated.
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 * @throws NoSuchAlgorithmException if no such algorithm is available.
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws IOException if an I/O error occurred.
 */
public synchronized void sendRatchetUpdateMessage(OmemoDevice recipient)
        throws SmackException.NotLoggedInException, CorruptedOmemoKeyException, InterruptedException,
        SmackException.NoResponseException, NoSuchAlgorithmException, SmackException.NotConnectedException,
        CryptoFailedException, CannotEstablishOmemoSessionException, IOException {
    XMPPConnection connection = connection();
    // Create the ratchet update payload first, then wrap it in a message stanza.
    OmemoElement ratchetUpdate = getOmemoService()
            .createRatchetUpdateElement(new LoggedInOmemoManager(this), recipient);
    MessageBuilder messageBuilder = connection.getStanzaFactory()
            .buildMessageStanza()
            .to(recipient.getJid());
    messageBuilder.addExtension(ratchetUpdate);
    // Set MAM Storage hint
    StoreHint.set(messageBuilder);
    connection.sendStanza(messageBuilder.build());
}
/**
 * Returns true, if the contact has any active devices published in a deviceList.
 *
 * @param contact contact
 * @return true if contact has at least one OMEMO capable device.
 *
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 * @throws PubSubException.NotALeafNodeException if a PubSub leaf node operation was attempted on a non-leaf node.
 * @throws XMPPException.XMPPErrorException if there was an XMPP error returned.
 * @throws IOException if an I/O error occurred.
 */
public synchronized boolean contactSupportsOmemo(BareJid contact)
        throws InterruptedException, PubSubException.NotALeafNodeException, XMPPException.XMPPErrorException,
        SmackException.NotConnectedException, SmackException.NoResponseException, IOException {
    // Fetch a fresh device list from the server rather than trusting the cache.
    OmemoCachedDeviceList freshList = getOmemoService().refreshDeviceList(connection(), getOwnDevice(), contact);
    return !freshList.getActiveDevices().isEmpty();
}
/**
 * Returns true, if the MUC with the EntityBareJid multiUserChat is non-anonymous and members only (prerequisite
 * for OMEMO encryption in MUC).
 *
 * @param multiUserChat MUC
 * @return true if chat supports OMEMO
 *
 * @throws XMPPException.XMPPErrorException if there was an XMPP protocol level error
 * @throws SmackException.NotConnectedException if the connection is not connected
 * @throws InterruptedException if the thread is interrupted
 * @throws SmackException.NoResponseException if the server does not respond
 */
public boolean multiUserChatSupportsOmemo(MultiUserChat multiUserChat)
        throws XMPPException.XMPPErrorException, SmackException.NotConnectedException, InterruptedException,
        SmackException.NoResponseException {
    // Query the room's disco info and check both prerequisites.
    RoomInfo roomInfo = MultiUserChatManager.getInstanceFor(connection())
            .getRoomInfo(multiUserChat.getRoom());
    return roomInfo.isNonanonymous() && roomInfo.isMembersOnly();
}
/**
 * Returns true, if the Server supports PEP.
 *
 * @param connection XMPPConnection
 * @param server domainBareJid of the server to test
 * @return true if server supports pep
 *
 * @throws XMPPException.XMPPErrorException if there was an XMPP error returned.
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 */
public static boolean serverSupportsOmemo(XMPPConnection connection, DomainBareJid server)
        throws XMPPException.XMPPErrorException, SmackException.NotConnectedException, InterruptedException,
        SmackException.NoResponseException {
    // PEP support is advertised as the PubSub feature in the server's disco info.
    ServiceDiscoveryManager discoveryManager = ServiceDiscoveryManager.getInstanceFor(connection);
    return discoveryManager.discoverInfo(server).containsFeature(PubSub.NAMESPACE);
}
/**
 * Return the fingerprint of our identity key.
 *
 * @return our own OMEMO fingerprint
 *
 * @throws SmackException.NotLoggedInException if we don't know our bareJid yet.
 * @throws CorruptedOmemoKeyException if our identityKey is corrupted.
 * @throws IOException if an I/O error occurred.
 */
public synchronized OmemoFingerprint getOwnFingerprint()
        throws SmackException.NotLoggedInException, CorruptedOmemoKeyException, IOException {
    // Without a known bare JID we cannot determine our own device.
    if (getOwnJid() == null) {
        throw new SmackException.NotLoggedInException();
    }
    OmemoDevice ownDevice = getOwnDevice();
    return getOmemoService().getOmemoStoreBackend().getFingerprint(ownDevice);
}
/**
 * Get the fingerprint of a contacts device.
 *
 * @param device contacts OmemoDevice
 * @return fingerprint of the given OMEMO device.
 *
 * @throws CannotEstablishOmemoSessionException if we have no session yet, and are unable to create one.
 * @throws SmackException.NotLoggedInException if the XMPP connection is not authenticated.
 * @throws CorruptedOmemoKeyException if the copy of the fingerprint we have is corrupted.
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 * @throws IOException if an I/O error occurred.
 */
public synchronized OmemoFingerprint getFingerprint(OmemoDevice device)
        throws CannotEstablishOmemoSessionException, SmackException.NotLoggedInException,
        CorruptedOmemoKeyException, SmackException.NotConnectedException, InterruptedException,
        SmackException.NoResponseException, IOException {
    if (getOwnJid() == null) {
        throw new SmackException.NotLoggedInException();
    }
    // Our own fingerprint is read directly from the store; no session is needed.
    if (device.equals(getOwnDevice())) {
        return getOwnFingerprint();
    }
    LoggedInOmemoManager guard = new LoggedInOmemoManager(this);
    return getOmemoService().getOmemoStoreBackend()
            .getFingerprintAndMaybeBuildSession(guard, device);
}
/**
 * Return all OmemoFingerprints of active devices of a contact.
 * TODO: Make more fail-safe
 *
 * @param contact contact
 * @return Map of all active devices of the contact and their fingerprints.
 *
 * @throws SmackException.NotLoggedInException if the XMPP connection is not authenticated.
 * @throws CorruptedOmemoKeyException if the OMEMO key is corrupted.
 * @throws CannotEstablishOmemoSessionException if no OMEMO session could be established.
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 * @throws IOException if an I/O error occurred.
 */
public synchronized HashMap<OmemoDevice, OmemoFingerprint> getActiveFingerprints(BareJid contact)
        throws SmackException.NotLoggedInException, CorruptedOmemoKeyException,
        CannotEstablishOmemoSessionException, SmackException.NotConnectedException, InterruptedException,
        SmackException.NoResponseException, IOException {
    if (getOwnJid() == null) {
        throw new SmackException.NotLoggedInException();
    }
    HashMap<OmemoDevice, OmemoFingerprint> result = new HashMap<>();
    // Work from the locally cached device list; only active devices are considered.
    OmemoCachedDeviceList cachedList = getOmemoService().getOmemoStoreBackend()
            .loadCachedDeviceList(getOwnDevice(), contact);
    for (int activeId : cachedList.getActiveDevices()) {
        OmemoDevice activeDevice = new OmemoDevice(contact, activeId);
        OmemoFingerprint fingerprint = getFingerprint(activeDevice);
        // Devices without an obtainable fingerprint are simply skipped.
        if (fingerprint != null) {
            result.put(activeDevice, fingerprint);
        }
    }
    return result;
}
/**
 * Add an OmemoMessageListener. This listener will be informed about incoming OMEMO messages
 * (as well as KeyTransportMessages) and OMEMO encrypted message carbons.
 *
 * @param listener OmemoMessageListener
 */
public void addOmemoMessageListener(OmemoMessageListener listener) {
    omemoMessageListeners.add(listener);
}
/**
 * Remove an OmemoMessageListener previously registered via
 * {@link #addOmemoMessageListener(OmemoMessageListener)}.
 *
 * @param listener OmemoMessageListener
 */
public void removeOmemoMessageListener(OmemoMessageListener listener) {
    omemoMessageListeners.remove(listener);
}
/**
 * Add an OmemoMucMessageListener. This listener will be informed about incoming OMEMO encrypted MUC messages.
 *
 * @param listener OmemoMucMessageListener to register.
 */
public void addOmemoMucMessageListener(OmemoMucMessageListener listener) {
    omemoMucMessageListeners.add(listener);
}
/**
 * Remove an OmemoMucMessageListener previously registered via
 * {@link #addOmemoMucMessageListener(OmemoMucMessageListener)}.
 *
 * @param listener OmemoMucMessageListener
 */
public void removeOmemoMucMessageListener(OmemoMucMessageListener listener) {
    omemoMucMessageListeners.remove(listener);
}
/**
 * Request a deviceList update from contact contact.
 *
 * @param contact contact we want to obtain the deviceList from.
 *
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws PubSubException.NotALeafNodeException if a PubSub leaf node operation was attempted on a non-leaf node.
 * @throws XMPPException.XMPPErrorException if there was an XMPP error returned.
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 * @throws IOException if an I/O error occurred.
 */
public synchronized void requestDeviceListUpdateFor(BareJid contact)
        throws InterruptedException, PubSubException.NotALeafNodeException, XMPPException.XMPPErrorException,
        SmackException.NotConnectedException, SmackException.NoResponseException, IOException {
    // Refreshing via the service updates the locally cached device list as a side effect.
    getOmemoService().refreshDeviceList(connection(), getOwnDevice(), contact);
}
/**
 * Publish a new device list with just our own deviceId in it.
 *
 * @throws SmackException.NotLoggedInException if the XMPP connection is not authenticated.
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws XMPPException.XMPPErrorException if there was an XMPP error returned.
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 * @throws IOException if an I/O error occurred.
 * @throws PubSubException.NotALeafNodeException if a PubSub leaf node operation was attempted on a non-leaf node.
 */
public void purgeDeviceList()
        throws SmackException.NotLoggedInException, InterruptedException, XMPPException.XMPPErrorException,
        SmackException.NotConnectedException, SmackException.NoResponseException, IOException, PubSubException.NotALeafNodeException {
    // The guard ensures we know our own bare JID before touching the published list.
    getOmemoService().purgeDeviceList(new LoggedInOmemoManager(this));
}
/**
 * Delete our OMEMO data from the server: refresh the device list, delete and clear the bundle
 * node of every device id found in the cached list, then clear and delete the device list node
 * itself. Failures are collected rather than aborting, so the purge proceeds as far as possible.
 *
 * @return list of exceptions that occurred along the way (empty if everything succeeded)
 *
 * @throws NotConnectedException if the XMPP connection is not connected.
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws IOException if an I/O error occurred.
 */
public List<Exception> purgeEverything() throws NotConnectedException, InterruptedException, IOException {
    List<Exception> exceptions = new ArrayList<>(5);
    PubSubManager pm = PubSubManager.getInstanceFor(getConnection(), getOwnJid());
    try {
        // Refresh first so the cached list below reflects the server's current state.
        requestDeviceListUpdateFor(getOwnJid());
    } catch (SmackException.NoResponseException | PubSubException.NotALeafNodeException
            | XMPPException.XMPPErrorException e) {
        exceptions.add(e);
    }
    // NOTE(review): this uses the static OmemoService.getInstance() while other methods use
    // getOmemoService(); confirm both refer to the same service instance.
    OmemoCachedDeviceList deviceList = OmemoService.getInstance().getOmemoStoreBackend()
            .loadCachedDeviceList(getOwnDevice(), getOwnJid());
    // All devices (not just active ones) are purged.
    for (int id : deviceList.getAllDevices()) {
        try {
            pm.getLeafNode(OmemoConstants.PEP_NODE_BUNDLE_FROM_DEVICE_ID(id)).deleteAllItems();
        } catch (SmackException.NoResponseException | PubSubException.NotALeafNodeException
                | XMPPException.XMPPErrorException | PubSubException.NotAPubSubNodeException e) {
            exceptions.add(e);
        }
        try {
            pm.deleteNode(OmemoConstants.PEP_NODE_BUNDLE_FROM_DEVICE_ID(id));
        } catch (SmackException.NoResponseException | XMPPException.XMPPErrorException e) {
            exceptions.add(e);
        }
    }
    try {
        pm.getLeafNode(OmemoConstants.PEP_NODE_DEVICE_LIST).deleteAllItems();
    } catch (SmackException.NoResponseException | PubSubException.NotALeafNodeException
            | XMPPException.XMPPErrorException | PubSubException.NotAPubSubNodeException e) {
        exceptions.add(e);
    }
    try {
        pm.deleteNode(OmemoConstants.PEP_NODE_DEVICE_LIST);
    } catch (SmackException.NoResponseException | XMPPException.XMPPErrorException e) {
        exceptions.add(e);
    }
    return exceptions;
}
/**
 * Rotate the signedPreKey published in our OmemoBundle and republish it. This should be done every now and
 * then (7-14 days). The old signedPreKey should be kept for some more time (a month or so) to enable decryption
 * of messages that have been sent since the key was changed.
 *
 * @throws CorruptedOmemoKeyException When the IdentityKeyPair is damaged.
 * @throws InterruptedException XMPP error
 * @throws XMPPException.XMPPErrorException XMPP error
 * @throws SmackException.NotConnectedException XMPP error
 * @throws SmackException.NoResponseException XMPP error
 * @throws SmackException.NotLoggedInException if the XMPP connection is not authenticated.
 * @throws IOException if an I/O error occurred.
 * @throws PubSubException.NotALeafNodeException if a PubSub leaf node operation was attempted on a non-leaf node.
 */
public synchronized void rotateSignedPreKey()
        throws CorruptedOmemoKeyException, SmackException.NotLoggedInException, XMPPException.XMPPErrorException,
        SmackException.NotConnectedException, InterruptedException, SmackException.NoResponseException,
        IOException, PubSubException.NotALeafNodeException {
    if (!connection().isAuthenticated()) {
        throw new SmackException.NotLoggedInException();
    }
    // Generate a fresh signedPreKey in the store, ...
    getOmemoService().getOmemoStoreBackend().changeSignedPreKey(getOwnDevice());
    // ... then repack our bundle and publish the updated version.
    OmemoBundleElement freshBundle = getOmemoService().getOmemoStoreBackend().packOmemoBundle(getOwnDevice());
    OmemoService.publishBundle(connection(), getOwnDevice(), freshBundle);
}
/**
 * Return true, if the given Stanza contains an OMEMO element 'encrypted'.
 *
 * @param stanza stanza
 * @return true if stanza has extension 'encrypted'
 */
static boolean stanzaContainsOmemoElement(Stanza stanza) {
    // Looks for the 'encrypted' extension element in the legacy axolotl OMEMO namespace.
    return stanza.hasExtension(OmemoElement.NAME_ENCRYPTED, OMEMO_NAMESPACE_V_AXOLOTL);
}
/**
 * Throw an IllegalStateException if no OmemoService is set.
 * Used as a guard by {@link #getOmemoService()} before handing out the service.
 */
private void throwIfNoServiceSet() {
    if (service == null) {
        throw new IllegalStateException("No OmemoService set in OmemoManager.");
    }
}
/**
 * Returns a pseudo random number from the interval [1, Integer.MAX_VALUE - 1].
 * (nextInt(Integer.MAX_VALUE - 1) yields values in [0, Integer.MAX_VALUE - 2];
 * adding 1 shifts the range so 0 — an invalid deviceId — can never be returned.)
 *
 * @return a random deviceId.
 */
public static int randomDeviceId() {
    return new Random().nextInt(Integer.MAX_VALUE - 1) + 1;
}
/**
 * Return the BareJid of the user, lazily caching it from the connection once authenticated.
 * Returns null while the connection is not yet authenticated and no JID has been cached.
 *
 * NOTE(review): unlike most accessors in this class this method is not synchronized although
 * it lazily writes {@code ownJid} — confirm this is safe for concurrent callers.
 *
 * @return our own bare JID, or null if not yet known.
 */
public BareJid getOwnJid() {
    if (ownJid == null && connection().isAuthenticated()) {
        ownJid = connection().getUser().asBareJid();
    }
    return ownJid;
}
/**
 * Return the deviceId of this OmemoManager.
 *
 * @return this OmemoManagers deviceId.
 */
public synchronized Integer getDeviceId() {
    return deviceId;
}
/**
 * Return the OmemoDevice of the user, or null if our bare JID is not yet known.
 *
 * @return our own OmemoDevice
 */
public synchronized OmemoDevice getOwnDevice() {
    BareJid ownBareJid = getOwnJid();
    // Without a bare JID there is no device to describe.
    return ownBareJid == null ? null : new OmemoDevice(ownBareJid, getDeviceId());
}
/**
 * Set the deviceId of the manager to nDeviceId and re-key this instance in the
 * per-connection instance map so lookups by deviceId keep working.
 *
 * @param nDeviceId new deviceId
 */
synchronized void setDeviceId(int nDeviceId) {
    // Move this instance inside the HashMaps
    INSTANCES.get(connection()).remove(getDeviceId());
    INSTANCES.get(connection()).put(nDeviceId, this);
    this.deviceId = nDeviceId;
}
/**
 * Notify all registered OmemoMessageListeners about a received OmemoMessage.
 *
 * @param stanza original stanza
 * @param decryptedMessage decrypted OmemoMessage.
 */
void notifyOmemoMessageReceived(Stanza stanza, OmemoMessage.Received decryptedMessage) {
    for (OmemoMessageListener listener : omemoMessageListeners) {
        listener.onOmemoMessageReceived(stanza, decryptedMessage);
    }
}
/**
 * Notify all registered OmemoMucMessageListeners of an incoming OmemoMessageElement in a MUC.
 *
 * @param muc MultiUserChat the message was received in.
 * @param stanza Original Stanza.
 * @param decryptedMessage Decrypted OmemoMessage.
 */
void notifyOmemoMucMessageReceived(MultiUserChat muc,
                                   Stanza stanza,
                                   OmemoMessage.Received decryptedMessage) {
    for (OmemoMucMessageListener listener : omemoMucMessageListeners) {
        listener.onOmemoMucMessageReceived(muc, stanza, decryptedMessage);
    }
}
/**
 * Notify all registered OmemoMessageListeners of an incoming OMEMO encrypted Carbon Copy.
 * Remember: If you want to receive OMEMO encrypted carbon copies, you have to enable carbons using
 * {@link CarbonManager#enableCarbons()}.
 *
 * @param direction direction of the carbon copy
 * @param carbonCopy carbon copy itself
 * @param wrappingMessage wrapping message
 * @param decryptedCarbonCopy decrypted carbon copy OMEMO element
 */
void notifyOmemoCarbonCopyReceived(CarbonExtension.Direction direction,
                                   Message carbonCopy,
                                   Message wrappingMessage,
                                   OmemoMessage.Received decryptedCarbonCopy) {
    for (OmemoMessageListener listener : omemoMessageListeners) {
        listener.onOmemoCarbonCopyReceived(direction, carbonCopy, wrappingMessage, decryptedCarbonCopy);
    }
}
/**
 * Register stanza listeners needed for OMEMO.
 * This method is called automatically in the constructor and should only be used to restore the previous state
 * after {@link #stopStanzaAndPEPListeners()} was called.
 *
 * NOTE(review): each {@code this::method} expression evaluates to a new object, so the remove
 * calls below only take effect if the listener collections match by equals/identity of that
 * reference — confirm they actually prevent duplicate registration.
 */
public void resumeStanzaAndPEPListeners() {
    CarbonManager carbonManager = CarbonManager.getInstanceFor(connection());
    // Remove listeners to avoid them getting added twice
    connection().removeAsyncStanzaListener(this::internalOmemoMessageStanzaListener);
    carbonManager.removeCarbonCopyReceivedListener(this::internalOmemoCarbonCopyListener);
    // Add listeners
    pepManager.addPepEventListener(OmemoConstants.PEP_NODE_DEVICE_LIST, OmemoDeviceListElement.class, pepOmemoDeviceListEventListener);
    connection().addAsyncStanzaListener(this::internalOmemoMessageStanzaListener, OmemoManager::isOmemoMessage);
    carbonManager.addCarbonCopyReceivedListener(this::internalOmemoCarbonCopyListener);
}
/**
 * Remove active stanza listeners needed for OMEMO.
 *
 * NOTE(review): the {@code this::method} references here are new objects each time; confirm the
 * remove calls match the listeners registered in {@link #resumeStanzaAndPEPListeners()}.
 */
public void stopStanzaAndPEPListeners() {
    pepManager.removePepEventListener(pepOmemoDeviceListEventListener);
    connection().removeAsyncStanzaListener(this::internalOmemoMessageStanzaListener);
    CarbonManager.getInstanceFor(connection()).removeCarbonCopyReceivedListener(this::internalOmemoCarbonCopyListener);
}
/**
 * Build a fresh session with a contacts device.
 * This might come in handy if a session is broken.
 *
 * @param contactsDevice OmemoDevice of a contact.
 *
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 * @throws CorruptedOmemoKeyException if our or their identityKey is corrupted.
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws CannotEstablishOmemoSessionException if no new session can be established.
 * @throws SmackException.NotLoggedInException if the connection is not authenticated.
 */
public void rebuildSessionWith(OmemoDevice contactsDevice)
        throws InterruptedException, SmackException.NoResponseException, CorruptedOmemoKeyException,
        SmackException.NotConnectedException, CannotEstablishOmemoSessionException,
        SmackException.NotLoggedInException {
    // Building a session requires an authenticated connection.
    if (!connection().isAuthenticated()) {
        throw new SmackException.NotLoggedInException();
    }
    getOmemoService().buildFreshSessionWithDevice(connection(), getOwnDevice(), contactsDevice);
}
/**
 * Get our connection.
 *
 * @return the connection of this manager
 */
XMPPConnection getConnection() {
    return connection();
}
/**
 * Return the OMEMO service object.
 *
 * @return the OmemoService object related to this OmemoManager.
 * @throws IllegalStateException if no OmemoService has been set (see throwIfNoServiceSet).
 */
OmemoService<?, ?, ?, ?, ?, ?, ?, ?, ?> getOmemoService() {
    throwIfNoServiceSet();
    return service;
}
/**
 * StanzaListener that listens for incoming Stanzas which contain OMEMO elements.
 * Processing happens asynchronously; failures are logged instead of propagated.
 */
private void internalOmemoMessageStanzaListener(final Stanza packet) {
    Async.go(() -> {
        try {
            getOmemoService().onOmemoMessageStanzaReceived(packet,
                    new LoggedInOmemoManager(OmemoManager.this));
        } catch (SmackException.NotLoggedInException | IOException e) {
            LOGGER.log(Level.SEVERE, "Exception while processing OMEMO stanza", e);
        }
    });
}
/**
 * CarbonCopyListener that listens for incoming carbon copies which contain OMEMO elements.
 * Non-OMEMO carbon copies are ignored; processing happens asynchronously and failures are logged.
 */
private void internalOmemoCarbonCopyListener(final CarbonExtension.Direction direction,
                                             final Message carbonCopy,
                                             final Message wrappingMessage) {
    Async.go(() -> {
        if (!isOmemoMessage(carbonCopy)) {
            return;
        }
        try {
            getOmemoService().onOmemoCarbonCopyReceived(direction, carbonCopy, wrappingMessage,
                    new LoggedInOmemoManager(OmemoManager.this));
        } catch (SmackException.NotLoggedInException | IOException e) {
            LOGGER.log(Level.SEVERE, "Exception while processing OMEMO stanza", e);
        }
    });
}
// PEP listener for device-list updates. Merges every received list into the local cache;
// for updates concerning our OWN list it additionally cleans the list up and, if the cleaned
// list differs from what was received, republishes it asynchronously.
@SuppressWarnings("UnnecessaryLambda")
private final PepEventListener<OmemoDeviceListElement> pepOmemoDeviceListEventListener =
        (from, receivedDeviceList, id, message) -> {
            // Device List <list>
            OmemoCachedDeviceList deviceList;
            try {
                // Always merge the received list into the cache, regardless of sender.
                getOmemoService().getOmemoStoreBackend().mergeCachedDeviceList(getOwnDevice(), from,
                        receivedDeviceList);
                // Only our own device list is subject to clean-up and republishing below.
                if (!from.asBareJid().equals(getOwnJid())) {
                    return;
                }
                deviceList = getOmemoService().cleanUpDeviceList(getOwnDevice());
            } catch (IOException e) {
                LOGGER.log(Level.SEVERE,
                        "IOException while processing OMEMO PEP device updates. Message: " + message,
                        e);
                return;
            }
            final OmemoDeviceListElement_VAxolotl newDeviceList = new OmemoDeviceListElement_VAxolotl(deviceList);
            // Republish only when the cleaned list actually differs from the received one.
            if (!newDeviceList.copyDeviceIds().equals(receivedDeviceList.copyDeviceIds())) {
                LOGGER.log(Level.FINE, "Republish deviceList due to changes:" +
                        " Received: " + Arrays.toString(receivedDeviceList.copyDeviceIds().toArray()) +
                        " Published: " + Arrays.toString(newDeviceList.copyDeviceIds().toArray()));
                Async.go(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            OmemoService.publishDeviceList(connection(), newDeviceList);
                        } catch (InterruptedException | XMPPException.XMPPErrorException |
                                SmackException.NotConnectedException | SmackException.NoResponseException | PubSubException.NotALeafNodeException e) {
                            // Best effort: a failed republish is logged, not retried.
                            LOGGER.log(Level.WARNING, "Could not publish our deviceList upon an received update.", e);
                        }
                    }
                });
            }
        };
/**
 * StanzaFilter that filters messages containing a OMEMO element.
 * Only Message stanzas carrying the 'encrypted' extension qualify.
 */
private static boolean isOmemoMessage(Stanza stanza) {
    return stanza instanceof Message && OmemoManager.stanzaContainsOmemoElement(stanza);
}
/**
 * Guard class which ensures that the wrapped OmemoManager knows its BareJid.
 */
public static class LoggedInOmemoManager {
    private final OmemoManager manager;
    /**
     * Wrap the given manager, resolving its bare JID from the connection if necessary.
     *
     * @param manager manager to wrap, must not be null.
     * @throws SmackException.NotLoggedInException if the JID is unknown and the connection
     *         is not authenticated.
     */
    public LoggedInOmemoManager(OmemoManager manager)
            throws SmackException.NotLoggedInException {
        if (manager == null) {
            throw new IllegalArgumentException("OmemoManager cannot be null.");
        }
        if (manager.getOwnJid() == null) {
            // Without authentication we cannot learn the JID — refuse to wrap.
            if (!manager.getConnection().isAuthenticated()) {
                throw new SmackException.NotLoggedInException();
            }
            manager.ownJid = manager.getConnection().getUser().asBareJid();
        }
        this.manager = manager;
    }
    /** Return the wrapped, JID-aware OmemoManager. */
    public OmemoManager get() {
        return manager;
    }
}
/**
 * Callback which can be used to get notified, when the OmemoManager finished initializing.
 */
public interface InitializationFinishedCallback {
    // Called when initialization completed successfully.
    void initializationFinished(OmemoManager manager);
    // Called when initialization failed; 'cause' carries the failure reason.
    void initializationFailed(Exception cause);
}
/**
 * Get the bareJid of the user from the authenticated XMPP connection.
 * If our deviceId is unknown, use the bareJid to look up deviceIds available in the omemoStore.
 * If there are ids available, choose the smallest one. Otherwise generate a random deviceId.
 *
 * @param manager OmemoManager
 */
private static void initBareJidAndDeviceId(OmemoManager manager) {
    if (!manager.getConnection().isAuthenticated()) {
        throw new IllegalStateException("Connection MUST be authenticated.");
    }
    if (manager.ownJid == null) {
        manager.ownJid = manager.getConnection().getUser().asBareJid();
    }
    if (UNKNOWN_DEVICE_ID.equals(manager.deviceId)) {
        SortedSet<Integer> storedDeviceIds =
                manager.getOmemoService().getOmemoStoreBackend().localDeviceIdsOf(manager.ownJid);
        // Prefer the smallest previously stored id; fall back to a random one.
        manager.setDeviceId(storedDeviceIds.isEmpty() ? randomDeviceId() : storedDeviceIds.first());
    }
}
}
| igniterealtime/Smack | smack-omemo/src/main/java/org/jivesoftware/smackx/omemo/OmemoManager.java | Java | apache-2.0 | 49,782 |
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.support.vectordrawable.app;
import android.os.Bundle;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.SeekBar;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.vectordrawable.graphics.drawable.SeekableAnimatedVectorDrawable;
import com.example.android.support.vectordrawable.R;
/**
* Demonstrates usage of {@link SeekableAnimatedVectorDrawable}.
*/
public class SeekableDemo extends AppCompatActivity {
    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.seekable_demo);
        final ImageView image = findViewById(R.id.image);
        final Button start = findViewById(R.id.start);
        final Button stop = findViewById(R.id.stop);
        final SeekBar seekBar = findViewById(R.id.seek);
        // Inflate the seekable AVD from the animation resource; null means loading failed,
        // in which case the demo cannot run and the activity closes itself.
        final SeekableAnimatedVectorDrawable avd =
                SeekableAnimatedVectorDrawable.create(this, R.drawable.ic_hourglass_animation);
        if (avd == null) {
            finish();
            return;
        }
        // Keep the buttons and the seek bar in sync with the animation's state transitions.
        avd.registerAnimationCallback(new SeekableAnimatedVectorDrawable.AnimationCallback() {
            @Override
            public void onAnimationStart(@NonNull SeekableAnimatedVectorDrawable drawable) {
                onAnimationRunning();
            }
            @Override
            public void onAnimationEnd(@NonNull SeekableAnimatedVectorDrawable drawable) {
                // Back to the initial state: only "start" usable, progress reset to 0.
                start.setEnabled(true);
                start.setText(R.string.start);
                stop.setEnabled(false);
                seekBar.setProgress(0);
            }
            @Override
            public void onAnimationPause(@NonNull SeekableAnimatedVectorDrawable drawable) {
                // Paused: offer "resume", keep "stop" available.
                start.setEnabled(true);
                start.setText(R.string.resume);
                stop.setEnabled(true);
            }
            @Override
            public void onAnimationResume(@NonNull SeekableAnimatedVectorDrawable drawable) {
                onAnimationRunning();
            }
            // Shared UI state for both "started" and "resumed".
            private void onAnimationRunning() {
                start.setEnabled(true);
                start.setText(R.string.pause);
                stop.setEnabled(true);
            }
            @Override
            public void onAnimationUpdate(@NonNull SeekableAnimatedVectorDrawable drawable) {
                // Reflect playback progress (in ms of play time) on the seek bar.
                seekBar.setProgress((int) drawable.getCurrentPlayTime());
            }
        });
        image.setImageDrawable(avd);
        seekBar.setMax((int) avd.getTotalDuration());
        // The start button cycles through start -> pause -> resume depending on current state.
        start.setOnClickListener((v) -> {
            if (!avd.isRunning()) {
                avd.start();
            } else if (!avd.isPaused()) {
                avd.pause();
            } else {
                avd.resume();
            }
        });
        stop.setOnClickListener((v) -> avd.stop());
        seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
            @Override
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                // Only seek in response to user drags, not to our own programmatic updates.
                if (fromUser) {
                    avd.setCurrentPlayTime(progress);
                }
            }
            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {
            }
            @Override
            public void onStopTrackingTouch(SeekBar seekBar) {
            }
        });
    }
}
| AndroidX/androidx | vectordrawable/integration-tests/testapp/src/main/java/com/example/android/support/vectordrawable/app/SeekableDemo.java | Java | apache-2.0 | 4,134 |
// (C) Copyright 2015 Moodle Pty Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { NgModule } from '@angular/core';
import { IonicPageModule } from 'ionic-angular';
import { TranslateModule } from '@ngx-translate/core';
import { CoreComponentsModule } from '@components/components.module';
import { CoreDirectivesModule } from '@directives/directives.module';
import { CorePipesModule } from '@pipes/pipes.module';
import { AddonModChatComponentsModule } from '../../components/components.module';
import { AddonModChatSessionsPage } from './sessions';
/**
 * Lazy-loaded module for the chat "sessions" page.
 *
 * Declares the page component and imports the core component/directive/pipe
 * modules plus the chat components module it renders, wiring the page into
 * Ionic's lazy-loading via IonicPageModule.forChild.
 */
@NgModule({
    declarations: [
        AddonModChatSessionsPage,
    ],
    imports: [
        CoreComponentsModule,
        CoreDirectivesModule,
        CorePipesModule,
        AddonModChatComponentsModule,
        IonicPageModule.forChild(AddonModChatSessionsPage),
        TranslateModule.forChild()
    ],
})
export class AddonModChatSessionsPageModule {}
| FMCorz/moodlemobile2 | src/addon/mod/chat/pages/sessions/sessions.module.ts | TypeScript | apache-2.0 | 1,440 |
/**
* Copyright (c) 2010 Yahoo! Inc. All rights reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.oozie.service;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.oozie.CoordinatorActionBean;
import org.apache.oozie.ErrorCode;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.command.CommandException;
import org.apache.oozie.command.coord.CoordActionCheckCommand;
import org.apache.oozie.command.coord.CoordActionCheckXCommand;
import org.apache.oozie.command.wf.ActionCheckCommand;
import org.apache.oozie.command.wf.ActionCheckXCommand;
import org.apache.oozie.executor.jpa.CoordActionsRunningGetJPAExecutor;
import org.apache.oozie.executor.jpa.JPAExecutorException;
import org.apache.oozie.executor.jpa.WorkflowActionsRunningGetJPAExecutor;
import org.apache.oozie.util.XCallable;
import org.apache.oozie.util.XLog;
/**
* The Action Checker Service queue ActionCheckCommands to check the status of
* running actions and CoordActionCheckCommands to check the status of
* coordinator actions. The delay between checks on the same action can be
* configured.
*/
public class ActionCheckerService implements Service {

    public static final String CONF_PREFIX = Service.CONF_PREFIX + "ActionCheckerService.";
    /**
     * The frequency at which the ActionCheckService will run.
     */
    public static final String CONF_ACTION_CHECK_INTERVAL = CONF_PREFIX + "action.check.interval";
    /**
     * The time, in seconds, between an ActionCheck for the same action.
     */
    public static final String CONF_ACTION_CHECK_DELAY = CONF_PREFIX + "action.check.delay";
    /**
     * The number of callables to be queued in a batch.
     */
    public static final String CONF_CALLABLE_BATCH_SIZE = CONF_PREFIX + "callable.batch.size";

    protected static final String INSTRUMENTATION_GROUP = "actionchecker";
    protected static final String INSTR_CHECK_ACTIONS_COUNTER = "checks_wf_actions";
    protected static final String INSTR_CHECK_COORD_ACTIONS_COUNTER = "checks_coord_actions";

    // Whether the newer XCommand variants should be queued instead of the legacy
    // Command ones; resolved once from configuration in init().
    private static boolean useXCommand = true;

    /**
     * {@link ActionCheckRunnable} is the runnable which is scheduled to run and
     * queue Action checks.
     */
    static class ActionCheckRunnable implements Runnable {
        private int actionCheckDelay;
        private List<XCallable<Void>> callables;
        private StringBuilder msg = null;

        public ActionCheckRunnable(int actionCheckDelay) {
            this.actionCheckDelay = actionCheckDelay;
        }

        public void run() {
            XLog.Info.get().clear();
            XLog LOG = XLog.getLog(getClass());
            msg = new StringBuilder();
            try {
                runWFActionCheck();
                runCoordActionCheck();
            }
            catch (CommandException ce) {
                LOG.error("Unable to run action checks, ", ce);
            }
            LOG.debug("QUEUING [{0}] for potential checking", msg.toString());
            if (null != callables) {
                // Flush the last, possibly partial, batch of callables.
                queueBatch(callables);
                callables = null;
            }
        }

        /**
         * Check running workflow actions and queue a check command for each one.
         *
         * @throws CommandException if the JPA service is unavailable or the query fails.
         */
        private void runWFActionCheck() throws CommandException {
            JPAService jpaService = Services.get().get(JPAService.class);
            if (jpaService == null) {
                throw new CommandException(ErrorCode.E0610);
            }

            List<WorkflowActionBean> actions;
            try {
                actions = jpaService
                        .execute(new WorkflowActionsRunningGetJPAExecutor(actionCheckDelay));
            }
            catch (JPAExecutorException je) {
                throw new CommandException(je);
            }

            if (actions == null || actions.isEmpty()) {
                return;
            }
            msg.append(" WF_ACTIONS : ").append(actions.size());

            for (WorkflowActionBean action : actions) {
                Services.get().get(InstrumentationService.class).get().incr(INSTRUMENTATION_GROUP,
                        INSTR_CHECK_ACTIONS_COUNTER, 1);
                if (useXCommand) {
                    queueCallable(new ActionCheckXCommand(action.getId()));
                }
                else {
                    queueCallable(new ActionCheckCommand(action.getId()));
                }
            }
        }

        /**
         * Check running coordinator actions and queue a check command for each one.
         *
         * @throws CommandException if the JPA service is unavailable or the query fails.
         */
        private void runCoordActionCheck() throws CommandException {
            JPAService jpaService = Services.get().get(JPAService.class);
            if (jpaService == null) {
                throw new CommandException(ErrorCode.E0610);
            }

            List<CoordinatorActionBean> cactions;
            try {
                cactions = jpaService.execute(new CoordActionsRunningGetJPAExecutor(
                        actionCheckDelay));
            }
            catch (JPAExecutorException je) {
                throw new CommandException(je);
            }

            if (cactions == null || cactions.isEmpty()) {
                return;
            }
            msg.append(" COORD_ACTIONS : ").append(cactions.size());

            for (CoordinatorActionBean caction : cactions) {
                Services.get().get(InstrumentationService.class).get().incr(INSTRUMENTATION_GROUP,
                        INSTR_CHECK_COORD_ACTIONS_COUNTER, 1);
                if (useXCommand) {
                    queueCallable(new CoordActionCheckXCommand(caction.getId(), actionCheckDelay));
                }
                else {
                    queueCallable(new CoordActionCheckCommand(caction.getId(), actionCheckDelay));
                }
            }
        }

        /**
         * Adds callables to a list. If the number of callables in the list
         * reaches {@link ActionCheckerService#CONF_CALLABLE_BATCH_SIZE}, the
         * entire batch is queued and the callables list is reset.
         *
         * @param callable the callable to queue.
         */
        private void queueCallable(XCallable<Void> callable) {
            if (callables == null) {
                callables = new ArrayList<XCallable<Void>>();
            }
            callables.add(callable);
            if (callables.size() == Services.get().getConf().getInt(CONF_CALLABLE_BATCH_SIZE, 10)) {
                queueBatch(callables);
                callables = new ArrayList<XCallable<Void>>();
            }
        }

        /**
         * Queues a batch of callables for serial execution, logging a warning
         * when the command queue rejects them (most likely because it is full).
         *
         * @param batch the callables to queue as a single serial unit.
         */
        private void queueBatch(List<XCallable<Void>> batch) {
            CallableQueueService queueService = Services.get().get(CallableQueueService.class);
            if (!queueService.queueSerial(batch)) {
                XLog.getLog(getClass()).warn(
                        "Unable to queue the callables commands for CheckerService. "
                                + "Most possibly command queue is full. Queue size is :"
                                + queueService.queueSize());
            }
        }
    }

    /**
     * Initializes the Action Check service.
     *
     * @param services services instance.
     */
    @Override
    public void init(Services services) {
        Configuration conf = services.getConf();
        Runnable actionCheckRunnable = new ActionCheckRunnable(conf.getInt(CONF_ACTION_CHECK_DELAY, 600));
        services.get(SchedulerService.class).schedule(actionCheckRunnable, 10,
                conf.getInt(CONF_ACTION_CHECK_INTERVAL, 60), SchedulerService.Unit.SEC);
        if (!Services.get().getConf().getBoolean(USE_XCOMMAND, true)) {
            useXCommand = false;
        }
    }

    /**
     * Destroy the Action Checker Services.
     */
    @Override
    public void destroy() {
    }

    /**
     * Return the public interface for the action checker service.
     *
     * @return {@link ActionCheckerService}.
     */
    @Override
    public Class<? extends Service> getInterface() {
        return ActionCheckerService.class;
    }
}
| sunmeng007/oozie | core/src/main/java/org/apache/oozie/service/ActionCheckerService.java | Java | apache-2.0 | 9,013 |
package com.google.api.ads.dfp.jaxws.v201508;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
/**
*
* The content partner related validation errors.
*
*
* <p>Java class for ContentPartnerError complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="ContentPartnerError">
* <complexContent>
* <extension base="{https://www.google.com/apis/ads/publisher/v201508}ApiError">
* <sequence>
* <element name="reason" type="{https://www.google.com/apis/ads/publisher/v201508}ContentPartnerError.Reason" minOccurs="0"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ContentPartnerError", propOrder = {
    "reason"
})
public class ContentPartnerError
    extends ApiError
{

    // The specific reason for the error; may be null when the service
    // omits it from the response. (JAXB-style generated bean; keep the
    // field/accessor shape unchanged.)
    @XmlSchemaType(name = "string")
    protected ContentPartnerErrorReason reason;

    /**
     * Gets the value of the reason property.
     * 
     * @return
     *     possible object is
     *     {@link ContentPartnerErrorReason }
     *     
     */
    public ContentPartnerErrorReason getReason() {
        return reason;
    }

    /**
     * Sets the value of the reason property.
     * 
     * @param value
     *     allowed object is
     *     {@link ContentPartnerErrorReason }
     *     
     */
    public void setReason(ContentPartnerErrorReason value) {
        this.reason = value;
    }

}
| shyTNT/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201508/ContentPartnerError.java | Java | apache-2.0 | 1,711 |
<?php namespace Neomerx\JsonApi\Parameters\Headers;
/**
* Copyright 2015 info@neomerx.com (www.neomerx.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use \InvalidArgumentException;
use \Neomerx\JsonApi\Contracts\Parameters\Headers\MediaTypeInterface;
/**
* @package Neomerx\JsonApi
*/
class MediaType implements MediaTypeInterface
{
    /**
     * Primary type (e.g. 'application').
     *
     * @var string
     */
    private $type;

    /**
     * Sub-type (e.g. 'json').
     *
     * @var string
     */
    private $subType;

    /**
     * Full media type string, "type/subType".
     *
     * @var string
     */
    private $mediaType;

    /**
     * @var array<string,string>|null
     */
    private $parameters;

    /**
     * @param string                    $type
     * @param string                    $subType
     * @param array<string,string>|null $parameters
     */
    public function __construct($type, $subType, $parameters = null)
    {
        $type = trim($type);
        if (empty($type) === true) {
            throw new InvalidArgumentException('type');
        }

        $subType = trim($subType);
        if (empty($subType) === true) {
            throw new InvalidArgumentException('subType');
        }

        if ($parameters !== null && is_array($parameters) === false) {
            throw new InvalidArgumentException('parameters');
        }

        $this->type       = $type;
        $this->subType    = $subType;
        $this->mediaType  = $type . '/' . $subType;
        $this->parameters = $parameters;
    }

    /**
     * @inheritdoc
     */
    public function getType()
    {
        return $this->type;
    }

    /**
     * @inheritdoc
     */
    public function getSubType()
    {
        return $this->subType;
    }

    /**
     * @inheritdoc
     */
    public function getMediaType()
    {
        return $this->mediaType;
    }

    /**
     * @inheritdoc
     */
    public function getParameters()
    {
        return $this->parameters;
    }

    /**
     * @inheritdoc
     */
    public function matchesTo(MediaTypeInterface $mediaType)
    {
        return
            $this->isTypeMatches($mediaType) &&
            $this->isSubTypeMatches($mediaType) &&
            $this->isMediaParametersEqual($mediaType);
    }

    /**
     * @inheritdoc
     */
    public function equalsTo(MediaTypeInterface $mediaType)
    {
        return
            $this->isTypeEquals($mediaType) &&
            $this->isSubTypeEquals($mediaType) &&
            $this->isMediaParametersEqual($mediaType);
    }

    /**
     * Parse media type.
     *
     * @param int    $position  Unused; kept for signature compatibility with callers.
     * @param string $mediaType e.g. 'application/json;charset=utf-8'
     *
     * @return MediaType
     */
    public static function parse($position, $mediaType)
    {
        $fields = explode(';', $mediaType);

        if (strpos($fields[0], '/') === false) {
            throw new InvalidArgumentException('mediaType');
        }

        list($type, $subType) = explode('/', $fields[0], 2);

        // Everything after the first ';' is a list of 'name=value' parameters.
        $parameters = null;
        $count      = count($fields);
        for ($idx = 1; $idx < $count; ++$idx) {
            if (strpos($fields[$idx], '=') === false) {
                throw new InvalidArgumentException('mediaType');
            }
            list($key, $value) = explode('=', $fields[$idx], 2);
            $parameters[trim($key)] = trim($value, ' "');
        }

        return new MediaType($type, $subType, $parameters);
    }

    /**
     * @param MediaTypeInterface $mediaType
     *
     * @return bool True when types are identical or the other type is the '*' wildcard.
     */
    private function isTypeMatches(MediaTypeInterface $mediaType)
    {
        return $this->getType() === $mediaType->getType() || $mediaType->getType() === '*';
    }

    /**
     * @param MediaTypeInterface $mediaType
     *
     * @return bool
     */
    private function isTypeEquals(MediaTypeInterface $mediaType)
    {
        return $this->getType() === $mediaType->getType();
    }

    /**
     * @param MediaTypeInterface $mediaType
     *
     * @return bool True when sub-types are identical or the other sub-type is the '*' wildcard.
     */
    private function isSubTypeMatches(MediaTypeInterface $mediaType)
    {
        return $this->getSubType() === $mediaType->getSubType() || $mediaType->getSubType() === '*';
    }

    /**
     * @param MediaTypeInterface $mediaType
     *
     * @return bool
     */
    private function isSubTypeEquals(MediaTypeInterface $mediaType)
    {
        return $this->getSubType() === $mediaType->getSubType();
    }

    /**
     * Compare media type parameters as name => value pairs.
     *
     * @param MediaTypeInterface $mediaType
     *
     * @return bool
     */
    private function isMediaParametersEqual(MediaTypeInterface $mediaType)
    {
        if ($this->getParameters() === null && $mediaType->getParameters() === null) {
            return true;
        } elseif ($this->getParameters() !== null && $mediaType->getParameters() !== null) {
            // array_intersect_assoc compares key => value pairs. Plain
            // array_intersect matches on values only, which would treat e.g.
            // ['charset' => 'x'] and ['version' => 'x'] as equal. Requiring
            // equal counts on both sides also rejects a superset of parameters
            // on the other media type.
            $parameters      = $this->getParameters();
            $otherParameters = $mediaType->getParameters();
            $intersect       = array_intersect_assoc($parameters, $otherParameters);

            return
                count($parameters) === count($otherParameters) &&
                count($parameters) === count($intersect);
        }

        return false;
    }
}
| creocoder/json-api | src/Parameters/Headers/MediaType.php | PHP | apache-2.0 | 5,426 |
/**
* Copyright 2017 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.github.ambry.store;
import com.github.ambry.utils.Pair;
import java.util.HashMap;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
/**
* Hold the data structures needed by {@link BlobStoreStats} to serve requests. The class also exposes helper methods
* used to modify and access the stored data structures.
*/
class ScanResults {
  // A NavigableMap that stores buckets for container valid data size. The key of the map is the end time of each
  // bucket and the value is the corresponding valid data size map. For example, there are two buckets with end time
  // t1 and t2. Bucket with end time t2 includes all events whose operation time is greater than or equal to t1 but
  // strictly less than t2.
  // Each bucket except for the very first one contains the delta in valid data size that occurred prior to the bucket
  // end time. The very first bucket's end time is the forecast start time for containers and it contains the valid data
  // size map at the forecast start time. The very first bucket is used as a base value, requested valid data size is
  // computed by applying the deltas from appropriate buckets on the base value.
  // Outer map key: serviceId (account); inner map key: containerId; value: valid size delta in bytes.
  private final NavigableMap<Long, Map<String, Map<String, Long>>> containerBuckets = new TreeMap<>();
  // A NavigableMap that stores buckets for log segment valid data size. The rest of the structure is similar
  // to containerBuckets. Inner map key: log segment name, ordered by LogSegmentNameHelper.COMPARATOR.
  private final NavigableMap<Long, NavigableMap<String, Long>> logSegmentBuckets = new TreeMap<>();
  // Bucket boundary times in ms, derived from the bucket maps in the constructor:
  // forecast start = first bucket key, last bucket time = last bucket key,
  // forecast end = last bucket time + one bucket span.
  final long containerForecastStartTimeMs;
  final long containerLastBucketTimeMs;
  final long containerForecastEndTimeMs;
  final long logSegmentForecastStartTimeMs;
  final long logSegmentLastBucketTimeMs;
  final long logSegmentForecastEndTimeMs;
  // End offset up to which the scan that populated these results has progressed.
  // Not modified in this class; presumably set by the scanner as it advances — confirm against BlobStoreStats.
  Offset scannedEndOffset = null;

  /**
   * Create the bucket data structures in advance based on the given scanStartTime and segmentScanTimeOffset.
   */
  ScanResults(long startTimeInMs, long logSegmentForecastOffsetMs, int bucketCount, long bucketSpanInMs) {
    // Log segment buckets start earlier than container buckets by the forecast offset.
    long containerBucketTimeMs = startTimeInMs;
    long logSegmentBucketTimeMs = startTimeInMs - logSegmentForecastOffsetMs;
    for (int i = 0; i < bucketCount; i++) {
      containerBuckets.put(containerBucketTimeMs, new HashMap<>());
      logSegmentBuckets.put(logSegmentBucketTimeMs, new TreeMap<>(LogSegmentNameHelper.COMPARATOR));
      containerBucketTimeMs += bucketSpanInMs;
      logSegmentBucketTimeMs += bucketSpanInMs;
    }
    containerForecastStartTimeMs = containerBuckets.firstKey();
    containerLastBucketTimeMs = containerBuckets.lastKey();
    containerForecastEndTimeMs = containerLastBucketTimeMs + bucketSpanInMs;
    logSegmentForecastStartTimeMs = logSegmentBuckets.firstKey();
    logSegmentLastBucketTimeMs = logSegmentBuckets.lastKey();
    logSegmentForecastEndTimeMs = logSegmentLastBucketTimeMs + bucketSpanInMs;
  }

  /**
   * Given a reference time, return the key of the appropriate container bucket whose end time is strictly greater than
   * the reference time.
   * @param referenceTimeInMs the reference time or operation time of an event.
   * @return the appropriate bucket key (bucket end time) to indicate which bucket will an event with
   * the given reference time as operation time belong to. Null when the reference time is at or past the last bucket.
   */
  Long getContainerBucketKey(long referenceTimeInMs) {
    return containerBuckets.higherKey(referenceTimeInMs);
  }

  /**
   * Given a reference time, return the key of the appropriate log segment bucket whose end time is strictly greater
   * than the reference time.
   * @param referenceTimeInMs the reference time or operation time of an event.
   * @return the appropriate bucket key (bucket end time) to indicate which bucket will an event with
   * the given reference time as operation time belong to. Null when the reference time is at or past the last bucket.
   */
  Long getLogSegmentBucketKey(long referenceTimeInMs) {
    return logSegmentBuckets.higherKey(referenceTimeInMs);
  }

  /**
   * Helper function to update the container base value bucket with the given value.
   * @param serviceId the serviceId of the map entry to be updated
   * @param containerId the containerId of the map entry to be updated
   * @param value the value to be added
   */
  void updateContainerBaseBucket(String serviceId, String containerId, long value) {
    updateContainerBucket(containerBuckets.firstKey(), serviceId, containerId, value);
  }

  /**
   * Helper function to update the log segment base value bucket with the given value.
   * @param logSegmentName the log segment name of the map entry to be updated
   * @param value the value to be added
   */
  void updateLogSegmentBaseBucket(String logSegmentName, long value) {
    updateLogSegmentBucket(logSegmentBuckets.firstKey(), logSegmentName, value);
  }

  /**
   * Helper function to update a container bucket with the given value.
   * Silently ignores keys that do not correspond to an existing bucket (e.g. null from getContainerBucketKey).
   * @param bucketKey the bucket key to specify which bucket will be updated
   * @param serviceId the serviceId of the map entry to be updated
   * @param containerId the containerId of the map entry to be updated
   * @param value the value to be added
   */
  void updateContainerBucket(Long bucketKey, String serviceId, String containerId, long value) {
    if (bucketKey != null && containerBuckets.containsKey(bucketKey)) {
      Map<String, Map<String, Long>> existingBucketEntry = containerBuckets.get(bucketKey);
      updateNestedMapHelper(existingBucketEntry, serviceId, containerId, value);
    }
  }

  /**
   * Helper function to update a log segment bucket with a given value.
   * Silently ignores keys that do not correspond to an existing bucket (e.g. null from getLogSegmentBucketKey).
   * @param bucketKey the bucket key to specify which bucket will be updated
   * @param logSegmentName the log segment name of the map entry to be updated
   * @param value the value to be added
   */
  void updateLogSegmentBucket(Long bucketKey, String logSegmentName, long value) {
    if (bucketKey != null && logSegmentBuckets.containsKey(bucketKey)) {
      Map<String, Long> existingBucketEntry = logSegmentBuckets.get(bucketKey);
      updateMapHelper(existingBucketEntry, logSegmentName, value);
    }
  }

  /**
   * Given a reference time in milliseconds return the corresponding valid data size per log segment map by aggregating
   * all buckets whose end time is less than or equal to the reference time.
   * @param referenceTimeInMS the reference time in ms until which deletes and expiration are relevant
   * @return a {@link Pair} whose first element is the end time of the last bucket that was aggregated and whose second
   * element is the requested valid data size per log segment {@link NavigableMap}.
   */
  Pair<Long, NavigableMap<String, Long>> getValidSizePerLogSegment(Long referenceTimeInMS) {
    // Start from a copy of the base bucket and apply each delta bucket up to the reference time.
    NavigableMap<String, Long> validSizePerLogSegment = new TreeMap<>(logSegmentBuckets.firstEntry().getValue());
    NavigableMap<Long, NavigableMap<String, Long>> subMap =
        logSegmentBuckets.subMap(logSegmentBuckets.firstKey(), false, referenceTimeInMS, true);
    for (Map.Entry<Long, NavigableMap<String, Long>> bucket : subMap.entrySet()) {
      for (Map.Entry<String, Long> bucketEntry : bucket.getValue().entrySet()) {
        updateMapHelper(validSizePerLogSegment, bucketEntry.getKey(), bucketEntry.getValue());
      }
    }
    Long lastReferenceBucketTimeInMs = subMap.isEmpty() ? logSegmentBuckets.firstKey() : subMap.lastKey();
    return new Pair<>(lastReferenceBucketTimeInMs, validSizePerLogSegment);
  }

  /**
   * Given a reference time in ms return the corresponding valid data size per container map by aggregating all buckets
   * whose end time is less than or equal to the reference time.
   * @param referenceTimeInMs the reference time in ms until which deletes and expiration are relevant.
   * @return a {@link Pair} whose first element is the end time of the last bucket that was aggregated and whose second
   * element is the requested valid data size per container {@link Map}.
   */
  Map<String, Map<String, Long>> getValidSizePerContainer(Long referenceTimeInMs) {
    // Deep-copy the base bucket (one level of nesting) before applying deltas.
    Map<String, Map<String, Long>> validSizePerContainer = new HashMap<>();
    for (Map.Entry<String, Map<String, Long>> accountEntry : containerBuckets.firstEntry().getValue().entrySet()) {
      validSizePerContainer.put(accountEntry.getKey(), new HashMap<>(accountEntry.getValue()));
    }
    NavigableMap<Long, Map<String, Map<String, Long>>> subMap =
        containerBuckets.subMap(containerBuckets.firstKey(), false, referenceTimeInMs, true);
    for (Map.Entry<Long, Map<String, Map<String, Long>>> bucket : subMap.entrySet()) {
      for (Map.Entry<String, Map<String, Long>> accountEntry : bucket.getValue().entrySet()) {
        for (Map.Entry<String, Long> containerEntry : accountEntry.getValue().entrySet()) {
          updateNestedMapHelper(validSizePerContainer, accountEntry.getKey(), containerEntry.getKey(),
              containerEntry.getValue());
        }
      }
    }
    return validSizePerContainer;
  }

  /**
   * Helper function to update nested map data structure.
   * @param nestedMap nested {@link Map} to be updated
   * @param firstKey of the nested map
   * @param secondKey of the nested map
   * @param value the value to be added at the corresponding entry
   */
  private void updateNestedMapHelper(Map<String, Map<String, Long>> nestedMap, String firstKey, String secondKey,
      Long value) {
    if (!nestedMap.containsKey(firstKey)) {
      nestedMap.put(firstKey, new HashMap<String, Long>());
    }
    updateMapHelper(nestedMap.get(firstKey), secondKey, value);
  }

  /**
   * Helper function to update map data structure.
   * Adds value to the existing entry, or inserts it when the key is absent.
   * @param map {@link Map} to be updated
   * @param key of the map
   * @param value the value to be added at the corresponding entry
   */
  private void updateMapHelper(Map<String, Long> map, String key, Long value) {
    Long newValue = map.containsKey(key) ? map.get(key) + value : value;
    map.put(key, newValue);
  }
}
| xiahome/ambry | ambry-store/src/main/java/com.github.ambry.store/ScanResults.java | Java | apache-2.0 | 10,385 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.application;
import java.io.IOException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import org.apache.wicket.util.collections.UrlExternalFormComparator;
import org.apache.wicket.util.file.File;
import org.apache.wicket.util.listener.IChangeListener;
import org.apache.wicket.util.time.Duration;
import org.apache.wicket.util.watch.IModificationWatcher;
import org.apache.wicket.util.watch.ModificationWatcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Custom ClassLoader that reverses the classloader lookups, and that is able to notify a listener
* when a class file is changed.
*
* @author <a href="mailto:jbq@apache.org">Jean-Baptiste Quenot</a>
*/
public class ReloadingClassLoader extends URLClassLoader
{
private static final Logger log = LoggerFactory.getLogger(ReloadingClassLoader.class);
private static final Set<URL> urls = new TreeSet<URL>(new UrlExternalFormComparator());
private static final List<String> patterns = new ArrayList<String>();
private IChangeListener listener;
private final Duration pollFrequency = Duration.seconds(3);
private final IModificationWatcher watcher;
static
{
addClassLoaderUrls(ReloadingClassLoader.class.getClassLoader());
excludePattern("org.apache.wicket.*");
includePattern("org.apache.wicket.examples.*");
}
/**
*
* @param name
* @return true if class if found, false otherwise
*/
protected boolean tryClassHere(String name)
{
// don't include classes in the java or javax.servlet package
if (name != null && (name.startsWith("java.") || name.startsWith("javax.servlet")))
{
return false;
}
// Scan includes, then excludes
boolean tryHere;
// If no explicit includes, try here
if (patterns == null || patterns.size() == 0)
{
tryHere = true;
}
else
{
// See if it matches include patterns
tryHere = false;
for (String rawpattern : patterns)
{
if (rawpattern.length() <= 1)
{
continue;
}
// FIXME it seems that only "includes" are handled. "Excludes" are ignored
boolean isInclude = rawpattern.substring(0, 1).equals("+");
String pattern = rawpattern.substring(1);
if (WildcardMatcherHelper.match(pattern, name) != null)
{
tryHere = isInclude;
}
}
}
return tryHere;
}
/**
* Include a pattern
*
* @param pattern
* the pattern to include
*/
public static void includePattern(String pattern)
{
patterns.add("+" + pattern);
}
/**
* Exclude a pattern
*
* @param pattern
* the pattern to exclude
*/
public static void excludePattern(String pattern)
{
patterns.add("-" + pattern);
}
/**
* Returns the list of all configured inclusion or exclusion patterns
*
* @return list of patterns as String
*/
public static List<String> getPatterns()
{
return patterns;
}
/**
* Add the location of a directory containing class files
*
* @param url
* the URL for the directory
*/
public static void addLocation(URL url)
{
urls.add(url);
}
/**
* Returns the list of all configured locations of directories containing class files
*
* @return list of locations as URL
*/
public static Set<URL> getLocations()
{
return urls;
}
/**
* Add all the url locations we can find for the provided class loader
*
* @param loader
* class loader
*/
private static void addClassLoaderUrls(ClassLoader loader)
{
if (loader != null)
{
final Enumeration<URL> resources;
try
{
resources = loader.getResources("");
}
catch (IOException e)
{
throw new RuntimeException(e);
}
while (resources.hasMoreElements())
{
URL location = resources.nextElement();
ReloadingClassLoader.addLocation(location);
}
}
}
/**
* Create a new reloading ClassLoader from a list of URLs, and initialize the
* ModificationWatcher to detect class file modifications
*
* @param parent
* the parent classloader in case the class file cannot be loaded from the above
* locations
*/
public ReloadingClassLoader(ClassLoader parent)
{
super(new URL[] { }, parent);
// probably doubles from this class, but just in case
addClassLoaderUrls(parent);
for (URL url : urls)
{
addURL(url);
}
watcher = new ModificationWatcher(pollFrequency);
}
/**
* Gets a resource from this <code>ClassLoader</class>. If the
* resource does not exist in this one, we check the parent.
* Please note that this is the exact opposite of the
* <code>ClassLoader</code> spec. We use it to work around inconsistent class loaders from third
* party vendors.
*
* @param name
* of resource
*/
@Override
public final URL getResource(final String name)
{
URL resource = findResource(name);
ClassLoader parent = getParent();
if (resource == null && parent != null)
{
resource = parent.getResource(name);
}
return resource;
}
/**
* Loads the class from this <code>ClassLoader</class>. If the
* class does not exist in this one, we check the parent. Please
* note that this is the exact opposite of the
* <code>ClassLoader</code> spec. We use it to load the class from the same classloader as
* WicketFilter or WicketServlet. When found, the class file is watched for modifications.
*
* @param name
* the name of the class
* @param resolve
* if <code>true</code> then resolve the class
* @return the resulting <code>Class</code> object
* @exception ClassNotFoundException
* if the class could not be found
*/
@Override
public final Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException
{
// First check if it's already loaded
Class<?> clazz = findLoadedClass(name);
if (clazz == null)
{
final ClassLoader parent = getParent();
if (tryClassHere(name))
{
try
{
clazz = findClass(name);
watchForModifications(clazz);
}
catch (ClassNotFoundException cnfe)
{
if (parent == null)
{
// Propagate exception
throw cnfe;
}
}
}
if (clazz == null)
{
if (parent == null)
{
throw new ClassNotFoundException(name);
}
else
{
// Will throw a CFNE if not found in parent
// see http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6500212
// clazz = parent.loadClass(name);
clazz = Class.forName(name, false, parent);
}
}
}
if (resolve)
{
resolveClass(clazz);
}
return clazz;
}
/**
* Sets the listener that will be notified when a class changes
*
* @param listener
* the listener to notify upon class change
*/
public void setListener(IChangeListener listener)
{
this.listener = listener;
}
/**
* Watch changes of a class file by locating it in the list of location URLs and adding the
* corresponding file to the ModificationWatcher
*
* @param clz
* the class to watch
*/
private void watchForModifications(Class<?> clz)
{
	// Watch class in the future
	// Scan each configured location URL for the .class file backing clz;
	// the first match is registered with the ModificationWatcher.
	Iterator<URL> locationsIterator = urls.iterator();
	File clzFile = null;
	while (locationsIterator.hasNext())
	{
		// FIXME only works for directories, but JARs etc could be checked
		// as well
		URL location = locationsIterator.next();
		// Map the binary class name (a.b.C) to a path below the location (a/b/C.class)
		String clzLocation = location.getFile() + clz.getName().replaceAll("\\.", "/") +
			".class";
		log.debug("clzLocation=" + clzLocation);
		clzFile = new File(clzLocation);
		// effectively-final copy for capture by the anonymous listener below
		final File finalClzFile = clzFile;
		if (clzFile.exists())
		{
			log.info("Watching changes of class " + clzFile);
			watcher.add(clzFile, new IChangeListener()
			{
				@Override
				public void onChange()
				{
					log.info("Class file " + finalClzFile + " has changed, reloading");
					try
					{
						// forward to the externally registered listener (see setListener)
						listener.onChange();
					}
					catch (Exception e)
					{
						log.error("Could not notify listener", e);
						// If an error occurs when the listener is notified,
						// remove the watched object to avoid rethrowing the
						// exception at next check
						// FIXME check if class file has been deleted
						watcher.remove(finalClzFile);
					}
				}
			});
			break;
		}
		else
		{
			log.debug("Class file does not exist: " + clzFile);
		}
	}
	// clzFile still points at the last candidate tried; if it does not exist,
	// no location contained the class and nothing is being watched.
	if (clzFile != null && !clzFile.exists())
	{
		log.debug("Could not locate class " + clz.getName());
	}
}
/**
 * Remove the ModificationWatcher from the current reloading class loader.
 * Call this when the class loader is discarded so the background watcher
 * does not keep running (and keep files/threads alive) across reloads.
 */
public void destroy()
{
	watcher.destroy();
}
}
| mafulafunk/wicket | wicket-core/src/main/java/org/apache/wicket/application/ReloadingClassLoader.java | Java | apache-2.0 | 9,526 |
package jadx.tests.integration.conditions;
import org.junit.jupiter.api.Test;
import jadx.tests.api.SmaliTest;
import static jadx.tests.api.utils.assertj.JadxAssertions.assertThat;
@SuppressWarnings("CommentedOutCode")
public class TestTernary4 extends SmaliTest {
	// Expected decompilation shape of the smali input (kept for reference):
	// @formatter:off
	/*
		private Set test(HashMap<String, Object> hashMap) {
			boolean z;
			HashSet hashSet = new HashSet();
			synchronized (this.defaultValuesByPath) {
				for (String next : this.defaultValuesByPath.keySet()) {
					Object obj = hashMap.get(next);
					if (obj != null) {
						z = !getValueObject(next).equals(obj);
					} else {
						z = this.valuesByPath.get(next) != null;;
					}
					if (z) {
						hashSet.add(next);
					}
				}
			}
			return hashSet;
		}
	*/
	// @formatter:on

	@Test
	public void test() {
		// The decompiled output (block comments stripped) must contain neither a
		// leftover literal "5" nor a "try" block — presumably checking that the
		// conditional collapses cleanly and the synchronized region does not
		// decompile into try/catch scaffolding (TODO confirm against the smali).
		assertThat(getClassNodeFromSmali())
				.code()
				.removeBlockComments()
				.doesNotContain("5")
				.doesNotContain("try");
	}
}
| skylot/jadx | jadx-core/src/test/java/jadx/tests/integration/conditions/TestTernary4.java | Java | apache-2.0 | 944 |
package jadx.tests.integration.debuginfo;
import org.junit.jupiter.api.Test;
import jadx.core.dex.nodes.ClassNode;
import jadx.tests.api.SmaliTest;
import static jadx.tests.api.utils.JadxMatchers.containsOne;
import static org.hamcrest.MatcherAssert.assertThat;
public class TestVariablesNames extends SmaliTest {
	// Source shape that produced the smali under test (kept for reference):
	// @formatter:off
	/*
		public static class TestCls {
			public void test(String s, int k) {
				f1(s);
				int i = k + 3;
				String s2 = "i" + i;
				f2(i, s2);
				double d = i * 5;
				String s3 = "d" + d;
				f3(d, s3);
			}

			private void f1(String s) {
			}

			private void f2(int i, String i2) {
			}

			private void f3(double d, String d2) {
			}
		}
	*/
	// @formatter:on

	/**
	 * Parameter register reused in variables assign with different types and names
	 * No variables names in debug info
	 */
	@Test
	public void test() {
		ClassNode cls = getClassNodeFromSmaliWithPath("debuginfo", "TestVariablesNames");
		String code = cls.getCode().toString();
		// TODO: don't use current variables naming in tests
		// Each reuse of the register must get a distinct, type-appropriate name.
		assertThat(code, containsOne("f1(str);"));
		assertThat(code, containsOne("f2(i2, \"i\" + i2);"));
		assertThat(code, containsOne("f3(d, \"d\" + d);"));
	}
}
| skylot/jadx | jadx-core/src/test/java/jadx/tests/integration/debuginfo/TestVariablesNames.java | Java | apache-2.0 | 1,206 |
/*!
* commander
* Copyright(c) 2011 TJ Holowaychuk <tj@vision-media.ca>
* MIT Licensed
*/
/**
* Module dependencies.
*/
var EventEmitter = require('events').EventEmitter
, spawn = require('child_process').spawn
, keypress = require('keypress')
, fs = require('fs')
, exists = fs.existsSync
, path = require('path')
, tty = require('tty')
, dirname = path.dirname
, basename = path.basename;
/**
* Expose the root command.
*/
exports = module.exports = new Command;
/**
* Expose `Command`.
*/
exports.Command = Command;
/**
* Expose `Option`.
*/
exports.Option = Option;
/**
* Initialize a new `Option` with the given `flags` and `description`.
*
* @param {String} flags
* @param {String} description
* @api public
*/
/**
 * Initialize a new `Option` from a `flags` spec ("-p, --pepper [type]")
 * and an optional `description`.
 *
 * @param {String} flags
 * @param {String} description
 * @api public
 */
function Option(flags, description) {
  this.flags = flags;
  // "<...>" marks a required value, "[...]" an optional one; "-no-"
  // flags negate a boolean option.
  this.required = ~flags.indexOf('<');
  this.optional = ~flags.indexOf('[');
  this.bool = !~flags.indexOf('-no-');

  // Split on separators; a leading short flag (if any) comes first.
  var parts = flags.split(/[ ,|]+/);
  if (parts.length > 1 && !/^[[<]/.test(parts[1])) {
    this.short = parts.shift();
  }
  this.long = parts.shift();
  this.description = description || '';
}
/**
* Return option name.
*
* @return {String}
* @api private
*/
Option.prototype.name = function() {
  // Strip the leading "--" and a "no-" negation prefix to get the bare name.
  var stripped = this.long.replace('--', '');
  return stripped.replace('no-', '');
};
/**
* Check if `arg` matches the short or long flag.
*
* @param {String} arg
* @return {Boolean}
* @api private
*/
Option.prototype.is = function(arg) {
  // True when `arg` matches either the short ("-p") or long ("--pepper") flag.
  return arg === this.short || arg === this.long;
};
/**
* Initialize a new `Command`.
*
* @param {String} name
* @api public
*/
function Command(name) {
  this.commands = [];  // registered sub-commands
  this.options = [];   // registered Option instances
  this._args = [];     // expected positional args ({ required, name })
  this._name = name;   // command name (may be guessed later from argv by parse())
}
/**
* Inherit from `EventEmitter.prototype`.
*/
Command.prototype.__proto__ = EventEmitter.prototype;
/**
* Add command `name`.
*
* The `.action()` callback is invoked when the
* command `name` is specified via __ARGV__,
* and the remaining arguments are applied to the
* function for access.
*
* When the `name` is "*" an un-matched command
* will be passed as the first arg, followed by
* the rest of __ARGV__ remaining.
*
* Examples:
*
* program
* .version('0.0.1')
* .option('-C, --chdir <path>', 'change the working directory')
* .option('-c, --config <path>', 'set config path. defaults to ./deploy.conf')
* .option('-T, --no-tests', 'ignore test hook')
*
* program
* .command('setup')
* .description('run remote setup commands')
* .action(function(){
* console.log('setup');
* });
*
* program
* .command('exec <cmd>')
* .description('run the given remote command')
* .action(function(cmd){
* console.log('exec "%s"', cmd);
* });
*
* program
* .command('*')
* .description('deploy the given env')
* .action(function(env){
* console.log('deploying "%s"', env);
* });
*
* program.parse(process.argv);
*
* @param {String} name
* @param {String} [desc]
* @return {Command} the new command
* @api public
*/
Command.prototype.command = function(name, desc){
var args = name.split(/ +/);
var cmd = new Command(args.shift());
if (desc) cmd.description(desc);
if (desc) this.executables = true;
this.commands.push(cmd);
cmd.parseExpectedArgs(args);
cmd.parent = this;
if (desc) return this;
return cmd;
};
/**
* Add an implicit `help [cmd]` subcommand
* which invokes `--help` for the given command.
*
* @api private
*/
Command.prototype.addImplicitHelpCommand = function() {
  // Registered lazily from parse() when executable sub-commands exist.
  this.command('help [cmd]', 'display help for [cmd]');
};
/**
* Parse expected `args`.
*
* For example `["[type]"]` becomes `[{ required: false, name: 'type' }]`.
*
* @param {Array} args
* @return {Command} for chaining
* @api public
*/
/**
 * Parse expected `args` such as `["<type>", "[name]"]` into
 * `this._args` entries of the form `{ required, name }`.
 *
 * @param {Array} args
 * @return {Command} for chaining
 * @api public
 */
Command.prototype.parseExpectedArgs = function(args) {
  var self = this;
  // Fix: return `this` even with no args so the documented chaining
  // contract holds (previously returned undefined here).
  if (!args.length) return this;
  args.forEach(function(arg) {
    switch (arg[0]) {
      case '<':
        // "<name>" — required argument
        self._args.push({ required: true, name: arg.slice(1, -1) });
        break;
      case '[':
        // "[name]" — optional argument
        self._args.push({ required: false, name: arg.slice(1, -1) });
        break;
      // anything else is silently ignored
    }
  });
  return this;
};
/**
* Register callback `fn` for the command.
*
* Examples:
*
* program
* .command('help')
* .description('display verbose help')
* .action(function(){
* // output help here
* });
*
* @param {Function} fn
* @return {Command} for chaining
* @api public
*/
Command.prototype.action = function(fn){
var self = this;
this.parent.on(this._name, function(args, unknown){
// Parse any so-far unknown options
unknown = unknown || [];
var parsed = self.parseOptions(unknown);
// Output help if necessary
outputHelpIfNecessary(self, parsed.unknown);
// If there are still any unknown options, then we simply
// die, unless someone asked for help, in which case we give it
// to them, and then we die.
if (parsed.unknown.length > 0) {
self.unknownOption(parsed.unknown[0]);
}
// Leftover arguments need to be pushed back. Fixes issue #56
if (parsed.args.length) args = parsed.args.concat(args);
self._args.forEach(function(arg, i){
if (arg.required && null == args[i]) {
self.missingArgument(arg.name);
}
});
// Always append ourselves to the end of the arguments,
// to make sure we match the number of arguments the user
// expects
if (self._args.length) {
args[self._args.length] = self;
} else {
args.push(self);
}
fn.apply(this, args);
});
return this;
};
/**
* Define option with `flags`, `description` and optional
* coercion `fn`.
*
* The `flags` string should contain both the short and long flags,
* separated by comma, a pipe or space. The following are all valid
* all will output this way when `--help` is used.
*
* "-p, --pepper"
* "-p|--pepper"
* "-p --pepper"
*
* Examples:
*
* // simple boolean defaulting to false
* program.option('-p, --pepper', 'add pepper');
*
* --pepper
* program.pepper
* // => Boolean
*
* // simple boolean defaulting to false
* program.option('-C, --no-cheese', 'remove cheese');
*
* program.cheese
* // => true
*
* --no-cheese
* program.cheese
* // => true
*
* // required argument
* program.option('-C, --chdir <path>', 'change the working directory');
*
* --chdir /tmp
* program.chdir
* // => "/tmp"
*
* // optional argument
* program.option('-c, --cheese [type]', 'add cheese [marble]');
*
* @param {String} flags
* @param {String} description
* @param {Function|Mixed} fn or default
* @param {Mixed} defaultValue
* @return {Command} for chaining
* @api public
*/
Command.prototype.option = function(flags, description, fn, defaultValue){
var self = this
, option = new Option(flags, description)
, oname = option.name()
, name = camelcase(oname);
// default as 3rd arg
if ('function' != typeof fn) defaultValue = fn, fn = null;
// preassign default value only for --no-*, [optional], or <required>
if (false == option.bool || option.optional || option.required) {
// when --no-* we make sure default is true
if (false == option.bool) defaultValue = true;
// preassign only if we have a default
if (undefined !== defaultValue) self[name] = defaultValue;
}
// register the option
this.options.push(option);
// when it's passed assign the value
// and conditionally invoke the callback
this.on(oname, function(val){
// coercion
if (null != val && fn) val = fn(val);
// unassigned or bool
if ('boolean' == typeof self[name] || 'undefined' == typeof self[name]) {
// if no value, bool true, and we have a default, then use it!
if (null == val) {
self[name] = option.bool
? defaultValue || true
: false;
} else {
self[name] = val;
}
} else if (null !== val) {
// reassign
self[name] = val;
}
});
return this;
};
/**
* Parse `argv`, settings options and invoking commands when defined.
*
* @param {Array} argv
* @return {Command} for chaining
* @api public
*/
Command.prototype.parse = function(argv){
// implicit help
if (this.executables) this.addImplicitHelpCommand();
// store raw args
this.rawArgs = argv;
// guess name
this._name = this._name || basename(argv[1]);
// process argv
var parsed = this.parseOptions(this.normalize(argv.slice(2)));
var args = this.args = parsed.args;
var result = this.parseArgs(this.args, parsed.unknown);
// executable sub-commands, skip .parseArgs()
if (this.executables) return this.executeSubCommand(argv, args, parsed.unknown);
return result;
};
/**
* Execute a sub-command executable.
*
* @param {Array} argv
* @param {Array} args
* @param {Array} unknown
* @api private
*/
Command.prototype.executeSubCommand = function(argv, args, unknown) {
args = args.concat(unknown);
if (!args.length) this.help();
if ('help' == args[0] && 1 == args.length) this.help();
// <cmd> --help
if ('help' == args[0]) {
args[0] = args[1];
args[1] = '--help';
}
// executable
var dir = dirname(argv[1]);
var bin = basename(argv[1]) + '-' + args[0];
// check for ./<bin> first
var local = path.join(dir, bin);
// run it
args = args.slice(1);
var proc = spawn(local, args, { stdio: 'inherit', customFds: [0, 1, 2] });
proc.on('error', function(err){
if (err.code == "ENOENT") {
console.error('\n %s(1) does not exist, try --help\n', bin);
} else if (err.code == "EACCES") {
console.error('\n %s(1) not executable. try chmod or run with root\n', bin);
}
});
this.runningCommand = proc;
};
/**
* Normalize `args`, splitting joined short flags. For example
* the arg "-abc" is equivalent to "-a -b -c".
* This also normalizes equal sign and splits "--abc=def" into "--abc def".
*
* @param {Array} args
* @return {Array}
* @api private
*/
/**
 * Normalize `args`: expand joined short flags ("-abc" => "-a -b -c")
 * and split "--abc=def" into "--abc def".
 *
 * @param {Array} args
 * @return {Array}
 * @api private
 */
Command.prototype.normalize = function(args) {
  var result = [];

  for (var i = 0, len = args.length; i < len; ++i) {
    var arg = args[i];
    var eq;

    if (arg.length > 1 && '-' === arg[0] && '-' !== arg[1]) {
      // combined short flags: push one "-x" per character
      var chars = arg.slice(1).split('');
      for (var j = 0; j < chars.length; j++) {
        result.push('-' + chars[j]);
      }
    } else if (/^--/.test(arg) && ~(eq = arg.indexOf('='))) {
      // long flag with inline value
      result.push(arg.slice(0, eq), arg.slice(eq + 1));
    } else {
      result.push(arg);
    }
  }

  return result;
};
/**
* Parse command `args`.
*
* When listener(s) are available those
* callbacks are invoked, otherwise the "*"
* event is emitted and those actions are invoked.
*
* @param {Array} args
* @return {Command} for chaining
* @api private
*/
/**
 * Parse command `args`: when a listener is registered for the first
 * arg, emit that command's event; otherwise emit "*". With no args,
 * only help/unknown-option handling runs.
 *
 * (Removed unused locals `cmds` and `len` from the original.)
 *
 * @param {Array} args
 * @param {Array} unknown - so-far unrecognized option tokens
 * @return {Command} for chaining
 * @api private
 */
Command.prototype.parseArgs = function(args, unknown) {
  if (args.length) {
    var name = args[0];
    if (this.listeners(name).length) {
      // registered command: emit "<name>" with the remaining args
      this.emit(args.shift(), args, unknown);
    } else {
      // unmatched command: fall through to the catch-all listener
      this.emit('*', args);
    }
  } else {
    outputHelpIfNecessary(this, unknown);

    // If there were no args and we have unknown options,
    // then they are extraneous and we need to error.
    if (unknown.length > 0) {
      this.unknownOption(unknown[0]);
    }
  }

  return this;
};
/**
* Return an option matching `arg` if any.
*
* @param {String} arg
* @return {Option}
* @api private
*/
/**
 * Return the registered option whose short or long flag matches `arg`,
 * or undefined when none does.
 *
 * @param {String} arg
 * @return {Option}
 * @api private
 */
Command.prototype.optionFor = function(arg) {
  var options = this.options;
  for (var i = 0; i < options.length; i++) {
    var option = options[i];
    if (option.is(arg)) return option;
  }
};
/**
* Parse options from `argv` returning `argv`
* void of these options.
*
* @param {Array} argv
* @return {Array}
* @api public
*/
Command.prototype.parseOptions = function(argv){
var args = []
, len = argv.length
, literal
, option
, arg;
var unknownOptions = [];
// parse options
for (var i = 0; i < len; ++i) {
arg = argv[i];
// literal args after --
if ('--' == arg) {
literal = true;
continue;
}
if (literal) {
args.push(arg);
continue;
}
// find matching Option
option = this.optionFor(arg);
// option is defined
if (option) {
// requires arg
if (option.required) {
arg = argv[++i];
if (null == arg) return this.optionMissingArgument(option);
if ('-' == arg[0] && '-' != arg) return this.optionMissingArgument(option, arg);
this.emit(option.name(), arg);
// optional arg
} else if (option.optional) {
arg = argv[i+1];
if (null == arg || ('-' == arg[0] && '-' != arg)) {
arg = null;
} else {
++i;
}
this.emit(option.name(), arg);
// bool
} else {
this.emit(option.name());
}
continue;
}
// looks like an option
if (arg.length > 1 && '-' == arg[0]) {
unknownOptions.push(arg);
// If the next argument looks like it might be
// an argument for this option, we pass it on.
// If it isn't, then it'll simply be ignored
if (argv[i+1] && '-' != argv[i+1][0]) {
unknownOptions.push(argv[++i]);
}
continue;
}
// arg
args.push(arg);
}
return { args: args, unknown: unknownOptions };
};
/**
* Argument `name` is missing.
*
* @param {String} name
* @api private
*/
Command.prototype.missingArgument = function(name){
console.error();
console.error(" error: missing required argument `%s'", name);
console.error();
process.exit(1);
};
/**
* `Option` is missing an argument, but received `flag` or nothing.
*
* @param {String} option
* @param {String} flag
* @api private
*/
Command.prototype.optionMissingArgument = function(option, flag){
console.error();
if (flag) {
console.error(" error: option `%s' argument missing, got `%s'", option.flags, flag);
} else {
console.error(" error: option `%s' argument missing", option.flags);
}
console.error();
process.exit(1);
};
/**
* Unknown option `flag`.
*
* @param {String} flag
* @api private
*/
Command.prototype.unknownOption = function(flag){
console.error();
console.error(" error: unknown option `%s'", flag);
console.error();
process.exit(1);
};
/**
* Set the program version to `str`.
*
* This method auto-registers the "-V, --version" flag
* which will print the version number when passed.
*
* @param {String} str
* @param {String} flags
* @return {Command} for chaining
* @api public
*/
Command.prototype.version = function(str, flags){
if (0 == arguments.length) return this._version;
this._version = str;
flags = flags || '-V, --version';
this.option(flags, 'output the version number');
this.on('version', function(){
console.log(str);
process.exit(0);
});
return this;
};
/**
* Set the description `str`.
*
* @param {String} str
* @return {String|Command}
* @api public
*/
/**
 * Getter/setter for the command description. With no argument,
 * returns the current description; otherwise sets it and chains.
 *
 * @param {String} str
 * @return {String|Command}
 * @api public
 */
Command.prototype.description = function(str) {
  if (arguments.length === 0) {
    return this._description;
  }
  this._description = str;
  return this;
};
/**
* Set / get the command usage `str`.
*
* @param {String} str
* @return {String|Command}
* @api public
*/
Command.prototype.usage = function(str){
var args = this._args.map(function(arg){
return arg.required
? '<' + arg.name + '>'
: '[' + arg.name + ']';
});
var usage = '[options'
+ (this.commands.length ? '] [command' : '')
+ ']'
+ (this._args.length ? ' ' + args : '');
if (0 == arguments.length) return this._usage || usage;
this._usage = str;
return this;
};
/**
* Return the largest option length.
*
* @return {Number}
* @api private
*/
/**
 * Return the width of the widest option flags string, used to
 * align the help output columns.
 *
 * @return {Number}
 * @api private
 */
Command.prototype.largestOptionLength = function() {
  var max = 0;
  for (var i = 0; i < this.options.length; i++) {
    var width = this.options[i].flags.length;
    if (width > max) max = width;
  }
  return max;
};
/**
* Return help for options.
*
* @return {String}
* @api private
*/
Command.prototype.optionHelp = function(){
var width = this.largestOptionLength();
// Prepend the help information
return [pad('-h, --help', width) + ' ' + 'output usage information']
.concat(this.options.map(function(option){
return pad(option.flags, width)
+ ' ' + option.description;
}))
.join('\n');
};
/**
* Return command help documentation.
*
* @return {String}
* @api private
*/
Command.prototype.commandHelp = function(){
if (!this.commands.length) return '';
return [
''
, ' Commands:'
, ''
, this.commands.map(function(cmd){
var args = cmd._args.map(function(arg){
return arg.required
? '<' + arg.name + '>'
: '[' + arg.name + ']';
}).join(' ');
return pad(cmd._name
+ (cmd.options.length
? ' [options]'
: '') + ' ' + args, 22)
+ (cmd.description()
? ' ' + cmd.description()
: '');
}).join('\n').replace(/^/gm, ' ')
, ''
].join('\n');
};
/**
* Return program help documentation.
*
* @return {String}
* @api private
*/
Command.prototype.helpInformation = function(){
return [
''
, ' Usage: ' + this._name + ' ' + this.usage()
, '' + this.commandHelp()
, ' Options:'
, ''
, '' + this.optionHelp().replace(/^/gm, ' ')
, ''
, ''
].join('\n');
};
/**
* Prompt for a `Number`.
*
* @param {String} str
* @param {Function} fn
* @api private
*/
Command.prototype.promptForNumber = function(str, fn){
var self = this;
this.promptSingleLine(str, function parseNumber(val){
val = Number(val);
if (isNaN(val)) return self.promptSingleLine(str + '(must be a number) ', parseNumber);
fn(val);
});
};
/**
* Prompt for a `Date`.
*
* @param {String} str
* @param {Function} fn
* @api private
*/
Command.prototype.promptForDate = function(str, fn){
var self = this;
this.promptSingleLine(str, function parseDate(val){
val = new Date(val);
if (isNaN(val.getTime())) return self.promptSingleLine(str + '(must be a date) ', parseDate);
fn(val);
});
};
/**
* Prompt for a `Regular Expression`.
*
* @param {String} str
* @param {Object} pattern regular expression object to test
* @param {Function} fn
* @api private
*/
Command.prototype.promptForRegexp = function(str, pattern, fn){
var self = this;
this.promptSingleLine(str, function parseRegexp(val){
if(!pattern.test(val)) return self.promptSingleLine(str + '(regular expression mismatch) ', parseRegexp);
fn(val);
});
};
/**
* Single-line prompt.
*
* @param {String} str
* @param {Function} fn
* @api private
*/
Command.prototype.promptSingleLine = function(str, fn){
// determine if the 2nd argument is a regular expression
if (arguments[1].global !== undefined && arguments[1].multiline !== undefined) {
return this.promptForRegexp(str, arguments[1], arguments[2]);
} else if ('function' == typeof arguments[2]) {
return this['promptFor' + (fn.name || fn)](str, arguments[2]);
}
process.stdout.write(str);
process.stdin.setEncoding('utf8');
process.stdin.once('data', function(val){
fn(val.trim());
}).resume();
};
/**
* Multi-line prompt.
*
* @param {String} str
* @param {Function} fn
* @api private
*/
Command.prototype.promptMultiLine = function(str, fn){
var buf = [];
console.log(str);
process.stdin.setEncoding('utf8');
process.stdin.on('data', function(val){
if ('\n' == val || '\r\n' == val) {
process.stdin.removeAllListeners('data');
fn(buf.join('\n'));
} else {
buf.push(val.trimRight());
}
}).resume();
};
/**
* Prompt `str` and callback `fn(val)`
*
* Commander supports single-line and multi-line prompts.
* To issue a single-line prompt simply add white-space
* to the end of `str`, something like "name: ", whereas
* for a multi-line prompt omit this "description:".
*
*
* Examples:
*
* program.prompt('Username: ', function(name){
* console.log('hi %s', name);
* });
*
* program.prompt('Description:', function(desc){
* console.log('description was "%s"', desc.trim());
* });
*
* @param {String|Object} str
* @param {Function} fn
* @api public
*/
Command.prototype.prompt = function(str, fn){
var self = this;
if ('string' == typeof str) {
if (/ $/.test(str)) return this.promptSingleLine.apply(this, arguments);
this.promptMultiLine(str, fn);
} else {
var keys = Object.keys(str)
, obj = {};
function next() {
var key = keys.shift()
, label = str[key];
if (!key) return fn(obj);
self.prompt(label, function(val){
obj[key] = val;
next();
});
}
next();
}
};
/**
* Prompt for password with `str`, `mask` char and callback `fn(val)`.
*
* The mask string defaults to '', aka no output is
* written while typing, you may want to use "*" etc.
*
* Examples:
*
* program.password('Password: ', function(pass){
* console.log('got "%s"', pass);
* process.stdin.destroy();
* });
*
* program.password('Password: ', '*', function(pass){
* console.log('got "%s"', pass);
* process.stdin.destroy();
* });
*
* @param {String} str
* @param {String} mask
* @param {Function} fn
* @api public
*/
Command.prototype.password = function(str, mask, fn){
var self = this
, buf = '';
// default mask
if ('function' == typeof mask) {
fn = mask;
mask = '';
}
keypress(process.stdin);
function setRawMode(mode) {
if (process.stdin.setRawMode) {
process.stdin.setRawMode(mode);
} else {
tty.setRawMode(mode);
}
};
setRawMode(true);
process.stdout.write(str);
// keypress
process.stdin.on('keypress', function(c, key){
if (key && 'enter' == key.name) {
console.log();
process.stdin.pause();
process.stdin.removeAllListeners('keypress');
setRawMode(false);
if (!buf.trim().length) return self.password(str, mask, fn);
fn(buf);
return;
}
if (key && key.ctrl && 'c' == key.name) {
console.log('%s', buf);
process.exit();
}
process.stdout.write(mask);
buf += c;
}).resume();
};
/**
* Confirmation prompt with `str` and callback `fn(bool)`
*
* Examples:
*
* program.confirm('continue? ', function(ok){
* console.log(' got %j', ok);
* process.stdin.destroy();
* });
*
* @param {String} str
* @param {Function} fn
* @api public
*/
Command.prototype.confirm = function(str, fn, verbose){
var self = this;
this.prompt(str, function(ok){
if (!ok.trim()) {
if (!verbose) str += '(yes or no) ';
return self.confirm(str, fn, true);
}
fn(parseBool(ok));
});
};
/**
* Choice prompt with `list` of items and callback `fn(index, item)`
*
* Examples:
*
* var list = ['tobi', 'loki', 'jane', 'manny', 'luna'];
*
* console.log('Choose the coolest pet:');
* program.choose(list, function(i){
* console.log('you chose %d "%s"', i, list[i]);
* process.stdin.destroy();
* });
*
* @param {Array} list
* @param {Number|Function} index or fn
* @param {Function} fn
* @api public
*/
Command.prototype.choose = function(list, index, fn){
var self = this
, hasDefault = 'number' == typeof index;
if (!hasDefault) {
fn = index;
index = null;
}
list.forEach(function(item, i){
if (hasDefault && i == index) {
console.log('* %d) %s', i + 1, item);
} else {
console.log(' %d) %s', i + 1, item);
}
});
function again() {
self.prompt(' : ', function(val){
val = parseInt(val, 10) - 1;
if (hasDefault && isNaN(val)) val = index;
if (null == list[val]) {
again();
} else {
fn(val, list[val]);
}
});
}
again();
};
/**
* Output help information for this command
*
* @api public
*/
Command.prototype.outputHelp = function(){
process.stdout.write(this.helpInformation());
this.emit('--help');
};
/**
* Output help information and exit.
*
* @api public
*/
Command.prototype.help = function(){
this.outputHelp();
process.exit();
};
/**
* Camel-case the given `flag`
*
* @param {String} flag
* @return {String}
* @api private
*/
/**
 * Camel-case the given `flag` ("max-old-space" => "maxOldSpace").
 *
 * Fix: skip empty segments so flags containing consecutive or trailing
 * dashes no longer throw (`(''[0]).toUpperCase()` on undefined).
 *
 * @param {String} flag
 * @return {String}
 * @api private
 */
function camelcase(flag) {
  var words = flag.split('-').filter(function(word) {
    return word.length > 0;
  });
  // preserve original behavior for the empty string
  if (words.length === 0) return '';
  return words.reduce(function(str, word) {
    return str + word[0].toUpperCase() + word.slice(1);
  });
}
/**
* Parse a boolean `str`.
*
* @param {String} str
* @return {Boolean}
* @api private
*/
/**
 * Parse an affirmative answer: "y", "yes", "ok" or "true" (any case).
 *
 * Fix: the original `/^y|yes|ok|true$/i` bound `^` only to `y` and `$`
 * only to `true`, so any string *containing* "yes"/"ok" or *ending* in
 * "true" (e.g. "smokey", "untrue") was accepted. Group the alternation
 * so both anchors apply to every alternative.
 *
 * @param {String} str
 * @return {Boolean}
 * @api private
 */
function parseBool(str) {
  return /^(?:y|yes|ok|true)$/i.test(str);
}
/**
* Pad `str` to `width`.
*
* @param {String} str
* @param {Number} width
* @return {String}
* @api private
*/
/**
 * Right-pad `str` with spaces to `width`; strings already at least
 * `width` long are returned unchanged.
 *
 * @param {String} str
 * @param {Number} width
 * @return {String}
 * @api private
 */
function pad(str, width) {
  var padding = Math.max(0, width - str.length);
  return str + ' '.repeat(padding);
}
/**
* Output help information if necessary
*
* @param {Command} command to output help for
* @param {Array} array of options to search for -h or --help
* @api private
*/
/**
 * Print `cmd`'s help and exit(0) when `options` contains "-h" or "--help".
 *
 * @param {Command} cmd command to output help for
 * @param {Array} options array of option tokens to search
 * @api private
 */
function outputHelpIfNecessary(cmd, options) {
  var flags = options || [];
  for (var i = 0; i < flags.length; i++) {
    var flag = flags[i];
    if (flag === '--help' || flag === '-h') {
      cmd.outputHelp();
      process.exit(0);
    }
  }
}
| nickperez1285/truck-hunt-hackathon | server/node_modules/winser/node_modules/commander/index.js | JavaScript | apache-2.0 | 25,491 |
package com.cardshifter.gdx.screens;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Screen;
import com.badlogic.gdx.math.Rectangle;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.InputEvent;
import com.badlogic.gdx.scenes.scene2d.ui.*;
import com.badlogic.gdx.scenes.scene2d.utils.ClickListener;
import com.cardshifter.api.incoming.UseAbilityMessage;
import com.cardshifter.api.messages.Message;
import com.cardshifter.api.outgoing.*;
import com.cardshifter.gdx.*;
import com.cardshifter.gdx.ui.CardshifterClientContext;
import com.cardshifter.gdx.ui.EntityView;
import com.cardshifter.gdx.ui.PlayerView;
import com.cardshifter.gdx.ui.cards.CardView;
import com.cardshifter.gdx.ui.cards.CardViewSmall;
import com.cardshifter.gdx.ui.zones.CompactHiddenZoneView;
import com.cardshifter.gdx.ui.zones.DefaultZoneView;
import com.cardshifter.gdx.ui.zones.ZoneView;
import java.util.*;
import java.util.List;
/**
* Created by Simon on 1/31/2015.
*/
public class GameScreen implements Screen, TargetableCallback {
private final CardshifterGame game;
private final CardshifterClient client;
private final int playerIndex;  // index of the local player in this game
private final int gameId;
private final Table table;      // root scene2d layout for the whole screen
// lookup tables keyed by server-side entity/zone ids
private final Map<Integer, ZoneView> zoneViews = new HashMap<Integer, ZoneView>();
private final Map<Integer, EntityView> entityViews = new HashMap<Integer, EntityView>();
// zone containers keyed by "<playerIndex><zoneName>" (see addZoneHolder)
private final Map<String, Container<Actor>> holders = new HashMap<String, Container<Actor>>();
private final List<EntityView> targetsSelected = new ArrayList<EntityView>();
private final Screen parentScreen;  // screen to return to when leaving the game
private AvailableTargetsMessage targetsAvailable;  // latest targeting request, or null
// Click handler for target selection: toggles multi-target selections, or
// sends the ability immediately when exactly one target is required.
private final TargetableCallback onTarget = new TargetableCallback() {
	@Override
	public boolean addEntity(EntityView view) {
		// clicking an already-selected target deselects it
		if (targetsSelected.contains(view)) {
			targetsSelected.remove(view);
			Gdx.app.log("GameScreen", "Removing selection " + view.getId());
			view.setTargetable(TargetStatus.TARGETABLE, this);
			return false;
		}
		// single-target abilities fire straight away without a "Done" step
		if (targetsAvailable != null && targetsAvailable.getMax() == 1 && targetsAvailable.getMin() == 1) {
			Gdx.app.log("GameScreen", "Sending selection " + view.getId());
			client.send(new UseAbilityMessage(gameId, targetsAvailable.getEntity(), targetsAvailable.getAction(), new int[]{ view.getId() }));
			return false;
		}
		Gdx.app.log("GameScreen", "Adding selection " + view.getId());
		view.setTargetable(TargetStatus.TARGETED, this);
		return targetsSelected.add(view);
	}
};
private final CardshifterClientContext context;
//private final float screenWidth;
private final float screenHeight;
/**
 * Build the in-game UI: a left control column (back/done buttons plus two
 * unnamed zone holders) and a center column with both players' Hand and
 * Battlefield zones; the local player's hand is docked at the bottom.
 */
public GameScreen(final CardshifterGame game, final CardshifterClient client, NewGameMessage message, final Screen parentScreen) {
	this.parentScreen = parentScreen;
	this.game = game;
	this.client = client;
	this.playerIndex = message.getPlayerIndex();
	this.gameId = message.getGameId();
	this.context = new CardshifterClientContext(game.skin, message.getGameId(), client, game.stage);
	//this.screenWidth = CardshifterGame.STAGE_WIDTH;
	this.screenHeight = CardshifterGame.STAGE_HEIGHT;

	this.table = new Table(game.skin);

	Table leftTable = new Table(game.skin);
	Table topTable = new Table(game.skin);
	//Table rightTable = new Table(game.skin);
	Table centerTable = new Table(game.skin);

	TextButton backToMenu = new TextButton("Back to menu", game.skin);
	backToMenu.addListener(new ClickListener() {
		@Override
		public void clicked(InputEvent event, float x, float y) {
			game.setScreen(parentScreen);
		}
	});
	leftTable.add(backToMenu).expandX().fill().row();

	// unnamed per-player holders (opponent first), e.g. for player info views
	addZoneHolder(leftTable, 1 - this.playerIndex, "").expandY().fillY();
	addZoneHolder(leftTable, this.playerIndex, "").expandY().fillY();

	leftTable.add("controls").row();

	// "Done" confirms a multi-target selection once min..max targets are chosen
	TextButton actionDone = new TextButton("Done", game.skin);
	actionDone.addListener(new ClickListener() {
		@Override
		public void clicked(InputEvent event, float x, float y) {
			if (targetsAvailable != null) {
				int selected = targetsSelected.size();
				if (selected >= targetsAvailable.getMin() && selected <= targetsAvailable.getMax()) {
					int[] targets = new int[targetsSelected.size()];
					for (int i = 0; i < targets.length; i++) {
						targets[i] = targetsSelected.get(i).getId();
					}
					UseAbilityMessage message = new UseAbilityMessage(gameId, targetsAvailable.getEntity(), targetsAvailable.getAction(), targets);
					client.send(message);
				}
			}
		}
	});
	leftTable.add(actionDone);

	topTable.add(leftTable).left().expandY().fillY();
	topTable.add(centerTable).center().expandX().expandY().fill();
	//topTable.add(rightTable).right().width(150).expandY().fillY();

	// opponent's hand and battlefield on top, own battlefield below them
	addZoneHolder(centerTable, 1 - this.playerIndex, "Hand").top().height(this.screenHeight/4);
	addZoneHolder(centerTable, 1 - this.playerIndex, "Battlefield").height(this.screenHeight/4);
	addZoneHolder(centerTable, this.playerIndex, "Battlefield").height(this.screenHeight/4);

	this.table.add(topTable).expand().fill().row();
	// local player's hand docked at the bottom of the screen
	addZoneHolder(this.table, this.playerIndex, "Hand").height(140).expandX().fill();

	this.table.setFillParent(true);
}
/**
 * Add an empty named container to {@code table} and register it in
 * {@code holders} under the key "<playerIndex><zoneName>" so zone views
 * can later be placed into it.
 *
 * @param table parent table receiving a new row
 * @param i player index the zone belongs to
 * @param name zone name (e.g. "Hand", "Battlefield", or "" for misc)
 * @return the layout cell, so callers can further size/align it
 */
private Cell<Container<Actor>> addZoneHolder(Table table, int i, String name) {
	Container<Actor> container = new Container<Actor>();
	container.setName(name);
	// container.fill();
	Cell<Container<Actor>> cell = table.add(container).expandX().fillX();
	table.row();
	holders.put(i + name, container);
	return cell;
}
// Screen lifecycle callbacks. Rendering is driven by the scene2d stage owned
// by CardshifterGame, so most of these are intentionally empty.

@Override
public void render(float delta) {
}

@Override
public void resize(int width, int height) {
}

@Override
public void show() {
	// attach this screen's UI to the shared stage when it becomes active
	game.stage.addActor(table);
}

@Override
public void hide() {
	// detach so the previous screen gets the stage back
	table.remove();
}

@Override
public void pause() {
}

@Override
public void resume() {
}

@Override
public void dispose() {
}
    /**
     * Builds the dispatch table mapping incoming server {@code Message} types to the
     * handlers that update this screen's views. Handlers capture the enclosing
     * screen's state (entityViews, zoneViews, holders, targets).
     */
    public Map<Class<? extends Message>, SpecificHandler<?>> getHandlers() {
        Map<Class<? extends Message>, SpecificHandler<?>> handlers =
                new HashMap<Class<? extends Message>, SpecificHandler<?>>();
        // Server announced which entities may be targeted for a pending action:
        // clear previous selection, then mark exactly the listed entities targetable.
        handlers.put(AvailableTargetsMessage.class, new SpecificHandler<AvailableTargetsMessage>() {
            @Override
            public void handle(AvailableTargetsMessage message) {
                targetsAvailable = message;
                targetsSelected.clear();
                for (EntityView view : entityViews.values()) {
                    view.setTargetable(TargetStatus.NOT_TARGETABLE, onTarget);
                }
                for (int id : message.getTargets()) {
                    EntityView view = entityViews.get(id);
                    if (view != null) {
                        view.setTargetable(TargetStatus.TARGETABLE, onTarget);
                    }
                }
            }
        });
        // An action became usable on an entity; small card views also get drag/drop support.
        handlers.put(UsableActionMessage.class, new SpecificHandler<UsableActionMessage>() {
            @Override
            public void handle(UsableActionMessage message) {
                int id = message.getId();
                EntityView view = entityViews.get(id);
                if (view != null) {
                    view.usableAction(message);
                    if (view instanceof CardViewSmall) {
                        ((CardViewSmall)view).setUsable(GameScreen.this);
                    }
                }
            }
        });
        // Full card info received: rebuild the card's view inside its zone
        // (remove any stale view first so the id maps to exactly one view).
        handlers.put(CardInfoMessage.class, new SpecificHandler<CardInfoMessage>() {
            @Override
            public void handle(CardInfoMessage message) {
                ZoneView zone = getZoneView(message.getZone());
                if (zone != null) {
                    zone.removeCard(message.getId());
                }
                EntityView entityView = entityViews.remove(message.getId());
                if (entityView != null) {
                    entityView.remove();
                }
                if (zone != null) {
                    entityViews.put(message.getId(), zone.addCard(message));
                }
            }
        });
        // An entity left the game entirely: drop it from every zone and the view map.
        handlers.put(EntityRemoveMessage.class, new SpecificHandler<EntityRemoveMessage>() {
            @Override
            public void handle(EntityRemoveMessage message) {
                EntityView view = entityViews.get(message.getEntity());
                for (ZoneView zone : zoneViews.values()) {
                    if (zone.hasCard(message.getEntity())) {
                        zone.removeCard(message.getEntity());
                    }
                }
                if (view != null) {
                    view.entityRemoved();
                    entityViews.remove(message.getEntity());
                }
            }
        });
        // Game finished: show a modal dialog; OK returns to the parent screen.
        handlers.put(GameOverMessage.class, new SpecificHandler<GameOverMessage>() {
            @Override
            public void handle(GameOverMessage message) {
                Dialog dialog = new Dialog("Game Over!", context.getSkin()) {
                    @Override
                    protected void result(Object object) {
                        game.setScreen(parentScreen);
                    }
                };
                dialog.button("OK");
                dialog.show(context.getStage());
            }
        });
        // A player entity arrived: create its view and mount it into the holder
        // keyed by the bare player index (see addZoneHolder call with empty name).
        handlers.put(PlayerMessage.class, new SpecificHandler<PlayerMessage>() {
            @Override
            public void handle(PlayerMessage message) {
                PlayerView playerView = new PlayerView(context, message);
                entityViews.put(message.getId(), playerView);
                Container<Actor> holder = holders.get(String.valueOf(message.getIndex()));
                if (holder != null) {
                    holder.setActor(playerView.getActor());
                }
            }
        });
        // Previously announced actions/targets are void: reset every entity view.
        handlers.put(ResetAvailableActionsMessage.class, new SpecificHandler<ResetAvailableActionsMessage>() {
            @Override
            public void handle(ResetAvailableActionsMessage message) {
                for (EntityView view : entityViews.values()) {
                    view.setTargetable(TargetStatus.NOT_TARGETABLE, null);
                    view.clearUsableActions();
                }
            }
        });
        // A single key/value property of an entity changed.
        handlers.put(UpdateMessage.class, new SpecificHandler<UpdateMessage>() {
            @Override
            public void handle(UpdateMessage message) {
                EntityView entityView = entityViews.get(message.getId());
                if (entityView != null) {
                    entityView.set(message.getKey(), message.getValue());
                }
            }
        });
        // A card moved between zones: detach from the source zone (if visible),
        // attach to the destination zone (if visible) and animate the move.
        handlers.put(ZoneChangeMessage.class, new SpecificHandler<ZoneChangeMessage>() {
            @Override
            public void handle(ZoneChangeMessage message) {
                ZoneView oldZone = getZoneView(message.getSourceZone()); // can be null
                ZoneView destinationZone = getZoneView(message.getDestinationZone());
                int id = message.getEntity();
                CardView entityView = (CardView) entityViews.remove(id); // can be null
                if (oldZone != null) {
                    oldZone.removeCard(id);
                }
                if (destinationZone != null) {
                    CardView newCardView = destinationZone.addCard(new CardInfoMessage(message.getDestinationZone(), id,
                            entityView == null ? null : entityView.getInfo()));
                    if (entityView != null) {
                        entityView.zoneMove(message, destinationZone, newCardView);
                    }
                    entityViews.put(id, newCardView);
                }
                else {
                    if (entityView != null) {
                        entityView.zoneMove(message, destinationZone, null);
                    }
                }
                /*
                Protocol notes (sample traffic):
                Send to AI Medium: ZoneChangeMessage [entity=95, sourceZone=72, destinationZone=73]
                Send to AI Medium: CardInfo: 95 in zone 73 - {SCRAP=1, TAUNT=1, MAX_HEALTH=1, SICKNESS=1, MANA_COST=2, name=The Chopper, ATTACK=2, creatureType=Mech, HEALTH=1, ATTACK_AVAILABLE=1}
                Send to Zomis: ZoneChangeMessage [entity=95, sourceZone=72, destinationZone=73]
                if card is already known, send ZoneChange only
                if card is not known, send ZoneChange first and then CardInfo
                when cards are created from nowhere, ZoneChange with source -1 is sent and then CardInfo
                */
            }
        });
        // A zone was announced: build its view and mount it into the holder
        // keyed by ownerIndex + zoneName.
        handlers.put(ZoneMessage.class, new SpecificHandler<ZoneMessage>() {
            @Override
            public void handle(ZoneMessage message) {
                Gdx.app.log("GameScreen", "Zone " + message);
                ZoneView zoneView = createZoneView(message);
                if (zoneView != null) {
                    PlayerView view = (PlayerView) entityViews.get(message.getOwner());
                    if (view == null) {
                        Gdx.app.log("GameScreen", "no playerView for " + message.getOwner());
                        return;
                    }
                    String key = view.getIndex() + message.getName();
                    Container<Actor> container = holders.get(key);
                    if (container == null) {
                        Gdx.app.log("GameScreen", "no container for " + key);
                        return;
                    }
                    Gdx.app.log("GameScreen", "putting zoneview for " + key);
                    container.setActor(zoneView.getActor());
                    zoneViews.put(message.getId(), zoneView);
                }
            }
        });
        return handlers;
    }
private ZoneView createZoneView(ZoneMessage message) {
String type = message.getName();
if (type.equals("Battlefield")) {
return new DefaultZoneView(context, message, this.entityViews);
}
if (type.equals("Hand")) {
return new DefaultZoneView(context, message, this.entityViews);
}
if (type.equals("Deck")) {
return new CompactHiddenZoneView(game, message);
}
if (type.equals("Cards")) {
return null; // Card models only
}
throw new RuntimeException("Unknown ZoneView type: " + message.getName());
}
    /** Looks up the ZoneView registered for a server-side zone id, or null if none. */
    private ZoneView getZoneView(int id) {
        return this.zoneViews.get(id);
    }
public boolean checkCardDrop(CardViewSmall cardView) {
Table table = (Table)cardView.getActor();
Vector2 stageLoc = table.localToStageCoordinates(new Vector2());
Rectangle tableRect = new Rectangle(stageLoc.x, stageLoc.y, table.getWidth(), table.getHeight());
for (Container<Actor> actor : this.holders.values()) {
if (actor.getName() == "Battlefield") {
Vector2 stageBattlefieldLoc = actor.localToStageCoordinates(new Vector2(actor.getActor().getX(), actor.getActor().getY()));
Vector2 modifiedSBL = new Vector2(stageBattlefieldLoc.x - actor.getWidth()/2, stageBattlefieldLoc.y - actor.getHeight()/2);
Rectangle deckRect = new Rectangle(modifiedSBL.x, modifiedSBL.y, actor.getWidth() * 0.8f, actor.getHeight());
//uncomment this to see the bug where battlefields pop up in strange places
/*
Image squareImage = new Image(new Texture(Gdx.files.internal("cardbg.png")));
squareImage.setPosition(modifiedSBL.x, modifiedSBL.y);
squareImage.setSize(deckRect.width, deckRect.height);
this.game.stage.addActor(squareImage);
*/
if (tableRect.overlaps(deckRect)) {
//this.addEntity(cardView);
System.out.println("target found!");
return true;
}
}
}
return false;
//these can be used to double check the location of the rectangles
/*
Image squareImage = new Image(new Texture(Gdx.files.internal("cardbg.png")));
squareImage.setPosition(modifiedSBL.x, modifiedSBL.y);
squareImage.setSize(deckRect.width, deckRect.height);
this.game.stage.addActor(squareImage);
*/
/*
Image squareImage = new Image(new Texture(Gdx.files.internal("cardbg.png")));
squareImage.setPosition(stageLoc.x, stageLoc.y);
squareImage.setSize(tableRect.width, tableRect.height);
this.game.stage.addActor(squareImage);
*/
}
    /**
     * No-op in this screen: cards dropped outside mulligan mode are not added here.
     *
     * @return always false, signalling the entity was not accepted
     */
    @Override
    public boolean addEntity(EntityView view) {
        //called by the CardViewSmall when not in mulligan mode, nothing will happen
        return false;
    }
}
| Cardshifter/Cardshifter | gdx/core/src/com/cardshifter/gdx/screens/GameScreen.java | Java | apache-2.0 | 17,117 |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.inspections.quickfix;
import com.intellij.codeInsight.CodeInsightUtilCore;
import com.intellij.codeInsight.FileModificationService;
import com.intellij.codeInsight.template.TemplateBuilder;
import com.intellij.codeInsight.template.TemplateBuilderFactory;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.Function;
import com.jetbrains.python.PyBundle;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.impl.PyPsiUtils;
import com.jetbrains.python.psi.types.PyClassType;
import com.jetbrains.python.psi.types.PyClassTypeImpl;
import com.jetbrains.python.psi.types.PyType;
import com.jetbrains.python.psi.types.TypeEvalContext;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * Quick-fix that adds a missing field to a Python class, either as an instance
 * attribute assigned inside __init__ (creating __init__ if necessary) or as a
 * class-level attribute, then opens a live template so the user can edit the
 * new name or initializer in place.
 * Available on self.my_something when my_something is unresolved.
 * User: dcheryasov
 */
public class AddFieldQuickFix implements LocalQuickFix {

  // Text of the initializer expression for the new field.
  private final String myInitializer;
  // Class name, used only for the quick-fix popup text.
  private final String myClassName;
  // Name of the field being created.
  private final String myIdentifier;
  // When true, the live template edits the initializer; otherwise the field name.
  private boolean replaceInitializer = false;

  public AddFieldQuickFix(@NotNull final String identifier, @NotNull final String initializer, final String className, boolean replace) {
    myIdentifier = identifier;
    myInitializer = initializer;
    myClassName = className;
    replaceInitializer = replace;
  }

  /** Text shown for this quick-fix in the intentions popup. */
  @NotNull
  public String getName() {
    return PyBundle.message("QFIX.NAME.add.field.$0.to.class.$1", myIdentifier, myClassName);
  }

  @NotNull
  public String getFamilyName() {
    return "Add field to class";
  }

  /**
   * Appends the statement produced by {@code callback} (given the constructor's
   * actual 'self' name) as the last statement of {@code init}.
   *
   * @return the PSI element of the newly inserted statement
   */
  @NotNull
  public static PsiElement appendToMethod(PyFunction init, Function<String, PyStatement> callback) {
    // add this field as the last stmt of the constructor
    final PyStatementList statementList = init.getStatementList();
    // name of 'self' may be different for fancier styles
    String selfName = PyNames.CANONICAL_SELF;
    final PyParameter[] params = init.getParameterList().getParameters();
    if (params.length > 0) {
      selfName = params[0].getName();
    }
    final PyStatement newStmt = callback.fun(selfName);
    final PsiElement result = PyUtil.addElementToStatementList(newStmt, statementList, true);
    PyPsiUtils.removeRedundantPass(statementList);
    return result;
  }

  /**
   * Entry point of the quick-fix: resolves the target class from the problem
   * element, inserts the field (instance or class level), and launches the
   * in-editor template; shows an error balloon if insertion failed.
   */
  public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
    // expect the descriptor to point to the unresolved identifier.
    final PsiElement element = descriptor.getPsiElement();
    final PyClassType type = getClassType(element);
    if (type == null) return;
    final PyClass cls = type.getPyClass();
    if (!FileModificationService.getInstance().preparePsiElementForWrite(cls)) return;
    WriteAction.run(() -> {
      PsiElement initStatement;
      if (!type.isDefinition()) {
        // Instance reference (self.x): add an assignment inside __init__.
        initStatement = addFieldToInit(project, cls, myIdentifier, new CreateFieldCallback(project, myIdentifier, myInitializer));
      }
      else {
        // Class reference (Cls.x): add a class-level attribute.
        PyStatement field = PyElementGenerator.getInstance(project)
          .createFromText(LanguageLevel.getDefault(), PyStatement.class, myIdentifier + " = " + myInitializer);
        initStatement = PyUtil.addElementToStatementList(field, cls.getStatementList(), true);
      }
      if (initStatement != null) {
        showTemplateBuilder(initStatement, cls.getContainingFile());
        return;
      }
      // somehow we failed. tell about this
      PyUtil.showBalloon(project, PyBundle.message("QFIX.failed.to.add.field"), MessageType.ERROR);
    });
  }

  // The fix opens an editor template, so the write action is taken manually in applyFix.
  @Override
  public boolean startInWriteAction() {
    return false;
  }

  /**
   * Derives the class type to modify: the qualifier's type for self.x style
   * references, otherwise the class enclosing the element.
   */
  private static PyClassType getClassType(@NotNull final PsiElement element) {
    if (element instanceof PyQualifiedExpression) {
      final PyExpression qualifier = ((PyQualifiedExpression)element).getQualifier();
      if (qualifier == null) return null;
      final PyType type = TypeEvalContext.userInitiated(element.getProject(), element.getContainingFile()).getType(qualifier);
      return type instanceof PyClassType ? (PyClassType)type : null;
    }
    final PyClass aClass = PsiTreeUtil.getParentOfType(element, PyClass.class);
    return aClass != null ? new PyClassTypeImpl(aClass, false) : null;
  }

  /**
   * Opens a live template over the freshly inserted assignment so the user can
   * rename either the initializer or the field name directly in the editor.
   */
  private void showTemplateBuilder(PsiElement initStatement, @NotNull final PsiFile file) {
    initStatement = CodeInsightUtilCore.forcePsiPostprocessAndRestoreElement(initStatement);
    if (initStatement instanceof PyAssignmentStatement) {
      final TemplateBuilder builder = TemplateBuilderFactory.getInstance().createTemplateBuilder(initStatement);
      final PyExpression assignedValue = ((PyAssignmentStatement)initStatement).getAssignedValue();
      final PyExpression leftExpression = ((PyAssignmentStatement)initStatement).getLeftHandSideExpression();
      if (assignedValue != null && leftExpression != null) {
        if (replaceInitializer)
          builder.replaceElement(assignedValue, myInitializer);
        else
          builder.replaceElement(leftExpression.getLastChild(), myIdentifier);
        final VirtualFile virtualFile = file.getVirtualFile();
        if (virtualFile == null) return;
        final Editor editor = FileEditorManager.getInstance(file.getProject()).openTextEditor(
          new OpenFileDescriptor(file.getProject(), virtualFile), true);
        if (editor == null) return;
        builder.run(editor, false);
      }
    }
  }

  /**
   * Appends the statement built by {@code callback} to the class's __init__,
   * creating an __init__ (modelled on the nearest ancestor's, if any) when the
   * class has none.
   *
   * @return the inserted statement, or null when nothing could be added
   */
  @Nullable
  public static PsiElement addFieldToInit(Project project, PyClass cls, String itemName, Function<String, PyStatement> callback) {
    if (cls != null && itemName != null) {
      PyFunction init = cls.findMethodByName(PyNames.INIT, false, null);
      if (init != null) {
        return appendToMethod(init, callback);
      }
      else { // no init! boldly copy ancestor's.
        for (PyClass ancestor : cls.getAncestorClasses(null)) {
          init = ancestor.findMethodByName(PyNames.INIT, false, null);
          if (init != null) break;
        }
        PyFunction newInit = createInitMethod(project, cls, init);
        appendToMethod(newInit, callback);
        PsiElement addAnchor = null;
        PyFunction[] meths = cls.getMethods();
        if (meths.length > 0) addAnchor = meths[0].getPrevSibling();
        PyStatementList clsContent = cls.getStatementList();
        newInit = (PyFunction) clsContent.addAfter(newInit, addAnchor);
        PyUtil.showBalloon(project, PyBundle.message("QFIX.added.constructor.$0.for.field.$1", cls.getName(), itemName), MessageType.INFO);
        final PyStatementList statementList = newInit.getStatementList();
        final PyStatement[] statements = statementList.getStatements();
        return statements.length != 0 ? statements[0] : null;
      }
    }
    return null;
  }

  /**
   * Builds the source text of a new __init__: copies the ancestor's parameter
   * list when one exists and chains to it (super() for new-style classes,
   * Ancestor.__init__(self, ...) otherwise); plain "def __init__(self): pass"
   * when there is no ancestor __init__.
   */
  @NotNull
  private static PyFunction createInitMethod(Project project, PyClass cls, @Nullable PyFunction ancestorInit) {
    // found it; copy its param list and make a call to it.
    String paramList = ancestorInit != null ? ancestorInit.getParameterList().getText() : "(self)";
    String functionText = "def " + PyNames.INIT + paramList + ":\n";
    if (ancestorInit == null) functionText += " pass";
    else {
      final PyClass ancestorClass = ancestorInit.getContainingClass();
      if (ancestorClass != null && !PyUtil.isObjectClass(ancestorClass)) {
        StringBuilder sb = new StringBuilder();
        PyParameter[] params = ancestorInit.getParameterList().getParameters();
        boolean seen = false;
        if (cls.isNewStyleClass(null)) {
          // form the super() call
          sb.append("super(");
          if (!LanguageLevel.forElement(cls).isPy3K()) {
            // Python 2 needs the explicit super(Cls, self) arguments.
            sb.append(cls.getName());
            // NOTE: assume that we have at least the first param
            String self_name = params[0].getName();
            sb.append(", ").append(self_name);
          }
          sb.append(").").append(PyNames.INIT).append("(");
        }
        else {
          // Old-style class: call the ancestor's __init__ unbound.
          sb.append(ancestorClass.getName());
          sb.append(".__init__(self");
          seen = true;
        }
        for (int i = 1; i < params.length; i += 1) {
          if (seen) sb.append(", ");
          else seen = true;
          sb.append(params[i].getText());
        }
        sb.append(")");
        functionText += " " + sb.toString();
      }
      else {
        functionText += " pass";
      }
    }
    return PyElementGenerator.getInstance(project).createFromText(
      LanguageLevel.getDefault(), PyFunction.class, functionText,
      new int[]{0}
    );
  }

  /** Produces "self.item = initializer" statements for appendToMethod. */
  private static class CreateFieldCallback implements Function<String, PyStatement> {
    private final Project myProject;
    private final String myItemName;
    private final String myInitializer;

    private CreateFieldCallback(Project project, String itemName, String initializer) {
      myProject = project;
      myItemName = itemName;
      myInitializer = initializer;
    }

    public PyStatement fun(String selfName) {
      return PyElementGenerator.getInstance(myProject).createFromText(LanguageLevel.getDefault(), PyStatement.class, selfName + "." + myItemName + " = " + myInitializer);
    }
  }
}
| jk1/intellij-community | python/src/com/jetbrains/python/inspections/quickfix/AddFieldQuickFix.java | Java | apache-2.0 | 10,326 |
// Copyright 2013 Matthew Baird
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package core
import (
"encoding/json"
"fmt"
"github.com/mattbaird/elastigo/api"
)
// Validate allows a user to validate a potentially expensive query without executing it.
// The _type segment is omitted from the URL when empty.
// see http://www.elasticsearch.org/guide/reference/api/validate.html
func Validate(index string, _type string, args map[string]interface{}) (api.BaseResponse, error) {
	var url string
	var retval api.BaseResponse
	if len(_type) > 0 {
		url = fmt.Sprintf("/%s/%s/_validate/", index, _type)
	} else {
		url = fmt.Sprintf("/%s/_validate/", index)
	}
	body, err := api.DoCommand("GET", url, args, nil)
	if err != nil {
		return retval, err
	}
	// The original code re-checked err == nil here, which was always true after
	// the early return above; unmarshal unconditionally instead.
	if jsonErr := json.Unmarshal(body, &retval); jsonErr != nil {
		return retval, jsonErr
	}
	return retval, nil
}
// Validation mirrors the body of a _validate response: overall validity,
// shard status, and optional per-index explanations.
type Validation struct {
	Valid bool `json:"valid"`
	Shards api.Status `json:"_shards"`
	// NOTE: the field name keeps a historical misspelling for API compatibility;
	// the JSON key "explanations" is spelled correctly.
	Explainations []Explaination `json:"explanations,omitempty"`
}
// Explaination describes why a query is (in)valid for a single index.
// (The misspelled type name is kept for backward compatibility with callers.)
type Explaination struct {
	Index string `json:"index"`
	Valid bool `json:"valid"`
	Error string `json:"error"`
}
| icecrime/vossibility-collector | vendor/src/github.com/mattbaird/elastigo/core/validate.go | GO | apache-2.0 | 1,659 |
<?php
namespace Topxia\Service\User\Dao\Impl;
use Topxia\Service\Common\BaseDao;
use Topxia\Service\User\Dao\UserFortuneLogDao;
class UserFortuneLogDaoImpl extends BaseDao implements UserFortuneLogDao
{
    protected $table = 'user_fortune_log';

    /**
     * Inserts a fortune log row and returns the freshly persisted record.
     *
     * @param array $log column => value pairs to insert
     * @return array the stored row, re-read by primary key
     */
    public function addLog(array $log)
    {
        $connection = $this->getConnection();
        $insertedRows = $connection->insert($this->table, $log);
        if ($insertedRows <= 0) {
            throw $this->createDaoException('Insert log error');
        }
        return $this->getLog($connection->lastInsertId());
    }

    /**
     * Fetches a single fortune log row by primary key.
     *
     * @param int $id primary key of the log row
     * @return array|false the row, or false when no row matches
     */
    public function getLog($id)
    {
        $query = "SELECT * FROM {$this->table} WHERE id = ? LIMIT 1";
        return $this->getConnection()->fetchAssoc($query, array($id));
    }
}
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import time
import urllib
from tempest.common import rest_client
from tempest import config
from tempest import exceptions
from tempest.openstack.common import log as logging
CONF = config.CONF
LOG = logging.getLogger(__name__)
class SnapshotsClientJSON(rest_client.RestClient):
    """Client class to send CRUD Volume snapshot API requests."""

    def __init__(self, auth_provider):
        super(SnapshotsClientJSON, self).__init__(auth_provider)
        self.service = CONF.volume.catalog_type
        # Polling parameters used by wait_for_snapshot_status().
        self.build_interval = CONF.volume.build_interval
        self.build_timeout = CONF.volume.build_timeout

    def list_snapshots(self, params=None):
        """List all the snapshots."""
        url = 'snapshots'
        if params:
            url += '?%s' % urllib.urlencode(params)
        resp, body = self.get(url)
        body = json.loads(body)
        return resp, body['snapshots']

    def list_snapshots_with_detail(self, params=None):
        """List the details of all snapshots."""
        url = 'snapshots/detail'
        if params:
            url += '?%s' % urllib.urlencode(params)
        resp, body = self.get(url)
        body = json.loads(body)
        return resp, body['snapshots']

    def get_snapshot(self, snapshot_id):
        """Returns the details of a single snapshot."""
        url = "snapshots/%s" % str(snapshot_id)
        resp, body = self.get(url)
        body = json.loads(body)
        return resp, body['snapshot']

    def create_snapshot(self, volume_id, **kwargs):
        """
        Creates a new snapshot.
        volume_id(Required): id of the volume.
        force: Create a snapshot even if the volume attached (Default=False)
        display_name: Optional snapshot Name.
        display_description: User friendly snapshot description.
        """
        post_body = {'volume_id': volume_id}
        post_body.update(kwargs)
        post_body = json.dumps({'snapshot': post_body})
        resp, body = self.post('snapshots', post_body)
        body = json.loads(body)
        return resp, body['snapshot']

    def update_snapshot(self, snapshot_id, **kwargs):
        """Updates a snapshot."""
        put_body = json.dumps({'snapshot': kwargs})
        resp, body = self.put('snapshots/%s' % snapshot_id, put_body)
        body = json.loads(body)
        return resp, body['snapshot']

    # NOTE(afazekas): just for the wait function
    def _get_snapshot_status(self, snapshot_id):
        resp, body = self.get_snapshot(snapshot_id)
        status = body['status']
        # NOTE(afazekas): snapshot can reach an "error"
        # state in a "normal" lifecycle
        if (status == 'error'):
            raise exceptions.SnapshotBuildErrorException(
                snapshot_id=snapshot_id)
        return status

    # NOTE(afazkas): Wait reinvented again. It is not in the correct layer
    def wait_for_snapshot_status(self, snapshot_id, status):
        """Waits for a Snapshot to reach a given status."""
        start_time = time.time()
        old_value = value = self._get_snapshot_status(snapshot_id)
        while True:
            dtime = time.time() - start_time
            if value != old_value:
                LOG.info('Value transition from "%s" to "%s"'
                         'in %d second(s).', old_value,
                         value, dtime)
            if (value == status):
                return value
            if dtime > self.build_timeout:
                message = ('Time Limit Exceeded! (%ds)'
                           'while waiting for %s, '
                           'but we got %s.' %
                           (self.build_timeout, status, value))
                raise exceptions.TimeoutException(message)
            # Bug fix: the original slept twice per iteration (once before the
            # status checks and once here), doubling the effective poll period
            # and delaying even an immediate success by one interval. Sleep
            # exactly once per iteration, after the checks.
            time.sleep(self.build_interval)
            old_value = value
            value = self._get_snapshot_status(snapshot_id)

    def delete_snapshot(self, snapshot_id):
        """Delete Snapshot."""
        return self.delete("snapshots/%s" % str(snapshot_id))

    def is_resource_deleted(self, id):
        """Returns True once the snapshot can no longer be fetched."""
        try:
            self.get_snapshot(id)
        except exceptions.NotFound:
            return True
        return False

    def reset_snapshot_status(self, snapshot_id, status):
        """Reset the specified snapshot's status."""
        post_body = json.dumps({'os-reset_status': {"status": status}})
        resp, body = self.post('snapshots/%s/action' % snapshot_id, post_body)
        return resp, body

    def update_snapshot_status(self, snapshot_id, status, progress):
        """Update the specified snapshot's status."""
        post_body = {
            'status': status,
            'progress': progress
        }
        post_body = json.dumps({'os-update_snapshot_status': post_body})
        url = 'snapshots/%s/action' % str(snapshot_id)
        resp, body = self.post(url, post_body)
        return resp, body

    def create_snapshot_metadata(self, snapshot_id, metadata):
        """Create metadata for the snapshot."""
        post_body = json.dumps({'metadata': metadata})
        url = "snapshots/%s/metadata" % str(snapshot_id)
        resp, body = self.post(url, post_body)
        body = json.loads(body)
        return resp, body['metadata']

    def get_snapshot_metadata(self, snapshot_id):
        """Get metadata of the snapshot."""
        url = "snapshots/%s/metadata" % str(snapshot_id)
        resp, body = self.get(url)
        body = json.loads(body)
        return resp, body['metadata']

    def update_snapshot_metadata(self, snapshot_id, metadata):
        """Update metadata for the snapshot."""
        put_body = json.dumps({'metadata': metadata})
        url = "snapshots/%s/metadata" % str(snapshot_id)
        resp, body = self.put(url, put_body)
        body = json.loads(body)
        return resp, body['metadata']

    def update_snapshot_metadata_item(self, snapshot_id, id, meta_item):
        """Update metadata item for the snapshot."""
        put_body = json.dumps({'meta': meta_item})
        url = "snapshots/%s/metadata/%s" % (str(snapshot_id), str(id))
        resp, body = self.put(url, put_body)
        body = json.loads(body)
        return resp, body['meta']

    def delete_snapshot_metadata_item(self, snapshot_id, id):
        """Delete metadata item for the snapshot."""
        url = "snapshots/%s/metadata/%s" % (str(snapshot_id), str(id))
        resp, body = self.delete(url)
        return resp, body

    def force_delete_snapshot(self, snapshot_id):
        """Force Delete Snapshot."""
        post_body = json.dumps({'os-force_delete': {}})
        resp, body = self.post('snapshots/%s/action' % snapshot_id, post_body)
        return resp, body
| vedujoshi/os_tempest | tempest/services/volume/json/snapshots_client.py | Python | apache-2.0 | 7,293 |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/qldb/model/TagResourceResult.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <aws/core/AmazonWebServiceResult.h>
#include <aws/core/utils/StringUtils.h>
#include <aws/core/utils/UnreferencedParam.h>
#include <utility>
using namespace Aws::QLDB::Model;
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
using namespace Aws;
// Default constructor: the TagResource API returns an empty body, so there
// is no state to initialize.
TagResourceResult::TagResourceResult()
{
}
// Constructs the result from a service response by delegating to operator=.
TagResourceResult::TagResourceResult(const Aws::AmazonWebServiceResult<JsonValue>& result)
{
  *this = result;
}
// Assignment from a service response: TagResource returns no JSON fields,
// so the payload is intentionally ignored.
TagResourceResult& TagResourceResult::operator =(const Aws::AmazonWebServiceResult<JsonValue>& result)
{
  AWS_UNREFERENCED_PARAM(result);
  return *this;
}
| jt70471/aws-sdk-cpp | aws-cpp-sdk-qldb/source/model/TagResourceResult.cpp | C++ | apache-2.0 | 803 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.algebricks.rewriter.rules.subplan;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.ListSet;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
import org.apache.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
import org.apache.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
import org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
import org.apache.hyracks.algebricks.core.algebra.properties.FunctionalDependency;
import org.apache.hyracks.algebricks.core.algebra.util.OperatorManipulationUtil;
import org.apache.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
import org.apache.hyracks.algebricks.core.config.AlgebricksConfig;
import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
import org.apache.hyracks.algebricks.rewriter.util.PhysicalOptimizationsUtil;
/**
 * The rule searches for a SUBPLAN operator with an optional PROJECT operator and
* an AGGREGATE followed by a join operator.
*
* <pre>
* Before
*
* plan__parent
* SUBPLAN {
* PROJECT?
* AGGREGATE
* plan__nested_A
* INNER_JOIN | LEFT_OUTER_JOIN ($condition, $left, $right)
* plan__nested_B
* }
* plan__child
*
* where $condition does not equal a constant true.
*
* After (This is a general application of the rule, specifics may vary based on the query plan.)
*
* plan__parent
* GROUP_BY {
* PROJECT?
* AGGREGATE
* plan__nested_A
* SELECT( algebricks:not( is_null( $right ) ) )
* NESTED_TUPLE_SOURCE
* }
* SUBPLAN {
* INNER_JOIN | LEFT_OUTER_JOIN ($condition, $left, $right)
* plan__nested_B
* }
* plan__child
* </pre>
*
* @author prestonc
*/
public class IntroduceGroupByForSubplanRule implements IAlgebraicRewriteRule {
    /** This rule fires only on the post-order visit; the pre-order pass is a no-op. */
    @Override
    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
            throws AlgebricksException {
        return false;
    }
@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
throws AlgebricksException {
AbstractLogicalOperator op0 = (AbstractLogicalOperator) opRef.getValue();
if (op0.getOperatorTag() != LogicalOperatorTag.SUBPLAN) {
return false;
}
SubplanOperator subplan = (SubplanOperator) op0;
Iterator<ILogicalPlan> plansIter = subplan.getNestedPlans().iterator();
ILogicalPlan p = null;
while (plansIter.hasNext()) {
p = plansIter.next();
}
if (p == null) {
return false;
}
if (p.getRoots().size() != 1) {
return false;
}
Mutable<ILogicalOperator> subplanRoot = p.getRoots().get(0);
AbstractLogicalOperator op1 = (AbstractLogicalOperator) subplanRoot.getValue();
Mutable<ILogicalOperator> botRef = subplanRoot;
AbstractLogicalOperator op2;
// Project is optional
if (op1.getOperatorTag() != LogicalOperatorTag.PROJECT) {
op2 = op1;
} else {
ProjectOperator project = (ProjectOperator) op1;
botRef = project.getInputs().get(0);
op2 = (AbstractLogicalOperator) botRef.getValue();
}
if (op2.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
return false;
}
AggregateOperator aggregate = (AggregateOperator) op2;
Set<LogicalVariable> free = new HashSet<LogicalVariable>();
VariableUtilities.getUsedVariables(aggregate, free);
Mutable<ILogicalOperator> op3Ref = aggregate.getInputs().get(0);
AbstractLogicalOperator op3 = (AbstractLogicalOperator) op3Ref.getValue();
while (op3.getInputs().size() == 1) {
Set<LogicalVariable> prod = new HashSet<LogicalVariable>();
VariableUtilities.getProducedVariables(op3, prod);
free.removeAll(prod);
VariableUtilities.getUsedVariables(op3, free);
botRef = op3Ref;
op3Ref = op3.getInputs().get(0);
op3 = (AbstractLogicalOperator) op3Ref.getValue();
}
if (op3.getOperatorTag() != LogicalOperatorTag.INNERJOIN
&& op3.getOperatorTag() != LogicalOperatorTag.LEFTOUTERJOIN) {
return false;
}
AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) op3;
if (join.getCondition().getValue() == ConstantExpression.TRUE) {
return false;
}
VariableUtilities.getUsedVariables(join, free);
AbstractLogicalOperator b0 = (AbstractLogicalOperator) join.getInputs().get(0).getValue();
// see if there's an NTS at the end of the pipeline
NestedTupleSourceOperator outerNts = getNts(b0);
if (outerNts == null) {
AbstractLogicalOperator b1 = (AbstractLogicalOperator) join.getInputs().get(1).getValue();
outerNts = getNts(b1);
if (outerNts == null) {
return false;
}
}
Set<LogicalVariable> pkVars = computeGbyVars(outerNts, free, context);
if (pkVars == null || pkVars.size() < 1) {
// there is no non-trivial primary key, group-by keys are all live variables
// that were produced by descendant or self
ILogicalOperator subplanInput = subplan.getInputs().get(0).getValue();
pkVars = new HashSet<LogicalVariable>();
//get live variables
VariableUtilities.getLiveVariables(subplanInput, pkVars);
//get produced variables
Set<LogicalVariable> producedVars = new HashSet<LogicalVariable>();
VariableUtilities.getProducedVariablesInDescendantsAndSelf(subplanInput, producedVars);
//retain the intersection
pkVars.retainAll(producedVars);
}
AlgebricksConfig.ALGEBRICKS_LOGGER.fine("Found FD for introducing group-by: " + pkVars);
Mutable<ILogicalOperator> rightRef = join.getInputs().get(1);
LogicalVariable testForNull = null;
AbstractLogicalOperator right = (AbstractLogicalOperator) rightRef.getValue();
switch (right.getOperatorTag()) {
case UNNEST: {
UnnestOperator innerUnnest = (UnnestOperator) right;
// Select [ $y != null ]
testForNull = innerUnnest.getVariable();
break;
}
case RUNNINGAGGREGATE: {
ILogicalOperator inputToRunningAggregate = right.getInputs().get(0).getValue();
Set<LogicalVariable> producedVars = new ListSet<LogicalVariable>();
VariableUtilities.getProducedVariables(inputToRunningAggregate, producedVars);
if (!producedVars.isEmpty()) {
// Select [ $y != null ]
testForNull = producedVars.iterator().next();
}
break;
}
case DATASOURCESCAN: {
DataSourceScanOperator innerScan = (DataSourceScanOperator) right;
// Select [ $y != null ]
if (innerScan.getVariables().size() == 1) {
testForNull = innerScan.getVariables().get(0);
}
break;
}
default:
break;
}
if (testForNull == null) {
testForNull = context.newVar();
AssignOperator tmpAsgn = new AssignOperator(testForNull,
new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
tmpAsgn.getInputs().add(new MutableObject<ILogicalOperator>(rightRef.getValue()));
rightRef.setValue(tmpAsgn);
context.computeAndSetTypeEnvironmentForOperator(tmpAsgn);
}
IFunctionInfo finfoEq = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.IS_MISSING);
ILogicalExpression isNullTest = new ScalarFunctionCallExpression(finfoEq,
new MutableObject<ILogicalExpression>(new VariableReferenceExpression(testForNull)));
IFunctionInfo finfoNot = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.NOT);
ScalarFunctionCallExpression nonNullTest = new ScalarFunctionCallExpression(finfoNot,
new MutableObject<ILogicalExpression>(isNullTest));
SelectOperator selectNonNull = new SelectOperator(new MutableObject<ILogicalExpression>(nonNullTest), false,
null);
GroupByOperator g = new GroupByOperator();
Mutable<ILogicalOperator> newSubplanRef = new MutableObject<ILogicalOperator>(subplan);
NestedTupleSourceOperator nts = new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(g));
opRef.setValue(g);
selectNonNull.getInputs().add(new MutableObject<ILogicalOperator>(nts));
List<Mutable<ILogicalOperator>> prodInpList = botRef.getValue().getInputs();
prodInpList.clear();
prodInpList.add(new MutableObject<ILogicalOperator>(selectNonNull));
ILogicalPlan gPlan = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(subplanRoot.getValue()));
g.getNestedPlans().add(gPlan);
subplanRoot.setValue(op3Ref.getValue());
g.getInputs().add(newSubplanRef);
HashSet<LogicalVariable> underVars = new HashSet<LogicalVariable>();
VariableUtilities.getLiveVariables(subplan.getInputs().get(0).getValue(), underVars);
underVars.removeAll(pkVars);
Map<LogicalVariable, LogicalVariable> mappedVars = buildVarExprList(pkVars, context, g, g.getGroupByList());
context.updatePrimaryKeys(mappedVars);
for (LogicalVariable uv : underVars) {
g.getDecorList().add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(null,
new MutableObject<ILogicalExpression>(new VariableReferenceExpression(uv))));
}
OperatorPropertiesUtil.typeOpRec(subplanRoot, context);
OperatorPropertiesUtil.typeOpRec(gPlan.getRoots().get(0), context);
context.computeAndSetTypeEnvironmentForOperator(g);
return true;
}
private NestedTupleSourceOperator getNts(AbstractLogicalOperator op) {
AbstractLogicalOperator alo = op;
do {
if (alo.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
return (NestedTupleSourceOperator) alo;
}
if (alo.getInputs().size() != 1) {
return null;
}
alo = (AbstractLogicalOperator) alo.getInputs().get(0).getValue();
} while (true);
}
protected Set<LogicalVariable> computeGbyVars(AbstractLogicalOperator op, Set<LogicalVariable> freeVars,
IOptimizationContext context) throws AlgebricksException {
PhysicalOptimizationsUtil.computeFDsAndEquivalenceClasses(op, context);
List<FunctionalDependency> fdList = context.getFDList(op);
if (fdList == null) {
return null;
}
// check if any of the FDs is a key
List<LogicalVariable> all = new ArrayList<LogicalVariable>();
VariableUtilities.getLiveVariables(op, all);
all.retainAll(freeVars);
for (FunctionalDependency fd : fdList) {
if (fd.getTail().containsAll(all)) {
return new HashSet<LogicalVariable>(fd.getHead());
}
}
return null;
}
private Map<LogicalVariable, LogicalVariable> buildVarExprList(Collection<LogicalVariable> vars,
IOptimizationContext context, GroupByOperator g,
List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> outVeList) throws AlgebricksException {
Map<LogicalVariable, LogicalVariable> m = new HashMap<LogicalVariable, LogicalVariable>();
for (LogicalVariable ov : vars) {
LogicalVariable newVar = context.newVar();
ILogicalExpression varExpr = new VariableReferenceExpression(newVar);
outVeList.add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(ov,
new MutableObject<ILogicalExpression>(varExpr)));
for (ILogicalPlan p : g.getNestedPlans()) {
for (Mutable<ILogicalOperator> r : p.getRoots()) {
OperatorManipulationUtil.substituteVarRec((AbstractLogicalOperator) r.getValue(), ov, newVar, true,
context);
}
}
AbstractLogicalOperator opUnder = (AbstractLogicalOperator) g.getInputs().get(0).getValue();
OperatorManipulationUtil.substituteVarRec(opUnder, ov, newVar, true, context);
m.put(ov, newVar);
}
return m;
}
}
| ty1er/incubator-asterixdb | hyracks-fullstack/algebricks/algebricks-rewriter/src/main/java/org/apache/hyracks/algebricks/rewriter/rules/subplan/IntroduceGroupByForSubplanRule.java | Java | apache-2.0 | 15,726 |
/**
* Copyright 2009 - 2011 Sergio Bossa (sergio.bossa@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package terrastore.store.features;
import java.io.IOException;
import java.io.Serializable;
import java.util.Map;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.msgpack.MessagePackable;
import org.msgpack.MessageTypeException;
import org.msgpack.MessageUnpackable;
import org.msgpack.Packer;
import org.msgpack.Unpacker;
import terrastore.util.io.MsgPackUtils;
/**
* Update object carrying data about the update function, timeout and parameters.
*
* @author Sergio Bossa
*/
public class Update implements MessagePackable, MessageUnpackable, Serializable {

    private static final long serialVersionUID = 12345678901L;

    private String functionName;
    private long timeoutInMillis;
    private Map<String, Object> parameters;

    /**
     * Creates an update descriptor.
     *
     * @param functionName Name of the update function to apply.
     * @param timeoutInMillis Maximum time, in milliseconds, allowed for the update.
     * @param parameters Named arguments handed to the update function.
     */
    public Update(String functionName, long timeoutInMillis, Map<String, Object> parameters) {
        this.functionName = functionName;
        this.timeoutInMillis = timeoutInMillis;
        this.parameters = parameters;
    }

    /** No-arg constructor, required for message unpacking / serialization. */
    public Update() {
    }

    /** @return the name of the update function. */
    public String getFunctionName() {
        return functionName;
    }

    /** @return the update timeout in milliseconds. */
    public long getTimeoutInMillis() {
        return timeoutInMillis;
    }

    /** @return the named parameters of the update function. */
    public Map<String, Object> getParameters() {
        return parameters;
    }

    @Override
    public void messagePack(Packer packer) throws IOException {
        // Field order must stay in lock-step with messageUnpack.
        MsgPackUtils.packString(packer, functionName);
        MsgPackUtils.packLong(packer, timeoutInMillis);
        MsgPackUtils.packGenericMap(packer, parameters);
    }

    @Override
    public void messageUnpack(Unpacker unpacker) throws IOException, MessageTypeException {
        functionName = MsgPackUtils.unpackString(unpacker);
        timeoutInMillis = MsgPackUtils.unpackLong(unpacker);
        parameters = MsgPackUtils.unpackGenericMap(unpacker);
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof Update)) {
            return false;
        }
        Update other = (Update) obj;
        return new EqualsBuilder()
                .append(this.functionName, other.functionName)
                .append(this.timeoutInMillis, other.timeoutInMillis)
                .append(this.parameters, other.parameters)
                .isEquals();
    }

    @Override
    public int hashCode() {
        return new HashCodeBuilder()
                .append(functionName)
                .append(timeoutInMillis)
                .append(parameters)
                .toHashCode();
    }
}
| byzhang/terrastore | src/main/java/terrastore/store/features/Update.java | Java | apache-2.0 | 3,088 |
package assertion_test
import (
"errors"
. "github.com/cloudfoundry/bosh-agent/internal/github.com/onsi/ginkgo"
. "github.com/cloudfoundry/bosh-agent/internal/github.com/onsi/gomega"
. "github.com/cloudfoundry/bosh-agent/internal/github.com/onsi/gomega/internal/assertion"
"github.com/cloudfoundry/bosh-agent/internal/github.com/onsi/gomega/internal/fakematcher"
)
// Specs for the internal Assertion type: every assertion form
// (Should/ShouldNot/To/ToNot/NotTo) must forward the actual value to the
// matcher and report failures through the registered fail handler.
var _ = Describe("Assertion", func() {
	var (
		a                 *Assertion
		failureMessage    string
		failureCallerSkip int
		matcher           *fakematcher.FakeMatcher
	)
	input := "The thing I'm testing"
	// fakeFailHandler records the failure message and caller-skip value so the
	// specs below can assert on what the Assertion reported.
	var fakeFailHandler = func(message string, callerSkip ...int) {
		failureMessage = message
		if len(callerSkip) == 1 {
			failureCallerSkip = callerSkip[0]
		}
	}
	BeforeEach(func() {
		matcher = &fakematcher.FakeMatcher{}
		failureMessage = ""
		failureCallerSkip = 0
		a = New(input, fakeFailHandler, 1)
	})
	Context("when called", func() {
		It("should pass the provided input value to the matcher", func() {
			a.Should(matcher)
			Ω(matcher.ReceivedActual).Should(Equal(input))
			matcher.ReceivedActual = ""
			a.ShouldNot(matcher)
			Ω(matcher.ReceivedActual).Should(Equal(input))
			matcher.ReceivedActual = ""
			a.To(matcher)
			Ω(matcher.ReceivedActual).Should(Equal(input))
			matcher.ReceivedActual = ""
			a.ToNot(matcher)
			Ω(matcher.ReceivedActual).Should(Equal(input))
			matcher.ReceivedActual = ""
			a.NotTo(matcher)
			Ω(matcher.ReceivedActual).Should(Equal(input))
		})
	})
	Context("when the matcher succeeds", func() {
		BeforeEach(func() {
			matcher.MatchesToReturn = true
			matcher.ErrToReturn = nil
		})
		Context("and a positive assertion is being made", func() {
			It("should not call the failure callback", func() {
				a.Should(matcher)
				Ω(failureMessage).Should(Equal(""))
			})
			It("should be true", func() {
				Ω(a.Should(matcher)).Should(BeTrue())
			})
		})
		Context("and a negative assertion is being made", func() {
			It("should call the failure callback", func() {
				a.ShouldNot(matcher)
				Ω(failureMessage).Should(Equal("negative: The thing I'm testing"))
				Ω(failureCallerSkip).Should(Equal(3))
			})
			It("should be false", func() {
				Ω(a.ShouldNot(matcher)).Should(BeFalse())
			})
		})
	})
	Context("when the matcher fails", func() {
		BeforeEach(func() {
			matcher.MatchesToReturn = false
			matcher.ErrToReturn = nil
		})
		Context("and a positive assertion is being made", func() {
			It("should call the failure callback", func() {
				a.Should(matcher)
				Ω(failureMessage).Should(Equal("positive: The thing I'm testing"))
				Ω(failureCallerSkip).Should(Equal(3))
			})
			It("should be false", func() {
				Ω(a.Should(matcher)).Should(BeFalse())
			})
		})
		Context("and a negative assertion is being made", func() {
			It("should not call the failure callback", func() {
				a.ShouldNot(matcher)
				Ω(failureMessage).Should(Equal(""))
			})
			It("should be true", func() {
				Ω(a.ShouldNot(matcher)).Should(BeTrue())
			})
		})
	})
	Context("When reporting a failure", func() {
		BeforeEach(func() {
			matcher.MatchesToReturn = false
			matcher.ErrToReturn = nil
		})
		// Optional trailing arguments to Should/ShouldNot become a description
		// prefix on the failure message.
		Context("and there is an optional description", func() {
			It("should append the description to the failure message", func() {
				a.Should(matcher, "A description")
				Ω(failureMessage).Should(Equal("A description\npositive: The thing I'm testing"))
				Ω(failureCallerSkip).Should(Equal(3))
			})
		})
		Context("and there are multiple arguments to the optional description", func() {
			It("should append the formatted description to the failure message", func() {
				a.Should(matcher, "A description of [%d]", 3)
				Ω(failureMessage).Should(Equal("A description of [3]\npositive: The thing I'm testing"))
				Ω(failureCallerSkip).Should(Equal(3))
			})
		})
	})
	Context("When the matcher returns an error", func() {
		BeforeEach(func() {
			matcher.ErrToReturn = errors.New("Kaboom!")
		})
		Context("and a positive assertion is being made", func() {
			It("should call the failure callback", func() {
				matcher.MatchesToReturn = true
				a.Should(matcher)
				Ω(failureMessage).Should(Equal("Kaboom!"))
				Ω(failureCallerSkip).Should(Equal(3))
			})
		})
		Context("and a negative assertion is being made", func() {
			It("should call the failure callback", func() {
				matcher.MatchesToReturn = false
				a.ShouldNot(matcher)
				Ω(failureMessage).Should(Equal("Kaboom!"))
				Ω(failureCallerSkip).Should(Equal(3))
			})
		})
		It("should always be false", func() {
			Ω(a.Should(matcher)).Should(BeFalse())
			Ω(a.ShouldNot(matcher)).Should(BeFalse())
		})
	})
	// "Extra parameters" are additional return values captured alongside the
	// actual (e.g. an error); the assertion only proceeds when all of them are
	// nil or zero-valued.
	Context("when there are extra parameters", func() {
		It("(a simple example)", func() {
			Ω(func() (string, int, error) {
				return "foo", 0, nil
			}()).Should(Equal("foo"))
		})
		Context("when the parameters are all nil or zero", func() {
			It("should invoke the matcher", func() {
				matcher.MatchesToReturn = true
				matcher.ErrToReturn = nil
				var typedNil []string
				a = New(input, fakeFailHandler, 1, 0, nil, typedNil)
				result := a.Should(matcher)
				Ω(result).Should(BeTrue())
				Ω(matcher.ReceivedActual).Should(Equal(input))
				Ω(failureMessage).Should(BeZero())
			})
		})
		Context("when any of the parameters are not nil or zero", func() {
			It("should call the failure callback", func() {
				matcher.MatchesToReturn = false
				matcher.ErrToReturn = nil
				a = New(input, fakeFailHandler, 1, errors.New("foo"))
				result := a.Should(matcher)
				Ω(result).Should(BeFalse())
				Ω(matcher.ReceivedActual).Should(BeZero(), "The matcher doesn't even get called")
				Ω(failureMessage).Should(ContainSubstring("foo"))
				failureMessage = ""
				a = New(input, fakeFailHandler, 1, nil, 1)
				result = a.ShouldNot(matcher)
				Ω(result).Should(BeFalse())
				Ω(failureMessage).Should(ContainSubstring("1"))
				failureMessage = ""
				a = New(input, fakeFailHandler, 1, nil, 0, []string{"foo"})
				result = a.To(matcher)
				Ω(result).Should(BeFalse())
				Ω(failureMessage).Should(ContainSubstring("foo"))
				failureMessage = ""
				a = New(input, fakeFailHandler, 1, nil, 0, []string{"foo"})
				result = a.ToNot(matcher)
				Ω(result).Should(BeFalse())
				Ω(failureMessage).Should(ContainSubstring("foo"))
				failureMessage = ""
				a = New(input, fakeFailHandler, 1, nil, 0, []string{"foo"})
				result = a.NotTo(matcher)
				Ω(result).Should(BeFalse())
				Ω(failureMessage).Should(ContainSubstring("foo"))
				Ω(failureCallerSkip).Should(Equal(3))
			})
		})
	})
	Context("Making an assertion without a registered fail handler", func() {
		It("should panic", func() {
			// Restore the real fail handler before asserting on the panic,
			// otherwise Fail itself would have no handler.
			defer func() {
				e := recover()
				RegisterFailHandler(Fail)
				if e == nil {
					Fail("expected a panic to have occured")
				}
			}()
			RegisterFailHandler(nil)
			Ω(true).Should(BeTrue())
		})
	})
})
| tacgomes/bosh-agent | internal/github.com/onsi/gomega/internal/assertion/assertion_test.go | GO | apache-2.0 | 6,917 |
package fake_command_runner_matchers
import (
"fmt"
"os/exec"
"github.com/cloudfoundry/gunk/command_runner/fake_command_runner"
)
// HaveKilled returns a matcher that succeeds when the fake command runner
// killed at least one command matching the given spec.
func HaveKilled(spec fake_command_runner.CommandSpec) *HaveKilledMatcher {
	matcher := &HaveKilledMatcher{Spec: spec}
	return matcher
}
// HaveKilledMatcher asserts that a command matching Spec was killed
// through a fake command runner.
type HaveKilledMatcher struct {
	// Spec describes the command expected to have been killed.
	Spec fake_command_runner.CommandSpec
	// killed caches the commands the runner actually killed; set by Match
	// and reused when building failure messages.
	killed []*exec.Cmd
}
// Match checks whether the fake command runner killed at least one command
// matching m.Spec. actual must be a *fake_command_runner.FakeCommandRunner;
// any other type yields an error. The killed commands are cached on the
// matcher for use in failure messages.
func (m *HaveKilledMatcher) Match(actual interface{}) (bool, error) {
	runner, ok := actual.(*fake_command_runner.FakeCommandRunner)
	if !ok {
		return false, fmt.Errorf("Not a fake command runner: %#v.", actual)
	}

	m.killed = runner.KilledCommands()

	// Return as soon as any killed command matches the spec; the original
	// matched-flag/if-else dance collapsed to direct returns.
	for _, cmd := range m.killed {
		if m.Spec.Matches(cmd) {
			return true, nil
		}
	}
	return false, nil
}
// FailureMessage explains a failed positive expectation, listing the spec
// that should have been killed and the commands that actually were.
func (m *HaveKilledMatcher) FailureMessage(actual interface{}) (message string) {
	message = fmt.Sprintf("Expected to kill:%s\n\nActually killed:%s", prettySpec(m.Spec), prettyCommands(m.killed))
	return
}
// NegatedFailureMessage explains a failed negative expectation: the spec
// was killed even though it should not have been.
func (m *HaveKilledMatcher) NegatedFailureMessage(actual interface{}) (message string) {
	message = fmt.Sprintf("Expected to not kill the following commands:%s", prettySpec(m.Spec))
	return
}
| glyn/gunk | command_runner/fake_command_runner/matchers/have_killed.go | GO | apache-2.0 | 1,160 |
/*
* Copyright 2012, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.jf.dexlib2.writer.pool;
import org.jf.dexlib2.iface.reference.StringReference;
import org.jf.dexlib2.writer.StringSection;
import org.jf.util.ExceptionWithContext;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
public class StringPool extends StringTypeBasePool implements StringSection<CharSequence, StringReference> {
    public StringPool(@Nonnull DexPool dexPool) {
        super(dexPool);
    }

    /** Adds the given string to the pool; its real index is assigned later. */
    public void intern(@Nonnull CharSequence string) {
        internedItems.put(string.toString(), 0);
    }

    /** Like {@link #intern}, but a null argument is silently ignored. */
    public void internNullable(@Nullable CharSequence string) {
        if (string == null) {
            return;
        }
        intern(string);
    }

    /** Looks up the index assigned to an interned string; throws if absent. */
    @Override
    public int getItemIndex(@Nonnull StringReference key) {
        final Integer index = internedItems.get(key.toString());
        if (index == null) {
            throw new ExceptionWithContext("Item not found.: %s", key.toString());
        }
        return index;
    }

    /** True when the pool holds more than 65536 strings, i.e. some index cannot fit in 16 bits. */
    @Override
    public boolean hasJumboIndexes() {
        return internedItems.size() > 65536;
    }
}
| 4455jkjh/apktool | dexlib2/src/main/java/org/jf/dexlib2/writer/pool/StringPool.java | Java | apache-2.0 | 2,633 |
#pragma once
#include "base/worker_thread.hpp"
#include "ugc/storage.hpp"
#include "ugc/types.hpp"
#include <functional>
class Index;
struct FeatureID;
namespace ugc
{
// Facade over user-generated-content storage for map features.
// NOTE(review): given the WorkerThread member and the *Impl naming, the
// public Get/Set methods presumably dispatch to the worker thread where the
// Impl variants run - confirm against api.cpp.
class Api
{
public:
  // Invoked with the UGC stored for a feature.
  using UGCCallback = std::function<void(UGC const &)>;
  // Invoked with the pending (locally edited) UGC update for a feature.
  using UGCUpdateCallback = std::function<void(UGCUpdate const &)>;
  explicit Api(Index const & index, std::string const & filename);
  void GetUGC(FeatureID const & id, UGCCallback callback);
  void GetUGCUpdate(FeatureID const & id, UGCUpdateCallback callback);
  void SetUGCUpdate(FeatureID const & id, UGCUpdate const & ugc);
  // Deterministic-by-default sample objects for tests (time is injectable).
  static UGC MakeTestUGC1(Time now = Clock::now());
  static UGC MakeTestUGC2(Time now = Clock::now());
private:
  void GetUGCImpl(FeatureID const & id, UGCCallback callback);
  void GetUGCUpdateImpl(FeatureID const & id, UGCUpdateCallback callback);
  void SetUGCUpdateImpl(FeatureID const & id, UGCUpdate const & ugc);
  Index const & m_index;
  base::WorkerThread m_thread;
  Storage m_storage;
};
}  // namespace ugc
| dobriy-eeh/omim | ugc/api.hpp | C++ | apache-2.0 | 1,008 |
// Create the Control namespace unless another script already has.
if(typeof(Control)=='undefined')
Control={};
// Control.TextArea: Prototype.js wrapper around a <textarea> adding
// selection helpers and a debounced 'change' notification.
Control.TextArea=Class.create();
Object.extend(Control.TextArea.prototype,{
// Milliseconds of quiet time after the last edit before 'change' fires.
onChangeTimeoutLength:500,
element:false,
onChangeTimeout:false,
// Wraps the given textarea and listens for edits via keyup/paste/input.
initialize:function(textarea){
this.element=$(textarea);
$(this.element).observe('keyup',this.doOnChange.bindAsEventListener(this));
$(this.element).observe('paste',this.doOnChange.bindAsEventListener(this));
$(this.element).observe('input',this.doOnChange.bindAsEventListener(this));
},
// Debounce: restart the timer on every edit; when it expires, fire the
// 'change' notification (only if Object.Event gave us notify(), see below).
doOnChange:function(event){
if(this.onChangeTimeout)
window.clearTimeout(this.onChangeTimeout);
this.onChangeTimeout=window.setTimeout(function(){
if(this.notify)
this.notify('change',this.getValue());
}.bind(this),this.onChangeTimeoutLength);
},
getValue:function(){
return this.element.value;
},
// Returns the selected text; uses the IE TextRange API when present, the
// W3C selectionStart/End API otherwise, and false when neither exists.
getSelection:function(){
if(!!document.selection)
return document.selection.createRange().text;
else if(!!this.element.setSelectionRange)
return this.element.value.substring(this.element.selectionStart,this.element.selectionEnd);
else
return false;
},
// Replaces the current selection with 'text', preserving scroll position,
// then re-fires the debounced change notification.
replaceSelection:function(text){
var scrollTop=this.element.scrollTop;
if(!!document.selection){
this.element.focus();
var old=document.selection.createRange().text;
var range=document.selection.createRange();
range.text=text;
// NOTE(review): subtracting a number from a TextRange object yields NaN and
// has no effect; this line looks like corrupted/dead code - verify against
// the upstream Control.TextArea source.
range-=old.length-text.length;
}else if(!!this.element.setSelectionRange){
var selection_start=this.element.selectionStart;
this.element.value=this.element.value.substring(0,selection_start)+text+this.element.value.substring(this.element.selectionEnd);
// Place the caret at the end of the inserted text.
this.element.setSelectionRange(selection_start+text.length,selection_start+text.length);
}
this.doOnChange();
this.element.focus();
this.element.scrollTop=scrollTop;
},
// Surrounds the selection with the given prefix/suffix (e.g. BBCode tags).
wrapSelection:function(before,after){
this.replaceSelection(before+this.getSelection()+after);
},
insertBeforeSelection:function(text){
this.replaceSelection(text+this.getSelection());
},
insertAfterSelection:function(text){
this.replaceSelection(this.getSelection()+text);
},
// Runs an inject (fold) callback over each selected line, then wraps the
// result in optional before/after strings.
injectEachSelectedLine:function(callback,before,after){
this.replaceSelection((before||'')+$A(this.getSelection().split("\n")).inject([],callback).join("\n")+(after||''));
},
insertBeforeEachSelectedLine:function(text,before,after){
this.injectEachSelectedLine(function(lines,line){
lines.push(text+line);
return lines;
},before,after);
}
});
// Mix in Object.Event (pub/sub) when available; this supplies the notify()
// used by doOnChange. Control.TextArea.BBCode (below) builds a BBCode
// toolbar on top of Control.TextArea.
if(typeof(Object.Event)!='undefined')
Object.Event.extend(Control.TextArea);Control.TextArea.BBCode=Class.create();
// BBCode editor chrome: wraps a Control.TextArea with an emotion palette,
// a toolbar of BBCode buttons, and a tooltip area. (The closing of this
// Object.extend call is on the following line of the file.)
Object.extend(Control.TextArea.BBCode.prototype,{
textarea:false,
tooltip:false,
toolbar:false,
emotions:false,
wrapper:false,
controllers:false,
initialize:function(textarea){
this.textarea=new Control.TextArea(textarea);
this._initLayout();
this._initEmotions();
this._initToolbar();
},
// Detach the textarea from the editor chrome and hide the wrapper.
hide:function(){
this.wrapper.parentNode.appendChild(this.textarea.element.remove());
this.wrapper.hide();
},
// Re-attach the textarea into the editor chrome and show the wrapper.
show:function(){
this.controllers.appendChild(this.textarea.element.remove());
this.wrapper.show();
},
// Builds the DOM scaffolding around the textarea:
// #editor_wrapper > (#bbcode_emotions, #bbcode_controllers >
// (#bbcode_toolbar, #bbcode_tooltip, textarea)).
_initLayout:function(){
this.wrapper=$(document.createElement('div'));
this.wrapper.id="editor_wrapper";
this.wrapper.className="clearfix";
this.textarea.element.parentNode.insertBefore(this.wrapper,this.textarea.element);
this.emotions=$(document.createElement('div'));
this.emotions.id="bbcode_emotions";
this.emotions.innerHTML="<h5>表情图标</h5>";
this.wrapper.appendChild(this.emotions);
this.controllers=$(document.createElement('div'));
this.controllers.id="bbcode_controllers";
this.wrapper.appendChild(this.controllers);
this.toolbar=$(document.createElement('div'));
this.toolbar.id="bbcode_toolbar";
this.controllers.appendChild(this.toolbar);
this.tooltip=$(document.createElement('div'));
this.tooltip.id="bbcode_tooltip";
this.tooltip.innerHTML="提示:选择您需要装饰的文字, 按上列按钮即可添加上相应的标签";
this.controllers.appendChild(this.tooltip);
this.controllers.appendChild(this.textarea.element.remove());
},
// One clickable smiley per phpBB-style emoticon code; clicking inserts the
// code after the current selection.
_initEmotions:function(){
this._addEmotion("biggrin",function(){this.insertAfterSelection(" :D ");});
this._addEmotion("smile",function(){this.insertAfterSelection(" :) ");});
this._addEmotion("sad",function(){this.insertAfterSelection(" :( ");});
this._addEmotion("surprised",function(){this.insertAfterSelection(" :o ");});
this._addEmotion("eek",function(){this.insertAfterSelection(" :shock: ");});
this._addEmotion("confused",function(){this.insertAfterSelection(" :? ");});
this._addEmotion("cool",function(){this.insertAfterSelection(" 8) ");});
this._addEmotion("lol",function(){this.insertAfterSelection(" :lol: ");});
this._addEmotion("mad",function(){this.insertAfterSelection(" :x ");});
this._addEmotion("razz",function(){this.insertAfterSelection(" :P ");});
this._addEmotion("redface",function(){this.insertAfterSelection(" :oops: ");});
this._addEmotion("cry",function(){this.insertAfterSelection(" :cry: ");});
this._addEmotion("evil",function(){this.insertAfterSelection(" :evil: ");});
this._addEmotion("twisted",function(){this.insertAfterSelection(" :twisted: ");});
this._addEmotion("rolleyes",function(){this.insertAfterSelection(" :roll: ");});
this._addEmotion("wink",function(){this.insertAfterSelection(" :wink: ");});
this._addEmotion("exclaim",function(){this.insertAfterSelection(" :!: ");});
this._addEmotion("question",function(){this.insertAfterSelection(" :?: ");});
this._addEmotion("idea",function(){this.insertAfterSelection(" :idea: ");});
this._addEmotion("arrow",function(){this.insertAfterSelection(" :arrow: ");});
},
// Note: the click callback is bound to the inner Control.TextArea instance,
// so 'this' inside it is the textarea wrapper, not the BBCode editor.
_addEmotion:function(icon,callback){
var img=$(document.createElement('img'));
img.src="http://www.javaeye.com/images/smiles/icon_"+icon+".gif";
img.observe('click',callback.bindAsEventListener(this.textarea));
this.emotions.appendChild(img);
},
// Toolbar buttons plus three <select>s (color / font size / alignment).
// Button callbacks are bound to the Control.TextArea; tooltip callbacks are
// bound to the tooltip div (see _addButton).
_initToolbar:function(){
this._addButton("B",function(){this.wrapSelection('[b]','[/b]');},function(){this.innerHTML='粗体: [b]文字[/b] (alt+b)';},{id:'button_bold'});
this._addButton("I",function(){this.wrapSelection('[i]','[/i]');},function(){this.innerHTML='斜体: [i]文字[/i] (alt+i)';},{id:'button_italic'});
this._addButton("U",function(){this.wrapSelection('[u]','[/u]');},function(){this.innerHTML='下划线: [u]文字[/u] (alt+u)';},{id:'button_underline'});
this._addButton("Quote",function(){this.wrapSelection('[quote]','[/quote]');},function(){this.innerHTML='引用文字: [quote]文字[/quote] 或者 [quote="javaeye"]文字[/quote] (alt+q)';});
this._addButton("Code",function(){this.wrapSelection('[code="java"]','[/code]');},function(){this.innerHTML='代码: [code="ruby"]...[/code] (支持java, ruby, js, xml, html, php, python, c, c++, c#, sql)';});
this._addButton("List",function(){this.insertBeforeEachSelectedLine('[*]','[list]\n','\n[/list]')},function(){this.innerHTML='列表: [list] [*]文字 [*]文字 [/list] 或者 顺序列表: [list=1] [*]文字 [*]文字 [/list]';});
this._addButton("Img",function(){this.wrapSelection('[img]','[/img]');},function(){this.innerHTML='插入图像: [img]http://image_url[/img] (alt+p)';});
this._addButton("URL",function(){this.wrapSelection('[url]','[/url]');},function(){this.innerHTML='插入URL: [url]http://url[/url] 或 [url=http://url]URL文字[/url] (alt+w)';});
this._addButton("Flash",function(){this.wrapSelection('[flash=200,200]','[/flash]');},function(){this.innerHTML='插入Flash: [flash=宽,高]http://your_flash.swf[/flash]';});
this._addButton("Table",function(){this.injectEachSelectedLine(function(lines,line){lines.push("|"+line+"|");return lines;},'[table]\n','\n[/table]');},function(){this.innerHTML='插入表格: [table]用换行和|来编辑格子[/table]';});
var color_select=[
"<br />字体颜色: ",
"<select id='select_color'>",
"<option value='black' style='color: black;'>标准</option>",
"<option value='darkred' style='color: darkred;'>深红</option>",
"<option value='red' style='color: red;'>红色</option>",
"<option value='orange' style='color: orange;'>橙色</option>",
"<option value='brown' style='color: brown;'>棕色</option>",
"<option value='yellow' style='color: yellow;'>黄色</option>",
"<option value='green' style='color: green;'>绿色</option>",
"<option value='olive' style='color: olive;'>橄榄</option>",
"<option value='cyan' style='color: cyan;'>青色</option>",
"<option value='blue' style='color: blue;'>蓝色</option>",
"<option value='darkblue' style='color: darkblue;'>深蓝</option>",
"<option value='indigo' style='color: indigo;'>靛蓝</option>",
"<option value='violet' style='color: violet;'>紫色</option>",
"<option value='gray' style='color: gray;'>灰色</option>",
"<option value='white' style='color: white;'>白色</option>",
"<option value='black' style='color: black;'>黑色</option>",
"</select>"
];
this.toolbar.insert(color_select.join(""));
$('select_color').observe('change',this._change_color.bindAsEventListener(this.textarea));
$('select_color').observe('mouseover',function(){$("bbcode_tooltip").innerHTML="字体颜色: [color=red]文字[/color] 提示:您可以使用 color=#FF0000";});
var font_select=[
" 字体大小: ",
"<select id='select_font'>",
"<option value='0'>标准</option>",
"<option value='xx-small'>1 (xx-small)</option>",
"<option value='x-small'>2 (x-small)</option>",
"<option value='small'>3 (small)</option>",
"<option value='medium'>4 (medium)</option>",
"<option value='large'>5 (large)</option>",
"<option value='x-large'>6 (x-large)</option>",
"<option value='xx-large'>7 (xx-large)</option>",
"</select>"
];
this.toolbar.insert(font_select.join(""));
$('select_font').observe('change',this._change_font.bindAsEventListener(this.textarea));
$('select_font').observe('mouseover',function(){$("bbcode_tooltip").innerHTML="字体大小: [size=x-small]小字体文字[/size]";});
var align_select=[
" 对齐: ",
"<select id='select_align'>",
"<option value='0'>标准</option>",
"<option value='left'>居左</option>",
"<option value='center'>居中</option>",
"<option value='right'>居右</option>",
"</select>"
]
this.toolbar.insert(align_select.join(""));
$('select_align').observe('change',this._change_align.bindAsEventListener(this.textarea));
$('select_align').observe('mouseover',function(){$("bbcode_tooltip").innerHTML="对齐: [align=center]文字[/align]";});
},
// Creates a toolbar button; 'callback' runs with 'this' = the TextArea,
// 'tooltip' runs with 'this' = the tooltip div, 'attrs' (optional) are
// copied onto the <input> element.
_addButton:function(value,callback,tooltip,attrs){
var input=$(document.createElement('input'));
input.type="button";
input.value=value;
input.observe('click',callback.bindAsEventListener(this.textarea));
input.observe('mouseover',tooltip.bindAsEventListener(this.tooltip));
Object.extend(input,attrs||{});
this.toolbar.appendChild(input);
},
// The three _change_* handlers run with 'this' = the Control.TextArea
// (bound above); each wraps the selection and resets its <select>.
_change_color:function(){
this.wrapSelection('[color='+$F('select_color')+']','[/color]');
$('select_color').selectedIndex=0;
},
_change_font:function(){
this.wrapSelection('[size='+$F('select_font')+']','[/size]');
$('select_font').selectedIndex=0;
},
_change_align:function(){
this.wrapSelection('[align='+$F('select_align')+']','[/align]');
$('select_align').selectedIndex=0;
}
});if(typeof(tinyMCE)!='undefined'){
// Global TinyMCE configuration for the rich-text editing mode.  Runs only
// when the tinymce script has actually been loaded on the current page.
tinyMCE.init({
// "javaeye" is the site's own plugin; the rest are stock TinyMCE plugins
plugins:"javaeye,media,table,emotions,contextmenu,fullscreen,inlinepopups",
// editors are attached manually via mceAddControl (see Editor.switchMode)
mode:"none",
language:"zh",
theme:"advanced",
theme_advanced_buttons1:"formatselect,fontselect,fontsizeselect,separator,forecolor,backcolor,separator,bold,italic,underline,strikethrough,separator,bullist,numlist",
theme_advanced_buttons2:"undo,redo,cut,copy,paste,separator,justifyleft,justifycenter,justifyright,separator,outdent,indent,separator,link,unlink,image,media,emotions,table,separator,quote,code,separator,fullscreen",
theme_advanced_buttons3:"",
theme_advanced_toolbar_location:"top",
theme_advanced_toolbar_align:"left",
// CJK font names first, then common western font stacks
theme_advanced_fonts:"宋体=宋体;黑体=黑体;仿宋=仿宋;楷体=楷体;隶书=隶书;幼圆=幼圆;Arial=arial,helvetica,sans-serif;Comic Sans MS=comic sans ms,sans-serif;Courier New=courier new,courier;Tahoma=tahoma,arial,helvetica,sans-serif;Times New Roman=times new roman,times;Verdana=verdana,geneva;Webdings=webdings;Wingdings=wingdings,zapf dingbats",
convert_fonts_to_spans:true,
remove_trailing_nbsp:true,
remove_linebreaks:false,
width:"100%",
// keep <pre> code blocks and flash/object markup the cleaner would strip
extended_valid_elements:"pre[name|class],object[classid|codebase|width|height|align],param[name|value],embed[quality|type|pluginspage|width|height|src|align|wmode]",
relative_urls:false,
content_css:"/javascripts/tinymce/plugins/javaeye/css/content.css",
// post-processes the serialised HTML; see removeBRInPre below
save_callback:"removeBRInPre"
});
}
// TinyMCE save_callback: converts <br> tags found inside <pre> blocks back
// into real newlines so source-code listings keep their line structure when
// the rich editor serialises its content.
//
// element_id and body are supplied by TinyMCE but are not needed here; only
// the serialised `html` string is rewritten and returned.
function removeBRInPre(element_id,html,body){
    // [\s\S]*? instead of the old (?:.|\n)*?: the JS dot never matches line
    // terminators, so the old pattern skipped <pre> bodies containing \r
    // (Windows line endings).  [\s\S] matches every character.
    return html.replace(/<pre([^>]*)>([\s\S]*?)<\/pre>/gi,function(match,attrs,content){
        // normalise every <br>, <br/> or <br /> (plus any newlines it
        // already carried) into a single '\n'
        content=content.replace(/<br\s*\/?>\n*/gi,'\n');
        return '<pre'+attrs+'>'+content+'</pre>';
    });
}
// High-level editor facade that lets the user switch between the BBCode
// (plain textarea) editor and the TinyMCE rich-text editor, preview the
// rendered result, and auto-save drafts to the server.
Control.TextArea.Editor=Class.create();
Object.extend(Control.TextArea.Editor.prototype,{
// Control.TextArea.BBCode instance, created lazily on first bbcode switch
bbcode_editor:false,
// true once a TinyMCE control has been attached to the textarea
rich_editor:false,
// current editing mode: "bbcode" or "rich" (false before initialize runs)
mode:false,
// true while the preview pane is shown instead of the editor
in_preview:false,
// textarea: id of the backing <textarea>; mode: initial editor mode;
// autosave: when truthy, enables the periodic draft auto-save
initialize:function(textarea,mode,autosave){
this.editor_bbcode_flag=$("editor_bbcode_flag");
this.textarea=textarea;
this.switchMode(mode);
if(autosave)this._initAutosave();
},
// Switches between "bbcode" and "rich" mode.  When convert is true the
// current content is converted server-side (bbcode2html / html2bbcode).
switchMode:function(mode,convert){
// leaving preview back into the mode we are already in: just swap panes
if(this.in_preview&&this.mode==mode){
$("editor_tab_bbcode").removeClassName("activetab");
$("editor_tab_rich").removeClassName("activetab");
$("editor_tab_preview").removeClassName("activetab");
$("editor_tab_"+mode).addClassName("activetab");
$("editor_preview").hide();
$("editor_main").show();
this.in_preview=false;
return;
}
if(this.mode==mode)return;
if(convert){
var old_text=this.getValue();
if(old_text!=""){
// switching editors may lose formatting, so ask the user first
if(!confirm("切换编辑器模式可能导致格式和内容丢失,你确定吗?"))return;
$('editor_switch_spinner').show();
}
}
this.mode=mode;
// update the tab strip if this page renders one
if($("editor_switch")){
$("editor_tab_bbcode").removeClassName("activetab");
$("editor_tab_rich").removeClassName("activetab");
$("editor_tab_preview").removeClassName("activetab");
$("editor_tab_"+mode).addClassName("activetab");
$("editor_preview").hide();
$("editor_main").show();
this.in_preview=false;
}
if(this.mode=="rich"){
// the hidden flag tells the server which markup the form submits
this.editor_bbcode_flag.value="false";
if(this.bbcode_editor)this.bbcode_editor.hide();
this.rich_editor=true;
tinyMCE.execCommand('mceAddControl',false,this.textarea);
}else{
this.editor_bbcode_flag.value="true";
if(this.rich_editor)tinyMCE.execCommand('mceRemoveControl',false,this.textarea);
this.bbcode_editor?this.bbcode_editor.show():this.bbcode_editor=new Control.TextArea.BBCode(this.textarea);
}
if(convert&&old_text!=""){
// asynchronous server-side conversion between the two markup formats
new Ajax.Request(this.mode=="rich"?'/editor/bbcode2html':'/editor/html2bbcode',{
method:'post',
parameters:{text:old_text},
asynchronous:true,
onSuccess:function(transport){this.setValue(transport.responseText);$('editor_switch_spinner').hide();}.bind(this)
});
}
},
// Returns the current markup.  NOTE(review): rich mode reads
// tinyMCE.activeEditor while setValue uses tinyMCE.get(this.textarea) -
// presumably equivalent because only one editor exists per page; confirm
// before reusing this class with multiple editors.
getValue:function(){
return this.mode=="bbcode"?this.bbcode_editor.textarea.element.value:tinyMCE.activeEditor.getContent();
},
// Replaces the current editor content with `value` (markup of the active mode).
setValue:function(value){
if(this.mode=="bbcode"){
this.bbcode_editor.textarea.element.value=value;
}else{
tinyMCE.get(this.textarea).setContent(value);
}
},
// Renders the current content on the server and shows it in the preview pane.
preview:function(){
this.in_preview=true;
$('editor_switch_spinner').show();
$("editor_preview").show();
$("editor_main").hide();
$("editor_tab_bbcode").removeClassName("activetab");
$("editor_tab_rich").removeClassName("activetab");
$("editor_tab_preview").addClassName("activetab");
new Ajax.Updater("editor_preview","/editor/preview",{
parameters:{text:this.getValue(),mode:this.mode},
evalScripts:true,
onSuccess:function(){$('editor_switch_spinner').hide();}
});
},
// Inserts an image reference at the caret of whichever editor is active.
insertImage:function(url){
if(this.mode=="bbcode"){
this.bbcode_editor.textarea.insertAfterSelection("\n[img]"+url+"[/img]\n");
}else{
tinyMCE.execCommand("mceInsertContent", false, "<br/><img src='"+url+"'/><br/> ");
}
},
// Asks the server whether a draft exists for this URL and starts the
// 90-second auto-save timer.
_initAutosave:function(){
this.autosave_url=window.location.href;
new Ajax.Request('/editor/check_autosave',{
method:'post',
parameters:{url:this.autosave_url},
asynchronous:true,
onSuccess:this._loadAutosave.bind(this)
});
setInterval(this._autosave.bind(this),90*1000);
},
// The response is either the literal string "nil" or a javascript object
// literal describing the saved draft (id, body, bbcode, updated_at).
// NOTE(review): eval of a server response - assumes the endpoint is trusted.
_loadAutosave:function(transport){
var text=transport.responseText;
if(text!="nil"){
eval("this.auto_save = "+text);
$('editor_auto_save_update').update('<span style="color:red">您有一份自动保存于'+this.auto_save.updated_at+'的草稿,<a href="#" onclick=\'editor._setAutosave();return false;\'>恢复</a>还是<a href="#" onclick=\'editor._discardAutosave();return false;\'>丢弃</a>呢?</span>');
}
},
// Restores the saved draft into the editor (switching mode if necessary).
_setAutosave:function(){
$("editor_auto_save_id").value=this.auto_save.id;
$('editor_auto_save_update').update("");
this.auto_save.bbcode?this.switchMode("bbcode"):this.switchMode("rich");
this.setValue(this.auto_save.body);
},
// Records the draft id (so the server can clean it up on submit) but leaves
// the current editor content untouched.
_discardAutosave:function(){
$("editor_auto_save_id").value=this.auto_save.id;
$('editor_auto_save_update').update("");
},
// Periodic timer callback: uploads the current content as a draft.
// Bodies shorter than 100 characters are not considered worth saving.
_autosave:function(){
var body=this.getValue();
if(body.length<100)return;
new Ajax.Request('/editor/autosave',{
method:'post',
parameters:{
url:this.autosave_url,
body:body,
bbcode:this.mode=="bbcode"
},
asynchronous:true,
onSuccess:function(transport){
$('editor_auto_save_id').value=transport.responseText;
$('editor_auto_save_update').update('<span style="color:red">JavaEye编辑器帮您自动保存草稿于:'+new Date().toLocaleString()+'</span>');
}
});
}
});
// Lucene version compatibility level 4.8.1
using Lucene.Net.Analysis.TokenAttributes;
using Lucene.Net.Analysis.Util;
namespace Lucene.Net.Analysis.Cjk
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <summary>
/// A <see cref="TokenFilter"/> that normalizes CJK width differences:
/// <list type="bullet">
/// <item><description>Folds fullwidth ASCII variants into the equivalent basic latin</description></item>
/// <item><description>Folds halfwidth Katakana variants into the equivalent kana</description></item>
/// </list>
/// <para>
/// NOTE: this filter can be viewed as a (practical) subset of NFKC/NFKD
/// Unicode normalization. See the normalization support in the ICU package
/// for full normalization.
/// </para>
/// </summary>
    public sealed class CJKWidthFilter : TokenFilter
    {
        // term attribute of the wrapped stream; its char buffer is edited in place
        private ICharTermAttribute termAtt;

        /// <summary>
        /// halfwidth kana mappings: 0xFF65-0xFF9D
        /// <para/>
        /// note: 0xFF9C and 0xFF9D are only mapped to 0x3099 and 0x309A
        /// as a fallback when they cannot properly combine with a preceding
        /// character into a composed form.
        /// </summary>
        private static readonly char[] KANA_NORM = new char[] {
            (char)0x30fb, (char)0x30f2, (char)0x30a1, (char)0x30a3, (char)0x30a5, (char)0x30a7, (char)0x30a9, (char)0x30e3, (char)0x30e5,
            (char)0x30e7, (char)0x30c3, (char)0x30fc, (char)0x30a2, (char)0x30a4, (char)0x30a6, (char)0x30a8, (char)0x30aa, (char)0x30ab,
            (char)0x30ad, (char)0x30af, (char)0x30b1, (char)0x30b3, (char)0x30b5, (char)0x30b7, (char)0x30b9, (char)0x30bb, (char)0x30bd,
            (char)0x30bf, (char)0x30c1, (char)0x30c4, (char)0x30c6, (char)0x30c8, (char)0x30ca, (char)0x30cb, (char)0x30cc, (char)0x30cd,
            (char)0x30ce, (char)0x30cf, (char)0x30d2, (char)0x30d5, (char)0x30d8, (char)0x30db, (char)0x30de, (char)0x30df, (char)0x30e0,
            (char)0x30e1, (char)0x30e2, (char)0x30e4, (char)0x30e6, (char)0x30e8, (char)0x30e9, (char)0x30ea, (char)0x30eb, (char)0x30ec,
            (char)0x30ed, (char)0x30ef, (char)0x30f3, (char)0x3099, (char)0x309A
        };

        /// <summary>
        /// Creates a new <see cref="CJKWidthFilter"/> wrapping the given stream.
        /// </summary>
        public CJKWidthFilter(TokenStream input)
            : base(input)
        {
            termAtt = AddAttribute<ICharTermAttribute>();
        }

        /// <summary>
        /// Normalizes the next token in place: fullwidth ASCII is folded to
        /// basic latin, halfwidth katakana to ordinary kana, and halfwidth
        /// voiced/semi-voiced sound marks are merged into the preceding kana
        /// where possible.
        /// </summary>
        public override bool IncrementToken()
        {
            if (m_input.IncrementToken())
            {
                char[] text = termAtt.Buffer;
                int length = termAtt.Length;
                for (int i = 0; i < length; i++)
                {
                    char ch = text[i];
                    if (ch >= 0xFF01 && ch <= 0xFF5E)
                    {
                        // Fullwidth ASCII variants
                        // 0xFEE0 is the fixed offset between the fullwidth
                        // block and basic latin
                        text[i] = (char)(text[i] - 0xFEE0);
                    }
                    else if (ch >= 0xFF65 && ch <= 0xFF9F)
                    {
                        // Halfwidth Katakana variants
                        // 0xFF9E/0xFF9F are the halfwidth (semi-)voiced sound
                        // marks; try to merge them into the previous kana and
                        // drop them from the term when that succeeds
                        if ((ch == 0xFF9E || ch == 0xFF9F) && i > 0 && Combine(text, i, ch))
                        {
                            length = StemmerUtil.Delete(text, i--, length);
                        }
                        else
                        {
                            text[i] = KANA_NORM[ch - 0xFF65];
                        }
                    }
                }
                termAtt.Length = length;
                return true;
            }
            else
            {
                return false;
            }
        }

        /// <summary>kana combining diffs: 0x30A6-0x30FD </summary>
        private static readonly sbyte[] KANA_COMBINE_VOICED = new sbyte[] {
            78, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1,
            0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1,
            0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 8, 8, 8, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1
        };

        /// <summary>
        /// kana combining diffs for the semi-voiced (handakuten) mark,
        /// indexed the same way as <see cref="KANA_COMBINE_VOICED"/>
        /// (offset from the base kana at 0x30A6).
        /// </summary>
        private static readonly sbyte[] KANA_COMBINE_HALF_VOICED = new sbyte[] {
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 2, 0, 0, 2,
            0, 0, 2, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
        };

        /// <summary>
        /// returns true if we successfully combined the voice mark </summary>
        private static bool Combine(char[] text, int pos, char ch)
        {
            char prev = text[pos - 1];
            if (prev >= 0x30A6 && prev <= 0x30FD)
            {
                // add the table diff to the preceding kana; a diff of 0 means
                // the pair has no composed form, so the caller keeps the mark
                text[pos - 1] += (char)((ch == 0xFF9F) ? KANA_COMBINE_HALF_VOICED[prev - 0x30A6] : KANA_COMBINE_VOICED[prev - 0x30A6]);
                return text[pos - 1] != prev;
            }
            return false;
        }
    }
} | apache/lucenenet | src/Lucene.Net.Analysis.Common/Analysis/Cjk/CJKWidthFilter.cs | C# | apache-2.0 | 5,800 |
<?php
namespace Topxia\Service\User\Dao;
interface UserFortuneLogDao
{
public function addLog(array $log);
} | 18826252059/im | src/Topxia/Service/User/Dao/UserFortuneLogDao.php | PHP | apache-2.0 | 114 |
import os
from segments import Segment, theme
from utils import colors, glyphs
class CurrentDir(Segment):
    """Prompt segment showing the current working directory, with the
    user's home directory abbreviated to ``~``."""

    bg = colors.background(theme.CURRENTDIR_BG)
    fg = colors.foreground(theme.CURRENTDIR_FG)

    def init(self, cwd):
        """Store the printable path in ``self.text``.

        Only a *leading* home-directory component is collapsed to ``~``.
        The previous ``cwd.replace(home, '~')`` also rewrote accidental
        matches elsewhere in the path (e.g. ``/backup/home/user/x``) and
        partial prefixes such as ``/home/user2`` for home ``/home/user``.
        """
        home = os.path.expanduser('~')
        if cwd == home or cwd.startswith(home + os.sep):
            self.text = '~' + cwd[len(home):]
        else:
            self.text = cwd
class ReadOnly(Segment):
    """Prompt segment flagging the current directory as read-only."""

    bg = colors.background(theme.READONLY_BG)
    fg = colors.foreground(theme.READONLY_FG)

    def init(self, cwd):
        """Render the write-protected marker; hide it when cwd is writable."""
        self.text = ' {0} '.format(glyphs.WRITE_ONLY)
        if os.access(cwd, os.W_OK):
            # directory is writable for the current user -> nothing to warn about
            self.active = False
class Venv(Segment):
    """Prompt segment displaying the active Python virtualenv, if any."""

    bg = colors.background(theme.VENV_BG)
    fg = colors.foreground(theme.VENV_FG)

    def init(self):
        """Show the virtualenv glyph plus the environment directory name.

        The segment is deactivated only when ``$VIRTUAL_ENV`` is not set at
        all (an empty value still counts as set, matching the old behaviour).
        """
        venv_path = os.environ.get('VIRTUAL_ENV')
        if venv_path is None:
            self.active = False
        else:
            self.text = glyphs.VIRTUAL_ENV + ' ' + os.path.basename(venv_path)
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.internal.impl.services.cache;
import com.pivotal.gemfirexd.internal.iapi.error.StandardException;
import com.pivotal.gemfirexd.internal.iapi.reference.SQLState;
import com.pivotal.gemfirexd.internal.iapi.services.cache.Cacheable;
import com.pivotal.gemfirexd.internal.iapi.services.cache.CacheableFactory;
/**
* An extension to {@link ConcurrentCache} for GemFireXD that sets the identity
* on a {@link CacheEntry} before inserting into the cache. This is to avoid
* deadlock scenario with DDL read-write locks:
*
* distributed write lock (other VM) -> local write lock -> cache hit with
* existing entry -> {@link CacheEntry#waitUntilIdentityIsSet()}
*
* cache miss -> cache put -> {@link Cacheable#setIdentity(Object)} -> read from
* SYSTABLES -> local read lock
*
* See bug #40683 for more details.
*
* Currently this is only used for <code>TDCacheble</code>s while for other
* {@link Cacheable}s the normal {@link ConcurrentCache} is used.
*
* @see ConcurrentCache
*
* @author swale
*/
final class GfxdConcurrentCache extends ConcurrentCache {

  /**
   * Creates a new cache manager.
   *
   * @param holderFactory
   *          factory which creates <code>Cacheable</code>s
   * @param name
   *          the name of the cache
   * @param initialSize
   *          the initial capacity of the cache
   * @param maxSize
   *          maximum number of elements in the cache
   */
  GfxdConcurrentCache(CacheableFactory holderFactory, String name,
      int initialSize, int maxSize) {
    super(holderFactory, name, initialSize, maxSize);
  }

  // Overrides of ConcurrentCache

  /**
   * Find an object in the cache. If it is not present, add it to the cache. The
   * returned object is kept until <code>release()</code> is called.
   *
   * @param key
   *          identity of the object to find
   * @return the cached object, or <code>null</code> if it cannot be found
   */
  @Override
  public Cacheable find(Object key) throws StandardException {
    if (stopped) {
      return null;
    }
    Cacheable item;
    CacheEntry entry = cache.get(key);
    // Retry loop: a looked-up entry may be invalidated concurrently, and a
    // racing inserter may beat our putIfAbsent; loop until we either return
    // a valid cached item or successfully publish our own entry.
    while (true) {
      if (entry != null) {
        // Found an entry in the cache, lock it.
        entry.lock();
        if (entry.isValid()) {
          try {
            // Entry is still valid. Return it.
            item = entry.getCacheable();
            // The object is already cached. Increase the use count and
            // return it.
            entry.keep(true);
            return item;
          } finally {
            entry.unlock();
          }
        }
        else {
          // This entry has been removed from the cache while we were
          // waiting for the lock. Unlock it and try again.
          entry.unlock();
          entry = cache.get(key);
        }
      }
      else {
        // Cache miss: build a new entry. Unlike the base ConcurrentCache,
        // the identity is set BEFORE the entry is published in the map (see
        // class javadoc) to avoid the DDL read-write lock deadlock.
        entry = new CacheEntry(true);
        // Lock the entry before it's inserted in free slot.
        entry.lock();
        try {
          // The object is not cached. Insert the entry into a free
          // slot and retrieve a reusable Cacheable.
          item = insertIntoFreeSlot(key, entry);
        } finally {
          entry.unlock();
        }
        // Set the identity without holding the lock on the entry. If we
        // hold the lock, we may run into a deadlock if the user code in
        // setIdentity() re-enters the buffer manager.
        Cacheable itemWithIdentity = item.setIdentity(key);
        if (itemWithIdentity != null) {
          entry.setCacheable(itemWithIdentity);
          // add the entry to cache
          CacheEntry oldEntry = cache.putIfAbsent(key, entry);
          if (oldEntry != null) {
            // Someone inserted the entry while we created a new
            // one. Retry with the entry currently in the cache.
            entry = oldEntry;
          }
          else {
            // We successfully inserted a new entry.
            return itemWithIdentity;
          }
        }
        else {
          // setIdentity() could not resolve the key: not found.
          return null;
        }
      }
    }
  }

  /**
   * Create an object in the cache. The object is kept until
   * <code>release()</code> is called.
   *
   * @param key
   *          identity of the object to create
   * @param createParameter
   *          parameters passed to <code>Cacheable.createIdentity()</code>
   * @return a reference to the cached object, or <code>null</code> if the
   *         object cannot be created
   * @exception StandardException
   *              if the object is already in the cache, or if some other error
   *              occurs
   * @see Cacheable#createIdentity(Object,Object)
   */
  @Override
  public Cacheable create(Object key, Object createParameter)
      throws StandardException {
    if (stopped) {
      return null;
    }
    Cacheable item;
    CacheEntry entry = new CacheEntry(true);
    // Lock the entry before it's inserted in free slot.
    entry.lock();
    try {
      // The object is not cached. Insert the entry into a free
      // slot and retrieve a reusable Cacheable.
      item = insertIntoFreeSlot(key, entry);
    } finally {
      entry.unlock();
    }
    // Create the identity without holding the lock on the entry.
    // Otherwise, we may run into a deadlock if the user code in
    // createIdentity() re-enters the buffer manager.
    Cacheable itemWithIdentity = item.createIdentity(key, createParameter);
    if (itemWithIdentity != null) {
      entry.setCacheable(itemWithIdentity);
      // Publish after the identity is fully created; creating an object
      // that already exists in the cache is an error by contract.
      if (cache.putIfAbsent(key, entry) != null) {
        // We can't create the object if it's already in the cache.
        throw StandardException.newException(SQLState.OBJECT_EXISTS_IN_CACHE,
            name, key);
      }
    }
    return itemWithIdentity;
  }
}
| papicella/snappy-store | gemfirexd/core/src/main/java/com/pivotal/gemfirexd/internal/impl/services/cache/GfxdConcurrentCache.java | Java | apache-2.0 | 6,422 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.test;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.jar.JarOutputStream;
import java.util.zip.ZipEntry;
import javax.tools.JavaCompiler;
import javax.tools.JavaFileObject;
import javax.tools.StandardJavaFileManager;
import javax.tools.ToolProvider;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.io.ByteStreams;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* Ensure classes from a registered jar are available in the UDFContext.
* Please see PIG-2532 for additional details.
*/
public class TestRegisteredJarVisibility {
    private static final Log LOG = LogFactory.getLog(TestRegisteredJarVisibility.class);
    private static final String JAR_FILE_NAME = "test-foo-loader.jar";
    private static final String PACKAGE_NAME = "org.apache.pig.test";
    // Actual data is not important. Reusing an existing input file.
    private static final File INPUT_FILE = new File("test/data/pigunit/top_queries_input_data.txt");

    private static MiniCluster cluster;
    private static File jarFile;

    /**
     * Compiles the loader/schema sources shipped under test/resources, jars
     * them up, and starts the mini cluster used by the tests.
     */
    @BeforeClass()
    public static void setUp() throws IOException {
        String testResourcesDir = "test/resources/" + PACKAGE_NAME.replace(".", "/");
        String testBuildDataDir = "build/test/data";
        // Create the test data directory if needed
        File testDataDir = new File(testBuildDataDir,
                TestRegisteredJarVisibility.class.getCanonicalName());
        testDataDir.mkdirs();
        jarFile = new File(testDataDir, JAR_FILE_NAME);
        File[] javaFiles = new File[]{
                new File(testResourcesDir, "RegisteredJarVisibilityLoader.java"),
                new File(testResourcesDir, "RegisteredJarVisibilitySchema.java")};
        List<File> classFiles = compile(javaFiles);

        // Canonical class name to class file
        Map<String, File> filesToJar = Maps.newHashMap();
        for (File classFile : classFiles) {
            filesToJar.put(
                    PACKAGE_NAME + "." + classFile.getName().replace(".class", ""),
                    classFile);
        }
        jar(filesToJar);
        cluster = MiniCluster.buildCluster();
    }

    @AfterClass()
    public static void tearDown() {
        cluster.shutDown();
    }

    /**
     * The schema class must NOT be on the test classpath: the point of the
     * suite is that it only becomes visible through "register jar".
     */
    @Test()
    public void testRegisteredJarVisibilitySchemaNotOnClasspath() {
        boolean exceptionThrown = false;
        try {
            Class.forName("org.apache.pig.test.FooSchema");
        } catch (ClassNotFoundException e) {
            exceptionThrown = true;
        }
        Assert.assertTrue(exceptionThrown);
    }

    /**
     * Registers the freshly built jar and verifies the loader it contains can
     * be instantiated and used inside the UDFContext (see PIG-2532).
     */
    @Test()
    public void testRegisteredJarVisibility() throws IOException {
        cluster.getFileSystem().copyFromLocalFile(
                new Path("file://" + INPUT_FILE.getAbsolutePath()), new Path(INPUT_FILE.getName()));
        PigServer pigServer = new PigServer(ExecType.MAPREDUCE, cluster.getProperties());
        String query = "register " + jarFile.getAbsolutePath() + ";\n"
                + "a = load '" + INPUT_FILE.getName()
                + "' using org.apache.pig.test.RegisteredJarVisibilityLoader();";
        LOG.info("Running pig script:\n" + query);
        pigServer.registerScript(new ByteArrayInputStream(query.getBytes()));
        pigServer.openIterator("a");
        pigServer.shutdown();
    }

    /**
     * Compiles the given sources in place and returns the resulting .class
     * files (which are registered for deletion on JVM exit).
     */
    private static List<File> compile(File[] javaFiles) {
        LOG.info("Compiling: " + Arrays.asList(javaFiles));

        JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
        StandardJavaFileManager fileManager = compiler.getStandardFileManager(null, null, null);
        Iterable<? extends JavaFileObject> compilationUnits1 =
                fileManager.getJavaFileObjects(javaFiles);
        JavaCompiler.CompilationTask task =
                compiler.getTask(null, fileManager, null, null, null, compilationUnits1);
        // Fail here with a clear message if compilation breaks; previously the
        // result of call() was ignored and failures surfaced later as a
        // confusing missing-class assertion.
        Boolean success = task.call();
        Assert.assertTrue("Compilation failed for: " + Arrays.asList(javaFiles),
                Boolean.TRUE.equals(success));

        List<File> classFiles = Lists.newArrayList();
        for (File javaFile : javaFiles) {
            File classFile = new File(javaFile.getAbsolutePath().replace(".java", ".class"));
            classFile.deleteOnExit();
            Assert.assertTrue(classFile.exists());
            classFiles.add(classFile);
            LOG.info("Created " + classFile.getAbsolutePath());
        }
        return classFiles;
    }

    /**
     * Create a jar file containing the generated classes.
     *
     * @param filesToJar map of canonical class name to class file
     * @throws IOException on error
     */
    private static void jar(Map<String, File> filesToJar) throws IOException {
        LOG.info("Creating jar file containing: " + filesToJar);

        JarOutputStream jos = new JarOutputStream(new FileOutputStream(jarFile.getAbsolutePath()));
        try {
            for (Map.Entry<String, File> entry : filesToJar.entrySet()) {
                String zipEntryName = entry.getKey().replace(".", "/") + ".class";
                LOG.info("Adding " + zipEntryName + " to " + jarFile.getAbsolutePath());
                jos.putNextEntry(new ZipEntry(zipEntryName));
                InputStream classInputStream = new FileInputStream(entry.getValue().getAbsolutePath());
                try {
                    ByteStreams.copy(classInputStream, jos);
                } finally {
                    classInputStream.close();
                }
            }
        } finally {
            jos.close();
        }
        Assert.assertTrue(jarFile.exists());
        LOG.info("Created " + jarFile.getAbsolutePath());
    }
}
| internetarchive/pig | test/org/apache/pig/test/TestRegisteredJarVisibility.java | Java | apache-2.0 | 6,888 |
package com.kodcu.service.extension.chart;
import com.kodcu.controller.ApplicationController;
import com.kodcu.other.Current;
import com.kodcu.service.ThreadService;
import javafx.scene.chart.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
* Created by usta on 31.03.2015.
*/
@Component("area-bean")
public class AreaChartBuilderService extends XYChartBuilderService {

    private final ThreadService threadService;
    private final Current current;
    private final ApplicationController controller;

    /**
     * Spring-managed builder that produces JavaFX area charts.
     *
     * @param threadService shared thread helper, also forwarded to the base class
     * @param current       current editor/document state
     * @param controller    main application controller
     */
    @Autowired
    public AreaChartBuilderService(ThreadService threadService, Current current, ApplicationController controller) {
        super(threadService, current, controller);
        this.threadService = threadService;
        this.current = current;
        this.controller = controller;
    }

    /** Builds an empty {@link AreaChart} backed by two fresh numeric axes. */
    @Override
    protected XYChart<Number, Number> createXYChart() {
        final NumberAxis horizontalAxis = new NumberAxis();
        final NumberAxis verticalAxis = new NumberAxis();
        return new AreaChart<Number, Number>(horizontalAxis, verticalAxis);
    }
}
| gastaldi/AsciidocFX | src/main/java/com/kodcu/service/extension/chart/AreaChartBuilderService.java | Java | apache-2.0 | 1,187 |
package com.taobao.zeus.broadcast.alarm;
import java.net.InetAddress;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Properties;
import javax.mail.Message;
import javax.mail.MessagingException;
import javax.mail.NoSuchProviderException;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import com.taobao.zeus.model.LogDescriptor;
import com.taobao.zeus.store.UserManager;
import com.taobao.zeus.store.mysql.MysqlLogManager;
import com.taobao.zeus.store.mysql.persistence.ZeusUser;
import com.taobao.zeus.util.Environment;
/**
 * Alarm channel that notifies users by e-mail through the SMTP server
 * configured via {@link Environment}.
 */
public class MailAlarm extends AbstractZeusAlarm {

    private static Logger log = LoggerFactory.getLogger(MailAlarm.class);

    @Autowired
    private UserManager userManager;

    // kept for the (previously commented-out, currently disabled) DB audit logging
    @Autowired
    private MysqlLogManager zeusLogManager;

    private static String host = Environment.getHost();// SMTP server host
    private static String port = Environment.getPort();// SMTP server port
    private static String from = Environment.getSendFrom();// sender address
    private static String user = Environment.getUsername();// SMTP user name
    private static String password = Environment.getPassword();// SMTP password

    /**
     * Resolves the users' e-mail addresses and sends the alarm mail.
     * A user record may hold several addresses separated by ';'; entries
     * without an '@' are silently skipped.
     */
    @Override
    public void alarm(String jobId, List<String> users, String title, String content)
            throws Exception {
        List<ZeusUser> userList = userManager.findListByUidByOrder(users);
        List<String> emails = new ArrayList<String>();
        if (userList != null && userList.size() > 0) {
            for (ZeusUser user : userList) {
                String userEmail = user.getEmail();
                if (userEmail != null && !userEmail.isEmpty()
                        && userEmail.contains("@")) {
                    if (userEmail.contains(";")) {
                        String[] userEmails = userEmail.split(";");
                        for (String ems : userEmails) {
                            if (ems.contains("@")) {
                                emails.add(ems);
                            }
                        }
                    } else {
                        emails.add(userEmail);
                    }
                }
            }
            if (emails.size() > 0) {
                // the mail body is sent as plain text, so turn HTML breaks
                // back into real line endings
                content = content.replace("<br/>", "\r\n");
                sendEmail(jobId, emails, title, content);
            }
        }
    }

    /**
     * Sends one plain-text UTF-8 mail to all given recipients. Errors are
     * logged and swallowed on purpose: a broken mail server must not fail
     * the job scheduler itself.
     */
    public void sendEmail(String jobId, List<String> emails, String subject,
            String body) {
        Transport transport = null;
        try {
            log.info( "jobId: " + jobId +" begin to send the email!");
            Properties props = new Properties();
            props.put("mail.smtp.host", host);
            props.put("mail.smtp.port", port);
            props.put("mail.smtp.auth", "true");
            Session session = Session.getDefaultInstance(props, null);
            transport = session.getTransport("smtp");
            transport.connect(host, user, password);
            MimeMessage msg = new MimeMessage(session);
            msg.setSentDate(new Date());
            InternetAddress fromAddress = new InternetAddress(from);
            msg.setFrom(fromAddress);
            InternetAddress[] toAddress = new InternetAddress[emails.size()];
            for (int i = 0; i < emails.size(); i++) {
                toAddress[i] = new InternetAddress(emails.get(i));
            }
            msg.setRecipients(Message.RecipientType.TO, toAddress);
            msg.setSubject(subject, "UTF-8");
            msg.setText(body, "UTF-8");
            msg.saveChanges();
            transport.sendMessage(msg, msg.getAllRecipients());
            log.info("jobId: " + jobId + " send email: " + emails + "; from: " + from + " subject: "
                    + subject + ", send success!");
        } catch (MessagingException e) {
            // also covers NoSuchProviderException (a MessagingException subclass)
            log.error("jobId: " + jobId + " fail to send the mail. ", e);
        } catch (Exception e) {
            log.error("jobId: " + jobId + " fail to send the mail. ", e);
        } finally {
            // fix: the SMTP connection used to leak; always close it
            if (transport != null) {
                try {
                    transport.close();
                } catch (MessagingException e) {
                    log.error("jobId: " + jobId + " fail to close the transport. ", e);
                }
            }
        }
    }
}
| wwzhe/dataworks-zeus | schedule/src/main/java/com/taobao/zeus/broadcast/alarm/MailAlarm.java | Java | apache-2.0 | 4,315 |
/**
* Copyright 2015 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibmcloud.contest.phonebook;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
/**
 * JAX-RS exception that maps to an HTTP 400 (Bad Request) response.
 * Throwing it from a resource method aborts the request with status 400.
 */
public class BadRequestException extends WebApplicationException {

    private static final long serialVersionUID = 1L;

    /** Builds the exception carrying an empty 400 response. */
    public BadRequestException() {
        super(Response.status(Status.BAD_REQUEST).build());
    }
}
| ibmkendrick/phonebookdemo-v2 | src/main/java/com/ibmcloud/contest/phonebook/BadRequestException.java | Java | apache-2.0 | 1,011 |
<?php
namespace ctala\transaccion\classes;
/**
 * Helper utilities for the Transbank/Webpay transaction flow.
 *
 * @author ctala
 */
class Helper {

    /**
     * Renders a self-submitting HTML form that redirects the browser via
     * POST to the payment gateway.
     *
     * Fixes over the previous version:
     *  - $args_array is initialised up front, so an empty $variables array
     *    no longer leaves it undefined when the form is rendered;
     *  - keys, values and the target URL are escaped with htmlspecialchars()
     *    so quotes in the data cannot break out of the generated attributes;
     *  - a stray, invalid "onl" attribute was removed from the <form> tag.
     *
     * @param string $url       gateway endpoint the form posts to
     * @param array  $variables POST fields to submit as hidden inputs
     */
    public static function redirect($url, $variables) {
        $args_array = array();
        foreach ($variables as $key => $value) {
            $args_array[] = '<input type="hidden" name="' . htmlspecialchars($key, ENT_QUOTES) . '" value="' . htmlspecialchars($value, ENT_QUOTES) . '" />';
        }
        ?>
        <html>
            <head>
                <script src="//ajax.googleapis.com/ajax/libs/jquery/1.11.2/jquery.min.js"></script>
            </head>
            <body style="background-image:url('https://webpay3g.transbank.cl/webpayserver/imagenes/background.gif')">
                <form name="WS1" id="WS1" action="<?= htmlspecialchars($url, ENT_QUOTES) ?>" method="POST">
                    <?php
                    foreach ($args_array as $arg) {
                        echo $arg;
                    }
                    ?>
                    <input type="submit" id="submit_webpayplus_payment_form" style="visibility: hidden;">
                </form>
                <script>
                    $(document).ready(function () {
                        $("#WS1").submit();
                    });
                </script>
            </body>
        </html>
        <?php
    }
}
| NAITUSEIRL/prestashop_pagofacil | vendor/ctala/transaccion-default/classes/Helper.php | PHP | apache-2.0 | 1,353 |
package org.zstack.sdk.zwatch.monitorgroup.api;
import org.zstack.sdk.zwatch.monitorgroup.entity.MonitorTemplateInventory;
/**
 * Result payload of the create-monitor-template API call.
 */
public class CreateMonitorTemplateResult {
    /** Inventory describing the monitor template that was created. */
    public MonitorTemplateInventory inventory;

    /** @return the inventory stored on this result (may be null) */
    public MonitorTemplateInventory getInventory() {
        return inventory;
    }

    /** @param inventory inventory payload to expose on this result */
    public void setInventory(MonitorTemplateInventory inventory) {
        this.inventory = inventory;
    }
}
| zstackorg/zstack | sdk/src/main/java/org/zstack/sdk/zwatch/monitorgroup/api/CreateMonitorTemplateResult.java | Java | apache-2.0 | 417 |
/*-
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Modifications:
* -Changed package name
* -Removed Android dependencies
* -Removed/replaced Java SE dependencies
* -Removed/replaced annotations
*/
package com.google.authenticator.blackberry;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.io.ByteArrayOutputStream;
import java.util.Vector;
/**
* Immutable URI reference. A URI reference includes a URI and a fragment, the
* component of the URI following a '#'. Builds and parses URI references
* which conform to
* <a href="http://www.faqs.org/rfcs/rfc2396.html">RFC 2396</a>.
*
* <p>In the interest of performance, this class performs little to no
* validation. Behavior is undefined for invalid input. This class is very
* forgiving--in the face of invalid input, it will return garbage
* rather than throw an exception unless otherwise specified.
*/
public abstract class Uri {
/*
This class aims to do as little up front work as possible. To accomplish
that, we vary the implementation dependending on what the user passes in.
For example, we have one implementation if the user passes in a
URI string (StringUri) and another if the user passes in the
individual components (OpaqueUri).
*Concurrency notes*: Like any truly immutable object, this class is safe
for concurrent use. This class uses a caching pattern in some places where
it doesn't use volatile or synchronized. This is safe to do with ints
because getting or setting an int is atomic. It's safe to do with a String
because the internal fields are final and the memory model guarantees other
threads won't see a partially initialized instance. We are not guaranteed
that some threads will immediately see changes from other threads on
certain platforms, but we don't mind if those threads reconstruct the
cached result. As a result, we get thread safe caching with no concurrency
overhead, which means the most common case, access from a single thread,
is as fast as possible.
From the Java Language spec.:
"17.5 Final Field Semantics
... when the object is seen by another thread, that thread will always
see the correctly constructed version of that object's final fields.
It will also see versions of any object or array referenced by
those final fields that are at least as up-to-date as the final fields
are."
In that same vein, all non-transient fields within Uri
implementations should be final and immutable so as to ensure true
immutability for clients even when they don't use proper concurrency
control.
For reference, from RFC 2396:
"4.3. Parsing a URI Reference
A URI reference is typically parsed according to the four main
components and fragment identifier in order to determine what
components are present and whether the reference is relative or
absolute. The individual components are then parsed for their
subparts and, if not opaque, to verify their validity.
Although the BNF defines what is allowed in each component, it is
ambiguous in terms of differentiating between an authority component
and a path component that begins with two slash characters. The
greedy algorithm is used for disambiguation: the left-most matching
rule soaks up as much of the URI reference string as it is capable of
matching. In other words, the authority component wins."
The "four main components" of a hierarchical URI consist of
<scheme>://<authority><path>?<query>
*/
/**
* NOTE: EMPTY accesses this field during its own initialization, so this
* field *must* be initialized first, or else EMPTY will see a null value!
*
* Placeholder for strings which haven't been cached. This enables us
* to cache null. We intentionally create a new String instance so we can
* compare its identity and there is no chance we will confuse it with
* user data.
*/
private static final String NOT_CACHED = new String("NOT CACHED");
/**
* The empty URI, equivalent to "".
*/
public static final Uri EMPTY = new HierarchicalUri(null, Part.NULL,
PathPart.EMPTY, Part.NULL, Part.NULL);
/**
* Prevents external subclassing.
*/
private Uri() {}
/**
* Returns true if this URI is hierarchical like "http://google.com".
* Absolute URIs are hierarchical if the scheme-specific part starts with
* a '/'. Relative URIs are always hierarchical.
*/
public abstract boolean isHierarchical();
    /**
     * Returns true if this URI is opaque like "mailto:nobody@google.com". The
     * scheme-specific part of an opaque URI cannot start with a '/'.
     */
    public boolean isOpaque() {
        // Opaque and hierarchical are mutually exclusive and exhaustive.
        return !isHierarchical();
    }
/**
* Returns true if this URI is relative, i.e. if it doesn't contain an
* explicit scheme.
*
* @return true if this URI is relative, false if it's absolute
*/
public abstract boolean isRelative();
    /**
     * Returns true if this URI is absolute, i.e. if it contains an
     * explicit scheme.
     *
     * @return true if this URI is absolute, false if it's relative
     */
    public boolean isAbsolute() {
        // Absolute and relative are mutually exclusive and exhaustive.
        return !isRelative();
    }
/**
* Gets the scheme of this URI. Example: "http"
*
* @return the scheme or null if this is a relative URI
*/
public abstract String getScheme();
/**
* Gets the scheme-specific part of this URI, i.e. everything between the
* scheme separator ':' and the fragment separator '#'. If this is a
* relative URI, this method returns the entire URI. Decodes escaped octets.
*
* <p>Example: "//www.google.com/search?q=android"
*
* @return the decoded scheme-specific-part
*/
public abstract String getSchemeSpecificPart();
/**
* Gets the scheme-specific part of this URI, i.e. everything between the
* scheme separator ':' and the fragment separator '#'. If this is a
* relative URI, this method returns the entire URI. Leaves escaped octets
* intact.
*
* <p>Example: "//www.google.com/search?q=android"
*
* @return the decoded scheme-specific-part
*/
public abstract String getEncodedSchemeSpecificPart();
/**
* Gets the decoded authority part of this URI. For
* server addresses, the authority is structured as follows:
* {@code [ userinfo '@' ] host [ ':' port ]}
*
* <p>Examples: "google.com", "bob@google.com:80"
*
* @return the authority for this URI or null if not present
*/
public abstract String getAuthority();
/**
* Gets the encoded authority part of this URI. For
* server addresses, the authority is structured as follows:
* {@code [ userinfo '@' ] host [ ':' port ]}
*
* <p>Examples: "google.com", "bob@google.com:80"
*
* @return the authority for this URI or null if not present
*/
public abstract String getEncodedAuthority();
/**
* Gets the decoded user information from the authority.
* For example, if the authority is "nobody@google.com", this method will
* return "nobody".
*
* @return the user info for this URI or null if not present
*/
public abstract String getUserInfo();
/**
* Gets the encoded user information from the authority.
* For example, if the authority is "nobody@google.com", this method will
* return "nobody".
*
* @return the user info for this URI or null if not present
*/
public abstract String getEncodedUserInfo();
/**
* Gets the encoded host from the authority for this URI. For example,
* if the authority is "bob@google.com", this method will return
* "google.com".
*
* @return the host for this URI or null if not present
*/
public abstract String getHost();
/**
* Gets the port from the authority for this URI. For example,
* if the authority is "google.com:80", this method will return 80.
*
* @return the port for this URI or -1 if invalid or not present
*/
public abstract int getPort();
/**
* Gets the decoded path.
*
* @return the decoded path, or null if this is not a hierarchical URI
* (like "mailto:nobody@google.com") or the URI is invalid
*/
public abstract String getPath();
/**
* Gets the encoded path.
*
* @return the encoded path, or null if this is not a hierarchical URI
* (like "mailto:nobody@google.com") or the URI is invalid
*/
public abstract String getEncodedPath();
/**
* Gets the decoded query component from this URI. The query comes after
* the query separator ('?') and before the fragment separator ('#'). This
* method would return "q=android" for
* "http://www.google.com/search?q=android".
*
* @return the decoded query or null if there isn't one
*/
public abstract String getQuery();
/**
* Gets the encoded query component from this URI. The query comes after
* the query separator ('?') and before the fragment separator ('#'). This
* method would return "q=android" for
* "http://www.google.com/search?q=android".
*
* @return the encoded query or null if there isn't one
*/
public abstract String getEncodedQuery();
/**
* Gets the decoded fragment part of this URI, everything after the '#'.
*
* @return the decoded fragment or null if there isn't one
*/
public abstract String getFragment();
/**
* Gets the encoded fragment part of this URI, everything after the '#'.
*
* @return the encoded fragment or null if there isn't one
*/
public abstract String getEncodedFragment();
/**
* Gets the decoded path segments.
*
* @return decoded path segments, each without a leading or trailing '/'
*/
public abstract String[] getPathSegments();
/**
* Gets the decoded last segment in the path.
*
* @return the decoded last segment or null if the path is empty
*/
public abstract String getLastPathSegment();
/**
* Compares this Uri to another object for equality. Returns true if the
* encoded string representations of this Uri and the given Uri are
* equal. Case counts. Paths are not normalized. If one Uri specifies a
* default port explicitly and the other leaves it implicit, they will not
* be considered equal.
*/
public boolean equals(Object o) {
if (!(o instanceof Uri)) {
return false;
}
Uri other = (Uri) o;
return toString().equals(other.toString());
}
    /**
     * Hashes the encoded string representation of this Uri consistently with
     * {@link #equals(Object)}.
     */
    public int hashCode() {
        return toString().hashCode();
    }
    /**
     * Compares the string representation of this Uri with that of
     * another.
     *
     * @param other the Uri to compare against
     * @return a negative, zero, or positive value per String.compareTo
     */
    public int compareTo(Uri other) {
        return toString().compareTo(other.toString());
    }
/**
* Returns the encoded string representation of this URI.
* Example: "http://google.com/"
*/
public abstract String toString();
/**
* Constructs a new builder, copying the attributes from this Uri.
*/
public abstract Builder buildUpon();
/** Index of a component which was not found. */
private final static int NOT_FOUND = -1;
/** Placeholder value for an index which hasn't been calculated yet. */
private final static int NOT_CALCULATED = -2;
/**
* Error message presented when a user tries to treat an opaque URI as
* hierarchical.
*/
private static final String NOT_HIERARCHICAL
= "This isn't a hierarchical URI.";
/** Default encoding. */
private static final String DEFAULT_ENCODING = "UTF-8";
    /**
     * Creates a Uri which parses the given encoded URI string.
     *
     * <p>Parsing is lazy: this only wraps the string; components are
     * extracted (and cached) the first time they are requested.
     *
     * @param uriString an RFC 2396-compliant, encoded URI
     * @throws NullPointerException if uriString is null
     * @return Uri for this given uri string
     */
    public static Uri parse(String uriString) {
        return new StringUri(uriString);
    }
    /**
     * An implementation which wraps a String URI. This URI can be opaque or
     * hierarchical, but we extend AbstractHierarchicalUri in case we need
     * the hierarchical functionality.
     *
     * <p>Every component (scheme, ssp, authority, path, query, fragment) is
     * parsed lazily out of {@link #uriString} and cached on first access;
     * see the concurrency notes on {@link Uri} for why the unsynchronized
     * caching is safe.
     */
    private static class StringUri extends AbstractHierarchicalUri {

        /** Used in parcelling. */
        static final int TYPE_ID = 1;

        /** URI string representation. */
        private final String uriString;

        private StringUri(String uriString) {
            if (uriString == null) {
                throw new NullPointerException("uriString");
            }
            this.uriString = uriString;
        }

        /** Cached scheme separator index. */
        private volatile int cachedSsi = NOT_CALCULATED;

        /** Finds the first ':'. Returns -1 if none found. */
        private int findSchemeSeparator() {
            return cachedSsi == NOT_CALCULATED
                    ? cachedSsi = uriString.indexOf(':')
                    : cachedSsi;
        }

        /** Cached fragment separator index. */
        private volatile int cachedFsi = NOT_CALCULATED;

        /** Finds the first '#'. Returns -1 if none found. */
        private int findFragmentSeparator() {
            // Starting at the scheme separator is safe even when it is
            // NOT_FOUND (-1): indexOf treats a negative start index as 0.
            return cachedFsi == NOT_CALCULATED
                    ? cachedFsi = uriString.indexOf('#', findSchemeSeparator())
                    : cachedFsi;
        }

        public boolean isHierarchical() {
            int ssi = findSchemeSeparator();

            if (ssi == NOT_FOUND) {
                // All relative URIs are hierarchical.
                return true;
            }

            if (uriString.length() == ssi + 1) {
                // No ssp.
                return false;
            }

            // If the ssp starts with a '/', this is hierarchical.
            return uriString.charAt(ssi + 1) == '/';
        }

        public boolean isRelative() {
            // Relative iff no scheme separator is present at all.
            return findSchemeSeparator() == NOT_FOUND;
        }

        private volatile String scheme = NOT_CACHED;

        public String getScheme() {
            boolean cached = (scheme != NOT_CACHED);
            return cached ? scheme : (scheme = parseScheme());
        }

        private String parseScheme() {
            int ssi = findSchemeSeparator();
            return ssi == NOT_FOUND ? null : uriString.substring(0, ssi);
        }

        private Part ssp;

        private Part getSsp() {
            return ssp == null ? ssp = Part.fromEncoded(parseSsp()) : ssp;
        }

        public String getEncodedSchemeSpecificPart() {
            return getSsp().getEncoded();
        }

        public String getSchemeSpecificPart() {
            return getSsp().getDecoded();
        }

        private String parseSsp() {
            int ssi = findSchemeSeparator();
            int fsi = findFragmentSeparator();

            // Return everything between ssi and fsi.
            return fsi == NOT_FOUND
                    ? uriString.substring(ssi + 1)
                    : uriString.substring(ssi + 1, fsi);
        }

        private Part authority;

        private Part getAuthorityPart() {
            if (authority == null) {
                String encodedAuthority
                        = parseAuthority(this.uriString, findSchemeSeparator());
                return authority = Part.fromEncoded(encodedAuthority);
            }

            return authority;
        }

        public String getEncodedAuthority() {
            return getAuthorityPart().getEncoded();
        }

        public String getAuthority() {
            return getAuthorityPart().getDecoded();
        }

        private PathPart path;

        private PathPart getPathPart() {
            return path == null
                    ? path = PathPart.fromEncoded(parsePath())
                    : path;
        }

        public String getPath() {
            return getPathPart().getDecoded();
        }

        public String getEncodedPath() {
            return getPathPart().getEncoded();
        }

        public String[] getPathSegments() {
            return getPathPart().getPathSegments().segments;
        }

        private String parsePath() {
            String uriString = this.uriString;
            int ssi = findSchemeSeparator();

            // If the URI is absolute.
            if (ssi > -1) {
                // Is there anything after the ':'?
                boolean schemeOnly = ssi + 1 == uriString.length();
                if (schemeOnly) {
                    // Opaque URI.
                    return null;
                }

                // A '/' after the ':' means this is hierarchical.
                if (uriString.charAt(ssi + 1) != '/') {
                    // Opaque URI.
                    return null;
                }
            } else {
                // All relative URIs are hierarchical.
            }

            return parsePath(uriString, ssi);
        }

        private Part query;

        private Part getQueryPart() {
            return query == null
                    ? query = Part.fromEncoded(parseQuery()) : query;
        }

        public String getEncodedQuery() {
            return getQueryPart().getEncoded();
        }

        private String parseQuery() {
            // It doesn't make sense to cache this index. We only ever
            // calculate it once.
            int qsi = uriString.indexOf('?', findSchemeSeparator());

            if (qsi == NOT_FOUND) {
                return null;
            }

            int fsi = findFragmentSeparator();

            if (fsi == NOT_FOUND) {
                return uriString.substring(qsi + 1);
            }

            if (fsi < qsi) {
                // Invalid: the '?' we found lies inside the fragment.
                return null;
            }

            return uriString.substring(qsi + 1, fsi);
        }

        public String getQuery() {
            return getQueryPart().getDecoded();
        }

        private Part fragment;

        private Part getFragmentPart() {
            return fragment == null
                    ? fragment = Part.fromEncoded(parseFragment()) : fragment;
        }

        public String getEncodedFragment() {
            return getFragmentPart().getEncoded();
        }

        private String parseFragment() {
            int fsi = findFragmentSeparator();
            return fsi == NOT_FOUND ? null : uriString.substring(fsi + 1);
        }

        public String getFragment() {
            return getFragmentPart().getDecoded();
        }

        public String toString() {
            return uriString;
        }

        /**
         * Parses an authority out of the given URI string.
         *
         * @param uriString URI string
         * @param ssi scheme separator index, -1 for a relative URI
         *
         * @return the authority or null if none is found
         */
        static String parseAuthority(String uriString, int ssi) {
            int length = uriString.length();

            // If "//" follows the scheme separator, we have an authority.
            if (length > ssi + 2
                    && uriString.charAt(ssi + 1) == '/'
                    && uriString.charAt(ssi + 2) == '/') {
                // We have an authority.

                // Look for the start of the path, query, or fragment, or the
                // end of the string.
                int end = ssi + 3;
                LOOP: while (end < length) {
                    switch (uriString.charAt(end)) {
                        case '/': // Start of path
                        case '?': // Start of query
                        case '#': // Start of fragment
                            break LOOP;
                    }
                    end++;
                }

                return uriString.substring(ssi + 3, end);
            } else {
                return null;
            }
        }

        /**
         * Parses a path out of this given URI string.
         *
         * @param uriString URI string
         * @param ssi scheme separator index, -1 for a relative URI
         *
         * @return the path
         */
        static String parsePath(String uriString, int ssi) {
            int length = uriString.length();

            // Find start of path.
            int pathStart;
            if (length > ssi + 2
                    && uriString.charAt(ssi + 1) == '/'
                    && uriString.charAt(ssi + 2) == '/') {
                // Skip over authority to path.
                pathStart = ssi + 3;
                LOOP: while (pathStart < length) {
                    switch (uriString.charAt(pathStart)) {
                        case '?': // Start of query
                        case '#': // Start of fragment
                            return ""; // Empty path.
                        case '/': // Start of path!
                            break LOOP;
                    }
                    pathStart++;
                }
            } else {
                // Path starts immediately after scheme separator.
                pathStart = ssi + 1;
            }

            // Find end of path.
            int pathEnd = pathStart;
            LOOP: while (pathEnd < length) {
                switch (uriString.charAt(pathEnd)) {
                    case '?': // Start of query
                    case '#': // Start of fragment
                        break LOOP;
                }
                pathEnd++;
            }

            return uriString.substring(pathStart, pathEnd);
        }

        public Builder buildUpon() {
            if (isHierarchical()) {
                return new Builder()
                        .scheme(getScheme())
                        .authority(getAuthorityPart())
                        .path(getPathPart())
                        .query(getQueryPart())
                        .fragment(getFragmentPart());
            } else {
                return new Builder()
                        .scheme(getScheme())
                        .opaquePart(getSsp())
                        .fragment(getFragmentPart());
            }
        }
    }
/**
* Creates an opaque Uri from the given components. Encodes the ssp
* which means this method cannot be used to create hierarchical URIs.
*
* @param scheme of the URI
* @param ssp scheme-specific-part, everything between the
* scheme separator (':') and the fragment separator ('#'), which will
* get encoded
* @param fragment fragment, everything after the '#', null if undefined,
* will get encoded
*
* @throws NullPointerException if scheme or ssp is null
* @return Uri composed of the given scheme, ssp, and fragment
*
* @see Builder if you don't want the ssp and fragment to be encoded
*/
public static Uri fromParts(String scheme, String ssp,
String fragment) {
if (scheme == null) {
throw new NullPointerException("scheme");
}
if (ssp == null) {
throw new NullPointerException("ssp");
}
return new OpaqueUri(scheme, Part.fromDecoded(ssp),
Part.fromDecoded(fragment));
}
    /**
     * Opaque URI: {@code <scheme>:<opaque part>#<fragment>}. An opaque URI
     * has no authority, path, or query, so those accessors all return null
     * (or -1 for the port).
     */
    private static class OpaqueUri extends Uri {

        /** Used in parcelling. */
        static final int TYPE_ID = 2;

        private final String scheme;
        private final Part ssp;
        private final Part fragment;

        private OpaqueUri(String scheme, Part ssp, Part fragment) {
            this.scheme = scheme;
            this.ssp = ssp;
            // Normalize a null fragment so accessors never need null checks.
            this.fragment = fragment == null ? Part.NULL : fragment;
        }

        public boolean isHierarchical() {
            return false;
        }

        public boolean isRelative() {
            return scheme == null;
        }

        public String getScheme() {
            return this.scheme;
        }

        public String getEncodedSchemeSpecificPart() {
            return ssp.getEncoded();
        }

        public String getSchemeSpecificPart() {
            return ssp.getDecoded();
        }

        public String getAuthority() {
            return null;
        }

        public String getEncodedAuthority() {
            return null;
        }

        public String getPath() {
            return null;
        }

        public String getEncodedPath() {
            return null;
        }

        public String getQuery() {
            return null;
        }

        public String getEncodedQuery() {
            return null;
        }

        public String getFragment() {
            return fragment.getDecoded();
        }

        public String getEncodedFragment() {
            return fragment.getEncoded();
        }

        public String[] getPathSegments() {
            return new String[0];
        }

        public String getLastPathSegment() {
            return null;
        }

        public String getUserInfo() {
            return null;
        }

        public String getEncodedUserInfo() {
            return null;
        }

        public String getHost() {
            return null;
        }

        public int getPort() {
            return -1;
        }

        /** Lazily built string form; see the caching notes on {@link Uri}. */
        private volatile String cachedString = NOT_CACHED;

        public String toString() {
            boolean cached = cachedString != NOT_CACHED;
            if (cached) {
                return cachedString;
            }

            StringBuffer sb = new StringBuffer();
            sb.append(scheme).append(':');
            sb.append(getEncodedSchemeSpecificPart());

            if (!fragment.isEmpty()) {
                sb.append('#').append(fragment.getEncoded());
            }

            return cachedString = sb.toString();
        }

        public Builder buildUpon() {
            return new Builder()
                    .scheme(this.scheme)
                    .opaquePart(this.ssp)
                    .fragment(this.fragment);
        }
    }
    /**
     * Wrapper for path segment array. Only the first {@link #size} entries
     * of {@link #segments} are valid; the backing array may be larger than
     * size (it comes straight from {@link PathSegmentsBuilder}).
     */
    static class PathSegments {

        static final PathSegments EMPTY = new PathSegments(null, 0);

        final String[] segments;
        final int size;

        PathSegments(String[] segments, int size) {
            this.segments = segments;
            this.size = size;
        }

        public String get(int index) {
            if (index >= size) {
                throw new IndexOutOfBoundsException();
            }

            return segments[index];
        }

        public int size() {
            return this.size;
        }
    }
    /**
     * Builds PathSegments. Grows the backing array geometrically as
     * segments are added. Not reusable: {@link #build()} hands the backing
     * array to the result and nulls it out, so calling {@link #add} again
     * after build() will fail rather than corrupt the returned object.
     */
    static class PathSegmentsBuilder {

        String[] segments;
        int size = 0;

        void add(String segment) {
            if (segments == null) {
                segments = new String[4];
            } else if (size + 1 == segments.length) {
                // Double the capacity (one add earlier than strictly
                // necessary, which is harmless).
                String[] expanded = new String[segments.length * 2];
                System.arraycopy(segments, 0, expanded, 0, segments.length);
                segments = expanded;
            }

            segments[size++] = segment;
        }

        PathSegments build() {
            if (segments == null) {
                return PathSegments.EMPTY;
            }

            try {
                return new PathSegments(segments, size);
            } finally {
                // Makes sure this doesn't get reused.
                segments = null;
            }
        }
    }
    /**
     * Support for hierarchical URIs. Provides lazy parsing of the user
     * info, host and port out of the encoded authority.
     */
    private abstract static class AbstractHierarchicalUri extends Uri {

        public String getLastPathSegment() {
            // TODO: If we haven't parsed all of the segments already, just
            // grab the last one directly so we only allocate one string.

            String[] segments = getPathSegments();
            int size = segments.length;
            if (size == 0) {
                return null;
            }
            return segments[size - 1];
        }

        private Part userInfo;

        private Part getUserInfoPart() {
            return userInfo == null
                    ? userInfo = Part.fromEncoded(parseUserInfo()) : userInfo;
        }

        public final String getEncodedUserInfo() {
            return getUserInfoPart().getEncoded();
        }

        private String parseUserInfo() {
            String authority = getEncodedAuthority();
            if (authority == null) {
                return null;
            }

            int end = authority.indexOf('@');
            return end == NOT_FOUND ? null : authority.substring(0, end);
        }

        public String getUserInfo() {
            return getUserInfoPart().getDecoded();
        }

        private volatile String host = NOT_CACHED;

        public String getHost() {
            boolean cached = (host != NOT_CACHED);
            return cached ? host
                    : (host = parseHost());
        }

        private String parseHost() {
            String authority = getEncodedAuthority();
            if (authority == null) {
                return null;
            }

            // Parse out user info and then port.
            // When there is no '@', userInfoSeparator is -1, so the
            // substring below starts at index 0 - exactly what we want.
            int userInfoSeparator = authority.indexOf('@');
            int portSeparator = authority.indexOf(':', userInfoSeparator);

            String encodedHost = portSeparator == NOT_FOUND
                    ? authority.substring(userInfoSeparator + 1)
                    : authority.substring(userInfoSeparator + 1, portSeparator);

            return decode(encodedHost);
        }

        private volatile int port = NOT_CALCULATED;

        public int getPort() {
            return port == NOT_CALCULATED
                    ? port = parsePort()
                    : port;
        }

        private int parsePort() {
            String authority = getEncodedAuthority();
            if (authority == null) {
                return -1;
            }

            // Make sure we look for the port separator *after* the user info
            // separator. We have URLs with a ':' in the user info.
            int userInfoSeparator = authority.indexOf('@');
            int portSeparator = authority.indexOf(':', userInfoSeparator);

            if (portSeparator == NOT_FOUND) {
                return -1;
            }

            // A malformed (non-numeric) port yields -1 rather than throwing.
            String portString = decode(authority.substring(portSeparator + 1));
            try {
                return Integer.parseInt(portString);
            } catch (NumberFormatException e) {
                return -1;
            }
        }
    }
    /**
     * Hierarchical Uri built from pre-parsed components
     * ({@code <scheme>://<authority><path>?<query>#<fragment>}).
     * Unlike {@link StringUri}, the components are supplied up front; only
     * the composed string forms are built lazily.
     */
    private static class HierarchicalUri extends AbstractHierarchicalUri {

        /** Used in parcelling. */
        static final int TYPE_ID = 3;

        private final String scheme; // can be null
        private final Part authority;
        private final PathPart path;
        private final Part query;
        private final Part fragment;

        private HierarchicalUri(String scheme, Part authority, PathPart path,
                Part query, Part fragment) {
            this.scheme = scheme;
            // Normalize nulls so accessors never need null checks.
            this.authority = Part.nonNull(authority);
            this.path = path == null ? PathPart.NULL : path;
            this.query = Part.nonNull(query);
            this.fragment = Part.nonNull(fragment);
        }

        public boolean isHierarchical() {
            return true;
        }

        public boolean isRelative() {
            return scheme == null;
        }

        public String getScheme() {
            return scheme;
        }

        private Part ssp;

        private Part getSsp() {
            return ssp == null
                    ? ssp = Part.fromEncoded(makeSchemeSpecificPart()) : ssp;
        }

        public String getEncodedSchemeSpecificPart() {
            return getSsp().getEncoded();
        }

        public String getSchemeSpecificPart() {
            return getSsp().getDecoded();
        }

        /**
         * Creates the encoded scheme-specific part from its sub parts.
         */
        private String makeSchemeSpecificPart() {
            StringBuffer builder = new StringBuffer();
            appendSspTo(builder);
            return builder.toString();
        }

        private void appendSspTo(StringBuffer builder) {
            String encodedAuthority = authority.getEncoded();
            if (encodedAuthority != null) {
                // Even if the authority is "", we still want to append "//".
                builder.append("//").append(encodedAuthority);
            }

            String encodedPath = path.getEncoded();
            if (encodedPath != null) {
                builder.append(encodedPath);
            }

            if (!query.isEmpty()) {
                builder.append('?').append(query.getEncoded());
            }
        }

        public String getAuthority() {
            return this.authority.getDecoded();
        }

        public String getEncodedAuthority() {
            return this.authority.getEncoded();
        }

        public String getEncodedPath() {
            return this.path.getEncoded();
        }

        public String getPath() {
            return this.path.getDecoded();
        }

        public String getQuery() {
            return this.query.getDecoded();
        }

        public String getEncodedQuery() {
            return this.query.getEncoded();
        }

        public String getFragment() {
            return this.fragment.getDecoded();
        }

        public String getEncodedFragment() {
            return this.fragment.getEncoded();
        }

        public String[] getPathSegments() {
            return this.path.getPathSegments().segments;
        }

        /** Lazily built string form; see the caching notes on {@link Uri}. */
        private volatile String uriString = NOT_CACHED;

        /**
         * {@inheritDoc}
         */
        public String toString() {
            boolean cached = (uriString != NOT_CACHED);
            return cached ? uriString
                    : (uriString = makeUriString());
        }

        private String makeUriString() {
            StringBuffer builder = new StringBuffer();

            if (scheme != null) {
                builder.append(scheme).append(':');
            }

            appendSspTo(builder);

            if (!fragment.isEmpty()) {
                builder.append('#').append(fragment.getEncoded());
            }

            return builder.toString();
        }

        public Builder buildUpon() {
            return new Builder()
                    .scheme(scheme)
                    .authority(authority)
                    .path(path)
                    .query(query)
                    .fragment(fragment);
        }
    }
/**
* Helper class for building or manipulating URI references. Not safe for
* concurrent use.
*
* <p>An absolute hierarchical URI reference follows the pattern:
* {@code <scheme>://<authority><absolute path>?<query>#<fragment>}
*
* <p>Relative URI references (which are always hierarchical) follow one
* of two patterns: {@code <relative or absolute path>?<query>#<fragment>}
* or {@code //<authority><absolute path>?<query>#<fragment>}
*
* <p>An opaque URI follows this pattern:
* {@code <scheme>:<opaque part>#<fragment>}
*/
public static final class Builder {
private String scheme;
private Part opaquePart;
private Part authority;
private PathPart path;
private Part query;
private Part fragment;
/**
* Constructs a new Builder.
*/
public Builder() {}
/**
* Sets the scheme.
*
* @param scheme name or {@code null} if this is a relative Uri
*/
public Builder scheme(String scheme) {
this.scheme = scheme;
return this;
}
Builder opaquePart(Part opaquePart) {
this.opaquePart = opaquePart;
return this;
}
/**
* Encodes and sets the given opaque scheme-specific-part.
*
* @param opaquePart decoded opaque part
*/
public Builder opaquePart(String opaquePart) {
return opaquePart(Part.fromDecoded(opaquePart));
}
/**
* Sets the previously encoded opaque scheme-specific-part.
*
* @param opaquePart encoded opaque part
*/
public Builder encodedOpaquePart(String opaquePart) {
return opaquePart(Part.fromEncoded(opaquePart));
}
Builder authority(Part authority) {
// This URI will be hierarchical.
this.opaquePart = null;
this.authority = authority;
return this;
}
/**
* Encodes and sets the authority.
*/
public Builder authority(String authority) {
return authority(Part.fromDecoded(authority));
}
/**
* Sets the previously encoded authority.
*/
public Builder encodedAuthority(String authority) {
return authority(Part.fromEncoded(authority));
}
Builder path(PathPart path) {
// This URI will be hierarchical.
this.opaquePart = null;
this.path = path;
return this;
}
/**
* Sets the path. Leaves '/' characters intact but encodes others as
* necessary.
*
* <p>If the path is not null and doesn't start with a '/', and if
* you specify a scheme and/or authority, the builder will prepend the
* given path with a '/'.
*/
public Builder path(String path) {
return path(PathPart.fromDecoded(path));
}
/**
* Sets the previously encoded path.
*
* <p>If the path is not null and doesn't start with a '/', and if
* you specify a scheme and/or authority, the builder will prepend the
* given path with a '/'.
*/
public Builder encodedPath(String path) {
return path(PathPart.fromEncoded(path));
}
/**
* Encodes the given segment and appends it to the path.
*/
public Builder appendPath(String newSegment) {
return path(PathPart.appendDecodedSegment(path, newSegment));
}
/**
* Appends the given segment to the path.
*/
public Builder appendEncodedPath(String newSegment) {
return path(PathPart.appendEncodedSegment(path, newSegment));
}
Builder query(Part query) {
// This URI will be hierarchical.
this.opaquePart = null;
this.query = query;
return this;
}
/**
* Encodes and sets the query.
*/
public Builder query(String query) {
return query(Part.fromDecoded(query));
}
/**
* Sets the previously encoded query.
*/
public Builder encodedQuery(String query) {
return query(Part.fromEncoded(query));
}
Builder fragment(Part fragment) {
this.fragment = fragment;
return this;
}
/**
* Encodes and sets the fragment.
*/
public Builder fragment(String fragment) {
return fragment(Part.fromDecoded(fragment));
}
/**
* Sets the previously encoded fragment.
*/
public Builder encodedFragment(String fragment) {
return fragment(Part.fromEncoded(fragment));
}
/**
* Encodes the key and value and then appends the parameter to the
* query string.
*
* @param key which will be encoded
* @param value which will be encoded
*/
public Builder appendQueryParameter(String key, String value) {
// This URI will be hierarchical.
this.opaquePart = null;
String encodedParameter = encode(key, null) + "="
+ encode(value, null);
if (query == null) {
query = Part.fromEncoded(encodedParameter);
return this;
}
String oldQuery = query.getEncoded();
if (oldQuery == null || oldQuery.length() == 0) {
query = Part.fromEncoded(encodedParameter);
} else {
query = Part.fromEncoded(oldQuery + "&" + encodedParameter);
}
return this;
}
/**
 * Constructs a Uri with the current attributes.
 *
 * @throws UnsupportedOperationException if the URI is opaque and the
 * scheme is null
 */
public Uri build() {
if (opaquePart != null) {
if (this.scheme == null) {
throw new UnsupportedOperationException(
"An opaque URI must have a scheme.");
}
// Opaque URIs consist of scheme + opaque part + optional fragment.
return new OpaqueUri(scheme, opaquePart, fragment);
} else {
// Hierarchical URIs should not return null for getPath().
PathPart path = this.path;
if (path == null || path == PathPart.NULL) {
path = PathPart.EMPTY;
} else {
// If we have a scheme and/or authority, the path must
// be absolute. Prepend it with a '/' if necessary.
if (hasSchemeOrAuthority()) {
path = PathPart.makeAbsolute(path);
}
}
return new HierarchicalUri(
scheme, authority, path, query, fragment);
}
}
/**
 * Returns true if a scheme or a non-null authority has been set, in
 * which case {@link #build()} must make the path absolute.
 */
private boolean hasSchemeOrAuthority() {
return scheme != null
|| (authority != null && authority != Part.NULL);
}
/**
 * {@inheritDoc}
 */
public String toString() {
// Note: may throw UnsupportedOperationException via build() when the
// builder describes an opaque URI without a scheme.
return build().toString();
}
}
/**
 * Searches the query string for parameter values with the given key.
 *
 * @param key which will be encoded
 *
 * @throws UnsupportedOperationException if this isn't a hierarchical URI
 * @throws NullPointerException if key is null
 *
 * @return a list of decoded values
 */
public String[] getQueryParameters(String key) {
if (isOpaque()) {
throw new UnsupportedOperationException(NOT_HIERARCHICAL);
}
String query = getEncodedQuery();
if (query == null) {
return new String[0];
}
String encodedKey;
try {
// Matching is done against the URLEncoder-encoded form of the key,
// mirroring how parameters are written by appendQueryParameter().
encodedKey = URLEncoder.encode(key, DEFAULT_ENCODING);
} catch (UnsupportedEncodingException e) {
// The default encoding is expected to always be available; anything
// else is a programming error.
throw new RuntimeException("AssertionError: " + e);
}
// Prepend query with "&" making the first parameter the same as the
// rest.
query = "&" + query;
// Parameter prefix.
String prefix = "&" + encodedKey + "=";
// NOTE(review): Vector (not a generic List) appears deliberate —
// presumably for J2ME/BlackBerry compatibility; confirm before
// modernizing.
Vector values = new Vector();
int start = 0;
int length = query.length();
while (start < length) {
start = query.indexOf(prefix, start);
if (start == -1) {
// No more values.
break;
}
// Move start to start of value.
start += prefix.length();
// Find end of value.
int end = query.indexOf('&', start);
if (end == -1) {
end = query.length();
}
String value = query.substring(start, end);
values.addElement(decode(value));
start = end;
}
int size = values.size();
String[] result = new String[size];
values.copyInto(result);
return result;
}
/**
 * Searches the query string for the first value with the given key.
 *
 * @param key which will be encoded
 * @throws UnsupportedOperationException if this isn't a hierarchical URI
 * @throws NullPointerException if key is null
 *
 * @return the decoded value or null if no parameter is found
 */
public String getQueryParameter(String key) {
if (isOpaque()) {
throw new UnsupportedOperationException(NOT_HIERARCHICAL);
}
String query = getEncodedQuery();
if (query == null) {
return null;
}
String encodedKey;
try {
// Compare against the URLEncoder-encoded form of the key, the same
// form used when parameters are appended.
encodedKey = URLEncoder.encode(key, DEFAULT_ENCODING);
} catch (UnsupportedEncodingException e) {
throw new RuntimeException("AssertionError: " + e);
}
String prefix = encodedKey + "=";
if (query.length() < prefix.length()) {
return null;
}
int start;
if (query.startsWith(prefix)) {
// It's the first parameter.
start = prefix.length();
} else {
// It must be later in the query string.
prefix = "&" + prefix;
start = query.indexOf(prefix);
if (start == -1) {
// Not found.
return null;
}
start += prefix.length();
}
// Find end of value.
int end = query.indexOf('&', start);
if (end == -1) {
end = query.length();
}
String value = query.substring(start, end);
return decode(value);
}
private static final char[] HEX_DIGITS = "0123456789ABCDEF".toCharArray();
/**
 * Encodes characters in the given string as '%'-escaped octets
 * using the UTF-8 scheme. Leaves letters ("A-Z", "a-z"), numbers
 * ("0-9"), and unreserved characters ("_-!.~'()*") intact. Encodes
 * all other characters.
 *
 * Equivalent to {@code encode(s, null)}.
 *
 * @param s string to encode
 * @return an encoded version of s suitable for use as a URI component,
 * or null if s is null
 */
public static String encode(String s) {
return encode(s, null);
}
/**
 * Encodes characters in the given string as '%'-escaped octets
 * using the UTF-8 scheme. Leaves letters ("A-Z", "a-z"), numbers
 * ("0-9"), and unreserved characters ("_-!.~'()*") intact. Encodes
 * all other characters with the exception of those specified in the
 * allow argument.
 *
 * Disallowed characters are converted to bytes via the default
 * encoding and each byte is emitted as "%XY" using upper-case hex.
 *
 * @param s string to encode
 * @param allow set of additional characters to allow in the encoded form,
 * null if no characters should be skipped
 * @return an encoded version of s suitable for use as a URI component,
 * or null if s is null
 */
public static String encode(String s, String allow) {
if (s == null) {
return null;
}
// Lazily-initialized buffers.
StringBuffer encoded = null;
int oldLength = s.length();
// This loop alternates between copying over allowed characters and
// encoding in chunks. This results in fewer method calls and
// allocations than encoding one character at a time.
int current = 0;
while (current < oldLength) {
// Start in "copying" mode where we copy over allowed chars.
// Find the next character which needs to be encoded.
int nextToEncode = current;
while (nextToEncode < oldLength
&& isAllowed(s.charAt(nextToEncode), allow)) {
nextToEncode++;
}
// If there's nothing more to encode...
if (nextToEncode == oldLength) {
if (current == 0) {
// We didn't need to encode anything!
return s;
} else {
// Presumably, we've already done some encoding.
encoded.append(s.substring(current, oldLength));
return encoded.toString();
}
}
if (encoded == null) {
encoded = new StringBuffer();
}
if (nextToEncode > current) {
// Append allowed characters leading up to this point.
encoded.append(s.substring(current, nextToEncode));
} else {
// assert nextToEncode == current
}
// Switch to "encoding" mode.
// Find the next allowed character.
current = nextToEncode;
int nextAllowed = current + 1;
while (nextAllowed < oldLength
&& !isAllowed(s.charAt(nextAllowed), allow)) {
nextAllowed++;
}
// Convert the substring to bytes and encode the bytes as
// '%'-escaped octets.
String toEncode = s.substring(current, nextAllowed);
try {
byte[] bytes = toEncode.getBytes(DEFAULT_ENCODING);
int bytesLength = bytes.length;
for (int i = 0; i < bytesLength; i++) {
encoded.append('%');
encoded.append(HEX_DIGITS[(bytes[i] & 0xf0) >> 4]);
encoded.append(HEX_DIGITS[bytes[i] & 0xf]);
}
} catch (UnsupportedEncodingException e) {
// The default encoding should always be supported.
throw new RuntimeException("AssertionError: " + e);
}
current = nextAllowed;
}
// Encoded could still be null at this point if s is empty.
return encoded == null ? s : encoded.toString();
}
/**
 * Returns true if the given character is allowed.
 *
 * Allowed characters are ASCII letters and digits, the punctuation set
 * "_-!.~'()*", and any characters supplied by the caller via allow.
 *
 * @param c character to check
 * @param allow characters to allow
 * @return true if the character is allowed or false if it should be
 * encoded
 */
private static boolean isAllowed(char c, String allow) {
return (c >= 'A' && c <= 'Z')
|| (c >= 'a' && c <= 'z')
|| (c >= '0' && c <= '9')
|| "_-!.~'()*".indexOf(c) != NOT_FOUND
|| (allow != null && allow.indexOf(c) != NOT_FOUND);
}
/** Unicode replacement character: \\uFFFD. */
private static final byte[] REPLACEMENT = { (byte) 0xFF, (byte) 0xFD };
/**
 * Decodes '%'-escaped octets in the given string using the UTF-8 scheme.
 * Replaces invalid octets with the unicode replacement character
 * ("\\uFFFD").
 *
 * @param s encoded string to decode
 * @return the given string with escaped octets decoded, or null if
 * s is null
 */
public static String decode(String s) {
/*
Compared to java.net.URLEncoderDecoder.decode(), this method decodes a
chunk at a time instead of one character at a time, and it doesn't
throw exceptions. It also only allocates memory when necessary--if
there's nothing to decode, this method won't do much.
*/
if (s == null) {
return null;
}
// Lazily-initialized buffers.
StringBuffer decoded = null;
ByteArrayOutputStream out = null;
int oldLength = s.length();
// This loop alternates between copying over normal characters and
// escaping in chunks. This results in fewer method calls and
// allocations than decoding one character at a time.
int current = 0;
while (current < oldLength) {
// Start in "copying" mode where we copy over normal characters.
// Find the next escape sequence.
int nextEscape = s.indexOf('%', current);
if (nextEscape == NOT_FOUND) {
if (decoded == null) {
// We didn't actually decode anything.
return s;
} else {
// Append the remainder and return the decoded string.
decoded.append(s.substring(current, oldLength));
return decoded.toString();
}
}
// Prepare buffers.
if (decoded == null) {
// Looks like we're going to need the buffers...
// We know the new string will be shorter. Using the old length
// may overshoot a bit, but it will save us from resizing the
// buffer.
decoded = new StringBuffer(oldLength);
out = new ByteArrayOutputStream(4);
} else {
// Clear decoding buffer.
out.reset();
}
// Append characters leading up to the escape.
if (nextEscape > current) {
decoded.append(s.substring(current, nextEscape));
current = nextEscape;
} else {
// assert current == nextEscape
}
// Switch to "decoding" mode where we decode a string of escape
// sequences.
// Decode and append escape sequences. Escape sequences look like
// "%ab" where % is literal and a and b are hex digits.
try {
do {
if (current + 2 >= oldLength) {
// Truncated escape sequence.
out.write(REPLACEMENT);
} else {
int a = Character.digit(s.charAt(current + 1), 16);
int b = Character.digit(s.charAt(current + 2), 16);
if (a == -1 || b == -1) {
// Non hex digits.
out.write(REPLACEMENT);
} else {
// Combine the hex digits into one byte and write.
out.write((a << 4) + b);
}
}
// Move passed the escape sequence.
current += 3;
} while (current < oldLength && s.charAt(current) == '%');
// Decode UTF-8 bytes into a string and append it.
decoded.append(new String(out.toByteArray(), DEFAULT_ENCODING));
} catch (UnsupportedEncodingException e) {
// The default encoding should always be supported.
throw new RuntimeException("AssertionError: " + e);
} catch (IOException e) {
// ByteArrayOutputStream does not actually perform I/O.
throw new RuntimeException("AssertionError: " + e);
}
}
// If we don't have a buffer, we didn't have to decode anything.
return decoded == null ? s : decoded.toString();
}
/**
 * Support for part implementations.
 */
static abstract class AbstractPart {
/**
 * Enum which indicates which representation of a given part we have.
 */
static class Representation {
static final int BOTH = 0;
static final int ENCODED = 1;
static final int DECODED = 2;
}
// Either field may hold the NOT_CACHED sentinel (defined elsewhere in
// this file) until the corresponding representation is computed.
volatile String encoded;
volatile String decoded;
AbstractPart(String encoded, String decoded) {
this.encoded = encoded;
this.decoded = decoded;
}
abstract String getEncoded();
// Lazily decodes the encoded form and caches the result.
final String getDecoded() {
boolean hasDecoded = decoded != NOT_CACHED;
return hasDecoded ? decoded : (decoded = decode(encoded));
}
}
/**
 * Immutable wrapper of encoded and decoded versions of a URI part. Lazily
 * creates the encoded or decoded version from the other.
 */
static class Part extends AbstractPart {
/** A part with null values. */
static final Part NULL = new EmptyPart(null);
/** A part with empty strings for values. */
static final Part EMPTY = new EmptyPart("");
private Part(String encoded, String decoded) {
super(encoded, decoded);
}
boolean isEmpty() {
return false;
}
// Lazily encodes the decoded form and caches the result.
String getEncoded() {
boolean hasEncoded = encoded != NOT_CACHED;
return hasEncoded ? encoded : (encoded = encode(decoded));
}
/**
 * Returns given part or {@link #NULL} if the given part is null.
 */
static Part nonNull(Part part) {
return part == null ? NULL : part;
}
/**
 * Creates a part from the encoded string.
 *
 * @param encoded part string
 */
static Part fromEncoded(String encoded) {
return from(encoded, NOT_CACHED);
}
/**
 * Creates a part from the decoded string.
 *
 * @param decoded part string
 */
static Part fromDecoded(String decoded) {
return from(NOT_CACHED, decoded);
}
/**
 * Creates a part from the encoded and decoded strings.
 *
 * Returns the shared {@link #NULL} or {@link #EMPTY} instances when
 * either representation is null or empty.
 *
 * @param encoded part string
 * @param decoded part string
 */
static Part from(String encoded, String decoded) {
// We have to check both encoded and decoded in case one is
// NOT_CACHED.
if (encoded == null) {
return NULL;
}
if (encoded.length() == 0) {
return EMPTY;
}
if (decoded == null) {
return NULL;
}
if (decoded .length() == 0) {
return EMPTY;
}
return new Part(encoded, decoded);
}
/** A part whose encoded and decoded forms are the same (null or ""). */
private static class EmptyPart extends Part {
public EmptyPart(String value) {
super(value, value);
}
/**
 * {@inheritDoc}
 */
boolean isEmpty() {
return true;
}
}
}
/**
 * Immutable wrapper of encoded and decoded versions of a path part. Lazily
 * creates the encoded or decoded version from the other.
 */
static class PathPart extends AbstractPart {
/** A part with null values. */
static final PathPart NULL = new PathPart(null, null);
/** A part with empty strings for values. */
static final PathPart EMPTY = new PathPart("", "");
private PathPart(String encoded, String decoded) {
super(encoded, decoded);
}
String getEncoded() {
boolean hasEncoded = encoded != NOT_CACHED;
// Don't encode '/'.
return hasEncoded ? encoded : (encoded = encode(decoded, "/"));
}
/**
 * Cached path segments. This doesn't need to be volatile--we don't
 * care if other threads see the result.
 */
private PathSegments pathSegments;
/**
 * Gets the individual path segments. Parses them if necessary.
 *
 * @return parsed path segments or null if this isn't a hierarchical
 * URI
 */
PathSegments getPathSegments() {
if (pathSegments != null) {
return pathSegments;
}
String path = getEncoded();
if (path == null) {
return pathSegments = PathSegments.EMPTY;
}
PathSegmentsBuilder segmentBuilder = new PathSegmentsBuilder();
int previous = 0;
int current;
while ((current = path.indexOf('/', previous)) > -1) {
// This check keeps us from adding a segment if the path starts
// '/' and an empty segment for "//".
if (previous < current) {
String decodedSegment
= decode(path.substring(previous, current));
segmentBuilder.add(decodedSegment);
}
previous = current + 1;
}
// Add in the final path segment.
if (previous < path.length()) {
segmentBuilder.add(decode(path.substring(previous)));
}
return pathSegments = segmentBuilder.build();
}
static PathPart appendEncodedSegment(PathPart oldPart,
String newSegment) {
// If there is no old path, should we make the new path relative
// or absolute? I pick absolute.
if (oldPart == null) {
// No old path.
return fromEncoded("/" + newSegment);
}
String oldPath = oldPart.getEncoded();
if (oldPath == null) {
oldPath = "";
}
int oldPathLength = oldPath.length();
String newPath;
if (oldPathLength == 0) {
// No old path.
newPath = "/" + newSegment;
} else if (oldPath.charAt(oldPathLength - 1) == '/') {
// Old path already ends in '/'; just append.
newPath = oldPath + newSegment;
} else {
newPath = oldPath + "/" + newSegment;
}
return fromEncoded(newPath);
}
static PathPart appendDecodedSegment(PathPart oldPart, String decoded) {
String encoded = encode(decoded);
// TODO: Should we reuse old PathSegments? Probably not.
return appendEncodedSegment(oldPart, encoded);
}
/**
 * Creates a path from the encoded string.
 *
 * @param encoded part string
 */
static PathPart fromEncoded(String encoded) {
return from(encoded, NOT_CACHED);
}
/**
 * Creates a path from the decoded string.
 *
 * @param decoded part string
 */
static PathPart fromDecoded(String decoded) {
return from(NOT_CACHED, decoded);
}
/**
 * Creates a path from the encoded and decoded strings.
 *
 * NOTE(review): unlike Part.from, the decoded value is not checked
 * for null/empty here — confirm this asymmetry is intentional.
 *
 * @param encoded part string
 * @param decoded part string
 */
static PathPart from(String encoded, String decoded) {
if (encoded == null) {
return NULL;
}
if (encoded.length() == 0) {
return EMPTY;
}
return new PathPart(encoded, decoded);
}
/**
 * Prepends path values with "/" if they're present, not empty, and
 * they don't already start with "/".
 */
static PathPart makeAbsolute(PathPart oldPart) {
boolean encodedCached = oldPart.encoded != NOT_CACHED;
// We don't care which version we use, and we don't want to force
// unneccessary encoding/decoding.
String oldPath = encodedCached ? oldPart.encoded : oldPart.decoded;
if (oldPath == null || oldPath.length() == 0
|| oldPath.startsWith("/")) {
return oldPart;
}
// Prepend encoded string if present.
String newEncoded = encodedCached
? "/" + oldPart.encoded : NOT_CACHED;
// Prepend decoded string if present.
boolean decodedCached = oldPart.decoded != NOT_CACHED;
String newDecoded = decodedCached
? "/" + oldPart.decoded
: NOT_CACHED;
return new PathPart(newEncoded, newDecoded);
}
}
/**
 * Creates a new Uri by appending an already-encoded path segment to a
 * base Uri.
 *
 * @param baseUri Uri to append path segment to
 * @param pathSegment encoded path segment to append
 * @return a new Uri based on baseUri with the given segment appended to
 * the path
 * @throws NullPointerException if baseUri is null
 */
public static Uri withAppendedPath(Uri baseUri, String pathSegment) {
    return baseUri.buildUpon()
            .appendEncodedPath(pathSegment)
            .build();
}
}
| google/google-authenticator | mobile/blackberry/src/com/google/authenticator/blackberry/Uri.java | Java | apache-2.0 | 65,026 |
/**
* @license
* Copyright 2020 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
// Declares a FOAM model mirroring Jetty's thread-pool settings
// (org.eclipse.jetty.server.ThreadPool). The `value` entries below are
// the property defaults used when no override is configured.
foam.CLASS({
package: 'foam.nanos.jetty',
name: 'JettyThreadPoolConfig',
documentation: 'model of org.eclipse.jetty.server.ThreadPool',
properties: [
{
// Minimum number of pooled threads.
name: 'minThreads',
class: 'Int',
value: 8
},
{
// Upper bound on pooled threads.
name: 'maxThreads',
class: 'Int',
value: 200
},
{
// Idle timeout before surplus threads are reclaimed — presumably
// milliseconds, matching Jetty's idleTimeout convention; confirm.
name: 'idleTimeout',
class: 'Int',
value: 60000
}
]
});
| jacksonic/vjlofvhjfgm | src/foam/nanos/jetty/JettyThreadPoolConfig.js | JavaScript | apache-2.0 | 513 |
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using QuantConnect.Orders;
using QuantConnect.Interfaces;
using QuantConnect.Securities;
using System.Collections.Generic;
using QuantConnect.Data.Custom.AlphaStreams;
using QuantConnect.Algorithm.Framework.Alphas;
using QuantConnect.Algorithm.Framework.Execution;
using QuantConnect.Algorithm.Framework.Portfolio;
namespace QuantConnect.Algorithm.CSharp
{
/// <summary>
/// Example algorithm consuming an alpha streams portfolio state and trading based on it
/// </summary>
public class AlphaStreamsBasicTemplateAlgorithm : QCAlgorithm, IRegressionAlgorithmDefinition
{
/// <summary>
/// Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. All algorithms must initialized.
/// </summary>
public override void Initialize()
{
// Three-day backtest window.
SetStartDate(2018, 04, 04);
SetEndDate(2018, 04, 06);
SetAlpha(new AlphaStreamAlphaModule());
SetExecution(new ImmediateExecutionModel());
// Presumably suppresses orders whose margin impact is below 1% of
// portfolio value — confirm against Settings documentation.
Settings.MinimumOrderMarginPortfolioPercentage = 0.01m;
SetPortfolioConstruction(new EqualWeightingAlphaStreamsPortfolioConstructionModel());
SetSecurityInitializer(new BrokerageModelSecurityInitializer(BrokerageModel,
new FuncSecuritySeeder(GetLastKnownPrices)));
// Subscribe to the portfolio-state data feed of each consumed alpha.
foreach (var alphaId in new [] { "623b06b231eb1cc1aa3643a46", "9fc8ef73792331b11dbd5429a" })
{
AddData<AlphaStreamsPortfolioState>(alphaId);
}
}
/// <summary>
/// Logs each order event so it appears in the regression output.
/// </summary>
public override void OnOrderEvent(OrderEvent orderEvent)
{
Log($"OnOrderEvent: {orderEvent}");
}
/// <summary>
/// This is used by the regression test system to indicate if the open source Lean repository has the required data to run this algorithm.
/// </summary>
public bool CanRunLocally { get; } = true;
/// <summary>
/// This is used by the regression test system to indicate which languages this algorithm is written in.
/// </summary>
public Language[] Languages { get; } = { Language.CSharp };
/// <summary>
/// This is used by the regression test system to indicate what the expected statistics are from running the algorithm
/// </summary>
public virtual Dictionary<string, string> ExpectedStatistics => new Dictionary<string, string>
{
{"Total Trades", "2"},
{"Average Win", "0%"},
{"Average Loss", "-0.12%"},
{"Compounding Annual Return", "-14.722%"},
{"Drawdown", "0.200%"},
{"Expectancy", "-1"},
{"Net Profit", "-0.116%"},
{"Sharpe Ratio", "0"},
{"Probabilistic Sharpe Ratio", "0%"},
{"Loss Rate", "100%"},
{"Win Rate", "0%"},
{"Profit-Loss Ratio", "0"},
{"Alpha", "0"},
{"Beta", "0"},
{"Annual Standard Deviation", "0"},
{"Annual Variance", "0"},
{"Information Ratio", "2.474"},
{"Tracking Error", "0.339"},
{"Treynor Ratio", "0"},
{"Total Fees", "$0.00"},
{"Estimated Strategy Capacity", "$83000.00"},
{"Lowest Capacity Asset", "BTCUSD XJ"},
{"Fitness Score", "0.017"},
{"Kelly Criterion Estimate", "0"},
{"Kelly Criterion Probability Value", "0"},
{"Sortino Ratio", "79228162514264337593543950335"},
{"Return Over Maximum Drawdown", "-138.588"},
{"Portfolio Turnover", "0.034"},
{"Total Insights Generated", "0"},
{"Total Insights Closed", "0"},
{"Total Insights Analysis Completed", "0"},
{"Long Insight Count", "0"},
{"Short Insight Count", "0"},
{"Long/Short Ratio", "100%"},
{"Estimated Monthly Alpha Value", "$0"},
{"Total Accumulated Estimated Alpha Value", "$0"},
{"Mean Population Estimated Insight Value", "$0"},
{"Mean Population Direction", "0%"},
{"Mean Population Magnitude", "0%"},
{"Rolling Averaged Population Direction", "0%"},
{"Rolling Averaged Population Magnitude", "0%"},
{"OrderListHash", "2b94bc50a74caebe06c075cdab1bc6da"}
};
}
}
| AlexCatarino/Lean | Algorithm.CSharp/AlphaStreamsBasicTemplateAlgorithm.cs | C# | apache-2.0 | 5,088 |
/*
* Copyright (c) 2009 Kathryn Huxtable and Kenneth Orr.
*
* This file is part of the SeaGlass Pluggable Look and Feel.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* $Id: org.eclipse.jdt.ui.prefs 172 2009-10-06 18:31:12Z kathryn@kathrynhuxtable.org $
*/
package com.seaglasslookandfeel.ui;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics;
import java.beans.PropertyChangeEvent;
import javax.swing.JComponent;
import javax.swing.plaf.ComponentUI;
import javax.swing.plaf.UIResource;
import javax.swing.plaf.synth.SynthContext;
import javax.swing.text.Style;
import javax.swing.text.StyleConstants;
import javax.swing.text.StyleContext;
import javax.swing.text.StyledDocument;
import com.seaglasslookandfeel.SeaGlassContext;
/**
 * SeaGlass TextPaneUI delegate.
 *
 * Based on SynthTextPaneUI by Georges Saab and David Karlton.
 *
 * The only reason this exists is that we had to modify SynthTextPaneUI.
 *
 * @see javax.swing.plaf.synth.SynthTextPaneUI
 */
public class SeaGlassTextPaneUI extends SeaGlassEditorPaneUI {
/**
 * Creates a UI for the JTextPane.
 *
 * @param c the JTextPane object (not used)
 * @return the UI object
 */
public static ComponentUI createUI(JComponent c) {
return new SeaGlassTextPaneUI();
}
/**
 * Fetches the name used as a key to lookup properties through the
 * UIManager. This is used as a prefix to all the standard
 * text properties.
 *
 * @return the name ("TextPane")
 */
@Override
protected String getPropertyPrefix() {
return "TextPane";
}
/**
 * Installs the UI for a component. This does the following
 * things.
 * <ol>
 * <li>
 * Sets opaqueness of the associated component according to its style,
 * if the opaque property has not already been set by the client program.
 * <li>
 * Installs the default caret and highlighter into the
 * associated component. These properties are only set if their
 * current value is either {@code null} or an instance of
 * {@link UIResource}.
 * <li>
 * Attaches to the editor and model. If there is no
 * model, a default one is created.
 * <li>
 * Creates the view factory and the view hierarchy used
 * to represent the model.
 * </ol>
 *
 * @param c the editor component
 * @see javax.swing.plaf.basic.BasicTextUI#installUI
 * @see ComponentUI#installUI
 */
@Override
public void installUI(JComponent c) {
super.installUI(c);
// Push the component's current foreground and font into the
// document's default style.
updateForeground(c.getForeground());
updateFont(c.getFont());
}
/**
 * This method gets called when a bound property is changed
 * on the associated JTextComponent. This is a hook
 * which UI implementations may change to reflect how the
 * UI displays bound properties of JTextComponent subclasses.
 * If the font, foreground or document has changed, the
 * the appropriate property is set in the default style of
 * the document.
 *
 * @param evt the property change event
 */
@Override
protected void propertyChange(PropertyChangeEvent evt) {
super.propertyChange(evt);
String name = evt.getPropertyName();
if (name.equals("foreground")) {
updateForeground((Color)evt.getNewValue());
} else if (name.equals("font")) {
updateFont((Font)evt.getNewValue());
} else if (name.equals("document")) {
// A new document was installed: re-apply the component's
// foreground and font to its default style.
JComponent comp = getComponent();
updateForeground(comp.getForeground());
updateFont(comp.getFont());
}
}
/**
 * Update the color in the default style of the document.
 *
 * @param color the new color to use or null to remove the color attribute
 * from the document's style
 */
private void updateForeground(Color color) {
StyledDocument doc = (StyledDocument)getComponent().getDocument();
Style style = doc.getStyle(StyleContext.DEFAULT_STYLE);
// No default style to update — nothing to do.
if (style == null) {
return;
}
if (color == null) {
style.removeAttribute(StyleConstants.Foreground);
} else {
StyleConstants.setForeground(style, color);
}
}
/**
 * Update the font in the default style of the document.
 *
 * @param font the new font to use or null to remove the font attribute
 * from the document's style
 */
private void updateFont(Font font) {
StyledDocument doc = (StyledDocument)getComponent().getDocument();
Style style = doc.getStyle(StyleContext.DEFAULT_STYLE);
// No default style to update — nothing to do.
if (style == null) {
return;
}
if (font == null) {
style.removeAttribute(StyleConstants.FontFamily);
style.removeAttribute(StyleConstants.FontSize);
style.removeAttribute(StyleConstants.Bold);
style.removeAttribute(StyleConstants.Italic);
} else {
StyleConstants.setFontFamily(style, font.getName());
StyleConstants.setFontSize(style, font.getSize());
StyleConstants.setBold(style, font.isBold());
StyleConstants.setItalic(style, font.isItalic());
}
}
/**
 * Paints the text pane background through the SeaGlass painter.
 */
@Override
void paintBackground(SynthContext context, Graphics g, JComponent c) {
((SeaGlassContext)context).getPainter().paintTextPaneBackground(context, g, 0, 0,
c.getWidth(), c.getHeight());
}
/**
 * @inheritDoc
 */
@Override
public void paintBorder(SynthContext context, Graphics g, int x,
int y, int w, int h) {
((SeaGlassContext)context).getPainter().paintTextPaneBorder(context, g, x, y, w, h);
}
}
| khuxtable/seaglass | src/main/java/com/seaglasslookandfeel/ui/SeaGlassTextPaneUI.java | Java | apache-2.0 | 6,509 |
/**
* Most of the code in the Qalingo project is copyrighted Hoteia and licensed
* under the Apache License Version 2.0 (release version 0.8.0)
* http://www.apache.org/licenses/LICENSE-2.0
*
* Copyright (c) Hoteia, 2012-2014
* http://www.hoteia.com - http://twitter.com/hoteia - contact@hoteia.com
*
*/
package org.hoteia.qalingo.core.service.pojo;
import java.util.List;
import java.util.Set;
import org.dozer.Mapper;
import org.hoteia.qalingo.core.domain.Customer;
import org.hoteia.qalingo.core.domain.CustomerMarketArea;
import org.hoteia.qalingo.core.domain.CustomerWishlist;
import org.hoteia.qalingo.core.domain.MarketArea;
import org.hoteia.qalingo.core.pojo.customer.CustomerPojo;
import org.hoteia.qalingo.core.pojo.customer.CustomerWishlistPojo;
import org.hoteia.qalingo.core.pojo.util.mapper.PojoUtil;
import org.hoteia.qalingo.core.service.CustomerService;
import org.hoteia.qalingo.core.service.MarketService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service("customerPojoService")
@Transactional(readOnly = true)
public class CustomerPojoService {
private final Logger logger = LoggerFactory.getLogger(getClass());
@Autowired
private Mapper dozerBeanMapper;
@Autowired
protected MarketService marketService;
@Autowired
private CustomerService customerService;
/**
 * Returns all customers mapped to their POJO representation.
 */
public List<CustomerPojo> getAllCustomers() {
List<Customer> customers = customerService.findCustomers();
logger.debug("Found {} customers", customers.size());
return PojoUtil.mapAll(dozerBeanMapper, customers, CustomerPojo.class);
}
/**
 * Looks up a customer by id; returns null when no customer matches.
 */
public CustomerPojo getCustomerById(final String id) {
Customer customer = customerService.getCustomerById(id);
logger.debug("Found customer {} for id {}", customer, id);
return customer == null ? null : dozerBeanMapper.map(customer, CustomerPojo.class);
}
/**
 * Looks up a customer by login or email; returns null when not found.
 */
public CustomerPojo getCustomerByLoginOrEmail(final String usernameOrEmail) {
Customer customer = customerService.getCustomerByLoginOrEmail(usernameOrEmail);
logger.debug("Found customer {} for usernameOrEmail {}", customer, usernameOrEmail);
return customer == null ? null : dozerBeanMapper.map(customer, CustomerPojo.class);
}
/**
 * Looks up a customer by permalink; returns null when not found.
 */
public CustomerPojo getCustomerByPermalink(final String permalink) {
Customer customer = customerService.getCustomerByPermalink(permalink);
logger.debug("Found customer {} for usernameOrEmail {}", customer, permalink);
return customer == null ? null : dozerBeanMapper.map(customer, CustomerPojo.class);
}
/**
 * Maps the given POJO back to a Customer entity and persists it.
 * Runs in a read-write transaction, overriding the class-level
 * readOnly setting.
 */
@Transactional
public void saveOrUpdate(final CustomerPojo customerJsonPojo) throws Exception {
Customer customer = dozerBeanMapper.map(customerJsonPojo, Customer.class);
logger.info("Saving customer {}", customer);
customerService.saveOrUpdateCustomer(customer);
}
/**
 * Returns the customer's wishlist products for the given market area,
 * mapped to their POJO representation.
 */
public List<CustomerWishlistPojo> getWishlist(final Customer customer, final MarketArea marketArea) {
final CustomerMarketArea customerMarketArea = customer.getCurrentCustomerMarketArea(marketArea.getId());
Set<CustomerWishlist> wishlistProducts = customerMarketArea.getWishlistProducts();
List<CustomerWishlistPojo> wishlists = PojoUtil.mapAll(dozerBeanMapper, wishlistProducts, CustomerWishlistPojo.class);
return wishlists;
}
/**
 * Delegates adding a product SKU to the customer's wishlist.
 */
public void addProductSkuToWishlist(MarketArea marketArea, Customer customer, String catalogCategoryCode, String productSkuCode) throws Exception {
customerService.addProductSkuToWishlist(marketArea, customer, catalogCategoryCode, productSkuCode);
}
} | eric-stanley/qalingo-engine | apis/api-core/api-core-common/src/main/java/org/hoteia/qalingo/core/service/pojo/CustomerPojoService.java | Java | apache-2.0 | 3,861 |
<?php
/**
* Copyright (c) 2012 Robin Appelman <icewind@owncloud.com>
* This file is licensed under the Affero General Public License version 3 or
* later.
* See the COPYING-README file.
*/
abstract class OC_Archive{
	/**
	 * open any of the supported archive types
	 * @param string $path
	 * @return OC_Archive|void an archive instance, or nothing when the
	 * extension is not recognized
	 */
	public static function open($path) {
		// strrpos() returns false when there is no '.', in which case the
		// whole path is used and no case below matches.
		$ext=substr($path, strrpos($path, '.'));
		switch($ext) {
			case '.zip':
				return new OC_Archive_ZIP($path);
			case '.gz':
			case '.bz':
			case '.bz2':
			case '.tgz':
			case '.tar':
				return new OC_Archive_TAR($path);
		}
	}
	/**
	 * @param $source
	 */
	abstract function __construct($source);
	/**
	 * add an empty folder to the archive
	 * @param string $path
	 * @return bool
	 */
	abstract function addFolder($path);
	/**
	 * add a file to the archive
	 * @param string $path
	 * @param string $source either a local file or string data
	 * @return bool
	 */
	abstract function addFile($path, $source='');
	/**
	 * rename a file or folder in the archive
	 * @param string $source
	 * @param string $dest
	 * @return bool
	 */
	abstract function rename($source, $dest);
	/**
	 * get the uncompressed size of a file in the archive
	 * @param string $path
	 * @return int
	 */
	abstract function filesize($path);
	/**
	 * get the last modified time of a file in the archive
	 * @param string $path
	 * @return int
	 */
	abstract function mtime($path);
	/**
	 * get the files in a folder
	 * @param string $path
	 * @return array
	 */
	abstract function getFolder($path);
	/**
	 * get all files in the archive
	 * @return array
	 */
	abstract function getFiles();
	/**
	 * get the content of a file
	 * @param string $path
	 * @return string
	 */
	abstract function getFile($path);
	/**
	 * extract a single file from the archive
	 * @param string $path
	 * @param string $dest
	 * @return bool
	 */
	abstract function extractFile($path, $dest);
	/**
	 * extract the archive
	 * @param string $dest
	 * @return bool
	 */
	abstract function extract($dest);
	/**
	 * check if a file or folder exists in the archive
	 * @param string $path
	 * @return bool
	 */
	abstract function fileExists($path);
	/**
	 * remove a file or folder from the archive
	 * @param string $path
	 * @return bool
	 */
	abstract function remove($path);
	/**
	 * get a file handler
	 * @param string $path
	 * @param string $mode
	 * @return resource
	 */
	abstract function getStream($path, $mode);
	/**
	 * add a folder and all its content
	 * @param string $path destination folder inside the archive
	 * @param string $source local folder to copy from
	 * @return boolean|null
	 */
	function addRecursive($path, $source) {
		$dh = opendir($source);
		if(is_resource($dh)) {
			$this->addFolder($path);
			while (($file = readdir($dh)) !== false) {
				if($file=='.' or $file=='..') {
					continue;
				}
				if(is_dir($source.'/'.$file)) {
					$this->addRecursive($path.'/'.$file, $source.'/'.$file);
				}else{
					$this->addFile($path.'/'.$file, $source.'/'.$file);
				}
			}
			// Fix: release the directory handle; it was previously leaked
			// for every directory visited during the recursion.
			closedir($dh);
		}
	}
}
| kebenxiaoming/owncloudRedis | lib/private/archive.php | PHP | apache-2.0 | 2,984 |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.engine.impl.context;
import org.activiti.engine.impl.persistence.entity.DeploymentEntity;
import org.activiti.engine.impl.persistence.entity.ExecutionEntity;
import org.activiti.engine.impl.util.ProcessDefinitionUtil;
import org.activiti.engine.repository.ProcessDefinition;
/**
 * Read-only view over an {@link ExecutionEntity}, giving convenient access to the
 * process instance, process definition and deployment it belongs to.
 *
 * @author Tom Baeyens
 */
public class ExecutionContext {

  protected ExecutionEntity execution;

  public ExecutionContext(ExecutionEntity execution) {
    this.execution = execution;
  }

  /** @return the execution wrapped by this context */
  public ExecutionEntity getExecution() {
    return execution;
  }

  /** @return the root process instance of the wrapped execution */
  public ExecutionEntity getProcessInstance() {
    return execution.getProcessInstance();
  }

  /** @return the process definition the wrapped execution was started from */
  public ProcessDefinition getProcessDefinition() {
    return ProcessDefinitionUtil.getProcessDefinition(execution.getProcessDefinitionId());
  }

  /** @return the deployment containing the process definition, looked up via the current command context */
  public DeploymentEntity getDeployment() {
    // resolve the deployment id first, then fetch the entity through the command context
    String deploymentId = getProcessDefinition().getDeploymentId();
    return Context.getCommandContext().getDeploymentEntityManager().findById(deploymentId);
  }
}
| roberthafner/flowable-engine | modules/flowable-engine/src/main/java/org/activiti/engine/impl/context/ExecutionContext.java | Java | apache-2.0 | 1,626 |
package nak.liblinear;
import static nak.liblinear.Linear.info;
/**
* Trust Region Newton Method optimization
*/
/**
 * Trust Region Newton Method optimization (TRON).
 * <p>
 * Minimizes the objective supplied as a {@link Function} (value, gradient and
 * Hessian-vector products). Each outer iteration solves the trust-region
 * subproblem with conjugate gradients ({@link #trcg}) and then grows or shrinks
 * the trust region radius according to how well the quadratic model predicted
 * the actual reduction.
 */
class Tron {

    private final Function fun_obj;
    // relative stopping tolerance: stop when |g| <= eps * |g0|
    private final double eps;
    // hard cap on outer iterations
    private final int max_iter;

    public Tron( final Function fun_obj ) {
        this(fun_obj, 0.1);
    }

    public Tron( final Function fun_obj, double eps ) {
        this(fun_obj, eps, 1000);
    }

    public Tron( final Function fun_obj, double eps, int max_iter ) {
        this.fun_obj = fun_obj;
        this.eps = eps;
        this.max_iter = max_iter;
    }

    /**
     * Runs the optimization; the minimizer is written into {@code w}.
     *
     * @param w weight vector, used as output only (it is zero-initialized here)
     */
    void tron(double[] w) {
        // Parameters for updating the iterates.
        double eta0 = 1e-4, eta1 = 0.25, eta2 = 0.75;
        // Parameters for updating the trust region size delta.
        double sigma1 = 0.25, sigma2 = 0.5, sigma3 = 4;
        int n = fun_obj.get_nr_variable();
        int i, cg_iter;
        double delta, snorm, one = 1.0;
        double alpha, f, fnew, prered, actred, gs;
        int search = 1, iter = 1;
        double[] s = new double[n];      // trust-region step
        double[] r = new double[n];      // CG residual
        double[] w_new = new double[n];  // trial iterate w + s
        double[] g = new double[n];      // gradient at w
        // start from the origin
        for (i = 0; i < n; i++)
            w[i] = 0;
        f = fun_obj.fun(w);
        fun_obj.grad(w, g);
        // initial trust region radius = |g|
        delta = euclideanNorm(g);
        double gnorm1 = delta;
        double gnorm = gnorm1;
        if (gnorm <= eps * gnorm1) search = 0;
        iter = 1;
        while (iter <= max_iter && search != 0) {
            // solve the trust-region subproblem for step s (and residual r)
            cg_iter = trcg(delta, g, s, r);
            System.arraycopy(w, 0, w_new, 0, n);
            daxpy(one, s, w_new);
            gs = dot(g, s);
            // reduction predicted by the quadratic model
            prered = -0.5 * (gs - dot(s, r));
            fnew = fun_obj.fun(w_new);
            // Compute the actual reduction.
            actred = f - fnew;
            // On the first iteration, adjust the initial step bound.
            snorm = euclideanNorm(s);
            if (iter == 1) delta = Math.min(delta, snorm);
            // Compute prediction alpha*snorm of the step.
            if (fnew - f - gs <= 0)
                alpha = sigma3;
            else
                alpha = Math.max(sigma1, -0.5 * (gs / (fnew - f - gs)));
            // Update the trust region bound according to the ratio of actual to
            // predicted reduction.
            if (actred < eta0 * prered)
                delta = Math.min(Math.max(alpha, sigma1) * snorm, sigma2 * delta);
            else if (actred < eta1 * prered)
                delta = Math.max(sigma1 * delta, Math.min(alpha * snorm, sigma2 * delta));
            else if (actred < eta2 * prered)
                delta = Math.max(sigma1 * delta, Math.min(alpha * snorm, sigma3 * delta));
            else
                delta = Math.max(delta, Math.min(alpha * snorm, sigma3 * delta));
            info("iter %2d act %5.3e pre %5.3e delta %5.3e f %5.3e |g| %5.3e CG %3d%n", iter, actred, prered, delta, f, gnorm, cg_iter);
            // accept the step only if the actual reduction is good enough
            if (actred > eta0 * prered) {
                iter++;
                System.arraycopy(w_new, 0, w, 0, n);
                f = fnew;
                fun_obj.grad(w, g);
                gnorm = euclideanNorm(g);
                if (gnorm <= eps * gnorm1) break;
            }
            // safeguards against divergence / numerical stagnation
            if (f < -1.0e+32) {
                info("WARNING: f < -1.0e+32%n");
                break;
            }
            if (Math.abs(actred) <= 0 && prered <= 0) {
                info("WARNING: actred and prered <= 0%n");
                break;
            }
            if (Math.abs(actred) <= 1.0e-12 * Math.abs(f) && Math.abs(prered) <= 1.0e-12 * Math.abs(f)) {
                info("WARNING: actred and prered too small%n");
                break;
            }
        }
    }

    /**
     * Conjugate gradient solver for the trust-region subproblem: approximately
     * minimizes the quadratic model, truncating the step at the boundary of the
     * trust region of radius {@code delta}.
     *
     * @param delta trust region radius
     * @param g gradient at the current iterate (input)
     * @param s computed step (output)
     * @param r final residual (output)
     * @return number of CG iterations performed
     */
    private int trcg(double delta, double[] g, double[] s, double[] r) {
        int n = fun_obj.get_nr_variable();
        double one = 1;
        double[] d = new double[n];   // CG search direction
        double[] Hd = new double[n];  // Hessian-vector product H*d
        double rTr, rnewTrnew, cgtol;
        // start with s = 0, r = d = -g
        for (int i = 0; i < n; i++) {
            s[i] = 0;
            r[i] = -g[i];
            d[i] = r[i];
        }
        cgtol = 0.1 * euclideanNorm(g);
        int cg_iter = 0;
        rTr = dot(r, r);
        while (true) {
            if (euclideanNorm(r) <= cgtol) break;
            cg_iter++;
            fun_obj.Hv(d, Hd);
            double alpha = rTr / dot(d, Hd);
            daxpy(alpha, d, s);
            if (euclideanNorm(s) > delta) {
                // step left the trust region: back up and move exactly to the boundary
                info("cg reaches trust region boundary%n");
                alpha = -alpha;
                daxpy(alpha, d, s);
                // solve |s + alpha*d| = delta for alpha (quadratic formula, numerically stable branch)
                double std = dot(s, d);
                double sts = dot(s, s);
                double dtd = dot(d, d);
                double dsq = delta * delta;
                double rad = Math.sqrt(std * std + dtd * (dsq - sts));
                if (std >= 0)
                    alpha = (dsq - sts) / (std + rad);
                else
                    alpha = (rad - std) / dtd;
                daxpy(alpha, d, s);
                alpha = -alpha;
                daxpy(alpha, Hd, r);
                break;
            }
            alpha = -alpha;
            daxpy(alpha, Hd, r);
            rnewTrnew = dot(r, r);
            double beta = rnewTrnew / rTr;
            // d = r + beta * d
            scale(beta, d);
            daxpy(one, r, d);
            rTr = rnewTrnew;
        }
        return (cg_iter);
    }

    /**
     * constant times a vector plus a vector
     *
     * <pre>
     * vector2 += constant * vector1
     * </pre>
     *
     * @since 1.8
     */
    private static void daxpy(double constant, double vector1[], double vector2[]) {
        if (constant == 0) return;
        assert vector1.length == vector2.length;
        for (int i = 0; i < vector1.length; i++) {
            vector2[i] += constant * vector1[i];
        }
    }

    /**
     * returns the dot product of two vectors
     *
     * @since 1.8
     */
    private static double dot(double vector1[], double vector2[]) {
        double product = 0;
        assert vector1.length == vector2.length;
        for (int i = 0; i < vector1.length; i++) {
            product += vector1[i] * vector2[i];
        }
        return product;
    }

    /**
     * returns the euclidean norm of a vector
     *
     * @since 1.8
     */
    private static double euclideanNorm(double vector[]) {
        int n = vector.length;
        if (n < 1) {
            return 0;
        }
        if (n == 1) {
            return Math.abs(vector[0]);
        }
        // this algorithm is (often) more accurate than just summing up the squares and taking the square-root afterwards
        double scale = 0; // scaling factor that is factored out
        double sum = 1; // basic sum of squares from which scale has been factored out
        for (int i = 0; i < n; i++) {
            if (vector[i] != 0) {
                double abs = Math.abs(vector[i]);
                // try to get the best scaling factor
                if (scale < abs) {
                    double t = scale / abs;
                    sum = 1 + sum * (t * t);
                    scale = abs;
                } else {
                    double t = abs / scale;
                    sum += t * t;
                }
            }
        }
        return scale * Math.sqrt(sum);
    }

    /**
     * scales a vector by a constant
     *
     * @since 1.8
     */
    private static void scale(double constant, double vector[]) {
        if (constant == 1.0) return;
        for (int i = 0; i < vector.length; i++) {
            vector[i] *= constant;
        }
    }
}
| scalanlp/nak | src/main/java/nak/liblinear/Tron.java | Java | apache-2.0 | 7,591 |
/*
Copyright IBM Corp. 2016 All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package rawledger
import (
cb "github.com/hyperledger/fabric/protos/common"
ab "github.com/hyperledger/fabric/protos/orderer"
)
// Iterator is useful for a chain Reader to stream blocks as they are created
type Iterator interface {
	// Next blocks until there is a new block available, or returns an error if the next block is no longer retrievable
	Next() (*cb.Block, cb.Status)
	// ReadyChan supplies a channel which will block until Next will not block
	ReadyChan() <-chan struct{}
}

// Reader allows the caller to inspect the raw ledger
type Reader interface {
	// Iterator retrieves an Iterator, as specified by a cb.SeekInfo message, returning the iterator and its starting block number
	Iterator(startType ab.SeekInfo_StartType, specified uint64) (Iterator, uint64)
	// Height returns the highest block number in the chain, plus one
	Height() uint64
}

// Writer allows the caller to modify the raw ledger
type Writer interface {
	// Append a new block to the ledger, assembled from the given envelopes and consensus proof
	Append(blockContents []*cb.Envelope, proof []byte) *cb.Block
}

// ReadWriter encapsulates both the reading and writing functions of the rawledger
type ReadWriter interface {
	Reader
	Writer
}
| stonejiang208/fabric | orderer/rawledger/rawledger.go | GO | apache-2.0 | 1,766 |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl.source.xml;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiElement;
import com.intellij.psi.XmlElementFactory;
import com.intellij.psi.impl.source.xml.behavior.DefaultXmlPsiPolicy;
import com.intellij.psi.search.PsiElementProcessor;
import com.intellij.psi.xml.*;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class XmlTagValueImpl implements XmlTagValue{
  private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.source.xml.XmlTagValueImpl");

  private final XmlTag myTag;
  private final XmlTagChild[] myElements;
  // Lazily computed caches. They are volatile so that a fully built value published by
  // one thread is visible to others; recomputing concurrently is harmless (idempotent).
  private volatile XmlText[] myTextElements;
  private volatile String myText;
  private volatile String myTrimmedText;

  public XmlTagValueImpl(@NotNull XmlTagChild[] bodyElements, @NotNull XmlTag tag) {
    myTag = tag;
    myElements = bodyElements;
  }

  @Override
  @NotNull
  public XmlTagChild[] getChildren() {
    return myElements;
  }

  /** Returns only the {@link XmlText} children of the tag body, cached after first computation. */
  @Override
  @NotNull
  public XmlText[] getTextElements() {
    XmlText[] textElements = myTextElements;
    if (textElements == null) {
      textElements = Arrays.stream(myElements)
        .filter(element -> element instanceof XmlText)
        .map(element -> (XmlText)element).toArray(XmlText[]::new);
      myTextElements = textElements = textElements.length == 0 ? XmlText.EMPTY_ARRAY : textElements;
    }
    return textElements;
  }

  /** Returns the raw concatenated text of all body elements (tags, text, etc.), cached. */
  @Override
  @NotNull
  public String getText() {
    String text = myText;
    if (text == null) {
      final StringBuilder consolidatedText = new StringBuilder();
      for (final XmlTagChild element : myElements) {
        consolidatedText.append(element.getText());
      }
      myText = text = consolidatedText.toString();
    }
    return text;
  }

  /**
   * Returns the text range covered by the tag body; for an empty body, returns an empty
   * range placed just after the start tag's '>' (or at the tag end if no '>' was found).
   */
  @Override
  @NotNull
  public TextRange getTextRange() {
    if(myElements.length == 0){
      final ASTNode child = XmlChildRole.START_TAG_END_FINDER.findChild( (ASTNode)myTag);
      if(child != null)
        return new TextRange(child.getStartOffset() + 1, child.getStartOffset() + 1);
      return new TextRange(myTag.getTextRange().getEndOffset(), myTag.getTextRange().getEndOffset());
    }
    return new TextRange(myElements[0].getTextRange().getStartOffset(), myElements[myElements.length - 1].getTextRange().getEndOffset());
  }

  /** Returns the decoded text value (entities resolved) of the text children, trimmed; cached. */
  @Override
  @NotNull
  public String getTrimmedText() {
    String trimmedText = myTrimmedText;
    if (trimmedText == null) {
      final StringBuilder consolidatedText = new StringBuilder();
      final XmlText[] textElements = getTextElements();
      for (final XmlText textElement : textElements) {
        consolidatedText.append(textElement.getValue());
      }
      myTrimmedText = trimmedText = consolidatedText.toString().trim();
    }
    return trimmedText;
  }

  @Override
  public void setText(String value) {
    setText(value, false);
  }

  @Override
  public void setEscapedText(String value) {
    setText(value, true);
  }

  /**
   * Replaces the whole tag body with the given text; all other children are removed.
   * When {@code defaultPolicy} is true the text is stored verbatim via the default
   * XML PSI policy (escaped text), otherwise entity-encoding is applied by setValue().
   */
  private void setText(String value, boolean defaultPolicy) {
    try {
      XmlText text = null;
      if (value != null) {
        final XmlText[] texts = getTextElements();
        if (texts.length == 0) {
          // no text child yet: create a placeholder to overwrite below
          text = (XmlText)myTag.add(XmlElementFactory.getInstance(myTag.getProject()).createDisplayText("x"));
        } else {
          text = texts[0];
        }
        if (StringUtil.isEmpty(value)) {
          text.delete();
        }
        else {
          if (defaultPolicy && text instanceof XmlTextImpl) {
            ((XmlTextImpl)text).doSetValue(value, new DefaultXmlPsiPolicy());
          } else {
            text.setValue(value);
          }
        }
      }

      // drop every other former body element, keeping only the text node set above
      if(myElements.length > 0){
        for (final XmlTagChild child : myElements) {
          if (child != text) {
            child.delete();
          }
        }
      }
    }
    catch (IncorrectOperationException e) {
      LOG.error(e);
    }
  }

  /** @return true if any text child contains a CDATA section */
  @Override
  public boolean hasCDATA() {
    for (XmlText xmlText : getTextElements()) {
      PsiElement[] children = xmlText.getChildren();
      for (PsiElement child : children) {
        if (child.getNode().getElementType() == XmlElementType.XML_CDATA) {
          return true;
        }
      }
    }
    return false;
  }

  /**
   * Factory: collects the children located between the start tag's '>' and the end tag's
   * '&lt;/' of the given tag and wraps them in a new XmlTagValueImpl.
   */
  public static XmlTagValue createXmlTagValue(XmlTag tag) {
    final List<XmlTagChild> bodyElements = new ArrayList<>();

    tag.processElements(new PsiElementProcessor() {
      boolean insideBody;
      @Override
      public boolean execute(@NotNull PsiElement element) {
        final ASTNode treeElement = element.getNode();
        if (insideBody) {
          // stop as soon as the closing tag starts
          if (treeElement != null && treeElement.getElementType() == XmlTokenType.XML_END_TAG_START) return false;
          if (!(element instanceof XmlTagChild)) return true;
          bodyElements.add((XmlTagChild)element);
        }
        else if (treeElement != null && treeElement.getElementType() == XmlTokenType.XML_TAG_END) insideBody = true;
        return true;
      }
    }, tag);

    XmlTagChild[] tagChildren = bodyElements.toArray(XmlTagChild.EMPTY_ARRAY);
    return new XmlTagValueImpl(tagChildren, tag);
  }
}
| paplorinc/intellij-community | xml/xml-psi-impl/src/com/intellij/psi/impl/source/xml/XmlTagValueImpl.java | Java | apache-2.0 | 5,970 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.core.spring.security;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import javax.security.auth.login.AccountNotFoundException;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.BooleanUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.commons.lang3.tuple.Triple;
import org.apache.syncope.common.keymaster.client.api.ConfParamOps;
import org.apache.syncope.common.lib.SyncopeConstants;
import org.apache.syncope.common.lib.types.AnyTypeKind;
import org.apache.syncope.common.lib.types.AuditElements;
import org.apache.syncope.common.lib.types.EntitlementsHolder;
import org.apache.syncope.common.lib.types.IdRepoEntitlement;
import org.apache.syncope.core.persistence.api.ImplementationLookup;
import org.apache.syncope.core.persistence.api.dao.AccessTokenDAO;
import org.apache.syncope.core.persistence.api.dao.AnySearchDAO;
import org.apache.syncope.core.persistence.api.entity.AnyType;
import org.apache.syncope.core.persistence.api.entity.resource.Provision;
import org.apache.syncope.core.provisioning.api.utils.RealmUtils;
import org.apache.syncope.core.persistence.api.dao.AnyTypeDAO;
import org.apache.syncope.core.persistence.api.dao.DelegationDAO;
import org.apache.syncope.core.persistence.api.dao.GroupDAO;
import org.apache.syncope.core.persistence.api.dao.RealmDAO;
import org.apache.syncope.core.persistence.api.dao.RoleDAO;
import org.apache.syncope.core.persistence.api.dao.UserDAO;
import org.apache.syncope.core.persistence.api.dao.search.AttrCond;
import org.apache.syncope.core.persistence.api.dao.search.SearchCond;
import org.apache.syncope.core.persistence.api.entity.AccessToken;
import org.apache.syncope.core.persistence.api.entity.Delegation;
import org.apache.syncope.core.persistence.api.entity.DynRealm;
import org.apache.syncope.core.persistence.api.entity.Realm;
import org.apache.syncope.core.persistence.api.entity.Role;
import org.apache.syncope.core.persistence.api.entity.resource.ExternalResource;
import org.apache.syncope.core.persistence.api.entity.user.User;
import org.apache.syncope.core.provisioning.api.AuditManager;
import org.apache.syncope.core.provisioning.api.ConnectorManager;
import org.apache.syncope.core.provisioning.api.MappingManager;
import org.apache.syncope.core.spring.ApplicationContextProvider;
import org.identityconnectors.framework.common.objects.Uid;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.security.authentication.AuthenticationCredentialsNotFoundException;
import org.springframework.security.authentication.DisabledException;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.security.web.authentication.session.SessionAuthenticationException;
import org.springframework.transaction.annotation.Transactional;
/**
* Domain-sensible (via {@code @Transactional}) access to authentication / authorization data.
*
* @see JWTAuthenticationProvider
* @see UsernamePasswordAuthenticationProvider
* @see SyncopeAuthenticationDetails
*/
public class AuthDataAccessor {
    protected static final Logger LOG = LoggerFactory.getLogger(AuthDataAccessor.class);

    // key of the special role granting group-owner entitlements
    public static final String GROUP_OWNER_ROLE = "GROUP_OWNER";

    protected static final Encryptor ENCRYPTOR = Encryptor.getInstance();

    // fixed authority sets for the two special cases: anonymous access and pending password change
    protected static final Set<SyncopeGrantedAuthority> ANONYMOUS_AUTHORITIES =
            Set.of(new SyncopeGrantedAuthority(IdRepoEntitlement.ANONYMOUS));

    protected static final Set<SyncopeGrantedAuthority> MUST_CHANGE_PASSWORD_AUTHORITIES =
            Set.of(new SyncopeGrantedAuthority(IdRepoEntitlement.MUST_CHANGE_PASSWORD));

    protected final SecurityProperties securityProperties;

    protected final RealmDAO realmDAO;

    protected final UserDAO userDAO;

    protected final GroupDAO groupDAO;

    protected final AnyTypeDAO anyTypeDAO;

    protected final AnySearchDAO anySearchDAO;

    protected final AccessTokenDAO accessTokenDAO;

    protected final ConfParamOps confParamOps;

    protected final RoleDAO roleDAO;

    protected final DelegationDAO delegationDAO;

    protected final ConnectorManager connectorManager;

    protected final AuditManager auditManager;

    protected final MappingManager mappingManager;

    protected final ImplementationLookup implementationLookup;

    // registry of JWT SSO providers keyed by issuer; lazily initialized in
    // getJWTSSOProvider() under synchronized(this) — do not access it unsynchronized
    private Map<String, JWTSSOProvider> jwtSSOProviders;
    /**
     * Constructor-injects every collaborator needed for authentication / authorization
     * lookups; contains no logic beyond field assignment.
     */
    public AuthDataAccessor(
            final SecurityProperties securityProperties,
            final RealmDAO realmDAO,
            final UserDAO userDAO,
            final GroupDAO groupDAO,
            final AnyTypeDAO anyTypeDAO,
            final AnySearchDAO anySearchDAO,
            final AccessTokenDAO accessTokenDAO,
            final ConfParamOps confParamOps,
            final RoleDAO roleDAO,
            final DelegationDAO delegationDAO,
            final ConnectorManager connectorManager,
            final AuditManager auditManager,
            final MappingManager mappingManager,
            final ImplementationLookup implementationLookup) {

        this.securityProperties = securityProperties;
        this.realmDAO = realmDAO;
        this.userDAO = userDAO;
        this.groupDAO = groupDAO;
        this.anyTypeDAO = anyTypeDAO;
        this.anySearchDAO = anySearchDAO;
        this.accessTokenDAO = accessTokenDAO;
        this.confParamOps = confParamOps;
        this.roleDAO = roleDAO;
        this.delegationDAO = delegationDAO;
        this.connectorManager = connectorManager;
        this.auditManager = auditManager;
        this.mappingManager = mappingManager;
        this.implementationLookup = implementationLookup;
    }
public JWTSSOProvider getJWTSSOProvider(final String issuer) {
synchronized (this) {
if (jwtSSOProviders == null) {
jwtSSOProviders = new HashMap<>();
implementationLookup.getJWTSSOProviderClasses().stream().
map(clazz -> (JWTSSOProvider) ApplicationContextProvider.getBeanFactory().
createBean(clazz, AbstractBeanDefinition.AUTOWIRE_BY_TYPE, true)).
forEach(jwtSSOProvider -> jwtSSOProviders.put(jwtSSOProvider.getIssuer(), jwtSSOProvider));
}
}
if (issuer == null) {
throw new AuthenticationCredentialsNotFoundException("A null issuer is not permitted");
}
JWTSSOProvider provider = jwtSSOProviders.get(issuer);
if (provider == null) {
throw new AuthenticationCredentialsNotFoundException(
"Could not find any registered JWTSSOProvider for issuer " + issuer);
}
return provider;
}
    /**
     * Resolves the delegation under which the authenticating user is operating, if any.
     *
     * @param details authentication details, possibly carrying the delegating user (key or username)
     * @param delegatedKey key of the user being authenticated (the delegated user)
     * @return key of a currently valid delegation from the delegating to the delegated user,
     * or {@code null} when no delegation was requested
     * @throws SessionAuthenticationException if the delegating user cannot be found or no valid
     * delegation exists between the two users
     */
    protected String getDelegationKey(final SyncopeAuthenticationDetails details, final String delegatedKey) {
        if (details.getDelegatedBy() == null) {
            return null;
        }

        // delegatedBy may be either a key (UUID) or a username: resolve it to a key
        String delegatingKey = SyncopeConstants.UUID_PATTERN.matcher(details.getDelegatedBy()).matches()
                ? details.getDelegatedBy()
                : userDAO.findKey(details.getDelegatedBy());
        if (delegatingKey == null) {
            throw new SessionAuthenticationException(
                    "Delegating user " + details.getDelegatedBy() + " cannot be found");
        }

        LOG.debug("Delegation request: delegating:{}, delegated:{}", delegatingKey, delegatedKey);

        return delegationDAO.findValidFor(delegatingKey, delegatedKey).
                orElseThrow(() -> new SessionAuthenticationException(
                "Delegation by " + delegatingKey + " was requested but none found"));
    }
    /**
     * Attempts to authenticate the given credentials against internal storage and pass-through resources (if
     * configured): the first succeeding causes global success.
     * <p>
     * The user is looked up by username or, when configured via the {@code authentication.attributes}
     * parameter, by other attributes; on a hit, suspension and allowed statuses are enforced, the
     * failed-login counter and (optionally) the last-login date are updated on the user entity.
     *
     * @param domain domain
     * @param authentication given credentials
     * @return {@code null} if no matching user was found, authentication result otherwise
     * @throws DisabledException if the matched user is suspended or in a status not allowed to authenticate
     */
    @Transactional(noRollbackFor = DisabledException.class)
    public Triple<User, Boolean, String> authenticate(final String domain, final Authentication authentication) {
        User user = null;

        // try each configured authentication attribute in order until a user is uniquely matched
        String[] authAttrValues = confParamOps.get(
                domain, "authentication.attributes", new String[] { "username" }, String[].class);
        for (int i = 0; user == null && i < authAttrValues.length; i++) {
            if ("username".equals(authAttrValues[i])) {
                user = userDAO.findByUsername(authentication.getName());
            } else {
                AttrCond attrCond = new AttrCond(AttrCond.Type.EQ);
                attrCond.setSchema(authAttrValues[i]);
                attrCond.setExpression(authentication.getName());
                try {
                    List<User> users = anySearchDAO.search(SearchCond.getLeaf(attrCond), AnyTypeKind.USER);
                    if (users.size() == 1) {
                        user = users.get(0);
                    } else {
                        // ambiguous match: do not authenticate, keep searching other attributes
                        LOG.warn("Search condition {} does not uniquely match a user", attrCond);
                    }
                } catch (IllegalArgumentException e) {
                    LOG.error("While searching user for authentication via {}", attrCond, e);
                }
            }
        }

        Boolean authenticated = null;
        String delegationKey = null;
        if (user != null) {
            authenticated = false;

            if (user.isSuspended() != null && user.isSuspended()) {
                throw new DisabledException("User " + user.getUsername() + " is suspended");
            }

            String[] authStatuses = confParamOps.get(
                    domain, "authentication.statuses", new String[] {}, String[].class);
            if (!ArrayUtils.contains(authStatuses, user.getStatus())) {
                throw new DisabledException("User " + user.getUsername() + " not allowed to authenticate");
            }

            boolean userModified = false;
            authenticated = authenticate(user, authentication.getCredentials().toString());
            if (authenticated) {
                // a successful login may happen under delegation (see getDelegationKey)
                delegationKey = getDelegationKey(
                        SyncopeAuthenticationDetails.class.cast(authentication.getDetails()), user.getKey());

                if (confParamOps.get(domain, "log.lastlogindate", true, Boolean.class)) {
                    user.setLastLoginDate(new Date());
                    userModified = true;
                }

                if (user.getFailedLogins() != 0) {
                    user.setFailedLogins(0);
                    userModified = true;
                }
            } else {
                // failed attempt: bump the counter so account policies can react
                user.setFailedLogins(user.getFailedLogins() + 1);
                userModified = true;
            }

            if (userModified) {
                userDAO.save(user);
            }
        }

        return Triple.of(user, authenticated, delegationKey);
    }
    /**
     * Verifies the given clear-text password against the internally stored (encrypted) one; if that
     * fails, tries pass-through authentication against each configured external resource until one
     * succeeds.
     *
     * @param user user being authenticated
     * @param password clear-text password to verify
     * @return whether any of the attempts succeeded
     */
    protected boolean authenticate(final User user, final String password) {
        boolean authenticated = ENCRYPTOR.verify(password, user.getCipherAlgorithm(), user.getPassword());
        LOG.debug("{} authenticated on internal storage: {}", user.getUsername(), authenticated);

        // stop at the first resource that authenticates successfully
        for (Iterator<? extends ExternalResource> itor = getPassthroughResources(user).iterator();
                itor.hasNext() && !authenticated;) {

            ExternalResource resource = itor.next();
            String connObjectKey = null;
            try {
                AnyType userType = anyTypeDAO.findUser();
                Provision provision = resource.getProvision(userType).
                        orElseThrow(() -> new AccountNotFoundException(
                        "Unable to locate provision for user type " + userType.getKey()));
                connObjectKey = mappingManager.getConnObjectKeyValue(user, provision).
                        orElseThrow(() -> new AccountNotFoundException(
                        "Unable to locate conn object key value for " + userType.getKey()));
                Uid uid = connectorManager.getConnector(resource).authenticate(connObjectKey, password, null);
                if (uid != null) {
                    authenticated = true;
                }
            } catch (Exception e) {
                // best-effort: a failure on one resource only means "not authenticated there"
                LOG.debug("Could not authenticate {} on {}", user.getUsername(), resource.getKey(), e);
            }
            LOG.debug("{} authenticated on {} as {}: {}",
                    user.getUsername(), resource.getKey(), connObjectKey, authenticated);
        }

        return authenticated;
    }
protected Set<? extends ExternalResource> getPassthroughResources(final User user) {
Set<? extends ExternalResource> result = null;
// 1. look for assigned resources, pick the ones whose account policy has authentication resources
for (ExternalResource resource : userDAO.findAllResources(user)) {
if (resource.getAccountPolicy() != null && !resource.getAccountPolicy().getResources().isEmpty()) {
if (result == null) {
result = resource.getAccountPolicy().getResources();
} else {
result.retainAll(resource.getAccountPolicy().getResources());
}
}
}
// 2. look for realms, pick the ones whose account policy has authentication resources
for (Realm realm : realmDAO.findAncestors(user.getRealm())) {
if (realm.getAccountPolicy() != null && !realm.getAccountPolicy().getResources().isEmpty()) {
if (result == null) {
result = realm.getAccountPolicy().getResources();
} else {
result.retainAll(realm.getAccountPolicy().getResources());
}
}
}
return result == null ? Set.of() : result;
}
protected Set<SyncopeGrantedAuthority> getAdminAuthorities() {
return EntitlementsHolder.getInstance().getValues().stream().
map(entitlement -> new SyncopeGrantedAuthority(entitlement, SyncopeConstants.ROOT_REALM)).
collect(Collectors.toSet());
}
protected Set<SyncopeGrantedAuthority> buildAuthorities(final Map<String, Set<String>> entForRealms) {
Set<SyncopeGrantedAuthority> authorities = new HashSet<>();
entForRealms.forEach((entitlement, realms) -> {
Pair<Set<String>, Set<String>> normalized = RealmUtils.normalize(realms);
SyncopeGrantedAuthority authority = new SyncopeGrantedAuthority(entitlement);
authority.addRealms(normalized.getLeft());
authority.addRealms(normalized.getRight());
authorities.add(authority);
});
return authorities;
}
protected Set<SyncopeGrantedAuthority> getUserAuthorities(final User user) {
if (user.isMustChangePassword()) {
return MUST_CHANGE_PASSWORD_AUTHORITIES;
}
Map<String, Set<String>> entForRealms = new HashMap<>();
// Give entitlements as assigned by roles (with static or dynamic realms, where applicable) - assigned
// either statically and dynamically
userDAO.findAllRoles(user).stream().
filter(role -> !GROUP_OWNER_ROLE.equals(role.getKey())).
forEach(role -> role.getEntitlements().forEach(entitlement -> {
Set<String> realms = Optional.ofNullable(entForRealms.get(entitlement)).orElseGet(() -> {
Set<String> r = new HashSet<>();
entForRealms.put(entitlement, r);
return r;
});
realms.addAll(role.getRealms().stream().map(Realm::getFullPath).collect(Collectors.toSet()));
if (!entitlement.endsWith("_CREATE") && !entitlement.endsWith("_DELETE")) {
realms.addAll(role.getDynRealms().stream().map(DynRealm::getKey).collect(Collectors.toList()));
}
}));
// Give group entitlements for owned groups
groupDAO.findOwnedByUser(user.getKey()).forEach(group -> {
Role groupOwnerRole = roleDAO.find(GROUP_OWNER_ROLE);
if (groupOwnerRole == null) {
LOG.warn("Role {} was not found", GROUP_OWNER_ROLE);
} else {
groupOwnerRole.getEntitlements().forEach(entitlement -> {
Set<String> realms = Optional.ofNullable(entForRealms.get(entitlement)).orElseGet(() -> {
HashSet<String> r = new HashSet<>();
entForRealms.put(entitlement, r);
return r;
});
realms.add(RealmUtils.getGroupOwnerRealm(group.getRealm().getFullPath(), group.getKey()));
});
}
});
return buildAuthorities(entForRealms);
}
protected Set<SyncopeGrantedAuthority> getDelegatedAuthorities(final Delegation delegation) {
Map<String, Set<String>> entForRealms = new HashMap<>();
delegation.getRoles().stream().filter(role -> !GROUP_OWNER_ROLE.equals(role.getKey())).
forEach(role -> role.getEntitlements().forEach(entitlement -> {
Set<String> realms = Optional.ofNullable(entForRealms.get(entitlement)).orElseGet(() -> {
HashSet<String> r = new HashSet<>();
entForRealms.put(entitlement, r);
return r;
});
realms.addAll(role.getRealms().stream().map(Realm::getFullPath).collect(Collectors.toSet()));
if (!entitlement.endsWith("_CREATE") && !entitlement.endsWith("_DELETE")) {
realms.addAll(role.getDynRealms().stream().map(DynRealm::getKey).collect(Collectors.toList()));
}
}));
return buildAuthorities(entForRealms);
}
@Transactional
public Set<SyncopeGrantedAuthority> getAuthorities(final String username, final String delegationKey) {
Set<SyncopeGrantedAuthority> authorities;
if (securityProperties.getAnonymousUser().equals(username)) {
authorities = ANONYMOUS_AUTHORITIES;
} else if (securityProperties.getAdminUser().equals(username)) {
authorities = getAdminAuthorities();
} else if (delegationKey != null) {
Delegation delegation = Optional.ofNullable(delegationDAO.find(delegationKey)).
orElseThrow(() -> new UsernameNotFoundException(
"Could not find delegation " + delegationKey));
authorities = delegation.getRoles().isEmpty()
? getUserAuthorities(delegation.getDelegating())
: getDelegatedAuthorities(delegation);
} else {
User user = Optional.ofNullable(userDAO.findByUsername(username)).
orElseThrow(() -> new UsernameNotFoundException(
"Could not find any user with username " + username));
authorities = getUserAuthorities(user);
}
return authorities;
}
    /**
     * Authenticates the given JWT, resolving it either to the admin user (when the
     * JWT subject matches the configured admin username) or to an internal user via
     * the matching {@link JWTSSOProvider}.
     *
     * @param authentication JWT authentication request
     * @return pair of resolved username and granted authorities
     * @throws AuthenticationCredentialsNotFoundException if no access token or user
     * matches the JWT
     * @throws DisabledException if the resolved user is suspended or its status is
     * not allowed to authenticate
     */
    @Transactional
    public Pair<String, Set<SyncopeGrantedAuthority>> authenticate(final JWTAuthentication authentication) {
        String username;
        Set<SyncopeGrantedAuthority> authorities;

        if (securityProperties.getAdminUser().equals(authentication.getClaims().getSubject())) {
            // admin JWTs must have a matching, locally stored access token
            AccessToken accessToken = accessTokenDAO.find(authentication.getClaims().getJWTID());
            if (accessToken == null) {
                throw new AuthenticationCredentialsNotFoundException(
                        "Could not find an Access Token for JWT " + authentication.getClaims().getJWTID());
            }

            username = securityProperties.getAdminUser();
            authorities = getAdminAuthorities();
        } else {
            // delegate resolution to the SSO provider registered for the JWT issuer
            JWTSSOProvider jwtSSOProvider = getJWTSSOProvider(authentication.getClaims().getIssuer());
            Pair<User, Set<SyncopeGrantedAuthority>> resolved = jwtSSOProvider.resolve(authentication.getClaims());
            if (resolved == null || resolved.getLeft() == null) {
                throw new AuthenticationCredentialsNotFoundException(
                        "Could not find User " + authentication.getClaims().getSubject()
                        + " for JWT " + authentication.getClaims().getJWTID());
            }

            User user = resolved.getLeft();
            String delegationKey = getDelegationKey(authentication.getDetails(), user.getKey());
            username = user.getUsername();
            // no authorities from the provider -> empty set; under delegation the
            // authorities are recomputed from the delegation itself
            authorities = resolved.getRight() == null
                    ? Set.of()
                    : delegationKey == null
                    ? resolved.getRight()
                    : getAuthorities(username, delegationKey);
            LOG.debug("JWT {} issued by {} resolved to User {} with authorities {}",
                    authentication.getClaims().getJWTID(),
                    authentication.getClaims().getIssuer(),
                    username + Optional.ofNullable(delegationKey).
                            map(d -> " [under delegation " + delegationKey + "]").orElse(StringUtils.EMPTY),
                    authorities);

            // status checks happen after resolution, so failures name the user
            if (BooleanUtils.isTrue(user.isSuspended())) {
                throw new DisabledException("User " + username + " is suspended");
            }

            List<String> authStatuses = List.of(confParamOps.get(authentication.getDetails().getDomain(),
                    "authentication.statuses", new String[] {}, String[].class));
            if (!authStatuses.contains(user.getStatus())) {
                throw new DisabledException("User " + username + " not allowed to authenticate");
            }

            if (BooleanUtils.isTrue(user.isMustChangePassword())) {
                // restrict to password-change-only authorities until reset
                LOG.debug("User {} must change password, resetting authorities", username);
                authorities = MUST_CHANGE_PASSWORD_AUTHORITIES;
            }
        }

        return Pair.of(username, authorities);
    }
    /**
     * Removes the expired access token with the given key.
     *
     * @param tokenKey key of the access token to delete
     */
    @Transactional
    public void removeExpired(final String tokenKey) {
        accessTokenDAO.delete(tokenKey);
    }
@Transactional(readOnly = true)
public void audit(
final String username,
final String delegationKey,
final AuditElements.Result result,
final Object output,
final Object... input) {
auditManager.audit(
username + Optional.ofNullable(delegationKey).
map(d -> " [under delegation " + delegationKey + "]").orElse(StringUtils.EMPTY),
AuditElements.EventCategoryType.LOGIC, AuditElements.AUTHENTICATION_CATEGORY, null,
AuditElements.LOGIN_EVENT, result, null, output, input);
}
}
| apache/syncope | core/spring/src/main/java/org/apache/syncope/core/spring/security/AuthDataAccessor.java | Java | apache-2.0 | 23,971 |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package admin.keepalive;
import hydra.*;
//import util.*;
/**
*
* A class used to store keys for Admin API region "keep alive" Tests
*
*/
public class TestPrms extends BasePrms {

    //---------------------------------------------------------------------
    // Default Values
    //---------------------------------------------------------------------

    // Test-specific parameters

    /** (boolean) Controls whether a remote CacheLoader is defined. */
    public static Long defineCacheLoaderRemote;

    /**
     * Returns the boolean value of {@link #defineCacheLoaderRemote}, looked up
     * first in the task-specific table, then in the general config table.
     * Defaults to {@code false} when unset in both.
     */
    public static boolean getDefineCacheLoaderRemote() {
        Long key = defineCacheLoaderRemote;
        return (tasktab().booleanAt(key, tab().booleanAt(key, false)));
    }

}
| papicella/snappy-store | tests/core/src/main/java/admin/keepalive/TestPrms.java | Java | apache-2.0 | 1,450 |
<?php
use yii\helpers\Html;
use yii\widgets\ActiveForm;
/**
* @var yii\base\View $this
* @var app\modules\workflow\models\WorkflowForm $model
* @var yii\widgets\ActiveForm $form
*/
?>
<div class="workflow-search">

    <?php $form = ActiveForm::begin(['method' => 'get']); ?>

    <?= $form->field($model, 'id'); ?>

    <?= $form->field($model, 'previous_user_id'); ?>

    <?= $form->field($model, 'next_user_id'); ?>

    <?= $form->field($model, 'module'); ?>

    <?= $form->field($model, 'wf_table'); ?>

    <?php // Additional searchable attributes, disabled by default: ?>
    <?php // echo $form->field($model, 'wf_id'); ?>
    <?php // echo $form->field($model, 'status_from'); ?>
    <?php // echo $form->field($model, 'status_to'); ?>
    <?php // echo $form->field($model, 'actions_next'); ?>
    <?php // echo $form->field($model, 'date_create'); ?>

    <div class="form-group">
        <?= Html::submitButton('Search', ['class' => 'btn btn-primary']); ?>
        <?= Html::resetButton('Reset', ['class' => 'btn btn-default']); ?>
    </div>

    <?php ActiveForm::end(); ?>

</div>
| FrenzelGmbH/qvCRUD | qvCRUD/modules/workflow/views/workflow/_search.php | PHP | apache-2.0 | 1,004 |
/* $Id$
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.etch.bindings.java.msg;
import java.util.Set;
import org.apache.etch.bindings.java.msg.Validator.Level;
/**
* Interface which defines the value factory which helps
* the idl compiler serialize and deserialize messages,
* convert values, etc.
*/
public interface ValueFactory
{
	//////////
	// Type //
	//////////

	/**
	 * Translates a type id into the appropriate Type object. If the type does
	 * not exist, and if dynamic typing is enabled, adds it to the dynamic types.
	 * @param id a type id.
	 * @return id translated into the appropriate Type.
	 */
	public Type getType( Integer id );

	/**
	 * Translates a type name into the appropriate Type object. If the type does
	 * not exist, and if dynamic typing is enabled, adds it to the dynamic types.
	 * @param name a type name.
	 * @return name translated into the appropriate Type.
	 */
	public Type getType( String name );

	/**
	 * Adds the type if it doesn't already exist. Use this to dynamically add
	 * types to a ValueFactory. The type is per instance of the ValueFactory,
	 * not global. Not available if dynamic typing is locked
	 * (see {@link #lockDynamicTypes()}).
	 * @param type the type to add.
	 */
	public void addType( Type type );

	/**
	 * Locks the dynamic typing so that no new types may be created by addType
	 * or getType.
	 */
	public void lockDynamicTypes();

	/**
	 * Unlocks the dynamic typing so that new types may be created by addType
	 * or getType.
	 */
	public void unlockDynamicTypes();

	/**
	 * @return a collection of all the types (both static and dynamic).
	 */
	public Set<Type> getTypes();

	/////////////////////
	// STRING ENCODING //
	/////////////////////

	/**
	 * @return the character encoding to use when serializing strings.
	 */
	public String getStringEncoding();

	////////////////
	// MESSAGE ID //
	////////////////

	/**
	 * @param msg the message whose well-known message-id field is to be
	 * returned.
	 * @return the value of the well-known message-id field. This is a
	 * unique identifier for this message on a particular transport
	 * during a particular session. If there is no well-known message-id
	 * field defined, or if the message-id field has not been set, then
	 * return null.
	 */
	public Long getMessageId( Message msg );

	/**
	 * Sets the value of the well-known message-id field. This is a
	 * unique identifier for this message on a particular transport
	 * during a particular session. If there is no well-known message-id
	 * field defined then nothing is done. If msgid is null, then the
	 * field is cleared.
	 * @param msg the message whose well-known message-id field is to
	 * be set.
	 * @param msgid the value of the well-known message-id field.
	 */
	public void setMessageId( Message msg, Long msgid );

	/**
	 * @return well-known message field for message id.
	 */
	public Field get_mf__messageId();

	/////////////////
	// IN REPLY TO //
	/////////////////

	/**
	 * @param msg the message whose well-known in-reply-to field is to
	 * be returned.
	 * @return the value of the in-reply-to field, or null if there is
	 * none or if there is no such field defined.
	 */
	public Long getInReplyTo( Message msg );

	/**
	 * @param msg the message whose well-known in-reply-to field is to
	 * be set.
	 * @param msgid the value of the well-known in-reply-to field. If
	 * there is no well-known in-reply-to field defined then nothing
	 * is done. If msgid is null, then the field is cleared.
	 */
	public void setInReplyTo( Message msg, Long msgid );

	/**
	 * @return well-known message field for in reply to.
	 */
	public Field get_mf__inReplyTo();

	//////////////////////
	// VALUE CONVERSION //
	//////////////////////

	/**
	 * Converts a value to a struct value representation to be exported
	 * to a tagged data output.
	 * @param value a custom type defined by a service, or a well-known
	 * standard type (e.g., date).
	 * @return a struct value representing the value.
	 * @throws UnsupportedOperationException if the type cannot be exported.
	 */
	public StructValue exportCustomValue( Object value )
		throws UnsupportedOperationException;

	/**
	 * Converts a struct value imported from a tagged data input to
	 * a normal type.
	 * @param struct a struct value representation of a custom type, or a
	 * well known standard type.
	 * @return a custom type, or a well known standard type.
	 * @throws UnsupportedOperationException if the type cannot be imported.
	 */
	public Object importCustomValue( StructValue struct )
		throws UnsupportedOperationException;

	/**
	 * @param c the class of a custom value.
	 * @return the struct type of a custom value class.
	 * @throws UnsupportedOperationException if the class has no
	 * associated struct type.
	 * @see #exportCustomValue(Object)
	 */
	public Type getCustomStructType( Class<?> c )
		throws UnsupportedOperationException;

	/**
	 * @return well-known message type for exception thrown by one-way
	 * message.
	 */
	public Type get_mt__exception();

	/**
	 * @return the validation level of field StructValue.put and TaggedDataOutput.
	 */
	public Level getLevel();

	/**
	 * Sets the validation level of field StructValue.put and TaggedDataOutput.
	 * @param level the new validation level.
	 * @return the old value.
	 */
	public Level setLevel( Level level );
}
| OBIGOGIT/etch | binding-java/runtime/src/main/java/org/apache/etch/bindings/java/msg/ValueFactory.java | Java | apache-2.0 | 5,950 |
/*
* #%L
* ImageJ software for multidimensional image processing and analysis.
* %%
* Copyright (C) 2014 - 2017 Board of Regents of the University of
* Wisconsin-Madison, University of Konstanz and Brian Northan.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package net.imagej.ops.create.img;
import net.imagej.ops.Ops;
import net.imagej.ops.special.chain.UFViaUFSameIO;
import net.imagej.ops.special.function.Functions;
import net.imagej.ops.special.function.UnaryFunctionOp;
import net.imglib2.Interval;
import net.imglib2.img.Img;
import net.imglib2.type.numeric.real.DoubleType;
import org.scijava.plugin.Plugin;
/**
* Creates an {@link Img} from an {@link Interval} with no additional hints.
* {@link Interval} contents are not copied.
*
* @author Curtis Rueden
*/
@Plugin(type = Ops.Create.Img.class)
public class CreateImgFromInterval extends
	UFViaUFSameIO<Interval, Img<DoubleType>> implements Ops.Create.Img
{

	/**
	 * Creates the worker op that actually builds the {@link Img}, by matching
	 * the (Interval, type) form of the create-img op with {@link DoubleType}
	 * as element type.
	 */
	@Override
	@SuppressWarnings({"rawtypes", "unchecked"})
	public UnaryFunctionOp<Interval, Img<DoubleType>> createWorker(
		final Interval input)
	{
		// NB: Intended to match CreateImgFromDimsAndType.
		// The raw cast is unavoidable: Functions.unary returns an op typed on
		// the raw Img class, not on Img<DoubleType>.
		return (UnaryFunctionOp) Functions.unary(ops(), Ops.Create.Img.class,
			Img.class, input, new DoubleType());
	}

}
| gab1one/imagej-ops | src/main/java/net/imagej/ops/create/img/CreateImgFromInterval.java | Java | bsd-2-clause | 2,532 |
# frozen_string_literal: true
require "unpack_strategy"
shared_examples "UnpackStrategy::detect" do
it "is correctly detected" do
expect(UnpackStrategy.detect(path)).to be_a described_class
end
end
# Extracts `path` into a fresh temporary directory and asserts that the
# directory then contains exactly the entries listed in `children`.
shared_examples "#extract" do |children: []|
  specify "#extract" do
    mktmpdir do |unpack_dir|
      described_class.new(path).extract(to: unpack_dir)
      expect(unpack_dir.children(false).map(&:to_s)).to match_array children
    end
  end
end
| sjackman/linuxbrew | Library/Homebrew/test/unpack_strategy/shared_examples.rb | Ruby | bsd-2-clause | 458 |
//
// Programmer: Craig Stuart Sapp <craig@ccrma.stanford.edu>
// Creation Date: Sat Aug 25 14:12:42 PDT 2018
// Last Modified: Sat Aug 25 19:47:08 PDT 2018
// Filename: tool-trillspell.cpp
// URL: https://github.com/craigsapp/humlib/blob/master/include/tool-trillspell.cpp
// Syntax: C++11; humlib
// vim: syntax=cpp ts=3 noexpandtab nowrap
//
// Description: Interface for trill tool, which assigns intervals to
// trill, mordent and turn ornaments based on the key
// signature and previous notes in a measure.
//
#include "tool-trillspell.h"
#include "Convert.h"
#include "HumRegex.h"
#include <algorithm>
#include <cmath>
using namespace std;
namespace hum {
// START_MERGE
/////////////////////////////////
//
// Tool_trillspell::Tool_trillspell -- Set the recognized options for the tool.
//
Tool_trillspell::Tool_trillspell(void) {
	// -x: mark processed ornaments with an "x" signifier.
	define("x=b", "mark trills with x (interpretation)");
}
///////////////////////////////
//
// Tool_trillspell::run -- Primary interfaces to the tool.
//
//
// Tool_trillspell::run(HumdrumFileSet&) -- process every file in the set;
// always processes all files, reporting failure if any of them failed.
//

bool Tool_trillspell::run(HumdrumFileSet& infiles) {
	bool allOk = true;
	for (int idx=0; idx<infiles.getCount(); idx++) {
		if (!run(infiles[idx])) {
			allOk = false;
		}
	}
	return allOk;
}
bool Tool_trillspell::run(const string& indata, ostream& out) {
	// Parse the input string into a HumdrumFile, then process it.
	HumdrumFile infile(indata);
	return run(infile, out);
}
bool Tool_trillspell::run(HumdrumFile& infile, ostream& out) {
	// NOTE(review): "out" is currently unused here; confirm whether the
	// processed file should be printed to it as other interfaces suggest.
	bool status = run(infile);
	return status;
}
bool Tool_trillspell::run(HumdrumFile& infile) {
	processFile(infile);
	// Regenerate line text from the (possibly modified) tokens.
	infile.createLinesFromTokens();
	return true;
}
///////////////////////////////
//
// Tool_trillspell::processFile -- Adjust intervals of ornaments.
//
void Tool_trillspell::processFile(HumdrumFile& infile) {
	// Cache the -x option state before analysis.
	m_xmark = getBoolean("x");
	analyzeOrnamentAccidentals(infile);
}
//////////////////////////////
//
// Tool_trillspell::analyzeOrnamentAccidentals --
//
bool Tool_trillspell::analyzeOrnamentAccidentals(HumdrumFile& infile) {
	int i, j, k;
	int kindex;
	int track;

	// ktracks == List of **kern spines in data.
	// rtracks == Reverse mapping from track to ktrack index (part/staff index).
	vector<HTp> ktracks = infile.getKernSpineStartList();
	vector<int> rtracks(infile.getMaxTrack()+1, -1);
	for (i=0; i<(int)ktracks.size(); i++) {
		track = ktracks[i]->getTrack();
		rtracks[track] = i;
	}
	int kcount = (int)ktracks.size();

	// keysigs == key signature spellings of diatonic pitch classes.  This array
	// is duplicated into dstates after each barline.
	vector<vector<int> > keysigs;
	keysigs.resize(kcount);
	for (i=0; i<kcount; i++) {
		keysigs[i].resize(7);
		std::fill(keysigs[i].begin(), keysigs[i].end(), 0);
	}

	// dstates == diatonic states for every pitch in a spine.
	// sub-spines are considered as a single unit, although there are
	// score conventions which would keep a separate voices on a staff
	// with different accidental states (i.e., two parts superimposed
	// on the same staff, but treated as if on separate staves).
	// Eventually this algorithm should be adjusted for dealing with
	// cross-staff notes, where the cross-staff notes should be following
	// the accidentals of a different spine...
	vector<vector<int> > dstates; // diatonic states
	dstates.resize(kcount);
	for (i=0; i<kcount; i++) {
		dstates[i].resize(70);     // 10 octave limit for analysis
		                           // may cause problems; maybe fix later.
		std::fill(dstates[i].begin(), dstates[i].end(), 0);
	}

	for (i=0; i<infile.getLineCount(); i++) {
		if (!infile[i].hasSpines()) {
			continue;
		}
		if (infile[i].isInterpretation()) {
			for (j=0; j<infile[i].getFieldCount(); j++) {
				if (!infile[i].token(j)->isKern()) {
					continue;
				}
				if (infile[i].token(j)->compare(0, 3, "*k[") == 0) {
					track = infile[i].token(j)->getTrack();
					kindex = rtracks[track];
					fillKeySignature(keysigs[kindex], *infile[i].token(j));
					// resetting key states of current measure.  What to do if this
					// key signature is in the middle of a measure?
					resetDiatonicStatesWithKeySignature(dstates[kindex],
							keysigs[kindex]);
				}
			}
		} else if (infile[i].isBarline()) {
			for (j=0; j<infile[i].getFieldCount(); j++) {
				if (!infile[i].token(j)->isKern()) {
					continue;
				}
				if (infile[i].token(j)->isInvisible()) {
					continue;
				}
				track = infile[i].token(j)->getTrack();
				kindex = rtracks[track];
				// reset the accidental states in dstates to match keysigs.
				resetDiatonicStatesWithKeySignature(dstates[kindex],
						keysigs[kindex]);
			}
		}

		if (!infile[i].isData()) {
			continue;
		}

		for (j=0; j<infile[i].getFieldCount(); j++) {
			if (!infile[i].token(j)->isKern()) {
				continue;
			}
			if (infile[i].token(j)->isNull()) {
				continue;
			}
			if (infile[i].token(j)->isRest()) {
				continue;
			}

			int subcount = infile[i].token(j)->getSubtokenCount();
			track = infile[i].token(j)->getTrack();

			HumRegex hre;
			int rindex = rtracks[track];
			for (k=0; k<subcount; k++) {
				string subtok = infile[i].token(j)->getSubtoken(k);
				int b40 = Convert::kernToBase40(subtok);
				int diatonic = Convert::kernToBase7(subtok);
				if (diatonic < 0) {
					// Deal with extra-low notes later.
					continue;
				}
				int accid = Convert::kernToAccidentalCount(subtok);
				dstates.at(rindex).at(diatonic) = accid;

				// respell -- shared handler for the six ornament cases below.
				// Measures the base-40 interval to the diatonic neighbor
				// ("up" selects the upper neighbor for trills/upper mordents,
				// the lower neighbor for lower mordents).  If the interval
				// matches "target", the ornament sign "from" is rewritten as
				// "to" (switching between major- and minor-second forms).
				// In all cases the -x marker is applied (when requested) and
				// the subtoken is written back.
				auto respell = [&](const string& from, const string& to,
						int target, bool up, const string& cls) {
					int neighbor = up
							? getBase40(diatonic + 1, dstates[rindex][diatonic+1])
							: getBase40(diatonic - 1, dstates[rindex][diatonic-1]);
					int interval = up ? (neighbor - b40) : (b40 - neighbor);
					if (interval == target) {
						hre.replaceDestructive(subtok, to, from, "g");
					}
					if (m_xmark) {
						hre.replaceDestructive(subtok, "$1x", "(" + cls + "+)", "g");
					}
					infile[i].token(j)->replaceSubtoken(k, subtok);
				};

				// check for accidentals on trills, mordents and turns.
				// N.B.: augmented-second intervals are not considered.
				// Ornaments already marked with "x" are left untouched.
				if ((subtok.find("t") != string::npos) && !hre.search(subtok, "[tT]x")) {
					// minor-second trill sign: promote to "T" if the upper
					// neighbor is a major second (base-40 interval 6) away.
					respell("t", "T", 6, true, "[Tt]");
				} else if ((subtok.find("T") != string::npos) && !hre.search(subtok, "[tT]x")) {
					// major-second trill sign: demote to "t" on a minor second (5).
					respell("T", "t", 5, true, "[Tt]");
				} else if ((subtok.find("M") != string::npos) && !hre.search(subtok, "[Mm]x")) {
					// major-second upper mordent: demote to "m" on a minor second.
					respell("M", "m", 5, true, "[Mm]");
				} else if ((subtok.find("m") != string::npos) && !hre.search(subtok, "[Mm]x")) {
					// minor-second upper mordent: promote to "M" on a major second.
					respell("m", "M", 6, true, "[Mm]");
				} else if ((subtok.find("W") != string::npos) && !hre.search(subtok, "[Ww]x")) {
					// major-second lower mordent: demote to "w" on a minor second.
					respell("W", "w", 5, false, "[Ww]");
				} else if ((subtok.find("w") != string::npos) && !hre.search(subtok, "[Ww]x")) {
					// minor-second lower mordent: promote to "W" on a major second.
					respell("w", "W", 6, false, "[Ww]");
				}
				// deal with turns and inverted turns here.
			}
		}
	}

	return true;
}
//////////////////////////////
//
// Tool_trillspell::resetDiatonicStatesWithKeySignature -- Only used in
// Tool_trillspell::analyzeKernAccidentals(). Resets the accidental
// states for notes
//
//
// Tool_trillspell::resetDiatonicStatesWithKeySignature -- Only used in
//     Tool_trillspell::analyzeKernAccidentals().  Resets every diatonic
//     accidental state to the key-signature alteration for its pitch class
//     (the 7-entry signature is tiled across all octaves).
//

void Tool_trillspell::resetDiatonicStatesWithKeySignature(vector<int>&
		states, vector<int>& signature) {
	int count = (int)states.size();
	for (int index=0; index<count; index++) {
		states[index] = signature[index % 7];
	}
}
//////////////////////////////
//
// Tool_trillspell::fillKeySignature -- Read key signature notes and
// assign +1 to sharps, -1 to flats in the diatonic input array. Used
// only by Tool_trillspell::analyzeOrnamentAccidentals().
//
//
// Tool_trillspell::fillKeySignature -- Read key signature notes and
//     assign +1 to sharps, -1 to flats in the diatonic input array
//     (index 0 = C, 1 = D, ... 6 = B).  Used only by
//     Tool_trillspell::analyzeOrnamentAccidentals().
//

void Tool_trillspell::fillKeySignature(vector<int>& states,
		const string& keysig) {
	std::fill(states.begin(), states.end(), 0);
	static const char letters[8] = "cdefgab";
	for (int index=0; index<7; index++) {
		string sharp(1, letters[index]);
		sharp += '#';
		string flat(1, letters[index]);
		flat += '-';
		if (keysig.find(sharp) != string::npos) {
			states[index] = +1;
		}
		// flats override sharps, matching the original check order
		if (keysig.find(flat) != string::npos) {
			states[index] = -1;
		}
	}
}
//////////////////////////////
//
// Tool_trillspell::getBase40 --
//
int Tool_trillspell::getBase40(int diatonic, int accidental) {
	// Convert a base-7 diatonic pitch plus accidental alteration count
	// into a base-40 pitch number.
	return Convert::base7ToBase40(diatonic) + accidental;
}
// END_MERGE
} // end namespace hum
| humdrum-tools/minHumdrum | src/tool-trillspell.cpp | C++ | bsd-2-clause | 11,287 |
class Openrct2 < Formula
  desc "Open source re-implementation of RollerCoaster Tycoon 2"
  homepage "https://openrct2.io/"
  # Pin both tag and revision so the build is reproducible.
  url "https://github.com/OpenRCT2/OpenRCT2.git",
      :tag => "v0.2.4",
      :revision => "d645338752fbda54bed2cf2a4183ae8b44be6e95"
  head "https://github.com/OpenRCT2/OpenRCT2.git", :branch => "develop"

  bottle do
    cellar :any
    sha256 "40527c354be56c735286b5a9a5e8f7d58de0d510190e0a1da09da552a44f877a" => :catalina
    sha256 "0aba8b54f6f4d5022c3a2339bbb12dd8bd3ada5894e9bdc0a2cfeb973facca63" => :mojave
    sha256 "6065b8ac863f4634f38d51dc444c2b68a361b1e9135b959c1be23321976f821d" => :high_sierra
  end

  depends_on "cmake" => :build
  depends_on "pkg-config" => :build
  depends_on "freetype" # for sdl2_ttf
  depends_on "icu4c"
  depends_on "jansson"
  depends_on "libpng"
  depends_on "libzip"
  depends_on :macos => :high_sierra # "missing: Threads_FOUND" on Sierra
  depends_on "openssl@1.1"
  depends_on "sdl2"
  depends_on "sdl2_ttf"
  depends_on "speexdsp"

  # Data assets fetched separately and installed into the build tree below.
  resource "title-sequences" do
    url "https://github.com/OpenRCT2/title-sequences/releases/download/v0.1.2a/title-sequence-v0.1.2a.zip"
    sha256 "7536dbd7c8b91554306e5823128f6bb7e94862175ef09d366d25e4bce573d155"
  end

  resource "objects" do
    url "https://github.com/OpenRCT2/objects/releases/download/v1.0.10/objects.zip"
    sha256 "4f261964f1c01a04b7600d3d082fb4d3d9ec0d543c4eb66a819eb2ad01417aa0"
  end

  def install
    # Avoid letting CMake download things during the build process.
    (buildpath/"data/title").install resource("title-sequences")
    (buildpath/"data/object").install resource("objects")

    mkdir "build" do
      system "cmake", "..", *std_cmake_args
      system "make", "install"
    end

    # By default macOS build only looks up data in app bundle Resources;
    # wrap the real binary so it also finds the data installed by Homebrew.
    libexec.install bin/"openrct2"
    (bin/"openrct2").write <<~EOS
      #!/bin/bash
      exec "#{libexec}/openrct2" "$@" "--openrct-data-path=#{pkgshare}"
    EOS
  end

  test do
    assert_match "OpenRCT2, v#{version}", shell_output("#{bin}/openrct2 -v")
  end
end
| nbari/homebrew-core | Formula/openrct2.rb | Ruby | bsd-2-clause | 2,081 |