code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
import paddle
import paddle.fluid.core as core
from op_test import OpTest
class ApiFMaxTest(unittest.TestCase):
    """Functional tests for ``paddle.fmax`` in static and dynamic graph modes."""

    def setUp(self):
        """Pick the execution place and build inputs plus numpy references."""
        if core.is_compiled_with_cuda():
            self.place = core.CUDAPlace(0)
        else:
            self.place = core.CPUPlace()

        self.input_x = np.random.rand(10, 15).astype("float32")
        self.input_y = np.random.rand(10, 15).astype("float32")
        self.input_z = np.random.rand(15).astype("float32")
        self.input_a = np.array([0, np.nan, np.nan]).astype('int64')
        self.input_b = np.array([2, np.inf, -np.inf]).astype('int64')
        self.input_c = np.array([4, 1, 3]).astype('int64')

        # numpy's fmax is the ground truth for every case below.
        self.np_expected1 = np.fmax(self.input_x, self.input_y)
        self.np_expected2 = np.fmax(self.input_x, self.input_z)
        self.np_expected3 = np.fmax(self.input_a, self.input_c)
        self.np_expected4 = np.fmax(self.input_b, self.input_c)

    def _run_static_fmax(self, var_specs, feed, expected):
        """Build a fresh program, apply fmax to two data vars, check the result.

        var_specs: list of (name, shape, dtype) for the two operands, in order.
        feed: feed dict keyed by the same names.
        expected: numpy reference output.
        """
        with paddle.static.program_guard(paddle.static.Program(),
                                         paddle.static.Program()):
            operands = [
                paddle.static.data(name, shape=shape, dtype=dtype)
                for name, shape, dtype in var_specs
            ]
            result_fmax = paddle.fmax(operands[0], operands[1])
            exe = paddle.static.Executor(self.place)
            fetched, = exe.run(feed=feed, fetch_list=[result_fmax])
        self.assertTrue(np.allclose(fetched, expected))

    def test_static_api(self):
        """Static graph: same-shape, broadcast, and int64 operand pairs."""
        paddle.enable_static()
        self._run_static_fmax(
            [("x", [10, 15], "float32"), ("y", [10, 15], "float32")],
            {"x": self.input_x, "y": self.input_y},
            self.np_expected1)
        self._run_static_fmax(
            [("x", [10, 15], "float32"), ("z", [15], "float32")],
            {"x": self.input_x, "z": self.input_z},
            self.np_expected2)
        self._run_static_fmax(
            [("a", [3], "int64"), ("c", [3], "int64")],
            {"a": self.input_a, "c": self.input_c},
            self.np_expected3)
        self._run_static_fmax(
            [("b", [3], "int64"), ("c", [3], "int64")],
            {"b": self.input_b, "c": self.input_c},
            self.np_expected4)

    def test_dynamic_api(self):
        """Dynamic graph results must match the numpy references."""
        paddle.disable_static()
        tensor_x = paddle.to_tensor(self.input_x)
        tensor_y = paddle.to_tensor(self.input_y)
        tensor_z = paddle.to_tensor(self.input_z)
        tensor_a = paddle.to_tensor(self.input_a)
        tensor_b = paddle.to_tensor(self.input_b)
        tensor_c = paddle.to_tensor(self.input_c)

        self.assertTrue(np.allclose(
            paddle.fmax(tensor_x, tensor_y).numpy(), self.np_expected1))
        # test broadcast across the trailing dimension
        self.assertTrue(np.allclose(
            paddle.fmax(tensor_x, tensor_z).numpy(), self.np_expected2))
        self.assertTrue(np.allclose(
            paddle.fmax(tensor_a, tensor_c).numpy(), self.np_expected3))
        self.assertTrue(np.allclose(
            paddle.fmax(tensor_b, tensor_c).numpy(), self.np_expected4))
class TestElementwiseFmaxOp(OpTest):
    """Operator-level checks for ``elementwise_fmax``: output and gradients."""

    def setUp(self):
        """Generate operands guaranteed to differ elementwise.

        When x and y are equal, max() is not differentiable, so y is pushed
        away from x by at least 0.1 in a random direction per element.
        """
        self.op_type = "elementwise_fmax"
        base = np.random.uniform(0.1, 1, [13, 17]).astype("float64")
        direction = np.random.choice([-1, 1], [13, 17]).astype("float64")
        offset = np.random.uniform(0.1, 1, [13, 17]).astype("float64")
        self.inputs = {'X': base, 'Y': base + direction * offset}
        self.outputs = {'Out': np.fmax(self.inputs['X'], self.inputs['Y'])}

    def test_check_output(self):
        """Forward output matches the numpy reference."""
        self.check_output()

    def test_check_grad_normal(self):
        """Gradients with respect to both inputs."""
        self.check_grad(['X', 'Y'], 'Out')

    def test_check_grad_ingore_x(self):
        """Gradient w.r.t. Y only; X excluded via no_grad_set."""
        self.check_grad(
            ['Y'], 'Out', max_relative_error=0.005, no_grad_set=set("X"))

    def test_check_grad_ingore_y(self):
        """Gradient w.r.t. X only; Y excluded via no_grad_set."""
        self.check_grad(
            ['X'], 'Out', max_relative_error=0.005, no_grad_set=set('Y'))
class TestElementwiseFmax2Op(OpTest):
    """``elementwise_fmax`` operator checks with NaN values injected into Y."""

    def setUp(self):
        """Generate well-separated operands, then poison part of Y with NaN.

        When x and y are equal, max() is not differentiable, so y is pushed
        away from x by at least 0.1 in a random direction per element.
        """
        self.op_type = "elementwise_fmax"
        base = np.random.uniform(0.1, 1, [13, 17]).astype("float64")
        direction = np.random.choice([-1, 1], [13, 17]).astype("float64")
        offset = np.random.uniform(0.1, 1, [13, 17]).astype("float64")
        poisoned = base + direction * offset
        # fmax must fall back to X wherever Y is NaN.
        poisoned[2, 10:] = np.nan
        self.inputs = {'X': base, 'Y': poisoned}
        self.outputs = {'Out': np.fmax(self.inputs['X'], self.inputs['Y'])}

    def test_check_output(self):
        """Forward output matches the numpy reference (NaNs skipped by fmax)."""
        self.check_output()

    def test_check_grad_normal(self):
        """Gradients with respect to both inputs."""
        self.check_grad(['X', 'Y'], 'Out')

    def test_check_grad_ingore_x(self):
        """Gradient w.r.t. Y only; X excluded via no_grad_set."""
        self.check_grad(
            ['Y'], 'Out', max_relative_error=0.005, no_grad_set=set("X"))

    def test_check_grad_ingore_y(self):
        """Gradient w.r.t. X only; Y excluded via no_grad_set."""
        self.check_grad(
            ['X'], 'Out', max_relative_error=0.005, no_grad_set=set('Y'))
| luotao1/Paddle | python/paddle/fluid/tests/unittests/test_fmax_op.py | Python | apache-2.0 | 7,540 |
/*
* JBoss, Home of Professional Open Source
* Copyright 2012, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.weld.util.bean;
import java.io.Serializable;
import javax.enterprise.inject.spi.InjectionPoint;
import org.jboss.weld.injection.ForwardingInjectionPoint;
import org.jboss.weld.serialization.InjectionPointHolder;
/**
 * A serializable {@link ForwardingInjectionPoint}: the wrapped injection point
 * is stored via an {@link InjectionPointHolder}, which stands in for the
 * (not necessarily serializable) {@link InjectionPoint} across serialization.
 */
public class SerializableForwardingInjectionPoint extends ForwardingInjectionPoint implements Serializable {

    private static final long serialVersionUID = 7803445899943317029L;

    // Serializable holder; resolves back to the real injection point on access.
    private final InjectionPointHolder ip;

    public SerializableForwardingInjectionPoint(String contextId, InjectionPoint ip) {
        this.ip = new InjectionPointHolder(contextId, ip);
    }

    @Override
    protected InjectionPoint delegate() {
        return ip.get();
    }
}
| antoinesd/weld-core | impl/src/main/java/org/jboss/weld/util/bean/SerializableForwardingInjectionPoint.java | Java | apache-2.0 | 1,478 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.flume.appender;
import java.util.Properties;
import org.apache.flume.Event;
import org.apache.flume.api.RpcClient;
import org.apache.flume.api.RpcClientFactory;
import org.apache.logging.log4j.core.appender.AppenderLoggingException;
import org.apache.logging.log4j.core.appender.ManagerFactory;
/**
 * Manager for FlumeAvroAppenders. Owns a shared Avro {@link RpcClient} that is
 * created lazily on first use and re-created after a failed send.
 */
public class FlumeAvroManager extends AbstractFlumeManager {

    /** Hard upper bound applied to the configured reconnect attempts. */
    private static final int MAX_RECONNECTS = 3;

    /** Timeouts (ms) below this value are not forwarded to the RPC client. */
    private static final int MINIMUM_TIMEOUT = 1000;

    private static AvroManagerFactory factory = new AvroManagerFactory();

    // Candidate agents; more than one enables the failover client type.
    private final Agent[] agents;

    private final int batchSize;

    private final int retries;

    private final int connectTimeout;

    private final int requestTimeout;

    // NOTE(review): never reassigned, so error messages always report
    // agents[0] even when a failover client is talking to another agent.
    private final int current = 0;

    // Set to null on every send failure; reconnected lazily on the next send.
    private RpcClient rpcClient = null;

    /**
     * Constructor
     * @param name The unique name of this manager.
     * @param shortName Short name (currently unused by this class).
     * @param agents An array of Agents.
     * @param batchSize The number of events to include in a batch.
     * @param retries The number of times to retry connecting before giving up.
     * @param connectTimeout The connection timeout in ms.
     * @param requestTimeout The request timeout in ms.
     */
    protected FlumeAvroManager(final String name, final String shortName, final Agent[] agents, final int batchSize,
                               final int retries, final int connectTimeout, final int requestTimeout) {
        super(name);
        this.agents = agents;
        this.batchSize = batchSize;
        this.retries = retries;
        this.connectTimeout = connectTimeout;
        this.requestTimeout = requestTimeout;
        // Connect eagerly so configuration problems surface at startup.
        this.rpcClient = connect(agents, retries, connectTimeout, requestTimeout);
    }

    /**
     * Returns a FlumeAvroManager.
     * @param name The name of the manager.
     * @param agents The agents to use.
     * @param batchSize The number of events to include in a batch; values &lt;= 0 are coerced to 1.
     * @param retries The number of times to retry connecting before giving up.
     * @param connectTimeout The connection timeout in ms.
     * @param requestTimeout The request timeout in ms.
     * @return A FlumeAvroManager.
     */
    public static FlumeAvroManager getManager(final String name, final Agent[] agents, int batchSize,
                                              final int retries, final int connectTimeout, final int requestTimeout) {
        if (agents == null || agents.length == 0) {
            throw new IllegalArgumentException("At least one agent is required");
        }
        if (batchSize <= 0) {
            batchSize = 1;
        }
        // The cache key encodes every host:port pair so distinct agent lists
        // map to distinct managers.
        final StringBuilder sb = new StringBuilder("FlumeAvro[");
        boolean first = true;
        for (final Agent agent : agents) {
            if (!first) {
                sb.append(",");
            }
            sb.append(agent.getHost()).append(":").append(agent.getPort());
            first = false;
        }
        sb.append("]");
        return getManager(sb.toString(), factory,
            new FactoryData(name, agents, batchSize, retries, connectTimeout, requestTimeout));
    }

    /**
     * Returns the agents.
     * @return The agent array.
     */
    public Agent[] getAgents() {
        return agents;
    }

    /**
     * Returns the index of the current agent.
     * @return The index for the current agent.
     */
    public int getCurrent() {
        return current;
    }

    /** @return the configured number of connection retries. */
    public int getRetries() {
        return retries;
    }

    /** @return the connection timeout in ms. */
    public int getConnectTimeout() {
        return connectTimeout;
    }

    /** @return the request timeout in ms. */
    public int getRequestTimeout() {
        return requestTimeout;
    }

    /** @return the number of events per batch. */
    public int getBatchSize() {
        return batchSize;
    }

    /**
     * Sends a batch of events, reconnecting first if the client was dropped.
     * On failure the client is closed and nulled so the next call reconnects.
     * @param events the batch to append.
     * @throws AppenderLoggingException if no client is available or the append fails.
     */
    public synchronized void send(final BatchEvent events) {
        if (rpcClient == null) {
            rpcClient = connect(agents, retries, connectTimeout, requestTimeout);
        }

        if (rpcClient != null) {
            try {
                LOGGER.trace("Sending batch of {} events", events.getEvents().size());
                rpcClient.appendBatch(events.getEvents());
            } catch (final Exception ex) {
                // Drop the client so the next send() attempts a reconnect.
                rpcClient.close();
                rpcClient = null;
                final String msg = "Unable to write to " + getName() + " at " + agents[current].getHost() + ":" +
                    agents[current].getPort();
                LOGGER.warn(msg, ex);
                throw new AppenderLoggingException("No Flume agents are available");
            }
        }  else {
            final String msg = "Unable to write to " + getName() + " at " + agents[current].getHost() + ":" +
                agents[current].getPort();
            LOGGER.warn(msg);
            throw new AppenderLoggingException("No Flume agents are available");
        }
    }

    /**
     * Sends a single event, reconnecting first if the client was dropped.
     * Mirrors {@link #send(BatchEvent)} including the failure handling.
     */
    @Override
    public synchronized void send(final Event event)  {
        if (rpcClient == null) {
            rpcClient = connect(agents, retries, connectTimeout, requestTimeout);
        }

        if (rpcClient != null) {
            try {
                rpcClient.append(event);
            } catch (final Exception ex) {
                // Drop the client so the next send() attempts a reconnect.
                rpcClient.close();
                rpcClient = null;
                final String msg = "Unable to write to " + getName() + " at " + agents[current].getHost() + ":" +
                    agents[current].getPort();
                LOGGER.warn(msg, ex);
                throw new AppenderLoggingException("No Flume agents are available");
            }
        } else {
            final String msg = "Unable to write to " + getName() + " at " + agents[current].getHost() + ":" +
                agents[current].getPort();
            LOGGER.warn(msg);
            throw new AppenderLoggingException("No Flume agents are available");
        }
    }

    /**
     * There is a very good chance that this will always return the first agent even if it isn't available.
     * @param agents The list of agents to choose from
     * @return The FlumeEventAvroServer.
     */
    private RpcClient connect(final Agent[] agents, int retries, final int connectTimeout, final int requestTimeout) {
        try {
            final Properties props = new Properties();

            // A single agent uses the plain client; several use failover.
            props.put("client.type", agents.length > 1 ? "default_failover" : "default");

            int count = 1;
            final StringBuilder sb = new StringBuilder();
            for (final Agent agent : agents) {
                if (sb.length() > 0) {
                    sb.append(" ");
                }
                final String hostName = "host" + count++;
                props.put("hosts." + hostName, agent.getHost() + ":" + agent.getPort());
                sb.append(hostName);
            }
            props.put("hosts", sb.toString());
            if (batchSize > 0) {
                props.put("batch-size", Integer.toString(batchSize));
            }
            if (retries > 1) {
                // Cap retries, then scale by agent count for the failover client.
                if (retries > MAX_RECONNECTS) {
                    retries = MAX_RECONNECTS;
                }
                props.put("max-attempts", Integer.toString(retries * agents.length));
            }
            if (requestTimeout >= MINIMUM_TIMEOUT) {
                props.put("request-timeout", Integer.toString(requestTimeout));
            }
            if (connectTimeout >= MINIMUM_TIMEOUT) {
                props.put("connect-timeout", Integer.toString(connectTimeout));
            }
            return RpcClientFactory.getInstance(props);
        } catch (final Exception ex) {
            LOGGER.error("Unable to create Flume RPCClient: {}", ex.getMessage());
            return null;
        }
    }

    /** Closes the RPC client (best effort) when the manager is released. */
    @Override
    protected void releaseSub() {
        if (rpcClient != null) {
            try {
                rpcClient.close();
            } catch (final Exception ex) {
                LOGGER.error("Attempt to close RPC client failed", ex);
            }
        }
        rpcClient = null;
    }

    /**
     * Factory data.
     */
    private static class FactoryData {
        private final String name;
        private final Agent[] agents;
        private final int batchSize;
        private final int retries;
        // NOTE(review): field name has a typo ("conntect"); private to this
        // class, so it is kept as-is here.
        private final int conntectTimeout;
        private final int requestTimeout;

        /**
         * Constructor.
         * @param name The name of the Appender.
         * @param agents The agents.
         * @param batchSize The number of events to include in a batch.
         * @param retries The number of connection retries.
         * @param connectTimeout The connection timeout in ms.
         * @param requestTimeout The request timeout in ms.
         */
        public FactoryData(final String name, final Agent[] agents, final int batchSize, final int retries,
                           final int connectTimeout, final int requestTimeout) {
            this.name = name;
            this.agents = agents;
            this.batchSize = batchSize;
            this.retries = retries;
            this.conntectTimeout = connectTimeout;
            this.requestTimeout = requestTimeout;
        }
    }

    /**
     * Avro Manager Factory.
     */
    private static class AvroManagerFactory implements ManagerFactory<FlumeAvroManager, FactoryData> {

        /**
         * Create the FlumeAvroManager.
         * @param name The name of the entity to manage.
         * @param data The data required to create the entity.
         * @return The FlumeAvroManager.
         */
        @Override
        public FlumeAvroManager createManager(final String name, final FactoryData data) {
            try {

                return new FlumeAvroManager(name, data.name, data.agents, data.batchSize, data.retries,
                    data.conntectTimeout, data.requestTimeout);
            } catch (final Exception ex) {
                LOGGER.error("Could not create FlumeAvroManager", ex);
            }
            return null;
        }
    }
}
| ClarenceAu/log4j2 | log4j-flume-ng/src/main/java/org/apache/logging/log4j/flume/appender/FlumeAvroManager.java | Java | apache-2.0 | 10,591 |
package net.stickycode.configuration.value;
import net.stickycode.configuration.ConfigurationValue;
/**
 * A configuration value originating from the system. A system value yields to
 * application values and to other system values, and takes precedence over
 * everything else.
 */
public class SystemValue
    implements ConfigurationValue {

  private String value;

  public SystemValue(String value) {
    this.value = value;
  }

  @Override
  public String get() {
    return value;
  }

  @Override
  public boolean hasPrecedence(ConfigurationValue candidate) {
    // Application values always win, and an existing system value is kept.
    Class<?> candidateType = candidate.getClass();
    boolean yields = ApplicationValue.class.isAssignableFrom(candidateType)
        || SystemValue.class.isAssignableFrom(candidateType);
    return !yields;
  }

  @Override
  public String toString() {
    return getClass().getSimpleName() + "{" + value + "}";
  }
}
| walterDurin/stickycode | net.stickycode.configuration/sticky-configuration/src/main/java/net/stickycode/configuration/value/SystemValue.java | Java | apache-2.0 | 672 |
// Generated by xsd compiler for android/java
// DO NOT CHANGE!
package com.ebay.marketplace.search.v1.services;
import com.leansoft.nano.annotation.*;
/**
*
* Reserved for future use.
*
*/
@RootElement(name = "findItemsForFavoriteSearchResponse", namespace = "http://www.ebay.com/marketplace/search/v1/services")
public class FindItemsForFavoriteSearchResponse extends BaseFindingServiceResponse {
@Element
private CategoryHistogramContainer categoryHistogramContainer;
@Element
private AspectHistogramContainer aspectHistogramContainer;
@Element
private ConditionHistogramContainer conditionHistogramContainer;
/**
* public getter
*
*
* Reserved for future use.
*
*
* @returns com.ebay.marketplace.search.v1.services.CategoryHistogramContainer
*/
public CategoryHistogramContainer getCategoryHistogramContainer() {
return this.categoryHistogramContainer;
}
/**
* public setter
*
*
* Reserved for future use.
*
*
* @param com.ebay.marketplace.search.v1.services.CategoryHistogramContainer
*/
public void setCategoryHistogramContainer(CategoryHistogramContainer categoryHistogramContainer) {
this.categoryHistogramContainer = categoryHistogramContainer;
}
/**
* public getter
*
*
* Reserved for future use.
*
*
* @returns com.ebay.marketplace.search.v1.services.AspectHistogramContainer
*/
public AspectHistogramContainer getAspectHistogramContainer() {
return this.aspectHistogramContainer;
}
/**
* public setter
*
*
* Reserved for future use.
*
*
* @param com.ebay.marketplace.search.v1.services.AspectHistogramContainer
*/
public void setAspectHistogramContainer(AspectHistogramContainer aspectHistogramContainer) {
this.aspectHistogramContainer = aspectHistogramContainer;
}
/**
* public getter
*
*
* Reserved for future use.
*
*
* @returns com.ebay.marketplace.search.v1.services.ConditionHistogramContainer
*/
public ConditionHistogramContainer getConditionHistogramContainer() {
return this.conditionHistogramContainer;
}
/**
* public setter
*
*
* Reserved for future use.
*
*
* @param com.ebay.marketplace.search.v1.services.ConditionHistogramContainer
*/
public void setConditionHistogramContainer(ConditionHistogramContainer conditionHistogramContainer) {
this.conditionHistogramContainer = conditionHistogramContainer;
}
} | bulldog2011/nano-rest | sample/EBaySearch/src/com/ebay/marketplace/search/v1/services/FindItemsForFavoriteSearchResponse.java | Java | apache-2.0 | 2,540 |
package transaction
import (
. "DNA/common"
"DNA/common/serialization"
"DNA/core/contract"
"DNA/core/contract/program"
sig "DNA/core/signature"
"DNA/core/transaction/payload"
. "DNA/errors"
"crypto/sha256"
"errors"
"fmt"
"io"
"sort"
)
// TransactionType tags a transaction with its payload format; each type is
// deserialized and processed by a dedicated code path.
type TransactionType byte

const (
	BookKeeping    TransactionType = 0x00
	IssueAsset     TransactionType = 0x01
	BookKeeper     TransactionType = 0x02
	PrivacyPayload TransactionType = 0x20
	RegisterAsset  TransactionType = 0x40
	TransferAsset  TransactionType = 0x80
	Record         TransactionType = 0x81
	DeployCode     TransactionType = 0xd0
	DataFile       TransactionType = 0x12
)
// Payload abstracts the type-specific body of a transaction. Each payload
// kind has its own wire structure, selected by the transaction type.
type Payload interface {
	// Data returns the raw payload bytes for the given payload version.
	Data(version byte) []byte

	// Serialize writes the payload to w using the given payload version.
	Serialize(w io.Writer, version byte) error
	// Deserialize reads the payload from r using the given payload version.
	Deserialize(r io.Reader, version byte) error
}
//Transaction is used for carry information or action to Ledger
//validated transaction will be added to block and updates state correspondingly

// TxStore resolves referenced transactions from the ledger; it must be set
// before methods that follow UTXO references are called.
var TxStore ILedgerStore

type Transaction struct {
	TxType         TransactionType
	PayloadVersion byte
	Payload        Payload
	Attributes     []*TxAttribute
	UTXOInputs     []*UTXOTxInput
	BalanceInputs  []*BalanceTxInput
	Outputs        []*TxOutput
	Programs       []*program.Program

	//Inputs/Outputs map base on Asset (needn't serialize)
	AssetOutputs      map[Uint256][]*TxOutput
	AssetInputAmount  map[Uint256]Fixed64
	AssetOutputAmount map[Uint256]Fixed64

	// Cached double-SHA256 of the signable data; filled lazily by Hash().
	hash *Uint256
}
//Serialize the Transaction
func (tx *Transaction) Serialize(w io.Writer) error {
err := tx.SerializeUnsigned(w)
if err != nil {
return NewDetailErr(err, ErrNoCode, "Transaction txSerializeUnsigned Serialize failed.")
}
//Serialize Transaction's programs
lens := uint64(len(tx.Programs))
err = serialization.WriteVarUint(w, lens)
if err != nil {
return NewDetailErr(err, ErrNoCode, "Transaction WriteVarUint failed.")
}
if lens > 0 {
for _, p := range tx.Programs {
err = p.Serialize(w)
if err != nil {
return NewDetailErr(err, ErrNoCode, "Transaction Programs Serialize failed.")
}
}
}
return nil
}
//Serialize the Transaction data without contracts
func (tx *Transaction) SerializeUnsigned(w io.Writer) error {
//txType
w.Write([]byte{byte(tx.TxType)})
//PayloadVersion
w.Write([]byte{tx.PayloadVersion})
//Payload
if tx.Payload == nil {
return errors.New("Transaction Payload is nil.")
}
tx.Payload.Serialize(w, tx.PayloadVersion)
//[]*txAttribute
err := serialization.WriteVarUint(w, uint64(len(tx.Attributes)))
if err != nil {
return NewDetailErr(err, ErrNoCode, "Transaction item txAttribute length serialization failed.")
}
if len(tx.Attributes) > 0 {
for _, attr := range tx.Attributes {
attr.Serialize(w)
}
}
//[]*UTXOInputs
err = serialization.WriteVarUint(w, uint64(len(tx.UTXOInputs)))
if err != nil {
return NewDetailErr(err, ErrNoCode, "Transaction item UTXOInputs length serialization failed.")
}
if len(tx.UTXOInputs) > 0 {
for _, utxo := range tx.UTXOInputs {
utxo.Serialize(w)
}
}
// TODO BalanceInputs
//[]*Outputs
err = serialization.WriteVarUint(w, uint64(len(tx.Outputs)))
if err != nil {
return NewDetailErr(err, ErrNoCode, "Transaction item Outputs length serialization failed.")
}
if len(tx.Outputs) > 0 {
for _, output := range tx.Outputs {
output.Serialize(w)
}
}
return nil
}
//deserialize the Transaction
func (tx *Transaction) Deserialize(r io.Reader) error {
// tx deserialize
err := tx.DeserializeUnsigned(r)
if err != nil {
return NewDetailErr(err, ErrNoCode, "transaction Deserialize error")
}
// tx program
lens, err := serialization.ReadVarUint(r, 0)
if err != nil {
return NewDetailErr(err, ErrNoCode, "transaction tx program Deserialize error")
}
programHashes := []*program.Program{}
if lens > 0 {
for i := 0; i < int(lens); i++ {
outputHashes := new(program.Program)
outputHashes.Deserialize(r)
programHashes = append(programHashes, outputHashes)
}
tx.Programs = programHashes
}
return nil
}
// DeserializeUnsigned reads the transaction type byte from r, records it on
// the transaction, and then parses the rest of the unsigned data.
func (tx *Transaction) DeserializeUnsigned(r io.Reader) error {
	var typeByte [1]byte
	if _, err := io.ReadFull(r, typeByte[:]); err != nil {
		return err
	}
	tx.TxType = TransactionType(typeByte[0])
	return tx.DeserializeUnsignedWithoutType(r)
}
// DeserializeUnsignedWithoutType parses everything after the type byte:
// payload version, the payload (dispatched on tx.TxType, which must already
// be set), then attributes, UTXO inputs and outputs, each prefixed by a
// varuint count.
func (tx *Transaction) DeserializeUnsignedWithoutType(r io.Reader) error {
	var payloadVersion [1]byte
	_, err := io.ReadFull(r, payloadVersion[:])
	// NOTE(review): the version byte is assigned before err is checked; on a
	// short read the byte is garbage, but the error is still returned below.
	tx.PayloadVersion = payloadVersion[0]
	if err != nil {
		return err
	}

	//payload
	// Instantiate the concrete payload type matching the transaction type.
	switch tx.TxType {
	case RegisterAsset:
		tx.Payload = new(payload.RegisterAsset)
	case IssueAsset:
		tx.Payload = new(payload.IssueAsset)
	case TransferAsset:
		tx.Payload = new(payload.TransferAsset)
	case BookKeeping:
		tx.Payload = new(payload.BookKeeping)
	case Record:
		tx.Payload = new(payload.Record)
	case BookKeeper:
		tx.Payload = new(payload.BookKeeper)
	case PrivacyPayload:
		tx.Payload = new(payload.PrivacyPayload)
	case DataFile:
		tx.Payload = new(payload.DataFile)
	default:
		return errors.New("[Transaction],invalide transaction type.")
	}
	err = tx.Payload.Deserialize(r, tx.PayloadVersion)
	if err != nil {
		return NewDetailErr(err, ErrNoCode, "Payload Parse error")
	}
	//attributes
	Len, err := serialization.ReadVarUint(r, 0)
	if err != nil {
		return err
	}
	if Len > uint64(0) {
		for i := uint64(0); i < Len; i++ {
			attr := new(TxAttribute)
			err = attr.Deserialize(r)
			if err != nil {
				return err
			}
			tx.Attributes = append(tx.Attributes, attr)
		}
	}
	//UTXOInputs
	Len, err = serialization.ReadVarUint(r, 0)
	if err != nil {
		return err
	}
	if Len > uint64(0) {
		for i := uint64(0); i < Len; i++ {
			utxo := new(UTXOTxInput)
			err = utxo.Deserialize(r)
			if err != nil {
				return err
			}
			tx.UTXOInputs = append(tx.UTXOInputs, utxo)
		}
	}
	//TODO balanceInputs
	//Outputs
	Len, err = serialization.ReadVarUint(r, 0)
	if err != nil {
		return err
	}
	if Len > uint64(0) {
		for i := uint64(0); i < Len; i++ {
			output := new(TxOutput)
			// NOTE(review): output.Deserialize's error is not checked here,
			// unlike attributes and inputs above — confirm intentional.
			output.Deserialize(r)
			tx.Outputs = append(tx.Outputs, output)
		}
	}
	return nil
}
// GetProgramHashes collects the program hashes whose signatures are required
// to validate this transaction: the owners of all referenced outputs, any
// Script attributes, plus type-specific signers (issuer/controller). The
// result is de-duplicated and sorted for a deterministic order.
func (tx *Transaction) GetProgramHashes() ([]Uint160, error) {
	if tx == nil {
		return []Uint160{}, errors.New("[Transaction],GetProgramHashes transaction is nil.")
	}
	hashs := []Uint160{}
	uniqHashes := []Uint160{}
	// add inputUTXO's transaction
	referenceWithUTXO_Output, err := tx.GetReference()
	if err != nil {
		return nil, NewDetailErr(err, ErrNoCode, "[Transaction], GetProgramHashes failed.")
	}
	for _, output := range referenceWithUTXO_Output {
		programHash := output.ProgramHash
		hashs = append(hashs, programHash)
	}
	for _, attribute := range tx.Attributes {
		if attribute.Usage == Script {
			dataHash, err := Uint160ParseFromBytes(attribute.Data)
			if err != nil {
				return nil, NewDetailErr(errors.New("[Transaction], GetProgramHashes err."), ErrNoCode, "")
			}
			hashs = append(hashs, Uint160(dataHash))
		}
	}
	switch tx.TxType {
	case RegisterAsset:
		issuer := tx.Payload.(*payload.RegisterAsset).Issuer
		signatureRedeemScript, err := contract.CreateSignatureRedeemScript(issuer)
		if err != nil {
			return nil, NewDetailErr(err, ErrNoCode, "[Transaction], GetProgramHashes CreateSignatureRedeemScript failed.")
		}

		astHash, err := ToCodeHash(signatureRedeemScript)
		if err != nil {
			return nil, NewDetailErr(err, ErrNoCode, "[Transaction], GetProgramHashes ToCodeHash failed.")
		}
		hashs = append(hashs, astHash)
	case IssueAsset:
		result := tx.GetMergedAssetIDValueFromOutputs()
		// NOTE(review): stale error check — GetMergedAssetIDValueFromOutputs
		// does not return an error; err here is still nil from GetReference.
		if err != nil {
			return nil, NewDetailErr(err, ErrNoCode, "[Transaction], GetTransactionResults failed.")
		}
		for k := range result {
			// Every issued asset must trace back to its RegisterAsset tx,
			// whose controller must sign the issue.
			tx, err := TxStore.GetTransaction(k)
			if err != nil {
				return nil, NewDetailErr(err, ErrNoCode, fmt.Sprintf("[Transaction], GetTransaction failed With AssetID:=%x", k))
			}
			if tx.TxType != RegisterAsset {
				return nil, NewDetailErr(errors.New("[Transaction] error"), ErrNoCode, fmt.Sprintf("[Transaction], Transaction Type ileage With AssetID:=%x", k))
			}

			switch v1 := tx.Payload.(type) {
			case *payload.RegisterAsset:
				hashs = append(hashs, v1.Controller)
			default:
				// NOTE(review): format string has no verb for k; the asset ID
				// is appended as an extra-arg artifact in the message.
				return nil, NewDetailErr(errors.New("[Transaction] error"), ErrNoCode, fmt.Sprintf("[Transaction], payload is illegal", k))
			}
		}
	case DataFile:
		issuer := tx.Payload.(*payload.DataFile).Issuer
		signatureRedeemScript, err := contract.CreateSignatureRedeemScript(issuer)
		if err != nil {
			return nil, NewDetailErr(err, ErrNoCode, "[Transaction], GetProgramHashes CreateSignatureRedeemScript failed.")
		}

		astHash, err := ToCodeHash(signatureRedeemScript)
		if err != nil {
			return nil, NewDetailErr(err, ErrNoCode, "[Transaction], GetProgramHashes ToCodeHash failed.")
		}
		hashs = append(hashs, astHash)
	case TransferAsset:
	case Record:
	case BookKeeper:
		issuer := tx.Payload.(*payload.BookKeeper).Issuer
		signatureRedeemScript, err := contract.CreateSignatureRedeemScript(issuer)
		if err != nil {
			return nil, NewDetailErr(err, ErrNoCode, "[Transaction - BookKeeper], GetProgramHashes CreateSignatureRedeemScript failed.")
		}

		astHash, err := ToCodeHash(signatureRedeemScript)
		if err != nil {
			return nil, NewDetailErr(err, ErrNoCode, "[Transaction - BookKeeper], GetProgramHashes ToCodeHash failed.")
		}
		hashs = append(hashs, astHash)
	case PrivacyPayload:
		issuer := tx.Payload.(*payload.PrivacyPayload).EncryptAttr.(*payload.EcdhAes256).FromPubkey
		signatureRedeemScript, err := contract.CreateSignatureRedeemScript(issuer)
		if err != nil {
			return nil, NewDetailErr(err, ErrNoCode, "[Transaction], GetProgramHashes CreateSignatureRedeemScript failed.")
		}

		astHash, err := ToCodeHash(signatureRedeemScript)
		if err != nil {
			return nil, NewDetailErr(err, ErrNoCode, "[Transaction], GetProgramHashes ToCodeHash failed.")
		}
		hashs = append(hashs, astHash)
	default:
	}
	//remove dupilicated hashes
	uniq := make(map[Uint160]bool)
	for _, v := range hashs {
		uniq[v] = true
	}
	for k := range uniq {
		uniqHashes = append(uniqHashes, k)
	}
	sort.Sort(byProgramHashes(uniqHashes))
	return uniqHashes, nil
}
// SetPrograms replaces the transaction's verification programs.
func (tx *Transaction) SetPrograms(programs []*program.Program) {
	tx.Programs = programs
}

// GetPrograms returns the transaction's verification programs.
func (tx *Transaction) GetPrograms() []*program.Program {
	return tx.Programs
}

// GetOutputHashes is a stub; it currently always returns an empty slice.
func (tx *Transaction) GetOutputHashes() ([]Uint160, error) {
	//TODO: implement Transaction.GetOutputHashes()

	return []Uint160{}, nil
}

// GenerateAssetMaps is a stub with no effect yet.
func (tx *Transaction) GenerateAssetMaps() {
	//TODO: implement Transaction.GenerateAssetMaps()
}

// GetMessage returns the serialized signable data used for signing and
// signature verification.
func (tx *Transaction) GetMessage() []byte {
	return sig.GetHashData(tx)
}
// Hash returns the transaction hash: SHA-256 applied twice to the signable
// data. The result is computed once and cached on the transaction.
func (tx *Transaction) Hash() Uint256 {
	if tx.hash == nil {
		signable := sig.GetHashData(tx)
		first := sha256.Sum256([]byte(signable))
		second := Uint256(sha256.Sum256(first[:]))
		tx.hash = &second
	}
	return *tx.hash
}
// SetHash overrides the cached transaction hash.
func (tx *Transaction) SetHash(hash Uint256) {
	tx.hash = &hash
}

// Type identifies this inventory item as a transaction.
func (tx *Transaction) Type() InventoryType {
	return TRANSACTION
}

// Verify is a stub; it currently performs no validation and returns nil.
func (tx *Transaction) Verify() error {
	//TODO: Verify()
	return nil
}
// GetReference resolves each UTXO input to the output it spends by loading
// the referenced transaction from TxStore. RegisterAsset transactions have
// no inputs to resolve; for them both return values are nil.
func (tx *Transaction) GetReference() (map[*UTXOTxInput]*TxOutput, error) {
	if tx.TxType == RegisterAsset {
		return nil, nil
	}
	//UTXO input /  Outputs
	reference := make(map[*UTXOTxInput]*TxOutput)
	// Key index，v UTXOInput
	for _, utxo := range tx.UTXOInputs {
		transaction, err := TxStore.GetTransaction(utxo.ReferTxID)
		if err != nil {
			return nil, NewDetailErr(err, ErrNoCode, "[Transaction], GetReference failed.")
		}
		index := utxo.ReferTxOutputIndex
		reference[utxo] = transaction.Outputs[index]
	}
	return reference, nil
}
// GetTransactionResults returns, per asset ID, the total referenced input
// value minus the total output value. Assets present on only one side are
// included with their signed total (negative for output-only assets).
func (tx *Transaction) GetTransactionResults() (TransactionResult, error) {
	result := make(map[Uint256]Fixed64)
	outputResult := tx.GetMergedAssetIDValueFromOutputs()
	InputResult, err := tx.GetMergedAssetIDValueFromReference()
	if err != nil {
		return nil, err
	}
	//calc the balance of input vs output
	for outputAssetid, outputValue := range outputResult {
		if inputValue, ok := InputResult[outputAssetid]; ok {
			result[outputAssetid] = inputValue - outputValue
		} else {
			// Missing map entries read as zero, so this stores -outputValue.
			result[outputAssetid] -= outputValue
		}
	}
	for inputAssetid, inputValue := range InputResult {
		if _, exist := result[inputAssetid]; !exist {
			result[inputAssetid] += inputValue
		}
	}
	return result, nil
}
// GetMergedAssetIDValueFromOutputs sums this transaction's output values,
// grouped by asset ID.
func (tx *Transaction) GetMergedAssetIDValueFromOutputs() TransactionResult {
	totals := make(map[Uint256]Fixed64)
	for _, out := range tx.Outputs {
		// A missing key reads as zero, so += covers both first and repeat hits.
		totals[out.AssetID] += out.Value
	}
	return totals
}
// GetMergedAssetIDValueFromReference sums the values of the outputs spent by
// this transaction's inputs, grouped by asset ID.
func (tx *Transaction) GetMergedAssetIDValueFromReference() (TransactionResult, error) {
	reference, err := tx.GetReference()
	if err != nil {
		return nil, err
	}
	totals := make(map[Uint256]Fixed64)
	for _, spent := range reference {
		// A missing key reads as zero, so += covers both first and repeat hits.
		totals[spent.AssetID] += spent.Value
	}
	return totals, nil
}
// byProgramHashes implements sort.Interface over a slice of program hashes,
// ordering them ascending by CompareTo (ties count as "less").
type byProgramHashes []Uint160

func (a byProgramHashes) Len() int      { return len(a) }
func (a byProgramHashes) Swap(i, j int) { a[i], a[j] = a[j], a[i] }

// Less reports whether element i sorts before (or equal to) element j.
func (a byProgramHashes) Less(i, j int) bool {
	return a[i].CompareTo(a[j]) <= 0
}
| BooniesFX/DNA | core/transaction/transaction.go | GO | apache-2.0 | 13,443 |
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.benchmark.impl.statistic.bestscore;
import java.util.List;
import org.optaplanner.benchmark.config.statistic.ProblemStatisticType;
import org.optaplanner.benchmark.impl.result.SubSingleBenchmarkResult;
import org.optaplanner.benchmark.impl.statistic.ProblemBasedSubSingleStatistic;
import org.optaplanner.core.api.domain.solution.Solution;
import org.optaplanner.core.api.solver.Solver;
import org.optaplanner.core.api.solver.event.BestSolutionChangedEvent;
import org.optaplanner.core.api.solver.event.SolverEventListener;
import org.optaplanner.core.impl.score.definition.ScoreDefinition;
/**
 * Records the best-score time series for one sub-single benchmark run.
 * A listener is registered on the solver for the duration of the run and
 * appends one {@link BestScoreStatisticPoint} per best-solution change.
 */
public class BestScoreSubSingleStatistic extends ProblemBasedSubSingleStatistic<BestScoreStatisticPoint> {

    // Registered in open(...) and removed again in close(...).
    private final BestScoreSubSingleStatisticListener listener;

    public BestScoreSubSingleStatistic(SubSingleBenchmarkResult subSingleBenchmarkResult) {
        super(subSingleBenchmarkResult, ProblemStatisticType.BEST_SCORE);
        listener = new BestScoreSubSingleStatisticListener();
    }

    // ************************************************************************
    // Lifecycle methods
    // ************************************************************************

    /** Starts recording: subscribes the listener to solver events. */
    public void open(Solver solver) {
        solver.addEventListener(listener);
    }

    /** Stops recording: unsubscribes the listener again. */
    public void close(Solver solver) {
        solver.removeEventListener(listener);
    }

    /** Appends a (timeMillisSpent, score) point for every new best solution. */
    private class BestScoreSubSingleStatisticListener implements SolverEventListener<Solution> {

        public void bestSolutionChanged(BestSolutionChangedEvent<Solution> event) {
            pointList.add(new BestScoreStatisticPoint(
                    event.getTimeMillisSpent(), event.getNewBestSolution().getScore()));
        }

    }

    // ************************************************************************
    // CSV methods
    // ************************************************************************

    @Override
    protected String getCsvHeader() {
        return BestScoreStatisticPoint.buildCsvLine("timeMillisSpent", "score");
    }

    @Override
    protected BestScoreStatisticPoint createPointFromCsvLine(ScoreDefinition scoreDefinition,
            List<String> csvLine) {
        return new BestScoreStatisticPoint(Long.valueOf(csvLine.get(0)),
                scoreDefinition.parseScore(csvLine.get(1)));
    }

}
| eshen1991/optaplanner | optaplanner-benchmark/src/main/java/org/optaplanner/benchmark/impl/statistic/bestscore/BestScoreSubSingleStatistic.java | Java | apache-2.0 | 2,938 |
# Copyright (C) 2013 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import struct
import socket
import logging
import netaddr
from ryu.ofproto import ether
from ryu.ofproto import inet
from ryu.ofproto import ofproto_v1_2
from ryu.ofproto import ofproto_v1_2_parser
from ryu.lib import hub
from ryu.lib import mac
LOG = logging.getLogger('ryu.lib.ofctl_v1_2')
DEFAULT_TIMEOUT = 1.0
def str_to_int(src):
    """Convert *src* to int, honoring a 0x/0X hexadecimal prefix.

    Non-string inputs are returned unchanged.
    """
    if not isinstance(src, str):
        return src
    base = 16 if src[:2] in ("0x", "0X") else 10
    return int(src, base)
def to_action(dp, dic):
    """Build a single OFPAction object from its dict description.

    Returns None when dic['type'] is not a recognized plain action; the
    caller then tries to interpret the entry as an instruction instead.
    Keys without a default (e.g. 'ethertype', 'queue_id') raise TypeError
    when missing, because int(None) fails.
    """
    ofp = dp.ofproto
    parser = dp.ofproto_parser
    action_type = dic.get('type')
    if action_type == 'OUTPUT':
        # Port and buffer length fall back to "any port" / maximum length.
        out_port = int(dic.get('port', ofp.OFPP_ANY))
        max_len = int(dic.get('max_len', ofp.OFPCML_MAX))
        result = parser.OFPActionOutput(out_port, max_len)
    elif action_type == 'COPY_TTL_OUT':
        result = parser.OFPActionCopyTtlOut()
    elif action_type == 'COPY_TTL_IN':
        result = parser.OFPActionCopyTtlIn()
    elif action_type == 'SET_MPLS_TTL':
        mpls_ttl = int(dic.get('mpls_ttl'))
        result = parser.OFPActionSetMplsTtl(mpls_ttl)
    elif action_type == 'DEC_MPLS_TTL':
        result = parser.OFPActionDecMplsTtl()
    elif action_type == 'PUSH_VLAN':
        ethertype = int(dic.get('ethertype'))
        result = parser.OFPActionPushVlan(ethertype)
    elif action_type == 'POP_VLAN':
        result = parser.OFPActionPopVlan()
    elif action_type == 'PUSH_MPLS':
        ethertype = int(dic.get('ethertype'))
        result = parser.OFPActionPushMpls(ethertype)
    elif action_type == 'POP_MPLS':
        ethertype = int(dic.get('ethertype'))
        result = parser.OFPActionPopMpls(ethertype)
    elif action_type == 'SET_QUEUE':
        queue_id = int(dic.get('queue_id'))
        result = parser.OFPActionSetQueue(queue_id)
    elif action_type == 'GROUP':
        group_id = int(dic.get('group_id'))
        result = parser.OFPActionGroup(group_id)
    elif action_type == 'SET_NW_TTL':
        nw_ttl = int(dic.get('nw_ttl'))
        result = parser.OFPActionSetNwTtl(nw_ttl)
    elif action_type == 'DEC_NW_TTL':
        result = parser.OFPActionDecNwTtl()
    elif action_type == 'SET_FIELD':
        # Field name and value are passed straight through to the parser.
        field = dic.get('field')
        value = dic.get('value')
        result = parser.OFPActionSetField(**{field: value})
    else:
        result = None
    return result
def to_actions(dp, acts):
    """Convert a list of action/instruction dicts into OFPInstruction objects.

    Plain actions are collected into a single APPLY_ACTIONS instruction
    (appended even when the action list is empty); GOTO_TABLE and
    WRITE_METADATA entries become their own instructions. Unknown types
    are logged and skipped.
    """
    inst = []
    actions = []
    ofp = dp.ofproto
    parser = dp.ofproto_parser
    for a in acts:
        action = to_action(dp, a)
        if action is not None:
            actions.append(action)
        else:
            action_type = a.get('type')
            if action_type == 'GOTO_TABLE':
                table_id = int(a.get('table_id'))
                inst.append(parser.OFPInstructionGotoTable(table_id))
            elif action_type == 'WRITE_METADATA':
                metadata = str_to_int(a.get('metadata'))
                # Default to a full mask when none is given.
                metadata_mask = (str_to_int(a['metadata_mask'])
                                 if 'metadata_mask' in a
                                 else parser.UINT64_MAX)
                inst.append(
                    parser.OFPInstructionWriteMetadata(
                        metadata, metadata_mask))
            else:
                # Lazy %-style logging args (consistent with the rest of
                # this module) instead of eager string interpolation.
                LOG.debug('Unknown action type: %s', action_type)
    inst.append(parser.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS,
                                             actions))
    return inst
def action_to_str(act):
    """Render an OFPAction object as a human-readable string."""
    # Action types whose rendering carries no argument.
    plain = {
        ofproto_v1_2.OFPAT_COPY_TTL_OUT: 'COPY_TTL_OUT',
        ofproto_v1_2.OFPAT_COPY_TTL_IN: 'COPY_TTL_IN',
        ofproto_v1_2.OFPAT_DEC_MPLS_TTL: 'DEC_MPLS_TTL',
        ofproto_v1_2.OFPAT_POP_VLAN: 'POP_VLAN',
        ofproto_v1_2.OFPAT_DEC_NW_TTL: 'DEC_NW_TTL',
    }
    # Action types rendered with one attribute of the action object.
    formatted = {
        ofproto_v1_2.OFPAT_OUTPUT: lambda a: 'OUTPUT:' + str(a.port),
        ofproto_v1_2.OFPAT_SET_MPLS_TTL: lambda a: 'SET_MPLS_TTL:' + str(a.mpls_ttl),
        ofproto_v1_2.OFPAT_PUSH_VLAN: lambda a: 'PUSH_VLAN:' + str(a.ethertype),
        ofproto_v1_2.OFPAT_PUSH_MPLS: lambda a: 'PUSH_MPLS:' + str(a.ethertype),
        ofproto_v1_2.OFPAT_POP_MPLS: lambda a: 'POP_MPLS:' + str(a.ethertype),
        ofproto_v1_2.OFPAT_SET_QUEUE: lambda a: 'SET_QUEUE:' + str(a.queue_id),
        ofproto_v1_2.OFPAT_GROUP: lambda a: 'GROUP:' + str(a.group_id),
        ofproto_v1_2.OFPAT_SET_NW_TTL: lambda a: 'SET_NW_TTL:' + str(a.nw_ttl),
        ofproto_v1_2.OFPAT_SET_FIELD: lambda a: 'SET_FIELD: {%s:%s}' % (a.key, a.value),
    }
    action_type = act.cls_action_type
    if action_type in plain:
        return plain[action_type]
    if action_type in formatted:
        return formatted[action_type](act)
    return 'UNKNOWN'
def actions_to_str(instructions):
    """Render a list of OFPInstruction objects as flat string descriptions.

    APPLY/WRITE-actions instructions contribute one string per contained
    action; GOTO_TABLE and WRITE_METADATA contribute one string each.
    Any other instruction type is silently skipped.
    """
    actions = []
    for instruction in instructions:
        if isinstance(instruction,
                      ofproto_v1_2_parser.OFPInstructionActions):
            for a in instruction.actions:
                actions.append(action_to_str(a))
        elif isinstance(instruction,
                        ofproto_v1_2_parser.OFPInstructionGotoTable):
            buf = 'GOTO_TABLE:' + str(instruction.table_id)
            actions.append(buf)
        elif isinstance(instruction,
                        ofproto_v1_2_parser.OFPInstructionWriteMetadata):
            # Include the mask only when one is set.
            buf = ('WRITE_METADATA:0x%x/0x%x' % (instruction.metadata,
                                                 instruction.metadata_mask)
                   if instruction.metadata_mask
                   else 'WRITE_METADATA:0x%x' % instruction.metadata)
            actions.append(buf)
        else:
            continue
    return actions
def to_match(dp, attrs):
    """Build an OFPMatch from a dict mapping match-field names to values.

    Accepts both OF1.2 field names and legacy OF1.0-style aliases
    (dl_*, nw_*, tp_*). For ARP packets, nw_src/nw_dst are re-interpreted
    as arp_spa/arp_tpa; tp_src/tp_dst are resolved to TCP or UDP ports
    based on the ip_proto/nw_proto value. Unknown keys are passed through
    to OFPMatch unchanged.
    """
    # Work on a copy: the ARP aliasing below deletes keys, and the caller's
    # dict must not be mutated as a side effect.
    attrs = dict(attrs)
    convert = {'in_port': int,
               'in_phy_port': int,
               'metadata': to_match_metadata,
               'dl_dst': to_match_eth,
               'dl_src': to_match_eth,
               'eth_dst': to_match_eth,
               'eth_src': to_match_eth,
               'dl_type': int,
               'eth_type': int,
               'dl_vlan': to_match_vid,
               'vlan_vid': to_match_vid,
               'vlan_pcp': int,
               'ip_dscp': int,
               'ip_ecn': int,
               'nw_proto': int,
               'ip_proto': int,
               'nw_src': to_match_ip,
               'nw_dst': to_match_ip,
               'ipv4_src': to_match_ip,
               'ipv4_dst': to_match_ip,
               'tp_src': int,
               'tp_dst': int,
               'tcp_src': int,
               'tcp_dst': int,
               'udp_src': int,
               'udp_dst': int,
               'sctp_src': int,
               'sctp_dst': int,
               'icmpv4_type': int,
               'icmpv4_code': int,
               'arp_op': int,
               'arp_spa': to_match_ip,
               'arp_tpa': to_match_ip,
               'arp_sha': to_match_eth,
               'arp_tha': to_match_eth,
               'ipv6_src': to_match_ip,
               'ipv6_dst': to_match_ip,
               'ipv6_flabel': int,
               'icmpv6_type': int,
               'icmpv6_code': int,
               'ipv6_nd_target': to_match_ip,
               'ipv6_nd_sll': to_match_eth,
               'ipv6_nd_tll': to_match_eth,
               'mpls_label': int,
               'mpls_tc': int}

    # Legacy OF1.0 names -> OF1.2 OXM names.
    keys = {'dl_dst': 'eth_dst',
            'dl_src': 'eth_src',
            'dl_type': 'eth_type',
            'dl_vlan': 'vlan_vid',
            'nw_src': 'ipv4_src',
            'nw_dst': 'ipv4_dst',
            'nw_proto': 'ip_proto'}

    if attrs.get('dl_type') == ether.ETH_TYPE_ARP or \
            attrs.get('eth_type') == ether.ETH_TYPE_ARP:
        # For ARP, the legacy nw_* addresses mean the ARP payload addresses.
        if 'nw_src' in attrs and 'arp_spa' not in attrs:
            attrs['arp_spa'] = attrs['nw_src']
            del attrs['nw_src']
        if 'nw_dst' in attrs and 'arp_tpa' not in attrs:
            attrs['arp_tpa'] = attrs['nw_dst']
            del attrs['nw_dst']

    kwargs = {}
    for key, value in attrs.items():
        if key in convert:
            value = convert[key](value)
        if key in keys:
            # For old field name
            key = keys[key]
        if key == 'tp_src' or key == 'tp_dst':
            # TCP/UDP port
            conv = {inet.IPPROTO_TCP: {'tp_src': 'tcp_src',
                                       'tp_dst': 'tcp_dst'},
                    inet.IPPROTO_UDP: {'tp_src': 'udp_src',
                                       'tp_dst': 'udp_dst'}}
            ip_proto = attrs.get('nw_proto', attrs.get('ip_proto', 0))
            key = conv[ip_proto][key]
            kwargs[key] = value
        else:
            # others
            kwargs[key] = value

    return dp.ofproto_parser.OFPMatch(**kwargs)
def to_match_eth(value):
    """Parse a MAC match value; 'addr/mask' becomes an (addr, mask) tuple."""
    if '/' not in value:
        return value
    parts = value.split('/')
    return parts[0], parts[1]
def to_match_ip(value):
    """Parse an IP match value; 'addr/prefix' becomes (addr, netmask) strings."""
    if '/' not in value:
        return value
    network = netaddr.ip.IPNetwork(value)
    return str(network.ip), str(network.netmask)
def to_match_vid(value):
    # NOTE: If "vlan_id/dl_vlan" field is described as decimal int value
    #       (and decimal string value), it is treated as values of
    #       VLAN tag, and OFPVID_PRESENT(0x1000) bit is automatically
    #       applied. OTOH, If it is described as hexadecimal string,
    #       treated as values of oxm_value (including OFPVID_PRESENT
    #       bit), and OFPVID_PRESENT bit is NOT automatically applied.
    if isinstance(value, int):
        # Plain int: a VLAN tag value; mark it present.
        return value | ofproto_v1_2.OFPVID_PRESENT
    if '/' in value:
        # "value/mask" pair; base auto-detected from each part's prefix.
        fields = value.split('/')
        return int(fields[0], 0), int(fields[1], 0)
    if value.isdigit():
        # Decimal string: a VLAN tag value; mark it present.
        return int(value, 10) | ofproto_v1_2.OFPVID_PRESENT
    # Hexadecimal (or otherwise prefixed) string: raw oxm_value.
    return int(value, 0)
def to_match_metadata(value):
    """Parse a metadata match value; 'value/mask' becomes an int 2-tuple."""
    if '/' not in value:
        return str_to_int(value)
    fields = value.split('/')
    return str_to_int(fields[0]), str_to_int(fields[1])
def match_to_str(ofmatch):
    """Convert an OFPMatch back into a dict using legacy OF1.0-style names.

    The inverse of to_match() for display purposes: OXM field names are
    mapped back to dl_*/nw_*/tp_* aliases, masked fields are rendered as
    'value/mask' strings, and vlan/metadata get special formatting.
    """
    # OXM names -> legacy display names. Note tcp_* and udp_* both map to
    # tp_*, so the protocol distinction is lost in the output.
    keys = {'eth_src': 'dl_src',
            'eth_dst': 'dl_dst',
            'eth_type': 'dl_type',
            'vlan_vid': 'dl_vlan',
            'ipv4_src': 'nw_src',
            'ipv4_dst': 'nw_dst',
            'ip_proto': 'nw_proto',
            'tcp_src': 'tp_src',
            'tcp_dst': 'tp_dst',
            'udp_src': 'tp_src',
            'udp_dst': 'tp_dst'
            }
    match = {}
    # Walk the OXM TLVs via the match's JSON representation.
    ofmatch = ofmatch.to_jsondict()['OFPMatch']
    ofmatch = ofmatch['oxm_fields']
    for match_field in ofmatch:
        key = match_field['OXMTlv']['field']
        if key in keys:
            key = keys[key]
        mask = match_field['OXMTlv']['mask']
        value = match_field['OXMTlv']['value']
        if key == 'dl_vlan':
            value = match_vid_to_str(value, mask)
        elif key == 'metadata':
            value = match_metadata_to_str(value, mask)
        else:
            if mask is not None:
                value = value + '/' + mask
            else:
                value = value
        # setdefault keeps the first occurrence if a field repeats.
        match.setdefault(key, value)
    return match
def match_metadata_to_str(value, mask):
    """Format a metadata value, appending '/mask' when the mask is truthy."""
    if mask:
        return '%d/%d' % (value, mask)
    return '%d' % value
def match_vid_to_str(value, mask):
    """Format a VLAN-ID match value for display.

    Masked values are shown as hex 'value/mask'. Unmasked values with the
    OFPVID_PRESENT bit set are shown as the decimal tag; otherwise as hex.
    """
    if mask is not None:
        return '0x%04x/0x%04x' % (value, mask)
    if value & ofproto_v1_2.OFPVID_PRESENT:
        return str(value & ~ofproto_v1_2.OFPVID_PRESENT)
    return '0x%04x' % value
def send_stats_request(dp, stats, waiters, msgs):
    """Send a stats request and block until the reply arrives or times out.

    The reply handler (elsewhere) finds the (lock, msgs) pair registered
    here under waiters[dp.id][xid], appends reply messages to *msgs*, and
    sets the lock. On timeout the registration is removed so a late reply
    is ignored; *msgs* is then left unpopulated.
    """
    dp.set_xid(stats)
    waiters_per_dp = waiters.setdefault(dp.id, {})
    lock = hub.Event()
    waiters_per_dp[stats.xid] = (lock, msgs)
    dp.send_msg(stats)
    lock.wait(timeout=DEFAULT_TIMEOUT)
    if not lock.is_set():
        # Timed out: drop the registration.
        del waiters_per_dp[stats.xid]
def get_desc_stats(dp, waiters):
    """Query switch description stats; returns {dpid_str: desc_dict}.

    On timeout the inner dict is empty.
    """
    stats = dp.ofproto_parser.OFPDescStatsRequest(dp)
    msgs = []
    send_stats_request(dp, stats, waiters, msgs)
    s = {}
    for msg in msgs:
        stats = msg.body
        s = {'mfr_desc': stats.mfr_desc,
             'hw_desc': stats.hw_desc,
             'sw_desc': stats.sw_desc,
             'serial_num': stats.serial_num,
             'dp_desc': stats.dp_desc}
    desc = {str(dp.id): s}
    return desc
def get_queue_stats(dp, waiters):
    """Query per-queue TX stats for all ports/queues; returns {dpid_str: [dict]}."""
    ofp = dp.ofproto
    # Request every queue on every port.
    stats = dp.ofproto_parser.OFPQueueStatsRequest(dp, 0, ofp.OFPP_ANY,
                                                   ofp.OFPQ_ALL)
    msgs = []
    send_stats_request(dp, stats, waiters, msgs)
    s = []
    for msg in msgs:
        stats = msg.body
        for stat in stats:
            s.append({'port_no': stat.port_no,
                      'queue_id': stat.queue_id,
                      'tx_bytes': stat.tx_bytes,
                      'tx_errors': stat.tx_errors,
                      'tx_packets': stat.tx_packets})
    desc = {str(dp.id): s}
    return desc
def get_flow_stats(dp, waiters, flow=None):
    """Query flow stats, optionally filtered; returns {dpid_str: [flow_dict]}.

    flow: optional dict of filter fields (table_id, out_port, out_group,
    cookie, cookie_mask, match). Defaults to no filtering.
    """
    # None sentinel instead of a mutable default argument: the old `flow={}`
    # default was shared across calls and could be mutated (to_match aliases
    # the 'match' sub-dict), leaking state between requests.
    flow = flow if flow is not None else {}
    table_id = int(flow.get('table_id', dp.ofproto.OFPTT_ALL))
    out_port = int(flow.get('out_port', dp.ofproto.OFPP_ANY))
    out_group = int(flow.get('out_group', dp.ofproto.OFPG_ANY))
    cookie = int(flow.get('cookie', 0))
    cookie_mask = int(flow.get('cookie_mask', 0))
    match = to_match(dp, flow.get('match', {}))

    stats = dp.ofproto_parser.OFPFlowStatsRequest(
        dp, table_id, out_port, out_group, cookie, cookie_mask, match)

    msgs = []
    send_stats_request(dp, stats, waiters, msgs)

    flows = []
    for msg in msgs:
        for stats in msg.body:
            actions = actions_to_str(stats.instructions)
            match = match_to_str(stats.match)
            s = {'priority': stats.priority,
                 'cookie': stats.cookie,
                 'idle_timeout': stats.idle_timeout,
                 'hard_timeout': stats.hard_timeout,
                 'actions': actions,
                 'match': match,
                 'byte_count': stats.byte_count,
                 'duration_sec': stats.duration_sec,
                 'duration_nsec': stats.duration_nsec,
                 'packet_count': stats.packet_count,
                 'table_id': stats.table_id,
                 'length': stats.length}
            flows.append(s)
    flows = {str(dp.id): flows}
    return flows
def get_port_stats(dp, waiters):
    """Query per-port RX/TX counters for all ports; returns {dpid_str: [dict]}."""
    stats = dp.ofproto_parser.OFPPortStatsRequest(
        dp, dp.ofproto.OFPP_ANY, 0)
    msgs = []
    send_stats_request(dp, stats, waiters, msgs)

    ports = []
    for msg in msgs:
        for stats in msg.body:
            s = {'port_no': stats.port_no,
                 'rx_packets': stats.rx_packets,
                 'tx_packets': stats.tx_packets,
                 'rx_bytes': stats.rx_bytes,
                 'tx_bytes': stats.tx_bytes,
                 'rx_dropped': stats.rx_dropped,
                 'tx_dropped': stats.tx_dropped,
                 'rx_errors': stats.rx_errors,
                 'tx_errors': stats.tx_errors,
                 'rx_frame_err': stats.rx_frame_err,
                 'rx_over_err': stats.rx_over_err,
                 'rx_crc_err': stats.rx_crc_err,
                 'collisions': stats.collisions}
            ports.append(s)
    ports = {str(dp.id): ports}
    return ports
def get_group_stats(dp, waiters):
    """Query stats for all groups; returns {dpid_str: [group_dict]}."""
    stats = dp.ofproto_parser.OFPGroupStatsRequest(
        dp, dp.ofproto.OFPG_ALL, 0)
    msgs = []
    send_stats_request(dp, stats, waiters, msgs)

    groups = []
    for msg in msgs:
        for stats in msg.body:
            # Per-bucket packet/byte counters.
            bucket_counters = []
            for bucket_counter in stats.bucket_counters:
                c = {'packet_count': bucket_counter.packet_count,
                     'byte_count': bucket_counter.byte_count}
                bucket_counters.append(c)
            g = {'length': stats.length,
                 'group_id': stats.group_id,
                 'ref_count': stats.ref_count,
                 'packet_count': stats.packet_count,
                 'byte_count': stats.byte_count,
                 'bucket_stats': bucket_counters}
            groups.append(g)
    groups = {str(dp.id): groups}
    return groups
def get_group_features(dp, waiters):
    """Query group-feature capabilities; returns {dpid_str: [feature_dict]}.

    Numeric bitmaps from the switch are decoded into lists of symbolic
    names (group types, capabilities, supported actions per group type).
    """
    ofp = dp.ofproto
    type_convert = {ofp.OFPGT_ALL: 'ALL',
                    ofp.OFPGT_SELECT: 'SELECT',
                    ofp.OFPGT_INDIRECT: 'INDIRECT',
                    ofp.OFPGT_FF: 'FF'}
    cap_convert = {ofp.OFPGFC_SELECT_WEIGHT: 'SELECT_WEIGHT',
                   ofp.OFPGFC_SELECT_LIVENESS: 'SELECT_LIVENESS',
                   ofp.OFPGFC_CHAINING: 'CHAINING',
                   ofp.OFPGFC_CHAINING_CHECKS: 'CHAINING_CHECKS'}
    act_convert = {ofp.OFPAT_OUTPUT: 'OUTPUT',
                   ofp.OFPAT_COPY_TTL_OUT: 'COPY_TTL_OUT',
                   ofp.OFPAT_COPY_TTL_IN: 'COPY_TTL_IN',
                   ofp.OFPAT_SET_MPLS_TTL: 'SET_MPLS_TTL',
                   ofp.OFPAT_DEC_MPLS_TTL: 'DEC_MPLS_TTL',
                   ofp.OFPAT_PUSH_VLAN: 'PUSH_VLAN',
                   ofp.OFPAT_POP_VLAN: 'POP_VLAN',
                   ofp.OFPAT_PUSH_MPLS: 'PUSH_MPLS',
                   ofp.OFPAT_POP_MPLS: 'POP_MPLS',
                   ofp.OFPAT_SET_QUEUE: 'SET_QUEUE',
                   ofp.OFPAT_GROUP: 'GROUP',
                   ofp.OFPAT_SET_NW_TTL: 'SET_NW_TTL',
                   ofp.OFPAT_DEC_NW_TTL: 'DEC_NW_TTL',
                   ofp.OFPAT_SET_FIELD: 'SET_FIELD'}

    stats = dp.ofproto_parser.OFPGroupFeaturesStatsRequest(dp, 0)
    msgs = []
    send_stats_request(dp, stats, waiters, msgs)

    features = []
    for msg in msgs:
        feature = msg.body
        # Group types: 'types' is a bitmap indexed by group-type number.
        types = []
        for k, v in type_convert.items():
            if (1 << k) & feature.types:
                types.append(v)
        # Capabilities: flag values are tested directly (not shifted).
        capabilities = []
        for k, v in cap_convert.items():
            if k & feature.capabilities:
                capabilities.append(v)
        # Max groups per group type.
        max_groups = []
        for k, v in type_convert.items():
            max_groups.append({v: feature.max_groups[k]})
        # Supported actions, as a bitmap per group type.
        actions = []
        for k1, v1 in type_convert.items():
            acts = []
            for k2, v2 in act_convert.items():
                if (1 << k2) & feature.actions[k1]:
                    acts.append(v2)
            actions.append({v1: acts})
        f = {'types': types,
             'capabilities': capabilities,
             'max_groups': max_groups,
             'actions': actions}
        features.append(f)
    features = {str(dp.id): features}
    return features
def get_group_desc(dp, waiters):
    """Query group descriptions (type, id, buckets); returns {dpid_str: [dict]}."""
    type_convert = {dp.ofproto.OFPGT_ALL: 'ALL',
                    dp.ofproto.OFPGT_SELECT: 'SELECT',
                    dp.ofproto.OFPGT_INDIRECT: 'INDIRECT',
                    dp.ofproto.OFPGT_FF: 'FF'}

    stats = dp.ofproto_parser.OFPGroupDescStatsRequest(dp, 0)
    msgs = []
    send_stats_request(dp, stats, waiters, msgs)

    descs = []
    for msg in msgs:
        for stats in msg.body:
            buckets = []
            for bucket in stats.buckets:
                # Render each bucket's actions as display strings.
                actions = []
                for action in bucket.actions:
                    actions.append(action_to_str(action))
                b = {'weight': bucket.weight,
                     'watch_port': bucket.watch_port,
                     'watch_group': bucket.watch_group,
                     'actions': actions}
                buckets.append(b)
            d = {'type': type_convert.get(stats.type),
                 'group_id': stats.group_id,
                 'buckets': buckets}
            descs.append(d)
    descs = {str(dp.id): descs}
    return descs
def get_port_desc(dp, waiters):
    """Query port descriptions via a features request; returns {dpid_str: [dict]}.

    OF1.2 has no dedicated port-desc stats message, so the port list is
    taken from the switch-features reply (msg.ports).
    """
    stats = dp.ofproto_parser.OFPFeaturesRequest(dp)
    msgs = []
    send_stats_request(dp, stats, waiters, msgs)

    descs = []

    for msg in msgs:
        stats = msg.ports
        for stat in stats.values():
            d = {'port_no': stat.port_no,
                 'hw_addr': stat.hw_addr,
                 'name': stat.name,
                 'config': stat.config,
                 'state': stat.state,
                 'curr': stat.curr,
                 'advertised': stat.advertised,
                 'supported': stat.supported,
                 'peer': stat.peer,
                 'curr_speed': stat.curr_speed,
                 'max_speed': stat.max_speed}
            descs.append(d)
    descs = {str(dp.id): descs}
    return descs
def mod_flow_entry(dp, flow, cmd):
    """Send an OFPFlowMod built from a flow dict.

    cmd is one of the OFPFC_* command constants (ADD/MODIFY/DELETE...).
    Missing fields fall back to protocol defaults; 'match' and 'actions'
    are converted with to_match()/to_actions(). Fire-and-forget: no reply
    is awaited.
    """
    cookie = int(flow.get('cookie', 0))
    cookie_mask = int(flow.get('cookie_mask', 0))
    table_id = int(flow.get('table_id', 0))
    idle_timeout = int(flow.get('idle_timeout', 0))
    hard_timeout = int(flow.get('hard_timeout', 0))
    priority = int(flow.get('priority', 0))
    buffer_id = int(flow.get('buffer_id', dp.ofproto.OFP_NO_BUFFER))
    out_port = int(flow.get('out_port', dp.ofproto.OFPP_ANY))
    out_group = int(flow.get('out_group', dp.ofproto.OFPG_ANY))
    flags = int(flow.get('flags', 0))
    match = to_match(dp, flow.get('match', {}))
    inst = to_actions(dp, flow.get('actions', []))

    flow_mod = dp.ofproto_parser.OFPFlowMod(
        dp, cookie, cookie_mask, table_id, cmd, idle_timeout,
        hard_timeout, priority, buffer_id, out_port, out_group,
        flags, match, inst)

    dp.send_msg(flow_mod)
def mod_group_entry(dp, group, cmd):
    """Send an OFPGroupMod built from a group dict.

    cmd is one of the OFPGC_* command constants. Fire-and-forget: no reply
    is awaited.
    """
    type_convert = {'ALL': dp.ofproto.OFPGT_ALL,
                    'SELECT': dp.ofproto.OFPGT_SELECT,
                    'INDIRECT': dp.ofproto.OFPGT_INDIRECT,
                    'FF': dp.ofproto.OFPGT_FF}

    type_ = type_convert.get(group.get('type', 'ALL'))
    if type_ is None:
        # NOTE(review): only logged — the message is still sent with
        # type_ = None below; confirm whether this should return early.
        LOG.debug('Unknown type: %s', group.get('type'))

    group_id = int(group.get('group_id', 0))

    buckets = []
    for bucket in group.get('buckets', []):
        weight = int(bucket.get('weight', 0))
        watch_port = int(bucket.get('watch_port', dp.ofproto.OFPP_ANY))
        watch_group = int(bucket.get('watch_group', dp.ofproto.OFPG_ANY))
        actions = []
        for dic in bucket.get('actions', []):
            action = to_action(dp, dic)
            if action is not None:
                actions.append(action)
        buckets.append(dp.ofproto_parser.OFPBucket(
            weight, watch_port, watch_group, actions))

    group_mod = dp.ofproto_parser.OFPGroupMod(
        dp, cmd, type_, group_id, buckets)

    dp.send_msg(group_mod)
def mod_port_behavior(dp, port_config):
    """Send an OFPPortMod built from a port-config dict.

    NOTE(review): 'advertise' has no default — a dict without it makes
    int(None) raise TypeError; confirm callers always supply it.
    Fire-and-forget: no reply is awaited.
    """
    port_no = int(port_config.get('port_no', 0))
    hw_addr = port_config.get('hw_addr')
    config = int(port_config.get('config', 0))
    mask = int(port_config.get('mask', 0))
    advertise = int(port_config.get('advertise'))

    port_mod = dp.ofproto_parser.OFPPortMod(
        dp, port_no, hw_addr, config, mask, advertise)

    dp.send_msg(port_mod)
def send_experimenter(dp, exp):
    """Send an OFPExperimenter message described by the dict *exp*.

    'data' may be raw ascii or base64 (selected by 'data_type').
    NOTE(review): an unknown data_type is only logged — the payload is
    still sent as-is; confirm whether this should abort instead.
    """
    experimenter = exp.get('experimenter', 0)
    exp_type = exp.get('exp_type', 0)
    data_type = exp.get('data_type', 'ascii')
    if data_type != 'ascii' and data_type != 'base64':
        LOG.debug('Unknown data type: %s', data_type)
    data = exp.get('data', '')
    if data_type == 'base64':
        data = base64.b64decode(data)

    expmsg = dp.ofproto_parser.OFPExperimenter(
        dp, experimenter, exp_type, data)

    dp.send_msg(expmsg)
| Neil741/ryu-master | ryu/lib/ofctl_v1_2.py | Python | apache-2.0 | 24,202 |
// Copyright (c) 2006-7 John Maddock
// Use, modification and distribution are subject to the
// Boost Software License, Version 1.0. (See accompanying file
// LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_MATH_TOOLS_CONFIG_HPP
#define BOOST_MATH_TOOLS_CONFIG_HPP
#ifdef _MSC_VER
#pragma once
#endif
#include <boost/config.hpp>
#include <boost/cstdint.hpp> // for geofeatures_boost::uintmax_t
#include <boost/detail/workaround.hpp>
#include <boost/type_traits/is_integral.hpp>
#include <algorithm> // for min and max
#include <boost/config/no_tr1/cmath.hpp>
#include <climits>
#include <cfloat>
#if (defined(macintosh) || defined(__APPLE__) || defined(__APPLE_CC__))
# include <math.h>
#endif
#ifndef BOOST_NO_LIMITS_COMPILE_TIME_CONSTANTS
# include <limits>
#endif
#include <boost/math/tools/user.hpp>
#if (defined(__CYGWIN__) || defined(__FreeBSD__) || defined(__NetBSD__) \
|| (defined(__hppa) && !defined(__OpenBSD__)) || (defined(__NO_LONG_DOUBLE_MATH) && (DBL_MANT_DIG != LDBL_MANT_DIG))) \
&& !defined(BOOST_MATH_NO_LONG_DOUBLE_MATH_FUNCTIONS)
# define BOOST_MATH_NO_LONG_DOUBLE_MATH_FUNCTIONS
#endif
#if BOOST_WORKAROUND(__BORLANDC__, BOOST_TESTED_AT(0x582))
//
// Borland post 5.8.2 uses Dinkumware's std C lib which
// doesn't have true long double precision. Earlier
// versions are problematic too:
//
# define BOOST_MATH_NO_REAL_CONCEPT_TESTS
# define BOOST_MATH_NO_LONG_DOUBLE_MATH_FUNCTIONS
# define BOOST_MATH_CONTROL_FP _control87(MCW_EM,MCW_EM)
# include <float.h>
#endif
#ifdef __IBMCPP__
//
// For reasons I don't unserstand, the tests with IMB's compiler all
// pass at long double precision, but fail with real_concept, those tests
// are disabled for now. (JM 2012).
# define BOOST_MATH_NO_REAL_CONCEPT_TESTS
#endif
#ifdef sun
// Any use of __float128 in program startup code causes a segfault (tested JM 2015, Solaris 11).
# define BOOST_MATH_DISABLE_FLOAT128
#endif
#ifdef __HAIKU__
//
// Not sure what's up with the math detection on Haiku, but linking fails with
// float128 code enabled, and we don't have an implementation of __expl, so
// disabling long double functions for now as well.
# define BOOST_MATH_DISABLE_FLOAT128
# define BOOST_MATH_NO_LONG_DOUBLE_MATH_FUNCTIONS
#endif
#if (defined(macintosh) || defined(__APPLE__) || defined(__APPLE_CC__)) && ((LDBL_MANT_DIG == 106) || (__LDBL_MANT_DIG__ == 106)) && !defined(BOOST_MATH_NO_LONG_DOUBLE_MATH_FUNCTIONS)
//
// Darwin's rather strange "double double" is rather hard to
// support, it should be possible given enough effort though...
//
# define BOOST_MATH_NO_LONG_DOUBLE_MATH_FUNCTIONS
#endif
#if defined(unix) && defined(__INTEL_COMPILER) && (__INTEL_COMPILER <= 1000) && !defined(BOOST_MATH_NO_LONG_DOUBLE_MATH_FUNCTIONS)
//
// Intel compiler prior to version 10 has sporadic problems
// calling the long double overloads of the std lib math functions:
// calling ::powl is OK, but std::pow(long double, long double)
// may segfault depending upon the value of the arguments passed
// and the specific Linux distribution.
//
// We'll be conservative and disable long double support for this compiler.
//
// Comment out this #define and try building the tests to determine whether
// your Intel compiler version has this issue or not.
//
# define BOOST_MATH_NO_LONG_DOUBLE_MATH_FUNCTIONS
#endif
#if defined(unix) && defined(__INTEL_COMPILER)
//
// Intel compiler has sporadic issues compiling std::fpclassify depending on
// the exact OS version used. Use our own code for this as we know it works
// well on Intel processors:
//
#define BOOST_MATH_DISABLE_STD_FPCLASSIFY
#endif
#if defined(BOOST_MSVC) && !defined(_WIN32_WCE)
// Better safe than sorry, our tests don't support hardware exceptions:
# define BOOST_MATH_CONTROL_FP _control87(MCW_EM,MCW_EM)
#endif
#ifdef __IBMCPP__
# define BOOST_MATH_NO_DEDUCED_FUNCTION_POINTERS
#endif
#if (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901))
# define BOOST_MATH_USE_C99
#endif
#if (defined(__hpux) && !defined(__hppa))
# define BOOST_MATH_USE_C99
#endif
#if defined(__GNUC__) && defined(_GLIBCXX_USE_C99)
# define BOOST_MATH_USE_C99
#endif
#if defined(_LIBCPP_VERSION) && !defined(_MSC_VER)
# define BOOST_MATH_USE_C99
#endif
#if defined(__CYGWIN__) || defined(__HP_aCC) || defined(BOOST_INTEL) \
|| defined(BOOST_NO_NATIVE_LONG_DOUBLE_FP_CLASSIFY) \
|| (defined(__GNUC__) && !defined(BOOST_MATH_USE_C99))\
|| defined(BOOST_MATH_NO_LONG_DOUBLE_MATH_FUNCTIONS)
# define BOOST_MATH_NO_NATIVE_LONG_DOUBLE_FP_CLASSIFY
#endif
#if BOOST_WORKAROUND(__SUNPRO_CC, <= 0x590)
# include "boost/type.hpp"
# include "boost/non_type.hpp"
# define BOOST_MATH_EXPLICIT_TEMPLATE_TYPE(t) geofeatures_boost::type<t>* = 0
# define BOOST_MATH_EXPLICIT_TEMPLATE_TYPE_SPEC(t) geofeatures_boost::type<t>*
# define BOOST_MATH_EXPLICIT_TEMPLATE_NON_TYPE(t, v) geofeatures_boost::non_type<t, v>* = 0
# define BOOST_MATH_EXPLICIT_TEMPLATE_NON_TYPE_SPEC(t, v) geofeatures_boost::non_type<t, v>*
# define BOOST_MATH_APPEND_EXPLICIT_TEMPLATE_TYPE(t) \
, BOOST_MATH_EXPLICIT_TEMPLATE_TYPE(t)
# define BOOST_MATH_APPEND_EXPLICIT_TEMPLATE_TYPE_SPEC(t) \
, BOOST_MATH_EXPLICIT_TEMPLATE_TYPE_SPEC(t)
# define BOOST_MATH_APPEND_EXPLICIT_TEMPLATE_NON_TYPE(t, v) \
, BOOST_MATH_EXPLICIT_TEMPLATE_NON_TYPE(t, v)
# define BOOST_MATH_APPEND_EXPLICIT_TEMPLATE_NON_TYPE_SPEC(t, v) \
, BOOST_MATH_EXPLICIT_TEMPLATE_NON_TYPE_SPEC(t, v)
#else
// no workaround needed: expand to nothing
# define BOOST_MATH_EXPLICIT_TEMPLATE_TYPE(t)
# define BOOST_MATH_EXPLICIT_TEMPLATE_TYPE_SPEC(t)
# define BOOST_MATH_EXPLICIT_TEMPLATE_NON_TYPE(t, v)
# define BOOST_MATH_EXPLICIT_TEMPLATE_NON_TYPE_SPEC(t, v)
# define BOOST_MATH_APPEND_EXPLICIT_TEMPLATE_TYPE(t)
# define BOOST_MATH_APPEND_EXPLICIT_TEMPLATE_TYPE_SPEC(t)
# define BOOST_MATH_APPEND_EXPLICIT_TEMPLATE_NON_TYPE(t, v)
# define BOOST_MATH_APPEND_EXPLICIT_TEMPLATE_NON_TYPE_SPEC(t, v)
#endif // __SUNPRO_CC
#if (defined(__SUNPRO_CC) || defined(__hppa) || defined(__GNUC__)) && !defined(BOOST_MATH_SMALL_CONSTANT)
// Sun's compiler emits a hard error if a constant underflows,
// as does aCC on PA-RISC, while gcc issues a large number of warnings:
# define BOOST_MATH_SMALL_CONSTANT(x) 0.0
#else
# define BOOST_MATH_SMALL_CONSTANT(x) x
#endif
#if BOOST_WORKAROUND(BOOST_MSVC, < 1400)
//
// Define if constants too large for a float cause "bad"
// values to be stored in the data, rather than infinity
// or a suitably large value.
//
# define BOOST_MATH_BUGGY_LARGE_FLOAT_CONSTANTS
#endif
//
// Tune performance options for specific compilers:
//
#ifdef BOOST_MSVC
# define BOOST_MATH_POLY_METHOD 2
#elif defined(BOOST_INTEL)
# define BOOST_MATH_POLY_METHOD 2
# define BOOST_MATH_RATIONAL_METHOD 2
#elif defined(__GNUC__)
# define BOOST_MATH_POLY_METHOD 3
# define BOOST_MATH_RATIONAL_METHOD 3
# define BOOST_MATH_INT_TABLE_TYPE(RT, IT) RT
# define BOOST_MATH_INT_VALUE_SUFFIX(RV, SUF) RV##.0L
#endif
#if defined(BOOST_NO_LONG_LONG) && !defined(BOOST_MATH_INT_TABLE_TYPE)
# define BOOST_MATH_INT_TABLE_TYPE(RT, IT) RT
# define BOOST_MATH_INT_VALUE_SUFFIX(RV, SUF) RV##.0L
#endif
//
// The maximum order of polynomial that will be evaluated
// via an unrolled specialisation:
//
#ifndef BOOST_MATH_MAX_POLY_ORDER
# define BOOST_MATH_MAX_POLY_ORDER 17
#endif
//
// Set the method used to evaluate polynomials and rationals:
//
#ifndef BOOST_MATH_POLY_METHOD
# define BOOST_MATH_POLY_METHOD 1
#endif
#ifndef BOOST_MATH_RATIONAL_METHOD
# define BOOST_MATH_RATIONAL_METHOD 0
#endif
//
// decide whether to store constants as integers or reals:
//
#ifndef BOOST_MATH_INT_TABLE_TYPE
# define BOOST_MATH_INT_TABLE_TYPE(RT, IT) IT
#endif
#ifndef BOOST_MATH_INT_VALUE_SUFFIX
# define BOOST_MATH_INT_VALUE_SUFFIX(RV, SUF) RV##SUF
#endif
//
// Test whether to support __float128:
//
#if defined(_GLIBCXX_USE_FLOAT128) && defined(BOOST_GCC) && !defined(__STRICT_ANSI__) \
&& !defined(BOOST_MATH_DISABLE_FLOAT128) || defined(BOOST_MATH_USE_FLOAT128)
//
// Only enable this when the compiler really is GCC as clang and probably
// intel too don't support __float128 yet :-(
//
#ifndef BOOST_MATH_USE_FLOAT128
# define BOOST_MATH_USE_FLOAT128
#endif
# if defined(BOOST_INTEL) && defined(BOOST_INTEL_CXX_VERSION) && (BOOST_INTEL_CXX_VERSION >= 1310) && defined(__GNUC__)
# if (__GNUC__ > 4) || ((__GNUC__ == 4) && (__GNUC_MINOR__ >= 6))
# define BOOST_MATH_FLOAT128_TYPE __float128
# endif
# elif defined(__GNUC__)
# define BOOST_MATH_FLOAT128_TYPE __float128
# endif
# ifndef BOOST_MATH_FLOAT128_TYPE
# define BOOST_MATH_FLOAT128_TYPE _Quad
# endif
#endif
//
// Check for WinCE with no iostream support:
//
#if defined(_WIN32_WCE) && !defined(__SGI_STL_PORT)
# define BOOST_MATH_NO_LEXICAL_CAST
#endif
//
// Helper macro for controlling the FP behaviour:
//
#ifndef BOOST_MATH_CONTROL_FP
# define BOOST_MATH_CONTROL_FP
#endif
//
// Helper macro for using statements:
//
#define BOOST_MATH_STD_USING_CORE \
using std::abs;\
using std::acos;\
using std::cos;\
using std::fmod;\
using std::modf;\
using std::tan;\
using std::asin;\
using std::cosh;\
using std::frexp;\
using std::pow;\
using std::tanh;\
using std::atan;\
using std::exp;\
using std::ldexp;\
using std::sin;\
using std::atan2;\
using std::fabs;\
using std::log;\
using std::sinh;\
using std::ceil;\
using std::floor;\
using std::log10;\
using std::sqrt;
#define BOOST_MATH_STD_USING BOOST_MATH_STD_USING_CORE
// Declare the (renamed) boost namespace, alias boost to it, then enter
// boost::math for the small utilities below.
namespace geofeatures_boost {} namespace boost = geofeatures_boost; namespace geofeatures_boost{ namespace math{
namespace tools
{
// Three- and four-argument max helpers built on std::max.
// BOOST_PREVENT_MACRO_SUBSTITUTION keeps a <windows.h>-style max macro
// from mangling the function name.
template <class T>
inline T max BOOST_PREVENT_MACRO_SUBSTITUTION(T a, T b, T c)
{
   return (std::max)((std::max)(a, b), c);
}
template <class T>
inline T max BOOST_PREVENT_MACRO_SUBSTITUTION(T a, T b, T c, T d)
{
   return (std::max)((std::max)(a, b), (std::max)(c, d));
}
} // namespace tools
// No-op sink used to silence unused-variable warnings portably.
template <class T>
void suppress_unused_variable_warning(const T&)
{
}
namespace detail{
// Trait: true when T should be treated as an integer for rounding purposes —
// either a builtin integral type or a numeric_limits-specialized integer
// (when compile-time numeric_limits is available).
template <class T>
struct is_integer_for_rounding
{
   static const bool value = geofeatures_boost::is_integral<T>::value
#ifndef BOOST_NO_LIMITS_COMPILE_TIME_CONSTANTS
      || (std::numeric_limits<T>::is_specialized && std::numeric_limits<T>::is_integer)
#endif
      ;
};
}
}} // namespace geofeatures_boost namespace math
#ifdef __GLIBC_PREREQ
# if __GLIBC_PREREQ(2,14)
# define BOOST_MATH_HAVE_FIXED_GLIBC
# endif
#endif
#if ((defined(__linux__) && !defined(__UCLIBC__) && !defined(BOOST_MATH_HAVE_FIXED_GLIBC)) || defined(__QNX__) || defined(__IBMCPP__)) && !defined(BOOST_NO_FENV_H)
//
// This code was introduced in response to this glibc bug: http://sourceware.org/bugzilla/show_bug.cgi?id=2445
// Basically powl and expl can return garbage when the result is small and certain exception flags are set
// on entrance to these functions. This appears to have been fixed in Glibc 2.14 (May 2011).
// Much more information in this message thread: https://groups.google.com/forum/#!topic/boost-list/ZT99wtIFlb4
//
#include <boost/detail/fenv.hpp>
# ifdef FE_ALL_EXCEPT
namespace geofeatures_boost {} namespace boost = geofeatures_boost; namespace geofeatures_boost{ namespace math{
namespace detail
{
// RAII guard for the floating-point exception flags: on construction it
// saves the current flags and clears them; on destruction it restores
// the saved flags.  Used (via BOOST_FPU_EXCEPTION_GUARD) to work around
// the glibc powl/expl bug described in the comment above.
struct fpu_guard
{
   fpu_guard()
   {
      fegetexceptflag(&m_flags, FE_ALL_EXCEPT);
      feclearexcept(FE_ALL_EXCEPT);
   }
   ~fpu_guard()
   {
      fesetexceptflag(&m_flags, FE_ALL_EXCEPT);
   }
private:
   fexcept_t m_flags;  // saved exception flags, restored on scope exit
};
} // namespace detail
}} // namespaces
# define BOOST_FPU_EXCEPTION_GUARD geofeatures_boost::math::detail::fpu_guard local_guard_object;
# define BOOST_MATH_INSTRUMENT_FPU do{ fexcept_t cpu_flags; fegetexceptflag(&cpu_flags, FE_ALL_EXCEPT); BOOST_MATH_INSTRUMENT_VARIABLE(cpu_flags); } while(0);
# else
# define BOOST_FPU_EXCEPTION_GUARD
# define BOOST_MATH_INSTRUMENT_FPU
# endif
#else // All other platforms.
# define BOOST_FPU_EXCEPTION_GUARD
# define BOOST_MATH_INSTRUMENT_FPU
#endif
#ifdef BOOST_MATH_INSTRUMENT
# include <iostream>
# include <iomanip>
# include <typeinfo>
# define BOOST_MATH_INSTRUMENT_CODE(x) \
std::cout << std::setprecision(35) << __FILE__ << ":" << __LINE__ << " " << x << std::endl;
# define BOOST_MATH_INSTRUMENT_VARIABLE(name) BOOST_MATH_INSTRUMENT_CODE(BOOST_STRINGIZE(name) << " = " << name)
#else
# define BOOST_MATH_INSTRUMENT_CODE(x)
# define BOOST_MATH_INSTRUMENT_VARIABLE(name)
#endif
#endif // BOOST_MATH_TOOLS_CONFIG_HPP
| sachindeorah/geofeatures | GeoFeatures/boost/math/tools/config.hpp | C++ | apache-2.0 | 12,678 |
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Handles database requests from other nova services."""
import copy
from nova.api.ec2 import ec2utils
from nova import block_device
from nova.cells import rpcapi as cells_rpcapi
from nova.compute import api as compute_api
from nova.compute import rpcapi as compute_rpcapi
from nova.compute import task_states
from nova.compute import utils as compute_utils
from nova.compute import vm_states
from nova.conductor.tasks import live_migrate
from nova.db import base
from nova import exception
from nova.image import glance
from nova import manager
from nova import network
from nova.network.security_group import openstack_driver
from nova import notifications
from nova.objects import base as nova_object
from nova.objects import instance as instance_obj
from nova.objects import migration as migration_obj
from nova.openstack.common import excutils
from nova.openstack.common.gettextutils import _
from nova.openstack.common import jsonutils
from nova.openstack.common import log as logging
from nova.openstack.common.rpc import common as rpc_common
from nova.openstack.common import timeutils
from nova import quota
from nova.scheduler import rpcapi as scheduler_rpcapi
from nova.scheduler import utils as scheduler_utils
LOG = logging.getLogger(__name__)
# Instead of having a huge list of arguments to instance_update(), we just
# accept a dict of fields to update and use this whitelist to validate it.
allowed_updates = ['task_state', 'vm_state', 'expected_task_state',
'power_state', 'access_ip_v4', 'access_ip_v6',
'launched_at', 'terminated_at', 'host', 'node',
'memory_mb', 'vcpus', 'root_gb', 'ephemeral_gb',
'instance_type_id', 'root_device_name', 'launched_on',
'progress', 'vm_mode', 'default_ephemeral_device',
'default_swap_device', 'root_device_name',
'system_metadata', 'updated_at'
]
# Fields that we want to convert back into a datetime object.
datetime_fields = ['launched_at', 'terminated_at', 'updated_at']
class ConductorManager(manager.Manager):
"""Mission: Conduct things.
The methods in the base API for nova-conductor are various proxy operations
performed on behalf of the nova-compute service running on compute nodes.
Compute nodes are not allowed to directly access the database, so this set
of methods allows them to get specific work done without locally accessing
the database.
The nova-conductor service also exposes an API in the 'compute_task'
namespace. See the ComputeTaskManager class for details.
"""
RPC_API_VERSION = '1.58'
    def __init__(self, *args, **kwargs):
        """Initialize the conductor manager.

        The network and compute APIs are deliberately left as None and
        created on first use via the ``network_api``/``compute_api``
        properties (see the NOTE on ``network_api`` about the circular
        dependency with our init).
        """
        super(ConductorManager, self).__init__(service_name='conductor',
                                               *args, **kwargs)
        self.security_group_api = (
            openstack_driver.get_openstack_security_group_driver())
        # Lazily instantiated -- see the properties below.
        self._network_api = None
        self._compute_api = None
        self.compute_task_mgr = ComputeTaskManager()
        self.quotas = quota.QUOTAS
        self.cells_rpcapi = cells_rpcapi.CellsAPI()
def create_rpc_dispatcher(self, *args, **kwargs):
kwargs['additional_apis'] = [self.compute_task_mgr]
return super(ConductorManager, self).create_rpc_dispatcher(*args,
**kwargs)
@property
def network_api(self):
# NOTE(danms): We need to instantiate our network_api on first use
# to avoid the circular dependency that exists between our init
# and network_api's
if self._network_api is None:
self._network_api = network.API()
return self._network_api
@property
def compute_api(self):
if self._compute_api is None:
self._compute_api = compute_api.API()
return self._compute_api
def ping(self, context, arg):
# NOTE(russellb) This method can be removed in 2.0 of this API. It is
# now a part of the base rpc API.
return jsonutils.to_primitive({'service': 'conductor', 'arg': arg})
    @rpc_common.client_exceptions(KeyError, ValueError,
                                  exception.InvalidUUID,
                                  exception.InstanceNotFound,
                                  exception.UnexpectedTaskStateError)
    def instance_update(self, context, instance_uuid,
                        updates, service=None):
        """Validate and apply a whitelisted set of instance updates.

        Keys not in ``allowed_updates`` raise KeyError; values for keys
        in ``datetime_fields`` arriving as strings (over RPC) are parsed
        back into datetime objects before hitting the DB.  Sends an
        instance-update notification tagged with ``service``.
        """
        for key, value in updates.iteritems():
            if key not in allowed_updates:
                LOG.error(_("Instance update attempted for "
                            "'%(key)s' on %(instance_uuid)s"),
                          {'key': key, 'instance_uuid': instance_uuid})
                raise KeyError("unexpected update keyword '%s'" % key)
            if key in datetime_fields and isinstance(value, basestring):
                updates[key] = timeutils.parse_strtime(value)

        old_ref, instance_ref = self.db.instance_update_and_get_original(
            context, instance_uuid, updates)
        notifications.send_update(context, old_ref, instance_ref, service)
        return jsonutils.to_primitive(instance_ref)
@rpc_common.client_exceptions(exception.InstanceNotFound)
def instance_get(self, context, instance_id):
return jsonutils.to_primitive(
self.db.instance_get(context, instance_id))
@rpc_common.client_exceptions(exception.InstanceNotFound)
def instance_get_by_uuid(self, context, instance_uuid,
columns_to_join=None):
return jsonutils.to_primitive(
self.db.instance_get_by_uuid(context, instance_uuid,
columns_to_join))
# NOTE(hanlind): This method can be removed in v2.0 of the RPC API.
def instance_get_all(self, context):
return jsonutils.to_primitive(self.db.instance_get_all(context))
def instance_get_all_by_host(self, context, host, node=None,
columns_to_join=None):
if node is not None:
result = self.db.instance_get_all_by_host_and_node(
context.elevated(), host, node)
else:
result = self.db.instance_get_all_by_host(context.elevated(), host,
columns_to_join)
return jsonutils.to_primitive(result)
# NOTE(comstud): This method is now deprecated and can be removed in
# version v2.0 of the RPC API
@rpc_common.client_exceptions(exception.MigrationNotFound)
def migration_get(self, context, migration_id):
migration_ref = self.db.migration_get(context.elevated(),
migration_id)
return jsonutils.to_primitive(migration_ref)
# NOTE(comstud): This method is now deprecated and can be removed in
# version v2.0 of the RPC API
def migration_get_unconfirmed_by_dest_compute(self, context,
confirm_window,
dest_compute):
migrations = self.db.migration_get_unconfirmed_by_dest_compute(
context, confirm_window, dest_compute)
return jsonutils.to_primitive(migrations)
def migration_get_in_progress_by_host_and_node(self, context,
host, node):
migrations = self.db.migration_get_in_progress_by_host_and_node(
context, host, node)
return jsonutils.to_primitive(migrations)
# NOTE(comstud): This method can be removed in v2.0 of the RPC API.
def migration_create(self, context, instance, values):
values.update({'instance_uuid': instance['uuid'],
'source_compute': instance['host'],
'source_node': instance['node']})
migration_ref = self.db.migration_create(context.elevated(), values)
return jsonutils.to_primitive(migration_ref)
@rpc_common.client_exceptions(exception.MigrationNotFound)
def migration_update(self, context, migration, status):
migration_ref = self.db.migration_update(context.elevated(),
migration['id'],
{'status': status})
return jsonutils.to_primitive(migration_ref)
@rpc_common.client_exceptions(exception.AggregateHostExists)
def aggregate_host_add(self, context, aggregate, host):
host_ref = self.db.aggregate_host_add(context.elevated(),
aggregate['id'], host)
return jsonutils.to_primitive(host_ref)
@rpc_common.client_exceptions(exception.AggregateHostNotFound)
def aggregate_host_delete(self, context, aggregate, host):
self.db.aggregate_host_delete(context.elevated(),
aggregate['id'], host)
@rpc_common.client_exceptions(exception.AggregateNotFound)
def aggregate_get(self, context, aggregate_id):
aggregate = self.db.aggregate_get(context.elevated(), aggregate_id)
return jsonutils.to_primitive(aggregate)
def aggregate_get_by_host(self, context, host, key=None):
aggregates = self.db.aggregate_get_by_host(context.elevated(),
host, key)
return jsonutils.to_primitive(aggregates)
def aggregate_metadata_add(self, context, aggregate, metadata,
set_delete=False):
new_metadata = self.db.aggregate_metadata_add(context.elevated(),
aggregate['id'],
metadata, set_delete)
return jsonutils.to_primitive(new_metadata)
@rpc_common.client_exceptions(exception.AggregateMetadataNotFound)
def aggregate_metadata_delete(self, context, aggregate, key):
self.db.aggregate_metadata_delete(context.elevated(),
aggregate['id'], key)
def aggregate_metadata_get_by_host(self, context, host,
key='availability_zone'):
result = self.db.aggregate_metadata_get_by_host(context, host, key)
return jsonutils.to_primitive(result)
def bw_usage_update(self, context, uuid, mac, start_period,
bw_in=None, bw_out=None,
last_ctr_in=None, last_ctr_out=None,
last_refreshed=None,
update_cells=True):
if [bw_in, bw_out, last_ctr_in, last_ctr_out].count(None) != 4:
self.db.bw_usage_update(context, uuid, mac, start_period,
bw_in, bw_out, last_ctr_in, last_ctr_out,
last_refreshed,
update_cells=update_cells)
usage = self.db.bw_usage_get(context, uuid, start_period, mac)
return jsonutils.to_primitive(usage)
# NOTE(russellb) This method can be removed in 2.0 of this API. It is
# deprecated in favor of the method in the base API.
def get_backdoor_port(self, context):
return self.backdoor_port
def security_group_get_by_instance(self, context, instance):
group = self.db.security_group_get_by_instance(context,
instance['uuid'])
return jsonutils.to_primitive(group)
def security_group_rule_get_by_security_group(self, context, secgroup):
rules = self.db.security_group_rule_get_by_security_group(
context, secgroup['id'])
return jsonutils.to_primitive(rules, max_depth=4)
def provider_fw_rule_get_all(self, context):
rules = self.db.provider_fw_rule_get_all(context)
return jsonutils.to_primitive(rules)
def agent_build_get_by_triple(self, context, hypervisor, os, architecture):
info = self.db.agent_build_get_by_triple(context, hypervisor, os,
architecture)
return jsonutils.to_primitive(info)
    def block_device_mapping_update_or_create(self, context, values,
                                              create=None):
        """Create or update a block device mapping row.

        ``create=None`` lets the DB layer decide (update-or-create),
        ``True`` forces a create, and ``False`` forces an update of the
        row identified by ``values['id']``.  The result is propagated to
        the top-level cell.
        """
        if create is None:
            bdm = self.db.block_device_mapping_update_or_create(context,
                                                                values)
        elif create is True:
            bdm = self.db.block_device_mapping_create(context, values)
        else:
            bdm = self.db.block_device_mapping_update(context,
                                                      values['id'],
                                                      values)
        # NOTE(comstud): 'bdm' is always in the new format, so we
        # account for this in cells/messaging.py
        self.cells_rpcapi.bdm_update_or_create_at_top(context, bdm,
                                                      create=create)
def block_device_mapping_get_all_by_instance(self, context, instance,
legacy=True):
bdms = self.db.block_device_mapping_get_all_by_instance(
context, instance['uuid'])
if legacy:
bdms = block_device.legacy_mapping(bdms)
return jsonutils.to_primitive(bdms)
    def block_device_mapping_destroy(self, context, bdms=None,
                                     instance=None, volume_id=None,
                                     device_name=None):
        """Destroy block device mappings and mirror the deletes to cells.

        Exactly one addressing mode must be used: an explicit list of
        ``bdms``, or ``instance`` plus ``volume_id``, or ``instance``
        plus ``device_name``; anything else raises Invalid.
        """
        if bdms is not None:
            for bdm in bdms:
                self.db.block_device_mapping_destroy(context, bdm['id'])
                # NOTE(comstud): bdm['id'] will be different in API cell,
                # so we must try to destroy by device_name or volume_id.
                # We need an instance_uuid in order to do this properly,
                # too.
                # I hope to clean a lot of this up in the object
                # implementation.
                instance_uuid = (bdm['instance_uuid'] or
                                    (instance and instance['uuid']))
                if not instance_uuid:
                    continue
                # Better to be safe than sorry. device_name is not
                # NULLable, however it could be an empty string.
                if bdm['device_name']:
                    self.cells_rpcapi.bdm_destroy_at_top(
                            context, instance_uuid,
                            device_name=bdm['device_name'])
                elif bdm['volume_id']:
                    self.cells_rpcapi.bdm_destroy_at_top(
                            context, instance_uuid,
                            volume_id=bdm['volume_id'])
        elif instance is not None and volume_id is not None:
            self.db.block_device_mapping_destroy_by_instance_and_volume(
                    context, instance['uuid'], volume_id)
            self.cells_rpcapi.bdm_destroy_at_top(
                    context, instance['uuid'], volume_id=volume_id)
        elif instance is not None and device_name is not None:
            self.db.block_device_mapping_destroy_by_instance_and_device(
                    context, instance['uuid'], device_name)
            self.cells_rpcapi.bdm_destroy_at_top(
                    context, instance['uuid'], device_name=device_name)
        else:
            # NOTE(danms): This shouldn't happen
            raise exception.Invalid(_("Invalid block_device_mapping_destroy"
                                      " invocation"))
def instance_get_all_by_filters(self, context, filters, sort_key,
sort_dir, columns_to_join=None):
result = self.db.instance_get_all_by_filters(
context, filters, sort_key, sort_dir,
columns_to_join=columns_to_join)
return jsonutils.to_primitive(result)
# NOTE(hanlind): This method can be removed in v2.0 of the RPC API.
def instance_get_all_hung_in_rebooting(self, context, timeout):
result = self.db.instance_get_all_hung_in_rebooting(context, timeout)
return jsonutils.to_primitive(result)
def instance_get_active_by_window(self, context, begin, end=None,
project_id=None, host=None):
# Unused, but cannot remove until major RPC version bump
result = self.db.instance_get_active_by_window(context, begin, end,
project_id, host)
return jsonutils.to_primitive(result)
def instance_get_active_by_window_joined(self, context, begin, end=None,
project_id=None, host=None):
result = self.db.instance_get_active_by_window_joined(
context, begin, end, project_id, host)
return jsonutils.to_primitive(result)
def instance_destroy(self, context, instance):
self.db.instance_destroy(context, instance['uuid'])
def instance_info_cache_delete(self, context, instance):
self.db.instance_info_cache_delete(context, instance['uuid'])
def instance_info_cache_update(self, context, instance, values):
self.db.instance_info_cache_update(context, instance['uuid'],
values)
def instance_type_get(self, context, instance_type_id):
result = self.db.flavor_get(context, instance_type_id)
return jsonutils.to_primitive(result)
def instance_fault_create(self, context, values):
result = self.db.instance_fault_create(context, values)
return jsonutils.to_primitive(result)
# NOTE(kerrin): This method can be removed in v2.0 of the RPC API.
def vol_get_usage_by_time(self, context, start_time):
result = self.db.vol_get_usage_by_time(context, start_time)
return jsonutils.to_primitive(result)
# NOTE(kerrin): The last_refreshed argument is unused by this method
# and can be removed in v2.0 of the RPC API.
def vol_usage_update(self, context, vol_id, rd_req, rd_bytes, wr_req,
wr_bytes, instance, last_refreshed=None,
update_totals=False):
vol_usage = self.db.vol_usage_update(context, vol_id,
rd_req, rd_bytes,
wr_req, wr_bytes,
instance['uuid'],
instance['project_id'],
instance['user_id'],
instance['availability_zone'],
update_totals)
# We have just updated the database, so send the notification now
self.notifier.info(context, 'volume.usage',
compute_utils.usage_volume_info(vol_usage))
    @rpc_common.client_exceptions(exception.ComputeHostNotFound,
                                  exception.HostBinaryNotFound)
    def service_get_all_by(self, context, topic=None, host=None, binary=None):
        """Look up services filtered by some combination of topic, host
        and binary; with no filters, return all services.

        NOTE(review): if *only* ``binary`` is supplied, no branch below
        assigns ``result`` and the final return raises NameError --
        confirm callers never invoke it that way (or add a fallback).
        """
        if not any((topic, host, binary)):
            result = self.db.service_get_all(context)
        elif all((topic, host)):
            if topic == 'compute':
                result = self.db.service_get_by_compute_host(context, host)
                # FIXME(comstud) Potentially remove this on bump to v2.0
                result = [result]
            else:
                result = self.db.service_get_by_host_and_topic(context,
                                                               host, topic)
        elif all((host, binary)):
            result = self.db.service_get_by_args(context, host, binary)
        elif topic:
            result = self.db.service_get_all_by_topic(context, topic)
        elif host:
            result = self.db.service_get_all_by_host(context, host)

        return jsonutils.to_primitive(result)
def action_event_start(self, context, values):
evt = self.db.action_event_start(context, values)
return jsonutils.to_primitive(evt)
def action_event_finish(self, context, values):
evt = self.db.action_event_finish(context, values)
return jsonutils.to_primitive(evt)
def service_create(self, context, values):
svc = self.db.service_create(context, values)
return jsonutils.to_primitive(svc)
@rpc_common.client_exceptions(exception.ServiceNotFound)
def service_destroy(self, context, service_id):
self.db.service_destroy(context, service_id)
def compute_node_create(self, context, values):
result = self.db.compute_node_create(context, values)
return jsonutils.to_primitive(result)
def compute_node_update(self, context, node, values, prune_stats=False):
result = self.db.compute_node_update(context, node['id'], values,
prune_stats)
return jsonutils.to_primitive(result)
def compute_node_delete(self, context, node):
result = self.db.compute_node_delete(context, node['id'])
return jsonutils.to_primitive(result)
@rpc_common.client_exceptions(exception.ServiceNotFound)
def service_update(self, context, service, values):
svc = self.db.service_update(context, service['id'], values)
return jsonutils.to_primitive(svc)
def task_log_get(self, context, task_name, begin, end, host, state=None):
result = self.db.task_log_get(context, task_name, begin, end, host,
state)
return jsonutils.to_primitive(result)
def task_log_begin_task(self, context, task_name, begin, end, host,
task_items=None, message=None):
result = self.db.task_log_begin_task(context.elevated(), task_name,
begin, end, host, task_items,
message)
return jsonutils.to_primitive(result)
def task_log_end_task(self, context, task_name, begin, end, host,
errors, message=None):
result = self.db.task_log_end_task(context.elevated(), task_name,
begin, end, host, errors, message)
return jsonutils.to_primitive(result)
def notify_usage_exists(self, context, instance, current_period=False,
ignore_missing_network_data=True,
system_metadata=None, extra_usage_info=None):
compute_utils.notify_usage_exists(self.notifier, context, instance,
current_period,
ignore_missing_network_data,
system_metadata, extra_usage_info)
def security_groups_trigger_handler(self, context, event, args):
self.security_group_api.trigger_handler(event, context, *args)
def security_groups_trigger_members_refresh(self, context, group_ids):
self.security_group_api.trigger_members_refresh(context, group_ids)
def network_migrate_instance_start(self, context, instance, migration):
self.network_api.migrate_instance_start(context, instance, migration)
def network_migrate_instance_finish(self, context, instance, migration):
self.network_api.migrate_instance_finish(context, instance, migration)
def quota_commit(self, context, reservations, project_id=None,
user_id=None):
quota.QUOTAS.commit(context, reservations, project_id=project_id,
user_id=user_id)
def quota_rollback(self, context, reservations, project_id=None,
user_id=None):
quota.QUOTAS.rollback(context, reservations, project_id=project_id,
user_id=user_id)
def get_ec2_ids(self, context, instance):
ec2_ids = {}
ec2_ids['instance-id'] = ec2utils.id_to_ec2_inst_id(instance['uuid'])
ec2_ids['ami-id'] = ec2utils.glance_id_to_ec2_id(context,
instance['image_ref'])
for image_type in ['kernel', 'ramdisk']:
if '%s_id' % image_type in instance:
image_id = instance['%s_id' % image_type]
ec2_image_type = ec2utils.image_type(image_type)
ec2_id = ec2utils.glance_id_to_ec2_id(context, image_id,
ec2_image_type)
ec2_ids['%s-id' % image_type] = ec2_id
return ec2_ids
# NOTE(danms): This method is now deprecated and can be removed in
# version v2.0 of the RPC API
def compute_stop(self, context, instance, do_cast=True):
# NOTE(mriedem): Clients using an interface before 1.43 will be sending
# dicts so we need to handle that here since compute/api::stop()
# requires an object.
if isinstance(instance, dict):
instance = instance_obj.Instance._from_db_object(
context, instance_obj.Instance(), instance)
self.compute_api.stop(context, instance, do_cast)
# NOTE(comstud): This method is now deprecated and can be removed in
# version v2.0 of the RPC API
def compute_confirm_resize(self, context, instance, migration_ref):
if isinstance(instance, dict):
attrs = ['metadata', 'system_metadata', 'info_cache',
'security_groups']
instance = instance_obj.Instance._from_db_object(
context, instance_obj.Instance(), instance,
expected_attrs=attrs)
if isinstance(migration_ref, dict):
migration_ref = migration_obj.Migration._from_db_object(
context.elevated(), migration_ref)
self.compute_api.confirm_resize(context, instance,
migration=migration_ref)
def compute_unrescue(self, context, instance):
self.compute_api.unrescue(context, instance)
def _object_dispatch(self, target, method, context, args, kwargs):
"""Dispatch a call to an object method.
This ensures that object methods get called and any exception
that is raised gets wrapped in a ClientException for forwarding
back to the caller (without spamming the conductor logs).
"""
try:
# NOTE(danms): Keep the getattr inside the try block since
# a missing method is really a client problem
return getattr(target, method)(context, *args, **kwargs)
except Exception:
raise rpc_common.ClientException()
def object_class_action(self, context, objname, objmethod,
objver, args, kwargs):
"""Perform a classmethod action on an object."""
objclass = nova_object.NovaObject.obj_class_from_name(objname,
objver)
return self._object_dispatch(objclass, objmethod, context,
args, kwargs)
    def object_action(self, context, objinst, objmethod, args, kwargs):
        """Perform an action on an object.

        Runs ``objmethod`` on ``objinst`` and returns a 2-tuple of
        (updates, result): ``updates`` maps each field the call changed
        to its primitive value (plus the object's obj_what_changed set),
        so the remote side can apply the same mutations to its copy.
        """
        oldobj = copy.copy(objinst)
        result = self._object_dispatch(objinst, objmethod, context,
                                       args, kwargs)
        updates = dict()
        # NOTE(danms): Diff the object with the one passed to us and
        # generate a list of changes to forward back
        for field in objinst.fields:
            if not objinst.obj_attr_is_set(field):
                # Avoid demand-loading anything
                continue
            if (not oldobj.obj_attr_is_set(field) or
                    oldobj[field] != objinst[field]):
                updates[field] = objinst._attr_to_primitive(field)
        # This is safe since a field named this would conflict with the
        # method anyway
        updates['obj_what_changed'] = objinst.obj_what_changed()
        return updates, result
# NOTE(danms): This method is now deprecated and can be removed in
# v2.0 of the RPC API
def compute_reboot(self, context, instance, reboot_type):
self.compute_api.reboot(context, instance, reboot_type)
class ComputeTaskManager(base.Base):
"""Namespace for compute methods.
This class presents an rpc API for nova-conductor under the 'compute_task'
namespace. The methods here are compute operations that are invoked
by the API service. These methods see the operation to completion, which
may involve coordinating activities on multiple compute nodes.
"""
RPC_API_NAMESPACE = 'compute_task'
RPC_API_VERSION = '1.6'
def __init__(self):
super(ComputeTaskManager, self).__init__()
self.compute_rpcapi = compute_rpcapi.ComputeAPI()
self.scheduler_rpcapi = scheduler_rpcapi.SchedulerAPI()
self.image_service = glance.get_default_image_service()
self.quotas = quota.QUOTAS
    @rpc_common.client_exceptions(exception.NoValidHost,
                                  exception.ComputeServiceUnavailable,
                                  exception.InvalidHypervisorType,
                                  exception.UnableToMigrateToSelf,
                                  exception.DestinationHypervisorTooOld,
                                  exception.InvalidLocalStorage,
                                  exception.InvalidSharedStorage,
                                  exception.MigrationPreCheckError)
    def migrate_server(self, context, instance, scheduler_hint, live, rebuild,
            flavor, block_migration, disk_over_commit, reservations=None):
        """Entry point for server migration.

        Dispatches to live migration (``live`` and no rebuild/flavor) or
        cold migration / resize (``flavor`` and not live/rebuild); any
        other flag combination is unsupported.
        """
        if instance and not isinstance(instance, instance_obj.Instance):
            # NOTE(danms): Until v2 of the RPC API, we need to tolerate
            # old-world instance objects here
            attrs = ['metadata', 'system_metadata', 'info_cache',
                     'security_groups']
            instance = instance_obj.Instance._from_db_object(
                context, instance_obj.Instance(), instance,
                expected_attrs=attrs)

        if live and not rebuild and not flavor:
            self._live_migrate(context, instance, scheduler_hint,
                               block_migration, disk_over_commit)
        elif not live and not rebuild and flavor:
            instance_uuid = instance['uuid']
            with compute_utils.EventReporter(context, ConductorManager(),
                                         'cold_migrate', instance_uuid):
                self._cold_migrate(context, instance, flavor,
                                   scheduler_hint['filter_properties'],
                                   reservations)
        else:
            raise NotImplementedError()
    def _cold_migrate(self, context, instance, flavor, filter_properties,
                      reservations):
        """Schedule and kick off a cold migration / resize.

        Selects a destination via the scheduler, then asks the target
        compute to prep_resize.  On NoValidHost the instance state is
        reset and quota reservations rolled back; on any other failure
        the instance is put into ERROR (and the exception re-raised).
        """
        image_ref = instance.image_ref
        image = compute_utils.get_image_metadata(
            context, self.image_service, image_ref, instance)

        request_spec = scheduler_utils.build_request_spec(
            context, image, [instance], instance_type=flavor)

        try:
            hosts = self.scheduler_rpcapi.select_destinations(
                    context, request_spec, filter_properties)
            host_state = hosts[0]
        except exception.NoValidHost as ex:
            # Keep the current vm_state if set, else assume ACTIVE.
            vm_state = instance['vm_state']
            if not vm_state:
                vm_state = vm_states.ACTIVE
            updates = {'vm_state': vm_state, 'task_state': None}
            self._set_vm_state_and_notify(context, 'migrate_server',
                                          updates, ex, request_spec)
            if reservations:
                self.quotas.rollback(context, reservations)

            LOG.warning(_("No valid host found for cold migrate"))
            return

        try:
            scheduler_utils.populate_filter_properties(filter_properties,
                                                       host_state)
            # context is not serializable
            filter_properties.pop('context', None)

            # TODO(timello): originally, instance_type in request_spec
            # on compute.api.resize does not have 'extra_specs', so we
            # remove it for now to keep tests backward compatibility.
            request_spec['instance_type'].pop('extra_specs')

            (host, node) = (host_state['host'], host_state['nodename'])
            self.compute_rpcapi.prep_resize(
                context, image, instance,
                flavor, host,
                reservations, request_spec=request_spec,
                filter_properties=filter_properties, node=node)
        except Exception as ex:
            with excutils.save_and_reraise_exception():
                updates = {'vm_state': vm_states.ERROR,
                           'task_state': None}
                self._set_vm_state_and_notify(context, 'migrate_server',
                                              updates, ex, request_spec)
                if reservations:
                    self.quotas.rollback(context, reservations)
def _set_vm_state_and_notify(self, context, method, updates, ex,
request_spec):
scheduler_utils.set_vm_state_and_notify(
context, 'compute_task', method, updates,
ex, request_spec, self.db)
def _live_migrate(self, context, instance, scheduler_hint,
block_migration, disk_over_commit):
destination = scheduler_hint.get("host")
try:
live_migrate.execute(context, instance, destination,
block_migration, disk_over_commit)
except (exception.NoValidHost,
exception.ComputeServiceUnavailable,
exception.InvalidHypervisorType,
exception.UnableToMigrateToSelf,
exception.DestinationHypervisorTooOld,
exception.InvalidLocalStorage,
exception.InvalidSharedStorage,
exception.MigrationPreCheckError) as ex:
with excutils.save_and_reraise_exception():
#TODO(johngarbutt) - eventually need instance actions here
request_spec = {'instance_properties': {
'uuid': instance['uuid'], },
}
scheduler_utils.set_vm_state_and_notify(context,
'compute_task', 'migrate_server',
dict(vm_state=instance['vm_state'],
task_state=None,
expected_task_state=task_states.MIGRATING,),
ex, request_spec, self.db)
except Exception as ex:
with excutils.save_and_reraise_exception():
request_spec = {'instance_properties': {
'uuid': instance['uuid'], },
}
scheduler_utils.set_vm_state_and_notify(context,
'compute_task', 'migrate_server',
{'vm_state': vm_states.ERROR},
ex, request_spec, self.db)
def build_instances(self, context, instances, image, filter_properties,
admin_password, injected_files, requested_networks,
security_groups, block_device_mapping, legacy_bdm=True):
request_spec = scheduler_utils.build_request_spec(context, image,
instances)
# NOTE(alaski): For compatibility until a new scheduler method is used.
request_spec.update({'block_device_mapping': block_device_mapping,
'security_group': security_groups})
self.scheduler_rpcapi.run_instance(context, request_spec=request_spec,
admin_password=admin_password, injected_files=injected_files,
requested_networks=requested_networks, is_first_time=True,
filter_properties=filter_properties,
legacy_bdm_in_spec=legacy_bdm)
def _get_image(self, context, image_id):
if not image_id:
return None
return self.image_service.show(context, image_id)
def _delete_image(self, context, image_id):
(image_service, image_id) = glance.get_remote_image_service(context,
image_id)
return image_service.delete(context, image_id)
def _schedule_instances(self, context, image, filter_properties,
*instances):
request_spec = scheduler_utils.build_request_spec(context, image,
instances)
# dict(host='', nodename='', limits='')
hosts = self.scheduler_rpcapi.select_destinations(context,
request_spec, filter_properties)
return hosts
def unshelve_instance(self, context, instance):
sys_meta = instance.system_metadata
if instance.vm_state == vm_states.SHELVED:
instance.task_state = task_states.POWERING_ON
instance.save(expected_task_state=task_states.UNSHELVING)
self.compute_rpcapi.start_instance(context, instance)
snapshot_id = sys_meta.get('shelved_image_id')
if snapshot_id:
self._delete_image(context, snapshot_id)
elif instance.vm_state == vm_states.SHELVED_OFFLOADED:
try:
with compute_utils.EventReporter(context, self.db,
'get_image_info', instance.uuid):
image = self._get_image(context,
sys_meta['shelved_image_id'])
except exception.ImageNotFound:
with excutils.save_and_reraise_exception():
LOG.error(_('Unshelve attempted but vm_state not SHELVED '
'or SHELVED_OFFLOADED'), instance=instance)
instance.vm_state = vm_states.ERROR
instance.save()
hosts = self._schedule_instances(context, image, [], instance)
host = hosts.pop(0)['host']
self.compute_rpcapi.unshelve_instance(context, instance, host,
image)
else:
LOG.error(_('Unshelve attempted but vm_state not SHELVED or '
'SHELVED_OFFLOADED'), instance=instance)
instance.vm_state = vm_states.ERROR
instance.save()
return
for key in ['shelved_at', 'shelved_image_id', 'shelved_host']:
if key in sys_meta:
del(sys_meta[key])
instance.system_metadata = sys_meta
instance.save()
| pombredanne/MOG | nova/conductor/manager.py | Python | apache-2.0 | 39,345 |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
namespace Google.Cloud.Orchestration.Airflow.Service.V1.Snippets
{
using Google.Api.Gax;
using System;
using System.Linq;
using System.Threading.Tasks;
/// <summary>Generated snippets.</summary>
/// <summary>Generated snippets.</summary>
/// <remarks>
/// Each method is a self-contained usage sample for
/// <c>ImageVersionsClient.ListImageVersions</c>: the request-object overload
/// and the convenience (string parent) overload, each in synchronous and
/// asynchronous form. The <c>// Snippet:</c> / <c>// End snippet</c> marker
/// comments delimit the region that is extracted into reference
/// documentation, so the code between them must stay runnable as shown.
/// This file is generated -- do not hand-edit the snippet bodies.
/// </remarks>
public sealed class AllGeneratedImageVersionsClientSnippets
{
    /// <summary>Snippet for ListImageVersions</summary>
    /// <remarks>Synchronous call using an explicit request object.</remarks>
    public void ListImageVersionsRequestObject()
    {
        // Snippet: ListImageVersions(ListImageVersionsRequest, CallSettings)
        // Create client
        ImageVersionsClient imageVersionsClient = ImageVersionsClient.Create();
        // Initialize request argument(s)
        ListImageVersionsRequest request = new ListImageVersionsRequest
        {
            Parent = "",
            IncludePastReleases = false,
        };
        // Make the request
        PagedEnumerable<ListImageVersionsResponse, ImageVersion> response = imageVersionsClient.ListImageVersions(request);
        // Iterate over all response items, lazily performing RPCs as required
        foreach (ImageVersion item in response)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
        // Or iterate over pages (of server-defined size), performing one RPC per page
        foreach (ListImageVersionsResponse page in response.AsRawResponses())
        {
            // Do something with each page of items
            Console.WriteLine("A page of results:");
            foreach (ImageVersion item in page)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
        }
        // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
        int pageSize = 10;
        Page<ImageVersion> singlePage = response.ReadPage(pageSize);
        // Do something with the page of items
        Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
        foreach (ImageVersion item in singlePage)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
        // Store the pageToken, for when the next page is required.
        string nextPageToken = singlePage.NextPageToken;
        // End snippet
    }

    /// <summary>Snippet for ListImageVersionsAsync</summary>
    /// <remarks>Asynchronous call using an explicit request object.</remarks>
    public async Task ListImageVersionsRequestObjectAsync()
    {
        // Snippet: ListImageVersionsAsync(ListImageVersionsRequest, CallSettings)
        // Create client
        ImageVersionsClient imageVersionsClient = await ImageVersionsClient.CreateAsync();
        // Initialize request argument(s)
        ListImageVersionsRequest request = new ListImageVersionsRequest
        {
            Parent = "",
            IncludePastReleases = false,
        };
        // Make the request
        PagedAsyncEnumerable<ListImageVersionsResponse, ImageVersion> response = imageVersionsClient.ListImageVersionsAsync(request);
        // Iterate over all response items, lazily performing RPCs as required
        await response.ForEachAsync((ImageVersion item) =>
        {
            // Do something with each item
            Console.WriteLine(item);
        });
        // Or iterate over pages (of server-defined size), performing one RPC per page
        await response.AsRawResponses().ForEachAsync((ListImageVersionsResponse page) =>
        {
            // Do something with each page of items
            Console.WriteLine("A page of results:");
            foreach (ImageVersion item in page)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
        });
        // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
        int pageSize = 10;
        Page<ImageVersion> singlePage = await response.ReadPageAsync(pageSize);
        // Do something with the page of items
        Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
        foreach (ImageVersion item in singlePage)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
        // Store the pageToken, for when the next page is required.
        string nextPageToken = singlePage.NextPageToken;
        // End snippet
    }

    /// <summary>Snippet for ListImageVersions</summary>
    /// <remarks>Synchronous call using the convenience (string parent) overload.</remarks>
    public void ListImageVersions()
    {
        // Snippet: ListImageVersions(string, string, int?, CallSettings)
        // Create client
        ImageVersionsClient imageVersionsClient = ImageVersionsClient.Create();
        // Initialize request argument(s)
        string parent = "";
        // Make the request
        PagedEnumerable<ListImageVersionsResponse, ImageVersion> response = imageVersionsClient.ListImageVersions(parent);
        // Iterate over all response items, lazily performing RPCs as required
        foreach (ImageVersion item in response)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
        // Or iterate over pages (of server-defined size), performing one RPC per page
        foreach (ListImageVersionsResponse page in response.AsRawResponses())
        {
            // Do something with each page of items
            Console.WriteLine("A page of results:");
            foreach (ImageVersion item in page)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
        }
        // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
        int pageSize = 10;
        Page<ImageVersion> singlePage = response.ReadPage(pageSize);
        // Do something with the page of items
        Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
        foreach (ImageVersion item in singlePage)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
        // Store the pageToken, for when the next page is required.
        string nextPageToken = singlePage.NextPageToken;
        // End snippet
    }

    /// <summary>Snippet for ListImageVersionsAsync</summary>
    /// <remarks>Asynchronous call using the convenience (string parent) overload.</remarks>
    public async Task ListImageVersionsAsync()
    {
        // Snippet: ListImageVersionsAsync(string, string, int?, CallSettings)
        // Create client
        ImageVersionsClient imageVersionsClient = await ImageVersionsClient.CreateAsync();
        // Initialize request argument(s)
        string parent = "";
        // Make the request
        PagedAsyncEnumerable<ListImageVersionsResponse, ImageVersion> response = imageVersionsClient.ListImageVersionsAsync(parent);
        // Iterate over all response items, lazily performing RPCs as required
        await response.ForEachAsync((ImageVersion item) =>
        {
            // Do something with each item
            Console.WriteLine(item);
        });
        // Or iterate over pages (of server-defined size), performing one RPC per page
        await response.AsRawResponses().ForEachAsync((ListImageVersionsResponse page) =>
        {
            // Do something with each page of items
            Console.WriteLine("A page of results:");
            foreach (ImageVersion item in page)
            {
                // Do something with each item
                Console.WriteLine(item);
            }
        });
        // Or retrieve a single page of known size (unless it's the final page), performing as many RPCs as required
        int pageSize = 10;
        Page<ImageVersion> singlePage = await response.ReadPageAsync(pageSize);
        // Do something with the page of items
        Console.WriteLine($"A page of {pageSize} results (unless it's the final page):");
        foreach (ImageVersion item in singlePage)
        {
            // Do something with each item
            Console.WriteLine(item);
        }
        // Store the pageToken, for when the next page is required.
        string nextPageToken = singlePage.NextPageToken;
        // End snippet
    }
}
}
| googleapis/google-cloud-dotnet | apis/Google.Cloud.Orchestration.Airflow.Service.V1/Google.Cloud.Orchestration.Airflow.Service.V1.Snippets/ImageVersionsClientSnippets.g.cs | C# | apache-2.0 | 9,411 |
<?php
// NOTE(review): analyzer-test fixture holding the extracted body of
// Symfony's PropertyPath::getValue(); $this, $objectOrArray and $value are
// provided by the (absent) enclosing method scope. Walks one path element
// per iteration, descending into $objectOrArray.
for ($i = 0; $i < $this->length; ++$i) {
    if (is_object($objectOrArray)) {
        $value = $this->readProperty($objectOrArray, $i);
    // arrays need to be treated separately (due to PHP bug?)
    // http://bugs.php.net/bug.php?id=52133
    } elseif (is_array($objectOrArray)) {
        $property = $this->elements[$i];
        // Auto-create missing intermediate segments: an array for inner
        // path elements, null for the final one.
        if (!array_key_exists($property, $objectOrArray)) {
            $objectOrArray[$property] = $i + 1 < $this->length ? array() : null;
        }
        // Bind by reference so writes propagate back into the container.
        $value =& $objectOrArray[$property];
    } else {
        throw new UnexpectedTypeException($objectOrArray, 'object or array');
    }
    // Descend one level for the next iteration (reference for arrays).
    $objectOrArray =& $value;
}
return $value;
/*
* Copyright 2015 AppDynamics, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.appdynamicspilot.rest;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
/**
 * A serializable shopping cart: an ordered collection of
 * {@link ShoppingCartItem}s with a running total.
 */
public class ShoppingCart implements java.io.Serializable {

    /**
     * Explicit serialization version; previously the JVM derived one, so any
     * recompile could break deserialization of stored carts.
     */
    private static final long serialVersionUID = 1L;

    /*
     * BUG FIX: the logger used to be a non-static, non-transient instance
     * field. org.apache.log4j.Logger is not Serializable, so serializing a
     * ShoppingCart (this class implements Serializable) would throw
     * java.io.NotSerializableException. A static final logger is excluded
     * from serialization and is the conventional one-per-class form.
     */
    private static final Logger log = Logger.getLogger(ShoppingCart.class);

    /** Items currently in the cart, in insertion order. */
    private List<ShoppingCartItem> items;

    /** Creates an empty cart. */
    public ShoppingCart() {
        items = new ArrayList<ShoppingCartItem>();
    }

    /** Adds an item to the end of the cart. */
    public void addItem(ShoppingCartItem item) {
        items.add(item);
    }

    /** Removes the first occurrence of the given item, if present. */
    public void removeItem(ShoppingCartItem item) {
        items.remove(item);
    }

    /** Returns the live item list (not a defensive copy). */
    public List<ShoppingCartItem> getAllItems() {
        return items;
    }

    /** Returns the sum of all item prices; 0.0 for an empty cart. */
    public double getCartTotal() {
        double total = 0.0;
        for (ShoppingCartItem item : items) {
            total += item.getPrice();
        }
        return total;
    }

    /** Removes every item from the cart. */
    public void clear() {
        items.clear();
    }
}
| udayinfy/ECommerce-Java | ECommerce-Web/src/main/java/com/appdynamicspilot/rest/ShoppingCart.java | Java | apache-2.0 | 1,458 |
// Application entry point: build the express app, register routes, listen.
const express = require('express');
const router = require('./router');

const app = express();

// Honour a platform-assigned port (e.g. Heroku's $PORT), else default 3000.
app.set('port', process.env.PORT || 3000);

router.define(app);

// Set up port
// ========================================================
app.listen(app.get('port'), function () {
    console.log("Node app is running at localhost:" + app.get('port'));
});
import backgroundImages from '@const/background-images';
import * as actions from './actions.js';
import Background from './class.js';
// const getObj = (indexString) => {
/*
const [type, index] = indexString.split('-')
if (typeof index === 'undefined') return {}
return store.getState().bgimgState.list[type][index]
*/
// }
// Build the initial list of Background objects for one image source type.
// Only the bundled 'default' images produce entries today; any other type
// (e.g. 'custom') yields an empty list. The commented-out NW.js filesystem
// scan below is retained as legacy reference code for the custom source.
const getListInitial = (type) => {
    let list = [];
    if (type === 'default') {
        // "default-<index>" doubles as the item's identifier string.
        list = backgroundImages.map(
            (filename, index) => new Background(filename, type + '-' + index)
        );
    }
    /*
    const dir = type == 'custom' ? thePath.bgimgs_custom : thePath.bgimgs
    const parseData = (name) => {
    return {
    name: name
    }
    }
    if (self.nw) {
    const fs = require('fs')
    const path = require('path')
    const getList = (dir) => {
    return fs.readdirSync(dir)
    .filter(function (file) {
    return !fs.lstatSync(path.join(dir, file)).isDirectory()
    })
    .map(function (filename) {
    return {
    name: filename,
    time: fs.statSync(path.join(dir, filename)).mtime.getTime()
    };
    })
    .sort(function (a, b) { return b.time - a.time; })
    .map(function (o) { return o.name; })
    }
    getList(dir)
    .forEach(function (name) {
    list.push(parseData(
    name,
    type === 'default'
    ))
    })
    } else {
    }
    */
    return list;
};
// Thunk creator: build the default and custom background lists and
// dispatch the init action, marking `currentIndex` ("<type>-<index>",
// e.g. "default-0") as the current background.
export const initList = (currentIndex = 'default-0') => {
    const listDefault = getListInitial('default');
    const listCustom = getListInitial('custom');
    const [type, index] = currentIndex.split('-');
    // BUG FIX / hardening: the current entry was previously resolved with
    // eval('list' + Type), which breaks under minification, defeats static
    // analysis and evaluates arbitrary text for unexpected `type` values.
    // A plain table lookup is equivalent for the two known types, and an
    // unknown type now yields `undefined` instead of a ReferenceError.
    const listsByType = {
        default: listDefault,
        custom: listCustom,
    };
    const current = (listsByType[type] || [])[index];
    return (dispatch) => {
        dispatch(
            actions.init({
                list: {
                    default: listDefault,
                    custom: listCustom,
                },
                current,
            })
        );
    };
};
// TODO(review): unimplemented stubs -- adding/removing custom background
// images depends on the filesystem-backed custom list, which is itself
// still commented-out legacy code elsewhere in this module.
export const add = (/*filename*/) => {};
export const remove = (/*indexCustom*/) => {};
// Thunk creator: forward a partial background-state change to the store.
export const change = (obj) => (dispatch) => {
    dispatch(actions.change(obj));
};
export const mainImgLoaded = () => (dispatch) => dispatch(actions.mainLoaded());
| Diablohu/WhoCallsTheFleet-React | src/api/bgimg/api.js | JavaScript | apache-2.0 | 2,700 |
# Copyright (C) 2014-2016 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Mongo
module ServerSelector
# Encapsulates specifications for selecting servers, with the
# primary preferred, given a list of candidates.
#
# @since 2.0.0
class PrimaryPreferred
  include Selectable

  # The symbolic name of this read preference mode.
  #
  # @example Get the name of the server mode for this preference.
  #   preference.name
  #
  # @return [ Symbol ] :primary_preferred
  #
  # @since 2.0.0
  def name
    :primary_preferred
  end

  # Whether the slaveOk bit should be set on wire protocol messages,
  # i.e. whether the operation may run on a secondary server.
  #
  # @return [ true ] true
  #
  # @since 2.0.0
  def slave_ok?
    true
  end

  # Whether tag sets may be defined for this server preference.
  #
  # @return [ true ] true
  #
  # @since 2.0.0
  def tags_allowed?
    true
  end

  # Convert this server preference definition into a format appropriate
  # for a mongos server.
  #
  # @example Convert this server preference definition into a format
  #   for mongos.
  #   preference = Mongo::ServerSelector::PrimaryPreferred.new
  #   preference.to_mongos
  #
  # @return [ Hash ] The server preference formatted for a mongos server.
  #
  # @since 2.0.0
  def to_mongos
    preference = { :mode => 'primaryPreferred' }
    preference[:tags] = tag_sets unless tag_sets.empty?
    preference[:maxStalenessSeconds] = max_staleness if max_staleness
    preference
  end

  private

  # Select servers with the primary preferred: the primary when one is
  # known, otherwise the secondaries within the latency window matching
  # any configured tag sets.
  #
  # @example Select servers given a list of candidates,
  #   with the primary preferred.
  #   preference = Mongo::ServerSelector::PrimaryPreferred.new
  #   preference.select([candidate_1, candidate_2])
  #
  # @return [ Array ] Matching servers, the primary taking precedence.
  #
  # @since 2.0.0
  def select(candidates)
    preferred = primary(candidates)
    fallback = near_servers(secondaries(candidates))
    preferred.first ? preferred : fallback
  end

  def max_staleness_allowed?
    true
  end
end
end
end
| estolfo/mongo-ruby-driver | lib/mongo/server_selector/primary_preferred.rb | Ruby | apache-2.0 | 3,030 |
/*
Copyright 2014 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package options contains flags and options for initializing an apiserver
package options
import (
"net"
"strings"
"time"
"k8s.io/kubernetes/pkg/admission"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/validation"
"k8s.io/kubernetes/pkg/apimachinery/registered"
"k8s.io/kubernetes/pkg/apiserver"
"k8s.io/kubernetes/pkg/genericapiserver"
kubeletclient "k8s.io/kubernetes/pkg/kubelet/client"
"k8s.io/kubernetes/pkg/master/ports"
"k8s.io/kubernetes/pkg/util"
utilnet "k8s.io/kubernetes/pkg/util/net"
"github.com/spf13/pflag"
)
// APIServer runs a kubernetes api server.
//
// The fields mirror the command-line flags registered in AddFlags (one
// field per flag, kept in alphabetical order) on top of the embedded
// generic server run options. Fields are populated from flags before the
// server is started.
type APIServer struct {
	*genericapiserver.ServerRunOptions
	APIGroupPrefix             string
	APIPrefix                  string
	AdmissionControl           string
	AdmissionControlConfigFile string
	AdvertiseAddress           net.IP
	AllowPrivileged            bool
	AuthorizationMode          string
	AuthorizationPolicyFile    string
	BasicAuthFile              string
	CloudConfigFile            string
	CloudProvider              string
	CorsAllowedOriginList      []string
	// DeprecatedStorageVersion backs the deprecated --storage-version
	// flag; prefer StorageVersions.
	DeprecatedStorageVersion string
	EnableLogsSupport        bool
	EnableProfiling          bool
	EnableWatchCache         bool
	EtcdPathPrefix           string
	EtcdServerList           []string
	EtcdServersOverrides     []string
	// EventTTL bounds how long Event objects are retained in storage.
	EventTTL      time.Duration
	ExternalHost  string
	KeystoneURL   string
	KubeletConfig kubeletclient.KubeletClientConfig
	// KubernetesServiceNodePort, when non-zero, exposes the kubernetes
	// master service as a NodePort service on that port.
	KubernetesServiceNodePort int
	MasterCount               int
	MasterServiceNamespace    string
	MaxConnectionBytesPerSec  int64
	MinRequestTimeout         int
	OIDCCAFile                string
	OIDCClientID              string
	OIDCIssuerURL             string
	OIDCUsernameClaim         string
	RuntimeConfig             util.ConfigurationMap
	SSHKeyfile                string
	SSHUser                   string
	ServiceAccountKeyFile     string
	ServiceAccountLookup      bool
	ServiceClusterIPRange     net.IPNet // TODO: make this a list
	ServiceNodePortRange      utilnet.PortRange
	StorageVersions           string
	TokenAuthFile             string
}
// NewAPIServer creates a new APIServer object with default parameters
func NewAPIServer() *APIServer {
	// Defaults are deliberately permissive/minimal; every one of them can
	// be overridden from the command line via AddFlags.
	return &APIServer{
		ServerRunOptions:       genericapiserver.NewServerRunOptions(),
		APIGroupPrefix:         "/apis",
		APIPrefix:              "/api",
		AdmissionControl:       "AlwaysAdmit",
		AuthorizationMode:      "AlwaysAllow",
		EnableLogsSupport:      true,
		EtcdPathPrefix:         genericapiserver.DefaultEtcdPathPrefix,
		EventTTL:               1 * time.Hour,
		MasterCount:            1,
		MasterServiceNamespace: api.NamespaceDefault,
		RuntimeConfig:          make(util.ConfigurationMap),
		StorageVersions:        registered.AllPreferredGroupVersions(),
		KubeletConfig: kubeletclient.KubeletClientConfig{
			Port:        ports.KubeletPort,
			EnableHttps: true,
			HTTPTimeout: time.Duration(5) * time.Second,
		},
	}
}
// AddFlags adds flags for a specific APIServer to the specified FlagSet
func (s *APIServer) AddFlags(fs *pflag.FlagSet) {
	// Note: the weird ""+ in below lines seems to be the only way to get gofmt to
	// arrange these text blocks sensibly. Grrr.

	// --- Insecure (localhost) serving ---
	fs.IntVar(&s.InsecurePort, "insecure-port", s.InsecurePort, ""+
		"The port on which to serve unsecured, unauthenticated access. Default 8080. It is assumed "+
		"that firewall rules are set up such that this port is not reachable from outside of "+
		"the cluster and that port 443 on the cluster's public address is proxied to this "+
		"port. This is performed by nginx in the default setup.")
	fs.IntVar(&s.InsecurePort, "port", s.InsecurePort, "DEPRECATED: see --insecure-port instead")
	fs.MarkDeprecated("port", "see --insecure-port instead")
	fs.IPVar(&s.InsecureBindAddress, "insecure-bind-address", s.InsecureBindAddress, ""+
		"The IP address on which to serve the --insecure-port (set to 0.0.0.0 for all interfaces). "+
		"Defaults to localhost.")
	fs.IPVar(&s.InsecureBindAddress, "address", s.InsecureBindAddress, "DEPRECATED: see --insecure-bind-address instead")
	fs.MarkDeprecated("address", "see --insecure-bind-address instead")

	// --- Secure serving & TLS ---
	fs.IPVar(&s.BindAddress, "bind-address", s.BindAddress, ""+
		"The IP address on which to listen for the --secure-port port. The "+
		"associated interface(s) must be reachable by the rest of the cluster, and by CLI/web "+
		"clients. If blank, all interfaces will be used (0.0.0.0).")
	fs.IPVar(&s.AdvertiseAddress, "advertise-address", s.AdvertiseAddress, ""+
		"The IP address on which to advertise the apiserver to members of the cluster. This "+
		"address must be reachable by the rest of the cluster. If blank, the --bind-address "+
		"will be used. If --bind-address is unspecified, the host's default interface will "+
		"be used.")
	fs.IPVar(&s.BindAddress, "public-address-override", s.BindAddress, "DEPRECATED: see --bind-address instead")
	fs.MarkDeprecated("public-address-override", "see --bind-address instead")
	fs.IntVar(&s.SecurePort, "secure-port", s.SecurePort, ""+
		"The port on which to serve HTTPS with authentication and authorization. If 0, "+
		"don't serve HTTPS at all.")
	fs.StringVar(&s.TLSCertFile, "tls-cert-file", s.TLSCertFile, ""+
		"File containing x509 Certificate for HTTPS. (CA cert, if any, concatenated after server cert). "+
		"If HTTPS serving is enabled, and --tls-cert-file and --tls-private-key-file are not provided, "+
		"a self-signed certificate and key are generated for the public address and saved to /var/run/kubernetes.")
	fs.StringVar(&s.TLSPrivateKeyFile, "tls-private-key-file", s.TLSPrivateKeyFile, "File containing x509 private key matching --tls-cert-file.")
	fs.StringVar(&s.CertDirectory, "cert-dir", s.CertDirectory, "The directory where the TLS certs are located (by default /var/run/kubernetes). "+
		"If --tls-cert-file and --tls-private-key-file are provided, this flag will be ignored.")

	// --- API prefixes & storage versions ---
	fs.StringVar(&s.APIPrefix, "api-prefix", s.APIPrefix, "The prefix for API requests on the server. Default '/api'.")
	fs.MarkDeprecated("api-prefix", "--api-prefix is deprecated and will be removed when the v1 API is retired.")
	fs.StringVar(&s.DeprecatedStorageVersion, "storage-version", s.DeprecatedStorageVersion, "The version to store the legacy v1 resources with. Defaults to server preferred")
	fs.MarkDeprecated("storage-version", "--storage-version is deprecated and will be removed when the v1 API is retired. See --storage-versions instead.")
	fs.StringVar(&s.StorageVersions, "storage-versions", s.StorageVersions, "The versions to store resources with. "+
		"Different groups may be stored in different versions. Specified in the format \"group1/version1,group2/version2...\". "+
		"This flag expects a complete list of storage versions of ALL groups registered in the server. "+
		"It defaults to a list of preferred versions of all registered groups, which is derived from the KUBE_API_VERSIONS environment variable.")

	// --- Cloud provider ---
	fs.StringVar(&s.CloudProvider, "cloud-provider", s.CloudProvider, "The provider for cloud services. Empty string for no provider.")
	fs.StringVar(&s.CloudConfigFile, "cloud-config", s.CloudConfigFile, "The path to the cloud provider configuration file. Empty string for no configuration file.")

	fs.DurationVar(&s.EventTTL, "event-ttl", s.EventTTL, "Amount of time to retain events. Default 1 hour.")

	// --- Authentication ---
	fs.StringVar(&s.BasicAuthFile, "basic-auth-file", s.BasicAuthFile, "If set, the file that will be used to admit requests to the secure port of the API server via http basic authentication.")
	fs.StringVar(&s.ClientCAFile, "client-ca-file", s.ClientCAFile, "If set, any request presenting a client certificate signed by one of the authorities in the client-ca-file is authenticated with an identity corresponding to the CommonName of the client certificate.")
	fs.StringVar(&s.TokenAuthFile, "token-auth-file", s.TokenAuthFile, "If set, the file that will be used to secure the secure port of the API server via token authentication.")
	fs.StringVar(&s.OIDCIssuerURL, "oidc-issuer-url", s.OIDCIssuerURL, "The URL of the OpenID issuer, only HTTPS scheme will be accepted. If set, it will be used to verify the OIDC JSON Web Token (JWT)")
	fs.StringVar(&s.OIDCClientID, "oidc-client-id", s.OIDCClientID, "The client ID for the OpenID Connect client, must be set if oidc-issuer-url is set")
	fs.StringVar(&s.OIDCCAFile, "oidc-ca-file", s.OIDCCAFile, "If set, the OpenID server's certificate will be verified by one of the authorities in the oidc-ca-file, otherwise the host's root CA set will be used")
	fs.StringVar(&s.OIDCUsernameClaim, "oidc-username-claim", "sub", ""+
		"The OpenID claim to use as the user name. Note that claims other than the default ('sub') is not "+
		"guaranteed to be unique and immutable. This flag is experimental, please see the authentication documentation for further details.")
	fs.StringVar(&s.ServiceAccountKeyFile, "service-account-key-file", s.ServiceAccountKeyFile, "File containing PEM-encoded x509 RSA private or public key, used to verify ServiceAccount tokens. If unspecified, --tls-private-key-file is used.")
	fs.BoolVar(&s.ServiceAccountLookup, "service-account-lookup", s.ServiceAccountLookup, "If true, validate ServiceAccount tokens exist in etcd as part of authentication.")
	fs.StringVar(&s.KeystoneURL, "experimental-keystone-url", s.KeystoneURL, "If passed, activates the keystone authentication plugin")

	// --- Authorization & admission control ---
	fs.StringVar(&s.AuthorizationMode, "authorization-mode", s.AuthorizationMode, "Ordered list of plug-ins to do authorization on secure port. Comma-delimited list of: "+strings.Join(apiserver.AuthorizationModeChoices, ","))
	fs.StringVar(&s.AuthorizationPolicyFile, "authorization-policy-file", s.AuthorizationPolicyFile, "File with authorization policy in csv format, used with --authorization-mode=ABAC, on the secure port.")
	fs.StringVar(&s.AdmissionControl, "admission-control", s.AdmissionControl, "Ordered list of plug-ins to do admission control of resources into cluster. Comma-delimited list of: "+strings.Join(admission.GetPlugins(), ", "))
	fs.StringVar(&s.AdmissionControlConfigFile, "admission-control-config-file", s.AdmissionControlConfigFile, "File with admission control configuration.")

	// --- etcd storage backend ---
	fs.StringSliceVar(&s.EtcdServerList, "etcd-servers", s.EtcdServerList, "List of etcd servers to watch (http://ip:port), comma separated. Mutually exclusive with -etcd-config")
	fs.StringSliceVar(&s.EtcdServersOverrides, "etcd-servers-overrides", s.EtcdServersOverrides, "Per-resource etcd servers overrides, comma separated. The individual override format: group/resource#servers, where servers are http://ip:port, semicolon separated.")
	fs.StringVar(&s.EtcdPathPrefix, "etcd-prefix", s.EtcdPathPrefix, "The prefix for all resource paths in etcd.")
	fs.BoolVar(&s.EtcdQuorumRead, "etcd-quorum-read", s.EtcdQuorumRead, "If true, enable quorum read")

	fs.StringSliceVar(&s.CorsAllowedOriginList, "cors-allowed-origins", s.CorsAllowedOriginList, "List of allowed origins for CORS, comma separated. An allowed origin can be a regular expression to support subdomain matching. If this list is empty CORS will not be enabled.")
	fs.BoolVar(&s.AllowPrivileged, "allow-privileged", s.AllowPrivileged, "If true, allow privileged containers.")

	// --- Service networking ---
	fs.IPNetVar(&s.ServiceClusterIPRange, "service-cluster-ip-range", s.ServiceClusterIPRange, "A CIDR notation IP range from which to assign service cluster IPs. This must not overlap with any IP ranges assigned to nodes for pods.")
	fs.IPNetVar(&s.ServiceClusterIPRange, "portal-net", s.ServiceClusterIPRange, "Deprecated: see --service-cluster-ip-range instead.")
	fs.MarkDeprecated("portal-net", "see --service-cluster-ip-range instead.")
	fs.Var(&s.ServiceNodePortRange, "service-node-port-range", "A port range to reserve for services with NodePort visibility. Example: '30000-32767'. Inclusive at both ends of the range.")
	fs.Var(&s.ServiceNodePortRange, "service-node-ports", "Deprecated: see --service-node-port-range instead.")
	fs.MarkDeprecated("service-node-ports", "see --service-node-port-range instead.")

	// --- Cluster topology & runtime behaviour ---
	fs.StringVar(&s.MasterServiceNamespace, "master-service-namespace", s.MasterServiceNamespace, "The namespace from which the kubernetes master services should be injected into pods")
	fs.IntVar(&s.MasterCount, "apiserver-count", s.MasterCount, "The number of apiservers running in the cluster")
	fs.Var(&s.RuntimeConfig, "runtime-config", "A set of key=value pairs that describe runtime configuration that may be passed to apiserver. apis/<groupVersion> key can be used to turn on/off specific api versions. apis/<groupVersion>/<resource> can be used to turn on/off specific resources. api/all and api/legacy are special keys to control all and legacy api versions respectively.")
	fs.BoolVar(&s.EnableProfiling, "profiling", true, "Enable profiling via web interface host:port/debug/pprof/")
	// TODO: enable cache in integration tests.
	fs.BoolVar(&s.EnableWatchCache, "watch-cache", true, "Enable watch caching in the apiserver")
	fs.StringVar(&s.ExternalHost, "external-hostname", "", "The hostname to use when generating externalized URLs for this master (e.g. Swagger API Docs.)")
	fs.IntVar(&s.MaxRequestsInFlight, "max-requests-inflight", 400, "The maximum number of requests in flight at a given time. When the server exceeds this, it rejects requests. Zero for no limit.")
	fs.IntVar(&s.MinRequestTimeout, "min-request-timeout", 1800, "An optional field indicating the minimum number of seconds a handler must keep a request open before timing it out. Currently only honored by the watch request handler, which picks a randomized value above this number as the connection timeout, to spread out load.")
	fs.StringVar(&s.LongRunningRequestRE, "long-running-request-regexp", s.LongRunningRequestRE, "A regular expression matching long running requests which should be excluded from maximum inflight request handling.")
	fs.StringVar(&s.SSHUser, "ssh-user", "", "If non-empty, use secure SSH proxy to the nodes, using this user name")
	fs.StringVar(&s.SSHKeyfile, "ssh-keyfile", "", "If non-empty, use secure SSH proxy to the nodes, using this user keyfile")
	fs.Int64Var(&s.MaxConnectionBytesPerSec, "max-connection-bytes-per-sec", 0, "If non-zero, throttle each user connection to this number of bytes/sec. Currently only applies to long-running requests")

	// Kubelet related flags:
	fs.BoolVar(&s.KubeletConfig.EnableHttps, "kubelet-https", s.KubeletConfig.EnableHttps, "Use https for kubelet connections")
	fs.UintVar(&s.KubeletConfig.Port, "kubelet-port", s.KubeletConfig.Port, "Kubelet port")
	fs.MarkDeprecated("kubelet-port", "kubelet-port is deprecated and will be removed")
	fs.DurationVar(&s.KubeletConfig.HTTPTimeout, "kubelet-timeout", s.KubeletConfig.HTTPTimeout, "Timeout for kubelet operations")
	fs.StringVar(&s.KubeletConfig.CertFile, "kubelet-client-certificate", s.KubeletConfig.CertFile, "Path to a client cert file for TLS.")
	fs.StringVar(&s.KubeletConfig.KeyFile, "kubelet-client-key", s.KubeletConfig.KeyFile, "Path to a client key file for TLS.")
	fs.StringVar(&s.KubeletConfig.CAFile, "kubelet-certificate-authority", s.KubeletConfig.CAFile, "Path to a cert. file for the certificate authority.")
	// See #14282 for details on how to test/try this option out. TODO remove this comment once this option is tested in CI.
	fs.IntVar(&s.KubernetesServiceNodePort, "kubernetes-service-node-port", 0, "If non-zero, the Kubernetes master service (which apiserver creates/maintains) will be of type NodePort, using this as the value of the port. If zero, the Kubernetes master service will be of type ClusterIP.")
	// TODO: delete this flag as soon as we identify and fix all clients that send malformed updates, like #14126.
	fs.BoolVar(&validation.RepairMalformedUpdates, "repair-malformed-updates", true, "If true, server will do its best to fix the update request to pass the validation, e.g., setting empty UID in update request to its existing value. This flag can be turned off after we fix all the clients that send malformed updates.")
}
| dcbw/kubernetes | cmd/kube-apiserver/app/options/options.go | GO | apache-2.0 | 16,709 |
/*
* Copyright 2016 Netbrasoft
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package br.com.netbrasoft.gnuob.api.category;
import static br.com.netbrasoft.gnuob.api.category.CategoryWebServiceWrapperHelper.wrapToCountCategory;
import static br.com.netbrasoft.gnuob.api.category.CategoryWebServiceWrapperHelper.wrapToFindCategory;
import static br.com.netbrasoft.gnuob.api.category.CategoryWebServiceWrapperHelper.wrapToFindCategoryById;
import static br.com.netbrasoft.gnuob.api.category.CategoryWebServiceWrapperHelper.wrapToMergeCategory;
import static br.com.netbrasoft.gnuob.api.category.CategoryWebServiceWrapperHelper.wrapToPersistCategory;
import static br.com.netbrasoft.gnuob.api.category.CategoryWebServiceWrapperHelper.wrapToRefreshCategory;
import static br.com.netbrasoft.gnuob.api.category.CategoryWebServiceWrapperHelper.wrapToRemoveCategory;
import static br.com.netbrasoft.gnuob.api.generic.NetbrasoftApiConstants.CAN_NOT_INITIALIZE_THE_DEFAULT_WSDL_FROM_0;
import static br.com.netbrasoft.gnuob.api.generic.NetbrasoftApiConstants.CATEGORY_WEB_SERVICE_REPOSITORY_NAME;
import static br.com.netbrasoft.gnuob.api.generic.NetbrasoftApiConstants.GNUOB_SOAP_CATEGORY_WEBSERVICE_WSDL;
import static br.com.netbrasoft.gnuob.api.generic.NetbrasoftApiConstants.HTTP_LOCALHOST_8080_GNUOB_SOAP_CATEGORY_WEB_SERVICE_IMPL_WSDL;
import static br.com.netbrasoft.gnuob.api.generic.NetbrasoftApiConstants.UNCHECKED_VALUE;
import static java.lang.System.getProperty;
import static org.slf4j.LoggerFactory.getLogger;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.List;
import org.javasimon.aop.Monitored;
import org.slf4j.Logger;
import org.springframework.stereotype.Repository;
import br.com.netbrasoft.gnuob.api.Category;
import br.com.netbrasoft.gnuob.api.CategoryWebServiceImpl;
import br.com.netbrasoft.gnuob.api.CategoryWebServiceImplService;
import br.com.netbrasoft.gnuob.api.MetaData;
import br.com.netbrasoft.gnuob.api.OrderBy;
import br.com.netbrasoft.gnuob.api.Paging;
import br.com.netbrasoft.gnuob.api.generic.IGenericTypeWebServiceRepository;
@Monitored
@Repository(CATEGORY_WEB_SERVICE_REPOSITORY_NAME)
public class CategoryWebServiceRepository<C extends Category> implements IGenericTypeWebServiceRepository<C> {

  private static final Logger LOGGER = getLogger(CategoryWebServiceRepository.class);

  /**
   * WSDL location resolved once at class-load time from the system property, falling back to the
   * localhost default. Stays {@code null} when the configured value is not a valid URL, in which
   * case the JAX-WS generated default location applies.
   */
  private static final URL WSDL_LOCATION = resolveWsdlLocation();

  private static URL resolveWsdlLocation() {
    try {
      return new URL(getProperty(GNUOB_SOAP_CATEGORY_WEBSERVICE_WSDL,
          HTTP_LOCALHOST_8080_GNUOB_SOAP_CATEGORY_WEB_SERVICE_IMPL_WSDL));
    } catch (final MalformedURLException e) {
      LOGGER.info(CAN_NOT_INITIALIZE_THE_DEFAULT_WSDL_FROM_0, getProperty(GNUOB_SOAP_CATEGORY_WEBSERVICE_WSDL,
          HTTP_LOCALHOST_8080_GNUOB_SOAP_CATEGORY_WEB_SERVICE_IMPL_WSDL));
      return null;
    }
  }

  // SOAP port created lazily on first use and cached for the repository's lifetime.
  private transient CategoryWebServiceImpl port = null;

  /** Returns the cached SOAP port, creating it on first access. */
  private CategoryWebServiceImpl port() {
    if (port == null) {
      port = new CategoryWebServiceImplService(WSDL_LOCATION).getCategoryWebServiceImplPort();
    }
    return port;
  }

  /** Counts the categories matching the given example object. */
  @Override
  public long count(final MetaData credentials, final C categoryExample) {
    return port().countCategory(wrapToCountCategory(categoryExample), credentials).getReturn();
  }

  /** Finds the categories matching the given example, page window and ordering. */
  @Override
  @SuppressWarnings(UNCHECKED_VALUE)
  public List<C> find(final MetaData credentials, final C categoryExample, final Paging paging,
      final OrderBy orderingProperty) {
    return (List<C>) port()
        .findCategory(wrapToFindCategory(categoryExample, paging, orderingProperty), credentials).getReturn();
  }

  /** Looks up a single category by the id carried in the example object. */
  @Override
  @SuppressWarnings(UNCHECKED_VALUE)
  public C find(final MetaData credentials, final C categoryExample) {
    return (C) port().findCategoryById(wrapToFindCategoryById(categoryExample), credentials).getReturn();
  }

  /** Persists a new category and returns the stored representation. */
  @Override
  @SuppressWarnings(UNCHECKED_VALUE)
  public C persist(final MetaData credentials, final C category) {
    return (C) port().persistCategory(wrapToPersistCategory(category), credentials).getReturn();
  }

  /** Merges (updates) an existing category and returns the stored representation. */
  @Override
  @SuppressWarnings(UNCHECKED_VALUE)
  public C merge(final MetaData credentials, final C category) {
    return (C) port().mergeCategory(wrapToMergeCategory(category), credentials).getReturn();
  }

  /** Re-reads the given category from the web service. */
  @Override
  @SuppressWarnings(UNCHECKED_VALUE)
  public C refresh(final MetaData credentials, final C category) {
    return (C) port().refreshCategory(wrapToRefreshCategory(category), credentials).getReturn();
  }

  /** Removes the given category; the service call returns no payload. */
  @Override
  public void remove(final MetaData credentials, final C category) {
    port().removeCategory(wrapToRemoveCategory(category), credentials);
  }
}
| Netbrasoft/gnuob-api | src/main/java/br/com/netbrasoft/gnuob/api/category/CategoryWebServiceRepository.java | Java | apache-2.0 | 5,406 |
package org.wso2.carbon.apimgt.rest.api.publisher.v1.dto;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import java.util.ArrayList;
import java.util.List;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.AlertTypeDTO;
import javax.validation.constraints.*;
import io.swagger.annotations.*;
import java.util.Objects;
import javax.xml.bind.annotation.*;
import org.wso2.carbon.apimgt.rest.api.util.annotations.Scope;
public class AlertTypesListDTO {

    // Number of alert types contained in this list.
    private Integer count = null;
    // The alert type entries themselves.
    private List<AlertTypeDTO> alerts = new ArrayList<>();

    /**
     * Fluent setter for the number of alerts.
     */
    public AlertTypesListDTO count(Integer count) {
        this.count = count;
        return this;
    }

    @ApiModelProperty(example = "3", value = "The number of alerts")
    @JsonProperty("count")
    public Integer getCount() {
        return count;
    }

    public void setCount(Integer count) {
        this.count = count;
    }

    /**
     * Fluent setter for the alert type entries.
     */
    public AlertTypesListDTO alerts(List<AlertTypeDTO> alerts) {
        this.alerts = alerts;
        return this;
    }

    @ApiModelProperty(value = "")
    @JsonProperty("alerts")
    public List<AlertTypeDTO> getAlerts() {
        return alerts;
    }

    public void setAlerts(List<AlertTypeDTO> alerts) {
        this.alerts = alerts;
    }

    /** Two lists are equal when both the count and the alert entries match. */
    @Override
    public boolean equals(java.lang.Object other) {
        if (other == this) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        final AlertTypesListDTO that = (AlertTypesListDTO) other;
        return Objects.equals(count, that.count) && Objects.equals(alerts, that.alerts);
    }

    @Override
    public int hashCode() {
        return Objects.hash(count, alerts);
    }

    @Override
    public String toString() {
        // Same rendering as the generated StringBuilder version, assembled by concatenation.
        return "class AlertTypesListDTO {\n"
                + "    count: " + toIndentedString(count) + "\n"
                + "    alerts: " + toIndentedString(alerts) + "\n"
                + "}";
    }

    /**
     * Convert the given object to string with each line indented by 4 spaces
     * (except the first line).
     */
    private String toIndentedString(java.lang.Object o) {
        return o == null ? "null" : o.toString().replace("\n", "\n    ");
    }
}
| nuwand/carbon-apimgt | components/apimgt/org.wso2.carbon.apimgt.rest.api.publisher.v1/src/gen/java/org/wso2/carbon/apimgt/rest/api/publisher/v1/dto/AlertTypesListDTO.java | Java | apache-2.0 | 2,369 |
// Auto-generated Cordova plugin manifest: maps each plugin JS module to the
// browser globals it provides ("clobbers" replaces a global, "merges" extends
// one) or marks it to execute at startup ("runs"). Regenerated by the Cordova
// CLI on `cordova prepare`; do not edit by hand.
cordova.define('cordova/plugin_list', function(require, exports, module) {
module.exports = [
    {
        "file": "plugins/cordova-plugin-whitelist/whitelist.js",
        "id": "cordova-plugin-whitelist.whitelist",
        "pluginId": "cordova-plugin-whitelist",
        "runs": true
    },
    {
        "file": "plugins/cordova-plugin-file/www/DirectoryEntry.js",
        "id": "cordova-plugin-file.DirectoryEntry",
        "pluginId": "cordova-plugin-file",
        "clobbers": [
            "window.DirectoryEntry"
        ]
    },
    {
        "file": "plugins/cordova-plugin-file/www/DirectoryReader.js",
        "id": "cordova-plugin-file.DirectoryReader",
        "pluginId": "cordova-plugin-file",
        "clobbers": [
            "window.DirectoryReader"
        ]
    },
    {
        "file": "plugins/cordova-plugin-file/www/Entry.js",
        "id": "cordova-plugin-file.Entry",
        "pluginId": "cordova-plugin-file",
        "clobbers": [
            "window.Entry"
        ]
    },
    {
        "file": "plugins/cordova-plugin-file/www/File.js",
        "id": "cordova-plugin-file.File",
        "pluginId": "cordova-plugin-file",
        "clobbers": [
            "window.File"
        ]
    },
    {
        "file": "plugins/cordova-plugin-file/www/FileEntry.js",
        "id": "cordova-plugin-file.FileEntry",
        "pluginId": "cordova-plugin-file",
        "clobbers": [
            "window.FileEntry"
        ]
    },
    {
        "file": "plugins/cordova-plugin-file/www/FileError.js",
        "id": "cordova-plugin-file.FileError",
        "pluginId": "cordova-plugin-file",
        "clobbers": [
            "window.FileError"
        ]
    },
    {
        "file": "plugins/cordova-plugin-file/www/FileReader.js",
        "id": "cordova-plugin-file.FileReader",
        "pluginId": "cordova-plugin-file",
        "clobbers": [
            "window.FileReader"
        ]
    },
    {
        "file": "plugins/cordova-plugin-file/www/FileSystem.js",
        "id": "cordova-plugin-file.FileSystem",
        "pluginId": "cordova-plugin-file",
        "clobbers": [
            "window.FileSystem"
        ]
    },
    {
        "file": "plugins/cordova-plugin-file/www/FileUploadOptions.js",
        "id": "cordova-plugin-file.FileUploadOptions",
        "pluginId": "cordova-plugin-file",
        "clobbers": [
            "window.FileUploadOptions"
        ]
    },
    {
        "file": "plugins/cordova-plugin-file/www/FileUploadResult.js",
        "id": "cordova-plugin-file.FileUploadResult",
        "pluginId": "cordova-plugin-file",
        "clobbers": [
            "window.FileUploadResult"
        ]
    },
    {
        "file": "plugins/cordova-plugin-file/www/FileWriter.js",
        "id": "cordova-plugin-file.FileWriter",
        "pluginId": "cordova-plugin-file",
        "clobbers": [
            "window.FileWriter"
        ]
    },
    {
        "file": "plugins/cordova-plugin-file/www/Flags.js",
        "id": "cordova-plugin-file.Flags",
        "pluginId": "cordova-plugin-file",
        "clobbers": [
            "window.Flags"
        ]
    },
    {
        "file": "plugins/cordova-plugin-file/www/LocalFileSystem.js",
        "id": "cordova-plugin-file.LocalFileSystem",
        "pluginId": "cordova-plugin-file",
        "clobbers": [
            "window.LocalFileSystem"
        ],
        "merges": [
            "window"
        ]
    },
    {
        "file": "plugins/cordova-plugin-file/www/Metadata.js",
        "id": "cordova-plugin-file.Metadata",
        "pluginId": "cordova-plugin-file",
        "clobbers": [
            "window.Metadata"
        ]
    },
    {
        "file": "plugins/cordova-plugin-file/www/ProgressEvent.js",
        "id": "cordova-plugin-file.ProgressEvent",
        "pluginId": "cordova-plugin-file",
        "clobbers": [
            "window.ProgressEvent"
        ]
    },
    {
        "file": "plugins/cordova-plugin-file/www/fileSystems.js",
        "id": "cordova-plugin-file.fileSystems",
        "pluginId": "cordova-plugin-file"
    },
    {
        "file": "plugins/cordova-plugin-file/www/requestFileSystem.js",
        "id": "cordova-plugin-file.requestFileSystem",
        "pluginId": "cordova-plugin-file",
        "clobbers": [
            "window.requestFileSystem"
        ]
    },
    {
        "file": "plugins/cordova-plugin-file/www/resolveLocalFileSystemURI.js",
        "id": "cordova-plugin-file.resolveLocalFileSystemURI",
        "pluginId": "cordova-plugin-file",
        "merges": [
            "window"
        ]
    },
    {
        "file": "plugins/cordova-plugin-file/www/android/FileSystem.js",
        "id": "cordova-plugin-file.androidFileSystem",
        "pluginId": "cordova-plugin-file",
        "merges": [
            "FileSystem"
        ]
    },
    {
        "file": "plugins/cordova-plugin-file/www/fileSystems-roots.js",
        "id": "cordova-plugin-file.fileSystems-roots",
        "pluginId": "cordova-plugin-file",
        "runs": true
    },
    {
        "file": "plugins/cordova-plugin-file/www/fileSystemPaths.js",
        "id": "cordova-plugin-file.fileSystemPaths",
        "pluginId": "cordova-plugin-file",
        "merges": [
            "cordova"
        ],
        "runs": true
    },
    {
        "file": "plugins/cordova-plugin-file-transfer/www/FileTransferError.js",
        "id": "cordova-plugin-file-transfer.FileTransferError",
        "pluginId": "cordova-plugin-file-transfer",
        "clobbers": [
            "window.FileTransferError"
        ]
    },
    {
        "file": "plugins/cordova-plugin-file-transfer/www/FileTransfer.js",
        "id": "cordova-plugin-file-transfer.FileTransfer",
        "pluginId": "cordova-plugin-file-transfer",
        "clobbers": [
            "window.FileTransfer"
        ]
    },
    {
        "file": "plugins/cordova-plugin-device/www/device.js",
        "id": "cordova-plugin-device.device",
        "pluginId": "cordova-plugin-device",
        "clobbers": [
            "device"
        ]
    },
    {
        "file": "plugins/de.appplant.cordova.plugin.email-composer/www/email_composer.js",
        "id": "de.appplant.cordova.plugin.email-composer.EmailComposer",
        "pluginId": "de.appplant.cordova.plugin.email-composer",
        "clobbers": [
            "cordova.plugins.email",
            "plugin.email"
        ]
    }
];
// Plugin-version metadata is injected between the markers below by the CLI.
module.exports.metadata =
// TOP OF METADATA
{}
// BOTTOM OF METADATA
}); | sergiolucas/Projects | Guellcom/calculadora/platforms/android/platform_www/cordova_plugins.js | JavaScript | apache-2.0 | 6,421 |
/**
 * Strings service for the credential-types feature. Calls the injected
 * BaseString constructor with the 'credential_types' namespace, then
 * registers this feature's translated messages under that namespace.
 */
function CredentialTypesStrings (BaseString) {
    BaseString.call(this, 'credential_types');

    const translate = this.t;
    const strings = this.credential_types;

    strings.deleteCredentialType = {
        CREDENTIAL_TYPE_IN_USE: translate.s('This credential type is currently being used by one or more credentials. Credentials that use this credential type must be deleted before the credential type can be deleted.')
    };
}

CredentialTypesStrings.$inject = ['BaseStringService'];

export default CredentialTypesStrings;
| GoogleCloudPlatform/sap-deployment-automation | third_party/github.com/ansible/awx/awx/ui/client/src/credential-types/credential-types.strings.js | JavaScript | apache-2.0 | 504 |
package libnetwork
import (
"fmt"
)
// ErrNoSuchNetwork is returned when a network query finds no result.
type ErrNoSuchNetwork string

// Error implements the error interface, naming the network that was not found.
func (name ErrNoSuchNetwork) Error() string {
	return "network " + string(name) + " not found"
}

// BadRequest denotes the type of this error
func (name ErrNoSuchNetwork) BadRequest() {}
// ErrNoSuchEndpoint is returned when an endpoint query finds no result.
type ErrNoSuchEndpoint string

// Error implements the error interface.
func (nse ErrNoSuchEndpoint) Error() string {
	return fmt.Sprintf("endpoint %s not found", string(nse))
}

// BadRequest denotes the type of this error: the caller asked for an
// endpoint that does not exist.
func (nse ErrNoSuchEndpoint) BadRequest() {}
// ErrInvalidNetworkDriver is returned if an invalid driver
// name is passed.
type ErrInvalidNetworkDriver string

// Error implements the error interface.
func (ind ErrInvalidNetworkDriver) Error() string {
	return fmt.Sprintf("invalid driver bound to network: %s", string(ind))
}

// BadRequest denotes the type of this error: the request referenced a
// driver name that libnetwork does not recognize.
func (ind ErrInvalidNetworkDriver) BadRequest() {}
// ErrInvalidJoin is returned if a join is attempted on an endpoint
// which already has a container joined.
type ErrInvalidJoin struct{}

// Error implements the error interface.
func (ij ErrInvalidJoin) Error() string {
	return "a container has already joined the endpoint"
}

// BadRequest denotes the type of this error: an endpoint hosts at most one
// joined container, so a second join is a caller mistake.
func (ij ErrInvalidJoin) BadRequest() {}
// ErrNoContainer is returned when the endpoint has no container
// attached to it.
type ErrNoContainer struct{}

// Error implements the error interface. The previous message
// ("a container has already joined the endpoint") was copied from
// ErrInvalidJoin and described the opposite condition.
func (nc ErrNoContainer) Error() string {
	return "no container is attached to the endpoint"
}

// Maskable denotes the type of this error
func (nc ErrNoContainer) Maskable() {}
// ErrInvalidID is returned when a query-by-id method is being invoked
// with an empty id parameter.
type ErrInvalidID string

// Error implements the error interface.
func (badID ErrInvalidID) Error() string {
	return "invalid id: " + string(badID)
}

// BadRequest denotes the type of this error
func (badID ErrInvalidID) BadRequest() {}
// ErrInvalidName is returned when a query-by-name or resource create method is
// invoked with an empty name parameter.
type ErrInvalidName string

// Error implements the error interface.
func (in ErrInvalidName) Error() string {
	return fmt.Sprintf("invalid name: %s", string(in))
}

// BadRequest denotes the type of this error
func (in ErrInvalidName) BadRequest() {}
// ErrInvalidConfigFile type is returned when an invalid LibNetwork config file is detected.
type ErrInvalidConfigFile string

// Error implements the error interface; %q quotes the offending file name.
func (cf ErrInvalidConfigFile) Error() string {
	return fmt.Sprintf("Invalid Config file %q", string(cf))
}
// NetworkTypeError type is returned when the network type string is not
// known to libnetwork.
type NetworkTypeError string

// Error implements the error interface; %q quotes the unknown driver name.
func (nt NetworkTypeError) Error() string {
	return fmt.Sprintf("unknown driver %q", string(nt))
}

// NotFound denotes the type of this error
func (nt NetworkTypeError) NotFound() {}
// NetworkNameError is returned when a network with the same name already exists.
type NetworkNameError string

// Error implements the error interface, naming the conflicting network.
func (existing NetworkNameError) Error() string {
	return "network with name " + string(existing) + " already exists"
}

// Forbidden denotes the type of this error
func (existing NetworkNameError) Forbidden() {}
// UnknownNetworkError is returned when libnetwork could not find in its
// database a network with the same name and id.
type UnknownNetworkError struct {
	name string
	id   string
}

// Error implements the error interface.
func (une *UnknownNetworkError) Error() string {
	return fmt.Sprintf("unknown network %s id %s", une.name, une.id)
}

// NotFound denotes the type of this error
func (une *UnknownNetworkError) NotFound() {}
// ActiveEndpointsError is returned when a network is deleted which has active
// endpoints in it.
type ActiveEndpointsError struct {
	name string
	id   string
}

// Error implements the error interface.
func (aee *ActiveEndpointsError) Error() string {
	return fmt.Sprintf("network with name %s id %s has active endpoints", aee.name, aee.id)
}

// Forbidden denotes the type of this error: the network must be emptied of
// endpoints before it can be removed.
func (aee *ActiveEndpointsError) Forbidden() {}
// UnknownEndpointError is returned when libnetwork could not find in its
// database an endpoint with the same name and id.
type UnknownEndpointError struct {
	name string
	id   string
}

// Error implements the error interface.
func (uee *UnknownEndpointError) Error() string {
	return fmt.Sprintf("unknown endpoint %s id %s", uee.name, uee.id)
}

// NotFound denotes the type of this error
func (uee *UnknownEndpointError) NotFound() {}
// ActiveContainerError is returned when an endpoint is deleted which has active
// containers attached to it.
type ActiveContainerError struct {
	name string
	id   string
}

// Error implements the error interface.
func (ace *ActiveContainerError) Error() string {
	return fmt.Sprintf("endpoint with name %s id %s has active containers", ace.name, ace.id)
}

// Forbidden denotes the type of this error: containers must leave the
// endpoint before it can be deleted.
func (ace *ActiveContainerError) Forbidden() {}
// InvalidContainerIDError is returned when an invalid container id is passed
// in Join/Leave.
type InvalidContainerIDError string

// Error implements the error interface.
func (id InvalidContainerIDError) Error() string {
	return fmt.Sprintf("invalid container id %s", string(id))
}

// BadRequest denotes the type of this error
func (id InvalidContainerIDError) BadRequest() {}
| Microsoft/libnetwork | error.go | GO | apache-2.0 | 5,043 |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/servicediscovery/model/PublicDnsPropertiesMutable.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
namespace Aws
{
namespace ServiceDiscovery
{
namespace Model
{

// Default-constructs an empty object: no SOA record set, so Jsonize()
// produces an empty payload.
PublicDnsPropertiesMutable::PublicDnsPropertiesMutable() :
    m_sOAHasBeenSet(false)
{
}

// Deserializing constructor: delegates to operator= so the JSON-parsing
// logic lives in exactly one place.
PublicDnsPropertiesMutable::PublicDnsPropertiesMutable(JsonView jsonValue) :
    m_sOAHasBeenSet(false)
{
  *this = jsonValue;
}

// Reads the optional "SOA" member; the HasBeenSet flag records presence so
// serialization does not invent fields that were absent in the input.
PublicDnsPropertiesMutable& PublicDnsPropertiesMutable::operator =(JsonView jsonValue)
{
  if(jsonValue.ValueExists("SOA"))
  {
    m_sOA = jsonValue.GetObject("SOA");
    m_sOAHasBeenSet = true;
  }
  return *this;
}

// Serializes only the members that have been explicitly set.
JsonValue PublicDnsPropertiesMutable::Jsonize() const
{
  JsonValue payload;
  if(m_sOAHasBeenSet)
  {
   payload.WithObject("SOA", m_sOA.Jsonize());
  }
  return payload;
}

} // namespace Model
} // namespace ServiceDiscovery
} // namespace Aws
| awslabs/aws-sdk-cpp | aws-cpp-sdk-servicediscovery/source/model/PublicDnsPropertiesMutable.cpp | C++ | apache-2.0 | 1,079 |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.agent.monitor.inventory;
import org.hawkular.agent.monitor.inventory.dmr.DMRResource;
import org.hawkular.agent.monitor.inventory.dmr.DMRResourceType;
import org.hawkular.dmrclient.Address;
import org.jboss.dmr.ModelNode;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.traverse.BreadthFirstIterator;
import org.jgrapht.traverse.DepthFirstIterator;
import org.junit.Assert;
import org.junit.Test;
/**
 * Unit tests for {@link ResourceManager}: verifies the empty-manager contract
 * and basic parent/child bookkeeping on a small tree of DMR resources.
 */
public class ResourceManagerTest {

    /** An empty manager must report nulls, empty collections and exhausted iterators. */
    @Test
    public void testEmptyResourceManager() {
        ResourceManager<DMRResource> rm = new ResourceManager<>();
        Assert.assertNull(rm.getResource(new ID("foo")));
        Assert.assertTrue(rm.getAllResources().isEmpty());
        Assert.assertTrue(rm.getRootResources().isEmpty());
        Assert.assertFalse(rm.getBreadthFirstIterator().hasNext());
        Assert.assertFalse(rm.getDepthFirstIterator().hasNext());
    }

    /** Builds a two-root tree one resource at a time, checking bookkeeping after each add. */
    @Test
    public void testResourceManager() {
        // All resources share one type; the type itself is not under test here.
        DMRResourceType type = new DMRResourceType(new ID("resType"), new Name("resTypeName"));
        ResourceManager<DMRResource> rm = new ResourceManager<>();
        DMRResource root1 = new DMRResource(new ID("root1"), new Name("root1Name"), null, type, null, new Address(),
                new ModelNode());
        DMRResource root2 = new DMRResource(new ID("root2"), new Name("root2Name"), null, type, null, new Address(),
                new ModelNode());
        DMRResource child1 = new DMRResource(new ID("child1"), new Name("child1Name"), null, type, root1,
                new Address(), new ModelNode());
        DMRResource child2 = new DMRResource(new ID("child2"), new Name("child2Name"), null, type, root1,
                new Address(), new ModelNode());
        DMRResource grandChild1 = new DMRResource(new ID("grand1"), new Name("grand1Name"), null, type, child1,
                new Address(), new ModelNode());
        // add root1
        rm.addResource(root1);
        Assert.assertEquals(1, rm.getAllResources().size());
        Assert.assertTrue(rm.getAllResources().contains(root1));
        Assert.assertEquals(root1, rm.getResource(root1.getID()));
        DepthFirstIterator<DMRResource, DefaultEdge> dIter = rm.getDepthFirstIterator();
        Assert.assertEquals(root1, dIter.next());
        Assert.assertFalse(dIter.hasNext());
        BreadthFirstIterator<DMRResource, DefaultEdge> bIter = rm.getBreadthFirstIterator();
        Assert.assertEquals(root1, bIter.next());
        Assert.assertFalse(bIter.hasNext());
        Assert.assertEquals(1, rm.getRootResources().size());
        Assert.assertTrue(rm.getRootResources().contains(root1));
        // add child1
        rm.addResource(child1);
        Assert.assertEquals(2, rm.getAllResources().size());
        Assert.assertTrue(rm.getAllResources().contains(child1));
        Assert.assertEquals(child1, rm.getResource(child1.getID()));
        // add grandChild1
        rm.addResource(grandChild1);
        Assert.assertEquals(3, rm.getAllResources().size());
        Assert.assertTrue(rm.getAllResources().contains(grandChild1));
        Assert.assertEquals(grandChild1, rm.getResource(grandChild1.getID()));
        // add root2
        rm.addResource(root2);
        Assert.assertEquals(4, rm.getAllResources().size());
        Assert.assertTrue(rm.getAllResources().contains(root2));
        Assert.assertEquals(root2, rm.getResource(root2.getID()));
        Assert.assertEquals(2, rm.getRootResources().size());
        Assert.assertTrue(rm.getRootResources().contains(root2));
        // add child2
        rm.addResource(child2);
        Assert.assertEquals(5, rm.getAllResources().size());
        Assert.assertTrue(rm.getAllResources().contains(child2));
        Assert.assertEquals(child2, rm.getResource(child2.getID()));
        //
        // the tree now looks like:
        //
        //       root1        root2
        //      /     \
        //   child1  child2
        //      |
        // grandchild1
        //
        // Verify parent/child relationships from every node's perspective.
        Assert.assertEquals(2, rm.getChildren(root1).size());
        Assert.assertTrue(rm.getChildren(root1).contains(child1));
        Assert.assertTrue(rm.getChildren(root1).contains(child2));
        Assert.assertEquals(1, rm.getChildren(child1).size());
        Assert.assertTrue(rm.getChildren(child1).contains(grandChild1));
        Assert.assertEquals(0, rm.getChildren(grandChild1).size());
        Assert.assertEquals(0, rm.getChildren(root2).size());
        Assert.assertEquals(null, rm.getParent(root1));
        Assert.assertEquals(null, rm.getParent(root2));
        Assert.assertEquals(root1, rm.getParent(child1));
        Assert.assertEquals(root1, rm.getParent(child2));
        Assert.assertEquals(child1, rm.getParent(grandChild1));
        /*
         * WHY DOESN'T THIS ITERATE LIKE IT SHOULD?
         *
        // iterate depth first which should be:
        // root1 -> child1 -> grandchild1 -> child2 -> root2
        dIter = rm.getDepthFirstIterator();
        Assert.assertEquals(root1, dIter.next());
        Assert.assertEquals(child1, dIter.next());
        Assert.assertEquals(grandChild1, dIter.next());
        Assert.assertEquals(child2, dIter.next());
        Assert.assertEquals(root2, dIter.next());
        Assert.assertFalse(dIter.hasNext());
        // iterate breadth first which should be (assuming roots are done in order)
        // root1 -> child1 -> child2 -> grandchild1 -> root2
        bIter = rm.getBreadthFirstIterator();
        Assert.assertEquals(root1, bIter.next());
        Assert.assertEquals(child1, bIter.next());
        Assert.assertEquals(child2, bIter.next());
        Assert.assertEquals(grandChild1, bIter.next());
        Assert.assertEquals(root2, bIter.next());
        Assert.assertFalse(bIter.hasNext());
         *
         * THE ABOVE DOESN'T WORK AS EXPECTED
         */
    }
}
| pavolloffay/hawkular-agent | hawkular-wildfly-monitor/src/test/java/org/hawkular/agent/monitor/inventory/ResourceManagerTest.java | Java | apache-2.0 | 6,531 |
<!DOCTYPE html>
<!--[if IE 8]> <html lang="en" class="ie8 no-js"> <![endif]-->
<!--[if IE 9]> <html lang="en" class="ie9 no-js"> <![endif]-->
<!--[if !IE]><!-->
<html lang="en">
<!--<![endif]-->
<!-- BEGIN HEAD -->
<!-- Admin login page for the DHP back office, built on the Metronic theme.
     All stylesheet/script assets below are theme files served from /assets. -->
<head>
<meta charset="utf-8"/>
<title>DHP Đăng nhập quản trị</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta content="width=device-width, initial-scale=1.0" name="viewport"/>
<meta http-equiv="Content-type" content="text/html; charset=utf-8">
<meta content="" name="description"/>
<meta content="" name="author"/>
<!-- BEGIN GLOBAL MANDATORY STYLES -->
<link href="http://fonts.googleapis.com/css?family=Open+Sans:400,300,600,700&subset=all" rel="stylesheet" type="text/css"/>
<link href="/assets/global/plugins/font-awesome/css/font-awesome.min.css" rel="stylesheet" type="text/css"/>
<link href="/assets/global/plugins/simple-line-icons/simple-line-icons.min.css" rel="stylesheet" type="text/css"/>
<link href="/assets/global/plugins/bootstrap/css/bootstrap.min.css" rel="stylesheet" type="text/css"/>
<link href="/assets/global/plugins/uniform/css/uniform.default.css" rel="stylesheet" type="text/css"/>
<!-- END GLOBAL MANDATORY STYLES -->
<!-- BEGIN PAGE LEVEL STYLES -->
<link href="/assets/admin/pages/css/login.css" rel="stylesheet" type="text/css"/>
<!-- END PAGE LEVEL SCRIPTS -->
<!-- BEGIN THEME STYLES -->
<link href="/assets/global/css/components.css" rel="stylesheet" type="text/css"/>
<link href="/assets/global/css/plugins.css" rel="stylesheet" type="text/css"/>
<link href="/assets/admin/layout/css/layout.css" rel="stylesheet" type="text/css"/>
<link href="/assets/admin/layout/css/themes/default.css" rel="stylesheet" type="text/css" id="style_color"/>
<link href="/assets/admin/layout/css/custom.css" rel="stylesheet" type="text/css"/>
<!-- END THEME STYLES -->
<link rel="shortcut icon" href="favicon.ico"/>
</head>
<!-- END HEAD -->
<!-- BEGIN BODY -->
<body class="login">
<!-- BEGIN SIDEBAR TOGGLER BUTTON -->
<div class="menu-toggler sidebar-toggler">
</div>
<!-- END SIDEBAR TOGGLER BUTTON -->
<!-- BEGIN LOGO -->
<div class="logo">
<a href="index.html">
<img src="/assets/admin/layout/img/logo-big.png" alt=""/>
</a>
</div>
<!-- END LOGO -->
<!-- BEGIN LOGIN -->
<div class="content">
@if (count($errors) > 0)
<div class="alert alert-danger">
<strong>Lỗi!</strong> Sai thông tin đăng nhập.<br><br>
<ul>
@foreach ($errors->all() as $error)
<li>{{ $error }}</li>
@endforeach
</ul>
</div>
@endif
<!-- BEGIN LOGIN FORM -->
<form class="login-form" action="{{ url('/auth/login') }}" method="post">
<input type="hidden" name="_token" value="{{ csrf_token() }}">
<h3 class="form-title">Đăng nhập</h3>
<div class="alert alert-danger display-hide">
<button class="close" data-close="alert"></button>
<span>
Nhập email và mật khẩu. </span>
</div>
<div class="form-group">
<!--ie8, ie9 does not support html5 placeholder, so we just show field title for that-->
<label class="control-label visible-ie8 visible-ie9">Tên đăng nhập</label>
<input class="form-control form-control-solid placeholder-no-fix" type="email" autocomplete="off" placeholder="Email đăng nhập" name="email" value="{{ old('email') }}"/>
</div>
<div class="form-group">
<label class="control-label visible-ie8 visible-ie9">Mật khẩu</label>
<input class="form-control form-control-solid placeholder-no-fix" type="password" autocomplete="off" placeholder="Mật khẩu" name="password"/>
</div>
<div class="form-actions">
<button type="submit" class="btn btn-success uppercase">Đăng nhập</button>
<label class="rememberme check">
<input type="checkbox" name="remember" value="1"/>Nhớ </label>
<a id="forget-password" class="forget-password" href="{{ url('/password/email') }}">Quên mật khẩu?</a>
</div>
</form>
<!-- END LOGIN FORM -->
<!-- BEGIN FORGOT PASSWORD FORM -->
<!-- FIX: this form previously posted to "index.html" without a CSRF token, so
     Laravel's VerifyCsrfToken middleware would reject the submission. It now
     posts to the password-reset email route (already linked above) and
     carries the _token field, mirroring the login form. -->
<form class="forget-form" action="{{ url('/password/email') }}" method="post">
<input type="hidden" name="_token" value="{{ csrf_token() }}">
<h3>Khôi phục mật khẩu?</h3>
<p>
Vui lòng nhập email để khôi phục mật khẩu.
</p>
<div class="form-group">
<input class="form-control placeholder-no-fix" type="text" autocomplete="off" placeholder="Email" name="email"/>
</div>
<div class="form-actions">
<button type="button" id="back-btn" class="btn btn-default">Trở về</button>
<button type="submit" class="btn btn-success uppercase pull-right">Gửi</button>
</div>
</form>
<!-- END FORGOT PASSWORD FORM -->
</div>
<div class="copyright">
2015 © DHP Viet Nam
</div>
<!-- END LOGIN -->
<!-- BEGIN JAVASCRIPTS(Load javascripts at bottom, this will reduce page load time) -->
<!-- BEGIN CORE PLUGINS -->
<!--[if lt IE 9]>
<script src="/assets/global/plugins/respond.min.js"></script>
<script src="/assets/global/plugins/excanvas.min.js"></script>
<![endif]-->
<!-- jQuery must load before the Bootstrap/theme plugins that depend on it. -->
<script src="/assets/global/plugins/jquery.min.js" type="text/javascript"></script>
<script src="/assets/global/plugins/jquery-migrate.min.js" type="text/javascript"></script>
<script src="/assets/global/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="/assets/global/plugins/jquery.blockui.min.js" type="text/javascript"></script>
<script src="/assets/global/plugins/uniform/jquery.uniform.min.js" type="text/javascript"></script>
<!-- END CORE PLUGINS -->
<!-- BEGIN PAGE LEVEL PLUGINS -->
<script src="/assets/global/plugins/jquery-validation/js/jquery.validate.min.js" type="text/javascript"></script>
<!-- END PAGE LEVEL PLUGINS -->
<!-- BEGIN PAGE LEVEL SCRIPTS -->
<script src="/assets/global/scripts/metronic.js" type="text/javascript"></script>
<script src="/assets/admin/layout/scripts/layout.js" type="text/javascript"></script>
<script src="/assets/admin/pages/scripts/login.js" type="text/javascript"></script>
<!-- END PAGE LEVEL SCRIPTS -->
<script>
jQuery(document).ready(function() {
Metronic.init(); // init metronic core components
Layout.init(); // init current layout
Login.init(); // wires form validation and the login/forgot-form toggle
});
</script>
<!-- END JAVASCRIPTS -->
</body>
<!-- END BODY -->
</html> | imtoantran/md | resources/views/auth/login.blade.php | PHP | apache-2.0 | 6,104 |
// bsla_scanf.cpp -*-C++-*-
#include <bsla_scanf.h>
#include <bsls_ident.h>
BSLS_IDENT("$Id$ $CSID$")
// ----------------------------------------------------------------------------
// Copyright 2019 Bloomberg Finance L.P.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------- END-OF-FILE ----------------------------------
| che2/bde | groups/bsl/bsla/bsla_scanf.cpp | C++ | apache-2.0 | 915 |
/*
* Medical Image Registration ToolKit (MIRTK)
*
* Copyright 2015-2017 Imperial College London
* Copyright 2015-2017 Andreas Schuh
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "mirtk/Common.h"
#include "mirtk/Options.h"
#include "mirtk/ImageConfig.h"
#include "mirtk/IOConfig.h"
#include "mirtk/DataOp.h"
#include "mirtk/DataStatistics.h"
#include "mirtk/DataFunctions.h"
#if MIRTK_Image_WITH_VTK
#include "vtkDataSet.h"
#include "vtkSmartPointer.h"
#include "vtkPointData.h"
#include "vtkCellData.h"
#include "vtkDataArray.h"
#endif
using namespace mirtk;
using namespace mirtk::data;
using namespace mirtk::data::op;
using namespace mirtk::data::statistic;
// =============================================================================
// Help
// =============================================================================
// -----------------------------------------------------------------------------
/// Print detailed usage/help information for this tool to STDOUT.
///
/// \param[in] name  Executable name as it should appear in the usage line.
///
/// NOTE: Option names listed here must match the OPTION(...) strings parsed
/// in main(); e.g. "-multiply-by" (not "-multiply-with") is what the parser
/// accepts for the -mul operation.
void PrintHelp(const char *name)
{
  cout << "\n";
  cout << "Usage: " << name << " <input> [options]\n";
  cout << "\n";
  cout << "Description:\n";
  cout << " This tool can be used for basic calculations from a sequence of data values read\n";
  cout << " either from an image or a VTK pointset. It can be used, for example, to add two\n";
  cout << " data sequences and to divide the result by a constant. The current sequence can\n";
  cout << " be written to an output file again using :option:`-out`. Additionally, statistics\n";
  cout << " of the current data sequence can be computed such as the mean or variance.\n";
  cout << " The order of the data transformations and calculation of statistics is determined\n";
  cout << " by the order of the command-line arguments.\n";
  cout << "\n";
  cout << " The data mask is used to include/exclude values from subsequent operations.\n";
  cout << " Initially, all NaN values in the input data sequence are excluded.\n";
  cout << " Further values can be excluded using one or more of the masking operations.\n";
  cout << " Using the mask, operations can be performed on only a subset of the data,\n";
  cout << " and the mask then reset using :option:`-reset-mask`.\n";
  cout << "\n";
  cout << " By default, data statistics are printed to STDOUT in a human readable format.\n";
  cout << " This output can be appended to a text file using :option:`-append` instead.\n";
  cout << " For a more machine readable output, e.g., as comma separated values (CSV),\n";
  cout << " specify a delimiting string using :option:`-delimiter`. In this case, a header\n";
  cout << " line is also printed when :option:`-header` is given with optional user\n";
  cout << " specified column names for the individual output values.\n";
  cout << "\n";
  cout << "Input options:\n";
  cout << " -pd, -point-data, -scalars <name> Name of input point data array. (default: active SCALARS array)\n";
  cout << " -cd, -cell-data <name> Name of input cell data array. Overrides :option:`-pd`.\n";
  cout << "\n";
  cout << "Data masking options:\n";
  cout << " -even\n";
  cout << " Exclude values which are not an even number when cast to an integer.\n";
  cout << " -odd\n";
  cout << " Exclude values which are not an odd number when cast to an integer.\n";
  cout << " -label <value|lower..upper>...\n";
  cout << " Include data points with a value equal to either one of the given values.\n";
  cout << " Closed intervals of values can be specified as \"lower..upper\".\n";
  cout << " For example, \"-label 1 3 5..6 10 20..50\". This option is a shorthand for\n";
  cout << " :option:`-mask-all` :option:`-threshold-inside` <lower> <upper> :option:`-invert-mask`\n";
  cout << " where one :option:`-threshold-inside` operation is performed for each argument.\n";
  cout << " -mask <value>... | <file> [<scalars>] [<value>]\n";
  cout << " Exclude values equal a given threshold or with specified input mask <value>.\n";
  cout << " The default mask value of values to be excluded is zero. When the input file\n";
  cout << " is a point set file (e.g., .vtk, .vtp), the optional <scalars> argument can be\n";
  cout << " used to specify the name of the point/cell data array to use as mask.\n";
  cout << " Note that this operation does not modify the data values, but only marks them\n";
  cout << " to be ignored from now on. Use :option:`-pad` following this operation to\n";
  cout << " replace these values by a constant background value.\n";
  cout << " -mask-all\n";
  cout << " Exclude all values.\n";
  cout << " -reset-mask\n";
  cout << " Reset mask to include all values again.\n";
  cout << " -invert-mask\n";
  cout << " Invert mask to include all values that where excluded before and\n";
  cout << " exclude all values that were included before.\n";
  cout << " -set, -inside <value>\n";
  cout << " Set new value for all currently included data values.\n";
  cout << " -pad, -outside <value>\n";
  cout << " Set new value for all currently excluded data values.\n";
  cout << "\n";
  cout << "Data thresholding options:\n";
  cout << " -threshold <lower> [<upper>]\n";
  cout << " This masking operation is equivalent to :option:`-threshold-outside`.\n";
  cout << " When no upper threshold is specified, it defaults to +inf. Therefore,\n";
  cout << " \"-threshold 0\" will exclude all negative values.\n";
  cout << " -percentile-threshold, -pct-threshold <lower>\n";
  cout << " This masking operation is equivalent to :option:`-threshold-outside-percentiles`.\n";
  // Fixed copy-paste error: this option takes a percentile, not an absolute
  // threshold, so the former "-threshold 0 excludes all negative values"
  // example did not apply here.
  cout << " with an upper threshold of +inf. Therefore, \"-pct-threshold 0\" excludes all values below the 0th percentile.\n";
  cout << " -threshold-percentiles, -threshold-pcts <lower> <upper>\n";
  cout << " This masking operation is equivalent to :option:`-threshold-outside-percentiles`.\n";
  cout << " -threshold-inside, -mask-inside <lower> <upper>\n";
  cout << " Exclude values which are inside a given closed interval.\n";
  cout << " When the lower threshold is greater than the upper threshold,\n";
  cout << " values less than or equal to the upper threshold and values greater\n";
  cout << " than or equal to the lower threshold are excluded.\n";
  cout << " -threshold-inside-percentiles, -threshold-inside-pcts, -mask-inside-percentiles, -mask-inside-pct <lower> <upper>\n";
  cout << " Exclude values which are inside a given closed interval of percentiles.\n";
  cout << " When the lower percentile is greater than the upper percentile,\n";
  cout << " values less than or equal to the upper percentile and values greater\n";
  cout << " than or equal to the lower percentile are excluded.\n";
  cout << " -threshold-outside, -mask-outside <lower> <upper>\n";
  cout << " Exclude values which are outside a given open interval.\n";
  cout << " When the lower threshold is greater than the upper threshold,\n";
  cout << " values inside the closed interval <upper>..<lower> are excluded.\n";
  cout << " -threshold-outside-percentiles, -threshold-outside-pcts, -mask-outside-percentiles, -mask-outside-pcts <lower> <upper>\n";
  cout << " Exclude values which are outside a given open interval of percentiles.\n";
  cout << " When the lower percentile is greater than the upper percentile,\n";
  cout << " values inside the closed interval <upper>..<lower> are excluded.\n";
  cout << " -threshold-lt, -lower-threshold, -mask-lt <value>\n";
  cout << " Exclude values less than a given threshold.\n";
  cout << " -threshold-lt-percentile, -threshold-lt-pct, -lower-percentile-threshold, -lower-pct-threshold, -mask-lt-percentile, -mask-lt-pct <value>\n";
  cout << " Exclude values less than a given percentile.\n";
  cout << " -threshold-le, -mask-le, -mask-below <value>\n";
  cout << " Exclude values less than or equal to a given threshold.\n";
  cout << " -threshold-le-percentile, -threshold-le-pct, -mask-le-percentile, -mask-le-pct, -mask-below-percentile, -mask-below-pct <value>\n";
  cout << " Exclude values less than or equal to a given percentile.\n";
  cout << " -threshold-ge, -mask-ge, -mask-above <value>\n";
  cout << " Exclude values greater than or equal to a given threshold.\n";
  cout << " -threshold-ge-percentile, -threshold-ge-pct, -mask-ge-percentile, -mask-ge-pct, -mask-above-percentile, -mask-above-pct <value>\n";
  cout << " Exclude values greater than or equal to a given percentile.\n";
  cout << " -threshold-gt, -upper-threshold, -mask-gt <value>\n";
  cout << " Exclude values greater than a given threshold.\n";
  cout << " -threshold-gt-percentile, -threshold-gt-pct, -upper-percentile-threshold, -upper-pct-threshold, -mask-gt-percentile, -mask-gt-pct <value>\n";
  cout << " Exclude values greater than a given percentile.\n";
  cout << "\n";
  cout << "Data rescaling options:\n";
  cout << " -binarize <lower> [<upper>]\n";
  cout << " Set values inside the closed interval <lower>..<upper> to one,\n";
  cout << " and all other values to zero. The default upper threshold is +inf.\n";
  cout << " When the lower threshold is greater than the upper threshold,\n";
  cout << " values inside the closed interval <upper>..<lower> are set to zero\n";
  cout << " and all other values to one instead. This operation is short for:\n";
  cout << " :option:`-threshold-inside` <lower> <upper> :option:`-set` 1 :option:`-pad` 0\n";
  cout << " -clamp <lower> <upper>\n";
  cout << " Clamp values which are less than a lower or greater than an upper threshold.\n";
  cout << " -clamp-percentiles, -clamp-pcts <lower> <upper>\n";
  cout << " Clamp values which are less than a lower percentile or greater than an upper percentile.\n";
  cout << " -clamp-below, -clamp-lt <value>\n";
  cout << " Clamp values less than a given threshold.\n";
  cout << " -clamp-below-percentile, -clamp-below-pct, -clamp-lt-percentile, -clamp-lt-pct <value>\n";
  cout << " Clamp values less than a given percentile.\n";
  cout << " -clamp-above, -clamp-gt <value>\n";
  cout << " Clamp values greater than a given threshold.\n";
  cout << " -clamp-above-percentile, -clamp-above-pct, -clamp-gt-percentile, -clamp-gt-pct <value>\n";
  cout << " Clamp values greater than a given percentile.\n";
  cout << " -rescale <min> <max>\n";
  cout << " Linearly rescale values to the interval [min, max].\n";
  cout << " -map <from> <to>...\n";
  cout << " Replaces values equal to <from> by the specified <to> value. Multiple pairs of <from>\n";
  cout << " and <to> value replacements can be specified in order to perform the substitutions in\n";
  cout << " one step. For example, to swap the two values 1 and 2, use ``-map 1 2 2 1``.\n";
  cout << "\n";
  cout << "Arithmetic operation options:\n";
  cout << " -add, -plus <value> | <file> [<scalars>]\n";
  cout << " Add constant value or data sequence read from specified file.\n";
  cout << " Another name for this option is the '+' sign, see Examples.\n";
  cout << " -sub, -subtract, -minus <value> | <file> [<scalars>]\n";
  cout << " Subtract constant value or data sequence read from specified file.\n";
  cout << " Another name for this option is the '-' sign, see Examples.\n";
  // Fixed option name: the parser accepts "-multiply-by", not "-multiply-with".
  cout << " -mul, -multiply-by, -times <value> | <file> [<scalars>]\n";
  cout << " Multiply by constant value or data sequence read from specified file.\n";
  cout << " Another name for this option is the '*' sign, see Examples.\n";
  cout << " -div, -divide-by, -over <value> | sum | <file> [<scalars>]\n";
  cout << " Divide by constant value or data sequence read from specified file.\n";
  cout << " When the argument is \"sum\", the divisor is the sum of the values.\n";
  cout << " When dividing by zero values in the input file, the result is NaN.\n";
  cout << " Use :option:`-mask` with argument NaN and :option:`-pad` to replace\n";
  cout << " these undefined values by a constant such as zero.\n";
  cout << " Another name for this option is the '/' sign, see Examples.\n";
  cout << " -div-with-zero <value> | sum | <file> [<scalars>]\n";
  cout << " Same as :option:`-div`, but set result to zero in case of division by zero.\n";
  cout << " -abs\n";
  cout << " Replace values by their respective absolute value.\n";
  cout << " -pow, -power <exponent>\n";
  cout << " Raise values to the power of the given exponent.\n";
  cout << " -sq, -square\n";
  cout << " Raise values to the power of 2 (i.e, -pow 2).\n";
  cout << " -sqrt\n";
  cout << " Calculate square root of each value (i.e, -pow .5).\n";
  cout << " -exp\n";
  cout << " Calculate exponential of data sequence.\n";
  cout << " -log [<threshold>] [<base>]\n";
  cout << " Compute logarithm after applying an optional threshold.\n";
  cout << " (default threshold: min double, default base: e)\n";
  cout << " -lb, -log2 [<threshold>]\n";
  cout << " Compute binary logarithm, alias for :option:`-log` with base 2.\n";
  cout << " -ln, -loge [<threshold>]\n";
  cout << " Compute natural logarithm, alias for :option:`-log` with base e.\n";
  cout << " -lg, -log10 [<threshold>]\n";
  cout << " Compute logarithm to base 10, alias for :option:`-log` with base 10.\n";
  cout << " -mod, -fmod <denominator>\n";
  cout << " Compute modulo division of each value with specified denominator.\n";
  cout << " -floor\n";
  cout << " Round floating point values to largest integer value that is not greater.\n";
  cout << " -ceil\n";
  // Fixed description: ceil(x) is the smallest integer NOT LESS than x;
  // "smallest integer value that is greater" was wrong for integral inputs.
  cout << " Round floating point values to smallest integer value that is not less.\n";
  cout << " -round\n";
  cout << " Round floating point values to the nearest integer value, away from zero for halfway cases.\n";
  cout << "\n";
  cout << "Data output options:\n";
  cout << " -out, -o, -output <file> [<type>] [<name>]\n";
  cout << " Write current data sequence to file in the format of the input file.\n";
  cout << " Output data type can be: uchar, short, ushort, int, uint, float, double.\n";
  cout << " The optional <name> argument can be used to save the modified data\n";
  cout << " of an input point set data array with a different name along with the\n";
  cout << " input data. Otherwise, the input data values are replaced by the modified\n";
  cout << " values and stored with point data array name is unchanged.\n";
  // Fixed duplicated word "are" spanning the following two lines.
  cout << " Another name for this option is the '=' sign, but the optional arguments\n";
  cout << " are not supported by this alternative notation. See Examples for usage.\n";
  cout << "\n";
  cout << "Data statistics options:\n";
  cout << " -append <file>\n";
  cout << " Append output to a file. (default: STDOUT)\n";
  cout << " -delimiter, -delim, -d, -sep\n";
  cout << " Delimiting character(s). (default: '')\n";
  cout << " -header [<name>...]\n";
  cout << " Request output of header line if delimiter was specified as well.\n";
  cout << " If the output is appended to a text file, the header is only printed\n";
  cout << " if it does not exist. If no or fewer custom column names are given,\n";
  cout << " the default names for each statistic are printed. (default: none)\n";
  cout << " -prefix <str>...\n";
  cout << " One or more prefix strings to print. If no delimiter is specified,\n";
  cout << " the concatenated strings are printed before each line of the output.\n";
  cout << " Otherwise, each prefix string is printed as entry for the first columns\n";
  cout << " in the delimited output row, separated by the specified delimiter. (default: none)\n";
  cout << " -precision, -digits <int>\n";
  cout << " Number of significant digits. (default: 5)\n";
  cout << " -median\n";
  cout << " Print median value, i.e., 50th percentile. (default: off)\n";
  cout << " -mean, -avg, -average\n";
  cout << " Print mean value. (default: on)\n";
  cout << " -variance, -var\n";
  cout << " Print variance of values. (default: off)\n";
  cout << " -sigma, -std, -stddev, -stdev, -sd\n";
  cout << " Print standard deviation of values. (default: on)\n";
  cout << " -normal-distribution\n";
  cout << " Print mean and standard deviation of values.\n";
  cout << " Other option names: -mean+sigma, -mean+sd, -avg+std,... (default: off)\n";
  cout << " -mad, -mean-absolute-difference, -mean-absolute-deviation\n";
  cout << " Print mean absolute difference/deviation around the mean. (default: off)\n";
  cout << " -mad-median, -median-absolute-difference, -median-absolute-deviation\n";
  cout << " Print mean absolute difference/deviation around the median. (default: off)\n";
  cout << " -minimum, -min\n";
  cout << " Print minimum value. (default: off)\n";
  cout << " -maximum, -max\n";
  cout << " Print maximum value. (default: off)\n";
  cout << " -extrema, -minmax\n";
  cout << " Print minimum and maximum value. (default: on)\n";
  cout << " -range\n";
  cout << " Print range of values (i.e., max - min). (default: off)\n";
  cout << " -percentile, -pct, -p <n>...\n";
  cout << " Print n-th percentile. (default: none)\n";
  cout << " -lower-percentile-mean, -lpctavg <n>\n";
  cout << " Print mean intensity of values less than or equal to the n-th percentile. (default: off)\n";
  cout << " -upper-percentile-mean, -upctavg <n>\n";
  cout << " Print mean intensity of values greater than or equal to the n-th percentile. (default: off)\n";
  cout << " -sum\n";
  cout << " Print sum of values. Can be used to count values within a certain range using a thresholding\n";
  cout << " followed by :option:`-set` 1 before summing these values. (default: off)\n";
  cout << " -count\n";
  cout << " Print number of values inside the mask, i.e., values not currently excluded. (default: off)\n";
  PrintCommonOptions(cout);
  cout << "\n";
  cout << "Examples:\n";
  cout << "\n";
  cout << " " << name << " mni305.nii.gz\n";
  cout << " Mean = 26.9753\n";
  cout << " Standard deviation = 50.3525\n";
  cout << " Extrema = [0, 254]\n";
  cout << " Range = 254\n";
  cout << "\n";
  cout << " " << name << " mni305.nii.gz -pct 77\n";
  cout << " 77th percentile = 25\n";
  cout << "\n";
  cout << " " << name << " mni305.nii.gz -padding 25 -range -percentile 25 50 75 -prefix MNI305 '[>25]'\n";
  cout << " MNI305 [>25] range = 254\n";
  cout << " MNI305 [>25] 25th percentile = 69\n";
  cout << " MNI305 [>25] 50th percentile = 113\n";
  cout << " MNI305 [>25] 75th percentile = 150\n";
  cout << "\n";
  cout << " " << name << " mni305.nii.gz -d , -prefix MNI305\n";
  cout << " MNI305,26.9753,50.3525,0,254,254 [no newline at end of line]\n";
  cout << "\n";
  cout << " " << name << " mni305.nii.gz -d , -prefix MNI305 -header\n";
  cout << " ,Mean,Sigma,Min,Max,Range\n";
  cout << " MNI305,26.9753,50.3525,0,254,254\n";
  cout << "\n";
  cout << " " << name << " mni305.nii.gz -d , -prefix MNI305 -header ID Mean SD\n";
  cout << " ID,Mean,SD,Min,Max,Range\n";
  cout << " MNI305,26.9753,50.3525,0,254,254\n";
  cout << "\n";
  cout << " " << name << " a.nii.gz + b.nii.gz = c.nii.gz\n";
  cout << "\n";
  cout << " " << name << " a.vtk + b.nii.gz - 10 / c.nii = d.vtk\n";
  cout << " Adds data values at identical sequential memory indices in a and b,\n";
  cout << " subtracts the constant 10, and then divides by the values in image c.\n";
  cout << "\n";
  cout << " Note: Operations are always executed from left to right,\n";
  cout << " i.e., no mathematical operator precedence is considered!\n";
  cout << "\n";
}
// =============================================================================
// Main
// =============================================================================
// -----------------------------------------------------------------------------
// Some special options do not start with a '-' as otherwise required
//
// The operators '+', '/', and '=' act as option names on this tool's command
// line (e.g., "calculate a.nii.gz + b.nii.gz = c.nii.gz"). The stock
// HAS_ARGUMENT macro would treat such a following token as an argument of the
// current option, so it is redefined here to additionally stop at '+', '/',
// and '=' so they are parsed as the next operation instead.
// NOTE(review): '-' and '*' are handled by OPTION("-")/OPTION("*") directly
// and are not excluded here -- presumably because '-' already matches the
// default option prefix; confirm against _IsArgument's behavior.
#undef HAS_ARGUMENT
#define HAS_ARGUMENT \
_IsArgument(ARGIDX, argc, argv) && \
strcmp(argv[ARGIDX+1], "+") != 0 && \
strcmp(argv[ARGIDX+1], "/") != 0 && \
strcmp(argv[ARGIDX+1], "=") != 0
// -----------------------------------------------------------------------------
int main(int argc, char **argv)
{
InitializeIOLibrary();
// Initial data values
REQUIRES_POSARGS(1);
const char *input_name = POSARG(1);
UniquePtr<double[]> data;
int datatype = MIRTK_VOXEL_DOUBLE;
ImageAttributes attr;
#if MIRTK_Image_WITH_VTK
const char *scalars_name = nullptr;
bool cell_data = false;
for (ARGUMENTS_AFTER(1)) {
if (OPTION("-point-data") || OPTION("-pointdata") || OPTION("-pd") || OPTION("-scalars")) {
scalars_name = ARGUMENT;
cell_data = false;
}
else if (OPTION("-cell-data") || OPTION("-celldata") || OPTION("-cd")) {
scalars_name = ARGUMENT;
cell_data = true;
}
}
vtkSmartPointer<vtkDataSet> dataset;
vtkSmartPointer<vtkDataSetAttributes> arrays;
int n = Read(input_name, data, &datatype, &attr, &dataset, scalars_name, cell_data);
if (dataset) {
if (cell_data) {
arrays = dataset->GetCellData();
} else {
arrays = dataset->GetPointData();
}
}
#else // MIRTK_Image_WITH_VTK
int n = Read(input_name, data, &datatype, &attr);
#endif // MIRTK_Image_WITH_VTK
// Optional arguments
const double inf = numeric_limits<double>::infinity();
const double nan = numeric_limits<double>::quiet_NaN();
double a, b;
int p;
const char *append_name = NULL;
const char *delimiter = NULL;
bool print_header = false;
int digits = 5;
Array<string> header;
Array<string> prefix;
Array<UniquePtr<Op> > ops;
for (ARGUMENTS_AFTER(1)) {
if (OPTION("-append")) {
append_name = ARGUMENT;
} else if (OPTION("-point-data") || OPTION("-pointdata") || OPTION("-pd") || OPTION("-scalars")) {
#if MIRTK_Image_WITH_VTK
// Parsed before Read above
scalars_name = ARGUMENT;
cell_data = false;
#else
FatalError("Cannot process -point-data of VTK file because MIRTK Image library was built without VTK!");
#endif // MIRTK_Image_WITH_VTK
} else if (OPTION("-cell-data") || OPTION("-celldata") || OPTION("-cd")) {
#if MIRTK_Image_WITH_VTK
// Parsed before Read above
scalars_name = ARGUMENT;
cell_data = true;
#else
FatalError("Cannot process -cell-data of VTK file because MIRTK Image library was built without VTK!");
#endif // MIRTK_Image_WITH_VTK
} else if (OPTION("-prefix")) {
do {
prefix.push_back(ARGUMENT);
} while (HAS_ARGUMENT);
} else if (OPTION("-header")) {
print_header = true;
while (HAS_ARGUMENT) header.push_back(ARGUMENT);
// Masking
} else if (OPTION("-label")) {
ops.push_back(UniquePtr<Op>(new ResetMask(true)));
do {
const char *arg = ARGUMENT;
const Array<string> parts = Split(arg, "..");
if (parts.size() == 1) {
if (!FromString(parts[0], a)) a = nan;
b = a;
} else if (parts.size() == 2) {
if (!FromString(parts[0], a) || !FromString(parts[1], b)) {
a = b = nan;
}
} else {
a = b = nan;
}
if (IsNaN(a) || IsNaN(b)) {
FatalError("Invalid -label argument: " << arg);
}
ops.push_back(UniquePtr<Op>(new MaskInsideInterval(a, b)));
} while (HAS_ARGUMENT);
ops.push_back(UniquePtr<Op>(new InvertMask()));
} else if (OPTION("-mask-all")) {
ops.push_back(UniquePtr<Op>(new ResetMask(false)));
} else if (OPTION("-reset-mask")) {
ops.push_back(UniquePtr<Op>(new ResetMask(true)));
} else if (OPTION("-invert-mask")) {
ops.push_back(UniquePtr<Op>(new InvertMask()));
} else if (OPTION("-mask")) {
double c;
do {
const char *arg = ARGUMENT;
if (FromString(arg, c)) {
ops.push_back(UniquePtr<Op>(new Mask(c)));
} else {
const char *fname = arg;
const char *aname = nullptr;
if (HAS_ARGUMENT) {
arg = ARGUMENT;
if (HAS_ARGUMENT) {
aname = arg;
PARSE_ARGUMENT(c);
} else if (!FromString(arg, c)) {
aname = arg, c = 0.;
}
} else {
c = 0.;
#if MIRTK_Image_WITH_VTK
if (dataset && arrays->HasArray(fname)) {
aname = fname;
fname = input_name;
}
#endif
}
UniquePtr<Mask> op(new Mask(fname, c));
if (aname) {
#if MIRTK_Image_WITH_VTK
op->ArrayName(aname);
op->IsCellData(cell_data);
#else
FatalError("Cannot read point set files when build without VTK or wrong usage!");
#endif
}
ops.push_back(UniquePtr<Op>(op.release()));
break;
}
} while (HAS_ARGUMENT);
} else if (OPTION("-threshold-outside") || OPTION("-mask-outside")) {
PARSE_ARGUMENT(a);
PARSE_ARGUMENT(b);
ops.push_back(UniquePtr<Op>(new MaskOutsideOpenInterval(a, b)));
} else if (OPTION("-threshold-outside-percentiles") || OPTION("-threshold-outside-pcts") ||
OPTION("-mask-outside-percentiles") || OPTION("-mask-outside-pcts")) {
PARSE_ARGUMENT(p);
Statistic *a = new Percentile(p);
a->Hidden(verbose < 1);
ops.push_back(UniquePtr<Op>(a));
PARSE_ARGUMENT(p);
Statistic *b = new Percentile(p);
b->Hidden(verbose < 1);
ops.push_back(UniquePtr<Op>(b));
Op *op = new MaskOutsideOpenInterval(&a->Value(), &b->Value());
ops.push_back(UniquePtr<Op>(op));
} else if (OPTION("-threshold")) {
PARSE_ARGUMENT(a);
if (HAS_ARGUMENT) PARSE_ARGUMENT(b);
else b = inf;
ops.push_back(UniquePtr<Op>(new MaskOutsideInterval(a, b)));
} else if (OPTION("-percentile-threshold") || OPTION("-pct-threshold")) {
PARSE_ARGUMENT(p);
Statistic *a = new Percentile(p);
a->Hidden(verbose < 1);
ops.push_back(UniquePtr<Op>(a));
Op *op = new MaskOutsideInterval(&a->Value(), inf);
ops.push_back(UniquePtr<Op>(op));
} else if (OPTION("-threshold-percentiles") || OPTION("-threshold-pcts")) {
PARSE_ARGUMENT(p);
Statistic *a = new Percentile(p);
a->Hidden(verbose < 1);
ops.push_back(UniquePtr<Op>(a));
PARSE_ARGUMENT(p);
Statistic *b = new Percentile(p);
b->Hidden(verbose < 1);
ops.push_back(UniquePtr<Op>(b));
Op *op = new MaskOutsideInterval(&a->Value(), &b->Value());
ops.push_back(UniquePtr<Op>(op));
} else if (OPTION("-threshold-inside") || OPTION("-mask-inside")) {
PARSE_ARGUMENT(a);
PARSE_ARGUMENT(b);
ops.push_back(UniquePtr<Op>(new MaskInsideInterval(a, b)));
} else if (OPTION("-threshold-inside-percentiles") || OPTION("-threshold-inside-pcts") ||
OPTION("-mask-inside-percentiles") || OPTION("-mask-inside-pcts")) {
PARSE_ARGUMENT(p);
Statistic *a = new Percentile(p);
a->Hidden(verbose < 1);
ops.push_back(UniquePtr<Op>(a));
PARSE_ARGUMENT(p);
Statistic *b = new Percentile(p);
b->Hidden(verbose < 1);
ops.push_back(UniquePtr<Op>(b));
Op *op = new MaskInsideInterval(&a->Value(), &b->Value());
ops.push_back(UniquePtr<Op>(op));
} else if (OPTION("-threshold-lt") || OPTION("-lower-threshold") || OPTION("-mask-lt")) {
PARSE_ARGUMENT(a);
ops.push_back(UniquePtr<Op>(new MaskOutsideInterval(a, inf)));
} else if (OPTION("-threshold-lt-percentile") || OPTION("-threshold-lt-pct") ||
OPTION("-lower-percentile-threshold") || OPTION("-lower-pct-threshold") ||
OPTION("-mask-lt-percentile") || OPTION("-mask-lt-pct")) {
PARSE_ARGUMENT(p);
Statistic *a = new Percentile(p);
a->Hidden(verbose < 1);
ops.push_back(UniquePtr<Op>(a));
ops.push_back(UniquePtr<Op>(new MaskOutsideInterval(&a->Value(), inf)));
} else if (OPTION("-threshold-le") || OPTION("-mask-below") || OPTION("-mask-le")) {
PARSE_ARGUMENT(a);
ops.push_back(UniquePtr<Op>(new MaskOutsideOpenInterval(a, inf)));
} else if (OPTION("-threshold-le-percentile") || OPTION("-threshold-le-pct") ||
OPTION("-mask-below-percentile") || OPTION("-mask-below-pct") ||
OPTION("-mask-le-percentile") || OPTION("-mask-le-pct")) {
PARSE_ARGUMENT(p);
Statistic *a = new Percentile(p);
a->Hidden(verbose < 1);
ops.push_back(UniquePtr<Op>(a));
ops.push_back(UniquePtr<Op>(new MaskOutsideOpenInterval(&a->Value(), inf)));
} else if (OPTION("-threshold-ge") || OPTION("-mask-above") || OPTION("-mask-ge")) {
PARSE_ARGUMENT(b);
ops.push_back(UniquePtr<Op>(new MaskOutsideOpenInterval(-inf, b)));
} else if (OPTION("-threshold-ge-percentile") || OPTION("-threshold-ge-pct") ||
OPTION("-mask-above-percentile") || OPTION("-mask-above-pct") ||
OPTION("-mask-ge-percentile") || OPTION("-mask-ge-pct")) {
PARSE_ARGUMENT(p);
Statistic *b = new Percentile(p);
b->Hidden(verbose < 1);
ops.push_back(UniquePtr<Op>(b));
ops.push_back(UniquePtr<Op>(new MaskOutsideOpenInterval(-inf, &b->Value())));
} else if (OPTION("-threshold-gt") || OPTION("-upper-threshold") || OPTION("-mask-gt")) {
PARSE_ARGUMENT(b);
ops.push_back(UniquePtr<Op>(new MaskOutsideInterval(-inf, b)));
} else if (OPTION("-threshold-gt-percentile") || OPTION("-threshold-gt-pct") ||
OPTION("-upper-percentile-threshold") || OPTION("-upper-pct-threshold") ||
OPTION("-mask-gt-percentile") || OPTION("-mask-gt-pct")) {
PARSE_ARGUMENT(p);
Statistic *b = new Percentile(p);
b->Hidden(verbose < 1);
ops.push_back(UniquePtr<Op>(b));
ops.push_back(UniquePtr<Op>(new MaskOutsideInterval(-inf, &b->Value())));
} else if (OPTION("-even")) {
ops.push_back(UniquePtr<Op>(new MaskOddValues()));
} else if (OPTION("-odd")) {
ops.push_back(UniquePtr<Op>(new MaskEvenValues()));
// Clamping
} else if (OPTION("-clamp")) {
PARSE_ARGUMENT(a);
PARSE_ARGUMENT(b);
ops.push_back(UniquePtr<Op>(new Clamp(a, b)));
} else if (OPTION("-clamp-percentiles") || OPTION("-clamp-pcts")) {
PARSE_ARGUMENT(p);
Statistic *a = new Percentile(p);
a->Hidden(verbose < 1);
ops.push_back(UniquePtr<Op>(a));
PARSE_ARGUMENT(p);
Statistic *b = new Percentile(p);
b->Hidden(verbose < 1);
ops.push_back(UniquePtr<Op>(b));
ops.push_back(UniquePtr<Op>(new Clamp(&a->Value(), &b->Value())));
} else if (OPTION("-clamp-lt") || OPTION("-clamp-below")) {
PARSE_ARGUMENT(a);
ops.push_back(UniquePtr<Op>(new LowerThreshold(a)));
} else if (OPTION("-clamp-lt-percentile") || OPTION("-clamp-lt-pct") ||
OPTION("-clamp-below-percentile") || OPTION("-clamp-below-pct")) {
PARSE_ARGUMENT(p);
Statistic *a = new Percentile(p);
a->Hidden(verbose < 1);
ops.push_back(UniquePtr<Op>(a));
ops.push_back(UniquePtr<Op>(new LowerThreshold(&a->Value())));
} else if (OPTION("-clamp-gt") || OPTION("-clamp-above")) {
PARSE_ARGUMENT(b);
ops.push_back(UniquePtr<Op>(new UpperThreshold(b)));
} else if (OPTION("-clamp-gt-percentile") || OPTION("-clamp-gt-pct") ||
OPTION("-clamp-above-percentile") || OPTION("-clamp-above-pct")) {
PARSE_ARGUMENT(p);
Statistic *b = new Percentile(p);
b->Hidden(verbose < 1);
ops.push_back(UniquePtr<Op>(b));
ops.push_back(UniquePtr<Op>(new UpperThreshold(&b->Value())));
} else if (OPTION("-rescale")) {
double min, max;
if (!FromString(ARGUMENT, min)) {
cerr << "Invalid -rescale minimum, must be a number!" << endl;
exit(1);
}
if (!FromString(ARGUMENT, max)) {
cerr << "Invalid -rescale maximum, must be a number!" << endl;
exit(1);
}
ops.push_back(UniquePtr<Op>(new Rescale(min, max)));
} else if (OPTION("-set") || OPTION("-inside")) {
double inside_value;
if (!FromString(ARGUMENT, inside_value)) {
cerr << "Invalid -inside value, must be a number!" << endl;
exit(1);
}
ops.push_back(UniquePtr<Op>(new SetInsideValue(inside_value)));
} else if (OPTION("-pad") || OPTION("-outside")) {
double outside_value;
if (!FromString(ARGUMENT, outside_value)) {
cerr << "Invalid -outside value, must be a number!" << endl;
exit(1);
}
ops.push_back(UniquePtr<Op>(new SetOutsideValue(outside_value)));
// Data transformations
} else if (OPTION("-binarize")) {
PARSE_ARGUMENT(a);
if (HAS_ARGUMENT) PARSE_ARGUMENT(b);
else b = inf;
ops.push_back(UniquePtr<Op>(new Binarize(a, b)));
} else if (OPTION("-map")) {
UniquePtr<Map> map(new Map());
do {
const char * const arg1 = ARGUMENT;
const char * const arg2 = ARGUMENT;
if (!FromString(arg1, a) || !FromString(arg2, b)) {
FatalError("Arguments of -map option must be pairs of two numbers (i.e., number of arguments must be even)!");
}
map->Insert(a, b);
} while (HAS_ARGUMENT);
ops.push_back(UniquePtr<Op>(map.release()));
} else if (OPTION("-add") || OPTION("-plus") || OPTION("+")) {
const char *arg = ARGUMENT;
double c;
if (FromString(arg, c)) {
ops.push_back(UniquePtr<Op>(new Add(c)));
} else {
const char *fname = arg;
const char *aname = nullptr;
if (HAS_ARGUMENT) {
aname = ARGUMENT;
} else {
#if MIRTK_Image_WITH_VTK
if (dataset && arrays->HasArray(fname)) {
aname = fname;
fname = input_name;
}
#endif
}
UniquePtr<Add> op(new Add(fname));
if (aname) {
#if MIRTK_Image_WITH_VTK
op->ArrayName(aname);
op->IsCellData(cell_data);
#else
FatalError("Cannot read scalars from point set file when build without VTK or wrong usage!");
#endif
}
ops.push_back(UniquePtr<Op>(op.release()));
}
} else if (OPTION("-sub") || OPTION("-subtract") || OPTION("-minus") || OPTION("-")) {
const char *arg = ARGUMENT;
double c;
if (FromString(arg, c)) {
ops.push_back(UniquePtr<Op>(new Sub(c)));
} else {
const char *fname = arg;
const char *aname = nullptr;
if (HAS_ARGUMENT) {
aname = ARGUMENT;
} else {
#if MIRTK_Image_WITH_VTK
if (dataset && arrays->HasArray(fname)) {
aname = fname;
fname = input_name;
}
#endif
}
UniquePtr<Sub> op(new Sub(fname));
if (aname) {
#if MIRTK_Image_WITH_VTK
op->ArrayName(aname);
op->IsCellData(cell_data);
#else
FatalError("Cannot read point set files when build without VTK or wrong usage!");
#endif
}
ops.push_back(UniquePtr<Op>(op.release()));
}
} else if (OPTION("-mul") || OPTION("-multiply-by") || OPTION("-times") || OPTION("*")) {
const char *arg = ARGUMENT;
double c;
if (FromString(arg, c)) {
ops.push_back(UniquePtr<Op>(new Mul(c)));
} else {
const char *fname = arg;
const char *aname = nullptr;
if (HAS_ARGUMENT) {
aname = ARGUMENT;
} else {
#if MIRTK_Image_WITH_VTK
if (dataset && arrays->HasArray(fname)) {
aname = fname;
fname = input_name;
}
#endif
}
UniquePtr<Mul> op(new Mul(fname));
if (aname) {
#if MIRTK_Image_WITH_VTK
op->ArrayName(aname);
op->IsCellData(cell_data);
#else
FatalError("Cannot read point set files when build without VTK or wrong usage!");
#endif
}
ops.push_back(UniquePtr<Op>(op.release()));
}
} else if (OPTION("-div") || OPTION("-divide-by") || OPTION("-over") || OPTION("/")) {
const char *arg = ARGUMENT;
double c;
if (ToLower(arg) == "sum") {
Statistic *a = new Sum();
a->Hidden(verbose < 1);
ops.push_back(UniquePtr<Op>(a));
ops.push_back(UniquePtr<Op>(new Div(&a->Value())));
} else if (FromString(arg, c)) {
if (fequal(c, .0)) {
cerr << "Invalid -div argument, value must not be zero!" << endl;
exit(1);
}
ops.push_back(UniquePtr<Op>(new Div(c)));
} else {
const char *fname = arg;
const char *aname = nullptr;
if (HAS_ARGUMENT) {
aname = ARGUMENT;
} else {
#if MIRTK_Image_WITH_VTK
if (dataset && arrays->HasArray(fname)) {
aname = fname;
fname = input_name;
}
#endif
}
UniquePtr<Div> op(new Div(fname));
if (aname) {
#if MIRTK_Image_WITH_VTK
op->ArrayName(aname);
op->IsCellData(cell_data);
#else
FatalError("Cannot read point set files when build without VTK or wrong usage!");
#endif
}
ops.push_back(UniquePtr<Op>(op.release()));
}
} else if (OPTION("-div-with-zero")) {
const char *arg = ARGUMENT;
double c;
if (ToLower(arg) == "sum") {
Statistic *a = new Sum();
a->Hidden(verbose < 1);
ops.push_back(UniquePtr<Op>(a));
ops.push_back(UniquePtr<Op>(new DivWithZero(&a->Value())));
} else if (FromString(arg, c)) {
ops.push_back(UniquePtr<Op>(new DivWithZero(c)));
} else {
const char *fname = arg;
const char *aname = nullptr;
if (HAS_ARGUMENT) {
aname = ARGUMENT;
} else {
#if MIRTK_Image_WITH_VTK
if (dataset && arrays->HasArray(fname)) {
aname = fname;
fname = input_name;
}
#endif
}
UniquePtr<DivWithZero> op(new DivWithZero(fname));
if (aname) {
#if MIRTK_Image_WITH_VTK
op->ArrayName(aname);
op->IsCellData(cell_data);
#else
FatalError("Cannot read point set files when build without VTK or wrong usage!");
#endif
}
ops.push_back(UniquePtr<Op>(op.release()));
}
} else if (OPTION("-abs")) {
ops.push_back(UniquePtr<Op>(new Abs()));
} else if (OPTION("-pow") || OPTION("-power")) {
const char *arg = ARGUMENT;
double exponent;
if (!FromString(arg, exponent)) {
cerr << "Invalid -power value, must be a number!" << endl;
exit(1);
}
ops.push_back(UniquePtr<Op>(new Pow(exponent)));
} else if (OPTION("-sqrt")) {
ops.push_back(UniquePtr<Op>(new Pow(.5)));
} else if (OPTION("-square") || OPTION("-sq")) {
ops.push_back(UniquePtr<Op>(new Pow(2.0)));
} else if (OPTION("-exp")) {
ops.push_back(UniquePtr<Op>(new Exp()));
} else if (OPTION("-log") || OPTION("-log2") || OPTION("-loge") || OPTION("-log10") || OPTION("-lb") || OPTION("-ln") || OPTION("-lg")) {
a = numeric_limits<double>::min();
if (HAS_ARGUMENT) {
PARSE_ARGUMENT(a);
if (a <= .0) {
cerr << "Invalid -log threshold argument, must be a positive number" << endl;
exit(1);
}
}
Op *op = nullptr;
if (strcmp(OPTNAME, "-log") == 0) {
if (HAS_ARGUMENT) {
double base;
if (!FromString(ARGUMENT, base)) {
char c;
if (!FromString(ARGUMENT, c) || c != 'e') {
cerr << "Invalid -log base argument, must be a positive number or character e" << endl;
exit(1);
}
op = new Ln(a);
} else {
op = new Log(base, a);
}
} else {
op = new Ln(a);
}
} else if (strcmp(OPTNAME, "-log2") == 0 || strcmp(OPTNAME, "-lb") == 0) {
op = new Lb(a);
} else if (strcmp(OPTNAME, "-log10") == 0 || strcmp(OPTNAME, "-lg") == 0) {
op = new Lg(a);
} else if (strcmp(OPTNAME, "-loge") == 0 || strcmp(OPTNAME, "-ln") == 0) {
op = new Ln(a);
}
ops.push_back(UniquePtr<Op>(op));
} else if (OPTION("-mod") || OPTION("-fmod")) {
const char *arg = ARGUMENT;
double denominator;
if (!FromString(arg, denominator) || abs(denominator) < 1e-12) {
cerr << "Invalid -mod value, must be a non-zero number!" << endl;
exit(1);
}
ops.push_back(UniquePtr<Op>(new Mod(denominator)));
} else if (OPTION("-floor")) {
ops.push_back(UniquePtr<Op>(new Floor()));
} else if (OPTION("-ceil")) {
ops.push_back(UniquePtr<Op>(new Ceil()));
} else if (OPTION("-round")) {
ops.push_back(UniquePtr<Op>(new Round()));
} else if (OPTION("=")) {
const char *fname = ARGUMENT;
#if MIRTK_Image_WITH_VTK
ops.push_back(UniquePtr<Op>(new Write(fname, datatype, attr, dataset, scalars_name, scalars_name)));
#else
ops.push_back(UniquePtr<Op>(new Write(fname, datatype, attr)));
#endif
} else if (OPTION("-o") || OPTION("-out") || OPTION("-output")) {
const char *fname = ARGUMENT;
int dtype = datatype;
#if MIRTK_Image_WITH_VTK
const char *output_scalars_name = scalars_name;
#endif
if (HAS_ARGUMENT) {
const char *arg = ARGUMENT;
dtype = ToDataType(arg);
if (dtype == MIRTK_VOXEL_UNKNOWN) {
cerr << "Invalid -out data type " << arg << endl;
exit(1);
}
if (HAS_ARGUMENT) {
#if MIRTK_Image_WITH_VTK
output_scalars_name = ARGUMENT;
#else
Warning("Output scalars array name argument of -output option ignored");
#endif
}
}
#if MIRTK_Image_WITH_VTK
ops.push_back(UniquePtr<Op>(new Write(fname, dtype, attr, dataset, scalars_name, output_scalars_name, cell_data)));
#else
ops.push_back(UniquePtr<Op>(new Write(fname, dtype, attr)));
#endif
// Data statistics
} else if (OPTION("-median")) {
ops.push_back(UniquePtr<Op>(new Median()));
} else if (OPTION("-mean") || OPTION("-average") || OPTION("-avg")) {
ops.push_back(UniquePtr<Op>(new Mean()));
} else if (OPTION("-sigma") || OPTION("-stddev") || OPTION("-stdev") || OPTION("-std") || OPTION("-sd")) {
ops.push_back(UniquePtr<Op>(new StDev()));
} else if (OPTION("-normal-distribution") ||
OPTION("-mean+sigma") || OPTION("-mean+stddev") || OPTION("-mean+stdev") || OPTION("-mean+std") || OPTION("-mean+sd") ||
OPTION("-avg+sigma") || OPTION("-avg+stddev") || OPTION("-avg+stdev") || OPTION("-avg+std") || OPTION("-avg+sd")) {
ops.push_back(UniquePtr<Op>(new NormalDistribution()));
} else if (OPTION("-variance") || OPTION("-var")) {
ops.push_back(UniquePtr<Op>(new Var()));
} else if (OPTION("-mean-absolute-difference") || OPTION("-mean-absolute-deviation") || OPTION("-mad") || OPTION("-mad-mean")) {
ops.push_back(UniquePtr<Op>(new MeanAbsoluteDifference()));
} else if (OPTION("-median-absolute-difference") || OPTION("-median-absolute-deviation") || OPTION("-mad-median")) {
ops.push_back(UniquePtr<Op>(new MedianAbsoluteDifference()));
} else if (OPTION("-minimum") || OPTION("-min")) {
ops.push_back(UniquePtr<Op>(new Min()));
} else if (OPTION("-maximum") || OPTION("-max")) {
ops.push_back(UniquePtr<Op>(new Max()));
} else if (OPTION("-extrema") || OPTION("-minmax")) {
ops.push_back(UniquePtr<Op>(new Extrema()));
} else if (OPTION("-range")) {
ops.push_back(UniquePtr<Op>(new Range()));
} else if (OPTION("-percentile") || OPTION("-pct") || OPTION("-p")) {
do {
int p;
if (FromString(ARGUMENT, p) && 0 <= p && p <= 100) {
ops.push_back(UniquePtr<Op>(new Percentile(p)));
} else {
cerr << "Invalid -percentile value, must be integer in the range [0, 100]!" << endl;
exit(1);
}
} while (HAS_ARGUMENT);
} else if (OPTION("-lower-percentile-mean") || OPTION("-lpctavg")) {
do {
int p;
if (FromString(ARGUMENT, p) && 0 <= p && p <= 100) {
ops.push_back(UniquePtr<Op>(new LowerPercentileMean(p)));
} else {
cerr << "Invalid -lower-percentile-mean value, must be integer in the range [0, 100]!" << endl;
exit(1);
}
} while (HAS_ARGUMENT);
} else if (OPTION("-upper-percentile-mean") || OPTION("-upctavg")) {
do {
int p;
if (FromString(ARGUMENT, p) && 0 <= p && p <= 100) {
ops.push_back(UniquePtr<Op>(new UpperPercentileMean(p)));
} else {
cerr << "Invalid -upper-percentile-mean value, must be integer in the range [0, 100]!" << endl;
exit(1);
}
} while (HAS_ARGUMENT);
} else if (OPTION("-sum")) {
ops.push_back(UniquePtr<Op>(new Sum()));
} else if (OPTION("-count")) {
ops.push_back(UniquePtr<Op>(new Count()));
} else if (OPTION("-delimiter") || OPTION("-delim") || OPTION("-d") || OPTION("-sep")) {
delimiter = ARGUMENT;
} else if (OPTION("-precision") || OPTION("-digits")) {
if (!FromString(ARGUMENT, digits) || digits < 0) {
cerr << "Invalid -precision argument, value must be non-negative integer!" << endl;
exit(1);
}
} else {
HANDLE_COMMON_OR_UNKNOWN_OPTION();
}
}
// If delimiter explicitly set to empty string, use none
if (delimiter && delimiter[0] == '\0') delimiter = NULL;
// Default statistics to compute
if (ops.empty()) {
ops.push_back(UniquePtr<Statistic>(new Mean()));
ops.push_back(UniquePtr<Statistic>(new StDev()));
ops.push_back(UniquePtr<Statistic>(new Extrema()));
ops.push_back(UniquePtr<Statistic>(new Range()));
}
// Initial data mask
UniquePtr<bool[]> mask(new bool[n]);
for (int i = 0; i < n; ++i) {
if (IsNaN(data[i])) {
mask[i] = false;
} else {
mask[i] = true;
}
}
// Process input data, either transform it or compute statistics from it
for (size_t i = 0; i < ops.size(); ++i) {
ops[i]->Process(n, data.get(), mask.get());
}
mask.reset();
// Open output file to append to or use STDOUT if none specified
ofstream ofs;
if (append_name) {
if (print_header) {
ifstream ifs(append_name);
if (ifs.is_open()) {
print_header = false;
ifs.close();
}
}
ofs.open(append_name, ios_base::app);
if (!ofs.is_open()) {
FatalError("Cannot append to file " << append_name);
}
}
ostream &out = (ofs.is_open() ? ofs : cout);
// Print column names if requested
if (delimiter && print_header) {
size_t c = 0;
for (size_t i = 0; i < prefix.size(); ++i, ++c) {
if (c > 0) out << delimiter;
if (c < header.size()) out << header[c];
}
for (size_t i = 0; i < ops.size(); ++i) {
Statistic *stat = dynamic_cast<Statistic *>(ops[i].get());
if (stat != nullptr && !stat->Hidden()) {
for (size_t j = 0; j < stat->Names().size(); ++j, ++c) {
if (c > 0) out << delimiter;
if (c < header.size()) out << header[c];
else out << stat->Names()[j];
}
}
}
out << endl;
}
// Print image statistics
if (delimiter) {
for (size_t i = 0; i < prefix.size(); ++i) {
if (i > 0) out << delimiter;
out << prefix[i];
}
bool first = prefix.empty();
for (size_t i = 0; i < ops.size(); ++i) {
Statistic *stat = dynamic_cast<Statistic *>(ops[i].get());
if (stat != nullptr && !stat->Hidden() && !stat->Names().empty()) {
if (!first) out << delimiter;
else first = false;
stat->PrintValues(out, digits, delimiter);
}
}
// No newline at end of row if printing results to STDOUT which in this
// case is usually assigned to a string in a calling script
if (print_header || ofs.is_open()) out << endl;
} else {
string prefix_string;
for (size_t i = 0; i < prefix.size(); ++i) {
if (i > 0) prefix_string += ' ';
prefix_string += prefix[i];
}
for (size_t i = 0; i < ops.size(); ++i) {
Statistic *stat = dynamic_cast<Statistic *>(ops[i].get());
if (stat != nullptr && !stat->Hidden()) {
stat->Print(out, digits, prefix_string.c_str());
}
}
}
ofs.close();
return 0;
}
| BioMedIA/MIRTK | Applications/src/calculate-element-wise.cc | C++ | apache-2.0 | 50,080 |
<%#
Copyright 2013-2017 the original author or authors.
This file is part of the JHipster project, see https://jhipster.github.io/
for more information.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-%>
import { SpyObject } from './spyobject';
import { Principal } from '../../../../main/webapp/app/shared/auth/principal.service';
import Spy = jasmine.Spy;
/**
 * Test double for the Principal service. The `identity` spy resolves to a
 * configurable fake account response.
 */
export class MockPrincipal extends SpyObject {
    identitySpy: Spy;
    fakeResponse: any;

    constructor() {
        super(Principal);
        this.fakeResponse = {};
        this.identitySpy = this.spy('identity').andReturn(Promise.resolve(this.fakeResponse));
    }

    /**
     * Replaces the fake identity response.
     *
     * Bug fix: the promise created in the constructor captured the ORIGINAL
     * `fakeResponse` object, so merely reassigning the field left `identity()`
     * resolving to the stale empty object. Re-stub the spy so subsequent calls
     * resolve to the new response.
     */
    setResponse(json: any): void {
        this.fakeResponse = json;
        this.identitySpy.andReturn(Promise.resolve(json));
    }
}
| fjuriolli/scribble | node_modules/generator-jhipster/generators/client/templates/angular/src/test/javascript/spec/helpers/_mock-principal.service.ts | TypeScript | apache-2.0 | 1,203 |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.template.macro;
import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.template.*;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.util.TypeConversionUtil;
import org.jetbrains.annotations.NotNull;
/**
* @author ven
*/
public class IterableComponentTypeMacro implements Macro {
  // Macro name as referenced from live templates: iterableComponentType(expr).
  public String getName() {
    return "iterableComponentType";
  }

  public String getDescription() {
    return CodeInsightBundle.message("macro.iterable.component.type");
  }

  public String getDefaultValue() {
    return "a";
  }

  /**
   * Evaluates the single parameter to an expression and returns its element type:
   * the component type for arrays, or the substituted T of java.lang.Iterable<T>
   * for Iterable implementors. Returns null when the parameter is missing, cannot
   * be resolved, or no usable element type can be determined.
   */
  public Result calculateResult(@NotNull Expression[] params, ExpressionContext context) {
    if (params.length != 1) return null;
    final Result result = params[0].calculateResult(context);
    if (result == null) return null;

    Project project = context.getProject();
    // Commit pending document changes so the PSI queried below is up to date.
    PsiDocumentManager.getInstance(project).commitAllDocuments();

    PsiExpression expr = MacroUtil.resultToPsiExpression(result, context);
    if (expr == null) return null;
    PsiType type = expr.getType();

    if (type instanceof PsiArrayType) {
      // Arrays: the element type is directly available.
      return new PsiTypeResult(((PsiArrayType)type).getComponentType(), project);
    }

    if (type instanceof PsiClassType) {
      PsiClassType.ClassResolveResult resolveResult = ((PsiClassType)type).resolveGenerics();
      PsiClass aClass = resolveResult.getElement();
      if (aClass != null) {
        PsiClass iterableClass = JavaPsiFacade.getInstance(project).findClass("java.lang.Iterable", aClass.getResolveScope());
        if (iterableClass != null) {
          // Map the expression's class onto Iterable<T> to learn what T was substituted with.
          PsiSubstitutor substitutor = TypeConversionUtil.getClassSubstitutor(iterableClass, aClass, resolveResult.getSubstitutor());
          if (substitutor != null) {
            PsiType parameterType = substitutor.substitute(iterableClass.getTypeParameters()[0]);
            // Unwrap captured wildcards back to the original wildcard type.
            if (parameterType instanceof PsiCapturedWildcardType) {
              parameterType = ((PsiCapturedWildcardType)parameterType).getWildcard();
            }
            if (parameterType != null) {
              if (parameterType instanceof PsiWildcardType) {
                // '? extends X' yields X; unbounded and '? super' wildcards give no usable type.
                if (((PsiWildcardType)parameterType).isExtends()) {
                  return new PsiTypeResult(((PsiWildcardType)parameterType).getBound(), project);
                }
                else return null;
              }
              return new PsiTypeResult(parameterType, project);
            }
          }
        }
      }
    }
    return null;
  }

  // Quick evaluation is not cheaper here; delegate to the full computation.
  public Result calculateQuickResult(@NotNull Expression[] params, ExpressionContext context) {
    return calculateResult(params, context);
  }

  // This macro offers no completion variants.
  public LookupElement[] calculateLookupItems(@NotNull Expression[] params, ExpressionContext context) {
    return LookupElement.EMPTY_ARRAY;
  }
}
| joewalnes/idea-community | java/java-impl/src/com/intellij/codeInsight/template/macro/IterableComponentTypeMacro.java | Java | apache-2.0 | 3,485 |
const ng = require('angular');

// Registers one /<page> route (served from /static/<page>.html) per static page,
// plus a legacy redirect for the renamed extraction guide.
ng.module('porybox.static', ['ngRoute']).config(['$routeProvider', $routeProvider => {
  const pages = [
    'about',
    'donate',
    'extracting-pokemon-files',
    'faq',
    'how-to-pk6-1-bvs',
    'how-to-pk6-2-homebrew',
    'how-to-pk6-3-4-save-files',
    'how-to-pk6-6-decrypted-powersaves',
    'how-to-pk7-1-bvs',
    'how-to-pk7-2-homebrew',
    'how-to-pk7-3-digital-save-files',
    'how-to-pk7-4-tea',
    'markdown',
    'privacy-policy',
    'tos'
  ];
  for (const pageName of pages) {
    $routeProvider.when(`/${pageName}`, {templateUrl: `/static/${pageName}.html`});
  }
  $routeProvider.when('/extracting-pk6-files', {redirectTo: '/extracting-pokemon-files'});
}]);
| porybox/porybox | client/static/static.module.js | JavaScript | apache-2.0 | 696 |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.fileEditor.impl;
import com.intellij.AppTopics;
import com.intellij.CommonBundle;
import com.intellij.codeStyle.CodeStyleFacade;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.TransactionGuard;
import com.intellij.openapi.application.TransactionGuardImpl;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.UndoConfirmationPolicy;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.ex.DocumentEx;
import com.intellij.openapi.editor.ex.PrioritizedDocumentListener;
import com.intellij.openapi.editor.impl.EditorFactoryImpl;
import com.intellij.openapi.editor.impl.TrailingSpacesStripper;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileEditor.*;
import com.intellij.openapi.fileEditor.impl.text.TextEditorImpl;
import com.intellij.openapi.fileTypes.BinaryFileTypeDecompilers;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.UnknownFileType;
import com.intellij.openapi.project.*;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.*;
import com.intellij.openapi.vfs.encoding.EncodingManager;
import com.intellij.openapi.vfs.newvfs.NewVirtualFileSystem;
import com.intellij.pom.core.impl.PomModelImpl;
import com.intellij.psi.ExternalChangeAction;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.SingleRootFileViewProvider;
import com.intellij.psi.impl.source.PsiFileImpl;
import com.intellij.testFramework.LightVirtualFile;
import com.intellij.ui.UIBundle;
import com.intellij.ui.components.JBScrollPane;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.messages.MessageBus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import java.awt.*;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.nio.charset.Charset;
import java.util.*;
import java.util.List;
public class FileDocumentManagerImpl extends FileDocumentManager implements VirtualFileListener, VetoableProjectManagerListener, SafeWriteRequestor {
private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.fileEditor.impl.FileDocumentManagerImpl");

// Hard file->document reference keeping the document alive with its file (see registerDocument()).
public static final Key<Document> HARD_REF_TO_DOCUMENT_KEY = Key.create("HARD_REF_TO_DOCUMENT_KEY");
// Line separator recorded on a document when it is modified; consulted on save (see getLineSeparator()).
private static final Key<String> LINE_SEPARATOR_KEY = Key.create("LINE_SEPARATOR_KEY");
// Back-reference from a document to the file it was created for (see getFile()).
private static final Key<VirtualFile> FILE_KEY = Key.create("FILE_KEY");
// NOTE(review): presumably flags files whose type must be re-detected; its uses are outside this chunk.
private static final Key<Boolean> MUST_RECOMPUTE_FILE_TYPE = Key.create("Must recompute file type");

// Documents with in-memory changes not yet written to disk; concurrent since it is
// touched from document listeners as well as save operations.
private final Set<Document> myUnsavedDocuments = ContainerUtil.newConcurrentSet();
private final MessageBus myBus;

// Guards document creation and document<->file binding (see getDocument()/registerDocument()).
private static final Object lock = new Object();

// Proxy re-broadcasting FileDocumentManagerListener events to all interested parties (see multiCast()).
private final FileDocumentManagerListener myMultiCaster;
private final TrailingSpacesStripper myTrailingSpacesStripper = new TrailingSpacesStripper();

// NOTE(review): set during application/project close; consulted by code outside this chunk.
private boolean myOnClose;

// Resolves conflicts between in-memory document changes and on-disk file changes.
private volatile MemoryDiskConflictResolver myConflictResolver = new MemoryDiskConflictResolver();
// Tracks "physical" document changes, i.e. changes not performed inside an
// ExternalChangeAction (those represent the file changing on disk, not the user).
// Such changes mark the document unsaved and record the line separator to use on save.
private final PrioritizedDocumentListener myPhysicalDocumentChangeTracker = new PrioritizedDocumentListener() {
  @Override
  public int getPriority() {
    // Lowest value == runs before all other prioritized document listeners,
    // so the unsaved state is recorded first.
    return Integer.MIN_VALUE;
  }

  @Override
  public void documentChanged(DocumentEvent e) {
    final Document document = e.getDocument();
    // Changes made by external (VFS-driven) reloads must not mark the document dirty.
    if (!ApplicationManager.getApplication().hasWriteAction(ExternalChangeAction.ExternalDocumentChange.class)) {
      myUnsavedDocuments.add(document);
    }

    // Pick the project of the current command, falling back to guessing from the file,
    // so the project-configured line separator is stored for the eventual save.
    final Runnable currentCommand = CommandProcessor.getInstance().getCurrentCommand();
    Project project = currentCommand == null ? null : CommandProcessor.getInstance().getCurrentCommandProject();
    if (project == null)
      project = ProjectUtil.guessProjectForFile(getFile(document));
    String lineSeparator = CodeStyleFacade.getInstance(project).getLineSeparator();
    document.putUserData(LINE_SEPARATOR_KEY, lineSeparator);

    // avoid documents piling up during batch processing
    if (areTooManyDocumentsInTheQueue(myUnsavedDocuments)) {
      saveAllDocumentsLater();
    }
  }
};
/**
 * Wires this manager into the platform: subscribes to VFS and project lifecycle
 * events and builds the listener multicaster proxy.
 */
public FileDocumentManagerImpl(@NotNull VirtualFileManager virtualFileManager, @NotNull ProjectManager projectManager) {
  virtualFileManager.addVirtualFileListener(this);
  projectManager.addProjectManagerListener(this);

  myBus = ApplicationManager.getApplication().getMessageBus();
  myBus.connect().subscribe(ProjectManager.TOPIC, this);

  // Dynamic proxy: every FileDocumentManagerListener call on myMultiCaster is
  // fanned out by multiCast() to the message bus, registered listeners and the
  // trailing-spaces stripper.
  InvocationHandler handler = (proxy, method, args) -> {
    multiCast(method, args);
    return null;
  };

  final ClassLoader loader = FileDocumentManagerListener.class.getClassLoader();
  myMultiCaster = (FileDocumentManagerListener)Proxy.newProxyInstance(loader, new Class[]{FileDocumentManagerListener.class}, handler);
}
/**
 * Unwraps a reflective-invocation failure and rethrows it: Errors and
 * RuntimeExceptions propagate as-is, anything else is logged.
 */
private static void unwrapAndRethrow(Exception e) {
  // Reflection wraps the real failure in InvocationTargetException - dig out the cause.
  Throwable cause = e;
  if (e instanceof InvocationTargetException && e.getCause() != null) {
    cause = e.getCause();
  }
  if (cause instanceof Error) {
    throw (Error)cause;
  }
  if (cause instanceof RuntimeException) {
    throw (RuntimeException)cause;
  }
  LOG.error(cause);
}
/**
 * Fans a FileDocumentManagerListener event out to, in order: the message-bus topic,
 * all statically registered listeners, and the trailing-spaces stripper. The order
 * matters: bus subscribers and listeners may modify a document before it is saved,
 * and stripping runs last. Failures of one receiver do not prevent the others.
 */
@SuppressWarnings("OverlyBroadCatchBlock")
private void multiCast(@NotNull Method method, Object[] args) {
  try {
    method.invoke(myBus.syncPublisher(AppTopics.FILE_DOCUMENT_SYNC), args);
  }
  catch (ClassCastException e) {
    LOG.error("Arguments: "+ Arrays.toString(args), e);
  }
  catch (Exception e) {
    unwrapAndRethrow(e);
  }

  // Allows pre-save document modification
  for (FileDocumentManagerListener listener : getListeners()) {
    try {
      method.invoke(listener, args);
    }
    catch (Exception e) {
      unwrapAndRethrow(e);
    }
  }

  // stripping trailing spaces
  try {
    method.invoke(myTrailingSpacesStripper, args);
  }
  catch (Exception e) {
    unwrapAndRethrow(e);
  }
}
/**
 * Returns the document for the given file, loading and caching it on first access.
 * Returns null for invalid files, directories, binary files without a decompiler,
 * and oversized binary files. For oversized text files only a preview-sized prefix
 * is loaded and the document is made read-only.
 */
@Override
@Nullable
public Document getDocument(@NotNull final VirtualFile file) {
  ApplicationManager.getApplication().assertReadAccessAllowed();

  DocumentEx document = (DocumentEx)getCachedDocument(file);
  if (document == null) {
    if (!file.isValid() || file.isDirectory() || isBinaryWithoutDecompiler(file)) return null;

    boolean tooLarge = FileUtilRt.isTooLarge(file.getLength());
    if (file.getFileType().isBinary() && tooLarge) return null;

    // Load the text outside the lock; creation below is double-checked.
    final CharSequence text = tooLarge ? LoadTextUtil.loadText(file, getPreviewCharCount(file)) : LoadTextUtil.loadText(file);
    synchronized (lock) {
      document = (DocumentEx)getCachedDocument(file);
      if (document != null) return document; // Double checking

      document = (DocumentEx)createDocument(text, file);
      document.setModificationStamp(file.getModificationStamp());
      final FileType fileType = file.getFileType();
      document.setReadOnly(tooLarge || !file.isWritable() || fileType.isBinary());

      // Light/non-physical files have no disk counterpart, so user changes to them
      // need not be tracked for saving.
      if (!(file instanceof LightVirtualFile || file.getFileSystem() instanceof NonPhysicalFileSystem)) {
        document.addDocumentListener(myPhysicalDocumentChangeTracker);
      }

      if (file instanceof LightVirtualFile) {
        // Light files get a hard two-way binding instead of the weak cache.
        registerDocument(document, file);
      }
      else {
        document.putUserData(FILE_KEY, file);
        cacheDocument(file, document);
      }
    }

    // Notify listeners outside the lock.
    myMultiCaster.fileContentLoaded(file, document);
  }

  return document;
}
/**
 * Heuristic used to decide when the unsaved-documents queue should be flushed:
 * true when there are more than 100 documents or their combined text exceeds
 * the "large content" threshold.
 */
public static boolean areTooManyDocumentsInTheQueue(Collection<Document> documents) {
  if (documents.size() > 100) {
    return true;
  }
  int accumulatedLength = 0;
  for (Document queued : documents) {
    accumulatedLength += queued.getTextLength();
    if (accumulatedLength > FileUtilRt.LARGE_FOR_CONTENT_LOADING) {
      return true;
    }
  }
  return false;
}
/**
 * Creates a new document over the given text. '\r' is only accepted for light files
 * that actually contain one; free-threaded mode follows the file's user-data flag.
 */
private static Document createDocument(final CharSequence text, VirtualFile file) {
  final boolean freeThreaded = Boolean.TRUE.equals(file.getUserData(SingleRootFileViewProvider.FREE_THREADED));
  final boolean acceptSlashR = file instanceof LightVirtualFile && StringUtil.indexOf(text, '\r') >= 0;
  final EditorFactoryImpl factory = (EditorFactoryImpl)EditorFactory.getInstance();
  return factory.createDocument(text, acceptSlashR, freeThreaded);
}
/**
 * Returns the already-loaded document for the file, if any: the hard-referenced
 * one takes precedence over the weak document cache. Never loads from disk.
 */
@Override
@Nullable
public Document getCachedDocument(@NotNull VirtualFile file) {
  final Document hardRef = file.getUserData(HARD_REF_TO_DOCUMENT_KEY);
  if (hardRef != null) {
    return hardRef;
  }
  return getDocumentFromCache(file);
}
/**
 * Hard-binds a document and a file to each other in both directions (under the
 * creation lock), so neither can be garbage-collected while the other is alive.
 */
public static void registerDocument(@NotNull final Document document, @NotNull VirtualFile virtualFile) {
  synchronized (lock) {
    document.putUserData(FILE_KEY, virtualFile);
    virtualFile.putUserData(HARD_REF_TO_DOCUMENT_KEY, document);
  }
}
/** Returns the file this document was created for, or null if it has none. */
@Override
@Nullable
public VirtualFile getFile(@NotNull Document document) {
  return document.getUserData(FILE_KEY);
}
/**
 * Test-only: discards all unsaved in-memory changes without writing them to disk.
 * Must run inside a write action; refuses to run outside unit-test mode.
 */
@TestOnly
public void dropAllUnsavedDocuments() {
  if (!ApplicationManager.getApplication().isUnitTestMode()) {
    throw new RuntimeException("This method is only for test mode!");
  }
  ApplicationManager.getApplication().assertWriteAccessAllowed();
  if (!myUnsavedDocuments.isEmpty()) {
    myUnsavedDocuments.clear();
    fireUnsavedDocumentsDropped();
  }
}
/**
 * Schedules a save of all currently unsaved documents on the EDT. Used to flush the
 * queue when too many documents pile up. Documents without a file or project, and
 * documents currently blocked by PSI operations, are skipped.
 */
private void saveAllDocumentsLater() {
  // later because some document might have been blocked by PSI right now
  ApplicationManager.getApplication().invokeLater(() -> {
    if (ApplicationManager.getApplication().isDisposed()) {
      return;
    }
    final Document[] unsavedDocuments = getUnsavedDocuments();
    for (Document document : unsavedDocuments) {
      VirtualFile file = getFile(document);
      if (file == null) continue;
      Project project = ProjectUtil.guessProjectForFile(file);
      if (project == null) continue;
      if (PsiDocumentManager.getInstance(project).isDocumentBlockedByPsi(document)) continue;

      saveDocument(document);
    }
  });
}
/** Saves all unsaved documents, treating the request as an explicit user action. */
@Override
public void saveAllDocuments() {
  saveAllDocuments(true);
}
/**
 * Saves every unsaved document. Failed (I/O error) and vetoed documents are skipped
 * and remembered; I/O failures are reported at the end via the save-error dialog.
 *
 * @param isExplicit caused by user directly (Save action) or indirectly (e.g. Compile)
 */
public void saveAllDocuments(boolean isExplicit) {
  ApplicationManager.getApplication().assertIsDispatchThread();
  ((TransactionGuardImpl)TransactionGuard.getInstance()).assertWriteActionAllowed();

  myMultiCaster.beforeAllDocumentsSaving();
  if (myUnsavedDocuments.isEmpty()) return;

  final Map<Document, IOException> failedToSave = new HashMap<>();
  final Set<Document> vetoed = new HashSet<>();
  // Saving a document may dirty other documents (listeners, on-save processing),
  // so keep making passes until one pass processes nothing. Failed/vetoed documents
  // are excluded from later passes to guarantee termination.
  while (true) {
    int count = 0;

    for (Document document : myUnsavedDocuments) {
      if (failedToSave.containsKey(document)) continue;
      if (vetoed.contains(document)) continue;
      try {
        doSaveDocument(document, isExplicit);
      }
      catch (IOException e) {
        //noinspection ThrowableResultOfMethodCallIgnored
        failedToSave.put(document, e);
      }
      catch (SaveVetoException e) {
        vetoed.add(document);
      }
      count++;
    }

    if (count == 0) break;
  }

  if (!failedToSave.isEmpty()) {
    handleErrorsOnSave(failedToSave);
  }
}
/** Saves the given document, treating the request as an explicit user action. */
@Override
public void saveDocument(@NotNull final Document document) {
  saveDocument(document, true);
}
/**
 * Saves a single document; no-op when it has no unsaved changes. I/O failures are
 * reported via the save-error dialog; a veto silently leaves the document unsaved.
 */
public void saveDocument(@NotNull final Document document, final boolean explicit) {
  ApplicationManager.getApplication().assertIsDispatchThread();
  ((TransactionGuardImpl)TransactionGuard.getInstance()).assertWriteActionAllowed();

  if (!myUnsavedDocuments.contains(document)) return;

  try {
    doSaveDocument(document, explicit);
  }
  catch (IOException e) {
    handleErrorsOnSave(Collections.singletonMap(document, e));
  }
  catch (SaveVetoException ignored) {
  }
}
/**
 * Saves the document exactly as it is in memory, bypassing strip-trailing-spaces
 * on save: stripping is disabled for the file for the duration of the save and
 * restored afterwards.
 */
@Override
public void saveDocumentAsIs(@NotNull Document document) {
  VirtualFile file = getFile(document);

  // Remember and temporarily clear the per-file stripping flag.
  boolean spaceStrippingEnabled = true;
  if (file != null) {
    spaceStrippingEnabled = TrailingSpacesStripper.isEnabled(file);
    TrailingSpacesStripper.setEnabled(file, false);
  }
  try {
    saveDocument(document);
  }
  finally {
    if (file != null) {
      TrailingSpacesStripper.setEnabled(file, spaceStrippingEnabled);
    }
  }
}
/** Internal control-flow exception: a vetoer or conflict refused the save. */
private static class SaveVetoException extends Exception {}

/**
 * Saves one document to its file.
 *
 * @throws IOException       when writing to disk fails
 * @throws SaveVetoException when the save is vetoed (memory/disk conflict or a
 *                           FileDocumentSynchronizationVetoer refusal)
 */
private void doSaveDocument(@NotNull final Document document, boolean isExplicit) throws IOException, SaveVetoException {
  VirtualFile file = getFile(document);

  // Nothing to write for detached/light files or when the file is already up to date.
  if (file == null || file instanceof LightVirtualFile || file.isValid() && !isFileModified(file)) {
    removeFromUnsaved(document);
    return;
  }

  // A stale timestamp means the file changed on disk; the refresh may reload the
  // document and thereby remove it from the unsaved set.
  if (file.isValid() && needsRefresh(file)) {
    file.refresh(false, false);
    if (!myUnsavedDocuments.contains(document)) return;
  }

  if (!maySaveDocument(file, document, isExplicit)) {
    throw new SaveVetoException();
  }

  WriteAction.run(() -> doSaveDocumentInWriteAction(document, file));
}
/**
 * Returns whether the document may be saved: false when a memory/disk conflict is
 * pending for the file or any registered synchronization vetoer objects.
 */
private boolean maySaveDocument(VirtualFile file, Document document, boolean isExplicit) {
  if (myConflictResolver.hasConflict(file)) {
    return false;
  }
  for (FileDocumentSynchronizationVetoer vetoer : Extensions.getExtensions(FileDocumentSynchronizationVetoer.EP_NAME)) {
    if (!vetoer.maySaveDocument(document, isExplicit)) {
      return false;
    }
  }
  return true;
}
/**
 * Performs the actual write of the document's text to its file; must run inside a
 * write action. When the document content already equals the file content, only the
 * modification stamp and dirty state are synchronized. Otherwise the text is written
 * with the file's line separator and the unsaved state is cleared.
 */
private void doSaveDocumentInWriteAction(@NotNull final Document document, @NotNull final VirtualFile file) throws IOException {
  if (!file.isValid()) {
    removeFromUnsaved(document);
    return;
  }

  // Re-bind if the document/file association was lost or changed.
  if (!file.equals(getFile(document))) {
    registerDocument(document, file);
  }

  if (!isSaveNeeded(document, file)) {
    // Content identical to disk: just sync the stamp and drop the dirty state.
    if (document instanceof DocumentEx) {
      ((DocumentEx)document).setModificationStamp(file.getModificationStamp());
    }
    removeFromUnsaved(document);
    updateModifiedProperty(file);
    return;
  }

  // PSI must not be modified while the document is being flushed to disk.
  PomModelImpl.guardPsiModificationsIn(() -> {
    myMultiCaster.beforeDocumentSaving(document);
    LOG.assertTrue(file.isValid());

    String text = document.getText();
    // Documents always use '\n'; convert to the file's separator on the way out.
    String lineSeparator = getLineSeparator(document, file);
    if (!lineSeparator.equals("\n")) {
      text = StringUtil.convertLineSeparators(text, lineSeparator);
    }

    Project project = ProjectLocator.getInstance().guessProjectForFile(file);
    LoadTextUtil.write(project, file, this, text, document.getModificationStamp());

    myUnsavedDocuments.remove(document);
    LOG.assertTrue(!myUnsavedDocuments.contains(document));
    myTrailingSpacesStripper.clearLineModificationFlags(document);
  });
}
/**
 * Refreshes the "modified" indicator of every open text editor showing the file,
 * across all open projects.
 */
private static void updateModifiedProperty(@NotNull VirtualFile file) {
  for (Project openProject : ProjectManager.getInstance().getOpenProjects()) {
    final FileEditorManager editorManager = FileEditorManager.getInstance(openProject);
    for (FileEditor fileEditor : editorManager.getAllEditors(file)) {
      if (fileEditor instanceof TextEditorImpl) {
        ((TextEditorImpl)fileEditor).updateModifiedProperty();
      }
    }
  }
}
/** Clears a document's unsaved state and notifies listeners that it was dropped. */
private void removeFromUnsaved(@NotNull Document document) {
  myUnsavedDocuments.remove(document);
  fireUnsavedDocumentsDropped();
  LOG.assertTrue(!myUnsavedDocuments.contains(document));
}
/**
 * Returns whether the document's text differs from the file content on disk.
 * For binary files and documents over ~1 MB the comparison is skipped and the
 * document is always considered in need of saving.
 */
private static boolean isSaveNeeded(@NotNull Document document, @NotNull VirtualFile file) throws IOException {
  if (file.getFileType().isBinary() || document.getTextLength() > 1000 * 1000) { // don't compare if the file is too big
    return true;
  }

  byte[] bytes = file.contentsToByteArray();
  CharSequence loaded = LoadTextUtil.getTextByBinaryPresentation(bytes, file, false, false);

  return !Comparing.equal(document.getCharsSequence(), loaded);
}
/**
 * Returns whether the cached VFS timestamp of the file is stale compared to the
 * actual file system (only meaningful for NewVirtualFileSystem-backed files).
 */
private static boolean needsRefresh(final VirtualFile file) {
  final VirtualFileSystem fileSystem = file.getFileSystem();
  if (!(fileSystem instanceof NewVirtualFileSystem)) {
    return false;
  }
  return file.getTimeStamp() != ((NewVirtualFileSystem)fileSystem).getTimeStamp(file);
}
/**
 * Returns the line separator to write the document with: the separator originally
 * detected in the file, falling back to the one recorded on the document when it
 * was modified (see LINE_SEPARATOR_KEY).
 */
@NotNull
public static String getLineSeparator(@NotNull Document document, @NotNull VirtualFile file) {
  String lineSeparator = LoadTextUtil.getDetectedLineSeparator(file);
  if (lineSeparator == null) {
    lineSeparator = document.getUserData(LINE_SEPARATOR_KEY);
    assert lineSeparator != null : document;
  }
  return lineSeparator;
}
/**
 * Returns the line separator for the given file: the separator detected in the file
 * if known, otherwise the (project-specific, when a project is given) code-style
 * default.
 */
@Override
@NotNull
public String getLineSeparator(@Nullable VirtualFile file, @Nullable Project project) {
  String separator = null;
  if (file != null) {
    separator = LoadTextUtil.getDetectedLineSeparator(file);
  }
  if (separator == null) {
    final CodeStyleFacade facade = project != null ? CodeStyleFacade.getInstance(project) : CodeStyleFacade.getInstance();
    separator = facade.getLineSeparator();
  }
  return separator;
}
/**
 * Tries to make the document writable. For a valid file in a project this delegates
 * to ReadonlyStatusHandler (which may e.g. check the file out of VCS); binary files
 * are never writable this way. Without a project/file, the document's own writable
 * flag decides, and a refusal fires a read-only modification notification.
 */
@Override
public boolean requestWriting(@NotNull Document document, Project project) {
  final VirtualFile file = getInstance().getFile(document);
  if (project != null && file != null && file.isValid()) {
    return !file.getFileType().isBinary() && ReadonlyStatusHandler.ensureFilesWritable(project, file);
  }

  if (document.isWritable()) {
    return true;
  }
  document.fireReadOnlyModificationAttempt();
  return false;
}
/**
 * Reloads the cached documents of the given files from disk.
 * Files that no longer exist or have no cached document are skipped.
 */
@Override
public void reloadFiles(@NotNull final VirtualFile... files) {
  for (VirtualFile fileToReload : files) {
    if (!fileToReload.exists()) {
      continue;
    }
    Document cachedDocument = getCachedDocument(fileToReload);
    if (cachedDocument != null) {
      reloadFromDisk(cachedDocument);
    }
  }
}
/**
 * Returns a snapshot array of all documents with unsaved changes
 * ({@link Document#EMPTY_ARRAY} when there are none).
 */
@Override
@NotNull
public Document[] getUnsavedDocuments() {
  if (myUnsavedDocuments.isEmpty()) {
    return Document.EMPTY_ARRAY;
  }
  // Copy straight into an array; the intermediate ArrayList was an unnecessary extra allocation.
  // Passing a zero-length array makes the collection size the result itself.
  return myUnsavedDocuments.toArray(Document.EMPTY_ARRAY);
}
/** Returns true if the document has modifications not yet written to its file. */
@Override
public boolean isDocumentUnsaved(@NotNull Document document) {
  return myUnsavedDocuments.contains(document);
}
/**
 * Returns true if the file has a cached document with unsaved changes whose
 * modification stamp differs from the file's own stamp.
 * Files without a cached document are reported as unmodified.
 */
@Override
public boolean isFileModified(@NotNull VirtualFile file) {
  final Document doc = getCachedDocument(file);
  return doc != null && isDocumentUnsaved(doc) && doc.getModificationStamp() != file.getModificationStamp();
}
/**
 * Reacts to virtual-file property changes:
 * writable flag changed -> the flag is mirrored onto the cached document
 * (inside a write action marked as an external change);
 * name changed -> if the new file type is binary with no decompiler, a text
 * document can no longer represent the file and it is unbound.
 */
@Override
public void propertyChanged(@NotNull VirtualFilePropertyEvent event) {
  final VirtualFile file = event.getFile();
  if (VirtualFile.PROP_WRITABLE.equals(event.getPropertyName())) {
    final Document document = getCachedDocument(file);
    if (document != null) {
      ApplicationManager.getApplication().runWriteAction((ExternalChangeAction)() -> document.setReadOnly(!file.isWritable()));
    }
  }
  else if (VirtualFile.PROP_NAME.equals(event.getPropertyName())) {
    Document document = getCachedDocument(file);
    if (document != null) {
      // a file is linked to a document - chances are it is an "unknown text file" now
      if (isBinaryWithoutDecompiler(file)) {
        unbindFileFromDocument(file, document);
      }
    }
  }
}
/** Breaks both directions of the file&lt;-&gt;document association. */
private void unbindFileFromDocument(@NotNull VirtualFile file, @NotNull Document document) {
  removeDocumentFromCache(file);
  file.putUserData(HARD_REF_TO_DOCUMENT_KEY, null); // allow the document to be garbage-collected
  document.putUserData(FILE_KEY, null);
}
/** Returns true for a binary file type that has a registered decompiler (so text can still be produced for it). */
private static boolean isBinaryWithDecompiler(@NotNull VirtualFile file) {
  FileType fileType = file.getFileType();
  if (!fileType.isBinary()) {
    return false;
  }
  return BinaryFileTypeDecompilers.INSTANCE.forFileType(fileType) != null;
}
/** Returns true for a binary file type that has no registered decompiler (no text can be produced for it). */
private static boolean isBinaryWithoutDecompiler(@NotNull VirtualFile file) {
  FileType type = file.getFileType();
  if (!type.isBinary()) {
    return false;
  }
  return BinaryFileTypeDecompilers.INSTANCE.forFileType(type) == null;
}
/**
 * Handles external (non-save) changes to a file's content.
 * Files without a cached document only notify listeners. If the document has
 * no unsaved changes, or its stamp matches the pre-change stamp, the document
 * is reloaded from disk; otherwise the in-memory changes are kept (the
 * memory/disk conflict is dealt with elsewhere, see beforeContentsChange).
 */
@Override
public void contentsChanged(@NotNull VirtualFileEvent event) {
  if (event.isFromSave()) return; // our own save triggered this event; nothing to reload
  final VirtualFile file = event.getFile();
  final Document document = getCachedDocument(file);
  if (document == null) {
    myMultiCaster.fileWithNoDocumentChanged(file);
    return;
  }
  if (isBinaryWithDecompiler(file)) {
    myMultiCaster.fileWithNoDocumentChanged(file); // This will generate PSI event at FileManagerImpl
  }
  if (document.getModificationStamp() == event.getOldModificationStamp() || !isDocumentUnsaved(document)) {
    reloadFromDisk(document);
  }
}
/**
 * Replaces the document's content with the current content of its file on disk.
 * Must be called on the dispatch thread. Synchronization vetoers may cancel the
 * reload; if the file can no longer back a document (e.g. became binary without
 * a decompiler), the file is unbound from the document instead.
 */
@Override
public void reloadFromDisk(@NotNull final Document document) {
  ApplicationManager.getApplication().assertIsDispatchThread();
  final VirtualFile file = getFile(document);
  assert file != null;
  if (!fireBeforeFileContentReload(file, document)) {
    return; // a FileDocumentSynchronizationVetoer rejected the reload
  }
  final Project project = ProjectLocator.getInstance().guessProjectForFile(file);
  // One-element array so the write action below can update the flag.
  boolean[] isReloadable = {isReloadable(file, document, project)};
  if (isReloadable[0]) {
    CommandProcessor.getInstance().executeCommand(project, () -> ApplicationManager.getApplication().runWriteAction(
      new ExternalChangeAction.ExternalDocumentChange(document, project) {
        @Override
        public void run() {
          if (!isBinaryWithoutDecompiler(file)) {
            // Drop charset/BOM info so it is re-detected from the new bytes.
            LoadTextUtil.setCharsetWasDetectedFromBytes(file, null);
            file.setBOM(null); // reset BOM in case we had one and the external change stripped it away
            file.setCharset(null, null, false);
            boolean wasWritable = document.isWritable();
            document.setReadOnly(false); // lifted temporarily so the external change can be applied
            boolean tooLarge = FileUtilRt.isTooLarge(file.getLength());
            CharSequence reloaded = tooLarge ? LoadTextUtil.loadText(file, getPreviewCharCount(file)) : LoadTextUtil.loadText(file);
            // Re-check: loading above may have changed what we know about the file.
            isReloadable[0] = isReloadable(file, document, project);
            if (isReloadable[0]) {
              DocumentEx documentEx = (DocumentEx)document;
              documentEx.replaceText(reloaded, file.getModificationStamp());
            }
            document.setReadOnly(!wasWritable);
          }
        }
      }
    ), UIBundle.message("file.cache.conflict.action"), null, UndoConfirmationPolicy.REQUEST_CONFIRMATION);
  }
  if (isReloadable[0]) {
    myMultiCaster.fileContentReloaded(file, document);
  }
  else {
    unbindFileFromDocument(file, document);
    myMultiCaster.fileWithNoDocumentChanged(file);
  }
  myUnsavedDocuments.remove(document);
}
/**
 * A document can be reloaded from disk unless the file is both too large and
 * binary, and unless a cached PSI file of a non-reparseable kind exists for it
 * (binary files with a decompiler remain reloadable).
 */
private static boolean isReloadable(@NotNull VirtualFile file, @NotNull Document document, @Nullable Project project) {
  PsiFile cachedPsiFile = project == null ? null : PsiDocumentManager.getInstance(project).getCachedPsiFile(document);
  return !(FileUtilRt.isTooLarge(file.getLength()) && file.getFileType().isBinary()) &&
         (cachedPsiFile == null || cachedPsiFile instanceof PsiFileImpl || isBinaryWithDecompiler(file));
}
/**
 * Test-only hook: replaces the memory/disk conflict resolver, restoring the
 * previous one when {@code disposable} is disposed.
 */
@TestOnly
void setAskReloadFromDisk(@NotNull Disposable disposable, @NotNull MemoryDiskConflictResolver newProcessor) {
  final MemoryDiskConflictResolver old = myConflictResolver;
  myConflictResolver = newProcessor;
  Disposer.register(disposable, () -> myConflictResolver = old);
}
/**
 * Notifies the trailing-spaces stripper when a file with a cached document is deleted.
 */
@Override
public void fileDeleted(@NotNull VirtualFileEvent event) {
  Document cachedDocument = getCachedDocument(event.getFile());
  if (cachedDocument == null) {
    return;
  }
  myTrailingSpacesStripper.documentDeleted(cachedDocument);
}
/**
 * Called before a file's content changes on disk.
 * Empty files of unknown type are flagged for file-type re-detection (the type
 * may become recognizable once content appears); the change is then handed to
 * the memory/disk conflict resolver.
 */
@Override
public void beforeContentsChange(@NotNull VirtualFileEvent event) {
  VirtualFile virtualFile = event.getFile();
  // check file type in second order to avoid content detection running
  if (virtualFile.getLength() == 0 && virtualFile.getFileType() == UnknownFileType.INSTANCE) {
    virtualFile.putUserData(MUST_RECOMPUTE_FILE_TYPE, Boolean.TRUE);
  }
  myConflictResolver.beforeContentChange(event);
}
/**
 * Re-queries the file type of files flagged by beforeContentsChange and clears
 * the flag. Returns true when a recomputation was actually performed.
 */
public static boolean recomputeFileTypeIfNecessary(@NotNull VirtualFile virtualFile) {
  if (virtualFile.getUserData(MUST_RECOMPUTE_FILE_TYPE) == null) {
    return false;
  }
  virtualFile.getFileType(); // querying the type triggers re-detection
  virtualFile.putUserData(MUST_RECOMPUTE_FILE_TYPE, null);
  return true;
}
/**
 * Project-close veto: tries to save all documents first and only allows the
 * close when nothing remains unsaved. {@code myOnClose} is raised during the
 * save so failure handling can adapt its wording (see handleErrorsOnSave).
 */
@Override
public boolean canClose(@NotNull Project project) {
  if (!myUnsavedDocuments.isEmpty()) {
    myOnClose = true;
    try {
      saveAllDocuments();
    }
    finally {
      myOnClose = false;
    }
  }
  return myUnsavedDocuments.isEmpty();
}
/** Notifies listeners that the set of unsaved documents shrank. */
private void fireUnsavedDocumentsDropped() {
  myMultiCaster.unsavedDocumentsDropped();
}
/**
 * Asks all FileDocumentSynchronizationVetoer extensions whether the file may be
 * reloaded. A vetoer that throws is logged and treated as consenting. When no
 * vetoer objects, the beforeFileContentReload event is fired and true is returned.
 */
private boolean fireBeforeFileContentReload(final VirtualFile file, @NotNull Document document) {
  for (FileDocumentSynchronizationVetoer vetoer : Extensions.getExtensions(FileDocumentSynchronizationVetoer.EP_NAME)) {
    try {
      if (!vetoer.mayReloadFileContent(file, document)) {
        return false;
      }
    }
    catch (Exception e) {
      LOG.error(e);
    }
  }
  myMultiCaster.beforeFileContentReload(file, document);
  return true;
}
/** Returns all registered FileDocumentManagerListener extensions. */
@NotNull
private static FileDocumentManagerListener[] getListeners() {
  return FileDocumentManagerListener.EP_NAME.getExtensions();
}
/**
 * Estimates how many characters of a too-large file fit into the preview size,
 * based on the file's configured charset (2 bytes per char assumed when the
 * charset is unknown).
 */
private static int getPreviewCharCount(@NotNull VirtualFile file) {
  Charset charset = EncodingManager.getInstance().getEncoding(file, false);
  float bytesPerChar = charset == null ? 2 : charset.newEncoder().averageBytesPerChar();
  return (int)(FileUtilRt.LARGE_FILE_PREVIEW_SIZE / bytesPerChar);
}
/**
 * Reports save failures to the user. In unit-test mode the first failure is
 * rethrown immediately. Otherwise every failure is logged and a modal dialog
 * offers to revert the affected documents (worded as "ignore changes" when the
 * IDE is closing, see myOnClose). If the user accepts, each failed document is
 * reloaded from disk, discarding the unsaved changes.
 */
private void handleErrorsOnSave(@NotNull Map<Document, IOException> failures) {
  if (ApplicationManager.getApplication().isUnitTestMode()) {
    IOException ioException = ContainerUtil.getFirstItem(failures.values());
    if (ioException != null) {
      throw new RuntimeException(ioException);
    }
    return;
  }
  for (IOException exception : failures.values()) {
    LOG.warn(exception);
  }
  // One line of dialog text per failure.
  final String text = StringUtil.join(failures.values(), Throwable::getMessage, "\n");
  final DialogWrapper dialog = new DialogWrapper(null) {
    {
      init();
      setTitle(UIBundle.message("cannot.save.files.dialog.title"));
    }
    @Override
    protected void createDefaultActions() {
      super.createDefaultActions();
      myOKAction.putValue(Action.NAME, UIBundle
        .message(myOnClose ? "cannot.save.files.dialog.ignore.changes" : "cannot.save.files.dialog.revert.changes"));
      myOKAction.putValue(DEFAULT_ACTION, null);
      if (!myOnClose) {
        myCancelAction.putValue(Action.NAME, CommonBundle.getCloseButtonText());
      }
    }
    @Override
    protected JComponent createCenterPanel() {
      final JPanel panel = new JPanel(new BorderLayout(0, 5));
      panel.add(new JLabel(UIBundle.message("cannot.save.files.dialog.message")), BorderLayout.NORTH);
      final JTextPane area = new JTextPane();
      area.setText(text);
      area.setEditable(false);
      area.setMinimumSize(new Dimension(area.getMinimumSize().width, 50));
      panel.add(new JBScrollPane(area, ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER),
                BorderLayout.CENTER);
      return panel;
    }
  };
  if (dialog.showAndGet()) {
    for (Document document : failures.keySet()) {
      reloadFromDisk(document);
    }
  }
}
// Cache of documents keyed by file; values are held weakly so that documents
// no longer referenced elsewhere can be garbage-collected.
private final Map<VirtualFile, Document> myDocumentCache = ContainerUtil.createConcurrentWeakValueMap();
// used in Upsource
protected void cacheDocument(@NotNull VirtualFile file, @NotNull Document document) {
  myDocumentCache.put(file, document);
}
// used in Upsource
protected void removeDocumentFromCache(@NotNull VirtualFile file) {
  myDocumentCache.remove(file);
}
// used in Upsource
protected Document getDocumentFromCache(@NotNull VirtualFile file) {
  return myDocumentCache.get(file);
}
}
| semonte/intellij-community | platform/platform-impl/src/com/intellij/openapi/fileEditor/impl/FileDocumentManagerImpl.java | Java | apache-2.0 | 29,248 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
using System;
using System.Net;
using System.Net.Sockets;
using System.Threading;
using System.Threading.Tasks;
using Org.Apache.REEF.Utilities.Diagnostics;
using Org.Apache.REEF.Utilities.Logging;
using Org.Apache.REEF.Wake.StreamingCodec;
using Org.Apache.REEF.Wake.Util;
namespace Org.Apache.REEF.Wake.Remote.Impl
{
/// <summary>
/// Server to handle incoming remote messages.
/// </summary>
/// <typeparam name="T">Generic Type of message. It is constrained to have implemented IWritable and IType interface</typeparam>
/// <summary>
/// Server to handle incoming remote messages.
/// </summary>
/// <typeparam name="T">Generic Type of message. It is constrained to have implemented IWritable and IType interface</typeparam>
internal sealed class StreamingTransportServer<T> : IDisposable
{
    // Fixed: the logger was registered under typeof(TransportServer<>), which
    // mis-attributed this class's log output to a different class.
    private static readonly Logger LOGGER = Logger.GetLogger(typeof(StreamingTransportServer<>));

    private TcpListener _listener;
    private readonly CancellationTokenSource _cancellationSource;
    private readonly IObserver<TransportEvent<T>> _remoteObserver;
    private readonly ITcpPortProvider _tcpPortProvider;
    private readonly IStreamingCodec<T> _streamingCodec;
    private bool _disposed;
    private Task _serverTask;

    /// <summary>
    /// Constructs a TransportServer to listen for remote events.
    /// Listens on the specified remote endpoint. When it receives a remote
    /// event, it will invoke the specified remote handler.
    /// </summary>
    /// <param name="address">Endpoint address to listen on</param>
    /// <param name="remoteHandler">The handler to invoke when receiving incoming
    /// remote messages</param>
    /// <param name="tcpPortProvider">Find port numbers if listenport is 0</param>
    /// <param name="streamingCodec">Streaming codec</param>
    internal StreamingTransportServer(
        IPAddress address,
        IObserver<TransportEvent<T>> remoteHandler,
        ITcpPortProvider tcpPortProvider,
        IStreamingCodec<T> streamingCodec)
    {
        _listener = new TcpListener(address, 0);
        _remoteObserver = remoteHandler;
        _tcpPortProvider = tcpPortProvider;
        _cancellationSource = new CancellationTokenSource();
        _streamingCodec = streamingCodec;
        _disposed = false;
    }

    /// <summary>
    /// Returns the listening endpoint for the TransportServer
    /// </summary>
    public IPEndPoint LocalEndpoint
    {
        get { return _listener.LocalEndpoint as IPEndPoint; }
    }

    /// <summary>
    /// Starts listening for incoming remote messages.
    /// </summary>
    public void Run()
    {
        FindAPortAndStartListener();
        _serverTask = Task.Run(() => StartServer());
    }

    /// <summary>
    /// Binds the listener to the first free port produced by the port provider.
    /// Throws (via Exceptions.Throw) when no port could be bound.
    /// </summary>
    private void FindAPortAndStartListener()
    {
        var foundAPort = false;
        var exception = new SocketException((int)SocketError.AddressAlreadyInUse);
        for (var enumerator = _tcpPortProvider.GetEnumerator();
             !foundAPort && enumerator.MoveNext();)
        {
            _listener = new TcpListener(LocalEndpoint.Address, enumerator.Current);
            try
            {
                _listener.Start();
                foundAPort = true;
            }
            catch (SocketException e)
            {
                exception = e; // remember the most recent failure for the error report below
            }
        }
        if (!foundAPort)
        {
            Exceptions.Throw(exception, "Could not find a port to listen on", LOGGER);
        }
        LOGGER.Log(Level.Info,
            String.Format("Listening on {0}", _listener.LocalEndpoint.ToString()));
    }

    /// <summary>
    /// Close the TransportServer and all open connections
    /// </summary>
    public void Dispose()
    {
        if (!_disposed)
        {
            _cancellationSource.Cancel();
            try
            {
                _listener.Stop();
            }
            catch (SocketException)
            {
                LOGGER.Log(Level.Info, "Disposing of transport server before listener is created.");
            }
            if (_serverTask != null)
            {
                // Give the TransportServer Task 500ms to shut down; ignore any timeout errors.
                // BUG FIX: an unconditional _serverTask.Wait() used to precede this timed
                // wait; it could block Dispose() indefinitely and rendered the timeout useless.
                try
                {
                    CancellationTokenSource serverDisposeTimeout = new CancellationTokenSource(500);
                    _serverTask.Wait(serverDisposeTimeout.Token);
                }
                catch (Exception e)
                {
                    Console.Error.WriteLine(e);
                }
                finally
                {
                    // Task.Dispose() is only legal once the task has reached a completed state.
                    if (_serverTask.IsCompleted)
                    {
                        _serverTask.Dispose();
                    }
                }
            }
        }
        _disposed = true;
    }

    /// <summary>
    /// Helper method to start TransportServer. This will
    /// be run in an asynchronous Task.
    /// </summary>
    /// <returns>An asynchronous Task for the running server.</returns>
    private async Task StartServer()
    {
        try
        {
            while (!_cancellationSource.Token.IsCancellationRequested)
            {
                TcpClient client = await _listener.AcceptTcpClientAsync().ConfigureAwait(false);
                ProcessClient(client).Forget(); // fire-and-forget: one reader loop per client
            }
        }
        catch (InvalidOperationException)
        {
            LOGGER.Log(Level.Info, "TransportServer has been closed.");
        }
        catch (OperationCanceledException)
        {
            LOGGER.Log(Level.Info, "TransportServer has been closed.");
        }
    }

    /// <summary>
    /// Receives event from connected TcpClient and invokes handler on the event.
    /// </summary>
    /// <param name="client">The connected client</param>
    private async Task ProcessClient(TcpClient client)
    {
        // Keep reading messages from client until they disconnect or timeout
        CancellationToken token = _cancellationSource.Token;
        using (ILink<T> link = new StreamingLink<T>(client, _streamingCodec))
        {
            while (!token.IsCancellationRequested)
            {
                T message = await link.ReadAsync(token);
                if (message == null)
                {
                    break; // remote side closed the connection
                }
                TransportEvent<T> transportEvent = new TransportEvent<T>(message, link);
                _remoteObserver.OnNext(transportEvent);
            }
            LOGGER.Log(Level.Error,
                "ProcessClient close the Link. IsCancellationRequested: " + token.IsCancellationRequested);
        }
    }
}
} | yunseong/incubator-reef | lang/cs/Org.Apache.REEF.Wake/Remote/Impl/StreamingTransportServer.cs | C# | apache-2.0 | 7,834 |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""mixup: Beyond Empirical Risk Minimization.
Adaption to SSL of MixUp: https://arxiv.org/abs/1710.09412
"""
import functools
import os
import tensorflow as tf
from absl import app
from absl import flags
from libml import data, utils, models
from libml.utils import EasyDict
FLAGS = flags.FLAGS
class Mixup(models.MultiModel):
    """Semi-supervised MixUp (adaptation of https://arxiv.org/abs/1710.09412).

    Labeled images are mixed together with their one-hot labels; unlabeled
    images are mixed using the model's own softmax predictions as
    (stop-gradient) soft targets.
    """

    def augment(self, x, l, beta, **kwargs):
        """Mixes each example (and its label) with the reversed batch.

        Args:
            x: image batch tensor (batch-first, NHWC-style rank 4).
            l: label-distribution batch (one-hot or soft), rank 2.
            beta: Beta-distribution parameter for the mixing coefficient.

        Returns:
            Tuple (mixed images, mixed label distributions).
        """
        del kwargs
        # One mixing coefficient per example, broadcastable over the
        # remaining three axes.
        mix = tf.distributions.Beta(beta, beta).sample([tf.shape(x)[0], 1, 1, 1])
        # Fold the coefficient into [0.5, 1] so each mixed example stays
        # dominated by its own source image.
        mix = tf.maximum(mix, 1 - mix)
        xmix = x * mix + x[::-1] * (1 - mix)
        lmix = l * mix[:, :, 0, 0] + l[::-1] * (1 - mix[:, :, 0, 0])
        return xmix, lmix

    def model(self, batch, lr, wd, ema, **kwargs):
        """Builds the TF1 training and inference graph.

        Args:
            batch: training batch size.
            lr: Adam learning rate.
            wd: weight-decay coefficient (rescaled by lr below and applied
                manually as a post-step kernel shrink).
            ema: decay rate for the exponential moving average of the weights.
            **kwargs: forwarded to the classifier and augment() (carries beta).

        Returns:
            EasyDict with the input placeholders, train_op and classify ops.
        """
        hwc = [self.dataset.height, self.dataset.width, self.dataset.colors]
        xt_in = tf.placeholder(tf.float32, [batch] + hwc, 'xt')  # For training
        x_in = tf.placeholder(tf.float32, [None] + hwc, 'x')
        y_in = tf.placeholder(tf.float32, [batch] + hwc, 'y')  # unlabeled batch
        l_in = tf.placeholder(tf.int32, [batch], 'labels')
        wd *= lr  # decay strength scales with the learning rate
        classifier = lambda x, **kw: self.classifier(x, **kw, **kwargs).logits
        def get_logits(x):
            logits = classifier(x, training=True)
            return logits
        # Supervised branch: mix labeled images with their one-hot labels.
        x, labels_x = self.augment(xt_in, tf.one_hot(l_in, self.nclass), **kwargs)
        logits_x = get_logits(x)
        # NOTE(review): UPDATE_OPS is snapshotted here, before the unlabeled
        # forward passes below, so their batch-norm updates appear to be
        # deliberately excluded from the train op -- confirm before changing.
        post_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
        # Unsupervised branch: pseudo-labels are the model's own predictions.
        y, labels_y = self.augment(y_in, tf.nn.softmax(get_logits(y_in)), **kwargs)
        labels_y = tf.stop_gradient(labels_y)  # no gradient through pseudo-labels
        logits_y = get_logits(y)
        loss_xe = tf.nn.softmax_cross_entropy_with_logits_v2(labels=labels_x, logits=logits_x)
        loss_xe = tf.reduce_mean(loss_xe)
        loss_xeu = tf.nn.softmax_cross_entropy_with_logits_v2(labels=labels_y, logits=logits_y)
        loss_xeu = tf.reduce_mean(loss_xeu)
        tf.summary.scalar('losses/xe', loss_xe)
        tf.summary.scalar('losses/xeu', loss_xeu)
        ema = tf.train.ExponentialMovingAverage(decay=ema)
        ema_op = ema.apply(utils.model_vars())
        ema_getter = functools.partial(utils.getter_ema, ema)
        post_ops.append(ema_op)
        # Manual weight decay: shrink convolution/dense kernels after each step.
        post_ops.extend([tf.assign(v, v * (1 - wd)) for v in utils.model_vars('classify') if 'kernel' in v.name])
        train_op = tf.train.AdamOptimizer(lr).minimize(loss_xe + loss_xeu, colocate_gradients_with_ops=True)
        with tf.control_dependencies([train_op]):
            train_op = tf.group(*post_ops)
        return EasyDict(
            xt=xt_in, x=x_in, y=y_in, label=l_in, train_op=train_op,
            classify_raw=tf.nn.softmax(classifier(x_in, training=False)),  # No EMA, for debugging.
            classify_op=tf.nn.softmax(classifier(x_in, getter=ema_getter, training=False)))
def main(argv):
    """Builds the dataset and Mixup model from flags, then runs training."""
    utils.setup_main()
    del argv  # Unused.
    dataset = data.DATASETS()[FLAGS.dataset]()
    # Default the number of downscalings from the image width unless overridden.
    scales = FLAGS.scales or (utils.ilog2(dataset.width) - 2)
    model = Mixup(
        os.path.join(FLAGS.train_dir, dataset.name),
        dataset,
        lr=FLAGS.lr,
        wd=FLAGS.wd,
        arch=FLAGS.arch,
        batch=FLAGS.batch,
        nclass=dataset.nclass,
        ema=FLAGS.ema,
        beta=FLAGS.beta,
        scales=scales,
        filters=FLAGS.filters,
        repeat=FLAGS.repeat)
    model.train(FLAGS.train_kimg << 10, FLAGS.report_kimg << 10)
if __name__ == '__main__':
    utils.setup_tf()
    # Mixup-specific hyper-parameter flags.
    flags.DEFINE_float('wd', 0.02, 'Weight decay.')
    flags.DEFINE_float('ema', 0.999, 'Exponential moving average of params.')
    flags.DEFINE_float('beta', 0.5, 'Mixup beta distribution.')
    flags.DEFINE_integer('scales', 0, 'Number of 2x2 downscalings in the classifier.')
    flags.DEFINE_integer('filters', 32, 'Filter size of convolutions.')
    flags.DEFINE_integer('repeat', 4, 'Number of residual layers per stage.')
    # Override defaults of flags declared in libml.
    FLAGS.set_default('dataset', 'cifar10.3@250-5000')
    FLAGS.set_default('batch', 64)
    FLAGS.set_default('lr', 0.002)
    FLAGS.set_default('train_kimg', 1 << 16)
    app.run(main)
| google-research/remixmatch | mixup.py | Python | apache-2.0 | 4,608 |
package connect
import (
"strings"
"testing"
)
// TestCatalogCommand_noTabs verifies that the command's help text contains no
// tab characters, which would render inconsistently in terminal output.
func TestCatalogCommand_noTabs(t *testing.T) {
	t.Parallel()
	helpText := New().Help()
	if strings.Contains(helpText, "\t") {
		t.Fatal("help has tabs")
	}
}
| mhausenblas/burry.sh | vendor/github.com/hashicorp/consul/command/connect/connect_test.go | GO | apache-2.0 | 191 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef ORC_STRIPE_STREAM_HH
#define ORC_STRIPE_STREAM_HH
#include "orc/Int128.hh"
#include "orc/OrcFile.hh"
#include "orc/Reader.hh"
#include "Timezone.hh"
#include "TypeImpl.hh"
namespace orc {
class RowReaderImpl;
/**
* StripeStream Implementation
*/
class StripeStreamsImpl: public StripeStreams {
private:
  const RowReaderImpl& reader;
  const proto::StripeInformation& stripeInfo;
  const proto::StripeFooter& footer;
  const uint64_t stripeIndex;  // index of this stripe, as supplied by the reader
  const uint64_t stripeStart;  // byte offset of the stripe's data in the file
  InputStream& input;
  const Timezone& writerTimezone;
  const Timezone& readerTimezone;

public:
  StripeStreamsImpl(const RowReaderImpl& reader, uint64_t index,
                    const proto::StripeInformation& stripeInfo,
                    const proto::StripeFooter& footer,
                    uint64_t stripeStart,
                    InputStream& input,
                    const Timezone& writerTimezone,
                    const Timezone& readerTimezone);

  virtual ~StripeStreamsImpl() override;

  // Columns selected for reading, as configured on the row reader.
  virtual const std::vector<bool> getSelectedColumns() const override;

  // Encoding used by the given column within this stripe.
  virtual proto::ColumnEncoding getEncoding(uint64_t columnId) const override;

  // Opens a seekable stream over one section (kind) of a column's data.
  virtual std::unique_ptr<SeekableInputStream>
  getStream(uint64_t columnId,
            proto::Stream_Kind kind,
            bool shouldStream) const override;

  MemoryPool& getMemoryPool() const override;
  const Timezone& getWriterTimezone() const override;
  const Timezone& getReaderTimezone() const override;
  std::ostream* getErrorStream() const override;
  bool getThrowOnHive11DecimalOverflow() const override;
  int32_t getForcedScaleOnHive11Decimal() const override;
};
/**
* StreamInformation Implementation
*/
class StreamInformationImpl: public StreamInformation {
private:
  StreamKind kind;  // stream kind (data, present, length, ...)
  uint64_t column;  // id of the column the stream belongs to
  uint64_t offset;  // absolute byte offset of the stream in the file
  uint64_t length;  // stream length in bytes

public:
  // Builds the description from the protobuf stream entry; the absolute
  // offset is computed and supplied by the caller.
  StreamInformationImpl(uint64_t _offset,
                        const proto::Stream& stream)
      : kind(static_cast<StreamKind>(stream.kind())),
        column(stream.column()),
        offset(_offset),
        length(stream.length()) {
    // PASS
  }

  ~StreamInformationImpl() override;

  StreamKind getKind() const override {
    return kind;
  }

  uint64_t getColumnId() const override {
    return column;
  }

  uint64_t getOffset() const override {
    return offset;
  }

  uint64_t getLength() const override {
    return length;
  }
};
/**
* StripeInformation Implementation
*/
class StripeInformationImpl : public StripeInformation {
  uint64_t offset;        // byte offset of the stripe in the file
  uint64_t indexLength;   // length of the index streams section
  uint64_t dataLength;    // length of the data streams section
  uint64_t footerLength;  // length of the stripe footer
  uint64_t numRows;       // number of rows in the stripe
  InputStream* stream;
  MemoryPool& memory;
  CompressionKind compression;
  uint64_t blockSize;
  // Lazily parsed stripe footer; populated on first use.
  mutable std::unique_ptr<proto::StripeFooter> stripeFooter;

  // Parses the stripe footer if it has not been loaded yet.
  void ensureStripeFooterLoaded() const;

public:
  StripeInformationImpl(uint64_t _offset,
                        uint64_t _indexLength,
                        uint64_t _dataLength,
                        uint64_t _footerLength,
                        uint64_t _numRows,
                        InputStream* _stream,
                        MemoryPool& _memory,
                        CompressionKind _compression,
                        uint64_t _blockSize
                        ) : offset(_offset),
                            indexLength(_indexLength),
                            dataLength(_dataLength),
                            footerLength(_footerLength),
                            numRows(_numRows),
                            stream(_stream),
                            memory(_memory),
                            compression(_compression),
                            blockSize(_blockSize) {
    // PASS
  }

  virtual ~StripeInformationImpl() override {
    // PASS
  }

  uint64_t getOffset() const override {
    return offset;
  }

  uint64_t getLength() const override {
    return indexLength + dataLength + footerLength;
  }

  uint64_t getIndexLength() const override {
    return indexLength;
  }

  uint64_t getDataLength() const override {
    return dataLength;
  }

  uint64_t getFooterLength() const override {
    return footerLength;
  }

  uint64_t getNumberOfRows() const override {
    return numRows;
  }

  uint64_t getNumberOfStreams() const override {
    ensureStripeFooterLoaded();
    return static_cast<uint64_t>(stripeFooter->streams_size());
  }

  std::unique_ptr<StreamInformation> getStreamInformation(uint64_t streamId) const override;

  ColumnEncodingKind getColumnEncoding(uint64_t colId) const override {
    ensureStripeFooterLoaded();
    return static_cast<ColumnEncodingKind>(stripeFooter->
                                           columns(static_cast<int>(colId))
                                           .kind());
  }

  uint64_t getDictionarySize(uint64_t colId) const override {
    ensureStripeFooterLoaded();
    // Fixed: the size was previously cast to ColumnEncodingKind (copy/paste
    // from getColumnEncoding) before being converted back to uint64_t, which
    // is undefined behavior for values outside the enum's range. Return the
    // dictionary size directly.
    return stripeFooter->columns(static_cast<int>(colId)).dictionarysize();
  }

  const std::string& getWriterTimezone() const override {
    ensureStripeFooterLoaded();
    return stripeFooter->writertimezone();
  }
};
}
#endif
| omalley/orc | c++/src/StripeStream.hh | C++ | apache-2.0 | 6,482 |
/*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.perf.commands;
import com.thoughtworks.go.config.Agent;
import com.thoughtworks.go.server.service.AgentRuntimeInfo;
import com.thoughtworks.go.server.service.AgentService;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Optional;
import java.util.UUID;
/**
 * Performance-test command that registers a synthetic agent with the server,
 * returning the registered agent's UUID.
 */
public class RegisterAgentCommand extends AgentPerformanceCommand {
    public RegisterAgentCommand(AgentService agentService) {
        this.agentService = agentService;
    }

    @Override
    Optional<String> execute() {
        return registerAgent();
    }

    /** Creates a uniquely named agent bound to the local host and requests its registration. */
    private Optional<String> registerAgent() {
        InetAddress host = getInetAddress();
        Agent agent = new Agent("Perf-Test-Agent-" + UUID.randomUUID(), host.getHostName(), host.getHostAddress(), UUID.randomUUID().toString());
        AgentRuntimeInfo runtimeInfo = AgentRuntimeInfo.fromServer(agent, false, "location", 233232L, "osx");
        agentService.requestRegistration(runtimeInfo);
        return Optional.ofNullable(agent.getUuid());
    }

    /** Resolves the local host address, wrapping the checked exception. */
    private InetAddress getInetAddress() {
        try {
            return InetAddress.getLocalHost();
        } catch (UnknownHostException e) {
            throw new RuntimeException(e);
        }
    }
}
| marques-work/gocd | server/src/test-shared/java/com/thoughtworks/go/server/perf/commands/RegisterAgentCommand.java | Java | apache-2.0 | 1,951 |
/*
* Software License Agreement (Apache License)
*
* Copyright (c) 2014, Southwest Research Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef CERES_COSTS_UTILS_TEST_HPP_
#define CERES_COSTS_UTILS_TEST_HPP_
#include "ceres/ceres.h"
#include "ceres/rotation.h"
#include <industrial_extrinsic_cal/basic_types.h>
namespace industrial_extrinsic_cal
{
/* local prototypes of helper functions */
/*! \brief print a quaternion plus position as a homogeneous transform
* \param qx quaternion x value
* \param qy quaternion y value
* \param qz quaternion z value
* \param qw quaternion w value
* \param tx position x value
* \param ty position y value
* \param tz position z value
*/
void printQTasH(double qx, double qy, double qz, double qw, double tx, double ty, double tz);
/*! \brief print an angle axis transform as a homogeneous transform
* \param x angle axis x value
* \param y angle axis y value
* \param z angle axis z value
* \param tx position x value
* \param ty position y value
* \param tz position z value
*/
void printAATasH(double ax, double ay, double az, double tx, double ty, double tz);
/*! \brief print angle axis to homogeneous transform inverse
* \param ax angle axis x value
* \param ay angle axis y value
* \param az angle axis z value
* \param tx position x value
* \param ty position y value
* \param tz position z value
*/
void printAATasHI(double ax, double ay, double az, double tx, double ty, double tz);
/*! \brief print angle axis as euler angles
* \param ax angle axis x value
* \param ay angle axis y value
* \param az angle axis z value
*/
void printAAasEuler(double ax, double ay, double az);
/*! \brief print Camera Parameters
* \param CameraParameters include intrinsic and extrinsic
* \param words to provide as a header
*/
void printCameraParameters(CameraParameters C, std::string words);
/*! \brief computes image of point in cameras image plane
* \param C both intrinsic and extrinsic camera parameters
* \param P the point to be projected into image
*/
/*! \brief projects a 3D point into a camera's image plane applying the plumb-bob lens distortion model
 * \param camera_parameters intrinsic and extrinsic camera parameters
 * \param point the 3D point to project
 */
Observation projectPointWithDistortion(CameraParameters camera_parameters, Point3d point);
/*! \brief projects a 3D point into the image plane of an ideal (distortion-free) pinhole camera
 * \param camera_params intrinsic and extrinsic camera parameters
 * \param point_to_project the 3D point to project
 */
Observation projectPointNoDistortion(CameraParameters camera_params, Point3d point_to_project);
/*! \brief Projects point P into camera C's image plane, applying the
 *  plumb-bob (radial k1..k3 + tangential p1,p2) distortion model.
 *  Uses the pose-broadcast extrinsics (pb_extrinsics) as the camera pose.
 */
Observation projectPointWithDistortion(CameraParameters C, Point3d P)
{
  double p[3];   // point in camera frame
  double pt[3];  // point in world frame
  pt[0] = P.x;
  pt[1] = P.y;
  pt[2] = P.z;
  /* transform point into camera frame */
  /* note, camera transform takes points from camera frame into world frame */
  double aa[3];
  aa[0] = C.pb_extrinsics[0];
  aa[1] = C.pb_extrinsics[1];
  aa[2] = C.pb_extrinsics[2];
  ceres::AngleAxisRotatePoint(aa, pt, p);
  // apply camera translation
  double xp1 = p[0] + C.pb_extrinsics[3];
  double yp1 = p[1] + C.pb_extrinsics[4];
  double zp1 = p[2] + C.pb_extrinsics[5];
  // perspective division onto the normalized image plane
  double xp = xp1 / zp1;
  double yp = yp1 / zp1;
  // calculate terms for polynomial distortion
  double r2 = xp * xp + yp * yp;
  double r4 = r2 * r2;
  double r6 = r2 * r4;
  double xp2 = xp * xp; /* temporary variables square of others */
  double yp2 = yp * yp;
  /* apply the distortion coefficients to refine pixel location */
  double xpp = xp + C.distortion_k1 * r2 * xp + C.distortion_k2 * r4 * xp + C.distortion_k3 * r6 * xp +
               C.distortion_p2 * (r2 + 2 * xp2) + 2 * C.distortion_p1 * xp * yp;
  double ypp = yp + C.distortion_k1 * r2 * yp + C.distortion_k2 * r4 * yp + C.distortion_k3 * r6 * yp +
               C.distortion_p1 * (r2 + 2 * yp2) + 2 * C.distortion_p2 * xp * yp;
  /* perform projection using focal length and camera center into image plane */
  Observation O;
  O.point_id = 0;
  O.image_loc_x = C.focal_length_x * xpp + C.center_x;
  O.image_loc_y = C.focal_length_y * ypp + C.center_y;
  return (O);
}
/*! \brief Projects point P into camera C's image plane with an ideal
 *  (distortion-free) pinhole model, using the angle_axis/position extrinsics.
 */
Observation projectPointNoDistortion(CameraParameters C, Point3d P)
{
  double p[3];                  // rotated into camera frame
  double point[3];              // world location of point
  double aa[3];                 // angle axis representation of camera transform
  double tx = C.position[0];    // location of origin in camera frame x
  double ty = C.position[1];    // location of origin in camera frame y
  double tz = C.position[2];    // location of origin in camera frame z
  double fx = C.focal_length_x; // focal length x
  double fy = C.focal_length_y; // focal length y
  double cx = C.center_x;       // optical center x
  double cy = C.center_y;       // optical center y
  aa[0] = C.angle_axis[0];
  aa[1] = C.angle_axis[1];
  aa[2] = C.angle_axis[2];
  point[0] = P.x;
  point[1] = P.y;
  point[2] = P.z;
  /** rotate and translate points into camera frame */
  ceres::AngleAxisRotatePoint(aa, point, p);
  // apply camera translation
  double xp1 = p[0] + tx;
  double yp1 = p[1] + ty;
  double zp1 = p[2] + tz;
  // scale into the image plane by distance away from camera
  double xp = xp1 / zp1;
  double yp = yp1 / zp1;
  // perform projection using focal length and camera center into image plane
  Observation O;
  // Fixed: point_id was left uninitialized here, unlike
  // projectPointWithDistortion which sets it to 0; reads of the
  // uninitialized member were undefined behavior.
  O.point_id = 0;
  O.image_loc_x = fx * xp + cx;
  O.image_loc_y = fy * yp + cy;
  return (O);
}
} // end of namespace
#endif
| drchrislewis/industrial_calibration | industrial_extrinsic_cal/include/industrial_extrinsic_cal/ceres_costs_utils_test.hpp | C++ | apache-2.0 | 5,769 |
/*
* (C) Johannes Kepler University Linz, Austria, 2005-2013
* Institute for Systems Engineering and Automation (SEA)
*
* The software may only be used for academic purposes (teaching, scientific
* research). Any redistribution or commercialization of the software program
* and documentation (or any part thereof) requires prior written permission of
* the JKU. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* This software program and documentation are copyrighted by Johannes Kepler
* University Linz, Austria (the JKU). The software program and documentation
* are supplied AS IS, without any accompanying services from the JKU. The JKU
* does not warrant that the operation of the program will be uninterrupted or
* error-free. The end-user understands that the program was developed for
* research purposes and is advised not to rely exclusively on the program for
* any reason.
*
* IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
* SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST PROFITS,
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF THE
* AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. THE AUTHOR
* SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
* THE SOFTWARE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, AND THE AUTHOR HAS
* NO OBLIGATIONS TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS,
* OR MODIFICATIONS.
*/
/*
* ArtifactIsNotACollectionException.java created on 13.03.2013
*
* (c) alexander noehrer
*/
package at.jku.sea.cloud.exceptions;
/**
* @author alexander noehrer
*/
public class ArtifactIsNotACollectionException extends RuntimeException {
  // serialization id required because RuntimeException implements Serializable
  private static final long serialVersionUID = 1L;

  /**
   * Signals that the artifact identified by the given id/version pair was
   * accessed as a collection but is not one.
   *
   * @param version the version in which the artifact was accessed
   * @param id the id of the artifact that is not a collection
   */
  public ArtifactIsNotACollectionException(final long version, final long id) {
    super("artifact (id=" + id + ", version=" + version + ") is not a collection");
  }
}
| OnurKirkizoglu/master_thesis | at.jku.sea.cloud/src/main/java/at/jku/sea/cloud/exceptions/ArtifactIsNotACollectionException.java | Java | apache-2.0 | 2,111 |
/*
* Copyright 2010-2012 Ning, Inc.
*
* Ning licenses this file to you under the Apache License, version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.ning.metrics.collector.filtering;
import com.ning.metrics.collector.endpoint.ParsedRequest;
import org.testng.Assert;
import org.testng.annotations.Test;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Pattern;
/**
 * Unit tests for {@link PatternSetFilter} and {@link EventInclusionFilter},
 * covering null field values, empty pattern sets and single/multiple patterns.
 */
public class TestPatternSetFilter
{
    /** A null extracted field value must never pass the filter. */
    @Test(groups = "fast")
    public void testNullValue() throws Exception
    {
        final Filter<ParsedRequest> filter = new PatternSetFilter(createFieldExtractor(null), createPatternSet("pattern1", "pattern2"));
        Assert.assertEquals(filter.passesFilter(null, null), false);
    }

    /** With no patterns at all, nothing passes the filter. */
    @Test(groups = "fast")
    public void testEmptySetPatternEventRESTRequestFilter() throws Exception
    {
        final Filter<ParsedRequest> filter = new PatternSetFilter(createFieldExtractor("test-host"), Collections.<Pattern>emptySet());
        Assert.assertEquals(filter.passesFilter(null, null), false);
    }

    /** The filter passes exactly when the extracted field matches the pattern. */
    @Test(groups = "fast")
    public void testSinglePatternEventRESTRequestFilter() throws Exception
    {
        final Filter<ParsedRequest> filterShouldMatch = new PatternSetFilter(createFieldExtractor("test-host"), createPatternSet("test-host"));
        Assert.assertEquals(filterShouldMatch.passesFilter(null, null), true);
        final Filter<ParsedRequest> filterDoesNotMatch = new PatternSetFilter(createFieldExtractor("test-host"), createPatternSet("mugen"));
        Assert.assertEquals(filterDoesNotMatch.passesFilter(null, null), false);
    }

    /** With several patterns, a single matching pattern is sufficient. */
    @Test(groups = "fast")
    public void testMultiplePatternEventRESTRequestFilter() throws Exception
    {
        final Filter<ParsedRequest> trueFilter = new PatternSetFilter(createFieldExtractor("test-host"), createPatternSet("test-host", "nothing"));
        Assert.assertTrue(trueFilter.passesFilter(null, null));
        final Filter<ParsedRequest> falseFilter = new PatternSetFilter(createFieldExtractor("test-host"), createPatternSet("mugen", "nothing"));
        Assert.assertFalse(falseFilter.passesFilter(null, null));
    }

    // NOTE(review): the expectations below are inverted relative to
    // PatternSetFilter above - passesFilter() is expected to return false on
    // a match and true on a non-match, so EventInclusionFilter apparently
    // answers "should this event be filtered out?". Confirm against the
    // EventInclusionFilter implementation.
    @Test(groups = "fast")
    public void testSinglePatternEventInclusionFilter() throws Exception
    {
        final Filter<ParsedRequest> filterShouldMatch = new EventInclusionFilter(createFieldExtractor("test-host"), createPatternSet("test-host"));
        Assert.assertEquals(filterShouldMatch.passesFilter(null, null), false);
        final Filter<ParsedRequest> filterDoesNotMatch = new EventInclusionFilter(createFieldExtractor("test-host"), createPatternSet("mugen"));
        Assert.assertEquals(filterDoesNotMatch.passesFilter(null, null), true);
    }

    /** Compiles each string into a {@link Pattern} and collects them into a set. */
    private Set<Pattern> createPatternSet(final String... patterns)
    {
        final Set<Pattern> patternSet = new HashSet<Pattern>();
        for (final String str : patterns) {
            patternSet.add(Pattern.compile(str));
        }
        return patternSet;
    }

    /** Returns a FieldExtractor stub that always extracts the given value. */
    private FieldExtractor createFieldExtractor(final String value)
    {
        return new FieldExtractor()
        {
            @Override
            public String getField(final String eventName, final ParsedRequest annotation)
            {
                return value;
            }
        };
    }
}
# Copyright (C) 2014-2016 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Mongo
  module Operation
    # Mixin for operations that may carry a write concern.  Provides a helper
    # that folds the write concern into the operation's selector when the
    # connected server supports it.
    #
    # @since 2.4.0
    module TakesWriteConcern

      private

      # Returns the selector with a +writeConcern+ entry merged in when a
      # write concern is present and the server advertises support (gated on
      # the collation feature flag); otherwise returns the selector untouched.
      def update_selector_for_write_concern(sel, server)
        return sel unless write_concern && server.features.collation_enabled?

        sel.merge(writeConcern: write_concern.options)
      end
    end
  end
end
| estolfo/mongo-ruby-driver | lib/mongo/operation/takes_write_concern.rb | Ruby | apache-2.0 | 1,031 |
// Copyright (c) Microsoft Open Technologies, Inc. All rights reserved. See License.txt in the project root for license information.
namespace System.Web.Razor.Parser
{
/// <summary>
/// Bit flags that control how the Razor parser balances matched delimiters;
/// members may be combined with bitwise OR.
/// </summary>
[Flags]
public enum BalancingModes
{
    None = 0,
    BacktrackOnFailure = 1,         // presumably rewinds input on a failed balance - confirm at call sites
    NoErrorOnFailure = 2,           // presumably suppresses the parse error on failure - confirm at call sites
    AllowCommentsAndTemplates = 4,
    AllowEmbeddedTransitions = 8
}
}
| Terminator-Aaron/Katana | aspnetwebsrc/System.Web.Razor/Parser/BalancingModes.cs | C# | apache-2.0 | 386 |
/*
* RemoveRelationKnowhow.java
* Created on 2013/06/28
*
* Copyright (C) 2011-2013 Nippon Telegraph and Telephone Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tubame.knowhow.plugin.ui.view.remove;
import tubame.common.util.CmnStringUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import tubame.knowhow.plugin.logic.KnowhowManagement;
import tubame.knowhow.plugin.model.view.CategoryViewType;
import tubame.knowhow.plugin.model.view.KnowhowDetailType;
import tubame.knowhow.plugin.model.view.KnowhowViewType;
import tubame.knowhow.plugin.model.view.PortabilityKnowhowListViewOperation;
import tubame.knowhow.plugin.ui.editor.multi.MaintenanceKnowhowMultiPageEditor;
import tubame.knowhow.plugin.ui.editor.multi.docbook.KnowhowDetailEditor;
import tubame.knowhow.util.PluginUtil;
/**
* Make a related item deletion process know-how information.<br/>
* Delete stick know-how related to the item to be deleted,<br/>
* the item that you want to match the key of its own from the reference list of
* key know-how detailed information,<br/>
* the parent category.<br/>
*/
public class RemoveRelationKnowhow implements RemoveRelationItemStrategy {
    /** Logger */
    private static final Logger LOGGER = LoggerFactory
            .getLogger(RemoveRelationKnowhow.class);
    /** Know-how item displayed in the entry view. */
    private final KnowhowViewType knowhowViewType;
    /** Item that is being deleted. */
    private final PortabilityKnowhowListViewOperation portabilityKnowhowListViewOperation;

    /**
     * Constructor.<br/>
     *
     * @param portabilityKnowhowListViewOperation
     *            Deleted items
     */
    public RemoveRelationKnowhow(
            PortabilityKnowhowListViewOperation portabilityKnowhowListViewOperation) {
        this.portabilityKnowhowListViewOperation = portabilityKnowhowListViewOperation;
        this.knowhowViewType = (KnowhowViewType) portabilityKnowhowListViewOperation
                .getKnowhowViewType();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void removeRelationItem() {
        RemoveRelationKnowhow.LOGGER.debug(CmnStringUtil.EMPTY);
        removeKnowhowDetail();
        removeEntryViewItem();
    }

    /**
     * Drops this entry's register key from the reference key list of its
     * parent category in the entry view.
     */
    private void removeEntryViewItem() {
        CategoryViewType parentCategory = (CategoryViewType) portabilityKnowhowListViewOperation
                .getParent().getKnowhowViewType();
        String matchedKey = null;
        for (String refKey : parentCategory.getKnowhowRefKeies()) {
            if (knowhowViewType.getRegisterKey().equals(refKey)) {
                matchedKey = refKey;
            }
        }
        if (matchedKey != null) {
            parentCategory.getKnowhowRefKeies().remove(matchedKey);
        }
    }

    /**
     * Removes from the global know-how detail list the detail data whose id
     * matches this entry's detail reference key, then resets the detail
     * editor if it was showing that data.
     */
    private void removeKnowhowDetail() {
        KnowhowDetailType matchedDetail = null;
        for (KnowhowDetailType candidate : KnowhowManagement
                .getKnowhowDetailTypes()) {
            if (candidate.getKnowhowDetailId().equals(
                    knowhowViewType.getKnowhowDetailRefKey())) {
                matchedDetail = candidate;
            }
        }
        if (matchedDetail != null) {
            KnowhowManagement.getKnowhowDetailTypes().remove(matchedDetail);
            clearDetailEditorIfShowing(matchedDetail);
        }
    }

    /**
     * Clears the know-how detail page of the multi page editor when it is
     * currently displaying the removed detail data.
     *
     * @param removedDetail
     *            the detail data that was just removed
     */
    private void clearDetailEditorIfShowing(KnowhowDetailType removedDetail) {
        MaintenanceKnowhowMultiPageEditor multiPageEditor = PluginUtil
                .getKnowhowEditor();
        KnowhowDetailEditor detailEditor = multiPageEditor
                .getKnowhowDetailEditor();
        if (detailEditor.getKnowhowDetailType() == null) {
            return;
        }
        if (removedDetail.getKnowhowDetailId().equals(
                detailEditor.getKnowhowDetailType().getKnowhowDetailId())) {
            multiPageEditor.clearKnowhowDetail();
        }
    }
}
| azkaoru/migration-tool | src/tubame.knowhow/src/tubame/knowhow/plugin/ui/view/remove/RemoveRelationKnowhow.java | Java | apache-2.0 | 5,041 |
/*
* Copyright (C) 2013 Leszek Mzyk
* Modifications Copyright (C) 2015 eccyan <g00.eccyan@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.eccyan.widget;
import android.content.Context;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.util.AttributeSet;
/**
* A ViewPager subclass enabling infinte scrolling of the viewPager elements
*
* When used for paginating views (in opposite to fragments), no code changes
* should be needed only change xml's from <android.support.v4.view.ViewPager>
* to <com.imbryk.viewPager.LoopViewPager>
*
* If "blinking" can be seen when paginating to first or last view, simply call
* seBoundaryCaching( true ), or change DEFAULT_BOUNDARY_CASHING to true
*
* When using a FragmentPagerAdapter or FragmentStatePagerAdapter,
* additional changes in the adapter must be done.
* The adapter must be prepared to create 2 extra items e.g.:
*
* The original adapter creates 4 items: [0,1,2,3]
* The modified adapter will have to create 6 items [0,1,2,3,4,5]
* with mapping realPosition=(position-1)%count
* [0->3, 1->0, 2->1, 3->2, 4->3, 5->0]
*/
public class SpinningViewPager extends ViewPager {

    private static final boolean DEFAULT_BOUNDARY_CASHING = false;

    /** Listener supplied by client code; it is notified with *real* positions. */
    OnPageChangeListener mOuterPageChangeListener;
    /** Wrapping adapter that adds one boundary page at each end for looping. */
    private LoopPagerAdapterWrapper mAdapter;
    /** Whether boundary views are cached; see {@link #setBoundaryCaching}. */
    private boolean mBoundaryCaching = DEFAULT_BOUNDARY_CASHING;

    /**
     * helper function which may be used when implementing FragmentPagerAdapter
     *
     * @param position inner (adapter) position, including the boundary pages
     * @param count number of real items
     * @return (position-1)%count
     */
    public static int toRealPosition( int position, int count ){
        position = position-1;
        if( position < 0 ){
            position += count;
        }else{
            position = position%count;
        }
        return position;
    }

    /**
     * If set to true, the boundary views (i.e. first and last) will never be destroyed
     * This may help to prevent "blinking" of some views
     *
     * @param flag true to cache the boundary views
     */
    public void setBoundaryCaching(boolean flag) {
        mBoundaryCaching = flag;
        if (mAdapter != null) {
            mAdapter.setBoundaryCaching(flag);
        }
    }

    @Override
    public void setAdapter(PagerAdapter adapter) {
        // Wrap the client adapter so the extra boundary pages are managed here.
        mAdapter = new LoopPagerAdapterWrapper(adapter);
        mAdapter.setBoundaryCaching(mBoundaryCaching);
        super.setAdapter(mAdapter);
    }

    @Override
    public PagerAdapter getAdapter() {
        // Expose the client's adapter, not the internal wrapper.
        return mAdapter != null ? mAdapter.getRealAdapter() : mAdapter;
    }

    @Override
    public int getCurrentItem() {
        // Translate the inner position back to the real item index.
        return mAdapter != null ? mAdapter.toRealPosition(super.getCurrentItem()) : 0;
    }

    public void setCurrentItem(int item, boolean smoothScroll) {
        // Translate the real item index to the inner (wrapper) position.
        int realItem = mAdapter.toInnerPosition(item);
        super.setCurrentItem(realItem, smoothScroll);
    }

    @Override
    public void setCurrentItem(int item) {
        if (getCurrentItem() != item) {
            setCurrentItem(item, true);
        }
    }

    @Override
    public void setOnPageChangeListener(OnPageChangeListener listener) {
        // Keep the client listener aside; the internal listener below forwards
        // translated positions to it.
        mOuterPageChangeListener = listener;
    };

    public SpinningViewPager(Context context) {
        super(context);
        init();
    }

    public SpinningViewPager(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    private void init() {
        // Install the internal listener that translates inner -> real positions.
        super.setOnPageChangeListener(onPageChangeListener);
    }

    private OnPageChangeListener onPageChangeListener = new OnPageChangeListener() {
        private float mPreviousOffset = -1;
        private float mPreviousPosition = -1;

        @Override
        public void onPageSelected(int position) {
            // Forward the real position, suppressing duplicate notifications
            // for the same real page (boundary pages map to existing pages).
            int realPosition = mAdapter.toRealPosition(position);
            if (mPreviousPosition != realPosition) {
                mPreviousPosition = realPosition;
                if (mOuterPageChangeListener != null) {
                    mOuterPageChangeListener.onPageSelected(realPosition);
                }
            }
        }

        @Override
        public void onPageScrolled(int position, float positionOffset,
                                   int positionOffsetPixels) {
            int realPosition = position;
            if (mAdapter != null) {
                realPosition = mAdapter.toRealPosition(position);
                // When a boundary (fake) page has settled (offset 0 twice in a
                // row), jump without animation to the corresponding real page.
                if (positionOffset == 0
                        && mPreviousOffset == 0
                        && (position == 0 || position == mAdapter.getCount() - 1)) {
                    setCurrentItem(realPosition, false);
                }
            }
            mPreviousOffset = positionOffset;
            if (mOuterPageChangeListener != null) {
                mOuterPageChangeListener.onPageScrolled(realPosition,
                        positionOffset, positionOffsetPixels);
            }
        }

        @Override
        public void onPageScrollStateChanged(int state) {
            if (mAdapter != null) {
                int position = SpinningViewPager.super.getCurrentItem();
                int realPosition = mAdapter.toRealPosition(position);
                // Once scrolling has settled on a boundary page, snap to the
                // real page it represents.
                if (state == ViewPager.SCROLL_STATE_IDLE
                        && (position == 0 || position == mAdapter.getCount() - 1)) {
                    setCurrentItem(realPosition, false);
                }
            }
            if (mOuterPageChangeListener != null) {
                mOuterPageChangeListener.onPageScrollStateChanged(state);
            }
        }
    };
}
| eccyan/SpinningTabStrip | spinning/src/main/java/com/eccyan/widget/SpinningViewPager.java | Java | apache-2.0 | 6,099 |
/* $Id$
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.etch.util.core.io;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import org.apache.etch.util.FlexBuffer;
import org.apache.etch.util.core.Who;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
/** Test UdpConnection. */
/** Test UdpConnection. */
public class TestUdpConnection
{
    // NOTE(review): the test and fixture methods all carry @Ignore, so this
    // suite is disabled; @Ignore on @Before/@After methods has no effect in
    // JUnit 4 - confirm whether the annotations should be removed instead.

    /** Sets up two UDP connections wired to each other's port. @throws Exception */
    @Before @Ignore
    public void init() throws Exception
    {
        aph = new MyPacketHandler();
        ac = new UdpConnection( "udp://localhost:4011" );
        ac.setSession( aph );
        ac.start();
        ac.waitUp( 4000 );
        System.out.println( "ac up" );
        bph = new MyPacketHandler();
        bc = new UdpConnection( "udp://localhost:4010" );
        bc.setSession( bph );
        bc.start();
        bc.waitUp( 4000 );
        System.out.println( "bc up" );
    }

    /** Closes both connections without waiting. @throws Exception */
    @After @Ignore
    public void fini() throws Exception
    {
        ac.close( false );
        bc.close( false );
    }

    // handler and connection "a" (sender side in the test below)
    private MyPacketHandler aph;
    private UdpConnection ac;
    // handler and connection "b" (receiver side in the test below)
    private MyPacketHandler bph;
    private UdpConnection bc;

    /**
     * Sends a 5-byte packet from ac and verifies bc's handler receives an
     * equal but distinct buffer. @throws Exception
     */
    @Test @Ignore
    public void blah() throws Exception
    {
        assertEquals( What.UP, aph.what );
        assertEquals( What.UP, bph.what );
        FlexBuffer buf = new FlexBuffer();
        buf.put( 1 );
        buf.put( 2 );
        buf.put( 3 );
        buf.put( 4 );
        buf.put( 5 );
        buf.setIndex( 0 );
        ac.transportPacket( null, buf );
        Thread.sleep( 500 );
        assertEquals( What.PACKET, bph.what );
        assertNotNull( bph.xsender );
        assertNotSame( buf, bph.xbuf );
        assertEquals( 0, bph.xbuf.index() );
        assertEquals( 5, bph.xbuf.length() );
        assertEquals( 1, bph.xbuf.get() );
        assertEquals( 2, bph.xbuf.get() );
        assertEquals( 3, bph.xbuf.get() );
        assertEquals( 4, bph.xbuf.get() );
        assertEquals( 5, bph.xbuf.get() );
    }

    /** Lifecycle states observed by the packet handler. */
    public enum What
    {
        /** */ UP,
        /** */ PACKET,
        /** */ DOWN
    }

    /**
     * receive packets from the udp connection
     */
    public static class MyPacketHandler implements SessionPacket
    {
        /** last observed lifecycle event */
        public What what;
        /** sender of the last received packet */
        public Who xsender;
        /** buffer of the last received packet */
        public FlexBuffer xbuf;

        public void sessionPacket( Who sender, FlexBuffer buf ) throws Exception
        {
            // a packet may only arrive after the session came up
            assertEquals( What.UP, what );
            what = What.PACKET;
            xsender = sender;
            xbuf = buf;
        }

        public void sessionControl( Object control, Object value )
        {
            // ignore.
        }

        public void sessionNotify( Object event )
        {
            if (event.equals( Session.UP ))
            {
                // UP must be the first event observed
                assertNull( what );
                what = What.UP;
                return;
            }
            if (event.equals( Session.DOWN ))
            {
                // DOWN may only follow UP or PACKET
                assertTrue( what == What.UP || what == What.PACKET );
                what = What.DOWN;
                return;
            }
        }

        public Object sessionQuery( Object query )
        {
            // ignore.
            return null;
        }
    }
}
| OBIGOGIT/etch | util/src/test/java/org/apache/etch/util/core/io/TestUdpConnection.java | Java | apache-2.0 | 3,671 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package org.apache.polygene.library.sql.generator.implementation.grammar.builders.query;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import org.apache.polygene.library.sql.generator.grammar.builders.query.OrderByBuilder;
import org.apache.polygene.library.sql.generator.grammar.query.OrderByClause;
import org.apache.polygene.library.sql.generator.grammar.query.SortSpecification;
import org.apache.polygene.library.sql.generator.implementation.grammar.common.SQLBuilderBase;
import org.apache.polygene.library.sql.generator.implementation.grammar.query.OrderByClauseImpl;
import org.apache.polygene.library.sql.generator.implementation.transformation.spi.SQLProcessorAggregator;
/**
* @author Stanislav Muhametsin
*/
public class OrderByBuilderImpl extends SQLBuilderBase
    implements OrderByBuilder
{
    /** Sort specifications collected so far, in insertion order. */
    private final List<SortSpecification> _sortSpecs;

    public OrderByBuilderImpl( SQLProcessorAggregator processor )
    {
        super( processor );
        this._sortSpecs = new ArrayList<>();
    }

    /**
     * Appends the given sort specifications, after validating that none of
     * them is {@code null} (nothing is added when validation fails).
     */
    public OrderByBuilder addSortSpecs( SortSpecification... specs )
    {
        for( SortSpecification candidate : specs )
        {
            Objects.requireNonNull( candidate, "specification" );
        }
        Collections.addAll( this._sortSpecs, specs );
        return this;
    }

    /** Returns a read-only view of the collected sort specifications. */
    public List<SortSpecification> getSortSpecs()
    {
        return Collections.unmodifiableList( this._sortSpecs );
    }

    /** Builds the ORDER BY clause from the collected specifications. */
    public OrderByClause createExpression()
    {
        return new OrderByClauseImpl( this.getProcessor(), this._sortSpecs );
    }
}
| apache/zest-qi4j | libraries/sql-generator/src/main/java/org/apache/polygene/library/sql/generator/implementation/grammar/builders/query/OrderByBuilderImpl.java | Java | apache-2.0 | 2,500 |
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: micro/go-plugins/registry/gossip/proto/gossip.proto
package gossip
import (
fmt "fmt"
proto "github.com/golang/protobuf/proto"
math "math"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
// NOTE(review): this file is emitted by protoc-gen-go ("DO NOT EDIT" header
// above); changes will be lost on regeneration - edit gossip.proto instead.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package

// Update is the message broadcast
type Update struct {
	// time to live for entry
	Expires uint64 `protobuf:"varint,1,opt,name=expires,proto3" json:"expires,omitempty"`
	// type of update
	Type int32 `protobuf:"varint,2,opt,name=type,proto3" json:"type,omitempty"`
	// what action is taken
	Action int32 `protobuf:"varint,3,opt,name=action,proto3" json:"action,omitempty"`
	// any other associated metadata about the data
	Metadata map[string]string `protobuf:"bytes,6,rep,name=metadata,proto3" json:"metadata,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// the payload data;
	Data []byte `protobuf:"bytes,7,opt,name=data,proto3" json:"data,omitempty"`
	// standard protobuf runtime bookkeeping fields
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message to its zero value.
func (m *Update) Reset() { *m = Update{} }

// String renders the message in the compact proto text format.
func (m *Update) String() string { return proto.CompactTextString(m) }

// ProtoMessage tags Update as a protobuf message.
func (*Update) ProtoMessage() {}

// Descriptor returns the compressed file descriptor and the message's index
// path within it.
func (*Update) Descriptor() ([]byte, []int) {
	return fileDescriptor_e81db501087fb3b4, []int{0}
}

// The XXX_* methods delegate to the proto runtime's table-driven codec.
func (m *Update) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_Update.Unmarshal(m, b)
}
func (m *Update) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_Update.Marshal(b, m, deterministic)
}
func (m *Update) XXX_Merge(src proto.Message) {
	xxx_messageInfo_Update.Merge(m, src)
}
func (m *Update) XXX_Size() int {
	return xxx_messageInfo_Update.Size(m)
}
func (m *Update) XXX_DiscardUnknown() {
	xxx_messageInfo_Update.DiscardUnknown(m)
}

var xxx_messageInfo_Update proto.InternalMessageInfo
// GetExpires returns the entry's time to live, or 0 for a nil receiver.
func (m *Update) GetExpires() uint64 {
	if m != nil {
		return m.Expires
	}
	return 0
}

// GetType returns the update type, or 0 for a nil receiver.
func (m *Update) GetType() int32 {
	if m != nil {
		return m.Type
	}
	return 0
}

// GetAction returns the action taken, or 0 for a nil receiver.
func (m *Update) GetAction() int32 {
	if m != nil {
		return m.Action
	}
	return 0
}

// GetMetadata returns the associated metadata, or nil for a nil receiver.
func (m *Update) GetMetadata() map[string]string {
	if m != nil {
		return m.Metadata
	}
	return nil
}

// GetData returns the payload bytes, or nil for a nil receiver.
func (m *Update) GetData() []byte {
	if m != nil {
		return m.Data
	}
	return nil
}

// init registers the message and its map entry type with the proto runtime.
func init() {
	proto.RegisterType((*Update)(nil), "gossip.Update")
	proto.RegisterMapType((map[string]string)(nil), "gossip.Update.MetadataEntry")
}
// init registers the compressed file descriptor under the proto file path.
func init() {
	proto.RegisterFile("micro/go-plugins/registry/gossip/proto/gossip.proto", fileDescriptor_e81db501087fb3b4)
}

// fileDescriptor_e81db501087fb3b4 is the gzipped FileDescriptorProto for
// gossip.proto; generated data - regenerate rather than edit by hand.
var fileDescriptor_e81db501087fb3b4 = []byte{
	// 223 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x8f, 0x41, 0x4b, 0x03, 0x31,
	0x10, 0x85, 0x49, 0xb7, 0x4d, 0xed, 0xa8, 0x20, 0x83, 0x48, 0x10, 0x0f, 0x8b, 0xa7, 0xbd, 0xb8,
	0x01, 0x7b, 0x29, 0x7a, 0xf6, 0xe8, 0x25, 0xe0, 0x0f, 0x88, 0x6d, 0x08, 0xc1, 0x76, 0x13, 0x92,
	0xa9, 0x98, 0x9f, 0xea, 0xbf, 0x91, 0x26, 0x51, 0xf0, 0xf6, 0xbe, 0x99, 0x37, 0xbc, 0x37, 0xb0,
	0x3e, 0xb8, 0x6d, 0xf4, 0xd2, 0xfa, 0x87, 0xb0, 0x3f, 0x5a, 0x37, 0x25, 0x19, 0x8d, 0x75, 0x89,
	0x62, 0x96, 0xd6, 0xa7, 0xe4, 0x82, 0x0c, 0xd1, 0x93, 0x6f, 0x30, 0x16, 0x40, 0x5e, 0xe9, 0xfe,
	0x9b, 0x01, 0x7f, 0x0b, 0x3b, 0x4d, 0x06, 0x05, 0x2c, 0xcd, 0x57, 0x70, 0xd1, 0x24, 0xc1, 0x7a,
	0x36, 0xcc, 0xd5, 0x2f, 0x22, 0xc2, 0x9c, 0x72, 0x30, 0x62, 0xd6, 0xb3, 0x61, 0xa1, 0x8a, 0xc6,
	0x1b, 0xe0, 0x7a, 0x4b, 0xce, 0x4f, 0xa2, 0x2b, 0xd3, 0x46, 0xb8, 0x81, 0xb3, 0x83, 0x21, 0xbd,
	0xd3, 0xa4, 0x05, 0xef, 0xbb, 0xe1, 0xfc, 0xf1, 0x6e, 0x6c, 0xc9, 0x35, 0x67, 0x7c, 0x6d, 0xeb,
	0x97, 0x89, 0x62, 0x56, 0x7f, 0xee, 0x53, 0x4a, 0xb9, 0x5a, 0xf6, 0x6c, 0xb8, 0x50, 0x45, 0xdf,
	0x3e, 0xc3, 0xe5, 0x5f, 0x3b, 0x5e, 0x41, 0xf7, 0x61, 0x72, 0x29, 0xb8, 0x52, 0x27, 0x89, 0xd7,
	0xb0, 0xf8, 0xd4, 0xfb, 0x63, 0x6d, 0xb7, 0x52, 0x15, 0x9e, 0x66, 0x1b, 0xf6, 0xce, 0xcb, 0xab,
	0xeb, 0x9f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x8c, 0xfb, 0xd3, 0xd6, 0x21, 0x01, 0x00, 0x00,
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.springframework.shell.core.annotation.CliCommand;
import org.springframework.shell.core.annotation.CliOption;
import org.springframework.shell.event.ParseResult;
import org.apache.geode.management.cli.CliMetaData;
import org.apache.geode.management.internal.cli.shell.GfshExecutionStrategy;
import org.apache.geode.management.internal.cli.shell.OperationInvoker;
/**
* Immutable representation of the outcome of parsing a given shell line. * Extends
* {@link ParseResult} to add a field to specify the command string that was input by the user.
*
* <p>
* Some commands are required to be executed on a remote GemFire managing member. These should be
* marked with the annotation {@link CliMetaData#shellOnly()} set to <code>false</code>.
* {@link GfshExecutionStrategy} will detect whether the command is a remote command and send it to
* ManagementMBean via {@link OperationInvoker}.
*
*
* @since GemFire 7.0
*/
public class GfshParseResult extends ParseResult {

  /** Raw command line as typed by the user, trimmed. */
  private String userInput;
  /** Primary name of the command (first value of its CliCommand annotation). */
  private String commandName;
  /** Option key -> option value rendered as a String (arrays joined with ','). */
  private Map<String, String> paramValueStringMap = new HashMap<>();

  /**
   * Creates a GfshParseResult instance to represent a parsing outcome.
   *
   * @param method method associated with the command
   * @param instance instance on which the method is to be executed
   * @param arguments resolved arguments of the method (may be null)
   * @param userInput user specified command string
   */
  protected GfshParseResult(final Method method, final Object instance, final Object[] arguments,
      final String userInput) {
    super(method, instance, arguments);
    this.userInput = userInput.trim();
    commandName = method.getAnnotation(CliCommand.class).value()[0];

    if (arguments == null) {
      return;
    }
    Annotation[][] parameterAnnotations = method.getParameterAnnotations();
    for (int index = 0; index < arguments.length; index++) {
      Object argument = arguments[index];
      if (argument == null) {
        continue;
      }
      CliOption option = getCliOption(parameterAnnotations, index);
      // Render each value as a String (arrays joined with commas). The map is
      // used by tests and by option validation before execution.
      String valueAsString = argument instanceof Object[]
          ? StringUtils.join((Object[]) argument, ",")
          : argument.toString();
      paramValueStringMap.put(option.key()[0], valueAsString);
    }
  }

  /**
   * @return the userInput
   */
  public String getUserInput() {
    return userInput;
  }

  /**
   * Used only in tests and command pre-execution for validating arguments.
   *
   * @param param option key to look up
   * @return the option's value as a String, or null if absent
   */
  public String getParamValue(String param) {
    return paramValueStringMap.get(param);
  }

  /**
   * Used only in tests and command pre-execution for validating arguments.
   *
   * @return the unmodifiable paramValueStringMap
   */
  public Map<String, String> getParamValueStrings() {
    return Collections.unmodifiableMap(paramValueStringMap);
  }

  /** @return the primary name of the parsed command */
  public String getCommandName() {
    return commandName;
  }

  /**
   * Finds the CliOption annotation on the parameter at the given index.
   *
   * @return the annotation, or null when the parameter carries none
   */
  private CliOption getCliOption(Annotation[][] parameterAnnotations, int index) {
    for (Annotation candidate : parameterAnnotations[index]) {
      if (candidate instanceof CliOption) {
        return (CliOption) candidate;
      }
    }
    return null;
  }
}
| pivotal-amurmann/geode | geode-core/src/main/java/org/apache/geode/management/internal/cli/GfshParseResult.java | Java | apache-2.0 | 4,495 |
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2015 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.corecomponents;
import java.awt.Insets;
import java.io.File;
import java.util.Collection;
import java.util.Map;
import java.util.TreeMap;
import java.util.logging.Level;
import javax.swing.BorderFactory;
import javax.swing.UIManager;
import javax.swing.UIManager.LookAndFeelInfo;
import javax.swing.UnsupportedLookAndFeelException;
import org.netbeans.spi.sendopts.OptionProcessor;
import org.netbeans.swing.tabcontrol.plaf.DefaultTabbedContainerUI;
import org.openide.modules.ModuleInstall;
import org.openide.util.Lookup;
import org.openide.windows.WindowManager;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.casemodule.CaseActionException;
import org.sleuthkit.autopsy.casemodule.OpenFromArguments;
import org.sleuthkit.autopsy.coreutils.Logger;
/**
* Manages this module's life cycle. Opens the startup dialog during startup.
*/
public class Installer extends ModuleInstall {

    // Singleton instance; guarded by the class lock in getDefault().
    private static Installer instance;
    private static final Logger logger = Logger.getLogger(Installer.class.getName());

    /**
     * Returns the singleton Installer, creating it on first use.
     */
    public synchronized static Installer getDefault() {
        if (instance == null) {
            instance = new Installer();
        }
        return instance;
    }

    private Installer() {
        super();
    }

    /**
     * Called by the NetBeans platform when the module is restored at startup.
     * Configures the look and feel and tab UI, then — once the UI is ready —
     * either opens a case file passed on the command line (double-clicked .aut
     * file) or shows the startup dialog.
     */
    @Override
    public void restored() {
        super.restored();
        setupLAF();
        UIManager.put("ViewTabDisplayerUI", "org.sleuthkit.autopsy.corecomponents.NoTabsTabDisplayerUI");
        UIManager.put(DefaultTabbedContainerUI.KEY_VIEW_CONTENT_BORDER, BorderFactory.createEmptyBorder());
        UIManager.put("TabbedPane.contentBorderInsets", new Insets(0, 0, 0, 0));

        /*
         * Open the passed in case, if an aut file was double clicked.
         */
        WindowManager.getDefault().invokeWhenUIReady(() -> {
            Collection<? extends OptionProcessor> processors = Lookup.getDefault().lookupAll(OptionProcessor.class);
            for (OptionProcessor processor : processors) {
                if (processor instanceof OpenFromArguments) {
                    OpenFromArguments argsProcessor = (OpenFromArguments) processor;
                    final String caseFile = argsProcessor.getDefaultArg();
                    if (caseFile != null && !caseFile.equals("") && caseFile.endsWith(".aut") && new File(caseFile).exists()) { //NON-NLS
                        // Open the case off the UI thread; Case.open may block on I/O.
                        new Thread(() -> {
                            // Create case.
                            try {
                                Case.open(caseFile);
                            } catch (Exception ex) {
                                logger.log(Level.SEVERE, "Error opening case: ", ex); //NON-NLS
                            }
                        }).start();
                        return;
                    }
                }
            }
            Case.invokeStartupDialog(); // bring up the startup dialog
        });
    }

    @Override
    public void uninstalled() {
        super.uninstalled();
    }

    /**
     * Called on application shutdown: closes any open case on a background
     * thread so shutdown is not blocked.
     */
    @Override
    public void close() {
        new Thread(() -> {
            try {
                if (Case.isCaseOpen()) {
                    Case.getCurrentCase().closeCase();
                }
            } catch (CaseActionException | IllegalStateException unused) {
                // Exception already logged. Shutting down, no need to do popup.
            }
        }).start();
    }

    /**
     * Applies platform-specific look-and-feel adjustments. Currently only
     * Mac OS X needs special handling.
     */
    private void setupLAF() {

        //TODO apply custom skinning
        //UIManager.put("nimbusBase", new Color());
        //UIManager.put("nimbusBlueGrey", new Color());
        //UIManager.put("control", new Color());

        if (System.getProperty("os.name").toLowerCase().contains("mac")) { //NON-NLS
            setupMacOsXLAF();
        }
    }

    /**
     * Set the look and feel to be the Cross Platform 'Metal', but keep Aqua
     * dependent elements that set the Menu Bar to be in the correct place on
     * Mac OS X.
     */
    private void setupMacOsXLAF() {
        try {
            UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | UnsupportedLookAndFeelException ex) {
            logger.log(Level.WARNING, "Unable to set theme. ", ex); //NON-NLS
        }

        // Header keys whose Aqua values must survive the look-and-feel switch
        // so the menu bar stays at the top of the screen.
        final String[] UI_MENU_ITEM_KEYS = new String[]{"MenuBarUI", //NON-NLS
        };

        Map<Object, Object> uiEntries = new TreeMap<>();

        // Store the keys that deal with menu items
        for (String key : UI_MENU_ITEM_KEYS) {
            uiEntries.put(key, UIManager.get(key));
        }

        //use Metal if available
        // NOTE(review): the comment above says "Metal" but the loop selects the
        // look and feel named "Nimbus" — confirm which is actually intended.
        for (LookAndFeelInfo info : UIManager.getInstalledLookAndFeels()) {
            if ("Nimbus".equals(info.getName())) { //NON-NLS
                try {
                    UIManager.setLookAndFeel(info.getClassName());
                } catch (ClassNotFoundException | InstantiationException |
                        IllegalAccessException | UnsupportedLookAndFeelException ex) {
                    logger.log(Level.WARNING, "Unable to set theme. ", ex); //NON-NLS
                }
                break;
            }
        }

        // Overwrite the Metal menu item keys to use the Aqua versions
        uiEntries.entrySet().stream().forEach((entry) -> {
            UIManager.put(entry.getKey(), entry.getValue());
        });
    }
}
| mhmdfy/autopsy | Core/src/org/sleuthkit/autopsy/corecomponents/Installer.java | Java | apache-2.0 | 6,087 |
/*
* Package : org.ludo.codegenerator.core.gen.bean
* Source : IStereotype.java
*/
package org.ludo.codegenerator.core.gen.bean;
import java.io.Serializable;
import java.util.Date;
import java.util.ArrayList;
import java.util.List;
import org.ludo.codegenerator.core.gen.bean.impl.AttributBean;
import org.ludo.codegenerator.core.gen.bean.impl.ClasseBean;
import org.ludo.codegenerator.core.gen.bean.abst.IStereotypeAbstract;
/**
 * <b>Description :</b>
 * IStereotype — public interface for a UML stereotype bean.
 *
 * <p>All behavior is inherited from {@link IStereotypeAbstract}; this interface
 * exists as the stable extension point where hand-written methods can be added
 * without touching the (presumably generated) abstract interface — confirm
 * against the code generator's conventions. Extends {@link Serializable} so
 * implementations can be persisted or transferred.</p>
 */
public interface IStereotype extends IStereotypeAbstract, Serializable {
}
| ludo1026/tuto | generator-uml-to-config-xml/save/_3/src/org/ludo/codegenerator/core/gen/bean/IStereotype.java | Java | artistic-2.0 | 562 |
package org.glob3.mobile.specific;
import java.util.Map;
import org.glob3.mobile.generated.IByteBuffer;
import org.glob3.mobile.generated.IJSONParser;
import org.glob3.mobile.generated.JSONArray;
import org.glob3.mobile.generated.JSONBaseObject;
import org.glob3.mobile.generated.JSONBoolean;
import org.glob3.mobile.generated.JSONDouble;
import org.glob3.mobile.generated.JSONFloat;
import org.glob3.mobile.generated.JSONInteger;
import org.glob3.mobile.generated.JSONLong;
import org.glob3.mobile.generated.JSONNull;
import org.glob3.mobile.generated.JSONObject;
import org.glob3.mobile.generated.JSONString;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.gson.JsonPrimitive;
/**
 * Desktop implementation of {@link IJSONParser} backed by Google Gson. Parses a
 * JSON document and converts the Gson tree into the G3M JSON object model,
 * choosing the narrowest numeric type (int, long, float, double) that represents
 * each numeric value without loss.
 */
public class JSONParser_JavaDesktop
         extends
            IJSONParser {

   @Override
   public JSONBaseObject parse(final IByteBuffer buffer,
                               final boolean nullAsObject) {
      return parse(buffer.getAsString(), nullAsObject);
   }

   @Override
   public JSONBaseObject parse(final String string,
                               final boolean nullAsObject) {
      final JsonParser parser = new JsonParser();
      final JsonElement element = parser.parse(string);
      return convert(element, nullAsObject);
   }

   /**
    * Recursively converts a Gson element into the G3M JSON model.
    *
    * @param element      Gson node to convert
    * @param nullAsObject when true, JSON null becomes a {@link JSONNull} instance
    *                     instead of a Java null
    * @return the converted node (possibly null for JSON null)
    * @throws RuntimeException if the element kind is not recognized
    */
   private JSONBaseObject convert(final JsonElement element,
                                  final boolean nullAsObject) {
      if (element.isJsonNull()) {
         return nullAsObject ? new JSONNull() : null;
      }
      else if (element.isJsonObject()) {
         final JsonObject jsonObject = (JsonObject) element;
         final JSONObject result = new JSONObject();
         for (final Map.Entry<String, JsonElement> entry : jsonObject.entrySet()) {
            result.put(entry.getKey(), convert(entry.getValue(), nullAsObject));
         }
         return result;
      }
      else if (element.isJsonPrimitive()) {
         final JsonPrimitive jsonPrimitive = (JsonPrimitive) element;
         if (jsonPrimitive.isBoolean()) {
            return new JSONBoolean(jsonPrimitive.getAsBoolean());
         }
         else if (jsonPrimitive.isNumber()) {
            // Prefer the smallest lossless representation: int < long < float < double.
            final double doubleValue = jsonPrimitive.getAsDouble();
            final long longValue = (long) doubleValue;
            if (doubleValue == longValue) {
               final int intValue = (int) longValue;
               return (intValue == longValue) ? new JSONInteger(intValue) : new JSONLong(longValue);
            }
            final float floatValue = (float) doubleValue;
            return (floatValue == doubleValue) ? new JSONFloat(floatValue) : new JSONDouble(doubleValue);
         }
         else if (jsonPrimitive.isString()) {
            return new JSONString(jsonPrimitive.getAsString());
         }
         else {
            // Fixed typo in the exception message ("unsopoerted").
            throw new RuntimeException("JSON unsupported: " + element.getClass());
         }
      }
      else if (element.isJsonArray()) {
         final JsonArray jsonArray = (JsonArray) element;
         final JSONArray result = new JSONArray();
         for (final JsonElement child : jsonArray) {
            result.add(convert(child, nullAsObject));
         }
         return result;
      }
      else {
         throw new RuntimeException("JSON unsupported: " + element.getClass());
      }
   }

}
| AeroGlass/g3m | JavaDesktop/G3MJavaDesktopSDK/src/org/glob3/mobile/specific/JSONParser_JavaDesktop.java | Java | bsd-2-clause | 3,373 |
/* Copyright (c) Citrix Systems, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms,
* with or without modification, are permitted provided
* that the following conditions are met:
*
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the
* following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the
* following disclaimer in the documentation and/or other
* materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using XenAdmin.Core;
using XenAPI;
namespace XenAdmin.Actions
{
/// <summary>
/// Asynchronous action that enables HA on a pool: optionally applies per-VM
/// startup options (restart priority, boot order, start delay), sets the number
/// of tolerated host failures, then invokes pool.enable_ha with the chosen
/// heartbeat SRs.
/// </summary>
public class EnableHAAction : PureAsyncAction
{
    private static readonly log4net.ILog log = log4net.LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);

    // Per-VM startup options to apply before enabling HA; may be null to skip.
    private readonly Dictionary<VM, VMStartupOptions> startupOptions;
    // SRs used for the HA statefile/heartbeat; must contain at least one entry.
    private readonly SR[] heartbeatSRs;
    // Value written to the pool's ha_host_failures_to_tolerate.
    private readonly long failuresToTolerate;

    /// <exception cref="ArgumentException">Thrown when no heartbeat SR is supplied.</exception>
    public EnableHAAction(Pool pool, Dictionary<VM, VMStartupOptions> startupOptions, List<SR> heartbeatSRs, long failuresToTolerate)
        : base(pool.Connection, string.Format(Messages.ENABLING_HA_ON, Helpers.GetName(pool).Ellipsise(50)), Messages.ENABLING_HA, false)
    {
        if (heartbeatSRs.Count == 0)
            throw new ArgumentException("You must specify at least 1 heartbeat SR");

        Pool = pool;
        this.startupOptions = startupOptions;
        this.heartbeatSRs = heartbeatSRs.ToArray();
        this.failuresToTolerate = failuresToTolerate;
    }

    /// <summary>
    /// Copy of the heartbeat SRs chosen for this action.
    /// </summary>
    public List<SR> HeartbeatSRs
    {
        get { return new List<SR>(heartbeatSRs); }
    }

    protected override void Run()
    {
        if (startupOptions != null)
        {
            // The per-VM updates account for the first 10% of reported progress.
            double increment = 10.0 / Math.Max(startupOptions.Count, 1);
            int i = 0;
            // First set any VM restart priorities supplied
            foreach (VM vm in startupOptions.Keys)
            {
                // Set new VM restart priority and ha_always_run
                log.DebugFormat("Setting HA priority on {0} to {1}", vm.Name(), startupOptions[vm].HaRestartPriority);
                XenAPI.VM.SetHaRestartPriority(this.Session, vm, (VM.HA_Restart_Priority)startupOptions[vm].HaRestartPriority);

                // Set new VM order and start_delay
                log.DebugFormat("Setting start order on {0} to {1}", vm.Name(), startupOptions[vm].Order);
                XenAPI.VM.set_order(this.Session, vm.opaque_ref, startupOptions[vm].Order);

                log.DebugFormat("Setting start order on {0} to {1}", vm.Name(), startupOptions[vm].StartDelay);
                XenAPI.VM.set_start_delay(this.Session, vm.opaque_ref, startupOptions[vm].StartDelay);
                this.PercentComplete = (int)(++i * increment);
            }
        }

        this.PercentComplete = 10;

        log.DebugFormat("Setting ha_host_failures_to_tolerate to {0}", failuresToTolerate);
        XenAPI.Pool.set_ha_host_failures_to_tolerate(this.Session, Pool.opaque_ref, failuresToTolerate);

        var refs = heartbeatSRs.Select(sr => new XenRef<SR>(sr.opaque_ref)).ToList();

        try
        {
            log.Debug("Enabling HA for pool " + Pool.Name());
            // NB the line below also performs a pool db sync
            RelatedTask = XenAPI.Pool.async_enable_ha(this.Session, refs, new Dictionary<string, string>());
            PollToCompletion(15, 100);
            log.Debug("Success enabling HA on pool " + Pool.Name());
        }
        catch (Failure f)
        {
            // Surface a friendlier message when the statefile VDI is unavailable.
            if (f.ErrorDescription.Count > 1 && f.ErrorDescription[0] == "VDI_NOT_AVAILABLE")
            {
                var vdi = Connection.Resolve(new XenRef<VDI>(f.ErrorDescription[1]));
                if (vdi != null)
                    throw new Failure(string.Format(FriendlyErrorNames.VDI_NOT_AVAILABLE, vdi.uuid));
            }
            throw;
        }

        this.Description = Messages.COMPLETED;
    }
}
| kc284/xenadmin | XenModel/Actions/Pool/EnableHAAction.cs | C# | bsd-2-clause | 5,306 |
# frozen_string_literal: true
require_relative "../../helpers/file"
module Byebug
  #
  # Reopens the +info+ command to define the +file+ subcommand
  #
  class InfoCommand < Command
    #
    # Information about a particular source file
    #
    class FileCommand < Command
      include Helpers::FileHelper
      include Helpers::StringHelper

      self.allow_in_post_mortem = true

      def self.regexp
        /^\s* f(?:ile)? (?:\s+ (.+))? \s*$/x
      end

      def self.description
        <<-DESCRIPTION
          inf[o] f[ile]
          #{short_description}
          It informs about file name, number of lines, possible breakpoints in
          the file, last modification time and sha1 digest.
        DESCRIPTION
      end

      def self.short_description
        "Information about a particular source file."
      end

      # Prints the report for the requested file (or the current frame's file
      # when no argument is given). Errors out when the file does not exist.
      def execute
        file = @match[1] || frame.file
        return errmsg(pr("info.errors.undefined_file", file: file)) unless File.exist?(file)

        puts prettify <<-RUBY
          File #{info_file_basic(file)}
          Breakpoint line numbers: #{info_file_breakpoints(file)}
          Modification time: #{info_file_mtime(file)}
          Sha1 Signature: #{info_file_sha1(file)}
        RUBY
      end

      private

      # "path (<n> line[s])" summary; nil if the file disappeared meanwhile.
      def info_file_basic(file)
        path = File.expand_path(file)
        return unless File.exist?(path)

        s = n_lines(path) == 1 ? "" : "s"
        "#{path} (#{n_lines(path)} line#{s})"
      end

      # Sorted, space-separated list of lines where breakpoints can be set.
      def info_file_breakpoints(file)
        breakpoints = Breakpoint.potential_lines(file)
        return unless breakpoints

        breakpoints.to_a.sort.join(" ")
      end

      def info_file_mtime(file)
        File.stat(file).mtime
      end

      # Digest the file's *contents*, streamed from disk. The previous
      # implementation hashed the path string itself, which yields a constant
      # value per filename and is useless as a signature of the source text.
      def info_file_sha1(file)
        require "digest/sha1"
        Digest::SHA1.file(file).hexdigest
      end
    end
  end
end
| deivid-rodriguez/byebug | lib/byebug/commands/info/file.rb | Ruby | bsd-2-clause | 1,863 |
/*
* Copyright (C) 2011 Google Inc. All rights reserved.
* Copyright (C) Research In Motion Limited 2011. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#if ENABLE(WEB_SOCKETS)
#include "WebSocketHandshake.h"
#include "Base64.h"
#include "Cookie.h"
#include "CookieJar.h"
#include "Document.h"
#include "HTTPHeaderMap.h"
#include "KURL.h"
#include "Logging.h"
#include "ScriptCallStack.h"
#include "ScriptExecutionContext.h"
#include "SecurityOrigin.h"
#include <wtf/CryptographicallyRandomNumber.h>
#include <wtf/MD5.h>
#include <wtf/SHA1.h>
#include <wtf/StdLibExtras.h>
#include <wtf/StringExtras.h>
#include <wtf/Vector.h>
#include <wtf/text/CString.h>
#include <wtf/text/StringBuilder.h>
#include <wtf/text/WTFString.h>
#include <wtf/unicode/CharacterNames.h>
namespace WebCore {
// Characters eligible for insertion into a hixie-76 Sec-WebSocket-Key value.
static const char randomCharacterInSecWebSocketKey[] = "!\"#$%&'()*+,-./:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~";

// Builds the request-target for the handshake request line: the URL path
// (defaulting to "/") followed by the query string, if one is present.
static String resourceName(const KURL& url)
{
    String resource = url.path();
    if (resource.isEmpty())
        resource = "/";
    const String& query = url.query();
    if (!query.isNull())
        resource = resource + "?" + query;
    ASSERT(!resource.isEmpty());
    ASSERT(!resource.contains(' '));
    return resource;
}
// Returns the value for the "Host" header field: the lowercased host name,
// with ":port" appended only when the port differs from the scheme default
// (80 for ws, 443 for wss).
static String hostName(const KURL& url, bool secure)
{
    ASSERT(url.protocolIs("wss") == secure);

    StringBuilder result;
    result.append(url.host().lower());

    unsigned short port = url.port();
    unsigned short defaultPort = secure ? 443 : 80;
    if (port && port != defaultPort) {
        result.append(':');
        result.append(String::number(port));
    }
    return result.toString();
}
// Upper bound on how much raw handshake data is echoed into console messages.
static const size_t maxConsoleMessageSize = 128;

// Copies at most maxConsoleMessageSize characters of the buffer into a String,
// appending a horizontal ellipsis when the input was truncated.
static String trimConsoleMessage(const char* p, size_t len)
{
    const bool truncated = len > maxConsoleMessageSize;
    String message(p, truncated ? maxConsoleMessageSize : len);
    if (truncated)
        message.append(horizontalEllipsis);
    return message;
}
// Returns a cryptographically random uint32 uniformly distributed in [0, n),
// using rejection sampling to avoid modulo bias.
static uint32_t randomNumberLessThan(uint32_t n)
{
    if (!n)
        return 0;
    // NOTE(review): for n == UINT32_MAX this returns the full 32-bit range,
    // which may include n itself and so is not strictly "less than n";
    // presumably acceptable for key generation, but confirm intent.
    if (n == std::numeric_limits<uint32_t>::max())
        return cryptographicallyRandomNumber();
    // Largest multiple of n representable in 32 bits; samples >= max are
    // rejected so the final "% n" is unbiased.
    uint32_t max = std::numeric_limits<uint32_t>::max() - (std::numeric_limits<uint32_t>::max() % n);
    ASSERT(!(max % n));
    uint32_t v;
    do {
        v = cryptographicallyRandomNumber();
    } while (v >= max);
    return v % n;
}
// Generates a hixie-76 Sec-WebSocket-Key header value: picks a random space
// count (1..12) and a random number whose product with the space count fits in
// 32 bits, spells out the product, then scatters random punctuation/letter
// characters and exactly `space` spaces into the digits. The server recovers
// `number` by stripping non-digits and dividing by the space count. Outputs the
// chosen number via `number` and the header value via `key`.
static void generateHixie76SecWebSocketKey(uint32_t& number, String& key)
{
    uint32_t space = randomNumberLessThan(12) + 1;
    uint32_t max = 4294967295U / space;
    number = randomNumberLessThan(max);
    uint32_t product = number * space;

    String s = String::number(product);
    int n = randomNumberLessThan(12) + 1;
    DEFINE_STATIC_LOCAL(String, randomChars, (randomCharacterInSecWebSocketKey));
    for (int i = 0; i < n; i++) {
        int pos = randomNumberLessThan(s.length() + 1);
        int chpos = randomNumberLessThan(randomChars.length());
        s.insert(randomChars.substring(chpos, 1), pos);
    }
    DEFINE_STATIC_LOCAL(String, spaceChar, (" "));
    for (uint32_t i = 0; i < space; i++) {
        // Insert positions 1..len-1 so a space is never first or last.
        int pos = randomNumberLessThan(s.length() - 1) + 1;
        s.insert(spaceChar, pos);
    }
    ASSERT(s[0] != ' ');
    ASSERT(s[s.length() - 1] != ' ');
    key = s;
}

// Fills key3 with the eight random bytes sent after the hixie-76 request headers.
static void generateHixie76Key3(unsigned char key3[8])
{
    cryptographicallyRandomValues(key3, 8);
}
// Writes `number` into buf[0..3] in network (big-endian) byte order.
static void setChallengeNumber(unsigned char* buf, uint32_t number)
{
    for (int shift = 24; shift >= 0; shift -= 8)
        *buf++ = static_cast<unsigned char>((number >> shift) & 0xFF);
}
// Computes the hixie-76 expected challenge response: the MD5 digest of the two
// 4-byte big-endian key numbers followed by the 8-byte key3.
static void generateHixie76ExpectedChallengeResponse(uint32_t number1, uint32_t number2, unsigned char key3[8], unsigned char expectedChallenge[16])
{
    unsigned char challenge[16];
    setChallengeNumber(&challenge[0], number1);
    setChallengeNumber(&challenge[4], number2);
    memcpy(&challenge[8], key3, 8);
    MD5 md5;
    md5.addBytes(challenge, sizeof(challenge));
    Vector<uint8_t, 16> digest;
    md5.checksum(digest);
    memcpy(expectedChallenge, digest.data(), 16);
}

// Generates a hybi Sec-WebSocket-Key: 16 cryptographically random bytes,
// base64-encoded.
static String generateSecWebSocketKey()
{
    static const size_t nonceSize = 16;
    unsigned char key[nonceSize];
    cryptographicallyRandomValues(key, nonceSize);
    return base64Encode(reinterpret_cast<char*>(key), nonceSize);
}

// Computes the Sec-WebSocket-Accept value the server must echo back:
// base64(SHA-1(key + fixed WebSocket GUID)).
static String getExpectedWebSocketAccept(const String& secWebSocketKey)
{
    static const char* const webSocketKeyGUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11";
    static const size_t sha1HashSize = 20; // FIXME: This should be defined in SHA1.h.
    SHA1 sha1;
    CString keyData = secWebSocketKey.ascii();
    sha1.addBytes(reinterpret_cast<const uint8_t*>(keyData.data()), keyData.length());
    sha1.addBytes(reinterpret_cast<const uint8_t*>(webSocketKeyGUID), strlen(webSocketKeyGUID));
    Vector<uint8_t, sha1HashSize> hash;
    sha1.computeHash(hash);
    return base64Encode(reinterpret_cast<const char*>(hash.data()), sha1HashSize);
}
// Prepares client-side handshake state for the given URL. Depending on the
// protocol variant, pre-computes either the hixie-76 key pair, key3 and
// expected challenge response, or the hybi Sec-WebSocket-Key and its expected
// Sec-WebSocket-Accept value, so they are ready when the request is built.
WebSocketHandshake::WebSocketHandshake(const KURL& url, const String& protocol, ScriptExecutionContext* context, bool useHixie76Protocol)
    : m_url(url)
    , m_clientProtocol(protocol)
    , m_secure(m_url.protocolIs("wss"))
    , m_context(context)
    , m_useHixie76Protocol(useHixie76Protocol)
    , m_mode(Incomplete)
{
    if (m_useHixie76Protocol) {
        uint32_t number1;
        uint32_t number2;
        generateHixie76SecWebSocketKey(number1, m_hixie76SecWebSocketKey1);
        generateHixie76SecWebSocketKey(number2, m_hixie76SecWebSocketKey2);
        generateHixie76Key3(m_hixie76Key3);
        generateHixie76ExpectedChallengeResponse(number1, number2, m_hixie76Key3, m_hixie76ExpectedChallengeResponse);
    } else {
        m_secWebSocketKey = generateSecWebSocketKey();
        m_expectedAccept = getExpectedWebSocketAccept(m_secWebSocketKey);
    }
}

WebSocketHandshake::~WebSocketHandshake()
{
}
const KURL& WebSocketHandshake::url() const
{
    return m_url;
}

void WebSocketHandshake::setURL(const KURL& url)
{
    m_url = url.copy();
}

// Host name lowercased, as used in the handshake fields.
const String WebSocketHandshake::host() const
{
    return m_url.host().lower();
}

const String& WebSocketHandshake::clientProtocol() const
{
    return m_clientProtocol;
}

void WebSocketHandshake::setClientProtocol(const String& protocol)
{
    m_clientProtocol = protocol;
}

bool WebSocketHandshake::secure() const
{
    return m_secure;
}

// Serialized security origin of the owning context, used for the Origin /
// Sec-WebSocket-Origin header.
String WebSocketHandshake::clientOrigin() const
{
    return m_context->securityOrigin()->toString();
}

// Reconstructs the ws:// or wss:// URL string the client is connecting to.
String WebSocketHandshake::clientLocation() const
{
    StringBuilder builder;
    builder.append(m_secure ? "wss" : "ws");
    builder.append("://");
    builder.append(hostName(m_url, m_secure));
    builder.append(resourceName(m_url));
    return builder.toString();
}
// Serializes the client opening handshake as raw bytes ready to be written to
// the socket: request line, header fields, blank line, and (hixie-76 only) the
// trailing 8-byte key3.
CString WebSocketHandshake::clientHandshakeMessage() const
{
    // Keep the following consistent with clientHandshakeRequest().
    StringBuilder builder;

    builder.append("GET ");
    builder.append(resourceName(m_url));
    builder.append(" HTTP/1.1\r\n");

    Vector<String> fields;
    if (m_useHixie76Protocol)
        fields.append("Upgrade: WebSocket");
    else
        fields.append("Upgrade: websocket");
    fields.append("Connection: Upgrade");
    fields.append("Host: " + hostName(m_url, m_secure));
    if (m_useHixie76Protocol)
        fields.append("Origin: " + clientOrigin());
    else
        fields.append("Sec-WebSocket-Origin: " + clientOrigin());
    if (!m_clientProtocol.isEmpty())
        fields.append("Sec-WebSocket-Protocol: " + m_clientProtocol);

    // Cookies are looked up against the equivalent http/https URL.
    KURL url = httpURLForAuthenticationAndCookies();
    if (m_context->isDocument()) {
        Document* document = static_cast<Document*>(m_context);
        String cookie = cookieRequestHeaderFieldValue(document, url);
        if (!cookie.isEmpty())
            fields.append("Cookie: " + cookie);
        // Set "Cookie2: <cookie>" if cookies 2 exists for url?
    }

    if (m_useHixie76Protocol) {
        fields.append("Sec-WebSocket-Key1: " + m_hixie76SecWebSocketKey1);
        fields.append("Sec-WebSocket-Key2: " + m_hixie76SecWebSocketKey2);
    } else {
        fields.append("Sec-WebSocket-Key: " + m_secWebSocketKey);
        fields.append("Sec-WebSocket-Version: 8");
    }

    // Fields in the handshake are sent by the client in a random order; the
    // order is not meaningful. Thus, it's ok to send the order we constructed
    // the fields.
    for (size_t i = 0; i < fields.size(); i++) {
        builder.append(fields[i]);
        builder.append("\r\n");
    }

    builder.append("\r\n");

    CString handshakeHeader = builder.toString().utf8();
    // Hybi-10 handshake is complete at this point.
    if (!m_useHixie76Protocol)
        return handshakeHeader;
    // Hixie-76 protocol requires sending eight-byte data (so-called "key3") after the request header fields.
    char* characterBuffer = 0;
    CString msg = CString::newUninitialized(handshakeHeader.length() + sizeof(m_hixie76Key3), characterBuffer);
    memcpy(characterBuffer, handshakeHeader.data(), handshakeHeader.length());
    memcpy(characterBuffer + handshakeHeader.length(), m_hixie76Key3, sizeof(m_hixie76Key3));
    return msg;
}
// Builds the same opening handshake as clientHandshakeMessage(), but as a
// structured WebSocketHandshakeRequest (used by the inspector and tests).
WebSocketHandshakeRequest WebSocketHandshake::clientHandshakeRequest() const
{
    // Keep the following consistent with clientHandshakeMessage().
    // FIXME: do we need to store m_secWebSocketKey1, m_secWebSocketKey2 and
    // m_key3 in WebSocketHandshakeRequest?
    WebSocketHandshakeRequest request("GET", m_url);
    if (m_useHixie76Protocol)
        request.addHeaderField("Upgrade", "WebSocket");
    else
        request.addHeaderField("Upgrade", "websocket");
    request.addHeaderField("Connection", "Upgrade");
    request.addHeaderField("Host", hostName(m_url, m_secure));
    if (m_useHixie76Protocol)
        request.addHeaderField("Origin", clientOrigin());
    else
        request.addHeaderField("Sec-WebSocket-Origin", clientOrigin());
    if (!m_clientProtocol.isEmpty()) {
        // Fixed: the header name previously carried a stray trailing colon
        // ("Sec-WebSocket-Protocol:"), so the recorded header did not match
        // the one actually sent by clientHandshakeMessage().
        request.addHeaderField("Sec-WebSocket-Protocol", m_clientProtocol);
    }

    // Cookies are looked up against the equivalent http/https URL.
    KURL url = httpURLForAuthenticationAndCookies();
    if (m_context->isDocument()) {
        Document* document = static_cast<Document*>(m_context);
        String cookie = cookieRequestHeaderFieldValue(document, url);
        if (!cookie.isEmpty())
            request.addHeaderField("Cookie", cookie);
        // Set "Cookie2: <cookie>" if cookies 2 exists for url?
    }

    if (m_useHixie76Protocol) {
        request.addHeaderField("Sec-WebSocket-Key1", m_hixie76SecWebSocketKey1);
        request.addHeaderField("Sec-WebSocket-Key2", m_hixie76SecWebSocketKey2);
        request.setKey3(m_hixie76Key3);
    } else {
        request.addHeaderField("Sec-WebSocket-Key", m_secWebSocketKey);
        request.addHeaderField("Sec-WebSocket-Version", "8");
    }

    return request;
}
// Discards any parse progress so the handshake can be restarted.
void WebSocketHandshake::reset()
{
    m_mode = Incomplete;
}

// Called when the owning context is torn down; m_context must not be
// dereferenced after this.
void WebSocketHandshake::clearScriptExecutionContext()
{
    m_context = 0;
}
// Incrementally parses the server's handshake from the first `len` bytes of
// `header`. Returns the number of bytes consumed, or -1 if more data is needed
// (m_mode stays Incomplete). On success m_mode becomes Connected; on any
// protocol violation it becomes Failed and m_failureReason describes why.
int WebSocketHandshake::readServerHandshake(const char* header, size_t len)
{
    m_mode = Incomplete;
    int statusCode;
    String statusText;
    int lineLength = readStatusLine(header, len, statusCode, statusText);
    if (lineLength == -1)
        return -1;
    if (statusCode == -1) {
        m_mode = Failed; // m_failureReason is set inside readStatusLine().
        return len;
    }
    LOG(Network, "response code: %d", statusCode);
    m_response.setStatusCode(statusCode);
    m_response.setStatusText(statusText);
    if (statusCode != 101) {
        m_mode = Failed;
        m_failureReason = "Unexpected response code: " + String::number(statusCode);
        return len;
    }
    m_mode = Normal;
    // Wait until the full header section (terminated by a blank line) arrives.
    if (!strnstr(header, "\r\n\r\n", len)) {
        // Just hasn't been received fully yet.
        m_mode = Incomplete;
        return -1;
    }
    const char* p = readHTTPHeaders(header + lineLength, header + len);
    if (!p) {
        LOG(Network, "readHTTPHeaders failed");
        m_mode = Failed; // m_failureReason is set inside readHTTPHeaders().
        return len;
    }
    if (!checkResponseHeaders()) {
        LOG(Network, "header process failed");
        m_mode = Failed;
        return p - header;
    }

    if (!m_useHixie76Protocol) { // Hybi-10 handshake is complete at this point.
        m_mode = Connected;
        return p - header;
    }

    // In hixie-76 protocol, server's handshake contains sixteen-byte data (called "challenge response")
    // after the header fields.
    if (len < static_cast<size_t>(p - header + sizeof(m_hixie76ExpectedChallengeResponse))) {
        // Just hasn't been received /expected/ yet.
        m_mode = Incomplete;
        return -1;
    }

    m_response.setChallengeResponse(static_cast<const unsigned char*>(static_cast<const void*>(p)));
    // The server's challenge response must exactly match the MD5 digest we
    // pre-computed from our own keys in the constructor.
    if (memcmp(p, m_hixie76ExpectedChallengeResponse, sizeof(m_hixie76ExpectedChallengeResponse))) {
        m_mode = Failed;
        return (p - header) + sizeof(m_hixie76ExpectedChallengeResponse);
    }
    m_mode = Connected;
    return (p - header) + sizeof(m_hixie76ExpectedChallengeResponse);
}
WebSocketHandshake::Mode WebSocketHandshake::mode() const
{
    return m_mode;
}

// Human-readable reason recorded whenever m_mode becomes Failed.
String WebSocketHandshake::failureReason() const
{
    return m_failureReason;
}

// The accessors below look up response header fields using lowercase names;
// NOTE(review): this presumably relies on the header map's case-insensitive
// lookup — confirm against HTTPHeaderMap's semantics.
String WebSocketHandshake::serverWebSocketOrigin() const
{
    return m_response.headerFields().get("sec-websocket-origin");
}

String WebSocketHandshake::serverWebSocketLocation() const
{
    return m_response.headerFields().get("sec-websocket-location");
}

String WebSocketHandshake::serverWebSocketProtocol() const
{
    return m_response.headerFields().get("sec-websocket-protocol");
}

String WebSocketHandshake::serverSetCookie() const
{
    return m_response.headerFields().get("set-cookie");
}

String WebSocketHandshake::serverSetCookie2() const
{
    return m_response.headerFields().get("set-cookie2");
}

String WebSocketHandshake::serverUpgrade() const
{
    return m_response.headerFields().get("upgrade");
}

String WebSocketHandshake::serverConnection() const
{
    return m_response.headerFields().get("connection");
}

String WebSocketHandshake::serverWebSocketAccept() const
{
    return m_response.headerFields().get("sec-websocket-accept");
}

String WebSocketHandshake::serverWebSocketExtensions() const
{
    return m_response.headerFields().get("sec-websocket-extensions");
}

const WebSocketHandshakeResponse& WebSocketHandshake::serverHandshakeResponse() const
{
    return m_response;
}
// Maps the ws/wss URL onto its http/https equivalent so credentials and
// cookies stored for the corresponding HTTP origin can be reused.
KURL WebSocketHandshake::httpURLForAuthenticationAndCookies() const
{
    KURL httpURL = m_url.copy();
    bool protocolWasSet = httpURL.setProtocol(m_secure ? "https" : "http");
    ASSERT_UNUSED(protocolWasSet, protocolWasSet);
    return httpURL;
}
// Returns the header length (including "\r\n"), or -1 if we have not received enough data yet.
// If the line is malformed or the status code is not a 3-digit number,
// statusCode and statusText will be set to -1 and a null string, respectively.
int WebSocketHandshake::readStatusLine(const char* header, size_t headerLength, int& statusCode, String& statusText)
{
    // Arbitrary size limit to prevent the server from sending an unbounded
    // amount of data with no newlines and forcing us to buffer it all.
    static const int maximumLength = 1024;

    statusCode = -1;
    statusText = String();

    // space1/space2 bracket the status-code token: "HTTP/1.1 101 Switching...".
    const char* space1 = 0;
    const char* space2 = 0;

    const char* p;
    size_t consumedLength;

    for (p = header, consumedLength = 0; consumedLength < headerLength; p++, consumedLength++) {
        if (*p == ' ') {
            if (!space1)
                space1 = p;
            else if (!space2)
                space2 = p;
        } else if (*p == '\0') {
            // The caller isn't prepared to deal with null bytes in status
            // line. WebSockets specification doesn't prohibit this, but HTTP
            // does, so we'll just treat this as an error.
            m_failureReason = "Status line contains embedded null";
            return p + 1 - header;
        } else if (*p == '\n')
            break;
    }
    if (consumedLength == headerLength)
        return -1; // We have not received '\n' yet.

    const char* end = p + 1;
    int lineLength = end - header;
    if (lineLength > maximumLength) {
        m_failureReason = "Status line is too long";
        return maximumLength;
    }

    // The line must end with "\r\n".
    if (lineLength < 2 || *(end - 2) != '\r') {
        m_failureReason = "Status line does not end with CRLF";
        return lineLength;
    }

    if (!space1 || !space2) {
        m_failureReason = "No response code found: " + trimConsoleMessage(header, lineLength - 2);
        return lineLength;
    }

    String statusCodeString(space1 + 1, space2 - space1 - 1);
    // NOTE(review): this early return leaves statusCode == -1 without setting a
    // fresh m_failureReason, so the caller may report a stale/empty reason —
    // confirm whether that is intended.
    if (statusCodeString.length() != 3) // Status code must consist of three digits.
        return lineLength;
    for (int i = 0; i < 3; ++i)
        if (statusCodeString[i] < '0' || statusCodeString[i] > '9') {
            m_failureReason = "Invalid status code: " + statusCodeString;
            return lineLength;
        }

    bool ok = false;
    statusCode = statusCodeString.toInt(&ok);
    ASSERT(ok);

    statusText = String(space2 + 1, end - space2 - 3); // Exclude "\r\n".
    return lineLength;
}
// Parses HTTP header fields from [start, end), which must contain the blank
// "\r\n" line that terminates the header section. Parsed fields are added to
// m_response. Returns the position just past the terminating blank line on
// success, or 0 on failure (with m_failureReason set).
// Fix: the failure messages for a CR not followed by LF previously read
// "CR doesn't follow LF", which inverts the actual error condition.
const char* WebSocketHandshake::readHTTPHeaders(const char* start, const char* end)
{
    m_response.clearHeaderFields();
    Vector<char> name;
    Vector<char> value;
    for (const char* p = start; p < end; p++) {
        name.clear();
        value.clear();
        // Read the header name up to the ':' separator.
        for (; p < end; p++) {
            switch (*p) {
            case '\r':
                if (name.isEmpty()) {
                    // A bare CRLF here is the blank line ending the headers.
                    if (p + 1 < end && *(p + 1) == '\n')
                        return p + 2;
                    m_failureReason = "LF doesn't follow CR at " + trimConsoleMessage(p, end - p);
                    return 0;
                }
                m_failureReason = "Unexpected CR in name at " + trimConsoleMessage(name.data(), name.size());
                return 0;
            case '\n':
                m_failureReason = "Unexpected LF in name at " + trimConsoleMessage(name.data(), name.size());
                return 0;
            case ':':
                break;
            default:
                name.append(*p);
                continue;
            }
            if (*p == ':') {
                ++p;
                break;
            }
        }
        // Skip optional spaces after the colon.
        for (; p < end && *p == 0x20; p++) { }
        // Read the header value up to CR.
        for (; p < end; p++) {
            switch (*p) {
            case '\r':
                break;
            case '\n':
                m_failureReason = "Unexpected LF in value at " + trimConsoleMessage(value.data(), value.size());
                return 0;
            default:
                value.append(*p);
            }
            if (*p == '\r') {
                ++p;
                break;
            }
        }
        if (p >= end || *p != '\n') {
            m_failureReason = "LF doesn't follow CR after value at " + trimConsoleMessage(p, end - p);
            return 0;
        }
        AtomicString nameStr = AtomicString::fromUTF8(name.data(), name.size());
        String valueStr = String::fromUTF8(value.data(), value.size());
        if (nameStr.isNull()) {
            m_failureReason = "Invalid UTF-8 sequence in header name";
            return 0;
        }
        if (valueStr.isNull()) {
            m_failureReason = "Invalid UTF-8 sequence in header value";
            return 0;
        }
        LOG(Network, "name=%s value=%s", nameStr.string().utf8().data(), valueStr.utf8().data());
        m_response.addHeaderField(nameStr, valueStr);
    }
    // The loop either returns past the terminating blank line or fails;
    // running off the end of a well-formed buffer is impossible.
    ASSERT_NOT_REACHED();
    return 0;
}
bool WebSocketHandshake::checkResponseHeaders()
{
const String& serverWebSocketLocation = this->serverWebSocketLocation();
const String& serverWebSocketOrigin = this->serverWebSocketOrigin();
const String& serverWebSocketProtocol = this->serverWebSocketProtocol();
const String& serverUpgrade = this->serverUpgrade();
const String& serverConnection = this->serverConnection();
const String& serverWebSocketAccept = this->serverWebSocketAccept();
const String& serverWebSocketExtensions = this->serverWebSocketExtensions();
if (serverUpgrade.isNull()) {
m_failureReason = "Error during WebSocket handshake: 'Upgrade' header is missing";
return false;
}
if (serverConnection.isNull()) {
m_failureReason = "Error during WebSocket handshake: 'Connection' header is missing";
return false;
}
if (m_useHixie76Protocol) {
if (serverWebSocketOrigin.isNull()) {
m_failureReason = "Error during WebSocket handshake: 'Sec-WebSocket-Origin' header is missing";
return false;
}
if (serverWebSocketLocation.isNull()) {
m_failureReason = "Error during WebSocket handshake: 'Sec-WebSocket-Location' header is missing";
return false;
}
} else {
if (serverWebSocketAccept.isNull()) {
m_failureReason = "Error during WebSocket handshake: 'Sec-WebSocket-Accept' header is missing";
return false;
}
}
if (!equalIgnoringCase(serverUpgrade, "websocket")) {
m_failureReason = "Error during WebSocket handshake: 'Upgrade' header value is not 'WebSocket'";
return false;
}
if (!equalIgnoringCase(serverConnection, "upgrade")) {
m_failureReason = "Error during WebSocket handshake: 'Connection' header value is not 'Upgrade'";
return false;
}
if (m_useHixie76Protocol) {
if (clientOrigin() != serverWebSocketOrigin) {
m_failureReason = "Error during WebSocket handshake: origin mismatch: " + clientOrigin() + " != " + serverWebSocketOrigin;
return false;
}
if (clientLocation() != serverWebSocketLocation) {
m_failureReason = "Error during WebSocket handshake: location mismatch: " + clientLocation() + " != " + serverWebSocketLocation;
return false;
}
if (!m_clientProtocol.isEmpty() && m_clientProtocol != serverWebSocketProtocol) {
m_failureReason = "Error during WebSocket handshake: protocol mismatch: " + m_clientProtocol + " != " + serverWebSocketProtocol;
return false;
}
} else {
if (serverWebSocketAccept != m_expectedAccept) {
m_failureReason = "Error during WebSocket handshake: Sec-WebSocket-Accept mismatch";
return false;
}
if (!serverWebSocketExtensions.isNull()) {
// WebSocket protocol extensions are not supported yet.
// We do not send Sec-WebSocket-Extensions header in our request, thus
// servers should not return this header, either.
m_failureReason = "Error during WebSocket handshake: Sec-WebSocket-Extensions header is invalid";
return false;
}
}
return true;
}
} // namespace WebCore
#endif // ENABLE(WEB_SOCKETS)
| Treeeater/WebPermission | websockets/WebSocketHandshake.cpp | C++ | bsd-2-clause | 24,458 |
/*
* Copyright (C) 1997 Martin Jones (mjones@kde.org)
* (C) 1997 Torben Weis (weis@kde.org)
* (C) 1998 Waldo Bastian (bastian@kde.org)
* (C) 1999 Lars Knoll (knoll@kde.org)
* (C) 1999 Antti Koivisto (koivisto@kde.org)
* Copyright (C) 2003, 2004, 2005, 2006, 2008, 2009, 2010 Apple Inc. All rights reserved.
* Copyright (C) 2006 Alexey Proskuryakov (ap@nypop.com)
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this library; see the file COPYING.LIB. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "config.h"
#include "RenderTableSection.h"
#include "CachedImage.h"
#include "Document.h"
#include "HitTestResult.h"
#include "HTMLNames.h"
#include "PaintInfo.h"
#include "RenderTableCell.h"
#include "RenderTableCol.h"
#include "RenderTableRow.h"
#include "RenderView.h"
#include <limits>
#include <wtf/HashSet.h>
#include <wtf/Vector.h>
using namespace std;
namespace WebCore {
using namespace HTMLNames;
// Those 2 variables are used to balance the memory consumption vs the repaint time on big tables.
// Sections with fewer cells than this always track overflowing cells individually (fast path).
static unsigned gMinTableSizeToUseFastPaintPathWithOverflowingCell = 75 * 75;
// Above that size, once more than this fraction of cells overflow we stop
// tracking them and take the slow paint path instead.
static float gMaxAllowedOverflowingCellRatioForFastPaintPath = 0.1f;
// Copies the row renderer's style logical height into the grid row's entry,
// treating a relative ("*"-valued) height as if no height were specified.
static inline void setRowLogicalHeightToRowStyleLogicalHeightIfNotRelative(RenderTableSection::RowStruct* row)
{
    ASSERT(row && row->rowRenderer);
    Length styleLogicalHeight = row->rowRenderer->style()->logicalHeight();
    row->logicalHeight = styleLogicalHeight.isRelative() ? Length() : styleLogicalHeight;
}
// Constructs an empty section; rows/cells are added through addChild()/addCell().
RenderTableSection::RenderTableSection(Node* node)
    : RenderBox(node)
    , m_gridRows(0)
    , m_cCol(0)
    , m_cRow(-1) // -1 so the first row added bumps us to row index 0.
    , m_outerBorderStart(0)
    , m_outerBorderEnd(0)
    , m_outerBorderBefore(0)
    , m_outerBorderAfter(0)
    , m_needsCellRecalc(false)
    , m_hasMultipleCellLevels(false)
{
    // init RenderObject attributes
    setInline(false); // our object is not Inline
}
RenderTableSection::~RenderTableSection()
{
    // Free the per-row cell grids allocated in ensureRows().
    clearGrid();
}
void RenderTableSection::styleDidChange(StyleDifference diff, const RenderStyle* oldStyle)
{
    RenderBox::styleDidChange(diff, oldStyle);
    // Anonymous rows/cells we created inherit our style, so push the change down.
    propagateStyleToAnonymousChildren();
}
void RenderTableSection::willBeDestroyed()
{
    // Grab the table first: RenderBox teardown detaches us from the tree.
    RenderTable* recalcTable = table();
    RenderBox::willBeDestroyed();
    // recalc cell info because RenderTable has unguarded pointers
    // stored that point to this RenderTableSection.
    if (recalcTable)
        recalcTable->setNeedsSectionRecalc();
}
// Adds |child| before |beforeChild| (or at the end). Non-row children are
// wrapped into an existing or newly created anonymous table row; rows are
// registered in the grid.
void RenderTableSection::addChild(RenderObject* child, RenderObject* beforeChild)
{
    // Make sure we don't append things after :after-generated content if we have it.
    if (!beforeChild) {
        if (RenderObject* afterContentRenderer = findAfterContentRenderer())
            beforeChild = anonymousContainer(afterContentRenderer);
    }
    if (!child->isTableRow()) {
        // Try to reuse a trailing anonymous wrapper before creating a new one.
        RenderObject* last = beforeChild;
        if (!last)
            last = lastChild();
        if (last && last->isAnonymous() && !last->isBeforeOrAfterContent()) {
            if (beforeChild == last)
                beforeChild = last->firstChild();
            last->addChild(child, beforeChild);
            return;
        }
        // If beforeChild is inside an anonymous cell/row, insert into the cell or into
        // the anonymous row containing it, if there is one.
        RenderObject* lastBox = last;
        while (lastBox && lastBox->parent()->isAnonymous() && !lastBox->isTableRow())
            lastBox = lastBox->parent();
        if (lastBox && lastBox->isAnonymous() && !lastBox->isBeforeOrAfterContent()) {
            lastBox->addChild(child, beforeChild);
            return;
        }
        // No reusable wrapper: create an anonymous row and recurse.
        RenderObject* row = new (renderArena()) RenderTableRow(document() /* anonymous table row */);
        RefPtr<RenderStyle> newStyle = RenderStyle::create();
        newStyle->inheritFrom(style());
        newStyle->setDisplay(TABLE_ROW);
        row->setStyle(newStyle.release());
        addChild(row, beforeChild);
        row->addChild(child);
        return;
    }
    // Inserting in the middle invalidates the grid's row numbering.
    if (beforeChild)
        setNeedsCellRecalc();
    ++m_cRow;
    m_cCol = 0;
    // make sure we have enough rows
    if (!ensureRows(m_cRow + 1))
        return;
    m_grid[m_cRow].rowRenderer = toRenderTableRow(child);
    if (!beforeChild)
        setRowLogicalHeightToRowStyleLogicalHeightIfNotRelative(&m_grid[m_cRow]);
    // If the next renderer is actually wrapped in an anonymous table row, we need to go up and find that.
    while (beforeChild && beforeChild->parent() != this)
        beforeChild = beforeChild->parent();
    ASSERT(!beforeChild || beforeChild->isTableRow());
    RenderBox::addChild(child, beforeChild);
    toRenderTableRow(child)->updateBeforeAndAfterContent();
}
void RenderTableSection::removeChild(RenderObject* oldChild)
{
    // Removing a row invalidates the cell grid; rebuild it lazily.
    setNeedsCellRecalc();
    RenderBox::removeChild(oldChild);
}
bool RenderTableSection::ensureRows(int numRows)
{
int nRows = m_gridRows;
if (numRows > nRows) {
if (numRows > static_cast<int>(m_grid.size())) {
size_t maxSize = numeric_limits<size_t>::max() / sizeof(RowStruct);
if (static_cast<size_t>(numRows) > maxSize)
return false;
m_grid.grow(numRows);
}
m_gridRows = numRows;
int nCols = max(1, table()->numEffCols());
for (int r = nRows; r < numRows; r++) {
m_grid[r].row = new Row(nCols);
m_grid[r].rowRenderer = 0;
m_grid[r].baseline = 0;
m_grid[r].logicalHeight = Length();
}
}
return true;
}
// Places |cell| into the grid at the current row/column cursor, expanding the
// grid for its row span and splitting/appending table columns for its column
// span as needed.
void RenderTableSection::addCell(RenderTableCell* cell, RenderTableRow* row)
{
    int rSpan = cell->rowSpan();
    int cSpan = cell->colSpan();
    Vector<RenderTable::ColumnStruct>& columns = table()->columns();
    int nCols = columns.size();
    // ### mozilla still seems to do the old HTML way, even for strict DTD
    // (see the annotation on table cell layouting in the CSS specs and the testcase below:
    // <TABLE border>
    // <TR><TD>1 <TD rowspan="2">2 <TD>3 <TD>4
    // <TR><TD colspan="2">5
    // </TABLE>
    // Advance past columns already occupied by earlier cells/spans.
    while (m_cCol < nCols && (cellAt(m_cRow, m_cCol).hasCells() || cellAt(m_cRow, m_cCol).inColSpan))
        m_cCol++;
    if (rSpan == 1) {
        // we ignore height settings on rowspan cells
        Length logicalHeight = cell->style()->logicalHeight();
        if (logicalHeight.isPositive() || (logicalHeight.isRelative() && logicalHeight.value() >= 0)) {
            Length cRowLogicalHeight = m_grid[m_cRow].logicalHeight;
            // Keep the largest compatible height constraint seen for this row.
            switch (logicalHeight.type()) {
                case Percent:
                    if (!(cRowLogicalHeight.isPercent()) ||
                        (cRowLogicalHeight.isPercent() && cRowLogicalHeight.percent() < logicalHeight.percent()))
                        m_grid[m_cRow].logicalHeight = logicalHeight;
                    break;
                case Fixed:
                    if (cRowLogicalHeight.type() < Percent ||
                        (cRowLogicalHeight.isFixed() && cRowLogicalHeight.value() < logicalHeight.value()))
                        m_grid[m_cRow].logicalHeight = logicalHeight;
                    break;
                case Relative:
                default:
                    break;
            }
        }
    }
    // make sure we have enough rows
    if (!ensureRows(m_cRow + rSpan))
        return;
    m_grid[m_cRow].rowRenderer = row;
    int col = m_cCol;
    // tell the cell where it is
    bool inColSpan = false;
    while (cSpan) {
        int currentSpan;
        if (m_cCol >= nCols) {
            // Ran past the known columns: grow the table's column list.
            table()->appendColumn(cSpan);
            currentSpan = cSpan;
        } else {
            if (cSpan < (int)columns[m_cCol].span)
                table()->splitColumn(m_cCol, cSpan);
            currentSpan = columns[m_cCol].span;
        }
        for (int r = 0; r < rSpan; r++) {
            CellStruct& c = cellAt(m_cRow + r, m_cCol);
            ASSERT(cell);
            c.cells.append(cell);
            // If cells overlap then we take the slow path for painting.
            if (c.cells.size() > 1)
                m_hasMultipleCellLevels = true;
            if (inColSpan)
                c.inColSpan = true;
        }
        m_cCol++;
        cSpan -= currentSpan;
        inColSpan = true;
    }
    cell->setRow(m_cRow);
    cell->setCol(table()->effColToCol(col));
}
// Pushes the table's computed column widths down to each primary cell,
// marking changed cells for relayout (and repaint when required).
void RenderTableSection::setCellLogicalWidths()
{
    Vector<LayoutUnit>& columnPos = table()->columnPositions();
    LayoutStateMaintainer statePusher(view());
    for (int i = 0; i < m_gridRows; i++) {
        Row& row = *m_grid[i].row;
        int cols = row.size();
        for (int j = 0; j < cols; j++) {
            CellStruct& current = row[j];
            RenderTableCell* cell = current.primaryCell();
            if (!cell || current.inColSpan)
                continue;
            // Sum the effective columns covered by this cell's colspan.
            int endCol = j;
            int cspan = cell->colSpan();
            while (cspan && endCol < cols) {
                ASSERT(endCol < (int)table()->columns().size());
                cspan -= table()->columns()[endCol].span;
                endCol++;
            }
            int w = columnPos[endCol] - columnPos[j] - table()->hBorderSpacing();
            int oldLogicalWidth = cell->logicalWidth();
            if (w != oldLogicalWidth) {
                cell->setNeedsLayout(true);
                if (!table()->selfNeedsLayout() && cell->checkForRepaintDuringLayout()) {
                    if (!statePusher.didPush()) {
                        // Technically, we should also push state for the row, but since
                        // rows don't push a coordinate transform, that's not necessary.
                        statePusher.push(this, IntSize(x(), y()));
                    }
                    cell->repaint();
                }
                cell->updateLogicalWidth(w);
            }
        }
    }
    statePusher.pop(); // only pops if we pushed
}
// Computes the cumulative row positions (m_rowPos) and per-row baselines from
// row style heights and cell content heights. Returns the total logical
// height of all rows.
LayoutUnit RenderTableSection::calcRowLogicalHeight()
{
#ifndef NDEBUG
    setNeedsLayoutIsForbidden(true);
#endif
    ASSERT(!needsLayout());
    RenderTableCell* cell;
    LayoutUnit spacing = table()->vBorderSpacing();
    LayoutStateMaintainer statePusher(view());
    // m_rowPos[r] is the logical top of row r; m_rowPos[gridRows] is the bottom.
    m_rowPos.resize(m_gridRows + 1);
    m_rowPos[0] = spacing;
    for (int r = 0; r < m_gridRows; r++) {
        m_rowPos[r + 1] = 0;
        m_grid[r].baseline = 0;
        LayoutUnit baseline = 0;
        LayoutUnit bdesc = 0;
        // Start from the row's own styled height, then let cells grow it.
        LayoutUnit ch = m_grid[r].logicalHeight.calcMinValue(0);
        LayoutUnit pos = m_rowPos[r] + ch + (m_grid[r].rowRenderer ? spacing : 0);
        m_rowPos[r + 1] = max(m_rowPos[r + 1], pos);
        Row* row = m_grid[r].row;
        int totalCols = row->size();
        for (int c = 0; c < totalCols; c++) {
            CellStruct& current = cellAt(r, c);
            cell = current.primaryCell();
            if (!cell || current.inColSpan)
                continue;
            // Only account for a row-spanning cell in the row where it ends.
            if ((cell->row() + cell->rowSpan() - 1) > r)
                continue;
            int indx = max(r - cell->rowSpan() + 1, 0);
            if (cell->hasOverrideHeight()) {
                // Drop any previously forced height and relayout at natural size.
                if (!statePusher.didPush()) {
                    // Technically, we should also push state for the row, but since
                    // rows don't push a coordinate transform, that's not necessary.
                    statePusher.push(this, locationOffset());
                }
                cell->clearIntrinsicPadding();
                cell->clearOverrideSize();
                cell->setChildNeedsLayout(true, false);
                cell->layoutIfNeeded();
            }
            LayoutUnit adjustedPaddingBefore = cell->paddingBefore() - cell->intrinsicPaddingBefore();
            LayoutUnit adjustedPaddingAfter = cell->paddingAfter() - cell->intrinsicPaddingAfter();
            LayoutUnit adjustedLogicalHeight = cell->logicalHeight() - (cell->intrinsicPaddingBefore() + cell->intrinsicPaddingAfter());
            // Explicit heights use the border box in quirks mode. In strict mode do the right
            // thing and actually add in the border and padding.
            ch = cell->style()->logicalHeight().calcValue(0) +
                (document()->inQuirksMode() ? 0 : (adjustedPaddingBefore + adjustedPaddingAfter +
                                                   cell->borderBefore() + cell->borderAfter()));
            ch = max(ch, adjustedLogicalHeight);
            pos = m_rowPos[indx] + ch + (m_grid[r].rowRenderer ? spacing : 0);
            m_rowPos[r + 1] = max(m_rowPos[r + 1], pos);
            // find out the baseline
            EVerticalAlign va = cell->style()->verticalAlign();
            if (va == BASELINE || va == TEXT_BOTTOM || va == TEXT_TOP || va == SUPER || va == SUB) {
                LayoutUnit b = cell->cellBaselinePosition();
                if (b > cell->borderBefore() + cell->paddingBefore()) {
                    baseline = max(baseline, b - cell->intrinsicPaddingBefore());
                    bdesc = max(bdesc, m_rowPos[indx] + ch - (b - cell->intrinsicPaddingBefore()));
                }
            }
        }
        // do we have baseline aligned elements?
        if (baseline) {
            // increase rowheight if baseline requires
            m_rowPos[r + 1] = max(m_rowPos[r + 1], baseline + bdesc + (m_grid[r].rowRenderer ? spacing : 0));
            m_grid[r].baseline = baseline;
        }
        // Row positions must be monotonically non-decreasing.
        m_rowPos[r + 1] = max(m_rowPos[r + 1], m_rowPos[r]);
    }
#ifndef NDEBUG
    setNeedsLayoutIsForbidden(false);
#endif
    ASSERT(!needsLayout());
    statePusher.pop();
    return m_rowPos[m_gridRows];
}
void RenderTableSection::layout()
{
ASSERT(needsLayout());
LayoutStateMaintainer statePusher(view(), this, locationOffset(), style()->isFlippedBlocksWritingMode());
for (RenderObject* child = children()->firstChild(); child; child = child->nextSibling()) {
if (child->isTableRow()) {
child->layoutIfNeeded();
ASSERT(!child->needsLayout());
}
}
statePusher.pop();
setNeedsLayout(false);
}
// Distributes the extra logical height |toAdd| among rows (percent rows first,
// then auto rows, then proportionally over all rows), then positions/sizes each
// row renderer and cell, applying vertical-align via intrinsic padding, and
// finally collects overflow from cells. Returns the section's final height.
LayoutUnit RenderTableSection::layoutRows(LayoutUnit toAdd)
{
#ifndef NDEBUG
    setNeedsLayoutIsForbidden(true);
#endif
    ASSERT(!needsLayout());
    LayoutUnit rHeight;
    int rindx;
    int totalRows = m_gridRows;
    // Set the width of our section now. The rows will also be this width.
    setLogicalWidth(table()->contentLogicalWidth());
    m_overflow.clear();
    m_overflowingCells.clear();
    m_forceSlowPaintPathWithOverflowingCell = false;
    if (toAdd && totalRows && (m_rowPos[totalRows] || !nextSibling())) {
        LayoutUnit totalHeight = m_rowPos[totalRows] + toAdd;
        LayoutUnit dh = toAdd; // remaining height still to distribute
        int totalPercent = 0;
        int numAuto = 0;
        for (int r = 0; r < totalRows; r++) {
            if (m_grid[r].logicalHeight.isAuto())
                numAuto++;
            else if (m_grid[r].logicalHeight.isPercent())
                totalPercent += m_grid[r].logicalHeight.percent();
        }
        if (totalPercent) {
            // try to satisfy percent
            LayoutUnit add = 0;
            totalPercent = min(totalPercent, 100);
            int rh = m_rowPos[1] - m_rowPos[0];
            for (int r = 0; r < totalRows; r++) {
                if (totalPercent > 0 && m_grid[r].logicalHeight.isPercent()) {
                    // NOTE: this local deliberately shadows the |toAdd| parameter.
                    LayoutUnit toAdd = min(dh, static_cast<LayoutUnit>((totalHeight * m_grid[r].logicalHeight.percent() / 100) - rh));
                    // If toAdd is negative, then we don't want to shrink the row (this bug
                    // affected Outlook Web Access).
                    toAdd = max<LayoutUnit>(0, toAdd);
                    add += toAdd;
                    dh -= toAdd;
                    totalPercent -= m_grid[r].logicalHeight.percent();
                }
                if (r < totalRows - 1)
                    rh = m_rowPos[r + 2] - m_rowPos[r + 1];
                m_rowPos[r + 1] += add;
            }
        }
        if (numAuto) {
            // distribute over variable cols
            LayoutUnit add = 0;
            for (int r = 0; r < totalRows; r++) {
                if (numAuto > 0 && m_grid[r].logicalHeight.isAuto()) {
                    LayoutUnit toAdd = dh / numAuto; // shadows the parameter, see above
                    add += toAdd;
                    dh -= toAdd;
                    numAuto--;
                }
                m_rowPos[r + 1] += add;
            }
        }
        if (dh > 0 && m_rowPos[totalRows]) {
            // if some left overs, distribute equally.
            LayoutUnit tot = m_rowPos[totalRows];
            LayoutUnit add = 0;
            LayoutUnit prev = m_rowPos[0];
            for (int r = 0; r < totalRows; r++) {
                // weight with the original height
                add += dh * (m_rowPos[r + 1] - prev) / tot;
                prev = m_rowPos[r + 1];
                m_rowPos[r + 1] += add;
            }
        }
    }
    LayoutUnit hspacing = table()->hBorderSpacing();
    LayoutUnit vspacing = table()->vBorderSpacing();
    LayoutUnit nEffCols = table()->numEffCols();
    LayoutStateMaintainer statePusher(view(), this, LayoutSize(x(), y()), style()->isFlippedBlocksWritingMode());
    for (int r = 0; r < totalRows; r++) {
        // Set the row's x/y position and width/height.
        if (RenderTableRow* rowRenderer = m_grid[r].rowRenderer) {
            rowRenderer->setLocation(LayoutPoint(0, m_rowPos[r]));
            rowRenderer->setLogicalWidth(logicalWidth());
            rowRenderer->setLogicalHeight(m_rowPos[r + 1] - m_rowPos[r] - vspacing);
            rowRenderer->updateLayerTransform();
        }
        for (int c = 0; c < nEffCols; c++) {
            CellStruct& cs = cellAt(r, c);
            RenderTableCell* cell = cs.primaryCell();
            if (!cell || cs.inColSpan)
                continue;
            rindx = cell->row();
            rHeight = m_rowPos[rindx + cell->rowSpan()] - m_rowPos[rindx] - vspacing;
            // Force percent height children to lay themselves out again.
            // This will cause these children to grow to fill the cell.
            // FIXME: There is still more work to do here to fully match WinIE (should
            // it become necessary to do so). In quirks mode, WinIE behaves like we
            // do, but it will clip the cells that spill out of the table section. In
            // strict mode, Mozilla and WinIE both regrow the table to accommodate the
            // new height of the cell (thus letting the percentages cause growth one
            // time only). We may also not be handling row-spanning cells correctly.
            //
            // Note also the oddity where replaced elements always flex, and yet blocks/tables do
            // not necessarily flex. WinIE is crazy and inconsistent, and we can't hope to
            // match the behavior perfectly, but we'll continue to refine it as we discover new
            // bugs. :)
            bool cellChildrenFlex = false;
            bool flexAllChildren = cell->style()->logicalHeight().isFixed()
                || (!table()->style()->logicalHeight().isAuto() && rHeight != cell->logicalHeight());
            for (RenderObject* o = cell->firstChild(); o; o = o->nextSibling()) {
                if (!o->isText() && o->style()->logicalHeight().isPercent() && (flexAllChildren || o->isReplaced() || (o->isBox() && toRenderBox(o)->scrollsOverflow()))) {
                    // Tables with no sections do not flex.
                    if (!o->isTable() || toRenderTable(o)->hasSections()) {
                        o->setNeedsLayout(true, false);
                        cellChildrenFlex = true;
                    }
                }
            }
            if (HashSet<RenderBox*>* percentHeightDescendants = cell->percentHeightDescendants()) {
                HashSet<RenderBox*>::iterator end = percentHeightDescendants->end();
                for (HashSet<RenderBox*>::iterator it = percentHeightDescendants->begin(); it != end; ++it) {
                    RenderBox* box = *it;
                    if (!box->isReplaced() && !box->scrollsOverflow() && !flexAllChildren)
                        continue;
                    // Mark the chain from the descendant up to the cell dirty.
                    while (box != cell) {
                        if (box->normalChildNeedsLayout())
                            break;
                        box->setChildNeedsLayout(true, false);
                        box = box->containingBlock();
                        ASSERT(box);
                        if (!box)
                            break;
                    }
                    cellChildrenFlex = true;
                }
            }
            if (cellChildrenFlex) {
                cell->setChildNeedsLayout(true, false);
                // Alignment within a cell is based off the calculated
                // height, which becomes irrelevant once the cell has
                // been resized based off its percentage.
                cell->setOverrideHeightFromRowHeight(rHeight);
                cell->layoutIfNeeded();
                // If the baseline moved, we may have to update the data for our row. Find out the new baseline.
                EVerticalAlign va = cell->style()->verticalAlign();
                if (va == BASELINE || va == TEXT_BOTTOM || va == TEXT_TOP || va == SUPER || va == SUB) {
                    LayoutUnit baseline = cell->cellBaselinePosition();
                    if (baseline > cell->borderBefore() + cell->paddingBefore())
                        m_grid[r].baseline = max(m_grid[r].baseline, baseline);
                }
            }
            // Vertical alignment is realized as intrinsic padding above/below
            // the cell's content.
            LayoutUnit oldIntrinsicPaddingBefore = cell->intrinsicPaddingBefore();
            LayoutUnit oldIntrinsicPaddingAfter = cell->intrinsicPaddingAfter();
            LayoutUnit logicalHeightWithoutIntrinsicPadding = cell->logicalHeight() - oldIntrinsicPaddingBefore - oldIntrinsicPaddingAfter;
            LayoutUnit intrinsicPaddingBefore = 0;
            switch (cell->style()->verticalAlign()) {
            case SUB:
            case SUPER:
            case TEXT_TOP:
            case TEXT_BOTTOM:
            case BASELINE: {
                LayoutUnit b = cell->cellBaselinePosition();
                if (b > cell->borderBefore() + cell->paddingBefore())
                    intrinsicPaddingBefore = getBaseline(r) - (b - oldIntrinsicPaddingBefore);
                break;
            }
            case TOP:
                break;
            case MIDDLE:
                intrinsicPaddingBefore = (rHeight - logicalHeightWithoutIntrinsicPadding) / 2;
                break;
            case BOTTOM:
                intrinsicPaddingBefore = rHeight - logicalHeightWithoutIntrinsicPadding;
                break;
            default:
                break;
            }
            LayoutUnit intrinsicPaddingAfter = rHeight - logicalHeightWithoutIntrinsicPadding - intrinsicPaddingBefore;
            cell->setIntrinsicPaddingBefore(intrinsicPaddingBefore);
            cell->setIntrinsicPaddingAfter(intrinsicPaddingAfter);
            LayoutRect oldCellRect(cell->x(), cell->y() , cell->width(), cell->height());
            LayoutPoint cellLocation(0, m_rowPos[rindx]);
            if (!style()->isLeftToRightDirection())
                cellLocation.setX(table()->columnPositions()[nEffCols] - table()->columnPositions()[table()->colToEffCol(cell->col() + cell->colSpan())] + hspacing);
            else
                cellLocation.setX(table()->columnPositions()[c] + hspacing);
            cell->setLogicalLocation(cellLocation);
            view()->addLayoutDelta(oldCellRect.location() - cell->location());
            if (intrinsicPaddingBefore != oldIntrinsicPaddingBefore || intrinsicPaddingAfter != oldIntrinsicPaddingAfter)
                cell->setNeedsLayout(true, false);
            if (!cell->needsLayout() && view()->layoutState()->pageLogicalHeight() && view()->layoutState()->pageLogicalOffset(cell->logicalTop()) != cell->pageLogicalOffset())
                cell->setChildNeedsLayout(true, false);
            cell->layoutIfNeeded();
            // FIXME: Make pagination work with vertical tables.
            if (style()->isHorizontalWritingMode() && view()->layoutState()->pageLogicalHeight() && cell->height() != rHeight)
                cell->setHeight(rHeight); // FIXME: Pagination might have made us change size. For now just shrink or grow the cell to fit without doing a relayout.
            LayoutSize childOffset(cell->location() - oldCellRect.location());
            if (childOffset.width() || childOffset.height()) {
                view()->addLayoutDelta(childOffset);
                // If the child moved, we have to repaint it as well as any floating/positioned
                // descendants. An exception is if we need a layout. In this case, we know we're going to
                // repaint ourselves (and the child) anyway.
                if (!table()->selfNeedsLayout() && cell->checkForRepaintDuringLayout())
                    cell->repaintDuringLayoutIfMoved(oldCellRect);
            }
        }
    }
#ifndef NDEBUG
    setNeedsLayoutIsForbidden(false);
#endif
    ASSERT(!needsLayout());
    setLogicalHeight(m_rowPos[totalRows]);
    unsigned totalCellsCount = nEffCols * totalRows;
    int maxAllowedOverflowingCellsCount = totalCellsCount < gMinTableSizeToUseFastPaintPathWithOverflowingCell ? 0 : gMaxAllowedOverflowingCellRatioForFastPaintPath * totalCellsCount;
#ifndef NDEBUG
    bool hasOverflowingCell = false;
#endif
    // Now that our height has been determined, add in overflow from cells.
    for (int r = 0; r < totalRows; r++) {
        for (int c = 0; c < nEffCols; c++) {
            CellStruct& cs = cellAt(r, c);
            RenderTableCell* cell = cs.primaryCell();
            if (!cell || cs.inColSpan)
                continue;
            // Only add a row-spanning cell's overflow once, from its last row.
            if (r < totalRows - 1 && cell == primaryCellAt(r + 1, c))
                continue;
            addOverflowFromChild(cell);
#ifndef NDEBUG
            hasOverflowingCell |= cell->hasVisualOverflow();
#endif
            if (cell->hasVisualOverflow() && !m_forceSlowPaintPathWithOverflowingCell) {
                m_overflowingCells.add(cell);
                if (m_overflowingCells.size() > maxAllowedOverflowingCellsCount) {
                    // We need to set m_forcesSlowPaintPath only if there is a least one overflowing cells as the hit testing code rely on this information.
                    m_forceSlowPaintPathWithOverflowingCell = true;
                    // The slow path does not make any use of the overflowing cells info, don't hold on to the memory.
                    m_overflowingCells.clear();
                }
            }
        }
    }
    ASSERT(hasOverflowingCell == this->hasOverflowingCell());
    statePusher.pop();
    return height();
}
// Computes half of the widest collapsed border along the section's before
// (logical top) edge, or -1 when every border there is hidden (BHIDDEN).
LayoutUnit RenderTableSection::calcOuterBorderBefore() const
{
    int totalCols = table()->numEffCols();
    if (!m_gridRows || !totalCols)
        return 0;
    unsigned borderWidth = 0;
    const BorderValue& sb = style()->borderBefore();
    if (sb.style() == BHIDDEN)
        return -1;
    if (sb.style() > BHIDDEN)
        borderWidth = sb.width();
    // First row's border also participates in the collapse.
    const BorderValue& rb = firstChild()->style()->borderBefore();
    if (rb.style() == BHIDDEN)
        return -1;
    if (rb.style() > BHIDDEN && rb.width() > borderWidth)
        borderWidth = rb.width();
    bool allHidden = true;
    for (int c = 0; c < totalCols; c++) {
        const CellStruct& current = cellAt(0, c);
        if (current.inColSpan || !current.hasCells())
            continue;
        const BorderValue& cb = current.primaryCell()->style()->borderBefore(); // FIXME: Make this work with perpendicular and flipped cells.
        // FIXME: Don't repeat for the same col group
        RenderTableCol* colGroup = table()->colElement(c);
        if (colGroup) {
            const BorderValue& gb = colGroup->style()->borderBefore();
            if (gb.style() == BHIDDEN || cb.style() == BHIDDEN)
                continue;
            allHidden = false;
            if (gb.style() > BHIDDEN && gb.width() > borderWidth)
                borderWidth = gb.width();
            if (cb.style() > BHIDDEN && cb.width() > borderWidth)
                borderWidth = cb.width();
        } else {
            if (cb.style() == BHIDDEN)
                continue;
            allHidden = false;
            if (cb.style() > BHIDDEN && cb.width() > borderWidth)
                borderWidth = cb.width();
        }
    }
    if (allHidden)
        return -1;
    return borderWidth / 2;
}
// Computes half (rounded up) of the widest collapsed border along the
// section's after (logical bottom) edge, or -1 when all borders are hidden.
LayoutUnit RenderTableSection::calcOuterBorderAfter() const
{
    int totalCols = table()->numEffCols();
    if (!m_gridRows || !totalCols)
        return 0;
    unsigned borderWidth = 0;
    const BorderValue& sb = style()->borderAfter();
    if (sb.style() == BHIDDEN)
        return -1;
    if (sb.style() > BHIDDEN)
        borderWidth = sb.width();
    // Last row's border also participates in the collapse.
    const BorderValue& rb = lastChild()->style()->borderAfter();
    if (rb.style() == BHIDDEN)
        return -1;
    if (rb.style() > BHIDDEN && rb.width() > borderWidth)
        borderWidth = rb.width();
    bool allHidden = true;
    for (int c = 0; c < totalCols; c++) {
        const CellStruct& current = cellAt(m_gridRows - 1, c);
        if (current.inColSpan || !current.hasCells())
            continue;
        const BorderValue& cb = current.primaryCell()->style()->borderAfter(); // FIXME: Make this work with perpendicular and flipped cells.
        // FIXME: Don't repeat for the same col group
        RenderTableCol* colGroup = table()->colElement(c);
        if (colGroup) {
            const BorderValue& gb = colGroup->style()->borderAfter();
            if (gb.style() == BHIDDEN || cb.style() == BHIDDEN)
                continue;
            allHidden = false;
            if (gb.style() > BHIDDEN && gb.width() > borderWidth)
                borderWidth = gb.width();
            if (cb.style() > BHIDDEN && cb.width() > borderWidth)
                borderWidth = cb.width();
        } else {
            if (cb.style() == BHIDDEN)
                continue;
            allHidden = false;
            if (cb.style() > BHIDDEN && cb.width() > borderWidth)
                borderWidth = cb.width();
        }
    }
    if (allHidden)
        return -1;
    return (borderWidth + 1) / 2;
}
// Computes half of the widest collapsed border along the section's start
// (logical left in LTR) edge, or -1 when all borders there are hidden.
LayoutUnit RenderTableSection::calcOuterBorderStart() const
{
    int totalCols = table()->numEffCols();
    if (!m_gridRows || !totalCols)
        return 0;
    unsigned borderWidth = 0;
    const BorderValue& sb = style()->borderStart();
    if (sb.style() == BHIDDEN)
        return -1;
    if (sb.style() > BHIDDEN)
        borderWidth = sb.width();
    if (RenderTableCol* colGroup = table()->colElement(0)) {
        const BorderValue& gb = colGroup->style()->borderStart();
        if (gb.style() == BHIDDEN)
            return -1;
        if (gb.style() > BHIDDEN && gb.width() > borderWidth)
            borderWidth = gb.width();
    }
    bool allHidden = true;
    for (int r = 0; r < m_gridRows; r++) {
        const CellStruct& current = cellAt(r, 0);
        if (!current.hasCells())
            continue;
        // FIXME: Don't repeat for the same cell
        const BorderValue& cb = current.primaryCell()->style()->borderStart(); // FIXME: Make this work with perpendicular and flipped cells.
        const BorderValue& rb = current.primaryCell()->parent()->style()->borderStart();
        if (cb.style() == BHIDDEN || rb.style() == BHIDDEN)
            continue;
        allHidden = false;
        if (cb.style() > BHIDDEN && cb.width() > borderWidth)
            borderWidth = cb.width();
        if (rb.style() > BHIDDEN && rb.width() > borderWidth)
            borderWidth = rb.width();
    }
    if (allHidden)
        return -1;
    // Round toward the outside in the direction the border sits on.
    return (borderWidth + (table()->style()->isLeftToRightDirection() ? 0 : 1)) / 2;
}
// Computes half of the widest collapsed border along the section's end
// (logical right in LTR) edge, or -1 when all borders there are hidden.
LayoutUnit RenderTableSection::calcOuterBorderEnd() const
{
    int totalCols = table()->numEffCols();
    if (!m_gridRows || !totalCols)
        return 0;
    unsigned borderWidth = 0;
    const BorderValue& sb = style()->borderEnd();
    if (sb.style() == BHIDDEN)
        return -1;
    if (sb.style() > BHIDDEN)
        borderWidth = sb.width();
    if (RenderTableCol* colGroup = table()->colElement(totalCols - 1)) {
        const BorderValue& gb = colGroup->style()->borderEnd();
        if (gb.style() == BHIDDEN)
            return -1;
        if (gb.style() > BHIDDEN && gb.width() > borderWidth)
            borderWidth = gb.width();
    }
    bool allHidden = true;
    for (int r = 0; r < m_gridRows; r++) {
        const CellStruct& current = cellAt(r, totalCols - 1);
        if (!current.hasCells())
            continue;
        // FIXME: Don't repeat for the same cell
        const BorderValue& cb = current.primaryCell()->style()->borderEnd(); // FIXME: Make this work with perpendicular and flipped cells.
        const BorderValue& rb = current.primaryCell()->parent()->style()->borderEnd();
        if (cb.style() == BHIDDEN || rb.style() == BHIDDEN)
            continue;
        allHidden = false;
        if (cb.style() > BHIDDEN && cb.width() > borderWidth)
            borderWidth = cb.width();
        if (rb.style() > BHIDDEN && rb.width() > borderWidth)
            borderWidth = rb.width();
    }
    if (allHidden)
        return -1;
    // Round toward the outside in the direction the border sits on.
    return (borderWidth + (table()->style()->isLeftToRightDirection() ? 1 : 0)) / 2;
}
// Recomputes and caches the section's share of the collapsed outer border on
// all four logical edges. Called when borders may have changed.
void RenderTableSection::recalcOuterBorder()
{
    m_outerBorderBefore = calcOuterBorderBefore();
    m_outerBorderAfter = calcOuterBorderAfter();
    m_outerBorderStart = calcOuterBorderStart();
    m_outerBorderEnd = calcOuterBorderEnd();
}
// Returns the baseline of the section's first row, in section coordinates,
// or -1 when the section has no rows (and no cell yields a usable baseline).
LayoutUnit RenderTableSection::firstLineBoxBaseline() const
{
    if (!m_gridRows)
        return -1;
    // Prefer the baseline computed during layout for the first row.
    LayoutUnit firstLineBaseline = m_grid[0].baseline;
    if (firstLineBaseline)
        return firstLineBaseline + m_rowPos[0];
    // No stored baseline: fall back to the lowest content-bottom among the
    // first row's cells.
    firstLineBaseline = -1;
    Row* firstRow = m_grid[0].row;
    for (size_t i = 0; i < firstRow->size(); ++i) {
        CellStruct& cs = firstRow->at(i);
        RenderTableCell* cell = cs.primaryCell();
        if (cell)
            firstLineBaseline = max(firstLineBaseline, cell->logicalTop() + cell->paddingBefore() + cell->borderBefore() + cell->contentLogicalHeight());
    }
    return firstLineBaseline;
}
// Paint entry point for the section: bails out on dirty layout or an empty
// grid, applies the overflow clip when needed, and delegates to paintObject().
void RenderTableSection::paint(PaintInfo& paintInfo, const LayoutPoint& paintOffset)
{
    // put this back in when all layout tests can handle it
    // ASSERT(!needsLayout());
    // avoid crashing on bugs that cause us to paint with dirty layout
    if (needsLayout())
        return;
    unsigned totalRows = m_gridRows;
    unsigned totalCols = table()->columns().size();
    if (!totalRows || !totalCols)
        return;
    LayoutPoint adjustedPaintOffset = paintOffset + location();
    PaintPhase phase = paintInfo.phase;
    // pushContentsClip may modify paintInfo; the matching pop restores it.
    bool pushedClip = pushContentsClip(paintInfo, adjustedPaintOffset);
    paintObject(paintInfo, adjustedPaintOffset);
    if (pushedClip)
        popContentsClip(paintInfo, phase, adjustedPaintOffset);
}
static inline bool compareCellPositions(RenderTableCell* elem1, RenderTableCell* elem2)
{
return elem1->row() < elem2->row();
}
// This comparison is used only when we have overflowing cells as we have an unsorted array to sort. We thus need
// to sort both on rows and columns to properly repaint.
static inline bool compareCellPositionsWithOverflowingCells(RenderTableCell* elem1, RenderTableCell* elem2)
{
if (elem1->row() != elem2->row())
return elem1->row() < elem2->row();
return elem1->col() < elem2->col();
}
// Paints one cell, including the stack of backgrounds behind it (column
// group, column, row group, row) during the background phases. The cell's
// own content is only painted here when neither the cell nor its row has a
// self-painting layer (those paint themselves), except for collapsed borders.
void RenderTableSection::paintCell(RenderTableCell* cell, PaintInfo& paintInfo, const LayoutPoint& paintOffset)
{
    LayoutPoint cellPoint = flipForWritingMode(cell, paintOffset, ParentToChildFlippingAdjustment);
    PaintPhase paintPhase = paintInfo.phase;
    RenderTableRow* row = toRenderTableRow(cell->parent());
    if (paintPhase == PaintPhaseBlockBackground || paintPhase == PaintPhaseChildBlockBackground) {
        // We need to handle painting a stack of backgrounds. This stack (from bottom to top) consists of
        // the column group, column, row group, row, and then the cell.
        RenderObject* col = table()->colElement(cell->col());
        RenderObject* colGroup = 0;
        if (col && col->parent()->style()->display() == TABLE_COLUMN_GROUP)
            colGroup = col->parent();
        // Column groups and columns first.
        // FIXME: Columns and column groups do not currently support opacity, and they are being painted "too late" in
        // the stack, since we have already opened a transparency layer (potentially) for the table row group.
        // Note that we deliberately ignore whether or not the cell has a layer, since these backgrounds paint "behind" the
        // cell.
        cell->paintBackgroundsBehindCell(paintInfo, cellPoint, colGroup);
        cell->paintBackgroundsBehindCell(paintInfo, cellPoint, col);
        // Paint the row group next.
        cell->paintBackgroundsBehindCell(paintInfo, cellPoint, this);
        // Paint the row next, but only if it doesn't have a layer. If a row has a layer, it will be responsible for
        // painting the row background for the cell.
        if (!row->hasSelfPaintingLayer())
            cell->paintBackgroundsBehindCell(paintInfo, cellPoint, row);
    }
    if ((!cell->hasSelfPaintingLayer() && !row->hasSelfPaintingLayer()) || paintInfo.phase == PaintPhaseCollapsedTableBorders)
        cell->paint(paintInfo, cellPoint);
}
// Paints the section's cells. Computes the dirty row/column range from the
// repaint rect (binary search over the cached positions), then either walks
// the grid directly (fast path) or collects, de-duplicates, sorts and paints
// the cells individually (slow path for spanning/overflowing cells).
void RenderTableSection::paintObject(PaintInfo& paintInfo, const LayoutPoint& paintOffset)
{
    // Check which rows and cols are visible and only paint these.
    unsigned totalRows = m_gridRows;
    unsigned totalCols = table()->columns().size();
    PaintPhase paintPhase = paintInfo.phase;
    // Outlines can paint outside a cell's bounds; widen the dirty range.
    LayoutUnit os = 2 * maximalOutlineSize(paintPhase);
    unsigned startrow = 0;
    unsigned endrow = totalRows;
    // Bring the repaint rect into the section's local (possibly flipped)
    // coordinate space before comparing against m_rowPos / columnPositions.
    LayoutRect localRepaintRect = paintInfo.rect;
    localRepaintRect.moveBy(-paintOffset);
    if (style()->isFlippedBlocksWritingMode()) {
        if (style()->isHorizontalWritingMode())
            localRepaintRect.setY(height() - localRepaintRect.maxY());
        else
            localRepaintRect.setX(width() - localRepaintRect.maxX());
    }
    if (!m_forceSlowPaintPathWithOverflowingCell) {
        LayoutUnit before = (style()->isHorizontalWritingMode() ? localRepaintRect.y() : localRepaintRect.x()) - os;
        // binary search to find a row
        startrow = std::lower_bound(m_rowPos.begin(), m_rowPos.end(), before) - m_rowPos.begin();
        // The binary search above gives us the first row with
        // a y position >= the top of the paint rect. Thus, the previous
        // may need to be repainted as well.
        if (startrow == m_rowPos.size() || (startrow > 0 && (m_rowPos[startrow] > before)))
            --startrow;
        LayoutUnit after = (style()->isHorizontalWritingMode() ? localRepaintRect.maxY() : localRepaintRect.maxX()) + os;
        endrow = std::lower_bound(m_rowPos.begin(), m_rowPos.end(), after) - m_rowPos.begin();
        if (endrow == m_rowPos.size())
            --endrow;
        // Make sure the table's before-border area still gets repainted.
        if (!endrow && m_rowPos[0] - table()->outerBorderBefore() <= after)
            ++endrow;
    }
    unsigned startcol = 0;
    unsigned endcol = totalCols;
    // FIXME: Implement RTL.
    if (!m_forceSlowPaintPathWithOverflowingCell && style()->isLeftToRightDirection()) {
        LayoutUnit start = (style()->isHorizontalWritingMode() ? localRepaintRect.x() : localRepaintRect.y()) - os;
        Vector<LayoutUnit>& columnPos = table()->columnPositions();
        startcol = std::lower_bound(columnPos.begin(), columnPos.end(), start) - columnPos.begin();
        if ((startcol == columnPos.size()) || (startcol > 0 && (columnPos[startcol] > start)))
            --startcol;
        LayoutUnit end = (style()->isHorizontalWritingMode() ? localRepaintRect.maxX() : localRepaintRect.maxY()) + os;
        endcol = std::lower_bound(columnPos.begin(), columnPos.end(), end) - columnPos.begin();
        if (endcol == columnPos.size())
            --endcol;
        if (!endcol && columnPos[0] - table()->outerBorderStart() <= end)
            ++endcol;
    }
    if (startcol < endcol) {
        if (!m_hasMultipleCellLevels && !m_overflowingCells.size()) {
            // Draw the dirty cells in the order that they appear.
            for (unsigned r = startrow; r < endrow; r++) {
                for (unsigned c = startcol; c < endcol; c++) {
                    CellStruct& current = cellAt(r, c);
                    RenderTableCell* cell = current.primaryCell();
                    // Skip grid slots already painted for a spanning cell.
                    if (!cell || (r > startrow && primaryCellAt(r - 1, c) == cell) || (c > startcol && primaryCellAt(r, c - 1) == cell))
                        continue;
                    paintCell(cell, paintInfo, paintOffset);
                }
            }
        } else {
            // The overflowing cells should be scarce to avoid adding a lot of cells to the HashSet.
            ASSERT(m_overflowingCells.size() < totalRows * totalCols * gMaxAllowedOverflowingCellRatioForFastPaintPath);
            // To make sure we properly repaint the section, we repaint all the overflowing cells that we collected.
            Vector<RenderTableCell*> cells;
            copyToVector(m_overflowingCells, cells);
            // Track spanning cells so each is appended only once.
            HashSet<RenderTableCell*> spanningCells;
            for (unsigned r = startrow; r < endrow; r++) {
                for (unsigned c = startcol; c < endcol; c++) {
                    CellStruct& current = cellAt(r, c);
                    if (!current.hasCells())
                        continue;
                    for (unsigned i = 0; i < current.cells.size(); ++i) {
                        if (m_overflowingCells.contains(current.cells[i]))
                            continue;
                        if (current.cells[i]->rowSpan() > 1 || current.cells[i]->colSpan() > 1) {
                            if (spanningCells.contains(current.cells[i]))
                                continue;
                            spanningCells.add(current.cells[i]);
                        }
                        cells.append(current.cells[i]);
                    }
                }
            }
            // Sort the dirty cells by paint order.
            if (!m_overflowingCells.size())
                std::stable_sort(cells.begin(), cells.end(), compareCellPositions);
            else
                std::sort(cells.begin(), cells.end(), compareCellPositionsWithOverflowingCells);
            int size = cells.size();
            // Paint the cells.
            for (int i = 0; i < size; ++i)
                paintCell(cells[i], paintInfo, paintOffset);
        }
    }
}
// A background image used by this section changed; repaint the whole section.
void RenderTableSection::imageChanged(WrappedImagePtr, const IntRect*)
{
    // FIXME: Examine cells and repaint only the rect the image paints in.
    repaint();
}
// Rebuilds the cell grid from scratch by walking the section's row and cell
// renderers, then schedules a relayout. Resets m_needsCellRecalc.
void RenderTableSection::recalcCells()
{
    m_cCol = 0;
    m_cRow = -1;
    clearGrid();
    m_gridRows = 0;
    for (RenderObject* row = firstChild(); row; row = row->nextSibling()) {
        if (row->isTableRow()) {
            m_cRow++;
            m_cCol = 0;
            // ensureRows can fail on allocation; stop rebuilding if it does.
            if (!ensureRows(m_cRow + 1))
                break;
            RenderTableRow* tableRow = toRenderTableRow(row);
            m_grid[m_cRow].rowRenderer = tableRow;
            setRowLogicalHeightToRowStyleLogicalHeightIfNotRelative(&m_grid[m_cRow]);
            // Re-add every cell of the row into the grid.
            for (RenderObject* cell = row->firstChild(); cell; cell = cell->nextSibling()) {
                if (cell->isTableCell())
                    addCell(toRenderTableCell(cell), tableRow);
            }
        }
    }
    m_needsCellRecalc = false;
    setNeedsLayout(true);
}
// Marks this section's grid as stale and tells the table so the recalc
// happens on the next layout.
void RenderTableSection::setNeedsCellRecalc()
{
    m_needsCellRecalc = true;
    if (RenderTable* t = table())
        t->setNeedsSectionRecalc();
}
// Frees every row's heap-allocated cell array. The grid vector itself and
// m_gridRows are left for the caller to reset.
void RenderTableSection::clearGrid()
{
    for (int rowIndex = m_gridRows - 1; rowIndex >= 0; --rowIndex)
        delete m_grid[rowIndex].row;
}
// Returns the number of columns this section actually uses: one past the
// highest effective-column index occupied by any cell (or colspan overflow).
int RenderTableSection::numColumns() const
{
    int result = 0;
    for (int r = 0; r < m_gridRows; ++r) {
        // Start at the current maximum; earlier columns cannot raise it.
        for (int c = result; c < table()->numEffCols(); ++c) {
            const CellStruct& cell = cellAt(r, c);
            if (cell.hasCells() || cell.inColSpan)
                result = c;
        }
    }
    return result + 1;
}
// Grows every grid row so it has at least pos + 1 cell slots; new slots are
// default-constructed (empty) CellStructs.
void RenderTableSection::appendColumn(int pos)
{
    for (int row = 0; row < m_gridRows; ++row)
        m_grid[row].row->resize(pos + 1);
}
// Mirrors RenderTable's column split in this section's grid: inserts a new
// slot after effective column |pos| and, when a cell spans the split point,
// extends that cell's presence into the new slot with an adjusted inColSpan.
void RenderTableSection::splitColumn(int pos, int first)
{
    // Keep the current-column cursor pointing at the same logical column.
    if (m_cCol > pos)
        m_cCol++;
    for (int row = 0; row < m_gridRows; ++row) {
        Row& r = *m_grid[row].row;
        r.insert(pos + 1, CellStruct());
        if (r[pos].hasCells()) {
            // The occupying cell now also covers the new slot.
            r[pos + 1].cells.append(r[pos].cells);
            RenderTableCell* cell = r[pos].primaryCell();
            ASSERT(cell);
            // Remaining span of the cell past this grid slot.
            int colleft = cell->colSpan() - r[pos].inColSpan;
            if (first > colleft)
                r[pos + 1].inColSpan = 0;
            else
                r[pos + 1].inColSpan = first + r[pos].inColSpan;
        } else {
            r[pos + 1].inColSpan = 0;
        }
    }
}
// Hit Testing
// Hit testing for a table section. Sections themselves are never hit; the
// point is forwarded to cells. With overflowing cells we must walk children
// in reverse paint order; otherwise the target cell is found directly via
// binary search over the cached row/column positions.
bool RenderTableSection::nodeAtPoint(const HitTestRequest& request, HitTestResult& result, const LayoutPoint& pointInContainer, const LayoutPoint& accumulatedOffset, HitTestAction action)
{
    // If we have no children then we have nothing to do.
    if (!firstChild())
        return false;
    // Table sections cannot ever be hit tested. Effectively they do not exist.
    // Just forward to our children always.
    LayoutPoint adjustedLocation = accumulatedOffset + location();
    if (hasOverflowClip() && !overflowClipRect(adjustedLocation).intersects(result.rectForPoint(pointInContainer)))
        return false;
    if (hasOverflowingCell()) {
        // Slow path: content may extend outside its grid slot, so test every
        // child, front-most (last) first.
        for (RenderObject* child = lastChild(); child; child = child->previousSibling()) {
            // FIXME: We have to skip over inline flows, since they can show up inside table rows
            // at the moment (a demoted inline <form> for example). If we ever implement a
            // table-specific hit-test method (which we should do for performance reasons anyway),
            // then we can remove this check.
            if (child->isBox() && !toRenderBox(child)->hasSelfPaintingLayer()) {
                LayoutPoint childPoint = flipForWritingMode(toRenderBox(child), adjustedLocation, ParentToChildFlippingAdjustment);
                if (child->nodeAtPoint(request, result, pointInContainer, childPoint, action)) {
                    updateHitTestResult(result, toLayoutPoint(pointInContainer - childPoint));
                    return true;
                }
            }
        }
        return false;
    }
    // Fast path: map the point into this section's (possibly flipped) local
    // coordinates, then locate the row and column containing it.
    LayoutPoint location = pointInContainer - toLayoutSize(adjustedLocation);
    if (style()->isFlippedBlocksWritingMode()) {
        if (style()->isHorizontalWritingMode())
            location.setY(height() - location.y());
        else
            location.setX(width() - location.x());
    }
    LayoutUnit offsetInColumnDirection = style()->isHorizontalWritingMode() ? location.y() : location.x();
    // Find the first row that starts after offsetInColumnDirection.
    unsigned nextRow = std::upper_bound(m_rowPos.begin(), m_rowPos.end(), offsetInColumnDirection) - m_rowPos.begin();
    if (nextRow == m_rowPos.size())
        return false;
    // Now set hitRow to the index of the hit row, or 0.
    unsigned hitRow = nextRow > 0 ? nextRow - 1 : 0;
    Vector<LayoutUnit>& columnPos = table()->columnPositions();
    LayoutUnit offsetInRowDirection = style()->isHorizontalWritingMode() ? location.x() : location.y();
    // Column positions are stored left-to-right; mirror the offset for RTL.
    if (!style()->isLeftToRightDirection())
        offsetInRowDirection = columnPos[columnPos.size() - 1] - offsetInRowDirection;
    unsigned nextColumn = std::lower_bound(columnPos.begin(), columnPos.end(), offsetInRowDirection) - columnPos.begin();
    if (nextColumn == columnPos.size())
        return false;
    unsigned hitColumn = nextColumn > 0 ? nextColumn - 1 : 0;
    CellStruct& current = cellAt(hitRow, hitColumn);
    // If the cell is empty, there's nothing to do
    if (!current.hasCells())
        return false;
    // Test the stacked cells in this slot from top-most down.
    for (int i = current.cells.size() - 1; i >= 0; --i) {
        RenderTableCell* cell = current.cells[i];
        LayoutPoint cellPoint = flipForWritingMode(cell, adjustedLocation, ParentToChildFlippingAdjustment);
        if (static_cast<RenderObject*>(cell)->nodeAtPoint(request, result, pointInContainer, cellPoint, action)) {
            updateHitTestResult(result, toLayoutPoint(pointInContainer - cellPoint));
            return true;
        }
    }
    return false;
}
} // namespace WebCore
| Treeeater/WebPermission | rendering/RenderTableSection.cpp | C++ | bsd-2-clause | 49,679 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os
import logging
import traceback
import mallet_lda
class MalletTagTopics(mallet_lda.MalletLDA):

    """
    Topic modeling with separation based on tags.

    Each document is labeled with the first tag whose item-ID set contains
    the document's itemID; documents matching no tag are dropped from both
    the metadata dict and the file list.
    """

    def _basic_params(self):
        """Set the default parameters for this processor."""
        self.name = 'mallet_lda_tags'
        self.categorical = False
        self.template_name = 'mallet_lda'
        self.dry_run = False
        self.topics = 50
        # Extra args, when present, point at a DFR data directory.
        self.dfr = len(self.extra_args) > 0
        if self.dfr:
            self.dfr_dir = self.extra_args[0]

    def post_setup(self):
        """Assign a tag label to every document, dropping unmatched ones."""
        if self.named_args is not None:
            if 'tags' in self.named_args:
                self.tags = self.named_args['tags']
                # Iterate over a snapshot of the keys: entries may be deleted
                # inside the loop, which would invalidate a live dict view
                # (RuntimeError on Python 3; .keys() happened to copy on
                # Python 2 only).
                for filename in list(self.metadata.keys()):
                    item_id = int(self.metadata[filename]['itemID'])
                    # items() (rather than iteritems()) works on both
                    # Python 2 and Python 3.
                    my_tags = [tag for (tag, ids) in self.tags.items()
                               if item_id in ids]
                    if len(my_tags) > 0:
                        self.metadata[filename]['label'] = my_tags[0]
                    else:
                        del self.metadata[filename]
                        self.files.remove(filename)
if __name__ == '__main__':
    try:
        # Run the tagged-topic-model processor once, without progress bars.
        processor = MalletTagTopics(track_progress=False)
        processor.process()
    except:
        # Log the full traceback and exit quietly, like the other processors.
        logging.error(traceback.format_exc())
| ChristianFrisson/papermachines | chrome/content/papermachines/processors/mallet_lda_tags.py | Python | bsd-2-clause | 1,353 |
# Homebrew-Cask definition for KanmusuMemory, a capture/recording tool for
# the browser game Kantai Collection.
cask 'kanmusmemory' do
  version '0.15'
  sha256 'af64ae0846ab0b4366693bc602a81ba7e626bafee820862594c4bcbf92acfcef'
  # The download URL embeds the version number declared above.
  url "http://relog.xii.jp/download/kancolle/KanmusuMemory-#{version}-mac.dmg"
  appcast 'https://github.com/ioriayane/KanmusuMemory/releases.atom',
          :checkpoint => 'f8cddbd8afc99bff82204851ed915bf9f8246499cc870ee7481ec24135e29faa'
  name 'KanmusuMemory'
  homepage 'http://relog.xii.jp/mt5r/2013/08/post-349.html'
  license :apache
  app 'KanmusuMemory.app'
end
| williamboman/homebrew-cask | Casks/kanmusmemory.rb | Ruby | bsd-2-clause | 490 |
# Homebrew formula for zmap, a fast single-packet network scanner.
class Zmap < Formula
  desc "Network scanner for Internet-wide network studies"
  homepage "https://zmap.io"
  url "https://github.com/zmap/zmap/archive/v2.1.1.tar.gz"
  sha256 "29627520c81101de01b0213434adb218a9f1210bfd3f2dcfdfc1f975dbce6399"
  revision 1
  head "https://github.com/zmap/zmap.git"
  bottle do
    rebuild 1
    sha256 "af12dfa471443be095ccbbb1d0fb8f706e966786d8526b2190f2cfe78f28550c" => :mojave
    sha256 "d64ac689f0e80bc125a5e4899cc044395b0ba5c75ad365f65a3f6f8a62520137" => :high_sierra
    sha256 "233f9e5e6964477295c0e9edbf607cd71571155510704124f374934f97eff55c" => :sierra
  end
  depends_on "byacc" => :build
  depends_on "cmake" => :build
  depends_on "gengetopt" => :build
  depends_on "pkg-config" => :build
  depends_on "gmp"
  depends_on "json-c"
  depends_on "libdnet"
  def install
    # Point zmap's hard-coded /etc references at Homebrew's etc prefix.
    inreplace ["conf/zmap.conf", "src/zmap.c", "src/zopt.ggo.in"], "/etc", etc
    system "cmake", ".", *std_cmake_args, "-DENABLE_DEVELOPMENT=OFF",
                                          "-DRESPECT_INSTALL_PREFIX_CONFIG=ON"
    system "make"
    system "make", "install"
  end
  test do
    # Smoke test: the binary runs and reports its version.
    system "#{sbin}/zmap", "--version"
  end
end
| jdubois/homebrew-core | Formula/zmap.rb | Ruby | bsd-2-clause | 1,142 |
import sys
import os
import subprocess
import string
# Set of printable ASCII characters; used to strip non-printable bytes below.
printable = set(string.printable)
def sanitize(txt):
    """Return *txt* with every non-printable character removed."""
    return ''.join(ch for ch in txt if ch in printable)
def traverse(t, outfile):
    # Emit this node as a "code<TAB>description" line, then recurse so the
    # whole ICD9 tree is written depth-first to the names file.
    print>>outfile, sanitize(t.code+'\t'+t.description)
    for c in t.children:
        traverse(c, outfile)
def getEdges(t, outfile):
    # Emit one "parent<TAB>child" line per tree edge, depth-first.
    for c in t.children:
        print >>outfile, sanitize(t.code+'\t'+c.code)
        getEdges(c, outfile)
# Main script: fetch the sirrice/icd9 repository, load the code tree, and
# write the node-name and edge files used downstream.
print 'cloning github repository sirrice/icd9.git'
subprocess.call('git clone https://github.com/sirrice/icd9.git', shell=1)
# Make the freshly cloned package importable.
sys.path.append('icd9')
from icd9 import ICD9
tree = ICD9('icd9/codes.json')
toplevelnodes = tree.children
print 'creating name file'
outfile = file('code.names', 'w')
traverse(tree, outfile)
outfile.close()
print 'creating edges file'
outfile = file('code.edges', 'w')
getEdges(tree, outfile)
outfile.close()
print 'cleaning up'
# Clean-up of the clone is intentionally disabled.
#os.chdir('..')
#subprocess.call('rm -rf icd9', shell=1)
| yhalpern/anchorExplorer | examples/ICD9/load_ICD9_structure.py | Python | bsd-2-clause | 971 |
<?php
/*
 * Theme menu for the "guildwars" theme. Overrides link rendering for menu
 * section 3 and the logged-in / logged-out sidebar widgets; everything else
 * is inherited from btThemeMenu.
 */
class GuildWarsMenu extends btThemeMenu {

    public function __construct($sqlConnection) {
        parent::__construct("guildwars", $sqlConnection);
    }

    /*
     * Renders one menu link. Section-3 links are drawn inline with custom
     * markup; other sections fall through to the parent renderer.
     */
    public function displayLink() {

        if($this->intMenuSection == 3) {

            $menuLinkInfo = $this->menuItemObj->objLink->get_info();
            $checkURL = parse_url($menuLinkInfo['link']);

            // Relative links (no URL scheme) are prefixed with the site root.
            // BUGFIX: the original used "=" (assignment) instead of "==",
            // which made the condition always true and prefixed every link.
            if(!isset($checkURL['scheme']) || $checkURL['scheme'] == "") {
                $menuLinkInfo['link'] = MAIN_ROOT.$menuLinkInfo['link'];
            }

            // BUGFIX: $menuItemInfo was never defined; use the link's own
            // name for the anchor text (assumes get_info() returns 'name').
            echo "<div style='display: inline-block; vertical-align: middle; height: 50px; padding-right: 20px'><a href='".$menuLinkInfo['link']."' target='".$menuLinkInfo['linktarget']."'>".$menuLinkInfo['name']."</a></div>";

        }
        else {
            parent::displayLink();
        }

    }

    /*
     * Renders a menu category header at the top, or a simple spacer when
     * the category appears elsewhere.
     */
    public function displayMenuCategory($loc="top") {

        $menuCatInfo = $this->menuCatObj->get_info();

        if($loc == "top") {
            echo $this->getHeaderCode($menuCatInfo);
        }
        else {
            echo "<br>";
        }

    }

    // Login form shown to visitors who are not signed in.
    public function displayLoggedOut() {

        echo "
            <form action='".MAIN_ROOT."login.php' method='post' style='padding: 0px; margin: 0px'>
            <div class='usernameIMG'></div>
            <div class='usernameTextDiv'>
                <input name='user' type='text' class='loginTextbox'>
            </div>
            <div class='passwordIMG'></div>
            <div class='passwordTextDiv'>
                <input name='pass' type='password' class='loginTextbox'>
            </div>
            <div class='rememberMeCheckBox' id='fakeRememberMe'></div>
            <div class='rememberMeIMG'></div>
            <div id='fakeSubmit' class='loginButton'></div>
            <input type='checkbox' name='rememberme' value='1' id='realRememberMe' style='display: none'>
            <input type='submit' name='submit' id='realSubmit' style='display: none' value='Log In'>
            </form>
        ";

    }

    // Account panel shown to signed-in members.
    public function displayLoggedIn() {

        echo "
            <div class='loggedInIMG'></div>
            <div class='loggedInProfilePic'>".$this->memberObj->getProfilePic("45px", "60px")."</div>
            <div class='loggedInMemberNameIMG'></div>
            <div class='loggedInMemberNameText'>
                ".$this->memberObj->getMemberLink(array("color" => "false"))."
            </div>
            <div class='loggedInRankIMG'></div>
            <div class='loggedInRankText'>
                ".$this->data['memberRank']."
            </div>
            <div class='loggedInMemberOptionsSection'>
                <div class='loggedInMemberOptionsIMG'></div>
                <div class='loggedInMemberOptions'>
                    <a href='".MAIN_ROOT."members'>My Account</a> - ".$this->data['pmLink']." - <a href='".MAIN_ROOT."members/signout.php'>Sign Out</a><br>
                </div>
            </div>
        ";

    }

}
?> | bluethrust/cs4 | themes/guildwars/guildwarsmenu.php | PHP | bsd-2-clause | 2,632 |
# Homebrew-Cask definition for the Presstube "Chunkulus" screen saver.
cask :v1 => 'chunkulus' do
  # Upstream publishes no versioned artifacts, so checksums are not pinned.
  version :latest
  sha256 :no_check

  url 'http://presstube.com/screensavers/presstube-chunkulus-mac.zip'
  homepage 'http://presstube.com/blog/2011/chunkulus/'
  license :unknown # todo: change license and remove this comment; ':unknown' is a machine-generated placeholder

  screen_saver 'presstube-chunkulus.app/Contents/Resources/Presstube - Chunkulus.saver'

  # Rename the bundle so it shows up with a readable name in System Preferences.
  postflight do
    system '/usr/libexec/PlistBuddy', '-c', 'Set :CFBundleName Chunkulus (Presstube)', "#{staged_path}/presstube-chunkulus.app/Contents/Resources/Presstube - Chunkulus.saver/Contents/Info.plist"
  end

  caveats <<-EOS.undent
    #{token} requires Adobe Air, available via
    brew cask install adobe-air
  EOS
end
| nelsonjchen/homebrew-cask | Casks/chunkulus.rb | Ruby | bsd-2-clause | 730 |
/* Copyright (c) Citrix Systems, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms,
* with or without modification, are permitted provided
* that the following conditions are met:
*
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the
* following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the
* following disclaimer in the documentation and/or other
* materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.IO;
using XenAdmin.Network;
using XenAdmin.Core;
using XenAPI;
using XenAdmin.Actions;
using XenAdmin;
using System.Linq;
using System.Globalization;
using System.Xml;
namespace XenServerHealthCheck
{
    /// <summary>
    /// Collects a XenServer "server status report" (bugtool output) from every
    /// host in a pool connection and zips the result into a single archive.
    /// </summary>
    public class XenServerHealthCheckBugTool
    {
        private static readonly log4net.ILog log = log4net.LogManager.GetLogger(System.Reflection.MethodBase.GetCurrentMethod().DeclaringType);

        // Report sections that are never included in a Health Check report.
        private static readonly List<string> reportExcluded =
            new List<string>
            {
                "blobs",
                "vncterm",
                "xapi-debug"
            };

        // Report sections that take a verbosity level (appended as "name:N"
        // on Dundee or newer, where xen-bugtool supports verbosity).
        private static readonly Dictionary<string, int> reportWithVerbosity =
            new Dictionary<string, int>
            {
                {"host-crashdump-logs", 2},
                {"system-logs", 2},
                {"tapdisk-logs", 2},
                {"xapi", 2},
                {"xcp-rrdd-plugins", 2},
                {"xenserver-install", 2},
                {"xenserver-logs", 2}
            };

        // Full path of the zip archive this instance will produce.
        public readonly string outputFile;

        /// <summary>
        /// Chooses a unique, timestamped output path in a fresh temp folder.
        /// </summary>
        public XenServerHealthCheckBugTool()
        {
            string name = string.Format("{0}{1}.zip", Messages.BUGTOOL_FILE_PREFIX, DateTime.Now.ToString("yyyy-MM-dd-HH-mm-ss", CultureInfo.InvariantCulture));
            string folder = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
            if (Directory.Exists(folder))
                Directory.Delete(folder);
            Directory.CreateDirectory(folder);
            if (!name.EndsWith(".zip"))
                name = string.Concat(name, ".zip");
            outputFile = string.Format(@"{0}\{1}", folder, name);
        }

        /// <summary>
        /// Runs bugtool on every host of the connection, collects the common
        /// report capabilities, downloads each host's status report, adds
        /// master/slave info and XenCenter metadata, and zips everything
        /// into <see cref="outputFile"/>.
        /// </summary>
        public void RunBugtool(IXenConnection connection, Session session)
        {
            if (connection == null || session == null)
                return;

            // Fetch the common capabilities of all hosts.
            Dictionary<Host, List<string>> hostCapabilities = new Dictionary<Host, List<string>>();
            foreach (Host host in connection.Cache.Hosts)
            {
                GetSystemStatusCapabilities action = new GetSystemStatusCapabilities(host);
                action.RunExternal(session);
                if (!action.Succeeded)
                    return;

                // The capabilities come back as XML; extract the "key" of
                // each <capability> element.
                List<string> keys = new List<string>();
                XmlDocument doc = new XmlDocument();
                doc.LoadXml(action.Result);
                foreach (XmlNode node in doc.GetElementsByTagName("capability"))
                {
                    foreach (XmlAttribute a in node.Attributes)
                    {
                        if (a.Name == "key")
                            keys.Add(a.Value);
                    }
                }
                hostCapabilities[host] = keys;
            }

            // Intersect the per-host capability lists.
            List<string> combination = null;
            foreach (List<string> capabilities in hostCapabilities.Values)
            {
                if (capabilities == null)
                    continue;
                if (combination == null)
                {
                    combination = capabilities;
                    continue;
                }
                combination = Helpers.ListsCommonItems<string>(combination, capabilities);
            }
            if (combination == null || combination.Count <= 0)
                return;

            // The list of the reports which are required in Health Check Report.
            List<string> reportIncluded = combination.Except(reportExcluded).ToList();

            // Verbosity works for xen-bugtool since Dundee.
            if (Helpers.DundeeOrGreater(connection))
            {
                List<string> verbReport = new List<string>(reportWithVerbosity.Keys);
                int idx = -1;
                for (int x = 0; x < verbReport.Count; x++)
                {
                    idx = reportIncluded.IndexOf(verbReport[x]);
                    if (idx >= 0)
                    {
                        reportIncluded[idx] = reportIncluded[idx] + ":" + reportWithVerbosity[verbReport[x]].ToString();
                    }
                }
            }

            // Ensure downloaded filenames are unique even for hosts with the same hostname: append a counter to the timestring
            string filepath = Path.Combine(Path.GetTempPath(), Path.GetRandomFileName());
            if (Directory.Exists(filepath))
                Directory.Delete(filepath);
            Directory.CreateDirectory(filepath);
            string timestring = DateTime.Now.ToString("yyyy-MM-dd-HH-mm-ss");

            // Collect all master/slave information to output as a separate text file with the report
            List<string> mastersInfo = new List<string>();

            int i = 0;
            Pool p = Helpers.GetPool(connection);
            foreach (Host host in connection.Cache.Hosts)
            {
                // master/slave information
                if (p == null)
                {
                    mastersInfo.Add(string.Format("Server '{0}' is a stand alone server",
                        host.Name()));
                }
                else
                {
                    mastersInfo.Add(string.Format("Server '{0}' is a {1} of pool '{2}'",
                        host.Name(),
                        p.master.opaque_ref == host.opaque_ref ? "master" : "slave",
                        p.Name()));
                }

                // Download this host's status report into filepath.
                HostWithStatus hostWithStatus = new HostWithStatus(host, 0);
                SingleHostStatusAction statAction = new SingleHostStatusAction(hostWithStatus, reportIncluded, filepath, timestring + "-" + ++i);
                statAction.RunExternal(session);
            }

            // output the slave/master info
            string mastersDestination = string.Format("{0}\\{1}-Masters.txt", filepath, timestring);
            WriteExtraInfoToFile(mastersInfo, mastersDestination);

            // output the XenCenter metadata
            var metadata = XenAdminConfigManager.Provider.GetXenCenterMetadata(false);
            string metadataDestination = string.Format("{0}\\{1}-Metadata.json", filepath, timestring);
            WriteExtraInfoToFile(new List<string> {metadata}, metadataDestination);

            // Finish the collection of logs with bugtool.
            // Start to zip the files.
            ZipStatusReportAction zipAction = new ZipStatusReportAction(filepath, outputFile);
            zipAction.RunExternal(session);
            log.InfoFormat("Server Status Report is collected: {0}", outputFile);
        }

        /// <summary>
        /// Writes each string in <paramref name="info"/> as a line of
        /// <paramref name="fileName"/>, overwriting any existing file.
        /// Errors are logged but not rethrown.
        /// </summary>
        private void WriteExtraInfoToFile(List<string> info, string fileName)
        {
            if (File.Exists(fileName))
                File.Delete(fileName);

            StreamWriter sw = null;
            try
            {
                sw = new StreamWriter(fileName);

                foreach (string s in info)
                    sw.WriteLine(s);

                sw.Flush();
            }
            catch (Exception e)
            {
                log.ErrorFormat("Exception while writing {0} file: {1}", fileName, e);
            }
            finally
            {
                if (sw != null)
                    sw.Close();
            }
        }
    }
}
| stephen-turner/xenadmin | XenServerHealthCheck/XenServerHealthCheckBugTool.cs | C# | bsd-2-clause | 8,841 |
class PureFtpd < Formula
desc "Secure and efficient FTP server"
homepage "https://www.pureftpd.org/"
url "https://download.pureftpd.org/pub/pure-ftpd/releases/pure-ftpd-1.0.49.tar.gz"
sha256 "767bf458c70b24f80c0bb7a1bbc89823399e75a0a7da141d30051a2b8cc892a5"
revision 1
bottle do
cellar :any
sha256 "aa0a342b50ae3761120370fc0e6605241e03545441c472d778ef030239784454" => :catalina
sha256 "e3a63b9af91de3c29eef40a76d7962cdf8623a8e8992aeb67bdf3948293c450d" => :mojave
sha256 "a6a9549f3d8bde87cf01210e9fa29b403ed258246a7928d195a57f0c5ace6988" => :high_sierra
sha256 "11dfcec52ae727128c8201a4779fc7feea1d547fe86989a621d4ba339f70de92" => :sierra
end
depends_on "libsodium"
depends_on "openssl@1.1"
def install
args = %W[
--disable-dependency-tracking
--prefix=#{prefix}
--mandir=#{man}
--sysconfdir=#{etc}
--with-everything
--with-pam
--with-tls
--with-bonjour
]
system "./configure", *args
system "make", "install"
end
plist_options manual: "pure-ftpd"
def plist
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<true/>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_sbin}/pure-ftpd</string>
<string>--chrooteveryone</string>
<string>--createhomedir</string>
<string>--allowdotfiles</string>
<string>--login=puredb:#{etc}/pureftpd.pdb</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>WorkingDirectory</key>
<string>#{var}</string>
<key>StandardErrorPath</key>
<string>#{var}/log/pure-ftpd.log</string>
<key>StandardOutPath</key>
<string>#{var}/log/pure-ftpd.log</string>
</dict>
</plist>
EOS
end
test do
system bin/"pure-pw", "--help"
end
end
| lembacon/homebrew-core | Formula/pure-ftpd.rb | Ruby | bsd-2-clause | 2,125 |
<?php
// Bilingual service documentation entry for the "departments" endpoint.
$service_doc['departamentos|departments'] = array(
	'en' => array (
		'pattern' => '/ubigeo/departments',
		'description' => 'Lists the ubigeo codes for all departments',
	),
	'es' => array(
		'patron' => '/ubigeo/departamentos',
		'descripción' => 'Lista los códigos de ubigeo de todos los departamentos',
	)
);

// Handler shared by the Spanish and English routes: returns every department
// (rows whose nombreCompleto ends in "//"), cached under 'departamentos'.
$fdepas = function () use ($app, $db) {
	$res = get_from_cache('departamentos');
	if ($res === false) {
		$stmt = $db->query("select * from ubigeo where nombreCompleto like '%//'");
		$res = $stmt->fetchAll();
		save_to_cache('departamentos', $res);
	}
	// Key the JSON payload by the requested URI so both routes self-describe.
	echo json_encode(array(
		$app->request()->getResourceUri() => $res
	));
};

// Register the same handler under both language-specific URIs.
$app->get('/ubigeo/departamentos', $fdepas)->name('departamentos');
$app->get('/ubigeo/departments', $fdepas)->name('departments');
| emedinaa/ubigeo-peru | app/srv_departamentos.php | PHP | bsd-2-clause | 885 |
#!/usr/bin/env python
"""Bootstrap setuptools installation
To use setuptools in your package's setup.py, include this
file in the same directory and add this to the top of your setup.py::
from ez_setup import use_setuptools
use_setuptools()
To require a specific version of setuptools, set a download
mirror, or use an alternate download directory, simply supply
the appropriate options to ``use_setuptools()``.
This file can also be run as a script to install or upgrade setuptools.
"""
import os
import shutil
import sys
import tempfile
import zipfile
import optparse
import subprocess
import platform
import textwrap
import contextlib
from distutils import log
try:
# noinspection PyCompatibility
from urllib.request import urlopen
except ImportError:
# noinspection PyCompatibility
from urllib2 import urlopen
try:
from site import USER_SITE
except ImportError:
USER_SITE = None
DEFAULT_VERSION = "7.0"
DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/"
def _python_cmd(*args):
    """
    Run the current Python interpreter with *args*.

    Return True if the command succeeded.
    """
    cmd = (sys.executable,) + args
    return subprocess.call(cmd) == 0
def _install(archive_filename, install_args=()):
    # Unpack the setuptools archive and run its "setup.py install" from
    # inside the extracted tree. Returns 2 on failure, None on success.
    with archive_context(archive_filename):
        # installing
        log.warn('Installing Setuptools')
        if not _python_cmd('setup.py', 'install', *install_args):
            log.warn('Something went wrong during the installation.')
            log.warn('See the error message above.')
            # exitcode will be 2
            return 2
def _build_egg(egg, archive_filename, to_dir):
    # Build a setuptools egg from the source archive into to_dir, then
    # verify that the expected egg path now exists.
    with archive_context(archive_filename):
        # building an egg
        log.warn('Building a Setuptools egg in %s', to_dir)
        _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
    # returning the result
    log.warn(egg)
    if not os.path.exists(egg):
        raise IOError('Could not build the egg.')
class ContextualZipFile(zipfile.ZipFile):

    """
    Supplement ZipFile class to support context manager for Python 2.6
    """

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def __new__(cls, *args, **kwargs):
        """
        Construct a ZipFile or ContextualZipFile as appropriate
        """
        # On interpreters whose ZipFile already is a context manager,
        # return a plain ZipFile instance instead of this subclass.
        if hasattr(zipfile.ZipFile, '__exit__'):
            return zipfile.ZipFile(*args, **kwargs)
        return super(ContextualZipFile, cls).__new__(cls)
@contextlib.contextmanager
def archive_context(filename):
    # Extract the zip archive into a fresh temp dir, chdir into the single
    # top-level directory it contains, yield, then restore the original cwd
    # and delete the temp tree (even if the body raises).
    # extracting the archive
    tmpdir = tempfile.mkdtemp()
    log.warn('Extracting in %s', tmpdir)
    old_wd = os.getcwd()
    try:
        os.chdir(tmpdir)
        with ContextualZipFile(filename) as archive:
            archive.extractall()
        # going in the directory
        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
        os.chdir(subdir)
        log.warn('Now working in %s', subdir)
        yield
    finally:
        os.chdir(old_wd)
        shutil.rmtree(tmpdir)
def _do_download(version, download_base, to_dir, download_delay):
    """Ensure a setuptools egg for *version* exists in *to_dir*, put it on
    sys.path, and bootstrap the freshly imported setuptools from it.
    """
    egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg'
                       % (version, sys.version_info[0], sys.version_info[1]))
    if not os.path.exists(egg):
        archive = download_setuptools(version, download_base,
                                      to_dir, download_delay)
        _build_egg(egg, archive, to_dir)
    sys.path.insert(0, egg)

    # Remove previously-imported pkg_resources if present (see
    # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details).
    if 'pkg_resources' in sys.modules:
        del sys.modules['pkg_resources']

    import setuptools
    # Record where setuptools was bootstrapped from so easy_install can
    # reuse the egg.
    setuptools.bootstrap_install_from = egg
def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                   to_dir=os.curdir, download_delay=15):
    """Ensure a setuptools version >= *version* is importable, downloading
    and bootstrapping one when necessary.

    Exits the process with status 2 when an incompatible setuptools is
    already imported and therefore cannot be replaced in this process.
    """
    to_dir = os.path.abspath(to_dir)
    rep_modules = 'pkg_resources', 'setuptools'
    imported = set(sys.modules).intersection(rep_modules)
    try:
        import pkg_resources
    except ImportError:
        # No setuptools installed at all: fetch and bootstrap it.
        return _do_download(version, download_base, to_dir, download_delay)
    try:
        pkg_resources.require("setuptools>=" + version)
        # An acceptable version is already available; nothing to do.
        return
    except pkg_resources.DistributionNotFound:
        return _do_download(version, download_base, to_dir, download_delay)
    except pkg_resources.VersionConflict as VC_err:
        if imported:
            # A conflicting setuptools was already imported into this
            # process; it cannot be swapped out safely, so abort.
            msg = textwrap.dedent("""
                The required version of setuptools (>={version}) is not available,
                and can't be installed while this script is running. Please
                install a more recent version first, using
                'easy_install -U setuptools'.
                (Currently using {VC_err.args[0]!r})
                """).format(VC_err=VC_err, version=version)
            sys.stderr.write(msg)
            sys.exit(2)

        # otherwise, reload ok
        del pkg_resources, sys.modules['pkg_resources']
        return _do_download(version, download_base, to_dir, download_delay)
def _clean_check(cmd, target):
"""
Run the command to download target. If the command fails, clean up before
re-raising the error.
"""
try:
subprocess.check_call(cmd)
except subprocess.CalledProcessError:
if os.access(target, os.F_OK):
os.unlink(target)
raise
def download_file_powershell(url, target):
    """Download *url* to *target* via PowerShell's WebClient (which
    validates TLS trust). Raises when the command cannot complete.
    """
    target = os.path.abspath(target)
    ps_cmd = (
        "[System.Net.WebRequest]::DefaultWebProxy.Credentials = "
        "[System.Net.CredentialCache]::DefaultCredentials; "
        "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)"
        % vars()
    )
    _clean_check(['powershell', '-Command', ps_cmd], target)
def has_powershell():
    """Return True when a working ``powershell`` binary is available."""
    if platform.system() != 'Windows':
        return False
    with open(os.path.devnull, 'wb') as devnull:
        try:
            subprocess.check_call(['powershell', '-Command', 'echo test'],
                                  stdout=devnull, stderr=devnull)
        except Exception:
            return False
    return True

download_file_powershell.viable = has_powershell
def download_file_curl(url, target):
    """Download *url* to *target* with curl, cleaning up on failure."""
    _clean_check(['curl', url, '--silent', '--output', target], target)
def has_curl():
    """Return True when a working ``curl`` binary is available."""
    with open(os.path.devnull, 'wb') as devnull:
        try:
            subprocess.check_call(['curl', '--version'],
                                  stdout=devnull, stderr=devnull)
        except Exception:
            return False
    return True

download_file_curl.viable = has_curl
def download_file_wget(url, target):
    """Download *url* to *target* with wget, cleaning up on failure."""
    _clean_check(['wget', url, '--quiet', '--output-document', target], target)
def has_wget():
    """Return True when a working ``wget`` binary is available."""
    with open(os.path.devnull, 'wb') as devnull:
        try:
            subprocess.check_call(['wget', '--version'],
                                  stdout=devnull, stderr=devnull)
        except Exception:
            return False
    return True

download_file_wget.viable = has_wget
def download_file_insecure(url, target):
    """
    Use Python to download the file, even though it cannot authenticate the
    connection.
    """
    # Idiom fix: contextlib.closing replaces the manual try/finally and
    # guarantees the connection is released even when read() raises.
    with contextlib.closing(urlopen(url)) as src:
        # Read all the data in one block.
        data = src.read()

    # Write all the data in one block to avoid creating a partial file.
    with open(target, "wb") as dst:
        dst.write(data)

download_file_insecure.viable = lambda: True
def get_best_downloader():
    """Return the first viable downloader, in order of preference,
    or None when none of them is usable."""
    downloaders = (
        download_file_powershell,
        download_file_curl,
        download_file_wget,
        download_file_insecure,
    )
    for downloader in downloaders:
        if downloader.viable():
            return downloader
    return None
def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                        to_dir=os.curdir, delay=15, downloader_factory=get_best_downloader):
    """
    Download setuptools from a specified location and return its filename

    `version` should be a valid setuptools version number that is available
    as an sdist for download under the `download_base` URL (which should end
    with a '/'). `to_dir` is the directory where the egg will be downloaded.

    `delay` is the number of seconds to pause before an actual download
    attempt.

    ``downloader_factory`` should be a function taking no arguments and
    returning a function for downloading a URL to a target.
    """
    # making sure we use the absolute path
    to_dir = os.path.abspath(to_dir)
    zip_name = "setuptools-%s.zip" % version
    url = download_base + zip_name
    saveto = os.path.join(to_dir, zip_name)
    if not os.path.exists(saveto):  # Avoid repeated downloads
        log.warn("Downloading %s", url)
        downloader = downloader_factory()
        downloader(url, saveto)
    # NOTE(review): `delay` is accepted for backward compatibility but is
    # never used here -- no pause actually happens before the download.
    return os.path.realpath(saveto)
def _build_install_args(options):
"""
Build the arguments to 'python setup.py install' on the setuptools package
"""
return ['--user'] if options.user_install else []
def _parse_args():
    """Parse the command line and return the options object.

    Positional arguments are accepted but ignored.
    """
    opt_parser = optparse.OptionParser()
    opt_parser.add_option(
        '--user', dest='user_install', action='store_true', default=False,
        help='install in user site package (requires Python 2.6 or later)')
    opt_parser.add_option(
        '--download-base', dest='download_base', metavar="URL",
        default=DEFAULT_URL,
        help='alternative URL from where to download the setuptools package')
    opt_parser.add_option(
        '--insecure', dest='downloader_factory', action='store_const',
        const=lambda: download_file_insecure, default=get_best_downloader,
        help='Use internal, non-validating downloader'
    )
    opt_parser.add_option(
        '--version', help="Specify which version to download",
        default=DEFAULT_VERSION,
    )
    options, _positional = opt_parser.parse_args()
    return options
def main():
    """Install or upgrade setuptools and EasyInstall"""
    options = _parse_args()
    archive = download_setuptools(
        version=options.version,
        download_base=options.download_base,
        downloader_factory=options.downloader_factory,
    )
    # _install returns 2 on failure and None on success; the __main__
    # guard passes this straight to sys.exit() as the process status.
    return _install(archive, _build_install_args(options))
# Script entry point: the exit status is main()'s return value
# (None -> 0 on success, 2 on installation failure).
if __name__ == '__main__':
    sys.exit(main())
| mtholder/peyotl | ez_setup.py | Python | bsd-2-clause | 10,592 |
import sys
import unittest
from streamlink.plugin.api.utils import itertags
def unsupported_versions_1979():
    """Unsupported python versions for itertags
    3.7.0 - 3.7.2 and 3.8.0a1
    - https://github.com/streamlink/streamlink/issues/1979
    - https://bugs.python.org/issue34294

    Returns True on the affected interpreter versions, False otherwise.
    """
    v = sys.version_info
    # Idiom fix: return the boolean expression directly instead of the
    # `if ...: return True / else: return False` pattern.
    return (v.major == 3) and (
        # 3.7.0 - 3.7.2
        (v.minor == 7 and v.micro <= 2)
        # 3.8.0a1
        or (v.minor == 8 and v.micro == 0 and v.releaselevel == 'alpha' and v.serial <= 1)
    )
class TestPluginUtil(unittest.TestCase):
    # Shared fixture parsed by every test below.  The markup deliberately
    # contains edge cases: an attribute with a space after `=`, a script
    # spanning two lines, a void <link> element without an end tag, and an
    # <a> start tag broken across lines with a trailing space (kept via
    # `noqa: W291` -- test_tag_inner_tag asserts on that exact text).
    test_html = """
<!doctype html>
<html lang="en" class="no-js">
<title>Title</title>
<meta property="og:type" content= "website" />
<meta property="og:url" content="http://test.se/"/>
<meta property="og:site_name" content="Test" />
<script src="https://test.se/test.js"></script>
<link rel="stylesheet" type="text/css" href="https://test.se/test.css">
<script>Tester.ready(function () {
alert("Hello, world!"); });</script>
<p>
<a 
href="http://test.se/foo">bar</a>
</p>
</html>
""" # noqa: W291

    # Bug fix applied throughout: the original `self.assertTrue(len(x), n)`
    # calls never compared lengths -- assertTrue's second argument is the
    # failure *message*, so they only asserted the list was non-empty.
    # They are replaced with `assertEqual(len(x), n)`.

    def test_itertags_single_text(self):
        # A tag with plain text content and no attributes.
        title = list(itertags(self.test_html, "title"))
        self.assertEqual(len(title), 1)
        self.assertEqual(title[0].tag, "title")
        self.assertEqual(title[0].text, "Title")
        self.assertEqual(title[0].attributes, {})

    def test_itertags_attrs_text(self):
        # Tags that carry attributes and/or multi-line text content.
        script = list(itertags(self.test_html, "script"))
        self.assertEqual(len(script), 2)
        self.assertEqual(script[0].tag, "script")
        self.assertEqual(script[0].text, "")
        self.assertEqual(script[0].attributes, {"src": "https://test.se/test.js"})

        self.assertEqual(script[1].tag, "script")
        self.assertEqual(script[1].text.strip(), """Tester.ready(function () {\nalert("Hello, world!"); });""")
        self.assertEqual(script[1].attributes, {})

    @unittest.skipIf(unsupported_versions_1979(),
                     "python3.7 issue, see bpo-34294")
    def test_itertags_multi_attrs(self):
        # Several sibling tags of the same name, each with multiple attributes.
        metas = list(itertags(self.test_html, "meta"))
        self.assertEqual(len(metas), 3)
        self.assertTrue(all(meta.tag == "meta" for meta in metas))
        self.assertEqual(metas[0].text, None)
        self.assertEqual(metas[1].text, None)
        self.assertEqual(metas[2].text, None)
        self.assertEqual(metas[0].attributes, {"property": "og:type", "content": "website"})
        self.assertEqual(metas[1].attributes, {"property": "og:url", "content": "http://test.se/"})
        self.assertEqual(metas[2].attributes, {"property": "og:site_name", "content": "Test"})

    def test_multi_line_a(self):
        # A start tag split across two lines still yields one element.
        anchor = list(itertags(self.test_html, "a"))
        self.assertEqual(len(anchor), 1)
        self.assertEqual(anchor[0].tag, "a")
        self.assertEqual(anchor[0].text, "bar")
        self.assertEqual(anchor[0].attributes, {"href": "http://test.se/foo"})

    @unittest.skipIf(unsupported_versions_1979(),
                     "python3.7 issue, see bpo-34294")
    def test_no_end_tag(self):
        # A void element (<link>) that has no closing tag.
        links = list(itertags(self.test_html, "link"))
        self.assertEqual(len(links), 1)
        self.assertEqual(links[0].tag, "link")
        self.assertEqual(links[0].text, None)
        self.assertEqual(links[0].attributes, {"rel": "stylesheet",
                                               "type": "text/css",
                                               "href": "https://test.se/test.css"})

    def test_tag_inner_tag(self):
        # The raw inner markup of a container tag is kept verbatim,
        # including the trailing space inside the broken <a> start tag.
        links = list(itertags(self.test_html, "p"))
        self.assertEqual(len(links), 1)
        self.assertEqual(links[0].tag, "p")
        self.assertEqual(links[0].text.strip(), '<a \nhref="http://test.se/foo">bar</a>')
        self.assertEqual(links[0].attributes, {})
| beardypig/streamlink | tests/test_plugin_utils.py | Python | bsd-2-clause | 3,845 |
import { inject, injectable } from 'inversify';
import { ServiceIdentifiers } from '../../../container/ServiceIdentifiers';
import * as ESTree from 'estree';
import { TNodeWithStatements } from '../../../types/node/TNodeWithStatements';
import { TObjectExpressionKeysTransformerCustomNodeFactory } from '../../../types/container/custom-nodes/TObjectExpressionKeysTransformerCustomNodeFactory';
import { IObjectExpressionExtractorResult } from '../../../interfaces/node-transformers/converting-transformers/object-expression-extractors/IObjectExpressionExtractorResult';
import { TStatement } from '../../../types/node/TStatement';
import { ICustomNode } from '../../../interfaces/custom-nodes/ICustomNode';
import { TInitialData } from '../../../types/TInitialData';
import { IObjectExpressionExtractor } from '../../../interfaces/node-transformers/converting-transformers/object-expression-extractors/IObjectExpressionExtractor';
import { ObjectExpressionKeysTransformerCustomNode } from '../../../enums/custom-nodes/ObjectExpressionKeysTransformerCustomNode';
import { ObjectExpressionVariableDeclarationHostNode } from '../../../custom-nodes/object-expression-keys-transformer-nodes/ObjectExpressionVariableDeclarationHostNode';
import { NodeAppender } from '../../../node/NodeAppender';
import { NodeGuards } from '../../../node/NodeGuards';
import { NodeStatementUtils } from '../../../node/NodeStatementUtils';
import { NodeUtils } from '../../../node/NodeUtils';
import { TNodeWithLexicalScope } from '../../../types/node/TNodeWithLexicalScope';
import { NodeLexicalScopeUtils } from '../../../node/NodeLexicalScopeUtils';
@injectable()
export class ObjectExpressionToVariableDeclarationExtractor implements IObjectExpressionExtractor {
    /**
     * Factory producing the custom node that hosts the extracted object
     * expression as a `VariableDeclaration`.
     *
     * @type {TObjectExpressionKeysTransformerCustomNodeFactory}
     */
    private readonly objectExpressionKeysTransformerCustomNodeFactory: TObjectExpressionKeysTransformerCustomNodeFactory;

    /**
     * @param {TObjectExpressionKeysTransformerCustomNodeFactory} objectExpressionKeysTransformerCustomNodeFactory
     */
    public constructor (
        @inject(ServiceIdentifiers.Factory__IObjectExpressionKeysTransformerCustomNode)
            objectExpressionKeysTransformerCustomNodeFactory: TObjectExpressionKeysTransformerCustomNodeFactory,
    ) {
        this.objectExpressionKeysTransformerCustomNodeFactory = objectExpressionKeysTransformerCustomNodeFactory;
    }

    /**
     * extracts object expression:
     *     var object = {
     *         foo: 1,
     *         bar: 2
     *     };
     *
     * to:
     *     var _0xabc123 = {
     *         foo: 1,
     *         bar: 2
     *     };
     *     var object = _0xabc123;
     *
     * @param {ObjectExpression} objectExpressionNode
     * @param {Statement} hostStatement
     * @returns {IObjectExpressionExtractorResult}
     */
    public extract (
        objectExpressionNode: ESTree.ObjectExpression,
        hostStatement: ESTree.Statement
    ): IObjectExpressionExtractorResult {
        return this.transformObjectExpressionToVariableDeclaration(
            objectExpressionNode,
            hostStatement
        );
    }

    /**
     * Builds the replacement variable declaration, inserts it before the
     * statement that contained the object literal, and returns both the
     * identifier that should replace the literal and the new host nodes.
     *
     * @param {ObjectExpression} objectExpressionNode
     * @param {Statement} hostStatement
     * @returns {Node}
     */
    private transformObjectExpressionToVariableDeclaration (
        objectExpressionNode: ESTree.ObjectExpression,
        hostStatement: ESTree.Statement
    ): IObjectExpressionExtractorResult {
        const hostNodeWithStatements: TNodeWithStatements = NodeStatementUtils.getScopeOfNode(hostStatement);
        // The statement scope itself may not be a lexical scope (e.g. a block
        // statement); walk up to the nearest enclosing lexical scope.
        const lexicalScopeNode: TNodeWithLexicalScope | null = NodeGuards.isNodeWithLexicalScope(hostNodeWithStatements)
            ? hostNodeWithStatements
            : NodeLexicalScopeUtils.getLexicalScope(hostNodeWithStatements) ?? null;

        if (!lexicalScopeNode) {
            throw new Error('Cannot find lexical scope node for the host statement node');
        }

        const properties: (ESTree.Property | ESTree.SpreadElement)[] = objectExpressionNode.properties;

        const newObjectExpressionHostStatement: ESTree.VariableDeclaration = this.getObjectExpressionHostNode(
            lexicalScopeNode,
            properties
        );
        const statementsToInsert: TStatement[] = [newObjectExpressionHostStatement];

        // Insert first, then repair parent links so subsequent traversals
        // see a consistent AST.
        NodeAppender.insertBefore(hostNodeWithStatements, statementsToInsert, hostStatement);

        NodeUtils.parentizeAst(newObjectExpressionHostStatement);
        NodeUtils.parentizeNode(newObjectExpressionHostStatement, hostNodeWithStatements);

        const newObjectExpressionIdentifier: ESTree.Identifier = this.getObjectExpressionIdentifierNode(newObjectExpressionHostStatement);
        const newObjectExpressionNode: ESTree.ObjectExpression = this.getObjectExpressionNode(newObjectExpressionHostStatement);

        return {
            nodeToReplace: newObjectExpressionIdentifier,
            objectExpressionHostStatement: newObjectExpressionHostStatement,
            objectExpressionNode: newObjectExpressionNode
        };
    }

    /**
     * Builds the `var _0x... = {...}` host declaration via the custom-node
     * factory and validates its shape.
     *
     * @param {TNodeWithLexicalScope} lexicalScopeNode
     * @param {(Property | SpreadElement)[]} properties
     * @returns {VariableDeclaration}
     */
    private getObjectExpressionHostNode (
        lexicalScopeNode: TNodeWithLexicalScope,
        properties: (ESTree.Property | ESTree.SpreadElement)[]
    ): ESTree.VariableDeclaration {
        const variableDeclarationHostNodeCustomNode: ICustomNode<TInitialData<ObjectExpressionVariableDeclarationHostNode>> =
            this.objectExpressionKeysTransformerCustomNodeFactory(
                ObjectExpressionKeysTransformerCustomNode.ObjectExpressionVariableDeclarationHostNode
            );

        variableDeclarationHostNodeCustomNode.initialize(lexicalScopeNode, properties);

        const statementNode: TStatement = variableDeclarationHostNodeCustomNode.getNode()[0];

        if (
            !statementNode
            || !NodeGuards.isVariableDeclarationNode(statementNode)
        ) {
            throw new Error('`objectExpressionHostCustomNode.getNode()[0]` should returns array with `VariableDeclaration` node');
        }

        return statementNode;
    }

    /**
     * Extracts the generated identifier (`_0x...`) from the host declaration.
     *
     * @param {VariableDeclaration} objectExpressionHostNode
     * @returns {Identifier}
     */
    private getObjectExpressionIdentifierNode (objectExpressionHostNode: ESTree.VariableDeclaration): ESTree.Identifier {
        const newObjectExpressionIdentifierNode: ESTree.Pattern = objectExpressionHostNode.declarations[0].id;

        if (!NodeGuards.isIdentifierNode(newObjectExpressionIdentifierNode)) {
            throw new Error('`objectExpressionHostNode` should contain `VariableDeclarator` node with `Identifier` id property');
        }

        return newObjectExpressionIdentifierNode;
    }

    /**
     * Extracts the moved object literal from the host declaration's init.
     *
     * @param {VariableDeclaration} objectExpressionHostNode
     * @returns {Identifier}
     */
    private getObjectExpressionNode (objectExpressionHostNode: ESTree.VariableDeclaration): ESTree.ObjectExpression {
        const newObjectExpressionNode: ESTree.Expression | null = objectExpressionHostNode.declarations[0].init ?? null;

        if (!newObjectExpressionNode || !NodeGuards.isObjectExpressionNode(newObjectExpressionNode)) {
            throw new Error('`objectExpressionHostNode` should contain `VariableDeclarator` node with `ObjectExpression` init property');
        }

        return newObjectExpressionNode;
    }
}
| javascript-obfuscator/javascript-obfuscator | src/node-transformers/converting-transformers/object-expression-extractors/ObjectExpressionToVariableDeclarationExtractor.ts | TypeScript | bsd-2-clause | 7,524 |
function(modal) {
    // Modal onload handler for the address chooser: wires up AJAX link
    // navigation, live search, pagination, form submission and tag
    // autocomplete inside the modal body.
    // NOTE(review): this file is a Django template -- the {% url %} tag
    // below is rendered server-side before the script reaches the browser.

    // Rebind in-modal navigation: choice links load their target into the
    // modal, pagination links trigger an AJAX page fetch.
    function ajaxifyLinks (context) {
        $('a.address-choice', context).click(function() {
            modal.loadUrl(this.href);
            return false;
        });

        $('.pagination a', context).click(function() {
            var page = this.getAttribute("data-page");
            setPage(page);
            return false;
        });
    };

    var searchUrl = $('form.address-search', modal.body).attr('action')

    // Fetch search results for the current query and re-ajaxify the
    // freshly inserted markup.
    function search() {
        $.ajax({
            url: searchUrl,
            data: {q: $('#id_q').val()},
            success: function(data, status) {
                $('#search-results').html(data);
                ajaxifyLinks($('#search-results'));
            }
        });
        return false;
    };

    // Load a specific results page, preserving the query string if any.
    function setPage(page) {
        if($('#id_q').val().length){
            dataObj = {q: $('#id_q').val(), p: page};
        }
        else {
            dataObj = {p: page};
        }

        $.ajax({
            url: searchUrl,
            data: dataObj,
            success: function(data, status) {
                $('#search-results').html(data);
                ajaxifyLinks($('#search-results'));
            }
        });
        return false;
    }

    ajaxifyLinks(modal.body);

    // Submit create/edit forms via AJAX; the response HTML (or chosen-item
    // payload) is handed back to the modal workflow.
    function submitForm() {
        var formdata = new FormData(this);

        $.ajax({
            url: this.action,
            data: formdata,
            processData: false,
            contentType: false,
            type: 'POST',
            dataType: 'text',
            success: function(response){
                modal.loadResponseText(response);
            }
        });

        return false;
    }

    $('form.address-create', modal.body).submit(submitForm);
    $('form.address-edit', modal.body).submit(submitForm);
    $('form.address-search', modal.body).submit(search);

    // Debounced live search while typing (50 ms after the last keystroke).
    $('#id_q').on('input', function() {
        clearTimeout($.data(this, 'timer'));
        var wait = setTimeout(search, 50);
        $(this).data('timer', wait);
    });

    {% url 'wagtailadmin_tag_autocomplete' as autocomplete_url %}
    $('#id_tags', modal.body).tagit({
        autocomplete: {source: "{{ autocomplete_url|addslashes }}"}
    });

    // Annotate tab-navigation links with per-section error counts so the
    // user can find validation errors hidden on inactive tabs.
    function detectErrors() {
        var errorSections = {};

        // First count up all the errors
        $('form.address-create .error-message').each(function(){
            var parentSection = $(this).closest('section');

            if(!errorSections[parentSection.attr('id')]){
                errorSections[parentSection.attr('id')] = 0;
            }

            errorSections[parentSection.attr('id')] = errorSections[parentSection.attr('id')]+1;
        });

        // Now identify them on each tab
        for(var index in errorSections) {
            $('.tab-nav a[href=#'+ index +']').addClass('errors').attr('data-count', errorSections[index]);
        }
    }

    detectErrors();
}
require 'spec_helper'
describe Hbc::CLI do
  # `nice_listing` shortens fully-qualified Cask tokens to bare names, but
  # keeps the tap prefix when the same Cask name exists in more than one tap.
  it "lists the taps for Casks that show up in two taps" do
    listing = Hbc::CLI.nice_listing(%w[
                                      caskroom/cask/adium
                                      caskroom/cask/google-chrome
                                      passcod/homebrew-cask/adium
                                    ])

    expect(listing).to eq(%w[
                            caskroom/cask/adium
                            google-chrome
                            passcod/cask/adium
                          ])
  end

  describe ".process" do
    # Stub out command lookup so `process` runs without executing a real
    # subcommand.
    let(:noop_command) { double('CLI::Noop') }

    before {
      allow(Hbc::CLI).to receive(:lookup_command) { noop_command }
      allow(noop_command).to receive(:run)
    }

    it "respects the env variable when choosing what appdir to create" do
      EnvHelper.with_env_var('HOMEBREW_CASK_OPTS', "--appdir=/custom/appdir") do
        allow(Hbc).to receive(:init) {
          expect(Hbc.appdir.to_s).to eq('/custom/appdir')
        }

        Hbc::CLI.process('noop')
      end
    end

    # todo: merely invoking init causes an attempt to create the caskroom directory
    #
    # it "respects the ENV variable when choosing a non-default Caskroom location" do
    #   EnvHelper.with_env_var 'HOMEBREW_CASK_OPTS', "--caskroom=/custom/caskdir" do
    #     allow(Hbc).to receive(:init) {
    #       expect(Hbc.caskroom.to_s).to eq('/custom/caskdir')
    #     }

    #     Hbc::CLI.process('noop')
    #   end
    # end

    # NOTE(review): this example mixes mocha-style `expects` with
    # rspec-mocks `allow` -- it relies on mocha integration being enabled
    # in spec_helper.
    it "exits with a status of 1 when something goes wrong" do
      Hbc::CLI.expects(:exit).with(1)
      Hbc::CLI.expects(:lookup_command).raises(Hbc::CaskError)

      allow(Hbc).to receive(:init) {
        shutup {
          Hbc::CLI.process('noop')
        }
      }
    end

    it "passes `--version` along to the subcommand" do
      expect(Hbc::CLI).to receive(:run_command).with(noop_command, '--version')

      shutup {
        Hbc::CLI.process(['noop', '--version'])
      }
    end
  end
end
| jppelteret/homebrew-cask | spec/cask/cli_spec.rb | Ruby | bsd-2-clause | 1,816 |
/*
* Copyright (c) 2008-2011, Matthias Mann
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Matthias Mann nor the names of its contributors may
* be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package de.matthiasmann.twl;
import de.matthiasmann.twl.model.IntegerModel;
import de.matthiasmann.twl.model.ListModel;
import de.matthiasmann.twl.renderer.Image;
import de.matthiasmann.twl.utils.TypeMapping;
/**
* A wheel widget.
*
* @param <T> The data type for the wheel items
*
* @author Matthias Mann
*/
public class WheelWidget<T> extends Widget {

    public interface ItemRenderer {
        public Widget getRenderWidget(Object data);
    }

    // Maps item classes to the renderer used to display them
    // (String is registered by default in the constructor).
    private final TypeMapping<ItemRenderer> itemRenderer;
    private final L listener;
    private final R renderer;
    private final Runnable timerCB;

    // Layout parameters, populated from the theme in applyThemeWheel().
    protected int itemHeight;
    protected int numVisibleItems;
    protected Image selectedOverlay;

    private static final int TIMER_INTERVAL = 30;
    private static final int MIN_SPEED = 3;
    private static final int MAX_SPEED = 100;

    // Drag / kinetic-scroll state.
    protected Timer timer;
    protected int dragStartY;
    protected long lastDragTime;
    protected long lastDragDelta;
    protected int lastDragDist;
    protected boolean hasDragStart;
    protected boolean dragActive;
    // scrollOffset: pixel offset of the wheel relative to the selected item
    // (clamped to [-itemHeight, itemHeight] in non-cyclic mode);
    // scrollAmount: remaining auto-scroll distance in pixels.
    protected int scrollOffset;
    protected int scrollAmount;

    protected ListModel<T> model;
    protected IntegerModel selectedModel;
    protected int selected;
    protected boolean cyclic;

    public WheelWidget() {
        this.itemRenderer = new TypeMapping<ItemRenderer>();
        this.listener = new L();
        this.renderer = new R();
        this.timerCB = new Runnable() {
            public void run() {
                onTimer();
            }
        };

        itemRenderer.put(String.class, new StringItemRenderer());
        super.insertChild(renderer, 0);
        setCanAcceptKeyboardFocus(true);
    }

    public WheelWidget(ListModel<T> model) {
        this();
        this.model = model;
    }

    public ListModel<T> getModel() {
        return model;
    }

    public void setModel(ListModel<T> model) {
        removeListener();
        this.model = model;
        addListener();
        invalidateLayout();
    }

    public IntegerModel getSelectedModel() {
        return selectedModel;
    }

    public void setSelectedModel(IntegerModel selectedModel) {
        removeSelectedListener();
        this.selectedModel = selectedModel;
        addSelectedListener();
    }

    public int getSelected() {
        return selected;
    }

    public void setSelected(int selected) {
        int oldSelected = this.selected;
        if(oldSelected != selected) {
            this.selected = selected;
            // Keep the optional IntegerModel in sync with the widget state.
            if(selectedModel != null) {
                selectedModel.setValue(selected);
            }
            firePropertyChange("selected", oldSelected, selected);
        }
    }

    public boolean isCyclic() {
        return cyclic;
    }

    public void setCyclic(boolean cyclic) {
        this.cyclic = cyclic;
    }

    public int getItemHeight() {
        return itemHeight;
    }

    public int getNumVisibleItems() {
        return numVisibleItems;
    }

    public boolean removeItemRenderer(Class<? extends T> clazz) {
        if(itemRenderer.remove(clazz)) {
            // Drop cached render widgets so stale renderers are not reused.
            super.removeAllChildren();
            invalidateLayout();
            return true;
        }
        return false;
    }

    public void registerItemRenderer(Class<? extends T> clazz, ItemRenderer value) {
        itemRenderer.put(clazz, value);
        invalidateLayout();
    }

    public void scroll(int amount) {
        scrollInt(amount);
        scrollAmount = 0;
    }

    // Applies a pixel scroll delta: converts accumulated offset into
    // selection changes, clamps at the ends in non-cyclic mode, and
    // starts/stops the animation timer as needed.
    protected void scrollInt(int amount) {
        int pos = selected;
        int half = itemHeight / 2;

        scrollOffset += amount;

        while(scrollOffset >= half) {
            scrollOffset -= itemHeight;
            pos++;
        }
        while(scrollOffset <= -half) {
            scrollOffset += itemHeight;
            pos--;
        }

        if(!cyclic) {
            int n = getNumEntries();
            if(n > 0) {
                while(pos >= n) {
                    pos--;
                    scrollOffset += itemHeight;
                }
            }
            while(pos < 0) {
                pos++;
                scrollOffset -= itemHeight;
            }
            scrollOffset = Math.max(-itemHeight, Math.min(itemHeight, scrollOffset));
        }

        setSelected(pos);

        if(scrollOffset == 0 && scrollAmount == 0) {
            stopTimer();
        } else {
            startTimer();
        }
    }

    public void autoScroll(int dir) {
        if(dir != 0) {
            // A direction reversal replaces the pending amount instead of
            // cancelling against it.
            if(scrollAmount != 0 && Integer.signum(scrollAmount) != Integer.signum(dir)) {
                scrollAmount = dir;
            } else {
                scrollAmount += dir;
            }
            startTimer();
        }
    }

    @Override
    public int getPreferredInnerHeight() {
        return numVisibleItems * itemHeight;
    }

    @Override
    public int getPreferredInnerWidth() {
        int width = 0;
        for(int i=0,n=getNumEntries() ; i<n ; i++) {
            Widget w = getItemRenderer(i);
            if(w != null) {
                width = Math.max(width, w.getPreferredWidth());
            }
        }
        return width;
    }

    @Override
    protected void paintOverlay(GUI gui) {
        super.paintOverlay(gui);
        if(selectedOverlay != null) {
            int y = getInnerY() + itemHeight * (numVisibleItems/2);
            if((numVisibleItems & 1) == 0) {
                y -= itemHeight/2;
            }
            selectedOverlay.draw(getAnimationState(), getX(), y, getWidth(), itemHeight);
        }
    }

    @Override
    protected boolean handleEvent(Event evt) {
        // Drag end: convert the last drag speed into a kinetic auto-scroll.
        if(evt.isMouseDragEnd() && dragActive) {
            int absDist = Math.abs(lastDragDist);
            if(absDist > 3 && lastDragDelta > 0) {
                int amount = (int)Math.min(1000, absDist * 100 / lastDragDelta);
                autoScroll(amount * Integer.signum(lastDragDist));
            }
            hasDragStart = false;
            dragActive = false;
            return true;
        }

        // Active drag: scroll by the vertical delta since the last event.
        if(evt.isMouseDragEvent()) {
            if(hasDragStart) {
                long time = getTime();
                dragActive = true;
                lastDragDist = dragStartY - evt.getMouseY();
                lastDragDelta = Math.max(1, time - lastDragTime);
                scroll(lastDragDist);
                dragStartY = evt.getMouseY();
                lastDragTime = time;
            }
            return true;
        }

        if(super.handleEvent(evt)) {
            return true;
        }

        switch(evt.getType()) {
            case MOUSE_WHEEL:
                autoScroll(itemHeight * evt.getMouseWheelDelta());
                return true;

            case MOUSE_BTNDOWN:
                if(evt.getMouseButton() == Event.MOUSE_LBUTTON) {
                    dragStartY = evt.getMouseY();
                    lastDragTime = getTime();
                    hasDragStart = true;
                }
                return true;

            case KEY_PRESSED:
                switch(evt.getKeyCode()) {
                    case Event.KEY_UP:
                        autoScroll(-itemHeight);
                        return true;
                    case Event.KEY_DOWN:
                        autoScroll(+itemHeight);
                        return true;
                }
                return false;
        }

        return evt.isMouseEvent();
    }

    protected long getTime() {
        GUI gui = getGUI();
        return (gui != null) ? gui.getCurrentTime() : 0;
    }

    protected int getNumEntries() {
        return (model == null) ? 0 : model.getNumEntries();
    }

    // Looks up (and lazily re-parents) the render widget for entry i,
    // or returns null when no renderer is registered for its class.
    protected Widget getItemRenderer(int i) {
        T item = model.getEntry(i);
        if(item != null) {
            ItemRenderer ir = itemRenderer.get(item.getClass());
            if(ir != null) {
                Widget w = ir.getRenderWidget(item);
                if(w != null) {
                    if(w.getParent() != renderer) {
                        w.setVisible(false);
                        renderer.add(w);
                    }
                    return w;
                }
            }
        }
        return null;
    }

    protected void startTimer() {
        if(timer != null && !timer.isRunning()) {
            timer.start();
        }
    }

    protected void stopTimer() {
        if(timer != null) {
            timer.stop();
        }
    }

    // Timer tick: advance the animation by a speed proportional to the
    // remaining distance, clamped to [MIN_SPEED, MAX_SPEED] pixels/tick.
    protected void onTimer() {
        int amount = scrollAmount;
        int newAmount = amount;

        // With no pending auto-scroll, ease back onto the nearest item.
        if(amount == 0 && !dragActive) {
            amount = -scrollOffset;
        }

        if(amount != 0) {
            int absAmount = Math.abs(amount);
            int speed = absAmount * TIMER_INTERVAL / 200;
            int dir = Integer.signum(amount) * Math.min(absAmount,
                    Math.max(MIN_SPEED, Math.min(MAX_SPEED, speed)));

            if(newAmount != 0) {
                newAmount -= dir;
            }

            scrollAmount = newAmount;
            scrollInt(dir);
        }
    }

    @Override
    protected void layout() {
        layoutChildFullInnerArea(renderer);
    }

    @Override
    protected void applyTheme(ThemeInfo themeInfo) {
        super.applyTheme(themeInfo);
        applyThemeWheel(themeInfo);
    }

    protected void applyThemeWheel(ThemeInfo themeInfo) {
        itemHeight = themeInfo.getParameter("itemHeight", 10);
        numVisibleItems = themeInfo.getParameter("visibleItems", 5);
        selectedOverlay = themeInfo.getImage("selectedOverlay");
        invalidateLayout();
    }

    @Override
    protected void afterAddToGUI(GUI gui) {
        super.afterAddToGUI(gui);
        addListener();
        addSelectedListener();

        timer = gui.createTimer();
        timer.setCallback(timerCB);
        timer.setDelay(TIMER_INTERVAL);
        timer.setContinuous(true);
    }

    @Override
    protected void beforeRemoveFromGUI(GUI gui) {
        timer.stop();
        timer = null;

        removeListener();
        removeSelectedListener();
        super.beforeRemoveFromGUI(gui);
    }

    // Child management is owned by the internal renderer widget; the
    // public mutators are therefore disabled.
    @Override
    public void insertChild(Widget child, int index) throws UnsupportedOperationException {
        throw new UnsupportedOperationException();
    }

    @Override
    public void removeAllChildren() throws UnsupportedOperationException {
        throw new UnsupportedOperationException();
    }

    @Override
    public Widget removeChild(int index) throws UnsupportedOperationException {
        throw new UnsupportedOperationException();
    }

    private void addListener() {
        if(model != null) {
            model.addChangeListener(listener);
        }
    }

    private void removeListener() {
        if(model != null) {
            model.removeChangeListener(listener);
        }
    }

    private void addSelectedListener() {
        if(selectedModel != null) {
            selectedModel.addCallback(listener);
            syncSelected();
        }
    }

    private void removeSelectedListener() {
        if(selectedModel != null) {
            selectedModel.removeCallback(listener);
        }
    }

    void syncSelected() {
        setSelected(selectedModel.getValue());
    }

    // Shift the selection to track deletions that occur at or before it.
    void entriesDeleted(int first, int last) {
        if(selected > first) {
            if(selected > last) {
                setSelected(selected - (last-first+1));
            } else {
                setSelected(first);
            }
        }
        invalidateLayout();
    }

    // Shift the selection to track insertions that occur at or before it.
    void entriesInserted(int first, int last) {
        if(selected >= first) {
            setSelected(selected + (last-first+1));
        }
        invalidateLayout();
    }

    // Bridges ListModel change events and the IntegerModel callback onto
    // the outer widget.
    class L implements ListModel.ChangeListener, Runnable {
        public void allChanged() {
            invalidateLayout();
        }

        public void entriesChanged(int first, int last) {
            invalidateLayout();
        }

        public void entriesDeleted(int first, int last) {
            WheelWidget.this.entriesDeleted(first, last);
        }

        public void entriesInserted(int first, int last) {
            WheelWidget.this.entriesInserted(first, last);
        }

        public void run() {
            syncSelected();
        }
    }

    // Clipping child that paints the visible slice of the wheel.
    class R extends Widget {
        public R() {
            setTheme("");
            setClip(true);
        }

        @Override
        protected void paintWidget(GUI gui) {
            if(model == null) {
                return;
            }

            int width = getInnerWidth();
            int x = getInnerX();
            int y = getInnerY();

            int numItems = model.getNumEntries();
            int numDraw = numVisibleItems;
            int startIdx = selected - numVisibleItems/2;

            // An even number of visible rows centers the selection on a
            // row boundary; draw one extra row to fill the gap.
            if((numDraw & 1) == 0) {
                y -= itemHeight / 2;
                numDraw++;
            }

            // A partial scroll offset exposes one additional row at the
            // top or bottom.
            if(scrollOffset > 0) {
                y -= scrollOffset;
                numDraw++;
            }
            if(scrollOffset < 0) {
                y -= itemHeight + scrollOffset;
                numDraw++;
                startIdx--;
            }

            main: for(int i=0 ; i<numDraw ; i++) {
                int idx = startIdx + i;

                // Wrap out-of-range indices in cyclic mode; skip them
                // otherwise.
                while(idx < 0) {
                    if(!cyclic) {
                        continue main;
                    }
                    idx += numItems;
                }

                while(idx >= numItems) {
                    if(!cyclic) {
                        continue main;
                    }
                    idx -= numItems;
                }

                Widget w = getItemRenderer(idx);
                if(w != null) {
                    w.setSize(width, itemHeight);
                    w.setPosition(x, y + i*itemHeight);
                    w.validateLayout();
                    paintChild(gui, w);
                }
            }
        }

        @Override
        public void invalidateLayout() {
        }

        @Override
        protected void sizeChanged() {
        }
    }

    public static class StringItemRenderer extends Label implements WheelWidget.ItemRenderer {
        public StringItemRenderer() {
            setCache(false);
        }

        public Widget getRenderWidget(Object data) {
            setText(String.valueOf(data));
            return this;
        }

        @Override
        protected void sizeChanged() {
        }
    }
}
| ColaMachine/MyBlock | src/main/java/de/matthiasmann/twl/WheelWidget.java | Java | bsd-2-clause | 16,353 |
using System;
using System.Collections.Generic;
namespace NMaier.SimpleDlna.Utilities
{
  /// <summary>
  /// Generic registry of all concrete implementations of
  /// <typeparamref name="TInterface"/> found in the interface's own assembly,
  /// keyed by their upper-cased <c>Name</c>.
  /// </summary>
  public abstract class Repository<TInterface>
    where TInterface : class, IRepositoryItem
  {
    // Built once per closed generic type on first static access.
    private static readonly Dictionary<string, TInterface> items =
      BuildRepository();
    /// <summary>
    /// Scans the assembly for types implementing TInterface that have a
    /// public parameterless constructor, instantiates each once, and maps
    /// upper-cased Name -> instance. Types that fail to construct (or have
    /// duplicate names) are silently skipped.
    /// </summary>
    private static Dictionary<string, TInterface> BuildRepository()
    {
      var items = new Dictionary<string, TInterface>();
      var type = typeof(TInterface).Name;
      var a = typeof(TInterface).Assembly;
      foreach (Type t in a.GetTypes()) {
        if (t.GetInterface(type) == null) {
          continue;
        }
        var ctor = t.GetConstructor(new Type[] { });
        if (ctor == null) {
          continue;
        }
        try {
          var item = ctor.Invoke(new object[] { }) as TInterface;
          if (item == null) {
            continue;
          }
          items.Add(item.Name.ToUpperInvariant(), item);
        }
        catch (Exception) {
          // Best-effort discovery: a broken plugin must not break the registry.
          continue;
        }
      }
      return items;
    }
    /// <summary>Returns a Name -> instance view of every registered item.</summary>
    public static IDictionary<string, IRepositoryItem> ListItems()
    {
      var rv = new Dictionary<string, IRepositoryItem>();
      foreach (var v in items.Values) {
        rv.Add(v.Name, v);
      }
      return rv;
    }
    /// <summary>
    /// Resolves "name" or "name:key=value,key2,..." to an item. The bare form
    /// returns the shared instance; the parameterized form constructs a fresh
    /// instance and calls SetParameters on it.
    /// </summary>
    /// <exception cref="ArgumentException">name is null/blank.</exception>
    /// <exception cref="RepositoryLookupException">
    /// Unknown name, missing parameterless constructor, or construction failure.
    /// </exception>
    public static TInterface Lookup(string name)
    {
      if (string.IsNullOrWhiteSpace(name)) {
        throw new ArgumentException(
          "Invalid repository name",
          "name");
      }
      // Split off an optional ":params" suffix; lookup is case-insensitive.
      var n_p = name.Split(new char[] { ':' }, 2);
      name = n_p[0].ToUpperInvariant().Trim();
      var result = (TInterface)null;
      if (!items.TryGetValue(name, out result)) {
        throw new RepositoryLookupException(name);
      }
      if (n_p.Length == 1) {
        return result;
      }
      // Parameterized lookup: build a private instance so the shared one
      // keeps its default configuration.
      var ctor = result.GetType().GetConstructor(new Type[] { });
      if (ctor == null) {
        throw new RepositoryLookupException(name);
      }
      var parameters = new AttributeCollection();
      foreach (var p in n_p[1].Split(',')) {
        var k_v = p.Split(new char[] { '=' }, 2);
        if (k_v.Length == 2) {
          parameters.Add(k_v[0], k_v[1]);
        }
        else {
          // Value-less keys are allowed (flag-style parameters).
          parameters.Add(k_v[0], null);
        }
      }
      try {
        var item = ctor.Invoke(new object[] { }) as TInterface;
        if (item == null) {
          throw new RepositoryLookupException(name);
        }
        item.SetParameters(parameters);
        return item;
      }
      catch (Exception ex) {
        throw new RepositoryLookupException(string.Format(
          "Cannot construct repository item: {0}",
          ex.Message), ex);
      }
    }
  }
}
| antonio-bakula/simpleDLNA | util/Repository.cs | C# | bsd-2-clause | 2,638 |
// Package tag.
package tag

// init prints the tag marker when the package is loaded. The previous
// version had the println at package level, which is not valid Go -- only
// declarations may appear at file scope.
func init() {
	println("tag27")
}
| ckxng/wakeup | tag/tag27.go | GO | bsd-2-clause | 46 |
namespace XenAdmin.Dialogs
{
    /// <summary>
    /// Designer half of the About dialog. Everything inside the designer
    /// region is generated by the Windows Forms designer -- edit the layout
    /// there, not by hand.
    /// </summary>
    partial class AboutDialog
    {
        /// <summary>
        /// Required designer variable.
        /// </summary>
        private System.ComponentModel.IContainer components = null;
        /// <summary>
        /// Clean up any resources being used.
        /// </summary>
        /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
        protected override void Dispose(bool disposing)
        {
            if (disposing && (components != null))
            {
                components.Dispose();
            }
            base.Dispose(disposing);
        }
        #region Windows Form Designer generated code
        /// <summary>
        /// Required method for Designer support - do not modify
        /// the contents of this method with the code editor.
        /// </summary>
        private void InitializeComponent()
        {
            System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(AboutDialog));
            this.label2 = new System.Windows.Forms.Label();
            this.pictureBox1 = new System.Windows.Forms.PictureBox();
            this.VersionLabel = new System.Windows.Forms.Label();
            this.OkButton = new System.Windows.Forms.Button();
            this.linkLabel1 = new System.Windows.Forms.LinkLabel();
            this.tableLayoutPanel1 = new System.Windows.Forms.TableLayoutPanel();
            ((System.ComponentModel.ISupportInitialize)(this.pictureBox1)).BeginInit();
            this.tableLayoutPanel1.SuspendLayout();
            this.SuspendLayout();
            //
            // label2
            //
            resources.ApplyResources(this.label2, "label2");
            this.label2.BackColor = System.Drawing.Color.Transparent;
            this.label2.Name = "label2";
            //
            // pictureBox1
            //
            resources.ApplyResources(this.pictureBox1, "pictureBox1");
            this.pictureBox1.Image = global::XenAdmin.Properties.Resources.about_box_graphic_423x79;
            this.pictureBox1.Name = "pictureBox1";
            this.pictureBox1.TabStop = false;
            //
            // VersionLabel
            //
            resources.ApplyResources(this.VersionLabel, "VersionLabel");
            this.VersionLabel.BackColor = System.Drawing.Color.Transparent;
            this.VersionLabel.Name = "VersionLabel";
            //
            // OkButton
            //
            resources.ApplyResources(this.OkButton, "OkButton");
            this.OkButton.BackColor = System.Drawing.SystemColors.Control;
            this.OkButton.DialogResult = System.Windows.Forms.DialogResult.OK;
            this.OkButton.Name = "OkButton";
            this.OkButton.UseVisualStyleBackColor = true;
            this.OkButton.Click += new System.EventHandler(this.OkButton_Click);
            //
            // linkLabel1
            //
            resources.ApplyResources(this.linkLabel1, "linkLabel1");
            this.linkLabel1.Name = "linkLabel1";
            this.linkLabel1.TabStop = true;
            this.linkLabel1.LinkClicked += new System.Windows.Forms.LinkLabelLinkClickedEventHandler(this.linkLabel1_LinkClicked);
            //
            // tableLayoutPanel1
            //
            resources.ApplyResources(this.tableLayoutPanel1, "tableLayoutPanel1");
            this.tableLayoutPanel1.Controls.Add(this.VersionLabel, 0, 0);
            this.tableLayoutPanel1.Controls.Add(this.OkButton, 0, 4);
            this.tableLayoutPanel1.Controls.Add(this.linkLabel1, 0, 3);
            this.tableLayoutPanel1.Controls.Add(this.label2, 0, 1);
            this.tableLayoutPanel1.Name = "tableLayoutPanel1";
            //
            // AboutDialog
            //
            this.AcceptButton = this.OkButton;
            resources.ApplyResources(this, "$this");
            this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Dpi;
            this.BackColor = System.Drawing.Color.White;
            this.CancelButton = this.OkButton;
            this.Controls.Add(this.tableLayoutPanel1);
            this.Controls.Add(this.pictureBox1);
            this.HelpButton = false;
            this.Name = "AboutDialog";
            ((System.ComponentModel.ISupportInitialize)(this.pictureBox1)).EndInit();
            this.tableLayoutPanel1.ResumeLayout(false);
            this.tableLayoutPanel1.PerformLayout();
            this.ResumeLayout(false);
            this.PerformLayout();
        }
        #endregion
        // Designer-managed controls; localized text/layout comes from resources.
        private System.Windows.Forms.Label label2;
        private System.Windows.Forms.PictureBox pictureBox1;
        private System.Windows.Forms.Label VersionLabel;
        private System.Windows.Forms.Button OkButton;
        private System.Windows.Forms.LinkLabel linkLabel1;
        private System.Windows.Forms.TableLayoutPanel tableLayoutPanel1;
    }
| agimofcarmen/xenadmin | XenAdmin/Dialogs/AboutDialog.Designer.cs | C# | bsd-2-clause | 5,068 |
# Homebrew formula for App::cpm, a fast CPAN module installer.
# All CPAN dependencies are vendored as resources and installed into
# libexec so the tool is self-contained.
class Cpm < Formula
  desc "Fast CPAN module installer"
  homepage "https://metacpan.org/pod/cpm"
  url "https://cpan.metacpan.org/authors/id/S/SK/SKAJI/App-cpm-0.997002.tar.gz"
  sha256 "19de1224b5c86d566eb0b85767775efb5bbab82ce98ee8c44f8843f26aabbbab"
  license any_of: ["Artistic-1.0-Perl", "GPL-1.0-or-later"]
  head "https://github.com/skaji/cpm.git"
  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "ec077e8877216d394c00f4bae315edc76d0dd293d1d24b691526ee596766dcc9"
    sha256 cellar: :any_skip_relocation, big_sur:       "c80c08f2faf3be4f3ffe1577c1002b2a9d44efbe66c8eae8068c2b68b537134f"
    sha256 cellar: :any_skip_relocation, catalina:      "5ee136ba90a46455007f9e8f5f3de12d55dc8bb888f366c03838cdbc52ab6f63"
    sha256 cellar: :any_skip_relocation, mojave:        "570d8a40888cc518229910ab30d0ee8c89a0b72a86d8cc12cea8222df885d5cb"
  end
  depends_on "perl"
  # Vendored CPAN dependency tree (pinned versions with checksums).
  resource "Module::Build::Tiny" do
    url "https://cpan.metacpan.org/authors/id/L/LE/LEONT/Module-Build-Tiny-0.039.tar.gz"
    sha256 "7d580ff6ace0cbe555bf36b86dc8ea232581530cbeaaea09bccb57b55797f11c"
  end
  resource "CPAN::Common::Index" do
    url "https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN/CPAN-Common-Index-0.010.tar.gz"
    sha256 "c43ddbb22fd42b06118fe6357f53700fbd77f531ba3c427faafbf303cbf4eaf0"
  end
  resource "CPAN::DistnameInfo" do
    url "https://cpan.metacpan.org/authors/id/G/GB/GBARR/CPAN-DistnameInfo-0.12.tar.gz"
    sha256 "2f24fbe9f7eeacbc269d35fc61618322fc17be499ee0cd9018f370934a9f2435"
  end
  resource "CPAN::Meta::Check" do
    url "https://cpan.metacpan.org/authors/id/L/LE/LEONT/CPAN-Meta-Check-0.014.tar.gz"
    sha256 "28a0572bfc1c0678d9ce7da48cf521097ada230f96eb3d063fcbae1cfe6a351f"
  end
  resource "Capture::Tiny" do
    url "https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN/Capture-Tiny-0.48.tar.gz"
    sha256 "6c23113e87bad393308c90a207013e505f659274736638d8c79bac9c67cc3e19"
  end
  resource "Class::Tiny" do
    url "https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN/Class-Tiny-1.008.tar.gz"
    sha256 "ee058a63912fa1fcb9a72498f56ca421a2056dc7f9f4b67837446d6421815615"
  end
  resource "Command::Runner" do
    url "https://cpan.metacpan.org/authors/id/S/SK/SKAJI/Command-Runner-0.103.tar.gz"
    sha256 "0f180b5c3b3fc9db7b83d4a5fdd959db34f7d6d2472f817dbf8b4b795a9dc82a"
  end
  resource "ExtUtils::Config" do
    url "https://cpan.metacpan.org/authors/id/L/LE/LEONT/ExtUtils-Config-0.008.tar.gz"
    sha256 "ae5104f634650dce8a79b7ed13fb59d67a39c213a6776cfdaa3ee749e62f1a8c"
  end
  resource "ExtUtils::Helpers" do
    url "https://cpan.metacpan.org/authors/id/L/LE/LEONT/ExtUtils-Helpers-0.026.tar.gz"
    sha256 "de901b6790a4557cf4ec908149e035783b125bf115eb9640feb1bc1c24c33416"
  end
  resource "ExtUtils::InstallPaths" do
    url "https://cpan.metacpan.org/authors/id/L/LE/LEONT/ExtUtils-InstallPaths-0.012.tar.gz"
    sha256 "84735e3037bab1fdffa3c2508567ad412a785c91599db3c12593a50a1dd434ed"
  end
  resource "ExtUtils::MakeMaker::CPANfile" do
    url "https://cpan.metacpan.org/authors/id/I/IS/ISHIGAKI/ExtUtils-MakeMaker-CPANfile-0.09.tar.gz"
    sha256 "2c077607d4b0a108569074dff76e8168659062ada3a6af78b30cca0d40f8e275"
  end
  resource "File::Copy::Recursive" do
    url "https://cpan.metacpan.org/authors/id/D/DM/DMUEY/File-Copy-Recursive-0.45.tar.gz"
    sha256 "d3971cf78a8345e38042b208bb7b39cb695080386af629f4a04ffd6549df1157"
  end
  resource "File::Which" do
    url "https://cpan.metacpan.org/authors/id/P/PL/PLICEASE/File-Which-1.23.tar.gz"
    sha256 "b79dc2244b2d97b6f27167fc3b7799ef61a179040f3abd76ce1e0a3b0bc4e078"
  end
  resource "File::pushd" do
    url "https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN/File-pushd-1.016.tar.gz"
    sha256 "d73a7f09442983b098260df3df7a832a5f660773a313ca273fa8b56665f97cdc"
  end
  resource "HTTP::Tinyish" do
    url "https://cpan.metacpan.org/authors/id/M/MI/MIYAGAWA/HTTP-Tinyish-0.17.tar.gz"
    sha256 "47bd111e474566d733c41870e2374c81689db5e0b5a43adc48adb665d89fb067"
  end
  resource "IPC::Run3" do
    url "https://cpan.metacpan.org/authors/id/R/RJ/RJBS/IPC-Run3-0.048.tar.gz"
    sha256 "3d81c3cc1b5cff69cca9361e2c6e38df0352251ae7b41e2ff3febc850e463565"
  end
  resource "Menlo" do
    url "https://cpan.metacpan.org/authors/id/M/MI/MIYAGAWA/Menlo-1.9019.tar.gz"
    sha256 "3b573f68e7b3a36a87c860be258599330fac248b518854dfb5657ac483dca565"
  end
  resource "Menlo::Legacy" do
    url "https://cpan.metacpan.org/authors/id/M/MI/MIYAGAWA/Menlo-Legacy-1.9022.tar.gz"
    sha256 "a6acac3fee318a804b439de54acbc7c27f0b44cfdad8551bbc9cd45986abc201"
  end
  resource "Module::CPANfile" do
    url "https://cpan.metacpan.org/authors/id/M/MI/MIYAGAWA/Module-CPANfile-1.1004.tar.gz"
    sha256 "88efbe2e9a642dceaa186430fedfcf999aaf0e06f6cced28a714b8e56b514921"
  end
  resource "Parallel::Pipes" do
    url "https://cpan.metacpan.org/authors/id/S/SK/SKAJI/Parallel-Pipes-0.005.tar.gz"
    sha256 "44bd9e2be33d7b314f81c9b886a95d53514689090635f9fad53181f2d3051fd5"
  end
  resource "Parse::PMFile" do
    url "https://cpan.metacpan.org/authors/id/I/IS/ISHIGAKI/Parse-PMFile-0.43.tar.gz"
    sha256 "be61e807204738cf0c52ed321551992fdc7fa8faa43ed43ff489d0c269900623"
  end
  resource "String::ShellQuote" do
    url "https://cpan.metacpan.org/authors/id/R/RO/ROSCH/String-ShellQuote-1.04.tar.gz"
    sha256 "e606365038ce20d646d255c805effdd32f86475f18d43ca75455b00e4d86dd35"
  end
  resource "Tie::Handle::Offset" do
    url "https://cpan.metacpan.org/authors/id/D/DA/DAGOLDEN/Tie-Handle-Offset-0.004.tar.gz"
    sha256 "ee9f39055dc695aa244a252f56ffd37f8be07209b337ad387824721206d2a89e"
  end
  resource "URI" do
    url "https://cpan.metacpan.org/authors/id/O/OA/OALDERS/URI-5.07.tar.gz"
    sha256 "eeb6ed2ae212434e2021e29f7556f4024169421a5d8b001a89e65982944131ea"
  end
  resource "Win32::ShellQuote" do
    url "https://cpan.metacpan.org/authors/id/H/HA/HAARG/Win32-ShellQuote-0.003001.tar.gz"
    sha256 "aa74b0e3dc2d41cd63f62f853e521ffd76b8d823479a2619e22edb4049b4c0dc"
  end
  resource "YAML::PP" do
    url "https://cpan.metacpan.org/authors/id/T/TI/TINITA/YAML-PP-0.026.tar.gz"
    sha256 "4b858e671cf3e966ecc54408e8031740c2f28f87c294ee9679fb02e02d5a45eb"
  end
  resource "local::lib" do
    url "https://cpan.metacpan.org/authors/id/H/HA/HAARG/local-lib-2.000024.tar.gz"
    sha256 "2e9b917bd48a0615e42633b2a327494e04610d8f710765b9493d306cead98a05"
  end
  def install
    ENV.prepend_create_path "PERL5LIB", libexec/"lib/perl5"
    # These resources ship Build.PL instead of Makefile.PL and are handled
    # in a second pass below.
    build_pl = [
      "Module::Build::Tiny",
      "Command::Runner",
      "Parallel::Pipes",
    ]
    resources.each do |r|
      r.stage do
        next if build_pl.include? r.name
        system "perl", "Makefile.PL", "INSTALL_BASE=#{libexec}"
        system "make", "install"
      end
    end
    build_pl.each do |name|
      resource(name).stage do
        system "perl", "Build.PL", "--install_base", libexec
        system "./Build"
        system "./Build", "install"
      end
    end
    # Build and install cpm itself, then expose it with PERL5LIB pointing at
    # the vendored libraries.
    system "perl", "Build.PL", "--install_base", libexec
    system "./Build"
    system "./Build", "install"
    (bin/"cpm").write_env_script("#{libexec}/bin/cpm", PERL5LIB: ENV["PERL5LIB"])
    man1.install_symlink libexec/"man/man1/cpm.1"
    man3.install_symlink Dir[libexec/"man/man3/App::cpm*"].reject { |f| File.empty?(f) }
  end
  test do
    # Install a trivial distribution and check its documentation renders.
    system bin/"cpm", "install", "Perl::Tutorial"
    expected = <<~EOS
      NAME
          Perl::Tutorial::HelloWorld - Hello World for Perl
      SYNOPSIS
              #!/usr/bin/perl
              #
              # The traditional first program.
              # Strict and warnings are recommended.
              use strict;
              use warnings;
              # Print a message.
              print "Hello, World!\\n";
    EOS
    assert_match expected,
                 shell_output("PERL5LIB=local/lib/perl5 perldoc Perl::Tutorial::HelloWorld")
  end
end
| spaam/homebrew-core | Formula/cpm.rb | Ruby | bsd-2-clause | 7,874 |
package tcp
import (
"expvar"
"fmt"
"time"
"github.com/elastic/beats/libbeat/common"
"github.com/elastic/beats/libbeat/logp"
"github.com/elastic/beats/packetbeat/flows"
"github.com/elastic/beats/packetbeat/protos"
"github.com/tsg/gopacket/layers"
)
// Upper bound (10 MiB) on data held for a single TCP stream.
const TCP_MAX_DATA_IN_STREAM = 10 * (1 << 20)

// Packet direction relative to the first packet seen for a connection.
const (
	TcpDirectionReverse  = 0
	TcpDirectionOriginal = 1
)

// Tcp demultiplexes TCP segments into per-connection streams and forwards
// payloads to the registered application-layer protocol analyzers.
type Tcp struct {
	id        uint32                     // last assigned stream id
	streams   *common.Cache              // tuple -> *TcpConnection, with expiration
	portMap   map[uint16]protos.Protocol // port -> protocol, from buildPortsMap
	protocols protos.Protocols
}

// Processor is the interface this package exposes to the sniffer layer.
type Processor interface {
	Process(flow *flows.FlowID, hdr *layers.TCP, pkt *protos.Packet)
}

var (
	// Counter exported via expvar for monitoring.
	droppedBecauseOfGaps = expvar.NewInt("tcp.dropped_because_of_gaps")
)

// seqCompare is the result of a wrap-around-aware sequence number comparison.
type seqCompare int

const (
	seqLT seqCompare = -1
	seqEq seqCompare = 0
	seqGT seqCompare = 1
)

var (
	debugf  = logp.MakeDebug("tcp")
	isDebug = false
)
// getId returns the next unique connection/stream id.
func (tcp *Tcp) getId() uint32 {
	tcp.id++
	return tcp.id
}
// decideProtocol maps a connection tuple to an application protocol by
// checking both endpoint ports against the configured port map. The source
// port wins if both ports are mapped.
func (tcp *Tcp) decideProtocol(tuple *common.IpPortTuple) protos.Protocol {
	if proto, found := tcp.portMap[tuple.Src_port]; found {
		return proto
	}
	if proto, found := tcp.portMap[tuple.Dst_port]; found {
		return proto
	}
	return protos.UnknownProtocol
}
// findStream looks up an existing connection by its hashable tuple key,
// returning nil when the connection is unknown.
func (tcp *Tcp) findStream(k common.HashableIpPortTuple) *TcpConnection {
	if cached := tcp.streams.Get(k); cached != nil {
		return cached.(*TcpConnection)
	}
	return nil
}
// TcpConnection holds the per-connection state shared by both directions.
type TcpConnection struct {
	id       uint32
	tuple    *common.IpPortTuple
	protocol protos.Protocol
	tcptuple common.TcpTuple
	tcp      *Tcp

	// Last sequence number seen per direction, used for retransmission,
	// gap and overlap detection.
	lastSeq [2]uint32

	// protocols private data
	data protos.ProtocolData
}

// TcpStream is a directional view onto a TcpConnection.
type TcpStream struct {
	conn *TcpConnection
	dir  uint8 // TcpDirectionOriginal or TcpDirectionReverse
}
// String implements fmt.Stringer for debug logging.
func (conn *TcpConnection) String() string {
	return fmt.Sprintf("TcpStream id[%d] tuple[%s] protocol[%s] lastSeq[%d %d]",
		conn.id, conn.tuple, conn.protocol, conn.lastSeq[0], conn.lastSeq[1])
}
// addPacket forwards a segment's payload (and FIN flag, if set) to the
// protocol analyzer registered for this connection, keeping the analyzer's
// private state in conn.data.
func (stream *TcpStream) addPacket(pkt *protos.Packet, tcphdr *layers.TCP) {
	conn := stream.conn
	mod := conn.tcp.protocols.GetTcp(conn.protocol)
	if mod == nil {
		// A protocol can be configured by port without its analyzer being
		// loaded; nothing to do in that case.
		if isDebug {
			protocol := conn.protocol
			debugf("Ignoring protocol for which we have no module loaded: %s",
				protocol)
		}
		return
	}
	if len(pkt.Payload) > 0 {
		conn.data = mod.Parse(pkt, &conn.tcptuple, stream.dir, conn.data)
	}
	if tcphdr.FIN {
		conn.data = mod.ReceivedFin(&conn.tcptuple, stream.dir, conn.data)
	}
}
// gapInStream tells the protocol analyzer that nbytes of stream data were
// lost. It returns true when the analyzer wants the connection state dropped.
func (stream *TcpStream) gapInStream(nbytes int) (drop bool) {
	conn := stream.conn
	mod := conn.tcp.protocols.GetTcp(conn.protocol)
	if mod == nil {
		// No analyzer is loaded for this protocol (addPacket guards the same
		// way); there is no state to notify and no reason to drop.
		return false
	}
	conn.data, drop = mod.GapInStream(&conn.tcptuple, stream.dir, nbytes, conn.data)
	return drop
}
// Process handles one TCP segment: it finds or creates the connection state,
// filters retransmissions, detects sequence-space gaps and overlaps, and
// forwards the (possibly trimmed) payload to the protocol analyzer.
func (tcp *Tcp) Process(id *flows.FlowID, tcphdr *layers.TCP, pkt *protos.Packet) {
	// This Recover should catch all exceptions in
	// protocol modules.
	defer logp.Recover("Process tcp exception")
	stream, created := tcp.getStream(pkt)
	if stream.conn == nil {
		return
	}
	conn := stream.conn
	if id != nil {
		id.AddConnectionID(uint64(conn.id))
	}
	if isDebug {
		debugf("tcp flow id: %p", id)
	}
	if len(pkt.Payload) == 0 && !tcphdr.FIN {
		// return early if packet is not interesting. Still need to find/create
		// stream first in order to update the TCP stream timer
		return
	}
	tcpStartSeq := tcphdr.Seq
	tcpSeq := tcpStartSeq + uint32(len(pkt.Payload))
	lastSeq := conn.lastSeq[stream.dir]
	if isDebug {
		debugf("pkt.start_seq=%v pkt.last_seq=%v stream.last_seq=%v (len=%d)",
			tcpStartSeq, tcpSeq, lastSeq, len(pkt.Payload))
	}
	if len(pkt.Payload) > 0 && lastSeq != 0 {
		// Segment ends at or before what we have already seen: retransmission.
		if tcpSeqBeforeEq(tcpSeq, lastSeq) {
			if isDebug {
				debugf("Ignoring retransmitted segment. pkt.seq=%v len=%v stream.seq=%v",
					tcphdr.Seq, len(pkt.Payload), lastSeq)
			}
			return
		}
		switch tcpSeqCompare(lastSeq, tcpStartSeq) {
		case seqLT: // lastSeq < tcpStartSeq => Gap in tcp stream detected
			if created {
				break
			}
			gap := int(tcpStartSeq - lastSeq)
			debugf("Gap in tcp stream. last_seq: %d, seq: %d, gap: %d", lastSeq, tcpStartSeq, gap)
			drop := stream.gapInStream(gap)
			if drop {
				if isDebug {
					debugf("Dropping connection state because of gap")
				}
				droppedBecauseOfGaps.Add(1)
				// drop application layer connection state and
				// update stream_id for app layer analysers using stream_id for lookups
				conn.id = tcp.getId()
				conn.data = nil
			}
		case seqGT:
			// lastSeq > tcpStartSeq => overlapping TCP segment detected. shrink packet
			delta := lastSeq - tcpStartSeq
			if isDebug {
				debugf("Overlapping tcp segment. last_seq %d, seq: %d, delta: %d",
					lastSeq, tcpStartSeq, delta)
			}
			pkt.Payload = pkt.Payload[delta:]
			tcphdr.Seq += delta
		}
	}
	conn.lastSeq[stream.dir] = tcpSeq
	stream.addPacket(pkt, tcphdr)
}
// getStream returns the directional stream for a packet's tuple, creating a
// new connection (and deciding its protocol by port) on first sight. A zero
// TcpStream (conn == nil) is returned for ports that map to no protocol.
func (tcp *Tcp) getStream(pkt *protos.Packet) (stream TcpStream, created bool) {
	// Try both tuple orientations: a hit on the reversed tuple means the
	// packet travels against the connection's original direction.
	if conn := tcp.findStream(pkt.Tuple.Hashable()); conn != nil {
		return TcpStream{conn: conn, dir: TcpDirectionOriginal}, false
	}
	if conn := tcp.findStream(pkt.Tuple.RevHashable()); conn != nil {
		return TcpStream{conn: conn, dir: TcpDirectionReverse}, false
	}
	protocol := tcp.decideProtocol(&pkt.Tuple)
	if protocol == protos.UnknownProtocol {
		// don't follow
		return TcpStream{}, false
	}
	// Let the analyzer choose how long idle connections are kept.
	var timeout time.Duration
	mod := tcp.protocols.GetTcp(protocol)
	if mod != nil {
		timeout = mod.ConnectionTimeout()
	}
	if isDebug {
		t := pkt.Tuple
		debugf("Connection src[%s:%d] dst[%s:%d] doesn't exist, creating new",
			t.Src_ip.String(), t.Src_port,
			t.Dst_ip.String(), t.Dst_port)
	}
	conn := &TcpConnection{
		id:       tcp.getId(),
		tuple:    &pkt.Tuple,
		protocol: protocol,
		tcp:      tcp}
	conn.tcptuple = common.TcpTupleFromIpPort(conn.tuple, conn.id)
	tcp.streams.PutWithTimeout(pkt.Tuple.Hashable(), conn, timeout)
	return TcpStream{conn: conn, dir: TcpDirectionOriginal}, true
}
// tcpSeqCompare compares two TCP sequence numbers using serial number
// arithmetic, so the result stays correct across 32-bit wrap-around.
func tcpSeqCompare(seq1, seq2 uint32) seqCompare {
	diff := int32(seq1 - seq2)
	if diff == 0 {
		return seqEq
	}
	if diff < 0 {
		return seqLT
	}
	return seqGT
}
// tcpSeqBefore reports whether seq1 strictly precedes seq2 in TCP sequence
// space, handling 32-bit wrap-around via signed subtraction.
func tcpSeqBefore(seq1 uint32, seq2 uint32) bool {
	diff := int32(seq1 - seq2)
	return diff < 0
}
// tcpSeqBeforeEq reports whether seq1 precedes or equals seq2 in TCP
// sequence space, handling 32-bit wrap-around via signed subtraction.
func tcpSeqBeforeEq(seq1 uint32, seq2 uint32) bool {
	diff := int32(seq1 - seq2)
	return diff <= 0
}
// buildPortsMap inverts the per-protocol port lists into a port -> protocol
// lookup table, failing when two different protocols claim the same port.
func buildPortsMap(plugins map[protos.Protocol]protos.TcpPlugin) (map[uint16]protos.Protocol, error) {
	portMap := map[uint16]protos.Protocol{}
	for proto, plugin := range plugins {
		for _, port := range plugin.GetPorts() {
			p := uint16(port)
			if prev, exists := portMap[p]; exists {
				if prev == proto {
					continue
				}
				return nil, fmt.Errorf("Duplicate port (%d) exists in %s and %s protocols",
					port, prev, proto)
			}
			portMap[p] = proto
		}
	}
	return portMap, nil
}
// NewTcp creates and returns a new Tcp layer wired to the given protocol
// registry, with the connection cache janitor started.
func NewTcp(p protos.Protocols) (*Tcp, error) {
	isDebug = logp.IsDebug("tcp")

	portMap, err := buildPortsMap(p.GetAllTcp())
	if err != nil {
		return nil, err
	}
	tcp := &Tcp{
		protocols: p,
		portMap:   portMap,
		streams: common.NewCache(
			protos.DefaultTransactionExpiration,
			protos.DefaultTransactionHashSize),
	}
	tcp.streams.StartJanitor(protos.DefaultTransactionExpiration)
	if isDebug {
		// Fix: debugf already carries the "tcp" selector (logp.MakeDebug("tcp")),
		// so the previous extra "tcp" argument was consumed as the format
		// string and the port map was never printed.
		debugf("Port map: %v", portMap)
	}
	return tcp, nil
}
| phenomenes/varnishbeat | vendor/github.com/elastic/beats/packetbeat/protos/tcp/tcp.go | GO | bsd-2-clause | 7,086 |
import batoid
import numpy as np
from test_helpers import timer, do_pickle, all_obj_diff, init_gpu, rays_allclose
@timer
def test_properties():
    """Sphere should round-trip its radius of curvature and pickle cleanly."""
    rng = np.random.default_rng(5)
    for _ in range(100):
        radius = rng.normal(0.0, 0.3)  # negative allowed
        surface = batoid.Sphere(radius)
        assert surface.R == radius
        do_pickle(surface)
@timer
def test_sag():
    """Check Sphere.sag against the closed-form sag R*(1 - sqrt(1 - r^2/R^2)).

    Covers scalar inputs, the exact value at the vertex, vectorized 2-D
    inputs, and non-unit-stride array views.
    """
    rng = np.random.default_rng(57)
    for i in range(100):
        R = 1./rng.normal(0.0, 0.3)
        sphere = batoid.Sphere(R)
        for j in range(10):
            x = rng.uniform(-0.7*abs(R), 0.7*abs(R))
            y = rng.uniform(-0.7*abs(R), 0.7*abs(R))
            result = sphere.sag(x, y)
            np.testing.assert_allclose(
                result,
                R*(1-np.sqrt(1.0-(x*x + y*y)/R/R))
            )
            # Check that it returned a scalar float and not an array
            assert isinstance(result, float)
        # Check 0,0
        np.testing.assert_allclose(sphere.sag(0, 0), 0.0, rtol=0, atol=1e-17)
        # Check vectorization
        x = rng.uniform(-0.7*abs(R), 0.7*abs(R), size=(10, 10))
        y = rng.uniform(-0.7*abs(R), 0.7*abs(R), size=(10, 10))
        np.testing.assert_allclose(
            sphere.sag(x, y),
            R*(1-np.sqrt(1.0-(x*x + y*y)/R/R))
        )
        # Make sure non-unit stride arrays also work
        np.testing.assert_allclose(
            sphere.sag(x[::5,::2], y[::5,::2]),
            R*(1-np.sqrt(1.0-(x*x + y*y)/R/R))[::5,::2]
        )
        do_pickle(sphere)
@timer
def test_normal():
    """Check Sphere.normal against the analytic surface normal.

    The normal is derived from dz/dr = (r/R)/sqrt(1 - (r/R)^2); scalar,
    vectorized and strided inputs are exercised, plus the vertex normal.
    """
    rng = np.random.default_rng(577)
    for i in range(100):
        R = 1./rng.normal(0.0, 0.3)
        sphere = batoid.Sphere(R)
        for j in range(10):
            x = rng.uniform(-0.7*abs(R), 0.7*abs(R))
            y = rng.uniform(-0.7*abs(R), 0.7*abs(R))
            result = sphere.normal(x, y)
            r = np.hypot(x, y)
            rat = r/R
            dzdr = rat/np.sqrt(1-rat*rat)
            nz = 1/np.sqrt(1+dzdr*dzdr)
            normal = np.array([-x/r*dzdr*nz, -y/r*dzdr*nz, nz])
            np.testing.assert_allclose(result, normal)
        # Check 0,0 (the vertex normal is exactly +z)
        np.testing.assert_equal(sphere.normal(0, 0), np.array([0, 0, 1]))
        # Check vectorization
        x = rng.uniform(-0.7*abs(R), 0.7*abs(R), size=(10, 10))
        y = rng.uniform(-0.7*abs(R), 0.7*abs(R), size=(10, 10))
        r = np.hypot(x, y)
        rat = r/R
        dzdr = rat/np.sqrt(1-rat*rat)
        nz = 1/np.sqrt(1+dzdr*dzdr)
        normal = np.dstack([-x/r*dzdr*nz, -y/r*dzdr*nz, nz])
        np.testing.assert_allclose(
            sphere.normal(x, y),
            normal
        )
        # Make sure non-unit stride arrays also work
        np.testing.assert_allclose(
            sphere.normal(x[::5,::2], y[::5,::2]),
            normal[::5, ::2]
        )
@timer
def test_intersect():
    """Intersect vertical rays with a sphere and verify hit coordinates.

    Rays launched straight up from below must land at (x, y, sag(x, y)) in
    the sphere's frame; both explicit and default coordinate-system forms of
    batoid.intersect are checked.
    """
    rng = np.random.default_rng(5772)
    size = 10_000
    for i in range(100):
        R = 1./rng.normal(0.0, 0.3)
        sphereCoordSys = batoid.CoordSys(origin=[0, 0, -1])
        sphere = batoid.Sphere(R)
        x = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
        y = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
        z = np.full_like(x, -2*abs(R))
        # If we shoot rays straight up, then it's easy to predict the intersection
        vx = np.zeros_like(x)
        vy = np.zeros_like(x)
        vz = np.ones_like(x)
        rv = batoid.RayVector(x, y, z, vx, vy, vz)
        np.testing.assert_allclose(rv.z, -2*abs(R))
        rv2 = batoid.intersect(sphere, rv.copy(), sphereCoordSys)
        assert rv2.coordSys == sphereCoordSys
        rv2 = rv2.toCoordSys(batoid.CoordSys())
        np.testing.assert_allclose(rv2.x, x)
        np.testing.assert_allclose(rv2.y, y)
        np.testing.assert_allclose(rv2.z, sphere.sag(x, y)-1, rtol=0, atol=1e-9)
        # Check default intersect coordTransform
        rv2 = rv.copy().toCoordSys(sphereCoordSys)
        batoid.intersect(sphere, rv2)
        assert rv2.coordSys == sphereCoordSys
        rv2 = rv2.toCoordSys(batoid.CoordSys())
        np.testing.assert_allclose(rv2.x, x)
        np.testing.assert_allclose(rv2.y, y)
        np.testing.assert_allclose(rv2.z, sphere.sag(x, y)-1, rtol=0, atol=1e-9)
@timer
def test_reflect():
    """Reflect near-vertical rays off a sphere and verify the law of reflection.

    Checks that the incidence and reflection angles agree (equal projections
    onto the surface normal) and that the incoming ray, outgoing ray, and
    normal are coplanar. Also checks the free function and method agree.
    """
    rng = np.random.default_rng(57721)
    size = 10_000
    for i in range(100):
        R = 1./rng.normal(0.0, 0.3)
        sphere = batoid.Sphere(R)
        x = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
        y = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
        z = np.full_like(x, -2*abs(R))
        vx = rng.uniform(-1e-5, 1e-5, size=size)
        vy = rng.uniform(-1e-5, 1e-5, size=size)
        vz = np.full_like(x, 1)
        rv = batoid.RayVector(x, y, z, vx, vy, vz)
        rvr = batoid.reflect(sphere, rv.copy())
        rvr2 = sphere.reflect(rv.copy())
        rays_allclose(rvr, rvr2)
        # print(f"{np.sum(rvr.failed)/len(rvr)*100:.2f}% failed")
        normal = sphere.normal(rvr.x, rvr.y)
        # Test law of reflection
        a0 = np.einsum("ad,ad->a", normal, rv.v)[~rvr.failed]
        a1 = np.einsum("ad,ad->a", normal, -rvr.v)[~rvr.failed]
        np.testing.assert_allclose(
            a0, a1,
            rtol=0, atol=1e-12
        )
        # Test that rv.v, rvr.v and normal are all in the same plane
        np.testing.assert_allclose(
            np.einsum(
                "ad,ad->a",
                np.cross(normal, rv.v),
                rv.v
            )[~rvr.failed],
            0.0,
            rtol=0, atol=1e-12
        )
@timer
def test_refract():
    """Refract rays through a sphere between two media and verify Snell's law.

    The tangential component of n*v must be preserved across the surface,
    and the incident ray, refracted ray, and normal must be coplanar.
    Incoming speeds are scaled by 1/n0 as batoid stores v = direction/n.
    """
    rng = np.random.default_rng(577215)
    size = 10_000
    for i in range(100):
        R = 1./rng.normal(0.0, 0.3)
        sphere = batoid.Sphere(R)
        m0 = batoid.ConstMedium(rng.normal(1.2, 0.01))
        m1 = batoid.ConstMedium(rng.normal(1.3, 0.01))
        x = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
        y = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
        z = np.full_like(x, -2*abs(R))
        vx = rng.uniform(-1e-5, 1e-5, size=size)
        vy = rng.uniform(-1e-5, 1e-5, size=size)
        vz = np.sqrt(1-vx*vx-vy*vy)/m0.n
        rv = batoid.RayVector(x, y, z, vx, vy, vz)
        rvr = batoid.refract(sphere, rv.copy(), m0, m1)
        rvr2 = sphere.refract(rv.copy(), m0, m1)
        rays_allclose(rvr, rvr2)
        # print(f"{np.sum(rvr.failed)/len(rvr)*100:.2f}% failed")
        normal = sphere.normal(rvr.x, rvr.y)
        # Test Snell's law
        s0 = np.sum(np.cross(normal, rv.v*m0.n)[~rvr.failed], axis=-1)
        s1 = np.sum(np.cross(normal, rvr.v*m1.n)[~rvr.failed], axis=-1)
        np.testing.assert_allclose(
            m0.n*s0, m1.n*s1,
            rtol=0, atol=1e-9
        )
        # Test that rv.v, rvr.v and normal are all in the same plane
        np.testing.assert_allclose(
            np.einsum(
                "ad,ad->a",
                np.cross(normal, rv.v),
                rv.v
            )[~rvr.failed],
            0.0,
            rtol=0, atol=1e-12
        )
@timer
def test_ne():
    """Distinct surfaces must compare unequal (and hash distinctly)."""
    surfaces = [
        batoid.Sphere(1.0),
        batoid.Sphere(2.0),
        batoid.Plane(),
    ]
    all_obj_diff(surfaces)
@timer
def test_fail():
    """Rays that miss the sphere are flagged failed; rays that hit are not."""
    surface = batoid.Sphere(1.0)

    # Passes far to the side of the unit sphere, so it can never intersect.
    missing = batoid.RayVector(0, 10, 0, 0, 0, -1)
    result = batoid.intersect(surface, missing.copy())
    np.testing.assert_equal(result.failed, np.array([True]))

    # Straight down the optical axis: this one hits.
    hitting = batoid.RayVector(0, 0, 0, 0, 0, -1)
    result = batoid.intersect(surface, hitting.copy())
    np.testing.assert_equal(result.failed, np.array([False]))
if __name__ == '__main__':
    # Run the full suite when invoked directly (outside pytest).
    test_properties()
    test_sag()
    test_normal()
    test_intersect()
    test_reflect()
    test_refract()
    test_ne()
    test_fail()
| jmeyers314/batoid | tests/test_Sphere.py | Python | bsd-2-clause | 7,740 |
# Homebrew cask for the mpv media player (pinned upstream tarball).
cask 'mpv' do
  version '0.23.0'
  sha256 '9c4f5873fc955920c3d570277a2a74f527a9073c27ee1a5eeb3270a1180961e8'
  # laboratory.stolendata.net/~djinn/mpv_osx was verified as official when first introduced to the cask
  url "https://laboratory.stolendata.net/~djinn/mpv_osx/mpv-#{version}.tar.gz"
  appcast 'https://laboratory.stolendata.net/~djinn/mpv_osx/',
          checkpoint: '00223d39362fa2d8764dd0e98997c6b412b21e5ec12d6bec2f90ffe7df7a4608'
  name 'mpv'
  homepage 'https://mpv.io'
  app 'mpv.app'
end
| jiashuw/homebrew-cask | Casks/mpv.rb | Ruby | bsd-2-clause | 507 |
#!/usr/bin/env python3
"""Reformat a CIAAW atomic-weight table into fixed-width columns."""
import sys
import re
import mpmath as mp
# High working precision so uncertainty/interval arithmetic does not round.
mp.dps=250
mp.mp.dps = 250
if len(sys.argv) != 2:
    print("Usage: format_CIAAW.py ciaawfile")
    quit(1)
path = sys.argv[1]
# Row patterns for the CIAAW table (see the matching loop below for how the
# groups are consumed). Fix: bnum previously used the character class
# "[\d\d]" -- a redundantly duplicated \d -- which matches exactly the same
# strings as a plain \d; normalized for clarity.
atomre = re.compile(r'^(\d+) +(\w\w*) +(\w+) +\[?(\d+)\]?\*? +(.*) *$')
isore = re.compile(r'^(\d+)\*? +(\[?\d.*.*\]?) *$')
brange = re.compile(r'^\[([\d\.]+),([\d\.]+)\].*$')
buncertain = re.compile(r'^([\d\.]+)\((\d+)\)[a-z]*$')
bnum = re.compile(r'^(\d+)$')
atommassline = re.compile(r'^(\d+) +(\w\w*) +(\w+) +(.*) *$')
def NumberStr(n):
    """Convert one CIAAW value field into "center low high" fixed-width text.

    Handles four notations: a bare integer (repeated three times), a
    parenthesized uncertainty like "12.011(2)", a bracketed interval like
    "[12.0096,12.0116]", and a lone dash (emitted as three zeros). Any other
    input falls through unchanged.
    """
    # Replace spaces
    s = n.replace(' ', '')
    # remove "exactly" for the carbon mass
    s = s.replace('(exactly)', '')
    # if only a number, put it three times
    m = bnum.match(s)
    if m:
        s = "{:<25} {:<25} {:<25}".format(m.group(1), m.group(1), m.group(1))
    # if parentheses uncertainty...
    m = buncertain.match(s)
    if m:
        # tricky. duplicate the first part as a string
        s2 = m.group(1)
        # but replace with all zero
        s2 = re.sub(r'\d', '0', s2)
        # now replace last characters
        l = len(m.group(2))
        s2 = s2[:len(s2)-l] + m.group(2)
        # convert to a float: s2 now carries the uncertainty at the same
        # decimal scale as the value, e.g. "12.011(2)" -> err "00.002"
        serr = mp.mpf(s2)
        scenter = mp.mpf(m.group(1))
        s = "{:<25} {:<25} {:<25}".format(mp.nstr(scenter, 18), mp.nstr(scenter-serr, 18), mp.nstr(scenter+serr, 18))
    # Replace bracketed ranges with parentheses
    m = brange.match(s)
    if m:
        slow = mp.mpf(m.group(1))
        shigh = mp.mpf(m.group(2))
        smid = (shigh + slow)/mp.mpf("2.0")
        s = "{:<25} {:<25} {:<25}".format(mp.nstr(smid, 18), mp.nstr(slow, 18), mp.nstr(shigh, 18))
    # just a dash?
    if s == "-":
        s = "{:<25} {:<25} {:<25}".format(0, 0, 0)
    return s
# Process every line: element/isotope/mass rows are reformatted, and lines
# matching no pattern (headers, comments) are echoed through unchanged.
filelines = [ x.strip() for x in open(path).readlines() ]
curatom = None
for line in filelines:
    matomre = atomre.match(line)
    misore = isore.match(line)
    matommass = atommassline.match(line)
    if matomre:
        # Element header row: remember the "Z Symbol" prefix for the
        # isotope rows that follow it.
        curatom = "{:<5} {:<5}".format(matomre.group(1), matomre.group(2))
        print("{} {:<6} {:<25}".format(curatom, matomre.group(4), NumberStr(matomre.group(5))))
    elif misore:
        # Isotope continuation row under the current element.
        print("{} {:<6} {:<25}".format(curatom, misore.group(1), NumberStr(misore.group(2))))
    elif matommass:
        # Atomic-mass table row (no isotope column).
        curatom = "{:<5} {:<5}".format(matommass.group(1), matommass.group(2))
        print("{} {:<25}".format(curatom, NumberStr(matommass.group(4))))
    else:
        print(line) # comment lines, etc
| pulsar-chem/Pulsar-Core | scripts/data/format_CIAAW.py | Python | bsd-3-clause | 2,449 |
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/audio_processing/audio_buffer.h"
#include "webrtc/common_audio/include/audio_util.h"
#include "webrtc/common_audio/resampler/push_sinc_resampler.h"
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
namespace webrtc {
namespace {
// Per-channel frame counts for 10 ms at the sample rates the audio
// processing module operates on.
enum {
  kSamplesPer8kHzChannel = 80,
  kSamplesPer16kHzChannel = 160,
  kSamplesPer32kHzChannel = 320
};
// Returns true when |layout| carries a dedicated keyboard channel.
bool HasKeyboardChannel(AudioProcessing::ChannelLayout layout) {
  switch (layout) {
    case AudioProcessing::kMonoAndKeyboard:
    case AudioProcessing::kStereoAndKeyboard:
      return true;
    case AudioProcessing::kMono:
    case AudioProcessing::kStereo:
      return false;
  }
  // Unreachable for valid layouts.
  assert(false);
  return false;
}
// Returns the channel index of the keyboard channel in |layout|. Asserts
// (and returns -1) for layouts that have no keyboard channel.
int KeyboardChannelIndex(AudioProcessing::ChannelLayout layout) {
  switch (layout) {
    case AudioProcessing::kMonoAndKeyboard:
      return 1;
    case AudioProcessing::kStereoAndKeyboard:
      return 2;
    case AudioProcessing::kMono:
    case AudioProcessing::kStereo:
      assert(false);
      return -1;
  }
  // Unreachable for valid layouts.
  assert(false);
  return -1;
}
// Downmixes |left| and |right| into |out| by per-sample averaging.
// Multiplying by 0.5f is exact for floats and identical to dividing by 2.
void StereoToMono(const float* left, const float* right, float* out,
                  int samples_per_channel) {
  for (int n = 0; n < samples_per_channel; ++n) {
    out[n] = (left[n] + right[n]) * 0.5f;
  }
}
// Downmixes 16-bit |left| and |right| into |out|. The sum is formed in int
// precision (so it cannot overflow) and arithmetically shifted right by one,
// which rounds toward negative infinity.
void StereoToMono(const int16_t* left, const int16_t* right, int16_t* out,
                  int samples_per_channel) {
  for (int n = 0; n < samples_per_channel; ++n) {
    out[n] = static_cast<int16_t>((left[n] + right[n]) >> 1);
  }
}
} // namespace
// One int16_t and one float ChannelBuffer that are kept in sync. The sync is
// broken when someone requests write access to either ChannelBuffer, and
// reestablished when someone requests the outdated ChannelBuffer. It is
// therefore safe to use the return value of ibuf_const() and fbuf_const()
// until the next call to ibuf() or fbuf(), and the return value of ibuf() and
// fbuf() until the next call to any of the other functions.
class IFChannelBuffer {
 public:
  IFChannelBuffer(int samples_per_channel, int num_channels)
      : ivalid_(true),
        ibuf_(samples_per_channel, num_channels),
        fvalid_(true),
        fbuf_(samples_per_channel, num_channels) {}
  // Mutable accessors: refresh the requested buffer and mark the other
  // representation stale.
  ChannelBuffer<int16_t>* ibuf() { return ibuf(false); }
  ChannelBuffer<float>* fbuf() { return fbuf(false); }
  // Read-only accessors: refresh the requested buffer, both stay valid.
  const ChannelBuffer<int16_t>* ibuf_const() { return ibuf(true); }
  const ChannelBuffer<float>* fbuf_const() { return fbuf(true); }
 private:
  ChannelBuffer<int16_t>* ibuf(bool readonly) {
    RefreshI();
    fvalid_ = readonly;
    return &ibuf_;
  }
  ChannelBuffer<float>* fbuf(bool readonly) {
    RefreshF();
    ivalid_ = readonly;
    return &fbuf_;
  }
  // Rebuilds the float buffer from the int buffer if it is stale.
  void RefreshF() {
    if (!fvalid_) {
      assert(ivalid_);
      const int16_t* const int_data = ibuf_.data();
      float* const float_data = fbuf_.data();
      const int length = fbuf_.length();
      for (int i = 0; i < length; ++i)
        float_data[i] = int_data[i];
      fvalid_ = true;
    }
  }
  // Rebuilds the int buffer from the float buffer if it is stale,
  // saturating each sample to the int16_t range.
  void RefreshI() {
    if (!ivalid_) {
      assert(fvalid_);
      const float* const float_data = fbuf_.data();
      int16_t* const int_data = ibuf_.data();
      const int length = ibuf_.length();
      for (int i = 0; i < length; ++i)
        int_data[i] = WEBRTC_SPL_SAT(std::numeric_limits<int16_t>::max(),
                                     float_data[i],
                                     std::numeric_limits<int16_t>::min());
      ivalid_ = true;
    }
  }
  bool ivalid_;             // ibuf_ matches the latest data
  ChannelBuffer<int16_t> ibuf_;
  bool fvalid_;             // fbuf_ matches the latest data
  ChannelBuffer<float> fbuf_;
};
// Constructs an AudioBuffer for one pipeline configuration. Input arrives as
// |num_input_channels| channels of |input_samples_per_channel| samples, is
// processed as |num_process_channels| channels of
// |process_samples_per_channel| samples, and leaves with
// |output_samples_per_channel| samples per channel. Downmix, resampler and
// band-split helpers are allocated only when the respective conversion is
// actually needed.
AudioBuffer::AudioBuffer(int input_samples_per_channel,
                         int num_input_channels,
                         int process_samples_per_channel,
                         int num_process_channels,
                         int output_samples_per_channel)
    : input_samples_per_channel_(input_samples_per_channel),
      num_input_channels_(num_input_channels),
      proc_samples_per_channel_(process_samples_per_channel),
      num_proc_channels_(num_process_channels),
      output_samples_per_channel_(output_samples_per_channel),
      samples_per_split_channel_(proc_samples_per_channel_),
      mixed_low_pass_valid_(false),
      reference_copied_(false),
      activity_(AudioFrame::kVadUnknown),
      keyboard_data_(NULL),
      channels_(new IFChannelBuffer(proc_samples_per_channel_,
                                    num_proc_channels_)) {
  assert(input_samples_per_channel_ > 0);
  assert(proc_samples_per_channel_ > 0);
  assert(output_samples_per_channel_ > 0);
  // Only mono or stereo input is supported, and processing may reduce but
  // never increase the channel count.
  assert(num_input_channels_ > 0 && num_input_channels_ <= 2);
  assert(num_proc_channels_ <= num_input_channels);
  if (num_input_channels_ == 2 && num_proc_channels_ == 1) {
    // Scratch buffer for the stereo-to-mono downmix of the input.
    input_buffer_.reset(new ChannelBuffer<float>(input_samples_per_channel_,
                                                 num_proc_channels_));
  }
  if (input_samples_per_channel_ != proc_samples_per_channel_ ||
      output_samples_per_channel_ != proc_samples_per_channel_) {
    // Create an intermediate buffer for resampling.
    process_buffer_.reset(new ChannelBuffer<float>(proc_samples_per_channel_,
                                                   num_proc_channels_));
  }
  if (input_samples_per_channel_ != proc_samples_per_channel_) {
    // One resampler per channel for the input -> processing rate change.
    input_resamplers_.reserve(num_proc_channels_);
    for (int i = 0; i < num_proc_channels_; ++i) {
      input_resamplers_.push_back(
          new PushSincResampler(input_samples_per_channel_,
                                proc_samples_per_channel_));
    }
  }
  if (output_samples_per_channel_ != proc_samples_per_channel_) {
    // One resampler per channel for the processing -> output rate change.
    output_resamplers_.reserve(num_proc_channels_);
    for (int i = 0; i < num_proc_channels_; ++i) {
      output_resamplers_.push_back(
          new PushSincResampler(proc_samples_per_channel_,
                                output_samples_per_channel_));
    }
  }
  if (proc_samples_per_channel_ == kSamplesPer32kHzChannel) {
    // 32 kHz audio is processed as two 16 kHz bands: allocate the low/high
    // split buffers and the per-channel splitting filter states.
    samples_per_split_channel_ = kSamplesPer16kHzChannel;
    split_channels_low_.reset(new IFChannelBuffer(samples_per_split_channel_,
                                                  num_proc_channels_));
    split_channels_high_.reset(new IFChannelBuffer(samples_per_split_channel_,
                                                   num_proc_channels_));
    filter_states_.reset(new SplitFilterStates[num_proc_channels_]);
  }
}
AudioBuffer::~AudioBuffer() {}
// Imports one frame of float audio laid out according to |layout|: captures
// the keyboard channel pointer (if any), downmixes stereo to mono when
// configured, resamples to the processing rate if needed, and converts the
// result into the int16_t processing buffer.
void AudioBuffer::CopyFrom(const float* const* data,
                           int samples_per_channel,
                           AudioProcessing::ChannelLayout layout) {
  assert(samples_per_channel == input_samples_per_channel_);
  assert(ChannelsFromLayout(layout) == num_input_channels_);
  InitForNewData();
  if (HasKeyboardChannel(layout)) {
    // Stored by pointer, not copied; see keyboard_data().
    keyboard_data_ = data[KeyboardChannelIndex(layout)];
  }
  // Downmix.
  const float* const* data_ptr = data;
  if (num_input_channels_ == 2 && num_proc_channels_ == 1) {
    StereoToMono(data[0],
                 data[1],
                 input_buffer_->channel(0),
                 input_samples_per_channel_);
    data_ptr = input_buffer_->channels();
  }
  // Resample.
  if (input_samples_per_channel_ != proc_samples_per_channel_) {
    for (int i = 0; i < num_proc_channels_; ++i) {
      input_resamplers_[i]->Resample(data_ptr[i],
                                     input_samples_per_channel_,
                                     process_buffer_->channel(i),
                                     proc_samples_per_channel_);
    }
    data_ptr = process_buffer_->channels();
  }
  // Convert to int16.
  for (int i = 0; i < num_proc_channels_; ++i) {
    ScaleAndRoundToInt16(data_ptr[i], proc_samples_per_channel_,
                         channels_->ibuf()->channel(i));
  }
}
// Exports the processed audio as float samples into |data|: converts from
// the int16_t processing buffer (via an intermediate buffer when the output
// rate differs) and resamples to the output rate if needed.
void AudioBuffer::CopyTo(int samples_per_channel,
                         AudioProcessing::ChannelLayout layout,
                         float* const* data) {
  assert(samples_per_channel == output_samples_per_channel_);
  assert(ChannelsFromLayout(layout) == num_proc_channels_);
  // Convert to float.
  float* const* data_ptr = data;
  if (output_samples_per_channel_ != proc_samples_per_channel_) {
    // Convert to an intermediate buffer for subsequent resampling.
    data_ptr = process_buffer_->channels();
  }
  for (int i = 0; i < num_proc_channels_; ++i) {
    ScaleToFloat(channels_->ibuf()->channel(i),
                 proc_samples_per_channel_,
                 data_ptr[i]);
  }
  // Resample.
  if (output_samples_per_channel_ != proc_samples_per_channel_) {
    for (int i = 0; i < num_proc_channels_; ++i) {
      output_resamplers_[i]->Resample(data_ptr[i],
                                      proc_samples_per_channel_,
                                      data[i],
                                      output_samples_per_channel_);
    }
  }
}
// Resets per-frame state ahead of receiving a new block of input data.
void AudioBuffer::InitForNewData() {
  activity_ = AudioFrame::kVadUnknown;
  reference_copied_ = false;
  mixed_low_pass_valid_ = false;
  keyboard_data_ = NULL;
}
// Read-only access to channel |channel| of the int16_t processing data.
const int16_t* AudioBuffer::data(int channel) const {
  return channels_->ibuf_const()->channel(channel);
}
// Writable access; invalidates the cached mixed low-pass data.
int16_t* AudioBuffer::data(int channel) {
  mixed_low_pass_valid_ = false;
  return channels_->ibuf()->channel(channel);
}
// Read-only access to channel |channel| as floats.
const float* AudioBuffer::data_f(int channel) const {
  return channels_->fbuf_const()->channel(channel);
}
// Writable float access; invalidates the cached mixed low-pass data.
float* AudioBuffer::data_f(int channel) {
  mixed_low_pass_valid_ = false;
  return channels_->fbuf()->channel(channel);
}
// Low-pass band of |channel| when band-splitting is active; otherwise falls
// back to the full-band data.
const int16_t* AudioBuffer::low_pass_split_data(int channel) const {
  return split_channels_low_.get()
      ? split_channels_low_->ibuf_const()->channel(channel)
      : data(channel);
}
// Writable variant; invalidates the cached mixed low-pass data.
int16_t* AudioBuffer::low_pass_split_data(int channel) {
  mixed_low_pass_valid_ = false;
  return split_channels_low_.get()
      ? split_channels_low_->ibuf()->channel(channel)
      : data(channel);
}
// Float view of the low-pass band (or full-band data when not split).
const float* AudioBuffer::low_pass_split_data_f(int channel) const {
  return split_channels_low_.get()
      ? split_channels_low_->fbuf_const()->channel(channel)
      : data_f(channel);
}
// Writable float variant; invalidates the cached mixed low-pass data.
float* AudioBuffer::low_pass_split_data_f(int channel) {
  mixed_low_pass_valid_ = false;
  return split_channels_low_.get()
      ? split_channels_low_->fbuf()->channel(channel)
      : data_f(channel);
}
// High-pass band of |channel|, or NULL when band-splitting is not active
// (unlike the low-pass accessors, there is no full-band fallback).
const int16_t* AudioBuffer::high_pass_split_data(int channel) const {
  return split_channels_high_.get()
      ? split_channels_high_->ibuf_const()->channel(channel)
      : NULL;
}
int16_t* AudioBuffer::high_pass_split_data(int channel) {
  return split_channels_high_.get()
      ? split_channels_high_->ibuf()->channel(channel)
      : NULL;
}
const float* AudioBuffer::high_pass_split_data_f(int channel) const {
  return split_channels_high_.get()
      ? split_channels_high_->fbuf_const()->channel(channel)
      : NULL;
}
float* AudioBuffer::high_pass_split_data_f(int channel) {
  return split_channels_high_.get()
      ? split_channels_high_->fbuf()->channel(channel)
      : NULL;
}
// Returns the low-pass band mixed down to mono. Mono data is returned
// directly; stereo data is mixed on first use and cached until invalidated
// by a writable accessor or InitForNewData().
const int16_t* AudioBuffer::mixed_low_pass_data() {
  // Currently only mixing stereo to mono is supported.
  assert(num_proc_channels_ == 1 || num_proc_channels_ == 2);
  if (num_proc_channels_ == 1) {
    return low_pass_split_data(0);
  }
  if (!mixed_low_pass_valid_) {
    if (!mixed_low_pass_channels_.get()) {
      // Allocated lazily on first use.
      mixed_low_pass_channels_.reset(
          new ChannelBuffer<int16_t>(samples_per_split_channel_, 1));
    }
    StereoToMono(low_pass_split_data(0),
                 low_pass_split_data(1),
                 mixed_low_pass_channels_->data(),
                 samples_per_split_channel_);
    mixed_low_pass_valid_ = true;
  }
  return mixed_low_pass_channels_->data();
}
// Returns the stored low-pass reference for |channel|, or NULL when no
// reference has been captured via CopyLowPassToReference().
const int16_t* AudioBuffer::low_pass_reference(int channel) const {
  return reference_copied_ ? low_pass_reference_channels_->channel(channel)
                           : NULL;
}
// Keyboard channel pointer captured by CopyFrom(), or NULL if none.
const float* AudioBuffer::keyboard_data() const {
  return keyboard_data_;
}
// Band-split filter state for |channel|; allocated only for 32 kHz audio
// (see the constructor).
SplitFilterStates* AudioBuffer::filter_states(int channel) {
  assert(channel >= 0 && channel < num_proc_channels_);
  return &filter_states_[channel];
}
// Stores the VAD decision to propagate with this frame.
void AudioBuffer::set_activity(AudioFrame::VADActivity activity) {
  activity_ = activity;
}
AudioFrame::VADActivity AudioBuffer::activity() const {
  return activity_;
}
int AudioBuffer::num_channels() const {
  return num_proc_channels_;
}
int AudioBuffer::samples_per_channel() const {
  return proc_samples_per_channel_;
}
int AudioBuffer::samples_per_split_channel() const {
  return samples_per_split_channel_;
}
int AudioBuffer::samples_per_keyboard_channel() const {
  // We don't resample the keyboard channel.
  return input_samples_per_channel_;
}
// TODO(andrew): Do deinterleaving and mixing in one step?
// Splits the interleaved samples of |frame| into per-channel int16_t
// buffers. Asserts that no resampling or channel-count change is required.
void AudioBuffer::DeinterleaveFrom(AudioFrame* frame) {
  assert(proc_samples_per_channel_ == input_samples_per_channel_);
  assert(num_proc_channels_ == num_input_channels_);
  assert(frame->num_channels_ == num_proc_channels_);
  assert(frame->samples_per_channel_ == proc_samples_per_channel_);
  InitForNewData();
  activity_ = frame->vad_activity_;
  const int16_t* interleaved = frame->data_;
  for (int ch = 0; ch < num_proc_channels_; ++ch) {
    int16_t* channel_data = channels_->ibuf()->channel(ch);
    for (int n = 0; n < proc_samples_per_channel_; ++n) {
      channel_data[n] = interleaved[n * num_proc_channels_ + ch];
    }
  }
}
// Packs the per-channel int16_t buffers back into |frame| in interleaved
// order. Always propagates the VAD decision, but skips the sample copy when
// |data_changed| is false.
void AudioBuffer::InterleaveTo(AudioFrame* frame, bool data_changed) const {
  assert(proc_samples_per_channel_ == output_samples_per_channel_);
  assert(num_proc_channels_ == num_input_channels_);
  assert(frame->num_channels_ == num_proc_channels_);
  assert(frame->samples_per_channel_ == proc_samples_per_channel_);
  frame->vad_activity_ = activity_;
  if (!data_changed) {
    return;
  }
  int16_t* interleaved = frame->data_;
  for (int ch = 0; ch < num_proc_channels_; ++ch) {
    const int16_t* channel_data = channels_->ibuf()->channel(ch);
    for (int n = 0; n < proc_samples_per_channel_; ++n) {
      interleaved[n * num_proc_channels_ + ch] = channel_data[n];
    }
  }
}
void AudioBuffer::CopyLowPassToReference() {
reference_copied_ = true;
if (!low_pass_reference_channels_.get()) {
low_pass_reference_channels_.reset(
new ChannelBuffer<int16_t>(samples_per_split_channel_,
num_proc_channels_));
}
for (int i = 0; i < num_proc_channels_; i++) {
low_pass_reference_channels_->CopyFrom(low_pass_split_data(i), i);
}
}
} // namespace webrtc
| xin3liang/platform_external_chromium_org_third_party_webrtc | modules/audio_processing/audio_buffer.cc | C++ | bsd-3-clause | 14,986 |
<?php
namespace common\models;
use common\modules\i18n\Module;
/**
 * This is the model class for table "magazine_item".
 *
 * A single image belonging to a {@see Magazine}, ordered by the `sort`
 * column.
 *
 * @property integer $id
 * @property integer $magazine_id
 * @property string $image
 * @property integer $sort
 *
 * @property Magazine $magazine
 */
class MagazineItem extends Bean
{
    /**
     * Variable for file storing while data saving
     * @var mixed
     */
    public $file;

    /**
     * @inheritdoc
     */
    public static function tableName()
    {
        return 'magazine_item';
    }

    /**
     * @inheritdoc
     */
    public function rules()
    {
        return [
            [['magazine_id', 'image'], 'required'],
            [['magazine_id', 'sort'], 'integer'],
            [['image'], 'string', 'max' => 255],
            [['magazine_id'], 'exist', 'skipOnError' => true, 'targetClass' => Magazine::className(), 'targetAttribute' => ['magazine_id' => 'id']],
        ];
    }

    /**
     * @inheritdoc
     */
    public function attributeLabels()
    {
        return [
            'id' => Module::t('Id'),
            'magazine_id' => Module::t('Magazine'),
            'image' => Module::t('Image'),
            // Previously missing: `sort` is a documented, validated column.
            'sort' => Module::t('Sort'),
        ];
    }

    /**
     * Relation to the parent magazine record.
     * @return \yii\db\ActiveQuery
     */
    public function getMagazine()
    {
        return $this->hasOne(Magazine::className(), ['id' => 'magazine_id']);
    }
}
| artemkramov/jenadin-test | common/models/MagazineItem.php | PHP | bsd-3-clause | 1,379 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/filters/reference_audio_renderer.h"
#include <math.h>
#include "base/bind.h"
#include "base/synchronization/waitable_event.h"
namespace media {
// |audio_manager| is stored for later use when creating the
// AudioOutputController in OnInitialize(); it is not owned (the destructor
// does not delete it).
ReferenceAudioRenderer::ReferenceAudioRenderer(AudioManager* audio_manager)
    : AudioRendererBase(),
      audio_manager_(audio_manager),
      bytes_per_second_(0),
      has_buffered_data_(true),
      buffer_capacity_(0) {
}
ReferenceAudioRenderer::~ReferenceAudioRenderer() {
  // Close down the audio device.
  if (controller_) {
    // Close() reports completion through a callback; block here until it
    // has signaled so destruction does not outrun the close operation.
    base::WaitableEvent closed_event(true, false);
    controller_->Close(base::Bind(&base::WaitableEvent::Signal,
                                  base::Unretained(&closed_event)));
    closed_event.Wait();
  }
}
// Forwards |rate| to the base class and starts device playback whenever a
// positive rate is set and a controller exists.
void ReferenceAudioRenderer::SetPlaybackRate(float rate) {
  // TODO(fbarchard): limit rate to reasonable values
  AudioRendererBase::SetPlaybackRate(rate);
  if (controller_ && rate > 0.0f)
    controller_->Play();
}
// Forwards |volume| to the device controller, if one exists.
void ReferenceAudioRenderer::SetVolume(float volume) {
  if (controller_)
    controller_->SetVolume(volume);
}
// AudioOutputController event callbacks. This renderer takes no action on
// creation, state-change, or error notifications.
void ReferenceAudioRenderer::OnCreated(AudioOutputController* controller) {
  NOTIMPLEMENTED();
}
void ReferenceAudioRenderer::OnPlaying(AudioOutputController* controller) {
  NOTIMPLEMENTED();
}
void ReferenceAudioRenderer::OnPaused(AudioOutputController* controller) {
  NOTIMPLEMENTED();
}
void ReferenceAudioRenderer::OnError(AudioOutputController* controller,
                                     int error_code) {
  NOTIMPLEMENTED();
}
// Device callback requesting more audio. Estimates the current playback
// delay from the bytes still queued downstream, pulls decoded audio through
// FillBuffer(), and enqueues it on the controller.
void ReferenceAudioRenderer::OnMoreData(AudioOutputController* controller,
                                        AudioBuffersState buffers_state) {
  // TODO(fbarchard): Waveout_output_win.h should handle zero length buffers
  // without clicking.
  // Scale the queued byte count by the playback rate so the delay estimate
  // is expressed in media time rather than wall-clock time.
  uint32 pending_bytes = static_cast<uint32>(ceil(buffers_state.total_bytes() *
                                                  GetPlaybackRate()));
  base::TimeDelta delay = base::TimeDelta::FromMicroseconds(
      base::Time::kMicrosecondsPerSecond * pending_bytes /
      bytes_per_second_);
  // Remember whether the device still holds unplayed data; consulted by
  // OnRenderEndOfStream() to decide when the stream has fully drained.
  has_buffered_data_ = buffers_state.pending_bytes != 0;
  uint32 read = FillBuffer(buffer_.get(), buffer_capacity_, delay);
  controller->EnqueueData(buffer_.get(), read);
}
// Called when the renderer reaches the end of the source stream.
void ReferenceAudioRenderer::OnRenderEndOfStream() {
  // We cannot signal end of stream as long as we have buffered data.
  // In such case eventually host would playback all the data, and OnMoreData()
  // would be called with buffers_state.pending_bytes == 0. At that moment
  // we'll call SignalEndOfStream();
  if (!has_buffered_data_)
    SignalEndOfStream();
}
// Creates the output device: picks a packet of one tenth of a second of
// audio, sizes the staging buffer to three hardware packets, and builds the
// AudioOutputController. Returns false when the controller could not be
// created.
bool ReferenceAudioRenderer::OnInitialize(int bits_per_channel,
                                          ChannelLayout channel_layout,
                                          int sample_rate) {
  // sample_rate / 10 samples == 100 ms of audio per packet.
  int samples_per_packet = sample_rate / 10;
  int hardware_buffer_size = samples_per_packet *
      ChannelLayoutToChannelCount(channel_layout) * bits_per_channel / 8;
  // Allocate audio buffer based on hardware buffer size.
  buffer_capacity_ = 3 * hardware_buffer_size;
  buffer_.reset(new uint8[buffer_capacity_]);
  AudioParameters params(AudioParameters::AUDIO_PCM_LINEAR, channel_layout,
                         sample_rate, bits_per_channel, samples_per_packet);
  bytes_per_second_ = params.GetBytesPerSecond();
  controller_ = AudioOutputController::Create(audio_manager_, this, params,
                                              buffer_capacity_);
  return controller_ != NULL;
}
// Pauses the output device, if one was created.
void ReferenceAudioRenderer::OnStop() {
  if (controller_)
    controller_->Pause();
}
} // namespace media
| aYukiSekiguchi/ACCESS-Chromium | media/filters/reference_audio_renderer.cc | C++ | bsd-3-clause | 3,792 |
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="tr">
<context>
<name>BMMDialog</name>
<message>
<location filename="../BMMDialog.ui" line="14"/>
<source>Manage Bookmarks</source>
<translation>Yer İmlerini Yönet</translation>
</message>
<message>
<location filename="../BMMDialog.ui" line="35"/>
<source>Name</source>
<translation>Ad</translation>
</message>
<message>
<location filename="../BMMDialog.ui" line="40"/>
<source>Path</source>
<translation>Yol</translation>
</message>
<message>
<location filename="../BMMDialog.ui" line="52"/>
<source>Remove Bookmark</source>
<translation>Yer İmini Kaldır</translation>
</message>
<message>
<location filename="../BMMDialog.ui" line="65"/>
<source>Rename BookMark</source>
<translation>Yer İmini Yeniden Adlandır</translation>
</message>
<message>
<location filename="../BMMDialog.ui" line="91"/>
<source>Finished</source>
<translation>Bitti</translation>
</message>
<message>
<location filename="../BMMDialog.cpp" line="58"/>
<source>Rename Bookmark</source>
<translation>Yer İmini Yeniden Adlandır</translation>
</message>
<message>
<location filename="../BMMDialog.cpp" line="58"/>
<source>Name:</source>
<translation>Ad:</translation>
</message>
<message>
<location filename="../BMMDialog.cpp" line="64"/>
<source>Invalid Name</source>
<translation>Geçersiz Ad</translation>
</message>
<message>
<location filename="../BMMDialog.cpp" line="64"/>
<source>This bookmark name already exists. Please choose another.</source>
<translation>Bu yer imi adı zaten mevcut. Lütfen başka bir ad seçin.</translation>
</message>
</context>
<context>
<name>BrowserWidget</name>
<message>
<location filename="../BrowserWidget.cpp" line="257"/>
<source>Name</source>
<translation type="unfinished">Ad</translation>
</message>
<message>
<location filename="../BrowserWidget.cpp" line="258"/>
<source>Size</source>
<translation type="unfinished">Boyut</translation>
</message>
<message>
<location filename="../BrowserWidget.cpp" line="259"/>
<source>Type</source>
<translation type="unfinished">Tür</translation>
</message>
<message>
<location filename="../BrowserWidget.cpp" line="260"/>
<source>Date Modified</source>
<translation type="unfinished">Değiştirilme Tarihi</translation>
</message>
<message>
<location filename="../BrowserWidget.cpp" line="261"/>
<source>Date Created</source>
<translation type="unfinished">Oluşturulma Tarihi</translation>
</message>
<message>
<location filename="../BrowserWidget.cpp" line="438"/>
<source>Capacity: %1</source>
<translation type="unfinished">Kapasite: %1</translation>
</message>
<message>
<location filename="../BrowserWidget.cpp" line="463"/>
<source>Files: %1 (%2)</source>
<translation type="unfinished">Dosyalar: %1 (%2)</translation>
</message>
<message>
<location filename="../BrowserWidget.cpp" line="465"/>
<source>Files: %1</source>
<translation>Dosyalar: %1</translation>
</message>
<message>
<location filename="../BrowserWidget.cpp" line="471"/>
<source>Dirs: %1</source>
<translation type="unfinished">Dizinler: %1</translation>
</message>
<message>
<location filename="../BrowserWidget.cpp" line="427"/>
<source>No Directory Contents</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>DirWidget</name>
<message>
<location filename="../widgets/DirWidget2.ui" line="20"/>
<source>Form</source>
<translation>Form</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.ui" line="41"/>
<source> * - FILE MANAGER RUNNING AS ROOT- * </source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.ui" line="182"/>
<source>Increase Icon Sizes</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.ui" line="204"/>
<source>Decrease Icon Sizes</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.ui" line="228"/>
<source>Back</source>
<translation>Geri</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.ui" line="231"/>
<location filename="../widgets/DirWidget2.ui" line="234"/>
<source>Go back to previous directory</source>
<translation>Önceki dizine geri git</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.ui" line="242"/>
<source>Up</source>
<translation>Yukarı</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.ui" line="245"/>
<location filename="../widgets/DirWidget2.ui" line="248"/>
<source>Go to parent directory</source>
<translation>Üst dizine git</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.ui" line="256"/>
<source>Home</source>
<translation>Ev</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.ui" line="259"/>
<location filename="../widgets/DirWidget2.ui" line="262"/>
<source>Go to home directory</source>
<translation>Ev dizinine git</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.ui" line="267"/>
<source>Menu</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.ui" line="270"/>
<source>Select Action</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.ui" line="278"/>
<source>Single Column</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.ui" line="281"/>
<source>Single column view</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.ui" line="289"/>
<source>Dual Column</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.ui" line="292"/>
<source>Dual Column View</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="691"/>
<source>(Limited Access) </source>
<translation>(Sınırlı Erişim) </translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="713"/>
<location filename="../widgets/DirWidget2.cpp" line="761"/>
<source>New Document</source>
<translation>Yeni Belge</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="713"/>
<location filename="../widgets/DirWidget2.cpp" line="738"/>
<location filename="../widgets/DirWidget2.cpp" line="761"/>
<source>Name:</source>
<translation>Ad:</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="730"/>
<source>Error Creating Document</source>
<translation>Belge Oluşturmada Hata</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="730"/>
<source>The document could not be created. Please ensure that you have the proper permissions.</source>
<translation>Belge oluşturulamadı. Lütfen uygun izinlere sahip olduğunuza emin olun.</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="738"/>
<source>New Directory</source>
<translation>Yeni Dizin</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="721"/>
<location filename="../widgets/DirWidget2.cpp" line="748"/>
<location filename="../widgets/DirWidget2.cpp" line="770"/>
<source>Invalid Name</source>
<translation>Geçersiz Ad</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="289"/>
<source>Open Current Dir in a Terminal</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="570"/>
<source>File Operations</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="607"/>
<source>Directory Operations</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="655"/>
<source>Other...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="663"/>
<source>Loading...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="721"/>
<location filename="../widgets/DirWidget2.cpp" line="748"/>
<location filename="../widgets/DirWidget2.cpp" line="770"/>
<source>A file or directory with that name already exists! Please pick a different name.</source>
<translation>Aynı adda bir dosya ya da dizin zaten mevcut! Lütfen farklı bir ad seçin.</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="752"/>
<source>Error Creating Directory</source>
<translation>Dizin Oluşturmada Hata</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="752"/>
<source>The directory could not be created. Please ensure that you have the proper permissions to modify the current directory.</source>
<translation>Dizin oluşturulamadı. Lütfen geçerli dizinde değişiklik yapmak için uygun izinlere sahip olduğunuza emin olun.</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="384"/>
<source>Current</source>
<translation>Geçerli</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="279"/>
<source>Create...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="281"/>
<source>File</source>
<translation type="unfinished">Dosya</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="282"/>
<source>Directory</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="283"/>
<source>Application Launcher</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="287"/>
<source>Launch...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="290"/>
<source>SlideShow</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="291"/>
<source>Multimedia Player</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="293"/>
<source>Open Current Dir as Root</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="297"/>
<source>Archive Options</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="320"/>
<source>Open with...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="326"/>
<source>View Files...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="328"/>
<source>Checksums</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="330"/>
<source>Properties</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="520"/>
<source>File Checksums:</source>
<translation>Dosya Sağlama Toplamları:</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="532"/>
<source>Missing Utility</source>
<translation>Eksik Gereç</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="532"/>
<source>The "lumina-fileinfo" utility could not be found on the system. Please install it first.</source>
<translation>Sistemde "lumina-fileinfo" gereci bulunamadı. Lütfen önce gereci yükleyin.</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="558"/>
<source>Open</source>
<translation>Aç</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="567"/>
<source>Set as Wallpaper</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="575"/>
<source>Rename...</source>
<translation>Yeniden adlandır...</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="576"/>
<source>Cut Selection</source>
<translation>Seçimi Kes</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="577"/>
<source>Copy Selection</source>
<translation>Seçimi Kopyala</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="580"/>
<source>Paste</source>
<translation>Yapıştır</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="584"/>
<source>Delete Selection</source>
<translation>Seçimi Sil</translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="597"/>
<source>Extract Here</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="599"/>
<source>Archive Selection</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="907"/>
<source>Select Archive</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="921"/>
<source>Set Wallpaper on Screen</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../widgets/DirWidget2.cpp" line="921"/>
<source>Screen</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>FODialog</name>
<message>
<location filename="../FODialog.ui" line="14"/>
<source>Performing File Operations</source>
<translation>Dosya İşlemleri Gerçekleştiriliyor</translation>
</message>
<message>
<location filename="../FODialog.ui" line="39"/>
<source>%v/%m</source>
<translation>%v/%m</translation>
</message>
<message>
<location filename="../FODialog.ui" line="74"/>
<source>Stop</source>
<translation>Dur</translation>
</message>
<message>
<location filename="../FODialog.cpp" line="19"/>
<source>Calculating</source>
<translation>Hesaplanıyor</translation>
</message>
<message>
<location filename="../FODialog.cpp" line="131"/>
<source>Overwrite Files?</source>
<translation>Dosyaların üzerine yazılsın mı?</translation>
</message>
<message>
<location filename="../FODialog.cpp" line="131"/>
<source>Do you want to overwrite the existing files?</source>
<translation>Mevcut dosyaların üzerine yazmak istiyor musunuz?</translation>
</message>
<message>
<location filename="../FODialog.cpp" line="131"/>
<source>Note: It will just add a number to the filename otherwise.</source>
<translation>Not: Aksi durumda dosya adına bir sayı eklenecek.</translation>
</message>
<message>
<location filename="../FODialog.cpp" line="133"/>
<source>YesToAll</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../FODialog.cpp" line="134"/>
<source>NoToAll</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../FODialog.cpp" line="135"/>
<source>Cancel</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../FODialog.cpp" line="154"/>
<source>Removing: %1</source>
<translation>Kaldırılıyor: %1</translation>
</message>
<message>
<location filename="../FODialog.cpp" line="155"/>
<source>Copying: %1 to %2</source>
<translation>Kopyalanıyor: %1 %2 hedefine</translation>
</message>
<message>
<location filename="../FODialog.cpp" line="156"/>
<source>Restoring: %1 as %2</source>
<translation>Geri yükleniyor: %1 %2 olarak</translation>
</message>
<message>
<location filename="../FODialog.cpp" line="157"/>
<source>Moving: %1 to %2</source>
<translation>Taşınıyor: %1 %2 hedefine</translation>
</message>
<message>
<location filename="../FODialog.cpp" line="170"/>
<source>Could not remove these files:</source>
<translation>Bu dosyalar kaldırılamadı:</translation>
</message>
<message>
<location filename="../FODialog.cpp" line="171"/>
<source>Could not copy these files:</source>
<translation>Bu dosyalar kopyalanamadı:</translation>
</message>
<message>
<location filename="../FODialog.cpp" line="172"/>
<source>Could not restore these files:</source>
<translation>Bu dosyalar geri yüklenemedi:</translation>
</message>
<message>
<location filename="../FODialog.cpp" line="173"/>
<source>Could not move these files:</source>
<translation>Bu dosyalar taşınamadı:</translation>
</message>
<message>
<location filename="../FODialog.cpp" line="175"/>
<source>File Errors</source>
<translation>Dosya Hataları</translation>
</message>
</context>
<context>
<name>FOWorker</name>
<message>
<location filename="../FODialog.cpp" line="326"/>
<source>Invalid Move</source>
<translation>Geçersiz Taşıma</translation>
</message>
<message>
<location filename="../FODialog.cpp" line="326"/>
<source>It is not possible to move a directory into itself. Please make a copy of the directory instead.
Old Location: %1
New Location: %2</source>
<translation>Bir dizini kendi içine taşımak mümkün değil. Bunun yerine lütfen dizinin bir kopyasını alın.
Eski Konum: %1
Yeni Konum: %2</translation>
</message>
</context>
<context>
<name>GitWizard</name>
<message>
<location filename="../gitWizard.ui" line="14"/>
<source>Clone a Git Repository</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gitWizard.ui" line="24"/>
<source>Welcome!</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gitWizard.ui" line="27"/>
<source>This wizard will guide you through the process of downloading a GIT repository from the internet.</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gitWizard.ui" line="46"/>
<source>GitHub Repository Settings</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gitWizard.ui" line="55"/>
<source>Organization/User</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gitWizard.ui" line="65"/>
<source>Repository Name</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gitWizard.ui" line="75"/>
<source>Is Private Repository</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gitWizard.ui" line="89"/>
<source>Type of Access</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gitWizard.ui" line="95"/>
<source>Use my SSH Key</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gitWizard.ui" line="105"/>
<source>Login to server</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gitWizard.ui" line="114"/>
<source>Username</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gitWizard.ui" line="124"/>
<source>Password</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gitWizard.ui" line="133"/>
<source>Anonymous (public repositories only)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gitWizard.ui" line="143"/>
<source>Optional SSH Password</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gitWizard.ui" line="153"/>
<source>Advanced Options</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gitWizard.ui" line="159"/>
<source>Custom Depth</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gitWizard.ui" line="166"/>
<source>Single Branch</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gitWizard.ui" line="175"/>
<source>branch name</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gitWizard.ui" line="232"/>
<source>Click "Next" to start downloading the repository</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gitWizard.h" line="58"/>
<source>Stop Download?</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../gitWizard.h" line="58"/>
<source>Kill the current download?</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>MainUI</name>
<message>
<location filename="../MainUI.ui" line="14"/>
<source>Insight</source>
<translation>Görü</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="98"/>
<source>Shift+Left</source>
<translation>Shift+Sol</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="99"/>
<source>Shift+Right</source>
<translation>Shift+Sağ</translation>
</message>
<message>
<location filename="../MainUI.ui" line="142"/>
<source>View Mode</source>
<translation>Görünüm Modu</translation>
</message>
<message>
<location filename="../MainUI.ui" line="184"/>
<source>New Tab</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="187"/>
<source>New Browser</source>
<translation>Yeni Gözatıcı</translation>
</message>
<message>
<location filename="../MainUI.ui" line="258"/>
<source>Show Image Previews</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="263"/>
<source>Search Directory...</source>
<translation>Dizinde Ara...</translation>
</message>
<message>
<location filename="../MainUI.ui" line="296"/>
<source>Increase Icon Size</source>
<translation>Simge Boyutunu Arttır</translation>
</message>
<message>
<location filename="../MainUI.ui" line="301"/>
<source>Decrease Icon Size</source>
<translation>Simge Boyutunu Azalt</translation>
</message>
<message>
<location filename="../MainUI.ui" line="306"/>
<source>Larger Icons</source>
<translation>Daha Büyük Simgeler</translation>
</message>
<message>
<location filename="../MainUI.ui" line="309"/>
<source>Ctrl++</source>
<translation>Ctrl++</translation>
</message>
<message>
<location filename="../MainUI.ui" line="317"/>
<source>Smaller Icons</source>
<translation>Daha Küçük Simgeler</translation>
</message>
<message>
<location filename="../MainUI.ui" line="320"/>
<source>Ctrl+-</source>
<translation>Ctrl+-</translation>
</message>
<message>
<location filename="../MainUI.ui" line="328"/>
<source>New Window</source>
<translation>Yeni Pencere</translation>
</message>
<message>
<location filename="../MainUI.ui" line="331"/>
<source>Ctrl+N</source>
<translation>Ctrl+N</translation>
</message>
<message>
<location filename="../MainUI.ui" line="339"/>
<source>Add Bookmark</source>
<translation>Yer İmi Ekle</translation>
</message>
<message>
<location filename="../MainUI.ui" line="342"/>
<source>Ctrl+D</source>
<translation>Ctrl+D</translation>
</message>
<message>
<location filename="../MainUI.ui" line="394"/>
<source>Delete Selection</source>
<translation>Seçimi Sil</translation>
</message>
<message>
<location filename="../MainUI.ui" line="397"/>
<source>Del</source>
<translation>Sil</translation>
</message>
<message>
<location filename="../MainUI.ui" line="405"/>
<source>Refresh</source>
<translation>Yenile</translation>
</message>
<message>
<location filename="../MainUI.ui" line="416"/>
<source>Close Tab</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="427"/>
<source>Repo Status</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="432"/>
<source>Clone Repository</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="440"/>
<source>Show Directory Tree Window</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="443"/>
<source>Show Directory Tree Pane</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="446"/>
<source>Ctrl+P</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="451"/>
<source>Open as Root</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="190"/>
<source>Ctrl+T</source>
<translation>Ctrl+T</translation>
</message>
<message>
<location filename="../MainUI.ui" line="124"/>
<source>&File</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="138"/>
<source>&View</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="155"/>
<source>&Bookmarks</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="163"/>
<source>&External Devices</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="170"/>
<source>&Git</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.ui" line="198"/>
<source>Exit</source>
<translation>Çık</translation>
</message>
<message>
<location filename="../MainUI.ui" line="201"/>
<source>Ctrl+Q</source>
<translation>Ctrl+Q</translation>
</message>
<message>
<location filename="../MainUI.ui" line="209"/>
<source>&Preferences</source>
<translation>&Tercihler</translation>
</message>
<message>
<location filename="../MainUI.ui" line="220"/>
<source>Show Hidden Files</source>
<translation>Gizli Dosyaları Göster</translation>
</message>
<message>
<location filename="../MainUI.ui" line="225"/>
<source>Scan for Devices</source>
<translation>Aygıtlar için Tara</translation>
</message>
<message>
<location filename="../MainUI.ui" line="233"/>
<source>Manage Bookmarks</source>
<translation>Yer İmlerini Yönet</translation>
</message>
<message>
<location filename="../MainUI.ui" line="247"/>
<source>Show Action Buttons</source>
<translation>Eylem Düğmelerini Göster</translation>
</message>
<message>
<location filename="../MainUI.ui" line="266"/>
<source>Ctrl+F</source>
<translation>Ctrl+F</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="69"/>
<source>Detailed List</source>
<translation>Ayrıntılı Liste</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="70"/>
<source>Basic List</source>
<translation>Temel Liste</translation>
</message>
<message>
<location filename="../MainUI.ui" line="419"/>
<source>Ctrl+W</source>
<translation>Ctrl+W</translation>
</message>
<message>
<location filename="../MainUI.ui" line="408"/>
<source>F5</source>
<translation>F5</translation>
</message>
<message>
<location filename="../MainUI.ui" line="375"/>
<source>Ctrl+C</source>
<translation>Ctrl+C</translation>
</message>
<message>
<location filename="../MainUI.ui" line="350"/>
<source>Rename...</source>
<translation>Yeniden adlandır...</translation>
</message>
<message>
<location filename="../MainUI.ui" line="353"/>
<source>F2</source>
<translation>F2</translation>
</message>
<message>
<location filename="../MainUI.ui" line="361"/>
<source>Cut Selection</source>
<translation>Seçimi Kes</translation>
</message>
<message>
<location filename="../MainUI.ui" line="372"/>
<source>Copy Selection</source>
<translation>Seçimi Kopyala</translation>
</message>
<message>
<location filename="../MainUI.ui" line="383"/>
<source>Paste</source>
<translation>Yapıştır</translation>
</message>
<message>
<location filename="../MainUI.ui" line="386"/>
<source>Ctrl+V</source>
<translation>Ctrl+V</translation>
</message>
<message>
<location filename="../MainUI.ui" line="364"/>
<source>Ctrl+X</source>
<translation>Ctrl+X</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="204"/>
<source>Invalid Directories</source>
<translation>Geçersiz Dizinler</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="204"/>
<source>The following directories are invalid and could not be opened:</source>
<translation>İzleyen dizinler geçersiz ve açılamadı:</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="238"/>
<source>CTRL+B</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.cpp" line="247"/>
<source>CTRL+E</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.cpp" line="365"/>
<source>Root</source>
<translation>Kök</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="372"/>
<source>%1 (Type: %2)</source>
<translation>%1 (Tür: %2)</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="376"/>
<source>Filesystem: %1</source>
<translation>Dosya sistemi: %1</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="612"/>
<source>New Bookmark</source>
<translation>Yeni Yer İmi</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="612"/>
<source>Name:</source>
<translation>Ad:</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="617"/>
<source>Invalid Name</source>
<translation>Geçersiz Ad</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="617"/>
<source>This bookmark name already exists. Please choose another.</source>
<translation>Bu yer imi adı zaten mevcut. Lütfen başka bir ad seçin.</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="638"/>
<source>Git Repository Status</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.cpp" line="728"/>
<source>Multimedia</source>
<translation>Multimedya</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="746"/>
<source>Slideshow</source>
<translation>Slayt gösterisi</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="926"/>
<source>Items to be removed:</source>
<translation>Kaldırılacak öğeler:</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="452"/>
<source>Verify Quit</source>
<translation>Çıkışı Doğrula</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="100"/>
<source>Ctrl+H</source>
<translation>Ctrl+H</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="101"/>
<source>Ctrl+L</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../MainUI.cpp" line="452"/>
<source>You have multiple tabs open. Are you sure you want to quit?</source>
<translation>Açık birden çok sekmeniz mevcut. Çıkmak istediğinize emin misiniz?</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="925"/>
<source>Verify Removal</source>
<translation>Kaldırmayı Doğrula</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="925"/>
<source>WARNING: This will permanently delete the file(s) from the system!</source>
<translation>UYARI: Bu işlemle dosya(lar) sistemden kalıcı olarak silinecek!</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="925"/>
<source>Are you sure you want to continue?</source>
<translation>Devam etmek istediğinize emin misiniz?</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="885"/>
<source>Rename File</source>
<translation>Dosyayı Yeniden Adlandır</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="885"/>
<source>New Name:</source>
<translation>Yeni Ad:</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="899"/>
<source>Overwrite File?</source>
<translation>Dosyanın Üzerine Yazılsın Mı?</translation>
</message>
<message>
<location filename="../MainUI.cpp" line="899"/>
<source>An existing file with the same name will be replaced. Are you sure you want to proceed?</source>
<translation>Aynı ada sahip mevcut bir dosya yenisiyle değiştirilecek. İlerlemek istediğinize emin misiniz?</translation>
</message>
</context>
<context>
<name>MultimediaWidget</name>
<message>
<location filename="../widgets/MultimediaWidget.ui" line="14"/>
<source>Form</source>
<translation>Form</translation>
</message>
<message>
<location filename="../widgets/MultimediaWidget.ui" line="28"/>
<source>Go To Next</source>
<translation>Sonrakine Git</translation>
</message>
<message>
<location filename="../widgets/MultimediaWidget.ui" line="107"/>
<source>(No Running Video)</source>
<translation>(Oynatılan Video Yok)</translation>
</message>
<message>
<location filename="../widgets/MultimediaWidget.cpp" line="124"/>
<source>Playing:</source>
<translation>Oynatılıyor:</translation>
</message>
<message>
<location filename="../widgets/MultimediaWidget.cpp" line="130"/>
<source>Stopped</source>
<translation>Durdu</translation>
</message>
<message>
<location filename="../widgets/MultimediaWidget.cpp" line="157"/>
<source>Error Playing File: %1</source>
<translation>Dosya Oynatmada Hata: %1</translation>
</message>
<message>
<location filename="../widgets/MultimediaWidget.cpp" line="168"/>
<source>Finished</source>
<translation>Tamamlandı</translation>
</message>
</context>
<context>
<name>OPWidget</name>
<message>
<location filename="../OPWidget.ui" line="14"/>
<source>Form</source>
<translation type="unfinished">Form</translation>
</message>
<message>
<location filename="../OPWidget.ui" line="44"/>
<location filename="../OPWidget.ui" line="51"/>
<source>...</source>
<translation type="unfinished">...</translation>
</message>
<message>
<location filename="../OPWidget.ui" line="60"/>
<source>Evaluating...</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../OPWidget.cpp" line="52"/>
<source>Move</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../OPWidget.cpp" line="53"/>
<source>Copy</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../OPWidget.cpp" line="54"/>
<source>Remove</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../OPWidget.cpp" line="96"/>
<source>File Operation Errors</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../OPWidget.cpp" line="108"/>
<source>%1 Finished</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../OPWidget.cpp" line="108"/>
<source>Errors Occured</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>SlideshowWidget</name>
<message>
<location filename="../widgets/SlideshowWidget.ui" line="14"/>
<source>Form</source>
<translation>Form</translation>
</message>
<message>
<location filename="../widgets/SlideshowWidget.ui" line="36"/>
<source>Delete this image file</source>
<translation>Bu görüntü dosyasını sil</translation>
</message>
<message>
<location filename="../widgets/SlideshowWidget.ui" line="56"/>
<source>Rotate this image file counter-clockwise</source>
<translation>Bu görüntü dosyasını saat yönünün tersinde döndür</translation>
</message>
<message>
<location filename="../widgets/SlideshowWidget.ui" line="69"/>
<source>Rotate this image file clockwise</source>
<translation>Bu görüntü dosyasını saat yönünde döndür</translation>
</message>
<message>
<location filename="../widgets/SlideshowWidget.ui" line="89"/>
<location filename="../widgets/SlideshowWidget.ui" line="92"/>
<source>Zoom in</source>
<translation>Yakınlaştır</translation>
</message>
<message>
<location filename="../widgets/SlideshowWidget.ui" line="105"/>
<location filename="../widgets/SlideshowWidget.ui" line="108"/>
<source>Zoom out</source>
<translation>Uzaklaştır</translation>
</message>
<message>
<location filename="../widgets/SlideshowWidget.ui" line="212"/>
<source>Go to Beginning</source>
<translation>Başa Git</translation>
</message>
<message>
<location filename="../widgets/SlideshowWidget.ui" line="215"/>
<location filename="../widgets/SlideshowWidget.ui" line="231"/>
<location filename="../widgets/SlideshowWidget.ui" line="304"/>
<location filename="../widgets/SlideshowWidget.ui" line="320"/>
<source>...</source>
<translation>...</translation>
</message>
<message>
<location filename="../widgets/SlideshowWidget.ui" line="218"/>
<source>Shift+Left</source>
<translation>Shift+Sol</translation>
</message>
<message>
<location filename="../widgets/SlideshowWidget.ui" line="228"/>
<source>Go to Previous</source>
<translation>Öncekine Git</translation>
</message>
<message>
<location filename="../widgets/SlideshowWidget.ui" line="234"/>
<source>Left</source>
<translation>Sol</translation>
</message>
<message>
<location filename="../widgets/SlideshowWidget.ui" line="267"/>
<source>File Name</source>
<translation>Dosya Adı</translation>
</message>
<message>
<location filename="../widgets/SlideshowWidget.ui" line="301"/>
<source>Go to Next</source>
<translation>Sonrakine Git</translation>
</message>
<message>
<location filename="../widgets/SlideshowWidget.ui" line="307"/>
<source>Right</source>
<translation>Sağ</translation>
</message>
<message>
<location filename="../widgets/SlideshowWidget.ui" line="317"/>
<source>Go to End</source>
<translation>Sona Git</translation>
</message>
<message>
<location filename="../widgets/SlideshowWidget.ui" line="323"/>
<source>Shift+Right</source>
<translation>Shift+Sağ</translation>
</message>
<message>
<location filename="../widgets/SlideshowWidget.cpp" line="125"/>
<source>Verify Removal</source>
<translation>Kaldırmayı Doğrula</translation>
</message>
<message>
<location filename="../widgets/SlideshowWidget.cpp" line="125"/>
<source>WARNING: This will permanently delete the file from the system!</source>
<translation>UYARI: Bu, dosyayı sistemden kalıcı olarak silecek!</translation>
</message>
<message>
<location filename="../widgets/SlideshowWidget.cpp" line="125"/>
<source>Are you sure you want to continue?</source>
<translation>Devam etmek istediğinize emin misiniz?</translation>
</message>
</context>
<context>
<name>TrayUI</name>
<message>
<location filename="../TrayUI.cpp" line="76"/>
<source>Finished</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../TrayUI.cpp" line="76"/>
<source>Errors during operation. Click to view details</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../TrayUI.cpp" line="91"/>
<source>New Tasks Running</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>XDGDesktopList</name>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="608"/>
<source>Multimedia</source>
<translation type="unfinished">Multimedya</translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="609"/>
<source>Development</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="610"/>
<source>Education</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="611"/>
<source>Games</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="612"/>
<source>Graphics</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="613"/>
<source>Network</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="614"/>
<source>Office</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="615"/>
<source>Science</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="616"/>
<source>Settings</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="617"/>
<source>System</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="618"/>
<source>Utility</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="619"/>
<source>Wine</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../core/libLumina/LuminaXDG.cpp" line="620"/>
<source>Unsorted</source>
<translation type="unfinished"></translation>
</message>
</context>
</TS>
| cpforbes/lumina | src-qt5/desktop-utils/lumina-fm/i18n/lumina-fm_tr.ts | TypeScript | bsd-3-clause | 49,754 |
import React from 'react';
import { shallow } from 'enzyme';
import sinon from 'sinon';
import AddComment from './AddComment';
// Fixture props for <AddComment>: a mock signed-in user plus the list of
// mentionable users (passed through to the component's mention widget).
const USER = {
  user: {
    user: 'RSwanson',
    username: 'Ron_Swanson',
    imageURL: '',
    users: [
      {
        name: 'April Ludwig',
        url: 'aprilL@parksdept.com',
        display: 'April',
      },
    ],
  },
};
describe('<AddComment>', () => {
it('calls submitComment function', () => {
const submitCommentFn = sinon.spy();
const wrapper = shallow(
<AddComment {...USER} submitComment={submitCommentFn} />,
);
const event = {
preventDefault: sinon.spy(),
};
wrapper.find('button').simulate('onClick', event);
expect(submitCommentFn.calledOnce).toBeTruthy;
});
});
| mathjazz/pontoon | translate/src/core/comments/components/AddComment.test.js | JavaScript | bsd-3-clause | 761 |
/*************************************************************
 *          Project: NetCoreCMS                              *
 *              Web: http://dotnetcorecms.org                *
 *           Author: OnnoRokom Software Ltd.                 *
 *          Website: www.onnorokomsoftware.com               *
 *            Email: info@onnorokomsoftware.com              *
 *        Copyright: OnnoRokom Software Ltd.                 *
 *          License: BSD-3-Clause                            *
 *************************************************************/
using NetCoreCMS.Framework.Core.Data;
using NetCoreCMS.Framework.Core.Models;
using NetCoreCMS.Framework.Core.Mvc.Repository;
namespace NetCoreCMS.Framework.Core.Repository
{
    /// <summary>
    /// Repository exposing CRUD access to <see cref="NccWebSite"/> entities
    /// (long primary key) through the shared BaseRepository implementation.
    /// </summary>
    public class NccWebSiteRepository : BaseRepository<NccWebSite, long>
    {
        /// <summary>Creates the repository bound to the given CMS database context.</summary>
        public NccWebSiteRepository(NccDbContext context) : base(context)
        {
        }
    }
}
# -*- coding: utf-8 -*-
#
# アルゴリズムデザインコンテストのさまざまな処理
#
# Copyright (C) 2015 Fujitsu
import numberlink
from datastore import *
from hashlib import sha1, sha256
from flask import make_response, render_template
import random
import datetime
from tz import gae_datetime_JST
from define import DEFAULT_YEAR
def adc_response(msg, isjson, code=200, json_encoded=False):
    """Build a Flask response for an ADC API call.

    msg          -- payload, or template argument when not pre-encoded
    isjson       -- True for a JSON response, False for HTML
    code         -- HTTP status code (200/400/401 get a reason phrase)
    json_encoded -- when True, msg is used verbatim as the body
    """
    if not json_encoded:
        # Render the payload through the matching template.
        msg = render_template('response.json' if isjson else 'response.html',
                              msg=msg)
    resp = make_response(msg)
    reasons = {200: 'OK', 400: 'Bad Request', 401: 'Unauthorized'}
    if code in reasons:
        resp.status = reasons[code]
    resp.status_code = code
    if isjson:
        resp.headers['Content-Type'] = 'application/json'
    else:
        resp.headers['Content-Type'] = 'text/html; charset=utf-8'
    return resp
def adc_response_html(html, code=200):
    """Wrap raw HTML (rendered via raw.html) in a Flask response."""
    resp = make_response(render_template('raw.html', raw=html))
    resp.status_code = code
    resp.headers['Content-Type'] = 'text/html; charset=utf-8'
    return resp
def adc_response_text(body, code=200):
    """Wrap plain UTF-8 text in a Flask response with the given status."""
    resp = make_response(body)
    resp.headers['Content-Type'] = 'text/plain; charset=utf-8'
    resp.status_code = code
    return resp
def adc_response_json(body, code=200):
    """Wrap a pre-encoded JSON string in a Flask response."""
    resp = make_response(body)
    resp.headers['Content-Type'] = 'application/json'
    resp.status_code = code
    return resp
def adc_response_Q_data(result):
    """Return a question's text as a plain-text response (404 when absent)."""
    if result is None:
        return adc_response_text("Not Found\r\n", 404)
    return adc_response_text(result.text, 200)
def log(username, what):
    """Append one audit-log entry (who did what) under the shared log root."""
    root = log_key()
    i = Log(parent = root,
            username = username,
            what = what)
    # NOTE(review): Log.date appears to be assigned automatically on put()
    # (it is queried below but never set here) — confirm in the model.
    i.put()
def log_get_or_delete(username=None, fetch_num=100, when=None, delete=False):
    """Fetch (or, with delete=True, delete) recent log entries, newest first.

    username  -- restrict to a single user's entries when given
    fetch_num -- maximum number of entries processed
    when      -- a datetime.timedelta; only entries newer than now-when match
    delete    -- when True, delete each fetched entry and return only its date
    """
    query = Log.query(ancestor = log_key()).order(-Log.date)
    if username is not None:
        query = query.filter(Log.username == username)
    if when is not None:
        # Only entries more recent than (now - when) survive the filter.
        before = datetime.datetime.now() - when
        #print "before=", before
        query = query.filter(Log.date > before)
    q = query.fetch(fetch_num)
    results = []
    for i in q:
        if delete:
            # Keep the timestamp of the removed entry as a receipt.
            tmp = { 'date': gae_datetime_JST(i.date) }
            i.key.delete()
        else:
            tmp = { 'date': gae_datetime_JST(i.date),
                    'username': i.username,
                    'what': i.what }
        results.append( tmp )
    return results
def adc_login(salt, username, password, users):
    """Check the password; return the user record on success, else None."""
    candidate = adc_get_user_info(username, users)
    if candidate is None:
        return None
    if candidate[1] != hashed_password(username, password, salt):
        return None
    return candidate
def adc_change_password(salt, username, users, attr, priv_admin=False):
    """Change a user's password.

    Admins (priv_admin=True) may change anyone's password without
    supplying the old one. Returns (ok, message).
    """
    required = ('password_old', 'password_new1', 'password_new2')
    if not all(key in attr for key in required):
        return False, "error"
    if not priv_admin:
        # Non-admins must prove knowledge of the current password first.
        if adc_login(salt, username, attr['password_old'], users) is None:
            return False, "password mismatched"
    if attr['password_new1'] != attr['password_new2']:
        return False, "new password is not same"
    if change_password(username, attr['password_new1'].encode('utf-8'), salt):
        return True, "password changed"
    return False, "password change failed"
def adc_get_user_info(username, users):
    """Look up a user: first in the static list, then in the datastore."""
    for entry in users:
        if entry[0] == username:
            return entry
    record = get_userinfo(username)
    if record is None:
        return None
    return [record.username, record.password, record.displayname,
            record.uid, record.gid]
def adc_get_user_list(users):
    """Return all known usernames: static list first, then datastore users."""
    names = [entry[0] for entry in users]
    names.extend(get_username_list())
    return names
def insert_Q_data(q_num, text, author="DASymposium", year=DEFAULT_YEAR, uniq=True):
    """Register a question in the datastore.

    When uniq is True, registration fails if (q_num, author) already exists.
    Returns (True, size, line_num) on success, (False, message) on failure.
    NOTE(review): error and success tuples have different arity — callers
    must check element 0 before unpacking.
    """
    # Duplicate check.
    if uniq:
        q = get_user_Q_data(q_num, author, year)
        if q is not None:
            return (False, "Error: Q%d data already exists" % q_num) # duplicate
    # Validate the puzzle text.
    (size, line_num, line_mat, msg, ok) = numberlink.read_input_data(text)
    if not ok:
        return (False, "Error: syntax error in Q data\n"+msg)
    # text2 is the normalized form of text (line endings, etc.).
    text2 = numberlink.generate_Q_data(size, line_num, line_mat)
    # Choose the root entity: the author's user record.
    userinfo = get_userinfo(author)
    if userinfo is None:
        return (False, "Error: user not found: %s" % author)
    else:
        root = userinfo.key
    # Build the question entity.
    q = Question( parent = root,
                  id = str(q_num),
                  qnum = q_num,
                  text = text2,
                  rows = size[1], # Y
                  cols = size[0], # X
                  linenum = line_num,
                  author = author )
    # Store it.
    q.put()
    #
    return (True, size, line_num)
def update_Q_data(q_num, text, author="DASymposium", year=DEFAULT_YEAR):
    """Update an existing question's text, size and line count.

    Returns (True, num, size, line_num) where num is 1 when an entity was
    updated and 0 when no matching entity existed, or
    (False, message, None, None) when the text fails validation.
    """
    # Validate and normalize the puzzle text first.
    (size, line_num, line_mat, msg, ok) = numberlink.read_input_data(text)
    if not ok:
        return (False, "Error: syntax error in Q data\n"+msg, None, None)
    text2 = numberlink.generate_Q_data(size, line_num, line_mat)
    # Fetch the existing entity for (q_num, author), if any.
    res = get_user_Q_data(q_num, author, year)
    if res is None:
        num = 0
    else:
        num = 1
        res.text = text2
        res.rows = size[1]
        res.cols = size[0]
        res.linenum = line_num
        res.put()
    return (True, num, size, line_num)
def get_Q_data(q_num, year=DEFAULT_YEAR, fetch_num=5):
    """Fetch the Question entity for contest question number q_num."""
    qla = ndb.Key(QuestionListAll, 'master', parent=qdata_key()).get()
    if qla is None:
        return None
    # Contest numbers are 1-based; the stored key list is 0-based.
    idx = q_num - 1
    if not (0 <= idx < len(qla.qs)):
        return None
    return qla.qs[idx].get()
def get_Q_author_all():
    """Build a table mapping contest question number -> author name."""
    qla = ndb.Key(QuestionListAll, 'master', parent=qdata_key()).get()
    if qla is None:
        return None
    # Index 0 is unused: contest numbers start at 1.
    authors = [''] * (len(qla.qs) + 1)
    # Note: entity.qnum is the registration number, not the contest number,
    # so position in qla.qs is what matters here.
    for contest_num, q_key in enumerate(qla.qs, start=1):
        authors[contest_num] = q_key.get().author
    return authors
def get_Q_data_text(q_num, year=DEFAULT_YEAR, fetch_num=5):
    """Return (ok, text) for contest question q_num."""
    entity = get_Q_data(q_num, year, fetch_num)
    if entity is None:
        return False, "Error: data not found: Q%d" % q_num
    return True, entity.text
def get_user_Q_data(q_num, author, year=DEFAULT_YEAR, fetch_num=99):
    """Fetch a question by (q_num, author); parent is the author's record
    when it exists, otherwise the per-year qdata root."""
    userinfo = get_userinfo(author)
    root = qdata_key(year) if userinfo is None else userinfo.key
    return ndb.Key(Question, str(q_num), parent=root).get()
def get_admin_Q_all():
    "List every question registered in the datastore (admin view)."
    #query = Question.query().order(Question.author, Question.qnum)
    query = Question.query(ancestor=userlist_key()).order(Question.author, Question.qnum)
    q = query.fetch()
    num = len(q)
    # First line is the total count, then one line per question.
    out = str(num) + "\n"
    for i in q:
        dt = gae_datetime_JST(i.date)
        out += "Q%02d SIZE %dX%d LINE_NUM %d (%s) %s\n" % (i.qnum, i.cols, i.rows, i.linenum, i.author, dt)
    return out
def admin_Q_list_get():
    """Return the admin-facing contest question list, '' when absent."""
    qla = ndb.Key(QuestionListAll, 'master', parent=qdata_key()).get()
    return '' if qla is None else qla.text_admin
def admin_Q_list_create():
    "Create the shuffled contest question list from all registered questions."
    #query = Question.query(ancestor=userlist_key()).order(Question.author, Question.qnum)
    query = Question.query(ancestor=userlist_key())
    qlist = []
    q = query.fetch()
    num = len(q)
    for i in q:
        qlist.append([i.qnum, i.author, i.key])
    # Shuffle so the contest ordering is independent of registration order.
    random.shuffle(qlist)
    out = str(num) + "\n"
    root = qdata_key()
    # Deleting any pre-existing question list was considered and abandoned.
    #out += admin_Q_list_delete() + "\n"
    num = 1
    out_admin = ""
    out_user = ""
    qs = []
    for i in qlist:
        qs.append(i[2])
        # Admin view exposes author and original number; user view does not.
        out_admin += "Q%d %s %d\n" % (num, i[1], i[0])
        out_user += "Q%d\n" % num
        num += 1
    out += out_admin
    # get_or_insert is atomic: if 'master' already exists it is returned
    # unchanged, which the mismatch check below reports.
    qla = QuestionListAll.get_or_insert('master', parent=root, qs=qs, text_admin=out_admin, text_user=out_user)
    if qla.text_admin != out_admin:
        out += "Already inserted\n"
    return out
def admin_Q_list_delete():
    """Delete the contest question list entity and report the action."""
    ndb.Key(QuestionListAll, 'master', parent=qdata_key()).delete()
    return "DELETE Q-list"
def get_Q_all(html=False):
    """Return the user-facing question list, optionally as HTML links."""
    qla = ndb.Key(QuestionListAll, 'master', parent=qdata_key()).get()
    if qla is None:
        return ''
    if not html:
        return qla.text_user
    # One anchor per question; numbering restarts at 1 for display.
    links = []
    for num, entry in enumerate(qla.text_user.splitlines(), 1):
        links.append('<a href="/Q/%d">%s</a><br />\n' % (num, entry))
    return ''.join(links)
def menu_post_A(username):
    """Build the HTML menu of answer-upload links for `username`."""
    qla = ndb.Key(QuestionListAll, 'master', parent=qdata_key()).get()
    if qla is None:
        return ''
    links = []
    for num, entry in enumerate(qla.text_user.splitlines(), 1):
        links.append('<a href="/A/%s/Q/%d">post answer %s</a><br />\n'
                     % (username, num, entry))
    return ''.join(links)
def post_A(username, atext, form):
    """Store an uploaded answer.

    `form` supplies 'anum', 'cpu_sec', 'mem_byte' and 'misc_text'.
    Returns the (ok, message) pair from put_A_data.
    """
    anum = (int)(form['anum'])
    cpu_sec = 0
    mem_byte = 0
    try:
        cpu_sec = (float)(form['cpu_sec'])
        mem_byte = (int)(form['mem_byte'])
    except ValueError:
        # float('') raises ValueError; keep the zero defaults in that case.
        pass
    misc_text = form['misc_text']
    print "A%d\n%f\n%d\n%s" % (anum, cpu_sec, mem_byte, misc_text.encode('utf-8'))
    return put_A_data(anum, username, atext, cpu_sec, mem_byte, misc_text)
def get_user_Q_all(author, html=None):
    "List the questions registered by `author`, optionally as HTML links."
    userinfo = get_userinfo(author)
    if userinfo is None:
        # Unknown author: fall back to the shared question root.
        root = qdata_key()
    else:
        root = userinfo.key
    query = Question.query( ancestor = root ).order(Question.qnum)
    #query = query.filter(Question.author == author )
    q = query.fetch()
    num = len(q)
    out = ""
    for i in q:
        if html is None:
            out += "Q%d SIZE %dX%d LINE_NUM %d (%s)\n" % (i.qnum, i.cols, i.rows, i.linenum, i.author)
        else:
            url = '/user/%s/Q/%d' % (author, i.qnum)
            out += '<a href="%s">Q%d SIZE %dX%d LINE_NUM %d (%s)</a><br />\n' % (url, i.qnum, i.cols, i.rows, i.linenum, i.author)
    return out
def delete_user_Q_data(q_num, author, year=DEFAULT_YEAR):
    """Delete the question numbered `q_num` owned by `author`.

    Returns a human-readable status message either way.
    """
    res = get_user_Q_data(q_num, author, year)
    if res is None:
        return "Q%d data not found" % q_num
    res.key.delete()
    return "DELETE /user/%s/Q/%d\n" % (author, q_num)
def get_admin_A_all():
    "List every answer registered in the datastore (admin view)."
    #query = Answer.query(ancestor=userlist_key()).order(Answer.owner, Answer.anum)
    query = Answer.query(ancestor=userlist_key())
    q = query.fetch()
    num = len(q)
    # First line is the total count, then one line per answer.
    out = str(num) + "\n"
    for i in q:
        dt = gae_datetime_JST(i.date)
        out += "A%02d (%s) %s\n" % (i.anum, i.owner, dt)
    return out
def get_A_data(a_num=None, username=None):
    """
    Fetch answer data from the datastore.

    When a_num is None, return the list of all answers under the root.
    When a_num is a number, return that single answer (or None if absent).
    Returns (ok, payload, root); on an unknown username ok is False and
    payload is an error message.
    """
    if username is None:
        root = userlist_key()
    else:
        userinfo = get_userinfo(username)
        if userinfo is None:
            msg = "ERROR: user not found: %s" % username
            return False, msg, None
        root = userinfo.key
    if a_num is not None:
        # Direct key lookup for a single answer.
        a = ndb.Key(Answer, str(a_num), parent=root).get()
        return True, a, root
    #query = Answer.query(ancestor=root).order(Answer.anum)
    query = Answer.query(ancestor=root)
    #if a_num is not None:
    #    query = query.filter(Answer.anum == a_num)
    q = query.fetch()
    return True, q, root
def put_A_data(a_num, username, text, cpu_sec=None, mem_byte=None, misc_text=None):
    "Store an answer in the datastore; returns (ok, message)."
    msg = ""
    # Fetch the corresponding question text.
    ret, q_text = get_Q_data_text(a_num)
    if not ret:
        msg = "Error in Q%d data: " % a_num + q_text
        return False, msg
    # Reject duplicate answers from the same user.
    ret, q, root = get_A_data(a_num, username)
    if ret==True and q is not None:
        msg += "ERROR: duplicated answer\n";
        return False, msg
    # Validate the answer against the question; msg is replaced here.
    judges, msg = numberlink.check_A_data(text, q_text)
    q = 0.0
    if judges[0] != True:
        msg += "Error in answer A%d\n" % a_num
        check_A = False
    else:
        check_A = True  # correct answer
        q = judges[1]
    # Solution quality factor.
    msg += "Quality factor = %1.19f\n" % q
    # Register in the datastore; incorrect answers are stored too.
    a = Answer( parent = root,
                id = str(a_num),
                anum = a_num,
                text = text,
                owner = username,
                cpu_sec = cpu_sec,
                mem_byte = mem_byte,
                misc_text = misc_text,
                result = msg[-1499:], # field has a length limit; keep the tail only
                judge = int(check_A),
                q_factor = q )
    a_key = a.put()
    return True, msg
def put_A_info(a_num, username, info):
    "Store supplementary info (cpu/mem/misc) on an existing answer."
    msg = ""
    # Fetch the answer; root is the UserInfo key, a is the Answer entity.
    ret, a, root = get_A_data(a_num, username)
    if ret==False or a is None:
        if ret==False: msg += a + "\n"
        msg += "ERROR: A%d data not found" % a_num
        return False, msg
    a.cpu_sec = info['cpu_sec']
    a.mem_byte = info['mem_byte']
    a.misc_text = info['misc_text']
    a.put()
    msg += "UPDATE A%d info\n" % a_num
    return True, msg
def get_or_delete_A_data(a_num=None, username=None, delete=False):
    "Fetch (delete=False) or delete (delete=True) answer data."
    ret, q, root = get_A_data(a_num=a_num, username=username)
    if not ret:
        return False, q # q==msg
    if q is None:
        return ret, []
    result = []
    if a_num is None: # when a_num is None, q is already a list of answers
        q2 = q
    else:
        q2 = [q]
    if delete:
        # Clear the supplementary info first, then the answers themselves.
        get_or_delete_A_info(a_num=a_num, username=username, delete=True)
        for i in q2:
            result.append("DELETE A%d" % i.anum)
            i.key.delete()
    else: # GET: report each answer number followed by its text
        for i in q2:
            result.append("GET A%d" % i.anum)
            result.append(i.text)
    return True, result
def get_user_A_all(username, html=None):
    """List answer numbers for `username`; as HTML links when `html` is truthy."""
    ret, q, root = get_A_data(username=username)
    if not ret:
        return False, q  # q carries the error message in this case
    parts = []
    for ans in q:
        if html:
            parts.append('<a href="/A/%s/Q/%d">A%d</a> <a href="/A/%s/Q/%d/info">info</a><br />\n'
                         % (username, ans.anum, ans.anum, username, ans.anum))
        else:
            parts.append('A%d\n' % ans.anum)
    return True, ''.join(parts)
def get_or_delete_A_info(a_num=None, username=None, delete=False):
    "Fetch or clear the supplementary info attached to answer data."
    msg = ""
    r, a, root = get_A_data(a_num, username)
    if not r:
        return False, a, None
    if a_num is None:
        # All answers for this root.
        q = a
    else:
        if a is None:
            msg += "A%d not found" % a_num
            return True, msg, []
        q = [a]
    results = []
    num = 0
    for i in q:
        num += 1
        if delete:
            # Clearing only the info fields keeps the answer itself.
            results.append({'anum': i.anum})
            i.cpu_sec = None
            i.mem_byte = None
            i.misc_text = None
            i.put()
        else:
            # Strip the (potentially large) answer text from the dump.
            tmp = i.to_dict()
            del tmp['text']
            results.append( tmp )
    method = 'DELETE' if delete else 'GET'
    a_num2 = 0 if a_num is None else a_num
    msg += "%s A%d info %d" % (method, a_num2, num)
    return True, msg, results
def hashed_password(username, password, salt):
    """Return the SHA-256 hex digest of salt + utf8(username) + utf8(password)."""
    return sha256(salt + username.encode('utf-8') + password.encode('utf-8')).hexdigest()
def create_user(username, password, displayname, uid, gid, salt):
    "Register a new user; the password is stored salted and hashed."
    hashed = hashed_password(username, password, salt)
    userlist = userlist_key()
    # The entity id doubles as the username for direct key lookups.
    u = UserInfo( parent = userlist,
                  id = username,
                  username = username,
                  password = hashed,
                  displayname = displayname,
                  uid = uid,
                  gid = gid )
    u.put()
def change_password(username, password, salt):
    """Re-hash and store a new password; False when the user is unknown."""
    info = get_userinfo(username)
    if info is None:
        return False
    info.password = hashed_password(username, password, salt)
    info.put()
    return True
def get_username_list():
    """Return the list of registered usernames."""
    # Ordering by uid was disabled; results come back in datastore order.
    query = UserInfo.query( ancestor = userlist_key() )
    return [u.username for u in query.fetch()]
def get_userinfo(username):
    """Fetch the UserInfo entity for `username` (None when absent)."""
    return ndb.Key(UserInfo, username, parent=userlist_key()).get()
def delete_user(username):
    """Delete `username` from the datastore.

    Returns the number of deleted users: 1 on success, 0 when the user
    does not exist.
    """
    userinfo = get_userinfo(username)
    if userinfo is None:
        return 0
    userinfo.key.delete()
    return 1
    # Removed: an unreachable trailing `return n` referenced an undefined
    # name and would have raised NameError had it ever executed.
def Q_check(qtext):
    "Validate a question file; returns (report_text, ok)."
    hr = '-'*40 + "\n"
    (size, line_num, line_mat, msg, ok) = numberlink.read_input_data(qtext)
    if ok:
        # Echo the normalized form so the submitter sees what was accepted.
        q = numberlink.generate_Q_data(size, line_num, line_mat)
        out = "OK\n" + hr + q + hr
    else:
        # Echo the rejected input followed by the parser's error message.
        out = "NG\n" + hr + qtext + hr + msg
    return out, ok
def calc_score_all():
    "Compute the full score board from all answers in the datastore."
    authors = get_Q_author_all()
    #print "authors=", authors
    # All of these are dicts keyed by answer label ('A01', ...) and then
    # by username.
    q_factors = {}
    q_point = {}
    ok_point = {}
    bonus_point = {}
    result = {}
    misc = {}
    query = Answer.query(ancestor=userlist_key())
    q = query.fetch()
    all_numbers = {}
    all_users = {}
    for i in q:
        #anum = 'A%d' % i.anum
        anum = 'A%02d' % i.anum
        username = i.owner
        all_numbers[anum] = 1
        all_users[username] = 1
        # Correct-answer points.
        if not(anum in ok_point):
            ok_point[anum] = {}
        ok_point[anum][username] = i.judge
        # Quality factors (converted to points below).
        if not(anum in q_factors):
            q_factors[anum] = {}
        q_factors[anum][username] = i.q_factor
        # Bonus points for answering one's own question.
        if i.judge in (0,1) and authors[i.anum] == username:
            #print "check_bonus:", i.anum, i.judge, authors[i.anum], username
            if not(anum in bonus_point):
                bonus_point[anum] = {}
            bonus_point[anum][username] = i.judge
        # result (judge log message).
        if not(anum in result):
            result[anum] = {}
        result[anum][username] = i.result
        # (misc) date, cpu_sec, mem_byte, misc_text.
        if not(anum in misc):
            misc[anum] = {}
        misc[anum][username] = [i.date, i.cpu_sec, i.mem_byte, i.misc_text]
    #print "ok_point=", ok_point
    #print "bonus_point=", bonus_point
    #print "q_factors=", q_factors
    #print "result=\n", result
    # Convert quality factors into points: each question distributes q_pt
    # among its answerers proportionally to their quality factor.
    q_pt = 10.0
    for anum, values in q_factors.iteritems(): # per question number
        #print "anum=", anum
        qf_total = 0.0 # sum of Q_factor over all answerers
        for user, qf in values.iteritems():
            #print "qf=", qf
            qf_total += qf
        #print "qf_total=", qf_total
        for user, qf in values.iteritems():
            if qf_total == 0.0:
                tmp = 0.0
            else:
                tmp = q_pt * qf / qf_total
            if not anum in q_point:
                q_point[anum] = {}
            q_point[anum][user] = tmp
    #print "q_point=", q_point
    # Aggregate into the score board.
    tmp = ['']*(len(all_numbers) + 1)
    i = 0
    for anum in sorted(all_numbers.keys()):
        tmp[i] = anum
        i += 1
    tmp[i] = 'TOTAL'
    score_board = {'/header/': tmp} # header row
    for user in sorted(all_users.keys()):
        #print user
        if not(user in score_board):
            score_board[user] = [0]*(len(all_numbers) + 1)
        i = 0
        ptotal = 0.0
        for anum in sorted(all_numbers.keys()):
            #print anum
            p = 0.0
            if user in ok_point[anum]: p += ok_point[anum][user]
            if user in q_point[anum]: p += q_point[anum][user]
            if anum in bonus_point and user in bonus_point[anum]:
                p += bonus_point[anum][user]
            #print "p=", p
            score_board[user][i] = p
            ptotal += p
            i += 1
        score_board[user][i] = ptotal
    #print "score_board=", score_board
    return score_board, ok_point, q_point, bonus_point, q_factors, result, misc
def html_score_board(score_board):
    """Render the score board dict as an HTML table.

    `score_board` maps '/header/' to the header row and each user name
    to a row of floats; user rows are emitted in sorted key order.
    """
    hd_key = '/header/'
    rows = ['<table border=1>\n']
    header_cells = ''.join('<th>%s</th>' % hd for hd in score_board[hd_key])
    rows.append('<tr><th>-</th>' + header_cells + '</tr>\n')
    for user in sorted(score_board.keys()):
        if user == hd_key:
            continue
        cells = ''.join('<td>%1.1f</td>' % val for val in score_board[user])
        rows.append('<tr><th>%s</th>' % user + cells + '</tr>\n')
    rows.append('</table>\n')
    return ''.join(rows)
| dasadc/conmgr | adc2018/server/adcutil.py | Python | bsd-3-clause | 23,866 |
#include "mex.h"
#include <iostream>
#include "drakeMexUtil.h"
#include "RigidBodyManipulator.h"
#include "math.h"
using namespace Eigen;
using namespace std;
/*
* mex interface for bullet collision detection
* closest-distance for each body to all other bodies (~(NB^2-NB)/2 points)
*
* MATLAB signature:
*
* [xA,xB,normal,distance,idxA,idxB] = ...
* collisionDetectmex( mex_model_ptr,allow_multiple_contacts,
* active_collision_options);
*/
// Entry point called by MATLAB. Validates arguments, unpacks the
// active_collision_options struct, dispatches to the appropriate
// collisionDetect/potentialCollisions overload, and copies the results
// into MATLAB arrays.
void mexFunction( int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[] ) {
  //check number of arguments
  if (nrhs < 3) {
    mexErrMsgIdAndTxt("Drake:collisionDetectmex:NotEnoughInputs", "Usage: [xA,xB,normal,distance,idxA,idxB] = collisionDetectmex(mex_model_ptr, allow_multiple_contacts, active_collision_options)");
  }
  //check argument types
  if (!mxIsClass(prhs[0], "DrakeMexPointer")) {
    mexErrMsgIdAndTxt("Drake:collisionDetectmex:InvalidInputType", "Expected a DrakeMexPointer for mex_model_ptr but got something else.");
  }
  if (!mxIsLogical(prhs[1])) {
    mexErrMsgIdAndTxt("Drake:collisionDetectmex:InvalidInputType", "Expected a boolean logic type for allow_multiple_collisions but got something else.");
  }
  if (!mxIsStruct(prhs[2])) {
    mexErrMsgIdAndTxt("Drake:collisionDetectmex:InvalidInputType", "Expected a struct type for active_collision_options but got something else.");
  }
  // first get the model_ptr back from matlab
  RigidBodyManipulator *model= (RigidBodyManipulator*) getDrakeMexPointer(prhs[0]);
  // Parse `active_collision_options`
  vector<int> active_bodies_idx;
  set<string> active_group_names;
  const mxArray* active_collision_options = prhs[2];
  const mxArray* allow_multiple_contacts = prhs[1];
  const mxArray* body_idx = mxGetField(active_collision_options, 0, "body_idx");
  if (body_idx != NULL) {
    size_t n_active_bodies = static_cast<size_t>(mxGetNumberOfElements(body_idx));
    active_bodies_idx.resize(n_active_bodies);
    memcpy(active_bodies_idx.data(),(int*) mxGetData(body_idx),
           sizeof(int)*n_active_bodies);
    // Convert MATLAB's 1-based body indices to 0-based C++ indices.
    transform(active_bodies_idx.begin(), active_bodies_idx.end(),
              active_bodies_idx.begin(),
              [](int i){return --i;});
  }
  const mxArray* collision_groups = mxGetField(active_collision_options, 0, "collision_groups");
  if (collision_groups != NULL) {
    size_t num_collision_groups = static_cast<size_t>(mxGetNumberOfElements(collision_groups));
    for (size_t i = 0; i < num_collision_groups; i++) {
      const mxArray *ptr = mxGetCell(collision_groups, i);
      // +1 for the terminating NUL that mxGetString writes.
      size_t buflen = static_cast<size_t>(mxGetN(ptr) * sizeof(mxChar)) + 1;
      char* str = (char*)mxMalloc(buflen);
      mxGetString(ptr, str, buflen);
      active_group_names.insert(str);
      mxFree(str);
    }
  }
  vector<int> bodyA_idx, bodyB_idx;
  MatrixXd ptsA, ptsB, normals, JA, JB, Jd;
  VectorXd dist;
  // Dispatch on which filters were supplied.
  if (active_bodies_idx.size() > 0) {
    if (active_group_names.size() > 0) {
      model->collisionDetect(dist, normals, ptsA, ptsB, bodyA_idx, bodyB_idx,
                             active_bodies_idx,active_group_names);
    } else {
      model->collisionDetect(dist, normals, ptsA, ptsB, bodyA_idx, bodyB_idx,
                             active_bodies_idx);
    }
  } else {
    const bool multiple_contacts = mxIsLogicalScalarTrue(allow_multiple_contacts);
    if(multiple_contacts) {
      model->potentialCollisions(dist, normals, ptsA, ptsB, bodyA_idx, bodyB_idx);
    } else if (active_group_names.size() > 0) {
      model->collisionDetect(dist, normals, ptsA, ptsB, bodyA_idx, bodyB_idx,
                             active_group_names);
    } else {
      model->collisionDetect(dist, normals, ptsA, ptsB, bodyA_idx, bodyB_idx);
    }
  }
  // Convert the 0-based body indices back to MATLAB's 1-based convention.
  vector<int32_T> idxA(bodyA_idx.size());
  transform(bodyA_idx.begin(), bodyA_idx.end(), idxA.begin(),
            [](int i){return ++i;});
  vector<int32_T> idxB(bodyB_idx.size());
  transform(bodyB_idx.begin(), bodyB_idx.end(), idxB.begin(),
            [](int i){return ++i;});
  // Copy each requested output into a freshly allocated MATLAB array.
  if (nlhs>0) {
    plhs[0] = mxCreateDoubleMatrix(3,static_cast<int>(ptsA.cols()),mxREAL);
    memcpy(mxGetPrSafe(plhs[0]),ptsA.data(),sizeof(double)*3*ptsA.cols());
  }
  if (nlhs>1) {
    plhs[1] = mxCreateDoubleMatrix(3,static_cast<int>(ptsB.cols()),mxREAL);
    memcpy(mxGetPrSafe(plhs[1]),ptsB.data(),sizeof(double)*3*ptsB.cols());
  }
  if (nlhs>2) {
    plhs[2] = mxCreateDoubleMatrix(3,static_cast<int>(normals.cols()),mxREAL);
    memcpy(mxGetPrSafe(plhs[2]),normals.data(),sizeof(double)*3*normals.cols());
  }
  if (nlhs>3) {
    plhs[3] = mxCreateDoubleMatrix(1,static_cast<int>(dist.size()),mxREAL);
    memcpy(mxGetPrSafe(plhs[3]),dist.data(),sizeof(double)*dist.size());
  }
  if (nlhs>4) {
    plhs[4] = mxCreateNumericMatrix(1,static_cast<int>(idxA.size()),mxINT32_CLASS,mxREAL);
    memcpy(mxGetData(plhs[4]),idxA.data(),sizeof(int32_T)*idxA.size());
  }
  if (nlhs>5) {
    plhs[5] = mxCreateNumericMatrix(1,static_cast<int>(idxB.size()),mxINT32_CLASS,mxREAL);
    memcpy(mxGetData(plhs[5]),idxB.data(),sizeof(int32_T)*idxB.size());
  }
}
| thientu/drake | systems/plants/collisionDetectmex.cpp | C++ | bsd-3-clause | 5,102 |
#include "MutableValueSetterProxy.h"
#include <jsi/jsi.h>
#include "MutableValue.h"
#include "SharedParent.h"
using namespace facebook;
namespace reanimated {
// Property setter for the proxy exposed to worklets. "_value" writes the
// underlying value, "_animation" installs/overwrites the animation slot,
// and writes to "value" are ignored (see the comment below).
void MutableValueSetterProxy::set(
    jsi::Runtime &rt,
    const jsi::PropNameID &name,
    const jsi::Value &newValue) {
  auto propName = name.utf8(rt);
  if (propName == "_value") {
    mutableValue->setValue(rt, newValue);
  } else if (propName == "_animation") {
    // TODO: assert to allow animation to be set from UI only
    // Lazily create the shared animation slot before assigning into it.
    if (mutableValue->animation.expired()) {
      mutableValue->animation = mutableValue->getWeakRef(rt);
    }
    *mutableValue->animation.lock() = jsi::Value(rt, newValue);
  } else if (propName == "value") {
    // you call `this.value` inside of value setter, we should throw
  }
}
// Property getter for the proxy exposed to worklets. Both "value" and
// "_value" report the current value; "_animation" returns the (lazily
// created) animation slot; anything else is undefined.
jsi::Value MutableValueSetterProxy::get(
    jsi::Runtime &rt,
    const jsi::PropNameID &name) {
  const auto propName = name.utf8(rt);
  if (propName == "value" || propName == "_value") {
    return mutableValue->getValue(rt);
  }
  if (propName == "_animation") {
    // Initialize the shared animation slot on first access.
    if (mutableValue->animation.expired()) {
      mutableValue->animation = mutableValue->getWeakRef(rt);
    }
    return jsi::Value(rt, *mutableValue->animation.lock());
  }
  return jsi::Value::undefined();
}
} // namespace reanimated
| exponentjs/exponent | ios/vendored/unversioned/react-native-reanimated/Common/cpp/SharedItems/MutableValueSetterProxy.cpp | C++ | bsd-3-clause | 1,341 |
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE762_Mismatched_Memory_Management_Routines__new_free_struct_54b.cpp
Label Definition File: CWE762_Mismatched_Memory_Management_Routines__new_free.label.xml
Template File: sources-sinks-54b.tmpl.cpp
*/
/*
* @description
* CWE: 762 Mismatched Memory Management Routines
* BadSource: Allocate data using new
* GoodSource: Allocate data using malloc()
* Sinks:
* GoodSink: Deallocate data using delete
* BadSink : Deallocate data using free()
* Flow Variant: 54 Data flow: data passed as an argument from one function through three others to a fifth; all five functions are in different source files
*
* */
#include "std_testcase.h"
namespace CWE762_Mismatched_Memory_Management_Routines__new_free_struct_54
{
#ifndef OMITBAD
/* bad function declaration */
void badSink_c(twoIntsStruct * data);
/* Flow step 2 of 5: forward the pointer unchanged; the mismatched
 * deallocation happens in a later file of the chain. */
void badSink_b(twoIntsStruct * data)
{
    badSink_c(data);
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodG2B uses the GoodSource with the BadSink */
void goodG2BSink_c(twoIntsStruct * data);
/* Pass-through link in the goodG2B flow (malloc source, free sink). */
void goodG2BSink_b(twoIntsStruct * data)
{
    goodG2BSink_c(data);
}
/* goodB2G uses the BadSource with the GoodSink */
void goodB2GSink_c(twoIntsStruct * data);
/* Pass-through link in the goodB2G flow (new source, delete sink). */
void goodB2GSink_b(twoIntsStruct * data)
{
    goodB2GSink_c(data);
}
#endif /* OMITGOOD */
} /* close namespace */
| JianpingZeng/xcc | xcc/test/juliet/testcases/CWE762_Mismatched_Memory_Management_Routines/s07/CWE762_Mismatched_Memory_Management_Routines__new_free_struct_54b.cpp | C++ | bsd-3-clause | 1,396 |
<?php
namespace app\models;
use Yii;
use app\models\general\GeneralLabel;
use app\models\general\GeneralMessage;
/**
* This is the model class for table "tbl_ref_jawatan_induk".
*
* @property integer $id
* @property string $desc
* @property integer $aktif
* @property integer $created_by
* @property integer $updated_by
* @property string $created
* @property string $updated
*/
class RefJawatanInduk extends \yii\db\ActiveRecord
{
    // Committee position IDs ("jawatan induk") referenced across the app.
    const PRESIDEN = 1;
    const TIMBALAN_PRESIDEN = 2;
    const NAIB_PRESIDEN_1 = 3;
    const NAIB_PRESIDEN_2 = 4;
    const NAIB_PRESIDEN_3 = 5;
    const NAIB_PRESIDEN_4 = 6;
    const NAIB_PRESIDEN_5 = 7;
    const NAIB_PRESIDEN_6 = 8;
    const NAIB_PRESIDEN_7 = 9;
    const NAIB_PRESIDEN_8 = 10;
    const NAIB_PRESIDEN_9 = 11;
    const NAIB_PRESIDEN_10 = 12;
    const BENDAHARI = 13;
    const PENOLONG_BENDAHARI = 14;
    const SETIAUSAHA = 15;
    const PENOLONG_SETIAUSAHA = 16;
    const AHLI_JAWATANKUASA = 17;
    const JURUAUDIT_1 = 18;
    const JURUAUDIT_2 = 19;
    const PENAUNG = 20;
    const PENASIHAT = 21;
    /**
     * @inheritdoc
     */
    public static function tableName()
    {
        return 'tbl_ref_jawatan_induk';
    }
    /**
     * Attach audit-trail, blameable and timestamp behaviors so created/
     * updated metadata columns are filled automatically.
     */
    public function behaviors()
    {
        return [
            'bedezign\yii2\audit\AuditTrailBehavior',
            [
                'class' => \yii\behaviors\BlameableBehavior::className(),
                'createdByAttribute' => 'created_by',
                'updatedByAttribute' => 'updated_by',
            ],
            [
                'class' => \yii\behaviors\TimestampBehavior::className(),
                'createdAtAttribute' => 'created',
                'updatedAtAttribute' => 'updated',
                'value' => new \yii\db\Expression('NOW()'),
            ],
        ];
    }
    /**
     * @inheritdoc
     */
    public function rules()
    {
        return [
            [['desc'], 'required', 'message' => GeneralMessage::yii_validation_required],
            [['aktif', 'created_by', 'updated_by'], 'integer', 'message' => GeneralMessage::yii_validation_integer],
            [['created', 'updated'], 'safe'],
            [['desc'], 'string', 'max' => 80, 'tooLong' => GeneralMessage::yii_validation_string_max],
            // Inline validator rejecting values that fail the project XSS check.
            [['desc'], function ($attribute, $params) {
                if (!\common\models\general\GeneralFunction::validateXSS($this->$attribute)) {
                    $this->addError($attribute, GeneralMessage::yii_validation_xss);
                }
            }],
        ];
    }
    /**
     * @inheritdoc
     */
    public function attributeLabels()
    {
        return [
            'id' => GeneralLabel::id,
            'desc' => GeneralLabel::desc,
            'aktif' => GeneralLabel::aktif,
            'created_by' => GeneralLabel::created_by,
            'updated_by' => GeneralLabel::updated_by,
            'created' => GeneralLabel::created,
            'updated' => GeneralLabel::updated,
        ];
    }
}
| hung101/kbs | frontend/models/RefJawatanInduk.php | PHP | bsd-3-clause | 2,995 |
// Copyright 2010 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ------------------------------------------------------------------------
#include <time.h>
#include <assert.h>
#include "engine/globals.h"
#include "public/logging.h"
#include "engine/memory.h"
#include "engine/utils.h"
#include "engine/opcode.h"
#include "engine/map.h"
#include "engine/scope.h"
#include "engine/type.h"
#include "engine/node.h"
#include "engine/proc.h"
namespace sawzall {
// Allocate a fresh, empty Scope in proc's arena (via NEWP).
Scope* Scope::New(Proc* proc) {
  Scope* s = NEWP(proc, Scope);
  return s;
}
// Insert obj into this scope. Anonymous objects are always accepted;
// named objects only when the name is not already taken. Returns
// whether the insertion happened.
bool Scope::Insert(Object* obj) {
  assert(obj != NULL);
  if (obj->is_anonymous() || Lookup(obj->name()) == NULL) {
    // object doesn't exist yet in this scope => insert it
    list_.Append(obj);
    obj->set_scope(this);
    return true;
  } else {
    // object exists already
    return false;
  }
}
// Like Insert(), but a duplicate name is a fatal error.
void Scope::InsertOrDie(Object* obj) {
  if (!Insert(obj))
    FatalError("identifier '%s' already declared in this scope", obj->name());
}
// Insert an intrinsic; when the name is already bound to another
// intrinsic, try to register fun as an overload of it instead.
bool Scope::InsertOrOverload(Intrinsic* fun) {
  assert(fun != NULL);
  if (Insert(fun))
    return true;
  Object* obj = Lookup(fun->name());
  Intrinsic* existing = obj->AsIntrinsic();
  if (existing != NULL && existing->add_overload(fun)) {
    fun->object()->set_scope(this);
    return true;
  }
  return false;
}
// Like InsertOrOverload(), but failure is a fatal error.
void Scope::InsertOrOverloadOrDie(Intrinsic* fun) {
  if (!InsertOrOverload(fun))
    FatalError("identifier '%s' already declared in this scope", fun->name());
}
// Convenience overload: look up a NUL-terminated name.
Object* Scope::Lookup(szl_string name) const {
  return Lookup(name, strlen(name));
}
// Compare dotted_name (NUL-terminated) against name[0..length), treating
// a '.' in dotted_name as matching a '_' in name. Used so that
// package-qualified names can be referenced with underscores.
static bool SamePossiblyDottedName(szl_string dotted_name, szl_string name,
                                   int length) {
  const char* p;
  const char* q;
  for (p = dotted_name, q = name; *p != '\0' && q < name + length;
       p++, q++) {
    if (*p != *q) {
      // Possible mismatch, check for the exception case.
      if (*p == '.' && *q == '_')
        continue;  // Was '.' vs '_', treat it as a match
      else
        return false;  // Not '.' vs '_', really was a mismatch
    }
  }
  // Both strings must be fully consumed for a match.
  return (*p == '\0' && q == name + length);
}
// Linear search for a named object matching name[0..length).
// Returns NULL when not found.
Object* Scope::Lookup(szl_string name, int length) const {
  assert(name != NULL);
  for (int i = 0; i < list_.length(); i++) {
    Object* obj = list_[i];
    if (!obj->is_anonymous()) {
      // Exact match on both length and content.
      if (memcmp(obj->name(), name, length) == 0 && obj->name()[length] == '\0')
        return obj;
      // Temporarily find dotted names (package-qualified names using dot as
      // the separator) when given a name that matches except for using
      // underscores where the first name uses dots.
      if (obj->AsTypeName() != NULL && obj->type()->is_tuple() &&
          obj->type()->as_tuple()->is_message() &&
          SamePossiblyDottedName(obj->name(), name, length)) {
        return obj;
      }
    }
  }
  return NULL;
}
// Like Lookup(), but a missing name is a fatal error.
Object* Scope::LookupOrDie(szl_string name) const {
  Object* obj = Lookup(name);
  if (obj == NULL)
    FatalError("identifier '%s' not found in this scope", name);
  return obj;
}
// Find the Field entry carrying the given protocol tag, or NULL.
Field* Scope::LookupByTag(int tag) const {
  assert(tag > 0);  // tags must be > 0, 0 indicates no tag
  for (int i = 0; i < list_.length(); i++) {
    Field* field = list_[i]->AsField();
    if (field != NULL && field->tag() == tag)
      return field;
  }
  return NULL;
}
// Populate dst with the already-cloned counterparts (from cmap) of the
// declarations listed in src.
void Scope::Clone(CloneMap* cmap, Scope* src, Scope* dst) {
  // Scope entries are just for lookup, so we never clone them; instead
  // we rely on their having already been cloned where originally written.
  for (int i = 0; i < src->num_entries(); i++) {
    // Block scope entries can be VarDecl, TypeName, QuantVarDecl
    Object* obj = src->entry_at(i);
    if (obj->AsVarDecl() != NULL) {
      VarDecl* vardecl = cmap->Find(obj->AsVarDecl());
      assert(vardecl != NULL);
      dst->InsertOrDie(vardecl);
    } else if (obj->AsTypeName() != NULL) {
      TypeName* tname = cmap->Find(obj->AsTypeName());
      assert(tname != NULL);
      dst->InsertOrDie(tname);
    } else {
      ShouldNotReachHere();
    }
  }
}
// Debug dump of the scope contents; variables get an extra comment
// describing their storage kind and frame offset.
void Scope::Print() const {
  if (is_empty()) {
    F.print("{}\n");
  } else {
    F.print("{\n");
    for (int i = 0; i < num_entries(); i++) {
      Object* obj = entry_at(i);
      F.print("  %s: %T;", obj->display_name(), obj->type());
      // print more detail, if possible
      VarDecl* var = obj->AsVarDecl();
      if (var != NULL) {
        const char* kind = "";
        if (var->is_local())
          kind = "local";
        else if (var->is_param())
          kind = "parameter";
        else if (var->is_static())
          kind = "static";
        else
          ShouldNotReachHere();
        F.print("  # %s, offset = %d", kind, var->offset());
      }
      F.print("\n");
    }
    F.print("}\n");
  }
}
// Simulate multiple inheritance.
// These should be in the header but that introduces too many dependencies.
// Each wrapper simply forwards to the Object*-based implementation above.
bool Scope::Insert(BadExpr* x)  { return Insert(x->object()); }
bool Scope::Insert(Field* x)  { return Insert(x->object()); }
bool Scope::Insert(Intrinsic* x)  { return Insert(x->object()); }
bool Scope::Insert(Literal* x)  { return Insert(x->object()); }
bool Scope::Insert(TypeName* x)  { return Insert(x->object()); }
bool Scope::Insert(VarDecl* x)  { return Insert(x->object()); }
void Scope::InsertOrDie(BadExpr* x)  { InsertOrDie(x->object()); }
void Scope::InsertOrDie(Field* x)  { InsertOrDie(x->object()); }
void Scope::InsertOrDie(Intrinsic* x)  { InsertOrDie(x->object()); }
void Scope::InsertOrDie(Literal* x)  { InsertOrDie(x->object()); }
void Scope::InsertOrDie(TypeName* x)  { InsertOrDie(x->object()); }
void Scope::InsertOrDie(VarDecl* x)  { InsertOrDie(x->object()); }
} // namespace sawzall
| xushiwei/szl | src/engine/scope.cc | C++ | bsd-3-clause | 6,221 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/chromeos/login/version_info_updater.h"
#include <vector>
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/chromeos/chromeos_version.h"
#include "base/string_util.h"
#include "base/stringprintf.h"
#include "base/utf_string_conversions.h"
#include "chrome/browser/browser_process.h"
#include "chrome/browser/chromeos/settings/cros_settings.h"
#include "chrome/browser/chromeos/settings/cros_settings_names.h"
#include "chrome/browser/policy/browser_policy_connector.h"
#include "chrome/browser/policy/device_cloud_policy_manager_chromeos.h"
#include "chrome/common/chrome_notification_types.h"
#include "chrome/common/chrome_version_info.h"
#include "grit/chromium_strings.h"
#include "grit/generated_resources.h"
#include "grit/theme_resources.h"
#include "ui/base/l10n/l10n_util.h"
#include "ui/base/resource/resource_bundle.h"
namespace chromeos {
namespace {
// Device-status reporting settings watched for changes; a change to any
// of these triggers an enterprise-info refresh (see Observe()).
const char* kReportingFlags[] = {
  chromeos::kReportDeviceVersionInfo,
  chromeos::kReportDeviceActivityTimes,
  chromeos::kReportDeviceBootMode,
  chromeos::kReportDeviceLocation,
};
}
///////////////////////////////////////////////////////////////////////////////
// VersionInfoUpdater public:
// The delegate receives label/enterprise-info updates; it may be NULL.
VersionInfoUpdater::VersionInfoUpdater(Delegate* delegate)
    : cros_settings_(chromeos::CrosSettings::Get()),
      delegate_(delegate),
      ALLOW_THIS_IN_INITIALIZER_LIST(weak_pointer_factory_(this)) {
}
// Unregister every observer added in StartUpdate() so no callbacks fire
// after destruction.
VersionInfoUpdater::~VersionInfoUpdater() {
  policy::DeviceCloudPolicyManagerChromeOS* policy_manager =
      g_browser_process->browser_policy_connector()->
          GetDeviceCloudPolicyManager();
  if (policy_manager)
    policy_manager->core()->store()->RemoveObserver(this);
  for (unsigned int i = 0; i < arraysize(kReportingFlags); ++i)
    cros_settings_->RemoveSettingsObserver(kReportingFlags[i], this);
}
// Kick off the async version/boot-time loads and subscribe to policy and
// settings changes that affect the enterprise info line.
void VersionInfoUpdater::StartUpdate(bool is_official_build) {
  if (base::chromeos::IsRunningOnChromeOS()) {
    // Official builds show a short version string; dev builds the full one.
    version_loader_.GetVersion(
        is_official_build ? VersionLoader::VERSION_SHORT_WITH_DATE
                          : VersionLoader::VERSION_FULL,
        base::Bind(&VersionInfoUpdater::OnVersion,
                   weak_pointer_factory_.GetWeakPtr()),
        &tracker_);
    boot_times_loader_.GetBootTimes(
        base::Bind(is_official_build ? &VersionInfoUpdater::OnBootTimesNoop
                                     : &VersionInfoUpdater::OnBootTimes,
                   weak_pointer_factory_.GetWeakPtr()),
        &tracker_);
  } else {
    UpdateVersionLabel();
  }
  policy::CloudPolicySubsystem* cloud_policy =
      g_browser_process->browser_policy_connector()->
          device_cloud_policy_subsystem();
  if (cloud_policy) {
    // Two-step reset because we want to construct new ObserverRegistrar after
    // destruction of old ObserverRegistrar to avoid DCHECK violation because
    // of adding existing observer.
    cloud_policy_registrar_.reset();
    cloud_policy_registrar_.reset(
        new policy::CloudPolicySubsystem::ObserverRegistrar(
            cloud_policy, this));
    // Ensure that we have up-to-date enterprise info in case enterprise policy
    // is already fetched and has finished initialization.
    UpdateEnterpriseInfo();
  }
  policy::DeviceCloudPolicyManagerChromeOS* policy_manager =
      g_browser_process->browser_policy_connector()->
          GetDeviceCloudPolicyManager();
  if (policy_manager) {
    policy_manager->core()->store()->AddObserver(this);
    // Ensure that we have up-to-date enterprise info in case enterprise policy
    // is already fetched and has finished initialization.
    UpdateEnterpriseInfo();
  }
  // Watch for changes to the reporting flags.
  for (unsigned int i = 0; i < arraysize(kReportingFlags); ++i)
    cros_settings_->AddSettingsObserver(kReportingFlags[i], this);
}
// Format the localized version label and push it to the delegate.
// No-op until the async version load has delivered version_text_.
void VersionInfoUpdater::UpdateVersionLabel() {
  if (version_text_.empty())
    return;
  chrome::VersionInfo version_info;
  std::string label_text = l10n_util::GetStringFUTF8(
      IDS_LOGIN_VERSION_LABEL_FORMAT,
      l10n_util::GetStringUTF16(IDS_PRODUCT_NAME),
      UTF8ToUTF16(version_info.Version()),
      UTF8ToUTF16(version_text_));
  // Workaround over incorrect width calculation in old fonts.
  // TODO(glotov): remove the following line when new fonts are used.
  label_text += ' ';
  if (delegate_)
    delegate_->OnOSVersionLabelTextUpdated(label_text);
}
// Refresh the enterprise info line from the current policy connector state.
void VersionInfoUpdater::UpdateEnterpriseInfo() {
  SetEnterpriseInfo(
      g_browser_process->browser_policy_connector()->GetEnterpriseDomain());
}
// Store the enrollment domain and, when it changed and is non-empty,
// notify the delegate with the localized "owned by" notice.
void VersionInfoUpdater::SetEnterpriseInfo(const std::string& domain_name) {
  if (domain_name != enterprise_domain_text_) {
    enterprise_domain_text_ = domain_name;
    UpdateVersionLabel();
    // Update the notification about device status reporting.
    if (delegate_) {
      std::string enterprise_info;
      if (!domain_name.empty()) {
        enterprise_info = l10n_util::GetStringFUTF8(
            IDS_DEVICE_OWNED_BY_NOTICE,
            UTF8ToUTF16(domain_name));
        delegate_->OnEnterpriseInfoUpdated(enterprise_info);
      }
    }
  }
}
// Callback from the version loader: stores the fetched version string and
// refreshes the on-screen label.
void VersionInfoUpdater::OnVersion(const std::string& version) {
  version_text_ = version;
  UpdateVersionLabel();
}
// Intentionally-empty boot-times callback, used when the caller does not
// want boot statistics displayed.
void VersionInfoUpdater::OnBootTimesNoop(
    const BootTimesLoader::BootTimes& boot_times) {}
// Formats the collected boot timings into a human-readable line and hands
// it to the delegate. Chooses between two formats depending on whether a
// Chrome startup time was recorded.
void VersionInfoUpdater::OnBootTimes(
    const BootTimesLoader::BootTimes& boot_times) {
  const char* kBootTimesNoChromeExec =
      "Non-firmware boot took %.2f seconds (kernel %.2fs, system %.2fs)";
  const char* kBootTimesChromeExec =
      "Non-firmware boot took %.2f seconds "
      "(kernel %.2fs, system %.2fs, chrome %.2fs)";
  std::string boot_times_text;
  // A positive chrome time means the Chrome-exec phase was measured too.
  if (boot_times.chrome > 0) {
    boot_times_text =
        base::StringPrintf(
            kBootTimesChromeExec,
            boot_times.total,
            boot_times.pre_startup,
            boot_times.system,
            boot_times.chrome);
  } else {
    boot_times_text =
        base::StringPrintf(
            kBootTimesNoChromeExec,
            boot_times.total,
            boot_times.pre_startup,
            boot_times.system);
  }
  // Use UTF8ToWide once this string is localized.
  if (delegate_)
    delegate_->OnBootTimesLabelTextUpdated(boot_times_text);
}
// Policy-subsystem observer: any policy state transition may change the
// enrollment domain, so refresh the enterprise info.
void VersionInfoUpdater::OnPolicyStateChanged(
    policy::CloudPolicySubsystem::PolicySubsystemState state,
    policy::CloudPolicySubsystem::ErrorDetails error_details) {
  UpdateEnterpriseInfo();
}

// CloudPolicyStore observer: refresh after a successful policy load.
void VersionInfoUpdater::OnStoreLoaded(policy::CloudPolicyStore* store) {
  UpdateEnterpriseInfo();
}

// CloudPolicyStore observer: refresh after a failed policy load as well,
// so the UI reflects the current (possibly cleared) state.
void VersionInfoUpdater::OnStoreError(policy::CloudPolicyStore* store) {
  UpdateEnterpriseInfo();
}
// Notification hook for CrosSettings changes (the reporting flags observed
// during initialization); anything else is a programming error.
void VersionInfoUpdater::Observe(
    int type,
    const content::NotificationSource& source,
    const content::NotificationDetails& details) {
  if (type == chrome::NOTIFICATION_SYSTEM_SETTING_CHANGED)
    UpdateEnterpriseInfo();
  else
    NOTREACHED();
}
} // namespace chromeos
| leiferikb/bitpop-private | chrome/browser/chromeos/login/version_info_updater.cc | C++ | bsd-3-clause | 7,139 |
package org.lemurproject.galago.core.util;
import org.lemurproject.galago.core.retrieval.iterator.BaseIterator;
import org.lemurproject.galago.core.retrieval.traversal.Traversal;
import org.lemurproject.galago.utility.Parameters;
import java.util.ArrayList;
import java.util.List;
/**
* @author jfoley.
*/
/**
 * Helpers for registering custom operators and traversals with a
 * Retrieval's parameters.
 *
 * @author jfoley.
 */
public class IterUtils {
  /**
   * Registers an operator class under the given name (e.g. "combine" for
   * #combine), creating the "operators" map on demand.
   *
   * @param p the parameters object
   * @param name the name of the operator
   * @param iterClass the operator class to register
   */
  public static void addToParameters(Parameters p, String name, Class<? extends BaseIterator> iterClass) {
    if (!p.containsKey("operators")) {
      p.put("operators", Parameters.create());
    }
    Parameters operators = p.getMap("operators");
    operators.put(name, iterClass.getName());
  }

  /**
   * Registers a traversal class to run "before" the built-in traversals,
   * creating the "traversals" list on demand.
   *
   * @param argp the parameters object
   * @param traversalClass the traversal class to register
   */
  public static void addToParameters(Parameters argp, Class<? extends Traversal> traversalClass) {
    if (!argp.isList("traversals")) {
      argp.put("traversals", new ArrayList<>());
    }
    List<Parameters> registered = argp.getList("traversals", Parameters.class);
    Parameters entry = Parameters.parseArray(
        "name", traversalClass.getName(),
        "order", "before");
    registered.add(entry);
    argp.put("traversals", registered);
  }
}
| hzhao/galago-git | core/src/main/java/org/lemurproject/galago/core/util/IterUtils.java | Java | bsd-3-clause | 1,424 |
from pybrain.rl.environments.timeseries.maximizereturntask import DifferentialSharpeRatioTask
from pybrain.rl.environments.timeseries.timeseries import AR1Environment, SnPEnvironment
from pybrain.rl.learners.valuebased.linearfa import Q_LinFA
from pybrain.rl.agents.linearfa import LinearFA_Agent
from pybrain.rl.experiments import ContinuousExperiment
from matplotlib import pyplot
"""
This script aims to create a trading model that trades on a simple AR(1) process
"""
# --- Experiment setup -------------------------------------------------------
# AR1Environment produces a 2000-step AR(1) time series; the task rewards the
# differential Sharpe ratio of the agent's trading decisions on it.
env=AR1Environment(2000)
task=DifferentialSharpeRatioTask(env)
# Linear function-approximation Q-learner: 2 state features, 1 action.
learner = Q_LinFA(2,1)
agent = LinearFA_Agent(learner)
# ContinuousExperiment interleaves acting and learning on a single stream.
exp = ContinuousExperiment(task,agent)
from decimal import Decimal  # NOTE(review): unused import -- candidate for removal
ts=env.ts.tolist()
# Run 1999 combined act/learn steps (one fewer than the series length).
exp.doInteractionsAndLearn(1999)
actionHist=env.actionHistory
# Plot the underlying series together with the actions the agent took.
pyplot.plot(ts[0])
pyplot.plot(actionHist)
pyplot.show()
#snp_rets=env.importSnP().tolist()[0]
#print(snp_rets.tolist()[0])
#pyplot.plot(snp_rets)
#pyplot.show()
#cumret= cumsum(multiply(ts,actionHist))
#exp.doInteractions(200)
| samstern/MSc-Project | pybrain/rl/environments/timeseries/test programs/ar1TestScript.py | Python | bsd-3-clause | 976 |
using Basic.Azure.Storage.Communications.Core;
using Basic.Azure.Storage.Communications.Core.Interfaces;
using Basic.Azure.Storage.Communications.TableService.Interfaces;
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Text;
using System.Threading.Tasks;
namespace Basic.Azure.Storage.Communications.TableService.EntityOperations
{
/// <summary>
/// Request that deletes a single table entity identified by table name,
/// partition key and row key. An empty ETag is translated to "*", which
/// the Table Service treats as an unconditional delete.
/// </summary>
public class DeleteEntityRequest : RequestBase<EmptyResponsePayload>,
                                   ISendAdditionalRequiredHeaders
{
    private string _tableName;
    private string _partitionKey;
    private string _rowKey;
    private string _etag;

    public DeleteEntityRequest(StorageAccountSettings settings, string tableName, string partitionKey, string rowKey, string etag)
        : base(settings)
    {
        _tableName = tableName;
        _partitionKey = partitionKey;
        _rowKey = rowKey;
        // "*" matches any ETag, i.e. delete regardless of concurrency state.
        _etag = string.IsNullOrEmpty(etag) ? "*" : etag;
    }

    protected override string HttpMethod { get { return "DELETE"; } }

    protected override StorageServiceType ServiceType { get { return StorageServiceType.TableService; } }

    /// <summary>
    /// Builds the entity address: table(PartitionKey='pk',RowKey='rk').
    /// </summary>
    protected override RequestUriBuilder GetUriBase()
    {
        var builder = new RequestUriBuilder(Settings.TableEndpoint);
        var entitySegment = String.Format("{0}(PartitionKey='{1}',RowKey='{2}')", _tableName, _partitionKey, _rowKey);
        builder.AddSegment(entitySegment);
        return builder;
    }

    /// <summary>
    /// Adds the JSON content-type/accept headers and the If-Match ETag
    /// required by the table service for entity deletion.
    /// </summary>
    public void ApplyAdditionalRequiredHeaders(System.Net.WebRequest request)
    {
        request.ContentType = "application/json;charset=utf-8";
        var httpRequest = request as HttpWebRequest;
        if (httpRequest != null)
        {
            // HttpWebRequest exposes Accept as a typed property; the raw
            // header collection rejects it on that type.
            httpRequest.Accept = "application/json";
        }
        else
        {
            request.Headers.Add(ProtocolConstants.Headers.Accept, "application/json");
        }
        request.Headers.Add(ProtocolConstants.Headers.IfMatch, _etag);
    }
}
}
| tarwn/BasicAzureStorageSDK | Basic.Azure.Storage/Communications/TableService/EntityOperations/DeleteEntityRequest.cs | C# | bsd-3-clause | 2,132 |
require 'rubygems'
# The helper class exists to do string manipulation and heavy lifting
# HTML-escapes every value in the hash in place, then converts the app's
# lightweight meta-markup tokens (*- -*, [== ==], [-- --], [~~ ~~]) into
# pseudo-XML elements. CRLF line breaks become <paragraph> elements; a few
# named long-text fields are wrapped in a single <paragraph> even without
# line breaks. Returns the mutated hash.
# NOTE(review): assumes values are non-nil strings; CGI::escapeHTML would
# raise on nil before the `if v` guard is reached -- confirm callers.
def url_escape_hash(hash)
  hash.each do |k,v|
    v = CGI::escapeHTML(v)
    if v
      # convert bullets
      v = v.gsub("*-","<bullet>")
      v = v.gsub("-*","</bullet>")
      #convert h4
      v = v.gsub("[==","<h4>")
      v = v.gsub("==]","</h4>")
      #convert indent text
      v = v.gsub("[--","<indented>")
      v = v.gsub("--]","</indented>")
      #convert italics text
      v = v.gsub("[~~","<italics>")
      v = v.gsub("~~]","</italics>")
    end
    # replace linebreaks with paragraph xml elements
    if v =~ /\r\n/
      new_v = ""
      brs = v.split("\r\n")
      brs.each do |br|
        new_v << "<paragraph>"
        new_v << br
        new_v << "</paragraph>"
      end
      v = new_v
    elsif k == "remediation" or k == "overview" or k == "poc" or k == "affected_hosts"
      new_v = "<paragraph>#{v}</paragraph>"
      v = new_v
    end
    hash[k] = v
  end
  return hash
end
# Converts stored pseudo-XML back into the editor's meta-markup tokens:
# <paragraph> becomes a newline, </paragraph> is dropped, and the bullet,
# h4, code, indented and italics tag pairs become their token forms.
def meta_markup(text)
  replacements = {
    "<paragraph>" => "\n", "</paragraph>" => "",
    "<bullet>"    => "*-",  "</bullet>"   => "-*",
    "<h4>"        => "[==", "</h4>"       => "==]",
    "<code>"      => "[[[", "</code>"     => "]]]",
    "<indented>"  => "[--", "</indented>" => "--]",
    "<italics>"   => "[~~", "</italics>"  => "~~]"
  }
  replacements.reduce(text) { |acc, (tag, token)| acc.gsub(tag, token) }
end
# URL escaping messes up the inserted XML, this method switches it back to XML elements
# Re-inserts XML elements into URL/HTML-escaped findings markup and swaps
# the <<CUSTOMER>> placeholder for the real customer name, then undoes a
# handful of HTML entities for readability.
# NOTE(review): several gsub pairs below appear to map a string to itself;
# in the original file these are almost certainly entity-escaped forms
# (e.g. "&lt;paragraph&gt;" -> "<paragraph>") that were decoded by the
# provenance of this copy -- verify against the upstream file before
# editing them.
def meta_markup_unencode(findings_xml, customer_name)
  # code tags get added in later
  findings_xml = findings_xml.gsub("[[[","<code>")
  findings_xml = findings_xml.gsub("]]]","</code>")
  # creates paragraphs
  findings_xml = findings_xml.gsub("<paragraph>","<paragraph>")
  findings_xml = findings_xml.gsub("</paragraph>","</paragraph>")
  # same for the bullets
  findings_xml = findings_xml.gsub("<bullet>","<bullet>")
  findings_xml = findings_xml.gsub("</bullet>","</bullet>")
  # same for the h4
  findings_xml = findings_xml.gsub("<h4>","<h4>")
  findings_xml = findings_xml.gsub("</h4>","</h4>")
  # same for the code markings
  findings_xml = findings_xml.gsub("<code>","<code>")
  findings_xml = findings_xml.gsub("</code>","</code>")
  # same for the indented text
  findings_xml = findings_xml.gsub("<indented>","<indented>")
  findings_xml = findings_xml.gsub("</indented>","</indented>")
  # same for the italics text
  findings_xml = findings_xml.gsub("<italics>","<italics>")
  findings_xml = findings_xml.gsub("</italics>","</italics>")
  # changes the <<CUSTOMER>> marks
  if customer_name
    findings_xml = findings_xml.gsub("&lt;&lt;CUSTOMER&gt;&gt;","#{customer_name}")
  end
  #this is for re-upping the comment fields
  findings_xml = findings_xml.gsub("<modified>","<modified>")
  findings_xml = findings_xml.gsub("</modified>","</modified>")
  findings_xml = findings_xml.gsub("<new_finding>","<new_finding>")
  findings_xml = findings_xml.gsub("</new_finding>","</new_finding>")
  # these are for beautification
  findings_xml = findings_xml.gsub("&quot;","\"")
  findings_xml = findings_xml.gsub("&","&")
  findings_xml = findings_xml.gsub("&lt;","<").gsub("&gt;",">")
  return findings_xml
end
# Tags new_text relative to the master finding text: prefixes a
# <new_finding/> marker when there is no master text, a <modified/> marker
# when the two differ, and returns new_text untouched when they match.
def compare_text(new_text, orig_text)
  # No master finding at all -> this must be a brand-new finding.
  return "<new_finding></new_finding>#{new_text}" if orig_text == nil
  # Identical to the master copy -> no marker needed.
  return new_text if new_text == orig_text
  "<modified></modified>#{new_text}"
end
# CVSS helper, there is a lot of hardcoded stuff
# Computes CVSS v2 base, temporal and environmental scores from a hash of
# metric names (string values, case-insensitive) and stores the formatted
# results back into the same hash under "cvss_*" keys. Returns the hash.
# The numeric weights below are the CVSS v2 specification constants.
def cvss(data)
  # Pull each metric out of the hash, normalized to lowercase.
  av = data["av"].downcase
  ac = data["ac"].downcase
  au = data["au"].downcase
  c = data["c"].downcase
  i = data["i"].downcase
  a = data["a"].downcase
  e = data["e"].downcase
  rl = data["rl"].downcase
  rc = data["rc"].downcase
  cdp = data["cdp"].downcase
  td = data["td"].downcase
  cr = data["cr"].downcase
  ir = data["ir"].downcase
  ar = data["ar"].downcase
  # --- Base metric weights ---
  # Access Complexity
  if ac == "high"
    cvss_ac = 0.35
  elsif ac == "medium"
    cvss_ac = 0.61
  else
    cvss_ac = 0.71
  end
  # Authentication
  if au == "none"
    cvss_au = 0.704
  elsif au == "single"
    cvss_au = 0.56
  else
    cvss_au = 0.45
  end
  # Access Vector (anything else, e.g. "network", scores 1)
  if av == "local"
    cvss_av = 0.395
  elsif av == "local network"
    cvss_av = 0.646
  else
    cvss_av = 1
  end
  # Confidentiality impact
  if c == "none"
    cvss_c = 0
  elsif c == "partial"
    cvss_c = 0.275
  else
    cvss_c = 0.660
  end
  # Integrity impact ("00" is just the integer 0)
  if i == "none"
    cvss_i = 00
  elsif i == "partial"
    cvss_i = 0.275
  else
    cvss_i = 0.660
  end
  # Availability impact
  if a == "none"
    cvss_a = 0
  elsif a == "partial"
    cvss_a = 0.275
  else
    cvss_a = 0.660
  end
  # temporal score calculations
  # Exploitability maturity
  if e == "unproven exploit exists"
    cvss_e = 0.85
  elsif e == "proof-of-concept code"
    cvss_e = 0.90
  elsif e == "functional exploit exists"
    cvss_e = 0.95
  else
    cvss_e = 1
  end
  # Remediation Level
  if rl == "official fix"
    cvss_rl = 0.87
  elsif rl == "temporary fix"
    cvss_rl = 0.90
  elsif rl == "workaround"
    cvss_rl = 0.95
  else
    cvss_rl = 1
  end
  # Report Confidence
  if rc == "unconfirmed"
    cvss_rc = 0.90
  elsif rc == "uncorroborated"
    cvss_rc = 0.95
  else
    cvss_rc = 1
  end
  #environemental
  # Collateral Damage Potential (unrecognized values score 0)
  if cdp == "low"
    cvss_cdp = 0.1
  elsif cdp == "low-medium"
    cvss_cdp = 0.3
  elsif cdp == "medium-high"
    cvss_cdp = 0.4
  elsif cdp == "high"
    cvss_cdp = 0.5
  else
    cvss_cdp = 0
  end
  # Target Distribution
  if td == "none"
    cvss_td = 0
  elsif td == "low"
    cvss_td = 0.25
  elsif td == "medium"
    cvss_td = 0.75
  else
    cvss_td = 1
  end
  # Confidentiality / Integrity / Availability Requirements
  if cr == "low"
    cvss_cr = 0.5
  elsif cr == "high"
    cvss_cr = 1.51
  else
    cvss_cr = 1
  end
  if ir == "low"
    cvss_ir = 0.5
  elsif ir == "high"
    cvss_ir = 1.51
  else
    cvss_ir = 1
  end
  if ar == "low"
    cvss_ar = 0.5
  elsif ar == "high"
    cvss_ar = 1.51
  else
    cvss_ar = 1
  end
  # --- CVSS v2 equations ---
  cvss_impact = 10.41 * (1 - (1 - cvss_c) * (1 - cvss_i) * (1 - cvss_a))
  cvss_exploitability = 20 * cvss_ac * cvss_au * cvss_av
  # f(Impact): 0 when impact is 0, otherwise the 1.176 fudge factor.
  if cvss_impact == 0
    cvss_impact_f = 0
  else
    cvss_impact_f = 1.176
  end
  cvss_base = (0.6*cvss_impact + 0.4*cvss_exploitability-1.5)*cvss_impact_f
  cvss_temporal = cvss_base * cvss_e * cvss_rl * cvss_rc
  # AdjustedImpact is capped at 10 per the spec.
  cvss_modified_impact = [10, 10.41 * (1 - (1 - cvss_c * cvss_cr) * (1 - cvss_i * cvss_ir) * (1 - cvss_a * cvss_ar))].min
  if cvss_modified_impact == 0
    cvss_modified_impact_f = 0
  else
    cvss_modified_impact_f = 1.176
  end
  cvss_modified_base = (0.6*cvss_modified_impact + 0.4*cvss_exploitability-1.5)*cvss_modified_impact_f
  cvss_adjusted_temporal = cvss_modified_base * cvss_e * cvss_rl * cvss_rc
  cvss_environmental = (cvss_adjusted_temporal + (10 - cvss_adjusted_temporal) * cvss_cdp) * cvss_td
  # NOTE(review): in Ruby a numeric 0.0 is truthy, so this first branch is
  # always taken and cvss_total always equals the environmental score --
  # confirm whether the fallbacks were meant to trigger on a zero score.
  if cvss_environmental
    cvss_total = cvss_environmental
  elsif cvss_temporal
    cvss_total = cvss_temporal
  else
    cvss_total = cvss_base
  end
  # Store one-decimal formatted scores back into the hash.
  # ("%0.1f" % x) already formats; the outer sprintf is then a pass-through.
  data["cvss_base"] = sprintf("%0.1f" % cvss_base)
  data["cvss_impact"] = sprintf("%0.1f" % cvss_impact)
  data["cvss_exploitability"] = sprintf("%0.1f" % cvss_exploitability)
  data["cvss_temporal"] = sprintf("%0.1f" % cvss_temporal)
  data["cvss_environmental"] = sprintf("%0.1f" % cvss_environmental)
  data["cvss_modified_impact"] = sprintf("%0.1f" % cvss_modified_impact)
  data["cvss_total"] = sprintf("%0.1f" % cvss_total)
  return data
end
# there are three scoring types; risk, dread and cvss
# this sets a score for all three in case the user switches later
# Ensures a finding carries all three score types (CVSS, DREAD, risk) so
# the user can switch scoring schemes later; any missing score defaults
# to 0. Returns the (mutated) finding.
def convert_score(finding)
  if(finding.cvss_total == nil)
    puts "|!| No CVSS score exists"
    finding.cvss_total = 0
  end
  if(finding.dread_total == nil)
    # Fixed copy-paste bug: this branch previously logged "No CVSS score".
    puts "|!| No DREAD score exists"
    finding.dread_total = 0
  end
  if(finding.risk == nil)
    # Fixed copy-paste bug: this branch previously logged "No CVSS score".
    puts "|!| No Risk score exists"
    finding.risk = 0
  end
  return finding
end
| ccammilleri/Serpico | helpers/helper.rb | Ruby | bsd-3-clause | 7,803 |
/*
* Copyright (C) Research In Motion Limited 2011. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this library; see the file COPYING.LIB. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "config.h"
#include "core/svg/SVGFEDropShadowElement.h"
#include "SVGNames.h"
#include "core/rendering/style/RenderStyle.h"
#include "core/rendering/style/SVGRenderStyle.h"
#include "core/svg/SVGElementInstance.h"
#include "core/svg/SVGParserUtilities.h"
#include "core/svg/graphics/filters/SVGFilterBuilder.h"
namespace WebCore {

// Animated property definitions
// These macros generate the animated-property plumbing for <feDropShadow>:
// the "in" input reference, the dx/dy shadow offsets, and the two blur
// components, which are synthetic sub-properties sharing the single
// "stdDeviation" attribute.
DEFINE_ANIMATED_STRING(SVGFEDropShadowElement, SVGNames::inAttr, In1, in1)
DEFINE_ANIMATED_NUMBER(SVGFEDropShadowElement, SVGNames::dxAttr, Dx, dx)
DEFINE_ANIMATED_NUMBER(SVGFEDropShadowElement, SVGNames::dyAttr, Dy, dy)
DEFINE_ANIMATED_NUMBER_MULTIPLE_WRAPPERS(SVGFEDropShadowElement, SVGNames::stdDeviationAttr, stdDeviationXIdentifier(), StdDeviationX, stdDeviationX)
DEFINE_ANIMATED_NUMBER_MULTIPLE_WRAPPERS(SVGFEDropShadowElement, SVGNames::stdDeviationAttr, stdDeviationYIdentifier(), StdDeviationY, stdDeviationY)

// Registers the properties above with the animation system, including the
// ones inherited from SVGFilterPrimitiveStandardAttributes.
BEGIN_REGISTER_ANIMATED_PROPERTIES(SVGFEDropShadowElement)
    REGISTER_LOCAL_ANIMATED_PROPERTY(in1)
    REGISTER_LOCAL_ANIMATED_PROPERTY(dx)
    REGISTER_LOCAL_ANIMATED_PROPERTY(dy)
    REGISTER_LOCAL_ANIMATED_PROPERTY(stdDeviationX)
    REGISTER_LOCAL_ANIMATED_PROPERTY(stdDeviationY)
    REGISTER_PARENT_ANIMATED_PROPERTIES(SVGFilterPrimitiveStandardAttributes)
END_REGISTER_ANIMATED_PROPERTIES
// Constructor: initializes offsets and blur radii to the attribute
// defaults (dx = dy = 2, stdDeviation = 2) and registers the animated
// properties declared above.
inline SVGFEDropShadowElement::SVGFEDropShadowElement(const QualifiedName& tagName, Document* document)
    : SVGFilterPrimitiveStandardAttributes(tagName, document)
    , m_dx(2)
    , m_dy(2)
    , m_stdDeviationX(2)
    , m_stdDeviationY(2)
{
    ASSERT(hasTagName(SVGNames::feDropShadowTag));
    ScriptWrappable::init(this);
    registerAnimatedPropertiesForSVGFEDropShadowElement();
}

// Factory used by the element registry.
PassRefPtr<SVGFEDropShadowElement> SVGFEDropShadowElement::create(const QualifiedName& tagName, Document* document)
{
    return adoptRef(new SVGFEDropShadowElement(tagName, document));
}
// Key used to register the synthetic X blur sub-property; both sub-
// properties map onto the single "stdDeviation" attribute.
const AtomicString& SVGFEDropShadowElement::stdDeviationXIdentifier()
{
    DEFINE_STATIC_LOCAL(AtomicString, s_identifier, ("SVGStdDeviationX", AtomicString::ConstructFromLiteral));
    return s_identifier;
}

// Key used to register the synthetic Y blur sub-property.
const AtomicString& SVGFEDropShadowElement::stdDeviationYIdentifier()
{
    DEFINE_STATIC_LOCAL(AtomicString, s_identifier, ("SVGStdDeviationY", AtomicString::ConstructFromLiteral));
    return s_identifier;
}

// Programmatic setter exposed to the DOM API; updates both base values and
// invalidates the primitive so the effect is rebuilt.
void SVGFEDropShadowElement::setStdDeviation(float x, float y)
{
    setStdDeviationXBaseValue(x);
    setStdDeviationYBaseValue(y);
    invalidate();
}
// Returns true for the attributes this element handles itself ("in", "dx",
// "dy", "stdDeviation"); everything else is forwarded to the base class.
bool SVGFEDropShadowElement::isSupportedAttribute(const QualifiedName& attrName)
{
    // Lazily populated set, built once on first use.
    DEFINE_STATIC_LOCAL(HashSet<QualifiedName>, supportedAttributes, ());
    if (supportedAttributes.isEmpty()) {
        supportedAttributes.add(SVGNames::inAttr);
        supportedAttributes.add(SVGNames::dxAttr);
        supportedAttributes.add(SVGNames::dyAttr);
        supportedAttributes.add(SVGNames::stdDeviationAttr);
    }
    return supportedAttributes.contains<SVGAttributeHashTranslator>(attrName);
}
// Parses a supported attribute's string value into the corresponding
// animated-property base value; unsupported attributes are delegated to
// SVGFilterPrimitiveStandardAttributes.
void SVGFEDropShadowElement::parseAttribute(const QualifiedName& name, const AtomicString& value)
{
    if (!isSupportedAttribute(name)) {
        SVGFilterPrimitiveStandardAttributes::parseAttribute(name, value);
        return;
    }
    if (name == SVGNames::stdDeviationAttr) {
        // "stdDeviation" may hold one number (applied to both axes) or two;
        // a malformed value leaves the current base values untouched.
        float x, y;
        if (parseNumberOptionalNumber(value, x, y)) {
            setStdDeviationXBaseValue(x);
            setStdDeviationYBaseValue(y);
        }
        return;
    }
    if (name == SVGNames::inAttr) {
        setIn1BaseValue(value);
        return;
    }
    if (name == SVGNames::dxAttr) {
        setDxBaseValue(value.toFloat());
        return;
    }
    if (name == SVGNames::dyAttr) {
        setDyBaseValue(value.toFloat());
        return;
    }
    ASSERT_NOT_REACHED();
}
// Reacts to a changed attribute by invalidating the filter primitive so
// the drop shadow is recomputed on the next filter build.
void SVGFEDropShadowElement::svgAttributeChanged(const QualifiedName& attrName)
{
    if (!isSupportedAttribute(attrName)) {
        SVGFilterPrimitiveStandardAttributes::svgAttributeChanged(attrName);
        return;
    }
    SVGElementInstance::InvalidationGuard invalidationGuard(this);
    if (attrName == SVGNames::inAttr
        || attrName == SVGNames::stdDeviationAttr
        || attrName == SVGNames::dxAttr
        || attrName == SVGNames::dyAttr) {
        invalidate();
        return;
    }
    ASSERT_NOT_REACHED();
}
// Builds the platform FEDropShadow effect for this element. Returns 0 when
// the element has no renderer, when either blur radius is negative (an
// error per the Filter Effects spec), or when the "in" input reference
// cannot be resolved by the filter builder.
PassRefPtr<FilterEffect> SVGFEDropShadowElement::build(SVGFilterBuilder* filterBuilder, Filter* filter)
{
    RenderObject* renderer = this->renderer();
    if (!renderer)
        return 0;
    if (stdDeviationX() < 0 || stdDeviationY() < 0)
        return 0;
    ASSERT(renderer->style());
    const SVGRenderStyle* svgStyle = renderer->style()->svgStyle();
    // Shadow color and opacity come from flood-color / flood-opacity,
    // the same style properties used by <feFlood>.
    Color color = svgStyle->floodColor();
    float opacity = svgStyle->floodOpacity();
    FilterEffect* input1 = filterBuilder->getEffectById(in1());
    if (!input1)
        return 0;
    RefPtr<FilterEffect> effect = FEDropShadow::create(filter, stdDeviationX(), stdDeviationY(), dx(), dy(), color, opacity);
    effect->inputEffects().append(input1);
    return effect.release();
}
}
| espadrine/opera | chromium/src/third_party/WebKit/Source/core/svg/SVGFEDropShadowElement.cpp | C++ | bsd-3-clause | 5,902 |
<?php
namespace modules\translations\controllers\console;
use modules\lang\models\Lang;
use modules\translations\models\MessageCategory;
use Yii;
use yii\console\Controller;
use yii\console\Exception;
use yii\helpers\FileHelper;
use modules\translations\models\Message;
use modules\translations\models\SourceMessage;
/**
 * Console controller that imports PHP translation files into the
 * database-backed i18n tables, and can flush all stored translations.
 */
class I18nController extends Controller
{
    /**
     * Imports translations found under $sourcePath.
     *
     * Expected layout: <sourcePath>/<language>/<category>.php, where each
     * file returns an array of sourceMessage => translation pairs. The
     * user is prompted to confirm the language and category derived from
     * each file's relative path.
     *
     * @param string $sourcePath directory to scan for *.php translation files
     * @throws Exception when the resolved path is not a valid directory
     */
    public function actionImport($sourcePath = null)
    {
        if (!$sourcePath) {
            $sourcePath = $this->prompt('Enter a source path');
        }
        $sourcePath = realpath(Yii::getAlias($sourcePath));
        if (!is_dir($sourcePath)) {
            throw new Exception('The source path ' . $sourcePath . ' is not a valid directory.');
        }
        $translationsFiles = FileHelper::findFiles($sourcePath, ['only' => ['*.php']]);
        foreach ($translationsFiles as $translationsFile) {
            // Strip the base path and extension, leaving "language/category".
            $relativePath = trim(str_replace([$sourcePath, '.php'], '', $translationsFile), '/,\\');
            $relativePath = FileHelper::normalizePath($relativePath, '/');
            $relativePath = explode('/', $relativePath, 2);
            if (count($relativePath) > 1) {
                $language = $this->prompt('Enter language.', ['default' => $relativePath[0]]);
                $category = $this->prompt('Enter category.', ['default' => $relativePath[1]]);
                $categoryId = $this->addCategory($category);
                // Use require (not require_once): require_once returns bool
                // true instead of the file's array if the path was already
                // included in this process, silently skipping translations.
                $translations = require $translationsFile;
                if (is_array($translations)) {
                    foreach ($translations as $sourceMessage => $translation) {
                        if (!empty($translation)) {
                            $sourceMessage = $this->getSourceMessage($categoryId, $sourceMessage);
                            $this->setTranslation($sourceMessage, $language, $translation);
                        }
                    }
                }
            }
        }
        echo PHP_EOL . 'Done.' . PHP_EOL;
    }

    /**
     * Returns the id of the named category, creating it if necessary.
     *
     * @param string $category category name
     * @return int|string category primary key
     */
    private function addCategory($category){
        $catModel = MessageCategory::find()->where(['name'=>$category])->one();
        if(!$catModel){
            $catModel = new MessageCategory();
            $catModel->name = $category;
            $catModel->save();
        }
        return $catModel->id;
    }

    /**
     * Finds or creates the SourceMessage row for (category, message).
     *
     * @param int|string $category category primary key
     * @param string $message source message text
     * @return SourceMessage
     */
    private function getSourceMessage($category, $message)
    {
        $params = [
            'category_id' => $category,
            'message' => $message
        ];
        $sourceMessage = SourceMessage::find()
            ->where($params)
            ->with('messages')
            ->one();
        if (!$sourceMessage) {
            $sourceMessage = new SourceMessage;
            $sourceMessage->setAttributes($params, false);
            $sourceMessage->save(false);
        }
        return $sourceMessage;
    }

    /**
     * Stores (or overwrites) the translation of a source message for the
     * given language. Languages unknown to the Lang table are skipped.
     *
     * @param SourceMessage $sourceMessage
     * @param string $language language url code, e.g. "en"
     * @param string $translation translated text
     */
    private function setTranslation($sourceMessage, $language, $translation)
    {
        /** @var Message[] $messages */
        $messages = $sourceMessage->messages;
        $lang = Lang::find()->where(['url'=>$language])->one();
        if(!$lang)
            return;
        $langId = $lang->id;
        if (isset($messages[$langId]) /*&& $messages[$langId]->translation === null*/) {
            // Existing row: overwrite the translation unconditionally.
            $messages[$langId]->translation = $translation;
            $messages[$langId]->save(false);
        } elseif (!isset($messages[$langId])) {
            $message = new Message;
            $message->setAttributes([
                'lang_id' => $langId,
                'translation' => $translation
            ], false);
            $sourceMessage->link('messages', $message);
        }
    }

    /**
     * Deletes every stored translation and source message.
     */
    public function actionFlush()
    {
        $tableNames = [
            Message::tableName(),
            SourceMessage::tableName()
        ];
        $db = Yii::$app->getDb();
        foreach ($tableNames as $tableName) {
            $db->createCommand()
                ->delete($tableName)
                ->execute();
        }
        echo PHP_EOL . 'Done.' . PHP_EOL;
    }
}
#include <CGAL/Epick_d.h>
#include <CGAL/point_generators_d.h>
#include <CGAL/Manhattan_distance_iso_box_point.h>
#include <CGAL/K_neighbor_search.h>
#include <CGAL/Search_traits_d.h>
// Exact-predicates inexact-constructions kernel fixed to 4 dimensions.
typedef CGAL::Epick_d<CGAL::Dimension_tag<4> > Kernel;
typedef Kernel::Point_d Point_d;
// Generator of uniformly random points inside a 4D cube.
typedef CGAL::Random_points_in_cube_d<Point_d> Random_points_iterator;
typedef Kernel::Iso_box_d Iso_box_d;
typedef Kernel TreeTraits;
// Distance between a query iso-box and a point, under the L1 metric.
typedef CGAL::Manhattan_distance_iso_box_point<TreeTraits> Distance;
typedef CGAL::K_neighbor_search<TreeTraits, Distance> Neighbor_search;
typedef Neighbor_search::Tree Tree;

// Demonstrates approximate *furthest*-neighbor search (nearest=false) from
// an iso-box query against a kd-tree of random 4D points.
int main() {
  const int N = 1000;        // number of random points inserted into the tree
  const unsigned int K = 10; // NOTE(review): used only in the printed text;
                             // the search below actually requests 5 neighbors
  Tree tree;
  // Points drawn uniformly from the cube [-1000, 1000]^4.
  Random_points_iterator rpit(4,1000.0);
  for(int i = 0; i < N; i++){
    tree.insert(*rpit++);
  }
  Point_d pp(0.1,0.1,0.1,0.1);
  Point_d qq(0.2,0.2,0.2,0.2);
  Iso_box_d query(pp,qq);
  Distance tr_dist;
  Neighbor_search N1(tree, query, 5, 10.0, false); // eps=10.0, nearest=false
  std::cout << "For query rectangle = [0.1, 0.2]^4 " << std::endl
            << "the " << K << " approximate furthest neighbors are: " << std::endl;
  for (Neighbor_search::iterator it = N1.begin();it != N1.end();it++) {
    // Distances are stored transformed; convert back before printing.
    std::cout << " Point " << it->first << " at distance " << tr_dist.inverse_of_transformed_distance(it->second) << std::endl;
  }
  return 0;
}
| hlzz/dotfiles | graphics/cgal/Spatial_searching/examples/Spatial_searching/general_neighbor_searching.cpp | C++ | bsd-3-clause | 1,316 |
/*L
* Copyright RTI International
*
* Distributed under the OSI-approved BSD 3-Clause License.
* See http://ncip.github.com/webgenome/LICENSE.txt for details.
*/
/*
$Revision: 1.1 $
$Date: 2007-08-22 20:03:57 $
*/
package org.rti.webgenome.webui.struts.upload;
import javax.servlet.http.HttpServletRequest;
import org.apache.struts.action.ActionError;
import org.apache.struts.action.ActionErrors;
import org.apache.struts.action.ActionMapping;
import org.rti.webgenome.util.SystemUtils;
import org.rti.webgenome.webui.struts.BaseForm;
/**
* Form for inputting the name of a rectangular file
* column that contains reporter names.
* @author dhall
*
*/
/**
 * Struts form for capturing the heading of the rectangular-file column
 * that contains reporter names.
 * @author dhall
 */
public class ReporterColumnNameForm extends BaseForm {

    /** Serialized version ID. */
    private static final long serialVersionUID =
        SystemUtils.getLongApplicationProperty("serial.version.uid");

    /** Heading of the column containing reporter names. */
    private String reporterColumnName = null;

    /**
     * Get name of column containing reporter names.
     * @return Column heading.
     */
    public String getReporterColumnName() {
        return reporterColumnName;
    }

    /**
     * Set name of column containing reporter names.
     * @param reporterColumnName Column heading.
     */
    public void setReporterColumnName(final String reporterColumnName) {
        this.reporterColumnName = reporterColumnName;
    }

    /**
     * Validate that a non-empty column name was supplied.
     * {@inheritDoc}
     */
    @Override
    public ActionErrors validate(final ActionMapping mapping,
            final HttpServletRequest request) {
        final ActionErrors errors = new ActionErrors();
        final boolean blank = this.reporterColumnName == null
            || this.reporterColumnName.length() < 1;
        if (blank) {
            errors.add("reporterColumnName", new ActionError("invalid.field"));
        }
        // Add the global marker whenever any field-level error was recorded.
        if (errors.size() > 0) {
            errors.add("global", new ActionError("invalid.fields"));
        }
        return errors;
    }
}
| NCIP/webgenome | tags/WEBGENOME_R3.2_6MAR2009_BUILD1/java/webui/src/org/rti/webgenome/webui/struts/upload/ReporterColumnNameForm.java | Java | bsd-3-clause | 1,844 |
package main
import (
"github.com/kataras/iris/v12"
"github.com/kataras/iris/v12/middleware/requestid"
"github.com/kataras/golog"
)
// main configures the application logger to emit indented JSON at debug
// level, registers the request-id middleware, logs two sample structured
// records, and serves GET /ping on :8080.
func main() {
	app := iris.New()
	app.Logger().SetLevel("debug")
	// Second argument is the JSON indent string.
	app.Logger().SetFormat("json", "    ")
	// to register a custom Formatter:
	// app.Logger().RegisterFormatter(golog.Formatter...)
	// Also, see app.Logger().SetLevelOutput(level string, w io.Writer)
	// to set a custom writer for a specific level.
	app.Use(requestid.New())
	/* Example Output:
	{
	    "timestamp": 1591422944,
	    "level": "debug",
	    "message": "This is a message with data",
	    "fields": {
	        "username": "kataras"
	    },
	    "stacktrace": [
	        {
	            "function": "main.main",
	            "source": "C:/mygopath/src/github.com/kataras/iris/_examples/logging/json-logger/main.go:16"
	        }
	    ]
	}
	*/
	app.Logger().Debugf("This is a %s with data (debug prints the stacktrace too)", "message", golog.Fields{
		"username": "kataras",
	})
	/* Example Output:
	{
	    "timestamp": 1591422944,
	    "level": "info",
	    "message": "An info message",
	    "fields": {
	        "home": "https://iris-go.com"
	    }
	}
	*/
	app.Logger().Infof("An info message", golog.Fields{"home": "https://iris-go.com"})
	app.Get("/ping", ping)
	// Navigate to http://localhost:8080/ping.
	app.Listen(":8080" /*, iris.WithoutBanner*/)
}
// ping handles GET /ping: it logs the request path together with the
// request id injected by the requestid middleware, then replies "pong".
func ping(ctx iris.Context) {
	/* Example Output:
	{
	    "timestamp": 1591423046,
	    "level": "debug",
	    "message": "Request path: /ping",
	    "fields": {
	        "request_id": "fc12d88a-a338-4bb9-aa5e-126f2104365c"
	    },
	    "stacktrace": [
	        {
	            "function": "main.ping",
	            "source": "C:/mygopath/src/github.com/kataras/iris/_examples/logging/json-logger/main.go:82"
	        },
	        ...
	    ]
	}
	*/
	ctx.Application().Logger().Debugf("Request path: %s", ctx.Path(), golog.Fields{
		"request_id": ctx.GetID(),
	})
	ctx.WriteString("pong")
}
| kataras/iris | _examples/logging/json-logger/main.go | GO | bsd-3-clause | 1,968 |
from django.core.management.base import BaseCommand
import amo
from mkt.webapps.models import AddonPremium
class Command(BaseCommand):
    help = 'Clean up existing AddonPremium objects for free apps.'

    def handle(self, *args, **options):
        """Delete every AddonPremium row whose addon is a free app."""
        free_premiums = AddonPremium.objects.filter(
            addon__premium_type__in=amo.ADDON_FREES)
        free_premiums.delete()
| andymckay/zamboni | mkt/developers/management/commands/cleanup_addon_premium.py | Python | bsd-3-clause | 364 |